diff --git a/.codespellrc b/.codespellrc index eefde42a4e8..da831d8957e 100644 --- a/.codespellrc +++ b/.codespellrc @@ -1,6 +1,6 @@ [codespell] # Ref: https://github.com/codespell-project/codespell#using-a-config-file -skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl +skip = .git*,vendor,*-lock.yaml,*.lock,.codespellrc,*test.ts,*.jsonl,frame*.txt check-hidden = true ignore-regex = ^\s*"image/\S+": ".*|\b(afterAll)\b ignore-words-list = ratatui,ser diff --git a/.github/dotslash-config.json b/.github/dotslash-config.json index 8566e03490c..5e28cdf2072 100644 --- a/.github/dotslash-config.json +++ b/.github/dotslash-config.json @@ -27,6 +27,34 @@ "path": "codex.exe" } } + }, + "codex-responses-api-proxy": { + "platforms": { + "macos-aarch64": { + "regex": "^codex-responses-api-proxy-aarch64-apple-darwin\\.zst$", + "path": "codex-responses-api-proxy" + }, + "macos-x86_64": { + "regex": "^codex-responses-api-proxy-x86_64-apple-darwin\\.zst$", + "path": "codex-responses-api-proxy" + }, + "linux-x86_64": { + "regex": "^codex-responses-api-proxy-x86_64-unknown-linux-musl\\.zst$", + "path": "codex-responses-api-proxy" + }, + "linux-aarch64": { + "regex": "^codex-responses-api-proxy-aarch64-unknown-linux-musl\\.zst$", + "path": "codex-responses-api-proxy" + }, + "windows-x86_64": { + "regex": "^codex-responses-api-proxy-x86_64-pc-windows-msvc\\.exe\\.zst$", + "path": "codex-responses-api-proxy.exe" + }, + "windows-aarch64": { + "regex": "^codex-responses-api-proxy-aarch64-pc-windows-msvc\\.exe\\.zst$", + "path": "codex-responses-api-proxy.exe" + } + } } } } diff --git a/.github/prompts/issue-deduplicator.txt b/.github/prompts/issue-deduplicator.txt new file mode 100644 index 00000000000..9bdd3af9ef7 --- /dev/null +++ b/.github/prompts/issue-deduplicator.txt @@ -0,0 +1,18 @@ +You are an assistant that triages new GitHub issues by identifying potential duplicates. + +You will receive the following JSON files located in the current working directory: +- `codex-current-issue.json`: JSON object describing the newly created issue (fields: number, title, body). +- `codex-existing-issues.json`: JSON array of recent issues (each element includes number, title, body, createdAt). + +Instructions: +- Load both files as JSON and review their contents carefully. The codex-existing-issues.json file is large, ensure you explore all of it. +- Compare the current issue against the existing issues to find up to five that appear to describe the same underlying problem or request. +- Only consider an issue a potential duplicate if there is a clear overlap in symptoms, feature requests, reproduction steps, or error messages. +- Prioritize newer issues when similarity is comparable. +- Ignore pull requests and issues whose similarity is tenuous. +- When unsure, prefer returning fewer matches. + +Output requirements: +- Respond with a JSON array of issue numbers (integers), ordered from most likely duplicate to least. +- Include at most five numbers. +- If you find no plausible duplicates, respond with `[]`. diff --git a/.github/prompts/issue-labeler.txt b/.github/prompts/issue-labeler.txt new file mode 100644 index 00000000000..7e880d307fe --- /dev/null +++ b/.github/prompts/issue-labeler.txt @@ -0,0 +1,26 @@ +You are an assistant that reviews GitHub issues for the repository. + +Your job is to choose the most appropriate existing labels for the issue described later in this prompt. +Follow these rules: +- Only pick labels out of the list below. 
+- Prefer a small set of precise labels over many broad ones. +- If none of the labels fit, respond with an empty JSON array: [] +- Output must be a JSON array of label names (strings) with no additional commentary. + +Labels to apply: +1. bug — Reproducible defects in Codex products (CLI, VS Code extension, web, auth). +2. enhancement — Feature requests or usability improvements that ask for new capabilities, better ergonomics, or quality-of-life tweaks. +3. extension — VS Code (or other IDE) extension-specific issues. +4. windows-os — Bugs or friction specific to Windows environments (PowerShell behavior, path handling, copy/paste, OS-specific auth or tooling failures). +5. mcp — Topics involving Model Context Protocol servers/clients. +6. codex-web — Issues targeting the Codex web UI/Cloud experience. +8. azure — Problems or requests tied to Azure OpenAI deployments. +9. documentation — Updates or corrections needed in docs/README/config references (broken links, missing examples, outdated keys, clarification requests). +10. model-behavior — Undesirable LLM behavior: forgetting goals, refusing work, hallucinating environment details, quota misreports, or other reasoning/performance anomalies. + +Issue information is available in environment variables: + +ISSUE_NUMBER +ISSUE_TITLE +ISSUE_BODY +REPO_FULL_NAME diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4dfd7596d1b..c0e41666001 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,7 +1,7 @@ name: ci on: - pull_request: { branches: [main] } + pull_request: {} push: { branches: [main] } jobs: @@ -27,12 +27,29 @@ jobs: - name: Install dependencies run: pnpm install --frozen-lockfile - # Run all tasks using workspace filters + # stage_npm_packages.py requires DotSlash when staging releases. + - uses: facebook/install-dotslash@v2 - - name: Ensure staging a release works. 
+ - name: Stage npm package + id: stage_npm_package env: GH_TOKEN: ${{ github.token }} - run: ./codex-cli/scripts/stage_release.sh + run: | + set -euo pipefail + CODEX_VERSION=0.40.0 + OUTPUT_DIR="${RUNNER_TEMP}" + python3 ./scripts/stage_npm_packages.py \ + --release-version "$CODEX_VERSION" \ + --package codex \ + --output-dir "$OUTPUT_DIR" + PACK_OUTPUT="${OUTPUT_DIR}/codex-npm-${CODEX_VERSION}.tgz" + echo "pack_output=$PACK_OUTPUT" >> "$GITHUB_OUTPUT" + + - name: Upload staged npm package artifact + uses: actions/upload-artifact@v4 + with: + name: codex-npm-staging + path: ${{ steps.stage_npm_package.outputs.pack_output }} - name: Ensure root README.md contains only ASCII and certain Unicode code points run: ./scripts/asciicheck.py README.md diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index ebf55649b82..ad16769471b 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -22,7 +22,6 @@ jobs: - name: Annotate locations with typos uses: codespell-project/codespell-problem-matcher@b80729f885d32f78a716c2f107b4db1025001c42 # v1 - name: Codespell - uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2 + uses: codespell-project/actions-codespell@406322ec52dd7b488e48c1c4b82e2a8b3a1bf630 # v2.1 with: ignore_words_file: .codespellignore - skip: frame*.txt diff --git a/.github/workflows/generate-vhs-gifs.yml b/.github/workflows/generate-vhs-gifs.yml new file mode 100644 index 00000000000..d4c693cf5cd --- /dev/null +++ b/.github/workflows/generate-vhs-gifs.yml @@ -0,0 +1,196 @@ +name: Render VHS GIFs + +on: + workflow_dispatch: + push: + paths: + - 'docs/tapes/**' + - '.github/workflows/generate-vhs-gifs.yml' + pull_request: + paths: + - 'docs/tapes/**' + - '.github/workflows/generate-vhs-gifs.yml' + +jobs: + render-gifs: + runs-on: ubuntu-latest + timeout-minutes: 15 + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Ensure GIF output directory exists + run: mkdir -p docs/gifs + + - name: Setup FFmpeg + uses: FedericoCarboni/setup-ffmpeg@v3 + + - run: sudo apt-get update && sudo apt-get install -y ttyd + + - uses: actions/setup-go@v6 + with: + go-version: '1.25' + - run: go install github.com/charmbracelet/vhs@v0.10.0 + - run: $(go env GOPATH)/bin/vhs ./docs/tapes/*.tape + + - name: Upload GIFs artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: vhs-gifs + path: docs/gifs + if-no-files-found: warn + + comment-on-pr: + needs: render-gifs + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + steps: + - name: Download GIFs artifact + id: download-artifacts + uses: actions/download-artifact@v4 + with: + name: vhs-gifs + path: gifs + continue-on-error: true + + - name: Build PR comment with GIFs + env: + DOWNLOAD_OUTCOME: ${{ steps.download-artifacts.outcome }} + run: | + set -euo pipefail + mkdir -p gifs + shopt -s nullglob + gifs=(gifs/*.gif) + { + echo "" + echo "## Rendered VHS GIFs" + echo + if [ "${DOWNLOAD_OUTCOME}" != "success" ]; then + echo "No GIF artifacts were available for this run." + elif [ "${#gifs[@]}" -eq 0 ]; then + echo "No GIFs were generated." + else + for gif in "${gifs[@]}"; do + filename=$(basename "${gif}") + base64_data=$(base64 -w0 "${gif}") + echo "
" + echo "${filename}" + echo + echo "![${filename}](data:image/gif;base64,${base64_data})" + echo + echo "
" + echo + done + fi + } > vhs_comment.md + + - name: Find existing VHS comment + id: find-vhs-comment + uses: peter-evans/find-comment@v3 + with: + issue-number: ${{ github.event.pull_request.number }} + comment-author: 'github-actions[bot]' + body-includes: '' + + - name: Comment on PR with GIFs + uses: peter-evans/create-or-update-comment@v5 + with: + issue-number: ${{ github.event.pull_request.number }} + comment-id: ${{ steps.find-vhs-comment.outputs.comment-id }} + body-path: vhs_comment.md + + comment-on-commit: + needs: render-gifs + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' || github.event_name == 'push' + steps: + - name: Download GIFs artifact + id: download-artifacts + uses: actions/download-artifact@v4 + with: + name: vhs-gifs + path: gifs + continue-on-error: true + + - name: Build commit comment with GIFs + env: + DOWNLOAD_OUTCOME: ${{ steps.download-artifacts.outcome }} + run: | + set -euo pipefail + mkdir -p gifs + shopt -s nullglob + gifs=(gifs/*.gif) + { + echo "" + echo "## Rendered VHS GIFs" + echo + if [ "${DOWNLOAD_OUTCOME}" != "success" ]; then + echo "No GIF artifacts were available for this run." + elif [ "${#gifs[@]}" -eq 0 ]; then + echo "No GIFs were generated." + else + for gif in "${gifs[@]}"; do + filename=$(basename "${gif}") + base64_data=$(base64 -w0 "${gif}") + echo "
" + echo "${filename}" + echo + echo "![${filename}](data:image/gif;base64,${base64_data})" + echo + echo "
" + echo + done + fi + } > vhs_comment.md + + - name: Determine commit SHA + id: commit-sha + run: | + set -euo pipefail + if [ "${GITHUB_EVENT_NAME}" = "pull_request" ] && [ -n "${PULL_REQUEST_SHA}" ]; then + echo "sha=${PULL_REQUEST_SHA}" >> "$GITHUB_OUTPUT" + else + echo "sha=${GITHUB_SHA}" >> "$GITHUB_OUTPUT" + fi + env: + GITHUB_EVENT_NAME: ${{ github.event_name }} + PULL_REQUEST_SHA: ${{ github.event.pull_request.head.sha || '' }} + GITHUB_SHA: ${{ github.sha }} + + - name: Find existing commit comment + id: find-commit-comment + run: | + set -euo pipefail + comment_id="" + if [ -n "${COMMIT_SHA}" ]; then + comment_id=$(gh api repos/${GITHUB_REPOSITORY}/commits/${COMMIT_SHA}/comments \ + --jq 'map(select(.body | contains(""))) | .[0].id // empty' || true) + fi + if [ -n "${comment_id}" ]; then + echo "comment-id=${comment_id}" >> "$GITHUB_OUTPUT" + fi + env: + COMMIT_SHA: ${{ steps.commit-sha.outputs.sha }} + GITHUB_REPOSITORY: ${{ github.repository }} + GH_TOKEN: ${{ github.token }} + + - name: Upsert commit comment with GIFs + if: steps.find-commit-comment.outputs.comment-id != '' + uses: peter-evans/create-or-update-comment@v5 + with: + token: ${{ github.token }} + comment-id: ${{ steps.find-commit-comment.outputs.comment-id }} + body-path: vhs_comment.md + + - name: Create commit comment with GIFs + if: steps.find-commit-comment.outputs.comment-id == '' && steps.commit-sha.outputs.sha != '' + env: + COMMIT_SHA: ${{ steps.commit-sha.outputs.sha }} + GITHUB_REPOSITORY: ${{ github.repository }} + GH_TOKEN: ${{ github.token }} + run: | + set -euo pipefail + gh api repos/${GITHUB_REPOSITORY}/commits/${COMMIT_SHA}/comments \ + -X POST \ + -F body=@vhs_comment.md > /dev/null diff --git a/.github/workflows/issue-deduplicator.yml b/.github/workflows/issue-deduplicator.yml new file mode 100644 index 00000000000..779e739b20d --- /dev/null +++ b/.github/workflows/issue-deduplicator.yml @@ -0,0 +1,102 @@ +name: Issue Deduplicator + +on: + issues: + types: + - opened + - labeled + +jobs: + gather-duplicates: + name: Identify potential duplicates + if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate') }} + runs-on: ubuntu-latest + permissions: + contents: read + outputs: + codex_output: ${{ steps.codex.outputs.final_message }} + steps: + - uses: actions/checkout@v4 + + - name: Prepare Codex inputs + env: + GH_TOKEN: ${{ github.token }} + run: | + set -eo pipefail + + CURRENT_ISSUE_FILE=codex-current-issue.json + EXISTING_ISSUES_FILE=codex-existing-issues.json + + gh issue list --repo "${{ github.repository }}" \ + --json number,title,body,createdAt \ + --limit 1000 \ + --state all \ + --search "sort:created-desc" \ + | jq '.' \ + > "$EXISTING_ISSUES_FILE" + + gh issue view "${{ github.event.issue.number }}" \ + --repo "${{ github.repository }}" \ + --json number,title,body \ + | jq '.' 
\ + > "$CURRENT_ISSUE_FILE" + + - id: codex + uses: openai/codex-action@main + with: + openai_api_key: ${{ secrets.CODEX_OPENAI_API_KEY }} + prompt_file: .github/prompts/issue-deduplicator.txt + require_repo_write: false + codex_version: 0.43.0-alpha.16 + codex_args: -m gpt-5 + + comment-on-issue: + name: Comment with potential duplicates + needs: gather-duplicates + if: ${{ needs.gather-duplicates.result != 'skipped' }} + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + steps: + - name: Comment on issue + uses: actions/github-script@v7 + env: + CODEX_OUTPUT: ${{ needs.gather-duplicates.outputs.codex_output }} + with: + github-token: ${{ github.token }} + script: | + let numbers; + try { + numbers = JSON.parse(process.env.CODEX_OUTPUT); + } catch (error) { + core.info(`Codex output was not valid JSON. Raw output: ${raw}`); + return; + } + + if (numbers.length === 0) { + core.info('Codex reported no potential duplicates.'); + return; + } + + const lines = [ + 'Potential duplicates detected:' + ...numbers.map((value) => `- #${value}`), + '', + '*Powered by [Codex Action](https://github.com/openai/codex-action)*']; + + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.payload.issue.number, + body: lines.join("\n"), + }); + + - name: Remove codex-deduplicate label + if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-deduplicate' }} + env: + GH_TOKEN: ${{ github.token }} + GH_REPO: ${{ github.repository }} + run: | + gh issue edit "${{ github.event.issue.number }}" --remove-label codex-deduplicate || true + echo "Attempted to remove label: codex-deduplicate" diff --git a/.github/workflows/issue-labeler.yml b/.github/workflows/issue-labeler.yml new file mode 100644 index 00000000000..4b893d0c919 --- /dev/null +++ b/.github/workflows/issue-labeler.yml @@ -0,0 +1,78 @@ +name: Issue Labeler + +on: + issues: + types: + - opened + - labeled + +jobs: + gather-labels: + name: Generate label suggestions + if: ${{ github.event.action == 'opened' || (github.event.action == 'labeled' && github.event.label.name == 'codex-label') }} + runs-on: ubuntu-latest + permissions: + contents: read + env: + ISSUE_NUMBER: ${{ github.event.issue.number }} + ISSUE_TITLE: ${{ github.event.issue.title }} + ISSUE_BODY: ${{ github.event.issue.body }} + REPO_FULL_NAME: ${{ github.repository }} + outputs: + codex_output: ${{ steps.codex.outputs.final_message }} + steps: + - uses: actions/checkout@v4 + + - id: codex + uses: openai/codex-action@main + with: + openai_api_key: ${{ secrets.CODEX_OPENAI_API_KEY }} + prompt_file: .github/prompts/issue-labeler.txt + require_repo_write: false + codex_version: 0.43.0-alpha.16 + + apply-labels: + name: Apply labels from Codex output + needs: gather-labels + if: ${{ needs.gather-labels.result != 'skipped' }} + runs-on: ubuntu-latest + permissions: + contents: read + issues: write + env: + GH_TOKEN: ${{ github.token }} + GH_REPO: ${{ github.repository }} + ISSUE_NUMBER: ${{ github.event.issue.number }} + CODEX_OUTPUT: ${{ needs.gather-labels.outputs.codex_output }} + steps: + - name: Apply labels + run: | + json=${CODEX_OUTPUT//$'\r'/} + if [ -z "$json" ]; then + echo "Codex produced no output. Skipping label application." + exit 0 + fi + + if ! printf '%s' "$json" | jq -e 'type == "array"' >/dev/null 2>&1; then + echo "Codex output was not a JSON array. 
Raw output: $json" + exit 0 + fi + + labels=$(printf '%s' "$json" | jq -r '.[] | tostring') + if [ -z "$labels" ]; then + echo "Codex returned an empty array. Nothing to do." + exit 0 + fi + + cmd=(gh issue edit "$ISSUE_NUMBER") + while IFS= read -r label; do + cmd+=(--add-label "$label") + done <<< "$labels" + + "${cmd[@]}" || true + + - name: Remove codex-label trigger + if: ${{ always() && github.event.action == 'labeled' && github.event.label.name == 'codex-label' }} + run: | + gh issue edit "$ISSUE_NUMBER" --remove-label codex-label || true + echo "Attempted to remove label: codex-label" diff --git a/.github/workflows/rust-ci.yml b/.github/workflows/rust-ci.yml index 280939c611d..846408f3a80 100644 --- a/.github/workflows/rust-ci.yml +++ b/.github/workflows/rust-ci.yml @@ -57,7 +57,7 @@ jobs: working-directory: codex-rs steps: - uses: actions/checkout@v5 - - uses: dtolnay/rust-toolchain@1.89 + - uses: dtolnay/rust-toolchain@1.90 with: components: rustfmt - name: cargo fmt @@ -75,7 +75,7 @@ jobs: working-directory: codex-rs steps: - uses: actions/checkout@v5 - - uses: dtolnay/rust-toolchain@1.89 + - uses: dtolnay/rust-toolchain@1.90 - uses: taiki-e/install-action@0c5db7f7f897c03b771660e91d065338615679f4 # v2 with: tool: cargo-shear @@ -143,7 +143,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: dtolnay/rust-toolchain@1.89 + - uses: dtolnay/rust-toolchain@1.90 with: targets: ${{ matrix.target }} components: clippy diff --git a/.github/workflows/rust-release.yml b/.github/workflows/rust-release.yml index e3776f149c0..aebdef59488 100644 --- a/.github/workflows/rust-release.yml +++ b/.github/workflows/rust-release.yml @@ -77,7 +77,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: dtolnay/rust-toolchain@1.89 + - uses: dtolnay/rust-toolchain@1.90 with: targets: ${{ matrix.target }} @@ -97,7 +97,7 @@ jobs: sudo apt install -y musl-tools pkg-config - name: Cargo build - run: cargo build --target ${{ matrix.target }} --release --bin codex + run: cargo build --target ${{ matrix.target }} --release --bin codex --bin codex-responses-api-proxy - name: Stage artifacts shell: bash @@ -107,8 +107,10 @@ jobs: if [[ "${{ matrix.runner }}" == windows* ]]; then cp target/${{ matrix.target }}/release/codex.exe "$dest/codex-${{ matrix.target }}.exe" + cp target/${{ matrix.target }}/release/codex-responses-api-proxy.exe "$dest/codex-responses-api-proxy-${{ matrix.target }}.exe" else cp target/${{ matrix.target }}/release/codex "$dest/codex-${{ matrix.target }}" + cp target/${{ matrix.target }}/release/codex-responses-api-proxy "$dest/codex-responses-api-proxy-${{ matrix.target }}" fi - if: ${{ matrix.runner == 'windows-11-arm' }} @@ -173,6 +175,8 @@ jobs: outputs: version: ${{ steps.release_name.outputs.name }} tag: ${{ github.ref_name }} + should_publish_npm: ${{ steps.npm_publish_settings.outputs.should_publish }} + npm_tag: ${{ steps.npm_publish_settings.outputs.npm_tag }} steps: - name: Checkout repository @@ -193,21 +197,49 @@ jobs: version="${GITHUB_REF_NAME#rust-v}" echo "name=${version}" >> $GITHUB_OUTPUT - - name: Stage npm package + - name: Determine npm publish settings + id: npm_publish_settings env: - GH_TOKEN: ${{ github.token }} + VERSION: ${{ steps.release_name.outputs.name }} run: | set -euo pipefail - TMP_DIR="${RUNNER_TEMP}/npm-stage" - python3 codex-cli/scripts/stage_rust_release.py \ + version="${VERSION}" + + if [[ "${version}" =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then + echo "should_publish=true" >> "$GITHUB_OUTPUT" + echo "npm_tag=" >> "$GITHUB_OUTPUT" + elif [[ "${version}" 
=~ ^[0-9]+\.[0-9]+\.[0-9]+-alpha\.[0-9]+$ ]]; then + echo "should_publish=true" >> "$GITHUB_OUTPUT" + echo "npm_tag=alpha" >> "$GITHUB_OUTPUT" + else + echo "should_publish=false" >> "$GITHUB_OUTPUT" + echo "npm_tag=" >> "$GITHUB_OUTPUT" + fi + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Setup Node.js for npm packaging + uses: actions/setup-node@v5 + with: + node-version: 22 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + # stage_npm_packages.py requires DotSlash when staging releases. + - uses: facebook/install-dotslash@v2 + - name: Stage npm packages + env: + GH_TOKEN: ${{ github.token }} + run: | + ./scripts/stage_npm_packages.py \ --release-version "${{ steps.release_name.outputs.name }}" \ - --tmp "${TMP_DIR}" - mkdir -p dist/npm - # Produce an npm-ready tarball using `npm pack` and store it in dist/npm. - # We then rename it to a stable name used by our publishing script. - (cd "$TMP_DIR" && npm pack --pack-destination "${GITHUB_WORKSPACE}/dist/npm") - mv "${GITHUB_WORKSPACE}"/dist/npm/*.tgz \ - "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ steps.release_name.outputs.name }}.tgz" + --package codex \ + --package codex-responses-api-proxy \ + --package codex-sdk - name: Create GitHub Release uses: softprops/action-gh-release@v2 @@ -230,8 +262,8 @@ jobs: # July 31, 2025: https://github.blog/changelog/2025-07-31-npm-trusted-publishing-with-oidc-is-generally-available/ # npm docs: https://docs.npmjs.com/trusted-publishers publish-npm: - # Skip this step for pre-releases (alpha/beta). - if: ${{ !contains(needs.release.outputs.version, '-') }} + # Publish to npm for stable releases and alpha pre-releases with numeric suffixes. + if: ${{ needs.release.outputs.should_publish_npm == 'true' }} name: publish-npm needs: release runs-on: ubuntu-latest @@ -251,7 +283,7 @@ jobs: - name: Update npm run: npm install -g npm@latest - - name: Download npm tarball from release + - name: Download npm tarballs from release env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | @@ -263,10 +295,36 @@ jobs: --repo "${GITHUB_REPOSITORY}" \ --pattern "codex-npm-${version}.tgz" \ --dir dist/npm + gh release download "$tag" \ + --repo "${GITHUB_REPOSITORY}" \ + --pattern "codex-responses-api-proxy-npm-${version}.tgz" \ + --dir dist/npm + gh release download "$tag" \ + --repo "${GITHUB_REPOSITORY}" \ + --pattern "codex-sdk-npm-${version}.tgz" \ + --dir dist/npm # No NODE_AUTH_TOKEN needed because we use OIDC. 
- name: Publish to npm - run: npm publish "${GITHUB_WORKSPACE}/dist/npm/codex-npm-${{ needs.release.outputs.version }}.tgz" + env: + VERSION: ${{ needs.release.outputs.version }} + NPM_TAG: ${{ needs.release.outputs.npm_tag }} + run: | + set -euo pipefail + tag_args=() + if [[ -n "${NPM_TAG}" ]]; then + tag_args+=(--tag "${NPM_TAG}") + fi + + tarballs=( + "codex-npm-${VERSION}.tgz" + "codex-responses-api-proxy-npm-${VERSION}.tgz" + "codex-sdk-npm-${VERSION}.tgz" + ) + + for tarball in "${tarballs[@]}"; do + npm publish "${GITHUB_WORKSPACE}/dist/npm/${tarball}" "${tag_args[@]}" + done update-branch: name: Update latest-alpha-cli branch diff --git a/.github/workflows/sdk.yml b/.github/workflows/sdk.yml new file mode 100644 index 00000000000..0f3a7a194bc --- /dev/null +++ b/.github/workflows/sdk.yml @@ -0,0 +1,43 @@ +name: sdk + +on: + push: + branches: [main] + pull_request: {} + +jobs: + sdks: + runs-on: ubuntu-latest + timeout-minutes: 10 + steps: + - name: Checkout repository + uses: actions/checkout@v5 + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + run_install: false + + - name: Setup Node.js + uses: actions/setup-node@v5 + with: + node-version: 22 + cache: pnpm + + - uses: dtolnay/rust-toolchain@1.90 + + - name: build codex + run: cargo build --bin codex + working-directory: codex-rs + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Build SDK packages + run: pnpm -r --filter ./sdk/typescript run build + + - name: Lint SDK packages + run: pnpm -r --filter ./sdk/typescript run lint + + - name: Test SDK packages + run: pnpm -r --filter ./sdk/typescript run test diff --git a/README.md b/README.md index ab93ecad22e..0a57a608eb5 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,3 @@ -

 <h1 align="center">OpenAI Codex CLI</h1>
 
 <p align="center"><code>npm i -g @openai/codex</code><br />or <code>brew install codex</code></p>
@@ -84,6 +83,7 @@ Codex CLI supports a rich set of configuration options, with preferences stored - [**Authentication**](./docs/authentication.md) - [Auth methods](./docs/authentication.md#forcing-a-specific-auth-method-advanced) - [Login on a "Headless" machine](./docs/authentication.md#connecting-on-a-headless-machine) +- [**Non-interactive mode**](./docs/exec.md) - [**Advanced**](./docs/advanced.md) - [Non-interactive / CI mode](./docs/advanced.md#non-interactive--ci-mode) - [Tracing / verbose logging](./docs/advanced.md#tracing--verbose-logging) @@ -102,4 +102,3 @@ Codex CLI supports a rich set of configuration options, with preferences stored ## License This repository is licensed under the [Apache-2.0 License](LICENSE). - diff --git a/codex-cli/.gitignore b/codex-cli/.gitignore index f886e64f466..57872d0f1e5 100644 --- a/codex-cli/.gitignore +++ b/codex-cli/.gitignore @@ -1,7 +1 @@ -# Added by ./scripts/install_native_deps.sh -/bin/codex-aarch64-apple-darwin -/bin/codex-aarch64-unknown-linux-musl -/bin/codex-linux-sandbox-arm64 -/bin/codex-linux-sandbox-x64 -/bin/codex-x86_64-apple-darwin -/bin/codex-x86_64-unknown-linux-musl +/vendor/ diff --git a/codex-cli/README.md b/codex-cli/README.md index e988b384ab2..f3414f1c4be 100644 --- a/codex-cli/README.md +++ b/codex-cli/README.md @@ -208,7 +208,7 @@ The hardening mechanism Codex uses depends on your OS: | Requirement | Details | | --------------------------- | --------------------------------------------------------------- | | Operating systems | macOS 12+, Ubuntu 20.04+/Debian 10+, or Windows 11 **via WSL2** | -| Node.js | **22 or newer** (LTS recommended) | +| Node.js | **16 or newer** (Node 20 LTS recommended) | | Git (optional, recommended) | 2.23+ for built-in PR helpers | | RAM | 4-GB minimum (8-GB recommended) | @@ -513,7 +513,7 @@ Codex runs model-generated commands in a sandbox. If a proposed command or file
 <details>
 <summary>Does it work on Windows?</summary>
 
-Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex has been tested on macOS and Linux with Node 22.
+Not directly. It requires [Windows Subsystem for Linux (WSL2)](https://learn.microsoft.com/en-us/windows/wsl/install) - Codex is regularly tested on macOS and Linux with Node 20+, and also supports Node 16.
 
 </details>
diff --git a/codex-cli/bin/codex.js b/codex-cli/bin/codex.js index f24065170d8..17dd98a8e82 100755 --- a/codex-cli/bin/codex.js +++ b/codex-cli/bin/codex.js @@ -1,6 +1,8 @@ #!/usr/bin/env node // Unified entry point for the Codex CLI. +import { spawn } from "node:child_process"; +import { existsSync } from "fs"; import path from "path"; import { fileURLToPath } from "url"; @@ -40,10 +42,10 @@ switch (platform) { case "win32": switch (arch) { case "x64": - targetTriple = "x86_64-pc-windows-msvc.exe"; + targetTriple = "x86_64-pc-windows-msvc"; break; case "arm64": - targetTriple = "aarch64-pc-windows-msvc.exe"; + targetTriple = "aarch64-pc-windows-msvc"; break; default: break; @@ -57,31 +59,16 @@ if (!targetTriple) { throw new Error(`Unsupported platform: ${platform} (${arch})`); } -const binaryPath = path.join(__dirname, "..", "bin", `codex-${targetTriple}`); +const vendorRoot = path.join(__dirname, "..", "vendor"); +const archRoot = path.join(vendorRoot, targetTriple); +const codexBinaryName = process.platform === "win32" ? "codex.exe" : "codex"; +const binaryPath = path.join(archRoot, "codex", codexBinaryName); // Use an asynchronous spawn instead of spawnSync so that Node is able to // respond to signals (e.g. Ctrl-C / SIGINT) while the native binary is // executing. This allows us to forward those signals to the child process // and guarantees that when either the child terminates or the parent // receives a fatal signal, both processes exit in a predictable manner. -const { spawn } = await import("child_process"); - -async function tryImport(moduleName) { - try { - // eslint-disable-next-line node/no-unsupported-features/es-syntax - return await import(moduleName); - } catch (err) { - return null; - } -} - -async function resolveRgDir() { - const ripgrep = await tryImport("@vscode/ripgrep"); - if (!ripgrep?.rgPath) { - return null; - } - return path.dirname(ripgrep.rgPath); -} function getUpdatedPath(newDirs) { const pathSep = process.platform === "win32" ? 
";" : ":"; @@ -94,9 +81,9 @@ function getUpdatedPath(newDirs) { } const additionalDirs = []; -const rgDir = await resolveRgDir(); -if (rgDir) { - additionalDirs.push(rgDir); +const pathDir = path.join(archRoot, "path"); +if (existsSync(pathDir)) { + additionalDirs.push(pathDir); } const updatedPath = getUpdatedPath(additionalDirs); diff --git a/codex-cli/bin/rg b/codex-cli/bin/rg new file mode 100755 index 00000000000..5a992570a9a --- /dev/null +++ b/codex-cli/bin/rg @@ -0,0 +1,79 @@ +#!/usr/bin/env dotslash + +{ + "name": "rg", + "platforms": { + "macos-aarch64": { + "size": 1787248, + "hash": "blake3", + "digest": "8d9942032585ea8ee805937634238d9aee7b210069f4703c88fbe568e26fb78a", + "format": "tar.gz", + "path": "ripgrep-14.1.1-aarch64-apple-darwin/rg", + "providers": [ + { + "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-apple-darwin.tar.gz" + } + ] + }, + "linux-aarch64": { + "size": 2047405, + "hash": "blake3", + "digest": "0b670b8fa0a3df2762af2fc82cc4932f684ca4c02dbd1260d4f3133fd4b2a515", + "format": "tar.gz", + "path": "ripgrep-14.1.1-aarch64-unknown-linux-gnu/rg", + "providers": [ + { + "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-aarch64-unknown-linux-gnu.tar.gz" + } + ] + }, + "macos-x86_64": { + "size": 2082672, + "hash": "blake3", + "digest": "e9b862fc8da3127f92791f0ff6a799504154ca9d36c98bf3e60a81c6b1f7289e", + "format": "tar.gz", + "path": "ripgrep-14.1.1-x86_64-apple-darwin/rg", + "providers": [ + { + "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-apple-darwin.tar.gz" + } + ] + }, + "linux-x86_64": { + "size": 2566310, + "hash": "blake3", + "digest": "f73cca4e54d78c31f832c7f6e2c0b4db8b04fa3eaa747915727d570893dbee76", + "format": "tar.gz", + "path": "ripgrep-14.1.1-x86_64-unknown-linux-musl/rg", + "providers": [ + { + "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-unknown-linux-musl.tar.gz" + } + ] + }, + "windows-x86_64": { + "size": 2058893, + "hash": "blake3", + "digest": "a8ce1a6fed4f8093ee997e57f33254e94b2cd18e26358b09db599c89882eadbd", + "format": "zip", + "path": "ripgrep-14.1.1-x86_64-pc-windows-msvc/rg.exe", + "providers": [ + { + "url": "https://github.com/BurntSushi/ripgrep/releases/download/14.1.1/ripgrep-14.1.1-x86_64-pc-windows-msvc.zip" + } + ] + }, + "windows-aarch64": { + "size": 1667740, + "hash": "blake3", + "digest": "47b971a8c4fca1d23a4e7c19bd4d88465ebc395598458133139406d3bf85f3fa", + "format": "zip", + "path": "rg.exe", + "providers": [ + { + "url": "https://github.com/microsoft/ripgrep-prebuilt/releases/download/v13.0.0-13/ripgrep-v13.0.0-13-aarch64-pc-windows-msvc.zip" + } + ] + } + } +} diff --git a/codex-cli/package-lock.json b/codex-cli/package-lock.json index a1c840ade0e..58ee846306e 100644 --- a/codex-cli/package-lock.json +++ b/codex-cli/package-lock.json @@ -2,117 +2,16 @@ "name": "@openai/codex", "version": "0.0.0-dev", "lockfileVersion": 3, - "requires": true, "packages": { "": { "name": "@openai/codex", "version": "0.0.0-dev", "license": "Apache-2.0", - "dependencies": { - "@vscode/ripgrep": "^1.15.14" - }, "bin": { "codex": "bin/codex.js" }, "engines": { - "node": ">=20" - } - }, - "node_modules/@vscode/ripgrep": { - "version": "1.15.14", - "resolved": "https://registry.npmjs.org/@vscode/ripgrep/-/ripgrep-1.15.14.tgz", - "integrity": "sha512-/G1UJPYlm+trBWQ6cMO3sv6b8D1+G16WaJH1/DSqw32JOVlzgZbLkDxRyzIpTpv30AcYGMkCf5tUqGlW6HbDWw==", - "hasInstallScript": true, - 
"license": "MIT", - "dependencies": { - "https-proxy-agent": "^7.0.2", - "proxy-from-env": "^1.1.0", - "yauzl": "^2.9.2" - } - }, - "node_modules/agent-base": { - "version": "7.1.4", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", - "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==", - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "license": "MIT", - "engines": { - "node": "*" - } - }, - "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", - "license": "MIT", - "dependencies": { - "pend": "~1.2.0" - } - }, - "node_modules/https-proxy-agent": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", - "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" - }, - "node_modules/pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", - "license": "MIT" - }, - "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "license": "MIT" - }, - "node_modules/yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", - "license": "MIT", - "dependencies": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" + "node": ">=16" } } } diff --git a/codex-cli/package.json b/codex-cli/package.json index 02124f32576..b83309e42b6 100644 --- a/codex-cli/package.json +++ b/codex-cli/package.json @@ -7,21 +7,15 @@ }, "type": "module", "engines": { - "node": ">=20" + "node": ">=16" }, "files": [ "bin", - "dist" + "vendor" ], "repository": { "type": "git", "url": "git+https://github.com/openai/codex.git", "directory": "codex-cli" - }, - "dependencies": { - "@vscode/ripgrep": "^1.15.14" - }, - "devDependencies": { - "prettier": "^3.3.3" } } diff --git a/codex-cli/scripts/README.md 
b/codex-cli/scripts/README.md index 21e4f3e883b..052cf81a372 100644 --- a/codex-cli/scripts/README.md +++ b/codex-cli/scripts/README.md @@ -1,9 +1,19 @@ # npm releases -Run the following: - -To build the 0.2.x or later version of the npm module, which runs the Rust version of the CLI, build it as follows: +Use the staging helper in the repo root to generate npm tarballs for a release. For +example, to stage the CLI, responses proxy, and SDK packages for version `0.6.0`: ```bash -./codex-cli/scripts/stage_rust_release.py --release-version 0.6.0 +./scripts/stage_npm_packages.py \ + --release-version 0.6.0 \ + --package codex \ + --package codex-responses-api-proxy \ + --package codex-sdk ``` + +This downloads the native artifacts once, hydrates `vendor/` for each package, and writes +tarballs to `dist/npm/`. + +If you need to invoke `build_npm_package.py` directly, run +`codex-cli/scripts/install_native_deps.py` first and pass `--vendor-src` pointing to the +directory that contains the populated `vendor/` tree. diff --git a/codex-cli/scripts/build_npm_package.py b/codex-cli/scripts/build_npm_package.py new file mode 100755 index 00000000000..ef96bef2eef --- /dev/null +++ b/codex-cli/scripts/build_npm_package.py @@ -0,0 +1,308 @@ +#!/usr/bin/env python3 +"""Stage and optionally package the @openai/codex npm module.""" + +import argparse +import json +import shutil +import subprocess +import sys +import tempfile +from pathlib import Path + +SCRIPT_DIR = Path(__file__).resolve().parent +CODEX_CLI_ROOT = SCRIPT_DIR.parent +REPO_ROOT = CODEX_CLI_ROOT.parent +RESPONSES_API_PROXY_NPM_ROOT = REPO_ROOT / "codex-rs" / "responses-api-proxy" / "npm" +CODEX_SDK_ROOT = REPO_ROOT / "sdk" / "typescript" + +PACKAGE_NATIVE_COMPONENTS: dict[str, list[str]] = { + "codex": ["codex", "rg"], + "codex-responses-api-proxy": ["codex-responses-api-proxy"], + "codex-sdk": ["codex"], +} +COMPONENT_DEST_DIR: dict[str, str] = { + "codex": "codex", + "codex-responses-api-proxy": "codex-responses-api-proxy", + "rg": "path", +} + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Build or stage the Codex CLI npm package.") + parser.add_argument( + "--package", + choices=("codex", "codex-responses-api-proxy", "codex-sdk"), + default="codex", + help="Which npm package to stage (default: codex).", + ) + parser.add_argument( + "--version", + help="Version number to write to package.json inside the staged package.", + ) + parser.add_argument( + "--release-version", + help=( + "Version to stage for npm release." + ), + ) + parser.add_argument( + "--staging-dir", + type=Path, + help=( + "Directory to stage the package contents. Defaults to a new temporary directory " + "if omitted. The directory must be empty when provided." 
+ ), + ) + parser.add_argument( + "--tmp", + dest="staging_dir", + type=Path, + help=argparse.SUPPRESS, + ) + parser.add_argument( + "--pack-output", + type=Path, + help="Path where the generated npm tarball should be written.", + ) + parser.add_argument( + "--vendor-src", + type=Path, + help="Directory containing pre-installed native binaries to bundle (vendor root).", + ) + return parser.parse_args() + + +def main() -> int: + args = parse_args() + + package = args.package + version = args.version + release_version = args.release_version + if release_version: + if version and version != release_version: + raise RuntimeError("--version and --release-version must match when both are provided.") + version = release_version + + if not version: + raise RuntimeError("Must specify --version or --release-version.") + + staging_dir, created_temp = prepare_staging_dir(args.staging_dir) + + try: + stage_sources(staging_dir, version, package) + + vendor_src = args.vendor_src.resolve() if args.vendor_src else None + native_components = PACKAGE_NATIVE_COMPONENTS.get(package, []) + + if native_components: + if vendor_src is None: + components_str = ", ".join(native_components) + raise RuntimeError( + "Native components " + f"({components_str}) required for package '{package}'. Provide --vendor-src " + "pointing to a directory containing pre-installed binaries." + ) + + copy_native_binaries(vendor_src, staging_dir, native_components) + + if release_version: + staging_dir_str = str(staging_dir) + if package == "codex": + print( + f"Staged version {version} for release in {staging_dir_str}\n\n" + "Verify the CLI:\n" + f" node {staging_dir_str}/bin/codex.js --version\n" + f" node {staging_dir_str}/bin/codex.js --help\n\n" + ) + elif package == "codex-responses-api-proxy": + print( + f"Staged version {version} for release in {staging_dir_str}\n\n" + "Verify the responses API proxy:\n" + f" node {staging_dir_str}/bin/codex-responses-api-proxy.js --help\n\n" + ) + else: + print( + f"Staged version {version} for release in {staging_dir_str}\n\n" + "Verify the SDK contents:\n" + f" ls {staging_dir_str}/dist\n" + f" ls {staging_dir_str}/vendor\n" + " node -e \"import('./dist/index.js').then(() => console.log('ok'))\"\n\n" + ) + else: + print(f"Staged package in {staging_dir}") + + if args.pack_output is not None: + output_path = run_npm_pack(staging_dir, args.pack_output) + print(f"npm pack output written to {output_path}") + finally: + if created_temp: + # Preserve the staging directory for further inspection. 
+ pass + + return 0 + + +def prepare_staging_dir(staging_dir: Path | None) -> tuple[Path, bool]: + if staging_dir is not None: + staging_dir = staging_dir.resolve() + staging_dir.mkdir(parents=True, exist_ok=True) + if any(staging_dir.iterdir()): + raise RuntimeError(f"Staging directory {staging_dir} is not empty.") + return staging_dir, False + + temp_dir = Path(tempfile.mkdtemp(prefix="codex-npm-stage-")) + return temp_dir, True + + +def stage_sources(staging_dir: Path, version: str, package: str) -> None: + if package == "codex": + bin_dir = staging_dir / "bin" + bin_dir.mkdir(parents=True, exist_ok=True) + shutil.copy2(CODEX_CLI_ROOT / "bin" / "codex.js", bin_dir / "codex.js") + rg_manifest = CODEX_CLI_ROOT / "bin" / "rg" + if rg_manifest.exists(): + shutil.copy2(rg_manifest, bin_dir / "rg") + + readme_src = REPO_ROOT / "README.md" + if readme_src.exists(): + shutil.copy2(readme_src, staging_dir / "README.md") + + package_json_path = CODEX_CLI_ROOT / "package.json" + elif package == "codex-responses-api-proxy": + bin_dir = staging_dir / "bin" + bin_dir.mkdir(parents=True, exist_ok=True) + launcher_src = RESPONSES_API_PROXY_NPM_ROOT / "bin" / "codex-responses-api-proxy.js" + shutil.copy2(launcher_src, bin_dir / "codex-responses-api-proxy.js") + + readme_src = RESPONSES_API_PROXY_NPM_ROOT / "README.md" + if readme_src.exists(): + shutil.copy2(readme_src, staging_dir / "README.md") + + package_json_path = RESPONSES_API_PROXY_NPM_ROOT / "package.json" + elif package == "codex-sdk": + package_json_path = CODEX_SDK_ROOT / "package.json" + stage_codex_sdk_sources(staging_dir) + else: + raise RuntimeError(f"Unknown package '{package}'.") + + with open(package_json_path, "r", encoding="utf-8") as fh: + package_json = json.load(fh) + package_json["version"] = version + + if package == "codex-sdk": + scripts = package_json.get("scripts") + if isinstance(scripts, dict): + scripts.pop("prepare", None) + + files = package_json.get("files") + if isinstance(files, list): + if "vendor" not in files: + files.append("vendor") + else: + package_json["files"] = ["dist", "vendor"] + + with open(staging_dir / "package.json", "w", encoding="utf-8") as out: + json.dump(package_json, out, indent=2) + out.write("\n") + + +def run_command(cmd: list[str], cwd: Path | None = None) -> None: + print("+", " ".join(cmd)) + subprocess.run(cmd, cwd=cwd, check=True) + + +def stage_codex_sdk_sources(staging_dir: Path) -> None: + package_root = CODEX_SDK_ROOT + + run_command(["pnpm", "install", "--frozen-lockfile"], cwd=package_root) + run_command(["pnpm", "run", "build"], cwd=package_root) + + dist_src = package_root / "dist" + if not dist_src.exists(): + raise RuntimeError("codex-sdk build did not produce a dist directory.") + + shutil.copytree(dist_src, staging_dir / "dist") + + readme_src = package_root / "README.md" + if readme_src.exists(): + shutil.copy2(readme_src, staging_dir / "README.md") + + license_src = REPO_ROOT / "LICENSE" + if license_src.exists(): + shutil.copy2(license_src, staging_dir / "LICENSE") + + +def copy_native_binaries(vendor_src: Path, staging_dir: Path, components: list[str]) -> None: + vendor_src = vendor_src.resolve() + if not vendor_src.exists(): + raise RuntimeError(f"Vendor source directory not found: {vendor_src}") + + components_set = {component for component in components if component in COMPONENT_DEST_DIR} + if not components_set: + return + + vendor_dest = staging_dir / "vendor" + if vendor_dest.exists(): + shutil.rmtree(vendor_dest) + vendor_dest.mkdir(parents=True, exist_ok=True) + 
+ for target_dir in vendor_src.iterdir(): + if not target_dir.is_dir(): + continue + + dest_target_dir = vendor_dest / target_dir.name + dest_target_dir.mkdir(parents=True, exist_ok=True) + + for component in components_set: + dest_dir_name = COMPONENT_DEST_DIR.get(component) + if dest_dir_name is None: + continue + + src_component_dir = target_dir / dest_dir_name + if not src_component_dir.exists(): + raise RuntimeError( + f"Missing native component '{component}' in vendor source: {src_component_dir}" + ) + + dest_component_dir = dest_target_dir / dest_dir_name + if dest_component_dir.exists(): + shutil.rmtree(dest_component_dir) + shutil.copytree(src_component_dir, dest_component_dir) + + +def run_npm_pack(staging_dir: Path, output_path: Path) -> Path: + output_path = output_path.resolve() + output_path.parent.mkdir(parents=True, exist_ok=True) + + with tempfile.TemporaryDirectory(prefix="codex-npm-pack-") as pack_dir_str: + pack_dir = Path(pack_dir_str) + stdout = subprocess.check_output( + ["npm", "pack", "--json", "--pack-destination", str(pack_dir)], + cwd=staging_dir, + text=True, + ) + try: + pack_output = json.loads(stdout) + except json.JSONDecodeError as exc: + raise RuntimeError("Failed to parse npm pack output.") from exc + + if not pack_output: + raise RuntimeError("npm pack did not produce an output tarball.") + + tarball_name = pack_output[0].get("filename") or pack_output[0].get("name") + if not tarball_name: + raise RuntimeError("Unable to determine npm pack output filename.") + + tarball_path = pack_dir / tarball_name + if not tarball_path.exists(): + raise RuntimeError(f"Expected npm pack output not found: {tarball_path}") + + shutil.move(str(tarball_path), output_path) + + return output_path + + +if __name__ == "__main__": + import sys + + sys.exit(main()) diff --git a/codex-cli/scripts/install_native_deps.py b/codex-cli/scripts/install_native_deps.py new file mode 100755 index 00000000000..8d3909c9e13 --- /dev/null +++ b/codex-cli/scripts/install_native_deps.py @@ -0,0 +1,383 @@ +#!/usr/bin/env python3 +"""Install Codex native binaries (Rust CLI plus ripgrep helpers).""" + +import argparse +import json +import os +import shutil +import subprocess +import tarfile +import tempfile +import zipfile +from dataclasses import dataclass +from concurrent.futures import ThreadPoolExecutor, as_completed +from pathlib import Path +from typing import Iterable, Sequence +from urllib.parse import urlparse +from urllib.request import urlopen + +SCRIPT_DIR = Path(__file__).resolve().parent +CODEX_CLI_ROOT = SCRIPT_DIR.parent +DEFAULT_WORKFLOW_URL = "https://github.com/openai/codex/actions/runs/17952349351" # rust-v0.40.0 +VENDOR_DIR_NAME = "vendor" +RG_MANIFEST = CODEX_CLI_ROOT / "bin" / "rg" +BINARY_TARGETS = ( + "x86_64-unknown-linux-musl", + "aarch64-unknown-linux-musl", + "x86_64-apple-darwin", + "aarch64-apple-darwin", + "x86_64-pc-windows-msvc", + "aarch64-pc-windows-msvc", +) + + +@dataclass(frozen=True) +class BinaryComponent: + artifact_prefix: str # matches the artifact filename prefix (e.g. 
codex-.zst) + dest_dir: str # directory under vendor// where the binary is installed + binary_basename: str # executable name inside dest_dir (before optional .exe) + + +BINARY_COMPONENTS = { + "codex": BinaryComponent( + artifact_prefix="codex", + dest_dir="codex", + binary_basename="codex", + ), + "codex-responses-api-proxy": BinaryComponent( + artifact_prefix="codex-responses-api-proxy", + dest_dir="codex-responses-api-proxy", + binary_basename="codex-responses-api-proxy", + ), +} + +RG_TARGET_PLATFORM_PAIRS: list[tuple[str, str]] = [ + ("x86_64-unknown-linux-musl", "linux-x86_64"), + ("aarch64-unknown-linux-musl", "linux-aarch64"), + ("x86_64-apple-darwin", "macos-x86_64"), + ("aarch64-apple-darwin", "macos-aarch64"), + ("x86_64-pc-windows-msvc", "windows-x86_64"), + ("aarch64-pc-windows-msvc", "windows-aarch64"), +] +RG_TARGET_TO_PLATFORM = {target: platform for target, platform in RG_TARGET_PLATFORM_PAIRS} +DEFAULT_RG_TARGETS = [target for target, _ in RG_TARGET_PLATFORM_PAIRS] + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser(description="Install native Codex binaries.") + parser.add_argument( + "--workflow-url", + help=( + "GitHub Actions workflow URL that produced the artifacts. Defaults to a " + "known good run when omitted." + ), + ) + parser.add_argument( + "--component", + dest="components", + action="append", + choices=tuple(list(BINARY_COMPONENTS) + ["rg"]), + help=( + "Limit installation to the specified components." + " May be repeated. Defaults to 'codex' and 'rg'." + ), + ) + parser.add_argument( + "root", + nargs="?", + type=Path, + help=( + "Directory containing package.json for the staged package. If omitted, the " + "repository checkout is used." + ), + ) + return parser.parse_args() + + +def main() -> int: + args = parse_args() + + codex_cli_root = (args.root or CODEX_CLI_ROOT).resolve() + vendor_dir = codex_cli_root / VENDOR_DIR_NAME + vendor_dir.mkdir(parents=True, exist_ok=True) + + components = args.components or ["codex", "rg"] + + workflow_url = (args.workflow_url or DEFAULT_WORKFLOW_URL).strip() + if not workflow_url: + workflow_url = DEFAULT_WORKFLOW_URL + + workflow_id = workflow_url.rstrip("/").split("/")[-1] + print(f"Downloading native artifacts from workflow {workflow_id}...") + + with tempfile.TemporaryDirectory(prefix="codex-native-artifacts-") as artifacts_dir_str: + artifacts_dir = Path(artifacts_dir_str) + _download_artifacts(workflow_id, artifacts_dir) + install_binary_components( + artifacts_dir, + vendor_dir, + BINARY_TARGETS, + [name for name in components if name in BINARY_COMPONENTS], + ) + + if "rg" in components: + print("Fetching ripgrep binaries...") + fetch_rg(vendor_dir, DEFAULT_RG_TARGETS, manifest_path=RG_MANIFEST) + + print(f"Installed native dependencies into {vendor_dir}") + return 0 + + +def fetch_rg( + vendor_dir: Path, + targets: Sequence[str] | None = None, + *, + manifest_path: Path, +) -> list[Path]: + """Download ripgrep binaries described by the DotSlash manifest.""" + + if targets is None: + targets = DEFAULT_RG_TARGETS + + if not manifest_path.exists(): + raise FileNotFoundError(f"DotSlash manifest not found: {manifest_path}") + + manifest = _load_manifest(manifest_path) + platforms = manifest.get("platforms", {}) + + vendor_dir.mkdir(parents=True, exist_ok=True) + + targets = list(targets) + if not targets: + return [] + + task_configs: list[tuple[str, str, dict]] = [] + for target in targets: + platform_key = RG_TARGET_TO_PLATFORM.get(target) + if platform_key is None: + raise 
ValueError(f"Unsupported ripgrep target '{target}'.") + + platform_info = platforms.get(platform_key) + if platform_info is None: + raise RuntimeError(f"Platform '{platform_key}' not found in manifest {manifest_path}.") + + task_configs.append((target, platform_key, platform_info)) + + results: dict[str, Path] = {} + max_workers = min(len(task_configs), max(1, (os.cpu_count() or 1))) + + print("Installing ripgrep binaries for targets: " + ", ".join(targets)) + + with ThreadPoolExecutor(max_workers=max_workers) as executor: + future_map = { + executor.submit( + _fetch_single_rg, + vendor_dir, + target, + platform_key, + platform_info, + manifest_path, + ): target + for target, platform_key, platform_info in task_configs + } + + for future in as_completed(future_map): + target = future_map[future] + results[target] = future.result() + print(f" installed ripgrep for {target}") + + return [results[target] for target in targets] + + +def _download_artifacts(workflow_id: str, dest_dir: Path) -> None: + cmd = [ + "gh", + "run", + "download", + "--dir", + str(dest_dir), + "--repo", + "openai/codex", + workflow_id, + ] + subprocess.check_call(cmd) + + +def install_binary_components( + artifacts_dir: Path, + vendor_dir: Path, + targets: Iterable[str], + component_names: Sequence[str], +) -> None: + selected_components = [BINARY_COMPONENTS[name] for name in component_names if name in BINARY_COMPONENTS] + if not selected_components: + return + + targets = list(targets) + if not targets: + return + + for component in selected_components: + print( + f"Installing {component.binary_basename} binaries for targets: " + + ", ".join(targets) + ) + max_workers = min(len(targets), max(1, (os.cpu_count() or 1))) + with ThreadPoolExecutor(max_workers=max_workers) as executor: + futures = { + executor.submit( + _install_single_binary, + artifacts_dir, + vendor_dir, + target, + component, + ): target + for target in targets + } + for future in as_completed(futures): + installed_path = future.result() + print(f" installed {installed_path}") + + +def _install_single_binary( + artifacts_dir: Path, + vendor_dir: Path, + target: str, + component: BinaryComponent, +) -> Path: + artifact_subdir = artifacts_dir / target + archive_name = _archive_name_for_target(component.artifact_prefix, target) + archive_path = artifact_subdir / archive_name + if not archive_path.exists(): + raise FileNotFoundError(f"Expected artifact not found: {archive_path}") + + dest_dir = vendor_dir / target / component.dest_dir + dest_dir.mkdir(parents=True, exist_ok=True) + + binary_name = ( + f"{component.binary_basename}.exe" if "windows" in target else component.binary_basename + ) + dest = dest_dir / binary_name + dest.unlink(missing_ok=True) + extract_archive(archive_path, "zst", None, dest) + if "windows" not in target: + dest.chmod(0o755) + return dest + + +def _archive_name_for_target(artifact_prefix: str, target: str) -> str: + if "windows" in target: + return f"{artifact_prefix}-{target}.exe.zst" + return f"{artifact_prefix}-{target}.zst" + + +def _fetch_single_rg( + vendor_dir: Path, + target: str, + platform_key: str, + platform_info: dict, + manifest_path: Path, +) -> Path: + providers = platform_info.get("providers", []) + if not providers: + raise RuntimeError(f"No providers listed for platform '{platform_key}' in {manifest_path}.") + + url = providers[0]["url"] + archive_format = platform_info.get("format", "zst") + archive_member = platform_info.get("path") + + dest_dir = vendor_dir / target / "path" + dest_dir.mkdir(parents=True, 
exist_ok=True) + + is_windows = platform_key.startswith("win") + binary_name = "rg.exe" if is_windows else "rg" + dest = dest_dir / binary_name + + with tempfile.TemporaryDirectory() as tmp_dir_str: + tmp_dir = Path(tmp_dir_str) + archive_filename = os.path.basename(urlparse(url).path) + download_path = tmp_dir / archive_filename + _download_file(url, download_path) + + dest.unlink(missing_ok=True) + extract_archive(download_path, archive_format, archive_member, dest) + + if not is_windows: + dest.chmod(0o755) + + return dest + + +def _download_file(url: str, dest: Path) -> None: + dest.parent.mkdir(parents=True, exist_ok=True) + with urlopen(url) as response, open(dest, "wb") as out: + shutil.copyfileobj(response, out) + + +def extract_archive( + archive_path: Path, + archive_format: str, + archive_member: str | None, + dest: Path, +) -> None: + dest.parent.mkdir(parents=True, exist_ok=True) + + if archive_format == "zst": + output_path = archive_path.parent / dest.name + subprocess.check_call( + ["zstd", "-f", "-d", str(archive_path), "-o", str(output_path)] + ) + shutil.move(str(output_path), dest) + return + + if archive_format == "tar.gz": + if not archive_member: + raise RuntimeError("Missing 'path' for tar.gz archive in DotSlash manifest.") + with tarfile.open(archive_path, "r:gz") as tar: + try: + member = tar.getmember(archive_member) + except KeyError as exc: + raise RuntimeError( + f"Entry '{archive_member}' not found in archive {archive_path}." + ) from exc + tar.extract(member, path=archive_path.parent, filter="data") + extracted = archive_path.parent / archive_member + shutil.move(str(extracted), dest) + return + + if archive_format == "zip": + if not archive_member: + raise RuntimeError("Missing 'path' for zip archive in DotSlash manifest.") + with zipfile.ZipFile(archive_path) as archive: + try: + with archive.open(archive_member) as src, open(dest, "wb") as out: + shutil.copyfileobj(src, out) + except KeyError as exc: + raise RuntimeError( + f"Entry '{archive_member}' not found in archive {archive_path}." + ) from exc + return + + raise RuntimeError(f"Unsupported archive format '{archive_format}'.") + + +def _load_manifest(manifest_path: Path) -> dict: + cmd = ["dotslash", "--", "parse", str(manifest_path)] + stdout = subprocess.check_output(cmd, text=True) + try: + manifest = json.loads(stdout) + except json.JSONDecodeError as exc: + raise RuntimeError(f"Invalid DotSlash manifest output from {manifest_path}.") from exc + + if not isinstance(manifest, dict): + raise RuntimeError( + f"Unexpected DotSlash manifest structure for {manifest_path}: {type(manifest)!r}" + ) + + return manifest + + +if __name__ == "__main__": + import sys + + sys.exit(main()) diff --git a/codex-cli/scripts/install_native_deps.sh b/codex-cli/scripts/install_native_deps.sh deleted file mode 100755 index b9fda2543e0..00000000000 --- a/codex-cli/scripts/install_native_deps.sh +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env bash - -# Install native runtime dependencies for codex-cli. -# -# Usage -# install_native_deps.sh [--workflow-url URL] [CODEX_CLI_ROOT] -# -# The optional RELEASE_ROOT is the path that contains package.json. Omitting -# it installs the binaries into the repository's own bin/ folder to support -# local development. - -set -euo pipefail - -# ------------------ -# Parse arguments -# ------------------ - -CODEX_CLI_ROOT="" - -# Until we start publishing stable GitHub releases, we have to grab the binaries -# from the GitHub Action that created them. 
Update the URL below to point to the -# appropriate workflow run: -WORKFLOW_URL="https://github.com/openai/codex/actions/runs/17417194663" # rust-v0.28.0 - -while [[ $# -gt 0 ]]; do - case "$1" in - --workflow-url) - shift || { echo "--workflow-url requires an argument"; exit 1; } - if [ -n "$1" ]; then - WORKFLOW_URL="$1" - fi - ;; - *) - if [[ -z "$CODEX_CLI_ROOT" ]]; then - CODEX_CLI_ROOT="$1" - else - echo "Unexpected argument: $1" >&2 - exit 1 - fi - ;; - esac - shift -done - -# ---------------------------------------------------------------------------- -# Determine where the binaries should be installed. -# ---------------------------------------------------------------------------- - -if [ -n "$CODEX_CLI_ROOT" ]; then - # The caller supplied a release root directory. - BIN_DIR="$CODEX_CLI_ROOT/bin" -else - # No argument; fall back to the repo’s own bin directory. - # Resolve the path of this script, then walk up to the repo root. - SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" - CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" - BIN_DIR="$CODEX_CLI_ROOT/bin" -fi - -# Make sure the destination directory exists. -mkdir -p "$BIN_DIR" - -# ---------------------------------------------------------------------------- -# Download and decompress the artifacts from the GitHub Actions workflow. -# ---------------------------------------------------------------------------- - -WORKFLOW_ID="${WORKFLOW_URL##*/}" - -ARTIFACTS_DIR="$(mktemp -d)" -trap 'rm -rf "$ARTIFACTS_DIR"' EXIT - -# NB: The GitHub CLI `gh` must be installed and authenticated. -gh run download --dir "$ARTIFACTS_DIR" --repo openai/codex "$WORKFLOW_ID" - -# x64 Linux -zstd -d "$ARTIFACTS_DIR/x86_64-unknown-linux-musl/codex-x86_64-unknown-linux-musl.zst" \ - -o "$BIN_DIR/codex-x86_64-unknown-linux-musl" -# ARM64 Linux -zstd -d "$ARTIFACTS_DIR/aarch64-unknown-linux-musl/codex-aarch64-unknown-linux-musl.zst" \ - -o "$BIN_DIR/codex-aarch64-unknown-linux-musl" -# x64 macOS -zstd -d "$ARTIFACTS_DIR/x86_64-apple-darwin/codex-x86_64-apple-darwin.zst" \ - -o "$BIN_DIR/codex-x86_64-apple-darwin" -# ARM64 macOS -zstd -d "$ARTIFACTS_DIR/aarch64-apple-darwin/codex-aarch64-apple-darwin.zst" \ - -o "$BIN_DIR/codex-aarch64-apple-darwin" -# x64 Windows -zstd -d "$ARTIFACTS_DIR/x86_64-pc-windows-msvc/codex-x86_64-pc-windows-msvc.exe.zst" \ - -o "$BIN_DIR/codex-x86_64-pc-windows-msvc.exe" -# ARM64 Windows -zstd -d "$ARTIFACTS_DIR/aarch64-pc-windows-msvc/codex-aarch64-pc-windows-msvc.exe.zst" \ - -o "$BIN_DIR/codex-aarch64-pc-windows-msvc.exe" - -echo "Installed native dependencies into $BIN_DIR" diff --git a/codex-cli/scripts/stage_release.sh b/codex-cli/scripts/stage_release.sh deleted file mode 100755 index 96236fc53c0..00000000000 --- a/codex-cli/scripts/stage_release.sh +++ /dev/null @@ -1,120 +0,0 @@ -#!/usr/bin/env bash -# ----------------------------------------------------------------------------- -# stage_release.sh -# ----------------------------------------------------------------------------- -# Stages an npm release for @openai/codex. -# -# Usage: -# -# --tmp : Use instead of a freshly created temp directory. -# -h|--help : Print usage. -# -# ----------------------------------------------------------------------------- - -set -euo pipefail - -# Helper - usage / flag parsing - -usage() { - cat <&2 - usage 1 - ;; - *) - echo "Unexpected extra argument: $1" >&2 - usage 1 - ;; - esac - shift -done - -# Fallback when the caller did not specify a directory. -# If no directory was specified create a fresh temporary one. 
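For reference, the "use the supplied directory or create a temporary one, then make it absolute" pattern that the following lines implement in bash looks roughly like this in Python; the function name and signature are assumptions, not part of the diff.

import tempfile
from pathlib import Path

def resolve_staging_dir(tmp_arg: str | None) -> Path:
    # Use the caller-supplied directory if given, otherwise create a fresh temp dir,
    # then normalize to an absolute path (mirrors the mkdir -p / cd / pwd sequence below).
    staging = Path(tmp_arg) if tmp_arg else Path(tempfile.mkdtemp())
    staging.mkdir(parents=True, exist_ok=True)
    return staging.resolve()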
-if [[ -z "$TMPDIR" ]]; then - TMPDIR="$(mktemp -d)" -fi - -# Ensure the directory exists, then resolve to an absolute path. -mkdir -p "$TMPDIR" -TMPDIR="$(cd "$TMPDIR" && pwd)" - -# Main build logic - -echo "Staging release in $TMPDIR" - -# The script lives in codex-cli/scripts/ - change into codex-cli root so that -# relative paths keep working. -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -CODEX_CLI_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" - -pushd "$CODEX_CLI_ROOT" >/dev/null - -# 1. Build the JS artifacts --------------------------------------------------- - -# Paths inside the staged package -mkdir -p "$TMPDIR/bin" - -cp -r bin/codex.js "$TMPDIR/bin/codex.js" -cp ../README.md "$TMPDIR" || true # README is one level up - ignore if missing - -# Modify package.json - bump version and optionally add the native directory to -# the files array so that the binaries are published to npm. - -jq --arg version "$VERSION" \ - '.version = $version' \ - package.json > "$TMPDIR/package.json" - -# 2. Native runtime deps (sandbox plus optional Rust binaries) - -./scripts/install_native_deps.sh --workflow-url "$WORKFLOW_URL" "$TMPDIR" - -popd >/dev/null - -echo "Staged version $VERSION for release in $TMPDIR" - -echo "Verify the CLI:" -echo " node ${TMPDIR}/bin/codex.js --version" -echo " node ${TMPDIR}/bin/codex.js --help" - -# Print final hint for convenience -echo "Next: cd \"$TMPDIR\" && npm publish" diff --git a/codex-cli/scripts/stage_rust_release.py b/codex-cli/scripts/stage_rust_release.py deleted file mode 100755 index 9a554b77d01..00000000000 --- a/codex-cli/scripts/stage_rust_release.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python3 - -import json -import subprocess -import sys -import argparse -from pathlib import Path - - -def main() -> int: - parser = argparse.ArgumentParser( - description="""Stage a release for the npm module. - -Run this after the GitHub Release has been created and use -`--release-version` to specify the version to release. - -Optionally pass `--tmp` to control the temporary staging directory that will be -forwarded to stage_release.sh. 
-""" - ) - parser.add_argument( - "--release-version", required=True, help="Version to release, e.g., 0.3.0" - ) - parser.add_argument( - "--tmp", - help="Optional path to stage the npm package; forwarded to stage_release.sh", - ) - args = parser.parse_args() - version = args.release_version - - gh_run = subprocess.run( - [ - "gh", - "run", - "list", - "--branch", - f"rust-v{version}", - "--json", - "workflowName,url,headSha", - "--jq", - 'first(.[] | select(.workflowName == "rust-release"))', - ], - stdout=subprocess.PIPE, - check=True, - ) - gh_run.check_returncode() - workflow = json.loads(gh_run.stdout) - sha = workflow["headSha"] - - print(f"should `git checkout {sha}`") - - current_dir = Path(__file__).parent.resolve() - cmd = [ - str(current_dir / "stage_release.sh"), - "--version", - version, - "--workflow-url", - workflow["url"], - ] - if args.tmp: - cmd.extend(["--tmp", args.tmp]) - - stage_release = subprocess.run(cmd) - stage_release.check_returncode() - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/codex-rs/Cargo.lock b/codex-rs/Cargo.lock index 71681aa57fb..7a3ca22cb91 100644 --- a/codex-rs/Cargo.lock +++ b/codex-rs/Cargo.lock @@ -14,9 +14,9 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.24.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ "gimli", ] @@ -56,7 +56,7 @@ checksum = "8fac2ce611db8b8cee9b2aa886ca03c924e9da5e5295d0dbd0526e5d0b0710f7" dependencies = [ "allocative_derive", "bumpalo", - "ctor", + "ctor 0.1.26", "hashbrown 0.14.5", "num-bigint", ] @@ -69,7 +69,7 @@ checksum = "fe233a377643e0fc1a56421d7c90acdec45c291b30345eb9f08e8d0ddce5a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -78,12 +78,6 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -117,9 +111,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.19" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933" +checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" dependencies = [ "anstyle", "anstyle-parse", @@ -147,35 +141,48 @@ dependencies = [ [[package]] name = "anstyle-query" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9" +checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.9" +version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882" +checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.59.0", + "windows-sys 
0.60.2", ] [[package]] name = "anyhow" -version = "1.0.99" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "app_test_support" +version = "0.0.0" +dependencies = [ + "anyhow", + "assert_cmd", + "codex-app-server-protocol", + "serde", + "serde_json", + "tokio", + "wiremock", +] [[package]] name = "arboard" -version = "3.6.0" +version = "3.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55f533f8e0af236ffe5eb979b99381df3258853f00ba2e44b6e1955292c75227" +checksum = "0348a1c054491f4bfe6ab86a7b6ab1e44e45d899005de92f58b3df180b36ddaf" dependencies = [ "clipboard-win", "image", @@ -187,7 +194,7 @@ dependencies = [ "objc2-foundation", "parking_lot", "percent-encoding", - "windows-sys 0.59.0", + "windows-sys 0.60.2", "x11rb", ] @@ -238,7 +245,7 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -313,7 +320,18 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", +] + +[[package]] +name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] @@ -328,11 +346,57 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "axum" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98e529aee37b5c8206bb4bf4c44797127566d72f76952c970bd3d1e85de8f4e2" +dependencies = [ + "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "serde_core", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ac7a6beb1182c7e30253ee75c3e918080bfb83f5a3023bcdf7209d85fd147e6" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper", + "tower-layer", + "tower-service", +] + [[package]] name = "backtrace" -version = "0.3.75" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", "cfg-if", @@ -340,7 +404,7 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets 0.52.6", + "windows-link 0.2.0", ] [[package]] @@ -387,9 +451,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.1" +version = "2.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" [[package]] name = "block-buffer" @@ -419,9 +483,9 @@ 
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bytemuck" -version = "1.23.1" +version = "1.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c76a5792e44e4abe34d3abf15636779261d45a7450612059293d1d2cfc63422" +checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" [[package]] name = "byteorder" @@ -458,10 +522,11 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.30" +version = "1.2.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7" +checksum = "e1354349954c6fc9cb0deab020f27f783cf0b604e8bb754dc4658ecf0d29c35f" dependencies = [ + "find-msvc-tools", "shlex", ] @@ -473,9 +538,9 @@ checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" [[package]] name = "cfg-if" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" [[package]] name = "cfg_aliases" @@ -483,19 +548,24 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", "wasm-bindgen", - "windows-link", + "windows-link 0.2.0", ] [[package]] @@ -506,9 +576,9 @@ checksum = "6e4de3bc4ea267985becf712dc6d9eed8b04c953b3fcfb339ebc87acd9804901" [[package]] name = "clap" -version = "4.5.47" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eac00902d9d136acd712710d71823fb8ac8004ca445a89e73a41d45aa712931" +checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" dependencies = [ "clap_builder", "clap_derive", @@ -516,9 +586,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.47" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ad9bbf750e73b5884fb8a211a9424a1906c1e156724260fdae972f31d70e1d6" +checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" dependencies = [ "anstream", "anstyle", @@ -529,9 +599,9 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.57" +version = "4.5.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d9501bd3f5f09f7bbee01da9a511073ed30a80cd7a509f1214bb74eadea71ad" +checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a" dependencies = [ "clap", ] @@ -545,7 +615,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -578,13 +648,57 @@ dependencies = [ "tracing", ] +[[package]] +name = "codex-app-server" +version = "0.0.0" +dependencies = [ + "anyhow", + "app_test_support", + "assert_cmd", + "base64", + "codex-app-server-protocol", + 
"codex-arg0", + "codex-common", + "codex-core", + "codex-file-search", + "codex-login", + "codex-protocol", + "codex-utils-json-to-toml", + "core_test_support", + "os_info", + "pretty_assertions", + "serde", + "serde_json", + "tempfile", + "tokio", + "toml", + "tracing", + "tracing-subscriber", + "uuid", + "wiremock", +] + +[[package]] +name = "codex-app-server-protocol" +version = "0.0.0" +dependencies = [ + "anyhow", + "codex-protocol", + "paste", + "pretty_assertions", + "serde", + "serde_json", + "strum_macros 0.27.2", + "ts-rs", + "uuid", +] + [[package]] name = "codex-apply-patch" version = "0.0.0" dependencies = [ "anyhow", "assert_cmd", - "once_cell", "pretty_assertions", "similar", "tempfile", @@ -606,6 +720,26 @@ dependencies = [ "tokio", ] +[[package]] +name = "codex-backend-client" +version = "0.0.0" +dependencies = [ + "anyhow", + "codex-backend-openapi-models", + "pretty_assertions", + "reqwest", + "serde", + "serde_json", +] + +[[package]] +name = "codex-backend-openapi-models" +version = "0.0.0" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "codex-chatgpt" version = "0.0.0" @@ -614,6 +748,7 @@ dependencies = [ "clap", "codex-common", "codex-core", + "codex-git-apply", "serde", "serde_json", "tempfile", @@ -628,16 +763,22 @@ dependencies = [ "assert_cmd", "clap", "clap_complete", + "codex-app-server", + "codex-app-server-protocol", "codex-arg0", "codex-chatgpt", + "codex-cloud-tasks", "codex-common", "codex-core", "codex-exec", "codex-login", "codex-mcp-server", + "codex-process-hardening", "codex-protocol", "codex-protocol-ts", + "codex-responses-api-proxy", "codex-tui", + "ctor 0.5.0", "owo-colors", "predicates", "pretty_assertions", @@ -645,8 +786,48 @@ dependencies = [ "supports-color", "tempfile", "tokio", +] + +[[package]] +name = "codex-cloud-tasks" +version = "0.0.0" +dependencies = [ + "anyhow", + "async-trait", + "base64", + "chrono", + "clap", + "codex-cloud-tasks-client", + "codex-common", + "codex-core", + "codex-login", + "codex-tui", + "crossterm", + "ratatui", + "reqwest", + "serde", + "serde_json", + "throbber-widgets-tui", + "tokio", + "tokio-stream", "tracing", "tracing-subscriber", + "unicode-width 0.1.14", +] + +[[package]] +name = "codex-cloud-tasks-client" +version = "0.0.0" +dependencies = [ + "anyhow", + "async-trait", + "chrono", + "codex-backend-client", + "codex-git-apply", + "diffy", + "serde", + "serde_json", + "thiserror 2.0.16", ] [[package]] @@ -654,6 +835,7 @@ name = "codex-common" version = "0.0.0" dependencies = [ "clap", + "codex-app-server-protocol", "codex-core", "codex-protocol", "serde", @@ -668,18 +850,25 @@ dependencies = [ "askama", "assert_cmd", "async-channel", + "async-trait", "base64", "bytes", "chrono", + "codex-app-server-protocol", "codex-apply-patch", "codex-file-search", "codex-mcp-client", + "codex-otel", "codex-protocol", + "codex-rmcp-client", "core_test_support", "dirs", + "dunce", "env-flags", + "escargot", "eventsource-stream", "futures", + "indexmap 2.11.4", "landlock", "libc", "maplit", @@ -689,7 +878,7 @@ dependencies = [ "portable-pty", "predicates", "pretty_assertions", - "rand", + "rand 0.9.2", "regex-lite", "reqwest", "seccompiler", @@ -708,6 +897,7 @@ dependencies = [ "toml", "toml_edit", "tracing", + "tracing-test", "tree-sitter", "tree-sitter-bash", "uuid", @@ -723,7 +913,6 @@ version = "0.0.0" dependencies = [ "anyhow", "assert_cmd", - "chrono", "clap", "codex-arg0", "codex-common", @@ -732,14 +921,20 @@ dependencies = [ "codex-protocol", "core_test_support", "libc", + "mcp-types", + 
"opentelemetry-appender-tracing", "owo-colors", "predicates", + "pretty_assertions", + "serde", "serde_json", "shlex", + "supports-color", "tempfile", "tokio", "tracing", "tracing-subscriber", + "ts-rs", "uuid", "walkdir", "wiremock", @@ -778,6 +973,25 @@ dependencies = [ "tokio", ] +[[package]] +name = "codex-git-apply" +version = "0.0.0" +dependencies = [ + "once_cell", + "regex", + "tempfile", +] + +[[package]] +name = "codex-git-tooling" +version = "0.0.0" +dependencies = [ + "pretty_assertions", + "tempfile", + "thiserror 2.0.16", + "walkdir", +] + [[package]] name = "codex-linux-sandbox" version = "0.0.0" @@ -795,12 +1009,13 @@ dependencies = [ name = "codex-login" version = "0.0.0" dependencies = [ + "anyhow", "base64", "chrono", + "codex-app-server-protocol", "codex-core", - "codex-protocol", "core_test_support", - "rand", + "rand 0.9.2", "reqwest", "serde", "serde_json", @@ -811,6 +1026,7 @@ dependencies = [ "url", "urlencoding", "webbrowser", + "wiremock", ] [[package]] @@ -832,12 +1048,11 @@ version = "0.0.0" dependencies = [ "anyhow", "assert_cmd", - "base64", "codex-arg0", "codex-common", "codex-core", - "codex-login", "codex-protocol", + "codex-utils-json-to-toml", "core_test_support", "mcp-types", "mcp_test_support", @@ -849,10 +1064,8 @@ dependencies = [ "shlex", "tempfile", "tokio", - "toml", "tracing", "tracing-subscriber", - "uuid", "wiremock", ] @@ -871,16 +1084,44 @@ dependencies = [ "wiremock", ] +[[package]] +name = "codex-otel" +version = "0.0.0" +dependencies = [ + "chrono", + "codex-app-server-protocol", + "codex-protocol", + "eventsource-stream", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", + "reqwest", + "serde", + "serde_json", + "strum_macros 0.27.2", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "codex-process-hardening" +version = "0.0.0" +dependencies = [ + "libc", +] + [[package]] name = "codex-protocol" version = "0.0.0" dependencies = [ + "anyhow", "base64", "icu_decimal", "icu_locale_core", "mcp-types", "mime_guess", - "pretty_assertions", "serde", "serde_json", "serde_with", @@ -899,11 +1140,43 @@ version = "0.0.0" dependencies = [ "anyhow", "clap", - "codex-protocol", - "mcp-types", + "codex-app-server-protocol", "ts-rs", ] +[[package]] +name = "codex-responses-api-proxy" +version = "0.0.0" +dependencies = [ + "anyhow", + "clap", + "codex-process-hardening", + "ctor 0.5.0", + "libc", + "reqwest", + "serde", + "serde_json", + "tiny_http", + "zeroize", +] + +[[package]] +name = "codex-rmcp-client" +version = "0.0.0" +dependencies = [ + "anyhow", + "axum", + "futures", + "mcp-types", + "pretty_assertions", + "reqwest", + "rmcp", + "serde", + "serde_json", + "tokio", + "tracing", +] + [[package]] name = "codex-tui" version = "0.0.0" @@ -915,10 +1188,12 @@ dependencies = [ "chrono", "clap", "codex-ansi-escape", + "codex-app-server-protocol", "codex-arg0", "codex-common", "codex-core", "codex-file-search", + "codex-git-tooling", "codex-login", "codex-ollama", "codex-protocol", @@ -926,18 +1201,19 @@ dependencies = [ "crossterm", "diffy", "dirs", + "dunce", "image", "insta", "itertools 0.14.0", "lazy_static", "libc", "mcp-types", - "once_cell", + "opentelemetry-appender-tracing", "path-clean", "pathdiff", "pretty_assertions", "pulldown-cmark", - "rand", + "rand 0.9.2", "ratatui", "regex-lite", "serde", @@ -954,11 +1230,30 @@ dependencies = [ "tracing-appender", "tracing-subscriber", "unicode-segmentation", - "unicode-width 0.1.14", + "unicode-width 0.2.1", "url", "vt100", ] 
+[[package]] +name = "codex-utils-json-to-toml" +version = "0.0.0" +dependencies = [ + "pretty_assertions", + "serde_json", + "toml", +] + +[[package]] +name = "codex-utils-readiness" +version = "0.0.0" +dependencies = [ + "async-trait", + "thiserror 2.0.16", + "time", + "tokio", +] + [[package]] name = "color-eyre" version = "0.6.5" @@ -1076,6 +1371,8 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" name = "core_test_support" version = "0.0.0" dependencies = [ + "anyhow", + "assert_cmd", "codex-core", "serde_json", "tempfile", @@ -1141,7 +1438,7 @@ version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "crossterm_winapi", "futures-core", "mio", @@ -1187,14 +1484,40 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ctor" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67773048316103656a637612c4a62477603b777d91d9c62ff2290f9cde178fdb" +dependencies = [ + "ctor-proc-macro", + "dtor", +] + +[[package]] +name = "ctor-proc-macro" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2931af7e13dc045d8e9d26afccc6fa115d64e115c9c84b1166288b46f6782c2" + [[package]] name = "darling" version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.11", + "darling_macro 0.20.11", +] + +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core 0.21.3", + "darling_macro 0.21.3", ] [[package]] @@ -1208,7 +1531,21 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.104", + "syn 2.0.106", +] + +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.11.1", + "syn 2.0.106", ] [[package]] @@ -1217,9 +1554,20 @@ version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core", + "darling_core 0.20.11", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +dependencies = [ + "darling_core 0.21.3", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1253,12 +1601,12 @@ dependencies = [ [[package]] name = "deranged" -version = "0.4.0" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071" dependencies = [ "powerfmt", - "serde", + "serde_core", ] [[package]] @@ -1299,7 +1647,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "unicode-xid", ] @@ -1311,7 +1659,7 @@ checksum = 
"bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "unicode-xid", ] @@ -1373,8 +1721,8 @@ checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" dependencies = [ "libc", "option-ext", - "redox_users 0.5.0", - "windows-sys 0.60.2", + "redox_users 0.5.2", + "windows-sys 0.61.1", ] [[package]] @@ -1394,7 +1742,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "objc2", ] @@ -1416,7 +1764,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1437,6 +1785,27 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" +[[package]] +name = "dtor" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e58a0764cddb55ab28955347b45be00ade43d4d6f3ba4bf3dc354e4ec9432934" +dependencies = [ + "dtor-proc-macro", +] + +[[package]] +name = "dtor-proc-macro" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f678cf4a922c215c63e0de95eb1ff08a958a81d47e485cf9da1e27bf6305cfa5" + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + [[package]] name = "dupe" version = "0.9.1" @@ -1454,14 +1823,14 @@ checksum = "83e195b4945e88836d826124af44fdcb262ec01ef94d44f14f4fb5103f19892a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "dyn-clone" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c7a8fb8a9fbf66c1f703fe16184d10ca0ee9d23be5b4436400408ba54a95005" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" [[package]] name = "either" @@ -1516,7 +1885,7 @@ checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1565,12 +1934,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.1", ] [[package]] @@ -1579,11 +1948,22 @@ version = "3.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dea2df4cf52843e0452895c455a1a2cfbb842a1e7329671acf418fdc53ed4c59" +[[package]] +name = "escargot" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11c3aea32bc97b500c9ca6a72b768a26e558264303d101d3409cf6d57a9ed0cf" +dependencies = [ + "log", + "serde", + "serde_json", +] + [[package]] name = "event-listener" -version = "5.4.0" +version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" +checksum = 
"e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" dependencies = [ "concurrent-queue", "parking", @@ -1644,7 +2024,7 @@ checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1654,7 +2034,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78" dependencies = [ "cfg-if", - "rustix 1.0.8", + "rustix 1.1.2", "windows-sys 0.59.0", ] @@ -1678,6 +2058,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" + [[package]] name = "fixed_decimal" version = "0.7.0" @@ -1743,9 +2129,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -1806,7 +2192,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -1860,19 +2246,19 @@ dependencies = [ [[package]] name = "gethostname" -version = "0.4.3" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0176e0459c2e4a1fe232f984bca6890e681076abb9934f6cea7c326f3fc47818" +checksum = "fc257fdb4038301ce4b9cd1b3b51704509692bb3ff716a410cbd07925d9dae55" dependencies = [ - "libc", - "windows-targets 0.48.5", + "rustix 1.1.2", + "windows-targets 0.52.6", ] [[package]] name = "getopts" -version = "0.2.23" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cba6ae63eb948698e300f645f87c70f76630d505f23b8907cf1e193ee85048c1" +checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" dependencies = [ "unicode-width 0.2.1", ] @@ -1884,8 +2270,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -1895,16 +2283,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", + "js-sys", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasi 0.14.7+wasi-0.2.4", + "wasm-bindgen", ] [[package]] name = "gimli" -version = "0.31.1" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" [[package]] name = "globset" @@ -1916,14 +2306,14 @@ dependencies = [ "bstr", "log", "regex-automata", - "regex-syntax 0.8.5", + "regex-syntax 0.8.6", ] [[package]] name = "h2" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17da50a276f1e01e0ba6c029e47b7100754904ee8a278f886546e98575380785" +checksum = 
"f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ "atomic-waker", "bytes", @@ -1931,7 +2321,7 @@ dependencies = [ "futures-core", "futures-sink", "http", - "indexmap 2.10.0", + "indexmap 2.11.4", "slab", "tokio", "tokio-util", @@ -1966,15 +2356,21 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.4" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", "foldhash", ] +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" + [[package]] name = "heck" version = "0.5.0" @@ -2090,10 +2486,25 @@ dependencies = [ "hyper", "hyper-util", "rustls", + "rustls-native-certs", "rustls-pki-types", "tokio", "tokio-rustls", "tower-service", + "webpki-roots", +] + +[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", ] [[package]] @@ -2114,9 +2525,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" dependencies = [ "base64", "bytes", @@ -2130,7 +2541,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2", + "socket2 0.6.0", "system-configuration", "tokio", "tower-service", @@ -2140,9 +2551,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.63" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2150,7 +2561,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core", + "windows-core 0.62.1", ] [[package]] @@ -2301,9 +2712,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -2354,9 +2765,9 @@ dependencies = [ [[package]] name = "indenter" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" +checksum = "964de6e86d545b246d84badc0fef527924ace5134f30641c203ef52ba83f58d5" [[package]] name = "indexmap" @@ -2371,13 +2782,14 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.10.0" +version = "2.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +checksum = 
"4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.15.4", + "hashbrown 0.16.0", "serde", + "serde_core", ] [[package]] @@ -2403,29 +2815,29 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "435d80800b936787d62688c927b6490e887c7ef5ff9ce922c6c6050fca75eb9a" dependencies = [ - "darling", + "darling 0.20.11", "indoc", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "inventory" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab08d7cd2c5897f2c949e5383ea7c7db03fb19130ffcfbf7eda795137ae3cb83" +checksum = "bc61209c082fbeb19919bee74b176221b27223e27b65d781eb91af24eb1fb46e" dependencies = [ "rustversion", ] [[package]] name = "io-uring" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "libc", ] @@ -2523,7 +2935,7 @@ checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -2550,9 +2962,9 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305" dependencies = [ "once_cell", "wasm-bindgen", @@ -2591,9 +3003,9 @@ dependencies = [ [[package]] name = "landlock" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d2ef408b88e913bfc6594f5e693d57676f6463ded7d8bf994175364320c706" +checksum = "affe8b77dce5b172f8e290bd801b12832a77cd1942d1ea98259916e89d5829d6" dependencies = [ "enumflags2", "libc", @@ -2608,9 +3020,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.175" +version = "0.2.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "libm" @@ -2620,11 +3032,11 @@ checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libredox" -version = "0.1.6" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4488594b9328dee448adb906d8b126d9b7deb7cf5c22161ee591610bb1be83c0" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "libc", ] @@ -2636,9 +3048,9 @@ checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" -version = "0.9.4" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" @@ -2658,9 +3070,9 @@ dependencies = [ [[package]] name = "log" -version 
= "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" [[package]] name = "logos" @@ -2691,9 +3103,15 @@ version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.4", + "hashbrown 0.15.5", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lsp-types" version = "0.94.1" @@ -2722,6 +3140,12 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "mcp-types" version = "0.0.0" @@ -2739,7 +3163,6 @@ dependencies = [ "assert_cmd", "codex-core", "codex-mcp-server", - "codex-protocol", "mcp-types", "os_info", "pretty_assertions", @@ -2751,9 +3174,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.5" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "memoffset" @@ -2839,7 +3262,7 @@ dependencies = [ "openssl-probe", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] @@ -2871,9 +3294,21 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", + "cfg-if", + "cfg_aliases 0.1.1", + "libc", +] + +[[package]] +name = "nix" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" +dependencies = [ + "bitflags 2.9.4", "cfg-if", - "cfg_aliases", + "cfg_aliases 0.2.1", "libc", ] @@ -2980,7 +3415,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "objc2", "objc2-core-graphics", "objc2-foundation", @@ -2992,7 +3427,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "dispatch2", "objc2", ] @@ -3003,7 +3438,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "989c6c68c13021b5c2d6b71456ebb0f9dc78d752e86a98da7c716f4f9470f5a4" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "dispatch2", "objc2", "objc2-core-foundation", @@ -3022,7 +3457,7 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "objc2", "objc2-core-foundation", ] @@ -3033,16 +3468,16 @@ version = "0.3.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7282e9ac92529fa3457ce90ebb15f4ecbc383e8338060960760fa2cf75420c3c" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "objc2", "objc2-core-foundation", ] [[package]] name = "object" -version = "0.36.7" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] @@ -3065,7 +3500,7 @@ version = "0.10.73" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "foreign-types", "libc", @@ -3082,7 +3517,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3093,9 +3528,9 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-src" -version = "300.5.1+3.5.1" +version = "300.5.2+3.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "735230c832b28c000e3bc117119e6466a663ec73506bc0a9907ea4187508e42a" +checksum = "d270b79e2926f5150189d475bc7e9d2c69f9c4697b185fa917d5a32b792d21b4" dependencies = [ "cc", ] @@ -3113,6 +3548,104 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "opentelemetry" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aaf416e4cb72756655126f7dd7bb0af49c674f4c1b9903e80c009e0c37e552e6" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 2.0.16", + "tracing", +] + +[[package]] +name = "opentelemetry-appender-tracing" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e68f63eca5fad47e570e00e893094fc17be959c80c79a7d6ec1abdd5ae6ffc16" +dependencies = [ + "opentelemetry", + "tracing", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "opentelemetry-http" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f6639e842a97dbea8886e3439710ae463120091e2e064518ba8e716e6ac36d" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbee664a43e07615731afc539ca60c6d9f1a9425e25ca09c57bc36c87c55852b" +dependencies = [ + "http", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost", + "reqwest", + "serde_json", + "thiserror 2.0.16", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e046fd7660710fe5a05e8748e70d9058dc15c94ba914e7c4faa7c728f0e8ddc" +dependencies = [ + "base64", + "hex", + "opentelemetry", + "opentelemetry_sdk", + "prost", + "serde", + "tonic", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83d059a296a47436748557a353c5e6c5705b9470ef6c95cfc52c21a8814ddac2" + +[[package]] +name = "opentelemetry_sdk" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "11f644aa9e5e31d11896e024305d7e3c98a88884d9f8919dbf37a9991bc47a4b" +dependencies = [ + "futures-channel", + "futures-executor", + "futures-util", + "opentelemetry", + "percent-encoding", + "rand 0.9.2", + "serde_json", + "thiserror 2.0.16", + "tokio", + "tokio-stream", +] + [[package]] name = "option-ext" version = "0.2.0" @@ -3204,9 +3737,9 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "petgraph" @@ -3215,7 +3748,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.10.0", + "indexmap 2.11.4", ] [[package]] @@ -3227,6 +3760,26 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pin-project" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "pin-project-lite" version = "0.2.16" @@ -3247,12 +3800,12 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "plist" -version = "1.7.4" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1" +checksum = "740ebea15c5d1428f910cd1a5f52cebf8d25006245ed8ade92702f4943d91e07" dependencies = [ "base64", - "indexmap 2.10.0", + "indexmap 2.11.4", "quick-xml", "serde", "time", @@ -3264,7 +3817,7 @@ version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "crc32fast", "fdeflate", "flate2", @@ -3299,7 +3852,7 @@ dependencies = [ "lazy_static", "libc", "log", - "nix", + "nix 0.28.0", "serial2", "shared_library", "shell-words", @@ -3309,9 +3862,9 @@ dependencies = [ [[package]] name = "potential_utf" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" dependencies = [ "serde", "zerovec", @@ -3380,20 +3933,57 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.95" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] +[[package]] +name = "process-wrap" +version = "8.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1" +dependencies = [ + "futures", + "indexmap 
2.11.4", + "nix 0.30.1", + "tokio", + "tracing", + "windows", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools 0.14.0", + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "pulldown-cmark" version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "getopts", "memchr", "pulldown-cmark-escape", @@ -3408,9 +3998,9 @@ checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3" [[package]] name = "pxfm" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55f4fedc84ed39cb7a489322318976425e42a147e2be79d8f878e2884f94e84" +checksum = "83f9b339b02259ada5c0f4a389b7fb472f933aa17ce176fd2ad98f28bb401fde" dependencies = [ "num-traits", ] @@ -3423,13 +4013,68 @@ checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quick-xml" -version = "0.38.0" +version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8927b0664f5c5a98265138b7e3f90aa19a6b21353182469ace36d4ac527b7b1b" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" dependencies = [ "memchr", ] +[[package]] +name = "quinn" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" +dependencies = [ + "bytes", + "cfg_aliases 0.2.1", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2 0.5.10", + "thiserror 2.0.16", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" +dependencies = [ + "bytes", + "getrandom 0.3.3", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.16", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970" +dependencies = [ + "cfg_aliases 0.2.1", + "libc", + "once_cell", + "socket2 0.5.10", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "quote" version = "1.0.40" @@ -3455,14 +4100,35 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + [[package]] name = "rand" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ - "rand_chacha", - "rand_core", + "rand_chacha 0.9.0", + 
"rand_core 0.9.3", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", ] [[package]] @@ -3472,7 +4138,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.9.3", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.16", ] [[package]] @@ -3489,7 +4164,7 @@ name = "ratatui" version = "0.29.0" source = "git+https://github.com/nornagon/ratatui?branch=nornagon-v0.29.0-patch#9b2ad1298408c45918ee9f8241a6f95498cdbed2" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cassowary", "compact_str", "crossterm", @@ -3506,11 +4181,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.15" +version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8af0dde094006011e6a740d4879319439489813bd0bcdc7d821beaeeff48ec" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", ] [[package]] @@ -3526,9 +4201,9 @@ dependencies = [ [[package]] name = "redox_users" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ "getrandom 0.2.16", "libredox", @@ -3552,30 +4227,30 @@ checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c" dependencies = [ "aho-corasick", "memchr", "regex-automata", - "regex-syntax 0.8.5", + "regex-syntax 0.8.6", ] [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.5", + "regex-syntax 0.8.6", ] [[package]] @@ -3592,9 +4267,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" [[package]] name = "reqwest" @@ -3622,6 +4297,9 @@ dependencies = [ "native-tls", "percent-encoding", "pin-project-lite", + "quinn", + "rustls", + "rustls-native-certs", "rustls-pki-types", "serde", "serde_json", @@ -3629,6 +4307,7 @@ dependencies = [ "sync_wrapper", "tokio", "tokio-native-tls", + "tokio-rustls", 
"tokio-util", "tower", "tower-http", @@ -3638,6 +4317,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", + "webpki-roots", ] [[package]] @@ -3654,11 +4334,62 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rmcp" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534fd1cd0601e798ac30545ff2b7f4a62c6f14edd4aaed1cc5eb1e85f69f09af" +dependencies = [ + "base64", + "bytes", + "chrono", + "futures", + "http", + "http-body", + "http-body-util", + "paste", + "pin-project-lite", + "process-wrap", + "rand 0.9.2", + "reqwest", + "rmcp-macros", + "schemars 1.0.4", + "serde", + "serde_json", + "sse-stream", + "thiserror 2.0.16", + "tokio", + "tokio-stream", + "tokio-util", + "tower-service", + "tracing", + "uuid", +] + +[[package]] +name = "rmcp-macros" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ba777eb0e5f53a757e36f0e287441da0ab766564ba7201600eeb92a4753022e" +dependencies = [ + "darling 0.21.3", + "proc-macro2", + "quote", + "serde_json", + "syn 2.0.106", +] + [[package]] name = "rustc-demangle" -version = "0.1.25" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustix" @@ -3666,7 +4397,7 @@ version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "errno", "libc", "linux-raw-sys 0.4.15", @@ -3675,44 +4406,58 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.8" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "errno", "libc", - "linux-raw-sys 0.9.4", - "windows-sys 0.60.2", + "linux-raw-sys 0.11.0", + "windows-sys 0.61.1", ] [[package]] name = "rustls" -version = "0.23.29" +version = "0.23.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2491382039b29b9b11ff08b76ff6c97cf287671dbb74f0be44bda389fffe9bd1" +checksum = "cd3c25631629d034ce7cd9940adc9d45762d46de2b0f57193c4443b92c6d4d40" dependencies = [ "once_cell", + "ring", "rustls-pki-types", "rustls-webpki", "subtle", "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + "openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework 3.3.0", +] + [[package]] name = "rustls-pki-types" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" dependencies = [ + "web-time", "zeroize", ] [[package]] name = "rustls-webpki" -version = "0.103.4" +version = "0.103.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +checksum = "8572f3c2cb9934231157b45499fc41e1f58c589fdfb81a844ba873265e80f8eb" dependencies = [ "ring", "rustls-pki-types", @@ -3721,9 +4466,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "rustyline" @@ -3731,7 +4476,7 @@ version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7803e8936da37efd9b6d4478277f4b2b9bb5cdb37a113e8d63222e58da647e63" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "clipboard-win", "fd-lock", @@ -3739,7 +4484,7 @@ dependencies = [ "libc", "log", "memchr", - "nix", + "nix 0.28.0", "radix_trie", "unicode-segmentation", "unicode-width 0.1.14", @@ -3764,11 +4509,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.1", ] [[package]] @@ -3820,7 +4565,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" dependencies = [ "dyn-clone", - "schemars_derive", + "schemars_derive 0.8.22", "serde", "serde_json", ] @@ -3843,8 +4588,10 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ + "chrono", "dyn-clone", "ref-cast", + "schemars_derive 1.0.4", "serde", "serde_json", ] @@ -3858,7 +4605,19 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.104", + "syn 2.0.106", +] + +[[package]] +name = "schemars_derive" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.106", ] [[package]] @@ -3882,18 +4641,31 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "core-foundation 0.9.4", "core-foundation-sys", "libc", "security-framework-sys", ] +[[package]] +name = "security-framework" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c" +dependencies = [ + "bitflags 2.9.4", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + [[package]] name = "security-framework-sys" -version = "2.14.0" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -3901,9 +4673,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.224" +version = "1.0.227" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aaeb1e94f53b16384af593c71e20b095e958dab1d26939c1b70645c5cfbcc0b" +checksum = "80ece43fc6fbed4eb5392ab50c07334d3e577cbf40997ee896fe7af40bba4245" dependencies = [ "serde_core", "serde_derive", @@ -3911,22 +4683,22 @@ dependencies = [ [[package]] name = "serde_core" -version = "1.0.224" +version = "1.0.227" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f39390fa6346e24defbcdd3d9544ba8a19985d0af74df8501fbfe9a64341ab" +checksum = "7a576275b607a2c86ea29e410193df32bc680303c82f31e275bbfcafe8b33be5" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.224" +version = "1.0.227" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ff78ab5e8561c9a675bfc1785cb07ae721f0ee53329a595cefd8c04c2ac4e0" +checksum = "51e694923b8824cf0e9b382adf0f60d4e05f348f357b38833a3fa5ed7c2ede04" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3937,7 +4709,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -3946,7 +4718,7 @@ version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ - "indexmap 2.10.0", + "indexmap 2.11.4", "itoa", "memchr", "ryu", @@ -3962,16 +4734,16 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "serde_spanned" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" +checksum = "5417783452c2be558477e104686f7de5dae53dba813c28435e0e70f82d9b04ee" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -3988,15 +4760,15 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.14.0" +version = "3.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" +checksum = "c522100790450cf78eeac1507263d0a350d4d5b30df0c8e1fe051a10c22b376e" dependencies = [ "base64", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.10.0", + "indexmap 2.11.4", "schemars 0.9.0", "schemars 1.0.4", "serde", @@ -4008,21 +4780,21 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.14.0" +version = "3.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +checksum = "327ada00f7d64abaac1e55a6911e90cf665aa051b9a561c7006c157f4633135e" dependencies = [ - "darling", + "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "serial2" -version = "0.2.31" +version = "0.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26e1e5956803a69ddd72ce2de337b577898801528749565def03515f82bad5bb" +checksum = "8cc76fa68e25e771492ca1e3c53d447ef0be3093e05cd3b47f4b712ba10c6f3c" dependencies = [ "cfg-if", "libc", @@ -4105,9 +4877,9 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.5" +version = "1.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" +checksum = 
"b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" dependencies = [ "libc", ] @@ -4154,6 +4926,16 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "socket2" version = "0.6.0" @@ -4164,6 +4946,19 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "sse-stream" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb4dc4d33c68ec1f27d386b5610a351922656e1fdf5c05bbaad930cd1519479a" +dependencies = [ + "bytes", + "futures-util", + "http-body", + "http-body-util", + "pin-project-lite", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -4219,7 +5014,7 @@ dependencies = [ "dupe", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4321,7 +5116,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4333,7 +5128,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4364,9 +5159,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.104" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -4390,7 +5185,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4408,7 +5203,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "core-foundation 0.9.4", "system-configuration-sys", ] @@ -4425,15 +5220,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.20.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.3", "once_cell", - "rustix 1.0.8", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.61.1", ] [[package]] @@ -4458,12 +5253,12 @@ dependencies = [ [[package]] name = "terminal_size" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" +checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" dependencies = [ - "rustix 1.0.8", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.60.2", ] [[package]] @@ -4518,7 +5313,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4529,7 +5324,7 @@ checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ 
-4541,6 +5336,16 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "throbber-widgets-tui" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d36b5738d666a2b4c91b7c24998a8588db724b3107258343ebf8824bf55b06d" +dependencies = [ + "rand 0.8.5", + "ratatui", +] + [[package]] name = "tiff" version = "0.10.3" @@ -4557,9 +5362,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.41" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -4574,15 +5379,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.22" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -4619,6 +5424,21 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" version = "1.47.1" @@ -4634,7 +5454,7 @@ dependencies = [ "pin-project-lite", "signal-hook-registry", "slab", - "socket2", + "socket2 0.6.0", "tokio-macros", "windows-sys 0.59.0", ] @@ -4647,7 +5467,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4662,9 +5482,9 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ "rustls", "tokio", @@ -4709,12 +5529,12 @@ dependencies = [ [[package]] name = "toml" -version = "0.9.5" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8" +checksum = "00e5e5d9bf2475ac9d4f0d9edab68cc573dc2fd644b0dba36b0c30a92dd9eaa0" dependencies = [ - "indexmap 2.10.0", - "serde", + "indexmap 2.11.4", + "serde_core", "serde_spanned", "toml_datetime", "toml_parser", @@ -4724,20 +5544,20 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +checksum = "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1" dependencies = [ - "serde", + "serde_core", ] [[package]] name = "toml_edit" -version = "0.23.4" 
+version = "0.23.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7211ff1b8f0d3adae1663b7da9ffe396eabe1ca25f0b0bee42b0da29a9ddce93" +checksum = "f3effe7c0e86fdff4f69cdd2ccc1b96f933e24811c5441d44904e8683e27184b" dependencies = [ - "indexmap 2.10.0", + "indexmap 2.11.4", "toml_datetime", "toml_parser", "toml_writer", @@ -4746,18 +5566,47 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10" +checksum = "4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627" dependencies = [ "winnow", ] [[package]] name = "toml_writer" -version = "1.0.2" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d163a63c116ce562a22cda521fcc4d79152e7aba014456fb5eb442f6d6a10109" + +[[package]] +name = "tonic" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" +checksum = "7e581ba15a835f4d9ea06c55ab1bd4dce26fc53752c69a04aac00703bfb49ba9" +dependencies = [ + "async-trait", + "axum", + "base64", + "bytes", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost", + "socket2 0.5.10", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] [[package]] name = "tower" @@ -4767,11 +5616,15 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", + "indexmap 2.11.4", "pin-project-lite", + "slab", "sync_wrapper", "tokio", + "tokio-util", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -4780,7 +5633,7 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "bytes", "futures-util", "http", @@ -4836,7 +5689,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -4888,15 +5741,36 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "tracing-test" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68" +dependencies = [ + "tracing-core", + "tracing-subscriber", + "tracing-test-macro", +] + +[[package]] +name = "tracing-test-macro" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" +dependencies = [ + "quote", + "syn 2.0.106", +] + [[package]] name = "tree-sitter" -version = "0.25.9" +version = "0.25.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccd2a058a86cfece0bf96f7cce1021efef9c8ed0e892ab74639173e5ed7a34fa" +checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87" dependencies = [ "cc", "regex", - "regex-syntax 0.8.5", + "regex-syntax 0.8.6", "serde_json", "streaming-iterator", "tree-sitter-language", @@ -4944,7 +5818,7 @@ checksum = "e9d4ed7b4c18cc150a6a0a1e9ea1ecfa688791220781af6e119f9599a8502a0a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "termcolor", ] 
@@ -4962,9 +5836,9 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" [[package]] name = "unicode-linebreak" @@ -5015,9 +5889,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -5130,44 +6004,54 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" -version = "0.14.2+wasi-0.2.4" +version = "0.14.7+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" +dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt", + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", + "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" dependencies = [ "cfg-if", "js-sys", @@ -5178,9 +6062,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5188,22 +6072,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" dependencies = [ "proc-macro2", "quote", - "syn 
2.0.104", + "syn 2.0.106", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1" dependencies = [ "unicode-ident", ] @@ -5223,9 +6107,19 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", @@ -5247,6 +6141,15 @@ dependencies = [ "web-sys", ] +[[package]] +name = "webpki-roots" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "weezl" version = "0.1.10" @@ -5289,11 +6192,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.1", ] [[package]] @@ -5302,6 +6205,28 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows" +version = "0.61.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" +dependencies = [ + "windows-collections", + "windows-core 0.61.2", + "windows-future", + "windows-link 0.1.3", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" +dependencies = [ + "windows-core 0.61.2", +] + [[package]] name = "windows-core" version = "0.61.2" @@ -5310,31 +6235,55 @@ checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", - "windows-link", - "windows-result", - "windows-strings", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", +] + +[[package]] +name = "windows-core" +version = "0.62.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6844ee5416b285084d3d3fffd743b925a6c9385455f64f6d4fa3031c4c2749a9" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link 0.2.0", + "windows-result 0.4.0", + "windows-strings 0.5.0", +] + +[[package]] +name = "windows-future" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" +dependencies 
= [ + "windows-core 0.61.2", + "windows-link 0.1.3", + "windows-threading", ] [[package]] name = "windows-implement" -version = "0.60.0" +version = "0.60.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +checksum = "edb307e42a74fb6de9bf3a02d9712678b22399c87e6fa869d6dfcd8c1b7754e0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] name = "windows-interface" -version = "0.59.1" +version = "0.59.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +checksum = "c0abd1ddbc6964ac14db11c7213d6532ef34bd9aa042c2e5935f59d7908b46a5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -5343,15 +6292,31 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +[[package]] +name = "windows-link" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" + +[[package]] +name = "windows-numerics" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" +dependencies = [ + "windows-core 0.61.2", + "windows-link 0.1.3", +] + [[package]] name = "windows-registry" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ - "windows-link", - "windows-result", - "windows-strings", + "windows-link 0.1.3", + "windows-result 0.3.4", + "windows-strings 0.4.2", ] [[package]] @@ -5360,7 +6325,16 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ - "windows-link", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-result" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7084dcc306f89883455a206237404d3eaf961e5bd7e0f312f7c91f57eb44167f" +dependencies = [ + "windows-link 0.2.0", ] [[package]] @@ -5369,7 +6343,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ - "windows-link", + "windows-link 0.1.3", +] + +[[package]] +name = "windows-strings" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7218c655a553b0bed4426cf54b20d7ba363ef543b52d515b3e48d7fd55318dda" +dependencies = [ + "windows-link 0.2.0", ] [[package]] @@ -5405,7 +6388,16 @@ version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.53.2", + "windows-targets 0.53.4", +] + +[[package]] +name = "windows-sys" +version = "0.61.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f109e41dd4a3c848907eb83d5a42ea98b3769495597450cf6d153507b166f0f" +dependencies = [ + "windows-link 0.2.0", ] [[package]] @@ -5423,21 +6415,6 @@ dependencies = [ "windows_x86_64_msvc 0.42.2", ] -[[package]] -name = "windows-targets" -version = "0.48.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - [[package]] name = "windows-targets" version = "0.52.6" @@ -5456,10 +6433,11 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.2" +version = "0.53.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +checksum = "2d42b7b7f66d2a06854650af09cfdf8713e427a439c97ad65a6375318033ac4b" dependencies = [ + "windows-link 0.2.0", "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", "windows_i686_gnu 0.53.0", @@ -5471,16 +6449,19 @@ dependencies = [ ] [[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" +name = "windows-threading" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link 0.1.3", +] [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.5" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_gnullvm" @@ -5500,12 +6481,6 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - [[package]] name = "windows_aarch64_msvc" version = "0.52.6" @@ -5524,12 +6499,6 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -5560,12 +6529,6 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - [[package]] name = "windows_i686_msvc" version = "0.52.6" @@ -5584,12 +6547,6 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - [[package]] name = "windows_x86_64_gnu" version = "0.52.6" @@ -5608,12 +6565,6 @@ version = "0.42.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -5632,12 +6583,6 @@ version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - [[package]] name = "windows_x86_64_msvc" version = "0.52.6" @@ -5652,9 +6597,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] @@ -5698,13 +6643,10 @@ dependencies = [ ] [[package]] -name = "wit-bindgen-rt" -version = "0.39.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags 2.9.1", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" @@ -5714,20 +6656,20 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "x11rb" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d91ffca73ee7f68ce055750bf9f6eca0780b8c85eff9bc046a3b0da41755e12" +checksum = "9993aa5be5a26815fe2c3eacfc1fde061fc1a1f094bf1ad2a18bf9c495dd7414" dependencies = [ "gethostname", - "rustix 0.38.44", + "rustix 1.1.2", "x11rb-protocol", ] [[package]] name = "x11rb-protocol" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec107c4503ea0b4a98ef47356329af139c0a4f7750e621cf2973cd3385ebcb3d" +checksum = "ea6fc2961e4ef194dcbfe56bb845534d0dc8098940c7e5c012a258bfec6701bd" [[package]] name = "yansi" @@ -5755,28 +6697,28 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "synstructure", ] [[package]] name = "zerocopy" -version = "0.8.26" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.26" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -5796,7 +6738,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", "synstructure", ] @@ -5819,9 +6761,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.2" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" dependencies = [ "yoke", "zerofrom", @@ -5836,7 +6778,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.104", + "syn 2.0.106", ] [[package]] @@ -5847,9 +6789,9 @@ checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" [[package]] name = "zune-jpeg" -version = "0.4.19" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9e525af0a6a658e031e95f14b7f889976b74a11ba0eca5a5fc9ac8a1c43a6a" +checksum = "29ce2c8a9384ad323cf564b67da86e21d3cfdff87908bc1223ed5c99bc792713" dependencies = [ "zune-core", ] diff --git a/codex-rs/Cargo.toml b/codex-rs/Cargo.toml index 7e0d5620742..06c83c819ed 100644 --- a/codex-rs/Cargo.toml +++ b/codex-rs/Cargo.toml @@ -1,23 +1,37 @@ [workspace] members = [ + "backend-client", "ansi-escape", + "app-server", + "app-server-protocol", "apply-patch", "arg0", + "codex-backend-openapi-models", + "cloud-tasks", + "cloud-tasks-client", "cli", "common", "core", "exec", "execpolicy", "file-search", + "git-tooling", "linux-sandbox", "login", "mcp-client", "mcp-server", "mcp-types", "ollama", + "process-hardening", "protocol", "protocol-ts", + "rmcp-client", + "responses-api-proxy", + "otel", "tui", + "git-apply", + "utils/json-to-toml", + "utils/readiness", ] resolver = "2" @@ -31,7 +45,10 @@ edition = "2024" [workspace.dependencies] # Internal +app_test_support = { path = "app-server/tests/common" } codex-ansi-escape = { path = "ansi-escape" } +codex-app-server = { path = "app-server" } +codex-app-server-protocol = { path = "app-server-protocol" } codex-apply-patch = { path = "apply-patch" } codex-arg0 = { path = "arg0" } codex-chatgpt = { path = "chatgpt" } @@ -39,14 +56,21 @@ codex-common = { path = "common" } codex-core = { path = "core" } codex-exec = { path = "exec" } codex-file-search = { path = "file-search" } +codex-git-tooling = { path = "git-tooling" } codex-linux-sandbox = { path = "linux-sandbox" } codex-login = { path = "login" } codex-mcp-client = { path = "mcp-client" } codex-mcp-server = { path = "mcp-server" } codex-ollama = { path = "ollama" } +codex-otel = { path = "otel" } +codex-process-hardening = { path = "process-hardening" } codex-protocol = { path = "protocol" } codex-protocol-ts = { path = "protocol-ts" } +codex-responses-api-proxy = { path = "responses-api-proxy" } +codex-rmcp-client = { path = "rmcp-client" } codex-tui = { path = "tui" } +codex-utils-json-to-toml = { path = "utils/json-to-toml" } +codex-utils-readiness = { path = "utils/readiness" } core_test_support = { path = "core/tests/common" } mcp-types = { path = "mcp-types" } mcp_test_support = { path = "mcp-server/tests/common" } @@ -60,25 +84,30 @@ askama = "0.12" assert_cmd = "2" async-channel = "2.3.1" async-stream = "0.3.6" +async-trait = "0.1.89" base64 = "0.22.1" bytes = "1.10.1" -chrono = "0.4.40" +chrono = "0.4.42" clap = "4" clap_complete = "4" color-eyre = "0.6.3" crossterm = "0.28.1" +ctor = "0.5.0" derive_more = "2" diffy = "0.4.2" dirs = "6" dotenvy = "0.15.7" +dunce = "1.0.4" env-flags = "0.1.1" env_logger = 
"0.11.5" +escargot = "0.5" eventsource-stream = "0.2.3" futures = "0.3" icu_decimal = "2.0.0" icu_locale_core = "2.0.0" ignore = "0.4.23" image = { version = "^0.25.8", default-features = false } +indexmap = "2.6.0" insta = "1.43.2" itertools = "0.14.0" landlock = "0.4.1" @@ -89,10 +118,15 @@ maplit = "1.0.2" mime_guess = "2.0.5" multimap = "0.10.0" nucleo-matcher = "0.3.1" -once_cell = "1" openssl-sys = "*" +opentelemetry = "0.30.0" +opentelemetry-appender-tracing = "0.30.0" +opentelemetry-otlp = "0.30.0" +opentelemetry-semantic-conventions = "0.30.0" +opentelemetry_sdk = "0.30.0" os_info = "3.12.0" owo-colors = "4.2.0" +paste = "1.0.15" path-absolutize = "3.1.1" path-clean = "1.0.1" pathdiff = "0.2" @@ -118,7 +152,7 @@ strum = "0.27.2" strum_macros = "0.27.2" supports-color = "3.0.2" sys-locale = "0.3.2" -tempfile = "3.13.0" +tempfile = "3.23.0" textwrap = "0.16.2" thiserror = "2.0.16" time = "0.3" @@ -129,14 +163,16 @@ tokio-test = "0.4" tokio-util = "0.7.16" toml = "0.9.5" toml_edit = "0.23.4" +tonic = "0.13.1" tracing = "0.1.41" tracing-appender = "0.2.3" tracing-subscriber = "0.3.20" +tracing-test = "0.2.5" tree-sitter = "0.25.9" tree-sitter-bash = "0.25.0" ts-rs = "11" unicode-segmentation = "1.12.0" -unicode-width = "0.1" +unicode-width = "0.2" url = "2" urlencoding = "2.1" uuid = "1" @@ -146,6 +182,7 @@ webbrowser = "1.0" which = "6" wildmatch = "2.5.0" wiremock = "0.6" +zeroize = "1.8.1" [workspace.lints] rust = {} @@ -188,7 +225,7 @@ unwrap_used = "deny" # cargo-shear cannot see the platform-specific openssl-sys usage, so we # silence the false positive here instead of deleting a real dependency. [workspace.metadata.cargo-shear] -ignored = ["openssl-sys"] +ignored = ["openssl-sys", "codex-utils-readiness"] [profile.release] lto = "fat" diff --git a/codex-rs/README.md b/codex-rs/README.md index 043d872afef..46eda63a1e8 100644 --- a/codex-rs/README.md +++ b/codex-rs/README.md @@ -4,18 +4,18 @@ We provide Codex CLI as a standalone, native executable to ensure a zero-depende ## Installing Codex -Today, the easiest way to install Codex is via `npm`, though we plan to publish Codex to other package managers soon. +Today, the easiest way to install Codex is via `npm`: ```shell -npm i -g @openai/codex@native +npm i -g @openai/codex codex ``` -You can also download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases). +You can also install via Homebrew (`brew install codex`) or download a platform-specific release directly from our [GitHub Releases](https://github.com/openai/codex/releases). ## What's new in the Rust CLI -While we are [working to close the gap between the TypeScript and Rust implementations of Codex CLI](https://github.com/openai/codex/issues/1262), note that the Rust CLI has a number of features that the TypeScript CLI does not! +The Rust implementation is now the maintained Codex CLI and serves as the default experience. It includes a number of features that the legacy TypeScript CLI never supported. ### Config @@ -25,12 +25,14 @@ Codex supports a rich set of configuration options. Note that the Rust CLI uses Codex CLI functions as an MCP client that can connect to MCP servers on startup. See the [`mcp_servers`](../docs/config.md#mcp_servers) section in the configuration documentation for details. -It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp`. 
Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out: +It is still experimental, but you can also launch Codex as an MCP _server_ by running `codex mcp-server`. Use the [`@modelcontextprotocol/inspector`](https://github.com/modelcontextprotocol/inspector) to try it out: ```shell -npx @modelcontextprotocol/inspector codex mcp +npx @modelcontextprotocol/inspector codex mcp-server ``` +Use `codex mcp` to add/list/get/remove MCP server launchers defined in `config.toml`, and `codex mcp-server` to run the MCP server directly. + ### Notifications You can enable notifications by configuring a script that is run whenever the agent finishes a turn. The [notify documentation](../docs/config.md#notify) includes a detailed example that explains how to get desktop notifications via [terminal-notifier](https://github.com/julienXX/terminal-notifier) on macOS. diff --git a/codex-rs/app-server-protocol/Cargo.toml b/codex-rs/app-server-protocol/Cargo.toml new file mode 100644 index 00000000000..b18028fbe4e --- /dev/null +++ b/codex-rs/app-server-protocol/Cargo.toml @@ -0,0 +1,24 @@ +[package] +edition = "2024" +name = "codex-app-server-protocol" +version = { workspace = true } + +[lib] +name = "codex_app_server_protocol" +path = "src/lib.rs" + +[lints] +workspace = true + +[dependencies] +codex-protocol = { workspace = true } +paste = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +strum_macros = { workspace = true } +ts-rs = { workspace = true } +uuid = { workspace = true, features = ["serde", "v7"] } + +[dev-dependencies] +anyhow = { workspace = true } +pretty_assertions = { workspace = true } diff --git a/codex-rs/app-server-protocol/src/jsonrpc_lite.rs b/codex-rs/app-server-protocol/src/jsonrpc_lite.rs new file mode 100644 index 00000000000..9d6d6da06f8 --- /dev/null +++ b/codex-rs/app-server-protocol/src/jsonrpc_lite.rs @@ -0,0 +1,67 @@ +//! We do not do true JSON-RPC 2.0, as we neither send nor expect the +//! "jsonrpc": "2.0" field. + +use serde::Deserialize; +use serde::Serialize; +use ts_rs::TS; + +pub const JSONRPC_VERSION: &str = "2.0"; + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Hash, Eq, TS)] +#[serde(untagged)] +pub enum RequestId { + String(String), + #[ts(type = "number")] + Integer(i64), +} + +pub type Result = serde_json::Value; + +/// Refers to any valid JSON-RPC object that can be decoded off the wire, or encoded to be sent. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)] +#[serde(untagged)] +pub enum JSONRPCMessage { + Request(JSONRPCRequest), + Notification(JSONRPCNotification), + Response(JSONRPCResponse), + Error(JSONRPCError), +} + +/// A request that expects a response. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)] +pub struct JSONRPCRequest { + pub id: RequestId, + pub method: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub params: Option<serde_json::Value>, +} + +/// A notification which does not expect a response. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)] +pub struct JSONRPCNotification { + pub method: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub params: Option<serde_json::Value>, +} + +/// A successful (non-error) response to a request. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)] +pub struct JSONRPCResponse { + pub id: RequestId, + pub result: Result, +} + +/// A response to a request that indicates an error occurred.
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)] +pub struct JSONRPCError { + pub error: JSONRPCErrorError, + pub id: RequestId, +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, TS)] +pub struct JSONRPCErrorError { + pub code: i64, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub data: Option<serde_json::Value>, + pub message: String, +} diff --git a/codex-rs/app-server-protocol/src/lib.rs b/codex-rs/app-server-protocol/src/lib.rs new file mode 100644 index 00000000000..517702bbd84 --- /dev/null +++ b/codex-rs/app-server-protocol/src/lib.rs @@ -0,0 +1,5 @@ +mod jsonrpc_lite; +mod protocol; + +pub use jsonrpc_lite::*; +pub use protocol::*; diff --git a/codex-rs/protocol/src/mcp_protocol.rs b/codex-rs/app-server-protocol/src/protocol.rs similarity index 65% rename from codex-rs/protocol/src/mcp_protocol.rs rename to codex-rs/app-server-protocol/src/protocol.rs index 52418b27550..845a2431f44 100644 --- a/codex-rs/protocol/src/mcp_protocol.rs +++ b/codex-rs/app-server-protocol/src/protocol.rs @@ -1,76 +1,27 @@ use std::collections::HashMap; -use std::fmt::Display; use std::path::PathBuf; -use crate::config_types::ReasoningEffort; -use crate::config_types::ReasoningSummary; -use crate::config_types::SandboxMode; -use crate::config_types::Verbosity; -use crate::protocol::AskForApproval; -use crate::protocol::EventMsg; -use crate::protocol::FileChange; -use crate::protocol::ReviewDecision; -use crate::protocol::SandboxPolicy; -use crate::protocol::TurnAbortReason; -use mcp_types::RequestId; +use crate::JSONRPCNotification; +use crate::JSONRPCRequest; +use crate::RequestId; +use codex_protocol::ConversationId; +use codex_protocol::config_types::ReasoningEffort; +use codex_protocol::config_types::ReasoningSummary; +use codex_protocol::config_types::SandboxMode; +use codex_protocol::config_types::Verbosity; +use codex_protocol::protocol::AskForApproval; +use codex_protocol::protocol::EventMsg; +use codex_protocol::protocol::FileChange; +use codex_protocol::protocol::ReviewDecision; +use codex_protocol::protocol::SandboxPolicy; +use codex_protocol::protocol::TurnAbortReason; +use paste::paste; use serde::Deserialize; use serde::Serialize; use strum_macros::Display; use ts_rs::TS; use uuid::Uuid; -#[derive(Debug, Clone, Copy, PartialEq, Eq, TS, Hash)] -#[ts(type = "string")] -pub struct ConversationId { - uuid: Uuid, -} - -impl ConversationId { - pub fn new() -> Self { - Self { - uuid: Uuid::now_v7(), - } - } - - pub fn from_string(s: &str) -> Result<Self, uuid::Error> { - Ok(Self { - uuid: Uuid::parse_str(s)?, - }) - } -} - -impl Default for ConversationId { - fn default() -> Self { - Self::new() - } -} - -impl Display for ConversationId { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.uuid) - } -} - -impl Serialize for ConversationId { - fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> - where - S: serde::Serializer, - { - serializer.collect_str(&self.uuid) - } -} - -impl<'de> Deserialize<'de> for ConversationId { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: serde::Deserializer<'de>, - { - let value = String::deserialize(deserializer)?; - let uuid = Uuid::parse_str(&value).map_err(serde::de::Error::custom)?; - Ok(Self { uuid }) - } -} - #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, TS)] #[ts(type = "string")] pub struct GitSha(pub String); @@ -81,117 +32,168 @@ impl GitSha { } } -#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, TS)] +#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq,
Display, TS)] #[serde(rename_all = "lowercase")] pub enum AuthMode { ApiKey, ChatGPT, } -/// Request from the client to the server. -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] -#[serde(tag = "method", rename_all = "camelCase")] -pub enum ClientRequest { +/// Generates an `enum ClientRequest` where each variant is a request that the +/// client can send to the server. Each variant has associated `params` and +/// `response` types. Also generates a `export_client_responses()` function to +/// export all response types to TypeScript. +macro_rules! client_request_definitions { + ( + $( + $(#[$variant_meta:meta])* + $variant:ident { + params: $(#[$params_meta:meta])* $params:ty, + response: $response:ty, + } + ),* $(,)? + ) => { + /// Request from the client to the server. + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] + #[serde(tag = "method", rename_all = "camelCase")] + pub enum ClientRequest { + $( + $(#[$variant_meta])* + $variant { + #[serde(rename = "id")] + request_id: RequestId, + $(#[$params_meta])* + params: $params, + }, + )* + } + + pub fn export_client_responses( + out_dir: &::std::path::Path, + ) -> ::std::result::Result<(), ::ts_rs::ExportError> { + $( + <$response as ::ts_rs::TS>::export_all_to(out_dir)?; + )* + Ok(()) + } + }; +} + +client_request_definitions! { + Initialize { + params: InitializeParams, + response: InitializeResponse, + }, NewConversation { - #[serde(rename = "id")] - request_id: RequestId, params: NewConversationParams, + response: NewConversationResponse, }, /// List recorded Codex conversations (rollouts) with optional pagination and search. ListConversations { - #[serde(rename = "id")] - request_id: RequestId, params: ListConversationsParams, + response: ListConversationsResponse, }, /// Resume a recorded Codex conversation from a rollout file. 
ResumeConversation { - #[serde(rename = "id")] - request_id: RequestId, params: ResumeConversationParams, + response: ResumeConversationResponse, }, ArchiveConversation { - #[serde(rename = "id")] - request_id: RequestId, params: ArchiveConversationParams, + response: ArchiveConversationResponse, }, SendUserMessage { - #[serde(rename = "id")] - request_id: RequestId, params: SendUserMessageParams, + response: SendUserMessageResponse, }, SendUserTurn { - #[serde(rename = "id")] - request_id: RequestId, params: SendUserTurnParams, + response: SendUserTurnResponse, }, InterruptConversation { - #[serde(rename = "id")] - request_id: RequestId, params: InterruptConversationParams, + response: InterruptConversationResponse, }, AddConversationListener { - #[serde(rename = "id")] - request_id: RequestId, params: AddConversationListenerParams, + response: AddConversationSubscriptionResponse, }, RemoveConversationListener { - #[serde(rename = "id")] - request_id: RequestId, params: RemoveConversationListenerParams, + response: RemoveConversationSubscriptionResponse, }, GitDiffToRemote { - #[serde(rename = "id")] - request_id: RequestId, params: GitDiffToRemoteParams, + response: GitDiffToRemoteResponse, }, LoginApiKey { - #[serde(rename = "id")] - request_id: RequestId, params: LoginApiKeyParams, + response: LoginApiKeyResponse, }, LoginChatGpt { - #[serde(rename = "id")] - request_id: RequestId, + params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>, + response: LoginChatGptResponse, }, CancelLoginChatGpt { - #[serde(rename = "id")] - request_id: RequestId, params: CancelLoginChatGptParams, + response: CancelLoginChatGptResponse, }, LogoutChatGpt { - #[serde(rename = "id")] - request_id: RequestId, + params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>, + response: LogoutChatGptResponse, }, GetAuthStatus { - #[serde(rename = "id")] - request_id: RequestId, params: GetAuthStatusParams, + response: GetAuthStatusResponse, }, GetUserSavedConfig { - #[serde(rename = "id")] - request_id: RequestId, + params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>, + response: GetUserSavedConfigResponse, }, SetDefaultModel { - #[serde(rename = "id")] - request_id: RequestId, params: SetDefaultModelParams, + response: SetDefaultModelResponse, }, GetUserAgent { - #[serde(rename = "id")] - request_id: RequestId, + params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>, + response: GetUserAgentResponse, }, UserInfo { - #[serde(rename = "id")] - request_id: RequestId, + params: #[ts(type = "undefined")] #[serde(skip_serializing_if = "Option::is_none")] Option<()>, + response: UserInfoResponse, + }, + FuzzyFileSearch { + params: FuzzyFileSearchParams, + response: FuzzyFileSearchResponse, }, /// Execute a command (argv vector) under the server's sandbox. 
ExecOneOffCommand { - #[serde(rename = "id")] - request_id: RequestId, params: ExecOneOffCommandParams, + response: ExecOneOffCommandResponse, }, } +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)] +#[serde(rename_all = "camelCase")] +pub struct InitializeParams { + pub client_info: ClientInfo, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)] +#[serde(rename_all = "camelCase")] +pub struct ClientInfo { + pub name: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub title: Option<String>, + pub version: String, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] +#[serde(rename_all = "camelCase")] +pub struct InitializeResponse { + pub user_agent: String, +} + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Default, TS)] #[serde(rename_all = "camelCase")] pub struct NewConversationParams { @@ -397,7 +399,7 @@ pub struct ExecOneOffCommandParams { #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] #[serde(rename_all = "camelCase")] -pub struct ExecArbitraryCommandResponse { +pub struct ExecOneOffCommandResponse { pub exit_code: i32, pub stdout: String, pub stderr: String, @@ -601,30 +603,74 @@ pub enum InputItem { }, } -// TODO(mbolin): Need test to ensure these constants match the enum variants. +/// Generates an `enum ServerRequest` where each variant is a request that the +/// server can send to the client along with the corresponding params and +/// response types. It also generates helper types used by the app/server +/// infrastructure (payload enum, request constructor, and export helpers). +macro_rules! server_request_definitions { + ( + $( + $(#[$variant_meta:meta])* + $variant:ident + ),* $(,)? + ) => { + paste! { + /// Request initiated from the server and sent to the client. + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] + #[serde(tag = "method", rename_all = "camelCase")] + pub enum ServerRequest { + $( + $(#[$variant_meta])* + $variant { + #[serde(rename = "id")] + request_id: RequestId, + params: [<$variant Params>], + }, + )* + } + + #[derive(Debug, Clone, PartialEq)] + pub enum ServerRequestPayload { + $( $variant([<$variant Params>]), )* + } + + impl ServerRequestPayload { + pub fn request_with_id(self, request_id: RequestId) -> ServerRequest { + match self { + $(Self::$variant(params) => ServerRequest::$variant { request_id, params },)* + } + } + } + } -pub const APPLY_PATCH_APPROVAL_METHOD: &str = "applyPatchApproval"; -pub const EXEC_COMMAND_APPROVAL_METHOD: &str = "execCommandApproval"; + pub fn export_server_responses( + out_dir: &::std::path::Path, + ) -> ::std::result::Result<(), ::ts_rs::ExportError> { + paste! { + $(<[<$variant Response>] as ::ts_rs::TS>::export_all_to(out_dir)?;)* + } + Ok(()) + } + }; +} -/// Request initiated from the server and sent to the client. -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] -#[serde(tag = "method", rename_all = "camelCase")] -pub enum ServerRequest { +impl TryFrom<JSONRPCRequest> for ServerRequest { + type Error = serde_json::Error; + + fn try_from(value: JSONRPCRequest) -> Result<Self, Self::Error> { + serde_json::from_value(serde_json::to_value(value)?) + } +} + +server_request_definitions! { /// Request to approve a patch. - ApplyPatchApproval { - #[serde(rename = "id")] - request_id: RequestId, - params: ApplyPatchApprovalParams, - }, + ApplyPatchApproval, /// Request to exec a command.
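/// The client answers with an `ExecCommandApprovalResponse` carrying a `ReviewDecision`.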
- ExecCommandApproval { - #[serde(rename = "id")] - request_id: RequestId, - params: ExecCommandApprovalParams, - }, + ExecCommandApproval, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] +#[serde(rename_all = "camelCase")] pub struct ApplyPatchApprovalParams { pub conversation_id: ConversationId, /// Use to correlate this with [codex_core::protocol::PatchApplyBeginEvent] @@ -641,6 +687,7 @@ pub struct ApplyPatchApprovalParams { } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] +#[serde(rename_all = "camelCase")] pub struct ExecCommandApprovalParams { pub conversation_id: ConversationId, /// Use to correlate this with [codex_core::protocol::ExecCommandBeginEvent] @@ -662,6 +709,33 @@ pub struct ApplyPatchApprovalResponse { pub decision: ReviewDecision, } +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] +#[serde(rename_all = "camelCase")] +#[ts(rename_all = "camelCase")] +pub struct FuzzyFileSearchParams { + pub query: String, + pub roots: Vec<String>, + // if provided, will cancel any previous request that used the same value + #[serde(skip_serializing_if = "Option::is_none")] + pub cancellation_token: Option<String>, +} + +/// Superset of [`codex_file_search::FileMatch`] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] +pub struct FuzzyFileSearchResult { + pub root: String, + pub path: String, + pub file_name: String, + pub score: u32, + #[serde(skip_serializing_if = "Option::is_none")] + pub indices: Option<Vec<u32>>, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] +pub struct FuzzyFileSearchResponse { + pub files: Vec<FuzzyFileSearchResult>, +} + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] #[serde(rename_all = "camelCase")] pub struct LoginChatGptCompleteNotification { @@ -671,6 +745,34 @@ pub struct LoginChatGptCompleteNotification { pub error: Option<String>, } +#[derive(Serialize, Deserialize, Debug, Clone, TS)] +#[serde(rename_all = "camelCase")] +pub struct SessionConfiguredNotification { + /// Name left as session_id instead of conversation_id for backwards compatibility. + pub session_id: ConversationId, + + /// Tell the client what model is being queried. + pub model: String, + + /// The effort the model is putting into reasoning about the user's request. + #[serde(skip_serializing_if = "Option::is_none")] + pub reasoning_effort: Option<ReasoningEffortConfig>, + + /// Identifier of the history log file (inode on Unix, 0 otherwise). + pub history_log_id: u64, + + /// Current number of entries in the history log. + #[ts(type = "number")] + pub history_entry_count: usize, + + /// Optional initial messages (as events) for resumed sessions. + /// When present, UIs can use these to seed the history. + #[serde(skip_serializing_if = "Option::is_none")] + pub initial_messages: Option<Vec<EventMsg>>, + + pub rollout_path: PathBuf, +} + #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS)] #[serde(rename_all = "camelCase")] pub struct AuthStatusChangeNotification { @@ -679,7 +781,8 @@ pub struct AuthStatusChangeNotification { pub auth_method: Option<AuthMode>, } -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, TS, Display)] +/// Notification sent from the server to the client.
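+/// Serialized with the camelCase method name as the `method` tag and the payload under `params`.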
+#[derive(Serialize, Deserialize, Debug, Clone, TS, Display)] #[serde(tag = "method", content = "params", rename_all = "camelCase")] #[strum(serialize_all = "camelCase")] pub enum ServerNotification { @@ -688,6 +791,9 @@ pub enum ServerNotification { /// ChatGPT login flow completed LoginChatGptComplete(LoginChatGptCompleteNotification), + + /// The special session configured event for a new or resumed conversation. + SessionConfigured(SessionConfiguredNotification), } impl ServerNotification { @@ -695,18 +801,36 @@ impl ServerNotification { match self { ServerNotification::AuthStatusChange(params) => serde_json::to_value(params), ServerNotification::LoginChatGptComplete(params) => serde_json::to_value(params), + ServerNotification::SessionConfigured(params) => serde_json::to_value(params), } } } +impl TryFrom<JSONRPCNotification> for ServerNotification { + type Error = serde_json::Error; + + fn try_from(value: JSONRPCNotification) -> Result<Self, Self::Error> { + serde_json::from_value(serde_json::to_value(value)?) + } +} + +/// Notification sent from the client to the server. +#[derive(Serialize, Deserialize, Debug, Clone, TS, Display)] +#[serde(tag = "method", content = "params", rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum ClientNotification { + Initialized, +} + #[cfg(test)] mod tests { use super::*; + use anyhow::Result; use pretty_assertions::assert_eq; use serde_json::json; #[test] - fn serialize_new_conversation() { + fn serialize_new_conversation() -> Result<()> { let request = ClientRequest::NewConversation { request_id: RequestId::Integer(42), params: NewConversationParams { @@ -730,34 +854,79 @@ mod tests { "approvalPolicy": "on-request" } }), - serde_json::to_value(&request).unwrap(), + serde_json::to_value(&request)?, ); + Ok(()) } #[test] - fn test_conversation_id_default_is_not_zeroes() { - let id = ConversationId::default(); - assert_ne!(id.uuid, Uuid::nil()); - } - - #[test] - fn conversation_id_serializes_as_plain_string() { - let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8").unwrap(); + fn conversation_id_serializes_as_plain_string() -> Result<()> { + let id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?; assert_eq!( json!("67e55044-10b1-426f-9247-bb680e5fe0c8"), - serde_json::to_value(id).unwrap() + serde_json::to_value(id)? ); + Ok(()) } #[test] - fn conversation_id_deserializes_from_plain_string() { + fn conversation_id_deserializes_from_plain_string() -> Result<()> { let id: ConversationId = - serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8")).unwrap(); + serde_json::from_value(json!("67e55044-10b1-426f-9247-bb680e5fe0c8"))?; assert_eq!( - ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8").unwrap(), + ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?, id, ); + Ok(()) + } + + #[test] + fn serialize_client_notification() -> Result<()> { + let notification = ClientNotification::Initialized; + // Note there is no "params" field for this notification.
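+ // (Server notifications, by contrast, carry their payload under "params"; see verify_server_notification_serialization in outgoing_message.rs.)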
+ assert_eq!( + json!({ + "method": "initialized", + }), + serde_json::to_value(&notification)?, + ); + Ok(()) + } + + #[test] + fn serialize_server_request() -> Result<()> { + let conversation_id = ConversationId::from_string("67e55044-10b1-426f-9247-bb680e5fe0c8")?; + let params = ExecCommandApprovalParams { + conversation_id, + call_id: "call-42".to_string(), + command: vec!["echo".to_string(), "hello".to_string()], + cwd: PathBuf::from("/tmp"), + reason: Some("because tests".to_string()), + }; + let request = ServerRequest::ExecCommandApproval { + request_id: RequestId::Integer(7), + params: params.clone(), + }; + + assert_eq!( + json!({ + "method": "execCommandApproval", + "id": 7, + "params": { + "conversationId": "67e55044-10b1-426f-9247-bb680e5fe0c8", + "callId": "call-42", + "command": ["echo", "hello"], + "cwd": "/tmp", + "reason": "because tests", + } + }), + serde_json::to_value(&request)?, + ); + + let payload = ServerRequestPayload::ExecCommandApproval(params); + assert_eq!(payload.request_with_id(RequestId::Integer(7)), request); + Ok(()) } } diff --git a/codex-rs/app-server/Cargo.toml b/codex-rs/app-server/Cargo.toml new file mode 100644 index 00000000000..545ef4898c9 --- /dev/null +++ b/codex-rs/app-server/Cargo.toml @@ -0,0 +1,49 @@ +[package] +edition = "2024" +name = "codex-app-server" +version = { workspace = true } + +[[bin]] +name = "codex-app-server" +path = "src/main.rs" + +[lib] +name = "codex_app_server" +path = "src/lib.rs" + +[lints] +workspace = true + +[dependencies] +anyhow = { workspace = true } +codex-arg0 = { workspace = true } +codex-common = { workspace = true, features = ["cli"] } +codex-core = { workspace = true } +codex-file-search = { workspace = true } +codex-login = { workspace = true } +codex-protocol = { workspace = true } +codex-app-server-protocol = { workspace = true } +codex-utils-json-to-toml = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +tokio = { workspace = true, features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", + "signal", +] } +tracing = { workspace = true, features = ["log"] } +tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] } +uuid = { workspace = true, features = ["serde", "v7"] } + +[dev-dependencies] +app_test_support = { workspace = true } +assert_cmd = { workspace = true } +base64 = { workspace = true } +core_test_support = { workspace = true } +os_info = { workspace = true } +pretty_assertions = { workspace = true } +tempfile = { workspace = true } +toml = { workspace = true } +wiremock = { workspace = true } diff --git a/codex-rs/mcp-server/src/codex_message_processor.rs b/codex-rs/app-server/src/codex_message_processor.rs similarity index 87% rename from codex-rs/mcp-server/src/codex_message_processor.rs rename to codex-rs/app-server/src/codex_message_processor.rs index 1d4a1b0fbb4..c158621d2e4 100644 --- a/codex-rs/mcp-server/src/codex_message_processor.rs +++ b/codex-rs/app-server/src/codex_message_processor.rs @@ -1,12 +1,59 @@ use crate::error_code::INTERNAL_ERROR_CODE; use crate::error_code::INVALID_REQUEST_ERROR_CODE; -use crate::json_to_toml::json_to_toml; +use crate::fuzzy_file_search::run_fuzzy_file_search; use crate::outgoing_message::OutgoingMessageSender; use crate::outgoing_message::OutgoingNotification; +use codex_app_server_protocol::AddConversationListenerParams; +use codex_app_server_protocol::AddConversationSubscriptionResponse; +use codex_app_server_protocol::ApplyPatchApprovalParams; +use
codex_app_server_protocol::ApplyPatchApprovalResponse; +use codex_app_server_protocol::ArchiveConversationParams; +use codex_app_server_protocol::ArchiveConversationResponse; +use codex_app_server_protocol::AuthStatusChangeNotification; +use codex_app_server_protocol::ClientRequest; +use codex_app_server_protocol::ConversationSummary; +use codex_app_server_protocol::ExecCommandApprovalParams; +use codex_app_server_protocol::ExecCommandApprovalResponse; +use codex_app_server_protocol::ExecOneOffCommandParams; +use codex_app_server_protocol::ExecOneOffCommandResponse; +use codex_app_server_protocol::FuzzyFileSearchParams; +use codex_app_server_protocol::FuzzyFileSearchResponse; +use codex_app_server_protocol::GetUserAgentResponse; +use codex_app_server_protocol::GetUserSavedConfigResponse; +use codex_app_server_protocol::GitDiffToRemoteResponse; +use codex_app_server_protocol::InputItem as WireInputItem; +use codex_app_server_protocol::InterruptConversationParams; +use codex_app_server_protocol::InterruptConversationResponse; +use codex_app_server_protocol::JSONRPCErrorError; +use codex_app_server_protocol::ListConversationsParams; +use codex_app_server_protocol::ListConversationsResponse; +use codex_app_server_protocol::LoginApiKeyParams; +use codex_app_server_protocol::LoginApiKeyResponse; +use codex_app_server_protocol::LoginChatGptCompleteNotification; +use codex_app_server_protocol::LoginChatGptResponse; +use codex_app_server_protocol::NewConversationParams; +use codex_app_server_protocol::NewConversationResponse; +use codex_app_server_protocol::RemoveConversationListenerParams; +use codex_app_server_protocol::RemoveConversationSubscriptionResponse; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::Result as JsonRpcResult; +use codex_app_server_protocol::ResumeConversationParams; +use codex_app_server_protocol::SendUserMessageParams; +use codex_app_server_protocol::SendUserMessageResponse; +use codex_app_server_protocol::SendUserTurnParams; +use codex_app_server_protocol::SendUserTurnResponse; +use codex_app_server_protocol::ServerNotification; +use codex_app_server_protocol::ServerRequestPayload; +use codex_app_server_protocol::SessionConfiguredNotification; +use codex_app_server_protocol::SetDefaultModelParams; +use codex_app_server_protocol::SetDefaultModelResponse; +use codex_app_server_protocol::UserInfoResponse; +use codex_app_server_protocol::UserSavedConfig; use codex_core::AuthManager; use codex_core::CodexConversation; use codex_core::ConversationManager; use codex_core::Cursor as RolloutCursor; +use codex_core::INTERACTIVE_SESSION_SOURCES; use codex_core::NewConversation; use codex_core::RolloutRecorder; use codex_core::SessionMeta; @@ -36,58 +83,18 @@ use codex_core::protocol::ReviewDecision; use codex_login::ServerOptions as LoginServerOptions; use codex_login::ShutdownHandle; use codex_login::run_login_server; -use codex_protocol::mcp_protocol::APPLY_PATCH_APPROVAL_METHOD; -use codex_protocol::mcp_protocol::AddConversationListenerParams; -use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse; -use codex_protocol::mcp_protocol::ApplyPatchApprovalParams; -use codex_protocol::mcp_protocol::ApplyPatchApprovalResponse; -use codex_protocol::mcp_protocol::ArchiveConversationParams; -use codex_protocol::mcp_protocol::ArchiveConversationResponse; -use codex_protocol::mcp_protocol::AuthStatusChangeNotification; -use codex_protocol::mcp_protocol::ClientRequest; -use codex_protocol::mcp_protocol::ConversationId; -use 
codex_protocol::mcp_protocol::ConversationSummary; -use codex_protocol::mcp_protocol::EXEC_COMMAND_APPROVAL_METHOD; -use codex_protocol::mcp_protocol::ExecArbitraryCommandResponse; -use codex_protocol::mcp_protocol::ExecCommandApprovalParams; -use codex_protocol::mcp_protocol::ExecCommandApprovalResponse; -use codex_protocol::mcp_protocol::ExecOneOffCommandParams; -use codex_protocol::mcp_protocol::GetUserAgentResponse; -use codex_protocol::mcp_protocol::GetUserSavedConfigResponse; -use codex_protocol::mcp_protocol::GitDiffToRemoteResponse; -use codex_protocol::mcp_protocol::InputItem as WireInputItem; -use codex_protocol::mcp_protocol::InterruptConversationParams; -use codex_protocol::mcp_protocol::InterruptConversationResponse; -use codex_protocol::mcp_protocol::ListConversationsParams; -use codex_protocol::mcp_protocol::ListConversationsResponse; -use codex_protocol::mcp_protocol::LoginApiKeyParams; -use codex_protocol::mcp_protocol::LoginApiKeyResponse; -use codex_protocol::mcp_protocol::LoginChatGptCompleteNotification; -use codex_protocol::mcp_protocol::LoginChatGptResponse; -use codex_protocol::mcp_protocol::NewConversationParams; -use codex_protocol::mcp_protocol::NewConversationResponse; -use codex_protocol::mcp_protocol::RemoveConversationListenerParams; -use codex_protocol::mcp_protocol::RemoveConversationSubscriptionResponse; -use codex_protocol::mcp_protocol::ResumeConversationParams; -use codex_protocol::mcp_protocol::SendUserMessageParams; -use codex_protocol::mcp_protocol::SendUserMessageResponse; -use codex_protocol::mcp_protocol::SendUserTurnParams; -use codex_protocol::mcp_protocol::SendUserTurnResponse; -use codex_protocol::mcp_protocol::ServerNotification; -use codex_protocol::mcp_protocol::SetDefaultModelParams; -use codex_protocol::mcp_protocol::SetDefaultModelResponse; -use codex_protocol::mcp_protocol::UserInfoResponse; -use codex_protocol::mcp_protocol::UserSavedConfig; +use codex_protocol::ConversationId; use codex_protocol::models::ContentItem; use codex_protocol::models::ResponseItem; use codex_protocol::protocol::InputMessageKind; use codex_protocol::protocol::USER_MESSAGE_BEGIN; -use mcp_types::JSONRPCErrorError; -use mcp_types::RequestId; +use codex_utils_json_to_toml::json_to_toml; use std::collections::HashMap; use std::ffi::OsStr; use std::path::PathBuf; use std::sync::Arc; +use std::sync::atomic::AtomicBool; +use std::sync::atomic::Ordering; use std::time::Duration; use tokio::select; use tokio::sync::Mutex; @@ -122,6 +129,7 @@ pub(crate) struct CodexMessageProcessor { active_login: Arc<Mutex<Option<ActiveLogin>>>, // Queue of pending interrupt requests per conversation. We reply when TurnAborted arrives. pending_interrupts: Arc<Mutex<HashMap<ConversationId, Vec<RequestId>>>>, + pending_fuzzy_searches: Arc<Mutex<HashMap<String, Arc<AtomicBool>>>>, } impl CodexMessageProcessor { @@ -141,11 +149,15 @@ impl CodexMessageProcessor { conversation_listeners: HashMap::new(), active_login: Arc::new(Mutex::new(None)), pending_interrupts: Arc::new(Mutex::new(HashMap::new())), + pending_fuzzy_searches: Arc::new(Mutex::new(HashMap::new())), } } pub async fn process_request(&mut self, request: ClientRequest) { match request { + ClientRequest::Initialize { ..
} => { + panic!("Initialize should be handled in MessageProcessor"); + } ClientRequest::NewConversation { request_id, params } => { // Do not tokio::spawn() to process new_conversation() // asynchronously because we need to ensure the conversation is @@ -182,30 +194,48 @@ impl CodexMessageProcessor { ClientRequest::LoginApiKey { request_id, params } => { self.login_api_key(request_id, params).await; } - ClientRequest::LoginChatGpt { request_id } => { + ClientRequest::LoginChatGpt { + request_id, + params: _, + } => { self.login_chatgpt(request_id).await; } ClientRequest::CancelLoginChatGpt { request_id, params } => { self.cancel_login_chatgpt(request_id, params.login_id).await; } - ClientRequest::LogoutChatGpt { request_id } => { + ClientRequest::LogoutChatGpt { + request_id, + params: _, + } => { self.logout_chatgpt(request_id).await; } ClientRequest::GetAuthStatus { request_id, params } => { self.get_auth_status(request_id, params).await; } - ClientRequest::GetUserSavedConfig { request_id } => { + ClientRequest::GetUserSavedConfig { + request_id, + params: _, + } => { self.get_user_saved_config(request_id).await; } ClientRequest::SetDefaultModel { request_id, params } => { self.set_default_model(request_id, params).await; } - ClientRequest::GetUserAgent { request_id } => { + ClientRequest::GetUserAgent { + request_id, + params: _, + } => { self.get_user_agent(request_id).await; } - ClientRequest::UserInfo { request_id } => { + ClientRequest::UserInfo { + request_id, + params: _, + } => { self.get_user_info(request_id).await; } + ClientRequest::FuzzyFileSearch { request_id, params } => { + self.fuzzy_file_search(request_id, params).await; + } ClientRequest::ExecOneOffCommand { request_id, params } => { self.exec_one_off_command(request_id, params).await; } @@ -357,7 +387,7 @@ impl CodexMessageProcessor { self.outgoing .send_response( request_id, - codex_protocol::mcp_protocol::CancelLoginChatGptResponse {}, + codex_app_server_protocol::CancelLoginChatGptResponse {}, ) .await; } else { @@ -393,7 +423,7 @@ impl CodexMessageProcessor { self.outgoing .send_response( request_id, - codex_protocol::mcp_protocol::LogoutChatGptResponse {}, + codex_app_server_protocol::LogoutChatGptResponse {}, ) .await; @@ -411,7 +441,7 @@ impl CodexMessageProcessor { async fn get_auth_status( &self, request_id: RequestId, - params: codex_protocol::mcp_protocol::GetAuthStatusParams, + params: codex_app_server_protocol::GetAuthStatusParams, ) { let include_token = params.include_token.unwrap_or(false); let do_refresh = params.refresh_token.unwrap_or(false); @@ -426,7 +456,7 @@ impl CodexMessageProcessor { let requires_openai_auth = self.config.model_provider.requires_openai_auth; let response = if !requires_openai_auth { - codex_protocol::mcp_protocol::GetAuthStatusResponse { + codex_app_server_protocol::GetAuthStatusResponse { auth_method: None, auth_token: None, requires_openai_auth: Some(false), @@ -446,13 +476,13 @@ impl CodexMessageProcessor { (None, None) } }; - codex_protocol::mcp_protocol::GetAuthStatusResponse { + codex_app_server_protocol::GetAuthStatusResponse { auth_method: reported_auth_method, auth_token: token_opt, requires_openai_auth: Some(true), } } - None => codex_protocol::mcp_protocol::GetAuthStatusResponse { + None => codex_app_server_protocol::GetAuthStatusResponse { auth_method: None, auth_token: None, requires_openai_auth: Some(true), @@ -603,7 +633,7 @@ impl CodexMessageProcessor { .await { Ok(output) => { - let response = ExecArbitraryCommandResponse { + let response = 
ExecOneOffCommandResponse { exit_code: output.exit_code, stdout: output.stdout.text, stderr: output.stderr.text, @@ -679,6 +709,7 @@ impl CodexMessageProcessor { &self.config.codex_home, page_size, cursor_ref, + INTERACTIVE_SESSION_SOURCES, ) .await { @@ -752,11 +783,19 @@ impl CodexMessageProcessor { session_configured, .. }) => { - let event = Event { - id: "".to_string(), - msg: EventMsg::SessionConfigured(session_configured.clone()), - }; - self.outgoing.send_event_as_notification(&event, None).await; + self.outgoing + .send_server_notification(ServerNotification::SessionConfigured( + SessionConfiguredNotification { + session_id: session_configured.session_id, + model: session_configured.model.clone(), + reasoning_effort: session_configured.reasoning_effort, + history_log_id: session_configured.history_log_id, + history_entry_count: session_configured.history_entry_count, + initial_messages: session_configured.initial_messages.clone(), + rollout_path: session_configured.rollout_path.clone(), + }, + )) + .await; let initial_messages = session_configured.initial_messages.map(|msgs| { msgs.into_iter() .filter(|event| { @@ -771,7 +810,7 @@ impl CodexMessageProcessor { }); // Reply with conversation id + model and initial messages (when present) - let response = codex_protocol::mcp_protocol::ResumeConversationResponse { + let response = codex_app_server_protocol::ResumeConversationResponse { conversation_id, model: session_configured.model.clone(), initial_messages, @@ -1009,6 +1048,7 @@ impl CodexMessageProcessor { model, effort, summary, + final_output_json_schema: None, }) .await; @@ -1166,6 +1206,46 @@ impl CodexMessageProcessor { } } } + + async fn fuzzy_file_search(&mut self, request_id: RequestId, params: FuzzyFileSearchParams) { + let FuzzyFileSearchParams { + query, + roots, + cancellation_token, + } = params; + + let cancel_flag = match cancellation_token.clone() { + Some(token) => { + let mut pending_fuzzy_searches = self.pending_fuzzy_searches.lock().await; + // if a cancellation_token is provided and a pending_request exists for + // that token, cancel it + if let Some(existing) = pending_fuzzy_searches.get(&token) { + existing.store(true, Ordering::Relaxed); + } + let flag = Arc::new(AtomicBool::new(false)); + pending_fuzzy_searches.insert(token.clone(), flag.clone()); + flag + } + None => Arc::new(AtomicBool::new(false)), + }; + + let results = match query.as_str() { + "" => vec![], + _ => run_fuzzy_file_search(query, roots, cancel_flag.clone()).await, + }; + + if let Some(token) = cancellation_token { + let mut pending_fuzzy_searches = self.pending_fuzzy_searches.lock().await; + if let Some(current_flag) = pending_fuzzy_searches.get(&token) + && Arc::ptr_eq(current_flag, &cancel_flag) + { + pending_fuzzy_searches.remove(&token); + } + } + + let response = FuzzyFileSearchResponse { files: results }; + self.outgoing.send_response(request_id, response).await; + } } async fn apply_bespoke_event_handling( @@ -1190,9 +1270,8 @@ async fn apply_bespoke_event_handling( reason, grant_root, }; - let value = serde_json::to_value(&params).unwrap_or_default(); let rx = outgoing - .send_request(APPLY_PATCH_APPROVAL_METHOD, Some(value)) + .send_request(ServerRequestPayload::ApplyPatchApproval(params)) .await; // TODO(mbolin): Enforce a timeout so this task does not live indefinitely?
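// The returned receiver resolves once the client's JSON-RPC response with the matching id arrives and OutgoingMessageSender::notify_client_response fires the stored callback.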
tokio::spawn(async move { @@ -1212,9 +1291,8 @@ async fn apply_bespoke_event_handling( cwd, reason, }; - let value = serde_json::to_value(&params).unwrap_or_default(); let rx = outgoing - .send_request(EXEC_COMMAND_APPROVAL_METHOD, Some(value)) + .send_request(ServerRequestPayload::ExecCommandApproval(params)) .await; // TODO(mbolin): Enforce a timeout so this task does not live indefinitely? @@ -1285,7 +1363,7 @@ fn derive_config_from_params( async fn on_patch_approval_response( event_id: String, - receiver: oneshot::Receiver<mcp_types::Result>, + receiver: oneshot::Receiver<JsonRpcResult>, codex: Arc<CodexConversation>, ) { let response = receiver.await; @@ -1327,7 +1405,7 @@ async fn on_patch_approval_response( async fn on_exec_approval_response( event_id: String, - receiver: oneshot::Receiver<mcp_types::Result>, + receiver: oneshot::Receiver<JsonRpcResult>, conversation: Arc<CodexConversation>, ) { let response = receiver.await; @@ -1410,13 +1488,13 @@ fn extract_conversation_summary( #[cfg(test)] mod tests { use super::*; + use anyhow::Result; use pretty_assertions::assert_eq; use serde_json::json; #[test] - fn extract_conversation_summary_prefers_plain_user_messages() { - let conversation_id = - ConversationId::from_string("3f941c35-29b3-493b-b0a4-e25800d9aeb0").unwrap(); + fn extract_conversation_summary_prefers_plain_user_messages() -> Result<()> { + let conversation_id = ConversationId::from_string("3f941c35-29b3-493b-b0a4-e25800d9aeb0")?; let timestamp = Some("2025-09-05T16:53:11.850Z".to_string()); let path = PathBuf::from("rollout.jsonl"); @@ -1456,5 +1534,6 @@ mod tests { ); assert_eq!(summary.path, path); assert_eq!(summary.preview, "Count to 5"); + Ok(()) } } diff --git a/codex-rs/app-server/src/error_code.rs b/codex-rs/app-server/src/error_code.rs new file mode 100644 index 00000000000..1ffd889d404 --- /dev/null +++ b/codex-rs/app-server/src/error_code.rs @@ -0,0 +1,2 @@ +pub(crate) const INVALID_REQUEST_ERROR_CODE: i64 = -32600; +pub(crate) const INTERNAL_ERROR_CODE: i64 = -32603; diff --git a/codex-rs/app-server/src/fuzzy_file_search.rs b/codex-rs/app-server/src/fuzzy_file_search.rs new file mode 100644 index 00000000000..6c83a0f4ec7 --- /dev/null +++ b/codex-rs/app-server/src/fuzzy_file_search.rs @@ -0,0 +1,92 @@ +use std::num::NonZero; +use std::num::NonZeroUsize; +use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; +use std::sync::atomic::AtomicBool; + +use codex_app_server_protocol::FuzzyFileSearchResult; +use codex_file_search as file_search; +use tokio::task::JoinSet; +use tracing::warn; + +const LIMIT_PER_ROOT: usize = 50; +const MAX_THREADS: usize = 12; +const COMPUTE_INDICES: bool = true; + +pub(crate) async fn run_fuzzy_file_search( + query: String, + roots: Vec<String>, + cancellation_flag: Arc<AtomicBool>, +) -> Vec<FuzzyFileSearchResult> { + #[expect(clippy::expect_used)] + let limit_per_root = + NonZero::new(LIMIT_PER_ROOT).expect("LIMIT_PER_ROOT should be a valid non-zero usize"); + + let cores = std::thread::available_parallelism() + .map(std::num::NonZero::get) + .unwrap_or(1); + let threads = cores.min(MAX_THREADS); + let threads_per_root = (threads / roots.len()).max(1); + let threads = NonZero::new(threads_per_root).unwrap_or(NonZeroUsize::MIN); + + let mut files: Vec<FuzzyFileSearchResult> = Vec::new(); + let mut join_set = JoinSet::new(); + + for root in roots { + let search_dir = PathBuf::from(&root); + let query = query.clone(); + let cancel_flag = cancellation_flag.clone(); + join_set.spawn_blocking(move || { + match file_search::run( + query.as_str(), + limit_per_root, + &search_dir, + Vec::new(), + threads, + cancel_flag, + COMPUTE_INDICES, + ) { + Ok(res) => Ok((root, res)), + Err(err) => Err((root,
err)), + } + }); + } + + while let Some(res) = join_set.join_next().await { + match res { + Ok(Ok((root, res))) => { + for m in res.matches { + let path = m.path; + //TODO(shijie): Move file name generation to file_search lib. + let file_name = Path::new(&path) + .file_name() + .map(|name| name.to_string_lossy().into_owned()) + .unwrap_or_else(|| path.clone()); + let result = FuzzyFileSearchResult { + root: root.clone(), + path, + file_name, + score: m.score, + indices: m.indices, + }; + files.push(result); + } + } + Ok(Err((root, err))) => { + warn!("fuzzy-file-search in dir '{root}' failed: {err}"); + } + Err(err) => { + warn!("fuzzy-file-search join_next failed: {err}"); + } + } + } + + files.sort_by(file_search::cmp_by_score_desc_then_path_asc::< + FuzzyFileSearchResult, + _, + _, + >(|f| f.score, |f| f.path.as_str())); + + files +} diff --git a/codex-rs/app-server/src/lib.rs b/codex-rs/app-server/src/lib.rs new file mode 100644 index 00000000000..3e888c6960c --- /dev/null +++ b/codex-rs/app-server/src/lib.rs @@ -0,0 +1,139 @@ +#![deny(clippy::print_stdout, clippy::print_stderr)] + +use std::io::ErrorKind; +use std::io::Result as IoResult; +use std::path::PathBuf; + +use codex_common::CliConfigOverrides; +use codex_core::config::Config; +use codex_core::config::ConfigOverrides; + +use codex_app_server_protocol::JSONRPCMessage; +use tokio::io::AsyncBufReadExt; +use tokio::io::AsyncWriteExt; +use tokio::io::BufReader; +use tokio::io::{self}; +use tokio::sync::mpsc; +use tracing::debug; +use tracing::error; +use tracing::info; +use tracing_subscriber::EnvFilter; + +use crate::message_processor::MessageProcessor; +use crate::outgoing_message::OutgoingMessage; +use crate::outgoing_message::OutgoingMessageSender; + +mod codex_message_processor; +mod error_code; +mod fuzzy_file_search; +mod message_processor; +mod outgoing_message; + +/// Size of the bounded channels used to communicate between tasks. The value +/// is a balance between throughput and memory usage – 128 messages should be +/// plenty for an interactive CLI. +const CHANNEL_CAPACITY: usize = 128; + +pub async fn run_main( + codex_linux_sandbox_exe: Option, + cli_config_overrides: CliConfigOverrides, +) -> IoResult<()> { + // Install a simple subscriber so `tracing` output is visible. Users can + // control the log level with `RUST_LOG`. + tracing_subscriber::fmt() + .with_writer(std::io::stderr) + .with_env_filter(EnvFilter::from_default_env()) + .init(); + + // Set up channels. + let (incoming_tx, mut incoming_rx) = mpsc::channel::(CHANNEL_CAPACITY); + let (outgoing_tx, mut outgoing_rx) = mpsc::unbounded_channel::(); + + // Task: read from stdin, push to `incoming_tx`. + let stdin_reader_handle = tokio::spawn({ + async move { + let stdin = io::stdin(); + let reader = BufReader::new(stdin); + let mut lines = reader.lines(); + + while let Some(line) = lines.next_line().await.unwrap_or_default() { + match serde_json::from_str::(&line) { + Ok(msg) => { + if incoming_tx.send(msg).await.is_err() { + // Receiver gone – nothing left to do. + break; + } + } + Err(e) => error!("Failed to deserialize JSONRPCMessage: {e}"), + } + } + + debug!("stdin reader finished (EOF)"); + } + }); + + // Parse CLI overrides once and derive the base Config eagerly so later + // components do not need to work with raw TOML values. 
+ let cli_kv_overrides = cli_config_overrides.parse_overrides().map_err(|e| { + std::io::Error::new( + ErrorKind::InvalidInput, + format!("error parsing -c overrides: {e}"), + ) + })?; + let config = Config::load_with_cli_overrides(cli_kv_overrides, ConfigOverrides::default()) + .map_err(|e| { + std::io::Error::new(ErrorKind::InvalidData, format!("error loading config: {e}")) + })?; + + // Task: process incoming messages. + let processor_handle = tokio::spawn({ + let outgoing_message_sender = OutgoingMessageSender::new(outgoing_tx); + let mut processor = MessageProcessor::new( + outgoing_message_sender, + codex_linux_sandbox_exe, + std::sync::Arc::new(config), + ); + async move { + while let Some(msg) = incoming_rx.recv().await { + match msg { + JSONRPCMessage::Request(r) => processor.process_request(r).await, + JSONRPCMessage::Response(r) => processor.process_response(r).await, + JSONRPCMessage::Notification(n) => processor.process_notification(n).await, + JSONRPCMessage::Error(e) => processor.process_error(e), + } + } + + info!("processor task exited (channel closed)"); + } + }); + + // Task: write outgoing messages to stdout. + let stdout_writer_handle = tokio::spawn(async move { + let mut stdout = io::stdout(); + while let Some(outgoing_message) = outgoing_rx.recv().await { + let Ok(value) = serde_json::to_value(outgoing_message) else { + error!("Failed to convert OutgoingMessage to JSON value"); + continue; + }; + match serde_json::to_string(&value) { + Ok(mut json) => { + json.push('\n'); + if let Err(e) = stdout.write_all(json.as_bytes()).await { + error!("Failed to write to stdout: {e}"); + break; + } + } + Err(e) => error!("Failed to serialize JSONRPCMessage: {e}"), + } + } + + info!("stdout writer exited (channel closed)"); + }); + + // Wait for all tasks to finish. The typical exit path is the stdin reader + // hitting EOF which, once it drops `incoming_tx`, propagates shutdown to + // the processor and then to the stdout task. 
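 + // The transport is newline-delimited JSON-RPC: each message occupies exactly one line on stdin/stdout.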
+ let _ = tokio::join!(stdin_reader_handle, processor_handle, stdout_writer_handle); + + Ok(()) +} diff --git a/codex-rs/app-server/src/main.rs b/codex-rs/app-server/src/main.rs new file mode 100644 index 00000000000..689ec0877a7 --- /dev/null +++ b/codex-rs/app-server/src/main.rs @@ -0,0 +1,10 @@ +use codex_app_server::run_main; +use codex_arg0::arg0_dispatch_or_else; +use codex_common::CliConfigOverrides; + +fn main() -> anyhow::Result<()> { + arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move { + run_main(codex_linux_sandbox_exe, CliConfigOverrides::default()).await?; + Ok(()) + }) +} diff --git a/codex-rs/app-server/src/message_processor.rs b/codex-rs/app-server/src/message_processor.rs new file mode 100644 index 00000000000..15086c19e18 --- /dev/null +++ b/codex-rs/app-server/src/message_processor.rs @@ -0,0 +1,137 @@ +use std::path::PathBuf; + +use crate::codex_message_processor::CodexMessageProcessor; +use crate::error_code::INVALID_REQUEST_ERROR_CODE; +use crate::outgoing_message::OutgoingMessageSender; +use codex_app_server_protocol::ClientInfo; +use codex_app_server_protocol::ClientRequest; +use codex_app_server_protocol::InitializeResponse; + +use codex_app_server_protocol::JSONRPCError; +use codex_app_server_protocol::JSONRPCErrorError; +use codex_app_server_protocol::JSONRPCNotification; +use codex_app_server_protocol::JSONRPCRequest; +use codex_app_server_protocol::JSONRPCResponse; +use codex_core::AuthManager; +use codex_core::ConversationManager; +use codex_core::config::Config; +use codex_core::default_client::USER_AGENT_SUFFIX; +use codex_core::default_client::get_codex_user_agent; +use codex_protocol::protocol::SessionSource; +use std::sync::Arc; + +pub(crate) struct MessageProcessor { + outgoing: Arc<OutgoingMessageSender>, + codex_message_processor: CodexMessageProcessor, + initialized: bool, +} + +impl MessageProcessor { + /// Create a new `MessageProcessor`, retaining a handle to the outgoing + /// `Sender` so handlers can enqueue messages to be written to stdout. + pub(crate) fn new( + outgoing: OutgoingMessageSender, + codex_linux_sandbox_exe: Option<PathBuf>, + config: Arc<Config>, + ) -> Self { + let outgoing = Arc::new(outgoing); + let auth_manager = AuthManager::shared(config.codex_home.clone(), false); + let conversation_manager = Arc::new(ConversationManager::new( + auth_manager.clone(), + SessionSource::VSCode, + )); + let codex_message_processor = CodexMessageProcessor::new( + auth_manager, + conversation_manager, + outgoing.clone(), + codex_linux_sandbox_exe, + config, + ); + + Self { + outgoing, + codex_message_processor, + initialized: false, + } + } + + pub(crate) async fn process_request(&mut self, request: JSONRPCRequest) { + let request_id = request.id.clone(); + if let Ok(request_json) = serde_json::to_value(request) + && let Ok(codex_request) = serde_json::from_value::<ClientRequest>(request_json) + { + match codex_request { + // Handle Initialize internally so CodexMessageProcessor does not have to concern + // itself with the `initialized` bool.
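 + // Any other request that arrives before `initialize` is rejected below with INVALID_REQUEST_ERROR_CODE ("Not initialized").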
+ ClientRequest::Initialize { request_id, params } => { + if self.initialized { + let error = JSONRPCErrorError { + code: INVALID_REQUEST_ERROR_CODE, + message: "Already initialized".to_string(), + data: None, + }; + self.outgoing.send_error(request_id, error).await; + return; + } else { + let ClientInfo { + name, + title: _title, + version, + } = params.client_info; + let user_agent_suffix = format!("{name}; {version}"); + if let Ok(mut suffix) = USER_AGENT_SUFFIX.lock() { + *suffix = Some(user_agent_suffix); + } + + let user_agent = get_codex_user_agent(); + let response = InitializeResponse { user_agent }; + self.outgoing.send_response(request_id, response).await; + + self.initialized = true; + return; + } + } + _ => { + if !self.initialized { + let error = JSONRPCErrorError { + code: INVALID_REQUEST_ERROR_CODE, + message: "Not initialized".to_string(), + data: None, + }; + self.outgoing.send_error(request_id, error).await; + return; + } + } + } + + self.codex_message_processor + .process_request(codex_request) + .await; + } else { + let error = JSONRPCErrorError { + code: INVALID_REQUEST_ERROR_CODE, + message: "Invalid request".to_string(), + data: None, + }; + self.outgoing.send_error(request_id, error).await; + } + } + + pub(crate) async fn process_notification(&self, notification: JSONRPCNotification) { + // Currently, we do not expect to receive any notifications from the + // client, so we just log them. + tracing::info!("<- notification: {:?}", notification); + } + + /// Handle a standalone JSON-RPC response originating from the peer. + pub(crate) async fn process_response(&mut self, response: JSONRPCResponse) { + tracing::info!("<- response: {:?}", response); + let JSONRPCResponse { id, result, .. } = response; + self.outgoing.notify_client_response(id, result).await + } + + /// Handle an error object received from the peer. + pub(crate) fn process_error(&mut self, err: JSONRPCError) { + tracing::error!("<- error: {:?}", err); + } +} diff --git a/codex-rs/app-server/src/outgoing_message.rs b/codex-rs/app-server/src/outgoing_message.rs new file mode 100644 index 00000000000..96a2c5a96c5 --- /dev/null +++ b/codex-rs/app-server/src/outgoing_message.rs @@ -0,0 +1,174 @@ +use std::collections::HashMap; +use std::sync::atomic::AtomicI64; +use std::sync::atomic::Ordering; + +use codex_app_server_protocol::JSONRPCErrorError; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::Result; +use codex_app_server_protocol::ServerNotification; +use codex_app_server_protocol::ServerRequest; +use codex_app_server_protocol::ServerRequestPayload; +use serde::Serialize; +use tokio::sync::Mutex; +use tokio::sync::mpsc; +use tokio::sync::oneshot; +use tracing::warn; + +use crate::error_code::INTERNAL_ERROR_CODE; + +/// Sends messages to the client and manages request callbacks. 
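+/// `send_request` allocates the next integer id and stashes a oneshot sender keyed by that id;
+/// `notify_client_response` completes it when the client's reply arrives. For illustration, an
+/// exchange might look like `{"method":"execCommandApproval","id":3,"params":{...}}` going out
+/// and `{"id":3,"result":{"decision":"approved"}}` coming back.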
+pub(crate) struct OutgoingMessageSender { + next_request_id: AtomicI64, + sender: mpsc::UnboundedSender<OutgoingMessage>, + request_id_to_callback: Mutex<HashMap<RequestId, oneshot::Sender<Result>>>, +} + +impl OutgoingMessageSender { + pub(crate) fn new(sender: mpsc::UnboundedSender<OutgoingMessage>) -> Self { + Self { + next_request_id: AtomicI64::new(0), + sender, + request_id_to_callback: Mutex::new(HashMap::new()), + } + } + + pub(crate) async fn send_request( + &self, + request: ServerRequestPayload, + ) -> oneshot::Receiver<Result> { + let id = RequestId::Integer(self.next_request_id.fetch_add(1, Ordering::Relaxed)); + let outgoing_message_id = id.clone(); + let (tx_approve, rx_approve) = oneshot::channel(); + { + let mut request_id_to_callback = self.request_id_to_callback.lock().await; + request_id_to_callback.insert(id, tx_approve); + } + + let outgoing_message = + OutgoingMessage::Request(request.request_with_id(outgoing_message_id)); + let _ = self.sender.send(outgoing_message); + rx_approve + } + + pub(crate) async fn notify_client_response(&self, id: RequestId, result: Result) { + let entry = { + let mut request_id_to_callback = self.request_id_to_callback.lock().await; + request_id_to_callback.remove_entry(&id) + }; + + match entry { + Some((id, sender)) => { + if let Err(err) = sender.send(result) { + warn!("could not notify callback for {id:?} due to: {err:?}"); + } + } + None => { + warn!("could not find callback for {id:?}"); + } + } + } + + pub(crate) async fn send_response<T: Serialize>(&self, id: RequestId, response: T) { + match serde_json::to_value(response) { + Ok(result) => { + let outgoing_message = OutgoingMessage::Response(OutgoingResponse { id, result }); + let _ = self.sender.send(outgoing_message); + } + Err(err) => { + self.send_error( + id, + JSONRPCErrorError { + code: INTERNAL_ERROR_CODE, + message: format!("failed to serialize response: {err}"), + data: None, + }, + ) + .await; + } + } + } + + pub(crate) async fn send_server_notification(&self, notification: ServerNotification) { + let _ = self + .sender + .send(OutgoingMessage::AppServerNotification(notification)); + } + + /// All notifications should be migrated to [`ServerNotification`] and + /// [`OutgoingMessage::Notification`] should be removed. + pub(crate) async fn send_notification(&self, notification: OutgoingNotification) { + let outgoing_message = OutgoingMessage::Notification(notification); + let _ = self.sender.send(outgoing_message); + } + + pub(crate) async fn send_error(&self, id: RequestId, error: JSONRPCErrorError) { + let outgoing_message = OutgoingMessage::Error(OutgoingError { id, error }); + let _ = self.sender.send(outgoing_message); + } +} + +/// Outgoing message from the server to the client. +#[derive(Debug, Clone, Serialize)] +#[serde(untagged)] +pub(crate) enum OutgoingMessage { + Request(ServerRequest), + Notification(OutgoingNotification), + /// AppServerNotification is specific to the case where this is run as an + /// "app server" as opposed to an MCP server.
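 + /// Because `OutgoingMessage` is `#[serde(untagged)]`, this reaches the wire as {"method": ..., "params": ...}, the same shape as `Notification`.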
+ AppServerNotification(ServerNotification), + Response(OutgoingResponse), + Error(OutgoingError), +} + +#[derive(Debug, Clone, PartialEq, Serialize)] +pub(crate) struct OutgoingNotification { + pub method: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub params: Option<serde_json::Value>, +} + +#[derive(Debug, Clone, PartialEq, Serialize)] +pub(crate) struct OutgoingResponse { + pub id: RequestId, + pub result: Result, +} + +#[derive(Debug, Clone, PartialEq, Serialize)] +pub(crate) struct OutgoingError { + pub error: JSONRPCErrorError, + pub id: RequestId, +} + +#[cfg(test)] +mod tests { + use codex_app_server_protocol::LoginChatGptCompleteNotification; + use pretty_assertions::assert_eq; + use serde_json::json; + use uuid::Uuid; + + use super::*; + + #[test] + fn verify_server_notification_serialization() { + let notification = + ServerNotification::LoginChatGptComplete(LoginChatGptCompleteNotification { + login_id: Uuid::nil(), + success: true, + error: None, + }); + + let jsonrpc_notification = OutgoingMessage::AppServerNotification(notification); + assert_eq!( + json!({ + "method": "loginChatGptComplete", + "params": { + "loginId": Uuid::nil(), + "success": true, + }, + }), + serde_json::to_value(jsonrpc_notification) + .expect("ensure the strum macros serialize the method field correctly"), + "ensure the strum macros serialize the method field correctly" + ); + } +} diff --git a/codex-rs/app-server/tests/all.rs b/codex-rs/app-server/tests/all.rs new file mode 100644 index 00000000000..7e136e4cce2 --- /dev/null +++ b/codex-rs/app-server/tests/all.rs @@ -0,0 +1,3 @@ +// Single integration test binary that aggregates all test modules. +// The submodules live in `tests/suite/`. +mod suite; diff --git a/codex-rs/app-server/tests/common/Cargo.toml b/codex-rs/app-server/tests/common/Cargo.toml new file mode 100644 index 00000000000..306b1e187ed --- /dev/null +++ b/codex-rs/app-server/tests/common/Cargo.toml @@ -0,0 +1,21 @@ +[package] +edition = "2024" +name = "app_test_support" +version = { workspace = true } + +[lib] +path = "lib.rs" + +[dependencies] +anyhow = { workspace = true } +assert_cmd = { workspace = true } +codex-app-server-protocol = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +tokio = { workspace = true, features = [ + "io-std", + "macros", + "process", + "rt-multi-thread", +] } +wiremock = { workspace = true } diff --git a/codex-rs/app-server/tests/common/lib.rs b/codex-rs/app-server/tests/common/lib.rs new file mode 100644 index 00000000000..2acb52de69f --- /dev/null +++ b/codex-rs/app-server/tests/common/lib.rs @@ -0,0 +1,17 @@ +mod mcp_process; +mod mock_model_server; +mod responses; + +use codex_app_server_protocol::JSONRPCResponse; +pub use mcp_process::McpProcess; +pub use mock_model_server::create_mock_chat_completions_server; +pub use responses::create_apply_patch_sse_response; +pub use responses::create_final_assistant_message_sse_response; +pub use responses::create_shell_sse_response; +use serde::de::DeserializeOwned; + +pub fn to_response<T: DeserializeOwned>(response: JSONRPCResponse) -> anyhow::Result<T> { + let value = serde_json::to_value(response.result)?; + let codex_response = serde_json::from_value(value)?; + Ok(codex_response) +} diff --git a/codex-rs/app-server/tests/common/mcp_process.rs b/codex-rs/app-server/tests/common/mcp_process.rs new file mode 100644 index 00000000000..bdc96cad0aa --- /dev/null +++ b/codex-rs/app-server/tests/common/mcp_process.rs @@ -0,0 +1,474 @@ +use std::path::Path; +use std::process::Stdio; +use
std::sync::atomic::AtomicI64; +use std::sync::atomic::Ordering; +use tokio::io::AsyncBufReadExt; +use tokio::io::AsyncWriteExt; +use tokio::io::BufReader; +use tokio::process::Child; +use tokio::process::ChildStdin; +use tokio::process::ChildStdout; + +use anyhow::Context; +use assert_cmd::prelude::*; +use codex_app_server_protocol::AddConversationListenerParams; +use codex_app_server_protocol::ArchiveConversationParams; +use codex_app_server_protocol::CancelLoginChatGptParams; +use codex_app_server_protocol::ClientInfo; +use codex_app_server_protocol::ClientNotification; +use codex_app_server_protocol::GetAuthStatusParams; +use codex_app_server_protocol::InitializeParams; +use codex_app_server_protocol::InterruptConversationParams; +use codex_app_server_protocol::ListConversationsParams; +use codex_app_server_protocol::LoginApiKeyParams; +use codex_app_server_protocol::NewConversationParams; +use codex_app_server_protocol::RemoveConversationListenerParams; +use codex_app_server_protocol::ResumeConversationParams; +use codex_app_server_protocol::SendUserMessageParams; +use codex_app_server_protocol::SendUserTurnParams; +use codex_app_server_protocol::ServerRequest; +use codex_app_server_protocol::SetDefaultModelParams; + +use codex_app_server_protocol::JSONRPCError; +use codex_app_server_protocol::JSONRPCMessage; +use codex_app_server_protocol::JSONRPCNotification; +use codex_app_server_protocol::JSONRPCRequest; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::RequestId; +use std::process::Command as StdCommand; +use tokio::process::Command; + +pub struct McpProcess { + next_request_id: AtomicI64, + /// Retain this child process until the client is dropped. The Tokio runtime + /// will make a "best effort" to reap the process after it exits, but it is + /// not a guarantee. See the `kill_on_drop` documentation for details. + #[allow(dead_code)] + process: Child, + stdin: ChildStdin, + stdout: BufReader<ChildStdout>, +} + +impl McpProcess { + pub async fn new(codex_home: &Path) -> anyhow::Result<Self> { + Self::new_with_env(codex_home, &[]).await + } + + /// Creates a new MCP process, allowing tests to override or remove + /// specific environment variables for the child process only. + /// + /// Pass a tuple of (key, Some(value)) to set/override, or (key, None) to + /// remove a variable from the child's environment.
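 + /// For example, `McpProcess::new_with_env(codex_home, &[("OPENAI_API_KEY", None)]).await` starts the server with that variable removed (the variable name here is only illustrative).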
+ pub async fn new_with_env( + codex_home: &Path, + env_overrides: &[(&str, Option<&str>)], + ) -> anyhow::Result<Self> { + // Use assert_cmd to locate the binary path and then switch to tokio::process::Command + let std_cmd = StdCommand::cargo_bin("codex-app-server") + .context("should find binary for codex-mcp-server")?; + + let program = std_cmd.get_program().to_owned(); + + let mut cmd = Command::new(program); + + cmd.stdin(Stdio::piped()); + cmd.stdout(Stdio::piped()); + cmd.stderr(Stdio::piped()); + cmd.env("CODEX_HOME", codex_home); + cmd.env("RUST_LOG", "debug"); + + for (k, v) in env_overrides { + match v { + Some(val) => { + cmd.env(k, val); + } + None => { + cmd.env_remove(k); + } + } + } + + let mut process = cmd + .kill_on_drop(true) + .spawn() + .context("codex-mcp-server proc should start")?; + let stdin = process + .stdin + .take() + .ok_or_else(|| anyhow::format_err!("mcp should have stdin fd"))?; + let stdout = process + .stdout + .take() + .ok_or_else(|| anyhow::format_err!("mcp should have stdout fd"))?; + let stdout = BufReader::new(stdout); + + // Forward child's stderr to our stderr so failures are visible even + // when stdout/stderr are captured by the test harness. + if let Some(stderr) = process.stderr.take() { + let mut stderr_reader = BufReader::new(stderr).lines(); + tokio::spawn(async move { + while let Ok(Some(line)) = stderr_reader.next_line().await { + eprintln!("[mcp stderr] {line}"); + } + }); + } + Ok(Self { + next_request_id: AtomicI64::new(0), + process, + stdin, + stdout, + }) + } + + /// Performs the initialization handshake with the MCP server. + pub async fn initialize(&mut self) -> anyhow::Result<()> { + let params = Some(serde_json::to_value(InitializeParams { + client_info: ClientInfo { + name: "codex-app-server-tests".to_string(), + title: None, + version: "0.1.0".to_string(), + }, + })?); + let req_id = self.send_request("initialize", params).await?; + let initialized = self.read_jsonrpc_message().await?; + let JSONRPCMessage::Response(response) = initialized else { + unreachable!("expected JSONRPCMessage::Response for initialize, got {initialized:?}"); + }; + if response.id != RequestId::Integer(req_id) { + anyhow::bail!( + "initialize response id mismatch: expected {}, got {:?}", + req_id, + response.id + ); + } + + // Send notifications/initialized to ack the response. + self.send_notification(ClientNotification::Initialized) + .await?; + + Ok(()) + } + + /// Send a `newConversation` JSON-RPC request. + pub async fn send_new_conversation_request( + &mut self, + params: NewConversationParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("newConversation", params).await + } + + /// Send an `archiveConversation` JSON-RPC request. + pub async fn send_archive_conversation_request( + &mut self, + params: ArchiveConversationParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("archiveConversation", params).await + } + + /// Send an `addConversationListener` JSON-RPC request. + pub async fn send_add_conversation_listener_request( + &mut self, + params: AddConversationListenerParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("addConversationListener", params).await + } + + /// Send a `sendUserMessage` JSON-RPC request with a single text item.
+ pub async fn send_send_user_message_request( + &mut self, + params: SendUserMessageParams, + ) -> anyhow::Result<i64> { + // Wire format expects variants in camelCase; text item uses external tagging. + let params = Some(serde_json::to_value(params)?); + self.send_request("sendUserMessage", params).await + } + + /// Send a `removeConversationListener` JSON-RPC request. + pub async fn send_remove_conversation_listener_request( + &mut self, + params: RemoveConversationListenerParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("removeConversationListener", params) + .await + } + + /// Send a `sendUserTurn` JSON-RPC request. + pub async fn send_send_user_turn_request( + &mut self, + params: SendUserTurnParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("sendUserTurn", params).await + } + + /// Send a `interruptConversation` JSON-RPC request. + pub async fn send_interrupt_conversation_request( + &mut self, + params: InterruptConversationParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("interruptConversation", params).await + } + + /// Send a `getAuthStatus` JSON-RPC request. + pub async fn send_get_auth_status_request( + &mut self, + params: GetAuthStatusParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("getAuthStatus", params).await + } + + /// Send a `getUserSavedConfig` JSON-RPC request. + pub async fn send_get_user_saved_config_request(&mut self) -> anyhow::Result<i64> { + self.send_request("getUserSavedConfig", None).await + } + + /// Send a `getUserAgent` JSON-RPC request. + pub async fn send_get_user_agent_request(&mut self) -> anyhow::Result<i64> { + self.send_request("getUserAgent", None).await + } + + /// Send a `userInfo` JSON-RPC request. + pub async fn send_user_info_request(&mut self) -> anyhow::Result<i64> { + self.send_request("userInfo", None).await + } + + /// Send a `setDefaultModel` JSON-RPC request. + pub async fn send_set_default_model_request( + &mut self, + params: SetDefaultModelParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("setDefaultModel", params).await + } + + /// Send a `listConversations` JSON-RPC request. + pub async fn send_list_conversations_request( + &mut self, + params: ListConversationsParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("listConversations", params).await + } + + /// Send a `resumeConversation` JSON-RPC request. + pub async fn send_resume_conversation_request( + &mut self, + params: ResumeConversationParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("resumeConversation", params).await + } + + /// Send a `loginApiKey` JSON-RPC request. + pub async fn send_login_api_key_request( + &mut self, + params: LoginApiKeyParams, + ) -> anyhow::Result<i64> { + let params = Some(serde_json::to_value(params)?); + self.send_request("loginApiKey", params).await + } + + /// Send a `loginChatGpt` JSON-RPC request. + pub async fn send_login_chat_gpt_request(&mut self) -> anyhow::Result<i64> { + self.send_request("loginChatGpt", None).await + } + + /// Send a `cancelLoginChatGpt` JSON-RPC request.
+    pub async fn send_cancel_login_chat_gpt_request(
+        &mut self,
+        params: CancelLoginChatGptParams,
+    ) -> anyhow::Result<i64> {
+        let params = Some(serde_json::to_value(params)?);
+        self.send_request("cancelLoginChatGpt", params).await
+    }
+
+    /// Send a `logoutChatGpt` JSON-RPC request.
+    pub async fn send_logout_chat_gpt_request(&mut self) -> anyhow::Result<i64> {
+        self.send_request("logoutChatGpt", None).await
+    }
+
+    /// Send a `fuzzyFileSearch` JSON-RPC request.
+    pub async fn send_fuzzy_file_search_request(
+        &mut self,
+        query: &str,
+        roots: Vec<String>,
+        cancellation_token: Option<String>,
+    ) -> anyhow::Result<i64> {
+        let mut params = serde_json::json!({
+            "query": query,
+            "roots": roots,
+        });
+        if let Some(token) = cancellation_token {
+            params["cancellationToken"] = serde_json::json!(token);
+        }
+        self.send_request("fuzzyFileSearch", Some(params)).await
+    }
+
+    async fn send_request(
+        &mut self,
+        method: &str,
+        params: Option<serde_json::Value>,
+    ) -> anyhow::Result<i64> {
+        let request_id = self.next_request_id.fetch_add(1, Ordering::Relaxed);
+
+        let message = JSONRPCMessage::Request(JSONRPCRequest {
+            id: RequestId::Integer(request_id),
+            method: method.to_string(),
+            params,
+        });
+        self.send_jsonrpc_message(message).await?;
+        Ok(request_id)
+    }
+
+    pub async fn send_response(
+        &mut self,
+        id: RequestId,
+        result: serde_json::Value,
+    ) -> anyhow::Result<()> {
+        self.send_jsonrpc_message(JSONRPCMessage::Response(JSONRPCResponse { id, result }))
+            .await
+    }
+
+    pub async fn send_notification(
+        &mut self,
+        notification: ClientNotification,
+    ) -> anyhow::Result<()> {
+        let value = serde_json::to_value(notification)?;
+        self.send_jsonrpc_message(JSONRPCMessage::Notification(JSONRPCNotification {
+            method: value
+                .get("method")
+                .and_then(|m| m.as_str())
+                .ok_or_else(|| anyhow::format_err!("notification missing method field"))?
+                .to_string(),
+            params: value.get("params").cloned(),
+        }))
+        .await
+    }
+
+    async fn send_jsonrpc_message(&mut self, message: JSONRPCMessage) -> anyhow::Result<()> {
+        eprintln!("writing message to stdin: {message:?}");
+        let payload = serde_json::to_string(&message)?;
+        self.stdin.write_all(payload.as_bytes()).await?;
+        self.stdin.write_all(b"\n").await?;
+        self.stdin.flush().await?;
+        Ok(())
+    }
+
+    async fn read_jsonrpc_message(&mut self) -> anyhow::Result<JSONRPCMessage> {
+        let mut line = String::new();
+        self.stdout.read_line(&mut line).await?;
+        let message = serde_json::from_str::<JSONRPCMessage>(&line)?;
+        eprintln!("read message from stdout: {message:?}");
+        Ok(message)
+    }
+
+    pub async fn read_stream_until_request_message(&mut self) -> anyhow::Result<ServerRequest> {
+        eprintln!("in read_stream_until_request_message()");
+
+        loop {
+            let message = self.read_jsonrpc_message().await?;
+
+            match message {
+                JSONRPCMessage::Notification(_) => {
+                    eprintln!("notification: {message:?}");
+                }
+                JSONRPCMessage::Request(jsonrpc_request) => {
+                    return jsonrpc_request.try_into().with_context(
+                        || "failed to deserialize ServerRequest from JSONRPCRequest",
+                    );
+                }
+                JSONRPCMessage::Error(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
+                }
+                JSONRPCMessage::Response(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
+                }
+            }
+        }
+    }
+
+    pub async fn read_stream_until_response_message(
+        &mut self,
+        request_id: RequestId,
+    ) -> anyhow::Result<JSONRPCResponse> {
+        eprintln!("in read_stream_until_response_message({request_id:?})");
+
+        loop {
+            let message = self.read_jsonrpc_message().await?;
+            match message {
+                JSONRPCMessage::Notification(_) => {
+                    eprintln!("notification: {message:?}");
+                }
+                JSONRPCMessage::Request(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
+                }
+                JSONRPCMessage::Error(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
+                }
+                JSONRPCMessage::Response(jsonrpc_response) => {
+                    if jsonrpc_response.id == request_id {
+                        return Ok(jsonrpc_response);
+                    }
+                }
+            }
+        }
+    }
+
+    pub async fn read_stream_until_error_message(
+        &mut self,
+        request_id: RequestId,
+    ) -> anyhow::Result<JSONRPCError> {
+        loop {
+            let message = self.read_jsonrpc_message().await?;
+            match message {
+                JSONRPCMessage::Notification(_) => {
+                    eprintln!("notification: {message:?}");
+                }
+                JSONRPCMessage::Request(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
+                }
+                JSONRPCMessage::Response(_) => {
+                    // Keep scanning; we're waiting for an error with matching id.
+                }
+                JSONRPCMessage::Error(err) => {
+                    if err.id == request_id {
+                        return Ok(err);
+                    }
+                }
+            }
+        }
+    }
+
+    pub async fn read_stream_until_notification_message(
+        &mut self,
+        method: &str,
+    ) -> anyhow::Result<JSONRPCNotification> {
+        eprintln!("in read_stream_until_notification_message({method})");
+
+        loop {
+            let message = self.read_jsonrpc_message().await?;
+            match message {
+                JSONRPCMessage::Notification(notification) => {
+                    if notification.method == method {
+                        return Ok(notification);
+                    }
+                }
+                JSONRPCMessage::Request(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Request: {message:?}");
+                }
+                JSONRPCMessage::Error(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Error: {message:?}");
+                }
+                JSONRPCMessage::Response(_) => {
+                    anyhow::bail!("unexpected JSONRPCMessage::Response: {message:?}");
+                }
+            }
+        }
+    }
+}
diff --git a/codex-rs/app-server/tests/common/mock_model_server.rs b/codex-rs/app-server/tests/common/mock_model_server.rs
new file mode 100644
index 00000000000..be7f3eb5b37
--- /dev/null
+++ b/codex-rs/app-server/tests/common/mock_model_server.rs
@@ -0,0 +1,47 @@
+use std::sync::atomic::AtomicUsize;
+use std::sync::atomic::Ordering;
+
+use wiremock::Mock;
+use wiremock::MockServer;
+use wiremock::Respond;
+use wiremock::ResponseTemplate;
+use wiremock::matchers::method;
+use wiremock::matchers::path;
+
+/// Create a mock server that will provide the responses, in order, for
+/// requests to the `/v1/chat/completions` endpoint.
+pub async fn create_mock_chat_completions_server(responses: Vec<String>) -> MockServer {
+    let server = MockServer::start().await;
+
+    let num_calls = responses.len();
+    let seq_responder = SeqResponder {
+        num_calls: AtomicUsize::new(0),
+        responses,
+    };
+
+    Mock::given(method("POST"))
+        .and(path("/v1/chat/completions"))
+        .respond_with(seq_responder)
+        .expect(num_calls as u64)
+        .mount(&server)
+        .await;
+
+    server
+}
+
+struct SeqResponder {
+    num_calls: AtomicUsize,
+    responses: Vec<String>,
+}
+
+impl Respond for SeqResponder {
+    fn respond(&self, _: &wiremock::Request) -> ResponseTemplate {
+        let call_num = self.num_calls.fetch_add(1, Ordering::SeqCst);
+        match self.responses.get(call_num) {
+            Some(response) => ResponseTemplate::new(200)
+                .insert_header("content-type", "text/event-stream")
+                .set_body_raw(response.clone(), "text/event-stream"),
+            None => panic!("no response for {call_num}"),
+        }
+    }
+}
diff --git a/codex-rs/app-server/tests/common/responses.rs b/codex-rs/app-server/tests/common/responses.rs
new file mode 100644
index 00000000000..9a827fb986d
--- /dev/null
+++ b/codex-rs/app-server/tests/common/responses.rs
@@ -0,0 +1,95 @@
+use serde_json::json;
+use std::path::Path;
+
+pub fn create_shell_sse_response(
+    command: Vec<String>,
+    workdir: Option<&Path>,
+    timeout_ms: Option<u64>,
+    call_id: &str,
+) -> anyhow::Result<String> {
+    // The `arguments` for the `shell` tool is a serialized JSON object.
+    let tool_call_arguments = serde_json::to_string(&json!({
+        "command": command,
+        "workdir": workdir.map(|w| w.to_string_lossy()),
+        "timeout": timeout_ms
+    }))?;
+    let tool_call = json!({
+        "choices": [
+            {
+                "delta": {
+                    "tool_calls": [
+                        {
+                            "id": call_id,
+                            "function": {
+                                "name": "shell",
+                                "arguments": tool_call_arguments
+                            }
+                        }
+                    ]
+                },
+                "finish_reason": "tool_calls"
+            }
+        ]
+    });
+
+    let sse = format!(
+        "data: {}\n\ndata: DONE\n\n",
+        serde_json::to_string(&tool_call)?
+    );
+    Ok(sse)
+}
+
+pub fn create_final_assistant_message_sse_response(message: &str) -> anyhow::Result<String> {
+    let assistant_message = json!({
+        "choices": [
+            {
+                "delta": {
+                    "content": message
+                },
+                "finish_reason": "stop"
+            }
+        ]
+    });
+
+    let sse = format!(
+        "data: {}\n\ndata: DONE\n\n",
+        serde_json::to_string(&assistant_message)?
+    );
+    Ok(sse)
+}
+
+pub fn create_apply_patch_sse_response(
+    patch_content: &str,
+    call_id: &str,
+) -> anyhow::Result<String> {
+    // Use shell command to call apply_patch with heredoc format
+    let shell_command = format!("apply_patch <<'EOF'\n{patch_content}\nEOF");
+    let tool_call_arguments = serde_json::to_string(&json!({
+        "command": ["bash", "-lc", shell_command]
+    }))?;
+
+    let tool_call = json!({
+        "choices": [
+            {
+                "delta": {
+                    "tool_calls": [
+                        {
+                            "id": call_id,
+                            "function": {
+                                "name": "shell",
+                                "arguments": tool_call_arguments
+                            }
+                        }
+                    ]
+                },
+                "finish_reason": "tool_calls"
+            }
+        ]
+    });
+
+    let sse = format!(
+        "data: {}\n\ndata: DONE\n\n",
+        serde_json::to_string(&tool_call)?
+    );
+    Ok(sse)
+}
diff --git a/codex-rs/mcp-server/tests/suite/archive_conversation.rs b/codex-rs/app-server/tests/suite/archive_conversation.rs
similarity index 89%
rename from codex-rs/mcp-server/tests/suite/archive_conversation.rs
rename to codex-rs/app-server/tests/suite/archive_conversation.rs
index e54a99896c8..6dcfefdbb8d 100644
--- a/codex-rs/mcp-server/tests/suite/archive_conversation.rs
+++ b/codex-rs/app-server/tests/suite/archive_conversation.rs
@@ -1,14 +1,14 @@
 use std::path::Path;
+use app_test_support::McpProcess;
+use app_test_support::to_response;
+use codex_app_server_protocol::ArchiveConversationParams;
+use codex_app_server_protocol::ArchiveConversationResponse;
+use codex_app_server_protocol::JSONRPCResponse;
+use codex_app_server_protocol::NewConversationParams;
+use codex_app_server_protocol::NewConversationResponse;
+use codex_app_server_protocol::RequestId;
 use codex_core::ARCHIVED_SESSIONS_SUBDIR;
-use codex_protocol::mcp_protocol::ArchiveConversationParams;
-use codex_protocol::mcp_protocol::ArchiveConversationResponse;
-use codex_protocol::mcp_protocol::NewConversationParams;
-use codex_protocol::mcp_protocol::NewConversationResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
-use mcp_types::JSONRPCResponse;
-use mcp_types::RequestId;
 use tempfile::TempDir;
 use tokio::time::timeout;
diff --git a/codex-rs/mcp-server/tests/suite/auth.rs b/codex-rs/app-server/tests/suite/auth.rs
similarity index 94%
rename from codex-rs/mcp-server/tests/suite/auth.rs
rename to codex-rs/app-server/tests/suite/auth.rs
index 6681fd75469..f45a27fda0a 100644
--- a/codex-rs/mcp-server/tests/suite/auth.rs
+++ b/codex-rs/app-server/tests/suite/auth.rs
@@ -1,14 +1,14 @@
 use std::path::Path;
-use codex_protocol::mcp_protocol::AuthMode;
-use codex_protocol::mcp_protocol::GetAuthStatusParams;
-use codex_protocol::mcp_protocol::GetAuthStatusResponse;
-use codex_protocol::mcp_protocol::LoginApiKeyParams;
-use codex_protocol::mcp_protocol::LoginApiKeyResponse;
-use mcp_test_support::McpProcess;
-use mcp_test_support::to_response;
-use mcp_types::JSONRPCResponse;
-use mcp_types::RequestId;
+use app_test_support::McpProcess;
+use app_test_support::to_response;
+use codex_app_server_protocol::AuthMode;
+use codex_app_server_protocol::GetAuthStatusParams;
+use codex_app_server_protocol::GetAuthStatusResponse;
+use codex_app_server_protocol::JSONRPCResponse;
+use codex_app_server_protocol::LoginApiKeyParams;
+use 
codex_app_server_protocol::LoginApiKeyResponse; +use codex_app_server_protocol::RequestId; use pretty_assertions::assert_eq; use tempfile::TempDir; use tokio::time::timeout; diff --git a/codex-rs/mcp-server/tests/suite/codex_message_processor_flow.rs b/codex-rs/app-server/tests/suite/codex_message_processor_flow.rs similarity index 87% rename from codex-rs/mcp-server/tests/suite/codex_message_processor_flow.rs rename to codex-rs/app-server/tests/suite/codex_message_processor_flow.rs index 50a480b4a09..f1f34f952f3 100644 --- a/codex-rs/mcp-server/tests/suite/codex_message_processor_flow.rs +++ b/codex-rs/app-server/tests/suite/codex_message_processor_flow.rs @@ -1,29 +1,30 @@ use std::path::Path; +use app_test_support::McpProcess; +use app_test_support::create_final_assistant_message_sse_response; +use app_test_support::create_mock_chat_completions_server; +use app_test_support::create_shell_sse_response; +use app_test_support::to_response; +use codex_app_server_protocol::AddConversationListenerParams; +use codex_app_server_protocol::AddConversationSubscriptionResponse; +use codex_app_server_protocol::ExecCommandApprovalParams; +use codex_app_server_protocol::JSONRPCNotification; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::NewConversationParams; +use codex_app_server_protocol::NewConversationResponse; +use codex_app_server_protocol::RemoveConversationListenerParams; +use codex_app_server_protocol::RemoveConversationSubscriptionResponse; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::SendUserMessageParams; +use codex_app_server_protocol::SendUserMessageResponse; +use codex_app_server_protocol::SendUserTurnParams; +use codex_app_server_protocol::SendUserTurnResponse; +use codex_app_server_protocol::ServerRequest; use codex_core::protocol::AskForApproval; use codex_core::protocol::SandboxPolicy; use codex_core::protocol_config_types::ReasoningEffort; use codex_core::protocol_config_types::ReasoningSummary; use codex_core::spawn::CODEX_SANDBOX_NETWORK_DISABLED_ENV_VAR; -use codex_protocol::mcp_protocol::AddConversationListenerParams; -use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse; -use codex_protocol::mcp_protocol::EXEC_COMMAND_APPROVAL_METHOD; -use codex_protocol::mcp_protocol::NewConversationParams; -use codex_protocol::mcp_protocol::NewConversationResponse; -use codex_protocol::mcp_protocol::RemoveConversationListenerParams; -use codex_protocol::mcp_protocol::RemoveConversationSubscriptionResponse; -use codex_protocol::mcp_protocol::SendUserMessageParams; -use codex_protocol::mcp_protocol::SendUserMessageResponse; -use codex_protocol::mcp_protocol::SendUserTurnParams; -use codex_protocol::mcp_protocol::SendUserTurnResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::create_final_assistant_message_sse_response; -use mcp_test_support::create_mock_chat_completions_server; -use mcp_test_support::create_shell_sse_response; -use mcp_test_support::to_response; -use mcp_types::JSONRPCNotification; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; use pretty_assertions::assert_eq; use std::env; use tempfile::TempDir; @@ -115,7 +116,7 @@ async fn test_codex_jsonrpc_conversation_flow() { let send_user_id = mcp .send_send_user_message_request(SendUserMessageParams { conversation_id, - items: vec![codex_protocol::mcp_protocol::InputItem::Text { + items: vec![codex_app_server_protocol::InputItem::Text { text: "text".to_string(), }], }) @@ -265,7 +266,7 @@ async fn 
test_send_user_turn_changes_approval_policy_behavior() { let send_user_id = mcp .send_send_user_message_request(SendUserMessageParams { conversation_id, - items: vec![codex_protocol::mcp_protocol::InputItem::Text { + items: vec![codex_app_server_protocol::InputItem::Text { text: "run python".to_string(), }], }) @@ -290,11 +291,28 @@ async fn test_send_user_turn_changes_approval_policy_behavior() { .await .expect("waiting for exec approval request timeout") .expect("exec approval request"); - assert_eq!(request.method, EXEC_COMMAND_APPROVAL_METHOD); + let ServerRequest::ExecCommandApproval { request_id, params } = request else { + panic!("expected ExecCommandApproval request, got: {request:?}"); + }; + + assert_eq!( + ExecCommandApprovalParams { + conversation_id, + call_id: "call1".to_string(), + command: vec![ + "python3".to_string(), + "-c".to_string(), + "print(42)".to_string(), + ], + cwd: working_directory.clone(), + reason: None, + }, + params + ); // Approve so the first turn can complete mcp.send_response( - request.id, + request_id, serde_json::json!({ "decision": codex_core::protocol::ReviewDecision::Approved }), ) .await @@ -313,7 +331,7 @@ async fn test_send_user_turn_changes_approval_policy_behavior() { let send_turn_id = mcp .send_send_user_turn_request(SendUserTurnParams { conversation_id, - items: vec![codex_protocol::mcp_protocol::InputItem::Text { + items: vec![codex_app_server_protocol::InputItem::Text { text: "run python again".to_string(), }], cwd: working_directory.clone(), diff --git a/codex-rs/mcp-server/tests/suite/config.rs b/codex-rs/app-server/tests/suite/config.rs similarity index 92% rename from codex-rs/mcp-server/tests/suite/config.rs rename to codex-rs/app-server/tests/suite/config.rs index da64648c498..577eeb388d1 100644 --- a/codex-rs/mcp-server/tests/suite/config.rs +++ b/codex-rs/app-server/tests/suite/config.rs @@ -1,20 +1,20 @@ use std::collections::HashMap; use std::path::Path; +use app_test_support::McpProcess; +use app_test_support::to_response; +use codex_app_server_protocol::GetUserSavedConfigResponse; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::Profile; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::SandboxSettings; +use codex_app_server_protocol::Tools; +use codex_app_server_protocol::UserSavedConfig; use codex_core::protocol::AskForApproval; use codex_protocol::config_types::ReasoningEffort; use codex_protocol::config_types::ReasoningSummary; use codex_protocol::config_types::SandboxMode; use codex_protocol::config_types::Verbosity; -use codex_protocol::mcp_protocol::GetUserSavedConfigResponse; -use codex_protocol::mcp_protocol::Profile; -use codex_protocol::mcp_protocol::SandboxSettings; -use codex_protocol::mcp_protocol::Tools; -use codex_protocol::mcp_protocol::UserSavedConfig; -use mcp_test_support::McpProcess; -use mcp_test_support::to_response; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; use pretty_assertions::assert_eq; use tempfile::TempDir; use tokio::time::timeout; diff --git a/codex-rs/mcp-server/tests/suite/create_conversation.rs b/codex-rs/app-server/tests/suite/create_conversation.rs similarity index 88% rename from codex-rs/mcp-server/tests/suite/create_conversation.rs rename to codex-rs/app-server/tests/suite/create_conversation.rs index 1b62d01d46e..37a0db84fa5 100644 --- a/codex-rs/mcp-server/tests/suite/create_conversation.rs +++ b/codex-rs/app-server/tests/suite/create_conversation.rs @@ -1,18 +1,18 @@ use std::path::Path; -use 
codex_protocol::mcp_protocol::AddConversationListenerParams; -use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse; -use codex_protocol::mcp_protocol::InputItem; -use codex_protocol::mcp_protocol::NewConversationParams; -use codex_protocol::mcp_protocol::NewConversationResponse; -use codex_protocol::mcp_protocol::SendUserMessageParams; -use codex_protocol::mcp_protocol::SendUserMessageResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::create_final_assistant_message_sse_response; -use mcp_test_support::create_mock_chat_completions_server; -use mcp_test_support::to_response; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; +use app_test_support::McpProcess; +use app_test_support::create_final_assistant_message_sse_response; +use app_test_support::create_mock_chat_completions_server; +use app_test_support::to_response; +use codex_app_server_protocol::AddConversationListenerParams; +use codex_app_server_protocol::AddConversationSubscriptionResponse; +use codex_app_server_protocol::InputItem; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::NewConversationParams; +use codex_app_server_protocol::NewConversationResponse; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::SendUserMessageParams; +use codex_app_server_protocol::SendUserMessageResponse; use pretty_assertions::assert_eq; use serde_json::json; use tempfile::TempDir; diff --git a/codex-rs/app-server/tests/suite/fuzzy_file_search.rs b/codex-rs/app-server/tests/suite/fuzzy_file_search.rs new file mode 100644 index 00000000000..a2bc974ad08 --- /dev/null +++ b/codex-rs/app-server/tests/suite/fuzzy_file_search.rs @@ -0,0 +1,146 @@ +use anyhow::Context; +use anyhow::Result; +use app_test_support::McpProcess; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::RequestId; +use pretty_assertions::assert_eq; +use serde_json::json; +use tempfile::TempDir; +use tokio::time::timeout; + +const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10); + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_fuzzy_file_search_sorts_and_includes_indices() -> Result<()> { + // Prepare a temporary Codex home and a separate root with test files. + let codex_home = TempDir::new().context("create temp codex home")?; + let root = TempDir::new().context("create temp search root")?; + + // Create files designed to have deterministic ordering for query "abe". + std::fs::write(root.path().join("abc"), "x").context("write file abc")?; + std::fs::write(root.path().join("abcde"), "x").context("write file abcde")?; + std::fs::write(root.path().join("abexy"), "x").context("write file abexy")?; + std::fs::write(root.path().join("zzz.txt"), "x").context("write file zzz")?; + let sub_dir = root.path().join("sub"); + std::fs::create_dir_all(&sub_dir).context("create sub dir")?; + let sub_abce_path = sub_dir.join("abce"); + std::fs::write(&sub_abce_path, "x").context("write file sub/abce")?; + let sub_abce_rel = sub_abce_path + .strip_prefix(root.path()) + .context("strip root prefix from sub/abce")? + .to_string_lossy() + .to_string(); + + // Start MCP server and initialize. + let mut mcp = McpProcess::new(codex_home.path()) + .await + .context("spawn mcp")?; + timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()) + .await + .context("init timeout")? + .context("init failed")?; + + let root_path = root.path().to_string_lossy().to_string(); + // Send fuzzyFileSearch request. 
+ let request_id = mcp + .send_fuzzy_file_search_request("abe", vec![root_path.clone()], None) + .await + .context("send fuzzyFileSearch")?; + + // Read response and verify shape and ordering. + let resp: JSONRPCResponse = timeout( + DEFAULT_READ_TIMEOUT, + mcp.read_stream_until_response_message(RequestId::Integer(request_id)), + ) + .await + .context("fuzzyFileSearch timeout")? + .context("fuzzyFileSearch resp")?; + + let value = resp.result; + // The path separator on Windows affects the score. + let expected_score = if cfg!(windows) { 69 } else { 72 }; + + assert_eq!( + value, + json!({ + "files": [ + { + "root": root_path.clone(), + "path": "abexy", + "file_name": "abexy", + "score": 88, + "indices": [0, 1, 2], + }, + { + "root": root_path.clone(), + "path": "abcde", + "file_name": "abcde", + "score": 74, + "indices": [0, 1, 4], + }, + { + "root": root_path.clone(), + "path": sub_abce_rel, + "file_name": "abce", + "score": expected_score, + "indices": [4, 5, 7], + }, + ] + }) + ); + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_fuzzy_file_search_accepts_cancellation_token() -> Result<()> { + let codex_home = TempDir::new().context("create temp codex home")?; + let root = TempDir::new().context("create temp search root")?; + + std::fs::write(root.path().join("alpha.txt"), "contents").context("write alpha")?; + + let mut mcp = McpProcess::new(codex_home.path()) + .await + .context("spawn mcp")?; + timeout(DEFAULT_READ_TIMEOUT, mcp.initialize()) + .await + .context("init timeout")? + .context("init failed")?; + + let root_path = root.path().to_string_lossy().to_string(); + let request_id = mcp + .send_fuzzy_file_search_request("alp", vec![root_path.clone()], None) + .await + .context("send fuzzyFileSearch")?; + + let request_id_2 = mcp + .send_fuzzy_file_search_request( + "alp", + vec![root_path.clone()], + Some(request_id.to_string()), + ) + .await + .context("send fuzzyFileSearch")?; + + let resp: JSONRPCResponse = timeout( + DEFAULT_READ_TIMEOUT, + mcp.read_stream_until_response_message(RequestId::Integer(request_id_2)), + ) + .await + .context("fuzzyFileSearch timeout")? + .context("fuzzyFileSearch resp")?; + + let files = resp + .result + .get("files") + .context("files key missing")? + .as_array() + .context("files not array")? + .clone(); + + assert_eq!(files.len(), 1); + assert_eq!(files[0]["root"], root_path); + assert_eq!(files[0]["path"], "alpha.txt"); + + Ok(()) +} diff --git a/codex-rs/mcp-server/tests/suite/interrupt.rs b/codex-rs/app-server/tests/suite/interrupt.rs similarity index 85% rename from codex-rs/mcp-server/tests/suite/interrupt.rs rename to codex-rs/app-server/tests/suite/interrupt.rs index e4daeae0de7..2500d20ff48 100644 --- a/codex-rs/mcp-server/tests/suite/interrupt.rs +++ b/codex-rs/app-server/tests/suite/interrupt.rs @@ -1,32 +1,32 @@ #![cfg(unix)] -// Support code lives in the `mcp_test_support` crate under tests/common. +// Support code lives in the `app_test_support` crate under tests/common. 
use std::path::Path; +use codex_app_server_protocol::AddConversationListenerParams; +use codex_app_server_protocol::InterruptConversationParams; +use codex_app_server_protocol::InterruptConversationResponse; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::NewConversationParams; +use codex_app_server_protocol::NewConversationResponse; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::SendUserMessageParams; +use codex_app_server_protocol::SendUserMessageResponse; use codex_core::protocol::TurnAbortReason; -use codex_protocol::mcp_protocol::AddConversationListenerParams; -use codex_protocol::mcp_protocol::InterruptConversationParams; -use codex_protocol::mcp_protocol::InterruptConversationResponse; -use codex_protocol::mcp_protocol::NewConversationParams; -use codex_protocol::mcp_protocol::NewConversationResponse; -use codex_protocol::mcp_protocol::SendUserMessageParams; -use codex_protocol::mcp_protocol::SendUserMessageResponse; -use core_test_support::non_sandbox_test; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; +use core_test_support::skip_if_no_network; use tempfile::TempDir; use tokio::time::timeout; -use mcp_test_support::McpProcess; -use mcp_test_support::create_mock_chat_completions_server; -use mcp_test_support::create_shell_sse_response; -use mcp_test_support::to_response; +use app_test_support::McpProcess; +use app_test_support::create_mock_chat_completions_server; +use app_test_support::create_shell_sse_response; +use app_test_support::to_response; const DEFAULT_READ_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10); #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_shell_command_interruption() { - non_sandbox_test!(); + skip_if_no_network!(); if let Err(err) = shell_command_interruption().await { panic!("failure: {err}"); @@ -100,7 +100,7 @@ async fn shell_command_interruption() -> anyhow::Result<()> { let send_user_id = mcp .send_send_user_message_request(SendUserMessageParams { conversation_id, - items: vec![codex_protocol::mcp_protocol::InputItem::Text { + items: vec![codex_app_server_protocol::InputItem::Text { text: "run first sleep command".to_string(), }], }) diff --git a/codex-rs/mcp-server/tests/suite/list_resume.rs b/codex-rs/app-server/tests/suite/list_resume.rs similarity index 81% rename from codex-rs/mcp-server/tests/suite/list_resume.rs rename to codex-rs/app-server/tests/suite/list_resume.rs index 9302b429906..a4178b08738 100644 --- a/codex-rs/mcp-server/tests/suite/list_resume.rs +++ b/codex-rs/app-server/tests/suite/list_resume.rs @@ -1,16 +1,18 @@ use std::fs; use std::path::Path; -use codex_protocol::mcp_protocol::ListConversationsParams; -use codex_protocol::mcp_protocol::ListConversationsResponse; -use codex_protocol::mcp_protocol::NewConversationParams; // reused for overrides shape -use codex_protocol::mcp_protocol::ResumeConversationParams; -use codex_protocol::mcp_protocol::ResumeConversationResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::to_response; -use mcp_types::JSONRPCNotification; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; +use app_test_support::McpProcess; +use app_test_support::to_response; +use codex_app_server_protocol::JSONRPCNotification; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::ListConversationsParams; +use codex_app_server_protocol::ListConversationsResponse; +use codex_app_server_protocol::NewConversationParams; // reused for overrides shape +use 
codex_app_server_protocol::RequestId; +use codex_app_server_protocol::ResumeConversationParams; +use codex_app_server_protocol::ResumeConversationResponse; +use codex_app_server_protocol::ServerNotification; +use codex_app_server_protocol::SessionConfiguredNotification; use pretty_assertions::assert_eq; use serde_json::json; use tempfile::TempDir; @@ -111,23 +113,28 @@ async fn test_list_and_resume_conversations() { .await .expect("send resumeConversation"); - // Expect a codex/event notification with msg.type == session_configured + // Expect a codex/event notification with msg.type == sessionConfigured let notification: JSONRPCNotification = timeout( DEFAULT_READ_TIMEOUT, - mcp.read_stream_until_notification_message("codex/event"), + mcp.read_stream_until_notification_message("sessionConfigured"), ) .await - .expect("session_configured notification timeout") - .expect("session_configured notification"); - // Basic shape assertion: ensure event type is session_configured - let msg_type = notification - .params - .as_ref() - .and_then(|p| p.get("msg")) - .and_then(|m| m.get("type")) - .and_then(|t| t.as_str()) - .unwrap_or(""); - assert_eq!(msg_type, "session_configured"); + .expect("sessionConfigured notification timeout") + .expect("sessionConfigured notification"); + let session_configured: ServerNotification = notification + .try_into() + .expect("deserialize sessionConfigured notification"); + // Basic shape assertion: ensure event type is sessionConfigured + let ServerNotification::SessionConfigured(SessionConfiguredNotification { + model, + rollout_path, + .. + }) = session_configured + else { + unreachable!("expected sessionConfigured notification"); + }; + assert_eq!(model, "o3"); + assert_eq!(items[0].path.clone(), rollout_path); // Then the response for resumeConversation let resume_resp: JSONRPCResponse = timeout( diff --git a/codex-rs/mcp-server/tests/suite/login.rs b/codex-rs/app-server/tests/suite/login.rs similarity index 90% rename from codex-rs/mcp-server/tests/suite/login.rs rename to codex-rs/app-server/tests/suite/login.rs index 071154c643c..6dcbde11254 100644 --- a/codex-rs/mcp-server/tests/suite/login.rs +++ b/codex-rs/app-server/tests/suite/login.rs @@ -1,17 +1,17 @@ use std::path::Path; use std::time::Duration; +use app_test_support::McpProcess; +use app_test_support::to_response; +use codex_app_server_protocol::CancelLoginChatGptParams; +use codex_app_server_protocol::CancelLoginChatGptResponse; +use codex_app_server_protocol::GetAuthStatusParams; +use codex_app_server_protocol::GetAuthStatusResponse; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::LoginChatGptResponse; +use codex_app_server_protocol::LogoutChatGptResponse; +use codex_app_server_protocol::RequestId; use codex_login::login_with_api_key; -use codex_protocol::mcp_protocol::CancelLoginChatGptParams; -use codex_protocol::mcp_protocol::CancelLoginChatGptResponse; -use codex_protocol::mcp_protocol::GetAuthStatusParams; -use codex_protocol::mcp_protocol::GetAuthStatusResponse; -use codex_protocol::mcp_protocol::LoginChatGptResponse; -use codex_protocol::mcp_protocol::LogoutChatGptResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::to_response; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; use tempfile::TempDir; use tokio::time::timeout; diff --git a/codex-rs/app-server/tests/suite/mod.rs b/codex-rs/app-server/tests/suite/mod.rs new file mode 100644 index 00000000000..78ce310e749 --- /dev/null +++ b/codex-rs/app-server/tests/suite/mod.rs @@ 
-0,0 +1,13 @@ +mod archive_conversation; +mod auth; +mod codex_message_processor_flow; +mod config; +mod create_conversation; +mod fuzzy_file_search; +mod interrupt; +mod list_resume; +mod login; +mod send_message; +mod set_default_model; +mod user_agent; +mod user_info; diff --git a/codex-rs/mcp-server/tests/suite/send_message.rs b/codex-rs/app-server/tests/suite/send_message.rs similarity index 88% rename from codex-rs/mcp-server/tests/suite/send_message.rs rename to codex-rs/app-server/tests/suite/send_message.rs index 158cb12d1c8..22fb02dca97 100644 --- a/codex-rs/mcp-server/tests/suite/send_message.rs +++ b/codex-rs/app-server/tests/suite/send_message.rs @@ -1,20 +1,20 @@ use std::path::Path; -use codex_protocol::mcp_protocol::AddConversationListenerParams; -use codex_protocol::mcp_protocol::AddConversationSubscriptionResponse; -use codex_protocol::mcp_protocol::ConversationId; -use codex_protocol::mcp_protocol::InputItem; -use codex_protocol::mcp_protocol::NewConversationParams; -use codex_protocol::mcp_protocol::NewConversationResponse; -use codex_protocol::mcp_protocol::SendUserMessageParams; -use codex_protocol::mcp_protocol::SendUserMessageResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::create_final_assistant_message_sse_response; -use mcp_test_support::create_mock_chat_completions_server; -use mcp_test_support::to_response; -use mcp_types::JSONRPCNotification; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; +use app_test_support::McpProcess; +use app_test_support::create_final_assistant_message_sse_response; +use app_test_support::create_mock_chat_completions_server; +use app_test_support::to_response; +use codex_app_server_protocol::AddConversationListenerParams; +use codex_app_server_protocol::AddConversationSubscriptionResponse; +use codex_app_server_protocol::InputItem; +use codex_app_server_protocol::JSONRPCNotification; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::NewConversationParams; +use codex_app_server_protocol::NewConversationResponse; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::SendUserMessageParams; +use codex_app_server_protocol::SendUserMessageResponse; +use codex_protocol::ConversationId; use pretty_assertions::assert_eq; use tempfile::TempDir; use tokio::time::timeout; diff --git a/codex-rs/mcp-server/tests/suite/set_default_model.rs b/codex-rs/app-server/tests/suite/set_default_model.rs similarity index 88% rename from codex-rs/mcp-server/tests/suite/set_default_model.rs rename to codex-rs/app-server/tests/suite/set_default_model.rs index f7e1041fa71..6769faa9dc0 100644 --- a/codex-rs/mcp-server/tests/suite/set_default_model.rs +++ b/codex-rs/app-server/tests/suite/set_default_model.rs @@ -1,12 +1,12 @@ use std::path::Path; +use app_test_support::McpProcess; +use app_test_support::to_response; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::SetDefaultModelParams; +use codex_app_server_protocol::SetDefaultModelResponse; use codex_core::config::ConfigToml; -use codex_protocol::mcp_protocol::SetDefaultModelParams; -use codex_protocol::mcp_protocol::SetDefaultModelResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::to_response; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; use pretty_assertions::assert_eq; use tempfile::TempDir; use tokio::time::timeout; diff --git a/codex-rs/mcp-server/tests/suite/user_agent.rs b/codex-rs/app-server/tests/suite/user_agent.rs 
similarity index 83% rename from codex-rs/mcp-server/tests/suite/user_agent.rs rename to codex-rs/app-server/tests/suite/user_agent.rs index 718e1452503..95a0b1a3e0b 100644 --- a/codex-rs/mcp-server/tests/suite/user_agent.rs +++ b/codex-rs/app-server/tests/suite/user_agent.rs @@ -1,8 +1,8 @@ -use codex_protocol::mcp_protocol::GetUserAgentResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::to_response; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; +use app_test_support::McpProcess; +use app_test_support::to_response; +use codex_app_server_protocol::GetUserAgentResponse; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::RequestId; use pretty_assertions::assert_eq; use tempfile::TempDir; use tokio::time::timeout; @@ -35,7 +35,7 @@ async fn get_user_agent_returns_current_codex_user_agent() { let os_info = os_info::get(); let user_agent = format!( - "codex_cli_rs/0.0.0 ({} {}; {}) {} (elicitation test; 0.0.0)", + "codex_cli_rs/0.0.0 ({} {}; {}) {} (codex-app-server-tests; 0.1.0)", os_info.os_type(), os_info.version(), os_info.architecture().unwrap_or("unknown"), diff --git a/codex-rs/mcp-server/tests/suite/user_info.rs b/codex-rs/app-server/tests/suite/user_info.rs similarity index 92% rename from codex-rs/mcp-server/tests/suite/user_info.rs rename to codex-rs/app-server/tests/suite/user_info.rs index 7bcb2acc6be..edd041e553c 100644 --- a/codex-rs/mcp-server/tests/suite/user_info.rs +++ b/codex-rs/app-server/tests/suite/user_info.rs @@ -1,18 +1,18 @@ use std::time::Duration; use anyhow::Context; +use app_test_support::McpProcess; +use app_test_support::to_response; use base64::Engine; use base64::engine::general_purpose::URL_SAFE_NO_PAD; +use codex_app_server_protocol::JSONRPCResponse; +use codex_app_server_protocol::RequestId; +use codex_app_server_protocol::UserInfoResponse; use codex_core::auth::AuthDotJson; use codex_core::auth::get_auth_file; use codex_core::auth::write_auth_json; use codex_core::token_data::IdTokenInfo; use codex_core::token_data::TokenData; -use codex_protocol::mcp_protocol::UserInfoResponse; -use mcp_test_support::McpProcess; -use mcp_test_support::to_response; -use mcp_types::JSONRPCResponse; -use mcp_types::RequestId; use pretty_assertions::assert_eq; use serde_json::json; use tempfile::TempDir; diff --git a/codex-rs/apply-patch/Cargo.toml b/codex-rs/apply-patch/Cargo.toml index d37404c15bd..9445ae08b74 100644 --- a/codex-rs/apply-patch/Cargo.toml +++ b/codex-rs/apply-patch/Cargo.toml @@ -20,7 +20,6 @@ similar = { workspace = true } thiserror = { workspace = true } tree-sitter = { workspace = true } tree-sitter-bash = { workspace = true } -once_cell = { workspace = true } [dev-dependencies] assert_cmd = { workspace = true } diff --git a/codex-rs/apply-patch/src/lib.rs b/codex-rs/apply-patch/src/lib.rs index 189c1a07d0b..3737c6ea6df 100644 --- a/codex-rs/apply-patch/src/lib.rs +++ b/codex-rs/apply-patch/src/lib.rs @@ -6,10 +6,10 @@ use std::collections::HashMap; use std::path::Path; use std::path::PathBuf; use std::str::Utf8Error; +use std::sync::LazyLock; use anyhow::Context; use anyhow::Result; -use once_cell::sync::Lazy; pub use parser::Hunk; pub use parser::ParseError; use parser::ParseError::*; @@ -351,7 +351,7 @@ fn extract_apply_patch_from_bash( // also run an arbitrary query against the AST. This is useful for understanding // how tree-sitter parses the script and whether the query syntax is correct. Be sure // to test both positive and negative cases. 
-    static APPLY_PATCH_QUERY: Lazy<Query> = Lazy::new(|| {
+    static APPLY_PATCH_QUERY: LazyLock<Query> = LazyLock::new(|| {
         let language = BASH.into();
         #[expect(clippy::expect_used)]
         Query::new(
diff --git a/codex-rs/backend-client/Cargo.toml b/codex-rs/backend-client/Cargo.toml
new file mode 100644
index 00000000000..a8e2ee85fb1
--- /dev/null
+++ b/codex-rs/backend-client/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "codex-backend-client"
+version = "0.0.0"
+edition = "2024"
+publish = false
+
+[lib]
+path = "src/lib.rs"
+
+[dependencies]
+anyhow = "1"
+serde = { version = "1", features = ["derive"] }
+serde_json = "1"
+reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
+codex-backend-openapi-models = { path = "../codex-backend-openapi-models" }
+
+[dev-dependencies]
+pretty_assertions = "1"
diff --git a/codex-rs/backend-client/src/client.rs b/codex-rs/backend-client/src/client.rs
new file mode 100644
index 00000000000..06ad00cb806
--- /dev/null
+++ b/codex-rs/backend-client/src/client.rs
@@ -0,0 +1,244 @@
+use crate::types::CodeTaskDetailsResponse;
+use crate::types::PaginatedListTaskListItem;
+use crate::types::TurnAttemptsSiblingTurnsResponse;
+use anyhow::Result;
+use reqwest::header::AUTHORIZATION;
+use reqwest::header::CONTENT_TYPE;
+use reqwest::header::HeaderMap;
+use reqwest::header::HeaderName;
+use reqwest::header::HeaderValue;
+use reqwest::header::USER_AGENT;
+use serde::de::DeserializeOwned;
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum PathStyle {
+    /// /api/codex/…
+    CodexApi,
+    /// /wham/…
+    ChatGptApi,
+}
+
+impl PathStyle {
+    pub fn from_base_url(base_url: &str) -> Self {
+        if base_url.contains("/backend-api") {
+            PathStyle::ChatGptApi
+        } else {
+            PathStyle::CodexApi
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct Client {
+    base_url: String,
+    http: reqwest::Client,
+    bearer_token: Option<String>,
+    user_agent: Option<HeaderValue>,
+    chatgpt_account_id: Option<String>,
+    path_style: PathStyle,
+}
+
+impl Client {
+    pub fn new(base_url: impl Into<String>) -> Result<Self> {
+        let mut base_url = base_url.into();
+        // Normalize common ChatGPT hostnames to include /backend-api so we hit the WHAM paths.
+        // Also trim trailing slashes for consistent URL building.
+        while base_url.ends_with('/') {
+            base_url.pop();
+        }
+        if (base_url.starts_with("https://chatgpt.com")
+            || base_url.starts_with("https://chat.openai.com"))
+            && !base_url.contains("/backend-api")
+        {
+            base_url = format!("{base_url}/backend-api");
+        }
+        let http = reqwest::Client::builder().build()?;
+        let path_style = PathStyle::from_base_url(&base_url);
+        Ok(Self {
+            base_url,
+            http,
+            bearer_token: None,
+            user_agent: None,
+            chatgpt_account_id: None,
+            path_style,
+        })
+    }
+
+    pub fn with_bearer_token(mut self, token: impl Into<String>) -> Self {
+        self.bearer_token = Some(token.into());
+        self
+    }
+
+    pub fn with_user_agent(mut self, ua: impl Into<String>) -> Self {
+        if let Ok(hv) = HeaderValue::from_str(&ua.into()) {
+            self.user_agent = Some(hv);
+        }
+        self
+    }
+
+    pub fn with_chatgpt_account_id(mut self, account_id: impl Into<String>) -> Self {
+        self.chatgpt_account_id = Some(account_id.into());
+        self
+    }
+
+    pub fn with_path_style(mut self, style: PathStyle) -> Self {
+        self.path_style = style;
+        self
+    }
+
+    fn headers(&self) -> HeaderMap {
+        let mut h = HeaderMap::new();
+        if let Some(ua) = &self.user_agent {
+            h.insert(USER_AGENT, ua.clone());
+        } else {
+            h.insert(USER_AGENT, HeaderValue::from_static("codex-cli"));
+        }
+        if let Some(token) = &self.bearer_token {
+            let value = format!("Bearer {token}");
+            if let Ok(hv) = HeaderValue::from_str(&value) {
+                h.insert(AUTHORIZATION, hv);
+            }
+        }
+        if let Some(acc) = &self.chatgpt_account_id
+            && let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id")
+            && let Ok(hv) = HeaderValue::from_str(acc)
+        {
+            h.insert(name, hv);
+        }
+        h
+    }
+
+    async fn exec_request(
+        &self,
+        req: reqwest::RequestBuilder,
+        method: &str,
+        url: &str,
+    ) -> Result<(String, String)> {
+        let res = req.send().await?;
+        let status = res.status();
+        let ct = res
+            .headers()
+            .get(CONTENT_TYPE)
+            .and_then(|v| v.to_str().ok())
+            .unwrap_or("")
+            .to_string();
+        let body = res.text().await.unwrap_or_default();
+        if !status.is_success() {
+            anyhow::bail!("{method} {url} failed: {status}; content-type={ct}; body={body}");
+        }
+        Ok((body, ct))
+    }
+
+    fn decode_json<T: DeserializeOwned>(&self, url: &str, ct: &str, body: &str) -> Result<T> {
+        match serde_json::from_str::<T>(body) {
+            Ok(v) => Ok(v),
+            Err(e) => {
+                anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}");
+            }
+        }
+    }
+
+    pub async fn list_tasks(
+        &self,
+        limit: Option<u32>,
+        task_filter: Option<&str>,
+        environment_id: Option<&str>,
+    ) -> Result<PaginatedListTaskListItem> {
+        let url = match self.path_style {
+            PathStyle::CodexApi => format!("{}/api/codex/tasks/list", self.base_url),
+            PathStyle::ChatGptApi => format!("{}/wham/tasks/list", self.base_url),
+        };
+        let req = self.http.get(&url).headers(self.headers());
+        let req = if let Some(lim) = limit {
+            req.query(&[("limit", lim)])
+        } else {
+            req
+        };
+        let req = if let Some(tf) = task_filter {
+            req.query(&[("task_filter", tf)])
+        } else {
+            req
+        };
+        let req = if let Some(id) = environment_id {
+            req.query(&[("environment_id", id)])
+        } else {
+            req
+        };
+        let (body, ct) = self.exec_request(req, "GET", &url).await?;
+        self.decode_json::<PaginatedListTaskListItem>(&url, &ct, &body)
+    }
+
+    pub async fn get_task_details(&self, task_id: &str) -> Result<CodeTaskDetailsResponse> {
+        let (parsed, _body, _ct) = self.get_task_details_with_body(task_id).await?;
+        Ok(parsed)
+    }
+
+    pub async fn get_task_details_with_body(
+        &self,
+        task_id: &str,
+    ) -> Result<(CodeTaskDetailsResponse, String, String)> {
+        let url = match self.path_style {
+            PathStyle::CodexApi => format!("{}/api/codex/tasks/{}", self.base_url, task_id),
+            PathStyle::ChatGptApi => format!("{}/wham/tasks/{}", self.base_url, task_id),
+        };
+        let req = self.http.get(&url).headers(self.headers());
+        let (body, ct) = self.exec_request(req, "GET", &url).await?;
+        let parsed: CodeTaskDetailsResponse = self.decode_json(&url, &ct, &body)?;
+        Ok((parsed, body, ct))
+    }
+
+    pub async fn list_sibling_turns(
+        &self,
+        task_id: &str,
+        turn_id: &str,
+    ) -> Result<TurnAttemptsSiblingTurnsResponse> {
+        let url = match self.path_style {
+            PathStyle::CodexApi => format!(
+                "{}/api/codex/tasks/{}/turns/{}/sibling_turns",
+                self.base_url, task_id, turn_id
+            ),
+            PathStyle::ChatGptApi => format!(
+                "{}/wham/tasks/{}/turns/{}/sibling_turns",
+                self.base_url, task_id, turn_id
+            ),
+        };
+        let req = self.http.get(&url).headers(self.headers());
+        let (body, ct) = self.exec_request(req, "GET", &url).await?;
+        self.decode_json::<TurnAttemptsSiblingTurnsResponse>(&url, &ct, &body)
+    }
+
+    /// Create a new task (user turn) by POSTing to the appropriate backend path
+    /// based on `path_style`. Returns the created task id.
+    pub async fn create_task(&self, request_body: serde_json::Value) -> Result<String> {
+        let url = match self.path_style {
+            PathStyle::CodexApi => format!("{}/api/codex/tasks", self.base_url),
+            PathStyle::ChatGptApi => format!("{}/wham/tasks", self.base_url),
+        };
+        let req = self
+            .http
+            .post(&url)
+            .headers(self.headers())
+            .header(CONTENT_TYPE, HeaderValue::from_static("application/json"))
+            .json(&request_body);
+        let (body, ct) = self.exec_request(req, "POST", &url).await?;
+        // Extract id from JSON: prefer `task.id`; fallback to top-level `id` when present.
+        match serde_json::from_str::<serde_json::Value>(&body) {
+            Ok(v) => {
+                if let Some(id) = v
+                    .get("task")
+                    .and_then(|t| t.get("id"))
+                    .and_then(|s| s.as_str())
+                {
+                    Ok(id.to_string())
+                } else if let Some(id) = v.get("id").and_then(|s| s.as_str()) {
+                    Ok(id.to_string())
+                } else {
+                    anyhow::bail!(
+                        "POST {url} succeeded but no task id found; content-type={ct}; body={body}"
+                    );
+                }
+            }
+            Err(e) => anyhow::bail!("Decode error for {url}: {e}; content-type={ct}; body={body}"),
+        }
+    }
+}
diff --git a/codex-rs/backend-client/src/lib.rs b/codex-rs/backend-client/src/lib.rs
new file mode 100644
index 00000000000..29fe9f3c6be
--- /dev/null
+++ b/codex-rs/backend-client/src/lib.rs
@@ -0,0 +1,9 @@
+mod client;
+pub mod types;
+
+pub use client::Client;
+pub use types::CodeTaskDetailsResponse;
+pub use types::CodeTaskDetailsResponseExt;
+pub use types::PaginatedListTaskListItem;
+pub use types::TaskListItem;
+pub use types::TurnAttemptsSiblingTurnsResponse;
diff --git a/codex-rs/backend-client/src/types.rs b/codex-rs/backend-client/src/types.rs
new file mode 100644
index 00000000000..8e2dfb21267
--- /dev/null
+++ b/codex-rs/backend-client/src/types.rs
@@ -0,0 +1,369 @@
+pub use codex_backend_openapi_models::models::PaginatedListTaskListItem;
+pub use codex_backend_openapi_models::models::TaskListItem;
+
+use serde::Deserialize;
+use serde::de::Deserializer;
+use serde_json::Value;
+use std::collections::HashMap;
+
+/// Hand-rolled models for the Cloud Tasks task-details response.
+/// The generated OpenAPI models are pretty bad. This is a half-step
+/// towards hand-rolling them.
+#[derive(Clone, Debug, Deserialize)]
+pub struct CodeTaskDetailsResponse {
+    #[serde(default)]
+    pub current_user_turn: Option<Turn>,
+    #[serde(default)]
+    pub current_assistant_turn: Option<Turn>,
+    #[serde(default)]
+    pub current_diff_task_turn: Option<Turn>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct Turn {
+    #[serde(default)]
+    pub id: Option<String>,
+    #[serde(default)]
+    pub attempt_placement: Option<i64>,
+    #[serde(default, rename = "turn_status")]
+    pub turn_status: Option<String>,
+    #[serde(default, deserialize_with = "deserialize_vec")]
+    pub sibling_turn_ids: Vec<String>,
+    #[serde(default, deserialize_with = "deserialize_vec")]
+    pub input_items: Vec<TurnItem>,
+    #[serde(default, deserialize_with = "deserialize_vec")]
+    pub output_items: Vec<TurnItem>,
+    #[serde(default)]
+    pub worklog: Option<Worklog>,
+    #[serde(default)]
+    pub error: Option<TurnError>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct TurnItem {
+    #[serde(rename = "type", default)]
+    pub kind: String,
+    #[serde(default)]
+    pub role: Option<String>,
+    #[serde(default, deserialize_with = "deserialize_vec")]
+    pub content: Vec<ContentFragment>,
+    #[serde(default)]
+    pub diff: Option<String>,
+    #[serde(default)]
+    pub output_diff: Option<DiffPayload>,
+}
+
+#[derive(Clone, Debug, Deserialize)]
+#[serde(untagged)]
+pub enum ContentFragment {
+    Structured(StructuredContent),
+    Text(String),
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct StructuredContent {
+    #[serde(rename = "content_type", default)]
+    pub content_type: Option<String>,
+    #[serde(default)]
+    pub text: Option<String>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct DiffPayload {
+    #[serde(default)]
+    pub diff: Option<String>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct Worklog {
+    #[serde(default, deserialize_with = "deserialize_vec")]
+    pub messages: Vec<WorklogMessage>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct WorklogMessage {
+    #[serde(default)]
+    pub author: Option<Author>,
+    #[serde(default)]
+    pub content: Option<WorklogContent>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct Author {
+    #[serde(default)]
+    pub role: Option<String>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct WorklogContent {
+    #[serde(default)]
+    pub parts: Vec<ContentFragment>,
+}
+
+#[derive(Clone, Debug, Default, Deserialize)]
+pub struct TurnError {
+    #[serde(default)]
+    pub code: Option<String>,
+    #[serde(default)]
+    pub message: Option<String>,
+}
+
+impl ContentFragment {
+    fn text(&self) -> Option<&str> {
+        match self {
+            ContentFragment::Structured(inner) => {
+                if inner
+                    .content_type
+                    .as_deref()
+                    .map(|ct| ct.eq_ignore_ascii_case("text"))
+                    .unwrap_or(false)
+                {
+                    inner.text.as_deref().filter(|s| !s.is_empty())
+                } else {
+                    None
+                }
+            }
+            ContentFragment::Text(raw) => {
+                if raw.trim().is_empty() {
+                    None
+                } else {
+                    Some(raw.as_str())
+                }
+            }
+        }
+    }
+}
+
+impl TurnItem {
+    fn text_values(&self) -> Vec<String> {
+        self.content
+            .iter()
+            .filter_map(|fragment| fragment.text().map(str::to_string))
+            .collect()
+    }
+
+    fn diff_text(&self) -> Option<String> {
+        if self.kind == "output_diff" {
+            if let Some(diff) = &self.diff
+                && !diff.is_empty()
+            {
+                return Some(diff.clone());
+            }
+        } else if self.kind == "pr"
+            && let Some(payload) = &self.output_diff
+            && let Some(diff) = &payload.diff
+            && !diff.is_empty()
+        {
+            return Some(diff.clone());
+        }
+        None
+    }
+}
+
+impl Turn {
+    fn unified_diff(&self) -> Option<String> {
+        self.output_items.iter().find_map(TurnItem::diff_text)
+    }
+
+    fn message_texts(&self) -> Vec<String> {
+        let mut out: Vec<String> = self
+            .output_items
+            .iter()
+            .filter(|item| item.kind == "message")
+            .flat_map(TurnItem::text_values)
+            .collect();
+
+        if let Some(log) = &self.worklog {
+            for message in &log.messages {
+                if message.is_assistant() {
+                    out.extend(message.text_values());
+                }
+            }
+        }
+
+        out
+    }
+
+    fn user_prompt(&self) -> Option<String> {
+        let parts: Vec<String> = self
+            .input_items
+            .iter()
+            .filter(|item| item.kind == "message")
+            .filter(|item| {
+                item.role
+                    .as_deref()
+                    .map(|r| r.eq_ignore_ascii_case("user"))
+                    .unwrap_or(true)
+            })
+            .flat_map(TurnItem::text_values)
+            .collect();
+
+        if parts.is_empty() {
+            None
+        } else {
+            Some(parts.join(
+                "
+
+",
+            ))
+        }
+    }
+
+    fn error_summary(&self) -> Option<String> {
+        self.error.as_ref().and_then(TurnError::summary)
+    }
+}
+
+impl WorklogMessage {
+    fn is_assistant(&self) -> bool {
+        self.author
+            .as_ref()
+            .and_then(|a| a.role.as_deref())
+            .map(|role| role.eq_ignore_ascii_case("assistant"))
+            .unwrap_or(false)
+    }
+
+    fn text_values(&self) -> Vec<String> {
+        self.content
+            .as_ref()
+            .map(|content| {
+                content
+                    .parts
+                    .iter()
+                    .filter_map(|fragment| fragment.text().map(str::to_string))
+                    .collect()
+            })
+            .unwrap_or_default()
+    }
+}
+
+impl TurnError {
+    fn summary(&self) -> Option<String> {
+        let code = self.code.as_deref().unwrap_or("");
+        let message = self.message.as_deref().unwrap_or("");
+        match (code.is_empty(), message.is_empty()) {
+            (true, true) => None,
+            (false, true) => Some(code.to_string()),
+            (true, false) => Some(message.to_string()),
+            (false, false) => Some(format!("{code}: {message}")),
+        }
+    }
+}
+
+pub trait CodeTaskDetailsResponseExt {
+    /// Attempt to extract a unified diff string from the assistant or diff turn.
+    fn unified_diff(&self) -> Option<String>;
+    /// Extract assistant text output messages (no diff) from current turns.
+    fn assistant_text_messages(&self) -> Vec<String>;
+    /// Extract the user's prompt text from the current user turn, when present.
+    fn user_text_prompt(&self) -> Option<String>;
+    /// Extract an assistant error message (if the turn failed and provided one).
+    fn assistant_error_message(&self) -> Option<String>;
+}
+
+impl CodeTaskDetailsResponseExt for CodeTaskDetailsResponse {
+    fn unified_diff(&self) -> Option<String> {
+        [
+            self.current_diff_task_turn.as_ref(),
+            self.current_assistant_turn.as_ref(),
+        ]
+        .into_iter()
+        .flatten()
+        .find_map(Turn::unified_diff)
+    }
+
+    fn assistant_text_messages(&self) -> Vec<String> {
+        let mut out = Vec::new();
+        for turn in [
+            self.current_diff_task_turn.as_ref(),
+            self.current_assistant_turn.as_ref(),
+        ]
+        .into_iter()
+        .flatten()
+        {
+            out.extend(turn.message_texts());
+        }
+        out
+    }
+
+    fn user_text_prompt(&self) -> Option<String> {
+        self.current_user_turn.as_ref().and_then(Turn::user_prompt)
+    }
+
+    fn assistant_error_message(&self) -> Option<String> {
+        self.current_assistant_turn
+            .as_ref()
+            .and_then(Turn::error_summary)
+    }
+}
+
+fn deserialize_vec<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
+where
+    D: Deserializer<'de>,
+    T: Deserialize<'de>,
+{
+    Option::<Vec<T>>::deserialize(deserializer).map(|opt| opt.unwrap_or_default())
+}
+
+#[derive(Clone, Debug, Deserialize)]
+pub struct TurnAttemptsSiblingTurnsResponse {
+    #[serde(default)]
+    pub sibling_turns: Vec<HashMap<String, Value>>,
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use pretty_assertions::assert_eq;
+
+    fn fixture(name: &str) -> CodeTaskDetailsResponse {
+        let json = match name {
+            "diff" => include_str!("../tests/fixtures/task_details_with_diff.json"),
+            "error" => include_str!("../tests/fixtures/task_details_with_error.json"),
+            other => panic!("unknown fixture {other}"),
+        };
+        serde_json::from_str(json).expect("fixture should deserialize")
+    }
+
+    #[test]
+    fn unified_diff_prefers_current_diff_task_turn() {
+        let details = fixture("diff");
+        let diff = details.unified_diff().expect("diff present");
+        assert!(diff.contains("diff --git"));
+    }
+
+    #[test]
+    fn unified_diff_falls_back_to_pr_output_diff() {
+        let details = fixture("error");
+        let diff = details.unified_diff().expect("diff from pr output");
+        assert!(diff.contains("lib.rs"));
+    }
+
+    #[test]
+    fn assistant_text_messages_extracts_text_content() {
+        let details = fixture("diff");
+        let messages = details.assistant_text_messages();
+        assert_eq!(messages, vec!["Assistant response".to_string()]);
+    }
+
+    #[test]
+    fn user_text_prompt_joins_parts_with_spacing() {
+        let details = fixture("diff");
+        let prompt = details.user_text_prompt().expect("prompt present");
+        assert_eq!(
+            prompt,
+            "First line
+
+Second line"
+        );
+    }
+
+    #[test]
+    fn assistant_error_message_combines_code_and_message() {
+        let details = fixture("error");
+        let msg = details
+            .assistant_error_message()
+            .expect("error should be present");
+        assert_eq!(msg, "APPLY_FAILED: Patch could not be applied");
+    }
+}
diff --git a/codex-rs/backend-client/tests/fixtures/task_details_with_diff.json b/codex-rs/backend-client/tests/fixtures/task_details_with_diff.json
new file mode 100644
index 00000000000..3a06b04c1ce
--- /dev/null
+++ b/codex-rs/backend-client/tests/fixtures/task_details_with_diff.json
@@ -0,0 +1,38 @@
+{
+  "task": {
+    "id": "task_123",
+    "title": "Refactor cloud task client",
+    "archived": false,
+    "external_pull_requests": []
+  },
+  "current_user_turn": {
+    "input_items": [
+      {
+        "type": "message",
+        "role": "user",
+        "content": [
+          { "content_type": "text", "text": "First line" },
+          { "content_type": "text", "text": "Second line" }
+        ]
+      }
+    ]
+  },
+  "current_assistant_turn": {
+    "output_items": [
+      {
+        "type": "message",
+        "content": [
+          { "content_type": "text", "text": "Assistant response" }
+        ]
+      }
+    ]
+  },
+  "current_diff_task_turn": {
"output_items": [ + { + "type": "output_diff", + "diff": "diff --git a/src/main.rs b/src/main.rs\n+fn main() { println!(\"hi\"); }\n" + } + ] + } +} diff --git a/codex-rs/backend-client/tests/fixtures/task_details_with_error.json b/codex-rs/backend-client/tests/fixtures/task_details_with_error.json new file mode 100644 index 00000000000..6f6b66a72c3 --- /dev/null +++ b/codex-rs/backend-client/tests/fixtures/task_details_with_error.json @@ -0,0 +1,22 @@ +{ + "task": { + "id": "task_456", + "title": "Investigate failure", + "archived": false, + "external_pull_requests": [] + }, + "current_assistant_turn": { + "output_items": [ + { + "type": "pr", + "output_diff": { + "diff": "diff --git a/lib.rs b/lib.rs\n+pub fn hello() {}\n" + } + } + ], + "error": { + "code": "APPLY_FAILED", + "message": "Patch could not be applied" + } + } +} diff --git a/codex-rs/chatgpt/Cargo.toml b/codex-rs/chatgpt/Cargo.toml index 97e14d7fe7c..0a3170865ab 100644 --- a/codex-rs/chatgpt/Cargo.toml +++ b/codex-rs/chatgpt/Cargo.toml @@ -14,6 +14,7 @@ codex-core = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } tokio = { workspace = true, features = ["full"] } +codex-git-apply = { path = "../git-apply" } [dev-dependencies] tempfile = { workspace = true } diff --git a/codex-rs/chatgpt/src/apply_command.rs b/codex-rs/chatgpt/src/apply_command.rs index 52ab205a0cf..656197f7679 100644 --- a/codex-rs/chatgpt/src/apply_command.rs +++ b/codex-rs/chatgpt/src/apply_command.rs @@ -56,46 +56,24 @@ pub async fn apply_diff_from_task( } async fn apply_diff(diff: &str, cwd: Option) -> anyhow::Result<()> { - let mut cmd = tokio::process::Command::new("git"); - if let Some(cwd) = cwd { - cmd.current_dir(cwd); - } - let toplevel_output = cmd - .args(vec!["rev-parse", "--show-toplevel"]) - .output() - .await?; - - if !toplevel_output.status.success() { - anyhow::bail!("apply must be run from a git repository."); - } - - let repo_root = String::from_utf8(toplevel_output.stdout)? 
- .trim() - .to_string(); - - let mut git_apply_cmd = tokio::process::Command::new("git") - .args(vec!["apply", "--3way"]) - .current_dir(&repo_root) - .stdin(std::process::Stdio::piped()) - .stdout(std::process::Stdio::piped()) - .stderr(std::process::Stdio::piped()) - .spawn()?; - - if let Some(mut stdin) = git_apply_cmd.stdin.take() { - tokio::io::AsyncWriteExt::write_all(&mut stdin, diff.as_bytes()).await?; - drop(stdin); - } - - let output = git_apply_cmd.wait_with_output().await?; - - if !output.status.success() { + let cwd = cwd.unwrap_or(std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir())); + let req = codex_git_apply::ApplyGitRequest { + cwd, + diff: diff.to_string(), + revert: false, + preflight: false, + }; + let res = codex_git_apply::apply_git_patch(&req)?; + if res.exit_code != 0 { anyhow::bail!( - "Git apply failed with status {}: {}", - output.status, - String::from_utf8_lossy(&output.stderr) + "Git apply failed (applied={}, skipped={}, conflicts={})\nstdout:\n{}\nstderr:\n{}", + res.applied_paths.len(), + res.skipped_paths.len(), + res.conflicted_paths.len(), + res.stdout, + res.stderr ); } - println!("Successfully applied diff"); Ok(()) } diff --git a/codex-rs/chatgpt/src/chatgpt_client.rs b/codex-rs/chatgpt/src/chatgpt_client.rs index a4545f075e1..d450f7c59ec 100644 --- a/codex-rs/chatgpt/src/chatgpt_client.rs +++ b/codex-rs/chatgpt/src/chatgpt_client.rs @@ -44,6 +44,6 @@ pub(crate) async fn chatgpt_get_request( } else { let status = response.status(); let body = response.text().await.unwrap_or_default(); - anyhow::bail!("Request failed with status {}: {}", status, body) + anyhow::bail!("Request failed with status {status}: {body}") } } diff --git a/codex-rs/cli/Cargo.toml b/codex-rs/cli/Cargo.toml index c410e09a826..c4dac80ec39 100644 --- a/codex-rs/cli/Cargo.toml +++ b/codex-rs/cli/Cargo.toml @@ -18,6 +18,7 @@ workspace = true anyhow = { workspace = true } clap = { workspace = true, features = ["derive"] } clap_complete = { workspace = true } +codex-app-server = { workspace = true } codex-arg0 = { workspace = true } codex-chatgpt = { workspace = true } codex-common = { workspace = true, features = ["cli"] } @@ -25,9 +26,14 @@ codex-core = { workspace = true } codex-exec = { workspace = true } codex-login = { workspace = true } codex-mcp-server = { workspace = true } +codex-process-hardening = { workspace = true } codex-protocol = { workspace = true } +codex-app-server-protocol = { workspace = true } codex-protocol-ts = { workspace = true } +codex-responses-api-proxy = { workspace = true } codex-tui = { workspace = true } +codex-cloud-tasks = { path = "../cloud-tasks" } +ctor = { workspace = true } owo-colors = { workspace = true } serde_json = { workspace = true } supports-color = { workspace = true } @@ -38,8 +44,6 @@ tokio = { workspace = true, features = [ "rt-multi-thread", "signal", ] } -tracing = { workspace = true } -tracing-subscriber = { workspace = true } [dev-dependencies] assert_cmd = { workspace = true } diff --git a/codex-rs/cli/src/lib.rs b/codex-rs/cli/src/lib.rs index c6d80c0adfa..c05d570e640 100644 --- a/codex-rs/cli/src/lib.rs +++ b/codex-rs/cli/src/lib.rs @@ -1,7 +1,6 @@ pub mod debug_sandbox; mod exit_status; pub mod login; -pub mod proto; use clap::Parser; use codex_common::CliConfigOverrides; diff --git a/codex-rs/cli/src/login.rs b/codex-rs/cli/src/login.rs index f0816d0b298..2b497c0642d 100644 --- a/codex-rs/cli/src/login.rs +++ b/codex-rs/cli/src/login.rs @@ -1,3 +1,4 @@ +use codex_app_server_protocol::AuthMode; use 
codex_common::CliConfigOverrides; use codex_core::CodexAuth; use codex_core::auth::CLIENT_ID; @@ -6,8 +7,8 @@ use codex_core::auth::logout; use codex_core::config::Config; use codex_core::config::ConfigOverrides; use codex_login::ServerOptions; +use codex_login::run_device_code_login; use codex_login::run_login_server; -use codex_protocol::mcp_protocol::AuthMode; use std::path::PathBuf; pub async fn login_with_chatgpt(codex_home: PathBuf) -> std::io::Result<()> { @@ -55,6 +56,32 @@ pub async fn run_login_with_api_key( } } +/// Login using the OAuth device code flow. +pub async fn run_login_with_device_code( + cli_config_overrides: CliConfigOverrides, + issuer_base_url: Option, + client_id: Option, +) -> ! { + let config = load_config_or_exit(cli_config_overrides); + let mut opts = ServerOptions::new( + config.codex_home, + client_id.unwrap_or(CLIENT_ID.to_string()), + ); + if let Some(iss) = issuer_base_url { + opts.issuer = iss; + } + match run_device_code_login(opts).await { + Ok(()) => { + eprintln!("Successfully logged in"); + std::process::exit(0); + } + Err(e) => { + eprintln!("Error logging in with device code: {e}"); + std::process::exit(1); + } + } +} + pub async fn run_login_status(cli_config_overrides: CliConfigOverrides) -> ! { let config = load_config_or_exit(cli_config_overrides); diff --git a/codex-rs/cli/src/main.rs b/codex-rs/cli/src/main.rs index df757b0cc5d..10e26215334 100644 --- a/codex-rs/cli/src/main.rs +++ b/codex-rs/cli/src/main.rs @@ -10,10 +10,12 @@ use codex_cli::SeatbeltCommand; use codex_cli::login::run_login_status; use codex_cli::login::run_login_with_api_key; use codex_cli::login::run_login_with_chatgpt; +use codex_cli::login::run_login_with_device_code; use codex_cli::login::run_logout; -use codex_cli::proto; +use codex_cloud_tasks::Cli as CloudTasksCli; use codex_common::CliConfigOverrides; use codex_exec::Cli as ExecCli; +use codex_responses_api_proxy::Args as ResponsesApiProxyArgs; use codex_tui::AppExitInfo; use codex_tui::Cli as TuiCli; use owo_colors::OwoColorize; @@ -23,7 +25,6 @@ use supports_color::Stream; mod mcp_cmd; use crate::mcp_cmd::McpCli; -use crate::proto::ProtoCli; /// Codex CLI /// @@ -65,9 +66,11 @@ enum Subcommand { /// [experimental] Run Codex as an MCP server and manage MCP servers. Mcp(McpCli), - /// Run the Protocol stream via stdin/stdout - #[clap(visible_alias = "p")] - Proto(ProtoCli), + /// [experimental] Run the Codex MCP server (stdio transport). + McpServer, + + /// [experimental] Run the app server. + AppServer, /// Generate shell completion scripts. Completion(CompletionCommand), @@ -85,6 +88,13 @@ enum Subcommand { /// Internal: generate TypeScript protocol bindings. #[clap(hide = true)] GenerateTs(GenerateTsCommand), + /// [EXPERIMENTAL] Browse tasks from Codex Cloud and apply changes locally. + #[clap(name = "cloud", alias = "cloud-tasks")] + Cloud(CloudTasksCli), + + /// Internal: run the responses API proxy. + #[clap(hide = true)] + ResponsesApiProxy(ResponsesApiProxyArgs), } #[derive(Debug, Parser)] @@ -132,6 +142,20 @@ struct LoginCommand { #[arg(long = "api-key", value_name = "API_KEY")] api_key: Option, + /// EXPERIMENTAL: Use device code flow (not yet supported) + /// This feature is experimental and may changed in future releases. 
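
A minimal sketch (illustrative, not part of the patch) of driving the new device-code flow programmatically through the helper above; it assumes `CliConfigOverrides` implements `Default`, and the issuer URL is a placeholder.

    use codex_cli::login::run_login_with_device_code;
    use codex_common::CliConfigOverrides;

    #[tokio::main]
    async fn main() {
        // Never returns: like the other login helpers it exits 0 on success, 1 on error.
        run_login_with_device_code(
            CliConfigOverrides::default(),                // assumed Default impl
            Some("https://auth.example.com".to_string()), // optional issuer override
            None,                                         // fall back to the built-in CLIENT_ID
        )
        .await
    }
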
+ #[arg(long = "experimental_use-device-code", hide = true)] + use_device_code: bool, + + /// EXPERIMENTAL: Use custom OAuth issuer base URL (advanced) + /// Override the OAuth issuer base URL (advanced) + #[arg(long = "experimental_issuer", value_name = "URL", hide = true)] + issuer_base_url: Option, + + /// EXPERIMENTAL: Use custom OAuth client ID (advanced) + #[arg(long = "experimental_client-id", value_name = "CLIENT_ID", hide = true)] + client_id: Option, + #[command(subcommand)] action: Option, } @@ -181,7 +205,7 @@ fn format_exit_messages(exit_info: AppExitInfo, color_enabled: bool) -> Vec anyhow::Result<()> { arg0_dispatch_or_else(|codex_linux_sandbox_exe| async move { cli_main(codex_linux_sandbox_exe).await?; @@ -224,10 +256,16 @@ async fn cli_main(codex_linux_sandbox_exe: Option) -> anyhow::Result<() ); codex_exec::run_main(exec_cli, codex_linux_sandbox_exe).await?; } + Some(Subcommand::McpServer) => { + codex_mcp_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?; + } Some(Subcommand::Mcp(mut mcp_cli)) => { // Propagate any root-level config overrides (e.g. `-c key=value`). prepend_config_flags(&mut mcp_cli.config_overrides, root_config_overrides.clone()); - mcp_cli.run(codex_linux_sandbox_exe).await?; + mcp_cli.run().await?; + } + Some(Subcommand::AppServer) => { + codex_app_server::run_main(codex_linux_sandbox_exe, root_config_overrides).await?; } Some(Subcommand::Resume(ResumeCommand { session_id, @@ -253,7 +291,14 @@ async fn cli_main(codex_linux_sandbox_exe: Option) -> anyhow::Result<() run_login_status(login_cli.config_overrides).await; } None => { - if let Some(api_key) = login_cli.api_key { + if login_cli.use_device_code { + run_login_with_device_code( + login_cli.config_overrides, + login_cli.issuer_base_url, + login_cli.client_id, + ) + .await; + } else if let Some(api_key) = login_cli.api_key { run_login_with_api_key(login_cli.config_overrides, api_key).await; } else { run_login_with_chatgpt(login_cli.config_overrides).await; @@ -268,15 +313,15 @@ async fn cli_main(codex_linux_sandbox_exe: Option) -> anyhow::Result<() ); run_logout(logout_cli.config_overrides).await; } - Some(Subcommand::Proto(mut proto_cli)) => { + Some(Subcommand::Completion(completion_cli)) => { + print_completion(completion_cli); + } + Some(Subcommand::Cloud(mut cloud_cli)) => { prepend_config_flags( - &mut proto_cli.config_overrides, + &mut cloud_cli.config_overrides, root_config_overrides.clone(), ); - proto::run_main(proto_cli).await?; - } - Some(Subcommand::Completion(completion_cli)) => { - print_completion(completion_cli); + codex_cloud_tasks::run_main(cloud_cli, codex_linux_sandbox_exe).await?; } Some(Subcommand::Debug(debug_args)) => match debug_args.cmd { DebugCommand::Seatbelt(mut seatbelt_cli) => { @@ -309,6 +354,10 @@ async fn cli_main(codex_linux_sandbox_exe: Option) -> anyhow::Result<() ); run_apply_command(apply_cli, None).await?; } + Some(Subcommand::ResponsesApiProxy(args)) => { + tokio::task::spawn_blocking(move || codex_responses_api_proxy::run_main(args)) + .await??; + } Some(Subcommand::GenerateTs(gen_cli)) => { codex_protocol_ts::generate_ts(&gen_cli.out_dir, gen_cli.prettier.as_deref())?; } @@ -406,7 +455,7 @@ fn print_completion(cmd: CompletionCommand) { mod tests { use super::*; use codex_core::protocol::TokenUsage; - use codex_protocol::mcp_protocol::ConversationId; + use codex_protocol::ConversationId; fn finalize_from_args(args: &[&str]) -> TuiCli { let cli = MultitoolCli::try_parse_from(args).expect("parse"); @@ -460,7 +509,7 @@ mod tests { lines, 
vec![ "Token usage: total=2 input=0 output=2".to_string(), - "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000." + "To continue this session, run codex resume 123e4567-e89b-12d3-a456-426614174000" .to_string(), ] ); diff --git a/codex-rs/cli/src/mcp_cmd.rs b/codex-rs/cli/src/mcp_cmd.rs index 465de71aac1..85243a641b1 100644 --- a/codex-rs/cli/src/mcp_cmd.rs +++ b/codex-rs/cli/src/mcp_cmd.rs @@ -1,6 +1,4 @@ -use std::collections::BTreeMap; use std::collections::HashMap; -use std::path::PathBuf; use anyhow::Context; use anyhow::Result; @@ -13,6 +11,7 @@ use codex_core::config::find_codex_home; use codex_core::config::load_global_mcp_servers; use codex_core::config::write_global_mcp_servers; use codex_core::config_types::McpServerConfig; +use codex_core::config_types::McpServerTransportConfig; /// [experimental] Launch Codex as an MCP server or manage configured MCP servers. /// @@ -28,14 +27,11 @@ pub struct McpCli { pub config_overrides: CliConfigOverrides, #[command(subcommand)] - pub cmd: Option, + pub subcommand: McpSubcommand, } #[derive(Debug, clap::Subcommand)] pub enum McpSubcommand { - /// [experimental] Run the Codex MCP server (stdio transport). - Serve, - /// [experimental] List configured MCP servers. List(ListArgs), @@ -87,17 +83,13 @@ pub struct RemoveArgs { } impl McpCli { - pub async fn run(self, codex_linux_sandbox_exe: Option) -> Result<()> { + pub async fn run(self) -> Result<()> { let McpCli { config_overrides, - cmd, + subcommand, } = self; - let subcommand = cmd.unwrap_or(McpSubcommand::Serve); match subcommand { - McpSubcommand::Serve => { - codex_mcp_server::run_main(codex_linux_sandbox_exe, config_overrides).await?; - } McpSubcommand::List(args) => { run_list(&config_overrides, args)?; } @@ -145,9 +137,11 @@ fn run_add(config_overrides: &CliConfigOverrides, add_args: AddArgs) -> Result<( .with_context(|| format!("failed to load MCP servers from {}", codex_home.display()))?; let new_entry = McpServerConfig { - command: command_bin, - args: command_args, - env: env_map, + transport: McpServerTransportConfig::Stdio { + command: command_bin, + args: command_args, + env: env_map, + }, startup_timeout_sec: None, tool_timeout_sec: None, }; @@ -201,16 +195,25 @@ fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Resul let json_entries: Vec<_> = entries .into_iter() .map(|(name, cfg)| { - let env = cfg.env.as_ref().map(|env| { - env.iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect::>() - }); + let transport = match &cfg.transport { + McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({ + "type": "stdio", + "command": command, + "args": args, + "env": env, + }), + McpServerTransportConfig::StreamableHttp { url, bearer_token } => { + serde_json::json!({ + "type": "streamable_http", + "url": url, + "bearer_token": bearer_token, + }) + } + }; + serde_json::json!({ "name": name, - "command": cfg.command, - "args": cfg.args, - "env": env, + "transport": transport, "startup_timeout_sec": cfg .startup_timeout_sec .map(|timeout| timeout.as_secs_f64()), @@ -230,62 +233,111 @@ fn run_list(config_overrides: &CliConfigOverrides, list_args: ListArgs) -> Resul return Ok(()); } - let mut rows: Vec<[String; 4]> = Vec::new(); - for (name, cfg) in entries { - let args = if cfg.args.is_empty() { - "-".to_string() - } else { - cfg.args.join(" ") - }; + let mut stdio_rows: Vec<[String; 4]> = Vec::new(); + let mut http_rows: Vec<[String; 3]> = Vec::new(); - let env = match cfg.env.as_ref() { - None => 
"-".to_string(), - Some(map) if map.is_empty() => "-".to_string(), - Some(map) => { - let mut pairs: Vec<_> = map.iter().collect(); - pairs.sort_by(|(a, _), (b, _)| a.cmp(b)); - pairs - .into_iter() - .map(|(k, v)| format!("{k}={v}")) - .collect::>() - .join(", ") + for (name, cfg) in entries { + match &cfg.transport { + McpServerTransportConfig::Stdio { command, args, env } => { + let args_display = if args.is_empty() { + "-".to_string() + } else { + args.join(" ") + }; + let env_display = match env.as_ref() { + None => "-".to_string(), + Some(map) if map.is_empty() => "-".to_string(), + Some(map) => { + let mut pairs: Vec<_> = map.iter().collect(); + pairs.sort_by(|(a, _), (b, _)| a.cmp(b)); + pairs + .into_iter() + .map(|(k, v)| format!("{k}={v}")) + .collect::>() + .join(", ") + } + }; + stdio_rows.push([name.clone(), command.clone(), args_display, env_display]); } - }; - - rows.push([name.clone(), cfg.command.clone(), args, env]); + McpServerTransportConfig::StreamableHttp { url, bearer_token } => { + let has_bearer = if bearer_token.is_some() { + "True" + } else { + "False" + }; + http_rows.push([name.clone(), url.clone(), has_bearer.into()]); + } + } } - let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()]; - for row in &rows { - for (i, cell) in row.iter().enumerate() { - widths[i] = widths[i].max(cell.len()); + if !stdio_rows.is_empty() { + let mut widths = ["Name".len(), "Command".len(), "Args".len(), "Env".len()]; + for row in &stdio_rows { + for (i, cell) in row.iter().enumerate() { + widths[i] = widths[i].max(cell.len()); + } } - } - println!( - "{: Result<( }; if get_args.json { - let env = server.env.as_ref().map(|env| { - env.iter() - .map(|(k, v)| (k.clone(), v.clone())) - .collect::>() - }); + let transport = match &server.transport { + McpServerTransportConfig::Stdio { command, args, env } => serde_json::json!({ + "type": "stdio", + "command": command, + "args": args, + "env": env, + }), + McpServerTransportConfig::StreamableHttp { url, bearer_token } => serde_json::json!({ + "type": "streamable_http", + "url": url, + "bearer_token": bearer_token, + }), + }; let output = serde_json::to_string_pretty(&serde_json::json!({ "name": get_args.name, - "command": server.command, - "args": server.args, - "env": env, + "transport": transport, "startup_timeout_sec": server .startup_timeout_sec .map(|timeout| timeout.as_secs_f64()), @@ -323,27 +381,38 @@ fn run_get(config_overrides: &CliConfigOverrides, get_args: GetArgs) -> Result<( } println!("{}", get_args.name); - println!(" command: {}", server.command); - let args = if server.args.is_empty() { - "-".to_string() - } else { - server.args.join(" ") - }; - println!(" args: {args}"); - let env_display = match server.env.as_ref() { - None => "-".to_string(), - Some(map) if map.is_empty() => "-".to_string(), - Some(map) => { - let mut pairs: Vec<_> = map.iter().collect(); - pairs.sort_by(|(a, _), (b, _)| a.cmp(b)); - pairs - .into_iter() - .map(|(k, v)| format!("{k}={v}")) - .collect::>() - .join(", ") + match &server.transport { + McpServerTransportConfig::Stdio { command, args, env } => { + println!(" transport: stdio"); + println!(" command: {command}"); + let args_display = if args.is_empty() { + "-".to_string() + } else { + args.join(" ") + }; + println!(" args: {args_display}"); + let env_display = match env.as_ref() { + None => "-".to_string(), + Some(map) if map.is_empty() => "-".to_string(), + Some(map) => { + let mut pairs: Vec<_> = map.iter().collect(); + pairs.sort_by(|(a, _), (b, _)| a.cmp(b)); + 
pairs + .into_iter() + .map(|(k, v)| format!("{k}={v}")) + .collect::>() + .join(", ") + } + }; + println!(" env: {env_display}"); } - }; - println!(" env: {env_display}"); + McpServerTransportConfig::StreamableHttp { url, bearer_token } => { + println!(" transport: streamable_http"); + println!(" url: {url}"); + let bearer = bearer_token.as_deref().unwrap_or("-"); + println!(" bearer_token: {bearer}"); + } + } if let Some(timeout) = server.startup_timeout_sec { println!(" startup_timeout_sec: {}", timeout.as_secs_f64()); } diff --git a/codex-rs/cli/src/proto.rs b/codex-rs/cli/src/proto.rs deleted file mode 100644 index 623edca5a8f..00000000000 --- a/codex-rs/cli/src/proto.rs +++ /dev/null @@ -1,133 +0,0 @@ -use std::io::IsTerminal; - -use clap::Parser; -use codex_common::CliConfigOverrides; -use codex_core::AuthManager; -use codex_core::ConversationManager; -use codex_core::NewConversation; -use codex_core::config::Config; -use codex_core::config::ConfigOverrides; -use codex_core::protocol::Event; -use codex_core::protocol::EventMsg; -use codex_core::protocol::Submission; -use tokio::io::AsyncBufReadExt; -use tokio::io::BufReader; -use tracing::error; -use tracing::info; - -#[derive(Debug, Parser)] -pub struct ProtoCli { - #[clap(skip)] - pub config_overrides: CliConfigOverrides, -} - -pub async fn run_main(opts: ProtoCli) -> anyhow::Result<()> { - if std::io::stdin().is_terminal() { - anyhow::bail!("Protocol mode expects stdin to be a pipe, not a terminal"); - } - - tracing_subscriber::fmt() - .with_writer(std::io::stderr) - .init(); - - let ProtoCli { config_overrides } = opts; - let overrides_vec = config_overrides - .parse_overrides() - .map_err(anyhow::Error::msg)?; - - let config = Config::load_with_cli_overrides(overrides_vec, ConfigOverrides::default())?; - // Use conversation_manager API to start a conversation - let conversation_manager = - ConversationManager::new(AuthManager::shared(config.codex_home.clone())); - let NewConversation { - conversation_id: _, - conversation, - session_configured, - } = conversation_manager.new_conversation(config).await?; - - // Simulate streaming the session_configured event. - let synthetic_event = Event { - // Fake id value. - id: "".to_string(), - msg: EventMsg::SessionConfigured(session_configured), - }; - let session_configured_event = match serde_json::to_string(&synthetic_event) { - Ok(s) => s, - Err(e) => { - error!("Failed to serialize session_configured: {e}"); - return Err(anyhow::Error::from(e)); - } - }; - println!("{session_configured_event}"); - - // Task that reads JSON lines from stdin and forwards to Submission Queue - let sq_fut = { - let conversation = conversation.clone(); - async move { - let stdin = BufReader::new(tokio::io::stdin()); - let mut lines = stdin.lines(); - loop { - let result = tokio::select! { - _ = tokio::signal::ctrl_c() => { - break - }, - res = lines.next_line() => res, - }; - - match result { - Ok(Some(line)) => { - let line = line.trim(); - if line.is_empty() { - continue; - } - match serde_json::from_str::(line) { - Ok(sub) => { - if let Err(e) = conversation.submit_with_id(sub).await { - error!("{e:#}"); - break; - } - } - Err(e) => { - error!("invalid submission: {e}"); - } - } - } - _ => { - info!("Submission queue closed"); - break; - } - } - } - } - }; - - // Task that reads events from the agent and prints them as JSON lines to stdout - let eq_fut = async move { - loop { - let event = tokio::select! 
{ - _ = tokio::signal::ctrl_c() => break, - event = conversation.next_event() => event, - }; - match event { - Ok(event) => { - let event_str = match serde_json::to_string(&event) { - Ok(s) => s, - Err(e) => { - error!("Failed to serialize event: {e}"); - continue; - } - }; - println!("{event_str}"); - } - Err(e) => { - error!("{e:#}"); - break; - } - } - } - info!("Event queue closed"); - }; - - tokio::join!(sq_fut, eq_fut); - Ok(()) -} diff --git a/codex-rs/cli/tests/mcp_add_remove.rs b/codex-rs/cli/tests/mcp_add_remove.rs index 9e54f0d8678..cf3ea9f7398 100644 --- a/codex-rs/cli/tests/mcp_add_remove.rs +++ b/codex-rs/cli/tests/mcp_add_remove.rs @@ -2,6 +2,7 @@ use std::path::Path; use anyhow::Result; use codex_core::config::load_global_mcp_servers; +use codex_core::config_types::McpServerTransportConfig; use predicates::str::contains; use pretty_assertions::assert_eq; use tempfile::TempDir; @@ -26,9 +27,14 @@ fn add_and_remove_server_updates_global_config() -> Result<()> { let servers = load_global_mcp_servers(codex_home.path())?; assert_eq!(servers.len(), 1); let docs = servers.get("docs").expect("server should exist"); - assert_eq!(docs.command, "echo"); - assert_eq!(docs.args, vec!["hello".to_string()]); - assert!(docs.env.is_none()); + match &docs.transport { + McpServerTransportConfig::Stdio { command, args, env } => { + assert_eq!(command, "echo"); + assert_eq!(args, &vec!["hello".to_string()]); + assert!(env.is_none()); + } + other => panic!("unexpected transport: {other:?}"), + } let mut remove_cmd = codex_command(codex_home.path())?; remove_cmd @@ -76,7 +82,10 @@ fn add_with_env_preserves_key_order_and_values() -> Result<()> { let servers = load_global_mcp_servers(codex_home.path())?; let envy = servers.get("envy").expect("server should exist"); - let env = envy.env.as_ref().expect("env should be present"); + let env = match &envy.transport { + McpServerTransportConfig::Stdio { env: Some(env), .. 
} => env, + other => panic!("unexpected transport: {other:?}"), + }; assert_eq!(env.len(), 2); assert_eq!(env.get("FOO"), Some(&"bar".to_string())); diff --git a/codex-rs/cli/tests/mcp_list.rs b/codex-rs/cli/tests/mcp_list.rs index e53f42cc8f7..6c83de19fa3 100644 --- a/codex-rs/cli/tests/mcp_list.rs +++ b/codex-rs/cli/tests/mcp_list.rs @@ -4,6 +4,7 @@ use anyhow::Result; use predicates::str::contains; use pretty_assertions::assert_eq; use serde_json::Value as JsonValue; +use serde_json::json; use tempfile::TempDir; fn codex_command(codex_home: &Path) -> Result { @@ -58,38 +59,35 @@ fn list_and_get_render_expected_output() -> Result<()> { assert!(json_output.status.success()); let stdout = String::from_utf8(json_output.stdout)?; let parsed: JsonValue = serde_json::from_str(&stdout)?; - let array = parsed.as_array().expect("expected array"); - assert_eq!(array.len(), 1); - let entry = &array[0]; - assert_eq!(entry.get("name"), Some(&JsonValue::String("docs".into()))); assert_eq!( - entry.get("command"), - Some(&JsonValue::String("docs-server".into())) - ); - - let args = entry - .get("args") - .and_then(|v| v.as_array()) - .expect("args array"); - assert_eq!( - args, - &vec![ - JsonValue::String("--port".into()), - JsonValue::String("4000".into()) + parsed, + json!([ + { + "name": "docs", + "transport": { + "type": "stdio", + "command": "docs-server", + "args": [ + "--port", + "4000" + ], + "env": { + "TOKEN": "secret" + } + }, + "startup_timeout_sec": null, + "tool_timeout_sec": null + } ] + ) ); - let env = entry - .get("env") - .and_then(|v| v.as_object()) - .expect("env map"); - assert_eq!(env.get("TOKEN"), Some(&JsonValue::String("secret".into()))); - let mut get_cmd = codex_command(codex_home.path())?; let get_output = get_cmd.args(["mcp", "get", "docs"]).output()?; assert!(get_output.status.success()); let stdout = String::from_utf8(get_output.stdout)?; assert!(stdout.contains("docs")); + assert!(stdout.contains("transport: stdio")); assert!(stdout.contains("command: docs-server")); assert!(stdout.contains("args: --port 4000")); assert!(stdout.contains("env: TOKEN=secret")); diff --git a/codex-rs/cloud-tasks-client/Cargo.toml b/codex-rs/cloud-tasks-client/Cargo.toml new file mode 100644 index 00000000000..ca45b6e15b5 --- /dev/null +++ b/codex-rs/cloud-tasks-client/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "codex-cloud-tasks-client" +version = { workspace = true } +edition = "2024" + +[lib] +name = "codex_cloud_tasks_client" +path = "src/lib.rs" + +[lints] +workspace = true + +[features] +default = ["online"] +online = ["dep:codex-backend-client"] +mock = [] + +[dependencies] +anyhow = "1" +async-trait = "0.1" +chrono = { version = "0.4", features = ["serde"] } +diffy = "0.4.2" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +thiserror = "2.0.12" +codex-backend-client = { path = "../backend-client", optional = true } +codex-git-apply = { path = "../git-apply" } diff --git a/codex-rs/cloud-tasks-client/src/api.rs b/codex-rs/cloud-tasks-client/src/api.rs new file mode 100644 index 00000000000..4bd12939e84 --- /dev/null +++ b/codex-rs/cloud-tasks-client/src/api.rs @@ -0,0 +1,158 @@ +use chrono::DateTime; +use chrono::Utc; +use serde::Deserialize; +use serde::Serialize; + +pub type Result = std::result::Result; + +#[derive(Debug, thiserror::Error)] +pub enum CloudTaskError { + #[error("unimplemented: {0}")] + Unimplemented(&'static str), + #[error("http error: {0}")] + Http(String), + #[error("io error: {0}")] + Io(String), + #[error("{0}")] + Msg(String), +} + 
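
As a small illustration (not part of the patch), one way a front end could turn these error variants into user-facing text; the wording is an assumption, only the variant shapes come from the enum above.

    use codex_cloud_tasks_client::CloudTaskError;

    fn user_facing_message(err: &CloudTaskError) -> String {
        match err {
            CloudTaskError::Unimplemented(what) => format!("not implemented yet: {what}"),
            CloudTaskError::Http(msg) => format!("backend request failed: {msg}"),
            CloudTaskError::Io(msg) => format!("local git apply failed: {msg}"),
            CloudTaskError::Msg(msg) => msg.clone(),
        }
    }
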
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(transparent)] +pub struct TaskId(pub String); + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] +pub enum TaskStatus { + Pending, + Ready, + Applied, + Error, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct TaskSummary { + pub id: TaskId, + pub title: String, + pub status: TaskStatus, + pub updated_at: DateTime, + /// Backend environment identifier (when available) + pub environment_id: Option, + /// Human-friendly environment label (when available) + pub environment_label: Option, + pub summary: DiffSummary, + /// True when the backend reports this task as a code review. + #[serde(default)] + pub is_review: bool, + /// Number of assistant attempts (best-of-N), when reported by the backend. + #[serde(default)] + pub attempt_total: Option, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)] +pub enum AttemptStatus { + Pending, + InProgress, + Completed, + Failed, + Cancelled, + #[default] + Unknown, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TurnAttempt { + pub turn_id: String, + pub attempt_placement: Option, + pub created_at: Option>, + pub status: AttemptStatus, + pub diff: Option, + pub messages: Vec, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ApplyStatus { + Success, + Partial, + Error, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct ApplyOutcome { + pub applied: bool, + pub status: ApplyStatus, + pub message: String, + #[serde(default)] + pub skipped_paths: Vec, + #[serde(default)] + pub conflict_paths: Vec, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct CreatedTask { + pub id: TaskId, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] +pub struct DiffSummary { + pub files_changed: usize, + pub lines_added: usize, + pub lines_removed: usize, +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TaskText { + pub prompt: Option, + pub messages: Vec, + pub turn_id: Option, + pub sibling_turn_ids: Vec, + pub attempt_placement: Option, + pub attempt_status: AttemptStatus, +} + +impl Default for TaskText { + fn default() -> Self { + Self { + prompt: None, + messages: Vec::new(), + turn_id: None, + sibling_turn_ids: Vec::new(), + attempt_placement: None, + attempt_status: AttemptStatus::Unknown, + } + } +} + +#[async_trait::async_trait] +pub trait CloudBackend: Send + Sync { + async fn list_tasks(&self, env: Option<&str>) -> Result>; + async fn get_task_diff(&self, id: TaskId) -> Result>; + /// Return assistant output messages (no diff) when available. + async fn get_task_messages(&self, id: TaskId) -> Result>; + /// Return the creating prompt and assistant messages (when available). + async fn get_task_text(&self, id: TaskId) -> Result; + /// Return any sibling attempts (best-of-N) for the given assistant turn. + async fn list_sibling_attempts( + &self, + task: TaskId, + turn_id: String, + ) -> Result>; + /// Dry-run apply (preflight) that validates whether the patch would apply cleanly. + /// Never modifies the working tree. When `diff_override` is supplied, the provided diff is + /// used instead of re-fetching the task details so callers can apply alternate attempts. 
+ async fn apply_task_preflight( + &self, + id: TaskId, + diff_override: Option, + ) -> Result; + async fn apply_task(&self, id: TaskId, diff_override: Option) -> Result; + async fn create_task( + &self, + env_id: &str, + prompt: &str, + git_ref: &str, + qa_mode: bool, + best_of_n: usize, + ) -> Result; +} diff --git a/codex-rs/cloud-tasks-client/src/http.rs b/codex-rs/cloud-tasks-client/src/http.rs new file mode 100644 index 00000000000..912681cd3b7 --- /dev/null +++ b/codex-rs/cloud-tasks-client/src/http.rs @@ -0,0 +1,769 @@ +use crate::ApplyOutcome; +use crate::ApplyStatus; +use crate::AttemptStatus; +use crate::CloudBackend; +use crate::CloudTaskError; +use crate::DiffSummary; +use crate::Result; +use crate::TaskId; +use crate::TaskStatus; +use crate::TaskSummary; +use crate::TurnAttempt; +use crate::api::TaskText; +use chrono::DateTime; +use chrono::Utc; + +use codex_backend_client as backend; +use codex_backend_client::CodeTaskDetailsResponseExt; + +#[derive(Clone)] +pub struct HttpClient { + pub base_url: String, + backend: backend::Client, +} + +impl HttpClient { + pub fn new(base_url: impl Into) -> anyhow::Result { + let base_url = base_url.into(); + let backend = backend::Client::new(base_url.clone())?; + Ok(Self { base_url, backend }) + } + + pub fn with_bearer_token(mut self, token: impl Into) -> Self { + self.backend = self.backend.clone().with_bearer_token(token); + self + } + + pub fn with_user_agent(mut self, ua: impl Into) -> Self { + self.backend = self.backend.clone().with_user_agent(ua); + self + } + + pub fn with_chatgpt_account_id(mut self, account_id: impl Into) -> Self { + self.backend = self.backend.clone().with_chatgpt_account_id(account_id); + self + } + + fn tasks_api(&self) -> api::Tasks<'_> { + api::Tasks::new(self) + } + + fn attempts_api(&self) -> api::Attempts<'_> { + api::Attempts::new(self) + } + + fn apply_api(&self) -> api::Apply<'_> { + api::Apply::new(self) + } +} + +#[async_trait::async_trait] +impl CloudBackend for HttpClient { + async fn list_tasks(&self, env: Option<&str>) -> Result> { + self.tasks_api().list(env).await + } + + async fn get_task_diff(&self, id: TaskId) -> Result> { + self.tasks_api().diff(id).await + } + + async fn get_task_messages(&self, id: TaskId) -> Result> { + self.tasks_api().messages(id).await + } + + async fn get_task_text(&self, id: TaskId) -> Result { + self.tasks_api().task_text(id).await + } + + async fn list_sibling_attempts( + &self, + task: TaskId, + turn_id: String, + ) -> Result> { + self.attempts_api().list(task, turn_id).await + } + + async fn apply_task(&self, id: TaskId, diff_override: Option) -> Result { + self.apply_api().run(id, diff_override, false).await + } + + async fn apply_task_preflight( + &self, + id: TaskId, + diff_override: Option, + ) -> Result { + self.apply_api().run(id, diff_override, true).await + } + + async fn create_task( + &self, + env_id: &str, + prompt: &str, + git_ref: &str, + qa_mode: bool, + best_of_n: usize, + ) -> Result { + self.tasks_api() + .create(env_id, prompt, git_ref, qa_mode, best_of_n) + .await + } +} + +mod api { + use super::*; + use serde_json::Value; + use std::cmp::Ordering; + use std::collections::HashMap; + + pub(crate) struct Tasks<'a> { + base_url: &'a str, + backend: &'a backend::Client, + } + + impl<'a> Tasks<'a> { + pub(crate) fn new(client: &'a HttpClient) -> Self { + Self { + base_url: &client.base_url, + backend: &client.backend, + } + } + + pub(crate) async fn list(&self, env: Option<&str>) -> Result> { + let resp = self + .backend + .list_tasks(Some(20), 
Some("current"), env) + .await + .map_err(|e| CloudTaskError::Http(format!("list_tasks failed: {e}")))?; + + let tasks: Vec = resp + .items + .into_iter() + .map(map_task_list_item_to_summary) + .collect(); + + append_error_log(&format!( + "http.list_tasks: env={} items={}", + env.unwrap_or(""), + tasks.len() + )); + Ok(tasks) + } + + pub(crate) async fn diff(&self, id: TaskId) -> Result> { + let (details, body, ct) = self + .details_with_body(&id.0) + .await + .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?; + if let Some(diff) = details.unified_diff() { + return Ok(Some(diff)); + } + let _ = (body, ct); + Ok(None) + } + + pub(crate) async fn messages(&self, id: TaskId) -> Result> { + let (details, body, ct) = self + .details_with_body(&id.0) + .await + .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?; + + let mut msgs = details.assistant_text_messages(); + if msgs.is_empty() { + msgs.extend(extract_assistant_messages_from_body(&body)); + } + if !msgs.is_empty() { + return Ok(msgs); + } + if let Some(err) = details.assistant_error_message() { + return Ok(vec![format!("Task failed: {err}")]); + } + + let url = match details_path(self.base_url, &id.0) { + Some(url) => url, + None => format!("{}/api/codex/tasks/{}", self.base_url, id.0), + }; + Err(CloudTaskError::Http(format!( + "No assistant text messages in response. GET {url}; content-type={ct}; body={body}" + ))) + } + + pub(crate) async fn task_text(&self, id: TaskId) -> Result { + let (details, body, _ct) = self + .details_with_body(&id.0) + .await + .map_err(|e| CloudTaskError::Http(format!("get_task_details failed: {e}")))?; + let prompt = details.user_text_prompt(); + let mut messages = details.assistant_text_messages(); + if messages.is_empty() { + messages.extend(extract_assistant_messages_from_body(&body)); + } + let assistant_turn = details.current_assistant_turn.as_ref(); + let turn_id = assistant_turn.and_then(|turn| turn.id.clone()); + let sibling_turn_ids = assistant_turn + .map(|turn| turn.sibling_turn_ids.clone()) + .unwrap_or_default(); + let attempt_placement = assistant_turn.and_then(|turn| turn.attempt_placement); + let attempt_status = attempt_status_from_str( + assistant_turn.and_then(|turn| turn.turn_status.as_deref()), + ); + Ok(TaskText { + prompt, + messages, + turn_id, + sibling_turn_ids, + attempt_placement, + attempt_status, + }) + } + + pub(crate) async fn create( + &self, + env_id: &str, + prompt: &str, + git_ref: &str, + qa_mode: bool, + best_of_n: usize, + ) -> Result { + let mut input_items: Vec = Vec::new(); + input_items.push(serde_json::json!({ + "type": "message", + "role": "user", + "content": [{ "content_type": "text", "text": prompt }] + })); + + if let Ok(diff) = std::env::var("CODEX_STARTING_DIFF") + && !diff.is_empty() + { + input_items.push(serde_json::json!({ + "type": "pre_apply_patch", + "output_diff": { "diff": diff } + })); + } + + let mut request_body = serde_json::json!({ + "new_task": { + "environment_id": env_id, + "branch": git_ref, + "run_environment_in_qa_mode": qa_mode, + }, + "input_items": input_items, + }); + + if best_of_n > 1 + && let Some(obj) = request_body.as_object_mut() + { + obj.insert( + "metadata".to_string(), + serde_json::json!({ "best_of_n": best_of_n }), + ); + } + + match self.backend.create_task(request_body).await { + Ok(id) => { + append_error_log(&format!( + "new_task: created id={id} env={} prompt_chars={}", + env_id, + prompt.chars().count() + )); + Ok(crate::CreatedTask { id: TaskId(id) }) + } + 
Err(e) => { + append_error_log(&format!( + "new_task: create failed env={} prompt_chars={}: {}", + env_id, + prompt.chars().count(), + e + )); + Err(CloudTaskError::Http(format!("create_task failed: {e}"))) + } + } + } + + async fn details_with_body( + &self, + id: &str, + ) -> anyhow::Result<(backend::CodeTaskDetailsResponse, String, String)> { + let (parsed, body, ct) = self.backend.get_task_details_with_body(id).await?; + Ok((parsed, body, ct)) + } + } + + pub(crate) struct Attempts<'a> { + backend: &'a backend::Client, + } + + impl<'a> Attempts<'a> { + pub(crate) fn new(client: &'a HttpClient) -> Self { + Self { + backend: &client.backend, + } + } + + pub(crate) async fn list(&self, task: TaskId, turn_id: String) -> Result> { + let resp = self + .backend + .list_sibling_turns(&task.0, &turn_id) + .await + .map_err(|e| CloudTaskError::Http(format!("list_sibling_turns failed: {e}")))?; + + let mut attempts: Vec = resp + .sibling_turns + .iter() + .filter_map(turn_attempt_from_map) + .collect(); + attempts.sort_by(compare_attempts); + Ok(attempts) + } + } + + pub(crate) struct Apply<'a> { + backend: &'a backend::Client, + } + + impl<'a> Apply<'a> { + pub(crate) fn new(client: &'a HttpClient) -> Self { + Self { + backend: &client.backend, + } + } + + pub(crate) async fn run( + &self, + task_id: TaskId, + diff_override: Option, + preflight: bool, + ) -> Result { + let id = task_id.0.clone(); + let diff = match diff_override { + Some(diff) => diff, + None => { + let details = self.backend.get_task_details(&id).await.map_err(|e| { + CloudTaskError::Http(format!("get_task_details failed: {e}")) + })?; + details.unified_diff().ok_or_else(|| { + CloudTaskError::Msg(format!("No diff available for task {id}")) + })? + } + }; + + if !is_unified_diff(&diff) { + let summary = summarize_patch_for_logging(&diff); + let mode = if preflight { "preflight" } else { "apply" }; + append_error_log(&format!( + "apply_error: id={id} mode={mode} format=non-unified; {summary}" + )); + return Ok(ApplyOutcome { + applied: false, + status: ApplyStatus::Error, + message: "Expected unified git diff; backend returned an incompatible format." 
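
Illustrative only: how a caller might combine the preflight and apply entry points using the `ApplyOutcome` fields from api.rs; the control flow here is an assumption, not part of the patch.

    use codex_cloud_tasks_client::ApplyStatus;
    use codex_cloud_tasks_client::CloudBackend;
    use codex_cloud_tasks_client::Result;
    use codex_cloud_tasks_client::TaskId;

    async fn apply_if_clean(backend: &dyn CloudBackend, id: TaskId) -> Result<bool> {
        let preflight = backend.apply_task_preflight(id.clone(), None).await?;
        if !matches!(preflight.status, ApplyStatus::Success) {
            // Partial/Error: report the message (and skipped/conflict paths)
            // instead of touching the working tree.
            eprintln!("{}", preflight.message);
            return Ok(false);
        }
        let outcome = backend.apply_task(id, None).await?;
        Ok(outcome.applied)
    }
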
+ .to_string(), + skipped_paths: Vec::new(), + conflict_paths: Vec::new(), + }); + } + + let req = codex_git_apply::ApplyGitRequest { + cwd: std::env::current_dir().unwrap_or_else(|_| std::env::temp_dir()), + diff: diff.clone(), + revert: false, + preflight, + }; + let r = codex_git_apply::apply_git_patch(&req) + .map_err(|e| CloudTaskError::Io(format!("git apply failed to run: {e}")))?; + + let status = if r.exit_code == 0 { + ApplyStatus::Success + } else if !r.applied_paths.is_empty() || !r.conflicted_paths.is_empty() { + ApplyStatus::Partial + } else { + ApplyStatus::Error + }; + let applied = matches!(status, ApplyStatus::Success) && !preflight; + + let message = if preflight { + match status { + ApplyStatus::Success => { + format!("Preflight passed for task {id} (applies cleanly)") + } + ApplyStatus::Partial => format!( + "Preflight: patch does not fully apply for task {id} (applied={}, skipped={}, conflicts={})", + r.applied_paths.len(), + r.skipped_paths.len(), + r.conflicted_paths.len() + ), + ApplyStatus::Error => format!( + "Preflight failed for task {id} (applied={}, skipped={}, conflicts={})", + r.applied_paths.len(), + r.skipped_paths.len(), + r.conflicted_paths.len() + ), + } + } else { + match status { + ApplyStatus::Success => format!( + "Applied task {id} locally ({} files)", + r.applied_paths.len() + ), + ApplyStatus::Partial => format!( + "Apply partially succeeded for task {id} (applied={}, skipped={}, conflicts={})", + r.applied_paths.len(), + r.skipped_paths.len(), + r.conflicted_paths.len() + ), + ApplyStatus::Error => format!( + "Apply failed for task {id} (applied={}, skipped={}, conflicts={})", + r.applied_paths.len(), + r.skipped_paths.len(), + r.conflicted_paths.len() + ), + } + }; + + if matches!(status, ApplyStatus::Partial | ApplyStatus::Error) + || (preflight && !matches!(status, ApplyStatus::Success)) + { + let mut log = String::new(); + let summary = summarize_patch_for_logging(&diff); + let mode = if preflight { "preflight" } else { "apply" }; + use std::fmt::Write as _; + let _ = writeln!( + &mut log, + "apply_result: mode={} id={} status={:?} applied={} skipped={} conflicts={} cmd={}", + mode, + id, + status, + r.applied_paths.len(), + r.skipped_paths.len(), + r.conflicted_paths.len(), + r.cmd_for_log + ); + let _ = writeln!( + &mut log, + "stdout_tail=\n{}\nstderr_tail=\n{}", + tail(&r.stdout, 2000), + tail(&r.stderr, 2000) + ); + let _ = writeln!(&mut log, "{summary}"); + let _ = writeln!( + &mut log, + "----- PATCH BEGIN -----\n{diff}\n----- PATCH END -----" + ); + append_error_log(&log); + } + + Ok(ApplyOutcome { + applied, + status, + message, + skipped_paths: r.skipped_paths, + conflict_paths: r.conflicted_paths, + }) + } + } + + fn details_path(base_url: &str, id: &str) -> Option { + if base_url.contains("/backend-api") { + Some(format!("{base_url}/wham/tasks/{id}")) + } else if base_url.contains("/api/codex") { + Some(format!("{base_url}/tasks/{id}")) + } else { + None + } + } + + fn extract_assistant_messages_from_body(body: &str) -> Vec { + let mut msgs = Vec::new(); + if let Ok(full) = serde_json::from_str::(body) + && let Some(arr) = full + .get("current_assistant_turn") + .and_then(|v| v.get("worklog")) + .and_then(|v| v.get("messages")) + .and_then(|v| v.as_array()) + { + for m in arr { + let is_assistant = m + .get("author") + .and_then(|a| a.get("role")) + .and_then(|r| r.as_str()) + == Some("assistant"); + if !is_assistant { + continue; + } + if let Some(parts) = m + .get("content") + .and_then(|c| c.get("parts")) + .and_then(|p| 
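
For context, a minimal sketch of calling the shared `codex-git-apply` engine directly, as both `codex apply` and the apply path above do; the request fields mirror the call sites in this patch, and `preflight: true` leaves the working tree untouched.

    fn preflight_patch(diff: &str) -> anyhow::Result<bool> {
        let req = codex_git_apply::ApplyGitRequest {
            cwd: std::env::current_dir()?,
            diff: diff.to_string(),
            revert: false,
            preflight: true,
        };
        let res = codex_git_apply::apply_git_patch(&req)?;
        // exit_code == 0 means the patch would apply cleanly in this checkout.
        Ok(res.exit_code == 0)
    }
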
p.as_array()) + { + for p in parts { + if let Some(s) = p.as_str() { + if !s.is_empty() { + msgs.push(s.to_string()); + } + continue; + } + if let Some(obj) = p.as_object() + && obj.get("content_type").and_then(|t| t.as_str()) == Some("text") + && let Some(txt) = obj.get("text").and_then(|t| t.as_str()) + { + msgs.push(txt.to_string()); + } + } + } + } + } + msgs + } + + fn turn_attempt_from_map(turn: &HashMap) -> Option { + let turn_id = turn.get("id").and_then(Value::as_str)?.to_string(); + let attempt_placement = turn.get("attempt_placement").and_then(Value::as_i64); + let created_at = parse_timestamp_value(turn.get("created_at")); + let status = attempt_status_from_str(turn.get("turn_status").and_then(Value::as_str)); + let diff = extract_diff_from_turn(turn); + let messages = extract_assistant_messages_from_turn(turn); + Some(TurnAttempt { + turn_id, + attempt_placement, + created_at, + status, + diff, + messages, + }) + } + + fn compare_attempts(a: &TurnAttempt, b: &TurnAttempt) -> Ordering { + match (a.attempt_placement, b.attempt_placement) { + (Some(lhs), Some(rhs)) => lhs.cmp(&rhs), + (Some(_), None) => Ordering::Less, + (None, Some(_)) => Ordering::Greater, + (None, None) => match (a.created_at, b.created_at) { + (Some(lhs), Some(rhs)) => lhs.cmp(&rhs), + (Some(_), None) => Ordering::Less, + (None, Some(_)) => Ordering::Greater, + (None, None) => a.turn_id.cmp(&b.turn_id), + }, + } + } + + fn extract_diff_from_turn(turn: &HashMap) -> Option { + let items = turn.get("output_items").and_then(Value::as_array)?; + for item in items { + match item.get("type").and_then(Value::as_str) { + Some("output_diff") => { + if let Some(diff) = item.get("diff").and_then(Value::as_str) + && !diff.is_empty() + { + return Some(diff.to_string()); + } + } + Some("pr") => { + if let Some(diff) = item + .get("output_diff") + .and_then(Value::as_object) + .and_then(|od| od.get("diff")) + .and_then(Value::as_str) + && !diff.is_empty() + { + return Some(diff.to_string()); + } + } + _ => {} + } + } + None + } + + fn extract_assistant_messages_from_turn(turn: &HashMap) -> Vec { + let mut msgs = Vec::new(); + if let Some(items) = turn.get("output_items").and_then(Value::as_array) { + for item in items { + if item.get("type").and_then(Value::as_str) != Some("message") { + continue; + } + if let Some(content) = item.get("content").and_then(Value::as_array) { + for part in content { + if part.get("content_type").and_then(Value::as_str) == Some("text") + && let Some(txt) = part.get("text").and_then(Value::as_str) + && !txt.is_empty() + { + msgs.push(txt.to_string()); + } + } + } + } + } + msgs + } + + fn attempt_status_from_str(raw: Option<&str>) -> AttemptStatus { + match raw.unwrap_or_default() { + "failed" => AttemptStatus::Failed, + "completed" => AttemptStatus::Completed, + "in_progress" => AttemptStatus::InProgress, + "pending" => AttemptStatus::Pending, + _ => AttemptStatus::Pending, + } + } + + fn parse_timestamp_value(v: Option<&Value>) -> Option> { + let ts = v?.as_f64()?; + let secs = ts as i64; + let nanos = ((ts - secs as f64) * 1_000_000_000.0) as u32; + Some(DateTime::::from( + std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos), + )) + } + + fn map_task_list_item_to_summary(src: backend::TaskListItem) -> TaskSummary { + let status_display = src.task_status_display.as_ref(); + TaskSummary { + id: TaskId(src.id), + title: src.title, + status: map_status(status_display), + updated_at: parse_updated_at(src.updated_at.as_ref()), + environment_id: None, + environment_label: 
env_label_from_status_display(status_display), + summary: diff_summary_from_status_display(status_display), + is_review: src + .pull_requests + .as_ref() + .is_some_and(|prs| !prs.is_empty()), + attempt_total: attempt_total_from_status_display(status_display), + } + } + + fn map_status(v: Option<&HashMap>) -> TaskStatus { + if let Some(val) = v { + if let Some(turn) = val + .get("latest_turn_status_display") + .and_then(Value::as_object) + && let Some(s) = turn.get("turn_status").and_then(Value::as_str) + { + return match s { + "failed" => TaskStatus::Error, + "completed" => TaskStatus::Ready, + "in_progress" => TaskStatus::Pending, + "pending" => TaskStatus::Pending, + "cancelled" => TaskStatus::Error, + _ => TaskStatus::Pending, + }; + } + if let Some(state) = val.get("state").and_then(Value::as_str) { + return match state { + "pending" => TaskStatus::Pending, + "ready" => TaskStatus::Ready, + "applied" => TaskStatus::Applied, + "error" => TaskStatus::Error, + _ => TaskStatus::Pending, + }; + } + } + TaskStatus::Pending + } + + fn parse_updated_at(ts: Option<&f64>) -> DateTime { + if let Some(v) = ts { + let secs = *v as i64; + let nanos = ((*v - secs as f64) * 1_000_000_000.0) as u32; + return DateTime::::from( + std::time::UNIX_EPOCH + std::time::Duration::new(secs.max(0) as u64, nanos), + ); + } + Utc::now() + } + + fn env_label_from_status_display(v: Option<&HashMap>) -> Option { + let map = v?; + map.get("environment_label") + .and_then(Value::as_str) + .map(str::to_string) + } + + fn diff_summary_from_status_display(v: Option<&HashMap>) -> DiffSummary { + let mut out = DiffSummary::default(); + let Some(map) = v else { return out }; + let latest = map + .get("latest_turn_status_display") + .and_then(Value::as_object); + let Some(latest) = latest else { return out }; + if let Some(ds) = latest.get("diff_stats").and_then(Value::as_object) { + if let Some(n) = ds.get("files_modified").and_then(Value::as_i64) { + out.files_changed = n.max(0) as usize; + } + if let Some(n) = ds.get("lines_added").and_then(Value::as_i64) { + out.lines_added = n.max(0) as usize; + } + if let Some(n) = ds.get("lines_removed").and_then(Value::as_i64) { + out.lines_removed = n.max(0) as usize; + } + } + out + } + + fn attempt_total_from_status_display(v: Option<&HashMap>) -> Option { + let map = v?; + let latest = map + .get("latest_turn_status_display") + .and_then(Value::as_object)?; + let siblings = latest.get("sibling_turn_ids").and_then(Value::as_array)?; + Some(siblings.len().saturating_add(1)) + } + + fn is_unified_diff(diff: &str) -> bool { + let t = diff.trim_start(); + if t.starts_with("diff --git ") { + return true; + } + let has_dash_headers = diff.contains("\n--- ") && diff.contains("\n+++ "); + let has_hunk = diff.contains("\n@@ ") || diff.starts_with("@@ "); + has_dash_headers && has_hunk + } + + fn tail(s: &str, max: usize) -> String { + if s.len() <= max { + s.to_string() + } else { + s[s.len() - max..].to_string() + } + } + + fn summarize_patch_for_logging(patch: &str) -> String { + let trimmed = patch.trim_start(); + let kind = if trimmed.starts_with("*** Begin Patch") { + "codex-patch" + } else if trimmed.starts_with("diff --git ") || trimmed.contains("\n*** End Patch\n") { + "git-diff" + } else if trimmed.starts_with("@@ ") || trimmed.contains("\n@@ ") { + "unified-diff" + } else { + "unknown" + }; + let lines = patch.lines().count(); + let chars = patch.len(); + let cwd = std::env::current_dir() + .ok() + .map(|p| p.display().to_string()) + .unwrap_or_else(|| "".to_string()); + let 
head: String = patch.lines().take(20).collect::>().join("\n"); + let head_trunc = if head.len() > 800 { + format!("{}…", &head[..800]) + } else { + head + }; + format!( + "patch_summary: kind={kind} lines={lines} chars={chars} cwd={cwd} ; head=\n{head_trunc}" + ) + } +} + +fn append_error_log(message: &str) { + let ts = Utc::now().to_rfc3339(); + if let Ok(mut f) = std::fs::OpenOptions::new() + .create(true) + .append(true) + .open("error.log") + { + use std::io::Write as _; + let _ = writeln!(f, "[{ts}] {message}"); + } +} diff --git a/codex-rs/cloud-tasks-client/src/lib.rs b/codex-rs/cloud-tasks-client/src/lib.rs new file mode 100644 index 00000000000..1c5a01e2aa5 --- /dev/null +++ b/codex-rs/cloud-tasks-client/src/lib.rs @@ -0,0 +1,29 @@ +mod api; + +pub use api::ApplyOutcome; +pub use api::ApplyStatus; +pub use api::AttemptStatus; +pub use api::CloudBackend; +pub use api::CloudTaskError; +pub use api::CreatedTask; +pub use api::DiffSummary; +pub use api::Result; +pub use api::TaskId; +pub use api::TaskStatus; +pub use api::TaskSummary; +pub use api::TaskText; +pub use api::TurnAttempt; + +#[cfg(feature = "mock")] +mod mock; + +#[cfg(feature = "online")] +mod http; + +#[cfg(feature = "mock")] +pub use mock::MockClient; + +#[cfg(feature = "online")] +pub use http::HttpClient; + +// Reusable apply engine now lives in the shared crate `codex-git-apply`. diff --git a/codex-rs/cloud-tasks-client/src/mock.rs b/codex-rs/cloud-tasks-client/src/mock.rs new file mode 100644 index 00000000000..97bc5520a83 --- /dev/null +++ b/codex-rs/cloud-tasks-client/src/mock.rs @@ -0,0 +1,180 @@ +use crate::ApplyOutcome; +use crate::AttemptStatus; +use crate::CloudBackend; +use crate::DiffSummary; +use crate::Result; +use crate::TaskId; +use crate::TaskStatus; +use crate::TaskSummary; +use crate::TurnAttempt; +use crate::api::TaskText; +use chrono::Utc; + +#[derive(Clone, Default)] +pub struct MockClient; + +#[async_trait::async_trait] +impl CloudBackend for MockClient { + async fn list_tasks(&self, _env: Option<&str>) -> Result> { + // Slightly vary content by env to aid tests that rely on the mock + let rows = match _env { + Some("env-A") => vec![("T-2000", "A: First", TaskStatus::Ready)], + Some("env-B") => vec![ + ("T-3000", "B: One", TaskStatus::Ready), + ("T-3001", "B: Two", TaskStatus::Pending), + ], + _ => vec![ + ("T-1000", "Update README formatting", TaskStatus::Ready), + ("T-1001", "Fix clippy warnings in core", TaskStatus::Pending), + ("T-1002", "Add contributing guide", TaskStatus::Ready), + ], + }; + let environment_id = _env.map(str::to_string); + let environment_label = match _env { + Some("env-A") => Some("Env A".to_string()), + Some("env-B") => Some("Env B".to_string()), + Some(other) => Some(other.to_string()), + None => Some("Global".to_string()), + }; + let mut out = Vec::new(); + for (id_str, title, status) in rows { + let id = TaskId(id_str.to_string()); + let diff = mock_diff_for(&id); + let (a, d) = count_from_unified(&diff); + out.push(TaskSummary { + id, + title: title.to_string(), + status, + updated_at: Utc::now(), + environment_id: environment_id.clone(), + environment_label: environment_label.clone(), + summary: DiffSummary { + files_changed: 1, + lines_added: a, + lines_removed: d, + }, + is_review: false, + attempt_total: Some(if id_str == "T-1000" { 2 } else { 1 }), + }); + } + Ok(out) + } + + async fn get_task_diff(&self, id: TaskId) -> Result> { + Ok(Some(mock_diff_for(&id))) + } + + async fn get_task_messages(&self, _id: TaskId) -> Result> { + Ok(vec![ + "Mock assistant 
output: this task contains no diff.".to_string(), + ]) + } + + async fn get_task_text(&self, _id: TaskId) -> Result { + Ok(TaskText { + prompt: Some("Why is there no diff?".to_string()), + messages: vec!["Mock assistant output: this task contains no diff.".to_string()], + turn_id: Some("mock-turn".to_string()), + sibling_turn_ids: Vec::new(), + attempt_placement: Some(0), + attempt_status: AttemptStatus::Completed, + }) + } + + async fn apply_task(&self, id: TaskId, _diff_override: Option) -> Result { + Ok(ApplyOutcome { + applied: true, + status: crate::ApplyStatus::Success, + message: format!("Applied task {} locally (mock)", id.0), + skipped_paths: Vec::new(), + conflict_paths: Vec::new(), + }) + } + + async fn apply_task_preflight( + &self, + id: TaskId, + _diff_override: Option, + ) -> Result { + Ok(ApplyOutcome { + applied: false, + status: crate::ApplyStatus::Success, + message: format!("Preflight passed for task {} (mock)", id.0), + skipped_paths: Vec::new(), + conflict_paths: Vec::new(), + }) + } + + async fn list_sibling_attempts( + &self, + task: TaskId, + _turn_id: String, + ) -> Result> { + if task.0 == "T-1000" { + return Ok(vec![TurnAttempt { + turn_id: "T-1000-attempt-2".to_string(), + attempt_placement: Some(1), + created_at: Some(Utc::now()), + status: AttemptStatus::Completed, + diff: Some(mock_diff_for(&task)), + messages: vec!["Mock alternate attempt".to_string()], + }]); + } + Ok(Vec::new()) + } + + async fn create_task( + &self, + env_id: &str, + prompt: &str, + git_ref: &str, + qa_mode: bool, + best_of_n: usize, + ) -> Result { + let _ = (env_id, prompt, git_ref, qa_mode, best_of_n); + let id = format!("task_local_{}", chrono::Utc::now().timestamp_millis()); + Ok(crate::CreatedTask { id: TaskId(id) }) + } +} + +fn mock_diff_for(id: &TaskId) -> String { + match id.0.as_str() { + "T-1000" => { + "diff --git a/README.md b/README.md\nindex 000000..111111 100644\n--- a/README.md\n+++ b/README.md\n@@ -1,2 +1,3 @@\n Intro\n-Hello\n+Hello, world!\n+Task: T-1000\n".to_string() + } + "T-1001" => { + "diff --git a/core/src/lib.rs b/core/src/lib.rs\nindex 000000..111111 100644\n--- a/core/src/lib.rs\n+++ b/core/src/lib.rs\n@@ -1,2 +1,1 @@\n-use foo;\n use bar;\n".to_string() + } + _ => { + "diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md\nindex 000000..111111 100644\n--- /dev/null\n+++ b/CONTRIBUTING.md\n@@ -0,0 +1,3 @@\n+## Contributing\n+Please open PRs.\n+Thanks!\n".to_string() + } + } +} + +fn count_from_unified(diff: &str) -> (usize, usize) { + if let Ok(patch) = diffy::Patch::from_str(diff) { + patch + .hunks() + .iter() + .flat_map(diffy::Hunk::lines) + .fold((0, 0), |(a, d), l| match l { + diffy::Line::Insert(_) => (a + 1, d), + diffy::Line::Delete(_) => (a, d + 1), + _ => (a, d), + }) + } else { + let mut a = 0; + let mut d = 0; + for l in diff.lines() { + if l.starts_with("+++") || l.starts_with("---") || l.starts_with("@@") { + continue; + } + match l.as_bytes().first() { + Some(b'+') => a += 1, + Some(b'-') => d += 1, + _ => {} + } + } + (a, d) + } +} diff --git a/codex-rs/cloud-tasks/Cargo.toml b/codex-rs/cloud-tasks/Cargo.toml new file mode 100644 index 00000000000..d0cee3ff1a5 --- /dev/null +++ b/codex-rs/cloud-tasks/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "codex-cloud-tasks" +version = { workspace = true } +edition = "2024" + +[lib] +name = "codex_cloud_tasks" +path = "src/lib.rs" + +[lints] +workspace = true + +[dependencies] +anyhow = "1" +clap = { version = "4", features = ["derive"] } +codex-common = { path = "../common", features = ["cli"] } +tokio = 
{ version = "1", features = ["macros", "rt-multi-thread"] } +tracing = { version = "0.1.41", features = ["log"] } +tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } +codex-cloud-tasks-client = { path = "../cloud-tasks-client", features = ["mock", "online"] } +ratatui = { version = "0.29.0" } +crossterm = { version = "0.28.1", features = ["event-stream"] } +tokio-stream = "0.1.17" +chrono = { version = "0.4", features = ["serde"] } +codex-login = { path = "../login" } +codex-core = { path = "../core" } +throbber-widgets-tui = "0.8.0" +base64 = "0.22" +serde_json = "1" +reqwest = { version = "0.12", features = ["json"] } +serde = { version = "1", features = ["derive"] } +unicode-width = "0.1" +codex-tui = { path = "../tui" } + +[dev-dependencies] +async-trait = "0.1" diff --git a/codex-rs/cloud-tasks/src/app.rs b/codex-rs/cloud-tasks/src/app.rs new file mode 100644 index 00000000000..adf17419ee0 --- /dev/null +++ b/codex-rs/cloud-tasks/src/app.rs @@ -0,0 +1,482 @@ +use std::time::Duration; + +// Environment filter data models for the TUI +#[derive(Clone, Debug, Default)] +pub struct EnvironmentRow { + pub id: String, + pub label: Option, + pub is_pinned: bool, + pub repo_hints: Option, // e.g., "openai/codex" +} + +#[derive(Clone, Debug, Default)] +pub struct EnvModalState { + pub query: String, + pub selected: usize, +} + +#[derive(Clone, Debug, Default)] +pub struct BestOfModalState { + pub selected: usize, +} + +#[derive(Clone, Debug, Copy, PartialEq, Eq)] +pub enum ApplyResultLevel { + Success, + Partial, + Error, +} + +#[derive(Clone, Debug)] +pub struct ApplyModalState { + pub task_id: TaskId, + pub title: String, + pub result_message: Option, + pub result_level: Option, + pub skipped_paths: Vec, + pub conflict_paths: Vec, + pub diff_override: Option, +} + +use crate::scrollable_diff::ScrollableDiff; +use codex_cloud_tasks_client::CloudBackend; +use codex_cloud_tasks_client::TaskId; +use codex_cloud_tasks_client::TaskSummary; +use throbber_widgets_tui::ThrobberState; + +#[derive(Default)] +pub struct App { + pub tasks: Vec, + pub selected: usize, + pub status: String, + pub diff_overlay: Option, + pub throbber: ThrobberState, + pub refresh_inflight: bool, + pub details_inflight: bool, + // Environment filter state + pub env_filter: Option, + pub env_modal: Option, + pub apply_modal: Option, + pub best_of_modal: Option, + pub environments: Vec, + pub env_last_loaded: Option, + pub env_loading: bool, + pub env_error: Option, + // New Task page + pub new_task: Option, + pub best_of_n: usize, + // Apply preflight spinner state + pub apply_preflight_inflight: bool, + // Apply action spinner state + pub apply_inflight: bool, + // Background enrichment coordination + pub list_generation: u64, + pub in_flight: std::collections::HashSet, + // Background enrichment caches were planned; currently unused. 
+} + +impl App { + pub fn new() -> Self { + Self { + tasks: Vec::new(), + selected: 0, + status: "Press r to refresh".to_string(), + diff_overlay: None, + throbber: ThrobberState::default(), + refresh_inflight: false, + details_inflight: false, + env_filter: None, + env_modal: None, + apply_modal: None, + best_of_modal: None, + environments: Vec::new(), + env_last_loaded: None, + env_loading: false, + env_error: None, + new_task: None, + best_of_n: 1, + apply_preflight_inflight: false, + apply_inflight: false, + list_generation: 0, + in_flight: std::collections::HashSet::new(), + } + } + + pub fn next(&mut self) { + if self.tasks.is_empty() { + return; + } + self.selected = (self.selected + 1).min(self.tasks.len().saturating_sub(1)); + } + + pub fn prev(&mut self) { + if self.tasks.is_empty() { + return; + } + if self.selected > 0 { + self.selected -= 1; + } + } +} + +pub async fn load_tasks( + backend: &dyn CloudBackend, + env: Option<&str>, +) -> anyhow::Result> { + // In later milestones, add a small debounce, spinner, and error display. + let tasks = tokio::time::timeout(Duration::from_secs(5), backend.list_tasks(env)).await??; + // Hide review-only tasks from the main list. + let filtered: Vec = tasks.into_iter().filter(|t| !t.is_review).collect(); + Ok(filtered) +} + +pub struct DiffOverlay { + pub title: String, + pub task_id: TaskId, + pub sd: ScrollableDiff, + pub base_can_apply: bool, + pub diff_lines: Vec, + pub text_lines: Vec, + pub prompt: Option, + pub attempts: Vec, + pub selected_attempt: usize, + pub current_view: DetailView, + pub base_turn_id: Option, + pub sibling_turn_ids: Vec, + pub attempt_total_hint: Option, +} + +#[derive(Clone, Debug, Default)] +pub struct AttemptView { + pub turn_id: Option, + pub status: codex_cloud_tasks_client::AttemptStatus, + pub attempt_placement: Option, + pub diff_lines: Vec, + pub text_lines: Vec, + pub prompt: Option, + pub diff_raw: Option, +} + +impl AttemptView { + pub fn has_diff(&self) -> bool { + !self.diff_lines.is_empty() + } + + pub fn has_text(&self) -> bool { + !self.text_lines.is_empty() || self.prompt.is_some() + } +} + +impl DiffOverlay { + pub fn new(task_id: TaskId, title: String, attempt_total_hint: Option) -> Self { + let mut sd = ScrollableDiff::new(); + sd.set_content(Vec::new()); + Self { + title, + task_id, + sd, + base_can_apply: false, + diff_lines: Vec::new(), + text_lines: Vec::new(), + prompt: None, + attempts: vec![AttemptView::default()], + selected_attempt: 0, + current_view: DetailView::Prompt, + base_turn_id: None, + sibling_turn_ids: Vec::new(), + attempt_total_hint, + } + } + + pub fn current_attempt(&self) -> Option<&AttemptView> { + self.attempts.get(self.selected_attempt) + } + + pub fn base_attempt_mut(&mut self) -> &mut AttemptView { + if self.attempts.is_empty() { + self.attempts.push(AttemptView::default()); + } + &mut self.attempts[0] + } + + pub fn set_view(&mut self, view: DetailView) { + self.current_view = view; + self.apply_selection_to_fields(); + } + + pub fn expected_attempts(&self) -> Option { + self.attempt_total_hint.or({ + if self.attempts.is_empty() { + None + } else { + Some(self.attempts.len()) + } + }) + } + + pub fn attempt_count(&self) -> usize { + self.attempts.len() + } + + pub fn attempt_display_total(&self) -> usize { + self.expected_attempts() + .unwrap_or_else(|| self.attempts.len().max(1)) + } + + pub fn step_attempt(&mut self, delta: isize) -> bool { + let total = self.attempts.len(); + if total <= 1 { + return false; + } + let total_isize = total as isize; + let 
current = self.selected_attempt as isize; + let mut next = current + delta; + next = ((next % total_isize) + total_isize) % total_isize; + let next = next as usize; + self.selected_attempt = next; + self.apply_selection_to_fields(); + true + } + + pub fn current_can_apply(&self) -> bool { + matches!(self.current_view, DetailView::Diff) + && self + .current_attempt() + .and_then(|attempt| attempt.diff_raw.as_ref()) + .map(|diff| !diff.is_empty()) + .unwrap_or(false) + } + + pub fn apply_selection_to_fields(&mut self) { + let (diff_lines, text_lines, prompt) = if let Some(attempt) = self.current_attempt() { + ( + attempt.diff_lines.clone(), + attempt.text_lines.clone(), + attempt.prompt.clone(), + ) + } else { + self.diff_lines.clear(); + self.text_lines.clear(); + self.prompt = None; + self.sd.set_content(vec!["".to_string()]); + return; + }; + + self.diff_lines = diff_lines.clone(); + self.text_lines = text_lines.clone(); + self.prompt = prompt; + + match self.current_view { + DetailView::Diff => { + if diff_lines.is_empty() { + self.sd.set_content(vec!["".to_string()]); + } else { + self.sd.set_content(diff_lines); + } + } + DetailView::Prompt => { + if text_lines.is_empty() { + self.sd.set_content(vec!["".to_string()]); + } else { + self.sd.set_content(text_lines); + } + } + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum DetailView { + Diff, + Prompt, +} + +/// Internal app events delivered from background tasks. +/// These let the UI event loop remain responsive and keep the spinner animating. +#[derive(Debug)] +pub enum AppEvent { + TasksLoaded { + env: Option, + result: anyhow::Result>, + }, + // Background diff summary events were planned; removed for now to keep code minimal. + /// Autodetection of a likely environment id finished + EnvironmentAutodetected(anyhow::Result), + /// Background completion of environment list fetch + EnvironmentsLoaded(anyhow::Result>), + DetailsDiffLoaded { + id: TaskId, + title: String, + diff: String, + }, + DetailsMessagesLoaded { + id: TaskId, + title: String, + messages: Vec, + prompt: Option, + turn_id: Option, + sibling_turn_ids: Vec, + attempt_placement: Option, + attempt_status: codex_cloud_tasks_client::AttemptStatus, + }, + DetailsFailed { + id: TaskId, + title: String, + error: String, + }, + AttemptsLoaded { + id: TaskId, + attempts: Vec, + }, + /// Background completion of new task submission + NewTaskSubmitted(Result), + /// Background completion of apply preflight when opening modal or on demand + ApplyPreflightFinished { + id: TaskId, + title: String, + message: String, + level: ApplyResultLevel, + skipped: Vec, + conflicts: Vec, + }, + /// Background completion of apply action (actual patch application) + ApplyFinished { + id: TaskId, + result: std::result::Result, + }, +} + +// Convenience aliases; currently unused. 
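// --- Editor's note (sketch, not part of this patch) ---------------------------
// The `AppEvent` enum above is the contract between background work and the UI
// loop: work is spawned onto Tokio and the result is sent back over an unbounded
// channel so the event loop never blocks. A minimal producer-side sketch,
// mirroring the spawn-and-send pattern used later in lib.rs; the helper name
// `spawn_refresh` is hypothetical.
fn spawn_refresh(
    backend: std::sync::Arc<dyn codex_cloud_tasks_client::CloudBackend>,
    tx: tokio::sync::mpsc::UnboundedSender<AppEvent>,
    env: Option<String>,
) {
    tokio::spawn(async move {
        // `load_tasks` applies a 5s timeout and filters out review-only tasks.
        let result = load_tasks(&*backend, env.as_deref()).await;
        // The UI loop drops results whose `env` no longer matches the active filter.
        let _ = tx.send(AppEvent::TasksLoaded { env, result });
    });
}
// ------------------------------------------------------------------------------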
+#[cfg(test)] +mod tests { + use super::*; + use chrono::Utc; + + struct FakeBackend { + // maps env key to titles + by_env: std::collections::HashMap, Vec<&'static str>>, + } + + #[async_trait::async_trait] + impl codex_cloud_tasks_client::CloudBackend for FakeBackend { + async fn list_tasks( + &self, + env: Option<&str>, + ) -> codex_cloud_tasks_client::Result> { + let key = env.map(str::to_string); + let titles = self + .by_env + .get(&key) + .cloned() + .unwrap_or_else(|| vec!["default-a", "default-b"]); + let mut out = Vec::new(); + for (i, t) in titles.into_iter().enumerate() { + out.push(TaskSummary { + id: TaskId(format!("T-{i}")), + title: t.to_string(), + status: codex_cloud_tasks_client::TaskStatus::Ready, + updated_at: Utc::now(), + environment_id: env.map(str::to_string), + environment_label: None, + summary: codex_cloud_tasks_client::DiffSummary::default(), + is_review: false, + attempt_total: Some(1), + }); + } + Ok(out) + } + + async fn get_task_diff( + &self, + _id: TaskId, + ) -> codex_cloud_tasks_client::Result> { + Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented( + "not used in test", + )) + } + + async fn get_task_messages( + &self, + _id: TaskId, + ) -> codex_cloud_tasks_client::Result> { + Ok(vec![]) + } + async fn get_task_text( + &self, + _id: TaskId, + ) -> codex_cloud_tasks_client::Result { + Ok(codex_cloud_tasks_client::TaskText { + prompt: Some("Example prompt".to_string()), + messages: Vec::new(), + turn_id: Some("fake-turn".to_string()), + sibling_turn_ids: Vec::new(), + attempt_placement: Some(0), + attempt_status: codex_cloud_tasks_client::AttemptStatus::Completed, + }) + } + + async fn list_sibling_attempts( + &self, + _task: TaskId, + _turn_id: String, + ) -> codex_cloud_tasks_client::Result> { + Ok(Vec::new()) + } + + async fn apply_task( + &self, + _id: TaskId, + _diff_override: Option, + ) -> codex_cloud_tasks_client::Result { + Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented( + "not used in test", + )) + } + + async fn apply_task_preflight( + &self, + _id: TaskId, + _diff_override: Option, + ) -> codex_cloud_tasks_client::Result { + Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented( + "not used in test", + )) + } + + async fn create_task( + &self, + _env_id: &str, + _prompt: &str, + _git_ref: &str, + _qa_mode: bool, + _best_of_n: usize, + ) -> codex_cloud_tasks_client::Result { + Err(codex_cloud_tasks_client::CloudTaskError::Unimplemented( + "not used in test", + )) + } + } + + #[tokio::test] + async fn load_tasks_uses_env_parameter() { + // Arrange: env-specific task titles + let mut by_env = std::collections::HashMap::new(); + by_env.insert(None, vec!["root-1", "root-2"]); + by_env.insert(Some("env-A".to_string()), vec!["A-1"]); + by_env.insert(Some("env-B".to_string()), vec!["B-1", "B-2", "B-3"]); + let backend = FakeBackend { by_env }; + + // Act + Assert + let root = load_tasks(&backend, None).await.unwrap(); + assert_eq!(root.len(), 2); + assert_eq!(root[0].title, "root-1"); + + let a = load_tasks(&backend, Some("env-A")).await.unwrap(); + assert_eq!(a.len(), 1); + assert_eq!(a[0].title, "A-1"); + + let b = load_tasks(&backend, Some("env-B")).await.unwrap(); + assert_eq!(b.len(), 3); + assert_eq!(b[2].title, "B-3"); + } +} diff --git a/codex-rs/cloud-tasks/src/cli.rs b/codex-rs/cloud-tasks/src/cli.rs new file mode 100644 index 00000000000..81125aeb1ce --- /dev/null +++ b/codex-rs/cloud-tasks/src/cli.rs @@ -0,0 +1,9 @@ +use clap::Parser; +use codex_common::CliConfigOverrides; + +#[derive(Parser, Debug, Default)] 
+#[command(version)] +pub struct Cli { + #[clap(skip)] + pub config_overrides: CliConfigOverrides, +} diff --git a/codex-rs/cloud-tasks/src/env_detect.rs b/codex-rs/cloud-tasks/src/env_detect.rs new file mode 100644 index 00000000000..e7e8fb6b16a --- /dev/null +++ b/codex-rs/cloud-tasks/src/env_detect.rs @@ -0,0 +1,361 @@ +use reqwest::header::CONTENT_TYPE; +use reqwest::header::HeaderMap; +use std::collections::HashMap; +use tracing::info; +use tracing::warn; + +#[derive(Debug, Clone, serde::Deserialize)] +struct CodeEnvironment { + id: String, + #[serde(default)] + label: Option, + #[serde(default)] + is_pinned: Option, + #[serde(default)] + task_count: Option, +} + +#[derive(Debug, Clone)] +pub struct AutodetectSelection { + pub id: String, + pub label: Option, +} + +pub async fn autodetect_environment_id( + base_url: &str, + headers: &HeaderMap, + desired_label: Option, +) -> anyhow::Result { + // 1) Try repo-specific environments based on local git origins (GitHub only, like VSCode) + let origins = get_git_origins(); + crate::append_error_log(format!("env: git origins: {origins:?}")); + let mut by_repo_envs: Vec = Vec::new(); + for origin in &origins { + if let Some((owner, repo)) = parse_owner_repo(origin) { + let url = if base_url.contains("/backend-api") { + format!( + "{}/wham/environments/by-repo/{}/{}/{}", + base_url, "github", owner, repo + ) + } else { + format!( + "{}/api/codex/environments/by-repo/{}/{}/{}", + base_url, "github", owner, repo + ) + }; + crate::append_error_log(format!("env: GET {url}")); + match get_json::>(&url, headers).await { + Ok(mut list) => { + crate::append_error_log(format!( + "env: by-repo returned {} env(s) for {owner}/{repo}", + list.len(), + )); + by_repo_envs.append(&mut list); + } + Err(e) => crate::append_error_log(format!( + "env: by-repo fetch failed for {owner}/{repo}: {e}" + )), + } + } + } + if let Some(env) = pick_environment_row(&by_repo_envs, desired_label.as_deref()) { + return Ok(AutodetectSelection { + id: env.id.clone(), + label: env.label.as_deref().map(str::to_owned), + }); + } + + // 2) Fallback to the full list + let list_url = if base_url.contains("/backend-api") { + format!("{base_url}/wham/environments") + } else { + format!("{base_url}/api/codex/environments") + }; + crate::append_error_log(format!("env: GET {list_url}")); + // Fetch and log the full environments JSON for debugging + let http = reqwest::Client::builder().build()?; + let res = http.get(&list_url).headers(headers.clone()).send().await?; + let status = res.status(); + let ct = res + .headers() + .get(CONTENT_TYPE) + .and_then(|v| v.to_str().ok()) + .unwrap_or("") + .to_string(); + let body = res.text().await.unwrap_or_default(); + crate::append_error_log(format!("env: status={status} content-type={ct}")); + match serde_json::from_str::(&body) { + Ok(v) => { + let pretty = serde_json::to_string_pretty(&v).unwrap_or(body.clone()); + crate::append_error_log(format!("env: /environments JSON (pretty):\n{pretty}")); + } + Err(_) => crate::append_error_log(format!("env: /environments (raw):\n{body}")), + } + if !status.is_success() { + anyhow::bail!("GET {list_url} failed: {status}; content-type={ct}; body={body}"); + } + let all_envs: Vec = serde_json::from_str(&body).map_err(|e| { + anyhow::anyhow!("Decode error for {list_url}: {e}; content-type={ct}; body={body}") + })?; + if let Some(env) = pick_environment_row(&all_envs, desired_label.as_deref()) { + return Ok(AutodetectSelection { + id: env.id.clone(), + label: env.label.as_deref().map(str::to_owned), + }); + } 
+ anyhow::bail!("no environments available") +} + +fn pick_environment_row( + envs: &[CodeEnvironment], + desired_label: Option<&str>, +) -> Option { + if envs.is_empty() { + return None; + } + if let Some(label) = desired_label { + let lc = label.to_lowercase(); + if let Some(e) = envs + .iter() + .find(|e| e.label.as_deref().unwrap_or("").to_lowercase() == lc) + { + crate::append_error_log(format!("env: matched by label: {label} -> {}", e.id)); + return Some(e.clone()); + } + } + if envs.len() == 1 { + crate::append_error_log("env: single environment available; selecting it"); + return Some(envs[0].clone()); + } + if let Some(e) = envs.iter().find(|e| e.is_pinned.unwrap_or(false)) { + crate::append_error_log(format!("env: selecting pinned environment: {}", e.id)); + return Some(e.clone()); + } + // Highest task_count as heuristic + if let Some(e) = envs + .iter() + .max_by_key(|e| e.task_count.unwrap_or(0)) + .or_else(|| envs.first()) + { + crate::append_error_log(format!("env: selecting by task_count/first: {}", e.id)); + return Some(e.clone()); + } + None +} + +async fn get_json( + url: &str, + headers: &HeaderMap, +) -> anyhow::Result { + let http = reqwest::Client::builder().build()?; + let res = http.get(url).headers(headers.clone()).send().await?; + let status = res.status(); + let ct = res + .headers() + .get(CONTENT_TYPE) + .and_then(|v| v.to_str().ok()) + .unwrap_or("") + .to_string(); + let body = res.text().await.unwrap_or_default(); + crate::append_error_log(format!("env: status={status} content-type={ct}")); + if !status.is_success() { + anyhow::bail!("GET {url} failed: {status}; content-type={ct}; body={body}"); + } + let parsed = serde_json::from_str::(&body).map_err(|e| { + anyhow::anyhow!("Decode error for {url}: {e}; content-type={ct}; body={body}") + })?; + Ok(parsed) +} + +fn get_git_origins() -> Vec { + // Prefer: git config --get-regexp remote\..*\.url + let out = std::process::Command::new("git") + .args(["config", "--get-regexp", "remote\\..*\\.url"]) + .output(); + if let Ok(ok) = out + && ok.status.success() + { + let s = String::from_utf8_lossy(&ok.stdout); + let mut urls = Vec::new(); + for line in s.lines() { + if let Some((_, url)) = line.split_once(' ') { + urls.push(url.trim().to_string()); + } + } + if !urls.is_empty() { + return uniq(urls); + } + } + // Fallback: git remote -v + let out = std::process::Command::new("git") + .args(["remote", "-v"]) + .output(); + if let Ok(ok) = out + && ok.status.success() + { + let s = String::from_utf8_lossy(&ok.stdout); + let mut urls = Vec::new(); + for line in s.lines() { + let parts: Vec<&str> = line.split_whitespace().collect(); + if parts.len() >= 2 { + urls.push(parts[1].to_string()); + } + } + if !urls.is_empty() { + return uniq(urls); + } + } + Vec::new() +} + +fn uniq(mut v: Vec) -> Vec { + v.sort(); + v.dedup(); + v +} + +fn parse_owner_repo(url: &str) -> Option<(String, String)> { + // Normalize common prefixes and handle multiple SSH/HTTPS variants. 
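// --- Editor's note (not part of this patch): concrete remote-URL shapes this
// function is written to accept, all resolving to the same (owner, repo) pair;
// `openai/codex` is only an illustrative remote. An explicit `ssh://` scheme
// prefix is stripped before matching.
//
//     parse_owner_repo("git@github.com:openai/codex.git")       // scp-style SSH, `.git` suffix
//     parse_owner_repo("org-123@github.com:openai/codex")       // any user before @github.com
//     parse_owner_repo("https://github.com/openai/codex.git")   // HTTPS clone URL
//     parse_owner_repo("git://github.com/openai/codex")         // git protocol
//
// Each of the above is expected to return Some(("openai", "codex")).
// ------------------------------------------------------------------------------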
+ let mut s = url.trim().to_string(); + // Drop protocol scheme for ssh URLs + if let Some(rest) = s.strip_prefix("ssh://") { + s = rest.to_string(); + } + // Accept any user before @github.com (e.g., git@, org-123@) + if let Some(idx) = s.find("@github.com:") { + let rest = &s[idx + "@github.com:".len()..]; + let rest = rest.trim_start_matches('/').trim_end_matches(".git"); + let mut parts = rest.splitn(2, '/'); + let owner = parts.next()?.to_string(); + let repo = parts.next()?.to_string(); + crate::append_error_log(format!("env: parsed SSH GitHub origin => {owner}/{repo}")); + return Some((owner, repo)); + } + // HTTPS or git protocol + for prefix in [ + "https://github.com/", + "http://github.com/", + "git://github.com/", + "github.com/", + ] { + if let Some(rest) = s.strip_prefix(prefix) { + let rest = rest.trim_start_matches('/').trim_end_matches(".git"); + let mut parts = rest.splitn(2, '/'); + let owner = parts.next()?.to_string(); + let repo = parts.next()?.to_string(); + crate::append_error_log(format!("env: parsed HTTP GitHub origin => {owner}/{repo}")); + return Some((owner, repo)); + } + } + None +} + +/// List environments for the current repo(s) with a fallback to the global list. +/// Returns a de-duplicated, sorted set suitable for the TUI modal. +pub async fn list_environments( + base_url: &str, + headers: &HeaderMap, +) -> anyhow::Result> { + let mut map: HashMap = HashMap::new(); + + // 1) By-repo lookup for each parsed GitHub origin + let origins = get_git_origins(); + for origin in &origins { + if let Some((owner, repo)) = parse_owner_repo(origin) { + let url = if base_url.contains("/backend-api") { + format!( + "{}/wham/environments/by-repo/{}/{}/{}", + base_url, "github", owner, repo + ) + } else { + format!( + "{}/api/codex/environments/by-repo/{}/{}/{}", + base_url, "github", owner, repo + ) + }; + match get_json::>(&url, headers).await { + Ok(list) => { + info!("env_tui: by-repo {}:{} -> {} envs", owner, repo, list.len()); + for e in list { + let entry = + map.entry(e.id.clone()) + .or_insert_with(|| crate::app::EnvironmentRow { + id: e.id.clone(), + label: e.label.clone(), + is_pinned: e.is_pinned.unwrap_or(false), + repo_hints: Some(format!("{owner}/{repo}")), + }); + // Merge: keep label if present, or use new; accumulate pinned flag + if entry.label.is_none() { + entry.label = e.label.clone(); + } + entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false); + if entry.repo_hints.is_none() { + entry.repo_hints = Some(format!("{owner}/{repo}")); + } + } + } + Err(e) => { + warn!( + "env_tui: by-repo fetch failed for {}/{}: {}", + owner, repo, e + ); + } + } + } + } + + // 2) Fallback to the full list; on error return what we have if any. 
+ let list_url = if base_url.contains("/backend-api") { + format!("{base_url}/wham/environments") + } else { + format!("{base_url}/api/codex/environments") + }; + match get_json::>(&list_url, headers).await { + Ok(list) => { + info!("env_tui: global list -> {} envs", list.len()); + for e in list { + let entry = map + .entry(e.id.clone()) + .or_insert_with(|| crate::app::EnvironmentRow { + id: e.id.clone(), + label: e.label.clone(), + is_pinned: e.is_pinned.unwrap_or(false), + repo_hints: None, + }); + if entry.label.is_none() { + entry.label = e.label.clone(); + } + entry.is_pinned = entry.is_pinned || e.is_pinned.unwrap_or(false); + } + } + Err(e) => { + if map.is_empty() { + return Err(e); + } else { + warn!( + "env_tui: global list failed; using by-repo results only: {}", + e + ); + } + } + } + + let mut rows: Vec = map.into_values().collect(); + rows.sort_by(|a, b| { + // pinned first + let p = b.is_pinned.cmp(&a.is_pinned); + if p != std::cmp::Ordering::Equal { + return p; + } + // then label (ci), then id + let al = a.label.as_deref().unwrap_or("").to_lowercase(); + let bl = b.label.as_deref().unwrap_or("").to_lowercase(); + let l = al.cmp(&bl); + if l != std::cmp::Ordering::Equal { + return l; + } + a.id.cmp(&b.id) + }); + Ok(rows) +} diff --git a/codex-rs/cloud-tasks/src/lib.rs b/codex-rs/cloud-tasks/src/lib.rs new file mode 100644 index 00000000000..da2d4eb9081 --- /dev/null +++ b/codex-rs/cloud-tasks/src/lib.rs @@ -0,0 +1,1634 @@ +mod app; +mod cli; +pub mod env_detect; +mod new_task; +pub mod scrollable_diff; +mod ui; +pub mod util; +pub use cli::Cli; + +use std::io::IsTerminal; +use std::path::PathBuf; +use std::sync::Arc; +use std::time::Duration; +use std::time::Instant; +use tokio::sync::mpsc::UnboundedSender; +use tracing::info; +use tracing_subscriber::EnvFilter; +use util::append_error_log; +use util::set_user_agent_suffix; + +struct ApplyJob { + task_id: codex_cloud_tasks_client::TaskId, + diff_override: Option, +} + +fn level_from_status(status: codex_cloud_tasks_client::ApplyStatus) -> app::ApplyResultLevel { + match status { + codex_cloud_tasks_client::ApplyStatus::Success => app::ApplyResultLevel::Success, + codex_cloud_tasks_client::ApplyStatus::Partial => app::ApplyResultLevel::Partial, + codex_cloud_tasks_client::ApplyStatus::Error => app::ApplyResultLevel::Error, + } +} + +fn spawn_preflight( + app: &mut app::App, + backend: &Arc, + tx: &UnboundedSender, + frame_tx: &UnboundedSender, + title: String, + job: ApplyJob, +) -> bool { + if app.apply_inflight { + app.status = "An apply is already running; wait for it to finish first.".to_string(); + return false; + } + if app.apply_preflight_inflight { + app.status = "A preflight is already running; wait for it to finish first.".to_string(); + return false; + } + + app.apply_preflight_inflight = true; + let _ = frame_tx.send(Instant::now() + Duration::from_millis(100)); + + let backend = backend.clone(); + let tx = tx.clone(); + tokio::spawn(async move { + let ApplyJob { + task_id, + diff_override, + } = job; + let result = codex_cloud_tasks_client::CloudBackend::apply_task_preflight( + &*backend, + task_id.clone(), + diff_override, + ) + .await; + + let event = match result { + Ok(outcome) => { + let level = level_from_status(outcome.status); + app::AppEvent::ApplyPreflightFinished { + id: task_id, + title, + message: outcome.message, + level, + skipped: outcome.skipped_paths, + conflicts: outcome.conflict_paths, + } + } + Err(e) => app::AppEvent::ApplyPreflightFinished { + id: task_id, + title, + message: 
format!("Preflight failed: {e}"), + level: app::ApplyResultLevel::Error, + skipped: Vec::new(), + conflicts: Vec::new(), + }, + }; + + let _ = tx.send(event); + }); + + true +} + +fn spawn_apply( + app: &mut app::App, + backend: &Arc, + tx: &UnboundedSender, + frame_tx: &UnboundedSender, + job: ApplyJob, +) -> bool { + if app.apply_inflight { + app.status = "An apply is already running; wait for it to finish first.".to_string(); + return false; + } + if app.apply_preflight_inflight { + app.status = "Finish the current preflight before starting another apply.".to_string(); + return false; + } + + app.apply_inflight = true; + let _ = frame_tx.send(Instant::now() + Duration::from_millis(100)); + + let backend = backend.clone(); + let tx = tx.clone(); + tokio::spawn(async move { + let ApplyJob { + task_id, + diff_override, + } = job; + let result = codex_cloud_tasks_client::CloudBackend::apply_task( + &*backend, + task_id.clone(), + diff_override, + ) + .await; + + let event = match result { + Ok(outcome) => app::AppEvent::ApplyFinished { + id: task_id, + result: Ok(outcome), + }, + Err(e) => app::AppEvent::ApplyFinished { + id: task_id, + result: Err(format!("{e}")), + }, + }; + + let _ = tx.send(event); + }); + + true +} + +// logging helper lives in util module + +// (no standalone patch summarizer needed – UI displays raw diffs) + +/// Entry point for the `codex cloud` subcommand. +pub async fn run_main(_cli: Cli, _codex_linux_sandbox_exe: Option) -> anyhow::Result<()> { + // Very minimal logging setup; mirrors other crates' pattern. + let default_level = "error"; + let _ = tracing_subscriber::fmt() + .with_env_filter( + EnvFilter::try_from_default_env() + .or_else(|_| EnvFilter::try_new(default_level)) + .unwrap_or_else(|_| EnvFilter::new(default_level)), + ) + .with_ansi(std::io::stderr().is_terminal()) + .with_writer(std::io::stderr) + .try_init(); + + info!("Launching Cloud Tasks list UI"); + set_user_agent_suffix("codex_cloud_tasks_tui"); + + // Default to online unless explicitly configured to use mock. + let use_mock = matches!( + std::env::var("CODEX_CLOUD_TASKS_MODE").ok().as_deref(), + Some("mock") | Some("MOCK") + ); + + let backend: Arc = if use_mock { + Arc::new(codex_cloud_tasks_client::MockClient) + } else { + // Build an HTTP client against the configured (or default) base URL. + let base_url = std::env::var("CODEX_CLOUD_TASKS_BASE_URL") + .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()); + let ua = codex_core::default_client::get_codex_user_agent(); + let mut http = + codex_cloud_tasks_client::HttpClient::new(base_url.clone())?.with_user_agent(ua); + // Log which base URL and path style we're going to use. + let style = if base_url.contains("/backend-api") { + "wham" + } else { + "codex-api" + }; + append_error_log(format!("startup: base_url={base_url} path_style={style}")); + + // Require ChatGPT login (SWIC). Exit with a clear message if missing. + let _token = match codex_core::config::find_codex_home() + .ok() + .map(|home| codex_login::AuthManager::new(home, false)) + .and_then(|am| am.auth()) + { + Some(auth) => { + // Log account context for debugging workspace selection. 
+ if let Some(acc) = auth.get_account_id() { + append_error_log(format!("auth: mode=ChatGPT account_id={acc}")); + } + match auth.get_token().await { + Ok(t) if !t.is_empty() => { + // Attach token and ChatGPT-Account-Id header if available + http = http.with_bearer_token(t.clone()); + if let Some(acc) = auth + .get_account_id() + .or_else(|| util::extract_chatgpt_account_id(&t)) + { + append_error_log(format!("auth: set ChatGPT-Account-Id header: {acc}")); + http = http.with_chatgpt_account_id(acc); + } + t + } + _ => { + eprintln!( + "Not signed in. Please run 'codex login' to sign in with ChatGPT, then re-run 'codex cloud'." + ); + std::process::exit(1); + } + } + } + None => { + eprintln!( + "Not signed in. Please run 'codex login' to sign in with ChatGPT, then re-run 'codex cloud'." + ); + std::process::exit(1); + } + }; + Arc::new(http) + }; + + // Terminal setup + use crossterm::ExecutableCommand; + use crossterm::event::DisableBracketedPaste; + use crossterm::event::EnableBracketedPaste; + use crossterm::event::KeyboardEnhancementFlags; + use crossterm::event::PopKeyboardEnhancementFlags; + use crossterm::event::PushKeyboardEnhancementFlags; + use crossterm::terminal::EnterAlternateScreen; + use crossterm::terminal::LeaveAlternateScreen; + use crossterm::terminal::disable_raw_mode; + use crossterm::terminal::enable_raw_mode; + use ratatui::Terminal; + use ratatui::backend::CrosstermBackend; + let mut stdout = std::io::stdout(); + enable_raw_mode()?; + stdout.execute(EnterAlternateScreen)?; + stdout.execute(EnableBracketedPaste)?; + // Enable enhanced key reporting so Shift+Enter is distinguishable from Enter. + // Some terminals may not support these flags; ignore errors if enabling fails. + let _ = crossterm::execute!( + std::io::stdout(), + PushKeyboardEnhancementFlags( + KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES + | KeyboardEnhancementFlags::REPORT_EVENT_TYPES + | KeyboardEnhancementFlags::REPORT_ALTERNATE_KEYS + ) + ); + let backend_ui = CrosstermBackend::new(stdout); + let mut terminal = Terminal::new(backend_ui)?; + terminal.clear()?; + + // App state + let mut app = app::App::new(); + // Initial load + let force_internal = matches!( + std::env::var("CODEX_CLOUD_TASKS_FORCE_INTERNAL") + .ok() + .as_deref(), + Some("1") | Some("true") | Some("TRUE") + ); + append_error_log(format!( + "startup: wham_force_internal={} ua={}", + force_internal, + codex_core::default_client::get_codex_user_agent() + )); + // Non-blocking initial load so the in-box spinner can animate + app.status = "Loading tasks…".to_string(); + app.refresh_inflight = true; + // New list generation; reset background enrichment coordination + app.list_generation = app.list_generation.saturating_add(1); + app.in_flight.clear(); + // reset any in-flight enrichment state + + // Event stream + use crossterm::event::Event; + use crossterm::event::EventStream; + use crossterm::event::KeyCode; + use crossterm::event::KeyEventKind; + use crossterm::event::KeyModifiers; + use tokio_stream::StreamExt; + let mut events = EventStream::new(); + + // Channel for non-blocking background loads + use tokio::sync::mpsc::unbounded_channel; + let (tx, mut rx) = unbounded_channel::(); + // Kick off the initial load in background + { + let backend = Arc::clone(&backend); + let tx = tx.clone(); + tokio::spawn(async move { + let res = app::load_tasks(&*backend, None).await; + let _ = tx.send(app::AppEvent::TasksLoaded { + env: None, + result: res, + }); + }); + } + // Fetch environment list in parallel so the header can show 
friendly names quickly. + { + let tx = tx.clone(); + tokio::spawn(async move { + let base_url = util::normalize_base_url( + &std::env::var("CODEX_CLOUD_TASKS_BASE_URL") + .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()), + ); + let headers = util::build_chatgpt_headers().await; + let res = crate::env_detect::list_environments(&base_url, &headers).await; + let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res)); + }); + } + + // Try to auto-detect a likely environment id on startup and refresh if found. + // Do this concurrently so the initial list shows quickly; on success we refetch with filter. + { + let tx = tx.clone(); + tokio::spawn(async move { + let base_url = util::normalize_base_url( + &std::env::var("CODEX_CLOUD_TASKS_BASE_URL") + .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()), + ); + // Build headers: UA + ChatGPT auth if available + let headers = util::build_chatgpt_headers().await; + + // Run autodetect. If it fails, we keep using "All". + let res = crate::env_detect::autodetect_environment_id(&base_url, &headers, None).await; + let _ = tx.send(app::AppEvent::EnvironmentAutodetected(res)); + }); + } + + // Event-driven redraws with a tiny coalescing scheduler (snappy UI, no fixed 250ms tick). + let mut needs_redraw = true; + use std::time::Instant; + use tokio::time::Instant as TokioInstant; + use tokio::time::sleep_until; + let (frame_tx, mut frame_rx) = tokio::sync::mpsc::unbounded_channel::(); + let (redraw_tx, mut redraw_rx) = tokio::sync::mpsc::unbounded_channel::<()>(); + + // Coalesce frame requests to the earliest deadline; emit a single redraw signal. + tokio::spawn(async move { + let mut next_deadline: Option = None; + loop { + let target = + next_deadline.unwrap_or_else(|| Instant::now() + Duration::from_secs(24 * 60 * 60)); + let sleeper = sleep_until(TokioInstant::from_std(target)); + tokio::pin!(sleeper); + tokio::select! { + recv = frame_rx.recv() => { + match recv { + Some(at) => { + if next_deadline.is_none_or(|cur| at < cur) { + next_deadline = Some(at); + } + continue; // recompute sleep target + } + None => break, + } + } + _ = &mut sleeper => { + if next_deadline.take().is_some() { + let _ = redraw_tx.send(()); + } + } + } + } + }); + // Kick an initial draw so the UI appears immediately. + let _ = frame_tx.send(Instant::now()); + + // Render helper to centralize immediate redraws after handling events. + let render_if_needed = |terminal: &mut Terminal>, + app: &mut app::App, + needs_redraw: &mut bool| + -> anyhow::Result<()> { + if *needs_redraw { + terminal.draw(|f| ui::draw(f, app))?; + *needs_redraw = false; + } + Ok(()) + }; + + let exit_code = loop { + tokio::select! { + // Coalesced redraw requests: spinner animation and paste-burst micro‑flush. + Some(()) = redraw_rx.recv() => { + // Micro‑flush pending first key held by paste‑burst. + if let Some(page) = app.new_task.as_mut() { + if page.composer.flush_paste_burst_if_due() { needs_redraw = true; } + if page.composer.is_in_paste_burst() { + let _ = frame_tx.send(Instant::now() + codex_tui::ComposerInput::recommended_flush_delay()); + } + } + // Advance throbber only while loading. 
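// --- Editor's note (not part of this patch): `frame_tx` is the input to the
// coalescing scheduler spawned above. Senders pass the Instant at which they
// want the next frame; the scheduler keeps only the earliest pending deadline
// and emits a single redraw signal when it expires, so the ~100ms spinner frames
// below and the immediate `Instant::now()` requests elsewhere collapse into one
// draw per deadline instead of a fixed tick.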
+ if app.refresh_inflight + || app.details_inflight + || app.env_loading + || app.apply_preflight_inflight + || app.apply_inflight + { + app.throbber.calc_next(); + needs_redraw = true; + let _ = frame_tx.send(Instant::now() + Duration::from_millis(100)); + } + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + } + maybe_app_event = rx.recv() => { + if let Some(ev) = maybe_app_event { + match ev { + app::AppEvent::TasksLoaded { env, result } => { + // Only apply results for the current filter to avoid races. + if env.as_deref() != app.env_filter.as_deref() { + append_error_log(format!( + "refresh.drop: env={} current={}", + env.clone().unwrap_or_else(|| "".to_string()), + app.env_filter.clone().unwrap_or_else(|| "".to_string()) + )); + continue; + } + app.refresh_inflight = false; + match result { + Ok(tasks) => { + append_error_log(format!( + "refresh.apply: env={} count={}", + env.clone().unwrap_or_else(|| "".to_string()), + tasks.len() + )); + app.tasks = tasks; + if app.selected >= app.tasks.len() { app.selected = app.tasks.len().saturating_sub(1); } + app.status = "Loaded tasks".to_string(); + } + Err(e) => { + append_error_log(format!("refresh load_tasks failed: {e}")); + app.status = format!("Failed to load tasks: {e}"); + } + } + needs_redraw = true; + let _ = frame_tx.send(Instant::now()); + } + app::AppEvent::NewTaskSubmitted(result) => { + match result { + Ok(created) => { + append_error_log(format!("new-task: created id={}", created.id.0)); + app.status = format!("Submitted as {}", created.id.0); + app.new_task = None; + // Refresh tasks in background for current filter + app.status = format!("Submitted as {} — refreshing…", created.id.0); + app.refresh_inflight = true; + app.list_generation = app.list_generation.saturating_add(1); + needs_redraw = true; + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let env_sel = app.env_filter.clone(); + tokio::spawn(async move { + let res = app::load_tasks(&*backend, env_sel.as_deref()).await; + let _ = tx.send(app::AppEvent::TasksLoaded { env: env_sel, result: res }); + }); + let _ = frame_tx.send(Instant::now()); + } + Err(msg) => { + append_error_log(format!("new-task: submit failed: {msg}")); + if let Some(page) = app.new_task.as_mut() { page.submitting = false; } + app.status = format!("Submit failed: {msg}. See error.log for details."); + needs_redraw = true; + let _ = frame_tx.send(Instant::now()); + } + } + } + // (removed TaskSummaryUpdated; unused in this prototype) + app::AppEvent::ApplyPreflightFinished { id, title, message, level, skipped, conflicts } => { + // Only update if modal is still open and ids match + if let Some(m) = app.apply_modal.as_mut() + && m.task_id == id + { + m.title = title; + m.result_message = Some(message); + m.result_level = Some(level); + m.skipped_paths = skipped; + m.conflict_paths = conflicts; + app.apply_preflight_inflight = false; + needs_redraw = true; + let _ = frame_tx.send(Instant::now()); + } + } + app::AppEvent::EnvironmentsLoaded(result) => { + app.env_loading = false; + match result { + Ok(list) => { + app.environments = list; + app.env_error = None; + app.env_last_loaded = Some(std::time::Instant::now()); + } + Err(e) => { + app.env_error = Some(e.to_string()); + } + } + needs_redraw = true; + let _ = frame_tx.send(Instant::now()); + } + app::AppEvent::EnvironmentAutodetected(result) => { + if let Ok(sel) = result { + // Only apply if user hasn't set a filter yet or it's different. 
+ if app.env_filter.as_deref() != Some(sel.id.as_str()) { + append_error_log(format!( + "env.select: autodetected id={} label={}", + sel.id, + sel.label.clone().unwrap_or_else(|| "".to_string()) + )); + // Preseed environments with detected label so header can show it even before list arrives + if let Some(lbl) = sel.label.clone() { + let present = app.environments.iter().any(|r| r.id == sel.id); + if !present { + app.environments.push(app::EnvironmentRow { id: sel.id.clone(), label: Some(lbl), is_pinned: false, repo_hints: None }); + } + } + app.env_filter = Some(sel.id); + app.status = "Loading tasks…".to_string(); + app.refresh_inflight = true; + app.list_generation = app.list_generation.saturating_add(1); + app.in_flight.clear(); + // reset spinner state + needs_redraw = true; + { + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let env_sel = app.env_filter.clone(); + tokio::spawn(async move { + let res = app::load_tasks(&*backend, env_sel.as_deref()).await; + let _ = tx.send(app::AppEvent::TasksLoaded { env: env_sel, result: res }); + }); + } + // Proactively fetch environments to resolve a friendly name for the header. + app.env_loading = true; + { + let tx = tx.clone(); + tokio::spawn(async move { + let base_url = crate::util::normalize_base_url( + &std::env::var("CODEX_CLOUD_TASKS_BASE_URL") + .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()), + ); + let headers = crate::util::build_chatgpt_headers().await; + let res = crate::env_detect::list_environments(&base_url, &headers).await; + let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res)); + }); + } + let _ = frame_tx.send(Instant::now()); + } + } + // on Err, silently continue with All + } + app::AppEvent::DetailsDiffLoaded { id, title, diff } => { + if let Some(ov) = &app.diff_overlay + && ov.task_id != id { + continue; + } + let diff_lines: Vec = diff.lines().map(str::to_string).collect(); + if let Some(ov) = app.diff_overlay.as_mut() { + ov.title = title; + { + let base = ov.base_attempt_mut(); + base.diff_lines = diff_lines.clone(); + base.diff_raw = Some(diff.clone()); + } + ov.base_can_apply = true; + ov.apply_selection_to_fields(); + } else { + let mut overlay = app::DiffOverlay::new(id.clone(), title, None); + { + let base = overlay.base_attempt_mut(); + base.diff_lines = diff_lines.clone(); + base.diff_raw = Some(diff.clone()); + } + overlay.base_can_apply = true; + overlay.current_view = app::DetailView::Diff; + overlay.apply_selection_to_fields(); + app.diff_overlay = Some(overlay); + } + app.details_inflight = false; + app.status.clear(); + needs_redraw = true; + } + app::AppEvent::DetailsMessagesLoaded { + id, + title, + messages, + prompt, + turn_id, + sibling_turn_ids, + attempt_placement, + attempt_status, + } => { + if let Some(ov) = &app.diff_overlay + && ov.task_id != id { + continue; + } + let conv = conversation_lines(prompt.clone(), &messages); + if let Some(ov) = app.diff_overlay.as_mut() { + ov.title = title.clone(); + { + let base = ov.base_attempt_mut(); + base.text_lines = conv.clone(); + base.prompt = prompt.clone(); + base.turn_id = turn_id.clone(); + base.status = attempt_status; + base.attempt_placement = attempt_placement; + } + ov.base_turn_id = turn_id.clone(); + ov.sibling_turn_ids = sibling_turn_ids.clone(); + ov.attempt_total_hint = Some(sibling_turn_ids.len().saturating_add(1)); + if !ov.base_can_apply { + ov.current_view = app::DetailView::Prompt; + } + ov.apply_selection_to_fields(); + if let (Some(turn_id), true) = (turn_id.clone(), 
!sibling_turn_ids.is_empty()) + && ov.attempts.len() == 1 { + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let task_id = id.clone(); + tokio::spawn(async move { + match codex_cloud_tasks_client::CloudBackend::list_sibling_attempts( + &*backend, + task_id.clone(), + turn_id, + ) + .await + { + Ok(attempts) => { + let _ = tx.send(app::AppEvent::AttemptsLoaded { id: task_id, attempts }); + } + Err(e) => { + crate::util::append_error_log(format!( + "attempts.load failed for {}: {e}", + task_id.0 + )); + } + } + }); + } + } else { + let mut overlay = app::DiffOverlay::new(id.clone(), title, None); + { + let base = overlay.base_attempt_mut(); + base.text_lines = conv.clone(); + base.prompt = prompt.clone(); + base.turn_id = turn_id.clone(); + base.status = attempt_status; + base.attempt_placement = attempt_placement; + } + overlay.base_turn_id = turn_id.clone(); + overlay.sibling_turn_ids = sibling_turn_ids.clone(); + overlay.attempt_total_hint = Some(sibling_turn_ids.len().saturating_add(1)); + overlay.current_view = app::DetailView::Prompt; + overlay.apply_selection_to_fields(); + app.diff_overlay = Some(overlay); + } + app.details_inflight = false; + app.status.clear(); + needs_redraw = true; + } + app::AppEvent::AttemptsLoaded { id, attempts } => { + if let Some(ov) = app.diff_overlay.as_mut() { + if ov.task_id != id { + continue; + } + for attempt in attempts { + if ov + .attempts + .iter() + .any(|existing| existing.turn_id.as_deref() == Some(attempt.turn_id.as_str())) + { + continue; + } + let diff_lines = attempt + .diff + .as_ref() + .map(|d| d.lines().map(str::to_string).collect()) + .unwrap_or_default(); + let text_lines = conversation_lines(None, &attempt.messages); + ov.attempts.push(app::AttemptView { + turn_id: Some(attempt.turn_id.clone()), + status: attempt.status, + attempt_placement: attempt.attempt_placement, + diff_lines, + text_lines, + prompt: None, + diff_raw: attempt.diff.clone(), + }); + } + if ov.attempts.len() > 1 { + let (_, rest) = ov.attempts.split_at_mut(1); + rest.sort_by(|a, b| match (a.attempt_placement, b.attempt_placement) { + (Some(lhs), Some(rhs)) => lhs.cmp(&rhs), + (Some(_), None) => std::cmp::Ordering::Less, + (None, Some(_)) => std::cmp::Ordering::Greater, + (None, None) => a.turn_id.cmp(&b.turn_id), + }); + } + if ov.selected_attempt >= ov.attempts.len() { + ov.selected_attempt = ov.attempts.len().saturating_sub(1); + } + ov.attempt_total_hint = Some(ov.attempts.len()); + ov.apply_selection_to_fields(); + needs_redraw = true; + } + } + app::AppEvent::DetailsFailed { id, title, error } => { + if let Some(ov) = &app.diff_overlay + && ov.task_id != id { + continue; + } + append_error_log(format!("details failed for {}: {error}", id.0)); + let pretty = pretty_lines_from_error(&error); + if let Some(ov) = app.diff_overlay.as_mut() { + ov.title = title.clone(); + { + let base = ov.base_attempt_mut(); + base.diff_lines.clear(); + base.text_lines = pretty.clone(); + base.prompt = None; + } + ov.base_can_apply = false; + ov.current_view = app::DetailView::Prompt; + ov.apply_selection_to_fields(); + } else { + let mut overlay = app::DiffOverlay::new(id.clone(), title, None); + { + let base = overlay.base_attempt_mut(); + base.text_lines = pretty; + } + overlay.base_can_apply = false; + overlay.current_view = app::DetailView::Prompt; + overlay.apply_selection_to_fields(); + app.diff_overlay = Some(overlay); + } + app.details_inflight = false; + needs_redraw = true; + } + app::AppEvent::ApplyFinished { id, result } => { + // Only update if the 
modal still corresponds to this id. + if let Some(m) = &app.apply_modal { + if m.task_id != id { continue; } + } else { + continue; + } + app.apply_inflight = false; + match result { + Ok(outcome) => { + app.status = outcome.message.clone(); + if matches!(outcome.status, codex_cloud_tasks_client::ApplyStatus::Success) { + app.apply_modal = None; + app.diff_overlay = None; + // Refresh tasks after successful apply + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let env_sel = app.env_filter.clone(); + tokio::spawn(async move { + let res = app::load_tasks(&*backend, env_sel.as_deref()).await; + let _ = tx.send(app::AppEvent::TasksLoaded { env: env_sel, result: res }); + }); + } + } + Err(e) => { + append_error_log(format!("apply_task failed for {}: {e}", id.0)); + app.status = format!("Apply failed: {e}"); + } + } + needs_redraw = true; + } + } + } + // Render immediately after processing app events. + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + } + maybe_event = events.next() => { + match maybe_event { + Some(Ok(Event::Paste(pasted))) => { + if app.env_modal.is_some() { + if let Some(m) = app.env_modal.as_mut() { + for ch in pasted.chars() { + match ch { + '\r' | '\n' => continue, + '\t' => m.query.push(' '), + _ => m.query.push(ch), + } + } + } + needs_redraw = true; + } else if let Some(page) = app.new_task.as_mut() + && !page.submitting + { + if page.composer.handle_paste(pasted) { + needs_redraw = true; + } + let _ = frame_tx.send(Instant::now()); + } + } + Some(Ok(Event::Key(key))) if matches!(key.kind, KeyEventKind::Press | KeyEventKind::Repeat) => { + // Treat Ctrl-C like pressing 'q' in the current context. + if key.modifiers.contains(KeyModifiers::CONTROL) + && matches!(key.code, KeyCode::Char('c') | KeyCode::Char('C')) + { + if app.env_modal.is_some() { + // Close environment selector if open (don’t quit composer). + app.env_modal = None; + needs_redraw = true; + } else if app.best_of_modal.is_some() { + app.best_of_modal = None; + needs_redraw = true; + } else if app.apply_modal.is_some() { + app.apply_modal = None; + app.status = "Apply canceled".to_string(); + needs_redraw = true; + } else if app.new_task.is_some() { + app.new_task = None; + app.status = "Canceled new task".to_string(); + needs_redraw = true; + } else if app.diff_overlay.is_some() { + app.diff_overlay = None; + needs_redraw = true; + } else { + break 0; + } + // Render updated state immediately before continuing to next loop iteration. + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + // Render after New Task branch to reflect input changes immediately. 
+ render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + continue; + } + let is_ctrl_n = key.modifiers.contains(KeyModifiers::CONTROL) + && matches!(key.code, KeyCode::Char('n') | KeyCode::Char('N')) + || matches!(key.code, KeyCode::Char('\u{000E}')); + if is_ctrl_n { + if app.new_task.is_none() { + continue; + } + if app.best_of_modal.is_some() { + app.best_of_modal = None; + needs_redraw = true; + } else { + let selected = app.best_of_n.saturating_sub(1).min(3); + app.best_of_modal = Some(app::BestOfModalState { selected }); + app.status = format!( + "Select best-of attempts (current: {} attempt{})", + app.best_of_n, + if app.best_of_n == 1 { "" } else { "s" } + ); + needs_redraw = true; + } + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + continue; + } + if app.best_of_modal.is_some() { + match key.code { + KeyCode::Esc => { + app.best_of_modal = None; + needs_redraw = true; + } + KeyCode::Down | KeyCode::Char('j') => { + if let Some(m) = app.best_of_modal.as_mut() { + m.selected = (m.selected + 1).min(3); + } + needs_redraw = true; + } + KeyCode::Up | KeyCode::Char('k') => { + if let Some(m) = app.best_of_modal.as_mut() { + m.selected = m.selected.saturating_sub(1); + } + needs_redraw = true; + } + KeyCode::Char('1') | KeyCode::Char('2') | KeyCode::Char('3') | KeyCode::Char('4') => { + if let Some(m) = app.best_of_modal.as_mut() { + let val = match key.code { + KeyCode::Char('1') => 0, + KeyCode::Char('2') => 1, + KeyCode::Char('3') => 2, + KeyCode::Char('4') => 3, + _ => m.selected, + }; + m.selected = val; + } + needs_redraw = true; + } + KeyCode::Enter => { + if let Some(state) = app.best_of_modal.take() { + let new_value = state.selected + 1; + app.best_of_n = new_value; + if let Some(page) = app.new_task.as_mut() { + page.best_of_n = new_value; + } + append_error_log(format!("best-of.select: attempts={new_value}")); + app.status = format!( + "Best-of updated to {new_value} attempt{}", + if new_value == 1 { "" } else { "s" } + ); + needs_redraw = true; + } + } + _ => {} + } + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + continue; + } + // New Task page: Ctrl+O opens environment switcher while composing. + let is_ctrl_o = key.modifiers.contains(KeyModifiers::CONTROL) + && matches!(key.code, KeyCode::Char('o') | KeyCode::Char('O')) + || matches!(key.code, KeyCode::Char('\u{000F}')); + if is_ctrl_o && app.new_task.is_some() { + // Close task modal/pending apply if present before opening env modal + app.diff_overlay = None; + app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 }); + // Cache environments until user explicitly refreshes with 'r' inside the modal. + let should_fetch = app.environments.is_empty(); + if should_fetch { + app.env_loading = true; + app.env_error = None; + // Ensure spinner animates while loading environments. + let _ = frame_tx.send(Instant::now() + Duration::from_millis(100)); + } + needs_redraw = true; + if should_fetch { + let tx = tx.clone(); + tokio::spawn(async move { + let base_url = crate::util::normalize_base_url(&std::env::var("CODEX_CLOUD_TASKS_BASE_URL").unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string())); + let headers = crate::util::build_chatgpt_headers().await; + let res = crate::env_detect::list_environments(&base_url, &headers).await; + let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res)); + }); + } + // Render after opening env modal to show it instantly. 
+ render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + continue; + } + + // New Task page has priority when active, unless an env modal is open. + if let Some(page) = app.new_task.as_mut() { + if app.env_modal.is_some() { + // Defer handling to env-modal branch below. + } else { + match key.code { + KeyCode::Esc => { + app.new_task = None; + app.status = "Canceled new task".to_string(); + needs_redraw = true; + } + _ => { + if page.submitting { + // Ignore input while submitting + } else if let codex_tui::ComposerAction::Submitted(text) = page.composer.input(key) { + // Submit only if we have an env id + if let Some(env) = page.env_id.clone() { + append_error_log(format!( + "new-task: submit env={} size={}", + env, + text.chars().count() + )); + page.submitting = true; + app.status = "Submitting new task…".to_string(); + let tx = tx.clone(); + let backend = Arc::clone(&backend); + let best_of_n = page.best_of_n; + tokio::spawn(async move { + let result = codex_cloud_tasks_client::CloudBackend::create_task(&*backend, &env, &text, "main", false, best_of_n).await; + let evt = match result { + Ok(ok) => app::AppEvent::NewTaskSubmitted(Ok(ok)), + Err(e) => app::AppEvent::NewTaskSubmitted(Err(format!("{e}"))), + }; + let _ = tx.send(evt); + }); + } else { + app.status = "No environment selected (press 'e' to choose)".to_string(); + } + } + needs_redraw = true; + // If paste‑burst is active, schedule a micro‑flush frame. + if page.composer.is_in_paste_burst() { + let _ = frame_tx.send(Instant::now() + codex_tui::ComposerInput::recommended_flush_delay()); + } + // Always schedule an immediate redraw for key edits in the composer. + let _ = frame_tx.send(Instant::now()); + // Draw now so non-char edits (e.g., Option+Delete) reflect instantly. + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + } + } + continue; + } + } + // If a diff overlay is open, handle its keys first. 
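// --- Editor's note (not part of this patch): key routing below resolves in
// priority order — the apply confirmation modal first, then the diff overlay,
// then the environment selector, and finally the base task list. The branches
// above already consumed keys for the best-of modal and the New Task composer
// (which defers to the environment modal when it is open).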
+ if app.apply_modal.is_some() { + // Simple apply confirmation modal: y apply, p preflight, n/Esc cancel + match key.code { + KeyCode::Char('y') => { + if let Some(m) = app.apply_modal.as_ref() { + let title = m.title.clone(); + let job = ApplyJob { + task_id: m.task_id.clone(), + diff_override: m.diff_override.clone(), + }; + if spawn_apply(&mut app, &backend, &tx, &frame_tx, job) { + app.status = format!("Applying '{title}'..."); + } + needs_redraw = true; + } + } + KeyCode::Char('p') => { + if let Some(m) = app.apply_modal.take() { + let title = m.title.clone(); + let job = ApplyJob { + task_id: m.task_id.clone(), + diff_override: m.diff_override.clone(), + }; + if spawn_preflight(&mut app, &backend, &tx, &frame_tx, title.clone(), job) { + app.apply_modal = Some(app::ApplyModalState { + task_id: m.task_id, + title: title.clone(), + result_message: None, + result_level: None, + skipped_paths: Vec::new(), + conflict_paths: Vec::new(), + diff_override: m.diff_override, + }); + app.status = format!("Preflighting '{title}'..."); + } else { + app.apply_modal = Some(m); + } + needs_redraw = true; + } + } + KeyCode::Esc + | KeyCode::Char('n') + | KeyCode::Char('q') + | KeyCode::Char('Q') => { app.apply_modal = None; app.status = "Apply canceled".to_string(); needs_redraw = true; } + _ => {} + } + } else if app.diff_overlay.is_some() { + let mut cycle_attempt = |delta: isize| { + if let Some(ov) = app.diff_overlay.as_mut() + && ov.attempt_count() > 1 { + ov.step_attempt(delta); + let total = ov.attempt_display_total(); + let current = ov.selected_attempt + 1; + app.status = format!("Viewing attempt {current} of {total}"); + ov.sd.to_top(); + needs_redraw = true; + } + }; + + match key.code { + KeyCode::Char('a') => { + if app.apply_inflight || app.apply_preflight_inflight { + app.status = "Finish the current apply/preflight before starting another.".to_string(); + needs_redraw = true; + continue; + } + let snapshot = app.diff_overlay.as_ref().map(|ov| { + ( + ov.task_id.clone(), + ov.title.clone(), + ov.current_can_apply(), + ov.current_attempt().and_then(|attempt| attempt.diff_raw.clone()), + ) + }); + if let Some((task_id, title, can_apply, diff_override)) = snapshot { + if can_apply { + let job = ApplyJob { + task_id: task_id.clone(), + diff_override: diff_override.clone(), + }; + if spawn_preflight(&mut app, &backend, &tx, &frame_tx, title.clone(), job) { + app.apply_modal = Some(app::ApplyModalState { + task_id, + title: title.clone(), + result_message: None, + result_level: None, + skipped_paths: Vec::new(), + conflict_paths: Vec::new(), + diff_override, + }); + app.status = format!("Preflighting '{title}'..."); + } + } else { + app.status = "No diff available to apply.".to_string(); + } + needs_redraw = true; + } + } + KeyCode::Tab => { + cycle_attempt(1); + } + KeyCode::BackTab => { + cycle_attempt(-1); + } + // From task modal, 'o' should close it and open the env selector + KeyCode::Char('o') | KeyCode::Char('O') => { + app.diff_overlay = None; + app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 }); + // Use cached environments unless empty + if app.environments.is_empty() { app.env_loading = true; app.env_error = None; } + needs_redraw = true; + if app.environments.is_empty() { + let tx = tx.clone(); + tokio::spawn(async move { + let base_url = crate::util::normalize_base_url( + &std::env::var("CODEX_CLOUD_TASKS_BASE_URL") + .unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string()), + ); + let headers = crate::util::build_chatgpt_headers().await; + 
let res = crate::env_detect::list_environments(&base_url, &headers).await; + let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res)); + }); + } + } + KeyCode::Left => { + if let Some(ov) = &mut app.diff_overlay { + let has_text = ov.current_attempt().is_some_and(app::AttemptView::has_text); + let has_diff = ov.current_attempt().is_some_and(app::AttemptView::has_diff) || ov.base_can_apply; + if has_text && has_diff { + ov.set_view(app::DetailView::Prompt); + ov.sd.to_top(); + needs_redraw = true; + } + } + } + KeyCode::Right => { + if let Some(ov) = &mut app.diff_overlay { + let has_text = ov.current_attempt().is_some_and(app::AttemptView::has_text); + let has_diff = ov.current_attempt().is_some_and(app::AttemptView::has_diff) || ov.base_can_apply; + if has_text && has_diff { + ov.set_view(app::DetailView::Diff); + ov.sd.to_top(); + needs_redraw = true; + } + } + } + KeyCode::Char(']') | KeyCode::Char('}') => { + cycle_attempt(1); + } + KeyCode::Char('[') | KeyCode::Char('{') => { + cycle_attempt(-1); + } + KeyCode::Esc | KeyCode::Char('q') => { + app.diff_overlay = None; + needs_redraw = true; + } + KeyCode::Down | KeyCode::Char('j') => { + if let Some(ov) = &mut app.diff_overlay { ov.sd.scroll_by(1); } + needs_redraw = true; + } + KeyCode::Up | KeyCode::Char('k') => { + if let Some(ov) = &mut app.diff_overlay { ov.sd.scroll_by(-1); } + needs_redraw = true; + } + KeyCode::PageDown | KeyCode::Char(' ') => { + if let Some(ov) = &mut app.diff_overlay { let step = ov.sd.state.viewport_h.saturating_sub(1) as i16; ov.sd.page_by(step); } + needs_redraw = true; + } + KeyCode::PageUp => { + if let Some(ov) = &mut app.diff_overlay { let step = ov.sd.state.viewport_h.saturating_sub(1) as i16; ov.sd.page_by(-step); } + needs_redraw = true; + } + KeyCode::Home => { if let Some(ov) = &mut app.diff_overlay { ov.sd.to_top(); } needs_redraw = true; } + KeyCode::End => { if let Some(ov) = &mut app.diff_overlay { ov.sd.to_bottom(); } needs_redraw = true; } + _ => {} + } + } else if app.env_modal.is_some() { + // Environment modal key handling + match key.code { + KeyCode::Esc => { app.env_modal = None; needs_redraw = true; } + KeyCode::Char('r') | KeyCode::Char('R') => { + // Trigger refresh of environments + app.env_loading = true; app.env_error = None; needs_redraw = true; + let _ = frame_tx.send(Instant::now() + Duration::from_millis(100)); + let tx = tx.clone(); + tokio::spawn(async move { + let base_url = crate::util::normalize_base_url(&std::env::var("CODEX_CLOUD_TASKS_BASE_URL").unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string())); + let headers = crate::util::build_chatgpt_headers().await; + let res = crate::env_detect::list_environments(&base_url, &headers).await; + let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res)); + }); + } + KeyCode::Char(ch) if !key.modifiers.contains(KeyModifiers::CONTROL) && !key.modifiers.contains(KeyModifiers::ALT) => { + if let Some(m) = app.env_modal.as_mut() { m.query.push(ch); } + needs_redraw = true; + } + KeyCode::Backspace => { if let Some(m) = app.env_modal.as_mut() { m.query.pop(); } needs_redraw = true; } + KeyCode::Down | KeyCode::Char('j') => { if let Some(m) = app.env_modal.as_mut() { m.selected = m.selected.saturating_add(1); } needs_redraw = true; } + KeyCode::Up | KeyCode::Char('k') => { if let Some(m) = app.env_modal.as_mut() { m.selected = m.selected.saturating_sub(1); } needs_redraw = true; } + KeyCode::Home => { if let Some(m) = app.env_modal.as_mut() { m.selected = 0; } needs_redraw = true; } + KeyCode::End => { if let Some(m) = 
app.env_modal.as_mut() { m.selected = app.environments.len(); } needs_redraw = true; } + KeyCode::PageDown | KeyCode::Char(' ') => { if let Some(m) = app.env_modal.as_mut() { let step = 10usize; m.selected = m.selected.saturating_add(step); } needs_redraw = true; } + KeyCode::PageUp => { if let Some(m) = app.env_modal.as_mut() { let step = 10usize; m.selected = m.selected.saturating_sub(step); } needs_redraw = true; } + KeyCode::Char('n') => { + if app.env_filter.is_none() { + app.new_task = Some(crate::new_task::NewTaskPage::new(None, app.best_of_n)); + } else { + app.new_task = Some(crate::new_task::NewTaskPage::new(app.env_filter.clone(), app.best_of_n)); + } + app.status = "New Task: Enter to submit; Esc to cancel".to_string(); + needs_redraw = true; + } + KeyCode::Enter => { + // Resolve selection over filtered set + if let Some(state) = app.env_modal.take() { + let q = state.query.to_lowercase(); + let filtered: Vec<&app::EnvironmentRow> = app.environments.iter().filter(|r| { + if q.is_empty() { return true; } + let mut hay = String::new(); + if let Some(l) = &r.label { hay.push_str(&l.to_lowercase()); hay.push(' '); } + hay.push_str(&r.id.to_lowercase()); + if let Some(h) = &r.repo_hints { hay.push(' '); hay.push_str(&h.to_lowercase()); } + hay.contains(&q) + }).collect(); + // Keep original order (already sorted) — no need to re-sort + let idx = state.selected; + if idx == 0 { app.env_filter = None; append_error_log("env.select: All"); } + else { + let env_idx = idx.saturating_sub(1); + if let Some(row) = filtered.get(env_idx) { + append_error_log(format!( + "env.select: id={} label={}", + row.id, + row.label.clone().unwrap_or_else(|| "".to_string()) + )); + app.env_filter = Some(row.id.clone()); + } + } + // If New Task page is open, reflect the new selection in its header immediately. + if let Some(page) = app.new_task.as_mut() { + page.env_id = app.env_filter.clone(); + } + // Trigger tasks refresh with the selected filter + app.status = "Loading tasks…".to_string(); + app.refresh_inflight = true; + app.list_generation = app.list_generation.saturating_add(1); + app.in_flight.clear(); + // reset spinner state + needs_redraw = true; + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let env_sel = app.env_filter.clone(); + tokio::spawn(async move { + let res = app::load_tasks(&*backend, env_sel.as_deref()).await; + let _ = tx.send(app::AppEvent::TasksLoaded { env: env_sel, result: res }); + }); + } + } + _ => {} + } + } else { + // Base list view keys + match key.code { + KeyCode::Char('q') | KeyCode::Esc => { + break 0; + } + KeyCode::Down | KeyCode::Char('j') => { + app.next(); + needs_redraw = true; + } + KeyCode::Up | KeyCode::Char('k') => { + app.prev(); + needs_redraw = true; + } + // Ensure 'r' does not refresh tasks when the env modal is open. 
+ KeyCode::Char('r') | KeyCode::Char('R') => { + if app.env_modal.is_some() { break 0; } + append_error_log(format!( + "refresh.request: env={}", + app.env_filter.clone().unwrap_or_else(|| "".to_string()) + )); + app.status = "Refreshing…".to_string(); + app.refresh_inflight = true; + app.list_generation = app.list_generation.saturating_add(1); + app.in_flight.clear(); + // reset spinner state + needs_redraw = true; + // Spawn background refresh + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let env_sel = app.env_filter.clone(); + tokio::spawn(async move { + let res = app::load_tasks(&*backend, env_sel.as_deref()).await; + let _ = tx.send(app::AppEvent::TasksLoaded { env: env_sel, result: res }); + }); + } + KeyCode::Char('o') | KeyCode::Char('O') => { + app.env_modal = Some(app::EnvModalState { query: String::new(), selected: 0 }); + // Cache environments until user explicitly refreshes with 'r' inside the modal. + let should_fetch = app.environments.is_empty(); + if should_fetch { app.env_loading = true; app.env_error = None; } + needs_redraw = true; + if should_fetch { + let tx = tx.clone(); + tokio::spawn(async move { + let base_url = crate::util::normalize_base_url(&std::env::var("CODEX_CLOUD_TASKS_BASE_URL").unwrap_or_else(|_| "https://chatgpt.com/backend-api".to_string())); + let headers = crate::util::build_chatgpt_headers().await; + let res = crate::env_detect::list_environments(&base_url, &headers).await; + let _ = tx.send(app::AppEvent::EnvironmentsLoaded(res)); + }); + } + } + KeyCode::Char('n') => { + let env_opt = app.env_filter.clone(); + app.new_task = Some(crate::new_task::NewTaskPage::new(env_opt, app.best_of_n)); + app.status = "New Task: Enter to submit; Esc to cancel".to_string(); + needs_redraw = true; + } + KeyCode::Enter => { + if let Some(task) = app.tasks.get(app.selected).cloned() { + app.status = format!("Loading details for {title}…", title = task.title); + app.details_inflight = true; + // Open empty overlay immediately; content arrives via events + let overlay = app::DiffOverlay::new( + task.id.clone(), + task.title.clone(), + task.attempt_total, + ); + app.diff_overlay = Some(overlay); + needs_redraw = true; + // Spawn background details load (diff first, then messages fallback) + let id = task.id.clone(); + let title = task.title.clone(); + { + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let diff_id = id.clone(); + let diff_title = title.clone(); + tokio::spawn(async move { + match codex_cloud_tasks_client::CloudBackend::get_task_diff(&*backend, diff_id.clone()).await { + Ok(Some(diff)) => { + let _ = tx.send(app::AppEvent::DetailsDiffLoaded { id: diff_id, title: diff_title, diff }); + } + Ok(None) => { + match codex_cloud_tasks_client::CloudBackend::get_task_text(&*backend, diff_id.clone()).await { + Ok(text) => { + let evt = app::AppEvent::DetailsMessagesLoaded { + id: diff_id, + title: diff_title, + messages: text.messages, + prompt: text.prompt, + turn_id: text.turn_id, + sibling_turn_ids: text.sibling_turn_ids, + attempt_placement: text.attempt_placement, + attempt_status: text.attempt_status, + }; + let _ = tx.send(evt); + } + Err(e2) => { + let _ = tx.send(app::AppEvent::DetailsFailed { id: diff_id, title: diff_title, error: format!("{e2}") }); + } + } + } + Err(e) => { + append_error_log(format!("get_task_diff failed for {}: {e}", diff_id.0)); + match codex_cloud_tasks_client::CloudBackend::get_task_text(&*backend, diff_id.clone()).await { + Ok(text) => { + let evt = app::AppEvent::DetailsMessagesLoaded { + id: 
diff_id, + title: diff_title, + messages: text.messages, + prompt: text.prompt, + turn_id: text.turn_id, + sibling_turn_ids: text.sibling_turn_ids, + attempt_placement: text.attempt_placement, + attempt_status: text.attempt_status, + }; + let _ = tx.send(evt); + } + Err(e2) => { + let _ = tx.send(app::AppEvent::DetailsFailed { id: diff_id, title: diff_title, error: format!("{e2}") }); + } + } + } + } + }); + } + // Also fetch conversation text even when diff exists + { + let backend = Arc::clone(&backend); + let tx = tx.clone(); + let msg_id = id; + let msg_title = title; + tokio::spawn(async move { + if let Ok(text) = codex_cloud_tasks_client::CloudBackend::get_task_text(&*backend, msg_id.clone()).await { + let evt = app::AppEvent::DetailsMessagesLoaded { + id: msg_id, + title: msg_title, + messages: text.messages, + prompt: text.prompt, + turn_id: text.turn_id, + sibling_turn_ids: text.sibling_turn_ids, + attempt_placement: text.attempt_placement, + attempt_status: text.attempt_status, + }; + let _ = tx.send(evt); + } + }); + } + // Animate spinner while details load. + let _ = frame_tx.send(Instant::now() + Duration::from_millis(100)); + } + } + KeyCode::Char('a') => { + if app.apply_inflight || app.apply_preflight_inflight { + app.status = "Finish the current apply/preflight before starting another.".to_string(); + needs_redraw = true; + continue; + } + + if let Some(task) = app.tasks.get(app.selected).cloned() { + match codex_cloud_tasks_client::CloudBackend::get_task_diff(&*backend, task.id.clone()).await { + Ok(Some(diff)) => { + let diff_override = Some(diff.clone()); + let task_id = task.id.clone(); + let title = task.title.clone(); + let job = ApplyJob { + task_id: task_id.clone(), + diff_override: diff_override.clone(), + }; + if spawn_preflight( + &mut app, + &backend, + &tx, + &frame_tx, + title.clone(), + job, + ) { + app.apply_modal = Some(app::ApplyModalState { + task_id, + title: title.clone(), + result_message: None, + result_level: None, + skipped_paths: Vec::new(), + conflict_paths: Vec::new(), + diff_override, + }); + app.status = format!("Preflighting '{title}'..."); + } + } + Ok(None) | Err(_) => { + app.status = "No diff available to apply".to_string(); + } + } + needs_redraw = true; + } + } + _ => {} + } + } + // Render after handling a key event (when not quitting). + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + } + Some(Ok(Event::Resize(_, _))) => { + needs_redraw = true; + // Redraw immediately on resize for snappier UX. + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + } + Some(Err(_)) | None => {} + _ => {} + } + // Fallback: if any other event path requested a redraw, render now. + render_if_needed(&mut terminal, &mut app, &mut needs_redraw)?; + } + } + }; + + // Restore terminal + disable_raw_mode().ok(); + terminal.show_cursor().ok(); + let _ = crossterm::execute!(std::io::stdout(), DisableBracketedPaste); + // Best-effort restore of keyboard enhancement flags before leaving alt screen. + let _ = crossterm::execute!(std::io::stdout(), PopKeyboardEnhancementFlags); + let _ = crossterm::execute!(std::io::stdout(), LeaveAlternateScreen); + + if exit_code != 0 { + std::process::exit(exit_code); + } + Ok(()) +} + +// extract_chatgpt_account_id moved to util.rs + +/// Build plain-text conversation lines: a labeled user prompt followed by assistant messages. 
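+///
+/// Illustrative output shape (a sketch, not a doctest):
+///
+/// ```text
+/// user:
+/// <prompt lines>
+///
+/// assistant:
+/// <first message lines>
+///
+/// <second message lines>
+/// ```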
+fn conversation_lines(prompt: Option<String>, messages: &[String]) -> Vec<String> {
+    let mut out: Vec<String> = Vec::new();
+    if let Some(p) = prompt {
+        out.push("user:".to_string());
+        for l in p.lines() {
+            out.push(l.to_string());
+        }
+        out.push(String::new());
+    }
+    if !messages.is_empty() {
+        out.push("assistant:".to_string());
+        for (i, m) in messages.iter().enumerate() {
+            for l in m.lines() {
+                out.push(l.to_string());
+            }
+            if i + 1 < messages.len() {
+                out.push(String::new());
+            }
+        }
+    }
+    if out.is_empty() {
+        out.push("".to_string());
+    }
+    out
+}
+
+/// Convert a verbose HTTP error with embedded JSON body into concise, user-friendly lines
+/// for the details overlay. Falls back to a short raw message when parsing fails.
+fn pretty_lines_from_error(raw: &str) -> Vec<String> {
+    let mut lines: Vec<String> = Vec::new();
+    let is_no_diff = raw.contains("No output_diff in response.");
+    let is_no_msgs = raw.contains("No assistant text messages in response.");
+    if is_no_diff {
+        lines.push("No diff available for this task.".to_string());
+    } else if is_no_msgs {
+        lines.push("No assistant messages found for this task.".to_string());
+    } else {
+        lines.push("Failed to load task details.".to_string());
+    }
+
+    // Try to parse the embedded JSON body: find the first '{' after " body=" and decode.
+    if let Some(body_idx) = raw.find(" body=")
+        && let Some(json_start_rel) = raw[body_idx..].find('{')
+    {
+        let json_start = body_idx + json_start_rel;
+        let json_str = raw[json_start..].trim();
+        if let Ok(v) = serde_json::from_str::<serde_json::Value>(json_str) {
+            // Prefer assistant turn context.
+            let turn = v
+                .get("current_assistant_turn")
+                .and_then(|x| x.as_object())
+                .cloned()
+                .or_else(|| {
+                    v.get("current_diff_task_turn")
+                        .and_then(|x| x.as_object())
+                        .cloned()
+                });
+            if let Some(t) = turn {
+                if let Some(err) = t.get("error").and_then(|e| e.as_object()) {
+                    let code = err.get("code").and_then(|s| s.as_str()).unwrap_or("");
+                    let msg = err.get("message").and_then(|s| s.as_str()).unwrap_or("");
+                    if !code.is_empty() || !msg.is_empty() {
+                        let summary = if code.is_empty() {
+                            msg.to_string()
+                        } else if msg.is_empty() {
+                            code.to_string()
+                        } else {
+                            format!("{code}: {msg}")
+                        };
+                        lines.push(format!("Assistant error: {summary}"));
+                    }
+                }
+                if let Some(status) = t.get("turn_status").and_then(|s| s.as_str()) {
+                    lines.push(format!("Status: {status}"));
+                }
+                if let Some(text) = t
+                    .get("latest_event")
+                    .and_then(|e| e.get("text"))
+                    .and_then(|s| s.as_str())
+                    && !text.trim().is_empty()
+                {
+                    lines.push(format!("Latest event: {}", text.trim()));
+                }
+            }
+        }
+    }
+
+    if lines.len() == 1 {
+        // Parsing yielded nothing; include a trimmed, short raw message tail for context.
+        let tail = if raw.len() > 320 {
+            format!("{}…", &raw[..320])
+        } else {
+            raw.to_string()
+        };
+        lines.push(tail);
+    } else if lines.len() >= 2 {
+        // Add a hint to refresh when still in progress.
+        if lines.iter().any(|l| l.contains("in_progress")) {
+            lines.push("This task may still be running. Press 'r' to refresh.".to_string());
+        }
+        // Avoid an empty overlay
+        lines.push(String::new());
+    }
+    lines
+}
+
+#[cfg(test)]
+mod tests {
+    use codex_tui::ComposerAction;
+    use codex_tui::ComposerInput;
+    use crossterm::event::KeyCode;
+    use crossterm::event::KeyEvent;
+    use crossterm::event::KeyModifiers;
+    use ratatui::buffer::Buffer;
+    use ratatui::layout::Rect;
+
+    #[test]
+    fn composer_input_renders_typed_characters() {
+        let mut composer = ComposerInput::new();
+        let key = KeyEvent::new(KeyCode::Char('a'), KeyModifiers::NONE);
+        match composer.input(key) {
+            ComposerAction::Submitted(_) => panic!("unexpected submission"),
+            ComposerAction::None => {}
+        }
+
+        let area = Rect::new(0, 0, 20, 5);
+        let mut buf = Buffer::empty(area);
+        composer.render_ref(area, &mut buf);
+
+        let found = buf.content().iter().any(|cell| cell.symbol() == "a");
+        assert!(found, "typed character was not rendered: {buf:?}");
+
+        composer.set_hint_items(vec![("⌃O", "env"), ("⌃C", "quit")]);
+        composer.render_ref(area, &mut buf);
+        let footer = buf
+            .content()
+            .iter()
+            .skip((area.width as usize) * (area.height as usize - 1))
+            .map(ratatui::buffer::Cell::symbol)
+            .collect::<Vec<_>>()
+            .join("");
+        assert!(footer.contains("⌃O env"));
+    }
+}
diff --git a/codex-rs/cloud-tasks/src/new_task.rs b/codex-rs/cloud-tasks/src/new_task.rs
new file mode 100644
index 00000000000..162fd3bb3a8
--- /dev/null
+++ b/codex-rs/cloud-tasks/src/new_task.rs
@@ -0,0 +1,35 @@
+use codex_tui::ComposerInput;
+
+pub struct NewTaskPage {
+    pub composer: ComposerInput,
+    pub submitting: bool,
+    pub env_id: Option<String>,
+    pub best_of_n: usize,
+}
+
+impl NewTaskPage {
+    pub fn new(env_id: Option<String>, best_of_n: usize) -> Self {
+        let mut composer = ComposerInput::new();
+        composer.set_hint_items(vec![
+            ("⏎", "send"),
+            ("Shift+⏎", "newline"),
+            ("Ctrl+O", "env"),
+            ("Ctrl+N", "attempts"),
+            ("Ctrl+C", "quit"),
+        ]);
+        Self {
+            composer,
+            submitting: false,
+            env_id,
+            best_of_n,
+        }
+    }
+
+    // Additional helpers can be added as usage evolves.
+}
+
+impl Default for NewTaskPage {
+    fn default() -> Self {
+        Self::new(None, 1)
+    }
+}
diff --git a/codex-rs/cloud-tasks/src/scrollable_diff.rs b/codex-rs/cloud-tasks/src/scrollable_diff.rs
new file mode 100644
index 00000000000..97dfb248958
--- /dev/null
+++ b/codex-rs/cloud-tasks/src/scrollable_diff.rs
@@ -0,0 +1,176 @@
+use unicode_width::UnicodeWidthChar;
+use unicode_width::UnicodeWidthStr;
+
+/// Scroll position and geometry for a vertical scroll view.
+#[derive(Clone, Copy, Debug, Default)]
+pub struct ScrollViewState {
+    pub scroll: u16,
+    pub viewport_h: u16,
+    pub content_h: u16,
+}
+
+impl ScrollViewState {
+    pub fn clamp(&mut self) {
+        let max_scroll = self.content_h.saturating_sub(self.viewport_h);
+        if self.scroll > max_scroll {
+            self.scroll = max_scroll;
+        }
+    }
+}
+
+/// A simple, local scrollable view for diffs or message text.
+///
+/// Owns raw lines, caches wrapped lines for a given width, and maintains
+/// a small scroll state that is clamped whenever geometry shrinks.
+#[derive(Clone, Debug, Default)]
+pub struct ScrollableDiff {
+    raw: Vec<String>,
+    wrapped: Vec<String>,
+    wrapped_src_idx: Vec<usize>,
+    wrap_cols: Option<u16>,
+    pub state: ScrollViewState,
+}
+
+impl ScrollableDiff {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Replace the raw content lines. Does not rewrap immediately; call `set_width` next.
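+    ///
+    /// Illustrative usage (a sketch, not a doctest):
+    ///
+    /// ```ignore
+    /// let mut sd = ScrollableDiff::new();
+    /// sd.set_content(vec!["+added line".to_string(), "-removed line".to_string()]);
+    /// sd.set_width(80);    // rewrap for an 80-column area
+    /// sd.set_viewport(24); // clamp scroll to the new geometry
+    /// assert_eq!(sd.wrapped_lines().len(), 2);
+    /// ```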
+    pub fn set_content(&mut self, lines: Vec<String>) {
+        self.raw = lines;
+        self.wrapped.clear();
+        self.wrapped_src_idx.clear();
+        self.state.content_h = 0;
+        // Force rewrap on next set_width even if width is unchanged
+        self.wrap_cols = None;
+    }
+
+    /// Set the wrap width. If changed, rebuild wrapped lines and clamp scroll.
+    pub fn set_width(&mut self, width: u16) {
+        if self.wrap_cols == Some(width) {
+            return;
+        }
+        self.wrap_cols = Some(width);
+        self.rewrap(width);
+        self.state.clamp();
+    }
+
+    /// Update viewport height and clamp scroll if needed.
+    pub fn set_viewport(&mut self, height: u16) {
+        self.state.viewport_h = height;
+        self.state.clamp();
+    }
+
+    /// Return the cached wrapped lines. Call `set_width` first when area changes.
+    pub fn wrapped_lines(&self) -> &[String] {
+        &self.wrapped
+    }
+
+    pub fn wrapped_src_indices(&self) -> &[usize] {
+        &self.wrapped_src_idx
+    }
+
+    pub fn raw_line_at(&self, idx: usize) -> &str {
+        self.raw.get(idx).map(String::as_str).unwrap_or("")
+    }
+
+    /// Scroll by a signed delta; clamps to content.
+    pub fn scroll_by(&mut self, delta: i16) {
+        let s = self.state.scroll as i32 + delta as i32;
+        self.state.scroll = s.clamp(0, self.max_scroll() as i32) as u16;
+    }
+
+    /// Page by a signed delta; typically viewport_h - 1.
+    pub fn page_by(&mut self, delta: i16) {
+        self.scroll_by(delta);
+    }
+
+    pub fn to_top(&mut self) {
+        self.state.scroll = 0;
+    }
+
+    pub fn to_bottom(&mut self) {
+        self.state.scroll = self.max_scroll();
+    }
+
+    /// Optional percent scrolled; None when not enough geometry is known.
+    pub fn percent_scrolled(&self) -> Option<u8> {
+        if self.state.content_h == 0 || self.state.viewport_h == 0 {
+            return None;
+        }
+        if self.state.content_h <= self.state.viewport_h {
+            return None;
+        }
+        let visible_bottom = self.state.scroll.saturating_add(self.state.viewport_h) as f32;
+        let pct = (visible_bottom / self.state.content_h as f32 * 100.0).round();
+        Some(pct.clamp(0.0, 100.0) as u8)
+    }
+
+    fn max_scroll(&self) -> u16 {
+        self.state.content_h.saturating_sub(self.state.viewport_h)
+    }
+
+    fn rewrap(&mut self, width: u16) {
+        if width == 0 {
+            self.wrapped = self.raw.clone();
+            self.state.content_h = self.wrapped.len() as u16;
+            return;
+        }
+        let max_cols = width as usize;
+        let mut out: Vec<String> = Vec::new();
+        let mut out_idx: Vec<usize> = Vec::new();
+        for (raw_idx, raw) in self.raw.iter().enumerate() {
+            // Normalize tabs for width accounting (MVP: 4 spaces).
+            let raw = raw.replace('\t', "    ");
+            if raw.is_empty() {
+                out.push(String::new());
+                out_idx.push(raw_idx);
+                continue;
+            }
+            let mut line = String::new();
+            let mut line_cols = 0usize;
+            let mut last_soft_idx: Option<usize> = None; // last whitespace or punctuation break
+            for (_i, ch) in raw.char_indices() {
+                if ch == '\n' {
+                    out.push(std::mem::take(&mut line));
+                    out_idx.push(raw_idx);
+                    line_cols = 0;
+                    last_soft_idx = None;
+                    continue;
+                }
+                let w = UnicodeWidthChar::width(ch).unwrap_or(0);
+                if line_cols.saturating_add(w) > max_cols {
+                    if let Some(split) = last_soft_idx {
+                        let (prefix, rest) = line.split_at(split);
+                        out.push(prefix.trim_end().to_string());
+                        out_idx.push(raw_idx);
+                        line = rest.trim_start().to_string();
+                        last_soft_idx = None;
+                        // retry add current ch now that line may be shorter
+                    } else if !line.is_empty() {
+                        out.push(std::mem::take(&mut line));
+                        out_idx.push(raw_idx);
+                    }
+                }
+                if ch.is_whitespace()
+                    || matches!(
+                        ch,
+                        ',' | ';' | '.' | ':' | ')' | ']' | '}' | '|' | '/' | '?' | '!'
| '-' | '_' + ) + { + last_soft_idx = Some(line.len()); + } + line.push(ch); + line_cols = UnicodeWidthStr::width(line.as_str()); + } + if !line.is_empty() { + out.push(line); + out_idx.push(raw_idx); + } + } + self.wrapped = out; + self.wrapped_src_idx = out_idx; + self.state.content_h = self.wrapped.len() as u16; + } +} diff --git a/codex-rs/cloud-tasks/src/ui.rs b/codex-rs/cloud-tasks/src/ui.rs new file mode 100644 index 00000000000..fe91b0ce9b0 --- /dev/null +++ b/codex-rs/cloud-tasks/src/ui.rs @@ -0,0 +1,1056 @@ +use ratatui::layout::Constraint; +use ratatui::layout::Direction; +use ratatui::layout::Layout; +use ratatui::prelude::*; +use ratatui::style::Color; +use ratatui::style::Modifier; +use ratatui::style::Style; +use ratatui::style::Stylize; +use ratatui::widgets::Block; +use ratatui::widgets::BorderType; +use ratatui::widgets::Borders; +use ratatui::widgets::Clear; +use ratatui::widgets::List; +use ratatui::widgets::ListItem; +use ratatui::widgets::ListState; +use ratatui::widgets::Padding; +use ratatui::widgets::Paragraph; +use std::sync::OnceLock; + +use crate::app::App; +use crate::app::AttemptView; +use chrono::Local; +use chrono::Utc; +use codex_cloud_tasks_client::AttemptStatus; +use codex_cloud_tasks_client::TaskStatus; +use codex_tui::render_markdown_text; + +pub fn draw(frame: &mut Frame, app: &mut App) { + let area = frame.area(); + let chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Min(1), // list + Constraint::Length(2), // two-line footer (help + status) + ]) + .split(area); + if app.new_task.is_some() { + draw_new_task_page(frame, chunks[0], app); + draw_footer(frame, chunks[1], app); + } else { + draw_list(frame, chunks[0], app); + draw_footer(frame, chunks[1], app); + } + + if app.diff_overlay.is_some() { + draw_diff_overlay(frame, area, app); + } + if app.env_modal.is_some() { + draw_env_modal(frame, area, app); + } + if app.best_of_modal.is_some() { + draw_best_of_modal(frame, area, app); + } + if app.apply_modal.is_some() { + draw_apply_modal(frame, area, app); + } +} + +// ===== Overlay helpers (geometry + styling) ===== +static ROUNDED: OnceLock = OnceLock::new(); + +fn rounded_enabled() -> bool { + *ROUNDED.get_or_init(|| { + std::env::var("CODEX_TUI_ROUNDED") + .ok() + .map(|v| v == "1") + .unwrap_or(true) + }) +} + +fn overlay_outer(area: Rect) -> Rect { + let outer_v = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Percentage(10), + Constraint::Percentage(80), + Constraint::Percentage(10), + ]) + .split(area)[1]; + Layout::default() + .direction(Direction::Horizontal) + .constraints([ + Constraint::Percentage(10), + Constraint::Percentage(80), + Constraint::Percentage(10), + ]) + .split(outer_v)[1] +} + +fn overlay_block() -> Block<'static> { + let base = Block::default().borders(Borders::ALL); + let base = if rounded_enabled() { + base.border_type(BorderType::Rounded) + } else { + base + }; + base.padding(Padding::new(2, 2, 1, 1)) +} + +fn overlay_content(area: Rect) -> Rect { + overlay_block().inner(area) +} + +pub fn draw_new_task_page(frame: &mut Frame, area: Rect, app: &mut App) { + let title_spans = { + let mut spans: Vec = vec!["New Task".magenta().bold()]; + if let Some(id) = app + .new_task + .as_ref() + .and_then(|p| p.env_id.as_ref()) + .cloned() + { + spans.push(" • ".into()); + // Try to map id to label + let label = app + .environments + .iter() + .find(|r| r.id == id) + .and_then(|r| r.label.clone()) + .unwrap_or(id); + spans.push(label.dim()); + } else { + 
spans.push(" • ".into()); + spans.push("Env: none (press ctrl-o to choose)".red()); + } + if let Some(page) = app.new_task.as_ref() { + spans.push(" • ".into()); + let attempts = page.best_of_n; + let label = format!( + "{} attempt{}", + attempts, + if attempts == 1 { "" } else { "s" } + ); + spans.push(label.cyan()); + } + spans + }; + let block = Block::default() + .borders(Borders::ALL) + .title(Line::from(title_spans)); + + frame.render_widget(Clear, area); + frame.render_widget(block.clone(), area); + let content = block.inner(area); + + // Expand composer height up to (terminal height - 6), with a 3-line minimum. + let max_allowed = frame.area().height.saturating_sub(6).max(3); + let desired = app + .new_task + .as_ref() + .map(|p| p.composer.desired_height(content.width)) + .unwrap_or(3) + .clamp(3, max_allowed); + + // Anchor the composer to the bottom-left by allocating a flexible spacer + // above it and a fixed `desired`-height area for the composer. + let rows = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Min(1), Constraint::Length(desired)]) + .split(content); + let composer_area = rows[1]; + + if let Some(page) = app.new_task.as_ref() { + page.composer.render_ref(composer_area, frame.buffer_mut()); + // Composer renders its own footer hints; no extra row here. + } + + // Place cursor where composer wants it + if let Some(page) = app.new_task.as_ref() + && let Some((x, y)) = page.composer.cursor_pos(composer_area) + { + frame.set_cursor_position((x, y)); + } +} + +fn draw_list(frame: &mut Frame, area: Rect, app: &mut App) { + let items: Vec = app.tasks.iter().map(|t| render_task_item(app, t)).collect(); + + // Selection reflects the actual task index (no artificial spacer item). + let mut state = ListState::default().with_selected(Some(app.selected)); + // Dim task list when a modal/overlay is active to emphasize focus. + let dim_bg = app.env_modal.is_some() + || app.apply_modal.is_some() + || app.best_of_modal.is_some() + || app.diff_overlay.is_some(); + // Dynamic title includes current environment filter + let suffix_span = if let Some(ref id) = app.env_filter { + let label = app + .environments + .iter() + .find(|r| &r.id == id) + .and_then(|r| r.label.clone()) + .unwrap_or_else(|| "Selected".to_string()); + format!(" • {label}").dim() + } else { + " • All".dim() + }; + // Percent scrolled based on selection position in the list (0% at top, 100% at bottom). 
+ let percent_span = if app.tasks.len() <= 1 { + " • 0%".dim() + } else { + let p = ((app.selected as f32) / ((app.tasks.len() - 1) as f32) * 100.0).round() as i32; + format!(" • {}%", p.clamp(0, 100)).dim() + }; + let title_line = { + let base = Line::from(vec!["Cloud Tasks".into(), suffix_span, percent_span]); + if dim_bg { + base.style(Style::default().add_modifier(Modifier::DIM)) + } else { + base + } + }; + let block = Block::default().borders(Borders::ALL).title(title_line); + // Render the outer block first + frame.render_widget(block.clone(), area); + // Draw list inside with a persistent top spacer row + let inner = block.inner(area); + let rows = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Length(1), Constraint::Min(1)]) + .split(inner); + let mut list = List::new(items) + .highlight_symbol("› ") + .highlight_style(Style::default().bold()); + if dim_bg { + list = list.style(Style::default().add_modifier(Modifier::DIM)); + } + frame.render_stateful_widget(list, rows[1], &mut state); + + // In-box spinner during initial/refresh loads + if app.refresh_inflight { + draw_centered_spinner(frame, inner, &mut app.throbber, "Loading tasks…"); + } +} + +fn draw_footer(frame: &mut Frame, area: Rect, app: &mut App) { + let mut help = vec![ + "↑/↓".dim(), + ": Move ".dim(), + "r".dim(), + ": Refresh ".dim(), + "Enter".dim(), + ": Open ".dim(), + ]; + // Apply hint; show disabled note when overlay is open without a diff. + if let Some(ov) = app.diff_overlay.as_ref() { + if !ov.current_can_apply() { + help.push("a".dim()); + help.push(": Apply (disabled) ".dim()); + } else { + help.push("a".dim()); + help.push(": Apply ".dim()); + } + if ov.attempt_count() > 1 { + help.push("Tab".dim()); + help.push(": Next attempt ".dim()); + help.push("[ ]".dim()); + help.push(": Cycle attempts ".dim()); + } + } else { + help.push("a".dim()); + help.push(": Apply ".dim()); + } + help.push("o : Set Env ".dim()); + if app.new_task.is_some() { + help.push("Ctrl+N".dim()); + help.push(format!(": Attempts {}x ", app.best_of_n).dim()); + help.push("(editing new task) ".dim()); + } else { + help.push("n : New Task ".dim()); + } + help.extend(vec!["q".dim(), ": Quit ".dim()]); + // Split footer area into two rows: help+spinner (top) and status (bottom) + let rows = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Length(1), Constraint::Length(1)]) + .split(area); + + // Top row: help text + spinner at right + let top = Layout::default() + .direction(Direction::Horizontal) + .constraints([Constraint::Fill(1), Constraint::Length(18)]) + .split(rows[0]); + let para = Paragraph::new(Line::from(help)); + // Draw help text; avoid clearing the whole footer area every frame. + frame.render_widget(para, top[0]); + // Right side: spinner or clear the spinner area if idle to prevent stale glyphs. + if app.refresh_inflight + || app.details_inflight + || app.env_loading + || app.apply_preflight_inflight + || app.apply_inflight + { + draw_inline_spinner(frame, top[1], &mut app.throbber, "Loading…"); + } else { + frame.render_widget(Clear, top[1]); + } + + // Bottom row: status/log text across full width (single-line; sanitize newlines) + let mut status_line = app.status.replace('\n', " "); + if status_line.len() > 2000 { + // hard cap to avoid TUI noise + status_line.truncate(2000); + status_line.push('…'); + } + // Clear the status row to avoid trailing characters when the message shrinks. 
+ frame.render_widget(Clear, rows[1]); + let status = Paragraph::new(status_line); + frame.render_widget(status, rows[1]); +} + +fn draw_diff_overlay(frame: &mut Frame, area: Rect, app: &mut App) { + let inner = overlay_outer(area); + if app.diff_overlay.is_none() { + return; + } + let ov_can_apply = app + .diff_overlay + .as_ref() + .map(super::app::DiffOverlay::current_can_apply) + .unwrap_or(false); + let is_error = app + .diff_overlay + .as_ref() + .and_then(|o| o.sd.wrapped_lines().first().cloned()) + .map(|s| s.trim_start().starts_with("Task failed:")) + .unwrap_or(false) + && !ov_can_apply; + let title = app + .diff_overlay + .as_ref() + .map(|o| o.title.clone()) + .unwrap_or_default(); + + // Title block + let title_ref = title.as_str(); + let mut title_spans: Vec = if is_error { + vec![ + "Details ".magenta(), + "[FAILED]".red().bold(), + " ".into(), + title_ref.magenta(), + ] + } else if ov_can_apply { + vec!["Diff: ".magenta(), title_ref.magenta()] + } else { + vec!["Details: ".magenta(), title_ref.magenta()] + }; + if let Some(p) = app + .diff_overlay + .as_ref() + .and_then(|o| o.sd.percent_scrolled()) + { + title_spans.push(" • ".dim()); + title_spans.push(format!("{p}%").dim()); + } + frame.render_widget(Clear, inner); + frame.render_widget( + overlay_block().title(Line::from(title_spans)).clone(), + inner, + ); + + // Content area and optional status bar + let content_full = overlay_content(inner); + let mut content_area = content_full; + if let Some(ov) = app.diff_overlay.as_mut() { + let has_text = ov.current_attempt().is_some_and(AttemptView::has_text); + let has_diff = ov.current_attempt().is_some_and(AttemptView::has_diff) || ov.base_can_apply; + if has_diff || has_text { + let rows = Layout::default() + .direction(Direction::Vertical) + .constraints([Constraint::Length(1), Constraint::Min(1)]) + .split(content_full); + // Status bar label + let mut spans: Vec = Vec::new(); + if has_diff && has_text { + let prompt_lbl = if matches!(ov.current_view, crate::app::DetailView::Prompt) { + "[Prompt]".magenta().bold() + } else { + "Prompt".dim() + }; + let diff_lbl = if matches!(ov.current_view, crate::app::DetailView::Diff) { + "[Diff]".magenta().bold() + } else { + "Diff".dim() + }; + spans.extend(vec![ + prompt_lbl, + " ".into(), + diff_lbl, + " ".into(), + "(← → to switch view)".dim(), + ]); + } else if has_text { + spans.push("Conversation".magenta().bold()); + } else { + spans.push("Diff".magenta().bold()); + } + if let Some(total) = ov.expected_attempts().or({ + if ov.attempts.is_empty() { + None + } else { + Some(ov.attempts.len()) + } + }) && total > 1 + { + spans.extend(vec![ + " ".into(), + format!("Attempt {}/{}", ov.selected_attempt + 1, total) + .bold() + .dim(), + " ".into(), + "(Tab/Shift-Tab or [ ] to cycle attempts)".dim(), + ]); + } + frame.render_widget(Paragraph::new(Line::from(spans)), rows[0]); + ov.sd.set_width(rows[1].width); + ov.sd.set_viewport(rows[1].height); + content_area = rows[1]; + } else { + ov.sd.set_width(content_full.width); + ov.sd.set_viewport(content_full.height); + content_area = content_full; + } + } + + // Styled content render + // Choose styling by the active view, not just presence of a diff + let is_diff_view = app + .diff_overlay + .as_ref() + .map(|o| matches!(o.current_view, crate::app::DetailView::Diff)) + .unwrap_or(false); + let styled_lines: Vec> = if is_diff_view { + let raw = app.diff_overlay.as_ref().map(|o| o.sd.wrapped_lines()); + raw.unwrap_or(&[]) + .iter() + .map(|l| style_diff_line(l)) + .collect() + } else { + 
app.diff_overlay + .as_ref() + .map(|o| style_conversation_lines(&o.sd, o.current_attempt())) + .unwrap_or_default() + }; + let raw_empty = app + .diff_overlay + .as_ref() + .map(|o| o.sd.wrapped_lines().is_empty()) + .unwrap_or(true); + if app.details_inflight && raw_empty { + draw_centered_spinner(frame, content_area, &mut app.throbber, "Loading details…"); + } else { + let scroll = app + .diff_overlay + .as_ref() + .map(|o| o.sd.state.scroll) + .unwrap_or(0); + let content = Paragraph::new(Text::from(styled_lines)).scroll((scroll, 0)); + frame.render_widget(content, content_area); + } +} + +pub fn draw_apply_modal(frame: &mut Frame, area: Rect, app: &mut App) { + use ratatui::widgets::Wrap; + let inner = overlay_outer(area); + let title = Line::from("Apply Changes?".magenta().bold()); + let block = overlay_block().title(title); + frame.render_widget(Clear, inner); + frame.render_widget(block.clone(), inner); + let content = overlay_content(inner); + + if let Some(m) = &app.apply_modal { + // Header + let header = Paragraph::new(Line::from( + format!("Apply '{}' ?", m.title).magenta().bold(), + )) + .wrap(Wrap { trim: true }); + // Footer instructions + let footer = + Paragraph::new(Line::from("Press Y to apply, P to preflight, N to cancel.").dim()) + .wrap(Wrap { trim: true }); + + // Split into header/body/footer + let rows = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Length(1), + Constraint::Min(1), + Constraint::Length(1), + ]) + .split(content); + + frame.render_widget(header, rows[0]); + // Body: spinner while preflight/apply runs; otherwise show result message and path lists + if app.apply_preflight_inflight { + draw_centered_spinner(frame, rows[1], &mut app.throbber, "Checking…"); + } else if app.apply_inflight { + draw_centered_spinner(frame, rows[1], &mut app.throbber, "Applying…"); + } else if m.result_message.is_none() { + draw_centered_spinner(frame, rows[1], &mut app.throbber, "Loading…"); + } else if let Some(msg) = &m.result_message { + let mut body_lines: Vec = Vec::new(); + let first = match m.result_level { + Some(crate::app::ApplyResultLevel::Success) => msg.clone().green(), + Some(crate::app::ApplyResultLevel::Partial) => msg.clone().magenta(), + Some(crate::app::ApplyResultLevel::Error) => msg.clone().red(), + None => msg.clone().into(), + }; + body_lines.push(Line::from(first)); + + // On partial or error, show conflicts/skips if present + if !matches!(m.result_level, Some(crate::app::ApplyResultLevel::Success)) { + use ratatui::text::Span; + if !m.conflict_paths.is_empty() { + body_lines.push(Line::from("")); + body_lines.push( + Line::from(format!("Conflicts ({}):", m.conflict_paths.len())) + .red() + .bold(), + ); + for p in &m.conflict_paths { + body_lines + .push(Line::from(vec![" • ".into(), Span::raw(p.clone()).dim()])); + } + } + if !m.skipped_paths.is_empty() { + body_lines.push(Line::from("")); + body_lines.push( + Line::from(format!("Skipped ({}):", m.skipped_paths.len())) + .magenta() + .bold(), + ); + for p in &m.skipped_paths { + body_lines + .push(Line::from(vec![" • ".into(), Span::raw(p.clone()).dim()])); + } + } + } + let body = Paragraph::new(body_lines).wrap(Wrap { trim: true }); + frame.render_widget(body, rows[1]); + } + frame.render_widget(footer, rows[2]); + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +enum ConversationSpeaker { + User, + Assistant, +} + +fn style_conversation_lines( + sd: &crate::scrollable_diff::ScrollableDiff, + attempt: Option<&AttemptView>, +) -> Vec> { + use 
ratatui::text::Span; + + let wrapped = sd.wrapped_lines(); + if wrapped.is_empty() { + return Vec::new(); + } + + let indices = sd.wrapped_src_indices(); + let mut styled: Vec> = Vec::new(); + let mut speaker: Option = None; + let mut in_code = false; + let mut last_src: Option = None; + let mut bullet_indent: Option = None; + + for (display, &src_idx) in wrapped.iter().zip(indices.iter()) { + let raw = sd.raw_line_at(src_idx); + let trimmed = raw.trim(); + let is_new_raw = last_src.map(|prev| prev != src_idx).unwrap_or(true); + + if trimmed.eq_ignore_ascii_case("user:") { + speaker = Some(ConversationSpeaker::User); + in_code = false; + bullet_indent = None; + styled.push(conversation_header_line(ConversationSpeaker::User, None)); + last_src = Some(src_idx); + continue; + } + if trimmed.eq_ignore_ascii_case("assistant:") { + speaker = Some(ConversationSpeaker::Assistant); + in_code = false; + bullet_indent = None; + styled.push(conversation_header_line( + ConversationSpeaker::Assistant, + attempt, + )); + last_src = Some(src_idx); + continue; + } + if raw.is_empty() { + let mut spans: Vec = Vec::new(); + if let Some(role) = speaker { + spans.push(conversation_gutter_span(role)); + } else { + spans.push(Span::raw(String::new())); + } + styled.push(Line::from(spans)); + last_src = Some(src_idx); + bullet_indent = None; + continue; + } + + if is_new_raw { + let trimmed_start = raw.trim_start(); + if trimmed_start.starts_with("```") { + in_code = !in_code; + bullet_indent = None; + } else if !in_code + && (trimmed_start.starts_with("- ") || trimmed_start.starts_with("* ")) + { + let indent = raw.chars().take_while(|c| c.is_whitespace()).count(); + bullet_indent = Some(indent); + } else if !in_code { + bullet_indent = None; + } + } + + let mut spans: Vec = Vec::new(); + if let Some(role) = speaker { + spans.push(conversation_gutter_span(role)); + } + + spans.extend(conversation_text_spans( + display, + in_code, + is_new_raw, + bullet_indent, + )); + + styled.push(Line::from(spans)); + last_src = Some(src_idx); + } + + if styled.is_empty() { + wrapped.iter().map(|l| Line::from(l.to_string())).collect() + } else { + styled + } +} + +fn conversation_header_line( + speaker: ConversationSpeaker, + attempt: Option<&AttemptView>, +) -> Line<'static> { + use ratatui::text::Span; + + let mut spans: Vec = vec!["╭ ".dim()]; + match speaker { + ConversationSpeaker::User => { + spans.push("User".cyan().bold()); + spans.push(" prompt".dim()); + } + ConversationSpeaker::Assistant => { + spans.push("Assistant".magenta().bold()); + spans.push(" response".dim()); + if let Some(attempt) = attempt + && let Some(status_span) = attempt_status_span(attempt.status) + { + spans.push(" • ".dim()); + spans.push(status_span); + } + } + } + Line::from(spans) +} + +fn conversation_gutter_span(speaker: ConversationSpeaker) -> ratatui::text::Span<'static> { + match speaker { + ConversationSpeaker::User => "│ ".cyan().dim(), + ConversationSpeaker::Assistant => "│ ".magenta().dim(), + } +} + +fn conversation_text_spans( + display: &str, + in_code: bool, + is_new_raw: bool, + bullet_indent: Option, +) -> Vec> { + use ratatui::text::Span; + + if in_code { + return vec![Span::styled( + display.to_string(), + Style::default().fg(Color::Cyan), + )]; + } + + let trimmed = display.trim_start(); + + if let Some(indent) = bullet_indent { + if is_new_raw { + let rest = trimmed.get(2..).unwrap_or("").trim_start(); + let mut spans: Vec = Vec::new(); + if indent > 0 { + spans.push(Span::raw(" ".repeat(indent))); + } + spans.push("• 
".into()); + spans.push(Span::raw(rest.to_string())); + return spans; + } + let mut continuation = String::new(); + continuation.push_str(&" ".repeat(indent + 2)); + continuation.push_str(trimmed); + return vec![Span::raw(continuation)]; + } + + if is_new_raw + && (trimmed.starts_with("### ") || trimmed.starts_with("## ") || trimmed.starts_with("# ")) + { + return vec![Span::styled( + display.to_string(), + Style::default() + .fg(Color::Magenta) + .add_modifier(Modifier::BOLD), + )]; + } + + let mut rendered = render_markdown_text(display); + if rendered.lines.is_empty() { + return vec![Span::raw(display.to_string())]; + } + // `render_markdown_text` can yield multiple lines when the input contains + // explicit breaks. We only expect a single line here; join the spans of the + // first rendered line for styling. + rendered.lines.remove(0).spans.into_iter().collect() +} + +fn attempt_status_span(status: AttemptStatus) -> Option> { + match status { + AttemptStatus::Completed => Some("Completed".green()), + AttemptStatus::Failed => Some("Failed".red().bold()), + AttemptStatus::InProgress => Some("In progress".magenta()), + AttemptStatus::Pending => Some("Pending".cyan()), + AttemptStatus::Cancelled => Some("Cancelled".dim()), + AttemptStatus::Unknown => None, + } +} + +fn style_diff_line(raw: &str) -> Line<'static> { + use ratatui::style::Color; + use ratatui::style::Modifier; + use ratatui::style::Style; + use ratatui::text::Span; + + if raw.starts_with("@@") { + return Line::from(vec![Span::styled( + raw.to_string(), + Style::default() + .fg(Color::Magenta) + .add_modifier(Modifier::BOLD), + )]); + } + if raw.starts_with("+++") || raw.starts_with("---") { + return Line::from(vec![Span::styled( + raw.to_string(), + Style::default().add_modifier(Modifier::DIM), + )]); + } + if raw.starts_with('+') { + return Line::from(vec![Span::styled( + raw.to_string(), + Style::default().fg(Color::Green), + )]); + } + if raw.starts_with('-') { + return Line::from(vec![Span::styled( + raw.to_string(), + Style::default().fg(Color::Red), + )]); + } + Line::from(vec![Span::raw(raw.to_string())]) +} + +fn render_task_item(_app: &App, t: &codex_cloud_tasks_client::TaskSummary) -> ListItem<'static> { + let status = match t.status { + TaskStatus::Ready => "READY".green(), + TaskStatus::Pending => "PENDING".magenta(), + TaskStatus::Applied => "APPLIED".blue(), + TaskStatus::Error => "ERROR".red(), + }; + + // Title line: [STATUS] Title + let title = Line::from(vec![ + "[".into(), + status, + "] ".into(), + t.title.clone().into(), + ]); + + // Meta line: environment label and relative time (dim) + let mut meta: Vec = Vec::new(); + if let Some(lbl) = t.environment_label.as_ref().filter(|s| !s.is_empty()) { + meta.push(lbl.clone().dim()); + } + let when = format_relative_time(t.updated_at).dim(); + if !meta.is_empty() { + meta.push(" ".into()); + meta.push("•".dim()); + meta.push(" ".into()); + } + meta.push(when); + let meta_line = Line::from(meta); + + // Subline: summary when present; otherwise show "no diff" + let sub = if t.summary.files_changed > 0 + || t.summary.lines_added > 0 + || t.summary.lines_removed > 0 + { + let adds = t.summary.lines_added; + let dels = t.summary.lines_removed; + let files = t.summary.files_changed; + Line::from(vec![ + format!("+{adds}").green(), + "/".into(), + format!("−{dels}").red(), + " ".into(), + "•".dim(), + " ".into(), + format!("{files}").into(), + " ".into(), + "files".dim(), + ]) + } else { + Line::from("no diff".to_string().dim()) + }; + + // Insert a blank spacer line 
after the summary to separate tasks + let spacer = Line::from(""); + ListItem::new(vec![title, meta_line, sub, spacer]) +} + +fn format_relative_time(ts: chrono::DateTime) -> String { + let now = Utc::now(); + let mut secs = (now - ts).num_seconds(); + if secs < 0 { + secs = 0; + } + if secs < 60 { + return format!("{secs}s ago"); + } + let mins = secs / 60; + if mins < 60 { + return format!("{mins}m ago"); + } + let hours = mins / 60; + if hours < 24 { + return format!("{hours}h ago"); + } + let local = ts.with_timezone(&Local); + local.format("%b %e %H:%M").to_string() +} + +fn draw_inline_spinner( + frame: &mut Frame, + area: Rect, + state: &mut throbber_widgets_tui::ThrobberState, + label: &str, +) { + use ratatui::style::Style; + use throbber_widgets_tui::BRAILLE_EIGHT; + use throbber_widgets_tui::Throbber; + use throbber_widgets_tui::WhichUse; + let w = Throbber::default() + .label(label) + .style(Style::default().cyan()) + .throbber_style(Style::default().magenta().bold()) + .throbber_set(BRAILLE_EIGHT) + .use_type(WhichUse::Spin); + frame.render_stateful_widget(w, area, state); +} + +fn draw_centered_spinner( + frame: &mut Frame, + area: Rect, + state: &mut throbber_widgets_tui::ThrobberState, + label: &str, +) { + // Center a 1xN throbber within the given rect + let rows = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Percentage(50), + Constraint::Length(1), + Constraint::Percentage(49), + ]) + .split(area); + let cols = Layout::default() + .direction(Direction::Horizontal) + .constraints([ + Constraint::Percentage(50), + Constraint::Length(18), + Constraint::Percentage(50), + ]) + .split(rows[1]); + draw_inline_spinner(frame, cols[1], state, label); +} + +// Styling helpers for diff rendering live inline where used. + +pub fn draw_env_modal(frame: &mut Frame, area: Rect, app: &mut App) { + use ratatui::widgets::Wrap; + + // Use shared overlay geometry and padding. + let inner = overlay_outer(area); + + // Title: primary only; move long hints to a subheader inside content. 
+ let title = Line::from(vec!["Select Environment".magenta().bold()]); + let block = overlay_block().title(title); + + frame.render_widget(Clear, inner); + frame.render_widget(block.clone(), inner); + let content = overlay_content(inner); + + if app.env_loading { + draw_centered_spinner(frame, content, &mut app.throbber, "Loading environments…"); + return; + } + + // Layout: subheader + search + results list + let rows = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Length(1), // subheader + Constraint::Length(1), // search + Constraint::Min(1), // list + ]) + .split(content); + + // Subheader with usage hints (dim cyan) + let subheader = Paragraph::new(Line::from( + "Type to search, Enter select, Esc cancel; r refresh" + .cyan() + .dim(), + )) + .wrap(Wrap { trim: true }); + frame.render_widget(subheader, rows[0]); + + let query = app + .env_modal + .as_ref() + .map(|m| m.query.clone()) + .unwrap_or_default(); + let ql = query.to_lowercase(); + let search = Paragraph::new(format!("Search: {query}")).wrap(Wrap { trim: true }); + frame.render_widget(search, rows[1]); + + // Filter environments by query (case-insensitive substring over label/id/hints) + let envs: Vec<&crate::app::EnvironmentRow> = app + .environments + .iter() + .filter(|e| { + if ql.is_empty() { + return true; + } + let mut hay = String::new(); + if let Some(l) = &e.label { + hay.push_str(&l.to_lowercase()); + hay.push(' '); + } + hay.push_str(&e.id.to_lowercase()); + if let Some(h) = &e.repo_hints { + hay.push(' '); + hay.push_str(&h.to_lowercase()); + } + hay.contains(&ql) + }) + .collect(); + + let mut items: Vec = Vec::new(); + items.push(ListItem::new(Line::from("All Environments (Global)"))); + for env in envs.iter() { + let primary = env.label.clone().unwrap_or_else(|| "".to_string()); + let mut spans: Vec = vec![primary.into()]; + if env.is_pinned { + spans.push(" ".into()); + spans.push("PINNED".magenta().bold()); + } + spans.push(" ".into()); + spans.push(env.id.clone().dim()); + if let Some(hint) = &env.repo_hints { + spans.push(" ".into()); + spans.push(hint.clone().dim()); + } + items.push(ListItem::new(Line::from(spans))); + } + + let sel_desired = app.env_modal.as_ref().map(|m| m.selected).unwrap_or(0); + let sel = sel_desired.min(envs.len()); + let mut list_state = ListState::default().with_selected(Some(sel)); + let list = List::new(items) + .highlight_symbol("› ") + .highlight_style(Style::default().bold()) + .block(Block::default().borders(Borders::NONE)); + frame.render_stateful_widget(list, rows[2], &mut list_state); +} + +pub fn draw_best_of_modal(frame: &mut Frame, area: Rect, app: &mut App) { + use ratatui::widgets::Wrap; + + let inner = overlay_outer(area); + const MAX_WIDTH: u16 = 40; + const MIN_WIDTH: u16 = 20; + const MAX_HEIGHT: u16 = 12; + const MIN_HEIGHT: u16 = 6; + let modal_width = inner.width.min(MAX_WIDTH).max(inner.width.min(MIN_WIDTH)); + let modal_height = inner + .height + .min(MAX_HEIGHT) + .max(inner.height.min(MIN_HEIGHT)); + let modal_x = inner.x + (inner.width.saturating_sub(modal_width)) / 2; + let modal_y = inner.y + (inner.height.saturating_sub(modal_height)) / 2; + let modal_area = Rect::new(modal_x, modal_y, modal_width, modal_height); + let title = Line::from(vec!["Parallel Attempts".magenta().bold()]); + let block = overlay_block().title(title); + + frame.render_widget(Clear, modal_area); + frame.render_widget(block.clone(), modal_area); + let content = overlay_content(modal_area); + + let rows = Layout::default() + 
        .direction(Direction::Vertical)
+        .constraints([Constraint::Length(2), Constraint::Min(1)])
+        .split(content);
+
+    let hint = Paragraph::new(Line::from("Use ↑/↓ to choose, 1-4 jump".cyan().dim()))
+        .wrap(Wrap { trim: true });
+    frame.render_widget(hint, rows[0]);
+
+    let selected = app.best_of_modal.as_ref().map(|m| m.selected).unwrap_or(0);
+    let options = [1usize, 2, 3, 4];
+    let mut items: Vec<ListItem> = Vec::new();
+    for &attempts in &options {
+        let noun = if attempts == 1 { "attempt" } else { "attempts" };
+        let mut spans: Vec<Span> = vec![format!("{attempts} {noun:<8}").into()];
+        spans.push(" ".into());
+        spans.push(format!("{attempts}x parallel").dim());
+        if attempts == app.best_of_n {
+            spans.push(" ".into());
+            spans.push("Current".magenta().bold());
+        }
+        items.push(ListItem::new(Line::from(spans)));
+    }
+    let sel = selected.min(options.len().saturating_sub(1));
+    let mut list_state = ListState::default().with_selected(Some(sel));
+    let list = List::new(items)
+        .highlight_symbol("› ")
+        .highlight_style(Style::default().bold())
+        .block(Block::default().borders(Borders::NONE));
+    frame.render_stateful_widget(list, rows[1], &mut list_state);
+}
diff --git a/codex-rs/cloud-tasks/src/util.rs b/codex-rs/cloud-tasks/src/util.rs
new file mode 100644
index 00000000000..8003a02f1e5
--- /dev/null
+++ b/codex-rs/cloud-tasks/src/util.rs
@@ -0,0 +1,93 @@
+use base64::Engine as _;
+use chrono::Utc;
+use reqwest::header::HeaderMap;
+
+pub fn set_user_agent_suffix(suffix: &str) {
+    if let Ok(mut guard) = codex_core::default_client::USER_AGENT_SUFFIX.lock() {
+        guard.replace(suffix.to_string());
+    }
+}
+
+pub fn append_error_log(message: impl AsRef<str>) {
+    let ts = Utc::now().to_rfc3339();
+    if let Ok(mut f) = std::fs::OpenOptions::new()
+        .create(true)
+        .append(true)
+        .open("error.log")
+    {
+        use std::io::Write as _;
+        let _ = writeln!(f, "[{ts}] {}", message.as_ref());
+    }
+}
+
+/// Normalize the configured base URL to a canonical form used by the backend client.
+/// - trims trailing '/'
+/// - appends '/backend-api' for ChatGPT hosts when missing
+pub fn normalize_base_url(input: &str) -> String {
+    let mut base_url = input.to_string();
+    while base_url.ends_with('/') {
+        base_url.pop();
+    }
+    if (base_url.starts_with("https://chatgpt.com")
+        || base_url.starts_with("https://chat.openai.com"))
+        && !base_url.contains("/backend-api")
+    {
+        base_url = format!("{base_url}/backend-api");
+    }
+    base_url
+}
+
+/// Extract the ChatGPT account id from a JWT token, when present.
+pub fn extract_chatgpt_account_id(token: &str) -> Option<String> {
+    let mut parts = token.split('.');
+    let (_h, payload_b64, _s) = match (parts.next(), parts.next(), parts.next()) {
+        (Some(h), Some(p), Some(s)) if !h.is_empty() && !p.is_empty() && !s.is_empty() => (h, p, s),
+        _ => return None,
+    };
+    let payload_bytes = base64::engine::general_purpose::URL_SAFE_NO_PAD
+        .decode(payload_b64)
+        .ok()?;
+    let v: serde_json::Value = serde_json::from_slice(&payload_bytes).ok()?;
+    v.get("https://api.openai.com/auth")
+        .and_then(|auth| auth.get("chatgpt_account_id"))
+        .and_then(|id| id.as_str())
+        .map(str::to_string)
+}
+
+/// Build headers for ChatGPT-backed requests: `User-Agent`, optional `Authorization`,
+/// and optional `ChatGPT-Account-Id`.
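+///
+/// Illustrative call site (a sketch; the TUI composes these elsewhere):
+///
+/// ```ignore
+/// let base_url = normalize_base_url("https://chatgpt.com/");
+/// assert_eq!(base_url, "https://chatgpt.com/backend-api");
+/// let headers = build_chatgpt_headers().await;
+/// // `headers` always carries `User-Agent`; `Authorization` and
+/// // `ChatGPT-Account-Id` are added only when ChatGPT credentials are found.
+/// ```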
+pub async fn build_chatgpt_headers() -> HeaderMap { + use reqwest::header::AUTHORIZATION; + use reqwest::header::HeaderName; + use reqwest::header::HeaderValue; + use reqwest::header::USER_AGENT; + + set_user_agent_suffix("codex_cloud_tasks_tui"); + let ua = codex_core::default_client::get_codex_user_agent(); + let mut headers = HeaderMap::new(); + headers.insert( + USER_AGENT, + HeaderValue::from_str(&ua).unwrap_or(HeaderValue::from_static("codex-cli")), + ); + if let Ok(home) = codex_core::config::find_codex_home() { + let am = codex_login::AuthManager::new(home, false); + if let Some(auth) = am.auth() + && let Ok(tok) = auth.get_token().await + && !tok.is_empty() + { + let v = format!("Bearer {tok}"); + if let Ok(hv) = HeaderValue::from_str(&v) { + headers.insert(AUTHORIZATION, hv); + } + if let Some(acc) = auth + .get_account_id() + .or_else(|| extract_chatgpt_account_id(&tok)) + && let Ok(name) = HeaderName::from_bytes(b"ChatGPT-Account-Id") + && let Ok(hv) = HeaderValue::from_str(&acc) + { + headers.insert(name, hv); + } + } + } + headers +} diff --git a/codex-rs/cloud-tasks/tests/env_filter.rs b/codex-rs/cloud-tasks/tests/env_filter.rs new file mode 100644 index 00000000000..8c737c6c284 --- /dev/null +++ b/codex-rs/cloud-tasks/tests/env_filter.rs @@ -0,0 +1,22 @@ +use codex_cloud_tasks_client::CloudBackend; +use codex_cloud_tasks_client::MockClient; + +#[tokio::test] +async fn mock_backend_varies_by_env() { + let client = MockClient; + + let root = CloudBackend::list_tasks(&client, None).await.unwrap(); + assert!(root.iter().any(|t| t.title.contains("Update README"))); + + let a = CloudBackend::list_tasks(&client, Some("env-A")) + .await + .unwrap(); + assert_eq!(a.len(), 1); + assert_eq!(a[0].title, "A: First"); + + let b = CloudBackend::list_tasks(&client, Some("env-B")) + .await + .unwrap(); + assert_eq!(b.len(), 2); + assert!(b[0].title.starts_with("B: ")); +} diff --git a/codex-rs/code b/codex-rs/code new file mode 100644 index 00000000000..e69de29bb2d diff --git a/codex-rs/codex-backend-openapi-models/Cargo.toml b/codex-rs/codex-backend-openapi-models/Cargo.toml new file mode 100644 index 00000000000..811ee72d8f5 --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "codex-backend-openapi-models" +version = { workspace = true } +edition = "2024" + +[lib] +name = "codex_backend_openapi_models" +path = "src/lib.rs" + +# Important: generated code often violates our workspace lints. +# Allow unwrap/expect in this crate so the workspace builds cleanly +# after models are regenerated. +# Lint overrides are applied in src/lib.rs via crate attributes + +[dependencies] +serde = { version = "1", features = ["derive"] } +serde_json = "1" diff --git a/codex-rs/codex-backend-openapi-models/src/lib.rs b/codex-rs/codex-backend-openapi-models/src/lib.rs new file mode 100644 index 00000000000..f9e6d52fa8f --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/lib.rs @@ -0,0 +1,6 @@ +#![allow(clippy::unwrap_used, clippy::expect_used)] + +// Re-export generated OpenAPI models. +// The regen script populates `src/models/*.rs` and writes `src/models/mod.rs`. +// This module intentionally contains no hand-written types. 
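+//
+// Downstream crates consume the re-exports, e.g. (illustrative):
+//     use codex_backend_openapi_models::models::GitPullRequest;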
+pub mod models; diff --git a/codex-rs/codex-backend-openapi-models/src/models/code_task_details_response.rs b/codex-rs/codex-backend-openapi-models/src/models/code_task_details_response.rs new file mode 100644 index 00000000000..725b3a37371 --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/models/code_task_details_response.rs @@ -0,0 +1,42 @@ +/* + * codex-backend + * + * codex-backend + * + * The version of the OpenAPI document: 0.0.1 + * + * Generated by: https://openapi-generator.tech + */ + +use crate::models; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)] +pub struct CodeTaskDetailsResponse { + #[serde(rename = "task")] + pub task: Box, + #[serde(rename = "current_user_turn", skip_serializing_if = "Option::is_none")] + pub current_user_turn: Option>, + #[serde( + rename = "current_assistant_turn", + skip_serializing_if = "Option::is_none" + )] + pub current_assistant_turn: Option>, + #[serde( + rename = "current_diff_task_turn", + skip_serializing_if = "Option::is_none" + )] + pub current_diff_task_turn: Option>, +} + +impl CodeTaskDetailsResponse { + pub fn new(task: models::TaskResponse) -> CodeTaskDetailsResponse { + CodeTaskDetailsResponse { + task: Box::new(task), + current_user_turn: None, + current_assistant_turn: None, + current_diff_task_turn: None, + } + } +} diff --git a/codex-rs/codex-backend-openapi-models/src/models/external_pull_request_response.rs b/codex-rs/codex-backend-openapi-models/src/models/external_pull_request_response.rs new file mode 100644 index 00000000000..92b56db2ca9 --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/models/external_pull_request_response.rs @@ -0,0 +1,40 @@ +/* + * codex-backend + * + * codex-backend + * + * The version of the OpenAPI document: 0.0.1 + * + * Generated by: https://openapi-generator.tech + */ + +use crate::models; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)] +pub struct ExternalPullRequestResponse { + #[serde(rename = "id")] + pub id: String, + #[serde(rename = "assistant_turn_id")] + pub assistant_turn_id: String, + #[serde(rename = "pull_request")] + pub pull_request: Box, + #[serde(rename = "codex_updated_sha", skip_serializing_if = "Option::is_none")] + pub codex_updated_sha: Option, +} + +impl ExternalPullRequestResponse { + pub fn new( + id: String, + assistant_turn_id: String, + pull_request: models::GitPullRequest, + ) -> ExternalPullRequestResponse { + ExternalPullRequestResponse { + id, + assistant_turn_id, + pull_request: Box::new(pull_request), + codex_updated_sha: None, + } + } +} diff --git a/codex-rs/codex-backend-openapi-models/src/models/git_pull_request.rs b/codex-rs/codex-backend-openapi-models/src/models/git_pull_request.rs new file mode 100644 index 00000000000..a7e995f3ef2 --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/models/git_pull_request.rs @@ -0,0 +1,77 @@ +/* + * codex-backend + * + * codex-backend + * + * The version of the OpenAPI document: 0.0.1 + * + * Generated by: https://openapi-generator.tech + */ + +use serde::Deserialize; +use serde::Serialize; + +#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)] +pub struct GitPullRequest { + #[serde(rename = "number")] + pub number: i32, + #[serde(rename = "url")] + pub url: String, + #[serde(rename = "state")] + pub state: String, + #[serde(rename = "merged")] + pub merged: bool, + #[serde(rename = "mergeable")] + pub mergeable: bool, + 
#[serde(rename = "draft", skip_serializing_if = "Option::is_none")] + pub draft: Option, + #[serde(rename = "title", skip_serializing_if = "Option::is_none")] + pub title: Option, + #[serde(rename = "body", skip_serializing_if = "Option::is_none")] + pub body: Option, + #[serde(rename = "base", skip_serializing_if = "Option::is_none")] + pub base: Option, + #[serde(rename = "head", skip_serializing_if = "Option::is_none")] + pub head: Option, + #[serde(rename = "base_sha", skip_serializing_if = "Option::is_none")] + pub base_sha: Option, + #[serde(rename = "head_sha", skip_serializing_if = "Option::is_none")] + pub head_sha: Option, + #[serde(rename = "merge_commit_sha", skip_serializing_if = "Option::is_none")] + pub merge_commit_sha: Option, + #[serde(rename = "comments", skip_serializing_if = "Option::is_none")] + pub comments: Option, + #[serde(rename = "diff", skip_serializing_if = "Option::is_none")] + pub diff: Option, + #[serde(rename = "user", skip_serializing_if = "Option::is_none")] + pub user: Option, +} + +impl GitPullRequest { + pub fn new( + number: i32, + url: String, + state: String, + merged: bool, + mergeable: bool, + ) -> GitPullRequest { + GitPullRequest { + number, + url, + state, + merged, + mergeable, + draft: None, + title: None, + body: None, + base: None, + head: None, + base_sha: None, + head_sha: None, + merge_commit_sha: None, + comments: None, + diff: None, + user: None, + } + } +} diff --git a/codex-rs/codex-backend-openapi-models/src/models/mod.rs b/codex-rs/codex-backend-openapi-models/src/models/mod.rs new file mode 100644 index 00000000000..e2cb972f101 --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/models/mod.rs @@ -0,0 +1,22 @@ +// Curated minimal export list for current workspace usage. +// NOTE: This file was previously auto-generated by the OpenAPI generator. 
+// Currently export only the types referenced by the workspace +// The process for this will change + +pub mod code_task_details_response; +pub use self::code_task_details_response::CodeTaskDetailsResponse; + +pub mod task_response; +pub use self::task_response::TaskResponse; + +pub mod external_pull_request_response; +pub use self::external_pull_request_response::ExternalPullRequestResponse; + +pub mod git_pull_request; +pub use self::git_pull_request::GitPullRequest; + +pub mod task_list_item; +pub use self::task_list_item::TaskListItem; + +pub mod paginated_list_task_list_item_; +pub use self::paginated_list_task_list_item_::PaginatedListTaskListItem; diff --git a/codex-rs/codex-backend-openapi-models/src/models/paginated_list_task_list_item_.rs b/codex-rs/codex-backend-openapi-models/src/models/paginated_list_task_list_item_.rs new file mode 100644 index 00000000000..5af75afaabf --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/models/paginated_list_task_list_item_.rs @@ -0,0 +1,30 @@ +/* + * codex-backend + * + * codex-backend + * + * The version of the OpenAPI document: 0.0.1 + * + * Generated by: https://openapi-generator.tech + */ + +use crate::models; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)] +pub struct PaginatedListTaskListItem { + #[serde(rename = "items")] + pub items: Vec, + #[serde(rename = "cursor", skip_serializing_if = "Option::is_none")] + pub cursor: Option, +} + +impl PaginatedListTaskListItem { + pub fn new(items: Vec) -> PaginatedListTaskListItem { + PaginatedListTaskListItem { + items, + cursor: None, + } + } +} diff --git a/codex-rs/codex-backend-openapi-models/src/models/task_list_item.rs b/codex-rs/codex-backend-openapi-models/src/models/task_list_item.rs new file mode 100644 index 00000000000..5f34738a43f --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/models/task_list_item.rs @@ -0,0 +1,63 @@ +/* + * codex-backend + * + * codex-backend + * + * The version of the OpenAPI document: 0.0.1 + * + * Generated by: https://openapi-generator.tech + */ + +use crate::models; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)] +pub struct TaskListItem { + #[serde(rename = "id")] + pub id: String, + #[serde(rename = "title")] + pub title: String, + #[serde( + rename = "has_generated_title", + skip_serializing_if = "Option::is_none" + )] + pub has_generated_title: Option, + #[serde(rename = "updated_at", skip_serializing_if = "Option::is_none")] + pub updated_at: Option, + #[serde(rename = "created_at", skip_serializing_if = "Option::is_none")] + pub created_at: Option, + #[serde( + rename = "task_status_display", + skip_serializing_if = "Option::is_none" + )] + pub task_status_display: Option>, + #[serde(rename = "archived")] + pub archived: bool, + #[serde(rename = "has_unread_turn")] + pub has_unread_turn: bool, + #[serde(rename = "pull_requests", skip_serializing_if = "Option::is_none")] + pub pull_requests: Option>, +} + +impl TaskListItem { + pub fn new( + id: String, + title: String, + has_generated_title: Option, + archived: bool, + has_unread_turn: bool, + ) -> TaskListItem { + TaskListItem { + id, + title, + has_generated_title, + updated_at: None, + created_at: None, + task_status_display: None, + archived, + has_unread_turn, + pull_requests: None, + } + } +} diff --git a/codex-rs/codex-backend-openapi-models/src/models/task_response.rs 
b/codex-rs/codex-backend-openapi-models/src/models/task_response.rs new file mode 100644 index 00000000000..6251b56b7ea --- /dev/null +++ b/codex-rs/codex-backend-openapi-models/src/models/task_response.rs @@ -0,0 +1,62 @@ +/* + * codex-backend + * + * codex-backend + * + * The version of the OpenAPI document: 0.0.1 + * + * Generated by: https://openapi-generator.tech + */ + +use crate::models; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Clone, Default, Debug, PartialEq, Serialize, Deserialize)] +pub struct TaskResponse { + #[serde(rename = "id")] + pub id: String, + #[serde(rename = "created_at", skip_serializing_if = "Option::is_none")] + pub created_at: Option, + #[serde(rename = "title")] + pub title: String, + #[serde( + rename = "has_generated_title", + skip_serializing_if = "Option::is_none" + )] + pub has_generated_title: Option, + #[serde(rename = "current_turn_id", skip_serializing_if = "Option::is_none")] + pub current_turn_id: Option, + #[serde(rename = "has_unread_turn", skip_serializing_if = "Option::is_none")] + pub has_unread_turn: Option, + #[serde( + rename = "denormalized_metadata", + skip_serializing_if = "Option::is_none" + )] + pub denormalized_metadata: Option>, + #[serde(rename = "archived")] + pub archived: bool, + #[serde(rename = "external_pull_requests")] + pub external_pull_requests: Vec, +} + +impl TaskResponse { + pub fn new( + id: String, + title: String, + archived: bool, + external_pull_requests: Vec, + ) -> TaskResponse { + TaskResponse { + id, + created_at: None, + title, + has_generated_title: None, + current_turn_id: None, + has_unread_turn: None, + denormalized_metadata: None, + archived, + external_pull_requests, + } + } +} diff --git a/codex-rs/common/Cargo.toml b/codex-rs/common/Cargo.toml index 3ce84a6f502..d8f30cc09dd 100644 --- a/codex-rs/common/Cargo.toml +++ b/codex-rs/common/Cargo.toml @@ -10,6 +10,7 @@ workspace = true clap = { workspace = true, features = ["derive", "wrap_help"], optional = true } codex-core = { workspace = true } codex-protocol = { workspace = true } +codex-app-server-protocol = { workspace = true } serde = { workspace = true, optional = true } toml = { workspace = true, optional = true } diff --git a/codex-rs/common/src/model_presets.rs b/codex-rs/common/src/model_presets.rs index 8eb5beacda5..9954ad18908 100644 --- a/codex-rs/common/src/model_presets.rs +++ b/codex-rs/common/src/model_presets.rs @@ -1,5 +1,5 @@ +use codex_app_server_protocol::AuthMode; use codex_core::protocol_config_types::ReasoningEffort; -use codex_protocol::mcp_protocol::AuthMode; /// A simple preset pairing a model slug with a reasoning effort. 
#[derive(Debug, Clone, Copy)] @@ -20,49 +20,49 @@ const PRESETS: &[ModelPreset] = &[ ModelPreset { id: "gpt-5-codex-low", label: "gpt-5-codex low", - description: "", + description: "Fastest responses with limited reasoning", model: "gpt-5-codex", effort: Some(ReasoningEffort::Low), }, ModelPreset { id: "gpt-5-codex-medium", label: "gpt-5-codex medium", - description: "", + description: "Dynamically adjusts reasoning based on the task", model: "gpt-5-codex", - effort: None, + effort: Some(ReasoningEffort::Medium), }, ModelPreset { id: "gpt-5-codex-high", label: "gpt-5-codex high", - description: "", + description: "Maximizes reasoning depth for complex or ambiguous problems", model: "gpt-5-codex", effort: Some(ReasoningEffort::High), }, ModelPreset { id: "gpt-5-minimal", label: "gpt-5 minimal", - description: "— fastest responses with limited reasoning; ideal for coding, instructions, or lightweight tasks", + description: "Fastest responses with little reasoning", model: "gpt-5", effort: Some(ReasoningEffort::Minimal), }, ModelPreset { id: "gpt-5-low", label: "gpt-5 low", - description: "— balances speed with some reasoning; useful for straightforward queries and short explanations", + description: "Balances speed with some reasoning; useful for straightforward queries and short explanations", model: "gpt-5", effort: Some(ReasoningEffort::Low), }, ModelPreset { id: "gpt-5-medium", label: "gpt-5 medium", - description: "— default setting; provides a solid balance of reasoning depth and latency for general-purpose tasks", + description: "Provides a solid balance of reasoning depth and latency for general-purpose tasks", model: "gpt-5", effort: Some(ReasoningEffort::Medium), }, ModelPreset { id: "gpt-5-high", label: "gpt-5 high", - description: "— maximizes reasoning depth for complex or ambiguous problems", + description: "Maximizes reasoning depth for complex or ambiguous problems", model: "gpt-5", effort: Some(ReasoningEffort::High), }, diff --git a/codex-rs/core/Cargo.toml b/codex-rs/core/Cargo.toml index d9ded082838..8c56e7d1327 100644 --- a/codex-rs/core/Cargo.toml +++ b/codex-rs/core/Cargo.toml @@ -15,17 +15,23 @@ workspace = true anyhow = { workspace = true } askama = { workspace = true } async-channel = { workspace = true } +async-trait = { workspace = true } base64 = { workspace = true } bytes = { workspace = true } chrono = { workspace = true, features = ["serde"] } codex-apply-patch = { workspace = true } codex-file-search = { workspace = true } codex-mcp-client = { workspace = true } +codex-rmcp-client = { workspace = true } codex-protocol = { workspace = true } +codex-app-server-protocol = { workspace = true } +codex-otel = { workspace = true, features = ["otel"] } dirs = { workspace = true } +dunce = { workspace = true } env-flags = { workspace = true } eventsource-stream = { workspace = true } futures = { workspace = true } +indexmap = { workspace = true } libc = { workspace = true } mcp-types = { workspace = true } os_info = { workspace = true } @@ -80,6 +86,7 @@ openssl-sys = { workspace = true, features = ["vendored"] } [dev-dependencies] assert_cmd = { workspace = true } core_test_support = { workspace = true } +escargot = { workspace = true } maplit = { workspace = true } predicates = { workspace = true } pretty_assertions = { workspace = true } @@ -87,6 +94,7 @@ tempfile = { workspace = true } tokio-test = { workspace = true } walkdir = { workspace = true } wiremock = { workspace = true } +tracing-test = { workspace = true, features = ["no-env-filter"] } 
[package.metadata.cargo-shear] ignored = ["openssl-sys"] diff --git a/codex-rs/core/gpt_5_codex_prompt.md b/codex-rs/core/gpt_5_codex_prompt.md index 9a298f460f4..33ab98807d2 100644 --- a/codex-rs/core/gpt_5_codex_prompt.md +++ b/codex-rs/core/gpt_5_codex_prompt.md @@ -89,7 +89,7 @@ You are producing plain text that will later be styled by the CLI. Follow these - Headers: optional; short Title Case (1-3 words) wrapped in **…**; no blank line before the first bullet; add only if they truly help. - Bullets: use - ; merge related points; keep to one line when possible; 4–6 per list ordered by importance; keep phrasing consistent. - Monospace: backticks for commands/paths/env vars/code ids and inline examples; use for literal keyword bullets; never combine with **. -- Code samples or multi-line snippets should be wrapped in fenced code blocks; add a language hint whenever obvious. +- Code samples or multi-line snippets should be wrapped in fenced code blocks; include an info string as often as possible. - Structure: group related bullets; order sections general → specific → supporting; for subsections, start with a bolded keyword bullet, then items; match complexity to the task. - Tone: collaborative, concise, factual; present tense, active voice; self‑contained; no "above/below"; parallel wording. - Don'ts: no nested bullets/hierarchies; no ANSI codes; don't cram unrelated keywords; keep keyword lists short—wrap/reformat if long; avoid naming formatting styles in answers. diff --git a/codex-rs/core/src/apply_patch.rs b/codex-rs/core/src/apply_patch.rs index 0afc06e99cf..836b859633d 100644 --- a/codex-rs/core/src/apply_patch.rs +++ b/codex-rs/core/src/apply_patch.rs @@ -1,13 +1,12 @@ use crate::codex::Session; use crate::codex::TurnContext; +use crate::function_tool::FunctionCallError; use crate::protocol::FileChange; use crate::protocol::ReviewDecision; use crate::safety::SafetyCheck; use crate::safety::assess_patch_safety; use codex_apply_patch::ApplyPatchAction; use codex_apply_patch::ApplyPatchFileChange; -use codex_protocol::models::FunctionCallOutputPayload; -use codex_protocol::models::ResponseInputItem; use std::collections::HashMap; use std::path::PathBuf; @@ -17,7 +16,7 @@ pub(crate) enum InternalApplyPatchInvocation { /// The `apply_patch` call was handled programmatically, without any sort /// of sandbox, because the user explicitly approved it. This is the /// result to use with the `shell` function call that contained `apply_patch`. - Output(ResponseInputItem), + Output(Result), /// The `apply_patch` call was approved, either automatically because it /// appears that it should be allowed based on the user's sandbox policy @@ -33,12 +32,6 @@ pub(crate) struct ApplyPatchExec { pub(crate) user_explicitly_approved_this_action: bool, } -impl From for InternalApplyPatchInvocation { - fn from(item: ResponseInputItem) -> Self { - InternalApplyPatchInvocation::Output(item) - } -} - pub(crate) async fn apply_patch( sess: &Session, turn_context: &TurnContext, @@ -52,12 +45,13 @@ pub(crate) async fn apply_patch( &turn_context.sandbox_policy, &turn_context.cwd, ) { - SafetyCheck::AutoApprove { .. } => { - InternalApplyPatchInvocation::DelegateToExec(ApplyPatchExec { - action, - user_explicitly_approved_this_action: false, - }) - } + SafetyCheck::AutoApprove { + user_explicitly_approved, + .. 
+ } => InternalApplyPatchInvocation::DelegateToExec(ApplyPatchExec { + action, + user_explicitly_approved_this_action: user_explicitly_approved, + }), SafetyCheck::AskUser => { // Compute a readable summary of path changes to include in the // approval request so the user can make an informed decision. @@ -77,25 +71,15 @@ pub(crate) async fn apply_patch( }) } ReviewDecision::Denied | ReviewDecision::Abort => { - ResponseInputItem::FunctionCallOutput { - call_id: call_id.to_owned(), - output: FunctionCallOutputPayload { - content: "patch rejected by user".to_string(), - success: Some(false), - }, - } - .into() + InternalApplyPatchInvocation::Output(Err(FunctionCallError::RespondToModel( + "patch rejected by user".to_string(), + ))) } } } - SafetyCheck::Reject { reason } => ResponseInputItem::FunctionCallOutput { - call_id: call_id.to_owned(), - output: FunctionCallOutputPayload { - content: format!("patch rejected: {reason}"), - success: Some(false), - }, - } - .into(), + SafetyCheck::Reject { reason } => InternalApplyPatchInvocation::Output(Err( + FunctionCallError::RespondToModel(format!("patch rejected: {reason}")), + )), } } diff --git a/codex-rs/core/src/auth.rs b/codex-rs/core/src/auth.rs index a2158310dcc..4eea313e667 100644 --- a/codex-rs/core/src/auth.rs +++ b/codex-rs/core/src/auth.rs @@ -15,7 +15,7 @@ use std::sync::Arc; use std::sync::Mutex; use std::time::Duration; -use codex_protocol::mcp_protocol::AuthMode; +use codex_app_server_protocol::AuthMode; use crate::token_data::PlanType; use crate::token_data::TokenData; @@ -73,7 +73,7 @@ impl CodexAuth { /// Loads the available auth information from the auth.json. pub fn from_codex_home(codex_home: &Path) -> std::io::Result> { - load_auth(codex_home) + load_auth(codex_home, false) } pub async fn get_token_data(&self) -> Result { @@ -188,6 +188,7 @@ impl CodexAuth { } pub const OPENAI_API_KEY_ENV_VAR: &str = "OPENAI_API_KEY"; +pub const CODEX_API_KEY_ENV_VAR: &str = "CODEX_API_KEY"; pub fn read_openai_api_key_from_env() -> Option { env::var(OPENAI_API_KEY_ENV_VAR) @@ -196,6 +197,13 @@ pub fn read_openai_api_key_from_env() -> Option { .filter(|value| !value.is_empty()) } +pub fn read_codex_api_key_from_env() -> Option { + env::var(CODEX_API_KEY_ENV_VAR) + .ok() + .map(|value| value.trim().to_string()) + .filter(|value| !value.is_empty()) +} + pub fn get_auth_file(codex_home: &Path) -> PathBuf { codex_home.join("auth.json") } @@ -221,7 +229,18 @@ pub fn login_with_api_key(codex_home: &Path, api_key: &str) -> std::io::Result<( write_auth_json(&get_auth_file(codex_home), &auth_dot_json) } -fn load_auth(codex_home: &Path) -> std::io::Result> { +fn load_auth( + codex_home: &Path, + enable_codex_api_key_env: bool, +) -> std::io::Result> { + if enable_codex_api_key_env && let Some(api_key) = read_codex_api_key_from_env() { + let client = crate::default_client::create_client(); + return Ok(Some(CodexAuth::from_api_key_with_client( + api_key.as_str(), + client, + ))); + } + let auth_file = get_auth_file(codex_home); let client = crate::default_client::create_client(); let auth_dot_json = match try_read_auth_json(&auth_file) { @@ -267,6 +286,9 @@ pub fn try_read_auth_json(auth_file: &Path) -> std::io::Result { } pub fn write_auth_json(auth_file: &Path, auth_dot_json: &AuthDotJson) -> std::io::Result<()> { + if let Some(parent) = auth_file.parent() { + std::fs::create_dir_all(parent)?; + } let json_data = serde_json::to_string_pretty(auth_dot_json)?; let mut options = OpenOptions::new(); options.truncate(true).write(true).create(true); @@ 
-452,7 +474,7 @@ mod tests { auth_dot_json, auth_file: _, .. - } = super::load_auth(codex_home.path()).unwrap().unwrap(); + } = super::load_auth(codex_home.path(), false).unwrap().unwrap(); assert_eq!(None, api_key); assert_eq!(AuthMode::ChatGPT, mode); @@ -491,7 +513,7 @@ mod tests { ) .unwrap(); - let auth = super::load_auth(dir.path()).unwrap().unwrap(); + let auth = super::load_auth(dir.path(), false).unwrap().unwrap(); assert_eq!(auth.mode, AuthMode::ApiKey); assert_eq!(auth.api_key, Some("sk-test-key".to_string())); @@ -574,6 +596,7 @@ mod tests { pub struct AuthManager { codex_home: PathBuf, inner: RwLock, + enable_codex_api_key_env: bool, } impl AuthManager { @@ -581,11 +604,14 @@ impl AuthManager { /// preferred auth method. Errors loading auth are swallowed; `auth()` will /// simply return `None` in that case so callers can treat it as an /// unauthenticated state. - pub fn new(codex_home: PathBuf) -> Self { - let auth = CodexAuth::from_codex_home(&codex_home).ok().flatten(); + pub fn new(codex_home: PathBuf, enable_codex_api_key_env: bool) -> Self { + let auth = load_auth(&codex_home, enable_codex_api_key_env) + .ok() + .flatten(); Self { codex_home, inner: RwLock::new(CachedAuth { auth }), + enable_codex_api_key_env, } } @@ -595,6 +621,7 @@ impl AuthManager { Arc::new(Self { codex_home: PathBuf::new(), inner: RwLock::new(cached), + enable_codex_api_key_env: false, }) } @@ -606,7 +633,9 @@ impl AuthManager { /// Force a reload of the auth information from auth.json. Returns /// whether the auth value changed. pub fn reload(&self) -> bool { - let new_auth = CodexAuth::from_codex_home(&self.codex_home).ok().flatten(); + let new_auth = load_auth(&self.codex_home, self.enable_codex_api_key_env) + .ok() + .flatten(); if let Ok(mut guard) = self.inner.write() { let changed = !AuthManager::auths_equal(&guard.auth, &new_auth); guard.auth = new_auth; @@ -625,8 +654,8 @@ impl AuthManager { } /// Convenience constructor returning an `Arc` wrapper. - pub fn shared(codex_home: PathBuf) -> Arc { - Arc::new(Self::new(codex_home)) + pub fn shared(codex_home: PathBuf, enable_codex_api_key_env: bool) -> Arc { + Arc::new(Self::new(codex_home, enable_codex_api_key_env)) } /// Attempt to refresh the current auth token (if any). 
On success, reload diff --git a/codex-rs/core/src/bash.rs b/codex-rs/core/src/bash.rs index f25b4f7f67e..c87f2764b1c 100644 --- a/codex-rs/core/src/bash.rs +++ b/codex-rs/core/src/bash.rs @@ -88,6 +88,21 @@ pub fn try_parse_word_only_commands_sequence(tree: &Tree, src: &str) -> Option Option>> { + let [bash, flag, script] = command else { + return None; + }; + + if bash != "bash" || flag != "-lc" { + return None; + } + + let tree = try_parse_bash(script)?; + try_parse_word_only_commands_sequence(&tree, script) +} + fn parse_plain_command_from_node(cmd: tree_sitter::Node, src: &str) -> Option> { if cmd.kind() != "command" { return None; diff --git a/codex-rs/core/src/chat_completions.rs b/codex-rs/core/src/chat_completions.rs index a32b59b552c..feedf3c4e54 100644 --- a/codex-rs/core/src/chat_completions.rs +++ b/codex-rs/core/src/chat_completions.rs @@ -1,6 +1,21 @@ use std::time::Duration; +use crate::ModelProviderInfo; +use crate::client_common::Prompt; +use crate::client_common::ResponseEvent; +use crate::client_common::ResponseStream; +use crate::error::CodexErr; +use crate::error::Result; +use crate::error::RetryLimitReachedError; +use crate::error::UnexpectedResponseError; +use crate::model_family::ModelFamily; +use crate::openai_tools::create_tools_json_for_chat_completions_api; +use crate::util::backoff; use bytes::Bytes; +use codex_otel::otel_event_manager::OtelEventManager; +use codex_protocol::models::ContentItem; +use codex_protocol::models::ReasoningItemContent; +use codex_protocol::models::ResponseItem; use eventsource_stream::Eventsource; use futures::Stream; use futures::StreamExt; @@ -15,26 +30,20 @@ use tokio::time::timeout; use tracing::debug; use tracing::trace; -use crate::ModelProviderInfo; -use crate::client_common::Prompt; -use crate::client_common::ResponseEvent; -use crate::client_common::ResponseStream; -use crate::error::CodexErr; -use crate::error::Result; -use crate::model_family::ModelFamily; -use crate::openai_tools::create_tools_json_for_chat_completions_api; -use crate::util::backoff; -use codex_protocol::models::ContentItem; -use codex_protocol::models::ReasoningItemContent; -use codex_protocol::models::ResponseItem; - /// Implementation for the classic Chat Completions API. 
pub(crate) async fn stream_chat_completions( prompt: &Prompt, model_family: &ModelFamily, client: &reqwest::Client, provider: &ModelProviderInfo, + otel_event_manager: &OtelEventManager, ) -> Result { + if prompt.output_schema.is_some() { + return Err(CodexErr::UnsupportedOperation( + "output_schema is not supported for Chat Completions API".to_string(), + )); + } + // Build messages array let mut messages = Vec::::new(); @@ -288,10 +297,13 @@ pub(crate) async fn stream_chat_completions( let req_builder = provider.create_request_builder(client, &None).await?; - let res = req_builder - .header(reqwest::header::ACCEPT, "text/event-stream") - .json(&payload) - .send() + let res = otel_event_manager + .log_request(attempt, || { + req_builder + .header(reqwest::header::ACCEPT, "text/event-stream") + .json(&payload) + .send() + }) .await; match res { @@ -302,6 +314,7 @@ pub(crate) async fn stream_chat_completions( stream, tx_event, provider.stream_idle_timeout(), + otel_event_manager.clone(), )); return Ok(ResponseStream { rx_event }); } @@ -309,11 +322,18 @@ pub(crate) async fn stream_chat_completions( let status = res.status(); if !(status == StatusCode::TOO_MANY_REQUESTS || status.is_server_error()) { let body = (res.text().await).unwrap_or_default(); - return Err(CodexErr::UnexpectedStatus(status, body)); + return Err(CodexErr::UnexpectedStatus(UnexpectedResponseError { + status, + body, + request_id: None, + })); } if attempt > max_retries { - return Err(CodexErr::RetryLimit(status)); + return Err(CodexErr::RetryLimit(RetryLimitReachedError { + status, + request_id: None, + })); } let retry_after_secs = res @@ -345,6 +365,7 @@ async fn process_chat_sse( stream: S, tx_event: mpsc::Sender>, idle_timeout: Duration, + otel_event_manager: OtelEventManager, ) where S: Stream> + Unpin, { @@ -368,7 +389,10 @@ async fn process_chat_sse( let mut reasoning_text = String::new(); loop { - let sse = match timeout(idle_timeout, stream.next()).await { + let sse = match otel_event_manager + .log_sse_event(|| timeout(idle_timeout, stream.next())) + .await + { Ok(Some(Ok(ev))) => ev, Ok(Some(Err(e))) => { let _ = tx_event diff --git a/codex-rs/core/src/client.rs b/codex-rs/core/src/client.rs index 72ca770abdb..7cf60f56b9c 100644 --- a/codex-rs/core/src/client.rs +++ b/codex-rs/core/src/client.rs @@ -5,9 +5,11 @@ use std::time::Duration; use crate::AuthManager; use crate::auth::CodexAuth; +use crate::error::RetryLimitReachedError; +use crate::error::UnexpectedResponseError; use bytes::Bytes; -use codex_protocol::mcp_protocol::AuthMode; -use codex_protocol::mcp_protocol::ConversationId; +use codex_app_server_protocol::AuthMode; +use codex_protocol::ConversationId; use eventsource_stream::Eventsource; use futures::prelude::*; use regex_lite::Regex; @@ -42,10 +44,12 @@ use crate::model_provider_info::ModelProviderInfo; use crate::model_provider_info::WireApi; use crate::openai_model_info::get_model_info; use crate::openai_tools::create_tools_json_for_responses_api; -use crate::protocol::RateLimitSnapshotEvent; +use crate::protocol::RateLimitSnapshot; +use crate::protocol::RateLimitWindow; use crate::protocol::TokenUsage; use crate::token_data::PlanType; use crate::util::backoff; +use codex_otel::otel_event_manager::OtelEventManager; use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig; use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig; use codex_protocol::models::ResponseItem; @@ -72,6 +76,7 @@ struct Error { pub struct ModelClient { config: Arc, auth_manager: 
Option>, + otel_event_manager: OtelEventManager, client: reqwest::Client, provider: ModelProviderInfo, conversation_id: ConversationId, @@ -83,6 +88,7 @@ impl ModelClient { pub fn new( config: Arc, auth_manager: Option>, + otel_event_manager: OtelEventManager, provider: ModelProviderInfo, effort: Option, summary: ReasoningSummaryConfig, @@ -93,6 +99,7 @@ impl ModelClient { Self { config, auth_manager, + otel_event_manager, client, provider, conversation_id, @@ -126,6 +133,7 @@ impl ModelClient { &self.config.model_family, &self.client, &self.provider, + &self.otel_event_manager, ) .await?; @@ -162,7 +170,12 @@ impl ModelClient { if let Some(path) = &*CODEX_RS_SSE_FIXTURE { // short circuit for tests warn!(path, "Streaming from fixture"); - return stream_from_fixture(path, self.provider.clone()).await; + return stream_from_fixture( + path, + self.provider.clone(), + self.otel_event_manager.clone(), + ) + .await; } let auth_manager = self.auth_manager.clone(); @@ -183,19 +196,23 @@ impl ModelClient { let input_with_instructions = prompt.get_formatted_input(); - // Only include `text.verbosity` for GPT-5 family models - let text = if self.config.model_family.family == "gpt-5" { - create_text_param_for_request(self.config.model_verbosity) - } else { - if self.config.model_verbosity.is_some() { - warn!( - "model_verbosity is set but ignored for non-gpt-5 model family: {}", - self.config.model_family.family - ); + let verbosity = match &self.config.model_family.family { + family if family == "gpt-5" => self.config.model_verbosity, + _ => { + if self.config.model_verbosity.is_some() { + warn!( + "model_verbosity is set but ignored for non-gpt-5 model family: {}", + self.config.model_family.family + ); + } + + None } - None }; + // Only include `text.verbosity` for GPT-5 family models + let text = create_text_param_for_request(verbosity, &prompt.output_schema); + // In general, we want to explicitly send `store: false` when using the Responses API, // but in practice, the Azure Responses API rejects `store: false`: // @@ -224,153 +241,183 @@ impl ModelClient { if azure_workaround { attach_item_ids(&mut payload_json, &input_with_instructions); } - let payload_body = serde_json::to_string(&payload_json)?; - let mut attempt = 0; - let max_retries = self.provider.request_max_retries(); + let max_attempts = self.provider.request_max_retries(); + for attempt in 0..=max_attempts { + match self + .attempt_stream_responses(attempt, &payload_json, &auth_manager) + .await + { + Ok(stream) => { + return Ok(stream); + } + Err(StreamAttemptError::Fatal(e)) => { + return Err(e); + } + Err(retryable_attempt_error) => { + if attempt == max_attempts { + return Err(retryable_attempt_error.into_error()); + } - loop { - attempt += 1; + tokio::time::sleep(retryable_attempt_error.delay(attempt)).await; + } + } + } - // Always fetch the latest auth in case a prior attempt refreshed the token. - let auth = auth_manager.as_ref().and_then(|m| m.auth()); + unreachable!("stream_responses_attempt should always return"); + } - trace!( - "POST to {}: {}", - self.provider.get_full_url(&auth), - payload_body.as_str() - ); + /// Single attempt to start a streaming Responses API call. + async fn attempt_stream_responses( + &self, + attempt: u64, + payload_json: &Value, + auth_manager: &Option>, + ) -> std::result::Result { + // Always fetch the latest auth in case a prior attempt refreshed the token. 
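+        // A 401 later in this attempt triggers `refresh_token()` on the manager, so reading
+        // the auth again here lets the next retry pick up refreshed credentials.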
+ let auth = auth_manager.as_ref().and_then(|m| m.auth()); + + trace!( + "POST to {}: {:?}", + self.provider.get_full_url(&auth), + serde_json::to_string(payload_json) + ); - let mut req_builder = self - .provider - .create_request_builder(&self.client, &auth) - .await?; + let mut req_builder = self + .provider + .create_request_builder(&self.client, &auth) + .await + .map_err(StreamAttemptError::Fatal)?; + + req_builder = req_builder + .header("OpenAI-Beta", "responses=experimental") + // Send session_id for compatibility. + .header("conversation_id", self.conversation_id.to_string()) + .header("session_id", self.conversation_id.to_string()) + .header(reqwest::header::ACCEPT, "text/event-stream") + .json(payload_json); + + if let Some(auth) = auth.as_ref() + && auth.mode == AuthMode::ChatGPT + && let Some(account_id) = auth.get_account_id() + { + req_builder = req_builder.header("chatgpt-account-id", account_id); + } - req_builder = req_builder - .header("OpenAI-Beta", "responses=experimental") - // Send session_id for compatibility. - .header("conversation_id", self.conversation_id.to_string()) - .header("session_id", self.conversation_id.to_string()) - .header(reqwest::header::ACCEPT, "text/event-stream") - .json(&payload_json); - - if let Some(auth) = auth.as_ref() - && auth.mode == AuthMode::ChatGPT - && let Some(account_id) = auth.get_account_id() - { - req_builder = req_builder.header("chatgpt-account-id", account_id); - } + let res = self + .otel_event_manager + .log_request(attempt, || req_builder.send()) + .await; - let res = req_builder.send().await; - if let Ok(resp) = &res { - trace!( - "Response status: {}, cf-ray: {}", - resp.status(), - resp.headers() - .get("cf-ray") - .map(|v| v.to_str().unwrap_or_default()) - .unwrap_or_default() - ); - } + let mut request_id = None; + if let Ok(resp) = &res { + request_id = resp + .headers() + .get("cf-ray") + .map(|v| v.to_str().unwrap_or_default().to_string()); - match res { - Ok(resp) if resp.status().is_success() => { - let (tx_event, rx_event) = mpsc::channel::>(1600); + trace!( + "Response status: {}, cf-ray: {:?}", + resp.status(), + request_id + ); + } - if let Some(snapshot) = parse_rate_limit_snapshot(resp.headers()) - && tx_event - .send(Ok(ResponseEvent::RateLimits(snapshot))) - .await - .is_err() - { - debug!("receiver dropped rate limit snapshot event"); - } + match res { + Ok(resp) if resp.status().is_success() => { + let (tx_event, rx_event) = mpsc::channel::>(1600); + + if let Some(snapshot) = parse_rate_limit_snapshot(resp.headers()) + && tx_event + .send(Ok(ResponseEvent::RateLimits(snapshot))) + .await + .is_err() + { + debug!("receiver dropped rate limit snapshot event"); + } - // spawn task to process SSE - let stream = resp.bytes_stream().map_err(CodexErr::Reqwest); - tokio::spawn(process_sse( - stream, - tx_event, - self.provider.stream_idle_timeout(), - )); + // spawn task to process SSE + let stream = resp.bytes_stream().map_err(CodexErr::Reqwest); + tokio::spawn(process_sse( + stream, + tx_event, + self.provider.stream_idle_timeout(), + self.otel_event_manager.clone(), + )); - return Ok(ResponseStream { rx_event }); + Ok(ResponseStream { rx_event }) + } + Ok(res) => { + let status = res.status(); + + // Pull out Retry‑After header if present. 
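+                // Interpreted as a whole number of seconds and converted to a `Duration` that
+                // the retry scheduler prefers over exponential backoff.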
+ let retry_after_secs = res + .headers() + .get(reqwest::header::RETRY_AFTER) + .and_then(|v| v.to_str().ok()) + .and_then(|s| s.parse::().ok()); + let retry_after = retry_after_secs.map(|s| Duration::from_millis(s * 1_000)); + + if status == StatusCode::UNAUTHORIZED + && let Some(manager) = auth_manager.as_ref() + && manager.auth().is_some() + { + let _ = manager.refresh_token().await; } - Ok(res) => { - let status = res.status(); - - // Pull out Retry‑After header if present. - let retry_after_secs = res - .headers() - .get(reqwest::header::RETRY_AFTER) - .and_then(|v| v.to_str().ok()) - .and_then(|s| s.parse::().ok()); - - if status == StatusCode::UNAUTHORIZED - && let Some(manager) = auth_manager.as_ref() - && manager.auth().is_some() - { - let _ = manager.refresh_token().await; - } - // The OpenAI Responses endpoint returns structured JSON bodies even for 4xx/5xx - // errors. When we bubble early with only the HTTP status the caller sees an opaque - // "unexpected status 400 Bad Request" which makes debugging nearly impossible. - // Instead, read (and include) the response text so higher layers and users see the - // exact error message (e.g. "Unknown parameter: 'input[0].metadata'"). The body is - // small and this branch only runs on error paths so the extra allocation is - // negligible. - if !(status == StatusCode::TOO_MANY_REQUESTS - || status == StatusCode::UNAUTHORIZED - || status.is_server_error()) - { - // Surface the error body to callers. Use `unwrap_or_default` per Clippy. - let body = res.text().await.unwrap_or_default(); - return Err(CodexErr::UnexpectedStatus(status, body)); - } - - if status == StatusCode::TOO_MANY_REQUESTS { - let body = res.json::().await.ok(); - if let Some(ErrorResponse { error }) = body { - if error.r#type.as_deref() == Some("usage_limit_reached") { - // Prefer the plan_type provided in the error message if present - // because it's more up to date than the one encoded in the auth - // token. - let plan_type = error - .plan_type - .or_else(|| auth.as_ref().and_then(CodexAuth::get_plan_type)); - let resets_in_seconds = error.resets_in_seconds; - return Err(CodexErr::UsageLimitReached(UsageLimitReachedError { - plan_type, - resets_in_seconds, - })); - } else if error.r#type.as_deref() == Some("usage_not_included") { - return Err(CodexErr::UsageNotIncluded); - } - } - } + // The OpenAI Responses endpoint returns structured JSON bodies even for 4xx/5xx + // errors. When we bubble early with only the HTTP status the caller sees an opaque + // "unexpected status 400 Bad Request" which makes debugging nearly impossible. + // Instead, read (and include) the response text so higher layers and users see the + // exact error message (e.g. "Unknown parameter: 'input[0].metadata'"). The body is + // small and this branch only runs on error paths so the extra allocation is + // negligible. + if !(status == StatusCode::TOO_MANY_REQUESTS + || status == StatusCode::UNAUTHORIZED + || status.is_server_error()) + { + // Surface the error body to callers. Use `unwrap_or_default` per Clippy. 
+ let body = res.text().await.unwrap_or_default(); + return Err(StreamAttemptError::Fatal(CodexErr::UnexpectedStatus( + UnexpectedResponseError { + status, + body, + request_id: None, + }, + ))); + } - if attempt > max_retries { - if status == StatusCode::INTERNAL_SERVER_ERROR { - return Err(CodexErr::InternalServerError); + if status == StatusCode::TOO_MANY_REQUESTS { + let rate_limit_snapshot = parse_rate_limit_snapshot(res.headers()); + let body = res.json::().await.ok(); + if let Some(ErrorResponse { error }) = body { + if error.r#type.as_deref() == Some("usage_limit_reached") { + // Prefer the plan_type provided in the error message if present + // because it's more up to date than the one encoded in the auth + // token. + let plan_type = error + .plan_type + .or_else(|| auth.as_ref().and_then(CodexAuth::get_plan_type)); + let resets_in_seconds = error.resets_in_seconds; + let codex_err = CodexErr::UsageLimitReached(UsageLimitReachedError { + plan_type, + resets_in_seconds, + rate_limits: rate_limit_snapshot, + }); + return Err(StreamAttemptError::Fatal(codex_err)); + } else if error.r#type.as_deref() == Some("usage_not_included") { + return Err(StreamAttemptError::Fatal(CodexErr::UsageNotIncluded)); } - - return Err(CodexErr::RetryLimit(status)); - } - - let delay = retry_after_secs - .map(|s| Duration::from_millis(s * 1_000)) - .unwrap_or_else(|| backoff(attempt)); - tokio::time::sleep(delay).await; - } - Err(e) => { - if attempt > max_retries { - return Err(e.into()); } - let delay = backoff(attempt); - tokio::time::sleep(delay).await; } + + Err(StreamAttemptError::RetryableHttpError { + status, + retry_after, + request_id, + }) } + Err(e) => Err(StreamAttemptError::RetryableTransportError(e.into())), } } @@ -378,6 +425,10 @@ impl ModelClient { self.provider.clone() } + pub fn get_otel_event_manager(&self) -> OtelEventManager { + self.otel_event_manager.clone() + } + /// Returns the currently configured model slug. pub fn get_model(&self) -> String { self.config.model.clone() @@ -403,6 +454,50 @@ impl ModelClient { } } +enum StreamAttemptError { + RetryableHttpError { + status: StatusCode, + retry_after: Option, + request_id: Option, + }, + RetryableTransportError(CodexErr), + Fatal(CodexErr), +} + +impl StreamAttemptError { + /// attempt is 0-based. + fn delay(&self, attempt: u64) -> Duration { + // backoff() uses 1-based attempts. + let backoff_attempt = attempt + 1; + match self { + Self::RetryableHttpError { retry_after, .. } => { + retry_after.unwrap_or_else(|| backoff(backoff_attempt)) + } + Self::RetryableTransportError { .. } => backoff(backoff_attempt), + Self::Fatal(_) => { + // Should not be called on Fatal errors. + Duration::from_secs(0) + } + } + } + + fn into_error(self) -> CodexErr { + match self { + Self::RetryableHttpError { + status, request_id, .. 
+ } => { + if status == StatusCode::INTERNAL_SERVER_ERROR { + CodexErr::InternalServerError + } else { + CodexErr::RetryLimit(RetryLimitReachedError { status, request_id }) + } + } + Self::RetryableTransportError(error) => error, + Self::Fatal(error) => error, + } + } +} + #[derive(Debug, Deserialize, Serialize)] struct SseEvent { #[serde(rename = "type")] @@ -412,9 +507,6 @@ struct SseEvent { delta: Option, } -#[derive(Debug, Deserialize)] -struct ResponseCreated {} - #[derive(Debug, Deserialize)] struct ResponseCompleted { id: String, @@ -485,20 +577,45 @@ fn attach_item_ids(payload_json: &mut Value, original_items: &[ResponseItem]) { } } -fn parse_rate_limit_snapshot(headers: &HeaderMap) -> Option { - let primary_used_percent = parse_header_f64(headers, "x-codex-primary-used-percent")?; - let secondary_used_percent = parse_header_f64(headers, "x-codex-secondary-used-percent")?; - let primary_to_secondary_ratio_percent = - parse_header_f64(headers, "x-codex-primary-over-secondary-limit-percent")?; - let primary_window_minutes = parse_header_u64(headers, "x-codex-primary-window-minutes")?; - let secondary_window_minutes = parse_header_u64(headers, "x-codex-secondary-window-minutes")?; - - Some(RateLimitSnapshotEvent { - primary_used_percent, - secondary_used_percent, - primary_to_secondary_ratio_percent, - primary_window_minutes, - secondary_window_minutes, +fn parse_rate_limit_snapshot(headers: &HeaderMap) -> Option { + let primary = parse_rate_limit_window( + headers, + "x-codex-primary-used-percent", + "x-codex-primary-window-minutes", + "x-codex-primary-reset-after-seconds", + ); + + let secondary = parse_rate_limit_window( + headers, + "x-codex-secondary-used-percent", + "x-codex-secondary-window-minutes", + "x-codex-secondary-reset-after-seconds", + ); + + Some(RateLimitSnapshot { primary, secondary }) +} + +fn parse_rate_limit_window( + headers: &HeaderMap, + used_percent_header: &str, + window_minutes_header: &str, + resets_header: &str, +) -> Option { + let used_percent: Option = parse_header_f64(headers, used_percent_header); + + used_percent.and_then(|used_percent| { + let window_minutes = parse_header_u64(headers, window_minutes_header); + let resets_in_seconds = parse_header_u64(headers, resets_header); + + let has_data = used_percent != 0.0 + || window_minutes.is_some_and(|minutes| minutes != 0) + || resets_in_seconds.is_some_and(|seconds| seconds != 0); + + has_data.then_some(RateLimitWindow { + used_percent, + window_minutes, + resets_in_seconds, + }) }) } @@ -521,6 +638,7 @@ async fn process_sse( stream: S, tx_event: mpsc::Sender>, idle_timeout: Duration, + otel_event_manager: OtelEventManager, ) where S: Stream> + Unpin, { @@ -532,7 +650,10 @@ async fn process_sse( let mut response_error: Option = None; loop { - let sse = match timeout(idle_timeout, stream.next()).await { + let sse = match otel_event_manager + .log_sse_event(|| timeout(idle_timeout, stream.next())) + .await + { Ok(Some(Ok(sse))) => sse, Ok(Some(Err(e))) => { debug!("SSE Error: {e:#}"); @@ -546,6 +667,21 @@ async fn process_sse( id: response_id, usage, }) => { + if let Some(token_usage) = &usage { + otel_event_manager.sse_event_completed( + token_usage.input_tokens, + token_usage.output_tokens, + token_usage + .input_tokens_details + .as_ref() + .map(|d| d.cached_tokens), + token_usage + .output_tokens_details + .as_ref() + .map(|d| d.reasoning_tokens), + token_usage.total_tokens, + ); + } let event = ResponseEvent::Completed { response_id, token_usage: usage.map(Into::into), @@ -553,12 +689,13 @@ async 
fn process_sse( let _ = tx_event.send(Ok(event)).await; } None => { - let _ = tx_event - .send(Err(response_error.unwrap_or(CodexErr::Stream( - "stream closed before response.completed".into(), - None, - )))) - .await; + let error = response_error.unwrap_or(CodexErr::Stream( + "stream closed before response.completed".into(), + None, + )); + otel_event_manager.see_event_completed_failed(&error); + + let _ = tx_event.send(Err(error)).await; } } return; @@ -662,7 +799,9 @@ async fn process_sse( response_error = Some(CodexErr::Stream(message, delay)); } Err(e) => { - debug!("failed to parse ErrorResponse: {e}"); + let error = format!("failed to parse ErrorResponse: {e}"); + debug!(error); + response_error = Some(CodexErr::Stream(error, None)) } } } @@ -676,7 +815,9 @@ async fn process_sse( response_completed = Some(r); } Err(e) => { - debug!("failed to parse ResponseCompleted: {e}"); + let error = format!("failed to parse ResponseCompleted: {e}"); + debug!(error); + response_error = Some(CodexErr::Stream(error, None)); continue; } }; @@ -723,6 +864,7 @@ async fn process_sse( async fn stream_from_fixture( path: impl AsRef, provider: ModelProviderInfo, + otel_event_manager: OtelEventManager, ) -> Result { let (tx_event, rx_event) = mpsc::channel::>(1600); let f = std::fs::File::open(path.as_ref())?; @@ -741,6 +883,7 @@ async fn stream_from_fixture( stream, tx_event, provider.stream_idle_timeout(), + otel_event_manager, )); Ok(ResponseStream { rx_event }) } @@ -796,6 +939,7 @@ mod tests { async fn collect_events( chunks: &[&[u8]], provider: ModelProviderInfo, + otel_event_manager: OtelEventManager, ) -> Vec> { let mut builder = IoBuilder::new(); for chunk in chunks { @@ -805,7 +949,12 @@ mod tests { let reader = builder.build(); let stream = ReaderStream::new(reader).map_err(CodexErr::Io); let (tx, mut rx) = mpsc::channel::>(16); - tokio::spawn(process_sse(stream, tx, provider.stream_idle_timeout())); + tokio::spawn(process_sse( + stream, + tx, + provider.stream_idle_timeout(), + otel_event_manager, + )); let mut events = Vec::new(); while let Some(ev) = rx.recv().await { @@ -819,6 +968,7 @@ mod tests { async fn run_sse( events: Vec, provider: ModelProviderInfo, + otel_event_manager: OtelEventManager, ) -> Vec { let mut body = String::new(); for e in events { @@ -835,7 +985,12 @@ mod tests { let (tx, mut rx) = mpsc::channel::>(8); let stream = ReaderStream::new(std::io::Cursor::new(body)).map_err(CodexErr::Io); - tokio::spawn(process_sse(stream, tx, provider.stream_idle_timeout())); + tokio::spawn(process_sse( + stream, + tx, + provider.stream_idle_timeout(), + otel_event_manager, + )); let mut out = Vec::new(); while let Some(ev) = rx.recv().await { @@ -844,6 +999,18 @@ mod tests { out } + fn otel_event_manager() -> OtelEventManager { + OtelEventManager::new( + ConversationId::new(), + "test", + "test", + None, + Some(AuthMode::ChatGPT), + false, + "test".to_string(), + ) + } + // ──────────────────────────── // Tests from `implement-test-for-responses-api-sse-parser` // ──────────────────────────── @@ -895,9 +1062,12 @@ mod tests { requires_openai_auth: false, }; + let otel_event_manager = otel_event_manager(); + let events = collect_events( &[sse1.as_bytes(), sse2.as_bytes(), sse3.as_bytes()], provider, + otel_event_manager, ) .await; @@ -955,7 +1125,9 @@ mod tests { requires_openai_auth: false, }; - let events = collect_events(&[sse1.as_bytes()], provider).await; + let otel_event_manager = otel_event_manager(); + + let events = collect_events(&[sse1.as_bytes()], provider, 
otel_event_manager).await; assert_eq!(events.len(), 2); @@ -989,7 +1161,9 @@ mod tests { requires_openai_auth: false, }; - let events = collect_events(&[sse1.as_bytes()], provider).await; + let otel_event_manager = otel_event_manager(); + + let events = collect_events(&[sse1.as_bytes()], provider, otel_event_manager).await; assert_eq!(events.len(), 1); @@ -1094,7 +1268,9 @@ mod tests { requires_openai_auth: false, }; - let out = run_sse(evs, provider).await; + let otel_event_manager = otel_event_manager(); + + let out = run_sse(evs, provider, otel_event_manager).await; assert_eq!(out.len(), case.expected_len, "case {}", case.name); assert!( (case.expect_first)(&out[0]), diff --git a/codex-rs/core/src/client_common.rs b/codex-rs/core/src/client_common.rs index 15bfb5d4001..b695581deb2 100644 --- a/codex-rs/core/src/client_common.rs +++ b/codex-rs/core/src/client_common.rs @@ -1,7 +1,7 @@ use crate::error::Result; use crate::model_family::ModelFamily; use crate::openai_tools::OpenAiTool; -use crate::protocol::RateLimitSnapshotEvent; +use crate::protocol::RateLimitSnapshot; use crate::protocol::TokenUsage; use codex_apply_patch::APPLY_PATCH_TOOL_INSTRUCTIONS; use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig; @@ -10,6 +10,7 @@ use codex_protocol::config_types::Verbosity as VerbosityConfig; use codex_protocol::models::ResponseItem; use futures::Stream; use serde::Serialize; +use serde_json::Value; use std::borrow::Cow; use std::ops::Deref; use std::pin::Pin; @@ -32,6 +33,9 @@ pub struct Prompt { /// Optional override for the built-in BASE_INSTRUCTIONS. pub base_instructions_override: Option, + + /// Optional the output schema for the model's response. + pub output_schema: Option, } impl Prompt { @@ -79,7 +83,7 @@ pub enum ResponseEvent { WebSearchCallBegin { call_id: String, }, - RateLimits(RateLimitSnapshotEvent), + RateLimits(RateLimitSnapshot), } #[derive(Debug, Serialize)] @@ -90,14 +94,31 @@ pub(crate) struct Reasoning { pub(crate) summary: Option, } +#[derive(Debug, Serialize, Default, Clone)] +#[serde(rename_all = "snake_case")] +pub(crate) enum TextFormatType { + #[default] + JsonSchema, +} + +#[derive(Debug, Serialize, Default, Clone)] +pub(crate) struct TextFormat { + pub(crate) r#type: TextFormatType, + pub(crate) strict: bool, + pub(crate) schema: Value, + pub(crate) name: String, +} + /// Controls under the `text` field in the Responses API for GPT-5. 
-#[derive(Debug, Serialize, Default, Clone, Copy)] +#[derive(Debug, Serialize, Default, Clone)] pub(crate) struct TextControls { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) verbosity: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) format: Option, } -#[derive(Debug, Serialize, Default, Clone, Copy)] +#[derive(Debug, Serialize, Default, Clone)] #[serde(rename_all = "lowercase")] pub(crate) enum OpenAiVerbosity { Low, @@ -156,9 +177,20 @@ pub(crate) fn create_reasoning_param_for_request( pub(crate) fn create_text_param_for_request( verbosity: Option, + output_schema: &Option, ) -> Option { - verbosity.map(|v| TextControls { - verbosity: Some(v.into()), + if verbosity.is_none() && output_schema.is_none() { + return None; + } + + Some(TextControls { + verbosity: verbosity.map(std::convert::Into::into), + format: output_schema.as_ref().map(|schema| TextFormat { + r#type: TextFormatType::JsonSchema, + strict: true, + schema: schema.clone(), + name: "codex_output_schema".to_string(), + }), }) } @@ -255,6 +287,7 @@ mod tests { prompt_cache_key: None, text: Some(TextControls { verbosity: Some(OpenAiVerbosity::Low), + format: None, }), }; @@ -267,6 +300,52 @@ mod tests { ); } + #[test] + fn serializes_text_schema_with_strict_format() { + let input: Vec = vec![]; + let tools: Vec = vec![]; + let schema = serde_json::json!({ + "type": "object", + "properties": { + "answer": {"type": "string"} + }, + "required": ["answer"], + }); + let text_controls = + create_text_param_for_request(None, &Some(schema.clone())).expect("text controls"); + + let req = ResponsesApiRequest { + model: "gpt-5", + instructions: "i", + input: &input, + tools: &tools, + tool_choice: "auto", + parallel_tool_calls: false, + reasoning: None, + store: false, + stream: true, + include: vec![], + prompt_cache_key: None, + text: Some(text_controls), + }; + + let v = serde_json::to_value(&req).expect("json"); + let text = v.get("text").expect("text field"); + assert!(text.get("verbosity").is_none()); + let format = text.get("format").expect("format field"); + + assert_eq!( + format.get("name"), + Some(&serde_json::Value::String("codex_output_schema".into())) + ); + assert_eq!( + format.get("type"), + Some(&serde_json::Value::String("json_schema".into())) + ); + assert_eq!(format.get("strict"), Some(&serde_json::Value::Bool(true))); + assert_eq!(format.get("schema"), Some(&schema)); + } + #[test] fn omits_text_when_not_set() { let input: Vec = vec![]; diff --git a/codex-rs/core/src/codex.rs b/codex-rs/core/src/codex.rs index 08b28bdef54..4baa63ec3e3 100644 --- a/codex-rs/core/src/codex.rs +++ b/codex-rs/core/src/codex.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; use std::collections::HashMap; -use std::collections::HashSet; +use std::fmt::Debug; use std::path::Path; use std::path::PathBuf; use std::sync::Arc; @@ -10,29 +10,32 @@ use std::time::Duration; use crate::AuthManager; use crate::client_common::REVIEW_PROMPT; use crate::event_mapping::map_response_item_to_event_messages; +use crate::function_tool::FunctionCallError; use crate::review_format::format_review_findings_block; +use crate::terminal; +use crate::user_notification::UserNotifier; use async_channel::Receiver; use async_channel::Sender; use codex_apply_patch::ApplyPatchAction; use codex_apply_patch::MaybeApplyPatchVerified; use codex_apply_patch::maybe_parse_apply_patch_verified; -use codex_protocol::mcp_protocol::ConversationId; +use codex_protocol::ConversationId; use codex_protocol::protocol::ConversationPathResponseEvent; use 
codex_protocol::protocol::ExitedReviewModeEvent; use codex_protocol::protocol::ReviewRequest; use codex_protocol::protocol::RolloutItem; +use codex_protocol::protocol::SessionSource; use codex_protocol::protocol::TaskStartedEvent; use codex_protocol::protocol::TurnAbortReason; -use codex_protocol::protocol::TurnAbortedEvent; use codex_protocol::protocol::TurnContextItem; use futures::prelude::*; use mcp_types::CallToolResult; use serde::Deserialize; use serde::Serialize; use serde_json; +use serde_json::Value; use tokio::sync::Mutex; use tokio::sync::oneshot; -use tokio::task::AbortHandle; use tracing::debug; use tracing::error; use tracing::info; @@ -98,18 +101,17 @@ use crate::protocol::ListCustomPromptsResponseEvent; use crate::protocol::Op; use crate::protocol::PatchApplyBeginEvent; use crate::protocol::PatchApplyEndEvent; -use crate::protocol::RateLimitSnapshotEvent; +use crate::protocol::RateLimitSnapshot; use crate::protocol::ReviewDecision; use crate::protocol::ReviewOutputEvent; use crate::protocol::SandboxPolicy; use crate::protocol::SessionConfiguredEvent; use crate::protocol::StreamErrorEvent; use crate::protocol::Submission; -use crate::protocol::TaskCompleteEvent; use crate::protocol::TokenCountEvent; use crate::protocol::TokenUsage; -use crate::protocol::TokenUsageInfo; use crate::protocol::TurnDiffEvent; +use crate::protocol::ViewImageToolCallEvent; use crate::protocol::WebSearchBeginEvent; use crate::rollout::RolloutRecorder; use crate::rollout::RolloutRecorderParams; @@ -117,11 +119,18 @@ use crate::safety::SafetyCheck; use crate::safety::assess_command_safety; use crate::safety::assess_safety_for_untrusted_command; use crate::shell; +use crate::state::ActiveTurn; +use crate::state::SessionServices; +use crate::tasks::CompactTask; +use crate::tasks::RegularTask; +use crate::tasks::ReviewTask; use crate::turn_diff_tracker::TurnDiffTracker; use crate::unified_exec::UnifiedExecSessionManager; use crate::user_instructions::UserInstructions; use crate::user_notification::UserNotification; use crate::util::backoff; +use codex_otel::otel_event_manager::OtelEventManager; +use codex_otel::otel_event_manager::ToolDecisionSource; use codex_protocol::config_types::ReasoningEffort as ReasoningEffortConfig; use codex_protocol::config_types::ReasoningSummary as ReasoningSummaryConfig; use codex_protocol::custom_prompts::CustomPrompt; @@ -169,6 +178,7 @@ impl Codex { config: Config, auth_manager: Arc, conversation_history: InitialHistory, + session_source: SessionSource, ) -> CodexResult { let (tx_sub, rx_sub) = async_channel::bounded(SUBMISSION_CHANNEL_CAPACITY); let (tx_event, rx_event) = async_channel::unbounded(); @@ -186,7 +196,7 @@ impl Codex { base_instructions: config.base_instructions.clone(), approval_policy: config.approval_policy, sandbox_policy: config.sandbox_policy.clone(), - notify: config.notify.clone(), + notify: UserNotifier::new(config.notify.clone()), cwd: config.cwd.clone(), }; @@ -197,6 +207,7 @@ impl Codex { auth_manager.clone(), tx_event.clone(), conversation_history, + session_source, ) .await .map_err(|e| { @@ -250,17 +261,7 @@ impl Codex { } } -/// Mutable state of the agent -#[derive(Default)] -struct State { - approved_commands: HashSet>, - current_task: Option, - pending_approvals: HashMap>, - pending_input: Vec, - history: ConversationHistory, - token_info: Option, - latest_rate_limits: Option, -} +use crate::state::SessionState; /// Context for an initialized model agent /// @@ -268,23 +269,9 @@ struct State { pub(crate) struct Session { conversation_id: 
ConversationId, tx_event: Sender, - - /// Manager for external MCP servers/tools. - mcp_connection_manager: McpConnectionManager, - session_manager: ExecSessionManager, - unified_exec_manager: UnifiedExecSessionManager, - - /// External notifier command (will be passed as args to exec()). When - /// `None` this feature is disabled. - notify: Option>, - - /// Optional rollout recorder for persisting the conversation transcript so - /// sessions can be replayed or inspected later. - rollout: Mutex>, - state: Mutex, - codex_linux_sandbox_exe: Option, - user_shell: shell::Shell, - show_raw_agent_reasoning: bool, + state: Mutex, + pub(crate) active_turn: Mutex>, + services: SessionServices, next_internal_sub_id: AtomicU64, } @@ -303,6 +290,7 @@ pub(crate) struct TurnContext { pub(crate) shell_environment_policy: ShellEnvironmentPolicy, pub(crate) tools_config: ToolsConfig, pub(crate) is_review_mode: bool, + pub(crate) final_output_json_schema: Option, } impl TurnContext { @@ -335,10 +323,7 @@ struct ConfigureSession { /// How to sandbox commands executed in the system sandbox_policy: SandboxPolicy, - /// Optional external notifier command tokens. Present only when the - /// client wants the agent to spawn a program after each completed - /// turn. - notify: Option>, + notify: UserNotifier, /// Working directory that should be treated as the *root* of the /// session. All relative paths supplied by the model as well as the @@ -357,6 +342,7 @@ impl Session { auth_manager: Arc, tx_event: Sender, initial_history: InitialHistory, + session_source: SessionSource, ) -> anyhow::Result<(Arc, TurnContext)> { let ConfigureSession { provider, @@ -380,7 +366,11 @@ impl Session { let conversation_id = ConversationId::default(); ( conversation_id, - RolloutRecorderParams::new(conversation_id, user_instructions.clone()), + RolloutRecorderParams::new( + conversation_id, + user_instructions.clone(), + session_source, + ), ) } InitialHistory::Resumed(resumed_history) => ( @@ -400,7 +390,10 @@ impl Session { // - load history metadata let rollout_fut = RolloutRecorder::new(&config, rollout_params); - let mcp_fut = McpConnectionManager::new(config.mcp_servers.clone()); + let mcp_fut = McpConnectionManager::new( + config.mcp_servers.clone(), + config.use_experimental_use_rmcp_client, + ); let default_shell_fut = shell::default_user_shell(); let history_meta_fut = crate::message_history::history_metadata(&config); @@ -414,10 +407,7 @@ impl Session { })?; let rollout_path = rollout_recorder.rollout_path.clone(); // Create the mutable state for the Session. - let state = State { - history: ConversationHistory::new(), - ..Default::default() - }; + let state = SessionState::new(); // Handle MCP manager result and record any startup failures. 
let (mcp_connection_manager, failed_clients) = match mcp_res { @@ -445,11 +435,35 @@ impl Session { } } + let otel_event_manager = OtelEventManager::new( + conversation_id, + config.model.as_str(), + config.model_family.slug.as_str(), + auth_manager.auth().and_then(|a| a.get_account_id()), + auth_manager.auth().map(|a| a.mode), + config.otel.log_user_prompt, + terminal::user_agent(), + ); + + otel_event_manager.conversation_starts( + config.model_provider.name.as_str(), + config.model_reasoning_effort, + config.model_reasoning_summary, + config.model_context_window, + config.model_max_output_tokens, + config.model_auto_compact_token_limit, + config.approval_policy, + config.sandbox_policy.clone(), + config.mcp_servers.keys().map(String::as_str).collect(), + config.active_profile.clone(), + ); + // Now that the conversation id is final (may have been updated by resume), // construct the model client. let client = ModelClient::new( config.clone(), Some(auth_manager.clone()), + otel_event_manager, provider.clone(), model_reasoning_effort, model_reasoning_summary, @@ -473,19 +487,25 @@ impl Session { shell_environment_policy: config.shell_environment_policy.clone(), cwd, is_review_mode: false, + final_output_json_schema: None, }; - let sess = Arc::new(Session { - conversation_id, - tx_event: tx_event.clone(), + let services = SessionServices { mcp_connection_manager, session_manager: ExecSessionManager::default(), unified_exec_manager: UnifiedExecSessionManager::default(), - notify, - state: Mutex::new(state), + notifier: notify, rollout: Mutex::new(Some(rollout_recorder)), codex_linux_sandbox_exe: config.codex_linux_sandbox_exe.clone(), user_shell: default_shell, show_raw_agent_reasoning: config.show_raw_agent_reasoning, + }; + + let sess = Arc::new(Session { + conversation_id, + tx_event: tx_event.clone(), + state: Mutex::new(state), + active_turn: Mutex::new(None), + services, next_internal_sub_id: AtomicU64::new(0), }); @@ -515,23 +535,6 @@ impl Session { Ok((sess, turn_context)) } - pub async fn set_task(&self, task: AgentTask) { - let mut state = self.state.lock().await; - if let Some(current_task) = state.current_task.take() { - current_task.abort(TurnAbortReason::Replaced); - } - state.current_task = Some(task); - } - - pub async fn remove_task(&self, sub_id: &str) { - let mut state = self.state.lock().await; - if let Some(task) = &state.current_task - && task.sub_id == sub_id - { - state.current_task.take(); - } - } - fn next_internal_sub_id(&self) -> String { let id = self .next_internal_sub_id @@ -586,13 +589,19 @@ impl Session { command: Vec, cwd: PathBuf, reason: Option, - ) -> oneshot::Receiver { + ) -> ReviewDecision { // Add the tx_approve callback to the map before sending the request. 
let (tx_approve, rx_approve) = oneshot::channel(); let event_id = sub_id.clone(); let prev_entry = { - let mut state = self.state.lock().await; - state.pending_approvals.insert(sub_id, tx_approve) + let mut active = self.active_turn.lock().await; + match active.as_mut() { + Some(at) => { + let mut ts = at.turn_state.lock().await; + ts.insert_pending_approval(sub_id, tx_approve) + } + None => None, + } }; if prev_entry.is_some() { warn!("Overwriting existing pending approval for sub_id: {event_id}"); @@ -608,7 +617,7 @@ impl Session { }), }; self.send_event(event).await; - rx_approve + rx_approve.await.unwrap_or_default() } pub async fn request_patch_approval( @@ -623,8 +632,14 @@ impl Session { let (tx_approve, rx_approve) = oneshot::channel(); let event_id = sub_id.clone(); let prev_entry = { - let mut state = self.state.lock().await; - state.pending_approvals.insert(sub_id, tx_approve) + let mut active = self.active_turn.lock().await; + match active.as_mut() { + Some(at) => { + let mut ts = at.turn_state.lock().await; + ts.insert_pending_approval(sub_id, tx_approve) + } + None => None, + } }; if prev_entry.is_some() { warn!("Overwriting existing pending approval for sub_id: {event_id}"); @@ -645,8 +660,14 @@ impl Session { pub async fn notify_approval(&self, sub_id: &str, decision: ReviewDecision) { let entry = { - let mut state = self.state.lock().await; - state.pending_approvals.remove(sub_id) + let mut active = self.active_turn.lock().await; + match active.as_mut() { + Some(at) => { + let mut ts = at.turn_state.lock().await; + ts.remove_pending_approval(sub_id) + } + None => None, + } }; match entry { Some(tx_approve) => { @@ -660,7 +681,7 @@ impl Session { pub async fn add_approved_command(&self, cmd: Vec) { let mut state = self.state.lock().await; - state.approved_commands.insert(cmd); + state.add_approved_command(cmd); } /// Records input items: always append to conversation history and @@ -700,7 +721,12 @@ impl Session { /// Append ResponseItems to the in-memory conversation history only. 
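// A reduced sketch of the approval-await pattern above: the oneshot sender is
// registered in the *active turn's* state (keyed by sub_id) rather than in
// session-wide state, and the requester awaits the decision inline, so the
// method can return a ReviewDecision directly. The types below are simplified
// stand-ins; the real `ActiveTurn`/`TurnState` live in crate::state and are
// not shown in this diff.

use std::collections::HashMap;
use tokio::sync::{Mutex, oneshot};

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
enum Decision {
    Approved,
    #[default]
    Denied,
}

#[derive(Default)]
struct TurnApprovals {
    pending: HashMap<String, oneshot::Sender<Decision>>,
}

struct ActiveTurnSketch {
    approvals: Mutex<TurnApprovals>,
}

impl ActiveTurnSketch {
    // Register the sender, emit the approval request to the UI, then await the
    // receiver. A dropped sender (e.g. the turn was aborted) falls back to the
    // default decision instead of panicking.
    async fn request_approval(&self, sub_id: String) -> Decision {
        let (tx, rx) = oneshot::channel();
        self.approvals.lock().await.pending.insert(sub_id, tx);
        // ...send the ExecApprovalRequest / ApplyPatchApprovalRequest event here...
        rx.await.unwrap_or_default()
    }

    // Resolve a pending approval when the user's decision arrives.
    async fn notify_approval(&self, sub_id: &str, decision: Decision) {
        if let Some(tx) = self.approvals.lock().await.pending.remove(sub_id) {
            let _ = tx.send(decision);
        }
    }
}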
async fn record_into_history(&self, items: &[ResponseItem]) { let mut state = self.state.lock().await; - state.history.record_items(items.iter()); + state.record_items(items.iter()); + } + + async fn replace_history(&self, items: Vec) { + let mut state = self.state.lock().await; + state.replace_history(items); } async fn persist_rollout_response_items(&self, items: &[ResponseItem]) { @@ -721,14 +747,14 @@ impl Session { Some(turn_context.cwd.clone()), Some(turn_context.approval_policy), Some(turn_context.sandbox_policy.clone()), - Some(self.user_shell.clone()), + Some(self.user_shell().clone()), ))); items } async fn persist_rollout_items(&self, items: &[RolloutItem]) { let recorder = { - let guard = self.rollout.lock().await; + let guard = self.services.rollout.lock().await; guard.clone() }; if let Some(rec) = recorder @@ -738,33 +764,47 @@ impl Session { } } + pub(crate) async fn history_snapshot(&self) -> Vec { + let state = self.state.lock().await; + state.history_snapshot() + } + async fn update_token_usage_info( &self, + sub_id: &str, turn_context: &TurnContext, token_usage: Option<&TokenUsage>, ) { - let mut state = self.state.lock().await; - if let Some(token_usage) = token_usage { - let info = TokenUsageInfo::new_or_append( - &state.token_info, - &Some(token_usage.clone()), - turn_context.client.get_model_context_window(), - ); - state.token_info = info; + { + let mut state = self.state.lock().await; + if let Some(token_usage) = token_usage { + state.update_token_info_from_usage( + token_usage, + turn_context.client.get_model_context_window(), + ); + } } + self.send_token_count_event(sub_id).await; } - async fn update_rate_limits(&self, new_rate_limits: RateLimitSnapshotEvent) { - let mut state = self.state.lock().await; - state.latest_rate_limits = Some(new_rate_limits); + async fn update_rate_limits(&self, sub_id: &str, new_rate_limits: RateLimitSnapshot) { + { + let mut state = self.state.lock().await; + state.set_rate_limits(new_rate_limits); + } + self.send_token_count_event(sub_id).await; } - async fn get_token_count_event(&self) -> TokenCountEvent { - let state = self.state.lock().await; - TokenCountEvent { - info: state.token_info.clone(), - rate_limits: state.latest_rate_limits.clone(), - } + async fn send_token_count_event(&self, sub_id: &str) { + let (info, rate_limits) = { + let state = self.state.lock().await; + state.token_info_and_rate_limits() + }; + let event = Event { + id: sub_id.to_string(), + msg: EventMsg::TokenCount(TokenCountEvent { info, rate_limits }), + }; + self.send_event(event).await; } /// Record a user input item to conversation history and also persist a @@ -777,7 +817,7 @@ impl Session { // Derive user message events and persist only UserMessage to rollout let msgs = - map_response_item_to_event_messages(&response_item, self.show_raw_agent_reasoning); + map_response_item_to_event_messages(&response_item, self.show_raw_agent_reasoning()); let user_msgs: Vec = msgs .into_iter() .filter_map(|m| match m { @@ -976,30 +1016,32 @@ impl Session { pub async fn turn_input_with_history(&self, extra: Vec) -> Vec { let history = { let state = self.state.lock().await; - state.history.contents() + state.history_snapshot() }; [history, extra].concat() } /// Returns the input if there was no task running to inject into pub async fn inject_input(&self, input: Vec) -> Result<(), Vec> { - let mut state = self.state.lock().await; - if state.current_task.is_some() { - state.pending_input.push(input.into()); - Ok(()) - } else { - Err(input) + let mut active = 
self.active_turn.lock().await; + match active.as_mut() { + Some(at) => { + let mut ts = at.turn_state.lock().await; + ts.push_pending_input(input.into()); + Ok(()) + } + None => Err(input), } } pub async fn get_pending_input(&self) -> Vec { - let mut state = self.state.lock().await; - if state.pending_input.is_empty() { - Vec::with_capacity(0) - } else { - let mut ret = Vec::new(); - std::mem::swap(&mut ret, &mut state.pending_input); - ret + let mut active = self.active_turn.lock().await; + match active.as_mut() { + Some(at) => { + let mut ts = at.turn_state.lock().await; + ts.take_pending_input() + } + None => Vec::with_capacity(0), } } @@ -1009,58 +1051,40 @@ impl Session { tool: &str, arguments: Option, ) -> anyhow::Result { - self.mcp_connection_manager + self.services + .mcp_connection_manager .call_tool(server, tool, arguments) .await } - pub async fn interrupt_task(&self) { + pub async fn interrupt_task(self: &Arc) { info!("interrupt received: abort current task, if any"); - let mut state = self.state.lock().await; - state.pending_approvals.clear(); - state.pending_input.clear(); - if let Some(task) = state.current_task.take() { - task.abort(TurnAbortReason::Interrupted); - } + self.abort_all_tasks(TurnAbortReason::Interrupted).await; } fn interrupt_task_sync(&self) { - if let Ok(mut state) = self.state.try_lock() { - state.pending_approvals.clear(); - state.pending_input.clear(); - if let Some(task) = state.current_task.take() { - task.abort(TurnAbortReason::Interrupted); + if let Ok(mut active) = self.active_turn.try_lock() + && let Some(at) = active.as_mut() + { + at.try_clear_pending_sync(); + let tasks = at.drain_tasks(); + *active = None; + for (_sub_id, task) in tasks { + task.handle.abort(); } } } - /// Spawn the configured notifier (if any) with the given JSON payload as - /// the last argument. Failures are logged but otherwise ignored so that - /// notification issues do not interfere with the main workflow. - fn maybe_notify(&self, notification: UserNotification) { - let Some(notify_command) = &self.notify else { - return; - }; - - if notify_command.is_empty() { - return; - } - - let Ok(json) = serde_json::to_string(¬ification) else { - error!("failed to serialise notification payload"); - return; - }; + pub(crate) fn notifier(&self) -> &UserNotifier { + &self.services.notifier + } - let mut command = std::process::Command::new(¬ify_command[0]); - if notify_command.len() > 1 { - command.args(¬ify_command[1..]); - } - command.arg(json); + fn user_shell(&self) -> &shell::Shell { + &self.services.user_shell + } - // Fire-and-forget – we do not wait for completion. - if let Err(e) = command.spawn() { - warn!("failed to spawn notifier '{}': {e}", notify_command[0]); - } + fn show_raw_agent_reasoning(&self) -> bool { + self.services.show_raw_agent_reasoning } } @@ -1085,105 +1109,6 @@ pub(crate) struct ApplyPatchCommandContext { pub(crate) changes: HashMap, } -#[derive(Clone, Debug, Eq, PartialEq)] -enum AgentTaskKind { - Regular, - Review, - Compact, -} - -/// A series of Turns in response to user input. 
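// A reduced sketch of the task abstraction that replaces `AgentTask`: each
// task kind implements a small trait, the session spawns it onto tokio and
// keeps the abort handle, and aborting drains the handles and runs the
// kind-specific cleanup (a review task, for example, emits ExitedReviewMode).
// Names and shapes below are simplified stand-ins for crate::tasks::SessionTask
// and the bookkeeping in crate::state; they are not the exact definitions.

use std::sync::Arc;
use tokio::task::AbortHandle;

#[async_trait::async_trait]
trait Task: Send + Sync + 'static {
    async fn run(self: Arc<Self>, sub_id: String) -> Option<String>;
    // Kind-specific cleanup hook invoked when the task is aborted.
    async fn on_abort(&self, _sub_id: &str) {}
}

#[derive(Default)]
struct TaskRunner {
    running: tokio::sync::Mutex<Vec<(String, Arc<dyn Task>, AbortHandle)>>,
}

impl TaskRunner {
    async fn spawn(&self, task: Arc<dyn Task>, sub_id: String) {
        let handle = tokio::spawn({
            let task = Arc::clone(&task);
            let sub_id = sub_id.clone();
            async move { task.run(sub_id).await }
        })
        .abort_handle();
        self.running.lock().await.push((sub_id, task, handle));
    }

    async fn abort_all(&self) {
        let drained: Vec<_> = self.running.lock().await.drain(..).collect();
        for (sub_id, task, handle) in drained {
            handle.abort();
            task.on_abort(&sub_id).await;
            // ...emit a TurnAborted event for `sub_id` here...
        }
    }
}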
-pub(crate) struct AgentTask { - sess: Arc, - sub_id: String, - handle: AbortHandle, - kind: AgentTaskKind, -} - -impl AgentTask { - fn spawn( - sess: Arc, - turn_context: Arc, - sub_id: String, - input: Vec, - ) -> Self { - let handle = { - let sess = sess.clone(); - let sub_id = sub_id.clone(); - let tc = Arc::clone(&turn_context); - tokio::spawn(async move { run_task(sess, tc, sub_id, input).await }).abort_handle() - }; - Self { - sess, - sub_id, - handle, - kind: AgentTaskKind::Regular, - } - } - - fn review( - sess: Arc, - turn_context: Arc, - sub_id: String, - input: Vec, - ) -> Self { - let handle = { - let sess = sess.clone(); - let sub_id = sub_id.clone(); - let tc = Arc::clone(&turn_context); - tokio::spawn(async move { run_task(sess, tc, sub_id, input).await }).abort_handle() - }; - Self { - sess, - sub_id, - handle, - kind: AgentTaskKind::Review, - } - } - - fn compact( - sess: Arc, - turn_context: Arc, - sub_id: String, - input: Vec, - compact_instructions: String, - ) -> Self { - let handle = { - let sess = sess.clone(); - let sub_id = sub_id.clone(); - let tc = Arc::clone(&turn_context); - tokio::spawn(async move { - compact::run_compact_task(sess, tc, sub_id, input, compact_instructions).await - }) - .abort_handle() - }; - Self { - sess, - sub_id, - handle, - kind: AgentTaskKind::Compact, - } - } - - fn abort(self, reason: TurnAbortReason) { - // TOCTOU? - if !self.handle.is_finished() { - self.handle.abort(); - let event = Event { - id: self.sub_id.clone(), - msg: EventMsg::TurnAborted(TurnAbortedEvent { reason }), - }; - let sess = self.sess; - tokio::spawn(async move { - if self.kind == AgentTaskKind::Review { - exit_review_mode(sess.clone(), self.sub_id, None).await; - } - sess.send_event(event).await; - }); - } - } -} - async fn submission_loop( sess: Arc, turn_context: TurnContext, @@ -1234,9 +1159,15 @@ async fn submission_loop( updated_config.model_context_window = Some(model_info.context_window); } + let otel_event_manager = prev.client.get_otel_event_manager().with_model( + updated_config.model.as_str(), + updated_config.model_family.slug.as_str(), + ); + let client = ModelClient::new( Arc::new(updated_config), auth_manager, + otel_event_manager, provider, effective_effort, effective_summary, @@ -1269,6 +1200,7 @@ async fn submission_loop( shell_environment_policy: prev.shell_environment_policy.clone(), cwd: new_cwd.clone(), is_review_mode: false, + final_output_json_schema: None, }; // Install the new persistent context for subsequent tasks/turns. @@ -1287,12 +1219,15 @@ async fn submission_loop( } } Op::UserInput { items } => { + turn_context + .client + .get_otel_event_manager() + .user_prompt(&items); // attempt to inject input into current task if let Err(items) = sess.inject_input(items).await { // no current task, spawn a new one - let task = - AgentTask::spawn(sess.clone(), Arc::clone(&turn_context), sub.id, items); - sess.set_task(task).await; + sess.spawn_task(Arc::clone(&turn_context), sub.id, items, RegularTask) + .await; } } Op::UserTurn { @@ -1303,7 +1238,12 @@ async fn submission_loop( model, effort, summary, + final_output_json_schema, } => { + turn_context + .client + .get_otel_event_manager() + .user_prompt(&items); // attempt to inject input into current task if let Err(items) = sess.inject_input(items).await { // Derive a fresh TurnContext for this turn using the provided overrides. 
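// A reduced sketch of the inject-or-spawn shape used for Op::UserInput and
// Op::UserTurn above: new items are first offered to the running task's
// pending-input queue, and only if no task is active (the Err hands the items
// back) is a fresh task spawned with the same input. Types are stand-ins.

struct PendingInput {
    // Some(queue) while a task is running, None otherwise.
    queue: Option<Vec<String>>,
}

impl PendingInput {
    fn inject(&mut self, items: Vec<String>) -> Result<(), Vec<String>> {
        match self.queue.as_mut() {
            Some(queue) => {
                queue.extend(items);
                Ok(())
            }
            // No active task: return ownership of the items to the caller.
            None => Err(items),
        }
    }
}

fn on_user_input(pending: &mut PendingInput, items: Vec<String>) {
    if let Err(items) = pending.inject(items) {
        // ...spawn a new task here, seeded with `items`...
        drop(items);
    }
}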
@@ -1322,11 +1262,18 @@ async fn submission_loop( per_turn_config.model_context_window = Some(model_info.context_window); } + let otel_event_manager = + turn_context.client.get_otel_event_manager().with_model( + per_turn_config.model.as_str(), + per_turn_config.model_family.slug.as_str(), + ); + // Build a new client with per‑turn reasoning settings. // Reuse the same provider and session id; auth defaults to env/API key. let client = ModelClient::new( Arc::new(per_turn_config), auth_manager, + otel_event_manager, provider, effort, summary, @@ -1353,6 +1300,7 @@ async fn submission_loop( shell_environment_policy: turn_context.shell_environment_policy.clone(), cwd, is_review_mode: false, + final_output_json_schema, }; // if the environment context has changed, record it in the conversation history @@ -1366,10 +1314,9 @@ async fn submission_loop( // Install the new persistent context for subsequent tasks/turns. turn_context = Arc::new(fresh_turn_context); - // no current task, spawn a new one with the per‑turn context - let task = - AgentTask::spawn(sess.clone(), Arc::clone(&turn_context), sub.id, items); - sess.set_task(task).await; + // no current task, spawn a new one with the per-turn context + sess.spawn_task(Arc::clone(&turn_context), sub.id, items, RegularTask) + .await; } } Op::ExecApproval { id, decision } => match decision { @@ -1432,7 +1379,7 @@ async fn submission_loop( let sub_id = sub.id.clone(); // This is a cheap lookup from the connection manager's cache. - let tools = sess.mcp_connection_manager.list_all_tools(); + let tools = sess.services.mcp_connection_manager.list_all_tools(); let event = Event { id: sub_id, msg: EventMsg::McpListToolsResponse( @@ -1463,26 +1410,22 @@ async fn submission_loop( // Attempt to inject input into current task if let Err(items) = sess .inject_input(vec![InputItem::Text { - text: compact::COMPACT_TRIGGER_TEXT.to_string(), + text: compact::SUMMARIZATION_PROMPT.to_string(), }]) .await { - compact::spawn_compact_task( - sess.clone(), - Arc::clone(&turn_context), - sub.id, - items, - ) - .await; + sess.spawn_task(Arc::clone(&turn_context), sub.id, items, CompactTask) + .await; } } Op::Shutdown => { + sess.abort_all_tasks(TurnAbortReason::Interrupted).await; info!("Shutting down Codex instance"); // Gracefully flush and shutdown rollout recorder on session end so tests // that inspect the rollout file do not race with the background writer. let recorder_opt = { - let mut guard = sess.rollout.lock().await; + let mut guard = sess.services.rollout.lock().await; guard.take() }; if let Some(rec) = recorder_opt @@ -1509,7 +1452,7 @@ async fn submission_loop( let sub_id = sub.id.clone(); // Flush rollout writes before returning the path so readers observe a consistent file. 
let (path, rec_opt) = { - let guard = sess.rollout.lock().await; + let guard = sess.services.rollout.lock().await; match guard.as_ref() { Some(rec) => (rec.get_rollout_path(), Some(rec.clone())), None => { @@ -1587,10 +1530,19 @@ async fn spawn_review_thread( per_turn_config.model_context_window = Some(model_info.context_window); } + let otel_event_manager = parent_turn_context + .client + .get_otel_event_manager() + .with_model( + per_turn_config.model.as_str(), + per_turn_config.model_family.slug.as_str(), + ); + let per_turn_config = Arc::new(per_turn_config); let client = ModelClient::new( per_turn_config.clone(), auth_manager, + otel_event_manager, provider, per_turn_config.model_reasoning_effort, per_turn_config.model_reasoning_summary, @@ -1607,6 +1559,7 @@ async fn spawn_review_thread( shell_environment_policy: parent_turn_context.shell_environment_policy.clone(), cwd: parent_turn_context.cwd.clone(), is_review_mode: true, + final_output_json_schema: None, }; // Seed the child task with the review prompt as the initial user message. @@ -1617,8 +1570,7 @@ async fn spawn_review_thread( // Clone sub_id for the upcoming announcement before moving it into the task. let sub_id_for_event = sub_id.clone(); - let task = AgentTask::review(sess.clone(), tc.clone(), sub_id, input); - sess.set_task(task).await; + sess.spawn_task(tc.clone(), sub_id, input, ReviewTask).await; // Announce entering review mode so UIs can switch modes. sess.send_event(Event { @@ -1645,14 +1597,14 @@ async fn spawn_review_thread( /// Review mode: when `turn_context.is_review_mode` is true, the turn runs in an /// isolated in-memory thread without the parent session's prior history or /// user_instructions. Emits ExitedReviewMode upon final review message. -async fn run_task( +pub(crate) async fn run_task( sess: Arc, turn_context: Arc, sub_id: String, input: Vec, -) { +) -> Option { if input.is_empty() { - return; + return None; } let event = Event { id: sub_id.clone(), @@ -1883,11 +1835,12 @@ async fn run_task( last_agent_message = get_last_assistant_message_from_turn( &items_to_record_in_conversation_history, ); - sess.maybe_notify(UserNotification::AgentTurnComplete { - turn_id: sub_id.clone(), - input_messages: turn_input_messages, - last_assistant_message: last_agent_message.clone(), - }); + sess.notifier() + .notify(&UserNotification::AgentTurnComplete { + turn_id: sub_id.clone(), + input_messages: turn_input_messages, + last_assistant_message: last_agent_message.clone(), + }); break; } continue; @@ -1923,12 +1876,7 @@ async fn run_task( .await; } - sess.remove_task(&sub_id).await; - let event = Event { - id: sub_id, - msg: EventMsg::TaskComplete(TaskCompleteEvent { last_agent_message }), - }; - sess.send_event(event).await; + last_agent_message } /// Parse the review output; when not valid JSON, build a structured @@ -1965,13 +1913,14 @@ async fn run_turn( ) -> CodexResult { let tools = get_openai_tools( &turn_context.tools_config, - Some(sess.mcp_connection_manager.list_all_tools()), + Some(sess.services.mcp_connection_manager.list_all_tools()), ); let prompt = Prompt { input, tools, base_instructions_override: turn_context.base_instructions.clone(), + output_schema: turn_context.final_output_json_schema.clone(), }; let mut retries = 0; @@ -1980,9 +1929,14 @@ async fn run_turn( Ok(output) => return Ok(output), Err(CodexErr::Interrupted) => return Err(CodexErr::Interrupted), Err(CodexErr::EnvVar(var)) => return Err(CodexErr::EnvVar(var)), - Err(e @ (CodexErr::UsageLimitReached(_) | CodexErr::UsageNotIncluded)) 
=> { - return Err(e); + Err(CodexErr::UsageLimitReached(e)) => { + let rate_limits = e.rate_limits.clone(); + if let Some(rate_limits) = rate_limits { + sess.update_rate_limits(&sub_id, rate_limits).await; + } + return Err(CodexErr::UsageLimitReached(e)); } + Err(CodexErr::UsageNotIncluded) => return Err(CodexErr::UsageNotIncluded), Err(e) => { // Use the configured provider-specific stream retry budget. let max_retries = turn_context.client.get_provider().stream_max_retries(); @@ -2155,20 +2109,13 @@ async fn try_run_turn( ResponseEvent::RateLimits(snapshot) => { // Update internal state with latest rate limits, but defer sending until // token usage is available to avoid duplicate TokenCount events. - sess.update_rate_limits(snapshot).await; + sess.update_rate_limits(sub_id, snapshot).await; } ResponseEvent::Completed { response_id: _, token_usage, } => { - sess.update_token_usage_info(turn_context, token_usage.as_ref()) - .await; - let token_event = sess.get_token_count_event().await; - let _ = sess - .send_event(Event { - id: sub_id.to_string(), - msg: EventMsg::TokenCount(token_event), - }) + sess.update_token_usage_info(sub_id, turn_context, token_usage.as_ref()) .await; let unified_diff = turn_diff_tracker.get_unified_diff(); @@ -2216,7 +2163,7 @@ async fn try_run_turn( sess.send_event(event).await; } ResponseEvent::ReasoningContentDelta(delta) => { - if sess.show_raw_agent_reasoning { + if sess.show_raw_agent_reasoning() { let event = Event { id: sub_id.to_string(), msg: EventMsg::AgentReasoningRawContentDelta( @@ -2246,18 +2193,48 @@ async fn handle_response_item( .. } => { info!("FunctionCall: {name}({arguments})"); - Some( - handle_function_call( + if let Some((server, tool_name)) = + sess.services.mcp_connection_manager.parse_tool_name(&name) + { + let resp = handle_mcp_tool_call( sess, - turn_context, - turn_diff_tracker, - sub_id.to_string(), - name, + sub_id, + call_id.clone(), + server, + tool_name, arguments, - call_id, ) - .await, - ) + .await; + Some(resp) + } else { + let result = turn_context + .client + .get_otel_event_manager() + .log_tool_result(name.as_str(), call_id.as_str(), arguments.as_str(), || { + handle_function_call( + sess, + turn_context, + turn_diff_tracker, + sub_id.to_string(), + name.to_owned(), + arguments.to_owned(), + call_id.clone(), + ) + }) + .await; + + let output = match result { + Ok(content) => FunctionCallOutputPayload { + content, + success: Some(true), + }, + Err(FunctionCallError::RespondToModel(msg)) => FunctionCallOutputPayload { + content: msg, + success: Some(false), + }, + }; + Some(ResponseInputItem::FunctionCallOutput { call_id, output }) + } } ResponseItem::LocalShellCall { id, @@ -2265,6 +2242,7 @@ async fn handle_response_item( status: _, action, } => { + let name = "local_shell"; let LocalShellAction::Exec(action) = action; tracing::info!("LocalShellCall: {action:?}"); let params = ShellToolCallParams { @@ -2278,11 +2256,18 @@ async fn handle_response_item( (Some(call_id), _) => call_id, (None, Some(id)) => id, (None, None) => { - error!("LocalShellCall without call_id or id"); + let error_message = "LocalShellCall without call_id or id"; + + turn_context + .client + .get_otel_event_manager() + .log_tool_failed(name, error_message); + + error!(error_message); return Ok(Some(ResponseInputItem::FunctionCallOutput { call_id: "".to_string(), output: FunctionCallOutputPayload { - content: "LocalShellCall without call_id or id".to_string(), + content: error_message.to_string(), success: None, }, })); @@ -2290,17 +2275,43 @@ async fn 
handle_response_item( }; let exec_params = to_exec_params(params, turn_context); - Some( - handle_container_exec_with_params( - exec_params, - sess, - turn_context, - turn_diff_tracker, - sub_id.to_string(), - effective_call_id, - ) - .await, - ) + { + let result = turn_context + .client + .get_otel_event_manager() + .log_tool_result( + name, + effective_call_id.as_str(), + exec_params.command.join(" ").as_str(), + || { + handle_container_exec_with_params( + name, + exec_params, + sess, + turn_context, + turn_diff_tracker, + sub_id.to_string(), + effective_call_id.clone(), + ) + }, + ) + .await; + + let output = match result { + Ok(content) => FunctionCallOutputPayload { + content, + success: Some(true), + }, + Err(FunctionCallError::RespondToModel(msg)) => FunctionCallOutputPayload { + content: msg, + success: Some(false), + }, + }; + Some(ResponseInputItem::FunctionCallOutput { + call_id: effective_call_id, + output, + }) + } } ResponseItem::CustomToolCall { id: _, @@ -2308,18 +2319,29 @@ async fn handle_response_item( name, input, status: _, - } => Some( - handle_custom_tool_call( - sess, - turn_context, - turn_diff_tracker, - sub_id.to_string(), - name, - input, - call_id, - ) - .await, - ), + } => { + let result = turn_context + .client + .get_otel_event_manager() + .log_tool_result(name.as_str(), call_id.as_str(), input.as_str(), || { + handle_custom_tool_call( + sess, + turn_context, + turn_diff_tracker, + sub_id.to_string(), + name.to_owned(), + input.to_owned(), + call_id.clone(), + ) + }) + .await; + + let output = match result { + Ok(content) => content, + Err(FunctionCallError::RespondToModel(msg)) => msg, + }; + Some(ResponseInputItem::CustomToolCallOutput { call_id, output }) + } ResponseItem::FunctionCallOutput { .. } => { debug!("unexpected FunctionCallOutput from stream"); None @@ -2338,7 +2360,7 @@ async fn handle_response_item( trace!("suppressing assistant Message in review mode"); Vec::new() } - _ => map_response_item_to_event_messages(&item, sess.show_raw_agent_reasoning), + _ => map_response_item_to_event_messages(&item, sess.show_raw_agent_reasoning()), }; for msg in msgs { let event = Event { @@ -2356,22 +2378,17 @@ async fn handle_response_item( async fn handle_unified_exec_tool_call( sess: &Session, - call_id: String, session_id: Option, arguments: Vec, timeout_ms: Option, -) -> ResponseInputItem { +) -> Result { let parsed_session_id = if let Some(session_id) = session_id { match session_id.parse::() { Ok(parsed) => Some(parsed), Err(output) => { - return ResponseInputItem::FunctionCallOutput { - call_id: call_id.to_string(), - output: FunctionCallOutputPayload { - content: format!("invalid session_id: {session_id} due to error {output}"), - success: Some(false), - }, - }; + return Err(FunctionCallError::RespondToModel(format!( + "invalid session_id: {session_id} due to error {output:?}" + ))); } } } else { @@ -2384,40 +2401,30 @@ async fn handle_unified_exec_tool_call( timeout_ms, }; - let result = sess.unified_exec_manager.handle_request(request).await; - - let output_payload = match result { - Ok(value) => { - #[derive(Serialize)] - struct SerializedUnifiedExecResult<'a> { - session_id: Option, - output: &'a str, - } - - match serde_json::to_string(&SerializedUnifiedExecResult { - session_id: value.session_id.map(|id| id.to_string()), - output: &value.output, - }) { - Ok(serialized) => FunctionCallOutputPayload { - content: serialized, - success: Some(true), - }, - Err(err) => FunctionCallOutputPayload { - content: format!("failed to serialize unified exec 
output: {err}"), - success: Some(false), - }, - } - } - Err(err) => FunctionCallOutputPayload { - content: format!("unified exec failed: {err}"), - success: Some(false), - }, - }; + let value = sess + .services + .unified_exec_manager + .handle_request(request) + .await + .map_err(|err| { + FunctionCallError::RespondToModel(format!("unified exec failed: {err:?}")) + })?; - ResponseInputItem::FunctionCallOutput { - call_id, - output: output_payload, + #[derive(Serialize)] + struct SerializedUnifiedExecResult { + session_id: Option, + output: String, } + + serde_json::to_string(&SerializedUnifiedExecResult { + session_id: value.session_id.map(|id| id.to_string()), + output: value.output, + }) + .map_err(|err| { + FunctionCallError::RespondToModel(format!( + "failed to serialize unified exec output: {err:?}" + )) + }) } async fn handle_function_call( @@ -2428,16 +2435,12 @@ async fn handle_function_call( name: String, arguments: String, call_id: String, -) -> ResponseInputItem { +) -> Result { match name.as_str() { "container.exec" | "shell" => { - let params = match parse_container_exec_arguments(arguments, turn_context, &call_id) { - Ok(params) => params, - Err(output) => { - return *output; - } - }; + let params = parse_container_exec_arguments(arguments, turn_context, &call_id)?; handle_container_exec_with_params( + name.as_str(), params, sess, turn_context, @@ -2457,74 +2460,49 @@ async fn handle_function_call( timeout_ms: Option, } - let args = match serde_json::from_str::(&arguments) { - Ok(args) => args, - Err(err) => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("failed to parse function arguments: {err}"), - success: Some(false), - }, - }; - } - }; + let args: UnifiedExecArgs = serde_json::from_str(&arguments).map_err(|err| { + FunctionCallError::RespondToModel(format!( + "failed to parse function arguments: {err:?}" + )) + })?; - handle_unified_exec_tool_call( - sess, - call_id, - args.session_id, - args.input, - args.timeout_ms, - ) - .await + handle_unified_exec_tool_call(sess, args.session_id, args.input, args.timeout_ms).await } "view_image" => { #[derive(serde::Deserialize)] struct SeeImageArgs { path: String, } - let args = match serde_json::from_str::(&arguments) { - Ok(a) => a, - Err(e) => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("failed to parse function arguments: {e}"), - success: Some(false), - }, - }; - } - }; + let args: SeeImageArgs = serde_json::from_str(&arguments).map_err(|e| { + FunctionCallError::RespondToModel(format!( + "failed to parse function arguments: {e:?}" + )) + })?; let abs = turn_context.resolve_path(Some(args.path)); - let output = match sess - .inject_input(vec![InputItem::LocalImage { path: abs }]) + sess.inject_input(vec![InputItem::LocalImage { path: abs.clone() }]) .await - { - Ok(()) => FunctionCallOutputPayload { - content: "attached local image path".to_string(), - success: Some(true), - }, - Err(_) => FunctionCallOutputPayload { - content: "unable to attach image (no active task)".to_string(), - success: Some(false), - }, - }; - ResponseInputItem::FunctionCallOutput { call_id, output } + .map_err(|_| { + FunctionCallError::RespondToModel( + "unable to attach image (no active task)".to_string(), + ) + })?; + sess.send_event(Event { + id: sub_id.clone(), + msg: EventMsg::ViewImageToolCall(ViewImageToolCallEvent { + call_id: call_id.clone(), + path: abs, + }), + }) + .await; + + Ok("attached 
local image path".to_string()) } "apply_patch" => { - let args = match serde_json::from_str::(&arguments) { - Ok(a) => a, - Err(e) => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("failed to parse function arguments: {e}"), - success: None, - }, - }; - } - }; + let args: ApplyPatchToolArgs = serde_json::from_str(&arguments).map_err(|e| { + FunctionCallError::RespondToModel(format!( + "failed to parse function arguments: {e:?}" + )) + })?; let exec_params = ExecParams { command: vec!["apply_patch".to_string(), args.input.clone()], cwd: turn_context.cwd.clone(), @@ -2534,6 +2512,7 @@ async fn handle_function_call( justification: None, }; handle_container_exec_with_params( + name.as_str(), exec_params, sess, turn_context, @@ -2546,69 +2525,41 @@ async fn handle_function_call( "update_plan" => handle_update_plan(sess, arguments, sub_id, call_id).await, EXEC_COMMAND_TOOL_NAME => { // TODO(mbolin): Sandbox check. - let exec_params = match serde_json::from_str::(&arguments) { - Ok(params) => params, - Err(e) => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("failed to parse function arguments: {e}"), - success: Some(false), - }, - }; - } - }; + let exec_params: ExecCommandParams = serde_json::from_str(&arguments).map_err(|e| { + FunctionCallError::RespondToModel(format!( + "failed to parse function arguments: {e:?}" + )) + })?; let result = sess + .services .session_manager .handle_exec_command_request(exec_params) .await; - let function_call_output = crate::exec_command::result_into_payload(result); - ResponseInputItem::FunctionCallOutput { - call_id, - output: function_call_output, + match result { + Ok(output) => Ok(output.to_text_output()), + Err(err) => Err(FunctionCallError::RespondToModel(err)), } } WRITE_STDIN_TOOL_NAME => { - let write_stdin_params = match serde_json::from_str::(&arguments) { - Ok(params) => params, - Err(e) => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("failed to parse function arguments: {e}"), - success: Some(false), - }, - }; - } - }; + let write_stdin_params = + serde_json::from_str::(&arguments).map_err(|e| { + FunctionCallError::RespondToModel(format!( + "failed to parse function arguments: {e:?}" + )) + })?; + let result = sess + .services .session_manager .handle_write_stdin_request(write_stdin_params) - .await; - let function_call_output: FunctionCallOutputPayload = - crate::exec_command::result_into_payload(result); - ResponseInputItem::FunctionCallOutput { - call_id, - output: function_call_output, - } - } - _ => { - match sess.mcp_connection_manager.parse_tool_name(&name) { - Some((server, tool_name)) => { - handle_mcp_tool_call(sess, &sub_id, call_id, server, tool_name, arguments).await - } - None => { - // Unknown function: reply with structured failure so the model can adapt. 
- ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("unsupported call: {name}"), - success: None, - }, - } - } - } + .await + .map_err(FunctionCallError::RespondToModel)?; + + Ok(result.to_text_output()) } + _ => Err(FunctionCallError::RespondToModel(format!( + "unsupported call: {name}" + ))), } } @@ -2620,7 +2571,7 @@ async fn handle_custom_tool_call( name: String, input: String, call_id: String, -) -> ResponseInputItem { +) -> Result { info!("CustomToolCall: {name} {input}"); match name.as_str() { "apply_patch" => { @@ -2632,7 +2583,9 @@ async fn handle_custom_tool_call( with_escalated_permissions: None, justification: None, }; - let resp = handle_container_exec_with_params( + + handle_container_exec_with_params( + name.as_str(), exec_params, sess, turn_context, @@ -2640,26 +2593,13 @@ async fn handle_custom_tool_call( sub_id, call_id, ) - .await; - - // Convert function-call style output into a custom tool call output - match resp { - ResponseInputItem::FunctionCallOutput { call_id, output } => { - ResponseInputItem::CustomToolCallOutput { - call_id, - output: output.content, - } - } - // Pass through if already a custom tool output or other variant - other => other, - } + .await } _ => { debug!("unexpected CustomToolCall from stream"); - ResponseInputItem::CustomToolCallOutput { - call_id, - output: format!("unsupported custom tool call: {name}"), - } + Err(FunctionCallError::RespondToModel(format!( + "unsupported custom tool call: {name}" + ))) } } } @@ -2678,23 +2618,13 @@ fn to_exec_params(params: ShellToolCallParams, turn_context: &TurnContext) -> Ex fn parse_container_exec_arguments( arguments: String, turn_context: &TurnContext, - call_id: &str, -) -> Result> { - // parse command - match serde_json::from_str::(&arguments) { - Ok(shell_tool_call_params) => Ok(to_exec_params(shell_tool_call_params, turn_context)), - Err(e) => { - // allow model to re-sample - let output = ResponseInputItem::FunctionCallOutput { - call_id: call_id.to_string(), - output: FunctionCallOutputPayload { - content: format!("failed to parse function arguments: {e}"), - success: None, - }, - }; - Err(Box::new(output)) - } - } + _call_id: &str, +) -> Result { + serde_json::from_str::(&arguments) + .map(|p| to_exec_params(p, turn_context)) + .map_err(|e| { + FunctionCallError::RespondToModel(format!("failed to parse function arguments: {e:?}")) + }) } pub struct ExecInvokeArgs<'a> { @@ -2711,12 +2641,12 @@ fn maybe_translate_shell_command( sess: &Session, turn_context: &TurnContext, ) -> ExecParams { - let should_translate = matches!(sess.user_shell, crate::shell::Shell::PowerShell(_)) + let should_translate = matches!(sess.user_shell(), crate::shell::Shell::PowerShell(_)) || turn_context.shell_environment_policy.use_profile; if should_translate && let Some(command) = sess - .user_shell + .user_shell() .format_default_shell_invocation(params.command.clone()) { return ExecParams { command, ..params }; @@ -2725,26 +2655,23 @@ fn maybe_translate_shell_command( } async fn handle_container_exec_with_params( + tool_name: &str, params: ExecParams, sess: &Session, turn_context: &TurnContext, turn_diff_tracker: &mut TurnDiffTracker, sub_id: String, call_id: String, -) -> ResponseInputItem { +) -> Result { + let otel_event_manager = turn_context.client.get_otel_event_manager(); + if params.with_escalated_permissions.unwrap_or(false) && !matches!(turn_context.approval_policy, AskForApproval::OnRequest) { - return ResponseInputItem::FunctionCallOutput { - call_id, 
- output: FunctionCallOutputPayload { - content: format!( - "approval policy is {policy:?}; reject command — you should not ask for escalated permissions if the approval policy is {policy:?}", - policy = turn_context.approval_policy - ), - success: None, - }, - }; + return Err(FunctionCallError::RespondToModel(format!( + "approval policy is {policy:?}; reject command — you should not ask for escalated permissions if the approval policy is {policy:?}", + policy = turn_context.approval_policy + ))); } // check if this was a patch, and apply it if so @@ -2761,13 +2688,9 @@ async fn handle_container_exec_with_params( // It looks like an invocation of `apply_patch`, but we // could not resolve it into a patch that would apply // cleanly. Return to model for resample. - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("error: {parse_error:#}"), - success: None, - }, - }; + return Err(FunctionCallError::RespondToModel(format!( + "error: {parse_error:#?}" + ))); } MaybeApplyPatchVerified::ShellParseError(error) => { trace!("Failed to parse shell command, {error:?}"); @@ -2785,13 +2708,9 @@ async fn handle_container_exec_with_params( .ok() .map(|p| p.to_string_lossy().to_string()); let Some(path_to_codex) = path_to_codex else { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: "failed to determine path to codex executable".to_string(), - success: None, - }, - }; + return Err(FunctionCallError::RespondToModel( + "failed to determine path to codex executable".to_string(), + )); }; let params = ExecParams { @@ -2809,6 +2728,7 @@ async fn handle_container_exec_with_params( let safety = if *user_explicitly_approved_this_action { SafetyCheck::AutoApprove { sandbox_type: SandboxType::None, + user_explicitly_approved: true, } } else { assess_safety_for_untrusted_command( @@ -2830,7 +2750,7 @@ async fn handle_container_exec_with_params( ¶ms.command, turn_context.approval_policy, &turn_context.sandbox_policy, - &state.approved_commands, + state.approved_commands_ref(), params.with_escalated_permissions.unwrap_or(false), ) }; @@ -2840,9 +2760,25 @@ async fn handle_container_exec_with_params( }; let sandbox_type = match safety { - SafetyCheck::AutoApprove { sandbox_type } => sandbox_type, + SafetyCheck::AutoApprove { + sandbox_type, + user_explicitly_approved, + } => { + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + ReviewDecision::Approved, + if user_explicitly_approved { + ToolDecisionSource::User + } else { + ToolDecisionSource::Config + }, + ); + + sandbox_type + } SafetyCheck::AskUser => { - let rx_approve = sess + let decision = sess .request_command_approval( sub_id.clone(), call_id.clone(), @@ -2851,19 +2787,45 @@ async fn handle_container_exec_with_params( params.justification.clone(), ) .await; - match rx_approve.await.unwrap_or_default() { - ReviewDecision::Approved => (), + match decision { + ReviewDecision::Approved => { + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + ReviewDecision::Approved, + ToolDecisionSource::User, + ); + } ReviewDecision::ApprovedForSession => { + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + ReviewDecision::ApprovedForSession, + ToolDecisionSource::User, + ); sess.add_approved_command(params.command.clone()).await; } - ReviewDecision::Denied | ReviewDecision::Abort => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: "exec 
command rejected by user".to_string(), - success: None, - }, - }; + ReviewDecision::Denied => { + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + ReviewDecision::Denied, + ToolDecisionSource::User, + ); + return Err(FunctionCallError::RespondToModel( + "exec command rejected by user".to_string(), + )); + } + ReviewDecision::Abort => { + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + ReviewDecision::Abort, + ToolDecisionSource::User, + ); + return Err(FunctionCallError::RespondToModel( + "exec command aborted by user".to_string(), + )); } } // No sandboxing is applied because the user has given @@ -2873,13 +2835,15 @@ async fn handle_container_exec_with_params( SandboxType::None } SafetyCheck::Reject { reason } => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!("exec command rejected: {reason}"), - success: None, - }, - }; + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + ReviewDecision::Denied, + ToolDecisionSource::Config, + ); + return Err(FunctionCallError::RespondToModel(format!( + "exec command rejected: {reason:?}" + ))); } }; @@ -2909,7 +2873,7 @@ async fn handle_container_exec_with_params( sandbox_type, sandbox_policy: &turn_context.sandbox_policy, sandbox_cwd: &turn_context.cwd, - codex_linux_sandbox_exe: &sess.codex_linux_sandbox_exe, + codex_linux_sandbox_exe: &sess.services.codex_linux_sandbox_exe, stdout_stream: if exec_command_context.apply_patch.is_some() { None } else { @@ -2926,19 +2890,16 @@ async fn handle_container_exec_with_params( match output_result { Ok(output) => { let ExecToolCallOutput { exit_code, .. } = &output; - - let is_success = *exit_code == 0; let content = format_exec_output(&output); - ResponseInputItem::FunctionCallOutput { - call_id: call_id.clone(), - output: FunctionCallOutputPayload { - content, - success: Some(is_success), - }, + if *exit_code == 0 { + Ok(content) + } else { + Err(FunctionCallError::RespondToModel(content)) } } Err(CodexErr::Sandbox(error)) => { handle_sandbox_error( + tool_name, turn_diff_tracker, params, exec_command_context, @@ -2946,20 +2907,19 @@ async fn handle_container_exec_with_params( sandbox_type, sess, turn_context, + &otel_event_manager, ) .await } - Err(e) => ResponseInputItem::FunctionCallOutput { - call_id: call_id.clone(), - output: FunctionCallOutputPayload { - content: format!("execution error: {e}"), - success: None, - }, - }, + Err(e) => Err(FunctionCallError::RespondToModel(format!( + "execution error: {e:?}" + ))), } } +#[allow(clippy::too_many_arguments)] async fn handle_sandbox_error( + tool_name: &str, turn_diff_tracker: &mut TurnDiffTracker, params: ExecParams, exec_command_context: ExecCommandContext, @@ -2967,35 +2927,24 @@ async fn handle_sandbox_error( sandbox_type: SandboxType, sess: &Session, turn_context: &TurnContext, -) -> ResponseInputItem { + otel_event_manager: &OtelEventManager, +) -> Result { let call_id = exec_command_context.call_id.clone(); let sub_id = exec_command_context.sub_id.clone(); let cwd = exec_command_context.cwd.clone(); if let SandboxErr::Timeout { output } = &error { let content = format_exec_output(output); - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content, - success: Some(false), - }, - }; + return Err(FunctionCallError::RespondToModel(content)); } // Early out if either the user never wants to be asked for approval, or // we're letting the model manage escalation requests. 
Otherwise, continue match turn_context.approval_policy { AskForApproval::Never | AskForApproval::OnRequest => { - return ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: format!( - "failed in sandbox {sandbox_type:?} with execution error: {error}" - ), - success: Some(false), - }, - }; + return Err(FunctionCallError::RespondToModel(format!( + "failed in sandbox {sandbox_type:?} with execution error: {error:?}" + ))); } AskForApproval::UnlessTrusted | AskForApproval::OnFailure => (), } @@ -3012,7 +2961,7 @@ async fn handle_sandbox_error( sess.notify_background_event(&sub_id, format!("Execution failed: {error}")) .await; - let rx_approve = sess + let decision = sess .request_command_approval( sub_id.clone(), call_id.clone(), @@ -3022,7 +2971,7 @@ async fn handle_sandbox_error( ) .await; - match rx_approve.await.unwrap_or_default() { + match decision { ReviewDecision::Approved | ReviewDecision::ApprovedForSession => { // Persist this command as pre‑approved for the // remainder of the session so future @@ -3033,6 +2982,13 @@ async fn handle_sandbox_error( sess.notify_background_event(&sub_id, "retrying command without sandbox") .await; + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + decision, + ToolDecisionSource::User, + ); + // This is an escalated retry; the policy will not be // examined and the sandbox has been set to `None`. let retry_output_result = sess @@ -3044,7 +3000,7 @@ async fn handle_sandbox_error( sandbox_type: SandboxType::None, sandbox_policy: &turn_context.sandbox_policy, sandbox_cwd: &turn_context.cwd, - codex_linux_sandbox_exe: &sess.codex_linux_sandbox_exe, + codex_linux_sandbox_exe: &sess.services.codex_linux_sandbox_exe, stdout_stream: if exec_command_context.apply_patch.is_some() { None } else { @@ -3061,36 +3017,30 @@ async fn handle_sandbox_error( match retry_output_result { Ok(retry_output) => { let ExecToolCallOutput { exit_code, .. } = &retry_output; - - let is_success = *exit_code == 0; let content = format_exec_output(&retry_output); - - ResponseInputItem::FunctionCallOutput { - call_id: call_id.clone(), - output: FunctionCallOutputPayload { - content, - success: Some(is_success), - }, + if *exit_code == 0 { + Ok(content) + } else { + Err(FunctionCallError::RespondToModel(content)) } } - Err(e) => ResponseInputItem::FunctionCallOutput { - call_id: call_id.clone(), - output: FunctionCallOutputPayload { - content: format!("retry failed: {e}"), - success: None, - }, - }, + Err(e) => Err(FunctionCallError::RespondToModel(format!( + "retry failed: {e}" + ))), } } - ReviewDecision::Denied | ReviewDecision::Abort => { + decision @ (ReviewDecision::Denied | ReviewDecision::Abort) => { + otel_event_manager.tool_decision( + tool_name, + call_id.as_str(), + decision, + ToolDecisionSource::User, + ); + // Fall through to original failure handling. - ResponseInputItem::FunctionCallOutput { - call_id, - output: FunctionCallOutputPayload { - content: "exec command rejected by user".to_string(), - success: None, - }, - } + Err(FunctionCallError::RespondToModel( + "exec command rejected by user".to_string(), + )) } } } @@ -3300,7 +3250,7 @@ fn convert_call_tool_result_to_function_call_output_payload( /// Emits an ExitedReviewMode Event with optional ReviewOutput, /// and records a developer message with the review output. 
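// A reduced sketch of the "approve for session" bookkeeping above: the exact
// argv is stored in session state so the safety check can auto-approve an
// identical command later in the session instead of prompting again. The
// HashSet below stands in for the real SessionState in crate::state.

use std::collections::HashSet;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum UserDecision {
    Approved,
    ApprovedForSession,
    Denied,
    Abort,
}

#[derive(Default)]
struct ApprovedCommands {
    commands: HashSet<Vec<String>>,
}

impl ApprovedCommands {
    // Returns whether the command may proceed, remembering it when the user
    // approved it for the rest of the session.
    fn apply_decision(&mut self, decision: UserDecision, command: &[String]) -> bool {
        match decision {
            UserDecision::Approved => true,
            UserDecision::ApprovedForSession => {
                self.commands.insert(command.to_vec());
                true
            }
            UserDecision::Denied | UserDecision::Abort => false,
        }
    }

    fn is_pre_approved(&self, command: &[String]) -> bool {
        self.commands.contains(command)
    }
}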
-async fn exit_review_mode( +pub(crate) async fn exit_review_mode( session: Arc, task_sub_id: String, review_output: Option, @@ -3361,10 +3311,17 @@ mod tests { use super::*; use crate::config::ConfigOverrides; use crate::config::ConfigToml; + use crate::protocol::CompactedItem; use crate::protocol::InitialHistory; use crate::protocol::ResumedHistory; + use crate::state::TaskKind; + use crate::tasks::SessionTask; + use crate::tasks::SessionTaskContext; + use codex_app_server_protocol::AuthMode; use codex_protocol::models::ContentItem; + use codex_protocol::models::ResponseItem; + use mcp_types::ContentBlock; use mcp_types::TextContent; use pretty_assertions::assert_eq; @@ -3373,6 +3330,8 @@ mod tests { use std::path::PathBuf; use std::sync::Arc; use std::time::Duration as StdDuration; + use tokio::time::Duration; + use tokio::time::sleep; #[test] fn reconstruct_history_matches_live_compactions() { @@ -3398,7 +3357,7 @@ mod tests { }), )); - let actual = tokio_test::block_on(async { session.state.lock().await.history.contents() }); + let actual = tokio_test::block_on(async { session.state.lock().await.history_snapshot() }); assert_eq!(expected, actual); } @@ -3411,7 +3370,7 @@ mod tests { session.record_initial_history(&turn_context, InitialHistory::Forked(rollout_items)), ); - let actual = tokio_test::block_on(async { session.state.lock().await.history.contents() }); + let actual = tokio_test::block_on(async { session.state.lock().await.history_snapshot() }); assert_eq!(expected, actual); } @@ -3597,6 +3556,18 @@ mod tests { }) } + fn otel_event_manager(conversation_id: ConversationId, config: &Config) -> OtelEventManager { + OtelEventManager::new( + conversation_id, + config.model.as_str(), + config.model_family.slug.as_str(), + None, + Some(AuthMode::ChatGPT), + false, + "test".to_string(), + ) + } + pub(crate) fn make_session_and_context() -> (Session, TurnContext) { let (tx_event, _rx_event) = async_channel::unbounded(); let codex_home = tempfile::tempdir().expect("create temp dir"); @@ -3608,9 +3579,11 @@ mod tests { .expect("load default test config"); let config = Arc::new(config); let conversation_id = ConversationId::default(); + let otel_event_manager = otel_event_manager(conversation_id, config.as_ref()); let client = ModelClient::new( config.clone(), None, + otel_event_manager, config.model_provider.clone(), config.model_reasoning_effort, config.model_reasoning_summary, @@ -3635,25 +3608,197 @@ mod tests { shell_environment_policy: config.shell_environment_policy.clone(), tools_config, is_review_mode: false, + final_output_json_schema: None, + }; + let services = SessionServices { + mcp_connection_manager: McpConnectionManager::default(), + session_manager: ExecSessionManager::default(), + unified_exec_manager: UnifiedExecSessionManager::default(), + notifier: UserNotifier::default(), + rollout: Mutex::new(None), + codex_linux_sandbox_exe: None, + user_shell: shell::Shell::Unknown, + show_raw_agent_reasoning: config.show_raw_agent_reasoning, }; let session = Session { conversation_id, tx_event, + state: Mutex::new(SessionState::new()), + active_turn: Mutex::new(None), + services, + next_internal_sub_id: AtomicU64::new(0), + }; + (session, turn_context) + } + + // Like make_session_and_context, but returns Arc and the event receiver + // so tests can assert on emitted events. 
+ fn make_session_and_context_with_rx() -> ( + Arc, + Arc, + async_channel::Receiver, + ) { + let (tx_event, rx_event) = async_channel::unbounded(); + let codex_home = tempfile::tempdir().expect("create temp dir"); + let config = Config::load_from_base_config_with_overrides( + ConfigToml::default(), + ConfigOverrides::default(), + codex_home.path().to_path_buf(), + ) + .expect("load default test config"); + let config = Arc::new(config); + let conversation_id = ConversationId::default(); + let otel_event_manager = otel_event_manager(conversation_id, config.as_ref()); + let client = ModelClient::new( + config.clone(), + None, + otel_event_manager, + config.model_provider.clone(), + config.model_reasoning_effort, + config.model_reasoning_summary, + conversation_id, + ); + let tools_config = ToolsConfig::new(&ToolsConfigParams { + model_family: &config.model_family, + include_plan_tool: config.include_plan_tool, + include_apply_patch_tool: config.include_apply_patch_tool, + include_web_search_request: config.tools_web_search_request, + use_streamable_shell_tool: config.use_experimental_streamable_shell_tool, + include_view_image_tool: config.include_view_image_tool, + experimental_unified_exec_tool: config.use_experimental_unified_exec_tool, + }); + let turn_context = Arc::new(TurnContext { + client, + cwd: config.cwd.clone(), + base_instructions: config.base_instructions.clone(), + user_instructions: config.user_instructions.clone(), + approval_policy: config.approval_policy, + sandbox_policy: config.sandbox_policy.clone(), + shell_environment_policy: config.shell_environment_policy.clone(), + tools_config, + is_review_mode: false, + final_output_json_schema: None, + }); + let services = SessionServices { mcp_connection_manager: McpConnectionManager::default(), session_manager: ExecSessionManager::default(), unified_exec_manager: UnifiedExecSessionManager::default(), - notify: None, + notifier: UserNotifier::default(), rollout: Mutex::new(None), - state: Mutex::new(State { - history: ConversationHistory::new(), - ..Default::default() - }), codex_linux_sandbox_exe: None, user_shell: shell::Shell::Unknown, show_raw_agent_reasoning: config.show_raw_agent_reasoning, - next_internal_sub_id: AtomicU64::new(0), }; - (session, turn_context) + let session = Arc::new(Session { + conversation_id, + tx_event, + state: Mutex::new(SessionState::new()), + active_turn: Mutex::new(None), + services, + next_internal_sub_id: AtomicU64::new(0), + }); + (session, turn_context, rx_event) + } + + #[derive(Clone, Copy)] + struct NeverEndingTask(TaskKind); + + #[async_trait::async_trait] + impl SessionTask for NeverEndingTask { + fn kind(&self) -> TaskKind { + self.0 + } + + async fn run( + self: Arc, + _session: Arc, + _ctx: Arc, + _sub_id: String, + _input: Vec, + ) -> Option { + loop { + sleep(Duration::from_secs(60)).await; + } + } + + async fn abort(&self, session: Arc, sub_id: &str) { + if let TaskKind::Review = self.0 { + exit_review_mode(session.clone_session(), sub_id.to_string(), None).await; + } + } + } + + #[tokio::test] + async fn abort_regular_task_emits_turn_aborted_only() { + let (sess, tc, rx) = make_session_and_context_with_rx(); + let sub_id = "sub-regular".to_string(); + let input = vec![InputItem::Text { + text: "hello".to_string(), + }]; + sess.spawn_task( + Arc::clone(&tc), + sub_id.clone(), + input, + NeverEndingTask(TaskKind::Regular), + ) + .await; + + sess.abort_all_tasks(TurnAbortReason::Interrupted).await; + + let evt = rx.recv().await.expect("event"); + match evt.msg { + 
+            EventMsg::TurnAborted(e) => assert_eq!(TurnAbortReason::Interrupted, e.reason),
+            other => panic!("unexpected event: {other:?}"),
+        }
+        assert!(rx.try_recv().is_err());
+    }
+
+    #[tokio::test]
+    async fn abort_review_task_emits_exited_then_aborted_and_records_history() {
+        let (sess, tc, rx) = make_session_and_context_with_rx();
+        let sub_id = "sub-review".to_string();
+        let input = vec![InputItem::Text {
+            text: "start review".to_string(),
+        }];
+        sess.spawn_task(
+            Arc::clone(&tc),
+            sub_id.clone(),
+            input,
+            NeverEndingTask(TaskKind::Review),
+        )
+        .await;
+
+        sess.abort_all_tasks(TurnAbortReason::Interrupted).await;
+
+        let first = rx.recv().await.expect("first event");
+        match first.msg {
+            EventMsg::ExitedReviewMode(ev) => assert!(ev.review_output.is_none()),
+            other => panic!("unexpected first event: {other:?}"),
+        }
+        let second = rx.recv().await.expect("second event");
+        match second.msg {
+            EventMsg::TurnAborted(e) => assert_eq!(TurnAbortReason::Interrupted, e.reason),
+            other => panic!("unexpected second event: {other:?}"),
+        }
+
+        let history = sess.history_snapshot().await;
+        let found = history.iter().any(|item| match item {
+            ResponseItem::Message { role, content, .. } if role == "user" => {
+                content.iter().any(|ci| match ci {
+                    ContentItem::InputText { text } => {
+                        text.contains("")
+                            && text.contains("review")
+                            && text.contains("interrupted")
+                    }
+                    _ => false,
+                })
+            }
+            _ => false,
+        });
+        assert!(
+            found,
+            "synthetic review interruption not recorded in history"
+        );
     }

     fn sample_rollout(
@@ -3798,10 +3943,12 @@ mod tests {

         let mut turn_diff_tracker = TurnDiffTracker::new();

+        let tool_name = "shell";
         let sub_id = "test-sub".to_string();
         let call_id = "test-call".to_string();

         let resp = handle_container_exec_with_params(
+            tool_name,
             params,
             &session,
             &turn_context,
@@ -3811,8 +3958,8 @@ mod tests {
         )
         .await;

-        let ResponseInputItem::FunctionCallOutput { output, .. } = resp else {
-            panic!("expected FunctionCallOutput");
+        let Err(FunctionCallError::RespondToModel(output)) = resp else {
+            panic!("expected error result");
         };

         let expected = format!(
@@ -3820,13 +3967,14 @@ mod tests {
             policy = turn_context.approval_policy
         );

-        pretty_assertions::assert_eq!(output.content, expected);
+        pretty_assertions::assert_eq!(output, expected);

         // Now retry the same command WITHOUT escalated permissions; should succeed.
         // Force DangerFullAccess to avoid platform sandbox dependencies in tests.
         turn_context.sandbox_policy = SandboxPolicy::DangerFullAccess;

         let resp2 = handle_container_exec_with_params(
+            tool_name,
             params2,
             &session,
             &turn_context,
@@ -3836,9 +3984,7 @@ mod tests {
         )
         .await;

-        let ResponseInputItem::FunctionCallOutput { output, .. } = resp2 else {
-            panic!("expected FunctionCallOutput on retry");
-        };
+        let output = resp2.expect("expected Ok result");

         #[derive(Deserialize, PartialEq, Eq, Debug)]
         struct ResponseExecMetadata {
@@ -3852,10 +3998,9 @@ mod tests {
         }

         let exec_output: ResponseExecOutput =
-            serde_json::from_str(&output.content).expect("valid exec output json");
+            serde_json::from_str(&output).expect("valid exec output json");

         pretty_assertions::assert_eq!(exec_output.metadata, ResponseExecMetadata { exit_code: 0 });
         assert!(exec_output.output.contains("hi"));
-        pretty_assertions::assert_eq!(output.success, Some(true));
     }
 }
diff --git a/codex-rs/core/src/codex/compact.rs b/codex-rs/core/src/codex/compact.rs
index d1547a48185..136e68e401e 100644
--- a/codex-rs/core/src/codex/compact.rs
+++ b/codex-rs/core/src/codex/compact.rs
@@ -1,6 +1,5 @@
 use std::sync::Arc;

-use super::AgentTask;
 use super::Session;
 use super::TurnContext;
 use super::get_last_assistant_message_from_turn;
@@ -15,7 +14,6 @@ use crate::protocol::Event;
 use crate::protocol::EventMsg;
 use crate::protocol::InputItem;
 use crate::protocol::InputMessageKind;
-use crate::protocol::TaskCompleteEvent;
 use crate::protocol::TaskStartedEvent;
 use crate::protocol::TurnContextItem;
 use crate::truncate::truncate_middle;
@@ -27,8 +25,7 @@ use codex_protocol::models::ResponseItem;
 use codex_protocol::protocol::RolloutItem;
 use futures::prelude::*;

-pub(super) const COMPACT_TRIGGER_TEXT: &str = "Start Summarization";
-const SUMMARIZATION_PROMPT: &str = include_str!("../../templates/compact/prompt.md");
+pub const SUMMARIZATION_PROMPT: &str = include_str!("../../templates/compact/prompt.md");
 const COMPACT_USER_MESSAGE_MAX_TOKENS: usize = 20_000;

 #[derive(Template)]
@@ -38,48 +35,23 @@ struct HistoryBridgeTemplate<'a> {
     summary_text: &'a str,
 }

-pub(super) async fn spawn_compact_task(
-    sess: Arc<Session>,
-    turn_context: Arc<TurnContext>,
-    sub_id: String,
-    input: Vec<InputItem>,
-) {
-    let task = AgentTask::compact(
-        sess.clone(),
-        turn_context,
-        sub_id,
-        input,
-        SUMMARIZATION_PROMPT.to_string(),
-    );
-    sess.set_task(task).await;
-}

-pub(super) async fn run_inline_auto_compact_task(
+pub(crate) async fn run_inline_auto_compact_task(
     sess: Arc<Session>,
     turn_context: Arc<TurnContext>,
 ) {
     let sub_id = sess.next_internal_sub_id();
     let input = vec![InputItem::Text {
-        text: COMPACT_TRIGGER_TEXT.to_string(),
+        text: SUMMARIZATION_PROMPT.to_string(),
     }];
-    run_compact_task_inner(
-        sess,
-        turn_context,
-        sub_id,
-        input,
-        SUMMARIZATION_PROMPT.to_string(),
-        false,
-    )
-    .await;
+    run_compact_task_inner(sess, turn_context, sub_id, input).await;
 }

-pub(super) async fn run_compact_task(
+pub(crate) async fn run_compact_task(
     sess: Arc<Session>,
     turn_context: Arc<TurnContext>,
     sub_id: String,
     input: Vec<InputItem>,
-    compact_instructions: String,
-) {
+) -> Option<String> {
     let start_event = Event {
         id: sub_id.clone(),
         msg: EventMsg::TaskStarted(TaskStartedEvent {
@@ -87,22 +59,8 @@ pub(super) async fn run_compact_task(
         }),
     };
     sess.send_event(start_event).await;
-    run_compact_task_inner(
-        sess.clone(),
-        turn_context,
-        sub_id.clone(),
-        input,
-        compact_instructions,
-        true,
-    )
-    .await;
-    let event = Event {
-        id: sub_id,
-        msg: EventMsg::TaskComplete(TaskCompleteEvent {
-            last_agent_message: None,
-        }),
-    };
-    sess.send_event(event).await;
+    run_compact_task_inner(sess.clone(), turn_context, sub_id.clone(), input).await;
+    None
 }

 async fn run_compact_task_inner(
@@ -110,19 +68,15 @@ async fn run_compact_task_inner(
     turn_context: Arc<TurnContext>,
     sub_id: String,
     input: Vec<InputItem>,
-    compact_instructions: String,
-    remove_task_on_completion: bool,
 ) {
     let initial_input_for_turn: ResponseInputItem = ResponseInputItem::from(input);
-    let instructions_override = compact_instructions;
     let turn_input = sess
         .turn_input_with_history(vec![initial_input_for_turn.clone().into()])
         .await;

     let prompt = Prompt {
         input: turn_input,
-        tools: Vec::new(),
-        base_instructions_override: Some(instructions_override),
+        ..Default::default()
     };

     let max_retries = turn_context.client.get_provider().stream_max_retries();
@@ -139,7 +93,8 @@ async fn run_compact_task_inner(
     sess.persist_rollout_items(&[rollout_item]).await;

     loop {
-        let attempt_result = drain_to_completed(&sess, turn_context.as_ref(), &prompt).await;
+        let attempt_result =
+            drain_to_completed(&sess, turn_context.as_ref(), &sub_id, &prompt).await;

         match attempt_result {
             Ok(()) => {
@@ -175,21 +130,12 @@ async fn run_compact_task_inner(
         }
     }

-    if remove_task_on_completion {
-        sess.remove_task(&sub_id).await;
-    }
-    let history_snapshot = {
-        let state = sess.state.lock().await;
-        state.history.contents()
-    };
+    let history_snapshot = sess.history_snapshot().await;
     let summary_text = get_last_assistant_message_from_turn(&history_snapshot).unwrap_or_default();
     let user_messages = collect_user_messages(&history_snapshot);
     let initial_context = sess.build_initial_context(turn_context.as_ref());
     let new_history = build_compacted_history(initial_context, &user_messages, &summary_text);
-    {
-        let mut state = sess.state.lock().await;
-        state.history.replace(new_history);
-    }
+    sess.replace_history(new_history).await;

     let rollout_item = RolloutItem::Compacted(CompactedItem {
         message: summary_text.clone(),
@@ -284,6 +230,7 @@ pub(crate) fn build_compacted_history(
 async fn drain_to_completed(
     sess: &Session,
     turn_context: &TurnContext,
+    sub_id: &str,
     prompt: &Prompt,
 ) -> CodexResult<()> {
     let mut stream = turn_context.client.clone().stream(prompt).await?;
@@ -297,10 +244,14 @@ async fn drain_to_completed(
         };
         match event {
             Ok(ResponseEvent::OutputItemDone(item)) => {
-                let mut state = sess.state.lock().await;
-                state.history.record_items(std::slice::from_ref(&item));
+                sess.record_into_history(std::slice::from_ref(&item)).await;
             }
-            Ok(ResponseEvent::Completed { .. }) => {
+            Ok(ResponseEvent::RateLimits(snapshot)) => {
+                sess.update_rate_limits(sub_id, snapshot).await;
+            }
+            Ok(ResponseEvent::Completed { token_usage, .. }) => {
+                sess.update_token_usage_info(sub_id, turn_context, token_usage.as_ref())
+                    .await;
                 return Ok(());
             }
             Ok(_) => continue,
diff --git a/codex-rs/core/src/command_safety/is_dangerous_command.rs b/codex-rs/core/src/command_safety/is_dangerous_command.rs
new file mode 100644
index 00000000000..852af93ef96
--- /dev/null
+++ b/codex-rs/core/src/command_safety/is_dangerous_command.rs
@@ -0,0 +1,99 @@
+use crate::bash::parse_bash_lc_plain_commands;
+
+pub fn command_might_be_dangerous(command: &[String]) -> bool {
+    if is_dangerous_to_call_with_exec(command) {
+        return true;
+    }
+
+    // Support `bash -lc "