Compare commits
17 Commits
1b4603ed3b
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 7dd3095974 | |||
| 9ba783f10b | |||
| 5af8acb2bb | |||
| 523b8c0a4a | |||
| 2264aea591 | |||
| 86e7dbd1ab | |||
| 5149320afd | |||
| 4d506aabf7 | |||
| 010ded8476 | |||
| 02f22d80bd | |||
| 7cf8afab73 | |||
| 832035a087 | |||
| f06a2c2740 | |||
| f65baebb3c | |||
| eb96764770 | |||
| 3e5461df9b | |||
| ce8f0dfd2b |
169
.github/workflows/release.yml
vendored
Normal file
169
.github/workflows/release.yml
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
name: Build & Release
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*.*.*'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: 'Release tag (e.g. v2.1.88)'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build (${{ matrix.os }})
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: linux-x64
|
||||
runner: ubuntu-latest
|
||||
artifact: claude-linux-x64
|
||||
- os: linux-arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
artifact: claude-linux-arm64
|
||||
- os: macos-x64
|
||||
runner: macos-13
|
||||
artifact: claude-macos-x64
|
||||
- os: macos-arm64
|
||||
runner: macos-latest
|
||||
artifact: claude-macos-arm64
|
||||
- os: windows-x64
|
||||
runner: windows-latest
|
||||
artifact: claude-windows-x64.exe
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: '1.3.11'
|
||||
|
||||
- name: Install dependencies
|
||||
run: bun install --frozen-lockfile
|
||||
|
||||
- name: Build binary
|
||||
run: bun run compile
|
||||
|
||||
- name: Rename binary (Unix)
|
||||
if: runner.os != 'Windows'
|
||||
run: |
|
||||
mkdir -p release
|
||||
cp dist/cli release/${{ matrix.artifact }}
|
||||
chmod +x release/${{ matrix.artifact }}
|
||||
|
||||
- name: Rename binary (Windows)
|
||||
if: runner.os == 'Windows'
|
||||
shell: pwsh
|
||||
run: |
|
||||
New-Item -ItemType Directory -Force -Path release
|
||||
Copy-Item dist/cli.exe release/${{ matrix.artifact }}
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.artifact }}
|
||||
path: release/${{ matrix.artifact }}
|
||||
retention-days: 7
|
||||
|
||||
release:
|
||||
name: Create GitHub Release
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/') || github.event_name == 'workflow_dispatch'
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: release/
|
||||
|
||||
- name: Flatten release directory
|
||||
run: |
|
||||
find release/ -type f | while read f; do
|
||||
mv "$f" release/$(basename "$f")
|
||||
done
|
||||
find release/ -type d -empty -delete
|
||||
|
||||
- name: Determine release tag
|
||||
id: tag
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" = "workflow_dispatch" ] && [ -n "${{ github.event.inputs.tag }}" ]; then
|
||||
echo "tag=${{ github.event.inputs.tag }}" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "tag=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Generate checksums
|
||||
run: |
|
||||
cd release
|
||||
sha256sum claude-linux-x64 claude-linux-arm64 claude-macos-x64 claude-macos-arm64 claude-windows-x64.exe > SHA256SUMS.txt 2>/dev/null || true
|
||||
cat SHA256SUMS.txt
|
||||
|
||||
- name: Create Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
tag_name: ${{ steps.tag.outputs.tag }}
|
||||
name: Claude Code ${{ steps.tag.outputs.tag }}
|
||||
draft: false
|
||||
prerelease: false
|
||||
generate_release_notes: true
|
||||
body: |
|
||||
## 安装说明 / Installation
|
||||
|
||||
### macOS (Apple Silicon)
|
||||
```bash
|
||||
curl -L https://github.com/${{ github.repository }}/releases/download/${{ steps.tag.outputs.tag }}/claude-macos-arm64 -o claude
|
||||
chmod +x claude && sudo mv claude /usr/local/bin/claude
|
||||
```
|
||||
|
||||
### macOS (Intel)
|
||||
```bash
|
||||
curl -L https://github.com/${{ github.repository }}/releases/download/${{ steps.tag.outputs.tag }}/claude-macos-x64 -o claude
|
||||
chmod +x claude && sudo mv claude /usr/local/bin/claude
|
||||
```
|
||||
|
||||
### Linux (x64)
|
||||
```bash
|
||||
curl -L https://github.com/${{ github.repository }}/releases/download/${{ steps.tag.outputs.tag }}/claude-linux-x64 -o claude
|
||||
chmod +x claude && sudo mv claude /usr/local/bin/claude
|
||||
```
|
||||
|
||||
### Linux (ARM64)
|
||||
```bash
|
||||
curl -L https://github.com/${{ github.repository }}/releases/download/${{ steps.tag.outputs.tag }}/claude-linux-arm64 -o claude
|
||||
chmod +x claude && sudo mv claude /usr/local/bin/claude
|
||||
```
|
||||
|
||||
### Windows (x64)
|
||||
下载 `claude-windows-x64.exe`,将其重命名为 `claude.exe` 并添加到 PATH。
|
||||
|
||||
### 验证 / Verify
|
||||
```bash
|
||||
claude --version
|
||||
```
|
||||
|
||||
### 隐私说明 / Privacy
|
||||
本构建已移除以下外部数据传输:
|
||||
- ✅ 已删除 WebFetch 域名检查(不再向 Anthropic 上报访问域名)
|
||||
- ✅ 已禁用 Codex API 路由(不再将对话转发至 OpenAI chatgpt.com)
|
||||
- ✅ Analytics/遥测已为空存根(无实际数据发送)
|
||||
- ✅ GrowthBook/Statsig 仅使用本地缓存(无远程请求)
|
||||
files: |
|
||||
release/claude-linux-x64
|
||||
release/claude-linux-arm64
|
||||
release/claude-macos-x64
|
||||
release/claude-macos-arm64
|
||||
release/claude-windows-x64.exe
|
||||
release/SHA256SUMS.txt
|
||||
@@ -48,6 +48,7 @@ Removed in this repository:
|
||||
- Datadog analytics and Anthropic 1P event-logging egress.
|
||||
- GrowthBook remote evaluation/network fetches; local env/config overrides and cached values remain available for compatibility.
|
||||
- OpenTelemetry initialization and event export paths.
|
||||
- Perfetto local trace-file output paths that could persist request/tool metadata to disk.
|
||||
- Extra dead telemetry scaffolding tied to the removed egress paths, including startup/session analytics fanout, logout telemetry flush, and remote GrowthBook metadata collectors.
|
||||
|
||||
Still present:
|
||||
|
||||
366
docs/free-code-main-diff-analysis.md
Normal file
366
docs/free-code-main-diff-analysis.md
Normal file
@@ -0,0 +1,366 @@
|
||||
# `/Users/yovinchen/project/claude` 与 `/Users/yovinchen/Downloads/free-code-main` 差异分析
|
||||
|
||||
## 1. 分析目标
|
||||
|
||||
本文档用于比较当前工作区:
|
||||
|
||||
- `/Users/yovinchen/project/claude`
|
||||
|
||||
与参考项目:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main`
|
||||
|
||||
重点回答三个问题:
|
||||
|
||||
1. 当前项目相对参考项目改了什么。
|
||||
2. 哪些改动属于“恢复后为保证可运行而做的必要修复”。
|
||||
3. 哪些差异仍然值得继续收敛或补做验证。
|
||||
|
||||
## 2. 总体结论
|
||||
|
||||
当前项目不是简单复制参考项目,而是一个“基于参考快照恢复后可运行化”的工作副本。
|
||||
|
||||
核心判断如下:
|
||||
|
||||
1. 工程配置层与参考项目总体高度接近。
|
||||
2. 当前项目为了恢复 `bun run dev`、`build`、`compile` 能力,加入了一层运行时补丁和仓库管理文件。
|
||||
3. 源码层存在较多文件差异,主要集中在 CLI 启动链路、遥测、认证、模型配置、LogoV2、Claude in Chrome、MCP/SDK 辅助代码等区域。
|
||||
4. 当前项目额外引入了一批 `.js` 文件,明显属于“补齐运行时依赖/类型生成产物/兼容层”的恢复性文件。
|
||||
5. 参考项目仍然保留一些当前仓库没有带入的资源文件、说明文件和脚本文件,这些不一定影响运行,但会影响“与参考仓库完全一致”的完整度。
|
||||
|
||||
## 3. 差异概览
|
||||
|
||||
### 3.1 顶层目录差异
|
||||
|
||||
当前项目独有的顶层内容:
|
||||
|
||||
- `.gitattributes`
|
||||
- `docs/`
|
||||
- `vendor/`
|
||||
- `cli.js.map`
|
||||
- `.DS_Store`
|
||||
|
||||
参考项目独有的顶层内容:
|
||||
|
||||
- `.env`
|
||||
- `CLAUDE.md`
|
||||
- `FEATURES.md`
|
||||
- `assets/`
|
||||
- `changes.md`
|
||||
- `install.sh`
|
||||
- `run.sh`
|
||||
|
||||
说明:
|
||||
|
||||
1. 当前项目更像“已接入 Git 管理、可持续维护”的开发仓库。
|
||||
2. 参考项目更像“恢复快照 + 使用说明 + 辅助资源”的完整分发目录。
|
||||
3. `assets/`、`CLAUDE.md`、`FEATURES.md`、`changes.md` 当前未带入,功能上未必是阻塞,但文档与资源完整度低于参考项目。
|
||||
|
||||
### 3.2 源码文件差异规模
|
||||
|
||||
通过目录级比较可见:
|
||||
|
||||
1. `src/` 下有约 `55` 个同名文件内容不同。
|
||||
2. 参考项目在 `src/` 下没有发现当前缺失而参考独有的源码文件。
|
||||
3. 当前项目反而额外多出一批源码/运行时补丁文件。
|
||||
|
||||
这说明当前项目的主体源码骨架已经基本补齐,但很多文件内容已经偏离参考项目,不再是“原样恢复”。
|
||||
|
||||
## 4. 工程配置差异
|
||||
|
||||
### 4.1 `package.json`
|
||||
|
||||
文件:
|
||||
|
||||
- `/Users/yovinchen/project/claude/package.json`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/package.json`
|
||||
|
||||
关键差异:
|
||||
|
||||
1. 包身份不同
|
||||
- 当前:`name = "claude-code-recover"`
|
||||
- 参考:`name = "claude-code-source-snapshot"`
|
||||
|
||||
2. 版本号不同
|
||||
- 当前:`2.1.88`
|
||||
- 参考:`2.1.87`
|
||||
|
||||
3. 当前项目增加了 `main: "./cli"`
|
||||
|
||||
4. `bin` 被精简
|
||||
- 当前只保留 `claude`
|
||||
- 参考同时暴露 `claude` 和 `claude-source`
|
||||
|
||||
5. `scripts` 被精简
|
||||
- 当前保留:`build`、`compile`、`dev`
|
||||
- 参考还包含:`build:dev`、`build:dev:full`
|
||||
|
||||
6. 当前 `dev` 脚本加入了 `MACRO` 注入
|
||||
- 当前:通过 `bun run -d 'MACRO:...' ./src/entrypoints/cli.tsx`
|
||||
- 参考:直接 `bun run ./src/entrypoints/cli.tsx`
|
||||
|
||||
7. 当前额外声明了依赖:
|
||||
- `scheduler`
|
||||
|
||||
分析:
|
||||
|
||||
1. 这些差异不是随机漂移,而是为了让恢复后的工作区更适合直接运行。
|
||||
2. `MACRO` 注入是本项目最关键的运行性修复之一,因为当前源码曾出现 `MACRO is not defined` 的实际故障。
|
||||
3. 删除 `claude-source` 和精简 `scripts` 会降低与参考项目的“接口一致性”,但能让当前项目更聚焦于单一运行入口。
|
||||
4. 新增 `scheduler` 很像一个恢复期补依赖动作,说明当前项目在实际运行时遇到过依赖缺失。
|
||||
|
||||
### 4.2 `tsconfig.json`
|
||||
|
||||
文件:
|
||||
|
||||
- `/Users/yovinchen/project/claude/tsconfig.json`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/tsconfig.json`
|
||||
|
||||
关键差异:
|
||||
|
||||
1. 当前项目增加了:
|
||||
- `"ignoreDeprecations": "6.0"`
|
||||
|
||||
分析:
|
||||
|
||||
1. 这属于 TypeScript 版本兼容调优。
|
||||
2. 它不会直接改变运行时行为,但说明当前项目更偏向“先保证开发过程稳定”。
|
||||
|
||||
### 4.3 构建脚本
|
||||
|
||||
文件:
|
||||
|
||||
- `/Users/yovinchen/project/claude/scripts/build.ts`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/scripts/build.ts`
|
||||
|
||||
结论:
|
||||
|
||||
1. 构建脚本主体保持一致。
|
||||
2. 当前工程与参考项目的差异主要不在构建逻辑本身,而在于 `package.json` 对入口和开发脚本的包装方式。
|
||||
|
||||
## 5. 运行时恢复性差异
|
||||
|
||||
这一类差异是当前项目最值得单独识别的部分,因为它们明显是“为了跑起来”而不是“为了贴近参考”。
|
||||
|
||||
### 5.1 `MACRO` 兜底与注入
|
||||
|
||||
关键文件:
|
||||
|
||||
- `/Users/yovinchen/project/claude/src/entrypoints/cli.tsx`
|
||||
- `/Users/yovinchen/project/claude/src/main.tsx`
|
||||
|
||||
观察到的现象:
|
||||
|
||||
1. 当前项目与参考项目在这两个入口文件上都存在差异。
|
||||
2. 当前项目为了开发态运行,已经通过 `package.json` 的 `dev` 脚本显式注入 `MACRO`。
|
||||
3. 当前项目的 `src/main.tsx` 中还保留了一层 `MAIN_MACRO` 兜底逻辑,而参考项目直接使用 `MACRO.VERSION`。
|
||||
|
||||
分析:
|
||||
|
||||
1. 这是非常明确的“开发态/恢复态兼容修复”。
|
||||
2. 它解决的是参考项目默认依赖构建期注入、但恢复项目直接 `bun run` 时缺少注入的问题。
|
||||
3. 这类修复提高了当前项目的可运行性,但也让入口行为不再完全等同于参考项目。
|
||||
|
||||
### 5.2 SDK 运行时补齐文件
|
||||
|
||||
当前项目独有文件:
|
||||
|
||||
- `/Users/yovinchen/project/claude/src/entrypoints/sdk/controlTypes.js`
|
||||
- `/Users/yovinchen/project/claude/src/entrypoints/sdk/coreTypes.generated.js`
|
||||
- `/Users/yovinchen/project/claude/src/entrypoints/sdk/runtimeTypes.js`
|
||||
- `/Users/yovinchen/project/claude/src/entrypoints/sdk/settingsTypes.generated.js`
|
||||
- `/Users/yovinchen/project/claude/src/entrypoints/sdk/toolTypes.js`
|
||||
|
||||
分析:
|
||||
|
||||
1. 参考项目只有对应的 `.ts` 类型/生成源码,而当前项目额外保留了 `.js` 文件。
|
||||
2. 这些文件高概率是为了解决 Bun 运行时直接加载、模块解析或类型生成产物缺失的问题。
|
||||
3. 它们属于典型“恢复补丁文件”。
|
||||
|
||||
风险:
|
||||
|
||||
1. 如果这些 `.js` 文件并非由统一生成流程产出,而是手工补入,那么后续源码变更后容易和 `.ts` 文件脱节。
|
||||
2. 如果要长期维护,最好明确这些文件是“源码的一部分”还是“应由生成流程产出”。
|
||||
|
||||
### 5.3 其他当前项目独有源码
|
||||
|
||||
当前项目独有文件:
|
||||
|
||||
- `/Users/yovinchen/project/claude/src/skills/bundled/verify/SKILL.md`
|
||||
- `/Users/yovinchen/project/claude/src/skills/bundled/verify/examples/cli.md`
|
||||
- `/Users/yovinchen/project/claude/src/skills/bundled/verify/examples/server.md`
|
||||
- `/Users/yovinchen/project/claude/src/tools/TungstenTool/TungstenLiveMonitor.js`
|
||||
- `/Users/yovinchen/project/claude/src/tools/TungstenTool/TungstenTool.js`
|
||||
- `/Users/yovinchen/project/claude/src/tools/WorkflowTool/constants.js`
|
||||
- `/Users/yovinchen/project/claude/src/types/connectorText.js`
|
||||
|
||||
分析:
|
||||
|
||||
1. 这批文件同样更像运行时补齐或恢复期追加文件,而不是参考项目原始快照的一部分。
|
||||
2. 其中 `.js` 文件的存在说明当前项目对“直接运行”做过较强适配。
|
||||
3. `verify` 技能目录属于额外内置资源,偏离参考项目,但不一定是负面差异。
|
||||
|
||||
## 6. 同名源码文件差异分布
|
||||
|
||||
当前与参考项目存在内容差异的主要文件区域包括:
|
||||
|
||||
- `src/main.tsx`
|
||||
- `src/entrypoints/cli.tsx`
|
||||
- `src/entrypoints/init.ts`
|
||||
- `src/commands.ts`
|
||||
- `src/commands/release-notes/release-notes.ts`
|
||||
- `src/commands/ultraplan.tsx`
|
||||
- `src/components/ConsoleOAuthFlow.tsx`
|
||||
- `src/components/LogoV2/*`
|
||||
- `src/components/StructuredDiff/colorDiff.ts`
|
||||
- `src/constants/*`
|
||||
- `src/hooks/useApiKeyVerification.ts`
|
||||
- `src/screens/REPL.tsx`
|
||||
- `src/services/analytics/*`
|
||||
- `src/services/api/client.ts`
|
||||
- `src/services/mcp/client.ts`
|
||||
- `src/services/oauth/*`
|
||||
- `src/services/voice.ts`
|
||||
- `src/skills/bundled/claudeInChrome.ts`
|
||||
- `src/skills/bundled/verifyContent.ts`
|
||||
- `src/utils/auth.ts`
|
||||
- `src/utils/claudeInChrome/*`
|
||||
- `src/utils/config.ts`
|
||||
- `src/utils/logoV2Utils.ts`
|
||||
- `src/utils/model/*`
|
||||
- `src/utils/modifiers.ts`
|
||||
- `src/utils/releaseNotes.ts`
|
||||
- `src/utils/ripgrep.ts`
|
||||
- `src/utils/telemetry/*`
|
||||
- `src/utils/theme.ts`
|
||||
|
||||
分析:
|
||||
|
||||
1. 差异覆盖面很广,不像单点修复,更像恢复过程中发生过多轮替换、补抄和本地修订。
|
||||
2. 受影响的区域里,很多都属于“用户可感知行为”或“外部集成逻辑”,比如认证、OAuth、模型选择、遥测、CLI 启动参数、UI 展示。
|
||||
3. 这意味着当前项目虽然已经可运行,但和参考项目在行为层面未必完全一致。
|
||||
|
||||
## 7. 文档、资源和仓库管理层差异
|
||||
|
||||
### 7.1 当前项目新增的仓库管理能力
|
||||
|
||||
当前项目比参考项目多出:
|
||||
|
||||
- `.gitattributes`
|
||||
- 更严格的 `.gitignore`
|
||||
- `docs/`
|
||||
|
||||
其中当前 `.gitignore` 比参考项目更偏向真实开发仓库,额外忽略了:
|
||||
|
||||
- `.DS_Store`
|
||||
- `.idea/`
|
||||
- `.claude/`
|
||||
- `cli.js.map`
|
||||
- `*.log`
|
||||
|
||||
分析:
|
||||
|
||||
1. 当前项目已经从“快照目录”转向“可持续维护仓库”。
|
||||
2. 这是正向改动,但它说明当前项目的目标已经不只是还原参考仓库。
|
||||
|
||||
### 7.2 当前缺失的参考项目文档与资源
|
||||
|
||||
参考项目存在、当前项目没有纳入的内容:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main/CLAUDE.md`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/FEATURES.md`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/changes.md`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/assets/`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/install.sh`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/run.sh`
|
||||
|
||||
分析:
|
||||
|
||||
1. 当前项目缺的更多是“说明性与辅助性内容”,而不是主干源码。
|
||||
2. 如果目标是“恢复可运行 CLI”,这些缺失不是第一优先级。
|
||||
3. 如果目标是“尽量贴近参考项目完整交付物”,这些内容应该补回或至少评估是否要保留。
|
||||
|
||||
## 8. 差异定性判断
|
||||
|
||||
### 8.1 明显合理的差异
|
||||
|
||||
这部分差异大概率是正确且有价值的:
|
||||
|
||||
1. `package.json` 中 `dev` 脚本注入 `MACRO`
|
||||
2. `tsconfig.json` 增加 `ignoreDeprecations`
|
||||
3. 增加 `.gitignore`、`.gitattributes`、`docs/`
|
||||
4. 将当前仓库定位为可维护的 Git 项目
|
||||
|
||||
### 8.2 明显属于恢复补丁的差异
|
||||
|
||||
这部分差异很可能是为了跑起来而做的临时或兼容性补丁:
|
||||
|
||||
1. `src/main.tsx` 的 `MAIN_MACRO` 兜底
|
||||
2. `src/entrypoints/sdk/*.js`
|
||||
3. `src/tools/TungstenTool/*.js`
|
||||
4. `src/tools/WorkflowTool/constants.js`
|
||||
5. `src/types/connectorText.js`
|
||||
6. `scheduler` 依赖补入
|
||||
|
||||
### 8.3 需要继续验证的差异
|
||||
|
||||
这部分差异可能带来行为偏移,建议后续重点回归:
|
||||
|
||||
1. `src/main.tsx`
|
||||
2. `src/entrypoints/cli.tsx`
|
||||
3. `src/services/oauth/*`
|
||||
4. `src/services/api/client.ts`
|
||||
5. `src/services/mcp/client.ts`
|
||||
6. `src/utils/model/*`
|
||||
7. `src/services/analytics/*`
|
||||
8. `src/components/LogoV2/*`
|
||||
9. `src/commands.ts` 与 `src/commands/ultraplan.tsx`
|
||||
|
||||
原因:
|
||||
|
||||
1. 这些区域要么直接影响 CLI 主流程,要么影响鉴权/模型/遥测/展示逻辑。
|
||||
2. 即使项目现在能跑,也不代表与参考项目完全同构。
|
||||
|
||||
## 9. 建议的后续动作
|
||||
|
||||
### 9.1 如果目标是“继续可用优先”
|
||||
|
||||
建议:
|
||||
|
||||
1. 保留当前 `MACRO` 注入方案。
|
||||
2. 继续把 `.js` 补丁文件当作运行时兼容层管理。
|
||||
3. 用当前仓库作为主维护仓库,不强求逐字对齐参考项目。
|
||||
|
||||
### 9.2 如果目标是“尽量收敛到参考项目”
|
||||
|
||||
建议:
|
||||
|
||||
1. 逐步审计 `src/main.tsx`、`src/entrypoints/cli.tsx` 与 `package.json`。
|
||||
2. 确认 `src/entrypoints/sdk/*.js` 等补丁文件是否可以通过生成流程替代。
|
||||
3. 评估是否恢复 `claude-source`、`build:dev`、`build:dev:full`。
|
||||
4. 视需求补回 `assets/`、`CLAUDE.md`、`FEATURES.md`、`changes.md`、`install.sh`、`run.sh`。
|
||||
|
||||
### 9.3 如果目标是“做正式恢复基线”
|
||||
|
||||
建议:
|
||||
|
||||
1. 把当前差异分成:
|
||||
- `必要修复`
|
||||
- `兼容补丁`
|
||||
- `尚未验证的行为偏移`
|
||||
2. 为主链路建立最少一轮验证:
|
||||
- `bun run dev -- --help`
|
||||
- `bun run dev -- --version`
|
||||
- `bun run build`
|
||||
- `bun run compile`
|
||||
3. 针对鉴权、模型选择、OAuth、MCP 连接、遥测开关做专项回归。
|
||||
|
||||
## 10. 最终结论
|
||||
|
||||
当前项目已经不是参考项目的简单副本,而是一个“参考快照基础上恢复成功、可直接运行、带本地修补层”的工程化版本。
|
||||
|
||||
可以用一句话概括:
|
||||
|
||||
`/Users/yovinchen/project/claude` 的主要价值在于“已经能跑并且适合继续维护”,而 `/Users/yovinchen/Downloads/free-code-main` 的主要价值在于“作为参考基线和资源来源”。
|
||||
|
||||
如果下一步要继续治理代码,最合理的策略不是盲目回滚当前差异,而是先把差异分类,再决定哪些保留、哪些收敛、哪些补测试。
|
||||
423
docs/free-code-main-local-system-info-removal-report.md
Normal file
423
docs/free-code-main-local-system-info-removal-report.md
Normal file
@@ -0,0 +1,423 @@
|
||||
# `free-code-main` 本地系统信息外发移除实现报告
|
||||
|
||||
- 分析时间: 2026-04-03
|
||||
- 对照文档: `docs/local-system-info-egress-audit.md`
|
||||
- 分析对象: `/Users/yovinchen/Downloads/free-code-main`
|
||||
- 对照基线: `/Users/yovinchen/project/claude`
|
||||
- 分析方式: 静态代码审计 + 关键链路比对 + 同名文件差异核查
|
||||
- 说明: 本报告只基于源码静态分析,不包含运行时抓包或服务端验证。
|
||||
|
||||
## 结论摘要
|
||||
|
||||
结论是: **`free-code-main` 只“部分移除”了审计文档里的本地系统信息外发链路。**
|
||||
|
||||
更准确地说,它做的是:
|
||||
|
||||
1. **把 telemetry / analytics / OTel 相关外发出口失活了**
|
||||
- Datadog
|
||||
- Anthropic 1P event logging
|
||||
- OTel 事件与 metrics/tracing 初始化
|
||||
- GrowthBook 远程评估链路也被间接短路
|
||||
|
||||
2. **但没有把“所有本地信息外发”都移除**
|
||||
- 模型请求里的环境/项目上下文注入仍在
|
||||
- Feedback 上传仍在
|
||||
- Transcript Share 仍在
|
||||
- Remote Control / Bridge 上传 `hostname`、目录、分支、git remote URL 的链路仍在
|
||||
- Trusted Device 注册仍在
|
||||
- `/insights` 的 ant-only 上传逻辑仍在
|
||||
|
||||
3. **移除方式不是“彻底删代码”,而是“保留兼容接口 + 启动链路短路 + sink/no-op stub 化”**
|
||||
- 这意味着仓库里仍然保留了不少采集/导出代码。
|
||||
- 但默认运行时,关键出口函数已经被改成空实现,导致这些链路无法真正发出请求。
|
||||
|
||||
因此,如果问题是:
|
||||
|
||||
> `free-code-main` 是否已经把 `docs/local-system-info-egress-audit.md` 中描述的“本地系统信息外发”整体移除?
|
||||
|
||||
答案是:
|
||||
|
||||
**没有整体移除,只移除了其中“遥测/观测”这一类外发;产品主链路里的上下文外发和若干用户触发上传链路仍然存在。**
|
||||
|
||||
## 对照矩阵
|
||||
|
||||
| 审计项 | `free-code-main` 状态 | 结论 |
|
||||
| --- | --- | --- |
|
||||
| F1 模型请求 system prompt / user context | 未移除 | 默认仍会把 cwd、git 状态、CLAUDE.md、日期,以及 prompts 里的平台/壳层/OS 版本注入到模型请求 |
|
||||
| F2 Datadog analytics | 已移除 | Datadog 初始化与上报函数被 stub 成 no-op |
|
||||
| F3 Anthropic 1P event logging | 已移除 | 1P logger 整体改为空实现,启用判断恒为 `false` |
|
||||
| F4 GrowthBook remote eval | 实际已失活 | 依赖 `is1PEventLoggingEnabled()`,而 1P 已被硬关,默认不会创建 GrowthBook client |
|
||||
| F5 Feedback | 未移除 | 用户触发后仍会 POST 到 `claude_cli_feedback` |
|
||||
| F6 Transcript Share | 未移除 | 用户触发后仍会 POST 到 `claude_code_shared_session_transcripts` |
|
||||
| F7 Remote Control / Bridge | 未移除 | 仍会采集并上送 `hostname`、目录、分支、git remote URL |
|
||||
| F8 Trusted Device | 未移除 | 仍会注册 `Claude Code on <hostname> · <platform>` |
|
||||
| F9 OpenTelemetry | 已移除 | telemetry 初始化与 `logOTelEvent()` 都被改成 no-op |
|
||||
| F10 `/insights` 内部上传 | 未移除 | ant-only S3 上传逻辑仍保留 |
|
||||
|
||||
## 关键判断
|
||||
|
||||
这次比对里最重要的判断有两个:
|
||||
|
||||
1. **`README.md` 里的 “Telemetry removed” 只覆盖了“遥测/观测”语义,不等于“所有本地信息外发已删除”。**
|
||||
2. **`free-code-main` 的移除策略主要是“切断出口”,而不是“删除所有采集代码”。**
|
||||
|
||||
这也是为什么你会看到:
|
||||
|
||||
- `src/services/analytics/metadata.ts` 这类环境信息构造代码还在
|
||||
- `src/utils/api.ts` 里上下文统计代码还在
|
||||
- `src/services/analytics/firstPartyEventLoggingExporter.ts`、`src/utils/telemetry/bigqueryExporter.ts` 这类导出器文件也还在
|
||||
|
||||
但是:
|
||||
|
||||
- 事件 sink
|
||||
- telemetry bootstrap
|
||||
- OTel event logging
|
||||
- Datadog / 1P logger 初始化
|
||||
|
||||
都已经被改成空实现或被前置条件短路掉了。
|
||||
|
||||
## 已移除部分: 实现方式分析
|
||||
|
||||
### 1. Analytics 公共入口被改成 compatibility boundary + no-op
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/services/analytics/index.ts:4-40` 明确写到:
|
||||
|
||||
- “open build intentionally ships without product telemetry”
|
||||
- 保留模块只是为了不改动现有调用点
|
||||
- `attachAnalyticsSink()`、`logEvent()`、`logEventAsync()` 都是空实现
|
||||
|
||||
这意味着:
|
||||
|
||||
- 各业务模块里仍然可以继续 `import { logEvent }`
|
||||
- 但这些调用不会再入队、不会再挂 sink、也不会再向任何后端发送
|
||||
|
||||
对照 `/Users/yovinchen/project/claude/src/services/analytics/index.ts`,当前工作区版本还保留:
|
||||
|
||||
- 事件队列
|
||||
- `attachAnalyticsSink()` 的真实绑定
|
||||
- `logEvent()` / `logEventAsync()` 的真实分发
|
||||
|
||||
所以这里是非常明确的“出口 stub 化”。
|
||||
|
||||
### 2. Datadog 被直接 stub 掉
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/services/analytics/datadog.ts:1-12` 中:
|
||||
|
||||
- `initializeDatadog()` 直接返回 `false`
|
||||
- `shutdownDatadog()` 空实现
|
||||
- `trackDatadogEvent()` 空实现
|
||||
|
||||
而对照 `/Users/yovinchen/project/claude/src/services/analytics/datadog.ts:12-140`,基线版本仍然保留:
|
||||
|
||||
- Datadog endpoint
|
||||
- 批量缓冲
|
||||
- `axios.post(...)`
|
||||
|
||||
因此 F2 可以判定为**已移除**。
|
||||
|
||||
### 3. 1P event logging 被整体空实现
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/services/analytics/firstPartyEventLogger.ts:1-48` 中:
|
||||
|
||||
- `is1PEventLoggingEnabled()` 恒为 `false`
|
||||
- `logEventTo1P()` 空实现
|
||||
- `initialize1PEventLogging()` 空实现
|
||||
- `reinitialize1PEventLoggingIfConfigChanged()` 空实现
|
||||
|
||||
这和基线 `/Users/yovinchen/project/claude/src/services/analytics/firstPartyEventLogger.ts:141-220` 中真实存在的:
|
||||
|
||||
- `getEventMetadata(...)`
|
||||
- `getCoreUserData(true)`
|
||||
- OTel logger emit
|
||||
|
||||
形成了直接对照。
|
||||
|
||||
需要注意的是:
|
||||
|
||||
- `src/services/analytics/firstPartyEventLoggingExporter.ts` 文件仍然存在
|
||||
- 里面仍保留 `/api/event_logging/batch` 的完整实现
|
||||
|
||||
但由于 logger 初始化入口已经空了,这个 exporter 在默认路径上已经不会被接上。
|
||||
|
||||
因此 F3 的移除方式属于:
|
||||
|
||||
**保留 exporter 源码,但把“上游 logger/provider 初始化”整体切断。**
|
||||
|
||||
### 4. Analytics sink 初始化被清空,启动调用点保留
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/services/analytics/sink.ts:1-10` 中:
|
||||
|
||||
- `initializeAnalyticsGates()` 空实现
|
||||
- `initializeAnalyticsSink()` 空实现
|
||||
|
||||
但启动链路并没有删调用点:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/main.tsx:83-86,416-417` 仍然 import 并调用 `initializeAnalyticsGates()`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/setup.ts:371` 仍然调用 `initSinks()`
|
||||
|
||||
这说明作者的思路不是“到处改业务调用点”,而是:
|
||||
|
||||
**保留启动顺序与依赖图,统一在 sink 层面把行为变空。**
|
||||
|
||||
### 5. OTel 初始化被显式短路
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/entrypoints/init.ts:207-212` 直接把:
|
||||
|
||||
- `initializeTelemetryAfterTrust()`
|
||||
|
||||
改成了立即 `return`。
|
||||
|
||||
同时:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/utils/telemetry/instrumentation.ts:1-24`
|
||||
- `bootstrapTelemetry()` 空实现
|
||||
- `isTelemetryEnabled()` 恒为 `false`
|
||||
- `initializeTelemetry()` 返回 `null`
|
||||
- `flushTelemetry()` 空实现
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/utils/telemetry/events.ts:1-12`
|
||||
- `logOTelEvent()` 空实现
|
||||
- 用户 prompt 内容默认只会被 `redactIfDisabled()` 处理成 `<REDACTED>`
|
||||
|
||||
而调用点仍保留:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/main.tsx:2595-2597` 仍会调用 `initializeTelemetryAfterTrust()`
|
||||
- 多个业务模块仍会调用 `logOTelEvent(...)`
|
||||
|
||||
所以 F9 的移除方式也是:
|
||||
|
||||
**不删调用点,只把 telemetry bootstrap 和 event emit 统一改成 no-op。**
|
||||
|
||||
### 6. GrowthBook 不是“彻底删文件”,而是被前置条件短路
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/services/analytics/growthbook.ts:420-425`:
|
||||
|
||||
- `isGrowthBookEnabled()` 直接返回 `is1PEventLoggingEnabled()`
|
||||
|
||||
而 1P 在 `firstPartyEventLogger.ts:26-27` 中已经被硬编码为 `false`。
|
||||
|
||||
继续往下看:
|
||||
|
||||
- `growthbook.ts:490-493` 在 client 创建前就会因为 `!isGrowthBookEnabled()` 返回 `null`
|
||||
- `growthbook.ts:685-691`、`748-750` 会在取 feature value 时直接返回默认值
|
||||
|
||||
这意味着从当前源码推断:
|
||||
|
||||
- 默认路径不会创建 GrowthBook client
|
||||
- 默认路径不会执行 remote eval 网络请求
|
||||
- 默认路径不会把 `deviceID/sessionId/platform/org/email` 发出去
|
||||
|
||||
所以 F4 应该判定为:
|
||||
|
||||
**远程评估外发链路实际上已失活。**
|
||||
|
||||
这里有一个值得单独记录的点:
|
||||
|
||||
- `README.md:58-64` 写的是 “GrowthBook feature flag evaluation still works locally but does not report back”
|
||||
- 但从当前代码看,更准确的说法应该是:
|
||||
- **默认的远程评估链路已经被短路**
|
||||
- 留下的是兼容性结构和本地 override/cache 框架
|
||||
|
||||
这条判断是**基于源码的推断**。
|
||||
|
||||
### 7. 本地采集代码仍有残留,但最终不会出网
|
||||
|
||||
这部分很关键,容易误判。
|
||||
|
||||
`free-code-main` 不是把所有采集逻辑都删掉了。典型例子:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/services/analytics/metadata.ts:574-740`
|
||||
- 仍会构造 `platform`、`arch`、`nodeVersion`、`terminal`、Linux distro、`process.memoryUsage()`、`process.cpuUsage()`、repo remote hash 等元数据
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/utils/api.ts:479-562`
|
||||
- 仍会收集 `gitStatusSize`、`claudeMdSize`、项目文件数、MCP tool 数量
|
||||
- 最后仍调用 `logEvent('tengu_context_size', ...)`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/main.tsx:2521-2522`
|
||||
- 启动时仍会执行 `logContextMetrics(...)`
|
||||
|
||||
但由于 `src/services/analytics/index.ts:28-38` 中 `logEvent()` 已经是空实现,这些数据虽然可能仍在本地被计算,但不会从该链路继续发出。
|
||||
|
||||
所以更准确的评价是:
|
||||
|
||||
**移除的是 egress,不是所有 collection 语句。**
|
||||
|
||||
## 未移除部分: 逐项核对
|
||||
|
||||
### F1. 默认模型请求上下文外发未移除
|
||||
|
||||
这部分在 `free-code-main` 里仍然存在,而且关键文件与基线高度一致。
|
||||
|
||||
直接证据:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/constants/prompts.ts:606-648`
|
||||
- `computeEnvInfo()` 仍拼接:
|
||||
- `Working directory`
|
||||
- `Is directory a git repo`
|
||||
- `Platform`
|
||||
- `Shell`
|
||||
- `OS Version`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/constants/prompts.ts:651-709`
|
||||
- `computeSimpleEnvInfo()` 仍拼接:
|
||||
- `Primary working directory`
|
||||
- `Platform`
|
||||
- `Shell`
|
||||
- `OS Version`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/context.ts:36-109`
|
||||
- `getGitStatus()` 仍读取:
|
||||
- 当前分支
|
||||
- 默认分支
|
||||
- `git status --short`
|
||||
- 最近 5 条提交
|
||||
- `git config user.name`
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/context.ts:116-149`
|
||||
- `getSystemContext()` 仍把 `gitStatus` 放入上下文
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/context.ts:155-187`
|
||||
- `getUserContext()` 仍把 `CLAUDE.md` 内容和日期放入上下文
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/utils/api.ts:437-474`
|
||||
- `appendSystemContext()` / `prependUserContext()` 仍会把这些内容拼进消息
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/query.ts:449-451,659-661`
|
||||
- 查询时仍将这些上下文交给模型调用
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/services/api/claude.ts:1822-1832`
|
||||
- 最终仍通过 `anthropic.beta.messages.create(...)` 发送
|
||||
|
||||
补充比对:
|
||||
|
||||
- `src/constants/prompts.ts`
|
||||
- `src/context.ts`
|
||||
- `src/utils/api.ts`
|
||||
- `src/query.ts`
|
||||
|
||||
与基线仓库对应文件比对时,未看到针对这条链路的“移除性改造”。
|
||||
|
||||
因此 F1 在 `free-code-main` 中**没有被移除**。
|
||||
|
||||
### F5. Feedback 上传未移除
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/components/Feedback.tsx:523-550` 仍会在用户触发时:
|
||||
|
||||
- 刷新 OAuth
|
||||
- 取 auth headers
|
||||
- POST 到 `https://api.anthropic.com/api/claude_cli_feedback`
|
||||
|
||||
这个文件与基线对应文件比对无差异。
|
||||
|
||||
因此 F5 **未移除**。
|
||||
|
||||
### F6. Transcript Share 上传未移除
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/components/FeedbackSurvey/submitTranscriptShare.ts:37-94` 仍会收集:
|
||||
|
||||
- `platform`
|
||||
- `transcript`
|
||||
- `subagentTranscripts`
|
||||
- `rawTranscriptJsonl`
|
||||
|
||||
并 POST 到:
|
||||
|
||||
- `https://api.anthropic.com/api/claude_code_shared_session_transcripts`
|
||||
|
||||
这个文件与基线对应文件比对无差异。
|
||||
|
||||
因此 F6 **未移除**。
|
||||
|
||||
### F7. Remote Control / Bridge 未移除
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/bridge/bridgeMain.ts:2340-2435` 仍会采集:
|
||||
|
||||
- `branch`
|
||||
- `gitRepoUrl`
|
||||
- `machineName = hostname()`
|
||||
- `dir`
|
||||
|
||||
随后:
|
||||
|
||||
- `/Users/yovinchen/Downloads/free-code-main/src/bridge/bridgeApi.ts:142-178`
|
||||
|
||||
仍会把这些字段 POST 到:
|
||||
|
||||
- `/v1/environments/bridge`
|
||||
|
||||
上传体中明确包含:
|
||||
|
||||
- `machine_name`
|
||||
- `directory`
|
||||
- `branch`
|
||||
- `git_repo_url`
|
||||
|
||||
`src/bridge/bridgeApi.ts` 与基线对应文件比对无差异。
|
||||
|
||||
因此 F7 **未移除**。
|
||||
|
||||
### F8. Trusted Device 未移除
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/bridge/trustedDevice.ts:142-159` 仍会向:
|
||||
|
||||
- `${baseUrl}/api/auth/trusted_devices`
|
||||
|
||||
提交:
|
||||
|
||||
- `display_name: Claude Code on ${hostname()} · ${process.platform}`
|
||||
|
||||
这条链路虽然会受 `isEssentialTrafficOnly()` 影响,但代码并未被删除。
|
||||
|
||||
`src/bridge/trustedDevice.ts` 与基线对应文件比对无差异。
|
||||
|
||||
因此 F8 **未移除**。
|
||||
|
||||
### F10. `/insights` ant-only 上传未移除
|
||||
|
||||
`/Users/yovinchen/Downloads/free-code-main/src/commands/insights.ts:3075-3098` 仍保留:
|
||||
|
||||
- `process.env.USER_TYPE === 'ant'` 分支
|
||||
- 使用 `ff cp` 上传 HTML report 到 S3
|
||||
|
||||
这条链路不是默认外部版路径,但它在源码里仍然存在。
|
||||
|
||||
因此 F10 **未移除**。
|
||||
|
||||
## 与基线仓库的“未改动区域”总结
|
||||
|
||||
以下文件经对比未看到差异,说明 `free-code-main` 没有在这些链路上做“移除”改造:
|
||||
|
||||
- `src/constants/prompts.ts`
|
||||
- `src/context.ts`
|
||||
- `src/utils/api.ts`
|
||||
- `src/query.ts`
|
||||
- `src/components/Feedback.tsx`
|
||||
- `src/components/FeedbackSurvey/submitTranscriptShare.ts`
|
||||
- `src/bridge/bridgeApi.ts`
|
||||
- `src/bridge/trustedDevice.ts`
|
||||
- `src/commands/insights.ts`
|
||||
|
||||
这也是为什么报告结论是“部分移除”,而不是“整体移除”。
|
||||
|
||||
## 最终结论
|
||||
|
||||
如果把 `docs/local-system-info-egress-audit.md` 中的链路拆开看,`free-code-main` 的状态可以总结为:
|
||||
|
||||
1. **遥测类默认外发**
|
||||
- Datadog: 已移除
|
||||
- 1P event logging: 已移除
|
||||
- OTel: 已移除
|
||||
- GrowthBook remote eval: 默认已失活
|
||||
|
||||
2. **产品主链路或用户触发上传**
|
||||
- 模型 system/user context 外发: 未移除
|
||||
- Feedback: 未移除
|
||||
- Transcript Share: 未移除
|
||||
- Remote Control / Bridge: 未移除
|
||||
- Trusted Device: 未移除
|
||||
- `/insights` ant-only 上传: 未移除
|
||||
|
||||
因此,`free-code-main` 的真实定位更适合表述为:
|
||||
|
||||
**它移除了“遥测/观测型外发实现”,但没有移除“产品功能本身依赖的本地信息外发”。**
|
||||
|
||||
如果后续目标是做“彻底版本地信息外发移除”,还需要继续处理至少这些区域:
|
||||
|
||||
- `src/constants/prompts.ts`
|
||||
- `src/context.ts`
|
||||
- `src/utils/api.ts`
|
||||
- `src/components/Feedback.tsx`
|
||||
- `src/components/FeedbackSurvey/submitTranscriptShare.ts`
|
||||
- `src/bridge/*`
|
||||
- `src/commands/insights.ts`
|
||||
|
||||
430
docs/local-system-info-egress-audit.md
Normal file
430
docs/local-system-info-egress-audit.md
Normal file
@@ -0,0 +1,430 @@
|
||||
# 本地系统信息外发审计报告
|
||||
|
||||
- 审计时间: 2026-04-03
|
||||
- 审计对象: `/Users/yovinchen/project/claude`
|
||||
- 审计方式: 静态代码扫描 + 关键数据流人工追踪
|
||||
- 说明: 本报告基于源码静态分析得出,未做运行时抓包或服务端行为验证。
|
||||
|
||||
## 结论摘要
|
||||
|
||||
结论是: **存在“采集本地/环境信息并向外发送”的代码路径,而且其中一部分是默认链路。**
|
||||
|
||||
我把风险按类型拆开后,结论如下:
|
||||
|
||||
1. **默认会发生的外发**
|
||||
- 模型请求链路会把本地环境信息放进 system prompt / meta message 后发送给 Claude API。
|
||||
- analytics/telemetry 链路会把平台、架构、Node 版本、终端、运行时、Linux 发行版、进程内存/CPU 指标等发送到 Datadog 和 Anthropic 1P 事件日志接口。
|
||||
|
||||
2. **用户显式触发后才会发生的外发**
|
||||
- Feedback / Transcript Share 会上传 transcript、平台信息、错误信息、最近 API 请求等。
|
||||
- Remote Control / Bridge 会上传 `hostname`、本地目录、git 分支、git remote URL。
|
||||
- Trusted Device 注册会上传 `hostname + platform` 组成的设备显示名。
|
||||
- 可选 OpenTelemetry 在启用后会把 `user.id`、`session.id`、`organization.id`、`user.email`、`terminal.type` 等发往配置的 OTLP endpoint。
|
||||
|
||||
3. **目前未发现的自动采集项**
|
||||
- 未发现自动读取并外发 MAC 地址、网卡列表、IP 地址、`/etc/machine-id`、BIOS/主板序列号、硬件 UUID、`dmidecode`、`ioreg`、`system_profiler` 之类更敏感的硬件唯一标识。
|
||||
|
||||
4. **额外重要发现**
|
||||
- 这套代码不仅会外发“系统信息”,还会外发一部分“项目上下文”。
|
||||
- 典型例子包括: 当前工作目录、是否 git 仓库、当前分支、main 分支、git user.name、`git status --short`、最近 5 条提交、`CLAUDE.md` 内容、当前日期。
|
||||
|
||||
## 审计方法
|
||||
|
||||
本次审计主要做了两件事:
|
||||
|
||||
1. 搜索本地系统/环境信息采集点。
|
||||
- 关键词包括 `os.*`、`process.platform`、`process.arch`、`process.env`、`hostname()`、`userInfo()`、`/etc/os-release`、`uname`、`git status`、`getCwd()` 等。
|
||||
|
||||
2. 搜索外发点并做数据流关联。
|
||||
- 关键词包括 `axios.post`、`fetch`、`WebSocket`、`anthropic.beta.messages.create`、`Datadog`、`event_logging`、`trusted_devices`、`/v1/environments/bridge`、`/v1/sessions` 等。
|
||||
|
||||
## 发现清单
|
||||
|
||||
| 编号 | 链路 | 是否默认 | 外发内容 | 目标位置 | 结论 |
|
||||
| --- | --- | --- | --- | --- | --- |
|
||||
| F1 | 模型请求 system prompt / user context | 是 | cwd、平台、shell、OS 版本、git 状态、git 用户、最近提交、`CLAUDE.md`、日期 | Claude API | 已确认 |
|
||||
| F2 | Datadog analytics | 是 | 平台、架构、Node 版本、终端、运行时、Linux 发行版/内核、进程 CPU/内存、repo remote hash | Datadog | 已确认 |
|
||||
| F3 | Anthropic 1P event logging | 是 | 与 F2 类似,外加 user/account/org 元数据与 process blob | `https://api.anthropic.com/api/event_logging/batch` | 已确认 |
|
||||
| F4 | GrowthBook remote eval | 大概率是 | deviceId、sessionId、platform、org/account、email、版本、GitHub Actions 元数据 | `https://api.anthropic.com/` 上的 GrowthBook 接口 | **推断成立概率高** |
|
||||
| F5 | Feedback | 否,用户触发 | platform、terminal、是否 git、transcript、raw transcript、errors、lastApiRequest | `https://api.anthropic.com/api/claude_cli_feedback` | 已确认 |
|
||||
| F6 | Transcript Share | 否,用户触发 | platform、transcript、subagent transcripts、raw transcript JSONL | `https://api.anthropic.com/api/claude_code_shared_session_transcripts` | 已确认 |
|
||||
| F7 | Remote Control / Bridge | 否,功能触发 | hostname、directory、branch、git_repo_url、session context | `/v1/environments/bridge`、`/v1/sessions` | 已确认 |
|
||||
| F8 | Trusted Device | 否,登录/设备注册 | `Claude Code on <hostname> · <platform>` | `/api/auth/trusted_devices` | 已确认 |
|
||||
| F9 | OpenTelemetry | 否,需启用 | user/session/account/email/terminal + OTEL 检测到的 OS/host arch | 配置的 OTLP endpoint | 已确认 |
|
||||
| F10 | `/insights` 内部上传 | 非外部版默认不可用 | username、报告文件 | S3 | 已确认,且 `ant-only` |
|
||||
|
||||
## 详细分析
|
||||
|
||||
### F1. 默认模型请求链路会外发本地环境和项目上下文
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/constants/prompts.ts:606-648` 的 `computeEnvInfo()` 会构造环境块,包含:
|
||||
- `Working directory`
|
||||
- `Is directory a git repo`
|
||||
- `Platform`
|
||||
- `Shell`
|
||||
- `OS Version`
|
||||
|
||||
2. `src/constants/prompts.ts:651-709` 的 `computeSimpleEnvInfo()` 也会构造同类信息,且包含 `Primary working directory`。
|
||||
|
||||
3. `src/context.ts:36-103` 的 `getGitStatus()` 会进一步读取:
|
||||
- 当前分支
|
||||
- main 分支
|
||||
- `git config user.name`
|
||||
- `git status --short`
|
||||
- 最近 5 条提交
|
||||
|
||||
4. `src/context.ts:116-149` 的 `getSystemContext()` 会把 `gitStatus` 注入系统上下文。
|
||||
|
||||
5. `src/context.ts:155-187` 的 `getUserContext()` 会把 `CLAUDE.md` 内容和当前日期放入用户上下文。
|
||||
|
||||
6. `src/utils/api.ts:437-446` 的 `appendSystemContext()` 会把 `systemContext` 拼到 system prompt。
|
||||
|
||||
7. `src/utils/api.ts:449-470` 的 `prependUserContext()` 会把 `userContext` 作为 `<system-reminder>` 前置到消息里。
|
||||
|
||||
8. `src/query.ts:449-450`、`src/query.ts:659-661` 把这两部分上下文真正交给模型调用。
|
||||
|
||||
9. `src/services/api/claude.ts:3213-3236` 会把 `systemPrompt` 序列化为 API 文本块,`src/services/api/claude.ts:1822-1832` 通过 `anthropic.beta.messages.create(...)` 发出请求。
|
||||
|
||||
结论:
|
||||
|
||||
- **这是默认链路**,不是用户额外点击“上传”后才发生。
|
||||
- 外发的不只是主机 OS 信息,还包括当前项目目录和 git 元信息。
|
||||
- 从数据敏感性看,`cwd`、`git user.name`、最近提交标题、`CLAUDE.md` 都可能包含组织或项目标识。
|
||||
|
||||
### F2. 默认 Datadog analytics 会外发环境与进程指标
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/main.tsx:416-430` 会在启动早期初始化用户/上下文/analytics gate。
|
||||
|
||||
2. `src/main.tsx:943-946` 会初始化 sinks,从而启用 analytics sink。
|
||||
|
||||
3. `src/services/analytics/metadata.ts:417-467` 定义了要采集的 `EnvContext` 和 `ProcessMetrics` 字段。
|
||||
|
||||
4. `src/services/analytics/metadata.ts:574-637` 实际构造环境信息,包含:
|
||||
- `platform` / `platformRaw`
|
||||
- `arch`
|
||||
- `nodeVersion`
|
||||
- `terminal`
|
||||
- `packageManagers`
|
||||
- `runtimes`
|
||||
- `isCi`
|
||||
- `isClaudeCodeRemote`
|
||||
- `remoteEnvironmentType`
|
||||
- `containerId`
|
||||
- `github actions` 相关字段
|
||||
- `wslVersion`
|
||||
- `linuxDistroId`
|
||||
- `linuxDistroVersion`
|
||||
- `linuxKernel`
|
||||
- `vcs`
|
||||
|
||||
5. `src/services/analytics/metadata.ts:648-678` 采集进程指标,包含:
|
||||
- `uptime`
|
||||
- `rss`
|
||||
- `heapTotal`
|
||||
- `heapUsed`
|
||||
- `external`
|
||||
- `arrayBuffers`
|
||||
- `constrainedMemory`
|
||||
- `cpuUsage`
|
||||
- `cpuPercent`
|
||||
|
||||
6. `src/services/analytics/metadata.ts:701-739` 会把这些信息合并进每个 analytics event,并附加 `rh`。
|
||||
|
||||
7. `src/utils/git.ts:329-337` 表明 `rh` 是 **git remote URL 的 SHA256 前 16 位哈希**,不是明文 remote URL。
|
||||
|
||||
8. `src/services/analytics/datadog.ts:12-13` 指向 Datadog endpoint,`src/services/analytics/datadog.ts:108-115` 通过 `axios.post(...)` 发送。
|
||||
|
||||
结论:
|
||||
|
||||
- **Datadog 默认是活跃链路**,除非被隐私设置或 provider 条件关闭。
|
||||
- 这条链路没有看到把 `cwd`、源码正文、文件路径直接送去 Datadog;它主要发送环境维度与运行指标。
|
||||
- repo remote 不是明文发出,而是哈希值。
|
||||
|
||||
### F3. 默认 Anthropic 1P event logging 也会外发环境与身份元数据
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/services/analytics/firstPartyEventLogger.ts:141-177` 表明 1P event logging 默认启用时,会把 `core_metadata`、`user_metadata`、`event_metadata` 一起记录。
|
||||
|
||||
2. `src/services/analytics/firstPartyEventLoggingExporter.ts:114-120` 指定 1P 上报 endpoint 为:
|
||||
- `https://api.anthropic.com/api/event_logging/batch`
|
||||
- 或 staging 对应路径
|
||||
|
||||
3. `src/services/analytics/firstPartyEventLoggingExporter.ts:587-609` 表明最终通过 `axios.post(this.endpoint, payload, ...)` 发送。
|
||||
|
||||
4. `src/services/analytics/metadata.ts:796-970` 表明在 1P 格式化阶段,以下字段会进入上报内容:
|
||||
- `platform/platform_raw`
|
||||
- `arch`
|
||||
- `node_version`
|
||||
- `terminal`
|
||||
- `package_managers`
|
||||
- `runtimes`
|
||||
- `is_ci`
|
||||
- `is_github_action`
|
||||
- `linux_distro_id`
|
||||
- `linux_distro_version`
|
||||
- `linux_kernel`
|
||||
- `vcs`
|
||||
- `process` base64 blob
|
||||
- `account_uuid`
|
||||
- `organization_uuid`
|
||||
- `session_id`
|
||||
- `client_type`
|
||||
|
||||
结论:
|
||||
|
||||
- **这也是默认链路**。
|
||||
- 与 Datadog 相比,1P event logging 能接收更完整的内部结构化元数据。
|
||||
|
||||
### F4. GrowthBook 很可能会把本地/身份属性发到远端做特性分流
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/services/analytics/growthbook.ts:454-484` 构造了 `attributes`,包含:
|
||||
- `id` / `deviceID`
|
||||
- `sessionId`
|
||||
- `platform`
|
||||
- `apiBaseUrlHost`
|
||||
- `organizationUUID`
|
||||
- `accountUUID`
|
||||
- `userType`
|
||||
- `subscriptionType`
|
||||
- `rateLimitTier`
|
||||
- `firstTokenTime`
|
||||
- `email`
|
||||
- `appVersion`
|
||||
- `githubActionsMetadata`
|
||||
|
||||
2. `src/services/analytics/growthbook.ts:526-536` 使用:
|
||||
- `apiHost`
|
||||
- `attributes`
|
||||
- `remoteEval: true`
|
||||
创建 `GrowthBook` client。
|
||||
|
||||
判断:
|
||||
|
||||
- 由于真正的 HTTP 逻辑在第三方库内部,不在本仓库源码里直接展开,所以这里我不能把“已确认发送”说死。
|
||||
- 但从 `attributes + apiHost + remoteEval: true` 的组合看,**高概率**存在把这些属性发送到 GrowthBook 后端做远程特性评估的行为。
|
||||
- 这一条应标记为 **推断**,但可信度较高。
|
||||
|
||||
### F5. Feedback 会在用户触发时上传平台、转录、错误和最近请求
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/components/Feedback.tsx:54-68` 的 `FeedbackData` 定义包含:
|
||||
- `platform`
|
||||
- `gitRepo`
|
||||
- `version`
|
||||
- `transcript`
|
||||
- `rawTranscriptJsonl`
|
||||
|
||||
2. `src/components/Feedback.tsx:206-224` 实际组装 `reportData` 时还加入:
|
||||
- `terminal`
|
||||
- `errors`
|
||||
- `lastApiRequest`
|
||||
- `subagentTranscripts`
|
||||
|
||||
3. `src/components/Feedback.tsx:543-550` 发送到 `https://api.anthropic.com/api/claude_cli_feedback`。
|
||||
|
||||
结论:
|
||||
|
||||
- 这是 **用户显式触发** 的上传,不属于静默默认遥测。
|
||||
- 但数据面比普通 analytics 大得多,包含对话转录和最近 API 请求内容。
|
||||
|
||||
### F6. Transcript Share 会在用户触发时上传 transcript 和平台
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/components/FeedbackSurvey/submitTranscriptShare.ts:37-70` 采集:
|
||||
- `platform`
|
||||
- `transcript`
|
||||
- `subagentTranscripts`
|
||||
- `rawTranscriptJsonl`
|
||||
|
||||
2. `src/components/FeedbackSurvey/submitTranscriptShare.ts:87-94` 发送到 `https://api.anthropic.com/api/claude_code_shared_session_transcripts`。
|
||||
|
||||
结论:
|
||||
|
||||
- 这是 **显式分享链路**。
|
||||
- 风险面和 Feedback 类似,重点在 transcript 内容,而不是系统信息本身。
|
||||
|
||||
### F7. Remote Control / Bridge 会上传 hostname、目录、分支、git remote URL
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/bridge/bridgeMain.ts:2340-2452` 与 `src/bridge/bridgeMain.ts:2874-2909` 都会在 bridge 启动时读取:
|
||||
- `branch`
|
||||
- `gitRepoUrl`
|
||||
- `machineName = hostname()`
|
||||
- `dir`
|
||||
|
||||
2. `src/bridge/initReplBridge.ts:463-505` 也会把 `hostname()`、branch、gitRepoUrl 传入 bridge core。
|
||||
|
||||
3. `src/bridge/bridgeApi.ts:142-183` 注册环境时 POST 到 `/v1/environments/bridge`,字段包括:
|
||||
- `machine_name`
|
||||
- `directory`
|
||||
- `branch`
|
||||
- `git_repo_url`
|
||||
- `max_sessions`
|
||||
- `worker_type`
|
||||
|
||||
4. `src/bridge/createSession.ts:77-136` 创建 session 时还会把 git 仓库上下文放进 `session_context`,包括:
|
||||
- 规范化后的 repo URL
|
||||
- revision / branch
|
||||
- owner/repo
|
||||
- model
|
||||
|
||||
结论:
|
||||
|
||||
- 这是 **功能型外发**,不是无条件默认发生。
|
||||
- 但一旦启用 Remote Control,它会把本地主机名和项目标识信息发送出去。
|
||||
|
||||
### F8. Trusted Device 会上传 hostname + platform
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/bridge/trustedDevice.ts:145-159` 会向 `${baseUrl}/api/auth/trusted_devices` 发送:
|
||||
- `display_name: "Claude Code on <hostname> · <platform>"`
|
||||
|
||||
结论:
|
||||
|
||||
- 这是 **登录/设备注册链路**,不是普通对话请求。
|
||||
- 这里出现了明确的 `hostname()` 外发。
|
||||
|
||||
### F9. OpenTelemetry 是可选链路,但一旦启用也会对外发送本地属性
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/utils/telemetry/instrumentation.ts:324-325` 表明只有 `CLAUDE_CODE_ENABLE_TELEMETRY=1` 时才启用。
|
||||
|
||||
2. `src/utils/telemetry/instrumentation.ts:458-510` 会组装 OTEL resource,包含:
|
||||
- service/version
|
||||
- WSL version
|
||||
- OS detector 结果
|
||||
- host arch detector 结果
|
||||
- env detector 结果
|
||||
|
||||
3. `src/utils/telemetry/instrumentation.ts:575-607` 会初始化 log exporter 并对外发送。
|
||||
|
||||
4. `src/utils/telemetryAttributes.ts:29-68` 还会加入:
|
||||
- `user.id`
|
||||
- `session.id`
|
||||
- `app.version`
|
||||
- `organization.id`
|
||||
- `user.email`
|
||||
- `user.account_uuid`
|
||||
- `user.account_id`
|
||||
- `terminal.type`
|
||||
|
||||
结论:
|
||||
|
||||
- 这是 **可选链路**,默认不是强制开启。
|
||||
- 但如果启用并配置了 OTLP endpoint,确实会把本地身份/终端/会话属性发到外部。
|
||||
|
||||
### F10. `/insights` 还存在内部版上传链路
|
||||
|
||||
证据链如下:
|
||||
|
||||
1. `src/commands/insights.ts:2721-2736` 报告元数据包含:
|
||||
- `username`
|
||||
- 生成时间
|
||||
- 版本
|
||||
- 远程 homespace 信息
|
||||
|
||||
2. `src/commands/insights.ts:3075-3098` 会在 `process.env.USER_TYPE === 'ant'` 时尝试上传 HTML 报告到 S3。
|
||||
|
||||
结论:
|
||||
|
||||
- 这是 **内部版 ant-only** 逻辑,不应算外部公开版本默认行为。
|
||||
- 但从源码角度,确实存在上传用户名和报告的链路。
|
||||
|
||||
## 未发现项
|
||||
|
||||
本次静态审计中,**没有发现**以下类型的自动采集/外发实现:
|
||||
|
||||
- `os.networkInterfaces()`
|
||||
- `os.userInfo()` 用于遥测/外发
|
||||
- `/etc/machine-id`
|
||||
- `node-machine-id`
|
||||
- `dmidecode`
|
||||
- `ioreg`
|
||||
- `system_profiler`
|
||||
- `wmic bios`
|
||||
- `getmac`
|
||||
- `ifconfig` / `ip addr` / `ipconfig /all` 被程序主动执行用于遥测
|
||||
- MAC 地址、IP 地址、硬件序列号、主板 UUID、BIOS UUID 等硬件唯一标识
|
||||
|
||||
补充说明:
|
||||
|
||||
- 搜到的 `ip addr`、`ipconfig`、`hostname` 主要出现在 Bash/PowerShell 工具的只读命令校验规则里,不是程序自身自动采集再上报。
|
||||
- `hostname()` 的真实外发点主要集中在 Remote Control / Trusted Device。
|
||||
|
||||
## 开关与缓解建议
|
||||
|
||||
### 1. 如果你的目标是关闭默认 analytics/telemetry
|
||||
|
||||
源码里明确支持以下限制:
|
||||
|
||||
- `src/utils/privacyLevel.ts:1-55`
|
||||
- `src/services/analytics/config.ts:11-26`
|
||||
|
||||
建议:
|
||||
|
||||
- 设置 `DISABLE_TELEMETRY=1`
|
||||
- 会进入 `no-telemetry`
|
||||
- Datadog / 1P analytics 会被关闭
|
||||
- 设置 `CLAUDE_CODE_DISABLE_NONESSENTIAL_TRAFFIC=1`
|
||||
- 会进入 `essential-traffic`
|
||||
- 非必要网络流量会被进一步压缩
|
||||
|
||||
### 2. 如果你的目标是避免把本地目录和 git 信息送入模型
|
||||
|
||||
需要重点关注默认 prompt 链路,因为这部分不是传统“遥测”,而是模型上下文本身。
|
||||
|
||||
缓解思路:
|
||||
|
||||
- 在不敏感目录中运行,而不是直接在真实业务仓库根目录运行
|
||||
- 避免在 `git user.name`、commit message、`CLAUDE.md` 中放入敏感标识
|
||||
- 禁用或清理 `CLAUDE.md`
|
||||
- 不启用 Remote Control / Bridge / Transcript Share / Feedback
|
||||
|
||||
### 3. 如果你的目标是避免 hostname 外发
|
||||
|
||||
避免使用:
|
||||
|
||||
- Remote Control / Bridge
|
||||
- Trusted Device 注册 / 某些登录设备绑定流程
|
||||
|
||||
## 最终判断
|
||||
|
||||
从“是否采集本地系统信息并向外发送”这个问题本身看,答案是:
|
||||
|
||||
**是,存在,并且不止一条。**
|
||||
|
||||
但需要区分严重程度:
|
||||
|
||||
- **默认自动发生** 的,主要是:
|
||||
- 模型请求中的环境/项目上下文
|
||||
- analytics 中的环境/进程元数据
|
||||
|
||||
- **需要用户显式动作或特定功能开启** 才发生的,主要是:
|
||||
- Feedback / Transcript Share
|
||||
- Remote Control / Bridge
|
||||
- Trusted Device
|
||||
- OpenTelemetry
|
||||
- ant-only `/insights`
|
||||
|
||||
- **未发现** 自动采集 MAC/IP/硬件序列号/机器唯一硬件 ID 的实现。
|
||||
|
||||
## 审计局限
|
||||
|
||||
- 本报告只基于本仓库源码,不包含第三方依赖内部实现的完全展开。
|
||||
- 因此 GrowthBook `remoteEval` 被标为“高概率推断”,不是 100% 抓包确认。
|
||||
- 如果你需要,我下一步可以继续补一版:
|
||||
- 运行时抓包建议
|
||||
- 外发域名清单
|
||||
- 按“默认开启 / 可关闭 / 必须用户触发”生成一张更适合合规审查的表
|
||||
@@ -23,16 +23,6 @@ type BridgeApiDeps = {
|
||||
* tokens don't refresh, so 401 goes straight to BridgeFatalError.
|
||||
*/
|
||||
onAuth401?: (staleAccessToken: string) => Promise<boolean>
|
||||
/**
|
||||
* Returns the trusted device token to send as X-Trusted-Device-Token on
|
||||
* bridge API calls. Bridge sessions have SecurityTier=ELEVATED on the
|
||||
* server (CCR v2); when the server's enforcement flag is on,
|
||||
* ConnectBridgeWorker requires a trusted device at JWT-issuance.
|
||||
* Optional — when absent or returning undefined, the header is omitted
|
||||
* and the server falls through to its flag-off/no-op path. The CLI-side
|
||||
* gate is tengu_sessions_elevated_auth_enforcement (see trustedDevice.ts).
|
||||
*/
|
||||
getTrustedDeviceToken?: () => string | undefined
|
||||
}
|
||||
|
||||
const BETA_HEADER = 'environments-2025-11-01'
|
||||
@@ -65,6 +55,36 @@ export class BridgeFatalError extends Error {
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeBridgeApiPayloadForDebug(data: unknown): string {
|
||||
if (data === null) return 'null'
|
||||
if (data === undefined) return 'undefined'
|
||||
if (Array.isArray(data)) {
|
||||
return debugBody({
|
||||
type: 'array',
|
||||
length: data.length,
|
||||
})
|
||||
}
|
||||
if (typeof data !== 'object') {
|
||||
return String(data)
|
||||
}
|
||||
const value = data as Record<string, unknown>
|
||||
const workData =
|
||||
value.data && typeof value.data === 'object'
|
||||
? (value.data as Record<string, unknown>)
|
||||
: undefined
|
||||
return debugBody({
|
||||
type: 'object',
|
||||
keys: Object.keys(value)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
hasEnvironmentId: typeof value.environment_id === 'string',
|
||||
hasEnvironmentSecret: typeof value.environment_secret === 'string',
|
||||
hasWorkId: typeof value.id === 'string',
|
||||
workType: typeof workData?.type === 'string' ? workData.type : undefined,
|
||||
hasSessionId: typeof workData?.id === 'string',
|
||||
})
|
||||
}
|
||||
|
||||
export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
|
||||
function debug(msg: string): void {
|
||||
deps.onDebug?.(msg)
|
||||
@@ -74,18 +94,13 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
|
||||
const EMPTY_POLL_LOG_INTERVAL = 100
|
||||
|
||||
function getHeaders(accessToken: string): Record<string, string> {
|
||||
const headers: Record<string, string> = {
|
||||
return {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
'anthropic-version': '2023-06-01',
|
||||
'anthropic-beta': BETA_HEADER,
|
||||
'x-environment-runner-version': deps.runnerVersion,
|
||||
}
|
||||
const deviceToken = deps.getTrustedDeviceToken?.()
|
||||
if (deviceToken) {
|
||||
headers['X-Trusted-Device-Token'] = deviceToken
|
||||
}
|
||||
return headers
|
||||
}
|
||||
|
||||
function resolveAuth(): string {
|
||||
@@ -183,12 +198,14 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
|
||||
|
||||
handleErrorStatus(response.status, response.data, 'Registration')
|
||||
debug(
|
||||
`[bridge:api] POST /v1/environments/bridge -> ${response.status} environment_id=${response.data.environment_id}`,
|
||||
`[bridge:api] POST /v1/environments/bridge -> ${response.status}`,
|
||||
)
|
||||
debug(
|
||||
`[bridge:api] >>> ${debugBody({ max_sessions: config.maxSessions, metadata: { worker_type: config.workerType } })}`,
|
||||
)
|
||||
debug(`[bridge:api] <<< ${debugBody(response.data)}`)
|
||||
debug(
|
||||
`[bridge:api] <<< ${summarizeBridgeApiPayloadForDebug(response.data)}`,
|
||||
)
|
||||
return response.data
|
||||
},
|
||||
|
||||
@@ -236,9 +253,11 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
|
||||
}
|
||||
|
||||
debug(
|
||||
`[bridge:api] GET .../work/poll -> ${response.status} workId=${response.data.id} type=${response.data.data?.type}${response.data.data?.id ? ` sessionId=${response.data.data.id}` : ''}`,
|
||||
`[bridge:api] GET .../work/poll -> ${response.status} type=${response.data.data?.type ?? 'unknown'}`,
|
||||
)
|
||||
debug(
|
||||
`[bridge:api] <<< ${summarizeBridgeApiPayloadForDebug(response.data)}`,
|
||||
)
|
||||
debug(`[bridge:api] <<< ${debugBody(response.data)}`)
|
||||
return response.data
|
||||
},
|
||||
|
||||
@@ -442,7 +461,9 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
|
||||
`[bridge:api] POST /v1/sessions/${sessionId}/events -> ${response.status}`,
|
||||
)
|
||||
debug(`[bridge:api] >>> ${debugBody({ events: [event] })}`)
|
||||
debug(`[bridge:api] <<< ${debugBody(response.data)}`)
|
||||
debug(
|
||||
`[bridge:api] <<< ${summarizeBridgeApiPayloadForDebug(response.data)}`,
|
||||
)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,8 +3,6 @@ import { randomUUID } from 'crypto'
|
||||
import { tmpdir } from 'os'
|
||||
import { basename, join, resolve } from 'path'
|
||||
import { getRemoteSessionUrl } from '../constants/product.js'
|
||||
import { shutdownDatadog } from '../services/analytics/datadog.js'
|
||||
import { shutdown1PEventLogging } from '../services/analytics/firstPartyEventLogger.js'
|
||||
import { checkGate_CACHED_OR_BLOCKING } from '../services/analytics/growthbook.js'
|
||||
import {
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
@@ -30,12 +28,11 @@ import {
|
||||
import { formatDuration } from './bridgeStatusUtil.js'
|
||||
import { createBridgeLogger } from './bridgeUI.js'
|
||||
import { createCapacityWake } from './capacityWake.js'
|
||||
import { describeAxiosError } from './debugUtils.js'
|
||||
import { describeAxiosError, summarizeBridgeErrorForDebug } from './debugUtils.js'
|
||||
import { createTokenRefreshScheduler } from './jwtUtils.js'
|
||||
import { getPollIntervalConfig } from './pollConfig.js'
|
||||
import { toCompatSessionId, toInfraSessionId } from './sessionIdCompat.js'
|
||||
import { createSessionSpawner, safeFilenameId } from './sessionRunner.js'
|
||||
import { getTrustedDeviceToken } from './trustedDevice.js'
|
||||
import {
|
||||
BRIDGE_LOGIN_ERROR,
|
||||
type BridgeApiClient,
|
||||
@@ -2042,16 +2039,15 @@ export async function bridgeMain(args: string[]): Promise<void> {
|
||||
)
|
||||
enableConfigs()
|
||||
|
||||
// Initialize analytics and error reporting sinks. The bridge bypasses the
|
||||
// setup() init flow, so we call initSinks() directly to attach sinks here.
|
||||
// Initialize shared sinks. The bridge bypasses setup(), so it attaches the
|
||||
// local error-log sink directly here.
|
||||
const { initSinks } = await import('../utils/sinks.js')
|
||||
initSinks()
|
||||
|
||||
// Gate-aware validation: --spawn / --capacity / --create-session-in-dir require
|
||||
// the multi-session gate. parseArgs has already validated flag combinations;
|
||||
// here we only check the gate since that requires an async GrowthBook call.
|
||||
// Runs after enableConfigs() (GrowthBook cache reads global config) and after
|
||||
// initSinks() so the denial event can be enqueued.
|
||||
// Runs after enableConfigs() because GrowthBook cache reads global config.
|
||||
const multiSessionEnabled = await isMultiSessionSpawnEnabled()
|
||||
if (usedMultiSessionFeature && !multiSessionEnabled) {
|
||||
await logEventAsync('tengu_bridge_multi_session_denied', {
|
||||
@@ -2059,14 +2055,6 @@ export async function bridgeMain(args: string[]): Promise<void> {
|
||||
used_capacity: parsedCapacity !== undefined,
|
||||
used_create_session_in_dir: parsedCreateSessionInDir !== undefined,
|
||||
})
|
||||
// logEventAsync only enqueues — process.exit() discards buffered events.
|
||||
// Flush explicitly, capped at 500ms to match gracefulShutdown.ts.
|
||||
// (sleep() doesn't unref its timer, but process.exit() follows immediately
|
||||
// so the ref'd timer can't delay shutdown.)
|
||||
await Promise.race([
|
||||
Promise.all([shutdown1PEventLogging(), shutdownDatadog()]),
|
||||
sleep(500, undefined, { unref: true }),
|
||||
]).catch(() => {})
|
||||
// biome-ignore lint/suspicious/noConsole: intentional error output
|
||||
console.error(
|
||||
'Error: Multi-session Remote Control is not enabled for your account yet.',
|
||||
@@ -2344,7 +2332,6 @@ export async function bridgeMain(args: string[]): Promise<void> {
|
||||
runnerVersion: MACRO.VERSION,
|
||||
onDebug: logForDebugging,
|
||||
onAuth401: handleOAuth401Error,
|
||||
getTrustedDeviceToken,
|
||||
})
|
||||
|
||||
// When resuming a session via --session-id, fetch it to learn its
|
||||
@@ -2877,7 +2864,6 @@ export async function runBridgeHeadless(
|
||||
runnerVersion: MACRO.VERSION,
|
||||
onDebug: log,
|
||||
onAuth401: opts.onAuth401,
|
||||
getTrustedDeviceToken,
|
||||
})
|
||||
|
||||
let environmentId: string
|
||||
|
||||
@@ -23,9 +23,9 @@ import type { Message } from '../types/message.js'
|
||||
import { normalizeControlMessageKeys } from '../utils/controlMessageCompat.js'
|
||||
import { logForDebugging } from '../utils/debug.js'
|
||||
import { stripDisplayTagsAllowEmpty } from '../utils/displayTags.js'
|
||||
import { errorMessage } from '../utils/errors.js'
|
||||
import type { PermissionMode } from '../utils/permissions/PermissionMode.js'
|
||||
import { jsonParse } from '../utils/slowOperations.js'
|
||||
import { summarizeBridgeErrorForDebug } from './debugUtils.js'
|
||||
import type { ReplBridgeTransport } from './replBridgeTransport.js'
|
||||
|
||||
// ─── Type guards ─────────────────────────────────────────────────────────────
|
||||
@@ -179,13 +179,13 @@ export function handleIngressMessage(
|
||||
// receiving any frames, etc).
|
||||
if (uuid && recentInboundUUIDs.has(uuid)) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Ignoring re-delivered inbound: type=${parsed.type} uuid=${uuid}`,
|
||||
`[bridge:repl] Ignoring re-delivered inbound: type=${parsed.type}`,
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
logForDebugging(
|
||||
`[bridge:repl] Ingress message type=${parsed.type}${uuid ? ` uuid=${uuid}` : ''}`,
|
||||
`[bridge:repl] Ingress message type=${parsed.type}`,
|
||||
)
|
||||
|
||||
if (parsed.type === 'user') {
|
||||
@@ -202,7 +202,9 @@ export function handleIngressMessage(
|
||||
}
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Failed to parse ingress message: ${errorMessage(err)}`,
|
||||
`[bridge:repl] Failed to parse ingress message: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -277,7 +279,7 @@ export function handleServerControlRequest(
|
||||
const event = { ...response, session_id: sessionId }
|
||||
void transport.write(event)
|
||||
logForDebugging(
|
||||
`[bridge:repl] Rejected ${request.request.subtype} (outbound-only) request_id=${request.request_id}`,
|
||||
`[bridge:repl] Rejected ${request.request.subtype} (outbound-only)`,
|
||||
)
|
||||
return
|
||||
}
|
||||
@@ -386,7 +388,7 @@ export function handleServerControlRequest(
|
||||
const event = { ...response, session_id: sessionId }
|
||||
void transport.write(event)
|
||||
logForDebugging(
|
||||
`[bridge:repl] Sent control_response for ${request.request.subtype} request_id=${request.request_id} result=${response.response.subtype}`,
|
||||
`[bridge:repl] Sent control_response for ${request.request.subtype} result=${response.response.subtype}`,
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
|
||||
import axios from 'axios'
|
||||
import { logForDebugging } from '../utils/debug.js'
|
||||
import { errorMessage } from '../utils/errors.js'
|
||||
import { toError } from '../utils/errors.js'
|
||||
import { jsonStringify } from '../utils/slowOperations.js'
|
||||
import { extractErrorDetail } from './debugUtils.js'
|
||||
|
||||
@@ -23,6 +23,62 @@ function oauthHeaders(accessToken: string): Record<string, string> {
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeCodeSessionResponseForDebug(data: unknown): string {
|
||||
if (data === null) return 'null'
|
||||
if (data === undefined) return 'undefined'
|
||||
if (Array.isArray(data)) {
|
||||
return jsonStringify({
|
||||
payloadType: 'array',
|
||||
length: data.length,
|
||||
})
|
||||
}
|
||||
if (typeof data === 'object') {
|
||||
const value = data as Record<string, unknown>
|
||||
const session =
|
||||
value.session && typeof value.session === 'object'
|
||||
? (value.session as Record<string, unknown>)
|
||||
: undefined
|
||||
return jsonStringify({
|
||||
payloadType: 'object',
|
||||
keys: Object.keys(value)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
hasSession: Boolean(session),
|
||||
hasSessionId: typeof session?.id === 'string',
|
||||
hasWorkerJwt: typeof value.worker_jwt === 'string',
|
||||
hasApiBaseUrl: typeof value.api_base_url === 'string',
|
||||
hasExpiresIn: typeof value.expires_in === 'number',
|
||||
hasWorkerEpoch:
|
||||
typeof value.worker_epoch === 'number' ||
|
||||
typeof value.worker_epoch === 'string',
|
||||
})
|
||||
}
|
||||
return typeof data
|
||||
}
|
||||
|
||||
function summarizeCodeSessionErrorForDebug(err: unknown): string {
|
||||
const error = toError(err)
|
||||
const summary: Record<string, unknown> = {
|
||||
errorType: error.constructor.name,
|
||||
errorName: error.name,
|
||||
hasMessage: error.message.length > 0,
|
||||
hasStack: Boolean(error.stack),
|
||||
}
|
||||
if (err && typeof err === 'object') {
|
||||
const errorObj = err as Record<string, unknown>
|
||||
if (typeof errorObj.code === 'string' || typeof errorObj.code === 'number') {
|
||||
summary.code = errorObj.code
|
||||
}
|
||||
if (errorObj.response && typeof errorObj.response === 'object') {
|
||||
const response = errorObj.response as Record<string, unknown>
|
||||
if (typeof response.status === 'number') {
|
||||
summary.httpStatus = response.status
|
||||
}
|
||||
}
|
||||
}
|
||||
return jsonStringify(summary)
|
||||
}
|
||||
|
||||
export async function createCodeSession(
|
||||
baseUrl: string,
|
||||
accessToken: string,
|
||||
@@ -47,7 +103,9 @@ export async function createCodeSession(
|
||||
)
|
||||
} catch (err: unknown) {
|
||||
logForDebugging(
|
||||
`[code-session] Session create request failed: ${errorMessage(err)}`,
|
||||
`[code-session] Session create request failed: ${summarizeCodeSessionErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
return null
|
||||
}
|
||||
@@ -72,7 +130,9 @@ export async function createCodeSession(
|
||||
!data.session.id.startsWith('cse_')
|
||||
) {
|
||||
logForDebugging(
|
||||
`[code-session] No session.id (cse_*) in response: ${jsonStringify(data).slice(0, 200)}`,
|
||||
`[code-session] No session.id (cse_*) in response: ${summarizeCodeSessionResponseForDebug(
|
||||
data,
|
||||
)}`,
|
||||
)
|
||||
return null
|
||||
}
|
||||
@@ -95,27 +155,24 @@ export async function fetchRemoteCredentials(
|
||||
baseUrl: string,
|
||||
accessToken: string,
|
||||
timeoutMs: number,
|
||||
trustedDeviceToken?: string,
|
||||
): Promise<RemoteCredentials | null> {
|
||||
const url = `${baseUrl}/v1/code/sessions/${sessionId}/bridge`
|
||||
const headers = oauthHeaders(accessToken)
|
||||
if (trustedDeviceToken) {
|
||||
headers['X-Trusted-Device-Token'] = trustedDeviceToken
|
||||
}
|
||||
let response
|
||||
try {
|
||||
response = await axios.post(
|
||||
url,
|
||||
{},
|
||||
{
|
||||
headers,
|
||||
headers: oauthHeaders(accessToken),
|
||||
timeout: timeoutMs,
|
||||
validateStatus: s => s < 500,
|
||||
},
|
||||
)
|
||||
} catch (err: unknown) {
|
||||
logForDebugging(
|
||||
`[code-session] /bridge request failed: ${errorMessage(err)}`,
|
||||
`[code-session] /bridge request failed: ${summarizeCodeSessionErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
return null
|
||||
}
|
||||
@@ -141,7 +198,9 @@ export async function fetchRemoteCredentials(
|
||||
!('worker_epoch' in data)
|
||||
) {
|
||||
logForDebugging(
|
||||
`[code-session] /bridge response malformed (need worker_jwt, expires_in, api_base_url, worker_epoch): ${jsonStringify(data).slice(0, 200)}`,
|
||||
`[code-session] /bridge response malformed (need worker_jwt, expires_in, api_base_url, worker_epoch): ${summarizeCodeSessionResponseForDebug(
|
||||
data,
|
||||
)}`,
|
||||
)
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -21,15 +21,10 @@ const SECRET_PATTERN = new RegExp(
|
||||
'g',
|
||||
)
|
||||
|
||||
const REDACT_MIN_LENGTH = 16
|
||||
|
||||
export function redactSecrets(s: string): string {
|
||||
return s.replace(SECRET_PATTERN, (_match, field: string, value: string) => {
|
||||
if (value.length < REDACT_MIN_LENGTH) {
|
||||
void value
|
||||
return `"${field}":"[REDACTED]"`
|
||||
}
|
||||
const redacted = `${value.slice(0, 8)}...${value.slice(-4)}`
|
||||
return `"${field}":"${redacted}"`
|
||||
})
|
||||
}
|
||||
|
||||
@@ -52,6 +47,73 @@ export function debugBody(data: unknown): string {
|
||||
return s.slice(0, DEBUG_MSG_LIMIT) + `... (${s.length} chars)`
|
||||
}
|
||||
|
||||
function summarizeValueShapeForDebug(value: unknown): unknown {
|
||||
if (value === null) return 'null'
|
||||
if (value === undefined) return 'undefined'
|
||||
if (Array.isArray(value)) {
|
||||
return {
|
||||
type: 'array',
|
||||
length: value.length,
|
||||
}
|
||||
}
|
||||
if (typeof value === 'object') {
|
||||
return {
|
||||
type: 'object',
|
||||
keys: Object.keys(value as Record<string, unknown>)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
}
|
||||
}
|
||||
return typeof value
|
||||
}
|
||||
|
||||
export function summarizeBridgeErrorForDebug(err: unknown): string {
|
||||
const summary: Record<string, unknown> = {}
|
||||
|
||||
if (err instanceof Error) {
|
||||
summary.errorType = err.constructor.name
|
||||
summary.errorName = err.name
|
||||
summary.hasMessage = err.message.length > 0
|
||||
summary.hasStack = Boolean(err.stack)
|
||||
} else {
|
||||
summary.errorType = typeof err
|
||||
summary.hasValue = err !== undefined && err !== null
|
||||
}
|
||||
|
||||
if (err && typeof err === 'object') {
|
||||
const errorObj = err as Record<string, unknown>
|
||||
if (
|
||||
typeof errorObj.code === 'string' ||
|
||||
typeof errorObj.code === 'number'
|
||||
) {
|
||||
summary.code = errorObj.code
|
||||
}
|
||||
if (
|
||||
typeof errorObj.errno === 'string' ||
|
||||
typeof errorObj.errno === 'number'
|
||||
) {
|
||||
summary.errno = errorObj.errno
|
||||
}
|
||||
if (typeof errorObj.status === 'number') {
|
||||
summary.status = errorObj.status
|
||||
}
|
||||
if (typeof errorObj.syscall === 'string') {
|
||||
summary.syscall = errorObj.syscall
|
||||
}
|
||||
if (errorObj.response && typeof errorObj.response === 'object') {
|
||||
const response = errorObj.response as Record<string, unknown>
|
||||
if (typeof response.status === 'number') {
|
||||
summary.httpStatus = response.status
|
||||
}
|
||||
if ('data' in response) {
|
||||
summary.responseData = summarizeValueShapeForDebug(response.data)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return jsonStringify(summary)
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract a descriptive error message from an axios error (or any error).
|
||||
* For HTTP errors, appends the server's response body message if available,
|
||||
|
||||
@@ -107,7 +107,7 @@ export function createTokenRefreshScheduler({
|
||||
// (such as the follow-up refresh set by doRefresh) so the refresh
|
||||
// chain is not broken.
|
||||
logForDebugging(
|
||||
`[${label}:token] Could not decode JWT expiry for sessionId=${sessionId}, token prefix=${token.slice(0, 15)}…, keeping existing timer`,
|
||||
`[${label}:token] Could not decode JWT expiry for sessionId=${sessionId}, keeping existing timer`,
|
||||
)
|
||||
return
|
||||
}
|
||||
@@ -209,7 +209,7 @@ export function createTokenRefreshScheduler({
|
||||
failureCounts.delete(sessionId)
|
||||
|
||||
logForDebugging(
|
||||
`[${label}:token] Refreshing token for sessionId=${sessionId}: new token prefix=${oauthToken.slice(0, 15)}…`,
|
||||
`[${label}:token] Refreshing token for sessionId=${sessionId}`,
|
||||
)
|
||||
logEvent('tengu_bridge_token_refreshed', {})
|
||||
onRefresh(sessionId, oauthToken)
|
||||
|
||||
@@ -38,7 +38,6 @@ import { buildCCRv2SdkUrl } from './workSecret.js'
|
||||
import { toCompatSessionId } from './sessionIdCompat.js'
|
||||
import { FlushGate } from './flushGate.js'
|
||||
import { createTokenRefreshScheduler } from './jwtUtils.js'
|
||||
import { getTrustedDeviceToken } from './trustedDevice.js'
|
||||
import {
|
||||
getEnvLessBridgeConfig,
|
||||
type EnvLessBridgeConfig,
|
||||
@@ -51,7 +50,10 @@ import {
|
||||
extractTitleText,
|
||||
BoundedUUIDSet,
|
||||
} from './bridgeMessaging.js'
|
||||
import { logBridgeSkip } from './debugUtils.js'
|
||||
import {
|
||||
logBridgeSkip,
|
||||
summarizeBridgeErrorForDebug,
|
||||
} from './debugUtils.js'
|
||||
import { logForDebugging } from '../utils/debug.js'
|
||||
import { logForDiagnosticsNoPII } from '../utils/diagLogs.js'
|
||||
import { isInProtectedNamespace } from '../utils/envUtils.js'
|
||||
@@ -182,7 +184,7 @@ export async function initEnvLessBridgeCore(
|
||||
return null
|
||||
}
|
||||
const sessionId: string = createdSessionId
|
||||
logForDebugging(`[remote-bridge] Created session ${sessionId}`)
|
||||
logForDebugging('[remote-bridge] Created remote bridge session')
|
||||
logForDiagnosticsNoPII('info', 'bridge_repl_v2_session_created')
|
||||
|
||||
// ── 2. Fetch bridge credentials (POST /bridge → worker_jwt, expires_in, api_base_url) ──
|
||||
@@ -215,7 +217,7 @@ export async function initEnvLessBridgeCore(
|
||||
|
||||
// ── 3. Build v2 transport (SSETransport + CCRClient) ────────────────────
|
||||
const sessionUrl = buildCCRv2SdkUrl(credentials.api_base_url, sessionId)
|
||||
logForDebugging(`[remote-bridge] v2 session URL: ${sessionUrl}`)
|
||||
logForDebugging('[remote-bridge] Configured v2 session transport endpoint')
|
||||
|
||||
let transport: ReplBridgeTransport
|
||||
try {
|
||||
@@ -236,10 +238,12 @@ export async function initEnvLessBridgeCore(
|
||||
})
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[remote-bridge] v2 transport setup failed: ${errorMessage(err)}`,
|
||||
`[remote-bridge] v2 transport setup failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
onStateChange?.('failed', `Transport setup failed: ${errorMessage(err)}`)
|
||||
onStateChange?.('failed', 'Transport setup failed')
|
||||
logBridgeSkip('v2_transport_setup_failed', undefined, true)
|
||||
void archiveSession(
|
||||
sessionId,
|
||||
@@ -357,7 +361,9 @@ export async function initEnvLessBridgeCore(
|
||||
)
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[remote-bridge] Proactive refresh rebuild failed: ${errorMessage(err)}`,
|
||||
`[remote-bridge] Proactive refresh rebuild failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
logForDiagnosticsNoPII(
|
||||
@@ -365,7 +371,7 @@ export async function initEnvLessBridgeCore(
|
||||
'bridge_repl_v2_proactive_refresh_failed',
|
||||
)
|
||||
if (!tornDown) {
|
||||
onStateChange?.('failed', `Refresh failed: ${errorMessage(err)}`)
|
||||
onStateChange?.('failed', 'Refresh failed')
|
||||
}
|
||||
} finally {
|
||||
authRecoveryInFlight = false
|
||||
@@ -395,9 +401,13 @@ export async function initEnvLessBridgeCore(
|
||||
// (Same guard pattern as replBridge.ts:1119.)
|
||||
const flushTransport = transport
|
||||
void flushHistory(initialMessages)
|
||||
.catch(e =>
|
||||
logForDebugging(`[remote-bridge] flushHistory failed: ${e}`),
|
||||
.catch(e => {
|
||||
logForDebugging(
|
||||
`[remote-bridge] flushHistory failed: ${summarizeBridgeErrorForDebug(
|
||||
e,
|
||||
)}`,
|
||||
)
|
||||
})
|
||||
.finally(() => {
|
||||
// authRecoveryInFlight catches the v1-vs-v2 asymmetry: v1 nulls
|
||||
// transport synchronously in setOnClose (replBridge.ts:1175), so
|
||||
@@ -577,12 +587,14 @@ export async function initEnvLessBridgeCore(
|
||||
logForDebugging('[remote-bridge] Transport rebuilt after 401')
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[remote-bridge] 401 recovery failed: ${errorMessage(err)}`,
|
||||
`[remote-bridge] 401 recovery failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'bridge_repl_v2_jwt_refresh_failed')
|
||||
if (!tornDown) {
|
||||
onStateChange?.('failed', `JWT refresh failed: ${errorMessage(err)}`)
|
||||
onStateChange?.('failed', 'JWT refresh failed')
|
||||
}
|
||||
} finally {
|
||||
authRecoveryInFlight = false
|
||||
@@ -707,7 +719,9 @@ export async function initEnvLessBridgeCore(
|
||||
)
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[remote-bridge] Teardown 401 retry threw: ${errorMessage(err)}`,
|
||||
`[remote-bridge] Teardown 401 retry threw: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
}
|
||||
@@ -824,7 +838,7 @@ export async function initEnvLessBridgeCore(
|
||||
sendControlRequest(request: SDKControlRequest) {
|
||||
if (authRecoveryInFlight) {
|
||||
logForDebugging(
|
||||
`[remote-bridge] Dropping control_request during 401 recovery: ${request.request_id}`,
|
||||
'[remote-bridge] Dropping control_request during 401 recovery',
|
||||
)
|
||||
return
|
||||
}
|
||||
@@ -833,9 +847,7 @@ export async function initEnvLessBridgeCore(
|
||||
transport.reportState('requires_action')
|
||||
}
|
||||
void transport.write(event)
|
||||
logForDebugging(
|
||||
`[remote-bridge] Sent control_request request_id=${request.request_id}`,
|
||||
)
|
||||
logForDebugging('[remote-bridge] Sent control_request')
|
||||
},
|
||||
sendControlResponse(response: SDKControlResponse) {
|
||||
if (authRecoveryInFlight) {
|
||||
@@ -852,7 +864,7 @@ export async function initEnvLessBridgeCore(
|
||||
sendControlCancelRequest(requestId: string) {
|
||||
if (authRecoveryInFlight) {
|
||||
logForDebugging(
|
||||
`[remote-bridge] Dropping control_cancel_request during 401 recovery: ${requestId}`,
|
||||
'[remote-bridge] Dropping control_cancel_request during 401 recovery',
|
||||
)
|
||||
return
|
||||
}
|
||||
@@ -866,9 +878,7 @@ export async function initEnvLessBridgeCore(
|
||||
// those paths, so without this the server stays on requires_action.
|
||||
transport.reportState('running')
|
||||
void transport.write(event)
|
||||
logForDebugging(
|
||||
`[remote-bridge] Sent control_cancel_request request_id=${requestId}`,
|
||||
)
|
||||
logForDebugging('[remote-bridge] Sent control_cancel_request')
|
||||
},
|
||||
sendResult() {
|
||||
if (authRecoveryInFlight) {
|
||||
@@ -877,7 +887,7 @@ export async function initEnvLessBridgeCore(
|
||||
}
|
||||
transport.reportState('idle')
|
||||
void transport.write(makeResultMessage(sessionId))
|
||||
logForDebugging(`[remote-bridge] Sent result`)
|
||||
logForDebugging('[remote-bridge] Sent result')
|
||||
},
|
||||
async teardown() {
|
||||
unregister()
|
||||
@@ -925,9 +935,8 @@ import {
|
||||
} from './codeSessionApi.js'
|
||||
import { getBridgeBaseUrlOverride } from './bridgeConfig.js'
|
||||
|
||||
// CLI-side wrapper that applies the CLAUDE_BRIDGE_BASE_URL dev override and
|
||||
// injects the trusted-device token (both are env/GrowthBook reads that the
|
||||
// SDK-facing codeSessionApi.ts export must stay free of).
|
||||
// CLI-side wrapper that applies the CLAUDE_BRIDGE_BASE_URL dev override while
|
||||
// keeping the SDK-facing codeSessionApi.ts export free of CLI config reads.
|
||||
export async function fetchRemoteCredentials(
|
||||
sessionId: string,
|
||||
baseUrl: string,
|
||||
@@ -939,7 +948,6 @@ export async function fetchRemoteCredentials(
|
||||
baseUrl,
|
||||
accessToken,
|
||||
timeoutMs,
|
||||
getTrustedDeviceToken(),
|
||||
)
|
||||
if (!creds) return null
|
||||
return getBridgeBaseUrlOverride()
|
||||
@@ -995,12 +1003,13 @@ async function archiveSession(
|
||||
},
|
||||
)
|
||||
logForDebugging(
|
||||
`[remote-bridge] Archive ${compatId} status=${response.status}`,
|
||||
`[remote-bridge] Archive status=${response.status}`,
|
||||
)
|
||||
return response.status
|
||||
} catch (err) {
|
||||
const msg = errorMessage(err)
|
||||
logForDebugging(`[remote-bridge] Archive failed: ${msg}`)
|
||||
logForDebugging(
|
||||
`[remote-bridge] Archive failed: ${summarizeBridgeErrorForDebug(err)}`,
|
||||
)
|
||||
return axios.isAxiosError(err) && err.code === 'ECONNABORTED'
|
||||
? 'timeout'
|
||||
: 'error'
|
||||
|
||||
@@ -30,7 +30,6 @@ import {
|
||||
} from './workSecret.js'
|
||||
import { toCompatSessionId, toInfraSessionId } from './sessionIdCompat.js'
|
||||
import { updateSessionBridgeId } from '../utils/concurrentSessions.js'
|
||||
import { getTrustedDeviceToken } from './trustedDevice.js'
|
||||
import { HybridTransport } from '../cli/transports/HybridTransport.js'
|
||||
import {
|
||||
type ReplBridgeTransport,
|
||||
@@ -44,6 +43,7 @@ import {
|
||||
describeAxiosError,
|
||||
extractHttpStatus,
|
||||
logBridgeSkip,
|
||||
summarizeBridgeErrorForDebug,
|
||||
} from './debugUtils.js'
|
||||
import type { Message } from '../types/message.js'
|
||||
import type { SDKMessage } from '../entrypoints/agentSdkTypes.ts'
|
||||
@@ -304,7 +304,7 @@ export async function initBridgeCore(
|
||||
const prior = rawPrior?.source === 'repl' ? rawPrior : null
|
||||
|
||||
logForDebugging(
|
||||
`[bridge:repl] initBridgeCore #${seq} starting (initialMessages=${initialMessages?.length ?? 0}${prior ? ` perpetual prior=env:${prior.environmentId}` : ''})`,
|
||||
`[bridge:repl] initBridgeCore #${seq} starting (initialMessages=${initialMessages?.length ?? 0}${prior ? ' perpetual prior pointer present' : ''})`,
|
||||
)
|
||||
|
||||
// 5. Register bridge environment
|
||||
@@ -314,7 +314,6 @@ export async function initBridgeCore(
|
||||
runnerVersion: MACRO.VERSION,
|
||||
onDebug: logForDebugging,
|
||||
onAuth401,
|
||||
getTrustedDeviceToken,
|
||||
})
|
||||
// Ant-only: interpose so /bridge-kick can inject poll/register/heartbeat
|
||||
// failures. Zero cost in external builds (rawApi passes through unchanged).
|
||||
@@ -344,7 +343,9 @@ export async function initBridgeCore(
|
||||
} catch (err) {
|
||||
logBridgeSkip(
|
||||
'registration_failed',
|
||||
`[bridge:repl] Environment registration failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] Environment registration failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
// Stale pointer may be the cause (expired/deleted env) — clear it so
|
||||
// the next start doesn't retry the same dead ID.
|
||||
@@ -355,7 +356,7 @@ export async function initBridgeCore(
|
||||
return null
|
||||
}
|
||||
|
||||
logForDebugging(`[bridge:repl] Environment registered: ${environmentId}`)
|
||||
logForDebugging('[bridge:repl] Environment registered')
|
||||
logForDiagnosticsNoPII('info', 'bridge_repl_env_registered')
|
||||
logEvent('tengu_bridge_repl_env_registered', {})
|
||||
|
||||
@@ -373,7 +374,7 @@ export async function initBridgeCore(
|
||||
): Promise<boolean> {
|
||||
if (environmentId !== requestedEnvId) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Env mismatch (requested ${requestedEnvId}, got ${environmentId}) — cannot reconnect in place`,
|
||||
'[bridge:repl] Env mismatch — cannot reconnect in place',
|
||||
)
|
||||
return false
|
||||
}
|
||||
@@ -391,13 +392,13 @@ export async function initBridgeCore(
|
||||
for (const id of candidates) {
|
||||
try {
|
||||
await api.reconnectSession(environmentId, id)
|
||||
logForDebugging(
|
||||
`[bridge:repl] Reconnected session ${id} in place on env ${environmentId}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Reconnected existing session in place')
|
||||
return true
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] reconnectSession(${id}) failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] reconnectSession failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -681,7 +682,9 @@ export async function initBridgeCore(
|
||||
} catch (err) {
|
||||
bridgeConfig.reuseEnvironmentId = undefined
|
||||
logForDebugging(
|
||||
`[bridge:repl] Environment re-registration failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] Environment re-registration failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
return false
|
||||
}
|
||||
@@ -690,7 +693,7 @@ export async function initBridgeCore(
|
||||
bridgeConfig.reuseEnvironmentId = undefined
|
||||
|
||||
logForDebugging(
|
||||
`[bridge:repl] Re-registered: requested=${requestedEnvId} got=${environmentId}`,
|
||||
'[bridge:repl] Re-registered environment',
|
||||
)
|
||||
|
||||
// Bail out if teardown started while we were registering
|
||||
@@ -986,7 +989,7 @@ export async function initBridgeCore(
|
||||
injectFault: injectBridgeFault,
|
||||
wakePollLoop,
|
||||
describe: () =>
|
||||
`env=${environmentId} session=${currentSessionId} transport=${transport?.getStateLabel() ?? 'null'} workId=${currentWorkId ?? 'null'}`,
|
||||
`transport=${transport?.getStateLabel() ?? 'null'} hasSession=${Boolean(currentSessionId)} hasWork=${Boolean(currentWorkId)}`,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1040,7 +1043,9 @@ export async function initBridgeCore(
|
||||
.stopWork(environmentId, currentWorkId, false)
|
||||
.catch((e: unknown) => {
|
||||
logForDebugging(
|
||||
`[bridge:repl] stopWork after heartbeat fatal: ${errorMessage(e)}`,
|
||||
`[bridge:repl] stopWork after heartbeat fatal: ${summarizeBridgeErrorForDebug(
|
||||
e,
|
||||
)}`,
|
||||
)
|
||||
})
|
||||
}
|
||||
@@ -1367,7 +1372,7 @@ export async function initBridgeCore(
|
||||
const sessionUrl = buildCCRv2SdkUrl(baseUrl, workSessionId)
|
||||
const thisGen = v2Generation
|
||||
logForDebugging(
|
||||
`[bridge:repl] CCR v2: sessionUrl=${sessionUrl} session=${workSessionId} gen=${thisGen}`,
|
||||
`[bridge:repl] CCR v2: creating transport gen=${thisGen}`,
|
||||
)
|
||||
void createV2ReplTransport({
|
||||
sessionUrl,
|
||||
@@ -1401,7 +1406,9 @@ export async function initBridgeCore(
|
||||
},
|
||||
(err: unknown) => {
|
||||
logForDebugging(
|
||||
`[bridge:repl] CCR v2: createV2ReplTransport failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] CCR v2: createV2ReplTransport failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
logEvent('tengu_bridge_repl_ccr_v2_init_failed', {})
|
||||
@@ -1416,7 +1423,9 @@ export async function initBridgeCore(
|
||||
.stopWork(environmentId, currentWorkId, false)
|
||||
.catch((e: unknown) => {
|
||||
logForDebugging(
|
||||
`[bridge:repl] stopWork after v2 init failure: ${errorMessage(e)}`,
|
||||
`[bridge:repl] stopWork after v2 init failure: ${summarizeBridgeErrorForDebug(
|
||||
e,
|
||||
)}`,
|
||||
)
|
||||
})
|
||||
currentWorkId = null
|
||||
@@ -1437,10 +1446,8 @@ export async function initBridgeCore(
|
||||
// secret. refreshHeaders picks up the latest OAuth token on each
|
||||
// WS reconnect attempt.
|
||||
const wsUrl = buildSdkUrl(sessionIngressUrl, workSessionId)
|
||||
logForDebugging(`[bridge:repl] Ingress URL: ${wsUrl}`)
|
||||
logForDebugging(
|
||||
`[bridge:repl] Creating HybridTransport: session=${workSessionId}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Using session ingress WebSocket endpoint')
|
||||
logForDebugging('[bridge:repl] Creating HybridTransport')
|
||||
// v1OauthToken was validated non-null above (we'd have returned early).
|
||||
const oauthToken = v1OauthToken ?? ''
|
||||
wireTransport(
|
||||
@@ -1525,7 +1532,9 @@ export async function initBridgeCore(
|
||||
logForDebugging('[bridge:repl] keep_alive sent')
|
||||
void transport.write({ type: 'keep_alive' }).catch((err: unknown) => {
|
||||
logForDebugging(
|
||||
`[bridge:repl] keep_alive write failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] keep_alive write failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
})
|
||||
}, keepAliveIntervalMs)
|
||||
@@ -1538,15 +1547,13 @@ export async function initBridgeCore(
|
||||
doTeardownImpl = async (): Promise<void> => {
|
||||
if (teardownStarted) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Teardown already in progress, skipping duplicate call env=${environmentId} session=${currentSessionId}`,
|
||||
'[bridge:repl] Teardown already in progress, skipping duplicate call',
|
||||
)
|
||||
return
|
||||
}
|
||||
teardownStarted = true
|
||||
const teardownStart = Date.now()
|
||||
logForDebugging(
|
||||
`[bridge:repl] Teardown starting: env=${environmentId} session=${currentSessionId} workId=${currentWorkId ?? 'none'} transportState=${transport?.getStateLabel() ?? 'null'}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Teardown starting')
|
||||
|
||||
if (pointerRefreshTimer !== null) {
|
||||
clearInterval(pointerRefreshTimer)
|
||||
@@ -1595,7 +1602,7 @@ export async function initBridgeCore(
|
||||
source: 'repl',
|
||||
})
|
||||
logForDebugging(
|
||||
`[bridge:repl] Teardown (perpetual): leaving env=${environmentId} session=${currentSessionId} alive on server, duration=${Date.now() - teardownStart}ms`,
|
||||
`[bridge:repl] Teardown (perpetual): leaving bridge session alive on server, duration=${Date.now() - teardownStart}ms`,
|
||||
)
|
||||
return
|
||||
}
|
||||
@@ -1621,7 +1628,9 @@ export async function initBridgeCore(
|
||||
})
|
||||
.catch((err: unknown) => {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Teardown stopWork failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] Teardown stopWork failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
})
|
||||
: Promise.resolve()
|
||||
@@ -1638,7 +1647,9 @@ export async function initBridgeCore(
|
||||
|
||||
await api.deregisterEnvironment(environmentId).catch((err: unknown) => {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Teardown deregister failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] Teardown deregister failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
})
|
||||
|
||||
@@ -1648,16 +1659,14 @@ export async function initBridgeCore(
|
||||
await clearBridgePointer(dir)
|
||||
|
||||
logForDebugging(
|
||||
`[bridge:repl] Teardown complete: env=${environmentId} duration=${Date.now() - teardownStart}ms`,
|
||||
`[bridge:repl] Teardown complete: duration=${Date.now() - teardownStart}ms`,
|
||||
)
|
||||
}
|
||||
|
||||
// 8. Register cleanup for graceful shutdown
|
||||
const unregister = registerCleanup(() => doTeardownImpl?.())
|
||||
|
||||
logForDebugging(
|
||||
`[bridge:repl] Ready: env=${environmentId} session=${currentSessionId}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Ready')
|
||||
onStateChange?.('ready')
|
||||
|
||||
return {
|
||||
@@ -1715,7 +1724,7 @@ export async function initBridgeCore(
|
||||
if (!transport) {
|
||||
const types = filtered.map(m => m.type).join(',')
|
||||
logForDebugging(
|
||||
`[bridge:repl] Transport not configured, dropping ${filtered.length} message(s) [${types}] for session=${currentSessionId}`,
|
||||
`[bridge:repl] Transport not configured, dropping ${filtered.length} message(s) [${types}]`,
|
||||
{ level: 'warn' },
|
||||
)
|
||||
return
|
||||
@@ -1750,7 +1759,7 @@ export async function initBridgeCore(
|
||||
if (filtered.length === 0) return
|
||||
if (!transport) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Transport not configured, dropping ${filtered.length} SDK message(s) for session=${currentSessionId}`,
|
||||
`[bridge:repl] Transport not configured, dropping ${filtered.length} SDK message(s)`,
|
||||
{ level: 'warn' },
|
||||
)
|
||||
return
|
||||
@@ -1770,9 +1779,7 @@ export async function initBridgeCore(
|
||||
}
|
||||
const event = { ...request, session_id: currentSessionId }
|
||||
void transport.write(event)
|
||||
logForDebugging(
|
||||
`[bridge:repl] Sent control_request request_id=${request.request_id}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Sent control_request')
|
||||
},
|
||||
sendControlResponse(response: SDKControlResponse) {
|
||||
if (!transport) {
|
||||
@@ -1798,21 +1805,17 @@ export async function initBridgeCore(
|
||||
session_id: currentSessionId,
|
||||
}
|
||||
void transport.write(event)
|
||||
logForDebugging(
|
||||
`[bridge:repl] Sent control_cancel_request request_id=${requestId}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Sent control_cancel_request')
|
||||
},
|
||||
sendResult() {
|
||||
if (!transport) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] sendResult: skipping, transport not configured session=${currentSessionId}`,
|
||||
'[bridge:repl] sendResult: skipping, transport not configured',
|
||||
)
|
||||
return
|
||||
}
|
||||
void transport.write(makeResultMessage(currentSessionId))
|
||||
logForDebugging(
|
||||
`[bridge:repl] Sent result for session=${currentSessionId}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Sent result')
|
||||
},
|
||||
async teardown() {
|
||||
unregister()
|
||||
@@ -1905,7 +1908,7 @@ async function startWorkPollLoop({
|
||||
const MAX_ENVIRONMENT_RECREATIONS = 3
|
||||
|
||||
logForDebugging(
|
||||
`[bridge:repl] Starting work poll loop for env=${getCredentials().environmentId}`,
|
||||
'[bridge:repl] Starting work poll loop',
|
||||
)
|
||||
|
||||
let consecutiveErrors = 0
|
||||
@@ -2008,7 +2011,9 @@ async function startWorkPollLoop({
|
||||
)
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[bridge:repl:heartbeat] Failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl:heartbeat] Failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
if (err instanceof BridgeFatalError) {
|
||||
cap.cleanup()
|
||||
@@ -2126,7 +2131,9 @@ async function startWorkPollLoop({
|
||||
secret = decodeWorkSecret(work.secret)
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Failed to decode work secret: ${errorMessage(err)}`,
|
||||
`[bridge:repl] Failed to decode work secret: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
logEvent('tengu_bridge_repl_work_secret_failed', {})
|
||||
// Can't ack (needs the JWT we failed to decode). stopWork uses OAuth.
|
||||
@@ -2137,12 +2144,14 @@ async function startWorkPollLoop({
|
||||
|
||||
// Explicitly acknowledge to prevent redelivery. Non-fatal on failure:
|
||||
// server re-delivers, and the onWorkReceived callback handles dedup.
|
||||
logForDebugging(`[bridge:repl] Acknowledging workId=${work.id}`)
|
||||
logForDebugging('[bridge:repl] Acknowledging work item')
|
||||
try {
|
||||
await api.acknowledgeWork(envId, work.id, secret.session_ingress_token)
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Acknowledge failed workId=${work.id}: ${errorMessage(err)}`,
|
||||
`[bridge:repl] Acknowledge failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -2194,7 +2203,7 @@ async function startWorkPollLoop({
|
||||
const currentEnvId = getCredentials().environmentId
|
||||
if (envId !== currentEnvId) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] Stale poll error for old env=${envId}, current env=${currentEnvId} — skipping onEnvironmentLost`,
|
||||
'[bridge:repl] Stale poll error for superseded environment — skipping onEnvironmentLost',
|
||||
)
|
||||
consecutiveErrors = 0
|
||||
firstErrorTime = null
|
||||
@@ -2240,9 +2249,7 @@ async function startWorkPollLoop({
|
||||
consecutiveErrors = 0
|
||||
firstErrorTime = null
|
||||
onStateChange?.('ready')
|
||||
logForDebugging(
|
||||
`[bridge:repl] Re-registered environment: ${newCreds.environmentId}`,
|
||||
)
|
||||
logForDebugging('[bridge:repl] Re-registered environment')
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -2378,7 +2385,7 @@ async function startWorkPollLoop({
|
||||
}
|
||||
|
||||
logForDebugging(
|
||||
`[bridge:repl] Work poll loop ended (aborted=${signal.aborted}) env=${getCredentials().environmentId}`,
|
||||
`[bridge:repl] Work poll loop ended (aborted=${signal.aborted})`,
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -3,9 +3,9 @@ import { CCRClient } from '../cli/transports/ccrClient.js'
|
||||
import type { HybridTransport } from '../cli/transports/HybridTransport.js'
|
||||
import { SSETransport } from '../cli/transports/SSETransport.js'
|
||||
import { logForDebugging } from '../utils/debug.js'
|
||||
import { errorMessage } from '../utils/errors.js'
|
||||
import { updateSessionIngressAuthToken } from '../utils/sessionIngressAuth.js'
|
||||
import type { SessionState } from '../utils/sessionState.js'
|
||||
import { summarizeBridgeErrorForDebug } from './debugUtils.js'
|
||||
import { registerWorker } from './workSecret.js'
|
||||
|
||||
/**
|
||||
@@ -54,8 +54,6 @@ export type ReplBridgeTransport = {
|
||||
* (user watches the REPL locally); multi-session worker callers do.
|
||||
*/
|
||||
reportState(state: SessionState): void
|
||||
/** PUT /worker external_metadata (v2 only; v1 is a no-op). */
|
||||
reportMetadata(metadata: Record<string, unknown>): void
|
||||
/**
|
||||
* POST /worker/events/{id}/delivery (v2 only; v1 is a no-op). Populates
|
||||
* CCR's processing_at/processed_at columns. `received` is auto-fired by
|
||||
@@ -96,7 +94,6 @@ export function createV1ReplTransport(
|
||||
return hybrid.droppedBatchCount
|
||||
},
|
||||
reportState: () => {},
|
||||
reportMetadata: () => {},
|
||||
reportDelivery: () => {},
|
||||
flush: () => Promise.resolve(),
|
||||
}
|
||||
@@ -182,7 +179,7 @@ export async function createV2ReplTransport(opts: {
|
||||
|
||||
const epoch = opts.epoch ?? (await registerWorker(sessionUrl, ingressToken))
|
||||
logForDebugging(
|
||||
`[bridge:repl] CCR v2: worker sessionId=${sessionId} epoch=${epoch}${opts.epoch !== undefined ? ' (from /bridge)' : ' (via registerWorker)'}`,
|
||||
`[bridge:repl] CCR v2: worker registered epoch=${epoch}${opts.epoch !== undefined ? ' (from /bridge)' : ' (via registerWorker)'}`,
|
||||
)
|
||||
|
||||
// Derive SSE stream URL. Same logic as transportUtils.ts:26-33 but
|
||||
@@ -220,7 +217,9 @@ export async function createV2ReplTransport(opts: {
|
||||
onCloseCb?.(4090)
|
||||
} catch (closeErr: unknown) {
|
||||
logForDebugging(
|
||||
`[bridge:repl] CCR v2: error during epoch-mismatch cleanup: ${errorMessage(closeErr)}`,
|
||||
`[bridge:repl] CCR v2: error during epoch-mismatch cleanup: ${summarizeBridgeErrorForDebug(
|
||||
closeErr,
|
||||
)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
}
|
||||
@@ -324,9 +323,6 @@ export async function createV2ReplTransport(opts: {
|
||||
reportState(state) {
|
||||
ccr.reportState(state)
|
||||
},
|
||||
reportMetadata(metadata) {
|
||||
ccr.reportMetadata(metadata)
|
||||
},
|
||||
reportDelivery(eventId, status) {
|
||||
ccr.reportDelivery(eventId, status)
|
||||
},
|
||||
@@ -353,7 +349,9 @@ export async function createV2ReplTransport(opts: {
|
||||
},
|
||||
(err: unknown) => {
|
||||
logForDebugging(
|
||||
`[bridge:repl] CCR v2 initialize failed: ${errorMessage(err)}`,
|
||||
`[bridge:repl] CCR v2 initialize failed: ${summarizeBridgeErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
// Close transport resources and notify replBridge via onClose
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import { type ChildProcess, spawn } from 'child_process'
|
||||
import { createWriteStream, type WriteStream } from 'fs'
|
||||
import { tmpdir } from 'os'
|
||||
import { dirname, join } from 'path'
|
||||
import { basename, dirname, join } from 'path'
|
||||
import { createInterface } from 'readline'
|
||||
import { jsonParse, jsonStringify } from '../utils/slowOperations.js'
|
||||
import { debugTruncate } from './debugUtils.js'
|
||||
import type {
|
||||
SessionActivity,
|
||||
SessionDoneStatus,
|
||||
@@ -25,6 +24,61 @@ export function safeFilenameId(id: string): string {
|
||||
return id.replace(/[^a-zA-Z0-9_-]/g, '_')
|
||||
}
|
||||
|
||||
function summarizeSessionRunnerErrorForDebug(error: unknown): string {
|
||||
return jsonStringify({
|
||||
errorType:
|
||||
error instanceof Error ? error.constructor.name : typeof error,
|
||||
errorName: error instanceof Error ? error.name : undefined,
|
||||
hasMessage: error instanceof Error ? error.message.length > 0 : false,
|
||||
hasStack: error instanceof Error ? Boolean(error.stack) : false,
|
||||
})
|
||||
}
|
||||
|
||||
function summarizeSessionRunnerFrameForDebug(data: string): string {
|
||||
try {
|
||||
const parsed = jsonParse(data)
|
||||
if (parsed && typeof parsed === 'object') {
|
||||
const value = parsed as Record<string, unknown>
|
||||
return jsonStringify({
|
||||
frameType: typeof value.type === 'string' ? value.type : 'unknown',
|
||||
subtype:
|
||||
typeof value.subtype === 'string'
|
||||
? value.subtype
|
||||
: value.response &&
|
||||
typeof value.response === 'object' &&
|
||||
typeof (value.response as Record<string, unknown>).subtype ===
|
||||
'string'
|
||||
? (value.response as Record<string, unknown>).subtype
|
||||
: value.request &&
|
||||
typeof value.request === 'object' &&
|
||||
typeof (value.request as Record<string, unknown>).subtype ===
|
||||
'string'
|
||||
? (value.request as Record<string, unknown>).subtype
|
||||
: undefined,
|
||||
hasUuid: typeof value.uuid === 'string',
|
||||
length: data.length,
|
||||
})
|
||||
}
|
||||
} catch {
|
||||
// fall through to raw-length summary
|
||||
}
|
||||
return jsonStringify({
|
||||
frameType: 'unparsed',
|
||||
length: data.length,
|
||||
})
|
||||
}
|
||||
|
||||
function summarizeSessionRunnerArgsForDebug(args: string[]): string {
|
||||
return jsonStringify({
|
||||
argCount: args.length,
|
||||
hasSdkUrl: args.includes('--sdk-url'),
|
||||
hasSessionId: args.includes('--session-id'),
|
||||
hasDebugFile: args.includes('--debug-file'),
|
||||
hasVerbose: args.includes('--verbose'),
|
||||
hasPermissionMode: args.includes('--permission-mode'),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* A control_request emitted by the child CLI when it needs permission to
|
||||
* execute a **specific** tool invocation (not a general capability check).
|
||||
@@ -144,9 +198,7 @@ function extractActivities(
|
||||
summary,
|
||||
timestamp: now,
|
||||
})
|
||||
onDebug(
|
||||
`[bridge:activity] sessionId=${sessionId} tool_use name=${name} ${inputPreview(input)}`,
|
||||
)
|
||||
onDebug(`[bridge:activity] tool_use name=${name}`)
|
||||
} else if (b.type === 'text') {
|
||||
const text = (b.text as string) ?? ''
|
||||
if (text.length > 0) {
|
||||
@@ -156,7 +208,7 @@ function extractActivities(
|
||||
timestamp: now,
|
||||
})
|
||||
onDebug(
|
||||
`[bridge:activity] sessionId=${sessionId} text "${text.slice(0, 100)}"`,
|
||||
`[bridge:activity] text length=${text.length}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -171,9 +223,7 @@ function extractActivities(
|
||||
summary: 'Session completed',
|
||||
timestamp: now,
|
||||
})
|
||||
onDebug(
|
||||
`[bridge:activity] sessionId=${sessionId} result subtype=success`,
|
||||
)
|
||||
onDebug('[bridge:activity] result subtype=success')
|
||||
} else if (subtype) {
|
||||
const errors = msg.errors as string[] | undefined
|
||||
const errorSummary = errors?.[0] ?? `Error: ${subtype}`
|
||||
@@ -182,13 +232,9 @@ function extractActivities(
|
||||
summary: errorSummary,
|
||||
timestamp: now,
|
||||
})
|
||||
onDebug(
|
||||
`[bridge:activity] sessionId=${sessionId} result subtype=${subtype} error="${errorSummary}"`,
|
||||
)
|
||||
onDebug(`[bridge:activity] result subtype=${subtype}`)
|
||||
} else {
|
||||
onDebug(
|
||||
`[bridge:activity] sessionId=${sessionId} result subtype=undefined`,
|
||||
)
|
||||
onDebug('[bridge:activity] result subtype=undefined')
|
||||
}
|
||||
break
|
||||
}
|
||||
@@ -233,18 +279,6 @@ function extractUserMessageText(
|
||||
return text ? text : undefined
|
||||
}
|
||||
|
||||
/** Build a short preview of tool input for debug logging. */
|
||||
function inputPreview(input: Record<string, unknown>): string {
|
||||
const parts: string[] = []
|
||||
for (const [key, val] of Object.entries(input)) {
|
||||
if (typeof val === 'string') {
|
||||
parts.push(`${key}="${val.slice(0, 100)}"`)
|
||||
}
|
||||
if (parts.length >= 3) break
|
||||
}
|
||||
return parts.join(' ')
|
||||
}
|
||||
|
||||
export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
return {
|
||||
spawn(opts: SessionSpawnOpts, dir: string): SessionHandle {
|
||||
@@ -277,11 +311,15 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
transcriptStream = createWriteStream(transcriptPath, { flags: 'a' })
|
||||
transcriptStream.on('error', err => {
|
||||
deps.onDebug(
|
||||
`[bridge:session] Transcript write error: ${err.message}`,
|
||||
`[bridge:session] Transcript write error: ${summarizeSessionRunnerErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
transcriptStream = null
|
||||
})
|
||||
deps.onDebug(`[bridge:session] Transcript log: ${transcriptPath}`)
|
||||
deps.onDebug(
|
||||
`[bridge:session] Transcript log configured (${basename(transcriptPath)})`,
|
||||
)
|
||||
}
|
||||
|
||||
const args = [
|
||||
@@ -323,11 +361,15 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
}
|
||||
|
||||
deps.onDebug(
|
||||
`[bridge:session] Spawning sessionId=${opts.sessionId} sdkUrl=${opts.sdkUrl} accessToken=${opts.accessToken ? 'present' : 'MISSING'}`,
|
||||
`[bridge:session] Spawning child session process (accessToken=${opts.accessToken ? 'present' : 'MISSING'})`,
|
||||
)
|
||||
deps.onDebug(
|
||||
`[bridge:session] Child args: ${summarizeSessionRunnerArgsForDebug(args)}`,
|
||||
)
|
||||
deps.onDebug(`[bridge:session] Child args: ${args.join(' ')}`)
|
||||
if (debugFile) {
|
||||
deps.onDebug(`[bridge:session] Debug log: ${debugFile}`)
|
||||
deps.onDebug(
|
||||
`[bridge:session] Debug log configured (${basename(debugFile)})`,
|
||||
)
|
||||
}
|
||||
|
||||
// Pipe all three streams: stdin for control, stdout for NDJSON parsing,
|
||||
@@ -339,9 +381,7 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
windowsHide: true,
|
||||
})
|
||||
|
||||
deps.onDebug(
|
||||
`[bridge:session] sessionId=${opts.sessionId} pid=${child.pid}`,
|
||||
)
|
||||
deps.onDebug('[bridge:session] Child process started')
|
||||
|
||||
const activities: SessionActivity[] = []
|
||||
let currentActivity: SessionActivity | null = null
|
||||
@@ -376,7 +416,7 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
|
||||
// Log all messages flowing from the child CLI to the bridge
|
||||
deps.onDebug(
|
||||
`[bridge:ws] sessionId=${opts.sessionId} <<< ${debugTruncate(line)}`,
|
||||
`[bridge:ws] <<< ${summarizeSessionRunnerFrameForDebug(line)}`,
|
||||
)
|
||||
|
||||
// In verbose mode, forward raw output to stderr
|
||||
@@ -455,25 +495,23 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
|
||||
if (signal === 'SIGTERM' || signal === 'SIGINT') {
|
||||
deps.onDebug(
|
||||
`[bridge:session] sessionId=${opts.sessionId} interrupted signal=${signal} pid=${child.pid}`,
|
||||
`[bridge:session] interrupted signal=${signal ?? 'unknown'}`,
|
||||
)
|
||||
resolve('interrupted')
|
||||
} else if (code === 0) {
|
||||
deps.onDebug(
|
||||
`[bridge:session] sessionId=${opts.sessionId} completed exit_code=0 pid=${child.pid}`,
|
||||
)
|
||||
deps.onDebug('[bridge:session] completed exit_code=0')
|
||||
resolve('completed')
|
||||
} else {
|
||||
deps.onDebug(
|
||||
`[bridge:session] sessionId=${opts.sessionId} failed exit_code=${code} pid=${child.pid}`,
|
||||
)
|
||||
deps.onDebug(`[bridge:session] failed exit_code=${code}`)
|
||||
resolve('failed')
|
||||
}
|
||||
})
|
||||
|
||||
child.on('error', err => {
|
||||
deps.onDebug(
|
||||
`[bridge:session] sessionId=${opts.sessionId} spawn error: ${err.message}`,
|
||||
`[bridge:session] spawn error: ${summarizeSessionRunnerErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
resolve('failed')
|
||||
})
|
||||
@@ -490,9 +528,7 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
},
|
||||
kill(): void {
|
||||
if (!child.killed) {
|
||||
deps.onDebug(
|
||||
`[bridge:session] Sending SIGTERM to sessionId=${opts.sessionId} pid=${child.pid}`,
|
||||
)
|
||||
deps.onDebug('[bridge:session] Sending SIGTERM to child process')
|
||||
// On Windows, child.kill('SIGTERM') throws; use default signal.
|
||||
if (process.platform === 'win32') {
|
||||
child.kill()
|
||||
@@ -506,9 +542,7 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
// not when the process exits. We need to send SIGKILL even after SIGTERM.
|
||||
if (!sigkillSent && child.pid) {
|
||||
sigkillSent = true
|
||||
deps.onDebug(
|
||||
`[bridge:session] Sending SIGKILL to sessionId=${opts.sessionId} pid=${child.pid}`,
|
||||
)
|
||||
deps.onDebug('[bridge:session] Sending SIGKILL to child process')
|
||||
if (process.platform === 'win32') {
|
||||
child.kill()
|
||||
} else {
|
||||
@@ -519,7 +553,7 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
writeStdin(data: string): void {
|
||||
if (child.stdin && !child.stdin.destroyed) {
|
||||
deps.onDebug(
|
||||
`[bridge:ws] sessionId=${opts.sessionId} >>> ${debugTruncate(data)}`,
|
||||
`[bridge:ws] >>> ${summarizeSessionRunnerFrameForDebug(data)}`,
|
||||
)
|
||||
child.stdin.write(data)
|
||||
}
|
||||
@@ -536,9 +570,7 @@ export function createSessionSpawner(deps: SessionSpawnerDeps): SessionSpawner {
|
||||
variables: { CLAUDE_CODE_SESSION_ACCESS_TOKEN: token },
|
||||
}) + '\n',
|
||||
)
|
||||
deps.onDebug(
|
||||
`[bridge:session] Sent token refresh via stdin for sessionId=${opts.sessionId}`,
|
||||
)
|
||||
deps.onDebug('[bridge:session] Sent token refresh via stdin')
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -2,35 +2,19 @@ import { logForDebugging } from '../utils/debug.js'
|
||||
import { getSecureStorage } from '../utils/secureStorage/index.js'
|
||||
|
||||
/**
|
||||
* Trusted device token source for bridge (remote-control) sessions.
|
||||
* Trusted-device compatibility helpers for bridge (remote-control) sessions.
|
||||
*
|
||||
* Bridge sessions have SecurityTier=ELEVATED on the server (CCR v2).
|
||||
* The server gates ConnectBridgeWorker on its own flag
|
||||
* (sessions_elevated_auth_enforcement in Anthropic Main); this CLI-side
|
||||
* flag controls whether the CLI sends X-Trusted-Device-Token at all.
|
||||
* Two flags so rollout can be staged: flip CLI-side first (headers
|
||||
* start flowing, server still no-ops), then flip server-side.
|
||||
*
|
||||
* Enrollment (POST /auth/trusted_devices) is gated server-side by
|
||||
* account_session.created_at < 10min, so it must happen during /login.
|
||||
* Token is persistent (90d rolling expiry) and stored in keychain.
|
||||
*
|
||||
* See anthropics/anthropic#274559 (spec), #310375 (B1b tenant RPCs),
|
||||
* #295987 (B2 Python routes), #307150 (C1' CCR v2 gate).
|
||||
* This fork disables trusted-device enrollment and header emission. The
|
||||
* remaining helpers only clear any previously stored token during login/logout
|
||||
* so old state is not carried forward.
|
||||
*/
|
||||
|
||||
export function getTrustedDeviceToken(): string | undefined {
|
||||
return undefined
|
||||
}
|
||||
|
||||
export function clearTrustedDeviceTokenCache(): void {
|
||||
return
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the stored trusted device token from secure storage and the memo cache.
|
||||
* Called during /login so a stale token from the previous account isn't sent
|
||||
* as X-Trusted-Device-Token after account switches.
|
||||
* Clear any stored trusted-device token from secure storage.
|
||||
*/
|
||||
export function clearTrustedDeviceToken(): void {
|
||||
const secureStorage = getSecureStorage()
|
||||
|
||||
@@ -2,6 +2,33 @@ import axios from 'axios'
|
||||
import { jsonParse, jsonStringify } from '../utils/slowOperations.js'
|
||||
import type { WorkSecret } from './types.js'
|
||||
|
||||
function summarizeRegisterWorkerResponseForDebug(data: unknown): string {
|
||||
if (data === null) return 'null'
|
||||
if (data === undefined) return 'undefined'
|
||||
if (Array.isArray(data)) {
|
||||
return jsonStringify({
|
||||
payloadType: 'array',
|
||||
length: data.length,
|
||||
})
|
||||
}
|
||||
if (typeof data === 'object') {
|
||||
const value = data as Record<string, unknown>
|
||||
return jsonStringify({
|
||||
payloadType: 'object',
|
||||
keys: Object.keys(value)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
hasWorkerEpoch:
|
||||
typeof value.worker_epoch === 'number' ||
|
||||
typeof value.worker_epoch === 'string',
|
||||
hasSessionIngressToken:
|
||||
typeof value.session_ingress_token === 'string',
|
||||
hasApiBaseUrl: typeof value.api_base_url === 'string',
|
||||
})
|
||||
}
|
||||
return typeof data
|
||||
}
|
||||
|
||||
/** Decode a base64url-encoded work secret and validate its version. */
|
||||
export function decodeWorkSecret(secret: string): WorkSecret {
|
||||
const json = Buffer.from(secret, 'base64url').toString('utf-8')
|
||||
@@ -120,7 +147,9 @@ export async function registerWorker(
|
||||
!Number.isSafeInteger(epoch)
|
||||
) {
|
||||
throw new Error(
|
||||
`registerWorker: invalid worker_epoch in response: ${jsonStringify(response.data)}`,
|
||||
`registerWorker: invalid worker_epoch in response: ${summarizeRegisterWorkerResponseForDebug(
|
||||
response.data,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
return epoch
|
||||
|
||||
@@ -34,11 +34,10 @@ export async function call(onDone: LocalJSXCommandOnDone, context: LocalJSXComma
|
||||
resetUserCache();
|
||||
// Refresh GrowthBook after login to get updated feature flags (e.g., for claude.ai MCPs)
|
||||
refreshGrowthBookAfterAuthChange();
|
||||
// Clear any stale trusted device token from a previous account before
|
||||
// re-enrolling — prevents sending the old token on bridge calls while
|
||||
// the async enrollTrustedDevice() is in-flight.
|
||||
// Clear any stale trusted-device token from a previous account before
|
||||
// running the disabled enrollment stub so old bridge state is discarded.
|
||||
clearTrustedDeviceToken();
|
||||
// Enroll as a trusted device for Remote Control (10-min fresh-session window)
|
||||
// Keep the login flow aligned with builds that still import the helper.
|
||||
void enrollTrustedDevice();
|
||||
// Reset killswitch gate checks and re-run with new org
|
||||
resetBypassPermissionsCheck();
|
||||
|
||||
@@ -2,8 +2,6 @@ import * as React from 'react';
|
||||
import { useCallback, useEffect, useState } from 'react';
|
||||
import { readFile, stat } from 'fs/promises';
|
||||
import { getLastAPIRequest } from 'src/bootstrap/state.js';
|
||||
import { logEventTo1P } from 'src/services/analytics/firstPartyEventLogger.js';
|
||||
import { type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS, logEvent } from 'src/services/analytics/index.js';
|
||||
import { getLastAssistantMessage, normalizeMessagesForAPI } from 'src/utils/messages.js';
|
||||
import type { CommandResultDisplay } from '../commands.js';
|
||||
import { useTerminalSize } from '../hooks/useTerminalSize.js';
|
||||
|
||||
14
src/components/FeedbackSurvey/submitTranscriptShare.test.ts
Normal file
14
src/components/FeedbackSurvey/submitTranscriptShare.test.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { describe, expect, it } from 'bun:test'
|
||||
|
||||
import { submitTranscriptShare } from './submitTranscriptShare.js'
|
||||
|
||||
describe('submitTranscriptShare', () => {
|
||||
it('returns the disabled result in this build', async () => {
|
||||
await expect(
|
||||
submitTranscriptShare([], 'good_feedback_survey', 'appearance-id'),
|
||||
).resolves.toEqual({
|
||||
success: false,
|
||||
disabled: true,
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -9,7 +9,6 @@ import { isEnvTruthy } from '../../utils/envUtils.js';
|
||||
import { getLastAssistantMessage } from '../../utils/messages.js';
|
||||
import { getMainLoopModel } from '../../utils/model/model.js';
|
||||
import { getInitialSettings } from '../../utils/settings/settings.js';
|
||||
import { logOTelEvent } from '../../utils/telemetry/events.js';
|
||||
import { submitTranscriptShare, type TranscriptShareTrigger } from './submitTranscriptShare.js';
|
||||
import type { TranscriptShareResponse } from './TranscriptSharePrompt.js';
|
||||
import { useSurveyState } from './useSurveyState.js';
|
||||
@@ -99,11 +98,6 @@ export function useFeedbackSurvey(messages: Message[], isLoading: boolean, submi
|
||||
last_assistant_message_id: lastAssistantMessageIdRef.current as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
survey_type: surveyType as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
});
|
||||
void logOTelEvent('feedback_survey', {
|
||||
event_type: 'appeared',
|
||||
appearance_id: appearanceId,
|
||||
survey_type: surveyType
|
||||
});
|
||||
}, [updateLastShownTime, surveyType]);
|
||||
const onSelect = useCallback((appearanceId_0: string, selected: FeedbackSurveyResponse) => {
|
||||
updateLastShownTime(Date.now(), submitCountRef.current);
|
||||
@@ -114,12 +108,6 @@ export function useFeedbackSurvey(messages: Message[], isLoading: boolean, submi
|
||||
last_assistant_message_id: lastAssistantMessageIdRef.current as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
survey_type: surveyType as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
});
|
||||
void logOTelEvent('feedback_survey', {
|
||||
event_type: 'responded',
|
||||
appearance_id: appearanceId_0,
|
||||
response: selected,
|
||||
survey_type: surveyType
|
||||
});
|
||||
}, [updateLastShownTime, surveyType]);
|
||||
const shouldShowTranscriptPrompt = useCallback((selected_0: FeedbackSurveyResponse) => {
|
||||
// Only bad and good ratings trigger the transcript ask
|
||||
@@ -150,11 +138,6 @@ export function useFeedbackSurvey(messages: Message[], isLoading: boolean, submi
|
||||
survey_type: surveyType as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
trigger: trigger as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
});
|
||||
void logOTelEvent('feedback_survey', {
|
||||
event_type: 'transcript_prompt_appeared',
|
||||
appearance_id: appearanceId_1,
|
||||
survey_type: surveyType
|
||||
});
|
||||
}, [surveyType]);
|
||||
const onTranscriptSelect = useCallback(async (appearanceId_2: string, selected_1: TranscriptShareResponse, surveyResponse_0: FeedbackSurveyResponse | null): Promise<boolean> => {
|
||||
const trigger_0: TranscriptShareTrigger = surveyResponse_0 === 'good' ? 'good_feedback_survey' : 'bad_feedback_survey';
|
||||
|
||||
@@ -10,7 +10,6 @@ import { getGlobalConfig, saveGlobalConfig } from '../../utils/config.js';
|
||||
import { isEnvTruthy } from '../../utils/envUtils.js';
|
||||
import { isAutoManagedMemoryFile } from '../../utils/memoryFileDetection.js';
|
||||
import { extractTextContent, getLastAssistantMessage } from '../../utils/messages.js';
|
||||
import { logOTelEvent } from '../../utils/telemetry/events.js';
|
||||
import { submitTranscriptShare } from './submitTranscriptShare.js';
|
||||
import type { TranscriptShareResponse } from './TranscriptSharePrompt.js';
|
||||
import { useSurveyState } from './useSurveyState.js';
|
||||
@@ -67,11 +66,6 @@ export function useMemorySurvey(messages: Message[], isLoading: boolean, hasActi
|
||||
event_type: 'appeared' as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
appearance_id: appearanceId as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
});
|
||||
void logOTelEvent('feedback_survey', {
|
||||
event_type: 'appeared',
|
||||
appearance_id: appearanceId,
|
||||
survey_type: 'memory'
|
||||
});
|
||||
}, []);
|
||||
const onSelect = useCallback((appearanceId_0: string, selected: FeedbackSurveyResponse) => {
|
||||
logEvent(MEMORY_SURVEY_EVENT, {
|
||||
@@ -79,12 +73,6 @@ export function useMemorySurvey(messages: Message[], isLoading: boolean, hasActi
|
||||
appearance_id: appearanceId_0 as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
response: selected as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
});
|
||||
void logOTelEvent('feedback_survey', {
|
||||
event_type: 'responded',
|
||||
appearance_id: appearanceId_0,
|
||||
response: selected,
|
||||
survey_type: 'memory'
|
||||
});
|
||||
}, []);
|
||||
const shouldShowTranscriptPrompt = useCallback((selected_0: FeedbackSurveyResponse) => {
|
||||
if ("external" !== 'ant') {
|
||||
@@ -107,11 +95,6 @@ export function useMemorySurvey(messages: Message[], isLoading: boolean, hasActi
|
||||
appearance_id: appearanceId_1 as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
trigger: TRANSCRIPT_SHARE_TRIGGER as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
});
|
||||
void logOTelEvent('feedback_survey', {
|
||||
event_type: 'transcript_prompt_appeared',
|
||||
appearance_id: appearanceId_1,
|
||||
survey_type: 'memory'
|
||||
});
|
||||
}, []);
|
||||
const onTranscriptSelect = useCallback(async (appearanceId_2: string, selected_1: TranscriptShareResponse): Promise<boolean> => {
|
||||
logEvent(MEMORY_SURVEY_EVENT, {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -5,7 +5,6 @@ import * as React from 'react';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { useInterval } from 'usehooks-ts';
|
||||
import { useRegisterOverlay } from '../../context/overlayContext.js';
|
||||
import { stringWidth } from '../../ink/stringWidth.js';
|
||||
// eslint-disable-next-line custom-rules/prefer-use-keybindings -- raw j/k/arrow dialog navigation
|
||||
import { Box, Text, useInput } from '../../ink.js';
|
||||
import { useKeybindings } from '../../keybindings/useKeybinding.js';
|
||||
@@ -15,7 +14,6 @@ import { getEmptyToolPermissionContext } from '../../Tool.js';
|
||||
import { AGENT_COLOR_TO_THEME_COLOR } from '../../tools/AgentTool/agentColorManager.js';
|
||||
import { logForDebugging } from '../../utils/debug.js';
|
||||
import { execFileNoThrow } from '../../utils/execFileNoThrow.js';
|
||||
import { truncateToWidth } from '../../utils/format.js';
|
||||
import { getNextPermissionMode } from '../../utils/permissions/getNextPermissionMode.js';
|
||||
import { getModeColor, type PermissionMode, permissionModeFromString, permissionModeSymbol } from '../../utils/permissions/PermissionMode.js';
|
||||
import { jsonStringify } from '../../utils/slowOperations.js';
|
||||
@@ -381,7 +379,6 @@ function TeammateDetailView(t0) {
|
||||
teamName,
|
||||
onCancel
|
||||
} = t0;
|
||||
const [promptExpanded, setPromptExpanded] = useState(false);
|
||||
const cycleModeShortcut = useShortcutDisplay("confirm:cycleMode", "Confirmation", "shift+tab");
|
||||
const themeColor = teammate.color ? AGENT_COLOR_TO_THEME_COLOR[teammate.color as keyof typeof AGENT_COLOR_TO_THEME_COLOR] : undefined;
|
||||
let t1;
|
||||
@@ -418,18 +415,6 @@ function TeammateDetailView(t0) {
|
||||
t3 = $[5];
|
||||
}
|
||||
useEffect(t2, t3);
|
||||
let t4;
|
||||
if ($[6] === Symbol.for("react.memo_cache_sentinel")) {
|
||||
t4 = input => {
|
||||
if (input === "p") {
|
||||
setPromptExpanded(_temp);
|
||||
}
|
||||
};
|
||||
$[6] = t4;
|
||||
} else {
|
||||
t4 = $[6];
|
||||
}
|
||||
useInput(t4);
|
||||
const workingPath = teammate.worktreePath || teammate.cwd;
|
||||
let subtitleParts;
|
||||
if ($[7] !== teammate.model || $[8] !== teammate.worktreePath || $[9] !== workingPath) {
|
||||
@@ -498,21 +483,11 @@ function TeammateDetailView(t0) {
|
||||
} else {
|
||||
t9 = $[24];
|
||||
}
|
||||
let t10;
|
||||
if ($[25] !== promptExpanded || $[26] !== teammate.prompt) {
|
||||
t10 = teammate.prompt && <Box flexDirection="column"><Text bold={true}>Prompt</Text><Text>{promptExpanded ? teammate.prompt : truncateToWidth(teammate.prompt, 80)}{stringWidth(teammate.prompt) > 80 && !promptExpanded && <Text dimColor={true}> (p to expand)</Text>}</Text></Box>;
|
||||
$[25] = promptExpanded;
|
||||
$[26] = teammate.prompt;
|
||||
$[27] = t10;
|
||||
} else {
|
||||
t10 = $[27];
|
||||
}
|
||||
let t11;
|
||||
if ($[28] !== onCancel || $[29] !== subtitle || $[30] !== t10 || $[31] !== t9 || $[32] !== title) {
|
||||
t11 = <Dialog title={title} subtitle={subtitle} onCancel={onCancel} color="background" hideInputGuide={true}>{t9}{t10}</Dialog>;
|
||||
if ($[28] !== onCancel || $[29] !== subtitle || $[31] !== t9 || $[32] !== title) {
|
||||
t11 = <Dialog title={title} subtitle={subtitle} onCancel={onCancel} color="background" hideInputGuide={true}>{t9}</Dialog>;
|
||||
$[28] = onCancel;
|
||||
$[29] = subtitle;
|
||||
$[30] = t10;
|
||||
$[31] = t9;
|
||||
$[32] = title;
|
||||
$[33] = t11;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// Centralized analytics/telemetry logging for tool permission decisions.
|
||||
// All permission approve/reject events flow through logPermissionDecision(),
|
||||
// which fans out to Statsig analytics, OTel telemetry, and code-edit metrics.
|
||||
// which fans out to analytics compatibility calls and code-edit metrics.
|
||||
import { feature } from 'bun:bundle'
|
||||
import {
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
@@ -11,7 +11,6 @@ import { getCodeEditToolDecisionCounter } from '../../bootstrap/state.js'
|
||||
import type { Tool as ToolType, ToolUseContext } from '../../Tool.js'
|
||||
import { getLanguageName } from '../../utils/cliHighlight.js'
|
||||
import { SandboxManager } from '../../utils/sandbox/sandbox-adapter.js'
|
||||
import { logOTelEvent } from '../../utils/telemetry/events.js'
|
||||
import type {
|
||||
PermissionApprovalSource,
|
||||
PermissionRejectionSource,
|
||||
@@ -227,11 +226,6 @@ function logPermissionDecision(
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
|
||||
void logOTelEvent('tool_decision', {
|
||||
decision,
|
||||
source: sourceString,
|
||||
tool_name: sanitizeToolNameForAnalytics(tool.name),
|
||||
})
|
||||
}
|
||||
|
||||
export { isCodeEditingTool, buildCodeEditToolAttributes, logPermissionDecision }
|
||||
|
||||
@@ -59,7 +59,7 @@ import {
|
||||
isShutdownApproved,
|
||||
isShutdownRequest,
|
||||
isTeamPermissionUpdate,
|
||||
markMessagesAsRead,
|
||||
markMessagesAsReadByPredicate,
|
||||
readUnreadMessages,
|
||||
type TeammateMessage,
|
||||
writeToMailbox,
|
||||
@@ -195,10 +195,20 @@ export function useInboxPoller({
|
||||
}
|
||||
}
|
||||
|
||||
// Helper to mark messages as read in the inbox file.
|
||||
// Helper to remove the unread batch we just processed from the inbox file.
|
||||
// Called after messages are successfully delivered or reliably queued.
|
||||
const deliveredMessageKeys = new Set(
|
||||
unread.map(message => `${message.from}|${message.timestamp}|${message.text}`),
|
||||
)
|
||||
const markRead = () => {
|
||||
void markMessagesAsRead(agentName, currentAppState.teamContext?.teamName)
|
||||
void markMessagesAsReadByPredicate(
|
||||
agentName,
|
||||
message =>
|
||||
deliveredMessageKeys.has(
|
||||
`${message.from}|${message.timestamp}|${message.text}`,
|
||||
),
|
||||
currentAppState.teamContext?.teamName,
|
||||
)
|
||||
}
|
||||
|
||||
// Separate permission messages from regular teammate messages
|
||||
@@ -503,9 +513,7 @@ export function useInboxPoller({
|
||||
for (const m of teamPermissionUpdates) {
|
||||
const parsed = isTeamPermissionUpdate(m.text)
|
||||
if (!parsed) {
|
||||
logForDebugging(
|
||||
`[InboxPoller] Failed to parse team permission update: ${m.text.substring(0, 100)}`,
|
||||
)
|
||||
logForDebugging('[InboxPoller] Failed to parse team permission update')
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -522,10 +530,7 @@ export function useInboxPoller({
|
||||
|
||||
// Apply the permission update to the teammate's context
|
||||
logForDebugging(
|
||||
`[InboxPoller] Applying team permission update: ${parsed.toolName} allowed in ${parsed.directoryPath}`,
|
||||
)
|
||||
logForDebugging(
|
||||
`[InboxPoller] Permission update rules: ${jsonStringify(parsed.permissionUpdate.rules)}`,
|
||||
`[InboxPoller] Applying team permission update for ${parsed.toolName} (${parsed.permissionUpdate.rules.length} rule(s))`,
|
||||
)
|
||||
|
||||
setAppState(prev => {
|
||||
@@ -536,7 +541,7 @@ export function useInboxPoller({
|
||||
destination: 'session',
|
||||
})
|
||||
logForDebugging(
|
||||
`[InboxPoller] Updated session allow rules: ${jsonStringify(updated.alwaysAllowRules.session)}`,
|
||||
`[InboxPoller] Updated session allow rules (${updated.alwaysAllowRules.session.length} total)`,
|
||||
)
|
||||
return {
|
||||
...prev,
|
||||
@@ -563,9 +568,7 @@ export function useInboxPoller({
|
||||
|
||||
const parsed = isModeSetRequest(m.text)
|
||||
if (!parsed) {
|
||||
logForDebugging(
|
||||
`[InboxPoller] Failed to parse mode set request: ${m.text.substring(0, 100)}`,
|
||||
)
|
||||
logForDebugging('[InboxPoller] Failed to parse mode set request')
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
@@ -1,31 +1,16 @@
|
||||
/**
|
||||
* Swarm Permission Poller Hook
|
||||
* Swarm permission callback registry helpers.
|
||||
*
|
||||
* This hook polls for permission responses from the team leader when running
|
||||
* as a worker agent in a swarm. When a response is received, it calls the
|
||||
* appropriate callback (onAllow/onReject) to continue execution.
|
||||
*
|
||||
* This hook should be used in conjunction with the worker-side integration
|
||||
* in useCanUseTool.ts, which creates pending requests that this hook monitors.
|
||||
* Permission requests/responses now flow entirely through teammate mailboxes.
|
||||
* Workers register callbacks here, and the inbox poller dispatches mailbox
|
||||
* responses back into those callbacks.
|
||||
*/
|
||||
|
||||
import { useCallback, useEffect, useRef } from 'react'
|
||||
import { useInterval } from 'usehooks-ts'
|
||||
import { logForDebugging } from '../utils/debug.js'
|
||||
import { errorMessage } from '../utils/errors.js'
|
||||
import {
|
||||
type PermissionUpdate,
|
||||
permissionUpdateSchema,
|
||||
} from '../utils/permissions/PermissionUpdateSchema.js'
|
||||
import {
|
||||
isSwarmWorker,
|
||||
type PermissionResponse,
|
||||
pollForResponse,
|
||||
removeWorkerResponse,
|
||||
} from '../utils/swarm/permissionSync.js'
|
||||
import { getAgentName, getTeamName } from '../utils/teammate.js'
|
||||
|
||||
const POLL_INTERVAL_MS = 500
|
||||
|
||||
/**
|
||||
* Validate permissionUpdates from external sources (mailbox IPC, disk polling).
|
||||
@@ -226,105 +211,9 @@ export function processSandboxPermissionResponse(params: {
|
||||
}
|
||||
|
||||
/**
|
||||
* Process a permission response by invoking the registered callback
|
||||
*/
|
||||
function processResponse(response: PermissionResponse): boolean {
|
||||
const callback = pendingCallbacks.get(response.requestId)
|
||||
|
||||
if (!callback) {
|
||||
logForDebugging(
|
||||
`[SwarmPermissionPoller] No callback registered for request ${response.requestId}`,
|
||||
)
|
||||
return false
|
||||
}
|
||||
|
||||
logForDebugging(
|
||||
`[SwarmPermissionPoller] Processing response for request ${response.requestId}: ${response.decision}`,
|
||||
)
|
||||
|
||||
// Remove from registry before invoking callback
|
||||
pendingCallbacks.delete(response.requestId)
|
||||
|
||||
if (response.decision === 'approved') {
|
||||
const permissionUpdates = parsePermissionUpdates(response.permissionUpdates)
|
||||
const updatedInput = response.updatedInput
|
||||
callback.onAllow(updatedInput, permissionUpdates)
|
||||
} else {
|
||||
callback.onReject(response.feedback)
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook that polls for permission responses when running as a swarm worker.
|
||||
*
|
||||
* This hook:
|
||||
* 1. Only activates when isSwarmWorker() returns true
|
||||
* 2. Polls every 500ms for responses
|
||||
* 3. When a response is found, invokes the registered callback
|
||||
* 4. Cleans up the response file after processing
|
||||
* Legacy no-op hook kept for compatibility with older imports.
|
||||
* Mailbox responses are handled by useInboxPoller instead of disk polling.
|
||||
*/
|
||||
export function useSwarmPermissionPoller(): void {
|
||||
const isProcessingRef = useRef(false)
|
||||
|
||||
const poll = useCallback(async () => {
|
||||
// Don't poll if not a swarm worker
|
||||
if (!isSwarmWorker()) {
|
||||
return
|
||||
}
|
||||
|
||||
// Prevent concurrent polling
|
||||
if (isProcessingRef.current) {
|
||||
return
|
||||
}
|
||||
|
||||
// Don't poll if no callbacks are registered
|
||||
if (pendingCallbacks.size === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
isProcessingRef.current = true
|
||||
|
||||
try {
|
||||
const agentName = getAgentName()
|
||||
const teamName = getTeamName()
|
||||
|
||||
if (!agentName || !teamName) {
|
||||
return
|
||||
}
|
||||
|
||||
// Check each pending request for a response
|
||||
for (const [requestId, _callback] of pendingCallbacks) {
|
||||
const response = await pollForResponse(requestId, agentName, teamName)
|
||||
|
||||
if (response) {
|
||||
// Process the response
|
||||
const processed = processResponse(response)
|
||||
|
||||
if (processed) {
|
||||
// Clean up the response from the worker's inbox
|
||||
await removeWorkerResponse(requestId, agentName, teamName)
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logForDebugging(
|
||||
`[SwarmPermissionPoller] Error during poll: ${errorMessage(error)}`,
|
||||
)
|
||||
} finally {
|
||||
isProcessingRef.current = false
|
||||
}
|
||||
}, [])
|
||||
|
||||
// Only poll if we're a swarm worker
|
||||
const shouldPoll = isSwarmWorker()
|
||||
useInterval(() => void poll(), shouldPoll ? POLL_INTERVAL_MS : null)
|
||||
|
||||
// Initial poll on mount
|
||||
useEffect(() => {
|
||||
if (isSwarmWorker()) {
|
||||
void poll()
|
||||
}
|
||||
}, [poll])
|
||||
// Intentionally empty.
|
||||
}
|
||||
|
||||
14
src/main.tsx
14
src/main.tsx
@@ -864,11 +864,8 @@ async function run(): Promise<CommanderCommand> {
|
||||
process.title = 'claude';
|
||||
}
|
||||
|
||||
// Attach logging sinks so subcommand handlers can use logEvent/logError.
|
||||
// Before PR #11106 logEvent dispatched directly; after, events queue until
|
||||
// a sink attaches. setup() attaches sinks for the default command, but
|
||||
// subcommands (doctor, mcp, plugin, auth) never call setup() and would
|
||||
// silently drop events on process.exit(). Both inits are idempotent.
|
||||
// Attach shared sinks for subcommands that bypass setup(). Today this is
|
||||
// just the local error-log sink; analytics/event logging is already inert.
|
||||
const {
|
||||
initSinks
|
||||
} = await import('./utils/sinks.js');
|
||||
@@ -2226,11 +2223,8 @@ async function run(): Promise<CommanderCommand> {
|
||||
resetUserCache();
|
||||
// Refresh GrowthBook after login to get updated feature flags (e.g., for claude.ai MCPs)
|
||||
refreshGrowthBookAfterAuthChange();
|
||||
// Clear any stale trusted device token then enroll for Remote Control.
|
||||
// Both self-gate on tengu_sessions_elevated_auth_enforcement internally
|
||||
// — enrollTrustedDevice() via checkGate_CACHED_OR_BLOCKING (awaits
|
||||
// the GrowthBook reinit above), clearTrustedDeviceToken() via the
|
||||
// sync cached check (acceptable since clear is idempotent).
|
||||
// Clear any stale trusted-device token, then run the no-op enrollment
|
||||
// stub so the disabled bridge path stays consistent after login.
|
||||
void import('./bridge/trustedDevice.js').then(m => {
|
||||
m.clearTrustedDeviceToken();
|
||||
return m.enrollTrustedDevice();
|
||||
|
||||
@@ -8,7 +8,6 @@ import type {
|
||||
SDKControlResponse,
|
||||
} from '../entrypoints/sdk/controlTypes.ts'
|
||||
import { logForDebugging } from '../utils/debug.js'
|
||||
import { errorMessage } from '../utils/errors.js'
|
||||
import { logError } from '../utils/log.js'
|
||||
import { getWebSocketTLSOptions } from '../utils/mtls.js'
|
||||
import { getWebSocketProxyAgent, getWebSocketProxyUrl } from '../utils/proxy.js'
|
||||
@@ -54,6 +53,16 @@ function isSessionsMessage(value: unknown): value is SessionsMessage {
|
||||
return typeof value.type === 'string'
|
||||
}
|
||||
|
||||
function summarizeSessionsWebSocketErrorForDebug(error: unknown): string {
|
||||
return jsonStringify({
|
||||
errorType:
|
||||
error instanceof Error ? error.constructor.name : typeof error,
|
||||
errorName: error instanceof Error ? error.name : undefined,
|
||||
hasMessage: error instanceof Error ? error.message.length > 0 : false,
|
||||
hasStack: error instanceof Error ? Boolean(error.stack) : false,
|
||||
})
|
||||
}
|
||||
|
||||
export type SessionsWebSocketCallbacks = {
|
||||
onMessage: (message: SessionsMessage) => void
|
||||
onClose?: () => void
|
||||
@@ -108,7 +117,9 @@ export class SessionsWebSocket {
|
||||
const baseUrl = getOauthConfig().BASE_API_URL.replace('https://', 'wss://')
|
||||
const url = `${baseUrl}/v1/sessions/ws/${this.sessionId}/subscribe?organization_uuid=${this.orgUuid}`
|
||||
|
||||
logForDebugging(`[SessionsWebSocket] Connecting to ${url}`)
|
||||
logForDebugging(
|
||||
'[SessionsWebSocket] Connecting to session subscription endpoint',
|
||||
)
|
||||
|
||||
// Get fresh token for each connection attempt
|
||||
const accessToken = this.getAccessToken()
|
||||
@@ -152,9 +163,7 @@ export class SessionsWebSocket {
|
||||
|
||||
// eslint-disable-next-line eslint-plugin-n/no-unsupported-features/node-builtins
|
||||
ws.addEventListener('close', (event: CloseEvent) => {
|
||||
logForDebugging(
|
||||
`[SessionsWebSocket] Closed: code=${event.code} reason=${event.reason}`,
|
||||
)
|
||||
logForDebugging(`[SessionsWebSocket] Closed: code=${event.code}`)
|
||||
this.handleClose(event.code)
|
||||
})
|
||||
|
||||
@@ -187,14 +196,19 @@ export class SessionsWebSocket {
|
||||
})
|
||||
|
||||
ws.on('error', (err: Error) => {
|
||||
logError(new Error(`[SessionsWebSocket] Error: ${err.message}`))
|
||||
logError(
|
||||
new Error(
|
||||
`[SessionsWebSocket] Error: ${summarizeSessionsWebSocketErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
this.callbacks.onError?.(err)
|
||||
})
|
||||
|
||||
ws.on('close', (code: number, reason: Buffer) => {
|
||||
logForDebugging(
|
||||
`[SessionsWebSocket] Closed: code=${code} reason=${reason.toString()}`,
|
||||
)
|
||||
void reason
|
||||
logForDebugging(`[SessionsWebSocket] Closed: code=${code}`)
|
||||
this.handleClose(code)
|
||||
})
|
||||
|
||||
@@ -222,7 +236,9 @@ export class SessionsWebSocket {
|
||||
} catch (error) {
|
||||
logError(
|
||||
new Error(
|
||||
`[SessionsWebSocket] Failed to parse message: ${errorMessage(error)}`,
|
||||
`[SessionsWebSocket] Failed to parse message: ${summarizeSessionsWebSocketErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -44,7 +44,6 @@ import { WorkerPendingPermission } from '../components/permissions/WorkerPending
|
||||
import { injectUserMessageToTeammate, getAllInProcessTeammateTasks } from '../tasks/InProcessTeammateTask/InProcessTeammateTask.js';
|
||||
import { isLocalAgentTask, queuePendingMessage, appendMessageToLocalAgent, type LocalAgentTaskState } from '../tasks/LocalAgentTask/LocalAgentTask.js';
|
||||
import { registerLeaderToolUseConfirmQueue, unregisterLeaderToolUseConfirmQueue, registerLeaderSetToolPermissionContext, unregisterLeaderSetToolPermissionContext } from '../utils/swarm/leaderPermissionBridge.js';
|
||||
import { endInteractionSpan } from '../utils/telemetry/sessionTracing.js';
|
||||
import { useLogMessages } from '../hooks/useLogMessages.js';
|
||||
import { useReplBridge } from '../hooks/useReplBridge.js';
|
||||
import { type Command, type CommandResultDisplay, type ResumeEntrypoint, getCommandName, isCommandEnabled } from '../commands.js';
|
||||
@@ -1579,7 +1578,6 @@ export function REPL({
|
||||
setSpinnerColor(null);
|
||||
setSpinnerShimmerColor(null);
|
||||
pickNewSpinnerTip();
|
||||
endInteractionSpan();
|
||||
// Speculative bash classifier checks are only valid for the current
|
||||
// turn's commands — clear after each turn to avoid accumulating
|
||||
// Promise chains for unconsumed checks (denied/aborted paths).
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
/**
|
||||
* Datadog analytics egress is disabled in this build.
|
||||
*
|
||||
* Only shutdown compatibility remains for existing cleanup paths.
|
||||
*/
|
||||
|
||||
export async function shutdownDatadog(): Promise<void> {
|
||||
return
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
/**
|
||||
* Anthropic 1P event logging egress is disabled in this build.
|
||||
*
|
||||
* Only the shutdown and feedback call sites still need a local stub.
|
||||
*/
|
||||
|
||||
export async function shutdown1PEventLogging(): Promise<void> {
|
||||
return
|
||||
}
|
||||
|
||||
export function logEventTo1P(
|
||||
_eventName: string,
|
||||
_metadata: Record<string, number | boolean | undefined> = {},
|
||||
): void {
|
||||
return
|
||||
}
|
||||
32
src/services/analytics/index.test.ts
Normal file
32
src/services/analytics/index.test.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { describe, expect, it } from 'bun:test'
|
||||
|
||||
import {
|
||||
_resetForTesting,
|
||||
attachAnalyticsSink,
|
||||
logEvent,
|
||||
logEventAsync,
|
||||
} from './index.js'
|
||||
|
||||
describe('analytics compatibility boundary', () => {
|
||||
it('stays inert even if a sink is attached', async () => {
|
||||
let syncCalls = 0
|
||||
let asyncCalls = 0
|
||||
|
||||
attachAnalyticsSink({
|
||||
logEvent: () => {
|
||||
syncCalls += 1
|
||||
},
|
||||
logEventAsync: async () => {
|
||||
asyncCalls += 1
|
||||
},
|
||||
})
|
||||
|
||||
logEvent('tengu_test_event', {})
|
||||
await logEventAsync('tengu_test_event_async', {})
|
||||
|
||||
expect(syncCalls).toBe(0)
|
||||
expect(asyncCalls).toBe(0)
|
||||
|
||||
_resetForTesting()
|
||||
})
|
||||
})
|
||||
@@ -1,10 +0,0 @@
|
||||
/**
|
||||
* Analytics sink implementation
|
||||
*
|
||||
* Telemetry sinks are disabled in this build. The exported functions remain so
|
||||
* startup code does not need to special-case the open build.
|
||||
*/
|
||||
|
||||
export function initializeAnalyticsSink(): void {
|
||||
return
|
||||
}
|
||||
@@ -209,11 +209,6 @@ import {
|
||||
stopSessionActivity,
|
||||
} from '../../utils/sessionActivity.js'
|
||||
import { jsonStringify } from '../../utils/slowOperations.js'
|
||||
import {
|
||||
isBetaTracingEnabled,
|
||||
type LLMRequestNewContext,
|
||||
startLLMRequestSpan,
|
||||
} from '../../utils/telemetry/sessionTracing.js'
|
||||
/* eslint-enable @typescript-eslint/no-require-imports */
|
||||
import {
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
@@ -1379,9 +1374,6 @@ async function* queryModel(
|
||||
})
|
||||
const useBetas = betas.length > 0
|
||||
|
||||
// Build minimal context for detailed tracing (when beta tracing is enabled)
|
||||
// Note: The actual new_context message extraction is done in sessionTracing.ts using
|
||||
// hash-based tracking per querySource (agent) from the messagesForAPI array
|
||||
const extraToolSchemas = [...(options.extraToolSchemas ?? [])]
|
||||
if (advisorModel) {
|
||||
// Server tools must be in the tools array by API contract. Appended after
|
||||
@@ -1485,23 +1477,6 @@ async function* queryModel(
|
||||
})
|
||||
}
|
||||
|
||||
const newContext: LLMRequestNewContext | undefined = isBetaTracingEnabled()
|
||||
? {
|
||||
systemPrompt: systemPrompt.join('\n\n'),
|
||||
querySource: options.querySource,
|
||||
tools: jsonStringify(allTools),
|
||||
}
|
||||
: undefined
|
||||
|
||||
// Capture the span so we can pass it to endLLMRequestSpan later
|
||||
// This ensures responses are matched to the correct request when multiple requests run in parallel
|
||||
const llmSpan = startLLMRequestSpan(
|
||||
options.model,
|
||||
newContext,
|
||||
messagesForAPI,
|
||||
isFastMode,
|
||||
)
|
||||
|
||||
const startIncludingRetries = Date.now()
|
||||
let start = Date.now()
|
||||
let attemptNumber = 0
|
||||
@@ -2730,7 +2705,6 @@ async function* queryModel(
|
||||
didFallBackToNonStreaming,
|
||||
queryTracking: options.queryTracking,
|
||||
querySource: options.querySource,
|
||||
llmSpan,
|
||||
fastMode: isFastModeRequest,
|
||||
previousRequestId,
|
||||
})
|
||||
@@ -2786,7 +2760,6 @@ async function* queryModel(
|
||||
didFallBackToNonStreaming,
|
||||
queryTracking: options.queryTracking,
|
||||
querySource: options.querySource,
|
||||
llmSpan,
|
||||
fastMode: isFastModeRequest,
|
||||
previousRequestId,
|
||||
})
|
||||
@@ -2874,10 +2847,7 @@ async function* queryModel(
|
||||
costUSD,
|
||||
queryTracking: options.queryTracking,
|
||||
permissionMode: permissionContext.mode,
|
||||
// Pass newMessages for beta tracing - extraction happens in logging.ts
|
||||
// only when beta tracing is enabled
|
||||
newMessages,
|
||||
llmSpan,
|
||||
globalCacheStrategy,
|
||||
requestSetupMs: start - startIncludingRetries,
|
||||
attemptStartTimes,
|
||||
|
||||
@@ -736,77 +736,27 @@ async function translateCodexStreamToAnthropic(
|
||||
|
||||
// ── Main fetch interceptor ──────────────────────────────────────────
|
||||
|
||||
const CODEX_BASE_URL = 'https://chatgpt.com/backend-api/codex/responses'
|
||||
|
||||
/**
|
||||
* Creates a fetch function that intercepts Anthropic API calls and routes them to Codex.
|
||||
* @param accessToken - The Codex access token for authentication
|
||||
* @returns A fetch function that translates Anthropic requests to Codex format
|
||||
* createCodexFetch is disabled: routing conversations to chatgpt.com would
|
||||
* send full user conversation content to OpenAI's backend, which is a
|
||||
* privacy violation. The function is kept as a stub that always returns an
|
||||
* error so existing call sites don't break at compile time.
|
||||
*/
|
||||
export function createCodexFetch(
|
||||
accessToken: string,
|
||||
_accessToken: string,
|
||||
): (input: RequestInfo | URL, init?: RequestInit) => Promise<Response> {
|
||||
const accountId = extractAccountId(accessToken)
|
||||
|
||||
return async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
|
||||
const url = input instanceof Request ? input.url : String(input)
|
||||
|
||||
// Only intercept Anthropic API message calls
|
||||
if (!url.includes('/v1/messages')) {
|
||||
return globalThis.fetch(input, init)
|
||||
}
|
||||
|
||||
// Parse the Anthropic request body
|
||||
let anthropicBody: Record<string, unknown>
|
||||
try {
|
||||
const bodyText =
|
||||
init?.body instanceof ReadableStream
|
||||
? await new Response(init.body).text()
|
||||
: typeof init?.body === 'string'
|
||||
? init.body
|
||||
: '{}'
|
||||
anthropicBody = JSON.parse(bodyText)
|
||||
} catch {
|
||||
anthropicBody = {}
|
||||
}
|
||||
|
||||
// Get current token (may have been refreshed)
|
||||
const tokens = getCodexOAuthTokens()
|
||||
const currentToken = tokens?.accessToken || accessToken
|
||||
|
||||
// Translate to Codex format
|
||||
const { codexBody, codexModel } = translateToCodexBody(anthropicBody)
|
||||
|
||||
// Call Codex API
|
||||
const codexResponse = await globalThis.fetch(CODEX_BASE_URL, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Accept: 'text/event-stream',
|
||||
Authorization: `Bearer ${currentToken}`,
|
||||
'chatgpt-account-id': accountId,
|
||||
originator: 'pi',
|
||||
'OpenAI-Beta': 'responses=experimental',
|
||||
},
|
||||
body: JSON.stringify(codexBody),
|
||||
})
|
||||
|
||||
if (!codexResponse.ok) {
|
||||
const errorText = await codexResponse.text()
|
||||
return async (_input: RequestInfo | URL, _init?: RequestInit): Promise<Response> => {
|
||||
const errorBody = {
|
||||
type: 'error',
|
||||
error: {
|
||||
type: 'api_error',
|
||||
message: `Codex API error (${codexResponse.status}): ${errorText}`,
|
||||
message:
|
||||
'Codex API routing is disabled. External Codex forwarding has been removed for privacy reasons.',
|
||||
},
|
||||
}
|
||||
return new Response(JSON.stringify(errorBody), {
|
||||
status: codexResponse.status,
|
||||
status: 403,
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
})
|
||||
}
|
||||
|
||||
// Translate streaming response
|
||||
return translateCodexStreamToAnthropic(codexResponse, codexModel)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,9 +14,10 @@ import * as path from 'path'
|
||||
import { count } from '../../utils/array.js'
|
||||
import { getCwd } from '../../utils/cwd.js'
|
||||
import { logForDebugging } from '../../utils/debug.js'
|
||||
import { errorMessage } from '../../utils/errors.js'
|
||||
import { errorMessage, getErrnoCode } from '../../utils/errors.js'
|
||||
import { logError } from '../../utils/log.js'
|
||||
import { sleep } from '../../utils/sleep.js'
|
||||
import { jsonStringify } from '../../utils/slowOperations.js'
|
||||
import {
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
logEvent,
|
||||
@@ -45,6 +46,37 @@ function logDebug(message: string): void {
|
||||
logForDebugging(`[files-api] ${message}`)
|
||||
}
|
||||
|
||||
function summarizeFilesApiError(error: unknown): string {
|
||||
const summary: Record<string, boolean | number | string> = {}
|
||||
|
||||
if (error instanceof Error) {
|
||||
summary.errorType = error.constructor.name
|
||||
summary.errorName = error.name
|
||||
summary.hasMessage = error.message.length > 0
|
||||
} else {
|
||||
summary.errorType = typeof error
|
||||
summary.hasValue = error !== undefined && error !== null
|
||||
}
|
||||
|
||||
const errno = getErrnoCode(error)
|
||||
if (errno) {
|
||||
summary.errno = errno
|
||||
}
|
||||
|
||||
if (axios.isAxiosError(error)) {
|
||||
summary.errorType = 'AxiosError'
|
||||
if (error.code) {
|
||||
summary.axiosCode = error.code
|
||||
}
|
||||
if (typeof error.response?.status === 'number') {
|
||||
summary.httpStatus = error.response.status
|
||||
}
|
||||
summary.hasResponseData = error.response?.data !== undefined
|
||||
}
|
||||
|
||||
return jsonStringify(summary)
|
||||
}
|
||||
|
||||
/**
|
||||
* File specification parsed from CLI args
|
||||
* Format: --file=<file_id>:<relative_path>
|
||||
@@ -108,9 +140,7 @@ async function retryWithBackoff<T>(
|
||||
}
|
||||
|
||||
lastError = result.error || `${operation} failed`
|
||||
logDebug(
|
||||
`${operation} attempt ${attempt}/${MAX_RETRIES} failed: ${lastError}`,
|
||||
)
|
||||
logDebug(`${operation} attempt ${attempt}/${MAX_RETRIES} failed`)
|
||||
|
||||
if (attempt < MAX_RETRIES) {
|
||||
const delayMs = BASE_DELAY_MS * Math.pow(2, attempt - 1)
|
||||
@@ -142,7 +172,7 @@ export async function downloadFile(
|
||||
'anthropic-beta': FILES_API_BETA_HEADER,
|
||||
}
|
||||
|
||||
logDebug(`Downloading file ${fileId} from ${url}`)
|
||||
logDebug(`Downloading file ${fileId} from configured Files API endpoint`)
|
||||
|
||||
return retryWithBackoff(`Download file ${fileId}`, async () => {
|
||||
try {
|
||||
@@ -191,9 +221,7 @@ export function buildDownloadPath(
|
||||
): string | null {
|
||||
const normalized = path.normalize(relativePath)
|
||||
if (normalized.startsWith('..')) {
|
||||
logDebugError(
|
||||
`Invalid file path: ${relativePath}. Path must not traverse above workspace`,
|
||||
)
|
||||
logDebugError('Invalid file path rejected: path traversal is not allowed')
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -243,7 +271,7 @@ export async function downloadAndSaveFile(
|
||||
// Write the file
|
||||
await fs.writeFile(fullPath, content)
|
||||
|
||||
logDebug(`Saved file ${fileId} to ${fullPath} (${content.length} bytes)`)
|
||||
logDebug(`Saved file ${fileId} (${content.length} bytes)`)
|
||||
|
||||
return {
|
||||
fileId,
|
||||
@@ -252,10 +280,16 @@ export async function downloadAndSaveFile(
|
||||
bytesWritten: content.length,
|
||||
}
|
||||
} catch (error) {
|
||||
logDebugError(`Failed to download file ${fileId}: ${errorMessage(error)}`)
|
||||
if (error instanceof Error) {
|
||||
logError(error)
|
||||
}
|
||||
logDebugError(
|
||||
`Failed to download file ${fileId}: ${summarizeFilesApiError(error)}`,
|
||||
)
|
||||
logError(
|
||||
new Error(
|
||||
`Files API download failed for ${fileId}: ${summarizeFilesApiError(
|
||||
error,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
|
||||
return {
|
||||
fileId,
|
||||
@@ -390,7 +424,7 @@ export async function uploadFile(
|
||||
'anthropic-beta': FILES_API_BETA_HEADER,
|
||||
}
|
||||
|
||||
logDebug(`Uploading file ${filePath} as ${relativePath}`)
|
||||
logDebug('Uploading file to configured Files API endpoint')
|
||||
|
||||
// Read file content first (outside retry loop since it's not a network operation)
|
||||
let content: Buffer
|
||||
@@ -455,7 +489,7 @@ export async function uploadFile(
|
||||
const body = Buffer.concat(bodyParts)
|
||||
|
||||
try {
|
||||
return await retryWithBackoff(`Upload file ${relativePath}`, async () => {
|
||||
return await retryWithBackoff('Upload session file', async () => {
|
||||
try {
|
||||
const response = await axios.post(url, body, {
|
||||
headers: {
|
||||
@@ -476,7 +510,7 @@ export async function uploadFile(
|
||||
error: 'Upload succeeded but no file ID returned',
|
||||
}
|
||||
}
|
||||
logDebug(`Uploaded file ${filePath} -> ${fileId} (${fileSize} bytes)`)
|
||||
logDebug(`Uploaded file (${fileSize} bytes)`)
|
||||
return {
|
||||
done: true,
|
||||
value: {
|
||||
@@ -735,9 +769,7 @@ export function parseFileSpecs(fileSpecs: string[]): File[] {
|
||||
const relativePath = spec.substring(colonIndex + 1)
|
||||
|
||||
if (!fileId || !relativePath) {
|
||||
logDebugError(
|
||||
`Invalid file spec: ${spec}. Both file_id and path are required`,
|
||||
)
|
||||
logDebugError('Invalid file spec: missing file_id or relative path')
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
@@ -22,12 +22,6 @@ import { logError } from 'src/utils/log.js'
|
||||
import { getAPIProviderForStatsig } from 'src/utils/model/providers.js'
|
||||
import type { PermissionMode } from 'src/utils/permissions/PermissionMode.js'
|
||||
import { jsonStringify } from 'src/utils/slowOperations.js'
|
||||
import { logOTelEvent } from 'src/utils/telemetry/events.js'
|
||||
import {
|
||||
endLLMRequestSpan,
|
||||
isBetaTracingEnabled,
|
||||
type Span,
|
||||
} from 'src/utils/telemetry/sessionTracing.js'
|
||||
import type { NonNullableUsage } from '../../entrypoints/sdk/sdkUtilityTypes.js'
|
||||
import { consumeInvokingRequestId } from '../../utils/agentContext.js'
|
||||
import {
|
||||
@@ -247,7 +241,6 @@ export function logAPIError({
|
||||
headers,
|
||||
queryTracking,
|
||||
querySource,
|
||||
llmSpan,
|
||||
fastMode,
|
||||
previousRequestId,
|
||||
}: {
|
||||
@@ -266,8 +259,6 @@ export function logAPIError({
|
||||
headers?: globalThis.Headers
|
||||
queryTracking?: QueryChainTracking
|
||||
querySource?: string
|
||||
/** The span from startLLMRequestSpan - pass this to correctly match responses to requests */
|
||||
llmSpan?: Span
|
||||
fastMode?: boolean
|
||||
previousRequestId?: string | null
|
||||
}): void {
|
||||
@@ -364,24 +355,6 @@ export function logAPIError({
|
||||
...getAnthropicEnvMetadata(),
|
||||
})
|
||||
|
||||
// Log API error event for OTLP
|
||||
void logOTelEvent('api_error', {
|
||||
model: model,
|
||||
error: errStr,
|
||||
status_code: String(status),
|
||||
duration_ms: String(durationMs),
|
||||
attempt: String(attempt),
|
||||
speed: fastMode ? 'fast' : 'normal',
|
||||
})
|
||||
|
||||
// Pass the span to correctly match responses to requests when beta tracing is enabled
|
||||
endLLMRequestSpan(llmSpan, {
|
||||
success: false,
|
||||
statusCode: status ? parseInt(status) : undefined,
|
||||
error: errStr,
|
||||
attempt,
|
||||
})
|
||||
|
||||
// Log first error for teleported sessions (reliability tracking)
|
||||
const teleportInfo = getTeleportedSessionInfo()
|
||||
if (teleportInfo?.isTeleported && !teleportInfo.hasLoggedFirstMessage) {
|
||||
@@ -597,7 +570,6 @@ export function logAPISuccessAndDuration({
|
||||
queryTracking,
|
||||
permissionMode,
|
||||
newMessages,
|
||||
llmSpan,
|
||||
globalCacheStrategy,
|
||||
requestSetupMs,
|
||||
attemptStartTimes,
|
||||
@@ -622,11 +594,7 @@ export function logAPISuccessAndDuration({
|
||||
costUSD: number
|
||||
queryTracking?: QueryChainTracking
|
||||
permissionMode?: PermissionMode
|
||||
/** Assistant messages from the response - used to extract model_output and thinking_output
|
||||
* when beta tracing is enabled */
|
||||
newMessages?: AssistantMessage[]
|
||||
/** The span from startLLMRequestSpan - pass this to correctly match responses to requests */
|
||||
llmSpan?: Span
|
||||
/** Strategy used for global prompt caching: 'tool_based', 'system_prompt', or 'none' */
|
||||
globalCacheStrategy?: GlobalCacheStrategy
|
||||
/** Time spent in pre-request setup before the successful attempt */
|
||||
@@ -714,68 +682,6 @@ export function logAPISuccessAndDuration({
|
||||
previousRequestId,
|
||||
betas,
|
||||
})
|
||||
// Log API request event for OTLP
|
||||
void logOTelEvent('api_request', {
|
||||
model,
|
||||
input_tokens: String(usage.input_tokens),
|
||||
output_tokens: String(usage.output_tokens),
|
||||
cache_read_tokens: String(usage.cache_read_input_tokens),
|
||||
cache_creation_tokens: String(usage.cache_creation_input_tokens),
|
||||
cost_usd: String(costUSD),
|
||||
duration_ms: String(durationMs),
|
||||
speed: fastMode ? 'fast' : 'normal',
|
||||
})
|
||||
|
||||
// Extract model output, thinking output, and tool call flag when beta tracing is enabled
|
||||
let modelOutput: string | undefined
|
||||
let thinkingOutput: string | undefined
|
||||
let hasToolCall: boolean | undefined
|
||||
|
||||
if (isBetaTracingEnabled() && newMessages) {
|
||||
// Model output - visible to all users
|
||||
modelOutput =
|
||||
newMessages
|
||||
.flatMap(m =>
|
||||
m.message.content
|
||||
.filter(c => c.type === 'text')
|
||||
.map(c => (c as { type: 'text'; text: string }).text),
|
||||
)
|
||||
.join('\n') || undefined
|
||||
|
||||
// Thinking output - Ant-only (build-time gated)
|
||||
if (process.env.USER_TYPE === 'ant') {
|
||||
thinkingOutput =
|
||||
newMessages
|
||||
.flatMap(m =>
|
||||
m.message.content
|
||||
.filter(c => c.type === 'thinking')
|
||||
.map(c => (c as { type: 'thinking'; thinking: string }).thinking),
|
||||
)
|
||||
.join('\n') || undefined
|
||||
}
|
||||
|
||||
// Check if any tool_use blocks were in the output
|
||||
hasToolCall = newMessages.some(m =>
|
||||
m.message.content.some(c => c.type === 'tool_use'),
|
||||
)
|
||||
}
|
||||
|
||||
// Pass the span to correctly match responses to requests when beta tracing is enabled
|
||||
endLLMRequestSpan(llmSpan, {
|
||||
success: true,
|
||||
inputTokens: usage.input_tokens,
|
||||
outputTokens: usage.output_tokens,
|
||||
cacheReadTokens: usage.cache_read_input_tokens,
|
||||
cacheCreationTokens: usage.cache_creation_input_tokens,
|
||||
attempt,
|
||||
modelOutput,
|
||||
thinkingOutput,
|
||||
hasToolCall,
|
||||
ttftMs: ttftMs ?? undefined,
|
||||
requestSetupMs,
|
||||
attemptStartTimes,
|
||||
})
|
||||
|
||||
// Log first successful message for teleported sessions (reliability tracking)
|
||||
const teleportInfo = getTeleportedSessionInfo()
|
||||
if (teleportInfo?.isTeleported && !teleportInfo.hasLoggedFirstMessage) {
|
||||
|
||||
@@ -19,6 +19,37 @@ interface SessionIngressError {
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeSessionIngressPayload(payload: unknown): string {
|
||||
if (payload === null) return 'null'
|
||||
if (payload === undefined) return 'undefined'
|
||||
if (Array.isArray(payload)) return `array(${payload.length})`
|
||||
if (typeof payload === 'object') {
|
||||
const value = payload as Record<string, unknown>
|
||||
return jsonStringify({
|
||||
payloadType: 'object',
|
||||
keys: Object.keys(value)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
loglinesCount: Array.isArray(value.loglines) ? value.loglines.length : 0,
|
||||
dataCount: Array.isArray(value.data) ? value.data.length : 0,
|
||||
hasNextCursor: typeof value.next_cursor === 'string',
|
||||
})
|
||||
}
|
||||
return typeof payload
|
||||
}
|
||||
|
||||
function summarizeSessionIngressErrorForDebug(error: unknown): string {
|
||||
const err = error as AxiosError<SessionIngressError>
|
||||
return jsonStringify({
|
||||
errorType:
|
||||
error instanceof Error ? error.constructor.name : typeof error,
|
||||
hasMessage: error instanceof Error ? err.message.length > 0 : false,
|
||||
hasStack: error instanceof Error ? Boolean(err.stack) : false,
|
||||
status: err.status,
|
||||
code: typeof err.code === 'string' ? err.code : undefined,
|
||||
})
|
||||
}
|
||||
|
||||
// Module-level state
|
||||
const lastUuidMap: Map<string, UUID> = new Map()
|
||||
|
||||
@@ -81,9 +112,7 @@ async function appendSessionLogImpl(
|
||||
|
||||
if (response.status === 200 || response.status === 201) {
|
||||
lastUuidMap.set(sessionId, entry.uuid)
|
||||
logForDebugging(
|
||||
`Successfully persisted session log entry for session ${sessionId}`,
|
||||
)
|
||||
logForDebugging('Successfully persisted session log entry')
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -96,7 +125,7 @@ async function appendSessionLogImpl(
|
||||
// Our entry IS the last entry on server - it was stored successfully previously
|
||||
lastUuidMap.set(sessionId, entry.uuid)
|
||||
logForDebugging(
|
||||
`Session entry ${entry.uuid} already present on server, recovering from stale state`,
|
||||
'Session entry already present on server, recovering from stale state',
|
||||
)
|
||||
logForDiagnosticsNoPII('info', 'session_persist_recovered_from_409')
|
||||
return true
|
||||
@@ -108,7 +137,7 @@ async function appendSessionLogImpl(
|
||||
if (serverLastUuid) {
|
||||
lastUuidMap.set(sessionId, serverLastUuid as UUID)
|
||||
logForDebugging(
|
||||
`Session 409: adopting server lastUuid=${serverLastUuid} from header, retrying entry ${entry.uuid}`,
|
||||
'Session 409: adopting server last UUID from header and retrying',
|
||||
)
|
||||
} else {
|
||||
// Server didn't return x-last-uuid (e.g. v1 endpoint). Re-fetch
|
||||
@@ -118,7 +147,7 @@ async function appendSessionLogImpl(
|
||||
if (adoptedUuid) {
|
||||
lastUuidMap.set(sessionId, adoptedUuid)
|
||||
logForDebugging(
|
||||
`Session 409: re-fetched ${logs!.length} entries, adopting lastUuid=${adoptedUuid}, retrying entry ${entry.uuid}`,
|
||||
`Session 409: re-fetched ${logs!.length} entries, adopting recovered last UUID and retrying`,
|
||||
)
|
||||
} else {
|
||||
// Can't determine server state — give up
|
||||
@@ -127,7 +156,7 @@ async function appendSessionLogImpl(
|
||||
errorData.error?.message || 'Concurrent modification detected'
|
||||
logError(
|
||||
new Error(
|
||||
`Session persistence conflict: UUID mismatch for session ${sessionId}, entry ${entry.uuid}. ${errorMessage}`,
|
||||
`Session persistence conflict: UUID mismatch detected. ${errorMessage}`,
|
||||
),
|
||||
)
|
||||
logForDiagnosticsNoPII(
|
||||
@@ -149,7 +178,7 @@ async function appendSessionLogImpl(
|
||||
|
||||
// Other 4xx (429, etc.) - retryable
|
||||
logForDebugging(
|
||||
`Failed to persist session log: ${response.status} ${response.statusText}`,
|
||||
`Failed to persist session log: status=${response.status}`,
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'session_persist_fail_status', {
|
||||
status: response.status,
|
||||
@@ -158,7 +187,13 @@ async function appendSessionLogImpl(
|
||||
} catch (error) {
|
||||
// Network errors, 5xx - retryable
|
||||
const axiosError = error as AxiosError<SessionIngressError>
|
||||
logError(new Error(`Error persisting session log: ${axiosError.message}`))
|
||||
logError(
|
||||
new Error(
|
||||
`Error persisting session log: ${summarizeSessionIngressErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'session_persist_fail_status', {
|
||||
status: axiosError.status,
|
||||
attempt,
|
||||
@@ -249,7 +284,7 @@ export async function getSessionLogsViaOAuth(
|
||||
orgUUID: string,
|
||||
): Promise<Entry[] | null> {
|
||||
const url = `${getOauthConfig().BASE_API_URL}/v1/session_ingress/session/${sessionId}`
|
||||
logForDebugging(`[session-ingress] Fetching session logs from: ${url}`)
|
||||
logForDebugging('[session-ingress] Fetching session logs via OAuth endpoint')
|
||||
const headers = {
|
||||
...getOAuthHeaders(accessToken),
|
||||
'x-organization-uuid': orgUUID,
|
||||
@@ -299,7 +334,7 @@ export async function getTeleportEvents(
|
||||
'x-organization-uuid': orgUUID,
|
||||
}
|
||||
|
||||
logForDebugging(`[teleport] Fetching events from: ${baseUrl}`)
|
||||
logForDebugging('[teleport] Fetching session events via teleport endpoint')
|
||||
|
||||
const all: Entry[] = []
|
||||
let cursor: string | undefined
|
||||
@@ -346,7 +381,7 @@ export async function getTeleportEvents(
|
||||
// 404 mid-pagination (pages > 0) means session was deleted between
|
||||
// pages — return what we have.
|
||||
logForDebugging(
|
||||
`[teleport] Session ${sessionId} not found (page ${pages})`,
|
||||
`[teleport] Session not found while fetching events (page ${pages})`,
|
||||
)
|
||||
logForDiagnosticsNoPII('warn', 'teleport_events_not_found')
|
||||
return pages === 0 ? null : all
|
||||
@@ -362,7 +397,9 @@ export async function getTeleportEvents(
|
||||
if (response.status !== 200) {
|
||||
logError(
|
||||
new Error(
|
||||
`Teleport events returned ${response.status}: ${jsonStringify(response.data)}`,
|
||||
`Teleport events returned ${response.status}: ${summarizeSessionIngressPayload(
|
||||
response.data,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'teleport_events_bad_status')
|
||||
@@ -373,7 +410,9 @@ export async function getTeleportEvents(
|
||||
if (!Array.isArray(data)) {
|
||||
logError(
|
||||
new Error(
|
||||
`Teleport events invalid response shape: ${jsonStringify(response.data)}`,
|
||||
`Teleport events invalid response shape: ${summarizeSessionIngressPayload(
|
||||
response.data,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'teleport_events_invalid_shape')
|
||||
@@ -403,13 +442,13 @@ export async function getTeleportEvents(
|
||||
// Don't fail — return what we have. Better to teleport with a
|
||||
// truncated transcript than not at all.
|
||||
logError(
|
||||
new Error(`Teleport events hit page cap (${maxPages}) for ${sessionId}`),
|
||||
new Error(`Teleport events hit page cap (${maxPages})`),
|
||||
)
|
||||
logForDiagnosticsNoPII('warn', 'teleport_events_page_cap')
|
||||
}
|
||||
|
||||
logForDebugging(
|
||||
`[teleport] Fetched ${all.length} events over ${pages} page(s) for ${sessionId}`,
|
||||
`[teleport] Fetched ${all.length} events over ${pages} page(s)`,
|
||||
)
|
||||
return all
|
||||
}
|
||||
@@ -439,7 +478,9 @@ async function fetchSessionLogsFromUrl(
|
||||
if (!data || typeof data !== 'object' || !Array.isArray(data.loglines)) {
|
||||
logError(
|
||||
new Error(
|
||||
`Invalid session logs response format: ${jsonStringify(data)}`,
|
||||
`Invalid session logs response format: ${summarizeSessionIngressPayload(
|
||||
data,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'session_get_fail_invalid_response')
|
||||
@@ -447,14 +488,12 @@ async function fetchSessionLogsFromUrl(
|
||||
}
|
||||
|
||||
const logs = data.loglines as Entry[]
|
||||
logForDebugging(
|
||||
`Fetched ${logs.length} session logs for session ${sessionId}`,
|
||||
)
|
||||
logForDebugging(`Fetched ${logs.length} session logs`)
|
||||
return logs
|
||||
}
|
||||
|
||||
if (response.status === 404) {
|
||||
logForDebugging(`No existing logs for session ${sessionId}`)
|
||||
logForDebugging('No existing session logs')
|
||||
logForDiagnosticsNoPII('warn', 'session_get_no_logs_for_session')
|
||||
return []
|
||||
}
|
||||
@@ -468,7 +507,7 @@ async function fetchSessionLogsFromUrl(
|
||||
}
|
||||
|
||||
logForDebugging(
|
||||
`Failed to fetch session logs: ${response.status} ${response.statusText}`,
|
||||
`Failed to fetch session logs: status=${response.status}`,
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'session_get_fail_status', {
|
||||
status: response.status,
|
||||
@@ -476,7 +515,13 @@ async function fetchSessionLogsFromUrl(
|
||||
return null
|
||||
} catch (error) {
|
||||
const axiosError = error as AxiosError<SessionIngressError>
|
||||
logError(new Error(`Error fetching session logs: ${axiosError.message}`))
|
||||
logError(
|
||||
new Error(
|
||||
`Error fetching session logs: ${summarizeSessionIngressErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
),
|
||||
)
|
||||
logForDiagnosticsNoPII('error', 'session_get_fail_status', {
|
||||
status: axiosError.status,
|
||||
})
|
||||
|
||||
@@ -6,7 +6,6 @@ import { clearSpeculativeChecks } from '../../tools/BashTool/bashPermissions.js'
|
||||
import { clearClassifierApprovals } from '../../utils/classifierApprovals.js'
|
||||
import { resetGetMemoryFilesCache } from '../../utils/claudemd.js'
|
||||
import { clearSessionMessagesCache } from '../../utils/sessionStorage.js'
|
||||
import { clearBetaTracingState } from '../../utils/telemetry/betaSessionTracing.js'
|
||||
import { resetMicrocompactState } from './microCompact.js'
|
||||
|
||||
/**
|
||||
@@ -67,7 +66,6 @@ export function runPostCompactCleanup(querySource?: QuerySource): void {
|
||||
// model still has SkillTool in schema, invoked_skills preserves used
|
||||
// skills, and dynamic additions are handled by skillChangeDetector /
|
||||
// cacheUtils resets. See compactConversation() for full rationale.
|
||||
clearBetaTracingState()
|
||||
if (feature('COMMIT_ATTRIBUTION')) {
|
||||
void import('../../utils/attributionHooks.js').then(m =>
|
||||
m.sweepFileContentCache(),
|
||||
|
||||
@@ -8,6 +8,34 @@ import type { DiagnosticFile } from '../diagnosticTracking.js'
|
||||
import { registerPendingLSPDiagnostic } from './LSPDiagnosticRegistry.js'
|
||||
import type { LSPServerManager } from './LSPServerManager.js'
|
||||
|
||||
function summarizeLspErrorForDebug(error: unknown): string {
|
||||
const err = toError(error)
|
||||
return jsonStringify({
|
||||
errorType: err.constructor.name,
|
||||
errorName: err.name,
|
||||
hasMessage: err.message.length > 0,
|
||||
})
|
||||
}
|
||||
|
||||
function summarizeDiagnosticParamsForDebug(params: unknown): string {
|
||||
if (!params || typeof params !== 'object') {
|
||||
return jsonStringify({
|
||||
paramsType: typeof params,
|
||||
hasValue: params !== undefined && params !== null,
|
||||
})
|
||||
}
|
||||
|
||||
const paramRecord = params as Record<string, unknown>
|
||||
const diagnostics = paramRecord.diagnostics
|
||||
return jsonStringify({
|
||||
keys: Object.keys(paramRecord)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
hasUri: typeof paramRecord.uri === 'string',
|
||||
diagnosticsCount: Array.isArray(diagnostics) ? diagnostics.length : 0,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Map LSP severity to Claude diagnostic severity
|
||||
*
|
||||
@@ -54,7 +82,9 @@ export function formatDiagnosticsForAttachment(
|
||||
const err = toError(error)
|
||||
logError(err)
|
||||
logForDebugging(
|
||||
`Failed to convert URI to file path: ${params.uri}. Error: ${err.message}. Using original URI as fallback.`,
|
||||
`Failed to convert diagnostic URI to file path; using original URI fallback (${summarizeLspErrorForDebug(
|
||||
err,
|
||||
)})`,
|
||||
)
|
||||
// Gracefully fallback to original URI - LSP servers may send malformed URIs
|
||||
uri = params.uri
|
||||
@@ -177,14 +207,16 @@ export function registerLSPNotificationHandlers(
|
||||
)
|
||||
logError(err)
|
||||
logForDebugging(
|
||||
`Invalid diagnostic params from ${serverName}: ${jsonStringify(params)}`,
|
||||
`Invalid diagnostic params from ${serverName}: ${summarizeDiagnosticParamsForDebug(
|
||||
params,
|
||||
)}`,
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const diagnosticParams = params as PublishDiagnosticsParams
|
||||
logForDebugging(
|
||||
`Received diagnostics from ${serverName}: ${diagnosticParams.diagnostics.length} diagnostic(s) for ${diagnosticParams.uri}`,
|
||||
`Received diagnostics from ${serverName}: ${diagnosticParams.diagnostics.length} diagnostic(s)`,
|
||||
)
|
||||
|
||||
// Convert LSP diagnostics to Claude format (can throw on invalid URIs)
|
||||
@@ -199,7 +231,7 @@ export function registerLSPNotificationHandlers(
|
||||
firstFile.diagnostics.length === 0
|
||||
) {
|
||||
logForDebugging(
|
||||
`Skipping empty diagnostics from ${serverName} for ${diagnosticParams.uri}`,
|
||||
`Skipping empty diagnostics from ${serverName}`,
|
||||
)
|
||||
return
|
||||
}
|
||||
@@ -223,9 +255,8 @@ export function registerLSPNotificationHandlers(
|
||||
logError(err)
|
||||
logForDebugging(
|
||||
`Error registering LSP diagnostics from ${serverName}: ` +
|
||||
`URI: ${diagnosticParams.uri}, ` +
|
||||
`Diagnostic count: ${firstFile.diagnostics.length}, ` +
|
||||
`Error: ${err.message}`,
|
||||
`Error: ${summarizeLspErrorForDebug(err)}`,
|
||||
)
|
||||
|
||||
// Track consecutive failures and warn after 3+
|
||||
@@ -234,7 +265,7 @@ export function registerLSPNotificationHandlers(
|
||||
lastError: '',
|
||||
}
|
||||
failures.count++
|
||||
failures.lastError = err.message
|
||||
failures.lastError = summarizeLspErrorForDebug(err)
|
||||
diagnosticFailures.set(serverName, failures)
|
||||
|
||||
if (failures.count >= 3) {
|
||||
@@ -251,7 +282,9 @@ export function registerLSPNotificationHandlers(
|
||||
const err = toError(error)
|
||||
logError(err)
|
||||
logForDebugging(
|
||||
`Unexpected error processing diagnostics from ${serverName}: ${err.message}`,
|
||||
`Unexpected error processing diagnostics from ${serverName}: ${summarizeLspErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
|
||||
// Track consecutive failures and warn after 3+
|
||||
@@ -260,7 +293,7 @@ export function registerLSPNotificationHandlers(
|
||||
lastError: '',
|
||||
}
|
||||
failures.count++
|
||||
failures.lastError = err.message
|
||||
failures.lastError = summarizeLspErrorForDebug(err)
|
||||
diagnosticFailures.set(serverName, failures)
|
||||
|
||||
if (failures.count >= 3) {
|
||||
@@ -284,13 +317,13 @@ export function registerLSPNotificationHandlers(
|
||||
|
||||
registrationErrors.push({
|
||||
serverName,
|
||||
error: err.message,
|
||||
error: summarizeLspErrorForDebug(err),
|
||||
})
|
||||
|
||||
logError(err)
|
||||
logForDebugging(
|
||||
`Failed to register diagnostics handler for ${serverName}: ` +
|
||||
`Error: ${err.message}`,
|
||||
`Error: ${summarizeLspErrorForDebug(err)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -93,35 +93,77 @@ type MCPOAuthFlowErrorReason =
|
||||
|
||||
const MAX_LOCK_RETRIES = 5
|
||||
|
||||
/**
|
||||
* OAuth query parameters that should be redacted from logs.
|
||||
* These contain sensitive values that could enable CSRF or session fixation attacks.
|
||||
*/
|
||||
const SENSITIVE_OAUTH_PARAMS = [
|
||||
'state',
|
||||
'nonce',
|
||||
'code_challenge',
|
||||
'code_verifier',
|
||||
'code',
|
||||
]
|
||||
function summarizeHeadersForDebug(
|
||||
headers: Record<string, string> | undefined,
|
||||
): {
|
||||
headerCount: number
|
||||
headerNames: string[]
|
||||
hasAuthorization: boolean
|
||||
} {
|
||||
if (!headers) {
|
||||
return {
|
||||
headerCount: 0,
|
||||
headerNames: [],
|
||||
hasAuthorization: false,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Redacts sensitive OAuth query parameters from a URL for safe logging.
|
||||
* Prevents exposure of state, nonce, code_challenge, code_verifier, and authorization codes.
|
||||
*/
|
||||
function redactSensitiveUrlParams(url: string): string {
|
||||
try {
|
||||
const parsedUrl = new URL(url)
|
||||
for (const param of SENSITIVE_OAUTH_PARAMS) {
|
||||
if (parsedUrl.searchParams.has(param)) {
|
||||
parsedUrl.searchParams.set(param, '[REDACTED]')
|
||||
const headerNames = Object.keys(headers).sort()
|
||||
return {
|
||||
headerCount: headerNames.length,
|
||||
headerNames,
|
||||
hasAuthorization: headerNames.some(
|
||||
headerName => headerName.toLowerCase() === 'authorization',
|
||||
),
|
||||
}
|
||||
}
|
||||
return parsedUrl.toString()
|
||||
} catch {
|
||||
// Return as-is if not a valid URL
|
||||
return url
|
||||
|
||||
function extractHttpStatusFromErrorMessage(message: string): number | undefined {
|
||||
const statusMatch = message.match(/^HTTP (\d{3}):/)
|
||||
if (!statusMatch) {
|
||||
return undefined
|
||||
}
|
||||
return Number(statusMatch[1])
|
||||
}
|
||||
|
||||
function summarizeOAuthErrorForDebug(error: unknown): string {
|
||||
const summary: Record<string, boolean | number | string> = {}
|
||||
|
||||
if (error instanceof Error) {
|
||||
summary.errorType = error.constructor.name
|
||||
summary.errorName = error.name
|
||||
summary.hasMessage = error.message.length > 0
|
||||
|
||||
const httpStatus = extractHttpStatusFromErrorMessage(error.message)
|
||||
if (httpStatus !== undefined) {
|
||||
summary.httpStatus = httpStatus
|
||||
}
|
||||
|
||||
if (error instanceof OAuthError) {
|
||||
summary.oauthErrorCode = error.errorCode
|
||||
}
|
||||
} else {
|
||||
summary.errorType = typeof error
|
||||
summary.hasValue = error !== undefined && error !== null
|
||||
}
|
||||
|
||||
const errno = getErrnoCode(error)
|
||||
if (errno) {
|
||||
summary.errno = errno
|
||||
}
|
||||
|
||||
if (axios.isAxiosError(error)) {
|
||||
summary.errorType = 'AxiosError'
|
||||
if (error.code) {
|
||||
summary.axiosCode = error.code
|
||||
}
|
||||
if (typeof error.response?.status === 'number') {
|
||||
summary.httpStatus = error.response.status
|
||||
}
|
||||
summary.hasResponseData = error.response?.data !== undefined
|
||||
}
|
||||
|
||||
return jsonStringify(summary)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -295,7 +337,9 @@ async function fetchAuthServerMetadata(
|
||||
// to the legacy path-aware retry.
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`RFC 9728 discovery failed, falling back: ${errorMessage(err)}`,
|
||||
`RFC 9728 discovery failed, falling back: ${summarizeOAuthErrorForDebug(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -517,7 +561,7 @@ export async function revokeServerTokens(
|
||||
: 'client_secret_basic'
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Revoking tokens via ${revocationEndpointStr} (${authMethod})`,
|
||||
`Revoking tokens via discovered OAuth revocation endpoint (${authMethod})`,
|
||||
)
|
||||
|
||||
// Revoke refresh token first (more important - prevents future access token generation)
|
||||
@@ -537,7 +581,9 @@ export async function revokeServerTokens(
|
||||
// Log but continue
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Failed to revoke refresh token: ${errorMessage(error)}`,
|
||||
`Failed to revoke refresh token: ${summarizeOAuthErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -558,7 +604,9 @@ export async function revokeServerTokens(
|
||||
} catch (error: unknown) {
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Failed to revoke access token: ${errorMessage(error)}`,
|
||||
`Failed to revoke access token: ${summarizeOAuthErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -566,7 +614,10 @@ export async function revokeServerTokens(
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
// Log error but don't throw - revocation is best-effort
|
||||
logMCPDebug(serverName, `Failed to revoke tokens: ${errorMessage(error)}`)
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Failed to revoke tokens: ${summarizeOAuthErrorForDebug(error)}`,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
logMCPDebug(serverName, 'No tokens to revoke')
|
||||
@@ -696,14 +747,11 @@ async function performMCPXaaAuth(
|
||||
const haveKeys = Object.keys(
|
||||
getSecureStorage().read()?.mcpOAuthClientConfig ?? {},
|
||||
)
|
||||
const headersForLogging = Object.fromEntries(
|
||||
Object.entries(serverConfig.headers ?? {}).map(([k, v]) =>
|
||||
k.toLowerCase() === 'authorization' ? [k, '[REDACTED]'] : [k, v],
|
||||
),
|
||||
)
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`XAA: secret lookup miss. wanted=${wantedKey} have=[${haveKeys.join(', ')}] configHeaders=${jsonStringify(headersForLogging)}`,
|
||||
`XAA: secret lookup miss. wanted=${wantedKey} availableKeys=${haveKeys.length} configHeaderSummary=${jsonStringify(
|
||||
summarizeHeadersForDebug(serverConfig.headers),
|
||||
)}`,
|
||||
)
|
||||
throw new Error(
|
||||
`XAA: AS client secret not found for '${serverName}'. Re-add with --client-secret.`,
|
||||
@@ -923,10 +971,7 @@ export async function performMCPOAuthFlow(
|
||||
try {
|
||||
resourceMetadataUrl = new URL(cachedResourceMetadataUrl)
|
||||
} catch {
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Invalid cached resourceMetadataUrl: ${cachedResourceMetadataUrl}`,
|
||||
)
|
||||
logMCPDebug(serverName, 'Invalid cached resource metadata URL')
|
||||
}
|
||||
}
|
||||
const wwwAuthParams: WWWAuthenticateParams = {
|
||||
@@ -988,13 +1033,15 @@ export async function performMCPOAuthFlow(
|
||||
provider.setMetadata(metadata)
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Fetched OAuth metadata with scope: ${getScopeFromMetadata(metadata) || 'NONE'}`,
|
||||
`Fetched OAuth metadata (hasScope=${Boolean(
|
||||
getScopeFromMetadata(metadata),
|
||||
)})`,
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Failed to fetch OAuth metadata: ${errorMessage(error)}`,
|
||||
`Failed to fetch OAuth metadata: ${summarizeOAuthErrorForDebug(error)}`,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1170,8 +1217,10 @@ export async function performMCPOAuthFlow(
|
||||
|
||||
server.listen(port, '127.0.0.1', async () => {
|
||||
try {
|
||||
logMCPDebug(serverName, `Starting SDK auth`)
|
||||
logMCPDebug(serverName, `Server URL: ${serverConfig.url}`)
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Starting SDK auth (transport=${serverConfig.type})`,
|
||||
)
|
||||
|
||||
// First call to start the auth flow - should redirect
|
||||
// Pass the scope and resource_metadata from WWW-Authenticate header if available
|
||||
@@ -1189,7 +1238,10 @@ export async function performMCPOAuthFlow(
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
logMCPDebug(serverName, `SDK auth error: ${error}`)
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`SDK auth error: ${summarizeOAuthErrorForDebug(error)}`,
|
||||
)
|
||||
cleanup()
|
||||
rejectOnce(new Error(`SDK auth failed: ${errorMessage(error)}`))
|
||||
}
|
||||
@@ -1235,9 +1287,13 @@ export async function performMCPOAuthFlow(
|
||||
if (savedTokens) {
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Token access_token length: ${savedTokens.access_token?.length}`,
|
||||
`Token summary after auth: ${jsonStringify({
|
||||
hasAccessToken: Boolean(savedTokens.access_token),
|
||||
hasRefreshToken: Boolean(savedTokens.refresh_token),
|
||||
expiresInSec: savedTokens.expires_in,
|
||||
hasScope: Boolean(savedTokens.scope),
|
||||
})}`,
|
||||
)
|
||||
logMCPDebug(serverName, `Token expires_in: ${savedTokens.expires_in}`)
|
||||
}
|
||||
|
||||
logEvent('tengu_mcp_oauth_flow_success', {
|
||||
@@ -1257,7 +1313,10 @@ export async function performMCPOAuthFlow(
|
||||
throw new Error('Unexpected auth result: ' + result)
|
||||
}
|
||||
} catch (error) {
|
||||
logMCPDebug(serverName, `Error during auth completion: ${error}`)
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`Error during auth completion: ${summarizeOAuthErrorForDebug(error)}`,
|
||||
)
|
||||
|
||||
// Determine failure reason for attribution telemetry. The try block covers
|
||||
// port acquisition, the callback server, the redirect flow, and token
|
||||
@@ -1298,9 +1357,9 @@ export async function performMCPOAuthFlow(
|
||||
// SDK does not attach HTTP status as a property, but the fallback ServerError
|
||||
// embeds it in the message as "HTTP {status}:" when the response body was
|
||||
// unparseable. Best-effort extraction.
|
||||
const statusMatch = error.message.match(/^HTTP (\d{3}):/)
|
||||
if (statusMatch) {
|
||||
httpStatus = Number(statusMatch[1])
|
||||
const parsedStatus = extractHttpStatusFromErrorMessage(error.message)
|
||||
if (parsedStatus !== undefined) {
|
||||
httpStatus = parsedStatus
|
||||
}
|
||||
// If client not found, clear the stored client ID and suggest retry
|
||||
if (
|
||||
@@ -1429,7 +1488,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
metadata.scope = metadataScope
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Using scope from metadata: ${metadata.scope}`,
|
||||
'Using scope from metadata',
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1445,7 +1504,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
get clientMetadataUrl(): string | undefined {
|
||||
const override = process.env.MCP_OAUTH_CLIENT_METADATA_URL
|
||||
if (override) {
|
||||
logMCPDebug(this.serverName, `Using CIMD URL from env: ${override}`)
|
||||
logMCPDebug(this.serverName, 'Using CIMD URL from env override')
|
||||
return override
|
||||
}
|
||||
return MCP_CLIENT_METADATA_URL
|
||||
@@ -1467,7 +1526,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
*/
|
||||
markStepUpPending(scope: string): void {
|
||||
this._pendingStepUpScope = scope
|
||||
logMCPDebug(this.serverName, `Marked step-up pending: ${scope}`)
|
||||
logMCPDebug(this.serverName, 'Marked step-up pending')
|
||||
}
|
||||
|
||||
async state(): Promise<string> {
|
||||
@@ -1606,7 +1665,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
} catch (e) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`XAA silent exchange failed: ${errorMessage(e)}`,
|
||||
`XAA silent exchange failed: ${summarizeOAuthErrorForDebug(e)}`,
|
||||
)
|
||||
}
|
||||
// Fall through. Either id_token isn't cached (xaaRefresh returned
|
||||
@@ -1632,7 +1691,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
if (needsStepUp) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Step-up pending (${this._pendingStepUpScope}), omitting refresh_token`,
|
||||
'Step-up pending, omitting refresh_token',
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1679,7 +1738,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
} catch (error) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Token refresh error: ${errorMessage(error)}`,
|
||||
`Token refresh error: ${summarizeOAuthErrorForDebug(error)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1693,10 +1752,15 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
token_type: 'Bearer',
|
||||
}
|
||||
|
||||
logMCPDebug(this.serverName, `Returning tokens`)
|
||||
logMCPDebug(this.serverName, `Token length: ${tokens.access_token?.length}`)
|
||||
logMCPDebug(this.serverName, `Has refresh token: ${!!tokens.refresh_token}`)
|
||||
logMCPDebug(this.serverName, `Expires in: ${Math.floor(expiresIn)}s`)
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Returning tokens: ${jsonStringify({
|
||||
hasAccessToken: Boolean(tokens.access_token),
|
||||
hasRefreshToken: Boolean(tokens.refresh_token),
|
||||
hasScope: Boolean(tokens.scope),
|
||||
expiresInSec: Math.floor(expiresIn),
|
||||
})}`,
|
||||
)
|
||||
|
||||
return tokens
|
||||
}
|
||||
@@ -1707,9 +1771,15 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
const existingData = storage.read() || {}
|
||||
const serverKey = getServerKey(this.serverName, this.serverConfig)
|
||||
|
||||
logMCPDebug(this.serverName, `Saving tokens`)
|
||||
logMCPDebug(this.serverName, `Token expires in: ${tokens.expires_in}`)
|
||||
logMCPDebug(this.serverName, `Has refresh token: ${!!tokens.refresh_token}`)
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Saving tokens: ${jsonStringify({
|
||||
hasAccessToken: Boolean(tokens.access_token),
|
||||
hasRefreshToken: Boolean(tokens.refresh_token),
|
||||
hasScope: Boolean(tokens.scope),
|
||||
expiresInSec: tokens.expires_in,
|
||||
})}`,
|
||||
)
|
||||
|
||||
const updatedData: SecureStorageData = {
|
||||
...existingData,
|
||||
@@ -1783,7 +1853,9 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
} catch (e) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`XAA: OIDC discovery failed in silent refresh: ${errorMessage(e)}`,
|
||||
`XAA: OIDC discovery failed in silent refresh: ${summarizeOAuthErrorForDebug(
|
||||
e,
|
||||
)}`,
|
||||
)
|
||||
return undefined
|
||||
}
|
||||
@@ -1855,29 +1927,18 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
|
||||
// Extract and store scopes from the authorization URL for later use in token exchange
|
||||
const scopes = authorizationUrl.searchParams.get('scope')
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Authorization URL: ${redactSensitiveUrlParams(authorizationUrl.toString())}`,
|
||||
)
|
||||
logMCPDebug(this.serverName, `Scopes in URL: ${scopes || 'NOT FOUND'}`)
|
||||
|
||||
if (scopes) {
|
||||
this._scopes = scopes
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Captured scopes from authorization URL: ${scopes}`,
|
||||
)
|
||||
logMCPDebug(this.serverName, 'Captured scopes from authorization URL')
|
||||
} else {
|
||||
// If no scope in URL, try to get it from metadata
|
||||
const metadataScope = getScopeFromMetadata(this._metadata)
|
||||
if (metadataScope) {
|
||||
this._scopes = metadataScope
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Using scopes from metadata: ${metadataScope}`,
|
||||
)
|
||||
logMCPDebug(this.serverName, 'Using scopes from metadata')
|
||||
} else {
|
||||
logMCPDebug(this.serverName, `No scopes available from URL or metadata`)
|
||||
logMCPDebug(this.serverName, 'No scopes available from URL or metadata')
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1895,7 +1956,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
if (existing) {
|
||||
existing.stepUpScope = this._scopes
|
||||
storage.update(existingData)
|
||||
logMCPDebug(this.serverName, `Persisted step-up scope: ${this._scopes}`)
|
||||
logMCPDebug(this.serverName, 'Persisted step-up scope')
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1916,8 +1977,6 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
}
|
||||
|
||||
logMCPDebug(this.serverName, `Redirecting to authorization URL`)
|
||||
const redactedUrl = redactSensitiveUrlParams(urlString)
|
||||
logMCPDebug(this.serverName, `Authorization URL: ${redactedUrl}`)
|
||||
|
||||
// Notify the UI about the authorization URL BEFORE opening the browser,
|
||||
// so users can see the URL as a fallback if the browser fails to open
|
||||
@@ -1926,7 +1985,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
}
|
||||
|
||||
if (!this.skipBrowserOpen) {
|
||||
logMCPDebug(this.serverName, `Opening authorization URL: ${redactedUrl}`)
|
||||
logMCPDebug(this.serverName, 'Opening authorization URL')
|
||||
|
||||
const success = await openBrowser(urlString)
|
||||
if (!success) {
|
||||
@@ -1938,7 +1997,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
} else {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Skipping browser open (skipBrowserOpen=true). URL: ${redactedUrl}`,
|
||||
'Skipping browser open (skipBrowserOpen=true)',
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1991,7 +2050,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
}
|
||||
|
||||
storage.update(existingData)
|
||||
logMCPDebug(this.serverName, `Invalidated credentials (scope: ${scope})`)
|
||||
logMCPDebug(this.serverName, `Invalidated credentials (${scope})`)
|
||||
}
|
||||
|
||||
async saveDiscoveryState(state: OAuthDiscoveryState): Promise<void> {
|
||||
@@ -1999,10 +2058,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
const existingData = storage.read() || {}
|
||||
const serverKey = getServerKey(this.serverName, this.serverConfig)
|
||||
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Saving discovery state (authServer: ${state.authorizationServerUrl})`,
|
||||
)
|
||||
logMCPDebug(this.serverName, 'Saving discovery state')
|
||||
|
||||
// Persist only the URLs, NOT the full metadata blobs.
|
||||
// authorizationServerMetadata alone is ~1.5-2KB per MCP server (every
|
||||
@@ -2041,10 +2097,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
|
||||
const cached = data?.mcpOAuth?.[serverKey]?.discoveryState
|
||||
if (cached?.authorizationServerUrl) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Returning cached discovery state (authServer: ${cached.authorizationServerUrl})`,
|
||||
)
|
||||
logMCPDebug(this.serverName, 'Returning cached discovery state')
|
||||
|
||||
return {
|
||||
authorizationServerUrl: cached.authorizationServerUrl,
|
||||
@@ -2061,7 +2114,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
if (metadataUrl) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Fetching metadata from configured URL: ${metadataUrl}`,
|
||||
'Fetching metadata from configured override URL',
|
||||
)
|
||||
try {
|
||||
const metadata = await fetchAuthServerMetadata(
|
||||
@@ -2079,7 +2132,9 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
} catch (error) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Failed to fetch from configured metadata URL: ${errorMessage(error)}`,
|
||||
`Failed to fetch from configured metadata URL: ${summarizeOAuthErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -2231,7 +2286,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
} else if (cached?.authorizationServerUrl) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Re-discovering metadata from persisted auth server URL: ${cached.authorizationServerUrl}`,
|
||||
'Re-discovering metadata from persisted auth server URL',
|
||||
)
|
||||
metadata = await discoverAuthorizationServerMetadata(
|
||||
cached.authorizationServerUrl,
|
||||
@@ -2287,10 +2342,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
// Invalid grant means the refresh token itself is invalid/revoked/expired.
|
||||
// But another process may have already refreshed successfully — check first.
|
||||
if (error instanceof InvalidGrantError) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Token refresh failed with invalid_grant: ${error.message}`,
|
||||
)
|
||||
logMCPDebug(this.serverName, 'Token refresh failed with invalid_grant')
|
||||
clearKeychainCache()
|
||||
const storage = getSecureStorage()
|
||||
const data = storage.read()
|
||||
@@ -2337,7 +2389,7 @@ export class ClaudeAuthProvider implements OAuthClientProvider {
|
||||
if (!isRetryable || attempt >= MAX_ATTEMPTS) {
|
||||
logMCPDebug(
|
||||
this.serverName,
|
||||
`Token refresh failed: ${errorMessage(error)}`,
|
||||
`Token refresh failed: ${summarizeOAuthErrorForDebug(error)}`,
|
||||
)
|
||||
emitRefreshEvent(
|
||||
'failure',
|
||||
|
||||
@@ -332,6 +332,94 @@ function mcpBaseUrlAnalytics(serverRef: ScopedMcpServerConfig): {
|
||||
: {}
|
||||
}
|
||||
|
||||
function mcpBaseUrlForDebug(serverRef: ScopedMcpServerConfig): string {
|
||||
return getLoggingSafeMcpBaseUrl(serverRef) || '[unavailable]'
|
||||
}
|
||||
|
||||
function summarizeHeadersForDebug(
|
||||
headers: Record<string, string> | undefined,
|
||||
): {
|
||||
headerCount: number
|
||||
headerNames: string[]
|
||||
hasAuthorization: boolean
|
||||
} {
|
||||
if (!headers) {
|
||||
return {
|
||||
headerCount: 0,
|
||||
headerNames: [],
|
||||
hasAuthorization: false,
|
||||
}
|
||||
}
|
||||
|
||||
const headerNames = Object.keys(headers)
|
||||
return {
|
||||
headerCount: headerNames.length,
|
||||
headerNames: headerNames.sort(),
|
||||
hasAuthorization: headerNames.some(
|
||||
headerName => headerName.toLowerCase() === 'authorization',
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeProxyEnvForDebug(): Record<string, string | boolean> {
|
||||
return {
|
||||
hasNodeOptions: Boolean(process.env.NODE_OPTIONS),
|
||||
uvThreadpoolSizeConfigured: Boolean(process.env.UV_THREADPOOL_SIZE),
|
||||
hasHttpProxy: Boolean(process.env.HTTP_PROXY),
|
||||
hasHttpsProxy: Boolean(process.env.HTTPS_PROXY),
|
||||
hasNoProxy: Boolean(process.env.NO_PROXY),
|
||||
}
|
||||
}
|
||||
|
||||
function summarizeStderrForDebug(stderrOutput: string): string {
|
||||
const trimmed = stderrOutput.trim()
|
||||
const lineCount = trimmed === '' ? 0 : trimmed.split('\n').length
|
||||
return `Server stderr captured (${trimmed.length} chars, ${lineCount} lines)`
|
||||
}
|
||||
|
||||
function summarizeMcpErrorForDebug(error: unknown): string {
|
||||
const summary: Record<string, boolean | number | string> = {}
|
||||
|
||||
if (error instanceof Error) {
|
||||
summary.errorType = error.constructor.name
|
||||
summary.errorName = error.name
|
||||
summary.hasMessage = error.message.length > 0
|
||||
summary.hasStack = Boolean(error.stack)
|
||||
|
||||
const errorObj = error as Error & {
|
||||
code?: unknown
|
||||
errno?: unknown
|
||||
syscall?: unknown
|
||||
status?: unknown
|
||||
cause?: unknown
|
||||
}
|
||||
|
||||
if (typeof errorObj.code === 'string' || typeof errorObj.code === 'number') {
|
||||
summary.code = errorObj.code
|
||||
}
|
||||
if (
|
||||
typeof errorObj.errno === 'string' ||
|
||||
typeof errorObj.errno === 'number'
|
||||
) {
|
||||
summary.errno = errorObj.errno
|
||||
}
|
||||
if (typeof errorObj.syscall === 'string') {
|
||||
summary.syscall = errorObj.syscall
|
||||
}
|
||||
if (typeof errorObj.status === 'number') {
|
||||
summary.status = errorObj.status
|
||||
}
|
||||
if (errorObj.cause !== undefined) {
|
||||
summary.hasCause = true
|
||||
}
|
||||
} else {
|
||||
summary.errorType = typeof error
|
||||
summary.hasValue = error !== undefined && error !== null
|
||||
}
|
||||
|
||||
return jsonStringify(summary)
|
||||
}
|
||||
|
||||
/**
|
||||
* Shared handler for sse/http/claudeai-proxy auth failures during connect:
|
||||
* emits tengu_mcp_server_needs_auth, caches the needs-auth entry, and returns
|
||||
@@ -676,7 +764,10 @@ export const connectToServer = memoize(
|
||||
)
|
||||
logMCPDebug(name, `SSE transport initialized, awaiting connection`)
|
||||
} else if (serverRef.type === 'sse-ide') {
|
||||
logMCPDebug(name, `Setting up SSE-IDE transport to ${serverRef.url}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Setting up SSE-IDE transport to ${mcpBaseUrlForDebug(serverRef)}`,
|
||||
)
|
||||
// IDE servers don't need authentication
|
||||
// TODO: Use the auth token provided in the lockfile
|
||||
const proxyOptions = getProxyFetchOptions()
|
||||
@@ -735,7 +826,7 @@ export const connectToServer = memoize(
|
||||
} else if (serverRef.type === 'ws') {
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Initializing WebSocket transport to ${serverRef.url}`,
|
||||
`Initializing WebSocket transport to ${mcpBaseUrlForDebug(serverRef)}`,
|
||||
)
|
||||
|
||||
const combinedHeaders = await getMcpServerHeaders(name, serverRef)
|
||||
@@ -749,16 +840,17 @@ export const connectToServer = memoize(
|
||||
...combinedHeaders,
|
||||
}
|
||||
|
||||
// Redact sensitive headers before logging
|
||||
const wsHeadersForLogging = mapValues(wsHeaders, (value, key) =>
|
||||
key.toLowerCase() === 'authorization' ? '[REDACTED]' : value,
|
||||
const wsHeadersForLogging = summarizeHeadersForDebug(
|
||||
mapValues(wsHeaders, (_value, key) =>
|
||||
key.toLowerCase() === 'authorization' ? '[REDACTED]' : '[set]',
|
||||
),
|
||||
)
|
||||
|
||||
logMCPDebug(
|
||||
name,
|
||||
`WebSocket transport options: ${jsonStringify({
|
||||
url: serverRef.url,
|
||||
headers: wsHeadersForLogging,
|
||||
url: mcpBaseUrlForDebug(serverRef),
|
||||
...wsHeadersForLogging,
|
||||
hasSessionAuth: !!sessionIngressToken,
|
||||
})}`,
|
||||
)
|
||||
@@ -782,20 +874,17 @@ export const connectToServer = memoize(
|
||||
}
|
||||
transport = new WebSocketTransport(wsClient)
|
||||
} else if (serverRef.type === 'http') {
|
||||
logMCPDebug(name, `Initializing HTTP transport to ${serverRef.url}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Initializing HTTP transport to ${mcpBaseUrlForDebug(serverRef)}`,
|
||||
)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Node version: ${process.version}, Platform: ${process.platform}`,
|
||||
)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Environment: ${jsonStringify({
|
||||
NODE_OPTIONS: process.env.NODE_OPTIONS || 'not set',
|
||||
UV_THREADPOOL_SIZE: process.env.UV_THREADPOOL_SIZE || 'default',
|
||||
HTTP_PROXY: process.env.HTTP_PROXY || 'not set',
|
||||
HTTPS_PROXY: process.env.HTTPS_PROXY || 'not set',
|
||||
NO_PROXY: process.env.NO_PROXY || 'not set',
|
||||
})}`,
|
||||
`Environment: ${jsonStringify(summarizeProxyEnvForDebug())}`,
|
||||
)
|
||||
|
||||
// Create an auth provider for this server
|
||||
@@ -843,16 +932,16 @@ export const connectToServer = memoize(
|
||||
const headersForLogging = transportOptions.requestInit?.headers
|
||||
? mapValues(
|
||||
transportOptions.requestInit.headers as Record<string, string>,
|
||||
(value, key) =>
|
||||
key.toLowerCase() === 'authorization' ? '[REDACTED]' : value,
|
||||
(_value, key) =>
|
||||
key.toLowerCase() === 'authorization' ? '[REDACTED]' : '[set]',
|
||||
)
|
||||
: undefined
|
||||
|
||||
logMCPDebug(
|
||||
name,
|
||||
`HTTP transport options: ${jsonStringify({
|
||||
url: serverRef.url,
|
||||
headers: headersForLogging,
|
||||
url: mcpBaseUrlForDebug(serverRef),
|
||||
...summarizeHeadersForDebug(headersForLogging),
|
||||
hasAuthProvider: !!authProvider,
|
||||
timeoutMs: MCP_REQUEST_TIMEOUT_MS,
|
||||
})}`,
|
||||
@@ -879,7 +968,7 @@ export const connectToServer = memoize(
|
||||
const oauthConfig = getOauthConfig()
|
||||
const proxyUrl = `${oauthConfig.MCP_PROXY_URL}${oauthConfig.MCP_PROXY_PATH.replace('{server_id}', serverRef.id)}`
|
||||
|
||||
logMCPDebug(name, `Using claude.ai proxy at ${proxyUrl}`)
|
||||
logMCPDebug(name, `Using claude.ai proxy transport`)
|
||||
|
||||
// eslint-disable-next-line eslint-plugin-n/no-unsupported-features/node-builtins
|
||||
const fetchWithAuth = createClaudeAiProxyFetch(globalThis.fetch)
|
||||
@@ -1025,23 +1114,28 @@ export const connectToServer = memoize(
|
||||
|
||||
// For HTTP transport, try a basic connectivity test first
|
||||
if (serverRef.type === 'http') {
|
||||
logMCPDebug(name, `Testing basic HTTP connectivity to ${serverRef.url}`)
|
||||
try {
|
||||
const testUrl = new URL(serverRef.url)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Parsed URL: host=${testUrl.hostname}, port=${testUrl.port || 'default'}, protocol=${testUrl.protocol}`,
|
||||
`Testing basic HTTP connectivity to ${mcpBaseUrlForDebug(serverRef)}`,
|
||||
)
|
||||
try {
|
||||
const testUrl = new URL(serverRef.url)
|
||||
logMCPDebug(name, 'Parsed HTTP endpoint for preflight checks')
|
||||
|
||||
// Log DNS resolution attempt
|
||||
if (
|
||||
testUrl.hostname === '127.0.0.1' ||
|
||||
testUrl.hostname === 'localhost'
|
||||
) {
|
||||
logMCPDebug(name, `Using loopback address: ${testUrl.hostname}`)
|
||||
logMCPDebug(name, 'Using loopback HTTP endpoint')
|
||||
}
|
||||
} catch (urlError) {
|
||||
logMCPDebug(name, `Failed to parse URL: ${urlError}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Failed to parse HTTP endpoint for preflight: ${summarizeMcpErrorForDebug(
|
||||
urlError,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1079,7 +1173,7 @@ export const connectToServer = memoize(
|
||||
try {
|
||||
await Promise.race([connectPromise, timeoutPromise])
|
||||
if (stderrOutput) {
|
||||
logMCPError(name, `Server stderr: ${stderrOutput}`)
|
||||
logMCPError(name, summarizeStderrForDebug(stderrOutput))
|
||||
stderrOutput = '' // Release accumulated string to prevent memory growth
|
||||
}
|
||||
const elapsed = Date.now() - connectStartTime
|
||||
@@ -1093,30 +1187,29 @@ export const connectToServer = memoize(
|
||||
if (serverRef.type === 'sse' && error instanceof Error) {
|
||||
logMCPDebug(
|
||||
name,
|
||||
`SSE Connection failed after ${elapsed}ms: ${jsonStringify({
|
||||
url: serverRef.url,
|
||||
error: error.message,
|
||||
errorType: error.constructor.name,
|
||||
stack: error.stack,
|
||||
})}`,
|
||||
`SSE connection failed after ${elapsed}ms: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
logMCPError(
|
||||
name,
|
||||
`SSE connection failed: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
logMCPError(name, error)
|
||||
|
||||
if (error instanceof UnauthorizedError) {
|
||||
return handleRemoteAuthFailure(name, serverRef, 'sse')
|
||||
}
|
||||
} else if (serverRef.type === 'http' && error instanceof Error) {
|
||||
const errorObj = error as Error & {
|
||||
cause?: unknown
|
||||
code?: string
|
||||
errno?: string | number
|
||||
syscall?: string
|
||||
}
|
||||
logMCPDebug(
|
||||
name,
|
||||
`HTTP Connection failed after ${elapsed}ms: ${error.message} (code: ${errorObj.code || 'none'}, errno: ${errorObj.errno || 'none'})`,
|
||||
`HTTP connection failed after ${elapsed}ms: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
logMCPError(
|
||||
name,
|
||||
`HTTP connection failed: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
logMCPError(name, error)
|
||||
|
||||
if (error instanceof UnauthorizedError) {
|
||||
return handleRemoteAuthFailure(name, serverRef, 'http')
|
||||
@@ -1127,9 +1220,16 @@ export const connectToServer = memoize(
|
||||
) {
|
||||
logMCPDebug(
|
||||
name,
|
||||
`claude.ai proxy connection failed after ${elapsed}ms: ${error.message}`,
|
||||
`claude.ai proxy connection failed after ${elapsed}ms: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
logMCPError(
|
||||
name,
|
||||
`claude.ai proxy connection failed: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
logMCPError(name, error)
|
||||
|
||||
// StreamableHTTPError has a `code` property with the HTTP status
|
||||
const errorCode = (error as Error & { code?: number }).code
|
||||
@@ -1149,7 +1249,7 @@ export const connectToServer = memoize(
|
||||
}
|
||||
transport.close().catch(() => {})
|
||||
if (stderrOutput) {
|
||||
logMCPError(name, `Server stderr: ${stderrOutput}`)
|
||||
logMCPError(name, summarizeStderrForDebug(stderrOutput))
|
||||
}
|
||||
throw error
|
||||
}
|
||||
@@ -1208,7 +1308,9 @@ export const connectToServer = memoize(
|
||||
} catch (error) {
|
||||
logMCPError(
|
||||
name,
|
||||
`Failed to send ide_connected notification: ${error}`,
|
||||
`Failed to send ide_connected notification: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1242,7 +1344,10 @@ export const connectToServer = memoize(
|
||||
hasTriggeredClose = true
|
||||
logMCPDebug(name, `Closing transport (${reason})`)
|
||||
void client.close().catch(e => {
|
||||
logMCPDebug(name, `Error during close: ${errorMessage(e)}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error during close: ${summarizeMcpErrorForDebug(e)}`,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1306,7 +1411,10 @@ export const connectToServer = memoize(
|
||||
`Failed to spawn process - check command and permissions`,
|
||||
)
|
||||
} else {
|
||||
logMCPDebug(name, `Connection error: ${error.message}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Connection error: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1407,12 +1515,20 @@ export const connectToServer = memoize(
|
||||
try {
|
||||
await inProcessServer.close()
|
||||
} catch (error) {
|
||||
logMCPDebug(name, `Error closing in-process server: ${error}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error closing in-process server: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
try {
|
||||
await client.close()
|
||||
} catch (error) {
|
||||
logMCPDebug(name, `Error closing client: ${error}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error closing client: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -1438,7 +1554,10 @@ export const connectToServer = memoize(
|
||||
try {
|
||||
process.kill(childPid, 'SIGINT')
|
||||
} catch (error) {
|
||||
logMCPDebug(name, `Error sending SIGINT: ${error}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error sending SIGINT: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1492,7 +1611,12 @@ export const connectToServer = memoize(
|
||||
try {
|
||||
process.kill(childPid, 'SIGTERM')
|
||||
} catch (termError) {
|
||||
logMCPDebug(name, `Error sending SIGTERM: ${termError}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error sending SIGTERM: ${summarizeMcpErrorForDebug(
|
||||
termError,
|
||||
)}`,
|
||||
)
|
||||
resolved = true
|
||||
clearInterval(checkInterval)
|
||||
clearTimeout(failsafeTimeout)
|
||||
@@ -1525,7 +1649,9 @@ export const connectToServer = memoize(
|
||||
} catch (killError) {
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error sending SIGKILL: ${killError}`,
|
||||
`Error sending SIGKILL: ${summarizeMcpErrorForDebug(
|
||||
killError,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
} catch {
|
||||
@@ -1557,7 +1683,12 @@ export const connectToServer = memoize(
|
||||
})
|
||||
}
|
||||
} catch (processError) {
|
||||
logMCPDebug(name, `Error terminating process: ${processError}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error terminating process: ${summarizeMcpErrorForDebug(
|
||||
processError,
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1565,7 +1696,10 @@ export const connectToServer = memoize(
|
||||
try {
|
||||
await client.close()
|
||||
} catch (error) {
|
||||
logMCPDebug(name, `Error closing client: ${error}`)
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Error closing client: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1622,9 +1756,14 @@ export const connectToServer = memoize(
|
||||
})
|
||||
logMCPDebug(
|
||||
name,
|
||||
`Connection failed after ${connectionDurationMs}ms: ${errorMessage(error)}`,
|
||||
`Connection failed after ${connectionDurationMs}ms: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
logMCPError(
|
||||
name,
|
||||
`Connection failed: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
logMCPError(name, `Connection failed: ${errorMessage(error)}`)
|
||||
|
||||
if (inProcessServer) {
|
||||
inProcessServer.close().catch(() => {})
|
||||
@@ -1989,7 +2128,10 @@ export const fetchToolsForClient = memoizeWithLRU(
|
||||
})
|
||||
.filter(isIncludedMcpTool)
|
||||
} catch (error) {
|
||||
logMCPError(client.name, `Failed to fetch tools: ${errorMessage(error)}`)
|
||||
logMCPError(
|
||||
client.name,
|
||||
`Failed to fetch tools: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
return []
|
||||
}
|
||||
},
|
||||
@@ -2021,7 +2163,7 @@ export const fetchResourcesForClient = memoizeWithLRU(
|
||||
} catch (error) {
|
||||
logMCPError(
|
||||
client.name,
|
||||
`Failed to fetch resources: ${errorMessage(error)}`,
|
||||
`Failed to fetch resources: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
return []
|
||||
}
|
||||
@@ -2087,7 +2229,9 @@ export const fetchCommandsForClient = memoizeWithLRU(
|
||||
} catch (error) {
|
||||
logMCPError(
|
||||
client.name,
|
||||
`Error running command '${prompt.name}': ${errorMessage(error)}`,
|
||||
`Error running command '${prompt.name}': ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
throw error
|
||||
}
|
||||
@@ -2097,7 +2241,7 @@ export const fetchCommandsForClient = memoizeWithLRU(
|
||||
} catch (error) {
|
||||
logMCPError(
|
||||
client.name,
|
||||
`Failed to fetch commands: ${errorMessage(error)}`,
|
||||
`Failed to fetch commands: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
return []
|
||||
}
|
||||
@@ -2198,7 +2342,10 @@ export async function reconnectMcpServerImpl(
|
||||
}
|
||||
} catch (error) {
|
||||
// Handle errors gracefully - connection might have closed during fetch
|
||||
logMCPError(name, `Error during reconnection: ${errorMessage(error)}`)
|
||||
logMCPError(
|
||||
name,
|
||||
`Error during reconnection: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
|
||||
// Return with failed status
|
||||
return {
|
||||
@@ -2373,7 +2520,9 @@ export async function getMcpToolsCommandsAndResources(
|
||||
// Handle errors gracefully - connection might have closed during fetch
|
||||
logMCPError(
|
||||
name,
|
||||
`Error fetching tools/commands/resources: ${errorMessage(error)}`,
|
||||
`Error fetching tools/commands/resources: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
|
||||
// Still update with the client but no tools/commands
|
||||
@@ -2460,7 +2609,7 @@ export function prefetchAllMcpResources(
|
||||
}, mcpConfigs).catch(error => {
|
||||
logMCPError(
|
||||
'prefetchAllMcpResources',
|
||||
`Failed to get MCP resources: ${errorMessage(error)}`,
|
||||
`Failed to get MCP resources: ${summarizeMcpErrorForDebug(error)}`,
|
||||
)
|
||||
// Still resolve with empty results
|
||||
void resolve({
|
||||
@@ -3322,7 +3471,12 @@ export async function setupSdkMcpClients(
|
||||
}
|
||||
} catch (error) {
|
||||
// If connection fails, return failed server
|
||||
logMCPError(name, `Failed to connect SDK MCP server: ${error}`)
|
||||
logMCPError(
|
||||
name,
|
||||
`Failed to connect SDK MCP server: ${summarizeMcpErrorForDebug(
|
||||
error,
|
||||
)}`,
|
||||
)
|
||||
return {
|
||||
client: {
|
||||
type: 'failed' as const,
|
||||
|
||||
@@ -1397,6 +1397,7 @@ export function parseMcpConfigFromFilePath(params: {
|
||||
configContent = fs.readFileSync(filePath, { encoding: 'utf8' })
|
||||
} catch (error: unknown) {
|
||||
const code = getErrnoCode(error)
|
||||
const fileName = parse(filePath).base
|
||||
if (code === 'ENOENT') {
|
||||
return {
|
||||
config: null,
|
||||
@@ -1415,7 +1416,7 @@ export function parseMcpConfigFromFilePath(params: {
|
||||
}
|
||||
}
|
||||
logForDebugging(
|
||||
`MCP config read error for ${filePath} (scope=${scope}): ${error}`,
|
||||
`MCP config read error (scope=${scope}, file=${fileName}, errno=${code ?? 'none'}, errorType=${error instanceof Error ? error.name : typeof error})`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
return {
|
||||
@@ -1439,7 +1440,7 @@ export function parseMcpConfigFromFilePath(params: {
|
||||
|
||||
if (!parsedJson) {
|
||||
logForDebugging(
|
||||
`MCP config is not valid JSON: ${filePath} (scope=${scope}, length=${configContent.length}, first100=${jsonStringify(configContent.slice(0, 100))})`,
|
||||
`MCP config is not valid JSON (scope=${scope}, file=${parse(filePath).base}, length=${configContent.length})`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
return {
|
||||
|
||||
@@ -96,6 +96,24 @@ function redactTokens(raw: unknown): string {
|
||||
return s.replace(SENSITIVE_TOKEN_RE, (_, k) => `"${k}":"[REDACTED]"`)
|
||||
}
|
||||
|
||||
function summarizeXaaPayload(raw: unknown): string {
|
||||
if (typeof raw === 'string') {
|
||||
return `text(${raw.length} chars)`
|
||||
}
|
||||
if (Array.isArray(raw)) {
|
||||
return `array(${raw.length})`
|
||||
}
|
||||
if (raw && typeof raw === 'object') {
|
||||
return jsonStringify({
|
||||
payloadType: 'object',
|
||||
keys: Object.keys(raw as Record<string, unknown>)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
})
|
||||
}
|
||||
return typeof raw
|
||||
}
|
||||
|
||||
// ─── Zod Schemas ────────────────────────────────────────────────────────────
|
||||
|
||||
const TokenExchangeResponseSchema = lazySchema(() =>
|
||||
@@ -145,7 +163,7 @@ export async function discoverProtectedResource(
|
||||
)
|
||||
} catch (e) {
|
||||
throw new Error(
|
||||
`XAA: PRM discovery failed: ${e instanceof Error ? e.message : String(e)}`,
|
||||
`XAA: PRM discovery failed (${e instanceof Error ? e.name : typeof e})`,
|
||||
)
|
||||
}
|
||||
if (!prm.resource || !prm.authorization_servers?.[0]) {
|
||||
@@ -154,9 +172,7 @@ export async function discoverProtectedResource(
|
||||
)
|
||||
}
|
||||
if (normalizeUrl(prm.resource) !== normalizeUrl(serverUrl)) {
|
||||
throw new Error(
|
||||
`XAA: PRM discovery failed: PRM resource mismatch: expected ${serverUrl}, got ${prm.resource}`,
|
||||
)
|
||||
throw new Error('XAA: PRM discovery failed: PRM resource mismatch')
|
||||
}
|
||||
return {
|
||||
resource: prm.resource,
|
||||
@@ -183,22 +199,16 @@ export async function discoverAuthorizationServer(
|
||||
fetchFn: opts?.fetchFn ?? defaultFetch,
|
||||
})
|
||||
if (!meta?.issuer || !meta.token_endpoint) {
|
||||
throw new Error(
|
||||
`XAA: AS metadata discovery failed: no valid metadata at ${asUrl}`,
|
||||
)
|
||||
throw new Error('XAA: AS metadata discovery failed: no valid metadata')
|
||||
}
|
||||
if (normalizeUrl(meta.issuer) !== normalizeUrl(asUrl)) {
|
||||
throw new Error(
|
||||
`XAA: AS metadata discovery failed: issuer mismatch: expected ${asUrl}, got ${meta.issuer}`,
|
||||
)
|
||||
throw new Error('XAA: AS metadata discovery failed: issuer mismatch')
|
||||
}
|
||||
// RFC 8414 §3.3 / RFC 9728 §3 require HTTPS. A PRM-advertised http:// AS
|
||||
// that self-consistently reports an http:// issuer would pass the mismatch
|
||||
// check above, then we'd POST id_token + client_secret over plaintext.
|
||||
if (new URL(meta.token_endpoint).protocol !== 'https:') {
|
||||
throw new Error(
|
||||
`XAA: refusing non-HTTPS token endpoint: ${meta.token_endpoint}`,
|
||||
)
|
||||
throw new Error('XAA: refusing non-HTTPS token endpoint')
|
||||
}
|
||||
return {
|
||||
issuer: meta.issuer,
|
||||
@@ -263,7 +273,7 @@ export async function requestJwtAuthorizationGrant(opts: {
|
||||
body: params,
|
||||
})
|
||||
if (!res.ok) {
|
||||
const body = redactTokens(await res.text()).slice(0, 200)
|
||||
const body = summarizeXaaPayload(redactTokens(await res.text()))
|
||||
// 4xx → id_token rejected (invalid_grant etc.), clear cache.
|
||||
// 5xx → IdP outage, id_token may still be valid, preserve it.
|
||||
const shouldClear = res.status < 500
|
||||
@@ -278,21 +288,25 @@ export async function requestJwtAuthorizationGrant(opts: {
|
||||
} catch {
|
||||
// Transient network condition (captive portal, proxy) — don't clear id_token.
|
||||
throw new XaaTokenExchangeError(
|
||||
`XAA: token exchange returned non-JSON (captive portal?) at ${opts.tokenEndpoint}`,
|
||||
'XAA: token exchange returned non-JSON response (captive portal?)',
|
||||
false,
|
||||
)
|
||||
}
|
||||
const exchangeParsed = TokenExchangeResponseSchema().safeParse(rawExchange)
|
||||
if (!exchangeParsed.success) {
|
||||
throw new XaaTokenExchangeError(
|
||||
`XAA: token exchange response did not match expected shape: ${redactTokens(rawExchange)}`,
|
||||
`XAA: token exchange response did not match expected shape: ${summarizeXaaPayload(
|
||||
redactTokens(rawExchange),
|
||||
)}`,
|
||||
true,
|
||||
)
|
||||
}
|
||||
const result = exchangeParsed.data
|
||||
if (!result.access_token) {
|
||||
throw new XaaTokenExchangeError(
|
||||
`XAA: token exchange response missing access_token: ${redactTokens(result)}`,
|
||||
`XAA: token exchange response missing access_token: ${summarizeXaaPayload(
|
||||
redactTokens(result),
|
||||
)}`,
|
||||
true,
|
||||
)
|
||||
}
|
||||
@@ -373,7 +387,7 @@ export async function exchangeJwtAuthGrant(opts: {
|
||||
body: params,
|
||||
})
|
||||
if (!res.ok) {
|
||||
const body = redactTokens(await res.text()).slice(0, 200)
|
||||
const body = summarizeXaaPayload(redactTokens(await res.text()))
|
||||
throw new Error(`XAA: jwt-bearer grant failed: HTTP ${res.status}: ${body}`)
|
||||
}
|
||||
let rawTokens: unknown
|
||||
@@ -381,13 +395,15 @@ export async function exchangeJwtAuthGrant(opts: {
|
||||
rawTokens = await res.json()
|
||||
} catch {
|
||||
throw new Error(
|
||||
`XAA: jwt-bearer grant returned non-JSON (captive portal?) at ${opts.tokenEndpoint}`,
|
||||
'XAA: jwt-bearer grant returned non-JSON response (captive portal?)',
|
||||
)
|
||||
}
|
||||
const tokensParsed = JwtBearerResponseSchema().safeParse(rawTokens)
|
||||
if (!tokensParsed.success) {
|
||||
throw new Error(
|
||||
`XAA: jwt-bearer response did not match expected shape: ${redactTokens(rawTokens)}`,
|
||||
`XAA: jwt-bearer response did not match expected shape: ${summarizeXaaPayload(
|
||||
redactTokens(rawTokens),
|
||||
)}`,
|
||||
)
|
||||
}
|
||||
return tokensParsed.data
|
||||
@@ -431,11 +447,14 @@ export async function performCrossAppAccess(
|
||||
): Promise<XaaResult> {
|
||||
const fetchFn = makeXaaFetch(abortSignal)
|
||||
|
||||
logMCPDebug(serverName, `XAA: discovering PRM for ${serverUrl}`)
|
||||
logMCPDebug(serverName, 'XAA: discovering protected resource metadata')
|
||||
const prm = await discoverProtectedResource(serverUrl, { fetchFn })
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`XAA: discovered resource=${prm.resource} ASes=[${prm.authorization_servers.join(', ')}]`,
|
||||
`XAA: discovered protected resource metadata ${jsonStringify({
|
||||
hasResource: Boolean(prm.resource),
|
||||
authorizationServerCount: prm.authorization_servers.length,
|
||||
})}`,
|
||||
)
|
||||
|
||||
// Try each advertised AS in order. grant_types_supported is OPTIONAL per
|
||||
@@ -449,16 +468,16 @@ export async function performCrossAppAccess(
|
||||
candidate = await discoverAuthorizationServer(asUrl, { fetchFn })
|
||||
} catch (e) {
|
||||
if (abortSignal?.aborted) throw e
|
||||
asErrors.push(`${asUrl}: ${e instanceof Error ? e.message : String(e)}`)
|
||||
asErrors.push(
|
||||
`authorization server discovery failed (${e instanceof Error ? e.name : typeof e})`,
|
||||
)
|
||||
continue
|
||||
}
|
||||
if (
|
||||
candidate.grant_types_supported &&
|
||||
!candidate.grant_types_supported.includes(JWT_BEARER_GRANT)
|
||||
) {
|
||||
asErrors.push(
|
||||
`${asUrl}: does not advertise jwt-bearer grant (supported: ${candidate.grant_types_supported.join(', ')})`,
|
||||
)
|
||||
asErrors.push('authorization server does not advertise jwt-bearer grant')
|
||||
continue
|
||||
}
|
||||
asMeta = candidate
|
||||
@@ -466,7 +485,7 @@ export async function performCrossAppAccess(
|
||||
}
|
||||
if (!asMeta) {
|
||||
throw new Error(
|
||||
`XAA: no authorization server supports jwt-bearer. Tried: ${asErrors.join('; ')}`,
|
||||
`XAA: no authorization server supports jwt-bearer (${asErrors.length} candidates tried)`,
|
||||
)
|
||||
}
|
||||
// Pick auth method from what the AS advertises. We handle
|
||||
@@ -481,7 +500,7 @@ export async function performCrossAppAccess(
|
||||
: 'client_secret_basic'
|
||||
logMCPDebug(
|
||||
serverName,
|
||||
`XAA: AS issuer=${asMeta.issuer} token_endpoint=${asMeta.token_endpoint} auth_method=${authMethod}`,
|
||||
`XAA: selected authorization server (auth_method=${authMethod})`,
|
||||
)
|
||||
|
||||
logMCPDebug(serverName, `XAA: exchanging id_token for ID-JAG at IdP`)
|
||||
|
||||
@@ -210,9 +210,7 @@ export async function discoverOidc(
|
||||
signal: AbortSignal.timeout(IDP_REQUEST_TIMEOUT_MS),
|
||||
})
|
||||
if (!res.ok) {
|
||||
throw new Error(
|
||||
`XAA IdP: OIDC discovery failed: HTTP ${res.status} at ${url}`,
|
||||
)
|
||||
throw new Error(`XAA IdP: OIDC discovery failed (HTTP ${res.status})`)
|
||||
}
|
||||
// Captive portals and proxy auth pages return 200 with HTML. res.json()
|
||||
// throws a raw SyntaxError before safeParse can give a useful message.
|
||||
@@ -221,17 +219,15 @@ export async function discoverOidc(
|
||||
body = await res.json()
|
||||
} catch {
|
||||
throw new Error(
|
||||
`XAA IdP: OIDC discovery returned non-JSON at ${url} (captive portal or proxy?)`,
|
||||
'XAA IdP: OIDC discovery returned non-JSON response (captive portal or proxy?)',
|
||||
)
|
||||
}
|
||||
const parsed = OpenIdProviderDiscoveryMetadataSchema.safeParse(body)
|
||||
if (!parsed.success) {
|
||||
throw new Error(`XAA IdP: invalid OIDC metadata: ${parsed.error.message}`)
|
||||
throw new Error('XAA IdP: invalid OIDC metadata')
|
||||
}
|
||||
if (new URL(parsed.data.token_endpoint).protocol !== 'https:') {
|
||||
throw new Error(
|
||||
`XAA IdP: refusing non-HTTPS token endpoint: ${parsed.data.token_endpoint}`,
|
||||
)
|
||||
throw new Error('XAA IdP: refusing non-HTTPS token endpoint')
|
||||
}
|
||||
return parsed.data
|
||||
}
|
||||
@@ -373,7 +369,7 @@ function waitForCallback(
|
||||
),
|
||||
)
|
||||
} else {
|
||||
rejectOnce(new Error(`XAA IdP: callback server failed: ${err.message}`))
|
||||
rejectOnce(new Error('XAA IdP: callback server failed'))
|
||||
}
|
||||
})
|
||||
|
||||
@@ -405,11 +401,11 @@ export async function acquireIdpIdToken(
|
||||
|
||||
const cached = getCachedIdpIdToken(idpIssuer)
|
||||
if (cached) {
|
||||
logMCPDebug('xaa', `Using cached id_token for ${idpIssuer}`)
|
||||
logMCPDebug('xaa', 'Using cached id_token for configured IdP')
|
||||
return cached
|
||||
}
|
||||
|
||||
logMCPDebug('xaa', `No cached id_token for ${idpIssuer}; starting OIDC login`)
|
||||
logMCPDebug('xaa', 'No cached id_token for configured IdP; starting OIDC login')
|
||||
|
||||
const metadata = await discoverOidc(idpIssuer)
|
||||
const port = opts.callbackPort ?? (await findAvailablePort())
|
||||
@@ -478,10 +474,7 @@ export async function acquireIdpIdToken(
|
||||
: Date.now() + (tokens.expires_in ?? 3600) * 1000
|
||||
|
||||
saveIdpIdToken(idpIssuer, tokens.id_token, expiresAt)
|
||||
logMCPDebug(
|
||||
'xaa',
|
||||
`Cached id_token for ${idpIssuer} (expires ${new Date(expiresAt).toISOString()})`,
|
||||
)
|
||||
logMCPDebug('xaa', 'Cached id_token for configured IdP')
|
||||
|
||||
return tokens.id_token
|
||||
}
|
||||
|
||||
@@ -11,7 +11,6 @@ import {
|
||||
import {
|
||||
extractMcpToolDetails,
|
||||
extractSkillName,
|
||||
extractToolInputForTelemetry,
|
||||
getFileExtensionForAnalytics,
|
||||
getFileExtensionsFromBashCommand,
|
||||
isToolDetailsLoggingEnabled,
|
||||
@@ -87,17 +86,6 @@ import {
|
||||
} from '../../utils/sessionActivity.js'
|
||||
import { jsonStringify } from '../../utils/slowOperations.js'
|
||||
import { Stream } from '../../utils/stream.js'
|
||||
import { logOTelEvent } from '../../utils/telemetry/events.js'
|
||||
import {
|
||||
addToolContentEvent,
|
||||
endToolBlockedOnUserSpan,
|
||||
endToolExecutionSpan,
|
||||
endToolSpan,
|
||||
isBetaTracingEnabled,
|
||||
startToolBlockedOnUserSpan,
|
||||
startToolExecutionSpan,
|
||||
startToolSpan,
|
||||
} from '../../utils/telemetry/sessionTracing.js'
|
||||
import {
|
||||
formatError,
|
||||
formatZodValidationError,
|
||||
@@ -204,7 +192,7 @@ function ruleSourceToOTelSource(
|
||||
* Without it, we fall back conservatively: allow → user_temporary,
|
||||
* deny → user_reject.
|
||||
*/
|
||||
function decisionReasonToOTelSource(
|
||||
function decisionReasonToSource(
|
||||
reason: PermissionDecisionReason | undefined,
|
||||
behavior: 'allow' | 'deny',
|
||||
): string {
|
||||
@@ -890,29 +878,6 @@ async function checkPermissionsAndCallTool(
|
||||
}
|
||||
}
|
||||
|
||||
const toolAttributes: Record<string, string | number | boolean> = {}
|
||||
if (processedInput && typeof processedInput === 'object') {
|
||||
if (tool.name === FILE_READ_TOOL_NAME && 'file_path' in processedInput) {
|
||||
toolAttributes.file_path = String(processedInput.file_path)
|
||||
} else if (
|
||||
(tool.name === FILE_EDIT_TOOL_NAME ||
|
||||
tool.name === FILE_WRITE_TOOL_NAME) &&
|
||||
'file_path' in processedInput
|
||||
) {
|
||||
toolAttributes.file_path = String(processedInput.file_path)
|
||||
} else if (tool.name === BASH_TOOL_NAME && 'command' in processedInput) {
|
||||
const bashInput = processedInput as BashToolInput
|
||||
toolAttributes.full_command = bashInput.command
|
||||
}
|
||||
}
|
||||
|
||||
startToolSpan(
|
||||
tool.name,
|
||||
toolAttributes,
|
||||
isBetaTracingEnabled() ? jsonStringify(processedInput) : undefined,
|
||||
)
|
||||
startToolBlockedOnUserSpan()
|
||||
|
||||
// Check whether we have permission to use the tool,
|
||||
// and ask the user for permission if we don't
|
||||
const permissionMode = toolUseContext.getAppState().toolPermissionContext.mode
|
||||
@@ -945,33 +910,22 @@ async function checkPermissionsAndCallTool(
|
||||
)
|
||||
}
|
||||
|
||||
// Emit tool_decision OTel event and code-edit counter if the interactive
|
||||
// permission path didn't already log it (headless mode bypasses permission
|
||||
// logging, so we need to emit both the generic event and the code-edit
|
||||
// counter here)
|
||||
// Increment the code-edit counter here when the interactive permission path
|
||||
// did not already log a decision (headless mode bypasses permission logging).
|
||||
if (
|
||||
permissionDecision.behavior !== 'ask' &&
|
||||
!toolUseContext.toolDecisions?.has(toolUseID)
|
||||
) {
|
||||
const decision =
|
||||
permissionDecision.behavior === 'allow' ? 'accept' : 'reject'
|
||||
const source = decisionReasonToOTelSource(
|
||||
permissionDecision.decisionReason,
|
||||
permissionDecision.behavior,
|
||||
)
|
||||
void logOTelEvent('tool_decision', {
|
||||
decision,
|
||||
source,
|
||||
tool_name: sanitizeToolNameForAnalytics(tool.name),
|
||||
})
|
||||
|
||||
// Increment code-edit tool decision counter for headless mode
|
||||
if (isCodeEditingTool(tool.name)) {
|
||||
void buildCodeEditToolAttributes(
|
||||
tool,
|
||||
processedInput,
|
||||
decision,
|
||||
source,
|
||||
decisionReasonToSource(
|
||||
permissionDecision.decisionReason,
|
||||
permissionDecision.behavior,
|
||||
),
|
||||
).then(attributes => getCodeEditToolDecisionCounter()?.add(1, attributes))
|
||||
}
|
||||
}
|
||||
@@ -994,10 +948,6 @@ async function checkPermissionsAndCallTool(
|
||||
|
||||
if (permissionDecision.behavior !== 'allow') {
|
||||
logForDebugging(`${tool.name} tool permission denied`)
|
||||
const decisionInfo = toolUseContext.toolDecisions?.get(toolUseID)
|
||||
endToolBlockedOnUserSpan('reject', decisionInfo?.source || 'unknown')
|
||||
endToolSpan()
|
||||
|
||||
logEvent('tengu_tool_use_can_use_tool_rejected', {
|
||||
messageID:
|
||||
messageId as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
@@ -1131,10 +1081,6 @@ async function checkPermissionsAndCallTool(
|
||||
processedInput = permissionDecision.updatedInput
|
||||
}
|
||||
|
||||
// Prepare tool parameters for logging in tool_result event.
|
||||
// Gated by OTEL_LOG_TOOL_DETAILS — tool parameters can contain sensitive
|
||||
// content (bash commands, MCP server names, etc.) so they're opt-in only.
|
||||
const telemetryToolInput = extractToolInputForTelemetry(processedInput)
|
||||
let toolParameters: Record<string, unknown> = {}
|
||||
if (isToolDetailsLoggingEnabled()) {
|
||||
if (tool.name === BASH_TOOL_NAME && 'command' in processedInput) {
|
||||
@@ -1168,13 +1114,6 @@ async function checkPermissionsAndCallTool(
|
||||
}
|
||||
}
|
||||
|
||||
const decisionInfo = toolUseContext.toolDecisions?.get(toolUseID)
|
||||
endToolBlockedOnUserSpan(
|
||||
decisionInfo?.decision || 'unknown',
|
||||
decisionInfo?.source || 'unknown',
|
||||
)
|
||||
startToolExecutionSpan()
|
||||
|
||||
const startTime = Date.now()
|
||||
|
||||
startSessionActivity('tool_exec')
|
||||
@@ -1223,51 +1162,6 @@ async function checkPermissionsAndCallTool(
|
||||
const durationMs = Date.now() - startTime
|
||||
addToToolDuration(durationMs)
|
||||
|
||||
// Log tool content/output as span event if enabled
|
||||
if (result.data && typeof result.data === 'object') {
|
||||
const contentAttributes: Record<string, string | number | boolean> = {}
|
||||
|
||||
// Read tool: capture file_path and content
|
||||
if (tool.name === FILE_READ_TOOL_NAME && 'content' in result.data) {
|
||||
if ('file_path' in processedInput) {
|
||||
contentAttributes.file_path = String(processedInput.file_path)
|
||||
}
|
||||
contentAttributes.content = String(result.data.content)
|
||||
}
|
||||
|
||||
// Edit/Write tools: capture file_path and diff
|
||||
if (
|
||||
(tool.name === FILE_EDIT_TOOL_NAME ||
|
||||
tool.name === FILE_WRITE_TOOL_NAME) &&
|
||||
'file_path' in processedInput
|
||||
) {
|
||||
contentAttributes.file_path = String(processedInput.file_path)
|
||||
|
||||
// For Edit, capture the actual changes made
|
||||
if (tool.name === FILE_EDIT_TOOL_NAME && 'diff' in result.data) {
|
||||
contentAttributes.diff = String(result.data.diff)
|
||||
}
|
||||
// For Write, capture the written content
|
||||
if (tool.name === FILE_WRITE_TOOL_NAME && 'content' in processedInput) {
|
||||
contentAttributes.content = String(processedInput.content)
|
||||
}
|
||||
}
|
||||
|
||||
// Bash tool: capture command
|
||||
if (tool.name === BASH_TOOL_NAME && 'command' in processedInput) {
|
||||
const bashInput = processedInput as BashToolInput
|
||||
contentAttributes.bash_command = bashInput.command
|
||||
// Also capture output if available
|
||||
if ('output' in result.data) {
|
||||
contentAttributes.output = String(result.data.output)
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(contentAttributes).length > 0) {
|
||||
addToolContentEvent('tool.output', contentAttributes)
|
||||
}
|
||||
}
|
||||
|
||||
// Capture structured output from tool result if present
|
||||
if (typeof result === 'object' && 'structured_output' in result) {
|
||||
// Store the structured output in an attachment message
|
||||
@@ -1279,14 +1173,6 @@ async function checkPermissionsAndCallTool(
|
||||
})
|
||||
}
|
||||
|
||||
endToolExecutionSpan({ success: true })
|
||||
// Pass tool result for new_context logging
|
||||
const toolResultStr =
|
||||
result.data && typeof result.data === 'object'
|
||||
? jsonStringify(result.data)
|
||||
: String(result.data ?? '')
|
||||
endToolSpan(toolResultStr)
|
||||
|
||||
// Map the tool result to API format once and cache it. This block is reused
|
||||
// by addToolResult (skipping the remap) and measured here for analytics.
|
||||
const mappedToolResultBlock = tool.mapToolResultToToolResultBlockParam(
|
||||
@@ -1373,27 +1259,10 @@ async function checkPermissionsAndCallTool(
|
||||
}
|
||||
}
|
||||
|
||||
// Log tool result event for OTLP with tool parameters and decision context
|
||||
const mcpServerScope = isMcpTool(tool)
|
||||
? getMcpServerScopeFromToolName(tool.name)
|
||||
: null
|
||||
|
||||
void logOTelEvent('tool_result', {
|
||||
tool_name: sanitizeToolNameForAnalytics(tool.name),
|
||||
success: 'true',
|
||||
duration_ms: String(durationMs),
|
||||
...(Object.keys(toolParameters).length > 0 && {
|
||||
tool_parameters: jsonStringify(toolParameters),
|
||||
}),
|
||||
...(telemetryToolInput && { tool_input: telemetryToolInput }),
|
||||
tool_result_size_bytes: String(toolResultSizeBytes),
|
||||
...(decisionInfo && {
|
||||
decision_source: decisionInfo.source,
|
||||
decision_type: decisionInfo.decision,
|
||||
}),
|
||||
...(mcpServerScope && { mcp_server_scope: mcpServerScope }),
|
||||
})
|
||||
|
||||
// Run PostToolUse hooks
|
||||
let toolOutput = result.data
|
||||
const hookResults = []
|
||||
@@ -1590,12 +1459,6 @@ async function checkPermissionsAndCallTool(
|
||||
const durationMs = Date.now() - startTime
|
||||
addToToolDuration(durationMs)
|
||||
|
||||
endToolExecutionSpan({
|
||||
success: false,
|
||||
error: errorMessage(error),
|
||||
})
|
||||
endToolSpan()
|
||||
|
||||
// Handle MCP auth errors by updating the client status to 'needs-auth'
|
||||
// This updates the /mcp display to show the server needs re-authorization
|
||||
if (error instanceof McpAuthError) {
|
||||
@@ -1666,27 +1529,9 @@ async function checkPermissionsAndCallTool(
|
||||
mcpServerBaseUrl,
|
||||
),
|
||||
})
|
||||
// Log tool result error event for OTLP with tool parameters and decision context
|
||||
const mcpServerScope = isMcpTool(tool)
|
||||
? getMcpServerScopeFromToolName(tool.name)
|
||||
: null
|
||||
|
||||
void logOTelEvent('tool_result', {
|
||||
tool_name: sanitizeToolNameForAnalytics(tool.name),
|
||||
use_id: toolUseID,
|
||||
success: 'false',
|
||||
duration_ms: String(durationMs),
|
||||
error: errorMessage(error),
|
||||
...(Object.keys(toolParameters).length > 0 && {
|
||||
tool_parameters: jsonStringify(toolParameters),
|
||||
}),
|
||||
...(telemetryToolInput && { tool_input: telemetryToolInput }),
|
||||
...(decisionInfo && {
|
||||
decision_source: decisionInfo.source,
|
||||
decision_type: decisionInfo.decision,
|
||||
}),
|
||||
...(mcpServerScope && { mcp_server_scope: mcpServerScope }),
|
||||
})
|
||||
}
|
||||
const content = formatError(error)
|
||||
|
||||
|
||||
@@ -174,7 +174,7 @@ export async function connectVoiceStream(
|
||||
|
||||
const url = `${wsBaseUrl}${VOICE_STREAM_PATH}?${params.toString()}`
|
||||
|
||||
logForDebugging(`[voice_stream] Connecting to ${url}`)
|
||||
logForDebugging('[voice_stream] Connecting to voice stream websocket')
|
||||
|
||||
const headers: Record<string, string> = {
|
||||
Authorization: `Bearer ${tokens.accessToken}`,
|
||||
@@ -357,7 +357,7 @@ export async function connectVoiceStream(
|
||||
ws.on('message', (raw: Buffer | string) => {
|
||||
const text = raw.toString()
|
||||
logForDebugging(
|
||||
`[voice_stream] Message received (${String(text.length)} chars): ${text.slice(0, 200)}`,
|
||||
`[voice_stream] Message received (${String(text.length)} chars)`,
|
||||
)
|
||||
let msg: VoiceStreamMessage
|
||||
try {
|
||||
@@ -369,7 +369,9 @@ export async function connectVoiceStream(
|
||||
switch (msg.type) {
|
||||
case 'TranscriptText': {
|
||||
const transcript = msg.data
|
||||
logForDebugging(`[voice_stream] TranscriptText: "${transcript ?? ''}"`)
|
||||
logForDebugging(
|
||||
`[voice_stream] TranscriptText received (${String((transcript ?? '').length)} chars)`,
|
||||
)
|
||||
// Data arrived after CloseStream — disarm the no-data timer so
|
||||
// a slow-but-real flush isn't cut off. Only disarm once finalized
|
||||
// (CloseStream sent); pre-CloseStream data racing the deferred
|
||||
@@ -403,7 +405,7 @@ export async function connectVoiceStream(
|
||||
!prev.startsWith(next)
|
||||
) {
|
||||
logForDebugging(
|
||||
`[voice_stream] Auto-finalizing previous segment (new segment detected): "${lastTranscriptText}"`,
|
||||
'[voice_stream] Auto-finalizing previous segment (new segment detected)',
|
||||
)
|
||||
callbacks.onTranscript(lastTranscriptText, true)
|
||||
}
|
||||
@@ -416,7 +418,7 @@ export async function connectVoiceStream(
|
||||
}
|
||||
case 'TranscriptEndpoint': {
|
||||
logForDebugging(
|
||||
`[voice_stream] TranscriptEndpoint received, lastTranscriptText="${lastTranscriptText}"`,
|
||||
`[voice_stream] TranscriptEndpoint received (hasBufferedTranscript=${Boolean(lastTranscriptText)})`,
|
||||
)
|
||||
// The server signals the end of an utterance. Emit the last
|
||||
// TranscriptText as a final transcript so the caller can commit it.
|
||||
@@ -441,7 +443,9 @@ export async function connectVoiceStream(
|
||||
case 'TranscriptError': {
|
||||
const desc =
|
||||
msg.description ?? msg.error_code ?? 'unknown transcription error'
|
||||
logForDebugging(`[voice_stream] TranscriptError: ${desc}`)
|
||||
logForDebugging(
|
||||
`[voice_stream] TranscriptError received (${msg.error_code ?? 'unknown'})`,
|
||||
)
|
||||
if (!finalizing) {
|
||||
callbacks.onError(desc)
|
||||
}
|
||||
@@ -449,7 +453,7 @@ export async function connectVoiceStream(
|
||||
}
|
||||
case 'error': {
|
||||
const errorDetail = msg.message ?? jsonStringify(msg)
|
||||
logForDebugging(`[voice_stream] Server error: ${errorDetail}`)
|
||||
logForDebugging('[voice_stream] Server error received')
|
||||
if (!finalizing) {
|
||||
callbacks.onError(errorDetail)
|
||||
}
|
||||
|
||||
@@ -368,13 +368,10 @@ export async function setup(
|
||||
) // Start team memory sync watcher
|
||||
}
|
||||
}
|
||||
initSinks() // Attach error log sink and analytics compatibility stubs
|
||||
initSinks() // Attach the shared error-log sink
|
||||
|
||||
// Session-success-rate denominator. Emit immediately after the analytics
|
||||
// sink is attached — before any parsing, fetching, or I/O that could throw.
|
||||
// inc-3694 (P0 CHANGELOG crash) threw at checkForReleaseNotes below; every
|
||||
// event after this point was dead. This beacon is the earliest reliable
|
||||
// "process started" signal for release health monitoring.
|
||||
// Keep the startup compatibility event as early as possible, before any
|
||||
// parsing, fetching, or I/O that could throw.
|
||||
logEvent('tengu_started', {})
|
||||
|
||||
void prefetchApiKeyFromApiKeyHelperIfSafe(getIsNonInteractiveSession()) // Prefetch safely - only executes if trust already confirmed
|
||||
|
||||
@@ -1,197 +0,0 @@
|
||||
import { mkdir, readdir, readFile, unlink, writeFile } from 'fs/promises'
|
||||
import { join } from 'path'
|
||||
import { z } from 'zod/v4'
|
||||
import { getCwd } from '../../utils/cwd.js'
|
||||
import { logForDebugging } from '../../utils/debug.js'
|
||||
import { lazySchema } from '../../utils/lazySchema.js'
|
||||
import { jsonParse, jsonStringify } from '../../utils/slowOperations.js'
|
||||
import { type AgentMemoryScope, getAgentMemoryDir } from './agentMemory.js'
|
||||
|
||||
const SNAPSHOT_BASE = 'agent-memory-snapshots'
|
||||
const SNAPSHOT_JSON = 'snapshot.json'
|
||||
const SYNCED_JSON = '.snapshot-synced.json'
|
||||
|
||||
const snapshotMetaSchema = lazySchema(() =>
|
||||
z.object({
|
||||
updatedAt: z.string().min(1),
|
||||
}),
|
||||
)
|
||||
|
||||
const syncedMetaSchema = lazySchema(() =>
|
||||
z.object({
|
||||
syncedFrom: z.string().min(1),
|
||||
}),
|
||||
)
|
||||
type SyncedMeta = z.infer<ReturnType<typeof syncedMetaSchema>>
|
||||
|
||||
/**
|
||||
* Returns the path to the snapshot directory for an agent in the current project.
|
||||
* e.g., <cwd>/.claude/agent-memory-snapshots/<agentType>/
|
||||
*/
|
||||
export function getSnapshotDirForAgent(agentType: string): string {
|
||||
return join(getCwd(), '.claude', SNAPSHOT_BASE, agentType)
|
||||
}
|
||||
|
||||
function getSnapshotJsonPath(agentType: string): string {
|
||||
return join(getSnapshotDirForAgent(agentType), SNAPSHOT_JSON)
|
||||
}
|
||||
|
||||
function getSyncedJsonPath(agentType: string, scope: AgentMemoryScope): string {
|
||||
return join(getAgentMemoryDir(agentType, scope), SYNCED_JSON)
|
||||
}
|
||||
|
||||
async function readJsonFile<T>(
|
||||
path: string,
|
||||
schema: z.ZodType<T>,
|
||||
): Promise<T | null> {
|
||||
try {
|
||||
const content = await readFile(path, { encoding: 'utf-8' })
|
||||
const result = schema.safeParse(jsonParse(content))
|
||||
return result.success ? result.data : null
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
async function copySnapshotToLocal(
|
||||
agentType: string,
|
||||
scope: AgentMemoryScope,
|
||||
): Promise<void> {
|
||||
const snapshotMemDir = getSnapshotDirForAgent(agentType)
|
||||
const localMemDir = getAgentMemoryDir(agentType, scope)
|
||||
|
||||
await mkdir(localMemDir, { recursive: true })
|
||||
|
||||
try {
|
||||
const files = await readdir(snapshotMemDir, { withFileTypes: true })
|
||||
for (const dirent of files) {
|
||||
if (!dirent.isFile() || dirent.name === SNAPSHOT_JSON) continue
|
||||
const content = await readFile(join(snapshotMemDir, dirent.name), {
|
||||
encoding: 'utf-8',
|
||||
})
|
||||
await writeFile(join(localMemDir, dirent.name), content)
|
||||
}
|
||||
} catch (e) {
|
||||
logForDebugging(`Failed to copy snapshot to local agent memory: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
async function saveSyncedMeta(
|
||||
agentType: string,
|
||||
scope: AgentMemoryScope,
|
||||
snapshotTimestamp: string,
|
||||
): Promise<void> {
|
||||
const syncedPath = getSyncedJsonPath(agentType, scope)
|
||||
const localMemDir = getAgentMemoryDir(agentType, scope)
|
||||
await mkdir(localMemDir, { recursive: true })
|
||||
const meta: SyncedMeta = { syncedFrom: snapshotTimestamp }
|
||||
try {
|
||||
await writeFile(syncedPath, jsonStringify(meta))
|
||||
} catch (e) {
|
||||
logForDebugging(`Failed to save snapshot sync metadata: ${e}`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a snapshot exists and whether it's newer than what we last synced.
|
||||
*/
|
||||
export async function checkAgentMemorySnapshot(
|
||||
agentType: string,
|
||||
scope: AgentMemoryScope,
|
||||
): Promise<{
|
||||
action: 'none' | 'initialize' | 'prompt-update'
|
||||
snapshotTimestamp?: string
|
||||
}> {
|
||||
const snapshotMeta = await readJsonFile(
|
||||
getSnapshotJsonPath(agentType),
|
||||
snapshotMetaSchema(),
|
||||
)
|
||||
|
||||
if (!snapshotMeta) {
|
||||
return { action: 'none' }
|
||||
}
|
||||
|
||||
const localMemDir = getAgentMemoryDir(agentType, scope)
|
||||
|
||||
let hasLocalMemory = false
|
||||
try {
|
||||
const dirents = await readdir(localMemDir, { withFileTypes: true })
|
||||
hasLocalMemory = dirents.some(d => d.isFile() && d.name.endsWith('.md'))
|
||||
} catch {
|
||||
// Directory doesn't exist
|
||||
}
|
||||
|
||||
if (!hasLocalMemory) {
|
||||
return { action: 'initialize', snapshotTimestamp: snapshotMeta.updatedAt }
|
||||
}
|
||||
|
||||
const syncedMeta = await readJsonFile(
|
||||
getSyncedJsonPath(agentType, scope),
|
||||
syncedMetaSchema(),
|
||||
)
|
||||
|
||||
if (
|
||||
!syncedMeta ||
|
||||
new Date(snapshotMeta.updatedAt) > new Date(syncedMeta.syncedFrom)
|
||||
) {
|
||||
return {
|
||||
action: 'prompt-update',
|
||||
snapshotTimestamp: snapshotMeta.updatedAt,
|
||||
}
|
||||
}
|
||||
|
||||
return { action: 'none' }
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize local agent memory from a snapshot (first-time setup).
|
||||
*/
|
||||
export async function initializeFromSnapshot(
|
||||
agentType: string,
|
||||
scope: AgentMemoryScope,
|
||||
snapshotTimestamp: string,
|
||||
): Promise<void> {
|
||||
logForDebugging(
|
||||
`Initializing agent memory for ${agentType} from project snapshot`,
|
||||
)
|
||||
await copySnapshotToLocal(agentType, scope)
|
||||
await saveSyncedMeta(agentType, scope, snapshotTimestamp)
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace local agent memory with the snapshot.
|
||||
*/
|
||||
export async function replaceFromSnapshot(
|
||||
agentType: string,
|
||||
scope: AgentMemoryScope,
|
||||
snapshotTimestamp: string,
|
||||
): Promise<void> {
|
||||
logForDebugging(
|
||||
`Replacing agent memory for ${agentType} with project snapshot`,
|
||||
)
|
||||
// Remove existing .md files before copying to avoid orphans
|
||||
const localMemDir = getAgentMemoryDir(agentType, scope)
|
||||
try {
|
||||
const existing = await readdir(localMemDir, { withFileTypes: true })
|
||||
for (const dirent of existing) {
|
||||
if (dirent.isFile() && dirent.name.endsWith('.md')) {
|
||||
await unlink(join(localMemDir, dirent.name))
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Directory may not exist yet
|
||||
}
|
||||
await copySnapshotToLocal(agentType, scope)
|
||||
await saveSyncedMeta(agentType, scope, snapshotTimestamp)
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark the current snapshot as synced without changing local memory.
|
||||
*/
|
||||
export async function markSnapshotSynced(
|
||||
agentType: string,
|
||||
scope: AgentMemoryScope,
|
||||
snapshotTimestamp: string,
|
||||
): Promise<void> {
|
||||
await saveSyncedMeta(agentType, scope, snapshotTimestamp)
|
||||
}
|
||||
@@ -47,10 +47,6 @@ import {
|
||||
setAgentColor,
|
||||
} from './agentColorManager.js'
|
||||
import { type AgentMemoryScope, loadAgentMemoryPrompt } from './agentMemory.js'
|
||||
import {
|
||||
checkAgentMemorySnapshot,
|
||||
initializeFromSnapshot,
|
||||
} from './agentMemorySnapshot.js'
|
||||
import { getBuiltInAgents } from './builtInAgents.js'
|
||||
|
||||
// Type for MCP server specification in agent definitions
|
||||
@@ -255,41 +251,14 @@ export function filterAgentsByMcpRequirements(
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for and initialize agent memory from project snapshots.
|
||||
* For agents with memory enabled, copies snapshot to local if no local memory exists.
|
||||
* For agents with newer snapshots, logs a debug message (user prompt TODO).
|
||||
* Agent memory snapshot sync is disabled in this fork to avoid copying
|
||||
* project-scoped memory into persistent user/local agent memory.
|
||||
*/
|
||||
async function initializeAgentMemorySnapshots(
|
||||
agents: CustomAgentDefinition[],
|
||||
_agents: CustomAgentDefinition[],
|
||||
): Promise<void> {
|
||||
await Promise.all(
|
||||
agents.map(async agent => {
|
||||
if (agent.memory !== 'user') return
|
||||
const result = await checkAgentMemorySnapshot(
|
||||
agent.agentType,
|
||||
agent.memory,
|
||||
)
|
||||
switch (result.action) {
|
||||
case 'initialize':
|
||||
logForDebugging(
|
||||
`Initializing ${agent.agentType} memory from project snapshot`,
|
||||
)
|
||||
await initializeFromSnapshot(
|
||||
agent.agentType,
|
||||
agent.memory,
|
||||
result.snapshotTimestamp!,
|
||||
)
|
||||
break
|
||||
case 'prompt-update':
|
||||
agent.pendingSnapshotUpdate = {
|
||||
snapshotTimestamp: result.snapshotTimestamp!,
|
||||
}
|
||||
logForDebugging(
|
||||
`Newer snapshot available for ${agent.agentType} memory (snapshot: ${result.snapshotTimestamp})`,
|
||||
)
|
||||
break
|
||||
}
|
||||
}),
|
||||
'[loadAgentsDir] Agent memory snapshot sync is disabled in this build',
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -72,11 +72,6 @@ import {
|
||||
asSystemPrompt,
|
||||
type SystemPrompt,
|
||||
} from '../../utils/systemPromptType.js'
|
||||
import {
|
||||
isPerfettoTracingEnabled,
|
||||
registerAgent as registerPerfettoAgent,
|
||||
unregisterAgent as unregisterPerfettoAgent,
|
||||
} from '../../utils/telemetry/perfettoTracing.js'
|
||||
import type { ContentReplacementState } from '../../utils/toolResultStorage.js'
|
||||
import { createAgentId } from '../../utils/uuid.js'
|
||||
import { resolveAgentTools } from './agentToolUtils.js'
|
||||
@@ -352,12 +347,6 @@ export async function* runAgent({
|
||||
setAgentTranscriptSubdir(agentId, transcriptSubdir)
|
||||
}
|
||||
|
||||
// Register agent in Perfetto trace for hierarchy visualization
|
||||
if (isPerfettoTracingEnabled()) {
|
||||
const parentId = toolUseContext.agentId ?? getSessionId()
|
||||
registerPerfettoAgent(agentId, agentDefinition.agentType, parentId)
|
||||
}
|
||||
|
||||
// Log API calls path for subagents (ant-only)
|
||||
if (process.env.USER_TYPE === 'ant') {
|
||||
logForDebugging(
|
||||
@@ -828,8 +817,6 @@ export async function* runAgent({
|
||||
agentToolUseContext.readFileState.clear()
|
||||
// Release the cloned fork context messages
|
||||
initialMessages.length = 0
|
||||
// Release perfetto agent registry entry
|
||||
unregisterPerfettoAgent(agentId)
|
||||
// Release transcript subdir mapping
|
||||
clearAgentTranscriptSubdir(agentId)
|
||||
// Release this agent's todos entry. Without this, every subagent that
|
||||
|
||||
@@ -57,6 +57,47 @@ function debug(msg: string): void {
|
||||
logForDebugging(`[brief:upload] ${msg}`)
|
||||
}
|
||||
|
||||
function summarizeUploadError(error: unknown): string {
|
||||
const summary: Record<string, boolean | number | string> = {}
|
||||
|
||||
if (error instanceof Error) {
|
||||
summary.errorType = error.constructor.name
|
||||
summary.errorName = error.name
|
||||
summary.hasMessage = error.message.length > 0
|
||||
} else {
|
||||
summary.errorType = typeof error
|
||||
summary.hasValue = error !== undefined && error !== null
|
||||
}
|
||||
|
||||
if (axios.isAxiosError(error)) {
|
||||
summary.errorType = 'AxiosError'
|
||||
if (error.code) {
|
||||
summary.axiosCode = error.code
|
||||
}
|
||||
if (typeof error.response?.status === 'number') {
|
||||
summary.httpStatus = error.response.status
|
||||
}
|
||||
summary.hasResponseData = error.response?.data !== undefined
|
||||
}
|
||||
|
||||
return jsonStringify(summary)
|
||||
}
|
||||
|
||||
function summarizeUploadResponse(data: unknown): string {
|
||||
if (data === undefined) return 'undefined'
|
||||
if (data === null) return 'null'
|
||||
if (Array.isArray(data)) return `array(${data.length})`
|
||||
if (typeof data === 'object') {
|
||||
return jsonStringify({
|
||||
responseType: 'object',
|
||||
keys: Object.keys(data as Record<string, unknown>)
|
||||
.sort()
|
||||
.slice(0, 10),
|
||||
})
|
||||
}
|
||||
return typeof data
|
||||
}
|
||||
|
||||
/**
|
||||
* Base URL for uploads. Must match the host the token is valid for.
|
||||
*
|
||||
@@ -100,7 +141,9 @@ export async function uploadBriefAttachment(
|
||||
if (!ctx.replBridgeEnabled) return undefined
|
||||
|
||||
if (size > MAX_UPLOAD_BYTES) {
|
||||
debug(`skip ${fullPath}: ${size} bytes exceeds ${MAX_UPLOAD_BYTES} limit`)
|
||||
debug(
|
||||
`skip attachment upload: ${size} bytes exceeds ${MAX_UPLOAD_BYTES} limit`,
|
||||
)
|
||||
return undefined
|
||||
}
|
||||
|
||||
@@ -114,7 +157,7 @@ export async function uploadBriefAttachment(
|
||||
try {
|
||||
content = await readFile(fullPath)
|
||||
} catch (e) {
|
||||
debug(`read failed for ${fullPath}: ${e}`)
|
||||
debug(`read failed before upload: ${summarizeUploadError(e)}`)
|
||||
return undefined
|
||||
}
|
||||
|
||||
@@ -150,23 +193,23 @@ export async function uploadBriefAttachment(
|
||||
|
||||
if (response.status !== 201) {
|
||||
debug(
|
||||
`upload failed for ${fullPath}: status=${response.status} body=${jsonStringify(response.data).slice(0, 200)}`,
|
||||
`upload failed: status=${response.status} response=${summarizeUploadResponse(
|
||||
response.data,
|
||||
)}`,
|
||||
)
|
||||
return undefined
|
||||
}
|
||||
|
||||
const parsed = uploadResponseSchema().safeParse(response.data)
|
||||
if (!parsed.success) {
|
||||
debug(
|
||||
`unexpected response shape for ${fullPath}: ${parsed.error.message}`,
|
||||
)
|
||||
debug('unexpected upload response shape')
|
||||
return undefined
|
||||
}
|
||||
|
||||
debug(`uploaded ${fullPath} → ${parsed.data.file_uuid} (${size} bytes)`)
|
||||
debug(`uploaded attachment (${size} bytes)`)
|
||||
return parsed.data.file_uuid
|
||||
} catch (e) {
|
||||
debug(`upload threw for ${fullPath}: ${e}`)
|
||||
debug(`upload threw: ${summarizeUploadError(e)}`)
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,7 +29,6 @@ import {
|
||||
fileHistoryEnabled,
|
||||
fileHistoryTrackEdit,
|
||||
} from '../../utils/fileHistory.js'
|
||||
import { logFileOperation } from '../../utils/fileOperationAnalytics.js'
|
||||
import {
|
||||
type LineEndingType,
|
||||
readFileSyncWithMetadata,
|
||||
@@ -530,12 +529,6 @@ export const FileEditTool = buildTool({
|
||||
}
|
||||
countLinesChanged(patch)
|
||||
|
||||
logFileOperation({
|
||||
operation: 'edit',
|
||||
tool: 'FileEditTool',
|
||||
filePath: absoluteFilePath,
|
||||
})
|
||||
|
||||
logEvent('tengu_edit_string_lengths', {
|
||||
oldStringBytes: Buffer.byteLength(old_string, 'utf8'),
|
||||
newStringBytes: Buffer.byteLength(new_string, 'utf8'),
|
||||
|
||||
@@ -37,7 +37,6 @@ import {
|
||||
getFileModificationTimeAsync,
|
||||
suggestPathUnderCwd,
|
||||
} from '../../utils/file.js'
|
||||
import { logFileOperation } from '../../utils/fileOperationAnalytics.js'
|
||||
import { formatFileSize } from '../../utils/format.js'
|
||||
import { getFsImplementation } from '../../utils/fsOperations.js'
|
||||
import {
|
||||
@@ -852,13 +851,6 @@ async function callInner(
|
||||
file: { filePath: file_path, cells },
|
||||
}
|
||||
|
||||
logFileOperation({
|
||||
operation: 'read',
|
||||
tool: 'FileReadTool',
|
||||
filePath: fullFilePath,
|
||||
content: cellsJson,
|
||||
})
|
||||
|
||||
return { data }
|
||||
}
|
||||
|
||||
@@ -869,13 +861,6 @@ async function callInner(
|
||||
const data = await readImageWithTokenBudget(resolvedFilePath, maxTokens)
|
||||
context.nestedMemoryAttachmentTriggers?.add(fullFilePath)
|
||||
|
||||
logFileOperation({
|
||||
operation: 'read',
|
||||
tool: 'FileReadTool',
|
||||
filePath: fullFilePath,
|
||||
content: data.file.base64,
|
||||
})
|
||||
|
||||
const metadataText = data.file.dimensions
|
||||
? createImageMetadataText(data.file.dimensions)
|
||||
: null
|
||||
@@ -907,12 +892,6 @@ async function callInner(
|
||||
fileSize: extractResult.data.file.originalSize,
|
||||
hasPageRange: true,
|
||||
})
|
||||
logFileOperation({
|
||||
operation: 'read',
|
||||
tool: 'FileReadTool',
|
||||
filePath: fullFilePath,
|
||||
content: `PDF pages ${pages}`,
|
||||
})
|
||||
const entries = await readdir(extractResult.data.file.outputDir)
|
||||
const imageFiles = entries.filter(f => f.endsWith('.jpg')).sort()
|
||||
const imageBlocks = await Promise.all(
|
||||
@@ -989,13 +968,6 @@ async function callInner(
|
||||
throw new Error(readResult.error.message)
|
||||
}
|
||||
const pdfData = readResult.data
|
||||
logFileOperation({
|
||||
operation: 'read',
|
||||
tool: 'FileReadTool',
|
||||
filePath: fullFilePath,
|
||||
content: pdfData.file.base64,
|
||||
})
|
||||
|
||||
return {
|
||||
data: pdfData,
|
||||
newMessages: [
|
||||
@@ -1057,13 +1029,6 @@ async function callInner(
|
||||
memoryFileMtimes.set(data, mtimeMs)
|
||||
}
|
||||
|
||||
logFileOperation({
|
||||
operation: 'read',
|
||||
tool: 'FileReadTool',
|
||||
filePath: fullFilePath,
|
||||
content,
|
||||
})
|
||||
|
||||
const sessionFileType = detectSessionFileType(fullFilePath)
|
||||
const analyticsExt = getFileExtensionForAnalytics(fullFilePath)
|
||||
logEvent('tengu_session_file_read', {
|
||||
|
||||
@@ -24,7 +24,6 @@ import {
|
||||
fileHistoryEnabled,
|
||||
fileHistoryTrackEdit,
|
||||
} from '../../utils/fileHistory.js'
|
||||
import { logFileOperation } from '../../utils/fileOperationAnalytics.js'
|
||||
import { readFileSyncWithMetadata } from '../../utils/fileRead.js'
|
||||
import { getFsImplementation } from '../../utils/fsOperations.js'
|
||||
import {
|
||||
@@ -380,13 +379,6 @@ export const FileWriteTool = buildTool({
|
||||
// Track lines added and removed for file updates, right before yielding result
|
||||
countLinesChanged(patch)
|
||||
|
||||
logFileOperation({
|
||||
operation: 'write',
|
||||
tool: 'FileWriteTool',
|
||||
filePath: fullFilePath,
|
||||
type: 'update',
|
||||
})
|
||||
|
||||
return {
|
||||
data,
|
||||
}
|
||||
@@ -404,13 +396,6 @@ export const FileWriteTool = buildTool({
|
||||
// For creation of new files, count all lines as additions, right before yielding the result
|
||||
countLinesChanged([], content)
|
||||
|
||||
logFileOperation({
|
||||
operation: 'write',
|
||||
tool: 'FileWriteTool',
|
||||
filePath: fullFilePath,
|
||||
type: 'create',
|
||||
})
|
||||
|
||||
return {
|
||||
data,
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ import type {
|
||||
ScopedMcpServerConfig,
|
||||
} from '../../services/mcp/types.js'
|
||||
import type { Tool } from '../../Tool.js'
|
||||
import { errorMessage } from '../../utils/errors.js'
|
||||
import { lazySchema } from '../../utils/lazySchema.js'
|
||||
import { logMCPDebug, logMCPError } from '../../utils/log.js'
|
||||
import type { PermissionDecision } from '../../utils/permissions/PermissionResult.js'
|
||||
@@ -29,9 +28,11 @@ export type McpAuthOutput = {
|
||||
authUrl?: string
|
||||
}
|
||||
|
||||
function getConfigUrl(config: ScopedMcpServerConfig): string | undefined {
|
||||
if ('url' in config) return config.url
|
||||
return undefined
|
||||
function summarizeMcpAuthToolError(error: unknown): string {
|
||||
if (error instanceof Error) {
|
||||
return `${error.name} (hasMessage=${error.message.length > 0})`
|
||||
}
|
||||
return `non-Error (${typeof error})`
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -50,12 +51,10 @@ export function createMcpAuthTool(
|
||||
serverName: string,
|
||||
config: ScopedMcpServerConfig,
|
||||
): Tool<InputSchema, McpAuthOutput> {
|
||||
const url = getConfigUrl(config)
|
||||
const transport = config.type ?? 'stdio'
|
||||
const location = url ? `${transport} at ${url}` : transport
|
||||
|
||||
const description =
|
||||
`The \`${serverName}\` MCP server (${location}) is installed but requires authentication. ` +
|
||||
`The \`${serverName}\` MCP server (${transport}) is installed but requires authentication. ` +
|
||||
`Call this tool to start the OAuth flow — you'll receive an authorization URL to share with the user. ` +
|
||||
`Once the user completes authorization in their browser, the server's real tools will become available automatically.`
|
||||
|
||||
@@ -167,7 +166,9 @@ export function createMcpAuthTool(
|
||||
.catch(err => {
|
||||
logMCPError(
|
||||
serverName,
|
||||
`OAuth flow failed after tool-triggered start: ${errorMessage(err)}`,
|
||||
`OAuth flow failed after tool-triggered start: ${summarizeMcpAuthToolError(
|
||||
err,
|
||||
)}`,
|
||||
)
|
||||
})
|
||||
|
||||
@@ -199,7 +200,7 @@ export function createMcpAuthTool(
|
||||
return {
|
||||
data: {
|
||||
status: 'error' as const,
|
||||
message: `Failed to start OAuth flow for ${serverName}: ${errorMessage(err)}. Ask the user to run /mcp and authenticate manually.`,
|
||||
message: `Failed to start OAuth flow for ${serverName}. Ask the user to run /mcp and authenticate manually.`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { z } from 'zod/v4'
|
||||
import { getSessionId } from '../../bootstrap/state.js'
|
||||
import { logEvent } from '../../services/analytics/index.js'
|
||||
import type { AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS } from '../../services/analytics/metadata.js'
|
||||
import type { Tool } from '../../Tool.js'
|
||||
@@ -159,7 +158,6 @@ export const TeamCreateTool: Tool<InputSchema, Output> = buildTool({
|
||||
description: _description,
|
||||
createdAt: Date.now(),
|
||||
leadAgentId,
|
||||
leadSessionId: getSessionId(), // Store actual session ID for team discovery
|
||||
members: [
|
||||
{
|
||||
agentId: leadAgentId,
|
||||
@@ -169,7 +167,6 @@ export const TeamCreateTool: Tool<InputSchema, Output> = buildTool({
|
||||
joinedAt: Date.now(),
|
||||
tmuxPaneId: '',
|
||||
cwd: getCwd(),
|
||||
subscriptions: [],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
@@ -68,18 +68,8 @@ const URL_CACHE = new LRUCache<string, CacheEntry>({
|
||||
ttl: CACHE_TTL_MS,
|
||||
})
|
||||
|
||||
// Separate cache for preflight domain checks. URL_CACHE is URL-keyed, so
|
||||
// fetching two paths on the same domain triggers two identical preflight
|
||||
// HTTP round-trips to api.anthropic.com. This hostname-keyed cache avoids
|
||||
// that. Only 'allowed' is cached — blocked/failed re-check on next attempt.
|
||||
const DOMAIN_CHECK_CACHE = new LRUCache<string, true>({
|
||||
max: 128,
|
||||
ttl: 5 * 60 * 1000, // 5 minutes — shorter than URL_CACHE TTL
|
||||
})
|
||||
|
||||
export function clearWebFetchCache(): void {
|
||||
URL_CACHE.clear()
|
||||
DOMAIN_CHECK_CACHE.clear()
|
||||
}
|
||||
|
||||
// Lazy singleton — defers the turndown → @mixmark-io/domino import (~1.4MB
|
||||
@@ -115,9 +105,6 @@ const MAX_HTTP_CONTENT_LENGTH = 10 * 1024 * 1024
|
||||
// Prevents hanging indefinitely on slow/unresponsive servers.
|
||||
const FETCH_TIMEOUT_MS = 60_000
|
||||
|
||||
// Timeout for the domain blocklist preflight check (10 seconds).
|
||||
const DOMAIN_CHECK_TIMEOUT_MS = 10_000
|
||||
|
||||
// Cap same-host redirect hops. Without this a malicious server can return
|
||||
// a redirect loop (/a → /b → /a …) and the per-request FETCH_TIMEOUT_MS
|
||||
// resets on every hop, hanging the tool until user interrupt. 10 matches
|
||||
@@ -174,33 +161,13 @@ type DomainCheckResult =
|
||||
| { status: 'check_failed'; error: Error }
|
||||
|
||||
export async function checkDomainBlocklist(
|
||||
domain: string,
|
||||
_domain: string,
|
||||
): Promise<DomainCheckResult> {
|
||||
if (DOMAIN_CHECK_CACHE.has(domain)) {
|
||||
// Remote domain-blocklist check removed: no user domain names are sent to
|
||||
// external servers. Users explicitly approve each domain via the tool
|
||||
// permission dialog, which is the primary security boundary.
|
||||
return { status: 'allowed' }
|
||||
}
|
||||
try {
|
||||
const response = await axios.get(
|
||||
`https://api.anthropic.com/api/web/domain_info?domain=${encodeURIComponent(domain)}`,
|
||||
{ timeout: DOMAIN_CHECK_TIMEOUT_MS },
|
||||
)
|
||||
if (response.status === 200) {
|
||||
if (response.data.can_fetch === true) {
|
||||
DOMAIN_CHECK_CACHE.set(domain, true)
|
||||
return { status: 'allowed' }
|
||||
}
|
||||
return { status: 'blocked' }
|
||||
}
|
||||
// Non-200 status but didn't throw
|
||||
return {
|
||||
status: 'check_failed',
|
||||
error: new Error(`Domain check returned status ${response.status}`),
|
||||
}
|
||||
} catch (e) {
|
||||
logError(e)
|
||||
return { status: 'check_failed', error: e as Error }
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a redirect is safe to follow
|
||||
|
||||
@@ -497,13 +497,11 @@ async function handleSpawnSplitPane(
|
||||
name: sanitizedName,
|
||||
agentType: agent_type,
|
||||
model,
|
||||
prompt,
|
||||
color: teammateColor,
|
||||
planModeRequired: plan_mode_required,
|
||||
joinedAt: Date.now(),
|
||||
tmuxPaneId: paneId,
|
||||
cwd: workingDir,
|
||||
subscriptions: [],
|
||||
backendType: detectionResult.backend.type,
|
||||
})
|
||||
await writeTeamFileAsync(teamName, teamFile)
|
||||
@@ -711,13 +709,11 @@ async function handleSpawnSeparateWindow(
|
||||
name: sanitizedName,
|
||||
agentType: agent_type,
|
||||
model,
|
||||
prompt,
|
||||
color: teammateColor,
|
||||
planModeRequired: plan_mode_required,
|
||||
joinedAt: Date.now(),
|
||||
tmuxPaneId: paneId,
|
||||
cwd: workingDir,
|
||||
subscriptions: [],
|
||||
backendType: 'tmux', // This handler always uses tmux directly
|
||||
})
|
||||
await writeTeamFileAsync(teamName, teamFile)
|
||||
@@ -997,13 +993,11 @@ async function handleSpawnInProcess(
|
||||
name: sanitizedName,
|
||||
agentType: agent_type,
|
||||
model,
|
||||
prompt,
|
||||
color: teammateColor,
|
||||
planModeRequired: plan_mode_required,
|
||||
joinedAt: Date.now(),
|
||||
tmuxPaneId: 'in-process',
|
||||
cwd: getCwd(),
|
||||
subscriptions: [],
|
||||
backendType: 'in-process',
|
||||
})
|
||||
await writeTeamFileAsync(teamName, teamFile)
|
||||
|
||||
@@ -1,223 +0,0 @@
|
||||
// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
|
||||
// versions:
|
||||
// protoc-gen-ts_proto v2.6.1
|
||||
// protoc unknown
|
||||
// source: events_mono/growthbook/v1/growthbook_experiment_event.proto
|
||||
|
||||
/* eslint-disable */
|
||||
import { Timestamp } from '../../../google/protobuf/timestamp.js'
|
||||
import { PublicApiAuth } from '../../common/v1/auth.js'
|
||||
|
||||
/**
|
||||
* GrowthBook experiment assignment event
|
||||
* This event tracks when a user is exposed to an experiment variant
|
||||
* See: https://docs.growthbook.io/guide/bigquery
|
||||
*/
|
||||
export interface GrowthbookExperimentEvent {
|
||||
/** Unique event identifier (for deduplication) */
|
||||
event_id?: string | undefined
|
||||
/** When user was exposed to experiment (maps to GrowthBook's timestamp column) */
|
||||
timestamp?: Date | undefined
|
||||
/** Experiment tracking key (maps to GrowthBook's experiment_id column) */
|
||||
experiment_id?: string | undefined
|
||||
/** Variation index: 0=control, 1+=variants (maps to GrowthBook's variation_id column) */
|
||||
variation_id?: number | undefined
|
||||
/** Environment where assignment occurred */
|
||||
environment?: string | undefined
|
||||
/** User attributes at time of assignment */
|
||||
user_attributes?: string | undefined
|
||||
/** Experiment metadata */
|
||||
experiment_metadata?: string | undefined
|
||||
/** Device identifier for the client */
|
||||
device_id?: string | undefined
|
||||
/** Authentication context automatically injected by the API */
|
||||
auth?: PublicApiAuth | undefined
|
||||
/** Session identifier for tracking user sessions */
|
||||
session_id?: string | undefined
|
||||
/** Anonymous identifier for unauthenticated users */
|
||||
anonymous_id?: string | undefined
|
||||
/** Event metadata variables (automatically populated by internal-tools-common event_logging library) */
|
||||
event_metadata_vars?: string | undefined
|
||||
}
|
||||
|
||||
function createBaseGrowthbookExperimentEvent(): GrowthbookExperimentEvent {
|
||||
return {
|
||||
event_id: '',
|
||||
timestamp: undefined,
|
||||
experiment_id: '',
|
||||
variation_id: 0,
|
||||
environment: '',
|
||||
user_attributes: '',
|
||||
experiment_metadata: '',
|
||||
device_id: '',
|
||||
auth: undefined,
|
||||
session_id: '',
|
||||
anonymous_id: '',
|
||||
event_metadata_vars: '',
|
||||
}
|
||||
}
|
||||
|
||||
export const GrowthbookExperimentEvent: MessageFns<GrowthbookExperimentEvent> =
|
||||
{
|
||||
fromJSON(object: any): GrowthbookExperimentEvent {
|
||||
return {
|
||||
event_id: isSet(object.event_id)
|
||||
? globalThis.String(object.event_id)
|
||||
: '',
|
||||
timestamp: isSet(object.timestamp)
|
||||
? fromJsonTimestamp(object.timestamp)
|
||||
: undefined,
|
||||
experiment_id: isSet(object.experiment_id)
|
||||
? globalThis.String(object.experiment_id)
|
||||
: '',
|
||||
variation_id: isSet(object.variation_id)
|
||||
? globalThis.Number(object.variation_id)
|
||||
: 0,
|
||||
environment: isSet(object.environment)
|
||||
? globalThis.String(object.environment)
|
||||
: '',
|
||||
user_attributes: isSet(object.user_attributes)
|
||||
? globalThis.String(object.user_attributes)
|
||||
: '',
|
||||
experiment_metadata: isSet(object.experiment_metadata)
|
||||
? globalThis.String(object.experiment_metadata)
|
||||
: '',
|
||||
device_id: isSet(object.device_id)
|
||||
? globalThis.String(object.device_id)
|
||||
: '',
|
||||
auth: isSet(object.auth)
|
||||
? PublicApiAuth.fromJSON(object.auth)
|
||||
: undefined,
|
||||
session_id: isSet(object.session_id)
|
||||
? globalThis.String(object.session_id)
|
||||
: '',
|
||||
anonymous_id: isSet(object.anonymous_id)
|
||||
? globalThis.String(object.anonymous_id)
|
||||
: '',
|
||||
event_metadata_vars: isSet(object.event_metadata_vars)
|
||||
? globalThis.String(object.event_metadata_vars)
|
||||
: '',
|
||||
}
|
||||
},
|
||||
|
||||
toJSON(message: GrowthbookExperimentEvent): unknown {
|
||||
const obj: any = {}
|
||||
if (message.event_id !== undefined) {
|
||||
obj.event_id = message.event_id
|
||||
}
|
||||
if (message.timestamp !== undefined) {
|
||||
obj.timestamp = message.timestamp.toISOString()
|
||||
}
|
||||
if (message.experiment_id !== undefined) {
|
||||
obj.experiment_id = message.experiment_id
|
||||
}
|
||||
if (message.variation_id !== undefined) {
|
||||
obj.variation_id = Math.round(message.variation_id)
|
||||
}
|
||||
if (message.environment !== undefined) {
|
||||
obj.environment = message.environment
|
||||
}
|
||||
if (message.user_attributes !== undefined) {
|
||||
obj.user_attributes = message.user_attributes
|
||||
}
|
||||
if (message.experiment_metadata !== undefined) {
|
||||
obj.experiment_metadata = message.experiment_metadata
|
||||
}
|
||||
if (message.device_id !== undefined) {
|
||||
obj.device_id = message.device_id
|
||||
}
|
||||
if (message.auth !== undefined) {
|
||||
obj.auth = PublicApiAuth.toJSON(message.auth)
|
||||
}
|
||||
if (message.session_id !== undefined) {
|
||||
obj.session_id = message.session_id
|
||||
}
|
||||
if (message.anonymous_id !== undefined) {
|
||||
obj.anonymous_id = message.anonymous_id
|
||||
}
|
||||
if (message.event_metadata_vars !== undefined) {
|
||||
obj.event_metadata_vars = message.event_metadata_vars
|
||||
}
|
||||
return obj
|
||||
},
|
||||
|
||||
create<I extends Exact<DeepPartial<GrowthbookExperimentEvent>, I>>(
|
||||
base?: I,
|
||||
): GrowthbookExperimentEvent {
|
||||
return GrowthbookExperimentEvent.fromPartial(base ?? ({} as any))
|
||||
},
|
||||
fromPartial<I extends Exact<DeepPartial<GrowthbookExperimentEvent>, I>>(
|
||||
object: I,
|
||||
): GrowthbookExperimentEvent {
|
||||
const message = createBaseGrowthbookExperimentEvent()
|
||||
message.event_id = object.event_id ?? ''
|
||||
message.timestamp = object.timestamp ?? undefined
|
||||
message.experiment_id = object.experiment_id ?? ''
|
||||
message.variation_id = object.variation_id ?? 0
|
||||
message.environment = object.environment ?? ''
|
||||
message.user_attributes = object.user_attributes ?? ''
|
||||
message.experiment_metadata = object.experiment_metadata ?? ''
|
||||
message.device_id = object.device_id ?? ''
|
||||
message.auth =
|
||||
object.auth !== undefined && object.auth !== null
|
||||
? PublicApiAuth.fromPartial(object.auth)
|
||||
: undefined
|
||||
message.session_id = object.session_id ?? ''
|
||||
message.anonymous_id = object.anonymous_id ?? ''
|
||||
message.event_metadata_vars = object.event_metadata_vars ?? ''
|
||||
return message
|
||||
},
|
||||
}
|
||||
|
||||
type Builtin =
|
||||
| Date
|
||||
| Function
|
||||
| Uint8Array
|
||||
| string
|
||||
| number
|
||||
| boolean
|
||||
| undefined
|
||||
|
||||
type DeepPartial<T> = T extends Builtin
|
||||
? T
|
||||
: T extends globalThis.Array<infer U>
|
||||
? globalThis.Array<DeepPartial<U>>
|
||||
: T extends ReadonlyArray<infer U>
|
||||
? ReadonlyArray<DeepPartial<U>>
|
||||
: T extends {}
|
||||
? { [K in keyof T]?: DeepPartial<T[K]> }
|
||||
: Partial<T>
|
||||
|
||||
type KeysOfUnion<T> = T extends T ? keyof T : never
|
||||
type Exact<P, I extends P> = P extends Builtin
|
||||
? P
|
||||
: P & { [K in keyof P]: Exact<P[K], I[K]> } & {
|
||||
[K in Exclude<keyof I, KeysOfUnion<P>>]: never
|
||||
}
|
||||
|
||||
function fromTimestamp(t: Timestamp): Date {
|
||||
let millis = (t.seconds || 0) * 1_000
|
||||
millis += (t.nanos || 0) / 1_000_000
|
||||
return new globalThis.Date(millis)
|
||||
}
|
||||
|
||||
function fromJsonTimestamp(o: any): Date {
|
||||
if (o instanceof globalThis.Date) {
|
||||
return o
|
||||
} else if (typeof o === 'string') {
|
||||
return new globalThis.Date(o)
|
||||
} else {
|
||||
return fromTimestamp(Timestamp.fromJSON(o))
|
||||
}
|
||||
}
|
||||
|
||||
function isSet(value: any): boolean {
|
||||
return value !== null && value !== undefined
|
||||
}
|
||||
|
||||
interface MessageFns<T> {
|
||||
fromJSON(object: any): T
|
||||
toJSON(message: T): unknown
|
||||
create<I extends Exact<DeepPartial<T>, I>>(base?: I): T
|
||||
fromPartial<I extends Exact<DeepPartial<T>, I>>(object: I): T
|
||||
}
|
||||
@@ -40,7 +40,7 @@ export function maybePersistTokenForSubprocesses(
|
||||
mkdirSync(CCR_TOKEN_DIR, { recursive: true, mode: 0o700 })
|
||||
// eslint-disable-next-line custom-rules/no-sync-fs -- one-shot startup write in CCR, caller is sync
|
||||
writeFileSync(path, token, { encoding: 'utf8', mode: 0o600 })
|
||||
logForDebugging(`Persisted ${tokenName} to ${path} for subprocess access`)
|
||||
logForDebugging(`Persisted ${tokenName} for subprocess access`)
|
||||
} catch (error) {
|
||||
logForDebugging(
|
||||
`Failed to persist ${tokenName} to disk (non-fatal): ${errorMessage(error)}`,
|
||||
@@ -65,7 +65,7 @@ export function readTokenFromWellKnownFile(
|
||||
if (!token) {
|
||||
return null
|
||||
}
|
||||
logForDebugging(`Read ${tokenName} from well-known file ${path}`)
|
||||
logForDebugging(`Read ${tokenName} from well-known file`)
|
||||
return token
|
||||
} catch (error) {
|
||||
// ENOENT is the expected outcome outside CCR — stay silent. Anything
|
||||
@@ -73,7 +73,7 @@ export function readTokenFromWellKnownFile(
|
||||
// debug log so subprocess auth failures aren't mysterious.
|
||||
if (!isENOENT(error)) {
|
||||
logForDebugging(
|
||||
`Failed to read ${tokenName} from ${path}: ${errorMessage(error)}`,
|
||||
`Failed to read ${tokenName} from well-known file: ${errorMessage(error)}`,
|
||||
{ level: 'debug' },
|
||||
)
|
||||
}
|
||||
@@ -124,7 +124,7 @@ function getCredentialFromFd({
|
||||
const fd = parseInt(fdEnv, 10)
|
||||
if (Number.isNaN(fd)) {
|
||||
logForDebugging(
|
||||
`${envVar} must be a valid file descriptor number, got: ${fdEnv}`,
|
||||
`${envVar} must be a valid file descriptor number`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
setCached(null)
|
||||
@@ -148,13 +148,13 @@ function getCredentialFromFd({
|
||||
setCached(null)
|
||||
return null
|
||||
}
|
||||
logForDebugging(`Successfully read ${label} from file descriptor ${fd}`)
|
||||
logForDebugging(`Successfully read ${label} from file descriptor`)
|
||||
setCached(token)
|
||||
maybePersistTokenForSubprocesses(wellKnownPath, token, label)
|
||||
return token
|
||||
} catch (error) {
|
||||
logForDebugging(
|
||||
`Failed to read ${label} from file descriptor ${fd}: ${errorMessage(error)}`,
|
||||
`Failed to read ${label} from file descriptor: ${errorMessage(error)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
// FD env var was set but read failed — typically a subprocess that
|
||||
|
||||
@@ -6,14 +6,11 @@ import {
|
||||
} from '@ant/claude-for-chrome-mcp'
|
||||
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
|
||||
import { format } from 'util'
|
||||
import { shutdownDatadog } from '../../services/analytics/datadog.js'
|
||||
import { shutdown1PEventLogging } from '../../services/analytics/firstPartyEventLogger.js'
|
||||
import { getFeatureValue_CACHED_MAY_BE_STALE } from '../../services/analytics/growthbook.js'
|
||||
import {
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
logEvent,
|
||||
} from '../../services/analytics/index.js'
|
||||
import { initializeAnalyticsSink } from '../../services/analytics/sink.js'
|
||||
import { getClaudeAIOAuthTokens } from '../auth.js'
|
||||
import { enableConfigs, getGlobalConfig, saveGlobalConfig } from '../config.js'
|
||||
import { logForDebugging } from '../debug.js'
|
||||
@@ -225,7 +222,7 @@ export function createChromeContext(
|
||||
} = {}
|
||||
if (metadata) {
|
||||
for (const [key, value] of Object.entries(metadata)) {
|
||||
// Rename 'status' to 'bridge_status' to avoid Datadog's reserved field
|
||||
// Keep the status field namespaced to avoid downstream collisions.
|
||||
const safeKey = key === 'status' ? 'bridge_status' : key
|
||||
if (typeof value === 'boolean' || typeof value === 'number') {
|
||||
safeMetadata[safeKey] = value
|
||||
@@ -247,22 +244,18 @@ export function createChromeContext(
|
||||
|
||||
export async function runClaudeInChromeMcpServer(): Promise<void> {
|
||||
enableConfigs()
|
||||
initializeAnalyticsSink()
|
||||
const context = createChromeContext()
|
||||
|
||||
const server = createClaudeForChromeMcpServer(context)
|
||||
const transport = new StdioServerTransport()
|
||||
|
||||
// Exit when parent process dies (stdin pipe closes).
|
||||
// Flush analytics before exiting so final-batch events (e.g. disconnect) aren't lost.
|
||||
let exiting = false
|
||||
const shutdownAndExit = async (): Promise<void> => {
|
||||
const shutdownAndExit = (): void => {
|
||||
if (exiting) {
|
||||
return
|
||||
}
|
||||
exiting = true
|
||||
await shutdown1PEventLogging()
|
||||
await shutdownDatadog()
|
||||
// eslint-disable-next-line custom-rules/no-process-exit
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
@@ -83,53 +83,22 @@ function convertToOpenAIMessage(message: Message): OpenAIMessage {
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a request to OpenAI Codex API
|
||||
* fetchCodexResponse is disabled: sending conversation content to
|
||||
* api.openai.com would leak user data to a third-party service.
|
||||
* This function is retained as a stub to avoid breaking any call sites.
|
||||
*/
|
||||
export async function fetchCodexResponse(
|
||||
messages: Message[],
|
||||
model: string,
|
||||
options: {
|
||||
_messages: Message[],
|
||||
_model: string,
|
||||
_options: {
|
||||
apiKey?: string
|
||||
baseUrl?: string
|
||||
stream?: boolean
|
||||
} = {}
|
||||
): Promise<OpenAIResponse> {
|
||||
const { apiKey, baseUrl = 'https://api.openai.com/v1', stream = false } = options
|
||||
|
||||
if (!apiKey) {
|
||||
throw new Error('OpenAI API key is required for Codex requests')
|
||||
}
|
||||
|
||||
const openAIMessages = messages.map(convertToOpenAIMessage)
|
||||
|
||||
const requestBody = {
|
||||
model,
|
||||
messages: openAIMessages,
|
||||
stream,
|
||||
temperature: 0.7,
|
||||
max_tokens: 4096,
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${baseUrl}/chat/completions`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': `Bearer ${apiKey}`,
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`)
|
||||
}
|
||||
|
||||
const data = await response.json() as OpenAIResponse
|
||||
return data
|
||||
} catch (error) {
|
||||
logError(error)
|
||||
throw error
|
||||
}
|
||||
throw new Error(
|
||||
'OpenAI Codex API calls are disabled for privacy. External data forwarding has been removed.',
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -6,9 +6,6 @@ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
|
||||
import { ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js'
|
||||
import { homedir } from 'os'
|
||||
|
||||
import { shutdownDatadog } from '../../services/analytics/datadog.js'
|
||||
import { shutdown1PEventLogging } from '../../services/analytics/firstPartyEventLogger.js'
|
||||
import { initializeAnalyticsSink } from '../../services/analytics/sink.js'
|
||||
import { enableConfigs } from '../config.js'
|
||||
import { logForDebugging } from '../debug.js'
|
||||
import { filterAppsForDescription } from './appNames.js'
|
||||
@@ -80,20 +77,18 @@ export async function createComputerUseMcpServerForCli(): Promise<
|
||||
/**
|
||||
* Subprocess entrypoint for `--computer-use-mcp`. Mirror of
|
||||
* `runClaudeInChromeMcpServer` — stdio transport, exit on stdin close,
|
||||
* flush analytics before exit.
|
||||
* and exit promptly when the parent process closes stdin.
|
||||
*/
|
||||
export async function runComputerUseMcpServer(): Promise<void> {
|
||||
enableConfigs()
|
||||
initializeAnalyticsSink()
|
||||
|
||||
const server = await createComputerUseMcpServerForCli()
|
||||
const transport = new StdioServerTransport()
|
||||
|
||||
let exiting = false
|
||||
const shutdownAndExit = async (): Promise<void> => {
|
||||
const shutdownAndExit = (): void => {
|
||||
if (exiting) return
|
||||
exiting = true
|
||||
await Promise.all([shutdown1PEventLogging(), shutdownDatadog()])
|
||||
// eslint-disable-next-line custom-rules/no-process-exit
|
||||
process.exit(0)
|
||||
}
|
||||
|
||||
@@ -17,12 +17,19 @@ import {
|
||||
filterExistingPaths,
|
||||
getKnownPathsForRepo,
|
||||
} from '../githubRepoPathMapping.js'
|
||||
import { jsonStringify } from '../slowOperations.js'
|
||||
import { readLastFetchTime } from './banner.js'
|
||||
import { parseDeepLink } from './parseDeepLink.js'
|
||||
import { MACOS_BUNDLE_ID } from './registerProtocol.js'
|
||||
import { launchInTerminal } from './terminalLauncher.js'
|
||||
|
||||
function summarizeDeepLinkAction(action: {
|
||||
query?: string
|
||||
cwd?: string
|
||||
repo?: string
|
||||
}): string {
|
||||
return `hasQuery=${Boolean(action.query)} hasCwd=${Boolean(action.cwd)} hasRepo=${Boolean(action.repo)}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle an incoming deep link URI.
|
||||
*
|
||||
@@ -34,7 +41,7 @@ import { launchInTerminal } from './terminalLauncher.js'
|
||||
* @returns exit code (0 = success)
|
||||
*/
|
||||
export async function handleDeepLinkUri(uri: string): Promise<number> {
|
||||
logForDebugging(`Handling deep link URI: ${uri}`)
|
||||
logForDebugging('Handling deep link URI')
|
||||
|
||||
let action
|
||||
try {
|
||||
@@ -46,7 +53,7 @@ export async function handleDeepLinkUri(uri: string): Promise<number> {
|
||||
return 1
|
||||
}
|
||||
|
||||
logForDebugging(`Parsed deep link action: ${jsonStringify(action)}`)
|
||||
logForDebugging(`Parsed deep link action (${summarizeDeepLinkAction(action)})`)
|
||||
|
||||
// Always the running executable — no PATH lookup. The OS launched us via
|
||||
// an absolute path (bundle symlink / .desktop Exec= / registry command)
|
||||
@@ -125,11 +132,11 @@ async function resolveCwd(action: {
|
||||
const known = getKnownPathsForRepo(action.repo)
|
||||
const existing = await filterExistingPaths(known)
|
||||
if (existing[0]) {
|
||||
logForDebugging(`Resolved repo ${action.repo} → ${existing[0]}`)
|
||||
logForDebugging('Resolved repo deep link to local clone')
|
||||
return { cwd: existing[0], resolvedRepo: action.repo }
|
||||
}
|
||||
logForDebugging(
|
||||
`No local clone found for repo ${action.repo}, falling back to home`,
|
||||
'No local clone found for repo deep link, falling back to home',
|
||||
)
|
||||
}
|
||||
return { cwd: homedir() }
|
||||
|
||||
@@ -116,7 +116,6 @@ function appendToLog(path: string, message: object): void {
|
||||
const messageWithTimestamp = {
|
||||
timestamp: new Date().toISOString(),
|
||||
...message,
|
||||
cwd: getFsImplementation().cwd(),
|
||||
userType: process.env.USER_TYPE,
|
||||
sessionId: getSessionId(),
|
||||
version: MACRO.VERSION,
|
||||
@@ -125,26 +124,13 @@ function appendToLog(path: string, message: object): void {
|
||||
getLogWriter(path).write(messageWithTimestamp)
|
||||
}
|
||||
|
||||
function extractServerMessage(data: unknown): string | undefined {
|
||||
if (typeof data === 'string') {
|
||||
return data
|
||||
}
|
||||
if (data && typeof data === 'object') {
|
||||
const obj = data as Record<string, unknown>
|
||||
if (typeof obj.message === 'string') {
|
||||
return obj.message
|
||||
}
|
||||
if (
|
||||
typeof obj.error === 'object' &&
|
||||
obj.error &&
|
||||
'message' in obj.error &&
|
||||
typeof (obj.error as Record<string, unknown>).message === 'string'
|
||||
) {
|
||||
return (obj.error as Record<string, unknown>).message as string
|
||||
}
|
||||
}
|
||||
function summarizeUrlForLogs(url: string): string | undefined {
|
||||
try {
|
||||
return new URL(url).host || undefined
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Implementation for logError - writes error to debug log and file.
|
||||
@@ -155,15 +141,15 @@ function logErrorImpl(error: Error): void {
|
||||
// Enrich axios errors with request URL, status, and server message for debugging
|
||||
let context = ''
|
||||
if (axios.isAxiosError(error) && error.config?.url) {
|
||||
const parts = [`url=${error.config.url}`]
|
||||
const parts: string[] = []
|
||||
const host = summarizeUrlForLogs(error.config.url)
|
||||
if (host) {
|
||||
parts.push(`host=${host}`)
|
||||
}
|
||||
if (error.response?.status !== undefined) {
|
||||
parts.push(`status=${error.response.status}`)
|
||||
}
|
||||
const serverMessage = extractServerMessage(error.response?.data)
|
||||
if (serverMessage) {
|
||||
parts.push(`body=${serverMessage}`)
|
||||
}
|
||||
context = `[${parts.join(',')}] `
|
||||
context = parts.length > 0 ? `[${parts.join(',')}] ` : ''
|
||||
}
|
||||
|
||||
logForDebugging(`${error.name}: ${context}${errorStr}`, { level: 'error' })
|
||||
@@ -188,7 +174,6 @@ function logMCPErrorImpl(serverName: string, error: unknown): void {
|
||||
error: errorStr,
|
||||
timestamp: new Date().toISOString(),
|
||||
sessionId: getSessionId(),
|
||||
cwd: getFsImplementation().cwd(),
|
||||
}
|
||||
|
||||
getLogWriter(logFile).write(errorInfo)
|
||||
@@ -206,7 +191,6 @@ function logMCPDebugImpl(serverName: string, message: string): void {
|
||||
debug: message,
|
||||
timestamp: new Date().toISOString(),
|
||||
sessionId: getSessionId(),
|
||||
cwd: getFsImplementation().cwd(),
|
||||
}
|
||||
|
||||
getLogWriter(logFile).write(debugInfo)
|
||||
@@ -218,8 +202,6 @@ function logMCPDebugImpl(serverName: string, message: string): void {
|
||||
* Call this during app startup to attach the error logging backend.
|
||||
* Any errors logged before this is called will be queued and drained.
|
||||
*
|
||||
* Should be called BEFORE initializeAnalyticsSink() in the startup sequence.
|
||||
*
|
||||
* Idempotent: safe to call multiple times (subsequent calls are no-ops).
|
||||
*/
|
||||
export function initializeErrorLogSink(): void {
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
import { createHash } from 'crypto'
|
||||
import type { AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS } from 'src/services/analytics/index.js'
|
||||
import { logEvent } from 'src/services/analytics/index.js'
|
||||
|
||||
/**
|
||||
* Creates a truncated SHA256 hash (16 chars) for file paths
|
||||
* Used for privacy-preserving analytics on file operations
|
||||
*/
|
||||
function hashFilePath(
|
||||
filePath: string,
|
||||
): AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS {
|
||||
return createHash('sha256')
|
||||
.update(filePath)
|
||||
.digest('hex')
|
||||
.slice(0, 16) as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a full SHA256 hash (64 chars) for file contents
|
||||
* Used for deduplication and change detection analytics
|
||||
*/
|
||||
function hashFileContent(
|
||||
content: string,
|
||||
): AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS {
|
||||
return createHash('sha256')
|
||||
.update(content)
|
||||
.digest('hex') as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
}
|
||||
|
||||
// Maximum content size to hash (100KB)
|
||||
// Prevents memory exhaustion when hashing large files (e.g., base64-encoded images)
|
||||
const MAX_CONTENT_HASH_SIZE = 100 * 1024
|
||||
|
||||
/**
|
||||
* Logs file operation analytics to Statsig
|
||||
*/
|
||||
export function logFileOperation(params: {
|
||||
operation: 'read' | 'write' | 'edit'
|
||||
tool: 'FileReadTool' | 'FileWriteTool' | 'FileEditTool'
|
||||
filePath: string
|
||||
content?: string
|
||||
type?: 'create' | 'update'
|
||||
}): void {
|
||||
const metadata: Record<
|
||||
string,
|
||||
| AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
| number
|
||||
| boolean
|
||||
> = {
|
||||
operation:
|
||||
params.operation as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
tool: params.tool as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
filePathHash: hashFilePath(params.filePath),
|
||||
}
|
||||
|
||||
// Only hash content if it's provided and below size limit
|
||||
// This prevents memory exhaustion from hashing large files (e.g., base64-encoded images)
|
||||
if (
|
||||
params.content !== undefined &&
|
||||
params.content.length <= MAX_CONTENT_HASH_SIZE
|
||||
) {
|
||||
metadata.contentHash = hashFileContent(params.content)
|
||||
}
|
||||
|
||||
if (params.type !== undefined) {
|
||||
metadata.type =
|
||||
params.type as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
|
||||
}
|
||||
|
||||
logEvent('tengu_file_operation', metadata)
|
||||
}
|
||||
@@ -29,8 +29,6 @@ import {
|
||||
supportsTabStatus,
|
||||
wrapForMultiplexer,
|
||||
} from '../ink/termio/osc.js'
|
||||
import { shutdownDatadog } from '../services/analytics/datadog.js'
|
||||
import { shutdown1PEventLogging } from '../services/analytics/firstPartyEventLogger.js'
|
||||
import {
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
logEvent,
|
||||
@@ -41,7 +39,6 @@ import { logForDebugging } from './debug.js'
|
||||
import { logForDiagnosticsNoPII } from './diagLogs.js'
|
||||
import { isEnvTruthy } from './envUtils.js'
|
||||
import { getCurrentSessionTitle, sessionIdExists } from './sessionStorage.js'
|
||||
import { sleep } from './sleep.js'
|
||||
import { profileReport } from './startupProfiler.js'
|
||||
|
||||
/**
|
||||
@@ -301,7 +298,7 @@ export const setupGracefulShutdown = memoize(() => {
|
||||
process.on('uncaughtException', error => {
|
||||
logForDiagnosticsNoPII('error', 'uncaught_exception', {
|
||||
error_name: error.name,
|
||||
error_message: error.message.slice(0, 2000),
|
||||
has_message: error.message.length > 0,
|
||||
})
|
||||
logEvent('tengu_uncaught_exception', {
|
||||
error_name:
|
||||
@@ -321,10 +318,10 @@ export const setupGracefulShutdown = memoize(() => {
|
||||
reason instanceof Error
|
||||
? {
|
||||
error_name: reason.name,
|
||||
error_message: reason.message.slice(0, 2000),
|
||||
error_stack: reason.stack?.slice(0, 4000),
|
||||
has_message: reason.message.length > 0,
|
||||
has_stack: Boolean(reason.stack),
|
||||
}
|
||||
: { error_message: String(reason).slice(0, 2000) }
|
||||
: { reason_type: typeof reason }
|
||||
logForDiagnosticsNoPII('error', 'unhandled_rejection', errorInfo)
|
||||
logEvent('tengu_unhandled_rejection', {
|
||||
error_name:
|
||||
@@ -413,7 +410,7 @@ export async function gracefulShutdown(
|
||||
|
||||
// Failsafe: guarantee process exits even if cleanup hangs (e.g., MCP connections).
|
||||
// Runs cleanupTerminalModes first so a hung cleanup doesn't leave the terminal dirty.
|
||||
// Budget = max(5s, hook budget + 3.5s headroom for cleanup + analytics flush).
|
||||
// Budget = max(5s, hook budget + 3.5s headroom for remaining cleanup).
|
||||
failsafeTimer = setTimeout(
|
||||
code => {
|
||||
cleanupTerminalModes()
|
||||
@@ -487,7 +484,7 @@ export async function gracefulShutdown(
|
||||
}
|
||||
|
||||
// Signal to inference that this session's cache can be evicted.
|
||||
// Fires before analytics flush so the event makes it to the pipeline.
|
||||
// Emit before the final forced-exit path runs.
|
||||
const lastRequestId = getLastMainRequestId()
|
||||
if (lastRequestId) {
|
||||
logEvent('tengu_cache_eviction_hint', {
|
||||
@@ -498,18 +495,6 @@ export async function gracefulShutdown(
|
||||
})
|
||||
}
|
||||
|
||||
// Flush analytics — capped at 500ms. Previously unbounded: the 1P exporter
|
||||
// awaits all pending axios POSTs (10s each), eating the full failsafe budget.
|
||||
// Lost analytics on slow networks are acceptable; a hanging exit is not.
|
||||
try {
|
||||
await Promise.race([
|
||||
Promise.all([shutdown1PEventLogging(), shutdownDatadog()]),
|
||||
sleep(500),
|
||||
])
|
||||
} catch {
|
||||
// Ignore analytics shutdown errors
|
||||
}
|
||||
|
||||
if (options?.finalMessage) {
|
||||
try {
|
||||
// eslint-disable-next-line custom-rules/no-sync-fs -- must flush before forceExit
|
||||
|
||||
@@ -55,13 +55,7 @@ import {
|
||||
logEvent,
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
|
||||
} from 'src/services/analytics/index.js'
|
||||
import { logOTelEvent } from './telemetry/events.js'
|
||||
import { ALLOWED_OFFICIAL_MARKETPLACE_NAMES } from './plugins/schemas.js'
|
||||
import {
|
||||
startHookSpan,
|
||||
endHookSpan,
|
||||
isBetaTracingEnabled,
|
||||
} from './telemetry/sessionTracing.js'
|
||||
import {
|
||||
hookJSONOutputSchema,
|
||||
promptRequestSchema,
|
||||
@@ -2066,31 +2060,6 @@ async function* executeHooks({
|
||||
return
|
||||
}
|
||||
|
||||
// Collect hook definitions for beta tracing telemetry
|
||||
const hookDefinitionsJson = isBetaTracingEnabled()
|
||||
? jsonStringify(getHookDefinitionsForTelemetry(matchingHooks))
|
||||
: '[]'
|
||||
|
||||
// Log hook execution start to OTEL (only for beta tracing)
|
||||
if (isBetaTracingEnabled()) {
|
||||
void logOTelEvent('hook_execution_start', {
|
||||
hook_event: hookEvent,
|
||||
hook_name: hookName,
|
||||
num_hooks: String(matchingHooks.length),
|
||||
managed_only: String(shouldAllowManagedHooksOnly()),
|
||||
hook_definitions: hookDefinitionsJson,
|
||||
hook_source: shouldAllowManagedHooksOnly() ? 'policySettings' : 'merged',
|
||||
})
|
||||
}
|
||||
|
||||
// Start hook span for beta tracing
|
||||
const hookSpan = startHookSpan(
|
||||
hookEvent,
|
||||
hookName,
|
||||
matchingHooks.length,
|
||||
hookDefinitionsJson,
|
||||
)
|
||||
|
||||
// Yield progress messages for each hook before execution
|
||||
for (const { hook } of matchingHooks) {
|
||||
yield {
|
||||
@@ -2943,32 +2912,6 @@ async function* executeHooks({
|
||||
totalDurationMs,
|
||||
})
|
||||
|
||||
// Log hook execution completion to OTEL (only for beta tracing)
|
||||
if (isBetaTracingEnabled()) {
|
||||
const hookDefinitionsComplete =
|
||||
getHookDefinitionsForTelemetry(matchingHooks)
|
||||
|
||||
void logOTelEvent('hook_execution_complete', {
|
||||
hook_event: hookEvent,
|
||||
hook_name: hookName,
|
||||
num_hooks: String(matchingHooks.length),
|
||||
num_success: String(outcomes.success),
|
||||
num_blocking: String(outcomes.blocking),
|
||||
num_non_blocking_error: String(outcomes.non_blocking_error),
|
||||
num_cancelled: String(outcomes.cancelled),
|
||||
managed_only: String(shouldAllowManagedHooksOnly()),
|
||||
hook_definitions: jsonStringify(hookDefinitionsComplete),
|
||||
hook_source: shouldAllowManagedHooksOnly() ? 'policySettings' : 'merged',
|
||||
})
|
||||
}
|
||||
|
||||
// End hook span for beta tracing
|
||||
endHookSpan(hookSpan, {
|
||||
numSuccess: outcomes.success,
|
||||
numBlocking: outcomes.blocking,
|
||||
numNonBlockingError: outcomes.non_blocking_error,
|
||||
numCancelled: outcomes.cancelled,
|
||||
})
|
||||
}
|
||||
|
||||
export type HookOutsideReplResult = {
|
||||
@@ -5001,22 +4944,3 @@ export async function executeWorktreeRemoveHook(
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
function getHookDefinitionsForTelemetry(
|
||||
matchedHooks: MatchedHook[],
|
||||
): Array<{ type: string; command?: string; prompt?: string; name?: string }> {
|
||||
return matchedHooks.map(({ hook }) => {
|
||||
if (hook.type === 'command') {
|
||||
return { type: 'command', command: hook.command }
|
||||
} else if (hook.type === 'prompt') {
|
||||
return { type: 'prompt', prompt: hook.prompt }
|
||||
} else if (hook.type === 'http') {
|
||||
return { type: 'http', command: hook.url }
|
||||
} else if (hook.type === 'function') {
|
||||
return { type: 'function', name: 'function' }
|
||||
} else if (hook.type === 'callback') {
|
||||
return { type: 'callback', name: 'callback' }
|
||||
}
|
||||
return { type: 'unknown' }
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
/**
|
||||
* Telemetry for plugin/marketplace fetches that hit the network.
|
||||
*
|
||||
* Added for inc-5046 (GitHub complained about claude-plugins-official load).
|
||||
* Before this, fetch operations only had logForDebugging — no way to measure
|
||||
* actual network volume. This surfaces what's hitting GitHub vs GCS vs
|
||||
* user-hosted so we can see the GCS migration take effect and catch future
|
||||
* hot-path regressions before GitHub emails us again.
|
||||
*
|
||||
* Volume: these fire at startup (install-counts 24h-TTL)
|
||||
* and on explicit user action (install/update). NOT per-interaction. Similar
|
||||
* envelope to tengu_binary_download_*.
|
||||
*/
|
||||
|
||||
import {
|
||||
logEvent,
|
||||
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS as SafeString,
|
||||
} from '../../services/analytics/index.js'
|
||||
import { OFFICIAL_MARKETPLACE_NAME } from './officialMarketplace.js'
|
||||
|
||||
export type PluginFetchSource =
|
||||
| 'install_counts'
|
||||
| 'marketplace_clone'
|
||||
| 'marketplace_pull'
|
||||
| 'marketplace_url'
|
||||
| 'plugin_clone'
|
||||
| 'mcpb'
|
||||
|
||||
export type PluginFetchOutcome = 'success' | 'failure' | 'cache_hit'
|
||||
|
||||
// Allowlist of public hosts we report by name. Anything else (enterprise
|
||||
// git, self-hosted, internal) is bucketed as 'other' — we don't want
|
||||
// internal hostnames (git.mycorp.internal) landing in telemetry. Bounded
|
||||
// cardinality also keeps the dashboard host-breakdown tractable.
|
||||
const KNOWN_PUBLIC_HOSTS = new Set([
|
||||
'github.com',
|
||||
'raw.githubusercontent.com',
|
||||
'objects.githubusercontent.com',
|
||||
'gist.githubusercontent.com',
|
||||
'gitlab.com',
|
||||
'bitbucket.org',
|
||||
'codeberg.org',
|
||||
'dev.azure.com',
|
||||
'ssh.dev.azure.com',
|
||||
'storage.googleapis.com', // GCS — where Dickson's migration points
|
||||
])
|
||||
|
||||
/**
|
||||
* Extract hostname from a URL or git spec and bucket to the allowlist.
|
||||
* Handles `https://host/...`, `git@host:path`, `ssh://host/...`.
|
||||
* Returns a known public host, 'other' (parseable but not allowlisted —
|
||||
* don't leak private hostnames), or 'unknown' (unparseable / local path).
|
||||
*/
|
||||
function extractHost(urlOrSpec: string): string {
|
||||
let host: string
|
||||
const scpMatch = /^[^@/]+@([^:/]+):/.exec(urlOrSpec)
|
||||
if (scpMatch) {
|
||||
host = scpMatch[1]!
|
||||
} else {
|
||||
try {
|
||||
host = new URL(urlOrSpec).hostname
|
||||
} catch {
|
||||
return 'unknown'
|
||||
}
|
||||
}
|
||||
const normalized = host.toLowerCase()
|
||||
return KNOWN_PUBLIC_HOSTS.has(normalized) ? normalized : 'other'
|
||||
}
|
||||
|
||||
/**
|
||||
* True if the URL/spec points at anthropics/claude-plugins-official — the
|
||||
* repo GitHub complained about. Lets the dashboard separate "our problem"
|
||||
* traffic from user-configured marketplaces.
|
||||
*/
|
||||
function isOfficialRepo(urlOrSpec: string): boolean {
|
||||
return urlOrSpec.includes(`anthropics/${OFFICIAL_MARKETPLACE_NAME}`)
|
||||
}
|
||||
|
||||
export function logPluginFetch(
|
||||
source: PluginFetchSource,
|
||||
urlOrSpec: string | undefined,
|
||||
outcome: PluginFetchOutcome,
|
||||
durationMs: number,
|
||||
errorKind?: string,
|
||||
): void {
|
||||
// String values are bounded enums / hostname-only — no code, no paths,
|
||||
// no raw error messages. Same privacy envelope as tengu_web_fetch_host.
|
||||
logEvent('tengu_plugin_remote_fetch', {
|
||||
source: source as SafeString,
|
||||
host: (urlOrSpec ? extractHost(urlOrSpec) : 'unknown') as SafeString,
|
||||
is_official: urlOrSpec ? isOfficialRepo(urlOrSpec) : false,
|
||||
outcome: outcome as SafeString,
|
||||
duration_ms: Math.round(durationMs),
|
||||
...(errorKind && { error_kind: errorKind as SafeString }),
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Classify an error into a stable bucket for the error_kind field. Keeps
|
||||
* cardinality bounded — raw error messages would explode dashboard grouping.
|
||||
*
|
||||
* Handles both axios Error objects (Node.js error codes like ENOTFOUND) and
|
||||
* git stderr strings (human phrases like "Could not resolve host"). DNS
|
||||
* checked BEFORE timeout because gitClone's error enhancement at
|
||||
* marketplaceManager.ts:~950 rewrites DNS failures to include the word
|
||||
* "timeout" — ordering the other way would misclassify git DNS as timeout.
|
||||
*/
|
||||
export function classifyFetchError(error: unknown): string {
|
||||
const msg = String((error as { message?: unknown })?.message ?? error)
|
||||
if (
|
||||
/ENOTFOUND|ECONNREFUSED|EAI_AGAIN|Could not resolve host|Connection refused/i.test(
|
||||
msg,
|
||||
)
|
||||
) {
|
||||
return 'dns_or_refused'
|
||||
}
|
||||
if (/ETIMEDOUT|timed out|timeout/i.test(msg)) return 'timeout'
|
||||
if (
|
||||
/ECONNRESET|socket hang up|Connection reset by peer|remote end hung up/i.test(
|
||||
msg,
|
||||
)
|
||||
) {
|
||||
return 'conn_reset'
|
||||
}
|
||||
if (/403|401|authentication|permission denied/i.test(msg)) return 'auth'
|
||||
if (/404|not found|repository not found/i.test(msg)) return 'not_found'
|
||||
if (/certificate|SSL|TLS|unable to get local issuer/i.test(msg)) return 'tls'
|
||||
// Schema validation throws "Invalid response format" (install_counts) —
|
||||
// distinguish from true unknowns so the dashboard can
|
||||
// see "server sent garbage" separately.
|
||||
if (/Invalid response format|Invalid marketplace schema/i.test(msg)) {
|
||||
return 'invalid_schema'
|
||||
}
|
||||
return 'other'
|
||||
}
|
||||
@@ -17,7 +17,6 @@ import { errorMessage, getErrnoCode } from '../errors.js'
|
||||
import { getFsImplementation } from '../fsOperations.js'
|
||||
import { logError } from '../log.js'
|
||||
import { jsonParse, jsonStringify } from '../slowOperations.js'
|
||||
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
|
||||
import { getPluginsDirectory } from './pluginDirectories.js'
|
||||
|
||||
const INSTALL_COUNTS_CACHE_VERSION = 1
|
||||
@@ -196,21 +195,8 @@ async function fetchInstallCountsFromGitHub(): Promise<
|
||||
throw new Error('Invalid response format from install counts API')
|
||||
}
|
||||
|
||||
logPluginFetch(
|
||||
'install_counts',
|
||||
INSTALL_COUNTS_URL,
|
||||
'success',
|
||||
performance.now() - started,
|
||||
)
|
||||
return response.data.plugins
|
||||
} catch (error) {
|
||||
logPluginFetch(
|
||||
'install_counts',
|
||||
INSTALL_COUNTS_URL,
|
||||
'failure',
|
||||
performance.now() - started,
|
||||
classifyFetchError(error),
|
||||
)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
@@ -227,7 +213,6 @@ export async function getInstallCounts(): Promise<Map<string, number> | null> {
|
||||
const cache = await loadInstallCountsCache()
|
||||
if (cache) {
|
||||
logForDebugging('Using cached install counts')
|
||||
logPluginFetch('install_counts', INSTALL_COUNTS_URL, 'cache_hit', 0)
|
||||
const map = new Map<string, number>()
|
||||
for (const entry of cache.counts) {
|
||||
map.set(entry.plugin, entry.unique_installs)
|
||||
|
||||
@@ -53,7 +53,6 @@ import {
|
||||
getAddDirExtraMarketplaces,
|
||||
} from './addDirPluginSettings.js'
|
||||
import { markPluginVersionOrphaned } from './cacheUtils.js'
|
||||
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
|
||||
import { removeAllPluginsForMarketplace } from './installedPluginsManager.js'
|
||||
import {
|
||||
extractHostFromSource,
|
||||
@@ -1110,13 +1109,7 @@ async function cacheMarketplaceFromGit(
|
||||
disableCredentialHelper: options?.disableCredentialHelper,
|
||||
sparsePaths,
|
||||
})
|
||||
logPluginFetch(
|
||||
'marketplace_pull',
|
||||
gitUrl,
|
||||
pullResult.code === 0 ? 'success' : 'failure',
|
||||
performance.now() - pullStarted,
|
||||
pullResult.code === 0 ? undefined : classifyFetchError(pullResult.stderr),
|
||||
)
|
||||
void pullStarted
|
||||
if (pullResult.code === 0) return
|
||||
logForDebugging(`git pull failed, will re-clone: ${pullResult.stderr}`, {
|
||||
level: 'warn',
|
||||
@@ -1156,13 +1149,7 @@ async function cacheMarketplaceFromGit(
|
||||
)
|
||||
const cloneStarted = performance.now()
|
||||
const result = await gitClone(gitUrl, cachePath, ref, sparsePaths)
|
||||
logPluginFetch(
|
||||
'marketplace_clone',
|
||||
gitUrl,
|
||||
result.code === 0 ? 'success' : 'failure',
|
||||
performance.now() - cloneStarted,
|
||||
result.code === 0 ? undefined : classifyFetchError(result.stderr),
|
||||
)
|
||||
void cloneStarted
|
||||
if (result.code !== 0) {
|
||||
// Clean up any partial directory created by the failed clone so the next
|
||||
// attempt starts fresh. Best-effort: if this fails, the stale dir will be
|
||||
@@ -1284,13 +1271,6 @@ async function cacheMarketplaceFromUrl(
|
||||
headers,
|
||||
})
|
||||
} catch (error) {
|
||||
logPluginFetch(
|
||||
'marketplace_url',
|
||||
url,
|
||||
'failure',
|
||||
performance.now() - fetchStarted,
|
||||
classifyFetchError(error),
|
||||
)
|
||||
if (axios.isAxiosError(error)) {
|
||||
if (error.code === 'ECONNREFUSED' || error.code === 'ENOTFOUND') {
|
||||
throw new Error(
|
||||
@@ -1317,25 +1297,13 @@ async function cacheMarketplaceFromUrl(
|
||||
// Validate the response is a valid marketplace
|
||||
const result = PluginMarketplaceSchema().safeParse(response.data)
|
||||
if (!result.success) {
|
||||
logPluginFetch(
|
||||
'marketplace_url',
|
||||
url,
|
||||
'failure',
|
||||
performance.now() - fetchStarted,
|
||||
'invalid_schema',
|
||||
)
|
||||
throw new ConfigParseError(
|
||||
`Invalid marketplace schema from URL: ${result.error.issues.map(e => `${e.path.join('.')}: ${e.message}`).join(', ')}`,
|
||||
redactedUrl,
|
||||
response.data,
|
||||
)
|
||||
}
|
||||
logPluginFetch(
|
||||
'marketplace_url',
|
||||
url,
|
||||
'success',
|
||||
performance.now() - fetchStarted,
|
||||
)
|
||||
void fetchStarted
|
||||
|
||||
safeCallProgress(onProgress, 'Saving marketplace to cache')
|
||||
// Ensure cache directory exists
|
||||
|
||||
@@ -20,7 +20,6 @@ import {
|
||||
} from '../settings/settings.js'
|
||||
import { jsonParse, jsonStringify } from '../slowOperations.js'
|
||||
import { getSystemDirectories } from '../systemDirectories.js'
|
||||
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
|
||||
/**
|
||||
* User configuration values for MCPB
|
||||
*/
|
||||
@@ -490,7 +489,6 @@ async function downloadMcpb(
|
||||
}
|
||||
|
||||
const started = performance.now()
|
||||
let fetchTelemetryFired = false
|
||||
try {
|
||||
const response = await axios.get(url, {
|
||||
timeout: 120000, // 2 minute timeout
|
||||
@@ -507,11 +505,6 @@ async function downloadMcpb(
|
||||
})
|
||||
|
||||
const data = new Uint8Array(response.data)
|
||||
// Fire telemetry before writeFile — the event measures the network
|
||||
// fetch, not disk I/O. A writeFile EACCES would otherwise match
|
||||
// classifyFetchError's /permission denied/ → misreport as auth.
|
||||
logPluginFetch('mcpb', url, 'success', performance.now() - started)
|
||||
fetchTelemetryFired = true
|
||||
|
||||
// Save to disk (binary data)
|
||||
await writeFile(destPath, Buffer.from(data))
|
||||
@@ -523,15 +516,7 @@ async function downloadMcpb(
|
||||
|
||||
return data
|
||||
} catch (error) {
|
||||
if (!fetchTelemetryFired) {
|
||||
logPluginFetch(
|
||||
'mcpb',
|
||||
url,
|
||||
'failure',
|
||||
performance.now() - started,
|
||||
classifyFetchError(error),
|
||||
)
|
||||
}
|
||||
void started
|
||||
const errorMsg = errorMessage(error)
|
||||
const fullError = new Error(
|
||||
`Failed to download MCPB file from ${url}: ${errorMsg}`,
|
||||
|
||||
@@ -85,7 +85,6 @@ import { SettingsSchema } from '../settings/types.js'
|
||||
import { jsonParse, jsonStringify } from '../slowOperations.js'
|
||||
import { getAddDirEnabledPlugins } from './addDirPluginSettings.js'
|
||||
import { verifyAndDemote } from './dependencyResolver.js'
|
||||
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
|
||||
import { checkGitAvailable } from './gitAvailability.js'
|
||||
import { getInMemoryInstalledPlugins } from './installedPluginsManager.js'
|
||||
import { getManagedPluginNames } from './managedPlugins.js'
|
||||
@@ -563,13 +562,6 @@ export async function gitClone(
|
||||
const cloneResult = await execFileNoThrow(gitExe(), args)
|
||||
|
||||
if (cloneResult.code !== 0) {
|
||||
logPluginFetch(
|
||||
'plugin_clone',
|
||||
gitUrl,
|
||||
'failure',
|
||||
performance.now() - cloneStarted,
|
||||
classifyFetchError(cloneResult.stderr),
|
||||
)
|
||||
throw new Error(`Failed to clone repository: ${cloneResult.stderr}`)
|
||||
}
|
||||
|
||||
@@ -595,13 +587,6 @@ export async function gitClone(
|
||||
)
|
||||
|
||||
if (unshallowResult.code !== 0) {
|
||||
logPluginFetch(
|
||||
'plugin_clone',
|
||||
gitUrl,
|
||||
'failure',
|
||||
performance.now() - cloneStarted,
|
||||
classifyFetchError(unshallowResult.stderr),
|
||||
)
|
||||
throw new Error(
|
||||
`Failed to fetch commit ${sha}: ${unshallowResult.stderr}`,
|
||||
)
|
||||
@@ -616,27 +601,12 @@ export async function gitClone(
|
||||
)
|
||||
|
||||
if (checkoutResult.code !== 0) {
|
||||
logPluginFetch(
|
||||
'plugin_clone',
|
||||
gitUrl,
|
||||
'failure',
|
||||
performance.now() - cloneStarted,
|
||||
classifyFetchError(checkoutResult.stderr),
|
||||
)
|
||||
throw new Error(
|
||||
`Failed to checkout commit ${sha}: ${checkoutResult.stderr}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Fire success only after ALL network ops (clone + optional SHA fetch)
|
||||
// complete — same telemetry-scope discipline as mcpb and marketplace_url.
|
||||
logPluginFetch(
|
||||
'plugin_clone',
|
||||
gitUrl,
|
||||
'success',
|
||||
performance.now() - cloneStarted,
|
||||
)
|
||||
void cloneStarted
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -40,7 +40,6 @@ import { isRestrictedToPluginOnly, isSourceAdminTrusted } from '../settings/plug
|
||||
import { parseSlashCommand } from '../slashCommandParsing.js';
|
||||
import { sleep } from '../sleep.js';
|
||||
import { recordSkillUsage } from '../suggestions/skillUsageTracking.js';
|
||||
import { logOTelEvent, redactIfDisabled } from '../telemetry/events.js';
|
||||
import { buildPluginCommandTelemetryFields } from '../telemetry/pluginTelemetry.js';
|
||||
import { getAssistantMessageContentLength } from '../tokens.js';
|
||||
import { createAgentId } from '../uuid.js';
|
||||
@@ -362,12 +361,6 @@ export async function processSlashCommand(inputString: string, precedingInputBlo
|
||||
const promptId = randomUUID();
|
||||
setPromptId(promptId);
|
||||
logEvent('tengu_input_prompt', {});
|
||||
// Log user prompt event for OTLP
|
||||
void logOTelEvent('user_prompt', {
|
||||
prompt_length: String(inputString.length),
|
||||
prompt: redactIfDisabled(inputString),
|
||||
'prompt.id': promptId
|
||||
});
|
||||
return {
|
||||
messages: [createUserMessage({
|
||||
content: prepareUserContent({
|
||||
|
||||
@@ -9,8 +9,6 @@ import type {
|
||||
import { logEvent } from '../../services/analytics/index.js'
|
||||
import type { PermissionMode } from '../../types/permissions.js'
|
||||
import { createUserMessage } from '../messages.js'
|
||||
import { logOTelEvent, redactIfDisabled } from '../telemetry/events.js'
|
||||
import { startInteractionSpan } from '../telemetry/sessionTracing.js'
|
||||
import {
|
||||
matchesKeepGoingKeyword,
|
||||
matchesNegativeKeyword,
|
||||
@@ -35,26 +33,6 @@ export function processTextPrompt(
|
||||
typeof input === 'string'
|
||||
? input
|
||||
: input.find(block => block.type === 'text')?.text || ''
|
||||
startInteractionSpan(userPromptText)
|
||||
|
||||
// Emit user_prompt OTEL event for both string (CLI) and array (SDK/VS Code)
|
||||
// input shapes. Previously gated on `typeof input === 'string'`, so VS Code
|
||||
// sessions never emitted user_prompt (anthropics/claude-code#33301).
|
||||
// For array input, use the LAST text block: createUserContent pushes the
|
||||
// user's message last (after any <ide_selection>/attachment context blocks),
|
||||
// so .findLast gets the actual prompt. userPromptText (first block) is kept
|
||||
// unchanged for startInteractionSpan to preserve existing span attributes.
|
||||
const otelPromptText =
|
||||
typeof input === 'string'
|
||||
? input
|
||||
: input.findLast(block => block.type === 'text')?.text || ''
|
||||
if (otelPromptText) {
|
||||
void logOTelEvent('user_prompt', {
|
||||
prompt_length: String(otelPromptText.length),
|
||||
prompt: redactIfDisabled(otelPromptText),
|
||||
'prompt.id': promptId,
|
||||
})
|
||||
}
|
||||
|
||||
const isNegative = matchesNegativeKeyword(userPromptText)
|
||||
const isKeepGoing = matchesKeepGoingKeyword(userPromptText)
|
||||
|
||||
@@ -78,13 +78,13 @@ export async function getSessionEnvironmentScript(): Promise<string | null> {
|
||||
if (envScript) {
|
||||
scripts.push(envScript)
|
||||
logForDebugging(
|
||||
`Session environment loaded from CLAUDE_ENV_FILE: ${envFile} (${envScript.length} chars)`,
|
||||
`Session environment loaded from CLAUDE_ENV_FILE (${envScript.length} chars)`,
|
||||
)
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
const code = getErrnoCode(e)
|
||||
if (code !== 'ENOENT') {
|
||||
logForDebugging(`Failed to read CLAUDE_ENV_FILE: ${errorMessage(e)}`)
|
||||
logForDebugging('Failed to read CLAUDE_ENV_FILE')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -109,9 +109,7 @@ export async function getSessionEnvironmentScript(): Promise<string | null> {
|
||||
} catch (e: unknown) {
|
||||
const code = getErrnoCode(e)
|
||||
if (code !== 'ENOENT') {
|
||||
logForDebugging(
|
||||
`Failed to read hook file ${filePath}: ${errorMessage(e)}`,
|
||||
)
|
||||
logForDebugging(`Failed to read hook env file ${file}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,7 +37,7 @@ function getTokenFromFileDescriptor(): string | null {
|
||||
const fd = parseInt(fdEnv, 10)
|
||||
if (Number.isNaN(fd)) {
|
||||
logForDebugging(
|
||||
`CLAUDE_CODE_WEBSOCKET_AUTH_FILE_DESCRIPTOR must be a valid file descriptor number, got: ${fdEnv}`,
|
||||
'CLAUDE_CODE_WEBSOCKET_AUTH_FILE_DESCRIPTOR must be a valid file descriptor number',
|
||||
{ level: 'error' },
|
||||
)
|
||||
setSessionIngressToken(null)
|
||||
@@ -61,7 +61,7 @@ function getTokenFromFileDescriptor(): string | null {
|
||||
setSessionIngressToken(null)
|
||||
return null
|
||||
}
|
||||
logForDebugging(`Successfully read token from file descriptor ${fd}`)
|
||||
logForDebugging('Successfully read token from file descriptor')
|
||||
setSessionIngressToken(token)
|
||||
maybePersistTokenForSubprocesses(
|
||||
CCR_SESSION_INGRESS_TOKEN_PATH,
|
||||
@@ -71,7 +71,7 @@ function getTokenFromFileDescriptor(): string | null {
|
||||
return token
|
||||
} catch (error) {
|
||||
logForDebugging(
|
||||
`Failed to read token from file descriptor ${fd}: ${errorMessage(error)}`,
|
||||
`Failed to read token from file descriptor: ${errorMessage(error)}`,
|
||||
{ level: 'error' },
|
||||
)
|
||||
// FD env var was set but read failed — typically a subprocess that
|
||||
|
||||
@@ -1344,7 +1344,11 @@ class Project {
|
||||
|
||||
setRemoteIngressUrl(url: string): void {
|
||||
this.remoteIngressUrl = url
|
||||
logForDebugging(`Remote persistence enabled with URL: ${url}`)
|
||||
logForDebugging(
|
||||
url
|
||||
? 'Remote persistence enabled (remote ingress configured)'
|
||||
: 'Remote persistence disabled',
|
||||
)
|
||||
if (url) {
|
||||
// If using CCR, don't delay messages by any more than 10ms.
|
||||
this.FLUSH_INTERVAL_MS = REMOTE_FLUSH_INTERVAL_MS
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
import { initializeAnalyticsSink } from '../services/analytics/sink.js'
|
||||
import { initializeErrorLogSink } from './errorLogSink.js'
|
||||
|
||||
/**
|
||||
* Attach error log and analytics compatibility sinks. Both inits are
|
||||
* idempotent. Called from setup() for the default command; other entrypoints
|
||||
* (subcommands, daemon, bridge) call this directly since they bypass setup().
|
||||
* Attach startup sinks used by all entrypoints. The error-log init is
|
||||
* idempotent, so callers that bypass setup() can safely invoke this too.
|
||||
*
|
||||
* Leaf module — kept out of setup.ts to avoid the setup → commands → bridge
|
||||
* → setup import cycle.
|
||||
*/
|
||||
export function initSinks(): void {
|
||||
initializeErrorLogSink()
|
||||
initializeAnalyticsSink()
|
||||
}
|
||||
|
||||
@@ -96,7 +96,6 @@ import {
|
||||
readMailbox,
|
||||
writeToMailbox,
|
||||
} from '../teammateMailbox.js'
|
||||
import { unregisterAgent as unregisterPerfettoAgent } from '../telemetry/perfettoTracing.js'
|
||||
import { createContentReplacementState } from '../toolResultStorage.js'
|
||||
import { TEAM_LEAD_NAME } from './constants.js'
|
||||
import {
|
||||
@@ -1460,7 +1459,6 @@ export async function runInProcessTeammate(
|
||||
})
|
||||
}
|
||||
|
||||
unregisterPerfettoAgent(identity.agentId)
|
||||
return { success: true, messages: allMessages }
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
@@ -1524,7 +1522,6 @@ export async function runInProcessTeammate(
|
||||
},
|
||||
)
|
||||
|
||||
unregisterPerfettoAgent(identity.agentId)
|
||||
return {
|
||||
success: false,
|
||||
error: errorMessage,
|
||||
|
||||
@@ -18,16 +18,10 @@
|
||||
* 6. Worker polls mailbox for responses and continues execution
|
||||
*/
|
||||
|
||||
import { mkdir, readdir, readFile, unlink, writeFile } from 'fs/promises'
|
||||
import { join } from 'path'
|
||||
import { z } from 'zod/v4'
|
||||
import { logForDebugging } from '../debug.js'
|
||||
import { getErrnoCode } from '../errors.js'
|
||||
import { lazySchema } from '../lazySchema.js'
|
||||
import * as lockfile from '../lockfile.js'
|
||||
import { logError } from '../log.js'
|
||||
import type { PermissionUpdate } from '../permissions/PermissionUpdateSchema.js'
|
||||
import { jsonParse, jsonStringify } from '../slowOperations.js'
|
||||
import { jsonStringify } from '../slowOperations.js'
|
||||
import {
|
||||
getAgentId,
|
||||
getAgentName,
|
||||
@@ -41,53 +35,44 @@ import {
|
||||
createSandboxPermissionResponseMessage,
|
||||
writeToMailbox,
|
||||
} from '../teammateMailbox.js'
|
||||
import { getTeamDir, readTeamFileAsync } from './teamHelpers.js'
|
||||
import { readTeamFileAsync } from './teamHelpers.js'
|
||||
|
||||
/**
|
||||
* Full request schema for a permission request from a worker to the leader
|
||||
*/
|
||||
export const SwarmPermissionRequestSchema = lazySchema(() =>
|
||||
z.object({
|
||||
export type SwarmPermissionRequest = {
|
||||
/** Unique identifier for this request */
|
||||
id: z.string(),
|
||||
id: string
|
||||
/** Worker's CLAUDE_CODE_AGENT_ID */
|
||||
workerId: z.string(),
|
||||
workerId: string
|
||||
/** Worker's CLAUDE_CODE_AGENT_NAME */
|
||||
workerName: z.string(),
|
||||
workerName: string
|
||||
/** Worker's CLAUDE_CODE_AGENT_COLOR */
|
||||
workerColor: z.string().optional(),
|
||||
workerColor?: string
|
||||
/** Team name for routing */
|
||||
teamName: z.string(),
|
||||
teamName: string
|
||||
/** Tool name requiring permission (e.g., "Bash", "Edit") */
|
||||
toolName: z.string(),
|
||||
toolName: string
|
||||
/** Original toolUseID from worker's context */
|
||||
toolUseId: z.string(),
|
||||
toolUseId: string
|
||||
/** Human-readable description of the tool use */
|
||||
description: z.string(),
|
||||
description: string
|
||||
/** Serialized tool input */
|
||||
input: z.record(z.string(), z.unknown()),
|
||||
input: Record<string, unknown>
|
||||
/** Suggested permission rules from the permission result */
|
||||
permissionSuggestions: z.array(z.unknown()),
|
||||
permissionSuggestions: unknown[]
|
||||
/** Status of the request */
|
||||
status: z.enum(['pending', 'approved', 'rejected']),
|
||||
status: 'pending' | 'approved' | 'rejected'
|
||||
/** Who resolved the request */
|
||||
resolvedBy: z.enum(['worker', 'leader']).optional(),
|
||||
resolvedBy?: 'worker' | 'leader'
|
||||
/** Timestamp when resolved */
|
||||
resolvedAt: z.number().optional(),
|
||||
resolvedAt?: number
|
||||
/** Rejection feedback message */
|
||||
feedback: z.string().optional(),
|
||||
feedback?: string
|
||||
/** Modified input if changed by resolver */
|
||||
updatedInput: z.record(z.string(), z.unknown()).optional(),
|
||||
updatedInput?: Record<string, unknown>
|
||||
/** "Always allow" rules applied during resolution */
|
||||
permissionUpdates: z.array(z.unknown()).optional(),
|
||||
permissionUpdates?: unknown[]
|
||||
/** Timestamp when request was created */
|
||||
createdAt: z.number(),
|
||||
}),
|
||||
)
|
||||
|
||||
export type SwarmPermissionRequest = z.infer<
|
||||
ReturnType<typeof SwarmPermissionRequestSchema>
|
||||
>
|
||||
createdAt: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolution data returned when leader/worker resolves a request
|
||||
@@ -105,55 +90,6 @@ export type PermissionResolution = {
|
||||
permissionUpdates?: PermissionUpdate[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the base directory for a team's permission requests
|
||||
* Path: ~/.claude/teams/{teamName}/permissions/
|
||||
*/
|
||||
export function getPermissionDir(teamName: string): string {
|
||||
return join(getTeamDir(teamName), 'permissions')
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the pending directory for a team
|
||||
*/
|
||||
function getPendingDir(teamName: string): string {
|
||||
return join(getPermissionDir(teamName), 'pending')
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the resolved directory for a team
|
||||
*/
|
||||
function getResolvedDir(teamName: string): string {
|
||||
return join(getPermissionDir(teamName), 'resolved')
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the permissions directory structure exists (async)
|
||||
*/
|
||||
async function ensurePermissionDirsAsync(teamName: string): Promise<void> {
|
||||
const permDir = getPermissionDir(teamName)
|
||||
const pendingDir = getPendingDir(teamName)
|
||||
const resolvedDir = getResolvedDir(teamName)
|
||||
|
||||
for (const dir of [permDir, pendingDir, resolvedDir]) {
|
||||
await mkdir(dir, { recursive: true })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path to a pending request file
|
||||
*/
|
||||
function getPendingRequestPath(teamName: string, requestId: string): string {
|
||||
return join(getPendingDir(teamName), `${requestId}.json`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the path to a resolved request file
|
||||
*/
|
||||
function getResolvedRequestPath(teamName: string, requestId: string): string {
|
||||
return join(getResolvedDir(teamName), `${requestId}.json`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unique request ID
|
||||
*/
|
||||
@@ -206,375 +142,6 @@ export function createPermissionRequest(params: {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write a permission request to the pending directory with file locking
|
||||
* Called by worker agents when they need permission approval from the leader
|
||||
*
|
||||
* @returns The written request
|
||||
*/
|
||||
export async function writePermissionRequest(
|
||||
request: SwarmPermissionRequest,
|
||||
): Promise<SwarmPermissionRequest> {
|
||||
await ensurePermissionDirsAsync(request.teamName)
|
||||
|
||||
const pendingPath = getPendingRequestPath(request.teamName, request.id)
|
||||
const lockDir = getPendingDir(request.teamName)
|
||||
|
||||
// Create a directory-level lock file for atomic writes
|
||||
const lockFilePath = join(lockDir, '.lock')
|
||||
await writeFile(lockFilePath, '', 'utf-8')
|
||||
|
||||
let release: (() => Promise<void>) | undefined
|
||||
try {
|
||||
release = await lockfile.lock(lockFilePath)
|
||||
|
||||
// Write the request file
|
||||
await writeFile(pendingPath, jsonStringify(request, null, 2), 'utf-8')
|
||||
|
||||
logForDebugging(
|
||||
`[PermissionSync] Wrote pending request ${request.id} from ${request.workerName} for ${request.toolName}`,
|
||||
)
|
||||
|
||||
return request
|
||||
} catch (error) {
|
||||
logForDebugging(
|
||||
`[PermissionSync] Failed to write permission request: ${error}`,
|
||||
)
|
||||
logError(error)
|
||||
throw error
|
||||
} finally {
|
||||
if (release) {
|
||||
await release()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read all pending permission requests for a team
|
||||
* Called by the team leader to see what requests need attention
|
||||
*/
|
||||
export async function readPendingPermissions(
|
||||
teamName?: string,
|
||||
): Promise<SwarmPermissionRequest[]> {
|
||||
const team = teamName || getTeamName()
|
||||
if (!team) {
|
||||
logForDebugging('[PermissionSync] No team name available')
|
||||
return []
|
||||
}
|
||||
|
||||
const pendingDir = getPendingDir(team)
|
||||
|
||||
let files: string[]
|
||||
try {
|
||||
files = await readdir(pendingDir)
|
||||
} catch (e: unknown) {
|
||||
const code = getErrnoCode(e)
|
||||
if (code === 'ENOENT') {
|
||||
return []
|
||||
}
|
||||
logForDebugging(`[PermissionSync] Failed to read pending requests: ${e}`)
|
||||
logError(e)
|
||||
return []
|
||||
}
|
||||
|
||||
const jsonFiles = files.filter(f => f.endsWith('.json') && f !== '.lock')
|
||||
|
||||
const results = await Promise.all(
|
||||
jsonFiles.map(async file => {
|
||||
const filePath = join(pendingDir, file)
|
||||
try {
|
||||
const content = await readFile(filePath, 'utf-8')
|
||||
const parsed = SwarmPermissionRequestSchema().safeParse(
|
||||
jsonParse(content),
|
||||
)
|
||||
if (parsed.success) {
|
||||
return parsed.data
|
||||
}
|
||||
logForDebugging(
|
||||
`[PermissionSync] Invalid request file ${file}: ${parsed.error.message}`,
|
||||
)
|
||||
return null
|
||||
} catch (err) {
|
||||
logForDebugging(
|
||||
`[PermissionSync] Failed to read request file ${file}: ${err}`,
|
||||
)
|
||||
return null
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
const requests = results.filter(r => r !== null)
|
||||
|
||||
// Sort by creation time (oldest first)
|
||||
requests.sort((a, b) => a.createdAt - b.createdAt)
|
||||
|
||||
return requests
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a resolved permission request by ID
|
||||
* Called by workers to check if their request has been resolved
|
||||
*
|
||||
* @returns The resolved request, or null if not yet resolved
|
||||
*/
|
||||
export async function readResolvedPermission(
|
||||
requestId: string,
|
||||
teamName?: string,
|
||||
): Promise<SwarmPermissionRequest | null> {
|
||||
const team = teamName || getTeamName()
|
||||
if (!team) {
|
||||
return null
|
||||
}
|
||||
|
||||
const resolvedPath = getResolvedRequestPath(team, requestId)
|
||||
|
||||
try {
|
||||
const content = await readFile(resolvedPath, 'utf-8')
|
||||
const parsed = SwarmPermissionRequestSchema().safeParse(jsonParse(content))
|
||||
if (parsed.success) {
|
||||
return parsed.data
|
||||
}
|
||||
logForDebugging(
|
||||
`[PermissionSync] Invalid resolved request ${requestId}: ${parsed.error.message}`,
|
||||
)
|
||||
return null
|
||||
} catch (e: unknown) {
|
||||
const code = getErrnoCode(e)
|
||||
if (code === 'ENOENT') {
|
||||
return null
|
||||
}
|
||||
logForDebugging(
|
||||
`[PermissionSync] Failed to read resolved request ${requestId}: ${e}`,
|
||||
)
|
||||
logError(e)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a permission request
|
||||
* Called by the team leader (or worker in self-resolution cases)
|
||||
*
|
||||
* Writes the resolution to resolved/, removes from pending/
|
||||
*/
|
||||
export async function resolvePermission(
|
||||
requestId: string,
|
||||
resolution: PermissionResolution,
|
||||
teamName?: string,
|
||||
): Promise<boolean> {
|
||||
const team = teamName || getTeamName()
|
||||
if (!team) {
|
||||
logForDebugging('[PermissionSync] No team name available')
|
||||
return false
|
||||
}
|
||||
|
||||
await ensurePermissionDirsAsync(team)
|
||||
|
||||
const pendingPath = getPendingRequestPath(team, requestId)
|
||||
const resolvedPath = getResolvedRequestPath(team, requestId)
|
||||
const lockFilePath = join(getPendingDir(team), '.lock')
|
||||
|
||||
await writeFile(lockFilePath, '', 'utf-8')
|
||||
|
||||
let release: (() => Promise<void>) | undefined
|
||||
try {
|
||||
release = await lockfile.lock(lockFilePath)
|
||||
|
||||
// Read the pending request
|
||||
let content: string
|
||||
try {
|
||||
content = await readFile(pendingPath, 'utf-8')
|
||||
} catch (e: unknown) {
|
||||
const code = getErrnoCode(e)
|
||||
if (code === 'ENOENT') {
|
||||
logForDebugging(
|
||||
`[PermissionSync] Pending request not found: ${requestId}`,
|
||||
)
|
||||
return false
|
||||
}
|
||||
throw e
|
||||
}
|
||||
|
||||
const parsed = SwarmPermissionRequestSchema().safeParse(jsonParse(content))
|
||||
if (!parsed.success) {
|
||||
logForDebugging(
|
||||
`[PermissionSync] Invalid pending request ${requestId}: ${parsed.error.message}`,
|
||||
)
|
||||
return false
|
||||
}
|
||||
|
||||
const request = parsed.data
|
||||
|
||||
// Update the request with resolution data
|
||||
const resolvedRequest: SwarmPermissionRequest = {
|
||||
...request,
|
||||
status: resolution.decision === 'approved' ? 'approved' : 'rejected',
|
||||
resolvedBy: resolution.resolvedBy,
|
||||
resolvedAt: Date.now(),
|
||||
feedback: resolution.feedback,
|
||||
updatedInput: resolution.updatedInput,
|
||||
permissionUpdates: resolution.permissionUpdates,
|
||||
}
|
||||
|
||||
// Write to resolved directory
|
||||
await writeFile(
|
||||
resolvedPath,
|
||||
jsonStringify(resolvedRequest, null, 2),
|
||||
'utf-8',
|
||||
)
|
||||
|
||||
// Remove from pending directory
|
||||
await unlink(pendingPath)
|
||||
|
||||
logForDebugging(
|
||||
`[PermissionSync] Resolved request ${requestId} with ${resolution.decision}`,
|
||||
)
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
logForDebugging(`[PermissionSync] Failed to resolve request: ${error}`)
|
||||
logError(error)
|
||||
return false
|
||||
} finally {
|
||||
if (release) {
|
||||
await release()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up old resolved permission files
|
||||
* Called periodically to prevent file accumulation
|
||||
*
|
||||
* @param teamName - Team name
|
||||
* @param maxAgeMs - Maximum age in milliseconds (default: 1 hour)
|
||||
*/
|
||||
export async function cleanupOldResolutions(
|
||||
teamName?: string,
|
||||
maxAgeMs = 3600000,
|
||||
): Promise<number> {
|
||||
const team = teamName || getTeamName()
|
||||
if (!team) {
|
||||
return 0
|
||||
}
|
||||
|
||||
const resolvedDir = getResolvedDir(team)
|
||||
|
||||
let files: string[]
|
||||
try {
|
||||
files = await readdir(resolvedDir)
|
||||
} catch (e: unknown) {
|
||||
const code = getErrnoCode(e)
|
||||
if (code === 'ENOENT') {
|
||||
return 0
|
||||
}
|
||||
logForDebugging(`[PermissionSync] Failed to cleanup resolutions: ${e}`)
|
||||
logError(e)
|
||||
return 0
|
||||
}
|
||||
|
||||
const now = Date.now()
|
||||
const jsonFiles = files.filter(f => f.endsWith('.json'))
|
||||
|
||||
const cleanupResults = await Promise.all(
|
||||
jsonFiles.map(async file => {
|
||||
const filePath = join(resolvedDir, file)
|
||||
try {
|
||||
const content = await readFile(filePath, 'utf-8')
|
||||
const request = jsonParse(content) as SwarmPermissionRequest
|
||||
|
||||
// Check if the resolution is old enough to clean up
|
||||
// Use >= to handle edge case where maxAgeMs is 0 (clean up everything)
|
||||
const resolvedAt = request.resolvedAt || request.createdAt
|
||||
if (now - resolvedAt >= maxAgeMs) {
|
||||
await unlink(filePath)
|
||||
logForDebugging(`[PermissionSync] Cleaned up old resolution: ${file}`)
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
} catch {
|
||||
// If we can't parse it, clean it up anyway
|
||||
try {
|
||||
await unlink(filePath)
|
||||
return 1
|
||||
} catch {
|
||||
// Ignore deletion errors
|
||||
return 0
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
const cleanedCount = cleanupResults.reduce<number>((sum, n) => sum + n, 0)
|
||||
|
||||
if (cleanedCount > 0) {
|
||||
logForDebugging(
|
||||
`[PermissionSync] Cleaned up ${cleanedCount} old resolutions`,
|
||||
)
|
||||
}
|
||||
|
||||
return cleanedCount
|
||||
}
|
||||
|
||||
/**
|
||||
* Legacy response type for worker polling
|
||||
* Used for backward compatibility with worker integration code
|
||||
*/
|
||||
export type PermissionResponse = {
|
||||
/** ID of the request this responds to */
|
||||
requestId: string
|
||||
/** Decision: approved or denied */
|
||||
decision: 'approved' | 'denied'
|
||||
/** Timestamp when response was created */
|
||||
timestamp: string
|
||||
/** Optional feedback message if denied */
|
||||
feedback?: string
|
||||
/** Optional updated input if the resolver modified it */
|
||||
updatedInput?: Record<string, unknown>
|
||||
/** Permission updates to apply (e.g., "always allow" rules) */
|
||||
permissionUpdates?: unknown[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Poll for a permission response (worker-side convenience function)
|
||||
* Converts the resolved request into a simpler response format
|
||||
*
|
||||
* @returns The permission response, or null if not yet resolved
|
||||
*/
|
||||
export async function pollForResponse(
|
||||
requestId: string,
|
||||
_agentName?: string,
|
||||
teamName?: string,
|
||||
): Promise<PermissionResponse | null> {
|
||||
const resolved = await readResolvedPermission(requestId, teamName)
|
||||
if (!resolved) {
|
||||
return null
|
||||
}
|
||||
|
||||
return {
|
||||
requestId: resolved.id,
|
||||
decision: resolved.status === 'approved' ? 'approved' : 'denied',
|
||||
timestamp: resolved.resolvedAt
|
||||
? new Date(resolved.resolvedAt).toISOString()
|
||||
: new Date(resolved.createdAt).toISOString(),
|
||||
feedback: resolved.feedback,
|
||||
updatedInput: resolved.updatedInput,
|
||||
permissionUpdates: resolved.permissionUpdates,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a worker's response after processing
|
||||
* This is an alias for deleteResolvedPermission for backward compatibility
|
||||
*/
|
||||
export async function removeWorkerResponse(
|
||||
requestId: string,
|
||||
_agentName?: string,
|
||||
teamName?: string,
|
||||
): Promise<void> {
|
||||
await deleteResolvedPermission(requestId, teamName)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the current agent is a team leader
|
||||
*/
|
||||
@@ -600,46 +167,6 @@ export function isSwarmWorker(): boolean {
|
||||
return !!teamName && !!agentId && !isTeamLeader()
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a resolved permission file
|
||||
* Called after a worker has processed the resolution
|
||||
*/
|
||||
export async function deleteResolvedPermission(
|
||||
requestId: string,
|
||||
teamName?: string,
|
||||
): Promise<boolean> {
|
||||
const team = teamName || getTeamName()
|
||||
if (!team) {
|
||||
return false
|
||||
}
|
||||
|
||||
const resolvedPath = getResolvedRequestPath(team, requestId)
|
||||
|
||||
try {
|
||||
await unlink(resolvedPath)
|
||||
logForDebugging(
|
||||
`[PermissionSync] Deleted resolved permission: ${requestId}`,
|
||||
)
|
||||
return true
|
||||
} catch (e: unknown) {
|
||||
const code = getErrnoCode(e)
|
||||
if (code === 'ENOENT') {
|
||||
return false
|
||||
}
|
||||
logForDebugging(
|
||||
`[PermissionSync] Failed to delete resolved permission: ${e}`,
|
||||
)
|
||||
logError(e)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Submit a permission request (alias for writePermissionRequest)
|
||||
* Provided for backward compatibility with worker integration code
|
||||
*/
|
||||
export const submitPermissionRequest = writePermissionRequest
|
||||
|
||||
// ============================================================================
|
||||
// Mailbox-Based Permission System
|
||||
// ============================================================================
|
||||
|
||||
@@ -35,11 +35,6 @@ import {
|
||||
STOPPED_DISPLAY_MS,
|
||||
} from '../task/framework.js'
|
||||
import { createTeammateContext } from '../teammateContext.js'
|
||||
import {
|
||||
isPerfettoTracingEnabled,
|
||||
registerAgent as registerPerfettoAgent,
|
||||
unregisterAgent as unregisterPerfettoAgent,
|
||||
} from '../telemetry/perfettoTracing.js'
|
||||
import { removeMemberByAgentId } from './teamHelpers.js'
|
||||
|
||||
type SetAppStateFn = (updater: (prev: AppState) => AppState) => void
|
||||
@@ -146,11 +141,6 @@ export async function spawnInProcessTeammate(
|
||||
abortController,
|
||||
})
|
||||
|
||||
// Register agent in Perfetto trace for hierarchy visualization
|
||||
if (isPerfettoTracingEnabled()) {
|
||||
registerPerfettoAgent(agentId, name, parentSessionId)
|
||||
}
|
||||
|
||||
// Create task state
|
||||
const description = `${name}: ${prompt.substring(0, 50)}${prompt.length > 50 ? '...' : ''}`
|
||||
|
||||
@@ -319,10 +309,5 @@ export function killInProcessTeammate(
|
||||
)
|
||||
}
|
||||
|
||||
// Release perfetto agent registry entry
|
||||
if (agentId) {
|
||||
unregisterPerfettoAgent(agentId)
|
||||
}
|
||||
|
||||
return killed
|
||||
}
|
||||
|
||||
@@ -66,7 +66,6 @@ export type TeamFile = {
|
||||
description?: string
|
||||
createdAt: number
|
||||
leadAgentId: string
|
||||
leadSessionId?: string // Actual session UUID of the leader (for discovery)
|
||||
hiddenPaneIds?: string[] // Pane IDs that are currently hidden from the UI
|
||||
teamAllowedPaths?: TeamAllowedPath[] // Paths all teammates can edit without asking
|
||||
members: Array<{
|
||||
@@ -74,15 +73,13 @@ export type TeamFile = {
|
||||
name: string
|
||||
agentType?: string
|
||||
model?: string
|
||||
prompt?: string
|
||||
prompt?: string // Legacy field; stripped from persisted configs
|
||||
color?: string
|
||||
planModeRequired?: boolean
|
||||
joinedAt: number
|
||||
tmuxPaneId: string
|
||||
cwd: string
|
||||
worktreePath?: string
|
||||
sessionId?: string
|
||||
subscriptions: string[]
|
||||
backendType?: BackendType
|
||||
isActive?: boolean // false when idle, undefined/true when active
|
||||
mode?: PermissionMode // Current permission mode for this teammate
|
||||
@@ -123,6 +120,42 @@ export function getTeamFilePath(teamName: string): string {
|
||||
return join(getTeamDir(teamName), 'config.json')
|
||||
}
|
||||
|
||||
function sanitizeTeamFileForPersistence(teamFile: TeamFile): TeamFile {
|
||||
return {
|
||||
name: teamFile.name,
|
||||
...(teamFile.description ? { description: teamFile.description } : {}),
|
||||
createdAt: teamFile.createdAt,
|
||||
leadAgentId: teamFile.leadAgentId,
|
||||
...(teamFile.hiddenPaneIds && teamFile.hiddenPaneIds.length > 0
|
||||
? { hiddenPaneIds: [...teamFile.hiddenPaneIds] }
|
||||
: {}),
|
||||
...(teamFile.teamAllowedPaths && teamFile.teamAllowedPaths.length > 0
|
||||
? {
|
||||
teamAllowedPaths: teamFile.teamAllowedPaths.map(path => ({
|
||||
...path,
|
||||
})),
|
||||
}
|
||||
: {}),
|
||||
members: teamFile.members.map(member => ({
|
||||
agentId: member.agentId,
|
||||
name: member.name,
|
||||
...(member.agentType ? { agentType: member.agentType } : {}),
|
||||
...(member.model ? { model: member.model } : {}),
|
||||
...(member.color ? { color: member.color } : {}),
|
||||
...(member.planModeRequired !== undefined
|
||||
? { planModeRequired: member.planModeRequired }
|
||||
: {}),
|
||||
joinedAt: member.joinedAt,
|
||||
tmuxPaneId: member.tmuxPaneId,
|
||||
cwd: member.cwd,
|
||||
...(member.worktreePath ? { worktreePath: member.worktreePath } : {}),
|
||||
...(member.backendType ? { backendType: member.backendType } : {}),
|
||||
...(member.isActive !== undefined ? { isActive: member.isActive } : {}),
|
||||
...(member.mode ? { mode: member.mode } : {}),
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a team file by name (sync — for sync contexts like React render paths)
|
||||
* @internal Exported for team discovery UI
|
||||
@@ -131,7 +164,7 @@ export function getTeamFilePath(teamName: string): string {
|
||||
export function readTeamFile(teamName: string): TeamFile | null {
|
||||
try {
|
||||
const content = readFileSync(getTeamFilePath(teamName), 'utf-8')
|
||||
return jsonParse(content) as TeamFile
|
||||
return sanitizeTeamFileForPersistence(jsonParse(content) as TeamFile)
|
||||
} catch (e) {
|
||||
if (getErrnoCode(e) === 'ENOENT') return null
|
||||
logForDebugging(
|
||||
@@ -149,7 +182,7 @@ export async function readTeamFileAsync(
|
||||
): Promise<TeamFile | null> {
|
||||
try {
|
||||
const content = await readFile(getTeamFilePath(teamName), 'utf-8')
|
||||
return jsonParse(content) as TeamFile
|
||||
return sanitizeTeamFileForPersistence(jsonParse(content) as TeamFile)
|
||||
} catch (e) {
|
||||
if (getErrnoCode(e) === 'ENOENT') return null
|
||||
logForDebugging(
|
||||
@@ -166,7 +199,10 @@ export async function readTeamFileAsync(
|
||||
function writeTeamFile(teamName: string, teamFile: TeamFile): void {
|
||||
const teamDir = getTeamDir(teamName)
|
||||
mkdirSync(teamDir, { recursive: true })
|
||||
writeFileSync(getTeamFilePath(teamName), jsonStringify(teamFile, null, 2))
|
||||
writeFileSync(
|
||||
getTeamFilePath(teamName),
|
||||
jsonStringify(sanitizeTeamFileForPersistence(teamFile), null, 2),
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -178,7 +214,10 @@ export async function writeTeamFileAsync(
|
||||
): Promise<void> {
|
||||
const teamDir = getTeamDir(teamName)
|
||||
await mkdir(teamDir, { recursive: true })
|
||||
await writeFile(getTeamFilePath(teamName), jsonStringify(teamFile, null, 2))
|
||||
await writeFile(
|
||||
getTeamFilePath(teamName),
|
||||
jsonStringify(sanitizeTeamFileForPersistence(teamFile), null, 2),
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -20,7 +20,6 @@ export type TeammateStatus = {
|
||||
agentId: string
|
||||
agentType?: string
|
||||
model?: string
|
||||
prompt?: string
|
||||
status: 'running' | 'idle' | 'unknown'
|
||||
color?: string
|
||||
idleSince?: string // ISO timestamp from idle notification
|
||||
@@ -60,7 +59,6 @@ export function getTeammateStatuses(teamName: string): TeammateStatus[] {
|
||||
agentId: member.agentId,
|
||||
agentType: member.agentType,
|
||||
model: member.model,
|
||||
prompt: member.prompt,
|
||||
status,
|
||||
color: member.color,
|
||||
tmuxPaneId: member.tmuxPaneId,
|
||||
|
||||
@@ -15,7 +15,6 @@ import { PermissionModeSchema } from '../entrypoints/sdk/coreSchemas.js'
|
||||
import { SEND_MESSAGE_TOOL_NAME } from '../tools/SendMessageTool/constants.js'
|
||||
import type { Message } from '../types/message.js'
|
||||
import { generateRequestId } from './agentId.js'
|
||||
import { count } from './array.js'
|
||||
import { logForDebugging } from './debug.js'
|
||||
import { getTeamsDir } from './envUtils.js'
|
||||
import { getErrnoCode } from './errors.js'
|
||||
@@ -58,11 +57,7 @@ export function getInboxPath(agentName: string, teamName?: string): string {
|
||||
const safeTeam = sanitizePathComponent(team)
|
||||
const safeAgentName = sanitizePathComponent(agentName)
|
||||
const inboxDir = join(getTeamsDir(), safeTeam, 'inboxes')
|
||||
const fullPath = join(inboxDir, `${safeAgentName}.json`)
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] getInboxPath: agent=${agentName}, team=${team}, fullPath=${fullPath}`,
|
||||
)
|
||||
return fullPath
|
||||
return join(inboxDir, `${safeAgentName}.json`)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -73,7 +68,7 @@ async function ensureInboxDir(teamName?: string): Promise<void> {
|
||||
const safeTeam = sanitizePathComponent(team)
|
||||
const inboxDir = join(getTeamsDir(), safeTeam, 'inboxes')
|
||||
await mkdir(inboxDir, { recursive: true })
|
||||
logForDebugging(`[TeammateMailbox] Ensured inbox directory: ${inboxDir}`)
|
||||
logForDebugging('[TeammateMailbox] Ensured inbox directory')
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -86,7 +81,6 @@ export async function readMailbox(
|
||||
teamName?: string,
|
||||
): Promise<TeammateMessage[]> {
|
||||
const inboxPath = getInboxPath(agentName, teamName)
|
||||
logForDebugging(`[TeammateMailbox] readMailbox: path=${inboxPath}`)
|
||||
|
||||
try {
|
||||
const content = await readFile(inboxPath, 'utf-8')
|
||||
@@ -101,7 +95,7 @@ export async function readMailbox(
|
||||
logForDebugging(`[TeammateMailbox] readMailbox: file does not exist`)
|
||||
return []
|
||||
}
|
||||
logForDebugging(`Failed to read inbox for ${agentName}: ${error}`)
|
||||
logForDebugging(`[TeammateMailbox] Failed to read inbox for ${agentName}`)
|
||||
logError(error)
|
||||
return []
|
||||
}
|
||||
@@ -142,7 +136,7 @@ export async function writeToMailbox(
|
||||
const lockFilePath = `${inboxPath}.lock`
|
||||
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] writeToMailbox: recipient=${recipientName}, from=${message.from}, path=${inboxPath}`,
|
||||
`[TeammateMailbox] writeToMailbox: recipient=${recipientName}, from=${message.from}`,
|
||||
)
|
||||
|
||||
// Ensure the inbox file exists before locking (proper-lockfile requires the file to exist)
|
||||
@@ -153,7 +147,7 @@ export async function writeToMailbox(
|
||||
const code = getErrnoCode(error)
|
||||
if (code !== 'EEXIST') {
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] writeToMailbox: failed to create inbox file: ${error}`,
|
||||
`[TeammateMailbox] writeToMailbox: failed to create inbox file`,
|
||||
)
|
||||
logError(error)
|
||||
return
|
||||
@@ -182,7 +176,9 @@ export async function writeToMailbox(
|
||||
`[TeammateMailbox] Wrote message to ${recipientName}'s inbox from ${message.from}`,
|
||||
)
|
||||
} catch (error) {
|
||||
logForDebugging(`Failed to write to inbox for ${recipientName}: ${error}`)
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] Failed to write to inbox for ${recipientName}`,
|
||||
)
|
||||
logError(error)
|
||||
} finally {
|
||||
if (release) {
|
||||
@@ -192,8 +188,8 @@ export async function writeToMailbox(
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark a specific message in a teammate's inbox as read by index
|
||||
* Uses file locking to prevent race conditions
|
||||
* Remove a specific processed message from a teammate's inbox by index.
|
||||
* Uses file locking to prevent race conditions.
|
||||
* @param agentName - The agent name to mark message as read for
|
||||
* @param teamName - Optional team name
|
||||
* @param messageIndex - Index of the message to mark as read
|
||||
@@ -205,7 +201,7 @@ export async function markMessageAsReadByIndex(
|
||||
): Promise<void> {
|
||||
const inboxPath = getInboxPath(agentName, teamName)
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessageAsReadByIndex called: agentName=${agentName}, teamName=${teamName}, index=${messageIndex}, path=${inboxPath}`,
|
||||
`[TeammateMailbox] markMessageAsReadByIndex called: agentName=${agentName}, index=${messageIndex}`,
|
||||
)
|
||||
|
||||
const lockFilePath = `${inboxPath}.lock`
|
||||
@@ -242,22 +238,26 @@ export async function markMessageAsReadByIndex(
|
||||
return
|
||||
}
|
||||
|
||||
messages[messageIndex] = { ...message, read: true }
|
||||
const updatedMessages = messages.filter(
|
||||
(currentMessage, index) => index !== messageIndex && !currentMessage.read,
|
||||
)
|
||||
|
||||
await writeFile(inboxPath, jsonStringify(messages, null, 2), 'utf-8')
|
||||
await writeFile(
|
||||
inboxPath,
|
||||
jsonStringify(updatedMessages, null, 2),
|
||||
'utf-8',
|
||||
)
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessageAsReadByIndex: marked message at index ${messageIndex} as read`,
|
||||
`[TeammateMailbox] markMessageAsReadByIndex: removed message at index ${messageIndex} from inbox`,
|
||||
)
|
||||
} catch (error) {
|
||||
const code = getErrnoCode(error)
|
||||
if (code === 'ENOENT') {
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessageAsReadByIndex: file does not exist at ${inboxPath}`,
|
||||
)
|
||||
logForDebugging(`[TeammateMailbox] markMessageAsReadByIndex: file missing`)
|
||||
return
|
||||
}
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessageAsReadByIndex FAILED for ${agentName}: ${error}`,
|
||||
`[TeammateMailbox] markMessageAsReadByIndex failed for ${agentName}`,
|
||||
)
|
||||
logError(error)
|
||||
} finally {
|
||||
@@ -270,77 +270,6 @@ export async function markMessageAsReadByIndex(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mark all messages in a teammate's inbox as read
|
||||
* Uses file locking to prevent race conditions
|
||||
* @param agentName - The agent name to mark messages as read for
|
||||
* @param teamName - Optional team name
|
||||
*/
|
||||
export async function markMessagesAsRead(
|
||||
agentName: string,
|
||||
teamName?: string,
|
||||
): Promise<void> {
|
||||
const inboxPath = getInboxPath(agentName, teamName)
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessagesAsRead called: agentName=${agentName}, teamName=${teamName}, path=${inboxPath}`,
|
||||
)
|
||||
|
||||
const lockFilePath = `${inboxPath}.lock`
|
||||
|
||||
let release: (() => Promise<void>) | undefined
|
||||
try {
|
||||
logForDebugging(`[TeammateMailbox] markMessagesAsRead: acquiring lock...`)
|
||||
release = await lockfile.lock(inboxPath, {
|
||||
lockfilePath: lockFilePath,
|
||||
...LOCK_OPTIONS,
|
||||
})
|
||||
logForDebugging(`[TeammateMailbox] markMessagesAsRead: lock acquired`)
|
||||
|
||||
// Re-read messages after acquiring lock to get the latest state
|
||||
const messages = await readMailbox(agentName, teamName)
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessagesAsRead: read ${messages.length} messages after lock`,
|
||||
)
|
||||
|
||||
if (messages.length === 0) {
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessagesAsRead: no messages to mark`,
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
const unreadCount = count(messages, m => !m.read)
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessagesAsRead: ${unreadCount} unread of ${messages.length} total`,
|
||||
)
|
||||
|
||||
// messages comes from jsonParse — fresh, unshared objects safe to mutate
|
||||
for (const m of messages) m.read = true
|
||||
|
||||
await writeFile(inboxPath, jsonStringify(messages, null, 2), 'utf-8')
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessagesAsRead: WROTE ${unreadCount} message(s) as read to ${inboxPath}`,
|
||||
)
|
||||
} catch (error) {
|
||||
const code = getErrnoCode(error)
|
||||
if (code === 'ENOENT') {
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessagesAsRead: file does not exist at ${inboxPath}`,
|
||||
)
|
||||
return
|
||||
}
|
||||
logForDebugging(
|
||||
`[TeammateMailbox] markMessagesAsRead FAILED for ${agentName}: ${error}`,
|
||||
)
|
||||
logError(error)
|
||||
} finally {
|
||||
if (release) {
|
||||
await release()
|
||||
logForDebugging(`[TeammateMailbox] markMessagesAsRead: lock released`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear a teammate's inbox (delete all messages)
|
||||
* @param agentName - The agent name to clear inbox for
|
||||
@@ -362,7 +291,7 @@ export async function clearMailbox(
|
||||
if (code === 'ENOENT') {
|
||||
return
|
||||
}
|
||||
logForDebugging(`Failed to clear inbox for ${agentName}: ${error}`)
|
||||
logForDebugging(`[TeammateMailbox] Failed to clear inbox for ${agentName}`)
|
||||
logError(error)
|
||||
}
|
||||
}
|
||||
@@ -1095,8 +1024,8 @@ export function isStructuredProtocolMessage(messageText: string): boolean {
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks only messages matching a predicate as read, leaving others unread.
|
||||
* Uses the same file-locking mechanism as markMessagesAsRead.
|
||||
* Removes only messages matching a predicate, leaving the rest unread.
|
||||
* Uses the same file-locking mechanism as the other mailbox update helpers.
|
||||
*/
|
||||
export async function markMessagesAsReadByPredicate(
|
||||
agentName: string,
|
||||
@@ -1119,8 +1048,8 @@ export async function markMessagesAsReadByPredicate(
|
||||
return
|
||||
}
|
||||
|
||||
const updatedMessages = messages.map(m =>
|
||||
!m.read && predicate(m) ? { ...m, read: true } : m,
|
||||
const updatedMessages = messages.filter(
|
||||
m => !m.read && !predicate(m),
|
||||
)
|
||||
|
||||
await writeFile(inboxPath, jsonStringify(updatedMessages, null, 2), 'utf-8')
|
||||
@@ -1174,7 +1103,7 @@ export function getLastPeerDmSummary(messages: Message[]): string | undefined {
|
||||
const summary =
|
||||
'summary' in block.input && typeof block.input.summary === 'string'
|
||||
? block.input.summary
|
||||
: block.input.message.slice(0, 80)
|
||||
: 'sent update'
|
||||
return `[to ${to}] ${summary}`
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
/**
|
||||
* Detailed beta tracing egress is disabled in this build.
|
||||
*
|
||||
* The exported helpers remain for compile-time compatibility, but do not
|
||||
* retain tracing state or emit tracing attributes.
|
||||
*/
|
||||
|
||||
type AttributeValue = string | number | boolean
|
||||
|
||||
export interface SpanAttributeWriter {
|
||||
setAttribute?(_key: string, _value: AttributeValue): void
|
||||
setAttributes?(_attributes: Record<string, AttributeValue>): void
|
||||
}
|
||||
|
||||
export interface LLMRequestNewContext {
|
||||
systemPrompt?: string
|
||||
querySource?: string
|
||||
tools?: string
|
||||
}
|
||||
|
||||
const MAX_CONTENT_SIZE = 60 * 1024
|
||||
|
||||
export function clearBetaTracingState(): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function isBetaTracingEnabled(): boolean {
|
||||
return false
|
||||
}
|
||||
|
||||
export function truncateContent(
|
||||
content: string,
|
||||
maxSize: number = MAX_CONTENT_SIZE,
|
||||
): { content: string; truncated: boolean } {
|
||||
if (content.length <= maxSize) {
|
||||
return { content, truncated: false }
|
||||
}
|
||||
|
||||
return {
|
||||
content:
|
||||
content.slice(0, maxSize) +
|
||||
'\n\n[TRUNCATED - Content exceeds 60KB limit]',
|
||||
truncated: true,
|
||||
}
|
||||
}
|
||||
|
||||
export function addBetaInteractionAttributes(
|
||||
_span: SpanAttributeWriter,
|
||||
_userPrompt: string,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function addBetaLLMRequestAttributes(
|
||||
_span: SpanAttributeWriter,
|
||||
_newContext?: LLMRequestNewContext,
|
||||
_messagesForAPI?: unknown[],
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function addBetaLLMResponseAttributes(
|
||||
_attributes: Record<string, AttributeValue>,
|
||||
_metadata?: {
|
||||
modelOutput?: string
|
||||
thinkingOutput?: string
|
||||
},
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function addBetaToolInputAttributes(
|
||||
_span: SpanAttributeWriter,
|
||||
_toolName: string,
|
||||
_toolInput: string,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function addBetaToolResultAttributes(
|
||||
_attributes: Record<string, AttributeValue>,
|
||||
_toolName: string | number | boolean,
|
||||
_toolResult: string,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
/**
|
||||
* OpenTelemetry event egress is disabled in this build.
|
||||
*/
|
||||
|
||||
export function redactIfDisabled(_content: string): string {
|
||||
return '<REDACTED>'
|
||||
}
|
||||
|
||||
export async function logOTelEvent(
|
||||
_eventName: string,
|
||||
_metadata: { [key: string]: string | undefined } = {},
|
||||
): Promise<void> {
|
||||
return
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,14 +1,10 @@
|
||||
/**
|
||||
* Plugin telemetry helpers — shared field builders for plugin lifecycle events.
|
||||
* Legacy plugin metadata helpers shared by call sites that still assemble
|
||||
* analytics-compatible payload shapes.
|
||||
*
|
||||
* Implements the twin-column privacy pattern: every user-defined-name field
|
||||
* emits both a raw value (routed to PII-tagged _PROTO_* BQ columns) and a
|
||||
* redacted twin (real name iff marketplace ∈ allowlist, else 'third-party').
|
||||
*
|
||||
* plugin_id_hash provides an opaque per-plugin aggregation key with no privacy
|
||||
* dependency — sha256(name@marketplace + FIXED_SALT) truncated to 16 chars.
|
||||
* This answers distinct-count and per-plugin-trend questions that the
|
||||
* redacted column can't, without exposing user-defined names.
|
||||
* In this fork the downstream analytics sinks are disabled, so these helpers
|
||||
* only normalize/redact fields for local compatibility code; they do not
|
||||
* imply an active telemetry export path.
|
||||
*/
|
||||
|
||||
import { createHash } from 'crypto'
|
||||
@@ -25,18 +21,14 @@ import {
|
||||
// through commands.js. Marketplace schemas.ts enforces 'builtin' is reserved.
|
||||
const BUILTIN_MARKETPLACE_NAME = 'builtin'
|
||||
|
||||
// Fixed salt for plugin_id_hash. Same constant across all repos and emission
|
||||
// sites. Not per-org, not rotated — per-org salt would defeat cross-org
|
||||
// distinct-count, rotation would break trend lines. Customers can compute the
|
||||
// same hash on their known plugin names to reverse-match their own telemetry.
|
||||
// Fixed salt for plugin_id_hash. Kept stable so legacy field shapes that still
|
||||
// use this helper continue to derive the same opaque key.
|
||||
const PLUGIN_ID_HASH_SALT = 'claude-plugin-telemetry-v1'
|
||||
|
||||
/**
|
||||
* Opaque per-plugin aggregation key. Input is the name@marketplace string as
|
||||
* it appears in enabledPlugins keys, lowercased on the marketplace suffix for
|
||||
* reproducibility. 16-char truncation keeps BQ GROUP BY cardinality manageable
|
||||
* while making collisions negligible at projected 10k-plugin scale. Name case
|
||||
* is preserved in both branches (enabledPlugins keys are case-sensitive).
|
||||
* Opaque per-plugin compatibility key derived from the name@marketplace
|
||||
* string. The 16-char truncation keeps the identifier short while preserving
|
||||
* a stable grouping key for local compatibility code.
|
||||
*/
|
||||
export function hashPluginId(name: string, marketplace?: string): string {
|
||||
const key = marketplace ? `${name}@${marketplace.toLowerCase()}` : name
|
||||
@@ -90,9 +82,9 @@ export type InstallSource =
|
||||
| 'deep-link'
|
||||
|
||||
/**
|
||||
* Common plugin telemetry fields keyed off name@marketplace. Returns the
|
||||
* hash, scope enum, and the redacted-twin columns. Callers add the raw
|
||||
* _PROTO_* fields separately (those require the PII-tagged marker type).
|
||||
* Common plugin metadata fields keyed off name@marketplace. Keeps the legacy
|
||||
* field set in one place so no-op analytics compatibility callers do not have
|
||||
* to duplicate redaction logic.
|
||||
*/
|
||||
export function buildPluginTelemetryFields(
|
||||
name: string,
|
||||
@@ -144,9 +136,7 @@ export function buildPluginCommandTelemetryFields(
|
||||
}
|
||||
|
||||
/**
|
||||
* Bounded-cardinality error bucket for CLI plugin operation failures.
|
||||
* Maps free-form error messages to 5 stable categories so dashboard
|
||||
* GROUP BY stays tractable.
|
||||
* Stable error buckets for CLI plugin operation failures.
|
||||
*/
|
||||
export type PluginCommandErrorCategory =
|
||||
| 'network'
|
||||
|
||||
@@ -1,172 +0,0 @@
|
||||
/**
|
||||
* OpenTelemetry session tracing is disabled in this build.
|
||||
*
|
||||
* This module preserves the tracing API surface for callers, but all exported
|
||||
* operations are local no-ops and never collect or forward tracing data.
|
||||
*/
|
||||
|
||||
export { isBetaTracingEnabled, type LLMRequestNewContext } from './betaSessionTracing.js'
|
||||
|
||||
/**
 * Minimal tracing-span contract preserved for callers even though the
 * implementation in this build is a no-op (see NoopSpan elsewhere in this
 * module).
 */
export interface Span {
  /** Finishes the span. */
  end(): void
  /** Sets a single attribute on the span. */
  setAttribute(
    _key: string,
    _value: string | number | boolean,
  ): void
  /** Sets multiple attributes at once. */
  setAttributes(
    _attributes: Record<string, string | number | boolean>,
  ): void
  /** Records a point-in-time event with optional attributes. */
  addEvent(
    _eventName: string,
    _attributes?: Record<string, string | number | boolean>,
  ): void
  /** Records an exception against the span. */
  recordException(_error: Error): void
}
|
||||
|
||||
class NoopSpan implements Span {
|
||||
end(): void {}
|
||||
|
||||
setAttribute(
|
||||
_key: string,
|
||||
_value: string | number | boolean,
|
||||
): void {}
|
||||
|
||||
setAttributes(
|
||||
_attributes: Record<string, string | number | boolean>,
|
||||
): void {}
|
||||
|
||||
addEvent(
|
||||
_eventName: string,
|
||||
_attributes?: Record<string, string | number | boolean>,
|
||||
): void {}
|
||||
|
||||
recordException(_error: Error): void {}
|
||||
}
|
||||
|
||||
const NOOP_SPAN: Span = new NoopSpan()
|
||||
|
||||
// Per-request metrics reported when an LLM request span ends. All fields are
// optional: callers supply whatever they happened to measure.
type LLMRequestMetadata = {
  inputTokens?: number
  outputTokens?: number
  cacheReadTokens?: number
  cacheCreationTokens?: number
  // Whether the request ultimately succeeded.
  success?: boolean
  // HTTP status of the final attempt — presumably; TODO confirm at call sites.
  statusCode?: number
  error?: string
  // Attempt number when retries occurred.
  attempt?: number
  modelResponse?: string
  modelOutput?: string
  thinkingOutput?: string
  hasToolCall?: boolean
  // Time-to-first-token in milliseconds — presumably; TODO confirm.
  ttftMs?: number
  requestSetupMs?: number
  // Start timestamps per retry attempt — units/epoch not shown here; verify.
  attemptStartTimes?: number[]
}
|
||||
|
||||
// Aggregate outcome counts reported when a hook span ends.
type HookSpanMetadata = {
  numSuccess?: number
  numBlocking?: number
  numNonBlockingError?: number
  numCancelled?: number
}
|
||||
|
||||
export function isEnhancedTelemetryEnabled(): boolean {
|
||||
return false
|
||||
}
|
||||
|
||||
export function startInteractionSpan(_userPrompt: string): Span {
|
||||
return NOOP_SPAN
|
||||
}
|
||||
|
||||
export function endInteractionSpan(): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function startLLMRequestSpan(
|
||||
_model: string,
|
||||
_newContext?: import('./betaSessionTracing.js').LLMRequestNewContext,
|
||||
_messagesForAPI?: unknown[],
|
||||
_fastMode?: boolean,
|
||||
): Span {
|
||||
return NOOP_SPAN
|
||||
}
|
||||
|
||||
export function endLLMRequestSpan(
|
||||
_span?: Span,
|
||||
_metadata?: LLMRequestMetadata,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function startToolSpan(
|
||||
_toolName: string,
|
||||
_toolAttributes?: Record<string, string | number | boolean>,
|
||||
_toolInput?: string,
|
||||
): Span {
|
||||
return NOOP_SPAN
|
||||
}
|
||||
|
||||
export function startToolBlockedOnUserSpan(): Span {
|
||||
return NOOP_SPAN
|
||||
}
|
||||
|
||||
export function endToolBlockedOnUserSpan(
|
||||
_decision?: string,
|
||||
_source?: string,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function startToolExecutionSpan(): Span {
|
||||
return NOOP_SPAN
|
||||
}
|
||||
|
||||
export function endToolExecutionSpan(metadata?: {
|
||||
success?: boolean
|
||||
error?: string
|
||||
}): void {
|
||||
void metadata
|
||||
return
|
||||
}
|
||||
|
||||
export function endToolSpan(
|
||||
_toolResult?: string,
|
||||
_resultTokens?: number,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function addToolContentEvent(
|
||||
_eventName: string,
|
||||
_attributes: Record<string, string | number | boolean>,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
|
||||
export function getCurrentSpan(): Span | null {
|
||||
return null
|
||||
}
|
||||
|
||||
export async function executeInSpan<T>(
|
||||
_spanName: string,
|
||||
fn: (span: Span) => Promise<T>,
|
||||
_attributes?: Record<string, string | number | boolean>,
|
||||
): Promise<T> {
|
||||
return fn(NOOP_SPAN)
|
||||
}
|
||||
|
||||
export function startHookSpan(
|
||||
_hookEvent: string,
|
||||
_hookName: string,
|
||||
_numHooks: number,
|
||||
_hookDefinitions: string,
|
||||
): Span {
|
||||
return NOOP_SPAN
|
||||
}
|
||||
|
||||
export function endHookSpan(
|
||||
_span: Span,
|
||||
_metadata?: HookSpanMetadata,
|
||||
): void {
|
||||
return
|
||||
}
|
||||
@@ -47,6 +47,119 @@ export type TeleportResult = {
|
||||
export type TeleportProgressStep = 'validating' | 'fetching_logs' | 'fetching_branch' | 'checking_out' | 'done';
|
||||
export type TeleportProgressCallback = (step: TeleportProgressStep) => void;
|
||||
|
||||
function summarizeTeleportPayloadForDebug(payload: {
|
||||
title: string;
|
||||
events: Array<{
|
||||
type: string;
|
||||
data?: Record<string, unknown>;
|
||||
}>;
|
||||
session_context: {
|
||||
sources?: Array<{
|
||||
type?: string;
|
||||
}>;
|
||||
outcomes?: Array<{
|
||||
type?: string;
|
||||
}>;
|
||||
environment_variables?: Record<string, string>;
|
||||
seed_bundle_file_id?: string;
|
||||
model?: string;
|
||||
reuse_outcome_branches?: boolean;
|
||||
github_pr?: unknown;
|
||||
};
|
||||
environment_id: string;
|
||||
}): string {
|
||||
const eventTypes = payload.events.map(event => {
|
||||
if (event.type !== 'event') return event.type;
|
||||
const request =
|
||||
event.data && typeof event.data.request === 'object' && event.data.request
|
||||
? (event.data.request as Record<string, unknown>)
|
||||
: undefined;
|
||||
const subtype =
|
||||
request && typeof request.subtype === 'string' ? request.subtype : undefined;
|
||||
const dataType =
|
||||
event.data && typeof event.data.type === 'string' ? event.data.type : 'unknown';
|
||||
return subtype ? `event:${dataType}:${subtype}` : `event:${dataType}`;
|
||||
});
|
||||
return jsonStringify({
|
||||
titleLength: payload.title.length,
|
||||
eventCount: payload.events.length,
|
||||
eventTypes,
|
||||
sourceTypes: (payload.session_context.sources ?? []).map(source => source.type ?? 'unknown'),
|
||||
outcomeTypes: (payload.session_context.outcomes ?? []).map(outcome => outcome.type ?? 'unknown'),
|
||||
envVarCount: Object.keys(payload.session_context.environment_variables ?? {}).length,
|
||||
hasSeedBundle: typeof payload.session_context.seed_bundle_file_id === 'string',
|
||||
hasModel: typeof payload.session_context.model === 'string',
|
||||
reuseOutcomeBranches: Boolean(payload.session_context.reuse_outcome_branches),
|
||||
hasGithubPr: Boolean(payload.session_context.github_pr),
|
||||
hasEnvironmentId: Boolean(payload.environment_id)
|
||||
});
|
||||
}
|
||||
|
||||
function summarizeTeleportResponseDataForDebug(data: unknown): string {
|
||||
if (data === null) return 'null';
|
||||
if (data === undefined) return 'undefined';
|
||||
if (Array.isArray(data)) {
|
||||
return jsonStringify({
|
||||
payloadType: 'array',
|
||||
length: data.length
|
||||
});
|
||||
}
|
||||
if (typeof data === 'object') {
|
||||
const value = data as Record<string, unknown>;
|
||||
return jsonStringify({
|
||||
payloadType: 'object',
|
||||
keys: Object.keys(value).sort().slice(0, 12),
|
||||
hasId: typeof value.id === 'string',
|
||||
hasTitle: typeof value.title === 'string',
|
||||
hasSessionStatus: typeof value.session_status === 'string',
|
||||
loglinesCount: Array.isArray(value.loglines) ? value.loglines.length : 0,
|
||||
dataCount: Array.isArray(value.data) ? value.data.length : 0
|
||||
});
|
||||
}
|
||||
return typeof data;
|
||||
}
|
||||
|
||||
function summarizeTeleportEnvironmentsForDebug(environments: Array<{
|
||||
kind: string;
|
||||
}>): string {
|
||||
const kinds = [...new Set(environments.map(environment => environment.kind))].sort();
|
||||
return jsonStringify({
|
||||
count: environments.length,
|
||||
kinds,
|
||||
hasAnthropicCloud: kinds.includes('anthropic_cloud'),
|
||||
hasBridge: kinds.includes('bridge')
|
||||
});
|
||||
}
|
||||
|
||||
function summarizeTeleportErrorForDebug(error: unknown): string {
|
||||
const err = toError(error);
|
||||
const summary: Record<string, unknown> = {
|
||||
errorType: err.constructor.name,
|
||||
errorName: err.name,
|
||||
hasMessage: err.message.length > 0,
|
||||
hasStack: Boolean(err.stack)
|
||||
};
|
||||
if (error && typeof error === 'object') {
|
||||
const errorObj = error as Record<string, unknown>;
|
||||
if (typeof errorObj.code === 'string' || typeof errorObj.code === 'number') {
|
||||
summary.code = errorObj.code;
|
||||
}
|
||||
if (typeof errorObj.status === 'number') {
|
||||
summary.status = errorObj.status;
|
||||
}
|
||||
if (errorObj.response && typeof errorObj.response === 'object') {
|
||||
const response = errorObj.response as Record<string, unknown>;
|
||||
if (typeof response.status === 'number') {
|
||||
summary.httpStatus = response.status;
|
||||
}
|
||||
if ('data' in response) {
|
||||
summary.response = summarizeTeleportResponseDataForDebug(response.data);
|
||||
}
|
||||
}
|
||||
}
|
||||
return jsonStringify(summary);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a system message to inform about teleport session resume
|
||||
* @returns SystemMessage indicating session was resumed from another machine
|
||||
@@ -702,7 +815,7 @@ export async function pollRemoteSessionEvents(sessionId: string, afterId: string
|
||||
branch = getBranchFromSession(sessionData);
|
||||
sessionStatus = sessionData.session_status as PollRemoteSessionResponse['sessionStatus'];
|
||||
} catch (e) {
|
||||
logForDebugging(`teleport: failed to fetch session ${sessionId} metadata: ${e}`, {
|
||||
logForDebugging(`teleport: failed to fetch session metadata: ${summarizeTeleportErrorForDebug(e)}`, {
|
||||
level: 'debug'
|
||||
});
|
||||
}
|
||||
@@ -877,18 +990,23 @@ export async function teleportToRemote(options: {
|
||||
},
|
||||
environment_id: options.environmentId
|
||||
};
|
||||
logForDebugging(`[teleportToRemote] explicit env ${options.environmentId}, ${Object.keys(envVars).length} env vars, ${seedBundleFileId ? `bundle=${seedBundleFileId}` : `source=${gitSource?.url ?? 'none'}@${options.branchName ?? 'default'}`}`);
|
||||
logForDebugging(`[teleportToRemote] explicit environment request ${jsonStringify({
|
||||
envVarCount: Object.keys(envVars).length,
|
||||
hasBundle: Boolean(seedBundleFileId),
|
||||
hasGitSource: Boolean(gitSource),
|
||||
hasBranchOverride: Boolean(options.branchName)
|
||||
})}`);
|
||||
const response = await axios.post(url, requestBody, {
|
||||
headers,
|
||||
signal
|
||||
});
|
||||
if (response.status !== 200 && response.status !== 201) {
|
||||
logError(new Error(`CreateSession ${response.status}: ${jsonStringify(response.data)}`));
|
||||
logError(new Error(`CreateSession ${response.status}: ${summarizeTeleportResponseDataForDebug(response.data)}`));
|
||||
return null;
|
||||
}
|
||||
const sessionData = response.data as SessionResource;
|
||||
if (!sessionData || typeof sessionData.id !== 'string') {
|
||||
logError(new Error(`No session id in response: ${jsonStringify(response.data)}`));
|
||||
logError(new Error(`No session id in response: ${summarizeTeleportResponseDataForDebug(response.data)}`));
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
@@ -969,7 +1087,10 @@ export async function teleportToRemote(options: {
|
||||
} = repoInfo;
|
||||
// Resolve the base branch: prefer explicit branchName, fall back to default branch
|
||||
const revision = options.branchName ?? (await getDefaultBranch()) ?? undefined;
|
||||
logForDebugging(`[teleportToRemote] Git source: ${host}/${owner}/${name}, revision: ${revision ?? 'none'}`);
|
||||
logForDebugging(`[teleportToRemote] Git source selected ${jsonStringify({
|
||||
hasRevision: Boolean(revision),
|
||||
allowUnrestrictedPush: Boolean(options.reuseOutcomeBranch)
|
||||
})}`);
|
||||
gitSource = {
|
||||
type: 'git_repository',
|
||||
url: `https://${host}/${owner}/${name}`,
|
||||
@@ -1057,7 +1178,7 @@ export async function teleportToRemote(options: {
|
||||
logError(new Error('No environments available for session creation'));
|
||||
return null;
|
||||
}
|
||||
logForDebugging(`Available environments: ${environments.map(e => `${e.environment_id} (${e.name}, ${e.kind})`).join(', ')}`);
|
||||
logForDebugging(`Available environments: ${summarizeTeleportEnvironmentsForDebug(environments)}`);
|
||||
|
||||
// Select environment based on settings, then anthropic_cloud preference, then first available.
|
||||
// Prefer anthropic_cloud environments over byoc: anthropic_cloud environments (e.g. "Default")
|
||||
@@ -1075,7 +1196,7 @@ export async function teleportToRemote(options: {
|
||||
const retried = await fetchEnvironments();
|
||||
cloudEnv = retried?.find(env => env.kind === 'anthropic_cloud');
|
||||
if (!cloudEnv) {
|
||||
logError(new Error(`No anthropic_cloud environment available after retry (got: ${(retried ?? environments).map(e => `${e.name} (${e.kind})`).join(', ')}). Silent byoc fallthrough would launch into a dead env — fail fast instead.`));
|
||||
logError(new Error(`No anthropic_cloud environment available after retry (${summarizeTeleportEnvironmentsForDebug(retried ?? environments)}). Silent byoc fallthrough would launch into a dead env — fail fast instead.`));
|
||||
return null;
|
||||
}
|
||||
if (retried) environments = retried;
|
||||
@@ -1087,10 +1208,13 @@ export async function teleportToRemote(options: {
|
||||
}
|
||||
if (defaultEnvironmentId) {
|
||||
const matchedDefault = selectedEnvironment.environment_id === defaultEnvironmentId;
|
||||
logForDebugging(matchedDefault ? `Using configured default environment: ${defaultEnvironmentId}` : `Configured default environment ${defaultEnvironmentId} not found, using first available`);
|
||||
logForDebugging(matchedDefault ? 'Using configured default environment' : 'Configured default environment not found, using fallback environment');
|
||||
}
|
||||
const environmentId = selectedEnvironment.environment_id;
|
||||
logForDebugging(`Selected environment: ${environmentId} (${selectedEnvironment.name}, ${selectedEnvironment.kind})`);
|
||||
logForDebugging(`Selected environment: ${jsonStringify({
|
||||
kind: selectedEnvironment.kind,
|
||||
usedConfiguredDefault: Boolean(defaultEnvironmentId && selectedEnvironment.environment_id === defaultEnvironmentId)
|
||||
})}`);
|
||||
|
||||
// Prepare API request for Sessions API
|
||||
const url = `${getOauthConfig().BASE_API_URL}/v1/sessions`;
|
||||
@@ -1158,7 +1282,7 @@ export async function teleportToRemote(options: {
|
||||
session_context: sessionContext,
|
||||
environment_id: environmentId
|
||||
};
|
||||
logForDebugging(`Creating session with payload: ${jsonStringify(requestBody, null, 2)}`);
|
||||
logForDebugging(`Creating session with payload summary: ${summarizeTeleportPayloadForDebug(requestBody)}`);
|
||||
|
||||
// Make API call
|
||||
const response = await axios.post(url, requestBody, {
|
||||
@@ -1167,17 +1291,17 @@ export async function teleportToRemote(options: {
|
||||
});
|
||||
const isSuccess = response.status === 200 || response.status === 201;
|
||||
if (!isSuccess) {
|
||||
logError(new Error(`API request failed with status ${response.status}: ${response.statusText}\n\nResponse data: ${jsonStringify(response.data, null, 2)}`));
|
||||
logError(new Error(`API request failed with status ${response.status}: ${summarizeTeleportResponseDataForDebug(response.data)}`));
|
||||
return null;
|
||||
}
|
||||
|
||||
// Parse response as SessionResource
|
||||
const sessionData = response.data as SessionResource;
|
||||
if (!sessionData || typeof sessionData.id !== 'string') {
|
||||
logError(new Error(`Cannot determine session ID from API response: ${jsonStringify(response.data)}`));
|
||||
logError(new Error(`Cannot determine session ID from API response: ${summarizeTeleportResponseDataForDebug(response.data)}`));
|
||||
return null;
|
||||
}
|
||||
logForDebugging(`Successfully created remote session: ${sessionData.id}`);
|
||||
logForDebugging('Successfully created remote session');
|
||||
return {
|
||||
id: sessionData.id,
|
||||
title: sessionData.title || requestBody.title
|
||||
@@ -1215,9 +1339,9 @@ export async function archiveRemoteSession(sessionId: string): Promise<void> {
|
||||
validateStatus: s => s < 500
|
||||
});
|
||||
if (resp.status === 200 || resp.status === 409) {
|
||||
logForDebugging(`[archiveRemoteSession] archived ${sessionId}`);
|
||||
logForDebugging('[archiveRemoteSession] archived remote session');
|
||||
} else {
|
||||
logForDebugging(`[archiveRemoteSession] ${sessionId} failed ${resp.status}: ${jsonStringify(resp.data)}`);
|
||||
logForDebugging(`[archiveRemoteSession] archive failed status=${resp.status} ${summarizeTeleportResponseDataForDebug(resp.data)}`);
|
||||
}
|
||||
} catch (err) {
|
||||
logError(err);
|
||||
|
||||
Reference in New Issue
Block a user