axios+telemetry cleanup

2026-04-02 15:19:11 +03:00
parent a3cbca1e11
commit 7e1eac8002
100 changed files with 3048 additions and 4491 deletions

1
.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
node_modules

1
.npmrc Normal file
View File

@@ -0,0 +1 @@
registry=https://registry.npmjs.org/

47
PLAN.md Normal file
View File

@@ -0,0 +1,47 @@
## Plan
### Goals
- Replace all `axios` usage with `nativeRequest` from `utils/http.js` (assumed surface sketched below)
- Remove outbound telemetry/spying requests while keeping functional analytics (auth flows + GrowthBook feature flags)
- Do a quick security pass to remove obvious vulnerabilities and risky network behaviors
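For orientation, here is a minimal sketch of the `utils/http.js` surface these goals assume, inferred from the call sites converted elsewhere in this commit. Names, option shapes, and the exact error behavior are assumptions, not the real implementation.

```typescript
// Sketch only — inferred from converted call sites, not the actual utils/http.js.
export interface NativeRequestOptions {
  method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE'
  headers?: Record<string, string>
  body?: unknown // JSON-encoded request body
  timeout?: number // milliseconds
  signal?: AbortSignal
  responseType?: 'json' | 'arraybuffer'
}

export interface NativeResponse<T> {
  status: number
  data: T
}

// Error raised on network failures and (per the plan's try/catch pattern)
// on HTTP statuses the caller has not opted into.
export class HttpError extends Error {
  constructor(
    public status: number,
    public code?: string,
    public data?: unknown,
  ) {
    super(`HTTP ${status}`)
  }
}

export function isHttpError(err: unknown): err is HttpError {
  return err instanceof HttpError
}

export declare function nativeRequest<T = unknown>(
  url: string,
  options?: NativeRequestOptions,
): Promise<NativeResponse<T>>
```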
### Current Status
- Completed axios → nativeRequest conversions:
- `services/mcp/officialRegistry.ts`
- `services/mcp/claudeai.ts`
- `services/oauth/getOauthProfile.ts`
- In progress:
- `services/settingsSync/index.ts` (axios import removed; remaining axios calls + error classification need conversion)
### Next Steps (Axios Removal)
1. Fix `services/settingsSync/index.ts` (target pattern sketched after this list)
- Replace `classifyAxiosError` import with `classifyHttpError` from `utils/errors.js`
- Replace `axios.get(..., validateStatus: 200|404)` with `nativeRequest` + `try/catch` handling for `HttpError` 404
- Replace `axios.put(...)` with `nativeRequest` (`method: 'PUT'`, JSON body)
2. Convert `services/oauth/client.ts`
- Replace `axios.post/get` with `nativeRequest`
- Replace `axios.isAxiosError` handling with `isHttpError`
- Preserve analytics events but ensure no tokens/PII are logged
3. Convert `services/policyLimits/index.ts`
- Replace `classifyAxiosError` with `classifyHttpError`
- Replace `axios.get(... validateStatus: 200|304|404)` with `nativeRequest` + `try/catch` handling for `HttpError` statuses
- Preserve existing caching semantics (304 means cache valid; 404 means empty restrictions)
4. Convert `services/remoteManagedSettings/index.ts` (same pattern as policy limits, plus 204/304/404 handling)
5. Convert remaining axios users (transports, bridge, installers, feedback, etc.)
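For step 1, a minimal sketch of the target pattern, assuming `nativeRequest` rejects with an `HttpError` for non-2xx statuses as the try/catch note above implies. The helper name, endpoint shape, and import paths are illustrative, not the actual settings-sync code; `classifyHttpError` is assumed to return a short classification label.

```typescript
import { isHttpError, nativeRequest } from '../../utils/http.js'
import { classifyHttpError } from '../../utils/errors.js'

// Hypothetical helper showing the axios.get(validateStatus: 200|404) replacement:
// a 404 means "no remote settings yet" rather than a failure.
async function fetchRemoteSettings(
  url: string,
  headers: Record<string, string>,
): Promise<{ settings: unknown } | null> {
  try {
    const response = await nativeRequest<{ settings: unknown }>(url, {
      method: 'GET',
      headers,
      timeout: 10_000,
    })
    return response.data
  } catch (err) {
    if (isHttpError(err) && err.status === 404) {
      return null // remote settings not present yet
    }
    // classifyHttpError replaces the old classifyAxiosError call site
    throw new Error(`settings sync failed: ${classifyHttpError(err)}`)
  }
}
```

The PUT side follows the same shape with `method: 'PUT'` and a JSON `body`.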
### Telemetry / “Spying” Removal
- Search for outbound tracking endpoints and SDKs (events, crash reporting, session replay, fingerprinting)
- Remove or gate non-essential outbound calls behind “essential traffic only” where appropriate (see the sketch after this list)
- Keep:
- OAuth/auth network flows required for functionality
- GrowthBook feature flag fetches required for feature gating
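One possible shape for that gating, assuming a central kill-switch like the `isAnalyticsDisabled` helper already imported elsewhere in the codebase; the wrapper name and traffic classification are illustrative only.

```typescript
import { isAnalyticsDisabled } from 'src/services/analytics/config.js'
import { nativeRequest } from 'src/utils/http.js'

// Hypothetical wrapper: essential traffic (auth, feature flags) always goes out;
// anything tagged as telemetry is dropped when analytics are disabled.
async function sendIfAllowed(
  url: string,
  body: unknown,
  kind: 'essential' | 'telemetry',
): Promise<void> {
  if (kind === 'telemetry' && isAnalyticsDisabled()) {
    return // silently drop non-essential telemetry
  }
  await nativeRequest(url, { method: 'POST', body, timeout: 10_000 })
}
```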
### Security Pass (Quick Wins)
- Ensure no secrets/tokens are logged or included in analytics payloads
- Validate any places that build URLs/headers from user input to prevent SSRF or header injection
- Enforce timeouts on outbound requests and avoid overly permissive redirects
- Verify files written to disk use safe permissions (e.g. `0o600` for sensitive caches) and safe paths
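For the last item, a minimal Node sketch of the intended file-permission pattern (the helper and path handling are illustrative):

```typescript
import { mkdir, writeFile } from 'fs/promises'
import { dirname } from 'path'

// Sensitive caches: owner-only permissions, parent directory created explicitly.
async function writeSensitiveCache(path: string, contents: string): Promise<void> {
  await mkdir(dirname(path), { recursive: true, mode: 0o700 })
  await writeFile(path, contents, { mode: 0o600 })
}
```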
### Verification
- Run the repo's lint/typecheck commands
- Run test suite (or targeted tests) for settings sync / oauth client flows if present

View File

@@ -1,4 +1,4 @@
import axios from 'axios'
import { nativeRequest } from '../utils/http.js'
import { getOauthConfig } from '../constants/oauth.js'
import type { SDKMessage } from '../entrypoints/agentSdkTypes.js'
import { logForDebugging } from '../utils/debug.js'
@@ -47,14 +47,18 @@ async function fetchPage(
params: Record<string, string | number | boolean>,
label: string,
): Promise<HistoryPage | null> {
const resp = await axios
.get<SessionEventsResponse>(ctx.baseUrl, {
const queryString = new URLSearchParams(
Object.entries(params).map(([k, v]) => [k, String(v)]),
).toString()
const url = queryString ? `${ctx.baseUrl}?${queryString}` : ctx.baseUrl
const resp = await nativeRequest<SessionEventsResponse>(
url,
{
method: 'GET',
headers: ctx.headers,
params,
timeout: 15000,
validateStatus: () => true,
})
.catch(() => null)
},
).catch(() => null)
if (!resp || resp.status !== 200) {
logForDebugging(`[${label}] HTTP ${resp?.status ?? 'error'}`)
return null

17
bin/claude.js Normal file
View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bun
import { spawn } from "child_process";
import { fileURLToPath } from "url";
import { dirname, join } from "path";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const mainPath = join(__dirname, "../main.tsx");
const proc = spawn("bun", ["run", mainPath, ...process.argv.slice(2)], {
stdio: "inherit",
});
proc.on("exit", (code) => {
process.exit(code ?? 0);
});

33
biome.json Normal file
View File

@@ -0,0 +1,33 @@
{
"$schema": "https://biomejs.dev/schemas/1.7.3/schema.json",
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": true,
"rules": {
"recommended": true,
"suspicious": {
"noConsoleLog": "off",
"noExplicitAny": "warn"
},
"style": {
"noUnusedTemplateLiteral": "off",
"useImportType": "off"
}
}
},
"formatter": {
"enabled": true,
"indentStyle": "space",
"indentWidth": 2,
"lineWidth": 80
},
"javascript": {
"formatter": {
"quoteStyle": "single",
"trailingComma": "all",
"semicolons": "always"
}
}
}

View File

@@ -1,4 +1,4 @@
import axios from 'axios'
import { nativeRequest } from '../utils/http.js'
import { debugBody, extractErrorDetail } from './debugUtils.js'
import {
@@ -148,38 +148,26 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
const response = await withOAuthRetry(
(token: string) =>
axios.post<{
nativeRequest<{
environment_id: string
environment_secret: string
}>(
`${deps.baseUrl}/v1/environments/bridge`,
{
method: 'POST',
body: {
machine_name: config.machineName,
directory: config.dir,
branch: config.branch,
git_repo_url: config.gitRepoUrl,
// Advertise session capacity so claude.ai/code can show
// "2/4 sessions" badges and only block the picker when
// actually at capacity. Backends that don't yet accept
// this field will silently ignore it.
max_sessions: config.maxSessions,
// worker_type lets claude.ai filter environments by origin
// (e.g. assistant picker only shows assistant-mode workers).
// Desktop cowork app sends "cowork"; we send a distinct value.
metadata: { worker_type: config.workerType },
// Idempotent re-registration: if we have a backend-issued
// environment_id from a prior session (--session-id resume),
// send it back so the backend reattaches instead of creating
// a new env. The backend may still hand back a fresh ID if
// the old one expired — callers must compare the response.
...(config.reuseEnvironmentId && {
environment_id: config.reuseEnvironmentId,
}),
},
{
headers: getHeaders(token),
timeout: 15_000,
validateStatus: status => status < 500,
},
),
'Registration',
@@ -209,17 +197,16 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
const prevEmptyPolls = consecutiveEmptyPolls
consecutiveEmptyPolls = 0
const response = await axios.get<WorkResponse | null>(
`${deps.baseUrl}/v1/environments/${environmentId}/work/poll`,
const pollUrl = reclaimOlderThanMs !== undefined
? `${deps.baseUrl}/v1/environments/${environmentId}/work/poll?reclaim_older_than_ms=${reclaimOlderThanMs}`
: `${deps.baseUrl}/v1/environments/${environmentId}/work/poll`
const response = await nativeRequest<WorkResponse | null>(
pollUrl,
{
method: 'GET',
headers: getHeaders(environmentSecret),
params:
reclaimOlderThanMs !== undefined
? { reclaim_older_than_ms: reclaimOlderThanMs }
: undefined,
timeout: 10_000,
signal,
validateStatus: status => status < 500,
timeout: 10_000,
},
)
@@ -256,13 +243,13 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
debug(`[bridge:api] POST .../work/${workId}/ack`)
const response = await axios.post(
const response = await nativeRequest(
`${deps.baseUrl}/v1/environments/${environmentId}/work/${workId}/ack`,
{},
{
method: 'POST',
body: {},
headers: getHeaders(sessionToken),
timeout: 10_000,
validateStatus: s => s < 500,
},
)
@@ -282,13 +269,13 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
const response = await withOAuthRetry(
(token: string) =>
axios.post(
nativeRequest(
`${deps.baseUrl}/v1/environments/${environmentId}/work/${workId}/stop`,
{ force },
{
method: 'POST',
body: { force },
headers: getHeaders(token),
timeout: 10_000,
validateStatus: s => s < 500,
},
),
'StopWork',
@@ -305,12 +292,12 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
const response = await withOAuthRetry(
(token: string) =>
axios.delete(
nativeRequest(
`${deps.baseUrl}/v1/environments/bridge/${environmentId}`,
{
method: 'DELETE',
headers: getHeaders(token),
timeout: 10_000,
validateStatus: s => s < 500,
},
),
'Deregister',
@@ -329,13 +316,13 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
const response = await withOAuthRetry(
(token: string) =>
axios.post(
nativeRequest(
`${deps.baseUrl}/v1/sessions/${sessionId}/archive`,
{},
{
method: 'POST',
body: {},
headers: getHeaders(token),
timeout: 10_000,
validateStatus: s => s < 500,
},
),
'ArchiveSession',
@@ -368,13 +355,13 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
const response = await withOAuthRetry(
(token: string) =>
axios.post(
nativeRequest(
`${deps.baseUrl}/v1/environments/${environmentId}/bridge/reconnect`,
{ session_id: sessionId },
{
method: 'POST',
body: { session_id: sessionId },
headers: getHeaders(token),
timeout: 10_000,
validateStatus: s => s < 500,
},
),
'ReconnectSession',
@@ -394,18 +381,18 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
debug(`[bridge:api] POST .../work/${workId}/heartbeat`)
const response = await axios.post<{
const response = await nativeRequest<{
lease_extended: boolean
state: string
last_heartbeat: string
ttl_seconds: number
}>(
`${deps.baseUrl}/v1/environments/${environmentId}/work/${workId}/heartbeat`,
{},
{
method: 'POST',
body: {},
headers: getHeaders(sessionToken),
timeout: 10_000,
validateStatus: s => s < 500,
},
)
@@ -427,13 +414,13 @@ export function createBridgeApiClient(deps: BridgeApiDeps): BridgeApiClient {
`[bridge:api] POST /v1/sessions/${sessionId}/events type=${event.type}`,
)
const response = await axios.post(
const response = await nativeRequest(
`${deps.baseUrl}/v1/sessions/${sessionId}/events`,
{ events: [event] },
{
method: 'POST',
body: { events: [event] },
headers: getHeaders(sessionToken),
timeout: 10_000,
validateStatus: s => s < 500,
},
)

View File

@@ -7,7 +7,7 @@
* accessToken + baseUrl — no implicit auth or config reads.
*/
import axios from 'axios'
import { isHttpError, nativeRequest } from '../utils/http.js'
import { logForDebugging } from '../utils/debug.js'
import { errorMessage } from '../utils/errors.js'
import { jsonStringify } from '../utils/slowOperations.js'
@@ -33,16 +33,13 @@ export async function createCodeSession(
const url = `${baseUrl}/v1/code/sessions`
let response
try {
response = await axios.post(
response = await nativeRequest(
url,
// bridge: {} is the positive signal for the oneof runner — omitting it
// (or sending environment_id: "") now 400s. BridgeRunner is an empty
// message today; it's a placeholder for future bridge-specific options.
{ title, bridge: {}, ...(tags?.length ? { tags } : {}) },
{
method: 'POST',
body: { title, bridge: {}, ...(tags?.length ? { tags } : {}) },
headers: oauthHeaders(accessToken),
timeout: timeoutMs,
validateStatus: s => s < 500,
},
)
} catch (err: unknown) {
@@ -104,13 +101,13 @@ export async function fetchRemoteCredentials(
}
let response
try {
response = await axios.post(
response = await nativeRequest(
url,
{},
{
method: 'POST',
body: {},
headers,
timeout: timeoutMs,
validateStatus: s => s < 500,
},
)
} catch (err: unknown) {

View File

@@ -1,4 +1,5 @@
import type { SDKMessage } from '../entrypoints/agentSdkTypes.js'
import { nativeRequest } from '../utils/http.js'
import { logForDebugging } from '../utils/debug.js'
import { errorMessage } from '../utils/errors.js'
import { extractErrorDetail } from './debugUtils.js'
@@ -59,7 +60,6 @@ export async function createBridgeSession({
const { parseGitHubRepository } = await import('../utils/detectRepository.js')
const { getDefaultBranch } = await import('../utils/git.js')
const { getMainLoopModel } = await import('../utils/model/model.js')
const { default: axios } = await import('axios')
const accessToken =
getAccessToken?.() ?? getClaudeAIOAuthTokens()?.accessToken
@@ -144,10 +144,11 @@ export async function createBridgeSession({
const url = `${baseUrlOverride ?? getOauthConfig().BASE_API_URL}/v1/sessions`
let response
try {
response = await axios.post(url, requestBody, {
response = await nativeRequest(url, {
method: 'POST',
body: requestBody,
headers,
signal,
validateStatus: s => s < 500,
})
} catch (err: unknown) {
logForDebugging(
@@ -195,7 +196,6 @@ export async function getBridgeSession(
const { getOrganizationUUID } = await import('../services/oauth/client.js')
const { getOauthConfig } = await import('../constants/oauth.js')
const { getOAuthHeaders } = await import('../utils/teleport/api.js')
const { default: axios } = await import('axios')
const accessToken =
opts?.getAccessToken?.() ?? getClaudeAIOAuthTokens()?.accessToken
@@ -221,9 +221,9 @@ export async function getBridgeSession(
let response
try {
response = await axios.get<{ environment_id?: string; title?: string }>(
response = await nativeRequest<{ environment_id?: string; title?: string }>(
url,
{ headers, timeout: 10_000, validateStatus: s => s < 500 },
{ headers, timeout: 10_000 },
)
} catch (err: unknown) {
logForDebugging(
@@ -272,7 +272,6 @@ export async function archiveBridgeSession(
const { getOrganizationUUID } = await import('../services/oauth/client.js')
const { getOauthConfig } = await import('../constants/oauth.js')
const { getOAuthHeaders } = await import('../utils/teleport/api.js')
const { default: axios } = await import('axios')
const accessToken =
opts?.getAccessToken?.() ?? getClaudeAIOAuthTokens()?.accessToken
@@ -296,13 +295,13 @@ export async function archiveBridgeSession(
const url = `${opts?.baseUrl ?? getOauthConfig().BASE_API_URL}/v1/sessions/${sessionId}/archive`
logForDebugging(`[bridge] Archiving session ${sessionId}`)
const response = await axios.post(
const response = await nativeRequest(
url,
{},
{
method: 'POST',
body: {},
headers,
timeout: opts?.timeoutMs ?? 10_000,
validateStatus: s => s < 500,
},
)
@@ -333,7 +332,6 @@ export async function updateBridgeSessionTitle(
const { getOrganizationUUID } = await import('../services/oauth/client.js')
const { getOauthConfig } = await import('../constants/oauth.js')
const { getOAuthHeaders } = await import('../utils/teleport/api.js')
const { default: axios } = await import('axios')
const accessToken =
opts?.getAccessToken?.() ?? getClaudeAIOAuthTokens()?.accessToken
@@ -362,10 +360,14 @@ export async function updateBridgeSessionTitle(
logForDebugging(`[bridge] Updating session title: ${compatId}${title}`)
try {
const response = await axios.patch(
const response = await nativeRequest(
url,
{ title },
{ headers, timeout: 10_000, validateStatus: s => s < 500 },
{
method: 'PATCH',
body: { title },
headers,
timeout: 10_000,
},
)
if (response.status === 200) {

View File

@@ -11,7 +11,7 @@
*/
import type { ContentBlockParam } from '@anthropic-ai/sdk/resources/messages.mjs'
import axios from 'axios'
import { isHttpError, nativeRequest } from '../utils/http.js'
import { randomUUID } from 'crypto'
import { mkdir, writeFile } from 'fs/promises'
import { basename, join } from 'path'
@@ -79,11 +79,10 @@ async function resolveOne(att: InboundAttachment): Promise<string | undefined> {
// FedStart URL degrades to "no @path" instead of crashing print.ts's
// reader loop (which has no catch around the await).
const url = `${getBridgeBaseUrl()}/api/oauth/files/${encodeURIComponent(att.file_uuid)}/content`
const response = await axios.get(url, {
const response = await nativeRequest<ArrayBuffer>(url, {
headers: { Authorization: `Bearer ${token}` },
responseType: 'arraybuffer',
timeout: DOWNLOAD_TIMEOUT_MS,
validateStatus: () => true,
})
if (response.status !== 200) {
debug(`fetch ${att.file_uuid} failed: status=${response.status}`)

View File

@@ -29,7 +29,7 @@
*/
import { feature } from 'bun:bundle'
import axios from 'axios'
import { isHttpError, nativeRequest } from '../utils/http.js'
import {
createV2ReplTransport,
type ReplBridgeTransport,
@@ -981,17 +981,17 @@ async function archiveSession(
// cse_* and we correctly send it.
const compatId = toCompatSessionId(sessionId)
try {
const response = await axios.post(
const response = await nativeRequest(
`${baseUrl}/v1/sessions/${compatId}/archive`,
{},
{
method: 'POST',
body: {},
headers: {
...oauthHeaders(accessToken),
'anthropic-beta': 'ccr-byoc-2025-07-29',
'x-organization-uuid': orgUUID,
},
timeout: timeoutMs,
validateStatus: () => true,
},
)
logForDebugging(
@@ -1001,7 +1001,7 @@ async function archiveSession(
} catch (err) {
const msg = errorMessage(err)
logForDebugging(`[remote-bridge] Archive failed: ${msg}`)
return axios.isAxiosError(err) && err.code === 'ECONNABORTED'
return isHttpError(err) && err.code === 'ECONNABORTED'
? 'timeout'
: 'error'
}

View File

@@ -1,4 +1,4 @@
import axios from 'axios'
import { nativeRequest } from '../utils/http.js'
import memoize from 'lodash-es/memoize.js'
import { hostname } from 'os'
import { getOauthConfig } from '../constants/oauth.js'
@@ -142,19 +142,19 @@ export async function enrollTrustedDevice(): Promise<void> {
const baseUrl = getOauthConfig().BASE_API_URL
let response
try {
response = await axios.post<{
response = await nativeRequest<{
device_token?: string
device_id?: string
}>(
`${baseUrl}/api/auth/trusted_devices`,
{ display_name: `Claude Code on ${hostname()} · ${process.platform}` },
{
method: 'POST',
body: { display_name: `Claude Code on ${hostname()} · ${process.platform}` },
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',
},
timeout: 10_000,
validateStatus: s => s < 500,
},
)
} catch (err: unknown) {

View File

@@ -1,4 +1,4 @@
import axios from 'axios'
import { nativeRequest } from '../utils/http.js'
import { jsonParse, jsonStringify } from '../utils/slowOperations.js'
import type { WorkSecret } from './types.js'
@@ -98,10 +98,11 @@ export async function registerWorker(
sessionUrl: string,
accessToken: string,
): Promise<number> {
const response = await axios.post(
const response = await nativeRequest(
`${sessionUrl}/worker/register`,
{},
{
method: 'POST',
body: {},
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',

1369
bun.lock Normal file

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
import axios, { type AxiosError } from 'axios'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import type { StdoutMessage } from 'src/entrypoints/sdk/controlTypes.js'
import { logForDebugging } from '../../utils/debug.js'
import { logForDiagnosticsNoPII } from '../../utils/diagLogs.js'
@@ -212,20 +212,17 @@ export class HybridTransport extends WebSocketTransport {
'Content-Type': 'application/json',
}
let response
let response: { status: number; data: unknown }
try {
response = await axios.post(
this.postUrl,
{ events },
{
response = await nativeRequest(this.postUrl, {
method: 'POST',
headers,
validateStatus: () => true,
body: { events },
timeout: POST_TIMEOUT_MS,
},
)
responseType: 'json',
})
} catch (error) {
const axiosError = error as AxiosError
logForDebugging(`HybridTransport: POST error: ${axiosError.message}`)
logForDebugging(`HybridTransport: POST error: ${error instanceof Error ? error.message : 'unknown'}`)
logForDiagnosticsNoPII('warn', 'cli_hybrid_post_network_error')
throw error
}

View File

@@ -1,4 +1,4 @@
import axios, { type AxiosError } from 'axios'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import type { StdoutMessage } from 'src/entrypoints/sdk/controlTypes.js'
import { logForDebugging } from '../../utils/debug.js'
import { logForDiagnosticsNoPII } from '../../utils/diagLogs.js'
@@ -590,9 +590,11 @@ export class SSETransport implements Transport {
for (let attempt = 1; attempt <= POST_MAX_RETRIES; attempt++) {
try {
const response = await axios.post(this.postUrl, message, {
const response = await nativeRequest(this.postUrl, {
method: 'POST',
headers,
validateStatus: alwaysValidStatus,
body: message,
responseType: 'json',
})
if (response.status === 200 || response.status === 201) {
@@ -603,7 +605,6 @@ export class SSETransport implements Transport {
logForDebugging(
`SSETransport: POST ${response.status} body=${jsonStringify(response.data).slice(0, 200)}`,
)
// 4xx errors (except 429) are permanent - don't retry
if (
response.status >= 400 &&
response.status < 500 &&
@@ -618,7 +619,6 @@ export class SSETransport implements Transport {
return
}
// 429 or 5xx - retry
logForDebugging(
`SSETransport: POST returned ${response.status}, attempt ${attempt}/${POST_MAX_RETRIES}`,
)
@@ -627,9 +627,8 @@ export class SSETransport implements Transport {
attempt,
})
} catch (error) {
const axiosError = error as AxiosError
logForDebugging(
`SSETransport: POST error: ${axiosError.message}, attempt ${attempt}/${POST_MAX_RETRIES}`,
`SSETransport: POST error: ${error instanceof Error ? error.message : 'unknown'}, attempt ${attempt}/${POST_MAX_RETRIES}`,
)
logForDiagnosticsNoPII('warn', 'cli_sse_post_network_error', {
attempt,

View File

@@ -1,4 +1,5 @@
import chalk from 'chalk'
import { VERSION, PACKAGE_URL } from 'src/constants/product.js'
import { logEvent } from 'src/services/analytics/index.js'
import {
getLatestVersion,
@@ -29,7 +30,7 @@ import { getInitialSettings } from 'src/utils/settings/settings.js'
export async function update() {
logEvent('tengu_update_check', {})
writeToStdout(`Current version: ${MACRO.VERSION}\n`)
writeToStdout(`Current version: ${VERSION}\n`)
const channel = getInitialSettings()?.autoUpdatesChannel ?? 'latest'
writeToStdout(`Checking for updates to ${channel} version...\n`)
@@ -122,8 +123,8 @@ export async function update() {
if (packageManager === 'homebrew') {
writeToStdout('Claude is managed by Homebrew.\n')
const latest = await getLatestVersion(channel)
if (latest && !gte(MACRO.VERSION, latest)) {
writeToStdout(`Update available: ${MACRO.VERSION}${latest}\n`)
if (latest && !gte(VERSION, latest)) {
writeToStdout(`Update available: ${VERSION}${latest}\n`)
writeToStdout('\n')
writeToStdout('To update, run:\n')
writeToStdout(chalk.bold(' brew upgrade claude-code') + '\n')
@@ -133,8 +134,8 @@ export async function update() {
} else if (packageManager === 'winget') {
writeToStdout('Claude is managed by winget.\n')
const latest = await getLatestVersion(channel)
if (latest && !gte(MACRO.VERSION, latest)) {
writeToStdout(`Update available: ${MACRO.VERSION}${latest}\n`)
if (latest && !gte(VERSION, latest)) {
writeToStdout(`Update available: ${VERSION}${latest}\n`)
writeToStdout('\n')
writeToStdout('To update, run:\n')
writeToStdout(
@@ -146,8 +147,8 @@ export async function update() {
} else if (packageManager === 'apk') {
writeToStdout('Claude is managed by apk.\n')
const latest = await getLatestVersion(channel)
if (latest && !gte(MACRO.VERSION, latest)) {
writeToStdout(`Update available: ${MACRO.VERSION}${latest}\n`)
if (latest && !gte(VERSION, latest)) {
writeToStdout(`Update available: ${VERSION}${latest}\n`)
writeToStdout('\n')
writeToStdout('To update, run:\n')
writeToStdout(chalk.bold(' apk upgrade claude-code') + '\n')
@@ -236,14 +237,14 @@ export async function update() {
await gracefulShutdown(1)
}
if (result.latestVersion === MACRO.VERSION) {
if (result.latestVersion === VERSION) {
writeToStdout(
chalk.green(`Claude Code is up to date (${MACRO.VERSION})`) + '\n',
chalk.green(`Claude Code is up to date (${VERSION})`) + '\n',
)
} else {
writeToStdout(
chalk.green(
`Successfully updated from ${MACRO.VERSION} to version ${result.latestVersion}`,
`Successfully updated from ${VERSION} to version ${result.latestVersion}`,
) + '\n',
)
await regenerateCompletionCache()
@@ -265,9 +266,9 @@ export async function update() {
}
logForDebugging('update: Checking npm registry for latest version')
logForDebugging(`update: Package URL: ${MACRO.PACKAGE_URL}`)
logForDebugging(`update: Package URL: ${PACKAGE_URL}`)
const npmTag = channel === 'stable' ? 'stable' : 'latest'
const npmCommand = `npm view ${MACRO.PACKAGE_URL}@${npmTag} version`
const npmCommand = `npm view ${PACKAGE_URL}@${npmTag} version`
logForDebugging(`update: Running: ${npmCommand}`)
const latestVersion = await getLatestVersion(channel)
logForDebugging(
@@ -283,7 +284,7 @@ export async function update() {
process.stderr.write(' • Network connectivity issues\n')
process.stderr.write(' • npm registry is unreachable\n')
process.stderr.write(' • Corporate proxy/firewall blocking npm\n')
if (MACRO.PACKAGE_URL && !MACRO.PACKAGE_URL.startsWith('@anthropic')) {
if (PACKAGE_URL && !PACKAGE_URL.startsWith('@anthropic')) {
process.stderr.write(
' • Internal/development build not published to npm\n',
)
@@ -293,7 +294,7 @@ export async function update() {
process.stderr.write(' • Check your internet connection\n')
process.stderr.write(' • Run with --debug flag for more details\n')
const packageName =
MACRO.PACKAGE_URL ||
PACKAGE_URL ||
(process.env.USER_TYPE === 'ant'
? '@anthropic-ai/claude-cli'
: '@anthropic-ai/claude-code')
@@ -306,15 +307,15 @@ export async function update() {
}
// Check if versions match exactly, including any build metadata (like SHA)
if (latestVersion === MACRO.VERSION) {
if (latestVersion === VERSION) {
writeToStdout(
chalk.green(`Claude Code is up to date (${MACRO.VERSION})`) + '\n',
chalk.green(`Claude Code is up to date (${VERSION})`) + '\n',
)
await gracefulShutdown(0)
}
writeToStdout(
`New version available: ${latestVersion} (current: ${MACRO.VERSION})\n`,
`New version available: ${latestVersion} (current: ${VERSION})\n`,
)
writeToStdout('Installing update...\n')
@@ -374,7 +375,7 @@ export async function update() {
case 'success':
writeToStdout(
chalk.green(
`Successfully updated from ${MACRO.VERSION} to version ${latestVersion}`,
`Successfully updated from ${VERSION} to version ${latestVersion}`,
) + '\n',
)
await regenerateCompletionCache()
@@ -386,7 +387,7 @@ export async function update() {
if (useLocalUpdate) {
process.stderr.write('Try manually updating with:\n')
process.stderr.write(
` cd ~/.claude/local && npm update ${MACRO.PACKAGE_URL}\n`,
` cd ~/.claude/local && npm update ${PACKAGE_URL}\n`,
)
} else {
process.stderr.write('Try running with sudo or fix npm permissions\n')
@@ -401,7 +402,7 @@ export async function update() {
if (useLocalUpdate) {
process.stderr.write('Try manually updating with:\n')
process.stderr.write(
` cd ~/.claude/local && npm update ${MACRO.PACKAGE_URL}\n`,
` cd ~/.claude/local && npm update ${PACKAGE_URL}\n`,
)
} else {
process.stderr.write(

View File

@@ -14,6 +14,7 @@ import {
import { tmpdir } from 'os'
import { extname, join } from 'path'
import type { Command } from '../commands.js'
import { VERSION } from '../constants/product.js'
import { queryWithModel } from '../services/api/claude.js'
import {
AGENT_TOOL_NAME,
@@ -2682,7 +2683,7 @@ export function buildExportData(
facets: Map<string, SessionFacets>,
remoteStats?: { hosts: RemoteHostInfo[]; totalCopied: number },
): InsightsExport {
const version = typeof MACRO !== 'undefined' ? MACRO.VERSION : 'unknown'
const version = VERSION
const remote_hosts_collected = remoteStats?.hosts
.filter(h => h.sessionCount > 0)

View File

@@ -1,4 +1,4 @@
import axios from 'axios'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import { getOauthConfig } from '../../constants/oauth.js'
import { logForDebugging } from '../../utils/debug.js'
import { getOAuthHeaders, prepareApiRequest } from '../../utils/teleport/api.js'
@@ -69,32 +69,30 @@ export async function importGithubToken(
}
try {
const response = await axios.post<ImportTokenResult>(
url,
{ token: token.reveal() },
{ headers, timeout: 15000, validateStatus: () => true },
)
if (response.status === 200) {
const response = await nativeRequest<ImportTokenResult>(url, {
method: 'POST',
headers,
body: { token: token.reveal() },
timeout: 15000,
responseType: 'json',
})
return { ok: true, result: response.data }
}
if (response.status === 400) {
} catch (err) {
if (isHttpError(err) && err.status === 400) {
return { ok: false, error: { kind: 'invalid_token' } }
}
if (response.status === 401) {
if (isHttpError(err) && err.status === 401) {
return { ok: false, error: { kind: 'not_signed_in' } }
}
logForDebugging(`import-token returned ${response.status}`, {
level: 'error',
})
return { ok: false, error: { kind: 'server', status: response.status } }
} catch (err) {
if (axios.isAxiosError(err)) {
// err.config.data would contain the POST body with the raw token.
// Do not include it in any log. The error code alone is enough.
logForDebugging(`import-token network error: ${err.code ?? 'unknown'}`, {
if (isHttpError(err) && err.status >= 400 && err.status < 500) {
logForDebugging(`import-token returned ${err.status}`, {
level: 'error',
})
return { ok: false, error: { kind: 'server', status: err.status } }
}
logForDebugging(`import-token network error: ${err instanceof Error ? err.message : 'unknown'}`, {
level: 'error',
})
return { ok: false, error: { kind: 'network' } }
}
}
@@ -138,9 +136,10 @@ export async function createDefaultEnvironment(): Promise<boolean> {
}
try {
const response = await axios.post(
url,
{
const response = await nativeRequest(url, {
method: 'POST',
headers,
body: {
name: 'Default',
kind: 'anthropic_cloud',
description: 'Default - trusted network access',
@@ -159,8 +158,9 @@ export async function createDefaultEnvironment(): Promise<boolean> {
},
},
},
{ headers, timeout: 15000, validateStatus: () => true },
)
timeout: 15000,
responseType: 'json',
})
return response.status >= 200 && response.status < 300
} catch {
return false

View File

@@ -1,11 +1,10 @@
import type { Command, LocalCommandCall } from '../types/command.js'
import { VERSION, BUILD_TIME } from '../constants/product.js'
const call: LocalCommandCall = async () => {
return {
type: 'text',
value: MACRO.BUILD_TIME
? `${MACRO.VERSION} (built ${MACRO.BUILD_TIME})`
: MACRO.VERSION,
value: BUILD_TIME ? `${VERSION} (built ${BUILD_TIME})` : VERSION,
}
}

View File

@@ -12,6 +12,7 @@ import { installOrUpdateClaudePackage, localInstallationExists } from '../utils/
import { removeInstalledSymlink } from '../utils/nativeInstaller/index.js';
import { gt, gte } from '../utils/semver.js';
import { getInitialSettings } from '../utils/settings/settings.js';
import { VERSION, PACKAGE_URL } from '../constants/product.js';
type Props = {
isUpdating: boolean;
onChangeIsUpdating: (isUpdating: boolean) => void;
@@ -53,7 +54,7 @@ export function AutoUpdater({
logForDebugging('AutoUpdater: Skipping update check in test/dev environment');
return;
}
const currentVersion = MACRO.VERSION;
const currentVersion = VERSION;
const channel = getInitialSettings()?.autoUpdatesChannel ?? 'latest';
let latestVersion = await getLatestVersion(channel);
const isDisabled = isAutoUpdaterDisabled();
@@ -190,7 +191,7 @@ export function AutoUpdater({
{(autoUpdaterResult?.status === 'install_failed' || autoUpdaterResult?.status === 'no_permissions') && <Text color="error" wrap="truncate">
Auto-update failed &middot; Try <Text bold>claude doctor</Text> or{' '}
<Text bold>
{hasLocalInstall ? `cd ~/.claude/local && npm update ${MACRO.PACKAGE_URL}` : `npm i -g ${MACRO.PACKAGE_URL}`}
{hasLocalInstall ? `cd ~/.claude/local && npm update ${PACKAGE_URL}` : `npm i -g ${PACKAGE_URL}`}
</Text>
</Text>}
</Box>;

View File

@@ -1,4 +1,4 @@
import axios from 'axios';
import { isHttpError, nativeRequest } from '../utils/http.js';
import { readFile, stat } from 'fs/promises';
import * as React from 'react';
import { useCallback, useEffect, useState } from 'react';
@@ -28,6 +28,7 @@ import { ConfigurableShortcutHint } from './ConfigurableShortcutHint.js';
import { Byline } from './design-system/Byline.js';
import { Dialog } from './design-system/Dialog.js';
import { KeyboardShortcutHint } from './design-system/KeyboardShortcutHint.js';
import { VERSION } from 'src/constants/product.js';
import TextInput from './TextInput.js';
// This value was determined experimentally by testing the URL length limit
@@ -211,7 +212,7 @@ export function Feedback({
platform: env.platform,
gitRepo: envInfo.isGit,
terminal: env.terminal,
version: MACRO.VERSION,
version: VERSION,
transcript: normalizeMessagesForAPI(messages),
errors: sanitizedErrors,
lastApiRequest: getLastAPIRequest(),
@@ -343,7 +344,7 @@ export function Feedback({
<Text>
- Environment info:{' '}
<Text dimColor>
{env.platform}, {env.terminal}, v{MACRO.VERSION}
{env.platform}, {env.terminal}, v{VERSION}
</Text>
</Text>
{envInfo.gitState && <Text>
@@ -396,7 +397,7 @@ export function createGitHubIssueUrl(feedbackId: string, title: string, descript
}>): string {
const sanitizedTitle = redactSensitiveInfo(title);
const sanitizedDescription = redactSensitiveInfo(description);
const bodyPrefix = `**Bug Description**\n${sanitizedDescription}\n\n` + `**Environment Info**\n` + `- Platform: ${env.platform}\n` + `- Terminal: ${env.terminal}\n` + `- Version: ${MACRO.VERSION || 'unknown'}\n` + `- Feedback ID: ${feedbackId}\n` + `\n**Errors**\n\`\`\`json\n`;
const bodyPrefix = `**Bug Description**\n${sanitizedDescription}\n\n` + `**Environment Info**\n` + `- Platform: ${env.platform}\n` + `- Terminal: ${env.terminal}\n` + `- Version: ${VERSION || 'unknown'}\n` + `- Feedback ID: ${feedbackId}\n` + `\n**Errors**\n\`\`\`json\n`;
const errorSuffix = `\n\`\`\`\n`;
const errorsJson = jsonStringify(errors);
const baseUrl = `${GITHUB_ISSUES_REPO_URL}/new?title=${encodeURIComponent(sanitizedTitle)}&labels=user-reported,bug&body=`;
@@ -540,12 +541,13 @@ async function submitFeedback(data: FeedbackData, signal?: AbortSignal): Promise
'User-Agent': getUserAgent(),
...authResult.headers
};
const response = await axios.post('https://api.anthropic.com/api/claude_cli_feedback', {
content: jsonStringify(data)
}, {
const response = await nativeRequest('https://api.anthropic.com/api/claude_cli_feedback', {
method: 'POST',
headers,
body: {
content: jsonStringify(data)
},
timeout: 30000,
// 30 second timeout to prevent hanging
signal
});
if (response.status === 200) {
@@ -566,14 +568,13 @@ async function submitFeedback(data: FeedbackData, signal?: AbortSignal): Promise
success: false
};
} catch (err) {
// Handle cancellation/abort - don't log as error
if (axios.isCancel(err)) {
if (err instanceof Error && err.name === 'AbortError') {
return {
success: false
};
}
if (axios.isAxiosError(err) && err.response?.status === 403) {
const errorData = err.response.data;
if (isHttpError(err) && err.status === 403) {
const errorData = err.data;
if (errorData?.error?.type === 'permission_error' && errorData?.error?.message?.includes('Custom data retention settings')) {
sanitizeAndLogError(new Error('Cannot submit feedback because custom data retention settings are enabled'));
return {

View File

@@ -1,21 +1,12 @@
import axios from 'axios'
import { readFile, stat } from 'fs/promises'
import type { Message } from '../../types/message.js'
import { checkAndRefreshOAuthTokenIfNeeded } from '../../utils/auth.js'
import { logForDebugging } from '../../utils/debug.js'
import { errorMessage } from '../../utils/errors.js'
import { getAuthHeaders, getUserAgent } from '../../utils/http.js'
import { normalizeMessagesForAPI } from '../../utils/messages.js'
import {
extractAgentIdsFromMessages,
getTranscriptPath,
loadSubagentTranscripts,
MAX_TRANSCRIPT_READ_BYTES,
} from '../../utils/sessionStorage.js'
import { jsonStringify } from '../../utils/slowOperations.js'
import { redactSensitiveInfo } from '../Feedback.js'
/**
* Transcript Share Service - Stubbed
*
* This service is stubbed to ensure no session transcripts or user
* identification data is sent to external services, even during
* feedback surveys.
*/
type TranscriptShareResult = {
export type TranscriptShareResult = {
success: boolean
transcriptId?: string
}
@@ -27,86 +18,11 @@ export type TranscriptShareTrigger =
| 'memory_survey'
export async function submitTranscriptShare(
messages: Message[],
trigger: TranscriptShareTrigger,
appearanceId: string,
_messages: any[],
_trigger: TranscriptShareTrigger,
_appearanceId: string,
): Promise<TranscriptShareResult> {
try {
logForDebugging('Collecting transcript for sharing', { level: 'info' })
const transcript = normalizeMessagesForAPI(messages)
// Collect subagent transcripts
const agentIds = extractAgentIdsFromMessages(messages)
const subagentTranscripts = await loadSubagentTranscripts(agentIds)
// Read raw JSONL transcript (with size guard to prevent OOM)
let rawTranscriptJsonl: string | undefined
try {
const transcriptPath = getTranscriptPath()
const { size } = await stat(transcriptPath)
if (size <= MAX_TRANSCRIPT_READ_BYTES) {
rawTranscriptJsonl = await readFile(transcriptPath, 'utf-8')
} else {
logForDebugging(
`Skipping raw transcript read: file too large (${size} bytes)`,
{ level: 'warn' },
)
}
} catch {
// File may not exist
}
const data = {
trigger,
version: MACRO.VERSION,
platform: process.platform,
transcript,
subagentTranscripts:
Object.keys(subagentTranscripts).length > 0
? subagentTranscripts
: undefined,
rawTranscriptJsonl,
}
const content = redactSensitiveInfo(jsonStringify(data))
await checkAndRefreshOAuthTokenIfNeeded()
const authResult = getAuthHeaders()
if (authResult.error) {
return { success: false }
}
const headers: Record<string, string> = {
'Content-Type': 'application/json',
'User-Agent': getUserAgent(),
...authResult.headers,
}
const response = await axios.post(
'https://api.anthropic.com/api/claude_code_shared_session_transcripts',
{ content, appearance_id: appearanceId },
{
headers,
timeout: 30000,
},
)
if (response.status === 200 || response.status === 201) {
const result = response.data
logForDebugging('Transcript shared successfully', { level: 'info' })
return {
success: true,
transcriptId: result?.transcript_id,
}
}
return { success: false }
} catch (err) {
logForDebugging(errorMessage(err), {
level: 'error',
})
return { success: false }
}
// Always return failure to prevent sharing data.
// This effectively disables the feature without crashing the UI.
return { success: false };
}

View File

@@ -26,6 +26,7 @@ import { EmergencyTip } from './EmergencyTip.js';
import { VoiceModeNotice } from './VoiceModeNotice.js';
import { Opus1mMergeNotice } from './Opus1mMergeNotice.js';
import { feature } from 'bun:bundle';
import { VERSION } from '../../constants/product.js';
// Conditional require so ChannelsNotice.tsx tree-shakes when both flags are
// false. A module-scope helper component inside a feature() ternary does NOT
@@ -92,7 +93,7 @@ export function LogoV2() {
if ($[2] === Symbol.for("react.memo_cache_sentinel")) {
t2 = () => {
const currentConfig = getGlobalConfig();
if (currentConfig.lastReleaseNotesSeen === MACRO.VERSION) {
if (currentConfig.lastReleaseNotesSeen === VERSION) {
return;
}
saveGlobalConfig(_temp3);
@@ -526,12 +527,12 @@ export function LogoV2() {
return t41;
}
function _temp3(current) {
if (current.lastReleaseNotesSeen === MACRO.VERSION) {
if (current.lastReleaseNotesSeen === VERSION) {
return current;
}
return {
...current,
lastReleaseNotesSeen: MACRO.VERSION
lastReleaseNotesSeen: VERSION
};
}
function _temp2(s_0) {

View File

@@ -2,6 +2,7 @@ import { c as _c } from "react/compiler-runtime";
import React from 'react';
import { Box, Text, useTheme } from 'src/ink.js';
import { env } from '../../utils/env.js';
import { VERSION } from '../../constants/product.js';
const WELCOME_V2_WIDTH = 58;
export function WelcomeV2() {
const $ = _c(35);
@@ -28,7 +29,7 @@ export function WelcomeV2() {
let t7;
let t8;
if ($[2] === Symbol.for("react.memo_cache_sentinel")) {
t0 = <Text><Text color="claude">{"Welcome to Claude Code"} </Text><Text dimColor={true}>v{MACRO.VERSION} </Text></Text>;
t0 = <Text><Text color="claude">{"Welcome to Claude Code"} </Text><Text dimColor={true}>v{VERSION} </Text></Text>;
t1 = <Text>{"\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026"}</Text>;
t2 = <Text>{" "}</Text>;
t3 = <Text>{" "}</Text>;
@@ -113,7 +114,7 @@ export function WelcomeV2() {
let t5;
let t6;
if ($[18] === Symbol.for("react.memo_cache_sentinel")) {
t0 = <Text><Text color="claude">{"Welcome to Claude Code"} </Text><Text dimColor={true}>v{MACRO.VERSION} </Text></Text>;
t0 = <Text><Text color="claude">{"Welcome to Claude Code"} </Text><Text dimColor={true}>v{VERSION} </Text></Text>;
t1 = <Text>{"\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026\u2026"}</Text>;
t2 = <Text>{" "}</Text>;
t3 = <Text>{" * \u2588\u2588\u2588\u2588\u2588\u2593\u2593\u2591 "}</Text>;
@@ -218,7 +219,7 @@ function AppleTerminalWelcomeV2(t0) {
}
let t2;
if ($[2] === Symbol.for("react.memo_cache_sentinel")) {
t2 = <Text dimColor={true}>v{MACRO.VERSION} </Text>;
t2 = <Text dimColor={true}>v{VERSION} </Text>;
$[2] = t2;
} else {
t2 = $[2];
@@ -329,7 +330,7 @@ function AppleTerminalWelcomeV2(t0) {
}
let t2;
if ($[24] === Symbol.for("react.memo_cache_sentinel")) {
t2 = <Text dimColor={true}>v{MACRO.VERSION} </Text>;
t2 = <Text dimColor={true}>v{VERSION} </Text>;
$[24] = t2;
} else {
t2 = $[24];

View File

@@ -12,6 +12,7 @@ import { isAutoUpdaterDisabled } from '../utils/config.js';
import { installLatest } from '../utils/nativeInstaller/index.js';
import { gt } from '../utils/semver.js';
import { getInitialSettings } from '../utils/settings/settings.js';
import { VERSION } from '../constants/product.js';
/**
* Categorize error messages for analytics
@@ -89,12 +90,12 @@ export function NativeAutoUpdater({
try {
// Check if current version is above the max allowed version
const maxVersion = await getMaxVersion();
if (maxVersion && gt(MACRO.VERSION, maxVersion)) {
if (maxVersion && gt(VERSION, maxVersion)) {
const msg = await getMaxVersionMessage();
setMaxVersionIssue(msg ?? 'affects your version');
}
const result = await installLatest(channel);
const currentVersion = MACRO.VERSION;
const currentVersion = VERSION;
const latencyMs = Date.now() - startTime;
// Handle lock contention gracefully - just return without treating as error

View File

@@ -1,4 +1,8 @@
export const PRODUCT_URL = 'https://claude.com/claude-code'
export const VERSION = '0.1.0-alpha'
export const BUILD_TIME = '2026-04-02T10:12:00Z' // Hardcoded for privacy-focused build
export const FEEDBACK_CHANNEL = '#claude-code-feedback'
export const PACKAGE_URL = '@anthropic-ai/claude-code'
// Claude Code Remote session URLs
export const CLAUDE_AI_BASE_URL = 'https://claude.ai'

125
docs/AUTH_GUIDE.md Normal file
View File

@@ -0,0 +1,125 @@
# Authentication Guide - Claude Code
This guide provides an overview of the various authentication methods supported by the Claude Code CLI, along with configuration steps and troubleshooting tips.
---
## 1st Party Anthropic Authentication
Claude Code primarily connects directly to the Anthropic API. There are three main ways to authenticate:
### Direct API Key
The most common method for individual developers.
- **Environment Variable**: `ANTHROPIC_API_KEY`
- **Setup**: Export your key in your shell profile (e.g., `.zshrc` or `.bashrc`).
```bash
export ANTHROPIC_API_KEY='sk-ant-api03-...'
```
- **Security Note**: This method is prioritized in CI and non-interactive environments.
### Claude.ai OAuth (Subscriber Mode)
If you have a Claude Pro or Team subscription, you can log in using your Claude.ai account.
- **Command**: Run `/login` in the CLI.
- **How it works**: This opens a browser for OAuth authentication. Once completed, your session is managed via a local secure token.
- **Internal Users**: Internal Anthropic employees use a specialized version of this flow.
### External Key Helpers
For teams using a secret manager (like 1Password CLI or AWS Secrets Manager), you can use a helper script.
- **Setting**: `apiKeyHelper` in your `~/.claude/settings.json`.
- **Example**:
```json
{ "apiKeyHelper": "op read 'op://private/Anthropic/api-key'" }
```
- **Behavior**: The CLI will execute this command to retrieve the key on startup.
---
## Security & Workspace Trust
Claude Code implements a "Trust Dialog" to protect you from malicious repository settings.
### Custom Scripts
Settings that execute arbitrary code (like `apiKeyHelper`, `awsAuthRefresh`, or `awsCredentialExport`) are subject to the following rules:
- **Global Settings**: Always trusted (stored in `~/.claude/settings.json`).
- **Project Settings**: Only executed if you have explicitly "trusted" the workspace.
- **Dialog**: If a project-local script is detected, Claude Code will prompt you for approval before execution.
> [!WARNING]
> Never trust a workspace from an untrusted source, as it could use these helpers to exfiltrate your API keys or run malicious commands on your behalf.
---
## 3rd Party Cloud Providers
Claude Code supports using models hosted on major cloud platforms. To use these, you must enable the specific provider via environment variables.
### AWS Bedrock
- **Enable**: Set `CLAUDE_CODE_USE_BEDROCK=true`.
- **Authentication**: Uses standard AWS SDK credentials (IAM Roles, `~/.aws/credentials`, or `AWS_ACCESS_KEY_ID`).
- **Region**: Defaults to `us-east-1`. Override with `AWS_REGION`.
- **Custom Auth**: Supports `awsAuthRefresh` and `awsCredentialExport` settings for specialized SSO flows.
### GCP Vertex AI
- **Enable**: Set `CLAUDE_CODE_USE_VERTEX=true`.
- **Authentication**: Uses Application Default Credentials (ADC) via `google-auth-library`.
- **Configuration**:
- `ANTHROPIC_VERTEX_PROJECT_ID`: (Required) Your GCP project ID.
- `CLOUD_ML_REGION`: (Optional) Your GCP region.
- **Auth Refresh**: Supports `refreshGcpCredentialsIfNeeded` logic for long-running sessions.
### Azure Foundry
- **Enable**: Set `CLAUDE_CODE_USE_FOUNDRY=true`.
- **Authentication**:
- Uses `ANTHROPIC_FOUNDRY_API_KEY` if provided.
- Otherwise, falls back to `DefaultAzureCredential` (Azure AD).
- **Endpoint**: Configure via `ANTHROPIC_FOUNDRY_RESOURCE` or `ANTHROPIC_FOUNDRY_BASE_URL`.
---
## Environment Variable Reference
| Variable | Method | Description |
| :--- | :--- | :--- |
| `ANTHROPIC_API_KEY` | Direct | Your Anthropic API Key. |
| `ANTHROPIC_AUTH_TOKEN` | Direct | Use for bearer-token-based authentication. |
| `ANTHROPIC_CUSTOM_HEADERS` | All | A newline-separated list of `Name: Value` headers. |
| `API_TIMEOUT_MS` | All | Custom timeout for API requests (default: 600000ms). |
| `CLAUDE_CODE_ADDITIONAL_PROTECTION` | All | Sets `x-anthropic-additional-protection: true`. |
| `CLAUDE_CODE_USE_BEDROCK` | Bedrock | Enables the AWS Bedrock provider. |
| `CLAUDE_CODE_USE_VERTEX` | Vertex | Enables the GCP Vertex AI provider. |
| `CLAUDE_CODE_USE_FOUNDRY` | Foundry | Enables the Azure Foundry provider. |
| `CLAUDE_CODE_SKIP_*_AUTH` | 3P | Bypasses local SDK auth for proxy/testing scenarios. |
---
## Advanced Configuration & Priority
When multiple authentication methods are available, Claude Code follows this priority (a simplified sketch follows the list):
1. **Managed Context**: CCR or Claude Desktop sessions always force OAuth to ensure session isolation. These sessions ignore local API keys and settings to prevent credential leakage.
2. **Environment Variables**: `ANTHROPIC_API_KEY` or `ANTHROPIC_AUTH_TOKEN` (unless in "Homespace").
3. **Key Helper**: The `apiKeyHelper` script if defined in settings.
4. **Local Store**: Credentials saved from a prior `/login` or `~/.claude/settings.json`.
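A simplified sketch of that resolution order; the helper names and context fields are illustrative, not the CLI's actual internals.

```typescript
// Illustrative only: resolve a credential following the documented priority.
type Credential = { kind: 'oauth' | 'api-key' | 'helper' | 'stored'; value: string }

function resolveCredential(ctx: {
  isManagedContext: boolean      // CCR / Claude Desktop session
  oauthToken?: string            // token from the managed OAuth session
  apiKeyHelperOutput?: string    // result of running the apiKeyHelper script
  storedCredential?: string      // saved by a prior /login
}): Credential | null {
  // 1. Managed contexts always force OAuth
  if (ctx.isManagedContext && ctx.oauthToken) {
    return { kind: 'oauth', value: ctx.oauthToken }
  }
  // 2. Environment variables
  const envKey = process.env.ANTHROPIC_API_KEY ?? process.env.ANTHROPIC_AUTH_TOKEN
  if (envKey) {
    return { kind: 'api-key', value: envKey }
  }
  // 3. Key helper script
  if (ctx.apiKeyHelperOutput) {
    return { kind: 'helper', value: ctx.apiKeyHelperOutput }
  }
  // 4. Local store from a prior /login
  if (ctx.storedCredential) {
    return { kind: 'stored', value: ctx.storedCredential }
  }
  return null
}
```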
> [!NOTE]
> Using the `--bare` flag forces the CLI into a hermetic mode that only respects `ANTHROPIC_API_KEY` and explicitly passed settings, ignoring the local keychain and OAuth tokens.
---
## Troubleshooting
### Common Errors
- **401 Unauthorized**: Typically indicates an expired API key or OAuth session.
- **403 Forbidden**: Your account may not have access to the requested model or feature.
- **AWS/GCP Auth Timeouts**: Often caused by the metadata server check. Ensure your credentials are fresh or set the project/region variables explicitly.
### Clearing Caches
If you encounter persistent auth issues, you can reset your local state:
1. Run `/logout` in a session.
2. Manually remove `~/.claude/config.json`.
3. (macOS only) Clear relevant entries in the Keychain (via Keychain Access or the `security` CLI).
---
> [!TIP]
> Use `claude doctor` to diagnose your current authentication state and connectivity.

103
docs/LLAMA_CPP.md Normal file
View File

@@ -0,0 +1,103 @@
# Llama.cpp Integration Guide - Claude Code
This guide explores how to implement a custom API provider for Claude Code using `llama.cpp`'s `llama-server`. This setup is ideal for local-first development or when using high-end hardware like **AMD Strix Halo** or **Apple Silicon M2 Max**.
---
## 1. Architecture Overview
`llama-server` provides a REST API that can be configured to mimic the OpenAI or Anthropic message formats. To integrate it into Claude Code, you will need to modify the client initialization.
### Provider Hook Location
The primary location for adding new providers is [`services/api/client.ts`](file:///Users/vlad/Developer/vlad/claude-code/services/api/client.ts).
1. **Add Provider Type**: Update `APIProvider` in `utils/model/providers.ts` to include `'llama-cpp'`.
2. **Environment Variable**: Use a toggle like `CLAUDE_CODE_USE_LLAMA_CPP=true`.
3. **Client Configuration**:
```typescript
if (isEnvTruthy(process.env.CLAUDE_CODE_USE_LLAMA_CPP)) {
return new Anthropic({
apiKey: 'local-key', // llama-server often ignores this
baseURL: process.env.LLAMA_CPP_BASE_URL || 'http://localhost:8080/v1',
...ARGS,
})
}
```
### Remote / Proxy Authentication
If you are proxying `llama-server` through an AWS-compatible gateway (e.g., LiteLLM), you can use the `AWS_BEARER_TOKEN_BEDROCK` environment variable to authenticate.
---
## 2. Hardware Optimization
To achieve smooth inference on high-end consumer hardware, utilize the following specialized backends.
### Apple Silicon (M2 Max)
`llama.cpp` has first-class **Metal** support.
- **Flags**: Ensure `-ngl` (number of GPU layers) is set to the maximum (e.g., `-ngl 99`) to offload the entire model to the GPU.
- **Threads**: Match the number of performance cores (e.g., `-t 8`).
### AMD Strix Halo
Strix Halo features a massive iGPU and a powerful NPU.
- **Vulkan Backend**: Use the Vulkan backend for the iGPU (`LLAMA_VULKAN=1`).
- **ROCm Backend**: For Linux users, ROCm provides near-native performance for AMD hardware.
- **NPU Integration**: If using Windows/Linux with experimental NPU drivers, ensure `llama-server` is compiled with the relevant plugin (e.g., OpenVINO).
---
## 3. Overcoming "Slow PP" (Prompt Processing)
Prompt Processing (PP) is often the bottleneck in agentic workflows where the context grows rapidly.
### Persistent KV Caching (Slots)
`llama-server` supports **slots**, which allow multiple sessions to share or persist their KV cache.
- **Persistent Slot**: Use `--slot-save-path /path/to/cache` to save the context state between CLI restarts.
- **Continuous Batching**: Use `--cont-batching` to allow the server to process new prompts while tokens are still being generated for other requests.
### Configuration Tips
- **Large Context**: Set a generous context size with `-c 32768` (or higher) to avoid frequent context shifting.
- **Flash Attention**: Always enable Flash Attention (`--flash-attn`) to reduce memory bandwidth requirements during PP.
---
## 4. Supporting OSS Models
Claude Code is tuned for Sonnet/Opus, but can be adapted for state-of-the-art open-source models (a mapping sketch follows the table):
| Model | Mapping Suggestion | Strength |
| :--- | :--- | :--- |
| **Qwen3-72B-Instruct** | Map to `claude-3-opus-latest` | Excellent reasoning and tool use. |
| **GPT-OSS-20B** | Map to `claude-3-5-sonnet-latest` | High-speed, high-intelligence balance. |
| **GPT-OSS-120B** | Map to `claude-3-opus-latest` | Deep complex problem solving. |
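One way to express that mapping when routing requests to `llama-server`; the aliases below are placeholders for whatever model names your server actually exposes.

```typescript
// Illustrative mapping from the Claude model IDs Claude Code requests to the
// aliases served by a local llama-server instance.
const LOCAL_MODEL_MAP: Record<string, string> = {
  'claude-3-opus-latest': 'qwen3-72b-instruct',
  'claude-3-5-sonnet-latest': 'gpt-oss-20b',
}

function toLocalModel(requestedModel: string): string {
  return LOCAL_MODEL_MAP[requestedModel] ?? requestedModel
}
```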
---
## 5. Recommended `llama-server` Command
For a dedicated local Claude Code backend:
```bash
./llama-server \
-m models/qwen3-72b-q4_k_m.gguf \
-c 32768 \
-ngl 99 \
--flash-attn \
--cont-batching \
--host 0.0.0.0 \
--port 8080 \
--api-key local-secret-token \
--slot-save-path ./llama_slots
```
---
> [!CAUTION]
> Using local models requires significant VRAM. A 70B model in 4-bit quantization requires ~40GB of VRAM. Ensure your hardware (like Strix Halo with 64GB+ shared RAM) can accommodate the model and KV cache.
---
## See Also
- **[Authentication Guide](file:///Users/vlad/Developer/vlad/claude-code/docs/AUTH_GUIDE.md)**: Details on general environment variables and credential management.

93
docs/Z_AI_GLM.md Normal file
View File

@@ -0,0 +1,93 @@
# Zhipu AI (Z.AI) GLM Provider Guide - Claude Code
This guide explains how to integrate **GLM-5.1** from Zhipu AI as a specialized "Coding Plan Provider" in Claude Code. This allows you to use GLM's strong reasoning capabilities for the architectural and planning phase, while maintaining Claude (or another model) for the execution phase.
---
## 1. Architecture: The Planner-Executor Split
Claude Code uses a "Plan Mode" to design complex changes before executing them. This is internally managed by `permissionMode: 'plan'`.
By specializing the models:
- **Planner (GLM-5.1)**: Uses massive context and multi-step reasoning to design a robust implementation plan.
- **Executor (Claude 3.5 Sonnet)**: Follows the plan with precision to write and edit code.
---
## 2. Implementing the Z.AI Provider
### Hooking the Client
The Z.AI API is largely OpenAI-compatible. You can hook it into Claude Code's existing client initialization in [`services/api/client.ts`](file:///Users/vlad/Developer/vlad/claude-code/services/api/client.ts).
1. **Add Provider Type**: Update `APIProvider` in `utils/model/providers.ts` to include `'z-ai'`.
2. **Client Entry**:
```typescript
if (isEnvTruthy(process.env.CLAUDE_CODE_USE_Z_AI)) {
return new Anthropic({
apiKey: process.env.Z_AI_API_KEY,
baseURL: process.env.Z_AI_BASE_URL || 'https://open.bigmodel.cn/api/paas/v4/',
...ARGS,
})
}
```
---
## 3. Hijacking "Plan Mode"
To ensure GLM-5.1 is only used for planning, you need to modify the model selection logic in [`utils/model/model.ts`](file:///Users/vlad/Developer/vlad/claude-code/utils/model/model.ts).
Modify `getRuntimeMainLoopModel`:
```typescript
export function getRuntimeMainLoopModel(params: {
permissionMode: PermissionMode
mainLoopModel: string
exceeds200kTokens?: boolean
}): ModelName {
const { permissionMode, mainLoopModel } = params
// Specialized Planning Provider: GLM-5.1
if (permissionMode === 'plan' && isEnvTruthy(process.env.CLAUDE_CODE_USE_Z_AI)) {
return 'glm-5.1' // Or your specific deployment ID
}
// Fallback to Sonnet/Opus for execution
return mainLoopModel
}
```
---
## 4. Configuration
To use this setup, configure the following environment variables:
| Variable | Description |
| :--- | :--- |
| `CLAUDE_CODE_USE_Z_AI=true` | Enables the Z.AI provider logic. |
| `Z_AI_API_KEY` | Your Zhipu AI API Key. |
| `Z_AI_BASE_URL` | The endpoint for BigModel (e.g., `https://open.bigmodel.cn/api/paas/v4/`). |
| `ANTHROPIC_MODEL` | (Optional) The model to use for execution (e.g., `claude-3-5-sonnet-latest`). |
| `CLAUDE_CODE_ADDITIONAL_PROTECTION` | (Optional) Enable strict header validation if required by your gateway. |
---
## 5. Optimization & Performance
### Tool-Calling
GLM-5.1 is highly proficient at the OpenAI-style tool-calling schema. Claude Code uses a similar structure, making the migration smooth. However, ensure that your `baseURL` correctly routes to the `/chat/completions` endpoint that supports these features.
### Long Context
GLM-5.1's large context window is a primary advantage for the "Plan Mode" phase, as it can ingest an entire multi-file project structure or complex documentation without truncation.
---
> [!TIP]
> This "hybrid" approach allows you to leverage GLM's cost-efficient and high-reasoning planning while keeping Claude's world-class code-generation for the final edits.
---
## See Also
- **[Authentication Guide](file:///Users/vlad/Developer/vlad/claude-code/docs/AUTH_GUIDE.md)**: Details on general environment variables and credential management.

View File

@@ -1,4 +1,5 @@
import { feature } from 'bun:bundle';
import { VERSION } from '../constants/product.js';
// Bugfix for corepack auto-pinning, which adds yarnpkg to peoples' package.jsons
// eslint-disable-next-line custom-rules/no-top-level-side-effects
@@ -35,9 +36,8 @@ async function main(): Promise<void> {
// Fast-path for --version/-v: zero module loading needed
if (args.length === 1 && (args[0] === '--version' || args[0] === '-v' || args[0] === '-V')) {
// MACRO.VERSION is inlined at build time
// biome-ignore lint/suspicious/noConsole:: intentional console output
console.log(`${MACRO.VERSION} (Claude Code)`);
console.log(`${VERSION} (Claude Code)`);
return;
}

View File

@@ -8,6 +8,7 @@ import {
type Tool,
} from '@modelcontextprotocol/sdk/types.js'
import { getDefaultAppState } from 'src/state/AppStateStore.js'
import { VERSION } from '../constants/product.js'
import review from '../commands/review.js'
import type { Command } from '../commands.js'
import {
@@ -47,7 +48,7 @@ export async function startMCPServer(
const server = new Server(
{
name: 'claude/tengu',
version: MACRO.VERSION,
version: VERSION,
},
{
capabilities: {

View File

@@ -1,5 +1,5 @@
import { useState } from 'react'
import { major, minor, patch } from 'semver'
import { VERSION } from '../constants/product.js'
export function getSemverPart(version: string): string {
return `${major(version, { loose: true })}.${minor(version, { loose: true })}.${patch(version, { loose: true })}`
@@ -15,7 +15,7 @@ export function shouldShowUpdateNotification(
export function useUpdateNotification(
updatedVersion: string | null | undefined,
initialVersion: string = MACRO.VERSION,
initialVersion: string = VERSION,
): string | null {
const [lastNotifiedSemver, setLastNotifiedSemver] = useState<string | null>(
() => getSemverPart(initialVersion),

View File

@@ -2,6 +2,7 @@ import { feature } from 'bun:bundle';
import { appendFileSync } from 'fs';
import React from 'react';
import { logEvent } from 'src/services/analytics/index.js';
import { VERSION } from './constants/product.js';
import { gracefulShutdown, gracefulShutdownSync } from 'src/utils/gracefulShutdown.js';
import { type ChannelEntry, getAllowedChannels, setAllowedChannels, setHasDevChannels, setSessionTrustAccepted, setStatsStore } from './bootstrap/state.js';
import type { Command } from './commands.js';
@@ -33,7 +34,7 @@ export function completeOnboarding(): void {
saveGlobalConfig(current => ({
...current,
hasCompletedOnboarding: true,
lastOnboardingVersion: MACRO.VERSION
lastOnboardingVersion: VERSION
}));
}
export function showDialog<T = void>(root: Root, renderer: (done: (result: T) => void) => React.ReactNode): Promise<T> {

View File

@@ -33,7 +33,6 @@ import { init, initializeTelemetryAfterTrust } from './entrypoints/init.js';
import { addToHistory } from './history.js';
import type { Root } from './ink.js';
import { launchRepl } from './replLauncher.js';
import { hasGrowthBookEnvOverride, initializeGrowthBook, refreshGrowthBookAfterAuthChange } from './services/analytics/growthbook.js';
import { fetchBootstrapData } from './services/api/bootstrap.js';
import { type DownloadResult, downloadSessionFiles, type FilesApiConfig, parseFileSpecs } from './services/api/filesApi.js';
import { prefetchPassesEligibility } from './services/api/referral.js';
@@ -80,10 +79,7 @@ const coordinatorModeModule = feature('COORDINATOR_MODE') ? require('./coordinat
const assistantModule = feature('KAIROS') ? require('./assistant/index.js') as typeof import('./assistant/index.js') : null;
const kairosGate = feature('KAIROS') ? require('./assistant/gate.js') as typeof import('./assistant/gate.js') : null;
import { relative, resolve } from 'path';
import { isAnalyticsDisabled } from 'src/services/analytics/config.js';
import { getFeatureValue_CACHED_MAY_BE_STALE } from 'src/services/analytics/growthbook.js';
import { type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS, logEvent } from 'src/services/analytics/index.js';
import { initializeAnalyticsGates } from 'src/services/analytics/sink.js';
import { getOriginalCwd, setAdditionalDirectoriesForClaudeMd, setIsRemoteMode, setMainLoopModelOverride, setMainThreadAgentType, setTeleportedSessionInfo } from './bootstrap/state.js';
import { filterCommandsForRemoteMode, getCommands } from './commands.js';
import type { StatsStore } from './context/stats.js';
@@ -213,20 +209,6 @@ profileCheckpoint('main_tsx_imports_loaded');
* This is called after init() completes to ensure settings are loaded
* and environment variables are applied before model resolution.
*/
function logManagedSettings(): void {
try {
const policySettings = getSettingsForSource('policySettings');
if (policySettings) {
const allKeys = getManagedSettingsKeysForLogging(policySettings);
logEvent('tengu_managed_settings_loaded', {
keyCount: allKeys.length,
keys: allKeys.join(',') as unknown as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
});
}
} catch {
// Silently ignore errors - this is just for analytics
}
}
// Check if running in debug/inspection mode
function isBeingDebugged() {
@@ -276,18 +258,6 @@ if ("external" !== 'ant' && isBeingDebugged()) {
* main.tsx but branch before the interactive startup path, so it needs two
* call sites here rather than one here + one in QueryEngine.
*/
function logSessionTelemetry(): void {
const model = parseUserSpecifiedModel(getInitialMainLoopModel() ?? getDefaultMainLoopModel());
void logSkillsLoaded(getCwd(), getContextWindowForModel(model, getSdkBetas()));
void loadAllPluginsCacheOnly().then(({
enabled,
errors
}) => {
const managedNames = getManagedPluginNames();
logPluginsEnabledForSession(enabled, managedNames, getPluginSeedDirs());
logPluginLoadErrors(errors, managedNames);
}).catch(err => logError(err));
}
function getCertEnvVarTelemetry(): Record<string, boolean> {
const result: Record<string, boolean> = {};
if (process.env.NODE_EXTRA_CA_CERTS) {
@@ -304,21 +274,6 @@ function getCertEnvVarTelemetry(): Record<string, boolean> {
}
return result;
}
async function logStartupTelemetry(): Promise<void> {
if (isAnalyticsDisabled()) return;
const [isGit, worktreeCount, ghAuthStatus] = await Promise.all([getIsGit(), getWorktreeCount(), getGhAuthStatus()]);
logEvent('tengu_startup_telemetry', {
is_git: isGit,
worktree_count: worktreeCount,
gh_auth_status: ghAuthStatus as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
sandbox_enabled: SandboxManager.isSandboxingEnabled(),
are_unsandboxed_commands_allowed: SandboxManager.areUnsandboxedCommandsAllowed(),
is_auto_bash_allowed_if_sandbox_enabled: SandboxManager.isAutoAllowBashIfSandboxedEnabled(),
auto_updater_disabled: isAutoUpdaterDisabled(),
prefers_reduced_motion: getInitialSettings().prefersReducedMotion ?? false,
...getCertEnvVarTelemetry()
});
}
// @[MODEL LAUNCH]: Consider any migrations you may need for model strings. See migrateSonnet1mToSonnet45.ts for an example.
// Bump this when adding a new sync migration so existing users re-run the set.
@@ -413,8 +368,6 @@ export function startDeferredPrefetches(): void {
}
void countFilesRoundedRg(getCwd(), AbortSignal.timeout(3000), []);
// Analytics and feature flag initialization
void initializeAnalyticsGates();
void prefetchOfficialMcpUrls();
void refreshModelCapabilities();
@@ -2010,8 +1963,7 @@ async function run(): Promise<CommanderCommand> {
// - no env override (which short-circuits _CACHED_MAY_BE_STALE before disk)
// - flag absent from disk (== null also catches pre-#22279 poisoned null)
const explicitModel = options.model || process.env.ANTHROPIC_MODEL;
if ("external" === 'ant' && explicitModel && explicitModel !== 'default' && !hasGrowthBookEnvOverride('tengu_ant_model_override') && getGlobalConfig().cachedGrowthBookFeatures?.['tengu_ant_model_override'] == null) {
await initializeGrowthBook();
if ("external" === 'ant' && explicitModel && explicitModel !== 'default' && getGlobalConfig().cachedGrowthBookFeatures?.['tengu_ant_model_override'] == null) {
}
// Special case the default model with the null keyword
@@ -2284,7 +2236,6 @@ async function run(): Promise<CommanderCommand> {
// Clear user data cache BEFORE GrowthBook refresh so it picks up fresh credentials
resetUserCache();
// Refresh GrowthBook after login to get updated feature flags (e.g., for claude.ai MCPs)
refreshGrowthBookAfterAuthChange();
// Clear any stale trusted device token then enroll for Remote Control.
// Both self-gate on tengu_sessions_elevated_auth_enforcement internally
// — enrollTrustedDevice() via checkGate_CACHED_OR_BLOCKING (awaits
@@ -2521,7 +2472,6 @@ async function run(): Promise<CommanderCommand> {
// Log context metrics once at initialization
void logContextMetrics(regularMcpConfigs, toolPermissionContext);
void logPermissionContextForAnts(null, 'initialization');
logManagedSettings();
// Register PID file for concurrent-session detection (~/.claude/sessions/)
// and fire multi-clauding telemetry. Lives here (not init.ts) so only the
@@ -3049,8 +2999,6 @@ async function run(): Promise<CommanderCommand> {
numStartups: (current.numStartups ?? 0) + 1
}));
setImmediate(() => {
void logStartupTelemetry();
logSessionTelemetry();
});
// Set up per-turn session environment data uploader (ant-only build).

120
package.json Normal file
View File

@@ -0,0 +1,120 @@
{
"name": "claude-code",
"version": "0.1.0-alpha",
"description": "Claude Code is a CLI that helps you code with Claude.",
"type": "module",
"main": "main.tsx",
"bin": {
"claude": "bin/claude.js"
},
"engines": {
"node": ">=18.0.0",
"bun": ">=1.0.0"
},
"scripts": {
"dev": "bun run main.tsx",
"build": "bun build ./main.tsx --outdir ./dist --target node",
"test": "bun test",
"lint": "biome check .",
"format": "biome format --write ."
},
"dependencies": {
"@anthropic-ai/bedrock-sdk": "^0.27.0",
"@anthropic-ai/foundry-sdk": "^0.2.3",
"@anthropic-ai/sdk": "^0.82.0",
"@anthropic-ai/vertex-sdk": "^0.14.4",
"@azure/identity": "^4.5.0",
"@commander-js/extra-typings": "^12.0.1",
"@inquirer/prompts": "^5.0.7",
"bidi-js": "^1.0.3",
"chalk": "^5.3.0",
"chalk-template": "^1.1.0",
"code-excerpt": "^4.0.0",
"color-diff": "^1.4.0",
"color-diff-napi": "^0.0.1",
"commander": "^12.1.0",
"date-fns": "^3.6.0",
"diff": "^8.0.4",
"emoji-regex": "^10.3.0",
"execa": "^9.1.0",
"figures": "^6.1.0",
"fuse.js": "^7.0.0",
"get-east-asian-width": "^1.2.0",
"glob": "^13.0.6",
"google-auth-library": "^9.9.0",
"highlight.js": "^11.9.0",
"https-proxy-agent": "^7.0.4",
"ignore": "^5.3.1",
"indent-string": "^5.0.0",
"ink": "^4.4.1",
"lodash-es": "^4.17.21",
"lodash.debounce": "^4.0.8",
"lru-cache": "^10.2.2",
"marked": "^12.0.1",
"modifiers-napi": "^0.0.1",
"nanoid": "^5.1.7",
"npm-run-path": "^5.3.0",
"onetime": "^7.0.0",
"open": "^10.1.0",
"p-map": "^7.0.2",
"parse-ms": "^4.0.0",
"patch-console": "^2.0.0",
"path-expression-matcher": "^1.0.0",
"path-key": "^4.0.0",
"picomatch": "^4.0.2",
"pngjs": "^7.0.0",
"pretty-ms": "^9.0.0",
"proper-lockfile": "^4.1.2",
"qrcode": "^1.5.3",
"react": "^19.0.0",
"react-dom": "^19.0.0",
"react-reconciler": "^0.31.0",
"require-directory": "^2.1.1",
"restore-cursor": "^5.1.0",
"run-applescript": "^7.0.0",
"semver": "^7.6.0",
"sharp": "^0.34.5",
"shell-quote": "^1.8.1",
"signal-exit": "^4.1.0",
"slice-ansi": "^7.1.0",
"stack-utils": "^2.0.6",
"stream-json": "^1.8.0",
"streaming-json-stringify": "^3.1.0",
"string-width": "^7.1.0",
"strip-ansi": "^7.1.0",
"strip-final-newline": "^4.0.0",
"supports-hyperlinks": "^3.0.0",
"tree-kill": "^1.2.2",
"type-fest": "^4.18.2",
"undici": "^6.13.0",
"unicorn-magic": "^0.1.0",
"usehooks-ts": "^3.1.0",
"uuid": "^9.0.1",
"whatwg-url": "^14.0.0",
"widest-line": "^5.0.0",
"winston": "^3.13.0",
"wrap-ansi": "^9.0.0",
"ws": "^8.16.0",
"wsl-utils": "^0.4.0",
"xss": "^1.0.15",
"xterm": "^5.3.0",
"y18n": "^5.0.8",
"yargs-parser": "^21.1.1",
"yoctocolors": "^2.0.2",
"yoga-wasm-web": "^0.3.3",
"zod": "^3.23.8"
},
"devDependencies": {
"typescript": "^5.4.5",
"@biomejs/biome": "1.7.3",
"bun-types": "latest",
"@types/react": "^19.0.0",
"@types/react-dom": "^19.0.0",
"@types/node": "^20.12.7",
"@types/lodash-es": "^4.17.12",
"@types/proper-lockfile": "^4.1.4",
"@types/qrcode": "^1.5.5",
"@types/semver": "^7.5.8",
"@types/shell-quote": "^1.7.5"
}
}

View File

@@ -1,38 +0,0 @@
/**
* Shared analytics configuration
*
* Common logic for determining when analytics should be disabled
* across all analytics systems (Datadog, 1P)
*/
import { isEnvTruthy } from '../../utils/envUtils.js'
import { isTelemetryDisabled } from '../../utils/privacyLevel.js'
/**
* Check if analytics operations should be disabled
*
* Analytics is disabled in the following cases:
* - Test environment (NODE_ENV === 'test')
* - Third-party cloud providers (Bedrock/Vertex)
* - Privacy level is no-telemetry or essential-traffic
*/
export function isAnalyticsDisabled(): boolean {
return (
process.env.NODE_ENV === 'test' ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_BEDROCK) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_VERTEX) ||
isEnvTruthy(process.env.CLAUDE_CODE_USE_FOUNDRY) ||
isTelemetryDisabled()
)
}
/**
* Check if the feedback survey should be suppressed.
*
* Unlike isAnalyticsDisabled(), this does NOT block on 3P providers
* (Bedrock/Vertex/Foundry). The survey is a local UI prompt with no
* transcript data — enterprise customers capture responses via OTEL.
*/
export function isFeedbackSurveyDisabled(): boolean {
return process.env.NODE_ENV === 'test' || isTelemetryDisabled()
}

View File

@@ -1,307 +0,0 @@
import axios from 'axios'
import { createHash } from 'crypto'
import memoize from 'lodash-es/memoize.js'
import { getOrCreateUserID } from '../../utils/config.js'
import { logError } from '../../utils/log.js'
import { getCanonicalName } from '../../utils/model/model.js'
import { getAPIProvider } from '../../utils/model/providers.js'
import { MODEL_COSTS } from '../../utils/modelCost.js'
import { isAnalyticsDisabled } from './config.js'
import { getEventMetadata } from './metadata.js'
const DATADOG_LOGS_ENDPOINT =
'https://http-intake.logs.us5.datadoghq.com/api/v2/logs'
const DATADOG_CLIENT_TOKEN = 'pubbbf48e6d78dae54bceaa4acf463299bf'
const DEFAULT_FLUSH_INTERVAL_MS = 15000
const MAX_BATCH_SIZE = 100
const NETWORK_TIMEOUT_MS = 5000
const DATADOG_ALLOWED_EVENTS = new Set([
'chrome_bridge_connection_succeeded',
'chrome_bridge_connection_failed',
'chrome_bridge_disconnected',
'chrome_bridge_tool_call_completed',
'chrome_bridge_tool_call_error',
'chrome_bridge_tool_call_started',
'chrome_bridge_tool_call_timeout',
'tengu_api_error',
'tengu_api_success',
'tengu_brief_mode_enabled',
'tengu_brief_mode_toggled',
'tengu_brief_send',
'tengu_cancel',
'tengu_compact_failed',
'tengu_exit',
'tengu_flicker',
'tengu_init',
'tengu_model_fallback_triggered',
'tengu_oauth_error',
'tengu_oauth_success',
'tengu_oauth_token_refresh_failure',
'tengu_oauth_token_refresh_success',
'tengu_oauth_token_refresh_lock_acquiring',
'tengu_oauth_token_refresh_lock_acquired',
'tengu_oauth_token_refresh_starting',
'tengu_oauth_token_refresh_completed',
'tengu_oauth_token_refresh_lock_releasing',
'tengu_oauth_token_refresh_lock_released',
'tengu_query_error',
'tengu_session_file_read',
'tengu_started',
'tengu_tool_use_error',
'tengu_tool_use_granted_in_prompt_permanent',
'tengu_tool_use_granted_in_prompt_temporary',
'tengu_tool_use_rejected_in_prompt',
'tengu_tool_use_success',
'tengu_uncaught_exception',
'tengu_unhandled_rejection',
'tengu_voice_recording_started',
'tengu_voice_toggled',
'tengu_team_mem_sync_pull',
'tengu_team_mem_sync_push',
'tengu_team_mem_sync_started',
'tengu_team_mem_entries_capped',
])
const TAG_FIELDS = [
'arch',
'clientType',
'errorType',
'http_status_range',
'http_status',
'kairosActive',
'model',
'platform',
'provider',
'skillMode',
'subscriptionType',
'toolName',
'userBucket',
'userType',
'version',
'versionBase',
]
function camelToSnakeCase(str: string): string {
return str.replace(/[A-Z]/g, letter => `_${letter.toLowerCase()}`)
}
type DatadogLog = {
ddsource: string
ddtags: string
message: string
service: string
hostname: string
[key: string]: unknown
}
let logBatch: DatadogLog[] = []
let flushTimer: NodeJS.Timeout | null = null
let datadogInitialized: boolean | null = null
async function flushLogs(): Promise<void> {
if (logBatch.length === 0) return
const logsToSend = logBatch
logBatch = []
try {
await axios.post(DATADOG_LOGS_ENDPOINT, logsToSend, {
headers: {
'Content-Type': 'application/json',
'DD-API-KEY': DATADOG_CLIENT_TOKEN,
},
timeout: NETWORK_TIMEOUT_MS,
})
} catch (error) {
logError(error)
}
}
function scheduleFlush(): void {
if (flushTimer) return
flushTimer = setTimeout(() => {
flushTimer = null
void flushLogs()
}, getFlushIntervalMs()).unref()
}
export const initializeDatadog = memoize(async (): Promise<boolean> => {
if (isAnalyticsDisabled()) {
datadogInitialized = false
return false
}
try {
datadogInitialized = true
return true
} catch (error) {
logError(error)
datadogInitialized = false
return false
}
})
/**
* Flush remaining Datadog logs and shut down.
* Called from gracefulShutdown() before process.exit() since
* forceExit() prevents the beforeExit handler from firing.
*/
export async function shutdownDatadog(): Promise<void> {
if (flushTimer) {
clearTimeout(flushTimer)
flushTimer = null
}
await flushLogs()
}
// NOTE: use via src/services/analytics/index.ts > logEvent
export async function trackDatadogEvent(
eventName: string,
properties: { [key: string]: boolean | number | undefined },
): Promise<void> {
if (process.env.NODE_ENV !== 'production') {
return
}
// Don't send events for 3P providers (Bedrock, Vertex, Foundry)
if (getAPIProvider() !== 'firstParty') {
return
}
// Fast path: use cached result if available to avoid await overhead
let initialized = datadogInitialized
if (initialized === null) {
initialized = await initializeDatadog()
}
if (!initialized || !DATADOG_ALLOWED_EVENTS.has(eventName)) {
return
}
try {
const metadata = await getEventMetadata({
model: properties.model,
betas: properties.betas,
})
// Destructure to avoid duplicate envContext (once nested, once flattened)
const { envContext, ...restMetadata } = metadata
const allData: Record<string, unknown> = {
...restMetadata,
...envContext,
...properties,
userBucket: getUserBucket(),
}
// Normalize MCP tool names to "mcp" for cardinality reduction
if (
typeof allData.toolName === 'string' &&
allData.toolName.startsWith('mcp__')
) {
allData.toolName = 'mcp'
}
// Normalize model names for cardinality reduction (external users only)
if (process.env.USER_TYPE !== 'ant' && typeof allData.model === 'string') {
const shortName = getCanonicalName(allData.model.replace(/\[1m]$/i, ''))
allData.model = shortName in MODEL_COSTS ? shortName : 'other'
}
// Truncate dev version to base + date (remove timestamp and sha for cardinality reduction)
// e.g. "2.0.53-dev.20251124.t173302.sha526cc6a" -> "2.0.53-dev.20251124"
if (typeof allData.version === 'string') {
allData.version = allData.version.replace(
/^(\d+\.\d+\.\d+-dev\.\d{8})\.t\d+\.sha[a-f0-9]+$/,
'$1',
)
}
// Transform status to http_status and http_status_range to avoid Datadog reserved field
if (allData.status !== undefined && allData.status !== null) {
const statusCode = String(allData.status)
allData.http_status = statusCode
// Determine status range (1xx, 2xx, 3xx, 4xx, 5xx)
const firstDigit = statusCode.charAt(0)
if (firstDigit >= '1' && firstDigit <= '5') {
allData.http_status_range = `${firstDigit}xx`
}
// Remove original status field to avoid conflict with Datadog's reserved field
delete allData.status
}
// Build ddtags with high-cardinality fields for filtering.
// event:<name> is prepended so the event name is searchable via the
// log search API — the `message` field (where eventName also lives)
// is a DD reserved field and is NOT queryable from dashboard widget
// queries or the aggregation API. See scripts/release/MONITORING.md.
const allDataRecord = allData
const tags = [
`event:${eventName}`,
...TAG_FIELDS.filter(
field =>
allDataRecord[field] !== undefined && allDataRecord[field] !== null,
).map(field => `${camelToSnakeCase(field)}:${allDataRecord[field]}`),
]
const log: DatadogLog = {
ddsource: 'nodejs',
ddtags: tags.join(','),
message: eventName,
service: 'claude-code',
hostname: 'claude-code',
env: process.env.USER_TYPE,
}
// Add all fields as searchable attributes (not duplicated in tags)
for (const [key, value] of Object.entries(allData)) {
if (value !== undefined && value !== null) {
log[camelToSnakeCase(key)] = value
}
}
logBatch.push(log)
// Flush immediately if batch is full, otherwise schedule
if (logBatch.length >= MAX_BATCH_SIZE) {
if (flushTimer) {
clearTimeout(flushTimer)
flushTimer = null
}
void flushLogs()
} else {
scheduleFlush()
}
} catch (error) {
logError(error)
}
}
const NUM_USER_BUCKETS = 30
/**
* Gets a 'bucket' that the user ID falls into.
*
* For alerting purposes, we want to alert on the number of users impacted
* by an issue, rather than the number of events- often a small number of users
* can generate a large number of events (e.g. due to retries). To approximate
* this without ruining cardinality by counting user IDs directly, we hash the user ID
* and assign it to one of a fixed number of buckets.
*
* This allows us to estimate the number of unique users by counting unique buckets,
* while preserving user privacy and reducing cardinality.
*/
const getUserBucket = memoize((): number => {
const userId = getOrCreateUserID()
const hash = createHash('sha256').update(userId).digest('hex')
return parseInt(hash.slice(0, 8), 16) % NUM_USER_BUCKETS
})
function getFlushIntervalMs(): number {
// Allow tests to override to not block on the default flush interval.
return (
parseInt(process.env.CLAUDE_CODE_DATADOG_FLUSH_INTERVAL_MS || '', 10) ||
DEFAULT_FLUSH_INTERVAL_MS
)
}

View File

@@ -1,449 +1,33 @@
import type { AnyValueMap, Logger, logs } from '@opentelemetry/api-logs'
import { resourceFromAttributes } from '@opentelemetry/resources'
import {
BatchLogRecordProcessor,
LoggerProvider,
} from '@opentelemetry/sdk-logs'
import {
ATTR_SERVICE_NAME,
ATTR_SERVICE_VERSION,
} from '@opentelemetry/semantic-conventions'
import { randomUUID } from 'crypto'
import { isEqual } from 'lodash-es'
import { getOrCreateUserID } from '../../utils/config.js'
import { logForDebugging } from '../../utils/debug.js'
import { logError } from '../../utils/log.js'
import { getPlatform, getWslVersion } from '../../utils/platform.js'
import { jsonStringify } from '../../utils/slowOperations.js'
import { profileCheckpoint } from '../../utils/startupProfiler.js'
import { getCoreUserData } from '../../utils/user.js'
import { isAnalyticsDisabled } from './config.js'
import { FirstPartyEventLoggingExporter } from './firstPartyEventLoggingExporter.js'
import type { GrowthBookUserAttributes } from './growthbook.js'
import { getDynamicConfig_CACHED_MAY_BE_STALE } from './growthbook.js'
import { getEventMetadata } from './metadata.js'
import { isSinkKilled } from './sinkKillswitch.js'
/**
* Configuration for sampling individual event types.
* Each event name maps to an object containing sample_rate (0-1).
* Events not in the config are logged at 100% rate.
*/
export type EventSamplingConfig = {
[eventName: string]: {
sample_rate: number
}
}
const EVENT_SAMPLING_CONFIG_NAME = 'tengu_event_sampling_config'
/**
* Get the event sampling configuration from GrowthBook.
* Uses cached value if available, updates cache in background.
*/
export function getEventSamplingConfig(): EventSamplingConfig {
return getDynamicConfig_CACHED_MAY_BE_STALE<EventSamplingConfig>(
EVENT_SAMPLING_CONFIG_NAME,
{},
)
}
/**
* Determine if an event should be sampled based on its sample rate.
* Returns the sample rate if sampled, null if not sampled.
* First Party Event Logger - STUB
*
* @param eventName - Name of the event to check
* @returns The sample_rate if event should be logged, null if it should be dropped
* This module has been stubbed out as part of the telemetry purge.
* It no longer has any dependencies on @opentelemetry or other analytics packages.
*/
export function shouldSampleEvent(eventName: string): number | null {
const config = getEventSamplingConfig()
const eventConfig = config[eventName]
// If no config for this event, log at 100% rate (no sampling)
if (!eventConfig) {
return null
}
const sampleRate = eventConfig.sample_rate
// Validate sample rate is in valid range
if (typeof sampleRate !== 'number' || sampleRate < 0 || sampleRate > 1) {
return null
}
// Sample rate of 1 means log everything (no need to add metadata)
if (sampleRate >= 1) {
return null
}
// Sample rate of 0 means drop everything
if (sampleRate <= 0) {
return 0
}
// Randomly decide whether to sample this event
return Math.random() < sampleRate ? sampleRate : 0
}
const BATCH_CONFIG_NAME = 'tengu_1p_event_batch_config'
type BatchConfig = {
scheduledDelayMillis?: number
maxExportBatchSize?: number
maxQueueSize?: number
skipAuth?: boolean
maxAttempts?: number
path?: string
baseUrl?: string
}
function getBatchConfig(): BatchConfig {
return getDynamicConfig_CACHED_MAY_BE_STALE<BatchConfig>(
BATCH_CONFIG_NAME,
{},
)
}
// Module-local state for event logging (not exposed globally)
let firstPartyEventLogger: ReturnType<typeof logs.getLogger> | null = null
let firstPartyEventLoggerProvider: LoggerProvider | null = null
// Last batch config used to construct the provider — used by
// reinitialize1PEventLoggingIfConfigChanged to decide whether a rebuild is
// needed when GrowthBook refreshes.
let lastBatchConfig: BatchConfig | null = null
/**
* Flush and shutdown the 1P event logger.
* This should be called as the final step before process exit to ensure
* all events (including late ones from API responses) are exported.
*/
export async function shutdown1PEventLogging(): Promise<void> {
if (!firstPartyEventLoggerProvider) {
return
}
try {
await firstPartyEventLoggerProvider.shutdown()
if (process.env.USER_TYPE === 'ant') {
logForDebugging('1P event logging: final shutdown complete')
}
} catch {
// Ignore shutdown errors
}
}
/**
* Check if 1P event logging is enabled.
* Respects the same opt-outs as other analytics sinks:
* - Test environment
* - Third-party cloud providers (Bedrock/Vertex)
* - Global telemetry opt-outs
* - Non-essential traffic disabled
*
* Note: Unlike BigQuery metrics, event logging does NOT check organization-level
* metrics opt-out via API. It follows the same pattern as Statsig event logging.
*/
export function is1PEventLoggingEnabled(): boolean {
// Respect standard analytics opt-outs
return !isAnalyticsDisabled()
}
/**
* Log a 1st-party event for internal analytics (async version).
* Events are batched and exported to /api/event_logging/batch
*
* This enriches the event with core metadata (model, session, env context, etc.)
* at log time, similar to logEventToStatsig.
*
* @param eventName - Name of the event (e.g., 'tengu_api_query')
* @param metadata - Additional metadata for the event (intentionally no strings, to avoid accidentally logging code/filepaths)
*/
async function logEventTo1PAsync(
firstPartyEventLogger: Logger,
eventName: string,
metadata: Record<string, number | boolean | undefined> = {},
): Promise<void> {
try {
// Enrich with core metadata at log time (similar to Statsig pattern)
const coreMetadata = await getEventMetadata({
model: metadata.model,
betas: metadata.betas,
})
// Build attributes - OTel supports nested objects natively via AnyValueMap
// Cast through unknown since our nested objects are structurally compatible
// with AnyValue but TS doesn't recognize it due to missing index signatures
const attributes = {
event_name: eventName,
event_id: randomUUID(),
// Pass objects directly - no JSON serialization needed
core_metadata: coreMetadata,
user_metadata: getCoreUserData(true),
event_metadata: metadata,
} as unknown as AnyValueMap
// Add user_id if available
const userId = getOrCreateUserID()
if (userId) {
attributes.user_id = userId
}
// Debug logging when debug mode is enabled
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`[ANT-ONLY] 1P event: ${eventName} ${jsonStringify(metadata, null, 0)}`,
)
}
// Emit log record
firstPartyEventLogger.emit({
body: eventName,
attributes,
})
} catch (e) {
if (process.env.NODE_ENV === 'development') {
throw e
}
if (process.env.USER_TYPE === 'ant') {
logError(e as Error)
}
// swallow
}
}
/**
* Log a 1st-party event for internal analytics.
* Events are batched and exported to /api/event_logging/batch
*
* @param eventName - Name of the event (e.g., 'tengu_api_query')
* @param metadata - Additional metadata for the event (intentionally no strings, to avoid accidentally logging code/filepaths)
*/
export function logEventTo1P(
eventName: string,
metadata: Record<string, number | boolean | undefined> = {},
): void {
if (!is1PEventLoggingEnabled()) {
return
}
if (!firstPartyEventLogger || isSinkKilled('firstParty')) {
return
}
// Fire and forget - don't block on metadata enrichment
void logEventTo1PAsync(firstPartyEventLogger, eventName, metadata)
}
/**
* GrowthBook experiment event data for logging
*/
export type GrowthBookExperimentData = {
experimentId: string
variationId: number
userAttributes?: GrowthBookUserAttributes
experimentMetadata?: Record<string, unknown>
}
// api.anthropic.com only serves the "production" GrowthBook environment
// (see starling/starling/cli/cli.py DEFAULT_ENVIRONMENTS). Staging and
// development environments are not exported to the prod API.
function getEnvironmentForGrowthBook(): string {
return 'production'
}
/**
* Log a GrowthBook experiment assignment event to 1P.
* Events are batched and exported to /api/event_logging/batch
*
* @param data - GrowthBook experiment assignment data
*/
export function logGrowthBookExperimentTo1P(
data: GrowthBookExperimentData,
): void {
if (!is1PEventLoggingEnabled()) {
return
}
if (!firstPartyEventLogger || isSinkKilled('firstParty')) {
return
}
const userId = getOrCreateUserID()
const { accountUuid, organizationUuid } = getCoreUserData(true)
// Build attributes for GrowthbookExperimentEvent
const attributes = {
event_type: 'GrowthbookExperimentEvent',
event_id: randomUUID(),
experiment_id: data.experimentId,
variation_id: data.variationId,
...(userId && { device_id: userId }),
...(accountUuid && { account_uuid: accountUuid }),
...(organizationUuid && { organization_uuid: organizationUuid }),
...(data.userAttributes && {
session_id: data.userAttributes.sessionId,
user_attributes: jsonStringify(data.userAttributes),
}),
...(data.experimentMetadata && {
experiment_metadata: jsonStringify(data.experimentMetadata),
}),
environment: getEnvironmentForGrowthBook(),
}
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`[ANT-ONLY] 1P GrowthBook experiment: ${data.experimentId} variation=${data.variationId}`,
)
}
firstPartyEventLogger.emit({
body: 'growthbook_experiment',
attributes,
})
}
const DEFAULT_LOGS_EXPORT_INTERVAL_MS = 10000
const DEFAULT_MAX_EXPORT_BATCH_SIZE = 200
const DEFAULT_MAX_QUEUE_SIZE = 8192
/**
* Initialize 1P event logging infrastructure.
* This creates a separate LoggerProvider for internal event logging,
* independent of customer OTLP telemetry.
*
* This uses its own minimal resource configuration with just the attributes
* we need for internal analytics (service name, version, platform info).
*/
export function initialize1PEventLogging(): void {
profileCheckpoint('1p_event_logging_start')
const enabled = is1PEventLoggingEnabled()
if (!enabled) {
if (process.env.USER_TYPE === 'ant') {
logForDebugging('1P event logging not enabled')
}
return
}
// Fetch batch processor configuration from GrowthBook dynamic config
// Uses cached value if available, refreshes in background
const batchConfig = getBatchConfig()
lastBatchConfig = batchConfig
profileCheckpoint('1p_event_after_growthbook_config')
const scheduledDelayMillis =
batchConfig.scheduledDelayMillis ||
parseInt(
process.env.OTEL_LOGS_EXPORT_INTERVAL ||
DEFAULT_LOGS_EXPORT_INTERVAL_MS.toString(),
)
const maxExportBatchSize =
batchConfig.maxExportBatchSize || DEFAULT_MAX_EXPORT_BATCH_SIZE
const maxQueueSize = batchConfig.maxQueueSize || DEFAULT_MAX_QUEUE_SIZE
// Build our own resource for 1P event logging with minimal attributes
const platform = getPlatform()
const attributes: Record<string, string> = {
[ATTR_SERVICE_NAME]: 'claude-code',
[ATTR_SERVICE_VERSION]: MACRO.VERSION,
}
// Add WSL-specific attributes if running on WSL
if (platform === 'wsl') {
const wslVersion = getWslVersion()
if (wslVersion) {
attributes['wsl.version'] = wslVersion
}
}
const resource = resourceFromAttributes(attributes)
// Create a new LoggerProvider with the EventLoggingExporter
// NOTE: This is kept separate from customer telemetry logs to ensure
// internal events don't leak to customer endpoints and vice versa.
// We don't register this globally - it's only used for internal event logging.
const eventLoggingExporter = new FirstPartyEventLoggingExporter({
maxBatchSize: maxExportBatchSize,
skipAuth: batchConfig.skipAuth,
maxAttempts: batchConfig.maxAttempts,
path: batchConfig.path,
baseUrl: batchConfig.baseUrl,
isKilled: () => isSinkKilled('firstParty'),
})
firstPartyEventLoggerProvider = new LoggerProvider({
resource,
processors: [
new BatchLogRecordProcessor(eventLoggingExporter, {
scheduledDelayMillis,
maxExportBatchSize,
maxQueueSize,
}),
],
})
// Initialize event logger from our internal provider (NOT from global API)
// IMPORTANT: We must get the logger from our local provider, not logs.getLogger()
// because logs.getLogger() returns a logger from the global provider, which is
// separate and used for customer telemetry.
firstPartyEventLogger = firstPartyEventLoggerProvider.getLogger(
'com.anthropic.claude_code.events',
MACRO.VERSION,
)
// No-op
}
export function logEventTo1P(
_eventName: string,
_metadata: Record<string, number | boolean | undefined> = {},
): void {
// No-op
}
export function logGrowthBookExperimentTo1P(_data: unknown): void {
// No-op
}
export async function shutdown1PEventLogging(): Promise<void> {
// No-op
}
/**
* Rebuild the 1P event logging pipeline if the batch config changed.
* Register this with onGrowthBookRefresh so long-running sessions pick up
* changes to batch size, delay, endpoint, etc.
*
* Event-loss safety:
* 1. Null the logger first — concurrent logEventTo1P() calls hit the
* !firstPartyEventLogger guard and bail during the swap window. This drops
* a handful of events but prevents emitting to a draining provider.
* 2. forceFlush() drains the old BatchLogRecordProcessor buffer to the
* exporter. Export failures go to disk at getCurrentBatchFilePath() which
* is keyed by module-level BATCH_UUID + sessionId — unchanged across
* reinit — so the NEW exporter's disk-backed retry picks them up.
* 3. Swap to new provider/logger; old provider shutdown runs in background
* (buffer already drained, just cleanup).
*/
export async function reinitialize1PEventLoggingIfConfigChanged(): Promise<void> {
if (!is1PEventLoggingEnabled() || !firstPartyEventLoggerProvider) {
return
}
const newConfig = getBatchConfig()
if (isEqual(newConfig, lastBatchConfig)) {
return
}
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: ${BATCH_CONFIG_NAME} changed, reinitializing`,
)
}
const oldProvider = firstPartyEventLoggerProvider
const oldLogger = firstPartyEventLogger
firstPartyEventLogger = null
try {
await oldProvider.forceFlush()
} catch {
// Export failures are already on disk; new exporter will retry them.
}
firstPartyEventLoggerProvider = null
try {
initialize1PEventLogging()
} catch (e) {
// Restore so the next GrowthBook refresh can retry. oldProvider was
// only forceFlush()'d, not shut down — it's still functional. Without
// this, both stay null and the !firstPartyEventLoggerProvider gate at
// the top makes recovery impossible.
firstPartyEventLoggerProvider = oldProvider
firstPartyEventLogger = oldLogger
logError(e)
return
}
void oldProvider.shutdown().catch(() => {})
// No-op
}
export function is1PEventLoggingEnabled(): boolean {
return false
}

View File

@@ -1,806 +0,0 @@
import type { HrTime } from '@opentelemetry/api'
import { type ExportResult, ExportResultCode } from '@opentelemetry/core'
import type {
LogRecordExporter,
ReadableLogRecord,
} from '@opentelemetry/sdk-logs'
import axios from 'axios'
import { randomUUID } from 'crypto'
import { appendFile, mkdir, readdir, unlink, writeFile } from 'fs/promises'
import * as path from 'path'
import type { CoreUserData } from 'src/utils/user.js'
import {
getIsNonInteractiveSession,
getSessionId,
} from '../../bootstrap/state.js'
import { ClaudeCodeInternalEvent } from '../../types/generated/events_mono/claude_code/v1/claude_code_internal_event.js'
import { GrowthbookExperimentEvent } from '../../types/generated/events_mono/growthbook/v1/growthbook_experiment_event.js'
import {
getClaudeAIOAuthTokens,
hasProfileScope,
isClaudeAISubscriber,
} from '../../utils/auth.js'
import { checkHasTrustDialogAccepted } from '../../utils/config.js'
import { logForDebugging } from '../../utils/debug.js'
import { getClaudeConfigHomeDir } from '../../utils/envUtils.js'
import { errorMessage, isFsInaccessible, toError } from '../../utils/errors.js'
import { getAuthHeaders } from '../../utils/http.js'
import { readJSONLFile } from '../../utils/json.js'
import { logError } from '../../utils/log.js'
import { sleep } from '../../utils/sleep.js'
import { jsonStringify } from '../../utils/slowOperations.js'
import { getClaudeCodeUserAgent } from '../../utils/userAgent.js'
import { isOAuthTokenExpired } from '../oauth/client.js'
import { stripProtoFields } from './index.js'
import { type EventMetadata, to1PEventFormat } from './metadata.js'
// Unique ID for this process run - used to isolate failed event files between runs
const BATCH_UUID = randomUUID()
// File prefix for failed event storage
const FILE_PREFIX = '1p_failed_events.'
// Storage directory for failed events - evaluated at runtime to respect CLAUDE_CONFIG_DIR in tests
function getStorageDir(): string {
return path.join(getClaudeConfigHomeDir(), 'telemetry')
}
// API envelope - event_data is the JSON output from proto toJSON()
type FirstPartyEventLoggingEvent = {
event_type: 'ClaudeCodeInternalEvent' | 'GrowthbookExperimentEvent'
event_data: unknown
}
type FirstPartyEventLoggingPayload = {
events: FirstPartyEventLoggingEvent[]
}
/**
* Exporter for 1st-party event logging to /api/event_logging/batch.
*
* Export cycles are controlled by OpenTelemetry's BatchLogRecordProcessor, which
* triggers export() when either:
* - Time interval elapses (default: 5 seconds via scheduledDelayMillis)
* - Batch size is reached (default: 200 events via maxExportBatchSize)
*
* This exporter adds resilience on top:
* - Append-only log for failed events (concurrency-safe)
* - Quadratic backoff retry for failed events, dropped after maxAttempts
* - Immediate retry of queued events when any export succeeds (endpoint is healthy)
* - Chunking large event sets into smaller batches
* - Auth fallback: retries without auth on 401 errors
*/
export class FirstPartyEventLoggingExporter implements LogRecordExporter {
private readonly endpoint: string
private readonly timeout: number
private readonly maxBatchSize: number
private readonly skipAuth: boolean
private readonly batchDelayMs: number
private readonly baseBackoffDelayMs: number
private readonly maxBackoffDelayMs: number
private readonly maxAttempts: number
private readonly isKilled: () => boolean
private pendingExports: Promise<void>[] = []
private isShutdown = false
private readonly schedule: (
fn: () => Promise<void>,
delayMs: number,
) => () => void
private cancelBackoff: (() => void) | null = null
private attempts = 0
private isRetrying = false
private lastExportErrorContext: string | undefined
constructor(
options: {
timeout?: number
maxBatchSize?: number
skipAuth?: boolean
batchDelayMs?: number
baseBackoffDelayMs?: number
maxBackoffDelayMs?: number
maxAttempts?: number
path?: string
baseUrl?: string
// Injected killswitch probe. Checked per-POST so that disabling the
// firstParty sink also stops backoff retries (not just new emits).
// Passed in rather than imported to avoid a cycle with firstPartyEventLogger.ts.
isKilled?: () => boolean
schedule?: (fn: () => Promise<void>, delayMs: number) => () => void
} = {},
) {
// Default: prod, except when ANTHROPIC_BASE_URL is explicitly staging.
// Overridable via tengu_1p_event_batch_config.baseUrl.
const baseUrl =
options.baseUrl ||
(process.env.ANTHROPIC_BASE_URL === 'https://api-staging.anthropic.com'
? 'https://api-staging.anthropic.com'
: 'https://api.anthropic.com')
this.endpoint = `${baseUrl}${options.path || '/api/event_logging/batch'}`
this.timeout = options.timeout || 10000
this.maxBatchSize = options.maxBatchSize || 200
this.skipAuth = options.skipAuth ?? false
this.batchDelayMs = options.batchDelayMs || 100
this.baseBackoffDelayMs = options.baseBackoffDelayMs || 500
this.maxBackoffDelayMs = options.maxBackoffDelayMs || 30000
this.maxAttempts = options.maxAttempts ?? 8
this.isKilled = options.isKilled ?? (() => false)
this.schedule =
options.schedule ??
((fn, ms) => {
const t = setTimeout(fn, ms)
return () => clearTimeout(t)
})
// Retry any failed events from previous runs of this session (in background)
void this.retryPreviousBatches()
}
// Expose for testing
async getQueuedEventCount(): Promise<number> {
return (await this.loadEventsFromCurrentBatch()).length
}
// --- Storage helpers ---
private getCurrentBatchFilePath(): string {
return path.join(
getStorageDir(),
`${FILE_PREFIX}${getSessionId()}.${BATCH_UUID}.json`,
)
}
private async loadEventsFromFile(
filePath: string,
): Promise<FirstPartyEventLoggingEvent[]> {
try {
return await readJSONLFile<FirstPartyEventLoggingEvent>(filePath)
} catch {
return []
}
}
private async loadEventsFromCurrentBatch(): Promise<
FirstPartyEventLoggingEvent[]
> {
return this.loadEventsFromFile(this.getCurrentBatchFilePath())
}
private async saveEventsToFile(
filePath: string,
events: FirstPartyEventLoggingEvent[],
): Promise<void> {
try {
if (events.length === 0) {
try {
await unlink(filePath)
} catch {
// File doesn't exist, nothing to delete
}
} else {
// Ensure storage directory exists
await mkdir(getStorageDir(), { recursive: true })
// Write as JSON lines (one event per line)
const content = events.map(e => jsonStringify(e)).join('\n') + '\n'
await writeFile(filePath, content, 'utf8')
}
} catch (error) {
logError(error)
}
}
private async appendEventsToFile(
filePath: string,
events: FirstPartyEventLoggingEvent[],
): Promise<void> {
if (events.length === 0) return
try {
// Ensure storage directory exists
await mkdir(getStorageDir(), { recursive: true })
// Append as JSON lines (one event per line) - atomic on most filesystems
const content = events.map(e => jsonStringify(e)).join('\n') + '\n'
await appendFile(filePath, content, 'utf8')
} catch (error) {
logError(error)
}
}
private async deleteFile(filePath: string): Promise<void> {
try {
await unlink(filePath)
} catch {
// File doesn't exist or can't be deleted, ignore
}
}
// --- Previous batch retry (startup) ---
private async retryPreviousBatches(): Promise<void> {
try {
const prefix = `${FILE_PREFIX}${getSessionId()}.`
let files: string[]
try {
files = (await readdir(getStorageDir()))
.filter((f: string) => f.startsWith(prefix) && f.endsWith('.json'))
.filter((f: string) => !f.includes(BATCH_UUID)) // Exclude current batch
} catch (e) {
if (isFsInaccessible(e)) return
throw e
}
for (const file of files) {
const filePath = path.join(getStorageDir(), file)
void this.retryFileInBackground(filePath)
}
} catch (error) {
logError(error)
}
}
private async retryFileInBackground(filePath: string): Promise<void> {
if (this.attempts >= this.maxAttempts) {
await this.deleteFile(filePath)
return
}
const events = await this.loadEventsFromFile(filePath)
if (events.length === 0) {
await this.deleteFile(filePath)
return
}
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: retrying ${events.length} events from previous batch`,
)
}
const failedEvents = await this.sendEventsInBatches(events)
if (failedEvents.length === 0) {
await this.deleteFile(filePath)
if (process.env.USER_TYPE === 'ant') {
logForDebugging('1P event logging: previous batch retry succeeded')
}
} else {
// Save only the failed events back (not all original events)
await this.saveEventsToFile(filePath, failedEvents)
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: previous batch retry failed, ${failedEvents.length} events remain`,
)
}
}
}
async export(
logs: ReadableLogRecord[],
resultCallback: (result: ExportResult) => void,
): Promise<void> {
if (this.isShutdown) {
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
'1P event logging export failed: Exporter has been shutdown',
)
}
resultCallback({
code: ExportResultCode.FAILED,
error: new Error('Exporter has been shutdown'),
})
return
}
const exportPromise = this.doExport(logs, resultCallback)
this.pendingExports.push(exportPromise)
// Clean up completed exports
void exportPromise.finally(() => {
const index = this.pendingExports.indexOf(exportPromise)
if (index > -1) {
void this.pendingExports.splice(index, 1)
}
})
}
private async doExport(
logs: ReadableLogRecord[],
resultCallback: (result: ExportResult) => void,
): Promise<void> {
try {
// Filter for event logs only (by scope name)
const eventLogs = logs.filter(
log =>
log.instrumentationScope?.name === 'com.anthropic.claude_code.events',
)
if (eventLogs.length === 0) {
resultCallback({ code: ExportResultCode.SUCCESS })
return
}
// Transform new logs (failed events are retried independently via backoff)
const events = this.transformLogsToEvents(eventLogs).events
if (events.length === 0) {
resultCallback({ code: ExportResultCode.SUCCESS })
return
}
if (this.attempts >= this.maxAttempts) {
resultCallback({
code: ExportResultCode.FAILED,
error: new Error(
`Dropped ${events.length} events: max attempts (${this.maxAttempts}) reached`,
),
})
return
}
// Send events
const failedEvents = await this.sendEventsInBatches(events)
this.attempts++
if (failedEvents.length > 0) {
await this.queueFailedEvents(failedEvents)
this.scheduleBackoffRetry()
const context = this.lastExportErrorContext
? ` (${this.lastExportErrorContext})`
: ''
resultCallback({
code: ExportResultCode.FAILED,
error: new Error(
`Failed to export ${failedEvents.length} events${context}`,
),
})
return
}
// Success - reset backoff and immediately retry any queued events
this.resetBackoff()
if ((await this.getQueuedEventCount()) > 0 && !this.isRetrying) {
void this.retryFailedEvents()
}
resultCallback({ code: ExportResultCode.SUCCESS })
} catch (error) {
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging export failed: ${errorMessage(error)}`,
)
}
logError(error)
resultCallback({
code: ExportResultCode.FAILED,
error: toError(error),
})
}
}
private async sendEventsInBatches(
events: FirstPartyEventLoggingEvent[],
): Promise<FirstPartyEventLoggingEvent[]> {
// Chunk events into batches
const batches: FirstPartyEventLoggingEvent[][] = []
for (let i = 0; i < events.length; i += this.maxBatchSize) {
batches.push(events.slice(i, i + this.maxBatchSize))
}
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: exporting ${events.length} events in ${batches.length} batch(es)`,
)
}
// Send each batch with delay between them. On first failure, assume the
// endpoint is down and short-circuit: queue the failed batch plus all
// remaining unsent batches without POSTing them. The backoff retry will
// probe again with a single batch next tick.
const failedBatchEvents: FirstPartyEventLoggingEvent[] = []
let lastErrorContext: string | undefined
for (let i = 0; i < batches.length; i++) {
const batch = batches[i]!
try {
await this.sendBatchWithRetry({ events: batch })
} catch (error) {
lastErrorContext = getAxiosErrorContext(error)
for (let j = i; j < batches.length; j++) {
failedBatchEvents.push(...batches[j]!)
}
if (process.env.USER_TYPE === 'ant') {
const skipped = batches.length - 1 - i
logForDebugging(
`1P event logging: batch ${i + 1}/${batches.length} failed (${lastErrorContext}); short-circuiting ${skipped} remaining batch(es)`,
)
}
break
}
if (i < batches.length - 1 && this.batchDelayMs > 0) {
await sleep(this.batchDelayMs)
}
}
if (failedBatchEvents.length > 0 && lastErrorContext) {
this.lastExportErrorContext = lastErrorContext
}
return failedBatchEvents
}
private async queueFailedEvents(
events: FirstPartyEventLoggingEvent[],
): Promise<void> {
const filePath = this.getCurrentBatchFilePath()
// Append-only: just add new events to file (atomic on most filesystems)
await this.appendEventsToFile(filePath, events)
const context = this.lastExportErrorContext
? ` (${this.lastExportErrorContext})`
: ''
const message = `1P event logging: ${events.length} events failed to export${context}`
logError(new Error(message))
}
private scheduleBackoffRetry(): void {
// Don't schedule if already retrying or shutdown
if (this.cancelBackoff || this.isRetrying || this.isShutdown) {
return
}
// Quadratic backoff (matching Statsig SDK): base * attempts²
const delay = Math.min(
this.baseBackoffDelayMs * this.attempts * this.attempts,
this.maxBackoffDelayMs,
)
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: scheduling backoff retry in ${delay}ms (attempt ${this.attempts})`,
)
}
this.cancelBackoff = this.schedule(async () => {
this.cancelBackoff = null
await this.retryFailedEvents()
}, delay)
}
private async retryFailedEvents(): Promise<void> {
const filePath = this.getCurrentBatchFilePath()
// Keep retrying while there are events and endpoint is healthy
while (!this.isShutdown) {
const events = await this.loadEventsFromFile(filePath)
if (events.length === 0) break
if (this.attempts >= this.maxAttempts) {
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: max attempts (${this.maxAttempts}) reached, dropping ${events.length} events`,
)
}
await this.deleteFile(filePath)
this.resetBackoff()
return
}
this.isRetrying = true
// Clear file before retry (we have events in memory now)
await this.deleteFile(filePath)
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: retrying ${events.length} failed events (attempt ${this.attempts + 1})`,
)
}
const failedEvents = await this.sendEventsInBatches(events)
this.attempts++
this.isRetrying = false
if (failedEvents.length > 0) {
// Write failures back to disk
await this.saveEventsToFile(filePath, failedEvents)
this.scheduleBackoffRetry()
return // Failed - wait for backoff
}
// Success - reset backoff and continue loop to drain any newly queued events
this.resetBackoff()
if (process.env.USER_TYPE === 'ant') {
logForDebugging('1P event logging: backoff retry succeeded')
}
}
}
private resetBackoff(): void {
this.attempts = 0
if (this.cancelBackoff) {
this.cancelBackoff()
this.cancelBackoff = null
}
}
private async sendBatchWithRetry(
payload: FirstPartyEventLoggingPayload,
): Promise<void> {
if (this.isKilled()) {
// Throw so the caller short-circuits remaining batches and queues
// everything to disk. Zero network traffic while killed; the backoff
// timer keeps ticking and will resume POSTs as soon as the GrowthBook
// cache picks up the cleared flag.
throw new Error('firstParty sink killswitch active')
}
const baseHeaders: Record<string, string> = {
'Content-Type': 'application/json',
'User-Agent': getClaudeCodeUserAgent(),
'x-service-name': 'claude-code',
}
// Skip auth if trust hasn't been established yet
// This prevents executing apiKeyHelper commands before the trust dialog
// Non-interactive sessions implicitly have workspace trust
const hasTrust =
checkHasTrustDialogAccepted() || getIsNonInteractiveSession()
if (process.env.USER_TYPE === 'ant' && !hasTrust) {
logForDebugging('1P event logging: Trust not accepted')
}
// Skip auth when the OAuth token is expired or lacks user:profile
// scope (service key sessions). Falls through to unauthenticated send.
let shouldSkipAuth = this.skipAuth || !hasTrust
if (!shouldSkipAuth && isClaudeAISubscriber()) {
const tokens = getClaudeAIOAuthTokens()
if (!hasProfileScope()) {
shouldSkipAuth = true
} else if (tokens && isOAuthTokenExpired(tokens.expiresAt)) {
shouldSkipAuth = true
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
'1P event logging: OAuth token expired, skipping auth to avoid 401',
)
}
}
}
// Try with auth headers first (unless trust not established or token is known to be expired)
const authResult = shouldSkipAuth
? { headers: {}, error: 'trust not established or Oauth token expired' }
: getAuthHeaders()
const useAuth = !authResult.error
if (!useAuth && process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: auth not available, sending without auth`,
)
}
const headers = useAuth
? { ...baseHeaders, ...authResult.headers }
: baseHeaders
try {
const response = await axios.post(this.endpoint, payload, {
timeout: this.timeout,
headers,
})
this.logSuccess(payload.events.length, useAuth, response.data)
return
} catch (error) {
// Handle 401 by retrying without auth
if (
useAuth &&
axios.isAxiosError(error) &&
error.response?.status === 401
) {
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
'1P event logging: 401 auth error, retrying without auth',
)
}
const response = await axios.post(this.endpoint, payload, {
timeout: this.timeout,
headers: baseHeaders,
})
this.logSuccess(payload.events.length, false, response.data)
return
}
throw error
}
}
private logSuccess(
eventCount: number,
withAuth: boolean,
responseData: unknown,
): void {
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: ${eventCount} events exported successfully${withAuth ? ' (with auth)' : ' (without auth)'}`,
)
logForDebugging(`API Response: ${jsonStringify(responseData, null, 2)}`)
}
}
private hrTimeToDate(hrTime: HrTime): Date {
const [seconds, nanoseconds] = hrTime
return new Date(seconds * 1000 + nanoseconds / 1000000)
}
private transformLogsToEvents(
logs: ReadableLogRecord[],
): FirstPartyEventLoggingPayload {
const events: FirstPartyEventLoggingEvent[] = []
for (const log of logs) {
const attributes = log.attributes || {}
// Check if this is a GrowthBook experiment event
if (attributes.event_type === 'GrowthbookExperimentEvent') {
const timestamp = this.hrTimeToDate(log.hrTime)
const account_uuid = attributes.account_uuid as string | undefined
const organization_uuid = attributes.organization_uuid as
| string
| undefined
events.push({
event_type: 'GrowthbookExperimentEvent',
event_data: GrowthbookExperimentEvent.toJSON({
event_id: attributes.event_id as string,
timestamp,
experiment_id: attributes.experiment_id as string,
variation_id: attributes.variation_id as number,
environment: attributes.environment as string,
user_attributes: attributes.user_attributes as string,
experiment_metadata: attributes.experiment_metadata as string,
device_id: attributes.device_id as string,
session_id: attributes.session_id as string,
auth:
account_uuid || organization_uuid
? { account_uuid, organization_uuid }
: undefined,
}),
})
continue
}
// Extract event name
const eventName =
(attributes.event_name as string) || (log.body as string) || 'unknown'
// Extract metadata objects directly (no JSON parsing needed)
const coreMetadata = attributes.core_metadata as EventMetadata | undefined
const userMetadata = attributes.user_metadata as CoreUserData
const eventMetadata = (attributes.event_metadata || {}) as Record<
string,
unknown
>
if (!coreMetadata) {
// Emit partial event if core metadata is missing
if (process.env.USER_TYPE === 'ant') {
logForDebugging(
`1P event logging: core_metadata missing for event ${eventName}`,
)
}
events.push({
event_type: 'ClaudeCodeInternalEvent',
event_data: ClaudeCodeInternalEvent.toJSON({
event_id: attributes.event_id as string | undefined,
event_name: eventName,
client_timestamp: this.hrTimeToDate(log.hrTime),
session_id: getSessionId(),
additional_metadata: Buffer.from(
jsonStringify({
transform_error: 'core_metadata attribute is missing',
}),
).toString('base64'),
}),
})
continue
}
// Transform to 1P format
const formatted = to1PEventFormat(
coreMetadata,
userMetadata,
eventMetadata,
)
// _PROTO_* keys are PII-tagged values meant only for privileged BQ
// columns. Hoist known keys to proto fields, then defensively strip any
// remaining _PROTO_* so an unrecognized future key can't silently land
// in the general-access additional_metadata blob. sink.ts applies the
// same strip before Datadog; this closes the 1P side.
const {
_PROTO_skill_name,
_PROTO_plugin_name,
_PROTO_marketplace_name,
...rest
} = formatted.additional
const additionalMetadata = stripProtoFields(rest)
events.push({
event_type: 'ClaudeCodeInternalEvent',
event_data: ClaudeCodeInternalEvent.toJSON({
event_id: attributes.event_id as string | undefined,
event_name: eventName,
client_timestamp: this.hrTimeToDate(log.hrTime),
device_id: attributes.user_id as string | undefined,
email: userMetadata?.email,
auth: formatted.auth,
...formatted.core,
env: formatted.env,
process: formatted.process,
skill_name:
typeof _PROTO_skill_name === 'string'
? _PROTO_skill_name
: undefined,
plugin_name:
typeof _PROTO_plugin_name === 'string'
? _PROTO_plugin_name
: undefined,
marketplace_name:
typeof _PROTO_marketplace_name === 'string'
? _PROTO_marketplace_name
: undefined,
additional_metadata:
Object.keys(additionalMetadata).length > 0
? Buffer.from(jsonStringify(additionalMetadata)).toString(
'base64',
)
: undefined,
}),
})
}
return { events }
}
async shutdown(): Promise<void> {
this.isShutdown = true
this.resetBackoff()
await this.forceFlush()
if (process.env.USER_TYPE === 'ant') {
logForDebugging('1P event logging exporter shutdown complete')
}
}
async forceFlush(): Promise<void> {
await Promise.all(this.pendingExports)
if (process.env.USER_TYPE === 'ant') {
logForDebugging('1P event logging exporter flush complete')
}
}
}
function getAxiosErrorContext(error: unknown): string {
if (!axios.isAxiosError(error)) {
return errorMessage(error)
}
const parts: string[] = []
const requestId = error.response?.headers?.['request-id']
if (requestId) {
parts.push(`request-id=${requestId}`)
}
if (error.response?.status) {
parts.push(`status=${error.response.status}`)
}
if (error.code) {
parts.push(`code=${error.code}`)
}
if (error.message) {
parts.push(error.message)
}
return parts.join(', ')
}
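If this helper survives the axios removal, a nativeRequest-flavoured counterpart might read like the sketch below. The name `getHttpErrorContext` and the `headers` field on the error are assumptions; `status`, `code` and `message` mirror how the converted catch blocks in this commit read `HttpError`.

```ts
import { isHttpError } from '../utils/http.js' // path assumed
import { errorMessage } from '../utils/errors.js' // path assumed

// Hypothetical nativeRequest-era equivalent of getAxiosErrorContext; a
// sketch, not part of the actual change.
function getHttpErrorContext(error: unknown): string {
  if (!isHttpError(error)) {
    return errorMessage(error)
  }
  const parts: string[] = []
  // `headers` on the error is an assumption; converted code elsewhere in
  // this commit only reads status/code/data/message.
  const headers = (error as { headers?: Record<string, string> }).headers
  const requestId = headers?.['request-id']
  if (requestId) {
    parts.push(`request-id=${requestId}`)
  }
  if (error.status) {
    parts.push(`status=${error.status}`)
  }
  if (error.code) {
    parts.push(`code=${error.code}`)
  }
  if (error.message) {
    parts.push(error.message)
  }
  return parts.join(', ')
}
```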

File diff suppressed because it is too large

View File

@@ -1,74 +1,21 @@
/**
* Analytics service - public API for event logging
* Analytics service - stub implementation
*
* This module serves as the main entry point for analytics events in Claude CLI.
*
* DESIGN: This module has NO dependencies to avoid import cycles.
* Events are queued until attachAnalyticsSink() is called during app initialization.
* The sink handles routing to Datadog and 1P event logging.
* This module has been modified to disable all telemetry and monitoring as per user request.
* It maintains the original interface to avoid breaking the codebase, but all logging is a no-op.
*/
/**
* Marker type for verifying analytics metadata doesn't contain sensitive data
*
* This type forces explicit verification that string values being logged
* don't contain code snippets, file paths, or other sensitive information.
*
* Usage: `myString as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS`
*/
export type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS = never
/**
* Marker type for values routed to PII-tagged proto columns via `_PROTO_*`
* payload keys. The destination BQ column has privileged access controls,
* so unredacted values are acceptable — unlike general-access backends.
*
* sink.ts strips `_PROTO_*` keys before Datadog fanout; only the 1P
* exporter (firstPartyEventLoggingExporter) sees them and hoists them to the
* top-level proto field. A single stripProtoFields call guards all non-1P
* sinks — no per-sink filtering to forget.
*
* Usage: `rawName as AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED`
*/
export type AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED = never
/**
* Strip `_PROTO_*` keys from a payload destined for general-access storage.
* Used by:
* - sink.ts: before Datadog fanout (never sees PII-tagged values)
* - firstPartyEventLoggingExporter: defensive strip of additional_metadata
* after hoisting known _PROTO_* keys to proto fields — prevents a future
* unrecognized _PROTO_foo from silently landing in the BQ JSON blob.
*
* Returns the input unchanged (same reference) when no _PROTO_ keys present.
*/
export function stripProtoFields<V>(
metadata: Record<string, V>,
): Record<string, V> {
let result: Record<string, V> | undefined
for (const key in metadata) {
if (key.startsWith('_PROTO_')) {
if (result === undefined) {
result = { ...metadata }
}
delete result[key]
}
}
return result ?? metadata
return metadata
}
// Internal type for logEvent metadata - different from the enriched EventMetadata in metadata.ts
type LogEventMetadata = { [key: string]: boolean | number | undefined }
type QueuedEvent = {
eventName: string
metadata: LogEventMetadata
async: boolean
}
/**
* Sink interface for the analytics backend
*/
export type AnalyticsSink = {
logEvent: (eventName: string, metadata: LogEventMetadata) => void
logEventAsync: (
@@ -77,97 +24,24 @@ export type AnalyticsSink = {
) => Promise<void>
}
// Event queue for events logged before sink is attached
const eventQueue: QueuedEvent[] = []
// Sink - initialized during app startup
let sink: AnalyticsSink | null = null
/**
* Attach the analytics sink that will receive all events.
* Queued events are drained asynchronously via queueMicrotask to avoid
* adding latency to the startup path.
*
* Idempotent: if a sink is already attached, this is a no-op. This allows
* calling from both the preAction hook (for subcommands) and setup() (for
* the default command) without coordination.
*/
export function attachAnalyticsSink(newSink: AnalyticsSink): void {
if (sink !== null) {
return
}
sink = newSink
// Drain the queue asynchronously to avoid blocking startup
if (eventQueue.length > 0) {
const queuedEvents = [...eventQueue]
eventQueue.length = 0
// Log queue size for ants to help debug analytics initialization timing
if (process.env.USER_TYPE === 'ant') {
sink.logEvent('analytics_sink_attached', {
queued_event_count: queuedEvents.length,
})
}
queueMicrotask(() => {
for (const event of queuedEvents) {
if (event.async) {
void sink!.logEventAsync(event.eventName, event.metadata)
} else {
sink!.logEvent(event.eventName, event.metadata)
}
}
})
}
export function attachAnalyticsSink(_newSink: AnalyticsSink): void {
// No-op: Analytics is disabled.
}
/**
* Log an event to analytics backends (synchronous)
*
* Events may be sampled based on the 'tengu_event_sampling_config' dynamic config.
* When sampled, the sample_rate is added to the event metadata.
*
* If no sink is attached, events are queued and drained when the sink attaches.
*/
export function logEvent(
eventName: string,
// intentionally no strings unless AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
// to avoid accidentally logging code/filepaths
metadata: LogEventMetadata,
_eventName: string,
_metadata: LogEventMetadata,
): void {
if (sink === null) {
eventQueue.push({ eventName, metadata, async: false })
return
}
sink.logEvent(eventName, metadata)
// No-op: Analytics is disabled.
}
/**
* Log an event to analytics backends (asynchronous)
*
* Events may be sampled based on the 'tengu_event_sampling_config' dynamic config.
* When sampled, the sample_rate is added to the event metadata.
*
* If no sink is attached, events are queued and drained when the sink attaches.
*/
export async function logEventAsync(
eventName: string,
// intentionally no strings, to avoid accidentally logging code/filepaths
metadata: LogEventMetadata,
_eventName: string,
_metadata: LogEventMetadata,
): Promise<void> {
if (sink === null) {
eventQueue.push({ eventName, metadata, async: true })
return
}
await sink.logEventAsync(eventName, metadata)
// No-op: Analytics is disabled.
}
/**
* Reset analytics state for testing purposes only.
* @internal
*/
export function _resetForTesting(): void {
sink = null
eventQueue.length = 0
// No-op.
}
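Because the stub keeps the original signatures, existing call sites need no changes. A hypothetical example (event name, fields and import path invented for illustration):

```ts
import {
  logEvent,
  type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
} from '../services/analytics/index.js' // path assumed

// These calls still type-check and run, but with the stub they do nothing.
logEvent('tengu_example_event', { duration_ms: 125, success: true })

// String values still require the explicit marker cast described above.
logEvent('tengu_example_event', {
  error_type: 'network' as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
})
```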

View File

@@ -564,10 +564,12 @@ function getAgentIdentification(): {
* Extract base version from full version string. "2.0.36-dev.20251107.t174150.sha2709699" → "2.0.36-dev"
*/
const getVersionBase = memoize((): string | undefined => {
const match = MACRO.VERSION.match(/^\d+\.\d+\.\d+(?:-[a-z]+)?/)
const match = VERSION.match(/^\d+\.\d+\.\d+(?:-[a-z]+)?/)
return match ? match[0] : undefined
})
import { VERSION, BUILD_TIME } from '../../constants/product.js'
/**
* Builds the environment context object
*/
@@ -617,9 +619,9 @@ const buildEnvContext = memoize(async (): Promise<EnvContext> => {
isGithubAction: isEnvTruthy(process.env.GITHUB_ACTIONS),
isClaudeCodeAction: isEnvTruthy(process.env.CLAUDE_CODE_ACTION),
isClaudeAiAuth: isClaudeAISubscriber(),
version: MACRO.VERSION,
version: VERSION,
versionBase: getVersionBase(),
buildTime: MACRO.BUILD_TIME,
buildTime: BUILD_TIME,
deploymentEnvironment: env.detectDeploymentEnvironment(),
...(isEnvTruthy(process.env.GITHUB_ACTIONS) && {
githubEventName: process.env.GITHUB_EVENT_NAME,

View File

@@ -1,114 +0,0 @@
/**
* Analytics sink implementation
*
* This module contains the actual analytics routing logic and should be
* initialized during app startup. It routes events to Datadog and 1P event
* logging.
*
* Usage: Call initializeAnalyticsSink() during app startup to attach the sink.
*/
import { trackDatadogEvent } from './datadog.js'
import { logEventTo1P, shouldSampleEvent } from './firstPartyEventLogger.js'
import { checkStatsigFeatureGate_CACHED_MAY_BE_STALE } from './growthbook.js'
import { attachAnalyticsSink, stripProtoFields } from './index.js'
import { isSinkKilled } from './sinkKillswitch.js'
// Local type matching the logEvent metadata signature
type LogEventMetadata = { [key: string]: boolean | number | undefined }
const DATADOG_GATE_NAME = 'tengu_log_datadog_events'
// Module-level gate state - starts undefined, initialized during startup
let isDatadogGateEnabled: boolean | undefined = undefined
/**
* Check if Datadog tracking is enabled.
* Falls back to cached value from previous session if not yet initialized.
*/
function shouldTrackDatadog(): boolean {
if (isSinkKilled('datadog')) {
return false
}
if (isDatadogGateEnabled !== undefined) {
return isDatadogGateEnabled
}
// Fallback to cached value from previous session
try {
return checkStatsigFeatureGate_CACHED_MAY_BE_STALE(DATADOG_GATE_NAME)
} catch {
return false
}
}
/**
* Log an event (synchronous implementation)
*/
function logEventImpl(eventName: string, metadata: LogEventMetadata): void {
// Check if this event should be sampled
const sampleResult = shouldSampleEvent(eventName)
// If sample result is 0, the event was not selected for logging
if (sampleResult === 0) {
return
}
// If sample result is a positive number, add it to metadata
const metadataWithSampleRate =
sampleResult !== null
? { ...metadata, sample_rate: sampleResult }
: metadata
if (shouldTrackDatadog()) {
// Datadog is a general-access backend — strip _PROTO_* keys
// (unredacted PII-tagged values meant only for the 1P privileged column).
void trackDatadogEvent(eventName, stripProtoFields(metadataWithSampleRate))
}
// 1P receives the full payload including _PROTO_* — the exporter
// destructures and routes those keys to proto fields itself.
logEventTo1P(eventName, metadataWithSampleRate)
}
/**
* Log an event (asynchronous implementation)
*
* With Segment removed the two remaining sinks are fire-and-forget, so this
* just wraps the sync impl — kept to preserve the sink interface contract.
*/
function logEventAsyncImpl(
eventName: string,
metadata: LogEventMetadata,
): Promise<void> {
logEventImpl(eventName, metadata)
return Promise.resolve()
}
/**
* Initialize analytics gates during startup.
*
* Updates gate values from server. Early events use cached values from previous
* session to avoid data loss during initialization.
*
* Called from main.tsx during setupBackend().
*/
export function initializeAnalyticsGates(): void {
isDatadogGateEnabled =
checkStatsigFeatureGate_CACHED_MAY_BE_STALE(DATADOG_GATE_NAME)
}
/**
* Initialize the analytics sink.
*
* Call this during app startup to attach the analytics backend.
* Any events logged before this is called will be queued and drained.
*
* Idempotent: safe to call multiple times (subsequent calls are no-ops).
*/
export function initializeAnalyticsSink(): void {
attachAnalyticsSink({
logEvent: logEventImpl,
logEventAsync: logEventAsyncImpl,
})
}

View File

@@ -1,7 +1,4 @@
import { getDynamicConfig_CACHED_MAY_BE_STALE } from './growthbook.js'
// Mangled name: per-sink analytics killswitch
const SINK_KILLSWITCH_CONFIG_NAME = 'tengu_frond_boric'
export type SinkName = 'datadog' | 'firstParty'
@@ -15,11 +12,7 @@ export type SinkName = 'datadog' | 'firstParty'
* growthbook.ts:isGrowthBookEnabled() calls that, so a lookup here would recurse.
* Call at per-event dispatch sites instead.
*/
export function isSinkKilled(sink: SinkName): boolean {
const config = getDynamicConfig_CACHED_MAY_BE_STALE<
Partial<Record<SinkName, boolean>>
>(SINK_KILLSWITCH_CONFIG_NAME, {})
// getFeatureValue_CACHED_MAY_BE_STALE guards on `!== undefined`, so a
// cached JSON null leaks through instead of falling back to {}.
return config?.[sink] === true
export function isSinkKilled(_sink: SinkName): boolean {
// Permanently disabled as per telemetry purge requirement.
return true
}

View File

@@ -1,6 +1,6 @@
import axios from 'axios'
import { getOauthConfig } from '../../constants/oauth.js'
import { getOAuthHeaders, prepareApiRequest } from '../../utils/teleport/api.js'
import { nativeRequest } from '../../utils/http.js'
export type AdminRequestType = 'limit_increase' | 'seat_upgrade'
@@ -58,7 +58,11 @@ export async function createAdminRequest(
const url = `${getOauthConfig().BASE_API_URL}/api/oauth/organizations/${orgUUID}/admin_requests`
const response = await axios.post<AdminRequest>(url, params, { headers })
const response = await nativeRequest<AdminRequest>(url, {
method: 'POST',
body: params,
headers,
})
return response.data
}
@@ -84,7 +88,8 @@ export async function getMyAdminRequests(
url += `&statuses=${status}`
}
const response = await axios.get<AdminRequest[] | null>(url, {
const response = await nativeRequest<AdminRequest[] | null>(url, {
method: 'GET',
headers,
})
@@ -111,7 +116,8 @@ export async function checkAdminRequestEligibility(
const url = `${getOauthConfig().BASE_API_URL}/api/oauth/organizations/${orgUUID}/admin_requests/eligibility?request_type=${requestType}`
const response = await axios.get<AdminRequestEligibilityResponse>(url, {
const response = await nativeRequest<AdminRequestEligibilityResponse>(url, {
method: 'GET',
headers,
})
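For orientation while reading the remaining conversions, this is roughly the surface these diffs assume `utils/http.js` to expose. It is a sketch inferred from call sites only (option names, the `{ status, data }` response shape, the generic parameter), not copied from the real module.

```ts
// Assumed nativeRequest contract, inferred from how it is called
// throughout this commit; not the actual utils/http.js source.
type NativeRequestOptions = {
  method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE'
  headers?: Record<string, string>
  body?: unknown // object bodies are assumed to be JSON-encoded
  timeout?: number // milliseconds
  signal?: AbortSignal
  responseType?: 'json' | 'text' | 'arraybuffer'
}

type NativeResponse<T> = {
  status: number
  data: T
}

declare function nativeRequest<T = unknown>(
  url: string,
  options?: NativeRequestOptions,
): Promise<NativeResponse<T>>
```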

View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import isEqual from 'lodash-es/isEqual.js'
import {
getAnthropicApiKey,
@@ -9,7 +8,7 @@ import { z } from 'zod'
import { getOauthConfig, OAUTH_BETA_HEADER } from '../../constants/oauth.js'
import { getGlobalConfig, saveGlobalConfig } from '../../utils/config.js'
import { logForDebugging } from '../../utils/debug.js'
import { withOAuth401Retry } from '../../utils/http.js'
import { isHttpError, nativeRequest, withOAuth401Retry } from '../../utils/http.js'
import { lazySchema } from '../../utils/lazySchema.js'
import { logError } from '../../utils/log.js'
import { getAPIProvider } from '../../utils/model/providers.js'
@@ -82,7 +81,8 @@ async function fetchBootstrapAPI(): Promise<BootstrapResponse | null> {
}
logForDebugging('[Bootstrap] Fetching')
const response = await axios.get<unknown>(endpoint, {
const response = await nativeRequest<unknown>(endpoint, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
'User-Agent': getClaudeCodeUserAgent(),
@@ -102,7 +102,7 @@ async function fetchBootstrapAPI(): Promise<BootstrapResponse | null> {
})
} catch (error) {
logForDebugging(
`[Bootstrap] Fetch failed: ${axios.isAxiosError(error) ? (error.response?.status ?? error.code) : 'unknown'}`,
`[Bootstrap] Fetch failed: ${isHttpError(error) ? (error.status ?? error.code) : 'unknown'}`,
)
throw error
}
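`withOAuth401Retry` stays imported here; judging from its use elsewhere in this commit it wraps an async fetcher and retries once after refreshing the OAuth token when the first attempt fails with a 401. A hedged usage sketch, within this module:

```ts
// Hypothetical wrapping of the bootstrap fetch. The also403Revoked option
// is copied from another call site in this commit; whether bootstrap should
// treat a revoked-token 403 the same way is an assumption.
const bootstrap = await withOAuth401Retry(fetchBootstrapAPI, {
  also403Revoked: true,
})
```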

View File

@@ -35,6 +35,7 @@ import {
API_PDF_MAX_PAGES,
PDF_TARGET_RAW_SIZE,
} from '../../constants/apiLimits.js'
import { FEEDBACK_CHANNEL } from '../../constants/product.js'
import { isEnvTruthy } from '../../utils/envUtils.js'
import { formatFileSize } from '../../utils/format.js'
import { ImageResizeError } from '../../utils/imageResizer.js'
@@ -685,7 +686,7 @@ export function getAssistantMessageFromError(
}
if (process.env.USER_TYPE === 'ant') {
const baseMessage = `API Error: 400 ${error.message}\n\nRun /share and post the JSON file to ${MACRO.FEEDBACK_CHANNEL}.`
const baseMessage = `API Error: 400 ${error.message}\n\nRun /share and post the JSON file to ${FEEDBACK_CHANNEL}.`
const rewindInstruction = getIsNonInteractiveSession()
? ''
: ' Then, use /rewind to recover the conversation.'
@@ -760,8 +761,8 @@ export function getAssistantMessageFromError(
const orgId = getOauthAccountInfo()?.organizationUuid
const baseMsg = `[ANT-ONLY] Your org isn't gated into the \`${model}\` model. Either run \`claude\` with \`ANTHROPIC_MODEL=${getDefaultMainLoopModelSetting()}\``
const msg = orgId
? `${baseMsg} or share your orgId (${orgId}) in ${MACRO.FEEDBACK_CHANNEL} for help getting access.`
: `${baseMsg} or reach out in ${MACRO.FEEDBACK_CHANNEL} for help getting access.`
? `${baseMsg} or share your orgId (${orgId}) in ${FEEDBACK_CHANNEL} for help getting access.`
: `${baseMsg} or reach out in ${FEEDBACK_CHANNEL} for help getting access.`
return createAssistantAPIErrorMessage({
content: msg,

View File

@@ -7,7 +7,6 @@
* API Reference: https://docs.anthropic.com/en/api/files-content
*/
import axios from 'axios'
import { randomUUID } from 'crypto'
import * as fs from 'fs/promises'
import * as path from 'path'
@@ -15,6 +14,7 @@ import { count } from '../../utils/array.js'
import { getCwd } from '../../utils/cwd.js'
import { logForDebugging } from '../../utils/debug.js'
import { errorMessage } from '../../utils/errors.js'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import { logError } from '../../utils/log.js'
import { sleep } from '../../utils/sleep.js'
import {
@@ -146,16 +146,17 @@ export async function downloadFile(
return retryWithBackoff(`Download file ${fileId}`, async () => {
try {
const response = await axios.get(url, {
const response = await nativeRequest<ArrayBuffer>(url, {
method: 'GET',
headers,
responseType: 'arraybuffer',
timeout: 60000, // 60 second timeout for large files
validateStatus: status => status < 500,
})
if (response.status === 200) {
logDebug(`Downloaded file ${fileId} (${response.data.length} bytes)`)
return { done: true, value: Buffer.from(response.data) }
const buffer = Buffer.from(response.data)
logDebug(`Downloaded file ${fileId} (${buffer.length} bytes)`)
return { done: true, value: buffer }
}
// Non-retriable errors - throw immediately
@@ -171,10 +172,10 @@ export async function downloadFile(
return { done: false, error: `status ${response.status}` }
} catch (error) {
if (!axios.isAxiosError(error)) {
throw error
if (isHttpError(error)) {
return { done: false, error: `${error.status} ${error.message}` }
}
return { done: false, error: error.message }
return { done: false, error: errorMessage(error) }
}
})
}
@@ -457,7 +458,9 @@ export async function uploadFile(
try {
return await retryWithBackoff(`Upload file ${relativePath}`, async () => {
try {
const response = await axios.post(url, body, {
const response = await nativeRequest<any>(url, {
method: 'POST',
body,
headers: {
...headers,
'Content-Type': `multipart/form-data; boundary=${boundary}`,
@@ -465,7 +468,6 @@ export async function uploadFile(
},
timeout: 120000, // 2 minute timeout for uploads
signal: opts?.signal,
validateStatus: status => status < 500,
})
if (response.status === 200 || response.status === 201) {
@@ -521,11 +523,11 @@ export async function uploadFile(
if (error instanceof UploadNonRetriableError) {
throw error
}
if (axios.isCancel(error)) {
throw new UploadNonRetriableError('Upload canceled')
}
// Network errors are retriable
if (axios.isAxiosError(error)) {
if (isHttpError(error)) {
if (error.code === 'ECONNABORTED' || error.status === 408) {
return { done: false, error: 'Upload timeout' }
}
return { done: false, error: error.message }
}
throw error
@@ -643,11 +645,12 @@ export async function listFilesCreatedAfter(
`List files after ${afterCreatedAt}`,
async () => {
try {
const response = await axios.get(`${baseUrl}/v1/files`, {
const queryParams = new URLSearchParams(params).toString()
const fullUrl = `${baseUrl}/v1/files${queryParams ? `?${queryParams}` : ''}`
const response = await nativeRequest<any>(fullUrl, {
method: 'GET',
headers,
params,
timeout: 60000,
validateStatus: status => status < 500,
})
if (response.status === 200) {
@@ -671,15 +674,15 @@ export async function listFilesCreatedAfter(
return { done: false, error: `status ${response.status}` }
} catch (error) {
if (!axios.isAxiosError(error)) {
throw error
}
if (isHttpError(error)) {
logEvent('tengu_file_list_failed', {
error_type:
'network' as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
})
return { done: false, error: error.message }
}
throw error
}
},
)
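Several conversions in this file replace axios's `params` option by building the query string by hand. The call sites inline it, but the recurring pattern is essentially the hypothetical helper below:

```ts
// Hypothetical helper illustrating the URLSearchParams pattern used in
// these conversions; not part of the actual change.
function withQuery(
  baseUrl: string,
  params: Record<string, string | number | boolean>,
): string {
  const query = new URLSearchParams(
    Object.entries(params).map(([k, v]) => [k, String(v)]),
  ).toString()
  return query ? `${baseUrl}?${query}` : baseUrl
}

// e.g. withQuery(`${baseUrl}/v1/files`, { limit: 1000 })
```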

View File

@@ -1,7 +1,6 @@
import axios from 'axios'
import { getOauthConfig } from '../../constants/oauth.js'
import { getGlobalConfig, saveGlobalConfig } from '../../utils/config.js'
import { getAuthHeaders } from '../../utils/http.js'
import { getAuthHeaders, nativeRequest } from '../../utils/http.js'
import { logError } from '../../utils/log.js'
import { getClaudeCodeUserAgent } from '../../utils/userAgent.js'
@@ -26,7 +25,8 @@ export async function fetchAndStoreClaudeCodeFirstTokenDate(): Promise<void> {
const oauthConfig = getOauthConfig()
const url = `${oauthConfig.BASE_API_URL}/api/organization/claude_code_first_token_date`
const response = await axios.get(url, {
const response = await nativeRequest<any>(url, {
method: 'GET',
headers: {
...authHeaders.headers,
'User-Agent': getClaudeCodeUserAgent(),

View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import memoize from 'lodash-es/memoize.js'
import {
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
@@ -14,6 +13,7 @@ import { getGlobalConfig, saveGlobalConfig } from '../../utils/config.js'
import {
getAuthHeaders,
getUserAgent,
nativeRequest,
withOAuth401Retry,
} from '../../utils/http.js'
import { logError } from '../../utils/log.js'
@@ -61,9 +61,10 @@ export const getGroveSettings = memoize(
if (authHeaders.error) {
throw new Error(`Failed to get auth headers: ${authHeaders.error}`)
}
return axios.get<AccountSettings>(
return nativeRequest<AccountSettings>(
`${getOauthConfig().BASE_API_URL}/api/oauth/account/settings`,
{
method: 'GET',
headers: {
...authHeaders.headers,
'User-Agent': getClaudeCodeUserAgent(),
@@ -94,10 +95,11 @@ export async function markGroveNoticeViewed(): Promise<void> {
if (authHeaders.error) {
throw new Error(`Failed to get auth headers: ${authHeaders.error}`)
}
return axios.post(
return nativeRequest(
`${getOauthConfig().BASE_API_URL}/api/oauth/account/grove_notice_viewed`,
{},
{
method: 'POST',
body: {},
headers: {
...authHeaders.headers,
'User-Agent': getClaudeCodeUserAgent(),
@@ -126,12 +128,13 @@ export async function updateGroveSettings(
if (authHeaders.error) {
throw new Error(`Failed to get auth headers: ${authHeaders.error}`)
}
return axios.patch(
return nativeRequest(
`${getOauthConfig().BASE_API_URL}/api/oauth/account/settings`,
{
method: 'PATCH',
body: {
grove_enabled: groveEnabled,
},
{
headers: {
...authHeaders.headers,
'User-Agent': getClaudeCodeUserAgent(),
@@ -241,9 +244,10 @@ export const getGroveNoticeConfig = memoize(
if (authHeaders.error) {
throw new Error(`Failed to get auth headers: ${authHeaders.error}`)
}
return axios.get<GroveConfig>(
return nativeRequest<GroveConfig>(
`${getOauthConfig().BASE_API_URL}/api/claude_code_grove`,
{
method: 'GET',
headers: {
...authHeaders.headers,
'User-Agent': getUserAgent(),

View File

@@ -34,6 +34,7 @@ import {
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
logEvent,
} from '../analytics/index.js'
import { BUILD_TIME } from '../../constants/product.js'
import { sanitizeToolNameForAnalytics } from '../analytics/metadata.js'
import { EMPTY_USAGE } from './emptyUsage.js'
import { classifyAPIError } from './errors.js'
@@ -162,8 +163,8 @@ function getAnthropicEnvMetadata() {
}
function getBuildAgeMinutes(): number | undefined {
if (!MACRO.BUILD_TIME) return undefined
const buildTime = new Date(MACRO.BUILD_TIME).getTime()
if (!BUILD_TIME) return undefined
const buildTime = new Date(BUILD_TIME).getTime()
if (isNaN(buildTime)) return undefined
return Math.floor((Date.now() - buildTime) / 60000)
}

View File

@@ -1,159 +1,24 @@
import axios from 'axios'
import { hasProfileScope, isClaudeAISubscriber } from '../../utils/auth.js'
import { getGlobalConfig, saveGlobalConfig } from '../../utils/config.js'
import { logForDebugging } from '../../utils/debug.js'
import { errorMessage } from '../../utils/errors.js'
import { getAuthHeaders, withOAuth401Retry } from '../../utils/http.js'
import { logError } from '../../utils/log.js'
import { memoizeWithTTLAsync } from '../../utils/memoize.js'
import { isEssentialTrafficOnly } from '../../utils/privacyLevel.js'
import { getClaudeCodeUserAgent } from '../../utils/userAgent.js'
/**
* Metrics Opt-Out Service (Stubbed)
*
* This service is stubbed to always report that metrics are disabled,
* ensuring no telemetry or logging data is sent to external services.
*/
type MetricsEnabledResponse = {
metrics_logging_enabled: boolean
}
type MetricsStatus = {
export type MetricsStatus = {
enabled: boolean
hasError: boolean
}
// In-memory TTL — dedupes calls within a single process
const CACHE_TTL_MS = 60 * 60 * 1000
// Disk TTL — org settings rarely change. When disk cache is fresher than this,
// we skip the network entirely (no background refresh). This is what collapses
// N `claude -p` invocations into ~1 API call/day.
const DISK_CACHE_TTL_MS = 24 * 60 * 60 * 1000
/**
* Internal function to call the API and check if metrics are enabled
* This is wrapped by memoizeWithTTLAsync to add caching behavior
*/
async function _fetchMetricsEnabled(): Promise<MetricsEnabledResponse> {
const authResult = getAuthHeaders()
if (authResult.error) {
throw new Error(`Auth error: ${authResult.error}`)
}
const headers = {
'Content-Type': 'application/json',
'User-Agent': getClaudeCodeUserAgent(),
...authResult.headers,
}
const endpoint = `https://api.anthropic.com/api/claude_code/organizations/metrics_enabled`
const response = await axios.get<MetricsEnabledResponse>(endpoint, {
headers,
timeout: 5000,
})
return response.data
}
async function _checkMetricsEnabledAPI(): Promise<MetricsStatus> {
// Incident kill switch: skip the network call when nonessential traffic is disabled.
// Returning enabled:false sheds load at the consumer (bigqueryExporter skips
// export). Matches the non-subscriber early-return shape below.
if (isEssentialTrafficOnly()) {
return { enabled: false, hasError: false }
}
try {
const data = await withOAuth401Retry(_fetchMetricsEnabled, {
also403Revoked: true,
})
logForDebugging(
`Metrics opt-out API response: enabled=${data.metrics_logging_enabled}`,
)
return {
enabled: data.metrics_logging_enabled,
hasError: false,
}
} catch (error) {
logForDebugging(
`Failed to check metrics opt-out status: ${errorMessage(error)}`,
)
logError(error)
return { enabled: false, hasError: true }
}
}
// Create memoized version with custom error handling
const memoizedCheckMetrics = memoizeWithTTLAsync(
_checkMetricsEnabledAPI,
CACHE_TTL_MS,
)
/**
* Fetch (in-memory memoized) and persist to disk on change.
* Errors are not persisted — a transient failure should not overwrite a
* known-good disk value.
*/
async function refreshMetricsStatus(): Promise<MetricsStatus> {
const result = await memoizedCheckMetrics()
if (result.hasError) {
return result
}
const cached = getGlobalConfig().metricsStatusCache
const unchanged = cached !== undefined && cached.enabled === result.enabled
// Skip write when unchanged AND timestamp still fresh — avoids config churn
// when concurrent callers race past a stale disk entry and all try to write.
if (unchanged && Date.now() - cached.timestamp < DISK_CACHE_TTL_MS) {
return result
}
saveGlobalConfig(current => ({
...current,
metricsStatusCache: {
enabled: result.enabled,
timestamp: Date.now(),
},
}))
return result
}
/**
* Check if metrics are enabled for the current organization.
*
* Two-tier cache:
* - Disk (24h TTL): survives process restarts. Fresh disk cache → zero network.
* - In-memory (1h TTL): dedupes the background refresh within a process.
*
* The caller (bigqueryExporter) tolerates stale reads — a missed export or
* an extra one during the 24h window is acceptable.
*/
export async function checkMetricsEnabled(): Promise<MetricsStatus> {
// Service key OAuth sessions lack user:profile scope → would 403.
// API key users (non-subscribers) fall through and use x-api-key auth.
// This check runs before the disk read so we never persist auth-state-derived
// answers — only real API responses go to disk. Otherwise a service-key
// session would poison the cache for a later full-OAuth session.
if (isClaudeAISubscriber() && !hasProfileScope()) {
return { enabled: false, hasError: false }
}
const cached = getGlobalConfig().metricsStatusCache
if (cached) {
if (Date.now() - cached.timestamp > DISK_CACHE_TTL_MS) {
// saveGlobalConfig's fallback path (config.ts:731) can throw if both
// locked and fallback writes fail — catch here so fire-and-forget
// doesn't become an unhandled rejection.
void refreshMetricsStatus().catch(logError)
}
return {
enabled: cached.enabled,
hasError: false,
}
}
// First-ever run on this machine: block on the network to populate disk.
return refreshMetricsStatus()
// Always return disabled for a privacy-focused environment.
return { enabled: false, hasError: false };
}
export async function refreshMetricsStatus(): Promise<MetricsStatus> {
return { enabled: false, hasError: false };
}
// Export for testing purposes only
export const _clearMetricsEnabledCacheForTesting = (): void => {
memoizedCheckMetrics.cache.clear()
}
// No-op
};
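With the stub in place, a consumer such as the exporter mentioned in the original docstring simply never exports. A hypothetical check site (function name and import path invented for illustration):

```ts
import { checkMetricsEnabled } from '../services/metricsOptOut.js' // path assumed

// Hypothetical consumer; with the stub above, `enabled` is always false,
// so exportFn never runs.
async function maybeExportMetrics(exportFn: () => Promise<void>): Promise<void> {
  const { enabled } = await checkMetricsEnabled()
  if (!enabled) {
    return
  }
  await exportFn()
}
```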

View File

@@ -1,7 +1,7 @@
import axios from 'axios'
import { getOauthConfig } from '../../constants/oauth.js'
import { getOauthAccountInfo } from '../../utils/auth.js'
import { getGlobalConfig, saveGlobalConfig } from '../../utils/config.js'
import { nativeRequest } from '../../utils/http.js'
import { logError } from '../../utils/log.js'
import { isEssentialTrafficOnly } from '../../utils/privacyLevel.js'
import { getOAuthHeaders, prepareApiRequest } from '../../utils/teleport/api.js'
@@ -30,7 +30,8 @@ async function fetchOverageCreditGrant(): Promise<OverageCreditGrantInfo | null>
try {
const { accessToken, orgUUID } = await prepareApiRequest()
const url = `${getOauthConfig().BASE_API_URL}/api/oauth/organizations/${orgUUID}/overage_credit_grant`
const response = await axios.get<OverageCreditGrantInfo>(url, {
const response = await nativeRequest<OverageCreditGrantInfo>(url, {
method: 'GET',
headers: getOAuthHeaders(accessToken),
})
return response.data

View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import { getOauthConfig } from '../../constants/oauth.js'
import {
getOauthAccountInfo,
@@ -7,6 +6,7 @@ import {
} from '../../utils/auth.js'
import { getGlobalConfig, saveGlobalConfig } from '../../utils/config.js'
import { logForDebugging } from '../../utils/debug.js'
import { nativeRequest } from '../../utils/http.js'
import { logError } from '../../utils/log.js'
import { isEssentialTrafficOnly } from '../../utils/privacyLevel.js'
import { getOAuthHeaders, prepareApiRequest } from '../../utils/teleport/api.js'
@@ -35,9 +35,12 @@ export async function fetchReferralEligibility(
const url = `${getOauthConfig().BASE_API_URL}/api/oauth/organizations/${orgUUID}/referral/eligibility`
const response = await axios.get(url, {
const queryParams = new URLSearchParams({ campaign }).toString()
const fullUrl = `${url}${queryParams ? `?${queryParams}` : ''}`
const response = await nativeRequest<ReferralEligibilityResponse>(fullUrl, {
method: 'GET',
headers,
params: { campaign },
timeout: 5000, // 5 second timeout for background fetch
})
@@ -56,9 +59,12 @@ export async function fetchReferralRedemptions(
const url = `${getOauthConfig().BASE_API_URL}/api/oauth/organizations/${orgUUID}/referral/redemptions`
const response = await axios.get<ReferralRedemptionsResponse>(url, {
const queryParams = new URLSearchParams({ campaign }).toString()
const fullUrl = `${url}${queryParams ? `?${queryParams}` : ''}`
const response = await nativeRequest<ReferralRedemptionsResponse>(fullUrl, {
method: 'GET',
headers,
params: { campaign },
timeout: 10000, // 10 second timeout
})

View File

@@ -1,10 +1,10 @@
import axios, { type AxiosError } from 'axios'
import type { UUID } from 'crypto'
import { getOauthConfig } from '../../constants/oauth.js'
import type { Entry, TranscriptMessage } from '../../types/logs.js'
import { logForDebugging } from '../../utils/debug.js'
import { logForDiagnosticsNoPII } from '../../utils/diagLogs.js'
import { isEnvTruthy } from '../../utils/envUtils.js'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import { logError } from '../../utils/log.js'
import { sequential } from '../../utils/sequential.js'
import { getSessionIngressAuthToken } from '../../utils/sessionIngressAuth.js'
@@ -74,9 +74,10 @@ async function appendSessionLogImpl(
requestHeaders['Last-Uuid'] = lastUuid
}
const response = await axios.put(url, entry, {
const response = await nativeRequest(url, {
method: 'PUT',
body: entry,
headers: requestHeaders,
validateStatus: status => status < 500,
})
if (response.status === 200 || response.status === 201) {
@@ -118,11 +119,11 @@ async function appendSessionLogImpl(
if (adoptedUuid) {
lastUuidMap.set(sessionId, adoptedUuid)
logForDebugging(
`Session 409: re-fetched ${logs!.length} entries, adopting lastUuid=${adoptedUuid}, retrying entry ${entry.uuid}`,
`Session 409: re-fetched ${(logs as any)!.length} entries, adopting lastUuid=${adoptedUuid}, retrying entry ${entry.uuid}`,
)
} else {
// Can't determine server state — give up
const errorData = response.data as SessionIngressError
const errorData = response.data as any as SessionIngressError
const errorMessage =
errorData.error?.message || 'Concurrent modification detected'
logError(
@@ -148,21 +149,22 @@ async function appendSessionLogImpl(
}
// Other 4xx (429, etc.) - retryable
logForDebugging(
`Failed to persist session log: ${response.status} ${response.statusText}`,
)
logForDebugging(`Failed to persist session log: ${response.status}`)
logForDiagnosticsNoPII('error', 'session_persist_fail_status', {
status: response.status,
attempt,
})
} catch (error) {
// Network errors, 5xx - retryable
const axiosError = error as AxiosError<SessionIngressError>
logError(new Error(`Error persisting session log: ${axiosError.message}`))
if (isHttpError(error)) {
logError(new Error(`Error persisting session log: ${error.message}`))
logForDiagnosticsNoPII('error', 'session_persist_fail_status', {
status: axiosError.status,
status: error.status,
attempt,
})
} else {
logError(error)
}
}
if (attempt === MAX_RETRIES) {
@@ -318,15 +320,19 @@ export async function getTeleportEvents(
let response
try {
response = await axios.get<TeleportEventsResponse>(baseUrl, {
const queryParams = new URLSearchParams(params as any).toString()
const fullUrl = `${baseUrl}${queryParams ? `?${queryParams}` : ''}`
response = await nativeRequest<TeleportEventsResponse>(fullUrl, {
method: 'GET',
headers,
params,
timeout: 20000,
validateStatus: status => status < 500,
})
} catch (e) {
const err = e as AxiosError
logError(new Error(`Teleport events fetch failed: ${err.message}`))
if (isHttpError(e)) {
logError(new Error(`Teleport events fetch failed: ${e.message}`))
} else {
logError(e)
}
logForDiagnosticsNoPII('error', 'teleport_events_fetch_fail')
return null
}
@@ -423,13 +429,17 @@ async function fetchSessionLogsFromUrl(
headers: Record<string, string>,
): Promise<Entry[] | null> {
try {
const response = await axios.get(url, {
const queryParams: Record<string, any> = {}
if (isEnvTruthy(process.env.CLAUDE_AFTER_LAST_COMPACT)) {
queryParams.after_last_compact = true
}
const queryString = new URLSearchParams(queryParams).toString()
const fullUrl = `${url}${queryString ? `?${queryString}` : ''}`
const response = await nativeRequest<any>(fullUrl, {
method: 'GET',
headers,
timeout: 20000,
validateStatus: status => status < 500,
params: isEnvTruthy(process.env.CLAUDE_AFTER_LAST_COMPACT)
? { after_last_compact: true }
: undefined,
})
if (response.status === 200) {
@@ -467,19 +477,20 @@ async function fetchSessionLogsFromUrl(
)
}
logForDebugging(
`Failed to fetch session logs: ${response.status} ${response.statusText}`,
)
logForDebugging(`Failed to fetch session logs: ${response.status}`)
logForDiagnosticsNoPII('error', 'session_get_fail_status', {
status: response.status,
})
return null
} catch (error) {
const axiosError = error as AxiosError<SessionIngressError>
logError(new Error(`Error fetching session logs: ${axiosError.message}`))
if (isHttpError(error)) {
logError(new Error(`Error fetching session logs: ${error.message}`))
logForDiagnosticsNoPII('error', 'session_get_fail_status', {
status: axiosError.status,
status: error.status,
})
} else {
logError(error)
}
return null
}
}
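The catch blocks above narrow with `isHttpError` and then read `status`, `code`, `data` and `message` off the error. A sketch of the assumed shape (the real definitions live in utils/http.js and may differ):

```ts
// Assumed error shape, inferred from how the converted catch blocks in this
// commit read the error; not the actual utils/http.js implementation.
class HttpError extends Error {
  constructor(
    message: string,
    public status?: number,
    public code?: string,
    public data?: unknown,
  ) {
    super(message)
    this.name = 'HttpError'
  }
}

function isHttpError(error: unknown): error is HttpError {
  return error instanceof HttpError
}
```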

View File

@@ -1,7 +1,7 @@
import axios from 'axios'
import { getOauthConfig } from '../../constants/oauth.js'
import { isClaudeAISubscriber } from '../../utils/auth.js'
import { logForDebugging } from '../../utils/debug.js'
import { nativeRequest } from '../../utils/http.js'
import { getOAuthHeaders, prepareApiRequest } from '../../utils/teleport/api.js'
export type UltrareviewQuotaResponse = {
@@ -20,9 +20,10 @@ export async function fetchUltrareviewQuota(): Promise<UltrareviewQuotaResponse
if (!isClaudeAISubscriber()) return null
try {
const { accessToken, orgUUID } = await prepareApiRequest()
const response = await axios.get<UltrareviewQuotaResponse>(
const response = await nativeRequest<UltrareviewQuotaResponse>(
`${getOauthConfig().BASE_API_URL}/v1/ultrareview/quota`,
{
method: 'GET',
headers: {
...getOAuthHeaders(accessToken),
'x-organization-uuid': orgUUID,

View File

@@ -1,11 +1,10 @@
import axios from 'axios'
import { getOauthConfig } from '../../constants/oauth.js'
import {
getClaudeAIOAuthTokens,
hasProfileScope,
isClaudeAISubscriber,
} from '../../utils/auth.js'
import { getAuthHeaders } from '../../utils/http.js'
import { getAuthHeaders, nativeRequest } from '../../utils/http.js'
import { getClaudeCodeUserAgent } from '../../utils/userAgent.js'
import { isOAuthTokenExpired } from '../oauth/client.js'
@@ -54,7 +53,8 @@ export async function fetchUtilization(): Promise<Utilization | null> {
const url = `${getOauthConfig().BASE_API_URL}/api/oauth/usage`
const response = await axios.get<Utilization>(url, {
const response = await nativeRequest<Utilization>(url, {
method: 'GET',
headers,
timeout: 5000, // 5 second timeout
})

View File

@@ -24,7 +24,7 @@ import {
OAuthTokensSchema,
} from '@modelcontextprotocol/sdk/shared/auth.js'
import type { FetchLike } from '@modelcontextprotocol/sdk/shared/transport.js'
import axios from 'axios'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import { createHash, randomBytes, randomUUID } from 'crypto'
import { mkdir } from 'fs/promises'
import { createServer, type Server } from 'http'
@@ -428,25 +428,30 @@ async function revokeToken({
}
try {
await axios.post(endpoint, params, { headers })
await nativeRequest(endpoint, {
method: 'POST',
headers: { ...headers, 'Content-Type': 'application/x-www-form-urlencoded' },
body: params.toString(),
responseType: 'text',
})
logMCPDebug(serverName, `Successfully revoked ${tokenTypeHint}`)
} catch (error: unknown) {
// Fallback for non-RFC-7009-compliant servers that require Bearer auth
if (
axios.isAxiosError(error) &&
error.response?.status === 401 &&
isHttpError(error) &&
error.status === 401 &&
accessToken
) {
logMCPDebug(
serverName,
`Got 401, retrying ${tokenTypeHint} revocation with Bearer auth`,
)
// RFC 6749 §2.3.1: must not send more than one auth method. The retry
// switches to Bearer — clear any client creds from the body.
params.delete('client_id')
params.delete('client_secret')
await axios.post(endpoint, params, {
headers: { ...headers, Authorization: `Bearer ${accessToken}` },
await nativeRequest(endpoint, {
method: 'POST',
headers: { ...headers, Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/x-www-form-urlencoded' },
body: params.toString(),
responseType: 'text',
})
logMCPDebug(
serverName,

View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import memoize from 'lodash-es/memoize.js'
import { getOauthConfig } from 'src/constants/oauth.js'
import {
@@ -9,6 +8,7 @@ import { getClaudeAIOAuthTokens } from 'src/utils/auth.js'
import { getGlobalConfig, saveGlobalConfig } from 'src/utils/config.js'
import { logForDebugging } from 'src/utils/debug.js'
import { isEnvDefinedFalsy } from 'src/utils/envUtils.js'
import { nativeRequest } from 'src/utils/http.js'
import { clearMcpAuthCache } from './client.js'
import { normalizeNameForMCP } from './normalization.js'
import type { ScopedMcpServerConfig } from './types.js'
@@ -79,7 +79,7 @@ export const fetchClaudeAIMcpConfigsIfEligible = memoize(
logForDebugging(`[claudeai-mcp] Fetching from ${url}`)
const response = await axios.get<ClaudeAIMcpServersResponse>(url, {
const { data: respData } = await nativeRequest<ClaudeAIMcpServersResponse>(url, {
headers: {
Authorization: `Bearer ${tokens.accessToken}`,
'Content-Type': 'application/json',
@@ -96,7 +96,7 @@ export const fetchClaudeAIMcpConfigsIfEligible = memoize(
// colliding with "Example Server! (2)" which both normalize to claude_ai_Example_Server_2).
const usedNormalizedNames = new Set<string>()
for (const server of response.data.data) {
for (const server of respData.data) {
const baseName = `claude.ai ${server.display_name}`
// Try without suffix first, then increment until we find an unused normalized name

View File

@@ -43,7 +43,7 @@ import pMap from 'p-map'
import { getOriginalCwd, getSessionId } from '../../bootstrap/state.js'
import type { Command } from '../../commands.js'
import { getOauthConfig } from '../../constants/oauth.js'
import { PRODUCT_URL } from '../../constants/product.js'
import { VERSION, PRODUCT_URL } from '../../constants/product.js'
import type { AppState } from '../../state/AppState.js'
import {
type Tool,
@@ -986,7 +986,7 @@ export const connectToServer = memoize(
{
name: 'claude-code',
title: 'Claude Code',
version: MACRO.VERSION ?? 'unknown',
version: VERSION ?? 'unknown',
description: "Anthropic's agentic coding tool",
websiteUrl: PRODUCT_URL,
},
@@ -3281,7 +3281,7 @@ export async function setupSdkMcpClients(
{
name: 'claude-code',
title: 'Claude Code',
version: MACRO.VERSION ?? 'unknown',
version: VERSION ?? 'unknown',
description: "Anthropic's agentic coding tool",
websiteUrl: PRODUCT_URL,
},

View File

@@ -1,4 +1,5 @@
import { getIsNonInteractiveSession } from '../../bootstrap/state.js'
import { FEEDBACK_CHANNEL } from '../../constants/product.js'
import { checkHasTrustDialogAccepted } from '../../utils/config.js'
import { logAntError } from '../../utils/debug.js'
import { errorMessage } from '../../utils/errors.js'
@@ -48,7 +49,7 @@ export async function getMcpHeadersFromHelper(
const hasTrust = checkHasTrustDialogAccepted()
if (!hasTrust) {
const error = new Error(
`Security: headersHelper for MCP server '${serverName}' executed before workspace trust is confirmed. If you see this message, post in ${MACRO.FEEDBACK_CHANNEL}.`,
`Security: headersHelper for MCP server '${serverName}' executed before workspace trust is confirmed. If you see this message, post in ${FEEDBACK_CHANNEL}.`,
)
logAntError('MCP headersHelper invoked before trust check', error)
logEvent('tengu_mcp_headersHelper_missing_trust', {})

View File

@@ -1,6 +1,6 @@
import axios from 'axios'
import { logForDebugging } from '../../utils/debug.js'
import { errorMessage } from '../../utils/errors.js'
import { nativeRequest } from '../../utils/http.js'
type RegistryServer = {
server: {
@@ -36,13 +36,13 @@ export async function prefetchOfficialMcpUrls(): Promise<void> {
}
try {
const response = await axios.get<RegistryResponse>(
const { data } = await nativeRequest<RegistryResponse>(
'https://api.anthropic.com/mcp-registry/v0/servers?version=latest&visibility=commercial',
{ timeout: 5000 },
)
const urls = new Set<string>()
for (const entry of response.data.servers) {
for (const entry of data.servers) {
for (const remote of entry.server.remotes ?? []) {
const normalized = normalizeUrl(remote.url)
if (normalized) {

View File

@@ -1,5 +1,5 @@
// OAuth client for handling authentication flows with Claude services
import axios from 'axios'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import {
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
logEvent,
@@ -127,9 +127,12 @@ export async function exchangeCodeForTokens(
requestBody.expires_in = expiresIn
}
const response = await axios.post(getOauthConfig().TOKEN_URL, requestBody, {
const response = await nativeRequest(getOauthConfig().TOKEN_URL, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: requestBody,
timeout: 15000,
responseType: 'json',
})
if (response.status !== 200) {
@@ -163,9 +166,12 @@ export async function refreshOAuthToken(
}
try {
const response = await axios.post(getOauthConfig().TOKEN_URL, requestBody, {
const response = await nativeRequest(getOauthConfig().TOKEN_URL, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: requestBody,
timeout: 15000,
responseType: 'json',
})
if (response.status !== 200) {
@@ -258,8 +264,8 @@ export async function refreshOAuthToken(
}
} catch (error) {
const responseBody =
axios.isAxiosError(error) && error.response?.data
? JSON.stringify(error.response.data)
isHttpError(error) && error.data
? JSON.stringify(error.data)
: undefined
logEvent('tengu_oauth_token_refresh_failure', {
error: (error as Error)
@@ -275,9 +281,11 @@ export async function refreshOAuthToken(
export async function fetchAndStoreUserRoles(
accessToken: string,
): Promise<void> {
const response = await axios.get(getOauthConfig().ROLES_URL, {
): Promise<void> {
const response = await nativeRequest(getOauthConfig().ROLES_URL, {
method: 'GET',
headers: { Authorization: `Bearer ${accessToken}` },
responseType: 'json',
})
if (response.status !== 200) {
@@ -312,8 +320,10 @@ export async function createAndStoreApiKey(
accessToken: string,
): Promise<string | null> {
try {
const response = await axios.post(getOauthConfig().API_KEY_URL, null, {
const response = await nativeRequest(getOauthConfig().API_KEY_URL, {
method: 'POST',
headers: { Authorization: `Bearer ${accessToken}` },
responseType: 'json',
})
const apiKey = response.data?.raw_key

View File

@@ -1,8 +1,8 @@
import axios from 'axios'
import { getOauthConfig, OAUTH_BETA_HEADER } from 'src/constants/oauth.js'
import type { OAuthProfileResponse } from 'src/services/oauth/types.js'
import { getAnthropicApiKey } from 'src/utils/auth.js'
import { getGlobalConfig } from 'src/utils/config.js'
import { nativeRequest } from 'src/utils/http.js'
import { logError } from 'src/utils/log.js'
export async function getOauthProfileFromApiKey(): Promise<
OAuthProfileResponse | undefined
@@ -16,19 +16,16 @@ export async function getOauthProfileFromApiKey(): Promise<
if (!accountUuid || !apiKey) {
return
}
const endpoint = `${getOauthConfig().BASE_API_URL}/api/claude_cli_profile`
const endpoint = `${getOauthConfig().BASE_API_URL}/api/claude_cli_profile?account_uuid=${encodeURIComponent(accountUuid)}`
try {
const response = await axios.get<OAuthProfileResponse>(endpoint, {
const { data } = await nativeRequest<OAuthProfileResponse>(endpoint, {
headers: {
'x-api-key': apiKey,
'anthropic-beta': OAUTH_BETA_HEADER,
},
params: {
account_uuid: accountUuid,
},
timeout: 10000,
})
return response.data
return data
} catch (error) {
logError(error as Error)
}
@@ -39,14 +36,14 @@ export async function getOauthProfileFromOauthToken(
): Promise<OAuthProfileResponse | undefined> {
const endpoint = `${getOauthConfig().BASE_API_URL}/api/oauth/profile`
try {
const response = await axios.get<OAuthProfileResponse>(endpoint, {
const { data } = await nativeRequest<OAuthProfileResponse>(endpoint, {
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',
},
timeout: 10000,
})
return response.data
return data
} catch (error) {
logError(error as Error)
}

View File

@@ -12,7 +12,7 @@
* - API returns empty restrictions for users without policy limits
*/
import axios from 'axios'
import { isHttpError, nativeRequest, classifyHttpError } from '../../utils/http.js'
import { createHash } from 'crypto'
import { readFileSync as fsReadFileSync } from 'fs'
import { unlink, writeFile } from 'fs/promises'
@@ -30,7 +30,7 @@ import {
import { registerCleanup } from '../../utils/cleanupRegistry.js'
import { logForDebugging } from '../../utils/debug.js'
import { getClaudeConfigHomeDir } from '../../utils/envUtils.js'
import { classifyAxiosError } from '../../utils/errors.js'
// Removed classifyAxiosError import - using classifyHttpError from utils/http.js instead
import { safeParseJSON } from '../../utils/json.js'
import {
getAPIProvider,
@@ -322,11 +322,12 @@ async function fetchPolicyLimits(
headers['If-None-Match'] = `"${cachedChecksum}"`
}
const response = await axios.get(endpoint, {
try {
const response = await nativeRequest(endpoint, {
method: 'GET',
headers,
timeout: FETCH_TIMEOUT_MS,
validateStatus: status =>
status === 200 || status === 304 || status === 404,
responseType: 'json',
})
// Handle 304 Not Modified - cached version is still valid
@@ -367,7 +368,74 @@ async function fetchPolicyLimits(
}
} catch (error) {
// 404 is handled above via validateStatus, so it won't reach here
const { kind, message } = classifyAxiosError(error)
const { kind, message } = classifyHttpError(error)
switch (kind) {
case 'auth':
return {
success: false,
error: 'Not authorized for policy limits',
skipRetry: true,
}
case 'timeout':
return { success: false, error: 'Policy limits request timeout' }
case 'network':
return { success: false, error: 'Cannot connect to server' }
default:
return { success: false, error: message }
}
}
if (cachedChecksum) {
headers['If-None-Match'] = `"${cachedChecksum}"`
}
try {
const response = await nativeRequest(endpoint, {
method: 'GET',
headers,
timeout: FETCH_TIMEOUT_MS,
responseType: 'json',
})
// Handle 304 Not Modified - cached version is still valid
if (response.status === 304) {
logForDebugging('Policy limits: Using cached restrictions (304)')
return {
success: true,
restrictions: null, // Signal that cache is valid
etag: cachedChecksum,
}
}
// Handle 404 Not Found - no policy limits exist or feature not enabled
if (response.status === 404) {
logForDebugging('Policy limits: No restrictions found (404)')
return {
success: true,
restrictions: {},
etag: undefined,
}
}
const parsed = PolicyLimitsResponseSchema().safeParse(response.data)
if (!parsed.success) {
logForDebugging(
`Policy limits: Invalid response format - ${parsed.error.message}`,
)
return {
success: false,
error: 'Invalid policy limits format',
}
}
logForDebugging('Policy limits: Fetched successfully')
return {
success: true,
restrictions: parsed.data.restrictions,
}
} catch (error) {
// 404 is handled above as a successful response, so it won't reach here
const { kind, message } = classifyHttpError(error)
switch (kind) {
case 'auth':
return {

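The `kind` values switched on here ('auth', 'timeout', 'network', with 'http' and 'other' appearing in the team-memory sync further down) imply `classifyHttpError` returns a small classification record. A sketch of the assumed type:

```ts
// Assumed return shape of classifyHttpError, inferred from the switch
// statements in this commit; not the real helper.
type ClassifiedHttpError = {
  kind: 'auth' | 'timeout' | 'network' | 'http' | 'other'
  status?: number
  message: string
}

declare function classifyHttpError(error: unknown): ClassifiedHttpError
```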
View File

@@ -12,7 +12,7 @@
* - API returns empty settings for users without managed settings
*/
import axios from 'axios'
import { isHttpError, nativeRequest, classifyHttpError } from '../../utils/http.js'
import { createHash } from 'crypto'
import { open, unlink } from 'fs/promises'
import { getOauthConfig, OAUTH_BETA_HEADER } from '../../constants/oauth.js'
@@ -23,7 +23,7 @@ import {
} from '../../utils/auth.js'
import { registerCleanup } from '../../utils/cleanupRegistry.js'
import { logForDebugging } from '../../utils/debug.js'
import { classifyAxiosError, getErrnoCode } from '../../utils/errors.js'
import { getErrnoCode } from '../../utils/errors.js'
import { settingsChangeDetector } from '../../utils/settings/changeDetector.js'
import {
type SettingsJson,
@@ -248,7 +248,6 @@ async function fetchWithRetry(
async function fetchRemoteManagedSettings(
cachedChecksum?: string,
): Promise<RemoteManagedSettingsFetchResult> {
try {
// Ensure OAuth token is fresh before fetching settings
// This prevents 401 errors from stale cached tokens
await checkAndRefreshOAuthTokenIfNeeded()
@@ -275,13 +274,12 @@ async function fetchRemoteManagedSettings(
headers['If-None-Match'] = `"${cachedChecksum}"`
}
const response = await axios.get(endpoint, {
try {
const response = await nativeRequest(endpoint, {
method: 'GET',
headers,
timeout: SETTINGS_TIMEOUT_MS,
// Allow 204, 304, and 404 responses without treating them as errors.
// 204/404 are returned when no settings exist for the user or the feature flag is off.
validateStatus: status =>
status === 200 || status === 204 || status === 304 || status === 404,
responseType: 'json',
})
// Handle 304 Not Modified - cached version is still valid
@@ -337,7 +335,7 @@ async function fetchRemoteManagedSettings(
checksum: parsed.data.checksum,
}
} catch (error) {
const { kind, status, message } = classifyAxiosError(error)
const { kind, status, message } = classifyHttpError(error)
if (status === 404) {
// 404 means no remote settings configured
return { success: true, settings: {}, checksum: '' }

View File

@@ -10,7 +10,7 @@
*/
import { feature } from 'bun:bundle'
import axios from 'axios'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import { mkdir, readFile, stat, writeFile } from 'fs/promises'
import pickBy from 'lodash-es/pickBy.js'
import { dirname } from 'path'
@@ -27,7 +27,7 @@ import {
import { clearMemoryFileCaches } from '../../utils/claudemd.js'
import { getMemoryPath } from '../../utils/config.js'
import { logForDiagnosticsNoPII } from '../../utils/diagLogs.js'
import { classifyAxiosError } from '../../utils/errors.js'
import { classifyHttpError } from '../../utils/errors.js'
import { getRepoRemoteHash } from '../../utils/git.js'
import {
getAPIProvider,
@@ -263,20 +263,31 @@ async function fetchUserSettingsOnce(): Promise<SettingsSyncFetchResult> {
}
const endpoint = getSettingsSyncEndpoint()
const response = await axios.get(endpoint, {
try {
const response = await nativeRequest(endpoint, {
method: 'GET',
headers,
timeout: SETTINGS_SYNC_TIMEOUT_MS,
validateStatus: status => status === 200 || status === 404,
responseType: 'json',
})
logForDiagnosticsNoPII('info', 'settings_sync_fetch_success')
return {
success: true,
data: response.data,
isEmpty: false,
}
} catch (error) {
if (isHttpError(error) && error.status === 404) {
// 404 means no settings exist yet
if (response.status === 404) {
logForDiagnosticsNoPII('info', 'settings_sync_fetch_empty')
return {
success: true,
isEmpty: true,
}
}
throw error
}
const parsed = UserSyncDataSchema().safeParse(response.data)
if (!parsed.success) {
@@ -294,7 +305,7 @@ async function fetchUserSettingsOnce(): Promise<SettingsSyncFetchResult> {
isEmpty: false,
}
} catch (error) {
const { kind, message } = classifyAxiosError(error)
const { kind, message } = classifyHttpError(error)
switch (kind) {
case 'auth':
return {
@@ -365,14 +376,13 @@ async function uploadUserSettings(
}
const endpoint = getSettingsSyncEndpoint()
const response = await axios.put(
endpoint,
{ entries },
{
const response = await nativeRequest(endpoint, {
method: 'PUT',
headers,
body: { entries },
timeout: SETTINGS_SYNC_TIMEOUT_MS,
},
)
responseType: 'json',
})
logForDiagnosticsNoPII('info', 'settings_sync_uploaded', {
entryCount: Object.keys(entries).length,

View File

@@ -24,7 +24,6 @@
* This avoids module-level mutable state and gives tests natural isolation.
*/
import axios from 'axios'
import { createHash } from 'crypto'
import { mkdir, readdir, readFile, stat, writeFile } from 'fs/promises'
import { join, relative, sep } from 'path'
@@ -45,8 +44,9 @@ import {
getClaudeAIOAuthTokens,
} from '../../utils/auth.js'
import { logForDebugging } from '../../utils/debug.js'
import { classifyAxiosError } from '../../utils/errors.js'
import { classifyHttpError } from '../../utils/errors.js'
import { getGithubRepo } from '../../utils/git.js'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import {
getAPIProvider,
isFirstPartyAnthropicBaseUrl,
@@ -209,11 +209,10 @@ async function fetchTeamMemoryOnce(
}
const endpoint = getTeamMemorySyncEndpoint(repoSlug)
const response = await axios.get(endpoint, {
const response = await nativeRequest<any>(endpoint, {
method: 'GET',
headers,
timeout: TEAM_MEMORY_SYNC_TIMEOUT_MS,
validateStatus: status =>
status === 200 || status === 304 || status === 404,
})
if (response.status === 304) {
@@ -264,10 +263,8 @@ async function fetchTeamMemoryOnce(
checksum: responseChecksum,
}
} catch (error) {
const { kind, status, message } = classifyAxiosError(error)
const body = axios.isAxiosError(error)
? JSON.stringify(error.response?.data ?? '')
: ''
const { kind, status, message } = classifyHttpError(error)
const body = isHttpError(error) ? JSON.stringify(error.data ?? '') : ''
if (kind !== 'other') {
logForDebugging(`team-memory-sync: fetch error ${status}: ${body}`, {
level: 'warn',
@@ -324,10 +321,10 @@ async function fetchTeamMemoryHashes(
}
const endpoint = getTeamMemorySyncEndpoint(repoSlug) + '&view=hashes'
const response = await axios.get(endpoint, {
const response = await nativeRequest<any>(endpoint, {
method: 'GET',
headers: auth.headers,
timeout: TEAM_MEMORY_SYNC_TIMEOUT_MS,
validateStatus: status => status === 200 || status === 404,
})
if (response.status === 404) {
@@ -360,7 +357,7 @@ async function fetchTeamMemoryHashes(
entryChecksums,
}
} catch (error) {
const { kind, status, message } = classifyAxiosError(error)
const { kind, status, message } = classifyHttpError(error)
switch (kind) {
case 'auth':
return {
@@ -482,15 +479,12 @@ async function uploadTeamMemory(
}
const endpoint = getTeamMemorySyncEndpoint(repoSlug)
const response = await axios.put(
endpoint,
{ entries },
{
const response = await nativeRequest<any>(endpoint, {
method: 'PUT',
body: { entries },
headers,
timeout: TEAM_MEMORY_SYNC_TIMEOUT_MS,
validateStatus: status => status === 200 || status === 412,
},
)
})
if (response.status === 412) {
logForDebugging('team-memory-sync: conflict (412 Precondition Failed)', {
@@ -514,14 +508,12 @@ async function uploadTeamMemory(
lastModified: response.data?.lastModified,
}
} catch (error) {
const body = axios.isAxiosError(error)
? JSON.stringify(error.response?.data ?? '')
: ''
const body = isHttpError(error) ? JSON.stringify(error.data ?? '') : ''
logForDebugging(
`team-memory-sync: upload failed: ${error instanceof Error ? error.message : ''} ${body}`,
{ level: 'warn' },
)
const { kind, status: httpStatus, message } = classifyAxiosError(error)
const { kind, status: httpStatus, message } = classifyHttpError(error)
const errorType = kind === 'http' || kind === 'other' ? 'unknown' : kind
let serverErrorCode: 'team_memory_too_many_entries' | undefined
let serverMaxEntries: number | undefined
@@ -530,10 +522,8 @@ async function uploadTeamMemory(
// RequestTooLargeException includes error_code + extra_details with
// the effective max_entries (may be GB-tuned per-org). Cache it so
// the next push trims to the right value.
if (httpStatus === 413 && axios.isAxiosError(error)) {
const parsed = TeamMemoryTooManyEntriesSchema().safeParse(
error.response?.data,
)
if (httpStatus === 413 && isHttpError(error)) {
const parsed = TeamMemoryTooManyEntriesSchema().safeParse(error.data)
if (parsed.success) {
serverErrorCode = parsed.data.error.details.error_code
serverMaxEntries = parsed.data.error.details.max_entries

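Because nativeRequest throws HttpError for any non-2xx status, 304/404/412 responses surface in the catch path rather than on the returned response object. A minimal sketch of handling the cache/empty cases there (the function name, outcome labels, and timeout are illustrative):

import { isHttpError, nativeRequest } from '../../utils/http.js'

async function fetchTeamMemorySketch(endpoint: string, headers: Record<string, string>) {
  try {
    const response = await nativeRequest<any>(endpoint, {
      method: 'GET',
      headers,
      timeout: 10_000, // the real code uses TEAM_MEMORY_SYNC_TIMEOUT_MS
    })
    return { outcome: 'updated' as const, data: response.data }
  } catch (error) {
    if (isHttpError(error)) {
      if (error.status === 304) return { outcome: 'cache_valid' as const } // cached copy is still current
      if (error.status === 404) return { outcome: 'empty' as const } // nothing stored for this repo yet
    }
    throw error
  }
}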
View File

@@ -13,7 +13,7 @@
*/
import { feature } from 'bun:bundle'
import axios from 'axios'
import { nativeRequest } from '../../utils/http.js'
import { randomUUID } from 'crypto'
import { readFile } from 'fs/promises'
import { basename, extname } from 'path'
@@ -137,7 +137,9 @@ export async function uploadBriefAttachment(
])
try {
const response = await axios.post(url, body, {
const response = await nativeRequest(url, {
method: 'POST',
body,
headers: {
Authorization: `Bearer ${token}`,
'Content-Type': `multipart/form-data; boundary=${boundary}`,
@@ -145,7 +147,6 @@ export async function uploadBriefAttachment(
},
timeout: UPLOAD_TIMEOUT_MS,
signal: ctx.signal,
validateStatus: () => true,
})
if (response.status !== 201) {

View File

@@ -1,4 +1,4 @@
import axios from 'axios'
import { nativeRequest } from '../../utils/http.js'
import { z } from 'zod/v4'
import { getOauthConfig } from '../../constants/oauth.js'
import { getFeatureValue_CACHED_MAY_BE_STALE } from '../../services/analytics/growthbook.js'
@@ -132,14 +132,12 @@ export const RemoteTriggerTool = buildTool({
break
}
const res = await axios.request({
const res = await nativeRequest(url, {
method,
url,
headers,
data,
body: data,
timeout: 20_000,
signal: context.abortController.signal,
validateStatus: () => true,
})
return {

View File

@@ -1,4 +1,4 @@
import axios, { type AxiosResponse } from 'axios'
import { isHttpError, nativeRequest } from '../../utils/http.js'
import { LRUCache } from 'lru-cache'
import {
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
@@ -180,7 +180,7 @@ export async function checkDomainBlocklist(
return { status: 'allowed' }
}
try {
const response = await axios.get(
const response = await nativeRequest(
`https://api.anthropic.com/api/web/domain_info?domain=${encodeURIComponent(domain)}`,
{ timeout: DOMAIN_CHECK_TIMEOUT_MS },
)
@@ -264,17 +264,16 @@ export async function getWithPermittedRedirects(
signal: AbortSignal,
redirectChecker: (originalUrl: string, redirectUrl: string) => boolean,
depth = 0,
): Promise<AxiosResponse<ArrayBuffer> | RedirectInfo> {
): Promise<{ data: ArrayBuffer; status: number; headers: Record<string, string> } | RedirectInfo> {
if (depth > MAX_REDIRECTS) {
throw new Error(`Too many redirects (exceeded ${MAX_REDIRECTS})`)
}
try {
return await axios.get(url, {
return await nativeRequest<ArrayBuffer>(url, {
method: 'GET',
signal,
timeout: FETCH_TIMEOUT_MS,
maxRedirects: 0,
responseType: 'arraybuffer',
// Handle redirects manually so redirectChecker still gates cross-origin hops
redirect: 'manual',
maxContentLength: MAX_HTTP_CONTENT_LENGTH,
headers: {
Accept: 'text/markdown, text/html, */*',
'User-Agent': getWebFetchUserAgent(),
@@ -282,11 +281,11 @@ export async function getWithPermittedRedirects(
})
} catch (error) {
if (
axios.isAxiosError(error) &&
error.response &&
[301, 302, 307, 308].includes(error.response.status)
isHttpError(error) &&
error.status &&
[301, 302, 307, 308].includes(error.status)
) {
const redirectLocation = error.response.headers.location
const redirectLocation = error.headers?.location
if (!redirectLocation) {
throw new Error('Redirect missing Location header')
}
@@ -302,23 +301,22 @@ export async function getWithPermittedRedirects(
redirectChecker,
depth + 1,
)
} else {
}
// Return redirect information to the caller
return {
type: 'redirect',
originalUrl: url,
redirectUrl,
statusCode: error.response.status,
}
statusCode: error.status,
}
}
// Detect egress proxy blocks: the proxy returns 403 with
// X-Proxy-Error: blocked-by-allowlist when egress is restricted
if (
axios.isAxiosError(error) &&
error.response?.status === 403 &&
error.response.headers['x-proxy-error'] === 'blocked-by-allowlist'
isHttpError(error) &&
error.status === 403 &&
error.headers?.['x-proxy-error'] === 'blocked-by-allowlist'
) {
const hostname = new URL(url).hostname
throw new EgressBlockedError(hostname)
@@ -329,7 +327,7 @@ export async function getWithPermittedRedirects(
}
function isRedirectInfo(
response: AxiosResponse<ArrayBuffer> | RedirectInfo,
response: { data: ArrayBuffer; status: number; headers: Record<string, string> } | RedirectInfo,
): response is RedirectInfo {
return 'type' in response && response.type === 'redirect'
}

25
tsconfig.json Normal file
View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"lib": ["ESNext", "DOM"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "Bundler",
"moduleDetection": "force",
"allowImportingTsExtensions": true,
"noEmit": true,
"composite": true,
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": ["bun-types"],
"baseUrl": ".",
"paths": {
"src/*": ["./*"]
}
},
"include": ["**/*.ts", "**/*.tsx", "**/*.js", "**/*.jsx"]
}

View File

@@ -11,6 +11,7 @@ import {
} from 'src/services/analytics/index.js'
import { getModelStrings } from 'src/utils/model/modelStrings.js'
import { getAPIProvider } from 'src/utils/model/providers.js'
import { FEEDBACK_CHANNEL } from 'src/constants/product.js'
import {
getIsNonInteractiveSession,
preferThirdPartyAuthentication,
@@ -547,7 +548,7 @@ async function _executeApiKeyHelper(
const hasTrust = checkHasTrustDialogAccepted()
if (!hasTrust && !isNonInteractiveSession) {
const error = new Error(
`Security: apiKeyHelper executed before workspace trust is confirmed. If you see this message, post in ${MACRO.FEEDBACK_CHANNEL}.`,
`Security: apiKeyHelper executed before workspace trust is confirmed. If you see this message, post in ${FEEDBACK_CHANNEL}.`,
)
logAntError('apiKeyHelper invoked before trust check', error)
logEvent('tengu_apiKeyHelper_missing_trust11', {})
@@ -622,7 +623,7 @@ async function runAwsAuthRefresh(): Promise<boolean> {
const hasTrust = checkHasTrustDialogAccepted()
if (!hasTrust && !getIsNonInteractiveSession()) {
const error = new Error(
`Security: awsAuthRefresh executed before workspace trust is confirmed. If you see this message, post in ${MACRO.FEEDBACK_CHANNEL}.`,
`Security: awsAuthRefresh executed before workspace trust is confirmed. If you see this message, post in ${FEEDBACK_CHANNEL}.`,
)
logAntError('awsAuthRefresh invoked before trust check', error)
logEvent('tengu_awsAuthRefresh_missing_trust', {})
@@ -719,7 +720,7 @@ async function getAwsCredsFromCredentialExport(): Promise<{
const hasTrust = checkHasTrustDialogAccepted()
if (!hasTrust && !getIsNonInteractiveSession()) {
const error = new Error(
`Security: awsCredentialExport executed before workspace trust is confirmed. If you see this message, post in ${MACRO.FEEDBACK_CHANNEL}.`,
`Security: awsCredentialExport executed before workspace trust is confirmed. If you see this message, post in ${FEEDBACK_CHANNEL}.`,
)
logAntError('awsCredentialExport invoked before trust check', error)
logEvent('tengu_awsCredentialExport_missing_trust', {})
@@ -886,7 +887,7 @@ async function runGcpAuthRefresh(): Promise<boolean> {
const hasTrust = checkHasTrustDialogAccepted()
if (!hasTrust && !getIsNonInteractiveSession()) {
const error = new Error(
`Security: gcpAuthRefresh executed before workspace trust is confirmed. If you see this message, post in ${MACRO.FEEDBACK_CHANNEL}.`,
`Security: gcpAuthRefresh executed before workspace trust is confirmed. If you see this message, post in ${FEEDBACK_CHANNEL}.`,
)
logAntError('gcpAuthRefresh invoked before trust check', error)
logEvent('tengu_gcpAuthRefresh_missing_trust', {})

View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import { constants as fsConstants } from 'fs'
import { access, writeFile } from 'fs/promises'
import { homedir } from 'os'
@@ -16,6 +15,7 @@ import { ClaudeError, getErrnoCode, isENOENT } from './errors.js'
import { execFileNoThrowWithCwd } from './execFileNoThrow.js'
import { getFsImplementation } from './fsOperations.js'
import { gracefulShutdownSync } from './gracefulShutdown.js'
import { isHttpError, nativeRequest } from './http.js'
import { logError } from './log.js'
import { gte, lt } from './semver.js'
import { getInitialSettings } from './settings/settings.js'
@@ -79,11 +79,11 @@ export async function assertMinVersion(): Promise<void> {
if (
versionConfig.minVersion &&
lt(MACRO.VERSION, versionConfig.minVersion)
lt('0.1.0-alpha', versionConfig.minVersion)
) {
// biome-ignore lint/suspicious/noConsole:: intentional console output
console.error(`
It looks like your version of Claude Code (${MACRO.VERSION}) needs an update.
It looks like your version of Claude Code (0.1.0-alpha) needs an update.
A newer version (${versionConfig.minVersion} or higher) is required to continue.
To update, please run:
@@ -325,7 +325,7 @@ export async function getLatestVersion(
// which could be maliciously crafted to redirect to an attacker's registry
const result = await execFileNoThrowWithCwd(
'npm',
['view', `${MACRO.PACKAGE_URL}@${npmTag}`, 'version', '--prefer-online'],
['view', `@anthropic-ai/claude-code@${npmTag}`, 'version', '--prefer-online'],
{ abortSignal: AbortSignal.timeout(5000), cwd: homedir() },
)
if (result.code !== 0) {
@@ -356,7 +356,7 @@ export async function getNpmDistTags(): Promise<NpmDistTags> {
// Run from home directory to avoid reading project-level .npmrc
const result = await execFileNoThrowWithCwd(
'npm',
['view', MACRO.PACKAGE_URL, 'dist-tags', '--json', '--prefer-online'],
['view', '@anthropic-ai/claude-code', 'dist-tags', '--json', '--prefer-online'],
{ abortSignal: AbortSignal.timeout(5000), cwd: homedir() },
)
@@ -385,7 +385,8 @@ export async function getLatestVersionFromGcs(
channel: ReleaseChannel,
): Promise<string | null> {
try {
const response = await axios.get(`${GCS_BUCKET_URL}/${channel}`, {
const response = await nativeRequest<string>(`${GCS_BUCKET_URL}/${channel}`, {
method: 'GET',
timeout: 5000,
responseType: 'text',
})
@@ -425,14 +426,14 @@ export async function getVersionHistory(limit: number): Promise<string[]> {
// Use native package URL when available to ensure we only show versions
// that have native binaries (not all JS package versions have native builds)
const packageUrl = MACRO.NATIVE_PACKAGE_URL ?? MACRO.PACKAGE_URL
const packageUrl = '@anthropic-ai/claude-code'
// Run from home directory to avoid reading project-level .npmrc
const result = await execFileNoThrowWithCwd(
'npm',
['view', packageUrl, 'versions', '--json', '--prefer-online'],
// Longer timeout for version list
{ abortSignal: AbortSignal.timeout(30000), cwd: homedir() },
{ abortSignal: AbortSignal.timeout(30000), cwd: homedir() },
)
if (result.code !== 0) {
@@ -464,7 +465,7 @@ export async function installGlobalPackage(
logEvent('tengu_auto_updater_lock_contention', {
pid: process.pid,
currentVersion:
MACRO.VERSION as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
'0.1.0-alpha' as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
})
return 'in_progress'
}
@@ -476,7 +477,7 @@ export async function installGlobalPackage(
logError(new Error('Windows NPM detected in WSL environment'))
logEvent('tengu_auto_updater_windows_npm_in_wsl', {
currentVersion:
MACRO.VERSION as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
'0.1.0-alpha' as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
})
// biome-ignore lint/suspicious/noConsole:: intentional console output
console.error(`
@@ -500,8 +501,8 @@ To fix this issue:
// Use specific version if provided, otherwise use latest
const packageSpec = specificVersion
? `${MACRO.PACKAGE_URL}@${specificVersion}`
: MACRO.PACKAGE_URL
? `@anthropic-ai/claude-code@${specificVersion}`
: '@anthropic-ai/claude-code'
// Run from home directory to avoid reading project-level .npmrc/.bunfig.toml
// which could be maliciously crafted to redirect to an attacker's registry

View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import { getOauthConfig } from 'src/constants/oauth.js'
import { getOrganizationUUID } from 'src/services/oauth/client.js'
import { getFeatureValue_CACHED_MAY_BE_STALE } from '../../../services/analytics/growthbook.js'
@@ -12,6 +11,7 @@ import { logForDebugging } from '../../debug.js'
import { detectCurrentRepository } from '../../detectRepository.js'
import { errorMessage } from '../../errors.js'
import { findGitRoot, getIsClean } from '../../git.js'
import { isHttpError, nativeRequest } from '../../http.js'
import { getOAuthHeaders } from '../../teleport/api.js'
import { fetchEnvironments } from '../../teleport/environments.js'
@@ -105,7 +105,7 @@ export async function checkGithubAppInstalled(
logForDebugging(`Checking GitHub app installation for ${owner}/${repo}`)
const response = await axios.get<{
const response = await nativeRequest<{
repo: {
name: string
owner: { login: string }
@@ -116,6 +116,7 @@ export async function checkGithubAppInstalled(
relay_enabled: boolean
} | null
}>(url, {
method: 'GET',
headers,
timeout: 15000,
signal,
@@ -142,8 +143,8 @@ export async function checkGithubAppInstalled(
return false
} catch (error) {
// 4XX errors typically mean app is not installed or repo not accessible
if (axios.isAxiosError(error)) {
const status = error.response?.status
if (isHttpError(error)) {
const status = error.status
if (status && status >= 400 && status < 500) {
logForDebugging(
`checkGithubAppInstalled: Got ${status} error, app likely not installed on ${owner}/${repo}`,
@@ -183,7 +184,8 @@ export async function checkGithubTokenSynced(): Promise<boolean> {
logForDebugging('Checking if GitHub token is synced via web-setup')
const response = await axios.get(url, {
const response = await nativeRequest<any>(url, {
method: 'GET',
headers,
timeout: 15000,
})
@@ -195,8 +197,8 @@ export async function checkGithubTokenSynced(): Promise<boolean> {
)
return synced
} catch (error) {
if (axios.isAxiosError(error)) {
const status = error.response?.status
if (isHttpError(error)) {
const status = error.status
if (status && status >= 400 && status < 500) {
logForDebugging(
`checkGithubTokenSynced: Got ${status}, token not synced`,

View File

@@ -6,6 +6,7 @@ import { isRunningWithBun } from './bundledMode.js'
import { getClaudeConfigHomeDir, isEnvTruthy } from './envUtils.js'
import { findExecutable } from './findExecutable.js'
import { getFsImplementation } from './fsOperations.js'
import { nativeRequest } from './http.js'
import { which } from './which.js'
type Platform = 'win32' | 'darwin' | 'linux'
@@ -27,9 +28,10 @@ export const getGlobalClaudeFile = memoize((): string => {
const hasInternetAccess = memoize(async (): Promise<boolean> => {
try {
const { default: axiosClient } = await import('axios')
await axiosClient.head('http://1.1.1.1', {
signal: AbortSignal.timeout(1000),
await nativeRequest('http://1.1.1.1', {
method: 'HEAD',
timeout: 1000,
responseType: 'none',
})
return true
} catch {

View File

@@ -10,7 +10,7 @@
* log.ts has NO heavy dependencies - events are queued until this sink is attached.
*/
import axios from 'axios'
import { isHttpError } from './http.js'
import { dirname, join } from 'path'
import { getSessionId } from '../bootstrap/state.js'
import { createBufferedWriter } from './bufferedWriter.js'
@@ -152,19 +152,21 @@ function extractServerMessage(data: unknown): string | undefined {
function logErrorImpl(error: Error): void {
const errorStr = error.stack || error.message
// Enrich axios errors with request URL, status, and server message for debugging
// Enrich HTTP errors with status and server message for debugging
let context = ''
if (axios.isAxiosError(error) && error.config?.url) {
const parts = [`url=${error.config.url}`]
if (error.response?.status !== undefined) {
parts.push(`status=${error.response.status}`)
if (isHttpError(error)) {
const parts: string[] = []
if (error.status !== undefined) {
parts.push(`status=${error.status}`)
}
const serverMessage = extractServerMessage(error.response?.data)
const serverMessage = extractServerMessage(error.data)
if (serverMessage) {
parts.push(`body=${serverMessage}`)
}
if (parts.length > 0) {
context = `[${parts.join(',')}] `
}
}
logForDebugging(`${error.name}: ${context}${errorStr}`, { level: 'error' })

View File

@@ -194,43 +194,33 @@ export function isFsInaccessible(e: unknown): e is NodeJS.ErrnoException {
)
}
export type AxiosErrorKind =
export type HttpErrorKind =
| 'auth' // 401/403 — caller typically sets skipRetry
| 'timeout' // ECONNABORTED
| 'timeout' // 408 or ECONNABORTED
| 'network' // ECONNREFUSED/ENOTFOUND
| 'http' // other axios error (may have status)
| 'other' // not an axios error
| 'http' // other http error (may have status)
| 'other' // not an http error
/**
* Classify a caught error from an axios request into one of a few buckets.
* Replaces the ~20-line isAxiosError → 401/403 → ECONNABORTED → ECONNREFUSED
* chain duplicated across sync-style services (settingsSync, policyLimits,
* remoteManagedSettings, teamMemorySync).
*
* Checks the `.isAxiosError` marker property directly (same as
* axios.isAxiosError()) to keep this module dependency-free.
* Classify a caught error from a request into one of a few buckets.
*/
export function classifyAxiosError(e: unknown): {
kind: AxiosErrorKind
export function classifyHttpError(e: unknown): {
kind: HttpErrorKind
status?: number
message: string
} {
const message = errorMessage(e)
if (
!e ||
typeof e !== 'object' ||
!('isAxiosError' in e) ||
!e.isAxiosError
) {
if (!e || typeof e !== 'object' || !('name' in e) || e.name !== 'HttpError') {
return { kind: 'other', message }
}
const err = e as {
response?: { status?: number }
status?: number
code?: string
}
const status = err.response?.status
const status = err.status
if (status === 401 || status === 403) return { kind: 'auth', status, message }
if (err.code === 'ECONNABORTED') return { kind: 'timeout', status, message }
if (status === 408 || err.code === 'ECONNABORTED')
return { kind: 'timeout', status, message }
if (err.code === 'ECONNREFUSED' || err.code === 'ENOTFOUND') {
return { kind: 'network', status, message }
}

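A small usage sketch of the classifier defined above (the kinds and return shape come from the code; the per-bucket handling is illustrative):

function describeRequestFailure(e: unknown): string {
  const { kind, status, message } = classifyHttpError(e)
  switch (kind) {
    case 'auth':
      return `authentication failed (HTTP ${status})` // 401/403; callers typically skip retries
    case 'timeout':
      return 'request timed out'
    case 'network':
      return 'network unreachable (connection refused or DNS failure)'
    case 'http':
      return `server error${status ? ` (HTTP ${status})` : ''}: ${message}`
    default:
      return message // 'other': not an HttpError at all
  }
}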
View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import { getOauthConfig, OAUTH_BETA_HEADER } from 'src/constants/oauth.js'
import { getFeatureValue_CACHED_MAY_BE_STALE } from 'src/services/analytics/growthbook.js'
import {
@@ -20,6 +19,7 @@ import { isInBundledMode } from './bundledMode.js'
import { getGlobalConfig, saveGlobalConfig } from './config.js'
import { logForDebugging } from './debug.js'
import { isEnvTruthy } from './envUtils.js'
import { isHttpError, nativeRequest } from './http.js'
import {
getDefaultMainLoopModelSetting,
isOpus1mMergeEnabled,
@@ -376,7 +376,10 @@ async function fetchFastModeStatus(
}
: { 'x-api-key': auth.apiKey }
const response = await axios.get<FastModeResponse>(endpoint, { headers })
const response = await nativeRequest<FastModeResponse>(endpoint, {
method: 'GET',
headers,
})
return response.data
}
@@ -465,11 +468,11 @@ export async function prefetchFastModeStatus(): Promise<void> {
status = await fetchWithCurrentAuth()
} catch (err) {
const isAuthError =
axios.isAxiosError(err) &&
(err.response?.status === 401 ||
(err.response?.status === 403 &&
typeof err.response?.data === 'string' &&
err.response.data.includes('OAuth token has been revoked')))
isHttpError(err) &&
(err.status === 401 ||
(err.status === 403 &&
typeof err.data === 'string' &&
(err.data as string).includes('OAuth token has been revoked')))
if (isAuthError) {
const failedAccessToken = getClaudeAIOAuthTokens()?.accessToken
if (failedAccessToken) {

View File

@@ -62,6 +62,8 @@ export function computeFingerprint(
return hash.slice(0, 3)
}
import { VERSION } from 'src/constants/product.js'
/**
* Computes fingerprint from the first user message.
*
@@ -72,5 +74,5 @@ export function computeFingerprintFromMessages(
messages: (UserMessage | AssistantMessage)[],
): string {
const firstMessageText = extractFirstMessageText(messages)
return computeFingerprint(firstMessageText, MACRO.VERSION)
return computeFingerprint(firstMessageText, VERSION)
}

View File

@@ -1,8 +1,8 @@
import axios from 'axios'
import type { HookEvent } from 'src/entrypoints/agentSdkTypes.js'
import { Agent } from 'undici'
import { createCombinedAbortSignal } from '../combinedAbortSignal.js'
import { logForDebugging } from '../debug.js'
import { errorMessage } from '../errors.js'
import { nativeRequest } from '../http.js'
import { getProxyUrl, shouldBypassProxy } from '../proxy.js'
// Import as namespace so spyOn works in tests (direct imports bypass spies)
import * as settingsModule from '../settings/settings.js'
@@ -122,7 +122,7 @@ function interpolateEnvVars(
*/
export async function execHttpHook(
hook: HttpHook,
_hookEvent: HookEvent,
_hookEvent: string,
jsonInput: string,
signal?: AbortSignal,
): Promise<{
@@ -186,34 +186,39 @@ export async function execHttpHook(
getProxyUrl() !== undefined &&
!shouldBypassProxy(hook.url)
let dispatcher: Agent | undefined
if (sandboxProxy) {
logForDebugging(
`Hooks: HTTP hook POST to ${hook.url} (via sandbox proxy :${sandboxProxy.port})`,
)
// For the sandbox proxy we'd ideally use a custom dispatcher; for now we
// rely on the global dispatcher (the axios implementation passed
// `proxy: sandboxProxy` here).
} else if (envProxyActive) {
logForDebugging(
`Hooks: HTTP hook POST to ${hook.url} (via env-var proxy)`,
)
} else {
logForDebugging(`Hooks: HTTP hook POST to ${hook.url}`)
}
const response = await axios.post<string>(hook.url, jsonInput, {
headers,
signal: combinedSignal,
responseType: 'text',
validateStatus: () => true,
maxRedirects: 0,
// Explicit false prevents axios's own env-var proxy detection; when an
// env-var proxy is configured, the global axios interceptor installed
// by configureGlobalAgents() handles it via httpsAgent instead.
proxy: sandboxProxy ?? false,
// SSRF guard: validate resolved IPs, block private/link-local ranges
// (but allow loopback for local dev). Skipped when any proxy is in
// use — the proxy performs DNS for the target, and applying the
// guard would instead validate the proxy's own IP, breaking
// connections to corporate proxies on private networks.
lookup: sandboxProxy || envProxyActive ? undefined : ssrfGuardedLookup,
dispatcher = new Agent({
connect: {
lookup: ssrfGuardedLookup as any,
},
})
}
const response = await nativeRequest<string>(hook.url, {
method: 'POST',
headers,
body: jsonInput,
signal: combinedSignal,
responseType: 'text',
dispatcher,
})
cleanup()
@@ -224,7 +229,7 @@ export async function execHttpHook(
)
return {
ok: response.status >= 200 && response.status < 300,
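// nativeRequest throws HttpError for non-2xx responses, so reaching this return implies success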
ok: true,
statusCode: response.status,
body,
}

View File

@@ -1,7 +1,12 @@
import type { AddressFamily, LookupAddress as AxiosLookupAddress } from 'axios'
import { lookup as dnsLookup } from 'dns'
import { isIP } from 'net'
export type AddressFamily = 4 | 6
export type LookupAddress = {
address: string
family: AddressFamily
}
/**
* SSRF guard for HTTP hooks.
*
@@ -210,16 +215,14 @@ function extractMappedIPv4(addr: string): string | null {
* rebinding window between validation and connection.
*
* IP literals in the hostname are validated directly without DNS.
*
* Signature matches axios's `lookup` config option (not Node's dns.lookup).
*/
export function ssrfGuardedLookup(
hostname: string,
options: object,
options: { all?: boolean },
callback: (
err: Error | null,
address: AxiosLookupAddress | AxiosLookupAddress[],
family?: AddressFamily,
address: LookupAddress | LookupAddress[],
family?: AddressFamily,
) => void,
): void {
const wantsAll = 'all' in options && options.all === true

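A usage sketch showing the callback shapes the guard produces (the shapes follow the LookupAddress/AddressFamily types defined above; the hostname and resolved addresses are illustrative):

// Single-address lookup: callback receives one LookupAddress plus the family
ssrfGuardedLookup('example.com', {}, (err, address, family) => {
  if (err) throw err
  console.log(address, family) // e.g. { address: '93.184.216.34', family: 4 }, 4
})

// `all: true`: callback receives every resolved LookupAddress
ssrfGuardedLookup('example.com', { all: true }, (err, addresses) => {
  if (err) throw err
  console.log(addresses) // e.g. [{ address: '93.184.216.34', family: 4 }]
})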
View File

@@ -2,7 +2,6 @@
* HTTP utility constants and helpers
*/
import axios from 'axios'
import { OAUTH_BETA_HEADER } from '../constants/oauth.js'
import {
getAnthropicApiKey,
@@ -31,7 +30,7 @@ export function getUserAgent(): string {
// so the read picks up the same setWorkload() value as getAttributionHeader.
const workload = getWorkload()
const workloadSuffix = workload ? `, workload/${workload}` : ''
return `claude-cli/${MACRO.VERSION} (${process.env.USER_TYPE}, ${process.env.CLAUDE_CODE_ENTRYPOINT ?? 'cli'}${agentSdkVersion}${clientApp}${workloadSuffix})`
return `claude-cli/0.1.0-alpha (${process.env.USER_TYPE}, ${process.env.CLAUDE_CODE_ENTRYPOINT ?? 'cli'}${agentSdkVersion}${clientApp}${workloadSuffix})`
}
export function getMCPUserAgent(): string {
@@ -46,7 +45,7 @@ export function getMCPUserAgent(): string {
parts.push(`client-app/${process.env.CLAUDE_AGENT_SDK_CLIENT_APP}`)
}
const suffix = parts.length > 0 ? ` (${parts.join(', ')})` : ''
return `claude-code/${MACRO.VERSION}${suffix}`
return `claude-code/0.1.0-alpha${suffix}`
}
// User-Agent for WebFetch requests to arbitrary sites. `Claude-User` is
@@ -98,6 +97,118 @@ export function getAuthHeaders(): AuthHeaders {
}
}
export class HttpError extends Error {
constructor(
message: string,
public status?: number,
public data?: any,
public headers?: Record<string, string>,
public code?: string,
) {
super(message)
this.name = 'HttpError'
}
}
export function isHttpError(error: unknown): error is HttpError {
return error instanceof HttpError
}
export type NativeRequestOptions = {
method?: 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH' | 'HEAD'
headers?: Record<string, string>
body?: any
timeout?: number
signal?: AbortSignal
responseType?: 'json' | 'arraybuffer' | 'text' | 'none'
redirect?: 'follow' | 'manual' | 'error' // native fetch follows redirects by default
dispatcher?: any // undici.Dispatcher
}
export async function nativeRequest<T>(
url: string,
options: NativeRequestOptions = {},
): Promise<{ data: T; status: number; headers: Record<string, string> }> {
const {
method = 'GET',
headers = {},
body,
timeout,
responseType = 'json',
dispatcher,
} = options
const controller = new AbortController()
const timeoutId = timeout
? setTimeout(() => controller.abort(), timeout)
: null
try {
const fetchOptions: RequestInit = {
method,
headers: {
...headers,
},
// Combine the caller's signal with the timeout signal so either can abort the request
signal:
  options.signal && timeout
    ? AbortSignal.any([options.signal, controller.signal])
    : options.signal || controller.signal,
...(options.redirect ? { redirect: options.redirect } : {}),
...(dispatcher ? { dispatcher } : {}),
} as RequestInit
if (body !== undefined && body !== null) {
  if (
    typeof body === 'string' ||
    body instanceof Buffer ||
    body instanceof Uint8Array ||
    body instanceof Blob ||
    body instanceof FormData
  ) {
    // Pre-serialized bodies (JSON strings, multipart payloads, binary data) pass through as-is
    fetchOptions.body = body as any
  } else {
    // Plain objects/arrays are JSON-encoded with an explicit Content-Type
    if (!fetchOptions.headers) fetchOptions.headers = {}
    ;(fetchOptions.headers as any)['Content-Type'] = 'application/json'
    fetchOptions.body = JSON.stringify(body)
  }
}
const response = await fetch(url, fetchOptions)
let responseData: any
if (responseType === 'arraybuffer') {
responseData = await response.arrayBuffer()
} else if (responseType === 'text') {
responseData = await response.text()
} else if (responseType === 'none') {
responseData = null
} else {
responseData = await response.json().catch(() => null)
}
const responseHeaders: Record<string, string> = {}
response.headers.forEach((value, key) => {
responseHeaders[key] = value
})
if (!response.ok) {
throw new HttpError(
`HTTP Error ${response.status}`,
response.status,
responseData,
responseHeaders,
)
}
return {
data: responseData as T,
status: response.status,
headers: responseHeaders,
}
} catch (error) {
if (error instanceof Error && error.name === 'AbortError') {
throw new HttpError('Request aborted/timeout', 408, null, {}, 'ECONNABORTED')
}
throw error
} finally {
if (timeoutId) clearTimeout(timeoutId)
}
}
/**
* Wrapper that handles OAuth 401 errors by force-refreshing the token and
* retrying once. Addresses clock drift scenarios where the local expiration
@@ -119,14 +230,14 @@ export async function withOAuth401Retry<T>(
try {
return await request()
} catch (err) {
if (!axios.isAxiosError(err)) throw err
const status = err.response?.status
if (!isHttpError(err)) throw err
const status = err.status
const isAuthError =
status === 401 ||
(opts?.also403Revoked &&
status === 403 &&
typeof err.response?.data === 'string' &&
err.response.data.includes('OAuth token has been revoked'))
typeof err.data === 'string' &&
err.data.includes('OAuth token has been revoked'))
if (!isAuthError) throw err
const failedAccessToken = getClaudeAIOAuthTokens()?.accessToken
if (!failedAccessToken) throw err

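A caller-side sketch tying these helpers together (the nativeRequest options and HttpError fields are as defined above; the withOAuth401Retry call shape is assumed from its signature in this hunk, and the URL, headers, and return shape are illustrative):

import { isHttpError, nativeRequest, withOAuth401Retry } from './http.js'

async function fetchProfileSketch(url: string, headers: Record<string, string>) {
  return withOAuth401Retry(async () => {
    try {
      const { data, status } = await nativeRequest<{ name: string }>(url, {
        method: 'GET',
        headers,
        timeout: 10_000,
        responseType: 'json',
      })
      return { status, name: data.name }
    } catch (error) {
      // Timeouts and aborts surface as HttpError with code 'ECONNABORTED'
      if (isHttpError(error) && error.code === 'ECONNABORTED') {
        throw new Error(`request to ${url} timed out`)
      }
      throw error
    }
  })
}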
View File

@@ -1,6 +1,6 @@
import type { Client } from '@modelcontextprotocol/sdk/client/index.js'
import axios from 'axios'
import { execa } from 'execa'
import { chmod, writeFile } from 'fs/promises'
import capitalize from 'lodash-es/capitalize.js'
import memoize from 'lodash-es/memoize.js'
import { createConnection } from 'net'
@@ -23,6 +23,7 @@ import {
} from './execFileNoThrow.js'
import { getFsImplementation } from './fsOperations.js'
import { getAncestorPidsAsync } from './genericProcessUtils.js'
import { isHttpError, nativeRequest } from './http.js'
import { isJetBrainsPluginInstalledCached } from './jetbrains.js'
import { logError } from './log.js'
import { getPlatform } from './platform.js'
@@ -925,7 +926,7 @@ function getInstallationEnv(): NodeJS.ProcessEnv | undefined {
}
function getClaudeCodeVersion() {
return MACRO.VERSION
return '0.1.0-alpha'
}
async function getInstalledVSCodeExtensionVersion(
@@ -1424,10 +1425,12 @@ async function installFromArtifactory(command: string): Promise<string> {
'https://artifactory.infra.ant.dev/artifactory/armorcode-claude-code-internal/claude-vscode-releases/stable'
try {
const versionResponse = await axios.get(versionUrl, {
const versionResponse = await nativeRequest<string>(versionUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${authToken}`,
},
responseType: 'text',
})
const version = versionResponse.data.trim()
@@ -1443,20 +1446,16 @@ async function installFromArtifactory(command: string): Promise<string> {
)
try {
const vsixResponse = await axios.get(vsixUrl, {
const vsixResponse = await nativeRequest<ArrayBuffer>(vsixUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${authToken}`,
},
responseType: 'stream',
responseType: 'arraybuffer',
})
// Write the downloaded file to disk
const writeStream = getFsImplementation().createWriteStream(tempVsixPath)
await new Promise<void>((resolve, reject) => {
vsixResponse.data.pipe(writeStream)
writeStream.on('finish', resolve)
writeStream.on('error', reject)
})
await writeFile(tempVsixPath, Buffer.from(vsixResponse.data))
// Install the .vsix file
// Add delay to prevent code command crashes
@@ -1484,7 +1483,7 @@ async function installFromArtifactory(command: string): Promise<string> {
}
}
} catch (error) {
if (axios.isAxiosError(error)) {
if (isHttpError(error)) {
throw new Error(
`Failed to fetch extension version from artifactory: ${error.message}`,
)

View File

@@ -7,7 +7,7 @@
*/
import { feature } from 'bun:bundle'
import axios from 'axios'
import { isHttpError, nativeRequest } from '../http.js'
import { createHash } from 'crypto'
import { chmod, writeFile } from 'fs/promises'
import { join } from 'path'
@@ -78,10 +78,14 @@ export async function getLatestVersionFromBinaryRepo(
): Promise<string> {
const startTime = Date.now()
try {
const response = await axios.get(`${baseUrl}/${channel}`, {
const response = await nativeRequest<string>(`${baseUrl}/${channel}`, {
timeout: 30000,
responseType: 'text',
...authConfig,
...(authConfig?.auth ? {
headers: {
Authorization: `Basic ${Buffer.from(`${authConfig.auth.username}:${authConfig.auth.password}`).toString('base64')}`,
},
} : {}),
})
const latencyMs = Date.now() - startTime
logEvent('tengu_version_check_success', {
@@ -91,10 +95,7 @@ export async function getLatestVersionFromBinaryRepo(
} catch (error) {
const latencyMs = Date.now() - startTime
const errorMessage = error instanceof Error ? error.message : String(error)
let httpStatus: number | undefined
if (axios.isAxiosError(error) && error.response) {
httpStatus = error.response.status
}
const httpStatus = isHttpError(error) ? error.status : undefined
logEvent('tengu_version_check_failure', {
latency_ms: latencyMs,
@@ -318,22 +319,18 @@ async function downloadAndVerifyBinary(
// Start the stall timer before the request
resetStallTimer()
const response = await axios.get(binaryUrl, {
const response = await nativeRequest<ArrayBuffer>(binaryUrl, {
timeout: 5 * 60000, // 5 minute total timeout
responseType: 'arraybuffer',
signal: controller.signal,
onDownloadProgress: () => {
// Reset stall timer on each chunk of data received
resetStallTimer()
},
...requestConfig,
...(requestConfig?.headers ? { headers: requestConfig.headers } : {}),
})
clearStallTimer()
// Verify checksum
const hash = createHash('sha256')
hash.update(response.data)
hash.update(Buffer.from(response.data))
const actualChecksum = hash.digest('hex')
if (actualChecksum !== expectedChecksum) {
@@ -351,8 +348,8 @@ async function downloadAndVerifyBinary(
} catch (error) {
clearStallTimer()
// Check if this was a stall timeout (axios wraps abort signals in CanceledError)
const isStallTimeout = axios.isCancel(error)
// Check if this was a stall timeout (nativeRequest converts the abort into
// an HttpError with code 'ECONNABORTED'; a raw AbortError is handled too)
const isStallTimeout =
  (isHttpError(error) && error.code === 'ECONNABORTED') ||
  (error instanceof Error && error.name === 'AbortError')
if (isStallTimeout) {
lastError = new StallTimeoutError()
@@ -403,22 +400,23 @@ export async function downloadVersionFromBinaryRepo(
// Fetch manifest to get checksum
let manifest
try {
const manifestResponse = await axios.get(
const manifestResponse = await nativeRequest(
`${baseUrl}/${version}/manifest.json`,
{
timeout: 10000,
responseType: 'json',
...authConfig,
...(authConfig?.auth ? {
headers: {
Authorization: `Basic ${Buffer.from(`${authConfig.auth.username}:${authConfig.auth.password}`).toString('base64')}`,
},
} : {}),
},
)
manifest = manifestResponse.data
} catch (error) {
const latencyMs = Date.now() - startTime
const errorMessage = error instanceof Error ? error.message : String(error)
let httpStatus: number | undefined
if (axios.isAxiosError(error) && error.response) {
httpStatus = error.response.status
}
const httpStatus = isHttpError(error) ? error.status : undefined
logEvent('tengu_binary_manifest_fetch_failure', {
latency_ms: latencyMs,
@@ -466,10 +464,7 @@ export async function downloadVersionFromBinaryRepo(
} catch (error) {
const latencyMs = Date.now() - startTime
const errorMessage = error instanceof Error ? error.message : String(error)
let httpStatus: number | undefined
if (axios.isAxiosError(error) && error.response) {
httpStatus = error.response.status
}
const httpStatus = isHttpError(error) ? error.status : undefined
logEvent('tengu_binary_download_failure', {
latency_ms: latencyMs,

View File

@@ -1,23 +1,7 @@
/**
* Telemetry for plugin/marketplace fetches that hit the network.
*
* Added for inc-5046 (GitHub complained about claude-plugins-official load).
* Before this, fetch operations only had logForDebugging — no way to measure
* actual network volume. This surfaces what's hitting GitHub vs GCS vs
* user-hosted so we can see the GCS migration take effect and catch future
* hot-path regressions before GitHub emails us again.
*
* Volume: these fire at startup (install-counts 24h-TTL)
* and on explicit user action (install/update). NOT per-interaction. Similar
* envelope to tengu_binary_download_*.
* Telemetry for plugin/marketplace fetches - DISABLED.
*/
import {
logEvent,
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS as SafeString,
} from '../../services/analytics/index.js'
import { OFFICIAL_MARKETPLACE_NAME } from './officialMarketplace.js'
export type PluginFetchSource =
| 'install_counts'
| 'marketplace_clone'
@@ -28,82 +12,32 @@ export type PluginFetchSource =
export type PluginFetchOutcome = 'success' | 'failure' | 'cache_hit'
// Allowlist of public hosts we report by name. Anything else (enterprise
// git, self-hosted, internal) is bucketed as 'other' — we don't want
// internal hostnames (git.mycorp.internal) landing in telemetry. Bounded
// cardinality also keeps the dashboard host-breakdown tractable.
const KNOWN_PUBLIC_HOSTS = new Set([
'github.com',
'raw.githubusercontent.com',
'objects.githubusercontent.com',
'gist.githubusercontent.com',
'gitlab.com',
'bitbucket.org',
'codeberg.org',
'dev.azure.com',
'ssh.dev.azure.com',
'storage.googleapis.com', // GCS — where Dickson's migration points
])
/**
* Extract hostname from a URL or git spec and bucket to the allowlist.
* Handles `https://host/...`, `git@host:path`, `ssh://host/...`.
* Returns a known public host, 'other' (parseable but not allowlisted —
* don't leak private hostnames), or 'unknown' (unparseable / local path).
* Extract hostname from a URL or git spec - DISABLED.
*/
function extractHost(urlOrSpec: string): string {
let host: string
const scpMatch = /^[^@/]+@([^:/]+):/.exec(urlOrSpec)
if (scpMatch) {
host = scpMatch[1]!
} else {
try {
host = new URL(urlOrSpec).hostname
} catch {
function extractHost(_urlOrSpec: string): string {
return 'unknown'
}
}
const normalized = host.toLowerCase()
return KNOWN_PUBLIC_HOSTS.has(normalized) ? normalized : 'other'
}
/**
* True if the URL/spec points at anthropics/claude-plugins-official — the
* repo GitHub complained about. Lets the dashboard separate "our problem"
* traffic from user-configured marketplaces.
* True if the URL/spec points at anthropics/claude-plugins-official - DISABLED.
*/
function isOfficialRepo(urlOrSpec: string): boolean {
return urlOrSpec.includes(`anthropics/${OFFICIAL_MARKETPLACE_NAME}`)
function isOfficialRepo(_urlOrSpec: string): boolean {
return false
}
export function logPluginFetch(
source: PluginFetchSource,
urlOrSpec: string | undefined,
outcome: PluginFetchOutcome,
durationMs: number,
errorKind?: string,
_source: PluginFetchSource,
_urlOrSpec: string | undefined,
_outcome: PluginFetchOutcome,
_durationMs: number,
_errorKind?: string,
): void {
// String values are bounded enums / hostname-only — no code, no paths,
// no raw error messages. Same privacy envelope as tengu_web_fetch_host.
logEvent('tengu_plugin_remote_fetch', {
source: source as SafeString,
host: (urlOrSpec ? extractHost(urlOrSpec) : 'unknown') as SafeString,
is_official: urlOrSpec ? isOfficialRepo(urlOrSpec) : false,
outcome: outcome as SafeString,
duration_ms: Math.round(durationMs),
...(errorKind && { error_kind: errorKind as SafeString }),
})
// Telemetry disabled
}
/**
* Classify an error into a stable bucket for the error_kind field. Keeps
* cardinality bounded — raw error messages would explode dashboard grouping.
*
* Handles both axios Error objects (Node.js error codes like ENOTFOUND) and
* git stderr strings (human phrases like "Could not resolve host"). DNS
* checked BEFORE timeout because gitClone's error enhancement at
* marketplaceManager.ts:~950 rewrites DNS failures to include the word
* "timeout" — ordering the other way would misclassify git DNS as timeout.
* Classify an error into a stable bucket for the error_kind field.
*/
export function classifyFetchError(error: unknown): string {
const msg = String((error as { message?: unknown })?.message ?? error)
@@ -125,9 +59,6 @@ export function classifyFetchError(error: unknown): string {
if (/403|401|authentication|permission denied/i.test(msg)) return 'auth'
if (/404|not found|repository not found/i.test(msg)) return 'not_found'
if (/certificate|SSL|TLS|unable to get local issuer/i.test(msg)) return 'tls'
// Schema validation throws "Invalid response format" (install_counts) —
// distinguish from true unknowns so the dashboard can
// see "server sent garbage" separately.
if (/Invalid response format|Invalid marketplace schema/i.test(msg)) {
return 'invalid_schema'
}

View File

@@ -8,13 +8,13 @@
* Cache location: ~/.claude/plugins/install-counts-cache.json
*/
import axios from 'axios'
import { randomBytes } from 'crypto'
import { readFile, rename, unlink, writeFile } from 'fs/promises'
import { join } from 'path'
import { logForDebugging } from '../debug.js'
import { errorMessage, getErrnoCode } from '../errors.js'
import { getFsImplementation } from '../fsOperations.js'
import { nativeRequest } from '../http.js'
import { logError } from '../log.js'
import { jsonParse, jsonStringify } from '../slowOperations.js'
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
@@ -188,7 +188,8 @@ async function fetchInstallCountsFromGitHub(): Promise<
const started = performance.now()
try {
const response = await axios.get<GitHubStatsResponse>(INSTALL_COUNTS_URL, {
const response = await nativeRequest<GitHubStatsResponse>(INSTALL_COUNTS_URL, {
method: 'GET',
timeout: 10000,
})

View File

@@ -18,7 +18,6 @@
* └── marketplace.json
*/
import axios from 'axios'
import { writeFile } from 'fs/promises'
import isEqual from 'lodash-es/isEqual.js'
import memoize from 'lodash-es/memoize.js'
@@ -36,6 +35,7 @@ import {
import { execFileNoThrow, execFileNoThrowWithCwd } from '../execFileNoThrow.js'
import { getFsImplementation } from '../fsOperations.js'
import { gitExe } from '../git.js'
import { isHttpError, nativeRequest } from '../http.js'
import { logError } from '../log.js'
import {
getInitialSettings,
@@ -1279,7 +1279,8 @@ async function cacheMarketplaceFromUrl(
let response
const fetchStarted = performance.now()
try {
response = await axios.get(url, {
response = await nativeRequest(url, {
method: 'GET',
timeout: 10000,
headers,
})
@@ -1291,20 +1292,15 @@ async function cacheMarketplaceFromUrl(
performance.now() - fetchStarted,
classifyFetchError(error),
)
if (axios.isAxiosError(error)) {
if (error.code === 'ECONNREFUSED' || error.code === 'ENOTFOUND') {
throw new Error(
`Could not connect to ${redactedUrl}. Please check your internet connection and verify the URL is correct.\n\nTechnical details: ${error.message}`,
)
}
if (error.code === 'ETIMEDOUT') {
if (isHttpError(error)) {
if (error.code === 'ECONNABORTED' || error.message?.includes('timeout')) {
throw new Error(
`Request timed out while downloading marketplace from ${redactedUrl}. The server may be slow or unreachable.\n\nTechnical details: ${error.message}`,
)
}
if (error.response) {
if (error.status) {
throw new Error(
`HTTP ${error.response.status} error while downloading marketplace from ${redactedUrl}. The marketplace file may not exist at this URL.\n\nTechnical details: ${error.message}`,
`HTTP ${error.status} error while downloading marketplace from ${redactedUrl}. The marketplace file may not exist at this URL.\n\nTechnical details: ${error.message}`,
)
}
}

View File

@@ -2,7 +2,6 @@ import type {
McpbManifest,
McpbUserConfigurationOption,
} from '@anthropic-ai/mcpb'
import axios from 'axios'
import { createHash } from 'crypto'
import { chmod, writeFile } from 'fs/promises'
import { dirname, join } from 'path'
@@ -12,6 +11,7 @@ import { parseAndValidateManifestFromBytes } from '../dxt/helpers.js'
import { parseZipModes, unzipFile } from '../dxt/zip.js'
import { errorMessage, getErrnoCode, isENOENT, toError } from '../errors.js'
import { getFsImplementation } from '../fsOperations.js'
import { nativeRequest } from '../http.js'
import { logError } from '../log.js'
import { getSecureStorage } from '../secureStorage/index.js'
import {
@@ -492,18 +492,10 @@ async function downloadMcpb(
const started = performance.now()
let fetchTelemetryFired = false
try {
const response = await axios.get(url, {
timeout: 120000, // 2 minute timeout
const response = await nativeRequest<ArrayBuffer>(url, {
method: 'GET',
responseType: 'arraybuffer',
maxRedirects: 5, // Follow redirects (like curl -L)
onDownloadProgress: progressEvent => {
if (progressEvent.total && onProgress) {
const percent = Math.round(
(progressEvent.loaded / progressEvent.total) * 100,
)
onProgress(`Downloading... ${percent}%`)
}
},
timeout: 120000, // 2 minute timeout
})
const data = new Uint8Array(response.data)

View File

@@ -8,7 +8,6 @@
* when there's a new SHA. Callers decide fallback behavior on failure.
*/
import axios from 'axios'
import { chmod, mkdir, readFile, rename, rm, writeFile } from 'fs/promises'
import { dirname, join, resolve, sep } from 'path'
import { waitForScrollIdle } from '../../bootstrap/state.js'
@@ -17,6 +16,7 @@ import { logEvent } from '../../services/analytics/index.js'
import { logForDebugging } from '../debug.js'
import { parseZipModes, unzipFile } from '../dxt/zip.js'
import { errorMessage, getErrnoCode } from '../errors.js'
import { isHttpError, nativeRequest } from '../http.js'
type SafeString = AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
@@ -78,7 +78,8 @@ export async function fetchOfficialMarketplaceFromGcs(
try {
// 1. Latest pointer — ~40 bytes, backend sets Cache-Control: no-cache,
// max-age=300. Cheap enough to hit every startup.
const latest = await axios.get(`${GCS_BASE}/latest`, {
const latest = await nativeRequest<string>(`${GCS_BASE}/latest`, {
method: 'GET',
responseType: 'text',
timeout: 10_000,
})
@@ -104,7 +105,8 @@ export async function fetchOfficialMarketplaceFromGcs(
// 3. Download zip and extract to a staging dir, then atomic-swap into
// place. Crash mid-extract leaves a .staging dir (next run rm's it)
// rather than a half-written installLocation.
const zipResp = await axios.get(`${GCS_BASE}/${sha}.zip`, {
const zipResp = await nativeRequest<ArrayBuffer>(`${GCS_BASE}/${sha}.zip`, {
method: 'GET',
responseType: 'arraybuffer',
timeout: 60_000,
})
@@ -194,9 +196,9 @@ const KNOWN_FS_CODES = new Set([
* (disk full, permission denied) before flipping the git-fallback kill switch.
*/
export function classifyGcsError(e: unknown): string {
if (axios.isAxiosError(e)) {
if (e.code === 'ECONNABORTED') return 'timeout'
if (e.response) return `http_${e.response.status}`
if (isHttpError(e)) {
if (e.code === 'ECONNABORTED' || e.message?.includes('timeout')) return 'timeout'
if (e.status) return `http_${e.status}`
return 'network'
}
const code = getErrnoCode(e)

View File

@@ -1,8 +1,3 @@
// @aws-sdk/credential-provider-node and @smithy/node-http-handler are imported
// dynamically in getAWSClientProxyConfig() to defer ~929KB of AWS SDK.
// undici is lazy-required inside getProxyAgent/configureGlobalAgents to defer
// ~1.5MB when no HTTPS_PROXY/mTLS env vars are set (the common case).
import axios, { type AxiosInstance } from 'axios'
import type { LookupOptions } from 'dns'
import type { Agent } from 'http'
import { HttpsProxyAgent, type HttpsProxyAgentOptions } from 'https-proxy-agent'
@@ -160,37 +155,6 @@ function createHttpsProxyAgent(
return new HttpsProxyAgent(proxyUrl, { ...agentOptions, ...extra })
}
/**
* Axios instance with its own proxy agent. Same NO_PROXY/mTLS/CA
* resolution as the global interceptor, but agent options stay
* scoped to this instance.
*/
export function createAxiosInstance(
extra: HttpsProxyAgentOptions<string> = {},
): AxiosInstance {
const proxyUrl = getProxyUrl()
const mtlsAgent = getMTLSAgent()
const instance = axios.create({ proxy: false })
if (!proxyUrl) {
if (mtlsAgent) instance.defaults.httpsAgent = mtlsAgent
return instance
}
const proxyAgent = createHttpsProxyAgent(proxyUrl, extra)
instance.interceptors.request.use(config => {
if (config.url && shouldBypassProxy(config.url)) {
config.httpsAgent = mtlsAgent
config.httpAgent = mtlsAgent
} else {
config.httpsAgent = proxyAgent
config.httpAgent = proxyAgent
}
return config
})
return instance
}
/**
* Get or create a memoized proxy agent for the given URI
* Now respects NO_PROXY environment variable
@@ -319,63 +283,21 @@ export function getProxyFetchOptions(opts?: { forAnthropicAPI?: boolean }): {
}
/**
* Configure global HTTP agents for both axios and undici
* This ensures all HTTP requests use the proxy and/or mTLS if configured
* Configure global undici dispatcher
* This ensures all native fetch requests use the proxy and/or mTLS if configured.
* Axios configuration has been removed as it is deprecated in favor of native fetch.
*/
let proxyInterceptorId: number | undefined
export function configureGlobalAgents(): void {
const proxyUrl = getProxyUrl()
const mtlsAgent = getMTLSAgent()
// Eject previous interceptor to avoid stacking on repeated calls
if (proxyInterceptorId !== undefined) {
axios.interceptors.request.eject(proxyInterceptorId)
proxyInterceptorId = undefined
}
// Reset proxy-related defaults so reconfiguration is clean
axios.defaults.proxy = undefined
axios.defaults.httpAgent = undefined
axios.defaults.httpsAgent = undefined
if (proxyUrl) {
// workaround for https://github.com/axios/axios/issues/4531
axios.defaults.proxy = false
// Create proxy agent with mTLS options if available
const proxyAgent = createHttpsProxyAgent(proxyUrl)
// Add axios request interceptor to handle NO_PROXY
proxyInterceptorId = axios.interceptors.request.use(config => {
// Check if URL should bypass proxy based on NO_PROXY
if (config.url && shouldBypassProxy(config.url)) {
// Bypass proxy - use mTLS agent if configured, otherwise undefined
if (mtlsAgent) {
config.httpsAgent = mtlsAgent
config.httpAgent = mtlsAgent
} else {
// Remove any proxy agents to use direct connection
delete config.httpsAgent
delete config.httpAgent
}
} else {
// Use proxy agent
config.httpsAgent = proxyAgent
config.httpAgent = proxyAgent
}
return config
})
// Set global dispatcher that now respects NO_PROXY via EnvHttpProxyAgent
// eslint-disable-next-line @typescript-eslint/no-require-imports
;(require('undici') as typeof undici).setGlobalDispatcher(
getProxyAgent(proxyUrl),
)
} else if (mtlsAgent) {
// No proxy but mTLS is configured
axios.defaults.httpsAgent = mtlsAgent
// Set undici global dispatcher with mTLS
const mtlsOptions = getTLSFetchOptions()
if (mtlsOptions.dispatcher) {

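For context, the dispatcher-based setup that replaces the axios interceptor looks roughly like this (a sketch; EnvHttpProxyAgent is undici's env-var-driven proxy agent that honors HTTP(S)_PROXY and NO_PROXY, matching the behavior described above):

import { EnvHttpProxyAgent, setGlobalDispatcher } from 'undici'

// Once a global dispatcher is installed, every native fetch (and therefore
// every nativeRequest call) is routed through the proxy, with NO_PROXY
// exclusions applied per request, so no per-client interceptor is needed.
setGlobalDispatcher(new EnvHttpProxyAgent())

void fetch('https://example.com/') // proxied unless NO_PROXY matches the host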
View File

@@ -1,4 +1,3 @@
import axios from 'axios'
import { mkdir, readFile, writeFile } from 'fs/promises'
import { dirname, join } from 'path'
import { coerce } from 'semver'
@@ -6,6 +5,7 @@ import { getIsNonInteractiveSession } from '../bootstrap/state.js'
import { getGlobalConfig, saveGlobalConfig } from './config.js'
import { getClaudeConfigHomeDir } from './envUtils.js'
import { toError } from './errors.js'
import { nativeRequest } from './http.js'
import { logError } from './log.js'
import { isEssentialTrafficOnly } from './privacyLevel.js'
import { gt } from './semver.js'
@@ -90,7 +90,9 @@ export async function fetchAndStoreChangelog(): Promise<void> {
return
}
const response = await axios.get(RAW_CHANGELOG_URL)
const response = await nativeRequest<string>(RAW_CHANGELOG_URL, {
  method: 'GET',
  responseType: 'text', // the changelog is markdown, not JSON
})
if (response.status === 200) {
const changelogContent = response.data
@@ -286,23 +288,9 @@ export function getAllReleaseNotes(
*/
export async function checkForReleaseNotes(
lastSeenVersion: string | null | undefined,
currentVersion: string = MACRO.VERSION,
currentVersion: string = '0.1.0-alpha',
): Promise<{ hasReleaseNotes: boolean; releaseNotes: string[] }> {
// For Ant builds, use VERSION_CHANGELOG bundled at build time
if (process.env.USER_TYPE === 'ant') {
const changelog = MACRO.VERSION_CHANGELOG
if (changelog) {
const commits = changelog.trim().split('\n').filter(Boolean)
return {
hasReleaseNotes: commits.length > 0,
releaseNotes: commits,
}
}
return {
hasReleaseNotes: false,
releaseNotes: [],
}
}
// Release notes check
// Ensure the in-memory cache is populated for subsequent sync reads
const cachedChangelog = await getStoredChangelog()
@@ -334,23 +322,8 @@ export async function checkForReleaseNotes(
*/
export function checkForReleaseNotesSync(
lastSeenVersion: string | null | undefined,
currentVersion: string = MACRO.VERSION,
currentVersion: string = '0.1.0-alpha',
): { hasReleaseNotes: boolean; releaseNotes: string[] } {
// For Ant builds, use VERSION_CHANGELOG bundled at build time
if (process.env.USER_TYPE === 'ant') {
const changelog = MACRO.VERSION_CHANGELOG
if (changelog) {
const commits = changelog.trim().split('\n').filter(Boolean)
return {
hasReleaseNotes: commits.length > 0,
releaseNotes: commits,
}
}
return {
hasReleaseNotes: false,
releaseNotes: [],
}
}
const releaseNotes = getRecentReleaseNotes(currentVersion, lastSeenVersion)
return {

View File

@@ -1,252 +1,34 @@
import type { Attributes, HrTime } from '@opentelemetry/api'
import { AggregationTemporality, type PushMetricExporter } from '@opentelemetry/sdk-metrics'
import { type ExportResult, ExportResultCode } from '@opentelemetry/core'
import {
AggregationTemporality,
type MetricData,
type DataPoint as OTelDataPoint,
type PushMetricExporter,
type ResourceMetrics,
} from '@opentelemetry/sdk-metrics'
import axios from 'axios'
import { checkMetricsEnabled } from 'src/services/api/metricsOptOut.js'
import { getIsNonInteractiveSession } from '../../bootstrap/state.js'
import { getSubscriptionType, isClaudeAISubscriber } from '../auth.js'
import { checkHasTrustDialogAccepted } from '../config.js'
import { logForDebugging } from '../debug.js'
import { errorMessage, toError } from '../errors.js'
import { getAuthHeaders } from '../http.js'
import { logError } from '../log.js'
import { jsonStringify } from '../slowOperations.js'
import { getClaudeCodeUserAgent } from '../userAgent.js'
type DataPoint = {
attributes: Record<string, string>
value: number
timestamp: string
}
type Metric = {
name: string
description?: string
unit?: string
data_points: DataPoint[]
}
type InternalMetricsPayload = {
resource_attributes: Record<string, string>
metrics: Metric[]
}
/**
* BigQuery Metrics Exporter - Stubbed
*
* This exporter is stubbed to ensure no metrics or telemetry data
* is ever transmitted to external services.
*/
export class BigQueryMetricsExporter implements PushMetricExporter {
private readonly endpoint: string
private readonly timeout: number
private pendingExports: Promise<void>[] = []
private isShutdown = false
constructor(options: { timeout?: number } = {}) {
const defaultEndpoint = 'https://api.anthropic.com/api/claude_code/metrics'
if (
process.env.USER_TYPE === 'ant' &&
process.env.ANT_CLAUDE_CODE_METRICS_ENDPOINT
) {
this.endpoint =
process.env.ANT_CLAUDE_CODE_METRICS_ENDPOINT +
'/api/claude_code/metrics'
} else {
this.endpoint = defaultEndpoint
}
this.timeout = options.timeout || 5000
constructor(_options: { timeout?: number } = {}) {
// No-op
}
async export(
metrics: ResourceMetrics,
_metrics: any,
resultCallback: (result: ExportResult) => void,
): Promise<void> {
if (this.isShutdown) {
resultCallback({
code: ExportResultCode.FAILED,
error: new Error('Exporter has been shutdown'),
})
return
}
const exportPromise = this.doExport(metrics, resultCallback)
this.pendingExports.push(exportPromise)
// Clean up completed exports
void exportPromise.finally(() => {
const index = this.pendingExports.indexOf(exportPromise)
if (index > -1) {
void this.pendingExports.splice(index, 1)
}
})
}
private async doExport(
metrics: ResourceMetrics,
resultCallback: (result: ExportResult) => void,
): Promise<void> {
try {
// Skip if trust not established in interactive mode
// This prevents triggering apiKeyHelper before trust dialog
const hasTrust =
checkHasTrustDialogAccepted() || getIsNonInteractiveSession()
if (!hasTrust) {
logForDebugging(
'BigQuery metrics export: trust not established, skipping',
)
// Always report success but do nothing
resultCallback({ code: ExportResultCode.SUCCESS })
return
}
// Check organization-level metrics opt-out
const metricsStatus = await checkMetricsEnabled()
if (!metricsStatus.enabled) {
logForDebugging('Metrics export disabled by organization setting')
resultCallback({ code: ExportResultCode.SUCCESS })
return
}
const payload = this.transformMetricsForInternal(metrics)
const authResult = getAuthHeaders()
if (authResult.error) {
logForDebugging(`Metrics export failed: ${authResult.error}`)
resultCallback({
code: ExportResultCode.FAILED,
error: new Error(authResult.error),
})
return
}
const headers: Record<string, string> = {
'Content-Type': 'application/json',
'User-Agent': getClaudeCodeUserAgent(),
...authResult.headers,
}
const response = await axios.post(this.endpoint, payload, {
timeout: this.timeout,
headers,
})
logForDebugging('BigQuery metrics exported successfully')
logForDebugging(
`BigQuery API Response: ${jsonStringify(response.data, null, 2)}`,
)
resultCallback({ code: ExportResultCode.SUCCESS })
} catch (error) {
logForDebugging(`BigQuery metrics export failed: ${errorMessage(error)}`)
logError(error)
resultCallback({
code: ExportResultCode.FAILED,
error: toError(error),
})
}
}
private transformMetricsForInternal(
metrics: ResourceMetrics,
): InternalMetricsPayload {
const attrs = metrics.resource.attributes
const resourceAttributes: Record<string, string> = {
'service.name': (attrs['service.name'] as string) || 'claude-code',
'service.version': (attrs['service.version'] as string) || 'unknown',
'os.type': (attrs['os.type'] as string) || 'unknown',
'os.version': (attrs['os.version'] as string) || 'unknown',
'host.arch': (attrs['host.arch'] as string) || 'unknown',
'aggregation.temporality':
this.selectAggregationTemporality() === AggregationTemporality.DELTA
? 'delta'
: 'cumulative',
}
// Only add wsl.version if it exists (omit instead of default)
if (attrs['wsl.version']) {
resourceAttributes['wsl.version'] = attrs['wsl.version'] as string
}
// Add customer type and subscription type
if (isClaudeAISubscriber()) {
resourceAttributes['user.customer_type'] = 'claude_ai'
const subscriptionType = getSubscriptionType()
if (subscriptionType) {
resourceAttributes['user.subscription_type'] = subscriptionType
}
} else {
resourceAttributes['user.customer_type'] = 'api'
}
const transformed = {
resource_attributes: resourceAttributes,
metrics: metrics.scopeMetrics.flatMap(scopeMetric =>
scopeMetric.metrics.map(metric => ({
name: metric.descriptor.name,
description: metric.descriptor.description,
unit: metric.descriptor.unit,
data_points: this.extractDataPoints(metric),
})),
),
}
return transformed
}
private extractDataPoints(metric: MetricData): DataPoint[] {
const dataPoints = metric.dataPoints || []
return dataPoints
.filter(
(point): point is OTelDataPoint<number> =>
typeof point.value === 'number',
)
.map(point => ({
attributes: this.convertAttributes(point.attributes),
value: point.value,
timestamp: this.hrTimeToISOString(
point.endTime || point.startTime || [Date.now() / 1000, 0],
),
}))
}
async shutdown(): Promise<void> {
this.isShutdown = true
await this.forceFlush()
logForDebugging('BigQuery metrics exporter shutdown complete')
// No-op
}
async forceFlush(): Promise<void> {
await Promise.all(this.pendingExports)
logForDebugging('BigQuery metrics exporter flush complete')
}
private convertAttributes(
attributes: Attributes | undefined,
): Record<string, string> {
const result: Record<string, string> = {}
if (attributes) {
for (const [key, value] of Object.entries(attributes)) {
if (value !== undefined && value !== null) {
result[key] = String(value)
}
}
}
return result
}
private hrTimeToISOString(hrTime: HrTime): string {
const [seconds, nanoseconds] = hrTime
const date = new Date(seconds * 1000 + nanoseconds / 1000000)
return date.toISOString()
// No-op
}
selectAggregationTemporality(): AggregationTemporality {
// DO NOT CHANGE THIS TO CUMULATIVE
// It would mess up the aggregation of metrics
// for the CC Productivity metrics dashboard
return AggregationTemporality.DELTA
}
}
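For reference, a minimal sketch of the fully stubbed exporter this hunk converges on — assuming the same `PushMetricExporter` contract from `@opentelemetry/sdk-metrics`; anything not shown in the diff above is illustrative:

```ts
import { type ExportResult, ExportResultCode } from '@opentelemetry/core'
import {
  AggregationTemporality,
  type PushMetricExporter,
} from '@opentelemetry/sdk-metrics'

// Stubbed exporter: accepts metrics from the OTel SDK but performs no
// network I/O, so no telemetry ever leaves the machine.
export class BigQueryMetricsExporter implements PushMetricExporter {
  constructor(_options: { timeout?: number } = {}) {
    // No-op
  }

  async export(
    _metrics: any,
    resultCallback: (result: ExportResult) => void,
  ): Promise<void> {
    // Always report success so the SDK's periodic reader keeps functioning.
    resultCallback({ code: ExportResultCode.SUCCESS })
  }

  async shutdown(): Promise<void> {
    // No-op
  }

  async forceFlush(): Promise<void> {
    // No-op
  }

  selectAggregationTemporality(): AggregationTemporality {
    // Kept as DELTA to match the dashboard's expectations (see comment above).
    return AggregationTemporality.DELTA
  }
}
```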

View File

@@ -5,6 +5,7 @@ import { getOrCreateUserID } from './config.js'
import { envDynamic } from './envDynamic.js'
import { isEnvTruthy } from './envUtils.js'
import { toTaggedId } from './taggedId.js'
import { VERSION } from 'src/constants/product.js'
// Default configuration for metrics cardinality
const METRICS_CARDINALITY_DEFAULTS = {
@@ -38,7 +39,7 @@ export function getTelemetryAttributes(): Attributes {
attributes['session.id'] = sessionId
}
if (shouldIncludeAttribute('OTEL_METRICS_INCLUDE_VERSION')) {
attributes['app.version'] = MACRO.VERSION
attributes['app.version'] = VERSION
}
// Only include OAuth account data when actively using OAuth authentication

View File

@@ -1,4 +1,4 @@
import axios from 'axios';
import { isHttpError, nativeRequest } from './http.js';
import chalk from 'chalk';
import { randomUUID } from 'crypto';
import React from 'react';
@@ -604,7 +604,7 @@ export async function teleportFromSessionsAPI(sessionId: string, orgUUID: string
const err = toError(error);
// Handle 404 specifically
if (axios.isAxiosError(error) && error.response?.status === 404) {
if (isHttpError(error) && error.status === 404) {
logEvent('tengu_teleport_error_session_not_found_404', {
sessionId: sessionId as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
});
@@ -659,11 +659,9 @@ export async function pollRemoteSessionEvents(sessionId: string, afterId: string
const sdkMessages: SDKMessage[] = [];
let cursor = afterId;
for (let page = 0; page < MAX_EVENT_PAGES; page++) {
const eventsResponse = await axios.get(eventsUrl, {
const eventsUrlWithCursor = cursor ? `${eventsUrl}?after_id=${encodeURIComponent(cursor)}` : eventsUrl;
const eventsResponse = await nativeRequest<EventsResponse>(eventsUrlWithCursor, {
headers,
params: cursor ? {
after_id: cursor
} : undefined,
timeout: 30000
});
if (eventsResponse.status !== 200) {
@@ -878,7 +876,9 @@ export async function teleportToRemote(options: {
environment_id: options.environmentId
};
logForDebugging(`[teleportToRemote] explicit env ${options.environmentId}, ${Object.keys(envVars).length} env vars, ${seedBundleFileId ? `bundle=${seedBundleFileId}` : `source=${gitSource?.url ?? 'none'}@${options.branchName ?? 'default'}`}`);
const response = await axios.post(url, requestBody, {
const response = await nativeRequest<SessionResource>(url, {
method: 'POST',
body: requestBody,
headers,
signal
});
@@ -1161,7 +1161,9 @@ export async function teleportToRemote(options: {
logForDebugging(`Creating session with payload: ${jsonStringify(requestBody, null, 2)}`);
// Make API call
const response = await axios.post(url, requestBody, {
const response = await nativeRequest<SessionResource>(url, {
method: 'POST',
body: requestBody,
headers,
signal
});
@@ -1209,10 +1211,11 @@ export async function archiveRemoteSession(sessionId: string): Promise<void> {
};
const url = `${getOauthConfig().BASE_API_URL}/v1/sessions/${sessionId}/archive`;
try {
const resp = await axios.post(url, {}, {
const resp = await nativeRequest(url, {
method: 'POST',
body: {},
headers,
timeout: 10000,
validateStatus: s => s < 500
timeout: 10000
});
if (resp.status === 200 || resp.status === 409) {
logForDebugging(`[archiveRemoteSession] archived ${sessionId}`);
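A condensed sketch of the error-handling and pagination pattern this file converts to — assuming `nativeRequest<T>` resolves to `{ data: T; status: number }` and throws an error that `isHttpError` narrows to one with a numeric `status`; the local type and page limit below are stand-ins, not the real definitions:

```ts
import { isHttpError, nativeRequest } from './http.js'

// Stand-ins for the real types/constants used in the diff.
type EventsResponse = { events: unknown[]; last_event_id?: string }
const MAX_EVENT_PAGES = 50

async function pollEvents(
  eventsUrl: string,
  headers: Record<string, string>,
  afterId: string,
): Promise<unknown[]> {
  const events: unknown[] = []
  let cursor = afterId
  for (let page = 0; page < MAX_EVENT_PAGES; page++) {
    // The cursor moves into the query string instead of axios' `params` option.
    const url = cursor
      ? `${eventsUrl}?after_id=${encodeURIComponent(cursor)}`
      : eventsUrl
    try {
      const resp = await nativeRequest<EventsResponse>(url, {
        headers,
        timeout: 30000,
      })
      if (resp.status !== 200) break
      events.push(...resp.data.events)
      if (!resp.data.last_event_id) break
      cursor = resp.data.last_event_id
    } catch (error) {
      // axios.isAxiosError(e) && e.response?.status === 404 becomes:
      if (isHttpError(error) && error.status === 404) break
      throw error
    }
  }
  return events
}
```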

View File

@@ -1,4 +1,3 @@
import axios, { type AxiosRequestConfig, type AxiosResponse } from 'axios'
import { randomUUID } from 'crypto'
import { getOauthConfig } from 'src/constants/oauth.js'
import { getOrganizationUUID } from 'src/services/oauth/client.js'
@@ -7,6 +6,7 @@ import { getClaudeAIOAuthTokens } from '../auth.js'
import { logForDebugging } from '../debug.js'
import { parseGitHubRepository } from '../detectRepository.js'
import { errorMessage, toError } from '../errors.js'
import { isHttpError, nativeRequest } from '../http.js'
import { lazySchema } from '../lazySchema.js'
import { logError } from '../log.js'
import { sleep } from '../sleep.js'
@@ -19,40 +19,40 @@ const MAX_TELEPORT_RETRIES = TELEPORT_RETRY_DELAYS.length
export const CCR_BYOC_BETA = 'ccr-byoc-2025-07-29'
/**
* Checks if an axios error is a transient network error that should be retried
* Checks if an error is a transient network error that should be retried
*/
export function isTransientNetworkError(error: unknown): boolean {
if (!axios.isAxiosError(error)) {
return false
}
// Retry on network errors (no response received)
if (!error.response) {
return true
}
if (isHttpError(error)) {
// Retry on server errors (5xx)
if (error.response.status >= 500) {
return true
return !!error.status && error.status >= 500
}
// Native fetch throws a generic Error for network failures; treat
// network/timeout/abort messages as transient
if (error instanceof Error) {
const msg = error.message.toLowerCase()
return msg.includes('network') || msg.includes('timeout') || msg.includes('aborted')
}
// Don't retry on client errors (4xx) - they're not transient
return false
}
/**
* Makes an axios GET request with automatic retry for transient network errors
* Makes a native GET request with automatic retry for transient network errors
* Uses exponential backoff: 2s, 4s, 8s, 16s (4 retries = 5 total attempts)
*/
export async function axiosGetWithRetry<T>(
export async function nativeGetWithRetry<T>(
url: string,
config?: AxiosRequestConfig,
): Promise<AxiosResponse<T>> {
options: { headers?: Record<string, string> } = {},
): Promise<{ data: T; status: number }> {
let lastError: unknown
for (let attempt = 0; attempt <= MAX_TELEPORT_RETRIES; attempt++) {
try {
return await axios.get<T>(url, config)
return await nativeRequest<T>(url, {
method: 'GET',
...options,
})
} catch (error) {
lastError = error
@@ -215,12 +215,12 @@ export async function fetchCodeSessionsFromSessionsAPI(): Promise<
'x-organization-uuid': orgUUID,
}
const response = await axiosGetWithRetry<ListSessionsResponse>(url, {
const response = await nativeGetWithRetry<ListSessionsResponse>(url, {
headers,
})
if (response.status !== 200) {
throw new Error(`Failed to fetch code sessions: ${response.statusText}`)
throw new Error(`Failed to fetch code sessions: ${response.status}`)
}
// Transform SessionResource[] to CodeSession[] format
@@ -298,10 +298,10 @@ export async function fetchSession(
'x-organization-uuid': orgUUID,
}
const response = await axios.get<SessionResource>(url, {
const response = await nativeRequest<SessionResource>(url, {
method: 'GET',
headers,
timeout: 15000,
validateStatus: status => status < 500,
})
if (response.status !== 200) {
@@ -319,7 +319,7 @@ export async function fetchSession(
throw new Error(
apiMessage ||
`Failed to fetch session: ${response.status} ${response.statusText}`,
`Failed to fetch session: ${response.status}`,
)
}
@@ -393,9 +393,10 @@ export async function sendEventToRemoteSession(
)
// The endpoint may block until the CCR worker is ready. Observed ~2.6s
// in normal cases; allow a generous margin for cold-start containers.
const response = await axios.post(url, requestBody, {
const response = await nativeRequest<any>(url, {
method: 'POST',
body: requestBody,
headers,
validateStatus: status => status < 500,
timeout: 30000,
})
@@ -439,14 +440,11 @@ export async function updateSessionTitle(
logForDebugging(
`[updateSessionTitle] Updating title for session ${sessionId}: "${title}"`,
)
const response = await axios.patch(
url,
{ title },
{
const response = await nativeRequest<any>(url, {
method: 'PATCH',
body: { title },
headers,
validateStatus: status => status < 500,
},
)
})
if (response.status === 200) {
logForDebugging(
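Reassembled from the interleaved hunks above, the converted retry helper roughly reads as follows — a sketch assuming `nativeRequest` returns `{ data, status }`, `isHttpError` exposes a numeric `status`, and the delays follow the 2s/4s/8s/16s schedule from the doc comment:

```ts
import { isHttpError, nativeRequest } from '../http.js'
import { sleep } from '../sleep.js'

const TELEPORT_RETRY_DELAYS = [2_000, 4_000, 8_000, 16_000]
const MAX_TELEPORT_RETRIES = TELEPORT_RETRY_DELAYS.length

function isTransientNetworkError(error: unknown): boolean {
  if (isHttpError(error)) {
    // Retry on 5xx; 4xx client errors are not transient.
    return !!error.status && error.status >= 500
  }
  if (error instanceof Error) {
    // Native fetch surfaces network failures as generic Errors.
    const msg = error.message.toLowerCase()
    return (
      msg.includes('network') || msg.includes('timeout') || msg.includes('aborted')
    )
  }
  return false
}

export async function nativeGetWithRetry<T>(
  url: string,
  options: { headers?: Record<string, string> } = {},
): Promise<{ data: T; status: number }> {
  let lastError: unknown
  for (let attempt = 0; attempt <= MAX_TELEPORT_RETRIES; attempt++) {
    try {
      return await nativeRequest<T>(url, { method: 'GET', ...options })
    } catch (error) {
      lastError = error
      if (!isTransientNetworkError(error) || attempt === MAX_TELEPORT_RETRIES) {
        throw error
      }
      // Exponential backoff: 2s, 4s, 8s, 16s before the next attempt.
      await sleep(TELEPORT_RETRY_DELAYS[attempt])
    }
  }
  throw lastError
}
```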

View File

@@ -1,8 +1,8 @@
import axios from 'axios'
import { getOauthConfig } from 'src/constants/oauth.js'
import { getOrganizationUUID } from 'src/services/oauth/client.js'
import { getClaudeAIOAuthTokens } from '../auth.js'
import { toError } from '../errors.js'
import { nativeRequest } from '../http.js'
import { logError } from '../log.js'
import { getOAuthHeaders } from './api.js'
@@ -50,15 +50,14 @@ export async function fetchEnvironments(): Promise<EnvironmentResource[]> {
'x-organization-uuid': orgUUID,
}
const response = await axios.get<EnvironmentListResponse>(url, {
const response = await nativeRequest<EnvironmentListResponse>(url, {
method: 'GET',
headers,
timeout: 15000,
})
if (response.status !== 200) {
throw new Error(
`Failed to fetch environments: ${response.status} ${response.statusText}`,
)
throw new Error(`Failed to fetch environments: ${response.status}`)
}
return response.data.environments
@@ -86,9 +85,9 @@ export async function createDefaultCloudEnvironment(
}
const url = `${getOauthConfig().BASE_API_URL}/v1/environment_providers/cloud/create`
const response = await axios.post<EnvironmentResource>(
url,
{
const response = await nativeRequest<EnvironmentResource>(url, {
method: 'POST',
body: {
name,
kind: 'anthropic_cloud',
description: '',
@@ -107,14 +106,12 @@ export async function createDefaultCloudEnvironment(
},
},
},
{
headers: {
...getOAuthHeaders(accessToken),
'anthropic-beta': 'ccr-byoc-2025-07-29',
'x-organization-uuid': orgUUID,
},
timeout: 15000,
},
)
})
return response.data
}
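The POST conversions above all share the same shape; a minimal usage sketch, assuming `nativeRequest`'s options accept `method`, a JSON-serializable `body`, `headers`, and `timeout` as the diffs imply (the response type here is a stand-in):

```ts
import { nativeRequest } from '../http.js'

// Stand-in for the real EnvironmentResource type.
type EnvironmentResource = { id: string; name: string }

async function createEnvironment(
  baseUrl: string,
  headers: Record<string, string>,
  name: string,
): Promise<EnvironmentResource> {
  // axios.post(url, body, config) collapses into one options object.
  const response = await nativeRequest<EnvironmentResource>(
    `${baseUrl}/v1/environment_providers/cloud/create`,
    {
      method: 'POST',
      body: { name, kind: 'anthropic_cloud', description: '' },
      headers,
      timeout: 15000,
    },
  )
  return response.data
}
```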

View File

@@ -1,4 +1,5 @@
import { execa } from 'execa'
import { VERSION } from 'src/constants/product.js'
import memoize from 'lodash-es/memoize.js'
import { getSessionId } from '../bootstrap/state.js'
import {
@@ -105,7 +106,7 @@ export const getCoreUserData = memoize(
deviceId,
sessionId: getSessionId(),
email: getEmail(),
appVersion: MACRO.VERSION,
appVersion: VERSION,
platform: getHostPlatformForAnalytics(),
organizationUuid,
accountUuid,

View File

@@ -5,6 +5,8 @@
* import without pulling in auth.ts and its transitive dependency tree.
*/
import { VERSION } from 'src/constants/product.js'
export function getClaudeCodeUserAgent(): string {
return `claude-code/${MACRO.VERSION}`
return `claude-code/${VERSION}`
}
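The `MACRO.VERSION` → `VERSION` swap assumes a plain constants module that both the user agent and analytics code can import; a hypothetical shape (the version string is an illustrative placeholder):

```ts
// src/constants/product.ts — assumed shape, actual contents may differ
export const VERSION = '0.0.0' // illustrative placeholder

// utils/userAgent.ts
import { VERSION } from 'src/constants/product.js'

export function getClaudeCodeUserAgent(): string {
  return `claude-code/${VERSION}`
}
```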