#18311 · @jorgitin02 · opened Mar 20, 2026 at 12:04 AM UTC · last updated Mar 21, 2026 at 1:00 AM UTC

fix: skip Anthropic cache control for OAuth

appfix
74
+22634 files

Score breakdown

Impact

9.0

Clarity

9.0

Urgency

9.0

Ease Of Review

9.0

Guidelines

9.0

Readiness

8.0

Size

2.0

Trust

6.0

Traction

7.0

Summary

This PR fixes a critical bug where Anthropic OAuth requests fail due to unaccepted cache control metadata, rendering Claude models unusable for these users. The fix is narrowly scoped to skip the problematic metadata for OAuth paths and includes dedicated regression tests. This is an urgent fix for a significant user-facing breakage.

Open in GitHub

Description

Issue for this PR

Closes #17910

Type of change

  • [x] Bug fix
  • [ ] New feature
  • [ ] Refactor / code improvement
  • [ ] Documentation

What does this PR do?

Anthropic OAuth requests were still going through the normal Anthropic cache-control path, which adds cache_control: { type: "ephemeral" } to recent prompt messages. That metadata is rejected on the OAuth path and causes the request to fail.

This changes ProviderTransform.message() to skip Anthropic cache-control injection when the resolved auth type is oauth, and updates LLM.stream() to pass the resolved auth type into that transform. I also added regression coverage at two levels: one focused transform test for the cache-control behavior, and one LLM.stream() test that exercises the Anthropic messages request path with OAuth credentials present.

The fix is narrow on purpose: it only changes the Anthropic path when auth is OAuth, and leaves the existing non-OAuth Anthropic caching behavior in place.

How did you verify your code works?

  • bun test test/session/llm.test.ts
  • bun test test/provider/transform.test.ts
  • bun typecheck
  • bun test

Screenshots / recordings

N/A

Checklist

  • [x] I have tested my changes locally
  • [x] I have not included unrelated changes in this PR

Linked Issues

#17910 bug: OAuth auth + cache_control ephemeral causes HTTP 400 on all Claude models since 2026-03-17

View issue

Comments

PR comments

jwcrystal

⚠️ Update: The cache_control hypothesis has been disproven. See issue comment for details.

The actual root cause is missing x-anthropic-billing-header that Anthropic now requires for OAuth requests. This PR's approach may not fully resolve the issue.

A reference implementation is available at clewdr@a4e5df3.

jorgitin02

Updated this PR.

It now does three things for Anthropic OAuth:

  • skips injected cache_control
  • prepends the Claude Code-style billing header system text
  • sends the Claude Code user-agent on OAuth requests

Local verification:

  • bun test ./test/provider/transform.test.ts
  • bun test ./test/session/llm.test.ts
  • bun typecheck

The earlier failing e2e checks were in the app workspace-routing suite, not in this provider/session codepath, so they should be re-evaluated once the checks re-run on the refreshed branch.

Changed Files

packages/opencode/src/provider/transform.ts

+352
@@ -1,4 +1,5 @@
import type { ModelMessage } from "ai"
import { createHash } from "crypto"
import { mergeDeep, unique } from "remeda"
import type { JSONSchema7 } from "@ai-sdk/provider"
import type { JSONSchema } from "zod/v4/core"
@@ -19,6 +20,9 @@ function mimeToModality(mime: string): Modality | undefined {
export namespace ProviderTransform {
export const OUTPUT_TOKEN_MAX = Flag.OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX || 32_000
export const CLAUDE_CODE_VERSION = "2.1.76"
export const CLAUDE_CODE_USER_AGENT = `claude-code/${CLAUDE_CODE_VERSION}`
const CLAUDE_CODE_SALT = "59cf53e54c78"
// Maps npm package to the key the AI SDK expects for providerOptions
function sdkKey(npm: string): string | undefined {
@@ -171,7 +175,9 @@ export namespace ProviderTransform {
return msgs
}
function applyCaching(msgs: ModelMessage[], model: Provider.Model): ModelMessage[] {
function applyCaching(msgs: ModelMessage[], model: Provider.Model, options: Record<string, unknown>): ModelMessage[] {
if (model.providerID === "anthropic" && options.authType === "oauth") return msgs
const system = msgs.filter((msg) => msg.role === "system").slice(0, 2)

packages/opencode/src/session/llm.ts

+81
@@ -213,6 +213,10 @@ export namespace LLM {
"x-opencode-request": input.user.id,
"x-opencode-client": Flag.OPENCODE_CLIENT,
}),
...(input.model.providerID === "anthropic" &&
auth?.type === "oauth" && {
"User-Agent": ProviderTransform.CLAUDE_CODE_USER_AGENT,
}),
...input.model.headers,
...headers,
},
@@ -233,7 +237,10 @@ export namespace LLM {
async transformParams(args) {
if (args.type === "stream") {
// @ts-expect-error
args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, options)
args.params.prompt = ProviderTransform.message(args.params.prompt, input.model, {
...options,
authType: auth?.type,
})
}
return args.params
},

packages/opencode/test/provider/transform.test.ts

+580
@@ -1660,6 +1660,64 @@ describe("ProviderTransform.message - cache control on gateway", () => {
},
})
})
test("anthropic oauth skips ephemeral cache control", () => {
const model = createModel({
providerID: "anthropic",
api: {
id: "claude-sonnet-4",
url: "https://api.anthropic.com",
npm: "@ai-sdk/anthropic",
},
})
const msgs = [
{
role: "system",
content: "You are a helpful assistant",
},
{
role: "user",
content: "Hello",
},
] as any[]
const result = ProviderTransform.message(msgs, model, { authType: "oauth" }) as any[]
expect(result[0].content[0]?.providerOptions).toBeUndefined()
expect(result[0].providerOptions).toBeUndefined()
})
test("anthropic oauth prepends Claude Code billing system text", () => {
const model = createModel({
providerID: "anthropic",
api: {
id: "claude-sonnet-4",
url: "https://api.anthropic.com",
npm: "@ai-sdk/anthropic",
},
})
const msgs = [
{
role: "system",
content: "You are a helpful assistant

packages/opencode/test/session/llm.test.ts

+1250
@@ -654,6 +654,131 @@ describe("session.llm.stream", () => {
})
})
test("skips cache control in Anthropic messages when auth is oauth", async () => {
const server = state.server
if (!server) {
throw new Error("Server not initialized")
}
const providerID = "anthropic"
const modelID = "claude-3-5-sonnet-20241022"
const fixture = await loadFixture(providerID, modelID)
const model = fixture.model
const chunks = [
{
type: "message_start",
message: {
id: "msg-oauth-1",
model: model.id,
usage: {
input_tokens: 3,
cache_creation_input_tokens: null,
cache_read_input_tokens: null,
},
},
},
{
type: "content_block_start",
index: 0,
content_block: { type: "text", text: "" },
},
{
type: "content_block_delta",
index: 0,
delta: { type: "text_delta", text: "Hello" },
},
{ type: "content_block_stop", index: 0 },
{
type: "message_delta",
delta: { stop_reason: "end_turn", stop_sequence: null, container: nu