Skip to content

Commit 40e4cd2

Browse files
authored
tweak: adjust chat.params hook to allow altering of the maxOutputTokens (anomalyco#21220)
1 parent 5a6d10c commit 40e4cd2

2 files changed

Lines changed: 14 additions & 7 deletions

File tree

packages/opencode/src/session/llm.ts

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -160,6 +160,11 @@ export namespace LLM {
160160
...input.messages,
161161
]
162162

163+
const maxOutputTokens =
164+
isOpenaiOauth || provider.id.includes("github-copilot")
165+
? undefined
166+
: ProviderTransform.maxOutputTokens(input.model)
167+
163168
const params = await Plugin.trigger(
164169
"chat.params",
165170
{
@@ -175,6 +180,7 @@ export namespace LLM {
175180
: undefined,
176181
topP: input.agent.topP ?? ProviderTransform.topP(input.model),
177182
topK: ProviderTransform.topK(input.model),
183+
maxOutputTokens,
178184
options,
179185
},
180186
)
@@ -193,11 +199,6 @@ export namespace LLM {
193199
},
194200
)
195201

196-
const maxOutputTokens =
197-
isOpenaiOauth || provider.id.includes("github-copilot")
198-
? undefined
199-
: ProviderTransform.maxOutputTokens(input.model)
200-
201202
const tools = await resolveTools(input)
202203

203204
// LiteLLM and some Anthropic proxies require the tools parameter to be present
@@ -291,7 +292,7 @@ export namespace LLM {
291292
activeTools: Object.keys(tools).filter((x) => x !== "invalid"),
292293
tools,
293294
toolChoice: input.toolChoice,
294-
maxOutputTokens,
295+
maxOutputTokens: params.maxOutputTokens,
295296
abortSignal: input.abort,
296297
headers: {
297298
...(input.model.providerID.startsWith("opencode")

packages/plugin/src/index.ts

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -212,7 +212,13 @@ export interface Hooks {
212212
*/
213213
"chat.params"?: (
214214
input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage },
215-
output: { temperature: number; topP: number; topK: number; options: Record<string, any> },
215+
output: {
216+
temperature: number
217+
topP: number
218+
topK: number
219+
maxOutputTokens: number | undefined
220+
options: Record<string, any>
221+
},
216222
) => Promise<void>
217223
"chat.headers"?: (
218224
input: { sessionID: string; agent: string; model: Model; provider: ProviderContext; message: UserMessage },

0 commit comments

Comments (0)