27 changes: 3 additions & 24 deletions index.ts
@@ -1,5 +1,4 @@
import type { Plugin } from "@opencode-ai/plugin"
import type { Model } from "@opencode-ai/sdk"
import { getConfig } from "./lib/config"
import { Logger } from "./lib/logger"
import { loadPrompt } from "./lib/prompt"
@@ -27,40 +26,20 @@ const plugin: Plugin = (async (ctx) => {
})

return {
"chat.params": async (
input: { sessionID: string; agent: string; model: Model; provider: any; message: any },
_output: { temperature: number; topP: number; options: Record<string, any> },
) => {
const isReasoning = input.model.capabilities?.reasoning ?? false
if (state.isReasoningModel !== isReasoning) {
logger.info(
`Reasoning model status changed: ${state.isReasoningModel} -> ${isReasoning}`,
{
modelId: input.model.id,
providerId: input.model.providerID,
},
)
}
state.isReasoningModel = isReasoning
},
"experimental.chat.system.transform": async (
_input: unknown,
output: { system: string[] },
) => {
const discardEnabled = config.tools.discard.enabled
const extractEnabled = config.tools.extract.enabled

// Use user-role prompts for reasoning models (second person),
// assistant-role prompts for non-reasoning models (first person)
const roleDir = state.isReasoningModel ? "user" : "assistant"

let promptName: string
if (discardEnabled && extractEnabled) {
promptName = `${roleDir}/system/system-prompt-both`
promptName = "user/system/system-prompt-both"
} else if (discardEnabled) {
promptName = `${roleDir}/system/system-prompt-discard`
promptName = "user/system/system-prompt-discard"
} else if (extractEnabled) {
promptName = `${roleDir}/system/system-prompt-extract`
promptName = "user/system/system-prompt-extract"
} else {
return
}
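Net effect of the index.ts change: the `chat.params` hook and the reasoning-model check are gone, and `experimental.chat.system.transform` always resolves prompts from the `user/` directory. A minimal sketch of the surviving selection logic, using only the config flags and prompt names visible in the diff — the standalone helper and its `ToolFlags` type are illustrative, not part of the plugin:

```ts
// Illustrative helper: mirrors the if/else chain left in
// "experimental.chat.system.transform" after the reasoning/non-reasoning
// branch was removed — every model now gets the second-person "user" prompts.
type ToolFlags = { discardEnabled: boolean; extractEnabled: boolean }

const resolveSystemPromptName = ({ discardEnabled, extractEnabled }: ToolFlags): string | undefined => {
  if (discardEnabled && extractEnabled) return "user/system/system-prompt-both"
  if (discardEnabled) return "user/system/system-prompt-discard"
  if (extractEnabled) return "user/system/system-prompt-extract"
  return undefined // neither tool enabled: the hook returns without touching the system prompt
}
```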
63 changes: 20 additions & 43 deletions lib/messages/prune.ts
@@ -2,46 +2,35 @@ import type { SessionState, WithParts } from "../state"
import type { Logger } from "../logger"
import type { PluginConfig } from "../config"
import { loadPrompt } from "../prompt"
import {
extractParameterKey,
buildToolIdList,
createSyntheticUserMessage,
createSyntheticAssistantMessage,
} from "./utils"
import { getLastAssistantMessage, getLastUserMessage, isMessageCompacted } from "../shared-utils"
import { extractParameterKey, buildToolIdList, createSyntheticUserMessage } from "./utils"
import { getLastUserMessage, isMessageCompacted } from "../shared-utils"

const PRUNED_TOOL_INPUT_REPLACEMENT =
"[content removed to save context, this is not what was written to the file, but a placeholder]"
const PRUNED_TOOL_OUTPUT_REPLACEMENT =
"[Output removed to save context - information superseded or no longer needed]"
const PRUNED_TOOL_ERROR_INPUT_REPLACEMENT = "[input removed due to failed tool call]"

const getNudgeString = (config: PluginConfig, isReasoningModel: boolean): string => {
const getNudgeString = (config: PluginConfig): string => {
const discardEnabled = config.tools.discard.enabled
const extractEnabled = config.tools.extract.enabled
const roleDir = isReasoningModel ? "user" : "assistant"

if (discardEnabled && extractEnabled) {
return loadPrompt(`${roleDir}/nudge/nudge-both`)
return loadPrompt(`user/nudge/nudge-both`)
} else if (discardEnabled) {
return loadPrompt(`${roleDir}/nudge/nudge-discard`)
return loadPrompt(`user/nudge/nudge-discard`)
} else if (extractEnabled) {
return loadPrompt(`${roleDir}/nudge/nudge-extract`)
return loadPrompt(`user/nudge/nudge-extract`)
}
return ""
}

const wrapPrunableToolsUser = (content: string): string => `<prunable-tools>
const wrapPrunableTools = (content: string): string => `<prunable-tools>
The following tools have been invoked and are available for pruning. This list does not mandate immediate action. Consider your current goals and the resources you need before discarding valuable tool inputs or outputs. Consolidate your prunes for efficiency; it is rarely worth pruning a single tiny tool output. Keep the context free of noise.
${content}
</prunable-tools>`

const wrapPrunableToolsAssistant = (content: string): string => `<prunable-tools>
I have the following tool outputs available for pruning. I should consider my current goals and the resources I need before discarding valuable inputs or outputs. I should consolidate prunes for efficiency; it is rarely worth pruning a single tiny tool output.
${content}
</prunable-tools>`

const getCooldownMessage = (config: PluginConfig, isReasoningModel: boolean): string => {
const getCooldownMessage = (config: PluginConfig): string => {
const discardEnabled = config.tools.discard.enabled
const extractEnabled = config.tools.extract.enabled

@@ -54,11 +43,9 @@ const getCooldownMessage = (config: PluginConfig, isReasoningModel: boolean): st
toolName = "extract tool"
}

const message = isReasoningModel
? `Context management was just performed. Do not use the ${toolName} again. A fresh list will be available after your next tool use.`
: `I just performed context management. I will not use the ${toolName} again until after my next tool use, when a fresh list will be available.`

return `<prunable-tools>\n${message}\n</prunable-tools>`
return `<prunable-tools>
Context management was just performed. Do not use the ${toolName} again. A fresh list will be available after your next tool use.
</prunable-tools>`
}

const buildPrunableToolsList = (
@@ -74,10 +61,12 @@ const buildPrunableToolsList = (
if (state.prune.toolIds.includes(toolCallId)) {
return
}

const allProtectedTools = config.tools.settings.protectedTools
if (allProtectedTools.includes(toolParameterEntry.tool)) {
return
}

const numericId = toolIdList.indexOf(toolCallId)
if (numericId === -1) {
logger.warn(`Tool in cache but not in toolIdList - possible stale entry`, {
@@ -100,8 +89,7 @@ return ""
return ""
}

const wrapFn = state.isReasoningModel ? wrapPrunableToolsUser : wrapPrunableToolsAssistant
return wrapFn(lines.join("\n"))
return wrapPrunableTools(lines.join("\n"))
}

export const insertPruneToolContext = (
@@ -114,14 +102,11 @@ export const insertPruneToolContext = (
return
}

// For reasoning models, inject into user role; for non-reasoning, inject into assistant role
const isReasoningModel = state.isReasoningModel

let prunableToolsContent: string

if (state.lastToolPrune) {
logger.debug("Last tool was prune - injecting cooldown message")
prunableToolsContent = getCooldownMessage(config, isReasoningModel)
prunableToolsContent = getCooldownMessage(config)
} else {
const prunableToolsList = buildPrunableToolsList(state, config, logger, messages)
if (!prunableToolsList) {
@@ -136,25 +121,17 @@ state.nudgeCounter >= config.tools.settings.nudgeFrequency
state.nudgeCounter >= config.tools.settings.nudgeFrequency
) {
logger.info("Inserting prune nudge message")
nudgeString = "\n" + getNudgeString(config, isReasoningModel)
nudgeString = "\n" + getNudgeString(config)
}

prunableToolsContent = prunableToolsList + nudgeString
}

if (isReasoningModel) {
const lastUserMessage = getLastUserMessage(messages)
if (!lastUserMessage) {
return
}
messages.push(createSyntheticUserMessage(lastUserMessage, prunableToolsContent))
} else {
const lastAssistantMessage = getLastAssistantMessage(messages)
if (!lastAssistantMessage) {
return
}
messages.push(createSyntheticAssistantMessage(lastAssistantMessage, prunableToolsContent))
const lastUserMessage = getLastUserMessage(messages)
if (!lastUserMessage) {
return
}
messages.push(createSyntheticUserMessage(lastUserMessage, prunableToolsContent))
}

export const prune = (
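After this change prune.ts has a single injection path: whether the content is the prunable-tools list, the nudge, or the cooldown message, it always rides in a synthetic user message anchored to the most recent real user message. A rough sketch of that tail end of `insertPruneToolContext`, assuming the helpers visible in the diff (`getLastUserMessage`, `createSyntheticUserMessage`); the standalone wrapper function is hypothetical:

```ts
import type { WithParts } from "../state"
import { createSyntheticUserMessage } from "./utils"
import { getLastUserMessage } from "../shared-utils"

// Hypothetical wrapper around the final lines of insertPruneToolContext:
// append the prunable-tools block (or cooldown message) as a synthetic
// user message; bail out if there is no real user message to anchor to.
const appendPrunableToolsContext = (messages: WithParts[], content: string): void => {
  const lastUserMessage = getLastUserMessage(messages)
  if (!lastUserMessage) return
  messages.push(createSyntheticUserMessage(lastUserMessage, content))
}
```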
33 changes: 1 addition & 32 deletions lib/messages/utils.ts
@@ -1,7 +1,7 @@
import { Logger } from "../logger"
import { isMessageCompacted } from "../shared-utils"
import type { SessionState, WithParts } from "../state"
import type { AssistantMessage, UserMessage } from "@opencode-ai/sdk"
import type { UserMessage } from "@opencode-ai/sdk"

const SYNTHETIC_MESSAGE_ID = "msg_01234567890123456789012345"
const SYNTHETIC_PART_ID = "prt_01234567890123456789012345"
@@ -32,37 +32,6 @@ export const createSyntheticUserMessage = (baseMessage: WithParts, content: stri
}
}

export const createSyntheticAssistantMessage = (
baseMessage: WithParts,
content: string,
): WithParts => {
const assistantInfo = baseMessage.info as AssistantMessage
return {
info: {
id: SYNTHETIC_MESSAGE_ID,
sessionID: assistantInfo.sessionID,
role: "assistant",
parentID: assistantInfo.parentID,
modelID: assistantInfo.modelID,
providerID: assistantInfo.providerID,
time: { created: Date.now() },
tokens: { input: 0, output: 0, reasoning: 0, cache: { read: 0, write: 0 } },
cost: 0,
path: assistantInfo.path,
mode: assistantInfo.mode,
},
parts: [
{
id: SYNTHETIC_PART_ID,
sessionID: assistantInfo.sessionID,
messageID: SYNTHETIC_MESSAGE_ID,
type: "text",
text: content,
},
],
}
}

/**
* Extracts a human-readable key from tool metadata for display purposes.
*/
10 changes: 0 additions & 10 deletions lib/prompts/assistant/nudge/nudge-both.txt

This file was deleted.

9 changes: 0 additions & 9 deletions lib/prompts/assistant/nudge/nudge-discard.txt

This file was deleted.

9 changes: 0 additions & 9 deletions lib/prompts/assistant/nudge/nudge-extract.txt

This file was deleted.

44 changes: 0 additions & 44 deletions lib/prompts/assistant/system/system-prompt-both.txt

This file was deleted.

36 changes: 0 additions & 36 deletions lib/prompts/assistant/system/system-prompt-discard.txt

This file was deleted.
