2 changes: 1 addition & 1 deletion internal/runtime/executor/iflow_executor.go
@@ -219,7 +219,7 @@ func (e *IFlowExecutor) ExecuteStream(ctx context.Context, auth *cliproxyauth.Au
}()

scanner := bufio.NewScanner(httpResp.Body)
scanner.Buffer(nil, 52_428_800) // 50MB
scanner.Buffer(nil, 52_428_800) // 50MB
var param any
for scanner.Scan() {
line := scanner.Bytes()
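A note on the Buffer call above: bufio.Scanner caps token size at 64 KiB by default (bufio.MaxScanTokenSize), so an overly long SSE line would abort the loop with bufio.ErrTooLong; passing nil with a 52_428_800 cap lets the scanner grow its buffer on demand up to 50 MiB. A minimal, self-contained sketch of the same pattern (hypothetical input, not the executor's actual stream):

package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	// A single line longer than bufio's default 64 KiB token limit.
	long := strings.Repeat("x", 200_000)
	scanner := bufio.NewScanner(strings.NewReader(long + "\n"))
	scanner.Buffer(nil, 52_428_800) // allow tokens up to 50 MiB, as in the diff above
	for scanner.Scan() {
		fmt.Println(len(scanner.Bytes())) // 200000; without Buffer this scan would fail with ErrTooLong
	}
	if err := scanner.Err(); err != nil {
		fmt.Println("scan error:", err)
	}
}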
2 changes: 1 addition & 1 deletion internal/translator/codex/claude/codex_claude_request.go
@@ -214,7 +214,7 @@ func ConvertClaudeRequestToCodex(modelName string, inputRawJSON []byte, _ bool)

// Add additional configuration parameters for the Codex API.
template, _ = sjson.Set(template, "parallel_tool_calls", true)
template, _ = sjson.Set(template, "reasoning.effort", "low")
template, _ = sjson.Set(template, "reasoning.effort", "medium")
template, _ = sjson.Set(template, "reasoning.summary", "auto")
template, _ = sjson.Set(template, "stream", true)
template, _ = sjson.Set(template, "store", false)
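For reference, the fixed flags above nest via sjson's dotted paths into a payload fragment like the one printed below; the only change in this PR is the default effort moving from "low" to "medium". A standalone sketch that mirrors the Set calls shown above (not the converter itself):

package main

import (
	"fmt"

	"github.com/tidwall/sjson"
)

func main() {
	// Mirrors the fixed Codex flags set by the converter above.
	template := "{}"
	template, _ = sjson.Set(template, "parallel_tool_calls", true)
	template, _ = sjson.Set(template, "reasoning.effort", "medium") // was "low" before this PR
	template, _ = sjson.Set(template, "reasoning.summary", "auto")
	template, _ = sjson.Set(template, "stream", true)
	template, _ = sjson.Set(template, "store", false)
	fmt.Println(template)
	// {"parallel_tool_calls":true,"reasoning":{"effort":"medium","summary":"auto"},"stream":true,"store":false}
}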
2 changes: 1 addition & 1 deletion internal/translator/codex/gemini/codex_gemini_request.go
@@ -245,7 +245,7 @@ func ConvertGeminiRequestToCodex(modelName string, inputRawJSON []byte, _ bool)

// Fixed flags aligning with Codex expectations
out, _ = sjson.Set(out, "parallel_tool_calls", true)
out, _ = sjson.Set(out, "reasoning.effort", "low")
out, _ = sjson.Set(out, "reasoning.effort", "medium")
out, _ = sjson.Set(out, "reasoning.summary", "auto")
out, _ = sjson.Set(out, "stream", true)
out, _ = sjson.Set(out, "store", false)
@@ -60,7 +60,7 @@ func ConvertOpenAIRequestToCodex(modelName string, inputRawJSON []byte, stream b
if v := gjson.GetBytes(rawJSON, "reasoning_effort"); v.Exists() {
out, _ = sjson.Set(out, "reasoning.effort", v.Value())
} else {
out, _ = sjson.Set(out, "reasoning.effort", "low")
out, _ = sjson.Set(out, "reasoning.effort", "medium")
}
out, _ = sjson.Set(out, "parallel_tool_calls", true)
out, _ = sjson.Set(out, "reasoning.summary", "auto")
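This hunk only changes the fallback branch: an explicit reasoning_effort in the incoming OpenAI-style request is still forwarded as-is, while requests that omit it now default to "medium" rather than "low". A minimal standalone sketch of that behavior using the same gjson/sjson calls (not the converter itself):

package main

import (
	"fmt"

	"github.com/tidwall/gjson"
	"github.com/tidwall/sjson"
)

// applyReasoningEffort mirrors the defaulting logic in the hunk above.
func applyReasoningEffort(out string, rawJSON []byte) string {
	if v := gjson.GetBytes(rawJSON, "reasoning_effort"); v.Exists() {
		out, _ = sjson.Set(out, "reasoning.effort", v.Value())
	} else {
		out, _ = sjson.Set(out, "reasoning.effort", "medium") // new default; previously "low"
	}
	return out
}

func main() {
	fmt.Println(applyReasoningEffort("{}", []byte(`{"reasoning_effort":"high"}`))) // {"reasoning":{"effort":"high"}}
	fmt.Println(applyReasoningEffort("{}", []byte(`{}`)))                          // {"reasoning":{"effort":"medium"}}
}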
75 changes: 65 additions & 10 deletions internal/watcher/watcher.go
@@ -14,6 +14,7 @@ import (
"os"
"path/filepath"
"reflect"
"runtime"
"sort"
"strings"
"sync"
@@ -62,6 +63,7 @@ type Watcher struct
reloadCallback func(*config.Config)
watcher *fsnotify.Watcher
lastAuthHashes map[string]string
lastRemoveTimes map[string]time.Time
lastConfigHash string
authQueue chan<- AuthUpdate
currentAuths map[string]*coreauth.Auth
@@ -128,8 +130,9 @@ type AuthUpdate struct {
const (
// replaceCheckDelay is a short delay to allow atomic replace (rename) to settle
// before deciding whether a Remove event indicates a real deletion.
replaceCheckDelay = 50 * time.Millisecond
configReloadDebounce = 150 * time.Millisecond
replaceCheckDelay = 50 * time.Millisecond
configReloadDebounce = 150 * time.Millisecond
authRemoveDebounceWindow = 1 * time.Second
)

// NewWatcher creates a new file watcher instance
@@ -750,8 +753,9 @@ func (w *Watcher) authFileUnchanged(path string) (bool, error) {
sum := sha256.Sum256(data)
curHash := hex.EncodeToString(sum[:])

normalized := w.normalizeAuthPath(path)
w.clientsMutex.RLock()
prevHash, ok := w.lastAuthHashes[path]
prevHash, ok := w.lastAuthHashes[normalized]
w.clientsMutex.RUnlock()
if ok && prevHash == curHash {
return true, nil
@@ -760,19 +764,63 @@ func (w *Watcher) authFileUnchanged(path string) (bool, error) {
}

func (w *Watcher) isKnownAuthFile(path string) bool {
normalized := w.normalizeAuthPath(path)
w.clientsMutex.RLock()
defer w.clientsMutex.RUnlock()
_, ok := w.lastAuthHashes[path]
_, ok := w.lastAuthHashes[normalized]
return ok
}

func (w *Watcher) normalizeAuthPath(path string) string {
trimmed := strings.TrimSpace(path)
if trimmed == "" {
return ""
}
cleaned := filepath.Clean(trimmed)
if runtime.GOOS == "windows" {
cleaned = strings.TrimPrefix(cleaned, `\\?\`)
cleaned = strings.ToLower(cleaned)
}
return cleaned
}

func (w *Watcher) shouldDebounceRemove(normalizedPath string, now time.Time) bool {
if normalizedPath == "" {
return false
}
w.clientsMutex.Lock()
if w.lastRemoveTimes == nil {
w.lastRemoveTimes = make(map[string]time.Time)
}
if last, ok := w.lastRemoveTimes[normalizedPath]; ok {
if now.Sub(last) < authRemoveDebounceWindow {
w.clientsMutex.Unlock()
return true
}
}
w.lastRemoveTimes[normalizedPath] = now
if len(w.lastRemoveTimes) > 128 {

Review comment (severity: medium): The value 128 is a magic number that determines the maximum size of the lastRemoveTimes cache. It's better to define this as a constant to improve readability and maintainability. I suggest defining a constant like authRemoveDebounceCacheSize = 128 in the const block around line 134 and using it here. (A sketch of this suggestion follows the function below.)

cutoff := now.Add(-2 * authRemoveDebounceWindow)
for p, t := range w.lastRemoveTimes {
if t.Before(cutoff) {
delete(w.lastRemoveTimes, p)
}
}
}
w.clientsMutex.Unlock()
return false
}
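A minimal sketch of the reviewer's suggestion above (the constant name authRemoveDebounceCacheSize comes from the comment and is not part of this PR's diff):

const (
	replaceCheckDelay           = 50 * time.Millisecond
	configReloadDebounce        = 150 * time.Millisecond
	authRemoveDebounceWindow    = 1 * time.Second
	authRemoveDebounceCacheSize = 128 // upper bound on entries kept in lastRemoveTimes
)

// ...and in shouldDebounceRemove the literal comparison would become:
//   if len(w.lastRemoveTimes) > authRemoveDebounceCacheSize {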

// handleEvent processes individual file system events
func (w *Watcher) handleEvent(event fsnotify.Event) {
// Filter only relevant events: config file or auth-dir JSON files.
configOps := fsnotify.Write | fsnotify.Create | fsnotify.Rename
isConfigEvent := event.Name == w.configPath && event.Op&configOps != 0
normalizedName := w.normalizeAuthPath(event.Name)
normalizedConfigPath := w.normalizeAuthPath(w.configPath)
normalizedAuthDir := w.normalizeAuthPath(w.authDir)
isConfigEvent := normalizedName == normalizedConfigPath && event.Op&configOps != 0
authOps := fsnotify.Create | fsnotify.Write | fsnotify.Remove | fsnotify.Rename
isAuthJSON := strings.HasPrefix(event.Name, w.authDir) && strings.HasSuffix(event.Name, ".json") && event.Op&authOps != 0
isAuthJSON := strings.HasPrefix(normalizedName, normalizedAuthDir) && strings.HasSuffix(normalizedName, ".json") && event.Op&authOps != 0

// Check for Kiro IDE token file changes
isKiroIDEToken := w.isKiroIDETokenFile(event.Name) && event.Op&authOps != 0
@@ -800,6 +848,10 @@ func (w *Watcher) handleEvent(event fsnotify.Event) {

// Handle auth directory changes incrementally (.json only)
if event.Op&(fsnotify.Remove|fsnotify.Rename) != 0 {
if w.shouldDebounceRemove(normalizedName, now) {
log.Debugf("debouncing remove event for %s", filepath.Base(event.Name))
return
}
// Atomic replace on some platforms may surface as Rename (or Remove) before the new file is ready.
// Wait briefly; if the path exists again, treat as an update instead of removal.
time.Sleep(replaceCheckDelay)
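The comment above states the intent of the delay; the rest of this branch is collapsed in the diff view, but the documented behavior amounts to roughly the following (a sketch using handlers that exist elsewhere in this file, not the file's actual lines):

// after the sleep above:
if _, statErr := os.Stat(event.Name); statErr == nil {
	w.addOrUpdateClient(event.Name) // path reappeared: atomic replace, treat as update
	return
}
w.removeClient(event.Name) // still gone: genuine deletion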
@@ -1062,7 +1114,8 @@ func (w *Watcher) reloadClients(rescanAuth bool, affectedOAuthProviders []string
if !info.IsDir() && strings.HasSuffix(strings.ToLower(info.Name()), ".json") {
if data, errReadFile := os.ReadFile(path); errReadFile == nil && len(data) > 0 {
sum := sha256.Sum256(data)
w.lastAuthHashes[path] = hex.EncodeToString(sum[:])
normalizedPath := w.normalizeAuthPath(path)
w.lastAuthHashes[normalizedPath] = hex.EncodeToString(sum[:])
}
}
return nil
@@ -1109,6 +1162,7 @@ func (w *Watcher) addOrUpdateClient(path string) {

sum := sha256.Sum256(data)
curHash := hex.EncodeToString(sum[:])
normalized := w.normalizeAuthPath(path)

w.clientsMutex.Lock()

@@ -1118,14 +1172,14 @@ func (w *Watcher) addOrUpdateClient(path string) {
w.clientsMutex.Unlock()
return
}
if prev, ok := w.lastAuthHashes[path]; ok && prev == curHash {
if prev, ok := w.lastAuthHashes[normalized]; ok && prev == curHash {
log.Debugf("auth file unchanged (hash match), skipping reload: %s", filepath.Base(path))
w.clientsMutex.Unlock()
return
}

// Update hash cache
w.lastAuthHashes[path] = curHash
w.lastAuthHashes[normalized] = curHash

w.clientsMutex.Unlock() // Unlock before the callback

@@ -1140,10 +1194,11 @@ func (w *Watcher) addOrUpdateClient(path string) {

// removeClient handles the removal of a single client.
func (w *Watcher) removeClient(path string) {
normalized := w.normalizeAuthPath(path)
w.clientsMutex.Lock()

cfg := w.config
delete(w.lastAuthHashes, path)
delete(w.lastAuthHashes, normalized)

w.clientsMutex.Unlock() // Release the lock before the callback
