diff --git a/cmd/server/main.go b/cmd/server/main.go index 2b20bcb5f..1e098f117 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -23,6 +23,7 @@ import ( "github.com/router-for-me/CLIProxyAPI/v6/internal/logging" "github.com/router-for-me/CLIProxyAPI/v6/internal/managementasset" "github.com/router-for-me/CLIProxyAPI/v6/internal/misc" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" "github.com/router-for-me/CLIProxyAPI/v6/internal/store" _ "github.com/router-for-me/CLIProxyAPI/v6/internal/translator" "github.com/router-for-me/CLIProxyAPI/v6/internal/usage" @@ -47,6 +48,50 @@ func init() { buildinfo.BuildDate = BuildDate } +type authPersister interface { + PersistAuthFiles(ctx context.Context, message string, paths ...string) error +} + +func applyAuthEncryptionConfig(cfg *config.Config, authDir string, persister authPersister, migrate bool) { + if cfg == nil { + securefile.ConfigureAuthEncryption(securefile.AuthEncryptionSettings{}) + return + } + settings := securefile.AuthEncryptionSettings{ + Enabled: cfg.AuthEncryption.Enabled, + AllowPlaintextFallback: cfg.AuthEncryption.AllowPlaintextFallback, + } + secret := securefile.ResolveAuthEncryptionSecret(settings.Secret) + settings.Secret = secret + securefile.ConfigureAuthEncryption(settings) + if secret == "" { + if settings.Enabled { + log.Warn("auth-encryption enabled but no key configured; set CLIPROXY_AUTH_ENCRYPTION_KEY or CLI_PROXY_API_AUTH_ENCRYPTION_KEY") + } else if migrate { + log.Warn("auth-encryption disabled but no key configured; encrypted auth files cannot be decrypted without CLIPROXY_AUTH_ENCRYPTION_KEY or CLI_PROXY_API_AUTH_ENCRYPTION_KEY") + } + } + if !migrate || secret == "" { + return + } + changed, err := securefile.MigrateAuthJSONDir(authDir, settings) + if err != nil { + log.WithError(err).Warn("auth encryption migration encountered errors") + } + if len(changed) == 0 { + return + } + log.Infof("auth encryption migration updated %d auth file(s)", len(changed)) + if persister == nil { + return + } + ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() + if err := persister.PersistAuthFiles(ctx, "Migrate auth encryption", changed...); err != nil { + log.WithError(err).Warn("failed to persist auth encryption migration") + } +} + // main is the entry point of the application. // It parses command-line flags, loads configuration, and starts the appropriate // service based on the provided flags (login, codex-login, or server mode). @@ -421,6 +466,13 @@ func main() { } else { cfg.AuthDir = resolvedAuthDir } + if repoRoot, ok := util.FindGitRepoRoot(cfg.AuthDir); ok { + if useGitStore { + log.Warnf("auth-dir %q is inside a git repository (%q); git-backed token storage is enabled, ensure the repo/remote is private and tokens are not exposed", cfg.AuthDir, repoRoot) + } else { + log.Warnf("auth-dir %q is inside a git repository (%q); do not commit token files, add it to .gitignore or set auth-dir outside the repo", cfg.AuthDir, repoRoot) + } + } managementasset.SetCurrentConfig(cfg) // Create login options to be used in authentication flows. @@ -439,6 +491,14 @@ func main() { sdkAuth.RegisterTokenStore(sdkAuth.NewFileTokenStore()) } + var persister authPersister + if store := sdkAuth.GetTokenStore(); store != nil { + if p, ok := store.(authPersister); ok { + persister = p + } + } + applyAuthEncryptionConfig(cfg, cfg.AuthDir, persister, true) + // Register built-in access providers before constructing services. 
configaccess.Register() diff --git a/config.example.yaml b/config.example.yaml index f6390d2ff..b16902679 100644 --- a/config.example.yaml +++ b/config.example.yaml @@ -31,6 +31,14 @@ remote-management: # Authentication directory (supports ~ for home directory) auth-dir: "~/.cli-proxy-api" +# Auth file encryption-at-rest +auth-encryption: + enabled: false + allow-plaintext-fallback: true + # Encryption key is read from env: + # - CLIPROXY_AUTH_ENCRYPTION_KEY + # - CLI_PROXY_API_AUTH_ENCRYPTION_KEY + # API keys for authentication api-keys: - "your-api-key-1" diff --git a/internal/api/handlers/management/auth_files.go b/internal/api/handlers/management/auth_files.go index 41a4fde40..54bf181ea 100644 --- a/internal/api/handlers/management/auth_files.go +++ b/internal/api/handlers/management/auth_files.go @@ -27,6 +27,7 @@ import ( "github.com/router-for-me/CLIProxyAPI/v6/internal/interfaces" "github.com/router-for-me/CLIProxyAPI/v6/internal/misc" "github.com/router-for-me/CLIProxyAPI/v6/internal/registry" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" "github.com/router-for-me/CLIProxyAPI/v6/internal/util" sdkAuth "github.com/router-for-me/CLIProxyAPI/v6/sdk/auth" coreauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" @@ -345,7 +346,7 @@ func (h *Handler) listAuthFilesFromDisk(c *gin.Context) { // Read file to get type field full := filepath.Join(h.cfg.AuthDir, name) - if data, errRead := os.ReadFile(full); errRead == nil { + if data, errRead := readAuthJSON(full); errRead == nil { typeValue := gjson.GetBytes(data, "type").String() emailValue := gjson.GetBytes(data, "email").String() fileData["type"] = typeValue @@ -457,6 +458,17 @@ func authAttribute(auth *coreauth.Auth, key string) string { return auth.Attributes[key] } +func readAuthJSON(path string) ([]byte, error) { + if strings.TrimSpace(path) == "" { + return nil, fmt.Errorf("auth file path is empty") + } + data, _, err := securefile.ReadAuthJSONFile(path) + if err != nil { + return nil, err + } + return data, nil +} + func isRuntimeOnlyAuth(auth *coreauth.Auth) bool { if auth == nil || len(auth.Attributes) == 0 { return false @@ -476,7 +488,7 @@ func (h *Handler) DownloadAuthFile(c *gin.Context) { return } full := filepath.Join(h.cfg.AuthDir, name) - data, err := os.ReadFile(full) + data, err := readAuthJSON(full) if err != nil { if os.IsNotExist(err) { c.JSON(404, gin.H{"error": "file not found"}) @@ -512,11 +524,15 @@ func (h *Handler) UploadAuthFile(c *gin.Context) { c.JSON(500, gin.H{"error": fmt.Sprintf("failed to save file: %v", errSave)}) return } - data, errRead := os.ReadFile(dst) + data, errRead := readAuthJSON(dst) if errRead != nil { c.JSON(500, gin.H{"error": fmt.Sprintf("failed to read saved file: %v", errRead)}) return } + if errWrite := securefile.WriteAuthJSONFile(dst, data); errWrite != nil { + c.JSON(500, gin.H{"error": fmt.Sprintf("failed to secure auth file: %v", errWrite)}) + return + } if errReg := h.registerAuthFromFile(ctx, dst, data); errReg != nil { c.JSON(500, gin.H{"error": errReg.Error()}) return @@ -544,8 +560,8 @@ func (h *Handler) UploadAuthFile(c *gin.Context) { dst = abs } } - if errWrite := os.WriteFile(dst, data, 0o600); errWrite != nil { - c.JSON(500, gin.H{"error": fmt.Sprintf("failed to write file: %v", errWrite)}) + if errWrite := securefile.WriteAuthJSONFile(dst, data); errWrite != nil { + c.JSON(500, gin.H{"error": fmt.Sprintf("failed to secure auth file: %v", errWrite)}) return } if err = h.registerAuthFromFile(ctx, dst, data); err != nil { @@ -649,7 +665,7 @@ 
func (h *Handler) registerAuthFromFile(ctx context.Context, path string, data [] } if data == nil { var err error - data, err = os.ReadFile(path) + data, err = readAuthJSON(path) if err != nil { return fmt.Errorf("failed to read auth file: %w", err) } diff --git a/internal/config/config.go b/internal/config/config.go index 9d0ad606d..0e3c9c132 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -36,6 +36,9 @@ type Config struct { // AuthDir is the directory where authentication token files are stored. AuthDir string `yaml:"auth-dir" json:"-"` + // AuthEncryption controls encryption-at-rest for auth JSON files. + AuthEncryption AuthEncryptionConfig `yaml:"auth-encryption" json:"auth-encryption"` + // Debug enables or disables debug-level logging and other debug features. Debug bool `yaml:"debug" json:"debug"` @@ -117,6 +120,14 @@ type RemoteManagement struct { PanelGitHubRepository string `yaml:"panel-github-repository"` } +// AuthEncryptionConfig controls auth file encryption settings. +type AuthEncryptionConfig struct { + // Enabled toggles encryption-at-rest for auth JSON files. + Enabled bool `yaml:"enabled" json:"enabled"` + // AllowPlaintextFallback enables best-effort re-encryption of plaintext auth files when enabled. + AllowPlaintextFallback bool `yaml:"allow-plaintext-fallback" json:"allow-plaintext-fallback"` +} + // QuotaExceeded defines the behavior when API quota limits are exceeded. // It provides configuration options for automatic failover mechanisms. type QuotaExceeded struct { diff --git a/internal/logging/request_logger.go b/internal/logging/request_logger.go index 397a4a083..946bc9f6d 100644 --- a/internal/logging/request_logger.go +++ b/internal/logging/request_logger.go @@ -228,7 +228,7 @@ func (l *FileRequestLogger) logRequest(url, method string, requestHeaders map[st responseToWrite = response } - logFile, errOpen := os.OpenFile(filePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644) + logFile, errOpen := os.OpenFile(filePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600) if errOpen != nil { return fmt.Errorf("failed to create log file: %w", errOpen) } @@ -344,7 +344,7 @@ func (l *FileRequestLogger) generateErrorFilename(url string, requestID ...strin // - error: An error if directory creation fails, nil otherwise func (l *FileRequestLogger) ensureLogsDir() error { if _, err := os.Stat(l.logsDir); os.IsNotExist(err) { - return os.MkdirAll(l.logsDir, 0755) + return os.MkdirAll(l.logsDir, 0o700) } return nil } @@ -917,7 +917,7 @@ func (l *FileRequestLogger) formatRequestInfo(url, method string, headers map[st content.WriteString("\n") content.WriteString("=== REQUEST BODY ===\n") - content.Write(body) + content.Write(util.MaskSensitiveJSON(body)) content.WriteString("\n\n") return content.String() @@ -1082,7 +1082,7 @@ func (w *FileStreamingLogWriter) Close() error { return nil } - logFile, errOpen := os.OpenFile(w.logFilePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644) + logFile, errOpen := os.OpenFile(w.logFilePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600) if errOpen != nil { w.cleanupTempFiles() return fmt.Errorf("failed to create log file: %w", errOpen) diff --git a/internal/securefile/atomic.go b/internal/securefile/atomic.go new file mode 100644 index 000000000..20697d35c --- /dev/null +++ b/internal/securefile/atomic.go @@ -0,0 +1,94 @@ +package securefile + +import ( + "fmt" + "os" + "path/filepath" + "time" +) + +// EnsurePrivateDir creates dirPath (and parents) with 0700 permissions. 
+func EnsurePrivateDir(dirPath string) error { + if dirPath == "" { + return fmt.Errorf("securefile: dir path is empty") + } + if err := os.MkdirAll(dirPath, 0o700); err != nil { + return err + } + // Best-effort permission hardening. Ignore errors (e.g., non-POSIX FS). + _ = os.Chmod(dirPath, 0o700) + return nil +} + +// AtomicWriteFile writes data to path using a temp file + rename, and attempts to fsync. +// mode controls the final file permissions. +func AtomicWriteFile(path string, data []byte, mode os.FileMode) error { + if path == "" { + return fmt.Errorf("securefile: path is empty") + } + dir := filepath.Dir(path) + if err := EnsurePrivateDir(dir); err != nil { + return err + } + + tmp, err := os.CreateTemp(dir, ".tmp.*") + if err != nil { + return err + } + tmpName := tmp.Name() + defer func() { + _ = tmp.Close() + _ = os.Remove(tmpName) + }() + + if mode == 0 { + mode = 0o600 + } + if err := tmp.Chmod(mode); err != nil { + // Best-effort: ignore chmod failure on some filesystems. + } + + if _, err := tmp.Write(data); err != nil { + return err + } + if err := tmp.Sync(); err != nil { + return err + } + if err := tmp.Close(); err != nil { + return err + } + + if err := os.Rename(tmpName, path); err != nil { + return err + } + + // Best-effort: ensure final mode. + _ = os.Chmod(path, mode) + return nil +} + +// ReadFileRawLocked reads the file at path while holding an advisory lock on path+".lock". +func ReadFileRawLocked(path string) ([]byte, error) { + lockPath := path + ".lock" + var out []byte + err := WithLock(lockPath, 10*time.Second, func() error { + data, err := os.ReadFile(path) + if err != nil { + return err + } + out = data + return nil + }) + if err != nil { + return nil, err + } + return out, nil +} + +// WriteFileRawLocked writes data to path using an advisory lock on path+".lock" and atomic replace. +func WriteFileRawLocked(path string, data []byte, mode os.FileMode) error { + lockPath := path + ".lock" + return WithLock(lockPath, 10*time.Second, func() error { + return AtomicWriteFile(path, data, mode) + }) +} diff --git a/internal/securefile/authjson.go b/internal/securefile/authjson.go new file mode 100644 index 000000000..fc7a12971 --- /dev/null +++ b/internal/securefile/authjson.go @@ -0,0 +1,201 @@ +package securefile + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "time" +) + +const ( + authEnvelopeVersion = 1 + authEnvelopeAlgAES = "aes-256-gcm" +) + +type authEnvelope struct { + V int `json:"v"` + Alg string `json:"alg"` + Nonce string `json:"nonce"` + Ct string `json:"ct"` +} + +// deriveKey returns a 32-byte key derived from the provided secret. If the secret is base64 +// for exactly 32 bytes, it is used directly; otherwise SHA-256(secret) is used. 
+func deriveKey(secret string) ([]byte, error) { + secret = strings.TrimSpace(secret) + if secret == "" { + return nil, fmt.Errorf("securefile: encryption secret is empty") + } + if decoded, err := base64.StdEncoding.DecodeString(secret); err == nil && len(decoded) == 32 { + return decoded, nil + } + sum := sha256.Sum256([]byte(secret)) + return sum[:], nil +} + +func encryptBytes(plaintext []byte, secret string) ([]byte, error) { + key, err := deriveKey(secret) + if err != nil { + return nil, err + } + block, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + nonce := make([]byte, gcm.NonceSize()) + if _, err := rand.Read(nonce); err != nil { + return nil, err + } + ct := gcm.Seal(nil, nonce, plaintext, nil) + env := authEnvelope{ + V: authEnvelopeVersion, + Alg: authEnvelopeAlgAES, + Nonce: base64.StdEncoding.EncodeToString(nonce), + Ct: base64.StdEncoding.EncodeToString(ct), + } + return json.Marshal(env) +} + +func decryptBytes(envelopeBytes []byte, secret string) ([]byte, error) { + var env authEnvelope + if err := json.Unmarshal(envelopeBytes, &env); err != nil { + return nil, fmt.Errorf("securefile: invalid encrypted envelope: %w", err) + } + if env.V != authEnvelopeVersion || strings.TrimSpace(env.Alg) != authEnvelopeAlgAES { + return nil, fmt.Errorf("securefile: unsupported envelope (v=%d alg=%s)", env.V, env.Alg) + } + key, err := deriveKey(secret) + if err != nil { + return nil, err + } + block, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + nonce, err := base64.StdEncoding.DecodeString(env.Nonce) + if err != nil { + return nil, fmt.Errorf("securefile: invalid nonce encoding: %w", err) + } + ct, err := base64.StdEncoding.DecodeString(env.Ct) + if err != nil { + return nil, fmt.Errorf("securefile: invalid ciphertext encoding: %w", err) + } + plaintext, err := gcm.Open(nil, nonce, ct, nil) + if err != nil { + return nil, fmt.Errorf("securefile: decrypt failed: %w", err) + } + return plaintext, nil +} + +func looksEncryptedEnvelope(raw []byte) bool { + var probe map[string]any + if err := json.Unmarshal(raw, &probe); err != nil { + return false + } + _, hasV := probe["v"] + _, hasAlg := probe["alg"] + _, hasCt := probe["ct"] + return hasV && hasAlg && hasCt +} + +// DecodeAuthJSON returns decrypted JSON bytes if raw is an encrypted envelope; otherwise returns raw. +// It returns (plaintext, wasEncrypted, error). 
+func DecodeAuthJSON(raw []byte, settings AuthEncryptionSettings) ([]byte, bool, error) { + trimmed := strings.TrimSpace(string(raw)) + if trimmed == "" { + return raw, false, nil + } + if !looksEncryptedEnvelope(raw) { + return raw, false, nil + } + secret := ResolveAuthEncryptionSecret(settings.Secret) + if strings.TrimSpace(secret) == "" { + return nil, true, fmt.Errorf("securefile: auth file is encrypted but no encryption key is configured") + } + plaintext, err := decryptBytes(raw, secret) + if err != nil { + return nil, true, err + } + return plaintext, true, nil +} + +func writeAuthJSONFileUnlocked(path string, jsonBytes []byte, settings AuthEncryptionSettings) error { + payload := jsonBytes + if settings.Enabled { + secret := ResolveAuthEncryptionSecret(settings.Secret) + if strings.TrimSpace(secret) == "" { + return fmt.Errorf("securefile: auth encryption enabled but no encryption key configured") + } + enc, err := encryptBytes(jsonBytes, secret) + if err != nil { + return err + } + payload = enc + } + if err := EnsurePrivateDir(filepath.Dir(path)); err != nil { + return err + } + return AtomicWriteFile(path, payload, 0o600) +} + +// ReadAuthJSONFile reads path, locking path+".lock", and returns decrypted JSON when needed. +func ReadAuthJSONFile(path string) ([]byte, bool, error) { + settings := CurrentAuthEncryption() + lockPath := path + ".lock" + var ( + out []byte + encrypted bool + readErr error + ) + err := WithLock(lockPath, 10*time.Second, func() error { + raw, err := os.ReadFile(path) + if err != nil { + return err + } + plaintext, wasEncrypted, err := DecodeAuthJSON(raw, settings) + if err != nil { + return err + } + out = plaintext + encrypted = wasEncrypted + // Best-effort migration: if encryption is enabled and we read plaintext, re-save encrypted. + if settings.Enabled && !wasEncrypted && settings.AllowPlaintextFallback { + if err := writeAuthJSONFileUnlocked(path, plaintext, settings); err != nil { + // ignore; caller still gets plaintext content + } + } + return nil + }) + if err != nil { + readErr = err + } + return out, encrypted, readErr +} + +// WriteAuthJSONFile writes jsonBytes to path with 0600 perms, using lock + atomic write. +// If auth encryption is enabled, it stores an encrypted envelope. +func WriteAuthJSONFile(path string, jsonBytes []byte) error { + if path == "" { + return fmt.Errorf("securefile: path is empty") + } + settings := CurrentAuthEncryption() + lockPath := path + ".lock" + return WithLock(lockPath, 10*time.Second, func() error { + return writeAuthJSONFileUnlocked(path, jsonBytes, settings) + }) +} diff --git a/internal/securefile/errors.go b/internal/securefile/errors.go new file mode 100644 index 000000000..796235ba5 --- /dev/null +++ b/internal/securefile/errors.go @@ -0,0 +1,8 @@ +package securefile + +// LoadError captures best-effort load failures (parse/decrypt/read) when scanning auth stores. +type LoadError struct { + Path string `json:"path"` + ErrorType string `json:"error_type"` + Message string `json:"message"` +} diff --git a/internal/securefile/lock_unix.go b/internal/securefile/lock_unix.go new file mode 100644 index 000000000..d261cd68e --- /dev/null +++ b/internal/securefile/lock_unix.go @@ -0,0 +1,54 @@ +//go:build !windows + +package securefile + +import ( + "fmt" + "os" + "path/filepath" + "syscall" + "time" +) + +// WithLock obtains an advisory exclusive lock on lockPath (creating it if needed), +// runs fn, and then releases the lock. It retries until timeout. 
+func WithLock(lockPath string, timeout time.Duration, fn func() error) error { + if lockPath == "" { + return fmt.Errorf("securefile: lock path is empty") + } + if fn == nil { + return fmt.Errorf("securefile: lock fn is nil") + } + if timeout <= 0 { + timeout = 10 * time.Second + } + if err := EnsurePrivateDir(filepath.Dir(lockPath)); err != nil { + return err + } + f, err := os.OpenFile(lockPath, os.O_CREATE|os.O_RDWR, 0o600) + if err != nil { + return err + } + defer func() { _ = f.Close() }() + _ = f.Chmod(0o600) + + deadline := time.Now().Add(timeout) + for { + err := syscall.Flock(int(f.Fd()), syscall.LOCK_EX|syscall.LOCK_NB) + if err == nil { + break + } + // EWOULDBLOCK/EAGAIN indicates the lock is held by another process. + if err != syscall.EWOULDBLOCK && err != syscall.EAGAIN { + return fmt.Errorf("securefile: unexpected error acquiring lock on %s: %w", lockPath, err) + } + if time.Now().After(deadline) { + return fmt.Errorf("securefile: timed out acquiring lock: %s", lockPath) + } + time.Sleep(50 * time.Millisecond) + } + defer func() { + _ = syscall.Flock(int(f.Fd()), syscall.LOCK_UN) + }() + return fn() +} diff --git a/internal/securefile/lock_windows.go b/internal/securefile/lock_windows.go new file mode 100644 index 000000000..65c18a63f --- /dev/null +++ b/internal/securefile/lock_windows.go @@ -0,0 +1,58 @@ +//go:build windows + +package securefile + +import ( + "fmt" + "os" + "path/filepath" + "time" + + "golang.org/x/sys/windows" +) + +// WithLock obtains an exclusive file lock on lockPath (creating it if needed), +// runs fn, and then releases the lock. It retries until timeout. +func WithLock(lockPath string, timeout time.Duration, fn func() error) error { + if lockPath == "" { + return fmt.Errorf("securefile: lock path is empty") + } + if fn == nil { + return fmt.Errorf("securefile: lock fn is nil") + } + if timeout <= 0 { + timeout = 10 * time.Second + } + if err := EnsurePrivateDir(filepath.Dir(lockPath)); err != nil { + return err + } + + f, err := os.OpenFile(lockPath, os.O_CREATE|os.O_RDWR, 0o600) + if err != nil { + return err + } + defer func() { _ = f.Close() }() + _ = f.Chmod(0o600) + + handle := windows.Handle(f.Fd()) + var overlapped windows.Overlapped + + deadline := time.Now().Add(timeout) + for { + errLock := windows.LockFileEx(handle, windows.LOCKFILE_EXCLUSIVE_LOCK|windows.LOCKFILE_FAIL_IMMEDIATELY, 0, 1, 0, &overlapped) + if errLock == nil { + break + } + if errLock != windows.ERROR_LOCK_VIOLATION && errLock != windows.ERROR_SHARING_VIOLATION { + return errLock + } + if time.Now().After(deadline) { + return fmt.Errorf("securefile: timed out acquiring lock: %s", lockPath) + } + time.Sleep(50 * time.Millisecond) + } + defer func() { + _ = windows.UnlockFileEx(handle, 0, 1, 0, &overlapped) + }() + return fn() +} diff --git a/internal/securefile/migrate.go b/internal/securefile/migrate.go new file mode 100644 index 000000000..cff33b4fa --- /dev/null +++ b/internal/securefile/migrate.go @@ -0,0 +1,77 @@ +package securefile + +import ( + "errors" + "fmt" + "io/fs" + "path/filepath" + "strings" + "time" +) + +// MigrateAuthJSONDir rewrites auth JSON files in authDir to match settings. +// It returns the list of files that were updated. 
+func MigrateAuthJSONDir(authDir string, settings AuthEncryptionSettings) ([]string, error) { + root := strings.TrimSpace(authDir) + if root == "" { + return nil, nil + } + + var ( + changed []string + errs []error + ) + + walkErr := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error { + if err != nil { + errs = append(errs, err) + return nil + } + if d.IsDir() { + return nil + } + if !strings.HasSuffix(strings.ToLower(d.Name()), ".json") { + return nil + } + raw, err := ReadFileRawLocked(path) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return nil + } + errs = append(errs, fmt.Errorf("read auth file %s: %w", path, err)) + return nil + } + if len(raw) == 0 { + return nil + } + plaintext, wasEncrypted, err := DecodeAuthJSON(raw, settings) + if err != nil { + errs = append(errs, fmt.Errorf("decode auth file %s: %w", path, err)) + return nil + } + + if settings.Enabled && wasEncrypted { + return nil + } + if !settings.Enabled && !wasEncrypted { + return nil + } + + lockPath := path + ".lock" + if err := WithLock(lockPath, 10*time.Second, func() error { + return writeAuthJSONFileUnlocked(path, plaintext, settings) + }); err != nil { + errs = append(errs, fmt.Errorf("write auth file %s: %w", path, err)) + return nil + } + changed = append(changed, path) + return nil + }) + if walkErr != nil { + errs = append(errs, walkErr) + } + if len(errs) > 0 { + return changed, errors.Join(errs...) + } + return changed, nil +} diff --git a/internal/securefile/settings.go b/internal/securefile/settings.go new file mode 100644 index 000000000..bdd7bfbaa --- /dev/null +++ b/internal/securefile/settings.go @@ -0,0 +1,52 @@ +package securefile + +import ( + "os" + "strings" + "sync/atomic" +) + +// AuthEncryptionSettings controls encryption-at-rest for auth JSON files. +type AuthEncryptionSettings struct { + Enabled bool + Secret string + AllowPlaintextFallback bool +} + +var authEncryptionSettings atomic.Value // stores AuthEncryptionSettings + +func init() { + authEncryptionSettings.Store(AuthEncryptionSettings{}) +} + +// ConfigureAuthEncryption updates global auth encryption behavior. +func ConfigureAuthEncryption(settings AuthEncryptionSettings) { + settings.Secret = strings.TrimSpace(settings.Secret) + authEncryptionSettings.Store(settings) +} + +// CurrentAuthEncryption returns the active auth encryption settings. +func CurrentAuthEncryption() AuthEncryptionSettings { + if v := authEncryptionSettings.Load(); v != nil { + if s, ok := v.(AuthEncryptionSettings); ok { + return s + } + } + return AuthEncryptionSettings{} +} + +// ResolveAuthEncryptionSecret resolves a secret from config/env. +// Explicit secret wins; otherwise checks env CLIPROXY_AUTH_ENCRYPTION_KEY then CLI_PROXY_API_AUTH_ENCRYPTION_KEY. 
+func ResolveAuthEncryptionSecret(explicit string) string { + if trimmed := strings.TrimSpace(explicit); trimmed != "" { + return trimmed + } + for _, key := range []string{"CLIPROXY_AUTH_ENCRYPTION_KEY", "CLI_PROXY_API_AUTH_ENCRYPTION_KEY"} { + if v, ok := os.LookupEnv(key); ok { + if trimmed := strings.TrimSpace(v); trimmed != "" { + return trimmed + } + } + } + return "" +} diff --git a/internal/store/gitstore.go b/internal/store/gitstore.go index 3b68e4b0a..ca9953827 100644 --- a/internal/store/gitstore.go +++ b/internal/store/gitstore.go @@ -18,6 +18,7 @@ import ( "github.com/go-git/go-git/v6/plumbing/object" "github.com/go-git/go-git/v6/plumbing/transport" "github.com/go-git/go-git/v6/plumbing/transport/http" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" ) @@ -249,19 +250,15 @@ func (s *GitTokenStore) Save(_ context.Context, auth *cliproxyauth.Auth) (string if errMarshal != nil { return "", fmt.Errorf("auth filestore: marshal metadata failed: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { if jsonEqual(existing, raw) { return path, nil } } else if !os.IsNotExist(errRead) { return "", fmt.Errorf("auth filestore: read existing failed: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("auth filestore: write temp failed: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("auth filestore: rename failed: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("auth filestore: write failed: %w", errWrite) } default: return "", fmt.Errorf("auth filestore: nothing to persist for %s", auth.ID) @@ -406,7 +403,7 @@ func (s *GitTokenStore) resolveDeletePath(id string) (string, error) { } func (s *GitTokenStore) readAuthFile(path, baseDir string) (*cliproxyauth.Auth, error) { - data, err := os.ReadFile(path) + data, _, err := securefile.ReadAuthJSONFile(path) if err != nil { return nil, fmt.Errorf("read file: %w", err) } @@ -679,7 +676,7 @@ func (s *GitTokenStore) PersistConfig(_ context.Context) error { func ensureEmptyFile(path string) error { if _, err := os.Stat(path); err != nil { if errors.Is(err, fs.ErrNotExist) { - return os.WriteFile(path, []byte{}, 0o600) + return securefile.WriteFileRawLocked(path, []byte{}, 0o600) } return err } diff --git a/internal/store/objectstore.go b/internal/store/objectstore.go index 726ebc9fa..e5dc505d7 100644 --- a/internal/store/objectstore.go +++ b/internal/store/objectstore.go @@ -18,6 +18,7 @@ import ( "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" "github.com/router-for-me/CLIProxyAPI/v6/internal/misc" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" log "github.com/sirupsen/logrus" ) @@ -192,19 +193,15 @@ func (s *ObjectTokenStore) Save(ctx context.Context, auth *cliproxyauth.Auth) (s if errMarshal != nil { return "", fmt.Errorf("object store: marshal metadata: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { if jsonEqual(existing, raw) { return path, nil } } else if errRead != nil && !errors.Is(errRead, fs.ErrNotExist) { return "", 
fmt.Errorf("object store: read existing metadata: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("object store: write temp auth file: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("object store: rename auth file: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("object store: write auth file: %w", errWrite) } default: return "", fmt.Errorf("object store: nothing to persist for %s", auth.ID) @@ -311,7 +308,7 @@ func (s *ObjectTokenStore) PersistConfig(ctx context.Context) error { s.mu.Lock() defer s.mu.Unlock() - data, err := os.ReadFile(s.configPath) + data, err := securefile.ReadFileRawLocked(s.configPath) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteObject(ctx, objectStoreConfigKey) @@ -352,7 +349,7 @@ func (s *ObjectTokenStore) syncConfigFromBucket(ctx context.Context, example str if errRead != nil { return fmt.Errorf("object store: read config: %w", errRead) } - if errWrite := os.WriteFile(s.configPath, normalizeLineEndingsBytes(data), 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(s.configPath, normalizeLineEndingsBytes(data), 0o600); errWrite != nil { return fmt.Errorf("object store: write config: %w", errWrite) } case isObjectNotFound(err): @@ -365,12 +362,12 @@ func (s *ObjectTokenStore) syncConfigFromBucket(ctx context.Context, example str if errCreate := os.MkdirAll(filepath.Dir(s.configPath), 0o700); errCreate != nil { return fmt.Errorf("object store: prepare config directory: %w", errCreate) } - if errWrite := os.WriteFile(s.configPath, []byte{}, 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(s.configPath, []byte{}, 0o600); errWrite != nil { return fmt.Errorf("object store: create empty config: %w", errWrite) } } } - data, errRead := os.ReadFile(s.configPath) + data, errRead := securefile.ReadFileRawLocked(s.configPath) if errRead != nil { return fmt.Errorf("object store: read local config: %w", errRead) } @@ -429,7 +426,7 @@ func (s *ObjectTokenStore) syncAuthFromBucket(ctx context.Context) error { if errRead != nil { return fmt.Errorf("object store: read auth %s: %w", object.Key, errRead) } - if errWrite := os.WriteFile(local, data, 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(local, data, 0o600); errWrite != nil { return fmt.Errorf("object store: write auth %s: %w", local, errWrite) } } @@ -444,7 +441,7 @@ func (s *ObjectTokenStore) uploadAuth(ctx context.Context, path string) error { if err != nil { return fmt.Errorf("object store: resolve auth relative path: %w", err) } - data, err := os.ReadFile(path) + data, err := securefile.ReadFileRawLocked(path) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteAuthObject(ctx, path) @@ -553,7 +550,7 @@ func (s *ObjectTokenStore) resolveDeletePath(id string) (string, error) { } func (s *ObjectTokenStore) readAuthFile(path, baseDir string) (*cliproxyauth.Auth, error) { - data, err := os.ReadFile(path) + data, _, err := securefile.ReadAuthJSONFile(path) if err != nil { return nil, fmt.Errorf("read file: %w", err) } diff --git a/internal/store/postgresstore.go b/internal/store/postgresstore.go index a18f45f8b..d6af5ae7b 100644 --- a/internal/store/postgresstore.go +++ b/internal/store/postgresstore.go @@ -15,6 +15,7 @@ import ( _ "github.com/jackc/pgx/v5/stdlib" "github.com/router-for-me/CLIProxyAPI/v6/internal/misc" + 
"github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" log "github.com/sirupsen/logrus" ) @@ -222,19 +223,15 @@ func (s *PostgresStore) Save(ctx context.Context, auth *cliproxyauth.Auth) (stri if errMarshal != nil { return "", fmt.Errorf("postgres store: marshal metadata: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { if jsonEqual(existing, raw) { return path, nil } } else if errRead != nil && !errors.Is(errRead, fs.ErrNotExist) { return "", fmt.Errorf("postgres store: read existing metadata: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("postgres store: write temp auth file: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("postgres store: rename auth file: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("postgres store: write auth file: %w", errWrite) } default: return "", fmt.Errorf("postgres store: nothing to persist for %s", auth.ID) @@ -285,7 +282,13 @@ func (s *PostgresStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) continue } metadata := make(map[string]any) - if err = json.Unmarshal([]byte(payload), &metadata); err != nil { + raw := []byte(payload) + decoded, _, errDecode := securefile.DecodeAuthJSON(raw, securefile.CurrentAuthEncryption()) + if errDecode != nil { + log.WithError(errDecode).Warnf("postgres store: auth %s decode failed, skipping", id) + continue + } + if err = json.Unmarshal(decoded, &metadata); err != nil { log.WithError(err).Warnf("postgres store: skipping auth %s with invalid json", id) continue } @@ -381,7 +384,7 @@ func (s *PostgresStore) PersistConfig(ctx context.Context) error { s.mu.Lock() defer s.mu.Unlock() - data, err := os.ReadFile(s.configPath) + data, err := securefile.ReadFileRawLocked(s.configPath) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteConfigRecord(ctx) @@ -407,12 +410,12 @@ func (s *PostgresStore) syncConfigFromDatabase(ctx context.Context, exampleConfi if errCreate := os.MkdirAll(filepath.Dir(s.configPath), 0o700); errCreate != nil { return fmt.Errorf("postgres store: prepare config directory: %w", errCreate) } - if errWrite := os.WriteFile(s.configPath, []byte{}, 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(s.configPath, []byte{}, 0o600); errWrite != nil { return fmt.Errorf("postgres store: create empty config: %w", errWrite) } } } - data, errRead := os.ReadFile(s.configPath) + data, errRead := securefile.ReadFileRawLocked(s.configPath) if errRead != nil { return fmt.Errorf("postgres store: read local config: %w", errRead) } @@ -426,7 +429,7 @@ func (s *PostgresStore) syncConfigFromDatabase(ctx context.Context, exampleConfi return fmt.Errorf("postgres store: prepare config directory: %w", err) } normalized := normalizeLineEndings(content) - if err = os.WriteFile(s.configPath, []byte(normalized), 0o600); err != nil { + if err = securefile.WriteFileRawLocked(s.configPath, []byte(normalized), 0o600); err != nil { return fmt.Errorf("postgres store: write config to spool: %w", err) } } @@ -465,7 +468,7 @@ func (s *PostgresStore) syncAuthFromDatabase(ctx context.Context) error { if err = os.MkdirAll(filepath.Dir(path), 0o700); err != nil { return fmt.Errorf("postgres store: create auth 
subdir: %w", err) } - if err = os.WriteFile(path, []byte(payload), 0o600); err != nil { + if err = securefile.WriteFileRawLocked(path, []byte(payload), 0o600); err != nil { return fmt.Errorf("postgres store: write auth file: %w", err) } } @@ -476,7 +479,7 @@ func (s *PostgresStore) syncAuthFromDatabase(ctx context.Context) error { } func (s *PostgresStore) syncAuthFile(ctx context.Context, relID, path string) error { - data, err := os.ReadFile(path) + data, err := securefile.ReadFileRawLocked(path) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteAuthRecord(ctx, relID) @@ -490,7 +493,7 @@ func (s *PostgresStore) syncAuthFile(ctx context.Context, relID, path string) er } func (s *PostgresStore) upsertAuthRecord(ctx context.Context, relID, path string) error { - data, err := os.ReadFile(path) + data, err := securefile.ReadFileRawLocked(path) if err != nil { return fmt.Errorf("postgres store: read auth file: %w", err) } diff --git a/internal/util/gemini_schema.go b/internal/util/gemini_schema.go index 2daf0a79b..6b214479f 100644 --- a/internal/util/gemini_schema.go +++ b/internal/util/gemini_schema.go @@ -38,6 +38,13 @@ func CleanJSONSchemaForAntigravity(jsonStr string) string { return jsonStr } +// CleanJSONSchemaForGemini removes JSON Schema keywords that Gemini rejects. +func CleanJSONSchemaForGemini(jsonStr string) string { + jsonStr = removeUnsupportedKeywords(jsonStr) + jsonStr = cleanupRequiredFields(jsonStr) + return jsonStr +} + // convertRefsToHints converts $ref to description hints (Lazy Hint strategy). func convertRefsToHints(jsonStr string) string { paths := findPaths(jsonStr, "$ref") diff --git a/internal/util/gitrepo.go b/internal/util/gitrepo.go new file mode 100644 index 000000000..ae189df21 --- /dev/null +++ b/internal/util/gitrepo.go @@ -0,0 +1,36 @@ +package util + +import ( + "os" + "path/filepath" + "strings" +) + +// FindGitRepoRoot walks upward from the provided path looking for a ".git" +// directory or file and returns the matching repository root. +func FindGitRepoRoot(path string) (string, bool) { + start := strings.TrimSpace(path) + if start == "" { + return "", false + } + start = filepath.Clean(start) + if info, err := os.Stat(start); err == nil && info != nil && !info.IsDir() { + start = filepath.Dir(start) + } + + dir := start + for { + if dir == "" || dir == "." { + return "", false + } + gitPath := filepath.Join(dir, ".git") + if _, err := os.Stat(gitPath); err == nil { + return dir, true + } + parent := filepath.Dir(dir) + if parent == dir { + return "", false + } + dir = parent + } +} diff --git a/internal/util/sensitive_json.go b/internal/util/sensitive_json.go new file mode 100644 index 000000000..d22509dc9 --- /dev/null +++ b/internal/util/sensitive_json.go @@ -0,0 +1,87 @@ +package util + +import ( + "bytes" + "encoding/json" + "strings" +) + +var sensitiveJSONKeys = map[string]struct{}{ + "access_token": {}, + "api_key": {}, + "api-key": {}, + "apikey": {}, + "authorization": {}, + "client_secret": {}, + "clientsecret": {}, + "id_token": {}, + "password": {}, + "refresh_token": {}, + "secret": {}, + "secret_access_key": {}, + "session_token": {}, + "token": {}, + "x-api-key": {}, + "x-goog-api-key": {}, + "x-goog-vertex-token": {}, +} + +// MaskSensitiveJSON redacts common credential fields in a JSON request/response body. +// If the payload is not valid JSON, the original bytes are returned unchanged. 
+func MaskSensitiveJSON(body []byte) []byte { + trimmed := bytes.TrimSpace(body) + if len(trimmed) == 0 { + return body + } + + var value any + decoder := json.NewDecoder(bytes.NewReader(trimmed)) + decoder.UseNumber() + if err := decoder.Decode(&value); err != nil { + return body + } + + masked := maskSensitiveJSONValue(value) + out, err := json.MarshalIndent(masked, "", " ") + if err != nil { + return body + } + return out +} + +func maskSensitiveJSONValue(value any) any { + switch typed := value.(type) { + case map[string]any: + out := make(map[string]any, len(typed)) + for key, v := range typed { + if isSensitiveJSONKey(key) { + out[key] = "***" + continue + } + out[key] = maskSensitiveJSONValue(v) + } + return out + case []any: + out := make([]any, len(typed)) + for i := range typed { + out[i] = maskSensitiveJSONValue(typed[i]) + } + return out + default: + return value + } +} + +func isSensitiveJSONKey(key string) bool { + normalized := strings.ToLower(strings.TrimSpace(key)) + if normalized == "" { + return false + } + if _, ok := sensitiveJSONKeys[normalized]; ok { + return true + } + if strings.HasSuffix(normalized, "_secret") { + return true + } + return false +} diff --git a/internal/watcher/config_reload.go b/internal/watcher/config_reload.go index 244f738e6..c145e2f38 100644 --- a/internal/watcher/config_reload.go +++ b/internal/watcher/config_reload.go @@ -9,6 +9,7 @@ import ( "time" "github.com/router-for-me/CLIProxyAPI/v6/internal/config" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" "github.com/router-for-me/CLIProxyAPI/v6/internal/util" "github.com/router-for-me/CLIProxyAPI/v6/internal/watcher/diff" "gopkg.in/yaml.v3" @@ -103,6 +104,9 @@ func (w *Watcher) reloadConfig() bool { w.config = newConfig w.clientsMutex.Unlock() + authEncryptionChanged := oldConfig == nil || oldConfig.AuthEncryption.Enabled != newConfig.AuthEncryption.Enabled + applyAuthEncryptionConfig(newConfig, newConfig.AuthDir, authEncryptionChanged) + var affectedOAuthProviders []string if oldConfig != nil { _, affectedOAuthProviders = diff.DiffOAuthExcludedModelChanges(oldConfig.OAuthExcludedModels, newConfig.OAuthExcludedModels) @@ -132,3 +136,35 @@ func (w *Watcher) reloadConfig() bool { w.reloadClients(authDirChanged, affectedOAuthProviders, forceAuthRefresh) return true } + +func applyAuthEncryptionConfig(cfg *config.Config, authDir string, migrate bool) { + if cfg == nil { + securefile.ConfigureAuthEncryption(securefile.AuthEncryptionSettings{}) + return + } + settings := securefile.AuthEncryptionSettings{ + Enabled: cfg.AuthEncryption.Enabled, + AllowPlaintextFallback: cfg.AuthEncryption.AllowPlaintextFallback, + } + secret := securefile.ResolveAuthEncryptionSecret(settings.Secret) + settings.Secret = secret + securefile.ConfigureAuthEncryption(settings) + if secret == "" { + if settings.Enabled { + log.Warn("auth-encryption enabled but no key configured; set CLIPROXY_AUTH_ENCRYPTION_KEY or CLI_PROXY_API_AUTH_ENCRYPTION_KEY") + } else if migrate { + log.Warn("auth-encryption disabled but no key configured; encrypted auth files cannot be decrypted without CLIPROXY_AUTH_ENCRYPTION_KEY or CLI_PROXY_API_AUTH_ENCRYPTION_KEY") + } + } + if !migrate || secret == "" { + return + } + changed, err := securefile.MigrateAuthJSONDir(authDir, settings) + if err != nil { + log.WithError(err).Warn("auth encryption migration encountered errors") + } + if len(changed) == 0 { + return + } + log.Infof("auth encryption migration updated %d auth file(s)", len(changed)) +} diff --git 
a/internal/watcher/diff/config_diff.go b/internal/watcher/diff/config_diff.go index ecc15b391..ebc29de14 100644 --- a/internal/watcher/diff/config_diff.go +++ b/internal/watcher/diff/config_diff.go @@ -54,6 +54,12 @@ func BuildConfigChangeDetails(oldCfg, newCfg *config.Config) []string { if oldCfg.ForceModelPrefix != newCfg.ForceModelPrefix { changes = append(changes, fmt.Sprintf("force-model-prefix: %t -> %t", oldCfg.ForceModelPrefix, newCfg.ForceModelPrefix)) } + if oldCfg.AuthEncryption.Enabled != newCfg.AuthEncryption.Enabled { + changes = append(changes, fmt.Sprintf("auth-encryption.enabled: %t -> %t", oldCfg.AuthEncryption.Enabled, newCfg.AuthEncryption.Enabled)) + } + if oldCfg.AuthEncryption.AllowPlaintextFallback != newCfg.AuthEncryption.AllowPlaintextFallback { + changes = append(changes, fmt.Sprintf("auth-encryption.allow-plaintext-fallback: %t -> %t", oldCfg.AuthEncryption.AllowPlaintextFallback, newCfg.AuthEncryption.AllowPlaintextFallback)) + } // Quota-exceeded behavior if oldCfg.QuotaExceeded.SwitchProject != newCfg.QuotaExceeded.SwitchProject { diff --git a/sdk/auth/filestore.go b/sdk/auth/filestore.go index 84092d379..33ae6c0ad 100644 --- a/sdk/auth/filestore.go +++ b/sdk/auth/filestore.go @@ -11,6 +11,7 @@ import ( "sync" "time" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" ) @@ -19,6 +20,9 @@ type FileTokenStore struct { mu sync.Mutex dirLock sync.RWMutex baseDir string + + errorsMu sync.RWMutex + lastLoadErrors []securefile.LoadError } // NewFileTokenStore creates a token store that saves credentials to disk through the @@ -27,6 +31,18 @@ func NewFileTokenStore() *FileTokenStore { return &FileTokenStore{} } +// LastLoadErrors returns the most recent best-effort load errors captured during List(). +func (s *FileTokenStore) LastLoadErrors() []securefile.LoadError { + if s == nil { + return nil + } + s.errorsMu.RLock() + defer s.errorsMu.RUnlock() + out := make([]securefile.LoadError, len(s.lastLoadErrors)) + copy(out, s.lastLoadErrors) + return out +} + // SetBaseDir updates the default directory used for auth JSON persistence when no explicit path is provided. func (s *FileTokenStore) SetBaseDir(dir string) { s.dirLock.Lock() @@ -71,7 +87,7 @@ func (s *FileTokenStore) Save(ctx context.Context, auth *cliproxyauth.Auth) (str if errMarshal != nil { return "", fmt.Errorf("auth filestore: marshal metadata failed: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { // Use metadataEqualIgnoringTimestamps to skip writes when only timestamp fields change. // This prevents the token refresh loop caused by timestamp/expired/expires_in changes. 
if metadataEqualIgnoringTimestamps(existing, raw) { @@ -80,12 +96,8 @@ func (s *FileTokenStore) Save(ctx context.Context, auth *cliproxyauth.Auth) (str } else if errRead != nil && !os.IsNotExist(errRead) { return "", fmt.Errorf("auth filestore: read existing failed: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("auth filestore: write temp failed: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("auth filestore: rename failed: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("auth filestore: write failed: %w", errWrite) } default: return "", fmt.Errorf("auth filestore: nothing to persist for %s", auth.ID) @@ -109,6 +121,7 @@ func (s *FileTokenStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) if dir == "" { return nil, fmt.Errorf("auth filestore: directory not configured") } + loadErrors := make([]securefile.LoadError, 0) entries := make([]*cliproxyauth.Auth, 0) err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, walkErr error) error { if walkErr != nil { @@ -122,6 +135,15 @@ func (s *FileTokenStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) } auth, err := s.readAuthFile(path, dir) if err != nil { + errorType := "read_failed" + lower := strings.ToLower(err.Error()) + switch { + case strings.Contains(lower, "decrypt") || strings.Contains(lower, "encryption key"): + errorType = "decrypt_failed" + case strings.Contains(lower, "unmarshal") || strings.Contains(lower, "invalid"): + errorType = "invalid_json" + } + loadErrors = append(loadErrors, securefile.LoadError{Path: path, ErrorType: errorType, Message: err.Error()}) return nil } if auth != nil { @@ -132,6 +154,9 @@ func (s *FileTokenStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) if err != nil { return nil, err } + s.errorsMu.Lock() + s.lastLoadErrors = loadErrors + s.errorsMu.Unlock() return entries, nil } @@ -163,7 +188,7 @@ func (s *FileTokenStore) resolveDeletePath(id string) (string, error) { } func (s *FileTokenStore) readAuthFile(path, baseDir string) (*cliproxyauth.Auth, error) { - data, err := os.ReadFile(path) + data, _, err := securefile.ReadAuthJSONFile(path) if err != nil { return nil, fmt.Errorf("read file: %w", err) } diff --git a/sdk/config/config.go b/sdk/config/config.go index b471e5e0d..8af9610d3 100644 --- a/sdk/config/config.go +++ b/sdk/config/config.go @@ -15,6 +15,7 @@ type Config = internalconfig.Config type StreamingConfig = internalconfig.StreamingConfig type TLSConfig = internalconfig.TLSConfig type RemoteManagement = internalconfig.RemoteManagement +type AuthEncryptionConfig = internalconfig.AuthEncryptionConfig type AmpCode = internalconfig.AmpCode type PayloadConfig = internalconfig.PayloadConfig type PayloadRule = internalconfig.PayloadRule
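
The sketches below are editorial illustrations of the APIs introduced in this diff, not part of the patch. First, key material: deriveKey uses a configured secret verbatim only when it is standard base64 that decodes to exactly 32 bytes; any other value is hashed with SHA-256. A minimal generator for a key of the first kind, standard library only:

    package main

    import (
        "crypto/rand"
        "encoding/base64"
        "fmt"
    )

    func main() {
        // 32 random bytes -> standard base64; deriveKey decodes this directly
        // into an AES-256 key instead of hashing it.
        key := make([]byte, 32)
        if _, err := rand.Read(key); err != nil {
            panic(err)
        }
        fmt.Println(base64.StdEncoding.EncodeToString(key))
        // Export the printed value as CLIPROXY_AUTH_ENCRYPTION_KEY
        // (or CLI_PROXY_API_AUTH_ENCRYPTION_KEY).
    }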
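
The envelope written by encryptBytes is plain JSON ({"v":1,"alg":"aes-256-gcm","nonce":...,"ct":...}). A self-contained decryption sketch that mirrors deriveKey and decryptBytes above, for recovering a file out-of-band; the envelope is an internal format, so treat this as illustrative rather than a stable contract:

    package main

    import (
        "crypto/aes"
        "crypto/cipher"
        "crypto/sha256"
        "encoding/base64"
        "encoding/json"
        "fmt"
        "os"
    )

    // envelope mirrors the authEnvelope fields written by internal/securefile.
    type envelope struct {
        V     int    `json:"v"`
        Alg   string `json:"alg"`
        Nonce string `json:"nonce"`
        Ct    string `json:"ct"`
    }

    // deriveKey mirrors securefile: base64 of exactly 32 bytes is used as-is,
    // anything else is hashed with SHA-256.
    func deriveKey(secret string) []byte {
        if decoded, err := base64.StdEncoding.DecodeString(secret); err == nil && len(decoded) == 32 {
            return decoded
        }
        sum := sha256.Sum256([]byte(secret))
        return sum[:]
    }

    func decrypt(raw []byte, secret string) ([]byte, error) {
        var env envelope
        if err := json.Unmarshal(raw, &env); err != nil {
            return nil, err
        }
        if env.V != 1 || env.Alg != "aes-256-gcm" {
            return nil, fmt.Errorf("unsupported envelope (v=%d alg=%s)", env.V, env.Alg)
        }
        block, err := aes.NewCipher(deriveKey(secret))
        if err != nil {
            return nil, err
        }
        gcm, err := cipher.NewGCM(block)
        if err != nil {
            return nil, err
        }
        nonce, err := base64.StdEncoding.DecodeString(env.Nonce)
        if err != nil {
            return nil, err
        }
        ct, err := base64.StdEncoding.DecodeString(env.Ct)
        if err != nil {
            return nil, err
        }
        return gcm.Open(nil, nonce, ct, nil)
    }

    func main() {
        if len(os.Args) < 2 {
            fmt.Fprintln(os.Stderr, "usage: decrypt-auth <auth.json>")
            os.Exit(1)
        }
        raw, err := os.ReadFile(os.Args[1])
        if err != nil {
            panic(err)
        }
        plaintext, err := decrypt(raw, os.Getenv("CLIPROXY_AUTH_ENCRYPTION_KEY"))
        if err != nil {
            panic(err)
        }
        os.Stdout.Write(plaintext)
    }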
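
A round-trip sketch of the package API (ConfigureAuthEncryption, WriteAuthJSONFile, ReadAuthJSONFile), written as a test that would live inside internal/securefile; the file placement and test name are assumptions:

    package securefile

    import (
        "bytes"
        "path/filepath"
        "testing"
    )

    func TestAuthJSONRoundTrip(t *testing.T) {
        // Enable encryption globally for this test, then restore the default.
        ConfigureAuthEncryption(AuthEncryptionSettings{Enabled: true, Secret: "unit-test-secret"})
        defer ConfigureAuthEncryption(AuthEncryptionSettings{})

        path := filepath.Join(t.TempDir(), "auth.json")
        want := []byte(`{"type":"example","email":"user@example.com"}`)

        if err := WriteAuthJSONFile(path, want); err != nil {
            t.Fatalf("write: %v", err)
        }
        got, wasEncrypted, err := ReadAuthJSONFile(path)
        if err != nil {
            t.Fatalf("read: %v", err)
        }
        if !wasEncrypted {
            t.Fatalf("expected an encrypted envelope on disk")
        }
        if !bytes.Equal(got, want) {
            t.Fatalf("round-trip mismatch: %s", got)
        }
    }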
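
WithLock plus AtomicWriteFile is the read-modify-write primitive the stores build on: take the path+".lock" sidecar, mutate, then replace atomically via temp file and rename. A hypothetical caller inside the module (internal packages are not importable from outside it); updateCounter and the state-file layout are invented for illustration:

    package example

    import (
        "encoding/json"
        "os"
        "time"

        "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile"
    )

    // updateCounter bumps a counter field in a small JSON state file while
    // holding the same ".lock" sidecar convention used by securefile.
    func updateCounter(path string) error {
        return securefile.WithLock(path+".lock", 10*time.Second, func() error {
            state := map[string]any{}
            if raw, err := os.ReadFile(path); err == nil {
                _ = json.Unmarshal(raw, &state)
            } else if !os.IsNotExist(err) {
                return err
            }
            n, _ := state["count"].(float64) // JSON numbers decode as float64
            state["count"] = n + 1
            raw, err := json.Marshal(state)
            if err != nil {
                return err
            }
            // Temp file + rename, so readers never observe a partial write.
            return securefile.AtomicWriteFile(path, raw, 0o600)
        })
    }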
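
Migration follows the same shape in cmd/server/main.go and the config watcher: configure the global settings, then walk the auth directory and rewrite any file whose on-disk form does not match them. A condensed sketch; reEncryptAuthDir is a hypothetical wrapper around the real calls:

    package example

    import (
        "log"

        "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile"
    )

    // reEncryptAuthDir decodes every *.json under authDir and rewrites it to
    // match settings: plaintext files get encrypted when enable is true, and
    // envelopes get decrypted back to plaintext when enable is false.
    func reEncryptAuthDir(authDir, secret string, enable bool) {
        settings := securefile.AuthEncryptionSettings{
            Enabled:                enable,
            Secret:                 secret,
            AllowPlaintextFallback: true,
        }
        securefile.ConfigureAuthEncryption(settings)
        changed, err := securefile.MigrateAuthJSONDir(authDir, settings)
        if err != nil {
            // Errors are joined per file; partial progress is still reported in changed.
            log.Printf("migration finished with errors: %v", err)
        }
        log.Printf("rewrote %d auth file(s)", len(changed))
    }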
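
FileTokenStore.List skips files it cannot read, decrypt, or parse and records them as securefile.LoadError values. A sketch of surfacing those diagnostics after a listing; listWithDiagnostics is illustrative only:

    package example

    import (
        "context"
        "log"

        sdkAuth "github.com/router-for-me/CLIProxyAPI/v6/sdk/auth"
    )

    func listWithDiagnostics(ctx context.Context, dir string) {
        store := sdkAuth.NewFileTokenStore()
        store.SetBaseDir(dir)
        auths, err := store.List(ctx)
        if err != nil {
            log.Fatalf("list: %v", err)
        }
        log.Printf("loaded %d auth file(s)", len(auths))
        // Files that failed to load were skipped by List; report them here.
        for _, le := range store.LastLoadErrors() {
            log.Printf("skipped %s (%s): %s", le.Path, le.ErrorType, le.Message)
        }
    }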
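
MaskSensitiveJSON recursively replaces the value of any key in the sensitive set (or any key ending in _secret) with "***" and re-marshals the result with indentation; non-JSON bodies are returned unchanged. A small usage sketch with made-up payload data:

    package example

    import (
        "fmt"

        "github.com/router-for-me/CLIProxyAPI/v6/internal/util"
    )

    func demoMask() {
        body := []byte(`{"model":"gpt-x","api_key":"sk-live-123","nested":{"refresh_token":"abc"},"opts":{"webhook_secret":"shh"}}`)
        fmt.Printf("%s\n", util.MaskSensitiveJSON(body))
        // api_key, refresh_token and webhook_secret become "***";
        // "model" and the overall structure are preserved.
    }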