diff --git a/cmd/server/main.go b/cmd/server/main.go index bbf500e71..8a7cb782d 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -404,6 +404,13 @@ func main() { } else { cfg.AuthDir = resolvedAuthDir } + if repoRoot, ok := util.FindGitRepoRoot(cfg.AuthDir); ok { + if useGitStore { + log.Warnf("auth-dir %q is inside a git repository (%q); git-backed token storage is enabled, ensure the repo/remote is private and tokens are not exposed", cfg.AuthDir, repoRoot) + } else { + log.Warnf("auth-dir %q is inside a git repository (%q); do not commit token files, add it to .gitignore or set auth-dir outside the repo", cfg.AuthDir, repoRoot) + } + } managementasset.SetCurrentConfig(cfg) // Create login options to be used in authentication flows. diff --git a/internal/logging/request_logger.go b/internal/logging/request_logger.go index eb31bfa9d..9f89cab7c 100644 --- a/internal/logging/request_logger.go +++ b/internal/logging/request_logger.go @@ -194,7 +194,7 @@ func (l *FileRequestLogger) logRequest(url, method string, requestHeaders map[st content := l.formatLogContent(url, method, requestHeaders, body, apiRequest, apiResponse, decompressedResponse, statusCode, responseHeaders, apiResponseErrors) // Write to file - if err = os.WriteFile(filePath, []byte(content), 0644); err != nil { + if err = os.WriteFile(filePath, []byte(content), 0o600); err != nil { return fmt.Errorf("failed to write log file: %w", err) } @@ -233,7 +233,7 @@ func (l *FileRequestLogger) LogStreamingRequest(url, method string, headers map[ filePath := filepath.Join(l.logsDir, filename) // Create and open file - file, err := os.Create(filePath) + file, err := os.OpenFile(filePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o600) if err != nil { return nil, fmt.Errorf("failed to create log file: %w", err) } @@ -270,7 +270,7 @@ func (l *FileRequestLogger) generateErrorFilename(url string) string { // - error: An error if directory creation fails, nil otherwise func (l *FileRequestLogger) ensureLogsDir() error { if _, err := os.Stat(l.logsDir); os.IsNotExist(err) { - return os.MkdirAll(l.logsDir, 0755) + return os.MkdirAll(l.logsDir, 0o700) } return nil } @@ -618,7 +618,7 @@ func (l *FileRequestLogger) formatRequestInfo(url, method string, headers map[st content.WriteString("\n") content.WriteString("=== REQUEST BODY ===\n") - content.Write(body) + content.Write(util.MaskSensitiveJSON(body)) content.WriteString("\n\n") return content.String() diff --git a/internal/securefile/atomic.go b/internal/securefile/atomic.go new file mode 100644 index 000000000..20697d35c --- /dev/null +++ b/internal/securefile/atomic.go @@ -0,0 +1,94 @@ +package securefile + +import ( + "fmt" + "os" + "path/filepath" + "time" +) + +// EnsurePrivateDir creates dirPath (and parents) with 0700 permissions. +func EnsurePrivateDir(dirPath string) error { + if dirPath == "" { + return fmt.Errorf("securefile: dir path is empty") + } + if err := os.MkdirAll(dirPath, 0o700); err != nil { + return err + } + // Best-effort permission hardening. Ignore errors (e.g., non-POSIX FS). + _ = os.Chmod(dirPath, 0o700) + return nil +} + +// AtomicWriteFile writes data to path using a temp file + rename, and attempts to fsync. +// mode controls the final file permissions. 
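+//
+// Illustrative caller-side usage (editor's sketch, not part of this change; authDir and
+// payload are placeholders):
+//
+//	if err := securefile.AtomicWriteFile(filepath.Join(authDir, "token.json"), payload, 0o600); err != nil {
+//		return fmt.Errorf("persist token: %w", err)
+//	}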
+func AtomicWriteFile(path string, data []byte, mode os.FileMode) error { + if path == "" { + return fmt.Errorf("securefile: path is empty") + } + dir := filepath.Dir(path) + if err := EnsurePrivateDir(dir); err != nil { + return err + } + + tmp, err := os.CreateTemp(dir, ".tmp.*") + if err != nil { + return err + } + tmpName := tmp.Name() + defer func() { + _ = tmp.Close() + _ = os.Remove(tmpName) + }() + + if mode == 0 { + mode = 0o600 + } + if err := tmp.Chmod(mode); err != nil { + // Best-effort: ignore chmod failure on some filesystems. + } + + if _, err := tmp.Write(data); err != nil { + return err + } + if err := tmp.Sync(); err != nil { + return err + } + if err := tmp.Close(); err != nil { + return err + } + + if err := os.Rename(tmpName, path); err != nil { + return err + } + + // Best-effort: ensure final mode. + _ = os.Chmod(path, mode) + return nil +} + +// ReadFileRawLocked reads the file at path while holding an advisory lock on path+".lock". +func ReadFileRawLocked(path string) ([]byte, error) { + lockPath := path + ".lock" + var out []byte + err := WithLock(lockPath, 10*time.Second, func() error { + data, err := os.ReadFile(path) + if err != nil { + return err + } + out = data + return nil + }) + if err != nil { + return nil, err + } + return out, nil +} + +// WriteFileRawLocked writes data to path using an advisory lock on path+".lock" and atomic replace. +func WriteFileRawLocked(path string, data []byte, mode os.FileMode) error { + lockPath := path + ".lock" + return WithLock(lockPath, 10*time.Second, func() error { + return AtomicWriteFile(path, data, mode) + }) +} diff --git a/internal/securefile/authjson.go b/internal/securefile/authjson.go new file mode 100644 index 000000000..fc7a12971 --- /dev/null +++ b/internal/securefile/authjson.go @@ -0,0 +1,201 @@ +package securefile + +import ( + "crypto/aes" + "crypto/cipher" + "crypto/rand" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "time" +) + +const ( + authEnvelopeVersion = 1 + authEnvelopeAlgAES = "aes-256-gcm" +) + +type authEnvelope struct { + V int `json:"v"` + Alg string `json:"alg"` + Nonce string `json:"nonce"` + Ct string `json:"ct"` +} + +// deriveKey returns a 32-byte key derived from the provided secret. If the secret is base64 +// for exactly 32 bytes, it is used directly; otherwise SHA-256(secret) is used. 
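+//
+// For example (editor's illustration; the literal inputs are assumptions):
+//
+//	k1, _ := deriveKey(base64.StdEncoding.EncodeToString(make([]byte, 32))) // decodes to 32 bytes, used as-is
+//	k2, _ := deriveKey("any passphrase")                                    // not 32 decoded bytes, so SHA-256("any passphrase")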
+func deriveKey(secret string) ([]byte, error) { + secret = strings.TrimSpace(secret) + if secret == "" { + return nil, fmt.Errorf("securefile: encryption secret is empty") + } + if decoded, err := base64.StdEncoding.DecodeString(secret); err == nil && len(decoded) == 32 { + return decoded, nil + } + sum := sha256.Sum256([]byte(secret)) + return sum[:], nil +} + +func encryptBytes(plaintext []byte, secret string) ([]byte, error) { + key, err := deriveKey(secret) + if err != nil { + return nil, err + } + block, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + nonce := make([]byte, gcm.NonceSize()) + if _, err := rand.Read(nonce); err != nil { + return nil, err + } + ct := gcm.Seal(nil, nonce, plaintext, nil) + env := authEnvelope{ + V: authEnvelopeVersion, + Alg: authEnvelopeAlgAES, + Nonce: base64.StdEncoding.EncodeToString(nonce), + Ct: base64.StdEncoding.EncodeToString(ct), + } + return json.Marshal(env) +} + +func decryptBytes(envelopeBytes []byte, secret string) ([]byte, error) { + var env authEnvelope + if err := json.Unmarshal(envelopeBytes, &env); err != nil { + return nil, fmt.Errorf("securefile: invalid encrypted envelope: %w", err) + } + if env.V != authEnvelopeVersion || strings.TrimSpace(env.Alg) != authEnvelopeAlgAES { + return nil, fmt.Errorf("securefile: unsupported envelope (v=%d alg=%s)", env.V, env.Alg) + } + key, err := deriveKey(secret) + if err != nil { + return nil, err + } + block, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + gcm, err := cipher.NewGCM(block) + if err != nil { + return nil, err + } + nonce, err := base64.StdEncoding.DecodeString(env.Nonce) + if err != nil { + return nil, fmt.Errorf("securefile: invalid nonce encoding: %w", err) + } + ct, err := base64.StdEncoding.DecodeString(env.Ct) + if err != nil { + return nil, fmt.Errorf("securefile: invalid ciphertext encoding: %w", err) + } + plaintext, err := gcm.Open(nil, nonce, ct, nil) + if err != nil { + return nil, fmt.Errorf("securefile: decrypt failed: %w", err) + } + return plaintext, nil +} + +func looksEncryptedEnvelope(raw []byte) bool { + var probe map[string]any + if err := json.Unmarshal(raw, &probe); err != nil { + return false + } + _, hasV := probe["v"] + _, hasAlg := probe["alg"] + _, hasCt := probe["ct"] + return hasV && hasAlg && hasCt +} + +// DecodeAuthJSON returns decrypted JSON bytes if raw is an encrypted envelope; otherwise returns raw. +// It returns (plaintext, wasEncrypted, error). 
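+//
+// Illustrative call (editor's sketch; the settings literal is an assumption):
+//
+//	plain, wasEncrypted, err := DecodeAuthJSON(raw, AuthEncryptionSettings{Secret: "passphrase"})
+//	// plain holds the original JSON whether or not raw was an encrypted envelope;
+//	// an envelope without a configured key yields an error.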
+func DecodeAuthJSON(raw []byte, settings AuthEncryptionSettings) ([]byte, bool, error) { + trimmed := strings.TrimSpace(string(raw)) + if trimmed == "" { + return raw, false, nil + } + if !looksEncryptedEnvelope(raw) { + return raw, false, nil + } + secret := ResolveAuthEncryptionSecret(settings.Secret) + if strings.TrimSpace(secret) == "" { + return nil, true, fmt.Errorf("securefile: auth file is encrypted but no encryption key is configured") + } + plaintext, err := decryptBytes(raw, secret) + if err != nil { + return nil, true, err + } + return plaintext, true, nil +} + +func writeAuthJSONFileUnlocked(path string, jsonBytes []byte, settings AuthEncryptionSettings) error { + payload := jsonBytes + if settings.Enabled { + secret := ResolveAuthEncryptionSecret(settings.Secret) + if strings.TrimSpace(secret) == "" { + return fmt.Errorf("securefile: auth encryption enabled but no encryption key configured") + } + enc, err := encryptBytes(jsonBytes, secret) + if err != nil { + return err + } + payload = enc + } + if err := EnsurePrivateDir(filepath.Dir(path)); err != nil { + return err + } + return AtomicWriteFile(path, payload, 0o600) +} + +// ReadAuthJSONFile reads path, locking path+".lock", and returns decrypted JSON when needed. +func ReadAuthJSONFile(path string) ([]byte, bool, error) { + settings := CurrentAuthEncryption() + lockPath := path + ".lock" + var ( + out []byte + encrypted bool + readErr error + ) + err := WithLock(lockPath, 10*time.Second, func() error { + raw, err := os.ReadFile(path) + if err != nil { + return err + } + plaintext, wasEncrypted, err := DecodeAuthJSON(raw, settings) + if err != nil { + return err + } + out = plaintext + encrypted = wasEncrypted + // Best-effort migration: if encryption is enabled and we read plaintext, re-save encrypted. + if settings.Enabled && !wasEncrypted && settings.AllowPlaintextFallback { + if err := writeAuthJSONFileUnlocked(path, plaintext, settings); err != nil { + // ignore; caller still gets plaintext content + } + } + return nil + }) + if err != nil { + readErr = err + } + return out, encrypted, readErr +} + +// WriteAuthJSONFile writes jsonBytes to path with 0600 perms, using lock + atomic write. +// If auth encryption is enabled, it stores an encrypted envelope. +func WriteAuthJSONFile(path string, jsonBytes []byte) error { + if path == "" { + return fmt.Errorf("securefile: path is empty") + } + settings := CurrentAuthEncryption() + lockPath := path + ".lock" + return WithLock(lockPath, 10*time.Second, func() error { + return writeAuthJSONFileUnlocked(path, jsonBytes, settings) + }) +} diff --git a/internal/securefile/errors.go b/internal/securefile/errors.go new file mode 100644 index 000000000..796235ba5 --- /dev/null +++ b/internal/securefile/errors.go @@ -0,0 +1,8 @@ +package securefile + +// LoadError captures best-effort load failures (parse/decrypt/read) when scanning auth stores. +type LoadError struct { + Path string `json:"path"` + ErrorType string `json:"error_type"` + Message string `json:"message"` +} diff --git a/internal/securefile/lock_unix.go b/internal/securefile/lock_unix.go new file mode 100644 index 000000000..d261cd68e --- /dev/null +++ b/internal/securefile/lock_unix.go @@ -0,0 +1,54 @@ +//go:build !windows + +package securefile + +import ( + "fmt" + "os" + "path/filepath" + "syscall" + "time" +) + +// WithLock obtains an advisory exclusive lock on lockPath (creating it if needed), +// runs fn, and then releases the lock. It retries until timeout. 
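+//
+// Typical use (editor's sketch; the lock path derivation is an assumption):
+//
+//	err := WithLock(path+".lock", 10*time.Second, func() error {
+//		// read-modify-write of path happens here, serialized across processes
+//		return nil
+//	})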
+func WithLock(lockPath string, timeout time.Duration, fn func() error) error { + if lockPath == "" { + return fmt.Errorf("securefile: lock path is empty") + } + if fn == nil { + return fmt.Errorf("securefile: lock fn is nil") + } + if timeout <= 0 { + timeout = 10 * time.Second + } + if err := EnsurePrivateDir(filepath.Dir(lockPath)); err != nil { + return err + } + f, err := os.OpenFile(lockPath, os.O_CREATE|os.O_RDWR, 0o600) + if err != nil { + return err + } + defer func() { _ = f.Close() }() + _ = f.Chmod(0o600) + + deadline := time.Now().Add(timeout) + for { + err := syscall.Flock(int(f.Fd()), syscall.LOCK_EX|syscall.LOCK_NB) + if err == nil { + break + } + // EWOULDBLOCK/EAGAIN indicates the lock is held by another process. + if err != syscall.EWOULDBLOCK && err != syscall.EAGAIN { + return fmt.Errorf("securefile: unexpected error acquiring lock on %s: %w", lockPath, err) + } + if time.Now().After(deadline) { + return fmt.Errorf("securefile: timed out acquiring lock: %s", lockPath) + } + time.Sleep(50 * time.Millisecond) + } + defer func() { + _ = syscall.Flock(int(f.Fd()), syscall.LOCK_UN) + }() + return fn() +} diff --git a/internal/securefile/lock_windows.go b/internal/securefile/lock_windows.go new file mode 100644 index 000000000..65c18a63f --- /dev/null +++ b/internal/securefile/lock_windows.go @@ -0,0 +1,58 @@ +//go:build windows + +package securefile + +import ( + "fmt" + "os" + "path/filepath" + "time" + + "golang.org/x/sys/windows" +) + +// WithLock obtains an exclusive file lock on lockPath (creating it if needed), +// runs fn, and then releases the lock. It retries until timeout. +func WithLock(lockPath string, timeout time.Duration, fn func() error) error { + if lockPath == "" { + return fmt.Errorf("securefile: lock path is empty") + } + if fn == nil { + return fmt.Errorf("securefile: lock fn is nil") + } + if timeout <= 0 { + timeout = 10 * time.Second + } + if err := EnsurePrivateDir(filepath.Dir(lockPath)); err != nil { + return err + } + + f, err := os.OpenFile(lockPath, os.O_CREATE|os.O_RDWR, 0o600) + if err != nil { + return err + } + defer func() { _ = f.Close() }() + _ = f.Chmod(0o600) + + handle := windows.Handle(f.Fd()) + var overlapped windows.Overlapped + + deadline := time.Now().Add(timeout) + for { + errLock := windows.LockFileEx(handle, windows.LOCKFILE_EXCLUSIVE_LOCK|windows.LOCKFILE_FAIL_IMMEDIATELY, 0, 1, 0, &overlapped) + if errLock == nil { + break + } + if errLock != windows.ERROR_LOCK_VIOLATION && errLock != windows.ERROR_SHARING_VIOLATION { + return errLock + } + if time.Now().After(deadline) { + return fmt.Errorf("securefile: timed out acquiring lock: %s", lockPath) + } + time.Sleep(50 * time.Millisecond) + } + defer func() { + _ = windows.UnlockFileEx(handle, 0, 1, 0, &overlapped) + }() + return fn() +} diff --git a/internal/securefile/settings.go b/internal/securefile/settings.go new file mode 100644 index 000000000..bdd7bfbaa --- /dev/null +++ b/internal/securefile/settings.go @@ -0,0 +1,52 @@ +package securefile + +import ( + "os" + "strings" + "sync/atomic" +) + +// AuthEncryptionSettings controls encryption-at-rest for auth JSON files. +type AuthEncryptionSettings struct { + Enabled bool + Secret string + AllowPlaintextFallback bool +} + +var authEncryptionSettings atomic.Value // stores AuthEncryptionSettings + +func init() { + authEncryptionSettings.Store(AuthEncryptionSettings{}) +} + +// ConfigureAuthEncryption updates global auth encryption behavior. 
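+//
+// Illustrative wiring (editor's sketch; sourcing the values from env/config is an assumption):
+//
+//	ConfigureAuthEncryption(AuthEncryptionSettings{
+//		Enabled:                true,
+//		Secret:                 os.Getenv("CLIPROXY_AUTH_ENCRYPTION_KEY"),
+//		AllowPlaintextFallback: true,
+//	})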
+func ConfigureAuthEncryption(settings AuthEncryptionSettings) { + settings.Secret = strings.TrimSpace(settings.Secret) + authEncryptionSettings.Store(settings) +} + +// CurrentAuthEncryption returns the active auth encryption settings. +func CurrentAuthEncryption() AuthEncryptionSettings { + if v := authEncryptionSettings.Load(); v != nil { + if s, ok := v.(AuthEncryptionSettings); ok { + return s + } + } + return AuthEncryptionSettings{} +} + +// ResolveAuthEncryptionSecret resolves a secret from config/env. +// Explicit secret wins; otherwise checks env CLIPROXY_AUTH_ENCRYPTION_KEY then CLI_PROXY_API_AUTH_ENCRYPTION_KEY. +func ResolveAuthEncryptionSecret(explicit string) string { + if trimmed := strings.TrimSpace(explicit); trimmed != "" { + return trimmed + } + for _, key := range []string{"CLIPROXY_AUTH_ENCRYPTION_KEY", "CLI_PROXY_API_AUTH_ENCRYPTION_KEY"} { + if v, ok := os.LookupEnv(key); ok { + if trimmed := strings.TrimSpace(v); trimmed != "" { + return trimmed + } + } + } + return "" +} diff --git a/internal/store/gitstore.go b/internal/store/gitstore.go index 3b68e4b0a..ca9953827 100644 --- a/internal/store/gitstore.go +++ b/internal/store/gitstore.go @@ -18,6 +18,7 @@ import ( "github.com/go-git/go-git/v6/plumbing/object" "github.com/go-git/go-git/v6/plumbing/transport" "github.com/go-git/go-git/v6/plumbing/transport/http" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" ) @@ -249,19 +250,15 @@ func (s *GitTokenStore) Save(_ context.Context, auth *cliproxyauth.Auth) (string if errMarshal != nil { return "", fmt.Errorf("auth filestore: marshal metadata failed: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { if jsonEqual(existing, raw) { return path, nil } } else if !os.IsNotExist(errRead) { return "", fmt.Errorf("auth filestore: read existing failed: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("auth filestore: write temp failed: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("auth filestore: rename failed: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("auth filestore: write failed: %w", errWrite) } default: return "", fmt.Errorf("auth filestore: nothing to persist for %s", auth.ID) @@ -406,7 +403,7 @@ func (s *GitTokenStore) resolveDeletePath(id string) (string, error) { } func (s *GitTokenStore) readAuthFile(path, baseDir string) (*cliproxyauth.Auth, error) { - data, err := os.ReadFile(path) + data, _, err := securefile.ReadAuthJSONFile(path) if err != nil { return nil, fmt.Errorf("read file: %w", err) } @@ -679,7 +676,7 @@ func (s *GitTokenStore) PersistConfig(_ context.Context) error { func ensureEmptyFile(path string) error { if _, err := os.Stat(path); err != nil { if errors.Is(err, fs.ErrNotExist) { - return os.WriteFile(path, []byte{}, 0o600) + return securefile.WriteFileRawLocked(path, []byte{}, 0o600) } return err } diff --git a/internal/store/objectstore.go b/internal/store/objectstore.go index 726ebc9fa..e5dc505d7 100644 --- a/internal/store/objectstore.go +++ b/internal/store/objectstore.go @@ -18,6 +18,7 @@ import ( "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" "github.com/router-for-me/CLIProxyAPI/v6/internal/misc" + 
"github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" log "github.com/sirupsen/logrus" ) @@ -192,19 +193,15 @@ func (s *ObjectTokenStore) Save(ctx context.Context, auth *cliproxyauth.Auth) (s if errMarshal != nil { return "", fmt.Errorf("object store: marshal metadata: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { if jsonEqual(existing, raw) { return path, nil } } else if errRead != nil && !errors.Is(errRead, fs.ErrNotExist) { return "", fmt.Errorf("object store: read existing metadata: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("object store: write temp auth file: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("object store: rename auth file: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("object store: write auth file: %w", errWrite) } default: return "", fmt.Errorf("object store: nothing to persist for %s", auth.ID) @@ -311,7 +308,7 @@ func (s *ObjectTokenStore) PersistConfig(ctx context.Context) error { s.mu.Lock() defer s.mu.Unlock() - data, err := os.ReadFile(s.configPath) + data, err := securefile.ReadFileRawLocked(s.configPath) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteObject(ctx, objectStoreConfigKey) @@ -352,7 +349,7 @@ func (s *ObjectTokenStore) syncConfigFromBucket(ctx context.Context, example str if errRead != nil { return fmt.Errorf("object store: read config: %w", errRead) } - if errWrite := os.WriteFile(s.configPath, normalizeLineEndingsBytes(data), 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(s.configPath, normalizeLineEndingsBytes(data), 0o600); errWrite != nil { return fmt.Errorf("object store: write config: %w", errWrite) } case isObjectNotFound(err): @@ -365,12 +362,12 @@ func (s *ObjectTokenStore) syncConfigFromBucket(ctx context.Context, example str if errCreate := os.MkdirAll(filepath.Dir(s.configPath), 0o700); errCreate != nil { return fmt.Errorf("object store: prepare config directory: %w", errCreate) } - if errWrite := os.WriteFile(s.configPath, []byte{}, 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(s.configPath, []byte{}, 0o600); errWrite != nil { return fmt.Errorf("object store: create empty config: %w", errWrite) } } } - data, errRead := os.ReadFile(s.configPath) + data, errRead := securefile.ReadFileRawLocked(s.configPath) if errRead != nil { return fmt.Errorf("object store: read local config: %w", errRead) } @@ -429,7 +426,7 @@ func (s *ObjectTokenStore) syncAuthFromBucket(ctx context.Context) error { if errRead != nil { return fmt.Errorf("object store: read auth %s: %w", object.Key, errRead) } - if errWrite := os.WriteFile(local, data, 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(local, data, 0o600); errWrite != nil { return fmt.Errorf("object store: write auth %s: %w", local, errWrite) } } @@ -444,7 +441,7 @@ func (s *ObjectTokenStore) uploadAuth(ctx context.Context, path string) error { if err != nil { return fmt.Errorf("object store: resolve auth relative path: %w", err) } - data, err := os.ReadFile(path) + data, err := securefile.ReadFileRawLocked(path) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteAuthObject(ctx, path) @@ 
-553,7 +550,7 @@ func (s *ObjectTokenStore) resolveDeletePath(id string) (string, error) { } func (s *ObjectTokenStore) readAuthFile(path, baseDir string) (*cliproxyauth.Auth, error) { - data, err := os.ReadFile(path) + data, _, err := securefile.ReadAuthJSONFile(path) if err != nil { return nil, fmt.Errorf("read file: %w", err) } diff --git a/internal/store/postgresstore.go b/internal/store/postgresstore.go index a18f45f8b..d6af5ae7b 100644 --- a/internal/store/postgresstore.go +++ b/internal/store/postgresstore.go @@ -15,6 +15,7 @@ import ( _ "github.com/jackc/pgx/v5/stdlib" "github.com/router-for-me/CLIProxyAPI/v6/internal/misc" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" log "github.com/sirupsen/logrus" ) @@ -222,19 +223,15 @@ func (s *PostgresStore) Save(ctx context.Context, auth *cliproxyauth.Auth) (stri if errMarshal != nil { return "", fmt.Errorf("postgres store: marshal metadata: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { if jsonEqual(existing, raw) { return path, nil } } else if errRead != nil && !errors.Is(errRead, fs.ErrNotExist) { return "", fmt.Errorf("postgres store: read existing metadata: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("postgres store: write temp auth file: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("postgres store: rename auth file: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("postgres store: write auth file: %w", errWrite) } default: return "", fmt.Errorf("postgres store: nothing to persist for %s", auth.ID) @@ -285,7 +282,13 @@ func (s *PostgresStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) continue } metadata := make(map[string]any) - if err = json.Unmarshal([]byte(payload), &metadata); err != nil { + raw := []byte(payload) + decoded, _, errDecode := securefile.DecodeAuthJSON(raw, securefile.CurrentAuthEncryption()) + if errDecode != nil { + log.WithError(errDecode).Warnf("postgres store: auth %s decode failed, skipping", id) + continue + } + if err = json.Unmarshal(decoded, &metadata); err != nil { log.WithError(err).Warnf("postgres store: skipping auth %s with invalid json", id) continue } @@ -381,7 +384,7 @@ func (s *PostgresStore) PersistConfig(ctx context.Context) error { s.mu.Lock() defer s.mu.Unlock() - data, err := os.ReadFile(s.configPath) + data, err := securefile.ReadFileRawLocked(s.configPath) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteConfigRecord(ctx) @@ -407,12 +410,12 @@ func (s *PostgresStore) syncConfigFromDatabase(ctx context.Context, exampleConfi if errCreate := os.MkdirAll(filepath.Dir(s.configPath), 0o700); errCreate != nil { return fmt.Errorf("postgres store: prepare config directory: %w", errCreate) } - if errWrite := os.WriteFile(s.configPath, []byte{}, 0o600); errWrite != nil { + if errWrite := securefile.WriteFileRawLocked(s.configPath, []byte{}, 0o600); errWrite != nil { return fmt.Errorf("postgres store: create empty config: %w", errWrite) } } } - data, errRead := os.ReadFile(s.configPath) + data, errRead := securefile.ReadFileRawLocked(s.configPath) if errRead != nil { return fmt.Errorf("postgres store: read local config: %w", errRead) } @@ -426,7 +429,7 @@ func (s 
*PostgresStore) syncConfigFromDatabase(ctx context.Context, exampleConfi return fmt.Errorf("postgres store: prepare config directory: %w", err) } normalized := normalizeLineEndings(content) - if err = os.WriteFile(s.configPath, []byte(normalized), 0o600); err != nil { + if err = securefile.WriteFileRawLocked(s.configPath, []byte(normalized), 0o600); err != nil { return fmt.Errorf("postgres store: write config to spool: %w", err) } } @@ -465,7 +468,7 @@ func (s *PostgresStore) syncAuthFromDatabase(ctx context.Context) error { if err = os.MkdirAll(filepath.Dir(path), 0o700); err != nil { return fmt.Errorf("postgres store: create auth subdir: %w", err) } - if err = os.WriteFile(path, []byte(payload), 0o600); err != nil { + if err = securefile.WriteFileRawLocked(path, []byte(payload), 0o600); err != nil { return fmt.Errorf("postgres store: write auth file: %w", err) } } @@ -476,7 +479,7 @@ func (s *PostgresStore) syncAuthFromDatabase(ctx context.Context) error { } func (s *PostgresStore) syncAuthFile(ctx context.Context, relID, path string) error { - data, err := os.ReadFile(path) + data, err := securefile.ReadFileRawLocked(path) if err != nil { if errors.Is(err, fs.ErrNotExist) { return s.deleteAuthRecord(ctx, relID) @@ -490,7 +493,7 @@ func (s *PostgresStore) syncAuthFile(ctx context.Context, relID, path string) er } func (s *PostgresStore) upsertAuthRecord(ctx context.Context, relID, path string) error { - data, err := os.ReadFile(path) + data, err := securefile.ReadFileRawLocked(path) if err != nil { return fmt.Errorf("postgres store: read auth file: %w", err) } diff --git a/internal/util/gitrepo.go b/internal/util/gitrepo.go new file mode 100644 index 000000000..ae189df21 --- /dev/null +++ b/internal/util/gitrepo.go @@ -0,0 +1,36 @@ +package util + +import ( + "os" + "path/filepath" + "strings" +) + +// FindGitRepoRoot walks upward from the provided path looking for a ".git" +// directory or file and returns the matching repository root. +func FindGitRepoRoot(path string) (string, bool) { + start := strings.TrimSpace(path) + if start == "" { + return "", false + } + start = filepath.Clean(start) + if info, err := os.Stat(start); err == nil && info != nil && !info.IsDir() { + start = filepath.Dir(start) + } + + dir := start + for { + if dir == "" || dir == "." { + return "", false + } + gitPath := filepath.Join(dir, ".git") + if _, err := os.Stat(gitPath); err == nil { + return dir, true + } + parent := filepath.Dir(dir) + if parent == dir { + return "", false + } + dir = parent + } +} diff --git a/internal/util/sensitive_json.go b/internal/util/sensitive_json.go new file mode 100644 index 000000000..d22509dc9 --- /dev/null +++ b/internal/util/sensitive_json.go @@ -0,0 +1,87 @@ +package util + +import ( + "bytes" + "encoding/json" + "strings" +) + +var sensitiveJSONKeys = map[string]struct{}{ + "access_token": {}, + "api_key": {}, + "api-key": {}, + "apikey": {}, + "authorization": {}, + "client_secret": {}, + "clientsecret": {}, + "id_token": {}, + "password": {}, + "refresh_token": {}, + "secret": {}, + "secret_access_key": {}, + "session_token": {}, + "token": {}, + "x-api-key": {}, + "x-goog-api-key": {}, + "x-goog-vertex-token": {}, +} + +// MaskSensitiveJSON redacts common credential fields in a JSON request/response body. +// If the payload is not valid JSON, the original bytes are returned unchanged. 
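+//
+// For example (editor's illustration):
+//
+//	in := []byte(`{"model":"m","api_key":"sk-123","meta":{"refresh_token":"abc"}}`)
+//	out := MaskSensitiveJSON(in) // "api_key" and "refresh_token" become "***"; "model" is untouched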
+func MaskSensitiveJSON(body []byte) []byte { + trimmed := bytes.TrimSpace(body) + if len(trimmed) == 0 { + return body + } + + var value any + decoder := json.NewDecoder(bytes.NewReader(trimmed)) + decoder.UseNumber() + if err := decoder.Decode(&value); err != nil { + return body + } + + masked := maskSensitiveJSONValue(value) + out, err := json.MarshalIndent(masked, "", " ") + if err != nil { + return body + } + return out +} + +func maskSensitiveJSONValue(value any) any { + switch typed := value.(type) { + case map[string]any: + out := make(map[string]any, len(typed)) + for key, v := range typed { + if isSensitiveJSONKey(key) { + out[key] = "***" + continue + } + out[key] = maskSensitiveJSONValue(v) + } + return out + case []any: + out := make([]any, len(typed)) + for i := range typed { + out[i] = maskSensitiveJSONValue(typed[i]) + } + return out + default: + return value + } +} + +func isSensitiveJSONKey(key string) bool { + normalized := strings.ToLower(strings.TrimSpace(key)) + if normalized == "" { + return false + } + if _, ok := sensitiveJSONKeys[normalized]; ok { + return true + } + if strings.HasSuffix(normalized, "_secret") { + return true + } + return false +} diff --git a/sdk/auth/filestore.go b/sdk/auth/filestore.go index 3c2d60c4a..7a3dc5a81 100644 --- a/sdk/auth/filestore.go +++ b/sdk/auth/filestore.go @@ -11,6 +11,7 @@ import ( "sync" "time" + "github.com/router-for-me/CLIProxyAPI/v6/internal/securefile" cliproxyauth "github.com/router-for-me/CLIProxyAPI/v6/sdk/cliproxy/auth" ) @@ -19,6 +20,9 @@ type FileTokenStore struct { mu sync.Mutex dirLock sync.RWMutex baseDir string + + errorsMu sync.RWMutex + lastLoadErrors []securefile.LoadError } // NewFileTokenStore creates a token store that saves credentials to disk through the @@ -27,6 +31,18 @@ func NewFileTokenStore() *FileTokenStore { return &FileTokenStore{} } +// LastLoadErrors returns the most recent best-effort load errors captured during List(). +func (s *FileTokenStore) LastLoadErrors() []securefile.LoadError { + if s == nil { + return nil + } + s.errorsMu.RLock() + defer s.errorsMu.RUnlock() + out := make([]securefile.LoadError, len(s.lastLoadErrors)) + copy(out, s.lastLoadErrors) + return out +} + // SetBaseDir updates the default directory used for auth JSON persistence when no explicit path is provided. 
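+//
+// Illustrative call sequence (editor's sketch; the directory literal is an assumption):
+//
+//	s := NewFileTokenStore()
+//	s.SetBaseDir("/var/lib/cliproxy/auths")
+//	auths, _ := s.List(ctx)
+//	for _, loadErr := range s.LastLoadErrors() {
+//		fmt.Printf("skipped %s: %s (%s)\n", loadErr.Path, loadErr.Message, loadErr.ErrorType)
+//	}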
func (s *FileTokenStore) SetBaseDir(dir string) { s.dirLock.Lock() @@ -71,19 +87,15 @@ func (s *FileTokenStore) Save(ctx context.Context, auth *cliproxyauth.Auth) (str if errMarshal != nil { return "", fmt.Errorf("auth filestore: marshal metadata failed: %w", errMarshal) } - if existing, errRead := os.ReadFile(path); errRead == nil { + if existing, _, errRead := securefile.ReadAuthJSONFile(path); errRead == nil { if jsonEqual(existing, raw) { return path, nil } } else if errRead != nil && !os.IsNotExist(errRead) { return "", fmt.Errorf("auth filestore: read existing failed: %w", errRead) } - tmp := path + ".tmp" - if errWrite := os.WriteFile(tmp, raw, 0o600); errWrite != nil { - return "", fmt.Errorf("auth filestore: write temp failed: %w", errWrite) - } - if errRename := os.Rename(tmp, path); errRename != nil { - return "", fmt.Errorf("auth filestore: rename failed: %w", errRename) + if errWrite := securefile.WriteAuthJSONFile(path, raw); errWrite != nil { + return "", fmt.Errorf("auth filestore: write failed: %w", errWrite) } default: return "", fmt.Errorf("auth filestore: nothing to persist for %s", auth.ID) @@ -107,6 +119,7 @@ func (s *FileTokenStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) if dir == "" { return nil, fmt.Errorf("auth filestore: directory not configured") } + loadErrors := make([]securefile.LoadError, 0) entries := make([]*cliproxyauth.Auth, 0) err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, walkErr error) error { if walkErr != nil { @@ -120,6 +133,15 @@ func (s *FileTokenStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) } auth, err := s.readAuthFile(path, dir) if err != nil { + errorType := "read_failed" + lower := strings.ToLower(err.Error()) + switch { + case strings.Contains(lower, "decrypt") || strings.Contains(lower, "encryption key"): + errorType = "decrypt_failed" + case strings.Contains(lower, "unmarshal") || strings.Contains(lower, "invalid"): + errorType = "invalid_json" + } + loadErrors = append(loadErrors, securefile.LoadError{Path: path, ErrorType: errorType, Message: err.Error()}) return nil } if auth != nil { @@ -130,6 +152,9 @@ func (s *FileTokenStore) List(ctx context.Context) ([]*cliproxyauth.Auth, error) if err != nil { return nil, err } + s.errorsMu.Lock() + s.lastLoadErrors = loadErrors + s.errorsMu.Unlock() return entries, nil } @@ -161,7 +186,7 @@ func (s *FileTokenStore) resolveDeletePath(id string) (string, error) { } func (s *FileTokenStore) readAuthFile(path, baseDir string) (*cliproxyauth.Auth, error) { - data, err := os.ReadFile(path) + data, _, err := securefile.ReadAuthJSONFile(path) if err != nil { return nil, fmt.Errorf("read file: %w", err) }
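A minimal round-trip sketch for the new securefile auth encryption (editor's illustration, not part of the diff; the passphrase and test layout are assumptions):

package securefile_test

import (
	"bytes"
	"path/filepath"
	"testing"

	"github.com/router-for-me/CLIProxyAPI/v6/internal/securefile"
)

func TestAuthJSONEncryptedRoundTrip(t *testing.T) {
	// Enable encryption-at-rest globally for this process.
	securefile.ConfigureAuthEncryption(securefile.AuthEncryptionSettings{
		Enabled: true,
		Secret:  "example-passphrase",
	})
	path := filepath.Join(t.TempDir(), "auth.json")
	in := []byte(`{"id":"acct-1","access_token":"secret-token"}`)

	// Write stores an AES-256-GCM envelope via lock + atomic replace.
	if err := securefile.WriteAuthJSONFile(path, in); err != nil {
		t.Fatalf("write: %v", err)
	}

	// Read detects the envelope and returns the decrypted plaintext.
	out, wasEncrypted, err := securefile.ReadAuthJSONFile(path)
	if err != nil {
		t.Fatalf("read: %v", err)
	}
	if !wasEncrypted || !bytes.Equal(out, in) {
		t.Fatalf("expected encrypted round-trip, got encrypted=%v out=%s", wasEncrypted, out)
	}
}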