|
| 1 | +import { app } from 'electron' |
| 2 | +import log from 'electron-log' |
| 3 | +import fs from 'fs' |
| 4 | +import path from 'path' |
| 5 | +import { |
| 6 | + LOG_DATA_MAX_LENGTH, |
| 7 | + LOG_FILE_MAX_SIZE_BYTES, |
| 8 | + LOG_MESSAGE_MAX_LENGTH, |
| 9 | + LOG_RETENTION_DAYS, |
| 10 | + LOG_STACK_HEAD_LINES, |
| 11 | + LOG_STACK_TAIL_LINES, |
| 12 | +} from '../shared/constants' |
| 13 | +import type { LogEntryPayload } from '../shared/types' |
| 14 | + |
// Base name of the active log file inside Electron's "logs" directory.
const LOG_FILE_NAME = 'voice-key.log'
// Rotated/archived files also start with this prefix (matched by cleanupOldLogs).
const LOG_FILE_PREFIX = 'voice-key'
// Local aliases for the shared clamp limits from ../shared/constants.
const MAX_DATA_LENGTH = LOG_DATA_MAX_LENGTH
const MAX_MESSAGE_LENGTH = LOG_MESSAGE_MAX_LENGTH

// Guards initializeLogger() against double initialization.
let initialized = false
| 21 | + |
// Electron's per-app "logs" directory (platform dependent).
const getLogDir = () => app.getPath('logs')
// Absolute path of the active log file.
const getLogFilePath = () => path.join(getLogDir(), LOG_FILE_NAME)
| 24 | + |
| 25 | +const clampText = (value: string, maxLength: number) => { |
| 26 | + if (value.length <= maxLength) return value |
| 27 | + const lines = value.split('\n') |
| 28 | + if (lines.length > LOG_STACK_HEAD_LINES + LOG_STACK_TAIL_LINES) { |
| 29 | + const head = lines.slice(0, LOG_STACK_HEAD_LINES).join('\n') |
| 30 | + const tail = lines.slice(-LOG_STACK_TAIL_LINES).join('\n') |
| 31 | + const omitted = lines.length - (LOG_STACK_HEAD_LINES + LOG_STACK_TAIL_LINES) |
| 32 | + return `${head}\n... (${omitted} lines omitted) ...\n${tail}` |
| 33 | + } |
| 34 | + return `${value.slice(0, maxLength)}...` |
| 35 | +} |
| 36 | + |
| 37 | +const sanitize = (value: string) => value.replace(/\r/g, '\\r').replace(/\n/g, '\\n') |
| 38 | + |
| 39 | +const safeStringify = (data: unknown): string => { |
| 40 | + if (data === undefined) return '' |
| 41 | + if (typeof data === 'string') return sanitize(data) |
| 42 | + if (data instanceof Error) { |
| 43 | + const stack = data.stack ? `\n${data.stack}` : '' |
| 44 | + return sanitize(`${data.name}: ${data.message}${stack}`) |
| 45 | + } |
| 46 | + try { |
| 47 | + return sanitize(JSON.stringify(data)) |
| 48 | + } catch { |
| 49 | + return '[unserializable]' |
| 50 | + } |
| 51 | +} |
| 52 | + |
| 53 | +const formatArgs = (args: unknown[]) => { |
| 54 | + const text = args.map((arg) => safeStringify(arg)).join(' ') |
| 55 | + return clampText(text, MAX_MESSAGE_LENGTH) |
| 56 | +} |
| 57 | + |
| 58 | +const ensureLogDir = () => { |
| 59 | + const dir = getLogDir() |
| 60 | + if (!fs.existsSync(dir)) { |
| 61 | + fs.mkdirSync(dir, { recursive: true }) |
| 62 | + } |
| 63 | +} |
| 64 | + |
| 65 | +const cleanupOldLogs = async () => { |
| 66 | + const dir = getLogDir() |
| 67 | + if (!fs.existsSync(dir)) return |
| 68 | + const cutoff = Date.now() - LOG_RETENTION_DAYS * 24 * 60 * 60 * 1000 |
| 69 | + const currentLogFile = getLogFilePath() |
| 70 | + |
| 71 | + for (const entry of fs.readdirSync(dir)) { |
| 72 | + if (!entry.startsWith(LOG_FILE_PREFIX)) continue |
| 73 | + const filePath = path.join(dir, entry) |
| 74 | + if (filePath === currentLogFile) continue |
| 75 | + try { |
| 76 | + const stats = await fs.promises.stat(filePath) |
| 77 | + if (!stats.isFile()) continue |
| 78 | + if (stats.mtimeMs < cutoff) { |
| 79 | + await fs.promises.unlink(filePath).catch((error: NodeJS.ErrnoException) => { |
| 80 | + if (error.code !== 'EBUSY') throw error |
| 81 | + }) |
| 82 | + } |
| 83 | + } catch (error) { |
| 84 | + log.scope('main').warn('[Logger] Failed to cleanup log file', { |
| 85 | + filePath, |
| 86 | + error: safeStringify(error), |
| 87 | + }) |
| 88 | + } |
| 89 | + } |
| 90 | +} |
| 91 | + |
| 92 | +const configureTransports = () => { |
| 93 | + log.transports.file.resolvePath = getLogFilePath |
| 94 | + log.transports.file.maxSize = LOG_FILE_MAX_SIZE_BYTES |
| 95 | + log.transports.file.level = process.env.VITE_DEV_SERVER_URL ? 'debug' : 'info' |
| 96 | + log.transports.file.format = '{y}-{m}-{d} {h}:{i}:{s}.{ms} [{level}] [{scope}] {text}' |
| 97 | + log.transports.console.level = false |
| 98 | + |
| 99 | + log.transports.file.archiveLog = (oldLogFile) => { |
| 100 | + const filePath = oldLogFile.path |
| 101 | + const dir = path.dirname(filePath) |
| 102 | + const ext = path.extname(filePath) |
| 103 | + const base = path.basename(filePath, ext) |
| 104 | + const timestamp = new Date().toISOString().replace(/[:.]/g, '-') |
| 105 | + const archivedPath = path.join(dir, `${base}-${timestamp}${ext}`) |
| 106 | + fs.renameSync(filePath, archivedPath) |
| 107 | + } |
| 108 | +} |
| 109 | + |
| 110 | +const attachConsole = (scopedLog: ReturnType<typeof log.scope>) => { |
| 111 | + const original = { |
| 112 | + log: console.log.bind(console), |
| 113 | + info: console.info.bind(console), |
| 114 | + warn: console.warn.bind(console), |
| 115 | + error: console.error.bind(console), |
| 116 | + debug: console.debug.bind(console), |
| 117 | + } |
| 118 | + |
| 119 | + console.log = (...args: unknown[]) => { |
| 120 | + scopedLog.info(formatArgs(args)) |
| 121 | + original.log(...args) |
| 122 | + } |
| 123 | + console.info = (...args: unknown[]) => { |
| 124 | + scopedLog.info(formatArgs(args)) |
| 125 | + original.info(...args) |
| 126 | + } |
| 127 | + console.warn = (...args: unknown[]) => { |
| 128 | + scopedLog.warn(formatArgs(args)) |
| 129 | + original.warn(...args) |
| 130 | + } |
| 131 | + console.error = (...args: unknown[]) => { |
| 132 | + scopedLog.error(formatArgs(args)) |
| 133 | + original.error(...args) |
| 134 | + } |
| 135 | + console.debug = (...args: unknown[]) => { |
| 136 | + scopedLog.debug(formatArgs(args)) |
| 137 | + original.debug(...args) |
| 138 | + } |
| 139 | +} |
| 140 | + |
| 141 | +export const initializeLogger = () => { |
| 142 | + if (initialized) return log |
| 143 | + ensureLogDir() |
| 144 | + configureTransports() |
| 145 | + let errorCount = 0 |
| 146 | + log.catchErrors({ |
| 147 | + showDialog: false, |
| 148 | + onError: () => { |
| 149 | + errorCount += 1 |
| 150 | + return errorCount <= 10 |
| 151 | + }, |
| 152 | + }) |
| 153 | + void cleanupOldLogs() |
| 154 | + |
| 155 | + const scoped = log.scope('main') |
| 156 | + attachConsole(scoped) |
| 157 | + scoped.info('[Logger] Initialized', { |
| 158 | + logFile: getLogFilePath(), |
| 159 | + retentionDays: LOG_RETENTION_DAYS, |
| 160 | + maxFileSizeBytes: LOG_FILE_MAX_SIZE_BYTES, |
| 161 | + }) |
| 162 | + |
| 163 | + initialized = true |
| 164 | + return log |
| 165 | +} |
| 166 | + |
| 167 | +export const writeLog = ({ level, message, scope, data }: LogEntryPayload) => { |
| 168 | + const target = log.scope(scope ?? 'main') |
| 169 | + const extra = data === undefined ? '' : clampText(safeStringify(data), MAX_DATA_LENGTH) |
| 170 | + const text = extra ? `${message} ${extra}` : message |
| 171 | + |
| 172 | + switch (level) { |
| 173 | + case 'debug': |
| 174 | + target.debug(text) |
| 175 | + break |
| 176 | + case 'warn': |
| 177 | + target.warn(text) |
| 178 | + break |
| 179 | + case 'error': |
| 180 | + target.error(text) |
| 181 | + break |
| 182 | + default: |
| 183 | + target.info(text) |
| 184 | + break |
| 185 | + } |
| 186 | +} |
| 187 | + |
| 188 | +export const readLogTail = (maxBytes: number) => { |
| 189 | + const filePath = getLogFilePath() |
| 190 | + try { |
| 191 | + if (!fs.existsSync(filePath)) return '' |
| 192 | + const stats = fs.statSync(filePath) |
| 193 | + const size = stats.size |
| 194 | + if (size === 0) return '' |
| 195 | + const readSize = Math.min(size, maxBytes) |
| 196 | + const buffer = Buffer.alloc(readSize) |
| 197 | + const fd = fs.openSync(filePath, 'r') |
| 198 | + try { |
| 199 | + fs.readSync(fd, buffer, 0, readSize, size - readSize) |
| 200 | + } finally { |
| 201 | + fs.closeSync(fd) |
| 202 | + } |
| 203 | + const text = buffer.toString('utf8') |
| 204 | + return text.includes('\uFFFD') ? text.replace(/\uFFFD/g, '?') : text |
| 205 | + } catch (error) { |
| 206 | + log.scope('main').error('[Logger] Failed to read log tail', safeStringify(error)) |
| 207 | + return '' |
| 208 | + } |
| 209 | +} |
| 210 | + |
| 211 | +export const getLogDirectory = () => getLogDir() |
0 commit comments