const os = require('os')
const { join, dirname, basename } = require('path')
const { format } = require('util')
const { glob } = require('glob')
const { Minipass } = require('minipass')
const fsMiniPass = require('fs-minipass')
const fs = require('fs/promises')
const log = require('./log-shim')
const Display = require('./display')

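// e.g. padZero(3, 100) === '003': pad `n` to the digit width of `length`,
// so rotated log filenames always sort lexically in creation order.
// e.g. globify('C:\\logs\\*.log') === 'C:/logs/*.log': glob patterns
// use forward slashes, even on Windows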
const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')
const globify = pattern => pattern.split('\\').join('/')

class LogFiles {
  // Starts as a plain Minipass stream (created in on()) so we can buffer
  // initial writes before we know the cache location
  #logStream = null

  // We cap log files at a certain number of log events per file.
  // Note that a single log event can write more than one line to the
  // file. We rotate to a new log file once this number of events is reached
  #MAX_LOGS_PER_FILE = null

  // Now that we write logs continuously we need to have a backstop
  // here for infinite loops that still log. This is also partially handled
  // by the config.get('max-files') option, but this is a failsafe to
  // prevent runaway log file creation
  #MAX_FILES_PER_PROCESS = null
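  // (With the defaults below, these two caps limit a single process to
  // 5 files * 50_000 events = 250,000 log events before we stop listening)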

  #fileLogCount = 0
  #totalLogCount = 0
  #path = null
  #logsMax = null
  #files = []

  constructor ({
    maxLogsPerFile = 50_000,
    maxFilesPerProcess = 5,
  } = {}) {
    this.#MAX_LOGS_PER_FILE = maxLogsPerFile
    this.#MAX_FILES_PER_PROCESS = maxFilesPerProcess
    this.on()
  }

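  // For example, assuming Display.clean returns plain text unchanged,
  //   LogFiles.format(0, 'verbose', 'title', 'line one\nline two')
  // returns:
  //   '0 verbose title line one' + os.EOL +
  //   '0 verbose title line two' + os.EOL
  // i.e. every physical line carries the `count level title` prefix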
  static format (count, level, title, ...args) {
    let prefix = `${count} ${level}`
    if (title) {
      prefix += ` ${title}`
    }

    return format(...args)
      .split(/\r?\n/)
      .map(Display.clean)
      .reduce((lines, line) =>
        lines += prefix + (line ? ' ' : '') + line + os.EOL,
      ''
      )
  }

  on () {
    this.#logStream = new Minipass()
    process.on('log', this.#logHandler)
  }

  off () {
    process.off('log', this.#logHandler)
    this.#endStream()
  }

  load ({ path, logsMax = Infinity } = {}) {
    // The dir is user configurable and is required to exist, so
    // this can error if the dir is missing or not configured correctly
    this.#path = path
    this.#logsMax = logsMax

    // Log stream has already ended
    if (!this.#logStream) {
      return
    }

    log.verbose('logfile', `logs-max:${logsMax} dir:${this.#path}`)

    // Pipe our initial stream to our new file stream and
    // set that as the new logstream for future writes.
    // If logs-max is 0 then the user does not want a log file
    if (this.#logsMax > 0) {
      const initialFile = this.#openLogFile()
      if (initialFile) {
        this.#logStream = this.#logStream.pipe(initialFile)
      }
    }
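
    // Note: events logged before load() were buffered in memory by the
    // plain Minipass created in on(); the pipe above flushes that buffer
    // into the first file, so nothing logged during startup is dropped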

    // Kick off the cleaning process, even if we aren't writing a logfile.
    // This is async but it will always ignore the current logfile.
    // Return the result so it can be awaited in tests
    return this.#cleanLogs()
  }

  log (...args) {
    this.#logHandler(...args)
  }

  get files () {
    return this.#files
  }

  get #isBuffered () {
    return this.#logStream instanceof Minipass
  }

  #endStream (output) {
    if (this.#logStream) {
      this.#logStream.end(output)
      this.#logStream = null
    }
  }

  #logHandler = (level, ...args) => {
    // Ignore pause and resume events since we
    // write everything to the log file
    if (level === 'pause' || level === 'resume') {
      return
    }

    // If the stream is ended then do nothing
    if (!this.#logStream) {
      return
    }

    const logOutput = this.#formatLogItem(level, ...args)

    if (this.#isBuffered) {
      // Can't do anything but buffer the output if we don't
      // have a file stream yet
      this.#logStream.write(logOutput)
      return
    }

    // Open a new log file if we've written too many logs to this one
    if (this.#fileLogCount >= this.#MAX_LOGS_PER_FILE) {
      // Write the last chunk to the file and close it
      this.#endStream(logOutput)
      if (this.#files.length >= this.#MAX_FILES_PER_PROCESS) {
        // but if it's way too many then we just stop listening
        this.off()
      } else {
        // otherwise we are ready for a new file for the next event
        this.#logStream = this.#openLogFile()
      }
    } else {
      this.#logStream.write(logOutput)
    }
  }

  #formatLogItem (...args) {
    this.#fileLogCount += 1
    return LogFiles.format(this.#totalLogCount++, ...args)
  }

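  // Note: #path is a filename prefix, not a directory. With an npm-style
  // prefix (an assumption here) such as '/cache/_logs/2024-01-01T00_00_00_000Z-',
  // count 0 yields '/cache/_logs/2024-01-01T00_00_00_000Z-debug-0.log'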
  #getLogFilePath (count = '') {
    return `${this.#path}debug-${count}.log`
  }

  #openLogFile () {
    // Count in filename will be zero-indexed
    const count = this.#files.length

    try {
      // Pad with zeros so that our log files are always sorted properly.
      // We never want to write files ending in `-9.log` and `-10.log` because
      // log file cleaning is done by deleting the oldest, so in this example
      // `-10.log` would be deleted next
      const f = this.#getLogFilePath(padZero(count, this.#MAX_FILES_PER_PROCESS))
      // Some effort was made to make this async, but we need to write logs
      // during process.on('exit') which has to be synchronous. So in order
      // to never drop log messages, it is easiest to make it sync all the time,
      // and this was measured to be about 1.5% slower for 40k lines of output
      const logStream = new fsMiniPass.WriteStreamSync(f, { flags: 'a' })
      if (count > 0) {
        // Reset the file log count if we are opening
        // a file after our first one
        this.#fileLogCount = 0
      }
      this.#files.push(logStream.path)
      return logStream
    } catch (e) {
      // If the user has a readonly logdir then we don't want to warn
      // about this on every command, so it should be verbose
      log.verbose('logfile', `could not be created: ${e}`)
    }
  }

  async #cleanLogs () {
    // Clean out the old log files.
    // This is a best-effort attempt: if an rm fails, we just log
    // a message about it and move on. We do return a Promise that
    // resolves when we've tried to delete everything, just for the
    // benefit of testing this function properly.

    try {
      const logPath = this.#getLogFilePath()
      const logGlob = join(dirname(logPath), basename(logPath)
        // tell glob to only match digits
        .replace(/\d/g, '[0123456789]')
        // Handle the old (prior to 8.2.0) log file names which did not have a
        // counter suffix
        .replace(/-\.log$/, '*.log')
      )
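      // e.g. with the npm-style prefix assumed above, logGlob would be
      // '/cache/_logs/[0123456789][0123456789]...-debug*.log', matching both
      // rotated '-<count>.log' files and old (pre-8.2.0) names with no counter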

      // Always ignore the currently written files
      const files = await glob(globify(logGlob), { ignore: this.#files.map(globify), silent: true })
      const toDelete = files.length - this.#logsMax

      if (toDelete <= 0) {
        return
      }

      log.silly('logfile', `start cleaning logs, removing ${toDelete} files`)

      for (const file of files.slice(0, toDelete)) {
        try {
          await fs.rm(file, { force: true })
        } catch (e) {
          log.silly('logfile', 'error removing log file', file, e)
        }
      }
    } catch (e) {
      // Disable cleanup failure warnings when log writing is disabled
      if (this.#logsMax > 0) {
        log.warn('logfile', 'error cleaning log files', e)
      }
    } finally {
      log.silly('logfile', 'done cleaning log files')
    }
  }
}

module.exports = LogFiles
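
// A minimal usage sketch (hypothetical wiring; the npm CLI supplies its real
// cache directory and config values):
//
//   const logFiles = new LogFiles() // constructor calls on(), buffering 'log' events
//   process.emit('log', 'verbose', 'title', 'buffered until load() is called')
//   await logFiles.load({ path: '/cache/_logs/2024-01-01T00_00_00_000Z-', logsMax: 10 })
//   logFiles.files // ['/cache/_logs/2024-01-01T00_00_00_000Z-debug-0.log']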