Projektstart

This commit is contained in:
2026-01-22 15:49:12 +01:00
parent 7212eb6f7a
commit 57e5f652f8
10637 changed files with 2598792 additions and 64 deletions

13
backend/node_modules/pino-pretty/.editorconfig generated vendored Normal file
View File

@@ -0,0 +1,13 @@
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
# [*.md]
# trim_trailing_whitespace = false

8
backend/node_modules/pino-pretty/.eslintrc generated vendored Normal file
View File

@@ -0,0 +1,8 @@
{
"extends": [
"standard"
],
"rules": {
"no-var": "off"
}
}

View File

@@ -0,0 +1,13 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10

View File

@@ -0,0 +1,81 @@
name: CI
on:
push:
paths-ignore:
- 'docs/**'
- '*.md'
pull_request:
paths-ignore:
- 'docs/**'
- '*.md'
# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true
jobs:
dependency-review:
name: Dependency Review
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Check out repo
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Dependency review
uses: actions/dependency-review-action@v3
test:
name: Test
runs-on: ${{ matrix.os }}
outputs:
COVERALLS: ${{ steps.coveralls-trigger.outputs.COVERALLS_TRIGGER }}
permissions:
contents: read
strategy:
matrix:
node-version: [14, 16, 18]
os: [ubuntu-latest]
steps:
- name: Check out repo
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Restore cached dependencies
uses: actions/cache@v3
with:
path: node_modules
key: node-modules-${{ hashFiles('package.json') }}
- name: Install dependencies
run: npm i --ignore-scripts
- name: Run tests
run: npm run ci
automerge:
name: Automerge Dependabot PRs
if: >
github.event_name == 'pull_request' &&
github.event.pull_request.user.login == 'dependabot[bot]'
needs: test
permissions:
pull-requests: write
contents: write
runs-on: ubuntu-latest
steps:
- uses: fastify/github-action-merge-dependabot@v3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}

8
backend/node_modules/pino-pretty/.taprc.yaml generated vendored Normal file
View File

@@ -0,0 +1,8 @@
coverage: true
coverage-map: 'coverage-map.js'
reporter: terse
files:
- 'lib/**/*.test.js'
- 'test/**/*.test.js'

23
backend/node_modules/pino-pretty/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,23 @@
The MIT License (MIT)
Copyright (c) 2019 the Pino team
Pino team listed at https://github.com/pinojs/pino#the-team
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

380
backend/node_modules/pino-pretty/Readme.md generated vendored Normal file
View File

@@ -0,0 +1,380 @@
<a id="intro"></a>
# pino-pretty
[![NPM Package Version](https://img.shields.io/npm/v/pino-pretty)](https://www.npmjs.com/package/pino-pretty)
[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino-pretty/ci.yml?branch=master)](https://github.com/pinojs/pino-pretty/actions?query=workflow%3ACI)
[![Coverage Status](https://img.shields.io/coveralls/github/pinojs/pino-pretty)](https://coveralls.io/github/pinojs/pino-pretty?branch=master)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
This module provides a basic [ndjson](http://ndjson.org/) formatter to be used in __development__. If an
incoming line looks like it could be a log line from an ndjson logger, in
particular the [Pino](https://getpino.io/) logging library, then it will apply
extra formatting by considering things like the log level and timestamp.
A standard Pino log line like:
```
{"level":30,"time":1522431328992,"msg":"hello world","pid":42,"hostname":"foo","v":1}
```
Will format to:
```
[17:35:28.992] INFO (42): hello world
```
If you landed on this page due to the deprecation of the `prettyPrint` option
of `pino`, read the [Programmatic Integration](#integration) section.
<a id="example"></a>
## Example
Using the [example script][exscript] from the Pino module, we can see what the
prettified logs will look like:
![demo](demo.png)
[exscript]: https://github.com/pinojs/pino/blob/25ba61f40ea5a1a753c85002812426d765da52a4/examples/basic.js
<a id="install"></a>
## Install
```sh
$ npm install -g pino-pretty
```
<a id="usage"></a>
## Usage
It is recommended to use `pino-pretty` with `pino`
by piping output to the CLI tool:
```sh
node app.js | pino-pretty
```
<a id="cliargs"></a>
### CLI Arguments
- `--colorize` (`-c`): Adds terminal color escape sequences to the output.
- `--colorizeObjects` (`-C`): Allows suppressing colorization of objects when set to `false`. In combination with `--singleLine`, this ensures that the end of each line is parsable JSON.
- `--crlf` (`-f`): Appends carriage return and line feed, instead of just a line
feed, to the formatted log line.
- `--errorProps` (`-e`): When formatting an error object, display this list
of properties. The list should be a comma-separated list of properties. Default: `''`.
Do not use this option if logging from pino@7. Support will be removed in future versions.
- `--levelFirst` (`-l`): Display the log level name before the logged date and time.
- `--errorLikeObjectKeys` (`-k`): Define the log keys that are associated with
error like objects. Default: `err,error`.
- `--messageKey` (`-m`): Define the key that contains the main log message.
Default: `msg`.
- `--levelKey` (`--levelKey`): Define the key that contains the level of the log. Nested keys are supported with each property delimited by a dot character (`.`).
Keys may be escaped to target property names that contains the delimiter itself:
(`--levelKey tags\\.level`).
Default: `level`.
- `--levelLabel` (`-b`): Output the log level using the specified label.
Default: `levelLabel`.
- `--minimumLevel` (`-L`): Hide messages below the specified log level. Accepts a number, `trace`, `debug`, `info`, `warn`, `error`, or `fatal`. If any more filtering is required, consider using [`jq`](https://stedolan.github.io/jq/).
- `--customLevels` (`-x`): Override default levels with custom levels, e.g. `-x err:99,info:1`
- `--customColors` (`-X`): Override default colors with custom colors, e.g. `-X err:red,info:blue`
- `--useOnlyCustomProps` (`-U`): Only use custom levels and colors (if provided) (default: true); else fallback to default levels and colors, e.g. `-U false`
- `--messageFormat` (`-o`): Format output of message, e.g. `{levelLabel} - {pid} - url:{req.url}` will output message: `INFO - 1123 - url:localhost:3000/test`
Default: `false`
- `--timestampKey` (`-a`): Define the key that contains the log timestamp.
Default: `time`.
- `--translateTime` (`-t`): Translate the epoch time value into a human-readable
date and time string. This flag also can set the format string to apply when
translating the date to a human-readable format. For a list of available pattern
letters, see the [`dateformat` documentation](https://www.npmjs.com/package/dateformat).
- The default format is `HH:MM:ss.l` in the local timezone.
- Require a `UTC:` prefix to translate time to UTC, e.g. `UTC:yyyy-mm-dd HH:MM:ss.l o`.
- Require a `SYS:` prefix to translate time to the local system's time zone. A
shortcut `SYS:standard` to translate time to `yyyy-mm-dd HH:MM:ss.l o` in
system time zone.
- `--ignore` (`-i`): Ignore one or several keys, nested keys are supported with each property delimited by a dot character (`.`),
keys may be escaped to target property names that contains the delimiter itself:
(`-i time,hostname,req.headers,log\\.domain\\.corp/foo`).
The `--ignore` option would be ignored, if both `--ignore` and `--include` are passed.
Default: `hostname`.
- `--include` (`-I`): The opposite of `--ignore`. Include one or several keys.
- `--hideObject` (`-H`): Hide objects from output (but not error object)
- `--singleLine` (`-S`): Print each log message on a single line (errors will still be multi-line)
- `--config`: Specify a path to a config file containing the pino-pretty options. pino-pretty will attempt to read from a `.pino-prettyrc` in your current directory (`process.cwd`) if not specified
<a id="integration"></a>
## Programmatic Integration
We recommend against using `pino-pretty` in production and highly
recommend installing `pino-pretty` as a development dependency.
Install `pino-pretty` alongside `pino` and set the transport target to `'pino-pretty'`:
```js
const pino = require('pino')
const logger = pino({
transport: {
target: 'pino-pretty'
},
})
logger.info('hi')
```
The transport option can also have an options object containing `pino-pretty` options:
```js
const pino = require('pino')
const logger = pino({
transport: {
target: 'pino-pretty',
options: {
colorize: true
}
}
})
logger.info('hi')
```
Use it as a stream:
```js
const pino = require('pino')
const pretty = require('pino-pretty')
const logger = pino(pretty())
logger.info('hi')
```
Options are also supported:
```js
const pino = require('pino')
const pretty = require('pino-pretty')
const stream = pretty({
colorize: true
})
const logger = pino(stream)
logger.info('hi')
```
See the [Options](#options) section for all possible options.
### Usage as a stream
If you are using `pino-pretty` as a stream and you need to provide options to `pino`,
pass the options as the first argument and `pino-pretty` as second argument:
```js
const pino = require('pino')
const pretty = require('pino-pretty')
const stream = pretty({
colorize: true
})
const logger = pino({ level: 'info' }, stream)
// Nothing is printed
logger.debug('hi')
```
### Usage with Jest
Logging with Jest is _problematic_, as the test framework requires no asynchronous operation to
continue after the test has finished. The following is the only supported way to use this module
with Jest:
```js
import pino from 'pino'
import pretty from 'pino-pretty'
test('test pino-pretty', () => {
const logger = pino(pretty({ sync: true }));
logger.info('Info');
logger.error('Error');
});
```
### Handling non-serializable options
Using the new [pino v7+
transports](https://getpino.io/#/docs/transports?id=v7-transports) not all
options are serializable, for example if you want to use `messageFormat` as a
function you will need to wrap `pino-pretty` in a custom module.
Executing `main.js` below will log a colorized `hello world` message using a
custom function `messageFormat`:
```js
// main.js
const pino = require('pino')
const logger = pino({
transport: {
target: './pino-pretty-transport',
options: {
colorize: true
}
},
})
logger.info('world')
```
```js
// pino-pretty-transport.js
module.exports = opts => require('pino-pretty')({
...opts,
messageFormat: (log, messageKey) => `hello ${log[messageKey]}`
})
```
<a id="options"></a>
### Options
The options accepted have keys corresponding to the options described in [CLI Arguments](#cliargs):
```js
{
colorize: colorette.isColorSupported, // --colorize
colorizeObjects: true, //--colorizeObjects
crlf: false, // --crlf
errorLikeObjectKeys: ['err', 'error'], // --errorLikeObjectKeys
errorProps: '', // --errorProps
levelFirst: false, // --levelFirst
messageKey: 'msg', // --messageKey
levelKey: 'level', // --levelKey
messageFormat: false, // --messageFormat
timestampKey: 'time', // --timestampKey
translateTime: false, // --translateTime
ignore: 'pid,hostname', // --ignore
include: 'level,time', // --include
hideObject: false, // --hideObject
singleLine: false, // --singleLine
customColors: 'err:red,info:blue', // --customColors
customLevels: 'err:99,info:1', // --customLevels
levelLabel: 'levelLabel', // --levelLabel
minimumLevel: 'info', // --minimumLevel
useOnlyCustomProps: true, // --useOnlyCustomProps
// The file or file descriptor (1 is stdout) to write to
destination: 1,
// Alternatively, pass a `sonic-boom` instance (allowing more flexibility):
// destination: new SonicBoom({ dest: 'a/file', mkdir: true })
// You can also configure some SonicBoom options directly
sync: false, // by default we write asynchronously
append: true, // the file is opened with the 'a' flag
mkdir: true, // create the target destination
customPrettifiers: {}
}
```
The `colorize` default follows
[`colorette.isColorSupported`](https://github.com/jorgebucaran/colorette#iscolorsupported).
The defaults for `sync`, `append`, `mkdir` inherit from
[`SonicBoom(opts)`](https://github.com/pinojs/sonic-boom#API).
`customPrettifiers` option provides the ability to add a custom prettify function
for specific log properties. `customPrettifiers` is an object, where keys are
log properties that will be prettified and value is the prettify function itself.
For example, if a log line contains a `query` property,
you can specify a prettifier for it:
```js
{
customPrettifiers: {
query: prettifyQuery
}
}
//...
const prettifyQuery = value => {
// do some prettify magic
}
```
Additionally, `customPrettifiers` can be used to format the `time`, `hostname`,
`pid`, `name`, `caller` and `level` outputs:
```js
{
customPrettifiers: {
// The argument for this function will be the same
// string that's at the start of the log-line by default:
time: timestamp => `🕰 ${timestamp}`,
// The argument for the level-prettifier may vary depending
// on if the levelKey option is used or not.
// By default this will be the same numerics as the Pino default:
level: logLevel => `LEVEL: ${logLevel}`,
// other prettifiers can be used for the other keys if needed, for example
hostname: hostname => colorGreen(hostname),
pid: pid => colorRed(pid),
name: name => colorBlue(name),
caller: caller => colorCyan(caller)
}
}
```
Note that prettifiers do not include any coloring, if the stock coloring on
`level` is desired, it can be accomplished using the following:
```js
const { colorizerFactory } = require('pino-pretty')
const levelColorize = colorizerFactory(true)
const levelPrettifier = logLevel => `LEVEL: ${levelColorize(logLevel)}`
//...
{
customPrettifiers: { level: levelPrettifier }
}
```
`messageFormat` option allows you to customize the message output.
A template `string` like this can define the format:
```js
{
messageFormat: '{levelLabel} - {pid} - url:{req.url}'
}
```
In addition to this, if / end statement blocks can also be specified.
Else statements and nested conditions are not supported.
```js
{
messageFormat: '{levelLabel} - {if pid}{pid} - {end}url:{req.url}'
}
```
This option can also be defined as a `function` with this prototype:
```js
{
messageFormat: (log, messageKey, levelLabel) => {
// do some log message customization
return customized_message;
}
}
```
## Limitations
Because `pino-pretty` uses stdout redirection, in some cases the command may
terminate with an error due to shell limitations.
For example, currently, mingw64 based shells (e.g. Bash as supplied by [git for
Windows](https://gitforwindows.org)) are affected and terminate the process with
a `stdout is not a tty` error message.
Any PRs are welcomed!
<a id="license"></a>
## License
MIT License

105
backend/node_modules/pino-pretty/benchmark.js generated vendored Normal file
View File

@@ -0,0 +1,105 @@
'use strict'

// Benchmarks for pino-pretty. The module's entire purpose is a slow
// operation, so absolute throughput is not interesting; these runs exist to
// show how feature work or refactoring shifts relative performance.

const bench = require('fastbench')
const {
  prettyFactory
} = require('./index')

const max = 10
const tstampMillis = 1693401358754

// Feed `input` through `pretty` a fixed number of times, then yield to the
// event loop before signalling fastbench that this run is finished.
function exercise (pretty, input, cb) {
  for (let i = 0; i < max; i += 1) {
    pretty(input)
  }
  setImmediate(cb)
}

const run = bench([
  function basicLog (cb) {
    const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n`
    exercise(prettyFactory({}), input, cb)
  },

  // Same payload as basicLog, but already parsed: measures the object path.
  function objectLog (cb) {
    const input = {
      time: tstampMillis,
      pid: 1,
      hostname: 'foo',
      msg: 'benchmark',
      foo: 'foo',
      bar: { bar: 'bar' }
    }
    exercise(prettyFactory({}), input, cb)
  },

  function coloredLog (cb) {
    const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n`
    exercise(prettyFactory({ colorize: true }), input, cb)
  },

  // Cost of routing the time and pid keys through user-supplied prettifiers.
  function customPrettifiers (cb) {
    const pretty = prettyFactory({
      customPrettifiers: {
        time (tstamp) {
          return tstamp
        },
        pid () {
          return ''
        }
      }
    })
    const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n`
    exercise(pretty, input, cb)
  },

  function logWithErrorObject (cb) {
    const err = Error('boom')
    const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"},"err":{"message":"${err.message}","stack":"${err.stack}"}}\n`
    exercise(prettyFactory({}), input, cb)
  },

  // Non-default message/error keys exercise the key-remapping code path.
  function logRemappedMsgErrKeys (cb) {
    const pretty = prettyFactory({
      messageKey: 'message',
      errorLikeObjectKeys: ['myError']
    })
    const err = Error('boom')
    const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","message":"benchmark","foo":"foo","bar":{"bar":"bar"},"myError":{"message":"${err.message}","stack":"${err.stack}"}}\n`
    exercise(pretty, input, cb)
  },

  function messageFormatString (cb) {
    const pretty = prettyFactory({
      messageFormat: '{levelLabel}{if pid} {pid} - {end}{msg}'
    })
    const input = `{"time":${tstampMillis},"pid":1,"hostname":"foo","msg":"benchmark","foo":"foo","bar":{"bar":"bar"}}\n`
    exercise(pretty, input, cb)
  }
], 10000)

run(run)

112
backend/node_modules/pino-pretty/bin.js generated vendored Executable file
View File

@@ -0,0 +1,112 @@
#!/usr/bin/env node

// CLI entry point: reads newline-delimited JSON log lines from stdin,
// prettifies them, and streams the result to stdout.

const fs = require('fs')
const path = require('path')
// `help-me` renders the plain-text usage screens stored in ./help.
const help = require('help-me')({
  dir: path.join(__dirname, 'help'),
  ext: '.txt'
})
const pump = require('pump')
const sjp = require('secure-json-parse')
const JoyCon = require('joycon')
const stripJsonComments = require('strip-json-comments')

const build = require('./')
const CONSTANTS = require('./lib/constants')
const { isObject } = require('./lib/utils')
const minimist = require('minimist')

// Parse config-file JSON: comments are stripped first, and `__proto__`
// keys are removed to guard against prototype pollution.
const parseJSON = input => {
  return sjp.parse(stripJsonComments(input), { protoAction: 'remove' })
}

// joycon locates and loads a runtime configuration file, searching the
// well-known file names below, starting from the current working directory.
const joycon = new JoyCon({
  parseJSON,
  files: [
    'pino-pretty.config.cjs',
    'pino-pretty.config.js',
    '.pino-prettyrc',
    '.pino-prettyrc.json'
  ],
  stopDir: path.dirname(process.cwd())
})

const cmd = minimist(process.argv.slice(2))

if (cmd.h || cmd.help) {
  help.toStdout()
} else {
  // Sentinel that distinguishes "user passed this flag" from "minimist filled
  // in the default"; the NUL prefix cannot collide with real CLI input.
  const DEFAULT_VALUE = '\0default'

  let opts = minimist(process.argv, {
    alias: {
      colorize: 'c',
      colorizeObjects: 'C',
      crlf: 'f',
      errorProps: 'e',
      levelFirst: 'l',
      minimumLevel: 'L',
      customLevels: 'x',
      customColors: 'X',
      useOnlyCustomProps: 'U',
      errorLikeObjectKeys: 'k',
      messageKey: 'm',
      levelKey: CONSTANTS.LEVEL_KEY,
      levelLabel: 'b',
      messageFormat: 'o',
      timestampKey: 'a',
      translateTime: 't',
      ignore: 'i',
      include: 'I',
      hideObject: 'H',
      singleLine: 'S'
    },
    default: {
      messageKey: DEFAULT_VALUE,
      minimumLevel: DEFAULT_VALUE,
      levelKey: DEFAULT_VALUE,
      timestampKey: DEFAULT_VALUE
    }
  })
  // Remove default values
  opts = filter(opts, value => value !== DEFAULT_VALUE)
  const config = loadConfig(opts.config)
  // Override config with cli options
  opts = Object.assign({}, config, opts)
  // set defaults
  opts.errorLikeObjectKeys = opts.errorLikeObjectKeys || 'err,error'
  opts.errorProps = opts.errorProps || ''

  const res = build(opts)
  pump(process.stdin, res)

  // https://github.com/pinojs/pino/pull/358
  /* istanbul ignore next */
  if (!process.stdin.isTTY && !fs.fstatSync(process.stdin.fd).isFile()) {
    process.once('SIGINT', function noOp () {})
  }

  // Load pino-pretty options from `configPath` when given, otherwise from the
  // nearest well-known config file found by joycon. Throws when an explicitly
  // requested file is missing or does not contain a JSON object.
  function loadConfig (configPath) {
    const files = configPath ? [path.resolve(configPath)] : undefined
    const result = joycon.loadSync(files)
    if (result.path && !isObject(result.data)) {
      configPath = configPath || path.basename(result.path)
      throw new Error(`Invalid runtime configuration file: ${configPath}`)
    }
    if (configPath && !result.data) {
      throw new Error(`Failed to load runtime configuration file: ${configPath}`)
    }
    return result.data
  }

  // Return a shallow copy of `obj` containing only the entries for which
  // `cb(value, key)` is truthy.
  function filter (obj, cb) {
    return Object.keys(obj).reduce((acc, key) => {
      const value = obj[key]
      if (cb(value, key)) {
        acc[key] = value
      }
      return acc
    }, {})
  }
}

9
backend/node_modules/pino-pretty/coverage-map.js generated vendored Normal file
View File

@@ -0,0 +1,9 @@
'use strict'
module.exports = testFile => {
// Ignore coverage on files that do not have a direct corollary.
if (testFile.startsWith('test/')) return false
// Indicate the matching name, sans '.test.js', should be checked for coverage.
return testFile.replace(/\.test\.js$/, '.js')
}

BIN
backend/node_modules/pino-pretty/demo.png generated vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

24
backend/node_modules/pino-pretty/docs/help.md generated vendored Normal file
View File

@@ -0,0 +1,24 @@
<a id="Systemd"></a>
## Systemd example
If you run your Node.js process via [Systemd](https://www.freedesktop.org/wiki/Software/systemd/) and you examine your logs with [journalctl](https://www.freedesktop.org/software/systemd/man/journalctl.html) some data will be duplicated. You can use a combination of `journalctl` options and `pino-pretty` options to shape the output.
For example viewing the prettified logs of a process named `monitor` with `journalctl -u monitor -f | pino-pretty`, might output something like this:
```
Apr 24 07:40:01 nanopi node[6080]: {"level":30,"time":1587706801902,"pid":6080,"hostname":"nanopi","msg":"TT
21","v":1}
```
As you can see, the timestamp, hostname, and pid are duplicated.
If you just want the bare prettified Pino logs you can strip out the duplicate items from the `journalctl` output with the `-o cat` option of `journalctl`:
```
journalctl -u monitor -f -o cat | pino-pretty
```
the output now looks something like this:
```
[1587706801902] INFO (6080 on nanopi): TT 21
```
Make the output even more human-readable by using the pino-pretty options `-t` to format the timestamp and `-i pid,hostname` to filter out hostname and pid:
```
[2020-04-24 05:42:24.836 +0000] INFO : TT 21
```

68
backend/node_modules/pino-pretty/help/help.txt generated vendored Normal file
View File

@@ -0,0 +1,68 @@
Usage: pino pretty [options] [command]
Commands:
help Display help
version Display version
Options:
-c, --colorize Force adding color sequences to the output
--config                   Specify a path to a json file containing the pino-pretty options
-f, --crlf Append CRLF instead of LF to formatted lines
-X, --customColors Override default colors using names from https://www.npmjs.com/package/colorette (`-X err:red,info:blue`)
-x, --customLevels Override default levels (`-x err:99,info:1`)
-k, --errorLikeObjectKeys Define which keys contain error objects (`-k err,error`) (defaults to `err,error`)
-e, --errorProps Comma separated list of properties on error objects to show (`*` for all properties) (defaults to ``)
-h, --help Output usage information
-H, --hideObject Hide objects from output (but not error object)
-i, --ignore Ignore one or several keys: (`-i time,hostname`)
-I, --include The opposite of `--ignore`, only include one or several keys: (`-I level,time`)
-l, --levelFirst Display the log level as the first output field
--levelKey [value]         Detect the log level under the specified key (defaults to "level")
-b, --levelLabel [value] Output the log level using the specified label (defaults to "levelLabel")
-o, --messageFormat Format output of message
-m, --messageKey [value] Highlight the message under the specified key (defaults to "msg")
-L, --minimumLevel Hide messages below the specified log level
-S, --singleLine Print all non-error objects on a single line
-a, --timestampKey [value] Display the timestamp from the specified key (defaults to "time")
-t, --translateTime Display epoch timestamps as UTC ISO format or according to an optional format string (default ISO 8601)
-U, --useOnlyCustomProps Only use custom levels and colors (if provided); don't fallback to default levels and colors (-U false)
-v, --version Output the version number
Examples:
- To prettify logs, simply pipe a log file through
$ cat log | pino-pretty
- To highlight a string at a key other than 'msg'
$ cat log | pino-pretty -m fooMessage
- To detect the log level at a key other than 'level'
$ cat log | pino-pretty --levelKey fooLevel
- To output the log level label using a key other than 'levelLabel'
$ cat log | pino-pretty --levelLabel LVL -o "{LVL}"
- To display timestamp from a key other than 'time'
$ cat log | pino-pretty -a fooTimestamp
- To convert Epoch timestamps to ISO timestamps use the -t option
$ cat log | pino-pretty -t
- To convert Epoch timestamps to local timezone format use the -t option with "SYS:" prefixed format string
$ cat log | pino-pretty -t "SYS:yyyy-mm-dd HH:MM:ss"
- To flip level and time/date in standard output use the -l option
$ cat log | pino-pretty -l
- Only prints messages with a minimum log level of info
$ cat log | pino-pretty -L info
- Prettify logs but don't print pid and hostname
$ cat log | pino-pretty -i pid,hostname
- Prettify logs but only print time and level
$ cat log | pino-pretty -I time,level
- Loads options from a config file
$ cat log | pino-pretty --config=/path/to/config.json

217
backend/node_modules/pino-pretty/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,217 @@
// Type definitions for pino-pretty 7.0
// Project: https://github.com/pinojs/pino-pretty#readme
// Definitions by: Adam Vigneaux <https://github.com/AdamVig>
//                 tearwyx <https://github.com/tearwyx>
// Minimum TypeScript Version: 3.0

/// <reference types="node" />

import { Transform } from 'stream';
import { OnUnknown } from 'pino-abstract-transport';
// @ts-ignore fall back to any if pino is not available, i.e. when running pino tests
import { DestinationStream, Level } from 'pino';

// A parsed log line: arbitrary key/value pairs.
type LogDescriptor = Record<string, unknown>;

// Default export: build a prettifying transform stream from the given options.
declare function PinoPretty(options?: PrettyOptions_): PinoPretty.PrettyStream;

// Builds a level colorizer. The returned function maps a level (number or
// name) to its colored label; `message`/`greyMessage` color message text.
declare function colorizerFactory(
  useColors?: boolean,
  customColors?: [number, string][],
  useOnlyCustomProps?: boolean,
): {
  (
    level?: number | string,
    opts?: {
      customLevels?: { [level: number]: string };
      customLevelNames?: { [name: string]: number };
    },
  ): string,
  message: (input: string | number) => string,
  greyMessage: (input: string | number) => string,
}

// Builds a pure prettifier: log line (string or object) in, pretty string out.
declare function prettyFactory(
  options: PrettyOptions_,
): (inputData: any) => string

interface PrettyOptions_ {
  /**
   * Hide objects from output (but not error object).
   * @default false
   */
  hideObject?: boolean;
  /**
   * Translate the epoch time value into a human-readable date and time string. This flag also can set the format
   * string to apply when translating the date to human-readable format. For a list of available pattern letters
   * see the {@link https://www.npmjs.com/package/dateformat|dateformat documentation}.
   * - The default format is `yyyy-mm-dd HH:MM:ss.l o` in UTC.
   * - Requires a `SYS:` prefix to translate time to the local system's timezone. Use the shortcut `SYS:standard`
   *   to translate time to `yyyy-mm-dd HH:MM:ss.l o` in system timezone.
   * @default false
   */
  translateTime?: boolean | string;
  /**
   * If set to true, it will print the name of the log level as the first field in the log line.
   * @default false
   */
  levelFirst?: boolean;
  /**
   * Define the key that contains the level of the log.
   * @default "level"
   */
  levelKey?: string;
  /**
   * Output the log level using the specified label.
   * @default "levelLabel"
   */
  levelLabel?: string;
  /**
   * The key in the JSON object to use as the highlighted message.
   * @default "msg"
   */
  messageKey?: string;
  /**
   * Print each log message on a single line (errors will still be multi-line).
   * @default false
   */
  singleLine?: boolean;
  /**
   * The key in the JSON object to use for timestamp display.
   * @default "time"
   */
  timestampKey?: string;
  /**
   * The minimum log level to include in the output.
   * @default "trace"
   */
  minimumLevel?: Level;
  /**
   * Format output of message, e.g. {level} - {pid} will output message: INFO - 1123
   * @default false
   *
   * @example
   * ```typescript
   * {
   *   messageFormat: (log, messageKey) => {
   *     const message = log[messageKey];
   *     if (log.requestId) return `[${log.requestId}] ${message}`;
   *     return message;
   *   }
   * }
   * ```
   */
  messageFormat?: false | string | PinoPretty.MessageFormatFunc;
  /**
   * If set to true, will add color information to the formatted output message.
   * @default false
   */
  colorize?: boolean;
  /**
   * If set to false while `colorize` is `true`, will output JSON objects without color.
   * @default true
   */
  colorizeObjects?: boolean;
  /**
   * Appends carriage return and line feed, instead of just a line feed, to the formatted log line.
   * @default false
   */
  crlf?: boolean;
  /**
   * Define the log keys that are associated with error like objects.
   * @default ["err", "error"]
   */
  errorLikeObjectKeys?: string[];
  /**
   * When formatting an error object, display this list of properties.
   * The list should be a comma-separated list of properties.
   * @default ""
   */
  errorProps?: string;
  /**
   * Ignore one or several keys.
   * Will be overridden by the option include if include is presented.
   * @example "time,hostname"
   */
  ignore?: string;
  /**
   * Include one or several keys.
   * @example "time,level"
   */
  include?: string;
  /**
   * Makes messaging synchronous.
   * @default false
   */
  sync?: boolean;
  /**
   * The file, file descriptor, or stream to write to. Defaults to 1 (stdout).
   * @default 1
   */
  destination?: string | number | DestinationStream | NodeJS.WritableStream;
  /**
   * Opens the file with the 'a' flag.
   * @default true
   */
  append?: boolean;
  /**
   * Ensure directory for destination file exists.
   * @default false
   */
  mkdir?: boolean;
  /**
   * Provides the ability to add a custom prettify function for specific log properties.
   * `customPrettifiers` is an object, where keys are log properties that will be prettified
   * and value is the prettify function itself.
   * For example, if a log line contains a query property, you can specify a prettifier for it:
   * @default {}
   *
   * @example
   * ```typescript
   * {
   *   customPrettifiers: {
   *     query: prettifyQuery
   *   }
   * }
   * //...
   * const prettifyQuery = value => {
   *   // do some prettify magic
   * }
   * ```
   */
  customPrettifiers?: Record<string, PinoPretty.Prettifier>;
  /**
   * Change the level names and values to a user-defined custom preset.
   *
   * Can be a CSV string in 'level_name:level_value' format or an object.
   *
   * @example ( CSV ) customLevels: 'info:10,some_level:40'
   * @example ( Object ) customLevels: { info: 10, some_level: 40 }
   */
  customLevels?: string|object;
  /**
   * Change the level colors to a user-defined custom preset.
   *
   * Can be a CSV string in 'level_name:color_value' format or an object.
   * Also supports 'default' as level_name for fallback color.
   *
   * @example ( CSV ) customColors: 'info:white,some_level:red'
   * @example ( Object ) customColors: { info: 'white', some_level: 'red' }
   */
  customColors?: string|object;
}

// Same as the default export: build a prettifying transform stream.
declare function build(options: PrettyOptions_): PinoPretty.PrettyStream;

declare namespace PinoPretty {
  type Prettifier = (inputData: string | object) => string;
  type MessageFormatFunc = (log: LogDescriptor, messageKey: string, levelLabel: string) => string;
  type PrettyOptions = PrettyOptions_;
  type PrettyStream = Transform & OnUnknown;
  type ColorizerFactory = typeof colorizerFactory;
  type PrettyFactory = typeof prettyFactory;
  type Build = typeof build;
}

export default PinoPretty;
export { build, PinoPretty, PrettyOptions_ as PrettyOptions, colorizerFactory, prettyFactory };

171
backend/node_modules/pino-pretty/index.js generated vendored Normal file
View File

@@ -0,0 +1,171 @@
'use strict'
const { isColorSupported } = require('colorette')
const pump = require('pump')
const { Transform } = require('readable-stream')
const abstractTransport = require('pino-abstract-transport')
const colors = require('./lib/colors')
const {
ERROR_LIKE_KEYS,
LEVEL_KEY,
LEVEL_LABEL,
MESSAGE_KEY,
TIMESTAMP_KEY
} = require('./lib/constants')
const {
buildSafeSonicBoom,
parseFactoryOptions
} = require('./lib/utils')
const pretty = require('./lib/pretty')
/**
* @typedef {object} PinoPrettyOptions
* @property {boolean} [colorize] Indicates if colors should be used when
* prettifying. The default will be determined by the terminal capabilities at
* run time.
* @property {boolean} [colorizeObjects=true] Apply coloring to rendered objects
* when coloring is enabled.
* @property {boolean} [crlf=false] End lines with `\r\n` instead of `\n`.
* @property {string|null} [customColors=null] A comma separated list of colors
* to use for specific level labels, e.g. `err:red,info:blue`.
* @property {string|null} [customLevels=null] A comma separated list of user
* defined level names and numbers, e.g. `err:99,info:1`.
* @property {CustomPrettifiers} [customPrettifiers={}] A set of prettifier
* functions to apply to keys defined in this object.
* @property {K_ERROR_LIKE_KEYS} [errorLikeObjectKeys] A list of string property
* names to consider as error objects.
* @property {string} [errorProps=''] A comma separated list of properties on
* error objects to include in the output.
* @property {boolean} [hideObject=false] When `true`, data objects will be
* omitted from the output (except for error objects).
* @property {string} [ignore='hostname'] A comma separated list of log keys
* to omit when outputting the prettified log information.
* @property {undefined|string} [include=undefined] A comma separated list of
* log keys to include in the prettified log information. Only the keys in this
* list will be included in the output.
* @property {boolean} [levelFirst=false] When true, the log level will be the
* first field in the prettified output.
* @property {string} [levelKey='level'] The key name in the log data that
* contains the level value for the log.
* @property {string} [levelLabel='levelLabel'] Token name to use in
* `messageFormat` to represent the name of the logged level.
* @property {null|MessageFormatString|MessageFormatFunction} [messageFormat=null]
* When a string, defines how the prettified line should be formatted according
* to defined tokens. When a function, a synchronous function that returns a
* formatted string.
* @property {string} [messageKey='msg'] Defines the key in incoming logs that
* contains the message of the log, if present.
* @property {undefined|string|number} [minimumLevel=undefined] The minimum
* level for logs that should be processed. Any logs below this level will
* be omitted.
* @property {object} [outputStream=process.stdout] The stream to write
* prettified log lines to.
* @property {boolean} [singleLine=false] When `true` any objects, except error
* objects, in the log data will be printed as a single line instead as multiple
* lines.
* @property {string} [timestampKey='time'] Defines the key in incoming logs
* that contains the timestamp of the log, if present.
* @property {boolean|string} [translateTime=true] When true, will translate a
* JavaScript date integer into a human-readable string. If set to a string,
* it must be a format string.
* @property {boolean} [useOnlyCustomProps=true] When true, only custom levels
* and colors will be used if they have been provided.
*/
/**
 * The default options that will be used when prettifying log lines.
 *
 * @type {PinoPrettyOptions}
 */
const defaultOptions = {
  colorize: isColorSupported, // auto-detected from the terminal at load time
  colorizeObjects: true,
  crlf: false,
  customColors: null,
  customLevels: null,
  customPrettifiers: {},
  errorLikeObjectKeys: ERROR_LIKE_KEYS, // ['err', 'error'] per lib/constants
  errorProps: '',
  hideObject: false,
  ignore: 'hostname',
  include: undefined, // when set, takes precedence over `ignore`
  levelFirst: false,
  levelKey: LEVEL_KEY, // 'level'
  levelLabel: LEVEL_LABEL, // 'levelLabel'
  messageFormat: null,
  messageKey: MESSAGE_KEY, // 'msg'
  minimumLevel: undefined,
  outputStream: process.stdout,
  singleLine: false,
  timestampKey: TIMESTAMP_KEY, // 'time'
  translateTime: true,
  useOnlyCustomProps: true
}
/**
 * Processes the supplied options and returns a function that accepts log data
 * and produces a prettified log string.
 *
 * @param {PinoPrettyOptions} options Configuration for the prettifier.
 * @returns {LogPrettifierFunc}
 */
function prettyFactory (options) {
  const mergedOptions = { ...defaultOptions, ...options }
  const context = parseFactoryOptions(mergedOptions)
  // `pretty` reads both the flattened context fields and the whole `context`
  // object from its `this` binding.
  return pretty.bind(Object.assign({}, context, { context }))
}
/**
* @typedef {PinoPrettyOptions} BuildStreamOpts
* @property {object|number|string} [destination] A destination stream, file
* descriptor, or target path to a file.
* @property {boolean} [append]
* @property {boolean} [mkdir]
* @property {boolean} [sync=false]
*/
/**
 * Constructs a {@link LogPrettifierFunc} and a stream to which the produced
 * prettified log data will be written.
 *
 * @param {BuildStreamOpts} opts
 * @returns {Transform | (Transform & OnUnknown)}
 */
function build (opts = {}) {
  const prettify = prettyFactory(opts)
  return abstractTransport(function (source) {
    // Turns each parsed log record into its prettified string form.
    const transformStream = new Transform({
      objectMode: true,
      autoDestroy: true,
      transform (chunk, enc, cb) {
        cb(null, prettify(chunk))
      }
    })
    const userDest = opts.destination
    const destination = (typeof userDest === 'object' && typeof userDest.write === 'function')
      ? userDest // caller supplied their own writable stream
      : buildSafeSonicBoom({
        dest: opts.destination || 1, // default: stdout (fd 1)
        append: opts.append,
        mkdir: opts.mkdir,
        sync: opts.sync // by default sonic will be async
      })
    // Lines that could not be parsed as log records are forwarded verbatim.
    source.on('unknown', function (line) {
      destination.write(line + '\n')
    })
    pump(source, transformStream, destination)
    return transformStream
  }, { parse: 'lines' })
}
// Expose the stream builder as both the default export and a named export so
// `require('pino-pretty')` and destructured imports both work.
module.exports = build
module.exports.build = build
module.exports.prettyFactory = prettyFactory
module.exports.colorizerFactory = colors
module.exports.default = build

136
backend/node_modules/pino-pretty/lib/colors.js generated vendored Normal file
View File

@@ -0,0 +1,136 @@
'use strict'
const { LEVELS, LEVEL_NAMES } = require('./constants')
// Identity "color" function used when color output is disabled.
const nocolor = input => input
// Level-number → colorizer map whose entries all leave the string untouched.
const plain = {
  default: nocolor,
  60: nocolor,
  50: nocolor,
  40: nocolor,
  30: nocolor,
  20: nocolor,
  10: nocolor,
  message: nocolor,
  greyMessage: nocolor
}
const { createColors } = require('colorette')
// Force-create color functions here; whether they are actually used is
// decided by the caller of getColorizer.
const availableColors = createColors({ useColor: true })
const { white, bgRed, red, yellow, green, blue, gray, cyan } = availableColors
// Default ANSI color per standard Pino level number.
const colored = {
  default: white,
  60: bgRed,
  50: red,
  40: yellow,
  30: green,
  20: blue,
  10: gray,
  message: cyan,
  greyMessage: gray
}
/**
 * Builds a level → color-function map from `[level, colorName]` tuples,
 * falling back to white for unknown color names.
 */
function resolveCustomColoredColorizer (customColors) {
  const colorizer = { default: white, message: cyan, greyMessage: gray }
  for (const [level, color] of customColors) {
    colorizer[level] = typeof availableColors[color] === 'function'
      ? availableColors[color]
      : white
  }
  return colorizer
}
/**
 * Builds a function that resolves a level (number or name) to its label and
 * applies the matching colorizer entry, falling back to `colorizer.default`
 * (label `USERLVL`) for levels that are not recognized.
 */
function colorizeLevel (useOnlyCustomProps) {
  return function (level, colorizer, { customLevels, customLevelNames } = {}) {
    let levels
    let levelNames
    if (useOnlyCustomProps) {
      levels = customLevels || LEVELS
      levelNames = customLevelNames || LEVEL_NAMES
    } else {
      levels = Object.assign({}, LEVELS, customLevels)
      levelNames = Object.assign({}, LEVEL_NAMES, customLevelNames)
    }
    const hasOwn = Object.prototype.hasOwnProperty
    let levelNum = 'default'
    if (Number.isInteger(+level)) {
      if (hasOwn.call(levels, level)) {
        levelNum = level
      }
    } else {
      const lowered = level.toLowerCase()
      if (hasOwn.call(levelNames, lowered)) {
        levelNum = levelNames[lowered]
      }
    }
    const label = levels[levelNum]
    return hasOwn.call(colorizer, levelNum)
      ? colorizer[levelNum](label)
      : colorizer.default(label)
  }
}
/**
 * Builds a colorizer that performs no coloring: level labels and message
 * helpers all pass strings through unchanged.
 */
function plainColorizer (useOnlyCustomProps) {
  const applyLevel = colorizeLevel(useOnlyCustomProps)
  const colorizer = (level, opts) => applyLevel(level, plain, opts)
  colorizer.message = plain.message
  colorizer.greyMessage = plain.greyMessage
  return colorizer
}
/**
 * Builds a colorizer that applies the standard ANSI colors defined in the
 * `colored` map to level labels and messages.
 */
function coloredColorizer (useOnlyCustomProps) {
  const applyLevel = colorizeLevel(useOnlyCustomProps)
  const colorizer = (level, opts) => applyLevel(level, colored, opts)
  colorizer.message = colored.message
  colorizer.greyMessage = colored.greyMessage
  return colorizer
}
/**
 * Builds a colorizer backed by user-supplied `[level, colorName]` tuples.
 * When `useOnlyCustomProps` is false, the custom colors are layered over the
 * standard `colored` map instead of replacing it.
 */
function customColoredColorizerFactory (customColors, useOnlyCustomProps) {
  const onlyCustomColored = resolveCustomColoredColorizer(customColors)
  const customColored = useOnlyCustomProps
    ? onlyCustomColored
    : Object.assign({}, colored, onlyCustomColored)
  const colorizeLevelCustom = colorizeLevel(useOnlyCustomProps)
  const customColoredColorizer = function (level, opts) {
    return colorizeLevelCustom(level, customColored, opts)
  }
  // `customColoredColorizer` is a freshly created function, so it can have no
  // `message`/`greyMessage` of its own yet — assign directly. (The previous
  // `customColoredColorizer.message || …` guard was always falsy dead code.)
  customColoredColorizer.message = customColored.message
  customColoredColorizer.greyMessage = customColored.greyMessage
  return customColoredColorizer
}
/**
* Applies colorization, if possible, to a string representing the passed in
* `level`. For example, the default colorizer will return a "green" colored
* string for the "info" level.
*
* @typedef {function} ColorizerFunc
* @param {string|number} level In either case, the input will map to a color
* for the specified level or to the color for `USERLVL` if the level is not
* recognized.
* @property {function} message Accepts one string parameter that will be
* colorized to a predefined color.
*/
/**
* Factory function get a function to colorized levels. The returned function
* also includes a `.message(str)` method to colorize strings.
*
* @param {boolean} [useColors=false] When `true` a function that applies standard
* terminal colors is returned.
* @param {array[]} [customColors] Tuple where first item of each array is the
* level index and the second item is the color
* @param {boolean} [useOnlyCustomProps] When `true`, only use the provided
* custom colors provided and not fallback to default
*
* @returns {ColorizerFunc} `function (level) {}` has a `.message(str)` method to
* apply colorization to a string. The core function accepts either an integer
* `level` or a `string` level. The integer level will map to a known level
* string or to `USERLVL` if not known. The string `level` will map to the same
* colors as the integer `level` and will also default to `USERLVL` if the given
* string is not a recognized level name.
*/
module.exports = function getColorizer (useColors = false, customColors, useOnlyCustomProps) {
if (useColors && customColors !== undefined) {
return customColoredColorizerFactory(customColors, useOnlyCustomProps)
} else if (useColors) {
return coloredColorizer(useOnlyCustomProps)
}
return plainColorizer(useOnlyCustomProps)
}

132
backend/node_modules/pino-pretty/lib/colors.test.js generated vendored Normal file
View File

@@ -0,0 +1,132 @@
'use strict'
const { test } = require('tap')
const getColorizer = require('./colors')
// Shared test body for the no-color (plain) colorizer: every known level maps
// to its label with no ANSI escapes, unknown levels fall back to USERLVL, and
// the message helpers are pass-through.
const testDefaultColorizer = getColorizer => async t => {
  const colorizer = getColorizer()
  let colorized = colorizer(10)
  t.equal(colorized, 'TRACE')
  colorized = colorizer(20)
  t.equal(colorized, 'DEBUG')
  colorized = colorizer(30)
  t.equal(colorized, 'INFO')
  colorized = colorizer(40)
  t.equal(colorized, 'WARN')
  colorized = colorizer(50)
  t.equal(colorized, 'ERROR')
  colorized = colorizer(60)
  t.equal(colorized, 'FATAL')
  colorized = colorizer(900)
  t.equal(colorized, 'USERLVL')
  colorized = colorizer('info')
  t.equal(colorized, 'INFO')
  colorized = colorizer('use-default')
  t.equal(colorized, 'USERLVL')
  colorized = colorizer.message('foo')
  t.equal(colorized, 'foo')
  colorized = colorizer.greyMessage('foo')
  t.equal(colorized, 'foo')
}
// Shared test body for the ANSI-coloring colorizer: expected values embed the
// raw escape sequences (e.g. \u001B[32m…\u001B[39m for green INFO).
const testColoringColorizer = getColorizer => async t => {
  const colorizer = getColorizer(true)
  let colorized = colorizer(10)
  t.equal(colorized, '\u001B[90mTRACE\u001B[39m')
  colorized = colorizer(20)
  t.equal(colorized, '\u001B[34mDEBUG\u001B[39m')
  colorized = colorizer(30)
  t.equal(colorized, '\u001B[32mINFO\u001B[39m')
  colorized = colorizer(40)
  t.equal(colorized, '\u001B[33mWARN\u001B[39m')
  colorized = colorizer(50)
  t.equal(colorized, '\u001B[31mERROR\u001B[39m')
  colorized = colorizer(60)
  t.equal(colorized, '\u001B[41mFATAL\u001B[49m')
  colorized = colorizer(900)
  t.equal(colorized, '\u001B[37mUSERLVL\u001B[39m')
  colorized = colorizer('info')
  t.equal(colorized, '\u001B[32mINFO\u001B[39m')
  colorized = colorizer('use-default')
  t.equal(colorized, '\u001B[37mUSERLVL\u001B[39m')
  colorized = colorizer.message('foo')
  t.equal(colorized, '\u001B[36mfoo\u001B[39m')
  colorized = colorizer.greyMessage('foo')
  t.equal(colorized, '\u001B[90mfoo\u001B[39m')
}
// Exercises custom levels and custom colors, including the fallback to white
// for unknown color names ('not-a-color') and useOnlyCustomProps behavior.
const testCustomColoringColorizer = getColorizer => async t => {
  const customLevels = {
    0: 'INFO',
    1: 'ERR',
    default: 'USERLVL'
  }
  const customLevelNames = {
    info: 0,
    err: 1
  }
  const customColors = [
    [0, 'not-a-color'],
    [1, 'red']
  ]
  const opts = {
    customLevels,
    customLevelNames
  }
  const colorizer = getColorizer(true, customColors)
  const colorizerWithCustomPropUse = getColorizer(true, customColors, true)
  let colorized = colorizer(1, opts)
  t.equal(colorized, '\u001B[31mERR\u001B[39m')
  colorized = colorizer(0, opts)
  t.equal(colorized, '\u001B[37mINFO\u001B[39m')
  colorized = colorizer(900)
  t.equal(colorized, '\u001B[37mUSERLVL\u001B[39m')
  colorized = colorizer('err', opts)
  t.equal(colorized, '\u001B[31mERR\u001B[39m')
  colorized = colorizer('info', opts)
  t.equal(colorized, '\u001B[37mINFO\u001B[39m')
  colorized = colorizer('use-default')
  t.equal(colorized, '\u001B[37mUSERLVL\u001B[39m')
  colorized = colorizer(40, opts)
  t.equal(colorized, '\u001B[33mWARN\u001B[39m')
  colorized = colorizerWithCustomPropUse(50, opts)
  t.equal(colorized, '\u001B[37mUSERLVL\u001B[39m')
}
test('returns default colorizer - private export', testDefaultColorizer(getColorizer))
test('returns colorizing colorizer - private export', testColoringColorizer(getColorizer))
test('returns custom colorizing colorizer - private export', testCustomColoringColorizer(getColorizer))
test('custom props defaults to standard levels', async t => {
  const colorizer = getColorizer(true, [], true)
  const colorized = colorizer('info')
  t.equal(colorized, '\u001B[37mINFO\u001B[39m')
})

55
backend/node_modules/pino-pretty/lib/constants.js generated vendored Normal file
View File

@@ -0,0 +1,55 @@
'use strict'
/**
* A set of property names that indicate the value represents an error object.
*
* @typedef {string[]} K_ERROR_LIKE_KEYS
*/
module.exports = {
  // dateformat mask used for the 'SYS:STANDARD' translateTime setting
  DATE_FORMAT: 'yyyy-mm-dd HH:MM:ss.l o',
  // dateformat mask used when translateTime is `true`
  DATE_FORMAT_SIMPLE: 'HH:MM:ss.l',
  /**
   * @type {K_ERROR_LIKE_KEYS}
   */
  ERROR_LIKE_KEYS: ['err', 'error'],
  // Default key names used to pull well-known fields out of a log record.
  MESSAGE_KEY: 'msg',
  LEVEL_KEY: 'level',
  LEVEL_LABEL: 'levelLabel',
  TIMESTAMP_KEY: 'time',
  // Standard Pino level numbers mapped to their display labels; 'default'
  // covers unrecognized levels.
  LEVELS: {
    default: 'USERLVL',
    60: 'FATAL',
    50: 'ERROR',
    40: 'WARN',
    30: 'INFO',
    20: 'DEBUG',
    10: 'TRACE'
  },
  // Reverse mapping: lowercase level names to their numeric values.
  LEVEL_NAMES: {
    fatal: 60,
    error: 50,
    warn: 40,
    info: 30,
    debug: 20,
    trace: 10
  },
  // Object keys that probably came from a logger like Pino or Bunyan.
  LOGGER_KEYS: [
    'pid',
    'hostname',
    'name',
    'level',
    'time',
    'timestamp',
    'caller'
  ]
}

169
backend/node_modules/pino-pretty/lib/pretty.js generated vendored Normal file
View File

@@ -0,0 +1,169 @@
'use strict'
module.exports = pretty
const sjs = require('secure-json-parse')
const isObject = require('./utils/is-object')
const prettifyErrorLog = require('./utils/prettify-error-log')
const prettifyLevel = require('./utils/prettify-level')
const prettifyMessage = require('./utils/prettify-message')
const prettifyMetadata = require('./utils/prettify-metadata')
const prettifyObject = require('./utils/prettify-object')
const prettifyTime = require('./utils/prettify-time')
const filterLog = require('./utils/filter-log')
const {
LEVELS,
LEVEL_KEY,
LEVEL_NAMES
} = require('./constants')
// Safely parses one JSON log line; prototype-polluting keys are stripped.
// Returns `{ value }` on success or `{ err }` on parse failure.
const jsonParser = input => {
  try {
    const value = sjs.parse(input, { protoAction: 'remove' })
    return { value }
  } catch (err) {
    return { err }
  }
}
/**
* Orchestrates processing the received log data according to the provided
* configuration and returns a prettified log string.
*
* @typedef {function} LogPrettifierFunc
* @param {string|object} inputData A log string or a log-like object.
* @returns {string} A string that represents the prettified log data.
*/
function pretty (inputData) {
  // Accept either a raw JSON string or an already-parsed log object; strings
  // that do not parse to an object are passed through untouched.
  let log
  if (!isObject(inputData)) {
    const parsed = jsonParser(inputData)
    if (parsed.err || !isObject(parsed.value)) {
      // pass through
      return inputData + this.EOL
    }
    log = parsed.value
  } else {
    log = inputData
  }
  if (this.minimumLevel) {
    // We need to figure out if the custom levels has the desired minimum
    // level & use that one if found. If not, determine if the level exists
    // in the standard levels. In both cases, make sure we have the level
    // number instead of the level name.
    let condition
    if (this.useOnlyCustomProps) {
      // NOTE(review): this checks only that customLevels exists, not that it
      // contains minimumLevel — presumably intentional upstream behavior.
      condition = this.customLevels
    } else {
      condition = this.customLevelNames[this.minimumLevel] !== undefined
    }
    let minimum
    if (condition) {
      minimum = this.customLevelNames[this.minimumLevel]
    } else {
      minimum = LEVEL_NAMES[this.minimumLevel]
    }
    if (!minimum) {
      // minimumLevel may be a numeric level: map it through LEVELS to a name,
      // then back to its number.
      minimum = typeof this.minimumLevel === 'string'
        ? LEVEL_NAMES[this.minimumLevel]
        : LEVEL_NAMES[LEVELS[this.minimumLevel].toLowerCase()]
    }
    const level = log[this.levelKey === undefined ? LEVEL_KEY : this.levelKey]
    // Drop (return undefined for) records below the minimum level.
    if (level < minimum) return
  }
  // The message is prettified before key filtering so that filtered keys can
  // still appear in a custom messageFormat.
  const prettifiedMessage = prettifyMessage({ log, context: this.context })
  if (this.ignoreKeys || this.includeKeys) {
    log = filterLog({ log, context: this.context })
  }
  const prettifiedLevel = prettifyLevel({
    log,
    context: {
      ...this.context,
      // This is odd. The colorizer ends up relying on the value of
      // `customProperties` instead of the original `customLevels` and
      // `customLevelNames`.
      ...this.context.customProperties
    }
  })
  const prettifiedMetadata = prettifyMetadata({ log, context: this.context })
  const prettifiedTime = prettifyTime({ log, context: this.context })
  // Assemble the line in configured order: [level] time [level] metadata: msg
  let line = ''
  if (this.levelFirst && prettifiedLevel) {
    line = `${prettifiedLevel}`
  }
  if (prettifiedTime && line === '') {
    line = `${prettifiedTime}`
  } else if (prettifiedTime) {
    line = `${line} ${prettifiedTime}`
  }
  if (!this.levelFirst && prettifiedLevel) {
    if (line.length > 0) {
      line = `${line} ${prettifiedLevel}`
    } else {
      line = prettifiedLevel
    }
  }
  if (prettifiedMetadata) {
    if (line.length > 0) {
      line = `${line} ${prettifiedMetadata}:`
    } else {
      line = prettifiedMetadata
    }
  }
  if (line.endsWith(':') === false && line !== '') {
    line += ':'
  }
  if (prettifiedMessage !== undefined) {
    if (line.length > 0) {
      line = `${line} ${prettifiedMessage}`
    } else {
      line = prettifiedMessage
    }
  }
  if (line.length > 0 && !this.singleLine) {
    line += this.EOL
  }
  // pino@7+ does not log this anymore
  if (log.type === 'Error' && log.stack) {
    const prettifiedErrorLog = prettifyErrorLog({ log, context: this.context })
    if (this.singleLine) line += this.EOL
    line += prettifiedErrorLog
  } else if (this.hideObject === false) {
    // Skip scalar message/level/time values — they were already rendered above.
    const skipKeys = [
      this.messageKey,
      this.levelKey,
      this.timestampKey
    ].filter(key => {
      return typeof log[key] === 'string' ||
        typeof log[key] === 'number' ||
        typeof log[key] === 'boolean'
    })
    const prettifiedObject = prettifyObject({
      log,
      skipKeys,
      context: this.context
    })
    // In single line mode, include a space only if prettified version isn't empty
    if (this.singleLine && !/^\s$/.test(prettifiedObject)) {
      line += ' '
    }
    line += prettifiedObject
  }
  return line
}

View File

@@ -0,0 +1,71 @@
'use strict'
module.exports = buildSafeSonicBoom
const { isMainThread } = require('worker_threads')
const SonicBoom = require('sonic-boom')
const noop = require('./noop')
/**
 * Creates a SonicBoom instance that is safe against broken pipes and that
 * flushes pending data on process exit when running asynchronously.
 *
 * @param {object} opts Options for SonicBoom
 *
 * @returns {object} A new SonicBoom stream
 */
function buildSafeSonicBoom (opts) {
  const stream = new SonicBoom(opts)
  stream.on('error', filterBrokenPipe)
  // An async stream (sync: false) must be flushed on exit. The exit hook is
  // skipped under NODE_V8_COVERAGE, which breaks it
  // (https://github.com/nodejs/node/issues/49344), and outside the main thread.
  if (!process.env.NODE_V8_COVERAGE && !opts.sync && isMainThread) {
    setupOnExit(stream)
  }
  return stream
  function filterBrokenPipe (err) {
    if (err.code !== 'EPIPE') {
      // Unknown error: stop intercepting so later errors surface normally.
      stream.removeListener('error', filterBrokenPipe)
      return
    }
    // Broken pipe: the reader went away, so silently drop all further
    // operations instead of throwing on each write.
    stream.write = noop
    stream.end = noop
    stream.flushSync = noop
    stream.destroy = noop
  }
}
/**
 * Registers a leak-free process-exit hook that flushes and ends `stream`;
 * the hook is unregistered as soon as the stream closes.
 */
function setupOnExit (stream) {
  /* istanbul ignore next */
  if (global.WeakRef && global.WeakMap && global.FinalizationRegistry) {
    // on-exit-leak-free requires WeakRef/FinalizationRegistry support and
    // does not leave dangling event handlers behind.
    const onExit = require('on-exit-leak-free')
    onExit.register(stream, autoEnd)
    stream.on('close', () => {
      onExit.unregister(stream)
    })
  }
}
/* istanbul ignore next */
// Exit handler: flushes any buffered data before the process terminates.
function autoEnd (stream, eventName) {
  // This check is needed only on some platforms
  if (stream.destroyed) {
    return
  }
  if (eventName !== 'beforeExit') {
    // No event loop remains, so flush synchronously.
    stream.flushSync()
    return
  }
  // The event loop is still running: flush asynchronously and end the
  // stream once the flushed data has drained.
  stream.flush()
  stream.on('drain', () => {
    stream.end()
  })
}

View File

@@ -0,0 +1,86 @@
'use strict'
const tap = require('tap')
const rimraf = require('rimraf')
const fs = require('fs')
const { join } = require('path')
const buildSafeSonicBoom = require('./build-safe-sonic-boom')
function noop () {}
// Creates a unique temp file next to this test and returns its path and an
// open writable file descriptor.
const file = () => {
  const dest = join(__dirname, `${process.pid}-${process.hrtime().toString()}`)
  const fd = fs.openSync(dest, 'w')
  return { dest, fd }
}
// After an EPIPE error the stream must become a no-op: writes are dropped.
tap.test('should not write when error emitted and code is "EPIPE"', async t => {
  t.plan(1)
  const { fd, dest } = file()
  const stream = buildSafeSonicBoom({ sync: true, fd, mkdir: true })
  t.teardown(() => rimraf(dest, noop))
  stream.emit('error', { code: 'EPIPE' })
  stream.write('will not work')
  const dataFile = fs.readFileSync(dest)
  t.equal(dataFile.length, 0)
})
// Non-EPIPE errors must not disable the stream: writes still land on disk.
tap.test('should stream.write works when error code is not "EPIPE"', async t => {
  t.plan(3)
  const { fd, dest } = file()
  const stream = buildSafeSonicBoom({ sync: true, fd, mkdir: true })
  t.teardown(() => rimraf(dest, noop))
  stream.on('error', () => t.pass('error emitted'))
  stream.emit('error', 'fake error description')
  t.ok(stream.write('will work'))
  const dataFile = fs.readFileSync(dest)
  t.equal(dataFile.toString(), 'will work')
})
// sync: false exercises the setupOnExit code path; the async write is awaited
// via filesystem polling.
tap.test('cover setupOnExit', async t => {
  t.plan(3)
  const { fd, dest } = file()
  const stream = buildSafeSonicBoom({ sync: false, fd, mkdir: true })
  t.teardown(() => rimraf(dest, noop))
  stream.on('error', () => t.pass('error emitted'))
  stream.emit('error', 'fake error description')
  t.ok(stream.write('will work'))
  await watchFileCreated(dest)
  const dataFile = fs.readFileSync(dest)
  t.equal(dataFile.toString(), 'will work')
})
/**
 * Polls the filesystem until `filename` exists with non-zero size, or a
 * 2 second timeout elapses.
 *
 * @param {string} filename Path of the file to wait for.
 *
 * @returns {Promise<void>} Resolves once the file exists with content;
 * rejects with an Error naming the file when the timeout is reached.
 */
function watchFileCreated (filename) {
  return new Promise((resolve, reject) => {
    const TIMEOUT = 2000
    const INTERVAL = 100
    const threshold = TIMEOUT / INTERVAL
    let counter = 0
    const interval = setInterval(() => {
      // On some CI runs file is created but not filled
      if (fs.existsSync(filename) && fs.statSync(filename).size !== 0) {
        clearInterval(interval)
        resolve()
      } else if (counter <= threshold) {
        counter++
      } else {
        clearInterval(interval)
        // Fix: the original template read `$(unknown)`, which printed that
        // literal text instead of interpolating the missing file's path.
        reject(new Error(`${filename} was not created.`))
      }
    }, INTERVAL)
  })
}

View File

@@ -0,0 +1,26 @@
'use strict'
module.exports = createDate
const isValidDate = require('./is-valid-date')
/**
 * Constructs a JS Date from a number or string. Accepts any single number
 * or single string argument that is valid for the Date() constructor,
 * or an epoch as a string.
 *
 * @param {string|number} epoch The representation of the Date.
 *
 * @returns {Date} The constructed Date.
 */
function createDate (epoch) {
  // Try the input as-is first; this covers numbers and ISO-like strings.
  const direct = new Date(epoch)
  if (isValidDate(direct)) {
    return direct
  }
  // Fall back to numeric coercion so a millisecond epoch supplied as a
  // string (e.g. '1624450038567') still parses.
  return new Date(+epoch)
}

View File

@@ -0,0 +1,20 @@
'use strict'
const tap = require('tap')
const createDate = require('./create-date')
// 2021-06-23T12:07:18.567Z expressed as a millisecond epoch.
const wanted = 1624450038567
tap.test('accepts arguments the Date constructor would accept', async t => {
  t.plan(2)
  t.same(createDate(1624450038567).getTime(), wanted)
  t.same(createDate('2021-06-23T12:07:18.567Z').getTime(), wanted)
})
tap.test('accepts epoch as a string', async t => {
  // If Date() accepts this argument, the createDate function is not needed
  // and can be replaced with Date()
  t.plan(2)
  t.notSame(new Date('16244500385-67').getTime(), wanted)
  t.same(createDate('1624450038567').getTime(), wanted)
})

View File

@@ -0,0 +1,28 @@
'use strict'
module.exports = deleteLogProperty
const getPropertyValue = require('./get-property-value')
const splitPropertyKey = require('./split-property-key')
/**
 * Deletes a specified property from a log object if it exists.
 * This function mutates the passed in `log` object.
 *
 * @param {object} log The log object to be modified.
 * @param {string} property A string identifying the property to be deleted from
 * the log object. Accepts nested properties delimited by a `.`
 * Delimiter can be escaped to preserve property names that contain the delimiter.
 * e.g. `'prop1.prop2'` or `'prop2\.domain\.corp.prop2'`
 */
function deleteLogProperty (log, property) {
  const segments = splitPropertyKey(property)
  const leafKey = segments.pop()
  // Walk down to the object that directly owns the property to delete.
  const parent = getPropertyValue(log, segments)
  /* istanbul ignore else */
  if (parent !== null && typeof parent === 'object' && Object.prototype.hasOwnProperty.call(parent, leafKey)) {
    delete parent[leafKey]
  }
}

View File

@@ -0,0 +1,31 @@
'use strict'
const tap = require('tap')
const { createCopier } = require('fast-copy')
const fastCopy = createCopier({})
const deleteLogProperty = require('./delete-log-property')
// Three-level nested fixture; each test deletes at a different depth.
const logData = {
  level: 30,
  data1: {
    data2: { 'data-3': 'bar' }
  }
}
tap.test('deleteLogProperty deletes property of depth 1', async t => {
  const log = fastCopy(logData)
  deleteLogProperty(log, 'data1')
  t.same(log, { level: 30 })
})
tap.test('deleteLogProperty deletes property of depth 2', async t => {
  const log = fastCopy(logData)
  deleteLogProperty(log, 'data1.data2')
  t.same(log, { level: 30, data1: { } })
})
tap.test('deleteLogProperty deletes property of depth 3', async t => {
  const log = fastCopy(logData)
  deleteLogProperty(log, 'data1.data2.data-3')
  t.same(log, { level: 30, data1: { data2: { } } })
})

View File

@@ -0,0 +1,45 @@
'use strict'
module.exports = filterLog
const { createCopier } = require('fast-copy')
const fastCopy = createCopier({})
const deleteLogProperty = require('./delete-log-property')
/**
* @typedef {object} FilterLogParams
* @property {object} log The log object to be modified.
* @property {PrettyContext} context The context object built from parsing
* the options.
*/
/**
 * Filter a log object by removing or including keys accordingly.
 * When `includeKeys` is passed, `ignoreKeys` will be ignored.
 * One of `ignoreKeys` or `includeKeys` must be passed in.
 *
 * @param {FilterLogParams} input
 *
 * @returns {object} A new object holding only the included keys, or a deep
 * copy of `log` with the ignored keys removed.
 */
function filterLog ({ log, context }) {
  const { ignoreKeys, includeKeys } = context
  const logCopy = fastCopy(log)
  if (includeKeys) {
    // Whitelist mode: build a fresh object holding only the requested keys.
    return includeKeys.reduce((subset, key) => {
      subset[key] = logCopy[key]
      return subset
    }, {})
  }
  // Blacklist mode: strip each ignored (possibly nested) key from the copy.
  for (const ignoreKey of ignoreKeys) {
    deleteLogProperty(logCopy, ignoreKey)
  }
  return logCopy
}

View File

@@ -0,0 +1,190 @@
'use strict'
const tap = require('tap')
const filterLog = require('./filter-log')
// Base context: both filters unset; individual tests override as needed.
const context = {
  includeKeys: undefined,
  ignoreKeys: undefined
}
const logData = {
  level: 30,
  time: 1522431328992,
  data1: {
    data2: { 'data-3': 'bar' },
    error: new Error('test')
  }
}
// Variant with a key containing the `.` delimiter, to exercise escaping.
const logData2 = Object.assign({
  'logging.domain.corp/operation': {
    id: 'foo',
    producer: 'bar'
  }
}, logData)
tap.test('#filterLog with an ignoreKeys option', t => {
  t.test('filterLog removes single entry', async t => {
    const result = filterLog({
      log: logData,
      context: {
        ...context,
        ignoreKeys: ['data1.data2.data-3']
      }
    })
    t.same(result, { level: 30, time: 1522431328992, data1: { data2: { }, error: new Error('test') } })
  })
  t.test('filterLog removes multiple entries', async t => {
    const result = filterLog({
      log: logData,
      context: {
        ...context,
        ignoreKeys: ['time', 'data1']
      }
    })
    t.same(result, { level: 30 })
  })
  // The deep copy must preserve Error instances by reference identity.
  t.test('filterLog keeps error instance', async t => {
    const result = filterLog({
      log: logData,
      context: {
        ...context,
        ignoreKeys: []
      }
    })
    t.equal(logData.data1.error, result.data1.error)
  })
  t.test('filterLog removes entry with escape sequence', async t => {
    const result = filterLog({
      log: logData2,
      context: {
        ...context,
        ignoreKeys: ['data1', 'logging\\.domain\\.corp/operation']
      }
    })
    t.same(result, { level: 30, time: 1522431328992 })
  })
  t.test('filterLog removes entry with escape sequence nested', async t => {
    const result = filterLog({
      log: logData2,
      context: {
        ...context,
        ignoreKeys: ['data1', 'logging\\.domain\\.corp/operation.producer']
      }
    })
    t.same(result, { level: 30, time: 1522431328992, 'logging.domain.corp/operation': { id: 'foo' } })
  })
  t.end()
})
// includeKeys must take precedence regardless of what ignoreKeys holds.
const ignoreKeysArray = [
  undefined,
  ['level'],
  ['level', 'data1.data2.data-3']
]
ignoreKeysArray.forEach(ignoreKeys => {
  tap.test(`#filterLog with an includeKeys option when the ignoreKeys being ${ignoreKeys}`, t => {
    t.test('filterLog include nothing', async t => {
      const result = filterLog({
        log: logData,
        context: {
          ...context,
          ignoreKeys,
          includeKeys: []
        }
      })
      t.same(result, {})
    })
    t.test('filterLog include single entry', async t => {
      const result = filterLog({
        log: logData,
        context: {
          ...context,
          ignoreKeys,
          includeKeys: ['time']
        }
      })
      t.same(result, { time: 1522431328992 })
    })
    t.test('filterLog include multiple entries', async t => {
      const result = filterLog({
        log: logData,
        context: {
          ...context,
          ignoreKeys,
          includeKeys: ['time', 'data1']
        }
      })
      t.same(result, {
        time: 1522431328992,
        data1: {
          data2: { 'data-3': 'bar' },
          error: new Error('test')
        }
      })
    })
    t.end()
  })
})
// fast-copy must handle self-referencing structures without blowing the stack.
tap.test('#filterLog with circular references', t => {
  const logData = {
    level: 30,
    time: 1522431328992,
    data1: 'test'
  }
  logData.circular = logData
  t.test('filterLog removes single entry', async t => {
    const result = filterLog({
      log: logData,
      context: {
        ...context,
        ignoreKeys: ['data1']
      }
    })
    t.same(result.circular.level, result.level)
    t.same(result.circular.time, result.time)
    delete result.circular
    t.same(result, { level: 30, time: 1522431328992 })
  })
  t.test('filterLog includes single entry', async t => {
    const result = filterLog({
      log: logData,
      context: {
        ...context,
        includeKeys: ['data1']
      }
    })
    t.same(result, { data1: 'test' })
  })
  t.test('filterLog includes circular keys', async t => {
    const result = filterLog({
      log: logData,
      context: {
        ...context,
        includeKeys: ['level', 'circular']
      }
    })
    t.same(result.circular.level, logData.level)
    t.same(result.circular.time, logData.time)
    delete result.circular
    t.same(result, { level: 30 })
  })
  t.end()
})

View File

@@ -0,0 +1,66 @@
'use strict'
module.exports = formatTime
const {
DATE_FORMAT,
DATE_FORMAT_SIMPLE
} = require('../constants')
const dateformat = require('dateformat')
const createDate = require('./create-date')
const isValidDate = require('./is-valid-date')
/**
 * Converts a given `epoch` to a desired display format.
 *
 * @param {number|string} epoch The time to convert. May be any value that is
 * valid for `new Date()`.
 * @param {boolean|string} [translateTime=false] When `false`, the given `epoch`
 * will simply be returned. When `true`, the given `epoch` will be converted
 * to a string at UTC using the `DATE_FORMAT_SIMPLE` constant. If
 * `translateTime` is a string, the following rules are available:
 *
 * - `<format string>`: The string is a literal format string. This format
 * string will be used to interpret the `epoch` and return a display string
 * at UTC.
 * - `SYS:STANDARD`: The returned display string will follow the `DATE_FORMAT`
 * constant at the system's local timezone.
 * - `SYS:<format string>`: The returned display string will follow the given
 * `<format string>` at the system's local timezone.
 * - `UTC:<format string>`: The returned display string will follow the given
 * `<format string>` at UTC.
 *
 * @returns {number|string} The formatted time, or the original `epoch` when
 * no translation was requested or the epoch does not parse to a valid Date.
 */
function formatTime (epoch, translateTime = false) {
  if (translateTime === false) {
    return epoch
  }

  const instant = createDate(epoch)

  // If the Date is invalid, do not attempt to format it.
  if (!isValidDate(instant)) {
    return epoch
  }

  if (translateTime === true) {
    return dateformat(instant, DATE_FORMAT_SIMPLE)
  }

  const upperFormat = translateTime.toUpperCase()
  if (upperFormat === 'SYS:STANDARD') {
    return dateformat(instant, DATE_FORMAT)
  }

  // Note: `substr` is deprecated; `slice` is the supported equivalent.
  const prefix = upperFormat.slice(0, 4)
  if (prefix === 'SYS:' || prefix === 'UTC:') {
    if (prefix === 'UTC:') {
      // dateformat understands a leading `UTC:` mask prefix natively,
      // so the full string can be passed through unchanged.
      return dateformat(instant, translateTime)
    }
    // Strip the `SYS:` prefix so the remaining mask is applied at local time.
    return dateformat(instant, translateTime.slice(4))
  }

  // No recognized prefix: interpret the whole string as a UTC format mask.
  return dateformat(instant, `UTC:${translateTime}`)
}

View File

@@ -0,0 +1,71 @@
'use strict'

// Pin the process timezone BEFORE formatTime runs so `SYS:`-prefixed masks
// produce deterministic output across environments.
process.env.TZ = 'UTC'

const tap = require('tap')
const formatTime = require('./format-time')

// 13:30 at -04:00 corresponds to 17:30 UTC, which the UTC expectations assert.
const dateStr = '2019-04-06T13:30:00.000-04:00'
const epoch = new Date(dateStr)
const epochMS = epoch.getTime()

tap.test('passes through epoch if `translateTime` is `false`', async t => {
  const formattedTime = formatTime(epochMS)
  t.equal(formattedTime, epochMS)
})

tap.test('passes through epoch if date is invalid', async t => {
  const input = 'this is not a date'
  const formattedTime = formatTime(input, true)
  t.equal(formattedTime, input)
})

tap.test('translates epoch milliseconds if `translateTime` is `true`', async t => {
  const formattedTime = formatTime(epochMS, true)
  t.equal(formattedTime, '17:30:00.000')
})

tap.test('translates epoch milliseconds to UTC string given format', async t => {
  const formattedTime = formatTime(epochMS, 'd mmm yyyy H:MM')
  t.equal(formattedTime, '6 Apr 2019 17:30')
})

// SYS: results depend on the local timezone, so only the shape is asserted.
tap.test('translates epoch milliseconds to SYS:STANDARD', async t => {
  const formattedTime = formatTime(epochMS, 'SYS:STANDARD')
  t.match(formattedTime, /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [-+]?\d{4}/)
})

tap.test('translates epoch milliseconds to SYS:<FORMAT>', async t => {
  const formattedTime = formatTime(epochMS, 'SYS:d mmm yyyy H:MM')
  t.match(formattedTime, /\d{1} \w{3} \d{4} \d{1,2}:\d{2}/)
})

tap.test('passes through date string if `translateTime` is `false`', async t => {
  const formattedTime = formatTime(dateStr)
  t.equal(formattedTime, dateStr)
})

tap.test('translates date string if `translateTime` is `true`', async t => {
  const formattedTime = formatTime(dateStr, true)
  t.equal(formattedTime, '17:30:00.000')
})

tap.test('translates date string to UTC string given format', async t => {
  const formattedTime = formatTime(dateStr, 'd mmm yyyy H:MM')
  t.equal(formattedTime, '6 Apr 2019 17:30')
})

tap.test('translates date string to SYS:STANDARD', async t => {
  const formattedTime = formatTime(dateStr, 'SYS:STANDARD')
  t.match(formattedTime, /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{3} [-+]?\d{4}/)
})

tap.test('translates date string to UTC:<FORMAT>', async t => {
  const formattedTime = formatTime(dateStr, 'UTC:d mmm yyyy H:MM')
  t.equal(formattedTime, '6 Apr 2019 17:30')
})

tap.test('translates date string to SYS:<FORMAT>', async t => {
  const formattedTime = formatTime(dateStr, 'SYS:d mmm yyyy H:MM')
  t.match(formattedTime, /\d{1} \w{3} \d{4} \d{1,2}:\d{2}/)
})

View File

@@ -0,0 +1,30 @@
'use strict'
module.exports = getPropertyValue
const splitPropertyKey = require('./split-property-key')
/**
 * Gets a specified property from an object if it exists.
 *
 * @param {object} obj The object to be searched.
 * @param {string|string[]} property A string, or an array of strings, identifying
 * the property to be retrieved from the object.
 * Accepts nested properties delimited by a `.`.
 * Delimiter can be escaped to preserve property names that contain the delimiter.
 * e.g. `'prop1.prop2'` or `'prop2\.domain\.corp.prop2'`.
 *
 * @returns {*} The resolved property value, or `undefined` when any segment
 * of the path is missing.
 */
function getPropertyValue (obj, property) {
  const props = Array.isArray(property) ? property : splitPropertyKey(property)

  for (const prop of props) {
    // Guard against traversing through `null`/`undefined` intermediates
    // (e.g. path `a.b` when `a` is `null`), which would otherwise make
    // `hasOwnProperty.call` throw a TypeError.
    if (obj == null || !Object.prototype.hasOwnProperty.call(obj, prop)) {
      return
    }
    obj = obj[prop]
  }

  return obj
}

View File

@@ -0,0 +1,31 @@
'use strict'

const tap = require('tap')
const getPropertyValue = require('./get-property-value')

// Covers both path spellings: dot-delimited strings (parsed via
// splitPropertyKey) and pre-split arrays of keys.
tap.test('getPropertyValue returns the value of the property', async t => {
  const result = getPropertyValue({
    foo: 'bar'
  }, 'foo')
  t.same(result, 'bar')
})

tap.test('getPropertyValue returns the value of the nested property', async t => {
  const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, 'extra.foo.value')
  t.same(result, 'bar')
})

tap.test('getPropertyValue returns the value of the nested property using the array of nested property keys', async t => {
  const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, ['extra', 'foo', 'value'])
  t.same(result, 'bar')
})

tap.test('getPropertyValue returns undefined for non-existing properties', async t => {
  const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, 'extra.foo.value-2')
  t.same(result, undefined)
})

tap.test('getPropertyValue returns undefined for non-existing properties using the array of nested property keys', async t => {
  const result = getPropertyValue({ extra: { foo: { value: 'bar' } } }, ['extra', 'foo', 'value-2'])
  t.same(result, undefined)
})

View File

@@ -0,0 +1,38 @@
'use strict'
module.exports = handleCustomLevelsNamesOpts
/**
 * Parse a CSV string or options object that maps level
 * labels to level values.
 *
 * @param {string|object} cLevels An object mapping level
 * names to level values, e.g. `{ info: 30, debug: 65 }`, or a
 * CSV string in the format `level_name:level_value`, e.g.
 * `info:30,debug:65`.
 *
 * @returns {object} An object mapping level names (lower-cased) to level
 * values, e.g. `{ info: 30, debug: 65 }`. Values parsed from a CSV string
 * remain strings; a missing value defaults to the entry's list position.
 */
function handleCustomLevelsNamesOpts (cLevels) {
  if (!cLevels) return {}

  if (typeof cLevels === 'string') {
    const mapping = {}
    cLevels.split(',').forEach((entry, position) => {
      const [levelName, levelNum = position] = entry.split(':')
      mapping[levelName.toLowerCase()] = levelNum
    })
    return mapping
  }

  if (Object.prototype.toString.call(cLevels) === '[object Object]') {
    const mapping = {}
    for (const levelName of Object.keys(cLevels)) {
      mapping[levelName.toLowerCase()] = cLevels[levelName]
    }
    return mapping
  }

  // Any other input type is silently ignored.
  return {}
}

View File

@@ -0,0 +1,44 @@
'use strict'

const tap = require('tap')
const handleCustomLevelsNamesOpts = require('./handle-custom-levels-names-opts')

// Note: `t.same` performs loose deep equality, so CSV-parsed string values
// (e.g. '10') compare equal to the numeric expectations below.
tap.test('returns a empty object `{}` for undefined parameter', async t => {
  const handledCustomLevelNames = handleCustomLevelsNamesOpts()
  t.same(handledCustomLevelNames, {})
})

tap.test('returns a empty object `{}` for unknown parameter', async t => {
  const handledCustomLevelNames = handleCustomLevelsNamesOpts(123)
  t.same(handledCustomLevelNames, {})
})

tap.test('returns a filled object for string parameter', async t => {
  const handledCustomLevelNames = handleCustomLevelsNamesOpts('ok:10,warn:20,error:35')
  t.same(handledCustomLevelNames, {
    ok: 10,
    warn: 20,
    error: 35
  })
})

tap.test('returns a filled object for object parameter', async t => {
  const handledCustomLevelNames = handleCustomLevelsNamesOpts({
    ok: 10,
    warn: 20,
    error: 35
  })
  t.same(handledCustomLevelNames, {
    ok: 10,
    warn: 20,
    error: 35
  })
})

// An entry without an explicit value falls back to its position in the list.
tap.test('defaults missing level num to first index', async t => {
  const result = handleCustomLevelsNamesOpts('ok:10,info')
  t.same(result, {
    ok: 10,
    info: 1
  })
})

View File

@@ -0,0 +1,39 @@
'use strict'
module.exports = handleCustomLevelsOpts
/**
 * Parse a CSV string or options object that specifies
 * configuration for custom levels.
 *
 * @param {string|object} cLevels An object mapping level
 * names to values, e.g. `{ info: 30, debug: 65 }`, or a
 * CSV string in the format `level_name:level_value`, e.g.
 * `info:30,debug:65`.
 *
 * @returns {object} An object mapping levels to upper-cased labels that
 * appear in logs, e.g. `{ '30': 'INFO', '65': 'DEBUG' }`, always including
 * a `default: 'USERLVL'` entry when input was recognized.
 */
function handleCustomLevelsOpts (cLevels) {
  if (!cLevels) return {}

  if (typeof cLevels === 'string') {
    const labels = { default: 'USERLVL' }
    cLevels.split(',').forEach((entry, position) => {
      // A missing value defaults to the entry's position in the list.
      const [levelName, levelNum = position] = entry.split(':')
      labels[levelNum] = levelName.toUpperCase()
    })
    return labels
  }

  if (Object.prototype.toString.call(cLevels) === '[object Object]') {
    const labels = { default: 'USERLVL' }
    for (const levelName of Object.keys(cLevels)) {
      labels[cLevels[levelName]] = levelName.toUpperCase()
    }
    return labels
  }

  // Any other input type is silently ignored.
  return {}
}

View File

@@ -0,0 +1,47 @@
'use strict'
const tap = require('tap')
const handleCustomLevelsOpts = require('./handle-custom-levels-opts')
tap.test('returns a empty object `{}` for undefined parameter', async t => {
const handledCustomLevel = handleCustomLevelsOpts()
t.same(handledCustomLevel, {})
})
tap.test('returns a empty object `{}` for unknown parameter', async t => {
const handledCustomLevel = handleCustomLevelsOpts(123)
t.same(handledCustomLevel, {})
})
tap.test('returns a filled object for string parameter', async t => {
const handledCustomLevel = handleCustomLevelsOpts('ok:10,warn:20,error:35')
t.same(handledCustomLevel, {
10: 'OK',
20: 'WARN',
35: 'ERROR',
default: 'USERLVL'
})
})
tap.test('returns a filled object for object parameter', async t => {
const handledCustomLevel = handleCustomLevelsOpts({
ok: 10,
warn: 20,
error: 35
})
t.same(handledCustomLevel, {
10: 'OK',
20: 'WARN',
35: 'ERROR',
default: 'USERLVL'
})
})
tap.test('defaults missing level num to first index', async t => {
const result = handleCustomLevelsOpts('ok:10,info')
t.same(result, {
10: 'OK',
1: 'INFO',
default: 'USERLVL'
})
})

99
backend/node_modules/pino-pretty/lib/utils/index.js generated vendored Normal file
View File

@@ -0,0 +1,99 @@
'use strict'
// Barrel export for every utility module in lib/utils. Keep the keys as the
// camelCase form of each kebab-case filename: lib/utils/index.test.js asserts
// that every non-test file here is exported under exactly that name, and
// that nothing else is exported.
module.exports = {
  buildSafeSonicBoom: require('./build-safe-sonic-boom.js'),
  createDate: require('./create-date.js'),
  deleteLogProperty: require('./delete-log-property.js'),
  filterLog: require('./filter-log.js'),
  formatTime: require('./format-time.js'),
  getPropertyValue: require('./get-property-value.js'),
  handleCustomLevelsNamesOpts: require('./handle-custom-levels-names-opts.js'),
  handleCustomLevelsOpts: require('./handle-custom-levels-opts.js'),
  interpretConditionals: require('./interpret-conditionals.js'),
  isObject: require('./is-object.js'),
  isValidDate: require('./is-valid-date.js'),
  joinLinesWithIndentation: require('./join-lines-with-indentation.js'),
  noop: require('./noop.js'),
  parseFactoryOptions: require('./parse-factory-options.js'),
  prettifyErrorLog: require('./prettify-error-log.js'),
  prettifyError: require('./prettify-error.js'),
  prettifyLevel: require('./prettify-level.js'),
  prettifyMessage: require('./prettify-message.js'),
  prettifyMetadata: require('./prettify-metadata.js'),
  prettifyObject: require('./prettify-object.js'),
  prettifyTime: require('./prettify-time.js'),
  splitPropertyKey: require('./split-property-key.js')
}
// The remainder of this file consists of jsdoc blocks that are difficult to
// determine a more appropriate "home" for. As an example, the blocks associated
// with custom prettifiers could live in either the `prettify-level`,
// `prettify-metadata`, or `prettify-time` files since they are the primary
// files where such code is used. But we want a central place to define common
// doc blocks, so we are picking this file as the answer.
/**
* A hash of log property names mapped to prettifier functions. When the
* incoming log data is being processed for prettification, any key on the log
* that matches a key in a custom prettifiers hash will be prettified using
* that matching custom prettifier. The value passed to the custom prettifier
* will the value associated with the corresponding log key.
*
* The hash may contain any arbitrary keys for arbitrary log properties, but it
* may also contain a set of predefined key names that map to well-known log
* properties. These keys are:
*
* + `time` (for the timestamp field)
* + `level` (for the level label field; value may be a level number instead
* of a level label)
* + `hostname`
* + `pid`
* + `name`
* + `caller`
*
* @typedef {Object.<string, CustomPrettifierFunc>} CustomPrettifiers
*/
/**
* A synchronous function to be used for prettifying a log property. It must
* return a string.
*
* @typedef {function} CustomPrettifierFunc
* @param {any} value The value to be prettified for the key associated with
* the prettifier.
* @returns {string}
*/
/**
* A tokenized string that indicates how the prettified log line should be
* formatted. Tokens are either log properties enclosed in curly braces, e.g.
* `{levelLabel}`, `{pid}`, or `{req.url}`, or conditional directives in curly
* braces. The only conditional directives supported are `if` and `end`, e.g.
* `{if pid}{pid}{end}`; every `if` must have a matching `end`. Nested
* conditions are not supported.
*
* @typedef {string} MessageFormatString
*
* @example
* `{levelLabel} - {if pid}{pid} - {end}url:{req.url}`
*/
/**
* A function that accepts a log object, name of the message key, and name of
* the level label key and returns a formatted log line.
*
* Note: this function must be synchronous.
*
* @typedef {function} MessageFormatFunction
* @param {object} log The log object to be processed.
* @param {string} messageKey The name of the key in the `log` object that
* contains the log message.
* @param {string} levelLabel The name of the key in the `log` object that
* contains the log level name.
* @returns {string}
*
* @example
* function (log, messageKey, levelLabel) {
* return `${log[levelLabel]} - ${log[messageKey]}`
* }
*/

View File

@@ -0,0 +1,37 @@
'use strict'
const tap = require('tap')
const index = require('./index.js')
const { readdirSync } = require('fs')
const { basename } = require('path')
tap.test(
'index exports exactly all non-test files excluding itself',
async t => {
// Read all files in the `util` directory
const files = readdirSync(__dirname)
for (const file of files) {
const kebabName = basename(file, '.js')
const snakeName = kebabName.split('-').map((part, idx) => {
if (idx === 0) return part
return part[0].toUpperCase() + part.slice(1)
}).join('')
if (file.endsWith('.test.js') === false && file !== 'index.js') {
// We expect all files to be exported except…
t.ok(index[snakeName], `exports ${snakeName}`)
} else {
// …test files and the index file itself those must not be exported
t.notOk(index[snakeName], `does not export ${snakeName}`)
}
// Remove the exported file from the index object
delete index[snakeName]
}
// Now the index is expected to be empty, as nothing else should be
// exported from it
t.same(index, {}, 'does not export anything else')
}
)

View File

@@ -0,0 +1,37 @@
'use strict'
module.exports = interpretConditionals
const getPropertyValue = require('./get-property-value')
/**
 * Translates all conditional blocks from within the messageFormat. Translates
 * any matching {if key}{key}{end} statements and returns everything between
 * if and else blocks if the key provided was found in log.
 *
 * @param {MessageFormatString|MessageFormatFunction} messageFormat A format
 * string or function that defines how the logged message should be
 * conditionally formatted.
 * @param {object} log The log object to be modified.
 *
 * @returns {string} The parsed messageFormat.
 */
function interpretConditionals (messageFormat, log) {
  // Replacement order matters:
  // 1) resolve every complete `{if key}...{end}` block,
  // 2) strip any `{if key}` left without a matching `{end}`,
  // 3) strip any orphaned `{end}` tokens.
  messageFormat = messageFormat.replace(/{if (.*?)}(.*?){end}/g, replacer)

  // Remove non-terminated if blocks
  messageFormat = messageFormat.replace(/{if (.*?)}/g, '')

  // Remove floating end blocks
  messageFormat = messageFormat.replace(/{end}/g, '')

  // Collapse every run of whitespace to a single space and trim the result.
  return messageFormat.replace(/\s+/g, ' ').trim()

  // Resolves one `{if key}value{end}` block: the block is kept (with `{key}`
  // substituted) only when the log property resolves to a truthy value AND
  // the block body actually references the condition key; otherwise the
  // entire block is removed. Note a falsy value (0, '', false) drops the block.
  function replacer (_, key, value) {
    const propertyValue = getPropertyValue(log, key)
    if (propertyValue && value.includes(key)) {
      return value.replace(new RegExp('{' + key + '}', 'g'), propertyValue)
    } else {
      return ''
    }
  }
}

View File

@@ -0,0 +1,69 @@
'use strict'

const tap = require('tap')
const { createCopier } = require('fast-copy')
const fastCopy = createCopier({})
const interpretConditionals = require('./interpret-conditionals')

// Each test deep-copies this fixture so no test can leak mutations into
// another.
const logData = {
  level: 30,
  data1: {
    data2: 'bar'
  },
  msg: 'foo'
}

tap.test('interpretConditionals translates if / else statement to found property value', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level} - {if data1.data2}{data1.data2}{end}', log), '{level} - bar')
})

tap.test('interpretConditionals translates if / else statement to found property value and leave unmatched property key untouched', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level} - {if data1.data2}{data1.data2} ({msg}){end}', log), '{level} - bar ({msg})')
})

tap.test('interpretConditionals removes non-terminated if statements', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level} - {if data1.data2}{data1.data2}', log), '{level} - {data1.data2}')
})

tap.test('interpretConditionals removes floating end statements', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level} - {data1.data2}{end}', log), '{level} - {data1.data2}')
})

tap.test('interpretConditionals removes floating end statements within translated if / end statements', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level} - {if msg}({msg}){end}{end}', log), '{level} - (foo)')
})

tap.test('interpretConditionals removes if / end blocks if existent condition key does not match existent property key', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level}{if msg}{data1.data2}{end}', log), '{level}')
})

tap.test('interpretConditionals removes if / end blocks if non-existent condition key does not match existent property key', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level}{if foo}{msg}{end}', log), '{level}')
})

tap.test('interpretConditionals removes if / end blocks if existent condition key does not match non-existent property key', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level}{if msg}{foo}{end}', log), '{level}')
})

tap.test('interpretConditionals removes if / end blocks if non-existent condition key does not match non-existent property key', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level}{if foo}{bar}{end}', log), '{level}')
})

tap.test('interpretConditionals removes if / end blocks if nested condition key does not match property key', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{level}{if data1.msg}{data1.data2}{end}', log), '{level}')
})

// Nested conditionals are unsupported: only the outer {if}…{end} pair is
// resolved, so the inner block's tokens survive as-is.
tap.test('interpretConditionals removes nested if / end statement blocks', async t => {
  const log = fastCopy(logData)
  t.equal(interpretConditionals('{if msg}{if data1.data2}{msg}{data1.data2}{end}{end}', log), 'foo{data1.data2}')
})

View File

@@ -0,0 +1,7 @@
'use strict'
module.exports = isObject
// True only for plain objects (`[object Object]`): arrays, null, functions,
// dates, and boxed primitives all report other toString tags.
function isObject (input) {
  return Object.prototype.toString.call(input) === '[object Object]'
}

View File

@@ -0,0 +1,10 @@
'use strict'

const tap = require('tap')
const isObject = require('./is-object')

// Only plain objects qualify; arrays and primitives do not.
tap.test('returns correct answer', async t => {
  t.equal(isObject({}), true)
  t.equal(isObject([]), false)
  t.equal(isObject(42), false)
})

View File

@@ -0,0 +1,14 @@
'use strict'
module.exports = isValidDate
/**
 * Checks if the argument is a JS Date and not 'Invalid Date'.
 *
 * @param {Date} date The date to check.
 *
 * @returns {boolean} `true` only when `date` is a real Date instance whose
 * time value is a number (i.e. not the 'Invalid Date' sentinel).
 */
function isValidDate (date) {
  if ((date instanceof Date) === false) {
    return false
  }
  // An invalid Date reports NaN from getTime().
  return Number.isNaN(date.getTime()) === false
}

View File

@@ -0,0 +1,16 @@
'use strict'
process.env.TZ = 'UTC'
const tap = require('tap')
const isValidDate = require('./is-valid-date')
tap.test('returns true for valid dates', async t => {
t.same(isValidDate(new Date()), true)
})
tap.test('returns false for non-dates and invalid dates', async t => {
t.plan(2)
t.same(isValidDate('20210621'), false)
t.same(isValidDate(new Date('2021-41-99')), false)
})

View File

@@ -0,0 +1,29 @@
'use strict'
module.exports = joinLinesWithIndentation
/**
 * @typedef {object} JoinLinesWithIndentationParams
 * @property {string} input The string to split and reformat.
 * @property {string} [ident] The indentation string. Default: `    ` (4 spaces).
 * @property {string} [eol] The end of line sequence to use when rejoining
 * the lines. Default: `'\n'`.
 */

/**
 * Given a string with line separators, either `\r\n` or `\n`, add indentation
 * to all lines subsequent to the first line and rejoin the lines using an
 * end of line sequence.
 *
 * @param {JoinLinesWithIndentationParams} input
 *
 * @returns {string} A string with lines subsequent to the first indented
 * with the given indentation sequence.
 */
function joinLinesWithIndentation ({ input, ident = '    ', eol = '\n' }) {
  // Split on both `\r\n` and `\n`, prefix every line after the first with
  // the indentation sequence, then rejoin with the requested EOL.
  const [firstLine, ...remainingLines] = input.split(/\r?\n/)
  return [firstLine, ...remainingLines.map(line => ident + line)].join(eol)
}

View File

@@ -0,0 +1,16 @@
'use strict'

const tap = require('tap')
const joinLinesWithIndentation = require('./join-lines-with-indentation')

tap.test('joinLinesWithIndentation adds indentation to beginning of subsequent lines', async t => {
  const input = 'foo\nbar\nbaz'
  const result = joinLinesWithIndentation({ input })
  t.equal(result, 'foo\n bar\n baz')
})

// Both `\n` and `\r\n` separators are recognized on input; output always
// uses the supplied `eol`.
tap.test('joinLinesWithIndentation accepts custom indentation, line breaks, and eol', async t => {
  const input = 'foo\nbar\r\nbaz'
  const result = joinLinesWithIndentation({ input, ident: ' ', eol: '^' })
  t.equal(result, 'foo^ bar^ baz')
})

3
backend/node_modules/pino-pretty/lib/utils/noop.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
'use strict'
// A shared do-nothing function, used wherever an optional callback or hook
// must be callable but has no work to perform. Always returns `undefined`.
module.exports = function noop () {}

View File

@@ -0,0 +1,12 @@
'use strict'
const tap = require('tap')
const noop = require('./noop')
tap.test('is a function', async t => {
t.type(noop, Function)
})
tap.test('does nothing', async t => {
t.equal(noop('stuff'), undefined)
})

View File

@@ -0,0 +1,153 @@
'use strict'
module.exports = parseFactoryOptions
const {
LEVEL_NAMES
} = require('../constants')
const colors = require('../colors')
const handleCustomLevelsOpts = require('./handle-custom-levels-opts')
const handleCustomLevelsNamesOpts = require('./handle-custom-levels-names-opts')
/**
* A `PrettyContext` is an object to be used by the various functions that
* process log data. It is derived from the provided {@link PinoPrettyOptions}.
* It may be used as a `this` context.
*
* @typedef {object} PrettyContext
* @property {string} EOL The escape sequence chosen as the line terminator.
* @property {string} IDENT The string to use as the indentation sequence.
* @property {ColorizerFunc} colorizer A configured colorizer function.
* @property {Array[Array<number, string>]} customColors A set of custom color
* names associated with level numbers.
* @property {object} customLevelNames A hash of level numbers to level names,
* e.g. `{ 30: "info" }`.
* @property {object} customLevels A hash of level names to level numbers,
* e.g. `{ info: 30 }`.
* @property {CustomPrettifiers} customPrettifiers A hash of custom prettifier
* functions.
* @property {object} customProperties Comprised of `customLevels` and
* `customLevelNames` if such options are provided.
* @property {string[]} errorLikeObjectKeys The key names in the log data that
* should be considered as holding error objects.
* @property {string[]} errorProps A list of error object keys that should be
* included in the output.
* @property {boolean} hideObject Indicates the prettifier should omit objects
* in the output.
* @property {string[]} ignoreKeys Set of log data keys to omit.
* @property {string[]} includeKeys Opposite of `ignoreKeys`.
* @property {boolean} levelFirst Indicates the level should be printed first.
 * @property {string} levelKey Name of the key in the log data that contains
 * the level of the logged message.
* @property {string} levelLabel Format token to represent the position of the
* level name in the output string.
* @property {MessageFormatString|MessageFormatFunction} messageFormat
* @property {string} messageKey Name of the key in the log data that contains
* the message.
* @property {string|number} minimumLevel The minimum log level to process
* and output.
* @property {ColorizerFunc} objectColorizer
* @property {boolean} singleLine Indicates objects should be printed on a
* single output line.
* @property {string} timestampKey The name of the key in the log data that
* contains the log timestamp.
* @property {boolean} translateTime Indicates if timestamps should be
* translated to a human-readable string.
* @property {boolean} useOnlyCustomProps
*/
/**
 * Builds the {@link PrettyContext} consumed by the prettification pipeline
 * from the user supplied options.
 *
 * @param {PinoPrettyOptions} options The user supplied object of options.
 *
 * @returns {PrettyContext}
 */
function parseFactoryOptions (options) {
  // Line terminator: CRLF only when explicitly requested.
  const EOL = options.crlf ? '\r\n' : '\n'
  // NOTE(review): the indentation sequence — upstream documents this as four
  // spaces; confirm the literal survived copy/paste intact.
  const IDENT = ' '

  // Options forwarded to the context unchanged.
  const {
    customPrettifiers,
    errorLikeObjectKeys,
    hideObject,
    levelFirst,
    levelKey,
    levelLabel,
    messageFormat,
    messageKey,
    minimumLevel,
    singleLine,
    timestampKey,
    translateTime
  } = options

  const errorProps = options.errorProps.split(',')

  // Accept either a real boolean or the string 'true' (e.g. from CLI flags).
  const useOnlyCustomProps = typeof options.useOnlyCustomProps === 'boolean'
    ? options.useOnlyCustomProps
    : (options.useOnlyCustomProps === 'true')

  const customLevels = handleCustomLevelsOpts(options.customLevels)
  const customLevelNames = handleCustomLevelsNamesOpts(options.customLevels)

  let customColors
  if (options.customColors) {
    // `customColors` is a CSV of `level:color` pairs. Each level token is
    // resolved to a numeric level where possible; otherwise the raw token
    // itself is used as the color index.
    customColors = options.customColors.split(',').reduce((agg, value) => {
      const [level, color] = value.split(':')

      // When only custom props are honored, resolution depends on whether any
      // customLevels were supplied at all; otherwise on whether this specific
      // level name was parsed from them.
      const condition = useOnlyCustomProps
        ? options.customLevels
        : customLevelNames[level] !== undefined
      const levelNum = condition
        ? customLevelNames[level]
        : LEVEL_NAMES[level]
      const colorIdx = levelNum !== undefined
        ? levelNum
        : level

      agg.push([colorIdx, color])

      return agg
    }, [])
  }

  const customProperties = { customLevels, customLevelNames }
  if (useOnlyCustomProps === true && !options.customLevels) {
    // Custom-only mode without any custom levels supplied: explicitly
    // disable custom level handling.
    customProperties.customLevels = undefined
    customProperties.customLevelNames = undefined
  }

  // `include` takes precedence over `ignore`: ignoreKeys is only built when
  // no include list was given.
  const includeKeys = options.include !== undefined
    ? new Set(options.include.split(','))
    : undefined
  const ignoreKeys = (!includeKeys && options.ignore)
    ? new Set(options.ignore.split(','))
    : undefined

  const colorizer = colors(options.colorize, customColors, useOnlyCustomProps)
  // Objects are only colorized when explicitly enabled; otherwise a plain
  // (non-coloring) colorizer is used for them.
  const objectColorizer = options.colorizeObjects
    ? colorizer
    : colors(false, [], false)

  return {
    EOL,
    IDENT,
    colorizer,
    customColors,
    customLevelNames,
    customLevels,
    customPrettifiers,
    customProperties,
    errorLikeObjectKeys,
    errorProps,
    hideObject,
    ignoreKeys,
    includeKeys,
    levelFirst,
    levelKey,
    levelLabel,
    messageFormat,
    messageKey,
    minimumLevel,
    objectColorizer,
    singleLine,
    timestampKey,
    translateTime,
    useOnlyCustomProps
  }
}

View File

@@ -0,0 +1,73 @@
'use strict'
module.exports = prettifyErrorLog
const {
LOGGER_KEYS
} = require('../constants')
const isObject = require('./is-object')
const joinLinesWithIndentation = require('./join-lines-with-indentation')
const prettifyObject = require('./prettify-object')
/**
* @typedef {object} PrettifyErrorLogParams
* @property {object} log The error log to prettify.
* @property {PrettyContext} context The context object built from parsing
* the options.
*/
/**
* Given a log object that has a `type: 'Error'` key, prettify the object and
 * return the result. In other words, render the error as an indented stack
 * trace followed by any requested sibling error properties.
*
* @param {PrettifyErrorLogParams} input
*
* @returns {string} A string that represents the prettified error log.
*/
function prettifyErrorLog ({ log, context }) {
  const {
    EOL: eol,
    IDENT: ident,
    errorProps: errorProperties,
    messageKey
  } = context

  // The output always begins with the indented, re-joined stack trace.
  const stack = log.stack
  const joinedLines = joinLinesWithIndentation({ input: stack, ident, eol })
  let result = `${ident}${joinedLines}${eol}`

  if (errorProperties.length > 0) {
    // Standard logger keys plus the message/type/stack fields are never
    // re-printed as sibling properties.
    const excludeProperties = LOGGER_KEYS.concat(messageKey, 'type', 'stack')
    let propertiesToPrint
    if (errorProperties[0] === '*') {
      // Print all sibling properties except for the standard exclusions.
      propertiesToPrint = Object.keys(log).filter(k => excludeProperties.includes(k) === false)
    } else {
      // Print only specified properties unless the property is a standard exclusion.
      propertiesToPrint = errorProperties.filter(k => excludeProperties.includes(k) === false)
    }

    for (let i = 0; i < propertiesToPrint.length; i += 1) {
      const key = propertiesToPrint[i]
      // Requested properties absent from the log are silently skipped.
      if (key in log === false) continue
      if (isObject(log[key])) {
        // The nested object may have "logger" type keys but since they are not
        // at the root level of the object being processed, we want to print them.
        // Thus, we invoke with `excludeLoggerKeys: false`.
        const prettifiedObject = prettifyObject({
          log: log[key],
          excludeLoggerKeys: false,
          context: {
            ...context,
            // Nested objects render one indentation level deeper.
            IDENT: ident + ident
          }
        })
        result = `${result}${ident}${key}: {${eol}${prettifiedObject}${ident}}${eol}`
        continue
      }
      // Scalars are rendered unquoted via default string coercion.
      result = `${result}${ident}${key}: ${log[key]}${eol}`
    }
  }

  return result
}

View File

@@ -0,0 +1,110 @@
'use strict'

const tap = require('tap')
const prettifyErrorLog = require('./prettify-error-log')
const {
  ERROR_LIKE_KEYS,
  MESSAGE_KEY
} = require('../constants')

// Minimal PrettyContext shared by every test; individual tests override
// single fields via spread.
const context = {
  EOL: '\n',
  IDENT: ' ',
  customPrettifiers: {},
  errorLikeObjectKeys: ERROR_LIKE_KEYS,
  errorProps: [],
  messageKey: MESSAGE_KEY
}

tap.test('returns string with default settings', async t => {
  const err = Error('Something went wrong')
  const str = prettifyErrorLog({ log: err, context })
  t.ok(str.startsWith(' Error: Something went wrong'))
})

tap.test('returns string with custom ident', async t => {
  const err = Error('Something went wrong')
  const str = prettifyErrorLog({
    log: err,
    context: {
      ...context,
      IDENT: ' '
    }
  })
  t.ok(str.startsWith(' Error: Something went wrong'))
})

tap.test('returns string with custom eol', async t => {
  const err = Error('Something went wrong')
  const str = prettifyErrorLog({
    log: err,
    context: {
      ...context,
      EOL: '\r\n'
    }
  })
  t.ok(str.startsWith(' Error: Something went wrong\r\n'))
})

tap.test('errorProperties', t => {
  // Sibling scalars are rendered unquoted (`foo: foo`), so the JSON-quoted
  // form must never appear.
  t.test('excludes all for wildcard', async t => {
    const err = Error('boom')
    err.foo = 'foo'
    const str = prettifyErrorLog({
      log: err,
      context: {
        ...context,
        errorProps: ['*']
      }
    })
    t.ok(str.startsWith(' Error: boom'))
    t.equal(str.includes('foo: "foo"'), false)
  })

  t.test('excludes only selected properties', async t => {
    const err = Error('boom')
    err.foo = 'foo'
    const str = prettifyErrorLog({
      log: err,
      context: {
        ...context,
        errorProps: ['foo']
      }
    })
    t.ok(str.startsWith(' Error: boom'))
    t.equal(str.includes('foo: foo'), true)
  })

  // Requested properties missing from the error are skipped, not rendered.
  t.test('ignores specified properties if not present', async t => {
    const err = Error('boom')
    err.foo = 'foo'
    const str = prettifyErrorLog({
      log: err,
      context: {
        ...context,
        errorProps: ['foo', 'bar']
      }
    })
    t.ok(str.startsWith(' Error: boom'))
    t.equal(str.includes('foo: foo'), true)
    t.equal(str.includes('bar'), false)
  })

  // Nested objects go through prettifyObject, which quotes string values —
  // including keys like `message` that would be excluded at the root level.
  t.test('processes nested objects', async t => {
    const err = Error('boom')
    err.foo = { bar: 'bar', message: 'included' }
    const str = prettifyErrorLog({
      log: err,
      context: {
        ...context,
        errorProps: ['foo']
      }
    })
    t.ok(str.startsWith(' Error: boom'))
    t.equal(str.includes('foo: {'), true)
    t.equal(str.includes('bar: "bar"'), true)
    t.equal(str.includes('message: "included"'), true)
  })

  t.end()
})

View File

@@ -0,0 +1,49 @@
'use strict'
module.exports = prettifyError
const joinLinesWithIndentation = require('./join-lines-with-indentation')
/**
* @typedef {object} PrettifyErrorParams
* @property {string} keyName The key assigned to this error in the log object.
* @property {string} lines The STRINGIFIED error. If the error field has a
* custom prettifier, that should be pre-applied as well.
* @property {string} ident The indentation sequence to use.
* @property {string} eol The EOL sequence to use.
*/
/**
 * Prettifies a stringified error into a multi-line format. Lines matching a
 * JSON `"stack"` entry have their embedded stack string parsed and re-flowed
 * onto indented lines of their own.
 *
 * @param {PrettifyErrorParams} input
 *
 * @returns {string}
 */
function prettifyError ({ keyName, lines, eol, ident }) {
  const joined = joinLinesWithIndentation({ input: lines, ident, eol })
  const rendered = `${ident}${keyName}: ${joined}${eol}`.split(eol).map((line) => {
    // Lines other than the stack entry pass through untouched.
    if (/^\s*"stack"/.test(line) === false) return line
    const matches = /^(\s*"stack":)\s*(".*"),?$/.exec(line)
    /* istanbul ignore else */
    if (matches === null || matches.length !== 3) return line
    // Indent the unpacked stack 4 spaces past the `"stack":` key itself.
    const padding = ' '.repeat(/^\s*/.exec(line)[0].length + 4)
    // The captured group is a JSON string literal; parse it to recover the
    // raw stack text, then re-indent each of its newlines.
    const stack = JSON.parse(matches[2]).replace(/\n/g, eol + padding)
    return matches[1] + eol + padding + stack
  })
  return rendered.join(eol)
}

View File

@@ -0,0 +1,14 @@
'use strict'
// Smoke test for prettifyError: a stringified Error should render with its
// key name, an expanded "stack" block, and the original message.
const tap = require('tap')
const stringifySafe = require('fast-safe-stringify')
const prettifyError = require('./prettify-error')
tap.test('prettifies error', t => {
  const error = Error('Bad error!')
  const lines = stringifySafe(error, Object.getOwnPropertyNames(error), 2)
  const prettyError = prettifyError({ keyName: 'errorKey', lines, ident: ' ', eol: '\n' })
  t.match(prettyError, /\s*errorKey: {\n\s*"stack":[\s\S]*"message": "Bad error!"/)
  t.end()
})

View File

@@ -0,0 +1,35 @@
'use strict'
module.exports = prettifyLevel
const getPropertyValue = require('./get-property-value')
/**
* @typedef {object} PrettifyLevelParams
* @property {object} log The log object.
* @property {PrettyContext} context The context object built from parsing
* the options.
*/
/**
 * Checks if the passed in log has a `level` value and returns a prettified
 * string for that level if so.
 *
 * @param {PrettifyLevelParams} input
 *
 * @returns {undefined|string} If `log` does not have a `level` property then
 * `undefined` will be returned. Otherwise, a string from the specified
 * `colorizer` is returned.
 */
function prettifyLevel ({ log, context }) {
  const { colorizer, customLevels, customLevelNames, levelKey } = context
  // A user-supplied `level` prettifier takes precedence over the colorizer.
  const levelPrettifier = context.customPrettifiers?.level
  const value = getPropertyValue(log, levelKey)
  if (value === undefined) return undefined
  if (levelPrettifier) return levelPrettifier(value)
  return colorizer(value, { customLevels, customLevelNames })
}

View File

@@ -0,0 +1,68 @@
'use strict'
// Tests for prettifyLevel: missing level, default (non-color) output,
// colorized output, and the custom `level` prettifier hook.
const tap = require('tap')
const prettifyLevel = require('./prettify-level')
const getColorizer = require('../colors')
const {
  LEVEL_KEY
} = require('../constants')
const context = {
  colorizer: getColorizer(),
  customLevelNames: undefined,
  customLevels: undefined,
  levelKey: LEVEL_KEY,
  customPrettifiers: undefined
}
tap.test('returns `undefined` for unknown level', async t => {
  const colorized = prettifyLevel({
    log: {},
    context: {
      ...context
    }
  })
  t.equal(colorized, undefined)
})
tap.test('returns non-colorized value for default colorizer', async t => {
  const log = {
    level: 30
  }
  const colorized = prettifyLevel({
    log,
    context: {
      ...context
    }
  })
  t.equal(colorized, 'INFO')
})
tap.test('returns colorized value for color colorizer', async t => {
  const log = {
    level: 30
  }
  const colorizer = getColorizer(true)
  const colorized = prettifyLevel({
    log,
    context: {
      ...context,
      colorizer
    }
  })
  t.equal(colorized, '\u001B[32mINFO\u001B[39m')
})
tap.test('passes output through provided prettifier', async t => {
  const log = {
    level: 30
  }
  const colorized = prettifyLevel({
    log,
    context: {
      ...context,
      customPrettifiers: { level () { return 'modified' } }
    }
  })
  t.equal(colorized, 'modified')
})

View File

@@ -0,0 +1,63 @@
'use strict'
module.exports = prettifyMessage
const {
LEVELS
} = require('../constants')
const getPropertyValue = require('./get-property-value')
const interpretConditionals = require('./interpret-conditionals')
/**
* @typedef {object} PrettifyMessageParams
* @property {object} log The log object with the message to colorize.
* @property {PrettyContext} context The context object built from parsing
* the options.
*/
/**
 * Prettifies a message string if the given `log` has a message property.
 *
 * @param {PrettifyMessageParams} input
 *
 * @returns {undefined|string} If the message key is not found, or the message
 * key is not a string, then `undefined` will be returned. Otherwise, a string
 * that is the prettified message.
 */
function prettifyMessage ({ log, context }) {
  const {
    colorizer,
    customLevels,
    levelKey,
    levelLabel,
    messageFormat,
    messageKey,
    useOnlyCustomProps
  } = context
  // A string `messageFormat` is a template such as `{levelLabel} - {msg}`.
  if (messageFormat && typeof messageFormat === 'string') {
    const parsedMessageFormat = interpretConditionals(messageFormat, log)
    const message = String(parsedMessageFormat).replace(
      /{([^{}]+)}/g,
      function (match, p1) {
        // return log level as string instead of int
        let level
        if (p1 === levelLabel && (level = getPropertyValue(log, levelKey)) !== undefined) {
          // Use `?.` so a missing `customLevels` map falls back to the
          // built-in LEVELS table instead of throwing a TypeError.
          const condition = useOnlyCustomProps ? customLevels === undefined : customLevels?.[level] === undefined
          return condition ? LEVELS[level] : customLevels[level]
        }
        // Parse nested key access, e.g. `{keyA.subKeyB}`.
        return getPropertyValue(log, p1) || ''
      })
    return colorizer.message(message)
  }
  // A function `messageFormat` builds the message itself; it receives the
  // whole log plus the configured message key and level label key.
  if (messageFormat && typeof messageFormat === 'function') {
    const msg = messageFormat(log, messageKey, levelLabel)
    return colorizer.message(msg)
  }
  // Without a format, only plain scalar messages are rendered.
  if (messageKey in log === false) return undefined
  if (typeof log[messageKey] !== 'string' && typeof log[messageKey] !== 'number' && typeof log[messageKey] !== 'boolean') return undefined
  return colorizer.message(log[messageKey])
}

View File

@@ -0,0 +1,187 @@
'use strict'
// Tests for prettifyMessage: message-key lookup, colorization, string and
// function `messageFormat` templates, custom levels, nested keys, and
// conditional `{if ...}{end}` blocks.
const tap = require('tap')
const prettifyMessage = require('./prettify-message')
const getColorizer = require('../colors')
const {
  LEVEL_KEY,
  LEVEL_LABEL
} = require('../constants')
const context = {
  colorizer: getColorizer(),
  levelKey: LEVEL_KEY,
  levelLabel: LEVEL_LABEL,
  messageKey: 'msg'
}
tap.test('returns `undefined` if `messageKey` not found', async t => {
  const str = prettifyMessage({ log: {}, context })
  t.equal(str, undefined)
})
tap.test('returns `undefined` if `messageKey` not string', async t => {
  const str = prettifyMessage({ log: { msg: {} }, context })
  t.equal(str, undefined)
})
tap.test('returns non-colorized value for default colorizer', async t => {
  const colorizer = getColorizer()
  const str = prettifyMessage({
    log: { msg: 'foo' },
    context: { ...context, colorizer }
  })
  t.equal(str, 'foo')
})
tap.test('returns non-colorized value for alternate `messageKey`', async t => {
  const str = prettifyMessage({
    log: { message: 'foo' },
    context: { ...context, messageKey: 'message' }
  })
  t.equal(str, 'foo')
})
tap.test('returns colorized value for color colorizer', async t => {
  const colorizer = getColorizer(true)
  const str = prettifyMessage({
    log: { msg: 'foo' },
    context: { ...context, colorizer }
  })
  t.equal(str, '\u001B[36mfoo\u001B[39m')
})
tap.test('returns colorized value for color colorizer for alternate `messageKey`', async t => {
  const colorizer = getColorizer(true)
  const str = prettifyMessage({
    log: { message: 'foo' },
    context: { ...context, messageKey: 'message', colorizer }
  })
  t.equal(str, '\u001B[36mfoo\u001B[39m')
})
tap.test('returns message formatted by `messageFormat` option', async t => {
  const str = prettifyMessage({
    log: { msg: 'foo', context: 'appModule' },
    context: { ...context, messageFormat: '{context} - {msg}' }
  })
  t.equal(str, 'appModule - foo')
})
tap.test('returns message formatted by `messageFormat` option - missing prop', async t => {
  const str = prettifyMessage({
    log: { context: 'appModule' },
    context: { ...context, messageFormat: '{context} - {msg}' }
  })
  t.equal(str, 'appModule - ')
})
tap.test('returns message formatted by `messageFormat` option - levelLabel & useOnlyCustomProps false', async t => {
  const str = prettifyMessage({
    log: { msg: 'foo', context: 'appModule', level: 30 },
    context: {
      ...context,
      messageFormat: '[{level}] {levelLabel} {context} - {msg}',
      customLevels: {}
    }
  })
  t.equal(str, '[30] INFO appModule - foo')
})
tap.test('returns message formatted by `messageFormat` option - levelLabel & useOnlyCustomProps true', async t => {
  const str = prettifyMessage({
    log: { msg: 'foo', context: 'appModule', level: 30 },
    context: {
      ...context,
      messageFormat: '[{level}] {levelLabel} {context} - {msg}',
      customLevels: { 30: 'CHECK' },
      useOnlyCustomProps: true
    }
  })
  t.equal(str, '[30] CHECK appModule - foo')
})
tap.test('returns message formatted by `messageFormat` option - levelLabel & customLevels', async t => {
  const str = prettifyMessage({
    log: { msg: 'foo', context: 'appModule', level: 123 },
    context: {
      ...context,
      messageFormat: '[{level}] {levelLabel} {context} - {msg}',
      customLevels: { 123: 'CUSTOM' }
    }
  })
  t.equal(str, '[123] CUSTOM appModule - foo')
})
tap.test('returns message formatted by `messageFormat` option - levelLabel, customLevels & useOnlyCustomProps', async t => {
  const str = prettifyMessage({
    log: { msg: 'foo', context: 'appModule', level: 123 },
    context: {
      ...context,
      messageFormat: '[{level}] {levelLabel} {context} - {msg}',
      customLevels: { 123: 'CUSTOM' },
      useOnlyCustomProps: true
    }
  })
  t.equal(str, '[123] CUSTOM appModule - foo')
})
tap.test('returns message formatted by `messageFormat` option - levelLabel, customLevels & useOnlyCustomProps false', async t => {
  const str = prettifyMessage({
    log: { msg: 'foo', context: 'appModule', level: 40 },
    context: {
      ...context,
      messageFormat: '[{level}] {levelLabel} {context} - {msg}',
      customLevels: { 123: 'CUSTOM' },
      useOnlyCustomProps: false
    }
  })
  t.equal(str, '[40] WARN appModule - foo')
})
tap.test('`messageFormat` supports nested curly brackets', async t => {
  const str = prettifyMessage({
    log: { level: 30 },
    context: {
      ...context,
      messageFormat: '{{level}}-{level}-{{level}-{level}}'
    }
  })
  t.equal(str, '{30}-30-{30-30}')
})
tap.test('`messageFormat` supports nested object', async t => {
  const str = prettifyMessage({
    log: { level: 30, request: { url: 'localhost/test' }, msg: 'foo' },
    context: {
      ...context,
      messageFormat: '{request.url} - param: {request.params.process} - {msg}'
    }
  })
  t.equal(str, 'localhost/test - param: - foo')
})
tap.test('`messageFormat` supports conditional blocks', async t => {
  const str = prettifyMessage({
    log: { level: 30, req: { id: 'foo' } },
    context: {
      ...context,
      messageFormat: '{level} | {if req.id}({req.id}){end}{if msg}{msg}{end}'
    }
  })
  t.equal(str, '30 | (foo)')
})
tap.test('`messageFormat` supports function definition', async t => {
  const str = prettifyMessage({
    log: { level: 30, request: { url: 'localhost/test' }, msg: 'incoming request' },
    context: {
      ...context,
      messageFormat: (log, messageKey, levelLabel) => {
        let msg = log[messageKey]
        if (msg === 'incoming request') msg = `--> ${log.request.url}`
        return msg
      }
    }
  })
  t.equal(str, '--> localhost/test')
})

View File

@@ -0,0 +1,61 @@
'use strict'
module.exports = prettifyMetadata
/**
* @typedef {object} PrettifyMetadataParams
* @property {object} log The log that may or may not contain metadata to
* be prettified.
* @property {PrettyContext} context The context object built from parsing
* the options.
*/
/**
 * Prettifies metadata that is usually present in a Pino log line. It looks
 * for the fields `name`, `pid`, `hostname`, and `caller` and builds a
 * formatted string from whichever ones are present, e.g.
 * `(name/pid on hostname) <caller>`.
 *
 * @param {PrettifyMetadataParams} input
 *
 * @returns {undefined|string} If no metadata is found then `undefined` is
 * returned. Otherwise, a string of prettified metadata is returned.
 */
function prettifyMetadata ({ log, context }) {
  const prettifiers = context.customPrettifiers
  // Apply the field's custom prettifier when one is configured.
  const render = (key) => (prettifiers[key] ? prettifiers[key](log[key]) : log[key])
  const segments = []
  if (log.name || log.pid || log.hostname) {
    let inner = ''
    if (log.name) {
      inner += render('name')
    }
    if (log.pid) {
      // Separate name and pid with a slash when both are present.
      inner += (log.name ? '/' : '') + render('pid')
    }
    if (log.hostname) {
      // No leading space when `name` and `pid` contributed nothing.
      inner += `${inner === '' ? 'on' : ' on'} ${render('hostname')}`
    }
    segments.push(`(${inner})`)
  }
  if (log.caller) {
    segments.push(`<${render('caller')}>`)
  }
  if (segments.length === 0) {
    return undefined
  }
  return segments.join(' ')
}

View File

@@ -0,0 +1,111 @@
'use strict'
// Tests for prettifyMetadata: every combination of `name`, `pid`,
// `hostname`, and `caller`, plus the custom prettifier hooks.
const tap = require('tap')
const prettifyMetadata = require('./prettify-metadata')
const context = {
  customPrettifiers: {}
}
tap.test('returns `undefined` if no metadata present', async t => {
  const str = prettifyMetadata({ log: {}, context })
  t.equal(str, undefined)
})
tap.test('works with only `name` present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo' }, context })
  t.equal(str, '(foo)')
})
tap.test('works with only `pid` present', async t => {
  const str = prettifyMetadata({ log: { pid: '1234' }, context })
  t.equal(str, '(1234)')
})
tap.test('works with only `hostname` present', async t => {
  const str = prettifyMetadata({ log: { hostname: 'bar' }, context })
  t.equal(str, '(on bar)')
})
tap.test('works with only `name` & `pid` present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo', pid: '1234' }, context })
  t.equal(str, '(foo/1234)')
})
tap.test('works with only `name` & `hostname` present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo', hostname: 'bar' }, context })
  t.equal(str, '(foo on bar)')
})
tap.test('works with only `pid` & `hostname` present', async t => {
  const str = prettifyMetadata({ log: { pid: '1234', hostname: 'bar' }, context })
  t.equal(str, '(1234 on bar)')
})
tap.test('works with only `name`, `pid`, & `hostname` present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo', pid: '1234', hostname: 'bar' }, context })
  t.equal(str, '(foo/1234 on bar)')
})
tap.test('works with only `name` & `caller` present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo', caller: 'baz' }, context })
  t.equal(str, '(foo) <baz>')
})
tap.test('works with only `pid` & `caller` present', async t => {
  const str = prettifyMetadata({ log: { pid: '1234', caller: 'baz' }, context })
  t.equal(str, '(1234) <baz>')
})
tap.test('works with only `hostname` & `caller` present', async t => {
  const str = prettifyMetadata({ log: { hostname: 'bar', caller: 'baz' }, context })
  t.equal(str, '(on bar) <baz>')
})
tap.test('works with only `name`, `pid`, & `caller` present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo', pid: '1234', caller: 'baz' }, context })
  t.equal(str, '(foo/1234) <baz>')
})
tap.test('works with only `name`, `hostname`, & `caller` present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo', hostname: 'bar', caller: 'baz' }, context })
  t.equal(str, '(foo on bar) <baz>')
})
tap.test('works with only `caller` present', async t => {
  const str = prettifyMetadata({ log: { caller: 'baz' }, context })
  t.equal(str, '<baz>')
})
tap.test('works with only `pid`, `hostname`, & `caller` present', async t => {
  const str = prettifyMetadata({ log: { pid: '1234', hostname: 'bar', caller: 'baz' }, context })
  t.equal(str, '(1234 on bar) <baz>')
})
tap.test('works with all four present', async t => {
  const str = prettifyMetadata({ log: { name: 'foo', pid: '1234', hostname: 'bar', caller: 'baz' }, context })
  t.equal(str, '(foo/1234 on bar) <baz>')
})
tap.test('uses prettifiers from passed prettifiers object', async t => {
  const prettifiers = {
    name (input) {
      return input.toUpperCase()
    },
    pid (input) {
      return input + '__'
    },
    hostname (input) {
      return input.toUpperCase()
    },
    caller (input) {
      return input.toUpperCase()
    }
  }
  const str = prettifyMetadata({
    log: { pid: '1234', hostname: 'bar', caller: 'baz', name: 'joe' },
    context: {
      customPrettifiers: prettifiers
    }
  })
  t.equal(str, '(JOE/1234__ on BAR) <BAZ>')
})

View File

@@ -0,0 +1,111 @@
'use strict'
module.exports = prettifyObject
const {
LOGGER_KEYS
} = require('../constants')
const stringifySafe = require('fast-safe-stringify')
const joinLinesWithIndentation = require('./join-lines-with-indentation')
const prettifyError = require('./prettify-error')
/**
 * @typedef {object} PrettifyObjectParams
 * @property {object} log The object to prettify.
 * @property {boolean} [excludeLoggerKeys] Indicates if known logger specific
 * keys should be excluded from prettification. Default: `true`.
 * @property {string[]} [skipKeys] A set of object keys to exclude from the
 * prettified result. Default: `[]`.
 * @property {PrettyContext} context The context object built from parsing
 * the options.
 */
/**
 * Prettifies a standard object. Special care is taken when processing the object
 * to handle child objects that are attached to keys known to contain error
 * objects.
 *
 * @param {PrettifyObjectParams} input
 *
 * @returns {string} The prettified string. This can be as little as `''` if
 * there was nothing to prettify.
 */
function prettifyObject ({
  log,
  excludeLoggerKeys = true,
  skipKeys = [],
  context
}) {
  const {
    EOL: eol,
    IDENT: ident,
    customPrettifiers,
    errorLikeObjectKeys: errorLikeKeys,
    objectColorizer,
    singleLine
  } = context
  // Copy `skipKeys` so the caller's array is never mutated below.
  const keysToIgnore = [].concat(skipKeys)
  /* istanbul ignore else */
  if (excludeLoggerKeys === true) Array.prototype.push.apply(keysToIgnore, LOGGER_KEYS)
  let result = ''
  // Split object keys into two categories: error and non-error
  const { plain, errors } = Object.entries(log).reduce(({ plain, errors }, [k, v]) => {
    if (keysToIgnore.includes(k) === false) {
      // Pre-apply custom prettifiers, because all 3 cases below will need this
      const pretty = typeof customPrettifiers[k] === 'function'
        ? customPrettifiers[k](v, k, log)
        : v
      if (errorLikeKeys.includes(k)) {
        errors[k] = pretty
      } else {
        plain[k] = pretty
      }
    }
    return { plain, errors }
  }, { plain: {}, errors: {} })
  if (singleLine) {
    // Stringify the entire object as a single JSON line
    /* istanbul ignore else */
    if (Object.keys(plain).length > 0) {
      result += objectColorizer.greyMessage(stringifySafe(plain))
    }
    result += eol
    // Avoid printing the escape character on escaped backslashes.
    result = result.replace(/\\\\/gi, '\\')
  } else {
    // Put each object entry on its own line
    Object.entries(plain).forEach(([keyName, keyValue]) => {
      // custom prettifiers are already applied above, so we can skip it now
      let lines = typeof customPrettifiers[keyName] === 'function'
        ? keyValue
        : stringifySafe(keyValue, null, 2)
      // A prettifier may return `undefined` to omit the key entirely.
      if (lines === undefined) return
      // Avoid printing the escape character on escaped backslashes.
      lines = lines.replace(/\\\\/gi, '\\')
      const joinedLines = joinLinesWithIndentation({ input: lines, ident, eol })
      result += `${ident}${keyName}:${joinedLines.startsWith(eol) ? '' : ' '}${joinedLines}${eol}`
    })
  }
  // Errors
  // Error-like keys are always rendered multi-line (even with `singleLine`)
  // via prettifyError, which expands the embedded "stack" string.
  Object.entries(errors).forEach(([keyName, keyValue]) => {
    // custom prettifiers are already applied above, so we can skip it now
    const lines = typeof customPrettifiers[keyName] === 'function'
      ? keyValue
      : stringifySafe(keyValue, null, 2)
    if (lines === undefined) return
    result += prettifyError({ keyName, lines, eol, ident })
  })
  return result
}

View File

@@ -0,0 +1,152 @@
'use strict'
// Tests for prettifyObject: key rendering, skip/ignore lists, backslash
// handling, single-line mode, error-like keys, and custom prettifiers.
const tap = require('tap')
const colors = require('../colors')
const prettifyObject = require('./prettify-object')
const {
  ERROR_LIKE_KEYS
} = require('../constants')
const context = {
  EOL: '\n',
  IDENT: ' ',
  customPrettifiers: {},
  errorLikeObjectKeys: ERROR_LIKE_KEYS,
  objectColorizer: colors(),
  singleLine: false
}
tap.test('returns empty string if no properties present', async t => {
  const str = prettifyObject({ log: {}, context })
  t.equal(str, '')
})
tap.test('works with single level properties', async t => {
  const str = prettifyObject({ log: { foo: 'bar' }, context })
  t.equal(str, ' foo: "bar"\n')
})
tap.test('works with multiple level properties', async t => {
  const str = prettifyObject({ log: { foo: { bar: 'baz' } }, context })
  t.equal(str, ' foo: {\n "bar": "baz"\n }\n')
})
tap.test('skips specified keys', async t => {
  const str = prettifyObject({
    log: { foo: 'bar', hello: 'world' },
    skipKeys: ['foo'],
    context
  })
  t.equal(str, ' hello: "world"\n')
})
tap.test('ignores predefined keys', async t => {
  const str = prettifyObject({ log: { foo: 'bar', pid: 12345 }, context })
  t.equal(str, ' foo: "bar"\n')
})
tap.test('ignores escaped backslashes in string values', async t => {
  const str = prettifyObject({ log: { foo_regexp: '\\[^\\w\\s]\\' }, context })
  t.equal(str, ' foo_regexp: "\\[^\\w\\s]\\"\n')
})
tap.test('ignores escaped backslashes in string values (singleLine option)', async t => {
  const str = prettifyObject({
    log: { foo_regexp: '\\[^\\w\\s]\\' },
    context: {
      ...context,
      singleLine: true
    }
  })
  t.equal(str, '{"foo_regexp":"\\[^\\w\\s]\\"}\n')
})
tap.test('works with error props', async t => {
  const err = Error('Something went wrong')
  const serializedError = {
    message: err.message,
    stack: err.stack
  }
  const str = prettifyObject({ log: { error: serializedError }, context })
  t.ok(str.startsWith(' error:'))
  t.ok(str.includes(' "message": "Something went wrong",'))
  t.ok(str.includes(' Error: Something went wrong'))
})
tap.test('customPrettifiers gets applied', async t => {
  const customPrettifiers = {
    foo: v => v.toUpperCase()
  }
  const str = prettifyObject({
    log: { foo: 'foo' },
    context: {
      ...context,
      customPrettifiers
    }
  })
  t.equal(str.startsWith(' foo: FOO'), true)
})
tap.test('skips lines omitted by customPrettifiers', async t => {
  const customPrettifiers = {
    foo: () => { return undefined }
  }
  const str = prettifyObject({
    log: { foo: 'foo', bar: 'bar' },
    context: {
      ...context,
      customPrettifiers
    }
  })
  t.equal(str.includes('bar: "bar"'), true)
  t.equal(str.includes('foo: "foo"'), false)
})
tap.test('joined lines omits starting eol', async t => {
  const str = prettifyObject({
    log: { msg: 'doing work', calls: ['step 1', 'step 2', 'step 3'], level: 30 },
    context: {
      ...context,
      IDENT: '',
      customPrettifiers: {
        calls: val => '\n' + val.map(it => ' ' + it).join('\n')
      }
    }
  })
  t.equal(str, [
    'msg: "doing work"',
    'calls:',
    ' step 1',
    ' step 2',
    ' step 3',
    ''
  ].join('\n'))
})
tap.test('errors skips prettifiers', async t => {
  const customPrettifiers = {
    err: () => { return 'is_err' }
  }
  const str = prettifyObject({
    log: { err: Error('boom') },
    context: {
      ...context,
      customPrettifiers
    }
  })
  t.equal(str.includes('err: is_err'), true)
})
tap.test('errors skips prettifying if no lines are present', async t => {
  const customPrettifiers = {
    err: () => { return undefined }
  }
  const str = prettifyObject({
    log: { err: Error('boom') },
    context: {
      ...context,
      customPrettifiers
    }
  })
  t.equal(str, '')
})

View File

@@ -0,0 +1,42 @@
'use strict'
module.exports = prettifyTime
const formatTime = require('./format-time')
/**
* @typedef {object} PrettifyTimeParams
* @property {object} log The log object with the timestamp to be prettified.
* @property {PrettyContext} context The context object built from parsing
* the options.
*/
/**
 * Prettifies a timestamp if the given `log` has either `time`, `timestamp`
 * or the custom-configured timestamp property.
 *
 * @param {PrettifyTimeParams} input
 *
 * @returns {undefined|string} If a timestamp property cannot be found then
 * `undefined` is returned. Otherwise, the prettified time is returned as a
 * string.
 */
function prettifyTime ({ log, context }) {
  const { timestampKey, translateTime: translateFormat } = context
  // A user-supplied `time` prettifier replaces the default `[...]` wrapping.
  const timePrettifier = context.customPrettifiers?.time
  // Prefer the configured key; fall back to the legacy `timestamp` field.
  let raw = null
  if (timestampKey in log) {
    raw = log[timestampKey]
  } else if ('timestamp' in log) {
    raw = log.timestamp
  }
  if (raw === null) return undefined
  const formatted = translateFormat ? formatTime(raw, translateFormat) : raw
  return timePrettifier ? timePrettifier(formatted) : `[${formatted}]`
}

View File

@@ -0,0 +1,227 @@
'use strict'
// Tests for prettifyTime: custom timestamp keys, translated and raw
// timestamps, the 0 epoch, string/number epochs, and the custom prettifier.
// TZ is pinned to UTC so translated wall-clock assertions are stable.
process.env.TZ = 'UTC'
const tap = require('tap')
const prettifyTime = require('./prettify-time')
const {
  TIMESTAMP_KEY
} = require('../constants')
const context = {
  timestampKey: TIMESTAMP_KEY,
  translateTime: true,
  customPrettifiers: {}
}
tap.test('returns `undefined` if `time` or `timestamp` not in log', async t => {
  const str = prettifyTime({ log: {}, context })
  t.equal(str, undefined)
})
tap.test('returns prettified formatted time from custom field', async t => {
  const log = { customtime: 1554642900000 }
  let str = prettifyTime({
    log,
    context: {
      ...context,
      timestampKey: 'customtime'
    }
  })
  t.equal(str, '[13:15:00.000]')
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: false,
      timestampKey: 'customtime'
    }
  })
  t.equal(str, '[1554642900000]')
})
tap.test('returns prettified formatted time', async t => {
  let log = { time: 1554642900000 }
  let str = prettifyTime({
    log,
    context: {
      ...context
    }
  })
  t.equal(str, '[13:15:00.000]')
  log = { timestamp: 1554642900000 }
  str = prettifyTime({
    log,
    context: {
      ...context
    }
  })
  t.equal(str, '[13:15:00.000]')
  log = { time: '2019-04-07T09:15:00.000-04:00' }
  str = prettifyTime({
    log,
    context: {
      ...context
    }
  })
  t.equal(str, '[13:15:00.000]')
  log = { timestamp: '2019-04-07T09:15:00.000-04:00' }
  str = prettifyTime({
    log,
    context: {
      ...context
    }
  })
  t.equal(str, '[13:15:00.000]')
  log = { time: 1554642900000 }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: 'd mmm yyyy H:MM'
    }
  })
  t.equal(str, '[7 Apr 2019 13:15]')
  log = { timestamp: 1554642900000 }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: 'd mmm yyyy H:MM'
    }
  })
  t.equal(str, '[7 Apr 2019 13:15]')
  log = { time: '2019-04-07T09:15:00.000-04:00' }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: 'd mmm yyyy H:MM'
    }
  })
  t.equal(str, '[7 Apr 2019 13:15]')
  log = { timestamp: '2019-04-07T09:15:00.000-04:00' }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: 'd mmm yyyy H:MM'
    }
  })
  t.equal(str, '[7 Apr 2019 13:15]')
})
tap.test('passes through value', async t => {
  let log = { time: 1554642900000 }
  let str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: undefined
    }
  })
  t.equal(str, '[1554642900000]')
  log = { timestamp: 1554642900000 }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: undefined
    }
  })
  t.equal(str, '[1554642900000]')
  log = { time: '2019-04-07T09:15:00.000-04:00' }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: undefined
    }
  })
  t.equal(str, '[2019-04-07T09:15:00.000-04:00]')
  log = { timestamp: '2019-04-07T09:15:00.000-04:00' }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: undefined
    }
  })
  t.equal(str, '[2019-04-07T09:15:00.000-04:00]')
})
tap.test('handles the 0 timestamp', async t => {
  let log = { time: 0 }
  let str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: undefined
    }
  })
  t.equal(str, '[0]')
  log = { timestamp: 0 }
  str = prettifyTime({
    log,
    context: {
      ...context,
      translateTime: undefined
    }
  })
  t.equal(str, '[0]')
})
tap.test('works with epoch as a number or string', (t) => {
  t.plan(3)
  const epoch = 1522431328992
  const asNumber = prettifyTime({
    log: { time: epoch, msg: 'foo' },
    context: {
      ...context,
      translateTime: true
    }
  })
  const asString = prettifyTime({
    log: { time: `${epoch}`, msg: 'foo' },
    context: {
      ...context,
      translateTime: true
    }
  })
  const invalid = prettifyTime({
    log: { time: '2 days ago', msg: 'foo' },
    context: {
      ...context,
      translateTime: true
    }
  })
  t.same(asString, '[17:35:28.992]')
  t.same(asNumber, '[17:35:28.992]')
  t.same(invalid, '[2 days ago]')
})
tap.test('uses custom prettifier', async t => {
  const str = prettifyTime({
    log: { time: 0 },
    context: {
      ...context,
      customPrettifiers: {
        time () {
          return 'done'
        }
      }
    }
  })
  t.equal(str, 'done')
})

View File

@@ -0,0 +1,49 @@
'use strict'
module.exports = splitPropertyKey
/**
 * Splits the property key delimited by a dot character but not when it is
 * preceded by a backslash.
 *
 * @param {string} key A string identifying the property.
 *
 * @returns {string[]} Returns a list of strings containing each delimited
 * property, e.g. `'prop1\.domain\.corp.prop2'` yields
 * `[ 'prop1.domain.corp', 'prop2' ]`.
 */
function splitPropertyKey (key) {
  const parts = []
  let escaped = false
  let current = ''
  for (const char of key) {
    // A character following a backslash is taken literally.
    if (escaped) {
      escaped = false
      current += char
      continue
    }
    if (char === '\\') {
      escaped = true
      continue
    }
    // An unescaped dot terminates the current segment.
    if (char === '.') {
      parts.push(current)
      current = ''
      continue
    }
    current += char
  }
  // Flush the trailing segment, if any.
  if (current.length) {
    parts.push(current)
  }
  return parts
}

View File

@@ -0,0 +1,29 @@
'use strict'
// Tests for splitPropertyKey: plain keys, dotted paths, trailing dots,
// and backslash-escaped dots (including with special characters).
const tap = require('tap')
const splitPropertyKey = require('./split-property-key')
tap.test('splitPropertyKey does not change key', async t => {
  const result = splitPropertyKey('data1')
  t.same(result, ['data1'])
})
tap.test('splitPropertyKey splits nested key', async t => {
  const result = splitPropertyKey('data1.data2.data-3')
  t.same(result, ['data1', 'data2', 'data-3'])
})
tap.test('splitPropertyKey splits nested keys ending with a dot', async t => {
  const result = splitPropertyKey('data1.data2.data-3.')
  t.same(result, ['data1', 'data2', 'data-3'])
})
tap.test('splitPropertyKey splits nested escaped key', async t => {
  const result = splitPropertyKey('logging\\.domain\\.corp/operation.foo.bar-2')
  t.same(result, ['logging.domain.corp/operation', 'foo', 'bar-2'])
})
tap.test('splitPropertyKey splits nested escaped key with special characters', async t => {
  const result = splitPropertyKey('logging\\.domain\\.corp/operation.!\t@#$%^&*()_+=-<>.bar\\.2')
  t.same(result, ['logging.domain.corp/operation', '!\t@#$%^&*()_+=-<>', 'bar.2'])
})

View File

@@ -0,0 +1,13 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"
open-pull-requests-limit: 10
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10

View File

@@ -0,0 +1,99 @@
name: CI
on:
push:
paths-ignore:
- 'docs/**'
- '*.md'
pull_request:
paths-ignore:
- 'docs/**'
- '*.md'
# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true
jobs:
dependency-review:
name: Dependency Review
if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Check out repo
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Dependency review
uses: actions/dependency-review-action@v3
test:
name: Test
runs-on: ${{ matrix.os }}
permissions:
contents: read
strategy:
matrix:
node-version: [14, 16, 18, 20]
os: [macos-latest, ubuntu-latest, windows-latest]
exclude:
- node-version: 14
os: windows-latest
steps:
- name: Check out repo
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Node ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- name: Restore cached dependencies
uses: actions/cache@v4
with:
path: node_modules
key: node-modules-${{ hashFiles('package.json') }}
- name: Install dependencies
run: npm i --ignore-scripts
- name: Run Tests
run: npm run test-ci
- name: Coveralls Parallel
uses: coverallsapp/github-action@v2.1.2
with:
github-token: ${{ secrets.github_token }}
parallel: true
flag-name: run-${{ matrix.node-version }}-${{ matrix.os }}
coverage:
needs: test
runs-on: ubuntu-latest
steps:
- name: Coveralls Finished
uses: coverallsapp/github-action@v2.1.2
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
parallel-finished: true
automerge:
name: Automerge Dependabot PRs
if: >
github.event_name == 'pull_request' &&
github.event.pull_request.user.login == 'dependabot[bot]'
needs: test
permissions:
pull-requests: write
contents: write
runs-on: ubuntu-latest
steps:
- uses: fastify/github-action-merge-dependabot@v3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
# Husky-managed git hook: load the husky shim, then run the test suite.
# A non-zero exit from `npm test` aborts the triggering git operation.
. "$(dirname -- "$0")/_/husky.sh"
npm test

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 pino
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,172 @@
# pino-abstract-transport
[![npm version](https://img.shields.io/npm/v/pino-abstract-transport)](https://www.npmjs.com/package/pino-abstract-transport)
[![Build Status](https://img.shields.io/github/actions/workflow/status/pinojs/pino-abstract-transport/ci.yml?branch=main)](https://github.com/pinojs/pino-abstract-transport/actions)
[![Coverage Status](https://coveralls.io/repos/github/pinojs/pino-abstract-transport/badge.svg?branch=main)](https://coveralls.io/github/pinojs/pino-abstract-transport?branch=main)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
Write Pino transports easily.
## Install
```sh
npm i pino-abstract-transport
```
## Usage
```js
import build from 'pino-abstract-transport'
export default async function (opts) {
return build(async function (source) {
for await (let obj of source) {
console.log(obj)
}
})
}
```
or in CommonJS and streams:
```js
'use strict'
const build = require('pino-abstract-transport')
module.exports = function (opts) {
return build(function (source) {
source.on('data', function (obj) {
console.log(obj)
})
})
}
```
## Typescript usage
Install the type definitions for node. Make sure the major version of the type definitions matches the node version you are using.
#### Node 16
```sh
npm i -D @types/node@16
```
## API
### build(fn, opts) => Stream
Create a [`split2`](http://npm.im/split2) instance and returns it.
This same instance is also passed to the given function, which is called
synchronously.
If `opts.enablePipelining` is `true`, `pino-abstract-transport` will
wrap the split2 instance and the returned stream using [`duplexify`](https://www.npmjs.com/package/duplexify),
so they can be concatenated into multiple transports.
#### Events emitted
In addition to all events emitted by a [`Readable`](https://nodejs.org/api/stream.html#stream_class_stream_readable)
stream, it emits the following events:
* `unknown` where an unparsable line is found, both the line and optional error is emitted.
#### Options
* `parse` an option to change the data format passed to the build function. When this option is set to `lines`,
the data is passed as a string, otherwise the data is passed as an object. Default: `undefined`.
* `close(err, cb)` a function that is called to shutdown the transport. It's called both on error and non-error shutdowns.
It can also return a promise. In this case discard the `cb` argument.
* `parseLine(line)` a function that is used to parse line received from `pino`.
* `expectPinoConfig` a boolean that indicates if the transport expects Pino to add some of its configuration to the stream. Default: `false`.
## Example
### custom parseLine
You can allow custom `parseLine` from users while providing a simple and safe default parseLine.
```js
'use strict'
const build = require('pino-abstract-transport')
function defaultParseLine (line) {
const obj = JSON.parse(line)
// property foo will be added on each line
obj.foo = 'bar'
return obj
}
module.exports = function (opts) {
const parseLine = typeof opts.parseLine === 'function' ? opts.parseLine : defaultParseLine
return build(function (source) {
source.on('data', function (obj) {
console.log(obj)
})
}, {
parseLine: parseLine
})
}
```
### Stream concatenation / pipeline
You can pipeline multiple transports:
```js
const build = require('pino-abstract-transport')
const { Transform, pipeline } = require('stream')
function buildTransform () {
return build(function (source) {
return new Transform({
objectMode: true,
autoDestroy: true,
transform (line, enc, cb) {
line.service = 'bob'
cb(null, JSON.stringify(line))
}
})
}, { enablePipelining: true })
}
function buildDestination () {
return build(function (source) {
source.on('data', function (obj) {
console.log(obj)
})
})
}
pipeline(process.stdin, buildTransform(), buildDestination(), function (err) {
console.log('pipeline completed!', err)
})
```
### Using pino config
Setting `expectPinoConfig` to `true` will make the transport wait for pino to send its configuration before starting to process logs. It will add `levels`, `messageKey` and `errorKey` to the stream.
When used with an incompatible version of pino, the stream will immediately error.
```js
import build from 'pino-abstract-transport'
export default function (opts) {
return build(async function (source) {
for await (const obj of source) {
console.log(`[${source.levels.labels[obj.level]}]: ${obj[source.messageKey]}`)
}
}, {
expectPinoConfig: true
})
}
```
## License
MIT

View File

@@ -0,0 +1,122 @@
// Type definitions for pino-abstract-transport 0.4.0
// Project: https://github.com/pinojs/pino-abstract-transport#readme
// Definitions by: Diyar Oktay <https://github.com/windupbird144>
/// <reference types="node" />
import { Transform } from "stream";
type BuildOptions = {
  /**
   * `parseLine(line)` a function that is used to parse each line received from pino.
   * @default JSON.parse
   */
  parseLine?: (line: string) => unknown;
  /**
   * `parse` an option to change the data format passed to the build function.
   * When set to `"lines"` the raw line is passed through instead of the parsed object.
   * @default undefined
   *
   */
  parse?: "lines";
  /**
   * `close(err, cb)` a function that is called to shutdown the transport.
   * It's called both on error and non-error shutdowns. It can also return
   * a promise. In this case discard the cb argument.
   *
   * @example
   * ```typescript
   * {
   *   close: function (err, cb) {
   *     process.nextTick(cb, err)
   *   }
   * }
   * ```
   * */
  close?: (err: Error, cb: Function) => void | Promise<void>;
  /**
   * `metadata` If set to false, do not add metadata properties to the returned stream
   */
  metadata?: false;
  /**
   * `expectPinoConfig` If set to true, the transport will wait for pino to send its
   * configuration before starting to process logs.
   */
  expectPinoConfig?: boolean;
};
/**
* Pass these options to wrap the split2 stream and
* the returned stream into a Duplex
*/
type EnablePipelining = BuildOptions & {
enablePipelining: true;
};
/**
* Create a split2 instance and returns it. This same instance is also passed
* to the given function, which is called after pino has sent its configuration.
*
* @returns {Promise<Transform>} the split2 instance
*/
declare function build(
fn: (transform: Transform & build.OnUnknown) => void | Promise<void>,
opts: BuildOptions & { expectPinoConfig: true }
): Promise<Transform & build.OnUnknown>;
/**
* Create a split2 instance and returns it. This same instance is also passed
* to the given function, which is called synchronously.
*
* @returns {Transform} the split2 instance
*/
declare function build(
fn: (transform: Transform & build.OnUnknown) => void | Promise<void>,
opts?: BuildOptions
): Transform & build.OnUnknown;
/**
* Creates a split2 instance and passes it to the given function, which is called
* after pino has sent its configuration. Then wraps the split2 instance and
* the returned stream into a Duplex, so they can be concatenated into multiple
* transports.
*
* @returns {Promise<Transform>} the wrapped split2 instance
*/
declare function build(
fn: (transform: Transform & build.OnUnknown) => Transform & build.OnUnknown,
opts: EnablePipelining & { expectPinoConfig: true }
): Promise<Transform>;
/**
* Creates a split2 instance and passes it to the given function, which is called
* synchronously. Then wraps the split2 instance and the returned stream into a
* Duplex, so they can be concatenated into multiple transports.
*
* @returns {Transform} the wrapped split2 instance
*/
declare function build(
fn: (transform: Transform & build.OnUnknown) => Transform & build.OnUnknown,
opts: EnablePipelining
): Transform;
declare namespace build {
export interface OnUnknown {
/**
* `unknown` is the event emitted where an unparsable line is found
*
* @param event 'unknown'
* @param line the unparsable line
* @param error the error that was thrown when parsing the line
*/
on(
event: "unknown",
listener: (line: string, error: unknown) => void
): void;
}
}
export = build;

View File

@@ -0,0 +1,128 @@
'use strict'
const metadata = Symbol.for('pino.metadata')
const split = require('split2')
const { Duplex } = require('readable-stream')
const { parentPort, workerData } = require('worker_threads')
// Build a promise that exposes its own settlement functions.
// The result is a plain Promise augmented with `resolve` and `reject`
// properties, so code outside the executor can settle it later (used to
// signal that the PINO_CONFIG message has arrived).
function createDeferred () {
  let settle
  let fail
  const promise = new Promise(function (onFulfilled, onRejected) {
    settle = onFulfilled
    fail = onRejected
  })
  promise.resolve = settle
  promise.reject = fail
  return promise
}
/**
 * Create a split2 stream that parses newline-delimited log lines and pass
 * it to `fn`, which implements the actual transport.
 *
 * @param {Function} fn - called with the parsed stream; may be async. When
 *   `opts.enablePipelining` is true it must return a Transform to be wired
 *   after the parser.
 * @param {object} [opts]
 * @param {'lines'} [opts.parse] - emit the raw line instead of the parsed value.
 * @param {Function} [opts.parseLine] - custom line parser, defaults to JSON.parse.
 * @param {Function} [opts.close] - shutdown hook (callback or promise style).
 * @param {false} [opts.metadata] - set to false to omit pino metadata support.
 * @param {boolean} [opts.expectPinoConfig] - wait for pino's PINO_CONFIG message.
 * @returns {Stream|Promise<Stream>} the stream, or a promise of it when
 *   waiting for the pino config.
 */
module.exports = function build (fn, opts = {}) {
  // Wait for the config only when this transport opts in AND pino announced
  // it will send one. NOTE(review): `workerData?.workerData` is presumably
  // the payload nesting done by thread-stream's worker bootstrap — confirm
  // against thread-stream before changing.
  const waitForConfig = opts.expectPinoConfig === true && workerData?.workerData?.pinoWillSendConfig === true
  const parseLines = opts.parse === 'lines'
  const parseLine = typeof opts.parseLine === 'function' ? opts.parseLine : JSON.parse
  const close = opts.close || defaultClose
  // split2 calls this mapper once per newline-delimited chunk.
  const stream = split(function (line) {
    let value
    try {
      value = parseLine(line)
    } catch (error) {
      // Unparsable input is surfaced as an 'unknown' event, not a stream error.
      this.emit('unknown', line, error)
      return
    }
    if (value === null) {
      // Returning null would end the split2 stream, so report and skip it.
      this.emit('unknown', line, 'Null value ignored')
      return
    }
    if (typeof value !== 'object') {
      // Wrap scalar values so downstream consumers always see an object.
      value = {
        data: value,
        time: Date.now()
      }
    }
    if (stream[metadata]) {
      // Track the most recent record for pino's metadata stream protocol.
      stream.lastTime = value.time
      stream.lastLevel = value.level
      stream.lastObj = value
    }
    if (parseLines) {
      // 'lines' mode: metadata above is still updated, but the raw string flows on.
      return line
    }
    return value
  }, { autoDestroy: true })
  // Route stream teardown through the close hook; supports both the
  // callback style close(err, cb) and a promise-returning close.
  stream._destroy = function (err, cb) {
    const promise = close(err, cb)
    if (promise && typeof promise.then === 'function') {
      promise.then(cb, cb)
    }
  }
  // The transport requires the config but pino will not send it: fail loudly
  // on a later turn of the event loop, after callers had a chance to attach
  // 'error' listeners.
  if (opts.expectPinoConfig === true && workerData?.workerData?.pinoWillSendConfig !== true) {
    setImmediate(() => {
      stream.emit('error', new Error('This transport is not compatible with the current version of pino. Please upgrade pino to the latest version.'))
    })
  }
  if (opts.metadata !== false) {
    // Enabled by default: pino inspects this symbol and then reads
    // lastTime/lastLevel/lastObj after each write.
    stream[metadata] = true
    stream.lastTime = 0
    stream.lastLevel = 0
    stream.lastObj = null
  }
  if (waitForConfig) {
    let pinoConfig = {}
    const configReceived = createDeferred()
    parentPort.on('message', function handleMessage (message) {
      if (message.code === 'PINO_CONFIG') {
        pinoConfig = message.config
        configReceived.resolve()
        // One-shot listener: stop listening once the config has arrived.
        parentPort.off('message', handleMessage)
      }
    })
    // Expose the received config as lazy getters so `fn` can read
    // source.levels / source.messageKey / source.errorKey.
    Object.defineProperties(stream, {
      levels: {
        get () { return pinoConfig.levels }
      },
      messageKey: {
        get () { return pinoConfig.messageKey }
      },
      errorKey: {
        get () { return pinoConfig.errorKey }
      }
    })
    // Defer calling `fn` until the config message has been processed.
    return configReceived.then(finish)
  }
  return finish()
  // Invoke the user function and wire up error handling / pipelining.
  function finish () {
    let res = fn(stream)
    if (res && typeof res.catch === 'function') {
      // Async `fn`: a rejection tears the stream down.
      res.catch((err) => {
        stream.destroy(err)
      })
      // set it to null to not retain a reference to the promise
      res = null
    } else if (opts.enablePipelining && res) {
      // `fn` returned a Transform: expose parser + transform as one Duplex.
      return Duplex.from({ writable: stream, readable: res })
    }
    return stream
  }
}
// Default `close` implementation: defer the callback to the next tick,
// forwarding any shutdown error unchanged.
function defaultClose (err, cb) {
  process.nextTick(function () {
    cb(err)
  })
}

View File

@@ -0,0 +1,41 @@
{
"name": "pino-abstract-transport",
"version": "1.2.0",
"description": "Write Pino transports easily",
"main": "index.js",
"scripts": {
"prepare": "husky install",
"test": "standard | snazzy && tap test/*.test.js && tsd",
"test-ci": "standard | snazzy && tap test/*.test.js --coverage-report=lcovonly && tsd"
},
"repository": {
"type": "git",
"url": "git+https://github.com/pinojs/pino-abstract-transport.git"
},
"keywords": [
"pino",
"transport"
],
"author": "Matteo Collina <hello@matteocollina.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/pinojs/pino-abstract-transport/issues"
},
"homepage": "https://github.com/pinojs/pino-abstract-transport#readme",
"dependencies": {
"readable-stream": "^4.0.0",
"split2": "^4.0.0"
},
"devDependencies": {
"@types/node": "^20.1.0",
"husky": "^9.0.6",
"snazzy": "^9.0.0",
"standard": "^17.0.0",
"tap": "^16.0.0",
"thread-stream": "^2.4.1",
"tsd": "^0.31.0"
},
"tsd": {
"directory": "./test/types"
}
}

View File

@@ -0,0 +1,445 @@
'use strict'
const { once } = require('events')
const { Transform, pipeline } = require('stream')
const { test } = require('tap')
const build = require('../')
test('parse newlined delimited JSON', ({ same, plan }) => {
plan(2)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(function (source) {
source.on('data', function (line) {
same(expected.shift(), line)
})
})
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
// Same expectations as the test above, but passing a `parse` value other
// than 'lines': any unrecognised value falls back to emitting parsed objects.
// Renamed from the duplicate title 'parse newlined delimited JSON' so the
// two cases are distinguishable in tap output.
test('parse newlined delimited JSON with non-lines parse option', ({ same, plan }) => {
  plan(2)
  const expected = [{
    level: 30,
    time: 1617955768092,
    pid: 2942,
    hostname: 'MacBook-Pro.local',
    msg: 'hello world'
  }, {
    level: 30,
    time: 1617955768092,
    pid: 2942,
    hostname: 'MacBook-Pro.local',
    msg: 'another message',
    prop: 42
  }]
  const stream = build(function (source) {
    source.on('data', function (line) {
      same(expected.shift(), line)
    })
  }, { parse: 'json' })
  const lines = expected.map(JSON.stringify).join('\n')
  stream.write(lines)
  stream.end()
})
test('null support', ({ same, plan }) => {
plan(1)
const stream = build(function (source) {
source.on('unknown', function (line) {
same('null', line)
})
})
stream.write('null\n')
stream.end()
})
test('broken json', ({ match, same, plan }) => {
plan(2)
const expected = '{ "truncated'
const stream = build(function (source) {
source.on('unknown', function (line, error) {
same(expected, line)
const regex = /^(Unexpected end of JSON input|Unterminated string in JSON at position 12)$/
match(error.message, regex)
})
})
stream.write(expected + '\n')
stream.end()
})
test('pure values', ({ same, ok, plan }) => {
plan(3)
const stream = build(function (source) {
source.on('data', function (line) {
same(line.data, 42)
ok(line.time)
same(new Date(line.time).getTime(), line.time)
})
})
stream.write('42\n')
stream.end()
})
test('support async iteration', ({ same, plan }) => {
plan(2)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(async function (source) {
for await (const line of source) {
same(expected.shift(), line)
}
})
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('rejecting errors the stream', async ({ same, plan }) => {
const stream = build(async function (source) {
throw new Error('kaboom')
})
const [err] = await once(stream, 'error')
same(err.message, 'kaboom')
})
test('emits an error if the transport expects pino to send the config, but pino is not going to', async function ({ plan, same }) {
plan(1)
const stream = build(() => {}, { expectPinoConfig: true })
const [err] = await once(stream, 'error')
same(err.message, 'This transport is not compatible with the current version of pino. Please upgrade pino to the latest version.')
})
test('set metadata', ({ same, plan, equal }) => {
plan(9)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(function (source) {
source.on('data', function (line) {
const obj = expected.shift()
same(this.lastLevel, obj.level)
same(this.lastTime, obj.time)
same(this.lastObj, obj)
same(obj, line)
})
}, { metadata: true })
equal(stream[Symbol.for('pino.metadata')], true)
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('parse lines', ({ same, plan, equal }) => {
plan(9)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(function (source) {
source.on('data', function (line) {
const obj = expected.shift()
same(this.lastLevel, obj.level)
same(this.lastTime, obj.time)
same(this.lastObj, obj)
same(JSON.stringify(obj), line)
})
}, { metadata: true, parse: 'lines' })
equal(stream[Symbol.for('pino.metadata')], true)
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('custom parse line function', ({ same, plan, equal }) => {
plan(11)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
let num = 0
function parseLine (str) {
const obj = JSON.parse(str)
same(expected[num], obj)
return obj
}
const stream = build(function (source) {
source.on('data', function (line) {
const obj = expected[num]
same(this.lastLevel, obj.level)
same(this.lastTime, obj.time)
same(this.lastObj, obj)
same(obj, line)
num++
})
}, { metadata: true, parseLine })
equal(stream[Symbol.for('pino.metadata')], true)
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('set metadata (default)', ({ same, plan, equal }) => {
plan(9)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(function (source) {
source.on('data', function (line) {
const obj = expected.shift()
same(this.lastLevel, obj.level)
same(this.lastTime, obj.time)
same(this.lastObj, obj)
same(obj, line)
})
})
equal(stream[Symbol.for('pino.metadata')], true)
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('do not set metadata', ({ same, plan, equal }) => {
plan(9)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(function (source) {
source.on('data', function (line) {
const obj = expected.shift()
same(this.lastLevel, undefined)
same(this.lastTime, undefined)
same(this.lastObj, undefined)
same(obj, line)
})
}, { metadata: false })
equal(stream[Symbol.for('pino.metadata')], undefined)
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('close logic', ({ same, plan, pass }) => {
plan(3)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(function (source) {
source.on('data', function (line) {
same(expected.shift(), line)
})
}, {
close (err, cb) {
pass('close called')
process.nextTick(cb, err)
}
})
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('close with promises', ({ same, plan, pass }) => {
plan(3)
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const stream = build(function (source) {
source.on('data', function (line) {
same(expected.shift(), line)
})
}, {
async close () {
pass('close called')
}
})
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test('support Transform streams', ({ same, plan, error }) => {
plan(7)
const expected1 = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const expected2 = []
const stream1 = build(function (source) {
const transform = new Transform({
objectMode: true,
autoDestroy: true,
transform (chunk, enc, cb) {
same(expected1.shift(), chunk)
chunk.service = 'from transform'
expected2.push(chunk)
cb(null, JSON.stringify(chunk) + '\n')
}
})
pipeline(source, transform, () => {})
return transform
}, { enablePipelining: true })
const stream2 = build(function (source) {
source.on('data', function (line) {
same(expected2.shift(), line)
})
})
pipeline(stream1, stream2, function (err) {
error(err)
same(expected1, [])
same(expected2, [])
})
const lines = expected1.map(JSON.stringify).join('\n')
stream1.write(lines)
stream1.end()
})

View File

@@ -0,0 +1,22 @@
'use strict'
const build = require('../..')
// Test fixture transport: consumes parsed log records via async iteration
// and posts each record back to the main thread over `port`, together with
// the pino config values (levels/messageKey/errorKey) that the source
// stream may expose — these are undefined unless `expectPinoConfig` was
// requested and honoured.
module.exports = async function (threadStreamOpts) {
  const { port, opts = {} } = threadStreamOpts
  return build(
    async function (source) {
      for await (const obj of source) {
        port.postMessage({
          data: obj,
          pinoConfig: {
            levels: source.levels,
            messageKey: source.messageKey,
            errorKey: source.errorKey
          }
        })
      }
    },
    opts
  )
}

View File

@@ -0,0 +1,22 @@
'use strict'
const build = require('../..')
// Test fixture transport: listens for 'data' events on the parsed stream
// and posts each record back to the main thread over `port`, together with
// the pino config values (levels/messageKey/errorKey) that the source
// stream may expose — these are undefined unless `expectPinoConfig` was
// requested and honoured.
module.exports = async function (threadStreamOpts) {
  const { port, opts = {} } = threadStreamOpts
  return build(
    function (source) {
      source.on('data', function (line) {
        port.postMessage({
          data: line,
          pinoConfig: {
            levels: source.levels,
            messageKey: source.messageKey,
            errorKey: source.errorKey
          }
        })
      })
    },
    opts
  )
}

View File

@@ -0,0 +1,24 @@
'use strict'
const { Transform, pipeline } = require('stream')
const build = require('../..')
// Test fixture transport used in pipelining mode: rewrites each record
// (tags the service, expands the numeric level via the pino config's level
// labels, upper-cases the message) and re-serialises it as NDJSON for the
// next transport in the pipeline.
module.exports = function (threadStreamOpts) {
  const { opts = {} } = threadStreamOpts
  return build(function (source) {
    const transform = new Transform({
      objectMode: true,
      autoDestroy: true,
      transform (chunk, enc, cb) {
        chunk.service = 'from transform'
        // Reads source.levels / source.messageKey, so callers must pass
        // expectPinoConfig for these lookups to succeed.
        chunk.level = `${source.levels.labels[chunk.level]}(${chunk.level})`
        chunk[source.messageKey] = chunk[source.messageKey].toUpperCase()
        cb(null, JSON.stringify(chunk) + '\n')
      }
    })
    // Pipeline errors are intentionally ignored; the tests only exercise
    // the success path.
    pipeline(source, transform, () => {})
    return transform
  }, { ...opts, enablePipelining: true })
}

View File

@@ -0,0 +1,15 @@
'use strict'
const { pipeline, PassThrough } = require('stream')
// Test fixture: assembles a pipeline from the given transport targets.
// Each target module is loaded and instantiated with its options (possibly
// asynchronously), then the resulting streams are piped one into the next
// behind a single PassThrough entry point that is returned to the caller.
module.exports = async function ({ targets }) {
  const streams = await Promise.all(targets.map(async (t) => {
    const fn = require(t.target)
    const stream = await fn(t.options)
    return stream
  }))
  const stream = new PassThrough()
  // Errors are intentionally ignored; the tests only exercise the success path.
  pipeline(stream, ...streams, () => {})
  return stream
}

View File

@@ -0,0 +1,31 @@
import build, { OnUnknown } from "../../index";
import { expectType } from "tsd";
import { Transform } from "stream";
/**
* If enablePipelining is set to true, the function passed as an argument
* must return a transform. The unknown event should be listened to on the
* stream passed in the first argument.
*/
expectType<Transform>(build((source) => source, { enablePipelining: true }));
/**
* If expectPinoConfig is set with enablePipelining, build returns a promise
*/
expectType<(Promise<Transform>)>(build((source) => source, { enablePipelining: true, expectPinoConfig: true }));
/**
* If enablePipelining is not set the unknown event can be listened to on
* the returned stream.
*/
expectType<Transform & OnUnknown>(build((source) => {}));
/**
* If expectPinoConfig is set, build returns a promise
*/
expectType<(Promise<Transform & OnUnknown>)>(build((source) => {}, { expectPinoConfig: true }));
/**
* build also accepts an async function
*/
expectType<Transform & OnUnknown>(build(async (source) => {}));

View File

@@ -0,0 +1,364 @@
'use strict'
const { once } = require('events')
const { join } = require('path')
const ThreadStream = require('thread-stream')
const { MessageChannel } = require('worker_threads')
const { test } = require('tap')
workerTest('transport-on-data.js')
workerTest('transport-async-iteration.js', ' when using async iteration')
function workerTest (filename, description = '') {
test(`does not wait for pino to send config by default${description}`, function ({ same, plan }) {
plan(4)
const { port1, port2 } = new MessageChannel()
const stream = new ThreadStream({
filename: join(__dirname, 'fixtures', filename),
workerData: { port: port1 },
workerOpts: {
transferList: [port1]
}
})
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const emptyPinoConfig = {
levels: undefined,
messageKey: undefined,
errorKey: undefined
}
port2.on('message', function (message) {
same(expected.shift(), message.data)
same(emptyPinoConfig, message.pinoConfig)
})
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test(`does not wait for pino to send config if transport is not expecting it${description}`, function ({ same, plan }) {
plan(4)
const { port1, port2 } = new MessageChannel()
const stream = new ThreadStream({
filename: join(__dirname, 'fixtures', filename),
workerData: {
port: port1,
pinoWillSendConfig: true
},
workerOpts: {
transferList: [port1]
}
})
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const emptyPinoConfig = {
levels: undefined,
messageKey: undefined,
errorKey: undefined
}
const pinoConfig = {
levels: {
labels: { 30: 'info' },
values: { info: 30 }
},
messageKey: 'msg',
errorKey: 'err'
}
stream.worker.postMessage({ code: 'PINO_CONFIG', config: pinoConfig })
// stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig })
port2.on('message', function (message) {
same(expected.shift(), message.data)
same(emptyPinoConfig, message.pinoConfig)
})
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.end()
})
test(`waits for the pino config when pino intends to send it and the transport requests it${description}`, function ({ same, plan }) {
plan(4)
const { port1, port2 } = new MessageChannel()
const stream = new ThreadStream({
filename: join(__dirname, 'fixtures', filename),
workerData: {
port: port1,
pinoWillSendConfig: true,
opts: {
expectPinoConfig: true
}
},
workerOpts: {
transferList: [port1]
}
})
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const pinoConfig = {
levels: {
labels: { 30: 'info' },
values: { info: 30 }
},
messageKey: 'msg',
errorKey: 'err'
}
port2.on('message', function (message) {
same(expected.shift(), message.data)
same(pinoConfig, message.pinoConfig)
})
const lines = expected.map(JSON.stringify).join('\n')
stream.worker.postMessage({ code: 'PINO_CONFIG', config: pinoConfig })
// stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig })
stream.write(lines)
stream.end()
})
test(`continues to listen if it receives a message that is not PINO_CONFIG${description}`, function ({ same, plan }) {
plan(4)
const { port1, port2 } = new MessageChannel()
const stream = new ThreadStream({
filename: join(__dirname, 'fixtures', 'transport-on-data.js'),
workerData: {
port: port1,
pinoWillSendConfig: true,
opts: {
expectPinoConfig: true
}
},
workerOpts: {
transferList: [port1]
}
})
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const pinoConfig = {
levels: {
labels: { 30: 'info' },
values: { info: 30 }
},
messageKey: 'msg',
errorKey: 'err'
}
port2.on('message', function (message) {
same(expected.shift(), message.data)
same(pinoConfig, message.pinoConfig)
})
const lines = expected.map(JSON.stringify).join('\n')
stream.worker.postMessage('not a PINO_CONFIG')
// stream.emit('message', 'not a PINO_CONFIG')
stream.worker.postMessage({ code: 'NOT_PINO_CONFIG', config: { levels: 'foo', messageKey: 'bar', errorKey: 'baz' } })
// stream.emit('message', { code: 'NOT_PINO_CONFIG', config: { levels: 'foo', messageKey: 'bar', errorKey: 'baz' } })
stream.worker.postMessage({ code: 'PINO_CONFIG', config: pinoConfig })
// stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig })
stream.write(lines)
stream.end()
})
test(`waits for the pino config even if it is sent after write${description}`, function ({ same, plan }) {
plan(4)
const { port1, port2 } = new MessageChannel()
const stream = new ThreadStream({
filename: join(__dirname, 'fixtures', filename),
workerData: {
port: port1,
pinoWillSendConfig: true,
opts: {
expectPinoConfig: true
}
},
workerOpts: {
transferList: [port1]
}
})
const expected = [{
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'hello world'
}, {
level: 30,
time: 1617955768092,
pid: 2942,
hostname: 'MacBook-Pro.local',
msg: 'another message',
prop: 42
}]
const pinoConfig = {
levels: {
labels: { 30: 'info' },
values: { info: 30 }
},
messageKey: 'msg',
errorKey: 'err'
}
port2.on('message', function (message) {
same(expected.shift(), message.data)
same(pinoConfig, message.pinoConfig)
})
const lines = expected.map(JSON.stringify).join('\n')
stream.write(lines)
stream.worker.postMessage({ code: 'PINO_CONFIG', config: pinoConfig })
// stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig })
stream.end()
})
test(`emits an error if the transport expects pino to send the config, but pino is not going to${description}`, async function ({ plan, same, ok }) {
plan(2)
const stream = new ThreadStream({
filename: join(__dirname, 'fixtures', filename),
workerData: {
opts: {
expectPinoConfig: true
}
}
})
const [err] = await once(stream, 'error')
same(err.message, 'This transport is not compatible with the current version of pino. Please upgrade pino to the latest version.')
ok(stream.destroyed)
})
}
// End-to-end check that a pipelined transport chain (transform -> on-data)
// waits for pino's PINO_CONFIG message before processing writes, then uses
// the provided levels/messageKey while transforming each log line.
test('waits for the pino config when pipelining', function ({ same, plan }) {
  plan(2)
  const { port1, port2 } = new MessageChannel()
  const stream = new ThreadStream({
    filename: join(__dirname, 'fixtures', 'worker-pipeline.js'),
    workerData: {
      pinoWillSendConfig: true,
      targets: [{
        target: './transport-transform.js',
        options: {
          opts: { expectPinoConfig: true }
        }
      }, {
        target: './transport-on-data.js',
        options: {
          port: port1
        }
      }]
    },
    workerOpts: {
      transferList: [port1]
    }
  })
  // What the transform stage should emit: levels resolved to 'info(30)',
  // messages upper-cased, and a `service` property appended.
  const expected = [{
    level: 'info(30)',
    time: 1617955768092,
    pid: 2942,
    hostname: 'MacBook-Pro.local',
    msg: 'HELLO WORLD',
    service: 'from transform'
  }, {
    level: 'info(30)',
    time: 1617955768092,
    pid: 2942,
    hostname: 'MacBook-Pro.local',
    msg: 'ANOTHER MESSAGE',
    prop: 42,
    service: 'from transform'
  }]
  // Raw NDJSON input as pino would write it.
  const lines = [{
    level: 30,
    time: 1617955768092,
    pid: 2942,
    hostname: 'MacBook-Pro.local',
    msg: 'hello world'
  }, {
    level: 30,
    time: 1617955768092,
    pid: 2942,
    hostname: 'MacBook-Pro.local',
    msg: 'another message',
    prop: 42
  }].map(JSON.stringify).join('\n')
  const pinoConfig = {
    levels: {
      labels: { 30: 'info' },
      values: { info: 30 }
    },
    messageKey: 'msg',
    errorKey: 'err'
  }
  port2.on('message', function (message) {
    same(expected.shift(), message.data)
  })
  // Deliver the config first; the writes below must be held until it lands.
  stream.worker.postMessage({ code: 'PINO_CONFIG', config: pinoConfig })
  // stream.emit('message', { code: 'PINO_CONFIG', config: pinoConfig })
  stream.write(lines)
  stream.end()
})

View File

@@ -0,0 +1,2 @@
types/index.d.ts
types/index.test-d.ts

View File

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npm test

View File

@@ -0,0 +1,11 @@
coverage: true
flow: false
ts: false
jsx: false
timeout: 240
check-coverage: false
reporter: terse
files:
- 'test/**/*.test.js'

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Matteo Collina
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,151 @@
# sonic-boom
[![NPM Package Version](https://img.shields.io/npm/v/sonic-boom)](https://www.npmjs.com/package/sonic-boom)
[![Build Status](https://github.com/pinojs/sonic-boom/workflows/CI/badge.svg)](https://github.com/pinojs/sonic-boom/actions?query=workflow%3ACI)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
Extremely fast utf8-only stream implementation to write to files and
file descriptors.
This implementation is partial, but it supports backpressure, and piping into it with `.pipe()` is supported.
However, it is 2-3x faster than Node Core `fs.createWriteStream()`:
```
benchSonic*1000: 1916.904ms
benchSonicSync*1000: 8605.265ms
benchSonic4k*1000: 1965.231ms
benchSonicSync4k*1000: 1588.224ms
benchCore*1000: 5851.959ms
benchConsole*1000: 7605.713ms
```
Note that sync mode without buffering is _slower_ than a Node Core WritableStream, however
this mode matches the expected behavior of `console.log()`.
Note that if this is used to log to a windows terminal (`cmd.exe` or
powershell), it is needed to run `chcp 65001` in the terminal to
correctly display utf-8 characters, see
[chcp](https://ss64.com/nt/chcp.html) for more details.
## Install
```
npm i sonic-boom
```
## Example
```js
'use strict'
const SonicBoom = require('sonic-boom')
const sonic = new SonicBoom({ fd: process.stdout.fd }) // or { dest: '/path/to/destination' }
for (let i = 0; i < 10; i++) {
sonic.write('hello sonic\n')
}
```
## API
### SonicBoom(opts)
Creates a new instance of SonicBoom.
The options are:
* `fd`: a file descriptor, something that is returned by `fs.open` or
`fs.openSync`.
* `dest`: a string that is a path to a file to be written to (mode controlled by the `append` option).
* `minLength`: the minimum length of the internal buffer that is
required to be full before flushing.
* `maxLength`: the maximum length of the internal buffer. If a write operation would cause the buffer
to exceed `maxLength`, the data written is dropped and a `drop` event is emitted with the dropped data
* `maxWrite`: the maximum number of bytes that can be written; default: 16384
* `sync`: perform writes synchronously (similar to `console.log`).
* `fsync`: perform a [fsyncSync](https://nodejs.org/api/fs.html#fsfsyncsyncfd) every time a write is completed.
* `append`: appends writes to dest file instead of truncating it (default `true`).
* `mode`: specify the creating file `mode` (see [fs.open()](https://nodejs.org/api/fs.html#fsopenpath-flags-mode-callback) from Node.js core).
* `mkdir`: ensure directory for dest file exists when `true` (default `false`).
* `retryEAGAIN(err, writeBufferLen, remainingBufferLen)`: a function that will be called when sonic-boom
write/writeSync/flushSync encounters a EAGAIN or EBUSY error. If the return value is
true sonic-boom will retry the operation, otherwise it will bubble the
error. `err` is the error that caused this function to be called,
`writeBufferLen` is the length of the buffer sonic-boom tried to write, and
`remainingBufferLen` is the length of the remaining buffer sonic-boom didn't try to write.
For `sync:false` a `SonicBoom` instance will emit the `'ready'` event when a file descriptor is available.
For `sync:true` this is not relevant because the `'ready'` event will be fired when the `SonicBoom` instance is created, before it can be subscribed to.
### SonicBoom#write(string)
Writes the string to the file.
It will return false to signal the producer to slow down.
### SonicBoom#flush([cb])
Writes the current buffer to the file if a write was not in progress.
Do nothing if `minLength` is zero or if it is already writing.
Calls the callback when the flush operation is completed; if it fails, the callback is called with an error.
### SonicBoom#reopen([file])
Reopen the file in place, useful for log rotation.
Example:
```js
const stream = new SonicBoom('./my.log')
process.on('SIGUSR2', function () {
stream.reopen()
})
```
### SonicBoom#flushSync()
Flushes the buffered data synchronously. This is a costly operation.
### SonicBoom#end()
Closes the stream, the data will be flushed down asynchronously
### SonicBoom#destroy()
Closes the stream immediately, the data is not flushed.
### Events
#### SonicBoom#close
See [Stream#close](https://nodejs.org/api/stream.html#event-close). The `'close'` event when the instance has been closed.
#### SonicBoom#drain
See [Stream#drain](https://nodejs.org/api/stream.html#event-drain). The `'drain'` event is emitted when source can resume sending data.
#### SonicBoom#drop <any>
When destination file maximal length is reached, the `'drop'` event is emitted with data that could not be written.
#### SonicBoom#error <Error>
The `'error'` event is emitted when the destination file can not be opened, or written.
#### SonicBoom#finish
See [Stream#finish](https://nodejs.org/api/stream.html#event-finish). The `'finish'` event after calling `end()` method and when all data was written.
#### SonicBoom#ready
The `'ready'` event occurs when the created instance is ready to process input.
#### SonicBoom#write <number>
The `'write'` event occurs every time data is written to the underlying file. It emits the number of written bytes.
## License
MIT

View File

@@ -0,0 +1,98 @@
'use strict'

// Benchmark comparing SonicBoom (string and buffer modes, sync/async,
// with and without a 4 KB minLength) against a core fs WriteStream and
// console.log — everything writes to /dev/null.
const bench = require('fastbench')
const SonicBoom = require('./')
const Console = require('console').Console
const fs = require('fs')

const core = fs.createWriteStream('/dev/null')
const fd = fs.openSync('/dev/null', 'w')
const sonic = new SonicBoom({ fd })
const sonic4k = new SonicBoom({ fd, minLength: 4096 })
const sonicSync = new SonicBoom({ fd, sync: true })
const sonicSync4k = new SonicBoom({ fd, minLength: 4096, sync: true })
const sonicBuffer = new SonicBoom({ fd, contentMode: 'buffer' })
const sonic4kBuffer = new SonicBoom({ fd, contentMode: 'buffer', minLength: 4096 })
const sonicSyncBuffer = new SonicBoom({ fd, contentMode: 'buffer', sync: true })
const sonicSync4kBuffer = new SonicBoom({ fd, contentMode: 'buffer', minLength: 4096, sync: true })
const dummyConsole = new Console(fs.createWriteStream('/dev/null'))

// Each bench function issues MAX writes and completes on 'drain'.
const MAX = 10000
const buf = Buffer.alloc(50, 'hello', 'utf8')
const str = buf.toString()

// Give the streams a moment to open their fds before measuring.
setTimeout(doBench, 100)

const run = bench([
  function benchSonic (cb) {
    sonic.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonic.write(str)
    }
  },
  function benchSonicSync (cb) {
    sonicSync.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonicSync.write(str)
    }
  },
  function benchSonic4k (cb) {
    sonic4k.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonic4k.write(str)
    }
  },
  function benchSonicSync4k (cb) {
    sonicSync4k.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonicSync4k.write(str)
    }
  },
  function benchCore (cb) {
    core.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      core.write(str)
    }
  },
  function benchConsole (cb) {
    // console.log has no 'drain'; completion is approximated with setImmediate.
    for (let i = 0; i < MAX; i++) {
      dummyConsole.log(str)
    }
    setImmediate(cb)
  },
  function benchSonicBuf (cb) {
    sonicBuffer.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonicBuffer.write(buf)
    }
  },
  function benchSonicSyncBuf (cb) {
    sonicSyncBuffer.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonicSyncBuffer.write(buf)
    }
  },
  function benchSonic4kBuf (cb) {
    sonic4kBuffer.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonic4kBuffer.write(buf)
    }
  },
  function benchSonicSync4kBuf (cb) {
    sonicSync4kBuffer.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      sonicSync4kBuffer.write(buf)
    }
  },
  function benchCoreBuf (cb) {
    core.once('drain', cb)
    for (let i = 0; i < MAX; i++) {
      core.write(buf)
    }
  }
], 1000)

// Run the whole suite twice so the second pass benefits from JIT warm-up.
function doBench () {
  run(run)
}

View File

@@ -0,0 +1,18 @@
'use strict'

// Demo: interleave SonicBoom writes to stdout with console.log output,
// in ten bursts of 1000 lines each, scheduled 100 ms apart.
const SonicBoom = require('.')
const sonic = new SonicBoom({ fd: process.stdout.fd })

let count = 0

function scheduleWrites () {
  for (let i = 0; i < 1000; i++) {
    sonic.write('hello sonic\n')
    console.log('hello console')
  }
  // Re-arm up to ten times, then stop.
  if (++count < 10) {
    setTimeout(scheduleWrites, 100)
  }
}

scheduleWrites()

View File

@@ -0,0 +1,8 @@
'use strict'

// Minimal usage example: write ten lines to stdout through SonicBoom.
const SonicBoom = require('.')
const sonic = new SonicBoom({ fd: process.stdout.fd }) // or 'destination'

for (let i = 0; i < 10; i++) {
  sonic.write('hello sonic\n')
}

View File

@@ -0,0 +1,22 @@
'use strict'

// Firehose fixture: write ten 1000-character chunks to stdout, respecting
// backpressure — pause when write() returns false and resume on 'drain'.
const SonicBoom = require('..')
const out = new SonicBoom({ fd: process.stdout.fd })
const str = Buffer.alloc(1000).fill('a').toString()

let i = 0

function write () {
  if (i++ === 10) {
    return
  }
  if (out.write(str)) {
    write()
  } else {
    out.once('drain', write)
  }
}

write()

View File

@@ -0,0 +1,690 @@
'use strict'
const fs = require('fs')
const EventEmitter = require('events')
const inherits = require('util').inherits
const path = require('path')
const sleep = require('atomic-sleep')
const BUSY_WRITE_TIMEOUT = 100
const kEmptyBuffer = Buffer.allocUnsafe(0)
// 16 KB. Don't write more than docker buffer size.
// https://github.com/moby/moby/blob/513ec73831269947d38a644c278ce3cac36783b2/daemon/logger/copier.go#L13
const MAX_WRITE = 16 * 1024
const kContentModeBuffer = 'buffer'
const kContentModeUtf8 = 'utf8'
/**
 * Open (or create) the destination file and wire the resulting fd into the
 * given SonicBoom instance. Handles sync and async open, optional mkdir of
 * the parent directory, and resumes any pending write once the fd is ready.
 *
 * @param {string} file - path to open
 * @param {SonicBoom} sonic - instance being (re)opened
 */
function openFile (file, sonic) {
  sonic._opening = true
  sonic._writing = true
  sonic._asyncDrainScheduled = false
  // NOTE: 'error' and 'ready' events emitted below only relevant when sonic.sync===false
  // for sync mode, there is no way to add a listener that will receive these
  function fileOpened (err, fd) {
    if (err) {
      sonic._reopening = false
      sonic._writing = false
      sonic._opening = false
      if (sonic.sync) {
        // In sync mode the constructor has not returned yet, so defer the
        // 'error' so a listener attached right after construction can see it.
        process.nextTick(() => {
          if (sonic.listenerCount('error') > 0) {
            sonic.emit('error', err)
          }
        })
      } else {
        sonic.emit('error', err)
      }
      return
    }
    const reopening = sonic._reopening
    sonic.fd = fd
    sonic.file = file
    sonic._reopening = false
    sonic._opening = false
    sonic._writing = false
    if (sonic.sync) {
      process.nextTick(() => sonic.emit('ready'))
    } else {
      sonic.emit('ready')
    }
    if (sonic.destroyed) {
      return
    }
    // Start: flush anything buffered while the file was being opened, or
    // let a reopen() caller know writes can resume.
    if ((!sonic._writing && sonic._len > sonic.minLength) || sonic._flushPending) {
      sonic._actualWrite()
    } else if (reopening) {
      process.nextTick(() => sonic.emit('drain'))
    }
  }
  const flags = sonic.append ? 'a' : 'w'
  const mode = sonic.mode
  if (sonic.sync) {
    try {
      if (sonic.mkdir) fs.mkdirSync(path.dirname(file), { recursive: true })
      const fd = fs.openSync(file, flags, mode)
      fileOpened(null, fd)
    } catch (err) {
      // Report through fileOpened for consistency, then rethrow so the
      // synchronous caller also observes the failure.
      fileOpened(err)
      throw err
    }
  } else if (sonic.mkdir) {
    fs.mkdir(path.dirname(file), { recursive: true }, (err) => {
      if (err) return fileOpened(err)
      fs.open(file, flags, mode, fileOpened)
    })
  } else {
    fs.open(file, flags, mode, fileOpened)
  }
}
/**
 * SonicBoom: a minimal, extremely fast stream that writes utf8 strings
 * (or Buffers, with `contentMode: 'buffer'`) to a file descriptor or path.
 * Works when called with or without `new`.
 *
 * @param {object} [opts]
 * @param {number|string} [opts.fd] - numeric fd, or a path (alias: `dest`)
 * @param {string} [opts.dest] - path to the destination file
 * @param {number} [opts.minLength] - buffer this much before writing
 * @param {number} [opts.maxLength] - drop writes that would exceed this buffered size
 * @param {number} [opts.maxWrite] - max bytes per underlying write (default 16 KB)
 * @param {boolean} [opts.sync] - write synchronously, console.log-style
 * @param {boolean} [opts.append] - open with 'a' instead of 'w' (default true)
 * @param {boolean} [opts.mkdir] - create the destination directory if missing
 * @param {Function} [opts.retryEAGAIN] - predicate deciding whether to retry EAGAIN/EBUSY
 * @param {boolean} [opts.fsync] - fsyncSync after every completed write
 * @param {string} [opts.contentMode] - 'utf8' (default) or 'buffer'
 * @param {number} [opts.mode] - file mode passed to fs.open
 * @throws {Error} on unknown contentMode, missing fd/dest, or minLength >= maxWrite
 */
function SonicBoom (opts) {
  if (!(this instanceof SonicBoom)) {
    return new SonicBoom(opts)
  }
  let { fd, dest, minLength, maxLength, maxWrite, sync, append = true, mkdir, retryEAGAIN, fsync, contentMode, mode } = opts || {}
  // A path may arrive via either `fd` or `dest`.
  fd = fd || dest
  this._len = 0
  this.fd = -1
  this._bufs = []
  this._lens = []
  this._writing = false
  this._ending = false
  this._reopening = false
  this._asyncDrainScheduled = false
  this._flushPending = false
  // High-water mark used by write() for its backpressure return value.
  this._hwm = Math.max(minLength || 0, 16387)
  this.file = null
  this.destroyed = false
  this.minLength = minLength || 0
  this.maxLength = maxLength || 0
  this.maxWrite = maxWrite || MAX_WRITE
  this.sync = sync || false
  this.writable = true
  this._fsync = fsync || false
  // NOTE(review): `append` defaults to true in the destructuring above, so
  // `append || false` is effectively just `append` — confirm the intent.
  this.append = append || false
  this.mode = mode
  this.retryEAGAIN = retryEAGAIN || (() => true)
  this.mkdir = mkdir || false
  // Select the string or buffer implementations for the hot-path methods.
  let fsWriteSync
  let fsWrite
  if (contentMode === kContentModeBuffer) {
    this._writingBuf = kEmptyBuffer
    this.write = writeBuffer
    this.flush = flushBuffer
    this.flushSync = flushBufferSync
    this._actualWrite = actualWriteBuffer
    fsWriteSync = () => fs.writeSync(this.fd, this._writingBuf)
    fsWrite = () => fs.write(this.fd, this._writingBuf, this.release)
  } else if (contentMode === undefined || contentMode === kContentModeUtf8) {
    this._writingBuf = ''
    this.write = write
    this.flush = flush
    this.flushSync = flushSync
    this._actualWrite = actualWrite
    fsWriteSync = () => fs.writeSync(this.fd, this._writingBuf, 'utf8')
    fsWrite = () => fs.write(this.fd, this._writingBuf, 'utf8', this.release)
  } else {
    throw new Error(`SonicBoom supports "${kContentModeUtf8}" and "${kContentModeBuffer}", but passed ${contentMode}`)
  }
  if (typeof fd === 'number') {
    this.fd = fd
    process.nextTick(() => this.emit('ready'))
  } else if (typeof fd === 'string') {
    openFile(fd, this)
  } else {
    throw new Error('SonicBoom supports only file descriptors and files')
  }
  if (this.minLength >= this.maxWrite) {
    throw new Error(`minLength should be smaller than maxWrite (${this.maxWrite})`)
  }
  // Completion callback shared by every fs.write: handles EAGAIN/EBUSY
  // retries, trims the written prefix off the pending buffer, and decides
  // what to do next (keep writing, close, or emit 'drain').
  this.release = (err, n) => {
    if (err) {
      if ((err.code === 'EAGAIN' || err.code === 'EBUSY') && this.retryEAGAIN(err, this._writingBuf.length, this._len - this._writingBuf.length)) {
        if (this.sync) {
          // This error code should not happen in sync mode, because it is
          // not using the underlining operating system asynchronous functions.
          // However it happens, and so we handle it.
          // Ref: https://github.com/pinojs/pino/issues/783
          try {
            sleep(BUSY_WRITE_TIMEOUT)
            this.release(undefined, 0)
          } catch (err) {
            this.release(err)
          }
        } else {
          // Let's give the destination some time to process the chunk.
          setTimeout(fsWrite, BUSY_WRITE_TIMEOUT)
        }
      } else {
        this._writing = false
        this.emit('error', err)
      }
      return
    }
    this.emit('write', n)
    const releasedBufObj = releaseWritingBuf(this._writingBuf, this._len, n)
    this._len = releasedBufObj.len
    this._writingBuf = releasedBufObj.writingBuf
    if (this._writingBuf.length) {
      if (!this.sync) {
        fsWrite()
        return
      }
      // Sync mode: keep writing until the current chunk is fully consumed.
      try {
        do {
          const n = fsWriteSync()
          const releasedBufObj = releaseWritingBuf(this._writingBuf, this._len, n)
          this._len = releasedBufObj.len
          this._writingBuf = releasedBufObj.writingBuf
        } while (this._writingBuf.length)
      } catch (err) {
        this.release(err)
        return
      }
    }
    if (this._fsync) {
      fs.fsyncSync(this.fd)
    }
    const len = this._len
    if (this._reopening) {
      // A reopen() was requested while writing; perform it now.
      this._writing = false
      this._reopening = false
      this.reopen()
    } else if (len > this.minLength) {
      this._actualWrite()
    } else if (this._ending) {
      if (len > 0) {
        this._actualWrite()
      } else {
        this._writing = false
        actualClose(this)
      }
    } else {
      this._writing = false
      if (this.sync) {
        // In sync mode 'drain' is deferred to the next tick so callers get
        // a chance to subscribe; only schedule it once.
        if (!this._asyncDrainScheduled) {
          this._asyncDrainScheduled = true
          process.nextTick(emitDrain, this)
        }
      } else {
        this.emit('drain')
      }
    }
  }
  // A new 'drain' listener cancels any already-scheduled deferred drain.
  this.on('newListener', function (name) {
    if (name === 'drain') {
      this._asyncDrainScheduled = false
    }
  })
}
/**
 * Compute the unwritten remainder of the chunk currently being flushed.
 *
 * @param {string | Buffer} writingBuf - chunk that was handed to fs.write
 * @param {number} len - total pending length before this write
 * @param {number} n - number of bytes the OS reported as written
 * @returns {{writingBuf: string | Buffer, len: number}} remainder and new pending length
 */
function releaseWritingBuf (writingBuf, len, n) {
  // For strings fs reports *bytes*; with multi-byte characters present the
  // byte count differs from the character count, so translate it back into
  // a character offset before slicing.
  const isString = typeof writingBuf === 'string'
  if (isString && Buffer.byteLength(writingBuf) !== n) {
    n = Buffer.from(writingBuf).subarray(0, n).toString().length
  }
  const remaining = writingBuf.slice(n)
  const newLen = Math.max(len - n, 0)
  return { writingBuf: remaining, len: newLen }
}
/**
 * Fire a deferred 'drain' on the given SonicBoom instance.
 * Does nothing when nobody is listening, leaving the scheduled flag set
 * so no spurious event is emitted.
 * @param {SonicBoom} sonic - instance whose drain was scheduled
 */
function emitDrain (sonic) {
  if (sonic.listenerCount('drain') === 0) {
    return
  }
  sonic._asyncDrainScheduled = false
  sonic.emit('drain')
}
// SonicBoom emits events ('ready', 'drain', 'write', 'error', 'finish',
// 'close', 'drop'), so it inherits from EventEmitter.
inherits(SonicBoom, EventEmitter)
/**
 * Collapse a group of Buffers into a single Buffer for one write call.
 * Avoids allocation for the common zero- and one-buffer cases.
 * @param {Buffer[]} bufs - buffers queued in one group
 * @param {number} len - total byte length of the group
 * @returns {Buffer} a single contiguous buffer
 */
function mergeBuf (bufs, len) {
  switch (bufs.length) {
    case 0:
      return kEmptyBuffer
    case 1:
      return bufs[0]
    default:
      return Buffer.concat(bufs, len)
  }
}
/**
 * Queue a utf8 string for writing (utf8 content mode). Chunks are packed
 * into batches no larger than `maxWrite`; a write is started immediately
 * once `minLength` is reached and no write is in flight.
 *
 * @param {string} data - text to append
 * @returns {boolean} false when the caller should slow down (backpressure)
 * @throws {Error} if the stream was destroyed
 */
function write (data) {
  if (this.destroyed) {
    throw new Error('SonicBoom destroyed')
  }
  const queued = this._bufs
  const nextLen = this._len + data.length
  // Over the cap: drop the chunk instead of buffering it.
  if (this.maxLength && nextLen > this.maxLength) {
    this.emit('drop', data)
    return this._len < this._hwm
  }
  const last = queued.length - 1
  const fitsInLast = queued.length !== 0 && queued[last].length + data.length <= this.maxWrite
  if (fitsInLast) {
    queued[last] += data
  } else {
    // Start a new batch so no single write exceeds maxWrite.
    queued.push('' + data)
  }
  this._len = nextLen
  if (!this._writing && this._len >= this.minLength) {
    this._actualWrite()
  }
  return this._len < this._hwm
}
/**
 * Queue a Buffer for writing (buffer content mode). Buffers are grouped so
 * each group's total byte length stays within `maxWrite`; a write starts
 * immediately once `minLength` is reached and no write is in flight.
 *
 * @param {Buffer} data - bytes to append
 * @returns {boolean} false when the caller should slow down (backpressure)
 * @throws {Error} if the stream was destroyed
 */
function writeBuffer (data) {
  if (this.destroyed) {
    throw new Error('SonicBoom destroyed')
  }
  const groups = this._bufs
  const groupLens = this._lens
  const nextLen = this._len + data.length
  // Over the cap: drop the chunk instead of buffering it.
  if (this.maxLength && nextLen > this.maxLength) {
    this.emit('drop', data)
    return this._len < this._hwm
  }
  const last = groups.length - 1
  const fitsInLast = groups.length !== 0 && groupLens[last] + data.length <= this.maxWrite
  if (fitsInLast) {
    groups[last].push(data)
    groupLens[last] += data.length
  } else {
    // Start a new group so no single write exceeds maxWrite.
    groups.push([data])
    groupLens.push(data.length)
  }
  this._len = nextLen
  if (!this._writing && this._len >= this.minLength) {
    this._actualWrite()
  }
  return this._len < this._hwm
}
/**
 * Arrange for `cb` to run once the pending data has drained (or an error
 * occurs first). An extra fsync is issued on drain unless the instance is
 * already fsync-ing on every write. The paired listeners remove each other
 * so `cb` fires exactly once.
 * @param {Function} cb - callback invoked with an optional error
 */
function callFlushCallbackOnDrain (cb) {
  this._flushPending = true
  const onDrain = () => {
    // only if _fsync is false to avoid double fsync
    if (!this._fsync) {
      fs.fsync(this.fd, (err) => {
        this._flushPending = false
        cb(err)
      })
    } else {
      this._flushPending = false
      cb()
    }
    this.off('error', onError)
  }
  const onError = (err) => {
    this._flushPending = false
    cb(err)
    this.off('drain', onDrain)
  }
  this.once('drain', onDrain)
  this.once('error', onError)
}
/**
 * Force a write of the buffered data even though `minLength` has not been
 * reached. Does nothing when `minLength` is zero (data is flushed eagerly
 * anyway) or while a write is already in flight.
 *
 * @param {Function} [cb] - called when the flush completes, with an error
 *   on failure
 * @throws {Error} if `cb` is not a function, or (with no cb) if destroyed
 */
function flush (cb) {
  const hasCb = cb != null
  if (hasCb && typeof cb !== 'function') {
    throw new Error('flush cb must be a function')
  }
  if (this.destroyed) {
    const err = new Error('SonicBoom destroyed')
    if (!cb) {
      throw err
    }
    cb(err)
    return
  }
  // minLength of zero means writes are pushed through immediately already.
  if (this.minLength <= 0) {
    cb?.()
    return
  }
  if (cb) {
    callFlushCallbackOnDrain.call(this, cb)
  }
  if (this._writing) {
    return
  }
  if (this._bufs.length === 0) {
    // Seed an empty chunk so _actualWrite has something to push through.
    this._bufs.push('')
  }
  this._actualWrite()
}
/**
 * Buffer-mode flush: identical contract to flush(), but seeds an empty
 * buffer group (and its length entry) when the queue is empty.
 *
 * @param {Function} [cb] - called when the flush completes, with an error
 *   on failure
 * @throws {Error} if `cb` is not a function, or (with no cb) if destroyed
 */
function flushBuffer (cb) {
  const hasCb = cb != null
  if (hasCb && typeof cb !== 'function') {
    throw new Error('flush cb must be a function')
  }
  if (this.destroyed) {
    const err = new Error('SonicBoom destroyed')
    if (!cb) {
      throw err
    }
    cb(err)
    return
  }
  // minLength of zero means writes are pushed through immediately already.
  if (this.minLength <= 0) {
    cb?.()
    return
  }
  if (cb) {
    callFlushCallbackOnDrain.call(this, cb)
  }
  if (this._writing) {
    return
  }
  if (this._bufs.length === 0) {
    // Seed an empty group so _actualWrite has something to push through.
    this._bufs.push([])
    this._lens.push(0)
  }
  this._actualWrite()
}
/**
 * Reopen the destination file in place (e.g. for log rotation). Requires
 * the instance to have been created from a path, not a raw fd. The old fd
 * is closed once the new one is ready.
 * @param {string} [file] - optional new path; defaults to the current file
 * @throws {Error} if destroyed, or if the instance wraps a raw fd
 */
SonicBoom.prototype.reopen = function (file) {
  if (this.destroyed) {
    throw new Error('SonicBoom destroyed')
  }
  // Still opening: retry once the current open completes.
  if (this._opening) {
    this.once('ready', () => {
      this.reopen(file)
    })
    return
  }
  if (this._ending) {
    return
  }
  if (!this.file) {
    throw new Error('Unable to reopen a file descriptor, you must pass a file to SonicBoom')
  }
  if (file) {
    this.file = file
  }
  this._reopening = true
  // A write is in flight: release() will call reopen() again when it ends.
  if (this._writing) {
    return
  }
  const fd = this.fd
  this.once('ready', () => {
    // Close the previous fd only if openFile actually swapped it out.
    if (fd !== this.fd) {
      fs.close(fd, (err) => {
        if (err) {
          return this.emit('error', err)
        }
      })
    }
  })
  openFile(this.file, this)
}
/**
 * Gracefully close the stream: remaining buffered data is flushed
 * asynchronously, then 'finish' and 'close' are emitted.
 * @throws {Error} if the stream was destroyed
 */
SonicBoom.prototype.end = function () {
  if (this.destroyed) {
    throw new Error('SonicBoom destroyed')
  }
  // Wait for the fd before ending.
  if (this._opening) {
    this.once('ready', () => {
      this.end()
    })
    return
  }
  if (this._ending) {
    return
  }
  this._ending = true
  // An in-flight write will notice _ending and close when it completes.
  if (this._writing) {
    return
  }
  if (this._len > 0 && this.fd >= 0) {
    this._actualWrite()
  } else {
    actualClose(this)
  }
}
/**
 * Synchronously write out everything buffered (utf8 mode). Costly; retries
 * EAGAIN/EBUSY according to `retryEAGAIN` and finishes with a best-effort
 * fsync.
 * @throws {Error} if destroyed, if the fd is not ready yet, or on a write
 *   error the retry predicate declines to retry
 */
function flushSync () {
  if (this.destroyed) {
    throw new Error('SonicBoom destroyed')
  }
  if (this.fd < 0) {
    throw new Error('sonic boom is not ready yet')
  }
  // Re-queue any chunk a pending async write left half-done.
  if (!this._writing && this._writingBuf.length > 0) {
    this._bufs.unshift(this._writingBuf)
    this._writingBuf = ''
  }
  let buf = ''
  while (this._bufs.length || buf) {
    if (buf.length <= 0) {
      buf = this._bufs[0]
    }
    try {
      const n = fs.writeSync(this.fd, buf, 'utf8')
      const releasedBufObj = releaseWritingBuf(buf, this._len, n)
      buf = releasedBufObj.writingBuf
      this._len = releasedBufObj.len
      if (buf.length <= 0) {
        this._bufs.shift()
      }
    } catch (err) {
      const shouldRetry = err.code === 'EAGAIN' || err.code === 'EBUSY'
      if (shouldRetry && !this.retryEAGAIN(err, buf.length, this._len - buf.length)) {
        throw err
      }
      // Busy destination: back off briefly, then retry the same chunk.
      sleep(BUSY_WRITE_TIMEOUT)
    }
  }
  try {
    fs.fsyncSync(this.fd)
  } catch {
    // Skip the error. The fd might not support fsync.
  }
}
/**
 * Synchronously write out everything buffered (buffer mode). Costly;
 * retries EAGAIN/EBUSY according to `retryEAGAIN`. Unlike flushSync, no
 * trailing fsync is attempted here.
 * @throws {Error} if destroyed, if the fd is not ready yet, or on a write
 *   error the retry predicate declines to retry
 */
function flushBufferSync () {
  if (this.destroyed) {
    throw new Error('SonicBoom destroyed')
  }
  if (this.fd < 0) {
    throw new Error('sonic boom is not ready yet')
  }
  // Re-queue any chunk a pending async write left half-done.
  if (!this._writing && this._writingBuf.length > 0) {
    this._bufs.unshift([this._writingBuf])
    this._writingBuf = kEmptyBuffer
  }
  let buf = kEmptyBuffer
  while (this._bufs.length || buf.length) {
    if (buf.length <= 0) {
      // Merge the next group of buffers into a single contiguous chunk.
      buf = mergeBuf(this._bufs[0], this._lens[0])
    }
    try {
      const n = fs.writeSync(this.fd, buf)
      buf = buf.subarray(n)
      this._len = Math.max(this._len - n, 0)
      if (buf.length <= 0) {
        this._bufs.shift()
        this._lens.shift()
      }
    } catch (err) {
      const shouldRetry = err.code === 'EAGAIN' || err.code === 'EBUSY'
      if (shouldRetry && !this.retryEAGAIN(err, buf.length, this._len - buf.length)) {
        throw err
      }
      // Busy destination: back off briefly, then retry the same chunk.
      sleep(BUSY_WRITE_TIMEOUT)
    }
  }
}
/**
 * Close the stream immediately; buffered data is discarded, not flushed.
 * Safe to call more than once.
 */
SonicBoom.prototype.destroy = function () {
  if (this.destroyed) {
    return
  }
  actualClose(this)
}
// Kick off a write of the next queued chunk (utf8 mode). `release` is
// invoked — synchronously or from the fs.write callback — with the byte
// count and drives the continuation logic.
function actualWrite () {
  const release = this.release
  this._writing = true
  // Resume a half-written chunk first; otherwise take the next batch.
  this._writingBuf = this._writingBuf || this._bufs.shift() || ''
  if (this.sync) {
    try {
      const written = fs.writeSync(this.fd, this._writingBuf, 'utf8')
      release(null, written)
    } catch (err) {
      release(err)
    }
  } else {
    fs.write(this.fd, this._writingBuf, 'utf8', release)
  }
}
// Buffer-mode twin of actualWrite: merges the next group of buffers into
// one chunk and writes it, reporting back through `release`.
function actualWriteBuffer () {
  const release = this.release
  this._writing = true
  // Resume a half-written chunk first; otherwise merge the next group.
  this._writingBuf = this._writingBuf.length ? this._writingBuf : mergeBuf(this._bufs.shift(), this._lens.shift())
  if (this.sync) {
    try {
      const written = fs.writeSync(this.fd, this._writingBuf)
      release(null, written)
    } catch (err) {
      release(err)
    }
  } else {
    fs.write(this.fd, this._writingBuf, release)
  }
}
// Tear down the instance: mark destroyed, drop buffers, fsync and close
// the fd (stdout/stderr are never closed), then emit 'finish'/'close'.
function actualClose (sonic) {
  if (sonic.fd === -1) {
    // fd not ready yet: close as soon as it is.
    sonic.once('ready', actualClose.bind(null, sonic))
    return
  }
  sonic.destroyed = true
  sonic._bufs = []
  sonic._lens = []
  fs.fsync(sonic.fd, closeWrapped)
  function closeWrapped () {
    // We skip errors in fsync
    if (sonic.fd !== 1 && sonic.fd !== 2) {
      fs.close(sonic.fd, done)
    } else {
      done()
    }
  }
  function done (err) {
    if (err) {
      sonic.emit('error', err)
      return
    }
    // 'finish' only fires for a graceful end(), never for destroy().
    if (sonic._ending && !sonic._writing) {
      sonic.emit('finish')
    }
    sonic.emit('close')
  }
}
/**
 * These export configurations enable JS and TS developers
 * to consume SonicBoom in whatever way best suits their needs.
 * Some examples of supported import syntax includes:
 * - `const SonicBoom = require('SonicBoom')`
 * - `const { SonicBoom } = require('SonicBoom')`
 * - `import * as SonicBoom from 'SonicBoom'`
 * - `import { SonicBoom } from 'SonicBoom'`
 * - `import SonicBoom from 'SonicBoom'`
 */
SonicBoom.SonicBoom = SonicBoom
SonicBoom.default = SonicBoom
module.exports = SonicBoom

View File

@@ -0,0 +1,50 @@
{
"name": "sonic-boom",
"version": "3.8.1",
"description": "Extremely fast utf8 only stream implementation",
"main": "index.js",
"type": "commonjs",
"types": "types/index.d.ts",
"scripts": {
"test": "npm run test:types && standard && npm run test:unit",
"test:unit": "tap",
"test:types": "tsc && tsd && ts-node types/tests/test.ts",
"prepare": "husky install"
},
"repository": {
"type": "git",
"url": "git+https://github.com/pinojs/sonic-boom.git"
},
"keywords": [
"stream",
"fs",
"net",
"fd",
"file",
"descriptor",
"fast"
],
"author": "Matteo Collina <hello@matteocollina.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/pinojs/sonic-boom/issues"
},
"homepage": "https://github.com/pinojs/sonic-boom#readme",
"devDependencies": {
"@types/node": "^20.1.0",
"fastbench": "^1.0.1",
"husky": "^9.0.6",
"proxyquire": "^2.1.3",
"standard": "^17.0.0",
"tap": "^16.2.0",
"tsd": "^0.31.0",
"typescript": "^5.0.2",
"ts-node": "^10.8.0"
},
"dependencies": {
"atomic-sleep": "^1.0.0"
},
"tsd": {
"directory": "./types"
}
}

View File

@@ -0,0 +1,49 @@
'use strict'
const fs = require('fs')
const SonicBoom = require('../')
const { file, runTests } = require('./helper')
runTests(buildTests)
// Destroy-related tests, run in both sync and async mode by the helper.
function buildTests (test, sync) {
  // Reset the umask for testing
  process.umask(0o000)
  test('destroy', (t) => {
    t.plan(5)
    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, sync })
    t.ok(stream.write('hello world\n'))
    stream.destroy()
    // After destroy() further writes must throw.
    t.throws(() => { stream.write('hello world\n') })
    fs.readFile(dest, 'utf8', function (err, data) {
      t.error(err)
      t.equal(data, 'hello world\n')
    })
    // destroy() skips flushing, so 'finish' must NOT fire — only 'close'.
    stream.on('finish', () => {
      t.fail('finish emitted')
    })
    stream.on('close', () => {
      t.pass('close emitted')
    })
  })
  test('destroy while opening', (t) => {
    t.plan(1)
    const dest = file()
    const stream = new SonicBoom({ dest })
    // Destroying before 'ready' must still end with a 'close' event.
    stream.destroy()
    stream.on('close', () => {
      t.pass('close emitted')
    })
  })
}

View File

@@ -0,0 +1,98 @@
'use strict'
const { join } = require('path')
const { fork } = require('child_process')
const fs = require('fs')
const SonicBoom = require('../')
const { file, runTests } = require('./helper')
runTests(buildTests)
// Reopen-related tests, run in both sync and async mode by the helper.
function buildTests (test, sync) {
  // Reset the umask for testing
  process.umask(0o000)
  test('end after reopen', (t) => {
    t.plan(4)
    const dest = file()
    const stream = new SonicBoom({ dest, minLength: 4096, sync })
    stream.once('ready', () => {
      t.pass('ready emitted')
      const after = dest + '-moved'
      stream.reopen(after)
      stream.write('after reopen\n')
      // end() must flush the buffered line into the *reopened* target.
      stream.on('finish', () => {
        t.pass('finish emitted')
        fs.readFile(after, 'utf8', (err, data) => {
          t.error(err)
          t.equal(data, 'after reopen\n')
        })
      })
      stream.end()
    })
  })
  test('end after 2x reopen', (t) => {
    t.plan(4)
    const dest = file()
    const stream = new SonicBoom({ dest, minLength: 4096, sync })
    stream.once('ready', () => {
      t.pass('ready emitted')
      stream.reopen(dest + '-moved')
      const after = dest + '-moved-moved'
      stream.reopen(after)
      stream.write('after reopen\n')
      // Only the last reopen target should receive the data.
      stream.on('finish', () => {
        t.pass('finish emitted')
        fs.readFile(after, 'utf8', (err, data) => {
          t.error(err)
          t.equal(data, 'after reopen\n')
        })
      })
      stream.end()
    })
  })
  test('end if not ready', (t) => {
    t.plan(3)
    const dest = file()
    const stream = new SonicBoom({ dest, minLength: 4096, sync })
    const after = dest + '-moved'
    // reopen/write/end all before 'ready' — the stream must queue them.
    stream.reopen(after)
    stream.write('after reopen\n')
    stream.on('finish', () => {
      t.pass('finish emitted')
      fs.readFile(after, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'after reopen\n')
      })
    })
    stream.end()
  })
  test('chunk data accordingly', (t) => {
    t.plan(2)
    // The firehose fixture streams 10k chars through a child process; the
    // parent must receive them unmangled and the child must exit cleanly.
    const child = fork(join(__dirname, '..', 'fixtures', 'firehose.js'), { silent: true })
    const str = Buffer.alloc(10000).fill('a').toString()
    let data = ''
    child.stdout.on('data', function (chunk) {
      data += chunk.toString()
    })
    child.stdout.on('end', function () {
      t.equal(data, str)
    })
    child.on('close', function (code) {
      t.equal(code, 0)
    })
  })
}

View File

@@ -0,0 +1,140 @@
'use strict'
const { test } = require('tap')
const fs = require('fs')
const proxyquire = require('proxyquire')
const SonicBoom = require('../')
const { file, runTests } = require('./helper')
runTests(buildTests)
// flushSync tests, run in both sync and async mode by the helper.
function buildTests (test, sync) {
  // Reset the umask for testing
  process.umask(0o000)
  test('flushSync', (t) => {
    t.plan(4)
    const dest = file()
    const fd = fs.openSync(dest, 'w')
    // minLength 4096 keeps both writes buffered until flushSync runs.
    const stream = new SonicBoom({ fd, minLength: 4096, sync })
    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))
    stream.flushSync()
    // let the file system settle down things
    setImmediate(function () {
      stream.end()
      const data = fs.readFileSync(dest, 'utf8')
      t.equal(data, 'hello world\nsomething else\n')
      stream.on('close', () => {
        t.pass('close emitted')
      })
    })
  })
}
// flushSync must swallow a single EAGAIN from fs.writeSync, back off, and
// retry until all buffered data lands in the file.
test('retry in flushSync on EAGAIN', (t) => {
  t.plan(7)
  const fakeFs = Object.create(fs)
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })
  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({ fd, sync: false, minLength: 0 })
  stream.on('ready', () => {
    t.pass('ready emitted')
  })
  t.ok(stream.write('hello world\n'))
  // First writeSync call fails with EAGAIN, then restores the real impl.
  fakeFs.writeSync = function (fd, buf, enc) {
    t.pass('fake fs.write called')
    fakeFs.writeSync = fs.writeSync
    const err = new Error('EAGAIN')
    err.code = 'EAGAIN'
    throw err
  }
  t.ok(stream.write('something else\n'))
  stream.flushSync()
  stream.end()
  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})
// When the user-supplied retryEAGAIN predicate returns false, flushSync
// must rethrow the EAGAIN instead of retrying; a later flush still works.
test('throw error in flushSync on EAGAIN', (t) => {
  t.plan(12)
  const fakeFs = Object.create(fs)
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })
  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({
    fd,
    sync: false,
    minLength: 1000,
    // Decline the retry and verify the arguments passed to the predicate.
    retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
      t.equal(err.code, 'EAGAIN')
      t.equal(writeBufferLen, 12)
      t.equal(remainingBufferLen, 0)
      return false
    }
  })
  stream.on('ready', () => {
    t.pass('ready emitted')
  })
  const err = new Error('EAGAIN')
  err.code = 'EAGAIN'
  // First writeSync call throws EAGAIN, then restores the real impl.
  fakeFs.writeSync = function (fd, buf, enc) {
    Error.captureStackTrace(err)
    t.pass('fake fs.write called')
    fakeFs.writeSync = fs.writeSync
    throw err
  }
  fakeFs.fsyncSync = function (...args) {
    t.pass('fake fs.fsyncSync called')
    fakeFs.fsyncSync = fs.fsyncSync
    return fs.fsyncSync.apply(null, args)
  }
  t.ok(stream.write('hello world\n'))
  // The declined retry must surface the original EAGAIN to the caller.
  t.throws(stream.flushSync.bind(stream), err, 'EAGAIN')
  t.ok(stream.write('something else\n'))
  stream.flushSync()
  stream.end()
  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

View File

@@ -0,0 +1,419 @@
'use strict'
const fs = require('fs')
const path = require('path')
const SonicBoom = require('../')
const { file, runTests } = require('./helper')
const proxyquire = require('proxyquire')
runTests(buildTests)
/**
 * Register the flush-related test cases.
 *
 * @param {Function} test - tap's subtest registrar (t.test)
 * @param {boolean} sync - whether SonicBoom should use the sync write path
 */
function buildTests (test, sync) {
  // Reset the umask for testing
  process.umask(0o000)

  // NOTE(review): despite the name, this exercises `append: false` —
  // the pre-existing file content must be truncated, not appended to.
  test('append', (t) => {
    t.plan(4)

    const dest = file()
    fs.writeFileSync(dest, 'hello world\n')
    const stream = new SonicBoom({ dest, append: false, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('something else\n'))
    stream.flush()

    stream.on('drain', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        // Only the new write survives: the seed content was truncated.
        t.equal(data, 'something else\n')
        stream.end()
      })
    })
  })

  // `mkdir: true` must create the missing parent directory for dest.
  test('mkdir', (t) => {
    t.plan(4)

    const dest = path.join(file(), 'out.log')
    const stream = new SonicBoom({ dest, mkdir: true, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('hello world\n'))
    stream.flush()

    stream.on('drain', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'hello world\n')
        stream.end()
      })
    })
  })

  // With a large minLength the buffer never auto-flushes, so flush()
  // is what pushes the data to disk.
  test('flush', (t) => {
    t.plan(5)

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, minLength: 4096, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    stream.flush()

    stream.on('drain', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'hello world\nsomething else\n')
        stream.end()
      })
    })
  })

  // flush() on an empty buffer must still emit 'drain'.
  test('flush with no data', (t) => {
    t.plan(2)

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, minLength: 4096, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    stream.flush()

    stream.on('drain', () => {
      t.pass('drain emitted')
    })
  })

  // flush(cb) must invoke the callback (without error) once data is flushed.
  test('call flush cb after flushed', (t) => {
    t.plan(4)

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, minLength: 4096, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    stream.flush((err) => {
      if (err) t.fail(err)
      else t.pass('flush cb called')
    })
  })

  // With fsync: true the stream must use fs.fsyncSync after writes and
  // must NOT also schedule an async fs.fsync.
  test('only call fsyncSync and not fsync when fsync: true', (t) => {
    t.plan(6)

    const fakeFs = Object.create(fs)
    const SonicBoom = proxyquire('../', {
      fs: fakeFs
    })

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({
      fd,
      sync,
      fsync: true,
      minLength: 4096
    })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    // Async fsync must never fire in this mode.
    fakeFs.fsync = function (fd, cb) {
      t.fail('fake fs.fsync called while should not')
      cb()
    }
    fakeFs.fsyncSync = function (fd) {
      t.pass('fake fsyncSync called')
    }

    // One-shot spy: records the call, restores the real fn, delegates.
    function successOnAsyncOrSyncFn (isSync, originalFn) {
      return function (...args) {
        t.pass(`fake fs.${originalFn.name} called`)
        fakeFs[originalFn.name] = originalFn
        return fakeFs[originalFn.name](...args)
      }
    }

    if (sync) {
      fakeFs.writeSync = successOnAsyncOrSyncFn(true, fs.writeSync)
    } else {
      fakeFs.write = successOnAsyncOrSyncFn(false, fs.write)
    }

    t.ok(stream.write('hello world\n'))
    stream.flush((err) => {
      if (err) t.fail(err)
      else t.pass('flush cb called')

      process.nextTick(() => {
        // to make sure fsync is not called as well
        t.pass('nextTick after flush called')
      })
    })
  })

  // A failing fsync must surface through the flush(cb) error argument.
  test('call flush cb with error when fsync failed', (t) => {
    t.plan(5)

    const fakeFs = Object.create(fs)
    const SonicBoom = proxyquire('../', {
      fs: fakeFs
    })

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({
      fd,
      sync,
      minLength: 4096
    })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    const err = new Error('other')
    err.code = 'other'

    // One-shot stub that fails the (async) fsync callback with `err`.
    function onFsyncOnFsyncSync (isSync, originalFn) {
      return function (...args) {
        Error.captureStackTrace(err)
        t.pass(`fake fs.${originalFn.name} called`)
        fakeFs[originalFn.name] = originalFn
        const cb = args[args.length - 1]
        cb(err)
      }
    }

    // only one is called depends on sync
    fakeFs.fsync = onFsyncOnFsyncSync(false, fs.fsync)

    // The write itself succeeds; only fsync fails.
    function successOnAsyncOrSyncFn (isSync, originalFn) {
      return function (...args) {
        t.pass(`fake fs.${originalFn.name} called`)
        fakeFs[originalFn.name] = originalFn
        return fakeFs[originalFn.name](...args)
      }
    }

    if (sync) {
      fakeFs.writeSync = successOnAsyncOrSyncFn(true, fs.writeSync)
    } else {
      fakeFs.write = successOnAsyncOrSyncFn(false, fs.write)
    }

    t.ok(stream.write('hello world\n'))
    stream.flush((err) => {
      if (err) t.equal(err.code, 'other')
      else t.fail('flush cb called without an error')
    })
  })

  // flush(cb) must still call back when nothing is buffered.
  test('call flush cb even when have no data', (t) => {
    t.plan(2)

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, minLength: 4096, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')

      stream.flush((err) => {
        if (err) t.fail(err)
        else t.pass('flush cb called')
      })
    })
  })

  // minLength: 0 means every write flushes eagerly; flush(cb) must
  // still call back.
  test('call flush cb even when minLength is 0', (t) => {
    t.plan(1)

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, minLength: 0, sync })

    stream.flush((err) => {
      if (err) t.fail(err)
      else t.pass('flush cb called')
    })
  })

  // Flushing after destroy() is an error, reported via the callback.
  test('call flush cb with an error when trying to flush destroyed stream', (t) => {
    t.plan(1)

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, minLength: 4096, sync })
    stream.destroy()

    stream.flush((err) => {
      if (err) t.pass(err)
      else t.fail('flush cb called without an error')
    })
  })

  // A failing underlying write must surface through flush(cb).
  test('call flush cb with an error when failed to flush', (t) => {
    t.plan(5)

    const fakeFs = Object.create(fs)
    const SonicBoom = proxyquire('../', {
      fs: fakeFs
    })

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({
      fd,
      sync,
      minLength: 4096
    })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    const err = new Error('other')
    err.code = 'other'

    // One-shot failing stub: sync variant throws, async variant calls
    // back with the error.
    function onWriteOrWriteSync (isSync, originalFn) {
      return function (...args) {
        Error.captureStackTrace(err)
        t.pass(`fake fs.${originalFn.name} called`)
        fakeFs[originalFn.name] = originalFn

        if (isSync) throw err
        const cb = args[args.length - 1]
        cb(err)
      }
    }

    // only one is called depends on sync
    fakeFs.write = onWriteOrWriteSync(false, fs.write)
    fakeFs.writeSync = onWriteOrWriteSync(true, fs.writeSync)

    t.ok(stream.write('hello world\n'))
    stream.flush((err) => {
      if (err) t.equal(err.code, 'other')
      else t.fail('flush cb called without an error')
    })

    stream.end()
    stream.on('close', () => {
      t.pass('close emitted')
    })
  })

  // flush(cb) issued while a write is already in flight must call back
  // once that write completes.
  test('call flush cb when finish writing when currently in the middle', (t) => {
    t.plan(4)

    const fakeFs = Object.create(fs)
    const SonicBoom = proxyquire('../', {
      fs: fakeFs
    })

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({
      fd,
      sync,
      // to trigger write without calling flush
      minLength: 1
    })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    // Spy that calls flush() from *inside* the write, then delegates.
    function onWriteOrWriteSync (originalFn) {
      return function (...args) {
        stream.flush((err) => {
          if (err) t.fail(err)
          else t.pass('flush cb called')
        })

        t.pass(`fake fs.${originalFn.name} called`)
        fakeFs[originalFn.name] = originalFn
        return originalFn(...args)
      }
    }

    // only one is called depends on sync
    fakeFs.write = onWriteOrWriteSync(fs.write)
    fakeFs.writeSync = onWriteOrWriteSync(fs.writeSync)

    t.ok(stream.write('hello world\n'))
  })

  // write()/flush() issued before 'ready' (while fs.open is pending)
  // must be buffered and the flush callback eventually invoked.
  test('call flush cb when writing and trying to flush before ready (on async)', (t) => {
    t.plan(4)

    const fakeFs = Object.create(fs)
    const SonicBoom = proxyquire('../', {
      fs: fakeFs
    })
    fakeFs.open = fsOpen

    const dest = file()
    // NOTE(review): a path string is passed through the `fd` option —
    // presumably SonicBoom accepts that as a destination; confirm
    // against its constructor.
    const stream = new SonicBoom({
      fd: dest,
      // only async as sync is part of the constructor so the user will not be able to call write/flush
      // before ready
      sync: false,
      // to not trigger write without calling flush
      minLength: 4096
    })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    // Delays the real open by one tick and exercises the pre-ready path.
    function fsOpen (...args) {
      process.nextTick(() => {
        // try writing and flushing before ready and in the middle of opening
        t.pass('fake fs.open called')
        t.ok(stream.write('hello world\n'))

        // calling flush
        stream.flush((err) => {
          if (err) t.fail(err)
          else t.pass('flush cb called')
        })

        fakeFs.open = fs.open
        fs.open(...args)
      })
    }
  })
}

View File

@@ -0,0 +1,63 @@
'use strict'

// fsync option suite: with `fsync: true` SonicBoom must call
// fs.fsyncSync after flushing, in both sync and async write modes.
const { test } = require('tap')
const fs = require('fs')
const proxyquire = require('proxyquire')
const { file } = require('./helper')

test('fsync with sync', (t) => {
  t.plan(5)

  const fakeFs = Object.create(fs)
  // Spy that delegates to the real fsyncSync; expected once per write.
  fakeFs.fsyncSync = function (fd) {
    t.pass('fake fs.fsyncSync called')
    return fs.fsyncSync(fd)
  }
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({ fd, sync: true, fsync: true })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  stream.end()

  // Sync mode: data must already be on disk when end() returns.
  const data = fs.readFileSync(dest, 'utf8')
  t.equal(data, 'hello world\nsomething else\n')
})

test('fsync with async', (t) => {
  t.plan(7)

  const fakeFs = Object.create(fs)
  fakeFs.fsyncSync = function (fd) {
    t.pass('fake fs.fsyncSync called')
    return fs.fsyncSync(fd)
  }
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({ fd, fsync: true })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  stream.end()

  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

View File

@@ -0,0 +1,42 @@
'use strict'
const { test, teardown } = require('tap')
const fs = require('fs')
const os = require('os')
const path = require('path')
// Temp-file path factory. Every generated path is recorded in `files`
// so the tap teardown hook can delete it after the suite finishes.
const files = []
let count = 0

/**
 * Build a unique pathname under the OS temp directory.
 * Uniqueness comes from pid + high-resolution time + a counter.
 * The path is only generated, not created on disk.
 * @returns {string} absolute path to a fresh temp location
 */
function file () {
  const unique = `${process.pid}-${process.hrtime().toString()}-${count++}`
  const target = path.join(os.tmpdir(), `sonic-boom-${unique}`)
  files.push(target)
  return target
}
// Remove every temp path created via file() once the suite ends.
// fs.rmSync exists from Node 14.14; older runtimes fall back to rmdirSync.
teardown(() => {
  const removeDir = fs.rmSync || fs.rmdirSync
  for (const entry of files) {
    try {
      if (fs.existsSync(entry)) {
        if (fs.statSync(entry).isDirectory()) {
          removeDir(entry, { recursive: true, maxRetries: 10 })
        } else {
          fs.unlinkSync(entry)
        }
      }
    } catch (e) {
      // Cleanup is best effort: log and keep deleting the rest.
      console.log(e)
    }
  }
})
/**
 * Run a test-builder twice — once per SonicBoom `sync` mode — so every
 * case covers both the synchronous and asynchronous write paths.
 * @param {Function} buildTests - receives (test, sync) and registers subtests
 */
function runTests (buildTests) {
  for (const sync of [false, true]) {
    test(`sync ${sync}`, (t) => {
      buildTests(t.test, sync)
      t.end()
    })
  }
}
module.exports = { file, runTests }

View File

@@ -0,0 +1,35 @@
'use strict'

// minLength boundary tests: backpressure must still emit 'drain', and a
// minLength at/above the internal maxWrite must be rejected.
const { test } = require('tap')
const fs = require('fs')
const SonicBoom = require('../')
const { file } = require('./helper')

const MAX_WRITE = 16 * 1024

test('drain deadlock', (t) => {
  t.plan(4)

  const dest = file()
  const stream = new SonicBoom({ dest, sync: false, minLength: 9999 })

  t.ok(stream.write(Buffer.alloc(1500).fill('x').toString()))
  t.ok(stream.write(Buffer.alloc(1500).fill('x').toString()))
  // Third write exceeds the high-water mark, so write() reports
  // backpressure (false) — 'drain' must still follow.
  t.ok(!stream.write(Buffer.alloc(MAX_WRITE).fill('x').toString()))
  stream.on('drain', () => {
    t.pass()
  })
})

test('should throw if minLength >= maxWrite', (t) => {
  t.plan(1)
  t.throws(() => {
    const dest = file()
    const fd = fs.openSync(dest, 'w')
    // NOTE(review): called without `new` — presumably SonicBoom guards
    // with an instanceof check and self-constructs; confirm upstream.
    SonicBoom({
      fd,
      minLength: MAX_WRITE
    })
  })
})

View File

@@ -0,0 +1,116 @@
'use strict'

// File-mode suite: POSIX mode assertions are skipped on Windows, where
// chmod-style permission bits are not faithfully reported.
const fs = require('fs')
const path = require('path')
const SonicBoom = require('../')
const { file, runTests } = require('./helper')

const isWindows = process.platform === 'win32'

runTests(buildTests)
/**
 * Register the file-mode test cases.
 *
 * @param {Function} test - tap's subtest registrar (t.test)
 * @param {boolean} sync - whether SonicBoom should use the sync write path
 */
function buildTests (test, sync) {
  // Reset the umask for testing
  process.umask(0o000)

  // An explicit `mode` option must be applied to the created file.
  test('mode', { skip: isWindows }, (t) => {
    t.plan(6)

    const dest = file()
    const mode = 0o666
    const stream = new SonicBoom({ dest, sync, mode })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    stream.end()

    stream.on('finish', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'hello world\nsomething else\n')
        t.equal(fs.statSync(dest).mode & 0o777, stream.mode)
      })
    })
  })

  // Without a `mode` option the file must get the default 0o666
  // (umask was reset to 0 above, so no bits are masked off).
  test('mode default', { skip: isWindows }, (t) => {
    t.plan(6)

    const dest = file()
    const defaultMode = 0o666
    const stream = new SonicBoom({ dest, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    stream.end()

    stream.on('finish', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'hello world\nsomething else\n')
        t.equal(fs.statSync(dest).mode & 0o777, defaultMode)
      })
    })
  })

  // `mode` must also be honoured when mkdir creates the parent dir.
  test('mode on mkdir', { skip: isWindows }, (t) => {
    t.plan(5)

    const dest = path.join(file(), 'out.log')
    const mode = 0o666
    const stream = new SonicBoom({ dest, mkdir: true, mode, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('hello world\n'))
    stream.flush()

    stream.on('drain', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'hello world\n')
        t.equal(fs.statSync(dest).mode & 0o777, stream.mode)
        stream.end()
      })
    })
  })

  test('mode on append', { skip: isWindows }, (t) => {
    t.plan(5)

    const dest = file()
    // NOTE(review): fs.writeFileSync takes (file, data, options) — the
    // trailing 0o422 is silently ignored, so the seed file is created
    // with the default mode, not 0o422. The assertion below happens to
    // rely on that; "fixing" this call (e.g. passing
    // { encoding: 'utf8', mode: 0o422 }) would change what the final
    // mode check observes. TODO confirm intent upstream.
    fs.writeFileSync(dest, 'hello world\n', 'utf8', 0o422)
    // NOTE(review): the isWindows branch is dead code here — the whole
    // test is skipped on Windows via { skip: isWindows }.
    const mode = isWindows ? 0o444 : 0o666
    const stream = new SonicBoom({ dest, append: false, mode, sync })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('something else\n'))
    stream.flush()

    stream.on('drain', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'something else\n')
        t.equal(fs.statSync(dest).mode & 0o777, stream.mode)
        stream.end()
      })
    })
  })
}

View File

@@ -0,0 +1,239 @@
'use strict'

// reopen() suite: log-rotation style tests where the destination file
// is renamed and the stream reopened, for both sync and async modes.
const fs = require('fs')
const proxyquire = require('proxyquire')
const SonicBoom = require('../')
const { file, runTests } = require('./helper')

runTests(buildTests)
/**
 * Register the reopen() test cases.
 *
 * @param {Function} test - tap's subtest registrar (t.test)
 * @param {boolean} sync - whether SonicBoom should use the sync write path
 */
function buildTests (test, sync) {
  // Reset the umask for testing
  process.umask(0o000)

  // Classic log rotation: rename the file, reopen, and verify old data
  // stays in the renamed file while new writes land in a fresh one.
  test('reopen', (t) => {
    t.plan(9)

    const dest = file()
    const stream = new SonicBoom({ dest, sync })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    const after = dest + '-moved'

    stream.once('drain', () => {
      t.pass('drain emitted')

      fs.renameSync(dest, after)
      stream.reopen()

      stream.once('ready', () => {
        t.pass('ready emitted')
        t.ok(stream.write('after reopen\n'))

        stream.once('drain', () => {
          fs.readFile(after, 'utf8', (err, data) => {
            t.error(err)
            t.equal(data, 'hello world\nsomething else\n')
            fs.readFile(dest, 'utf8', (err, data) => {
              t.error(err)
              t.equal(data, 'after reopen\n')
              stream.end()
            })
          })
        })
      })
    })
  })

  // Same rotation, but with buffered writes (large minLength) flushed
  // explicitly around the reopen.
  test('reopen with buffer', (t) => {
    t.plan(9)

    const dest = file()
    const stream = new SonicBoom({ dest, minLength: 4096, sync })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    const after = dest + '-moved'

    stream.once('ready', () => {
      // NOTE(review): misleading pass message — this fires on 'ready',
      // not 'drain'. The string is a runtime assertion label, left as-is.
      t.pass('drain emitted')

      stream.flush()
      fs.renameSync(dest, after)
      stream.reopen()

      stream.once('ready', () => {
        t.pass('ready emitted')
        t.ok(stream.write('after reopen\n'))
        stream.flush()

        stream.once('drain', () => {
          fs.readFile(after, 'utf8', (err, data) => {
            t.error(err)
            t.equal(data, 'hello world\nsomething else\n')
            fs.readFile(dest, 'utf8', (err, data) => {
              t.error(err)
              t.equal(data, 'after reopen\n')
              stream.end()
            })
          })
        })
      })
    })
  })

  // reopen() before the stream has finished opening must be harmless.
  test('reopen if not open', (t) => {
    t.plan(3)

    const dest = file()
    const stream = new SonicBoom({ dest, sync })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    stream.reopen()

    stream.end()
    stream.on('close', function () {
      t.pass('ended')
    })
  })

  // reopen(newPath) must switch the destination to the given file.
  test('reopen with file', (t) => {
    t.plan(10)

    const dest = file()
    const stream = new SonicBoom({ dest, minLength: 0, sync })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    const after = dest + '-new'

    stream.once('drain', () => {
      t.pass('drain emitted')

      stream.reopen(after)
      t.equal(stream.file, after)

      stream.once('ready', () => {
        t.pass('ready emitted')
        t.ok(stream.write('after reopen\n'))

        stream.once('drain', () => {
          fs.readFile(dest, 'utf8', (err, data) => {
            t.error(err)
            t.equal(data, 'hello world\nsomething else\n')
            fs.readFile(after, 'utf8', (err, data) => {
              t.error(err)
              t.equal(data, 'after reopen\n')
              stream.end()
            })
          })
        })
      })
    })
  })

  // A failing open during reopen(): sync mode throws synchronously,
  // async mode emits 'error'; writes must keep going to the old fd.
  test('reopen throws an error', (t) => {
    t.plan(sync ? 10 : 9)

    const fakeFs = Object.create(fs)
    const SonicBoom = proxyquire('../', {
      fs: fakeFs
    })

    const dest = file()
    const stream = new SonicBoom({ dest, sync })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    const after = dest + '-moved'

    stream.on('error', () => {
      t.pass('error emitted')
    })

    stream.once('drain', () => {
      t.pass('drain emitted')
      fs.renameSync(dest, after)
      if (sync) {
        fakeFs.openSync = function (file, flags) {
          t.pass('fake fs.openSync called')
          throw new Error('open error')
        }
      } else {
        fakeFs.open = function (file, flags, mode, cb) {
          t.pass('fake fs.open called')
          setTimeout(() => cb(new Error('open error')), 0)
        }
      }

      if (sync) {
        try {
          stream.reopen()
        } catch (err) {
          // NOTE(review): 'throwed' is a typo for 'threw' in the pass
          // message — runtime string, left untouched here.
          t.pass('reopen throwed')
        }
      } else {
        stream.reopen()
      }

      setTimeout(() => {
        // The old fd is still in use, so this write appends to `after`.
        t.ok(stream.write('after reopen\n'))

        stream.end()
        stream.on('finish', () => {
          fs.readFile(after, 'utf8', (err, data) => {
            t.error(err)
            t.equal(data, 'hello world\nsomething else\nafter reopen\n')
          })
        })
        stream.on('close', () => {
          t.pass('close emitted')
        })
      }, 0)
    })
  })

  // reopen() itself must emit 'drain' once the new fd is writable.
  test('reopen emits drain', (t) => {
    t.plan(9)

    const dest = file()
    const stream = new SonicBoom({ dest, sync })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    const after = dest + '-moved'

    stream.once('drain', () => {
      t.pass('drain emitted')

      fs.renameSync(dest, after)
      stream.reopen()

      stream.once('drain', () => {
        t.pass('drain emitted')
        t.ok(stream.write('after reopen\n'))

        stream.once('drain', () => {
          fs.readFile(after, 'utf8', (err, data) => {
            t.error(err)
            t.equal(data, 'hello world\nsomething else\n')
            fs.readFile(dest, 'utf8', (err, data) => {
              t.error(err)
              t.equal(data, 'after reopen\n')
              stream.end()
            })
          })
        })
      })
    })
  })
}

View File

@@ -0,0 +1,414 @@
'use strict'

// EAGAIN/EBUSY retry suite. Note: `test` from tap is used directly by
// the top-level tests below, and is shadowed by the parameter of the
// same name inside buildTests.
const { test } = require('tap')
const fs = require('fs')
const proxyquire = require('proxyquire')
const { file, runTests } = require('./helper')

const MAX_WRITE = 16 * 1024

runTests(buildTests)
/**
 * Register the retry test cases run under the sync/async matrix.
 *
 * NOTE(review): the single test inside hardcodes `sync: false` and
 * never uses the `sync` parameter, so runTests executes an identical
 * case twice (under both 'sync false' and 'sync true' banners) —
 * TODO confirm whether `sync` was meant to be forwarded.
 *
 * @param {Function} test - tap's subtest registrar (t.test)
 * @param {boolean} sync - requested SonicBoom write mode (unused, see note)
 */
function buildTests (test, sync) {
  // Reset the umask for testing
  process.umask(0o000)

  // An async write failing once with EAGAIN must be retried and the
  // data still reach the file.
  test('retry on EAGAIN', (t) => {
    t.plan(7)

    const fakeFs = Object.create(fs)
    // One-shot stub: fail the first write with EAGAIN, then restore.
    fakeFs.write = function (fd, buf, ...args) {
      t.pass('fake fs.write called')
      fakeFs.write = fs.write
      const err = new Error('EAGAIN')
      err.code = 'EAGAIN'
      process.nextTick(args.pop(), err)
    }
    const SonicBoom = proxyquire('../', {
      fs: fakeFs
    })

    const dest = file()
    const fd = fs.openSync(dest, 'w')
    const stream = new SonicBoom({ fd, sync: false, minLength: 0 })

    stream.on('ready', () => {
      t.pass('ready emitted')
    })

    t.ok(stream.write('hello world\n'))
    t.ok(stream.write('something else\n'))

    stream.end()

    stream.on('finish', () => {
      fs.readFile(dest, 'utf8', (err, data) => {
        t.error(err)
        t.equal(data, 'hello world\nsomething else\n')
      })
    })
    stream.on('close', () => {
      t.pass('close emitted')
    })
  })
}
// The tests below run once at top level (outside the runTests matrix),
// each pinning its own sync/async mode explicitly.

// retryEAGAIN returning false must turn an async EAGAIN into an
// 'error' event instead of a retry.
test('emit error on async EAGAIN', (t) => {
  t.plan(11)

  const fakeFs = Object.create(fs)
  fakeFs.write = function (fd, buf, ...args) {
    t.pass('fake fs.write called')
    fakeFs.write = fs.write
    const err = new Error('EAGAIN')
    err.code = 'EAGAIN'
    process.nextTick(args[args.length - 1], err)
  }
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({
    fd,
    sync: false,
    minLength: 12,
    retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
      t.equal(err.code, 'EAGAIN')
      // 12 bytes == length of 'hello world\n'
      t.equal(writeBufferLen, 12)
      t.equal(remainingBufferLen, 0)
      return false
    }
  })

  stream.on('ready', () => {
    t.pass('ready emitted')
  })

  stream.once('error', err => {
    t.equal(err.code, 'EAGAIN')
    // The stream must remain usable after the emitted error.
    t.ok(stream.write('something else\n'))
  })

  t.ok(stream.write('hello world\n'))

  stream.end()

  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

// A sync write throwing EAGAIN once must be retried transparently.
test('retry on EAGAIN (sync)', (t) => {
  t.plan(7)

  const fakeFs = Object.create(fs)
  fakeFs.writeSync = function (fd, buf, enc) {
    t.pass('fake fs.writeSync called')
    fakeFs.writeSync = fs.writeSync
    const err = new Error('EAGAIN')
    err.code = 'EAGAIN'
    throw err
  }
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({ fd, minLength: 0, sync: true })

  stream.on('ready', () => {
    t.pass('ready emitted')
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  stream.end()

  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

// retryEAGAIN returning false in sync mode must emit 'error' as well.
test('emit error on EAGAIN (sync)', (t) => {
  t.plan(11)

  const fakeFs = Object.create(fs)
  fakeFs.writeSync = function (fd, buf, enc) {
    t.pass('fake fs.writeSync called')
    fakeFs.writeSync = fs.writeSync
    const err = new Error('EAGAIN')
    err.code = 'EAGAIN'
    throw err
  }
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({
    fd,
    minLength: 0,
    sync: true,
    retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
      t.equal(err.code, 'EAGAIN')
      t.equal(writeBufferLen, 12)
      t.equal(remainingBufferLen, 0)
      return false
    }
  })

  stream.on('ready', () => {
    t.pass('ready emitted')
  })

  stream.once('error', err => {
    t.equal(err.code, 'EAGAIN')
    t.ok(stream.write('something else\n'))
  })

  t.ok(stream.write('hello world\n'))

  stream.end()

  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

// Data written while an async write is in flight must be reported via
// retryEAGAIN's remainingBufferLen argument.
test('retryEAGAIN receives remaining buffer on async if write fails', (t) => {
  t.plan(12)

  const fakeFs = Object.create(fs)
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({
    fd,
    sync: false,
    minLength: 12,
    retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
      t.equal(err.code, 'EAGAIN')
      t.equal(writeBufferLen, 12)
      // 11 bytes == 'sonic boom\n', written mid-flight below
      t.equal(remainingBufferLen, 11)
      return false
    }
  })

  stream.on('ready', () => {
    t.pass('ready emitted')
  })

  stream.once('error', err => {
    t.equal(err.code, 'EAGAIN')
    t.ok(stream.write('done'))
  })

  fakeFs.write = function (fd, buf, ...args) {
    t.pass('fake fs.write called')
    fakeFs.write = fs.write
    const err = new Error('EAGAIN')
    err.code = 'EAGAIN'
    // Queue more data while the first write is still pending.
    t.ok(stream.write('sonic boom\n'))
    process.nextTick(args[args.length - 1], err)
  }

  t.ok(stream.write('hello world\n'))

  stream.end()

  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsonic boom\ndone')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

// When the buffered data exceeds maxWrite, the overflow must be passed
// to retryEAGAIN as remainingBufferLen.
test('retryEAGAIN receives remaining buffer if exceeds maxWrite', (t) => {
  t.plan(17)

  const fakeFs = Object.create(fs)
  const SonicBoom = proxyquire('../', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const buf = Buffer.alloc(MAX_WRITE - 2).fill('x').toString() // 16 KB - 2 bytes
  const stream = new SonicBoom({
    fd,
    sync: false,
    minLength: MAX_WRITE - 1,
    retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
      t.equal(err.code, 'EAGAIN', 'retryEAGAIN received EAGAIN error')
      t.equal(writeBufferLen, buf.length, 'writeBufferLen === buf.length')
      // 23 bytes == 'hello world\nsonic boom\n'
      t.equal(remainingBufferLen, 23, 'remainingBufferLen === 23')
      return false
    }
  })

  stream.on('ready', () => {
    t.pass('ready emitted')
  })

  fakeFs.write = function (fd, buf, ...args) {
    t.pass('fake fs.write called')
    const err = new Error('EAGAIN')
    err.code = 'EAGAIN'
    process.nextTick(args.pop(), err)
  }
  fakeFs.writeSync = function (fd, buf, enc) {
    // NOTE(review): the pass message says 'fake fs.write called' even
    // though this is the writeSync stub — runtime string, left as-is.
    t.pass('fake fs.write called')
    const err = new Error('EAGAIN')
    err.code = 'EAGAIN'
    throw err
  }

  t.ok(stream.write(buf), 'write buf')
  // Second write pushes the buffer past minLength → backpressure.
  t.notOk(stream.write('hello world\nsonic boom\n'), 'write hello world sonic boom')

  stream.once('error', err => {
    t.equal(err.code, 'EAGAIN', 'bubbled error should be EAGAIN')
    try {
      stream.flushSync()
    } catch (err) {
      t.equal(err.code, 'EAGAIN', 'thrown error should be EAGAIN')
      // Restore the real fs functions so end() can drain to disk.
      fakeFs.write = fs.write
      fakeFs.writeSync = fs.writeSync
      stream.end()
    }
  })

  stream.on('finish', () => {
    t.pass('finish emitted')
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, `${buf}hello world\nsonic boom\n`, 'data on file should match written')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

// EBUSY is retried with the same machinery as EAGAIN.
test('retry on EBUSY', (t) => {
  t.plan(7)

  const fakeFs = Object.create(fs)
  fakeFs.write = function (fd, buf, ...args) {
    t.pass('fake fs.write called')
    fakeFs.write = fs.write
    const err = new Error('EBUSY')
    err.code = 'EBUSY'
    process.nextTick(args.pop(), err)
  }
  const SonicBoom = proxyquire('..', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({ fd, sync: false, minLength: 0 })

  stream.on('ready', () => {
    t.pass('ready emitted')
  })

  t.ok(stream.write('hello world\n'))
  t.ok(stream.write('something else\n'))

  stream.end()

  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

// retryEAGAIN returning false must also convert EBUSY into 'error'.
test('emit error on async EBUSY', (t) => {
  t.plan(11)

  const fakeFs = Object.create(fs)
  fakeFs.write = function (fd, buf, ...args) {
    t.pass('fake fs.write called')
    fakeFs.write = fs.write
    const err = new Error('EBUSY')
    err.code = 'EBUSY'
    process.nextTick(args.pop(), err)
  }
  const SonicBoom = proxyquire('..', {
    fs: fakeFs
  })

  const dest = file()
  const fd = fs.openSync(dest, 'w')
  const stream = new SonicBoom({
    fd,
    sync: false,
    minLength: 12,
    retryEAGAIN: (err, writeBufferLen, remainingBufferLen) => {
      t.equal(err.code, 'EBUSY')
      t.equal(writeBufferLen, 12)
      t.equal(remainingBufferLen, 0)
      return false
    }
  })

  stream.on('ready', () => {
    t.pass('ready emitted')
  })

  stream.once('error', err => {
    t.equal(err.code, 'EBUSY')
    t.ok(stream.write('something else\n'))
  })

  t.ok(stream.write('hello world\n'))

  stream.end()

  stream.on('finish', () => {
    fs.readFile(dest, 'utf8', (err, data) => {
      t.error(err)
      t.equal(data, 'hello world\nsomething else\n')
    })
  })
  stream.on('close', () => {
    t.pass('close emitted')
  })
})

Some files were not shown because too many files have changed in this diff Show More