Project start
2 backend/node_modules/pino/.eslintignore generated vendored Normal file
@@ -0,0 +1,2 @@
pino.d.ts
test/types/pino.test-d.ts

8 backend/node_modules/pino/.eslintrc generated vendored Normal file
@@ -0,0 +1,8 @@
{
  "extends": [
    "standard"
  ],
  "rules": {
    "no-var": "off"
  }
}

13 backend/node_modules/pino/.github/dependabot.yml generated vendored Normal file
@@ -0,0 +1,13 @@
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "monthly"
    open-pull-requests-limit: 10

  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "monthly"
    open-pull-requests-limit: 10

61 backend/node_modules/pino/.github/workflows/bench.yml generated vendored Normal file
@@ -0,0 +1,61 @@
name: Benchmarks
on:
  push:
    branches:
      - main
    paths-ignore:
      - 'docs/**'
      - '*.md'
  pull_request:
    paths-ignore:
      - 'docs/**'
      - '*.md'

permissions:
  contents: read

jobs:
  benchmark_current:
    name: benchmark current
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v5.0.0
        with:
          ref: ${{ github.base_ref }}
          persist-credentials: false
      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
      - name: Install Modules
        run: npm i --ignore-scripts
      - name: Run Benchmark
        run: npm run bench | tee current.txt
      - name: Upload Current Results
        uses: actions/upload-artifact@v4
        with:
          name: current
          path: current.txt

  benchmark_branch:
    name: benchmark branch
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Code
        uses: actions/checkout@v5.0.0
        with:
          persist-credentials: false
      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: lts/*
      - name: Install Modules
        run: npm i --ignore-scripts
      - name: Run Benchmark
        run: npm run bench | tee branch.txt
      - name: Upload Branch Results
        uses: actions/upload-artifact@v4
        with:
          name: branch
          path: branch.txt

90 backend/node_modules/pino/.github/workflows/ci.yml generated vendored Normal file
@@ -0,0 +1,90 @@
name: CI

on:
  push:
    branches:
      - main
      - 'v*'
    paths-ignore:
      - 'docs/**'
      - '*.md'
  pull_request:
    paths-ignore:
      - 'docs/**'
      - '*.md'

# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
  group: "${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
  cancel-in-progress: true

jobs:
  dependency-review:
    name: Dependency Review
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-latest
    permissions:
      contents: read
    steps:
      - name: Check out repo
        uses: actions/checkout@v5.0.0
        with:
          persist-credentials: false

      - name: Dependency review
        uses: actions/dependency-review-action@v4

  test:
    name: ${{ matrix.node-version }} ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    permissions:
      contents: read
    strategy:
      fail-fast: false
      matrix:
        os: [macOS-latest, windows-latest, ubuntu-latest]
        node-version: [18, '18.18', 20, 22]
        exclude:
          - os: windows-latest
            node-version: 22
          - os: windows-latest
            node-version: '18.18'

    steps:
      - name: Check out repo
        uses: actions/checkout@v5.0.0
        with:
          persist-credentials: false

      - name: Setup Node ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      - name: Install dependencies
        run: npm i --ignore-scripts

      - name: Run tests
        run: npm run test-ci

      - name: Run smoke test
        if: >
          matrix.os != 'windows-latest' &&
          matrix.node-version > 14
        run: npm run test:smoke

  automerge:
    name: Automerge Dependabot PRs
    if: >
      github.event_name == 'pull_request' &&
      github.event.pull_request.user.login == 'dependabot[bot]'
    needs: test
    permissions:
      pull-requests: write
      contents: write
    runs-on: ubuntu-latest
    steps:
      - uses: fastify/github-action-merge-dependabot@v3
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          exclude: 'sonic-boom,pino-std-serializers,quick-format-unescaped,fast-redact'

30 backend/node_modules/pino/.github/workflows/lock-threads.yml generated vendored Normal file
@@ -0,0 +1,30 @@
name: 'Lock Threads'

on:
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:

permissions:
  issues: write
  pull-requests: write

concurrency:
  group: lock

jobs:
  action:
    runs-on: ubuntu-latest
    steps:
      - uses: jsumners/lock-threads@b27edac0ac998d42b2815e122b6c24b32b568321
        with:
          log-output: true
          issue-inactive-days: '30'
          issue-comment: >
            This issue has been automatically locked since there
            has not been any recent activity after it was closed.
            Please open a new issue for related bugs.
          pr-comment: >
            This pull request has been automatically locked since there
            has not been any recent activity after it was closed.
            Please open a new issue for related bugs.

43 backend/node_modules/pino/.github/workflows/publish-release.yml generated vendored Normal file
@@ -0,0 +1,43 @@
name: Publish release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'The version number to tag and release'
        required: true
        type: string
      prerelease:
        description: 'Release as pre-release'
        required: false
        type: boolean
        default: false

jobs:
  release-npm:
    runs-on: ubuntu-latest
    environment: main
    permissions:
      contents: write
      id-token: write
    steps:
      - uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493 # v4
      - uses: actions/setup-node@v4
        with:
          node-version: '22'
          registry-url: 'https://registry.npmjs.org'
      - run: npm install npm -g
      - run: npm install
      - name: Change version number and sync
        run: |
          node build/sync-version.js ${{ inputs.version }}
      - name: GIT commit and push all changed files
        run: |
          git config --global user.name "mcollina"
          git config --global user.email "hello@matteocollina.com"
          git commit -n -a -m "Bumped v${{ inputs.version }}"
          git push origin HEAD:${{ github.ref }}
      - run: npm publish --access public --tag ${{ inputs.prerelease == true && 'next' || 'latest' }}
      - name: 'Create release notes'
        run: |
          npx @matteo.collina/release-notes -a ${{ secrets.GITHUB_TOKEN }} -t v${{ inputs.version }} -r pino -o pinojs ${{ github.event.inputs.prerelease == 'true' && '-p' || '' }} -c ${{ github.ref }}

23 backend/node_modules/pino/.github/workflows/target-main.yml generated vendored Normal file
@@ -0,0 +1,23 @@
name: PR Target Check

on:
  pull_request_target:
    types: [opened]

permissions:
  pull-requests: write

jobs:
  comment:
    if: ${{ github.base_ref != 'master' }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/github-script@v8
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: '⚠️ This pull request does not target the master branch.'
            })

0 backend/node_modules/pino/.nojekyll generated vendored Normal file

1 backend/node_modules/pino/.prettierignore generated vendored Normal file
@@ -0,0 +1 @@
*

8 backend/node_modules/pino/.taprc.yaml generated vendored Normal file
@@ -0,0 +1,8 @@
coverage: true
timeout: 480
check-coverage: false

reporter: terse

files:
  - 'test/**/*.test.js'

1 backend/node_modules/pino/CNAME generated vendored Normal file
@@ -0,0 +1 @@
getpino.io

30 backend/node_modules/pino/CONTRIBUTING.md generated vendored Normal file
@@ -0,0 +1,30 @@
# Pino is an OPEN Open Source Project

## What?

Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project.

## Rules

Before you start coding, please read [Contributing to projects with git](https://jrfom.com/posts/2017/03/08/a-primer-on-contributing-to-projects-with-git/).

Notice that as long as you don't have commit-access to the project, you have to fork the project and open PRs from the feature branches of the forked project.

There are a few basic ground-rules for contributors:

1. **No `--force` pushes** on `master` or modifying the Git history in any way after a PR has been merged.
1. **Non-master branches** ought to be used for ongoing work.
1. **Non-trivial changes** ought to be subject to an **internal pull-request** to solicit feedback from other contributors.
1. All pull-requests for new features **must** target the `master` branch. PRs to fix bugs in LTS releases are also allowed.
1. Contributors should attempt to adhere to the prevailing code-style.
1. 100% code coverage

## Releases

Declaring formal releases remains the prerogative of the project maintainer.

## Changes to this arrangement

This is an experiment and feedback is welcome! This document may also be subject to pull-requests or changes by contributors where you believe you have something valuable to add or change.

-----------------------------------------

21 backend/node_modules/pino/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2016-2025 Matteo Collina, David Mark Clements and the Pino contributors listed at <https://github.com/pinojs/pino#the-team> and in the README file.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

177 backend/node_modules/pino/README.md generated vendored Normal file
@@ -0,0 +1,177 @@
# pino
[](https://www.npmjs.com/package/pino)
[](https://github.com/pinojs/pino/actions)
[](https://standardjs.com/)

[Very low overhead](#low-overhead) JavaScript logger.

## Documentation

* [Benchmarks ⇗](/docs/benchmarks.md)
* [API ⇗](/docs/api.md)
* [Browser API ⇗](/docs/browser.md)
* [Redaction ⇗](/docs/redaction.md)
* [Child Loggers ⇗](/docs/child-loggers.md)
* [Transports ⇗](/docs/transports.md)
* [Diagnostics ⇗](/docs/diagnostics.md)
* [Web Frameworks ⇗](/docs/web.md)
* [Pretty Printing ⇗](/docs/pretty.md)
* [Asynchronous Logging ⇗](/docs/asynchronous.md)
* [Ecosystem ⇗](/docs/ecosystem.md)
* [Help ⇗](/docs/help.md)
* [Long Term Support Policy ⇗](/docs/lts.md)

## Runtimes

### Node.js

Pino is built to run on [Node.js](http://nodejs.org).

### Bare

Pino works on [Bare](https://github.com/holepunchto/bare) with the [`pino-bare`](https://github.com/pinojs/pino-bare) compatibility module.

### Pear

Pino works on [Pear](https://docs.pears.com), which is built on [Bare](https://github.com/holepunchto/bare), with the [`pino-bare`](https://github.com/pinojs/pino-bare) compatibility module.

## Install

Using NPM:
```
$ npm install pino
```

Using YARN:
```
$ yarn add pino
```

If you would like to install pino v6, refer to https://github.com/pinojs/pino/tree/v6.x.

## Usage

```js
const logger = require('pino')()

logger.info('hello world')

const child = logger.child({ a: 'property' })
child.info('hello child!')
```

This produces:

```
{"level":30,"time":1531171074631,"msg":"hello world","pid":657,"hostname":"Davids-MBP-3.fritz.box"}
{"level":30,"time":1531171082399,"msg":"hello child!","pid":657,"hostname":"Davids-MBP-3.fritz.box","a":"property"}
```

For using Pino with a web framework see:

* [Pino with Fastify](docs/web.md#fastify)
* [Pino with Express](docs/web.md#express)
* [Pino with Hapi](docs/web.md#hapi)
* [Pino with Restify](docs/web.md#restify)
* [Pino with Koa](docs/web.md#koa)
* [Pino with Node core `http`](docs/web.md#http)
* [Pino with Nest](docs/web.md#nest)
* [Pino with Hono](docs/web.md#hono)

<a name="essentials"></a>
## Essentials

### Development Formatting

The [`pino-pretty`](https://github.com/pinojs/pino-pretty) module can be used to
format logs during development:

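As an illustration, a minimal sketch of wiring `pino-pretty` up programmatically (assumes pino v7+ and `pino-pretty` installed as a dev dependency; for production, plain JSON output is preferable):

```js
const pino = require('pino')

// Route output through pino-pretty during development only.
const logger = pino({
  transport: { target: 'pino-pretty' }
})

logger.info('formatted for humans')
```
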
### Transports & Log Processing

Due to Node's single-threaded event-loop, it's highly recommended that sending,
alert triggering, reformatting, and all forms of log processing
are conducted in a separate process or thread.

In Pino terminology, we call all log processors "transports" and recommend that the
transports be run in a worker thread using our `pino.transport` API.

For more details see our [Transports⇗](docs/transports.md) document.

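As a rough sketch of that recommendation (assumes pino v7+; `pino/file` is a built-in target, and `./app.log` is an arbitrary example path):

```js
const pino = require('pino')

// pino.transport() runs the target in a worker thread, keeping
// log processing off the main event loop.
const transport = pino.transport({
  target: 'pino/file',
  options: { destination: './app.log' }
})

const logger = pino(transport)
logger.info('processed off the main thread')
```
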
### Low overhead

Using minimum resources for logging is very important. Log messages
tend to get added over time and this can lead to a throttling effect
on applications – such as reduced requests per second.

In many cases, Pino is over 5x faster than alternatives.

See the [Benchmarks](docs/benchmarks.md) document for comparisons.

### Bundling support

Pino supports being bundled using tools like webpack or esbuild.

See the [Bundling](docs/bundling.md) document for more information.

<a name="team"></a>
## The Team

### Matteo Collina

<https://github.com/mcollina>

<https://www.npmjs.com/~matteo.collina>

<https://twitter.com/matteocollina>

### David Mark Clements

<https://github.com/davidmarkclements>

<https://www.npmjs.com/~davidmarkclements>

<https://twitter.com/davidmarkclem>

### James Sumners

<https://github.com/jsumners>

<https://www.npmjs.com/~jsumners>

<https://twitter.com/jsumners79>

### Thomas Watson Steen

<https://github.com/watson>

<https://www.npmjs.com/~watson>

<https://twitter.com/wa7son>

## Contributing

Pino is an **OPEN Open Source Project**. This means that:

> Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. This project is more like an open wiki than a standard guarded open source project.

See the [CONTRIBUTING.md](https://github.com/pinojs/pino/blob/main/CONTRIBUTING.md) file for more details.

<a name="acknowledgments"></a>
## Acknowledgments

This project was kindly sponsored by [nearForm](https://nearform.com).
This project is kindly sponsored by [Platformatic](https://platformatic.dev).

Logo and identity designed by Cosmic Fox Design: https://www.behance.net/cosmicfox.

## License

Licensed under [MIT](./LICENSE).

[elasticsearch]: https://www.elastic.co/products/elasticsearch
[kibana]: https://www.elastic.co/products/kibana

68 backend/node_modules/pino/SECURITY.md generated vendored Normal file
@@ -0,0 +1,68 @@
# Security Policy

This document describes the management of vulnerabilities for the
Pino project and all modules within the Pino organization.

## Reporting vulnerabilities

Individuals who find potential vulnerabilities in Pino are invited
to report them via email at matteo.collina@gmail.com.

### Strict measures when reporting vulnerabilities

Avoid creating new "informative" reports. Only open a new
report for a potential vulnerability if you are absolutely sure it
should be tagged as an actual vulnerability. Be mindful of the maintainers' time.

## Handling vulnerability reports

When a potential vulnerability is reported, the following actions are taken:

### Triage

**Delay:** 5 business days

Within 5 business days, a member of the security team provides a first answer to the
individual who submitted the potential vulnerability. The possible responses
can be:

* Acceptance: what was reported is considered as a new vulnerability
* Rejection: what was reported is not considered as a new vulnerability
* Need more information: the security team needs more information in order to evaluate what was reported.

Triaging should include updating issue fields:
* Asset - set/create the module affected by the report
* Severity - TBD, currently left empty

### Correction follow-up

**Delay:** 90 days

When a vulnerability is confirmed, a member of the security team volunteers to follow
up on this report.

With the help of the individual who reported the vulnerability, they contact
the maintainers of the vulnerable package to make them aware of the
vulnerability. The maintainers can be invited as participants to the reported issue.

With the package maintainer, they define a release date for the publication
of the vulnerability. Ideally, this release date should not happen before
the package has been patched.

The report's vulnerable versions upper limit should be set to:
* `*` if there is no fixed version available by the time of publishing the report.
* the last vulnerable version. For example: `<=1.2.3` if a fix exists in `1.2.4`

### Publication

**Delay:** 90 days

Within 90 days after the triage date, the vulnerability must be made public.

**Severity**: Vulnerability severity is assessed using [CVSS v.3](https://www.first.org/cvss/user-guide).

If the package maintainer is actively developing a patch, an additional delay
can be added with the approval of the security team and the individual who
reported the vulnerability.

At this point, a CVE will be requested by the team.

95 backend/node_modules/pino/benchmarks/basic.bench.js generated vendored Normal file
@@ -0,0 +1,95 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const loglevel = require('./utils/wrap-log-level')(dest)
const plogNodeStream = pino(dest)
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', minLength: 4096 }))
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))

process.env.DEBUG = 'dlog'
const debug = require('debug')
const dlog = debug('dlog')
dlog.log = function (s) { dest.write(s) }

const max = 10
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})

require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)

const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})

const run = bench([
  function benchBunyan (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello world')
    }
    setImmediate(cb)
  },
  function benchWinston (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello world')
    }
    setImmediate(cb)
  },
  function benchBole (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello world')
    }
    setImmediate(cb)
  },
  function benchDebug (cb) {
    for (var i = 0; i < max; i++) {
      dlog('hello world')
    }
    setImmediate(cb)
  },
  function benchLogLevel (cb) {
    for (var i = 0; i < max; i++) {
      loglevel.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPino (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoMinLength (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoNodeStream (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello world')
    }
    setImmediate(cb)
  }
], 10000)

run(run)

52 backend/node_modules/pino/benchmarks/child-child.bench.js generated vendored Normal file
@@ -0,0 +1,52 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest).child({ a: 'property' }).child({ sub: 'child' })
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' }).child({ sub: 'child' })
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
  .child({ a: 'property' })
  .child({ sub: 'child' })

const max = 10
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
}).child({ a: 'property' }).child({ sub: 'child' })

const run = bench([
  function benchBunyanChildChild (cb) {
    for (var i = 0; i < max; i++) {
      blog.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

73 backend/node_modules/pino/benchmarks/child-creation.bench.js generated vendored Normal file
@@ -0,0 +1,73 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
const plogDest = pino(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))

const max = 10
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})

require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)

const run = bench([
  function benchBunyanCreation (cb) {
    const child = blog.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchBoleCreation (cb) {
    const child = bole('child')
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoCreation (cb) {
    const child = plogDest.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthCreation (cb) {
    const child = plogMinLength.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamCreation (cb) {
    const child = plogNodeStream.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoCreationWithOption (cb) {
    const child = plogDest.child({ a: 'property' }, { redact: [] })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

62 backend/node_modules/pino/benchmarks/child.bench.js generated vendored Normal file
@@ -0,0 +1,62 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')('child')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest).child({ a: 'property' })
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null')).child({ a: 'property' })
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))

const max = 10
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
}).child({ a: 'property' })

require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)

const run = bench([
  function benchBunyanChild (cb) {
    for (var i = 0; i < max; i++) {
      blog.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchBoleChild (cb) {
    for (var i = 0; i < max; i++) {
      bole.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChild (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthChild (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamChild (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

88 backend/node_modules/pino/benchmarks/deep-object.bench.js generated vendored Normal file
@@ -0,0 +1,88 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
delete require.cache[require.resolve('../')]

const loglevel = require('./utils/wrap-log-level')(dest)

const deep = Object.assign({}, require('../package.json'), { level: 'info' })

const max = 10
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})

require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)

const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})

const run = bench([
  function benchBunyanDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      blog.info(deep)
    }
    setImmediate(cb)
  },
  function benchWinstonDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      chill.log(deep)
    }
    setImmediate(cb)
  },
  function benchBoleDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      bole.info(deep)
    }
    setImmediate(cb)
  },
  function benchLogLevelDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      loglevel.info(deep)
    }
    setImmediate(cb)
  },
  function benchPinoDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info(deep)
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info(deep)
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info(deep)
    }
    setImmediate(cb)
  }
], 10000)

run(run)

50 backend/node_modules/pino/benchmarks/formatters.bench.js generated vendored Normal file
@@ -0,0 +1,50 @@
'use strict'

const formatters = {
  level (label, number) {
    return {
      log: {
        level: label
      }
    }
  },
  bindings (bindings) {
    return {
      process: {
        pid: bindings.pid
      },
      host: {
        name: bindings.hostname
      }
    }
  },
  log (obj) {
    return { foo: 'bar', ...obj }
  }
}

const bench = require('fastbench')
const pino = require('../')
delete require.cache[require.resolve('../')]
const pinoNoFormatters = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const pinoFormatters = require('../')({ formatters }, pino.destination('/dev/null'))

const max = 10

const run = bench([
  function benchPinoNoFormatters (cb) {
    for (var i = 0; i < max; i++) {
      pinoNoFormatters.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoFormatters (cb) {
    for (var i = 0; i < max; i++) {
      pinoFormatters.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

67 backend/node_modules/pino/benchmarks/internal/custom-levels.js generated vendored Normal file
@@ -0,0 +1,67 @@
'use strict'

const bench = require('fastbench')
const pino = require('../../')

const base = pino(pino.destination('/dev/null'))
const baseCl = pino({
  customLevels: { foo: 31 }
}, pino.destination('/dev/null'))
const child = base.child({})
const childCl = base.child({
  customLevels: { foo: 31 }
})
const childOfBaseCl = baseCl.child({})

const max = 100

const run = bench([
  function benchPinoNoCustomLevel (cb) {
    for (var i = 0; i < max; i++) {
      base.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoCustomLevel (cb) {
    for (var i = 0; i < max; i++) {
      baseCl.foo({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchChildNoCustomLevel (cb) {
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildCustomLevel (cb) {
    for (var i = 0; i < max; i++) {
      childCl.foo({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildInheritedCustomLevel (cb) {
    for (var i = 0; i < max; i++) {
      childOfBaseCl.foo({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildCreation (cb) {
    const child = base.child({})
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildCreationCustomLevel (cb) {
    const child = base.child({
      customLevels: { foo: 31 }
    })
    for (var i = 0; i < max; i++) {
      child.foo({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

76 backend/node_modules/pino/benchmarks/internal/just-pino-heavy.bench.js generated vendored Normal file
@@ -0,0 +1,76 @@
'use strict'

const bench = require('fastbench')
const pino = require('../../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plog = pino(dest)
delete require.cache[require.resolve('../../')]
const plogDest = require('../../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../../')]
const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false }))
const deep = require('../../package.json')
deep.deep = JSON.parse(JSON.stringify(deep))
deep.deep.deep = JSON.parse(JSON.stringify(deep))
const longStr = JSON.stringify(deep)

const max = 10

const run = bench([
  function benchPinoLongString (cb) {
    for (var i = 0; i < max; i++) {
      plog.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPinoDestLongString (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPinoAsyncLongString (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPinoDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      plog.info(deep)
    }
    setImmediate(cb)
  },
  function benchPinoDestDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info(deep)
    }
    setImmediate(cb)
  },
  function benchPinoAsyncDeepObj (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info(deep)
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoDestInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoAsyncInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello %j', deep)
    }
    setImmediate(cb)
  }
], 1000)

run(run)

182 backend/node_modules/pino/benchmarks/internal/just-pino.bench.js generated vendored Normal file
@@ -0,0 +1,182 @@
'use strict'

const bench = require('fastbench')
const pino = require('../../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plog = pino(dest)
delete require.cache[require.resolve('../../')]
const plogDest = require('../../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../../')]
const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false }))
const plogChild = plog.child({ a: 'property' })
const plogDestChild = plogDest.child({ a: 'property' })
const plogAsyncChild = plogAsync.child({ a: 'property' })
const plogChildChild = plog.child({ a: 'property' }).child({ sub: 'child' })
const plogDestChildChild = plogDest.child({ a: 'property' }).child({ sub: 'child' })
const plogAsyncChildChild = plogAsync.child({ a: 'property' }).child({ sub: 'child' })

const max = 10

const run = bench([
  function benchPino (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoDest (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoExtreme (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoObj (cb) {
    for (var i = 0; i < max; i++) {
      plog.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestObj (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncObj (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChild (cb) {
    for (var i = 0; i < max; i++) {
      plogChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestChild (cb) {
    for (var i = 0; i < max; i++) {
      plogDestChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncChild (cb) {
    for (var i = 0; i < max; i++) {
      plogAsyncChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogDestChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncChildChild (cb) {
    for (var i = 0; i < max; i++) {
      plogAsyncChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildCreation (cb) {
    const child = plog.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoDestChildCreation (cb) {
    const child = plogDest.child({ a: 'property' })
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoMulti (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoDestMulti (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoAsyncMulti (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoDestInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoAsyncInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoDestInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoAsyncInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plog.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoDestInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

75 backend/node_modules/pino/benchmarks/internal/parent-vs-child.bench.js generated vendored Normal file
@@ -0,0 +1,75 @@
'use strict'

const bench = require('fastbench')
const pino = require('../../')

const base = pino(pino.destination('/dev/null'))
const child = base.child({})
const childChild = child.child({})
const childChildChild = childChild.child({})
const childChildChildChild = childChildChild.child({})
const child2 = base.child({})
const baseSerializers = pino(pino.destination('/dev/null'))
const baseSerializersChild = baseSerializers.child({})
const baseSerializersChildSerializers = baseSerializers.child({})

const max = 100

const run = bench([
  function benchPinoBase (cb) {
    for (var i = 0; i < max; i++) {
      base.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChild (cb) {
    for (var i = 0; i < max; i++) {
      child.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChild (cb) {
    for (var i = 0; i < max; i++) {
      childChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChildChild (cb) {
    for (var i = 0; i < max; i++) {
      childChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChildChildChildChild (cb) {
    for (var i = 0; i < max; i++) {
      childChildChildChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoChild2 (cb) {
    for (var i = 0; i < max; i++) {
      child2.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoBaseSerializers (cb) {
    for (var i = 0; i < max; i++) {
      baseSerializers.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoBaseSerializersChild (cb) {
    for (var i = 0; i < max; i++) {
      baseSerializersChild.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoBaseSerializersChildSerializers (cb) {
    for (var i = 0; i < max; i++) {
      baseSerializersChildSerializers.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

86 backend/node_modules/pino/benchmarks/internal/redact.bench.js generated vendored Normal file
@@ -0,0 +1,86 @@
'use strict'

const bench = require('fastbench')
const pino = require('../../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plog = pino(dest)
delete require.cache[require.resolve('../../')]
const plogAsync = require('../../')(pino.destination({ dest: '/dev/null', sync: false }))
delete require.cache[require.resolve('../../')]
const plogUnsafe = require('../../')({ safe: false }, dest)
delete require.cache[require.resolve('../../')]
const plogUnsafeAsync = require('../../')(
  { safe: false },
  pino.destination({ dest: '/dev/null', sync: false })
)
const plogRedact = pino({ redact: ['a.b.c'] }, dest)
delete require.cache[require.resolve('../../')]
const plogAsyncRedact = require('../../')(
  { redact: ['a.b.c'] },
  pino.destination({ dest: '/dev/null', sync: false })
)
delete require.cache[require.resolve('../../')]
const plogUnsafeRedact = require('../../')({ redact: ['a.b.c'], safe: false }, dest)
delete require.cache[require.resolve('../../')]
const plogUnsafeAsyncRedact = require('../../')(
  { redact: ['a.b.c'], safe: false },
  pino.destination({ dest: '/dev/null', sync: false })
)

const max = 10

// note that "redact me." is the same amount of bytes as the censor: "[Redacted]"

const run = bench([
  function benchPinoNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plog.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafe.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafeRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoAsyncRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeAsyncNoRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafeAsync.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  },
  function benchPinoUnsafeAsyncRedact (cb) {
    for (var i = 0; i < max; i++) {
      plogUnsafeAsyncRedact.info({ a: { b: { c: 'redact me.', d: 'leave me' } } })
    }
    setImmediate(cb)
  }
], 10000)

run(run)

81 backend/node_modules/pino/benchmarks/long-string.bench.js generated vendored Normal file
@@ -0,0 +1,81 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))

const crypto = require('crypto')

const longStr = crypto.randomBytes(2000).toString()

const max = 10
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})

require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)

const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})

const run = bench([
  function benchBunyan (cb) {
    for (var i = 0; i < max; i++) {
      blog.info(longStr)
    }
    setImmediate(cb)
  },
  function benchWinston (cb) {
    for (var i = 0; i < max; i++) {
      chill.info(longStr)
    }
    setImmediate(cb)
  },
  function benchBole (cb) {
    for (var i = 0; i < max; i++) {
      bole.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPino (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPinoMinLength (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info(longStr)
    }
    setImmediate(cb)
  },
  function benchPinoNodeStream (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info(longStr)
    }
    setImmediate(cb)
  }
], 1000)

run(run)

193
backend/node_modules/pino/benchmarks/multi-arg.bench.js
generated
vendored
Normal file
193
backend/node_modules/pino/benchmarks/multi-arg.bench.js
generated
vendored
Normal file
@@ -0,0 +1,193 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const plogNodeStream = pino(dest)
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
delete require.cache[require.resolve('../')]

const deep = require('../package.json')
deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep)))
deep.deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep)))
deep.deep.deep.deep = Object.assign({}, JSON.parse(JSON.stringify(deep)))

const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})

require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)

const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})

const max = 10

const run = bench([
  function benchBunyanInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchWinstonInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchBoleInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolate (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %s', 'world')
    }
    setImmediate(cb)
  },
  function benchBunyanInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },

  function benchWinstonInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchBoleInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolateAll (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %s %j %d', 'world', { obj: true }, 4)
    }
    setImmediate(cb)
  },
  function benchBunyanInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchWinstonInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchBoleInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolateExtra (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %s %j %d', 'world', { obj: true }, 4, { another: 'obj' })
    }
    setImmediate(cb)
  },
  function benchBunyanInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      blog.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchWinstonInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      chill.log('info', 'hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchBoleInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      bole.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info('hello %j', deep)
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamInterpolateDeep (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info('hello %j', deep)
    }
    setImmediate(cb)
  }
], 10000)

run(run)
98
backend/node_modules/pino/benchmarks/multistream.js
generated
vendored
Normal file
@@ -0,0 +1,98 @@
'use strict'

const bench = require('fastbench')
const bunyan = require('bunyan')
const pino = require('../')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')

const tenStreams = [
  { stream: dest },
  { stream: dest },
  { stream: dest },
  { stream: dest },
  { stream: dest },
  { level: 'debug', stream: dest },
  { level: 'debug', stream: dest },
  { level: 'trace', stream: dest },
  { level: 'warn', stream: dest },
  { level: 'fatal', stream: dest }
]
const pinomsTen = pino({ level: 'debug' }, pino.multistream(tenStreams))

const fourStreams = [
  { stream: dest },
  { stream: dest },
  { level: 'debug', stream: dest },
  { level: 'trace', stream: dest }
]
const pinomsFour = pino({ level: 'debug' }, pino.multistream(fourStreams))

const pinomsOne = pino({ level: 'info' }, pino.multistream(dest))
const blogOne = bunyan.createLogger({
  name: 'myapp',
  streams: [{ stream: dest }]
})

const blogTen = bunyan.createLogger({
  name: 'myapp',
  streams: tenStreams
})
const blogFour = bunyan.createLogger({
  name: 'myapp',
  streams: fourStreams
})

const max = 10
const run = bench([
  function benchBunyanTen (cb) {
    for (let i = 0; i < max; i++) {
      blogTen.info('hello world')
      blogTen.debug('hello world')
      blogTen.trace('hello world')
      blogTen.warn('hello world')
      blogTen.fatal('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoMSTen (cb) {
    for (let i = 0; i < max; i++) {
      pinomsTen.info('hello world')
      pinomsTen.debug('hello world')
      pinomsTen.trace('hello world')
      pinomsTen.warn('hello world')
      pinomsTen.fatal('hello world')
    }
    setImmediate(cb)
  },
  function benchBunyanFour (cb) {
    for (let i = 0; i < max; i++) {
      blogFour.info('hello world')
      blogFour.debug('hello world')
      blogFour.trace('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoMSFour (cb) {
    for (let i = 0; i < max; i++) {
      pinomsFour.info('hello world')
      pinomsFour.debug('hello world')
      pinomsFour.trace('hello world')
    }
    setImmediate(cb)
  },
  function benchBunyanOne (cb) {
    for (let i = 0; i < max; i++) {
      blogOne.info('hello world')
    }
    setImmediate(cb)
  },
  function benchPinoMSOne (cb) {
    for (let i = 0; i < max; i++) {
      pinomsOne.info('hello world')
    }
    setImmediate(cb)
  }
], 10000)

run()
82
backend/node_modules/pino/benchmarks/object.bench.js
generated
vendored
Normal file
@@ -0,0 +1,82 @@
'use strict'

const bench = require('fastbench')
const pino = require('../')
const bunyan = require('bunyan')
const bole = require('bole')('bench')
const winston = require('winston')
const fs = require('node:fs')
const dest = fs.createWriteStream('/dev/null')
const loglevel = require('./utils/wrap-log-level')(dest)
const plogNodeStream = pino(dest)
delete require.cache[require.resolve('../')]
const plogDest = require('../')(pino.destination('/dev/null'))
delete require.cache[require.resolve('../')]
const plogMinLength = require('../')(pino.destination({ dest: '/dev/null', sync: false, minLength: 4096 }))
const blog = bunyan.createLogger({
  name: 'myapp',
  streams: [{
    level: 'trace',
    stream: dest
  }]
})
require('bole').output({
  level: 'info',
  stream: dest
}).setFastTime(true)
const chill = winston.createLogger({
  transports: [
    new winston.transports.Stream({
      stream: fs.createWriteStream('/dev/null')
    })
  ]
})

const max = 10

const run = bench([
  function benchBunyanObj (cb) {
    for (var i = 0; i < max; i++) {
      blog.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchWinstonObj (cb) {
    for (var i = 0; i < max; i++) {
      chill.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchBoleObj (cb) {
    for (var i = 0; i < max; i++) {
      bole.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchLogLevelObject (cb) {
    for (var i = 0; i < max; i++) {
      loglevel.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoObj (cb) {
    for (var i = 0; i < max; i++) {
      plogDest.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoMinLengthObj (cb) {
    for (var i = 0; i < max; i++) {
      plogMinLength.info({ hello: 'world' })
    }
    setImmediate(cb)
  },
  function benchPinoNodeStreamObj (cb) {
    for (var i = 0; i < max; i++) {
      plogNodeStream.info({ hello: 'world' })
    }
    setImmediate(cb)
  }
], 10000)

run(run)
36
backend/node_modules/pino/benchmarks/utils/generate-benchmark-doc.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
'use strict'
const { join } = require('node:path')
const { execSync } = require('node:child_process')

const run = (type) => {
  process.stderr.write(`benchmarking ${type}\n`)
  return execSync(`node ${join(__dirname, 'runbench')} ${type} -q`)
}

console.log(`
# Benchmarks

\`pino.info('hello world')\`:

\`\`\`
${run('basic')}
\`\`\`

\`pino.info({'hello': 'world'})\`:

\`\`\`
${run('object')}
\`\`\`

\`pino.info(aBigDeeplyNestedObject)\`:

\`\`\`
${run('deep-object')}
\`\`\`

\`pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})\`:

For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended
to include a timestamp and [bole](http://npm.im/bole) had
\`fastTime\` mode switched on.
`)
138
backend/node_modules/pino/benchmarks/utils/runbench.js
generated
vendored
Normal file
@@ -0,0 +1,138 @@
'use strict'

const { type, platform, arch, release, cpus } = require('node:os')
const { resolve, join } = require('node:path')
const spawn = require('node:child_process').spawn
const pump = require('pump')
const split = require('split2')
const through = require('through2')
const steed = require('steed')

function usage () {
  console.log(`
Pino Benchmarks

To run a benchmark, specify which to run:

・all ⁃ run all benchmarks (takes a while)
・basic ⁃ log a simple string
・object ⁃ logging a basic object
・deep-object ⁃ logging a large object
・multi-arg ⁃ multiple log method arguments
・child ⁃ child from a parent
・child-child ⁃ child from a child
・child-creation ⁃ child constructor
・formatters ⁃ difference between with or without formatters

Example:

node runbench basic
`)
}

if (!process.argv[2]) {
  usage()
  process.exit()
}

const quiet = process.argv[3] === '-q'

const selectedBenchmark = process.argv[2].toLowerCase()
const benchmarkDir = resolve(__dirname, '..')
const benchmarks = {
  basic: 'basic.bench.js',
  object: 'object.bench.js',
  'deep-object': 'deep-object.bench.js',
  'multi-arg': 'multi-arg.bench.js',
  'long-string': 'long-string.bench.js',
  child: 'child.bench.js',
  'child-child': 'child-child.bench.js',
  'child-creation': 'child-creation.bench.js',
  formatters: 'formatters.bench.js'
}

function runBenchmark (name, done) {
  const benchmarkResults = {}
  benchmarkResults[name] = {}

  const processor = through(function (line, enc, cb) {
    const [label, time] = ('' + line).split(': ')
    const [target, iterations] = label.split('*')
    const logger = target.replace('bench', '')

    if (!benchmarkResults[name][logger]) benchmarkResults[name][logger] = []

    benchmarkResults[name][logger].push({
      time: time.replace('ms', ''),
      iterations: iterations.replace(':', '')
    })

    cb()
  })

  if (quiet === false) console.log(`Running ${name.toUpperCase()} benchmark\n`)

  const benchmark = spawn(
    process.argv[0],
    [join(benchmarkDir, benchmarks[name])]
  )

  if (quiet === false) {
    benchmark.stdout.pipe(process.stdout)
  }

  pump(benchmark.stdout, split(), processor)

  benchmark.on('exit', () => {
    console.log()
    if (done && typeof done === 'function') done(null, benchmarkResults)
  })
}

function sum (arr) {
  let result = 0
  for (var i = 0; i < arr.length; i += 1) {
    result += Number.parseFloat(arr[i].time)
  }
  return result
}

function displayResults (results) {
  if (quiet === false) console.log('==========')
  const benchNames = Object.keys(results)
  for (var i = 0; i < benchNames.length; i += 1) {
    console.log(`${benchNames[i].toUpperCase()} benchmark averages`)
    const benchmark = results[benchNames[i]]
    const loggers = Object.keys(benchmark)
    for (var j = 0; j < loggers.length; j += 1) {
      const logger = benchmark[loggers[j]]
      const average = sum(logger) / logger.length
      console.log(`${loggers[j]} average: ${average.toFixed(3)}ms`)
    }
  }
  if (quiet === false) {
    console.log('==========')
    console.log(
      `System: ${type()}/${platform()} ${arch()} ${release()}`,
      `~ ${cpus()[0].model} (cores/threads: ${cpus().length})`
    )
  }
}

function toBench (done) {
  runBenchmark(this.name, done)
}

const benchQueue = []
if (selectedBenchmark !== 'all') {
  benchQueue.push(toBench.bind({ name: selectedBenchmark }))
} else {
  const keys = Object.keys(benchmarks)
  for (var i = 0; i < keys.length; i += 1) {
    benchQueue.push(toBench.bind({ name: keys[i] }))
  }
}
steed.series(benchQueue, function (err, results) {
  if (err) return console.error(err.message)
  results.forEach(displayResults)
})
55
backend/node_modules/pino/benchmarks/utils/wrap-log-level.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
'use strict'

const { readFileSync } = require('node:fs')
const vm = require('vm')
const { join } = require('node:path')
const code = readFileSync(
  join(__dirname, '..', '..', 'node_modules', 'loglevel', 'lib', 'loglevel.js')
)
const { Console } = require('console')

function build (dest) {
  const sandbox = {
    module: {},
    console: new Console(dest, dest)
  }
  const context = vm.createContext(sandbox)

  const script = new vm.Script(code)
  script.runInContext(context)

  const loglevel = sandbox.log

  const originalFactory = loglevel.methodFactory
  loglevel.methodFactory = function (methodName, logLevel, loggerName) {
    const rawMethod = originalFactory(methodName, logLevel, loggerName)

    return function () {
      const time = new Date()
      let array
      if (typeof arguments[0] === 'string') {
        arguments[0] = '[' + time.toISOString() + '] ' + arguments[0]
        rawMethod.apply(null, arguments)
      } else {
        array = new Array(arguments.length + 1)
        array[0] = '[' + time.toISOString() + ']'
        for (var i = 0; i < arguments.length; i++) {
          array[i + 1] = arguments[i]
        }
        rawMethod.apply(null, array)
      }
    }
  }

  loglevel.setLevel(loglevel.levels.INFO)
  return loglevel
}

module.exports = build

if (require.main === module) {
  const loglevel = build(process.stdout)
  loglevel.info('hello')
  loglevel.info({ hello: 'world' })
  loglevel.info('hello %j', { hello: 'world' })
}
6
backend/node_modules/pino/bin.js
generated
vendored
Executable file
@@ -0,0 +1,6 @@
#!/usr/bin/env node
console.error(
  '`pino` cli has been removed. Use `pino-pretty` cli instead.\n' +
  '\nSee: https://github.com/pinojs/pino-pretty'
)
process.exit(1)
505
backend/node_modules/pino/browser.js
generated
vendored
Normal file
@@ -0,0 +1,505 @@
'use strict'

const format = require('quick-format-unescaped')

module.exports = pino

const _console = pfGlobalThisOrFallback().console || {}
const stdSerializers = {
  mapHttpRequest: mock,
  mapHttpResponse: mock,
  wrapRequestSerializer: passthrough,
  wrapResponseSerializer: passthrough,
  wrapErrorSerializer: passthrough,
  req: mock,
  res: mock,
  err: asErrValue,
  errWithCause: asErrValue
}
function levelToValue (level, logger) {
  return level === 'silent'
    ? Infinity
    : logger.levels.values[level]
}
const baseLogFunctionSymbol = Symbol('pino.logFuncs')
const hierarchySymbol = Symbol('pino.hierarchy')

const logFallbackMap = {
  error: 'log',
  fatal: 'error',
  warn: 'error',
  info: 'log',
  debug: 'log',
  trace: 'log'
}

function appendChildLogger (parentLogger, childLogger) {
  const newEntry = {
    logger: childLogger,
    parent: parentLogger[hierarchySymbol]
  }
  childLogger[hierarchySymbol] = newEntry
}

function setupBaseLogFunctions (logger, levels, proto) {
  const logFunctions = {}
  levels.forEach(level => {
    logFunctions[level] = proto[level] ? proto[level] : (_console[level] || _console[logFallbackMap[level] || 'log'] || noop)
  })
  logger[baseLogFunctionSymbol] = logFunctions
}

function shouldSerialize (serialize, serializers) {
  if (Array.isArray(serialize)) {
    const hasToFilter = serialize.filter(function (k) {
      return k !== '!stdSerializers.err'
    })
    return hasToFilter
  } else if (serialize === true) {
    return Object.keys(serializers)
  }

  return false
}

function pino (opts) {
  opts = opts || {}
  opts.browser = opts.browser || {}

  const transmit = opts.browser.transmit
  if (transmit && typeof transmit.send !== 'function') { throw Error('pino: transmit option must have a send function') }

  const proto = opts.browser.write || _console
  if (opts.browser.write) opts.browser.asObject = true
  const serializers = opts.serializers || {}
  const serialize = shouldSerialize(opts.browser.serialize, serializers)
  let stdErrSerialize = opts.browser.serialize

  if (
    Array.isArray(opts.browser.serialize) &&
    opts.browser.serialize.indexOf('!stdSerializers.err') > -1
  ) stdErrSerialize = false

  const customLevels = Object.keys(opts.customLevels || {})
  const levels = ['error', 'fatal', 'warn', 'info', 'debug', 'trace'].concat(customLevels)

  if (typeof proto === 'function') {
    levels.forEach(function (level) {
      proto[level] = proto
    })
  }
  if (opts.enabled === false || opts.browser.disabled) opts.level = 'silent'
  const level = opts.level || 'info'
  const logger = Object.create(proto)
  if (!logger.log) logger.log = noop

  setupBaseLogFunctions(logger, levels, proto)
  // setup root hierarchy entry
  appendChildLogger({}, logger)

  Object.defineProperty(logger, 'levelVal', {
    get: getLevelVal
  })
  Object.defineProperty(logger, 'level', {
    get: getLevel,
    set: setLevel
  })

  const setOpts = {
    transmit,
    serialize,
    asObject: opts.browser.asObject,
    asObjectBindingsOnly: opts.browser.asObjectBindingsOnly,
    formatters: opts.browser.formatters,
    levels,
    timestamp: getTimeFunction(opts),
    messageKey: opts.messageKey || 'msg',
    onChild: opts.onChild || noop
  }
  logger.levels = getLevels(opts)
  logger.level = level

  logger.isLevelEnabled = function (level) {
    if (!this.levels.values[level]) {
      return false
    }

    return this.levels.values[level] >= this.levels.values[this.level]
  }
  logger.setMaxListeners = logger.getMaxListeners =
    logger.emit = logger.addListener = logger.on =
    logger.prependListener = logger.once =
    logger.prependOnceListener = logger.removeListener =
    logger.removeAllListeners = logger.listeners =
    logger.listenerCount = logger.eventNames =
    logger.write = logger.flush = noop
  logger.serializers = serializers
  logger._serialize = serialize
  logger._stdErrSerialize = stdErrSerialize
  logger.child = function (...args) { return child.call(this, setOpts, ...args) }

  if (transmit) logger._logEvent = createLogEventShape()

  function getLevelVal () {
    return levelToValue(this.level, this)
  }

  function getLevel () {
    return this._level
  }
  function setLevel (level) {
    if (level !== 'silent' && !this.levels.values[level]) {
      throw Error('unknown level ' + level)
    }
    this._level = level

    set(this, setOpts, logger, 'error') // <-- must stay first
    set(this, setOpts, logger, 'fatal')
    set(this, setOpts, logger, 'warn')
    set(this, setOpts, logger, 'info')
    set(this, setOpts, logger, 'debug')
    set(this, setOpts, logger, 'trace')

    customLevels.forEach((level) => {
      set(this, setOpts, logger, level)
    })
  }

  function child (setOpts, bindings, childOptions) {
    if (!bindings) {
      throw new Error('missing bindings for child Pino')
    }
    childOptions = childOptions || {}
    if (serialize && bindings.serializers) {
      childOptions.serializers = bindings.serializers
    }
    const childOptionsSerializers = childOptions.serializers
    if (serialize && childOptionsSerializers) {
      var childSerializers = Object.assign({}, serializers, childOptionsSerializers)
      var childSerialize = opts.browser.serialize === true
        ? Object.keys(childSerializers)
        : serialize
      delete bindings.serializers
      applySerializers([bindings], childSerialize, childSerializers, this._stdErrSerialize)
    }
    function Child (parent) {
      this._childLevel = (parent._childLevel | 0) + 1

      // make sure bindings are available in the `set` function
      this.bindings = bindings

      if (childSerializers) {
        this.serializers = childSerializers
        this._serialize = childSerialize
      }
      if (transmit) {
        this._logEvent = createLogEventShape(
          [].concat(parent._logEvent.bindings, bindings)
        )
      }
    }
    Child.prototype = this
    const newLogger = new Child(this)

    // must happen before the level is assigned
    appendChildLogger(this, newLogger)
    newLogger.child = function (...args) { return child.call(this, setOpts, ...args) }
    // required to actually initialize the logger functions for any given child
    newLogger.level = childOptions.level || this.level // allow level to be set by childOptions
    setOpts.onChild(newLogger)

    return newLogger
  }
  return logger
}

function getLevels (opts) {
  const customLevels = opts.customLevels || {}

  const values = Object.assign({}, pino.levels.values, customLevels)
  const labels = Object.assign({}, pino.levels.labels, invertObject(customLevels))

  return {
    values,
    labels
  }
}

function invertObject (obj) {
  const inverted = {}
  Object.keys(obj).forEach(function (key) {
    inverted[obj[key]] = key
  })
  return inverted
}

pino.levels = {
  values: {
    fatal: 60,
    error: 50,
    warn: 40,
    info: 30,
    debug: 20,
    trace: 10
  },
  labels: {
    10: 'trace',
    20: 'debug',
    30: 'info',
    40: 'warn',
    50: 'error',
    60: 'fatal'
  }
}

pino.stdSerializers = stdSerializers
pino.stdTimeFunctions = Object.assign({}, { nullTime, epochTime, unixTime, isoTime })

function getBindingChain (logger) {
  const bindings = []
  if (logger.bindings) {
    bindings.push(logger.bindings)
  }

  // traverse up the tree to get all bindings
  let hierarchy = logger[hierarchySymbol]
  while (hierarchy.parent) {
    hierarchy = hierarchy.parent
    if (hierarchy.logger.bindings) {
      bindings.push(hierarchy.logger.bindings)
    }
  }

  return bindings.reverse()
}

function set (self, opts, rootLogger, level) {
  // override the current log functions with either `noop` or the base log function
  Object.defineProperty(self, level, {
    value: (levelToValue(self.level, rootLogger) > levelToValue(level, rootLogger)
      ? noop
      : rootLogger[baseLogFunctionSymbol][level]),
    writable: true,
    enumerable: true,
    configurable: true
  })

  if (self[level] === noop) {
    if (!opts.transmit) return

    const transmitLevel = opts.transmit.level || self.level
    const transmitValue = levelToValue(transmitLevel, rootLogger)
    const methodValue = levelToValue(level, rootLogger)
    if (methodValue < transmitValue) return
  }

  // make sure the log format is correct
  self[level] = createWrap(self, opts, rootLogger, level)

  // prepend bindings if it is not the root logger
  const bindings = getBindingChain(self)
  if (bindings.length === 0) {
    // early exit in case for rootLogger
    return
  }
  self[level] = prependBindingsInArguments(bindings, self[level])
}

function prependBindingsInArguments (bindings, logFunc) {
  return function () {
    return logFunc.apply(this, [...bindings, ...arguments])
  }
}

function createWrap (self, opts, rootLogger, level) {
  return (function (write) {
    return function LOG () {
      const ts = opts.timestamp()
      const args = new Array(arguments.length)
      const proto = (Object.getPrototypeOf && Object.getPrototypeOf(this) === _console) ? _console : this
      for (var i = 0; i < args.length; i++) args[i] = arguments[i]

      var argsIsSerialized = false
      if (opts.serialize) {
        applySerializers(args, this._serialize, this.serializers, this._stdErrSerialize)
        argsIsSerialized = true
      }
      if (opts.asObject || opts.formatters) {
        write.call(proto, ...asObject(this, level, args, ts, opts))
      } else write.apply(proto, args)

      if (opts.transmit) {
        const transmitLevel = opts.transmit.level || self._level
        const transmitValue = levelToValue(transmitLevel, rootLogger)
        const methodValue = levelToValue(level, rootLogger)
        if (methodValue < transmitValue) return
        transmit(this, {
          ts,
          methodLevel: level,
          methodValue,
          transmitLevel,
          transmitValue: rootLogger.levels.values[opts.transmit.level || self._level],
          send: opts.transmit.send,
          val: levelToValue(self._level, rootLogger)
        }, args, argsIsSerialized)
      }
    }
  })(self[baseLogFunctionSymbol][level])
}

function asObject (logger, level, args, ts, opts) {
  const {
    level: levelFormatter,
    log: logObjectFormatter = (obj) => obj
  } = opts.formatters || {}
  const argsCloned = args.slice()
  let msg = argsCloned[0]
  const logObject = {}

  let lvl = (logger._childLevel | 0) + 1
  if (lvl < 1) lvl = 1

  if (ts) {
    logObject.time = ts
  }

  if (levelFormatter) {
    const formattedLevel = levelFormatter(level, logger.levels.values[level])
    Object.assign(logObject, formattedLevel)
  } else {
    logObject.level = logger.levels.values[level]
  }

  if (opts.asObjectBindingsOnly) {
    if (msg !== null && typeof msg === 'object') {
      while (lvl-- && typeof argsCloned[0] === 'object') {
        Object.assign(logObject, argsCloned.shift())
      }
    }

    const formattedLogObject = logObjectFormatter(logObject)
    return [formattedLogObject, ...argsCloned]
  } else {
    // deliberate, catching objects, arrays
    if (msg !== null && typeof msg === 'object') {
      while (lvl-- && typeof argsCloned[0] === 'object') {
        Object.assign(logObject, argsCloned.shift())
      }
      msg = argsCloned.length ? format(argsCloned.shift(), argsCloned) : undefined
    } else if (typeof msg === 'string') msg = format(argsCloned.shift(), argsCloned)
    if (msg !== undefined) logObject[opts.messageKey] = msg

    const formattedLogObject = logObjectFormatter(logObject)
    return [formattedLogObject]
  }
}

function applySerializers (args, serialize, serializers, stdErrSerialize) {
  for (const i in args) {
    if (stdErrSerialize && args[i] instanceof Error) {
      args[i] = pino.stdSerializers.err(args[i])
    } else if (typeof args[i] === 'object' && !Array.isArray(args[i]) && serialize) {
      for (const k in args[i]) {
        if (serialize.indexOf(k) > -1 && k in serializers) {
          args[i][k] = serializers[k](args[i][k])
        }
      }
    }
  }
}

function transmit (logger, opts, args, argsIsSerialized = false) {
  const send = opts.send
  const ts = opts.ts
  const methodLevel = opts.methodLevel
  const methodValue = opts.methodValue
  const val = opts.val
  const bindings = logger._logEvent.bindings

  if (!argsIsSerialized) {
    applySerializers(
      args,
      logger._serialize || Object.keys(logger.serializers),
      logger.serializers,
      logger._stdErrSerialize === undefined ? true : logger._stdErrSerialize
    )
  }

  logger._logEvent.ts = ts
  logger._logEvent.messages = args.filter(function (arg) {
    // bindings can only be objects, so reference equality check via indexOf is fine
    return bindings.indexOf(arg) === -1
  })

  logger._logEvent.level.label = methodLevel
  logger._logEvent.level.value = methodValue

  send(methodLevel, logger._logEvent, val)

  logger._logEvent = createLogEventShape(bindings)
}

function createLogEventShape (bindings) {
  return {
    ts: 0,
    messages: [],
    bindings: bindings || [],
    level: { label: '', value: 0 }
  }
}

function asErrValue (err) {
  const obj = {
    type: err.constructor.name,
    msg: err.message,
    stack: err.stack
  }
  for (const key in err) {
    if (obj[key] === undefined) {
      obj[key] = err[key]
    }
  }
  return obj
}

function getTimeFunction (opts) {
  if (typeof opts.timestamp === 'function') {
    return opts.timestamp
  }
  if (opts.timestamp === false) {
    return nullTime
  }
  return epochTime
}

function mock () { return {} }
function passthrough (a) { return a }
function noop () {}

function nullTime () { return false }
function epochTime () { return Date.now() }
function unixTime () { return Math.round(Date.now() / 1000.0) }
function isoTime () { return new Date(Date.now()).toISOString() } // using Date.now() for testability

/* eslint-disable */
/* istanbul ignore next */
function pfGlobalThisOrFallback () {
  function defd (o) { return typeof o !== 'undefined' && o }
  try {
    if (typeof globalThis !== 'undefined') return globalThis
    Object.defineProperty(Object.prototype, 'globalThis', {
      get: function () {
        delete Object.prototype.globalThis
        return (this.globalThis = this)
      },
      configurable: true
    })
    return globalThis
  } catch (e) {
    return defd(self) || defd(window) || defd(this) || {}
  }
}
/* eslint-enable */

module.exports.default = pino
module.exports.pino = pino
25
backend/node_modules/pino/build/sync-version.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
'use strict'

const fs = require('node:fs')
const path = require('node:path')
let { version } = require('../package.json')

let passedVersion = process.argv[2]

if (passedVersion) {
  passedVersion = passedVersion.trim().replace(/^v/, '')
  if (version !== passedVersion) {
    console.log(`Syncing version from ${version} to ${passedVersion}`)
    version = passedVersion
    const packageJson = require('../package.json')
    packageJson.version = version
    fs.writeFileSync(path.resolve('./package.json'), JSON.stringify(packageJson, null, 2) + '\n', { encoding: 'utf-8' })
  }
}

const metaContent = `'use strict'

module.exports = { version: '${version}' }
`

fs.writeFileSync(path.resolve('./lib/meta.js'), metaContent, { encoding: 'utf-8' })
1588
backend/node_modules/pino/docs/api.md
generated
vendored
Normal file
File diff suppressed because it is too large
40
backend/node_modules/pino/docs/asynchronous.md
generated
vendored
Normal file
@@ -0,0 +1,40 @@
# Asynchronous Logging

Asynchronous logging keeps Pino's overhead to a minimum.
It works by buffering log messages and writing them in larger chunks.

```js
const pino = require('pino')
const logger = pino(pino.destination({
  dest: './my-file', // omit for stdout
  minLength: 4096, // Buffer before writing
  sync: false // Asynchronous logging
}))
```

It's always possible to turn on synchronous logging by passing `sync: true`.
In this mode of operation, log messages are written directly to the
output stream as they are generated, with a _blocking_ operation.
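
For instance, a minimal sketch of an explicitly synchronous destination (the file path is illustrative):

```js
const pino = require('pino')
// Every log call performs a blocking write; nothing is buffered.
const logger = pino(pino.destination({ dest: './my-file', sync: true }))
logger.info('written before this call returns')
```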

* See [`pino.destination`](/docs/api.md#pino-destination)
* `pino.destination` is implemented on [`sonic-boom` ⇗](https://github.com/mcollina/sonic-boom).

### AWS Lambda

Asynchronous logging is disabled by default on AWS Lambda or any other environment
that modifies `process.stdout`. If forcefully turned on, we recommend calling `dest.flushSync()` at the end
of each function execution to avoid losing data.
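
A minimal sketch of that pattern, assuming a Lambda-style handler (the handler shape and event payload are illustrative, not part of Pino's API):

```js
const pino = require('pino')
const dest = pino.destination({ sync: false }) // buffered, asynchronous
const logger = pino(dest)

exports.handler = async function (event) {
  logger.info({ event }, 'invocation started')
  // ... handler work ...
  dest.flushSync() // flush buffered log lines before the runtime is suspended
}
```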

## Caveats

Asynchronous logging has a couple of important caveats:

* As opposed to the synchronous mode, there is not a one-to-one relationship between
calls to logging methods (e.g. `logger.info`) and writes to a log file
* There is a possibility of the most recently buffered log messages being lost
in case of a system failure, e.g. a power cut.

See also:

* [`pino.destination` API](/docs/api.md#pino-destination)
* [`destination` parameter](/docs/api.md#destination)
55
backend/node_modules/pino/docs/benchmarks.md
generated
vendored
Normal file
@@ -0,0 +1,55 @@

# Benchmarks

`pino.info('hello world')`:

```

BASIC benchmark averages
Bunyan average: 377.434ms
Winston average: 270.249ms
Bole average: 172.690ms
Debug average: 220.527ms
LogLevel average: 222.802ms
Pino average: 114.801ms
PinoMinLength average: 70.968ms
PinoNodeStream average: 159.192ms

```

`pino.info({'hello': 'world'})`:

```

OBJECT benchmark averages
BunyanObj average: 410.379ms
WinstonObj average: 273.120ms
BoleObj average: 185.069ms
LogLevelObject average: 433.425ms
PinoObj average: 119.315ms
PinoMinLengthObj average: 76.968ms
PinoNodeStreamObj average: 164.268ms

```

`pino.info(aBigDeeplyNestedObject)`:

```

DEEP-OBJECT benchmark averages
BunyanDeepObj average: 1.839ms
WinstonDeepObj average: 5.604ms
BoleDeepObj average: 3.422ms
LogLevelDeepObj average: 11.716ms
PinoDeepObj average: 2.256ms
PinoMinLengthDeepObj average: 2.240ms
PinoNodeStreamDeepObj average: 2.595ms

```

`pino.info('hello %s %j %d', 'world', {obj: true}, 4, {another: 'obj'})`:

For a fair comparison, [LogLevel](http://npm.im/loglevel) was extended
to include a timestamp and [bole](http://npm.im/bole) had
`fastTime` mode switched on.
242
backend/node_modules/pino/docs/browser.md
generated
vendored
Normal file
@@ -0,0 +1,242 @@
# Browser API

Pino is compatible with [`browserify`](https://npm.im/browserify) for browser-side usage.

This can be useful with isomorphic/universal JavaScript code.

By default, in the browser,
`pino` uses corresponding [Log4j](https://en.wikipedia.org/wiki/Log4j) `console` methods (`console.error`, `console.warn`, `console.info`, `console.debug`, `console.trace`) and uses `console.error` for any `fatal` level logs.

## Options

Pino can be passed a `browser` object in the options object,
which can have the following properties:

### `asObject` (Boolean)

```js
const pino = require('pino')({browser: {asObject: true}})
```

The `asObject` option will create a pino-like log object instead of
passing all arguments to a console method, for instance:

```js
pino.info('hi') // creates and logs {msg: 'hi', level: 30, time: <ts>}
```

When `write` is set, `asObject` will always be `true`.

### `asObjectBindingsOnly` (Boolean)

```js
const pino = require('pino')({browser: {asObjectBindingsOnly: true}})
```

The `asObjectBindingsOnly` option is similar to `asObject` but keeps the message
and arguments unformatted. This defers formatting of the message to the
actual `console` method call, where browsers offer richer formatting in
their devtools than if pino first formatted the message to a string.

```js
pino.info('hello %s', 'world') // creates and logs {level: 30, time: <ts>}, 'hello %s', 'world'
```

### `formatters` (Object)

An object containing functions for formatting the shape of the log lines. When provided, it enables the logger to produce a pino-like log object with customized formatting. Currently, it supports formatting for the `level` object only.

##### `level`

Changes the shape of the log level. The default shape is `{ level: number }`.
The function takes two arguments, the label of the level (e.g. `'info'`)
and the numeric value (e.g. `30`).

```js
const formatters = {
  level (label, number) {
    return { level: number }
  }
}
```


### `write` (Function | Object)

Instead of passing log messages to `console.log` they can be passed to
a supplied function.

If `write` is set to a single function, all logging objects are passed
to this function.

```js
const pino = require('pino')({
  browser: {
    write: (o) => {
      // do something with o
    }
  }
})
```

If `write` is an object, it can have methods that correspond to the
levels. When a message is logged at a given level, the corresponding
method is called. If a method isn't present, the logging falls back
to using the `console`.


```js
const pino = require('pino')({
  browser: {
    write: {
      info: function (o) {
        // process info log object
      },
      error: function (o) {
        // process error log object
      }
    }
  }
})
```

### `serialize`: (Boolean | Array)

The serializers provided to `pino` are ignored by default in the browser, including
the standard serializers provided with Pino. Since the default destination for log
messages is the console, values such as `Error` objects are enhanced for inspection,
which they otherwise wouldn't be if the Error serializer was enabled.

We can turn all serializers on,

```js
const pino = require('pino')({
  browser: {
    serialize: true
  }
})
```

Or we can selectively enable them via an array:

```js
const pino = require('pino')({
  serializers: {
    custom: myCustomSerializer,
    another: anotherSerializer
  },
  browser: {
    serialize: ['custom']
  }
})
// following will apply myCustomSerializer to the custom property,
// but will not apply anotherSerializer to another key
pino.info({custom: 'a', another: 'b'})
```

When `serialize` is `true` the standard error serializer is also enabled (see https://github.com/pinojs/pino/blob/master/docs/api.md#stdSerializers).
This is a global serializer, which will apply to any `Error` objects passed to the logger methods.

If `serialize` is an array, the standard error serializer is also automatically enabled. It can
be explicitly disabled by including the string `!stdSerializers.err` in the serialize array, like so:

```js
const pino = require('pino')({
  serializers: {
    custom: myCustomSerializer,
    another: anotherSerializer
  },
  browser: {
    serialize: ['!stdSerializers.err', 'custom'] // will not serialize Errors, will serialize `custom` keys
  }
})
```

The `serialize` array also applies to any child logger serializers (see https://github.com/pinojs/pino/blob/master/docs/api.md#discussion-2
for how to set child-bound serializers).

Unlike server pino, the serializers apply to every object passed to the logger method;
if the `asObject` option is `true`, this results in the serializers applying to the
first object (as in server pino).

For more info on serializers see https://github.com/pinojs/pino/blob/master/docs/api.md#mergingobject.

### `transmit` (Object)

An object with `send` and `level` properties.

The `transmit.level` property specifies the minimum level (inclusive) at which the `send` function
should be called. If not supplied, the `send` function will be called based on the main logging `level`
(set via `options.level`, defaulting to `info`).

The `transmit` object must have a `send` function which will be called after
writing the log message. The `send` function is passed the level of the log
message and a `logEvent` object.

The `logEvent` object is a data structure representing a log message: it represents
the arguments passed to a logger statement, the level
at which they were logged, and the hierarchy of child bindings.

The `logEvent` format is structured like so:

```js
{
  ts = Number,
  messages = Array,
  bindings = Array,
  level: { label = String, value = Number}
}
```

The `ts` property is a Unix epoch timestamp in milliseconds; the time is taken from the moment the
logger method is called.

The `messages` array contains all arguments passed to the logger method (for instance `logger.info('a', 'b', 'c')`
would result in a `messages` array of `['a', 'b', 'c']`).

The `bindings` array represents each child logger (if any), and the relevant bindings.
For instance, given `logger.child({a: 1}).child({b: 2}).info({c: 3})`, the bindings array
would hold `[{a: 1}, {b: 2}]` and the `messages` array would be `[{c: 3}]`. The `bindings`
are ordered according to their position in the child logger hierarchy, with the lowest index
being the top of the hierarchy.

By default, serializers are not applied to log output in the browser, but they will *always* be
applied to `messages` and `bindings` in the `logEvent` object. This allows us to ensure a consistent
format for all values between server and client.

The `level` holds the label (for instance `info`), and the corresponding numerical value
(for instance `30`). This could be important in cases where client-side level values and
labels differ from server-side.

The point of the `send` function is to remotely record log messages:

```js
const pino = require('pino')({
  browser: {
    transmit: {
      level: 'warn',
      send: function (level, logEvent) {
        if (level === 'warn') {
          // maybe send the logEvent to a separate endpoint
          // or maybe analyze the messages further before sending
        }
        // we could also use the `logEvent.level.value` property to determine
        // numerical value
        if (logEvent.level.value >= 50) { // covers error and fatal

          // send the logEvent somewhere
        }
      }
    }
  }
})
```

### `disabled` (Boolean)

```js
const pino = require('pino')({browser: {disabled: true}})
```

The `disabled` option will disable logging in the browser if set
to `true`; by default it is set to `false`.
40
backend/node_modules/pino/docs/bundling.md
generated
vendored
Normal file
@@ -0,0 +1,40 @@
# Bundling

Due to its internal architecture based on Worker Threads, it is not possible to bundle Pino *without* generating additional files.

In particular, a bundler must ensure that the following files are also bundled separately:

* `lib/worker.js` from the `thread-stream` dependency
* `file.js`
* `lib/worker.js`
* Any transport used by the user (like `pino-pretty`)

Once the files above have been generated, the bundler must also add information about them by injecting code that sets `__bundlerPathsOverrides` on the `globalThis` object.

The variable is an object whose keys are identifiers for the files and whose values are the paths of the files relative to the currently bundled files.

Example:

```javascript
// Inject this using your bundle plugin
globalThis.__bundlerPathsOverrides = {
  'thread-stream-worker': pinoWebpackAbsolutePath('./thread-stream-worker.js'),
  'pino/file': pinoWebpackAbsolutePath('./pino-file.js'),
  'pino-worker': pinoWebpackAbsolutePath('./pino-worker.js'),
  'pino-pretty': pinoWebpackAbsolutePath('./pino-pretty.js'),
};
```

Note that `pino/file`, `pino-worker` and `thread-stream-worker` are required identifiers. Other identifiers are possible based on the user configuration.

## Webpack Plugin

If you are a Webpack user, you can achieve this with [pino-webpack-plugin](https://github.com/pinojs/pino-webpack-plugin) without manual configuration of `__bundlerPathsOverrides`; however, you still need to configure it manually if you are using other bundlers.

## Esbuild Plugin

[esbuild-plugin-pino](https://github.com/davipon/esbuild-plugin-pino) is the esbuild plugin to generate extra pino files for bundling.

## Bun Plugin

[bun-plugin-pino](https://github.com/vktrl/bun-plugin-pino) is the Bun plugin to generate extra pino files for bundling.
95
backend/node_modules/pino/docs/child-loggers.md
generated
vendored
Normal file
@@ -0,0 +1,95 @@
# Child loggers

Let's assume we want to have `"module":"foo"` added to every log within a
module `foo.js`.

To accomplish this, simply use a child logger:

```js
'use strict'
// imports a pino logger instance of `require('pino')()`
const parentLogger = require('./lib/logger')
const log = parentLogger.child({module: 'foo'})

function doSomething () {
  log.info('doSomething invoked')
}

module.exports = {
  doSomething
}
```

## Cost of child logging

Child logger creation is fast:

```
benchBunyanCreation*10000: 564.514ms
benchBoleCreation*10000: 283.276ms
benchPinoCreation*10000: 258.745ms
benchPinoExtremeCreation*10000: 150.506ms
```

Logging through a child logger has little performance penalty:

```
benchBunyanChild*10000: 556.275ms
benchBoleChild*10000: 288.124ms
benchPinoChild*10000: 231.695ms
benchPinoExtremeChild*10000: 122.117ms
```

Logging via the child logger of a child logger also has negligible overhead:

```
benchBunyanChildChild*10000: 559.082ms
benchPinoChildChild*10000: 229.264ms
benchPinoExtremeChildChild*10000: 127.753ms
```

## Duplicate keys caveat

Naming conflicts can arise between child loggers and
children of child loggers.

This isn't as bad as it sounds: even if the same keys are used between
parent and child loggers, Pino resolves the conflict in the sanest way.

For example, consider the following:

```js
const pino = require('pino')
pino(pino.destination('./my-log'))
  .child({a: 'property'})
  .child({a: 'prop'})
  .info('howdy')
```

```sh
$ cat my-log
{"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":1459534114473,"a":"property","a":"prop"}
```

Notice how there are two keys named `a` in the JSON output. The sub-child's properties
appear after the parent child's properties.

At some point, the logs will most likely be processed (for instance with a [transport](transports.md)),
and this generally involves parsing. `JSON.parse` will return an object where the conflicting
namespace holds the final value assigned to it:

```sh
$ cat my-log | node -e "process.stdin.once('data', (line) => console.log(JSON.stringify(JSON.parse(line))))"
{"pid":95469,"hostname":"MacBook-Pro-3.home","level":30,"msg":"howdy","time":"2016-04-01T18:08:34.473Z","a":"prop"}
```

Ultimately the conflict is resolved by taking the last value, which aligns with Bunyan's child logging
behavior.

There may be cases where this edge case becomes problematic if a JSON parser with alternative behavior
is used to process the logs. It's recommended to be conscious of namespace conflicts with child loggers,
in light of an expected log processing approach.

One of Pino's performance tricks is to avoid building objects and stringifying
them, so we're building strings instead. This is why duplicate keys between
parents and children will end up in the log output.
16
backend/node_modules/pino/docs/diagnostics.md
generated
vendored
Normal file
@@ -0,0 +1,16 @@
# Diagnostics

Pino provides [tracing channel](tc) events that allow insight into the
internal workings of the library. The currently supported events are listed
below, followed by a minimal subscription sketch:

+ `tracing:pino_asJson:start`: emitted when the final serialization process
of logs is started. The emitted event payload has the following fields:
  - `instance`: the Pino instance associated with the function
  - `arguments`: the arguments passed to the function
+ `tracing:pino_asJson:end`: emitted at the end of the final serialization
process. The emitted event payload has the following fields:
  - `instance`: the Pino instance associated with the function
  - `arguments`: the arguments passed to the function
  - `result`: the finalized, newline delimited, log line as a string
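
As a minimal sketch, these events can be observed with Node's `diagnostics_channel` module; the payload field access follows the shapes listed above:

```js
const dc = require('node:diagnostics_channel')

dc.subscribe('tracing:pino_asJson:start', (message) => {
  // message.instance: the Pino instance; message.arguments: the log call arguments
})

dc.subscribe('tracing:pino_asJson:end', (message) => {
  // message.result: the finalized, newline-delimited log line
  process.stderr.write(`serialized ${message.result.length} bytes\n`)
})
```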

[tc]: https://nodejs.org/docs/latest/api/diagnostics_channel.html#tracingchannel-channels
86
backend/node_modules/pino/docs/ecosystem.md
generated
vendored
Normal file
@@ -0,0 +1,86 @@
# Pino Ecosystem

This is a list of ecosystem modules that integrate with `pino`.

Modules listed under [Core](#core) are maintained by the Pino team. Modules
listed under [Community](#community) are maintained by independent community
members.

Please send a PR to add new modules!

<a id="core"></a>
## Core

### Frameworks
+ [`express-pino-logger`](https://github.com/pinojs/express-pino-logger): use
Pino to log requests within [express](https://expressjs.com/).
+ [`koa-pino-logger`](https://github.com/pinojs/koa-pino-logger): use Pino to
log requests within [Koa](https://koajs.com/).
+ [`restify-pino-logger`](https://github.com/pinojs/restify-pino-logger): use
Pino to log requests within [restify](http://restify.com/).
+ [`rill-pino-logger`](https://github.com/pinojs/rill-pino-logger): use Pino as
the logger for the [Rill framework](https://rill.site/).

### Utilities
+ [`pino-arborsculpture`](https://github.com/pinojs/pino-arborsculpture): change
log levels at runtime.
+ [`pino-caller`](https://github.com/pinojs/pino-caller): add callsite to the log line.
+ [`pino-clf`](https://github.com/pinojs/pino-clf): reformat Pino logs into
Common Log Format.
+ [`pino-console`](https://github.com/pinojs/pino-console): adapter for the [WHATWG Console](https://console.spec.whatwg.org/) spec.
+ [`pino-debug`](https://github.com/pinojs/pino-debug): use Pino to interpret
[`debug`](https://npm.im/debug) logs.
+ [`pino-elasticsearch`](https://github.com/pinojs/pino-elasticsearch): send
Pino logs to an Elasticsearch instance.
+ [`pino-eventhub`](https://github.com/pinojs/pino-eventhub): send Pino logs
to an [Event Hub](https://docs.microsoft.com/en-us/azure/event-hubs/event-hubs-what-is-event-hubs).
+ [`pino-filter`](https://github.com/pinojs/pino-filter): filter Pino logs in
the same fashion as the [`debug`](https://npm.im/debug) module.
+ [`pino-gelf`](https://github.com/pinojs/pino-gelf): reformat Pino logs into
GELF format for Graylog.
+ [`pino-hapi`](https://github.com/pinojs/hapi-pino): use Pino as the logger
for [Hapi](https://hapijs.com/).
+ [`pino-http`](https://github.com/pinojs/pino-http): easily use Pino to log
requests with the core `http` module.
+ [`pino-http-print`](https://github.com/pinojs/pino-http-print): reformat Pino
logs into traditional [HTTPD](https://httpd.apache.org/) style request logs.
+ [`pino-mongodb`](https://github.com/pinojs/pino-mongodb): store Pino logs
in a MongoDB database.
+ [`pino-multi-stream`](https://github.com/pinojs/pino-multi-stream): send
logs to multiple destination streams (slow!).
+ [`pino-noir`](https://github.com/pinojs/pino-noir): redact sensitive information
in logs.
+ [`pino-pretty`](https://github.com/pinojs/pino-pretty): basic prettifier to
make log lines human-readable.
+ [`pino-socket`](https://github.com/pinojs/pino-socket): send logs to TCP or UDP
destinations.
+ [`pino-std-serializers`](https://github.com/pinojs/pino-std-serializers): the
core object serializers used within Pino.
+ [`pino-syslog`](https://github.com/pinojs/pino-syslog): reformat Pino logs
to standard syslog format.
+ [`pino-tee`](https://github.com/pinojs/pino-tee): pipe Pino logs into files
based upon log levels.
+ [`pino-test`](https://github.com/pinojs/pino-test): a set of utilities for
verifying logs generated by the Pino logger.
+ [`pino-toke`](https://github.com/pinojs/pino-toke): reformat Pino logs
according to a given format string.


<a id="community"></a>
## Community

+ [`@google-cloud/pino-logging-gcp-config`](https://www.npmjs.com/package/@google-cloud/pino-logging-gcp-config): Config helper and formatter to output [Google Cloud Platform Structured Logging](https://cloud.google.com/logging/docs/structured-logging)
+ [`@newrelic/pino-enricher`](https://github.com/newrelic/newrelic-node-log-extensions/blob/main/packages/pino-log-enricher): a log customization to add New Relic context to use [Logs In Context](https://docs.newrelic.com/docs/logs/logs-context/logs-in-context/)
+ [`cloud-pine`](https://github.com/metcoder95/cloud-pine): transport that provides abstraction and compatibility with [`@google-cloud/logging`](https://www.npmjs.com/package/@google-cloud/logging).
+ [`cls-proxify`](https://github.com/keenondrums/cls-proxify): integration of pino and [CLS](https://github.com/jeff-lewis/cls-hooked). Useful for creating dynamically configured child loggers (e.g. with added trace ID) for each request.
+ [`crawlee-pino`](https://github.com/imyelo/crawlee-pino): use Pino to log within Crawlee
+ [`eslint-plugin-pino`](https://github.com/orzarchi/eslint-plugin-pino): linting rules for pino usage, primarily for preventing missing context in logs due to incorrect argument order.
+ [`pino-colada`](https://github.com/lrlna/pino-colada): cute ndjson formatter for pino.
+ [`pino-dev`](https://github.com/dnjstrom/pino-dev): simple prettifier for pino with built-in support for common ecosystem packages.
+ [`pino-fluentd`](https://github.com/davidedantonio/pino-fluentd): send Pino logs to Elasticsearch,
MongoDB, and many [others](https://www.fluentd.org/dataoutputs) via Fluentd.
+ [`pino-lambda`](https://github.com/FormidableLabs/pino-lambda): log transport for CloudWatch support inside AWS Lambda
+ [`pino-pretty-min`](https://github.com/unjello/pino-pretty-min): a minimal
prettifier inspired by the [logrus](https://github.com/sirupsen/logrus) logger.
+ [`pino-rotating-file`](https://github.com/homeaway/pino-rotating-file): a hapi-pino log transport for splitting logs into separate, automatically rotating files.
+ [`pino-tiny`](https://github.com/holmok/pino-tiny): a tiny (and extensible?) little log formatter for pino.
345
backend/node_modules/pino/docs/help.md
generated
vendored
Normal file
@@ -0,0 +1,345 @@
# Help

* [Log rotation](#rotate)
* [Reopening log files](#reopening)
* [Saving to multiple files](#multiple)
* [Log filtering](#filter-logs)
* [Transports and systemd](#transport-systemd)
* [Log to different streams](#multi-stream)
* [Duplicate keys](#dupe-keys)
* [Log levels as labels instead of numbers](#level-string)
* [Pino with `debug`](#debug)
* [Unicode and Windows terminal](#windows)
* [Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels](#stackdriver)
* [Using Grafana Loki to evaluate pino logs in a kubernetes cluster](#grafana-loki)
* [Avoid Message Conflict](#avoid-message-conflict)
* [Best performance for logging to `stdout`](#best-performance-for-stdout)
* [Testing](#testing)

<a id="rotate"></a>
## Log rotation

Use a separate tool for log rotation; we recommend [logrotate](https://github.com/logrotate/logrotate).
Suppose we write our logs to `/var/log/myapp.log` like so:

```
$ node server.js > /var/log/myapp.log
```

We would rotate our log files with logrotate, by adding the following to `/etc/logrotate.d/myapp`:

```
/var/log/myapp.log {
  su root
  daily
  rotate 7
  delaycompress
  compress
  notifempty
  missingok
  copytruncate
}
```

The `copytruncate` configuration has a very slight possibility of lost log lines due
to a gap between copying and truncating: the truncate may occur after additional lines
have been written. To perform log rotation without `copytruncate`, see the [Reopening log files](#reopening)
help.
<a id="reopening"></a>
|
||||
## Reopening log files
|
||||
|
||||
In cases where a log rotation tool doesn't offer copy-truncate capabilities,
|
||||
or where using them is deemed inappropriate, `pino.destination`
|
||||
can reopen file paths after a file has been moved away.
|
||||
|
||||
One way to use this is to set up a `SIGUSR2` or `SIGHUP` signal handler that
|
||||
reopens the log file destination, making sure to write the process PID out
|
||||
somewhere so the log rotation tool knows where to send the signal.
|
||||
|
||||
```js
|
||||
// write the process pid to a well known location for later
|
||||
const fs = require('node:fs')
|
||||
fs.writeFileSync('/var/run/myapp.pid', process.pid)
|
||||
|
||||
const dest = pino.destination('/log/file')
|
||||
const logger = require('pino')(dest)
|
||||
process.on('SIGHUP', () => dest.reopen())
|
||||
```
|
||||
|
||||
The log rotation tool can then be configured to send this signal to the process
|
||||
after a log rotation event has occurred.
|
||||
|
||||
Given a similar scenario as in the [Log rotation](#rotate) section a basic
|
||||
`logrotate` config that aligns with this strategy would look similar to the following:
|
||||
|
||||
```
|
||||
/var/log/myapp.log {
|
||||
su root
|
||||
daily
|
||||
rotate 7
|
||||
delaycompress
|
||||
compress
|
||||
notifempty
|
||||
missingok
|
||||
postrotate
|
||||
kill -HUP `cat /var/run/myapp.pid`
|
||||
endscript
|
||||
}
|
||||
```
|
||||
|
||||
<a id="multiple"></a>
|
||||
## Saving to multiple files
|
||||
|
||||
See [`pino.multistream`](/docs/api.md#pino-multistream).
|
||||
|
||||
<a id="filter-logs"></a>
|
||||
## Log Filtering
|
||||
The Pino philosophy advocates common, preexisting, system utilities.
|
||||
|
||||
Some recommendations in line with this philosophy are:
|
||||
|
||||
1. Use [`grep`](https://linux.die.net/man/1/grep):
|
||||
```sh
|
||||
$ # View all "INFO" level logs
|
||||
$ node app.js | grep '"level":30'
|
||||
```
|
||||
1. Use [`jq`](https://stedolan.github.io/jq/):
|
||||
```sh
|
||||
$ # View all "ERROR" level logs
|
||||
$ node app.js | jq 'select(.level == 50)'
|
||||
```
|
||||
|
||||
<a id="transport-systemd"></a>
|
||||
## Transports and systemd
|
||||
`systemd` makes it complicated to use pipes in services. One method for overcoming
|
||||
this challenge is to use a subshell:
|
||||
|
||||
```
|
||||
ExecStart=/bin/sh -c '/path/to/node app.js | pino-transport'
|
||||
```
|
||||
|
||||
<a id="multi-stream"></a>
|
||||
## Log to different streams
|
||||
|
||||
Pino's default log destination is the singular destination of `stdout`. While
|
||||
not recommended for performance reasons, multiple destinations can be targeted
|
||||
by using [`pino.multistream`](/docs/api.md#pino-multistream).
|
||||
|
||||
In this example, we use `stderr` for `error` level logs and `stdout` as default
|
||||
for all other levels (e.g. `debug`, `info`, and `warn`).
|
||||
|
||||
```js
|
||||
const pino = require('pino')
|
||||
var streams = [
|
||||
{level: 'debug', stream: process.stdout},
|
||||
{level: 'error', stream: process.stderr},
|
||||
{level: 'fatal', stream: process.stderr}
|
||||
]
|
||||
|
||||
const logger = pino({
|
||||
name: 'my-app',
|
||||
level: 'debug', // must be the lowest level of all streams
|
||||
}, pino.multistream(streams))
|
||||
```
|
||||
|
||||
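If each log line should instead go only to the one stream with the highest
matching level, `pino.multistream` also accepts a `dedupe` option. A minimal
sketch:

```js
const pino = require('pino')

const streams = [
  { level: 'info', stream: process.stdout },
  { level: 'error', stream: process.stderr }
]

// with dedupe, an 'error' log is written only to stderr rather than
// to every stream whose level it satisfies
const logger = pino({ level: 'info' }, pino.multistream(streams, { dedupe: true }))

logger.info('stdout only')
logger.error('stderr only')
```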
<a id="dupe-keys"></a>
|
||||
## How Pino handles duplicate keys
|
||||
|
||||
Duplicate keys are possibly when a child logger logs an object with a key that
|
||||
collides with a key in the child loggers bindings.
|
||||
|
||||
See the [child logger duplicate keys caveat](/docs/child-loggers.md#duplicate-keys-caveat)
|
||||
for information on this is handled.
|
||||
|
||||
<a id="level-string"></a>
|
||||
## Log levels as labels instead of numbers
|
||||
Pino log lines are meant to be parsable. Thus, Pino's default mode of operation
|
||||
is to print the level value instead of the string name.
|
||||
However, you can use the [`formatters`](/docs/api.md#formatters-object) option
|
||||
with a [`level`](/docs/api.md#level) function to print the string name instead of the level value :
|
||||
|
||||
```js
|
||||
const pino = require('pino')
|
||||
|
||||
const log = pino({
|
||||
formatters: {
|
||||
level: (label) => {
|
||||
return {
|
||||
level: label
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
log.info('message')
|
||||
|
||||
// {"level":"info","time":1661632832200,"pid":18188,"hostname":"foo","msg":"message"}
|
||||
```
|
||||
|
||||
Although it works, we recommend using one of these options instead if you are able:
|
||||
|
||||
1. If the only change desired is the name then a transport can be used. One such
|
||||
transport is [`pino-text-level-transport`](https://npm.im/pino-text-level-transport).
|
||||
1. Use a prettifier like [`pino-pretty`](https://npm.im/pino-pretty) to make
|
||||
the logs human friendly.
|
||||
|
||||
<a id="debug"></a>
|
||||
## Pino with `debug`
|
||||
|
||||
The popular [`debug`](https://npm.im/debug) is used in many modules across the ecosystem.
|
||||
|
||||
The [`pino-debug`](https://github.com/pinojs/pino-debug) module
|
||||
can capture calls to `debug` loggers and run them
|
||||
through `pino` instead. This results in a 10x (20x in asynchronous mode)
|
||||
performance improvement - even though `pino-debug` is logging additional
|
||||
data and wrapping it in JSON.
|
||||
|
||||
To quickly enable this install [`pino-debug`](https://github.com/pinojs/pino-debug)
|
||||
and preload it with the `-r` flag, enabling any `debug` logs with the
|
||||
`DEBUG` environment variable:
|
||||
|
||||
```sh
|
||||
$ npm i pino-debug
|
||||
$ DEBUG=* node -r pino-debug app.js
|
||||
```
|
||||
|
||||
[`pino-debug`](https://github.com/pinojs/pino-debug) also offers fine-grain control to map specific `debug`
|
||||
namespaces to `pino` log levels. See [`pino-debug`](https://github.com/pinojs/pino-debug)
|
||||
for more.
|
||||
|
||||
<a id="windows"></a>
|
||||
## Unicode and Windows terminal
|
||||
|
||||
Pino uses [sonic-boom](https://github.com/mcollina/sonic-boom) to speed
|
||||
up logging. Internally, it uses [`fs.write`](https://nodejs.org/dist/latest-v10.x/docs/api/fs.html#fs_fs_write_fd_string_position_encoding_callback) to write log lines directly to a file
|
||||
descriptor. On Windows, Unicode output is not handled properly in the
|
||||
terminal (both `cmd.exe` and PowerShell), and as such the output could
|
||||
be visualized incorrectly if the log lines include utf8 characters. It
|
||||
is possible to configure the terminal to visualize those characters
|
||||
correctly with the use of [`chcp`](https://ss64.com/nt/chcp.html) by
|
||||
executing in the terminal `chcp 65001`. This is a known limitation of
|
||||
Node.js.
|
||||
|
||||
<a id="stackdriver"></a>
|
||||
## Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels
|
||||
|
||||
Google Cloud Logging uses `severity` levels instead of log levels. As a result, all logs may show as INFO
|
||||
level logs while completely ignoring the level set in the pino log. Google Cloud Logging also prefers that
|
||||
log data is present inside a `message` key instead of the default `msg` key that Pino uses. Use a technique
|
||||
similar to the one below to retain log levels in Google Cloud Logging
|
||||
|
||||
```js
|
||||
const pino = require('pino')
|
||||
|
||||
// https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity
|
||||
const PinoLevelToSeverityLookup = {
|
||||
trace: 'DEBUG',
|
||||
debug: 'DEBUG',
|
||||
info: 'INFO',
|
||||
warn: 'WARNING',
|
||||
error: 'ERROR',
|
||||
fatal: 'CRITICAL',
|
||||
};
|
||||
|
||||
const defaultPinoConf = {
|
||||
messageKey: 'message',
|
||||
formatters: {
|
||||
level(label, number) {
|
||||
return {
|
||||
severity: PinoLevelToSeverityLookup[label] || PinoLevelToSeverityLookup['info'],
|
||||
level: number,
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
module.exports = function createLogger(options) {
|
||||
return pino(Object.assign({}, options, defaultPinoConf))
|
||||
}
|
||||
```
|
||||
|
||||
A library that configures Pino for
|
||||
[Google Cloud Structured Logging](https://cloud.google.com/logging/docs/structured-logging)
|
||||
is available at:
|
||||
[@google-cloud/pino-logging-gcp-config](https://www.npmjs.com/package/@google-cloud/pino-logging-gcp-config)
|
||||
|
||||
This library has the following features:
|
||||
|
||||
+ Converts Pino log levels to Google Cloud Logging log levels, as above
|
||||
+ Uses `message` instead of `msg` for the message key, as above
|
||||
+ Adds a millisecond-granularity timestamp in the
|
||||
[structure](https://cloud.google.com/logging/docs/agent/logging/configuration#timestamp-processing)
|
||||
recognised by Google Cloud Logging eg: \
|
||||
`"timestamp":{"seconds":1445470140,"nanos":123000000}`
|
||||
+ Adds a sequential
|
||||
[`insertId`](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#FIELDS.insert_id)
|
||||
to ensure log messages with identical timestamps are ordered correctly.
|
||||
+ Logs including an `Error` object have the
|
||||
[`stack_trace`](https://cloud.google.com/error-reporting/docs/formatting-error-messages#log-error)
|
||||
property set so that the error is forwarded to Google Cloud Error Reporting.
|
||||
+ Includes a
|
||||
[`ServiceContext`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/ServiceContext)
|
||||
object in the logs for Google Cloud Error Reporting, auto detected from the
|
||||
environment if not specified
|
||||
+ Maps the OpenTelemetry properties `span_id`, `trace_id`, and `trace_flags`
|
||||
to the equivalent Google Cloud Logging fields.
|
||||
|
||||
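Usage is then a one-liner; a minimal sketch, assuming the package exposes a
`createGcpLoggingPinoConfig` helper (verify the exact export against the
package README before relying on it):

```js
const pino = require('pino')
// assumed export name; see the package README for the authoritative API
const { createGcpLoggingPinoConfig } = require('@google-cloud/pino-logging-gcp-config')

const logger = pino(createGcpLoggingPinoConfig())
logger.info('structured log for Google Cloud Logging')
```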
<a id="grafana-loki"></a>
|
||||
## Using Grafana Loki to evaluate pino logs in a kubernetes cluster
|
||||
|
||||
To get pino logs into Grafana Loki there are two options:
|
||||
|
||||
1. **Push:** Use [pino-loki](https://github.com/Julien-R44/pino-loki) to send logs directly to Loki.
|
||||
1. **Pull:** Configure Grafana Promtail to read and properly parse the logs before sending them to Loki.
|
||||
Similar to Google Cloud logging, this involves remapping the log levels. See this [article](https://medium.com/@janpaepke/structured-logging-in-the-grafana-monitoring-stack-8aff0a5af2f5) for details.
|
||||
|
||||
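A minimal push-style sketch, assuming `pino-loki` is installed and accepts a
`host` option pointing at your Loki instance (check its README for the full
option set):

```js
const pino = require('pino')

const transport = pino.transport({
  target: 'pino-loki',
  options: { host: 'http://localhost:3100' } // assumed Loki endpoint option
})

const logger = pino(transport)
logger.info('shipped to Loki')
```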
<a id="avoid-message-conflict"></a>
|
||||
## Avoid Message Conflict
|
||||
|
||||
As described in the [`message` documentation](/docs/api.md#message), when a log
|
||||
is written like `log.info({ msg: 'a message' }, 'another message')` then the
|
||||
final output JSON will have `"msg":"another message"` and the `'a message'`
|
||||
string will be lost. To overcome this, the [`logMethod` hook](/docs/api.md#logmethod)
|
||||
can be used:
|
||||
|
||||
```js
|
||||
'use strict'
|
||||
|
||||
const log = require('pino')({
|
||||
level: 'debug',
|
||||
hooks: {
|
||||
logMethod (inputArgs, method) {
|
||||
if (inputArgs.length === 2 && inputArgs[0].msg) {
|
||||
inputArgs[0].originalMsg = inputArgs[0].msg
|
||||
}
|
||||
return method.apply(this, inputArgs)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
log.info('no original message')
|
||||
log.info({ msg: 'mapped to originalMsg' }, 'a message')
|
||||
|
||||
// {"level":30,"time":1596313323106,"pid":63739,"hostname":"foo","msg":"no original message"}
|
||||
// {"level":30,"time":1596313323107,"pid":63739,"hostname":"foo","msg":"a message","originalMsg":"mapped to originalMsg"}
|
||||
```
|
||||
|
||||
<a id="best-performance-for-stdout"></a>
|
||||
## Best performance for logging to `stdout`
|
||||
|
||||
The best performance for logging directly to stdout is _usually_ achieved by using the
|
||||
default configuration:
|
||||
|
||||
```js
|
||||
const log = require('pino')();
|
||||
```
|
||||
|
||||
You should only have to configure custom transports or other settings
|
||||
if you have broader logging requirements.
|
||||
|
||||
<a id="testing"></a>
|
||||
## Testing
|
||||
|
||||
See [`pino-test`](https://github.com/pinojs/pino-test).
|
||||
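A minimal sketch of what a test looks like, assuming `pino-test`'s `sink()` and
`once()` helpers as shown in its README (verify against the current API):

```js
const pino = require('pino')
const pinoTest = require('pino-test')

async function main () {
  const stream = pinoTest.sink()
  const logger = pino(stream)

  logger.info('hello world')

  // asserts that the next log line matches the expected fields
  await pinoTest.once(stream, { msg: 'hello world', level: 30 })
}

main()
```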
64
backend/node_modules/pino/docs/lts.md
generated
vendored
Normal file
@@ -0,0 +1,64 @@
## Long Term Support

Pino's Long Term Support (LTS) is provided according to the schedule laid
out in this document:

1. Major releases, the "X" in [semantic versioning][semver] X.Y.Z release
versions, are supported for a minimum period of six months from their release
date. The release date of any specific version can be found at
[https://github.com/pinojs/pino/releases](https://github.com/pinojs/pino/releases).

1. Major releases will receive security updates for an additional six months
from the release of the next major release. After this period
we will still review and release security fixes as long as they are
provided by the community and they do not violate other constraints,
e.g. minimum supported Node.js version.

1. Major releases will be tested and verified against all Node.js
release lines that are supported by the
[Node.js LTS policy](https://github.com/nodejs/Release) within the
LTS period of that given Pino release line. This implies that only
the latest Node.js release of a given line is supported.

A "month" is defined as 30 consecutive days.

> ## Security Releases and Semver
>
> As a consequence of providing long-term support for major releases, there
> are occasions where we need to release breaking changes as a _minor_
> version release. Such changes will _always_ be noted in the
> [release notes](https://github.com/pinojs/pino/releases).
>
> To avoid automatically receiving breaking security updates it is possible to use
> the tilde (`~`) range qualifier. For example, to get patches for the 6.1
> release, and avoid automatically updating to the 6.2 release, specify
> the dependency as `"pino": "~6.1.x"`. This will leave your application vulnerable,
> so please use with caution.

[semver]: https://semver.org/

<a name="lts-schedule"></a>

### Schedule

| Version | Release Date | End Of LTS Date | Node.js              |
| :------ | :----------- | :-------------- | :------------------- |
| 9.x     | 2024-04-26   | TBD             | 18, 20, 22           |
| 8.x     | 2022-06-01   | 2024-10-26      | 14, 16, 18, 20       |
| 7.x     | 2021-10-14   | 2023-06-01      | 12, 14, 16           |
| 6.x     | 2020-03-07   | 2022-04-14      | 10, 12, 14, 16       |

<a name="supported-os"></a>

### CI tested operating systems

Pino uses GitHub Actions for CI testing; please refer to
[GitHub's documentation regarding workflow runners](https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources)
for further details on what the latest virtual environment is in relation to
the YAML workflow labels below:

| OS      | YAML Workflow Label    | Node.js      |
|---------|------------------------|--------------|
| Linux   | `ubuntu-latest`        | 18, 20, 22   |
| Windows | `windows-latest`       | 18, 20, 22   |
| macOS   | `macos-latest`         | 18, 20, 22   |
35
backend/node_modules/pino/docs/pretty.md
generated
vendored
Normal file
@@ -0,0 +1,35 @@
# Pretty Printing

By default, Pino log lines are newline delimited JSON (NDJSON). This is perfect
for production usage and long-term storage. It's not so great for development
environments. Thus, Pino logs can be prettified by using a Pino prettifier
module like [`pino-pretty`][pp]:

1. Install a prettifier module as a separate dependency, e.g. `npm install pino-pretty`.
2. Instantiate the logger with the `transport.target` option set to `'pino-pretty'`:
   ```js
   const pino = require('pino')
   const logger = pino({
     transport: {
       target: 'pino-pretty'
     },
   })

   logger.info('hi')
   ```
3. The transport option can also have an options object containing `pino-pretty` options:
   ```js
   const pino = require('pino')
   const logger = pino({
     transport: {
       target: 'pino-pretty',
       options: {
         colorize: true
       }
     }
   })

   logger.info('hi')
   ```
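Alternatively, `pino-pretty` can be passed directly as a destination stream
rather than a transport target; a minimal sketch using `pino-pretty`'s
programmatic factory export:

```js
const pino = require('pino')
const pretty = require('pino-pretty')

// pretty() returns a stream that prettifies incoming NDJSON log lines
const logger = pino(pretty({ colorize: true }))

logger.info('hi')
```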
[pp]: https://github.com/pinojs/pino-pretty
135
backend/node_modules/pino/docs/redaction.md
generated
vendored
Normal file
@@ -0,0 +1,135 @@
# Redaction

> Redaction is not supported in the browser [#670](https://github.com/pinojs/pino/issues/670)

To redact sensitive information, supply paths to keys that hold sensitive data
using the `redact` option. Note that paths that contain hyphens need to use
brackets to access the hyphenated property:

```js
const logger = require('.')({
  redact: ['key', 'path.to.key', 'stuff.thats[*].secret', 'path["with-hyphen"]']
})

logger.info({
  key: 'will be redacted',
  path: {
    to: {key: 'sensitive', another: 'thing'}
  },
  stuff: {
    thats: [
      {secret: 'will be redacted', logme: 'will be logged'},
      {secret: 'as will this', logme: 'as will this'}
    ]
  }
})
```

This will output:

```JSON
{"level":30,"time":1527777350011,"pid":3186,"hostname":"Davids-MacBook-Pro-3.local","key":"[Redacted]","path":{"to":{"key":"[Redacted]","another":"thing"}},"stuff":{"thats":[{"secret":"[Redacted]","logme":"will be logged"},{"secret":"[Redacted]","logme":"as will this"}]}}
```

The `redact` option can take an array (as shown in the above example) or
an object. This allows control over *how* information is redacted.

For instance, setting the censor:

```js
const logger = require('.')({
  redact: {
    paths: ['key', 'path.to.key', 'stuff.thats[*].secret'],
    censor: '**GDPR COMPLIANT**'
  }
})

logger.info({
  key: 'will be redacted',
  path: {
    to: {key: 'sensitive', another: 'thing'}
  },
  stuff: {
    thats: [
      {secret: 'will be redacted', logme: 'will be logged'},
      {secret: 'as will this', logme: 'as will this'}
    ]
  }
})
```

This will output:

```JSON
{"level":30,"time":1527778563934,"pid":3847,"hostname":"Davids-MacBook-Pro-3.local","key":"**GDPR COMPLIANT**","path":{"to":{"key":"**GDPR COMPLIANT**","another":"thing"}},"stuff":{"thats":[{"secret":"**GDPR COMPLIANT**","logme":"will be logged"},{"secret":"**GDPR COMPLIANT**","logme":"as will this"}]}}
```

The `redact.remove` option also allows for the key and value to be removed from output:

```js
const logger = require('.')({
  redact: {
    paths: ['key', 'path.to.key', 'stuff.thats[*].secret'],
    remove: true
  }
})

logger.info({
  key: 'will be redacted',
  path: {
    to: {key: 'sensitive', another: 'thing'}
  },
  stuff: {
    thats: [
      {secret: 'will be redacted', logme: 'will be logged'},
      {secret: 'as will this', logme: 'as will this'}
    ]
  }
})
```

This will output:

```JSON
{"level":30,"time":1527782356751,"pid":5758,"hostname":"Davids-MacBook-Pro-3.local","path":{"to":{"another":"thing"}},"stuff":{"thats":[{"logme":"will be logged"},{"logme":"as will this"}]}}
```

See [pino options in API](/docs/api.md#redact-array-object) for `redact` API details.
<a name="paths"></a>
|
||||
## Path Syntax
|
||||
|
||||
The syntax for paths supplied to the `redact` option conform to the syntax in path lookups
|
||||
in standard ECMAScript, with two additions:
|
||||
|
||||
* paths may start with bracket notation
|
||||
* paths may contain the asterisk `*` to denote a wildcard
|
||||
* paths are **case sensitive**
|
||||
|
||||
By way of example, the following are all valid paths:
|
||||
|
||||
* `a.b.c`
|
||||
* `a["b-c"].d`
|
||||
* `["a-b"].c`
|
||||
* `a.b.*`
|
||||
* `a[*].b`
|
||||
|
||||
## Overhead
|
||||
|
||||
Pino's redaction functionality is built on top of [`fast-redact`](https://github.com/davidmarkclements/fast-redact)
|
||||
which adds about 2% overhead to `JSON.stringify` when using paths without wildcards.
|
||||
|
||||
When used with pino logger with a single redacted path, any overhead is within noise -
|
||||
a way to deterministically measure its effect has not been found. This is because it is not a bottleneck.
|
||||
|
||||
However, wildcard redaction does carry a non-trivial cost relative to explicitly declaring the keys
|
||||
(50% in a case where four keys are redacted across two objects). See
|
||||
the [`fast-redact` benchmarks](https://github.com/davidmarkclements/fast-redact#benchmarks) for details.
|
||||
|
||||
## Safety
|
||||
|
||||
The `redact` option is intended as an initialization time configuration option.
|
||||
Path strings must not originate from user input.
|
||||
The `fast-redact` module uses a VM context to syntax check the paths, user input
|
||||
should never be combined with such an approach. See the [`fast-redact` Caveat](https://github.com/davidmarkclements/fast-redact#caveat)
|
||||
and the [`fast-redact` Approach](https://github.com/davidmarkclements/fast-redact#approach) for in-depth information.
|
||||
1263
backend/node_modules/pino/docs/transports.md
generated
vendored
Normal file
File diff suppressed because it is too large
309
backend/node_modules/pino/docs/web.md
generated
vendored
Normal file
@@ -0,0 +1,309 @@
# Web Frameworks

Since HTTP logging is a primary use case, Pino has first-class support for the Node.js
web framework ecosystem.

- [Web Frameworks](#web-frameworks)
  - [Pino with Fastify](#pino-with-fastify)
  - [Pino with Express](#pino-with-express)
  - [Pino with Hapi](#pino-with-hapi)
  - [Pino with Restify](#pino-with-restify)
  - [Pino with Koa](#pino-with-koa)
  - [Pino with Node core `http`](#pino-with-node-core-http)
  - [Pino with Nest](#pino-with-nest)
  - [Pino with H3](#pino-with-h3)
  - [Pino with Hono](#pino-with-hono)

<a id="fastify"></a>
## Pino with Fastify

The Fastify web framework comes bundled with Pino by default; simply set Fastify's
`logger` option to `true` and use `request.log` or `reply.log` for log messages that correspond
to each request:

```js
const fastify = require('fastify')({
  logger: true
})

fastify.get('/', async (request, reply) => {
  request.log.info('something')
  return { hello: 'world' }
})

fastify.listen({ port: 3000 }, (err) => {
  if (err) {
    fastify.log.error(err)
    process.exit(1)
  }
})
```

The `logger` option can also be set to an object, which will be passed through directly
as the [`pino` options object](/docs/api.md#options-object).
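For example, a minimal sketch (any standard pino option could go here; the
`level` and `redact` values are illustrative):

```js
const fastify = require('fastify')({
  logger: {
    level: 'warn',
    redact: ['req.headers.authorization']
  }
})
```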
See the [fastify documentation](https://www.fastify.io/docs/latest/Reference/Logging/) for more information.

<a id="express"></a>
## Pino with Express

```sh
npm install pino-http
```

```js
const app = require('express')()
const pino = require('pino-http')()

app.use(pino)

app.get('/', function (req, res) {
  req.log.info('something')
  res.send('hello world')
})

app.listen(3000)
```

See the [pino-http README](https://npm.im/pino-http) for more info.

<a id="hapi"></a>
## Pino with Hapi

```sh
npm install hapi-pino
```

```js
'use strict'

const Hapi = require('@hapi/hapi')
const Pino = require('hapi-pino');

async function start () {
  // Create a server with a host and port
  const server = Hapi.server({
    host: 'localhost',
    port: 3000
  })

  // Add the route
  server.route({
    method: 'GET',
    path: '/',
    handler: async function (request, h) {
      // request.log is Hapi's standard way of logging
      request.log(['a', 'b'], 'Request into hello world')

      // a pino instance can also be used, which will be faster
      request.logger.info('In handler %s', request.path)

      return 'hello world'
    }
  })

  await server.register(Pino)

  // also as a decorated API
  server.logger.info('another way for accessing it')

  // and through Hapi standard logging system
  server.log(['subsystem'], 'third way for accessing it')

  await server.start()

  return server
}

start().catch((err) => {
  console.log(err)
  process.exit(1)
})
```

See the [hapi-pino README](https://npm.im/hapi-pino) for more info.

<a id="restify"></a>
## Pino with Restify

```sh
npm install restify-pino-logger
```

```js
const server = require('restify').createServer({name: 'server'})
const pino = require('restify-pino-logger')()

server.use(pino)

server.get('/', function (req, res) {
  req.log.info('something')
  res.send('hello world')
})

server.listen(3000)
```

See the [restify-pino-logger README](https://npm.im/restify-pino-logger) for more info.

<a id="koa"></a>
## Pino with Koa

```sh
npm install koa-pino-logger
```

```js
const Koa = require('koa')
const app = new Koa()
const pino = require('koa-pino-logger')()

app.use(pino)

app.use((ctx) => {
  ctx.log.info('something else')
  ctx.body = 'hello world'
})

app.listen(3000)
```

See the [koa-pino-logger README](https://github.com/pinojs/koa-pino-logger) for more info.

<a id="http"></a>
## Pino with Node core `http`

```sh
npm install pino-http
```

```js
const http = require('http')
const server = http.createServer(handle)
const logger = require('pino-http')()

function handle (req, res) {
  logger(req, res)
  req.log.info('something else')
  res.end('hello world')
}

server.listen(3000)
```

See the [pino-http README](https://npm.im/pino-http) for more info.


<a id="nest"></a>
## Pino with Nest

```sh
npm install nestjs-pino
```

```ts
import { NestFactory } from '@nestjs/core'
import { Controller, Get, Module } from '@nestjs/common'
import { LoggerModule, Logger } from 'nestjs-pino'

@Controller()
export class AppController {
  constructor(private readonly logger: Logger) {}

  @Get()
  getHello() {
    this.logger.log('something')
    return `Hello world`
  }
}

@Module({
  controllers: [AppController],
  imports: [LoggerModule.forRoot()]
})
class MyModule {}

async function bootstrap() {
  const app = await NestFactory.create(MyModule)
  await app.listen(3000)
}
bootstrap()
```

See the [nestjs-pino README](https://npm.im/nestjs-pino) for more info.


<a id="h3"></a>
## Pino with H3

```sh
npm install pino-http h3
```

Save as `server.mjs`:

```js
import { createApp, createRouter, eventHandler, fromNodeMiddleware } from "h3";
import pino from 'pino-http'

export const app = createApp();

const router = createRouter();
app.use(router);
app.use(fromNodeMiddleware(pino()))

app.use(eventHandler((event) => {
  event.node.req.log.info('something')
  return 'hello world'
}))

router.get(
  "/",
  eventHandler((event) => {
    return { path: event.path, message: "Hello World!" };
  }),
);
```

Execute `npx --yes listhen -w --open ./server.mjs`.

See the [pino-http README](https://npm.im/pino-http) for more info.


<a id="hono"></a>
## Pino with Hono

```sh
npm install pino pino-http hono
```

```js
import { serve } from '@hono/node-server';
import { Hono } from 'hono';
import { requestId } from 'hono/request-id';
import { pinoHttp } from 'pino-http';

const app = new Hono();
app.use(requestId());
app.use(async (c, next) => {
  // pass hono's request-id to pino-http
  c.env.incoming.id = c.var.requestId;

  // map express style middleware to hono
  await new Promise((resolve) => pinoHttp()(c.env.incoming, c.env.outgoing, () => resolve()));

  c.set('logger', c.env.incoming.log);

  await next();
});

app.get('/', (c) => {
  c.var.logger.info('something');

  return c.text('Hello Node.js!');
});

serve(app);
```

See the [pino-http README](https://npm.im/pino-http) for more info.
26
backend/node_modules/pino/docsify/sidebar.md
generated
vendored
Normal file
@@ -0,0 +1,26 @@
* [Readme](/)
* [API](/docs/api.md)
* [Browser API](/docs/browser.md)
* [Redaction](/docs/redaction.md)
* [Child Loggers](/docs/child-loggers.md)
* [Transports](/docs/transports.md)
* [Web Frameworks](/docs/web.md)
* [Pretty Printing](/docs/pretty.md)
* [Asynchronous Logging](/docs/asynchronous.md)
* [Ecosystem](/docs/ecosystem.md)
* [Benchmarks](/docs/benchmarks.md)
* [Long Term Support](/docs/lts.md)
* [Help](/docs/help.md)
  * [Log rotation](/docs/help.md#rotate)
  * [Reopening log files](/docs/help.md#reopening)
  * [Saving to multiple files](/docs/help.md#multiple)
  * [Log filtering](/docs/help.md#filter-logs)
  * [Transports and systemd](/docs/help.md#transport-systemd)
  * [Duplicate keys](/docs/help.md#dupe-keys)
  * [Log levels as labels instead of numbers](/docs/help.md#level-string)
  * [Pino with `debug`](/docs/help.md#debug)
  * [Unicode and Windows terminal](/docs/help.md#windows)
  * [Mapping Pino Log Levels to Google Cloud Logging (Stackdriver) Severity Levels](/docs/help.md#stackdriver)
  * [Avoid Message Conflict](/docs/help.md#avoid-message-conflict)
  * [Best performance for logging to `stdout`](/docs/help.md#best-performance-for-stdout)
  * [Testing](/docs/help.md#testing)
43
backend/node_modules/pino/examples/basic.js
generated
vendored
Normal file
@@ -0,0 +1,43 @@
'use strict'

// Pino's primary usage writes ndjson to `stdout`:
const pino = require('..')()

// However, if "human readable" output is desired,
// `pino-pretty` can be provided as the destination
// stream by uncommenting the following line in place
// of the previous declaration:
// const pino = require('..')(require('pino-pretty')())

pino.info('hello world')
pino.error('this is at error level')
pino.info('the answer is %d', 42)
pino.info({ obj: 42 }, 'hello world')
pino.info({ obj: 42, b: 2 }, 'hello world')
pino.info({ nested: { obj: 42 } }, 'nested')
setImmediate(() => {
  pino.info('after setImmediate')
})
pino.error(new Error('an error'))

const child = pino.child({ a: 'property' })
child.info('hello child!')

const childsChild = child.child({ another: 'property' })
childsChild.info('hello baby..')

pino.debug('this should be mute')

pino.level = 'trace'

pino.debug('this is a debug statement')

pino.child({ another: 'property' }).debug('this is a debug statement via child')
pino.trace('this is a trace statement')

pino.debug('this is a "debug" statement with "')

pino.info(new Error('kaboom'))
pino.info(null)

pino.info(new Error('kaboom'), 'with', 'a', 'message')
68
backend/node_modules/pino/examples/transport.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
'use strict'

const pino = require('..')
const { tmpdir } = require('node:os')
const { join } = require('node:path')

const file = join(tmpdir(), `pino-${process.pid}-example`)

const transport = pino.transport({
  targets: [{
    level: 'warn',
    target: 'pino/file',
    options: {
      destination: file
    }
  /*
  }, {
    level: 'info',
    target: 'pino-elasticsearch',
    options: {
      node: 'http://localhost:9200'
    }
  */
  }, {
    level: 'info',
    target: 'pino-pretty'
  }]
})

const logger = pino(transport)

logger.info({
  file
}, 'logging destination')

logger.info('hello world')
logger.error('this is at error level')
logger.info('the answer is %d', 42)
logger.info({ obj: 42 }, 'hello world')
logger.info({ obj: 42, b: 2 }, 'hello world')
logger.info({ nested: { obj: 42 } }, 'nested')
logger.warn('WARNING!')
setImmediate(() => {
  logger.info('after setImmediate')
})
logger.error(new Error('an error'))

const child = logger.child({ a: 'property' })
child.info('hello child!')

const childsChild = child.child({ another: 'property' })
childsChild.info('hello baby..')

logger.debug('this should be mute')

logger.level = 'trace'

logger.debug('this is a debug statement')

logger.child({ another: 'property' }).debug('this is a debug statement via child')
logger.trace('this is a trace statement')

logger.debug('this is a "debug" statement with "')

logger.info(new Error('kaboom'))
logger.info(null)

logger.info(new Error('kaboom'), 'with', 'a', 'message')
BIN
backend/node_modules/pino/favicon-16x16.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 970 B |
BIN
backend/node_modules/pino/favicon-32x32.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 1.5 KiB |
BIN
backend/node_modules/pino/favicon.ico
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 15 KiB |
12
backend/node_modules/pino/file.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
'use strict'

const pino = require('./pino')
const { once } = require('node:events')

module.exports = async function (opts = {}) {
  const destOpts = Object.assign({}, opts, { dest: opts.destination || 1, sync: false })
  delete destOpts.destination
  const destination = pino.destination(destOpts)
  await once(destination, 'ready')
  return destination
}
42
backend/node_modules/pino/inc-version.sh
generated
vendored
Executable file
@@ -0,0 +1,42 @@
#!/bin/bash

set -e

PATH=./node_modules/.bin:${PATH}
CURRENT_VERSION=$(jq -r .version package.json)

case ${1} in
  Major | MAJOR | major)
    LEVEL=major
    ;;

  Minor | MINOR | minor)
    LEVEL=minor
    ;;

  Patch | PATCH | patch)
    LEVEL=patch
    ;;

  *)
    LEVEL=patch
    ;;
esac

NEW_VERSION=$(semver -i ${LEVEL} ${CURRENT_VERSION})
echo "${CURRENT_VERSION} => ${NEW_VERSION}"
read -n 1 -s -r -p "Press any key to continue (ctrl+c to abort)..."
echo ""

echo "Patching package.json..."
cat package.json | \
  jq --arg vers "${NEW_VERSION}" '.version = $vers' | \
  tee package.json 1>/dev/null

echo "Patching lib/meta.js ..."
SED_SCRIPT=$(printf 's/%s/%s/' ${CURRENT_VERSION//\./\\.} ${NEW_VERSION//\./\\.})
cat ./lib/meta.js | \
  sed -e ${SED_SCRIPT} | \
  tee ./lib/meta.js 1>/dev/null

echo "Done."
55
backend/node_modules/pino/index.html
generated
vendored
Normal file
@@ -0,0 +1,55 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <title>Pino - Super fast, all natural JSON logger for Node.js</title>
  <meta name="description" content="Super fast, all natural JSON logger for Node.js">
  <meta name="viewport" content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
  <link rel="stylesheet" href="//unpkg.com/docsify-themeable/dist/css/theme-simple.css">
  <style>
    :root {
      --base-font-size: 16px;
      --theme-color: rgb(104, 118, 52);
      --link-color: rgb(104, 118, 52);
      --link-color--hover: rgb(137, 152, 100);
      --sidebar-name-margin: 0;
      --sidebar-name-padding: 0;
      --code-font-size: .9em;
    }
    .sidebar > h1 {
      margin-bottom: -.75em;
      margin-top: .75em;
    }
    .sidebar > h1 img {
      height: 4em;
    }
    .markdown-section a code {
      color: var(--link-color)!important;
    }
    .markdown-section code:not([class*="lang-"]):not([class*="language-"]) {
      white-space: unset
    }
  </style>
  <link rel="icon" type="image/png" sizes="32x32" href="favicon-32x32.png">
  <link rel="icon" type="image/png" sizes="16x16" href="favicon-16x16.png">
</head>
<body>
  <div id="app"></div>
</body>
<script>
  window.$docsify = {
    name: 'pino',
    logo: './pino-tree.png',
    loadSidebar: 'docsify/sidebar.md',
    repo: 'https://github.com/pinojs/pino',
    auto2top: true,
    ga: 'UA-103155139-1'
  }
</script>
<script src="//unpkg.com/docsify/lib/docsify.min.js"></script>
<script src="//unpkg.com/docsify/lib/plugins/search.min.js"></script>
<script src="//unpkg.com/docsify/lib/plugins/ga.min.js"></script>
<!-- To enable syntax highlighting on TypeScript codes: -->
<script src="//cdn.jsdelivr.net/npm/prismjs@1/components/prism-typescript.min.js"></script>

</html>
30
backend/node_modules/pino/lib/caller.js
generated
vendored
Normal file
@@ -0,0 +1,30 @@
'use strict'

// stack-trace formatter that returns the raw CallSite array untouched
function noOpPrepareStackTrace (_, stack) {
  return stack
}

// Returns the file names of the modules on the current call stack,
// skipping this module and its direct caller.
module.exports = function getCallers () {
  const originalPrepare = Error.prepareStackTrace
  Error.prepareStackTrace = noOpPrepareStackTrace
  const stack = new Error().stack
  Error.prepareStackTrace = originalPrepare

  if (!Array.isArray(stack)) {
    return undefined
  }

  const entries = stack.slice(2)

  const fileNames = []

  for (const entry of entries) {
    if (!entry) {
      continue
    }

    fileNames.push(entry.getFileName())
  }

  return fileNames
}
28
backend/node_modules/pino/lib/constants.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
/**
 * Represents default log level values
 *
 * @enum {number}
 */
const DEFAULT_LEVELS = {
  trace: 10,
  debug: 20,
  info: 30,
  warn: 40,
  error: 50,
  fatal: 60
}

/**
 * Represents sort order direction: `ascending` or `descending`
 *
 * @enum {string}
 */
const SORTING_ORDER = {
  ASC: 'ASC',
  DESC: 'DESC'
}

module.exports = {
  DEFAULT_LEVELS,
  SORTING_ORDER
}
8
backend/node_modules/pino/lib/deprecations.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
'use strict'

const warning = require('process-warning')()
module.exports = warning

// const warnName = 'PinoWarning'

// warning.create(warnName, 'PINODEP010', 'A new deprecation')
241
backend/node_modules/pino/lib/levels.js
generated
vendored
Normal file
@@ -0,0 +1,241 @@
'use strict'
/* eslint no-prototype-builtins: 0 */
const {
  lsCacheSym,
  levelValSym,
  useOnlyCustomLevelsSym,
  streamSym,
  formattersSym,
  hooksSym,
  levelCompSym
} = require('./symbols')
const { noop, genLog } = require('./tools')
const { DEFAULT_LEVELS, SORTING_ORDER } = require('./constants')

const levelMethods = {
  fatal: (hook) => {
    const logFatal = genLog(DEFAULT_LEVELS.fatal, hook)
    return function (...args) {
      const stream = this[streamSym]
      logFatal.call(this, ...args)
      if (typeof stream.flushSync === 'function') {
        try {
          stream.flushSync()
        } catch (e) {
          // https://github.com/pinojs/pino/pull/740#discussion_r346788313
        }
      }
    }
  },
  error: (hook) => genLog(DEFAULT_LEVELS.error, hook),
  warn: (hook) => genLog(DEFAULT_LEVELS.warn, hook),
  info: (hook) => genLog(DEFAULT_LEVELS.info, hook),
  debug: (hook) => genLog(DEFAULT_LEVELS.debug, hook),
  trace: (hook) => genLog(DEFAULT_LEVELS.trace, hook)
}

const nums = Object.keys(DEFAULT_LEVELS).reduce((o, k) => {
  o[DEFAULT_LEVELS[k]] = k
  return o
}, {})

const initialLsCache = Object.keys(nums).reduce((o, k) => {
  o[k] = '{"level":' + Number(k)
  return o
}, {})

function genLsCache (instance) {
  const formatter = instance[formattersSym].level
  const { labels } = instance.levels
  const cache = {}
  for (const label in labels) {
    const level = formatter(labels[label], Number(label))
    cache[label] = JSON.stringify(level).slice(0, -1)
  }
  instance[lsCacheSym] = cache
  return instance
}

function isStandardLevel (level, useOnlyCustomLevels) {
  if (useOnlyCustomLevels) {
    return false
  }

  switch (level) {
    case 'fatal':
    case 'error':
    case 'warn':
    case 'info':
    case 'debug':
    case 'trace':
      return true
    default:
      return false
  }
}

function setLevel (level) {
  const { labels, values } = this.levels
  if (typeof level === 'number') {
    if (labels[level] === undefined) throw Error('unknown level value' + level)
    level = labels[level]
  }
  if (values[level] === undefined) throw Error('unknown level ' + level)
  const preLevelVal = this[levelValSym]
  const levelVal = this[levelValSym] = values[level]
  const useOnlyCustomLevelsVal = this[useOnlyCustomLevelsSym]
  const levelComparison = this[levelCompSym]
  const hook = this[hooksSym].logMethod

  for (const key in values) {
    if (levelComparison(values[key], levelVal) === false) {
      this[key] = noop
      continue
    }
    this[key] = isStandardLevel(key, useOnlyCustomLevelsVal) ? levelMethods[key](hook) : genLog(values[key], hook)
  }

  this.emit(
    'level-change',
    level,
    levelVal,
    labels[preLevelVal],
    preLevelVal,
    this
  )
}

function getLevel (level) {
  const { levels, levelVal } = this
  // protection against potential loss of Pino scope from serializers (edge case with circular refs - https://github.com/pinojs/pino/issues/833)
  return (levels && levels.labels) ? levels.labels[levelVal] : ''
}

function isLevelEnabled (logLevel) {
  const { values } = this.levels
  const logLevelVal = values[logLevel]
  return logLevelVal !== undefined && this[levelCompSym](logLevelVal, this[levelValSym])
}

/**
 * Determine if the given `current` level is enabled by comparing it
 * against the current threshold (`expected`).
 *
 * @param {SORTING_ORDER} direction comparison direction "ASC" or "DESC"
 * @param {number} current current log level number representation
 * @param {number} expected threshold value to compare with
 * @returns {boolean}
 */
function compareLevel (direction, current, expected) {
  if (direction === SORTING_ORDER.DESC) {
    return current <= expected
  }

  return current >= expected
}
/**
|
||||
* Create a level comparison function based on `levelComparison`
|
||||
* it could a default function which compares levels either in "ascending" or "descending" order or custom comparison function
|
||||
*
|
||||
* @param {SORTING_ORDER | Function} levelComparison sort levels order direction or custom comparison function
|
||||
* @returns Function
|
||||
*/
|
||||
function genLevelComparison (levelComparison) {
|
||||
if (typeof levelComparison === 'string') {
|
||||
return compareLevel.bind(null, levelComparison)
|
||||
}
|
||||
|
||||
return levelComparison
|
||||
}
|
||||
|
||||
function mappings (customLevels = null, useOnlyCustomLevels = false) {
|
||||
const customNums = customLevels
|
||||
/* eslint-disable */
|
||||
? Object.keys(customLevels).reduce((o, k) => {
|
||||
o[customLevels[k]] = k
|
||||
return o
|
||||
}, {})
|
||||
: null
|
||||
/* eslint-enable */
|
||||
|
||||
const labels = Object.assign(
|
||||
Object.create(Object.prototype, { Infinity: { value: 'silent' } }),
|
||||
useOnlyCustomLevels ? null : nums,
|
||||
customNums
|
||||
)
|
||||
const values = Object.assign(
|
||||
Object.create(Object.prototype, { silent: { value: Infinity } }),
|
||||
useOnlyCustomLevels ? null : DEFAULT_LEVELS,
|
||||
customLevels
|
||||
)
|
||||
return { labels, values }
|
||||
}
|
||||
|
||||
function assertDefaultLevelFound (defaultLevel, customLevels, useOnlyCustomLevels) {
|
||||
if (typeof defaultLevel === 'number') {
|
||||
const values = [].concat(
|
||||
Object.keys(customLevels || {}).map(key => customLevels[key]),
|
||||
useOnlyCustomLevels ? [] : Object.keys(nums).map(level => +level),
|
||||
Infinity
|
||||
)
|
||||
if (!values.includes(defaultLevel)) {
|
||||
throw Error(`default level:${defaultLevel} must be included in custom levels`)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
const labels = Object.assign(
|
||||
Object.create(Object.prototype, { silent: { value: Infinity } }),
|
||||
useOnlyCustomLevels ? null : DEFAULT_LEVELS,
|
||||
customLevels
|
||||
)
|
||||
if (!(defaultLevel in labels)) {
|
||||
throw Error(`default level:${defaultLevel} must be included in custom levels`)
|
||||
}
|
||||
}
|
||||
|
||||
function assertNoLevelCollisions (levels, customLevels) {
|
||||
const { labels, values } = levels
|
||||
for (const k in customLevels) {
|
||||
if (k in values) {
|
||||
throw Error('levels cannot be overridden')
|
||||
}
|
||||
if (customLevels[k] in labels) {
|
||||
throw Error('pre-existing level values cannot be used for new levels')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates whether `levelComparison` is correct
|
||||
*
|
||||
* @throws Error
|
||||
* @param {SORTING_ORDER | Function} levelComparison - value to validate
|
||||
* @returns
|
||||
*/
|
||||
function assertLevelComparison (levelComparison) {
|
||||
if (typeof levelComparison === 'function') {
|
||||
return
|
||||
}
|
||||
|
||||
if (typeof levelComparison === 'string' && Object.values(SORTING_ORDER).includes(levelComparison)) {
|
||||
return
|
||||
}
|
||||
|
||||
throw new Error('Levels comparison should be one of "ASC", "DESC" or "function" type')
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
initialLsCache,
|
||||
genLsCache,
|
||||
levelMethods,
|
||||
getLevel,
|
||||
setLevel,
|
||||
isLevelEnabled,
|
||||
mappings,
|
||||
assertNoLevelCollisions,
|
||||
assertDefaultLevelFound,
|
||||
genLevelComparison,
|
||||
assertLevelComparison
|
||||
}
|
||||
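To make the mapping and comparison helpers above concrete, here is a minimal sketch of how they behave when required directly from pino's `lib` directory (the level numbers are pino's standard ones; `audit: 35` is a hypothetical custom level used only for illustration):

```js
const { mappings, genLevelComparison } = require('./levels')

// mappings() returns two lookup tables: labels keyed by number and
// values keyed by label, with `silent` pinned to Infinity on both sides.
const { labels, values } = mappings()
console.log(values.info)   // 30
console.log(labels[50])    // 'error'
console.log(values.silent) // Infinity

// Custom levels are merged in; collisions with existing levels are
// rejected separately by assertNoLevelCollisions().
const custom = mappings({ audit: 35 })
console.log(custom.labels[35]) // 'audit'

// genLevelComparison binds compareLevel to a direction string and passes
// a custom comparator function through untouched.
const asc = genLevelComparison('ASC')
console.log(asc(30, 20)) // true: level 30 clears a threshold of 20
console.log(asc(10, 20)) // false
```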
3
backend/node_modules/pino/lib/meta.js
generated
vendored
Normal file
3
backend/node_modules/pino/lib/meta.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = { version: '9.14.0' }
|
||||
203
backend/node_modules/pino/lib/multistream.js
generated
vendored
Normal file
203
backend/node_modules/pino/lib/multistream.js
generated
vendored
Normal file
@@ -0,0 +1,203 @@
|
||||
'use strict'
|
||||
|
||||
const metadata = Symbol.for('pino.metadata')
|
||||
const { DEFAULT_LEVELS } = require('./constants')
|
||||
|
||||
const DEFAULT_INFO_LEVEL = DEFAULT_LEVELS.info
|
||||
|
||||
function multistream (streamsArray, opts) {
|
||||
streamsArray = streamsArray || []
|
||||
opts = opts || { dedupe: false }
|
||||
|
||||
const streamLevels = Object.create(DEFAULT_LEVELS)
|
||||
streamLevels.silent = Infinity
|
||||
if (opts.levels && typeof opts.levels === 'object') {
|
||||
Object.keys(opts.levels).forEach(i => {
|
||||
streamLevels[i] = opts.levels[i]
|
||||
})
|
||||
}
|
||||
|
||||
const res = {
|
||||
write,
|
||||
add,
|
||||
remove,
|
||||
emit,
|
||||
flushSync,
|
||||
end,
|
||||
minLevel: 0,
|
||||
lastId: 0,
|
||||
streams: [],
|
||||
clone,
|
||||
[metadata]: true,
|
||||
streamLevels
|
||||
}
|
||||
|
||||
if (Array.isArray(streamsArray)) {
|
||||
streamsArray.forEach(add, res)
|
||||
} else {
|
||||
add.call(res, streamsArray)
|
||||
}
|
||||
|
||||
// clean this object up
|
||||
// or it will stay allocated forever
|
||||
// as it is closed over by the following closures
|
||||
streamsArray = null
|
||||
|
||||
return res
|
||||
|
||||
// we can exit early because the streams are ordered by level
|
||||
function write (data) {
|
||||
let dest
|
||||
const level = this.lastLevel
|
||||
const { streams } = this
|
||||
// for handling the situation where several streams have the same level
|
||||
let recordedLevel = 0
|
||||
let stream
|
||||
|
||||
// if dedupe is set to true we send logs to the stream with the highest level
|
||||
// therefore, we have to reverse the iteration order
|
||||
for (let i = initLoopVar(streams.length, opts.dedupe); checkLoopVar(i, streams.length, opts.dedupe); i = adjustLoopVar(i, opts.dedupe)) {
|
||||
dest = streams[i]
|
||||
if (dest.level <= level) {
|
||||
if (recordedLevel !== 0 && recordedLevel !== dest.level) {
|
||||
break
|
||||
}
|
||||
stream = dest.stream
|
||||
if (stream[metadata]) {
|
||||
const { lastTime, lastMsg, lastObj, lastLogger } = this
|
||||
stream.lastLevel = level
|
||||
stream.lastTime = lastTime
|
||||
stream.lastMsg = lastMsg
|
||||
stream.lastObj = lastObj
|
||||
stream.lastLogger = lastLogger
|
||||
}
|
||||
stream.write(data)
|
||||
if (opts.dedupe) {
|
||||
recordedLevel = dest.level
|
||||
}
|
||||
} else if (!opts.dedupe) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function emit (...args) {
|
||||
for (const { stream } of this.streams) {
|
||||
if (typeof stream.emit === 'function') {
|
||||
stream.emit(...args)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function flushSync () {
|
||||
for (const { stream } of this.streams) {
|
||||
if (typeof stream.flushSync === 'function') {
|
||||
stream.flushSync()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function add (dest) {
|
||||
if (!dest) {
|
||||
return res
|
||||
}
|
||||
|
||||
// Check that dest implements either StreamEntry or DestinationStream
|
||||
const isStream = typeof dest.write === 'function' || dest.stream
|
||||
const stream_ = dest.write ? dest : dest.stream
|
||||
// This is necessary to provide a meaningful error message, otherwise it throws somewhere inside write()
|
||||
if (!isStream) {
|
||||
throw Error('stream object needs to implement either StreamEntry or DestinationStream interface')
|
||||
}
|
||||
|
||||
const { streams, streamLevels } = this
|
||||
|
||||
let level
|
||||
if (typeof dest.levelVal === 'number') {
|
||||
level = dest.levelVal
|
||||
} else if (typeof dest.level === 'string') {
|
||||
level = streamLevels[dest.level]
|
||||
} else if (typeof dest.level === 'number') {
|
||||
level = dest.level
|
||||
} else {
|
||||
level = DEFAULT_INFO_LEVEL
|
||||
}
|
||||
|
||||
const dest_ = {
|
||||
stream: stream_,
|
||||
level,
|
||||
levelVal: undefined,
|
||||
id: ++res.lastId
|
||||
}
|
||||
|
||||
streams.unshift(dest_)
|
||||
streams.sort(compareByLevel)
|
||||
|
||||
this.minLevel = streams[0].level
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
function remove (id) {
|
||||
const { streams } = this
|
||||
const index = streams.findIndex(s => s.id === id)
|
||||
|
||||
if (index >= 0) {
|
||||
streams.splice(index, 1)
|
||||
streams.sort(compareByLevel)
|
||||
this.minLevel = streams.length > 0 ? streams[0].level : -1
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
function end () {
|
||||
for (const { stream } of this.streams) {
|
||||
if (typeof stream.flushSync === 'function') {
|
||||
stream.flushSync()
|
||||
}
|
||||
stream.end()
|
||||
}
|
||||
}
|
||||
|
||||
function clone (level) {
|
||||
const streams = new Array(this.streams.length)
|
||||
|
||||
for (let i = 0; i < streams.length; i++) {
|
||||
streams[i] = {
|
||||
level,
|
||||
stream: this.streams[i].stream
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
write,
|
||||
add,
|
||||
remove,
|
||||
minLevel: level,
|
||||
streams,
|
||||
clone,
|
||||
emit,
|
||||
flushSync,
|
||||
[metadata]: true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function compareByLevel (a, b) {
|
||||
return a.level - b.level
|
||||
}
|
||||
|
||||
function initLoopVar (length, dedupe) {
|
||||
return dedupe ? length - 1 : 0
|
||||
}
|
||||
|
||||
function adjustLoopVar (i, dedupe) {
|
||||
return dedupe ? i - 1 : i + 1
|
||||
}
|
||||
|
||||
function checkLoopVar (i, length, dedupe) {
|
||||
return dedupe ? i >= 0 : i < length
|
||||
}
|
||||
|
||||
module.exports = multistream
|
||||
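A minimal sketch of how the routing above plays out through pino's public API (the destinations are ordinary writable streams; nothing here goes beyond the documented `dedupe` flag):

```js
const pino = require('pino')

// Streams are kept sorted by level, so write() can exit early as soon as
// dest.level exceeds the record's level.
const streams = [
  { level: 'info', stream: process.stdout },
  { level: 'error', stream: process.stderr }
]

const logger = pino({ level: 'info' }, pino.multistream(streams))
logger.info('written to stdout only')   // stderr's threshold (50) not met
logger.error('written to both streams') // 50 satisfies both thresholds

// With dedupe the loop runs in reverse and stops after the first matching
// level, so each record lands only on the highest-level destination:
const deduped = pino({ level: 'info' }, pino.multistream(streams, { dedupe: true }))
deduped.error('written to stderr only')
```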
260
backend/node_modules/pino/lib/proto.js
generated
vendored
Normal file
260
backend/node_modules/pino/lib/proto.js
generated
vendored
Normal file
@@ -0,0 +1,260 @@
|
||||
'use strict'
|
||||
|
||||
/* eslint no-prototype-builtins: 0 */
|
||||
|
||||
const { EventEmitter } = require('node:events')
|
||||
const {
|
||||
lsCacheSym,
|
||||
levelValSym,
|
||||
setLevelSym,
|
||||
getLevelSym,
|
||||
chindingsSym,
|
||||
parsedChindingsSym,
|
||||
mixinSym,
|
||||
asJsonSym,
|
||||
writeSym,
|
||||
mixinMergeStrategySym,
|
||||
timeSym,
|
||||
timeSliceIndexSym,
|
||||
streamSym,
|
||||
serializersSym,
|
||||
formattersSym,
|
||||
errorKeySym,
|
||||
messageKeySym,
|
||||
useOnlyCustomLevelsSym,
|
||||
needsMetadataGsym,
|
||||
redactFmtSym,
|
||||
stringifySym,
|
||||
formatOptsSym,
|
||||
stringifiersSym,
|
||||
msgPrefixSym,
|
||||
hooksSym
|
||||
} = require('./symbols')
|
||||
const {
|
||||
getLevel,
|
||||
setLevel,
|
||||
isLevelEnabled,
|
||||
mappings,
|
||||
initialLsCache,
|
||||
genLsCache,
|
||||
assertNoLevelCollisions
|
||||
} = require('./levels')
|
||||
const {
|
||||
asChindings,
|
||||
asJson,
|
||||
buildFormatters,
|
||||
stringify,
|
||||
noop
|
||||
} = require('./tools')
|
||||
const {
|
||||
version
|
||||
} = require('./meta')
|
||||
const redaction = require('./redaction')
|
||||
|
||||
// note: use of class is satirical
|
||||
// https://github.com/pinojs/pino/pull/433#pullrequestreview-127703127
|
||||
const constructor = class Pino {}
|
||||
const prototype = {
|
||||
constructor,
|
||||
child,
|
||||
bindings,
|
||||
setBindings,
|
||||
flush,
|
||||
isLevelEnabled,
|
||||
version,
|
||||
get level () { return this[getLevelSym]() },
|
||||
set level (lvl) { this[setLevelSym](lvl) },
|
||||
get levelVal () { return this[levelValSym] },
|
||||
set levelVal (n) { throw Error('levelVal is read-only') },
|
||||
get msgPrefix () { return this[msgPrefixSym] },
|
||||
get [Symbol.toStringTag] () { return 'Pino' },
|
||||
[lsCacheSym]: initialLsCache,
|
||||
[writeSym]: write,
|
||||
[asJsonSym]: asJson,
|
||||
[getLevelSym]: getLevel,
|
||||
[setLevelSym]: setLevel
|
||||
}
|
||||
|
||||
Object.setPrototypeOf(prototype, EventEmitter.prototype)
|
||||
|
||||
// exporting and consuming the prototype object using factory pattern fixes scoping issues with getters when serializing
|
||||
module.exports = function () {
|
||||
return Object.create(prototype)
|
||||
}
|
||||
|
||||
const resetChildingsFormatter = bindings => bindings
|
||||
function child (bindings, options) {
|
||||
if (!bindings) {
|
||||
throw Error('missing bindings for child Pino')
|
||||
}
|
||||
const serializers = this[serializersSym]
|
||||
const formatters = this[formattersSym]
|
||||
const instance = Object.create(this)
|
||||
|
||||
// If an `options` object was not supplied, we can improve
|
||||
// the performance of child creation by skipping
|
||||
// the checks for set options and simply returning
|
||||
// a baseline instance.
|
||||
if (options == null) {
|
||||
if (instance[formattersSym].bindings !== resetChildingsFormatter) {
|
||||
instance[formattersSym] = buildFormatters(
|
||||
formatters.level,
|
||||
resetChildingsFormatter,
|
||||
formatters.log
|
||||
)
|
||||
}
|
||||
|
||||
instance[chindingsSym] = asChindings(instance, bindings)
|
||||
|
||||
// Always call setLevel to ensure the child gets its own method references
|
||||
// This prevents issues when parent methods are wrapped (e.g., by Sinon)
|
||||
instance[setLevelSym](this.level)
|
||||
|
||||
if (this.onChild !== noop) {
|
||||
this.onChild(instance)
|
||||
}
|
||||
|
||||
return instance
|
||||
}
|
||||
|
||||
if (options.hasOwnProperty('serializers') === true) {
|
||||
instance[serializersSym] = Object.create(null)
|
||||
|
||||
for (const k in serializers) {
|
||||
instance[serializersSym][k] = serializers[k]
|
||||
}
|
||||
const parentSymbols = Object.getOwnPropertySymbols(serializers)
|
||||
/* eslint no-var: off */
|
||||
for (var i = 0; i < parentSymbols.length; i++) {
|
||||
const ks = parentSymbols[i]
|
||||
instance[serializersSym][ks] = serializers[ks]
|
||||
}
|
||||
|
||||
for (const bk in options.serializers) {
|
||||
instance[serializersSym][bk] = options.serializers[bk]
|
||||
}
|
||||
const bindingsSymbols = Object.getOwnPropertySymbols(options.serializers)
|
||||
for (var bi = 0; bi < bindingsSymbols.length; bi++) {
|
||||
const bks = bindingsSymbols[bi]
|
||||
instance[serializersSym][bks] = options.serializers[bks]
|
||||
}
|
||||
} else instance[serializersSym] = serializers
|
||||
if (options.hasOwnProperty('formatters')) {
|
||||
const { level, bindings: chindings, log } = options.formatters
|
||||
instance[formattersSym] = buildFormatters(
|
||||
level || formatters.level,
|
||||
chindings || resetChildingsFormatter,
|
||||
log || formatters.log
|
||||
)
|
||||
} else {
|
||||
instance[formattersSym] = buildFormatters(
|
||||
formatters.level,
|
||||
resetChildingsFormatter,
|
||||
formatters.log
|
||||
)
|
||||
}
|
||||
if (options.hasOwnProperty('customLevels') === true) {
|
||||
assertNoLevelCollisions(this.levels, options.customLevels)
|
||||
instance.levels = mappings(options.customLevels, instance[useOnlyCustomLevelsSym])
|
||||
genLsCache(instance)
|
||||
}
|
||||
|
||||
// redact must be processed before asChindings, and is only replaced if provided
|
||||
if ((typeof options.redact === 'object' && options.redact !== null) || Array.isArray(options.redact)) {
|
||||
instance.redact = options.redact // replace redact directly
|
||||
const stringifiers = redaction(instance.redact, stringify)
|
||||
const formatOpts = { stringify: stringifiers[redactFmtSym] }
|
||||
instance[stringifySym] = stringify
|
||||
instance[stringifiersSym] = stringifiers
|
||||
instance[formatOptsSym] = formatOpts
|
||||
}
|
||||
|
||||
if (typeof options.msgPrefix === 'string') {
|
||||
instance[msgPrefixSym] = (this[msgPrefixSym] || '') + options.msgPrefix
|
||||
}
|
||||
|
||||
instance[chindingsSym] = asChindings(instance, bindings)
|
||||
const childLevel = options.level || this.level
|
||||
instance[setLevelSym](childLevel)
|
||||
this.onChild(instance)
|
||||
return instance
|
||||
}
|
||||
|
||||
function bindings () {
|
||||
const chindings = this[chindingsSym]
|
||||
const chindingsJson = `{${chindings.substr(1)}}` // at least contains ,"pid":7068,"hostname":"myMac"
|
||||
const bindingsFromJson = JSON.parse(chindingsJson)
|
||||
delete bindingsFromJson.pid
|
||||
delete bindingsFromJson.hostname
|
||||
return bindingsFromJson
|
||||
}
|
||||
|
||||
function setBindings (newBindings) {
|
||||
const chindings = asChindings(this, newBindings)
|
||||
this[chindingsSym] = chindings
|
||||
delete this[parsedChindingsSym]
|
||||
}
|
||||
|
||||
/**
|
||||
* Default strategy for creating `mergeObject` from arguments and the result from `mixin()`.
|
||||
* Fields from `mergeObject` have higher priority in this strategy.
|
||||
*
|
||||
* @param {Object} mergeObject The object a user has supplied to the logging function.
|
||||
* @param {Object} mixinObject The result of the `mixin` method.
|
||||
* @return {Object}
|
||||
*/
|
||||
function defaultMixinMergeStrategy (mergeObject, mixinObject) {
|
||||
return Object.assign(mixinObject, mergeObject)
|
||||
}
|
||||
|
||||
function write (_obj, msg, num) {
|
||||
const t = this[timeSym]()
|
||||
const mixin = this[mixinSym]
|
||||
const errorKey = this[errorKeySym]
|
||||
const messageKey = this[messageKeySym]
|
||||
const mixinMergeStrategy = this[mixinMergeStrategySym] || defaultMixinMergeStrategy
|
||||
let obj
|
||||
const streamWriteHook = this[hooksSym].streamWrite
|
||||
|
||||
if (_obj === undefined || _obj === null) {
|
||||
obj = {}
|
||||
} else if (_obj instanceof Error) {
|
||||
obj = { [errorKey]: _obj }
|
||||
if (msg === undefined) {
|
||||
msg = _obj.message
|
||||
}
|
||||
} else {
|
||||
obj = _obj
|
||||
if (msg === undefined && _obj[messageKey] === undefined && _obj[errorKey]) {
|
||||
msg = _obj[errorKey].message
|
||||
}
|
||||
}
|
||||
|
||||
if (mixin) {
|
||||
obj = mixinMergeStrategy(obj, mixin(obj, num, this))
|
||||
}
|
||||
|
||||
const s = this[asJsonSym](obj, msg, num, t)
|
||||
|
||||
const stream = this[streamSym]
|
||||
if (stream[needsMetadataGsym] === true) {
|
||||
stream.lastLevel = num
|
||||
stream.lastObj = obj
|
||||
stream.lastMsg = msg
|
||||
stream.lastTime = t.slice(this[timeSliceIndexSym])
|
||||
stream.lastLogger = this // for child loggers
|
||||
}
|
||||
stream.write(streamWriteHook ? streamWriteHook(s) : s)
|
||||
}
|
||||
|
||||
function flush (cb) {
|
||||
if (cb != null && typeof cb !== 'function') {
|
||||
throw Error('callback must be a function')
|
||||
}
|
||||
|
||||
const stream = this[streamSym]
|
||||
|
||||
if (typeof stream.flush === 'function') {
|
||||
stream.flush(cb || noop)
|
||||
} else if (cb) cb()
|
||||
}
|
||||
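A short sketch of the child-logger behavior implemented above, seen through the public API (the output line is abbreviated):

```js
const pino = require('pino')

const parent = pino({ msgPrefix: 'app: ' })

// child() clones the instance with Object.create(this), appends the new
// bindings to the cached "chindings" string, and concatenates msgPrefix.
const child = parent.child({ module: 'billing' }, { msgPrefix: 'worker: ' })

child.info('started')
// => {"level":30,...,"module":"billing","msg":"app: worker: started"}

// bindings() re-parses the chindings string and strips pid/hostname:
console.log(child.bindings()) // { module: 'billing' }
```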
114
backend/node_modules/pino/lib/redaction.js
generated
vendored
Normal file
114
backend/node_modules/pino/lib/redaction.js
generated
vendored
Normal file
@@ -0,0 +1,114 @@
|
||||
'use strict'
|
||||
|
||||
const Redact = require('@pinojs/redact')
|
||||
const { redactFmtSym, wildcardFirstSym } = require('./symbols')
|
||||
|
||||
// Custom rx regex equivalent to fast-redact's rx
|
||||
const rx = /[^.[\]]+|\[([^[\]]*?)\]/g
|
||||
|
||||
const CENSOR = '[Redacted]'
|
||||
const strict = false // TODO should this be configurable?
|
||||
|
||||
function redaction (opts, serialize) {
|
||||
const { paths, censor, remove } = handle(opts)
|
||||
|
||||
const shape = paths.reduce((o, str) => {
|
||||
rx.lastIndex = 0
|
||||
const first = rx.exec(str)
|
||||
const next = rx.exec(str)
|
||||
|
||||
// ns is the top-level path segment, brackets + quoting removed.
|
||||
let ns = first[1] !== undefined
|
||||
? first[1].replace(/^(?:"|'|`)(.*)(?:"|'|`)$/, '$1')
|
||||
: first[0]
|
||||
|
||||
if (ns === '*') {
|
||||
ns = wildcardFirstSym
|
||||
}
|
||||
|
||||
// top level key:
|
||||
if (next === null) {
|
||||
o[ns] = null
|
||||
return o
|
||||
}
|
||||
|
||||
// path with at least two segments:
|
||||
// if ns is already redacted at the top level, ignore lower level redactions
|
||||
if (o[ns] === null) {
|
||||
return o
|
||||
}
|
||||
|
||||
const { index } = next
|
||||
const nextPath = `${str.substr(index, str.length - 1)}`
|
||||
|
||||
o[ns] = o[ns] || []
|
||||
|
||||
// shape is a mix of paths beginning with literal values and wildcard
|
||||
// paths [ "a.b.c", "*.b.z" ] should reduce to a shape of
|
||||
// { "a": [ "b.c", "b.z" ], *: [ "b.z" ] }
|
||||
// note: "b.z" is in both "a" and * arrays because "a" matches the wildcard.
|
||||
// (* entry has wildcardFirstSym as key)
|
||||
if (ns !== wildcardFirstSym && o[ns].length === 0) {
|
||||
// the first time an ns is seen, it inherits all '*' redactions registered so far
|
||||
o[ns].push(...(o[wildcardFirstSym] || []))
|
||||
}
|
||||
|
||||
if (ns === wildcardFirstSym) {
|
||||
// new * path gets added to all previously registered literal ns's.
|
||||
Object.keys(o).forEach(function (k) {
|
||||
if (o[k]) {
|
||||
o[k].push(nextPath)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
o[ns].push(nextPath)
|
||||
return o
|
||||
}, {})
|
||||
|
||||
// the redactor assigned to the format symbol key
|
||||
// provides top level redaction for instances where
|
||||
// an object is interpolated into the msg string
|
||||
const result = {
|
||||
[redactFmtSym]: Redact({ paths, censor, serialize, strict, remove })
|
||||
}
|
||||
|
||||
const topCensor = (...args) => {
|
||||
return typeof censor === 'function' ? serialize(censor(...args)) : serialize(censor)
|
||||
}
|
||||
|
||||
return [...Object.keys(shape), ...Object.getOwnPropertySymbols(shape)].reduce((o, k) => {
|
||||
// top level key:
|
||||
if (shape[k] === null) {
|
||||
o[k] = (value) => topCensor(value, [k])
|
||||
} else {
|
||||
const wrappedCensor = typeof censor === 'function'
|
||||
? (value, path) => {
|
||||
return censor(value, [k, ...path])
|
||||
}
|
||||
: censor
|
||||
o[k] = Redact({
|
||||
paths: shape[k],
|
||||
censor: wrappedCensor,
|
||||
serialize,
|
||||
strict,
|
||||
remove
|
||||
})
|
||||
}
|
||||
return o
|
||||
}, result)
|
||||
}
|
||||
|
||||
function handle (opts) {
|
||||
if (Array.isArray(opts)) {
|
||||
opts = { paths: opts, censor: CENSOR }
|
||||
return opts
|
||||
}
|
||||
let { paths, censor = CENSOR, remove } = opts
|
||||
if (Array.isArray(paths) === false) { throw Error('pino – redact must contain an array of strings') }
|
||||
if (remove === true) censor = undefined
|
||||
|
||||
return { paths, censor, remove }
|
||||
}
|
||||
|
||||
module.exports = redaction
|
||||
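To illustrate the shape reduction described in the comments above, here is how the same logic surfaces through pino's documented `redact` option (a sketch; `'[Redacted]'` is the default censor):

```js
const pino = require('pino')

// ['password', '*.secret'] splits into a top-level key (password => null in
// the shape) and a wildcard path stored under the wildcardFirstSym key.
const logger = pino({
  redact: { paths: ['password', '*.secret'], censor: '[Redacted]' }
})

logger.info({ password: 'hunter2', auth: { secret: 'k3y' } }, 'login')
// => {...,"password":"[Redacted]","auth":{"secret":"[Redacted]"},"msg":"login"}

// remove: true drops the keys entirely; handle() implements this by
// setting censor to undefined before building the redactors.
const quiet = pino({ redact: { paths: ['password'], remove: true } })
quiet.info({ password: 'hunter2' }, 'login') // no password key in the output
```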
74
backend/node_modules/pino/lib/symbols.js
generated
vendored
Normal file
74
backend/node_modules/pino/lib/symbols.js
generated
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
'use strict'
|
||||
|
||||
const setLevelSym = Symbol('pino.setLevel')
|
||||
const getLevelSym = Symbol('pino.getLevel')
|
||||
const levelValSym = Symbol('pino.levelVal')
|
||||
const levelCompSym = Symbol('pino.levelComp')
|
||||
const useLevelLabelsSym = Symbol('pino.useLevelLabels')
|
||||
const useOnlyCustomLevelsSym = Symbol('pino.useOnlyCustomLevels')
|
||||
const mixinSym = Symbol('pino.mixin')
|
||||
|
||||
const lsCacheSym = Symbol('pino.lsCache')
|
||||
const chindingsSym = Symbol('pino.chindings')
|
||||
|
||||
const asJsonSym = Symbol('pino.asJson')
|
||||
const writeSym = Symbol('pino.write')
|
||||
const redactFmtSym = Symbol('pino.redactFmt')
|
||||
|
||||
const timeSym = Symbol('pino.time')
|
||||
const timeSliceIndexSym = Symbol('pino.timeSliceIndex')
|
||||
const streamSym = Symbol('pino.stream')
|
||||
const stringifySym = Symbol('pino.stringify')
|
||||
const stringifySafeSym = Symbol('pino.stringifySafe')
|
||||
const stringifiersSym = Symbol('pino.stringifiers')
|
||||
const endSym = Symbol('pino.end')
|
||||
const formatOptsSym = Symbol('pino.formatOpts')
|
||||
const messageKeySym = Symbol('pino.messageKey')
|
||||
const errorKeySym = Symbol('pino.errorKey')
|
||||
const nestedKeySym = Symbol('pino.nestedKey')
|
||||
const nestedKeyStrSym = Symbol('pino.nestedKeyStr')
|
||||
const mixinMergeStrategySym = Symbol('pino.mixinMergeStrategy')
|
||||
const msgPrefixSym = Symbol('pino.msgPrefix')
|
||||
|
||||
const wildcardFirstSym = Symbol('pino.wildcardFirst')
|
||||
|
||||
// public symbols, registered globally so consumers
|
||||
// do not need the same pino version for these
|
||||
const serializersSym = Symbol.for('pino.serializers')
|
||||
const formattersSym = Symbol.for('pino.formatters')
|
||||
const hooksSym = Symbol.for('pino.hooks')
|
||||
const needsMetadataGsym = Symbol.for('pino.metadata')
|
||||
|
||||
module.exports = {
|
||||
setLevelSym,
|
||||
getLevelSym,
|
||||
levelValSym,
|
||||
levelCompSym,
|
||||
useLevelLabelsSym,
|
||||
mixinSym,
|
||||
lsCacheSym,
|
||||
chindingsSym,
|
||||
asJsonSym,
|
||||
writeSym,
|
||||
serializersSym,
|
||||
redactFmtSym,
|
||||
timeSym,
|
||||
timeSliceIndexSym,
|
||||
streamSym,
|
||||
stringifySym,
|
||||
stringifySafeSym,
|
||||
stringifiersSym,
|
||||
endSym,
|
||||
formatOptsSym,
|
||||
messageKeySym,
|
||||
errorKeySym,
|
||||
nestedKeySym,
|
||||
wildcardFirstSym,
|
||||
needsMetadataGsym,
|
||||
useOnlyCustomLevelsSym,
|
||||
formattersSym,
|
||||
hooksSym,
|
||||
nestedKeyStrSym,
|
||||
mixinMergeStrategySym,
|
||||
msgPrefixSym
|
||||
}
|
||||
39
backend/node_modules/pino/lib/time.js
generated
vendored
Normal file
39
backend/node_modules/pino/lib/time.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
'use strict'
|
||||
|
||||
const nullTime = () => ''
|
||||
|
||||
const epochTime = () => `,"time":${Date.now()}`
|
||||
|
||||
const unixTime = () => `,"time":${Math.round(Date.now() / 1000.0)}`
|
||||
|
||||
const isoTime = () => `,"time":"${new Date(Date.now()).toISOString()}"` // using Date.now() for testability
|
||||
|
||||
const NS_PER_MS = 1_000_000n
|
||||
const NS_PER_SEC = 1_000_000_000n
|
||||
|
||||
const startWallTimeNs = BigInt(Date.now()) * NS_PER_MS
|
||||
const startHrTime = process.hrtime.bigint()
|
||||
|
||||
const isoTimeNano = () => {
|
||||
const elapsedNs = process.hrtime.bigint() - startHrTime
|
||||
const currentTimeNs = startWallTimeNs + elapsedNs
|
||||
|
||||
const secondsSinceEpoch = currentTimeNs / NS_PER_SEC
|
||||
const nanosWithinSecond = currentTimeNs % NS_PER_SEC
|
||||
|
||||
const msSinceEpoch = Number(secondsSinceEpoch * 1000n + nanosWithinSecond / 1_000_000n)
|
||||
const date = new Date(msSinceEpoch)
|
||||
|
||||
const year = date.getUTCFullYear()
|
||||
const month = (date.getUTCMonth() + 1).toString().padStart(2, '0')
|
||||
const day = date.getUTCDate().toString().padStart(2, '0')
|
||||
const hours = date.getUTCHours().toString().padStart(2, '0')
|
||||
const minutes = date.getUTCMinutes().toString().padStart(2, '0')
|
||||
const seconds = date.getUTCSeconds().toString().padStart(2, '0')
|
||||
|
||||
return `,"time":"${year}-${month}-${day}T${hours}:${minutes}:${seconds}.${nanosWithinSecond
|
||||
.toString()
|
||||
.padStart(9, '0')}Z"`
|
||||
}
|
||||
|
||||
module.exports = { nullTime, epochTime, unixTime, isoTime, isoTimeNano }
|
||||
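Each function above returns a pre-serialized JSON fragment (note the leading comma) that `asJson` concatenates directly after the level data. A small sketch, assuming these are surfaced on `pino.stdTimeFunctions` as in pino's main entry point:

```js
const pino = require('pino')

console.log(pino.stdTimeFunctions.epochTime()) // ,"time":1700000000000
console.log(pino.stdTimeFunctions.isoTime())   // ,"time":"2023-11-14T22:13:20.000Z"

// isoTimeNano anchors the wall clock once at startup and then measures
// elapsed time with process.hrtime.bigint(), giving nanosecond-resolution
// ISO timestamps without a Date.now() call per log line.
const logger = pino({ timestamp: pino.stdTimeFunctions.isoTimeNano })
logger.info('nanosecond-precision timestamp')
```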
436
backend/node_modules/pino/lib/tools.js
generated
vendored
Normal file
436
backend/node_modules/pino/lib/tools.js
generated
vendored
Normal file
@@ -0,0 +1,436 @@
|
||||
'use strict'
|
||||
|
||||
/* eslint no-prototype-builtins: 0 */
|
||||
|
||||
const diagChan = require('node:diagnostics_channel')
|
||||
const format = require('quick-format-unescaped')
|
||||
const { mapHttpRequest, mapHttpResponse } = require('pino-std-serializers')
|
||||
const SonicBoom = require('sonic-boom')
|
||||
const onExit = require('on-exit-leak-free')
|
||||
const {
|
||||
lsCacheSym,
|
||||
chindingsSym,
|
||||
writeSym,
|
||||
serializersSym,
|
||||
formatOptsSym,
|
||||
endSym,
|
||||
stringifiersSym,
|
||||
stringifySym,
|
||||
stringifySafeSym,
|
||||
wildcardFirstSym,
|
||||
nestedKeySym,
|
||||
formattersSym,
|
||||
messageKeySym,
|
||||
errorKeySym,
|
||||
nestedKeyStrSym,
|
||||
msgPrefixSym
|
||||
} = require('./symbols')
|
||||
const { isMainThread } = require('worker_threads')
|
||||
const transport = require('./transport')
|
||||
|
||||
let asJsonChan
|
||||
// Node >= 18.19 supports diagnostics_channel.tracingChannel
|
||||
if (typeof diagChan.tracingChannel === 'function') {
|
||||
asJsonChan = diagChan.tracingChannel('pino_asJson')
|
||||
} else {
|
||||
// On older Node 18.x (e.g. 18.18), provide a no-op fallback
|
||||
asJsonChan = {
|
||||
hasSubscribers: false,
|
||||
traceSync (fn, store, thisArg, ...args) {
|
||||
return fn.call(thisArg, ...args)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function noop () {
|
||||
}
|
||||
|
||||
function genLog (level, hook) {
|
||||
if (!hook) return LOG
|
||||
|
||||
return function hookWrappedLog (...args) {
|
||||
hook.call(this, args, LOG, level)
|
||||
}
|
||||
|
||||
function LOG (o, ...n) {
|
||||
if (typeof o === 'object') {
|
||||
let msg = o
|
||||
if (o !== null) {
|
||||
if (o.method && o.headers && o.socket) {
|
||||
o = mapHttpRequest(o)
|
||||
} else if (typeof o.setHeader === 'function') {
|
||||
o = mapHttpResponse(o)
|
||||
}
|
||||
}
|
||||
let formatParams
|
||||
if (msg === null && n.length === 0) {
|
||||
formatParams = [null]
|
||||
} else {
|
||||
msg = n.shift()
|
||||
formatParams = n
|
||||
}
|
||||
// We do not use a coercive check for `msg` as it is
|
||||
// measurably slower than the explicit checks.
|
||||
if (typeof this[msgPrefixSym] === 'string' && msg !== undefined && msg !== null) {
|
||||
msg = this[msgPrefixSym] + msg
|
||||
}
|
||||
this[writeSym](o, format(msg, formatParams, this[formatOptsSym]), level)
|
||||
} else {
|
||||
let msg = o === undefined ? n.shift() : o
|
||||
|
||||
// We do not use a coercive check for `msg` as it is
|
||||
// measurably slower than the explicit checks.
|
||||
if (typeof this[msgPrefixSym] === 'string' && msg !== undefined && msg !== null) {
|
||||
msg = this[msgPrefixSym] + msg
|
||||
}
|
||||
this[writeSym](null, format(msg, n, this[formatOptsSym]), level)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// magically escape strings for json
|
||||
// relying on their charCodeAt
|
||||
// everything below 32 needs JSON.stringify()
|
||||
// 34 and 92 happen all the time, so we
|
||||
// have a fast case for them
|
||||
function asString (str) {
|
||||
let result = ''
|
||||
let last = 0
|
||||
let found = false
|
||||
let point = 255
|
||||
const l = str.length
|
||||
if (l > 100) {
|
||||
return JSON.stringify(str)
|
||||
}
|
||||
for (var i = 0; i < l && point >= 32; i++) {
|
||||
point = str.charCodeAt(i)
|
||||
if (point === 34 || point === 92) {
|
||||
result += str.slice(last, i) + '\\'
|
||||
last = i
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
result = str
|
||||
} else {
|
||||
result += str.slice(last)
|
||||
}
|
||||
return point < 32 ? JSON.stringify(str) : '"' + result + '"'
|
||||
}
|
||||
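// For illustration only (not part of the module): the values asString
// would return for a few inputs. Quotes (34) and backslashes (92) are
// escaped inline; any control character (< 32) or a length over 100
// falls back to JSON.stringify.
//
//   asString('hello')         // => '"hello"'
//   asString('he said "hi"')  // => '"he said \"hi\""'
//   asString('line\nbreak')   // => JSON.stringify('line\nbreak')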
|
||||
/**
|
||||
* `asJson` wraps `_asJson` in order to facilitate generating diagnostics.
|
||||
*
|
||||
* @param {object} obj The merging object passed to the log method.
|
||||
* @param {string} msg The log message passed to the log method.
|
||||
* @param {number} num The log level number.
|
||||
* @param {string} time The time fragment returned by the configured time function.
|
||||
*
|
||||
* @returns {string}
|
||||
*/
|
||||
function asJson (obj, msg, num, time) {
|
||||
if (asJsonChan.hasSubscribers === false) {
|
||||
return _asJson.call(this, obj, msg, num, time)
|
||||
}
|
||||
|
||||
const store = { instance: this, arguments }
|
||||
return asJsonChan.traceSync(_asJson, store, this, obj, msg, num, time)
|
||||
}
|
||||
|
||||
/**
|
||||
* `_asJson` parses all collected data and generates the finalized newline
|
||||
* delimited JSON string.
|
||||
*
|
||||
* @param {object} obj The merging object passed to the log method.
|
||||
* @param {string} msg The log message passed to the log method.
|
||||
* @param {number} num The log level number.
|
||||
* @param {string} time The time fragment returned by the configured time function.
|
||||
*
|
||||
* @returns {string} The finalized log string terminated with a newline.
|
||||
* @private
|
||||
*/
|
||||
function _asJson (obj, msg, num, time) {
|
||||
const stringify = this[stringifySym]
|
||||
const stringifySafe = this[stringifySafeSym]
|
||||
const stringifiers = this[stringifiersSym]
|
||||
const end = this[endSym]
|
||||
const chindings = this[chindingsSym]
|
||||
const serializers = this[serializersSym]
|
||||
const formatters = this[formattersSym]
|
||||
const messageKey = this[messageKeySym]
|
||||
const errorKey = this[errorKeySym]
|
||||
let data = this[lsCacheSym][num] + time
|
||||
|
||||
// we need the child bindings added to the output first so instance logged
|
||||
// objects can take precedence when JSON.parse-ing the resulting log line
|
||||
data = data + chindings
|
||||
|
||||
let value
|
||||
if (formatters.log) {
|
||||
obj = formatters.log(obj)
|
||||
}
|
||||
const wildcardStringifier = stringifiers[wildcardFirstSym]
|
||||
let propStr = ''
|
||||
for (const key in obj) {
|
||||
value = obj[key]
|
||||
if (Object.prototype.hasOwnProperty.call(obj, key) && value !== undefined) {
|
||||
if (serializers[key]) {
|
||||
value = serializers[key](value)
|
||||
} else if (key === errorKey && serializers.err) {
|
||||
value = serializers.err(value)
|
||||
}
|
||||
|
||||
const stringifier = stringifiers[key] || wildcardStringifier
|
||||
|
||||
switch (typeof value) {
|
||||
case 'undefined':
|
||||
case 'function':
|
||||
continue
|
||||
case 'number':
|
||||
/* eslint no-fallthrough: "off" */
|
||||
if (Number.isFinite(value) === false) {
|
||||
value = null
|
||||
}
|
||||
// this case explicitly falls through to the next one
|
||||
case 'boolean':
|
||||
if (stringifier) value = stringifier(value)
|
||||
break
|
||||
case 'string':
|
||||
value = (stringifier || asString)(value)
|
||||
break
|
||||
default:
|
||||
value = (stringifier || stringify)(value, stringifySafe)
|
||||
}
|
||||
if (value === undefined) continue
|
||||
const strKey = asString(key)
|
||||
propStr += ',' + strKey + ':' + value
|
||||
}
|
||||
}
|
||||
|
||||
let msgStr = ''
|
||||
if (msg !== undefined) {
|
||||
value = serializers[messageKey] ? serializers[messageKey](msg) : msg
|
||||
const stringifier = stringifiers[messageKey] || wildcardStringifier
|
||||
|
||||
switch (typeof value) {
|
||||
case 'function':
|
||||
break
|
||||
case 'number':
|
||||
/* eslint no-fallthrough: "off" */
|
||||
if (Number.isFinite(value) === false) {
|
||||
value = null
|
||||
}
|
||||
// this case explicitly falls through to the next one
|
||||
case 'boolean':
|
||||
if (stringifier) value = stringifier(value)
|
||||
msgStr = ',"' + messageKey + '":' + value
|
||||
break
|
||||
case 'string':
|
||||
value = (stringifier || asString)(value)
|
||||
msgStr = ',"' + messageKey + '":' + value
|
||||
break
|
||||
default:
|
||||
value = (stringifier || stringify)(value, stringifySafe)
|
||||
msgStr = ',"' + messageKey + '":' + value
|
||||
}
|
||||
}
|
||||
|
||||
if (this[nestedKeySym] && propStr) {
|
||||
// place all the obj properties under the specified key
|
||||
// the nested key is already formatted from the constructor
|
||||
return data + this[nestedKeyStrSym] + propStr.slice(1) + '}' + msgStr + end
|
||||
} else {
|
||||
return data + propStr + msgStr + end
|
||||
}
|
||||
}
|
||||
|
||||
function asChindings (instance, bindings) {
|
||||
let value
|
||||
let data = instance[chindingsSym]
|
||||
const stringify = instance[stringifySym]
|
||||
const stringifySafe = instance[stringifySafeSym]
|
||||
const stringifiers = instance[stringifiersSym]
|
||||
const wildcardStringifier = stringifiers[wildcardFirstSym]
|
||||
const serializers = instance[serializersSym]
|
||||
const formatter = instance[formattersSym].bindings
|
||||
bindings = formatter(bindings)
|
||||
|
||||
for (const key in bindings) {
|
||||
value = bindings[key]
|
||||
const valid = (key.length < 5 || (key !== 'level' &&
|
||||
key !== 'serializers' &&
|
||||
key !== 'formatters' &&
|
||||
key !== 'customLevels')) &&
|
||||
bindings.hasOwnProperty(key) &&
|
||||
value !== undefined
|
||||
if (valid === true) {
|
||||
value = serializers[key] ? serializers[key](value) : value
|
||||
value = (stringifiers[key] || wildcardStringifier || stringify)(value, stringifySafe)
|
||||
if (value === undefined) continue
|
||||
data += ',"' + key + '":' + value
|
||||
}
|
||||
}
|
||||
return data
|
||||
}
|
||||
|
||||
function hasBeenTampered (stream) {
|
||||
return stream.write !== stream.constructor.prototype.write
|
||||
}
|
||||
|
||||
function buildSafeSonicBoom (opts) {
|
||||
const stream = new SonicBoom(opts)
|
||||
stream.on('error', filterBrokenPipe)
|
||||
// If we are sync: false, we must flush on exit
|
||||
if (!opts.sync && isMainThread) {
|
||||
onExit.register(stream, autoEnd)
|
||||
|
||||
stream.on('close', function () {
|
||||
onExit.unregister(stream)
|
||||
})
|
||||
}
|
||||
return stream
|
||||
|
||||
function filterBrokenPipe (err) {
|
||||
// Impossible to replicate across all operating systems
|
||||
/* istanbul ignore next */
|
||||
if (err.code === 'EPIPE') {
|
||||
// If we get EPIPE, we should stop logging here
|
||||
// however we have no control to the consumer of
|
||||
// SonicBoom, so we just overwrite the write method
|
||||
stream.write = noop
|
||||
stream.end = noop
|
||||
stream.flushSync = noop
|
||||
stream.destroy = noop
|
||||
return
|
||||
}
|
||||
stream.removeListener('error', filterBrokenPipe)
|
||||
stream.emit('error', err)
|
||||
}
|
||||
}
|
||||
|
||||
function autoEnd (stream, eventName) {
|
||||
// This check is needed only on some platforms
|
||||
/* istanbul ignore next */
|
||||
if (stream.destroyed) {
|
||||
return
|
||||
}
|
||||
|
||||
if (eventName === 'beforeExit') {
|
||||
// We still have an event loop, let's use it
|
||||
stream.flush()
|
||||
stream.on('drain', function () {
|
||||
stream.end()
|
||||
})
|
||||
} else {
|
||||
// For some reason istanbul is not detecting this, but it's there
|
||||
/* istanbul ignore next */
|
||||
// We do not have an event loop, so flush synchronously
|
||||
stream.flushSync()
|
||||
}
|
||||
}
|
||||
|
||||
function createArgsNormalizer (defaultOptions) {
|
||||
return function normalizeArgs (instance, caller, opts = {}, stream) {
|
||||
// support stream as a string
|
||||
if (typeof opts === 'string') {
|
||||
stream = buildSafeSonicBoom({ dest: opts })
|
||||
opts = {}
|
||||
} else if (typeof stream === 'string') {
|
||||
if (opts && opts.transport) {
|
||||
throw Error('only one of option.transport or stream can be specified')
|
||||
}
|
||||
stream = buildSafeSonicBoom({ dest: stream })
|
||||
} else if (opts instanceof SonicBoom || opts.writable || opts._writableState) {
|
||||
stream = opts
|
||||
opts = {}
|
||||
} else if (opts.transport) {
|
||||
if (opts.transport instanceof SonicBoom || opts.transport.writable || opts.transport._writableState) {
|
||||
throw Error('option.transport do not allow stream, please pass to option directly. e.g. pino(transport)')
|
||||
}
|
||||
if (opts.transport.targets && opts.transport.targets.length && opts.formatters && typeof opts.formatters.level === 'function') {
|
||||
throw Error('option.transport.targets do not allow custom level formatters')
|
||||
}
|
||||
|
||||
let customLevels
|
||||
if (opts.customLevels) {
|
||||
customLevels = opts.useOnlyCustomLevels ? opts.customLevels : Object.assign({}, opts.levels, opts.customLevels)
|
||||
}
|
||||
stream = transport({ caller, ...opts.transport, levels: customLevels })
|
||||
}
|
||||
opts = Object.assign({}, defaultOptions, opts)
|
||||
opts.serializers = Object.assign({}, defaultOptions.serializers, opts.serializers)
|
||||
opts.formatters = Object.assign({}, defaultOptions.formatters, opts.formatters)
|
||||
|
||||
if (opts.prettyPrint) {
|
||||
throw new Error('prettyPrint option is no longer supported, see the pino-pretty package (https://github.com/pinojs/pino-pretty)')
|
||||
}
|
||||
|
||||
const { enabled, onChild } = opts
|
||||
if (enabled === false) opts.level = 'silent'
|
||||
if (!onChild) opts.onChild = noop
|
||||
if (!stream) {
|
||||
if (!hasBeenTampered(process.stdout)) {
|
||||
// If process.stdout.fd is undefined, it means that we are running
|
||||
// in a worker thread. Let's assume we are logging to file descriptor 1.
|
||||
stream = buildSafeSonicBoom({ fd: process.stdout.fd || 1 })
|
||||
} else {
|
||||
stream = process.stdout
|
||||
}
|
||||
}
|
||||
return { opts, stream }
|
||||
}
|
||||
}
|
||||
|
||||
function stringify (obj, stringifySafeFn) {
|
||||
try {
|
||||
return JSON.stringify(obj)
|
||||
} catch (_) {
|
||||
try {
|
||||
const stringify = stringifySafeFn || this[stringifySafeSym]
|
||||
return stringify(obj)
|
||||
} catch (_) {
|
||||
return '"[unable to serialize, circular reference is too complex to analyze]"'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function buildFormatters (level, bindings, log) {
|
||||
return {
|
||||
level,
|
||||
bindings,
|
||||
log
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a string integer file descriptor to a proper native integer
|
||||
* file descriptor.
|
||||
*
|
||||
* @param {string} destination The file descriptor string to attempt to convert.
|
||||
*
|
||||
* @returns {Number}
|
||||
*/
|
||||
function normalizeDestFileDescriptor (destination) {
|
||||
const fd = Number(destination)
|
||||
if (typeof destination === 'string' && Number.isFinite(fd)) {
|
||||
return fd
|
||||
}
|
||||
// destination could be undefined if we are in a worker
|
||||
if (destination === undefined) {
|
||||
// This is stdout in UNIX systems
|
||||
return 1
|
||||
}
|
||||
return destination
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
noop,
|
||||
buildSafeSonicBoom,
|
||||
asChindings,
|
||||
asJson,
|
||||
genLog,
|
||||
createArgsNormalizer,
|
||||
stringify,
|
||||
buildFormatters,
|
||||
normalizeDestFileDescriptor
|
||||
}
|
||||
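The `genLog`/`hook` wiring above is what powers pino's documented `hooks.logMethod` option; a minimal sketch of its use:

```js
const pino = require('pino')

// hookWrappedLog passes the raw argument list, the real LOG method, and
// the level number to the hook, which decides how to forward the call.
const logger = pino({
  hooks: {
    logMethod (args, method, level) {
      // e.g. accept console-style (msg, obj) order as well as (obj, msg)
      if (args.length === 2 && typeof args[0] === 'string' && typeof args[1] === 'object') {
        return method.call(this, args[1], args[0])
      }
      return method.apply(this, args)
    }
  }
})

logger.info('user logged in', { userId: 42 })
// => {"level":30,...,"userId":42,"msg":"user logged in"}
```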
56
backend/node_modules/pino/lib/transport-stream.js
generated
vendored
Normal file
56
backend/node_modules/pino/lib/transport-stream.js
generated
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
'use strict'
|
||||
|
||||
const { realImport, realRequire } = require('real-require')
|
||||
|
||||
module.exports = loadTransportStreamBuilder
|
||||
|
||||
/**
|
||||
* Loads & returns a function to build transport streams
|
||||
* @param {string} target
|
||||
* @returns {Promise<function(object): Promise<import('node:stream').Writable>>}
|
||||
* @throws {Error} In case the target module does not export a function
|
||||
*/
|
||||
async function loadTransportStreamBuilder (target) {
|
||||
let fn
|
||||
try {
|
||||
const toLoad = target.startsWith('file://') ? target : 'file://' + target
|
||||
|
||||
if (toLoad.endsWith('.ts') || toLoad.endsWith('.cts')) {
|
||||
// TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ).
|
||||
if (process[Symbol.for('ts-node.register.instance')]) {
|
||||
realRequire('ts-node/register')
|
||||
} else if (process.env && process.env.TS_NODE_DEV) {
|
||||
realRequire('ts-node-dev')
|
||||
}
|
||||
// TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees.
|
||||
fn = realRequire(decodeURIComponent(target))
|
||||
} else {
|
||||
fn = (await realImport(toLoad))
|
||||
}
|
||||
} catch (error) {
|
||||
// See this PR for details: https://github.com/pinojs/thread-stream/pull/34
|
||||
if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND')) {
|
||||
fn = realRequire(target)
|
||||
} else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') {
|
||||
// When bundled with pkg, an undefined error is thrown when called with realImport
|
||||
// When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport
|
||||
// More info at: https://github.com/pinojs/thread-stream/issues/143
|
||||
try {
|
||||
fn = realRequire(decodeURIComponent(target))
|
||||
} catch {
|
||||
throw error
|
||||
}
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Depending on how the default export is performed, and on how the code is
|
||||
// transpiled, we may find cases of two nested "default" objects.
|
||||
// See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
|
||||
if (typeof fn === 'object') fn = fn.default
|
||||
if (typeof fn === 'object') fn = fn.default
|
||||
if (typeof fn !== 'function') throw Error('exported worker is not a function')
|
||||
|
||||
return fn
|
||||
}
|
||||
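A transport target is simply a module whose (possibly `default`) export builds a writable stream; `loadTransportStreamBuilder` resolves it with `import()` and falls back to `require()`. A minimal hypothetical target could look like this (a sketch; real transports typically build on pino-abstract-transport):

```js
// my-transport.js – a hypothetical target for pino.transport({ target: ... })
'use strict'

const { Writable } = require('node:stream')

module.exports = function build (opts) {
  // opts is the `options` object from the transport configuration
  return new Writable({
    write (chunk, _enc, cb) {
      // each chunk is one newline-delimited JSON log line
      process.stdout.write(chunk, cb)
    }
  })
}
```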
167
backend/node_modules/pino/lib/transport.js
generated
vendored
Normal file
167
backend/node_modules/pino/lib/transport.js
generated
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
'use strict'
|
||||
|
||||
const { createRequire } = require('module')
|
||||
const getCallers = require('./caller')
|
||||
const { join, isAbsolute, sep } = require('node:path')
|
||||
const sleep = require('atomic-sleep')
|
||||
const onExit = require('on-exit-leak-free')
|
||||
const ThreadStream = require('thread-stream')
|
||||
|
||||
function setupOnExit (stream) {
|
||||
// This is leak-free: it does not leave dangling event handlers
|
||||
onExit.register(stream, autoEnd)
|
||||
onExit.registerBeforeExit(stream, flush)
|
||||
|
||||
stream.on('close', function () {
|
||||
onExit.unregister(stream)
|
||||
})
|
||||
}
|
||||
|
||||
function buildStream (filename, workerData, workerOpts, sync) {
|
||||
const stream = new ThreadStream({
|
||||
filename,
|
||||
workerData,
|
||||
workerOpts,
|
||||
sync
|
||||
})
|
||||
|
||||
stream.on('ready', onReady)
|
||||
stream.on('close', function () {
|
||||
process.removeListener('exit', onExit)
|
||||
})
|
||||
|
||||
process.on('exit', onExit)
|
||||
|
||||
function onReady () {
|
||||
process.removeListener('exit', onExit)
|
||||
stream.unref()
|
||||
|
||||
if (workerOpts.autoEnd !== false) {
|
||||
setupOnExit(stream)
|
||||
}
|
||||
}
|
||||
|
||||
function onExit () {
|
||||
/* istanbul ignore next */
|
||||
if (stream.closed) {
|
||||
return
|
||||
}
|
||||
stream.flushSync()
|
||||
// Apparently there is a very sporadic race condition
|
||||
// that on certain OSes would prevent the messages from being flushed
|
||||
// because the thread might not have been created yet.
|
||||
// Unfortunately we need to sleep(100) in this case.
|
||||
sleep(100)
|
||||
stream.end()
|
||||
}
|
||||
|
||||
return stream
|
||||
}
|
||||
|
||||
function autoEnd (stream) {
|
||||
stream.ref()
|
||||
stream.flushSync()
|
||||
stream.end()
|
||||
stream.once('close', function () {
|
||||
stream.unref()
|
||||
})
|
||||
}
|
||||
|
||||
function flush (stream) {
|
||||
stream.flushSync()
|
||||
}
|
||||
|
||||
function transport (fullOptions) {
|
||||
const { pipeline, targets, levels, dedupe, worker = {}, caller = getCallers(), sync = false } = fullOptions
|
||||
|
||||
const options = {
|
||||
...fullOptions.options
|
||||
}
|
||||
|
||||
// Backwards compatibility
|
||||
const callers = typeof caller === 'string' ? [caller] : caller
|
||||
|
||||
// This will eventually be modified by bundlers
|
||||
const bundlerOverrides = '__bundlerPathsOverrides' in globalThis ? globalThis.__bundlerPathsOverrides : {}
|
||||
|
||||
let target = fullOptions.target
|
||||
|
||||
if (target && targets) {
|
||||
throw new Error('only one of target or targets can be specified')
|
||||
}
|
||||
|
||||
if (targets) {
|
||||
target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js')
|
||||
options.targets = targets.filter(dest => dest.target).map((dest) => {
|
||||
return {
|
||||
...dest,
|
||||
target: fixTarget(dest.target)
|
||||
}
|
||||
})
|
||||
options.pipelines = targets.filter(dest => dest.pipeline).map((dest) => {
|
||||
return dest.pipeline.map((t) => {
|
||||
return {
|
||||
...t,
|
||||
level: dest.level, // duplicate the pipeline `level` property defined in the upper level
|
||||
target: fixTarget(t.target)
|
||||
}
|
||||
})
|
||||
})
|
||||
} else if (pipeline) {
|
||||
target = bundlerOverrides['pino-worker'] || join(__dirname, 'worker.js')
|
||||
options.pipelines = [pipeline.map((dest) => {
|
||||
return {
|
||||
...dest,
|
||||
target: fixTarget(dest.target)
|
||||
}
|
||||
})]
|
||||
}
|
||||
|
||||
if (levels) {
|
||||
options.levels = levels
|
||||
}
|
||||
|
||||
if (dedupe) {
|
||||
options.dedupe = dedupe
|
||||
}
|
||||
|
||||
options.pinoWillSendConfig = true
|
||||
|
||||
return buildStream(fixTarget(target), options, worker, sync)
|
||||
|
||||
function fixTarget (origin) {
|
||||
origin = bundlerOverrides[origin] || origin
|
||||
|
||||
if (isAbsolute(origin) || origin.indexOf('file://') === 0) {
|
||||
return origin
|
||||
}
|
||||
|
||||
if (origin === 'pino/file') {
|
||||
return join(__dirname, '..', 'file.js')
|
||||
}
|
||||
|
||||
let fixTarget
|
||||
|
||||
for (const filePath of callers) {
|
||||
try {
|
||||
const context = filePath === 'node:repl'
|
||||
? process.cwd() + sep
|
||||
: filePath
|
||||
|
||||
fixTarget = createRequire(context).resolve(origin)
|
||||
break
|
||||
} catch (err) {
|
||||
// Silent catch
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if (!fixTarget) {
|
||||
throw new Error(`unable to determine transport target for "${origin}"`)
|
||||
}
|
||||
|
||||
return fixTarget
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = transport
|
||||
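Putting the above together through the public API: `targets` routes everything through lib/worker.js inside a ThreadStream, and each `target` string is resolved by `fixTarget()` relative to the caller, which is why bare specifiers like `'pino/file'` work (a sketch; the file paths are examples):

```js
const pino = require('pino')

const transport = pino.transport({
  targets: [
    // 'pino/file' is special-cased by fixTarget() to pino's bundled file.js
    { target: 'pino/file', level: 'info', options: { destination: 1 } }, // fd 1 = stdout
    { target: 'pino/file', level: 'error', options: { destination: './error.log' } }
  ]
})

const logger = pino(transport)
logger.info('handled in the worker thread')
logger.error('written to stdout and error.log')
```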
194
backend/node_modules/pino/lib/worker.js
generated
vendored
Normal file
194
backend/node_modules/pino/lib/worker.js
generated
vendored
Normal file
@@ -0,0 +1,194 @@
|
||||
'use strict'
|
||||
|
||||
const EE = require('node:events')
|
||||
const { pipeline, PassThrough } = require('node:stream')
|
||||
const pino = require('../pino.js')
|
||||
const build = require('pino-abstract-transport')
|
||||
const loadTransportStreamBuilder = require('./transport-stream')
|
||||
|
||||
// This file is not checked by the code coverage tool,
|
||||
// as it is not reliable.
|
||||
|
||||
/* istanbul ignore file */
|
||||
|
||||
/*
|
||||
* > Multiple targets & pipelines
|
||||
*
|
||||
*
|
||||
* ┌─────────────────────────────────────────────────┐ ┌─────┐
|
||||
* │ │ │ p │
|
||||
* │ │ │ i │
|
||||
* │ target │ │ n │
|
||||
* │ │ ────────────────────────────────┼────┤ o │
|
||||
* │ targets │ target │ │ . │
|
||||
* │ ────────────► │ ────────────────────────────────┼────┤ m │ source
|
||||
* │ │ target │ │ u │ │
|
||||
* │ │ ────────────────────────────────┼────┤ l │ │write
|
||||
* │ │ │ │ t │ ▼
|
||||
* │ │ pipeline ┌───────────────┐ │ │ i │ ┌────────┐
|
||||
* │ │ ──────────► │ PassThrough ├───┼────┤ s ├──────┤ │
|
||||
* │ │ └───────────────┘ │ │ t │ write│ Thread │
|
||||
* │ │ │ │ r │◄─────┤ Stream │
|
||||
* │ │ pipeline ┌───────────────┐ │ │ e │ │ │
|
||||
* │ │ ──────────► │ PassThrough ├───┼────┤ a │ └────────┘
|
||||
* │ └───────────────┘ │ │ m │
|
||||
* │ │ │ │
|
||||
* └─────────────────────────────────────────────────┘ └─────┘
|
||||
*
|
||||
*
|
||||
*
|
||||
* > One single pipeline or target
|
||||
*
|
||||
*
|
||||
* source
|
||||
* │
|
||||
* ┌────────────────────────────────────────────────┐ │write
|
||||
* │ │ ▼
|
||||
* │ │ ┌────────┐
|
||||
* │ targets │ target │ │ │
|
||||
* │ ────────────► │ ──────────────────────────────┤ │ │
|
||||
* │ │ │ │ │
|
||||
* │ ├──────┤ │
|
||||
* │ │ │ │
|
||||
* │ │ │ │
|
||||
* │ OR │ │ │
|
||||
* │ │ │ │
|
||||
* │ │ │ │
|
||||
* │ ┌──────────────┐ │ │ │
|
||||
* │ targets │ pipeline │ │ │ │ Thread │
|
||||
* │ ────────────► │ ────────────►│ PassThrough ├─┤ │ Stream │
|
||||
* │ │ │ │ │ │ │
|
||||
* │ └──────────────┘ │ │ │
|
||||
* │ │ │ │
|
||||
* │ OR │ write│ │
|
||||
* │ │◄─────┤ │
|
||||
* │ │ │ │
|
||||
* │ ┌──────────────┐ │ │ │
|
||||
* │ pipeline │ │ │ │ │
|
||||
* │ ──────────────►│ PassThrough ├────────────────┤ │ │
|
||||
* │ │ │ │ │ │
|
||||
* │ └──────────────┘ │ └────────┘
|
||||
* │ │
|
||||
* │ │
|
||||
* └────────────────────────────────────────────────┘
|
||||
*/
|
||||
|
||||
module.exports = async function ({ targets, pipelines, levels, dedupe }) {
|
||||
const targetStreams = []
|
||||
|
||||
// Process targets
|
||||
if (targets && targets.length) {
|
||||
targets = await Promise.all(targets.map(async (t) => {
|
||||
const fn = await loadTransportStreamBuilder(t.target)
|
||||
const stream = await fn(t.options)
|
||||
return {
|
||||
level: t.level,
|
||||
stream
|
||||
}
|
||||
}))
|
||||
|
||||
targetStreams.push(...targets)
|
||||
}
|
||||
|
||||
// Process pipelines
|
||||
if (pipelines && pipelines.length) {
|
||||
pipelines = await Promise.all(
|
||||
pipelines.map(async (p) => {
|
||||
let level
|
||||
const pipeDests = await Promise.all(
|
||||
p.map(async (t) => {
|
||||
// level assigned to pipeline is duplicated over all its targets, just store it
|
||||
level = t.level
|
||||
const fn = await loadTransportStreamBuilder(t.target)
|
||||
const stream = await fn(t.options)
|
||||
return stream
|
||||
}
|
||||
))
|
||||
|
||||
return {
|
||||
level,
|
||||
stream: createPipeline(pipeDests)
|
||||
}
|
||||
})
|
||||
)
|
||||
targetStreams.push(...pipelines)
|
||||
}
|
||||
|
||||
// Skip building the multistream step if only a single pipeline or target is defined and
|
||||
// return the stream instance directly back to ThreadStream.
|
||||
// This is equivalent to define either:
|
||||
//
|
||||
// pino.transport({ target: ... })
|
||||
//
|
||||
// OR
|
||||
//
|
||||
// pino.transport({ pipeline: ... })
|
||||
if (targetStreams.length === 1) {
|
||||
return targetStreams[0].stream
|
||||
} else {
|
||||
return build(process, {
|
||||
parse: 'lines',
|
||||
metadata: true,
|
||||
close (err, cb) {
|
||||
let expected = 0
|
||||
for (const transport of targetStreams) {
|
||||
expected++
|
||||
transport.stream.on('close', closeCb)
|
||||
transport.stream.end()
|
||||
}
|
||||
|
||||
function closeCb () {
|
||||
if (--expected === 0) {
|
||||
cb(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: Why wasn't split2 used for pipelines?
|
||||
function process (stream) {
|
||||
const multi = pino.multistream(targetStreams, { levels, dedupe })
|
||||
// TODO manage backpressure
|
||||
stream.on('data', function (chunk) {
|
||||
const { lastTime, lastMsg, lastObj, lastLevel } = this
|
||||
multi.lastLevel = lastLevel
|
||||
multi.lastTime = lastTime
|
||||
multi.lastMsg = lastMsg
|
||||
multi.lastObj = lastObj
|
||||
|
||||
// TODO handle backpressure
|
||||
multi.write(chunk + '\n')
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a pipeline using the provided streams and return an instance of `PassThrough` stream
|
||||
* as a source for the pipeline.
|
||||
*
|
||||
* @param {(TransformStream|WritableStream)[]} streams An array of streams.
|
||||
* All intermediate streams in the array *MUST* be `Transform` streams and only the last one `Writable`.
|
||||
* @returns A `PassThrough` stream instance representing the source stream of the pipeline
|
||||
*/
|
||||
function createPipeline (streams) {
|
||||
const ee = new EE()
|
||||
const stream = new PassThrough({
|
||||
autoDestroy: true,
|
||||
destroy (_, cb) {
|
||||
ee.on('error', cb)
|
||||
ee.on('closed', cb)
|
||||
}
|
||||
})
|
||||
|
||||
pipeline(stream, ...streams, function (err) {
|
||||
if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
|
||||
ee.emit('error', err)
|
||||
return
|
||||
}
|
||||
|
||||
ee.emit('closed')
|
||||
})
|
||||
|
||||
return stream
|
||||
}
|
||||
}
|
||||
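The pipeline branch above corresponds to configurations like the following (a sketch; pino-syslog and pino-socket are third-party transports assumed to be installed):

```js
const pino = require('pino')

// Every element but the last must be a Transform; the worker wires them
// together with stream.pipeline() behind the PassThrough source built by
// createPipeline().
const transport = pino.transport({
  pipeline: [
    { target: 'pino-syslog' }, // Transform: reshapes each log line
    { target: 'pino-socket' }  // Writable: ships the result over the network
  ]
})

pino(transport).info('travels through the whole pipeline')
```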
2
backend/node_modules/pino/node_modules/process-warning/.gitattributes
generated
vendored
Normal file
2
backend/node_modules/pino/node_modules/process-warning/.gitattributes
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
# Set default behavior to automatically convert line endings
|
||||
* text=auto eol=lf
|
||||
13
backend/node_modules/pino/node_modules/process-warning/.github/dependabot.yml
generated
vendored
Normal file
13
backend/node_modules/pino/node_modules/process-warning/.github/dependabot.yml
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
open-pull-requests-limit: 10
|
||||
22
backend/node_modules/pino/node_modules/process-warning/.github/workflows/ci.yml
generated
vendored
Normal file
22
backend/node_modules/pino/node_modules/process-warning/.github/workflows/ci.yml
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- next
|
||||
- 'v*'
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- '*.md'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
uses: fastify/workflows/.github/workflows/plugins-ci.yml@v5
|
||||
with:
|
||||
license-check: true
|
||||
lint: true
|
||||
21
backend/node_modules/pino/node_modules/process-warning/LICENSE
generated
vendored
Normal file
21
backend/node_modules/pino/node_modules/process-warning/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Fastify
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
118
backend/node_modules/pino/node_modules/process-warning/README.md
generated
vendored
Normal file
118
backend/node_modules/pino/node_modules/process-warning/README.md
generated
vendored
Normal file
@@ -0,0 +1,118 @@
|
||||
# process-warning
|
||||
|
||||
[](https://github.com/fastify/process-warning/actions/workflows/ci.yml)
|
||||
[](https://www.npmjs.com/package/process-warning)
|
||||
[](https://github.com/neostandard/neostandard)
|
||||
|
||||
A small utility for generating consistent warning objects across your codebase.
|
||||
It also exposes a utility for emitting those warnings, guaranteeing that they are issued only once (unless configured otherwise).
|
||||
|
||||
_This module is used by the [Fastify](https://fastify.dev) framework and it was called `fastify-warning` prior to version 1.0.0._
|
||||
|
||||
### Install
|
||||
|
||||
```
|
||||
npm i process-warning
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
The module exports two builder functions for creating warnings.
|
||||
|
||||
```js
|
||||
const {
|
||||
createWarning,
|
||||
createDeprecation
|
||||
} = require('process-warning')
|
||||
|
||||
const warning = createWarning({
|
||||
name: 'ExampleWarning',
|
||||
code: 'EXP_WRN_001',
|
||||
message: 'Hello %s',
|
||||
unlimited: true
|
||||
})
|
||||
warning('world')
|
||||
```
|
||||
|
||||
#### Methods
|
||||
|
||||
##### `createWarning({ name, code, message[, unlimited] })`
|
||||
|
||||
- `name` (`string`, required) - The error name, you can access it later with
|
||||
`error.name`. For consistency, we recommend prefixing module error names
|
||||
with `{YourModule}Warning`
|
||||
- `code` (`string`, required) - The warning code, you can access it later with
|
||||
`error.code`. For consistency, we recommend prefixing plugin error codes with
|
||||
`{ThreeLetterModuleName}_`, e.g. `FST_`. NOTE: codes should be all uppercase.
|
||||
- `message` (`string`, required) - The warning message. You can also use
|
||||
interpolated strings for formatting the message.
|
||||
- `options` (`object`, optional) - Optional options with the following
|
||||
properties:
|
||||
+ `unlimited` (`boolean`, optional) - Should the warning be emitted more than
|
||||
once? Defaults to `false`.
|
||||
|
||||
|
||||
##### `createDeprecation({ code, message[, unlimited] })`

This is a wrapper for `createWarning`. It is equivalent to invoking `createWarning` with the `name` parameter set to "DeprecationWarning".

Deprecation warnings have extended support for the Node.js CLI options: `--throw-deprecation`, `--no-deprecation`, and `--trace-deprecation`.
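For example (this mirrors `examples/example.js` further down in this diff; the `MYDEP001` code is purely illustrative):

```js
const { createDeprecation } = require('process-warning')

// Hypothetical deprecation code, shown for illustration only.
const MYDEP001 = createDeprecation({
  code: 'MYDEP001',
  message: 'This API is deprecated'
})

// Emits something like: (node:<pid>) [MYDEP001] DeprecationWarning: This API is deprecated
MYDEP001()
```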
##### `warning([a[, b[, c]]])`

The returned `warning` function can be used for emitting warnings.
A warning is guaranteed to be emitted only once (unless it was created with `unlimited: true`).

- `a`, `b`, `c` (`any`, optional) - Parameters for string interpolation.
```js
const { createWarning } = require('process-warning')
const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'message' })
FST_ERROR_CODE()
```

How to use an interpolated string:

```js
const { createWarning } = require('process-warning')
const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'Hello %s' })
FST_ERROR_CODE('world')
```

The `warning` object has methods and properties for managing the warning's state. These are useful for testing.

```js
const { createWarning } = require('process-warning')
const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'Hello %s' })
console.log(FST_ERROR_CODE.emitted) // false
FST_ERROR_CODE('world')
console.log(FST_ERROR_CODE.emitted) // true

const FST_ERROR_CODE_2 = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE_2', message: 'Hello %s' })
FST_ERROR_CODE_2.emitted = true
FST_ERROR_CODE_2('world') // will not be emitted because it is not unlimited
```
How to use an unlimited warning:

```js
const { createWarning } = require('process-warning')
const FST_ERROR_CODE = createWarning({ name: 'MyAppWarning', code: 'FST_ERROR_CODE', message: 'Hello %s', unlimited: true })
FST_ERROR_CODE('world') // will be emitted
FST_ERROR_CODE('world') // will be emitted again
```
#### Suppressing warnings

It is possible to suppress warnings by using one of Node.js' built-in warning suppression mechanisms.

Warnings can be suppressed:

- by setting the `NODE_NO_WARNINGS` environment variable to `1`
- by passing the `--no-warnings` flag to the node process
- by setting `--no-warnings` in the `NODE_OPTIONS` environment variable

For more information see [Node.js' CLI documentation](https://nodejs.org/api/cli.html).
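A quick way to check that suppression is in effect, adapted from `test/no-warnings.test.js` below (the relative path assumes you run this from the package root):

```js
const { spawnSync } = require('node:child_process')

// With --no-warnings, the deprecation text never reaches stderr.
const child = spawnSync(process.execPath, ['--no-warnings', './examples/example.js'])
console.log(child.stderr.toString() === '') // true
```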
## License

Licensed under [MIT](./LICENSE).
25
backend/node_modules/pino/node_modules/process-warning/benchmarks/warn.js
generated
vendored
Normal file
@@ -0,0 +1,25 @@
'use strict'

const { Suite } = require('benchmark')
const { createWarning } = require('..')

const err1 = createWarning({
  name: 'TestWarning',
  code: 'TST_ERROR_CODE_1',
  message: 'message'
})
const err2 = createWarning({
  name: 'TestWarning',
  code: 'TST_ERROR_CODE_2',
  message: 'message'
})

new Suite()
  .add('warn', function () {
    err1()
    err2()
  })
  .on('cycle', function (event) {
    console.log(String(event.target))
  })
  .run()
6
backend/node_modules/pino/node_modules/process-warning/eslint.config.js
generated
vendored
Normal file
@@ -0,0 +1,6 @@
'use strict'

module.exports = require('neostandard')({
  ignores: require('neostandard').resolveIgnoresFromGitignore(),
  ts: true
})
11
backend/node_modules/pino/node_modules/process-warning/examples/example.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
'use strict'

const { createWarning } = require('..')

const CUSTDEP001 = createWarning({
  name: 'DeprecationWarning',
  code: 'CUSTDEP001',
  message: 'This is a deprecation warning'
})

CUSTDEP001()
124
backend/node_modules/pino/node_modules/process-warning/index.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
'use strict'

const { format } = require('node:util')

/**
 * @namespace processWarning
 */

/**
 * Represents a warning item with details.
 * @typedef {Function} WarningItem
 * @param {*} [a] Possible message interpolation value.
 * @param {*} [b] Possible message interpolation value.
 * @param {*} [c] Possible message interpolation value.
 * @property {string} name - The name of the warning.
 * @property {string} code - The code associated with the warning.
 * @property {string} message - The warning message.
 * @property {boolean} emitted - Indicates if the warning has been emitted.
 * @property {function} format - Formats the warning message.
 */

/**
 * Options for creating a process warning.
 * @typedef {Object} ProcessWarningOptions
 * @property {string} name - The name of the warning.
 * @property {string} code - The code associated with the warning.
 * @property {string} message - The warning message.
 * @property {boolean} [unlimited=false] - If true, allows unlimited emissions of the warning.
 */

/**
 * Represents the process warning functionality.
 * @typedef {Object} ProcessWarning
 * @property {function} createWarning - Creates a warning item.
 * @property {function} createDeprecation - Creates a deprecation warning item.
 */

/**
 * Creates a deprecation warning item.
 * @function
 * @memberof processWarning
 * @param {ProcessWarningOptions} params - Options for creating the warning.
 * @returns {WarningItem} The created deprecation warning item.
 */
function createDeprecation (params) {
  return createWarning({ ...params, name: 'DeprecationWarning' })
}

/**
 * Creates a warning item.
 * @function
 * @memberof processWarning
 * @param {ProcessWarningOptions} params - Options for creating the warning.
 * @returns {WarningItem} The created warning item.
 * @throws {Error} Throws an error if name, code, or message is empty, or if opts.unlimited is not a boolean.
 */
function createWarning ({ name, code, message, unlimited = false } = {}) {
  if (!name) throw new Error('Warning name must not be empty')
  if (!code) throw new Error('Warning code must not be empty')
  if (!message) throw new Error('Warning message must not be empty')
  if (typeof unlimited !== 'boolean') throw new Error('Warning opts.unlimited must be a boolean')

  code = code.toUpperCase()

  // The emitter is defined inside an object literal so that the function's
  // inferred `name` matches the warning name in stack traces and logs.
  let warningContainer = {
    [name]: function (a, b, c) {
      if (warning.emitted === true && warning.unlimited !== true) {
        return
      }
      warning.emitted = true
      process.emitWarning(warning.format(a, b, c), warning.name, warning.code)
    }
  }
  if (unlimited) {
    warningContainer = {
      [name]: function (a, b, c) {
        warning.emitted = true
        process.emitWarning(warning.format(a, b, c), warning.name, warning.code)
      }
    }
  }

  const warning = warningContainer[name]

  warning.emitted = false
  warning.message = message
  warning.unlimited = unlimited
  warning.code = code

  /**
   * Formats the warning message.
   * @param {*} [a] Possible message interpolation value.
   * @param {*} [b] Possible message interpolation value.
   * @param {*} [c] Possible message interpolation value.
   * @returns {string} The formatted warning message.
   */
  warning.format = function (a, b, c) {
    let formatted
    if (a && b && c) {
      formatted = format(message, a, b, c)
    } else if (a && b) {
      formatted = format(message, a, b)
    } else if (a) {
      formatted = format(message, a)
    } else {
      formatted = message
    }
    return formatted
  }

  return warning
}

/**
 * Module exports containing the process warning functionality.
 * @namespace
 * @property {function} createWarning - Creates a warning item.
 * @property {function} createDeprecation - Creates a deprecation warning item.
 * @property {ProcessWarning} processWarning - Represents the process warning functionality.
 */
const out = { createWarning, createDeprecation }
module.exports = out
module.exports.default = out
module.exports.processWarning = out
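The triple assignment at the bottom of `index.js` lets the same build serve CommonJS consumers and ESM default imports alike; a minimal sketch of both consumption styles (the ESM lines are commented out so the snippet stays runnable as CommonJS):

```js
// CommonJS style
const { createWarning } = require('process-warning')

// ESM style (in an .mjs file) — resolved via the `default` re-export:
// import processWarning from 'process-warning'
// const { createWarning } = processWarning
```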
73
backend/node_modules/pino/node_modules/process-warning/package.json
generated
vendored
Normal file
@@ -0,0 +1,73 @@
{
  "name": "process-warning",
  "version": "5.0.0",
  "description": "A small utility for creating warnings and emitting them.",
  "main": "index.js",
  "type": "commonjs",
  "types": "types/index.d.ts",
  "scripts": {
    "lint": "eslint",
    "lint:fix": "eslint --fix",
    "test": "npm run test:unit && npm run test:jest && npm run test:typescript",
    "test:jest": "jest jest.test.js",
    "test:unit": "c8 --100 node --test",
    "test:typescript": "tsd"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/fastify/process-warning.git"
  },
  "keywords": [
    "fastify",
    "error",
    "warning",
    "utility",
    "plugin",
    "emit",
    "once"
  ],
  "author": "Tomas Della Vedova",
  "contributors": [
    {
      "name": "Matteo Collina",
      "email": "hello@matteocollina.com"
    },
    {
      "name": "Manuel Spigolon",
      "email": "behemoth89@gmail.com"
    },
    {
      "name": "James Sumners",
      "url": "https://james.sumners.info"
    },
    {
      "name": "Frazer Smith",
      "email": "frazer.dev@icloud.com",
      "url": "https://github.com/fdawgs"
    }
  ],
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/fastify/fastify-warning/issues"
  },
  "homepage": "https://github.com/fastify/fastify-warning#readme",
  "funding": [
    {
      "type": "github",
      "url": "https://github.com/sponsors/fastify"
    },
    {
      "type": "opencollective",
      "url": "https://opencollective.com/fastify"
    }
  ],
  "devDependencies": {
    "@fastify/pre-commit": "^2.1.0",
    "benchmark": "^2.1.4",
    "c8": "^10.1.3",
    "eslint": "^9.17.0",
    "jest": "^29.7.0",
    "neostandard": "^0.12.0",
    "tsd": "^0.31.0"
  }
}
34
backend/node_modules/pino/node_modules/process-warning/test/emit-interpolated-string.test.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
'use strict'

const { test } = require('node:test')
const { createWarning } = require('..')
const { withResolvers } = require('./promise')

test('emit with interpolated string', t => {
  t.plan(4)

  const { promise, resolve } = withResolvers()

  process.on('warning', onWarning)
  function onWarning (warning) {
    t.assert.deepStrictEqual(warning.name, 'TestDeprecation')
    t.assert.deepStrictEqual(warning.code, 'CODE')
    t.assert.deepStrictEqual(warning.message, 'Hello world')
    t.assert.ok(codeWarning.emitted)
  }

  const codeWarning = createWarning({
    name: 'TestDeprecation',
    code: 'CODE',
    message: 'Hello %s'
  })
  codeWarning('world')
  codeWarning('world')

  setImmediate(() => {
    process.removeListener('warning', onWarning)
    resolve()
  })

  return promise
})
33
backend/node_modules/pino/node_modules/process-warning/test/emit-once-only.test.js
generated
vendored
Normal file
@@ -0,0 +1,33 @@
'use strict'

const { test } = require('node:test')
const { createWarning } = require('..')
const { withResolvers } = require('./promise')

test('emit should emit a given code only once', t => {
  t.plan(4)

  const { promise, resolve } = withResolvers()

  process.on('warning', onWarning)
  function onWarning (warning) {
    t.assert.deepStrictEqual(warning.name, 'TestDeprecation')
    t.assert.deepStrictEqual(warning.code, 'CODE')
    t.assert.deepStrictEqual(warning.message, 'Hello world')
    t.assert.ok(warn.emitted)
  }

  const warn = createWarning({
    name: 'TestDeprecation',
    code: 'CODE',
    message: 'Hello world'
  })
  warn()
  warn()
  setImmediate(() => {
    process.removeListener('warning', onWarning)
    resolve()
  })

  return promise
})
40
backend/node_modules/pino/node_modules/process-warning/test/emit-reset.test.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
'use strict'

const { test } = require('node:test')
const { createWarning } = require('../')
const { withResolvers } = require('./promise')

test('a limited warning can be re-set', t => {
  t.plan(4)

  const { promise, resolve } = withResolvers()
  let count = 0
  process.on('warning', onWarning)
  function onWarning () {
    count++
  }

  const warn = createWarning({
    name: 'TestDeprecation',
    code: 'CODE',
    message: 'Hello world'
  })

  warn()
  t.assert.ok(warn.emitted)

  warn()
  t.assert.ok(warn.emitted)

  warn.emitted = false
  warn()
  t.assert.ok(warn.emitted)

  setImmediate(() => {
    t.assert.deepStrictEqual(count, 2)
    process.removeListener('warning', onWarning)
    resolve()
  })

  return promise
})
35
backend/node_modules/pino/node_modules/process-warning/test/emit-set.test.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
'use strict'

const { test } = require('node:test')
const { createWarning } = require('../')
const { withResolvers } = require('./promise')

test('emit should set the emitted state', t => {
  t.plan(3)

  const { promise, resolve } = withResolvers()

  process.on('warning', onWarning)
  function onWarning () {
    t.fail('should not be called')
  }

  const warn = createWarning({
    name: 'TestDeprecation',
    code: 'CODE',
    message: 'Hello world'
  })
  t.assert.ok(!warn.emitted)
  warn.emitted = true
  t.assert.ok(warn.emitted)

  warn()
  t.assert.ok(warn.emitted)

  setImmediate(() => {
    process.removeListener('warning', onWarning)
    resolve()
  })

  return promise
})
42
backend/node_modules/pino/node_modules/process-warning/test/emit-unlimited.test.js
generated
vendored
Normal file
@@ -0,0 +1,42 @@
'use strict'

const { test } = require('node:test')
const { createWarning } = require('..')
const { withResolvers } = require('./promise')

test('emit should emit a given code unlimited times', t => {
  t.plan(50)

  let runs = 0
  const expectedRun = []
  const times = 10

  const { promise, resolve } = withResolvers()

  process.on('warning', onWarning)
  function onWarning (warning) {
    t.assert.deepStrictEqual(warning.name, 'TestDeprecation')
    t.assert.deepStrictEqual(warning.code, 'CODE')
    t.assert.deepStrictEqual(warning.message, 'Hello world')
    t.assert.ok(warn.emitted)
    t.assert.deepStrictEqual(runs++, expectedRun.shift())
  }

  const warn = createWarning({
    name: 'TestDeprecation',
    code: 'CODE',
    message: 'Hello world',
    unlimited: true
  })

  for (let i = 0; i < times; i++) {
    expectedRun.push(i)
    warn()
  }
  setImmediate(() => {
    process.removeListener('warning', onWarning)
    resolve()
  })

  return promise
})
99
backend/node_modules/pino/node_modules/process-warning/test/index.test.js
generated
vendored
Normal file
@@ -0,0 +1,99 @@
'use strict'

const { test } = require('node:test')
const { createWarning, createDeprecation } = require('..')

process.removeAllListeners('warning')

test('Create warning with zero parameters', t => {
  t.plan(3)

  const warnItem = createWarning({
    name: 'TestWarning',
    code: 'CODE',
    message: 'Not available'
  })
  t.assert.deepStrictEqual(warnItem.name, 'TestWarning')
  t.assert.deepStrictEqual(warnItem.message, 'Not available')
  t.assert.deepStrictEqual(warnItem.code, 'CODE')
})

test('Create warning with 1 parameter', t => {
  t.plan(3)

  const warnItem = createWarning({
    name: 'TestWarning',
    code: 'CODE',
    message: 'hey %s'
  })
  t.assert.deepStrictEqual(warnItem.name, 'TestWarning')
  t.assert.deepStrictEqual(warnItem.format('alice'), 'hey alice')
  t.assert.deepStrictEqual(warnItem.code, 'CODE')
})

test('Create warning with 2 parameters', t => {
  t.plan(3)

  const warnItem = createWarning({
    name: 'TestWarning',
    code: 'CODE',
    message: 'hey %s, I like your %s'
  })
  t.assert.deepStrictEqual(warnItem.name, 'TestWarning')
  t.assert.deepStrictEqual(warnItem.format('alice', 'attitude'), 'hey alice, I like your attitude')
  t.assert.deepStrictEqual(warnItem.code, 'CODE')
})

test('Create warning with 3 parameters', t => {
  t.plan(3)

  const warnItem = createWarning({
    name: 'TestWarning',
    code: 'CODE',
    message: 'hey %s, I like your %s %s'
  })
  t.assert.deepStrictEqual(warnItem.name, 'TestWarning')
  t.assert.deepStrictEqual(warnItem.format('alice', 'attitude', 'see you'), 'hey alice, I like your attitude see you')
  t.assert.deepStrictEqual(warnItem.code, 'CODE')
})

test('Creates a deprecation warning', t => {
  t.plan(3)

  const deprecationItem = createDeprecation({
    name: 'DeprecationWarning',
    code: 'CODE',
    message: 'hello %s'
  })
  t.assert.deepStrictEqual(deprecationItem.name, 'DeprecationWarning')
  t.assert.deepStrictEqual(deprecationItem.format('world'), 'hello world')
  t.assert.deepStrictEqual(deprecationItem.code, 'CODE')
})

test('Should throw when warning has no name', t => {
  t.plan(1)
  t.assert.throws(() => createWarning(), new Error('Warning name must not be empty'))
})

test('Should throw when warning has no code', t => {
  t.plan(1)
  t.assert.throws(() => createWarning({ name: 'name' }), new Error('Warning code must not be empty'))
})

test('Should throw when warning has no message', t => {
  t.plan(1)
  t.assert.throws(() => createWarning({
    name: 'name',
    code: 'code'
  }), new Error('Warning message must not be empty'))
})

test('Cannot set unlimited to anything other than a boolean', t => {
  t.plan(1)
  t.assert.throws(() => createWarning({
    name: 'name',
    code: 'code',
    message: 'message',
    unlimited: 'unlimited'
  }), new Error('Warning opts.unlimited must be a boolean'))
})
38
backend/node_modules/pino/node_modules/process-warning/test/issue-88.test.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
'use strict'

const { test } = require('node:test')
const { createWarning } = require('..')
const { withResolvers } = require('./promise')

test('Must not overwrite config', t => {
  t.plan(1)

  function onWarning (warning) {
    t.assert.deepStrictEqual(warning.code, 'CODE_1')
  }

  const a = createWarning({
    name: 'TestWarning',
    code: 'CODE_1',
    message: 'Msg'
  })
  createWarning({
    name: 'TestWarning',
    code: 'CODE_2',
    message: 'Msg',
    unlimited: true
  })

  const { promise, resolve } = withResolvers()

  process.on('warning', onWarning)
  a('CODE_1')
  a('CODE_1')

  setImmediate(() => {
    process.removeListener('warning', onWarning)
    resolve()
  })

  return promise
})
24
backend/node_modules/pino/node_modules/process-warning/test/jest.test.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
/* global test, expect */
'use strict'

const { createWarning } = require('..')

if (globalThis.test) {
  test('works with jest', done => {
    const code = createWarning({
      name: 'TestDeprecation',
      code: 'CODE',
      message: 'Hello world'
    })
    code('world')

    // we cannot actually listen to the process warning event
    // because jest messes with it (that's the point of this test);
    // we can only verify indirectly that it was emitted
    // and that no exception is raised
    setImmediate(() => {
      expect(code.emitted).toBeTruthy()
      done()
    })
  })
}
80
backend/node_modules/pino/node_modules/process-warning/test/no-warnings.test.js
generated
vendored
Normal file
@@ -0,0 +1,80 @@
'use strict'

const { test } = require('node:test')
const { spawnSync } = require('node:child_process')
const { resolve } = require('node:path')

const entry = resolve(__dirname, '../examples', 'example.js')

test('--no-warnings is set in cli', t => {
  t.plan(1)
  const child = spawnSync(process.execPath, [
    '--no-warnings',
    entry
  ])

  const stderr = child.stderr.toString()
  t.assert.deepStrictEqual(stderr, '')
})

test('--no-warnings is not set in cli', t => {
  t.plan(1)
  const child = spawnSync(process.execPath, [
    entry
  ])

  const stderr = child.stderr.toString()
  t.assert.match(stderr, /\[CUSTDEP001\] DeprecationWarning: This is a deprecation warning/)
})

test('NODE_NO_WARNINGS is set to 1', t => {
  t.plan(1)
  const child = spawnSync(process.execPath, [
    entry
  ], {
    env: {
      NODE_NO_WARNINGS: '1'
    }
  })

  const stderr = child.stderr.toString()
  t.assert.deepStrictEqual(stderr, '')
})

test('NODE_NO_WARNINGS is set to 0', t => {
  t.plan(1)
  const child = spawnSync(process.execPath, [
    entry
  ], {
    env: {
      NODE_NO_WARNINGS: '0'
    }
  })

  const stderr = child.stderr.toString()
  t.assert.match(stderr, /\[CUSTDEP001\] DeprecationWarning: This is a deprecation warning/)
})

test('NODE_NO_WARNINGS is not set', t => {
  t.plan(1)
  const child = spawnSync(process.execPath, [
    entry
  ])

  const stderr = child.stderr.toString()
  t.assert.match(stderr, /\[CUSTDEP001\] DeprecationWarning: This is a deprecation warning/)
})

test('NODE_OPTIONS contains --no-warnings', t => {
  t.plan(1)
  const child = spawnSync(process.execPath, [
    entry
  ], {
    env: {
      NODE_OPTIONS: '--no-warnings'
    }
  })

  const stderr = child.stderr.toString()
  t.assert.deepStrictEqual(stderr, '')
})
10
backend/node_modules/pino/node_modules/process-warning/test/promise.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
module.exports = {
  withResolvers: function () {
    let promiseResolve, promiseReject
    const promise = new Promise((resolve, reject) => {
      promiseResolve = resolve
      promiseReject = reject
    })
    return { promise, resolve: promiseResolve, reject: promiseReject }
  }
}
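This helper has the same shape as the native `Promise.withResolvers()` that shipped in Node.js 22; presumably it exists so the tests also run on older Node.js lines. A minimal equivalence sketch on a new enough runtime:

```js
// Native form, Node.js >= 22 — same { promise, resolve, reject } triple.
const { promise, resolve, reject } = Promise.withResolvers()
resolve('done')
promise.then(console.log) // prints 'done'
```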
37
backend/node_modules/pino/node_modules/process-warning/types/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
declare namespace processWarning {
  export interface WarningItem {
    (a?: any, b?: any, c?: any): void;
    name: string;
    code: string;
    message: string;
    emitted: boolean;
    unlimited: boolean;
    format(a?: any, b?: any, c?: any): string;
  }

  export type WarningOptions = {
    name: string;
    code: string;
    message: string;
    unlimited?: boolean;
  }

  export type DeprecationOptions = Omit<WarningOptions, 'name'>

  export type ProcessWarningOptions = {
    unlimited?: boolean;
  }

  export type ProcessWarning = {
    createWarning(params: WarningOptions): WarningItem;
    createDeprecation(params: DeprecationOptions): WarningItem;
  }

  export function createWarning (params: WarningOptions): WarningItem
  export function createDeprecation (params: DeprecationOptions): WarningItem

  const processWarning: ProcessWarning
  export { processWarning as default }
}

export = processWarning
36
backend/node_modules/pino/node_modules/process-warning/types/index.test-d.ts
generated
vendored
Normal file
@@ -0,0 +1,36 @@
import { expectType } from 'tsd'
import { createWarning, createDeprecation } from '..'

const WarnInstance = createWarning({
  name: 'TypeScriptWarning',
  code: 'CODE',
  message: 'message'
})

expectType<string>(WarnInstance.code)
expectType<string>(WarnInstance.message)
expectType<string>(WarnInstance.name)
expectType<boolean>(WarnInstance.emitted)
expectType<boolean>(WarnInstance.unlimited)

expectType<void>(WarnInstance())
expectType<void>(WarnInstance('foo'))
expectType<void>(WarnInstance('foo', 'bar'))

const buildWarnUnlimited = createWarning({
  name: 'TypeScriptWarning',
  code: 'CODE',
  message: 'message',
  unlimited: true
})
expectType<boolean>(buildWarnUnlimited.unlimited)

const DeprecationInstance = createDeprecation({
  code: 'CODE',
  message: 'message'
})
expectType<string>(DeprecationInstance.code)

DeprecationInstance()
DeprecationInstance('foo')
DeprecationInstance('foo', 'bar')
122
backend/node_modules/pino/package.json
generated
vendored
Normal file
@@ -0,0 +1,122 @@
{
  "name": "pino",
  "version": "9.14.0",
  "description": "super fast, all natural json logger",
  "main": "pino.js",
  "type": "commonjs",
  "types": "pino.d.ts",
  "browser": "./browser.js",
  "scripts": {
    "docs": "docsify serve",
    "browser-test": "airtap --local 8080 test/browser*test.js",
    "lint": "eslint .",
    "prepublishOnly": "tap --no-check-coverage test/internals/version.test.js",
    "test": "npm run lint && npm run transpile && tap --ts && jest test/jest && npm run test-types",
    "test-ci": "npm run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly && npm run test-types",
    "test-ci-pnpm": "pnpm run lint && npm run transpile && tap --ts --no-coverage --no-check-coverage && pnpm run test-types",
    "test-ci-yarn-pnp": "yarn run lint && npm run transpile && tap --ts --no-check-coverage --coverage-report=lcovonly",
    "test-types": "tsc && tsd && ts-node test/types/pino.ts && attw --pack .",
    "test:smoke": "smoker smoke:pino && smoker smoke:browser && smoker smoke:file",
    "smoke:pino": "node ./pino.js",
    "smoke:browser": "node ./browser.js",
    "smoke:file": "node ./file.js",
    "transpile": "node ./test/fixtures/ts/transpile.cjs",
    "cov-ui": "tap --ts --coverage-report=html",
    "bench": "node benchmarks/utils/runbench all",
    "bench-basic": "node benchmarks/utils/runbench basic",
    "bench-object": "node benchmarks/utils/runbench object",
    "bench-deep-object": "node benchmarks/utils/runbench deep-object",
    "bench-multi-arg": "node benchmarks/utils/runbench multi-arg",
    "bench-long-string": "node benchmarks/utils/runbench long-string",
    "bench-child": "node benchmarks/utils/runbench child",
    "bench-child-child": "node benchmarks/utils/runbench child-child",
    "bench-child-creation": "node benchmarks/utils/runbench child-creation",
    "bench-formatters": "node benchmarks/utils/runbench formatters",
    "update-bench-doc": "node benchmarks/utils/generate-benchmark-doc > docs/benchmarks.md"
  },
  "bin": {
    "pino": "./bin.js"
  },
  "precommit": "test",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/pinojs/pino.git"
  },
  "keywords": [
    "fast",
    "logger",
    "stream",
    "json"
  ],
  "author": "Matteo Collina <hello@matteocollina.com>",
  "contributors": [
    "David Mark Clements <huperekchuno@googlemail.com>",
    "James Sumners <james.sumners@gmail.com>",
    "Thomas Watson Steen <w@tson.dk> (https://twitter.com/wa7son)"
  ],
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/pinojs/pino/issues"
  },
  "homepage": "https://getpino.io",
  "devDependencies": {
    "@arethetypeswrong/cli": "^0.18.1",
    "@matteo.collina/tspl": "^0.2.0",
    "@types/flush-write-stream": "^1.0.0",
    "@types/node": "^24.0.8",
    "@types/tap": "^15.0.6",
    "@yao-pkg/pkg": "6.7.0",
    "airtap": "5.0.0",
    "bole": "^5.0.5",
    "bunyan": "^1.8.14",
    "debug": "^4.3.4",
    "docsify-cli": "^4.4.4",
    "eslint": "^8.17.0",
    "eslint-config-standard": "^17.0.0",
    "eslint-plugin-import": "^2.26.0",
    "eslint-plugin-n": "15.7.0",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-promise": "^6.0.0",
    "execa": "^5.0.0",
    "fastbench": "^1.0.1",
    "flush-write-stream": "^2.0.0",
    "import-fresh": "^3.2.1",
    "jest": "^30.0.3",
    "log": "^6.0.0",
    "loglevel": "^1.6.7",
    "midnight-smoker": "1.1.1",
    "pino-pretty": "^13.0.0",
    "pre-commit": "^1.2.2",
    "proxyquire": "^2.1.3",
    "pump": "^3.0.0",
    "rimraf": "^6.0.1",
    "semver": "^7.3.7",
    "sinon": "^21.0.0",
    "split2": "^4.0.0",
    "steed": "^1.1.3",
    "strip-ansi": "^6.0.0",
    "tap": "^16.2.0",
    "tape": "^5.5.3",
    "through2": "^4.0.0",
    "ts-node": "^10.9.1",
    "tsd": "^0.33.0",
    "typescript": "~5.9.2",
    "winston": "^3.7.2"
  },
  "dependencies": {
    "atomic-sleep": "^1.0.0",
    "on-exit-leak-free": "^2.1.0",
    "pino-abstract-transport": "^2.0.0",
    "pino-std-serializers": "^7.0.0",
    "process-warning": "^5.0.0",
    "quick-format-unescaped": "^4.0.3",
    "real-require": "^0.2.0",
    "safe-stable-stringify": "^2.3.1",
    "@pinojs/redact": "^0.4.0",
    "sonic-boom": "^4.0.1",
    "thread-stream": "^3.0.0"
  },
  "tsd": {
    "directory": "test/types"
  }
}
BIN
backend/node_modules/pino/pino-banner.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 41 KiB
BIN
backend/node_modules/pino/pino-logo-hire.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 50 KiB
BIN
backend/node_modules/pino/pino-tree.png
generated
vendored
Normal file
Binary file not shown.
After Width: | Height: | Size: 13 KiB
Some files were not shown because too many files have changed in this diff