Current state

This commit is contained in:
2026-01-22 19:05:45 +01:00
parent 85dee61a4d
commit e280e4eadb
1967 changed files with 397327 additions and 74093 deletions

View File

@@ -9,5 +9,5 @@ updates:
- package-ecosystem: "npm"
directory: "/"
schedule:
interval: "weekly"
interval: "monthly"
open-pull-requests-limit: 10

View File

@@ -1,85 +0,0 @@
name: Benchmark PR
on:
pull_request_target:
types: [labeled]
jobs:
benchmark:
if: ${{ github.event.label.name == 'benchmark' }}
runs-on: ubuntu-latest
permissions:
contents: read
outputs:
PR-BENCH: ${{ steps.benchmark-pr.outputs.BENCH_RESULT }}
MASTER-BENCH: ${{ steps.benchmark-master.outputs.BENCH_RESULT }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
ref: ${{github.event.pull_request.head.sha}}
repository: ${{github.event.pull_request.head.repo.full_name}}
- uses: actions/setup-node@v4
with:
node-version: 18
- name: Install
run: |
npm install --ignore-scripts
- name: Run benchmark
id: benchmark-pr
run: |
npm run --silent bench > ./bench-result
content=$(cat ./bench-result)
content="${content//'%'/'%25'}"
content="${content//$'\n'/'%0A'}"
content="${content//$'\r'/'%0D'}"
echo "::set-output name=BENCH_RESULT::$content"
# master benchmark
- uses: actions/checkout@v4
with:
ref: 'master'
- name: Install
run: |
npm install --ignore-scripts
- name: Run benchmark
id: benchmark-master
run: |
npm run --silent bench > ./bench-result
content=$(cat ./bench-result)
content="${content//'%'/'%25'}"
content="${content//$'\n'/'%0A'}"
content="${content//$'\r'/'%0D'}"
echo "::set-output name=BENCH_RESULT::$content"
output-benchmark:
if: "always()"
needs: [benchmark]
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- name: Comment PR
uses: thollander/actions-comment-pull-request@v2
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
message: |
**PR**:
```
${{ needs.benchmark.outputs.PR-BENCH }}
```
**MASTER**:
```
${{ needs.benchmark.outputs.MASTER-BENCH }}
```
- uses: actions-ecosystem/action-remove-labels@v1
with:
labels: |
benchmark
github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -4,7 +4,6 @@ on:
push:
branches:
- main
- master
- next
- 'v*'
paths-ignore:
@@ -15,9 +14,20 @@ on:
- 'docs/**'
- '*.md'
# This allows a subsequently queued workflow run to interrupt previous runs
concurrency:
group: "${{ github.workflow }}-${{ github.event.pull_request.head.label || github.head_ref || github.ref }}"
cancel-in-progress: true
permissions:
contents: read
jobs:
test:
uses: fastify/workflows/.github/workflows/plugins-ci.yml@v3
permissions:
contents: write
pull-requests: write
uses: fastify/workflows/.github/workflows/plugins-ci.yml@v5
with:
license-check: true
lint: true

View File

@@ -1,14 +0,0 @@
ts: false
jsx: false
branches: 65
functions: 100
lines: 89
statements: 89
nyc-arg:
- "--exclude=.cache/*"
- "--exclude=lib/schema-validator.js"
files:
- test/**/*.test.js

View File

@@ -1,6 +1,9 @@
The MIT License (MIT)
MIT License
Copyright (c) 2016-2018 Matteo Collina
Copyright (c) 2016-present Matteo Collina
Copyright (c) 2016-present The Fastify team
The Fastify team members are listed at https://github.com/fastify/fastify#team.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

View File

@@ -1,14 +1,12 @@
# fast-json-stringify
![CI](https://github.com/fastify/fast-json-stringify/workflows/CI/badge.svg)
[![CI](https://github.com/fastify/fast-json-stringify/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/fastify/fast-json-stringify/actions/workflows/ci.yml)
[![NPM version](https://img.shields.io/npm/v/fast-json-stringify.svg?style=flat)](https://www.npmjs.com/package/fast-json-stringify)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
[![neostandard javascript style](https://img.shields.io/badge/code_style-neostandard-brightgreen?style=flat)](https://github.com/neostandard/neostandard)
[![NPM downloads](https://img.shields.io/npm/dm/fast-json-stringify.svg?style=flat)](https://www.npmjs.com/package/fast-json-stringify)
__fast-json-stringify__ is significantly faster than `JSON.stringify()` for small payloads.
Its performance advantage shrinks as your payload grows.
It pairs well with [__flatstr__](https://www.npmjs.com/package/flatstr), which triggers a V8 optimization that improves performance when eventually converting the string to a `Buffer`.
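A minimal usage sketch (the schema and values here are illustrative, not taken from this repository):

```js
const fastJson = require('fast-json-stringify')

// Compile the stringify function once from a JSON Schema,
// then reuse it for every payload of that shape.
const stringify = fastJson({
  type: 'object',
  properties: {
    firstName: { type: 'string' },
    age: { type: 'integer' }
  }
})

console.log(stringify({ firstName: 'Foo', age: 42 })) // '{"firstName":"Foo","age":42}'
```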
### How it works
@@ -18,37 +16,43 @@ fast-json-stringify requires a [JSON Schema Draft 7](https://json-schema.org/spe
##### Benchmarks
- Machine: `EX41S-SSD, Intel Core i7, 4Ghz, 64GB RAM, 4C/8T, SSD`.
- Node.js `v18.12.1`
- Node.js `v22.14.0`
```
FJS creation x 4,129 ops/sec ±0.82% (92 runs sampled)
CJS creation x 184,196 ops/sec ±0.12% (97 runs sampled)
AJV Serialize creation x 61,130,591 ops/sec ±0.40% (92 runs sampled)
JSON.stringify array x 5,057 ops/sec ±0.10% (100 runs sampled)
fast-json-stringify array default x 6,243 ops/sec ±0.14% (98 runs sampled)
fast-json-stringify array json-stringify x 6,261 ops/sec ±0.30% (99 runs sampled)
compile-json-stringify array x 6,842 ops/sec ±0.18% (96 runs sampled)
AJV Serialize array x 6,964 ops/sec ±0.11% (95 runs sampled)
JSON.stringify large array x 248 ops/sec ±0.07% (90 runs sampled)
fast-json-stringify large array default x 99.96 ops/sec ±0.22% (74 runs sampled)
fast-json-stringify large array json-stringify x 248 ops/sec ±0.07% (90 runs sampled)
compile-json-stringify large array x 317 ops/sec ±0.09% (89 runs sampled)
AJV Serialize large array x 111 ops/sec ±0.07% (33 runs sampled)
JSON.stringify long string x 16,002 ops/sec ±0.09% (98 runs sampled)
fast-json-stringify long string x 15,979 ops/sec ±0.09% (96 runs sampled)
compile-json-stringify long string x 15,952 ops/sec ±0.31% (97 runs sampled)
AJV Serialize long string x 21,416 ops/sec ±0.08% (98 runs sampled)
JSON.stringify short string x 12,944,272 ops/sec ±0.09% (96 runs sampled)
fast-json-stringify short string x 30,585,790 ops/sec ±0.27% (97 runs sampled)
compile-json-stringify short string x 30,656,406 ops/sec ±0.12% (96 runs sampled)
AJV Serialize short string x 30,406,785 ops/sec ±0.37% (96 runs sampled)
JSON.stringify obj x 3,153,043 ops/sec ±0.33% (99 runs sampled)
fast-json-stringify obj x 6,866,434 ops/sec ±0.11% (100 runs sampled)
compile-json-stringify obj x 15,886,723 ops/sec ±0.15% (98 runs sampled)
AJV Serialize obj x 8,969,043 ops/sec ±0.36% (97 runs sampled)
JSON stringify date x 1,126,547 ops/sec ±0.09% (97 runs sampled)
fast-json-stringify date format x 1,836,188 ops/sec ±0.12% (99 runs sampled)
compile-json-stringify date format x 1,125,735 ops/sec ±0.19% (98 runs sampled)
FJS creation x 9,696 ops/sec ±0.77% (94 runs sampled)
CJS creation x 197,267 ops/sec ±0.22% (95 runs sampled)
AJV Serialize creation x 48,302,927 ops/sec ±2.09% (90 runs sampled)
json-accelerator creation x 668,430 ops/sec ±0.43% (95 runs sampled)
JSON.stringify array x 7,924 ops/sec ±0.11% (98 runs sampled)
fast-json-stringify array default x 7,183 ops/sec ±0.09% (97 runs sampled)
json-accelerator array x 5,762 ops/sec ±0.27% (99 runs sampled)
fast-json-stringify array json-stringify x 7,171 ops/sec ±0.17% (97 runs sampled)
compile-json-stringify array x 6,889 ops/sec ±0.41% (96 runs sampled)
AJV Serialize array x 6,945 ops/sec ±0.17% (98 runs sampled)
JSON.stringify large array x 331 ops/sec ±0.17% (93 runs sampled)
fast-json-stringify large array default x 208 ops/sec ±0.21% (91 runs sampled)
fast-json-stringify large array json-stringify x 330 ops/sec ±0.17% (93 runs sampled)
compile-json-stringify large array x 318 ops/sec ±0.11% (90 runs sampled)
AJV Serialize large array x 114 ops/sec ±0.27% (74 runs sampled)
JSON.stringify long string x 13,452 ops/sec ±0.15% (99 runs sampled)
fast-json-stringify long string x 13,454 ops/sec ±0.10% (99 runs sampled)
json-accelerator long string x 13,439 ops/sec ±0.09% (98 runs sampled)
compile-json-stringify long string x 13,380 ops/sec ±0.12% (100 runs sampled)
AJV Serialize long string x 21,932 ops/sec ±0.06% (99 runs sampled)
JSON.stringify short string x 12,114,052 ops/sec ±0.59% (97 runs sampled)
fast-json-stringify short string x 29,408,175 ops/sec ±1.12% (91 runs sampled)
json-accelerator short string x 29,431,694 ops/sec ±1.05% (93 runs sampled)
compile-json-stringify short string x 24,740,338 ops/sec ±1.02% (91 runs sampled)
AJV Serialize short string x 17,841,869 ops/sec ±0.90% (91 runs sampled)
JSON.stringify obj x 4,577,494 ops/sec ±0.25% (94 runs sampled)
fast-json-stringify obj x 7,291,157 ops/sec ±0.40% (97 runs sampled)
json-accelerator obj x 6,473,194 ops/sec ±0.31% (99 runs sampled)
compile-json-stringify obj x 14,724,935 ops/sec ±0.50% (96 runs sampled)
AJV Serialize obj x 8,782,944 ops/sec ±0.45% (93 runs sampled)
JSON stringify date x 803,522 ops/sec ±0.47% (98 runs sampled)
fast-json-stringify date format x 1,117,776 ops/sec ±0.69% (95 runs sampled)
json-accelerate date format x 1,122,419 ops/sec ±0.20% (97 runs sampled)
compile-json-stringify date format x 803,214 ops/sec ±0.23% (97 runs sampled)
```
#### Table of contents:
@@ -70,7 +74,7 @@ compile-json-stringify date format x 1,125,735 ops/sec ±0.19% (98 runs sampled)
- <a href="#security">`Security Notice`</a>
- <a href="#debug">`Debug Mode`</a>
- <a href="#standalone">`Standalone Mode`</a>
- <a href="#acknowledgements">`Acknowledgements`</a>
- <a href="#acknowledgments">`Acknowledgments`</a>
- <a href="#license">`License`</a>
@@ -111,7 +115,7 @@ console.log(stringify({
<a name="options"></a>
## Options
Optionally, you may provide to `fast-json-stringify` an option object as second parameter:
Optionally, you may provide an option object to `fast-json-stringify` as the second parameter:
```js
const fastJson = require('fast-json-stringify')
@@ -165,7 +169,7 @@ And nested ones, too.
| `date` | `2020-04-03` |
| `time` | `09:11:08` |
**Note**: In the case of string formatted Date and not Date Object, there will be no manipulation on it. It should be properly formatted.
**Note**: If a string-formatted date is provided instead of a Date object, it is not manipulated; it must already be properly formatted.
Example with a Date object:
@@ -290,7 +294,7 @@ console.log(stringify(obj)) // '{"matchfoo":"42","otherfoo":"str","matchnum":3,"
*additionalProperties* must be an object or a boolean, declared in this way: `{ type: 'type' }`.
*additionalProperties* will work only for the properties that are not explicitly listed in the *properties* and *patternProperties* objects.
If *additionalProperties* is not present or is set to `false`, every property that is not explicitly listed in the *properties* and *patternProperties* objects,will be ignored, as described in <a href="#missingFields">Missing fields</a>.
If *additionalProperties* is not present or is set to `false`, every property that is not explicitly listed in the *properties* and *patternProperties* objects will be ignored, as described in <a href="#missingFields">Missing fields</a>.
Missing fields are ignored to avoid having to rewrite objects before serializing. However, other schema rules would throw in similar situations.
If *additionalProperties* is set to `true`, the additional properties will be stringified with `JSON.stringify`. If you want to achieve maximum performance, we strongly encourage you to use a fixed schema where possible.
The additional properties will always be serialized at the end of the object.
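A short sketch of how these rules interact (schema and values are illustrative):

```js
const fastJson = require('fast-json-stringify')

const stringify = fastJson({
  type: 'object',
  properties: { name: { type: 'string' } },
  additionalProperties: { type: 'number' }
})

// 'name' uses its declared schema; 'extra' is not listed in `properties`,
// so it is serialized through `additionalProperties` and ends up last.
console.log(stringify({ name: 'foo', extra: 42 })) // '{"name":"foo","extra":42}'
```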
@@ -543,12 +547,12 @@ const stringify = fastJson(schema, { schema: externalSchema })
<a name="long"></a>
#### Long integers
By default the library will handle automatically [BigInt](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt).
By default, the library automatically handles [BigInt](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt).
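A tiny illustrative sketch of this behavior (the schema is an assumption for illustration only):

```js
const fastJson = require('fast-json-stringify')

const stringify = fastJson({
  type: 'object',
  properties: {
    id: { type: 'integer' }
  }
})

// BigInt values are serialized without any manual conversion.
console.log(stringify({ id: 12345678901234567890n })) // '{"id":12345678901234567890}'
```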
<a name="integer"></a>
#### Integers
The `type: integer` property will be truncated if a floating point is provided.
You can customize this behaviour with the `rounding` option that will accept [`round`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/round), [`ceil`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/ceil), [`floor`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/floor) or [`trunc`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc). Default is `trunc`:
You can customize this behavior with the `rounding` option that will accept [`round`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/round), [`ceil`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/ceil), [`floor`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/floor), or [`trunc`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc). Default is `trunc`:
```js
const stringify = fastJson(schema, { rounding: 'ceil' })
@@ -650,8 +654,8 @@ const stringify = fastJson({
##### Benchmarks
For reference, here goes some benchmarks for comparison over the three
mechanisms. Benchmarks conducted on an old machine.
For reference, here are some benchmarks for comparison over the three
mechanisms. Benchmarks were conducted on an old machine.
- Machine: `ST1000LM024 HN-M 1TB HDD, Intel Core i7-3610QM @ 2.3GHz, 12GB RAM, 4C/8T`.
- Node.js `v16.13.1`
@@ -678,7 +682,7 @@ expose your application to remote attacks.
Users are responsible for sending trusted data. `fast-json-stringify` guarantees that you will get
a valid output only if your input matches the schema or can be coerced to the schema. If your input
doesn't match the schema, you will get undefined behavior.
doesn't match the schema, you will get undefined behavior.
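An illustrative sketch of what "coerced to the schema" can mean in practice (schema and values are made up):

```js
const fastJson = require('fast-json-stringify')

const stringify = fastJson({
  type: 'object',
  properties: { name: { type: 'string' } }
})

// A non-string value with a usable toString() is coerced into a JSON string
// rather than rejected, so validate untrusted input before serializing it.
console.log(stringify({ name: 42 })) // '{"name":"42"}'
```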
<a name="debug"></a>
### Debug Mode
@@ -699,7 +703,7 @@ const debugCompiled = fastJson({
console.log(debugCompiled) // it is an object containing the generated code and the ajv instance
const rawString = debugCompiled.code // it is the generated code
console.log(rawString)
console.log(rawString)
const stringify = fastJson.restore(debugCompiled) // use the generated string to get back the `stringify` function
console.log(stringify({ firstName: 'Foo', surname: 'bar' })) // '{"firstName":"Foo"}'
@@ -728,12 +732,12 @@ const stringify = require('stringify.js')
console.log(stringify({ firstName: 'Foo', surname: 'bar' })) // '{"firstName":"Foo"}'
```
<a name="acknowledgements"></a>
## Acknowledgements
<a name="acknowledgments"></a>
## Acknowledgments
This project was kindly sponsored by [nearForm](https://nearform.com).
<a name="license"></a>
## License
MIT
Licensed under [MIT](./LICENSE).

View File

@@ -8,7 +8,7 @@ const simpleGit = require('simple-git')
const git = simpleGit(process.cwd())
const COMMAND = 'npm run bench'
const DEFAULT_BRANCH = 'master'
const DEFAULT_BRANCH = 'main'
const PERCENT_THRESHOLD = 5
const greyColor = '\x1b[30m'
const redColor = '\x1b[31m'

View File

@@ -1,7 +1,15 @@
'use strict'
const benchmark = require('benchmark')
const suite = new benchmark.Suite()
const { Bench } = require('tinybench')
const suite = new Bench({
name: 'Library Comparison Benchmarks',
setup: (_task, mode) => {
// Run the garbage collector before warmup at each cycle
if (mode === 'warmup' && typeof globalThis.gc === 'function') {
globalThis.gc()
}
}
})
const STR_LEN = 1e4
const LARGE_ARRAY_SIZE = 2e4
@@ -118,6 +126,12 @@ const ajvSerialize = ajv.compileSerializer(schemaAJVJTD)
const ajvSerializeArray = ajv.compileSerializer(arraySchemaAJVJTD)
const ajvSerializeString = ajv.compileSerializer({ type: 'string' })
const { createAccelerator } = require('json-accelerator')
const accelStringify = createAccelerator(schema)
const accelArray = createAccelerator(arraySchema)
const accelDate = FJS(dateFormatSchema)
const accelString = createAccelerator({ type: 'string' })
const getRandomString = (length) => {
if (!Number.isInteger(length)) {
throw new Error('Expected integer length')
@@ -134,7 +148,6 @@ const getRandomString = (length) => {
return result[0].toUpperCase() + result.slice(1)
}
// eslint-disable-next-line
for (let i = 0; i < STR_LEN; i++) {
largeArray[i] = {
firstName: getRandomString(8),
@@ -171,6 +184,9 @@ suite.add('CJS creation', function () {
suite.add('AJV Serialize creation', function () {
ajv.compileSerializer(schemaAJVJTD)
})
suite.add('json-accelerator creation', function () {
createAccelerator(schema)
})
suite.add('JSON.stringify array', function () {
JSON.stringify(multiArray)
@@ -180,6 +196,10 @@ suite.add('fast-json-stringify array default', function () {
stringifyArrayDefault(multiArray)
})
suite.add('json-accelerator array', function () {
accelArray(multiArray)
})
suite.add('fast-json-stringify array json-stringify', function () {
stringifyArrayJSONStringify(multiArray)
})
@@ -220,6 +240,10 @@ suite.add('fast-json-stringify long string', function () {
stringifyString(str)
})
suite.add('json-accelerator long string', function () {
accelString(str)
})
suite.add('compile-json-stringify long string', function () {
CJSStringifyString(str)
})
@@ -236,6 +260,10 @@ suite.add('fast-json-stringify short string', function () {
stringifyString('hello world')
})
suite.add('json-accelerator short string', function () {
accelString('hello world')
})
suite.add('compile-json-stringify short string', function () {
CJSStringifyString('hello world')
})
@@ -252,6 +280,10 @@ suite.add('fast-json-stringify obj', function () {
stringify(obj)
})
suite.add('json-accelerator obj', function () {
accelStringify(obj)
})
suite.add('compile-json-stringify obj', function () {
CJSStringify(obj)
})
@@ -268,14 +300,24 @@ suite.add('fast-json-stringify date format', function () {
stringifyDate(date)
})
suite.add('json-accelerate date format', function () {
accelDate(date)
})
suite.add('compile-json-stringify date format', function () {
CJSStringifyDate(date)
})
suite.on('cycle', cycle)
suite.run().then(() => {
for (const task of suite.tasks) {
const hz = task.result.hz // ops/sec
const rme = task.result.rme // relative margin of error (%)
const samples = task.result.samples.length
suite.run()
const formattedHz = hz.toLocaleString('en-US', { maximumFractionDigits: 0 })
const formattedRme = rme.toFixed(2)
function cycle (e) {
console.log(e.target.toString())
}
const output = `${task.name} x ${formattedHz} ops/sec ±${formattedRme}% (${samples} runs sampled)`
console.log(output)
}
}).catch(err => console.error(`Error: ${err.message}`))

View File

@@ -2,20 +2,32 @@
const { workerData: benchmark, parentPort } = require('worker_threads')
const Benchmark = require('benchmark')
Benchmark.options.minSamples = 100
const { Bench } = require('tinybench')
const suite = Benchmark.Suite()
const bench = new Bench({
name: benchmark.name,
setup: (_task, mode) => {
// Run the garbage collector before warmup at each cycle
if (mode === 'warmup' && typeof globalThis.gc === 'function') {
globalThis.gc()
}
}
})
const FJS = require('..')
const stringify = FJS(benchmark.schema)
suite
.add(benchmark.name, () => {
stringify(benchmark.input)
})
.on('cycle', (event) => {
parentPort.postMessage(String(event.target))
})
.on('complete', () => {})
.run()
bench.add(benchmark.name, () => {
stringify(benchmark.input)
}).run().then(() => {
const task = bench.tasks[0]
const hz = task.result.hz // ops/sec
const rme = task.result.rme // relative margin of error (%)
const samples = task.result.samples.length
const formattedHz = hz.toLocaleString('en-US', { maximumFractionDigits: 0 })
const formattedRme = rme.toFixed(2)
const output = `${task.name} x ${formattedHz} ops/sec ±${formattedRme}% (${samples} runs sampled)`
parentPort.postMessage(output)
}).catch(err => parentPort.postMessage(`Error: ${err.message}`))

View File

@@ -0,0 +1,9 @@
'use strict'
module.exports = require('neostandard')({
ignores: [
...require('neostandard').resolveIgnoresFromGitignore(),
'lib/schema-validator.js'
],
ts: true
})

View File

@@ -15,17 +15,32 @@ const SINGLE_TICK = /'/g
let largeArraySize = 2e4
let largeArrayMechanism = 'default'
const validRoundingMethods = [
const serializerFns = `
const {
asString,
asNumber,
asBoolean,
asDateTime,
asDate,
asTime,
asUnsafeString
} = serializer
const asInteger = serializer.asInteger.bind(serializer)
`
const validRoundingMethods = new Set([
'floor',
'ceil',
'round',
'trunc'
]
])
const validLargeArrayMechanisms = [
const validLargeArrayMechanisms = new Set([
'default',
'json-stringify'
]
])
let schemaIdCounter = 0
@@ -79,6 +94,14 @@ function getSchemaId (schema, rootSchemaId) {
return rootSchemaId
}
function getSafeSchemaRef (context, location) {
let schemaRef = location.getSchemaRef() || ''
if (schemaRef.startsWith(context.rootSchemaId)) {
schemaRef = schemaRef.replace(context.rootSchemaId, '') || '#'
}
return schemaRef
}
function build (schema, options) {
isValidSchema(schema)
@@ -92,7 +115,11 @@ function build (schema, options) {
refResolver: new RefResolver(),
rootSchemaId: schema.$id || `__fjs_root_${schemaIdCounter++}`,
validatorSchemasIds: new Set(),
mergedSchemasIds: new Map()
mergedSchemasIds: new Map(),
recursiveSchemas: new Set(),
recursivePaths: new Set(),
buildingSet: new Set(),
uid: 0
}
const schemaId = getSchemaId(schema, context.rootSchemaId)
@@ -112,13 +139,13 @@ function build (schema, options) {
}
if (options.rounding) {
if (!validRoundingMethods.includes(options.rounding)) {
if (!validRoundingMethods.has(options.rounding)) {
throw new Error(`Unsupported integer rounding method ${options.rounding}`)
}
}
if (options.largeArrayMechanism) {
if (validLargeArrayMechanisms.includes(options.largeArrayMechanism)) {
if (validLargeArrayMechanisms.has(options.largeArrayMechanism)) {
largeArrayMechanism = options.largeArrayMechanism
} else {
throw new Error(`Unsupported large array mechanism ${options.largeArrayMechanism}`)
@@ -126,11 +153,14 @@ function build (schema, options) {
}
if (options.largeArraySize) {
if (typeof options.largeArraySize === 'string' && Number.isFinite(Number.parseInt(options.largeArraySize, 10))) {
largeArraySize = Number.parseInt(options.largeArraySize, 10)
} else if (typeof options.largeArraySize === 'number' && Number.isInteger(options.largeArraySize)) {
const largeArraySizeType = typeof options.largeArraySize
let parsedNumber
if (largeArraySizeType === 'string' && Number.isFinite((parsedNumber = Number.parseInt(options.largeArraySize, 10)))) {
largeArraySize = parsedNumber
} else if (largeArraySizeType === 'number' && Number.isInteger(options.largeArraySize)) {
largeArraySize = options.largeArraySize
} else if (typeof options.largeArraySize === 'bigint') {
} else if (largeArraySizeType === 'bigint') {
largeArraySize = Number(options.largeArraySize)
} else {
throw new Error(`Unsupported large array size. Expected integer-like, got ${typeof options.largeArraySize} with value ${options.largeArraySize}`)
@@ -138,9 +168,11 @@ function build (schema, options) {
}
const location = new Location(schema, context.rootSchemaId)
detectRecursiveSchemas(context, location)
const code = buildValue(context, location, 'input')
let contextFunctionCode = `
${serializerFns}
const JSON_STR_BEGIN_OBJECT = '{'
const JSON_STR_END_OBJECT = '}'
const JSON_STR_BEGIN_ARRAY = '['
@@ -252,32 +284,28 @@ const numberKeywords = [
* https://datatracker.ietf.org/doc/html/draft-handrews-json-schema-validation-01#section-6
*/
function inferTypeByKeyword (schema) {
// eslint-disable-next-line
for (var keyword of objectKeywords) {
for (const keyword of objectKeywords) {
if (keyword in schema) return 'object'
}
// eslint-disable-next-line
for (var keyword of arrayKeywords) {
for (const keyword of arrayKeywords) {
if (keyword in schema) return 'array'
}
// eslint-disable-next-line
for (var keyword of stringKeywords) {
for (const keyword of stringKeywords) {
if (keyword in schema) return 'string'
}
// eslint-disable-next-line
for (var keyword of numberKeywords) {
for (const keyword of numberKeywords) {
if (keyword in schema) return 'number'
}
return schema.type
}
function buildExtraObjectPropertiesSerializer (context, location, addComma) {
function buildExtraObjectPropertiesSerializer (context, location, addComma, objVar) {
const schema = location.schema
const propertiesKeys = Object.keys(schema.properties || {})
let code = `
const propertiesKeys = ${JSON.stringify(propertiesKeys)}
for (const [key, value] of Object.entries(obj)) {
for (const [key, value] of Object.entries(${objVar})) {
if (
propertiesKeys.includes(key) ||
value === undefined ||
@@ -296,7 +324,7 @@ function buildExtraObjectPropertiesSerializer (context, location, addComma) {
code += `
if (/${propertyKey.replace(/\\*\//g, '\\/')}/.test(key)) {
${addComma}
json += serializer.asString(key) + JSON_STR_COLONS
json += asString(key) + JSON_STR_COLONS
${buildValue(context, propertyLocation, 'value')}
continue
}
@@ -311,13 +339,13 @@ function buildExtraObjectPropertiesSerializer (context, location, addComma) {
if (additionalPropertiesSchema === true) {
code += `
${addComma}
json += serializer.asString(key) + JSON_STR_COLONS + JSON.stringify(value)
json += asString(key) + JSON_STR_COLONS + JSON.stringify(value)
`
} else {
const propertyLocation = location.getPropertyLocation('additionalProperties')
code += `
${addComma}
json += serializer.asString(key) + JSON_STR_COLONS
json += asString(key) + JSON_STR_COLONS
${buildValue(context, propertyLocation, 'value')}
`
}
@@ -329,7 +357,7 @@ function buildExtraObjectPropertiesSerializer (context, location, addComma) {
return code
}
function buildInnerObject (context, location) {
function buildInnerObject (context, location, objVar) {
const schema = location.schema
const propertiesLocation = location.getPropertyLocation('properties')
@@ -343,23 +371,25 @@ function buildInnerObject (context, location) {
return required1 === required2 ? 0 : required1 ? -1 : 1
}
)
const hasRequiredProperties = requiredProperties.includes(propertiesKeys[0])
let code = 'let value\n'
let code = ''
for (const key of requiredProperties) {
if (!propertiesKeys.includes(key)) {
const sanitizedKey = JSON.stringify(key)
code += `if (obj[${sanitizedKey}] === undefined) throw new Error('${sanitizedKey.replace(/'/g, '\\\'')} is required!')\n`
code += `if (${objVar}[${sanitizedKey}] === undefined) throw new Error('${sanitizedKey.replace(/'/g, '\\\'')} is required!')\n`
}
}
code += 'let json = JSON_STR_BEGIN_OBJECT\n'
code += 'json += JSON_STR_BEGIN_OBJECT\n'
const localUid = context.uid++
let addComma = ''
if (!hasRequiredProperties) {
code += 'let addComma = false\n'
addComma = '!addComma && (addComma = true) || (json += JSON_STR_COMMA)'
const needsRuntimeComma = propertiesKeys.length > 1 || schema.patternProperties || (schema.additionalProperties !== undefined && schema.additionalProperties !== false)
if (needsRuntimeComma) {
code += `let addComma_${localUid} = false\n`
addComma = `!addComma_${localUid} && (addComma_${localUid} = true) || (json += JSON_STR_COMMA)`
}
for (const key of propertiesKeys) {
@@ -369,15 +399,16 @@ function buildInnerObject (context, location) {
}
const sanitizedKey = JSON.stringify(key)
const value = `value_${key.replace(/[^a-zA-Z0-9]/g, '_')}_${context.uid++}`
const defaultValue = propertyLocation.schema.default
const isRequired = requiredProperties.includes(key)
code += `
value = obj[${sanitizedKey}]
if (value !== undefined) {
const ${value} = ${objVar}[${sanitizedKey}]
if (${value} !== undefined) {
${addComma}
json += ${JSON.stringify(sanitizedKey + ':')}
${buildValue(context, propertyLocation, 'value')}
${buildValue(context, propertyLocation, `${value}`)}
}`
if (defaultValue !== undefined) {
@@ -394,24 +425,20 @@ function buildInnerObject (context, location) {
} else {
code += '\n'
}
if (hasRequiredProperties) {
addComma = 'json += \',\''
}
}
if (schema.patternProperties || schema.additionalProperties) {
code += buildExtraObjectPropertiesSerializer(context, location, addComma)
code += buildExtraObjectPropertiesSerializer(context, location, addComma, objVar)
}
code += `
return json + JSON_STR_END_OBJECT
json += JSON_STR_END_OBJECT
`
return code
}
function mergeLocations (context, mergedSchemaId, mergedLocations) {
for (let i = 0; i < mergedLocations.length; i++) {
for (let i = 0, mergedLocationsLength = mergedLocations.length; i < mergedLocationsLength; i++) {
const location = mergedLocations[i]
const schema = location.schema
if (schema.$ref) {
@@ -473,40 +500,57 @@ function toJSON (variableName) {
`
}
function buildObject (context, location) {
function buildObject (context, location, input) {
const schema = location.schema
if (context.functionsNamesBySchema.has(schema)) {
return context.functionsNamesBySchema.get(schema)
const funcName = context.functionsNamesBySchema.get(schema)
return `json += ${funcName}(${input})`
}
const functionName = generateFuncName(context)
context.functionsNamesBySchema.set(schema, functionName)
let schemaRef = location.getSchemaRef()
if (schemaRef.startsWith(context.rootSchemaId)) {
schemaRef = schemaRef.replace(context.rootSchemaId, '')
}
let functionCode = `
`
const nullable = schema.nullable === true
functionCode += `
// ${schemaRef}
function ${functionName} (input) {
const obj = ${toJSON('input')}
${!nullable ? 'if (obj === null) return JSON_STR_EMPTY_OBJECT' : ''}
${buildInnerObject(context, location)}
const schemaId = location.schemaId || ''
const jsonPointer = location.jsonPointer || ''
const fullPath = `${schemaId}#${jsonPointer}`
if (context.recursivePaths.has(fullPath) || context.buildingSet.has(schema)) {
const functionName = generateFuncName(context)
context.functionsNamesBySchema.set(schema, functionName)
const schemaRef = getSafeSchemaRef(context, location)
const functionCode = `
// ${schemaRef}
function ${functionName} (input) {
const obj = ${toJSON('input')}
if (obj === null) return ${nullable ? 'JSON_STR_NULL' : 'JSON_STR_EMPTY_OBJECT'}
let json = ''
${buildInnerObject(context, location, 'obj')}
return json
}
`
context.functions.push(functionCode)
return `json += ${functionName}(${input})`
}
context.buildingSet.add(schema)
const objVar = `obj_${context.uid++}`
const code = `
const ${objVar} = ${toJSON(input)}
if (${objVar} === null) {
json += ${nullable ? 'JSON_STR_NULL' : 'JSON_STR_EMPTY_OBJECT'}
} else {
${buildInnerObject(context, location, objVar)}
}
`
context.functions.push(functionCode)
return functionName
context.buildingSet.delete(schema)
return code
}
function buildArray (context, location) {
function buildArray (context, location, input) {
const schema = location.schema
let itemsLocation = location.getPropertyLocation('items')
@@ -519,61 +563,143 @@ function buildArray (context, location) {
const itemsSchema = itemsLocation.schema
if (context.functionsNamesBySchema.has(schema)) {
return context.functionsNamesBySchema.get(schema)
const funcName = context.functionsNamesBySchema.get(schema)
return `json += ${funcName}(${input})`
}
const functionName = generateFuncName(context)
context.functionsNamesBySchema.set(schema, functionName)
let schemaRef = location.getSchemaRef()
if (schemaRef.startsWith(context.rootSchemaId)) {
schemaRef = schemaRef.replace(context.rootSchemaId, '')
}
let functionCode = `
function ${functionName} (obj) {
// ${schemaRef}
`
const nullable = schema.nullable === true
functionCode += `
${!nullable ? 'if (obj === null) return JSON_STR_EMPTY_ARRAY' : ''}
const schemaId = location.schemaId || ''
const jsonPointer = location.jsonPointer || ''
const fullPath = `${schemaId}#${jsonPointer}`
if (context.recursivePaths.has(fullPath) || context.buildingSet.has(schema)) {
const functionName = generateFuncName(context)
context.functionsNamesBySchema.set(schema, functionName)
const schemaRef = getSafeSchemaRef(context, location)
let functionCode = `
function ${functionName} (obj) {
// ${schemaRef}
let json = ''
`
functionCode += `
if (obj === null) return ${nullable ? 'JSON_STR_NULL' : 'JSON_STR_EMPTY_ARRAY'}
if (!Array.isArray(obj)) {
throw new TypeError(\`The value of '${schemaRef}' does not match schema definition.\`)
}
const arrayLength = obj.length
`
if (!schema.additionalItems && Array.isArray(itemsSchema)) {
functionCode += `
if (!schema.additionalItems && Array.isArray(itemsSchema)) {
functionCode += `
if (arrayLength > ${itemsSchema.length}) {
throw new Error(\`Item at ${itemsSchema.length} does not match schema definition.\`)
}
`
}
if (largeArrayMechanism === 'json-stringify') {
functionCode += `if (arrayLength >= ${largeArraySize}) return JSON.stringify(obj)\n`
}
functionCode += `
json += JSON_STR_BEGIN_ARRAY
`
if (Array.isArray(itemsSchema)) {
for (let i = 0, itemsSchemaLength = itemsSchema.length; i < itemsSchemaLength; i++) {
const item = itemsSchema[i]
const value = `value_${i}`
functionCode += `const ${value} = obj[${i}]`
const tmpRes = buildValue(context, itemsLocation.getPropertyLocation(i), value)
functionCode += `
if (${i} < arrayLength) {
if (${buildArrayTypeCondition(item.type, value)}) {
if (${i}) {
json += JSON_STR_COMMA
}
${tmpRes}
} else {
throw new Error(\`Item at ${i} does not match schema definition.\`)
}
}
`
}
if (schema.additionalItems) {
functionCode += `
for (let i = ${itemsSchema.length}; i < arrayLength; i++) {
if (i) {
json += JSON_STR_COMMA
}
json += JSON.stringify(obj[i])
}`
}
} else {
const code = buildValue(context, itemsLocation, 'value')
functionCode += `
for (let i = 0; i < arrayLength; i++) {
if (i) {
json += JSON_STR_COMMA
}
const value = obj[i]
${code}
}`
}
functionCode += `
return json + JSON_STR_END_ARRAY
}`
context.functions.push(functionCode)
return `json += ${functionName}(${input})`
}
context.buildingSet.add(schema)
const safeSchemaRef = getSafeSchemaRef(context, location)
const objVar = `obj_${context.uid++}`
let inlinedCode = `
const ${objVar} = ${input}
if (${objVar} === null) {
json += ${nullable ? 'JSON_STR_NULL' : 'JSON_STR_EMPTY_ARRAY'}
} else if (!Array.isArray(${objVar})) {
throw new TypeError(\`The value of '${safeSchemaRef}' does not match schema definition.\`)
} else {
const arrayLength_${objVar} = ${objVar}.length
`
if (!schema.additionalItems && Array.isArray(itemsSchema)) {
inlinedCode += `
if (arrayLength_${objVar} > ${itemsSchema.length}) {
throw new Error(\`Item at ${itemsSchema.length} does not match schema definition.\`)
}
`
}
if (largeArrayMechanism === 'json-stringify') {
functionCode += `if (arrayLength >= ${largeArraySize}) return JSON.stringify(obj)\n`
inlinedCode += `if (arrayLength_${objVar} >= ${largeArraySize}) json += JSON.stringify(${objVar})\n else {`
}
functionCode += `
const arrayEnd = arrayLength - 1
let value
let json = ''
inlinedCode += `
json += JSON_STR_BEGIN_ARRAY
`
if (Array.isArray(itemsSchema)) {
for (let i = 0; i < itemsSchema.length; i++) {
const localUid = context.uid++
inlinedCode += `let addComma_${localUid} = false\n`
for (let i = 0, itemsSchemaLength = itemsSchema.length; i < itemsSchemaLength; i++) {
const item = itemsSchema[i]
functionCode += `value = obj[${i}]`
const tmpRes = buildValue(context, itemsLocation.getPropertyLocation(i), 'value')
functionCode += `
if (${i} < arrayLength) {
if (${buildArrayTypeCondition(item.type, `[${i}]`)}) {
const value = `value_${i}_${context.uid++}`
inlinedCode += `const ${value} = ${objVar}[${i}]`
const tmpRes = buildValue(context, itemsLocation.getPropertyLocation(i), value)
inlinedCode += `
if (${i} < arrayLength_${objVar}) {
if (${buildArrayTypeCondition(item.type, value)}) {
!addComma_${localUid} && (addComma_${localUid} = true) || (json += JSON_STR_COMMA)
${tmpRes}
if (${i} < arrayEnd) {
json += JSON_STR_COMMA
}
} else {
throw new Error(\`Item at ${i} does not match schema definition.\`)
}
@@ -582,64 +708,68 @@ function buildArray (context, location) {
}
if (schema.additionalItems) {
functionCode += `
for (let i = ${itemsSchema.length}; i < arrayLength; i++) {
json += JSON.stringify(obj[i])
if (i < arrayEnd) {
json += JSON_STR_COMMA
}
inlinedCode += `
for (let i = ${itemsSchema.length}; i < arrayLength_${objVar}; i++) {
!addComma_${localUid} && (addComma_${localUid} = true) || (json += JSON_STR_COMMA)
json += JSON.stringify(${objVar}[i])
}`
}
} else {
const code = buildValue(context, itemsLocation, 'obj[i]')
functionCode += `
for (let i = 0; i < arrayLength; i++) {
${code}
if (i < arrayEnd) {
const code = buildValue(context, itemsLocation, 'value')
inlinedCode += `
for (let i = 0; i < arrayLength_${objVar}; i++) {
if (i) {
json += JSON_STR_COMMA
}
const value = ${objVar}[i]
${code}
}`
}
functionCode += `
return JSON_STR_BEGIN_ARRAY + json + JSON_STR_END_ARRAY
}`
inlinedCode += `
json += JSON_STR_END_ARRAY
`
context.functions.push(functionCode)
return functionName
if (largeArrayMechanism === 'json-stringify') {
inlinedCode += '}'
}
inlinedCode += '}'
context.buildingSet.delete(schema)
return inlinedCode
}
function buildArrayTypeCondition (type, accessor) {
let condition
switch (type) {
case 'null':
condition = 'value === null'
condition = `${accessor} === null`
break
case 'string':
condition = `typeof value === 'string' ||
value === null ||
value instanceof Date ||
value instanceof RegExp ||
condition = `typeof ${accessor} === 'string' ||
${accessor} === null ||
${accessor} instanceof Date ||
${accessor} instanceof RegExp ||
(
typeof value === "object" &&
typeof value.toString === "function" &&
value.toString !== Object.prototype.toString
typeof ${accessor} === "object" &&
typeof ${accessor}.toString === "function" &&
${accessor}.toString !== Object.prototype.toString
)`
break
case 'integer':
condition = 'Number.isInteger(value)'
condition = `Number.isInteger(${accessor})`
break
case 'number':
condition = 'Number.isFinite(value)'
condition = `Number.isFinite(${accessor})`
break
case 'boolean':
condition = 'typeof value === \'boolean\''
condition = `typeof ${accessor} === 'boolean'`
break
case 'object':
condition = 'value && typeof value === \'object\' && value.constructor === Object'
condition = `${accessor} && typeof ${accessor} === 'object' && ${accessor}.constructor === Object`
break
case 'array':
condition = 'Array.isArray(value)'
condition = `Array.isArray(${accessor})`
break
default:
if (Array.isArray(type)) {
@@ -670,8 +800,9 @@ function buildMultiTypeSerializer (context, location, input) {
switch (type) {
case 'null':
code += `
${statement} (${input} === null)
${statement} (${input} === null) {
${nestedResult}
}
`
break
case 'string': {
@@ -686,40 +817,40 @@ function buildMultiTypeSerializer (context, location, input) {
typeof ${input}.toString === "function" &&
${input}.toString !== Object.prototype.toString
)
)
) {
${nestedResult}
}
`
break
}
case 'array': {
code += `
${statement}(Array.isArray(${input}))
${statement}(Array.isArray(${input})) {
${nestedResult}
}
`
break
}
case 'integer': {
code += `
${statement}(Number.isInteger(${input}) || ${input} === null)
${statement}(Number.isInteger(${input}) || ${input} === null) {
${nestedResult}
}
`
break
}
default: {
code += `
${statement}(typeof ${input} === "${type}" || ${input} === null)
${statement}(typeof ${input} === "${type}" || ${input} === null) {
${nestedResult}
}
`
break
}
}
})
let schemaRef = location.getSchemaRef()
if (schemaRef.startsWith(context.rootSchemaId)) {
schemaRef = schemaRef.replace(context.rootSchemaId, '')
}
code += `
else throw new TypeError(\`The value of '${schemaRef}' does not match schema definition.\`)
else throw new TypeError(\`The value of '${getSafeSchemaRef(context, location)}' does not match schema definition.\`)
`
return code
@@ -733,13 +864,13 @@ function buildSingleTypeSerializer (context, location, input) {
return 'json += JSON_STR_NULL'
case 'string': {
if (schema.format === 'date-time') {
return `json += serializer.asDateTime(${input})`
return `json += asDateTime(${input})`
} else if (schema.format === 'date') {
return `json += serializer.asDate(${input})`
return `json += asDate(${input})`
} else if (schema.format === 'time') {
return `json += serializer.asTime(${input})`
return `json += asTime(${input})`
} else if (schema.format === 'unsafe') {
return `json += serializer.asUnsafeString(${input})`
return `json += asUnsafeString(${input})`
} else {
return `
if (typeof ${input} !== 'string') {
@@ -748,29 +879,27 @@ function buildSingleTypeSerializer (context, location, input) {
} else if (${input} instanceof Date) {
json += JSON_STR_QUOTE + ${input}.toISOString() + JSON_STR_QUOTE
} else if (${input} instanceof RegExp) {
json += serializer.asString(${input}.source)
json += asString(${input}.source)
} else {
json += serializer.asString(${input}.toString())
json += asString(${input}.toString())
}
} else {
json += serializer.asString(${input})
json += asString(${input})
}
`
}
}
case 'integer':
return `json += serializer.asInteger(${input})`
return `json += asInteger(${input})`
case 'number':
return `json += serializer.asNumber(${input})`
return `json += asNumber(${input})`
case 'boolean':
return `json += serializer.asBoolean(${input})`
return `json += asBoolean(${input})`
case 'object': {
const funcName = buildObject(context, location)
return `json += ${funcName}(${input})`
return buildObject(context, location, input)
}
case 'array': {
const funcName = buildArray(context, location)
return `json += ${funcName}(${input})`
return buildArray(context, location, input)
}
case undefined:
return `json += JSON.stringify(${input})`
@@ -779,6 +908,92 @@ function buildSingleTypeSerializer (context, location, input) {
}
}
function detectRecursiveSchemas (context, location) {
const pathStack = new Set()
function traverse (location) {
const schema = location.schema
if (typeof schema !== 'object' || schema === null) return
const schemaId = location.schemaId || ''
const jsonPointer = location.jsonPointer || ''
const fullPath = `${schemaId}#${jsonPointer}`
if (pathStack.has(fullPath)) {
// Mark all nodes in the current path that are part of the cycle
let inCycle = false
for (const p of pathStack) {
if (p === fullPath) inCycle = true
if (inCycle) context.recursivePaths.add(p)
}
context.recursivePaths.add(fullPath)
return
}
pathStack.add(fullPath)
if (schema.$ref) {
try {
const res = resolveRef(context, location)
traverse(res)
} catch (err) {
// Validation will handle missing refs later
}
}
if (schema.properties) {
const propertiesLocation = location.getPropertyLocation('properties')
for (const key in schema.properties) {
traverse(propertiesLocation.getPropertyLocation(key))
}
}
if (schema.additionalProperties && typeof schema.additionalProperties === 'object') {
traverse(location.getPropertyLocation('additionalProperties'))
}
if (schema.patternProperties) {
const patternPropertiesLocation = location.getPropertyLocation('patternProperties')
for (const key in schema.patternProperties) {
traverse(patternPropertiesLocation.getPropertyLocation(key))
}
}
if (schema.items) {
const itemsLocation = location.getPropertyLocation('items')
if (Array.isArray(schema.items)) {
for (let i = 0; i < schema.items.length; i++) {
traverse(itemsLocation.getPropertyLocation(i))
}
} else {
traverse(itemsLocation)
}
}
if (schema.additionalItems && typeof schema.additionalItems === 'object') {
traverse(location.getPropertyLocation('additionalItems'))
}
if (schema.oneOf) {
const oneOfLocation = location.getPropertyLocation('oneOf')
for (let i = 0; i < schema.oneOf.length; i++) {
traverse(oneOfLocation.getPropertyLocation(i))
}
}
if (schema.anyOf) {
const anyOfLocation = location.getPropertyLocation('anyOf')
for (let i = 0; i < schema.anyOf.length; i++) {
traverse(anyOfLocation.getPropertyLocation(i))
}
}
if (schema.allOf) {
const allOfLocation = location.getPropertyLocation('allOf')
for (let i = 0; i < schema.allOf.length; i++) {
traverse(allOfLocation.getPropertyLocation(i))
}
}
if (schema.then) traverse(location.getPropertyLocation('then'))
if (schema.else) traverse(location.getPropertyLocation('else'))
pathStack.delete(fullPath)
}
traverse(location)
}
function buildConstSerializer (location, input) {
const schema = location.schema
const type = schema.type
@@ -828,7 +1043,7 @@ function buildAllOf (context, location, input) {
]
const allOfsLocation = location.getPropertyLocation('allOf')
for (let i = 0; i < allOf.length; i++) {
for (let i = 0, allOfLength = allOf.length; i < allOfLength; i++) {
locations.push(allOfsLocation.getPropertyLocation(i))
}
@@ -853,7 +1068,7 @@ function buildOneOf (context, location, input) {
let code = ''
for (let index = 0; index < oneOfs.length; index++) {
for (let index = 0, oneOfsLength = oneOfs.length; index < oneOfsLength; index++) {
const optionLocation = oneOfsLocation.getPropertyLocation(index)
const optionSchema = optionLocation.schema
@@ -873,19 +1088,16 @@ function buildOneOf (context, location, input) {
const nestedResult = buildValue(context, mergedLocation, input)
const schemaRef = optionLocation.getSchemaRef()
code += `
${index === 0 ? 'if' : 'else if'}(validator.validate("${schemaRef}", ${input}))
${index === 0 ? 'if' : 'else if'}(validator.validate("${schemaRef}", ${input})) {
${nestedResult}
}
`
}
let schemaRef = location.getSchemaRef()
if (schemaRef.startsWith(context.rootSchemaId)) {
schemaRef = schemaRef.replace(context.rootSchemaId, '')
}
code += `
else throw new TypeError(\`The value of '${schemaRef}' does not match schema definition.\`)
else throw new TypeError(\`The value of '${getSafeSchemaRef(context, location)}' does not match schema definition.\`)
`
return code

View File

@@ -92,7 +92,9 @@ module.exports = class Serializer {
asString (str) {
const len = str.length
if (len < 42) {
if (len === 0) {
return '""'
} else if (len < 42) {
// magically escape strings for json
// relying on their charCodeAt
// everything below 32 needs JSON.stringify()
@@ -102,8 +104,7 @@ module.exports = class Serializer {
let result = ''
let last = -1
let point = 255
// eslint-disable-next-line
for (var i = 0; i < len; i++) {
for (let i = 0; i < len; i++) {
point = str.charCodeAt(i)
if (
point === 0x22 || // '"'

View File

@@ -21,8 +21,8 @@ class Validator {
keyword: 'fjs_type',
type: 'object',
errors: false,
validate: (type, date) => {
return date instanceof Date
validate: (_type, data) => {
return data && typeof data.toJSON === 'function'
}
})
@@ -51,8 +51,10 @@ class Validator {
return this.ajv.validate(schemaRef, data)
}
// Ajv does not support js date format. In order to properly validate objects containing a date,
// it needs to replace all occurrences of the string date format with a custom keyword fjs_type.
// Ajv does not natively support JavaScript objects like Date or other types
// that rely on a custom .toJSON() representation. To properly validate schemas
// that may contain such objects (e.g. Date, ObjectId, etc.), we replace all
// occurrences of the string type with a custom keyword fjs_type
// (see https://github.com/fastify/fast-json-stringify/pull/441)
convertSchemaToAjvFormat (schema) {
if (schema === null) return

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2020 Evgeny Poberezkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,125 +0,0 @@
# ajv-formats
JSON Schema formats for Ajv
[![Build Status](https://travis-ci.org/ajv-validator/ajv-formats.svg?branch=master)](https://travis-ci.org/ajv-validator/ajv-formats)
[![npm](https://img.shields.io/npm/v/ajv-formats.svg)](https://www.npmjs.com/package/ajv-formats)
[![Gitter](https://img.shields.io/gitter/room/ajv-validator/ajv.svg)](https://gitter.im/ajv-validator/ajv)
[![GitHub Sponsors](https://img.shields.io/badge/$-sponsors-brightgreen)](https://github.com/sponsors/epoberezkin)
## Usage
```javascript
// ESM/TypeScript import
import Ajv from "ajv"
import addFormats from "ajv-formats"
// Node.js require:
const Ajv = require("ajv")
const addFormats = require("ajv-formats")
const ajv = new Ajv()
addFormats(ajv)
```
## Formats
The package defines these formats:
- _date_: full-date according to [RFC3339](http://tools.ietf.org/html/rfc3339#section-5.6).
- _time_: time (time-zone is mandatory).
- _date-time_: date-time (time-zone is mandatory).
- _iso-time_: time with optional time-zone.
- _iso-date-time_: date-time with optional time-zone.
- _duration_: duration from [RFC3339](https://tools.ietf.org/html/rfc3339#appendix-A)
- _uri_: full URI.
- _uri-reference_: URI reference, including full and relative URIs.
- _uri-template_: URI template according to [RFC6570](https://tools.ietf.org/html/rfc6570)
- _url_ (deprecated): [URL record](https://url.spec.whatwg.org/#concept-url).
- _email_: email address.
- _hostname_: host name according to [RFC1034](http://tools.ietf.org/html/rfc1034#section-3.5).
- _ipv4_: IP address v4.
- _ipv6_: IP address v6.
- _regex_: tests whether a string is a valid regular expression by passing it to RegExp constructor.
- _uuid_: Universally Unique IDentifier according to [RFC4122](http://tools.ietf.org/html/rfc4122).
- _json-pointer_: JSON-pointer according to [RFC6901](https://tools.ietf.org/html/rfc6901).
- _relative-json-pointer_: relative JSON-pointer according to [this draft](http://tools.ietf.org/html/draft-luff-relative-json-pointer-00).
- _byte_: base64 encoded data according to the [openApi 3.0.0 specification](https://spec.openapis.org/oas/v3.0.0#data-types)
- _int32_: signed 32 bits integer according to the [openApi 3.0.0 specification](https://spec.openapis.org/oas/v3.0.0#data-types)
- _int64_: signed 64 bits according to the [openApi 3.0.0 specification](https://spec.openapis.org/oas/v3.0.0#data-types)
- _float_: float according to the [openApi 3.0.0 specification](https://spec.openapis.org/oas/v3.0.0#data-types)
- _double_: double according to the [openApi 3.0.0 specification](https://spec.openapis.org/oas/v3.0.0#data-types)
- _password_: password string according to the [openApi 3.0.0 specification](https://spec.openapis.org/oas/v3.0.0#data-types)
- _binary_: binary string according to the [openApi 3.0.0 specification](https://spec.openapis.org/oas/v3.0.0#data-types)
See regular expressions used for format validation and the sources that were used in [formats.ts](https://github.com/ajv-validator/ajv-formats/blob/master/src/formats.ts).
**Please note**: JSON Schema draft-07 also defines formats `iri`, `iri-reference`, `idn-hostname` and `idn-email` for URLs, hostnames and emails with international characters. These formats are available in [ajv-formats-draft2019](https://github.com/luzlab/ajv-formats-draft2019) plugin.
## Keywords to compare values: `formatMaximum` / `formatMinimum` and `formatExclusiveMaximum` / `formatExclusiveMinimum`
These keywords allow to define minimum/maximum constraints when the format keyword defines ordering (`compare` function in format definition).
These keywords are added to ajv instance when ajv-formats is used without options or with option `keywords: true`.
These keywords apply only to strings. If the data is not a string, the validation succeeds.
The value of keywords `formatMaximum`/`formatMinimum` and `formatExclusiveMaximum`/`formatExclusiveMinimum` should be a string or [\$data reference](https://github.com/ajv-validator/ajv/blob/master/docs/validation.md#data-reference). This value is the maximum (minimum) allowed value for the data to be valid as determined by `format` keyword. If `format` keyword is not present schema compilation will throw exception.
When these keyword are added, they also add comparison functions to formats `"date"`, `"time"` and `"date-time"`. User-defined formats also can have comparison functions. See [addFormat](https://github.com/ajv-validator/ajv/blob/master/docs/api.md#api-addformat) method.
```javascript
require("ajv-formats")(ajv)
const schema = {
type: "string",
format: "date",
formatMinimum: "2016-02-06",
formatExclusiveMaximum: "2016-12-27",
}
const validDataList = ["2016-02-06", "2016-12-26"]
const invalidDataList = ["2016-02-05", "2016-12-27", "abc"]
```
## Options
Options can be passed via the second parameter. Options value can be
1. The list of format names that will be added to ajv instance:
```javascript
addFormats(ajv, ["date", "time"])
```
**Please note**: when ajv encounters an undefined format it throws exception (unless ajv instance was configured with `strict: false` option). To allow specific undefined formats they have to be passed to ajv instance via `formats` option with `true` value:
```javascript
const ajv = new Ajv((formats: {date: true, time: true})) // to ignore "date" and "time" formats in schemas.
```
2. Format validation mode (default is `"full"`) with optional list of format names and `keywords` option to add additional format comparison keywords:
```javascript
addFormats(ajv, {mode: "fast"})
```
or
```javascript
addFormats(ajv, {mode: "fast", formats: ["date", "time"], keywords: true})
```
In `"fast"` mode the following formats are simplified: `"date"`, `"time"`, `"date-time"`, `"iso-time"`, `"iso-date-time"`, `"uri"`, `"uri-reference"`, `"email"`. For example, `"date"`, `"time"` and `"date-time"` do not validate ranges in `"fast"` mode, only string structure, and other formats have simplified regular expressions.
## Tests
```bash
npm install
git submodule update --init
npm test
```
## License
[MIT](https://github.com/ajv-validator/ajv-formats/blob/master/LICENSE)

View File

@@ -1,9 +0,0 @@
import type { Format } from "ajv";
export type FormatMode = "fast" | "full";
export type FormatName = "date" | "time" | "date-time" | "iso-time" | "iso-date-time" | "duration" | "uri" | "uri-reference" | "uri-template" | "url" | "email" | "hostname" | "ipv4" | "ipv6" | "regex" | "uuid" | "json-pointer" | "json-pointer-uri-fragment" | "relative-json-pointer" | "byte" | "int32" | "int64" | "float" | "double" | "password" | "binary";
export type DefinedFormats = {
[key in FormatName]: Format;
};
export declare const fullFormats: DefinedFormats;
export declare const fastFormats: DefinedFormats;
export declare const formatNames: FormatName[];

View File

@@ -1,208 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.formatNames = exports.fastFormats = exports.fullFormats = void 0;
function fmtDef(validate, compare) {
return { validate, compare };
}
exports.fullFormats = {
// date: http://tools.ietf.org/html/rfc3339#section-5.6
date: fmtDef(date, compareDate),
// date-time: http://tools.ietf.org/html/rfc3339#section-5.6
time: fmtDef(getTime(true), compareTime),
"date-time": fmtDef(getDateTime(true), compareDateTime),
"iso-time": fmtDef(getTime(), compareIsoTime),
"iso-date-time": fmtDef(getDateTime(), compareIsoDateTime),
// duration: https://tools.ietf.org/html/rfc3339#appendix-A
duration: /^P(?!$)((\d+Y)?(\d+M)?(\d+D)?(T(?=\d)(\d+H)?(\d+M)?(\d+S)?)?|(\d+W)?)$/,
uri,
"uri-reference": /^(?:[a-z][a-z0-9+\-.]*:)?(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'"()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?(?:\?(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i,
// uri-template: https://tools.ietf.org/html/rfc6570
"uri-template": /^(?:(?:[^\x00-\x20"'<>%\\^`{|}]|%[0-9a-f]{2})|\{[+#./;?&=,!@|]?(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?(?:,(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?)*\})*$/i,
// For the source: https://gist.github.com/dperini/729294
// For test cases: https://mathiasbynens.be/demo/url-regex
url: /^(?:https?|ftp):\/\/(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\u{00a1}-\u{ffff}]+-)*[a-z0-9\u{00a1}-\u{ffff}]+)(?:\.(?:[a-z0-9\u{00a1}-\u{ffff}]+-)*[a-z0-9\u{00a1}-\u{ffff}]+)*(?:\.(?:[a-z\u{00a1}-\u{ffff}]{2,})))(?::\d{2,5})?(?:\/[^\s]*)?$/iu,
email: /^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i,
hostname: /^(?=.{1,253}\.?$)[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[-0-9a-z]{0,61}[0-9a-z])?)*\.?$/i,
// optimized https://www.safaribooksonline.com/library/view/regular-expressions-cookbook/9780596802837/ch07s16.html
ipv4: /^(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)\.){3}(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)$/,
ipv6: /^((([0-9a-f]{1,4}:){7}([0-9a-f]{1,4}|:))|(([0-9a-f]{1,4}:){6}(:[0-9a-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9a-f]{1,4}:){5}(((:[0-9a-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9a-f]{1,4}:){4}(((:[0-9a-f]{1,4}){1,3})|((:[0-9a-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){3}(((:[0-9a-f]{1,4}){1,4})|((:[0-9a-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){2}(((:[0-9a-f]{1,4}){1,5})|((:[0-9a-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){1}(((:[0-9a-f]{1,4}){1,6})|((:[0-9a-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9a-f]{1,4}){1,7})|((:[0-9a-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))$/i,
regex,
// uuid: http://tools.ietf.org/html/rfc4122
uuid: /^(?:urn:uuid:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i,
// JSON-pointer: https://tools.ietf.org/html/rfc6901
// uri fragment: https://tools.ietf.org/html/rfc3986#appendix-A
"json-pointer": /^(?:\/(?:[^~/]|~0|~1)*)*$/,
"json-pointer-uri-fragment": /^#(?:\/(?:[a-z0-9_\-.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i,
// relative JSON-pointer: http://tools.ietf.org/html/draft-luff-relative-json-pointer-00
"relative-json-pointer": /^(?:0|[1-9][0-9]*)(?:#|(?:\/(?:[^~/]|~0|~1)*)*)$/,
// the following formats are used by the openapi specification: https://spec.openapis.org/oas/v3.0.0#data-types
// byte: https://github.com/miguelmota/is-base64
byte,
// signed 32 bit integer
int32: { type: "number", validate: validateInt32 },
// signed 64 bit integer
int64: { type: "number", validate: validateInt64 },
// C-type float
float: { type: "number", validate: validateNumber },
// C-type double
double: { type: "number", validate: validateNumber },
// hint to the UI to hide input strings
password: true,
// unchecked string payload
binary: true,
};
exports.fastFormats = {
...exports.fullFormats,
date: fmtDef(/^\d\d\d\d-[0-1]\d-[0-3]\d$/, compareDate),
time: fmtDef(/^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)$/i, compareTime),
"date-time": fmtDef(/^\d\d\d\d-[0-1]\d-[0-3]\dt(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)$/i, compareDateTime),
"iso-time": fmtDef(/^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)?$/i, compareIsoTime),
"iso-date-time": fmtDef(/^\d\d\d\d-[0-1]\d-[0-3]\d[t\s](?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)?$/i, compareIsoDateTime),
// uri: https://github.com/mafintosh/is-my-json-valid/blob/master/formats.js
uri: /^(?:[a-z][a-z0-9+\-.]*:)(?:\/?\/)?[^\s]*$/i,
"uri-reference": /^(?:(?:[a-z][a-z0-9+\-.]*:)?\/?\/)?(?:[^\\\s#][^\s#]*)?(?:#[^\\\s]*)?$/i,
// email (sources from jsen validator):
// http://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address#answer-8829363
// http://www.w3.org/TR/html5/forms.html#valid-e-mail-address (search for 'wilful violation')
email: /^[a-z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i,
};
exports.formatNames = Object.keys(exports.fullFormats);
function isLeapYear(year) {
// https://tools.ietf.org/html/rfc3339#appendix-C
return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0);
}
const DATE = /^(\d\d\d\d)-(\d\d)-(\d\d)$/;
const DAYS = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
function date(str) {
// full-date from http://tools.ietf.org/html/rfc3339#section-5.6
const matches = DATE.exec(str);
if (!matches)
return false;
const year = +matches[1];
const month = +matches[2];
const day = +matches[3];
return (month >= 1 &&
month <= 12 &&
day >= 1 &&
day <= (month === 2 && isLeapYear(year) ? 29 : DAYS[month]));
}
function compareDate(d1, d2) {
if (!(d1 && d2))
return undefined;
if (d1 > d2)
return 1;
if (d1 < d2)
return -1;
return 0;
}
const TIME = /^(\d\d):(\d\d):(\d\d(?:\.\d+)?)(z|([+-])(\d\d)(?::?(\d\d))?)?$/i;
function getTime(strictTimeZone) {
return function time(str) {
const matches = TIME.exec(str);
if (!matches)
return false;
const hr = +matches[1];
const min = +matches[2];
const sec = +matches[3];
const tz = matches[4];
const tzSign = matches[5] === "-" ? -1 : 1;
const tzH = +(matches[6] || 0);
const tzM = +(matches[7] || 0);
if (tzH > 23 || tzM > 59 || (strictTimeZone && !tz))
return false;
if (hr <= 23 && min <= 59 && sec < 60)
return true;
// leap second
const utcMin = min - tzM * tzSign;
const utcHr = hr - tzH * tzSign - (utcMin < 0 ? 1 : 0);
return (utcHr === 23 || utcHr === -1) && (utcMin === 59 || utcMin === -1) && sec < 61;
};
}
function compareTime(s1, s2) {
if (!(s1 && s2))
return undefined;
const t1 = new Date("2020-01-01T" + s1).valueOf();
const t2 = new Date("2020-01-01T" + s2).valueOf();
if (!(t1 && t2))
return undefined;
return t1 - t2;
}
function compareIsoTime(t1, t2) {
if (!(t1 && t2))
return undefined;
const a1 = TIME.exec(t1);
const a2 = TIME.exec(t2);
if (!(a1 && a2))
return undefined;
t1 = a1[1] + a1[2] + a1[3];
t2 = a2[1] + a2[2] + a2[3];
if (t1 > t2)
return 1;
if (t1 < t2)
return -1;
return 0;
}
const DATE_TIME_SEPARATOR = /t|\s/i;
function getDateTime(strictTimeZone) {
const time = getTime(strictTimeZone);
return function date_time(str) {
// http://tools.ietf.org/html/rfc3339#section-5.6
const dateTime = str.split(DATE_TIME_SEPARATOR);
return dateTime.length === 2 && date(dateTime[0]) && time(dateTime[1]);
};
}
function compareDateTime(dt1, dt2) {
if (!(dt1 && dt2))
return undefined;
const d1 = new Date(dt1).valueOf();
const d2 = new Date(dt2).valueOf();
if (!(d1 && d2))
return undefined;
return d1 - d2;
}
function compareIsoDateTime(dt1, dt2) {
if (!(dt1 && dt2))
return undefined;
const [d1, t1] = dt1.split(DATE_TIME_SEPARATOR);
const [d2, t2] = dt2.split(DATE_TIME_SEPARATOR);
const res = compareDate(d1, d2);
if (res === undefined)
return undefined;
return res || compareTime(t1, t2);
}
const NOT_URI_FRAGMENT = /\/|:/;
const URI = /^(?:[a-z][a-z0-9+\-.]*:)(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)(?:\?(?:[a-z0-9\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i;
function uri(str) {
// http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + optional protocol + required "."
return NOT_URI_FRAGMENT.test(str) && URI.test(str);
}
const BYTE = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/gm;
function byte(str) {
BYTE.lastIndex = 0;
return BYTE.test(str);
}
const MIN_INT32 = -(2 ** 31);
const MAX_INT32 = 2 ** 31 - 1;
function validateInt32(value) {
return Number.isInteger(value) && value <= MAX_INT32 && value >= MIN_INT32;
}
function validateInt64(value) {
// JSON and javascript max Int is 2**53, so any int that passes isInteger is valid for Int64
return Number.isInteger(value);
}
function validateNumber() {
return true;
}
const Z_ANCHOR = /[^\\]\\Z/;
function regex(str) {
if (Z_ANCHOR.test(str))
return false;
try {
new RegExp(str);
return true;
}
catch (e) {
return false;
}
}
//# sourceMappingURL=formats.js.map

File diff suppressed because one or more lines are too long

View File

@@ -1,15 +0,0 @@
import { FormatMode, FormatName } from "./formats";
import type { Plugin, Format } from "ajv";
export { FormatMode, FormatName } from "./formats";
export { LimitFormatError } from "./limit";
export interface FormatOptions {
mode?: FormatMode;
formats?: FormatName[];
keywords?: boolean;
}
export type FormatsPluginOptions = FormatName[] | FormatOptions;
export interface FormatsPlugin extends Plugin<FormatsPluginOptions> {
get: (format: FormatName, mode?: FormatMode) => Format;
}
declare const formatsPlugin: FormatsPlugin;
export default formatsPlugin;

View File

@@ -1,37 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const formats_1 = require("./formats");
const limit_1 = require("./limit");
const codegen_1 = require("ajv/dist/compile/codegen");
const fullName = new codegen_1.Name("fullFormats");
const fastName = new codegen_1.Name("fastFormats");
const formatsPlugin = (ajv, opts = { keywords: true }) => {
if (Array.isArray(opts)) {
addFormats(ajv, opts, formats_1.fullFormats, fullName);
return ajv;
}
const [formats, exportName] = opts.mode === "fast" ? [formats_1.fastFormats, fastName] : [formats_1.fullFormats, fullName];
const list = opts.formats || formats_1.formatNames;
addFormats(ajv, list, formats, exportName);
if (opts.keywords)
(0, limit_1.default)(ajv);
return ajv;
};
formatsPlugin.get = (name, mode = "full") => {
const formats = mode === "fast" ? formats_1.fastFormats : formats_1.fullFormats;
const f = formats[name];
if (!f)
throw new Error(`Unknown format "${name}"`);
return f;
};
function addFormats(ajv, list, fs, exportName) {
var _a;
var _b;
(_a = (_b = ajv.opts.code).formats) !== null && _a !== void 0 ? _a : (_b.formats = (0, codegen_1._) `require("ajv-formats/dist/formats").${exportName}`);
for (const f of list)
ajv.addFormat(f, fs[f]);
}
module.exports = exports = formatsPlugin;
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = formatsPlugin;
//# sourceMappingURL=index.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,uCAOkB;AAClB,mCAAiC;AAGjC,sDAAgD;AAgBhD,MAAM,QAAQ,GAAG,IAAI,cAAI,CAAC,aAAa,CAAC,CAAA;AACxC,MAAM,QAAQ,GAAG,IAAI,cAAI,CAAC,aAAa,CAAC,CAAA;AAExC,MAAM,aAAa,GAAkB,CACnC,GAAQ,EACR,OAA6B,EAAC,QAAQ,EAAE,IAAI,EAAC,EACxC,EAAE;IACP,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;QACvB,UAAU,CAAC,GAAG,EAAE,IAAI,EAAE,qBAAW,EAAE,QAAQ,CAAC,CAAA;QAC5C,OAAO,GAAG,CAAA;KACX;IACD,MAAM,CAAC,OAAO,EAAE,UAAU,CAAC,GACzB,IAAI,CAAC,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,CAAC,qBAAW,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,qBAAW,EAAE,QAAQ,CAAC,CAAA;IAC1E,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,IAAI,qBAAW,CAAA;IACxC,UAAU,CAAC,GAAG,EAAE,IAAI,EAAE,OAAO,EAAE,UAAU,CAAC,CAAA;IAC1C,IAAI,IAAI,CAAC,QAAQ;QAAE,IAAA,eAAW,EAAC,GAAG,CAAC,CAAA;IACnC,OAAO,GAAG,CAAA;AACZ,CAAC,CAAA;AAED,aAAa,CAAC,GAAG,GAAG,CAAC,IAAgB,EAAE,OAAmB,MAAM,EAAU,EAAE;IAC1E,MAAM,OAAO,GAAG,IAAI,KAAK,MAAM,CAAC,CAAC,CAAC,qBAAW,CAAC,CAAC,CAAC,qBAAW,CAAA;IAC3D,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACvB,IAAI,CAAC,CAAC;QAAE,MAAM,IAAI,KAAK,CAAC,mBAAmB,IAAI,GAAG,CAAC,CAAA;IACnD,OAAO,CAAC,CAAA;AACV,CAAC,CAAA;AAED,SAAS,UAAU,CAAC,GAAQ,EAAE,IAAkB,EAAE,EAAkB,EAAE,UAAgB;;;IACpF,YAAA,GAAG,CAAC,IAAI,CAAC,IAAI,EAAC,OAAO,uCAAP,OAAO,GAAK,IAAA,WAAC,EAAA,uCAAuC,UAAU,EAAE,EAAA;IAC9E,KAAK,MAAM,CAAC,IAAI,IAAI;QAAE,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;AAC/C,CAAC;AAED,MAAM,CAAC,OAAO,GAAG,OAAO,GAAG,aAAa,CAAA;AACxC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,YAAY,EAAE,EAAC,KAAK,EAAE,IAAI,EAAC,CAAC,CAAA;AAE3D,kBAAe,aAAa,CAAA"}

View File

@@ -1,10 +0,0 @@
import type { Plugin, CodeKeywordDefinition, ErrorObject } from "ajv";
type Kwd = "formatMaximum" | "formatMinimum" | "formatExclusiveMaximum" | "formatExclusiveMinimum";
type Comparison = "<=" | ">=" | "<" | ">";
export type LimitFormatError = ErrorObject<Kwd, {
limit: string;
comparison: Comparison;
}>;
export declare const formatLimitDefinition: CodeKeywordDefinition;
declare const formatLimitPlugin: Plugin<undefined>;
export default formatLimitPlugin;

View File

@@ -1,69 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.formatLimitDefinition = void 0;
const ajv_1 = require("ajv");
const codegen_1 = require("ajv/dist/compile/codegen");
const ops = codegen_1.operators;
const KWDs = {
formatMaximum: { okStr: "<=", ok: ops.LTE, fail: ops.GT },
formatMinimum: { okStr: ">=", ok: ops.GTE, fail: ops.LT },
formatExclusiveMaximum: { okStr: "<", ok: ops.LT, fail: ops.GTE },
formatExclusiveMinimum: { okStr: ">", ok: ops.GT, fail: ops.LTE },
};
const error = {
message: ({ keyword, schemaCode }) => (0, codegen_1.str) `should be ${KWDs[keyword].okStr} ${schemaCode}`,
params: ({ keyword, schemaCode }) => (0, codegen_1._) `{comparison: ${KWDs[keyword].okStr}, limit: ${schemaCode}}`,
};
exports.formatLimitDefinition = {
keyword: Object.keys(KWDs),
type: "string",
schemaType: "string",
$data: true,
error,
code(cxt) {
const { gen, data, schemaCode, keyword, it } = cxt;
const { opts, self } = it;
if (!opts.validateFormats)
return;
const fCxt = new ajv_1.KeywordCxt(it, self.RULES.all.format.definition, "format");
if (fCxt.$data)
validate$DataFormat();
else
validateFormat();
function validate$DataFormat() {
const fmts = gen.scopeValue("formats", {
ref: self.formats,
code: opts.code.formats,
});
const fmt = gen.const("fmt", (0, codegen_1._) `${fmts}[${fCxt.schemaCode}]`);
cxt.fail$data((0, codegen_1.or)((0, codegen_1._) `typeof ${fmt} != "object"`, (0, codegen_1._) `${fmt} instanceof RegExp`, (0, codegen_1._) `typeof ${fmt}.compare != "function"`, compareCode(fmt)));
}
function validateFormat() {
const format = fCxt.schema;
const fmtDef = self.formats[format];
if (!fmtDef || fmtDef === true)
return;
if (typeof fmtDef != "object" ||
fmtDef instanceof RegExp ||
typeof fmtDef.compare != "function") {
throw new Error(`"${keyword}": format "${format}" does not define "compare" function`);
}
const fmt = gen.scopeValue("formats", {
key: format,
ref: fmtDef,
code: opts.code.formats ? (0, codegen_1._) `${opts.code.formats}${(0, codegen_1.getProperty)(format)}` : undefined,
});
cxt.fail$data(compareCode(fmt));
}
function compareCode(fmt) {
return (0, codegen_1._) `${fmt}.compare(${data}, ${schemaCode}) ${KWDs[keyword].fail} 0`;
}
},
dependencies: ["format"],
};
const formatLimitPlugin = (ajv) => {
ajv.addKeyword(exports.formatLimitDefinition);
return ajv;
};
exports.default = formatLimitPlugin;
//# sourceMappingURL=limit.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"limit.js","sourceRoot":"","sources":["../src/limit.ts"],"names":[],"mappings":";;;AAWA,6BAA8B;AAC9B,sDAA2E;AAM3E,MAAM,GAAG,GAAG,mBAAS,CAAA;AAErB,MAAM,IAAI,GAA4D;IACpE,aAAa,EAAE,EAAC,KAAK,EAAE,IAAI,EAAE,EAAE,EAAE,GAAG,CAAC,GAAG,EAAE,IAAI,EAAE,GAAG,CAAC,EAAE,EAAC;IACvD,aAAa,EAAE,EAAC,KAAK,EAAE,IAAI,EAAE,EAAE,EAAE,GAAG,CAAC,GAAG,EAAE,IAAI,EAAE,GAAG,CAAC,EAAE,EAAC;IACvD,sBAAsB,EAAE,EAAC,KAAK,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,CAAC,EAAE,EAAE,IAAI,EAAE,GAAG,CAAC,GAAG,EAAC;IAC/D,sBAAsB,EAAE,EAAC,KAAK,EAAE,GAAG,EAAE,EAAE,EAAE,GAAG,CAAC,EAAE,EAAE,IAAI,EAAE,GAAG,CAAC,GAAG,EAAC;CAChE,CAAA;AAID,MAAM,KAAK,GAA2B;IACpC,OAAO,EAAE,CAAC,EAAC,OAAO,EAAE,UAAU,EAAC,EAAE,EAAE,CAAC,IAAA,aAAG,EAAA,aAAa,IAAI,CAAC,OAAc,CAAC,CAAC,KAAK,IAAI,UAAU,EAAE;IAC9F,MAAM,EAAE,CAAC,EAAC,OAAO,EAAE,UAAU,EAAC,EAAE,EAAE,CAChC,IAAA,WAAC,EAAA,gBAAgB,IAAI,CAAC,OAAc,CAAC,CAAC,KAAK,YAAY,UAAU,GAAG;CACvE,CAAA;AAEY,QAAA,qBAAqB,GAA0B;IAC1D,OAAO,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;IAC1B,IAAI,EAAE,QAAQ;IACd,UAAU,EAAE,QAAQ;IACpB,KAAK,EAAE,IAAI;IACX,KAAK;IACL,IAAI,CAAC,GAAG;QACN,MAAM,EAAC,GAAG,EAAE,IAAI,EAAE,UAAU,EAAE,OAAO,EAAE,EAAE,EAAC,GAAG,GAAG,CAAA;QAChD,MAAM,EAAC,IAAI,EAAE,IAAI,EAAC,GAAG,EAAE,CAAA;QACvB,IAAI,CAAC,IAAI,CAAC,eAAe;YAAE,OAAM;QAEjC,MAAM,IAAI,GAAG,IAAI,gBAAU,CAAC,EAAE,EAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,MAAe,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAA;QACrF,IAAI,IAAI,CAAC,KAAK;YAAE,mBAAmB,EAAE,CAAA;;YAChC,cAAc,EAAE,CAAA;QAErB,SAAS,mBAAmB;YAC1B,MAAM,IAAI,GAAG,GAAG,CAAC,UAAU,CAAC,SAAS,EAAE;gBACrC,GAAG,EAAE,IAAI,CAAC,OAAO;gBACjB,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,OAAO;aACxB,CAAC,CAAA;YACF,MAAM,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,KAAK,EAAE,IAAA,WAAC,EAAA,GAAG,IAAI,IAAI,IAAI,CAAC,UAAU,GAAG,CAAC,CAAA;YAC5D,GAAG,CAAC,SAAS,CACX,IAAA,YAAE,EACA,IAAA,WAAC,EAAA,UAAU,GAAG,cAAc,EAC5B,IAAA,WAAC,EAAA,GAAG,GAAG,oBAAoB,EAC3B,IAAA,WAAC,EAAA,UAAU,GAAG,wBAAwB,EACtC,WAAW,CAAC,GAAG,CAAC,CACjB,CACF,CAAA;QACH,CAAC;QAED,SAAS,cAAc;YACrB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAgB,CAAA;YACpC,MAAM,MAAM,GAA4B,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAA;YAC5D,IAAI,CAAC,MAAM,IAAI,MAAM,KAAK,IAAI;gBAAE,OAAM;YACtC,IACE,OAAO,MAAM,IAAI,QAAQ;gBACzB,MAAM,YAAY,MAAM;gBACxB,OAAO,MAAM,CAAC,OAAO,IAAI,UAAU,EACnC;gBACA,MAAM,IAAI,KAAK,CAAC,IAAI,OAAO,cAAc,MAAM,sCAAsC,CAAC,CAAA;aACvF;YACD,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,CAAC,SAAS,EAAE;gBACpC,GAAG,EAAE,MAAM;gBACX,GAAG,EAAE,MAAM;gBACX,IAAI,EAAE,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAA,WAAC,EAAA,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,IAAA,qBAAW,EAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS;aACpF,CAAC,CAAA;YAEF,GAAG,CAAC,SAAS,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAA;QACjC,CAAC;QAED,SAAS,WAAW,CAAC,GAAS;YAC5B,OAAO,IAAA,WAAC,EAAA,GAAG,GAAG,YAAY,IAAI,KAAK,UAAU,KAAK,IAAI,CAAC,OAAc,CAAC,CAAC,IAAI,IAAI,CAAA;QACjF,CAAC;IACH,CAAC;IACD,YAAY,EAAE,CAAC,QAAQ,CAAC;CACzB,CAAA;AAED,MAAM,iBAAiB,GAAsB,CAAC,GAAQ,EAAO,EAAE;IAC7D,GAAG,CAAC,UAAU,CAAC,6BAAqB,CAAC,CAAA;IACrC,OAAO,GAAG,CAAA;AACZ,CAAC,CAAA;AAED,kBAAe,iBAAiB,CAAA"}

View File

@@ -1,74 +0,0 @@
{
"name": "ajv-formats",
"version": "3.0.1",
"description": "Format validation for Ajv v7+",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"src/",
"dist/"
],
"scripts": {
"build": "tsc",
"prettier:write": "prettier --write \"./**/*.{md,json,yaml,js,ts}\"",
"prettier:check": "prettier --list-different \"./**/*.{md,json,yaml,js,ts}\"",
"eslint": "eslint --ext .ts ./src/**/*",
"test-spec": "jest",
"test-cov": "jest --coverage",
"test": "npm run prettier:check && npm run build && npm run eslint && npm run test-cov",
"ci-test": "npm run test"
},
"repository": {
"type": "git",
"url": "git+https://github.com/ajv-validator/ajv-formats.git"
},
"keywords": [
"Ajv",
"JSON-Schema",
"format",
"validation"
],
"author": "Evgeny Poberezkin",
"license": "MIT",
"bugs": {
"url": "https://github.com/ajv-validator/ajv-formats/issues"
},
"homepage": "https://github.com/ajv-validator/ajv-formats#readme",
"dependencies": {
"ajv": "^8.0.0"
},
"peerDependencies": {
"ajv": "^8.0.0"
},
"peerDependenciesMeta": {
"ajv": {
"optional": true
}
},
"devDependencies": {
"@ajv-validator/config": "^0.3.0",
"@types/jest": "^26.0.5",
"@types/node": "^14.10.1",
"@typescript-eslint/eslint-plugin": "^3.7.0",
"@typescript-eslint/parser": "^3.7.0",
"ajv": "^8.0.0",
"eslint": "^7.5.0",
"eslint-config-prettier": "^6.11.0",
"husky": "^4.2.5",
"jest": "^26.1.0",
"json-schema-test": "^2.0.0",
"lint-staged": "^10.2.11",
"prettier": "^2.3.2",
"ts-jest": "^26.1.3",
"typescript": "^4.0.0"
},
"prettier": "@ajv-validator/config/prettierrc.json",
"husky": {
"hooks": {
"pre-commit": "lint-staged && npm test"
}
},
"lint-staged": {
"*.{md,json,yaml,js,ts}": "prettier --write"
}
}

View File

@@ -1,269 +0,0 @@
import type {Format, FormatDefinition} from "ajv"
import type {FormatValidator, FormatCompare} from "ajv/dist/types"
export type FormatMode = "fast" | "full"
export type FormatName =
| "date"
| "time"
| "date-time"
| "iso-time"
| "iso-date-time"
| "duration"
| "uri"
| "uri-reference"
| "uri-template"
| "url"
| "email"
| "hostname"
| "ipv4"
| "ipv6"
| "regex"
| "uuid"
| "json-pointer"
| "json-pointer-uri-fragment"
| "relative-json-pointer"
| "byte"
| "int32"
| "int64"
| "float"
| "double"
| "password"
| "binary"
export type DefinedFormats = {
[key in FormatName]: Format
}
function fmtDef(
validate: RegExp | FormatValidator<string>,
compare: FormatCompare<string>
): FormatDefinition<string> {
return {validate, compare}
}
export const fullFormats: DefinedFormats = {
// date: http://tools.ietf.org/html/rfc3339#section-5.6
date: fmtDef(date, compareDate),
// date-time: http://tools.ietf.org/html/rfc3339#section-5.6
time: fmtDef(getTime(true), compareTime),
"date-time": fmtDef(getDateTime(true), compareDateTime),
"iso-time": fmtDef(getTime(), compareIsoTime),
"iso-date-time": fmtDef(getDateTime(), compareIsoDateTime),
// duration: https://tools.ietf.org/html/rfc3339#appendix-A
duration: /^P(?!$)((\d+Y)?(\d+M)?(\d+D)?(T(?=\d)(\d+H)?(\d+M)?(\d+S)?)?|(\d+W)?)$/,
uri,
"uri-reference":
/^(?:[a-z][a-z0-9+\-.]*:)?(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'"()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'"()*+,;=:@]|%[0-9a-f]{2})*)*)?(?:\?(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\-._~!$&'"()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i,
// uri-template: https://tools.ietf.org/html/rfc6570
"uri-template":
/^(?:(?:[^\x00-\x20"'<>%\\^`{|}]|%[0-9a-f]{2})|\{[+#./;?&=,!@|]?(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?(?:,(?:[a-z0-9_]|%[0-9a-f]{2})+(?::[1-9][0-9]{0,3}|\*)?)*\})*$/i,
// For the source: https://gist.github.com/dperini/729294
// For test cases: https://mathiasbynens.be/demo/url-regex
url: /^(?:https?|ftp):\/\/(?:\S+(?::\S*)?@)?(?:(?!(?:10|127)(?:\.\d{1,3}){3})(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\u{00a1}-\u{ffff}]+-)*[a-z0-9\u{00a1}-\u{ffff}]+)(?:\.(?:[a-z0-9\u{00a1}-\u{ffff}]+-)*[a-z0-9\u{00a1}-\u{ffff}]+)*(?:\.(?:[a-z\u{00a1}-\u{ffff}]{2,})))(?::\d{2,5})?(?:\/[^\s]*)?$/iu,
email:
/^[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?$/i,
hostname:
/^(?=.{1,253}\.?$)[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[-0-9a-z]{0,61}[0-9a-z])?)*\.?$/i,
// optimized https://www.safaribooksonline.com/library/view/regular-expressions-cookbook/9780596802837/ch07s16.html
ipv4: /^(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)\.){3}(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)$/,
ipv6: /^((([0-9a-f]{1,4}:){7}([0-9a-f]{1,4}|:))|(([0-9a-f]{1,4}:){6}(:[0-9a-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9a-f]{1,4}:){5}(((:[0-9a-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9a-f]{1,4}:){4}(((:[0-9a-f]{1,4}){1,3})|((:[0-9a-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){3}(((:[0-9a-f]{1,4}){1,4})|((:[0-9a-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){2}(((:[0-9a-f]{1,4}){1,5})|((:[0-9a-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9a-f]{1,4}:){1}(((:[0-9a-f]{1,4}){1,6})|((:[0-9a-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9a-f]{1,4}){1,7})|((:[0-9a-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))$/i,
regex,
// uuid: http://tools.ietf.org/html/rfc4122
uuid: /^(?:urn:uuid:)?[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/i,
// JSON-pointer: https://tools.ietf.org/html/rfc6901
// uri fragment: https://tools.ietf.org/html/rfc3986#appendix-A
"json-pointer": /^(?:\/(?:[^~/]|~0|~1)*)*$/,
"json-pointer-uri-fragment": /^#(?:\/(?:[a-z0-9_\-.!$&'()*+,;:=@]|%[0-9a-f]{2}|~0|~1)*)*$/i,
// relative JSON-pointer: http://tools.ietf.org/html/draft-luff-relative-json-pointer-00
"relative-json-pointer": /^(?:0|[1-9][0-9]*)(?:#|(?:\/(?:[^~/]|~0|~1)*)*)$/,
// the following formats are used by the openapi specification: https://spec.openapis.org/oas/v3.0.0#data-types
// byte: https://github.com/miguelmota/is-base64
byte,
// signed 32 bit integer
int32: {type: "number", validate: validateInt32},
// signed 64 bit integer
int64: {type: "number", validate: validateInt64},
// C-type float
float: {type: "number", validate: validateNumber},
// C-type double
double: {type: "number", validate: validateNumber},
// hint to the UI to hide input strings
password: true,
// unchecked string payload
binary: true,
}
export const fastFormats: DefinedFormats = {
...fullFormats,
date: fmtDef(/^\d\d\d\d-[0-1]\d-[0-3]\d$/, compareDate),
time: fmtDef(
/^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)$/i,
compareTime
),
"date-time": fmtDef(
/^\d\d\d\d-[0-1]\d-[0-3]\dt(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)$/i,
compareDateTime
),
"iso-time": fmtDef(
/^(?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)?$/i,
compareIsoTime
),
"iso-date-time": fmtDef(
/^\d\d\d\d-[0-1]\d-[0-3]\d[t\s](?:[0-2]\d:[0-5]\d:[0-5]\d|23:59:60)(?:\.\d+)?(?:z|[+-]\d\d(?::?\d\d)?)?$/i,
compareIsoDateTime
),
// uri: https://github.com/mafintosh/is-my-json-valid/blob/master/formats.js
uri: /^(?:[a-z][a-z0-9+\-.]*:)(?:\/?\/)?[^\s]*$/i,
"uri-reference": /^(?:(?:[a-z][a-z0-9+\-.]*:)?\/?\/)?(?:[^\\\s#][^\s#]*)?(?:#[^\\\s]*)?$/i,
// email (sources from jsen validator):
// http://stackoverflow.com/questions/201323/using-a-regular-expression-to-validate-an-email-address#answer-8829363
// http://www.w3.org/TR/html5/forms.html#valid-e-mail-address (search for 'wilful violation')
email:
/^[a-z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?(?:\.[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?)*$/i,
}
export const formatNames = Object.keys(fullFormats) as FormatName[]
function isLeapYear(year: number): boolean {
// https://tools.ietf.org/html/rfc3339#appendix-C
return year % 4 === 0 && (year % 100 !== 0 || year % 400 === 0)
}
const DATE = /^(\d\d\d\d)-(\d\d)-(\d\d)$/
const DAYS = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
function date(str: string): boolean {
// full-date from http://tools.ietf.org/html/rfc3339#section-5.6
const matches: string[] | null = DATE.exec(str)
if (!matches) return false
const year: number = +matches[1]
const month: number = +matches[2]
const day: number = +matches[3]
return (
month >= 1 &&
month <= 12 &&
day >= 1 &&
day <= (month === 2 && isLeapYear(year) ? 29 : DAYS[month])
)
}
function compareDate(d1: string, d2: string): number | undefined {
if (!(d1 && d2)) return undefined
if (d1 > d2) return 1
if (d1 < d2) return -1
return 0
}
const TIME = /^(\d\d):(\d\d):(\d\d(?:\.\d+)?)(z|([+-])(\d\d)(?::?(\d\d))?)?$/i
function getTime(strictTimeZone?: boolean): (str: string) => boolean {
return function time(str: string): boolean {
const matches: string[] | null = TIME.exec(str)
if (!matches) return false
const hr: number = +matches[1]
const min: number = +matches[2]
const sec: number = +matches[3]
const tz: string | undefined = matches[4]
const tzSign: number = matches[5] === "-" ? -1 : 1
const tzH: number = +(matches[6] || 0)
const tzM: number = +(matches[7] || 0)
if (tzH > 23 || tzM > 59 || (strictTimeZone && !tz)) return false
if (hr <= 23 && min <= 59 && sec < 60) return true
// leap second
const utcMin = min - tzM * tzSign
const utcHr = hr - tzH * tzSign - (utcMin < 0 ? 1 : 0)
return (utcHr === 23 || utcHr === -1) && (utcMin === 59 || utcMin === -1) && sec < 61
}
}
function compareTime(s1: string, s2: string): number | undefined {
if (!(s1 && s2)) return undefined
const t1 = new Date("2020-01-01T" + s1).valueOf()
const t2 = new Date("2020-01-01T" + s2).valueOf()
if (!(t1 && t2)) return undefined
return t1 - t2
}
function compareIsoTime(t1: string, t2: string): number | undefined {
if (!(t1 && t2)) return undefined
const a1 = TIME.exec(t1)
const a2 = TIME.exec(t2)
if (!(a1 && a2)) return undefined
t1 = a1[1] + a1[2] + a1[3]
t2 = a2[1] + a2[2] + a2[3]
if (t1 > t2) return 1
if (t1 < t2) return -1
return 0
}
const DATE_TIME_SEPARATOR = /t|\s/i
function getDateTime(strictTimeZone?: boolean): (str: string) => boolean {
const time = getTime(strictTimeZone)
return function date_time(str: string): boolean {
// http://tools.ietf.org/html/rfc3339#section-5.6
const dateTime: string[] = str.split(DATE_TIME_SEPARATOR)
return dateTime.length === 2 && date(dateTime[0]) && time(dateTime[1])
}
}
function compareDateTime(dt1: string, dt2: string): number | undefined {
if (!(dt1 && dt2)) return undefined
const d1 = new Date(dt1).valueOf()
const d2 = new Date(dt2).valueOf()
if (!(d1 && d2)) return undefined
return d1 - d2
}
function compareIsoDateTime(dt1: string, dt2: string): number | undefined {
if (!(dt1 && dt2)) return undefined
const [d1, t1] = dt1.split(DATE_TIME_SEPARATOR)
const [d2, t2] = dt2.split(DATE_TIME_SEPARATOR)
const res = compareDate(d1, d2)
if (res === undefined) return undefined
return res || compareTime(t1, t2)
}
const NOT_URI_FRAGMENT = /\/|:/
const URI =
/^(?:[a-z][a-z0-9+\-.]*:)(?:\/?\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:]|%[0-9a-f]{2})*@)?(?:\[(?:(?:(?:(?:[0-9a-f]{1,4}:){6}|::(?:[0-9a-f]{1,4}:){5}|(?:[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){4}|(?:(?:[0-9a-f]{1,4}:){0,1}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){3}|(?:(?:[0-9a-f]{1,4}:){0,2}[0-9a-f]{1,4})?::(?:[0-9a-f]{1,4}:){2}|(?:(?:[0-9a-f]{1,4}:){0,3}[0-9a-f]{1,4})?::[0-9a-f]{1,4}:|(?:(?:[0-9a-f]{1,4}:){0,4}[0-9a-f]{1,4})?::)(?:[0-9a-f]{1,4}:[0-9a-f]{1,4}|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?))|(?:(?:[0-9a-f]{1,4}:){0,5}[0-9a-f]{1,4})?::[0-9a-f]{1,4}|(?:(?:[0-9a-f]{1,4}:){0,6}[0-9a-f]{1,4})?::)|[Vv][0-9a-f]+\.[a-z0-9\-._~!$&'()*+,;=:]+)\]|(?:(?:25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(?:25[0-5]|2[0-4]\d|[01]?\d\d?)|(?:[a-z0-9\-._~!$&'()*+,;=]|%[0-9a-f]{2})*)(?::\d*)?(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*|\/(?:(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)?|(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})+(?:\/(?:[a-z0-9\-._~!$&'()*+,;=:@]|%[0-9a-f]{2})*)*)(?:\?(?:[a-z0-9\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?(?:#(?:[a-z0-9\-._~!$&'()*+,;=:@/?]|%[0-9a-f]{2})*)?$/i
function uri(str: string): boolean {
// http://jmrware.com/articles/2009/uri_regexp/URI_regex.html + optional protocol + required "."
return NOT_URI_FRAGMENT.test(str) && URI.test(str)
}
const BYTE = /^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$/gm
function byte(str: string): boolean {
BYTE.lastIndex = 0
return BYTE.test(str)
}
const MIN_INT32 = -(2 ** 31)
const MAX_INT32 = 2 ** 31 - 1
function validateInt32(value: number): boolean {
return Number.isInteger(value) && value <= MAX_INT32 && value >= MIN_INT32
}
function validateInt64(value: number): boolean {
// JSON and javascript max Int is 2**53, so any int that passes isInteger is valid for Int64
return Number.isInteger(value)
}
function validateNumber(): boolean {
return true
}
const Z_ANCHOR = /[^\\]\\Z/
function regex(str: string): boolean {
if (Z_ANCHOR.test(str)) return false
try {
new RegExp(str)
return true
} catch (e) {
return false
}
}

View File

@@ -1,62 +0,0 @@
import {
DefinedFormats,
FormatMode,
FormatName,
formatNames,
fastFormats,
fullFormats,
} from "./formats"
import formatLimit from "./limit"
import type Ajv from "ajv"
import type {Plugin, Format} from "ajv"
import {_, Name} from "ajv/dist/compile/codegen"
export {FormatMode, FormatName} from "./formats"
export {LimitFormatError} from "./limit"
export interface FormatOptions {
mode?: FormatMode
formats?: FormatName[]
keywords?: boolean
}
export type FormatsPluginOptions = FormatName[] | FormatOptions
export interface FormatsPlugin extends Plugin<FormatsPluginOptions> {
get: (format: FormatName, mode?: FormatMode) => Format
}
const fullName = new Name("fullFormats")
const fastName = new Name("fastFormats")
const formatsPlugin: FormatsPlugin = (
ajv: Ajv,
opts: FormatsPluginOptions = {keywords: true}
): Ajv => {
if (Array.isArray(opts)) {
addFormats(ajv, opts, fullFormats, fullName)
return ajv
}
const [formats, exportName] =
opts.mode === "fast" ? [fastFormats, fastName] : [fullFormats, fullName]
const list = opts.formats || formatNames
addFormats(ajv, list, formats, exportName)
if (opts.keywords) formatLimit(ajv)
return ajv
}
formatsPlugin.get = (name: FormatName, mode: FormatMode = "full"): Format => {
const formats = mode === "fast" ? fastFormats : fullFormats
const f = formats[name]
if (!f) throw new Error(`Unknown format "${name}"`)
return f
}
function addFormats(ajv: Ajv, list: FormatName[], fs: DefinedFormats, exportName: Name): void {
ajv.opts.code.formats ??= _`require("ajv-formats/dist/formats").${exportName}`
for (const f of list) ajv.addFormat(f, fs[f])
}
module.exports = exports = formatsPlugin
Object.defineProperty(exports, "__esModule", {value: true})
export default formatsPlugin

View File

@@ -1,99 +0,0 @@
import type Ajv from "ajv"
import type {
Plugin,
CodeKeywordDefinition,
KeywordErrorDefinition,
Code,
Name,
ErrorObject,
} from "ajv"
import type {AddedFormat} from "ajv/dist/types"
import type {Rule} from "ajv/dist/compile/rules"
import {KeywordCxt} from "ajv"
import {_, str, or, getProperty, operators} from "ajv/dist/compile/codegen"
type Kwd = "formatMaximum" | "formatMinimum" | "formatExclusiveMaximum" | "formatExclusiveMinimum"
type Comparison = "<=" | ">=" | "<" | ">"
const ops = operators
const KWDs: {[K in Kwd]: {okStr: Comparison; ok: Code; fail: Code}} = {
formatMaximum: {okStr: "<=", ok: ops.LTE, fail: ops.GT},
formatMinimum: {okStr: ">=", ok: ops.GTE, fail: ops.LT},
formatExclusiveMaximum: {okStr: "<", ok: ops.LT, fail: ops.GTE},
formatExclusiveMinimum: {okStr: ">", ok: ops.GT, fail: ops.LTE},
}
export type LimitFormatError = ErrorObject<Kwd, {limit: string; comparison: Comparison}>
const error: KeywordErrorDefinition = {
message: ({keyword, schemaCode}) => str`should be ${KWDs[keyword as Kwd].okStr} ${schemaCode}`,
params: ({keyword, schemaCode}) =>
_`{comparison: ${KWDs[keyword as Kwd].okStr}, limit: ${schemaCode}}`,
}
export const formatLimitDefinition: CodeKeywordDefinition = {
keyword: Object.keys(KWDs),
type: "string",
schemaType: "string",
$data: true,
error,
code(cxt) {
const {gen, data, schemaCode, keyword, it} = cxt
const {opts, self} = it
if (!opts.validateFormats) return
const fCxt = new KeywordCxt(it, (self.RULES.all.format as Rule).definition, "format")
if (fCxt.$data) validate$DataFormat()
else validateFormat()
function validate$DataFormat(): void {
const fmts = gen.scopeValue("formats", {
ref: self.formats,
code: opts.code.formats,
})
const fmt = gen.const("fmt", _`${fmts}[${fCxt.schemaCode}]`)
cxt.fail$data(
or(
_`typeof ${fmt} != "object"`,
_`${fmt} instanceof RegExp`,
_`typeof ${fmt}.compare != "function"`,
compareCode(fmt)
)
)
}
function validateFormat(): void {
const format = fCxt.schema as string
const fmtDef: AddedFormat | undefined = self.formats[format]
if (!fmtDef || fmtDef === true) return
if (
typeof fmtDef != "object" ||
fmtDef instanceof RegExp ||
typeof fmtDef.compare != "function"
) {
throw new Error(`"${keyword}": format "${format}" does not define "compare" function`)
}
const fmt = gen.scopeValue("formats", {
key: format,
ref: fmtDef,
code: opts.code.formats ? _`${opts.code.formats}${getProperty(format)}` : undefined,
})
cxt.fail$data(compareCode(fmt))
}
function compareCode(fmt: Name): Code {
return _`${fmt}.compare(${data}, ${schemaCode}) ${KWDs[keyword as Kwd].fail} 0`
}
},
dependencies: ["format"],
}
const formatLimitPlugin: Plugin<undefined> = (ajv: Ajv): Ajv => {
ajv.addKeyword(formatLimitDefinition)
return ajv
}
export default formatLimitPlugin

View File

@@ -1,25 +1,21 @@
{
"name": "fast-json-stringify",
"version": "5.16.1",
"version": "6.2.0",
"description": "Stringify your JSON at max speed",
"main": "index.js",
"type": "commonjs",
"types": "types/index.d.ts",
"scripts": {
"bench": "node ./benchmark/bench.js",
"bench": "node --expose-gc ./benchmark/bench.js",
"bench:cmp": "node ./benchmark/bench-cmp-branch.js",
"bench:cmp:ci": "node ./benchmark/bench-cmp-branch.js --ci",
"benchmark": "node ./benchmark/bench-cmp-lib.js",
"lint": "standard",
"lint:fix": "standard --fix",
"benchmark": "node --expose-gc ./benchmark/bench-cmp-lib.js",
"lint": "eslint",
"lint:fix": "eslint --fix",
"test:typescript": "tsd",
"test:unit": "tap",
"test:unit": "c8 node --test",
"test": "npm run test:unit && npm run test:typescript"
},
"precommit": [
"lint",
"test"
],
"repository": {
"type": "git",
"url": "git+https://github.com/fastify/fast-json-stringify.git"
@@ -31,38 +27,62 @@
"fast"
],
"author": "Matteo Collina <hello@matteocollina.com>",
"contributors": [
{
"name": "Tomas Della Vedova",
"url": "http://delved.org"
},
{
"name": "Aras Abbasi",
"email": "aras.abbasi@gmail.com"
},
{
"name": "Manuel Spigolon",
"email": "behemoth89@gmail.com"
},
{
"name": "Frazer Smith",
"email": "frazer.dev@icloud.com",
"url": "https://github.com/fdawgs"
}
],
"license": "MIT",
"bugs": {
"url": "https://github.com/fastify/fast-json-stringify/issues"
},
"homepage": "https://github.com/fastify/fast-json-stringify#readme",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/fastify"
},
{
"type": "opencollective",
"url": "https://opencollective.com/fastify"
}
],
"devDependencies": {
"@fastify/pre-commit": "^2.0.2",
"@sinclair/typebox": "^0.32.3",
"benchmark": "^2.1.4",
"@sinclair/typebox": "^0.34.3",
"c8": "^10.1.2",
"cli-select": "^1.1.2",
"compile-json-stringify": "^0.1.2",
"is-my-json-valid": "^2.20.0",
"simple-git": "^3.7.1",
"standard": "^17.0.0",
"tap": "^16.0.1",
"tsd": "^0.31.0",
"webpack": "^5.40.0",
"fast-json-stringify": "."
"eslint": "^9.17.0",
"fast-json-stringify": ".",
"is-my-json-valid": "^2.20.6",
"json-accelerator": "^0.0.2",
"neostandard": "^0.12.0",
"simple-git": "^3.23.0",
"tinybench": "^5.0.1",
"tsd": "^0.32.0",
"webpack": "^5.90.3"
},
"dependencies": {
"ajv": "^8.10.0",
"@fastify/merge-json-schemas": "^0.2.0",
"ajv": "^8.12.0",
"ajv-formats": "^3.0.1",
"fast-deep-equal": "^3.1.3",
"fast-uri": "^2.1.0",
"rfdc": "^1.2.0",
"json-schema-ref-resolver": "^1.0.1",
"@fastify/merge-json-schemas": "^0.1.0"
},
"standard": {
"ignore": [
"schema-validator.js"
]
"fast-uri": "^3.0.0",
"json-schema-ref-resolver": "^3.0.0",
"rfdc": "^1.2.0"
},
"runkitExampleFilename": "./examples/example.js"
}

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('additionalProperties', (t) => {
@@ -19,7 +19,7 @@ test('additionalProperties', (t) => {
})
const obj = { str: 'test', foo: 42, ofoo: true, foof: 'string', objfoo: { a: true } }
t.equal(stringify(obj), '{"str":"test","foo":"42","ofoo":"true","foof":"string","objfoo":"[object Object]"}')
t.assert.equal(stringify(obj), '{"str":"test","foo":"42","ofoo":"true","foof":"string","objfoo":"[object Object]"}')
})
test('additionalProperties should not change properties', (t) => {
@@ -38,7 +38,7 @@ test('additionalProperties should not change properties', (t) => {
})
const obj = { foo: '42', ofoo: 42 }
t.equal(stringify(obj), '{"foo":"42","ofoo":42}')
t.assert.equal(stringify(obj), '{"foo":"42","ofoo":42}')
})
test('additionalProperties should not change properties and patternProperties', (t) => {
@@ -62,7 +62,7 @@ test('additionalProperties should not change properties and patternProperties',
})
const obj = { foo: '42', ofoo: 42, test: '42' }
t.equal(stringify(obj), '{"foo":"42","ofoo":"42","test":42}')
t.assert.equal(stringify(obj), '{"foo":"42","ofoo":"42","test":42}')
})
test('additionalProperties set to true, use of fast-safe-stringify', (t) => {
@@ -75,7 +75,7 @@ test('additionalProperties set to true, use of fast-safe-stringify', (t) => {
})
const obj = { foo: true, ofoo: 42, arrfoo: ['array', 'test'], objfoo: { a: 'world' } }
t.equal(stringify(obj), '{"foo":true,"ofoo":42,"arrfoo":["array","test"],"objfoo":{"a":"world"}}')
t.assert.equal(stringify(obj), '{"foo":true,"ofoo":42,"arrfoo":["array","test"],"objfoo":{"a":"world"}}')
})
test('additionalProperties - string coerce', (t) => {
@@ -90,7 +90,7 @@ test('additionalProperties - string coerce', (t) => {
})
const obj = { foo: true, ofoo: 42, arrfoo: ['array', 'test'], objfoo: { a: 'world' } }
t.equal(stringify(obj), '{"foo":"true","ofoo":"42","arrfoo":"array,test","objfoo":"[object Object]"}')
t.assert.equal(stringify(obj), '{"foo":"true","ofoo":"42","arrfoo":"array,test","objfoo":"[object Object]"}')
})
test('additionalProperties - number skip', (t) => {
@@ -106,7 +106,7 @@ test('additionalProperties - number skip', (t) => {
// const obj = { foo: true, ofoo: '42', xfoo: 'string', arrfoo: [1, 2], objfoo: { num: 42 } }
const obj = { foo: true, ofoo: '42' }
t.equal(stringify(obj), '{"foo":1,"ofoo":42}')
t.assert.equal(stringify(obj), '{"foo":1,"ofoo":42}')
})
test('additionalProperties - boolean coerce', (t) => {
@@ -121,7 +121,7 @@ test('additionalProperties - boolean coerce', (t) => {
})
const obj = { foo: 'true', ofoo: 0, arrfoo: [1, 2], objfoo: { a: true } }
t.equal(stringify(obj), '{"foo":true,"ofoo":false,"arrfoo":true,"objfoo":true}')
t.assert.equal(stringify(obj), '{"foo":true,"ofoo":false,"arrfoo":true,"objfoo":true}')
})
test('additionalProperties - object coerce', (t) => {
@@ -141,7 +141,7 @@ test('additionalProperties - object coerce', (t) => {
})
const obj = { objfoo: { answer: 42 } }
t.equal(stringify(obj), '{"objfoo":{"answer":42}}')
t.assert.equal(stringify(obj), '{"objfoo":{"answer":42}}')
})
test('additionalProperties - array coerce', (t) => {
@@ -159,10 +159,10 @@ test('additionalProperties - array coerce', (t) => {
})
const coercibleValues = { arrfoo: [1, 2] }
t.equal(stringify(coercibleValues), '{"arrfoo":["1","2"]}')
t.assert.equal(stringify(coercibleValues), '{"arrfoo":["1","2"]}')
const incoercibleValues = { foo: 'true', ofoo: 0, objfoo: { tyrion: 'lannister' } }
t.throws(() => stringify(incoercibleValues))
t.assert.throws(() => stringify(incoercibleValues))
})
test('additionalProperties with empty schema', (t) => {
@@ -173,7 +173,7 @@ test('additionalProperties with empty schema', (t) => {
})
const obj = { a: 1, b: true, c: null }
t.equal(stringify(obj), '{"a":1,"b":true,"c":null}')
t.assert.equal(stringify(obj), '{"a":1,"b":true,"c":null}')
})
test('additionalProperties with nested empty schema', (t) => {
@@ -187,7 +187,7 @@ test('additionalProperties with nested empty schema', (t) => {
})
const obj = { data: { a: 1, b: true, c: null } }
t.equal(stringify(obj), '{"data":{"a":1,"b":true,"c":null}}')
t.assert.equal(stringify(obj), '{"data":{"a":1,"b":true,"c":null}}')
})
test('nested additionalProperties', (t) => {
@@ -207,7 +207,7 @@ test('nested additionalProperties', (t) => {
})
const obj = [{ ap: { value: 'string' } }]
t.equal(stringify(obj), '[{"ap":{"value":"string"}}]')
t.assert.equal(stringify(obj), '[{"ap":{"value":"string"}}]')
})
test('very nested additionalProperties', (t) => {
@@ -244,7 +244,7 @@ test('very nested additionalProperties', (t) => {
})
const obj = [{ ap: { nested: { moarNested: { finally: { value: 'str' } } } } }]
t.equal(stringify(obj), '[{"ap":{"nested":{"moarNested":{"finally":{"value":"str"}}}}}]')
t.assert.equal(stringify(obj), '[{"ap":{"nested":{"moarNested":{"finally":{"value":"str"}}}}}]')
})
test('nested additionalProperties set to true', (t) => {
@@ -261,7 +261,7 @@ test('nested additionalProperties set to true', (t) => {
})
const obj = { ap: { value: 'string', someNumber: 42 } }
t.equal(stringify(obj), '{"ap":{"value":"string","someNumber":42}}')
t.assert.equal(stringify(obj), '{"ap":{"value":"string","someNumber":42}}')
})
test('field passed to fastSafeStringify as undefined should be removed', (t) => {
@@ -278,7 +278,7 @@ test('field passed to fastSafeStringify as undefined should be removed', (t) =>
})
const obj = { ap: { value: 'string', someNumber: undefined } }
t.equal(stringify(obj), '{"ap":{"value":"string"}}')
t.assert.equal(stringify(obj), '{"ap":{"value":"string"}}')
})
test('property without type but with enum, will acts as additionalProperties', (t) => {
@@ -294,7 +294,7 @@ test('property without type but with enum, will acts as additionalProperties', (
})
const obj = { ap: { additional: 'field' } }
t.equal(stringify(obj), '{"ap":{"additional":"field"}}')
t.assert.equal(stringify(obj), '{"ap":{"additional":"field"}}')
})
test('property without type but with enum, will acts as additionalProperties without overwriting', (t) => {
@@ -311,7 +311,7 @@ test('property without type but with enum, will acts as additionalProperties wit
})
const obj = { ap: { additional: 'field' } }
t.equal(stringify(obj), '{"ap":{}}')
t.assert.equal(stringify(obj), '{"ap":{}}')
})
test('function and symbol references are not serialized as undefined', (t) => {
@@ -328,5 +328,5 @@ test('function and symbol references are not serialized as undefined', (t) => {
})
const obj = { str: 'x', test: 'test', meth: () => 'x', sym: Symbol('x') }
t.equal(stringify(obj), '{"str":"x","test":"test"}')
t.assert.equal(stringify(obj), '{"str":"x","test":"test"}')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
process.env.TZ = 'UTC'
@@ -17,7 +17,7 @@ test('allOf: combine type and format ', (t) => {
const stringify = build(schema)
const date = new Date(1674263005800)
const value = stringify(date)
t.equal(value, '"01:03:25"')
t.assert.equal(value, '"01:03:25"')
})
test('allOf: combine additional properties ', (t) => {
@@ -35,7 +35,7 @@ test('allOf: combine additional properties ', (t) => {
const stringify = build(schema)
const data = { property: true }
const value = stringify(data)
t.equal(value, JSON.stringify(data))
t.assert.equal(value, JSON.stringify(data))
})
test('allOf: combine pattern properties', (t) => {
@@ -57,7 +57,7 @@ test('allOf: combine pattern properties', (t) => {
const stringify = build(schema)
const data = { foo: 42 }
const value = stringify(data)
t.equal(value, JSON.stringify(data))
t.assert.equal(value, JSON.stringify(data))
})
test('object with allOf and multiple schema on the allOf', (t) => {
@@ -101,7 +101,7 @@ test('object with allOf and multiple schema on the allOf', (t) => {
id: 1
})
} catch (e) {
t.equal(e.message, '"name" is required!')
t.assert.equal(e.message, '"name" is required!')
}
try {
@@ -109,15 +109,15 @@ test('object with allOf and multiple schema on the allOf', (t) => {
name: 'string'
})
} catch (e) {
t.equal(e.message, '"id" is required!')
t.assert.equal(e.message, '"id" is required!')
}
t.equal(stringify({
t.assert.equal(stringify({
id: 1,
name: 'string'
}), '{"name":"string","id":1}')
t.equal(stringify({
t.assert.equal(stringify({
id: 1,
name: 'string',
tag: 'otherString'
@@ -149,7 +149,7 @@ test('object with allOf and one schema on the allOf', (t) => {
const value = stringify({
id: 1
})
t.equal(value, '{"id":1}')
t.assert.equal(value, '{"id":1}')
})
test('object with allOf and no schema on the allOf', (t) => {
@@ -165,7 +165,7 @@ test('object with allOf and no schema on the allOf', (t) => {
build(schema)
t.fail()
} catch (e) {
t.equal(e.message, 'schema is invalid: data/allOf must NOT have fewer than 1 items')
t.assert.equal(e.message, 'schema is invalid: data/allOf must NOT have fewer than 1 items')
}
})
@@ -217,7 +217,7 @@ test('object with nested allOfs', (t) => {
id3: 3,
id4: 4 // extra prop shouldn't be in result
})
t.equal(value, '{"id1":1,"id2":2,"id3":3}')
t.assert.equal(value, '{"id1":1,"id2":2,"id3":3}')
})
test('object with anyOf nested inside allOf', (t) => {
@@ -286,7 +286,7 @@ test('object with anyOf nested inside allOf', (t) => {
obj: { nested: 'yes' },
nestedObj: { nested: 'yes' }
})
t.equal(value, '{"id1":1,"obj":{"nested":"yes"},"id3":3,"nestedObj":{"nested":"yes"}}')
t.assert.equal(value, '{"id1":1,"obj":{"nested":"yes"},"id3":3,"nestedObj":{"nested":"yes"}}')
})
test('object with $ref in allOf', (t) => {
@@ -317,7 +317,7 @@ test('object with $ref in allOf', (t) => {
id1: 1,
id2: 2 // extra prop shouldn't be in result
})
t.equal(value, '{"id1":1}')
t.assert.equal(value, '{"id1":1}')
})
test('object with $ref and other object in allOf', (t) => {
@@ -357,7 +357,7 @@ test('object with $ref and other object in allOf', (t) => {
id2: 2,
id3: 3 // extra prop shouldn't be in result
})
t.equal(value, '{"id1":1,"id2":2}')
t.assert.equal(value, '{"id1":1,"id2":2}')
})
test('object with multiple $refs in allOf', (t) => {
@@ -400,7 +400,7 @@ test('object with multiple $refs in allOf', (t) => {
id2: 2,
id3: 3 // extra prop shouldn't be in result
})
t.equal(value, '{"id1":1,"id2":2}')
t.assert.equal(value, '{"id1":1,"id2":2}')
})
test('allOf with nested allOf in $ref', (t) => {
@@ -452,7 +452,7 @@ test('allOf with nested allOf in $ref', (t) => {
id3: 3,
id4: 4 // extra prop shouldn't be in result
})
t.equal(value, '{"id1":1,"id2":2,"id3":3}')
t.assert.equal(value, '{"id1":1,"id2":2,"id3":3}')
})
test('object with external $refs in allOf', (t) => {
@@ -505,7 +505,7 @@ test('object with external $refs in allOf', (t) => {
id2: 2,
id3: 3 // extra prop shouldn't be in result
})
t.equal(value, '{"id1":1,"id2":2}')
t.assert.equal(value, '{"id1":1,"id2":2}')
})
test('allof with local anchor reference', (t) => {
@@ -550,7 +550,7 @@ test('allof with local anchor reference', (t) => {
const stringify = build(schema, { schema: externalSchemas })
const data = { type: 'foo', validation: 'bar' }
t.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(data), JSON.stringify(data))
})
test('allOf: multiple nested $ref properties', (t) => {
@@ -601,12 +601,12 @@ test('allOf: multiple nested $ref properties', (t) => {
const stringify = build(schema, { schema: [externalSchema1, externalSchema2] })
t.equal(stringify({ id1: 1 }), JSON.stringify({ id1: 1 }))
t.equal(stringify({ id2: 2 }), JSON.stringify({ id2: 2 }))
t.assert.equal(stringify({ id1: 1 }), JSON.stringify({ id1: 1 }))
t.assert.equal(stringify({ id2: 2 }), JSON.stringify({ id2: 2 }))
})
test('allOf: throw Error if types mismatch ', (t) => {
t.plan(3)
t.plan(1)
const schema = {
allOf: [
@@ -614,18 +614,16 @@ test('allOf: throw Error if types mismatch ', (t) => {
{ type: 'number' }
]
}
try {
t.assert.throws(() => {
build(schema)
t.fail('should throw the MergeError')
} catch (error) {
t.ok(error instanceof Error)
t.equal(error.message, 'Failed to merge "type" keyword schemas.')
t.same(error.schemas, [['string'], ['number']])
}
}, {
message: 'Failed to merge "type" keyword schemas.',
schemas: [['string'], ['number']]
})
})
test('allOf: throw Error if format mismatch ', (t) => {
t.plan(3)
t.plan(1)
const schema = {
allOf: [
@@ -633,14 +631,12 @@ test('allOf: throw Error if format mismatch ', (t) => {
{ format: 'time' }
]
}
try {
t.assert.throws(() => {
build(schema)
t.fail('should throw the MergeError')
} catch (error) {
t.ok(error instanceof Error)
t.equal(error.message, 'Failed to merge "format" keyword schemas.')
t.same(error.schemas, ['date', 'time'])
}
}, {
message: 'Failed to merge "format" keyword schemas.'
// schemas: ['date', 'time']
})
})
test('recursive nested allOfs', (t) => {
@@ -658,7 +654,7 @@ test('recursive nested allOfs', (t) => {
const data = { foo: {} }
const stringify = build(schema)
t.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(data), JSON.stringify(data))
})
test('recursive nested allOfs', (t) => {
@@ -676,7 +672,7 @@ test('recursive nested allOfs', (t) => {
const data = { foo: {} }
const stringify = build(schema)
t.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(data), JSON.stringify(data))
})
test('external recursive allOfs', (t) => {
@@ -715,7 +711,7 @@ test('external recursive allOfs', (t) => {
}
}
const stringify = build(schema, { schema: { externalSchema } })
t.equal(stringify(data), '{"a":{"bar":"42","foo":{}},"b":{"bar":"42","foo":{}}}')
t.assert.equal(stringify(data), '{"a":{"bar":"42","foo":{}},"b":{"bar":"42","foo":{}}}')
})
test('do not crash with $ref prop', (t) => {
@@ -751,5 +747,5 @@ test('do not crash with $ref prop', (t) => {
$ref: 'true'
}
})
t.equal(value, '{"outside":{"$ref":"true"}}')
t.assert.equal(value, '{"outside":{"$ref":"true"}}')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('object with nested random property', (t) => {
@@ -16,19 +16,19 @@ test('object with nested random property', (t) => {
}
const stringify = build(schema)
t.equal(stringify({
t.assert.equal(stringify({
id: 1, name: 'string'
}), '{"id":1,"name":"string"}')
t.equal(stringify({
t.assert.equal(stringify({
id: 1, name: { first: 'name', last: 'last' }
}), '{"id":1,"name":{"first":"name","last":"last"}}')
t.equal(stringify({
t.assert.equal(stringify({
id: 1, name: null
}), '{"id":1,"name":null}')
t.equal(stringify({
t.assert.equal(stringify({
id: 1, name: ['first', 'last']
}), '{"id":1,"name":["first","last"]}')
})
@@ -45,7 +45,7 @@ test('object with empty schema with $id: undefined set', (t) => {
}
}
const stringify = build(schema)
t.equal(stringify({
t.assert.equal(stringify({
name: 'string'
}), '{"name":"string"}')
})
@@ -61,7 +61,7 @@ test('array with random items', (t) => {
const stringify = build(schema)
const value = stringify([1, 'string', null])
t.equal(value, '[1,"string",null]')
t.assert.equal(value, '[1,"string",null]')
})
test('empty schema', (t) => {
@@ -71,13 +71,13 @@ test('empty schema', (t) => {
const stringify = build(schema)
t.equal(stringify(null), 'null')
t.equal(stringify(1), '1')
t.equal(stringify(true), 'true')
t.equal(stringify('hello'), '"hello"')
t.equal(stringify({}), '{}')
t.equal(stringify({ x: 10 }), '{"x":10}')
t.equal(stringify([true, 1, 'hello']), '[true,1,"hello"]')
t.assert.equal(stringify(null), 'null')
t.assert.equal(stringify(1), '1')
t.assert.equal(stringify(true), 'true')
t.assert.equal(stringify('hello'), '"hello"')
t.assert.equal(stringify({}), '{}')
t.assert.equal(stringify({ x: 10 }), '{"x":10}')
t.assert.equal(stringify([true, 1, 'hello']), '[true,1,"hello"]')
})
test('empty schema on nested object', (t) => {
@@ -92,13 +92,13 @@ test('empty schema on nested object', (t) => {
const stringify = build(schema)
t.equal(stringify({ x: null }), '{"x":null}')
t.equal(stringify({ x: 1 }), '{"x":1}')
t.equal(stringify({ x: true }), '{"x":true}')
t.equal(stringify({ x: 'hello' }), '{"x":"hello"}')
t.equal(stringify({ x: {} }), '{"x":{}}')
t.equal(stringify({ x: { x: 10 } }), '{"x":{"x":10}}')
t.equal(stringify({ x: [true, 1, 'hello'] }), '{"x":[true,1,"hello"]}')
t.assert.equal(stringify({ x: null }), '{"x":null}')
t.assert.equal(stringify({ x: 1 }), '{"x":1}')
t.assert.equal(stringify({ x: true }), '{"x":true}')
t.assert.equal(stringify({ x: 'hello' }), '{"x":"hello"}')
t.assert.equal(stringify({ x: {} }), '{"x":{}}')
t.assert.equal(stringify({ x: { x: 10 } }), '{"x":{"x":10}}')
t.assert.equal(stringify({ x: [true, 1, 'hello'] }), '{"x":[true,1,"hello"]}')
})
test('empty schema on array', (t) => {
@@ -111,7 +111,7 @@ test('empty schema on array', (t) => {
const stringify = build(schema)
t.equal(stringify([1, true, 'hello', [], { x: 1 }]), '[1,true,"hello",[],{"x":1}]')
t.assert.equal(stringify([1, true, 'hello', [], { x: 1 }]), '[1,true,"hello",[],{"x":1}]')
})
test('empty schema on anyOf', (t) => {
@@ -147,10 +147,10 @@ test('empty schema on anyOf', (t) => {
const stringify = build(schema)
t.equal(stringify({ kind: 'Bar', value: 1 }), '{"kind":"Bar","value":1}')
t.equal(stringify({ kind: 'Foo', value: 1 }), '{"kind":"Foo","value":1}')
t.equal(stringify({ kind: 'Foo', value: true }), '{"kind":"Foo","value":true}')
t.equal(stringify({ kind: 'Foo', value: 'hello' }), '{"kind":"Foo","value":"hello"}')
t.assert.equal(stringify({ kind: 'Bar', value: 1 }), '{"kind":"Bar","value":1}')
t.assert.equal(stringify({ kind: 'Foo', value: 1 }), '{"kind":"Foo","value":1}')
t.assert.equal(stringify({ kind: 'Foo', value: true }), '{"kind":"Foo","value":true}')
t.assert.equal(stringify({ kind: 'Foo', value: 'hello' }), '{"kind":"Foo","value":"hello"}')
})
test('should throw a TypeError with the path to the key of the invalid value /1', (t) => {
@@ -186,7 +186,7 @@ test('should throw a TypeError with the path to the key of the invalid value /1'
const stringify = build(schema)
t.throws(() => stringify({ kind: 'Baz', value: 1 }), new TypeError('The value of \'#\' does not match schema definition.'))
t.assert.throws(() => stringify({ kind: 'Baz', value: 1 }), new TypeError('The value of \'#\' does not match schema definition.'))
})
test('should throw a TypeError with the path to the key of the invalid value /2', (t) => {
@@ -227,5 +227,5 @@ test('should throw a TypeError with the path to the key of the invalid value /2'
const stringify = build(schema)
t.throws(() => stringify({ data: { kind: 'Baz', value: 1 } }), new TypeError('The value of \'#/properties/data\' does not match schema definition.'))
t.assert.throws(() => stringify({ data: { kind: 'Baz', value: 1 } }), new TypeError('The value of \'#/properties/data\' does not match schema definition.'))
})

View File

@@ -1,6 +1,6 @@
'use strict'
const { test } = require('tap')
const { test } = require('node:test')
const build = require('..')
process.env.TZ = 'UTC'
@@ -23,11 +23,11 @@ test('object with multiple types field', (t) => {
}
const stringify = build(schema)
t.equal(stringify({
t.assert.equal(stringify({
str: 'string'
}), '{"str":"string"}')
t.equal(stringify({
t.assert.equal(stringify({
str: true
}), '{"str":true}')
})
@@ -55,11 +55,11 @@ test('object with field of type object or null', (t) => {
}
const stringify = build(schema)
t.equal(stringify({
t.assert.equal(stringify({
prop: null
}), '{"prop":null}')
t.equal(stringify({
t.assert.equal(stringify({
prop: {
str: 'string'
}
@@ -89,13 +89,13 @@ test('object with field of type object or array', (t) => {
}
const stringify = build(schema)
t.equal(stringify({
t.assert.equal(stringify({
prop: {
str: 'string'
}
}), '{"prop":{"str":"string"}}')
t.equal(stringify({
t.assert.equal(stringify({
prop: ['string']
}), '{"prop":["string"]}')
})
@@ -115,7 +115,7 @@ test('object with field of type string and coercion disable ', (t) => {
}
}
const stringify = build(schema)
t.throws(() => stringify({ str: 1 }))
t.assert.throws(() => stringify({ str: 1 }))
})
test('object with field of type string and coercion enable ', (t) => {
@@ -143,7 +143,7 @@ test('object with field of type string and coercion enable ', (t) => {
const value = stringify({
str: 1
})
t.equal(value, '{"str":"1"}')
t.assert.equal(value, '{"str":"1"}')
})
test('object with field with type union of multiple objects', (t) => {
@@ -177,9 +177,9 @@ test('object with field with type union of multiple objects', (t) => {
const stringify = build(schema)
t.equal(stringify({ anyOfSchema: { baz: 5 } }), '{"anyOfSchema":{"baz":5}}')
t.assert.equal(stringify({ anyOfSchema: { baz: 5 } }), '{"anyOfSchema":{"baz":5}}')
t.equal(stringify({ anyOfSchema: { bar: 'foo' } }), '{"anyOfSchema":{"bar":"foo"}}')
t.assert.equal(stringify({ anyOfSchema: { bar: 'foo' } }), '{"anyOfSchema":{"bar":"foo"}}')
})
test('null value in schema', (t) => {
@@ -219,10 +219,10 @@ test('symbol value in schema', (t) => {
}
const stringify = build(schema)
t.equal(stringify({ value: 'foo' }), '{"value":"foo"}')
t.equal(stringify({ value: 'bar' }), '{"value":"bar"}')
t.equal(stringify({ value: 'baz' }), '{"value":"baz"}')
t.throws(() => stringify({ value: 'qux' }))
t.assert.equal(stringify({ value: 'foo' }), '{"value":"foo"}')
t.assert.equal(stringify({ value: 'bar' }), '{"value":"bar"}')
t.assert.equal(stringify({ value: 'baz' }), '{"value":"baz"}')
t.assert.throws(() => stringify({ value: 'qux' }))
})
test('anyOf and $ref together', (t) => {
@@ -251,9 +251,9 @@ test('anyOf and $ref together', (t) => {
const stringify = build(schema)
t.equal(stringify({ cs: 'franco' }), '{"cs":"franco"}')
t.assert.equal(stringify({ cs: 'franco' }), '{"cs":"franco"}')
t.equal(stringify({ cs: true }), '{"cs":true}')
t.assert.equal(stringify({ cs: true }), '{"cs":true}')
})
test('anyOf and $ref: 2 levels are fine', (t) => {
@@ -289,7 +289,7 @@ test('anyOf and $ref: 2 levels are fine', (t) => {
const stringify = build(schema)
const value = stringify({ cs: 3 })
t.equal(value, '{"cs":3}')
t.assert.equal(value, '{"cs":3}')
})
test('anyOf and $ref: multiple levels should throw at build.', (t) => {
@@ -328,9 +328,9 @@ test('anyOf and $ref: multiple levels should throw at build.', (t) => {
const stringify = build(schema)
t.equal(stringify({ cs: 3 }), '{"cs":3}')
t.equal(stringify({ cs: true }), '{"cs":true}')
t.equal(stringify({ cs: 'pippo' }), '{"cs":"pippo"}')
t.assert.equal(stringify({ cs: 3 }), '{"cs":3}')
t.assert.equal(stringify({ cs: true }), '{"cs":true}')
t.assert.equal(stringify({ cs: 'pippo' }), '{"cs":"pippo"}')
})
test('anyOf and $ref - multiple external $ref', (t) => {
@@ -383,10 +383,8 @@ test('anyOf and $ref - multiple external $ref', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"prop":{"prop2":"test"}}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"prop":{"prop2":"test"}}}')
})
test('anyOf looks for all of the array items', (t) => {
@@ -421,7 +419,7 @@ test('anyOf looks for all of the array items', (t) => {
const stringify = build(schema)
const value = stringify([{ savedId: 'great' }, { error: 'oops' }])
t.equal(value, '[{"savedId":"great"},{"error":"oops"}]')
t.assert.equal(value, '[{"savedId":"great"},{"error":"oops"}]')
})
test('anyOf with enum with more than 100 entries', (t) => {
@@ -443,7 +441,7 @@ test('anyOf with enum with more than 100 entries', (t) => {
const stringify = build(schema)
const value = stringify(['EUR', 'USD', null])
t.equal(value, '["EUR","USD",null]')
t.assert.equal(value, '["EUR","USD",null]')
})
test('anyOf object with field date-time of type string with format or null', (t) => {
@@ -465,7 +463,7 @@ test('anyOf object with field date-time of type string with format or null', (t)
const withOneOfStringify = build(withOneOfSchema)
t.equal(withOneOfStringify({
t.assert.equal(withOneOfStringify({
prop: toStringify
}), `{"prop":"${toStringify.toISOString()}"}`)
})
@@ -495,7 +493,7 @@ test('anyOf object with nested field date-time of type string with format or nul
prop: { nestedProp: new Date() }
}
t.equal(withOneOfStringify(data), JSON.stringify(data))
t.assert.equal(withOneOfStringify(data), JSON.stringify(data))
})
test('anyOf object with nested field date of type string with format or null', (t) => {
@@ -523,7 +521,7 @@ test('anyOf object with nested field date of type string with format or null', (
prop: { nestedProp: new Date(1674263005800) }
}
t.equal(withOneOfStringify(data), '{"prop":{"nestedProp":"2023-01-21"}}')
t.assert.equal(withOneOfStringify(data), '{"prop":{"nestedProp":"2023-01-21"}}')
})
test('anyOf object with nested field time of type string with format or null', (t) => {
@@ -550,7 +548,7 @@ test('anyOf object with nested field time of type string with format or null', (
const data = {
prop: { nestedProp: new Date(1674263005800) }
}
t.equal(withOneOfStringify(data), '{"prop":{"nestedProp":"01:03:25"}}')
t.assert.equal(withOneOfStringify(data), '{"prop":{"nestedProp":"01:03:25"}}')
})
test('anyOf object with field date of type string with format or null', (t) => {
@@ -571,7 +569,7 @@ test('anyOf object with field date of type string with format or null', (t) => {
}
const withOneOfStringify = build(withOneOfSchema)
t.equal(withOneOfStringify({
t.assert.equal(withOneOfStringify({
prop: toStringify
}), '{"prop":"2011-01-01"}')
})
@@ -594,7 +592,7 @@ test('anyOf object with invalid field date of type string with format or null',
}
const withOneOfStringify = build(withOneOfSchema)
t.throws(() => withOneOfStringify({ prop: toStringify }))
t.assert.throws(() => withOneOfStringify({ prop: toStringify }))
})
test('anyOf with a nested external schema', (t) => {
@@ -614,7 +612,7 @@ test('anyOf with a nested external schema', (t) => {
const schema = { anyOf: [{ $ref: 'external' }] }
const stringify = build(schema, { schema: externalSchemas })
t.equal(stringify('foo'), '"foo"')
t.assert.equal(stringify('foo'), '"foo"')
})
test('object with ref and validated properties', (t) => {
@@ -642,7 +640,7 @@ test('object with ref and validated properties', (t) => {
}
const stringify = build(schema, { schema: externalSchemas })
t.equal(stringify({ id: 1, reference: 'hi' }), '{"id":1,"reference":"hi"}')
t.assert.equal(stringify({ id: 1, reference: 'hi' }), '{"id":1,"reference":"hi"}')
})
test('anyOf required props', (t) => {
@@ -659,9 +657,9 @@ test('anyOf required props', (t) => {
anyOf: [{ required: ['prop2'] }, { required: ['prop3'] }]
}
const stringify = build(schema)
t.equal(stringify({ prop1: 'test', prop2: 'test2' }), '{"prop1":"test","prop2":"test2"}')
t.equal(stringify({ prop1: 'test', prop3: 'test3' }), '{"prop1":"test","prop3":"test3"}')
t.equal(stringify({ prop1: 'test', prop2: 'test2', prop3: 'test3' }), '{"prop1":"test","prop2":"test2","prop3":"test3"}')
t.assert.equal(stringify({ prop1: 'test', prop2: 'test2' }), '{"prop1":"test","prop2":"test2"}')
t.assert.equal(stringify({ prop1: 'test', prop3: 'test3' }), '{"prop1":"test","prop3":"test3"}')
t.assert.equal(stringify({ prop1: 'test', prop2: 'test2', prop3: 'test3' }), '{"prop1":"test","prop2":"test2","prop3":"test3"}')
})
test('anyOf required props', (t) => {
@@ -686,9 +684,9 @@ test('anyOf required props', (t) => {
]
}
const stringify = build(schema)
t.equal(stringify({ prop1: 'test1' }), '{"prop1":"test1"}')
t.equal(stringify({ prop2: 'test2' }), '{"prop2":"test2"}')
t.equal(stringify({ prop1: 'test1', prop2: 'test2' }), '{"prop1":"test1","prop2":"test2"}')
t.assert.equal(stringify({ prop1: 'test1' }), '{"prop1":"test1"}')
t.assert.equal(stringify({ prop2: 'test2' }), '{"prop2":"test2"}')
t.assert.equal(stringify({ prop1: 'test1', prop2: 'test2' }), '{"prop1":"test1","prop2":"test2"}')
})
test('recursive nested anyOfs', (t) => {
@@ -706,7 +704,7 @@ test('recursive nested anyOfs', (t) => {
const data = { foo: {} }
const stringify = build(schema)
t.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(data), JSON.stringify(data))
})
test('recursive nested anyOfs', (t) => {
@@ -724,7 +722,7 @@ test('recursive nested anyOfs', (t) => {
const data = { foo: {} }
const stringify = build(schema)
t.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(data), JSON.stringify(data))
})
test('external recursive anyOfs', (t) => {
@@ -763,7 +761,7 @@ test('external recursive anyOfs', (t) => {
}
}
const stringify = build(schema, { schema: { externalSchema } })
t.equal(stringify(data), '{"a":{"bar":"42","foo":{}},"b":{"bar":"42","foo":{}}}')
t.assert.equal(stringify(data), '{"a":{"bar":"42","foo":{}},"b":{"bar":"42","foo":{}}}')
})
test('should build merged schemas twice', (t) => {
@@ -784,11 +782,11 @@ test('should build merged schemas twice', (t) => {
{
const stringify = build(schema)
t.equal(stringify({ enums: 'FOO' }), '{"enums":"FOO"}')
t.assert.equal(stringify({ enums: 'FOO' }), '{"enums":"FOO"}')
}
{
const stringify = build(schema)
t.equal(stringify({ enums: 'BAR' }), '{"enums":"BAR"}')
t.assert.equal(stringify({ enums: 'BAR' }), '{"enums":"BAR"}')
}
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
const Ajv = require('ajv')
@@ -8,7 +8,7 @@ const Ajv = require('ajv')
test('error on invalid largeArrayMechanism', (t) => {
t.plan(1)
t.throws(() => build({
t.assert.throws(() => build({
title: 'large array of null values with default mechanism',
type: 'object',
properties: {
@@ -31,9 +31,9 @@ function buildTest (schema, toStringify, options) {
const stringify = build(schema, options)
const output = stringify(toStringify)
t.same(JSON.parse(output), JSON.parse(JSON.stringify(toStringify)))
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.deepStrictEqual(JSON.parse(output), JSON.parse(JSON.stringify(toStringify)))
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
}
@@ -216,7 +216,7 @@ test('invalid items throw', (t) => {
}
}
const stringify = build(schema)
t.throws(() => stringify({ args: ['invalid'] }))
t.assert.throws(() => stringify({ args: ['invalid'] }))
})
buildTest({
@@ -258,7 +258,7 @@ test('array items is a list of schema and additionalItems is true, just the desc
]
})
t.equal(result, '{"foo":["foo","bar",1]}')
t.assert.equal(result, '{"foo":["foo","bar",1]}')
})
test('array items is a list of schema and additionalItems is true, just the described item is validated', (t) => {
@@ -287,7 +287,7 @@ test('array items is a list of schema and additionalItems is true, just the desc
foo: ['foo']
})
t.equal(result, '{"foo":["foo"]}')
t.assert.equal(result, '{"foo":["foo"]}')
})
test('array items is a list of schema and additionalItems is false /1', (t) => {
@@ -307,7 +307,7 @@ test('array items is a list of schema and additionalItems is false /1', (t) => {
}
const stringify = build(schema)
t.throws(() => stringify({ foo: ['foo', 'bar'] }), new Error('Item at 1 does not match schema definition.'))
t.assert.throws(() => stringify({ foo: ['foo', 'bar'] }), new Error('Item at 1 does not match schema definition.'))
})
test('array items is a list of schema and additionalItems is false /2', (t) => {
@@ -329,9 +329,9 @@ test('array items is a list of schema and additionalItems is false /2', (t) => {
const stringify = build(schema)
t.throws(() => stringify({ foo: [1, 'bar'] }), new Error('Item at 0 does not match schema definition.'))
t.throws(() => stringify({ foo: ['foo', 1] }), new Error('Item at 1 does not match schema definition.'))
t.throws(() => stringify({ foo: ['foo', 'bar', 'baz'] }), new Error('Item at 2 does not match schema definition.'))
t.assert.throws(() => stringify({ foo: [1, 'bar'] }), new Error('Item at 0 does not match schema definition.'))
t.assert.throws(() => stringify({ foo: ['foo', 1] }), new Error('Item at 1 does not match schema definition.'))
t.assert.throws(() => stringify({ foo: ['foo', 'bar', 'baz'] }), new Error('Item at 2 does not match schema definition.'))
})
test('array items is a schema and additionalItems is false', (t) => {
@@ -354,8 +354,8 @@ test('array items is a schema and additionalItems is false', (t) => {
const ajv = new Ajv({ allErrors: true, strict: false })
const validate = ajv.compile(schema)
t.same(stringify({ foo: ['foo', 'bar'] }), '{"foo":["foo","bar"]}')
t.equal(validate({ foo: ['foo', 'bar'] }), true)
t.assert.equal(stringify({ foo: ['foo', 'bar'] }), '{"foo":["foo","bar"]}')
t.assert.equal(validate({ foo: ['foo', 'bar'] }), true)
})
// https://github.com/fastify/fast-json-stringify/issues/279
@@ -402,7 +402,7 @@ test('object array with anyOf and symbol', (t) => {
{ name: 'name-0', option: 'Foo' },
{ name: 'name-1', option: 'Bar' }
])
t.equal(value, '[{"name":"name-0","option":"Foo"},{"name":"name-1","option":"Bar"}]')
t.assert.equal(value, '[{"name":"name-0","option":"Foo"},{"name":"name-1","option":"Bar"}]')
})
test('different arrays with same item schemas', (t) => {
@@ -427,7 +427,7 @@ test('different arrays with same item schemas', (t) => {
const stringify = build(schema)
const data = { array1: ['bar'], array2: ['foo', 'bar'] }
t.equal(stringify(data), '{"array1":["bar"],"array2":["foo","bar"]}')
t.assert.equal(stringify(data), '{"array1":["bar"],"array2":["foo","bar"]}')
})
const largeArray = new Array(2e4).fill({ a: 'test', b: 1 })
@@ -557,7 +557,7 @@ buildTest({
test('error on invalid value for largeArraySize /1', (t) => {
t.plan(1)
t.throws(() => build({
t.assert.throws(() => build({
title: 'large array of null values with default mechanism',
type: 'object',
properties: {
@@ -574,7 +574,7 @@ test('error on invalid value for largeArraySize /1', (t) => {
test('error on invalid value for largeArraySize /2', (t) => {
t.plan(1)
t.throws(() => build({
t.assert.throws(() => build({
title: 'large array of null values with default mechanism',
type: 'object',
properties: {
@@ -591,7 +591,7 @@ test('error on invalid value for largeArraySize /2', (t) => {
test('error on invalid value for largeArraySize /3', (t) => {
t.plan(1)
t.throws(() => build({
t.assert.throws(() => build({
title: 'large array of null values with default mechanism',
type: 'object',
properties: {

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
test('asNumber should convert BigInt', (t) => {
t.plan(1)
@@ -9,5 +9,5 @@ test('asNumber should convert BigInt', (t) => {
const number = serializer.asNumber(11753021440n)
t.equal(number, '11753021440')
t.assert.equal(number, '11753021440')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
@@ -12,9 +12,9 @@ function buildTest (schema, toStringify) {
const stringify = build(schema)
const output = stringify(toStringify)
t.same(JSON.parse(output), toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.deepStrictEqual(JSON.parse(output), toStringify)
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
}
@@ -281,41 +281,34 @@ test('throw an error or coerce numbers and integers that are not numbers', (t) =
}
})
try {
t.assert.throws(() => {
stringify({ age: 'hello ', distance: 'long' })
t.fail('should throw an error')
} catch (err) {
t.ok(err)
}
}, { message: 'The value "hello " cannot be converted to a number.' })
const result = stringify({
age: '42',
distance: true
})
t.same(JSON.parse(result), { age: 42, distance: 1 })
t.end()
t.assert.deepStrictEqual(JSON.parse(result), { age: 42, distance: 1 })
})
test('Should throw on invalid schema', t => {
t.plan(1)
try {
t.assert.throws(() => {
build({
type: 'Dinosaur',
properties: {
claws: { type: 'sharp' }
}
})
t.fail('should be an invalid schema')
} catch (err) {
t.ok(err)
}
}, { message: 'schema is invalid: data/properties/claws/type must be equal to one of the allowed values' })
})
test('additionalProperties - throw on unknown type', (t) => {
t.plan(1)
try {
t.assert.throws(() => {
build({
title: 'check array coerce',
type: 'object',
@@ -325,15 +318,13 @@ test('additionalProperties - throw on unknown type', (t) => {
}
})
t.fail('should be an invalid schema')
} catch (err) {
t.ok(err)
}
}, { message: 'schema is invalid: data/additionalProperties/type must be equal to one of the allowed values' })
})
test('patternProperties - throw on unknown type', (t) => {
t.plan(1)
try {
t.assert.throws(() => {
build({
title: 'check array coerce',
type: 'object',
@@ -344,10 +335,7 @@ test('patternProperties - throw on unknown type', (t) => {
}
}
})
t.fail('should be an invalid schema')
} catch (err) {
t.ok(err)
}
}, { message: 'schema is invalid: data/patternProperties/foo/type must be equal to one of the allowed values' })
})
test('render a double quote as JSON /1', (t) => {
@@ -362,8 +350,8 @@ test('render a double quote as JSON /1', (t) => {
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a double quote as JSON /2', (t) => {
@@ -378,8 +366,8 @@ test('render a double quote as JSON /2', (t) => {
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a long string', (t) => {
@@ -394,8 +382,8 @@ test('render a long string', (t) => {
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('returns JSON.stringify if schema type is boolean', t => {
@@ -408,5 +396,5 @@ test('returns JSON.stringify if schema type is boolean', t => {
const array = [1, true, 'test']
const stringify = build(schema)
t.equal(stringify(array), JSON.stringify(array))
t.assert.equal(stringify(array), JSON.stringify(array))
})

View File

@@ -1,7 +1,7 @@
'use strict'
const t = require('tap')
const test = t.test
const { test } = require('node:test')
const build = require('..')
test('render a bigint as JSON', (t) => {
@@ -15,7 +15,7 @@ test('render a bigint as JSON', (t) => {
const stringify = build(schema)
const output = stringify(1615n)
t.equal(output, '1615')
t.assert.equal(output, '1615')
})
test('render an object with a bigint as JSON', (t) => {
@@ -36,7 +36,7 @@ test('render an object with a bigint as JSON', (t) => {
id: 1615n
})
t.equal(output, '{"id":1615}')
t.assert.equal(output, '{"id":1615}')
})
test('render an array with a bigint as JSON', (t) => {
@@ -53,7 +53,7 @@ test('render an array with a bigint as JSON', (t) => {
const stringify = build(schema)
const output = stringify([1615n])
t.equal(output, '[1615]')
t.assert.equal(output, '[1615]')
})
test('render an object with an additionalProperty of type bigint as JSON', (t) => {
@@ -72,5 +72,5 @@ test('render an object with an additionalProperty of type bigint as JSON', (t) =
num: 1615n
})
t.equal(output, '{"num":1615}')
t.assert.equal(output, '{"num":1615}')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('Should clean the cache', (t) => {
@@ -11,9 +11,10 @@ test('Should clean the cache', (t) => {
type: 'string'
}
build(schema)
build(schema)
t.pass()
t.assert.doesNotThrow(() => {
build(schema)
build(schema)
})
})
test('Should clean the cache with external schemas', (t) => {
@@ -39,7 +40,8 @@ test('Should clean the cache with external schemas', (t) => {
}
}
build(schema)
build(schema)
t.pass()
t.assert.doesNotThrow(() => {
build(schema)
build(schema)
})
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
@@ -20,8 +20,8 @@ test('schema with const string', (t) => {
foo: 'bar'
})
t.equal(output, '{"foo":"bar"}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":"bar"}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const string and different input', (t) => {
@@ -40,8 +40,8 @@ test('schema with const string and different input', (t) => {
foo: 'baz'
})
t.equal(output, '{"foo":"bar"}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":"bar"}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const string and different type input', (t) => {
@@ -60,8 +60,8 @@ test('schema with const string and different type input', (t) => {
foo: 1
})
t.equal(output, '{"foo":"bar"}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":"bar"}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const string and no input', (t) => {
@@ -78,8 +78,8 @@ test('schema with const string and no input', (t) => {
const stringify = build(schema)
const output = stringify({})
t.equal(output, '{}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const string that contains \'', (t) => {
@@ -98,8 +98,8 @@ test('schema with const string that contains \'', (t) => {
foo: "'bar'"
})
t.equal(output, '{"foo":"\'bar\'"}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":"\'bar\'"}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const number', (t) => {
@@ -118,8 +118,8 @@ test('schema with const number', (t) => {
foo: 1
})
t.equal(output, '{"foo":1}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":1}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const number and different input', (t) => {
@@ -138,8 +138,8 @@ test('schema with const number and different input', (t) => {
foo: 2
})
t.equal(output, '{"foo":1}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":1}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const bool', (t) => {
@@ -158,8 +158,8 @@ test('schema with const bool', (t) => {
foo: true
})
t.equal(output, '{"foo":true}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":true}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const number', (t) => {
@@ -178,8 +178,8 @@ test('schema with const number', (t) => {
foo: 1
})
t.equal(output, '{"foo":1}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":1}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const null', (t) => {
@@ -198,8 +198,8 @@ test('schema with const null', (t) => {
foo: null
})
t.equal(output, '{"foo":null}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":null}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const array', (t) => {
@@ -218,8 +218,8 @@ test('schema with const array', (t) => {
foo: [1, 2, 3]
})
t.equal(output, '{"foo":[1,2,3]}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":[1,2,3]}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const object', (t) => {
@@ -238,8 +238,8 @@ test('schema with const object', (t) => {
foo: { bar: 'baz' }
})
t.equal(output, '{"foo":{"bar":"baz"}}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":{"bar":"baz"}}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('schema with const and null as type', (t) => {
@@ -258,12 +258,12 @@ test('schema with const and null as type', (t) => {
foo: null
})
t.equal(output, '{"foo":null}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":null}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
const output2 = stringify({ foo: 'baz' })
t.equal(output2, '{"foo":"baz"}')
t.ok(validate(JSON.parse(output2)), 'valid schema')
t.assert.equal(output2, '{"foo":"baz"}')
t.assert.ok(validate(JSON.parse(output2)), 'valid schema')
})
test('schema with const as nullable', (t) => {
@@ -282,14 +282,14 @@ test('schema with const as nullable', (t) => {
foo: null
})
t.equal(output, '{"foo":null}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"foo":null}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
const output2 = stringify({
foo: 'baz'
})
t.equal(output2, '{"foo":"baz"}')
t.ok(validate(JSON.parse(output2)), 'valid schema')
t.assert.equal(output2, '{"foo":"baz"}')
t.assert.ok(validate(JSON.parse(output2)), 'valid schema')
})
test('schema with const and invalid object', (t) => {
@@ -309,6 +309,6 @@ test('schema with const and invalid object', (t) => {
foo: { foo: 'baz' }
})
t.equal(result, '{"foo":{"foo":"bar"}}')
t.ok(validate(JSON.parse(result)), 'valid schema')
t.assert.equal(result, '{"foo":{"foo":"bar"}}')
t.assert.ok(validate(JSON.parse(result)), 'valid schema')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
@@ -19,8 +19,8 @@ test('render a date in a string as JSON', (t) => {
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a date in a string when format is date-format as ISOString', (t) => {
@@ -37,8 +37,8 @@ test('render a date in a string when format is date-format as ISOString', (t) =>
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a nullable date in a string when format is date-format as ISOString', (t) => {
@@ -56,8 +56,8 @@ test('render a nullable date in a string when format is date-format as ISOString
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a date in a string when format is date as YYYY-MM-DD', (t) => {
@@ -74,8 +74,8 @@ test('render a date in a string when format is date as YYYY-MM-DD', (t) => {
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, '"2023-01-21"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"2023-01-21"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a nullable date in a string when format is date as YYYY-MM-DD', (t) => {
@@ -93,8 +93,8 @@ test('render a nullable date in a string when format is date as YYYY-MM-DD', (t)
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, '"2023-01-21"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"2023-01-21"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('verify padding for rendered date in a string when format is date', (t) => {
@@ -111,8 +111,8 @@ test('verify padding for rendered date in a string when format is date', (t) =>
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, '"2020-01-01"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"2020-01-01"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a date in a string when format is time as kk:mm:ss', (t) => {
@@ -130,10 +130,10 @@ test('render a date in a string when format is time as kk:mm:ss', (t) => {
const output = stringify(toStringify)
validate(JSON.parse(output))
t.equal(validate.errors, null)
t.assert.equal(validate.errors, null)
t.equal(output, '"01:03:25"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"01:03:25"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a nullable date in a string when format is time as kk:mm:ss', (t) => {
@@ -152,10 +152,10 @@ test('render a nullable date in a string when format is time as kk:mm:ss', (t) =
const output = stringify(toStringify)
validate(JSON.parse(output))
t.equal(validate.errors, null)
t.assert.equal(validate.errors, null)
t.equal(output, '"01:03:25"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"01:03:25"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a midnight time', (t) => {
@@ -173,10 +173,10 @@ test('render a midnight time', (t) => {
const output = stringify(midnight)
validate(JSON.parse(output))
t.equal(validate.errors, null)
t.assert.equal(validate.errors, null)
t.equal(output, '"00:03:25"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"00:03:25"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('verify padding for rendered date in a string when format is time', (t) => {
@@ -194,10 +194,10 @@ test('verify padding for rendered date in a string when format is time', (t) =>
const output = stringify(toStringify)
validate(JSON.parse(output))
t.equal(validate.errors, null)
t.assert.equal(validate.errors, null)
t.equal(output, '"01:01:01"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"01:01:01"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a nested object in a string when type is date-format as ISOString', (t) => {
@@ -219,11 +219,11 @@ test('render a nested object in a string when type is date-format as ISOString',
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('serializing null value', t => {
test('serializing null value', async t => {
const input = { updatedAt: null }
function createSchema (properties) {
@@ -247,10 +247,10 @@ test('serializing null value', t => {
t.plan(3)
t.test('type::string', t => {
await t.test('type::string', async t => {
t.plan(3)
t.test('format::date-time', t => {
await t.test('format::date-time', t => {
t.plan(2)
const prop = {
@@ -265,11 +265,11 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":""}')
t.notOk(validate(JSON.parse(output)), 'an empty string is not a date-time format')
t.assert.equal(output, '{"updatedAt":""}')
t.assert.equal(validate(JSON.parse(output)), false, 'an empty string is not a date-time format')
})
t.test('format::date', t => {
await t.test('format::date', t => {
t.plan(2)
const prop = {
@@ -284,11 +284,11 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":""}')
t.notOk(validate(JSON.parse(output)), 'an empty string is not a date format')
t.assert.equal(output, '{"updatedAt":""}')
t.assert.equal(validate(JSON.parse(output)), false, 'an empty string is not a date format')
})
t.test('format::time', t => {
await t.test('format::time', t => {
t.plan(2)
const prop = {
@@ -303,15 +303,15 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":""}')
t.notOk(validate(JSON.parse(output)), 'an empty string is not a time format')
t.assert.equal(output, '{"updatedAt":""}')
t.assert.equal(validate(JSON.parse(output)), false, 'an empty string is not a time format')
})
})
t.test('type::array', t => {
await t.test('type::array', async t => {
t.plan(6)
t.test('format::date-time', t => {
await t.test('format::date-time', t => {
t.plan(2)
const prop = {
@@ -326,11 +326,11 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":""}')
t.notOk(validate(JSON.parse(output)), 'an empty string is not a date-time format')
t.assert.equal(output, '{"updatedAt":""}')
t.assert.equal(validate(JSON.parse(output)), false, 'an empty string is not a date-time format')
})
t.test('format::date', t => {
await t.test('format::date', t => {
t.plan(2)
const prop = {
@@ -345,11 +345,11 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":""}')
t.notOk(validate(JSON.parse(output)), 'an empty string is not a date format')
t.assert.equal(output, '{"updatedAt":""}')
t.assert.equal(validate(JSON.parse(output)), false, 'an empty string is not a date format')
})
t.test('format::date', t => {
await t.test('format::date', t => {
t.plan(2)
const prop = {
@@ -364,11 +364,11 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":""}')
t.notOk(validate(JSON.parse(output)), 'an empty string is not a date format')
t.assert.equal(output, '{"updatedAt":""}')
t.assert.equal(validate(JSON.parse(output)), false, 'an empty string is not a date format')
})
t.test('format::time, Date object', t => {
await t.test('format::time, Date object', t => {
t.plan(1)
const schema = {
@@ -389,10 +389,10 @@ test('serializing null value', t => {
const input = { updatedAt: date }
const { output } = serialize(schema, input)
t.equal(output, JSON.stringify({ updatedAt: '01:03:25' }))
t.assert.equal(output, JSON.stringify({ updatedAt: '01:03:25' }))
})
t.test('format::time, Date object', t => {
await t.test('format::time, Date object', t => {
t.plan(1)
const schema = {
@@ -407,10 +407,10 @@ test('serializing null value', t => {
const date = new Date(1674263005800)
const { output } = serialize(schema, date)
t.equal(output, '"01:03:25"')
t.assert.equal(output, '"01:03:25"')
})
t.test('format::time, Date object', t => {
await t.test('format::time, Date object', t => {
t.plan(1)
const schema = {
@@ -424,14 +424,14 @@ test('serializing null value', t => {
const { output } = serialize(schema, 42)
t.equal(output, JSON.stringify(42))
t.assert.equal(output, JSON.stringify(42))
})
})
t.test('type::array::nullable', t => {
await t.test('type::array::nullable', async t => {
t.plan(3)
t.test('format::date-time', t => {
await t.test('format::date-time', t => {
t.plan(2)
const prop = {
@@ -446,11 +446,11 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":null}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"updatedAt":null}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
t.test('format::date', t => {
await t.test('format::date', t => {
t.plan(2)
const prop = {
@@ -465,11 +465,11 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":null}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"updatedAt":null}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
t.test('format::time', t => {
await t.test('format::time', t => {
t.plan(2)
const prop = {
@@ -484,8 +484,8 @@ test('serializing null value', t => {
validate
} = serialize(createSchema(prop), input)
t.equal(output, '{"updatedAt":null}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"updatedAt":null}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
})
})
@@ -503,7 +503,7 @@ test('Validate Date object as string type', (t) => {
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.assert.equal(output, JSON.stringify(toStringify))
})
test('nullable date', (t) => {
@@ -524,7 +524,7 @@ test('nullable date', (t) => {
const data = new Date(1674263005800)
const result = stringify(data)
t.same(result, '"2023-01-21"')
t.assert.equal(result, '"2023-01-21"')
})
test('non-date format should not affect data serialization (issue #491)', (t) => {
@@ -543,7 +543,7 @@ test('non-date format should not affect data serialization (issue #491)', (t) =>
const stringify = build(schema)
const data = { hello: 123n }
t.equal(stringify(data), '{"hello":"123"}')
t.assert.equal(stringify(data), '{"hello":"123"}')
})
test('should serialize also an invalid string value, even if it is not a valid date', (t) => {
@@ -561,8 +561,8 @@ test('should serialize also an invalid string value, even if it is not a valid d
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.not(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.equal(validate(JSON.parse(output)), false, 'valid schema')
})
test('should throw an error if value can not be transformed to date-time', (t) => {
@@ -579,8 +579,8 @@ test('should throw an error if value can not be transformed to date-time', (t) =
const validate = validator(schema)
const stringify = build(schema)
t.throws(() => stringify(toStringify), new Error('The value "true" cannot be converted to a date-time.'))
t.not(validate(toStringify))
t.assert.throws(() => stringify(toStringify), new Error('The value "true" cannot be converted to a date-time.'))
t.assert.equal(validate(toStringify), false)
})
test('should throw an error if value can not be transformed to date', (t) => {
@@ -597,8 +597,8 @@ test('should throw an error if value can not be transformed to date', (t) => {
const validate = validator(schema)
const stringify = build(schema)
t.throws(() => stringify(toStringify), new Error('The value "true" cannot be converted to a date.'))
t.not(validate(toStringify))
t.assert.throws(() => stringify(toStringify), new Error('The value "true" cannot be converted to a date.'))
t.assert.equal(validate(toStringify), false)
})
test('should throw an error if value can not be transformed to time', (t) => {
@@ -615,8 +615,8 @@ test('should throw an error if value can not be transformed to time', (t) => {
const validate = validator(schema)
const stringify = build(schema)
t.throws(() => stringify(toStringify), new Error('The value "true" cannot be converted to a time.'))
t.not(validate(toStringify))
t.assert.throws(() => stringify(toStringify), new Error('The value "true" cannot be converted to a time.'))
t.assert.equal(validate(toStringify), false)
})
test('should serialize also an invalid string value, even if it is not a valid time', (t) => {
@@ -634,6 +634,6 @@ test('should serialize also an invalid string value, even if it is not a valid t
const stringify = build(schema)
const output = stringify(toStringify)
t.equal(output, JSON.stringify(toStringify))
t.not(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.equal(validate(JSON.parse(output)), false, 'valid schema')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const fjs = require('..')
const Ajv = require('ajv').default
@@ -24,11 +24,11 @@ test('activate debug mode', t => {
t.plan(5)
const debugMode = build({ debugMode: true })
t.type(debugMode, 'object')
t.ok(debugMode.ajv instanceof Ajv)
t.ok(debugMode.validator instanceof Validator)
t.ok(debugMode.serializer instanceof Serializer)
t.type(debugMode.code, 'string')
t.assert.ok(typeof debugMode === 'object')
t.assert.ok(debugMode.ajv instanceof Ajv)
t.assert.ok(debugMode.validator instanceof Validator)
t.assert.ok(debugMode.serializer instanceof Serializer)
t.assert.ok(typeof debugMode.code === 'string')
})
test('activate debug mode truthy', t => {
@@ -36,26 +36,26 @@ test('activate debug mode truthy', t => {
const debugMode = build({ debugMode: 'yes' })
t.type(debugMode, 'object')
t.type(debugMode.code, 'string')
t.ok(debugMode.ajv instanceof Ajv)
t.ok(debugMode.validator instanceof Validator)
t.ok(debugMode.serializer instanceof Serializer)
t.assert.ok(typeof debugMode === 'object')
t.assert.ok(typeof debugMode.code === 'string')
t.assert.ok(debugMode.ajv instanceof Ajv)
t.assert.ok(debugMode.validator instanceof Validator)
t.assert.ok(debugMode.serializer instanceof Serializer)
})
test('to string auto-consistent', t => {
t.plan(6)
const debugMode = build({ debugMode: 1 })
t.type(debugMode, 'object')
t.type(debugMode.code, 'string')
t.ok(debugMode.ajv instanceof Ajv)
t.ok(debugMode.serializer instanceof Serializer)
t.ok(debugMode.validator instanceof Validator)
t.assert.ok(typeof debugMode === 'object')
t.assert.ok(typeof debugMode.code === 'string')
t.assert.ok(debugMode.ajv instanceof Ajv)
t.assert.ok(debugMode.serializer instanceof Serializer)
t.assert.ok(debugMode.validator instanceof Validator)
const compiled = fjs.restore(debugMode)
const tobe = JSON.stringify({ firstName: 'Foo' })
t.same(compiled({ firstName: 'Foo', surname: 'bar' }), tobe, 'surname evicted')
t.assert.equal(compiled({ firstName: 'Foo', surname: 'bar' }), tobe, 'surname evicted')
})
test('to string auto-consistent with ajv', t => {
@@ -75,15 +75,15 @@ test('to string auto-consistent with ajv', t => {
}
}, { debugMode: 1 })
t.type(debugMode, 'object')
t.type(debugMode.code, 'string')
t.ok(debugMode.ajv instanceof Ajv)
t.ok(debugMode.validator instanceof Validator)
t.ok(debugMode.serializer instanceof Serializer)
t.assert.ok(typeof debugMode === 'object')
t.assert.ok(typeof debugMode.code === 'string')
t.assert.ok(debugMode.ajv instanceof Ajv)
t.assert.ok(debugMode.validator instanceof Validator)
t.assert.ok(debugMode.serializer instanceof Serializer)
const compiled = fjs.restore(debugMode)
const tobe = JSON.stringify({ str: 'Foo' })
t.same(compiled({ str: 'Foo', void: 'me' }), tobe)
t.assert.equal(compiled({ str: 'Foo', void: 'me' }), tobe)
})
test('to string auto-consistent with ajv-formats', t => {
@@ -104,12 +104,12 @@ test('to string auto-consistent with ajv-formats', t => {
}
}, { debugMode: 1 })
t.type(debugMode, 'object')
t.assert.ok(typeof debugMode === 'object')
const compiled = fjs.restore(debugMode)
const tobe = JSON.stringify({ str: 'foo@bar.com' })
t.same(compiled({ str: 'foo@bar.com' }), tobe)
t.throws(() => compiled({ str: 'foo' }))
t.assert.equal(compiled({ str: 'foo@bar.com' }), tobe)
t.assert.throws(() => compiled({ str: 'foo' }))
})
test('debug should restore the same serializer instance', t => {
@@ -117,5 +117,5 @@ test('debug should restore the same serializer instance', t => {
const debugMode = fjs({ type: 'integer' }, { debugMode: 1, rounding: 'ceil' })
const compiled = fjs.restore(debugMode)
t.same(compiled(3.95), 4)
t.assert.equal(compiled(3.95), 4)
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
function buildTest (schema, toStringify, expected) {
@@ -11,7 +11,7 @@ function buildTest (schema, toStringify, expected) {
const output = stringify(toStringify)
t.equal(output, JSON.stringify(expected))
t.assert.equal(output, JSON.stringify(expected))
})
}

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('use enum without type', (t) => {
@@ -17,7 +17,7 @@ test('use enum without type', (t) => {
})
const obj = { order: 'asc' }
t.equal('{"order":"asc"}', stringify(obj))
t.assert.equal('{"order":"asc"}', stringify(obj))
})
test('use enum without type', (t) => {
@@ -33,5 +33,5 @@ test('use enum without type', (t) => {
})
const obj = { order: 'asc' }
t.equal('{"order":"asc"}', stringify(obj))
t.assert.equal('{"order":"asc"}', stringify(obj))
})

View File

@@ -1,24 +1,25 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const fjs = require('..')
const schema = {
type: 'object',
properties: {
fullName: { type: 'string' },
phone: { type: 'number' }
test('fix-604', t => {
const schema = {
type: 'object',
properties: {
fullName: { type: 'string' },
phone: { type: 'number' }
}
}
}
const input = {
fullName: 'Jone',
phone: 'phone'
}
const input = {
fullName: 'Jone',
phone: 'phone'
}
const render = fjs(schema)
const render = fjs(schema)
try {
render(input)
} catch (err) {
t.equal(err.message, 'The value "phone" cannot be converted to a number.')
}
t.assert.throws(() => {
render(input)
}, { message: 'The value "phone" cannot be converted to a number.' })
})

View File

@@ -1,6 +1,6 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
process.env.TZ = 'UTC'
@@ -251,7 +251,7 @@ const deepFoobarOutput = JSON.stringify({
})
const noElseGreetingOutput = JSON.stringify({})
t.test('if-then-else', t => {
test('if-then-else', async t => {
const tests = [
{
name: 'foobar',
@@ -315,20 +315,18 @@ t.test('if-then-else', t => {
}
]
tests.forEach(test => {
t.test(test.name + ' - normal', t => {
for (const { name, schema, input, expected } of tests) {
await t.test(name + ' - normal', async t => {
t.plan(1)
const stringify = build(JSON.parse(JSON.stringify(test.schema)), { ajv: { strictTypes: false } })
const serialized = stringify(test.input)
t.equal(serialized, test.expected)
const stringify = build(JSON.parse(JSON.stringify(schema)), { ajv: { strictTypes: false } })
const serialized = stringify(input)
t.assert.equal(serialized, expected)
})
})
t.end()
}
})
t.test('nested if/then', t => {
test('nested if/then', t => {
t.plan(2)
const schema = {
@@ -352,18 +350,18 @@ t.test('nested if/then', t => {
const stringify = build(schema)
t.equal(
t.assert.equal(
stringify({ a: 'A', foo: 'foo', bar: 'bar' }),
JSON.stringify({ a: 'A', bar: 'bar' })
)
t.equal(
t.assert.equal(
stringify({ a: 'A', foo: 'foo', bar: 'bar', foo1: 'foo1', bar1: 'bar1' }),
JSON.stringify({ a: 'A', bar: 'bar', bar1: 'bar1' })
)
})
t.test('if/else with string format', (t) => {
test('if/else with string format', (t) => {
t.plan(2)
const schema = {
@@ -376,11 +374,11 @@ t.test('if/else with string format', (t) => {
const date = new Date(1674263005800)
t.equal(stringify(date), '"2023-01-21"')
t.equal(stringify('Invalid'), '"Invalid"')
t.assert.equal(stringify(date), '"2023-01-21"')
t.assert.equal(stringify('Invalid'), '"Invalid"')
})
t.test('if/else with const integers', (t) => {
test('if/else with const integers', (t) => {
t.plan(2)
const schema = {
@@ -392,11 +390,11 @@ t.test('if/else with const integers', (t) => {
const stringify = build(schema)
t.equal(stringify(100.32), '66')
t.equal(stringify(10.12), '33')
t.assert.equal(stringify(100.32), '66')
t.assert.equal(stringify(10.12), '33')
})
t.test('if/else with array', (t) => {
test('if/else with array', (t) => {
t.plan(2)
const schema = {
@@ -408,11 +406,11 @@ t.test('if/else with array', (t) => {
const stringify = build(schema)
t.equal(stringify(['1']), JSON.stringify(['1']))
t.equal(stringify(['1', '2']), JSON.stringify([1, 2]))
t.assert.equal(stringify(['1']), JSON.stringify(['1']))
t.assert.equal(stringify(['1', '2']), JSON.stringify([1, 2]))
})
t.test('external recursive if/then/else', (t) => {
test('external recursive if/then/else', (t) => {
t.plan(1)
const externalSchema = {
@@ -466,5 +464,5 @@ t.test('external recursive if/then/else', (t) => {
}
}
const stringify = build(schema, { schema: { externalSchema } })
t.equal(stringify(data), '{"a":{"base":"a","bar":"42"},"b":{"base":"b","baz":"43"}}')
t.assert.equal(stringify(data), '{"a":{"base":"a","bar":"42"},"b":{"base":"b","baz":"43"}}')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
@@ -12,9 +12,9 @@ function buildTest (schema, toStringify) {
const stringify = build(schema)
const output = stringify(toStringify)
t.same(JSON.parse(output), toStringify)
t.equal(output, JSON.stringify(toStringify))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.deepStrictEqual(JSON.parse(output), toStringify)
t.assert.equal(output, JSON.stringify(toStringify))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
}

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('Finite numbers', t => {
@@ -17,7 +17,7 @@ test('Finite numbers', t => {
const stringify = build(schema)
values.forEach(v => t.equal(stringify(v), JSON.stringify(v)))
values.forEach(v => t.assert.equal(stringify(v), JSON.stringify(v)))
})
test('Infinite integers', t => {
@@ -35,7 +35,7 @@ test('Infinite integers', t => {
try {
stringify(v)
} catch (err) {
t.equal(err.message, `The value "${v}" cannot be converted to an integer.`)
t.assert.equal(err.message, `The value "${v}" cannot be converted to an integer.`)
}
})
})
@@ -51,5 +51,5 @@ test('Infinite numbers', t => {
const stringify = build(schema)
values.forEach(v => t.equal(stringify(v), JSON.stringify(v)))
values.forEach(v => t.assert.equal(stringify(v), JSON.stringify(v)))
})

View File

@@ -1,7 +1,7 @@
'use strict'
const t = require('tap')
const test = t.test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
const ROUNDING_TYPES = ['ceil', 'floor', 'round']
@@ -18,8 +18,8 @@ test('render an integer as JSON', (t) => {
const stringify = build(schema)
const output = stringify(1615)
t.equal(output, '1615')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '1615')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a float as an integer', (t) => {
@@ -30,8 +30,8 @@ test('render a float as an integer', (t) => {
type: 'integer'
}, { rounding: 'foobar' })
} catch (error) {
t.ok(error)
t.equal(error.message, 'Unsupported integer rounding method foobar')
t.assert.ok(error)
t.assert.equal(error.message, 'Unsupported integer rounding method foobar')
}
})
@@ -44,7 +44,7 @@ test('throws on NaN', (t) => {
}
const stringify = build(schema)
t.throws(() => stringify(NaN), new Error('The value "NaN" cannot be converted to an integer.'))
t.assert.throws(() => stringify(NaN), new Error('The value "NaN" cannot be converted to an integer.'))
})
test('render a float as an integer', (t) => {
@@ -87,8 +87,8 @@ test('render a float as an integer', (t) => {
const stringify = build(schema, { rounding })
const str = stringify(input)
t.equal(str, output)
t.ok(validate(JSON.parse(str)), 'valid schema')
t.assert.equal(str, output)
t.assert.ok(validate(JSON.parse(str)), 'valid schema')
}
})
@@ -111,8 +111,8 @@ test('render an object with an integer as JSON', (t) => {
id: 1615
})
t.equal(output, '{"id":1615}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"id":1615}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render an array with an integer as JSON', (t) => {
@@ -130,8 +130,8 @@ test('render an array with an integer as JSON', (t) => {
const stringify = build(schema)
const output = stringify([1615])
t.equal(output, '[1615]')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '[1615]')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render an object with an additionalProperty of type integer as JSON', (t) => {
@@ -151,8 +151,8 @@ test('render an object with an additionalProperty of type integer as JSON', (t)
num: 1615
})
t.equal(output, '{"num":1615}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"num":1615}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('should round integer object parameter', t => {
@@ -163,8 +163,8 @@ test('should round integer object parameter', t => {
const stringify = build(schema, { rounding: 'ceil' })
const output = stringify({ magic: 4.2 })
t.equal(output, '{"magic":5}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{"magic":5}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('should not stringify a property if it does not exist', t => {
@@ -175,8 +175,8 @@ test('should not stringify a property if it does not exist', t => {
const stringify = build(schema)
const output = stringify({})
t.equal(output, '{}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
ROUNDING_TYPES.forEach((rounding) => {
@@ -188,7 +188,7 @@ ROUNDING_TYPES.forEach((rounding) => {
const stringify = build(schema, { rounding })
const output = stringify({})
t.equal(output, '{}')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '{}')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
})

View File

@@ -1,12 +1,12 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
// Covers issue #139
test('Should throw on invalid schema', t => {
t.plan(2)
try {
t.plan(1)
t.assert.throws(() => {
build({}, {
schema: {
invalid: {
@@ -14,9 +14,5 @@ test('Should throw on invalid schema', t => {
}
}
})
t.fail('should be an invalid schema')
} catch (err) {
t.match(err.message, /^"invalid" schema is invalid:.*/, 'Schema contains invalid key')
t.ok(err)
}
}, { message: /^"invalid" schema is invalid:.*/ })
})

View File

@@ -1,6 +1,6 @@
'use strict'
const { test } = require('tap')
const { test } = require('node:test')
const build = require('..')
test('should validate anyOf after allOf merge', (t) => {
@@ -51,7 +51,7 @@ test('should validate anyOf after allOf merge', (t) => {
const stringify = build(schema)
t.equal(
t.assert.equal(
stringify({ name: 'foo', union: 'a8f1cc50-5530-5c62-9109-5ba9589a6ae1' }),
'{"name":"foo","union":"a8f1cc50-5530-5c62-9109-5ba9589a6ae1"}')
})

View File

@@ -0,0 +1,107 @@
'use strict'
const { test } = require('node:test')
const build = require('..')
test('serialize string with newlines - issue #793', (t) => {
t.plan(2)
const schema = {
type: 'object',
properties: {
message: {
type: 'string'
}
}
}
const input = {
message: `This is a string
with multiple
newlines in it
Foo`
}
const stringify = build(schema)
const output = stringify(input)
// The output should be valid JSON
t.assert.doesNotThrow(() => {
JSON.parse(output)
}, 'JSON output should be parseable')
// The parsed output should match the input
const parsed = JSON.parse(output)
t.assert.equal(parsed.message, input.message)
})
test('serialize string with various newline characters - issue #793', (t) => {
t.plan(4)
const schema = {
type: 'string'
}
const stringify = build(schema)
// Test \n (line feed)
const inputLF = 'line1\nline2'
const outputLF = stringify(inputLF)
t.assert.equal(JSON.parse(outputLF), inputLF)
// Test \r (carriage return)
const inputCR = 'line1\rline2'
const outputCR = stringify(inputCR)
t.assert.equal(JSON.parse(outputCR), inputCR)
// Test \r\n (CRLF)
const inputCRLF = 'line1\r\nline2'
const outputCRLF = stringify(inputCRLF)
t.assert.equal(JSON.parse(outputCRLF), inputCRLF)
// Test mixed newlines
const inputMixed = 'line1\nline2\rline3\r\nline4'
const outputMixed = stringify(inputMixed)
t.assert.equal(JSON.parse(outputMixed), inputMixed)
})
test('serialize object with newlines in multiple properties - issue #793', (t) => {
t.plan(2)
const schema = {
type: 'object',
properties: {
message: {
type: 'string'
},
description: {
type: 'string'
},
timestamp: {
type: 'string'
}
}
}
const input = {
message: `This is a string
with multiple
newlines in it
Foo`,
description: 'This JSON response contains a field with newline characters',
timestamp: new Date().toISOString()
}
const stringify = build(schema)
const output = stringify(input)
// The output should be valid JSON
t.assert.doesNotThrow(() => {
JSON.parse(output)
}, 'JSON output should be parseable')
// The parsed output should match the input
const parsed = JSON.parse(output)
t.assert.deepEqual(parsed, input)
})

View File

@@ -0,0 +1,177 @@
'use strict'
const { test } = require('node:test')
const build = require('..')
test('serialize string with quotes - issue #794', (t) => {
t.plan(2)
const schema = {
type: 'object',
properties: {
message: {
type: 'string'
}
}
}
const input = {
message: 'Error: Property "name" is required'
}
const stringify = build(schema)
const output = stringify(input)
// The output should be valid JSON
t.assert.doesNotThrow(() => {
JSON.parse(output)
}, 'JSON output should be parseable')
// The parsed output should match the input
const parsed = JSON.parse(output)
t.assert.equal(parsed.message, input.message)
})
test('serialize string with various quote types - issue #794', (t) => {
t.plan(6)
const schema = {
type: 'string'
}
const stringify = build(schema)
// Test double quotes
const inputDoubleQuotes = 'Property "name" is required'
const outputDoubleQuotes = stringify(inputDoubleQuotes)
t.assert.doesNotThrow(() => JSON.parse(outputDoubleQuotes))
t.assert.equal(JSON.parse(outputDoubleQuotes), inputDoubleQuotes)
// Test single quotes (should be fine but test for completeness)
const inputSingleQuotes = "Property 'name' is required"
const outputSingleQuotes = stringify(inputSingleQuotes)
t.assert.doesNotThrow(() => JSON.parse(outputSingleQuotes))
t.assert.equal(JSON.parse(outputSingleQuotes), inputSingleQuotes)
// Test mixed quotes
const inputMixedQuotes = 'Error: "Property \'name\' is required"'
const outputMixedQuotes = stringify(inputMixedQuotes)
t.assert.doesNotThrow(() => JSON.parse(outputMixedQuotes))
t.assert.equal(JSON.parse(outputMixedQuotes), inputMixedQuotes)
})
test('serialize error-like object with quotes in message - issue #794', (t) => {
t.plan(2)
const schema = {
type: 'object',
properties: {
error: {
type: 'object',
properties: {
message: {
type: 'string'
},
code: {
type: 'string'
}
}
}
}
}
const input = {
error: {
message: 'Validation failed: Property "email" must be a valid email address',
code: 'VALIDATION_ERROR'
}
}
const stringify = build(schema)
const output = stringify(input)
// The output should be valid JSON
t.assert.doesNotThrow(() => {
JSON.parse(output)
}, 'JSON output should be parseable')
// The parsed output should match the input
const parsed = JSON.parse(output)
t.assert.deepEqual(parsed, input)
})
test('serialize validation errors array with quotes - issue #794', (t) => {
t.plan(2)
const schema = {
type: 'object',
properties: {
errors: {
type: 'array',
items: {
type: 'object',
properties: {
message: {
type: 'string'
},
field: {
type: 'string'
}
}
}
}
}
}
const input = {
errors: [
{
message: 'Property "name" is required',
field: 'name'
},
{
message: 'Property "email" must be a valid email address',
field: 'email'
},
{
message: 'Value must be between "1" and "100"',
field: 'age'
}
]
}
const stringify = build(schema)
const output = stringify(input)
// The output should be valid JSON
t.assert.doesNotThrow(() => {
JSON.parse(output)
}, 'JSON output should be parseable')
// The parsed output should match the input
const parsed = JSON.parse(output)
t.assert.deepEqual(parsed, input)
})
test('serialize string with backslashes and quotes - issue #794', (t) => {
t.plan(4)
const schema = {
type: 'string'
}
const stringify = build(schema)
// Test backslashes
const inputBackslash = 'Path: C:\\Users\\test\\file.json'
const outputBackslash = stringify(inputBackslash)
t.assert.doesNotThrow(() => JSON.parse(outputBackslash))
t.assert.equal(JSON.parse(outputBackslash), inputBackslash)
// Test combination of backslashes and quotes
const inputMixed = 'Error: Could not find file "C:\\Users\\test\\config.json"'
const outputMixed = stringify(inputMixed)
t.assert.doesNotThrow(() => JSON.parse(outputMixed))
t.assert.equal(JSON.parse(outputMixed), inputMixed)
})

View File

@@ -0,0 +1,57 @@
'use strict'
const { test } = require('node:test')
const build = require('..')
test('additionalProperties: false', (t) => {
t.plan(1)
const stringify = build({
title: 'additionalProperties',
type: 'object',
properties: {
foo: {
type: 'string'
}
},
additionalProperties: false
})
const obj = { foo: 'a', bar: 'b', baz: 'c' }
t.assert.equal(stringify(obj), '{"foo":"a"}')
})
test('additionalProperties: {}', (t) => {
t.plan(1)
const stringify = build({
title: 'additionalProperties',
type: 'object',
properties: {
foo: {
type: 'string'
}
},
additionalProperties: {}
})
const obj = { foo: 'a', bar: 'b', baz: 'c' }
t.assert.equal(stringify(obj), '{"foo":"a","bar":"b","baz":"c"}')
})
test('additionalProperties: {type: string}', (t) => {
t.plan(1)
const stringify = build({
title: 'additionalProperties',
type: 'object',
properties: {
foo: {
type: 'string'
}
},
additionalProperties: {
type: 'string'
}
})
const obj = { foo: 'a', bar: 'b', baz: 'c' }
t.assert.equal(stringify(obj), '{"foo":"a","bar":"b","baz":"c"}')
})

View File

@@ -5,6 +5,6 @@ It contains a set of JSON objects that implementors of JSON Schema validation li
# How to add another test case?
1. Navigate to [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite/tree/master/tests)
1. Navigate to [JSON-Schema-Test-Suite](https://github.com/json-schema-org/JSON-Schema-Test-Suite/tree/main/tests)
2. Choose a draft `draft4`, `draft6` or `draft7`
3. Copy & paste the `test-case.json` into the project and add a test like the one in `draft4.test.js` (a minimal sketch follows below)
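
A minimal sketch of what step 3 might produce, modeled on the `required` suites further down in this diff; the `test-case.json` filename, the `draft7` folder, and the test title are placeholders for whatever case is copied in:

```js
'use strict'

const { test } = require('node:test')
const { counTests, runTests } = require('./util')
// Hypothetical suite copied in step 3; pick the matching draft folder.
const testCaseSuite = require('./draft7/test-case.json')

test('test-case', async (t) => {
  // Descriptions of cases to skip, if any (see required.test.js for an example).
  const skippedTests = []
  t.plan(counTests(testCaseSuite, skippedTests))
  await runTests(t, testCaseSuite, skippedTests)
})
```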

View File

@@ -1,12 +1,12 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const { counTests, runTests } = require('./util')
const requiredTestSuite = require('./draft4/required.json')
test('required', (t) => {
test('required', async (t) => {
const skippedTests = ['ignores arrays', 'ignores strings', 'ignores other non-objects']
t.plan(counTests(requiredTestSuite, skippedTests))
runTests(t, requiredTestSuite, skippedTests)
await runTests(t, requiredTestSuite, skippedTests)
})

View File

@@ -1,12 +1,12 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const { counTests, runTests } = require('./util')
const requiredTestSuite = require('./draft6/required.json')
test('required', (t) => {
test('required', async (t) => {
const skippedTests = ['ignores arrays', 'ignores strings', 'ignores other non-objects']
t.plan(counTests(requiredTestSuite, skippedTests))
runTests(t, requiredTestSuite, skippedTests)
await runTests(t, requiredTestSuite, skippedTests)
})

View File

@@ -1,12 +1,12 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const { counTests, runTests } = require('./util')
const requiredTestSuite = require('./draft7/required.json')
test('required', (t) => {
test('required', async (t) => {
const skippedTests = ['ignores arrays', 'ignores strings', 'ignores other non-objects']
t.plan(counTests(requiredTestSuite, skippedTests))
runTests(t, requiredTestSuite, skippedTests)
await runTests(t, requiredTestSuite, skippedTests)
})

View File

@@ -2,25 +2,22 @@
const build = require('../..')
function runTests (t, testsuite, skippedTests) {
async function runTests (t, testsuite, skippedTests) {
for (const scenario of testsuite) {
const stringify = build(scenario.schema)
for (const test of scenario.tests) {
if (skippedTests.indexOf(test.description) !== -1) {
t.comment('skip %s', test.description)
console.log(`skip ${test.description}`)
continue
}
t.test(test.description, (t) => {
await t.test(test.description, (t) => {
t.plan(1)
try {
const output = stringify(test.data)
t.equal(output, JSON.stringify(test.data), 'compare payloads')
t.assert.equal(output, JSON.stringify(test.data), 'compare payloads')
} catch (err) {
if (test.valid === false) {
t.pass('payload is invalid')
} else {
t.fail('payload should be valid: ' + err.message)
}
t.assert.ok(test.valid === false, 'payload should be valid: ' + err.message)
}
})
}

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('missing values', (t) => {
@@ -22,9 +22,9 @@ test('missing values', (t) => {
}
})
t.equal('{"val":"value"}', stringify({ val: 'value' }))
t.equal('{"str":"string","val":"value"}', stringify({ str: 'string', val: 'value' }))
t.equal('{"str":"string","num":42,"val":"value"}', stringify({ str: 'string', num: 42, val: 'value' }))
t.assert.equal('{"val":"value"}', stringify({ val: 'value' }))
t.assert.equal('{"str":"string","val":"value"}', stringify({ str: 'string', val: 'value' }))
t.assert.equal('{"str":"string","num":42,"val":"value"}', stringify({ str: 'string', num: 42, val: 'value' }))
})
test('handle null when value should be string', (t) => {
@@ -39,7 +39,7 @@ test('handle null when value should be string', (t) => {
}
})
t.equal('{"str":""}', stringify({ str: null }))
t.assert.equal('{"str":""}', stringify({ str: null }))
})
test('handle null when value should be integer', (t) => {
@@ -54,7 +54,7 @@ test('handle null when value should be integer', (t) => {
}
})
t.equal('{"int":0}', stringify({ int: null }))
t.assert.equal('{"int":0}', stringify({ int: null }))
})
test('handle null when value should be number', (t) => {
@@ -69,7 +69,7 @@ test('handle null when value should be number', (t) => {
}
})
t.equal('{"num":0}', stringify({ num: null }))
t.assert.equal('{"num":0}', stringify({ num: null }))
})
test('handle null when value should be boolean', (t) => {
@@ -84,5 +84,5 @@ test('handle null when value should be boolean', (t) => {
}
})
t.equal('{"bool":false}', stringify({ bool: null }))
t.assert.equal('{"bool":false}', stringify({ bool: null }))
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('should throw a TypeError with the path to the key of the invalid value', (t) => {
@@ -15,5 +15,5 @@ test('should throw a TypeError with the path to the key of the invalid value', (
}
const stringify = build(schema)
t.throws(() => stringify({ num: { bla: 123 } }), new TypeError('The value of \'#/properties/num\' does not match schema definition.'))
t.assert.throws(() => stringify({ num: { bla: 123 } }), new TypeError('The value of \'#/properties/num\' does not match schema definition.'))
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('nested objects with same properties', (t) => {
@@ -28,7 +28,7 @@ test('nested objects with same properties', (t) => {
numberProperty: 42
}
})
t.equal(value, '{"stringProperty":"string1","objectProperty":{"stringProperty":"string2","numberProperty":42}}')
t.assert.equal(value, '{"stringProperty":"string1","objectProperty":{"stringProperty":"string2","numberProperty":42}}')
})
test('names collision', (t) => {
@@ -59,5 +59,5 @@ test('names collision', (t) => {
tes: { b: 'b', t: {} }
}
t.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(data), JSON.stringify(data))
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
@@ -113,7 +113,7 @@ Object.keys(testSet).forEach(key => {
const stringifier = build(schema, extraOptions)
const result = stringifier(data)
t.same(JSON.parse(result), expected)
t.assert.deepStrictEqual(JSON.parse(result), expected)
})
})
@@ -129,8 +129,8 @@ test('handle nullable number correctly', (t) => {
const data = null
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.equal(JSON.parse(result), data)
})
test('handle nullable integer correctly', (t) => {
@@ -145,8 +145,8 @@ test('handle nullable integer correctly', (t) => {
const data = null
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.equal(JSON.parse(result), data)
})
test('handle nullable boolean correctly', (t) => {
@@ -161,8 +161,8 @@ test('handle nullable boolean correctly', (t) => {
const data = null
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.equal(JSON.parse(result), data)
})
test('handle nullable string correctly', (t) => {
@@ -177,8 +177,8 @@ test('handle nullable string correctly', (t) => {
const data = null
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.equal(JSON.parse(result), data)
})
test('handle nullable date-time correctly', (t) => {
@@ -194,8 +194,8 @@ test('handle nullable date-time correctly', (t) => {
const data = null
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.equal(JSON.parse(result), data)
})
test('handle nullable date correctly', (t) => {
@@ -211,8 +211,8 @@ test('handle nullable date correctly', (t) => {
const data = null
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.equal(JSON.parse(result), data)
})
test('handle nullable time correctly', (t) => {
@@ -228,8 +228,8 @@ test('handle nullable time correctly', (t) => {
const data = null
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.equal(JSON.parse(result), data)
})
test('large array of nullable strings with default mechanism', (t) => {
@@ -258,8 +258,8 @@ test('large array of nullable strings with default mechanism', (t) => {
const data = { ids: new Array(2e4).fill(null) }
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.deepStrictEqual(JSON.parse(result), data)
})
test('large array of nullable date-time strings with default mechanism', (t) => {
@@ -289,8 +289,8 @@ test('large array of nullable date-time strings with default mechanism', (t) =>
const data = { ids: new Array(2e4).fill(null) }
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.deepStrictEqual(JSON.parse(result), data)
})
test('large array of nullable date-time strings with default mechanism', (t) => {
@@ -320,8 +320,8 @@ test('large array of nullable date-time strings with default mechanism', (t) =>
const data = { ids: new Array(2e4).fill(null) }
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.deepStrictEqual(JSON.parse(result), data)
})
test('large array of nullable date-time strings with default mechanism', (t) => {
@@ -351,8 +351,8 @@ test('large array of nullable date-time strings with default mechanism', (t) =>
const data = { ids: new Array(2e4).fill(null) }
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.deepStrictEqual(JSON.parse(result), data)
})
test('large array of nullable numbers with default mechanism', (t) => {
@@ -381,8 +381,8 @@ test('large array of nullable numbers with default mechanism', (t) => {
const data = { ids: new Array(2e4).fill(null) }
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.deepStrictEqual(JSON.parse(result), data)
})
test('large array of nullable integers with default mechanism', (t) => {
@@ -411,8 +411,8 @@ test('large array of nullable integers with default mechanism', (t) => {
const data = { ids: new Array(2e4).fill(null) }
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.deepStrictEqual(JSON.parse(result), data)
})
test('large array of nullable booleans with default mechanism', (t) => {
@@ -441,8 +441,8 @@ test('large array of nullable booleans with default mechanism', (t) => {
const data = { ids: new Array(2e4).fill(null) }
const result = stringify(data)
t.same(result, JSON.stringify(data))
t.same(JSON.parse(result), data)
t.assert.equal(result, JSON.stringify(data))
t.assert.deepStrictEqual(JSON.parse(result), data)
})
test('nullable type in the schema', (t) => {
@@ -461,8 +461,8 @@ test('nullable type in the schema', (t) => {
const data = { foo: 'bar' }
t.same(stringify(data), JSON.stringify(data))
t.same(stringify(null), JSON.stringify(null))
t.assert.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(null), JSON.stringify(null))
})
test('throw an error if the value doesn\'t match the type', (t) => {
@@ -493,10 +493,10 @@ test('throw an error if the value doesn\'t match the type', (t) => {
const stringify = build(schema)
const validData = { data: [1, 'testing'] }
t.equal(stringify(validData), JSON.stringify(validData))
t.assert.equal(stringify(validData), JSON.stringify(validData))
const invalidData = { data: [false, 'testing'] }
t.throws(() => stringify(invalidData))
t.assert.throws(() => stringify(invalidData))
})
test('nullable value in oneOf', (t) => {
@@ -539,5 +539,5 @@ test('nullable value in oneOf', (t) => {
const stringify = build(schema)
const data = { data: [{ job: null }] }
t.equal(stringify(data), JSON.stringify(data))
t.assert.equal(stringify(data), JSON.stringify(data))
})

View File

@@ -1,6 +1,6 @@
'use strict'
const { test } = require('tap')
const { test } = require('node:test')
const build = require('..')
test('object with multiple types field', (t) => {
@@ -21,8 +21,8 @@ test('object with multiple types field', (t) => {
}
const stringify = build(schema)
t.equal(stringify({ str: 'string' }), '{"str":"string"}')
t.equal(stringify({ str: true }), '{"str":true}')
t.assert.equal(stringify({ str: 'string' }), '{"str":"string"}')
t.assert.equal(stringify({ str: true }), '{"str":true}')
})
test('object with field of type object or null', (t) => {
@@ -48,9 +48,9 @@ test('object with field of type object or null', (t) => {
}
const stringify = build(schema)
t.equal(stringify({ prop: null }), '{"prop":null}')
t.assert.equal(stringify({ prop: null }), '{"prop":null}')
t.equal(stringify({
t.assert.equal(stringify({
prop: {
str: 'string', remove: 'this'
}
@@ -80,11 +80,11 @@ test('object with field of type object or array', (t) => {
}
const stringify = build(schema)
t.equal(stringify({
t.assert.equal(stringify({
prop: { str: 'string' }
}), '{"prop":{"str":"string"}}')
t.equal(stringify({
t.assert.equal(stringify({
prop: ['string']
}), '{"prop":["string"]}')
})
@@ -104,7 +104,7 @@ test('object with field of type string and coercion disable ', (t) => {
}
}
const stringify = build(schema)
t.throws(() => stringify({ str: 1 }))
t.assert.throws(() => stringify({ str: 1 }))
})
test('object with field of type string and coercion enable ', (t) => {
@@ -132,7 +132,7 @@ test('object with field of type string and coercion enable ', (t) => {
const value = stringify({
str: 1
})
t.equal(value, '{"str":"1"}')
t.assert.equal(value, '{"str":"1"}')
})
test('object with field with type union of multiple objects', (t) => {
@@ -166,9 +166,9 @@ test('object with field with type union of multiple objects', (t) => {
const stringify = build(schema)
t.equal(stringify({ oneOfSchema: { baz: 5 } }), '{"oneOfSchema":{"baz":5}}')
t.assert.equal(stringify({ oneOfSchema: { baz: 5 } }), '{"oneOfSchema":{"baz":5}}')
t.equal(stringify({ oneOfSchema: { bar: 'foo' } }), '{"oneOfSchema":{"bar":"foo"}}')
t.assert.equal(stringify({ oneOfSchema: { bar: 'foo' } }), '{"oneOfSchema":{"bar":"foo"}}')
})
test('null value in schema', (t) => {
@@ -210,9 +210,9 @@ test('oneOf and $ref together', (t) => {
const stringify = build(schema)
t.equal(stringify({ cs: 'franco' }), '{"cs":"franco"}')
t.assert.equal(stringify({ cs: 'franco' }), '{"cs":"franco"}')
t.equal(stringify({ cs: true }), '{"cs":true}')
t.assert.equal(stringify({ cs: true }), '{"cs":true}')
})
test('oneOf and $ref: 2 levels are fine', (t) => {
@@ -250,7 +250,7 @@ test('oneOf and $ref: 2 levels are fine', (t) => {
const value = stringify({
cs: 3
})
t.equal(value, '{"cs":3}')
t.assert.equal(value, '{"cs":3}')
})
test('oneOf and $ref: multiple levels should throw at build.', (t) => {
@@ -289,9 +289,9 @@ test('oneOf and $ref: multiple levels should throw at build.', (t) => {
const stringify = build(schema)
t.equal(stringify({ cs: 3 }), '{"cs":3}')
t.equal(stringify({ cs: true }), '{"cs":true}')
t.equal(stringify({ cs: 'pippo' }), '{"cs":"pippo"}')
t.assert.equal(stringify({ cs: 3 }), '{"cs":3}')
t.assert.equal(stringify({ cs: true }), '{"cs":true}')
t.assert.equal(stringify({ cs: 'pippo' }), '{"cs":"pippo"}')
})
test('oneOf and $ref - multiple external $ref', (t) => {
@@ -344,10 +344,8 @@ test('oneOf and $ref - multiple external $ref', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"prop":{"prop2":"test"}}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"prop":{"prop2":"test"}}}')
})
test('oneOf with enum with more than 100 entries', (t) => {
@@ -369,7 +367,7 @@ test('oneOf with enum with more than 100 entries', (t) => {
const stringify = build(schema)
const value = stringify(['EUR', 'USD', null])
t.equal(value, '["EUR","USD",null]')
t.assert.equal(value, '["EUR","USD",null]')
})
test('oneOf object with field of type string with format or null', (t) => {
@@ -393,7 +391,7 @@ test('oneOf object with field of type string with format or null', (t) => {
const withOneOfStringify = build(withOneOfSchema)
t.equal(withOneOfStringify({
t.assert.equal(withOneOfStringify({
prop: toStringify
}), `{"prop":"${toStringify.toISOString()}"}`)
})
@@ -425,9 +423,9 @@ test('one array item match oneOf types', (t) => {
const stringify = build(schema)
t.equal(stringify({ data: ['foo'] }), '{"data":["foo"]}')
t.equal(stringify({ data: [1] }), '{"data":[1]}')
t.throws(() => stringify({ data: [false, 'foo'] }))
t.assert.equal(stringify({ data: ['foo'] }), '{"data":["foo"]}')
t.assert.equal(stringify({ data: [1] }), '{"data":[1]}')
t.assert.throws(() => stringify({ data: [false, 'foo'] }))
})
test('some array items match oneOf types', (t) => {
@@ -457,8 +455,8 @@ test('some array items match oneOf types', (t) => {
const stringify = build(schema)
t.equal(stringify({ data: ['foo', 5] }), '{"data":["foo",5]}')
t.throws(() => stringify({ data: [false, 'foo', true, 5] }))
t.assert.equal(stringify({ data: ['foo', 5] }), '{"data":["foo",5]}')
t.assert.throws(() => stringify({ data: [false, 'foo', true, 5] }))
})
test('all array items does not match oneOf types', (t) => {
@@ -488,5 +486,5 @@ test('all array items does not match oneOf types', (t) => {
const stringify = build(schema)
t.throws(() => stringify({ data: [null, false, true, undefined, [], {}] }))
t.assert.throws(() => stringify({ data: [null, false, true, undefined, [], {}] }))
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('patternProperties', (t) => {
@@ -21,7 +21,7 @@ test('patternProperties', (t) => {
})
const obj = { str: 'test', foo: 42, ofoo: true, foof: 'string', objfoo: { a: true }, notMe: false }
t.equal(stringify(obj), '{"str":"test","foo":"42","ofoo":"true","foof":"string","objfoo":"[object Object]"}')
t.assert.equal(stringify(obj), '{"str":"test","foo":"42","ofoo":"true","foof":"string","objfoo":"[object Object]"}')
})
test('patternProperties should not change properties', (t) => {
@@ -42,7 +42,7 @@ test('patternProperties should not change properties', (t) => {
})
const obj = { foo: '42', ofoo: 42 }
t.equal(stringify(obj), '{"foo":"42","ofoo":42}')
t.assert.equal(stringify(obj), '{"foo":"42","ofoo":42}')
})
test('patternProperties - string coerce', (t) => {
@@ -59,7 +59,7 @@ test('patternProperties - string coerce', (t) => {
})
const obj = { foo: true, ofoo: 42, arrfoo: ['array', 'test'], objfoo: { a: 'world' } }
t.equal(stringify(obj), '{"foo":"true","ofoo":"42","arrfoo":"array,test","objfoo":"[object Object]"}')
t.assert.equal(stringify(obj), '{"foo":"true","ofoo":"42","arrfoo":"array,test","objfoo":"[object Object]"}')
})
test('patternProperties - number coerce', (t) => {
@@ -76,14 +76,14 @@ test('patternProperties - number coerce', (t) => {
})
const coercibleValues = { foo: true, ofoo: '42' }
t.equal(stringify(coercibleValues), '{"foo":1,"ofoo":42}')
t.assert.equal(stringify(coercibleValues), '{"foo":1,"ofoo":42}')
const incoercibleValues = { xfoo: 'string', arrfoo: [1, 2], objfoo: { num: 42 } }
try {
stringify(incoercibleValues)
t.fail('should throw an error')
} catch (err) {
t.ok(err)
t.assert.ok(err)
}
})
@@ -101,7 +101,7 @@ test('patternProperties - boolean coerce', (t) => {
})
const obj = { foo: 'true', ofoo: 0, arrfoo: [1, 2], objfoo: { a: true } }
t.equal(stringify(obj), '{"foo":true,"ofoo":false,"arrfoo":true,"objfoo":true}')
t.assert.equal(stringify(obj), '{"foo":true,"ofoo":false,"arrfoo":true,"objfoo":true}')
})
test('patternProperties - object coerce', (t) => {
@@ -123,7 +123,7 @@ test('patternProperties - object coerce', (t) => {
})
const obj = { objfoo: { answer: 42 } }
t.equal(stringify(obj), '{"objfoo":{"answer":42}}')
t.assert.equal(stringify(obj), '{"objfoo":{"answer":42}}')
})
test('patternProperties - array coerce', (t) => {
@@ -143,16 +143,16 @@ test('patternProperties - array coerce', (t) => {
})
const coercibleValues = { arrfoo: [1, 2] }
t.equal(stringify(coercibleValues), '{"arrfoo":["1","2"]}')
t.assert.equal(stringify(coercibleValues), '{"arrfoo":["1","2"]}')
const incoercibleValues = { foo: 'true', ofoo: 0, objfoo: { tyrion: 'lannister' } }
t.throws(() => stringify(incoercibleValues))
t.assert.throws(() => stringify(incoercibleValues))
})
test('patternProperties - fail on invalid regex, handled by ajv', (t) => {
t.plan(1)
t.throws(() => build({
t.assert.throws(() => build({
title: 'check array coerce',
type: 'object',
properties: {},

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('can stringify recursive directory tree (issue #181)', (t) => {
@@ -25,7 +25,7 @@ test('can stringify recursive directory tree (issue #181)', (t) => {
}
const stringify = build(schema)
t.equal(stringify([
t.assert.equal(stringify([
{ name: 'directory 1', subDirectories: [] },
{
name: 'directory 2',
@@ -68,7 +68,7 @@ test('can stringify when recursion in external schema', t => {
})
const value = stringify({ people: { name: 'Elizabeth', children: [{ name: 'Charles' }] } })
t.equal(value, '{"people":{"name":"Elizabeth","children":[{"name":"Charles"}]}}')
t.assert.equal(value, '{"people":{"name":"Elizabeth","children":[{"name":"Charles"}]}}')
})
test('use proper serialize function', t => {
@@ -133,7 +133,7 @@ test('use proper serialize function', t => {
]
}
})
t.equal(value, '{"people":{"name":"Elizabeth","children":[{"name":"Charles","children":[{"name":"William","children":[{"name":"George"},{"name":"Charlotte"}]},{"name":"Harry"}]}]},"directory":{"name":"directory 1","subDirectories":[{"name":"directory 1.1","subDirectories":[]},{"name":"directory 1.2","subDirectories":[{"name":"directory 1.2.1","subDirectories":[]},{"name":"directory 1.2.2","subDirectories":[]}]}]}}')
t.assert.equal(value, '{"people":{"name":"Elizabeth","children":[{"name":"Charles","children":[{"name":"William","children":[{"name":"George"},{"name":"Charlotte"}]},{"name":"Harry"}]}]},"directory":{"name":"directory 1","subDirectories":[{"name":"directory 1.1","subDirectories":[]},{"name":"directory 1.2","subDirectories":[{"name":"directory 1.2.1","subDirectories":[]},{"name":"directory 1.2.2","subDirectories":[]}]}]}}')
})
test('can stringify recursive references in object types (issue #365)', t => {
@@ -176,7 +176,7 @@ test('can stringify recursive references in object types (issue #365)', t => {
}
}
const value = stringify(data)
t.equal(value, '{"category":{"parent":{"parent":{"parent":{"parent":{}}}}}}')
t.assert.equal(value, '{"category":{"parent":{"parent":{"parent":{"parent":{}}}}}}')
})
test('can stringify recursive inline $id references (issue #410)', t => {
@@ -241,5 +241,5 @@ test('can stringify recursive inline $id references (issue #410)', t => {
]
}
const value = stringify(data)
t.equal(value, '{"id":"0","nodes":[{"id":"1","nodes":[{"id":"2","nodes":[{"id":"3","nodes":[]},{"id":"4","nodes":[]},{"id":"5","nodes":[]}]}]},{"id":"6","nodes":[{"id":"7","nodes":[{"id":"8","nodes":[]},{"id":"9","nodes":[]},{"id":"10","nodes":[]}]}]},{"id":"11","nodes":[{"id":"12","nodes":[{"id":"13","nodes":[]},{"id":"14","nodes":[]},{"id":"15","nodes":[]}]}]}]}')
t.assert.equal(value, '{"id":"0","nodes":[{"id":"1","nodes":[{"id":"2","nodes":[{"id":"3","nodes":[]},{"id":"4","nodes":[]},{"id":"5","nodes":[]}]}]},{"id":"6","nodes":[{"id":"7","nodes":[{"id":"8","nodes":[]},{"id":"9","nodes":[]},{"id":"10","nodes":[]}]}]},{"id":"11","nodes":[{"id":"12","nodes":[{"id":"13","nodes":[]},{"id":"14","nodes":[]},{"id":"15","nodes":[]}]}]}]}')
})

View File

@@ -2,7 +2,7 @@
const clone = require('rfdc')({ proto: true })
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('ref internal - properties', (t) => {
@@ -37,10 +37,8 @@ test('ref internal - properties', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"str":"test"}}')
})
test('ref internal - items', (t) => {
@@ -69,10 +67,8 @@ test('ref internal - items', (t) => {
const stringify = build(schema)
const output = stringify(array)
JSON.parse(output)
t.pass()
t.equal(output, '[{"str":"test"}]')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '[{"str":"test"}]')
})
test('ref external - properties', (t) => {
@@ -130,10 +126,8 @@ test('ref external - properties', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"str":"test"},"num":{"int":42},"strPlain":"test","strHash":"test"}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"str":"test"},"num":{"int":42},"strPlain":"test","strHash":"test"}')
})
test('ref internal - patternProperties', (t) => {
@@ -169,10 +163,8 @@ test('ref internal - patternProperties', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"str":"test"}}')
})
test('ref internal - additionalProperties', (t) => {
@@ -206,10 +198,8 @@ test('ref internal - additionalProperties', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"str":"test"}}')
})
test('ref internal - pattern-additional Properties', (t) => {
@@ -251,10 +241,8 @@ test('ref internal - pattern-additional Properties', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"reg":{"str":"test"},"obj":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"reg":{"str":"test"},"obj":{"str":"test"}}')
})
test('ref external - pattern-additional Properties', (t) => {
@@ -302,10 +290,8 @@ test('ref external - pattern-additional Properties', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"reg":{"str":"test"},"obj":{"int":42}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"reg":{"str":"test"},"obj":{"int":42}}')
})
test('ref internal - deepObject schema', (t) => {
@@ -354,10 +340,8 @@ test('ref internal - deepObject schema', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"winter":{"is":{"coming":{"where":"to town"}}}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"winter":{"is":{"coming":{"where":"to town"}}}}')
})
test('ref internal - plain name fragment', (t) => {
@@ -394,10 +378,8 @@ test('ref internal - plain name fragment', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"str":"test"}}')
})
test('ref external - plain name fragment', (t) => {
@@ -453,10 +435,8 @@ test('ref external - plain name fragment', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"first":{"str":"test"},"second":{"int":42}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"first":{"str":"test"},"second":{"int":42}}')
})
test('external reference to $id', (t) => {
@@ -488,10 +468,8 @@ test('external reference to $id', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"first":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"first":{"str":"test"}}')
})
test('external reference to key#id', (t) => {
@@ -523,10 +501,8 @@ test('external reference to key#id', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"first":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"first":{"str":"test"}}')
})
test('external and inner reference', (t) => {
@@ -564,10 +540,8 @@ test('external and inner reference', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"first":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"first":{"str":"test"}}')
})
test('external reference to key', (t) => {
@@ -599,10 +573,8 @@ test('external reference to key', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"first":{"str":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"first":{"str":"test"}}')
})
test('ref external - plain name fragment', (t) => {
@@ -658,10 +630,8 @@ test('ref external - plain name fragment', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"first":{"str":"test"},"second":{"int":42}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"first":{"str":"test"},"second":{"int":42}}')
})
test('ref external - duplicate plain name fragment', (t) => {
@@ -730,10 +700,8 @@ test('ref external - duplicate plain name fragment', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"local":{"prop":"test"},"external":{"prop":true},"other":{"prop":42}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"local":{"prop":"test"},"external":{"prop":true},"other":{"prop":42}}')
})
test('ref external - explicit external plain name fragment must not fallback to other external schemas', (t) => {
@@ -787,14 +755,13 @@ test('ref external - explicit external plain name fragment must not fallback to
}
}
try {
t.assert.throws(() => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.fail()
} catch (e) {
t.pass()
}
}, {
message: 'Cannot find reference "first#wrong"'
})
})
test('ref internal - multiple $ref format', (t) => {
@@ -838,10 +805,8 @@ test('ref internal - multiple $ref format', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"zero":"test","a":"test","b":"test","c":"test","d":"test","e":"test"}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"zero":"test","a":"test","b":"test","c":"test","d":"test","e":"test"}')
})
test('ref external - external schema with internal ref (object property)', (t) => {
@@ -880,10 +845,8 @@ test('ref external - external schema with internal ref (object property)', (t) =
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"prop":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"prop":"test"}}')
})
test('ref external - external schema with internal ref (array items)', (t) => {
@@ -925,10 +888,8 @@ test('ref external - external schema with internal ref (array items)', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"arr":[{"prop":"test"}]}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"arr":[{"prop":"test"}]}')
})
test('ref external - external schema with internal ref (root)', (t) => {
@@ -960,10 +921,8 @@ test('ref external - external schema with internal ref (root)', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"prop":"test"}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"prop":"test"}')
})
test('ref external - external schema with internal ref (pattern properties)', (t) => {
@@ -1002,10 +961,8 @@ test('ref external - external schema with internal ref (pattern properties)', (t
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"obj":{"prop":"test"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":{"prop":"test"}}')
})
test('ref in root internal', (t) => {
@@ -1033,10 +990,8 @@ test('ref in root internal', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"int":42}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"int":42}')
})
test('ref in root external', (t) => {
@@ -1068,10 +1023,8 @@ test('ref in root external', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"int":42}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"int":42}')
})
test('ref in root external multiple times', (t) => {
@@ -1107,10 +1060,8 @@ test('ref in root external multiple times', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"int":42}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"int":42}')
})
test('ref external to relative definition', (t) => {
@@ -1141,10 +1092,8 @@ test('ref external to relative definition', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"fooParent":{"foo":"bar"}}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"fooParent":{"foo":"bar"}}')
})
test('ref to nested ref definition', (t) => {
@@ -1179,14 +1128,12 @@ test('ref to nested ref definition', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, '{"foo":"foo"}')
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"foo":"foo"}')
})
test('Bad key', t => {
t.test('Find match', t => {
test('Bad key', async t => {
await t.test('Find match', t => {
t.plan(1)
try {
build({
@@ -1207,13 +1154,14 @@ test('Bad key', t => {
})
t.fail('Should throw')
} catch (err) {
t.equal(err.message, 'Cannot find reference "#/definitions/porjectId"')
t.assert.equal(err.message, 'Cannot find reference "#/definitions/porjectId"')
}
})
t.test('No match', t => {
await t.test('No match', t => {
t.plan(1)
try {
t.assert.throws(() => {
build({
definitions: {
projectId: {
@@ -1230,15 +1178,12 @@ test('Bad key', t => {
}
}
})
t.fail('Should throw')
} catch (err) {
t.equal(err.message, 'Cannot find reference "#/definitions/foobar"')
}
}, { message: 'Cannot find reference "#/definitions/foobar"' })
})
t.test('Find match (external schema)', t => {
await t.test('Find match (external schema)', t => {
t.plan(1)
try {
t.assert.throws(() => {
build({
type: 'object',
properties: {
@@ -1261,14 +1206,12 @@ test('Bad key', t => {
}
})
t.fail('Should throw')
} catch (err) {
t.equal(err.message, 'Cannot find reference "external#/definitions/porjectId"')
}
}, { message: 'Cannot find reference "external#/definitions/porjectId"' })
})
t.test('No match (external schema)', t => {
await t.test('No match (external schema)', t => {
t.plan(1)
try {
t.assert.throws(() => {
build({
type: 'object',
properties: {
@@ -1290,15 +1233,12 @@ test('Bad key', t => {
}
}
})
t.fail('Should throw')
} catch (err) {
t.equal(err.message, 'Cannot find reference "external#/definitions/foobar"')
}
}, { message: 'Cannot find reference "external#/definitions/foobar"' })
})
t.test('Find match (external definitions typo)', t => {
await t.test('Find match (external definitions typo)', t => {
t.plan(1)
try {
t.assert.throws(() => {
build({
type: 'object',
properties: {
@@ -1320,15 +1260,12 @@ test('Bad key', t => {
}
}
})
t.fail('Should throw')
} catch (err) {
t.equal(err.message, 'Cannot find reference "external#/deifnitions/projectId"')
}
}, { message: 'Cannot find reference "external#/deifnitions/projectId"' })
})
t.test('Find match (definitions typo)', t => {
await t.test('Find match (definitions typo)', t => {
t.plan(1)
try {
t.assert.throws(() => {
build({
definitions: {
projectId: {
@@ -1345,15 +1282,12 @@ test('Bad key', t => {
}
}
})
t.fail('Should throw')
} catch (err) {
t.equal(err.message, 'Cannot find reference "#/deifnitions/projectId"')
}
}, { message: 'Cannot find reference "#/deifnitions/projectId"' })
})
t.test('Find match (external schema typo)', t => {
await t.test('Find match (external schema typo)', t => {
t.plan(1)
try {
t.assert.throws(() => {
build({
type: 'object',
properties: {
@@ -1375,16 +1309,8 @@ test('Bad key', t => {
}
}
})
t.fail('Should throw')
} catch (err) {
t.equal(
err.message,
'Cannot resolve ref "extrenal#/definitions/projectId". Schema with id "extrenal" is not found.'
)
}
}, { message: 'Cannot resolve ref "extrenal#/definitions/projectId". Schema with id "extrenal" is not found.' })
})
t.end()
})
test('Regression 2.5.2', t => {
@@ -1431,7 +1357,7 @@ test('Regression 2.5.2', t => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify([{ field: 'parent', sub: { field: 'joined' } }])
t.equal(output, '[{"field":"parent","sub":{"field":"joined"}}]')
t.assert.equal(output, '[{"field":"parent","sub":{"field":"joined"}}]')
})
test('Reference through multiple definitions', (t) => {
@@ -1466,10 +1392,8 @@ test('Reference through multiple definitions', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, JSON.stringify(object))
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, JSON.stringify(object))
})
test('issue #350', (t) => {
@@ -1509,10 +1433,8 @@ test('issue #350', (t) => {
const stringify = build(schema)
const output = stringify(object)
JSON.parse(output)
t.pass()
t.equal(output, JSON.stringify(object))
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, JSON.stringify(object))
})
test('deep union type', (t) => {
@@ -1759,10 +1681,10 @@ test('deep union type', (t) => {
]
}
]
t.equal(JSON.stringify(obj), stringify(obj))
t.assert.equal(JSON.stringify(obj), stringify(obj))
})
test('ref with same id in properties', (t) => {
test('ref with same id in properties', async (t) => {
t.plan(2)
const externalSchema = {
@@ -1781,7 +1703,7 @@ test('ref with same id in properties', (t) => {
}
}
t.test('anyOf', (t) => {
await t.test('anyOf', (t) => {
t.plan(1)
const schema = {
@@ -1801,10 +1723,10 @@ test('ref with same id in properties', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify({ _id: 'foo', image: { _id: 'bar', name: 'hello', owner: 'baz' } })
t.equal(output, '{"_id":"foo","image":{"_id":"bar","name":"hello","owner":"baz"}}')
t.assert.equal(output, '{"_id":"foo","image":{"_id":"bar","name":"hello","owner":"baz"}}')
})
t.test('oneOf', (t) => {
await t.test('oneOf', (t) => {
t.plan(1)
const schema = {
@@ -1824,7 +1746,7 @@ test('ref with same id in properties', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify({ _id: 'foo', image: { _id: 'bar', name: 'hello', owner: 'baz' } })
t.equal(output, '{"_id":"foo","image":{"_id":"bar","name":"hello","owner":"baz"}}')
t.assert.equal(output, '{"_id":"foo","image":{"_id":"bar","name":"hello","owner":"baz"}}')
})
})
@@ -1855,8 +1777,8 @@ test('Should not modify external schemas', (t) => {
const data = { id: 'a4e4c954-9f5f-443a-aa65-74d95732249a' }
const output = stringify(data)
t.equal(output, JSON.stringify(data))
t.same(options, optionsClone)
t.assert.equal(output, JSON.stringify(data))
t.assert.deepStrictEqual(options, optionsClone)
})
test('input schema is not mutated', (t) => {
@@ -1884,15 +1806,9 @@ test('input schema is not mutated', (t) => {
const stringify = build(schema)
const output = stringify(object)
try {
JSON.parse(output)
t.pass()
} catch (e) {
t.fail()
}
t.equal(output, '{"obj":"test"}')
t.same(schema, clonedSchema)
t.assert.doesNotThrow(() => JSON.parse(output))
t.assert.equal(output, '{"obj":"test"}')
t.assert.deepStrictEqual(schema, clonedSchema)
})
test('anyOf inside allOf', (t) => {
@@ -1927,7 +1843,7 @@ test('anyOf inside allOf', (t) => {
const stringify = build(schema)
const output = stringify(object)
t.equal(output, JSON.stringify(object))
t.assert.equal(output, JSON.stringify(object))
})
test('should resolve absolute $refs', (t) => {
@@ -1954,7 +1870,7 @@ test('should resolve absolute $refs', (t) => {
const stringify = build(schema, { schema: externalSchema })
const output = stringify(object)
t.equal(output, JSON.stringify(object))
t.assert.equal(output, JSON.stringify(object))
})
test('nested schema should overwrite anchor scope', (t) => {
@@ -1981,8 +1897,8 @@ test('nested schema should overwrite anchor scope', (t) => {
const stringify = build({ $ref: 'subschema#anchor' }, { schema: externalSchema })
const output = stringify(data)
t.equal(output, JSON.stringify(data))
t.throws(() => build({ $ref: 'root#anchor' }, { schema: externalSchema }))
t.assert.equal(output, JSON.stringify(data))
t.assert.throws(() => build({ $ref: 'root#anchor' }, { schema: externalSchema }))
})
test('object property reference with default value', (t) => {
@@ -2006,7 +1922,7 @@ test('object property reference with default value', (t) => {
const stringify = build(schema)
const output = stringify({})
t.equal(output, '{"prop":"foo"}')
t.assert.equal(output, '{"prop":"foo"}')
})
test('should throw an Error if two non-identical schemas with same id are provided', (t) => {
@@ -2082,7 +1998,7 @@ test('should throw an Error if two non-identical schemas with same id are provid
try {
build(schema)
} catch (err) {
t.equal(err.message, 'There is already another schema with id "inner_schema".')
t.assert.equal(err.message, 'There is already another schema with id "inner_schema".')
}
})
@@ -2125,6 +2041,6 @@ test('ref internal - throw if schema has definition twice with different shape',
try {
build(schema)
} catch (err) {
t.equal(err.message, 'There is already another anchor "#uri" in a schema "test".')
t.assert.equal(err.message, 'There is already another anchor "#uri" in schema "test".')
}
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
@@ -25,9 +25,8 @@ test('object with RexExp', (t) => {
const validate = validator(schema)
const output = stringify(obj)
JSON.parse(output)
t.pass()
t.assert.doesNotThrow(() => JSON.parse(output))
t.equal(obj.reg.source, new RegExp(JSON.parse(output).reg).source)
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(obj.reg.source, new RegExp(JSON.parse(output).reg).source)
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})

View File

@@ -1,10 +1,10 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('object with required field', (t) => {
t.plan(3)
t.plan(2)
const schema = {
title: 'object with required field',
@@ -21,24 +21,21 @@ test('object with required field', (t) => {
}
const stringify = build(schema)
stringify({
str: 'string'
t.assert.doesNotThrow(() => {
stringify({
str: 'string'
})
})
t.pass()
try {
t.assert.throws(() => {
stringify({
num: 42
})
t.fail()
} catch (e) {
t.equal(e.message, '"str" is required!')
t.pass()
}
}, { message: '"str" is required!' })
})
test('object with required field not in properties schema', (t) => {
t.plan(4)
t.plan(2)
const schema = {
title: 'object with required field',
@@ -52,27 +49,19 @@ test('object with required field not in properties schema', (t) => {
}
const stringify = build(schema)
try {
t.assert.throws(() => {
stringify({})
t.fail()
} catch (e) {
t.equal(e.message, '"str" is required!')
t.pass()
}
}, { message: '"str" is required!' })
try {
t.assert.throws(() => {
stringify({
num: 42
})
t.fail()
} catch (e) {
t.equal(e.message, '"str" is required!')
t.pass()
}
}, { message: '"str" is required!' })
})
test('object with required field not in properties schema with additional properties true', (t) => {
t.plan(4)
t.plan(2)
const schema = {
title: 'object with required field',
@@ -87,27 +76,19 @@ test('object with required field not in properties schema with additional proper
}
const stringify = build(schema)
try {
t.assert.throws(() => {
stringify({})
t.fail()
} catch (e) {
t.equal(e.message, '"str" is required!')
t.pass()
}
}, { message: '"str" is required!' })
try {
t.assert.throws(() => {
stringify({
num: 42
})
t.fail()
} catch (e) {
t.equal(e.message, '"str" is required!')
t.pass()
}
}, { message: '"str" is required!' })
})
test('object with multiple required field not in properties schema', (t) => {
t.plan(6)
t.plan(3)
const schema = {
title: 'object with required field',
@@ -122,35 +103,23 @@ test('object with multiple required field not in properties schema', (t) => {
}
const stringify = build(schema)
try {
t.assert.throws(() => {
stringify({})
t.fail()
} catch (e) {
t.equal(e.message, '"key1" is required!')
t.pass()
}
}, { message: '"key1" is required!' })
try {
t.assert.throws(() => {
stringify({
key1: 42,
key2: 42
})
t.fail()
} catch (e) {
t.equal(e.message, '"num" is required!')
t.pass()
}
}, { message: '"num" is required!' })
try {
t.assert.throws(() => {
stringify({
num: 42,
key1: 'some'
})
t.fail()
} catch (e) {
t.equal(e.message, '"key2" is required!')
t.pass()
}
}, { message: '"key2" is required!' })
})
test('object with required bool', (t) => {
@@ -169,16 +138,14 @@ test('object with required bool', (t) => {
}
const stringify = build(schema)
try {
t.assert.throws(() => {
stringify({})
t.fail()
} catch (e) {
t.equal(e.message, '"bool" is required!')
t.pass()
}
}, { message: '"bool" is required!' })
stringify({
bool: false
t.assert.doesNotThrow(() => {
stringify({
bool: false
})
})
})
@@ -198,14 +165,15 @@ test('required nullable', (t) => {
}
const stringify = build(schema)
stringify({
null: null
t.assert.doesNotThrow(() => {
stringify({
null: null
})
})
t.pass()
})
test('required numbers', (t) => {
t.plan(3)
t.plan(2)
const schema = {
title: 'object with required field',
@@ -222,18 +190,15 @@ test('required numbers', (t) => {
}
const stringify = build(schema)
stringify({
num: 42
t.assert.doesNotThrow(() => {
stringify({
num: 42
})
})
t.pass()
try {
t.assert.throws(() => {
stringify({
num: 'aaa'
})
t.fail()
} catch (e) {
t.equal(e.message, 'The value "aaa" cannot be converted to an integer.')
t.pass()
}
}, { message: 'The value "aaa" cannot be converted to an integer.' })
})

View File

@@ -1,48 +1,50 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
t.test('nested ref requires ajv', async t => {
const schemaA = {
$id: 'urn:schema:a',
definitions: {
foo: { anyOf: [{ type: 'string' }, { type: 'null' }] }
test('nested ref requires ajv', async t => {
t.test('nested ref requires ajv', async t => {
const schemaA = {
$id: 'urn:schema:a',
definitions: {
foo: { anyOf: [{ type: 'string' }, { type: 'null' }] }
}
}
}
const schemaB = {
$id: 'urn:schema:b',
type: 'object',
properties: {
results: {
type: 'object',
properties: {
items: {
type: 'object',
properties: {
bar: {
type: 'array',
items: { $ref: 'urn:schema:a#/definitions/foo' }
const schemaB = {
$id: 'urn:schema:b',
type: 'object',
properties: {
results: {
type: 'object',
properties: {
items: {
type: 'object',
properties: {
bar: {
type: 'array',
items: { $ref: 'urn:schema:a#/definitions/foo' }
}
}
}
}
}
}
}
}
const stringify = build(schemaB, {
schema: {
[schemaA.$id]: schemaA
}
})
const result = stringify({
results: {
items: {
bar: ['baz']
const stringify = build(schemaB, {
schema: {
[schemaA.$id]: schemaA
}
}
})
const result = stringify({
results: {
items: {
bar: ['baz']
}
}
})
t.assert.equal(result, '{"results":{"items":{"bar":["baz"]}}}')
})
t.same(result, '{"results":{"items":{"bar":["baz"]}}}')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
const stringify = build({
@@ -91,52 +91,51 @@ const obj = {
notmatchnum: 42
}
// pass if it does not crash
const json = stringify(obj)
JSON.parse(json)
test('sanitize', t => {
const json = stringify(obj)
t.assert.doesNotThrow(() => JSON.parse(json))
const stringify2 = build({
title: 'Example Schema',
type: 'object',
patternProperties: {
'"\'w00t.*////': {
type: 'number'
const stringify2 = build({
title: 'Example Schema',
type: 'object',
patternProperties: {
'"\'w00t.*////': {
type: 'number'
}
}
}
})
})
t.same(JSON.parse(stringify2({
'"\'phra////': 42,
asd: 42
})), {
})
t.assert.deepStrictEqual(JSON.parse(stringify2({
'"\'phra////': 42,
asd: 42
})), {
})
const stringify3 = build({
title: 'Example Schema',
type: 'object',
properties: {
"\"phra\\'&&(console.log(42))//||'phra": {}
}
})
// this verifies the escaping
JSON.parse(stringify3({
'"phra\'&&(console.log(42))//||\'phra': 42
}))
const stringify4 = build({
title: 'Example Schema',
type: 'object',
properties: {
'"\\\\\\\\\'w00t': {
type: 'string',
default: '"\'w00t'
const stringify3 = build({
title: 'Example Schema',
type: 'object',
properties: {
"\"phra\\'&&(console.log(42))//||'phra": {}
}
}
})
})
t.same(JSON.parse(stringify4({})), {
'"\\\\\\\\\'w00t': '"\'w00t'
})
// this verifies the escaping
JSON.parse(stringify3({
'"phra\'&&(console.log(42))//||\'phra': 42
}))
t.pass('no crashes')
const stringify4 = build({
title: 'Example Schema',
type: 'object',
properties: {
'"\\\\\\\\\'w00t': {
type: 'string',
default: '"\'w00t'
}
}
})
t.assert.deepStrictEqual(JSON.parse(stringify4({})), {
'"\\\\\\\\\'w00t': '"\'w00t'
})
})

View File

@@ -1,18 +1,18 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
const payload = '(throw "pwoned")'
test('sanitize 2', t => {
const payload = '(throw "pwoned")'
const stringify = build({
properties: {
[`*///\\\\\\']);${payload};{/*`]: {
type: 'number'
const stringify = build({
properties: {
[`*///\\\\\\']);${payload};{/*`]: {
type: 'number'
}
}
}
})
t.assert.doesNotThrow(() => stringify({}))
})
stringify({})
t.pass('no crashes')

View File

@@ -1,15 +1,17 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
t.throws(() => {
build({
$defs: {
type: 'foooo"bar'
},
patternProperties: {
x: { $ref: '#/$defs' }
}
})
}, 'foooo"bar unsupported')
test('sanitize 3', t => {
t.assert.throws(() => {
build({
$defs: {
type: 'foooo"bar'
},
patternProperties: {
x: { $ref: '#/$defs' }
}
})
}, { message: 'foooo"bar unsupported' })
})

View File

@@ -1,14 +1,16 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
const payload = '(throw "pwoned")'
test('sanitize 4', t => {
const payload = '(throw "pwoned")'
const stringify = build({
required: [`"];${payload}//`]
const stringify = build({
required: [`"];${payload}//`]
})
t.assert.throws(() => {
stringify({})
}, { message: '""];(throw "pwoned")//" is required!' })
})
t.throws(() => {
stringify({})
}, 'Error: ""];(throw "pwoned")//" is required!')

View File

@@ -1,16 +1,16 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
const payload = '(throw "pwoned")'
test('sanitize 5', t => {
const payload = '(throw "pwoned")'
const expected = 'Error: Invalid regular expression: /*/: Nothing to repeat. Found at * matching {"type":"*/(throw \\"pwoned\\")){//"}'
t.throws(() => {
build({
patternProperties: {
'*': { type: `*/${payload}){//` }
}
})
}, expected)
t.assert.throws(() => {
build({
patternProperties: {
'*': { type: `*/${payload}){//` }
}
})
}, { message: 'schema is invalid: data/patternProperties must match format "regex"' })
})

View File

@@ -1,22 +1,22 @@
'use strict'
const t = require('tap')
const { test } = require('node:test')
const build = require('..')
const payload = '(throw "pwoned")'
test('sanitize 6', t => {
const payload = '(throw "pwoned")'
const stringify = build({
type: 'object',
properties: {
'/*': { type: 'object' },
x: {
type: 'object',
properties: {
a: { type: 'string', default: `*/}${payload};{//` }
const stringify = build({
type: 'object',
properties: {
'/*': { type: 'object' },
x: {
type: 'object',
properties: {
a: { type: 'string', default: `*/}${payload};{//` }
}
}
}
}
})
t.assert.doesNotThrow(() => { stringify({}) })
})
stringify({})
t.pass('no crashes')

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('required property containing single quote, contains property', (t) => {
@@ -16,7 +16,7 @@ test('required property containing single quote, contains property', (t) => {
]
})
t.throws(() => stringify({}), new Error('"\'" is required!'))
t.assert.throws(() => stringify({}), new Error('"\'" is required!'))
})
test('required property containing double quote, contains property', (t) => {
@@ -32,7 +32,7 @@ test('required property containing double quote, contains property', (t) => {
]
})
t.throws(() => stringify({}), new Error('""" is required!'))
t.assert.throws(() => stringify({}), new Error('""" is required!'))
})
test('required property containing single quote, does not contain property', (t) => {
@@ -48,7 +48,7 @@ test('required property containing single quote, does not contain property', (t)
]
})
t.throws(() => stringify({}), new Error('"\'" is required!'))
t.assert.throws(() => stringify({}), new Error('"\'" is required!'))
})
test('required property containing double quote, does not contain property', (t) => {
@@ -64,5 +64,5 @@ test('required property containing double quote, does not contain property', (t)
]
})
t.throws(() => stringify({}), new Error('""" is required!'))
t.assert.throws(() => stringify({}), new Error('""" is required!'))
})

View File

@@ -1,6 +1,6 @@
'use strict'
const { test } = require('tap')
const { test } = require('node:test')
const clone = require('rfdc/default')
const build = require('..')
@@ -32,8 +32,8 @@ test('oneOf with $ref should not change the input schema', t => {
})
const value = stringify({ people: { name: 'hello', foo: 'bar' } })
t.equal(value, '{"people":{"name":"hello"}}')
t.same(schema, clonedSchema)
t.assert.equal(value, '{"people":{"name":"hello"}}')
t.assert.deepStrictEqual(schema, clonedSchema)
})
test('oneOf and anyOf with $ref should not change the input schema', t => {
@@ -75,9 +75,9 @@ test('oneOf and anyOf with $ref should not change the input schema', t => {
const valueAny1 = stringify({ people: { name: 'hello', foo: 'bar' }, love: 'music' })
const valueAny2 = stringify({ people: { name: 'hello', foo: 'bar' }, love: true })
t.equal(valueAny1, '{"people":{"name":"hello"},"love":"music"}')
t.equal(valueAny2, '{"people":{"name":"hello"},"love":true}')
t.same(schema, clonedSchema)
t.assert.equal(valueAny1, '{"people":{"name":"hello"},"love":"music"}')
t.assert.equal(valueAny2, '{"people":{"name":"hello"},"love":true}')
t.assert.deepStrictEqual(schema, clonedSchema)
})
test('multiple $ref tree', t => {
@@ -118,8 +118,8 @@ test('multiple $ref tree', t => {
})
const value = stringify({ people: { name: 'hello', foo: 'bar', age: 42 } })
t.equal(value, '{"people":{"name":"hello","age":42}}')
t.same(schema, clonedSchema)
t.assert.equal(value, '{"people":{"name":"hello","age":42}}')
t.assert.deepStrictEqual(schema, clonedSchema)
})
test('must not mutate items $ref', t => {
@@ -152,8 +152,8 @@ test('must not mutate items $ref', t => {
})
const value = stringify([{ name: 'foo' }])
t.equal(value, '[{"name":"foo"}]')
t.same(schema, clonedSchema)
t.assert.equal(value, '[{"name":"foo"}]')
t.assert.deepStrictEqual(schema, clonedSchema)
})
test('must not mutate items referred by $ref', t => {
@@ -191,6 +191,6 @@ test('must not mutate items referred by $ref', t => {
})
const value = stringify({ name: { name: 'foo' } })
t.equal(value, '{"name":{"name":"foo"}}')
t.same(firstSchema, clonedSchema)
t.assert.equal(value, '{"name":{"name":"foo"}}')
t.assert.deepStrictEqual(firstSchema, clonedSchema)
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test, after } = require('node:test')
const fjs = require('..')
const fs = require('fs')
const path = require('path')
@@ -22,23 +22,29 @@ const tmpDir = 'test/fixtures'
test('activate standalone mode', async (t) => {
t.plan(3)
const code = build({ mode: 'standalone' })
t.type(code, 'string')
t.equal(code.indexOf('ajv'), -1)
const destination = path.resolve(tmpDir, 'standalone.js')
t.teardown(async () => {
after(async () => {
await fs.promises.rm(destination, { force: true })
})
const code = build({ mode: 'standalone' })
t.assert.ok(typeof code === 'string')
t.assert.equal(code.indexOf('ajv'), -1)
const destination = path.resolve(tmpDir, 'standalone.js')
await fs.promises.writeFile(destination, code)
const standalone = require(destination)
t.same(standalone({ firstName: 'Foo', surname: 'bar' }), JSON.stringify({ firstName: 'Foo' }), 'surname evicted')
t.assert.equal(standalone({ firstName: 'Foo', surname: 'bar' }), JSON.stringify({ firstName: 'Foo' }), 'surname evicted')
})
test('test ajv schema', async (t) => {
t.plan(3)
after(async () => {
await fs.promises.rm(destination, { force: true })
})
const code = build({ mode: 'standalone' }, {
type: 'object',
properties: {
@@ -86,18 +92,14 @@ test('test ajv schema', async (t) => {
}
}
})
t.type(code, 'string')
t.equal(code.indexOf('ajv') > 0, true)
t.assert.ok(typeof code === 'string')
t.assert.equal(code.indexOf('ajv') > 0, true)
const destination = path.resolve(tmpDir, 'standalone2.js')
t.teardown(async () => {
await fs.promises.rm(destination, { force: true })
})
await fs.promises.writeFile(destination, code)
const standalone = require(destination)
t.same(standalone({
t.assert.equal(standalone({
kind: 'foobar',
foo: 'FOO',
list: [{
@@ -122,6 +124,11 @@ test('test ajv schema', async (t) => {
test('no need to keep external schemas once compiled', async (t) => {
t.plan(1)
after(async () => {
await fs.promises.rm(destination, { force: true })
})
const externalSchema = {
first: {
definitions: {
@@ -145,19 +152,19 @@ test('no need to keep external schemas once compiled', async (t) => {
const destination = path.resolve(tmpDir, 'standalone3.js')
t.teardown(async () => {
await fs.promises.rm(destination, { force: true })
})
await fs.promises.writeFile(destination, code)
const standalone = require(destination)
t.same(standalone({ id1: 5 }), JSON.stringify({ id1: 5 }), 'serialization works with external schemas')
t.assert.equal(standalone({ id1: 5 }), JSON.stringify({ id1: 5 }), 'serialization works with external schemas')
})
test('no need to keep external schemas once compiled - with oneOf validator', async (t) => {
t.plan(2)
after(async () => {
await fs.promises.rm(destination, { force: true })
})
const externalSchema = {
ext: {
definitions: {
@@ -204,13 +211,9 @@ test('no need to keep external schemas once compiled - with oneOf validator', as
const destination = path.resolve(tmpDir, 'standalone-oneOf-ref.js')
t.teardown(async () => {
await fs.promises.rm(destination, { force: true })
})
await fs.promises.writeFile(destination, code)
const stringify = require(destination)
t.equal(stringify({ oneOfSchema: { baz: 5 } }), '{"oneOfSchema":{"baz":5}}')
t.equal(stringify({ oneOfSchema: { bar: 'foo' } }), '{"oneOfSchema":{"bar":"foo"}}')
t.assert.equal(stringify({ oneOfSchema: { baz: 5 } }), '{"oneOfSchema":{"baz":5}}')
t.assert.equal(stringify({ oneOfSchema: { bar: 'foo' } }), '{"oneOfSchema":{"bar":"foo"}}')
})
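For the standalone tests, tap's per-test `t.teardown()` cleanup becomes the `after()` hook imported from `node:test`, registered inside the test body as in the hunks above. A minimal sketch of the whole shape; the fixture path and schema are illustrative, not the ones from this commit:

```js
'use strict'

const { test, after } = require('node:test')
const fs = require('node:fs')
const path = require('node:path')
const fjs = require('..')

test('standalone code can be written to disk and required back', async (t) => {
  t.plan(1)

  const destination = path.resolve('test/fixtures', 'standalone-example.js')

  // Replaces tap's t.teardown(): runs once the surrounding tests have finished.
  after(async () => {
    await fs.promises.rm(destination, { force: true })
  })

  // mode: 'standalone' makes the builder return source code instead of a function.
  const code = fjs({
    type: 'object',
    properties: { firstName: { type: 'string' } }
  }, { mode: 'standalone' })

  await fs.promises.writeFile(destination, code)

  const standalone = require(destination)
  t.assert.equal(standalone({ firstName: 'Foo' }), '{"firstName":"Foo"}')
})
```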

View File

@@ -1,7 +1,7 @@
'use strict'
const t = require('tap')
const test = t.test
const { test } = require('node:test')
const build = require('..')
test('serialize short string', (t) => {
@@ -15,8 +15,8 @@ test('serialize short string', (t) => {
const stringify = build(schema)
const output = stringify(input)
t.equal(output, '"abcd"')
t.equal(JSON.parse(output), input)
t.assert.equal(output, '"abcd"')
t.assert.equal(JSON.parse(output), input)
})
test('serialize short string', (t) => {
@@ -30,8 +30,8 @@ test('serialize short string', (t) => {
const stringify = build(schema)
const output = stringify(input)
t.equal(output, '"\\u0000"')
t.equal(JSON.parse(output), input)
t.assert.equal(output, '"\\u0000"')
t.assert.equal(JSON.parse(output), input)
})
test('serialize long string', (t) => {
@@ -45,8 +45,8 @@ test('serialize long string', (t) => {
const stringify = build(schema)
const output = stringify(input)
t.equal(output, `"${new Array(2e4).fill('\\u0000').join('')}"`)
t.equal(JSON.parse(output), input)
t.assert.equal(output, `"${new Array(2e4).fill('\\u0000').join('')}"`)
t.assert.equal(JSON.parse(output), input)
})
test('unsafe string', (t) => {
@@ -61,8 +61,8 @@ test('unsafe string', (t) => {
const stringify = build(schema)
const output = stringify(input)
t.equal(output, `"${input}"`)
t.equal(JSON.parse(output), input)
t.assert.equal(output, `"${input}"`)
t.assert.equal(JSON.parse(output), input)
})
test('unsafe unescaped string', (t) => {
@@ -77,8 +77,8 @@ test('unsafe unescaped string', (t) => {
const stringify = build(schema)
const output = stringify(input)
t.equal(output, `"${input}"`)
t.throws(function () {
t.assert.equal(output, `"${input}"`)
t.assert.throws(function () {
JSON.parse(output)
})
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const validator = require('is-my-json-valid')
const build = require('..')
@@ -16,8 +16,8 @@ test('render a string with surrogate pairs as JSON:test 1', (t) => {
const stringify = build(schema)
const output = stringify('𝌆')
t.equal(output, '"𝌆"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"𝌆"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a string with surrogate pairs as JSON: test 2', (t) => {
@@ -32,8 +32,8 @@ test('render a string with surrogate pairs as JSON: test 2', (t) => {
const stringify = build(schema)
const output = stringify('\uD834\uDF06')
t.equal(output, '"𝌆"')
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, '"𝌆"')
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a string with Unpaired surrogate code as JSON', (t) => {
@@ -47,8 +47,8 @@ test('render a string with Unpaired surrogate code as JSON', (t) => {
const validate = validator(schema)
const stringify = build(schema)
const output = stringify('\uDF06\uD834')
t.equal(output, JSON.stringify('\uDF06\uD834'))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify('\uDF06\uD834'))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})
test('render a string with lone surrogate code as JSON', (t) => {
@@ -62,6 +62,6 @@ test('render a string with lone surrogate code as JSON', (t) => {
const validate = validator(schema)
const stringify = build(schema)
const output = stringify('\uDEAD')
t.equal(output, JSON.stringify('\uDEAD'))
t.ok(validate(JSON.parse(output)), 'valid schema')
t.assert.equal(output, JSON.stringify('\uDEAD'))
t.assert.ok(validate(JSON.parse(output)), 'valid schema')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('use toJSON method on object types', (t) => {
@@ -22,7 +22,7 @@ test('use toJSON method on object types', (t) => {
}
}
t.equal('{"productName":"cola"}', stringify(object))
t.assert.equal('{"productName":"cola"}', stringify(object))
})
test('use toJSON method on nested object types', (t) => {
@@ -55,7 +55,7 @@ test('use toJSON method on nested object types', (t) => {
}
]
t.equal('[{"productName":"cola"},{"productName":"sprite"}]', stringify(array))
t.assert.equal('[{"productName":"cola"},{"productName":"sprite"}]', stringify(array))
})
test('not use toJSON if does not exist', (t) => {
@@ -79,7 +79,7 @@ test('not use toJSON if does not exist', (t) => {
product: { name: 'cola' }
}
t.equal('{"product":{"name":"cola"}}', stringify(object))
t.assert.equal('{"product":{"name":"cola"}}', stringify(object))
})
test('not fail on null object declared nullable', (t) => {
@@ -100,7 +100,7 @@ test('not fail on null object declared nullable', (t) => {
}
}
})
t.equal('null', stringify(null))
t.assert.equal('null', stringify(null))
})
test('not fail on null sub-object declared nullable', (t) => {
@@ -124,7 +124,7 @@ test('not fail on null sub-object declared nullable', (t) => {
const object = {
product: null
}
t.equal('{"product":null}', stringify(object))
t.assert.equal('{"product":null}', stringify(object))
})
test('on non nullable null sub-object it should coerce to {}', (t) => {
@@ -150,7 +150,7 @@ test('on non nullable null sub-object it should coerce to {}', (t) => {
}
const result = stringify(object)
t.equal(result, JSON.stringify({ product: {} }))
t.assert.equal(result, JSON.stringify({ product: {} }))
})
test('on non nullable null object it should coerce to {}', (t) => {
@@ -174,7 +174,7 @@ test('on non nullable null object it should coerce to {}', (t) => {
})
const result = stringify(null)
t.equal(result, '{}')
t.assert.equal(result, '{}')
})
test('on non-nullable null object it should skip rendering, skipping required fields checks', (t) => {
@@ -199,5 +199,5 @@ test('on non-nullable null object it should skip rendering, skipping required fi
})
const result = stringify(null)
t.equal(result, '{}')
t.assert.equal(result, '{}')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('nested object in pattern properties for typebox', (t) => {
@@ -32,5 +32,5 @@ test('nested object in pattern properties for typebox', (t) => {
}
}
})
t.equal(value, '{"key1":{"nestedKey":{"nestedKey1":"value1"}},"key2":{"nestedKey":{"nestedKey1":"value2"}}}')
t.assert.equal(value, '{"key1":{"nestedKey":{"nestedKey1":"value1"}},"key2":{"nestedKey":{"nestedKey1":"value2"}}}')
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('possibly nullable integer primitive alternative', (t) => {
@@ -21,7 +21,7 @@ test('possibly nullable integer primitive alternative', (t) => {
const value = stringify({
data: 4
})
t.equal(value, '{"data":4}')
t.assert.equal(value, '{"data":4}')
})
test('possibly nullable number primitive alternative', (t) => {
@@ -42,7 +42,7 @@ test('possibly nullable number primitive alternative', (t) => {
const value = stringify({
data: 4
})
t.equal(value, '{"data":4}')
t.assert.equal(value, '{"data":4}')
})
test('possibly nullable integer primitive alternative with null value', (t) => {
@@ -63,7 +63,7 @@ test('possibly nullable integer primitive alternative with null value', (t) => {
const value = stringify({
data: null
})
t.equal(value, '{"data":0}')
t.assert.equal(value, '{"data":0}')
})
test('possibly nullable number primitive alternative with null value', (t) => {
@@ -84,7 +84,7 @@ test('possibly nullable number primitive alternative with null value', (t) => {
const value = stringify({
data: null
})
t.equal(value, '{"data":0}')
t.assert.equal(value, '{"data":0}')
})
test('possibly nullable number primitive alternative with null value', (t) => {
@@ -105,7 +105,7 @@ test('possibly nullable number primitive alternative with null value', (t) => {
const value = stringify({
data: null
})
t.equal(value, '{"data":false}')
t.assert.equal(value, '{"data":false}')
})
test('nullable integer primitive', (t) => {
@@ -126,7 +126,7 @@ test('nullable integer primitive', (t) => {
const value = stringify({
data: 4
})
t.equal(value, '{"data":4}')
t.assert.equal(value, '{"data":4}')
})
test('nullable number primitive', (t) => {
@@ -147,7 +147,7 @@ test('nullable number primitive', (t) => {
const value = stringify({
data: 4
})
t.equal(value, '{"data":4}')
t.assert.equal(value, '{"data":4}')
})
test('nullable primitive with null value', (t) => {
@@ -168,7 +168,7 @@ test('nullable primitive with null value', (t) => {
const value = stringify({
data: null
})
t.equal(value, '{"data":null}')
t.assert.equal(value, '{"data":null}')
})
test('nullable number primitive with null value', (t) => {
@@ -189,7 +189,7 @@ test('nullable number primitive with null value', (t) => {
const value = stringify({
data: null
})
t.equal(value, '{"data":null}')
t.assert.equal(value, '{"data":null}')
})
test('possibly null object with multi-type property', (t) => {
@@ -211,19 +211,19 @@ test('possibly null object with multi-type property', (t) => {
}
const stringify = build(schema)
t.equal(stringify({
t.assert.equal(stringify({
objectOrNull: {
stringOrNumber: 'string'
}
}), '{"objectOrNull":{"stringOrNumber":"string"}}')
t.equal(stringify({
t.assert.equal(stringify({
objectOrNull: {
stringOrNumber: 42
}
}), '{"objectOrNull":{"stringOrNumber":42}}')
t.equal(stringify({
t.assert.equal(stringify({
objectOrNull: null
}), '{"objectOrNull":null}')
})
@@ -249,7 +249,7 @@ test('object with possibly null array of multiple types', (t) => {
const value = stringify({
arrayOfStringsAndNumbers: null
})
t.equal(value, '{"arrayOfStringsAndNumbers":null}')
t.assert.equal(value, '{"arrayOfStringsAndNumbers":null}')
} catch (e) {
console.log(e)
t.fail()
@@ -259,21 +259,21 @@ test('object with possibly null array of multiple types', (t) => {
const value = stringify({
arrayOfStringsAndNumbers: ['string1', 'string2']
})
t.equal(value, '{"arrayOfStringsAndNumbers":["string1","string2"]}')
t.assert.equal(value, '{"arrayOfStringsAndNumbers":["string1","string2"]}')
} catch (e) {
console.log(e)
t.fail()
}
t.equal(stringify({
t.assert.equal(stringify({
arrayOfStringsAndNumbers: [42, 7]
}), '{"arrayOfStringsAndNumbers":[42,7]}')
t.equal(stringify({
t.assert.equal(stringify({
arrayOfStringsAndNumbers: ['string1', 42, 7, 'string2']
}), '{"arrayOfStringsAndNumbers":["string1",42,7,"string2"]}')
t.equal(stringify({
t.assert.equal(stringify({
arrayOfStringsAndNumbers: ['string1', null, 42, 7, 'string2', null]
}), '{"arrayOfStringsAndNumbers":["string1",null,42,7,"string2",null]}')
})
@@ -307,7 +307,7 @@ test('object with tuple of multiple types', (t) => {
const value = stringify({
fixedTupleOfStringsAndNumbers: ['string1', 42, 7]
})
t.equal(value, '{"fixedTupleOfStringsAndNumbers":["string1",42,7]}')
t.assert.equal(value, '{"fixedTupleOfStringsAndNumbers":["string1",42,7]}')
} catch (e) {
console.log(e)
t.fail()
@@ -317,7 +317,7 @@ test('object with tuple of multiple types', (t) => {
const value = stringify({
fixedTupleOfStringsAndNumbers: ['string1', 42, 'string2']
})
t.equal(value, '{"fixedTupleOfStringsAndNumbers":["string1",42,"string2"]}')
t.assert.equal(value, '{"fixedTupleOfStringsAndNumbers":["string1",42,"string2"]}')
} catch (e) {
console.log(e)
t.fail()
@@ -354,17 +354,17 @@ test('object with anyOf and multiple types', (t) => {
const value = stringify({
objectOrBoolean: { stringOrNumber: 'string' }
})
t.equal(value, '{"objectOrBoolean":{"stringOrNumber":"string"}}')
t.assert.equal(value, '{"objectOrBoolean":{"stringOrNumber":"string"}}')
} catch (e) {
console.log(e)
t.fail()
}
t.equal(stringify({
t.assert.equal(stringify({
objectOrBoolean: { stringOrNumber: 42 }
}), '{"objectOrBoolean":{"stringOrNumber":42}}')
t.equal(stringify({
t.assert.equal(stringify({
objectOrBoolean: true
}), '{"objectOrBoolean":true}')
})
@@ -383,7 +383,7 @@ test('string type array can handle dates', (t) => {
date: new Date('2018-04-20T07:52:31.017Z'),
dateObject: new Date('2018-04-21T07:52:31.017Z')
})
t.equal(value, '{"date":"2018-04-20T07:52:31.017Z","dateObject":"2018-04-21T07:52:31.017Z"}')
t.assert.equal(value, '{"date":"2018-04-20T07:52:31.017Z","dateObject":"2018-04-21T07:52:31.017Z"}')
})
test('object that is simultaneously a string and a json', (t) => {
@@ -406,10 +406,10 @@ test('object that is simultaneously a string and a json', (t) => {
const stringify = build(schema)
const valueStr = stringify({ simultaneously: likeObjectId })
t.equal(valueStr, '{"simultaneously":"hello"}')
t.assert.equal(valueStr, '{"simultaneously":"hello"}')
const valueObj = stringify({ simultaneously: { foo: likeObjectId } })
t.equal(valueObj, '{"simultaneously":{"foo":"hello"}}')
t.assert.equal(valueObj, '{"simultaneously":{"foo":"hello"}}')
})
test('object that is simultaneously a string and a json switched', (t) => {
@@ -432,10 +432,10 @@ test('object that is simultaneously a string and a json switched', (t) => {
const stringify = build(schema)
const valueStr = stringify({ simultaneously: likeObjectId })
t.equal(valueStr, '{"simultaneously":{}}')
t.assert.equal(valueStr, '{"simultaneously":{}}')
const valueObj = stringify({ simultaneously: { foo: likeObjectId } })
t.equal(valueObj, '{"simultaneously":{"foo":"hello"}}')
t.assert.equal(valueObj, '{"simultaneously":{"foo":"hello"}}')
})
test('class instance that is simultaneously a string and a json', (t) => {
@@ -461,10 +461,10 @@ test('class instance that is simultaneously a string and a json', (t) => {
const stringify = build(schema)
const valueStr = stringify({ simultaneously: likeObjectId })
t.equal(valueStr, '{"simultaneously":"hello"}')
t.assert.equal(valueStr, '{"simultaneously":"hello"}')
const valueObj = stringify({ simultaneously: { foo: likeObjectId } })
t.equal(valueObj, '{"simultaneously":{"foo":"hello"}}')
t.assert.equal(valueObj, '{"simultaneously":{"foo":"hello"}}')
})
test('should not throw an error when type is array and object is null, it should instead coerce to []', (t) => {
@@ -483,7 +483,7 @@ test('should not throw an error when type is array and object is null, it should
const stringify = build(schema)
const result = stringify({ arr: null })
t.equal(result, JSON.stringify({ arr: [] }))
t.assert.equal(result, JSON.stringify({ arr: [] }))
})
test('should throw an error when type is array and object is not an array', (t) => {
@@ -501,7 +501,7 @@ test('should throw an error when type is array and object is not an array', (t)
}
const stringify = build(schema)
t.throws(() => stringify({ arr: { foo: 'hello' } }), new TypeError('The value of \'#/properties/arr\' does not match schema definition.'))
t.assert.throws(() => stringify({ arr: { foo: 'hello' } }), new TypeError('The value of \'#/properties/arr\' does not match schema definition.'))
})
test('should throw an error when type is array and object is not an array with external schema', (t) => {
@@ -529,7 +529,7 @@ test('should throw an error when type is array and object is not an array with e
}
const stringify = build(schema, { schema: externalSchema })
t.throws(() => stringify({ arr: { foo: 'hello' } }), new TypeError('The value of \'arrayOfNumbers#/definitions/arr\' does not match schema definition.'))
t.assert.throws(() => stringify({ arr: { foo: 'hello' } }), new TypeError('The value of \'arrayOfNumbers#/definitions/arr\' does not match schema definition.'))
})
test('throw an error if none of types matches', (t) => {
@@ -546,5 +546,5 @@ test('throw an error if none of types matches', (t) => {
}
const stringify = build(schema)
t.throws(() => stringify({ data: 'string' }), 'The value "string" does not match schema definition.')
t.assert.throws(() => stringify({ data: 'string' }), 'The value "string" does not match schema definition.')
})
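The assertions above come from the typesArray tests, whose schemas (collapsed out of the hunks) declare `type` as an array of JSON types. A minimal illustrative sketch of the behaviour they check, with a made-up schema; the serializer dispatches on the runtime type of the value:

```js
'use strict'

const build = require('..')

// "type" given as an array: strings, numbers and null all survive as-is.
const stringify = build({
  type: 'object',
  properties: {
    data: { type: ['string', 'number', 'null'] }
  }
})

console.log(stringify({ data: 'hello' })) // {"data":"hello"}
console.log(stringify({ data: 42 }))      // {"data":42}
console.log(stringify({ data: null }))    // {"data":null}
```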

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const build = require('..')
test('object with custom format field', (t) => {
@@ -19,9 +19,9 @@ test('object with custom format field', (t) => {
const stringify = build(schema)
stringify({
str: 'string'
t.assert.doesNotThrow(() => {
stringify({
str: 'string'
})
})
t.pass()
})

View File

@@ -1,6 +1,6 @@
'use strict'
const test = require('tap').test
const { test } = require('node:test')
const webpack = require('webpack')
const path = require('path')
@@ -46,5 +46,5 @@ test('the library should work with webpack', async (t) => {
})
const obj = { foo: '42', bar: true }
t.equal(stringify(obj), '{"foo":"42","bar":true}')
t.assert.equal(stringify(obj), '{"foo":"42","bar":true}')
})

View File

@@ -1,4 +1,4 @@
import Ajv, { Options as AjvOptions } from "ajv"
import Ajv, { Options as AjvOptions } from 'ajv'
type Build = typeof build
@@ -65,33 +65,33 @@ declare namespace build {
*/
$ref: string;
}
export interface AnySchema extends BaseSchema {
}
export interface StringSchema extends BaseSchema {
type: "string";
type: 'string';
format?: string;
}
export interface IntegerSchema extends BaseSchema {
type: "integer";
type: 'integer';
}
export interface NumberSchema extends BaseSchema {
type: "number";
type: 'number';
}
export interface NullSchema extends BaseSchema {
type: "null";
type: 'null';
}
export interface BooleanSchema extends BaseSchema {
type: "boolean";
type: 'boolean';
}
export interface ArraySchema extends BaseSchema {
type: "array";
type: 'array';
/**
* The schema for the items in the array
*/
@@ -99,7 +99,7 @@ declare namespace build {
}
export interface TupleSchema extends BaseSchema {
type: "array";
type: 'array';
/**
* The schemas for the items in the tuple
*/
@@ -115,7 +115,7 @@ declare namespace build {
}
export interface ObjectSchema extends BaseSchema {
type: "object";
type: 'object';
/**
* Describe the properties of the object
*/
@@ -145,7 +145,7 @@ declare namespace build {
| BooleanSchema
| ArraySchema
| TupleSchema
| ObjectSchema;
| ObjectSchema
export interface Options {
/**
@@ -158,12 +158,12 @@ declare namespace build {
ajv?: AjvOptions
/**
* Optionally configure how the integer will be rounded
*
*
* @default 'trunc'
*/
rounding?: 'ceil' | 'floor' | 'round' | 'trunc'
/**
* @deprecated
* @deprecated
* Enable debug mode. Please use `mode: "debug"` instead
*/
debugMode?: boolean
@@ -190,7 +190,7 @@ declare namespace build {
}
export const validLargeArrayMechanisms: string[]
export function restore (value: <TDoc extends object = object>(doc: TDoc) => string): ReturnType<Build>
export function restore (value: <TDoc extends object = object>(doc: TDoc) => string): ReturnType<Build>
export const build: Build
export { build as default }
@@ -208,24 +208,24 @@ interface StandaloneOption extends build.Options {
mode: 'standalone'
}
type StringCoercible = string | Date | RegExp;
type IntegerCoercible = number | BigInt;
type StringCoercible = string | Date | RegExp
type IntegerCoercible = number | BigInt
/**
* Build a stringify function using a schema of the documents that should be stringified
* @param schema The schema used to stringify values
* @param options The options to use (optional)
*/
declare function build(schema: build.AnySchema, options: DebugOption): { code: string, ajv: Ajv };
declare function build(schema: build.AnySchema, options: DeprecateDebugOption): { code: string, ajv: Ajv };
declare function build(schema: build.AnySchema, options: StandaloneOption): string;
declare function build(schema: build.AnySchema, options?: build.Options): <TDoc = any>(doc: TDoc) => any;
declare function build(schema: build.StringSchema, options?: build.Options): <TDoc extends StringCoercible = StringCoercible>(doc: TDoc) => string;
declare function build(schema: build.IntegerSchema | build.NumberSchema, options?: build.Options): <TDoc extends IntegerCoercible = IntegerCoercible>(doc: TDoc) => string;
declare function build(schema: build.NullSchema, options?: build.Options): <TDoc extends null = null>(doc: TDoc) => "null";
declare function build(schema: build.BooleanSchema, options?: build.Options): <TDoc extends boolean = boolean>(doc: TDoc) => string;
declare function build(schema: build.ArraySchema | build.TupleSchema, options?: build.Options): <TDoc extends any[]= any[]>(doc: TDoc) => string;
declare function build(schema: build.ObjectSchema, options?: build.Options): <TDoc extends object = object>(doc: TDoc) => string;
declare function build(schema: build.Schema, options?: build.Options): <TDoc = object | any[] | string | number | boolean | null> (doc: TDoc) => string;
declare function build (schema: build.AnySchema, options: DebugOption): { code: string, ajv: Ajv }
declare function build (schema: build.AnySchema, options: DeprecateDebugOption): { code: string, ajv: Ajv }
declare function build (schema: build.AnySchema, options: StandaloneOption): string
declare function build (schema: build.AnySchema, options?: build.Options): <TDoc = any>(doc: TDoc) => any
declare function build (schema: build.StringSchema, options?: build.Options): <TDoc extends StringCoercible = StringCoercible>(doc: TDoc) => string
declare function build (schema: build.IntegerSchema | build.NumberSchema, options?: build.Options): <TDoc extends IntegerCoercible = IntegerCoercible>(doc: TDoc) => string
declare function build (schema: build.NullSchema, options?: build.Options): <TDoc extends null = null>(doc: TDoc) => 'null'
declare function build (schema: build.BooleanSchema, options?: build.Options): <TDoc extends boolean = boolean>(doc: TDoc) => string
declare function build (schema: build.ArraySchema | build.TupleSchema, options?: build.Options): <TDoc extends any[]= any[]>(doc: TDoc) => string
declare function build (schema: build.ObjectSchema, options?: build.Options): <TDoc extends object = object>(doc: TDoc) => string
declare function build (schema: build.Schema, options?: build.Options): <TDoc = object | any[] | string | number | boolean | null> (doc: TDoc) => string
export = build;
export = build

View File

@@ -1,32 +1,33 @@
// eslint-disable-next-line @typescript-eslint/no-unused-vars -- The test using this is disabled, see https://github.com/fastify/fast-json-stringify/pull/683
import Ajv from 'ajv'
import build, { restore, Schema, validLargeArrayMechanisms } from '..'
import { expectError, expectType } from 'tsd'
// Number schemas
build({
type: 'number'
type: 'number'
})(25)
build({
type: 'integer'
type: 'integer'
})(-5)
build({
type: 'integer'
type: 'integer'
})(5n)
build({
type: 'number'
type: 'number'
}, { rounding: 'ceil' })
build({
type: 'number'
type: 'number'
}, { rounding: 'floor' })
build({
type: 'number'
type: 'number'
}, { rounding: 'round' })
build({
type: 'number'
type: 'number'
}, { rounding: 'trunc' })
expectError(build({
type: 'number'
type: 'number'
}, { rounding: 'invalid' }))
// String schema
@@ -36,55 +37,55 @@ build({
// Boolean schema
build({
type: 'boolean'
type: 'boolean'
})(true)
// Null schema
build({
type: 'null'
type: 'null'
})(null)
// Array schemas
build({
type: 'array',
items: { type: 'number' }
type: 'array',
items: { type: 'number' }
})([25])
build({
type: 'array',
items: [{ type: 'string'}, {type: 'integer'}]
type: 'array',
items: [{ type: 'string' }, { type: 'integer' }]
})(['hello', 42])
// Object schemas
build({
type: 'object'
type: 'object'
})({})
build({
type: 'object',
properties: {
foo: { type: 'string' },
bar: { type: 'integer' }
},
required: ['foo'],
patternProperties: {
'baz*': { type: 'null' }
},
additionalProperties: {
type: 'boolean'
}
type: 'object',
properties: {
foo: { type: 'string' },
bar: { type: 'integer' }
},
required: ['foo'],
patternProperties: {
'baz*': { type: 'null' }
},
additionalProperties: {
type: 'boolean'
}
})({ foo: 'bar' })
build({
type: 'object',
properties: {
foo: { type: 'string' },
bar: { type: 'integer' }
},
required: ['foo'],
patternProperties: {
'baz*': { type: 'null' }
},
additionalProperties: {
type: 'boolean'
}
type: 'object',
properties: {
foo: { type: 'string' },
bar: { type: 'integer' }
},
required: ['foo'],
patternProperties: {
'baz*': { type: 'null' }
},
additionalProperties: {
type: 'boolean'
}
}, { rounding: 'floor' })({ foo: 'bar' })
// Reference schemas
@@ -113,7 +114,7 @@ build({
}
},
patternProperties: {
'num': {
num: {
$ref: '#/definitions/num'
}
},
@@ -159,7 +160,7 @@ build({
})(new Date())
/*
This overload doesn't work yet -
This overload doesn't work yet -
TypeScript chooses the generic for the schema
before it chooses the overload for the options
parameter.
@@ -207,52 +208,52 @@ interface InferenceSchema {
}
const stringify3 = build({
type: "object",
properties: { a: { type: "string" } },
});
stringify3<InferenceSchema>({ id: "123" });
stringify3<InferenceSchema>({ a: 123, id: "123" });
expectError(stringify3<InferenceSchema>({ anotherOne: "bar" }));
expectError(stringify3<Schema>({ a: "bar" }));
type: 'object',
properties: { a: { type: 'string' } },
})
stringify3<InferenceSchema>({ id: '123' })
stringify3<InferenceSchema>({ a: 123, id: '123' })
expectError(stringify3<InferenceSchema>({ anotherOne: 'bar' }))
expectError(stringify3<Schema>({ a: 'bar' }))
// Without inference
const stringify4 = build({
type: "object",
properties: { a: { type: "string" } },
});
stringify4({ id: "123" });
stringify4({ a: 123, id: "123" });
stringify4({ anotherOne: "bar" });
stringify4({ a: "bar" });
type: 'object',
properties: { a: { type: 'string' } },
})
stringify4({ id: '123' })
stringify4({ a: 123, id: '123' })
stringify4({ anotherOne: 'bar' })
stringify4({ a: 'bar' })
// Without inference - string type
const stringify5 = build({
type: "string",
});
stringify5("foo");
expectError(stringify5({ id: "123" }));
type: 'string',
})
stringify5('foo')
expectError(stringify5({ id: '123' }))
// Without inference - null type
const stringify6 = build({
type: "null",
});
stringify6(null);
expectError(stringify6("a string"));
type: 'null',
})
stringify6(null)
expectError(stringify6('a string'))
// Without inference - boolean type
const stringify7 = build({
type: "boolean",
});
stringify7(true);
expectError(stringify7("a string"));
type: 'boolean',
})
stringify7(true)
expectError(stringify7('a string'))
// largeArrayMechanism
build({}, { largeArrayMechanism: 'json-stringify'} )
build({}, { largeArrayMechanism: 'default'} )
expectError(build({} as Schema, { largeArrayMechanism: 'invalid'} ))
build({}, { largeArrayMechanism: 'json-stringify' })
build({}, { largeArrayMechanism: 'default' })
expectError(build({} as Schema, { largeArrayMechanism: 'invalid' }))
build({}, { largeArraySize: 2000 } )
build({}, { largeArraySize: '2e4' } )
build({}, { largeArraySize: 2n } )
expectError(build({} as Schema, { largeArraySize: ['asdf']} ))
build({}, { largeArraySize: 2000 })
build({}, { largeArraySize: '2e4' })
build({}, { largeArraySize: 2n })
expectError(build({} as Schema, { largeArraySize: ['asdf'] }))