ktongue/docker_container/.cache/opencode/node_modules/@fastify/merge-json-schemas/test/enum.test.js
const assert = require('node:assert/strict')
const { test } = require('node:test')

const { mergeSchemas } = require('../index')
const { defaultResolver } = require('./utils')

// These tests exercise how the default resolver merges the "enum" keyword:
// schemas that declare an enum are intersected (a schema without an enum adds
// no constraint), and merging fails when the intersection is empty.

test('should merge empty schema and string enum values', () => {
  const schema1 = { type: 'string' }
  const schema2 = { type: 'string', enum: ['foo', 'bar'] }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', enum: ['foo', 'bar'] })
})

test('should merge equal string enum values', () => {
  const schema1 = { type: 'string', enum: ['foo', 'bar'] }
  const schema2 = { type: 'string', enum: ['foo', 'bar'] }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', enum: ['foo', 'bar'] })
})

test('should merge different string enum values', () => {
  const schema1 = { type: 'string', enum: ['foo', 'bar'] }
  const schema2 = { type: 'string', enum: ['foo', 'baz'] }

  const mergedSchema = mergeSchemas([schema1, schema2], { defaultResolver })
  assert.deepStrictEqual(mergedSchema, { type: 'string', enum: ['foo'] })
})

test('should throw an error if can not merge enum values', () => {
  const schema1 = { type: 'string', enum: ['foo', 'bar'] }
  const schema2 = { type: 'string', enum: ['baz', 'qux'] }

  assert.throws(() => {
    mergeSchemas([schema1, schema2], { defaultResolver })
  }, {
    name: 'JsonSchemaMergeError',
    code: 'JSON_SCHEMA_MERGE_ERROR',
    message: 'Failed to merge "enum" keyword schemas.',
    schemas: [['foo', 'bar'], ['baz', 'qux']]
  })
})