Buckets:
| ; | |
| Object.defineProperty(exports, "__esModule", { value: true }); | |
| exports.resolveSchema = exports.getCompilingSchema = exports.resolveRef = exports.compileSchema = exports.SchemaEnv = void 0; | |
| const codegen_1 = require("./codegen"); | |
| const validation_error_1 = require("../runtime/validation_error"); | |
| const names_1 = require("./names"); | |
| const resolve_1 = require("./resolve"); | |
| const util_1 = require("./util"); | |
| const validate_1 = require("./validate"); | |
class SchemaEnv {
    /**
     * Compilation environment for one schema: holds the schema, its root
     * environment, the resolved base URI, and caches for compiled $refs.
     * @param {object} env - {schema, schemaId?, root?, baseId?, schemaPath?, localRefs?, meta?}
     */
    constructor(env) {
        var _a;
        // cache of resolved/compiled references keyed by normalized ref
        this.refs = {};
        this.dynamicAnchors = {};
        // `schema` is only set for object schemas; boolean schemas carry no $id/$async
        let schema;
        if (typeof env.schema == "object")
            schema = env.schema;
        this.schema = env.schema;
        this.schemaId = env.schemaId;
        // a schema without an explicit root is its own root
        this.root = env.root || this;
        // explicit baseId wins; otherwise normalize the schema's own $id
        // (or the configured schemaId keyword, e.g. "id" for older drafts)
        this.baseId = (_a = env.baseId) !== null && _a !== void 0 ? _a : (0, resolve_1.normalizeId)(schema === null || schema === void 0 ? void 0 : schema[env.schemaId || "$id"]);
        this.schemaPath = env.schemaPath;
        this.localRefs = env.localRefs;
        this.meta = env.meta;
        this.$async = schema === null || schema === void 0 ? void 0 : schema.$async;
        // NOTE: the original assigned `this.refs = {}` a second time here;
        // nothing reads `refs` in between, so the duplicate was removed.
    }
}
| exports.SchemaEnv = SchemaEnv; | |
| // let codeSize = 0 | |
| // let nodeCount = 0 | |
| // Compiles schema in SchemaEnv | |
// Compiles the schema in SchemaEnv `sch` into a validation function,
// assigns it to `sch.validate`, and returns `sch`. If the same schema is
// already being compiled (recursive $ref), returns that in-progress env.
function compileSchema(sch) {
    // TODO refactor - remove compilations
    const _sch = getCompilingSchema.call(this, sch);
    if (_sch)
        return _sch;
    const rootId = (0, resolve_1.getFullPath)(this.opts.uriResolver, sch.root.baseId); // TODO if getFullPath removed 1 tests fails
    const { es5, lines } = this.opts.code;
    const { ownProperties } = this.opts;
    const gen = new codegen_1.CodeGen(this.scope, { es5, lines, ownProperties });
    let _ValidationError;
    if (sch.$async) {
        // async validation functions throw ValidationError; register it in scope
        // so the generated code can reference it
        _ValidationError = gen.scopeValue("Error", {
            ref: validation_error_1.default,
            code: (0, codegen_1._) `require("ajv/dist/runtime/validation_error").default`,
        });
    }
    const validateName = gen.scopeName("validate");
    sch.validateName = validateName;
    // context object threaded through all keyword code generators
    const schemaCxt = {
        gen,
        allErrors: this.opts.allErrors,
        data: names_1.default.data,
        parentData: names_1.default.parentData,
        parentDataProperty: names_1.default.parentDataProperty,
        dataNames: [names_1.default.data],
        dataPathArr: [codegen_1.nil], // TODO can its length be used as dataLevel if nil is removed?
        dataLevel: 0,
        dataTypes: [],
        definedProperties: new Set(),
        topSchemaRef: gen.scopeValue("schema", this.opts.code.source === true
            ? { ref: sch.schema, code: (0, codegen_1.stringify)(sch.schema) }
            : { ref: sch.schema }),
        validateName,
        ValidationError: _ValidationError,
        schema: sch.schema,
        schemaEnv: sch,
        rootId,
        baseId: sch.baseId || rootId,
        schemaPath: codegen_1.nil,
        errSchemaPath: sch.schemaPath || (this.opts.jtd ? "" : "#"),
        errorPath: (0, codegen_1._) `""`,
        opts: this.opts,
        self: this,
    };
    let sourceCode;
    try {
        // mark as "compiling" before generating code so recursive $refs
        // find this env via getCompilingSchema instead of recursing forever
        this._compilations.add(sch);
        (0, validate_1.validateFunctionCode)(schemaCxt);
        gen.optimize(this.opts.code.optimize);
        // gen.optimize(1)
        const validateCode = gen.toString();
        sourceCode = `${gen.scopeRefs(names_1.default.scope)}return ${validateCode}`;
        // console.log((codeSize += sourceCode.length), (nodeCount += gen.nodeCount))
        // optional user hook to post-process the generated source
        if (this.opts.code.process)
            sourceCode = this.opts.code.process(sourceCode, sch);
        // console.log("\n\n\n *** \n", sourceCode)
        const makeValidate = new Function(`${names_1.default.self}`, `${names_1.default.scope}`, sourceCode);
        const validate = makeValidate(this, this.scope.get());
        this.scope.value(validateName, { ref: validate });
        validate.errors = null;
        validate.schema = sch.schema;
        validate.schemaEnv = sch;
        if (sch.$async)
            validate.$async = true;
        if (this.opts.code.source === true) {
            // keep the generated code and scope so standalone code can be emitted
            validate.source = { validateName, validateCode, scopeValues: gen._values };
        }
        if (this.opts.unevaluated) {
            const { props, items } = schemaCxt;
            // a Name instance means evaluated props/items are only known at runtime
            validate.evaluated = {
                props: props instanceof codegen_1.Name ? undefined : props,
                items: items instanceof codegen_1.Name ? undefined : items,
                dynamicProps: props instanceof codegen_1.Name,
                dynamicItems: items instanceof codegen_1.Name,
            };
            if (validate.source)
                validate.source.evaluated = (0, codegen_1.stringify)(validate.evaluated);
        }
        sch.validate = validate;
        return sch;
    }
    catch (e) {
        // roll back partial results so a retry can recompile cleanly
        delete sch.validate;
        delete sch.validateName;
        if (sourceCode)
            this.logger.error("Error compiling schema, function code:", sourceCode);
        // console.log("\n\n\n *** \n", sourceCode, this.opts)
        throw e;
    }
    finally {
        this._compilations.delete(sch);
    }
}
exports.compileSchema = compileSchema;
// Resolves `ref` against `baseId` within the root env. Returns a cached
// result when available, otherwise an inlined schema object or a compiled
// SchemaEnv; undefined when the reference cannot be resolved.
function resolveRef(root, baseId, ref) {
    var _a;
    ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, ref);
    const cached = root.refs[ref];
    if (cached)
        return cached;
    let env = resolve.call(this, root, ref);
    if (env === undefined) {
        // fall back to local refs collected from the root schema itself
        const schema = (_a = root.localRefs) === null || _a === void 0 ? void 0 : _a[ref]; // TODO maybe localRefs should hold SchemaEnv
        const { schemaId } = this.opts;
        if (schema)
            env = new SchemaEnv({ schema, schemaId, root, baseId });
    }
    if (env === undefined)
        return;
    return (root.refs[ref] = inlineOrCompile.call(this, env));
}
exports.resolveRef = resolveRef;
// Returns the raw schema when it can be inlined per opts.inlineRefs,
// otherwise the SchemaEnv, compiling it first if it has no validate yet.
function inlineOrCompile(sch) {
    if ((0, resolve_1.inlineRef)(sch.schema, this.opts.inlineRefs))
        return sch.schema;
    if (sch.validate)
        return sch;
    return compileSchema.call(this, sch);
}
| // Index of schema compilation in the currently compiled list | |
// Returns the in-progress compilation matching schEnv, if any — this is
// what breaks infinite recursion for self-referencing schemas.
function getCompilingSchema(schEnv) {
    for (const compiling of this._compilations) {
        if (sameSchemaEnv(compiling, schEnv))
            return compiling;
    }
    return undefined;
}
exports.getCompilingSchema = getCompilingSchema;
// Two environments are interchangeable when schema object identity,
// root identity and base URI all match.
function sameSchemaEnv(s1, s2) {
    if (s1.schema !== s2.schema)
        return false;
    if (s1.root !== s2.root)
        return false;
    return s1.baseId === s2.baseId;
}
| // resolve and compile the references ($ref) | |
| // TODO returns AnySchemaObject (if the schema can be inlined) or validation function | |
// Follows string aliases in this.refs until a non-string entry is found,
// then falls back to compiled schemas and finally to full resolution.
function resolve(root, // information about the root schema for the current schema
ref // reference to resolve
) {
    let env;
    for (;;) {
        env = this.refs[ref];
        if (typeof env != "string")
            break;
        ref = env; // alias: keep chasing the target id
    }
    return env || this.schemas[ref] || resolveSchema.call(this, root, ref);
}
| // Resolve schema, its root and baseId | |
function resolveSchema(root, // root object with properties schema, refs TODO below SchemaEnv is assigned to it
ref // reference to resolve
) {
    const p = this.opts.uriResolver.parse(ref);
    const refPath = (0, resolve_1._getFullPath)(this.opts.uriResolver, p);
    let baseId = (0, resolve_1.getFullPath)(this.opts.uriResolver, root.baseId, undefined);
    // TODO `Object.keys(root.schema).length > 0` should not be needed - but removing breaks 2 tests
    if (Object.keys(root.schema).length > 0 && refPath === baseId) {
        // reference points into the root schema itself - resolve the fragment locally
        return getJsonPointer.call(this, p, root);
    }
    const id = (0, resolve_1.normalizeId)(refPath);
    const schOrRef = this.refs[id] || this.schemas[id];
    if (typeof schOrRef == "string") {
        // string entry is an alias - resolve its target, then apply the fragment
        const sch = resolveSchema.call(this, root, schOrRef);
        if (typeof (sch === null || sch === void 0 ? void 0 : sch.schema) !== "object")
            return;
        return getJsonPointer.call(this, p, sch);
    }
    // only object schemas can be navigated further
    if (typeof (schOrRef === null || schOrRef === void 0 ? void 0 : schOrRef.schema) !== "object")
        return;
    // ensure the referenced env is compiled before it is handed out
    if (!schOrRef.validate)
        compileSchema.call(this, schOrRef);
    if (id === (0, resolve_1.normalizeId)(ref)) {
        // ref has no extra fragment - wrap the target schema in a fresh env,
        // extending baseId by the schema's own $id when present
        const { schema } = schOrRef;
        const { schemaId } = this.opts;
        const schId = schema[schemaId];
        if (schId)
            baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);
        return new SchemaEnv({ schema, schemaId, root, baseId });
    }
    // ref includes a fragment - resolve the JSON pointer inside the target
    return getJsonPointer.call(this, p, schOrRef);
}
exports.resolveSchema = resolveSchema;
// Keywords whose immediate children are user data (property names, enum
// values, definition names) rather than subschemas - an "$id"-looking key
// directly under them must not change the resolution scope while walking
// a JSON pointer (see getJsonPointer).
const PREVENT_SCOPE_CHANGE = new Set([
    "properties",
    "patternProperties",
    "enum",
    "dependencies",
    "definitions",
]);
// Walks a "/"-prefixed JSON pointer fragment into `schema`, tracking base
// URI changes along the way; returns a SchemaEnv for the target subschema,
// or undefined when the pointer cannot be followed or lands on the root.
function getJsonPointer(parsedRef, { baseId, schema, root }) {
    var _a;
    // only JSON pointer fragments ("#/...") are handled here, not plain anchors
    if (((_a = parsedRef.fragment) === null || _a === void 0 ? void 0 : _a[0]) !== "/")
        return;
    for (const part of parsedRef.fragment.slice(1).split("/")) {
        // boolean schemas have no members to descend into
        if (typeof schema === "boolean")
            return;
        const partSchema = schema[(0, util_1.unescapeFragment)(part)];
        if (partSchema === undefined)
            return;
        schema = partSchema;
        // TODO PREVENT_SCOPE_CHANGE could be defined in keyword def?
        // an $id directly under e.g. `properties` is a property name, not a scope change
        const schId = typeof schema === "object" && schema[this.opts.schemaId];
        if (!PREVENT_SCOPE_CHANGE.has(part) && schId) {
            baseId = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schId);
        }
    }
    let env;
    // the target being a pure $ref (no other validation keywords) is resolved transparently
    if (typeof schema != "boolean" && schema.$ref && !(0, util_1.schemaHasRulesButRef)(schema, this.RULES)) {
        const $ref = (0, resolve_1.resolveUrl)(this.opts.uriResolver, baseId, schema.$ref);
        env = resolveSchema.call(this, root, $ref);
    }
    // even though resolution failed we need to return SchemaEnv to throw exception
    // so that compileAsync loads missing schema.
    const { schemaId } = this.opts;
    env = env || new SchemaEnv({ schema, schemaId, root, baseId });
    // a pointer that resolves to the root schema itself is reported as unresolved
    if (env.schema !== env.root.schema)
        return env;
    return undefined;
}
| //# sourceMappingURL=index.js.map |
Xet Storage Details
- Size:
- 10 kB
- Xet hash:
- 9fd22f4eb78df19478a099db9fc235d105322a31394d4392aceb7182ac30c28f
·
Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.