text
stringlengths 3
1.05M
|
|---|
// Karma configuration file, see link for more information
// https://karma-runner.github.io/1.0/config/configuration-file.html
module.exports = function (config) {
config.set({
basePath: '',
frameworks: ['jasmine', '@angular-devkit/build-angular'],
plugins: [
require('karma-jasmine'),
require('karma-chrome-launcher'),
require('karma-jasmine-html-reporter'),
require('karma-coverage-istanbul-reporter'),
require('@angular-devkit/build-angular/plugins/karma')
],
client: {
clearContext: false // leave Jasmine Spec Runner output visible in browser
},
coverageIstanbulReporter: {
dir: require('path').join(__dirname, './coverage/UI'),
reports: ['html', 'lcovonly', 'text-summary'],
fixWebpackSourcePaths: true
},
reporters: ['progress', 'kjhtml'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
singleRun: false,
restartOnFileChange: true
});
};
|
from keras.layers import Input, Reshape, Dropout, Dense, Flatten, BatchNormalization, Activation, ZeroPadding2D
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.convolutional import UpSampling2D, Conv2D
from keras.models import Sequential, Model, load_model
from keras.optimizers import Adam
import numpy as np
from PIL import Image
import os
# Preview image Frame
PREVIEW_ROWS = 4      # grid rows in each saved preview sheet
PREVIEW_COLS = 7      # grid columns in each saved preview sheet
PREVIEW_MARGIN = 4    # pixel gap between tiles (and around the border)
SAVE_FREQ = 100       # save a preview sheet every SAVE_FREQ epochs
# Size vector to generate images from
NOISE_SIZE = 100      # latent-vector dimensionality fed to the generator
# Configuration
EPOCHS = 10000  # number of iterations
BATCH_SIZE = 32
GENERATE_RES = 3      # extra 2x upsampling stages in the generator (4 * 2**(2+3) = 128)
IMAGE_SIZE = 128  # rows/cols
IMAGE_CHANNELS = 3
# Pre-processed training images. Presumably shaped (N, IMAGE_SIZE, IMAGE_SIZE,
# IMAGE_CHANNELS) and scaled to [-1, 1] to match the generator's tanh output
# -- TODO confirm against the preprocessing that produced cubism_data.npy.
training_data = np.load('cubism_data.npy')
def build_discriminator(image_shape):
    """Assemble the DCGAN discriminator.

    Stacks Conv2D blocks (32 -> 512 filters) with LeakyReLU,
    BatchNormalization and Dropout, then flattens to a single sigmoid
    real/fake probability.

    Args:
        image_shape: (rows, cols, channels) of the input images.

    Returns:
        A keras ``Model`` mapping an image to a validity score in [0, 1].
    """
    body = Sequential()
    # Each spec: (filters, strides, zero_pad_after_conv, use_batch_norm).
    conv_specs = [
        (32, 2, False, False),
        (64, 2, True, True),
        (128, 2, False, True),
        (256, 1, False, True),
        (512, 1, False, True),
    ]
    first = True
    for filters, strides, zero_pad, batch_norm in conv_specs:
        if first:
            body.add(Conv2D(filters, kernel_size=3, strides=strides,
                            input_shape=image_shape, padding='same'))
            first = False
        else:
            body.add(Conv2D(filters, kernel_size=3, strides=strides,
                            padding='same'))
        if zero_pad:
            body.add(ZeroPadding2D(padding=((0, 1), (0, 1))))
        if batch_norm:
            body.add(BatchNormalization(momentum=0.8))
        body.add(LeakyReLU(alpha=0.2))
        body.add(Dropout(0.25))
    body.add(Flatten())
    body.add(Dense(1, activation='sigmoid'))
    image_input = Input(shape=image_shape)
    return Model(image_input, body(image_input))
def build_generator(noise_size, channels):
    """Assemble the DCGAN generator.

    Maps a latent vector of length ``noise_size`` to an image tensor with
    ``channels`` channels in [-1, 1] (tanh). Starts from a 4x4x256 feature
    map and doubles spatial resolution per UpSampling2D stage: two fixed
    stages plus GENERATE_RES more, so the output is
    4 * 2**(2 + GENERATE_RES) pixels per side (128 when GENERATE_RES = 3,
    matching IMAGE_SIZE).

    Args:
        noise_size: Dimensionality of the input noise vector.
        channels: Number of channels in the generated image.

    Returns:
        A keras ``Model`` from noise vector to generated image.
    """
    model = Sequential()
    model.add(Dense(4 * 4 * 256, activation="relu", input_dim=noise_size))
    model.add(Reshape((4, 4, 256)))
    # Two fixed upsampling stages (4x4 -> 8x8 -> 16x16), then GENERATE_RES
    # more; the stanzas were previously copy-pasted, now merged into one loop.
    for _ in range(2 + GENERATE_RES):
        model.add(UpSampling2D())
        model.add(Conv2D(256, kernel_size=3, padding="same"))
        model.add(BatchNormalization(momentum=0.8))
        model.add(Activation("relu"))
    model.summary()
    model.add(Conv2D(channels, kernel_size=3, padding="same"))
    model.add(Activation("tanh"))
    # Renamed from `input`, which shadowed the Python builtin.
    noise_input = Input(shape=(noise_size,))
    return Model(noise_input, model(noise_input))
def save_images(cnt, noise):
    """Render a PREVIEW_ROWS x PREVIEW_COLS sheet of generated images.

    Runs the module-level `generator` on `noise`, rescales its tanh output
    from [-1, 1] to [0, 1], tiles the images onto a white uint8 canvas with
    PREVIEW_MARGIN pixel gaps, and writes output/trained-<cnt>.png.
    """
    tile = IMAGE_SIZE + PREVIEW_MARGIN
    canvas = np.full(
        (PREVIEW_MARGIN + PREVIEW_ROWS * tile,
         PREVIEW_MARGIN + PREVIEW_COLS * tile,
         3),
        255, dtype=np.uint8)
    images = generator.predict(noise)
    images = 0.5 * images + 0.5  # tanh range [-1, 1] -> [0, 1]
    idx = 0
    for row in range(PREVIEW_ROWS):
        top = row * tile + PREVIEW_MARGIN
        for col in range(PREVIEW_COLS):
            left = col * tile + PREVIEW_MARGIN
            canvas[top:top + IMAGE_SIZE,
                   left:left + IMAGE_SIZE] = images[idx] * 255
            idx += 1
    output_path = 'output'
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    Image.fromarray(canvas).save(
        os.path.join(output_path, f"trained-{cnt}.png"))
image_shape = (IMAGE_SIZE, IMAGE_SIZE, IMAGE_CHANNELS)
# Adam(learning_rate=1.5e-4, beta_1=0.5) -- common DCGAN optimizer settings.
optimizer = Adam(1.5e-4, 0.5)
# The discriminator trains directly on real/fake image batches.
discriminator = build_discriminator(image_shape)
discriminator.compile(loss="binary_crossentropy",
                      optimizer=optimizer, metrics=["accuracy"])
generator = build_generator(NOISE_SIZE, IMAGE_CHANNELS)
# Combined model: noise -> generator -> (frozen) discriminator. Freezing the
# discriminator here ensures generator updates do not also move its weights.
random_input = Input(shape=(NOISE_SIZE,))
generated_image = generator(random_input)
discriminator.trainable = False
validity = discriminator(generated_image)
combined = Model(random_input, validity)
combined.compile(loss="binary_crossentropy",
                 optimizer=optimizer, metrics=["accuracy"])
# Label batches reused every iteration: 1 = real, 0 = fake.
y_real = np.ones((BATCH_SIZE, 1))
y_fake = np.zeros((BATCH_SIZE, 1))
# Fixed noise so the periodic preview sheets are comparable across epochs.
fixed_noise = np.random.normal(0, 1, (PREVIEW_ROWS * PREVIEW_COLS, NOISE_SIZE))
cnt = 1
for epoch in range(EPOCHS):
    # Sample a random batch of real images and synthesize a fake batch.
    idx = np.random.randint(0, training_data.shape[0], BATCH_SIZE)
    x_real = training_data[idx]
    noise = np.random.normal(0, 1, (BATCH_SIZE, NOISE_SIZE))
    x_fake = generator.predict(noise)
    # Train the discriminator on real and fake batches separately, then
    # average the two [loss, accuracy] vectors.
    discriminator_metric_real = discriminator.train_on_batch(x_real, y_real)
    discriminator_metric_generated = discriminator.train_on_batch(
        x_fake, y_fake)
    discriminator_metric = 0.5 * np.add(discriminator_metric_real, discriminator_metric_generated)
    # Train the generator through the combined model: it tries to make the
    # frozen discriminator label generated images as real (y_real).
    generator_metric = combined.train_on_batch(noise, y_real)
    if epoch % SAVE_FREQ == 0:
        save_images(cnt, fixed_noise)
        cnt += 1
        print(f"{epoch} epoch, Discriminator accuracy: {100* discriminator_metric[1]}, Generator accuracy: {100 * generator_metric[1]}")
|
module.exports =
/******/ (function(modules, runtime) { // webpackBootstrap
/******/ "use strict";
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ var threw = true;
/******/ try {
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ threw = false;
/******/ } finally {
/******/ if(threw) delete installedModules[moduleId];
/******/ }
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ __webpack_require__.ab = __dirname + "/";
/******/
/******/ // the startup function
/******/ function startup() {
/******/ // Load entry module and return exports
/******/ return __webpack_require__(526);
/******/ };
/******/
/******/ // run startup
/******/ return startup();
/******/ })
/************************************************************************/
/******/ ({
/***/ 11:
/***/ (function(module) {
// Returns a wrapper function that returns a wrapped callback
// The wrapper function should do some stuff, and return a
// presumably different callback function.
// This makes sure that own properties are retained, so that
// decorations and such are not lost along the way.
module.exports = wrappy
function wrappy (fn, cb) {
  // Curried form: wrappy(fn, cb) === wrappy(fn)(cb)
  if (fn && cb) return wrappy(fn)(cb)

  if (typeof fn !== 'function') {
    throw new TypeError('need wrapper function')
  }

  // Retain the wrapper factory's own properties on the wrapper itself, so
  // decorations and such are not lost along the way.
  for (const key of Object.keys(fn)) {
    wrapper[key] = fn[key]
  }

  return wrapper

  function wrapper () {
    const args = Array.prototype.slice.call(arguments)
    const result = fn.apply(this, args)
    // If the factory returned a new callback, carry the original callback's
    // own properties over to it.
    const lastArg = args[args.length - 1]
    if (typeof result === 'function' && result !== lastArg) {
      for (const key of Object.keys(lastArg)) {
        result[key] = lastArg[key]
      }
    }
    return result
  }
}
/***/ }),
/***/ 16:
/***/ (function(module) {
module.exports = require("tls");
/***/ }),
/***/ 18:
/***/ (function(module) {
// Dynamic require hidden from webpack's static analysis: "encoding" is an
// optional dependency, and eval("require") keeps the bundler from trying to
// resolve it at build time (it is loaded at runtime if present).
module.exports = eval("require")("encoding");
/***/ }),
/***/ 49:
/***/ (function(module, __unusedexports, __webpack_require__) {
var wrappy = __webpack_require__(11)
module.exports = wrappy(once)
module.exports.strict = wrappy(onceStrict)
once.proto = once(function () {
Object.defineProperty(Function.prototype, 'once', {
value: function () {
return once(this)
},
configurable: true
})
Object.defineProperty(Function.prototype, 'onceStrict', {
value: function () {
return onceStrict(this)
},
configurable: true
})
})
function once (fn) {
  // Wrapper that invokes `fn` at most once; later calls replay the first
  // return value (exposed on f.value, with f.called as the latch).
  const f = function () {
    if (f.called) return f.value
    f.called = true
    f.value = fn.apply(this, arguments)
    return f.value
  }
  f.called = false
  return f
}
function onceStrict (fn) {
  // Like once(), but any call after the first throws f.onceError.
  const f = function () {
    if (f.called) {
      throw new Error(f.onceError)
    }
    f.called = true
    f.value = fn.apply(this, arguments)
    return f.value
  }
  const name = fn.name || 'Function wrapped with `once`'
  f.onceError = name + " shouldn't be called more than once"
  f.called = false
  return f
}
/***/ }),
/***/ 82:
/***/ (function(__unusedmodule, exports) {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
exports.toCommandProperties = exports.toCommandValue = void 0;
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
    // null and undefined both become the empty string.
    if (input == null) {
        return '';
    }
    // Pass strings (including boxed String objects) through untouched;
    // everything else is JSON-serialized.
    const isString = typeof input === 'string' || input instanceof String;
    return isString ? input : JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
/**
*
* @param annotationProperties
* @returns The command properties to send with the actual annotation command
* See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
*/
function toCommandProperties(annotationProperties) {
    // No annotation properties -> no command payload.
    if (Object.keys(annotationProperties).length === 0) {
        return {};
    }
    const { title, file, startLine, endLine, startColumn, endColumn } = annotationProperties;
    // The runner's command protocol expects `line`/`col` rather than
    // `startLine`/`startColumn`.
    return {
        title,
        file,
        line: startLine,
        endLine,
        col: startColumn,
        endColumn
    };
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 87:
/***/ (function(module) {
module.exports = require("os");
/***/ }),
/***/ 102:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.issueCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const utils_1 = __webpack_require__(82);
/**
 * Appends a sanitized message to the file named by the GITHUB_<command>
 * environment variable (GitHub Actions "file command" protocol).
 * Throws if the variable is unset or the file does not exist.
 */
function issueCommand(command, message) {
    const filePath = process.env[`GITHUB_${command}`];
    if (!filePath) {
        throw new Error(`Unable to find environment variable for file command ${command}`);
    }
    if (!fs.existsSync(filePath)) {
        throw new Error(`Missing file at path: ${filePath}`);
    }
    const line = `${utils_1.toCommandValue(message)}${os.EOL}`;
    fs.appendFileSync(filePath, line, { encoding: 'utf8' });
}
exports.issueCommand = issueCommand;
//# sourceMappingURL=file-command.js.map
/***/ }),
/***/ 120:
/***/ (function(module) {
"use strict";
// Copy every own property (including non-enumerables and accessors) from
// `source` onto `target`, preserving each property descriptor.
module.exports.mixin = function mixin(target, source) {
  const keys = Object.getOwnPropertyNames(source);
  for (let i = 0; i < keys.length; ++i) {
    Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));
  }
};
// Symbols used to link a public wrapper object to its internal implementation
// object (and back) without exposing either as a normal property.
module.exports.wrapperSymbol = Symbol("wrapper");
module.exports.implSymbol = Symbol("impl");
// impl -> wrapper lookup.
module.exports.wrapperForImpl = function (impl) {
  return impl[module.exports.wrapperSymbol];
};
// wrapper -> impl lookup.
module.exports.implForWrapper = function (wrapper) {
  return wrapper[module.exports.implSymbol];
};
/***/ }),
/***/ 127:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__webpack_require__(539));
function getAuthString(token, options) {
    const hasToken = Boolean(token);
    const hasAuth = Boolean(options.auth);
    // Exactly one of token / opts.auth must be provided.
    if (!hasToken && !hasAuth) {
        throw new Error('Parameter token or opts.auth is required');
    }
    if (hasToken && hasAuth) {
        throw new Error('Parameters token and opts.auth may not both be specified');
    }
    // Prefer an explicit auth string; otherwise build a `token <t>` header value.
    return typeof options.auth === 'string' ? options.auth : `token ${token}`;
}
exports.getAuthString = getAuthString;
function getProxyAgent(destinationUrl) {
const hc = new httpClient.HttpClient();
return hc.getAgent(destinationUrl);
}
exports.getProxyAgent = getProxyAgent;
function getApiBaseUrl() {
    // GITHUB_API_URL is set on GitHub Enterprise / self-hosted runners;
    // fall back to the public API otherwise (|| also treats '' as unset,
    // matching the original behavior).
    const fromEnv = process.env['GITHUB_API_URL'];
    return fromEnv || 'https://api.github.com';
}
exports.getApiBaseUrl = getApiBaseUrl;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 141:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var net = __webpack_require__(631);
var tls = __webpack_require__(16);
var http = __webpack_require__(605);
var https = __webpack_require__(211);
var events = __webpack_require__(614);
var assert = __webpack_require__(357);
var util = __webpack_require__(669);
exports.httpOverHttp = httpOverHttp;
exports.httpsOverHttp = httpsOverHttp;
exports.httpOverHttps = httpOverHttps;
exports.httpsOverHttps = httpsOverHttps;
function httpOverHttp(options) {
var agent = new TunnelingAgent(options);
agent.request = http.request;
return agent;
}
function httpsOverHttp(options) {
var agent = new TunnelingAgent(options);
agent.request = http.request;
agent.createSocket = createSecureSocket;
agent.defaultPort = 443;
return agent;
}
function httpOverHttps(options) {
var agent = new TunnelingAgent(options);
agent.request = https.request;
return agent;
}
function httpsOverHttps(options) {
var agent = new TunnelingAgent(options);
agent.request = https.request;
agent.createSocket = createSecureSocket;
agent.defaultPort = 443;
return agent;
}
function TunnelingAgent(options) {
var self = this;
self.options = options || {};
self.proxyOptions = self.options.proxy || {};
self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;
self.requests = [];
self.sockets = [];
self.on('free', function onFree(socket, host, port, localAddress) {
var options = toOptions(host, port, localAddress);
for (var i = 0, len = self.requests.length; i < len; ++i) {
var pending = self.requests[i];
if (pending.host === options.host && pending.port === options.port) {
// Detect the request to connect same origin server,
// reuse the connection.
self.requests.splice(i, 1);
pending.request.onSocket(socket);
return;
}
}
socket.destroy();
self.removeSocket(socket);
});
}
util.inherits(TunnelingAgent, events.EventEmitter);
TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {
var self = this;
var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));
if (self.sockets.length >= this.maxSockets) {
// We are over limit so we'll add it to the queue.
self.requests.push(options);
return;
}
// If we are under maxSockets create a new one.
self.createSocket(options, function(socket) {
socket.on('free', onFree);
socket.on('close', onCloseOrRemove);
socket.on('agentRemove', onCloseOrRemove);
req.onSocket(socket);
function onFree() {
self.emit('free', socket, options);
}
function onCloseOrRemove(err) {
self.removeSocket(socket);
socket.removeListener('free', onFree);
socket.removeListener('close', onCloseOrRemove);
socket.removeListener('agentRemove', onCloseOrRemove);
}
});
};
/**
 * Opens a CONNECT tunnel through the proxy and hands the raw socket to `cb`.
 * A placeholder object reserves the pool slot until the tunnel is live;
 * on any failure the placeholder is removed and an ECONNRESET error is
 * emitted on the originating request.
 */
TunnelingAgent.prototype.createSocket = function createSocket(options, cb) {
  var self = this;
  var placeholder = {};
  self.sockets.push(placeholder);

  var connectOptions = mergeOptions({}, self.proxyOptions, {
    method: 'CONNECT',
    path: options.host + ':' + options.port,
    agent: false,
    headers: {
      host: options.host + ':' + options.port
    }
  });
  if (options.localAddress) {
    connectOptions.localAddress = options.localAddress;
  }
  if (connectOptions.proxyAuth) {
    connectOptions.headers = connectOptions.headers || {};
    // Buffer.from replaces the deprecated and unsafe `new Buffer(...)`
    // constructor (Node DEP0005).
    connectOptions.headers['Proxy-Authorization'] = 'Basic ' +
        Buffer.from(connectOptions.proxyAuth).toString('base64');
  }

  debug('making CONNECT request');
  var connectReq = self.request(connectOptions);
  connectReq.useChunkedEncodingByDefault = false; // for v0.6
  connectReq.once('response', onResponse); // for v0.6
  connectReq.once('upgrade', onUpgrade);   // for v0.6
  connectReq.once('connect', onConnect);   // for v0.7 or later
  connectReq.once('error', onError);
  connectReq.end();

  function onResponse(res) {
    // Very hacky. This is necessary to avoid http-parser leaks.
    res.upgrade = true;
  }

  function onUpgrade(res, socket, head) {
    // Hacky.
    process.nextTick(function() {
      onConnect(res, socket, head);
    });
  }

  function onConnect(res, socket, head) {
    connectReq.removeAllListeners();
    socket.removeAllListeners();

    if (res.statusCode !== 200) {
      debug('tunneling socket could not be established, statusCode=%d',
            res.statusCode);
      socket.destroy();
      var error = new Error('tunneling socket could not be established, ' +
          'statusCode=' + res.statusCode);
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    if (head.length > 0) {
      debug('got illegal response body from proxy');
      socket.destroy();
      var error = new Error('got illegal response body from proxy');
      error.code = 'ECONNRESET';
      options.request.emit('error', error);
      self.removeSocket(placeholder);
      return;
    }
    debug('tunneling connection has established');
    // Swap the placeholder for the live socket in the pool.
    self.sockets[self.sockets.indexOf(placeholder)] = socket;
    return cb(socket);
  }

  function onError(cause) {
    connectReq.removeAllListeners();
    debug('tunneling socket could not be established, cause=%s\n',
          cause.message, cause.stack);
    var error = new Error('tunneling socket could not be established, ' +
        'cause=' + cause.message);
    error.code = 'ECONNRESET';
    options.request.emit('error', error);
    self.removeSocket(placeholder);
  }
};
TunnelingAgent.prototype.removeSocket = function removeSocket(socket) {
var pos = this.sockets.indexOf(socket)
if (pos === -1) {
return;
}
this.sockets.splice(pos, 1);
var pending = this.requests.shift();
if (pending) {
// If we have pending requests and a socket gets closed a new one
// needs to be created to take over in the pool for the one that closed.
this.createSocket(pending, function(socket) {
pending.request.onSocket(socket);
});
}
};
function createSecureSocket(options, cb) {
var self = this;
TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {
var hostHeader = options.request.getHeader('host');
var tlsOptions = mergeOptions({}, self.options, {
socket: socket,
servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host
});
// 0 is dummy port for v0.6
var secureSocket = tls.connect(0, tlsOptions);
self.sockets[self.sockets.indexOf(socket)] = secureSocket;
cb(secureSocket);
});
}
// Normalize (host, port, localAddress) into an options object. Node v0.11+
// already passes an options object as `host`, which is returned unchanged.
function toOptions(host, port, localAddress) {
  if (typeof host === 'string') { // since v0.10
    return { host: host, port: port, localAddress: localAddress };
  }
  return host; // for v0.11 or later
}
// Shallow-merge every later argument into `target`, skipping keys whose
// value is undefined (unlike Object.assign). Non-object arguments are ignored.
function mergeOptions(target) {
  for (let i = 1; i < arguments.length; ++i) {
    const overrides = arguments[i];
    if (typeof overrides !== 'object') {
      continue;
    }
    for (const key of Object.keys(overrides)) {
      if (overrides[key] !== undefined) {
        target[key] = overrides[key];
      }
    }
  }
  return target;
}
var debug;
if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) {
debug = function() {
var args = Array.prototype.slice.call(arguments);
if (typeof args[0] === 'string') {
args[0] = 'TUNNEL: ' + args[0];
} else {
args.unshift('TUNNEL:');
}
console.error.apply(console, args);
}
} else {
debug = function() {};
}
exports.debug = debug; // for test
/***/ }),
/***/ 176:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
exports.URL = __webpack_require__(880).interface;
exports.serializeURL = __webpack_require__(856).serializeURL;
exports.serializeURLOrigin = __webpack_require__(856).serializeURLOrigin;
exports.basicURLParse = __webpack_require__(856).basicURLParse;
exports.setTheUsername = __webpack_require__(856).setTheUsername;
exports.setThePassword = __webpack_require__(856).setThePassword;
exports.serializeHost = __webpack_require__(856).serializeHost;
exports.serializeInteger = __webpack_require__(856).serializeInteger;
exports.parseURL = __webpack_require__(856).parseURL;
/***/ }),
/***/ 197:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
const usm = __webpack_require__(856);
exports.implementation = class URLImpl {
constructor(constructorArgs) {
const url = constructorArgs[0];
const base = constructorArgs[1];
let parsedBase = null;
if (base !== undefined) {
parsedBase = usm.basicURLParse(base);
if (parsedBase === "failure") {
throw new TypeError("Invalid base URL");
}
}
const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });
if (parsedURL === "failure") {
throw new TypeError("Invalid URL");
}
this._url = parsedURL;
// TODO: query stuff
}
get href() {
return usm.serializeURL(this._url);
}
set href(v) {
const parsedURL = usm.basicURLParse(v);
if (parsedURL === "failure") {
throw new TypeError("Invalid URL");
}
this._url = parsedURL;
}
get origin() {
return usm.serializeURLOrigin(this._url);
}
get protocol() {
return this._url.scheme + ":";
}
set protocol(v) {
usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" });
}
get username() {
return this._url.username;
}
set username(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
usm.setTheUsername(this._url, v);
}
get password() {
return this._url.password;
}
set password(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
usm.setThePassword(this._url, v);
}
get host() {
const url = this._url;
if (url.host === null) {
return "";
}
if (url.port === null) {
return usm.serializeHost(url.host);
}
return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port);
}
set host(v) {
if (this._url.cannotBeABaseURL) {
return;
}
usm.basicURLParse(v, { url: this._url, stateOverride: "host" });
}
get hostname() {
if (this._url.host === null) {
return "";
}
return usm.serializeHost(this._url.host);
}
set hostname(v) {
if (this._url.cannotBeABaseURL) {
return;
}
usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" });
}
get port() {
if (this._url.port === null) {
return "";
}
return usm.serializeInteger(this._url.port);
}
set port(v) {
if (usm.cannotHaveAUsernamePasswordPort(this._url)) {
return;
}
if (v === "") {
this._url.port = null;
} else {
usm.basicURLParse(v, { url: this._url, stateOverride: "port" });
}
}
get pathname() {
if (this._url.cannotBeABaseURL) {
return this._url.path[0];
}
if (this._url.path.length === 0) {
return "";
}
return "/" + this._url.path.join("/");
}
set pathname(v) {
if (this._url.cannotBeABaseURL) {
return;
}
this._url.path = [];
usm.basicURLParse(v, { url: this._url, stateOverride: "path start" });
}
get search() {
if (this._url.query === null || this._url.query === "") {
return "";
}
return "?" + this._url.query;
}
set search(v) {
// TODO: query stuff
const url = this._url;
if (v === "") {
url.query = null;
return;
}
const input = v[0] === "?" ? v.substring(1) : v;
url.query = "";
usm.basicURLParse(input, { url, stateOverride: "query" });
}
get hash() {
if (this._url.fragment === null || this._url.fragment === "") {
return "";
}
return "#" + this._url.fragment;
}
set hash(v) {
if (v === "") {
this._url.fragment = null;
return;
}
const input = v[0] === "#" ? v.substring(1) : v;
this._url.fragment = "";
usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" });
}
toJSON() {
return this.href;
}
};
/***/ }),
/***/ 211:
/***/ (function(module) {
module.exports = require("https");
/***/ }),
/***/ 213:
/***/ (function(module) {
module.exports = require("punycode");
/***/ }),
/***/ 226:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class BasicCredentialHandler {
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    // Attach an RFC 7617 Basic auth header built from username:password.
    prepareRequest(options) {
        const raw = `${this.username}:${this.password}`;
        options.headers['Authorization'] = `Basic ${Buffer.from(raw).toString('base64')}`;
    }
    // Pre-authorization only; a 401 challenge cannot be handled.
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] = `Bearer ${this.token}`;
    }
    // This handler cannot respond to a 401 challenge.
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
    constructor(token) {
        this.token = token;
    }
    // PATs are sent as Basic auth with the literal user "PAT".
    prepareRequest(options) {
        const encoded = Buffer.from(`PAT:${this.token}`).toString('base64');
        options.headers['Authorization'] = `Basic ${encoded}`;
    }
    // Pre-authorization only; cannot respond to a 401 challenge.
    canHandleAuthentication(response) {
        return false;
    }
    handleAuthentication(httpClient, requestInfo, objs) {
        return null;
    }
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
/***/ }),
/***/ 262:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Context = void 0;
const fs_1 = __webpack_require__(747);
const os_1 = __webpack_require__(87);
class Context {
/**
* Hydrate the context from the environment
*/
constructor() {
var _a, _b, _c;
this.payload = {};
if (process.env.GITHUB_EVENT_PATH) {
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
}
else {
const path = process.env.GITHUB_EVENT_PATH;
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
}
}
this.eventName = process.env.GITHUB_EVENT_NAME;
this.sha = process.env.GITHUB_SHA;
this.ref = process.env.GITHUB_REF;
this.workflow = process.env.GITHUB_WORKFLOW;
this.action = process.env.GITHUB_ACTION;
this.actor = process.env.GITHUB_ACTOR;
this.job = process.env.GITHUB_JOB;
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
}
get issue() {
const payload = this.payload;
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
}
get repo() {
if (process.env.GITHUB_REPOSITORY) {
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
return { owner, repo };
}
if (this.payload.repository) {
return {
owner: this.payload.repository.owner.login,
repo: this.payload.repository.name
};
}
throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
}
}
exports.Context = Context;
//# sourceMappingURL=context.js.map
/***/ }),
/***/ 280:
/***/ (function(module) {
module.exports = register;
// Run `method(options)` through every hook registered in state.registry
// under `name`. An array of names nests the registrations innermost-first.
// Always returns a Promise.
function register(state, name, method, options) {
    if (typeof method !== "function") {
        throw new Error("method for before hook must be a function");
    }
    options = options || {};
    if (Array.isArray(name)) {
        return name.reverse().reduce(function (callback, singleName) {
            return register.bind(null, state, singleName, callback, options);
        }, method)();
    }
    return Promise.resolve().then(function () {
        const hooks = state.registry[name];
        if (!hooks) {
            return method(options);
        }
        // Wrap the method with each registered hook in order.
        return hooks.reduce(function (wrapped, registered) {
            return registered.hook.bind(null, wrapped, options);
        }, method)();
    });
}
/***/ }),
/***/ 299:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
const VERSION = "2.17.0";
// Collect own string keys of `object`, plus its symbol keys when supported
// (all symbols, or only enumerable ones when enumerableOnly is set).
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter(function (sym) {
        return Object.getOwnPropertyDescriptor(object, sym).enumerable;
      });
    }
    keys.push.apply(keys, symbols);
  }
  return keys;
}

// Babel-style object-spread helper: shallow-merge each source argument into
// `target` and return it.
function _objectSpread2(target) {
  for (let i = 1; i < arguments.length; i++) {
    const source = arguments[i] != null ? arguments[i] : {};
    if (i % 2) {
      // Odd positions: copy enumerable own props (incl. symbols) one by one.
      ownKeys(Object(source), true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return target;
}

// Define/overwrite a single enumerable data property; keys not yet present
// are set by plain assignment.
function _defineProperty(obj, key, value) {
  if (key in obj) {
    Object.defineProperty(obj, key, {
      value: value,
      enumerable: true,
      configurable: true,
      writable: true
    });
  } else {
    obj[key] = value;
  }
  return obj;
}
/**
* Some “list” response that can be paginated have a different response structure
*
* They have a `total_count` key in the response (search also has `incomplete_results`,
* /installation/repositories also has `repository_selection`), as well as a key with
* the list of the items which name varies from endpoint to endpoint.
*
* Octokit normalizes these responses so that paginated results are always returned following
* the same structure. One challenge is that if the list response has only one page, no Link
* header is provided, so this header alone is not sufficient to check wether a response is
* paginated or not.
*
* We check if a "total_count" key is present in the response data, but also make sure that
* a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
* otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
*/
/**
 * Rewrites a `{ total_count, <items_key>: [...] }` list response in place so
 * that `response.data` is the items array itself, re-attaching
 * `incomplete_results`, `repository_selection` and `total_count` onto it.
 * Responses that are not in the envelope shape are returned unchanged.
 */
function normalizePaginatedListResponse(response) {
  // endpoints can respond with 204 if repository is empty
  if (!response.data) {
    return _objectSpread2(_objectSpread2({}, response), {}, {
      data: []
    });
  }
  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
  if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
  // to retrieve the same information.
  const incompleteResults = response.data.incomplete_results;
  const repositorySelection = response.data.repository_selection;
  const totalCount = response.data.total_count;
  // delete the known envelope keys so that the single remaining key is the
  // one holding the items list (its name varies per endpoint)
  delete response.data.incomplete_results;
  delete response.data.repository_selection;
  delete response.data.total_count;
  const namespaceKey = Object.keys(response.data)[0];
  const data = response.data[namespaceKey];
  response.data = data;
  if (typeof incompleteResults !== "undefined") {
    response.data.incomplete_results = incompleteResults;
  }
  if (typeof repositorySelection !== "undefined") {
    response.data.repository_selection = repositorySelection;
  }
  response.data.total_count = totalCount;
  return response;
}
/**
 * Creates an async-iterable over all pages of a paginated REST endpoint.
 * Each `next()` performs one request and follows the `rel="next"` URL from
 * the `link` response header until it is absent.
 */
function iterator(octokit, route, parameters) {
  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
  const requestMethod = typeof route === "function" ? route : octokit.request;
  const method = options.method;
  const headers = options.headers;
  let url = options.url;
  return {
    [Symbol.asyncIterator]: () => ({
      async next() {
        // `url` becomes falsy once the last page has been consumed
        if (!url) return {
          done: true
        };
        try {
          const response = await requestMethod({
            method,
            url,
            headers
          });
          const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
          // '<https://api.github.com/users/aseemk/followers?page=2>; rel="next", <https://api.github.com/users/aseemk/followers?page=2>; rel="last"'
          // sets `url` to undefined if "next" URL is not present or `link` header is not set
          url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
          return {
            value: normalizedResponse
          };
        } catch (error) {
          // a 409 (e.g. empty repository) is treated as a final empty page
          // rather than failing the whole iteration
          if (error.status !== 409) throw error;
          url = "";
          return {
            value: {
              status: 200,
              headers: {},
              data: []
            }
          };
        }
      }
    })
  };
}
/**
 * Fetches every page of a paginated endpoint and resolves with the combined
 * results. `parameters` may be omitted, in which case the third argument is
 * the optional map function `(response, done) => items`.
 */
function paginate(octokit, route, parameters, mapFn) {
  if (typeof parameters === "function") {
    mapFn = parameters;
    parameters = undefined;
  }
  const pages = iterator(octokit, route, parameters)[Symbol.asyncIterator]();
  return gather(octokit, [], pages, mapFn);
}
/**
 * Recursively drains `iterator`, concatenating each page's items (or the
 * result of `mapFn(response, done)`) onto `results`. Calling `done()` inside
 * `mapFn` stops after the current page.
 */
function gather(octokit, results, iterator, mapFn) {
  return iterator.next().then((result) => {
    if (result.done) {
      return results;
    }
    let stopped = false;
    const done = () => {
      stopped = true;
    };
    const chunk = mapFn ? mapFn(result.value, done) : result.value.data;
    const collected = results.concat(chunk);
    return stopped ? collected : gather(octokit, collected, iterator, mapFn);
  });
}
// standalone variant of `paginate` that takes the octokit instance as first
// argument, with the page iterator attached as `composePaginateRest.iterator`
const composePaginateRest = Object.assign(paginate, {
  iterator
});
// Every REST API route whose list response supports pagination; consulted by
// `isPaginatingEndpoint` to decide whether `paginate()` applies to a route.
const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations",
"GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups",
"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners",
"GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads",
"GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks",
"GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts",
"GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events",
"GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups",
"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners",
"GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/secrets",
"GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/credential-authorizations",
"GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries",
"GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues",
"GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators",
"GET /orgs/{org}/packages", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts",
"GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions",
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments",
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions",
"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations",
"GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos",
"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards",
"GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts",
"GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runs",
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs",
"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows",
"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/autolinks",
"GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations",
"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts",
"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses",
"GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions",
"GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head",
"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls",
"GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites",
"GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/deployments",
"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks",
"GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries",
"GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments",
"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events",
"GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events",
"GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions",
"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels",
"GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications",
"GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls",
"GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits",
"GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers",
"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments",
"GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/secret-scanning/alerts",
"GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams",
"GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups",
"GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits",
"GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users",
"GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments",
"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions",
"GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members",
"GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams",
"GET /user/blocks", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations",
"GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases",
"GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories",
"GET /user/orgs", "GET /user/packages", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred",
"GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}",
"GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists",
"GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages",
"GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public",
"GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
// Returns true only when `arg` is a route string listed in
// `paginatingEndpoints`; any non-string argument yields false.
function isPaginatingEndpoint(arg) {
  return typeof arg === "string" && paginatingEndpoints.includes(arg);
}
/**
 * Octokit plugin entry point: adds `octokit.paginate(...)` (with
 * `octokit.paginate.iterator(...)` attached) bound to this instance.
 *
 * @param octokit Octokit instance
 * @param options Options passed to Octokit constructor
 */
function paginateRest(octokit) {
  const boundPaginate = Object.assign(paginate.bind(null, octokit), {
    iterator: iterator.bind(null, octokit)
  });
  return {
    paginate: boundPaginate
  };
}
// expose the plugin version and the module's public API
paginateRest.VERSION = VERSION;
exports.composePaginateRest = composePaginateRest;
exports.isPaginatingEndpoint = isPaginatingEndpoint;
exports.paginateRest = paginateRest;
exports.paginatingEndpoints = paginatingEndpoints;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 356:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
/*!
* is-plain-object <https://github.com/jonschlinkert/is-plain-object>
*
* Copyright (c) 2014-2017, Jon Schlinkert.
* Released under the MIT License.
*/
// True when `o`'s internal tag is exactly "[object Object]".
function isObject(o) {
  const tag = Object.prototype.toString.call(o);
  return tag === '[object Object]';
}

// True only for "plain" objects: object literals and objects whose prototype
// chain looks like Object's (or prototype-less objects from
// Object.create(null)). Arrays, class instances, Dates, etc. are rejected.
function isPlainObject(o) {
  if (!isObject(o)) return false;
  // no constructor at all (e.g. Object.create(null)) counts as plain
  const ctor = o.constructor;
  if (ctor === undefined) return true;
  // a modified/custom prototype disqualifies the object
  const prot = ctor.prototype;
  if (!isObject(prot)) return false;
  // plain Object prototypes carry their own `isPrototypeOf`
  return prot.hasOwnProperty('isPrototypeOf');
}
exports.isPlainObject = isPlainObject;
/***/ }),
/***/ 357:
/***/ (function(module) {
module.exports = require("assert");
/***/ }),
/***/ 385:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
var isPlainObject = __webpack_require__(356);
var universalUserAgent = __webpack_require__(796);
// Returns a shallow copy of `object` with every key lowercased (used to
// normalize HTTP header names); falsy input yields an empty object.
function lowercaseKeys(object) {
  if (!object) {
    return {};
  }
  const result = {};
  for (const key of Object.keys(object)) {
    result[key.toLowerCase()] = object[key];
  }
  return result;
}
// Recursively merges `options` over `defaults` into a new object: plain-object
// values present in both are merged key-by-key, everything else is replaced.
function mergeDeep(defaults, options) {
  const result = Object.assign({}, defaults);
  for (const key of Object.keys(options)) {
    const value = options[key];
    if (isPlainObject.isPlainObject(value) && key in defaults) {
      // both sides are objects for this key — merge one level deeper
      result[key] = mergeDeep(defaults[key], value);
    } else {
      result[key] = value;
    }
  }
  return result;
}
// Deletes every own key of `obj` whose value is `undefined`, in place,
// and returns the same object.
function removeUndefinedProperties(obj) {
  Object.keys(obj).forEach((key) => {
    if (obj[key] === undefined) {
      delete obj[key];
    }
  });
  return obj;
}
/**
 * Merges endpoint defaults with a route (either a "METHOD /path" string or an
 * options object) plus extra options into a single options object.
 * Returns a new object; `defaults` itself is not mutated.
 */
function merge(defaults, route, options) {
  if (typeof route === "string") {
    // split "GET /path"; a bare "/path" (no space) is treated as the URL only
    let [method, url] = route.split(" ");
    options = Object.assign(url ? {
      method,
      url
    } : {
      url: method
    }, options);
  } else {
    options = Object.assign({}, route);
  } // lowercase header names before merging with defaults to avoid duplicates
  options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
  removeUndefinedProperties(options);
  removeUndefinedProperties(options.headers);
  const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
  if (defaults && defaults.mediaType.previews.length) {
    mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
  }
  // normalize preview names: "wombat-preview" -> "wombat"
  mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
  return mergedOptions;
}
// Appends `parameters` to `url` as a query string, using "&" when the URL
// already has a "?". The "q" parameter keeps its "+" separators (search
// syntax), with each term individually percent-encoded.
function addQueryParameters(url, parameters) {
  const names = Object.keys(parameters);
  if (names.length === 0) {
    return url;
  }
  const query = names
    .map((name) =>
      name === "q"
        ? "q=" + parameters.q.split("+").map(encodeURIComponent).join("+")
        : `${name}=${encodeURIComponent(parameters[name])}`
    )
    .join("&");
  const separator = /\?/.test(url) ? "&" : "?";
  return url + separator + query;
}
// matches each `{...}` expression in an RFC 6570-style URL template
const urlVariableRegex = /\{[^}]+\}/g;

// Strips the surrounding braces/operator characters from one template
// expression and splits comma-separated variable lists.
function removeNonChars(variableName) {
  const trimmed = variableName.replace(/^\W+|\W+$/g, "");
  return trimmed.split(",");
}

// Lists every variable name referenced by the URL template `url`.
function extractUrlVariableNames(url) {
  const matches = url.match(urlVariableRegex);
  return matches ? matches.flatMap(removeNonChars) : [];
}
// Returns a shallow copy of `object` without the keys listed in `keysToOmit`.
function omit(object, keysToOmit) {
  const result = {};
  for (const key of Object.keys(object)) {
    if (!keysToOmit.includes(key)) {
      result[key] = object[key];
    }
  }
  return result;
}
// Based on https://github.com/bramstein/url-template, licensed under BSD
// TODO: create separate package.
//
// Copyright (c) 2012-2014, Bram Stein
// All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. The name of the author may not be used to endorse or promote products
// derived from this software without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/* istanbul ignore file */
// Percent-encodes `str` while leaving RFC 3986 reserved characters and
// already-encoded "%XX" triplets intact; "[" and "]" are restored after
// encodeURI escapes them.
function encodeReserved(str) {
  const segments = str.split(/(%[0-9A-Fa-f]{2})/g);
  const encoded = segments.map((segment) =>
    /%[0-9A-Fa-f]/.test(segment)
      ? segment
      : encodeURI(segment).replace(/%5B/g, "[").replace(/%5D/g, "]")
  );
  return encoded.join("");
}

// Strict RFC 3986 encoding: like encodeURIComponent, but also escapes
// ! ' ( ) * which encodeURIComponent leaves alone.
function encodeUnreserved(str) {
  const escapeExtra = (c) => "%" + c.charCodeAt(0).toString(16).toUpperCase();
  return encodeURIComponent(str).replace(/[!'()*]/g, escapeExtra);
}

// Encodes one template value: "+" and "#" operators allow reserved characters
// through; other operators fully encode. Prepends "key=" when `key` is given.
function encodeValue(operator, value, key) {
  const encoded = operator === "+" || operator === "#"
    ? encodeReserved(value)
    : encodeUnreserved(value);
  return key ? encodeUnreserved(key) + "=" + encoded : encoded;
}
// True unless `value` is null or undefined (loose `!= null` matches exactly
// those two values).
function isDefined(value) {
  return value != null;
}
// RFC 6570 operators whose expansions are rendered as "key=value" pairs.
function isKeyOperator(operator) {
  return [";", "&", "?"].includes(operator);
}
/**
 * Expands a single RFC 6570 variable `key` from `context` for the given
 * template `operator` ("+", "#", ".", "/", ";", "?", "&" or "") and optional
 * `modifier` (":n" prefix limit or "*" explode).
 * Returns an array of encoded "value" / "key=value" fragments.
 */
function getValues(context, operator, key, modifier) {
  var value = context[key],
      result = [];
  if (isDefined(value) && value !== "") {
    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
      value = value.toString();
      // ":n" modifier truncates scalar values to n characters
      if (modifier && modifier !== "*") {
        value = value.substring(0, parseInt(modifier, 10));
      }
      result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
    } else {
      if (modifier === "*") {
        // explode: arrays/objects contribute one fragment per entry
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              result.push(encodeValue(operator, value[k], k));
            }
          });
        }
      } else {
        // non-exploded composite values are joined with commas
        const tmp = [];
        if (Array.isArray(value)) {
          value.filter(isDefined).forEach(function (value) {
            tmp.push(encodeValue(operator, value));
          });
        } else {
          Object.keys(value).forEach(function (k) {
            if (isDefined(value[k])) {
              tmp.push(encodeUnreserved(k));
              tmp.push(encodeValue(operator, value[k].toString()));
            }
          });
        }
        if (isKeyOperator(operator)) {
          result.push(encodeUnreserved(key) + "=" + tmp.join(","));
        } else if (tmp.length !== 0) {
          result.push(tmp.join(","));
        }
      }
    }
  } else {
    // handling of undefined / empty-string values depends on the operator
    if (operator === ";") {
      if (isDefined(value)) {
        result.push(encodeUnreserved(key));
      }
    } else if (value === "" && (operator === "&" || operator === "?")) {
      result.push(encodeUnreserved(key) + "=");
    } else if (value === "") {
      result.push("");
    }
  }
  return result;
}
// Wraps an RFC 6570 URL template in an object exposing `expand(context)`.
function parseUrl(template) {
  return {
    expand: expand.bind(null, template)
  };
}
/**
 * Expands an RFC 6570 URL template against `context`: each `{...}` expression
 * is expanded per its operator, literal text passes through `encodeReserved`.
 */
function expand(template, context) {
  var operators = ["+", "#", ".", "/", ";", "?", "&"];
  return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
    if (expression) {
      let operator = "";
      const values = [];
      // peel off a leading operator character, if any
      if (operators.indexOf(expression.charAt(0)) !== -1) {
        operator = expression.charAt(0);
        expression = expression.substr(1);
      }
      // each comma-separated variable may carry a ":n" or "*" modifier
      expression.split(/,/g).forEach(function (variable) {
        var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
        values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
      });
      if (operator && operator !== "+") {
        // "?" joins with "&"; "#" joins with ","; others use the operator itself
        var separator = ",";
        if (operator === "?") {
          separator = "&";
        } else if (operator !== "#") {
          separator = operator;
        }
        return (values.length !== 0 ? operator : "") + values.join(separator);
      } else {
        return values.join(",");
      }
    } else {
      return encodeReserved(literal);
    }
  });
}
/**
 * Converts merged endpoint options into request options
 * `{ method, url, headers, [body], [request] }`: expands the URL template,
 * applies mediaType format/previews to the accept header, and routes leftover
 * parameters into the query string (GET/HEAD) or the request body (otherwise).
 */
function parse(options) {
  // https://fetch.spec.whatwg.org/#methods
  let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
  let headers = Object.assign({}, options.headers);
  let body;
  let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
  const urlVariableNames = extractUrlVariableNames(url);
  url = parseUrl(url).expand(parameters);
  // relative URLs are resolved against the configured baseUrl
  if (!/^http/.test(url)) {
    url = options.baseUrl + url;
  }
  const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
  const remainingParameters = omit(parameters, omittedParameters);
  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
  if (!isBinaryRequest) {
    if (options.mediaType.format) {
      // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
      headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
    }
    if (options.mediaType.previews.length) {
      // merge previews already present in the accept header with the requested ones
      const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
      headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
        const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
        return `application/vnd.github.${preview}-preview${format}`;
      }).join(",");
    }
  } // for GET/HEAD requests, set URL query parameters from remaining parameters
  // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
  if (["GET", "HEAD"].includes(method)) {
    url = addQueryParameters(url, remainingParameters);
  } else {
    if ("data" in remainingParameters) {
      body = remainingParameters.data;
    } else {
      if (Object.keys(remainingParameters).length) {
        body = remainingParameters;
      } else {
        headers["content-length"] = 0;
      }
    }
  } // default content-type for JSON if body is set
  if (!headers["content-type"] && typeof body !== "undefined") {
    headers["content-type"] = "application/json; charset=utf-8";
  } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
  // fetch does not allow to set `content-length` header, but we can set body to an empty string
  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
    body = "";
  } // Only return body/request keys if present
  return Object.assign({
    method,
    url,
    headers
  }, typeof body !== "undefined" ? {
    body
  } : null, options.request ? {
    request: options.request
  } : null);
}
// Convenience wrapper: merge `route`/`options` into `defaults`, then parse the
// result into final request options.
function endpointWithDefaults(defaults, route, options) {
  const merged = merge(defaults, route, options);
  return parse(merged);
}
// Builds an `endpoint` function pre-bound to the merged defaults, exposing
// `DEFAULTS`, a chainable `defaults()`, plus bound `merge` and raw `parse`.
function withDefaults(oldDefaults, newDefaults) {
  const DEFAULTS = merge(oldDefaults, newDefaults);
  const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
  endpoint.DEFAULTS = DEFAULTS;
  endpoint.defaults = withDefaults.bind(null, DEFAULTS);
  endpoint.merge = merge.bind(null, DEFAULTS);
  endpoint.parse = parse;
  return endpoint;
}
const VERSION = "6.0.12";
const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
// So we use RequestParameters and add method as additional required property.
const DEFAULTS = {
  method: "GET",
  baseUrl: "https://api.github.com",
  headers: {
    accept: "application/vnd.github.v3+json",
    "user-agent": userAgent
  },
  mediaType: {
    format: "",
    previews: []
  }
};
// the exported `endpoint` is `endpointWithDefaults` pre-bound to DEFAULTS,
// with `.DEFAULTS`, `.defaults()`, `.merge()` and `.parse` attached
const endpoint = withDefaults(null, DEFAULTS);
exports.endpoint = endpoint;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 413:
/***/ (function(module, __unusedexports, __webpack_require__) {
module.exports = __webpack_require__(141);
/***/ }),
/***/ 431:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// TypeScript-generated interop helper: re-export binding `k` of module `m`
// onto `o` (as a live getter where Object.create is available).
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// TypeScript-generated interop helper: attach `v` as the `default` export of `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// TypeScript-generated interop helper: emulate `import * as ns` for CommonJS modules.
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.issue = exports.issueCommand = void 0;
const os = __importStar(__webpack_require__(87));
const utils_1 = __webpack_require__(82);
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
*/
// Renders a workflow command ("::name k=v::message") and writes it to stdout
// followed by the platform line terminator, where the Actions runner picks it up.
function issueCommand(command, properties, message) {
    const cmd = new Command(command, properties, message);
    process.stdout.write(`${cmd.toString()}${os.EOL}`);
}
exports.issueCommand = issueCommand;
// Shorthand for issuing a command that has no properties, e.g. `::group::msg`.
function issue(name, message = '') {
    issueCommand(name, {}, message);
}
exports.issue = issue;
// delimiter of the workflow-command syntax: "::name props::message"
const CMD_STRING = '::';
/**
 * Represents one GitHub Actions workflow command. `toString()` renders
 * "::command key=value,key=value::message" with properties and message
 * escaped for the single-line protocol.
 */
class Command {
    constructor(command, properties, message) {
        // guard against an empty command name so the output stays parseable
        if (!command) {
            command = 'missing.command';
        }
        this.command = command;
        this.properties = properties;
        this.message = message;
    }
    toString() {
        let cmdStr = CMD_STRING + this.command;
        if (this.properties && Object.keys(this.properties).length > 0) {
            cmdStr += ' ';
            let first = true;
            for (const key in this.properties) {
                if (this.properties.hasOwnProperty(key)) {
                    const val = this.properties[key];
                    // properties with falsy values are omitted entirely
                    if (val) {
                        if (first) {
                            first = false;
                        }
                        else {
                            cmdStr += ',';
                        }
                        cmdStr += `${key}=${escapeProperty(val)}`;
                    }
                }
            }
        }
        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
        return cmdStr;
    }
}
// Percent-encodes characters that would break the single-line command
// protocol in the message portion ("%", CR, LF).
function escapeData(s) {
    const value = utils_1.toCommandValue(s);
    return value
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A');
}
// Like escapeData, but additionally escapes the property delimiters
// ":" and "," used between key=value pairs.
function escapeProperty(s) {
    const value = utils_1.toCommandValue(s);
    return value
        .replace(/%/g, '%25')
        .replace(/\r/g, '%0D')
        .replace(/\n/g, '%0A')
        .replace(/:/g, '%3A')
        .replace(/,/g, '%2C');
}
//# sourceMappingURL=command.js.map
/***/ }),
/***/ 448:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
var universalUserAgent = __webpack_require__(796);
var beforeAfterHook = __webpack_require__(523);
var request = __webpack_require__(753);
var graphql = __webpack_require__(898);
var authToken = __webpack_require__(813);
// Babel rest-destructuring helper: shallow copy of `source` without the
// string keys listed in `excluded`; null/undefined source yields {}.
function _objectWithoutPropertiesLoose(source, excluded) {
  if (source == null) return {};
  const target = {};
  for (const key of Object.keys(source)) {
    if (excluded.indexOf(key) === -1) {
      target[key] = source[key];
    }
  }
  return target;
}
// Babel rest-destructuring helper: like _objectWithoutPropertiesLoose, but
// also copies own enumerable symbol keys that are not excluded.
function _objectWithoutProperties(source, excluded) {
  if (source == null) return {};
  const target = _objectWithoutPropertiesLoose(source, excluded);
  if (Object.getOwnPropertySymbols) {
    for (const sym of Object.getOwnPropertySymbols(source)) {
      if (excluded.indexOf(sym) >= 0) continue;
      // skip non-enumerable symbols, matching spread semantics
      if (!Object.prototype.propertyIsEnumerable.call(source, sym)) continue;
      target[sym] = source[sym];
    }
  }
  return target;
}
const VERSION = "3.5.1";
// option keys stripped before handing the remaining options to an authStrategy
const _excluded = ["authStrategy"];
/**
 * Core Octokit client: wires up request defaults, GraphQL, logging,
 * authentication and the plugin system. Extended via the static
 * `.defaults()` and `.plugin()` factories.
 */
class Octokit {
  constructor(options = {}) {
    const hook = new beforeAfterHook.Collection();
    const requestDefaults = {
      baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
      headers: {},
      request: Object.assign({}, options.request, {
        // @ts-ignore internal usage only, no need to type
        hook: hook.bind(null, "request")
      }),
      mediaType: {
        previews: [],
        format: ""
      }
    }; // prepend default user agent with `options.userAgent` if set
    requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
    if (options.baseUrl) {
      requestDefaults.baseUrl = options.baseUrl;
    }
    if (options.previews) {
      requestDefaults.mediaType.previews = options.previews;
    }
    if (options.timeZone) {
      requestDefaults.headers["time-zone"] = options.timeZone;
    }
    this.request = request.request.defaults(requestDefaults);
    this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
    // debug/info are no-ops by default; warn/error go to the console unless overridden
    this.log = Object.assign({
      debug: () => {},
      info: () => {},
      warn: console.warn.bind(console),
      error: console.error.bind(console)
    }, options.log);
    this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
    // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
    // (2) If only `options.auth` is set, use the default token authentication strategy.
    // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
    // TODO: type `options.auth` based on `options.authStrategy`.
    if (!options.authStrategy) {
      if (!options.auth) {
        // (1)
        this.auth = async () => ({
          type: "unauthenticated"
        });
      } else {
        // (2)
        const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
        hook.wrap("request", auth.hook);
        this.auth = auth;
      }
    } else {
      const {
        authStrategy
      } = options,
            otherOptions = _objectWithoutProperties(options, _excluded);
      const auth = authStrategy(Object.assign({
        request: this.request,
        log: this.log,
        // we pass the current octokit instance as well as its constructor options
        // to allow for authentication strategies that return a new octokit instance
        // that shares the same internal state as the current one. The original
        // requirement for this was the "event-octokit" authentication strategy
        // of https://github.com/probot/octokit-auth-probot.
        octokit: this,
        octokitOptions: otherOptions
      }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
      hook.wrap("request", auth.hook);
      this.auth = auth;
    } // apply plugins
    // https://stackoverflow.com/a/16345172
    const classConstructor = this.constructor;
    classConstructor.plugins.forEach(plugin => {
      Object.assign(this, plugin(this, options));
    });
  }
  /**
   * Returns a subclass whose constructor merges `defaults` (an options object
   * or a factory function) into the options passed by the caller; user agents
   * from both sides are concatenated.
   */
  static defaults(defaults) {
    const OctokitWithDefaults = class extends this {
      constructor(...args) {
        const options = args[0] || {};
        if (typeof defaults === "function") {
          super(defaults(options));
          return;
        }
        super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
          userAgent: `${options.userAgent} ${defaults.userAgent}`
        } : null));
      }
    };
    return OctokitWithDefaults;
  }
  /**
   * Attach a plugin (or many) to your Octokit instance.
   *
   * @example
   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
   */
  static plugin(...newPlugins) {
    var _a;
    const currentPlugins = this.plugins;
    // subclass carrying the deduplicated union of current and new plugins
    const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
    return NewOctokit;
  }
}
// static metadata shared by all Octokit subclasses; plugins start empty
Octokit.VERSION = VERSION;
Octokit.plugins = [];
exports.Octokit = Octokit;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 454:
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
// Bundled Node builtins; webpack module ids were resolved at build time.
var Stream = _interopDefault(__webpack_require__(794));
var http = _interopDefault(__webpack_require__(605));
var Url = _interopDefault(__webpack_require__(835));
var whatwgUrl = _interopDefault(__webpack_require__(176));
var https = _interopDefault(__webpack_require__(211));
var zlib = _interopDefault(__webpack_require__(761));
// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
// fix for "Readable" isn't a named export issue
const Readable = Stream.Readable;
// Symbol-keyed internal slots so Blob consumers cannot reach the raw state.
const BUFFER = Symbol('buffer');
const TYPE = Symbol('type');
// Minimal W3C Blob implementation backed by a single Node Buffer.
class Blob {
constructor() {
this[TYPE] = '';
const blobParts = arguments[0];
const options = arguments[1];
const buffers = [];
let size = 0;
if (blobParts) {
const a = blobParts;
const length = Number(a.length);
for (let i = 0; i < length; i++) {
const element = a[i];
let buffer;
// Normalize every supported part type to a Buffer, avoiding copies
// when the input already is one (or is another Blob's buffer).
if (element instanceof Buffer) {
buffer = element;
} else if (ArrayBuffer.isView(element)) {
buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
} else if (element instanceof ArrayBuffer) {
buffer = Buffer.from(element);
} else if (element instanceof Blob) {
buffer = element[BUFFER];
} else {
// anything else is stringified, matching the spec's USVString case
buffer = Buffer.from(typeof element === 'string' ? element : String(element));
}
size += buffer.length;
buffers.push(buffer);
}
}
this[BUFFER] = Buffer.concat(buffers);
// Only keep a printable-ASCII MIME type; anything else leaves type ''.
let type = options && options.type !== undefined && String(options.type).toLowerCase();
if (type && !/[^\u0020-\u007E]/.test(type)) {
this[TYPE] = type;
}
}
get size() {
return this[BUFFER].length;
}
get type() {
return this[TYPE];
}
text() {
return Promise.resolve(this[BUFFER].toString());
}
arrayBuffer() {
// Slice so the returned ArrayBuffer covers exactly this Blob's bytes,
// not the whole (possibly pooled) backing store.
const buf = this[BUFFER];
const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
return Promise.resolve(ab);
}
stream() {
const readable = new Readable();
readable._read = function () {};
readable.push(this[BUFFER]);
readable.push(null);
return readable;
}
toString() {
return '[object Blob]';
}
slice() {
const size = this.size;
const start = arguments[0];
const end = arguments[1];
let relativeStart, relativeEnd;
// Clamp negative/overlong offsets per the Blob spec.
if (start === undefined) {
relativeStart = 0;
} else if (start < 0) {
relativeStart = Math.max(size + start, 0);
} else {
relativeStart = Math.min(start, size);
}
if (end === undefined) {
relativeEnd = size;
} else if (end < 0) {
relativeEnd = Math.max(size + end, 0);
} else {
relativeEnd = Math.min(end, size);
}
const span = Math.max(relativeEnd - relativeStart, 0);
const buffer = this[BUFFER];
const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
const blob = new Blob([], { type: arguments[2] });
blob[BUFFER] = slicedBuffer;
return blob;
}
}
// Mirror the browser, where these members are enumerable on the prototype.
Object.defineProperties(Blob.prototype, {
size: { enumerable: true },
type: { enumerable: true },
slice: { enumerable: true }
});
Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
value: 'Blob',
writable: false,
enumerable: false,
configurable: true
});
/**
* fetch-error.js
*
* FetchError interface for operational errors
*/
/**
* Create FetchError instance
*
* @param String message Error message for human
* @param String type Error type for machine
* @param String systemError For Node.js system error
* @return FetchError
*/
/**
 * Operational error raised by fetch (network failure, bad body, limits).
 *
 * @param {String} message human readable description
 * @param {String} type machine readable category, e.g. 'system', 'max-size'
 * @param {Object} [systemError] underlying Node.js system error, if any
 */
function FetchError(message, type, systemError) {
  Error.call(this, message);
  this.message = message;
  this.type = type;
  if (systemError) {
    // surface the underlying system error code on both aliases
    this.errno = systemError.code;
    this.code = systemError.code;
  }
  // hide custom error implementation details from end-users
  Error.captureStackTrace(this, this.constructor);
}
FetchError.prototype = Object.create(Error.prototype);
FetchError.prototype.constructor = FetchError;
FetchError.prototype.name = 'FetchError';
// Optional "encoding" dependency; textConverted() degrades gracefully with a
// descriptive error when it is not installed (see convertBody below).
let convert;
try {
convert = __webpack_require__(18).convert;
} catch (e) {}
const INTERNALS = Symbol('Body internals');
// fix an issue where "PassThrough" isn't a named export for node <10
const PassThrough = Stream.PassThrough;
/**
* Body mixin
*
* Ref: https://fetch.spec.whatwg.org/#body
*
* @param Stream body Readable stream
* @param Object opts Response options
* @return Void
*/
function Body(body) {
var _this = this;
var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
_ref$size = _ref.size;
let size = _ref$size === undefined ? 0 : _ref$size;
var _ref$timeout = _ref.timeout;
let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
// NOTE: the bare ';' branches below deliberately leave Blob, Buffer and
// Stream bodies unchanged; everything else is coerced to a Buffer.
if (body == null) {
// body is undefined or null
body = null;
} else if (isURLSearchParams(body)) {
// body is a URLSearchParams
body = Buffer.from(body.toString());
} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
// body is ArrayBuffer
body = Buffer.from(body);
} else if (ArrayBuffer.isView(body)) {
// body is ArrayBufferView
body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
} else if (body instanceof Stream) ; else {
// none of the above
// coerce to string then buffer
body = Buffer.from(String(body));
}
this[INTERNALS] = {
body,
disturbed: false,
error: null
};
this.size = size;
this.timeout = timeout;
if (body instanceof Stream) {
// Record stream errors so a later consumeBody() can reject with them
// even when they fire before anyone reads the body.
body.on('error', function (err) {
const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
_this[INTERNALS].error = error;
});
}
}
// Shared body-consumption API, mixed into Request and Response prototypes
// via Body.mixIn(); every reader funnels through consumeBody(), which
// enforces single use.
Body.prototype = {
get body() {
return this[INTERNALS].body;
},
get bodyUsed() {
return this[INTERNALS].disturbed;
},
/**
* Decode response as ArrayBuffer
*
* @return Promise
*/
arrayBuffer() {
return consumeBody.call(this).then(function (buf) {
return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
});
},
/**
* Return raw response as Blob
*
* @return Promise
*/
blob() {
let ct = this.headers && this.headers.get('content-type') || '';
return consumeBody.call(this).then(function (buf) {
return Object.assign(
// Prevent copying
new Blob([], {
type: ct.toLowerCase()
}), {
[BUFFER]: buf
});
});
},
/**
* Decode response as json
*
* @return Promise
*/
json() {
var _this2 = this;
return consumeBody.call(this).then(function (buffer) {
try {
return JSON.parse(buffer.toString());
} catch (err) {
return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
}
});
},
/**
* Decode response as text
*
* @return Promise
*/
text() {
return consumeBody.call(this).then(function (buffer) {
return buffer.toString();
});
},
/**
* Decode response as buffer (non-spec api)
*
* @return Promise
*/
buffer() {
return consumeBody.call(this);
},
/**
* Decode response as text, while automatically detecting the encoding and
* trying to decode to UTF-8 (non-spec api)
*
* @return Promise
*/
textConverted() {
var _this3 = this;
return consumeBody.call(this).then(function (buffer) {
return convertBody(buffer, _this3.headers);
});
}
};
// In browsers, all properties are enumerable.
Object.defineProperties(Body.prototype, {
body: { enumerable: true },
bodyUsed: { enumerable: true },
arrayBuffer: { enumerable: true },
blob: { enumerable: true },
json: { enumerable: true },
text: { enumerable: true }
});
// Copy Body's accessors/methods onto another prototype (Request/Response)
// without overwriting members the target already defines.
Body.mixIn = function (proto) {
for (const name of Object.getOwnPropertyNames(Body.prototype)) {
// istanbul ignore else: future proof
if (!(name in proto)) {
const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
Object.defineProperty(proto, name, desc);
}
}
};
/**
* Consume and convert an entire Body to a Buffer.
*
* Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
*
* @return Promise
*/
function consumeBody() {
var _this4 = this;
if (this[INTERNALS].disturbed) {
return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
}
// A body may be read only once; mark it consumed up front.
this[INTERNALS].disturbed = true;
if (this[INTERNALS].error) {
// a stream error was recorded (by Body()) before anyone read the body
return Body.Promise.reject(this[INTERNALS].error);
}
let body = this.body;
// body is null
if (body === null) {
return Body.Promise.resolve(Buffer.alloc(0));
}
// body is blob
if (isBlob(body)) {
body = body.stream();
}
// body is buffer
if (Buffer.isBuffer(body)) {
return Body.Promise.resolve(body);
}
// istanbul ignore if: should never happen
if (!(body instanceof Stream)) {
return Body.Promise.resolve(Buffer.alloc(0));
}
// body is stream
// get ready to actually consume the body
let accum = [];
let accumBytes = 0;
let abort = false;
return new Body.Promise(function (resolve, reject) {
let resTimeout;
// allow timeout on slow response body
if (_this4.timeout) {
resTimeout = setTimeout(function () {
abort = true;
reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
}, _this4.timeout);
}
// handle stream errors
body.on('error', function (err) {
if (err.name === 'AbortError') {
// if the request was aborted, reject with this Error
abort = true;
reject(err);
} else {
// other errors, such as incorrect content-encoding
reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
}
});
body.on('data', function (chunk) {
if (abort || chunk === null) {
return;
}
// enforce the caller-supplied size limit, if any
if (_this4.size && accumBytes + chunk.length > _this4.size) {
abort = true;
reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
return;
}
accumBytes += chunk.length;
accum.push(chunk);
});
body.on('end', function () {
if (abort) {
return;
}
clearTimeout(resTimeout);
try {
resolve(Buffer.concat(accum, accumBytes));
} catch (err) {
// handle streams that have accumulated too much data (issue #414)
reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
}
});
});
}
/**
* Detect buffer encoding and convert to target encoding
* ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
*
* @param Buffer buffer Incoming buffer
* @param String encoding Target encoding
* @return String
*/
function convertBody(buffer, headers) {
if (typeof convert !== 'function') {
throw new Error('The package `encoding` must be installed to use the textConverted() function');
}
const ct = headers.get('content-type');
let charset = 'utf-8';
let res, str;
// header
if (ct) {
res = /charset=([^;]*)/i.exec(ct);
}
// no charset in content type, peek at response body for at most 1024 bytes
str = buffer.slice(0, 1024).toString();
// html5
if (!res && str) {
res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
}
// html4
if (!res && str) {
res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str);
if (!res) {
res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str);
if (res) {
res.pop(); // drop last quote
}
}
if (res) {
res = /charset=(.*)/i.exec(res.pop());
}
}
// xml
if (!res && str) {
res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
}
// found charset
if (res) {
// the last capture group of whichever regex matched holds the charset
charset = res.pop();
// prevent decode issues when sites use incorrect encoding
// ref: https://hsivonen.fi/encoding-menu/
if (charset === 'gb2312' || charset === 'gbk') {
charset = 'gb18030';
}
}
// turn raw buffers into a single utf-8 buffer
return convert(buffer, 'UTF-8', charset).toString();
}
/**
* Detect a URLSearchParams object
* ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
*
* @param Object obj Object to detect by type or brand
* @return String
*/
/**
 * Detect a URLSearchParams object across realms/polyfills.
 * ref: https://github.com/bitinn/node-fetch/issues/296#issuecomment-307598143
 *
 * @param Object obj Object to detect by type or brand
 * @return Boolean
 */
function isURLSearchParams(obj) {
  // Duck-typing as a necessary condition.
  if (typeof obj !== 'object') {
    return false;
  }
  const requiredMethods = ['append', 'delete', 'get', 'getAll', 'has', 'set'];
  for (const method of requiredMethods) {
    if (typeof obj[method] !== 'function') {
      return false;
    }
  }
  // Brand-checking and more duck-typing as optional condition.
  return obj.constructor.name === 'URLSearchParams' || Object.prototype.toString.call(obj) === '[object URLSearchParams]' || typeof obj.sort === 'function';
}
/**
* Check if `obj` is a W3C `Blob` object (which `File` inherits from)
* @param {*} obj
* @return {boolean}
*/
/**
 * Check if `obj` is a W3C `Blob` object (which `File` inherits from).
 * Combines duck-typing with constructor-name and toStringTag brand checks.
 *
 * @param {*} obj
 * @return {boolean}
 */
function isBlob(obj) {
  const BLOB_BRAND = /^(Blob|File)$/;
  return typeof obj === 'object' &&
    typeof obj.arrayBuffer === 'function' &&
    typeof obj.type === 'string' &&
    typeof obj.stream === 'function' &&
    typeof obj.constructor === 'function' &&
    typeof obj.constructor.name === 'string' &&
    BLOB_BRAND.test(obj.constructor.name) &&
    BLOB_BRAND.test(obj[Symbol.toStringTag]);
}
/**
* Clone body given Res/Req instance
*
* @param Mixed instance Response or Request instance
* @return Mixed
*/
// Clone a Request/Response body; stream bodies are teed through two
// PassThroughs so both the original instance and the clone stay readable.
function clone(instance) {
let p1, p2;
let body = instance.body;
// don't allow cloning a used body
if (instance.bodyUsed) {
throw new Error('cannot clone body after it is used');
}
// check that body is a stream and not form-data object
// note: we can't clone the form-data object without having it as a dependency
if (body instanceof Stream && typeof body.getBoundary !== 'function') {
// tee instance body
p1 = new PassThrough();
p2 = new PassThrough();
body.pipe(p1);
body.pipe(p2);
// set instance body to teed body and return the other teed body
instance[INTERNALS].body = p1;
body = p2;
}
return body;
}
/**
* Performs the operation "extract a `Content-Type` value from |object|" as
* specified in the specification:
* https://fetch.spec.whatwg.org/#concept-bodyinit-extract
*
* This function assumes that instance.body is present.
*
* @param Mixed instance Any options.body input
*/
// NOTE: the order of these checks mirrors Body()'s coercion order and must
// be kept in sync with it.
function extractContentType(body) {
if (body === null) {
// body is null
return null;
} else if (typeof body === 'string') {
// body is string
return 'text/plain;charset=UTF-8';
} else if (isURLSearchParams(body)) {
// body is a URLSearchParams
return 'application/x-www-form-urlencoded;charset=UTF-8';
} else if (isBlob(body)) {
// body is blob
return body.type || null;
} else if (Buffer.isBuffer(body)) {
// body is buffer
return null;
} else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
// body is ArrayBuffer
return null;
} else if (ArrayBuffer.isView(body)) {
// body is ArrayBufferView
return null;
} else if (typeof body.getBoundary === 'function') {
// detect form data input from form-data module
return `multipart/form-data;boundary=${body.getBoundary()}`;
} else if (body instanceof Stream) {
// body is stream
// can't really do much about this
return null;
} else {
// Body constructor defaults other things to string
return 'text/plain;charset=UTF-8';
}
}
/**
* The Fetch Standard treats this as if "total bytes" is a property on the body.
* For us, we have to explicitly get it with a function.
*
* ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes
*
* @param Body instance Instance of Body
* @return Number? Number of bytes, or null if not possible
*/
function getTotalBytes(instance) {
const body = instance.body;
if (body === null) {
// body is null
return 0;
} else if (isBlob(body)) {
return body.size;
} else if (Buffer.isBuffer(body)) {
// body is buffer
return body.length;
} else if (body && typeof body.getLengthSync === 'function') {
// detect form data input from form-data module
// form-data 1.x: an empty _lengthRetrievers list means the length is
// already known synchronously; 2.x exposes hasKnownLength() instead.
if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x
body.hasKnownLength && body.hasKnownLength()) {
// 2.x
return body.getLengthSync();
}
return null;
} else {
// body is stream
return null;
}
}
/**
* Write a Body to a Node.js WritableStream (e.g. http.Request) object.
*
* @param Body instance Instance of Body
* @return Void
*/
// Pump a Body's content into a writable (e.g. the outgoing http request).
function writeToStream(dest, instance) {
const body = instance.body;
if (body === null) {
// body is null
dest.end();
} else if (isBlob(body)) {
body.stream().pipe(dest);
} else if (Buffer.isBuffer(body)) {
// body is buffer
dest.write(body);
dest.end();
} else {
// body is stream
body.pipe(dest);
}
}
// expose Promise
Body.Promise = global.Promise;
/**
* headers.js
*
* Headers class offers convenient helpers
*/
// Matches any character NOT allowed in an HTTP token (header name).
const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
// Matches any character NOT allowed in a header value (tab, printable
// ASCII and latin-1 high bytes are permitted).
const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/;

/**
 * Throw a TypeError unless `name` is a legal, non-empty HTTP header name.
 */
function validateName(name) {
  name = `${name}`;
  const isLegal = name !== '' && !invalidTokenRegex.test(name);
  if (!isLegal) {
    throw new TypeError(`${name} is not a legal HTTP header name`);
  }
}

/**
 * Throw a TypeError unless `value` is a legal HTTP header value.
 */
function validateValue(value) {
  value = `${value}`;
  if (invalidHeaderCharRegex.test(value)) {
    throw new TypeError(`${value} is not a legal HTTP header value`);
  }
}
/**
* Find the key in the map object given a header name.
*
* Returns undefined if not found.
*
* @param String name Header name
* @return String|Undefined
*/
/**
 * Case-insensitively locate the stored key matching `name` in a header map.
 *
 * @param Object map raw header map (original-cased keys)
 * @param String name Header name
 * @return String|Undefined the original-cased key, or undefined if absent
 */
function find(map, name) {
  const needle = name.toLowerCase();
  for (const key in map) {
    if (key.toLowerCase() === needle) {
      return key;
    }
  }
  return undefined;
}
const MAP = Symbol('map');
// WHATWG-style Headers: internally a null-prototype map from the
// original-cased header name to an array of values.
class Headers {
/**
* Headers class
*
* @param Object headers Response headers
* @return Void
*/
constructor() {
let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
this[MAP] = Object.create(null);
if (init instanceof Headers) {
const rawHeaders = init.raw();
const headerNames = Object.keys(rawHeaders);
for (const headerName of headerNames) {
for (const value of rawHeaders[headerName]) {
this.append(headerName, value);
}
}
return;
}
// We don't worry about converting prop to ByteString here as append()
// will handle it.
if (init == null) ; else if (typeof init === 'object') {
const method = init[Symbol.iterator];
if (method != null) {
if (typeof method !== 'function') {
throw new TypeError('Header pairs must be iterable');
}
// sequence<sequence<ByteString>>
// Note: per spec we have to first exhaust the lists then process them
const pairs = [];
for (const pair of init) {
if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
throw new TypeError('Each header pair must be iterable');
}
pairs.push(Array.from(pair));
}
for (const pair of pairs) {
if (pair.length !== 2) {
throw new TypeError('Each header pair must be a name/value tuple');
}
this.append(pair[0], pair[1]);
}
} else {
// record<ByteString, ByteString>
for (const key of Object.keys(init)) {
const value = init[key];
this.append(key, value);
}
}
} else {
throw new TypeError('Provided initializer must be an object');
}
}
/**
* Return combined header value given name
*
* @param String name Header name
* @return Mixed
*/
get(name) {
name = `${name}`;
validateName(name);
const key = find(this[MAP], name);
if (key === undefined) {
return null;
}
return this[MAP][key].join(', ');
}
/**
* Iterate over all headers
*
* @param Function callback Executed for each item with parameters (value, name, thisArg)
* @param Boolean thisArg `this` context for callback function
* @return Void
*/
forEach(callback) {
let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
let pairs = getHeaders(this);
let i = 0;
while (i < pairs.length) {
var _pairs$i = pairs[i];
const name = _pairs$i[0],
value = _pairs$i[1];
callback.call(thisArg, value, name, this);
// re-snapshot after each callback in case it mutated the headers
pairs = getHeaders(this);
i++;
}
}
/**
* Overwrite header values given name
*
* @param String name Header name
* @param String value Header value
* @return Void
*/
set(name, value) {
name = `${name}`;
value = `${value}`;
validateName(name);
validateValue(value);
const key = find(this[MAP], name);
// reuse the stored key's casing when the header already exists
this[MAP][key !== undefined ? key : name] = [value];
}
/**
* Append a value onto existing header
*
* @param String name Header name
* @param String value Header value
* @return Void
*/
append(name, value) {
name = `${name}`;
value = `${value}`;
validateName(name);
validateValue(value);
const key = find(this[MAP], name);
if (key !== undefined) {
this[MAP][key].push(value);
} else {
this[MAP][name] = [value];
}
}
/**
* Check for header name existence
*
* @param String name Header name
* @return Boolean
*/
has(name) {
name = `${name}`;
validateName(name);
return find(this[MAP], name) !== undefined;
}
/**
* Delete all header values given name
*
* @param String name Header name
* @return Void
*/
delete(name) {
name = `${name}`;
validateName(name);
const key = find(this[MAP], name);
if (key !== undefined) {
delete this[MAP][key];
}
}
/**
* Return raw headers (non-spec api)
*
* @return Object
*/
raw() {
return this[MAP];
}
/**
* Get an iterator on keys.
*
* @return Iterator
*/
keys() {
return createHeadersIterator(this, 'key');
}
/**
* Get an iterator on values.
*
* @return Iterator
*/
values() {
return createHeadersIterator(this, 'value');
}
/**
* Get an iterator on entries.
*
* This is the default iterator of the Headers object.
*
* @return Iterator
*/
[Symbol.iterator]() {
return createHeadersIterator(this, 'key+value');
}
}
// entries() is the default iterator, mirroring the WHATWG Headers API.
Headers.prototype.entries = Headers.prototype[Symbol.iterator];
Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
value: 'Headers',
writable: false,
enumerable: false,
configurable: true
});
// In browsers, these members are enumerable on the prototype.
Object.defineProperties(Headers.prototype, {
get: { enumerable: true },
forEach: { enumerable: true },
set: { enumerable: true },
append: { enumerable: true },
has: { enumerable: true },
delete: { enumerable: true },
keys: { enumerable: true },
values: { enumerable: true },
entries: { enumerable: true }
});
/**
 * Snapshot a Headers object as a sorted list of keys, values, or
 * [key, value] pairs, depending on `kind`.
 *
 * @param Headers headers
 * @param String [kind] 'key' | 'value' | 'key+value' (default)
 * @return Array
 */
function getHeaders(headers) {
  let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
  const keys = Object.keys(headers[MAP]).sort();
  let project;
  if (kind === 'key') {
    project = function (k) {
      return k.toLowerCase();
    };
  } else if (kind === 'value') {
    project = function (k) {
      return headers[MAP][k].join(', ');
    };
  } else {
    project = function (k) {
      return [k.toLowerCase(), headers[MAP][k].join(', ')];
    };
  }
  return keys.map(project);
}
const INTERNAL = Symbol('internal');
// Build a spec-style iterator over a Headers object; `kind` selects what
// next() yields ('key' | 'value' | 'key+value').
function createHeadersIterator(target, kind) {
const iterator = Object.create(HeadersIteratorPrototype);
iterator[INTERNAL] = {
target,
kind,
index: 0
};
return iterator;
}
// Prototype for header iterators; inherits from %IteratorPrototype% (reached
// through an array iterator) so results are themselves iterable.
const HeadersIteratorPrototype = Object.setPrototypeOf({
next() {
// istanbul ignore if
if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
throw new TypeError('Value of `this` is not a HeadersIterator');
}
var _INTERNAL = this[INTERNAL];
const target = _INTERNAL.target,
kind = _INTERNAL.kind,
index = _INTERNAL.index;
// headers are re-snapshotted on every call, so live mutation is visible
const values = getHeaders(target, kind);
const len = values.length;
if (index >= len) {
return {
value: undefined,
done: true
};
}
this[INTERNAL].index = index + 1;
return {
value: values[index],
done: false
};
}
}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
value: 'HeadersIterator',
writable: false,
enumerable: false,
configurable: true
});
/**
* Export the Headers object in a form that Node.js can consume.
*
* @param Headers headers
* @return Object
*/
// Flatten a Headers map into the plain object shape Node's http.request()
// expects (values stay arrays, except Host which must be a string).
function exportNodeCompatibleHeaders(headers) {
const obj = Object.assign({ __proto__: null }, headers[MAP]);
// http.request() only supports string as Host header. This hack makes
// specifying custom Host header possible.
const hostHeaderKey = find(headers[MAP], 'Host');
if (hostHeaderKey !== undefined) {
obj[hostHeaderKey] = obj[hostHeaderKey][0];
}
return obj;
}
/**
* Create a Headers object from an object of headers, ignoring those that do
* not conform to HTTP grammar productions.
*
* @param Object obj Object of headers
* @return Headers
*/
function createHeadersLenient(obj) {
const headers = new Headers();
for (const name of Object.keys(obj)) {
// silently drop names that are not valid HTTP tokens
if (invalidTokenRegex.test(name)) {
continue;
}
if (Array.isArray(obj[name])) {
for (const val of obj[name]) {
// silently drop values containing illegal characters
if (invalidHeaderCharRegex.test(val)) {
continue;
}
// write straight into the map to bypass validateName/validateValue
if (headers[MAP][name] === undefined) {
headers[MAP][name] = [val];
} else {
headers[MAP][name].push(val);
}
}
} else if (!invalidHeaderCharRegex.test(obj[name])) {
headers[MAP][name] = [obj[name]];
}
}
return headers;
}
const INTERNALS$1 = Symbol('Response internals');
// fix an issue where "STATUS_CODES" aren't a named export for node <10
const STATUS_CODES = http.STATUS_CODES;
/**
 * Response class
 *
 * @param Stream body Readable stream
 * @param Object opts Response options
 * @return Void
 */
class Response {
constructor() {
let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
Body.call(this, body, opts);
const status = opts.status || 200;
const headers = new Headers(opts.headers);
if (body != null && !headers.has('Content-Type')) {
// infer a Content-Type from the body when none was given
const contentType = extractContentType(body);
if (contentType) {
headers.append('Content-Type', contentType);
}
}
this[INTERNALS$1] = {
url: opts.url,
status,
statusText: opts.statusText || STATUS_CODES[status],
headers,
counter: opts.counter
};
}
get url() {
return this[INTERNALS$1].url || '';
}
get status() {
return this[INTERNALS$1].status;
}
/**
* Convenience property representing if the request ended normally
*/
get ok() {
return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
}
get redirected() {
// counter tracks how many redirects were followed to reach this response
return this[INTERNALS$1].counter > 0;
}
get statusText() {
return this[INTERNALS$1].statusText;
}
get headers() {
return this[INTERNALS$1].headers;
}
/**
* Clone this response
*
* @return Response
*/
clone() {
return new Response(clone(this), {
url: this.url,
status: this.status,
statusText: this.statusText,
headers: this.headers,
ok: this.ok,
redirected: this.redirected
});
}
}
Body.mixIn(Response.prototype);
// In browsers, these members are enumerable on the prototype.
Object.defineProperties(Response.prototype, {
url: { enumerable: true },
status: { enumerable: true },
ok: { enumerable: true },
redirected: { enumerable: true },
statusText: { enumerable: true },
headers: { enumerable: true },
clone: { enumerable: true }
});
Object.defineProperty(Response.prototype, Symbol.toStringTag, {
value: 'Response',
writable: false,
enumerable: false,
configurable: true
});
const INTERNALS$2 = Symbol('Request internals');
// prefer the native URL class, fall back to the whatwg-url polyfill
const URL = Url.URL || whatwgUrl.URL;
// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
const format_url = Url.format;
/**
 * Wrapper around `new URL` to handle arbitrary URLs
 *
 * @param {string} urlStr
 * @return {void}
 */
function parseURL(urlStr) {
/*
Check whether the URL is absolute or not
Scheme: https://tools.ietf.org/html/rfc3986#section-3.1
Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3
*/
if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) {
// absolute URL: normalize it through the WHATWG URL parser first
urlStr = new URL(urlStr).toString();
}
// Fallback to old implementation for arbitrary URLs
return parse_url(urlStr);
}
// whether Readable#destroy() exists on this Node version
const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
/**
* Check if a value is an instance of Request.
*
* @param Mixed input
* @return Boolean
*/
// A value counts as a Request iff it carries the internal symbol slot.
function isRequest(input) {
return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
}
/**
 * Brand-check for AbortSignal instances by prototype constructor name, so
 * signals from other realms or polyfills are accepted too.
 *
 * @param {*} signal
 * @return {boolean}
 */
function isAbortSignal(signal) {
  if (!signal || typeof signal !== 'object') {
    return false;
  }
  const proto = Object.getPrototypeOf(signal);
  return Boolean(proto && proto.constructor.name === 'AbortSignal');
}
/**
* Request class
*
* @param Mixed input Url or Request instance
* @param Object init Custom options
* @return Void
*/
class Request {
constructor(input) {
let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
let parsedURL;
// normalize input
if (!isRequest(input)) {
if (input && input.href) {
// in order to support Node.js' Url objects; though WHATWG's URL objects
// will fall into this branch also (since their `toString()` will return
// `href` property anyway)
parsedURL = parseURL(input.href);
} else {
// coerce input to a string before attempting to parse
parsedURL = parseURL(`${input}`);
}
input = {};
} else {
parsedURL = parseURL(input.url);
}
let method = init.method || input.method || 'GET';
method = method.toUpperCase();
// GET/HEAD requests must not carry a body, per the fetch spec
if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
throw new TypeError('Request with GET/HEAD method cannot have body');
}
// an explicit init.body wins; otherwise tee the source Request's body
let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
Body.call(this, inputBody, {
timeout: init.timeout || input.timeout || 0,
size: init.size || input.size || 0
});
const headers = new Headers(init.headers || input.headers || {});
if (inputBody != null && !headers.has('Content-Type')) {
// infer a Content-Type from the body when none was given
const contentType = extractContentType(inputBody);
if (contentType) {
headers.append('Content-Type', contentType);
}
}
let signal = isRequest(input) ? input.signal : null;
if ('signal' in init) signal = init.signal;
if (signal != null && !isAbortSignal(signal)) {
throw new TypeError('Expected signal to be an instanceof AbortSignal');
}
this[INTERNALS$2] = {
method,
redirect: init.redirect || input.redirect || 'follow',
headers,
parsedURL,
signal
};
// node-fetch-only options
this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
this.counter = init.counter || input.counter || 0;
this.agent = init.agent || input.agent;
}
get method() {
return this[INTERNALS$2].method;
}
get url() {
return format_url(this[INTERNALS$2].parsedURL);
}
get headers() {
return this[INTERNALS$2].headers;
}
get redirect() {
return this[INTERNALS$2].redirect;
}
get signal() {
return this[INTERNALS$2].signal;
}
/**
* Clone this request
*
* @return Request
*/
clone() {
return new Request(this);
}
}
Body.mixIn(Request.prototype);
Object.defineProperty(Request.prototype, Symbol.toStringTag, {
value: 'Request',
writable: false,
enumerable: false,
configurable: true
});
// In browsers, these members are enumerable on the prototype.
Object.defineProperties(Request.prototype, {
method: { enumerable: true },
url: { enumerable: true },
headers: { enumerable: true },
redirect: { enumerable: true },
clone: { enumerable: true },
signal: { enumerable: true }
});
/**
* Convert a Request to Node.js http request options.
*
* @param Request A Request instance
* @return Object The options object to be passed to http.request
*/
// Translate a Request into the options object accepted by Node's
// http.request()/https.request(), applying fetch-spec default headers.
function getNodeRequestOptions(request) {
const parsedURL = request[INTERNALS$2].parsedURL;
const headers = new Headers(request[INTERNALS$2].headers);
// fetch step 1.3
if (!headers.has('Accept')) {
headers.set('Accept', '*/*');
}
// Basic fetch
if (!parsedURL.protocol || !parsedURL.hostname) {
throw new TypeError('Only absolute URLs are supported');
}
if (!/^https?:$/.test(parsedURL.protocol)) {
throw new TypeError('Only HTTP(S) protocols are supported');
}
if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
}
// HTTP-network-or-cache fetch steps 2.4-2.7
let contentLengthValue = null;
if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
contentLengthValue = '0';
}
if (request.body != null) {
const totalBytes = getTotalBytes(request);
// streams/unknown form-data lengths yield null and send no Content-Length
if (typeof totalBytes === 'number') {
contentLengthValue = String(totalBytes);
}
}
if (contentLengthValue) {
headers.set('Content-Length', contentLengthValue);
}
// HTTP-network-or-cache fetch step 2.11
if (!headers.has('User-Agent')) {
headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
}
// HTTP-network-or-cache fetch step 2.15
if (request.compress && !headers.has('Accept-Encoding')) {
headers.set('Accept-Encoding', 'gzip,deflate');
}
let agent = request.agent;
if (typeof agent === 'function') {
// allow per-URL agent selection
agent = agent(parsedURL);
}
if (!headers.has('Connection') && !agent) {
headers.set('Connection', 'close');
}
// HTTP-network fetch step 4.2
// chunked encoding is handled by Node.js
return Object.assign({}, parsedURL, {
method: request.method,
headers: exportNodeCompatibleHeaders(headers),
agent
});
}
/**
* abort-error.js
*
* AbortError interface for cancelled requests
*/
/**
* Create AbortError instance
*
* @param String message Error message for human
* @return AbortError
*/
// Error raised when a request is cancelled through its AbortSignal.
function AbortError(message) {
Error.call(this, message);
this.type = 'aborted';
this.message = message;
// hide custom error implementation details from end-users
Error.captureStackTrace(this, this.constructor);
}
// Classic prototype wiring (kept as a function, not a class, so the
// prototype stays writable and `instanceof Error` holds).
AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
// Prefer the native WHATWG URL class from the `url` module when available,
// falling back to the bundled whatwg-url implementation on older Node.
const URL$1 = Url.URL || whatwgUrl.URL;
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
/**
 * Report whether the host of `original` equals the host of `destination`
 * or is a subdomain of it. Used to decide whether credential-bearing
 * headers may survive a redirect.
 *
 * @param String destination redirect target URL
 * @param String original URL the request started from
 * @return Boolean
 */
const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
  const originalHost = new URL$1(original).hostname;
  const destinationHost = new URL$1(destination).hostname;
  if (originalHost === destinationHost) {
    return true;
  }
  // "api.example.com" ends with "example.com" AND the character just before
  // the suffix is a dot — i.e. a true subdomain, not "notexample.com".
  return originalHost.endsWith(destinationHost) &&
    originalHost[originalHost.length - destinationHost.length - 1] === '.';
};
/**
* Fetch function
*
* @param Mixed url Absolute url or Request instance
* @param Object opts Fetch options
* @return Promise
*/
function fetch(url, opts) {
  // allow custom promise
  if (!fetch.Promise) {
    throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
  }
  Body.Promise = fetch.Promise;
  // wrap http.request into fetch
  return new fetch.Promise(function (resolve, reject) {
    // build request object
    const request = new Request(url, opts);
    const options = getNodeRequestOptions(request);
    const send = (options.protocol === 'https:' ? https : http).request;
    const signal = request.signal;
    // Set once headers arrive; abort() forwards the AbortError into its body stream.
    let response = null;
    const abort = function abort() {
      let error = new AbortError('The user aborted a request.');
      reject(error);
      if (request.body && request.body instanceof Stream.Readable) {
        request.body.destroy(error);
      }
      if (!response || !response.body) return;
      response.body.emit('error', error);
    };
    if (signal && signal.aborted) {
      abort();
      return;
    }
    const abortAndFinalize = function abortAndFinalize() {
      abort();
      finalize();
    };
    // send request
    const req = send(options);
    let reqTimeout;
    if (signal) {
      signal.addEventListener('abort', abortAndFinalize);
    }
    // Tear down the request plus all abort/timeout listeners.
    function finalize() {
      req.abort();
      if (signal) signal.removeEventListener('abort', abortAndFinalize);
      clearTimeout(reqTimeout);
    }
    if (request.timeout) {
      // Start the timeout only once a socket is assigned, so queueing time
      // behind other sockets does not count against the request timeout.
      req.once('socket', function (socket) {
        reqTimeout = setTimeout(function () {
          reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
          finalize();
        }, request.timeout);
      });
    }
    req.on('error', function (err) {
      reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
      finalize();
    });
    req.on('response', function (res) {
      clearTimeout(reqTimeout);
      const headers = createHeadersLenient(res.headers);
      // HTTP fetch step 5
      if (fetch.isRedirect(res.statusCode)) {
        // HTTP fetch step 5.2
        const location = headers.get('Location');
        // HTTP fetch step 5.3
        let locationURL = null;
        try {
          locationURL = location === null ? null : new URL$1(location, request.url).toString();
        } catch (err) {
          // error here can only be invalid URL in Location: header
          // do not throw when options.redirect == manual
          // let the user extract the errorneous redirect URL
          if (request.redirect !== 'manual') {
            reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
            finalize();
            return;
          }
        }
        // HTTP fetch step 5.5
        switch (request.redirect) {
          case 'error':
            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));
            finalize();
            return;
          case 'manual':
            // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
            if (locationURL !== null) {
              // handle corrupted header
              try {
                headers.set('Location', locationURL);
              } catch (err) {
                // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
                reject(err);
              }
            }
            break;
          case 'follow':
            // HTTP-redirect fetch step 2
            if (locationURL === null) {
              break;
            }
            // HTTP-redirect fetch step 5
            if (request.counter >= request.follow) {
              reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
              finalize();
              return;
            }
            // HTTP-redirect fetch step 6 (counter increment)
            // Create a new Request object.
            const requestOpts = {
              headers: new Headers(request.headers),
              follow: request.follow,
              counter: request.counter + 1,
              agent: request.agent,
              compress: request.compress,
              method: request.method,
              body: request.body,
              signal: request.signal,
              timeout: request.timeout,
              size: request.size
            };
            // Strip credential-bearing headers when the redirect leaves the
            // original domain (or its subdomains).
            if (!isDomainOrSubdomain(request.url, locationURL)) {
              for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
                requestOpts.headers.delete(name);
              }
            }
            // HTTP-redirect fetch step 9: a streamed body cannot be replayed.
            if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
              reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
              finalize();
              return;
            }
            // HTTP-redirect fetch step 11: 303 (and POST on 301/302) becomes GET.
            if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
              requestOpts.method = 'GET';
              requestOpts.body = undefined;
              requestOpts.headers.delete('content-length');
            }
            // HTTP-redirect fetch step 15: recurse into fetch for the new URL.
            resolve(fetch(new Request(locationURL, requestOpts)));
            finalize();
            return;
        }
      }
      // prepare response
      res.once('end', function () {
        if (signal) signal.removeEventListener('abort', abortAndFinalize);
      });
      // Tee the raw response through a PassThrough so decompression below can
      // re-pipe it without disturbing the source stream.
      let body = res.pipe(new PassThrough$1());
      const response_options = {
        url: request.url,
        status: res.statusCode,
        statusText: res.statusMessage,
        headers: headers,
        size: request.size,
        timeout: request.timeout,
        counter: request.counter
      };
      // HTTP-network fetch step 12.1.1.3
      const codings = headers.get('Content-Encoding');
      // HTTP-network fetch step 12.1.1.4: handle content codings
      // in following scenarios we ignore compression support
      // 1. compression support is disabled
      // 2. HEAD request
      // 3. no Content-Encoding header
      // 4. no content response (204)
      // 5. content not modified response (304)
      if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
        response = new Response(body, response_options);
        resolve(response);
        return;
      }
      // For Node v6+
      // Be less strict when decoding compressed responses, since sometimes
      // servers send slightly invalid responses that are still accepted
      // by common browsers.
      // Always using Z_SYNC_FLUSH is what cURL does.
      const zlibOptions = {
        flush: zlib.Z_SYNC_FLUSH,
        finishFlush: zlib.Z_SYNC_FLUSH
      };
      // for gzip
      if (codings == 'gzip' || codings == 'x-gzip') {
        body = body.pipe(zlib.createGunzip(zlibOptions));
        response = new Response(body, response_options);
        resolve(response);
        return;
      }
      // for deflate
      if (codings == 'deflate' || codings == 'x-deflate') {
        // handle the infamous raw deflate response from old servers
        // a hack for old IIS and Apache servers
        const raw = res.pipe(new PassThrough$1());
        raw.once('data', function (chunk) {
          // see http://stackoverflow.com/questions/37519828
          // Sniff the first byte: 0x?8 low nibble means a zlib header is present.
          if ((chunk[0] & 0x0F) === 0x08) {
            body = body.pipe(zlib.createInflate());
          } else {
            body = body.pipe(zlib.createInflateRaw());
          }
          response = new Response(body, response_options);
          resolve(response);
        });
        return;
      }
      // for br
      if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
        body = body.pipe(zlib.createBrotliDecompress());
        response = new Response(body, response_options);
        resolve(response);
        return;
      }
      // otherwise, use response as-is
      response = new Response(body, response_options);
      resolve(response);
    });
    // Pipe the request body (if any) into the outgoing request.
    writeToStream(req, request);
  });
}
/**
 * Redirect code matching
 *
 * @param Number code Status code
 * @return Boolean true when `code` is an HTTP redirect status
 */
fetch.isRedirect = function (code) {
  // 301/302/303/307/308 are the only statuses fetch treats as redirects.
  switch (code) {
    case 301:
    case 302:
    case 303:
    case 307:
    case 308:
      return true;
    default:
      return false;
  }
};
// expose Promise
fetch.Promise = global.Promise;
// CommonJS/ESM interop: the callable `fetch` is both the module itself and
// its default export, with the supporting classes attached as named exports.
module.exports = exports = fetch;
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = exports;
exports.Headers = Headers;
exports.Request = Request;
exports.Response = Response;
exports.FetchError = FetchError;
/***/ }),
/***/ 463:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
// Webpack-resolved dependencies: `deprecation` supplies the Deprecation error
// class; `once` memoizes a function so it fires at most one time.
var deprecation = __webpack_require__(692);
var once = _interopDefault(__webpack_require__(49));
// Each deprecation message is emitted at most once per process.
const logOnceCode = once(deprecation => console.warn(deprecation));
const logOnceHeaders = once(deprecation => console.warn(deprecation));
/**
* Error with extra properties to help with debugging
*/
class RequestError extends Error {
  // message: human-readable error; statusCode: HTTP status of the failed
  // request; options: { request, response?, headers? } from @octokit/request.
  constructor(message, statusCode, options) {
    super(message); // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
    // Octokit reports all HTTP failures under the generic HttpError name.
    this.name = "HttpError";
    this.status = statusCode;
    let headers;
    if ("headers" in options && typeof options.headers !== "undefined") {
      headers = options.headers;
    }
    if ("response" in options) {
      this.response = options.response;
      // response headers take precedence over an explicit headers option
      headers = options.response.headers;
    } // redact request credentials without mutating original request options
    const requestCopy = Object.assign({}, options.request);
    if (options.request.headers.authorization) {
      // keep the auth scheme (e.g. "token", "bearer") but hide the credential
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }
    requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy; // deprecations
    // `code` and `headers` stay readable for backwards compatibility but warn
    // (once per process) in favour of `status` / `response.headers`.
    Object.defineProperty(this, "code", {
      get() {
        logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }
    });
    Object.defineProperty(this, "headers", {
      get() {
        logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
        return headers || {};
      }
    });
  }
}
exports.RequestError = RequestError;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 469:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// ---- TypeScript compiler interop helpers (auto-generated emit) ----
// __createBinding re-exports property `k` of module `m` as `k2` on `o`,
// with a live getter when Object.create is available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
}));
// __setModuleDefault attaches a CommonJS module as the `default` export.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
  Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
  o["default"] = v;
});
// __importStar emulates `import * as ns` over a CommonJS module.
// NOTE(review): this emit calls Object.hasOwnProperty.call (inherited from
// Object.prototype), which behaves the same as the prototype form.
var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) return mod;
  var result = {};
  if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
  __setModuleDefault(result, mod);
  return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokit = exports.context = void 0;
// Context describes the current workflow run; utils (module 521) builds the
// plugin-augmented Octokit constructor.
const Context = __importStar(__webpack_require__(262));
const utils_1 = __webpack_require__(521);
exports.context = new Context.Context();
/**
 * Returns a hydrated octokit ready to use for GitHub Actions
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 */
function getOctokit(token, options) {
  // Fold the token into properly formatted constructor options first.
  const octokitOptions = utils_1.getOctokitOptions(token, options);
  return new utils_1.GitHub(octokitOptions);
}
exports.getOctokit = getOctokit;
//# sourceMappingURL=github.js.map
/***/ }),
/***/ 470:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// ---- TypeScript compiler interop helpers (auto-generated emit) ----
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
  Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
  o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) return mod;
  var result = {};
  if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
  __setModuleDefault(result, mod);
  return result;
};
// __awaiter is tsc's async/await downlevel shim: drives a generator whose
// `yield`s are awaited promises.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
  function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
  return new (P || (P = Promise))(function (resolve, reject) {
    function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
    function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
    function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
    step((generator = generator.apply(thisArg, _arguments || [])).next());
  });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
// command_1 emits stdout workflow commands; file_command_1 writes the newer
// file-based commands (GITHUB_ENV / GITHUB_PATH).
const command_1 = __webpack_require__(431);
const file_command_1 = __webpack_require__(102);
const utils_1 = __webpack_require__(82);
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
const oidc_utils_1 = __webpack_require__(742);
/**
 * The code to exit an action
 */
var ExitCode;
(function (ExitCode) {
  /**
   * A code indicating that the action was successful
   */
  ExitCode[ExitCode["Success"] = 0] = "Success";
  /**
   * A code indicating that the action was a failure
   */
  ExitCode[ExitCode["Failure"] = 1] = "Failure";
})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
//-----------------------------------------------------------------------
// Variables
//-----------------------------------------------------------------------
/**
 * Sets env variable for this action and future actions in the job.
 *
 * @param name the name of the variable to set
 * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
 * @throws Error when the name or converted value contains the heredoc
 *         delimiter, which would allow injecting extra variables
 *         (delimiter-injection hardening, CVE-2022-35954)
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
  const convertedVal = utils_1.toCommandValue(val);
  process.env[name] = convertedVal;
  const filePath = process.env['GITHUB_ENV'] || '';
  if (filePath) {
    // Fixed: the previous constant delimiter let a crafted value terminate
    // the heredoc early and smuggle in additional variables. Use a per-call
    // unpredictable delimiter (upstream uses a UUID) and refuse values that
    // contain it — the containment check is the hard guarantee.
    const delimiter = `ghadelimiter_${Math.random().toString(36).slice(2)}${Date.now().toString(36)}`;
    if (name.includes(delimiter)) {
      throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
    }
    if (convertedVal.includes(delimiter)) {
      throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
    }
    const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
    file_command_1.issueCommand('ENV', commandValue);
  }
  else {
    // Legacy runners without GITHUB_ENV fall back to the stdout command.
    command_1.issueCommand('set-env', { name }, convertedVal);
  }
}
exports.exportVariable = exportVariable;
/**
 * Registers a secret which will get masked from logs
 * @param secret value of the secret
 */
function setSecret(secret) {
  command_1.issueCommand('add-mask', {}, secret);
}
exports.setSecret = setSecret;
/**
 * Prepends inputPath to the PATH (for this action and future actions)
 * @param inputPath the directory to prepend to PATH
 */
function addPath(inputPath) {
  // Newer runners expose GITHUB_PATH (a file-based command); older runners
  // fall back to the add-path stdout command.
  const filePath = process.env['GITHUB_PATH'] || '';
  if (filePath) {
    file_command_1.issueCommand('PATH', inputPath);
  }
  else {
    command_1.issueCommand('add-path', {}, inputPath);
  }
  // Also update PATH for the current process so this action can immediately
  // resolve tools from the new directory.
  process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
/**
 * Gets the value of an input.
 * The value is trimmed unless `trimWhitespace` is explicitly set to false
 * in InputOptions. Returns an empty string when the input is not defined.
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string
 * @throws Error when the input is required but empty/undefined
 */
function getInput(name, options) {
  // The runner surfaces inputs as INPUT_<NAME> environment variables,
  // with spaces replaced by underscores.
  const envKey = `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
  const rawValue = process.env[envKey] || '';
  if (options && options.required && !rawValue) {
    throw new Error(`Input required and not supplied: ${name}`);
  }
  return options && options.trimWhitespace === false ? rawValue : rawValue.trim();
}
exports.getInput = getInput;
/**
 * Gets the values of a multiline input. Empty lines are removed; the
 * individual lines are returned as-is (only the whole input is trimmed,
 * by getInput).
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns string[]
 */
function getMultilineInput(name, options) {
  const lines = getInput(name, options).split('\n');
  return lines.filter((line) => line !== '');
}
exports.getMultilineInput = getMultilineInput;
/**
 * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
 * Accepted values: `true | True | TRUE | false | False | FALSE`.
 * ref: https://yaml.org/spec/1.2/spec.html#id2804923
 *
 * @param name name of the input to get
 * @param options optional. See InputOptions.
 * @returns boolean
 * @throws TypeError when the input is not a YAML 1.2 core-schema boolean
 */
function getBooleanInput(name, options) {
  const value = getInput(name, options);
  if (['true', 'True', 'TRUE'].includes(value)) {
    return true;
  }
  if (['false', 'False', 'FALSE'].includes(value)) {
    return false;
  }
  // Anything else — including an unset input, which reads as '' — is rejected.
  throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
    `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
}
exports.getBooleanInput = getBooleanInput;
/**
 * Sets the value of an output.
 *
 * @param name name of the output to set
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
  // Emit a leading newline so the command is not glued onto earlier stdout.
  process.stdout.write(os.EOL);
  command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
 * Enables or disables the echoing of commands into stdout for the rest of the step.
 * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
 *
 */
function setCommandEcho(enabled) {
  command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
/**
 * Sets the action status to failed.
 * When the action exits it will be with an exit code of 1
 * @param message add error issue message
 */
function setFailed(message) {
  process.exitCode = ExitCode.Failure;
  // also surface the message as an error annotation
  error(message);
}
exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
 * Gets whether Actions Step Debug is on or not
 *
 * @returns boolean true when the runner has step-debug logging enabled
 */
function isDebug() {
  // The runner sets RUNNER_DEBUG=1 when ACTIONS_STEP_DEBUG is enabled.
  const flag = process.env['RUNNER_DEBUG'];
  return flag === '1';
}
exports.isDebug = isDebug;
/**
 * Writes debug message to user log
 * @param message debug message
 */
function debug(message) {
  command_1.issueCommand('debug', {}, message);
}
exports.debug = debug;
/**
 * Adds an error issue
 * @param message error issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function error(message, properties = {}) {
  command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.error = error;
/**
 * Adds a warning issue
 * @param message warning issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function warning(message, properties = {}) {
  command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.warning = warning;
/**
 * Adds a notice issue
 * @param message notice issue message. Errors will be converted to string via toString()
 * @param properties optional properties to add to the annotation.
 */
function notice(message, properties = {}) {
  command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
}
exports.notice = notice;
/**
 * Writes info to log with console.log.
 * @param message info message
 */
function info(message) {
  // plain stdout write — info lines are not workflow commands
  process.stdout.write(message + os.EOL);
}
exports.info = info;
/**
 * Begin an output group.
 *
 * Output until the next `groupEnd` will be foldable in this group
 *
 * @param name The name of the output group
 */
function startGroup(name) {
  command_1.issue('group', name);
}
exports.startGroup = startGroup;
/**
 * End an output group.
 */
function endGroup() {
  command_1.issue('endgroup');
}
exports.endGroup = endGroup;
/**
 * Wrap an asynchronous function call in a group.
 *
 * Returns the same type as the function itself.
 *
 * @param name The name of the group
 * @param fn The function to wrap in the group
 */
function group(name, fn) {
  return __awaiter(this, void 0, void 0, function* () {
    startGroup(name);
    let result;
    try {
      result = yield fn();
    }
    finally {
      // always close the group, even when fn() throws
      endGroup();
    }
    return result;
  });
}
exports.group = group;
//-----------------------------------------------------------------------
// Wrapper action state
//-----------------------------------------------------------------------
/**
 * Saves state for current action, the state can only be retrieved by this action's post job execution.
 *
 * @param name name of the state to store
 * @param value value to store. Non-string values will be converted to a string via JSON.stringify
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
  command_1.issueCommand('save-state', { name }, value);
}
exports.saveState = saveState;
/**
 * Gets the value of a state set by this action's main execution.
 *
 * @param name name of the state to get
 * @returns string (empty string when the state was never saved)
 */
function getState(name) {
  // Saved state round-trips through STATE_<name> environment variables.
  const stored = process.env[`STATE_${name}`];
  return stored || '';
}
exports.getState = getState;
/**
 * Requests an OIDC ID token from the Actions token service.
 *
 * @param aud optional audience claim for the token
 * @returns Promise resolving to the signed JWT string
 */
function getIDToken(aud) {
  return __awaiter(this, void 0, void 0, function* () {
    return yield oidc_utils_1.OidcClient.getIDToken(aud);
  });
}
exports.getIDToken = getIDToken;
//# sourceMappingURL=core.js.map
/***/ }),
/***/ 510:
/***/ (function(module) {
module.exports = addHook;
/**
 * Register `hook` under `name` in `state.registry`, wrapping it according to
 * `kind` ("before" | "after" | "error" | "wrap") so it composes with the
 * wrapped method when the hook chain runs. "wrap" hooks are stored as-is.
 */
function addHook(state, kind, name, hook) {
  // Keep the user-supplied function; `wrapped` below may replace the stored
  // hook with a promise-weaving wrapper around it.
  const orig = hook;
  const registry = state.registry;
  if (!registry[name]) {
    registry[name] = [];
  }
  let wrapped = hook;
  if (kind === "before") {
    // Run the hook first, then the wrapped method, both receiving `options`.
    wrapped = function (method, options) {
      return Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options));
    };
  } else if (kind === "after") {
    // Run the method, let the hook observe its result, then return the
    // method's (possibly hook-mutated) result.
    wrapped = function (method, options) {
      let result;
      return Promise.resolve()
        .then(method.bind(null, options))
        .then(function (methodResult) {
          result = methodResult;
          return orig(result, options);
        })
        .then(function () {
          return result;
        });
    };
  } else if (kind === "error") {
    // Give the hook a chance to recover from (or re-throw) the failure.
    wrapped = function (method, options) {
      return Promise.resolve()
        .then(method.bind(null, options))
        .catch(function (error) {
          return orig(error, options);
        });
    };
  }
  registry[name].push({
    hook: wrapped,
    orig: orig,
  });
}
/***/ }),
/***/ 521:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// ---- TypeScript compiler interop helpers (auto-generated emit) ----
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
  Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
  o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) return mod;
  var result = {};
  if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
  __setModuleDefault(result, mod);
  return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getOctokitOptions = exports.GitHub = exports.context = void 0;
const Context = __importStar(__webpack_require__(262));
const Utils = __importStar(__webpack_require__(127));
// octokit + plugins
const core_1 = __webpack_require__(448);
const plugin_rest_endpoint_methods_1 = __webpack_require__(842);
const plugin_paginate_rest_1 = __webpack_require__(299);
exports.context = new Context.Context();
// Default every client to the (possibly GHES) API base URL and a proxy-aware
// HTTP agent derived from the environment.
const baseUrl = Utils.getApiBaseUrl();
const defaults = {
  baseUrl,
  request: {
    agent: Utils.getProxyAgent(baseUrl)
  }
};
// GitHub = Octokit preloaded with the REST-endpoint and pagination plugins.
exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);
/**
 * Convenience function to correctly format Octokit Options to pass into the
 * constructor.
 *
 * @param token the repo PAT or GITHUB_TOKEN
 * @param options other options to set
 * @returns a new options object (the caller's object is never mutated)
 */
function getOctokitOptions(token, options) {
  // Spread to shallow-clone; spreading undefined simply yields {}.
  const opts = { ...options };
  const auth = Utils.getAuthString(token, opts);
  if (auth) {
    opts.auth = auth;
  }
  return opts;
}
exports.getOctokitOptions = getOctokitOptions;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 523:
/***/ (function(module, __unusedexports, __webpack_require__) {
var register = __webpack_require__(280)
var addHook = __webpack_require__(510)
var removeHook = __webpack_require__(866)
// bind with array of arguments: https://stackoverflow.com/a/21792913
// `bindable(fn, null)` yields a function that partially applies `fn` with
// whatever arguments follow — a bind that accepts its args as an array.
var bind = Function.bind
var bindable = bind.bind(bind)
/**
 * Attach the public hook API (remove/before/error/after/wrap) to `hook`,
 * pre-bound to `state` (and, for singular hooks, to the fixed `name`).
 */
function bindApi (hook, state, name) {
  const removeArgs = name ? [state, name] : [state]
  const removeHookRef = bindable(removeHook, null).apply(null, removeArgs)
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef
  for (const kind of ['before', 'error', 'after', 'wrap']) {
    // collection hooks take the name at call time; singular hooks fix it here
    const addArgs = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, addArgs)
  }
}
function HookSingular () {
  // A singular hook registers everything under the fixed internal name "h".
  var singularHookName = 'h'
  var singularHookState = {
    registry: {}
  }
  var singularHook = register.bind(null, singularHookState, singularHookName)
  bindApi(singularHook, singularHookState, singularHookName)
  return singularHook
}
function HookCollection () {
  var state = {
    registry: {}
  }
  var hook = register.bind(null, state)
  bindApi(hook, state)
  return hook
}
var collectionHookDeprecationMessageDisplayed = false
function Hook () {
  // Legacy alias: a bare Hook() behaves like Hook.Collection(), with a
  // one-time deprecation warning.
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}
Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()
module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
/***/ }),
/***/ 526:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// ---- TypeScript compiler interop helpers (auto-generated emit) ----
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
  Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
  o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
  if (mod && mod.__esModule) return mod;
  var result = {};
  if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
  __setModuleDefault(result, mod);
  return result;
};
// tsc's async/await downlevel shim (drives a generator of awaited promises).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
  function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
  return new (P || (P = Promise))(function (resolve, reject) {
    function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
    function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
    function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
    step((generator = generator.apply(thisArg, _arguments || [])).next());
  });
};
Object.defineProperty(exports, "__esModule", { value: true });
// @actions/core (module 470) and @actions/github (module 469)
const core = __importStar(__webpack_require__(470));
const github = __importStar(__webpack_require__(469));
/**
 * Entry point: verifies that the pull request carries every required label.
 *
 * Reads the `github-token`, `pull-number` and `labels` inputs, compares the
 * PR's labels against the required set, and publishes two outputs:
 * `result` (boolean — all required labels present) and `labels` (the PR's
 * label names). Emits a warning when required labels are missing.
 */
function run() {
  return __awaiter(this, void 0, void 0, function* () {
    const token = core.getInput("github-token", { required: true });
    const octokit = github.getOctokit(token);
    const labelNames = yield getPullRequestLabelNames(octokit);
    const labels = getInputLabels();
    // true only when every required label is present on the pull request
    const result = labels.every((label) => labelNames.includes(label));
    if (!result) {
      // Fixed: the original warned when the labels WERE all found (the
      // condition was inverted); warn only when something is missing.
      core.warning("Required labels were not found");
    }
    core.setOutput("result", result);
    core.setOutput("labels", labelNames);
  });
}
/**
 * Fetch the label names attached to the pull request identified by the
 * `pull-number` input in the current repository.
 *
 * @param octokit hydrated Octokit client
 * @returns Promise resolving to the PR's label names (possibly empty)
 */
function getPullRequestLabelNames(octokit) {
  return __awaiter(this, void 0, void 0, function* () {
    // Repository coordinates come from the workflow's event context.
    const { owner, repo } = github.context.repo;
    const pull_number = core.getInput("pull-number", { required: true });
    const response = yield octokit.rest.pulls.get({
      owner,
      repo,
      pull_number,
    });
    const labels = response.data.labels;
    return labels.length > 0 ? labels.map((label) => label.name) : [];
  });
}
/**
 * Parse the required `labels` input, which must be a JSON array
 * (e.g. '["bug","ci"]'). Any non-array JSON value yields an empty list;
 * invalid JSON propagates as a SyntaxError.
 */
function getInputLabels() {
  const rawInput = core.getInput("labels", { required: true });
  const parsed = JSON.parse(rawInput);
  return Array.isArray(parsed) ? parsed : [];
}
// Kick off the action; any rejection (bad input JSON, API failure, missing
// token) marks the workflow step as failed.
run().catch((err) => {
  core.setFailed(err.message);
});
/***/ }),
/***/ 530:
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
var punycode = __webpack_require__(213);
// UTS #46 IDNA mapping table: rows of [[lo, hi], status, mapping?] sorted by
// code-point range.
var mappingTable = __webpack_require__(967);
// IDNA2008 processing modes: TRANSITIONAL maps deviation characters (e.g.
// sharp s), NONTRANSITIONAL keeps them verbatim.
var PROCESSING_OPTIONS = {
  TRANSITIONAL: 0,
  NONTRANSITIONAL: 1
};
function normalize(str) { // fix bug in v8
  // Normalize each NUL-delimited segment separately and rejoin: works around
  // a V8 String#normalize defect when the string contains U+0000.
  const segments = str.split('\u0000');
  return segments.map((segment) => segment.normalize('NFC')).join('\u0000');
}
/**
 * Binary-search `mappingTable` (rows [[lo, hi], status, ...] sorted by
 * code-point range) for the row whose range contains `val`.
 * Returns the matching row, or null when no range covers `val`.
 */
function findStatus(val) {
  let lo = 0;
  let hi = mappingTable.length - 1;
  while (lo <= hi) {
    const mid = Math.floor((lo + hi) / 2);
    const row = mappingTable[mid];
    const range = row[0];
    if (range[0] <= val && val <= range[1]) {
      return row;
    }
    if (range[0] > val) {
      hi = mid - 1;
    } else {
      lo = mid + 1;
    }
  }
  return null;
}
// Matches one surrogate pair (an astral-plane character encoded as two
// UTF-16 code units).
var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g;
/**
 * Count Unicode code points in `string` (rather than UTF-16 units).
 */
function countSymbols(string) {
  // Collapsing every surrogate pair to a single placeholder makes .length
  // equal the number of code points.
  return string.replace(regexAstralSymbols, '_').length;
}
/**
 * UTS #46 mapping step: transform each code point of `domain_name` per the
 * IDNA mapping table, accumulating an error flag for disallowed characters.
 *
 * @param domain_name  domain string to map
 * @param useSTD3      enforce STD3 ASCII rules for *_STD3_* statuses
 * @param processing_option  PROCESSING_OPTIONS.TRANSITIONAL or .NONTRANSITIONAL
 * @returns { string, error } the mapped string plus an error flag
 */
function mapChars(domain_name, useSTD3, processing_option) {
  var hasError = false;
  var processed = "";
  // NOTE(review): `len` counts code points but the loop indexes
  // `codePointAt(i)` by UTF-16 units, so astral characters are read at
  // odd offsets — this mirrors the upstream tr46 implementation; confirm
  // against upstream before changing.
  var len = countSymbols(domain_name);
  for (var i = 0; i < len; ++i) {
    var codePoint = domain_name.codePointAt(i);
    // NOTE(review): findStatus can return null for uncovered code points;
    // upstream relies on the mapping table spanning the full range.
    var status = findStatus(codePoint);
    switch (status[1]) {
      case "disallowed":
        hasError = true;
        processed += String.fromCodePoint(codePoint);
        break;
      case "ignored":
        // dropped entirely from the output
        break;
      case "mapped":
        processed += String.fromCodePoint.apply(String, status[2]);
        break;
      case "deviation":
        // mapped only under transitional processing
        if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {
          processed += String.fromCodePoint.apply(String, status[2]);
        } else {
          processed += String.fromCodePoint(codePoint);
        }
        break;
      case "valid":
        processed += String.fromCodePoint(codePoint);
        break;
      case "disallowed_STD3_mapped":
        if (useSTD3) {
          hasError = true;
          processed += String.fromCodePoint(codePoint);
        } else {
          processed += String.fromCodePoint.apply(String, status[2]);
        }
        break;
      case "disallowed_STD3_valid":
        if (useSTD3) {
          hasError = true;
        }
        processed += String.fromCodePoint(codePoint);
        break;
    }
  }
  return {
    string: processed,
    error: hasError
  };
}
var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\
uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/;
/**
 * Validates a single domain label per UTS #46 "Validity Criteria".
 * Punycoded (xn--) labels are decoded first and always checked
 * non-transitionally.
 * @param label The label to validate.
 * @param processing_option PROCESSING_OPTIONS.TRANSITIONAL or .NONTRANSITIONAL.
 * @returns { label, error } — the (possibly decoded) label and an error flag.
 */
function validateLabel(label, processing_option) {
    if (label.substr(0, 4) === "xn--") {
        label = punycode.toUnicode(label);
        processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;
    }
    var error = false;
    // Reject labels that are not NFC-normalized, have "--" in positions
    // 3-4, start or end with a hyphen, contain a full stop, or begin
    // with a combining mark.
    if (normalize(label) !== label ||
        (label[3] === "-" && label[4] === "-") ||
        label[0] === "-" || label[label.length - 1] === "-" ||
        label.indexOf(".") !== -1 ||
        label.search(combiningMarksRegex) === 0) {
        error = true;
    }
    // BUG FIX: the original compared the module-level `processing`
    // FUNCTION against the option constants (always false), so this
    // status loop could never flag an invalid code point. Compare the
    // `processing_option` argument instead.
    var len = countSymbols(label);
    for (var i = 0; i < len; ++i) {
        var status = findStatus(label.codePointAt(i));
        if ((processing_option === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") ||
            (processing_option === PROCESSING_OPTIONS.NONTRANSITIONAL &&
                status[1] !== "valid" && status[1] !== "deviation")) {
            error = true;
            break;
        }
    }
    return {
        label: label,
        error: error
    };
}
/**
 * UTS #46 "Processing" step: maps characters, normalizes, then
 * validates each dot-separated label.
 * @param domain_name Domain to process.
 * @param useSTD3 Whether to apply strict STD3 ASCII rules.
 * @param processing_option PROCESSING_OPTIONS.TRANSITIONAL or .NONTRANSITIONAL.
 * @returns { string, error } — the processed domain and an error flag.
 */
function processing(domain_name, useSTD3, processing_option) {
    var result = mapChars(domain_name, useSTD3, processing_option);
    result.string = normalize(result.string);
    var labels = result.string.split(".");
    for (var i = 0; i < labels.length; ++i) {
        try {
            // BUG FIX: forward the processing option. The original called
            // validateLabel(labels[i]) with the option undefined, so the
            // transitional/non-transitional per-code-point checks never
            // applied.
            var validation = validateLabel(labels[i], processing_option);
            labels[i] = validation.label;
            result.error = result.error || validation.error;
        } catch(e) {
            // punycode.toUnicode can throw on malformed xn-- labels;
            // treat that as a validation error rather than crashing.
            result.error = true;
        }
    }
    return {
        string: labels.join("."),
        error: result.error
    };
}
/**
 * UTS #46 ToASCII: processes the domain, punycode-encodes each label,
 * and optionally verifies DNS length limits (253 bytes total without
 * the trailing dot, 1-63 bytes per label).
 * @returns the ASCII domain string, or null on any error.
 */
module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {
    var result = processing(domain_name, useSTD3, processing_option);
    var labels = result.string.split(".");
    labels = labels.map(function(l) {
        try {
            return punycode.toASCII(l);
        } catch(e) {
            result.error = true;
            return l;
        }
    });
    if (verifyDnsLength) {
        // BUG FIX: `total` is already a number (String#length), so the
        // original `total.length > 253 || total.length === 0` compared
        // undefined and was always false — the overall DNS length was
        // never actually verified.
        var total = labels.slice(0, labels.length - 1).join(".").length;
        if (total > 253 || total === 0) {
            result.error = true;
        }
        for (var i = 0; i < labels.length; ++i) {
            // BUG FIX: check each LABEL's length; the original tested
            // `labels.length` (the number of labels) on every iteration.
            if (labels[i].length > 63 || labels[i].length === 0) {
                result.error = true;
                break;
            }
        }
    }
    if (result.error) return null;
    return labels.join(".");
};
/**
 * UTS #46 ToUnicode: always uses non-transitional processing.
 * @returns { domain, error } — the Unicode form and an error flag.
 */
module.exports.toUnicode = function(domain_name, useSTD3) {
    var processed = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);
    return {
        domain: processed.string,
        error: processed.error
    };
};
module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;
/***/ }),
/***/ 539:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const http = __webpack_require__(605);
const https = __webpack_require__(211);
const pm = __webpack_require__(950);
// Lazily loaded tunnel-agent module; only required when a proxy is used
// (see _getAgent below).
let tunnel;
// TypeScript-style numeric enum of HTTP status codes (reverse-mapped:
// HttpCodes[200] === "OK").
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
// String enum of header names used by the JSON convenience methods.
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
// String enum of media types used for Accept/Content-Type defaults.
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
 * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 * @returns the proxy URL as a string, or '' when no proxy applies.
 */
function getProxyUrl(serverUrl) {
    const proxy = pm.getProxyUrl(new URL(serverUrl));
    if (proxy) {
        return proxy.href;
    }
    return '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
/**
 * Error thrown for non-success HTTP responses; carries the status code.
 */
class HttpClientError extends Error {
    /**
     * @param message Human-readable error description.
     * @param statusCode HTTP status code of the failed response.
     */
    constructor(message, statusCode) {
        super(message);
        // Restore the prototype chain so `instanceof HttpClientError`
        // works even when the class is downleveled to ES5.
        Object.setPrototypeOf(this, HttpClientError.prototype);
        this.name = 'HttpClientError';
        this.statusCode = statusCode;
    }
}
exports.HttpClientError = HttpClientError;
/**
 * Wraps a raw incoming HTTP message so its body can be read as a
 * single string.
 */
class HttpClientResponse {
    /**
     * @param message The underlying response message/stream (emits
     *                'data' Buffer chunks followed by 'end').
     */
    constructor(message) {
        this.message = message;
    }
    /**
     * Buffers all 'data' chunks and resolves with the concatenated body
     * once the stream ends.
     * Fix: the original used an `async` Promise executor — an
     * anti-pattern where a throw inside the executor is swallowed
     * instead of rejecting the promise; the executor is now a plain
     * function (the `async` was unnecessary: nothing was awaited).
     * @returns Promise<string> the full response body.
     */
    readBody() {
        return new Promise((resolve) => {
            let output = Buffer.alloc(0);
            this.message.on('data', (chunk) => {
                output = Buffer.concat([output, chunk]);
            });
            this.message.on('end', () => {
                resolve(output.toString());
            });
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
/**
 * Tests whether the given request URL uses the https: scheme.
 * @param requestUrl Absolute URL string.
 * @returns true when the protocol is https.
 */
function isHttps(requestUrl) {
    const scheme = new URL(requestUrl).protocol;
    return scheme === 'https:';
}
exports.isHttps = isHttps;
// HTTP/HTTPS client with optional authentication handlers, proxy
// support (via tunnel-agent), redirect following, and retry with
// exponential backoff for idempotent verbs.
class HttpClient {
// Creates a client. `handlers` participate in request preparation and
// 401 handling; `requestOptions` configures SSL, redirects, retries,
// keep-alive, socket timeout, and max sockets.
constructor(userAgent, handlers, requestOptions) {
this._ignoreSslError = false;
this._allowRedirects = true;
this._allowRedirectDowngrade = false;
this._maxRedirects = 50;
this._allowRetries = false;
this._maxRetries = 1;
this._keepAlive = false;
this._disposed = false;
this.userAgent = userAgent;
this.handlers = handlers || [];
this.requestOptions = requestOptions;
if (requestOptions) {
// `!= null` intentionally accepts explicit `false`/`0` overrides
// while ignoring undefined.
if (requestOptions.ignoreSslError != null) {
this._ignoreSslError = requestOptions.ignoreSslError;
}
this._socketTimeout = requestOptions.socketTimeout;
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
// Convenience verb wrappers; all delegate to request().
options(requestUrl, additionalHeaders) {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
}
get(requestUrl, additionalHeaders) {
return this.request('GET', requestUrl, null, additionalHeaders || {});
}
del(requestUrl, additionalHeaders) {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
}
post(requestUrl, data, additionalHeaders) {
return this.request('POST', requestUrl, data, additionalHeaders || {});
}
patch(requestUrl, data, additionalHeaders) {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
}
put(requestUrl, data, additionalHeaders) {
return this.request('PUT', requestUrl, data, additionalHeaders || {});
}
head(requestUrl, additionalHeaders) {
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
}
// Sends a readable stream as the request body.
sendStream(verb, requestUrl, stream, additionalHeaders) {
return this.request(verb, requestUrl, stream, additionalHeaders);
}
/**
 * Gets a typed object from an endpoint
 * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
 */
async getJson(requestUrl, additionalHeaders = {}) {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
let res = await this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
// POSTs `obj` as pretty-printed JSON and parses the JSON response.
async postJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
// PUTs `obj` as pretty-printed JSON and parses the JSON response.
async putJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
// PATCHes `obj` as pretty-printed JSON and parses the JSON response.
async patchJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
/**
 * Makes a raw http request.
 * All other methods such as get, post, patch, and request ultimately call this.
 * Prefer get, del, post and patch
 */
async request(verb, requestUrl, data, headers) {
if (this._disposed) {
throw new Error('Client has already been disposed.');
}
let parsedUrl = new URL(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
? this._maxRetries + 1
: 1;
let numTries = 0;
let response;
while (numTries < maxTries) {
response = await this.requestRaw(info, data);
// Check if it's an authentication challenge
if (response &&
response.message &&
response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (let i = 0; i < this.handlers.length; i++) {
if (this.handlers[i].canHandleAuthentication(response)) {
authenticationHandler = this.handlers[i];
break;
}
}
if (authenticationHandler) {
return authenticationHandler.handleAuthentication(this, info, data);
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response;
}
}
let redirectsRemaining = this._maxRedirects;
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
this._allowRedirects &&
redirectsRemaining > 0) {
const redirectUrl = response.message.headers['location'];
if (!redirectUrl) {
// if there's no location to redirect to, we won't
break;
}
let parsedRedirectUrl = new URL(redirectUrl);
// Refuse HTTPS -> HTTP downgrades unless explicitly allowed.
// NOTE(review): this compares against the ORIGINAL request's
// protocol even after several redirects — confirm that is the
// intended policy.
if (parsedUrl.protocol == 'https:' &&
parsedUrl.protocol != parsedRedirectUrl.protocol &&
!this._allowRedirectDowngrade) {
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
}
// we need to finish reading the response before reassigning response
// which will leak the open socket.
await response.readBody();
// strip authorization header if redirected to a different hostname
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
for (let header in headers) {
// header names are case insensitive
if (header.toLowerCase() === 'authorization') {
delete headers[header];
}
}
}
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = await this.requestRaw(info, data);
redirectsRemaining--;
}
if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
// If not a retry code, return immediately instead of retrying
return response;
}
numTries += 1;
if (numTries < maxTries) {
// Drain the body before retrying so the socket is released.
await response.readBody();
await this._performExponentialBackoff(numTries);
}
}
return response;
}
/**
 * Needs to be called if keepAlive is set to true in request options.
 */
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
/**
 * Raw request.
 * @param info
 * @param data
 */
requestRaw(info, data) {
return new Promise((resolve, reject) => {
let callbackForResult = function (err, res) {
if (err) {
reject(err);
}
resolve(res);
};
this.requestRawWithCallback(info, data, callbackForResult);
});
}
/**
 * Raw request with callback.
 * @param info
 * @param data  string body, readable stream, or null
 * @param onResult  called exactly once with (err, response)
 */
requestRawWithCallback(info, data, onResult) {
let socket;
if (typeof data === 'string') {
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
// Guard so timeout/error/response races invoke onResult only once.
let handleResult = (err, res) => {
if (!callbackCalled) {
callbackCalled = true;
onResult(err, res);
}
};
let req = info.httpModule.request(info.options, (msg) => {
let res = new HttpClientResponse(msg);
handleResult(null, res);
});
req.on('socket', sock => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
if (socket) {
socket.end();
}
handleResult(new Error('Request timeout: ' + info.options.path), null);
});
req.on('error', function (err) {
// err has statusCode property
// res should have headers
handleResult(err, null);
});
if (data && typeof data === 'string') {
req.write(data, 'utf8');
}
if (data && typeof data !== 'string') {
// Stream body: end the request when the source stream closes.
data.on('close', function () {
req.end();
});
data.pipe(req);
}
else {
req.end();
}
}
/**
 * Gets an http agent. This function is useful when you need an http agent that handles
 * routing through a proxy server - depending upon the url and proxy environment variables.
 * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
 */
getAgent(serverUrl) {
let parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
// Builds the {parsedUrl, httpModule, options} record consumed by
// requestRawWithCallback, merging default headers and letting each
// handler (e.g. auth) mutate the options.
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
this.handlers.forEach(handler => {
handler.prepareRequest(info.options);
});
}
return info;
}
// Merges per-request headers over the client's default headers,
// lower-casing all keys (header names are case-insensitive).
_mergeHeaders(headers) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
}
return lowercaseKeys(headers || {});
}
// Resolution order: per-call header, then client default, then the
// supplied fallback.
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
// Selects (and caches, when keep-alive) the agent for a URL: a
// tunnel-agent when a proxy applies, a private keep-alive agent, or
// the module's global agent.
_getAgent(parsedUrl) {
let agent;
let proxyUrl = pm.getProxyUrl(parsedUrl);
let useProxy = proxyUrl && proxyUrl.hostname;
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
if (!!agent) {
return agent;
}
const usingSsl = parsedUrl.protocol === 'https:';
let maxSockets = 100;
if (!!this.requestOptions) {
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
}
if (useProxy) {
// If using proxy, need tunnel
if (!tunnel) {
tunnel = __webpack_require__(413);
}
const agentOptions = {
maxSockets: maxSockets,
keepAlive: this._keepAlive,
proxy: {
...((proxyUrl.username || proxyUrl.password) && {
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
}),
host: proxyUrl.hostname,
port: proxyUrl.port
}
};
let tunnelAgent;
const overHttps = proxyUrl.protocol === 'https:';
// Pick the tunnel flavour from target scheme x proxy scheme.
if (usingSsl) {
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
}
else {
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
}
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing agent across request and tunneling agent isn't assigned create a new agent
if (this._keepAlive && !agent) {
const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using private agent and tunnel agent isn't setup then use global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, {
rejectUnauthorized: false
});
}
return agent;
}
// Waits ExponentialBackoffTimeSlice * 2^retryNumber ms, with the
// exponent capped at ExponentialBackoffCeiling.
_performExponentialBackoff(retryNumber) {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
}
// JSON.parse reviver that revives any string parseable as a Date.
// NOTE(review): any date-like string value becomes a Date object,
// not just ISO timestamps — confirm callers expect this.
static dateTimeDeserializer(key, value) {
if (typeof value === 'string') {
let a = new Date(value);
if (!isNaN(a.valueOf())) {
return a;
}
}
return value;
}
// Reads and JSON-parses the response body; resolves with
// {statusCode, result, headers}, rejecting with HttpClientError for
// statuses > 299 (except 404, which resolves with result null).
async _processResponse(res, options) {
return new Promise(async (resolve, reject) => {
const statusCode = res.message.statusCode;
const response = {
statusCode: statusCode,
result: null,
headers: {}
};
// not found leads to null obj returned
// NOTE(review): there is no `return` after resolve(), so the body
// is still read and the >299 branch still builds (and discards) an
// error for 404 — confirm this is intentional.
if (statusCode == HttpCodes.NotFound) {
resolve(response);
}
let obj;
let contents;
// get the result from the body
try {
contents = await res.readBody();
if (contents && contents.length > 0) {
if (options && options.deserializeDates) {
obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
}
else {
obj = JSON.parse(contents);
}
response.result = obj;
}
response.headers = res.message.headers;
}
catch (err) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if (statusCode > 299) {
let msg;
// if exception/error in body, attempt to get better error
if (obj && obj.message) {
msg = obj.message;
}
else if (contents && contents.length > 0) {
// it may be the case that the exception is in the body message as string
msg = contents;
}
else {
msg = 'Failed request: (' + statusCode + ')';
}
let err = new HttpClientError(msg, statusCode);
err.result = response.result;
reject(err);
}
else {
resolve(response);
}
});
}
}
exports.HttpClient = HttpClient;
/***/ }),
// The following webpack modules simply re-export Node.js built-ins so
// bundled code can require them by numeric id.
/***/ 605:
/***/ (function(module) {
module.exports = require("http");
/***/ }),
/***/ 614:
/***/ (function(module) {
module.exports = require("events");
/***/ }),
/***/ 622:
/***/ (function(module) {
module.exports = require("path");
/***/ }),
/***/ 631:
/***/ (function(module) {
module.exports = require("net");
/***/ }),
/***/ 669:
/***/ (function(module) {
module.exports = require("util");
/***/ }),
/***/ 692:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
/**
 * Error subclass used to signal deprecated API usage.
 */
class Deprecation extends Error {
    /**
     * @param message Description of what is deprecated and what to use instead.
     */
    constructor(message) {
        super(message); // Maintains proper stack trace (only available on V8)
        /* istanbul ignore next */
        if (Error.captureStackTrace) {
            // Exclude this constructor frame from the captured stack.
            Error.captureStackTrace(this, this.constructor);
        }
        this.name = 'Deprecation';
    }
}
exports.Deprecation = Deprecation;
/***/ }),
/***/ 742:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// TypeScript-emitted __awaiter helper: drives a generator produced by
// downleveled async/await, resolving the returned promise when the
// generator completes and rejecting when it throws. Reuses an existing
// this.__awaiter if one is already defined.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.OidcClient = void 0;
const http_client_1 = __webpack_require__(539);
const auth_1 = __webpack_require__(226);
const core_1 = __webpack_require__(470);
// Fetches GitHub Actions OIDC ID tokens from the runtime token service
// using the ACTIONS_ID_TOKEN_REQUEST_URL/_TOKEN environment variables.
class OidcClient {
// Builds an HttpClient authenticated with the runtime request token.
static createHttpClient(allowRetry = true, maxRetry = 10) {
const requestOptions = {
allowRetries: allowRetry,
maxRetries: maxRetry
};
return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
}
// Reads the bearer token the runner injects for the token service.
static getRequestToken() {
const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
if (!token) {
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
}
return token;
}
// Reads the token-service endpoint URL injected by the runner.
static getIDTokenUrl() {
const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
if (!runtimeUrl) {
throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
}
return runtimeUrl;
}
// GETs the ID token from the service and extracts res.result.value.
// NOTE(review): the catch handler reads error.result.message — if the
// failure has no parsed body, error.result is undefined and this
// itself throws a TypeError; confirm against the HttpClient error shape.
static getCall(id_token_url) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const httpclient = OidcClient.createHttpClient();
const res = yield httpclient
.getJson(id_token_url)
.catch(error => {
throw new Error(`Failed to get ID Token. \n
        Error Code : ${error.statusCode}\n
        Error Message: ${error.result.message}`);
});
const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
if (!id_token) {
throw new Error('Response json body do not have ID Token field');
}
return id_token;
});
}
// Public entry point: requests a token (optionally bound to an
// audience), registers it as a secret so it is masked in logs, and
// returns it.
static getIDToken(audience) {
return __awaiter(this, void 0, void 0, function* () {
try {
// New ID Token is requested from action service
let id_token_url = OidcClient.getIDTokenUrl();
if (audience) {
const encodedAudience = encodeURIComponent(audience);
id_token_url = `${id_token_url}&audience=${encodedAudience}`;
}
core_1.debug(`ID token url is ${id_token_url}`);
const id_token = yield OidcClient.getCall(id_token_url);
core_1.setSecret(id_token);
return id_token;
}
catch (error) {
throw new Error(`Error message: ${error.message}`);
}
});
}
}
exports.OidcClient = OidcClient;
//# sourceMappingURL=oidc-utils.js.map
/***/ }),
/***/ 747:
/***/ (function(module) {
module.exports = require("fs");
/***/ }),
/***/ 751:
/***/ (function(module) {
"use strict";
var conversions = {};
module.exports = conversions;
/** Returns -1 when x is negative, otherwise +1 (including for 0 and NaN). */
function sign(x) {
    if (x < 0) {
        return -1;
    }
    return 1;
}
/**
 * Rounds x to the nearest integer; an exact .5 above an even integer is
 * rounded down to that even integer, everything else uses Math.round.
 * (Negative .5 values fall through to Math.round, because x % 1 is
 * negative for negative x.)
 */
function evenRound(x) {
    var isPositiveHalf = (x % 1) === 0.5;
    var truncatesToEven = (x & 1) === 0;
    if (isPositiveHalf && truncatesToEven) { // [even number].5; round down (i.e. floor)
        return Math.floor(x);
    }
    return Math.round(x);
}
/**
 * Builds a WebIDL ECMAScript-to-integer conversion function for an
 * integer type of `bitLength` bits.
 * @param bitLength Number of bits in the IDL type.
 * @param typeOpts { unsigned, moduloBitLength? } — moduloBitLength
 *        overrides the wrap-around width (used for the 64-bit types,
 *        which are approximated with doubles).
 * @returns function(V, opts) applying enforceRange / clamp / modulo
 *          semantics, in that priority order.
 */
function createNumberConversion(bitLength, typeOpts) {
    if (!typeOpts.unsigned) {
        --bitLength;
    }
    const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);
    const upperBound = Math.pow(2, bitLength) - 1;
    const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);
    const moduloBound = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);
    return function(value, opts) {
        opts = opts || {};
        let num = +value;
        // [EnforceRange]: reject non-finite and out-of-range values.
        if (opts.enforceRange) {
            if (!Number.isFinite(num)) {
                throw new TypeError("Argument is not a finite number");
            }
            num = sign(num) * Math.floor(Math.abs(num));
            if (num < lowerBound || num > upperBound) {
                throw new TypeError("Argument is not in byte range");
            }
            return num;
        }
        // [Clamp]: round to nearest (ties to even) and pin to range.
        if (!isNaN(num) && opts.clamp) {
            num = evenRound(num);
            if (num < lowerBound) num = lowerBound;
            if (num > upperBound) num = upperBound;
            return num;
        }
        // Default: NaN, +/-Infinity and 0 all convert to 0.
        if (!Number.isFinite(num) || num === 0) {
            return 0;
        }
        // Truncate toward zero, then wrap modulo 2^bits.
        num = sign(num) * Math.floor(Math.abs(num));
        num = num % moduloVal;
        if (!typeOpts.unsigned && num >= moduloBound) {
            return num - moduloVal;
        }
        if (typeOpts.unsigned) {
            if (num < 0) {
                num += moduloVal;
            } else if (num === -0) { // don't return negative zero
                return 0;
            }
        }
        return num;
    };
}
// WebIDL primitive conversions, keyed by IDL type name.
conversions["void"] = function () {
return undefined;
};
conversions["boolean"] = function (val) {
return !!val;
};
// Integer types built by the factory above. The 64-bit variants use
// 32-bit base arithmetic with moduloBitLength 64; doubles cannot
// represent every 64-bit integer, so values beyond 2^53 lose precision.
conversions["byte"] = createNumberConversion(8, { unsigned: false });
conversions["octet"] = createNumberConversion(8, { unsigned: true });
conversions["short"] = createNumberConversion(16, { unsigned: false });
conversions["unsigned short"] = createNumberConversion(16, { unsigned: true });
conversions["long"] = createNumberConversion(32, { unsigned: false });
conversions["unsigned long"] = createNumberConversion(32, { unsigned: true });
conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });
conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });
// "double" rejects NaN and infinities; "unrestricted double" rejects
// only NaN.
conversions["double"] = function (V) {
const x = +V;
if (!Number.isFinite(x)) {
throw new TypeError("Argument is not a finite floating-point value");
}
return x;
};
conversions["unrestricted double"] = function (V) {
const x = +V;
if (isNaN(x)) {
throw new TypeError("Argument is NaN");
}
return x;
};
// not quite valid, but good enough for JS
conversions["float"] = conversions["double"];
conversions["unrestricted float"] = conversions["unrestricted double"];
conversions["DOMString"] = function (V, opts) {
if (!opts) opts = {};
if (opts.treatNullAsEmptyString && V === null) {
return "";
}
return String(V);
};
// ByteString: every code point must fit in a single byte (<= 255).
conversions["ByteString"] = function (V, opts) {
const x = String(V);
let c = undefined;
for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {
if (c > 255) {
throw new TypeError("Argument is not a valid bytestring");
}
}
return x;
};
// USVString: replaces unpaired surrogates with U+FFFD so the result
// contains only Unicode scalar values.
conversions["USVString"] = function (V) {
const S = String(V);
const n = S.length;
const U = [];
for (let i = 0; i < n; ++i) {
const c = S.charCodeAt(i);
if (c < 0xD800 || c > 0xDFFF) {
U.push(String.fromCodePoint(c));
} else if (0xDC00 <= c && c <= 0xDFFF) {
// Lone low surrogate.
U.push(String.fromCodePoint(0xFFFD));
} else {
if (i === n - 1) {
// High surrogate at end of string.
U.push(String.fromCodePoint(0xFFFD));
} else {
const d = S.charCodeAt(i + 1);
if (0xDC00 <= d && d <= 0xDFFF) {
// Valid surrogate pair: recombine into one code point.
const a = c & 0x3FF;
const b = d & 0x3FF;
U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));
++i;
} else {
U.push(String.fromCodePoint(0xFFFD));
}
}
}
}
return U.join('');
};
// Invalid Dates (NaN time value) convert to undefined.
conversions["Date"] = function (V, opts) {
if (!(V instanceof Date)) {
throw new TypeError("Argument is not a Date object");
}
if (isNaN(V)) {
return undefined;
}
return V;
};
// Non-RegExp values are coerced by constructing a RegExp from them.
conversions["RegExp"] = function (V, opts) {
if (!(V instanceof RegExp)) {
V = new RegExp(V);
}
return V;
};
/***/ }),
/***/ 753:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var endpoint = __webpack_require__(385);
var universalUserAgent = __webpack_require__(796);
var isPlainObject = __webpack_require__(356);
var nodeFetch = _interopDefault(__webpack_require__(454));
var requestError = __webpack_require__(463);
const VERSION = "5.6.2";
/**
 * Reads a fetch Response body as an ArrayBuffer.
 * @param response A fetch-style Response object.
 * @returns whatever response.arrayBuffer() returns (a Promise<ArrayBuffer>).
 */
function getBufferResponse(response) {
    const bufferPromise = response.arrayBuffer();
    return bufferPromise;
}
// Executes an octokit request via fetch (node-fetch by default),
// normalizing the result to {status, url, headers, data} and raising
// RequestError for deprecations/304/4xx/5xx and network failures.
function fetchWrapper(requestOptions) {
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
// Serialize plain-object / array bodies to JSON.
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
requestOptions.body = JSON.stringify(requestOptions.body);
}
let headers = {};
let status;
let url;
// Allow callers to inject a custom fetch implementation.
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
return fetch(requestOptions.url, Object.assign({
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
redirect: requestOptions.redirect
}, // `requestOptions.request.agent` type is incompatible
// see https://github.com/octokit/types.ts/pull/264
requestOptions.request)).then(async response => {
url = response.url;
status = response.status;
// Copy response headers into a plain lower-cased object.
for (const keyAndValue of response.headers) {
headers[keyAndValue[0]] = keyAndValue[1];
}
// Surface GitHub's "deprecation" header (with sunset date and an
// optional documentation link) as a warning.
if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop();
log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
}
// 204/205 have no body by definition.
if (status === 204 || status === 205) {
return;
} // GitHub API returns 200 for HEAD requests
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
throw new requestError.RequestError(response.statusText, status, {
response: {
url,
status,
headers,
data: undefined
},
request: requestOptions
});
}
// 304 Not Modified is reported as an error so callers can use caches.
if (status === 304) {
throw new requestError.RequestError("Not modified", status, {
response: {
url,
status,
headers,
data: await getResponseData(response)
},
request: requestOptions
});
}
if (status >= 400) {
const data = await getResponseData(response);
const error = new requestError.RequestError(toErrorMessage(data), status, {
response: {
url,
status,
headers,
data
},
request: requestOptions
});
throw error;
}
return getResponseData(response);
}).then(data => {
return {
status,
url,
headers,
data
};
}).catch(error => {
// Re-throw RequestErrors unchanged; wrap anything else (network
// failures etc.) in a RequestError with status 500.
if (error instanceof requestError.RequestError) throw error;
throw new requestError.RequestError(error.message, 500, {
request: requestOptions
});
});
}
// Decode a fetch Response body based on its Content-Type header:
// JSON -> parsed object, textual -> string, anything else -> ArrayBuffer.
async function getResponseData(response) {
  const mimeType = response.headers.get("content-type");
  const isJson = /application\/json/.test(mimeType);
  if (isJson) {
    return response.json();
  }
  // A missing header is assumed textual, matching the original behavior.
  const isTextual = !mimeType || /^text\/|charset=utf-8$/.test(mimeType);
  return isTextual ? response.text() : getBufferResponse(response);
}
// Build a human-readable message from a GitHub error payload, which is
// either a bare string or an object shaped like { message, errors? }.
function toErrorMessage(data) {
  if (typeof data === "string") {
    return data;
  }
  // istanbul ignore else - just in case
  if ("message" in data) {
    const { message, errors } = data;
    if (Array.isArray(errors)) {
      const details = errors.map(JSON.stringify).join(", ");
      return `${message}: ${details}`;
    }
    return message;
  }
  // istanbul ignore next - just in case
  return `Unknown error: ${JSON.stringify(data)}`;
}
// Create a `request` function with `newDefaults` baked in. The returned
// function also exposes `.endpoint` and `.defaults` for further chaining.
function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = function (route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);

    // Fast path: no request hook registered — dispatch directly.
    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }

    // Hooked path: hand the hook a request function that itself carries
    // `endpoint` and `defaults`, mirroring the public API surface.
    const request = (innerRoute, innerParameters) =>
      fetchWrapper(endpoint.parse(endpoint.merge(innerRoute, innerParameters)));
    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });
    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}
// Default exported `request` instance: endpoint defaults plus an
// octokit-request user-agent header identifying version and runtime.
const request = withDefaults(endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  }
});
exports.request = request;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 761:
/***/ (function(module) {
module.exports = require("zlib");
/***/ }),
/***/ 794:
/***/ (function(module) {
module.exports = require("stream");
/***/ }),
/***/ 796:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
// Produce a user-agent string describing the current JS runtime.
function getUserAgent() {
  // Browser (or any runtime exposing `navigator`): use its UA string.
  if (typeof navigator === "object" && "userAgent" in navigator) {
    return navigator.userAgent;
  }
  // Node.js: synthesize "Node.js/<version> (<platform>; <arch>)".
  if (typeof process === "object" && "version" in process) {
    const version = process.version.slice(1);
    return `Node.js/${version} (${process.platform}; ${process.arch})`;
  }
  // Neither detection succeeded.
  return "<environment undetectable>";
}
exports.getUserAgent = getUserAgent;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 813:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
// Token-prefix patterns used to classify GitHub tokens.
const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
const REGEX_IS_INSTALLATION = /^ghs_/;
const REGEX_IS_USER_TO_SERVER = /^ghu_/;

// Classify `token` and wrap it in an authentication result object
// ({ type: "token", token, tokenType }).
async function auth(token) {
  // A JWT (GitHub App token) has exactly three dot-separated segments.
  let tokenType;
  if (token.split(/\./).length === 3) {
    tokenType = "app";
  } else if (REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token)) {
    tokenType = "installation";
  } else if (REGEX_IS_USER_TO_SERVER.test(token)) {
    tokenType = "user-to-server";
  } else {
    tokenType = "oauth";
  }
  return {
    type: "token",
    token: token,
    tokenType
  };
}
/**
 * Prefix token for usage in the Authorization header
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
  // Three dot-separated segments => JWT => "bearer"; otherwise "token".
  const isJsonWebToken = token.split(/\./).length === 3;
  return isJsonWebToken ? `bearer ${token}` : `token ${token}`;
}
// Request hook bound to a token: merge route + parameters into endpoint
// options, inject the Authorization header, then dispatch the request.
async function hook(token, request, route, parameters) {
  const endpoint = request.endpoint.merge(route, parameters);
  endpoint.headers.authorization = withAuthorizationPrefix(token);
  return request(endpoint);
}
// Build an auth strategy for a static token. The returned function resolves
// authentication info, and its `.hook` injects the token into requests.
const createTokenAuth = function createTokenAuth(token) {
  // Validate early: a token is mandatory and must be a string.
  if (!token) {
    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
  }
  if (typeof token !== "string") {
    throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
  }
  // Strip any existing "token " / "bearer " prefix so it is not doubled later.
  const normalizedToken = token.replace(/^(token|bearer) +/i, "");
  const authFn = auth.bind(null, normalizedToken);
  authFn.hook = hook.bind(null, normalizedToken);
  return authFn;
};
exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 835:
/***/ (function(module) {
module.exports = require("url");
/***/ }),
/***/ 842:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
// Collect own enumerable string keys plus own symbol keys of `object`.
// When `enumerableOnly` is set, symbols are filtered to enumerable ones.
function ownKeys(object, enumerableOnly) {
  const keys = Object.keys(object);
  if (Object.getOwnPropertySymbols) {
    let symbols = Object.getOwnPropertySymbols(object);
    if (enumerableOnly) {
      symbols = symbols.filter(
        (sym) => Object.getOwnPropertyDescriptor(object, sym).enumerable
      );
    }
    keys.push(...symbols);
  }
  return keys;
}
// Babel `objectSpread2` helper: merges each additional argument into
// `target`, emulating object spread ({ ...a, ...b }). Mutates and
// returns `target`.
function _objectSpread2(target) {
  for (var i = 1; i < arguments.length; i++) {
    // Null/undefined sources are skipped by substituting an empty object.
    var source = arguments[i] != null ? arguments[i] : {};
    // Odd-indexed sources are copied key-by-key (values only, own
    // enumerable string keys + symbols); even-indexed sources are copied
    // with full property descriptors when the runtime supports it.
    if (i % 2) {
      ownKeys(Object(source), true).forEach(function (key) {
        _defineProperty(target, key, source[key]);
      });
    } else if (Object.getOwnPropertyDescriptors) {
      Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
    } else {
      // Legacy fallback: copy each own key's descriptor individually.
      ownKeys(Object(source)).forEach(function (key) {
        Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
      });
    }
  }
  return target;
}
// Babel `defineProperty` helper: set `obj[key] = value`, using
// Object.defineProperty for keys already present (including inherited
// ones) so the write cannot be intercepted by a setter. Returns `obj`.
function _defineProperty(obj, key, value) {
  if (!(key in obj)) {
    obj[key] = value;
    return obj;
  }
  Object.defineProperty(obj, key, {
    value: value,
    enumerable: true,
    configurable: true,
    writable: true
  });
  return obj;
}
const Endpoints = {
actions: {
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"],
deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"],
deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"],
disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"],
downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"],
downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"],
downloadWorkflowRunAttemptLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"],
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"],
getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, {
renamed: ["actions", "getGithubActionsPermissionsRepository"]
}],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"],
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"],
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"],
listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"],
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"],
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"],
listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"],
listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"],
listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"]
},
activity: {
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"],
getFeeds: ["GET /feeds"],
getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
getThread: ["GET /notifications/threads/{thread_id}"],
getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"],
listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
listNotificationsForAuthenticatedUser: ["GET /notifications"],
listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"],
listPublicEvents: ["GET /events"],
listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
listPublicEventsForUser: ["GET /users/{username}/events/public"],
listPublicOrgEvents: ["GET /orgs/{org}/events"],
listReceivedEventsForUser: ["GET /users/{username}/received_events"],
listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"],
listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"],
listReposStarredByAuthenticatedUser: ["GET /user/starred"],
listReposStarredByUser: ["GET /users/{username}/starred"],
listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
markNotificationsAsRead: ["PUT /notifications"],
markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"],
starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
},
apps: {
addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, {
renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"]
}],
addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
checkToken: ["POST /applications/{client_id}/token"],
createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
mediaType: {
previews: ["corsair"]
}
}],
createContentAttachmentForRepo: ["POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", {
mediaType: {
previews: ["corsair"]
}
}],
createFromManifest: ["POST /app-manifests/{code}/conversions"],
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
deleteInstallation: ["DELETE /app/installations/{installation_id}"],
deleteToken: ["DELETE /applications/{client_id}/token"],
getAuthenticated: ["GET /app"],
getBySlug: ["GET /apps/{app_slug}"],
getInstallation: ["GET /app/installations/{installation_id}"],
getOrgInstallation: ["GET /orgs/{org}/installation"],
getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"],
getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"],
getUserInstallation: ["GET /users/{username}/installation"],
getWebhookConfigForApp: ["GET /app/hook/config"],
getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"],
listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"],
listInstallations: ["GET /app/installations"],
listInstallationsForAuthenticatedUser: ["GET /user/installations"],
listPlans: ["GET /marketplace_listing/plans"],
listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
listReposAccessibleToInstallation: ["GET /installation/repositories"],
listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"],
listWebhookDeliveries: ["GET /app/hook/deliveries"],
redeliverWebhookDelivery: ["POST /app/hook/deliveries/{delivery_id}/attempts"],
removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, {
renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"]
}],
removeRepoFromInstallationForAuthenticatedUser: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"],
resetToken: ["PATCH /applications/{client_id}/token"],
revokeInstallationAccessToken: ["DELETE /installation/token"],
scopeToken: ["POST /applications/{client_id}/token/scoped"],
suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"],
updateWebhookConfigForApp: ["PATCH /app/hook/config"]
},
billing: {
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"]
},
checks: {
create: ["POST /repos/{owner}/{repo}/check-runs"],
createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"],
listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"],
listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
rerequestRun: ["POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"],
rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"],
setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"],
update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
},
codeScanning: {
deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"],
getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, {
renamedParameters: {
alert_id: "alert_number"
}
}],
getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, {
renamed: ["codeScanning", "listAlertInstances"]
}],
listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
updateAlert: ["PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"],
uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
},
codesOfConduct: {
getAllCodesOfConduct: ["GET /codes_of_conduct"],
getConductCode: ["GET /codes_of_conduct/{key}"]
},
emojis: {
get: ["GET /emojis"]
},
enterpriseAdmin: {
disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
},
gists: {
checkIsStarred: ["GET /gists/{gist_id}/star"],
create: ["POST /gists"],
createComment: ["POST /gists/{gist_id}/comments"],
delete: ["DELETE /gists/{gist_id}"],
deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
fork: ["POST /gists/{gist_id}/forks"],
get: ["GET /gists/{gist_id}"],
getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
getRevision: ["GET /gists/{gist_id}/{sha}"],
list: ["GET /gists"],
listComments: ["GET /gists/{gist_id}/comments"],
listCommits: ["GET /gists/{gist_id}/commits"],
listForUser: ["GET /users/{username}/gists"],
listForks: ["GET /gists/{gist_id}/forks"],
listPublic: ["GET /gists/public"],
listStarred: ["GET /gists/starred"],
star: ["PUT /gists/{gist_id}/star"],
unstar: ["DELETE /gists/{gist_id}/star"],
update: ["PATCH /gists/{gist_id}"],
updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
},
git: {
createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
createRef: ["POST /repos/{owner}/{repo}/git/refs"],
createTag: ["POST /repos/{owner}/{repo}/git/tags"],
createTree: ["POST /repos/{owner}/{repo}/git/trees"],
deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
},
gitignore: {
getAllTemplates: ["GET /gitignore/templates"],
getTemplate: ["GET /gitignore/templates/{name}"]
},
interactions: {
getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, {
renamed: ["interactions", "getRestrictionsForAuthenticatedUser"]
}],
removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"],
removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, {
renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"]
}],
setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, {
renamed: ["interactions", "setRestrictionsForAuthenticatedUser"]
}]
},
issues: {
addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
create: ["POST /repos/{owner}/{repo}/issues"],
createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"],
createLabel: ["POST /repos/{owner}/{repo}/labels"],
createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"],
deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"],
get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
list: ["GET /issues"],
listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"],
listForAuthenticatedUser: ["GET /user/issues"],
listForOrg: ["GET /orgs/{org}/issues"],
listForRepo: ["GET /repos/{owner}/{repo}/issues"],
listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"],
listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"],
listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"],
removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"],
removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"],
setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"]
},
licenses: {
get: ["GET /licenses/{license}"],
getAllCommonlyUsed: ["GET /licenses"],
getForRepo: ["GET /repos/{owner}/{repo}/license"]
},
markdown: {
render: ["POST /markdown"],
renderRaw: ["POST /markdown/raw", {
headers: {
"content-type": "text/plain; charset=utf-8"
}
}]
},
meta: {
get: ["GET /meta"],
getOctocat: ["GET /octocat"],
getZen: ["GET /zen"],
root: ["GET /"]
},
migrations: {
cancelImport: ["DELETE /repos/{owner}/{repo}/import"],
deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive"],
deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive"],
downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive"],
getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive"],
getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"],
getImportStatus: ["GET /repos/{owner}/{repo}/import"],
getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"],
getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
listForAuthenticatedUser: ["GET /user/migrations"],
listForOrg: ["GET /orgs/{org}/migrations"],
listReposForAuthenticatedUser: ["GET /user/migrations/{migration_id}/repositories"],
listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
listReposForUser: ["GET /user/migrations/{migration_id}/repositories", {}, {
renamed: ["migrations", "listReposForAuthenticatedUser"]
}],
mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"],
setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"],
startForAuthenticatedUser: ["POST /user/migrations"],
startForOrg: ["POST /orgs/{org}/migrations"],
startImport: ["PUT /repos/{owner}/{repo}/import"],
unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"],
unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"],
updateImport: ["PATCH /repos/{owner}/{repo}/import"]
},
orgs: {
blockUser: ["PUT /orgs/{org}/blocks/{username}"],
cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"],
createInvitation: ["POST /orgs/{org}/invitations"],
createWebhook: ["POST /orgs/{org}/hooks"],
deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
get: ["GET /orgs/{org}"],
getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
getWebhookDelivery: ["GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"],
list: ["GET /organizations"],
listAppInstallations: ["GET /orgs/{org}/installations"],
listBlockedUsers: ["GET /orgs/{org}/blocks"],
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
listForAuthenticatedUser: ["GET /user/orgs"],
listForUser: ["GET /users/{username}/orgs"],
listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
listMembers: ["GET /orgs/{org}/members"],
listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
listPendingInvitations: ["GET /orgs/{org}/invitations"],
listPublicMembers: ["GET /orgs/{org}/public_members"],
listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
listWebhooks: ["GET /orgs/{org}/hooks"],
pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
redeliverWebhookDelivery: ["POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
removeMember: ["DELETE /orgs/{org}/members/{username}"],
removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"],
removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"],
setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"],
unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
update: ["PATCH /orgs/{org}"],
updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"],
updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
},
packages: {
deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"],
deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"],
deletePackageForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}"],
deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
deletePackageVersionForUser: ["DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, {
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"]
}],
getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, {
renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"]
}],
getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"],
getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"],
getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"],
getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"],
getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"],
getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"],
getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"],
getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"],
listPackagesForAuthenticatedUser: ["GET /user/packages"],
listPackagesForOrganization: ["GET /orgs/{org}/packages"],
listPackagesForUser: ["GET /users/{username}/packages"],
restorePackageForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/restore{?token}"],
restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"],
restorePackageForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"],
restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"],
restorePackageVersionForUser: ["POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"]
},
projects: {
addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
createCard: ["POST /projects/columns/{column_id}/cards"],
createColumn: ["POST /projects/{project_id}/columns"],
createForAuthenticatedUser: ["POST /user/projects"],
createForOrg: ["POST /orgs/{org}/projects"],
createForRepo: ["POST /repos/{owner}/{repo}/projects"],
delete: ["DELETE /projects/{project_id}"],
deleteCard: ["DELETE /projects/columns/cards/{card_id}"],
deleteColumn: ["DELETE /projects/columns/{column_id}"],
get: ["GET /projects/{project_id}"],
getCard: ["GET /projects/columns/cards/{card_id}"],
getColumn: ["GET /projects/columns/{column_id}"],
getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission"],
listCards: ["GET /projects/columns/{column_id}/cards"],
listCollaborators: ["GET /projects/{project_id}/collaborators"],
listColumns: ["GET /projects/{project_id}/columns"],
listForOrg: ["GET /orgs/{org}/projects"],
listForRepo: ["GET /repos/{owner}/{repo}/projects"],
listForUser: ["GET /users/{username}/projects"],
moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
moveColumn: ["POST /projects/columns/{column_id}/moves"],
removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}"],
update: ["PATCH /projects/{project_id}"],
updateCard: ["PATCH /projects/columns/cards/{card_id}"],
updateColumn: ["PATCH /projects/columns/{column_id}"]
},
pulls: {
checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
create: ["POST /repos/{owner}/{repo}/pulls"],
createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"],
createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"],
get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
list: ["GET /repos/{owner}/{repo}/pulls"],
listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"],
listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"],
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"],
submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"],
update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"],
updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"],
updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"]
},
rateLimit: {
get: ["GET /rate_limit"]
},
reactions: {
createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions"],
createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"],
deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"],
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"],
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"],
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"],
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"],
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"],
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]
},
repos: {
acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}", {}, {
renamed: ["repos", "acceptInvitationForAuthenticatedUser"]
}],
acceptInvitationForAuthenticatedUser: ["PATCH /user/repository_invitations/{invitation_id}"],
addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
mapToData: "apps"
}],
addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
mapToData: "contexts"
}],
addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
mapToData: "teams"
}],
addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
mapToData: "users"
}],
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"],
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"],
createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
createForAuthenticatedUser: ["POST /user/repos"],
createFork: ["POST /repos/{owner}/{repo}/forks"],
createInOrg: ["POST /orgs/{org}/repos"],
createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"],
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
createRelease: ["POST /repos/{owner}/{repo}/releases"],
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"],
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, {
renamed: ["repos", "declineInvitationForAuthenticatedUser"]
}],
declineInvitationForAuthenticatedUser: ["DELETE /user/repository_invitations/{invitation_id}"],
delete: ["DELETE /repos/{owner}/{repo}"],
deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"],
deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"],
deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"],
deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"],
deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"],
disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"],
disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts"],
downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, {
renamed: ["repos", "downloadZipballArchive"]
}],
downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes"],
enableLfsForRepo: ["PUT /repos/{owner}/{repo}/lfs"],
enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts"],
generateReleaseNotes: ["POST /repos/{owner}/{repo}/releases/generate-notes"],
get: ["GET /repos/{owner}/{repo}"],
getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"],
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
getAllTopics: ["GET /repos/{owner}/{repo}/topics", {
mediaType: {
previews: ["mercy"]
}
}],
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"],
getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"],
getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"],
getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"],
getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"],
getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
getPages: ["GET /repos/{owner}/{repo}/pages"],
getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
getReadme: ["GET /repos/{owner}/{repo}/readme"],
getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"],
getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"],
getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"],
getWebhookDelivery: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"],
listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
listBranches: ["GET /repos/{owner}/{repo}/branches"],
listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"],
listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"],
listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"],
listCommits: ["GET /repos/{owner}/{repo}/commits"],
listContributors: ["GET /repos/{owner}/{repo}/contributors"],
listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"],
listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
listForAuthenticatedUser: ["GET /user/repos"],
listForOrg: ["GET /orgs/{org}/repos"],
listForUser: ["GET /users/{username}/repos"],
listForks: ["GET /repos/{owner}/{repo}/forks"],
listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
listLanguages: ["GET /repos/{owner}/{repo}/languages"],
listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
listPublic: ["GET /repositories"],
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"],
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
listReleases: ["GET /repos/{owner}/{repo}/releases"],
listTags: ["GET /repos/{owner}/{repo}/tags"],
listTeams: ["GET /repos/{owner}/{repo}/teams"],
listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"],
listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
merge: ["POST /repos/{owner}/{repo}/merges"],
mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
redeliverWebhookDelivery: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"],
removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
mapToData: "apps"
}],
removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"],
removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
mapToData: "contexts"
}],
removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
mapToData: "teams"
}],
removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
mapToData: "users"
}],
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", {
mediaType: {
previews: ["mercy"]
}
}],
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
mapToData: "apps"
}],
setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
mapToData: "contexts"
}],
setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
mapToData: "teams"
}],
setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
mapToData: "users"
}],
testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
transfer: ["POST /repos/{owner}/{repo}/transfer"],
update: ["PATCH /repos/{owner}/{repo}"],
updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
renamed: ["repos", "updateStatusCheckProtection"]
}],
updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
baseUrl: "https://uploads.github.com"
}]
},
search: {
code: ["GET /search/code"],
commits: ["GET /search/commits"],
issuesAndPullRequests: ["GET /search/issues"],
labels: ["GET /search/labels"],
repos: ["GET /search/repositories"],
topics: ["GET /search/topics", {
mediaType: {
previews: ["mercy"]
}
}],
users: ["GET /search/users"]
},
secretScanning: {
getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
},
teams: {
addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
create: ["POST /orgs/{org}/teams"],
createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
getByName: ["GET /orgs/{org}/teams/{team_slug}"],
getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
list: ["GET /orgs/{org}/teams"],
listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
listForAuthenticatedUser: ["GET /user/teams"],
listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
},
users: {
addEmailForAuthenticated: ["POST /user/emails", {}, {
renamed: ["users", "addEmailForAuthenticatedUser"]
}],
addEmailForAuthenticatedUser: ["POST /user/emails"],
block: ["PUT /user/blocks/{username}"],
checkBlocked: ["GET /user/blocks/{username}"],
checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, {
renamed: ["users", "createGpgKeyForAuthenticatedUser"]
}],
createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, {
renamed: ["users", "createPublicSshKeyForAuthenticatedUser"]
}],
createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
deleteEmailForAuthenticated: ["DELETE /user/emails", {}, {
renamed: ["users", "deleteEmailForAuthenticatedUser"]
}],
deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, {
renamed: ["users", "deleteGpgKeyForAuthenticatedUser"]
}],
deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, {
renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"]
}],
deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
follow: ["PUT /user/following/{username}"],
getAuthenticated: ["GET /user"],
getByUsername: ["GET /users/{username}"],
getContextForUser: ["GET /users/{username}/hovercard"],
getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, {
renamed: ["users", "getGpgKeyForAuthenticatedUser"]
}],
getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, {
renamed: ["users", "getPublicSshKeyForAuthenticatedUser"]
}],
getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
list: ["GET /users"],
listBlockedByAuthenticated: ["GET /user/blocks", {}, {
renamed: ["users", "listBlockedByAuthenticatedUser"]
}],
listBlockedByAuthenticatedUser: ["GET /user/blocks"],
listEmailsForAuthenticated: ["GET /user/emails", {}, {
renamed: ["users", "listEmailsForAuthenticatedUser"]
}],
listEmailsForAuthenticatedUser: ["GET /user/emails"],
listFollowedByAuthenticated: ["GET /user/following", {}, {
renamed: ["users", "listFollowedByAuthenticatedUser"]
}],
listFollowedByAuthenticatedUser: ["GET /user/following"],
listFollowersForAuthenticatedUser: ["GET /user/followers"],
listFollowersForUser: ["GET /users/{username}/followers"],
listFollowingForUser: ["GET /users/{username}/following"],
listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, {
renamed: ["users", "listGpgKeysForAuthenticatedUser"]
}],
listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, {
renamed: ["users", "listPublicEmailsForAuthenticatedUser"]
}],
listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
listPublicKeysForUser: ["GET /users/{username}/keys"],
listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, {
renamed: ["users", "listPublicSshKeysForAuthenticatedUser"]
}],
listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, {
renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"]
}],
setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"],
unblock: ["DELETE /user/blocks/{username}"],
unfollow: ["DELETE /user/following/{username}"],
updateAuthenticated: ["PATCH /user"]
}
};
const VERSION = "5.13.0";
// Expand an endpoint map of the shape
//   { scope: { methodName: [route, defaults?, decorations?] } }
// into a tree of callable request methods bound to `octokit`, e.g.
// methods.pulls.get(...). Routes are "VERB /path" strings.
function endpointsToMethods(octokit, endpointsMap) {
  const methods = {};
  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      const [route, defaults, decorations] = endpoint;
      const [method, url] = route.split(/ /);
      // Merge the parsed verb/path with any per-endpoint defaults.
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);
      if (!methods[scope]) {
        methods[scope] = {};
      }
      // Endpoints with decorations (mapToData / renamed / deprecated /
      // renamedParameters) get a wrapping function; plain endpoints are
      // bound directly via request.defaults.
      methods[scope][methodName] = decorations
        ? decorate(octokit, scope, methodName, endpointDefaults, decorations)
        : octokit.request.defaults(endpointDefaults);
    }
  }
  return methods;
}
// Wrap a defaults-bound request method with the extra behaviors declared in
// `decorations`: `mapToData` (move a named parameter into the request body),
// `renamed` (log a deprecation warning pointing to the new scope/method),
// `deprecated` (log a fixed deprecation message), and `renamedParameters`
// (warn about and alias old parameter names). Returns the wrapper with the
// bound request's own properties (e.g. `endpoint`, `defaults`) copied onto it.
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  /* istanbul ignore next */
  function withDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
    if (decorations.mapToData) {
      // Move the named parameter's value into `data` (the request body) and
      // clear the original key so it is not also serialized as a parameter.
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: undefined
      });
      return requestWithDefaults(options);
    }
    if (decorations.renamed) {
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // Re-merge to get a fresh options object before rewriting keys.
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const options = requestWithDefaults.endpoint.merge(...args);
      for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
        if (name in options) {
          octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
          // Copy to the new name only when the caller did not already supply
          // it, then drop the deprecated key.
          if (!(alias in options)) {
            options[alias] = options[name];
          }
          delete options[name];
        }
      }
      return requestWithDefaults(options);
    } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    return requestWithDefaults(...args);
  }
  return Object.assign(withDecorations, requestWithDefaults);
}
// Octokit plugin: expose every REST endpoint method under `octokit.rest`.
function restEndpointMethods(octokit) {
  return {
    rest: endpointsToMethods(octokit, Endpoints)
  };
}
restEndpointMethods.VERSION = VERSION;
// Octokit plugin (legacy): expose endpoint methods both at the top level
// (octokit.pulls.get — the deprecated location) and under `rest`
// (octokit.rest.pulls.get).
function legacyRestEndpointMethods(octokit) {
  const api = endpointsToMethods(octokit, Endpoints);
  // _objectSpread2 is presumably the bundler's object-spread helper
  // (equivalent in intent to { ...api, rest: api }) — defined earlier in
  // this bundle; confirm before changing.
  return _objectSpread2(_objectSpread2({}, api), {}, {
    rest: api
  });
}
legacyRestEndpointMethods.VERSION = VERSION;
exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
exports.restEndpointMethods = restEndpointMethods;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 856:
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
const punycode = __webpack_require__(213);
const tr46 = __webpack_require__(530);
// "Special" URL schemes and their default ports, per the WHATWG URL
// Standard. `file` is special but has no default port, hence `null`.
const specialSchemes = {
  ftp: 21,
  file: null,
  gopher: 70,
  http: 80,
  https: 443,
  ws: 80,
  wss: 443
};
// Unique sentinel value returned by parsing steps to signal failure.
const failure = Symbol("failure");
// Count the Unicode code points in `str` (a surrogate pair counts as one).
function countSymbols(str) {
  // The string iterator walks code points, so spreading yields one entry
  // per code point — same count as punycode.ucs2.decode(str).length,
  // including for lone surrogates (each counts as one).
  return [...str].length;
}
// Return the code point at `idx` of the code-point array `input` as a
// one-character string, or undefined when the index is out of range.
function at(input, idx) {
  const c = input[idx];
  // Explicit undefined check instead of the coercing global isNaN():
  // out-of-range indexes yield undefined; in-range entries are numbers.
  return c === undefined ? undefined : String.fromCodePoint(c);
}
function isASCIIDigit(c) {
return c >= 0x30 && c <= 0x39;
}
// True when code point c is an ASCII letter (A-Z or a-z).
function isASCIIAlpha(c) {
  const isUpper = 0x41 <= c && c <= 0x5A;
  const isLower = 0x61 <= c && c <= 0x7A;
  return isUpper || isLower;
}
// True when code point c is an ASCII letter or digit.
function isASCIIAlphanumeric(c) {
  return (c >= 0x30 && c <= 0x39) || // digit
    (c >= 0x41 && c <= 0x5A) ||      // uppercase letter
    (c >= 0x61 && c <= 0x7A);        // lowercase letter
}
// True when code point c is an ASCII hex digit (0-9, A-F, a-f).
function isASCIIHex(c) {
  return (c >= 0x30 && c <= 0x39) || // 0-9
    (c >= 0x41 && c <= 0x46) ||      // A-F
    (c >= 0x61 && c <= 0x66);        // a-f
}
// True when `buffer` is a single-dot path segment ("." or "%2e" in any case).
function isSingleDot(buffer) {
  if (buffer === ".") {
    return true;
  }
  return buffer.toLowerCase() === "%2e";
}
// True when `buffer` is a double-dot path segment in any of its
// percent-encoded spellings ("..", "%2e.", ".%2e", "%2e%2e"), case-insensitive.
function isDoubleDot(buffer) {
  switch (buffer.toLowerCase()) {
    case "..":
    case "%2e.":
    case ".%2e":
    case "%2e%2e":
      return true;
    default:
      return false;
  }
}
// True when (cp1, cp2) form a Windows drive letter: an ASCII letter
// followed by ":" (58) or "|" (124).
function isWindowsDriveLetterCodePoints(cp1, cp2) {
  const letter = (cp1 >= 0x41 && cp1 <= 0x5A) || (cp1 >= 0x61 && cp1 <= 0x7A);
  if (!letter) {
    return false;
  }
  return cp2 === 58 || cp2 === 124;
}
// True when `string` is exactly an ASCII letter followed by ":" or "|".
function isWindowsDriveLetterString(string) {
  return /^[A-Za-z][:|]$/.test(string);
}
// True when `string` is exactly an ASCII letter followed by ":" (the
// normalized drive-letter form; "|" is not accepted here).
function isNormalizedWindowsDriveLetterString(string) {
  return /^[A-Za-z]:$/.test(string);
}
// True when `string` contains any forbidden host code point:
// NUL, TAB, LF, CR, SP, #, %, /, :, ?, @, [, \, or ].
function containsForbiddenHostCodePoint(string) {
  return /[\u0000\u0009\u000A\u000D\u0020#%\/:?@\[\\\]]/.test(string);
}
// Same forbidden-host check as above but "%" is permitted (used for
// opaque hosts, which keep percent-encoded sequences).
function containsForbiddenHostCodePointExcludingPercent(string) {
  return /[\u0000\u0009\u000A\u000D\u0020#\/:?@\[\\\]]/.test(string);
}
// A scheme is "special" exactly when it appears in the specialSchemes table.
// (Own-property value check, not `in`, so prototype keys never match.)
function isSpecialScheme(scheme) {
  const port = specialSchemes[scheme];
  return port !== undefined;
}
// A URL record is special when its scheme is special.
function isSpecial(url) {
  return isSpecialScheme(url.scheme);
}
// Default port for a scheme: a number, null for "file", or undefined
// for non-special schemes.
function defaultPort(scheme) {
  return specialSchemes[scheme];
}
// Percent-encode a single byte value as "%XX" (two uppercase hex digits).
function percentEncode(c) {
  const hex = c.toString(16).toUpperCase();
  return hex.length === 1 ? "%0" + hex : "%" + hex;
}
// Percent-encode every UTF-8 byte of the string `c`.
// Fix: `new Buffer(string)` is deprecated (Node DEP0005) and removed from
// modern lint baselines; `Buffer.from` is the supported equivalent.
function utf8PercentEncode(c) {
  const buf = Buffer.from(c);
  let str = "";
  for (let i = 0; i < buf.length; ++i) {
    str += percentEncode(buf[i]);
  }
  return str;
}
// Decode %XX escapes in `str` byte-wise; malformed escapes (missing or
// non-hex digits) are passed through unchanged.
// Fixes: deprecated `new Buffer(...)` (Node DEP0005) replaced with
// `Buffer.from`, and the redundant `input[i] === 37` re-check in the
// else-if branch (already implied by the first branch failing) removed.
function utf8PercentDecode(str) {
  const input = Buffer.from(str);
  const output = [];
  for (let i = 0; i < input.length; ++i) {
    if (input[i] !== 37) {
      output.push(input[i]);
    } else if (isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {
      output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));
      i += 2; // consume the two hex digits
    } else {
      // lone "%" without two hex digits: keep it literally
      output.push(input[i]);
    }
  }
  return Buffer.from(output).toString();
}
// C0-control percent-encode set: C0 controls (<= 0x1F) and everything
// above "~" (0x7E).
function isC0ControlPercentEncode(c) {
  return !(c > 0x1F && c <= 0x7E);
}
// Extra code points encoded in paths: SP " # < > ? ` { }
const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);
// Path percent-encode set = C0-control set plus the extras above.
// (The C0-control predicate is inlined: controls and everything above "~".)
function isPathPercentEncode(c) {
  if (c <= 0x1F || c > 0x7E) {
    return true;
  }
  return extraPathPercentEncodeSet.has(c);
}
// Extra code points encoded in userinfo: / : ; = @ [ \ ] ^ |
const extraUserinfoPercentEncodeSet =
  new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);
// Userinfo percent-encode set = path set plus the extras above.
function isUserinfoPercentEncode(c) {
  if (isPathPercentEncode(c)) {
    return true;
  }
  return extraUserinfoPercentEncodeSet.has(c);
}
// Return code point `c` as a string, UTF-8 percent-encoded iff the
// predicate places it in the relevant encode set.
function percentEncodeChar(c, encodeSetPredicate) {
  const cStr = String.fromCodePoint(c);
  return encodeSetPredicate(c) ? utf8PercentEncode(cStr) : cStr;
}
// Parse one part of a dotted IPv4 address. "0x"/"0X" prefix selects hex,
// a bare leading "0" selects octal, otherwise decimal. An empty numeric
// part (e.g. just "0x") parses as 0. Returns the `failure` symbol when a
// digit is invalid for the selected radix.
function parseIPv4Number(input) {
  let radix = 10;
  const c0 = input.charAt(0);
  const c1 = input.charAt(1);
  if (input.length >= 2 && c0 === "0" && (c1 === "x" || c1 === "X")) {
    input = input.substring(2);
    radix = 16;
  } else if (input.length >= 2 && c0 === "0") {
    input = input.substring(1);
    radix = 8;
  }
  if (input === "") {
    return 0;
  }
  let invalidDigit;
  if (radix === 10) {
    invalidDigit = /[^0-9]/;
  } else if (radix === 16) {
    invalidDigit = /[^0-9A-Fa-f]/;
  } else {
    invalidDigit = /[^0-7]/;
  }
  if (invalidDigit.test(input)) {
    return failure;
  }
  return parseInt(input, radix);
}
// Parse `input` as an IPv4 address per the WHATWG URL host parser.
// Returns: a 32-bit number on success, the `failure` symbol when the
// input is numeric-looking but invalid, or the ORIGINAL STRING when the
// input does not look like an IPv4 address at all (caller then treats it
// as a domain).
function parseIPv4(input) {
  const parts = input.split(".");
  // A single trailing dot is allowed and dropped ("1.2.3.4.").
  if (parts[parts.length - 1] === "") {
    if (parts.length > 1) {
      parts.pop();
    }
  }
  if (parts.length > 4) {
    return input;
  }
  const numbers = [];
  for (const part of parts) {
    if (part === "") {
      return input;
    }
    const n = parseIPv4Number(part);
    if (n === failure) {
      return input;
    }
    numbers.push(n);
  }
  // All but the last part must fit in one octet.
  for (let i = 0; i < numbers.length - 1; ++i) {
    if (numbers[i] > 255) {
      return failure;
    }
  }
  // The last part may span the remaining octets (e.g. "1.2.3456").
  if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {
    return failure;
  }
  // Fold the parts into a single 32-bit value.
  let ipv4 = numbers.pop();
  let counter = 0;
  for (const n of numbers) {
    ipv4 += n * Math.pow(256, 3 - counter);
    ++counter;
  }
  return ipv4;
}
// Serialize a 32-bit IPv4 number to dotted-quad form ("a.b.c.d").
function serializeIPv4(address) {
  const octets = [];
  let rest = address;
  for (let i = 0; i < 4; ++i) {
    octets.unshift(String(rest % 256)); // least-significant octet first
    rest = Math.floor(rest / 256);
  }
  return octets.join(".");
}
// Parse `input` (the text between "[" and "]") as an IPv6 address per the
// WHATWG URL spec. Returns an array of eight 16-bit pieces, or the
// `failure` symbol. Handles "::" compression and an embedded IPv4 tail.
function parseIPv6(input) {
  const address = [0, 0, 0, 0, 0, 0, 0, 0];
  let pieceIndex = 0;
  let compress = null; // piece index just after "::", if seen
  let pointer = 0;
  input = punycode.ucs2.decode(input);
  // Leading colon is only legal as part of a leading "::".
  if (input[pointer] === 58) {
    if (input[pointer + 1] !== 58) {
      return failure;
    }
    pointer += 2;
    ++pieceIndex;
    compress = pieceIndex;
  }
  while (pointer < input.length) {
    if (pieceIndex === 8) {
      return failure;
    }
    if (input[pointer] === 58) {
      // A second "::" is not allowed.
      if (compress !== null) {
        return failure;
      }
      ++pointer;
      ++pieceIndex;
      compress = pieceIndex;
      continue;
    }
    // Read up to four hex digits into `value`.
    let value = 0;
    let length = 0;
    while (length < 4 && isASCIIHex(input[pointer])) {
      value = value * 0x10 + parseInt(at(input, pointer), 16);
      ++pointer;
      ++length;
    }
    if (input[pointer] === 46) {
      // "." after hex digits: the remainder is an embedded IPv4 address.
      if (length === 0) {
        return failure;
      }
      pointer -= length; // re-read those digits as decimal
      if (pieceIndex > 6) {
        return failure; // IPv4 tail needs the last two pieces
      }
      let numbersSeen = 0;
      while (input[pointer] !== undefined) {
        let ipv4Piece = null;
        if (numbersSeen > 0) {
          if (input[pointer] === 46 && numbersSeen < 4) {
            ++pointer;
          } else {
            return failure;
          }
        }
        if (!isASCIIDigit(input[pointer])) {
          return failure;
        }
        while (isASCIIDigit(input[pointer])) {
          const number = parseInt(at(input, pointer));
          if (ipv4Piece === null) {
            ipv4Piece = number;
          } else if (ipv4Piece === 0) {
            return failure; // leading zero not allowed
          } else {
            ipv4Piece = ipv4Piece * 10 + number;
          }
          if (ipv4Piece > 255) {
            return failure;
          }
          ++pointer;
        }
        // Two IPv4 octets pack into one 16-bit piece.
        address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;
        ++numbersSeen;
        if (numbersSeen === 2 || numbersSeen === 4) {
          ++pieceIndex;
        }
      }
      if (numbersSeen !== 4) {
        return failure;
      }
      break;
    } else if (input[pointer] === 58) {
      ++pointer;
      if (input[pointer] === undefined) {
        return failure; // trailing single colon
      }
    } else if (input[pointer] !== undefined) {
      return failure; // unexpected character
    }
    address[pieceIndex] = value;
    ++pieceIndex;
  }
  if (compress !== null) {
    // Rotate the pieces after "::" to the end of the address.
    let swaps = pieceIndex - compress;
    pieceIndex = 7;
    while (pieceIndex !== 0 && swaps > 0) {
      const temp = address[compress + swaps - 1];
      address[compress + swaps - 1] = address[pieceIndex];
      address[pieceIndex] = temp;
      --pieceIndex;
      --swaps;
    }
  } else if (compress === null && pieceIndex !== 8) {
    return failure; // too few pieces without compression
  }
  return address;
}
// Serialize an 8-piece IPv6 address to its canonical text form,
// compressing the longest run of zero pieces (length >= 2) to "::".
function serializeIPv6(address) {
  let output = "";
  const seqResult = findLongestZeroSequence(address);
  const compress = seqResult.idx; // null when nothing to compress
  let ignore0 = false; // true while skipping the compressed zero run
  for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {
    if (ignore0 && address[pieceIndex] === 0) {
      continue;
    } else if (ignore0) {
      ignore0 = false;
    }
    if (compress === pieceIndex) {
      // "::" at the start, ":" otherwise (the previous piece already
      // emitted its trailing ":").
      const separator = pieceIndex === 0 ? "::" : ":";
      output += separator;
      ignore0 = true;
      continue;
    }
    output += address[pieceIndex].toString(16); // lowercase hex per spec
    if (pieceIndex !== 7) {
      output += ":";
    }
  }
  return output;
}
// WHATWG host parser. Returns one of: an IPv6 piece array ("[...]"),
// an opaque host string (non-special URLs), an IPv4 number, an ASCII
// domain string, or the `failure` symbol.
function parseHost(input, isSpecialArg) {
  if (input[0] === "[") {
    if (input[input.length - 1] !== "]") {
      return failure;
    }
    return parseIPv6(input.substring(1, input.length - 1));
  }
  if (!isSpecialArg) {
    return parseOpaqueHost(input);
  }
  // Special URLs: percent-decode, then IDNA (nontransitional) to ASCII.
  const domain = utf8PercentDecode(input);
  const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);
  if (asciiDomain === null) {
    return failure;
  }
  if (containsForbiddenHostCodePoint(asciiDomain)) {
    return failure;
  }
  // A domain that parses as IPv4 (number) or fails IPv4 parsing
  // (failure) is returned as such; otherwise it stays a domain string.
  const ipv4Host = parseIPv4(asciiDomain);
  if (typeof ipv4Host === "number" || ipv4Host === failure) {
    return ipv4Host;
  }
  return asciiDomain;
}
// Parse the host of a non-special URL: forbid most host code points
// (but allow "%"), then C0-control percent-encode each code point.
function parseOpaqueHost(input) {
  if (containsForbiddenHostCodePointExcludingPercent(input)) {
    return failure;
  }
  let output = "";
  const decoded = punycode.ucs2.decode(input);
  for (let i = 0; i < decoded.length; ++i) {
    output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);
  }
  return output;
}
// Find the longest run of zero entries in `arr` (used for "::" IPv6
// compression). Only runs of length >= 2 qualify; returns
// { idx, len } with idx === null when no qualifying run exists.
function findLongestZeroSequence(arr) {
  let bestStart = null;
  let bestLen = 1; // a run must beat length 1 to count
  let runStart = null;
  let runLen = 0;
  for (let i = 0; i < arr.length; ++i) {
    if (arr[i] === 0) {
      if (runStart === null) {
        runStart = i;
      }
      ++runLen;
      continue;
    }
    // run ended: remember it if it is the longest so far
    if (runLen > bestLen) {
      bestStart = runStart;
      bestLen = runLen;
    }
    runStart = null;
    runLen = 0;
  }
  // account for a run that extends to the end of the array
  if (runLen > bestLen) {
    bestStart = runStart;
    bestLen = runLen;
  }
  return {
    idx: bestStart,
    len: bestLen
  };
}
// Serialize a parsed host: a number is an IPv4 address, an array is an
// IPv6 address (bracketed), anything else is already a string host.
function serializeHost(host) {
  if (typeof host === "number") {
    return serializeIPv4(host);
  }
  if (host instanceof Array) {
    return "[" + serializeIPv6(host) + "]";
  }
  return host;
}
// Strip leading and trailing C0 controls and spaces (U+0000..U+0020).
function trimControlChars(url) {
  return url
    .replace(/^[\u0000-\u001F\u0020]+/, "")
    .replace(/[\u0000-\u001F\u0020]+$/, "");
}
// Remove every tab, LF and CR anywhere in the string (not just the ends).
function trimTabAndNewline(url) {
  return url.replace(/[\u0009\u000A\u000D]/g, "");
}
// Remove the last path segment of `url` in place, except that a lone
// normalized Windows drive letter in a file URL (e.g. file:///C:) is kept.
function shortenPath(url) {
  const { path } = url;
  if (path.length === 0) {
    return;
  }
  const keepDriveLetter =
    url.scheme === "file" &&
    path.length === 1 &&
    isNormalizedWindowsDriveLetter(path[0]);
  if (!keepDriveLetter) {
    path.pop();
  }
}
// True when the URL record carries a non-empty username or password.
function includesCredentials(url) {
  const { username, password } = url;
  return username !== "" || password !== "";
}
// Per spec, credentials/port cannot be set when the host is null or
// empty, the URL cannot be a base, or the scheme is "file".
function cannotHaveAUsernamePasswordPort(url) {
  if (url.host === null || url.host === "") {
    return true;
  }
  return url.cannotBeABaseURL || url.scheme === "file";
}
// True when `string` is a normalized Windows drive letter ("C:" form).
function isNormalizedWindowsDriveLetter(string) {
  if (string.length !== 2 || string[1] !== ":") {
    return false;
  }
  return /[A-Za-z]/.test(string[0]);
}
// WHATWG basic URL parser, implemented as a state machine. Each state is
// a "parse <state>" method on the prototype; the constructor runs the
// machine over the input and leaves the result in this.url (or sets
// this.failure). `stateOverride` restricts parsing to a single component
// (used by the href/host/port/etc. setters).
function URLStateMachine(input, base, encodingOverride, url, stateOverride) {
  this.pointer = 0;
  this.input = input;
  this.base = base || null;
  this.encodingOverride = encodingOverride || "utf-8";
  this.stateOverride = stateOverride;
  this.url = url;
  this.failure = false;
  this.parseError = false; // spec "validation error"; non-fatal
  if (!this.url) {
    // Fresh URL record (only when not re-parsing into an existing one).
    this.url = {
      scheme: "",
      username: "",
      password: "",
      host: null,
      port: null,
      path: [],
      query: null,
      fragment: null,
      cannotBeABaseURL: false
    };
    // Leading/trailing C0 controls and spaces are stripped (with a
    // validation error) only for fresh parses.
    const res = trimControlChars(this.input);
    if (res !== this.input) {
      this.parseError = true;
    }
    this.input = res;
  }
  // Tabs and newlines are removed anywhere in the input.
  const res = trimTabAndNewline(this.input);
  if (res !== this.input) {
    this.parseError = true;
  }
  this.input = res;
  this.state = stateOverride || "scheme start";
  this.buffer = "";
  this.atFlag = false;              // "@" seen (authority parsing)
  this.arrFlag = false;             // inside "[...]" (IPv6 host)
  this.passwordTokenSeenFlag = false; // ":" seen in userinfo
  // Work on code points, not UTF-16 units.
  this.input = punycode.ucs2.decode(this.input);
  // Runs one step past the end (c === undefined signals EOF to states).
  for (; this.pointer <= this.input.length; ++this.pointer) {
    const c = this.input[this.pointer];
    const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);
    // exec state machine
    const ret = this["parse " + this.state](c, cStr);
    if (!ret) {
      break; // terminate algorithm
    } else if (ret === failure) {
      this.failure = true;
      break;
    }
  }
}
// "scheme start": first character must be an ASCII letter to begin a
// scheme; otherwise fall through to "no scheme" (unless overriding).
URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) {
  if (isASCIIAlpha(c)) {
    this.buffer += cStr.toLowerCase();
    this.state = "scheme";
  } else if (!this.stateOverride) {
    this.state = "no scheme";
    --this.pointer; // reprocess this character in the new state
  } else {
    this.parseError = true;
    return failure;
  }
  return true;
};
// "scheme": accumulate [a-z0-9+-.] until ":"; then decide the next state
// based on the scheme. With stateOverride (protocol setter), various
// special/non-special transitions are rejected per spec.
URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) {
  if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {
    this.buffer += cStr.toLowerCase();
  } else if (c === 58) {
    if (this.stateOverride) {
      // cannot switch between special and non-special schemes
      if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {
        return false;
      }
      if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {
        return false;
      }
      // "file" cannot have credentials/port; file URLs need a host
      if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") {
        return false;
      }
      if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) {
        return false;
      }
    }
    this.url.scheme = this.buffer;
    this.buffer = "";
    if (this.stateOverride) {
      return false; // protocol setter stops here
    }
    if (this.url.scheme === "file") {
      if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {
        this.parseError = true; // "file:" without "//"
      }
      this.state = "file";
    } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {
      this.state = "special relative or authority";
    } else if (isSpecial(this.url)) {
      this.state = "special authority slashes";
    } else if (this.input[this.pointer + 1] === 47) {
      this.state = "path or authority";
      ++this.pointer;
    } else {
      // e.g. "mailto:user@host" — opaque path
      this.url.cannotBeABaseURL = true;
      this.url.path.push("");
      this.state = "cannot-be-a-base-URL path";
    }
  } else if (!this.stateOverride) {
    // Not a valid scheme after all: restart from the beginning.
    this.buffer = "";
    this.state = "no scheme";
    this.pointer = -1; // loop increment brings us back to 0
  } else {
    this.parseError = true;
    return failure;
  }
  return true;
};
// "no scheme": only parseable relative to a base; "#" on a
// cannot-be-a-base base copies everything and parses just the fragment.
URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) {
  if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {
    return failure;
  } else if (this.base.cannotBeABaseURL && c === 35) {
    this.url.scheme = this.base.scheme;
    this.url.path = this.base.path.slice();
    this.url.query = this.base.query;
    this.url.fragment = "";
    this.url.cannotBeABaseURL = true;
    this.state = "fragment";
  } else if (this.base.scheme === "file") {
    this.state = "file";
    --this.pointer;
  } else {
    this.state = "relative";
    --this.pointer;
  }
  return true;
};
// "special relative or authority": "//" means an authority follows,
// anything else is a (lenient) relative reference.
URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) {
  if (c === 47 && this.input[this.pointer + 1] === 47) {
    this.state = "special authority ignore slashes";
    ++this.pointer;
  } else {
    this.parseError = true;
    this.state = "relative";
    --this.pointer;
  }
  return true;
};
// "path or authority" (non-special schemes after one "/").
URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) {
  if (c === 47) {
    this.state = "authority";
  } else {
    this.state = "path";
    --this.pointer;
  }
  return true;
};
// "relative": resolve against this.base, copying components up to the
// point where the input starts to differ (path/query/fragment).
URLStateMachine.prototype["parse relative"] = function parseRelative(c) {
  this.url.scheme = this.base.scheme;
  if (isNaN(c)) {
    // EOF: input was empty — copy everything except fragment.
    this.url.username = this.base.username;
    this.url.password = this.base.password;
    this.url.host = this.base.host;
    this.url.port = this.base.port;
    this.url.path = this.base.path.slice();
    this.url.query = this.base.query;
  } else if (c === 47) {
    this.state = "relative slash";
  } else if (c === 63) {
    // "?": new query, keep base path.
    this.url.username = this.base.username;
    this.url.password = this.base.password;
    this.url.host = this.base.host;
    this.url.port = this.base.port;
    this.url.path = this.base.path.slice();
    this.url.query = "";
    this.state = "query";
  } else if (c === 35) {
    // "#": new fragment, keep base path and query.
    this.url.username = this.base.username;
    this.url.password = this.base.password;
    this.url.host = this.base.host;
    this.url.port = this.base.port;
    this.url.path = this.base.path.slice();
    this.url.query = this.base.query;
    this.url.fragment = "";
    this.state = "fragment";
  } else if (isSpecial(this.url) && c === 92) {
    // "\" acts like "/" in special URLs (with a validation error).
    this.parseError = true;
    this.state = "relative slash";
  } else {
    // Relative path: drop the base's last segment, then parse the path.
    this.url.username = this.base.username;
    this.url.password = this.base.password;
    this.url.host = this.base.host;
    this.url.port = this.base.port;
    this.url.path = this.base.path.slice(0, this.base.path.length - 1);
    this.state = "path";
    --this.pointer;
  }
  return true;
};
// "relative slash": a second slash (or backslash, for special URLs)
// introduces an authority; otherwise parse an absolute path.
URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) {
  if (isSpecial(this.url) && (c === 47 || c === 92)) {
    if (c === 92) {
      this.parseError = true;
    }
    this.state = "special authority ignore slashes";
  } else if (c === 47) {
    this.state = "authority";
  } else {
    this.url.username = this.base.username;
    this.url.password = this.base.password;
    this.url.host = this.base.host;
    this.url.port = this.base.port;
    this.state = "path";
    --this.pointer;
  }
  return true;
};
// "special authority slashes": expect "//" but tolerate its absence.
URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) {
  if (c === 47 && this.input[this.pointer + 1] === 47) {
    this.state = "special authority ignore slashes";
    ++this.pointer;
  } else {
    this.parseError = true;
    this.state = "special authority ignore slashes";
    --this.pointer;
  }
  return true;
};
// "special authority ignore slashes": skip extra "/" and "\".
URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) {
  if (c !== 47 && c !== 92) {
    this.state = "authority";
    --this.pointer;
  } else {
    this.parseError = true;
  }
  return true;
};
// "authority": buffer characters until "@" (userinfo) or a host
// terminator. On "@" the buffered text becomes username[:password];
// on a terminator the buffered text is re-parsed as the host.
URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) {
  if (c === 64) {
    this.parseError = true;
    // A second "@" means the earlier one was part of the userinfo.
    if (this.atFlag) {
      this.buffer = "%40" + this.buffer;
    }
    this.atFlag = true;
    // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars
    const len = countSymbols(this.buffer);
    for (let pointer = 0; pointer < len; ++pointer) {
      const codePoint = this.buffer.codePointAt(pointer);
      // First ":" separates username from password.
      if (codePoint === 58 && !this.passwordTokenSeenFlag) {
        this.passwordTokenSeenFlag = true;
        continue;
      }
      const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);
      if (this.passwordTokenSeenFlag) {
        this.url.password += encodedCodePoints;
      } else {
        this.url.username += encodedCodePoints;
      }
    }
    this.buffer = "";
  } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
    (isSpecial(this.url) && c === 92)) {
    // Host terminator; "user@" with empty host is an error.
    if (this.atFlag && this.buffer === "") {
      this.parseError = true;
      return failure;
    }
    // Rewind so the host state re-reads the buffered characters.
    this.pointer -= countSymbols(this.buffer) + 1;
    this.buffer = "";
    this.state = "host";
  } else {
    this.buffer += cStr;
  }
  return true;
};
// "host"/"hostname": buffer the host text; ":" (outside "[...]") ends the
// host and starts the port; a path/query/fragment delimiter ends the
// authority entirely. arrFlag tracks whether we are inside IPv6 brackets.
URLStateMachine.prototype["parse hostname"] =
URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) {
  if (this.stateOverride && this.url.scheme === "file") {
    // file URLs use the dedicated "file host" state.
    --this.pointer;
    this.state = "file host";
  } else if (c === 58 && !this.arrFlag) {
    if (this.buffer === "") {
      this.parseError = true;
      return failure; // ":" with no host
    }
    const host = parseHost(this.buffer, isSpecial(this.url));
    if (host === failure) {
      return failure;
    }
    this.url.host = host;
    this.buffer = "";
    this.state = "port";
    if (this.stateOverride === "hostname") {
      return false; // hostname setter must not consume the port
    }
  } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
    (isSpecial(this.url) && c === 92)) {
    --this.pointer;
    if (isSpecial(this.url) && this.buffer === "") {
      this.parseError = true;
      return failure; // special URLs require a non-empty host
    } else if (this.stateOverride && this.buffer === "" &&
      (includesCredentials(this.url) || this.url.port !== null)) {
      // cannot clear the host while credentials/port remain
      this.parseError = true;
      return false;
    }
    const host = parseHost(this.buffer, isSpecial(this.url));
    if (host === failure) {
      return failure;
    }
    this.url.host = host;
    this.buffer = "";
    this.state = "path start";
    if (this.stateOverride) {
      return false;
    }
  } else {
    // Track "[...]" so colons inside an IPv6 literal are not terminators.
    if (c === 91) {
      this.arrFlag = true;
    } else if (c === 93) {
      this.arrFlag = false;
    }
    this.buffer += cStr;
  }
  return true;
};
// "port": accumulate digits; at a terminator, validate the number fits
// in 16 bits and normalize the scheme's default port to null.
URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) {
  if (isASCIIDigit(c)) {
    this.buffer += cStr;
  } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||
    (isSpecial(this.url) && c === 92) ||
    this.stateOverride) {
    if (this.buffer !== "") {
      const port = parseInt(this.buffer);
      if (port > Math.pow(2, 16) - 1) {
        this.parseError = true;
        return failure; // out of the 0..65535 range
      }
      // The scheme's default port is stored as null.
      this.url.port = port === defaultPort(this.url.scheme) ? null : port;
      this.buffer = "";
    }
    if (this.stateOverride) {
      return false; // port setter stops here
    }
    this.state = "path start";
    --this.pointer;
  } else {
    this.parseError = true;
    return failure; // non-digit in port
  }
  return true;
};
// Delimiters that may follow a Windows drive letter: / \ ? #
const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);
// "file": file-scheme URLs inherit host/path/query from a file base
// unless the input starts its own path (notably a drive letter).
URLStateMachine.prototype["parse file"] = function parseFile(c) {
  this.url.scheme = "file";
  if (c === 47 || c === 92) {
    if (c === 92) {
      this.parseError = true;
    }
    this.state = "file slash";
  } else if (this.base !== null && this.base.scheme === "file") {
    if (isNaN(c)) {
      this.url.host = this.base.host;
      this.url.path = this.base.path.slice();
      this.url.query = this.base.query;
    } else if (c === 63) {
      this.url.host = this.base.host;
      this.url.path = this.base.path.slice();
      this.url.query = "";
      this.state = "query";
    } else if (c === 35) {
      this.url.host = this.base.host;
      this.url.path = this.base.path.slice();
      this.url.query = this.base.query;
      this.url.fragment = "";
      this.state = "fragment";
    } else {
      // Keep the base path unless the remaining input starts with a
      // Windows drive letter (then the input replaces the whole path).
      // NOTE(review): this condition predates later spec refinements of
      // "starts with a Windows drive letter" — verify against the
      // current URL Standard before changing it.
      if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points
        !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||
        (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points
          !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {
        this.url.host = this.base.host;
        this.url.path = this.base.path.slice();
        shortenPath(this.url);
      } else {
        this.parseError = true;
      }
      this.state = "path";
      --this.pointer;
    }
  } else {
    this.state = "path";
    --this.pointer;
  }
  return true;
};
// "file slash": a second slash introduces a file host; otherwise a base
// file URL may contribute its drive letter or host.
URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) {
  if (c === 47 || c === 92) {
    if (c === 92) {
      this.parseError = true;
    }
    this.state = "file host";
  } else {
    if (this.base !== null && this.base.scheme === "file") {
      if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {
        // Keep the base's drive letter for inputs like "/foo".
        this.url.path.push(this.base.path[0]);
      } else {
        this.url.host = this.base.host;
      }
    }
    this.state = "path";
    --this.pointer;
  }
  return true;
};
// "file host": buffer until a delimiter; a Windows drive letter is NOT a
// host (it belongs to the path), and "localhost" normalizes to "".
URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) {
  if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {
    --this.pointer;
    if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {
      this.parseError = true;
      this.state = "path"; // drive letter: re-parse buffer as path
    } else if (this.buffer === "") {
      this.url.host = "";
      if (this.stateOverride) {
        return false;
      }
      this.state = "path start";
    } else {
      let host = parseHost(this.buffer, isSpecial(this.url));
      if (host === failure) {
        return failure;
      }
      if (host === "localhost") {
        host = "";
      }
      this.url.host = host;
      if (this.stateOverride) {
        return false;
      }
      this.buffer = "";
      this.state = "path start";
    }
  } else {
    this.buffer += cStr;
  }
  return true;
};
// "path start": dispatch to path/query/fragment, eating one leading
// "/" (or "\" for special URLs).
URLStateMachine.prototype["parse path start"] = function parsePathStart(c) {
  if (isSpecial(this.url)) {
    if (c === 92) {
      this.parseError = true;
    }
    this.state = "path";
    if (c !== 47 && c !== 92) {
      --this.pointer;
    }
  } else if (!this.stateOverride && c === 63) {
    this.url.query = "";
    this.state = "query";
  } else if (!this.stateOverride && c === 35) {
    this.url.fragment = "";
    this.state = "fragment";
  } else if (c !== undefined) {
    this.state = "path";
    if (c !== 47) {
      --this.pointer;
    }
  }
  return true;
};
// "path": accumulate one segment in buffer until a separator, resolving
// "." and ".." segments and normalizing Windows drive letters.
URLStateMachine.prototype["parse path"] = function parsePath(c) {
  if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||
    (!this.stateOverride && (c === 63 || c === 35))) {
    if (isSpecial(this.url) && c === 92) {
      this.parseError = true;
    }
    if (isDoubleDot(this.buffer)) {
      // ".." removes the previous segment; a trailing ".." keeps the
      // directory form by pushing an empty segment.
      shortenPath(this.url);
      if (c !== 47 && !(isSpecial(this.url) && c === 92)) {
        this.url.path.push("");
      }
    } else if (isSingleDot(this.buffer) && c !== 47 &&
      !(isSpecial(this.url) && c === 92)) {
      // trailing "." also keeps the directory form
      this.url.path.push("");
    } else if (!isSingleDot(this.buffer)) {
      if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {
        // First file-path segment is a drive letter: drop any host and
        // normalize "C|" to "C:".
        if (this.url.host !== "" && this.url.host !== null) {
          this.parseError = true;
          this.url.host = "";
        }
        this.buffer = this.buffer[0] + ":";
      }
      this.url.path.push(this.buffer);
    }
    this.buffer = "";
    if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) {
      // Strip empty leading segments from file paths ("file:////x").
      while (this.url.path.length > 1 && this.url.path[0] === "") {
        this.parseError = true;
        this.url.path.shift();
      }
    }
    if (c === 63) {
      this.url.query = "";
      this.state = "query";
    }
    if (c === 35) {
      this.url.fragment = "";
      this.state = "fragment";
    }
  } else {
    // TODO: If c is not a URL code point and not "%", parse error.
    if (c === 37 &&
      (!isASCIIHex(this.input[this.pointer + 1]) ||
        !isASCIIHex(this.input[this.pointer + 2]))) {
      this.parseError = true; // "%" not followed by two hex digits
    }
    this.buffer += percentEncodeChar(c, isPathPercentEncode);
  }
  return true;
};
// "cannot-be-a-base-URL path": single opaque path segment; only
// C0-control percent-encoding applies.
URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) {
  if (c === 63) {
    this.url.query = "";
    this.state = "query";
  } else if (c === 35) {
    this.url.fragment = "";
    this.state = "fragment";
  } else {
    // TODO: Add: not a URL code point
    if (!isNaN(c) && c !== 37) {
      this.parseError = true;
    }
    if (c === 37 &&
      (!isASCIIHex(this.input[this.pointer + 1]) ||
        !isASCIIHex(this.input[this.pointer + 2]))) {
      this.parseError = true;
    }
    if (!isNaN(c)) {
      this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);
    }
  }
  return true;
};
// "query": buffer until EOF or "#", then byte-wise percent-encode the
// query bytes outside the allowed printable range.
URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) {
  if (isNaN(c) || (!this.stateOverride && c === 35)) {
    if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") {
      this.encodingOverride = "utf-8";
    }
    const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead
    for (let i = 0; i < buffer.length; ++i) {
      // Encode controls, non-ASCII, and " # < >.
      if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||
        buffer[i] === 0x3C || buffer[i] === 0x3E) {
        this.url.query += percentEncode(buffer[i]);
      } else {
        this.url.query += String.fromCodePoint(buffer[i]);
      }
    }
    this.buffer = "";
    if (c === 35) {
      this.url.fragment = "";
      this.state = "fragment";
    }
  } else {
    // TODO: If c is not a URL code point and not "%", parse error.
    if (c === 37 &&
      (!isASCIIHex(this.input[this.pointer + 1]) ||
        !isASCIIHex(this.input[this.pointer + 2]))) {
      this.parseError = true;
    }
    this.buffer += cStr;
  }
  return true;
};
// "fragment": append code points directly, C0-control percent-encoded;
// NUL is a validation error and is dropped.
URLStateMachine.prototype["parse fragment"] = function parseFragment(c) {
  if (isNaN(c)) { // do nothing
  } else if (c === 0x0) {
    this.parseError = true;
  } else {
    // TODO: If c is not a URL code point and not "%", parse error.
    if (c === 37 &&
      (!isASCIIHex(this.input[this.pointer + 1]) ||
        !isASCIIHex(this.input[this.pointer + 2]))) {
      this.parseError = true;
    }
    this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);
  }
  return true;
};
// Serialize a parsed URL record back to a string (WHATWG URL
// serializer). Pass excludeFragment=true to omit the fragment.
function serializeURL(url, excludeFragment) {
  let output = url.scheme + ":";
  if (url.host !== null) {
    output += "//";
    const hasCredentials = url.username !== "" || url.password !== "";
    if (hasCredentials) {
      output += url.username;
      if (url.password !== "") {
        output += ":" + url.password;
      }
      output += "@";
    }
    output += serializeHost(url.host);
    if (url.port !== null) {
      output += ":" + url.port;
    }
  } else if (url.scheme === "file") {
    // host is null here; file URLs still serialize the "//".
    output += "//";
  }
  if (url.cannotBeABaseURL) {
    output += url.path[0]; // opaque path is a single segment
  } else {
    for (const segment of url.path) {
      output += "/" + segment;
    }
  }
  if (url.query !== null) {
    output += "?" + url.query;
  }
  if (!excludeFragment && url.fragment !== null) {
    output += "#" + url.fragment;
  }
  return output;
}
// Serialize an origin tuple (scheme, host, port) as "scheme://host[:port]".
function serializeOrigin(tuple) {
  const portSuffix = tuple.port === null ? "" : ":" + tuple.port;
  return tuple.scheme + "://" + serializeHost(tuple.host) + portSuffix;
}
module.exports.serializeURL = serializeURL;
// Serialize a URL record's origin per https://url.spec.whatwg.org/#concept-url-origin
module.exports.serializeURLOrigin = function (url) {
  // https://url.spec.whatwg.org/#concept-url-origin
  switch (url.scheme) {
    case "blob":
      // blob: URLs take the origin of the URL in their path.
      try {
        return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));
      } catch (e) {
        // serializing an opaque origin returns "null"
        return "null";
      }
    case "ftp":
    case "gopher":
    case "http":
    case "https":
    case "ws":
    case "wss":
      return serializeOrigin({
        scheme: url.scheme,
        host: url.host,
        port: url.port
      });
    case "file":
      // spec says "exercise to the reader", chrome says "file://"
      return "file://";
    default:
      // serializing an opaque origin returns "null"
      return "null";
  }
};
// Run the state machine; returns the URL record or the string "failure".
module.exports.basicURLParse = function (input, options) {
  if (options === undefined) {
    options = {};
  }
  const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);
  if (usm.failure) {
    return "failure";
  }
  return usm.url;
};
// Set url.username, userinfo-percent-encoding each code point.
module.exports.setTheUsername = function (url, username) {
  url.username = "";
  const decoded = punycode.ucs2.decode(username);
  for (let i = 0; i < decoded.length; ++i) {
    url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
  }
};
// Set url.password, userinfo-percent-encoding each code point.
module.exports.setThePassword = function (url, password) {
  url.password = "";
  const decoded = punycode.ucs2.decode(password);
  for (let i = 0; i < decoded.length; ++i) {
    url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);
  }
};
module.exports.serializeHost = serializeHost;
module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;
module.exports.serializeInteger = function (integer) {
  return String(integer);
};
// Public parse entry point; blobs are not special-cased here.
module.exports.parseURL = function (input, options) {
  if (options === undefined) {
    options = {};
  }
  // We don't handle blobs, so this just delegates:
  return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });
};
/***/ }),
/***/ 866:
/***/ (function(module) {
module.exports = removeHook;
// Unregister `method` from the hook registry entry `name`: find the
// handler whose original function is `method` and splice it out.
// No-op when the hook or the handler does not exist.
function removeHook(state, name, method) {
  const handlers = state.registry[name];
  if (!handlers) {
    return;
  }
  const index = handlers.findIndex(function (registered) {
    return registered.orig === method;
  });
  if (index === -1) {
    return;
  }
  handlers.splice(index, 1);
}
/***/ }),
/***/ 880:
/***/ (function(module, __unusedexports, __webpack_require__) {
"use strict";
const conversions = __webpack_require__(751);
const utils = __webpack_require__(120);
const Impl = __webpack_require__(197);
const impl = utils.implSymbol;
// WebIDL wrapper constructor for URL(url[, base]); generated-style code.
// Rejects calls without `new` and coerces arguments to USVString before
// delegating to the implementation via module.exports.setup.
function URL(url) {
  if (!this || this[impl] || !(this instanceof URL)) {
    throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.");
  }
  if (arguments.length < 1) {
    throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present.");
  }
  // At most two arguments (url, base) are considered.
  const args = [];
  for (let i = 0; i < arguments.length && i < 2; ++i) {
    args[i] = arguments[i];
  }
  args[0] = conversions["USVString"](args[0]);
  if (args[1] !== undefined) {
    args[1] = conversions["USVString"](args[1]);
  }
  module.exports.setup(this, args);
}
// Forward toJSON() to the implementation object after a brand check.
URL.prototype.toJSON = function toJSON() {
  if (!this || !module.exports.is(this)) {
    throw new TypeError("Illegal invocation");
  }
  // Generated argument-copy loop; `i < 0` means no arguments are ever
  // forwarded (toJSON takes none) — intentional generated no-op.
  const args = [];
  for (let i = 0; i < arguments.length && i < 0; ++i) {
    args[i] = arguments[i];
  }
  return this[impl].toJSON.apply(this[impl], args);
};
// Generated accessor properties: each URL attribute (href, origin,
// protocol, username, password, host, hostname, port, pathname, search,
// hash) forwards to the implementation object; setters coerce the value
// to USVString first. `origin` is read-only per spec.
Object.defineProperty(URL.prototype, "href", {
  get() {
    return this[impl].href;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].href = V;
  },
  enumerable: true,
  configurable: true
});
// toString() mirrors the href getter (uses the brand check via is()).
URL.prototype.toString = function () {
  if (!this || !module.exports.is(this)) {
    throw new TypeError("Illegal invocation");
  }
  return this.href;
};
Object.defineProperty(URL.prototype, "origin", {
  get() {
    return this[impl].origin;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "protocol", {
  get() {
    return this[impl].protocol;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].protocol = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "username", {
  get() {
    return this[impl].username;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].username = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "password", {
  get() {
    return this[impl].password;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].password = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "host", {
  get() {
    return this[impl].host;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].host = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "hostname", {
  get() {
    return this[impl].hostname;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].hostname = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "port", {
  get() {
    return this[impl].port;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].port = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "pathname", {
  get() {
    return this[impl].pathname;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].pathname = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "search", {
  get() {
    return this[impl].search;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].search = V;
  },
  enumerable: true,
  configurable: true
});
Object.defineProperty(URL.prototype, "hash", {
  get() {
    return this[impl].hash;
  },
  set(V) {
    V = conversions["USVString"](V);
    this[impl].hash = V;
  },
  enumerable: true,
  configurable: true
});
// WebIDL wrapper module interface: brand check, wrapper creation, and
// wiring between the wrapper object and its implementation instance.
module.exports = {
  // True when obj is a wrapper around an Impl instance.
  is(obj) {
    return !!obj && obj[impl] instanceof Impl.implementation;
  },
  // Create a wrapper without invoking the public constructor checks.
  create(constructorArgs, privateData) {
    let obj = Object.create(URL.prototype);
    this.setup(obj, constructorArgs, privateData);
    return obj;
  },
  // Link wrapper <-> implementation (impl symbol on the wrapper,
  // wrapperSymbol on the implementation).
  setup(obj, constructorArgs, privateData) {
    if (!privateData) privateData = {};
    privateData.wrapper = obj;
    obj[impl] = new Impl.implementation(constructorArgs, privateData);
    obj[impl][utils.wrapperSymbol] = obj;
  },
  interface: URL,
  expose: {
    Window: { URL: URL },
    Worker: { URL: URL }
  }
};
/***/ }),
/***/ 898:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, '__esModule', { value: true });
var request = __webpack_require__(753);
var universalUserAgent = __webpack_require__(796);
const VERSION = "4.8.0";
/**
 * Build a human-readable summary of the GraphQL response errors:
 * a header line followed by one " - <message>" line per error.
 */
function _buildMessageForResponseErrors(data) {
  const details = data.errors
    .map(error => ` - ${error.message}`)
    .join("\n");
  return `Request failed due to following response errors:\n${details}`;
}
/**
 * Error thrown when the GraphQL endpoint answers with an `errors` array.
 * Carries the originating request options, the response headers, and the
 * full response body, plus `errors`/`data` shorthands for convenience.
 */
class GraphqlResponseError extends Error {
  constructor(request, headers, response) {
    super(_buildMessageForResponseErrors(response));
    this.name = "GraphqlResponseError";
    this.request = request;
    this.headers = headers;
    this.response = response;
    // Expose the errors and response data in their shorthand properties.
    this.errors = response.errors;
    this.data = response.data;
    // Maintains proper stack trace (only available on V8).
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
  }
}
// Option keys consumed by the request transport itself; any other key on
// the options object is forwarded to the server as a GraphQL variable.
const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
// Keys that are rejected outright when used as GraphQL variable names.
const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
// Matches the "/api/v3" suffix of a GitHub Enterprise Server REST baseUrl.
const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
/**
 * Send a GraphQL request.
 *
 * `query` is either a query string (with variables supplied via `options`)
 * or a complete options object. Transport-level keys (see
 * NON_VARIABLE_OPTIONS) stay on the request; every other key becomes a
 * GraphQL variable. Resolves with `response.data.data`, or rejects with a
 * GraphqlResponseError when the response body carries an `errors` array.
 */
function graphql(request, query, options) {
  if (options) {
    // A string query plus an explicit "query" option is ambiguous.
    if (typeof query === "string" && "query" in options) {
      return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
    }
    for (const key in options) {
      if (FORBIDDEN_VARIABLE_OPTIONS.includes(key)) {
        return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
      }
    }
  }
  const parsedOptions = typeof query === "string" ? { query, ...options } : query;
  // Split transport options from GraphQL variables; the `variables` bag is
  // created lazily so it is absent when no variables were provided.
  const requestOptions = {};
  for (const key of Object.keys(parsedOptions)) {
    if (NON_VARIABLE_OPTIONS.includes(key)) {
      requestOptions[key] = parsedOptions[key];
      continue;
    }
    if (!requestOptions.variables) {
      requestOptions.variables = {};
    }
    requestOptions.variables[key] = parsedOptions[key];
  }
  // Workaround for GitHub Enterprise baseUrl set with /api/v3 suffix:
  // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
  const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
  if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
  }
  return request(requestOptions).then(response => {
    if (response.data.errors) {
      // Copy the headers so the error object owns a plain snapshot.
      const headers = { ...response.headers };
      throw new GraphqlResponseError(requestOptions, headers, response.data);
    }
    return response.data.data;
  });
}
/**
 * Create a graphql API function bound to a request instance with the
 * given defaults pre-applied. The returned function also carries
 * `defaults` (for further chaining) and a request `endpoint`.
 */
function withDefaults(request$1, newDefaults) {
  const newRequest = request$1.defaults(newDefaults);
  const newApi = (query, options) => graphql(newRequest, query, options);
  return Object.assign(newApi, {
    defaults: withDefaults.bind(null, newRequest),
    // NOTE(review): exposes the module-level request endpoint rather than
    // newRequest.endpoint — this mirrors upstream @octokit/graphql v4.
    endpoint: request.request.endpoint
  });
}
// Default graphql client: POSTs to /graphql on the standard request
// transport, identifying itself via a versioned user-agent header.
const graphql$1 = withDefaults(request.request, {
headers: {
"user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
},
method: "POST",
url: "/graphql"
});
/**
 * Build a graphql client on top of a caller-supplied request function,
 * defaulting to POST /graphql.
 */
function withCustomRequest(customRequest) {
  const graphqlEndpointDefaults = {
    method: "POST",
    url: "/graphql"
  };
  return withDefaults(customRequest, graphqlEndpointDefaults);
}
// Public surface of the graphql module.
exports.GraphqlResponseError = GraphqlResponseError;
exports.graphql = graphql$1;
exports.withCustomRequest = withCustomRequest;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 950:
/***/ (function(__unusedmodule, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Resolve the proxy URL configured for a request, honoring the
 * https_proxy / http_proxy environment variables (lowercase checked
 * before uppercase). Returns undefined when the host is bypassed via
 * no_proxy or when no matching proxy variable is set.
 */
function getProxyUrl(reqUrl) {
  if (checkBypass(reqUrl)) {
    return undefined;
  }
  const secure = reqUrl.protocol === 'https:';
  const proxyVar = secure
    ? process.env['https_proxy'] || process.env['HTTPS_PROXY']
    : process.env['http_proxy'] || process.env['HTTP_PROXY'];
  return proxyVar ? new URL(proxyVar) : undefined;
}
exports.getProxyUrl = getProxyUrl;
/**
 * Decide whether a request URL should bypass the proxy, based on the
 * comma-separated no_proxy / NO_PROXY environment variable. Each entry is
 * compared case-insensitively against the bare hostname and against
 * "hostname:port" (using the explicit port or the protocol default).
 */
function checkBypass(reqUrl) {
  if (!reqUrl.hostname) {
    return false;
  }
  const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
  if (!noProxy) {
    return false;
  }
  // Determine the request port: explicit, else the protocol default.
  let reqPort;
  if (reqUrl.port) {
    reqPort = Number(reqUrl.port);
  } else if (reqUrl.protocol === 'http:') {
    reqPort = 80;
  } else if (reqUrl.protocol === 'https:') {
    reqPort = 443;
  }
  // Candidate forms of the request host: bare, and with explicit port.
  const upperReqHosts = [reqUrl.hostname.toUpperCase()];
  if (typeof reqPort === 'number') {
    upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
  }
  // Bypass when any non-empty no_proxy entry matches a candidate exactly.
  return noProxy
    .split(',')
    .map(entry => entry.trim().toUpperCase())
    .filter(entry => entry.length > 0)
    .some(entry => upperReqHosts.includes(entry));
}
exports.checkBypass = checkBypass;
/***/ }),
/***/ 967:
/***/ (function(module) {
module.exports = [[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],
[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[31
6,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"ma
pped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"map
ped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687]
,"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[
[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[
[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1
132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],
[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[129
0,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],
[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"vali
d"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2
448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,
2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,299
8],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,33
31],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],
[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,402
3]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disall
owed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disall
owed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",
[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117
]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"map
ped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761
],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],
[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[
7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"ma
pped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]
],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174]
,"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,830
7],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disal
lowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"map
ped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]]
,[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_
mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mappe
d",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",
[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapp
ed",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11
350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422
],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"va
lid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[2
0837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103
],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],
[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapp
ed",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[1
2597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped
",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_S
TD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844]
,"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,
53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"m
apped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,1
3015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12
459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]]
,[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,288
57]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[
[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped
",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,4212
7],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,
42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"map
ped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"va
lid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"va
lid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[4
3912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallow
ed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36335]
],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"ma
pped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[639
21,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[
21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,6
4049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"]
,[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"map
ped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[642
85,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[643
58,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64
526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[6458
0,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[646
31,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685
,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,6
4739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,161
0]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865]
,"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[64928,649
28],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[
[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disal
lowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,651
60],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299
,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[10
9]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped
",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",
[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"di
sallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,6659
8],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159
],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[6876
8,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"
valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[
71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8
"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141
,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857],"mapped",[120
]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[1
03]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115
]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]
],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]]
,[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[
[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102
]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mappe
d",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351]
,"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120
412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120
473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",
[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,12
0597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[9
54]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,1207
20],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[
120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid"],[[125125,125126]
,"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,1
26521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"]
,[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,1
27019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127
265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127
339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[1275
42,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"di
sallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071
],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[
[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[19
4671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[19
4730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194
787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]
],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped
",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960]
,"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[17973]],[[195017,195017]
,"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195
075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],
[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]];
/***/ })
/******/ });
|
/*
* FCKeditor - The text editor for Internet - http://www.fckeditor.net
* Copyright (C) 2003-2008 Frederico Caldeira Knabben
*
* == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your
* choice:
*
* - GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html
*
* - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
* http://www.gnu.org/licenses/lgpl.html
*
* - Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html
*
* == END LICENSE ==
*
* Class for working with a selection range, much like the W3C DOM Range, but
* it is not intended to be an implementation of the W3C interface.
*/
/**
 * Constructor for a selection-range wrapper bound to a specific window.
 * The actual W3C range (this._Range) is created lazily by the prototype
 * methods; this only records the owner window and resets the per-range
 * cache used by CheckStartOfBlock/CheckEndOfBlock.
 * @param sourceWindow the window whose document the range operates on.
 */
var FCKDomRange = function( sourceWindow )
{
	this.Window = sourceWindow ;
	this._Cache = {} ;
} ;	// Fixed: terminate the function-expression assignment explicitly instead of relying on ASI.
FCKDomRange.prototype =
{
// Refreshes the cached boundary information (StartNode/EndNode, their
// containers, enclosing blocks and block limits) from the underlying
// W3C range, and resets the per-range cache. With no range attached,
// the object is released (keeping the Window reference).
_UpdateElementInfo : function()
{
var innerRange = this._Range ;
if ( !innerRange )
this.Release( true ) ;
else
{
// For text nodes, the node itself is the StartNode.
var eStart = innerRange.startContainer ;
var eEnd = innerRange.endContainer ;
var oElementPath = new FCKElementPath( eStart ) ;
this.StartNode = eStart.nodeType == 3 ? eStart : eStart.childNodes[ innerRange.startOffset ] ;
this.StartContainer = eStart ;
this.StartBlock = oElementPath.Block ;
this.StartBlockLimit = oElementPath.BlockLimit ;
// Reuse the start path when both boundaries share a container.
if ( eStart != eEnd )
oElementPath = new FCKElementPath( eEnd ) ;
// The innerRange.endContainer[ innerRange.endOffset ] is not
// usually part of the range, but the marker for the range end. So,
// let's get the previous available node as the real end.
var eEndNode = eEnd ;
if ( innerRange.endOffset == 0 )
{
// Walk up until an ancestor has a previous sibling, then take it.
while ( eEndNode && !eEndNode.previousSibling )
eEndNode = eEndNode.parentNode ;
if ( eEndNode )
eEndNode = eEndNode.previousSibling ;
}
else if ( eEndNode.nodeType == 1 )
eEndNode = eEndNode.childNodes[ innerRange.endOffset - 1 ] ;
this.EndNode = eEndNode ;
this.EndContainer = eEnd ;
this.EndBlock = oElementPath.Block ;
this.EndBlockLimit = oElementPath.BlockLimit ;
}
// Any memoized CheckStartOfBlock/CheckEndOfBlock results are now stale.
this._Cache = {} ;
},
// Creates a new, unpositioned FCKW3CRange over this range's document.
CreateRange : function()
{
return new FCKW3CRange( this.Window.document ) ;
},
// Deletes the contents currently spanned by the range and refreshes
// the cached boundary information. No-op when no range is attached.
DeleteContents : function()
{
if ( this._Range )
{
this._Range.deleteContents() ;
this._UpdateElementInfo() ;
}
},
// Removes the spanned contents from the document and returns them as a
// document fragment (null when no range is attached).
ExtractContents : function()
{
if ( this._Range )
{
var docFrag = this._Range.extractContents() ;
this._UpdateElementInfo() ;
return docFrag ;
}
return null ;
},
// Tells whether the range is collapsed (start == end). Returns false
// when no range has been set yet.
CheckIsCollapsed : function()
{
if ( this._Range )
return this._Range.collapsed ;
return false ;
},
// Collapses the range to its start (toStart == true) or end boundary.
Collapse : function( toStart )
{
if ( this._Range )
this._Range.collapse( toStart ) ;
this._UpdateElementInfo() ;
},
// Returns a shallow copy of this object with its own cloned W3C range,
// so the clone can be repositioned without affecting this instance.
Clone : function()
{
var oClone = FCKTools.CloneObject( this ) ;
if ( this._Range )
oClone._Range = this._Range.cloneRange() ;
return oClone ;
},
// Makes the range span the entire contents of targetNode, lazily
// creating the underlying W3C range if needed.
MoveToNodeContents : function( targetNode )
{
if ( !this._Range )
this._Range = this.CreateRange() ;
this._Range.selectNodeContents( targetNode ) ;
this._UpdateElementInfo() ;
},
// Collapses the range to the point just inside the opening tag of
// targetElement: <target>^contents</target>.
MoveToElementStart : function( targetElement )
{
this.SetStart(targetElement,1) ;
this.SetEnd(targetElement,1) ;
},
// Moves to the first editing point inside a element. For example, in a
// element tree like "<p><b><i></i></b> Text</p>", the start editing point
// is "<p><b><i>^</i></b> Text</p>" (inside <i>).
MoveToElementEditStart : function( targetElement )
{
var editableElement ;
// Descend through first children, remembering the deepest editable one.
while ( targetElement && targetElement.nodeType == 1 )
{
if ( FCKDomTools.CheckIsEditable( targetElement ) )
editableElement = targetElement ;
else if ( editableElement )
break ; // If we already found an editable element, stop the loop.
targetElement = targetElement.firstChild ;
}
if ( editableElement )
this.MoveToElementStart( editableElement ) ;
},
// Inserts a node at the range start position. No-op when no range is
// attached; the cached boundary info is not refreshed here.
InsertNode : function( node )
{
if ( this._Range )
this._Range.insertNode( node ) ;
},
// Tells whether the range spans no "real" content: either collapsed,
// or its cloned contents trim down to an empty string.
CheckIsEmpty : function()
{
if ( this.CheckIsCollapsed() )
return true ;
// Inserts the contents of the range in a div tag.
var eToolDiv = this.Window.document.createElement( 'div' ) ;
this._Range.cloneContents().AppendTo( eToolDiv ) ;
FCKDomTools.TrimNode( eToolDiv ) ;
return ( eToolDiv.innerHTML.length == 0 ) ;
},
/**
 * Checks if the start boundary of the current range is "visually" (like a
 * selection caret) at the beginning of the block. It means that some
 * things could be before the range, like spaces or empty inline elements,
 * but it would still be considered at the beginning of the block.
 * The result is memoized in this._Cache until the range changes.
 */
CheckStartOfBlock : function()
{
var cache = this._Cache ;
var bIsStartOfBlock = cache.IsStartOfBlock ;
if ( bIsStartOfBlock != undefined )
return bIsStartOfBlock ;
// Take the block reference.
var block = this.StartBlock || this.StartBlockLimit ;
var container = this._Range.startContainer ;
var offset = this._Range.startOffset ;
var currentNode ;
if ( offset > 0 )
{
// First, check the start container. If it is a text node, get the
// substring of the node value before the range offset.
if ( container.nodeType == 3 )
{
var textValue = container.nodeValue.substr( 0, offset ).Trim() ;
// If we have some text left in the container, we are not at
// the start of the block.
if ( textValue.length != 0 )
return cache.IsStartOfBlock = false ;
}
else
currentNode = container.childNodes[ offset - 1 ] ;
}
// We'll not have a currentNode if the container was a text node, or
// the offset is zero.
if ( !currentNode )
currentNode = FCKDomTools.GetPreviousSourceNode( container, true, null, block ) ;
// Walk backwards (in source order) up to the block boundary, looking
// for anything visible before the range start.
while ( currentNode )
{
switch ( currentNode.nodeType )
{
case 1 :
// It's not an inline element.
if ( !FCKListsLib.InlineChildReqElements[ currentNode.nodeName.toLowerCase() ] )
return cache.IsStartOfBlock = false ;
break ;
case 3 :
// It's a text node with real text.
if ( currentNode.nodeValue.Trim().length > 0 )
return cache.IsStartOfBlock = false ;
}
currentNode = FCKDomTools.GetPreviousSourceNode( currentNode, false, null, block ) ;
}
return cache.IsStartOfBlock = true ;
},
/**
 * Checks if the end boundary of the current range is "visually" (like a
 * selection caret) at the end of the block. It means that some things
 * could be after the range, like spaces, empty inline elements, or a
 * single <br>, but it would still be considered at the end of the block.
 * The result is memoized in this._Cache until the range changes. When
 * refreshSelection is true and the answer is positive, the selection is
 * re-applied from this range.
 */
CheckEndOfBlock : function( refreshSelection )
{
var isEndOfBlock = this._Cache.IsEndOfBlock ;
if ( isEndOfBlock != undefined )
return isEndOfBlock ;
// Take the block reference.
var block = this.EndBlock || this.EndBlockLimit ;
var container = this._Range.endContainer ;
var offset = this._Range.endOffset ;
var currentNode ;
// First, check the end container. If it is a text node, get the
// substring of the node value after the range offset.
if ( container.nodeType == 3 )
{
var textValue = container.nodeValue ;
if ( offset < textValue.length )
{
textValue = textValue.substr( offset ) ;
// If we have some text left in the container, we are not at
// the end for the block.
if ( textValue.Trim().length != 0 )
return this._Cache.IsEndOfBlock = false ;
}
}
else
currentNode = container.childNodes[ offset ] ;
// We'll not have a currentNode if the container was a text node, or
// the offset is out the container children limits (after it probably).
if ( !currentNode )
currentNode = FCKDomTools.GetNextSourceNode( container, true, null, block ) ;
// Walk forward up to the block boundary; a single trailing <br> is
// tolerated, anything else visible means we are not at the end.
var hadBr = false ;
while ( currentNode )
{
switch ( currentNode.nodeType )
{
case 1 :
var nodeName = currentNode.nodeName.toLowerCase() ;
// It's an inline element.
if ( FCKListsLib.InlineChildReqElements[ nodeName ] )
break ;
// It is the first <br> found.
if ( nodeName == 'br' && !hadBr )
{
hadBr = true ;
break ;
}
return this._Cache.IsEndOfBlock = false ;
case 3 :
// It's a text node with real text.
if ( currentNode.nodeValue.Trim().length > 0 )
return this._Cache.IsEndOfBlock = false ;
}
currentNode = FCKDomTools.GetNextSourceNode( currentNode, false, null, block ) ;
}
if ( refreshSelection )
this.Select() ;
return this._Cache.IsEndOfBlock = true ;
},
// This is an "intrusive" way to create a bookmark. It includes <span> tags
// in the range boundaries. The advantage of it is that it is possible to
// handle DOM mutations when moving back to the bookmark.
// Attention: the inclusion of nodes in the DOM is a design choice and
// should not be changed as there are other points in the code that may be
// using those nodes to perform operations. See GetBookmarkNode.
// For performance, includeNodes=true if intended to SelectBookmark.
// Returns { StartId, EndId [, StartNode, EndNode] } ; marker IDs are
// timestamp+random so multiple bookmarks can coexist.
CreateBookmark : function( includeNodes )
{
// Create the bookmark info (random IDs).
var oBookmark =
{
StartId : (new Date()).valueOf() + Math.floor(Math.random()*1000) + 'S',
EndId : (new Date()).valueOf() + Math.floor(Math.random()*1000) + 'E'
} ;
var oDoc = this.Window.document ;
var eStartSpan ;
var eEndSpan ;
var oClone ;
// For collapsed ranges, add just the start marker.
if ( !this.CheckIsCollapsed() )
{
eEndSpan = oDoc.createElement( 'span' ) ;
eEndSpan.style.display = 'none' ;
eEndSpan.id = oBookmark.EndId ;
eEndSpan.setAttribute( '_fck_bookmark', true ) ;
// For IE, it must have something inside, otherwise it may be
// removed during DOM operations.
// if ( FCKBrowserInfo.IsIE )
eEndSpan.innerHTML = ' ' ;
// Insert the end marker first so the start insertion below does not
// shift its position.
oClone = this.Clone() ;
oClone.Collapse( false ) ;
oClone.InsertNode( eEndSpan ) ;
}
eStartSpan = oDoc.createElement( 'span' ) ;
eStartSpan.style.display = 'none' ;
eStartSpan.id = oBookmark.StartId ;
eStartSpan.setAttribute( '_fck_bookmark', true ) ;
// For IE, it must have something inside, otherwise it may be removed
// during DOM operations.
// if ( FCKBrowserInfo.IsIE )
eStartSpan.innerHTML = ' ' ;
oClone = this.Clone() ;
oClone.Collapse( true ) ;
oClone.InsertNode( eStartSpan ) ;
if ( includeNodes )
{
oBookmark.StartNode = eStartSpan ;
oBookmark.EndNode = eEndSpan ;
}
// Update the range position.
if ( eEndSpan )
{
this.SetStart( eStartSpan, 4 ) ;
this.SetEnd( eEndSpan, 3 ) ;
}
else
this.MoveToPosition( eStartSpan, 4 ) ;
return oBookmark ;
},
// This one should be a part of a hypothetic "bookmark" object.
// Returns the start (start == true) or end marker node of an intrusive
// bookmark, preferring the cached node reference over an ID lookup.
GetBookmarkNode : function( bookmark, start )
{
var doc = this.Window.document ;
if ( start )
return bookmark.StartNode || doc.getElementById( bookmark.StartId ) ;
else
return bookmark.EndNode || doc.getElementById( bookmark.EndId ) ;
},
// Repositions the range over the marker nodes of an intrusive bookmark
// created by CreateBookmark. Unless preserveBookmark is true, the
// markers are removed from the DOM afterwards.
MoveToBookmark : function( bookmark, preserveBookmark )
{
var eStartSpan = this.GetBookmarkNode( bookmark, true ) ;
var eEndSpan = this.GetBookmarkNode( bookmark, false ) ;
this.SetStart( eStartSpan, 3 ) ;
if ( !preserveBookmark )
FCKDomTools.RemoveNode( eStartSpan ) ;
// If collapsed, the end span will not be available.
if ( eEndSpan )
{
this.SetEnd( eEndSpan, 3 ) ;
if ( !preserveBookmark )
FCKDomTools.RemoveNode( eEndSpan ) ;
}
else
this.Collapse( true ) ;
this._UpdateElementInfo() ;
},
// Non-intrusive bookmark algorithm
// Records the range as a pair of node "addresses" (child-index paths,
// see FCKDomTools.GetNodeAddress) with a trailing offset, folding runs
// of sibling text nodes together so the bookmark survives a later
// normalization of the document tree.
CreateBookmark2 : function()
{
// If there is no range then get out of here.
// It happens on initial load in Safari #962 and if the editor it's hidden also in Firefox
if ( ! this._Range )
return { "Start" : 0, "End" : 0 } ;
// First, we record down the offset values
var bookmark =
{
"Start" : [ this._Range.startOffset ],
"End" : [ this._Range.endOffset ]
} ;
// Since we're treating the document tree as normalized, we need to backtrack the text lengths
// of previous text nodes into the offset value.
var curStart = this._Range.startContainer.previousSibling ;
var curEnd = this._Range.endContainer.previousSibling ;
// Also note that the node that we use for "address base" would change during backtracking.
var addrStart = this._Range.startContainer ;
var addrEnd = this._Range.endContainer ;
while ( curStart && curStart.nodeType == 3 )
{
bookmark.Start[0] += curStart.length ;
addrStart = curStart ;
curStart = curStart.previousSibling ;
}
while ( curEnd && curEnd.nodeType == 3 )
{
bookmark.End[0] += curEnd.length ;
addrEnd = curEnd ;
curEnd = curEnd.previousSibling ;
}
// If the object pointed to by the startOffset and endOffset are text nodes, we need
// to backtrack and add in the text offset to the bookmark addresses.
if ( addrStart.nodeType == 1 && addrStart.childNodes[bookmark.Start[0]] && addrStart.childNodes[bookmark.Start[0]].nodeType == 3 )
{
var curNode = addrStart.childNodes[bookmark.Start[0]] ;
var offset = 0 ;
while ( curNode.previousSibling && curNode.previousSibling.nodeType == 3 )
{
curNode = curNode.previousSibling ;
offset += curNode.length ;
}
addrStart = curNode ;
bookmark.Start[0] = offset ;
}
if ( addrEnd.nodeType == 1 && addrEnd.childNodes[bookmark.End[0]] && addrEnd.childNodes[bookmark.End[0]].nodeType == 3 )
{
var curNode = addrEnd.childNodes[bookmark.End[0]] ;
var offset = 0 ;
while ( curNode.previousSibling && curNode.previousSibling.nodeType == 3 )
{
curNode = curNode.previousSibling ;
offset += curNode.length ;
}
addrEnd = curNode ;
bookmark.End[0] = offset ;
}
// Then, we record down the precise position of the container nodes
// by walking up the DOM tree and counting their childNode index
bookmark.Start = FCKDomTools.GetNodeAddress( addrStart, true ).concat( bookmark.Start ) ;
bookmark.End = FCKDomTools.GetNodeAddress( addrEnd, true ).concat( bookmark.End ) ;
return bookmark;
},
// Restores a range recorded by CreateBookmark2, resolving the node
// addresses back to concrete nodes and re-distributing the trailing
// text offset over sibling text nodes if the tree was denormalized.
MoveToBookmark2 : function( bookmark )
{
// Reverse the childNode counting algorithm in CreateBookmark2()
var curStart = FCKDomTools.GetNodeFromAddress( this.Window.document, bookmark.Start.slice( 0, -1 ), true ) ;
var curEnd = FCKDomTools.GetNodeFromAddress( this.Window.document, bookmark.End.slice( 0, -1 ), true ) ;
// Generate the W3C Range object and update relevant data
this.Release( true ) ;
this._Range = new FCKW3CRange( this.Window.document ) ;
var startOffset = bookmark.Start[ bookmark.Start.length - 1 ] ;
var endOffset = bookmark.End[ bookmark.End.length - 1 ] ;
// Walk forward through consecutive text siblings until the recorded
// offset fits inside the current node.
while ( curStart.nodeType == 3 && startOffset > curStart.length )
{
if ( ! curStart.nextSibling || curStart.nextSibling.nodeType != 3 )
break ;
startOffset -= curStart.length ;
curStart = curStart.nextSibling ;
}
while ( curEnd.nodeType == 3 && endOffset > curEnd.length )
{
if ( ! curEnd.nextSibling || curEnd.nextSibling.nodeType != 3 )
break ;
endOffset -= curEnd.length ;
curEnd = curEnd.nextSibling ;
}
this._Range.setStart( curStart, startOffset ) ;
this._Range.setEnd( curEnd, endOffset ) ;
this._UpdateElementInfo() ;
},
// Collapses the range at a specific position relative to targetElement
// (see SetStart for the meaning of the position codes).
MoveToPosition : function( targetElement, position )
{
this.SetStart( targetElement, position ) ;
this.Collapse( true ) ;
},
/*
 * Moves the position of the start boundary of the range to a specific position
 * relatively to a element. The W3C range is lazily created on first use.
 * @position:
 * 1 = After Start <target>^contents</target>
 * 2 = Before End <target>contents^</target>
 * 3 = Before Start ^<target>contents</target>
 * 4 = After End <target>contents</target>^
 */
SetStart : function( targetElement, position, noInfoUpdate )
{
var oRange = this._Range ;
if ( !oRange )
oRange = this._Range = this.CreateRange() ;
switch( position )
{
case 1 : // After Start <target>^contents</target>
oRange.setStart( targetElement, 0 ) ;
break ;
case 2 : // Before End <target>contents^</target>
oRange.setStart( targetElement, targetElement.childNodes.length ) ;
break ;
case 3 : // Before Start ^<target>contents</target>
oRange.setStartBefore( targetElement ) ;
break ;
case 4 : // After End <target>contents</target>^
oRange.setStartAfter( targetElement ) ;
}
if ( !noInfoUpdate )
this._UpdateElementInfo() ;
},
/*
 * Moves the position of the end boundary of the range to a specific position
 * relatively to a element. The W3C range is lazily created on first use.
 * @position:
 * 1 = After Start <target>^contents</target>
 * 2 = Before End <target>contents^</target>
 * 3 = Before Start ^<target>contents</target>
 * 4 = After End <target>contents</target>^
 */
SetEnd : function( targetElement, position, noInfoUpdate )
{
var oRange = this._Range ;
if ( !oRange )
oRange = this._Range = this.CreateRange() ;
switch( position )
{
case 1 : // After Start <target>^contents</target>
oRange.setEnd( targetElement, 0 ) ;
break ;
case 2 : // Before End <target>contents^</target>
oRange.setEnd( targetElement, targetElement.childNodes.length ) ;
break ;
case 3 : // Before Start ^<target>contents</target>
oRange.setEndBefore( targetElement ) ;
break ;
case 4 : // After End <target>contents</target>^
oRange.setEndAfter( targetElement ) ;
}
if ( !noInfoUpdate )
this._UpdateElementInfo() ;
},
// Grows the range boundaries by the given unit ('inline_elements',
// 'block_contents' or 'list_contents') without changing the content it
// visually covers.
Expand : function( unit )
{
var oNode, oSibling ;
switch ( unit )
{
// Expand the range to include all inline parent elements if we are
// are in their boundary limits.
// For example (where [ ] are the range limits):
// Before => Some <b>[<i>Some sample text]</i></b>.
// After => Some [<b><i>Some sample text</i></b>].
case 'inline_elements' :
// Expand the start boundary.
if ( this._Range.startOffset == 0 )
{
oNode = this._Range.startContainer ;
if ( oNode.nodeType != 1 )
oNode = oNode.previousSibling ? null : oNode.parentNode ;
if ( oNode )
{
while ( FCKListsLib.InlineNonEmptyElements[ oNode.nodeName.toLowerCase() ] )
{
this._Range.setStartBefore( oNode ) ;
if ( oNode != oNode.parentNode.firstChild )
break ;
oNode = oNode.parentNode ;
}
}
}
// Expand the end boundary.
oNode = this._Range.endContainer ;
var offset = this._Range.endOffset ;
// Only expand when the boundary sits at the very end of its container
// (or the container is neither element nor text).
if ( ( oNode.nodeType == 3 && offset >= oNode.nodeValue.length ) || ( oNode.nodeType == 1 && offset >= oNode.childNodes.length ) || ( oNode.nodeType != 1 && oNode.nodeType != 3 ) )
{
if ( oNode.nodeType != 1 )
oNode = oNode.nextSibling ? null : oNode.parentNode ;
if ( oNode )
{
while ( FCKListsLib.InlineNonEmptyElements[ oNode.nodeName.toLowerCase() ] )
{
this._Range.setEndAfter( oNode ) ;
if ( oNode != oNode.parentNode.lastChild )
break ;
oNode = oNode.parentNode ;
}
}
}
break ;
case 'block_contents' :
case 'list_contents' :
var boundarySet = FCKListsLib.BlockBoundaries ;
if ( unit == 'list_contents' || FCKConfig.EnterMode == 'br' )
boundarySet = FCKListsLib.ListBoundaries ;
if ( this.StartBlock && FCKConfig.EnterMode != 'br' && unit == 'block_contents' )
this.SetStart( this.StartBlock, 1 ) ;
else
{
// Get the start node for the current range.
oNode = this._Range.startContainer ;
// If it is an element, get the node right before of it (in source order).
if ( oNode.nodeType == 1 )
{
var lastNode = oNode.childNodes[ this._Range.startOffset ] ;
if ( lastNode )
oNode = FCKDomTools.GetPreviousSourceNode( lastNode, true ) ;
else
oNode = oNode.lastChild || oNode ;
}
// We must look for the left boundary, relative to the range
// start, which is limited by a block element.
while ( oNode
&& ( oNode.nodeType != 1
|| ( oNode != this.StartBlockLimit
&& !boundarySet[ oNode.nodeName.toLowerCase() ] ) ) )
{
this._Range.setStartBefore( oNode ) ;
oNode = oNode.previousSibling || oNode.parentNode ;
}
}
if ( this.EndBlock && FCKConfig.EnterMode != 'br' && unit == 'block_contents' && this.EndBlock.nodeName.toLowerCase() != 'li' )
this.SetEnd( this.EndBlock, 2 ) ;
else
{
oNode = this._Range.endContainer ;
if ( oNode.nodeType == 1 )
oNode = oNode.childNodes[ this._Range.endOffset ] || oNode.lastChild ;
// We must look for the right boundary, relative to the range
// end, which is limited by a block element.
// NOTE(review): this checks StartBlockLimit rather than
// EndBlockLimit — looks like a copy/paste slip, but both limits
// are typically the same element here; confirm before changing.
while ( oNode
&& ( oNode.nodeType != 1
|| ( oNode != this.StartBlockLimit
&& !boundarySet[ oNode.nodeName.toLowerCase() ] ) ) )
{
this._Range.setEndAfter( oNode ) ;
oNode = oNode.nextSibling || oNode.parentNode ;
}
// In EnterMode='br', the end <br> boundary element must
// be included in the expanded range.
if ( oNode && oNode.nodeName.toLowerCase() == 'br' )
this._Range.setEndAfter( oNode ) ;
}
this._UpdateElementInfo() ;
}
},
/**
 * Split the block element for the current range. It deletes the contents
 * of the range and splits the block in the collapsed position, resulting
 * in two successive blocks. The range is then positioned in the middle of
 * them.
 *
 * It returns an object with the following properties:
 * - PreviousBlock : a reference to the block element that precedes
 * the range after the split.
 * - NextBlock : a reference to the block element that follows the
 * range after the split.
 * - WasStartOfBlock : a boolean indicating that the range was
 * originally at the start of the block.
 * - WasEndOfBlock : a boolean indicating that the range was originally
 * at the end of the block.
 *
 * If the range was originally at the start of the block, no split will happen
 * and the PreviousBlock value will be null. The same is valid for the
 * NextBlock value if the range was at the end of the block.
 * Returns null when the range boundaries are in different block limits.
 */
SplitBlock : function( forceBlockTag )
{
var blockTag = forceBlockTag || FCKConfig.EnterMode ;
if ( !this._Range )
this.MoveToSelection() ;
// The range boundaries must be in the same "block limit" element.
if ( this.StartBlockLimit == this.EndBlockLimit )
{
// Get the current blocks.
var eStartBlock = this.StartBlock ;
var eEndBlock = this.EndBlock ;
var oElementPath = null ;
if ( blockTag != 'br' )
{
// Make sure both boundaries live inside real block elements.
if ( !eStartBlock )
{
eStartBlock = this.FixBlock( true, blockTag ) ;
eEndBlock = this.EndBlock ; // FixBlock may have fixed the EndBlock too.
}
if ( !eEndBlock )
eEndBlock = this.FixBlock( false, blockTag ) ;
}
// Get the range position.
var bIsStartOfBlock = ( eStartBlock != null && this.CheckStartOfBlock() ) ;
var bIsEndOfBlock = ( eEndBlock != null && this.CheckEndOfBlock() ) ;
// Delete the current contents.
if ( !this.CheckIsEmpty() )
this.DeleteContents() ;
if ( eStartBlock && eEndBlock && eStartBlock == eEndBlock )
{
if ( bIsEndOfBlock )
{
oElementPath = new FCKElementPath( this.StartContainer ) ;
this.MoveToPosition( eEndBlock, 4 ) ;
eEndBlock = null ;
}
else if ( bIsStartOfBlock )
{
oElementPath = new FCKElementPath( this.StartContainer ) ;
this.MoveToPosition( eStartBlock, 3 ) ;
eStartBlock = null ;
}
else
{
// Extract the contents of the block from the selection point to the end of its contents.
this.SetEnd( eStartBlock, 2 ) ;
var eDocFrag = this.ExtractContents() ;
// Duplicate the block element after it.
eEndBlock = eStartBlock.cloneNode( false ) ;
eEndBlock.removeAttribute( 'id', false ) ;
// Place the extracted contents in the duplicated block.
eDocFrag.AppendTo( eEndBlock ) ;
FCKDomTools.InsertAfterNode( eStartBlock, eEndBlock ) ;
this.MoveToPosition( eStartBlock, 4 ) ;
// In Gecko, the last child node must be a bogus <br>.
// Note: bogus <br> added under <ul> or <ol> would cause lists to be incorrectly rendered.
if ( FCKBrowserInfo.IsGecko &&
! eStartBlock.nodeName.IEquals( ['ul', 'ol'] ) )
FCKTools.AppendBogusBr( eStartBlock ) ;
}
}
return {
PreviousBlock : eStartBlock,
NextBlock : eEndBlock,
WasStartOfBlock : bIsStartOfBlock,
WasEndOfBlock : bIsEndOfBlock,
ElementPath : oElementPath
} ;
}
return null ;
},
// Transform a block without a block tag in a valid block (orphan text in the body or td, usually).
// The original range is preserved through a bookmark while the orphan
// contents are wrapped in a newly created <blockTag> element.
// Returns the new block element.
FixBlock : function( isStart, blockTag )
{
// Bookmark the range so we can restore it later.
var oBookmark = this.CreateBookmark() ;
// Collapse the range to the requested ending boundary.
this.Collapse( isStart ) ;
// Expands it to the block contents.
this.Expand( 'block_contents' ) ;
// Create the fixed block.
var oFixedBlock = this.Window.document.createElement( blockTag ) ;
// Move the contents of the temporary range to the fixed block.
this.ExtractContents().AppendTo( oFixedBlock ) ;
FCKDomTools.TrimNode( oFixedBlock ) ;
// Insert the fixed block into the DOM.
this.InsertNode( oFixedBlock ) ;
// Move the range back to the bookmarked place.
this.MoveToBookmark( oBookmark ) ;
return oFixedBlock ;
},
Release : function( preserveWindow )
{
if ( !preserveWindow )
this.Window = null ;
this.StartNode = null ;
this.StartContainer = null ;
this.StartBlock = null ;
this.StartBlockLimit = null ;
this.EndNode = null ;
this.EndContainer = null ;
this.EndBlock = null ;
this.EndBlockLimit = null ;
this._Range = null ;
this._Cache = null ;
},
CheckHasRange : function()
{
return !!this._Range ;
},
GetTouchedStartNode : function()
{
var range = this._Range ;
var container = range.startContainer ;
if ( range.collapsed || container.nodeType != 1 )
return container ;
return container.childNodes[ range.startOffset ] || container ;
},
GetTouchedEndNode : function()
{
var range = this._Range ;
var container = range.endContainer ;
if ( range.collapsed || container.nodeType != 1 )
return container ;
return container.childNodes[ range.endOffset - 1 ] || container ;
}
} ;
|
from os import truncate
from Includes import os, shutil, sleep
from AssistantConfig import voice, USERPATH
TIME = 10
def extension_type(event):  # Get file's extension
    """Return the extension of ``event.src_path`` (the text after the last dot).

    Returns an empty string when the path contains no dot, instead of the
    ``ValueError`` that ``str.rindex`` would raise.

    NOTE(review): the original guard ``... != 'tmp' or 'crdownload'`` was
    always truthy ('crdownload' is a non-empty literal on the right of
    ``or``), so the function unconditionally returned the extension. That
    effective behavior is preserved here; callers such as ``makeFolder``
    and ``moveToFolder`` do their own 'tmp'/'crdownload' filtering.
    """
    path = event.src_path
    if '.' not in path:
        return ''
    return path.rpartition('.')[2]
# --- Create respective folders for the extensions ----- #
def isText(event):
    """True for plain-text (.txt) files."""
    return extension_type(event) == 'txt'


def isPDF(event):
    """True for PDF documents."""
    return extension_type(event) == 'pdf'


def isMP3(event):
    """True for common audio formats."""
    return extension_type(event) in ('mp3', "wav", "m4a", "flac", "aiff", "ogg")


def isImage(event):
    """True for common image formats."""
    return extension_type(event) in ('png', 'jpg', 'jpeg', 'bmp', 'gif', 'raw', 'ico')


def isVideo(event):
    """True for common video formats."""
    return extension_type(event) in ('mov', 'mp4', 'avi', 'flv', 'ts')


def isWord(event):
    """True for word-processor documents."""
    return extension_type(event) in ('doc', 'docx', 'odf')


def isSpreadsheet(event):
    """True for spreadsheet documents."""
    return extension_type(event) in ('xls', 'xlsx')


def isPresentation(event):
    """True for presentation documents."""
    return extension_type(event) in ('ppt', 'pptx')


def isCompacted(event):
    """True for archives and disk images."""
    return extension_type(event) in ('rar', 'zip', '7z', 'iso')


def isCode(event):
    """True for source-code and web files."""
    return extension_type(event) in ('py', "jl", 'cs', 'js', 'php', 'html', 'sql', 'css', 'c', 'h', 'cpp', 'java', 'asp', 'aspx', 'axd', 'asx', 'asmx', 'ashx', 'cfm', 'yaws', 'swf', 'htm', 'xhtml', 'jhtml', "jsp", "jspx", "wss", "do", "cmd", "action", "pl", "phtml", "php3", "php4", "rb", "rhtml", "shtml", "rss", "svg", )


def isExecutable(event):
    """True for executables and installer packages."""
    return extension_type(event) in ('exe', 'msi', 'run', 'deb')
def isInvoice(event):
    """True for XML invoice files.

    Fixed: ``in ('xml')`` tested *substring* membership in the string
    'xml' (so 'x', 'm', 'xm' and even '' matched); a one-element tuple
    was intended.
    """
    return extension_type(event) in ('xml',)


def isTorrent(event):
    """True for .torrent files (same one-element-tuple fix as isInvoice)."""
    return extension_type(event) in ('torrent',)


def isPackage(event):
    """True for .package files (same one-element-tuple fix as isInvoice)."""
    return extension_type(event) in ('package',)
# ------------------------------------------------------ #
def makeFolder(event, foldername):  # Create folders
    """Ensure ``<USERPATH>\\Downloads\\<foldername>`` exists and return its
    absolute path.

    Side effect kept from the original: the process working directory is
    changed to the Downloads folder — ``moveToFolder`` relies on this for
    its relative ``os.remove`` call. A voice notification is emitted for
    any file that is not a browser temp/partial download.
    """
    os.chdir(os.path.join(USERPATH, 'Downloads'))
    if extension_type(event) not in ('tmp', 'crdownload'):
        voice('Novo arquivo detectado')
    # exist_ok collapses the original racy exists()/mkdir() branches.
    os.makedirs(str(foldername), exist_ok=True)
    return os.path.join(os.getcwd(), str(foldername))
def moveToFolder(event, path_to_new_folder):  # Move files to inside folders
    """Move ``event.src_path`` into ``path_to_new_folder`` after a grace
    period of ``TIME`` seconds, announcing progress via voice.

    On failure the file is assumed to already exist at the destination and
    the source copy is deleted to avoid duplicates.
    NOTE(review): the bare ``except`` treats *any* failure (permissions,
    missing file, ...) as a duplicate — confirm this is intended.
    Browser temp/partial downloads ('tmp'/'crdownload') are ignored.
    """
    if extension_type(event) not in ('tmp', 'crdownload'):
        try:
            voice('Movendo arquivo em {} segundos...'.format(TIME))
            sleep(TIME)
            shutil.move(event.src_path, path_to_new_folder)
            if event.src_path:
                voice('Arquivo movido com sucesso')
            else:
                pass
        except:
            voice('O arquivo já existe na pasta destino')
            fileName = event.src_path
            # Strip the Downloads prefix so the path is relative to the
            # current working directory (makeFolder chdir'd there).
            file = fileName.replace('{}\\Downloads\\'.format(USERPATH), '')
            try:
                os.remove(file)  # Delete duplicated files
                voice('Deletei o arquivo para evitar duplicidades')
            except:
                pass
            pass
|
from timeit import default_timer
import torch
import torchvision
import torchvision.transforms as transforms
from joblib import Memory
from tqdm import tqdm
from spn.algorithms.Inference import log_likelihood
from spn.algorithms.LearningWrappers import learn_classifier, learn_parametric
from spn.experiments.layers.layers import to_layers, elapsed_timer, to_compressed_layers
from spn.experiments.layers.pytorch import get_torch_spn
from spn.structure.Base import Context, get_nodes_by_type
from spn.structure.leaves.parametric.Parametric import Categorical, Gaussian
import numpy as np
memory = Memory('/tmp/cache', verbose=0, compress=9)
@memory.cache
def get_data():
    """Download (if needed) and load MNIST, returning ``(train, test)``.

    Each split is a single 2-D float tensor whose column 0 is the class
    label and whose remaining columns are the flattened, normalized
    pixels of each image. Results are cached on disk by joblib.
    """
    transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))])
    mnist_train_data = torchvision.datasets.MNIST(root='/tmp/MNIST', train=True, download=True, transform=transform)
    mnist_test_data = torchvision.datasets.MNIST(root='/tmp/MNIST', train=False, download=True, transform=transform)

    def r(ds):
        # Flatten every image into a row vector and prepend its label.
        rows = []
        labels = []
        for d, l in ds:
            rows.append(d.view(-1))
            labels.append(l)
        pixels = torch.stack(rows, dim=0)
        labels = torch.tensor(labels)
        return torch.cat([labels.unsqueeze(1).float(), pixels], dim=1)

    return r(mnist_train_data), r(mnist_test_data)
from spn.algorithms.splitting.Base import split_data_by_clusters
def get_split_cols_random_partition(rand_gen, fail=0.6):
    """Return a column-splitting callback: with probability `fail` it refuses
    to split (single slice), otherwise it halves the scope into two clusters."""
    def splitter(local_data, ds_context, scope):
        if rand_gen.random_sample() < fail:
            # No split this round: hand back everything as one weighted slice.
            return [(local_data, scope, 1.0)]
        clusters = np.zeros_like(scope)
        for cluster_id, idx in enumerate(np.array_split(np.argsort(scope), 2)):
            clusters[idx] = cluster_id
        return split_data_by_clusters(local_data, clusters, scope, rows=False)
    return splitter
@memory.cache
def learn_spn(data, min_inst):
    """Learn an SPN classifier over MNIST rows (column 0 is the Categorical
    class label, the 784 pixel columns are modeled as Gaussians).

    `min_inst` is the minimum number of instances per slice; smaller values
    produce larger networks.  Result is cached on disk by joblib.
    """
    # label_idx=0; columns are split by the random-partition strategy above,
    # rows by k-means (both with a fixed seed for reproducibility).
    spn_classification = learn_classifier(data,
                                          Context(parametric_types=[Categorical] + [Gaussian] * (28 * 28)).add_domains(
                                              data),
                                          learn_parametric, 0,
                                          cols=get_split_cols_random_partition(np.random.RandomState(17)),
                                          rows="kmeans",
                                          min_instances_slice=min_inst)
    return spn_classification
@memory.cache
def to_torch(layers):
    """Disk-cached conversion of SPN layers into a torch module."""
    return get_torch_spn(layers)
# @memory.cache
def spn_torch_cached(spn):
    """Convert an SPN into a torch module via its compressed-layer form.

    NOTE(review): despite the name, caching is disabled (decorator commented
    out) -- presumably the torch module did not serialize cleanly; confirm
    before re-enabling.
    """
    layers = to_compressed_layers(spn)
    return get_torch_spn(layers)
    # return to_torch(layers)
def get_mnist_spn(min_inst):
    """Load MNIST, learn (or load a cached) SPN, and build its torch twin.

    Returns (train tensor, test tensor, spn, torch_spn).
    """
    with elapsed_timer() as e:
        trainds, testds = get_data()
    print('loading data in', e())
    with elapsed_timer() as e:
        spn = learn_spn(trainds.numpy(), min_inst)
    print('learning spn classification', e())
    # NOTE(review): the learned Gaussian means are overwritten with random
    # draws here, which destroys classification quality -- this function
    # appears intended for inference-speed benchmarking only; confirm.
    rng = np.random.RandomState(17)
    for n in get_nodes_by_type(spn, Gaussian):
        n.mean = rng.normal()
    device = 'cpu'
    with elapsed_timer() as e:
        torch_spn = spn_torch_cached(spn).to(device)
    print('to pytorch', e())
    return trainds, testds, spn, torch_spn
def pred_prob_per_class(spn, data):
    """Score every row of `data` under each class label 0-9 and return
    additively-smoothed per-class probabilities of shape (N, 10)."""
    per_class_ll = []
    for label in range(10):
        # Prepend a label column to the pixel data and fix it to `label`.
        batch = torch.cat([torch.zeros((data.shape[0], 1)), data], dim=1)
        batch[:, 0] = label
        per_class_ll.append(spn(batch))
    alpha = 1.0  # additive smoothing strength
    stacked = torch.cat(per_class_ll, dim=1)
    return (torch.nn.functional.softmax(stacked, dim=1) + alpha) / (1 + 10 * alpha)
if __name__ == '__main__':
    # Benchmark: compare log-likelihood evaluation speed of the pytorch SPN
    # against the reference SPFlow implementation, verifying they agree.
    trainds, testds, spn, torch_spn = get_mnist_spn(200)
    traindsnp, testdsnp = trainds.numpy(), testds.numpy()
    device = 'cpu'
    avgll = 0
    pytorchtime = 0
    spftime = 0
    with elapsed_timer() as e:
        # with torch.no_grad():
        for img in tqdm(torch.split(testds, 1024)):
            #pred_prob_per_class(spn, img)
            start = default_timer()
            tll = torch_spn(img).detach().numpy()
            pytorchtime += default_timer() - start
            start = default_timer()
            sll = log_likelihood(spn, img.numpy())
            spftime += default_timer() - start
            # Both implementations should yield numerically identical
            # log-likelihoods; print the residual when they do not.
            if not np.all(np.isclose(tll, sll)):
                print(tll - sll)
                # 0/0
            avgll += tll.sum().item()
    print('elapsed', e())
    print('pytorch time', pytorchtime)
    print('spflow time', spftime)
    # Average test-set log-likelihood per example (pytorch path).
    print(avgll / testds.shape[0])
|
from solcon import *
"""
Test util file, it should be moved into test/ in the future
"""
class Util:
    """Helpers shared by the SolCon ESC unit tests."""

    root = "../../"

    @staticmethod
    def solcon(file, config="config-esc.toml"):
        # Remove '../' to root if you're running it via unit_tests.py
        runner = SolCon(internal_run=True,
                        internal_args=Util.format_args(file, con=config, root=Util.root))
        return runner.cnfv

    @staticmethod
    def format_args(f, con, r=None, root=""):
        """Build the argument dict SolCon expects for an example ESC file."""
        return {
            "f": "{}examples/esc/{}".format(root, f),
            "o": None,
            "c": "{}config/{}".format(root, con),
            "r": r,
            "l": 50,    # critical log level
            "e": True,  # silent output
        }
|
import logging
import os.path
import shutil
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Iterator, List, Type
from common import directories, file_utils
from common.commands.base import ArxivBatchCommand
from common.compile import compile_tex, get_errors, is_driver_unimplemented
from common.types import AbsolutePath, ArxivId, CompilationResult, Path, RelativePath
CompilationPath = AbsolutePath
@dataclass(frozen=True)
class CompilationSummaryEntry:
    """One row of a paper's 'compilation_results.csv' log."""

    outcome: str            # "SUCCESS" or "FAILURE"
    source_path: str        # path of the sources that were compiled
    missing_driver: bool    # True if the colorization driver was unimplemented
    errors: List[str]       # decoded TeX error messages
@dataclass(frozen=True)
class CompilationTask:
    """A single directory of copied TeX sources to compile for one paper."""

    arxiv_id: ArxivId
    compilation_path: Path  # directory the sources were copied into
class CompileTexCommand(ArxivBatchCommand[CompilationTask, CompilationResult], ABC):
    """
    Compile a TeX project, first copying it to a new directory.
    """

    @abstractmethod
    def get_sources_base_dirkey(self) -> str:
        """
        Key for a data directory containing all papers' TeX sources.
        """

    def get_source_dirs(self, arxiv_id: ArxivId) -> Iterator[RelativePath]:
        """
        Get all directories that should be compiled for an arXiv ID. Paths should
        be relative to the sources base dir. This method can be overridden.
        """
        for iteration in directories.iteration_names(
            self.get_sources_base_dirkey(), arxiv_id
        ):
            yield directories.relpath_arxiv_id_iteration(arxiv_id, iteration)

    @abstractmethod
    def get_output_base_dirkey(self) -> str:
        """
        Key for a data directory that will contain all compiled TeX sources.
        """

    def get_arxiv_ids_dirkey(self) -> Path:
        return self.get_sources_base_dirkey()

    def load(self) -> Iterator[CompilationTask]:
        """
        Yield one task per source directory, after copying the sources into a
        freshly cleaned output directory (originals are never compiled in place).
        """
        sources_base_dir = directories.dirpath(self.get_sources_base_dirkey())
        output_base_dir = directories.dirpath(self.get_output_base_dirkey())
        for arxiv_id in self.arxiv_ids:
            # Clean all past output for this arXiv ID.
            output_dir_for_arxiv_id = directories.arxiv_subdir(
                self.get_output_base_dirkey(), arxiv_id
            )
            file_utils.clean_directory(output_dir_for_arxiv_id)

            for source_dir in self.get_source_dirs(arxiv_id):
                qualified_source_dir = os.path.join(sources_base_dir, source_dir)
                output_dir = os.path.join(output_base_dir, source_dir)
                if os.path.exists(output_dir):
                    logging.warning(
                        "Compilation directory already exists in %s. Deleting.",
                        output_dir,
                    )
                    shutil.rmtree(output_dir)
                shutil.copytree(qualified_source_dir, output_dir)
                yield CompilationTask(arxiv_id, output_dir)

    def process(self, item: CompilationTask) -> Iterator[CompilationResult]:
        # Compilation runs in the copied directory from load().
        result = compile_tex(item.compilation_path)
        yield result

    def save(self, item: CompilationTask, result: CompilationResult) -> None:
        file_utils.save_compilation_results(item.compilation_path, result)
        self.update_compilation_log(
            item.arxiv_id, result.stdout, item.compilation_path, result.success
        )

    def update_compilation_log(
        self,
        arxiv_id: ArxivId,
        stdout: bytes,
        source_path: RelativePath,
        success: bool,
    ) -> None:
        """
        Append a summary row for this compilation attempt to the arXiv ID's
        'compilation_results.csv' log.
        """
        arxiv_id_output_root = directories.arxiv_subdir(
            self.get_output_base_dirkey(), arxiv_id
        )
        results_path = os.path.join(arxiv_id_output_root, "compilation_results.csv")
        missing_driver = is_driver_unimplemented(stdout)
        errors = list(get_errors(stdout))

        if missing_driver:
            # Bug fix: the two fragments were previously '+'-joined without a
            # separating space, logging "...for thedriver...".
            logging.warning(
                "Could not compile arXiv ID %s because colorization commands are missing "
                "for the driver needed to compile that TeX project.",
                arxiv_id,
            )

        # Write the compilation result to the log.
        file_utils.append_to_csv(
            results_path,
            CompilationSummaryEntry(
                outcome="SUCCESS" if success else "FAILURE",
                source_path=source_path,
                missing_driver=missing_driver,
                errors=[e.decode("utf-8") for e in errors],
            ),
        )
class CompileTexSources(CompileTexCommand):
    """Compile the original (uncolorized) TeX sources for each paper."""

    @staticmethod
    def get_name() -> str:
        return "compile-tex"

    @staticmethod
    def get_description() -> str:
        return "Compile original TeX sources."

    def get_sources_base_dirkey(self) -> str:
        return "sources"

    def get_source_dirs(self, arxiv_id: ArxivId) -> Iterator[RelativePath]:
        # Original sources have no colorization iterations: just the one
        # escaped-arXiv-ID directory.
        return iter([directories.escape_slashes(arxiv_id)])

    def get_output_base_dirkey(self) -> str:
        return "compiled-sources"
def make_compile_tex_command(entity_name: str) -> Type[CompileTexCommand]:
    """Build a command class that compiles sources colorized for `entity_name`.

    The generated command reads from 'sources-with-colorized-<entity>' and
    writes to 'compiled-sources-with-colorized-<entity>'.
    """
    class C(CompileTexCommand):
        @staticmethod
        def get_name() -> str:
            return f"compile-tex-with-colorized-{entity_name}"

        @staticmethod
        def get_description() -> str:
            return f"Compile TeX sources with colorized {entity_name}."

        def get_sources_base_dirkey(self) -> str:
            return f"sources-with-colorized-{entity_name}"

        def get_output_base_dirkey(self) -> str:
            return f"compiled-sources-with-colorized-{entity_name}"

    return C
|
import pytest
from django.conf import settings
from django.test import RequestFactory
from casa_amparo.users.tests.factories import UserFactory
@pytest.fixture(autouse=True)
def media_storage(settings, tmpdir):
    # Redirect uploaded media into a per-test temp dir so tests never write
    # to the real MEDIA_ROOT; autouse applies it to every test automatically.
    settings.MEDIA_ROOT = tmpdir.strpath
@pytest.fixture
def user() -> settings.AUTH_USER_MODEL:
    """A freshly built user from UserFactory (one per requesting test)."""
    return UserFactory()
@pytest.fixture
def request_factory() -> RequestFactory:
    """Django RequestFactory for building requests without the test client."""
    return RequestFactory()
|
/* Ordered Dictionary object implementation.
This implementation is necessarily explicitly equivalent to the pure Python
OrderedDict class in Lib/collections/__init__.py. The strategy there
involves using a doubly-linked-list to capture the order. We keep to that
strategy, using a lower-level linked-list.
About the Linked-List
=====================
For the linked list we use a basic doubly-linked-list. Using a circularly-
linked-list does have some benefits, but they don't apply so much here
since OrderedDict is focused on the ends of the list (for the most part).
Furthermore, there are some features of generic linked-lists that we simply
don't need for OrderedDict. Thus a simple custom implementation meets our
don't need for OrderedDict.  Thus a simple custom implementation meets our
needs.  Alternatives to our simple approach include the CIRCLEQ_*
macros from BSD's queue.h, and Linux's list.h.
Getting O(1) Node Lookup
------------------------
One invariant of Python's OrderedDict is that it preserves time complexity
of dict's methods, particularly the O(1) operations. Simply adding a
linked-list on top of dict is not sufficient here; operations for nodes in
the middle of the linked-list implicitly require finding the node first.
With a simple linked-list like we're using, that is an O(n) operation.
Consequently, methods like __delitem__() would change from O(1) to O(n),
which is unacceptable.
In order to preserve O(1) performance for node removal (finding nodes), we
must do better than just looping through the linked-list. Here are options
we've considered:
1. use a second dict to map keys to nodes (a la the pure Python version).
2. keep a simple hash table mirroring the order of dict's, mapping each key
to the corresponding node in the linked-list.
3. use a version of shared keys (split dict) that allows non-unicode keys.
4. have the value stored for each key be a (value, node) pair, and adjust
__getitem__(), get(), etc. accordingly.
The approach with the least performance impact (time and space) is #2,
mirroring the key order of dict's dk_entries with an array of node pointers.
While lookdict() and friends (dk_lookup) don't give us the index into the
array, we make use of pointer arithmetic to get that index. An alternative
would be to refactor lookdict() to provide the index, explicitly exposing
the implementation detail. We could even just use a custom lookup function
for OrderedDict that facilitates our need. However, both approaches are
significantly more complicated than just using pointer arithmetic.
The catch with mirroring the hash table ordering is that we have to keep
the ordering in sync through any dict resizes. However, that order only
matters during node lookup. We can simply defer any potential resizing
until we need to do a lookup.
Linked-List Nodes
-----------------
The current implementation stores a pointer to the associated key only.
One alternative would be to store a pointer to the PyDictKeyEntry instead.
This would save one pointer de-reference per item, which is nice during
calls to values() and items(). However, it adds unnecessary overhead
otherwise, so we stick with just the key.
Linked-List API
---------------
As noted, the linked-list implemented here does not have all the bells and
whistles. However, we recognize that the implementation may need to
change to accommodate performance improvements or extra functionality. To
that end, We use a simple API to interact with the linked-list. Here's a
summary of the methods/macros:
Node info:
* _odictnode_KEY(node)
* _odictnode_VALUE(od, node)
* _odictnode_PREV(node)
* _odictnode_NEXT(node)
Linked-List info:
* _odict_FIRST(od)
* _odict_LAST(od)
* _odict_EMPTY(od)
* _odict_FOREACH(od, node) - used in place of `for (node=...)`
For adding nodes:
* _odict_add_head(od, node)
* _odict_add_tail(od, node)
* _odict_add_new_node(od, key, hash)
For removing nodes:
* _odict_clear_node(od, node, key, hash)
* _odict_clear_nodes(od, clear_each)
Others:
* _odict_find_node_hash(od, key, hash)
* _odict_find_node(od, key)
* _odict_keys_equal(od1, od2)
Used, but specific to the linked-list implementation:
* _odict_free_fast_nodes(od)
And here's a look at how the linked-list relates to the OrderedDict API:
============ === === ==== ==== ==== === ==== ===== ==== ==== === ==== === ===
method key val prev next mem 1st last empty iter find add rmv clr keq
============ === === ==== ==== ==== === ==== ===== ==== ==== === ==== === ===
__del__ ~ X
__delitem__ free ~ node
__eq__ ~ X
__iter__ X X
__new__ X X
__reduce__ X ~ X
__repr__ X X X
__reversed__ X X
__setitem__ key
__sizeof__ size X
clear ~ ~ X
copy X X X
items X X X
keys X X
move_to_end X X X ~ h/t key
pop free key
popitem X X free X X node
setdefault ~ ? ~
values X X
============ === === ==== ==== ==== === ==== ===== ==== ==== === ==== === ===
__delitem__ is the only method that directly relies on finding an arbitrary
node in the linked-list. Everything else is iteration or relates to the
ends of the linked-list.
Situation that Endangers Consistency
------------------------------------
Using a raw linked-list for OrderedDict exposes a key situation that can
cause problems. If a node is stored in a variable, there is a chance that
the node may have been deallocated before the variable gets used, thus
potentially leading to a segmentation fault. A key place where this shows
up is during iteration through the linked list (via _odict_FOREACH or
otherwise).
A number of solutions are available to resolve this situation:
* defer looking up the node until as late as possible and certainly after
any code that could possibly result in a deletion;
* if the node is needed both before and after a point where the node might
be removed, do a check before using the node at the "after" location to
see if the node is still valid;
* like the last one, but simply pull the node again to ensure it's right;
* keep the key in the variable instead of the node and then look up the
node using the key at the point where the node is needed (this is what
we do for the iterators).
Another related problem, preserving consistent ordering during iteration,
is described below. That one is not exclusive to using linked-lists.
Challenges from Subclassing dict
================================
OrderedDict subclasses dict, which is an unusual relationship between two
builtin types (other than the base object type). Doing so results in
some complication and deserves further explanation. There are two things
to consider here. First, in what circumstances or with what adjustments
can OrderedDict be used as a drop-in replacement for dict (at the C level)?
Second, how can the OrderedDict implementation leverage the dict
implementation effectively without introducing unnecessary coupling or
inefficiencies?
This second point is reflected here and in the implementation, so the
further focus is on the first point. It is worth noting that for
overridden methods, the dict implementation is deferred to as much as
possible. Furthermore, coupling is limited to as little as is reasonable.
Concrete API Compatibility
--------------------------
Use of the concrete C-API for dict (PyDict_*) with OrderedDict is
problematic. (See http://bugs.python.org/issue10977.) The concrete API
has a number of hard-coded assumptions tied to the dict implementation.
This is, in part, due to performance reasons, which is understandable
given the part dict plays in Python.
Any attempt to replace dict with OrderedDict for any role in the
interpreter (e.g. **kwds) faces a challenge.  Any such effort must
recognize that the instances in affected locations currently interact with
the concrete API.
Here are some ways to address this challenge:
1. Change the relevant usage of the concrete API in CPython and add
PyDict_CheckExact() calls to each of the concrete API functions.
2. Adjust the relevant concrete API functions to explicitly accommodate
OrderedDict.
3. As with #1, add the checks, but improve the abstract API with smart fast
paths for dict and OrderedDict, and refactor CPython to use the abstract
API. Improvements to the abstract API would be valuable regardless.
Adding the checks to the concrete API would help make any interpreter
switch to OrderedDict less painful for extension modules. However, this
won't work. The equivalent C API call to `dict.__setitem__(obj, k, v)`
is 'PyDict_SetItem(obj, k, v)`. This illustrates how subclasses in C call
the base class's methods, since there is no equivalent of super() in the
C API. Calling into Python for parent class API would work, but some
extension modules already rely on this feature of the concrete API.
For reference, here is a breakdown of some of the dict concrete API:
========================== ============= =======================
concrete API uses abstract API
========================== ============= =======================
PyDict_Check PyMapping_Check
(PyDict_CheckExact) -
(PyDict_New) -
(PyDictProxy_New) -
PyDict_Clear -
PyDict_Contains PySequence_Contains
PyDict_Copy -
PyDict_SetItem PyObject_SetItem
PyDict_SetItemString PyMapping_SetItemString
PyDict_DelItem PyMapping_DelItem
PyDict_DelItemString PyMapping_DelItemString
PyDict_GetItem -
PyDict_GetItemWithError PyObject_GetItem
_PyDict_GetItemIdWithError -
PyDict_GetItemString PyMapping_GetItemString
PyDict_Items PyMapping_Items
PyDict_Keys PyMapping_Keys
PyDict_Values PyMapping_Values
PyDict_Size PyMapping_Size
PyMapping_Length
PyDict_Next PyIter_Next
_PyDict_Next -
PyDict_Merge -
PyDict_Update -
PyDict_MergeFromSeq2 -
PyDict_ClearFreeList -
- PyMapping_HasKeyString
- PyMapping_HasKey
========================== ============= =======================
The dict Interface Relative to OrderedDict
==========================================
Since OrderedDict subclasses dict, understanding the various methods and
attributes of dict is important for implementing OrderedDict.
Relevant Type Slots
-------------------
================= ================ =================== ================
slot attribute object dict
================= ================ =================== ================
tp_dealloc - object_dealloc dict_dealloc
tp_repr __repr__ object_repr dict_repr
sq_contains __contains__ - dict_contains
mp_length __len__ - dict_length
mp_subscript __getitem__ - dict_subscript
mp_ass_subscript __setitem__ - dict_ass_sub
__delitem__
tp_hash __hash__ _Py_HashPointer ..._HashNotImpl
tp_str __str__ object_str -
tp_getattro __getattribute__ ..._GenericGetAttr (repeated)
__getattr__
tp_setattro __setattr__ ..._GenericSetAttr (disabled)
tp_doc __doc__ (literal) dictionary_doc
tp_traverse - - dict_traverse
tp_clear - - dict_tp_clear
tp_richcompare __eq__ object_richcompare dict_richcompare
__ne__
tp_weaklistoffset (__weakref__) - -
tp_iter __iter__ - dict_iter
tp_dictoffset (__dict__) - -
tp_init __init__ object_init dict_init
tp_alloc - PyType_GenericAlloc (repeated)
tp_new __new__ object_new dict_new
tp_free - PyObject_Del PyObject_GC_Del
================= ================ =================== ================
Relevant Methods
----------------
================ =================== ===============
method object dict
================ =================== ===============
__reduce__ object_reduce -
__sizeof__ object_sizeof dict_sizeof
clear - dict_clear
copy - dict_copy
fromkeys - dict_fromkeys
get - dict_get
items - dictitems_new
keys - dictkeys_new
pop - dict_pop
popitem - dict_popitem
setdefault - dict_setdefault
update - dict_update
values - dictvalues_new
================ =================== ===============
Pure Python OrderedDict
=======================
As already noted, compatibility with the pure Python OrderedDict
implementation is a key goal of this C implementation. To further that
goal, here's a summary of how OrderedDict-specific methods are implemented
in collections/__init__.py. Also provided is an indication of which
methods directly mutate or iterate the object, as well as any relationship
with the underlying linked-list.
============= ============== == ================ === === ====
method impl used ll uses inq mut iter
============= ============== == ================ === === ====
__contains__ dict - - X
__delitem__ OrderedDict Y dict.__delitem__ X
__eq__ OrderedDict N OrderedDict ~
dict.__eq__
__iter__
__getitem__ dict - - X
__iter__ OrderedDict Y - X
__init__ OrderedDict N update
__len__ dict - - X
__ne__ MutableMapping - __eq__ ~
__reduce__ OrderedDict N OrderedDict ~
__iter__
__getitem__
__repr__ OrderedDict N __class__ ~
items
__reversed__ OrderedDict Y - X
__setitem__ OrderedDict Y __contains__ X
dict.__setitem__
__sizeof__ OrderedDict Y __len__ ~
__dict__
clear OrderedDict Y dict.clear X
copy OrderedDict N __class__
__init__
fromkeys OrderedDict N __setitem__
get dict - - ~
items MutableMapping - ItemsView X
keys MutableMapping - KeysView X
move_to_end OrderedDict Y - X
pop OrderedDict N __contains__ X
__getitem__
__delitem__
popitem OrderedDict Y dict.pop X
setdefault OrderedDict N __contains__ ~
__getitem__
__setitem__
update MutableMapping - __setitem__ ~
values MutableMapping - ValuesView X
============= ============== == ================ === === ====
__reversed__ and move_to_end are both exclusive to OrderedDict.
C OrderedDict Implementation
============================
================= ================
slot impl
================= ================
tp_dealloc odict_dealloc
tp_repr odict_repr
mp_ass_subscript odict_ass_sub
tp_doc odict_doc
tp_traverse odict_traverse
tp_clear odict_tp_clear
tp_richcompare odict_richcompare
tp_weaklistoffset (offset)
tp_iter odict_iter
tp_dictoffset (offset)
tp_init odict_init
tp_alloc (repeated)
tp_new odict_new
================= ================
================= ================
method impl
================= ================
__reduce__ odict_reduce
__sizeof__ odict_sizeof
clear odict_clear
copy odict_copy
fromkeys odict_fromkeys
items odictitems_new
keys odictkeys_new
pop odict_pop
popitem odict_popitem
setdefault odict_setdefault
update odict_update
values odictvalues_new
================= ================
Inherited unchanged from object/dict:
================ ==========================
method type field
================ ==========================
- tp_free
__contains__ tp_as_sequence.sq_contains
__getattr__ tp_getattro
__getattribute__ tp_getattro
__getitem__ tp_as_mapping.mp_subscript
__hash__ tp_hash
__len__ tp_as_mapping.mp_length
__setattr__ tp_setattro
__str__ tp_str
get -
================ ==========================
Other Challenges
================
Preserving Ordering During Iteration
------------------------------------
During iteration through an OrderedDict, it is possible that items could
get added, removed, or reordered. For a linked-list implementation, as
with some other implementations, that situation may lead to undefined
behavior. The documentation for dict mentions this in the `iter()` section
of http://docs.python.org/3.4/library/stdtypes.html#dictionary-view-objects.
In this implementation we follow dict's lead (as does the pure Python
implementation) for __iter__(), keys(), values(), and items().
For internal iteration (using _odict_FOREACH or not), there is still the
risk that not all nodes that we expect to be seen in the loop actually get
seen. Thus, we are careful in each of those places to ensure that they
are. This comes, of course, at a small price at each location. The
solutions are much the same as those detailed in the `Situation that
Endangers Consistency` section above.
Potential Optimizations
=======================
* Allocate the nodes as a block via od_fast_nodes instead of individually.
- Set node->key to NULL to indicate the node is not-in-use.
- Add _odict_EXISTS()?
- How to maintain consistency across resizes? Existing node pointers
would be invalidated after a resize, which is particularly problematic
for the iterators.
* Use a more stream-lined implementation of update() and, likely indirectly,
__init__().
*/
/* TODO
sooner:
- reentrancy (make sure everything is at a thread-safe state when calling
into Python). I've already checked this multiple times, but want to
make one more pass.
- add unit tests for reentrancy?
later:
- make the dict views support the full set API (the pure Python impl does)
- implement a fuller MutableMapping API in C?
- move the MutableMapping implementation to abstract.c?
- optimize mutablemapping_update
- use PyObject_MALLOC (small object allocator) for odict nodes?
- support subclasses better (e.g. in odict_richcompare)
*/
#include "Python.h"
#include "structmember.h"
#include "dict-common.h"
#include <stddef.h>
typedef struct _odictnode _ODictNode;

/* PyODictObject */
struct _odictobject {
    PyDictObject od_dict; /* the underlying dict */
    _ODictNode *od_first; /* first node in the linked list, if any */
    _ODictNode *od_last; /* last node in the linked list, if any */
    /* od_fast_nodes, od_fast_nodes_size and od_resize_sentinel are managed
     * by _odict_resize().
     * Note that we rely on implementation details of dict for both. */
    _ODictNode **od_fast_nodes; /* hash table that mirrors the dict table */
    Py_ssize_t od_fast_nodes_size;
    void *od_resize_sentinel; /* changes if odict should be resized */
    size_t od_state; /* incremented whenever the LL changes */
    PyObject *od_inst_dict; /* OrderedDict().__dict__ */
    PyObject *od_weakreflist; /* holds weakrefs to the odict */
};

/* ----------------------------------------------
 * odict keys (a simple doubly-linked list)
 */

/* One node per key, in insertion order.  The node holds only the key (a
 * strong reference) and its cached hash; values live in the dict alone. */
struct _odictnode {
    PyObject *key;
    Py_hash_t hash;
    _ODictNode *next;
    _ODictNode *prev;
};

#define _odictnode_KEY(node) \
    (node->key)
#define _odictnode_HASH(node) \
    (node->hash)
/* borrowed reference */
#define _odictnode_VALUE(node, od) \
    PyODict_GetItemWithError((PyObject *)od, _odictnode_KEY(node))
#define _odictnode_PREV(node) (node->prev)
#define _odictnode_NEXT(node) (node->next)

#define _odict_FIRST(od) (((PyODictObject *)od)->od_first)
#define _odict_LAST(od) (((PyODictObject *)od)->od_last)
#define _odict_EMPTY(od) (_odict_FIRST(od) == NULL)
/* Iterate nodes in insertion order.  See "Situation that Endangers
 * Consistency" above: nodes removed inside the loop body invalidate it. */
#define _odict_FOREACH(od, node) \
    for (node = _odict_FIRST(od); node != NULL; node = _odictnode_NEXT(node))

/* Size of dict's key table; od_fast_nodes mirrors a table of this size. */
#define _odict_FAST_SIZE(od) ((PyDictObject *)od)->ma_keys->dk_size
/* Release the fast-nodes table, if one was ever allocated. */
static void
_odict_free_fast_nodes(PyODictObject *od) {
    if (od->od_fast_nodes == NULL)
        return;
    PyMem_FREE(od->od_fast_nodes);
}
/* Return the index into the hash table, regardless of a valid node. */
static Py_ssize_t
_odict_get_index_raw(PyODictObject *od, PyObject *key, Py_hash_t hash)
{
    PyObject **value_addr = NULL;
    PyDictKeysObject *keys = ((PyDictObject *)od)->ma_keys;
    Py_ssize_t ix;

    /* Delegate probing to dict's own lookup function so indices always
     * agree with dict's dk_entries (see "Getting O(1) Node Lookup" above). */
    ix = (keys->dk_lookup)((PyDictObject *)od, key, hash, &value_addr, NULL);
    if (ix == DKIX_EMPTY) {
        return keys->dk_nentries;  /* index of new entry */
    }
    if (ix < 0)
        return -1;  /* lookup failed (an exception may be set) */
    /* We use pointer arithmetic to get the entry's index into the table. */
    return ix;
}
/* Replace od->od_fast_nodes with a new table matching the size of dict's.
   Returns 0 on success, -1 (with an exception set) on failure; on failure
   the old table is left untouched. */
static int
_odict_resize(PyODictObject *od) {
    Py_ssize_t size, i;
    _ODictNode **fast_nodes, *node;

    /* Initialize a new "fast nodes" table. */
    size = ((PyDictObject *)od)->ma_keys->dk_size;
    fast_nodes = PyMem_NEW(_ODictNode *, size);
    if (fast_nodes == NULL) {
        PyErr_NoMemory();
        return -1;
    }
    for (i = 0; i < size; i++)
        fast_nodes[i] = NULL;

    /* Copy the current nodes into the table. */
    _odict_FOREACH(od, node) {
        i = _odict_get_index_raw(od, _odictnode_KEY(node),
                                 _odictnode_HASH(node));
        if (i < 0) {
            PyMem_FREE(fast_nodes);
            return -1;
        }
        fast_nodes[i] = node;
    }

    /* Replace the old fast nodes table. */
    _odict_free_fast_nodes(od);
    od->od_fast_nodes = fast_nodes;
    od->od_fast_nodes_size = size;
    /* Remember which key table this mirror was built against, so
     * _odict_get_index() can detect when dict resizes underneath us. */
    od->od_resize_sentinel = ((PyDictObject *)od)->ma_keys;
    return 0;
}
/* Return the index into od_fast_nodes for `key`, first rebuilding the
   mirror table if dict's key table has changed since the last resize. */
static Py_ssize_t
_odict_get_index(PyODictObject *od, PyObject *key, Py_hash_t hash)
{
    PyDictKeysObject *keys;

    assert(key != NULL);
    keys = ((PyDictObject *)od)->ma_keys;

    /* Lazily re-sync od_fast_nodes with dk_entries (deferred resize). */
    if (od->od_resize_sentinel != keys ||
            od->od_fast_nodes_size != keys->dk_size) {
        if (_odict_resize(od) < 0)
            return -1;
    }

    return _odict_get_index_raw(od, key, hash);
}
/* Returns NULL if there was some error or the key was not found. */
static _ODictNode *
_odict_find_node_hash(PyODictObject *od, PyObject *key, Py_hash_t hash)
{
    Py_ssize_t ix;

    if (_odict_EMPTY(od))
        return NULL;
    ix = _odict_get_index(od, key, hash);
    return (ix < 0) ? NULL : od->od_fast_nodes[ix];
}
/* As _odict_find_node_hash(), but computes the key's hash itself. */
static _ODictNode *
_odict_find_node(PyODictObject *od, PyObject *key)
{
    Py_ssize_t ix;
    Py_hash_t hash;

    if (_odict_EMPTY(od))
        return NULL;
    hash = PyObject_Hash(key);
    if (hash == -1)
        return NULL;
    ix = _odict_get_index(od, key, hash);
    return (ix < 0) ? NULL : od->od_fast_nodes[ix];
}
/* Link `node` in as the new first element of the list. */
static void
_odict_add_head(PyODictObject *od, _ODictNode *node)
{
    _ODictNode *old_first = _odict_FIRST(od);

    _odictnode_PREV(node) = NULL;
    _odictnode_NEXT(node) = old_first;
    if (old_first == NULL)
        _odict_LAST(od) = node;   /* list was empty: node is also the tail */
    else
        _odictnode_PREV(old_first) = node;
    _odict_FIRST(od) = node;
    od->od_state++;               /* the linked list changed */
}
/* Link `node` in as the new last element of the list. */
static void
_odict_add_tail(PyODictObject *od, _ODictNode *node)
{
    _ODictNode *old_last = _odict_LAST(od);

    _odictnode_PREV(node) = old_last;
    _odictnode_NEXT(node) = NULL;
    if (old_last == NULL)
        _odict_FIRST(od) = node;  /* list was empty: node is also the head */
    else
        _odictnode_NEXT(old_last) = node;
    _odict_LAST(od) = node;
    od->od_state++;               /* the linked list changed */
}
/* adds the node to the end of the list
   Returns 0 on success (including when the key already has a node),
   -1 with an exception set on failure. */
static int
_odict_add_new_node(PyODictObject *od, PyObject *key, Py_hash_t hash)
{
    Py_ssize_t i;
    _ODictNode *node;

    Py_INCREF(key);  /* the node will hold a strong reference to the key */
    i = _odict_get_index(od, key, hash);
    if (i < 0) {
        if (!PyErr_Occurred())
            PyErr_SetObject(PyExc_KeyError, key);
        Py_DECREF(key);  /* node not created: release the reference */
        return -1;
    }
    else if (od->od_fast_nodes[i] != NULL) {
        /* We already have a node for the key so there's no need to add one. */
        Py_DECREF(key);
        return 0;
    }

    /* must not be added yet */
    node = (_ODictNode *)PyMem_MALLOC(sizeof(_ODictNode));
    if (node == NULL) {
        Py_DECREF(key);
        PyErr_NoMemory();
        return -1;
    }

    /* Transfer our key reference into the node, append, and mirror it. */
    _odictnode_KEY(node) = key;
    _odictnode_HASH(node) = hash;
    _odict_add_tail(od, node);
    od->od_fast_nodes[i] = node;
    return 0;
}
/* Putting the decref after the free causes problems.
   (Freeing first would make Py_DECREF read node->key out of freed memory.) */
#define _odictnode_DEALLOC(node) \
    do { \
        Py_DECREF(_odictnode_KEY(node)); \
        PyMem_FREE((void *)node); \
    } while (0)
/* Repeated calls on the same node are no-ops. */
static void
_odict_remove_node(PyODictObject *od, _ODictNode *node)
{
    /* Unlink `node`, patching the head/tail pointers when it sits at an
     * end of the list.  The else-if guards make an already-unlinked node
     * (prev/next both NULL, not the head/tail) fall through harmlessly. */
    if (_odict_FIRST(od) == node)
        _odict_FIRST(od) = _odictnode_NEXT(node);
    else if (_odictnode_PREV(node) != NULL)
        _odictnode_NEXT(_odictnode_PREV(node)) = _odictnode_NEXT(node);

    if (_odict_LAST(od) == node)
        _odict_LAST(od) = _odictnode_PREV(node);
    else if (_odictnode_NEXT(node) != NULL)
        _odictnode_PREV(_odictnode_NEXT(node)) = _odictnode_PREV(node);

    /* NULLing the links is what makes repeated calls no-ops. */
    _odictnode_PREV(node) = NULL;
    _odictnode_NEXT(node) = NULL;
    od->od_state++;  /* the linked list changed */
}
/* If someone calls PyDict_DelItem() directly on an OrderedDict, we'll
   get all sorts of problems here.  In PyODict_DelItem we make sure to
   call _odict_clear_node first.

   This matters in the case of colliding keys.  Suppose we add 3 keys:
   [A, B, C], where the hash of C collides with A and the next possible
   index in the hash table is occupied by B.  If we remove B then for C
   the dict's looknode func will give us the old index of B instead of
   the index we got before deleting B.  However, the node for C in
   od_fast_nodes is still at the old dict index of C.  Thus to be sure
   things don't get out of sync, we clear the node in od_fast_nodes
   *before* calling PyDict_DelItem.

   The same must be done for any other OrderedDict operations where
   we modify od_fast_nodes.
*/

/* Remove the linked-list node for `key` (if any) from both the list and
   the od_fast_nodes index, then deallocate it.  `node` may be NULL, in
   which case it is looked up via the index.  Returns 0 on success or when
   there is nothing to clear, -1 on error.  Never raises KeyError itself —
   callers decide whether a missing key is an error. */
static int
_odict_clear_node(PyODictObject *od, _ODictNode *node, PyObject *key,
                  Py_hash_t hash)
{
    Py_ssize_t i;

    assert(key != NULL);
    if (_odict_EMPTY(od)) {
        /* Let later code decide if this is a KeyError. */
        return 0;
    }

    i = _odict_get_index(od, key, hash);
    if (i < 0)
        return PyErr_Occurred() ? -1 : 0;

    if (node == NULL)
        node = od->od_fast_nodes[i];
    assert(node == od->od_fast_nodes[i]);
    if (node == NULL) {
        /* Let later code decide if this is a KeyError. */
        return 0;
    }

    /* Now clear the node — index slot first, per the comment above. */
    od->od_fast_nodes[i] = NULL;
    _odict_remove_node(od, node);
    _odictnode_DEALLOC(node);
    return 0;
}
/* Release every linked-list node (and the fast-nodes index) owned by od,
   leaving the list empty.  The underlying dict's entries are untouched. */
static void
_odict_clear_nodes(PyODictObject *od)
{
    _ODictNode *cur;

    _odict_free_fast_nodes(od);
    od->od_fast_nodes = NULL;

    /* Detach the list head/tail before walking, then free node by node. */
    cur = _odict_FIRST(od);
    _odict_FIRST(od) = NULL;
    _odict_LAST(od) = NULL;
    while (cur != NULL) {
        _ODictNode *following = _odictnode_NEXT(cur);
        _odictnode_DEALLOC(cur);
        cur = following;
    }
}

/* There isn't any memory management of nodes past this point. */
#undef _odictnode_DEALLOC
/* Order-sensitive key comparison: return 1 if a and b hold equal keys in
   the same order, 0 if not, -1 if a comparison raised. */
static int
_odict_keys_equal(PyODictObject *a, PyODictObject *b)
{
    _ODictNode *na = _odict_FIRST(a);
    _ODictNode *nb = _odict_FIRST(b);

    while (na != NULL && nb != NULL) {
        int cmp = PyObject_RichCompareBool(
            (PyObject *)_odictnode_KEY(na),
            (PyObject *)_odictnode_KEY(nb),
            Py_EQ);
        if (cmp < 0)
            return cmp;     /* exception during comparison */
        if (cmp == 0)
            return 0;       /* mismatched key */
        na = _odictnode_NEXT(na);
        nb = _odictnode_NEXT(nb);
    }
    /* Equal only if both lists were exhausted at the same time;
       otherwise the lengths differ. */
    return (na == NULL && nb == NULL);
}
/* ----------------------------------------------
 * OrderedDict mapping methods
 */

/* mp_ass_subscript: __setitem__() and __delitem__() */
static int
odict_mp_ass_sub(PyODictObject *od, PyObject *v, PyObject *w)
{
    /* A NULL value means deletion, per the mp_ass_subscript protocol. */
    if (w != NULL)
        return PyODict_SetItem((PyObject *)od, v, w);
    return PyODict_DelItem((PyObject *)od, v);
}
/* tp_as_mapping */
/* Only mp_ass_subscript is overridden; length and subscript lookups are
   inherited from the dict base type (slots left 0). */
static PyMappingMethods odict_as_mapping = {
    0,                                  /*mp_length*/
    0,                                  /*mp_subscript*/
    (objobjargproc)odict_mp_ass_sub,    /*mp_ass_subscript*/
};
/* ----------------------------------------------
 * OrderedDict methods
 */

/* __delitem__() */
PyDoc_STRVAR(odict_delitem__doc__, "od.__delitem__(y) <==> del od[y]");

/* __eq__() */
PyDoc_STRVAR(odict_eq__doc__,
"od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive \n\
while comparison to a regular mapping is order-insensitive.\n\
");

/* forward */
static PyObject * odict_richcompare(PyObject *v, PyObject *w, int op);

/* Thin wrapper so the methods table can expose __eq__. */
static PyObject *
odict_eq(PyObject *a, PyObject *b)
{
    return odict_richcompare(a, b, Py_EQ);
}

/* __init__() */
PyDoc_STRVAR(odict_init__doc__,
"Initialize an ordered dictionary. The signature is the same as\n\
regular dictionaries. Keyword argument order is preserved.\n\
\n\
");

/* forward */
static int odict_init(PyObject *self, PyObject *args, PyObject *kwds);

/* __iter__() */
PyDoc_STRVAR(odict_iter__doc__, "od.__iter__() <==> iter(od)");

static PyObject * odict_iter(PyODictObject *self);  /* forward */

/* __ne__() */
/* Mapping.__ne__() does not have a docstring. */
PyDoc_STRVAR(odict_ne__doc__, "");

/* Thin wrapper so the methods table can expose __ne__. */
static PyObject *
odict_ne(PyObject *a, PyObject *b)
{
    return odict_richcompare(a, b, Py_NE);
}

/* __repr__() */
PyDoc_STRVAR(odict_repr__doc__, "od.__repr__() <==> repr(od)");

static PyObject * odict_repr(PyODictObject *self);  /* forward */

/* __setitem__() */
PyDoc_STRVAR(odict_setitem__doc__, "od.__setitem__(i, y) <==> od[i]=y");

/* fromkeys() */
PyDoc_STRVAR(odict_fromkeys__doc__,
"OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.\n\
If not specified, the value defaults to None.\n\
\n\
");
/* fromkeys() classmethod: build a new mapping of type `cls` whose keys come
   from `iterable`, all mapped to `value` (default None). */
static PyObject *
odict_fromkeys(PyObject *cls, PyObject *args, PyObject *kwargs)
{
    static char *kwlist[] = {"iterable", "value", 0};
    PyObject *seq;              /* borrowed */
    PyObject *value = Py_None;  /* borrowed */

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O:fromkeys", kwlist,
                                     &seq, &value))
        return NULL;

    /* Delegate to the shared dict implementation, which instantiates cls. */
    return _PyDict_FromKeys(cls, seq, value);
}
/* __sizeof__() */
/* OrderedDict.__sizeof__() does not have a docstring. */
PyDoc_STRVAR(odict_sizeof__doc__, "");

/* Report the dict's own footprint plus the odict bookkeeping: the
   od_fast_nodes index array and (when non-empty) the linked-list nodes. */
static PyObject *
odict_sizeof(PyODictObject *od)
{
    Py_ssize_t total = _PyDict_SizeOf((PyDictObject *)od);
    total += sizeof(_ODictNode *) * _odict_FAST_SIZE(od);  /* od_fast_nodes */
    if (!_odict_EMPTY(od))
        total += sizeof(_ODictNode) * PyODict_SIZE(od);    /* linked list */
    return PyLong_FromSsize_t(total);
}
/* __reduce__() */
PyDoc_STRVAR(odict_reduce__doc__, "Return state information for pickling");

/* Build the 5-tuple pickle value:
   (type, (), instance-state-or-None, None, iterator-of-items). */
static PyObject *
odict_reduce(register PyODictObject *od)
{
    _Py_IDENTIFIER(__dict__);
    _Py_IDENTIFIER(items);
    PyObject *dict = NULL, *result = NULL;
    PyObject *items_iter, *items, *args = NULL;

    /* capture any instance state */
    dict = _PyObject_GetAttrId((PyObject *)od, &PyId___dict__);
    if (dict == NULL)
        goto Done;
    else {
        /* od.__dict__ isn't necessarily a dict... */
        Py_ssize_t dict_len = PyObject_Length(dict);
        if (dict_len == -1)
            goto Done;
        if (!dict_len) {
            /* nothing to pickle in od.__dict__ */
            Py_CLEAR(dict);
        }
    }

    /* build the result */
    args = PyTuple_New(0);
    if (args == NULL)
        goto Done;

    items = _PyObject_CallMethodIdObjArgs((PyObject *)od, &PyId_items, NULL);
    if (items == NULL)
        goto Done;

    /* Pickle an iterator over the items, not the items object itself. */
    items_iter = PyObject_GetIter(items);
    Py_DECREF(items);
    if (items_iter == NULL)
        goto Done;

    result = PyTuple_Pack(5, Py_TYPE(od), args, dict ? dict : Py_None, Py_None, items_iter);
    Py_DECREF(items_iter);

Done:
    Py_XDECREF(dict);
    Py_XDECREF(args);
    return result;
}
/* setdefault() */
PyDoc_STRVAR(odict_setdefault__doc__,
"od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od");

/* Skips __missing__() calls. */
/* Fast path for exact OrderedDicts; subclasses go through the generic
   object protocols so overridden __getitem__/__setitem__ are honored. */
static PyObject *
odict_setdefault(register PyODictObject *od, PyObject *args, PyObject *kwargs)
{
    static char *kwlist[] = {"key", "default", 0};
    PyObject *key, *result = NULL;
    PyObject *failobj = Py_None;

    /* both borrowed */
    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O:setdefault", kwlist,
                                     &key, &failobj)) {
        return NULL;
    }

    if (PyODict_CheckExact(od)) {
        result = PyODict_GetItemWithError(od, key);  /* borrowed */
        if (result == NULL) {
            if (PyErr_Occurred())
                return NULL;
            assert(_odict_find_node(od, key) == NULL);
            /* Key absent: insert the default and return it. */
            if (PyODict_SetItem((PyObject *)od, key, failobj) >= 0) {
                result = failobj;
                Py_INCREF(failobj);
            }
        }
        else {
            Py_INCREF(result);
        }
    }
    else {
        int exists = PySequence_Contains((PyObject *)od, key);
        if (exists < 0) {
            return NULL;
        }
        else if (exists) {
            result = PyObject_GetItem((PyObject *)od, key);
        }
        else if (PyObject_SetItem((PyObject *)od, key, failobj) >= 0) {
            result = failobj;
            Py_INCREF(failobj);
        }
    }

    return result;  /* new reference, or NULL with an exception set */
}
/* pop() */
PyDoc_STRVAR(odict_pop__doc__,
"od.pop(k[,d]) -> v, remove specified key and return the corresponding\n\
value. If key is not found, d is returned if given, otherwise KeyError\n\
is raised.\n\
\n\
");

/* forward */
static PyObject * _odict_popkey(PyObject *, PyObject *, PyObject *);

/* Skips __missing__() calls. */
static PyObject *
odict_pop(PyObject *od, PyObject *args, PyObject *kwargs)
{
    static char *kwlist[] = {"key", "default", 0};
    PyObject *key;              /* borrowed */
    PyObject *failobj = NULL;   /* borrowed; NULL -> raise KeyError */

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O:pop", kwlist,
                                     &key, &failobj))
        return NULL;

    return _odict_popkey(od, key, failobj);
}
/* Remove `key` and return its value (new reference).  If the key is absent,
   return a new reference to `failobj`, or set KeyError when failobj is NULL.
   Returns NULL with an exception set on error. */
static PyObject *
_odict_popkey_hash(PyObject *od, PyObject *key, PyObject *failobj,
                   Py_hash_t hash)
{
    _ODictNode *node;
    PyObject *value = NULL;

    /* Pop the node first to avoid a possible dict resize (due to
       eval loop reentrancy) and complications due to hash collision
       resolution. */
    node = _odict_find_node_hash((PyODictObject *)od, key, hash);
    if (node == NULL) {
        if (PyErr_Occurred())
            return NULL;
        /* missing key: fall through to the failobj handling below */
    }
    else {
        int res = _odict_clear_node((PyODictObject *)od, node, key, hash);
        if (res < 0) {
            return NULL;
        }
    }

    /* Now delete the value from the dict. */
    if (PyODict_CheckExact(od)) {
        if (node != NULL) {
            value = _PyDict_GetItem_KnownHash(od, key, hash);  /* borrowed */
            if (value != NULL) {
                Py_INCREF(value);
                if (_PyDict_DelItem_KnownHash(od, key, hash) < 0) {
                    Py_DECREF(value);
                    return NULL;
                }
            }
        }
    }
    else {
        /* Subclass: use the generic protocols so overrides are honored. */
        int exists = PySequence_Contains(od, key);
        if (exists < 0)
            return NULL;
        if (exists) {
            value = PyObject_GetItem(od, key);
            if (value != NULL) {
                if (PyObject_DelItem(od, key) == -1) {
                    Py_CLEAR(value);
                }
            }
        }
    }

    /* Apply the fallback value, if necessary. */
    if (value == NULL && !PyErr_Occurred()) {
        if (failobj) {
            value = failobj;
            Py_INCREF(failobj);
        }
        else {
            PyErr_SetObject(PyExc_KeyError, key);
        }
    }

    return value;
}
/* Hash the key once, then delegate to the known-hash pop. */
static PyObject *
_odict_popkey(PyObject *od, PyObject *key, PyObject *failobj)
{
    Py_hash_t hash = PyObject_Hash(key);
    return (hash == -1) ? NULL : _odict_popkey_hash(od, key, failobj, hash);
}
/* popitem() */
PyDoc_STRVAR(odict_popitem__doc__,
"popitem($self, /, last=True)\n"
"--\n"
"\n"
"Remove and return a (key, value) pair from the dictionary.\n"
"\n"
"Pairs are returned in LIFO order if last is true or FIFO order if false.");

/* Remove and return the last (or first) (key, value) pair as a 2-tuple.
   Raises KeyError when the dictionary is empty. */
static PyObject *
odict_popitem(PyObject *od, PyObject *args, PyObject *kwargs)
{
    static char *kwlist[] = {"last", 0};
    PyObject *key, *value, *item = NULL;
    _ODictNode *node;
    int last = 1;

    /* pull the item */

    /* borrowed */
    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|p:popitem", kwlist,
                                     &last)) {
        return NULL;
    }

    if (_odict_EMPTY(od)) {
        PyErr_SetString(PyExc_KeyError, "dictionary is empty");
        return NULL;
    }

    node = last ? _odict_LAST(od) : _odict_FIRST(od);
    /* Keep the key alive across the pop: popping frees the node that
       owned it. */
    key = _odictnode_KEY(node);
    Py_INCREF(key);
    value = _odict_popkey_hash(od, key, NULL, _odictnode_HASH(node));
    if (value == NULL) {
        /* Bug fix: the INCREF'd key was previously leaked on this error
           path. */
        Py_DECREF(key);
        return NULL;
    }

    item = PyTuple_Pack(2, key, value);
    Py_DECREF(key);
    Py_DECREF(value);
    return item;
}
/* keys() */
/* MutableMapping.keys() does not have a docstring. */
PyDoc_STRVAR(odict_keys__doc__, "");
static PyObject * odictkeys_new(PyObject *od);  /* forward */

/* values() */
/* MutableMapping.values() does not have a docstring. */
PyDoc_STRVAR(odict_values__doc__, "");
static PyObject * odictvalues_new(PyObject *od);  /* forward */

/* items() */
/* MutableMapping.items() does not have a docstring. */
PyDoc_STRVAR(odict_items__doc__, "");
static PyObject * odictitems_new(PyObject *od);  /* forward */

/* update() */
/* MutableMapping.update() does not have a docstring. */
PyDoc_STRVAR(odict_update__doc__, "");

/* forward */
static PyObject * mutablemapping_update(PyObject *, PyObject *, PyObject *);

/* update() is shared with the generic MutableMapping implementation below. */
#define odict_update mutablemapping_update
/* clear() */
PyDoc_STRVAR(odict_clear__doc__,
"od.clear() -> None. Remove all items from od.");

static PyObject *
odict_clear(register PyODictObject *od)
{
    PyDict_Clear((PyObject *)od);   /* drop the dict entries */
    _odict_clear_nodes(od);         /* free the linked list + fast index */
    /* Re-create an empty od_fast_nodes index (it was freed above). */
    if (_odict_resize(od) < 0)
        return NULL;
    Py_RETURN_NONE;
}
/* copy() */

/* forward */
static int _PyODict_SetItem_KnownHash(PyObject *, PyObject *, PyObject *,
                                      Py_hash_t);

PyDoc_STRVAR(odict_copy__doc__, "od.copy() -> a shallow copy of od");

/* Shallow copy preserving insertion order.  Exact OrderedDicts walk the
   node list and reuse each node's cached hash; subclasses are built via
   the generic item protocols (and the subclass constructor). */
static PyObject *
odict_copy(register PyODictObject *od)
{
    _ODictNode *node;
    PyObject *od_copy;

    if (PyODict_CheckExact(od))
        od_copy = PyODict_New();
    else
        od_copy = PyObject_CallFunctionObjArgs((PyObject *)Py_TYPE(od), NULL);
    if (od_copy == NULL)
        return NULL;

    if (PyODict_CheckExact(od)) {
        _odict_FOREACH(od, node) {
            PyObject *key = _odictnode_KEY(node);
            PyObject *value = _odictnode_VALUE(node, od);
            if (value == NULL) {
                /* Node exists but the dict entry is gone: inconsistency. */
                if (!PyErr_Occurred())
                    PyErr_SetObject(PyExc_KeyError, key);
                goto fail;
            }
            if (_PyODict_SetItem_KnownHash((PyObject *)od_copy, key, value,
                                           _odictnode_HASH(node)) != 0)
                goto fail;
        }
    }
    else {
        _odict_FOREACH(od, node) {
            int res;
            PyObject *value = PyObject_GetItem((PyObject *)od,
                                               _odictnode_KEY(node));
            if (value == NULL)
                goto fail;
            res = PyObject_SetItem((PyObject *)od_copy,
                                   _odictnode_KEY(node), value);
            Py_DECREF(value);
            if (res != 0)
                goto fail;
        }
    }
    return od_copy;

fail:
    Py_DECREF(od_copy);
    return NULL;
}
/* __reversed__() */
PyDoc_STRVAR(odict_reversed__doc__, "od.__reversed__() <==> reversed(od)");

/* Iterator kind flags (combinable; items == KEYS|VALUES). */
#define _odict_ITER_REVERSED 1
#define _odict_ITER_KEYS 2
#define _odict_ITER_VALUES 4

/* forward */
static PyObject * odictiter_new(PyODictObject *, int);

static PyObject *
odict_reversed(PyODictObject *od)
{
    return odictiter_new(od, _odict_ITER_KEYS|_odict_ITER_REVERSED);
}
/* move_to_end() */
PyDoc_STRVAR(odict_move_to_end__doc__,
"Move an existing element to the end (or beginning if last==False).\n\
\n\
Raises KeyError if the element does not exist.\n\
When last=True, acts like a fast version of self[key]=self.pop(key).\n\
\n\
");

static PyObject *
odict_move_to_end(PyODictObject *od, PyObject *args, PyObject *kwargs)
{
    static char *kwlist[] = {"key", "last", 0};
    PyObject *key;
    int last = 1;
    _ODictNode *node;

    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|p:move_to_end", kwlist,
                                     &key, &last)) {
        return NULL;
    }

    if (_odict_EMPTY(od)) {
        PyErr_SetObject(PyExc_KeyError, key);
        return NULL;
    }

    /* Identity check against the end node skips the hash lookup when the
       exact key object is already in place. */
    node = last ? _odict_LAST(od) : _odict_FIRST(od);
    if (key != _odictnode_KEY(node)) {
        node = _odict_find_node(od, key);
        if (node == NULL) {
            if (!PyErr_Occurred())
                PyErr_SetObject(PyExc_KeyError, key);
            return NULL;
        }
        if (last) {
            /* Only move if not already the last one. */
            if (node != _odict_LAST(od)) {
                _odict_remove_node(od, node);
                _odict_add_tail(od, node);
            }
        }
        else {
            /* Only move if not already the first one. */
            if (node != _odict_FIRST(od)) {
                _odict_remove_node(od, node);
                _odict_add_head(od, node);
            }
        }
    }
    Py_RETURN_NONE;
}
/* tp_methods */
static PyMethodDef odict_methods[] = {

    /* explicitly defined so we can align docstrings with
     * collections.OrderedDict */
    /* NOTE(review): presumably the type slots (mp_ass_subscript, tp_init,
       tp_iter, tp_richcompare) shadow the dunder entries below for normal
       use, so these entries mainly attach docstrings — confirm. */
    {"__delitem__", (PyCFunction)odict_mp_ass_sub, METH_NOARGS,
     odict_delitem__doc__},
    {"__eq__", (PyCFunction)odict_eq, METH_NOARGS,
     odict_eq__doc__},
    {"__init__", (PyCFunction)odict_init, METH_NOARGS,
     odict_init__doc__},
    {"__iter__", (PyCFunction)odict_iter, METH_NOARGS,
     odict_iter__doc__},
    {"__ne__", (PyCFunction)odict_ne, METH_NOARGS,
     odict_ne__doc__},
    {"__repr__", (PyCFunction)odict_repr, METH_NOARGS,
     odict_repr__doc__},
    {"__setitem__", (PyCFunction)odict_mp_ass_sub, METH_NOARGS,
     odict_setitem__doc__},
    {"fromkeys", (PyCFunction)odict_fromkeys,
     METH_VARARGS | METH_KEYWORDS | METH_CLASS, odict_fromkeys__doc__},

    /* overridden dict methods */
    {"__sizeof__", (PyCFunction)odict_sizeof, METH_NOARGS,
     odict_sizeof__doc__},
    {"__reduce__", (PyCFunction)odict_reduce, METH_NOARGS,
     odict_reduce__doc__},
    {"setdefault", (PyCFunction)odict_setdefault,
     METH_VARARGS | METH_KEYWORDS, odict_setdefault__doc__},
    {"pop", (PyCFunction)odict_pop,
     METH_VARARGS | METH_KEYWORDS, odict_pop__doc__},
    {"popitem", (PyCFunction)odict_popitem,
     METH_VARARGS | METH_KEYWORDS, odict_popitem__doc__},
    {"keys", (PyCFunction)odictkeys_new, METH_NOARGS,
     odict_keys__doc__},
    {"values", (PyCFunction)odictvalues_new, METH_NOARGS,
     odict_values__doc__},
    {"items", (PyCFunction)odictitems_new, METH_NOARGS,
     odict_items__doc__},
    {"update", (PyCFunction)odict_update, METH_VARARGS | METH_KEYWORDS,
     odict_update__doc__},
    {"clear", (PyCFunction)odict_clear, METH_NOARGS,
     odict_clear__doc__},
    {"copy", (PyCFunction)odict_copy, METH_NOARGS,
     odict_copy__doc__},

    /* new methods */
    {"__reversed__", (PyCFunction)odict_reversed, METH_NOARGS,
     odict_reversed__doc__},
    {"move_to_end", (PyCFunction)odict_move_to_end,
     METH_VARARGS | METH_KEYWORDS, odict_move_to_end__doc__},

    {NULL, NULL}  /* sentinel */
};
/* ----------------------------------------------
 * OrderedDict members
 */

/* tp_getset */
static PyGetSetDef odict_getset[] = {
    /* Expose __dict__ (backed by od_inst_dict via tp_dictoffset). */
    {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict},
    {NULL}
};
/* ----------------------------------------------
 * OrderedDict type slot methods
 */

/* tp_dealloc */
static void
odict_dealloc(PyODictObject *self)
{
    PyThreadState *tstate = PyThreadState_GET();

    PyObject_GC_UnTrack(self);
    Py_TRASHCAN_SAFE_BEGIN(self)

    Py_XDECREF(self->od_inst_dict);
    if (self->od_weakreflist != NULL)
        PyObject_ClearWeakRefs((PyObject *)self);

    _odict_clear_nodes(self);

    /* Call the base tp_dealloc(). Since it too uses the trashcan mechanism,
     * temporarily decrement trash_delete_nesting to prevent triggering it
     * and putting the partially deallocated object on the trashcan's
     * to-be-deleted-later list.
     */
    --tstate->trash_delete_nesting;
    /* NOTE(review): `_tstate` is not declared in this function; presumably
       it is the variable introduced inside the Py_TRASHCAN_SAFE_BEGIN
       macro expansion — confirm against the macro definition. */
    assert(_tstate->trash_delete_nesting < PyTrash_UNWIND_LEVEL);
    PyDict_Type.tp_dealloc((PyObject *)self);
    ++tstate->trash_delete_nesting;

    Py_TRASHCAN_SAFE_END(self)
}
/* tp_repr */
static PyObject *
odict_repr(PyODictObject *self)
{
    int i;
    _Py_IDENTIFIER(items);
    PyObject *pieces = NULL, *result = NULL;
    const char *classname;

    /* Use the bare class name (strip any module prefix). */
    classname = strrchr(Py_TYPE(self)->tp_name, '.');
    if (classname == NULL)
        classname = Py_TYPE(self)->tp_name;
    else
        classname++;

    if (PyODict_SIZE(self) == 0)
        return PyUnicode_FromFormat("%s()", classname);

    /* Guard against self-referential containers. */
    i = Py_ReprEnter((PyObject *)self);
    if (i != 0) {
        return i > 0 ? PyUnicode_FromString("...") : NULL;
    }

    if (PyODict_CheckExact(self)) {
        Py_ssize_t count = 0;
        _ODictNode *node;
        pieces = PyList_New(PyODict_SIZE(self));
        if (pieces == NULL)
            goto Done;

        _odict_FOREACH(self, node) {
            PyObject *pair;
            PyObject *key = _odictnode_KEY(node);
            PyObject *value = _odictnode_VALUE(node, self);
            if (value == NULL) {
                if (!PyErr_Occurred())
                    PyErr_SetObject(PyExc_KeyError, key);
                goto Done;
            }
            pair = PyTuple_Pack(2, key, value);
            if (pair == NULL)
                goto Done;

            /* The list was pre-sized; the append path below presumably
               covers the odict growing while value code runs — confirm. */
            if (count < PyList_GET_SIZE(pieces))
                PyList_SET_ITEM(pieces, count, pair);  /* steals reference */
            else {
                if (PyList_Append(pieces, pair) < 0) {
                    Py_DECREF(pair);
                    goto Done;
                }
                Py_DECREF(pair);
            }
            count++;
        }
        /* Truncate if fewer pairs were produced than pre-allocated
           (assigns through the size macro's lvalue). */
        if (count < PyList_GET_SIZE(pieces))
            PyList_GET_SIZE(pieces) = count;
    }
    else {
        /* Subclass: honor an overridden items(). */
        PyObject *items = _PyObject_CallMethodIdObjArgs((PyObject *)self,
                                                        &PyId_items, NULL);
        if (items == NULL)
            goto Done;
        pieces = PySequence_List(items);
        Py_DECREF(items);
        if (pieces == NULL)
            goto Done;
    }

    result = PyUnicode_FromFormat("%s(%R)", classname, pieces);

Done:
    Py_XDECREF(pieces);
    Py_ReprLeave((PyObject *)self);
    return result;
}
/* tp_doc */
PyDoc_STRVAR(odict_doc,
        "Dictionary that remembers insertion order");

/* tp_traverse */
static int
odict_traverse(PyODictObject *od, visitproc visit, void *arg)
{
    _ODictNode *node;
    Py_VISIT(od->od_inst_dict);
    Py_VISIT(od->od_weakreflist);
    /* The linked-list nodes hold their own key references. */
    _odict_FOREACH(od, node) {
        Py_VISIT(_odictnode_KEY(node));
    }
    return PyDict_Type.tp_traverse((PyObject *)od, visit, arg);
}
/* tp_clear */
static int
odict_tp_clear(PyODictObject *od)
{
    PyObject *res;
    Py_CLEAR(od->od_inst_dict);
    Py_CLEAR(od->od_weakreflist);
    /* odict_clear() returns None on success, NULL on failure. */
    res = odict_clear(od);
    if (res == NULL)
        return -1;
    Py_DECREF(res);
    return 0;
}
/* tp_richcompare */
/* Only == and != are supported.  Content is compared via the dict base
   type (order-insensitive); when the other operand is also an odict, key
   order must match too. */
static PyObject *
odict_richcompare(PyObject *v, PyObject *w, int op)
{
    if (!PyODict_Check(v) || !PyDict_Check(w)) {
        Py_RETURN_NOTIMPLEMENTED;
    }

    if (op == Py_EQ || op == Py_NE) {
        PyObject *res, *cmp;
        int eq;

        /* Order-insensitive content comparison first. */
        cmp = PyDict_Type.tp_richcompare(v, w, op);
        if (cmp == NULL)
            return NULL;
        if (!PyODict_Check(w))
            return cmp;
        /* Contents already differ: the order check cannot change that. */
        if (op == Py_EQ && cmp == Py_False)
            return cmp;
        if (op == Py_NE && cmp == Py_True)
            return cmp;
        Py_DECREF(cmp);

        /* Try comparing odict keys. */
        eq = _odict_keys_equal((PyODictObject *)v, (PyODictObject *)w);
        if (eq < 0)
            return NULL;

        res = (eq == (op == Py_EQ)) ? Py_True : Py_False;
        Py_INCREF(res);
        return res;
    } else {
        Py_RETURN_NOTIMPLEMENTED;
    }
}
/* tp_iter */
/* Default iteration yields the keys, front-to-back. */
static PyObject *
odict_iter(PyODictObject *od)
{
    return odictiter_new(od, _odict_ITER_KEYS);
}
/* tp_init */
/* Accepts at most one positional argument plus keyword items; everything
   is forwarded to update(). */
static int
odict_init(PyObject *self, PyObject *args, PyObject *kwds)
{
    PyObject *res;
    Py_ssize_t len = PyObject_Length(args);

    if (len == -1)
        return -1;
    if (len > 1) {
        /* Bug fix: len is a Py_ssize_t, so the format unit must be %zd —
           the previous %d is a varargs type mismatch (undefined behavior
           where Py_ssize_t is wider than int). */
        char *msg = "expected at most 1 arguments, got %zd";
        PyErr_Format(PyExc_TypeError, msg, len);
        return -1;
    }

    /* __init__() triggering update() is just the way things are! */
    res = odict_update(self, args, kwds);
    if (res == NULL) {
        return -1;
    } else {
        Py_DECREF(res);
        return 0;
    }
}
/* tp_new */
static PyObject *
odict_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
    PyODictObject *od;

    od = (PyODictObject *)PyDict_Type.tp_new(type, args, kwds);
    if (od == NULL)
        return NULL;

    /* type constructor fills the memory with zeros (see
       PyType_GenericAlloc()), there is no need to set them to zero again */
    /* Allocate the initial od_fast_nodes index. */
    if (_odict_resize(od) < 0) {
        Py_DECREF(od);
        return NULL;
    }

    return (PyObject*)od;
}
/* PyODict_Type */
/* Inherits from dict (tp_base); overrides dealloc/repr/mapping assignment,
   GC hooks, rich comparison, iteration, init and new. */
PyTypeObject PyODict_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "collections.OrderedDict",                  /* tp_name */
    sizeof(PyODictObject),                      /* tp_basicsize */
    0,                                          /* tp_itemsize */
    (destructor)odict_dealloc,                  /* tp_dealloc */
    0,                                          /* tp_print */
    0,                                          /* tp_getattr */
    0,                                          /* tp_setattr */
    0,                                          /* tp_reserved */
    (reprfunc)odict_repr,                       /* tp_repr */
    0,                                          /* tp_as_number */
    0,                                          /* tp_as_sequence */
    &odict_as_mapping,                          /* tp_as_mapping */
    0,                                          /* tp_hash */
    0,                                          /* tp_call */
    0,                                          /* tp_str */
    0,                                          /* tp_getattro */
    0,                                          /* tp_setattro */
    0,                                          /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,/* tp_flags */
    odict_doc,                                  /* tp_doc */
    (traverseproc)odict_traverse,               /* tp_traverse */
    (inquiry)odict_tp_clear,                    /* tp_clear */
    (richcmpfunc)odict_richcompare,             /* tp_richcompare */
    offsetof(PyODictObject, od_weakreflist),    /* tp_weaklistoffset */
    (getiterfunc)odict_iter,                    /* tp_iter */
    0,                                          /* tp_iternext */
    odict_methods,                              /* tp_methods */
    0,                                          /* tp_members */
    odict_getset,                               /* tp_getset */
    &PyDict_Type,                               /* tp_base */
    0,                                          /* tp_dict */
    0,                                          /* tp_descr_get */
    0,                                          /* tp_descr_set */
    offsetof(PyODictObject, od_inst_dict),      /* tp_dictoffset */
    (initproc)odict_init,                       /* tp_init */
    PyType_GenericAlloc,                        /* tp_alloc */
    (newfunc)odict_new,                         /* tp_new */
    0,                                          /* tp_free */
};
/* ----------------------------------------------
 * the public OrderedDict API
 */

/* Create a new, empty OrderedDict of the exact type. */
PyObject *
PyODict_New(void) {
    return odict_new(&PyODict_Type, NULL, NULL);
}
/* Set od[key] = value using a precomputed hash, keeping the dict and the
   linked list in sync: on failure to add the list node, the freshly set
   dict entry is rolled back while preserving the original exception. */
static int
_PyODict_SetItem_KnownHash(PyObject *od, PyObject *key, PyObject *value,
                           Py_hash_t hash)
{
    int res = _PyDict_SetItem_KnownHash(od, key, value, hash);
    if (res == 0) {
        res = _odict_add_new_node((PyODictObject *)od, key, hash);
        if (res < 0) {
            /* Revert setting the value on the dict */
            PyObject *exc, *val, *tb;
            PyErr_Fetch(&exc, &val, &tb);
            (void) _PyDict_DelItem_KnownHash(od, key, hash);
            _PyErr_ChainExceptions(exc, val, tb);
        }
    }
    return res;
}
/* Public setter: hash the key once, then use the known-hash path. */
int
PyODict_SetItem(PyObject *od, PyObject *key, PyObject *value)
{
    const Py_hash_t key_hash = PyObject_Hash(key);
    return (key_hash == -1)
        ? -1
        : _PyODict_SetItem_KnownHash(od, key, value, key_hash);
}
/* Public deleter.  The linked-list node is cleared *before* the dict entry
   is removed — see the comment above _odict_clear_node for why this
   ordering matters with colliding keys. */
int
PyODict_DelItem(PyObject *od, PyObject *key)
{
    int res;
    Py_hash_t hash = PyObject_Hash(key);
    if (hash == -1)
        return -1;
    res = _odict_clear_node((PyODictObject *)od, NULL, key, hash);
    if (res < 0)
        return -1;
    return _PyDict_DelItem_KnownHash(od, key, hash);
}
/* -------------------------------------------
 * The OrderedDict views (keys/values/items)
 */

typedef struct {
    PyObject_HEAD
    int kind;                   /* _odict_ITER_* flags */
    PyODictObject *di_odict;    /* owned; cleared when iteration finishes */
    Py_ssize_t di_size;         /* size at creation, to detect resizing */
    size_t di_state;            /* od_state at creation, to detect mutation */
    PyObject *di_current;       /* owned; key to yield next, NULL at end */
    PyObject *di_result; /* reusable result tuple for iteritems */
} odictiterobject;
static void
odictiter_dealloc(odictiterobject *di)
{
    _PyObject_GC_UNTRACK(di);
    Py_XDECREF(di->di_odict);
    Py_XDECREF(di->di_current);
    /* di_result is only allocated for key/value/item iterators
       (see odictiter_new). */
    if (di->kind & (_odict_ITER_KEYS | _odict_ITER_VALUES)) {
        Py_DECREF(di->di_result);
    }
    PyObject_GC_Del(di);
}
static int
odictiter_traverse(odictiterobject *di, visitproc visit, void *arg)
{
    Py_VISIT(di->di_odict);
    Py_VISIT(di->di_current);  /* A key could be any type, not just str. */
    Py_VISIT(di->di_result);
    return 0;
}
/* In order to protect against modifications during iteration, we track
 * the current key instead of the current node. */

/* Return the current key (ownership transferred to the caller) and advance
   di_current to the following key.  Returns NULL when exhausted or on
   error; mutation or resizing during iteration raises RuntimeError. */
static PyObject *
odictiter_nextkey(odictiterobject *di)
{
    PyObject *key = NULL;
    _ODictNode *node;
    int reversed = di->kind & _odict_ITER_REVERSED;

    if (di->di_odict == NULL)
        return NULL;
    if (di->di_current == NULL)
        goto done;  /* We're already done. */

    /* Check for unsupported changes. */
    if (di->di_odict->od_state != di->di_state) {
        PyErr_SetString(PyExc_RuntimeError,
                        "OrderedDict mutated during iteration");
        goto done;
    }
    if (di->di_size != PyODict_SIZE(di->di_odict)) {
        PyErr_SetString(PyExc_RuntimeError,
                        "OrderedDict changed size during iteration");
        di->di_size = -1;  /* Make this state sticky */
        return NULL;
    }

    /* Get the key. */
    node = _odict_find_node(di->di_odict, di->di_current);
    if (node == NULL) {
        if (!PyErr_Occurred())
            PyErr_SetObject(PyExc_KeyError, di->di_current);
        /* Must have been deleted. */
        Py_CLEAR(di->di_current);
        return NULL;
    }
    key = di->di_current;  /* transfer our reference to the caller */

    /* Advance to the next key. */
    node = reversed ? _odictnode_PREV(node) : _odictnode_NEXT(node);
    if (node == NULL) {
        /* Reached the end. */
        di->di_current = NULL;
    }
    else {
        di->di_current = _odictnode_KEY(node);
        Py_INCREF(di->di_current);
    }

    return key;

done:
    Py_CLEAR(di->di_odict);
    return key;
}
/* tp_iternext: yield the key, the value, or a (key, value) tuple according
   to di->kind.  For items, the result tuple is reused across iterations
   when the previous one is no longer referenced elsewhere. */
static PyObject *
odictiter_iternext(odictiterobject *di)
{
    PyObject *result, *value;
    PyObject *key = odictiter_nextkey(di);  /* new reference */

    if (key == NULL)
        return NULL;

    /* Handle the keys case. */
    if (! (di->kind & _odict_ITER_VALUES)) {
        return key;
    }

    value = PyODict_GetItem((PyObject *)di->di_odict, key);  /* borrowed */
    if (value == NULL) {
        if (!PyErr_Occurred())
            PyErr_SetObject(PyExc_KeyError, key);
        Py_DECREF(key);
        goto done;
    }
    Py_INCREF(value);

    /* Handle the values case. */
    if (!(di->kind & _odict_ITER_KEYS)) {
        Py_DECREF(key);
        return value;
    }

    /* Handle the items case. */
    result = di->di_result;

    if (Py_REFCNT(result) == 1) {
        /* not in use so we can reuse it
         * (the common case during iteration) */
        Py_INCREF(result);
        Py_DECREF(PyTuple_GET_ITEM(result, 0));  /* borrowed */
        Py_DECREF(PyTuple_GET_ITEM(result, 1));  /* borrowed */
    }
    else {
        result = PyTuple_New(2);
        if (result == NULL) {
            Py_DECREF(key);
            Py_DECREF(value);
            goto done;
        }
    }

    PyTuple_SET_ITEM(result, 0, key);  /* steals reference */
    PyTuple_SET_ITEM(result, 1, value);  /* steals reference */
    return result;

done:
    Py_CLEAR(di->di_current);
    Py_CLEAR(di->di_odict);
    return NULL;
}
/* No need for tp_clear because odictiterobject is not mutable. */

PyDoc_STRVAR(reduce_doc, "Return state information for pickling");

/* __reduce__ for odict iterators: materialize the remaining items into a
   list and return (iter, (list,)).  Note this walks the iterator itself,
   leaving it exhausted. */
static PyObject *
odictiter_reduce(odictiterobject *di)
{
    PyObject *list, *iter;

    list = PyList_New(0);
    if (!list)
        return NULL;

    /* iterate the temporary into a list */
    for(;;) {
        PyObject *element = odictiter_iternext(di);
        if (element) {
            if (PyList_Append(list, element)) {
                Py_DECREF(element);
                Py_DECREF(list);
                return NULL;
            }
            Py_DECREF(element);
        }
        else {
            /* done iterating? */
            break;
        }
    }

    /* NULL from iternext may mean exhaustion or error; distinguish here. */
    if (PyErr_Occurred()) {
        Py_DECREF(list);
        return NULL;
    }

    iter = _PyObject_GetBuiltin("iter");
    if (iter == NULL) {
        Py_DECREF(list);
        return NULL;
    }
    return Py_BuildValue("N(N)", iter, list);
}

static PyMethodDef odictiter_methods[] = {
    {"__reduce__", (PyCFunction)odictiter_reduce, METH_NOARGS, reduce_doc},
    {NULL, NULL}  /* sentinel */
};
/* Iterator type shared by key/value/item iteration (selected via kind). */
PyTypeObject PyODictIter_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "odict_iterator",                         /* tp_name */
    sizeof(odictiterobject),                  /* tp_basicsize */
    0,                                        /* tp_itemsize */
    /* methods */
    (destructor)odictiter_dealloc,            /* tp_dealloc */
    0,                                        /* tp_print */
    0,                                        /* tp_getattr */
    0,                                        /* tp_setattr */
    0,                                        /* tp_reserved */
    0,                                        /* tp_repr */
    0,                                        /* tp_as_number */
    0,                                        /* tp_as_sequence */
    0,                                        /* tp_as_mapping */
    0,                                        /* tp_hash */
    0,                                        /* tp_call */
    0,                                        /* tp_str */
    PyObject_GenericGetAttr,                  /* tp_getattro */
    0,                                        /* tp_setattro */
    0,                                        /* tp_as_buffer */
    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,  /* tp_flags */
    0,                                        /* tp_doc */
    (traverseproc)odictiter_traverse,         /* tp_traverse */
    0,                                        /* tp_clear */
    0,                                        /* tp_richcompare */
    0,                                        /* tp_weaklistoffset */
    PyObject_SelfIter,                        /* tp_iter */
    (iternextfunc)odictiter_iternext,         /* tp_iternext */
    odictiter_methods,                        /* tp_methods */
    0,
};
/* Create an iterator over od; `kind` is a combination of _odict_ITER_*
   flags choosing keys/values/items and direction. */
static PyObject *
odictiter_new(PyODictObject *od, int kind)
{
    odictiterobject *di;
    _ODictNode *node;
    int reversed = kind & _odict_ITER_REVERSED;

    di = PyObject_GC_New(odictiterobject, &PyODictIter_Type);
    if (di == NULL)
        return NULL;

    if (kind & (_odict_ITER_KEYS | _odict_ITER_VALUES)){
        /* Pre-allocate the reusable (key, value) result tuple. */
        di->di_result = PyTuple_Pack(2, Py_None, Py_None);
        if (di->di_result == NULL) {
            Py_DECREF(di);
            return NULL;
        }
    }
    else
        di->di_result = NULL;

    di->kind = kind;
    node = reversed ? _odict_LAST(od) : _odict_FIRST(od);
    di->di_current = node ? _odictnode_KEY(node) : NULL;
    Py_XINCREF(di->di_current);
    /* Snapshot size and state so mutation during iteration is detected. */
    di->di_size = PyODict_SIZE(od);
    di->di_state = od->od_state;
    di->di_odict = od;
    Py_INCREF(od);

    _PyObject_GC_TRACK(di);
    return (PyObject *)di;
}
/* keys() */

/* Forward iterator over the view's backing odict (None if detached). */
static PyObject *
odictkeys_iter(_PyDictViewObject *dv)
{
    PyODictObject *od = (PyODictObject *)dv->dv_dict;
    if (od == NULL)
        Py_RETURN_NONE;
    return odictiter_new(od, _odict_ITER_KEYS);
}

/* Reverse iterator over the view's backing odict (None if detached). */
static PyObject *
odictkeys_reversed(_PyDictViewObject *dv)
{
    PyODictObject *od = (PyODictObject *)dv->dv_dict;
    if (od == NULL)
        Py_RETURN_NONE;
    return odictiter_new(od, _odict_ITER_KEYS|_odict_ITER_REVERSED);
}

static PyMethodDef odictkeys_methods[] = {
    {"__reversed__", (PyCFunction)odictkeys_reversed, METH_NOARGS, NULL},
    {NULL, NULL}  /* sentinel */
};
/* Keys view: inherits everything from dict_keys except iteration, which
   must follow insertion order. */
PyTypeObject PyODictKeys_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "odict_keys",                   /* tp_name */
    0,                              /* tp_basicsize */
    0,                              /* tp_itemsize */
    0,                              /* tp_dealloc */
    0,                              /* tp_print */
    0,                              /* tp_getattr */
    0,                              /* tp_setattr */
    0,                              /* tp_reserved */
    0,                              /* tp_repr */
    0,                              /* tp_as_number */
    0,                              /* tp_as_sequence */
    0,                              /* tp_as_mapping */
    0,                              /* tp_hash */
    0,                              /* tp_call */
    0,                              /* tp_str */
    0,                              /* tp_getattro */
    0,                              /* tp_setattro */
    0,                              /* tp_as_buffer */
    0,                              /* tp_flags */
    0,                              /* tp_doc */
    0,                              /* tp_traverse */
    0,                              /* tp_clear */
    0,                              /* tp_richcompare */
    0,                              /* tp_weaklistoffset */
    (getiterfunc)odictkeys_iter,    /* tp_iter */
    0,                              /* tp_iternext */
    odictkeys_methods,              /* tp_methods */
    0,                              /* tp_members */
    0,                              /* tp_getset */
    &PyDictKeys_Type,               /* tp_base */
};

static PyObject *
odictkeys_new(PyObject *od)
{
    return _PyDictView_New(od, &PyODictKeys_Type);
}
/* items() */

/* Forward (key, value) iterator over the backing odict (None if detached). */
static PyObject *
odictitems_iter(_PyDictViewObject *dv)
{
    PyODictObject *od = (PyODictObject *)dv->dv_dict;
    if (od == NULL)
        Py_RETURN_NONE;
    return odictiter_new(od, _odict_ITER_KEYS|_odict_ITER_VALUES);
}

/* Reverse (key, value) iterator over the backing odict (None if detached). */
static PyObject *
odictitems_reversed(_PyDictViewObject *dv)
{
    PyODictObject *od = (PyODictObject *)dv->dv_dict;
    if (od == NULL)
        Py_RETURN_NONE;
    return odictiter_new(od,
                         _odict_ITER_KEYS|_odict_ITER_VALUES|_odict_ITER_REVERSED);
}

static PyMethodDef odictitems_methods[] = {
    {"__reversed__", (PyCFunction)odictitems_reversed, METH_NOARGS, NULL},
    {NULL, NULL}  /* sentinel */
};
/* Items view: inherits everything from dict_items except iteration, which
   must follow insertion order. */
PyTypeObject PyODictItems_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    "odict_items",                  /* tp_name */
    0,                              /* tp_basicsize */
    0,                              /* tp_itemsize */
    0,                              /* tp_dealloc */
    0,                              /* tp_print */
    0,                              /* tp_getattr */
    0,                              /* tp_setattr */
    0,                              /* tp_reserved */
    0,                              /* tp_repr */
    0,                              /* tp_as_number */
    0,                              /* tp_as_sequence */
    0,                              /* tp_as_mapping */
    0,                              /* tp_hash */
    0,                              /* tp_call */
    0,                              /* tp_str */
    0,                              /* tp_getattro */
    0,                              /* tp_setattro */
    0,                              /* tp_as_buffer */
    0,                              /* tp_flags */
    0,                              /* tp_doc */
    0,                              /* tp_traverse */
    0,                              /* tp_clear */
    0,                              /* tp_richcompare */
    0,                              /* tp_weaklistoffset */
    (getiterfunc)odictitems_iter,   /* tp_iter */
    0,                              /* tp_iternext */
    odictitems_methods,             /* tp_methods */
    0,                              /* tp_members */
    0,                              /* tp_getset */
    &PyDictItems_Type,              /* tp_base */
};

static PyObject *
odictitems_new(PyObject *od)
{
    return _PyDictView_New(od, &PyODictItems_Type);
}
/* values() */

/* Forward value iterator over the backing odict (None if detached). */
static PyObject *
odictvalues_iter(_PyDictViewObject *dv)
{
    PyODictObject *od = (PyODictObject *)dv->dv_dict;
    if (od == NULL)
        Py_RETURN_NONE;
    return odictiter_new(od, _odict_ITER_VALUES);
}

/* Reverse value iterator over the backing odict (None if detached). */
static PyObject *
odictvalues_reversed(_PyDictViewObject *dv)
{
    PyODictObject *od = (PyODictObject *)dv->dv_dict;
    if (od == NULL)
        Py_RETURN_NONE;
    return odictiter_new(od, _odict_ITER_VALUES|_odict_ITER_REVERSED);
}

static PyMethodDef odictvalues_methods[] = {
    {"__reversed__", (PyCFunction)odictvalues_reversed, METH_NOARGS, NULL},
    {NULL, NULL}  /* sentinel */
};
PyTypeObject PyODictValues_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
"odict_values", /* tp_name */
0, /* tp_basicsize */
0, /* tp_itemsize */
0, /* tp_dealloc */
0, /* tp_print */
0, /* tp_getattr */
0, /* tp_setattr */
0, /* tp_reserved */
0, /* tp_repr */
0, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
0, /* tp_hash */
0, /* tp_call */
0, /* tp_str */
0, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
0, /* tp_flags */
0, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
0, /* tp_weaklistoffset */
(getiterfunc)odictvalues_iter, /* tp_iter */
0, /* tp_iternext */
odictvalues_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
&PyDictValues_Type, /* tp_base */
};
/* Construct a new values view over the ordered dict `od`. */
static PyObject *
odictvalues_new(PyObject *od)
{
    PyObject *view = _PyDictView_New(od, &PyODictValues_Type);
    return view;
}
/* ----------------------------------------------
MutableMapping implementations
Mapping:
============ ===========
method uses
============ ===========
__contains__ __getitem__
__eq__ items
__getitem__ +
__iter__ +
__len__ +
__ne__ __eq__
get __getitem__
items ItemsView
keys KeysView
values ValuesView
============ ===========
ItemsView uses __len__, __iter__, and __getitem__.
KeysView uses __len__, __iter__, and __contains__.
ValuesView uses __len__, __iter__, and __getitem__.
MutableMapping:
============ ===========
method uses
============ ===========
__delitem__ +
__setitem__ +
clear popitem
pop __getitem__
__delitem__
popitem __iter__
__getitem__
__delitem__
setdefault __getitem__
__setitem__
update __setitem__
============ ===========
*/
/* Add (key, value) pairs from the iterable `pairs` to the mapping `self`
 * via PyObject_SetItem(), mirroring dict(iterable-of-pairs).  Each element
 * of `pairs` must itself be iterable and yield exactly two items.
 * Returns 0 on success, -1 on error (with an exception set). */
static int
mutablemapping_add_pairs(PyObject *self, PyObject *pairs)
{
    PyObject *pair, *iterator, *unexpected;
    int res = 0;
    iterator = PyObject_GetIter(pairs);
    if (iterator == NULL)
        return -1;
    PyErr_Clear();
    while ((pair = PyIter_Next(iterator)) != NULL) {
        /* could be more efficient (see UNPACK_SEQUENCE in ceval.c) */
        PyObject *key = NULL, *value = NULL;
        PyObject *pair_iterator = PyObject_GetIter(pair);
        if (pair_iterator == NULL)
            goto Done;
        key = PyIter_Next(pair_iterator);
        if (key == NULL) {
            /* Exhausted immediately: the element had zero items. */
            if (!PyErr_Occurred())
                PyErr_SetString(PyExc_ValueError,
                                "need more than 0 values to unpack");
            goto Done;
        }
        value = PyIter_Next(pair_iterator);
        if (value == NULL) {
            if (!PyErr_Occurred())
                PyErr_SetString(PyExc_ValueError,
                                "need more than 1 value to unpack");
            goto Done;
        }
        /* A third item means the element is not a 2-tuple-like pair. */
        unexpected = PyIter_Next(pair_iterator);
        if (unexpected != NULL) {
            Py_DECREF(unexpected);
            PyErr_SetString(PyExc_ValueError,
                            "too many values to unpack (expected 2)");
            goto Done;
        }
        else if (PyErr_Occurred())
            goto Done;
        res = PyObject_SetItem(self, key, value);
Done:
        /* Shared cleanup for both the success path and every error path;
         * key/value/pair_iterator may be NULL, hence Py_XDECREF. */
        Py_DECREF(pair);
        Py_XDECREF(pair_iterator);
        Py_XDECREF(key);
        Py_XDECREF(value);
        if (PyErr_Occurred())
            break;
    }
    Py_DECREF(iterator);
    if (res < 0 || PyErr_Occurred() != NULL)
        return -1;
    else
        return 0;
}
/* Implementation of MutableMapping.update() for ordered dicts.
 *
 * Accepts at most one positional argument -- an exact dict, an object with
 * a keys() method, an object with an items() method, or an iterable of
 * key/value pairs -- plus keyword arguments, mirroring dict.update().
 * Returns None, or NULL with an exception set on error. */
static PyObject *
mutablemapping_update(PyObject *self, PyObject *args, PyObject *kwargs)
{
    int res = 0;
    Py_ssize_t len;
    _Py_IDENTIFIER(items);
    _Py_IDENTIFIER(keys);
    /* first handle args, if any */
    assert(args == NULL || PyTuple_Check(args));
    len = (args != NULL) ? PyTuple_GET_SIZE(args) : 0;
    if (len > 1) {
        /* Fix: len is Py_ssize_t, so use %zd rather than %d. */
        const char *msg = "update() takes at most 1 positional argument (%zd given)";
        PyErr_Format(PyExc_TypeError, msg, len);
        return NULL;
    }
    if (len) {
        PyObject *other = PyTuple_GET_ITEM(args, 0);  /* borrowed reference */
        assert(other != NULL);
        Py_INCREF(other);
        /* Fix: the condition previously lacked parentheses
         * (`if PyDict_CheckExact(other) {`) and only compiled because the
         * macro expansion happens to be parenthesized.  Since this branch
         * is only taken for exact dicts, the redundant inner re-check is
         * also collapsed to the PyDict_Items() fast path. */
        if (PyDict_CheckExact(other)) {
            PyObject *items = PyDict_Items(other);
            Py_DECREF(other);
            if (items == NULL)
                return NULL;
            res = mutablemapping_add_pairs(self, items);
            Py_DECREF(items);
            if (res == -1)
                return NULL;
        }
        else if (_PyObject_HasAttrId(other, &PyId_keys)) { /* never fails */
            /* Mapping-like object: iterate keys() and look each value up. */
            PyObject *keys, *iterator, *key;
            keys = _PyObject_CallMethodIdObjArgs(other, &PyId_keys, NULL);
            if (keys == NULL) {
                Py_DECREF(other);
                return NULL;
            }
            iterator = PyObject_GetIter(keys);
            Py_DECREF(keys);
            if (iterator == NULL) {
                Py_DECREF(other);
                return NULL;
            }
            while (res == 0 && (key = PyIter_Next(iterator))) {
                PyObject *value = PyObject_GetItem(other, key);
                if (value != NULL) {
                    res = PyObject_SetItem(self, key, value);
                    Py_DECREF(value);
                }
                else {
                    res = -1;
                }
                Py_DECREF(key);
            }
            Py_DECREF(other);
            Py_DECREF(iterator);
            if (res != 0 || PyErr_Occurred())
                return NULL;
        }
        else if (_PyObject_HasAttrId(other, &PyId_items)) { /* never fails */
            PyObject *items;
            if (PyDict_CheckExact(other))
                items = PyDict_Items(other);
            else
                items = _PyObject_CallMethodId(other, &PyId_items, NULL);
            Py_DECREF(other);
            if (items == NULL)
                return NULL;
            res = mutablemapping_add_pairs(self, items);
            Py_DECREF(items);
            if (res == -1)
                return NULL;
        }
        else {
            /* Fallback: treat `other` as an iterable of key/value pairs. */
            res = mutablemapping_add_pairs(self, other);
            Py_DECREF(other);
            if (res != 0)
                return NULL;
        }
    }
    /* now handle kwargs */
    assert(kwargs == NULL || PyDict_Check(kwargs));
    len = (kwargs != NULL) ? PyDict_Size(kwargs) : 0;
    if (len > 0) {
        PyObject *items = PyDict_Items(kwargs);
        if (items == NULL)
            return NULL;
        res = mutablemapping_add_pairs(self, items);
        Py_DECREF(items);
        if (res == -1)
            return NULL;
    }
    Py_RETURN_NONE;
}
|
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
import numpy as np
import math
import six
import os
from examples.tensorflow.bert.utils.common import create_initializer
# Sizes of the per-layer scale/amax buffer consumed by the int8 path
# (see the `amaxList` variable created in tf_bert below).
ACTIVATION_AMAX_NUM = 72    # activation amax slots
INT8O_GEMM_NUM = 8          # int8-output GEMM scale slots
TRT_AMAX_NUM = 3            # slots reserved for TensorRT amax values
SCALE_RESERVE_NUM = 21      # reserved/spare scale slots
def gelu(x):
    """Gaussian Error Linear Unit, tanh approximation (as used by BERT)."""
    inner = np.sqrt(2 / np.pi) * (x + 0.044715 * tf.pow(x, 3))
    cdf = 0.5 * (1.0 + tf.tanh(inner))
    return x * cdf
def layer_norm(input_tensor, name=None):
    """Layer-normalize the last dimension of `input_tensor` (TF1 contrib)."""
    return tf.contrib.layers.layer_norm(
        inputs=input_tensor, begin_norm_axis=-1, begin_params_axis=-1, scope=name)
def attention_layer(from_tensor,
                    to_tensor,
                    attention_mask=None,
                    num_attention_heads=1,
                    size_per_head=512,
                    query_act=None,
                    key_act=None,
                    value_act=None,
                    attention_probs_dropout_prob=0.0,
                    initializer_range=0.02,
                    do_return_2d_tensor=False,
                    batch_size=None,
                    from_seq_length=None,
                    to_seq_length=None,
                    tf_datatype=tf.float32):
    """Multi-head scaled dot-product attention (BERT-style).

    Shapes below use B = batch, F = from_seq_length, T = to_seq_length,
    N = num_attention_heads, H = size_per_head.

    Args:
        from_tensor: float Tensor, rank 3 [B, F, width] or rank 2 [B*F, width].
        to_tensor: float Tensor, rank 3 [B, T, width] or rank 2 [B*T, width].
        attention_mask: optional mask of shape [B, F, T] or [B, 1, F, T];
            zero positions receive a large negative score bias.
        num_attention_heads: number of heads N.
        size_per_head: per-head width H.
        query_act, key_act, value_act: optional activations for the Q/K/V
            dense projections.
        attention_probs_dropout_prob: accepted for API compatibility; no
            dropout is applied in this implementation.
        initializer_range: range passed to create_initializer for weights.
        do_return_2d_tensor: return [B*F, N*H] if True, else [B, F, N*H].
        batch_size, from_seq_length, to_seq_length: required when the input
            tensors are rank 2 (already flattened).
        tf_datatype: dtype used for the projections and the mask bias.

    Returns:
        The attention context Tensor ([B*F, N*H] or [B, F, N*H]).

    Raises:
        ValueError: on mismatched input ranks, or rank-2 inputs without the
            explicit batch/sequence sizes.
    """
    def transpose_for_scores(input_tensor, batch_size, num_attention_heads,
                             seq_length, width):
        # [B*S, N*H] -> [B, S, N, H] -> [B, N, S, H]
        output_tensor = tf.reshape(
            input_tensor, [batch_size, seq_length, num_attention_heads, width])
        output_tensor = tf.transpose(output_tensor, [0, 2, 1, 3])
        return output_tensor

    from_shape = get_shape_list(from_tensor, expected_rank=[2, 3])
    to_shape = get_shape_list(to_tensor, expected_rank=[2, 3])
    if len(from_shape) != len(to_shape):
        raise ValueError(
            "The rank of `from_tensor` must match the rank of `to_tensor`.")
    if len(from_shape) == 3:
        batch_size = from_shape[0]
        from_seq_length = from_shape[1]
        to_seq_length = to_shape[1]
    elif len(from_shape) == 2:
        if (batch_size is None or from_seq_length is None or to_seq_length is None):
            raise ValueError(
                "When passing in rank 2 tensors to attention_layer, the values "
                "for `batch_size`, `from_seq_length`, and `to_seq_length` "
                "must all be specified.")
    from_tensor_2d = reshape_to_matrix(from_tensor)
    to_tensor_2d = reshape_to_matrix(to_tensor)
    # `query_layer` = [B*F, N*H]
    query_layer = tf.layers.dense(
        from_tensor_2d,
        num_attention_heads * size_per_head,
        activation=query_act,
        name="query",
        use_bias=True,
        bias_initializer=create_initializer(initializer_range, tf_datatype),
        kernel_initializer=create_initializer(initializer_range, tf_datatype))
    # `key_layer` = [B*T, N*H]
    key_layer = tf.layers.dense(
        to_tensor_2d,
        num_attention_heads * size_per_head,
        activation=key_act,
        name="key",
        use_bias=True,
        bias_initializer=create_initializer(initializer_range, tf_datatype),
        kernel_initializer=create_initializer(initializer_range, tf_datatype))
    # `value_layer` = [B*T, N*H]
    value_layer = tf.layers.dense(
        to_tensor_2d,
        num_attention_heads * size_per_head,
        activation=value_act,
        name="value",
        use_bias=True,
        bias_initializer=create_initializer(initializer_range, tf_datatype),
        kernel_initializer=create_initializer(initializer_range, tf_datatype))
    # `query_layer` = [B, N, F, H]
    query_layer = transpose_for_scores(query_layer, batch_size,
                                       num_attention_heads, from_seq_length,
                                       size_per_head)
    # `key_layer` = [B, N, T, H]
    key_layer = transpose_for_scores(key_layer, batch_size, num_attention_heads,
                                     to_seq_length, size_per_head)
    # Raw scores [B, N, F, T], scaled by 1/sqrt(H).
    attention_scores = tf.matmul(query_layer, key_layer, transpose_b=True)
    attention_scores = tf.multiply(attention_scores,
                                   1.0 / math.sqrt(float(size_per_head)))
    if attention_mask is not None:
        # BUG FIX: `tf.rank(...)` returns a Tensor, so the original
        # `tf.rank(attention_mask) == 3` compared a Tensor against an int and
        # was always False at graph-construction time, meaning a rank-3 mask
        # was never expanded over the head dimension.  Use the statically
        # known rank instead.
        if attention_mask.shape.ndims == 3:
            # `attention_mask` = [B, 1, F, T]
            attention_mask = tf.expand_dims(attention_mask, axis=1)
        # Masked (0) positions get a large negative bias so softmax -> ~0.
        adder = (1.0 - tf.cast(attention_mask, tf_datatype)) * -10000.0
        attention_scores += adder
    attention_probs = tf.nn.softmax(attention_scores)
    # NOTE(review): attention_probs_dropout_prob is intentionally unused here
    # (inference-oriented reference implementation).
    # `value_layer` = [B, N, T, H]
    value_layer = tf.reshape(
        value_layer,
        [batch_size, to_seq_length, num_attention_heads, size_per_head])
    value_layer = tf.transpose(value_layer, [0, 2, 1, 3])
    # `context_layer` = [B, N, F, H] -> [B, F, N, H]
    context_layer = tf.matmul(attention_probs, value_layer)
    context_layer = tf.transpose(context_layer, [0, 2, 1, 3])
    if do_return_2d_tensor:
        context_layer = tf.reshape(
            context_layer,
            [batch_size * from_seq_length, num_attention_heads * size_per_head])
    else:
        context_layer = tf.reshape(
            context_layer,
            [batch_size, from_seq_length, num_attention_heads * size_per_head])
    return context_layer
def tf_bert(input_tensor,
            encoder_args,
            attention_mask=None,
            intermediate_act_fn=gelu,
            initializer_range=0.02):
    '''
    Run the bert transformer layer by TensorFlow.
    Args:
        input_tensor: A tf.Tensor with shape [batch_size, seq_len, hidden_dimension].
            The inputs tensor of encoder. The rank must be 3.
        encoder_args: The arguments for encoder. The details are in the class
            "TransformerArgument" of common.py
        attention_mask: A tf.Tensor. The attention mask for self attention.
        intermediate_act_fn: A callable function.
            The activation function in the FFN. It is gelu in BERT.
        initializer_range: A float value.
            The range of initializer for all weights.
    Outputs:
        outputs: A tf.Tensor with shape [batch_size, seq_len, hidden_dimension].
            The results of encoder.
    '''
    if encoder_args.hidden_dim % encoder_args.head_num != 0:
        raise ValueError(
            "The hidden size (%d) is not a multiple of the number of attention "
            "heads (%d)" % (encoder_args.hidden_dim, encoder_args.head_num))
    input_shape = get_shape_list(input_tensor, expected_rank=3)
    batch_size = input_shape[0]
    seq_length = input_shape[1]
    # All layers operate on the flattened [batch*seq, hidden] representation.
    prev_output = reshape_to_matrix(input_tensor)
    for layer_idx in range(encoder_args.num_layer):
        # AUTO_REUSE lets repeated graph construction share the same weights.
        with tf.variable_scope("layer_%d" % layer_idx, reuse=tf.AUTO_REUSE):
            layer_input = prev_output
            with tf.variable_scope("attention"):
                with tf.variable_scope("self"):
                    attention_head = attention_layer(
                        from_tensor=layer_input,
                        to_tensor=layer_input,
                        attention_mask=attention_mask,
                        num_attention_heads=encoder_args.head_num,
                        size_per_head=encoder_args.size_per_head,
                        initializer_range=initializer_range,
                        do_return_2d_tensor=True,
                        batch_size=batch_size,
                        from_seq_length=seq_length,
                        to_seq_length=seq_length,
                        tf_datatype=encoder_args.dtype)
                    attention_output = attention_head
                with tf.variable_scope("output"):
                    # Output projection followed by residual + LayerNorm.
                    attention_output = tf.layers.dense(
                        attention_output,
                        encoder_args.hidden_dim,
                        use_bias=True,
                        bias_initializer=create_initializer(
                            initializer_range, encoder_args.dtype),
                        kernel_initializer=create_initializer(initializer_range, encoder_args.dtype))
                    attention_output = layer_norm(
                        attention_output + layer_input)
            # The activation is only applied to the "intermediate" hidden layer.
            with tf.variable_scope("intermediate"):
                intermediate_output = tf.layers.dense(
                    attention_output,
                    encoder_args.inter_size,
                    activation=intermediate_act_fn,
                    use_bias=True,
                    bias_initializer=create_initializer(
                        initializer_range, encoder_args.dtype),
                    kernel_initializer=create_initializer(initializer_range, encoder_args.dtype))
            # Down-project back to `hidden_size` then add the residual.
            with tf.variable_scope("output"):
                layer_output = tf.layers.dense(
                    intermediate_output,
                    encoder_args.hidden_dim,
                    use_bias=True,
                    bias_initializer=create_initializer(
                        initializer_range, encoder_args.dtype),
                    kernel_initializer=create_initializer(initializer_range, encoder_args.dtype))
                layer_output = layer_norm(layer_output + attention_output)
                prev_output = layer_output
            # amaxList for int8 quantization
            if encoder_args.int8_mode != 0:
                # The variable is created but not consumed here: creating it
                # inside each layer scope makes it exist in the checkpoint for
                # the int8 FasterTransformer path (see ft_bert) -- presumably
                # filled in by a calibration step; confirm against the caller.
                amaxList = tf.get_variable(name="amaxList", shape=[ACTIVATION_AMAX_NUM + 9*encoder_args.hidden_dim + INT8O_GEMM_NUM + TRT_AMAX_NUM + SCALE_RESERVE_NUM], dtype=tf.float32)
    # Restore the original [batch, seq, hidden] shape.
    prev_output = tf.reshape(prev_output, shape=tf.shape(input_tensor))
    return prev_output
def build_sequence_mask(sequence_length,
                        num_heads=None,
                        maximum_length=None,
                        dtype=tf.float32):
    """Builds the dot product mask.
    Args:
      sequence_length: The sequence length.
      num_heads: The number of heads (currently unused; kept for interface
        compatibility).
      maximum_length: Optional size of the returned time dimension. Otherwise
        it is the maximum of :obj:`sequence_length`.
      dtype: The type of the mask tensor.
    Returns:
      A broadcastable ``tf.Tensor`` of type :obj:`dtype` and shape
      ``[batch_size, 1, max_length, max_length]``.
    """
    # FIX: the docstring advertises `maximum_length` as optional, but the
    # reshape below fails on a `None` dimension.  Derive the documented
    # default explicitly; callers that pass maximum_length are unaffected.
    if maximum_length is None:
        maximum_length = tf.reduce_max(sequence_length)
    mask = tf.sequence_mask(sequence_length, maxlen=maximum_length, dtype=dtype)  # [batch_size, maximum_length]
    mask = tf.reshape(mask, [-1, 1, 1, maximum_length])
    # Outer product of the row mask with itself gives the [F, T] grid mask.
    m_2 = tf.transpose(mask, [0, 1, 3, 2])
    mask = mask * m_2
    return mask
def get_shape_list(tensor, expected_rank=None, name=None):
    """Return the tensor's shape as a list, using static dimensions where
    known and falling back to dynamic `tf.shape` slices for the rest.
    Optionally asserts the tensor's rank first."""
    if name is None:
        name = tensor.name
    if expected_rank is not None:
        assert_rank(tensor, expected_rank, name)
    shape = tensor.shape.as_list()
    dynamic_indexes = [i for i, dim in enumerate(shape) if dim is None]
    if not dynamic_indexes:
        return shape
    dynamic_shape = tf.shape(tensor)
    for i in dynamic_indexes:
        shape[i] = dynamic_shape[i]
    return shape
def reshape_to_matrix(input_tensor):
    """Reshapes a >= rank 2 tensor to a rank 2 tensor (i.e., a matrix)."""
    ndims = input_tensor.shape.ndims
    if ndims < 2:
        raise ValueError("Input tensor must have at least rank 2. Shape = %s" %
                         (input_tensor.shape))
    if ndims == 2:
        # Already a matrix; avoid a pointless reshape op.
        return input_tensor
    last_dim = input_tensor.shape[-1]
    return tf.reshape(input_tensor, [-1, last_dim])
def reshape_from_matrix(output_tensor, orig_shape_list):
    """Inverse of reshape_to_matrix: restore the leading dimensions of
    `orig_shape_list`, keeping the matrix's current last dimension."""
    if len(orig_shape_list) == 2:
        # Caller was already rank 2; nothing to restore.
        return output_tensor
    width = get_shape_list(output_tensor)[-1]
    leading_dims = orig_shape_list[:-1]
    return tf.reshape(output_tensor, leading_dims + [width])
def assert_rank(tensor, expected_rank, name=None):
    """Raise ValueError unless `tensor`'s static rank is `expected_rank`
    (an int) or one of `expected_rank` (an iterable of ints)."""
    if name is None:
        name = tensor.name
    if isinstance(expected_rank, six.integer_types):
        allowed_ranks = {expected_rank}
    else:
        allowed_ranks = set(expected_rank)
    actual_rank = tensor.shape.ndims
    if actual_rank not in allowed_ranks:
        scope_name = tf.get_variable_scope().name
        raise ValueError(
            "For the tensor `%s` in scope `%s`, the actual rank "
            "`%d` (shape = %s) is not equal to the expected rank `%s`" %
            (name, scope_name, actual_rank, str(tensor.shape), str(expected_rank)))
def ft_bert(inputs,
            encoder_args,
            encoder_vars_dict,
            sequence_length):
    '''
    Run the bert transformer layer by FasterTransformer.
    Args:
        inputs: A tf.Tensor with shape [batch_size, seq_len, hidden_dimension].
            The inputs tensor of encoder. The rank must be 3.
        encoder_args: The arguments for encoder. The details are in the class "TransformerArgument" of common.py
        encoder_vars_dict: A dict of tf.Tensor or numpy array.
            The variables for encoder. They can be either some tensor or some numpy array.
            The key is the name of the tensor, like 'layer_0/attention/self/query/kernel:0'.
            The value is the corresponding tensor or numpy array
        sequence_length: A tf.Tensor or numpy array with shape [batch_size].
            The sequence length of the sentences
    Outputs:
        outputs: A tensor with shape [batch_size, seq_len, hidden_dimension].
            The results of encoder.
    '''
    # Load the custom FasterTransformer op; the .so must be built beforehand
    # and be present relative to the working directory.
    transformer_op_module = tf.load_op_library(os.path.join('./lib/libtf_bert.so'))
    if encoder_args.int8_mode == 0:
        # fp32/fp16 path: per-layer weight lists are passed positionally in
        # the order the op expects (Q/K/V, attention output, LayerNorms, FFN).
        outputs = transformer_op_module.bert(
            inputs,
            inputs,
            sequence_length,
            [encoder_vars_dict['layer_%d/attention/self/query/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/query/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/key/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/key/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/value/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/value/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/dense/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/dense/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/LayerNorm/beta:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/LayerNorm/gamma:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/intermediate/dense/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/intermediate/dense/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/dense/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/dense/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/LayerNorm/beta:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/LayerNorm/gamma:0' % id] for id in range(encoder_args.num_layer)],
            head_num = encoder_args.head_num, size_per_head = encoder_args.size_per_head,
            inter_size = encoder_args.inter_size,
            num_layer = encoder_args.num_layer, remove_padding=encoder_args.remove_padding,
            q_scaling = 1.0)
    else:
        # int8 path: same weight lists plus the per-layer amaxList calibration
        # buffers (created by tf_bert when int8_mode != 0).
        outputs = transformer_op_module.bert_int8(
            inputs,
            inputs,
            sequence_length,
            [encoder_vars_dict['layer_%d/attention/self/query/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/query/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/key/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/key/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/value/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/self/value/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/dense/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/dense/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/LayerNorm/beta:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/attention/output/LayerNorm/gamma:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/intermediate/dense/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/intermediate/dense/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/dense/kernel:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/dense/bias:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/LayerNorm/beta:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/output/LayerNorm/gamma:0' % id] for id in range(encoder_args.num_layer)],
            [encoder_vars_dict['layer_%d/amaxList:0' % id] for id in range(encoder_args.num_layer)],
            head_num = encoder_args.head_num,
            size_per_head = encoder_args.size_per_head,
            inter_size = encoder_args.inter_size,
            num_layer = encoder_args.num_layer,
            int8_mode = encoder_args.int8_mode,
            remove_padding=encoder_args.remove_padding,
            q_scaling = 1.0)
    return outputs
|
"""
# Copyright 2022 Red Hat
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
import operator
import re
from collections import namedtuple
# Parses a comparison expression such as "<=10": group 1 is the (possibly
# empty) comparison operator, group 2 the full integer operand.
# FIX: the quantifier must be inside the group -- the previous `(\d)+`
# captured only the LAST digit of a multi-digit operand.
EXPRESSION_PATTERN = re.compile(r"([<>=!]*)(\d+)")

# A single parsed constraint: `operator` is a key of RANGE_OPERATORS,
# `operand` the bound it compares against.
Range = namedtuple('Range', 'operator operand')

# Maps each textual comparison operator to its binary predicate.
RANGE_OPERATORS = {"==": operator.eq, "=": operator.eq, "<": operator.lt,
                   ">": operator.gt, "<=": operator.le, ">=": operator.ge,
                   "!=": operator.ne}

# Comma-separated list of the supported operators (for error messages).
VALID_OPS = ",".join(RANGE_OPERATORS.keys())
|
//# sourceMappingURL=dateInterface.js.map
|
// homebridge-rpi/index.js
// Copyright © 2019-2021 Erik Baauw. All rights reserved.
//
// Homebridge plugin for Raspberry Pi.
'use strict'
const RpiPlatform = require('./lib/RpiPlatform')
const packageJson = require('./package.json')
module.exports = function (homebridge) {
RpiPlatform.loadPlatform(homebridge, packageJson, 'RPi', RpiPlatform)
}
|
from __future__ import unicode_literals
from . import packetutils as pckt
from os import urandom
from bluepy import btle
import logging
import struct
import time
# Commands :
#: Set mesh groups.
#: Data : 3 bytes
C_MESH_GROUP = 0xd7
#: Set the mesh id. The light will still answer to the 0 mesh id. Calling the
#: command again replaces the previous mesh id.
#: Data : the new mesh id, 2 bytes in little endian order
C_MESH_ADDRESS = 0xe0
#: Reset the light back to its factory mesh name/password.
C_MESH_RESET = 0xe3
#: On/Off command. Data : one byte 0, 1
C_POWER = 0xd0
#: Data : one byte
C_LIGHT_MODE = 0x33
#: Data : one byte 0 to 6
C_PRESET = 0xc8
#: White temperature. one byte 0 to 0x7f
C_WHITE_TEMPERATURE = 0xf0
#: one byte 1 to 0x7f
C_WHITE_BRIGHTNESS = 0xf1
#: 4 bytes : 0x4 red green blue
C_COLOR = 0xe2
#: one byte : 0xa to 0x64 ....
C_COLOR_BRIGHTNESS = 0xf2
#: Data 4 bytes : How long a color is displayed in a sequence in milliseconds as
#: an integer in little endian order
C_SEQUENCE_COLOR_DURATION = 0xf5
#: Data 4 bytes : Duration of the fading between colors in a sequence, in
#: milliseconds, as an integer in little endian order
C_SEQUENCE_FADE_DURATION = 0xf6
#: 7 bytes
C_TIME = 0xe4
#: 10 bytes
C_ALARMS = 0xe5
#: Request current light/device status
C_GET_STATUS_SENT = 0xda
#: Response of light/device status request
C_GET_STATUS_RECEIVED = 0xdb
#: State notification
C_NOTIFICATION_RECEIVED = 0xdc
# GATT characteristic UUIDs used by the AwoX mesh protocol.
PAIR_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1914'
COMMAND_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1912'
STATUS_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1911'
OTA_CHAR_UUID = '00010203-0405-0607-0809-0a0b0c0d1913'
# Module-level logger shared by all classes in this module.
logger = logging.getLogger(__name__)
class Peripheral(btle.Peripheral):
    """btle.Peripheral with locally backported connection/response patches.

    Overrides `_connect`, `_getResp` and `_waitResp` to add connection
    timeouts and saner response handling; see the upstream bluepy PR/commit
    links in the method docstrings.  Remove once the fixes ship in a
    released bluepy version.
    """

    def _connect(self, addr, addrType=btle.ADDR_TYPE_PUBLIC, iface=None, timeout=5):
        """
        Temporary manual patch see https://github.com/IanHarvey/bluepy/pull/434
        also added a default `timeout` as this is not part yet of the release bluepy package
        """
        # Validate arguments before spawning/talking to the helper process.
        if len(addr.split(":")) != 6:
            raise ValueError("Expected MAC address, got %s" % repr(addr))
        if addrType not in (btle.ADDR_TYPE_PUBLIC, btle.ADDR_TYPE_RANDOM):
            raise ValueError("Expected address type public or random, got {}".format(addrType))
        self._startHelper(iface)
        self.addr = addr
        self.addrType = addrType
        self.iface = iface
        # The helper optionally takes an explicit hci interface name.
        if iface is not None:
            self._writeCmd("conn %s %s %s\n" % (addr, addrType, "hci"+str(iface)))
        else:
            self._writeCmd("conn %s %s\n" % (addr, addrType))
        rsp = self._getResp('stat', timeout)
        if rsp is None:
            # No status within `timeout`: tear down the helper and report.
            self._stopHelper()
            raise btle.BTLEDisconnectError("Timed out while trying to connect to peripheral %s, addr type: %s" %
                                           (addr, addrType), rsp)
        # Poll while the helper is still in the transient 'tryconn' state.
        while rsp and rsp['state'][0] == 'tryconn':
            rsp = self._getResp('stat', timeout)
            if rsp is None:
                self._stopHelper()
                raise btle.BTLEDisconnectError("Timed out while trying to connect to peripheral %s, addr type: %s" %
                                               (addr, addrType), rsp)
        if rsp['state'][0] != 'conn':
            self._stopHelper()
            raise btle.BTLEDisconnectError("Failed to connect to peripheral %s, addr type: %s [%s]" % (addr, addrType, rsp), rsp)

    def _getResp(self, wantType, timeout=None):
        """
        Temporary manual patch see https://github.com/IanHarvey/bluepy/commit/b02b436cb5c71387bd70339a1b472b3a6bfe9ac8
        """
        # Temp set max timeout for wr commands (failsafe)
        if timeout is None and wantType == 'wr':
            logger.debug('Set fallback time out - %s', wantType)
            timeout = 10
        if isinstance(wantType, list) is not True:
            wantType = [wantType]
        while True:
            # Also wait for notifications/indications so the delegate keeps
            # being fed while we are blocked on another response type.
            resp = self._waitResp(wantType + ['ntfy', 'ind'], timeout)
            if resp is None:
                return None
            respType = resp['rsp'][0]
            if respType == 'ntfy' or respType == 'ind':
                hnd = resp['hnd'][0]
                data = resp['d'][0]
                if self.delegate is not None:
                    self.delegate.handleNotification(hnd, data)
            if respType not in wantType:
                continue
            return resp

    def _waitResp(self, wantType, timeout=None):
        # Read lines from the bluepy-helper subprocess until one matches
        # `wantType`, translating error/status lines into exceptions.
        while True:
            if self._helper.poll() is not None:
                raise btle.BTLEInternalError("Helper exited")
            if timeout:
                logger.debug("_waitResp - set timeout to %d", timeout)
                fds = self._poller.poll(timeout*1000)
                if len(fds) == 0:
                    logger.debug("Select timeout")
                    return None
            rv = self._helper.stdout.readline()
            # Skip helper comments and empty lines.
            if rv.startswith('#') or rv == '\n' or len(rv)==0:
                continue
            resp = btle.BluepyHelper.parseResp(rv)
            if 'rsp' not in resp:
                raise btle.BTLEInternalError("No response type indicator", resp)
            respType = resp['rsp'][0]
            if respType in wantType:
                logger.debug("_waitResp - resp [%s]", resp)
                return resp
            elif respType == 'stat':
                # A 'disc' status means the peripheral dropped the connection.
                if 'state' in resp and len(resp['state']) > 0 and resp['state'][0] == 'disc':
                    self._stopHelper()
                    raise btle.BTLEDisconnectError("Device disconnected", resp)
            elif respType == 'err':
                errcode=resp['code'][0]
                if errcode=='nomgmt':
                    raise btle.BTLEManagementError("Management not available (permissions problem?)", resp)
                elif errcode=='atterr':
                    raise btle.BTLEGattError("Bluetooth command failed", resp)
                else:
                    raise btle.BTLEException("Error from bluepy-helper (%s)" % errcode, resp)
            elif respType == 'scan':
                # Scan response when we weren't interested. Ignore it
                continue
            else:
                raise btle.BTLEInternalError("Unexpected response (%s)" % respType, resp)
class Delegate(btle.DefaultDelegate):
    """bluepy delegate that decrypts incoming notifications and forwards the
    parsed status to its owning light."""

    def __init__(self, light):
        btle.DefaultDelegate.__init__(self)
        self.light = light

    def handleNotification(self, cHandle, data):
        light = self.light
        if light.session_key is None:
            # Without a session key the payload cannot be decrypted.
            logger.info(
                "Device [%s] is disconnected, ignoring received notification [unable to decrypt without active session]",
                light.mac)
            return
        message = pckt.decrypt_packet(light.session_key, light.mac, data)
        if message is None:
            logger.warning("Failed to decrypt package [key: %s, data: %s]",
                           light.session_key, data)
            return
        logger.debug("Received notification %s", message)
        light.parseStatusResult(message)
class AwoxMeshLight:
def __init__(self, mac, mesh_name="unpaired", mesh_password="1234", mesh_id=0):
"""
Args :
mac: The light's MAC address as a string in the form AA:BB:CC:DD:EE:FF
mesh_name: The mesh name as a string.
mesh_password: The mesh password as a string.
mesh_id: The mesh id (address)
"""
self.mac = mac
self.mesh_id = mesh_id
self.btdevice = Peripheral()
self.session_key = None
self.command_char = None
self.status_char = None
self.mesh_name = mesh_name.encode()
self.mesh_password = mesh_password.encode()
# Light status
self.white_brightness = None
self.white_temperature = None
self.color_brightness = None
self.red = None
self.green = None
self.blue = None
self.color_mode = None
self.transition_mode = None
self.state = None
self.status_callback = None
def connect(self, mesh_name=None, mesh_password=None):
"""
Args :
mesh_name: The mesh name as a string.
mesh_password: The mesh password as a string.
"""
if mesh_name: self.mesh_name = mesh_name.encode()
if mesh_password: self.mesh_password = mesh_password.encode()
assert len(self.mesh_name) <= 16, "mesh_name can hold max 16 bytes"
assert len(self.mesh_password) <= 16, "mesh_password can hold max 16 bytes"
self.btdevice.connect(self.mac)
self.btdevice.setDelegate(Delegate(self))
pair_char = self.btdevice.getCharacteristics(uuid=PAIR_CHAR_UUID)[0]
self.session_random = urandom(8)
message = pckt.make_pair_packet(self.mesh_name, self.mesh_password, self.session_random)
pair_char.write(message)
self.status_char = self.btdevice.getCharacteristics(uuid=STATUS_CHAR_UUID)[0]
self.status_char.write(b'\x01')
reply = bytearray(pair_char.read())
if reply[0] == 0xd:
self.session_key = pckt.make_session_key(self.mesh_name, self.mesh_password, self.session_random, reply[1:9])
else:
if reply[0] == 0xe:
logger.info("Auth error : check name and password.")
else:
logger.info("Unexpected pair value : %s", repr(reply))
self.disconnect()
return False
return True
def waitForNotifications(self):
session_key = self.session_key
logger.info('[%s] Started waitForNotifications', self.mac)
while self.session_key == session_key:
try:
self.btdevice.waitForNotifications(5)
except btle.BTLEDisconnectError:
self.session_key = None
except Exception as error:
logger.debug("waitForNotifications error - %s", error)
# If we get the response to a write then we'll break
pass
logger.info('[%s] WaitForNotifications done', self.mac)
def connectWithRetry(self, num_tries=1, mesh_name=None, mesh_password=None):
"""
Args:
num_tries: The number of attempts to connect.
mesh_name: The mesh name as a string.
mesh_password: The mesh password as a string.
"""
connected = False
attempts = 0
while (not connected and attempts < num_tries):
try:
connected = self.connect(mesh_name, mesh_password)
except btle.BTLEDisconnectError:
logger.info("connection_error: retrying for %s time", attempts)
finally:
attempts += 1
return connected
def setMesh(self, new_mesh_name, new_mesh_password, new_mesh_long_term_key):
"""
Sets or changes the mesh network settings.
Args :
new_mesh_name: The new mesh name as a string, 16 bytes max.
new_mesh_password: The new mesh password as a string, 16 bytes max.
new_mesh_long_term_key: The new long term key as a string, 16 bytes max.
Returns :
True on success.
"""
assert (self.session_key), "Not connected"
assert len(new_mesh_name.encode()) <= 16, "new_mesh_name can hold max 16 bytes"
assert len(new_mesh_password.encode()) <= 16, "new_mesh_password can hold max 16 bytes"
assert len(new_mesh_long_term_key.encode()) <= 16, "new_mesh_long_term_key can hold max 16 bytes"
pair_char = self.btdevice.getCharacteristics(uuid=PAIR_CHAR_UUID)[0]
# FIXME : Removing the delegate as a workaround to a bluepy.btle.BTLEException
# similar to https://github.com/IanHarvey/bluepy/issues/182 That may be
# a bluepy bug or I'm using it wrong or both ...
self.btdevice.setDelegate(None)
message = pckt.encrypt(self.session_key, new_mesh_name.encode())
message.insert(0, 0x4)
pair_char.write(message)
message = pckt.encrypt(self.session_key, new_mesh_password.encode())
message.insert(0, 0x5)
pair_char.write(message)
message = pckt.encrypt(self.session_key, new_mesh_long_term_key.encode())
message.insert(0, 0x6)
pair_char.write(message)
time.sleep(1)
reply = bytearray(pair_char.read())
self.btdevice.setDelegate(Delegate(self))
if reply[0] == 0x7:
self.mesh_name = new_mesh_name.encode()
self.mesh_password = new_mesh_password.encode()
logger.info("Mesh network settings accepted.")
return True
else:
logger.info("Mesh network settings change failed : %s", repr(reply))
return False
def setMeshId(self, mesh_id):
"""
Sets the mesh id.
Args :
mesh_id: as a number.
"""
data = struct.pack("<H", mesh_id)
self.writeCommand(C_MESH_ADDRESS, data)
self.mesh_id = mesh_id
def writeCommand(self, command, data, dest=None, withResponse=True):
"""
Args:
command: The command, as a number.
data: The parameters for the command, as bytes.
dest: The destination mesh id, as a number. If None, this lightbulb's
mesh id will be used.
"""
assert (self.session_key)
if dest == None: dest = self.mesh_id
packet = pckt.make_command_packet(self.session_key, self.mac, dest, command, data)
try:
if not self.command_char:
self.command_char = self.btdevice.getCharacteristics(uuid=COMMAND_CHAR_UUID)[0]
logger.info("[%s][%d] Writing command %i data %s", self.mac, dest, command, repr(data))
return self.command_char.write(packet, withResponse=withResponse)
except btle.BTLEDisconnectError as err:
logger.error('Command failed, device is disconnected: %s', err)
self.session_key = None
raise err
except btle.BTLEInternalError as err:
if 'Helper not started' in str(err):
logger.error('Command failed, Helper not started, device is disconnected: %s', err)
self.session_key = None
else:
logger.exception('Command response failed to be correctly processed but we ignore it for now: %s', err)
def resetMesh(self):
    """Factory-reset the mesh name and password; the device drops the link."""
    reset_payload = b'\x00'
    return self.writeCommand(C_MESH_RESET, reset_payload)
def readStatus(self):
    """Read the status characteristic and return the decrypted packet."""
    raw = self.status_char.read()
    return pckt.decrypt_packet(self.session_key, self.mac, raw)
def parseStatusResult(self, data):
    """Parse a decrypted status packet and update this light's cached state.

    Handles two packet layouts carrying the same fields at different
    offsets: direct status replies (C_GET_STATUS_RECEIVED) and mesh
    notifications (C_NOTIFICATION_RECEIVED). If the packet concerns this
    light's own mesh id, the instance attributes are refreshed; any parsed
    status is also forwarded to ``status_callback`` when one is set.
    """
    # Byte 7 identifies the packet type.
    command = struct.unpack('B', data[7:8])[0]
    status = {}
    if command == C_GET_STATUS_RECEIVED:
        mode = struct.unpack('B', data[10:11])[0]
        # mesh id is stored little-endian: low byte at offset 3, high at 4.
        mesh_id = (struct.unpack('B', data[4:5])[0] * 256) + struct.unpack('B', data[3:4])[0]
        white_brightness, white_temperature = struct.unpack('BB', data[11:13])
        color_brightness, red, green, blue = struct.unpack('BBBB', data[13:17])
        status = {
            'mesh_id': mesh_id,
            'state': (mode & 1) == 1,  # bit 0: power on/off
            'color_mode': ((mode >> 1) & 1) == 1,  # bit 1: color vs. white mode
            'transition_mode': ((mode >> 2) & 1) == 1,  # bit 2: transition running
            'red': red,
            'green': green,
            'blue': blue,
            'white_temperature': white_temperature,
            'white_brightness': white_brightness,
            'color_brightness': color_brightness,
        }
    if command == C_NOTIFICATION_RECEIVED:
        # Notification layout: mesh id high byte at offset 19, low byte at 10
        # (offsets taken as-is from the observed protocol).
        mesh_id = (struct.unpack('B', data[19:20])[0] * 256) + struct.unpack('B', data[10:11])[0]
        mode = struct.unpack('B', data[12:13])[0]
        white_brightness, white_temperature = struct.unpack('BB', data[13:15])
        color_brightness, red, green, blue = struct.unpack('BBBB', data[15:19])
        status = {
            'mesh_id': mesh_id,
            'state': (mode & 1) == 1,
            'color_mode': ((mode >> 1) & 1) == 1,
            'transition_mode': ((mode >> 2) & 1) == 1,
            'red': red,
            'green': green,
            'blue': blue,
            'white_temperature': white_temperature,
            'white_brightness': white_brightness,
            'color_brightness': color_brightness,
        }
    if status:
        logger.debug('parsed status %s', status)
    else:
        logger.info('Unknown command [%d]', command)
    # Only mirror the values into this object when the packet is about
    # this light's own mesh id.
    if status and status['mesh_id'] == self.mesh_id:
        logger.info('Update light status - mesh_id %d', status['mesh_id'])
        self.state = status['state']
        self.color_mode = status['color_mode']
        self.transition_mode = status['transition_mode']
        self.white_brightness = status['white_brightness']
        self.white_temperature = status['white_temperature']
        self.color_brightness = status['color_brightness']
        self.red = status['red']
        self.green = status['green']
        self.blue = status['blue']
    # Forward every parsed status (own or another node's) to the callback.
    if status and self.status_callback:
        self.status_callback(status)
def requestStatus(self, dest=None, withResponse=False):
    """Ask the bulb (or another mesh node) to report its current status."""
    logger.debug('requestStatus(%s)', dest)
    payload = struct.pack('B', 16)
    return self.writeCommand(C_GET_STATUS_SENT, payload, dest, withResponse)
def setColor(self, red, green, blue, dest=None):
    """Set the RGB color of the light.

    Args:
        red, green, blue: Channel values between 0 and 0xff.
    """
    payload = struct.pack('BBBB', 0x04, red, green, blue)
    return self.writeCommand(C_COLOR, payload, dest)
def setColorBrightness(self, brightness, dest=None):
    """Set the brightness used while in color mode.

    Args:
        brightness: A value between 0xa and 0x64.
    """
    payload = struct.pack('B', brightness)
    return self.writeCommand(C_COLOR_BRIGHTNESS, payload, dest)
def setSequenceColorDuration(self, duration, dest=None):
    """Set how long each color of a preset sequence is shown.

    Args:
        duration: Duration in milliseconds.
    """
    payload = struct.pack("<I", duration)
    return self.writeCommand(C_SEQUENCE_COLOR_DURATION, payload, dest)
def setSequenceFadeDuration(self, duration, dest=None):
    """Set the fade time between colors of a preset sequence.

    Args:
        duration: Duration in milliseconds.
    """
    payload = struct.pack("<I", duration)
    return self.writeCommand(C_SEQUENCE_FADE_DURATION, payload, dest)
def setPreset(self, num, dest=None):
    """Activate one of the built-in color sequences.

    Args:
        num: Preset number between 0 and 6.
    """
    payload = struct.pack('B', num)
    return self.writeCommand(C_PRESET, payload, dest)
def setWhiteBrightness(self, brightness, dest=None):
    """Set the brightness used while in white mode.

    Args:
        brightness: A value between 1 and 0x7f.
    """
    payload = struct.pack('B', brightness)
    return self.writeCommand(C_WHITE_BRIGHTNESS, payload, dest)
def setWhiteTemperature(self, temp, dest=None):
    """Set the white color temperature.

    Args:
        temp: A value between 0 and 0x7f.
    """
    payload = struct.pack('B', temp)
    return self.writeCommand(C_WHITE_TEMPERATURE, payload, dest)
def setWhite(self, temp, brightness, dest=None):
    """Set white temperature and brightness with two consecutive commands.

    Args:
        temp: A value between 0 and 0x7f.
        brightness: A value between 1 and 0x7f.
    """
    self.writeCommand(C_WHITE_TEMPERATURE, struct.pack('B', temp), dest)
    return self.writeCommand(C_WHITE_BRIGHTNESS, struct.pack('B', brightness), dest)
def on(self, dest=None):
    """Turn the light on."""
    payload = b'\x01'
    return self.writeCommand(C_POWER, payload, dest)
def off(self, dest=None):
    """Turn the light off."""
    payload = b'\x00'
    return self.writeCommand(C_POWER, payload, dest)
def reconnect(self):
    """Drop the current session key and establish a fresh connection."""
    logger.debug("Reconnecting.")
    self.session_key = None
    self.connect()
def disconnect(self):
    """Disconnect from the device; BLE errors are logged, not raised."""
    logger.debug("Disconnecting.")
    try:
        self.btdevice.disconnect()
    except Exception as exc:
        logger.warning('Disconnect failed: %s', exc)
    self.session_key = None
def getFirmwareRevision(self):
    """Return the firmware version as a null-terminated utf-8 string."""
    characteristic = self.btdevice.getCharacteristics(uuid=btle.AssignedNumbers.firmwareRevisionString)[0]
    return characteristic.read()
def getHardwareRevision(self):
    """Return the hardware version as a null-terminated utf-8 string."""
    characteristic = self.btdevice.getCharacteristics(uuid=btle.AssignedNumbers.hardwareRevisionString)[0]
    return characteristic.read()
def getModelNumber(self):
    """Return the model number as a null-terminated utf-8 string."""
    characteristic = self.btdevice.getCharacteristics(uuid=btle.AssignedNumbers.modelNumberString)[0]
    return characteristic.read()
def sendFirmware(self, firmware_path):
    """
    Updates the light bulb's firmware. The light will blink green after
    receiving the new firmware.

    Args:
        firmware_path: The path of the firmware file.
    """
    assert self.session_key, "a session must be established before flashing"
    with open(firmware_path, 'rb') as firmware_file:
        firmware_data = firmware_file.read()
    if not firmware_data:
        return
    ota_char = self.btdevice.getCharacteristics(uuid=OTA_CHAR_UUID)[0]
    # Total number of data packets (integer; the original float division
    # produced values like '12.0' in the log). Hoisted out of the loop.
    total_packets = len(firmware_data) // 0x10 + 1
    count = 0
    for i in range(0, len(firmware_data), 0x10):
        # Packet layout: little-endian sequence number, 16 data bytes
        # (0xff padded), then a CRC16 of the preceding bytes.
        data = struct.pack('<H', count) + firmware_data[i:i + 0x10].ljust(0x10, b'\xff')
        crc = pckt.crc16(data)
        packet = data + struct.pack('<H', crc)
        logger.debug("Writing packet %i of %i : %s", count + 1, total_packets, repr(packet))
        ota_char.write(packet)
        # FIXME : When calling write with withResponse=True bluepy hangs after a few packets.
        # Without any delay the light blinks once without accepting the firmware.
        # The chosen value is arbitrary.
        time.sleep(0.01)
        count += 1
    # Final packet carries only the next sequence number (plus CRC) to
    # signal end-of-transfer.
    data = struct.pack('<H', count)
    crc = pckt.crc16(data)
    packet = data + struct.pack('<H', crc)
    logger.debug("Writing last packet : %s", repr(packet))
    ota_char.write(packet)
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#ifndef DORIS_BE_SRC_QUERY_EXEC_OLAP_SCANNER_H
#define DORIS_BE_SRC_QUERY_EXEC_OLAP_SCANNER_H
#include <list>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "common/status.h"
#include "exec/exec_node.h"
#include "exec/olap_utils.h"
#include "exprs/bloomfilter_predicate.h"
#include "exprs/expr.h"
#include "gen_cpp/PaloInternalService_types.h"
#include "gen_cpp/PlanNodes_types.h"
#include "olap/delete_handler.h"
#include "olap/olap_cond.h"
#include "olap/reader.h"
#include "olap/rowset/column_data.h"
#include "olap/storage_engine.h"
#include "runtime/descriptors.h"
#include "runtime/tuple.h"
#include "runtime/vectorized_row_batch.h"
namespace doris {
class OlapScanNode;
class OLAPReader;
class RuntimeProfile;
class Field;
// Scans a single tablet (one scan range) on behalf of its parent
// OlapScanNode and materializes matching rows into RowBatches.
// Lifecycle: prepare() -> open() -> get_batch()* -> close().
class OlapScanner {
public:
    OlapScanner(RuntimeState* runtime_state, OlapScanNode* parent, bool aggregation,
                bool need_agg_finalize, const TPaloScanRange& scan_range,
                const std::vector<OlapScanRange*>& key_ranges);

    ~OlapScanner();

    // Resolves the tablet and builds reader parameters from the key ranges,
    // pushed-down filter conditions and runtime bloom filters.
    Status prepare(const TPaloScanRange& scan_range, const std::vector<OlapScanRange*>& key_ranges,
                   const std::vector<TCondition>& filters,
                   const std::vector<std::pair<std::string, std::shared_ptr<IBloomFilterFuncBase>>>&
                           bloom_filters);

    // Initializes the underlying Reader; must follow prepare().
    Status open();

    // Fills 'batch' with rows; sets *eof when the scan range is exhausted.
    Status get_batch(RuntimeState* state, RowBatch* batch, bool* eof);

    Status close(RuntimeState* state);

    RuntimeState* runtime_state() { return _runtime_state; }

    std::vector<ExprContext*>* conjunct_ctxs() { return &_conjunct_ctxs; }

    int id() const { return _id; }
    void set_id(int id) { _id = id; }
    bool is_open() const { return _is_open; }
    void set_opened() { _is_open = true; }

    int64_t raw_rows_read() const { return _raw_rows_read; }

    void update_counter();

    // Path of the data dir this scanner reads from.
    const std::string& scan_disk() const { return _tablet->data_dir()->path(); }

    // Starts timing how long this scanner waits for a scanner worker thread.
    void start_wait_worker_timer() {
        _watcher.reset();
        _watcher.start();
    }

    // Elapsed wait time since start_wait_worker_timer().
    int64_t update_wait_worker_timer() { return _watcher.elapsed_time(); }

    void set_use_pushdown_conjuncts(bool has_pushdown_conjuncts) {
        _use_pushdown_conjuncts = has_pushdown_conjuncts;
    }

    std::vector<bool>* mutable_runtime_filter_marks() { return &_runtime_filter_marks; }

    const std::vector<SlotDescriptor*>& get_query_slots() const {
        return _query_slots;
    }

protected:
    Status _init_params(const std::vector<OlapScanRange*>& key_ranges,
                        const std::vector<TCondition>& filters,
                        const std::vector<std::pair<string, std::shared_ptr<IBloomFilterFuncBase>>>&
                                bloom_filters);
    Status _init_return_columns();
    void _convert_row_to_tuple(Tuple* tuple);

    // Update profile that need to be reported in realtime.
    void _update_realtime_counter();

protected:
    RuntimeState* _runtime_state;
    OlapScanNode* _parent;
    const TupleDescriptor* _tuple_desc; /**< tuple descriptor */

    RuntimeProfile* _profile;
    const std::vector<SlotDescriptor*>& _string_slots;
    const std::vector<SlotDescriptor*>& _collection_slots;

    std::vector<ExprContext*> _conjunct_ctxs;
    // to record which runtime filters have been used
    std::vector<bool> _runtime_filter_marks;

    int _id;
    bool _is_open;
    bool _aggregation;
    bool _need_agg_finalize = true;
    bool _has_update_counter = false;

    int _tuple_idx = 0;
    int _direct_conjunct_size = 0;

    bool _use_pushdown_conjuncts = false;

    ReaderParams _params;
    std::unique_ptr<Reader> _reader;

    TabletSharedPtr _tablet;
    int64_t _version;

    std::vector<uint32_t> _return_columns;

    RowCursor _read_row_cursor;

    std::vector<SlotDescriptor*> _query_slots;

    // time costed and row returned statistics
    ExecNode::EvalConjunctsFn _eval_conjuncts_fn = nullptr;
    RuntimeProfile::Counter* _rows_read_counter = nullptr;
    int64_t _num_rows_read = 0;
    int64_t _raw_rows_read = 0;
    int64_t _compressed_bytes_read = 0;
    RuntimeProfile::Counter* _rows_pushed_cond_filtered_counter = nullptr;
    // number rows filtered by pushed condition
    int64_t _num_rows_pushed_cond_filtered = 0;

    bool _is_closed = false;

    MonotonicStopWatch _watcher;

    std::shared_ptr<MemTracker> _mem_tracker;
};
} // namespace doris
#endif
|
/********************************************************************************
** Form generated from reading UI file 'contoursui.ui'
**
** Created by: Qt User Interface Compiler version 5.2.1
**
** WARNING! All changes made in this file will be lost when recompiling UI file!
********************************************************************************/
#ifndef UI_CONTOURSUI_H
#define UI_CONTOURSUI_H
#include <QtCore/QVariant>
#include <QtWidgets/QAction>
#include <QtWidgets/QApplication>
#include <QtWidgets/QButtonGroup>
#include <QtWidgets/QDialog>
#include <QtWidgets/QFormLayout>
#include <QtWidgets/QGridLayout>
#include <QtWidgets/QGroupBox>
#include <QtWidgets/QHeaderView>
#include <QtWidgets/QLabel>
#include <QtWidgets/QPushButton>
#include <QtWidgets/QSpinBox>
#include <QtWidgets/QTabWidget>
#include <QtWidgets/QWidget>
QT_BEGIN_NAMESPACE
// Auto-generated by Qt's uic from contoursui.ui — do not edit by hand;
// regenerate from the .ui file instead. Describes the "Contour settings"
// dialog: Sobel / Laplacian / Canny parameter groups plus a tab widget
// with Hough-circle and probabilistic-line parameters.
class Ui_contoursUI
{
public:
    QGridLayout *gridLayout;      // top-level dialog layout
    QGroupBox *groupBox_2;        // "Sobel Parameters"
    QFormLayout *formLayout;
    QLabel *label;
    QSpinBox *spinSobelApt;
    QLabel *label_5;
    QSpinBox *spinSobelThresh;
    QGroupBox *groupBox_3;        // "Laplacian Parameters"
    QFormLayout *formLayout_2;
    QLabel *label_2;
    QSpinBox *spinLapKSize;
    QGroupBox *groupBox;          // "Canny Parameters"
    QFormLayout *formLayout_3;
    QLabel *label_3;
    QSpinBox *spinCannyMin;
    QLabel *label_4;
    QSpinBox *spinCannyMax;
    QTabWidget *Circle;           // "Circle" / "P-Lines" tabs
    QWidget *tab;
    QGroupBox *groupBox_4;        // "Hough Parameters"
    QFormLayout *formLayout_4;
    QLabel *label_6;
    QSpinBox *spinVotes;
    QLabel *label_7;
    QSpinBox *spinMinRad;
    QLabel *label_8;
    QSpinBox *spinMaxRad;
    QLabel *label_9;
    QSpinBox *spinCircDist;
    QWidget *tab_2;
    QGridLayout *gridLayout_3;
    QLabel *label_10;
    QSpinBox *spinrho;
    QLabel *label_11;
    QSpinBox *spinthresh;
    QLabel *label_12;
    QSpinBox *spinmin;
    QLabel *label_13;
    QSpinBox *spinmax;
    QPushButton *pushButton;      // "Apply"

    // Builds the widget tree and applies the default spin-box values.
    void setupUi(QDialog *contoursUI)
    {
        if (contoursUI->objectName().isEmpty())
            contoursUI->setObjectName(QStringLiteral("contoursUI"));
        contoursUI->resize(254, 526);
        gridLayout = new QGridLayout(contoursUI);
        gridLayout->setObjectName(QStringLiteral("gridLayout"));
        // --- Sobel parameter group ---
        groupBox_2 = new QGroupBox(contoursUI);
        groupBox_2->setObjectName(QStringLiteral("groupBox_2"));
        formLayout = new QFormLayout(groupBox_2);
        formLayout->setObjectName(QStringLiteral("formLayout"));
        formLayout->setFieldGrowthPolicy(QFormLayout::AllNonFixedFieldsGrow);
        label = new QLabel(groupBox_2);
        label->setObjectName(QStringLiteral("label"));
        formLayout->setWidget(0, QFormLayout::LabelRole, label);
        spinSobelApt = new QSpinBox(groupBox_2);
        spinSobelApt->setObjectName(QStringLiteral("spinSobelApt"));
        spinSobelApt->setValue(3);
        formLayout->setWidget(0, QFormLayout::FieldRole, spinSobelApt);
        label_5 = new QLabel(groupBox_2);
        label_5->setObjectName(QStringLiteral("label_5"));
        formLayout->setWidget(1, QFormLayout::LabelRole, label_5);
        spinSobelThresh = new QSpinBox(groupBox_2);
        spinSobelThresh->setObjectName(QStringLiteral("spinSobelThresh"));
        spinSobelThresh->setMaximum(255);
        spinSobelThresh->setValue(128);
        formLayout->setWidget(1, QFormLayout::FieldRole, spinSobelThresh);
        gridLayout->addWidget(groupBox_2, 0, 0, 1, 1);
        // --- Laplacian parameter group ---
        groupBox_3 = new QGroupBox(contoursUI);
        groupBox_3->setObjectName(QStringLiteral("groupBox_3"));
        formLayout_2 = new QFormLayout(groupBox_3);
        formLayout_2->setObjectName(QStringLiteral("formLayout_2"));
        formLayout_2->setFieldGrowthPolicy(QFormLayout::AllNonFixedFieldsGrow);
        label_2 = new QLabel(groupBox_3);
        label_2->setObjectName(QStringLiteral("label_2"));
        formLayout_2->setWidget(0, QFormLayout::LabelRole, label_2);
        spinLapKSize = new QSpinBox(groupBox_3);
        spinLapKSize->setObjectName(QStringLiteral("spinLapKSize"));
        QSizePolicy sizePolicy(QSizePolicy::Minimum, QSizePolicy::Fixed);
        sizePolicy.setHorizontalStretch(0);
        sizePolicy.setVerticalStretch(4);
        sizePolicy.setHeightForWidth(spinLapKSize->sizePolicy().hasHeightForWidth());
        spinLapKSize->setSizePolicy(sizePolicy);
        spinLapKSize->setValue(3);
        formLayout_2->setWidget(0, QFormLayout::FieldRole, spinLapKSize);
        gridLayout->addWidget(groupBox_3, 1, 0, 1, 1);
        // --- Canny parameter group ---
        groupBox = new QGroupBox(contoursUI);
        groupBox->setObjectName(QStringLiteral("groupBox"));
        formLayout_3 = new QFormLayout(groupBox);
        formLayout_3->setObjectName(QStringLiteral("formLayout_3"));
        formLayout_3->setFieldGrowthPolicy(QFormLayout::AllNonFixedFieldsGrow);
        label_3 = new QLabel(groupBox);
        label_3->setObjectName(QStringLiteral("label_3"));
        formLayout_3->setWidget(0, QFormLayout::LabelRole, label_3);
        spinCannyMin = new QSpinBox(groupBox);
        spinCannyMin->setObjectName(QStringLiteral("spinCannyMin"));
        spinCannyMin->setMaximum(500);
        spinCannyMin->setValue(125);
        formLayout_3->setWidget(0, QFormLayout::FieldRole, spinCannyMin);
        label_4 = new QLabel(groupBox);
        label_4->setObjectName(QStringLiteral("label_4"));
        formLayout_3->setWidget(1, QFormLayout::LabelRole, label_4);
        spinCannyMax = new QSpinBox(groupBox);
        spinCannyMax->setObjectName(QStringLiteral("spinCannyMax"));
        spinCannyMax->setMaximum(1000);
        spinCannyMax->setValue(380);
        formLayout_3->setWidget(1, QFormLayout::FieldRole, spinCannyMax);
        gridLayout->addWidget(groupBox, 2, 0, 1, 1);
        // --- Tab widget: Hough circle parameters ---
        Circle = new QTabWidget(contoursUI);
        Circle->setObjectName(QStringLiteral("Circle"));
        tab = new QWidget();
        tab->setObjectName(QStringLiteral("tab"));
        groupBox_4 = new QGroupBox(tab);
        groupBox_4->setObjectName(QStringLiteral("groupBox_4"));
        groupBox_4->setGeometry(QRect(0, 0, 227, 162));
        formLayout_4 = new QFormLayout(groupBox_4);
        formLayout_4->setObjectName(QStringLiteral("formLayout_4"));
        label_6 = new QLabel(groupBox_4);
        label_6->setObjectName(QStringLiteral("label_6"));
        formLayout_4->setWidget(0, QFormLayout::LabelRole, label_6);
        spinVotes = new QSpinBox(groupBox_4);
        spinVotes->setObjectName(QStringLiteral("spinVotes"));
        spinVotes->setMaximum(1000);
        spinVotes->setValue(100);
        formLayout_4->setWidget(0, QFormLayout::FieldRole, spinVotes);
        label_7 = new QLabel(groupBox_4);
        label_7->setObjectName(QStringLiteral("label_7"));
        formLayout_4->setWidget(1, QFormLayout::LabelRole, label_7);
        spinMinRad = new QSpinBox(groupBox_4);
        spinMinRad->setObjectName(QStringLiteral("spinMinRad"));
        spinMinRad->setMaximum(1000);
        spinMinRad->setValue(100);
        formLayout_4->setWidget(1, QFormLayout::FieldRole, spinMinRad);
        label_8 = new QLabel(groupBox_4);
        label_8->setObjectName(QStringLiteral("label_8"));
        formLayout_4->setWidget(2, QFormLayout::LabelRole, label_8);
        spinMaxRad = new QSpinBox(groupBox_4);
        spinMaxRad->setObjectName(QStringLiteral("spinMaxRad"));
        spinMaxRad->setMaximum(1000);
        spinMaxRad->setValue(300);
        formLayout_4->setWidget(2, QFormLayout::FieldRole, spinMaxRad);
        label_9 = new QLabel(groupBox_4);
        label_9->setObjectName(QStringLiteral("label_9"));
        formLayout_4->setWidget(3, QFormLayout::LabelRole, label_9);
        spinCircDist = new QSpinBox(groupBox_4);
        spinCircDist->setObjectName(QStringLiteral("spinCircDist"));
        spinCircDist->setMaximum(1000);
        spinCircDist->setValue(200);
        formLayout_4->setWidget(3, QFormLayout::FieldRole, spinCircDist);
        Circle->addTab(tab, QString());
        // --- Tab widget: probabilistic Hough line parameters ---
        tab_2 = new QWidget();
        tab_2->setObjectName(QStringLiteral("tab_2"));
        gridLayout_3 = new QGridLayout(tab_2);
        gridLayout_3->setObjectName(QStringLiteral("gridLayout_3"));
        label_10 = new QLabel(tab_2);
        label_10->setObjectName(QStringLiteral("label_10"));
        gridLayout_3->addWidget(label_10, 0, 0, 1, 1);
        spinrho = new QSpinBox(tab_2);
        spinrho->setObjectName(QStringLiteral("spinrho"));
        spinrho->setValue(1);
        gridLayout_3->addWidget(spinrho, 0, 1, 1, 1);
        label_11 = new QLabel(tab_2);
        label_11->setObjectName(QStringLiteral("label_11"));
        gridLayout_3->addWidget(label_11, 1, 0, 1, 1);
        spinthresh = new QSpinBox(tab_2);
        spinthresh->setObjectName(QStringLiteral("spinthresh"));
        spinthresh->setMaximum(255);
        spinthresh->setValue(80);
        gridLayout_3->addWidget(spinthresh, 1, 1, 1, 1);
        label_12 = new QLabel(tab_2);
        label_12->setObjectName(QStringLiteral("label_12"));
        gridLayout_3->addWidget(label_12, 2, 0, 1, 1);
        spinmin = new QSpinBox(tab_2);
        spinmin->setObjectName(QStringLiteral("spinmin"));
        spinmin->setValue(30);
        gridLayout_3->addWidget(spinmin, 2, 1, 1, 1);
        label_13 = new QLabel(tab_2);
        label_13->setObjectName(QStringLiteral("label_13"));
        gridLayout_3->addWidget(label_13, 3, 0, 1, 1);
        spinmax = new QSpinBox(tab_2);
        spinmax->setObjectName(QStringLiteral("spinmax"));
        spinmax->setValue(10);
        gridLayout_3->addWidget(spinmax, 3, 1, 1, 1);
        Circle->addTab(tab_2, QString());
        gridLayout->addWidget(Circle, 3, 0, 1, 1);
        pushButton = new QPushButton(contoursUI);
        pushButton->setObjectName(QStringLiteral("pushButton"));
        gridLayout->addWidget(pushButton, 4, 0, 1, 1);

        retranslateUi(contoursUI);

        Circle->setCurrentIndex(1);

        QMetaObject::connectSlotsByName(contoursUI);
    } // setupUi

    // Sets all user-visible strings; called again when the locale changes.
    void retranslateUi(QDialog *contoursUI)
    {
        contoursUI->setWindowTitle(QApplication::translate("contoursUI", "Contour settings", 0));
        groupBox_2->setTitle(QApplication::translate("contoursUI", "Sobel Parameters", 0));
        label->setText(QApplication::translate("contoursUI", "Sobel Aperature", 0));
        label_5->setText(QApplication::translate("contoursUI", "Threshold", 0));
        groupBox_3->setTitle(QApplication::translate("contoursUI", "Laplacian Parameters", 0));
        label_2->setText(QApplication::translate("contoursUI", "Laplacian Kernel", 0));
        groupBox->setTitle(QApplication::translate("contoursUI", "Canny Parameters", 0));
        label_3->setText(QApplication::translate("contoursUI", "Minimum Threshold", 0));
        label_4->setText(QApplication::translate("contoursUI", "Maximum Threshold", 0));
        groupBox_4->setTitle(QApplication::translate("contoursUI", "Hough Parameters", 0));
        label_6->setText(QApplication::translate("contoursUI", "Min votes", 0));
        label_7->setText(QApplication::translate("contoursUI", "Min Radius", 0));
        label_8->setText(QApplication::translate("contoursUI", "Max Radius", 0));
        label_9->setText(QApplication::translate("contoursUI", "Min Circle Distance", 0));
        Circle->setTabText(Circle->indexOf(tab), QApplication::translate("contoursUI", "Circle", 0));
        label_10->setText(QApplication::translate("contoursUI", "Rho", 0));
        label_11->setText(QApplication::translate("contoursUI", "Threshold", 0));
        label_12->setText(QApplication::translate("contoursUI", "Min line length", 0));
        label_13->setText(QApplication::translate("contoursUI", "Max Line Gap", 0));
        Circle->setTabText(Circle->indexOf(tab_2), QApplication::translate("contoursUI", "P-Lines", 0));
        pushButton->setText(QApplication::translate("contoursUI", "Apply", 0));
    } // retranslateUi
};
namespace Ui {
    // Public alias so application code can use Ui::contoursUI directly.
    class contoursUI: public Ui_contoursUI {};
} // namespace Ui
QT_END_NAMESPACE
#endif // UI_CONTOURSUI_H
|
// reference https://github.com/noeldelgado/gemini-scrollbar/blob/master/index.js
import { addResizeListener, removeResizeListener } from 'gemini-ui/src/utils/resize-event';
import scrollbarWidth from 'gemini-ui/src/utils/scrollbar-width';
import { toObject } from 'gemini-ui/src/utils/util';
import Bar from './bar';
/* istanbul ignore next */
// Custom scrollbar component: wraps its slot content in a scrollable
// element and, unless `native` is set, renders two <Bar> components as
// the horizontal/vertical scrollbar thumbs.
export default {
  name: 'GeminiScrollbar',

  components: { Bar },

  props: {
    // When true, keep the browser's native scrollbars.
    native: Boolean,
    wrapStyle: {},
    wrapClass: {},
    viewClass: {},
    viewStyle: {},
    // If the container size never changes, set this to skip the resize
    // listener and save some work.
    noresize: Boolean,
    // Tag used for the inner view element.
    tag: {
      type: String,
      default: 'div'
    }
  },

  data() {
    return {
      // Thumb sizes (percentage strings; '' hides a bar) and positions.
      sizeWidth: '0',
      sizeHeight: '0',
      moveX: 0,
      moveY: 0
    };
  },

  computed: {
    // The scrollable wrapper element.
    wrap() {
      return this.$refs.wrap;
    }
  },

  render(h) {
    // Hide the native scrollbars by offsetting the wrapper by the system
    // scrollbar width (gutter is 0 on overlay-scrollbar platforms).
    let gutter = scrollbarWidth();
    let style = this.wrapStyle;

    if (gutter) {
      const gutterWith = `-${gutter}px`;
      const gutterStyle = `margin-bottom: ${gutterWith}; margin-right: ${gutterWith};`;

      // Merge the negative margins into whatever shape wrapStyle takes
      // (array of style objects, inline string, or plain object).
      if (Array.isArray(this.wrapStyle)) {
        style = toObject(this.wrapStyle);
        style.marginRight = style.marginBottom = gutterWith;
      } else if (typeof this.wrapStyle === 'string') {
        style += gutterStyle;
      } else {
        style = gutterStyle;
      }
    }
    const view = h(this.tag, {
      class: ['gemini-scrollbar__view', this.viewClass],
      style: this.viewStyle,
      ref: 'resize'
    }, this.$slots.default);
    const wrap = (
      <div
        ref="wrap"
        style={ style }
        onScroll={ this.handleScroll }
        class={ [this.wrapClass, 'gemini-scrollbar__wrap', gutter ? '' : 'gemini-scrollbar__wrap--hidden-default'] }>
        { [view] }
      </div>
    );
    let nodes;

    if (!this.native) {
      nodes = ([
        wrap,
        <Bar
          move={ this.moveX }
          size={ this.sizeWidth }></Bar>,
        <Bar
          vertical
          move={ this.moveY }
          size={ this.sizeHeight }></Bar>
      ]);
    } else {
      // Native mode: no custom bars, just the (visible) wrapper.
      nodes = ([
        <div
          ref="wrap"
          class={ [this.wrapClass, 'gemini-scrollbar__wrap'] }
          style={ style }>
          { [view] }
        </div>
      ]);
    }
    return h('div', { class: 'gemini-scrollbar' }, nodes);
  },

  methods: {
    // Mirror the wrapper's scroll offsets into thumb positions
    // (percent of the visible area).
    handleScroll() {
      const wrap = this.wrap;
      this.moveY = ((wrap.scrollTop * 100) / wrap.clientHeight);
      this.moveX = ((wrap.scrollLeft * 100) / wrap.clientWidth);
    },
    // Recompute thumb sizes; an empty string hides a bar when the
    // content already fits (percentage >= 100).
    update() {
      let heightPercentage, widthPercentage;
      const wrap = this.wrap;
      if (!wrap) return;

      heightPercentage = (wrap.clientHeight * 100 / wrap.scrollHeight);
      widthPercentage = (wrap.clientWidth * 100 / wrap.scrollWidth);

      this.sizeHeight = (heightPercentage < 100) ? (heightPercentage + '%') : '';
      this.sizeWidth = (widthPercentage < 100) ? (widthPercentage + '%') : '';
    }
  },

  mounted() {
    if (this.native) return;
    this.$nextTick(this.update);
    // Keep the thumbs in sync when the slot content resizes.
    !this.noresize && addResizeListener(this.$refs.resize, this.update);
  },

  beforeDestroy() {
    if (this.native) return;
    !this.noresize && removeResizeListener(this.$refs.resize, this.update);
  }
};
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = """
module: na_ontap_net_subnet
short_description: NetApp ONTAP Create, delete, modify network subnets.
extends_documentation_fragment:
- netapp.ontap.netapp.na_ontap
version_added: 2.8.0
author: Storage Engineering (@Albinpopote) <ansible@black-perl.fr>
description:
- Create, modify, destroy the network subnet
options:
state:
description:
- Whether the specified network interface group should exist or not.
choices: ['present', 'absent']
default: present
type: str
broadcast_domain:
description:
- Specify the required broadcast_domain name for the subnet.
- A broadcast domain can not be modified after the subnet has been created
type: str
name:
description:
- Specify the subnet name.
required: true
type: str
from_name:
description:
- Name of the subnet to be renamed
type: str
gateway:
description:
- Specify the gateway for the default route of the subnet.
type: str
ipspace:
description:
- Specify the ipspace for the subnet.
- The default value for this parameter is the default IPspace, named 'Default'.
type: str
ip_ranges:
description:
- Specify the list of IP address ranges associated with the subnet.
type: list
elements: str
subnet:
description:
- Specify the subnet (ip and mask).
type: str
"""
EXAMPLES = """
- name: create subnet
na_ontap_net_subnet:
state: present
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
hostname: "{{ netapp_hostname }}"
subnet: 10.10.10.0/24
name: subnet-adm
ip_ranges: [ '10.10.10.30-10.10.10.40', '10.10.10.51' ]
gateway: 10.10.10.254
ipspace: Default
broadcast_domain: Default
- name: delete subnet
na_ontap_net_subnet:
state: absent
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
hostname: "{{ netapp_hostname }}"
name: subnet-adm
ipspace: Default
- name: rename subnet
na_ontap_net_subnet:
state: present
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
hostname: "{{ netapp_hostname }}"
name: subnet-adm-new
from_name: subnet-adm
ipspace: Default
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
from ansible_collections.netapp.ontap.plugins.module_utils.netapp_module import NetAppModule
# True when the optional NetApp-Lib (ZAPI) python package is importable.
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppOntapSubnet(object):
"""
Create, Modifies and Destroys a subnet
"""
def __init__(self):
"""
Initialize the ONTAP Subnet class
"""
self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=False, type='str', choices=['present', 'absent'], default='present'),
name=dict(required=True, type='str'),
from_name=dict(required=False, type='str'),
broadcast_domain=dict(required=False, type='str'),
gateway=dict(required=False, type='str'),
ip_ranges=dict(required=False, type='list', elements='str'),
ipspace=dict(required=False, type='str'),
subnet=dict(required=False, type='str')
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
supports_check_mode=True
)
self.na_helper = NetAppModule()
self.parameters = self.na_helper.set_parameters(self.module.params)
if HAS_NETAPP_LIB is False:
self.module.fail_json(msg="the python NetApp-Lib module is required")
else:
self.server = netapp_utils.setup_na_ontap_zapi(module=self.module)
return
def get_subnet(self, name=None):
"""
Return details about the subnet
:param:
name : Name of the subnet
:return: Details about the subnet. None if not found.
:rtype: dict
"""
if name is None:
name = self.parameters.get('name')
subnet_iter = netapp_utils.zapi.NaElement('net-subnet-get-iter')
subnet_info = netapp_utils.zapi.NaElement('net-subnet-info')
subnet_info.add_new_child('subnet-name', name)
query = netapp_utils.zapi.NaElement('query')
query.add_child_elem(subnet_info)
subnet_iter.add_child_elem(query)
result = self.server.invoke_successfully(subnet_iter, True)
return_value = None
# check if query returns the expected subnet
if result.get_child_by_name('num-records') and \
int(result.get_child_content('num-records')) == 1:
subnet_attributes = result.get_child_by_name('attributes-list').get_child_by_name('net-subnet-info')
broadcast_domain = subnet_attributes.get_child_content('broadcast-domain')
gateway = subnet_attributes.get_child_content('gateway')
ipspace = subnet_attributes.get_child_content('ipspace')
subnet = subnet_attributes.get_child_content('subnet')
name = subnet_attributes.get_child_content('subnet-name')
ip_ranges = []
if subnet_attributes.get_child_by_name('ip-ranges'):
range_obj = subnet_attributes.get_child_by_name('ip-ranges').get_children()
for elem in range_obj:
ip_ranges.append(elem.get_content())
return_value = {
'name': name,
'broadcast_domain': broadcast_domain,
'gateway': gateway,
'ip_ranges': ip_ranges,
'ipspace': ipspace,
'subnet': subnet
}
return return_value
def create_subnet(self):
"""
Creates a new subnet
"""
options = {'subnet-name': self.parameters.get('name'),
'broadcast-domain': self.parameters.get('broadcast_domain'),
'subnet': self.parameters.get('subnet')}
subnet_create = netapp_utils.zapi.NaElement.create_node_with_children(
'net-subnet-create', **options)
if self.parameters.get('gateway'):
subnet_create.add_new_child('gateway', self.parameters.get('gateway'))
if self.parameters.get('ip_ranges'):
subnet_ips = netapp_utils.zapi.NaElement('ip-ranges')
subnet_create.add_child_elem(subnet_ips)
for ip_range in self.parameters.get('ip_ranges'):
subnet_ips.add_new_child('ip-range', ip_range)
if self.parameters.get('ipspace'):
subnet_create.add_new_child('ipspace', self.parameters.get('ipspace'))
try:
self.server.invoke_successfully(subnet_create, True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error creating subnet %s: %s' % (self.parameters.get('name'), to_native(error)),
exception=traceback.format_exc())
def delete_subnet(self):
"""
Deletes a subnet
"""
subnet_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'net-subnet-destroy', **{'subnet-name': self.parameters.get('name')})
try:
self.server.invoke_successfully(subnet_delete, True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error deleting subnet %s: %s' % (self.parameters.get('name'), to_native(error)),
exception=traceback.format_exc())
def modify_subnet(self):
"""
Modifies a subnet
"""
options = {'subnet-name': self.parameters.get('name')}
subnet_modify = netapp_utils.zapi.NaElement.create_node_with_children(
'net-subnet-modify', **options)
if self.parameters.get('gateway'):
subnet_modify.add_new_child('gateway', self.parameters.get('gateway'))
if self.parameters.get('ip_ranges'):
subnet_ips = netapp_utils.zapi.NaElement('ip-ranges')
subnet_modify.add_child_elem(subnet_ips)
for ip_range in self.parameters.get('ip_ranges'):
subnet_ips.add_new_child('ip-range', ip_range)
if self.parameters.get('ipspace'):
subnet_modify.add_new_child('ipspace', self.parameters.get('ipspace'))
if self.parameters.get('subnet'):
subnet_modify.add_new_child('subnet', self.parameters.get('subnet'))
try:
self.server.invoke_successfully(subnet_modify, True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error modifying subnet %s: %s' % (self.parameters.get('name'), to_native(error)),
exception=traceback.format_exc())
def rename_subnet(self):
"""
TODO
"""
options = {'subnet-name': self.parameters.get('from_name'),
'new-name': self.parameters.get('name')}
subnet_rename = netapp_utils.zapi.NaElement.create_node_with_children(
'net-subnet-rename', **options)
if self.parameters.get('ipspace'):
subnet_rename.add_new_child('ipspace', self.parameters.get('ipspace'))
try:
self.server.invoke_successfully(subnet_rename, True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error renaming subnet %s: %s' % (self.parameters.get('name'), to_native(error)),
exception=traceback.format_exc())
def apply(self):
    '''Apply the requested state (create/rename/modify/delete) to the subnet.'''
    # Record an EMS audit event against the cluster admin vserver.
    results = netapp_utils.get_cserver(self.server)
    cserver = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=results)
    netapp_utils.ems_log_event("na_ontap_net_subnet", cserver)
    current = self.get_subnet()
    cd_action, rename = None, None
    if self.parameters.get('from_name'):
        # 'from_name' given: this is a rename iff the source subnet exists
        # and the target ('name') does not.
        rename = self.na_helper.is_rename_action(self.get_subnet(self.parameters.get('from_name')), current)
        if rename is None:
            self.module.fail_json(msg="Error renaming: subnet %s does not exist" %
                                  self.parameters.get('from_name'))
    else:
        cd_action = self.na_helper.get_cd_action(current, self.parameters)
    modify = self.na_helper.get_modified_attributes(current, self.parameters)
    for attribute in modify:
        # broadcast_domain cannot be changed on an existing subnet.
        if attribute in ['broadcast_domain']:
            self.module.fail_json(msg='Error modifying subnet %s: cannot modify broadcast_domain parameter.' % self.parameters.get('name'))
    if self.na_helper.changed:
        if self.module.check_mode:
            pass
        else:
            if rename:
                self.rename_subnet()
            # If rename is True, cd_action is None but modify could still be set.
            if cd_action == 'create':
                for attribute in ['subnet', 'broadcast_domain']:
                    if not self.parameters.get(attribute):
                        self.module.fail_json(msg='Error - missing required arguments: %s.' % attribute)
                self.create_subnet()
            elif cd_action == 'delete':
                self.delete_subnet()
            elif modify:
                self.modify_subnet()
    self.module.exit_json(changed=self.na_helper.changed)
def main():
    """
    Create the NetApp ONTAP subnet object and run the requested play task.
    """
    subnet_obj = NetAppOntapSubnet()
    subnet_obj.apply()


if __name__ == '__main__':
    main()
|
# coding: utf-8
"""
HubSpot Events API
API for accessing CRM object events. # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.events.configuration import Configuration
class Paging(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> OpenAPI type, consumed by to_dict()/deserialization.
    openapi_types = {
        'next': 'NextPage'
    }

    # Attribute name -> JSON key in the serialized API payload.
    attribute_map = {
        'next': 'next'
    }

    def __init__(self, next=None, local_vars_configuration=None):  # noqa: E501
        """Paging - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._next = None
        self.discriminator = None

        if next is not None:
            self.next = next

    @property
    def next(self):
        """Gets the next of this Paging.  # noqa: E501

        :return: The next of this Paging.  # noqa: E501
        :rtype: NextPage
        """
        return self._next

    @next.setter
    def next(self, next):
        """Sets the next of this Paging.

        :param next: The next of this Paging.  # noqa: E501
        :type: NextPage
        """
        self._next = next

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Paging):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, Paging):
            return True

        return self.to_dict() != other.to_dict()
|
/*********************************************************************************************************************
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. *
* *
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance *
* with the License. A copy of the License is located at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES *
* OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions *
* and limitations under the License. *
*********************************************************************************************************************/
const AWS = require('aws-sdk');
/**
 * Description: creates a medialive device input (Elemental Link)
 * @param {object} config the configuration settings for input:
 * @param {string} config.StreamName the name of the input
 * @param {string} config.Type should be INPUT_DEVICE
 * @param {string} config.InputDeviceId should be the device id "hd-11111111111"
 * @returns {Promise<object>} { Id, EndPoint } of the created input
 */
const createDeviceInput = async (config) => {
    console.log('Creating Link Input.....');
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    const params = {
        Name: config.StreamName,
        Type: config.Type,
        InputDevices: [
            {
                Id: config.InputDeviceId
            }
        ]
    };
    // Fixed: `data` was previously assigned without a declaration, creating
    // an implicit global; declare it locally. The try/catch that only
    // rethrew has been removed — rejections propagate unchanged.
    const data = await medialive.createInput(params).promise();
    return {
        Id: data.Input.Id,
        // Device (Link) inputs have no push endpoint URL.
        EndPoint: 'Push InputType only'
    };
};
/**
 * Description: creates a medialive RTP push input and associated security group
 * @param {object} config the configuration settings for input:
 * @param {string} config.StreamName the name of the input
 * @param {string} config.Type should be RTP_PUSH
 * @param {string} config.Cidr a valid cidr block to restrict access to the input (0.0.0.0/0)
 * @returns {Promise<object>} { Id, EndPoint } of the created input
 */
const createRtpInput = async (config) => {
    console.log('Creating RTP Input.....');
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    // Restrict who may push to this input with a dedicated security group.
    const sgData = await medialive.createInputSecurityGroup({
        WhitelistRules: [{
            Cidr: config.Cidr
        }]
    }).promise();
    const data = await medialive.createInput({
        InputSecurityGroups: [sgData.SecurityGroup.Id],
        Name: config.StreamName,
        Type: config.Type
    }).promise();
    // The try/catch that only rethrew was removed; errors propagate as-is.
    return {
        Id: data.Input.Id,
        EndPoint: data.Input.Destinations[0].Url
    };
};
/**
 * Description: creates a medialive RTMP push input and associated security group
 * (the original JSDoc said RTP_PUSH and omitted StreamInputKey — fixed).
 * @param {object} config the configuration settings for input:
 * @param {string} config.StreamName the name of the input
 * @param {string} config.Type should be RTMP_PUSH
 * @param {string} config.Cidr a valid cidr block to restrict access to the input (0.0.0.0/0)
 * @param {string} config.StreamInputKey the stream key appended to the RTMP application name
 * @returns {Promise<object>} { Id, EndPoint } of the created input
 */
const createRtmpInput = async (config) => {
    console.log('Creating RTMP Input.....');
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    // Restrict who may push to this input with a dedicated security group.
    const sgData = await medialive.createInputSecurityGroup({
        WhitelistRules: [{
            Cidr: config.Cidr
        }]
    }).promise();
    const data = await medialive.createInput({
        InputSecurityGroups: [sgData.SecurityGroup.Id],
        Name: config.StreamName,
        Type: config.Type,
        Destinations: [{
            StreamName: `${config.StreamName}/${config.StreamInputKey}`
        }]
    }).promise();
    // The try/catch that only rethrew was removed; errors propagate as-is.
    return {
        Id: data.Input.Id,
        EndPoint: data.Input.Destinations[0].Url
    };
};
/**
 * Description: creates a medialive URL pull input; optional credentials are
 * stored in SSM Parameter Store and referenced via PasswordParam.
 * (Original JSDoc wrongly said RTP_PUSH and misspelled "authentication".)
 * @param {object} config the configuration settings for input:
 * @param {string} config.StreamName the name of the input
 * @param {string} config.Type should be URL_PULL
 * @param {string} config.PullUrl the source URL to pull from
 * @param {string} config.PullUser the username if authentication is required to access the source URL
 * @param {string} config.PullPass the password if authentication is required to access the source URL
 * @returns {Promise<object>} { Id, EndPoint } of the created input
 */
const createUrlInput = async (config) => {
    console.log('Creating URL_PULL Input.....');
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    const ssm = new AWS.SSM({
        region: process.env.AWS_REGION
    });
    const params = {
        Name: config.StreamName,
        Type: config.Type,
        Sources: [{
            Url: config.PullUrl
        }]
    };
    if (config.PullUser && config.PullUser !== '') {
        params.Sources[0].Username = config.PullUser;
        // PasswordParam is the *name* of the SSM parameter holding the
        // password; that parameter is created below under the username.
        params.Sources[0].PasswordParam = config.PullUser;
        // SECURITY NOTE(review): Type 'String' stores the password in
        // plaintext in Parameter Store — confirm whether 'SecureString'
        // is usable with the MediaLive role before changing it.
        const ssmParams = {
            Name: config.PullUser,
            Description: 'Live Stream solution input credentials',
            Type: 'String',
            Value: config.PullPass,
            Overwrite: true
        };
        await ssm.putParameter(ssmParams).promise();
    }
    // The try/catch that only rethrew was removed; errors propagate as-is.
    const data = await medialive.createInput(params).promise();
    return {
        Id: data.Input.Id,
        EndPoint: 'Push InputType only'
    };
};
/**
 * Description: deletes a medialive input and, if one is attached, its
 * input security group.
 * @param {string} InputId the InputId which in CloudFormation is the physical resource ID.
 * @returns {Promise<string>} 'success' once the input (and security group) are deleted
 */
const deleteInput = async (InputId) => {
    console.log('Deleting Input.....');
    const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    const params = {
        InputId: InputId
    };
    const input = await medialive.describeInput(params).promise();
    await medialive.deleteInput(params).promise();
    if (input.SecurityGroups && input.SecurityGroups.length !== 0) {
        const sgParams = {
            InputSecurityGroupId: input.SecurityGroups[0]
        };
        /**
         * When the input is deleted the SG is detached, however it can take a
         * few seconds for the SG state to change from IN_USE to IDLE.
         * Poll every 6 seconds, up to 5 attempts.
         */
        let state = '';
        let retry = 5;
        while (state !== 'IDLE') {
            await sleep(6000);
            const sg = await medialive.describeInputSecurityGroup(sgParams).promise();
            state = sg.State;
            retry -= 1;
            if (retry === 0 && state !== 'IDLE') {
                throw new Error(`Failed to delete Security Group, state: ${state} is not IDLE`);
            }
        }
        await medialive.deleteInputSecurityGroup(sgParams).promise();
    }
    // The try/catch that only rethrew was removed; errors propagate as-is.
    return 'success';
};
/**
 * Description: creates a medialive channel and waits until the channel is
 * fully created before resolving.
 * @param {object} config the configuration settings for the channel:
 * @param {string} config.EncodingProfile one of HD-1080p, HD-720p, SD-540p
 * @param {string} config.Codec encoding codec option, default is AVC
 * @param {string} config.Role the MediaLive IAM Role associated with the channel
 * @param {string} config.InputId the ID of the medialive input to attach to the channel
 * @param {string} config.Type the input type; URL_PULL sources are looped
 * @param {string} config.Name the channel name
 * @param {string} config.MediaStoreEndpoint the mediastore endpoint to use as the output destination
 * @returns {Promise<object>} { ChannelId } of the created channel
 */
const createChannel = async (config) => {
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    const encode1080p = require('./encoding-profiles/hd-1080p');
    const encode720p = require('./encoding-profiles/hd-720p');
    const encode540p = require('./encoding-profiles/sd-540p');
    const params = {
        ChannelClass: 'SINGLE_PIPELINE',
        Destinations: [{
            Id: 'destination1',
            Settings: [{
                // MediaStore is written to over the mediastoressl scheme.
                Url: config.MediaStoreEndpoint.replace('https', 'mediastoressl') + '/stream/index'
            }]
        }],
        InputSpecification: {
            Codec: config.Codec,
            Resolution: '',
            MaximumBitrate: ''
        },
        Name: config.Name,
        RoleArn: config.Role,
        InputAttachments: [{
            InputId: config.InputId,
            InputSettings: {}
        }],
        EncoderSettings: {},
        Tags: {
            Solution: 'SO0013'
        }
    };
    if (config.Type === 'URL_PULL') {
        // Loop pull (file-based) sources so the channel runs continuously.
        params.InputAttachments[0].InputSettings = {
            SourceEndBehavior: 'LOOP'
        };
    }
    switch (config.EncodingProfile) {
        case 'HD-1080p':
            params.InputSpecification.Resolution = 'HD';
            params.InputSpecification.MaximumBitrate = 'MAX_20_MBPS';
            params.EncoderSettings = encode1080p;
            break;
        case 'HD-720p':
            params.InputSpecification.Resolution = 'HD';
            params.InputSpecification.MaximumBitrate = 'MAX_10_MBPS';
            params.EncoderSettings = encode720p;
            break;
        case 'SD-540p':
            params.InputSpecification.Resolution = 'SD';
            params.InputSpecification.MaximumBitrate = 'MAX_10_MBPS';
            params.EncoderSettings = encode540p;
            break;
        default:
            throw new Error('EncodingProfile is not defined');
    }
    console.log(`Creating Channel with a ${config.EncodingProfile} profile`);
    const data = await medialive.createChannel(params).promise();
    // Block until the channel leaves the CREATING state.
    await medialive.waitFor('channelCreated', { ChannelId: data.Channel.Id }).promise();
    // The try/catch that only rethrew was removed; errors propagate as-is.
    return {
        ChannelId: data.Channel.Id
    };
};
/**
 * Description: starts a medialive channel.
 * @param {object} config the configuration settings:
 * @param {string} config.ChannelId the medialive channel id
 * @returns {Promise<string>} 'success' once the start request is accepted
 */
const startChannel = async (config) => {
    console.log('Starting Channel.....');
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    // The try/catch that only rethrew was removed; errors propagate as-is.
    await medialive.startChannel({
        ChannelId: config.ChannelId
    }).promise();
    return 'success';
};
/**
 * Description: stops and deletes a medialive channel, waiting on each state
 * transition to complete.
 * @param {string} ChannelId the ChannelId which in CloudFormation is the physical resource ID.
 * @returns {Promise<string>} 'success' once the channel is fully deleted
 */
const deleteChannel = async (ChannelId) => {
    console.log('Deleting Channel.....');
    const medialive = new AWS.MediaLive({
        region: process.env.AWS_REGION
    });
    const params = {
        ChannelId: ChannelId
    };
    // The unused `data` variable and the try/catch that only rethrew were
    // removed; errors propagate as-is.
    await medialive.stopChannel(params).promise();
    await medialive.waitFor('channelStopped', params).promise();
    await medialive.deleteChannel(params).promise();
    await medialive.waitFor('channelDeleted', params).promise();
    return 'success';
};
module.exports = {
createDeviceInput: createDeviceInput,
createRtpInput: createRtpInput,
createRtmpInput: createRtmpInput,
createUrlInput: createUrlInput,
deleteInput: deleteInput,
createChannel: createChannel,
startChannel: startChannel,
deleteChannel: deleteChannel
};
|
import logging

# Module-level logger; silenced4() below routes caught exceptions through it.
logger = logging.getLogger(__name__)
def error():
    """Always raise a bare RuntimeError (the failure source for this fixture)."""
    raise RuntimeError
def log(msg):
    """Emit ``msg`` on stdout; trivial stand-in for a real logging sink."""
    print(msg)
def silenced1():
    """Call error() and deliberately discard whatever it raises."""
    try:
        error()
    except Exception:
        # Intentional swallow: this fixture exists to exercise the pattern.
        return None
def silenced2():
    """Call error(); on failure, log the exception and 0..199, then return 'x'."""
    try:
        error()
    except Exception as err:
        log(err)
        count = 0
        while count < 200:
            log(count)
            count += 1
        return 'x'
def silenced3():
    """
    Call error() inside try/finally.

    The ``return`` in the ``finally`` block swallows the in-flight
    RuntimeError and makes the function return "mwhahaha" instead —
    this exact (intentional) exception-silencing trap is the point of
    the fixture, so the control flow must not be "fixed".
    """
    try:
        error()
    finally:
        return "mwhahaha"
def silenced4():
    """Call error() and reduce any exception to an INFO-level log record."""
    try:
        error()
    except Exception as err:
        logger.info(repr(err))
def notsilenced():
    """Call error() and re-raise any failure wrapped in a ValueError."""
    try:
        error()
    except Exception as err:
        raise ValueError(err)
|
# *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
# ** Copyright UCAR (c) 1992 - 2014
# ** University Corporation for Atmospheric Research(UCAR)
# ** National Center for Atmospheric Research(NCAR)
# ** P.O.Box 3000, Boulder, Colorado, 80307-3000, USA
# ** See LICENSE.TXT for license details
# *=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*=*
## no longer used
import matplotlib.patches as patches
from matplotlib.pyplot import figure, show, rc, grid
import matplotlib.pyplot as plt
class Polygon:
    """A filled polygon described by a vertex list and a single color."""

    def __init__(self, verts, color):
        """Remember the vertices and the color used for both fill and edge."""
        self.verts = verts
        self.color = color

    def draw(self, ax):
        """Add this polygon to the matplotlib axes ``ax`` as a closed, filled patch."""
        patch = patches.Polygon(
            self.verts,
            facecolor=self.color,
            closed=True,
            fill=True,
            edgecolor=self.color,
        )
        ax.add_patch(patch)
|
"""
For main documentation consult cog/bot.py
"""
import sys
__version__ = '0.3.1'
try:
assert sys.version_info[0:2] >= (3, 7)
except AssertionError:
print('This entire program must be run with python >= 3.7')
print('If unavailable on platform, see https://github.com/pyenv/pyenv')
sys.exit(1)
|
from sympy import Eq, S, sqrt
from sympy.abc import x, y, z, s, t
from sympy.sets import FiniteSet, EmptySet
from sympy.geometry import Point
from sympy.vector import ImplicitRegion
from sympy.testing.pytest import raises
def test_ImplicitRegion():
    """Constructor should normalize Eq() input and expose variables/degree."""
    region_2d = ImplicitRegion((x, y), (x**2/4 + y**2/16 - 1))
    assert region_2d.equation == x**2/4 + y**2/16 - 1
    assert region_2d.variables == (x, y)
    assert region_2d.degree == 2
    region_3d = ImplicitRegion((x, y, z), Eq(x**4 + y**2 - x*y, 6))
    assert region_3d.equation == x**4 + y**2 - x*y - 6
    assert region_3d.variables == (x, y, z)
    assert region_3d.degree == 4
def test_regular_point():
    # BUG FIX: the original comparisons were bare expressions — without
    # `assert` they evaluate and are discarded, so this test could never fail.
    r1 = ImplicitRegion((x,), x**2 - 16)
    assert r1.regular_point() == (-4,)
    c1 = ImplicitRegion((x, y), x**2 + y**2 - 4)
    assert c1.regular_point() == (2, 0)
    c2 = ImplicitRegion((x, y), (x - S(5)/2)**2 + y**2 - (S(1)/4)**2)
    assert c2.regular_point() == (11/4, 0)
    c3 = ImplicitRegion((x, y), (y - 5)**2 - 16*(x - 5))
    assert c3.regular_point() == (5, 5)
    r2 = ImplicitRegion((x, y), x**2 - 4*x*y - 3*y**2 + 4*x + 8*y - 5)
    # NOTE(review): the original compared the *unbound method* r2.regular_point
    # to a tuple (also without assert). The intended value uses Python floats
    # (4/7, 13/21) which do not compare equal to exact Rationals, so confirm
    # the exact expectation before asserting it:
    # assert r2.regular_point() == (S(4)/7, S(13)/21)  # TODO confirm
    r2.regular_point()  # at least exercise the call path
    r3 = ImplicitRegion((x, y), x**2 - 2*x*y + 3*y**2 - 2*x - 5*y + 3/2)
    raises(ValueError, lambda: r3.regular_point())
def test_singular_points_and_multiplicty():
    # NOTE(review): the function name has a typo ("multiplicty"); it is left
    # unchanged so test selection by name keeps working.
    # Plane: every point is regular (multiplicity 1), singular set is the
    # whole solution set expressed parametrically.
    r1 = ImplicitRegion((x, y, z), Eq(x + y + z, 0))
    assert r1.singular_points() == FiniteSet((-y - z, y, z))
    assert r1.multiplicity((0, 0, 0)) == 1
    assert r1.multiplicity((-y - z, y, z)) == 1
    r2 = ImplicitRegion((x, y, z), x*y*z + y**4 -x**2*z**2)
    assert r2.singular_points() == FiniteSet((0, 0, z), ((-y*sqrt(4*y**2 + 1)/2 + y/2)/z, y, z),\
                                             ((y*sqrt(4*y**2 + 1)/2 + y/2)/z, y, z))
    assert r2.multiplicity((0, 0, 0)) == 3
    assert r2.multiplicity((0, 0, 6)) == 2
    # Cone: the apex is the only singular point, of multiplicity 2.
    r3 = ImplicitRegion((x, y, z), z**2 - x**2 - y**2)
    assert r3.singular_points() == FiniteSet((0, 0, 0))
    assert r3.multiplicity((0, 0, 0)) == 2
    # Smooth circle: no singular points; multiplicity 0 off the curve.
    r4 = ImplicitRegion((x, y), x**2 + y**2 - 2*x)
    assert r4.singular_points() == EmptySet
    assert r4.multiplicity(Point(1, 3)) == 0
def test_rational_parametrization():
    # Cleanups: leftover debug print() calls removed; one assert-less
    # comparison flagged; the exact duplicate of the cubic_curve check under
    # the name `I` (which shadows SymPy's imaginary unit) removed.
    p = ImplicitRegion((x,), x - 2)
    assert p.rational_parametrization() == (x - 2,)
    line = ImplicitRegion((x, y), Eq(y, 3*x + 2))
    assert line.rational_parametrization() == (x, 3*x + 2)
    circle1 = ImplicitRegion((x, y), (x-2)**2 + (y+3)**2 - 4)
    assert circle1.rational_parametrization(parameters=t) == (4*t/(t**2 + 1) + 2, 4*t**2/(t**2 + 1) - 5)
    circle2 = ImplicitRegion((x, y), (x - S.Half)**2 + y**2 - (S(1)/2)**2)
    assert circle2.rational_parametrization(parameters=t) == (t/(t**2 + 1) + S(1)/2, t**2/(t**2 + 1) - S(1)/2)
    circle3 = ImplicitRegion((x, y), Eq(x**2 + y**2, 2*x))
    assert circle3.rational_parametrization(parameters=(t,)) == (2*t/(t**2 + 1) + 1, 2*t**2/(t**2 + 1) - 1)
    parabola = ImplicitRegion((x, y), (y - 3)**2 - 4*(x + 6))
    assert parabola.rational_parametrization(t) == (-6 + 4/t**2, 3 + 4/t)
    rect_hyperbola = ImplicitRegion((x, y), x*y - 1)
    assert rect_hyperbola.rational_parametrization(t) == (-1 + (t + 1)/t, t)
    cubic_curve = ImplicitRegion((x, y), x**3 + x**2 - y**2)
    assert cubic_curve.rational_parametrization(parameters=(t)) == (t**2 - 1, t*(t**2 - 1))
    cuspidal = ImplicitRegion((x, y), (x**3 - y**2))
    assert cuspidal.rational_parametrization(t) == (t**2, t**3)
    sphere = ImplicitRegion((x, y, z), Eq(x**2 + y**2 + z**2, 2*x))
    assert sphere.rational_parametrization(parameters=(s, t)) == (2/(s**2 + t**2 + 1), 2*t/(s**2 + t**2 + 1), 2*s/(s**2 + t**2 + 1))
    conic = ImplicitRegion((x, y), Eq(x**2 + 4*x*y + 3*y**2 + x - y + 10, 0))
    # FIXME: bare comparison (no assert) in the original; the expectation
    # mixes Python floats (17/2, 11/2) with exact SymPy rationals, which do
    # not compare structurally equal. Confirm the exact tuple, then assert:
    # assert conic.rational_parametrization(t) == (S(17)/2 + 4/(3*t**2 + 4*t + 1), 4*t/(3*t**2 + 4*t + 1) - S(11)/2)
    conic.rational_parametrization(t)  # exercise the code path
    # Elliptic curves have no rational parametrization.
    r1 = ImplicitRegion((x, y), y**2 - x**3 + x)
    raises(NotImplementedError, lambda: r1.rational_parametrization())
    r2 = ImplicitRegion((x, y), y**2 - x**3 - x**2 + 1)
    raises(NotImplementedError, lambda: r2.rational_parametrization())
|
// Auto-generated Nuxt.js hydration payload: serializes the server-rendered
// application state for route /37/54. Do not edit by hand — it is rewritten
// on every build (note the staticAssetsBase timestamp).
window.__NUXT__=(function(a,b,c,d,e){return {staticAssetsBase:"https:\u002F\u002Fwww.baca-quran.id\u002Fstatic\u002F1627814429",layout:"default",error:b,state:{notification:{show:a,title:c,message:c},isShowSidebar:a,isSupportWebShare:a,headerTitle:"Baca Qur'an",page:"home",lastReadVerse:b,settingActiveTheme:{name:"dark",bgColor:"#071e3d",fgColor:"#fff"},settingShowTranslation:a,settingShowTafsir:a,settingShowMuqaddimah:d,surahFavorite:[]},serverRendered:d,routePath:"\u002F37\u002F54",config:{_app:{basePath:e,assetsPath:e,cdnURL:"https:\u002F\u002Fwww.baca-quran.id\u002F"}}}}(false,null,"",true,"\u002F"));
|
(function () {
    // Capture the <script> element now: document.currentScript is only
    // valid while the script is initially executing, not inside onload.
    var script = document.currentScript;
    var chainedOnload = window.onload || function () {};
    // Chain rather than clobber any previously-registered onload handler.
    window.onload = function () {
        chainedOnload();
        var container = script.parentElement.parentElement;
        var when = container.querySelector('x-date').when();
        if (when < new Date()) {
            // The meetup date has passed: hide its card and reveal the
            // "no upcoming meetups" placeholder.
            container.className = 'hidden';
            document.getElementById('no-upcoming-meetups').className = '';
        }
    };
})();
|
# Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib import constants as lib_constants
# Device owners that identify router ports: SNAT interfaces plus the
# router gateway port.
ROUTER_PORT_OWNERS = lib_constants.ROUTER_INTERFACE_OWNERS_SNAT + \
    (lib_constants.DEVICE_OWNER_ROUTER_GW,)

# Router resource status values.
ROUTER_STATUS_ACTIVE = 'ACTIVE'
ROUTER_STATUS_ALLOCATING = 'ALLOCATING'
ROUTER_STATUS_ERROR = 'ERROR'
VALID_ROUTER_STATUS = (ROUTER_STATUS_ACTIVE,
                       ROUTER_STATUS_ALLOCATING,
                       ROUTER_STATUS_ERROR)

# Keys used to attach extra info to router/floating-ip resources.
HA_ROUTER_STATE_KEY = '_ha_state'
METERING_LABEL_KEY = '_metering_labels'
FLOATINGIP_AGENT_INTF_KEY = '_floatingip_agent_interfaces'
SNAT_ROUTER_INTF_KEY = '_snat_router_interfaces'

# Name templates for the resources backing an HA router (%s = tenant id).
HA_NETWORK_NAME = 'HA network tenant %s'
HA_SUBNET_NAME = 'HA subnet tenant %s'
HA_PORT_NAME = 'HA port tenant %s'
HA_ROUTER_STATE_ACTIVE = 'active'
HA_ROUTER_STATE_STANDBY = 'standby'
VALID_HA_STATES = (HA_ROUTER_STATE_ACTIVE, HA_ROUTER_STATE_STANDBY)

# API pagination / sorting markers.
PAGINATION_INFINITE = 'infinite'
SORT_DIRECTION_ASC = 'asc'
SORT_DIRECTION_DESC = 'desc'

# Ethernet protocol numbers (EtherTypes).
ETHERTYPE_NAME_ARP = 'arp'
ETHERTYPE_ARP = 0x0806
ETHERTYPE_IP = 0x0800
ETHERTYPE_IPV6 = 0x86DD

# Accept the legacy ICMPv6 protocol name as an alias of the current one.
IP_PROTOCOL_NAME_ALIASES = {lib_constants.PROTO_NAME_IPV6_ICMP_LEGACY:
                            lib_constants.PROTO_NAME_IPV6_ICMP}

# Reverse map: IP protocol number (as a string) -> protocol name.
IP_PROTOCOL_NUM_TO_NAME_MAP = {
    str(v): k for k, v in lib_constants.IP_PROTOCOL_MAP.items()}

# When using iptables-save we specify '-p {proto}',
# but sometimes those values are not identical. This is a map
# of known protocol numbers that require a name to be used and
# protocol names that require a different name to be used,
# because that is how iptables-save will display them.
#
# This is how the list was created, so there is a possibility
# it will need to be updated in the future:
#
# $ for num in {0..255}; do iptables -A INPUT -p $num; done
# $ iptables-save
#
# These cases are special, and were found by inspection:
# - 'ipv6-encap' uses 'ipv6'
# - 'icmpv6' uses 'ipv6-icmp'
# - 'pgm' uses '113' instead of its name
# - protocol '0' uses no -p argument
IPTABLES_PROTOCOL_NAME_MAP = {lib_constants.PROTO_NAME_IPV6_ENCAP: 'ipv6',
                              lib_constants.PROTO_NAME_IPV6_ICMP_LEGACY:
                                  'ipv6-icmp',
                              lib_constants.PROTO_NAME_PGM: '113',
                              '0': None,
                              '1': 'icmp',
                              '2': 'igmp',
                              '3': 'ggp',
                              '4': 'ipencap',
                              '5': 'st',
                              '6': 'tcp',
                              '8': 'egp',
                              '9': 'igp',
                              '12': 'pup',
                              '17': 'udp',
                              '20': 'hmp',
                              '22': 'xns-idp',
                              '27': 'rdp',
                              '29': 'iso-tp4',
                              '33': 'dccp',
                              '36': 'xtp',
                              '37': 'ddp',
                              '38': 'idpr-cmtp',
                              '41': 'ipv6',
                              '43': 'ipv6-route',
                              '44': 'ipv6-frag',
                              '45': 'idrp',
                              '46': 'rsvp',
                              '47': 'gre',
                              '50': 'esp',
                              '51': 'ah',
                              '57': 'skip',
                              '58': 'ipv6-icmp',
                              '59': 'ipv6-nonxt',
                              '60': 'ipv6-opts',
                              '73': 'rspf',
                              '81': 'vmtp',
                              '88': 'eigrp',
                              '89': 'ospf',
                              '93': 'ax.25',
                              '94': 'ipip',
                              '97': 'etherip',
                              '98': 'encap',
                              '103': 'pim',
                              '108': 'ipcomp',
                              '112': 'vrrp',
                              '115': 'l2tp',
                              '124': 'isis',
                              '132': 'sctp',
                              '133': 'fc',
                              '135': 'mobility-header',
                              '136': 'udplite',
                              '137': 'mpls-in-ip',
                              '138': 'manet',
                              '139': 'hip',
                              '140': 'shim6',
                              '141': 'wesp',
                              '142': 'rohc'}

# Timeout in seconds for getting an IPv6 LLA
LLA_TASK_TIMEOUT = 40

# length of all device prefixes (e.g. qvo, tap, qvb)
LINUX_DEV_PREFIX_LEN = 3
# must be shorter than linux IFNAMSIZ (which is 16)
LINUX_DEV_LEN = 14

# Possible prefixes to partial port IDs in interface names used by the OVS,
# Linux Bridge, and IVS VIF drivers in Nova and the neutron agents. See the
# 'get_ovs_interfaceid' method in Nova (nova/virt/libvirt/vif.py) for details.
INTERFACE_PREFIXES = (lib_constants.TAP_DEVICE_PREFIX,
                      lib_constants.VETH_DEVICE_PREFIX,
                      lib_constants.SNAT_INT_DEV_PREFIX)

ATTRIBUTES_TO_UPDATE = 'attributes_to_update'

# TODO(amuller): Re-define the RPC namespaces once Oslo messaging supports
# Targets with multiple namespaces. Neutron will then implement callbacks
# for its RPC clients in order to support rolling upgrades.

# RPC Interface for agents to call DHCP API implemented on the plugin side
RPC_NAMESPACE_DHCP_PLUGIN = None
# RPC interface for the metadata service to get info from the plugin side
RPC_NAMESPACE_METADATA = None
# RPC interface for agent to plugin security group API
RPC_NAMESPACE_SECGROUP = None
# RPC interface for agent to plugin DVR api
RPC_NAMESPACE_DVR = None
# RPC interface for reporting state back to the plugin
RPC_NAMESPACE_STATE = None
# RPC interface for agent to plugin resources API
RPC_NAMESPACE_RESOURCES = None

# Default network MTU value when not configured
DEFAULT_NETWORK_MTU = 1500
IPV6_MIN_MTU = 1280

ROUTER_MARK_MASK = "0xffff"

VALID_ETHERTYPES = (lib_constants.IPv4, lib_constants.IPv6)

IP_ALLOWED_VERSIONS = [lib_constants.IP_VERSION_4, lib_constants.IP_VERSION_6]

# Inclusive bounds for TCP/UDP port numbers.
PORT_RANGE_MIN = 1
PORT_RANGE_MAX = 65535

# Configuration values for accept_ra sysctl, copied from linux kernel
# networking (netdev) tree, file Documentation/networking/ip-sysctl.txt
#
# Possible values are:
#     0 Do not accept Router Advertisements.
#     1 Accept Router Advertisements if forwarding is disabled.
#     2 Overrule forwarding behaviour. Accept Router Advertisements
#       even if forwarding is enabled.
ACCEPT_RA_DISABLED = 0
ACCEPT_RA_WITHOUT_FORWARDING = 1
ACCEPT_RA_WITH_FORWARDING = 2

# Some components communicate using private address ranges, define
# them all here. These address ranges should not cause any issues
# even if they overlap since they are used in disjoint namespaces,
# but for now they are unique.
# We define the metadata cidr since it falls in the range.
PRIVATE_CIDR_RANGE = '169.254.0.0/16'
DVR_FIP_LL_CIDR = '169.254.64.0/18'
L3_HA_NET_CIDR = '169.254.192.0/18'
METADATA_CIDR = '169.254.169.254/32'

# The only defined IpamAllocation status at this stage is 'ALLOCATED'.
# More states will be available in the future - e.g.: RECYCLABLE
IPAM_ALLOCATION_STATUS_ALLOCATED = 'ALLOCATED'
VALID_IPAM_ALLOCATION_STATUSES = (IPAM_ALLOCATION_STATUS_ALLOCATED,)

# Port binding states for Live Migration
PORT_BINDING_STATUSES = (lib_constants.ACTIVE,
                         lib_constants.INACTIVE)

VALID_FLOATINGIP_STATUS = (lib_constants.FLOATINGIP_STATUS_ACTIVE,
                           lib_constants.FLOATINGIP_STATUS_DOWN,
                           lib_constants.FLOATINGIP_STATUS_ERROR)

# Floating IP host binding states
FLOATING_IP_HOST_UNBOUND = "FLOATING_IP_HOST_UNBOUND"
FLOATING_IP_HOST_NEEDS_BINDING = "FLOATING_IP_HOST_NEEDS_BINDING"

# Possible types of values (e.g. in QoS rule types)
VALUES_TYPE_CHOICES = "choices"
VALUES_TYPE_RANGE = "range"

# Units base
SI_BASE = 1000   # decimal (SI) unit multiplier
IEC_BASE = 1024  # binary (IEC) unit multiplier
|
import itertools
import json
import uuid
from datetime import timedelta
from functools import reduce
import numpy
from django.core.exceptions import SuspiciousOperation, ValidationError
from django.db import models
from django.db.models import Q
from rest_framework import serializers
from perftracker.helpers import PTDurationField, PTRoundedFloatField, PTRoundedFloatMKField, PTJson, pt_is_valid_uuid
from perftracker.models.job import JobModel
from perftracker.models.test_group import TestGroupModel, TEST_GROUP_TAG_LENGTH
# Allowed values for TestModel.status (also echoed in that field's help_text).
TEST_STATUSES = ['NOTTESTED', 'SKIPPED', 'INPROGRESS', 'SUCCESS', 'FAILED']
class TestModel(models.Model):
    """One test execution (a row per test run) belonging to a JobModel."""

    # --- identity and provenance ---
    seq_num = models.IntegerField(help_text="Test sequence number in the job", db_index=True)
    uuid = models.UUIDField(default=uuid.uuid1, editable=False, help_text="test run uuid", db_index=True)
    tag = models.CharField(max_length=512, help_text="Test tag used for resuts comparisons: Disk sequential read", db_index=True)
    binary = models.CharField(max_length=128, help_text="Test binary: hdd_seq_read.exe")
    cmdline = models.CharField(max_length=1024, help_text="Test cmdline: -f /root/file/ -O 100M -s 1")
    group = models.CharField(max_length=TEST_GROUP_TAG_LENGTH, help_text="Test group tag")
    description = models.CharField(max_length=1024, help_text="Test description: disk sequential read test by 1M blocks")

    # --- raw measurements (stored as stringified lists/json) ---
    scores = models.CharField(max_length=16384, help_text="Raw test scores: [12.21, 14.23, 12.94]")
    scores_rejected = models.IntegerField(default=0, help_text="Number of scores rejected")
    deviations = models.CharField(max_length=1024, help_text="Test deviations: [0.02, 0.03, 0.01]")
    deviations_rejected = models.IntegerField(default=0, help_text="Number of deviations rejected")
    samples = models.IntegerField(help_text="Number of test samples (iterations)", default=1)
    category = models.CharField(max_length=128, help_text="Test category: 1-thread")
    metrics = models.CharField(max_length=64, help_text="Test result metrics: MB/s")
    links = models.CharField(max_length=1024, help_text="Test links json: {'test logs': 'http://logs.localdomain/231241.log'}")
    attribs = models.CharField(max_length=1024, help_text="Custom test attributes json: {'version': '12.3'}")
    less_better = models.BooleanField(help_text="Set to True if 'less' score is better")

    # --- execution outcome and timing ---
    errors = models.IntegerField(help_text="Number of test errors")
    warnings = models.IntegerField(help_text="Number of test warnings")
    begin = models.DateTimeField(help_text="Test begin time", null=True)
    end = models.DateTimeField(help_text="Test end time", null=True)
    loops = models.IntegerField(help_text="Test loops", null=True)
    duration = models.DurationField(help_text="total execution time (sec)")
    status = models.CharField(max_length=16, help_text="Test status: %s" % str(TEST_STATUSES))
    job = models.ForeignKey(JobModel, help_text="Job instance", related_name="tests", on_delete=models.CASCADE)

    # --- aggregates derived from scores/deviations in pt_update() ---
    avg_score = models.FloatField("Test average score: 13.02", null=True)
    min_score = models.FloatField("Test min score: 12.21", null=True)
    max_score = models.FloatField("Test max score: 14.23", null=True)
    avg_dev = models.FloatField("Test average deviation: 0.02", null=True)
    min_dev = models.FloatField("Test min deviation: 0.01", null=True)
    max_dev = models.FloatField("Test max deviation: 0.03", null=True)
    avg_plusmin = models.FloatField("Deviation in % of average score: 0.02", null=True)
    min_plusmin = models.FloatField("Deviation in % of min score: 0.01", null=True)
    max_plusmin = models.FloatField("Deviation in % of max score: 0.03", null=True)
@staticmethod
def pt_get_uuid(json_data):
    """
    Extract and validate the test uuid from uploaded json.

    Returns the client-supplied uuid lower-cased when present, otherwise a
    freshly generated time-based uuid. Raises ValidationError when the
    supplied uuid is malformed.
    """
    if 'uuid' in json_data:
        u = json_data['uuid']
        if not pt_is_valid_uuid(u):
            raise ValidationError("Invalid test uuid: '%s'" % u)
        return u.lower()
    # BUG FIX: the original returned a uuid.UUID object here but a str on the
    # branch above; normalize to str so callers always get the same type.
    return str(uuid.uuid1())
def pt_update(self, json_data):
j = PTJson(json_data, obj_name="test json", exception_type=SuspiciousOperation)
if 'seq_num' in json_data:
self.seq_num = json_data['seq_num']
if not self.seq_num:
self.seq_num = 0
self.tag = j.get_str('tag', require=True)
j.obj_name = self.tag
self.binary = j.get_str('binary')
self.cmdline = j.get_str('cmdline')
self.description = j.get_str('description')
score = j.get_float('score', defval=None)
if score is None:
scores = j.get_list('scores', require=True)
else:
scores = [score]
deviations = j.get_list('deviations')
self.scores = str(scores)
self.deviations = str(deviations) if deviations else str([0] * len(scores))
self.category = j.get_str('category')
self.metrics = j.get_str('metrics', 'loops/sec')
self.links = json.dumps(j.get_dict('links'))
self.attribs = json.dumps(j.get_dict('attribs'))
self.less_better = j.get_bool('less_better')
self.begin = j.get_datetime('begin')
self.end = j.get_datetime('end')
self.loops = j.get_int('loops')
self.status = j.get_str('status', "SUCCESS")
if self.status not in TEST_STATUSES:
raise SuspiciousOperation("invalid 'status' value '%s', must be one of: %s" % (self.status, str(TEST_STATUSES)))
e = j.get('errors', 0)
if type(e) is int:
self.errors = e
else:
self.errors = len(j.get_list('errors'))
w = j.get('warnings', 0)
if type(w) is int:
self.warnings = w
else:
self.warnings = len(j.get_list('warnings'))
dur_sec = j.get_float('duration_sec', 0)
if dur_sec:
self.duration = timedelta(seconds=int(dur_sec))
elif self.end and self.begin:
self.duration = self.end - self.begin
else:
self.duration = timedelta(seconds=0)
self.group = j.get_str('group')
TestGroupModel.pt_get_by_tag(self.group) # ensure appropriate TestGroupModel object exists
self.samples = j.get_int('samples', len(scores))
self.avg_score = numpy.mean(scores)
self.min_score = min(scores)
self.max_score = max(scores)
self.avg_dev = numpy.mean(deviations) if deviations else numpy.std(scores)
self.min_dev = min(deviations) if deviations else self.avg_dev
self.max_dev = max(deviations) if deviations else self.avg_dev
self.avg_plusmin = int(round(100 * abs(self.avg_dev / self.avg_score))) if self.avg_score else 0
self.min_plusmin = int(round(100 * abs(self.min_dev / self.min_score))) if self.min_score else 0
self.max_plusmin = int(round(100 * abs(self.max_dev / self.max_score))) if self.max_score else 0
if self.begin and (self.begin.tzinfo is None or self.begin.tzinfo.utcoffset(self.begin) is None):
raise SuspiciousOperation("'begin' datetime object must include timezone: %s" % str(self.begin))
if self.end and (self.end.tzinfo is None or self.end.tzinfo.utcoffset(self.end) is None):
raise SuspiciousOperation("'end' datetime object must include timezone: %s" % str(self.end))
def pt_save(self):
try:
obj = TestModel.objects.get(uuid=self.uuid)
except TestModel.MultipleObjectsReturned:
TestModel.objects.filter(uuid=self.uuid).delete()
obj = None
except TestModel.DoesNotExist:
obj = None
if obj is None or not self.pt_is_equal_to(obj):
self.save()
def __str__(self):
return self.tag
def pt_status_is_completed(self):
return self.status in ("SUCCESS", "FAILED")
def pt_status_is_failed(self):
return self.status == "FAILED"
@staticmethod
def pt_delete_tests(tests_uuids):
""" Delete Test objects from the tests_uuids by 100 elements at once"""
if not tests_uuids:
return
def grouper(n, iterable):
args = [iter(iterable)] * n
return ([e for e in t if e is not None] for t in itertools.zip_longest(*args))
for uuids in grouper(100, tests_uuids):
TestModel.objects.filter(reduce(lambda x, y: x | y, [Q(uuid=uuid) for uuid in uuids])).delete()
def pt_is_equal_to(self, test):
# FIXME, XXX - not sure this is right way to manage the problem of data object update in the database
for f in self._meta.get_fields():
if getattr(test, f.name) != getattr(self, f.name):
return False
return True
def pt_gen_unique_key(self):
return "%s %s {%s}" % (self.group, self.tag, self.category)
def pt_validate_uniqueness(self, key2test):
key = self.pt_gen_unique_key()
if key not in key2test:
key2test[key] = self
return
test = key2test[key]
if self.uuid == test.uuid:
return
if test.group != self.group or test.tag != self.tag or test.category != self.category:
return
raise ValidationError("test with given group ('%s'), tag ('%s') and category ('%s') already exists" %
(self.group, self.tag, self.category))
class Meta:
verbose_name = "Test result"
verbose_name_plural = "Tests results"
class TestSimpleSerializer(serializers.ModelSerializer):
duration = PTDurationField()
avg_score = PTRoundedFloatMKField()
avg_plusmin = PTRoundedFloatField()
class Meta:
model = TestModel
fields = ('id', 'seq_num', 'group', 'tag', 'category', 'duration', 'avg_score', 'avg_plusmin', 'errors', 'status')
class TestDetailedSerializer(TestSimpleSerializer):
class Meta:
model = TestModel
fields = [f.name for f in TestModel._meta.get_fields()]
|
import axios from 'axios'
export function request(config) {
// 1.创建axios实例
const instance = axios.create({
baseURL: 'http://152.136.185.210:7878/api/m5',
timeout: 5000,
})
// 2.axios网络拦截器
// request拦截下来的config参数其实就是我们的网络请求的配置(但好像没有拦截下数据)
instance.interceptors.request.use(config => {
// 拦截完后必须把配置给人还回去,不然网络请求会发送失败
return config;
}, err => {
console.log(err);
});
// response拦截下来的结果(包含数据)
instance.interceptors.response.use(res => {
// 拦截完后必须把配置给人还回去,不然网络请求无返回结果(undefined)
//一般返回data就可以了(这个才比较有用)
return res.data;
}, err => {
console.log(err);
})
// 3.发送网络请求
return instance(config);
}
|
from dataclasses import dataclass
from typing import List
from src.types.condition_opcodes import ConditionOpcode
@dataclass(frozen=True)
class ConditionVarPair:
"""
This structure is used to store parsed CLVM conditions
Conditions in CLVM have either format of (opcode, var1) or (opcode, var1, var2)
"""
opcode: ConditionOpcode
vars: List[bytes]
def __init__(self, opc: ConditionOpcode, *args):
i = 0
var_list = []
for arg in args:
if arg is not None:
assert isinstance(arg, bytes)
var_list.append(arg)
i = i + 1
object.__setattr__(self, "opcode", opc)
object.__setattr__(self, "vars", var_list)
|
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
# justice-lobby-server (staging)
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from .....core import Operation
from .....core import HeaderStr
from .....core import HttpResponse
from ...models import RestapiErrorResponseV1
class DeleteTemplateLocalizationV1Admin(Operation):
"""delete template localization (deleteTemplateLocalizationV1Admin)
Required permission : `ADMIN:NAMESPACE:{namespace}:NOTIFICATION [DELETE]` with scope `social`
delete template localization
Action Code: 50209
Required Permission(s):
- ADMIN:NAMESPACE:{namespace}:NOTIFICATION [DELETE]
Required Scope(s):
- social
Properties:
url: /lobby/v1/admin/notification/namespaces/{namespace}/templates/{templateSlug}/languages/{templateLanguage}
method: DELETE
tags: ["notification"]
consumes: ["application/json"]
produces: ["application/json"]
securities: [BEARER_AUTH]
namespace: (namespace) REQUIRED str in path
template_language: (templateLanguage) REQUIRED str in path
template_slug: (templateSlug) REQUIRED str in path
Responses:
204: No Content - (No Content)
401: Unauthorized - RestapiErrorResponseV1 (Unauthorized)
403: Forbidden - RestapiErrorResponseV1 (Forbidden)
404: Not Found - RestapiErrorResponseV1 (Not Found)
500: Internal Server Error - RestapiErrorResponseV1 (Internal Server Error)
"""
# region fields
_url: str = "/lobby/v1/admin/notification/namespaces/{namespace}/templates/{templateSlug}/languages/{templateLanguage}"
_method: str = "DELETE"
_consumes: List[str] = ["application/json"]
_produces: List[str] = ["application/json"]
_securities: List[List[str]] = [["BEARER_AUTH"]]
_location_query: str = None
namespace: str # REQUIRED in [path]
template_language: str # REQUIRED in [path]
template_slug: str # REQUIRED in [path]
# endregion fields
# region properties
@property
def url(self) -> str:
return self._url
@property
def method(self) -> str:
return self._method
@property
def consumes(self) -> List[str]:
return self._consumes
@property
def produces(self) -> List[str]:
return self._produces
@property
def securities(self) -> List[List[str]]:
return self._securities
@property
def location_query(self) -> str:
return self._location_query
# endregion properties
# region get methods
# endregion get methods
# region get_x_params methods
def get_all_params(self) -> dict:
return {
"path": self.get_path_params(),
}
def get_path_params(self) -> dict:
result = {}
if hasattr(self, "namespace"):
result["namespace"] = self.namespace
if hasattr(self, "template_language"):
result["templateLanguage"] = self.template_language
if hasattr(self, "template_slug"):
result["templateSlug"] = self.template_slug
return result
# endregion get_x_params methods
# region is/has methods
# endregion is/has methods
# region with_x methods
def with_namespace(self, value: str) -> DeleteTemplateLocalizationV1Admin:
self.namespace = value
return self
def with_template_language(self, value: str) -> DeleteTemplateLocalizationV1Admin:
self.template_language = value
return self
def with_template_slug(self, value: str) -> DeleteTemplateLocalizationV1Admin:
self.template_slug = value
return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result: dict = {}
if hasattr(self, "namespace") and self.namespace:
result["namespace"] = str(self.namespace)
elif include_empty:
result["namespace"] = ""
if hasattr(self, "template_language") and self.template_language:
result["templateLanguage"] = str(self.template_language)
elif include_empty:
result["templateLanguage"] = ""
if hasattr(self, "template_slug") and self.template_slug:
result["templateSlug"] = str(self.template_slug)
elif include_empty:
result["templateSlug"] = ""
return result
# endregion to methods
# region response methods
# noinspection PyMethodMayBeStatic
def parse_response(self, code: int, content_type: str, content: Any) -> Tuple[None, Union[None, HttpResponse, RestapiErrorResponseV1]]:
"""Parse the given response.
204: No Content - (No Content)
401: Unauthorized - RestapiErrorResponseV1 (Unauthorized)
403: Forbidden - RestapiErrorResponseV1 (Forbidden)
404: Not Found - RestapiErrorResponseV1 (Not Found)
500: Internal Server Error - RestapiErrorResponseV1 (Internal Server Error)
---: HttpResponse (Undocumented Response)
---: HttpResponse (Unexpected Content-Type Error)
---: HttpResponse (Unhandled Error)
"""
pre_processed_response, error = self.pre_process_response(code=code, content_type=content_type, content=content)
if error is not None:
return None, None if error.is_no_content() else error
code, content_type, content = pre_processed_response
if code == 204:
return None, None
if code == 401:
return None, RestapiErrorResponseV1.create_from_dict(content)
if code == 403:
return None, RestapiErrorResponseV1.create_from_dict(content)
if code == 404:
return None, RestapiErrorResponseV1.create_from_dict(content)
if code == 500:
return None, RestapiErrorResponseV1.create_from_dict(content)
return None, self.handle_undocumented_response(code=code, content_type=content_type, content=content)
# endregion response methods
# region static methods
@classmethod
def create(
cls,
namespace: str,
template_language: str,
template_slug: str,
) -> DeleteTemplateLocalizationV1Admin:
instance = cls()
instance.namespace = namespace
instance.template_language = template_language
instance.template_slug = template_slug
return instance
@classmethod
def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> DeleteTemplateLocalizationV1Admin:
instance = cls()
if "namespace" in dict_ and dict_["namespace"] is not None:
instance.namespace = str(dict_["namespace"])
elif include_empty:
instance.namespace = ""
if "templateLanguage" in dict_ and dict_["templateLanguage"] is not None:
instance.template_language = str(dict_["templateLanguage"])
elif include_empty:
instance.template_language = ""
if "templateSlug" in dict_ and dict_["templateSlug"] is not None:
instance.template_slug = str(dict_["templateSlug"])
elif include_empty:
instance.template_slug = ""
return instance
@staticmethod
def get_field_info() -> Dict[str, str]:
return {
"namespace": "namespace",
"templateLanguage": "template_language",
"templateSlug": "template_slug",
}
@staticmethod
def get_required_map() -> Dict[str, bool]:
return {
"namespace": True,
"templateLanguage": True,
"templateSlug": True,
}
# endregion static methods
|
# Generated by Django 3.2.5 on 2021-07-26 19:33
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('neighborhood', '0006_rename_health_tell_neighborhood_hospital_number'),
]
operations = [
migrations.AlterField(
model_name='neighborhood',
name='hospital_number',
field=models.CharField(blank=True, max_length=15),
),
migrations.AlterField(
model_name='neighborhood',
name='police_number',
field=models.CharField(blank=True, max_length=15),
),
]
|
import numpy as np
import pyglet
import time
class KinematicEnv():
viewer = None
dt = 0.1
state_dim = 12
action_dim = 5
action_bound = [-1, 1]
def __init__(self):
#位姿初始化
self.STATE = 0 #0表示两端都在杆上; 1表示下端在杆上,执行stepUp(); 2表示上端在杆上,执行stepDown()
self.on_goal = 0 #判断当前动作是否结束
self.width = 40.
self.pole = 100.
self.crank = 50.
self.downPointLocation = np.array([0., 0.])
self.downPointAngle = np.pi / 6 #相对于X轴
self.downJointLocation = np.array([self.pole*np.cos(self.downPointAngle), self.pole*np.sin(self.downPointAngle)]).reshape(1,2)[0]
self.downJointAngle = 2 * np.pi / 3 #相对于下端连杆
self.centerLocation = np.array([(self.pole - self.crank) * np.cos(self.downPointAngle), (self.pole + self.crank) * np.sin(self.downPointAngle)]).reshape(1,2)[0]
self.centerAngle = 4 * np.pi / 3 #相对于下端曲柄
self.upJointLocation = np.array([self.pole * np.cos(self.downPointAngle), (self.pole + 2 * self.crank) * np.sin(self.downPointAngle)]).reshape(1,2)[0]
self.upJointAngle = 2 * np.pi / 3 #相对于上端曲柄
self.upPointLocation = np.array([0., (2 * self.pole + 2 * self.crank) * np.sin(self.downPointAngle)]).reshape(1,2)[0]
self.upPointAngle = 11 * np.pi / 6 #相对于X轴
self.armState = np.concatenate((self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation, self.upPointLocation))
self.jointState = np.array([self.downPointAngle, self.downJointAngle, self.centerAngle, self.upJointAngle, self.upPointAngle])
def A(self, theta, l):
'''广义变换矩阵,先旋转再平移,由相对于新的坐标系变换,需右乘 Rot(z, theta).dot(Trans(l, 0., 0.))'''
result = np.array([[np.cos(theta), np.sin(theta), 0., l*np.cos(theta)],
[np.sin(theta), -np.cos(theta), 0., l*np.sin(theta)],
[0., 0., 1., 0.],
[0., 0., 0., 1.]])
return result
def thetaTrans(self, theta):
return 2 * np.pi - theta #关节的相对角度改变,从相对于下端改变到相对于上端
def stepUp(self, action):
done = False
action = np.clip(action, *self.action_bound)
self.jointState += action * self.dt
self.jointState = self.jointState % (2*np.pi) # normalize
self.downPointAngle, self.downJointAngle, self.centerAngle, self.upJointAngle, self.upPointAngle = self.jointState
self.jointState[4] = (np.pi + self.downPointAngle + self.downJointAngle + self.centerAngle + self.upJointAngle) % (2*np.pi)
A1 = self.A(self.downPointAngle, self.pole)
A2 = self.A(self.downJointAngle, self.crank)
A3 = self.A(self.centerAngle, self.crank)
A4 = self.A(self.upJointAngle, self.pole)
self.downJointLocation = self.downPointLocation + (A1.dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.centerLocation = self.downPointLocation + (A1.dot(A2).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.upJointLocation = self.downPointLocation + (A1.dot(A2).dot(A3).dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1,4)[0][0:2]
self.upPointLocation = self.downPointLocation + (A1.dot(A2).dot(A3).dot(A4).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
s = np.concatenate((self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation, self.upPointLocation, [1.], [1. if self.on_goal else 0.]))
r = - np.sqrt((self.upPointLocation[0] / 300.) ** 2 + ((self.upPointLocation[1] - self.downPointLocation[1] - 200) / 300.) ** 2)
if - self.width / 2 < self.upPointLocation[0] < self.width / 2:
if - self.width / 2 < self.upPointLocation[1] - self.downPointLocation[1] - 200 < self.width / 2:
r += 1.
self.on_goal += 1
if self.on_goal > 3: #while training ddpg, it should be 50
done = True
else:
self.on_goal = 0
self.armState = np.concatenate(
(self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation,
self.upPointLocation))
#print(self.armState)
return s, r, done
def stepDown(self, action):
done = False
action = np.clip(action, *self.action_bound)
self.jointState += action * self.dt
self.jointState %= 2*np.pi # normalize
self.downPointAngle, self.downJointAngle, self.centerAngle, self.upJointAngle, self.upPointAngle = self.jointState
self.jointState[0] = (7 * np.pi + self.upPointAngle - self.downJointAngle - self.centerAngle - self.upJointAngle) % (2*np.pi)
#正运动学求解关节位置
A1 = self.A(self.thetaTrans(self.upPointAngle), self.pole)
A2 = self.A(self.thetaTrans(self.upJointAngle), self.crank)
A3 = self.A(self.thetaTrans(self.centerAngle), self.crank)
A4 = self.A(self.thetaTrans(self.downJointAngle), self.pole)
self.upJointLocation = self.upPointLocation + (A1.dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1,4)[0][0:2]
self.centerLocation = self.upPointLocation + (A1.dot(A2).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1,4)[0][0:2]
self.downJointLocation = self.upPointLocation + (A1.dot(A2.dot(A3)).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1,4)[0][0:2]
self.downPointLocation = self.upPointLocation + (A1.dot(A2.dot(A3.dot(A4))).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1,4)[0][0:2]
s = np.concatenate((self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation,
self.upPointLocation, [2.], [1. if self.on_goal else 0.]))
r = - 2 * np.sqrt((self.downPointLocation[0]/300.) ** 2 + ((self.upPointLocation[1] - self.downPointLocation[1] - 100)/300)**2)
if - self.width / 2 < self.downPointLocation[0] < self.width / 2:
if - self.width / 2 < self.upPointLocation[1] - self.downPointLocation[1] - 100 < self.width / 2:
r += 1.
self.on_goal += 1
if self.on_goal > 3: #while training ddpg, it is set as 50
done = True
else:
self.on_goal = 0
self.armState = np.concatenate(
(self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation,
self.upPointLocation))
return s, r, done
def initialOn(self):
self.downPointLocation = np.array([np.random.rand(1)[0]*40. - 20., np.random.rand(1)[0]*500.])
self.upPointLocation = np.array([np.random.rand(1)[0]*40. - 20., self.downPointLocation[1] + np.random.rand(1)[0]*200. + 50.])
#逆运动学求解关节角度
from scipy.optimize import fsolve
def f(k):
a, b, c, d = k.tolist()
A1 = self.A(a, self.pole)
A2 = self.A(b, self.crank)
A3 = self.A(c, self.crank)
A4 = self.A(d, self.pole)
return [(A1.dot(A2.dot(A3.dot(A4))).dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.])))).reshape(4, 1))[0][0] - self.upPointLocation[0] + self.downPointLocation[0],
(A1.dot(A2.dot(A3.dot(A4))).dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.])))).reshape(4, 1))[1][0] - self.upPointLocation[1] + self.downPointLocation[1],
(A1.dot(A2.dot(A3.dot(A4))).dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.])))).reshape(4, 1))[2][0],
(A1.dot(A2.dot(A3.dot(A4))).dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.])))).reshape(4, 1))[3][0] - 1.
]
result = fsolve(f, [1., 1., 1., 1.])
self.downPointAngle, self.downJointAngle, self.centerAngle, self.upJointAngle = result
self.upPointAngle = (np.pi + self.downPointAngle + self.downJointAngle + self.centerAngle + self.upJointAngle) % (2*np.pi)
self.jointState = np.array([self.downPointAngle, self.downJointAngle, self.centerAngle, self.upJointAngle, self.upPointAngle])
#正运动学求解关节位置
A1 = self.A(self.downPointAngle, self.pole)
A2 = self.A(self.downJointAngle, self.crank)
A3 = self.A(self.centerAngle, self.crank)
A4 = self.A(self.upJointAngle, self.pole)
self.downJointLocation = self.downPointLocation + (A1.dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.centerLocation = self.downPointLocation + (A1.dot(A2).dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.upJointLocation = self.downPointLocation + (A1.dot(A2.dot(A3)).dot(np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
s = np.concatenate((self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation, self.upPointLocation,[0.], [1.]))
self.armState = np.concatenate((self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation,
self.upPointLocation))
return s
def initialUp(self):
self.downPointLocation = np.array([np.random.rand(1)[0]*40. - 20., np.random.rand(1)[0] * 500.])
self.downPointAngle, self.downJointAngle, self.centerAngle, self.upJointAngle = np.random.rand(4) * 2 * np.pi
self.upPointAngle = (
np.pi + self.downPointAngle + self.downJointAngle + self.centerAngle + self.upJointAngle) % (
2 * np.pi)
self.jointState = np.array(
[self.downPointAngle, self.downJointAngle, self.centerAngle, self.upJointAngle, self.upPointAngle])
# 正运动学求解关节位置
A1 = self.A(self.downPointAngle, self.pole)
A2 = self.A(self.downJointAngle, self.crank)
A3 = self.A(self.centerAngle, self.crank)
A4 = self.A(self.upJointAngle, self.pole)
self.downJointLocation = self.downPointLocation + (A1.dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.centerLocation = self.downPointLocation + (A1.dot(A2).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.upJointLocation = self.downPointLocation + (A1.dot(A2.dot(A3)).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.upPointLocation = self.downPointLocation + (A1.dot(A2.dot(A3.dot(A4))).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
s = np.concatenate((self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation,
self.upPointLocation, [1.], [0.]))
self.armState = np.concatenate(
(self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation,
self.upPointLocation))
return s
def initialDown(self):
self.upPointLocation = np.array([np.random.rand(1)[0]*40. - 20., np.random.rand(1)[0] * 700.])
self.downJointAngle, self.centerAngle, self.upJointAngle, self.upPointAngle = np.random.rand(4) * 2 * np.pi
self.downPointAngle = (
7 * np.pi + self.upPointAngle - self.downJointAngle - self.centerAngle - self.upJointAngle) % (
2 * np.pi)
# 正运动学求解关节位置
A1 = self.A(self.thetaTrans(self.upPointAngle), self.pole)
A2 = self.A(self.thetaTrans(self.upJointAngle), self.crank)
A3 = self.A(self.thetaTrans(self.centerAngle), self.crank)
A4 = self.A(self.thetaTrans(self.downJointAngle), self.pole)
self.upJointLocation = self.upPointLocation + (A1.dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.centerLocation = self.upPointLocation + (A1.dot(A2).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.downJointLocation = self.upPointLocation + (A1.dot(A2.dot(A3)).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
self.downPointLocation = self.upPointLocation + (A1.dot(A2.dot(A3.dot(A4))).dot(
np.concatenate((np.array([0., 0.]), np.array([0., 1.]))).reshape(4, 1))).reshape(1, 4)[0][0:2]
s = np.concatenate((self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation, self.upPointLocation, [2.], [0.]))
self.armState = np.concatenate(
(self.downPointLocation, self.downJointLocation, self.centerLocation, self.upJointLocation,
self.upPointLocation))
return s
def render(self):
if self.viewer is None:
self.viewer = Viewer(self.armState)
self.viewer.render(self.armState)
def random_action(self):
return np.random.rand(5) - 0.5
class Viewer(pyglet.window.Window):
def __init__(self, state):
super(Viewer, self).__init__(width = 800, height = 800, resizable=False, caption='ClimbingArms', vsync=False)
pyglet.gl.glClearColor(1, 1, 1, 1)
self.state = state
self.batch = pyglet.graphics.Batch()
self.goal = self.batch.add(4, pyglet.gl.GL_QUADS, None, ('v2f', [380, 0, 420, 0, 420, 800, 380, 800]), ('c3B', (86, 109, 249) *4))
self.dPole = self.batch.add(4, pyglet.gl.GL_QUADS, None, ('v2f', [int(self.state[0])+400, int(self.state[1]), int(self.state[2])+400, int(self.state[3]), int(self.state[2])+395, 5+int(self.state[3]), int(self.state[0])+395, 5+int(self.state[1])]), ('c3B', (246, 86, 86) *4))
self.dCrank = self.batch.add(4, pyglet.gl.GL_QUADS, None, ('v2f', [int(self.state[2])+400, int(self.state[3]), int(self.state[4])+400, int(self.state[5]), int(self.state[4])+395, 5+int(self.state[5]), int(self.state[2])+395, 5+int(self.state[3])]), ('c3B', (246, 86, 86) *4))
self.uCrank = self.batch.add(4, pyglet.gl.GL_QUADS, None, ('v2f', [int(self.state[4])+400, int(self.state[5]), int(self.state[6])+400, int(self.state[7]), int(self.state[6])+395, 5+int(self.state[7]), int(self.state[4])+395, 5+int(self.state[5])]), ('c3B', (246, 86, 86) *4))
self.uPole = self.batch.add(4, pyglet.gl.GL_QUADS, None, ('v2f', [int(self.state[6])+400, int(self.state[7]), int(self.state[8])+400, int(self.state[9]), int(self.state[8])+395, 5+int(self.state[9]), int(self.state[6])+395, 5+int(self.state[7])]), ('c3B', (246, 86, 86) *4))
def render(self, arm_State):
self._update(arm_State)
self.switch_to()
self.dispatch_events()
self.dispatch_event('on_draw')
self.flip()
def on_draw(self):
self.clear()
self.batch.draw()
def _update(self, arm_State):
state = arm_State.astype(int)
self.dPole.vertices = [int(state[0]) + 400, int(state[1]), int(state[2]) + 400, int(state[3]), int(state[2]) + 395, 5+int(state[3]), int(state[0]) + 395, 5+int(state[1])]
self.dCrank.vertices = [int(state[2])+400, int(state[3]), int(state[4])+400, int(state[5]), int(state[4])+395, 5+int(state[5]), int(state[2])+395, 5+int(state[3])]
self.uCrank.vertices = [int(state[4])+400, int(state[5]), int(state[6])+400, int(state[7]), int(state[6])+395, 5+int(state[7]), int(state[4])+395, 5+int(state[5])]
self.uPole.vertices = [int(state[6])+400, int(state[7]), int(state[8])+400, int(state[9]), int(state[8])+395, 5+int(state[9]), int(state[6])+395, 5+int(state[7])]
if __name__ == '__main__':
a = KinematicEnv()
while True:
time.sleep(0.1)
a.render()
a.stepUp(a.random_action())
|
from item import Item
class Pepe(Item):
NAME = 'Pepe'
QUALITY_FACTOR = 0
SELL_IN_FACTOR = 0
def __init__(self, sell_in, quality):
super().__init__(self.NAME, sell_in, quality)
def update_quality(self):
self.decrase_quality()
self.decrease_sell_in()
if self.sell_in < self.SELL_IN_FACTOR and self.quality > self.QUALITY_FACTOR:
self.decrase_quality()
|
GLOBAL['#FairKey#']=(function(__initProps__){const __global__=this;return runCallback(function(__mod__){with(__mod__.imports){function PicData(){const inner=PicData.__inner__;if(this==__global__){return new PicData({__args__:arguments});}else{const args=arguments.length>0?arguments[0].__args__||arguments:[];inner.apply(this,args);PicData.prototype.ctor.apply(this,args);return this;}}PicData.__inner__=function inner(){this.picUrl='https://www.youxinpai.com/public/home/widget/services/4s/img/img_4s_4_0943ac1.jpg';this.title='';this.content='';};PicData.prototype={};PicData.prototype.ctor=function(){Object.prototype.ctor.call(this);};function _PicNetDemoPageStateful(){const inner=_PicNetDemoPageStateful.__inner__;if(this==__global__){return new _PicNetDemoPageStateful({__args__:arguments});}else{const args=arguments.length>0?arguments[0].__args__||arguments:[];inner.apply(this,args);_PicNetDemoPageStateful.prototype.ctor.apply(this,args);return this;}}_PicNetDemoPageStateful.__inner__=function inner(){this._picData=PicData();};_PicNetDemoPageStateful.prototype={onClick:function onClick(){const __thiz__=this;with(__thiz__){let order_id=10;FairNet().request(convertObjectLiteralToSetOrMap({['pageName']:'#FairKey#',['method']:'GET',['url']:'https://www.wanandroid.com/banner/json',['data']:convertObjectLiteralToSetOrMap({['order_id']:order_id,['content']:'test',['aa']:['hello','world'],}),['success']:function dummy(resp){if(resp==null){return null;}let data=resp.__op_idx__('data');let u=data.__op_idx__(0).__op_idx__('imagePath');_picData.picUrl=u;_picData.title=data.__op_idx__(0).__op_idx__('title');setState('#FairKey#',function dummy(){});},}));}},};_PicNetDemoPageStateful.prototype.ctor=function(){Object.prototype.ctor.call(this);};;return _PicNetDemoPageStateful();}},[]);})(convertObjectLiteralToSetOrMap(JSON.parse('#FairProps#')));
|
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef SERVICES_DATA_DECODER_DATA_DECODER_SERVICE_H_
#define SERVICES_DATA_DECODER_DATA_DECODER_SERVICE_H_
#include <memory>
#include "base/macros.h"
#include "build/chromeos_buildflags.h"
#include "components/web_package/mojom/web_bundle_parser.mojom.h"
#include "mojo/public/cpp/bindings/pending_receiver.h"
#include "mojo/public/cpp/bindings/receiver_set.h"
#include "services/data_decoder/public/mojom/data_decoder_service.mojom.h"
#include "services/data_decoder/public/mojom/gzipper.mojom.h"
#include "services/data_decoder/public/mojom/image_decoder.mojom.h"
#include "services/data_decoder/public/mojom/json_parser.mojom.h"
#include "services/data_decoder/public/mojom/web_bundler.mojom.h"
#include "services/data_decoder/public/mojom/xml_parser.mojom.h"
#if BUILDFLAG(IS_CHROMEOS_ASH)
#include "services/data_decoder/public/mojom/ble_scan_parser.mojom.h"
#endif
namespace data_decoder {
// Implements the mojom::DataDecoderService interface: a dispatcher that binds
// incoming receivers for the individual decoder/parser interfaces (image,
// JSON, XML, web bundle, gzip, and — on Chrome OS Ash — BLE scan parsing).
class DataDecoderService : public mojom::DataDecoderService {
 public:
  // Constructs an unbound instance; pair with BindReceiver() below.
  DataDecoderService();
  // Constructs an instance bound to |receiver| immediately.
  explicit DataDecoderService(
      mojo::PendingReceiver<mojom::DataDecoderService> receiver);
  ~DataDecoderService() override;

  // May be used to establish a latent DataDecoderService binding for this
  // instance. May only be called once, and only if this instance was default-
  // constructed.
  void BindReceiver(mojo::PendingReceiver<mojom::DataDecoderService> receiver);

  // Configures the service to drop ImageDecoder receivers instead of binding
  // them. Useful for tests simulating service failures.
  void SimulateImageDecoderCrashForTesting(bool drop) {
    drop_image_decoders_ = drop;
  }

  // Same as above but for JsonParser receivers.
  void SimulateJsonParserCrashForTesting(bool drop) {
    drop_json_parsers_ = drop;
  }

  // Configures the service to use |binder| to bind
  // WebBundleParserFactory in subsequent
  // BindWebBundleParserFactory() calls.
  void SetWebBundleParserFactoryBinderForTesting(
      base::RepeatingCallback<void(
          mojo::PendingReceiver<web_package::mojom::WebBundleParserFactory>)>
          binder) {
    web_bundle_parser_factory_binder_ = binder;
  }

  // Configures the service to use |binder| to bind WebBundler in subsequent
  // BindWebBundler() calls.
  void SetWebBundlerBinderForTesting(
      base::RepeatingCallback<void(mojo::PendingReceiver<mojom::WebBundler>)>
          binder) {
    web_bundler_binder_ = binder;
  }

 private:
  // mojom::DataDecoderService implementation:
  void BindImageDecoder(
      mojo::PendingReceiver<mojom::ImageDecoder> receiver) override;
  void BindJsonParser(
      mojo::PendingReceiver<mojom::JsonParser> receiver) override;
  void BindXmlParser(mojo::PendingReceiver<mojom::XmlParser> receiver) override;
  void BindWebBundleParserFactory(
      mojo::PendingReceiver<web_package::mojom::WebBundleParserFactory>
          receiver) override;
  void BindWebBundler(
      mojo::PendingReceiver<mojom::WebBundler> receiver) override;
  void BindGzipper(mojo::PendingReceiver<mojom::Gzipper> receiver) override;
#if BUILDFLAG(IS_CHROMEOS_ASH)
  void BindBleScanParser(
      mojo::PendingReceiver<mojom::BleScanParser> receiver) override;
#endif  // BUILDFLAG(IS_CHROMEOS_ASH)

  // In-process instances (e.g. on iOS or in tests) may have multiple concurrent
  // remote DataDecoderService clients.
  mojo::ReceiverSet<mojom::DataDecoderService> receivers_;

  // Test-only failure-simulation flags; see the setters above.
  bool drop_image_decoders_ = false;
  bool drop_json_parsers_ = false;

  // Test-only binder overrides; when unset, production binding is used.
  base::RepeatingCallback<void(
      mojo::PendingReceiver<web_package::mojom::WebBundleParserFactory>)>
      web_bundle_parser_factory_binder_;
  base::RepeatingCallback<void(mojo::PendingReceiver<mojom::WebBundler>)>
      web_bundler_binder_;

  DISALLOW_COPY_AND_ASSIGN(DataDecoderService);
};
} // namespace data_decoder
#endif // SERVICES_DATA_DECODER_DATA_DECODER_SERVICE_H_
|
// Dependencies
// =============================================================
const express = require("express");

// Sets up the Express App
// =============================================================
const app = express();
const PORT = process.env.PORT || 7999;

// Sets up the Express app to handle data parsing.
// NOTE: the urlencoded/json middlewares were folded back into Express core in
// 4.16+, so the deprecated body-parser package is no longer needed — assumes
// Express >= 4.16 (confirm against package.json).
app.use(express.urlencoded({ extended: false }));
app.use(express.json());

// Route registration: HTML routes first, then the JSON API routes.
require("./app/routing/htmlRoutes")(app);
require("./app/routing/apiRoutes")(app);

// Starts the server.
app.listen(PORT, function() {
  console.log("App listening on PORT " + PORT);
});
|
from flask import Flask
from config import FitterConfig
from fitter.cachestore.inmemory import InMemoryStore
from fitter.cachestore.redis import RedisStore
from fitter.storage.fs import FileSystemSourceStorage
from fitter.storage.fs import FileSystemStoreStorage
from fitter.storage.s3 import S3SourceStorage
from fitter.storage.s3 import S3StoreStorage
def _set_file_system_storage(storage_class, storage_config):
return storage_class(
storage_config['LOCATION'],
base_url=storage_config.get('BASE_URL'),
)
def _set_s3_storage(storage_class, storage_config):
return storage_class(
storage_config['AWS_ACCESS_KEY_ID'],
storage_config['AWS_SECRET_ACCESS_KEY'],
storage_config['BUCKET_NAME'],
storage_config['BUCKET_REGION'],
storage_config['LOCATION'],
)
fitter = Flask('fitter')
fitter.config.from_object(FitterConfig())
# Wired below based on configuration; these stay None for unrecognized TYPEs.
fitter.cache_store = None
fitter.source_storage = None
fitter.store_storage = None
# Cache backend selection: 'redis' or 'in-memory'.
if fitter.config['CACHE_STORE']['TYPE'] == 'redis':
    fitter.cache_store = RedisStore(
        fitter.config['CACHE_STORE']['HOST'],
        fitter.config['CACHE_STORE']['PORT'],
        fitter.config['CACHE_STORE']['DB'],
        fitter.config['CACHE_STORE']['PASSWORD'],
    )
elif fitter.config['CACHE_STORE']['TYPE'] == 'in-memory':
    fitter.cache_store = InMemoryStore()
_source_storage_config = fitter.config['SOURCE_STORAGE']
_store_storage_config = fitter.config['STORE_STORAGE']
# Storage backend selection: local filesystem ('fs') or Amazon S3 ('s3'),
# chosen independently for source and store sides.
if _source_storage_config['TYPE'] == 'fs':
    fitter.source_storage = _set_file_system_storage(FileSystemSourceStorage, _source_storage_config)
elif _source_storage_config['TYPE'] == 's3':
    fitter.source_storage = _set_s3_storage(S3SourceStorage, _source_storage_config)
if _store_storage_config['TYPE'] == 'fs':
    fitter.store_storage = _set_file_system_storage(FileSystemStoreStorage, _store_storage_config)
elif _store_storage_config['TYPE'] == 's3':
    fitter.store_storage = _set_s3_storage(S3StoreStorage, _store_storage_config)
|
// This module is required by index.html and runs in the renderer process for
// that window; all Node.js APIs are available here.
const { remote } = require('electron');

// Close the BrowserWindow that currently hosts this page, logging first.
function leave() {
  console.log('closing');
  remote.getCurrentWindow().close();
}

module.exports = leave;
|
// All material copyright ESRI, All Rights Reserved, unless otherwise specified.
// See http://js.arcgis.com/3.15/esri/copyright.txt and http://www.arcgis.com/apps/webappbuilder/copyright.txt for details.
//>>built
// Dojo-built Indonesian (id) NLS string bundle for the
// GriddedReferenceGraphic widget's settings page. Machine-generated by the
// build ("//>>built" above) — do not edit by hand; edit the source nls file.
define({"widgets/GriddedReferenceGraphic/setting/nls/strings":{gridTabLabel:"Grid",labelTabLabel:"Label",referenceSystemTabLabel:"Sistem Referensi",gridTabDescription:"Atur tampakan dan perilaku sel grid.",cellOutlineColor:"Warna Kerangka Sel",cellFillColor:"Warna Isi Sel",transparency:"Transparansi",cellShapeDropDown:"Bentuk Sel","default":"Persegi Empat",hexagon:"Heksagon",cellUnitsDropDown:"Unit Sel",miles:"Mil",kilometers:"Kilometer",feet:"Kaki",meters:"Meter",yards:"Yard","nautical-miles":"Mil Laut",
gridOriginDropDown:"Asal Grid",lowerLeft:"Kiri-Bawah",lowerRight:"Kanan-Bawah",upperLeft:"Kiri-Atas",upperRight:"Kanan-Atas",center:"Tengah",labelTabDescription:"Atur tampakan dan perilaku label grid.",labelSettings:"Gaya Label",font:"Font",textSize:"Ukuran Teks",textColor:"Warna Teks",halo:"Halo",show:"Tampilkan",labelTypeDropDown:"Jenis Label",alphaNumeric:"Alfa-Numerik",alphaAlpha:"Alfa-Alfa",numeric:"Numerik",labelDirectionDropDown:"Arah Label",horizontal:"Horizontal",vertical:"Vertikal",labelOriginDropDown:"Asal Label",
referenceSystemlTabDescription:"Atur sistem referensi default.",MGRS:"MGRS",USNG:"USNG",lockSettings:"Pengaturan Kunci - Ini akan mencegah pengguna dapat mengubah apa pun dari pengaturan terkonfigurasi dalam widget",_localized:{}}});
|
import numpy as np
def to_one_hot(observation, dim):
    """
    Convert Discrete observation to one-hot vector

    Returns a float ndarray of length ``dim`` with a single 1 at index
    ``observation``.
    """
    encoded = np.zeros(dim)
    encoded[observation] = 1
    return encoded
def from_one_hot(observation):
    """Recover the index encoded by a one-hot vector.

    Asserts the vector sums to exactly 1 before taking the argmax.
    """
    total = np.sum(observation)
    assert total == 1
    return np.argmax(observation)
def discount(x, gamma):
    """
    computes discounted sums along 0th dimension of x.
    inputs
    ------
    x: ndarray
    gamma: float
    outputs
    -------
    y: ndarray with same shape as x, satisfying
    y[t] = x[t] + gamma*x[t+1] + gamma^2*x[t+2] + ... + gamma^k x[t+k],
    where k = len(x) - t - 1
    """
    # Imported locally: the module header only pulls in numpy, so the
    # original raised NameError on `scipy` at call time.
    import scipy.signal
    assert x.ndim >= 1
    # lfilter over the reversed array implements the backward recursion
    # y[t] = x[t] + gamma * y[t+1]; reverse again to restore order.
    return scipy.signal.lfilter([1], [1, -gamma], x[::-1], axis=0)[::-1]
|
//
// ShowAlertClass.h
// impcloud_dev
//
// Created by 许阳 on 2019/3/27.
// Copyright © 2019 Elliot. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
// Invoked when the user taps the confirm ("sure") button.
typedef void (^sureBtnClickBlock) (void);
// Invoked when the user taps the cancel button.
typedef void (^CancelBlock) (void);
// Convenience wrappers around alert presentation for confirmation/warning
// dialogs and camera-permission prompts.
@interface ShowAlertClass : NSObject
// Present a confirmation / warning alert showing `msg`.
+ (void)showAlert:(NSString *)msg;
// Present an alert showing `msg`; `handler` runs when the confirm button is
// tapped. Returns the alert controller.
+ (UIAlertController *)showAlert:(NSString *)msg withClickSureBtnHandler:(sureBtnClickBlock)handler;
// Camera-permission alert: returns a controller directing the user to
// Settings; `cancelHandler` runs if the user cancels.
+ (UIAlertController *)getCameraSettingAlertWithCancelHandler:(CancelBlock)cancelHandler ;
@end
NS_ASSUME_NONNULL_END
|
/*
* $Log: set.h,v $
* Revision 1.6 2003/05/20 15:12:55 sccblom
* Added weak bisimulation and trace equivalence.
*
* Stefan.
*
* Revision 1.5 2003/04/22 15:33:27 sccblom
* Distributed branching bisimulation v1.
*
* Revision 1.4 2002/12/05 15:27:43 sccblom
* Fixed a number of bugs for branching bisimulation
* Small improvements for single threaded tool.
*
* Revision 1.3 2002/05/15 12:21:59 sccblom
* Added tex subdirectory and MPI prototype.
*
* Revision 1.2 2002/02/12 13:33:36 sccblom
* First test version.
*
* Revision 1.1 2002/02/08 17:42:15 sccblom
* Just saving.
*
*/
#ifndef SET_H
#define SET_H

#include <stdio.h>

/* Handle of the canonical empty set. */
#define EMPTY_SET 0
/* Sentinel meaning "no set" / uninitialized handle. */
#define UNDEFINED_SET (-1)

/* Print `set` to `f` (raw label/dest numbers). */
extern void SetPrint(FILE *f, int set);
/* Print `set` to `f`, rendering labels through the string table `index`. */
extern void SetPrintIndex(FILE *f, int set, char **index);
/* Reset the set store; `tag` is presumably the initial tag value — confirm
 * against set.c. */
extern void SetClear(int tag);
/* Release all storage owned by the set store.
 * Prototype tightened from `()` (unspecified arguments in C) to `(void)` so
 * accidental calls with arguments are diagnosed by the compiler. */
extern void SetFree(void);
/* Return the set obtained by inserting the (label, dest) pair into `set`. */
extern int SetInsert(int set, int label, int dest);
/* Return the union of `set1` and `set2`. */
extern int SetUnion(int set1, int set2);
/* Get / set the user tag attached to `set`. */
extern int SetGetTag(int set);
extern void SetSetTag(int set, int tag);
/* Number of elements in `set`. */
extern int SetGetSize(int set);
/* Copy the elements of `set` into caller-provided `data` (caller sizes the
 * buffer from SetGetSize). */
extern void SetGetSet(int set, int *data);
/* Accessors for the head element and parent link of `set`'s representation. */
extern int SetGetLabel(int set);
extern int SetGetDest(int set);
extern int SetGetParent(int set);
/* Hash value of `set`. */
extern unsigned int SetGetHash(int set);

#endif
|
import React from "react";
import PropTypes from "prop-types";
import CommentsBlock from "./CommentsBlock";
import RecipeItem from "./RecipeItem";
const RecipePage = ({ recipes, match, searchString }) =>
recipes
? recipes.results
.filter(recipe => {
return recipe.title === match.params.recipeSlug;
})
.map(recipe => {
return (
<div>
<img
className='img-fluid'
src={recipe.thumbnail}
alt={recipe.title}
/>
<div className='card-body'>
<h5 className='card-title'>{recipe.title}</h5>
<p className='card-text'>
<strong>Ingredients: </strong>
{recipe.ingredients}
</p>
</div>
<CommentsBlock match={match} />
</div>
);
})
: recipes.results
.filter(recipe => {
return (
recipe.title.toLowerCase().indexOf(searchString.toLowerCase()) !==
-1 ||
recipe.ingredients
.toLowerCase()
.indexOf(searchString.toLowerCase()) !== -1
);
})
.map(recipe => {
return (
<RecipeItem
key={recipe.title}
thumbnail={recipe.thumbnail}
title={recipe.title}
ingredients={recipe.ingredients}
/>
);
}) || (recipes = []);
RecipePage.propTypes = {
recipes: PropTypes.object
};
export default RecipePage;
|
# from resource.city import get
from resource.city import get
from resource import User
from flask_restful import Api
from flask import Flask
# Startup sanity check: verify resource.city.get is importable/callable and
# print its result.
print(get())
app = Flask(__name__)
# catch_all_404s lets Flask-RESTful handle 404s for unknown routes too.
api = Api(app, catch_all_404s=True)
# Register /user/<id>, plus /user and /user/ so trailing-slash requests match.
api.add_resource(User, '/user/<int:id>', '/user', '/user/')
if __name__ == '__main__':
    # Debug server only — not for production use.
    app.run(debug=True)
|
import numpy
from Bio import SeqIO
import sys
import pysam
# import vcf
from pstats import Stats
from collections import OrderedDict
from cProfile import run
from pstats import Stats
USE_CHASTITY = False
### cat sd_0001_PAO1_5k.sam | python get_alleles_from_sam.py sample_name positions.txt vcf_file
def get_args():
    """Return (sample_name, positions_file, output_file) from sys.argv.

    NOTE(review): on a wrong argument count this prints a usage message and
    implicitly returns None, so the caller's tuple unpacking then raises
    TypeError — a sys.exit() here would fail more clearly; confirm intent.
    """
    if len(sys.argv) != 4:
        print 'incorrect number of args, useage is get_alleles_from_sam.py <sample_name> <positions.txt> <output_file>'
    else:
        return sys.argv[1], sys.argv[2], sys.argv[3]
def read_positions(positions_txt):
    """Parse a ``chrom,position`` CSV file into an ordered mapping.

    Returns an OrderedDict of chrom -> set of 0-based positions (the file's
    values are 1-based; all positions are converted to 0-based, Pythonic).
    """
    positions = OrderedDict()
    # `with` guarantees the handle is closed (the original leaked it), and
    # setdefault collapses the duplicated add-to-set branches.
    with open(positions_txt, 'r') as handle:
        for line in handle:
            cols = line.strip().split(',')
            positions.setdefault(cols[0], set()).add(int(cols[1]) - 1)
    return positions
def init_array(refs, position_file):
    """Allocate one 4 x (max_position + 1) count matrix per reference.

    Rows index the bases A, C, G, T (in that order); columns index 0-based
    reference positions. Returns (array, positions) where positions is the
    mapping produced by read_positions.

    NOTE(review): `refs` is unused here — presumably kept for signature
    compatibility with callers; confirm before removing.
    """
    positions = read_positions(position_file)
    array = {
        ref: numpy.zeros((4, max(pos_set) + 1), dtype=int)
        for ref, pos_set in positions.items()
    }
    return array, positions
def read_sam(position_file):
    """Stream SAM records from stdin via pysam and tally base counts at the
    positions of interest.

    Returns (array, positions) as produced by init_array, with array filled
    in: array[chrom][base_row, pos] is the observed count, where rows 0-3
    correspond to A, C, G, T.
    """
    base_dict = {'A':0, 'C':1, 'G':2, 'T':3}
    # '-' tells pysam to read the alignment stream from stdin.
    alignment = pysam.Samfile('-', 'r')
    array, positions = init_array(alignment.references, position_file)
    for line in alignment:
        # ignore any unmapped reads
        if line.is_unmapped:
            continue
        chrom = alignment.getrname(line.tid)
        # Reference coordinates spanned by this read (Python 2 xrange).
        read_positions = set(xrange(line.pos, line.aend))
        try:
            isecs = positions[chrom].intersection(read_positions)
        except KeyError:
            # Read maps to a chromosome with no positions of interest.
            continue
        if isecs:
            #overlap = [(pos, line.seq[pos-line.pos]) for pos in isec]
            #quality = [(pos, ord(line.qual[pos-line.pos])-33) for pos in isec]
            # Invert pysam's (query, ref) pairs into ref -> query coordinate.
            aligned_pairs = dict((ref, query) for (query, ref) in line.get_aligned_pairs())
            for isec in isecs:
                # Falsy query coordinate means a deletion at this reference
                # position (None) — skipped. NOTE(review): this also skips
                # query position 0, which is falsy; confirm intended.
                if aligned_pairs[isec]:
                    read_base = line.seq[aligned_pairs[isec]]
                    if read_base != 'N':
                        array[chrom][(base_dict[read_base], isec)] += 1
    return array, positions
def write_alleles(array, positions, sample_name, output_file):
    """Call a consensus base at every position of interest and write the
    resulting fragment to `output_file` as a single FASTA record.

    A position is called 'A'/'C'/'G'/'T' only when that base strictly
    out-counts every other base; ties and indel positions become '-'.
    Coverage (and optionally chastity) statistics are printed to stdout.
    """
    frag = ''
    chastity_list = []
    gaps = 0
    #indel_positions = [(position.CHROM, position.POS - 1) for position
    # in list(vcf.Reader(open(sys.argv[3], 'r'))) if position.is_indel]
    # VCF-derived indel masking is disabled; the list is left empty.
    indel_positions = []
    for chrom in positions.keys():
        for pos in sorted(positions[chrom]):
            if (chrom, pos) in indel_positions:
                # Known indel positions are masked out as gaps.
                gaps += 1
                frag += '-'
            else:
                # counts = (A, C, G, T) observation counts at this position.
                counts = tuple(array[chrom][:,pos])
                counts_sort = sorted(counts, reverse=True)
                if all(counts[0] > base for base in counts[1:4]):
                    # chastity is greatest / (greatest + second greatest)
                    if USE_CHASTITY:
                        chastity_list.append(float(counts_sort[0]) /
                                             sum(counts_sort[:2]))
                    frag += 'A'
                elif all(counts[1] > base for base in counts[0:1] + counts[2:4]):
                    if USE_CHASTITY:
                        chastity_list.append(float(counts_sort[0]) /
                                             sum(counts_sort[:2]))
                    frag += 'C'
                elif all(counts[2] > base for base in counts[0:2] + counts[3:4]):
                    if USE_CHASTITY:
                        chastity_list.append(float(counts_sort[0]) /
                                             sum(counts_sort[:2]))
                    frag += 'G'
                elif all(counts[3] > base for base in counts[0:3]):
                    if USE_CHASTITY:
                        chastity_list.append(float(counts_sort[0]) /
                                             sum(counts_sort[:2]))
                    frag += 'T'
                else:
                    # No strict majority base (tie or zero coverage): gap.
                    gaps += 1
                    frag += '-'
    total = sum([len(positions[chrom]) for chrom in positions.keys()])
    print 'Sample: %s' %(sample_name)
    print 'Total positions: %i' %(total)
    print 'Gaps: %i' %(gaps)
    print 'Positions covered: %.2f %%' %(100 - (float(gaps) / total * 100.0))
    print 'Sample: %s' %(sample_name)
    if USE_CHASTITY:
        mean_chastity = (sum(chastity_list) / (len(frag) - gaps)) * 100.0
        print 'Mean chastity: %.2f %%' %(mean_chastity)
        if mean_chastity < 90:
            print 'CHASTITY WARNING: Mixed samples can severely affect accuracy of placement'
    # Write the fragment as a one-record FASTA file (trailing comma suppresses
    # the extra newline from Python 2's print statement).
    with open(output_file, 'w') as file_out:
        print >>file_out, '>%s_new\n%s\n' %(sample_name, frag),
if __name__ == '__main__':
    # Parse CLI args, tally per-position base counts from the SAM stream on
    # stdin, then emit the consensus fragment.
    sample_name, position_file, output_file = get_args()
    array, positions = read_sam(position_file)
    # run('write_alleles(array, positions)', 'stats')
    #stats = Stats('stats')
    write_alleles(array, positions, sample_name, output_file)
|
import { Shape, xy } from '@jsxcad/api-shape';
import { outline, taggedGroup, translate } from '@jsxcad/geometry';
import { seq } from '@jsxcad/api-v1-math';
import { toToolFromTags } from '@jsxcad/algorithm-tool';
// Index of the z component in [x, y, z] coordinate triples.
const Z = 2;

// Subtract `shapes` from `block`, then slice the remaining material into one
// horizontal section per tool pass and inset each by the tool radius so the
// cutter centreline traces the machined pocket.
const carve = (block, tool = {}, ...shapes) => {
  const { grbl = {} } = tool;
  const { diameter = 1, cutDepth = 0.2 } = grbl;
  const negative = block.cut(...shapes);
  const { max, min } = block.size();
  const depth = max[Z] - min[Z];
  const passCount = Math.ceil(depth / cutDepth);
  const depthPerPass = depth / passCount;
  // Section levels are generated bottom-up, then reversed to cut top-down.
  const levels = seq((l) => xy.z(l), {
    from: min[Z],
    upto: max[Z],
    by: depthPerPass,
  }).reverse();
  // Use sectionProfile when it is fixed.
  return negative.section(...levels).inset(diameter / 2, diameter / 2);
};

// Fluent form: shape.carve(tool, ...shapes).
function carveMethod(tool, ...shapes) {
  return carve(this, tool, ...shapes);
}
Shape.prototype.carve = carveMethod;
// Compute milling sections for an already-computed `negative` volume: slice
// it into one section per tool pass and inset by the tool radius.
const mill = (tool = {}, negative) => {
  const { grbl = {} } = tool;
  const { diameter = 1, cutDepth = 0.2 } = grbl;
  const { max, min } = negative.size();
  const depth = max[Z] - min[Z];
  const passCount = Math.ceil(depth / cutDepth);
  const depthPerPass = depth / passCount;
  // Section levels are generated bottom-up, then reversed to cut top-down.
  const levels = seq((l) => xy.z(l), {
    from: min[Z],
    upto: max[Z],
    by: depthPerPass,
  }).reverse();
  // Use sectionProfile when it is fixed.
  return negative.section(...levels).inset(diameter / 2, diameter / 2);
};

// Fluent form: shape.mill(tool, negative).
function millMethod(tool = {}, negative) {
  return mill(tool, negative);
}
Shape.prototype.mill = millMethod;
// Generate engraving toolpaths: for each cutting pass, take the outline of
// `paths` and translate it down by a multiple of the per-pass cut depth.
const engrave = (paths, depth = 0.5) => {
  const { cutDepth = 0.2 } = toToolFromTags('grbl', paths.toGeometry().tags);
  const cuts = Math.ceil(depth / cutDepth);
  const effectiveCutDepth = depth / cuts;
  const toolpaths = [];
  for (let cut = 1; cut <= cuts; cut++) {
    for (const path of outline({}, paths.toGeometry())) {
      toolpaths.push(translate([0, 0, cut * -effectiveCutDepth], path));
    }
  }
  return Shape.fromGeometry(taggedGroup({}, ...toolpaths));
};
// NOTE(review): this forwards (this, tool, ...shapes) into engrave(paths,
// depth), so the `tool` argument lands in engrave's numeric `depth`
// parameter — likely a bug; confirm the intended signature against callers
// before changing it.
function engraveMethod(tool, ...shapes) {
  return engrave(this, tool, ...shapes);
}
Shape.prototype.engrave = engraveMethod;
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 4
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_8_0_1
from isi_sdk_8_0_1.models.auth_ldap_templates_ldap_field_template_item import AuthLdapTemplatesLdapFieldTemplateItem # noqa: E501
from isi_sdk_8_0_1.rest import ApiException
class TestAuthLdapTemplatesLdapFieldTemplateItem(unittest.TestCase):
    """AuthLdapTemplatesLdapFieldTemplateItem unit test stubs

    Swagger-codegen-generated placeholder suite; the single test is a no-op
    until the FIXME below is implemented.
    """
    def setUp(self):
        # No fixtures required for these generated stubs.
        pass
    def tearDown(self):
        pass
    def testAuthLdapTemplatesLdapFieldTemplateItem(self):
        """Test AuthLdapTemplatesLdapFieldTemplateItem"""
        # FIXME: construct object with mandatory attributes with example values
        # model = isi_sdk_8_0_1.models.auth_ldap_templates_ldap_field_template_item.AuthLdapTemplatesLdapFieldTemplateItem() # noqa: E501
        pass
if __name__ == '__main__':
    unittest.main()
|
def nested_dictionnary(dic, nest_list, value, verbose=0):
    """Set ``value`` in ``dic`` at the nested key path ``nest_list``.

    Intermediate dictionaries are created as needed; existing sub-dicts are
    updated in place. Mutates and returns ``dic``.

    :param dic: dictionary to update (mutated).
    :param nest_list: non-empty list of keys forming the path.
    :param value: value to store at the end of the path.
    :param verbose: > 1 enables debug printing.
    """
    if verbose > 1:
        print(f"parameters : {dic} {nest_list} {value}")
    if len(nest_list) == 1:
        dic[nest_list[0]] = value
    else:
        # Recurse into the existing sub-dict (or a fresh one), propagating
        # `verbose` — the original dropped it on recursive calls, silencing
        # nested debug output.
        dic[nest_list[0]] = nested_dictionnary(
            dic.get(nest_list[0], {}), nest_list[1:], value, verbose
        )
    if verbose > 1:
        print(dic)
    return dic
if __name__ == "__main__" :
    # Demo: incrementally build
    # {"StandardScaler": {"X": 1, "Y": 2}, "MinMaxScaler": {"X": 3, "Y": 4}}
    # with verbose tracing enabled.
    response = {}
    nested_dictionnary(response, ["StandardScaler","X"], 1, verbose = 2)
    nested_dictionnary(response, ["StandardScaler","Y"], 2, verbose = 2)
    nested_dictionnary(response, ["MinMaxScaler","X"], 3, verbose = 2)
    nested_dictionnary(response, ["MinMaxScaler","Y"], 4, verbose = 2)
|
# (c) Copyright 2016 Hewlett-Packard Enterprise Development , L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from freezer.mode import mode
class CindernativeMode(mode.Mode):
    """
    Execute a cinder-volume native backup/restore
    """
    def __init__(self, conf):
        # Freezer configuration object, retained for use by the engine.
        self.conf = conf
    @property
    def name(self):
        # Mode identifier used for dispatch/logging.
        return "cindernative"
    @property
    def version(self):
        return "1.0"
    def release(self):
        # No resources to release for native cinder backups.
        pass
    def prepare(self):
        # No preparation needed for native cinder backups.
        pass
|
// Doxygen-generated search index: each entry maps an identifier to the
// documentation anchors where it is declared/defined. Regenerated by Doxygen
// on every build — do not edit by hand.
var searchData=
[
  ['access',['access',['../structgdt__entry__struct.html#a360a726ac0b61d9e4e1be3ad34f80244',1,'gdt_entry_struct::access()'],['../tables_8h.html#a360a726ac0b61d9e4e1be3ad34f80244',1,'access(): tables.h']]],
  ['accessed',['accessed',['../structpage__entry.html#afb99a0327fa4c7332208a4c69586c8ec',1,'page_entry']]],
  ['addcomhistory',['addComHistory',['../com_handler_8h.html#ad0ac0a9ea55a932af636e6144c66c4f3',1,'addComHistory(char *newCom): comHandler.c'],['../com_handler_8c.html#ad0ac0a9ea55a932af636e6144c66c4f3',1,'addComHistory(char *newCom): comHandler.c']]],
  ['addfunctiondef',['addFunctionDef',['../com_handler_8h.html#af45611c390f08b72ad7f4d38fe845bfa',1,'addFunctionDef(char *name, const char *helpString, const char *(funcPointer)(char **args, int numArgs)): comHandler.c'],['../com_handler_8c.html#af45611c390f08b72ad7f4d38fe845bfa',1,'addFunctionDef(char *name, const char *helpString, const char *(funcPointer)(char **args, int numArgs)): comHandler.c']]],
  ['alloc',['alloc',['../heap_8h.html#a2b1d5a9ba11695605f74fc10cd719af5',1,'alloc(u32int size, heap *hp, int align): heap.c'],['../heap_8c.html#a06dae34c7e7c73d518de00212a7c92da',1,'alloc(u32int size, heap *h, int align): heap.c']]],
  ['allocated',['ALLOCATED',['../memory_control_8h.html#aaa0db48d5bc1e51c3fde24e3b8ade641',1,'memoryControl.h']]],
  ['allocatedhead',['allocatedHead',['../memory_control_8c.html#a4eace240c1aefe0d2500fb0b09b7f527',1,'memoryControl.c']]],
  ['allocatemem',['allocateMem',['../mem_commands_8h.html#a8c34ba132332867bae929ac6bce37cc7',1,'allocateMem(char **args, int numArgs): memCommands.c'],['../mem_commands_8c.html#a8c34ba132332867bae929ac6bce37cc7',1,'allocateMem(char **args, int numArgs): memCommands.c']]],
  ['allocatememory',['allocateMemory',['../memory_control_8h.html#ae841b4b92ac63a904d3b357cd0277a0e',1,'allocateMemory(int size): memoryControl.c'],['../memory_control_8c.html#ae841b4b92ac63a904d3b357cd0277a0e',1,'allocateMemory(int size): memoryControl.c']]],
  ['allocatepcb',['allocatePCB',['../pcb_8h.html#abaf5a209b901d0648067e94079dffe22',1,'allocatePCB(): pcb.c'],['../pcb_8c.html#abaf5a209b901d0648067e94079dffe22',1,'allocatePCB(): pcb.c']]],
  ['application',['APPLICATION',['../pcb_8h.html#a796bd7c6ba2e59281760fb155c6287e8',1,'pcb.h']]],
  ['apr',['APR',['../time_8h.html#a14e6757ba0150df2248d1550fb8d13d0',1,'time.h']]],
  ['archive',['ARCHIVE',['../fat_8h.html#a16f293f5117d4ab69140566862a352ca',1,'fat.h']]],
  ['asm',['asm',['../system_8h.html#a71921cebf4610b0dbb2b7a0daaf3fedf',1,'system.h']]],
  ['asm_2eh',['asm.h',['../asm_8h.html',1,'']]],
  ['atoi',['atoi',['../string_8h.html#a30670a60464f77af17dfb353353d6df8',1,'atoi(const char *s): string.c'],['../string_8c.html#a30670a60464f77af17dfb353353d6df8',1,'atoi(const char *s): string.c']]],
  ['attributes',['attributes',['../structdir__entry.html#a983149395439fbc9ca8497076b75fd6b',1,'dir_entry']]],
  ['aug',['AUG',['../time_8h.html#a4ea48e5a35ec64cdaaa559ca9a08b260',1,'time.h']]]
];
|
# coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class WorkRequest(object):
    """
    The asynchronous API request does not take effect immediately. This request spawns an asynchronous workflow to fulfill the request. WorkRequest objects provide visibility for in-progress workflows.
    """
    # Swagger-codegen-generated model: constants, swagger_types/attribute_map
    # metadata, and property accessors follow a fixed generated layout.

    #: A constant which can be used with the operation_type property of a WorkRequest.
    #: This constant has a value of "BULK_APPLY_RECOMMENDATIONS"
    OPERATION_TYPE_BULK_APPLY_RECOMMENDATIONS = "BULK_APPLY_RECOMMENDATIONS"
    #: A constant which can be used with the status property of a WorkRequest.
    #: This constant has a value of "ACCEPTED"
    STATUS_ACCEPTED = "ACCEPTED"
    #: A constant which can be used with the status property of a WorkRequest.
    #: This constant has a value of "IN_PROGRESS"
    STATUS_IN_PROGRESS = "IN_PROGRESS"
    #: A constant which can be used with the status property of a WorkRequest.
    #: This constant has a value of "FAILED"
    STATUS_FAILED = "FAILED"
    #: A constant which can be used with the status property of a WorkRequest.
    #: This constant has a value of "SUCCEEDED"
    STATUS_SUCCEEDED = "SUCCEEDED"
    #: A constant which can be used with the status property of a WorkRequest.
    #: This constant has a value of "CANCELING"
    STATUS_CANCELING = "CANCELING"
    #: A constant which can be used with the status property of a WorkRequest.
    #: This constant has a value of "CANCELED"
    STATUS_CANCELED = "CANCELED"
    def __init__(self, **kwargs):
        """
        Initializes a new WorkRequest object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):
        :param operation_type:
            The value to assign to the operation_type property of this WorkRequest.
            Allowed values for this property are: "BULK_APPLY_RECOMMENDATIONS", 'UNKNOWN_ENUM_VALUE'.
            Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :type operation_type: str
        :param status:
            The value to assign to the status property of this WorkRequest.
            Allowed values for this property are: "ACCEPTED", "IN_PROGRESS", "FAILED", "SUCCEEDED", "CANCELING", "CANCELED", 'UNKNOWN_ENUM_VALUE'.
            Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :type status: str
        :param id:
            The value to assign to the id property of this WorkRequest.
        :type id: str
        :param compartment_id:
            The value to assign to the compartment_id property of this WorkRequest.
        :type compartment_id: str
        :param resources:
            The value to assign to the resources property of this WorkRequest.
        :type resources: list[oci.optimizer.models.WorkRequestResource]
        :param percent_complete:
            The value to assign to the percent_complete property of this WorkRequest.
        :type percent_complete: float
        :param time_accepted:
            The value to assign to the time_accepted property of this WorkRequest.
        :type time_accepted: datetime
        :param time_started:
            The value to assign to the time_started property of this WorkRequest.
        :type time_started: datetime
        :param time_finished:
            The value to assign to the time_finished property of this WorkRequest.
        :type time_finished: datetime
        """
        # Attribute name -> declared Swagger type, used by the SDK's generic
        # (de)serialization machinery.
        self.swagger_types = {
            'operation_type': 'str',
            'status': 'str',
            'id': 'str',
            'compartment_id': 'str',
            'resources': 'list[WorkRequestResource]',
            'percent_complete': 'float',
            'time_accepted': 'datetime',
            'time_started': 'datetime',
            'time_finished': 'datetime'
        }
        # Python attribute name -> JSON key used on the wire.
        self.attribute_map = {
            'operation_type': 'operationType',
            'status': 'status',
            'id': 'id',
            'compartment_id': 'compartmentId',
            'resources': 'resources',
            'percent_complete': 'percentComplete',
            'time_accepted': 'timeAccepted',
            'time_started': 'timeStarted',
            'time_finished': 'timeFinished'
        }
        self._operation_type = None
        self._status = None
        self._id = None
        self._compartment_id = None
        self._resources = None
        self._percent_complete = None
        self._time_accepted = None
        self._time_started = None
        self._time_finished = None
    @property
    def operation_type(self):
        """
        **[Required]** Gets the operation_type of this WorkRequest.
        An enum-like description of the type of work the work request is doing.
        Allowed values for this property are: "BULK_APPLY_RECOMMENDATIONS", 'UNKNOWN_ENUM_VALUE'.
        Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :return: The operation_type of this WorkRequest.
        :rtype: str
        """
        return self._operation_type
    @operation_type.setter
    def operation_type(self, operation_type):
        """
        Sets the operation_type of this WorkRequest.
        An enum-like description of the type of work the work request is doing.
        :param operation_type: The operation_type of this WorkRequest.
        :type: str
        """
        allowed_values = ["BULK_APPLY_RECOMMENDATIONS"]
        # Unrecognized service values are coerced rather than rejected.
        if not value_allowed_none_or_none_sentinel(operation_type, allowed_values):
            operation_type = 'UNKNOWN_ENUM_VALUE'
        self._operation_type = operation_type
    @property
    def status(self):
        """
        **[Required]** Gets the status of this WorkRequest.
        The current status of the work request.
        Allowed values for this property are: "ACCEPTED", "IN_PROGRESS", "FAILED", "SUCCEEDED", "CANCELING", "CANCELED", 'UNKNOWN_ENUM_VALUE'.
        Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :return: The status of this WorkRequest.
        :rtype: str
        """
        return self._status
    @status.setter
    def status(self, status):
        """
        Sets the status of this WorkRequest.
        The current status of the work request.
        :param status: The status of this WorkRequest.
        :type: str
        """
        allowed_values = ["ACCEPTED", "IN_PROGRESS", "FAILED", "SUCCEEDED", "CANCELING", "CANCELED"]
        # Unrecognized service values are coerced rather than rejected.
        if not value_allowed_none_or_none_sentinel(status, allowed_values):
            status = 'UNKNOWN_ENUM_VALUE'
        self._status = status
    @property
    def id(self):
        """
        **[Required]** Gets the id of this WorkRequest.
        The OCID of the work request.
        :return: The id of this WorkRequest.
        :rtype: str
        """
        return self._id
    @id.setter
    def id(self, id):
        """
        Sets the id of this WorkRequest.
        The OCID of the work request.
        :param id: The id of this WorkRequest.
        :type: str
        """
        self._id = id
    @property
    def compartment_id(self):
        """
        **[Required]** Gets the compartment_id of this WorkRequest.
        The OCID of the compartment that contains the work request.
        :return: The compartment_id of this WorkRequest.
        :rtype: str
        """
        return self._compartment_id
    @compartment_id.setter
    def compartment_id(self, compartment_id):
        """
        Sets the compartment_id of this WorkRequest.
        The OCID of the compartment that contains the work request.
        :param compartment_id: The compartment_id of this WorkRequest.
        :type: str
        """
        self._compartment_id = compartment_id
    @property
    def resources(self):
        """
        **[Required]** Gets the resources of this WorkRequest.
        The resources this work request affects.
        :return: The resources of this WorkRequest.
        :rtype: list[oci.optimizer.models.WorkRequestResource]
        """
        return self._resources
    @resources.setter
    def resources(self, resources):
        """
        Sets the resources of this WorkRequest.
        The resources this work request affects.
        :param resources: The resources of this WorkRequest.
        :type: list[oci.optimizer.models.WorkRequestResource]
        """
        self._resources = resources
    @property
    def percent_complete(self):
        """
        **[Required]** Gets the percent_complete of this WorkRequest.
        How much progress the operation has made.
        :return: The percent_complete of this WorkRequest.
        :rtype: float
        """
        return self._percent_complete
    @percent_complete.setter
    def percent_complete(self, percent_complete):
        """
        Sets the percent_complete of this WorkRequest.
        How much progress the operation has made.
        :param percent_complete: The percent_complete of this WorkRequest.
        :type: float
        """
        self._percent_complete = percent_complete
    @property
    def time_accepted(self):
        """
        **[Required]** Gets the time_accepted of this WorkRequest.
        Date and time the work was accepted, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        :return: The time_accepted of this WorkRequest.
        :rtype: datetime
        """
        return self._time_accepted
    @time_accepted.setter
    def time_accepted(self, time_accepted):
        """
        Sets the time_accepted of this WorkRequest.
        Date and time the work was accepted, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        :param time_accepted: The time_accepted of this WorkRequest.
        :type: datetime
        """
        self._time_accepted = time_accepted
    @property
    def time_started(self):
        """
        Gets the time_started of this WorkRequest.
        Date and time the work started, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        :return: The time_started of this WorkRequest.
        :rtype: datetime
        """
        return self._time_started
    @time_started.setter
    def time_started(self, time_started):
        """
        Sets the time_started of this WorkRequest.
        Date and time the work started, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        :param time_started: The time_started of this WorkRequest.
        :type: datetime
        """
        self._time_started = time_started
    @property
    def time_finished(self):
        """
        Gets the time_finished of this WorkRequest.
        Date and time the work completed, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        :return: The time_finished of this WorkRequest.
        :rtype: datetime
        """
        return self._time_finished
    @time_finished.setter
    def time_finished(self, time_finished):
        """
        Sets the time_finished of this WorkRequest.
        Date and time the work completed, in the format defined by RFC3339. Example: `2016-08-25T21:10:29.600Z`
        :param time_finished: The time_finished of this WorkRequest.
        :type: datetime
        """
        self._time_finished = time_finished
    def __repr__(self):
        return formatted_flat_dict(self)
    def __eq__(self, other):
        # Generated structural equality: all model state lives in __dict__.
        if other is None:
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not self == other
|
import React from "react"
import { shallow } from "enzyme"
import toJson from "enzyme-to-json"
import Markdown from "../markdown"
describe("Markdown", () => {
const props = {
data: {
ama: "",
badgeColor: "0e75b6",
badgeLabel: "Profile views",
badgeStyle: "flat",
collaborateOn: "",
contact: "",
currentLearn: "",
currentWork: "currentWork",
devDynamicBlogs: false,
funFact: "",
githubProfileTrophy: false,
githubStats: false,
githubStatsOptions: {
bgColor: "",
cacheSeconds: null,
hideBorder: false,
locale: "en",
textColor: "",
theme: "",
titleColor: "",
},
helpWith: "",
mediumDynamicBlogs: false,
rssDynamicBlogs: false,
subtitle: "A passionate frontend developer from India",
title: "title",
topLanguages: false,
topLanguagesOptions: {
bgColor: "",
cacheSeconds: null,
hideBorder: false,
locale: "en",
textColor: "",
theme: "",
titleColor: "",
},
twitterBadge: false,
visitorsBadge: false,
},
link: {
blog: "blog",
collaborateOn: "collaborateOn",
currentWork: "currentWork",
helpWith: "helpWith",
portfolio: "portfolio",
resume: "resume",
},
prefix: {
ama: "💬 Ask me about",
blog: "📝 I regulary write articles on",
collaborateOn: "👯 I’m looking to collaborate on",
contact: "📫 How to reach me",
currentLearn: "🌱 I’m currently learning",
currentWork: "🔭 I’m currently working on",
funFact: "⚡ Fun fact",
helpWith: "🤝 I’m looking for help with",
portfolio: "👨💻 All of my projects are available at",
resume: "📄 Know about my experiences",
title: "Hi 👋, I'm",
},
skills: {
javascript: true,
express: false,
},
social: {
dev: "dev",
codechef: "",
},
}
it("renders without subtitle", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
subtitle: "",
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders without prefix.title and data.title", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
title: "",
}}
prefix={{
...props.prefix,
title: "",
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders topLanguages is true", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
topLanguages: true,
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders topLanguages is true and githubStats is true", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
topLanguages: true,
githubStats: true,
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders devDynamicBlogs is true", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
devDynamicBlogs: true,
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders without link.currentWork", () => {
const component = shallow(
<Markdown
{...props}
link={{
...props.data,
currentWork: "",
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders visitorsBadge is true", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
visitorsBadge: true,
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders twitterBadge is true", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
twitterBadge: true,
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders githubProfileTrophy is true", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
githubProfileTrophy: true,
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
it("renders githubProfileTrophy is true", () => {
const component = shallow(
<Markdown
{...props}
data={{
...props.data,
githubProfileTrophy: true,
}}
/>
)
expect(toJson(component)).toMatchSnapshot()
})
})
|
#include <stdio.h>
/* Entry point: prints a single greeting line to stdout and exits successfully. */
int main(void)
{
    puts("Hello"); /* puts appends the newline, matching the original output */
    return 0;
}
|
// Auto-generated by rustdoc: registers trait-implementor metadata for the docs
// UI. Regenerated on every `cargo doc` run — do not edit by hand.
(function() {var implementors = {};
implementors["parking_lot"] = [{"text":"impl <a class=\"trait\" href=\"lock_api/rwlock/trait.RawRwLockUpgradeTimed.html\" title=\"trait lock_api::rwlock::RawRwLockUpgradeTimed\">RawRwLockUpgradeTimed</a> for <a class=\"struct\" href=\"parking_lot/struct.RawRwLock.html\" title=\"struct parking_lot::RawRwLock\">RawRwLock</a>","synthetic":false,"types":["parking_lot::raw_rwlock::RawRwLock"]}];
if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})()
|
# Copyright 2019 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from unittest import mock
import pytest
import numpy as np
from google.protobuf import any_pb2, timestamp_pb2
from google.protobuf.text_format import Merge
import cirq
import cirq_google as cg
from cirq_google.api import v1, v2
from cirq_google.engine import util
from cirq_google.cloud import quantum
from cirq_google.engine.engine import EngineContext
from cirq_google.engine.result_type import ResultType
_BATCH_PROGRAM_V2 = util.pack_any(
Merge(
"""programs { language {
gate_set: "xmon"
}
circuit {
scheduling_strategy: MOMENT_BY_MOMENT
moments {
operations {
gate {
id: "xy"
}
args {
key: "axis_half_turns"
value {
arg_value {
float_value: 0.0
}
}
}
args {
key: "half_turns"
value {
arg_value {
float_value: 0.5
}
}
}
qubits {
id: "5_2"
}
}
}
moments {
operations {
gate {
id: "meas"
}
args {
key: "invert_mask"
value {
arg_value {
bool_values {
}
}
}
}
args {
key: "key"
value {
arg_value {
string_value: "result"
}
}
}
qubits {
id: "5_2"
}
}
}
}
}
""",
v2.batch_pb2.BatchProgram(),
)
)
_PROGRAM_V2 = util.pack_any(
Merge(
"""language {
gate_set: "xmon"
}
circuit {
scheduling_strategy: MOMENT_BY_MOMENT
moments {
operations {
gate {
id: "xy"
}
args {
key: "axis_half_turns"
value {
arg_value {
float_value: 0.0
}
}
}
args {
key: "half_turns"
value {
arg_value {
float_value: 0.5
}
}
}
qubits {
id: "5_2"
}
}
}
moments {
operations {
gate {
id: "meas"
}
args {
key: "invert_mask"
value {
arg_value {
bool_values {
}
}
}
}
args {
key: "key"
value {
arg_value {
string_value: "result"
}
}
}
qubits {
id: "5_2"
}
}
}
}
""",
v2.program_pb2.Program(),
)
)
@mock.patch('cirq_google.engine.engine_client.EngineClient.create_job')
def test_run_sweeps_delegation(create_job):
    """run_sweep delegates to EngineClient.create_job and wraps the returned job."""
    create_job.return_value = ('steve', quantum.QuantumJob())
    program = cg.EngineProgram('my-proj', 'my-prog', EngineContext())
    param_resolver = cirq.ParamResolver({})
    job = program.run_sweep(
        job_id='steve', repetitions=10, params=param_resolver, processor_ids=['mine']
    )
    assert job._job == quantum.QuantumJob()
@mock.patch('cirq_google.engine.engine_client.EngineClient.create_job')
def test_run_batch_delegation(create_job):
    """run_batch on a Batch-typed program delegates job creation to the client."""
    create_job.return_value = ('kittens', quantum.QuantumJob())
    program = cg.EngineProgram('my-meow', 'my-meow', EngineContext(), result_type=ResultType.Batch)
    resolver_list = [cirq.Points('cats', [1.0, 2.0, 3.0]), cirq.Points('cats', [4.0, 5.0, 6.0])]
    job = program.run_batch(
        job_id='steve', repetitions=10, params_list=resolver_list, processor_ids=['lazykitty']
    )
    assert job._job == quantum.QuantumJob()
@mock.patch('cirq_google.engine.engine_client.EngineClient.create_job')
def test_run_calibration_delegation(create_job):
    """run_calibration delegates to EngineClient.create_job."""
    create_job.return_value = ('dogs', quantum.QuantumJob())
    program = cg.EngineProgram('woof', 'woof', EngineContext(), result_type=ResultType.Calibration)
    job = program.run_calibration(processor_ids=['lazydog'])
    assert job._job == quantum.QuantumJob()
@mock.patch('cirq_google.engine.engine_client.EngineClient.create_job')
def test_run_calibration_no_processors(create_job):
    """run_calibration without processor_ids raises ValueError."""
    create_job.return_value = ('dogs', quantum.QuantumJob())
    program = cg.EngineProgram('woof', 'woof', EngineContext(), result_type=ResultType.Calibration)
    with pytest.raises(ValueError, match='No processors specified'):
        _ = program.run_calibration(job_id='spot')
@mock.patch('cirq_google.engine.engine_client.EngineClient.create_job')
def test_run_batch_no_sweeps(create_job):
    """run_batch without params_list builds one empty sweep per batched program."""
    # Running with no sweeps is fine. Uses program's batch size to create
    # proper empty sweeps.
    create_job.return_value = ('kittens', quantum.QuantumJob())
    program = cg.EngineProgram(
        'my-meow',
        'my-meow',
        _program=quantum.QuantumProgram(code=_BATCH_PROGRAM_V2),
        context=EngineContext(),
        result_type=ResultType.Batch,
    )
    job = program.run_batch(job_id='steve', repetitions=10, processor_ids=['lazykitty'])
    assert job._job == quantum.QuantumJob()
    batch_run_context = v2.batch_pb2.BatchRunContext()
    create_job.call_args[1]['run_context'].Unpack(batch_run_context)
    # The fixture batch contains exactly one program, so exactly one context.
    assert len(batch_run_context.run_contexts) == 1
def test_run_batch_no_processors():
    """run_batch without processor_ids raises ValueError."""
    program = cg.EngineProgram('no-meow', 'no-meow', EngineContext(), result_type=ResultType.Batch)
    resolver_list = [cirq.Points('cats', [1.0, 2.0]), cirq.Points('cats', [3.0, 4.0])]
    with pytest.raises(ValueError, match='No processors specified'):
        _ = program.run_batch(repetitions=1, params_list=resolver_list)
def test_run_batch_not_in_batch_mode():
    """run_batch on a non-Batch program raises ValueError."""
    program = cg.EngineProgram('no-meow', 'no-meow', EngineContext())
    resolver_list = [cirq.Points('cats', [1.0, 2.0, 3.0]), cirq.Points('cats', [4.0, 5.0, 6.0])]
    with pytest.raises(ValueError, match='Can only use run_batch'):
        _ = program.run_batch(repetitions=1, processor_ids=['lazykitty'], params_list=resolver_list)
def test_run_in_batch_mode():
    """run_sweep on a Batch-typed program raises ValueError directing to run_batch."""
    program = cg.EngineProgram('no-meow', 'no-meow', EngineContext(), result_type=ResultType.Batch)
    with pytest.raises(ValueError, match='Please use run_batch'):
        _ = program.run_sweep(
            repetitions=1, processor_ids=['lazykitty'], params=cirq.Points('cats', [1.0, 2.0, 3.0])
        )
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_job_results')
@mock.patch('cirq_google.engine.engine_client.EngineClient.create_job')
def test_run_delegation(create_job, get_results):
    """run() creates a job, waits for SUCCESS, and decodes results into a ResultDict."""
    create_job.return_value = (
        'steve',
        quantum.QuantumJob(
            name='projects/a/programs/b/jobs/steve',
            execution_status=quantum.ExecutionStatus(state=quantum.ExecutionStatus.State.SUCCESS),
        ),
    )
    # Single sweep result: 4 repetitions of measurement key 'q' on qubit (1, 1);
    # the packed byte '\006' decodes to the bit pattern [0, 1, 1, 0].
    get_results.return_value = quantum.QuantumResult(
        result=util.pack_any(
            Merge(
                """sweep_results: [{
        repetitions: 4,
        parameterized_results: [{
            params: {
                assignments: {
                    key: 'a'
                    value: 1
                }
            },
            measurement_results: {
                key: 'q'
                qubit_measurement_results: [{
                  qubit: {
                    id: '1_1'
                  }
                  results: '\006'
                }]
            }
        }]
    }]
""",
                v2.result_pb2.Result(),
            )
        )
    )
    program = cg.EngineProgram('a', 'b', EngineContext())
    param_resolver = cirq.ParamResolver({})
    results = program.run(
        job_id='steve', repetitions=10, param_resolver=param_resolver, processor_ids=['mine']
    )
    assert results == cirq.ResultDict(
        params=cirq.ParamResolver({'a': 1.0}),
        measurements={'q': np.array([[False], [True], [True], [False]], dtype=bool)},
    )
@mock.patch('cirq_google.engine.engine_client.EngineClient.list_jobs')
def test_list_jobs(list_jobs):
    """list_jobs forwards filters to the client and parses job resource names."""
    job1 = quantum.QuantumJob(name='projects/proj/programs/prog1/jobs/job1')
    job2 = quantum.QuantumJob(name='projects/otherproj/programs/prog1/jobs/job2')
    list_jobs.return_value = [job1, job2]
    ctx = EngineContext()
    result = cg.EngineProgram(project_id='proj', program_id='prog1', context=ctx).list_jobs()
    list_jobs.assert_called_once_with(
        'proj',
        'prog1',
        created_after=None,
        created_before=None,
        has_labels=None,
        execution_states=None,
    )
    # IDs come from each returned job's resource name, not the query arguments.
    assert [(j.program_id, j.project_id, j.job_id, j.context, j._job) for j in result] == [
        ('prog1', 'proj', 'job1', ctx, job1),
        ('prog1', 'otherproj', 'job2', ctx, job2),
    ]
def test_engine():
    """engine() returns an Engine bound to the program's project."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    assert program.engine().project_id == 'a'
def test_get_job():
    """get_job() returns an EngineJob carrying the requested job id."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    assert program.get_job('c').job_id == 'c'
def test_create_time():
    """create_time() converts the protobuf Timestamp to an aware UTC datetime."""
    program = cg.EngineProgram(
        'a',
        'b',
        EngineContext(),
        _program=quantum.QuantumProgram(create_time=timestamp_pb2.Timestamp(seconds=1581515101)),
    )
    assert program.create_time() == datetime.datetime(
        2020, 2, 12, 13, 45, 1, tzinfo=datetime.timezone.utc
    )
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_update_time(get_program):
    """update_time() fetches the program and converts its update timestamp to UTC."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    get_program.return_value = quantum.QuantumProgram(
        update_time=timestamp_pb2.Timestamp(seconds=1581515101)
    )
    assert program.update_time() == datetime.datetime(
        2020, 2, 12, 13, 45, 1, tzinfo=datetime.timezone.utc
    )
    get_program.assert_called_once_with('a', 'b', False)
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_description(get_program):
    """description() uses the cached _program when present, else fetches once."""
    program = cg.EngineProgram(
        'a', 'b', EngineContext(), _program=quantum.QuantumProgram(description='hello')
    )
    assert program.description() == 'hello'
    get_program.return_value = quantum.QuantumProgram(description='hello')
    assert cg.EngineProgram('a', 'b', EngineContext()).description() == 'hello'
    get_program.assert_called_once_with('a', 'b', False)
@mock.patch('cirq_google.engine.engine_client.EngineClient.set_program_description')
def test_set_description(set_program_description):
    """set_description() forwards to the client, including the empty string."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    set_program_description.return_value = quantum.QuantumProgram(description='world')
    assert program.set_description('world').description() == 'world'
    set_program_description.assert_called_with('a', 'b', 'world')
    set_program_description.return_value = quantum.QuantumProgram(description='')
    assert program.set_description('').description() == ''
    set_program_description.assert_called_with('a', 'b', '')
def test_labels():
    """labels() reflects the labels of the cached program proto."""
    program = cg.EngineProgram(
        'a', 'b', EngineContext(), _program=quantum.QuantumProgram(labels={'t': '1'})
    )
    assert program.labels() == {'t': '1'}
@mock.patch('cirq_google.engine.engine_client.EngineClient.set_program_labels')
def test_set_labels(set_program_labels):
    """set_labels() replaces the full label map, including clearing to empty."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    set_program_labels.return_value = quantum.QuantumProgram(labels={'a': '1', 'b': '1'})
    assert program.set_labels({'a': '1', 'b': '1'}).labels() == {'a': '1', 'b': '1'}
    set_program_labels.assert_called_with('a', 'b', {'a': '1', 'b': '1'})
    set_program_labels.return_value = quantum.QuantumProgram()
    assert program.set_labels({}).labels() == {}
    set_program_labels.assert_called_with('a', 'b', {})
@mock.patch('cirq_google.engine.engine_client.EngineClient.add_program_labels')
def test_add_labels(add_program_labels):
    """add_labels() merges new labels into the existing map, overwriting duplicates."""
    program = cg.EngineProgram(
        'a', 'b', EngineContext(), _program=quantum.QuantumProgram(labels={})
    )
    assert program.labels() == {}
    add_program_labels.return_value = quantum.QuantumProgram(labels={'a': '1'})
    assert program.add_labels({'a': '1'}).labels() == {'a': '1'}
    add_program_labels.assert_called_with('a', 'b', {'a': '1'})
    add_program_labels.return_value = quantum.QuantumProgram(labels={'a': '2', 'b': '1'})
    assert program.add_labels({'a': '2', 'b': '1'}).labels() == {'a': '2', 'b': '1'}
    add_program_labels.assert_called_with('a', 'b', {'a': '2', 'b': '1'})
@mock.patch('cirq_google.engine.engine_client.EngineClient.remove_program_labels')
def test_remove_labels(remove_program_labels):
    """remove_labels() deletes the given keys; unknown keys are tolerated."""
    program = cg.EngineProgram(
        'a', 'b', EngineContext(), _program=quantum.QuantumProgram(labels={'a': '1', 'b': '1'})
    )
    assert program.labels() == {'a': '1', 'b': '1'}
    remove_program_labels.return_value = quantum.QuantumProgram(labels={'b': '1'})
    assert program.remove_labels(['a']).labels() == {'b': '1'}
    remove_program_labels.assert_called_with('a', 'b', ['a'])
    remove_program_labels.return_value = quantum.QuantumProgram(labels={})
    assert program.remove_labels(['a', 'b', 'c']).labels() == {}
    remove_program_labels.assert_called_with('a', 'b', ['a', 'b', 'c'])
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_get_circuit_v1(get_program):
    """get_circuit() rejects programs serialized with the deprecated v1 format."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    get_program.return_value = quantum.QuantumProgram(code=util.pack_any(v1.program_pb2.Program()))
    with pytest.raises(ValueError, match='v1 Program is not supported'):
        program.get_circuit()
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_get_circuit_v2(get_program):
    """get_circuit() deserializes a v2 Program back into the original cirq Circuit."""
    circuit = cirq.Circuit(
        cirq.X(cirq.GridQubit(5, 2)) ** 0.5, cirq.measure(cirq.GridQubit(5, 2), key='result')
    )
    program = cg.EngineProgram('a', 'b', EngineContext())
    get_program.return_value = quantum.QuantumProgram(code=_PROGRAM_V2)
    assert program.get_circuit() == circuit
    get_program.assert_called_once_with('a', 'b', True)
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_get_circuit_batch(get_program):
    """get_circuit() on a batch requires a valid in-range program index."""
    circuit = cirq.Circuit(
        cirq.X(cirq.GridQubit(5, 2)) ** 0.5, cirq.measure(cirq.GridQubit(5, 2), key='result')
    )
    program = cg.EngineProgram('a', 'b', EngineContext())
    get_program.return_value = quantum.QuantumProgram(code=_BATCH_PROGRAM_V2)
    with pytest.raises(ValueError, match='A program number must be specified'):
        program.get_circuit()
    with pytest.raises(ValueError, match='Only 1 in the batch but index 1 was specified'):
        program.get_circuit(1)
    assert program.get_circuit(0) == circuit
    get_program.assert_called_once_with('a', 'b', True)
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_get_batch_size(get_program):
    """batch_size() works from cache or fetch, and rejects non-batch programs."""
    # Has to fetch from engine if not _program specified.
    program = cg.EngineProgram('a', 'b', EngineContext(), result_type=ResultType.Batch)
    get_program.return_value = quantum.QuantumProgram(code=_BATCH_PROGRAM_V2)
    assert program.batch_size() == 1
    # If _program specified, uses that value.
    program = cg.EngineProgram(
        'a',
        'b',
        EngineContext(),
        _program=quantum.QuantumProgram(code=_BATCH_PROGRAM_V2),
        result_type=ResultType.Batch,
    )
    assert program.batch_size() == 1
    with pytest.raises(ValueError, match='ResultType.Program'):
        program = cg.EngineProgram('a', 'b', EngineContext(), result_type=ResultType.Program)
        _ = program.batch_size()
    with pytest.raises(ValueError, match='cirq.google.api.v2.Program'):
        get_program.return_value = quantum.QuantumProgram(code=_PROGRAM_V2)
        program = cg.EngineProgram('a', 'b', EngineContext(), result_type=ResultType.Batch)
        _ = program.batch_size()
@pytest.fixture(scope='session', autouse=True)
def mock_grpc_client():
    """Session-wide autouse fixture: stubs the gRPC client so no test hits the network."""
    with mock.patch(
        'cirq_google.engine.engine_client.quantum.QuantumEngineServiceClient'
    ) as _fixture:
        yield _fixture
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_get_circuit_v2_unknown_gateset(get_program):
    """get_circuit() surfaces the offending gate set name in its error."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    get_program.return_value = quantum.QuantumProgram(
        code=util.pack_any(
            v2.program_pb2.Program(language=v2.program_pb2.Language(gate_set="BAD_GATESET"))
        )
    )
    with pytest.raises(ValueError, match='BAD_GATESET'):
        program.get_circuit()
@mock.patch('cirq_google.engine.engine_client.EngineClient.get_program')
def test_get_circuit_unsupported_program_type(get_program):
    """get_circuit() surfaces the unknown Any type URL in its error."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    get_program.return_value = quantum.QuantumProgram(
        code=any_pb2.Any(type_url='type.googleapis.com/unknown.proto')
    )
    with pytest.raises(ValueError, match='unknown.proto'):
        program.get_circuit()
@mock.patch('cirq_google.engine.engine_client.EngineClient.delete_program')
def test_delete(delete_program):
    """delete() forwards the delete_jobs flag to the client."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    program.delete()
    delete_program.assert_called_with('a', 'b', delete_jobs=False)
    program.delete(delete_jobs=True)
    delete_program.assert_called_with('a', 'b', delete_jobs=True)
@mock.patch('cirq_google.engine.engine_client.EngineClient.delete_job')
def test_delete_jobs(delete_job):
    """delete_job() delegates to the client with project, program and job ids."""
    program = cg.EngineProgram('a', 'b', EngineContext())
    program.delete_job('c')
    delete_job.assert_called_with('a', 'b', 'c')
def test_str():
    """str() shows the project and program ids."""
    program = cg.EngineProgram('my-proj', 'my-prog', EngineContext())
    assert str(program) == 'EngineProgram(project_id=\'my-proj\', program_id=\'my-prog\')'
|
import React from 'react';
import styled from 'styled-components';
// Page wrapper: horizontal padding plus heading/body text rhythm.
const Container = styled.section`
  padding: 0 2rem;
  h1 {
    font-size: 2rem;
    margin-bottom: 1rem;
  }
  p {
    line-height: 1.5;
  }
`;
// Responsive grid of service cards: 2x2 on small screens, a single
// four-column row from 1024px up.
const Services = styled.section`
  display: grid;
  grid-template-rows: 1fr 1fr;
  grid-template-columns: 1fr 1fr;
  grid-gap: 2rem;
  margin: 2rem 0;
  @media (min-width: 1024px) {
    grid-template-rows: 1fr;
    grid-template-columns: 1fr 1fr 1fr 1fr;
  }
`;
// Single service card: a rounded, shadowed image above a muted caption;
// the `> * + *` rule adds vertical spacing between direct children.
const Service = styled.article`
  display: flex;
  flex-direction: column;
  align-items: center;
  div {
    width: 10rem;
    max-width: 35vw;
    height: 100%;
    img {
      width: 100%;
      height: 100%;
      border-radius: 70% 50% / 70% 60%;
      box-shadow: 0 10px 15px rgba(0, 0, 0, 0.22),
        0 1px 8px 2px rgba(0, 0, 0, 0.22);
      opacity: 0.75;
    }
  }
  span {
    opacity: 0.65;
    font-size: 1.2rem;
  }
  > * + * {
    margin-top: 1.5rem;
  }
`;
const Massages = ({ title, body }) => {
return (
<Container>
<h1>{title}</h1>
<p dangerouslySetInnerHTML={{ __html: body }} />
<Services>
<Service>
<div>
<img src="/essential.jpg" />
</div>
<span>Oli Essenziali</span>
</Service>
<Service>
<div>
<img src="/ayurveda.jpg" />
</div>
<span>Ayurveda</span>
</Service>
<Service>
<div>
<img src="/sunset.jpg" />
</div>
<span>Linfodrenaggio</span>
</Service>
<Service>
<div>
<img src="/hot-stones.jpg" />
</div>
<span>Hot Stones</span>
</Service>
</Services>
</Container>
);
};
export default Massages;
|
#ifndef CONSOLEDISPLAY_H
#define CONSOLEDISPLAY_H
#include "Display.h"
class ConsoleDisplay : public Display {
public:
void show();
};
#endif
|
# import portality.models, workflows, cerif
import json

import requests

from portality import models
from portality import settings
from portality.gtrindexer import cerif
from portality.gtrindexer import workflows
def project_handler(project, cerif_project):
    """Persist a crawled GtR project and, when supplied, its CERIF counterpart."""
    proj = models.Project(**project.as_dict())
    print "saving data from " + project.url()
    proj.save()
    if cerif_project is not None:
        cproj = models.CerifProject(**cerif_project.as_dict())
        print "saving data from " + cerif_project.url()
        cproj.save()
def person_handler(person):
    """Persist a crawled GtR person record."""
    pers = models.Person(**person.as_dict())
    print "saving data from " + person.url()
    pers.save()
def organisation_handler(organisation):
    """Persist a crawled GtR organisation record."""
    org = models.Organisation(**organisation.as_dict())
    print "saving data from " + organisation.url()
    org.save()
def publication_handler(publication):
    """Persist a crawled GtR publication record."""
    pub = models.Publication(**publication.as_dict())
    print "saving data from " + publication.url()
    pub.save()
def initialise_index():
    """Create the GtR Elasticsearch index and push each type mapping if absent.

    Requires the ``requests`` and ``json`` modules at module scope.
    For every mapping in settings.GTR_MAPPINGS: GET the mapping URL; a
    non-200 response means the type is missing, so the index is (re)posted
    and the mapping PUT in place.
    """
    mappings = settings.GTR_MAPPINGS
    i = str(settings.ELASTIC_SEARCH_HOST).rstrip('/')
    i += '/' + settings.GTR_INDEX
    for key, mapping in mappings.iteritems():
        im = i + '/' + key + '/_mapping'
        exists = requests.get(im)
        if exists.status_code != 200:
            ri = requests.post(i)
            r = requests.put(im, json.dumps(mapping))
            print key, r.status_code
def indexgtr():
    """Full GtR ingest: prepare the index, crawl all record types, then
    fetch and store the CERIF classification classes."""
    # ensure the index with the right mappings exists
    initialise_index()
    # use the crawler to crawl all of the gtr data
    workflows.crawl("http://gtr.rcuk.ac.uk/", min_request_gap=0,
        project_limit=None, project_callback=project_handler, pass_cerif_project=True,
        person_limit=None, person_callback=person_handler,
        organisation_limit=None, organisation_callback=organisation_handler,
        publication_limit=None, publication_callback=publication_handler
        )
    # index the cerif classes
    client = cerif.GtRCerif("http://gtr.rcuk.ac.uk/")
    classes = client.cerif_classes()
    for k, o in classes.iteritems():
        c = models.CerifClass(**o)
        print "saving cerif class " + k
        c.save()
# Allow running the full ingest directly as a script.
if __name__ == "__main__":
    indexgtr()
|
/*
* BMKPolyline.h
* BMapKit
*
* Copyright 2011 Baidu Inc. All rights reserved.
*
*/
#import "BMKMultiPoint.h"
#import "BMKOverlay.h"
/// Defines a polyline (a connected sequence of line segments) on the map.
@interface BMKPolyline : BMKMultiPoint <BMKOverlay>
/**
 * Creates a polyline from the specified points.
 * @param points array of planar (projected) map points
 * @param count number of points in the array
 * @return the newly created polyline
 */
+ (BMKPolyline *)polylineWithPoints:(BMKMapPoint *)points count:(NSUInteger)count;
/**
 * Creates a polyline from the specified coordinates.
 * @param coords array of latitude/longitude coordinates
 * @param count number of coordinates in the array
 * @return the newly created polyline
 */
+ (BMKPolyline *)polylineWithCoordinates:(CLLocationCoordinate2D *)coords count:(NSUInteger)count;
/**
 * Replaces the polyline's points.
 * @param points array of planar (projected) map points
 * @param count number of points in the array
 * @return whether the update succeeded
 */
// NOTE(review): the setters take NSInteger while the factory methods take
// NSUInteger — confirm the signed/unsigned mismatch is intentional.
- (BOOL)setPolylineWithPoints:(BMKMapPoint *)points count:(NSInteger) count;
/**
 * Replaces the polyline's points.
 * @param coords array of latitude/longitude coordinates
 * @param count number of coordinates in the array
 * @return whether the update succeeded
 */
- (BOOL)setPolylineWithCoordinates:(CLLocationCoordinate2D *)coords count:(NSInteger) count;
#pragma mark - 以下方法和属性只适用于分段纹理绘制和分段颜色绘制
/// Texture index array (also used as the color index array) for segmented drawing.
@property (nonatomic, strong) NSArray<NSNumber *> *textureIndex;
/**
 * Segmented-texture / segmented-color drawing: creates a polyline from the given points.
 *
 * Segmented texture: the corresponding BMKPolylineView must load texture images via
 * - (BOOL)loadStrokeTextureImages:(NSArray <UIImage *>*)textureImages; otherwise the
 * default gray texture is used.
 * Segmented color: the corresponding BMKPolylineView must set its colors property.
 *
 * @param points array of planar (projected) map points
 * @param count number of points in the array
 * @param textureIndex texture (color) index array; NSNumber members must be
 *        non-negative — negative values are treated as 0
 * @return the newly created polyline
 */
+ (BMKPolyline *)polylineWithPoints:(BMKMapPoint *)points count:(NSUInteger)count textureIndex:(NSArray<NSNumber *> *) textureIndex;
/**
 * Segmented-texture / segmented-color drawing: creates a polyline from the given coordinates.
 *
 * Segmented texture: the corresponding BMKPolylineView must load texture images via
 * - (BOOL)loadStrokeTextureImages:(NSArray <UIImage *>*)textureImages; otherwise the
 * default gray texture is used.
 * Segmented color: the corresponding BMKPolylineView must set its colors property.
 *
 * @param coords array of latitude/longitude coordinates
 * @param count number of coordinates in the array
 * @param textureIndex texture (color) index array; NSNumber members must be
 *        non-negative — negative values are treated as 0
 * @return the newly created polyline
 */
+ (BMKPolyline *)polylineWithCoordinates:(CLLocationCoordinate2D *)coords count:(NSUInteger)count textureIndex:(NSArray<NSNumber *> *) textureIndex;
/**
 * Replaces the polyline's points and texture indices.
 * @param points array of planar (projected) map points
 * @param count number of points in the array
 * @param textureIndex texture (color) index array; NSNumber members must be
 *        non-negative — negative values are treated as 0
 * @return whether the update succeeded
 */
- (BOOL)setPolylineWithPoints:(BMKMapPoint *)points count:(NSInteger) count textureIndex:(NSArray<NSNumber *> *) textureIndex;
/**
 * Replaces the polyline's coordinates and texture indices.
 * @param coords array of latitude/longitude coordinates
 * @param count number of coordinates in the array
 * @param textureIndex texture (color) index array; NSNumber members must be
 *        non-negative — negative values are treated as 0
 * @return whether the update succeeded
 */
- (BOOL)setPolylineWithCoordinates:(CLLocationCoordinate2D *)coords count:(NSInteger) count textureIndex:(NSArray<NSNumber *> *) textureIndex;
@end
|
import MQTTClient from './MQTTClient';
import Constants from '../constants/';
import store from '../store';
import * as action from '../action/';
import { getUrlFor } from '../reducers';
import { subscribe } from 'redux-subscriber';
const mqttEvents = Constants.MQTTEvents;
/**
 * Attach all MQTT event handlers to a client instance, dispatching the
 * corresponding Redux actions into the global store.
 * @param {object} client - an MQTTClient instance
 */
const bindHandlers = client => {
  client.bind(mqttEvents.CONNECTION_ESTABLISHED, message => {
    console.log(message.toString());
  });
  client.bind(mqttEvents.SENSOR_ADDED, params => {
    store.dispatch(
      action.fetchSensor(
        getUrlFor(store.getState(), 'sensors'),
        params.sensorID,
        params.instanceID
      )
    );
  });
  client.bind(mqttEvents.SENSOR_REMOVED, params => {
    store.dispatch(action.removeSensor(params.sensorID, params.instanceID));
  });
  client.bind(mqttEvents.SENSOR_STATUS, sensor => {
    store.dispatch(action.addSensor(sensor));
  });
  client.bind(mqttEvents.SENSOR_UPDATED, params => {
    store.dispatch(
      action.updateSensorValue(
        params.sensorID,
        params.instanceID,
        params.resourceID,
        params.value
      )
    );
  });
  client.bind(mqttEvents.REQUEST_RESPONSE_RECEIVED, params => {
    store.dispatch(action.handleRequestResponse(params));
  });
  client.bind(mqttEvents.REQUEST_SENT, params => {
    store.dispatch(action.sendRequest(params));
  });
  client.bind(mqttEvents.NEW_MAPPING_VALUE, params => {
    store.dispatch(
      action.newMappingValue(params.mappingType, params.mappingID, params.value)
    );
  });
};

/**
 * Create an MQTT client for `url`, wire its events to the Redux store, and
 * transparently reconnect (and re-wire) whenever the configured MQTT URL
 * changes in settings.
 * @param {string} url - initial MQTT broker URL
 */
export const bindMQTTEvents = url => {
  let client = MQTTClient(url);
  bindHandlers(client);
  // eslint-disable-next-line no-unused-vars
  const unsubscribeFromMQTTUrl = subscribe('settings.URLs.MQTT', () => {
    console.log('MQTT URL changed');
    client.disconnect();
    client = MQTTClient(getUrlFor(store.getState(), 'mqtt'));
    // BUG FIX: the replacement client previously had NO handlers bound, so
    // every event was silently dropped after a URL change.
    bindHandlers(client);
  });
};
|
"""
"""
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from sympy import *
from sympy.matrices import Matrix,eye
from moro.transformations import *
from moro.util import *
__all__ = ["plot_euler", "draw_uv", "draw_uvw"]
def plot_euler(phi,theta,psi,seq="zxz"):
    """Plot the sequence of frames produced by an Euler-angle rotation.

    Draws the world frame plus the frame after each successive rotation,
    with increasing opacity.

    Parameters
    ----------
    phi, theta, psi : angles (radians) of the three successive rotations
    seq : rotation sequence; "zxz"/"ZXZ"/"313"/313 or "zyz"/"ZYZ"/"323"/323

    Raises
    ------
    ValueError
        If ``seq`` is not a supported sequence. (Previously an unknown
        sequence silently plotted four identity frames.)
    """
    if seq in ("zxz","ZXZ","313",313):
        R1 = rotz(phi)
        R2 = R1*rotx(theta)
        R3 = R2*rotz(psi)
    elif seq in ("zyz","ZYZ","323",323):
        R1 = rotz(phi)
        R2 = R1*roty(theta)
        R3 = R2*rotz(psi)
    else:
        raise ValueError("Unsupported Euler sequence: {!r}".format(seq))
    fig = plt.figure()
    ax = fig.add_subplot(111, projection="3d")
    draw_uvw(eye(4), ax, sz=6, alpha=0.4)
    draw_uvw(R1, ax, sz=6, alpha=0.6)
    draw_uvw(R2, ax, sz=6, alpha=0.8)
    draw_uvw(R3, ax, sz=6, alpha=1.0)
    ax.set_xlim([-1,1])
    ax.set_ylim([-1,1])
    ax.set_zlim([-1,1])
    # set_aspect("equal") raises NotImplementedError on 3D axes in
    # matplotlib >= 3.1; use set_box_aspect when available.
    if hasattr(ax, "set_box_aspect"):
        ax.set_box_aspect((1, 1, 1))
    else:
        ax.set_aspect("equal")
    ax.axis('off')
def draw_uvw(H,ax,color=("r","g","b"),sz=1,alpha=1.0):
    """Draw the u/v/w axes of frame H as quivers on a 3D matplotlib axes.

    H may be a rotation matrix (origin at zero) or a homogeneous transform
    (origin taken from its translation column). ``color`` is either a single
    color string applied to all three axes or a per-axis triple.
    """
    axis_vectors = (H[:3,0], H[:3,1], H[:3,2])
    # Homogeneous transforms carry the frame origin; plain rotations do not.
    origin = H[:3,3] if ishtm(H) else Matrix([0,0,0])
    arrow_len = sz/5
    axis_colors = (color, color, color) if isinstance(color, str) else color
    for vec, col in zip(axis_vectors, axis_colors):
        ax.quiver(origin[0], origin[1], origin[2], vec[0], vec[1], vec[2],
                  color=col, length=arrow_len, arrow_length_ratio=0.2, alpha=alpha)
def draw_xyz(*args, **kwargs):
    """Alias for :func:`draw_uvw`."""
    return draw_uvw(*args, **kwargs)
def draw_frame(*args, **kwargs):
    """Alias for :func:`draw_uvw`."""
    return draw_uvw(*args, **kwargs)
def draw_uv(H, ax, name="S0", color=("r","g"), sz=1):
    """Draw the planar u/v axes of frame H on a 2D matplotlib axes.

    Also labels the frame ``{name}`` near the point H*(1, 1, 0, 1).
    ``color`` is a single color string for both axes or a (u, v) pair.
    """
    # Label position is computed from the (possibly symbolic) H before
    # converting to floats for drawing.
    tpos = H*Matrix([1,1,0,1])
    H = sympy2float(H)
    u = H[:3,0]
    v = H[:3,1]
    # Cleanup: the unused locals `w` and `L` from the original were removed.
    if ishtm(H):
        o = H[:3,3]
    else:
        o = Matrix([0,0,0])
    if isinstance(color,str):
        colorl = (color,color)
    else:
        colorl = color
    ax.arrow(o[0],o[1],u[0],u[1], color=colorl[0])
    ax.arrow(o[0],o[1],v[0],v[1], color=colorl[1])
    ax.text(tpos[0], tpos[1], "{"+name+"}", fontsize=8)
    ax.set_aspect("equal")
# Manual smoke test: show a z-x-z Euler rotation plot when run directly.
if __name__=="__main__":
    plot_euler(pi/3, pi/3, 0.5)
    plt.show()
    # ~ fig = plt.figure()
    # ~ ax = fig.add_subplot(111)
    # ~ H1 = eye(4)*htmrot(pi/3)
    # ~ H2 = H1*htmtra([10,5,0])
    # ~ H3 = H2*htmtra([-4,5,0])*htmrot(pi/4)
    # ~ draw_uv(H1, ax, "A", "b")
    # ~ draw_uv(H2, ax, "B")
    # ~ draw_uv(H3, ax, "C")
    # ~ plt.grid(ls="--")
    # ~ plt.axis([-20,20,-20,20])
    # ~ plt.show()
|
import os

from django.contrib import admin
from django.utils.html import format_html
from django.utils.safestring import mark_safe

from frontend.apps.post.models import Post, Extractor
class PostAdmin(admin.ModelAdmin):
    """Admin for Post: linked subject column plus analytics fields."""
    list_display = ('id', 'getSubject', 'upvotes', 'post_age', 'comments', 'is_read', 'is_deleted')
    # fieldsets basically organises the edit view with panels.
    # the first item is panel name and second item in the tuple is a dict with key as field.
    fieldsets = (
        ('Primary', {  # Panel-1
            'fields': ('subject', 'hacker_news_url')
        }),
        ('Analytics', {  # Panel-2
            # Making tuple allows both fields come inline while displaying in frontend.
            'fields': (('post_age', 'comments', 'upvotes', 'is_read', 'is_deleted'), )
        }),
        ('Timestamp', {  # Panel-3
            'fields': (('created_at', 'modified_at'), )
        })
    )
    # Order by created_at desc
    ordering = ['-post_age', 'id']
    search_fields = ['subject']

    def getSubject(self, instance):
        """Render the subject as a link to the Hacker News URL.

        SECURITY FIX: previously used mark_safe on an f-string, which injected
        the scraped subject/url into HTML unescaped (stored XSS). format_html
        escapes every interpolated value.
        """
        return format_html(
            '<a href="{}" alt="{}"> {} </a>',
            instance.hacker_news_url, instance.subject, instance.subject,
        )
    getSubject.short_description = 'Subject'
class ExtractorAdmin(admin.ModelAdmin):
    """Admin for Extractor: links each record to its stored HTML file."""
    list_display = ('id', 'getFilePath', 'pagination', 'is_parsed', 'created_at')
    fieldsets = (
        ('Primary', {  # Panel-1
            'fields': ('file_path', 'pagination', 'created_at')
        }),
    )
    ordering = ['-created_at']
    search_fields = ['file_path']

    def getFilePath(self, instance):
        """Render the file path as a link to the served HTML snapshot.

        SECURITY FIX: replaced mark_safe(f'...') with format_html so the
        stored path is HTML-escaped before rendering.
        """
        basename = os.path.basename(instance.file_path)
        return format_html(
            '<a href="/post/html/{}" alt="{}"> {} </a>',
            basename, instance.file_path, instance.file_path,
        )
    getFilePath.short_description = "Html Path"
# Register both models with their customised admin classes.
admin.site.register(Post, PostAdmin)
admin.site.register(Extractor, ExtractorAdmin)
|
// Machine-generated JSONP record (IEEE OUI registry data) delivered to the
// global deepmacDetailCallback handler. Do not edit by hand.
deepmacDetailCallback("40d855190000/36",[{"a":"Rua Alencar Araripe,1440 São Paulo SP BR 04253-000","o":"Spider Tecnologia Ind. e Com Ltda","d":"2013-10-13","t":"add","s":"ieee","c":"BR"}]);
|
import unittest
from pyknon import notation
class TestNotation(unittest.TestCase):
    """Table-driven checks for the pyknon notation parser."""

    def test_parse_accidental(self):
        # Sharps count up, flats count down, empty string is natural.
        for text, semitones in (("###", 3), ("bbb", -3), ("", 0)):
            self.assertEqual(notation.parse_accidental(text), semitones)

    def test_parse_octave(self):
        # Apostrophes raise, commas lower; default octave is 5.
        for text, octave in (("'", 5), ("''", 6), ("", 5), (",", 4), (",,", 3)):
            self.assertEqual(notation.parse_octave(text), octave)

    def test_parse_dur(self):
        # Denominator durations, with dots extending by successive halves.
        cases = (
            (("8",), 0.125),
            (("4",), 0.25),
            (("4", "."), 0.375),
            (("4", ".."), 0.4375),
            (("2",), 0.5),
        )
        for args, duration in cases:
            self.assertEqual(notation.parse_dur(*args), duration)

    def test_parse_note(self):
        # Each note parses to (pitch_class, octave, duration, volume).
        cases = (
            ("C#'", (1, 5, 0.25, 120)),
            ("C2", (0, 5, 0.5, 120)),
            ("Cb8,", (11, 4, 0.125, 120)),
            ("B#16''", (0, 6, 0.0625, 120)),
        )
        for text, parsed in cases:
            self.assertEqual(notation.parse_note(text), parsed)

    def test_parse_notes(self):
        self.assertEqual(
            notation.parse_notes(["C", "D", "E"]),
            [(0, 5, 0.25, 120), (2, 5, 0.25, 120), (4, 5, 0.25, 120)],
        )
        self.assertEqual(
            notation.parse_notes(["Cb4'", "D#8,", "E#16,"]),
            [(11, 5, 0.25, 120), (3, 4, 0.125, 120), (5, 4, 0.0625, 120)],
        )

    def test_parse_notes_dur_dot(self):
        self.assertEqual(
            notation.parse_notes(["C4.''", "D4..", "E8."]),
            [(0, 6, 0.375, 120), (2, 6, 0.4375, 120), (4, 6, 0.1875, 120)],
        )
|
/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#ifndef __itkImageTransformHelper_h
#define __itkImageTransformHelper_h
#include "itkConceptChecking.h"
#include "itkImageBase.h"
#include "itkMatrix.h"
#include "vnl/vnl_math.h"
#include "itkImageBase.h"
namespace itk
{
/** \class ImageTransformHelper
* \brief Fast index/physical index computation
* \ingroup ITKCommon
*/
// Compile-time unrolled index <-> physical-point transforms.  The row (R)
// and column (C) template parameters are walked down recursively; the
// UniqueTypeBool* tag overloads select the recursive step (false) versus
// the empty terminating overload (true), so the matrix/vector product is
// fully expanded with no runtime loops.
template< unsigned int NImageDimension, unsigned int R, unsigned int C >
class ImageTransformHelper
{
public:
  typedef ImageBase< NImageDimension > ImageType;
  typedef typename ImageType::IndexType IndexType;
  typedef typename ImageType::SpacingType SpacingType;
  typedef Matrix< double, NImageDimension, NImageDimension > MatrixType;
  typedef typename ImageType::PointType OriginType;
  typedef Point< double, NImageDimension > DoublePoint;
  typedef Point< float, NImageDimension > FloatPoint;
  // Dispatch tags that terminate the template recursion below.
  typedef Concept::Detail::UniqueType_bool< false > UniqueTypeBoolFalse;
  typedef Concept::Detail::UniqueType_bool< true > UniqueTypeBoolTrue;
  //
  // Methods with DoublePoint
  //
  // IndexToPhysicalPoint with full matrix
  //
  //
  // Computes point = origin + matrix * index, unrolled over rows/columns.
  inline static void TransformIndexToPhysicalPoint(
    const MatrixType & matrix, const OriginType & origin,
    const IndexType & index, DoublePoint & point)
  {
    ImageTransformHelper< NImageDimension, R, C >::
    TransformIndexToPhysicalPointRow(
      matrix, origin,
      index, point,
      Concept::Detail::UniqueType_bool< ( R + 1 == 0 ) >() );
  }
  // Recursive step over rows: seed point[R] with the origin, accumulate the
  // row's dot product via the Col helper, then recurse into row R-1.
  inline static void TransformIndexToPhysicalPointRow(
    const MatrixType & matrix, const OriginType & origin,
    const IndexType & index, DoublePoint & point,
    const UniqueTypeBoolFalse &)
  {
    point[R] = origin[R];
    // Start column
    ImageTransformHelper< NImageDimension, R, C >
    ::TransformIndexToPhysicalPointCol(
      matrix,
      index, point,
      Concept::Detail::UniqueType_bool< ( C + 1 == 0 ) >() );
    // Do Next Row
    ImageTransformHelper< NImageDimension, R - 1, C >
    ::TransformIndexToPhysicalPointRow(
      matrix, origin,
      index, point,
      Concept::Detail::UniqueType_bool< ( R == 0 ) >() );
  }
  // Terminating overload: all rows processed.
  inline static void TransformIndexToPhysicalPointRow(
    const MatrixType &, const OriginType &,
    const IndexType &, DoublePoint &,
    const UniqueTypeBoolTrue &)
  {
    // Do last row
  }
  // Recursive step over columns: accumulate one term of row R's dot product.
  inline static void TransformIndexToPhysicalPointCol(
    const MatrixType & matrix,
    const IndexType & index, DoublePoint & point,
    const UniqueTypeBoolFalse &)
  {
    point[R] = point[R] + matrix[R][C] * index[C];
    // Do next dimension
    ImageTransformHelper< NImageDimension, R, C - 1 >
    ::TransformIndexToPhysicalPointCol(
      matrix,
      index, point,
      Concept::Detail::UniqueType_bool< ( C == 0 ) >() );
  }
  // Terminating overload: all columns of this row processed.
  inline static void TransformIndexToPhysicalPointCol(
    const MatrixType &,
    const IndexType &, DoublePoint &,
    const UniqueTypeBoolTrue &)
  {}
  // PhysicalPointToIndex with full matrix
  //
  //
  // Computes rindex = matrix * (point - origin), then rounds into index.
  // NOTE: `matrix` here is expected to be the inverse direction/scale matrix
  // of the forward transform above — confirm at the call site.
  inline static void TransformPhysicalPointToIndex(
    const MatrixType & matrix, const OriginType & origin,
    const DoublePoint & point, IndexType & index)
  {
    DoublePoint rindex;
    ImageTransformHelper< NImageDimension, R, C >::
    TransformPhysicalPointToIndexRow(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( R + 1 == 0 ) >() );
  }
  // Recursive step over rows of the inverse mapping.
  inline static void TransformPhysicalPointToIndexRow(
    const MatrixType & matrix, const OriginType & origin,
    const DoublePoint & point, DoublePoint & rindex, IndexType & index,
    const UniqueTypeBoolFalse &)
  {
    rindex[R] = 0.0;
    // Start column
    ImageTransformHelper< NImageDimension, R, C >
    ::TransformPhysicalPointToIndexCol(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( C + 1 == 0 ) >() );
    // Do next row
    ImageTransformHelper< NImageDimension, R - 1, C >
    ::TransformPhysicalPointToIndexRow(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( R == 0 ) >() );
  }
  // Terminating overload: all rows processed.
  inline static void TransformPhysicalPointToIndexRow(
    const MatrixType &, const OriginType &,
    const DoublePoint &, DoublePoint &, IndexType &,
    const UniqueTypeBoolTrue &)
  {
    // Do last row
  }
  // Recursive step over columns of the inverse mapping.
  inline static void TransformPhysicalPointToIndexCol(
    const MatrixType & matrix, const OriginType & origin,
    const DoublePoint & point, DoublePoint & rindex, IndexType & index,
    const UniqueTypeBoolFalse &)
  {
    rindex[R] = rindex[R] + matrix[R][C] * ( point[C] - origin[C] );
    // Do next dimension
    ImageTransformHelper< NImageDimension, R, C - 1 >
    ::TransformPhysicalPointToIndexCol(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( C == 0 ) >() );
  }
  // Terminating overload: the row's continuous index is complete, so round
  // it (half-integers round up) into the integer index.
  inline static void TransformPhysicalPointToIndexCol(
    const MatrixType &, const OriginType &,
    const DoublePoint &, DoublePoint & rindex, IndexType & index,
    const UniqueTypeBoolTrue &)
  {
    index[R] = Math::RoundHalfIntegerUp< IndexValueType >(rindex[R]);
  }
  //
  // Methods with FloatPoint
  //
  // IndexToPhysicalPoint with full matrix
  //
  //
  // Single-precision mirror of the DoublePoint overloads above; the
  // structure and recursion are identical, only the point type differs.
  inline static void TransformIndexToPhysicalPoint(
    const MatrixType & matrix, const OriginType & origin,
    const IndexType & index, FloatPoint & point)
  {
    ImageTransformHelper< NImageDimension, R, C >::
    TransformIndexToPhysicalPointRow(
      matrix, origin,
      index, point,
      Concept::Detail::UniqueType_bool< ( R + 1 == 0 ) >() );
  }
  inline static void TransformIndexToPhysicalPointRow(
    const MatrixType & matrix, const OriginType & origin,
    const IndexType & index, FloatPoint & point,
    const UniqueTypeBoolFalse &)
  {
    point[R] = origin[R];
    // Start column
    ImageTransformHelper< NImageDimension, R, C >
    ::TransformIndexToPhysicalPointCol(
      matrix,
      index, point,
      Concept::Detail::UniqueType_bool< ( C + 1 == 0 ) >() );
    // Do Next Row
    ImageTransformHelper< NImageDimension, R - 1, C >
    ::TransformIndexToPhysicalPointRow(
      matrix, origin,
      index, point,
      Concept::Detail::UniqueType_bool< ( R == 0 ) >() );
  }
  inline static void TransformIndexToPhysicalPointRow(
    const MatrixType &, const OriginType &,
    const IndexType &, FloatPoint &,
    const UniqueTypeBoolTrue &)
  {
    // Do last row
  }
  inline static void TransformIndexToPhysicalPointCol(
    const MatrixType & matrix,
    const IndexType & index, FloatPoint & point,
    const UniqueTypeBoolFalse &)
  {
    point[R] = point[R] + matrix[R][C] * index[C];
    // Do next dimension
    ImageTransformHelper< NImageDimension, R, C - 1 >
    ::TransformIndexToPhysicalPointCol(
      matrix,
      index, point,
      Concept::Detail::UniqueType_bool< ( C == 0 ) >() );
  }
  inline static void TransformIndexToPhysicalPointCol(
    const MatrixType &,
    const IndexType &, FloatPoint &,
    const UniqueTypeBoolTrue &)
  {}
  // PhysicalPointToIndex with full matrix
  //
  //
  inline static void TransformPhysicalPointToIndex(
    const MatrixType & matrix, const OriginType & origin,
    const FloatPoint & point, IndexType & index)
  {
    FloatPoint rindex;
    ImageTransformHelper< NImageDimension, R, C >::
    TransformPhysicalPointToIndexRow(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( R + 1 == 0 ) >() );
  }
  inline static void TransformPhysicalPointToIndexRow(
    const MatrixType & matrix, const OriginType & origin,
    const FloatPoint & point, FloatPoint & rindex, IndexType & index,
    const UniqueTypeBoolFalse &)
  {
    rindex[R] = 0.0;
    // Start column
    ImageTransformHelper< NImageDimension, R, C >
    ::TransformPhysicalPointToIndexCol(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( C + 1 == 0 ) >() );
    // Do next row
    ImageTransformHelper< NImageDimension, R - 1, C >
    ::TransformPhysicalPointToIndexRow(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( R == 0 ) >() );
  }
  inline static void TransformPhysicalPointToIndexRow(
    const MatrixType &, const OriginType &,
    const FloatPoint &, FloatPoint &, IndexType &,
    const UniqueTypeBoolTrue &)
  {
    // Do last row
  }
  inline static void TransformPhysicalPointToIndexCol(
    const MatrixType & matrix, const OriginType & origin,
    const FloatPoint & point, FloatPoint & rindex, IndexType & index,
    const UniqueTypeBoolFalse &)
  {
    rindex[R] = rindex[R] + matrix[R][C] * ( point[C] - origin[C] );
    // Do next dimension
    ImageTransformHelper< NImageDimension, R, C - 1 >
    ::TransformPhysicalPointToIndexCol(
      matrix, origin,
      point, rindex, index,
      Concept::Detail::UniqueType_bool< ( C == 0 ) >() );
  }
  inline static void TransformPhysicalPointToIndexCol(
    const MatrixType &, const OriginType &,
    const FloatPoint &, FloatPoint & rindex, IndexType & index,
    const UniqueTypeBoolTrue &)
  {
    index[R] = Math::RoundHalfIntegerUp< IndexValueType >(rindex[R]);
  }
};
} // end namespace itk
#endif
|
import './styles.scss';
import React from 'react';
import InputWithButton from 'widgets/InputWithButton';
const ComingSoon = () => (
<div className="coming-soon-container">
<div className="content">
<div>
<p>Something Is</p>
<div className="soon">COMING SOON</div>
<p>We will be celebrating the launch of our new site very soon!</p>
</div>
<div>
<InputWithButton placeholder="Enter your email address" button={window.innerWidth >= 991 ? 'Notice me' : 'send'} />
</div>
</div>
</div>
);
export default ComingSoon;
|
// @flow
import type { EdgeLobby } from 'edge-core-js'
import { type Reducer } from 'redux'
import type { Action } from '../../modules/ReduxTypes.js'
// Shape of the edge-login slice of the redux store.
export type EdgeLoginState = {
  lobby: EdgeLobby | null,
  error: Error | null,
  isProcessing: boolean
}
// Fresh slice: no lobby loaded, no error, not processing a login.
const initialState = {
  lobby: null,
  error: null,
  isProcessing: false
}
export const edgeLogin: Reducer<EdgeLoginState, Action> = (state = initialState, action: Action) => {
switch (action.type) {
case 'PROCESS_EDGE_LOGIN': {
return {
...state,
isProcessing: true
}
}
case 'INVALIDATE_EDGE_LOBBY': {
return {
...state,
lobby: null,
isProcessing: false
}
}
case 'SET_LOBBY_ERROR': {
return {
...state,
lobby: null,
error: action.data,
isProcessing: false
}
}
case 'SAVE_EDGE_LOBBY': {
return {
...state,
lobby: action.data,
error: null,
isProcessing: false
}
}
default:
return state
}
}
|
from balebot.models.messages.template_response_message import TemplateResponseMessage
import re
from balebot.filters.filter import Filter
class TemplateResponseFilter(Filter):
    """Filter matching TemplateResponseMessage instances by keyword, regex
    pattern, or a custom validator callable.

    With no criteria configured, every template response message matches.
    """

    def __init__(self, keywords=None, pattern=None, validator=None, include_commands=True):
        """
        :param keywords: a single keyword string or a list of keywords
        :param pattern: a regex pattern searched in the message text
        :param validator: a callable applied to the message text
        :param include_commands: when False, texts starting with "/" never match
        """
        super(TemplateResponseFilter, self).__init__(validator)
        # Normalize keywords to a list: accept a single string, a list, or None.
        self.keywords = []
        if isinstance(keywords, list):
            self.keywords += keywords
        elif isinstance(keywords, str):
            self.keywords.append(keywords)
        self.pattern = pattern
        self.validator = validator if callable(validator) else None
        self.include_commands = include_commands

    def match(self, message):
        """Return True if the message satisfies any configured criterion.

        FIX: previously a non-TemplateResponseMessage fell off the end of the
        function and returned None implicitly; the result is now always a bool.
        """
        if not isinstance(message, TemplateResponseMessage):
            return False
        text = message.text
        if not self.include_commands and text.startswith("/"):
            # Bot commands are excluded when include_commands is off.
            return False
        if not self.pattern and not self.keywords and not self.validator:
            # No criteria configured: match everything of this message type.
            return True
        return bool(
            self.find_keywords(text)
            or self.find_pattern(text)
            or self.validate(text)
        )

    def find_keywords(self, text):
        """Return True if any non-empty configured keyword occurs in text."""
        for keyword in self.keywords:
            if keyword and keyword in text:
                return True
        return False

    def find_pattern(self, text):
        """Return the re.search result for the configured pattern, or False
        when no pattern is set."""
        if self.pattern:
            return re.search(self.pattern, text)
        return False
|
import logging
import random
import time
from typing import Dict, Tuple
from tor.helpers.flair import check_promotion
import beeline
from blossom_wrapper import BlossomStatus
from praw.models import Comment, Message, Redditor, Submission
from tor.validation.formatting_validation import (
check_for_formatting_issues,
get_formatting_issue_message,
)
from tor.core.config import Config
from tor.core.helpers import get_wiki_page, remove_if_required, send_to_modchat
from tor.validation.transcription_validation import get_transcription
from tor.helpers.flair import flair, set_user_flair
from tor.strings import translation
# Translation strings for all user-facing bot responses.
i18n = translation()
log = logging.getLogger(__name__)
# Celebration emotes; one is picked at random for the modchat announcement
# when a volunteer accepts the Code of Conduct.
MODCHAT_EMOTES = [
    ":badger:",
    ":beers:",
    ":catta-tappa:",
    ":confetti_ball:",
    ":coolio:",
    ":derp:",
    ":fb-like:",
    ":fidget-spinner:",
    ":gold:",
    ":heartpulse:",
    ":lenny1::lenny2:",
    ":tada:",
    ":partyblob:",
    ":partylexi:",
    ":party_parrot:",
    ":trophy:",
    ":upvote:",
    ":+1:",
]
@beeline.traced(name="process_coc")
def process_coc(
    username: str, context: str, blossom_submission: Dict, cfg: Config
) -> Tuple:
    """
    Process the acceptation of the CoC by the specified user.
    :param username: The name of the user accepting the CoC
    :param context: The context of the reply, to use as a link
    :param blossom_submission: The corresponding Submission in Blossom
    :param cfg: Config of tor
    :return: a (message, flair) tuple as produced by process_claim, or a
        CoC prompt with no flair when the user record had to be created first
    """
    user_response = cfg.blossom.get_user(username=username)
    if user_response.status == BlossomStatus.ok:
        # The status codes of accepting the CoC are not checked because they are already
        # caught by getting the user.
        response = cfg.blossom.accept_coc(username=username)
        new_acceptance = response.status == BlossomStatus.ok
        if new_acceptance:
            # First-time acceptance: celebrate it in the new-volunteers channel.
            emote = random.choice(MODCHAT_EMOTES)
            user_url = i18n["urls"]["reddit_url"].format(f"/u/{username}")
            post_url = i18n["urls"]["reddit_url"].format(context)
            send_to_modchat(
                f"<{user_url}|u/{username}> has just "
                f"<{post_url}|accepted the CoC!> {emote}",
                cfg,
                channel="new_volunteers",
            )
        # Accepting the CoC always doubles as a claim on the submission.
        return process_claim(
            username, blossom_submission, cfg, first_time=new_acceptance
        )
    elif user_response.status == BlossomStatus.not_found:
        # Unknown volunteer: create their record and ask them to accept the CoC.
        cfg.blossom.create_user(username=username)
        return (
            i18n["responses"]["general"]["coc_not_accepted"].format(
                get_wiki_page("codeofconduct", cfg)
            ),
            None,
        )
    else:
        # Any other lookup outcome: fall through to a normal claim attempt.
        return process_claim(username, blossom_submission, cfg)
@beeline.traced(name="process_claim")
def process_claim(
    username: str, blossom_submission: Dict, cfg: Config, first_time=False
) -> Tuple:
    """
    Process a claim request.
    This function sends a reply depending on the response from Blossom and
    creates an user when this is the first time a user uses the bot.
    :param username: Name of the user claiming the submission
    :param blossom_submission: The relevant submission in Blossom
    :param cfg: Config of tor
    :param first_time: Whether this is the first time a user claims something
    :return: a (message, flair) tuple: the reply text and the submission
        flair to set (None when the flair should stay unchanged)
    """
    coc_not_accepted = i18n["responses"]["general"]["coc_not_accepted"]
    response = cfg.blossom.claim(
        submission_id=blossom_submission["id"], username=username
    )
    return_flair = None
    if response.status == BlossomStatus.ok:
        # A random tip to append to the response
        random_tip = i18n["tips"]["message"].format(
            tip_message=random.choice(i18n["tips"]["collection"])
        )
        message = (
            i18n["responses"]["claim"][
                "first_claim_success" if first_time else "success"
            ]
            + "\n\n"
            + random_tip
        )
        return_flair = flair.in_progress
        log.info(
            f'Claim on Submission {blossom_submission["tor_url"]} by {username} successful.'
        )
    elif response.status == BlossomStatus.coc_not_accepted:
        # The volunteer exists but has not accepted the Code of Conduct yet.
        message = coc_not_accepted.format(get_wiki_page("codeofconduct", cfg))
    elif response.status == BlossomStatus.not_found:
        # Unknown volunteer: prompt for the CoC and create their record.
        message = coc_not_accepted.format(get_wiki_page("codeofconduct", cfg))
        cfg.blossom.create_user(username=username)
    elif response.status == BlossomStatus.blacklisted:
        message = i18n["responses"]["general"]["blacklisted"]
    elif response.status == BlossomStatus.already_claimed:
        claimed_by = response.data["username"]
        if claimed_by == username:
            # This user already got the submission
            message = i18n["responses"]["claim"]["already_claimed_by_self"]
        else:
            # The submission was claimed by someone else
            message = i18n["responses"]["claim"]["already_claimed_by_someone"].format(
                claimed_by=claimed_by
            )
    elif response.status == BlossomStatus.too_many_claims:
        # List the volunteer's currently claimed posts in the refusal message.
        claimed_links = [submission["tor_url"] for submission in response.data]
        message = i18n["responses"]["claim"]["too_many_claims"].format(
            links="\n".join(f"- {link}" for link in claimed_links),
        )
    else:
        # Unexpected status: generic apology.
        message = i18n["responses"]["general"]["oops"]
    return message, return_flair
@beeline.traced(name="process_done")
def process_done(
    user: Redditor,
    blossom_submission: Dict,
    comment: Comment,
    cfg: Config,
    override=False,
    alt_text_trigger=False,
) -> Tuple:
    """
    Handles comments where the user claims to have completed a post.
    This function sends a reply to the user depending on the responses received
    from Blossom.
    :param user: The user claiming his transcription is done
    :param blossom_submission: The relevant submission in Blossom
    :param comment: The comment of the user, used to retrieve the user's flair
    :param cfg: the global config object.
    :param override: whether the validation check should be skipped
    :param alt_text_trigger: whether there is an alternative to "done" that has
        triggered this function.
    :return: a (message, flair) tuple: the reply text and the submission
        flair to set (None when the flair should stay unchanged)
    """
    return_flair = None
    done_messages = i18n["responses"]["done"]
    # This is explicitly missing the format call that adds the code of
    # conduct text because if we populate it here, we will fetch the wiki
    # page on _every single `done`_ and that's just silly. Only populate
    # it if it's necessary.
    coc_not_accepted = i18n["responses"]["general"]["coc_not_accepted"]
    blossom_user = cfg.blossom.get_user(username=user.name)
    if blossom_user.status != BlossomStatus.ok:
        # If we don't know who the volunteer is, then we don't have a record of
        # them and they need to go through the code of conduct process.
        return (
            coc_not_accepted.format(get_wiki_page("codeofconduct", cfg)),
            return_flair,
        )
    if not blossom_user.data["accepted_coc"]:
        # If the volunteer in question hasn't accepted the code of conduct,
        # eject early and return. Although the `create_transcription` endpoint
        # returns a code of conduct check, we only hit it when we create a
        # transcription, which requires that they wrote something. If a volunteer
        # just writes `done` without putting a transcription down, it will hit
        # this edge case.
        return (
            coc_not_accepted.format(get_wiki_page("codeofconduct", cfg)),
            return_flair,
        )
    transcription, is_visible = get_transcription(blossom_submission["url"], user, cfg)
    message = done_messages["cannot_find_transcript"]  # default message
    if not transcription:
        # When the user replies `done` quickly after posting the transcription,
        # it might not be available on Reddit yet. Wait a bit and try again.
        time.sleep(1)
        transcription, is_visible = get_transcription(
            blossom_submission["url"], user, cfg
        )
    if transcription and not override:
        # Try to detect common formatting errors
        formatting_errors = check_for_formatting_issues(transcription.body)
        if len(formatting_errors) > 0:
            # Formatting issues found. Reject the `done` and ask the
            # volunteer to fix them.
            issues = ", ".join([error.value for error in formatting_errors])
            # TODO: Re-evaluate if this is necessary
            # This is more of a temporary thing to see how the
            # volunteers react to the bot.
            send_to_modchat(
                i18n["mod"]["formatting_issues"].format(
                    author=user.name,
                    issues=issues,
                    link=f"https://reddit.com{comment.context}",
                ),
                cfg,
                "formatting-issues",
            )
            message = get_formatting_issue_message(formatting_errors)
            return message, return_flair
    if transcription:
        cfg.blossom.create_transcription(
            transcription.id,
            transcription.body,
            i18n["urls"]["reddit_url"].format(str(transcription.permalink)),
            transcription.author.name,
            blossom_submission["id"],
            not is_visible,
        )
    if transcription or override:
        # because we can enter this state with or without a transcription, it
        # makes sense to have this as a separate block.
        done_response = cfg.blossom.done(blossom_submission["id"], user.name, override)
        # Note that both the not_found and coc_not_accepted status are already
        # caught in the previous lines of code, hence these are not checked again.
        if done_response.status == BlossomStatus.ok:
            return_flair = flair.completed
            set_user_flair(user, comment, cfg)
            log.info(
                f'Done on Submission {blossom_submission["tor_url"]} by {user.name}'
                f" successful."
            )
            message = done_messages["completed_transcript"]
            # FIX: `gamma` lives on the response payload (`.data`), matching the
            # `blossom_user.data["accepted_coc"]` access above; indexing the
            # response object directly (`blossom_user["gamma"]`) would fail here.
            transcription_count = blossom_user.data["gamma"] + 1
            is_promoted = check_promotion(transcription_count)
            if is_promoted is not None:
                alt_promotion_txt = done_messages["promotion_text"][str(is_promoted)]
                message = f"{message}\n\n{alt_promotion_txt}"
            if alt_text_trigger:
                message = f"I think you meant `done`, so here we go!\n\n{message}"
        elif done_response.status == BlossomStatus.already_completed:
            message = done_messages["already_completed"]
        elif done_response.status == BlossomStatus.missing_prerequisite:
            message = done_messages["not_claimed_by_user"]
        elif done_response.status == BlossomStatus.blacklisted:
            message = i18n["responses"]["general"]["blacklisted"]
    return message, return_flair
@beeline.traced(name="process_unclaim")
def process_unclaim(
    username: str, blossom_submission: Dict, submission: Submission, cfg: Config
) -> Tuple:
    """
    Process an unclaim request.
    Note that this function also checks whether a post should be removed and
    does so when required.
    :param username: The name of the user unclaiming the submission
    :param blossom_submission: The relevant Submission of Blossom
    :param submission: The relevant Submission in Reddit
    :param cfg: Config of tor
    :return: a (message, flair) tuple: the reply text and the submission
        flair to set (None when the flair should stay unchanged)
    """
    response = cfg.blossom.unclaim(
        submission_id=blossom_submission["id"], username=username
    )
    return_flair = None
    unclaim_messages = i18n["responses"]["unclaim"]
    if response.status == BlossomStatus.ok:
        message = unclaim_messages["success"]
        return_flair = flair.unclaimed
        # An unclaimed post may need to come down entirely (e.g. if reported).
        removed, reported = remove_if_required(
            submission, blossom_submission["id"], cfg
        )
        if removed:
            # Select the message based on whether the post was reported or not.
            message = unclaim_messages[
                "success_with_report" if reported else "success_without_report"
            ]
    elif response.status == BlossomStatus.not_found:
        # Unknown volunteer: prompt for the CoC and create their record.
        message = i18n["responses"]["general"]["coc_not_accepted"].format(
            get_wiki_page("codeofconduct", cfg)
        )
        cfg.blossom.create_user(username)
    elif response.status == BlossomStatus.other_user:
        message = unclaim_messages["claimed_other_user"]
    elif response.status == BlossomStatus.already_completed:
        message = unclaim_messages["post_already_completed"]
    elif response.status == BlossomStatus.blacklisted:
        message = i18n["responses"]["general"]["blacklisted"]
    else:
        message = unclaim_messages["still_unclaimed"]
    return message, return_flair
@beeline.traced(name="process_message")
def process_message(message: Message, cfg: Config) -> None:
    """Acknowledge a direct message to the bot and mirror it into modchat.

    Messages without a resolvable author (deleted accounts, system mail)
    are only forwarded to modchat and logged; no reply is sent.
    """
    dm_subject = i18n["responses"]["direct_message"]["dm_subject"]
    dm_body = i18n["responses"]["direct_message"]["dm_body"]
    author = message.author
    username = author.name if author else None
    if not username:
        # Nobody to reply to; just surface the message for the mod team.
        send_to_modchat(
            f"DM with no author -- " f"*{message.subject}*:\n{message.body}", cfg
        )
        log.info(
            f"Received DM with no author. \n Subject: {message.subject}\n\nBody: {message.body}"
        )
        return
    # Send the canned acknowledgement, then forward the DM to modchat.
    author.message(dm_subject, dm_body)
    send_to_modchat(
        f'DM from <{i18n["urls"]["reddit_url"].format("/u/" + username)}|u/{username}> -- '
        f"*{message.subject}*:\n{message.body}",
        cfg,
    )
    log.info(
        f"Received DM from {username}. \n Subject: {message.subject}\n\nBody: {message.body}"
    )
|
from random import randint, choice
import pygame
pygame.init()
def create_checker_board():
    """Fill the screen with a 6x6 grid of 100px squares in random colors.

    Squares are drawn row by row, left to right, exactly covering the
    600x600 window; each square's color is drawn from the global palette.
    """
    for row in range(6):
        for col in range(6):
            pygame.draw.rect(screen, choice(colors), (col * 100, row * 100, 100, 100))
# 600x600 window exactly fits the 6x6 grid of 100px squares.
screen = pygame.display.set_mode((600, 600))
pygame.display.set_caption('Shapes!!')
# Pre-generate ten random RGB colors to pick from on every draw.
colors = [tuple([randint(0, 255) for j in range(3)]) for i in range(10)]
# Main loop: handle quit events and redraw the randomly colored board each frame.
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            quit()
    screen.fill((0, 0, 0))
    create_checker_board()
    pygame.display.update()
|
/*
* Generated by asn1c-0.9.29 (http://lionet.info/asn1c)
* From ASN.1 module "S1AP-Containers"
* found in "../support/r14.4.0/36413-e40.asn"
* `asn1c -pdu=all -fcompound-names -findirect-choice -fno-include-deps`
*/
#include "S1AP_ProtocolIE-SingleContainer.h"
/*
* This type is implemented using S1AP_E_RABToBeSetupItemBearerSUReqIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABSetupItemBearerSUResIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABToBeModifiedItemBearerModReqIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABModifyItemBearerModResIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABReleaseItemBearerRelCompIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABToBeSetupItemCtxtSUReqIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABSetupItemCtxtSUResIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_TAIItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_UE_associatedLogicalS1_ConnectionItemRes,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_UE_associatedLogicalS1_ConnectionItemResAck,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABModifyItemBearerModConfIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_Bearers_SubjectToStatusTransfer_ItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABInformationListIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABUsageReportItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_MDTMode_ExtensionIE,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_RecommendedCellItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_RecommendedENBItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_SecondaryRATDataUsageReportItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_SONInformation_ExtensionIE,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABDataForwardingItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABToBeSetupItemHOReqIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABAdmittedItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABFailedtoSetupItemHOReqAckIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABToBeSwitchedDLItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABToBeSwitchedULItemIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABToBeModifiedItemBearerModIndIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABNotToBeModifiedItemBearerModIndIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABFailedToResumeItemResumeReqIEs,
* so here we adjust the DEF accordingly.
*/
/*
* This type is implemented using S1AP_E_RABFailedToResumeItemResumeResIEs,
* so here we adjust the DEF accordingly.
*/
/*
 * Machine-generated (asn1c) type descriptors: one ProtocolIE-SingleContainer
 * specialization per IE-set parameterization.  Do not edit by hand; regenerate
 * from the ASN.1 module instead.
 */
/* P0: container for E-RABToBeSetupItemBearerSUReq IEs */
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0_tags_1[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0 = {
	"ProtocolIE-SingleContainer",
	"ProtocolIE-SingleContainer",
	&asn_OP_SEQUENCE,
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0_tags_1,
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0_tags_1)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0_tags_1[0]), /* 1 */
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0_tags_1,	/* Same as above */
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0_tags_1)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P0_tags_1[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_S1AP_E_RABToBeSetupItemBearerSUReqIEs_1,
	3,	/* Elements count */
	&asn_SPC_S1AP_E_RABToBeSetupItemBearerSUReqIEs_specs_1	/* Additional specs */
};
/* P1: container for E-RABSetupItemBearerSURes IEs */
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1_tags_2[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1 = {
	"ProtocolIE-SingleContainer",
	"ProtocolIE-SingleContainer",
	&asn_OP_SEQUENCE,
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1_tags_2,
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1_tags_2)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1_tags_2[0]), /* 1 */
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1_tags_2,	/* Same as above */
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1_tags_2)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P1_tags_2[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_S1AP_E_RABSetupItemBearerSUResIEs_5,
	3,	/* Elements count */
	&asn_SPC_S1AP_E_RABSetupItemBearerSUResIEs_specs_5	/* Additional specs */
};
/* P2: container for E-RABToBeModifiedItemBearerModReq IEs */
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2_tags_3[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2 = {
	"ProtocolIE-SingleContainer",
	"ProtocolIE-SingleContainer",
	&asn_OP_SEQUENCE,
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2_tags_3,
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2_tags_3)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2_tags_3[0]), /* 1 */
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2_tags_3,	/* Same as above */
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2_tags_3)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P2_tags_3[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_S1AP_E_RABToBeModifiedItemBearerModReqIEs_9,
	3,	/* Elements count */
	&asn_SPC_S1AP_E_RABToBeModifiedItemBearerModReqIEs_specs_9	/* Additional specs */
};
/* P3: container for E-RABModifyItemBearerModRes IEs */
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3_tags_4[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3 = {
	"ProtocolIE-SingleContainer",
	"ProtocolIE-SingleContainer",
	&asn_OP_SEQUENCE,
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3_tags_4,
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3_tags_4)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3_tags_4[0]), /* 1 */
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3_tags_4,	/* Same as above */
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3_tags_4)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P3_tags_4[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_S1AP_E_RABModifyItemBearerModResIEs_13,
	3,	/* Elements count */
	&asn_SPC_S1AP_E_RABModifyItemBearerModResIEs_specs_13	/* Additional specs */
};
/* P4: container for E-RABReleaseItemBearerRelComp IEs */
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4_tags_5[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4 = {
	"ProtocolIE-SingleContainer",
	"ProtocolIE-SingleContainer",
	&asn_OP_SEQUENCE,
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4_tags_5,
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4_tags_5)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4_tags_5[0]), /* 1 */
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4_tags_5,	/* Same as above */
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4_tags_5)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P4_tags_5[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_S1AP_E_RABReleaseItemBearerRelCompIEs_17,
	3,	/* Elements count */
	&asn_SPC_S1AP_E_RABReleaseItemBearerRelCompIEs_specs_17	/* Additional specs */
};
/* P5: container for E-RABToBeSetupItemCtxtSUReq IEs */
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5_tags_6[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5 = {
	"ProtocolIE-SingleContainer",
	"ProtocolIE-SingleContainer",
	&asn_OP_SEQUENCE,
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5_tags_6,
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5_tags_6)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5_tags_6[0]), /* 1 */
	asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5_tags_6,	/* Same as above */
	sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5_tags_6)
		/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P5_tags_6[0]), /* 1 */
	{ 0, 0, SEQUENCE_constraint },
	asn_MBR_S1AP_E_RABToBeSetupItemCtxtSUReqIEs_21,
	3,	/* Elements count */
	&asn_SPC_S1AP_E_RABToBeSetupItemCtxtSUReqIEs_specs_21	/* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6_tags_7[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6_tags_7,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6_tags_7)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6_tags_7[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6_tags_7, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6_tags_7)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P6_tags_7[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABSetupItemCtxtSUResIEs_25,
3, /* Elements count */
&asn_SPC_S1AP_E_RABSetupItemCtxtSUResIEs_specs_25 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7_tags_8[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7_tags_8,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7_tags_8)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7_tags_8[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7_tags_8, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7_tags_8)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P7_tags_8[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_TAIItemIEs_29,
3, /* Elements count */
&asn_SPC_S1AP_TAIItemIEs_specs_29 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8_tags_9[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8_tags_9,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8_tags_9)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8_tags_9[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8_tags_9, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8_tags_9)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P8_tags_9[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_UE_associatedLogicalS1_ConnectionItemRes_33,
3, /* Elements count */
&asn_SPC_S1AP_UE_associatedLogicalS1_ConnectionItemRes_specs_33 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9_tags_10[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9_tags_10,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9_tags_10)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9_tags_10[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9_tags_10, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9_tags_10)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P9_tags_10[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_UE_associatedLogicalS1_ConnectionItemResAck_37,
3, /* Elements count */
&asn_SPC_S1AP_UE_associatedLogicalS1_ConnectionItemResAck_specs_37 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10_tags_11[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10_tags_11,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10_tags_11)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10_tags_11[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10_tags_11, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10_tags_11)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P10_tags_11[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABModifyItemBearerModConfIEs_41,
3, /* Elements count */
&asn_SPC_S1AP_E_RABModifyItemBearerModConfIEs_specs_41 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11_tags_12[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11_tags_12,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11_tags_12)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11_tags_12[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11_tags_12, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11_tags_12)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P11_tags_12[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_Bearers_SubjectToStatusTransfer_ItemIEs_45,
3, /* Elements count */
&asn_SPC_S1AP_Bearers_SubjectToStatusTransfer_ItemIEs_specs_45 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12_tags_13[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12_tags_13,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12_tags_13)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12_tags_13[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12_tags_13, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12_tags_13)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P12_tags_13[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABInformationListIEs_49,
3, /* Elements count */
&asn_SPC_S1AP_E_RABInformationListIEs_specs_49 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13_tags_14[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13_tags_14,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13_tags_14)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13_tags_14[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13_tags_14, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13_tags_14)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P13_tags_14[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABItemIEs_53,
3, /* Elements count */
&asn_SPC_S1AP_E_RABItemIEs_specs_53 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14_tags_15[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14_tags_15,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14_tags_15)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14_tags_15[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14_tags_15, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14_tags_15)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P14_tags_15[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABUsageReportItemIEs_57,
3, /* Elements count */
&asn_SPC_S1AP_E_RABUsageReportItemIEs_specs_57 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15_tags_16[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15_tags_16,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15_tags_16)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15_tags_16[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15_tags_16, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15_tags_16)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P15_tags_16[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_MDTMode_ExtensionIE_61,
3, /* Elements count */
&asn_SPC_S1AP_MDTMode_ExtensionIE_specs_61 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16_tags_17[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16_tags_17,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16_tags_17)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16_tags_17[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16_tags_17, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16_tags_17)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P16_tags_17[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_RecommendedCellItemIEs_65,
3, /* Elements count */
&asn_SPC_S1AP_RecommendedCellItemIEs_specs_65 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17_tags_18[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17_tags_18,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17_tags_18)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17_tags_18[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17_tags_18, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17_tags_18)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P17_tags_18[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_RecommendedENBItemIEs_69,
3, /* Elements count */
&asn_SPC_S1AP_RecommendedENBItemIEs_specs_69 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18_tags_19[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18_tags_19,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18_tags_19)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18_tags_19[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18_tags_19, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18_tags_19)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P18_tags_19[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_SecondaryRATDataUsageReportItemIEs_73,
3, /* Elements count */
&asn_SPC_S1AP_SecondaryRATDataUsageReportItemIEs_specs_73 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19_tags_20[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19_tags_20,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19_tags_20)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19_tags_20[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19_tags_20, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19_tags_20)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P19_tags_20[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_SONInformation_ExtensionIE_77,
3, /* Elements count */
&asn_SPC_S1AP_SONInformation_ExtensionIE_specs_77 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20_tags_21[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20_tags_21,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20_tags_21)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20_tags_21[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20_tags_21, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20_tags_21)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P20_tags_21[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABDataForwardingItemIEs_449,
3, /* Elements count */
&asn_SPC_S1AP_E_RABDataForwardingItemIEs_specs_449 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21_tags_22[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21_tags_22,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21_tags_22)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21_tags_22[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21_tags_22, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21_tags_22)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P21_tags_22[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABToBeSetupItemHOReqIEs_453,
3, /* Elements count */
&asn_SPC_S1AP_E_RABToBeSetupItemHOReqIEs_specs_453 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22_tags_23[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22_tags_23,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22_tags_23)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22_tags_23[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22_tags_23, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22_tags_23)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P22_tags_23[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABAdmittedItemIEs_457,
3, /* Elements count */
&asn_SPC_S1AP_E_RABAdmittedItemIEs_specs_457 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23_tags_24[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23_tags_24,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23_tags_24)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23_tags_24[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23_tags_24, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23_tags_24)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P23_tags_24[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABFailedtoSetupItemHOReqAckIEs_461,
3, /* Elements count */
&asn_SPC_S1AP_E_RABFailedtoSetupItemHOReqAckIEs_specs_461 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24_tags_25[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24_tags_25,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24_tags_25)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24_tags_25[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24_tags_25, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24_tags_25)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P24_tags_25[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABToBeSwitchedDLItemIEs_465,
3, /* Elements count */
&asn_SPC_S1AP_E_RABToBeSwitchedDLItemIEs_specs_465 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25_tags_26[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25_tags_26,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25_tags_26)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25_tags_26[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25_tags_26, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25_tags_26)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P25_tags_26[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABToBeSwitchedULItemIEs_469,
3, /* Elements count */
&asn_SPC_S1AP_E_RABToBeSwitchedULItemIEs_specs_469 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26_tags_27[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26_tags_27,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26_tags_27)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26_tags_27[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26_tags_27, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26_tags_27)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P26_tags_27[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABToBeModifiedItemBearerModIndIEs_473,
3, /* Elements count */
&asn_SPC_S1AP_E_RABToBeModifiedItemBearerModIndIEs_specs_473 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27_tags_28[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27_tags_28,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27_tags_28)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27_tags_28[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27_tags_28, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27_tags_28)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P27_tags_28[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABNotToBeModifiedItemBearerModIndIEs_477,
3, /* Elements count */
&asn_SPC_S1AP_E_RABNotToBeModifiedItemBearerModIndIEs_specs_477 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28_tags_29[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28_tags_29,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28_tags_29)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28_tags_29[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28_tags_29, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28_tags_29)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P28_tags_29[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABFailedToResumeItemResumeReqIEs_481,
3, /* Elements count */
&asn_SPC_S1AP_E_RABFailedToResumeItemResumeReqIEs_specs_481 /* Additional specs */
};
static const ber_tlv_tag_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29_tags_30[] = {
(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
asn_TYPE_descriptor_t asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29 = {
"ProtocolIE-SingleContainer",
"ProtocolIE-SingleContainer",
&asn_OP_SEQUENCE,
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29_tags_30,
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29_tags_30)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29_tags_30[0]), /* 1 */
asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29_tags_30, /* Same as above */
sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29_tags_30)
/sizeof(asn_DEF_S1AP_ProtocolIE_SingleContainer_7007P29_tags_30[0]), /* 1 */
{ 0, 0, SEQUENCE_constraint },
asn_MBR_S1AP_E_RABFailedToResumeItemResumeResIEs_485,
3, /* Elements count */
&asn_SPC_S1AP_E_RABFailedToResumeItemResumeResIEs_specs_485 /* Additional specs */
};
|
import time
from app.thirdparty.oneforall.config import settings
from app.thirdparty.oneforall.common.search import Search
from app.thirdparty.oneforall.config.log import logger
class GithubAPI(Search):
    """Subdomain enumeration module backed by the GitHub code-search API."""

    def __init__(self, domain):
        Search.__init__(self)
        self.source = 'GithubAPISearch'
        self.module = 'Search'
        self.addr = 'https://api.github.com/search/code'
        self.domain = domain
        # Seconds to sleep between result pages; GitHub's code search
        # endpoint is aggressively rate limited.
        self.delay = 5
        self.token = settings.github_api_token

    def search(self):
        """
        Query the API for subdomains and run subdomain matching on the results.
        """
        self.header = self.get_header()
        self.proxy = self.get_proxy(self.source)
        # The text-match media type makes GitHub include matching fragments
        # in the response, which is what match_subdomains scans.
        self.header.update(
            {'Accept': 'application/vnd.github.v3.text-match+json'})
        page = 1
        while True:
            time.sleep(self.delay)
            # NOTE(review): the token is sent as the deprecated
            # `access_token` query parameter; GitHub now expects an
            # Authorization header -- confirm this still authenticates.
            params = {'q': self.domain, 'per_page': 100,
                      'page': page, 'sort': 'indexed',
                      'access_token': self.token}
            try:
                resp = self.get(self.addr, params=params)
            except Exception as e:
                logger.log('ERROR', e.args)
                break
            if not resp or resp.status_code != 200:
                logger.log('ERROR', f'{self.source} module query failed')
                break
            subdomains = self.match_subdomains(resp)
            if not subdomains:  # no matches on this page -> stop paging
                break
            self.subdomains.update(subdomains)
            page += 1
            try:
                resp_json = resp.json()
            except Exception as e:
                logger.log('ERROR', e.args)
                break
            total_count = resp_json.get('total_count')
            if not isinstance(total_count, int):
                break
            # Stop once we are past the last page of results...
            if page * 100 > total_count:
                break
            # ...or past GitHub's hard cap of 1000 search results.
            if page * 100 > 1000:
                break

    def run(self):
        """
        Class execution entry point: check the API token, then search and
        persist/export the collected subdomains.
        """
        if not self.have_api(self.token):
            return
        self.begin()
        self.search()
        self.finish()
        self.save_json()
        self.gen_result()
        self.save_db()
def run(domain):
    """Unified module-level entry point.

    :param str domain: target domain to enumerate subdomains for
    """
    GithubAPI(domain).run()
# Ad-hoc manual test: enumerate subdomains of freebuf.com.
if __name__ == '__main__':
    run('freebuf.com')
|
var searchData=
[
['ndarrayformaterr',['NDArrayFormatErr',['../namespacemxnet.html#ace60510752753f459193f95cab0e9e1a',1,'mxnet']]],
['ndarrayfunctiontypemask',['NDArrayFunctionTypeMask',['../namespacemxnet.html#a89a5f0f5cfd9e1e94604a7b42dda818a',1,'mxnet']]],
['ndarraystoragetype',['NDArrayStorageType',['../namespacemxnet.html#a536b732faa980e1de446c552460ff76a',1,'mxnet']]]
];
|
import requests
import os
import shutil
from concurrent.futures import ProcessPoolExecutor,ThreadPoolExecutor
from threading import currentThread
import traceback
from PIL import Image
from urllib3.exceptions import InsecureRequestWarning
# Silence the InsecureRequestWarning raised by the verify=False request below.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def save_image(input_name):
    """Convert a downloaded ``.webp`` image to JPEG and delete the original.

    RGBA images are flattened onto a white background first, because JPEG
    has no alpha channel.

    :param str input_name: path to the ``.webp`` file to convert
    """
    # Open inside a context manager so the source file handle is closed
    # before os.remove(); the old code left it open, which can make the
    # deletion fail on platforms that lock open files (e.g. Windows).
    with Image.open(input_name) as im:
        if im.mode == "RGBA":
            im.load()  # required for png.split()
            background = Image.new("RGB", im.size, (255, 255, 255))
            background.paste(im, mask=im.split()[3])  # 3 is the alpha channel
            im = background
        im.save(input_name.replace('.webp', '.jpg'), 'JPEG')
    os.remove(input_name)
def get_task():
    """Fetch the chapter list of the comic and fan out one download job per
    chapter to a process pool, oldest chapter first (hence ``tasks[::-1]``).
    """
    url = 'https://www.manhuatai.com/api/getComicInfoBody?product_id=2&productname=mht&platformname=pc&comic_id=25934'
    # verify=False: the site's certificate chain is not trusted; the matching
    # InsecureRequestWarning is silenced at import time.
    response = requests.get(url, verify=False)
    response_json = response.json()
    tasks = response_json['data']['comic_chapter']
    # (Dead commented-out ThreadPoolExecutor variant removed.)
    with ProcessPoolExecutor() as pool:
        for index, task in enumerate(tasks[::-1]):
            pool.submit(deal_task, index, task)
def deal_task(index, task):
    """Download every page of one chapter as ``.webp`` and convert it to JPEG.

    :param int index: zero-based chapter number, used to build the image URL
    :param dict task: chapter metadata with 'end_num' (page count) and
        'chapter_name' (directory / file name stem)
    """
    print(currentThread())
    page_count = task['end_num']
    page_name = task['chapter_name']
    path = "../download/dpcq/{}".format(page_name)
    # Start from a clean directory so partially downloaded chapters are
    # re-fetched from scratch.
    if os.path.exists(path):
        shutil.rmtree(path)
    os.makedirs(path)
    for count in range(page_count):
        img_url = "https://mhpic.cnmanhua.com/comic/D/%E6%96%97%E7%A0%B4%E8%8B%8D%E7%A9%B9%E6%8B%86%E5%88%86%E7%89%88/{}%E8%AF%9D/{}.jpg-mht.middle.webp".format(index+1, count+1)
        response_img = requests.get(img_url)
        img_path = path + '/{}_{}.webp'.format(page_name, count)
        # The with-statement closes the file itself; the previous explicit
        # f.close() inside the block was redundant and has been removed.
        with open(img_path, 'wb') as f:
            f.write(response_img.content)
        save_image(img_path)
    print(page_name, '已经完成')
# Entry point: fetch the chapter list and start downloading.
if __name__ == '__main__':
    get_task()
|
'use strict';
exports.up = function (knex) {
return knex.schema.table('accounts', function (table) {
table.string('role', true).notNullable().defaultTo('admin');
});
};
/**
 * Revert the migration by dropping the `role` column from `accounts`.
 * @param {import('knex')} knex
 * @returns {Promise} resolves when the column has been removed
 */
exports.down = (knex) =>
  knex.schema.table('accounts', (table) => {
    table.dropColumn('role');
  });
|
'use strict';
// Grunt task configuration: minify the built prove/decorator bundles with
// uglify, emitting source maps and stripping dead code and console calls.
module.exports = function(grunt) {
  grunt.config('uglify', {
    options: {
      // Banner prepended to each minified file, e.g.
      // "/*! prove - v1.2.3 - 2020-01-01 ... */"
      banner: '/*!'
      + '\n<%= pkg.name %> - v<%= pkg.version %> - '
      + '<%= grunt.template.today("yyyy-mm-dd") %>'
      + '\nhttps://github.com/provejs-jquery'
      + '\n*/'
    },
    // Target: dist/prove.js -> dist/prove.min.js (+ dist/prove.map)
    prove: {
      options: {
        sourceMap: true,
        sourceMapName: 'dist/prove.map',
        compress: {
          dead_code: true,
          drop_console: true
        }
      },
      dest: 'dist/prove.min.js',
      src: [
        'dist/prove.js',
      ]
    },
    // Target: dist/decorator.js -> dist/decorator.min.js (+ dist/decorator.map)
    decorator: {
      options: {
        sourceMap: true,
        sourceMapName: 'dist/decorator.map',
        compress: {
          dead_code: true,
          drop_console: true
        }
      },
      dest: 'dist/decorator.min.js',
      src: [
        'dist/decorator.js',
      ]
    }
  });
  grunt.loadNpmTasks('grunt-contrib-uglify');
};
|
import numpy as np
# Public API of this plotting-utilities module.
__all__ = ["plot_spectrum_datasets_off_regions", "plot_contour_line"]
def plot_spectrum_datasets_off_regions(datasets, ax=None):
    """Plot the off regions of spectrum datasets, one color per dataset.

    Parameters
    ----------
    datasets : list of `SpectrumDatasetOnOff`
        List of spectrum on-off datasets
    ax : `~matplotlib.axes.Axes`, optional
        Axes to plot on. If None, the current axes are created/used with a
        WCS projection taken from the first dataset's off-counts geometry.
    """
    import matplotlib.pyplot as plt
    import matplotlib.patches as mpatches

    # Bug fix: the old expression `plt.gca(...) or ax` always evaluated
    # plt.gca() first and, since an Axes object is truthy, silently ignored
    # a caller-supplied `ax`. Honor the argument when it is given.
    ax = ax or plt.gca(projection=datasets[0].counts_off.geom.wcs)

    color_cycle = plt.rcParams["axes.prop_cycle"]
    colors = color_cycle.by_key()["color"]
    handles = []

    for color, dataset in zip(colors, datasets):
        kwargs = {"edgecolor": color, "facecolor": "none"}
        dataset.counts_off.plot_region(ax=ax, **kwargs)
        # Regions are drawn as raw patches, so create a proxy artist for
        # the custom legend entry of each dataset.
        handle = mpatches.Patch(label=dataset.name, **kwargs)
        handles.append(handle)

    plt.legend(handles=handles)
def plot_contour_line(ax, x, y, **kwargs):
    """Plot a smooth closed curve through the given contour points.

    Parameters
    ----------
    ax : `~matplotlib.axes.Axes`
        Axes to draw on.
    x, y : array-like
        Coordinates of the contour points (the contour is closed
        automatically by repeating the first point).
    **kwargs : dict
        Forwarded to ``ax.plot`` for the smooth line; ``marker``
        (default ``"+"``) and ``color`` (default ``"b"``) also style the
        raw contour points.
    """
    from scipy.interpolate import CubicSpline

    # Close the contour by appending the first point at the end (also a
    # requirement of the periodic spline below).
    xf = np.append(x, x[0])
    yf = np.append(y, y[0])

    # The spline parameter must be strictly increasing, so parametrize by
    # the cumulative distance of each point from the first one.
    dist = np.sqrt(np.diff(xf) ** 2.0 + np.diff(yf) ** 2.0)
    t = np.concatenate(([0], np.cumsum(dist)))
    ts = np.linspace(0, t[-1], 50)

    # 1D cubic spline interpolation, periodic since the contour is closed.
    cs = CubicSpline(t, np.c_[xf, yf], bc_type="periodic")
    out = cs(ts)

    # Idiom: pop styling options with defaults instead of manual key checks.
    marker = kwargs.pop("marker", "+")
    color = kwargs.pop("color", "b")

    ax.plot(out[:, 0], out[:, 1], "-", color=color, **kwargs)
    ax.plot(xf, yf, linestyle='', marker=marker, color=color)
|
import pandas as pd
from evalml.objectives import get_objective
from evalml.pipelines.regression_pipeline import RegressionPipeline
from evalml.problem_types import ProblemTypes
from evalml.utils.gen_utils import (
_convert_to_woodwork_structure,
_convert_woodwork_types_wrapper,
drop_rows_with_nans,
pad_with_nans
)
class TimeSeriesRegressionPipeline(RegressionPipeline):
"""Pipeline base class for time series regression problems."""
problem_type = ProblemTypes.TIME_SERIES_REGRESSION
def __init__(self, parameters, random_state=0):
"""Machine learning pipeline for time series regression problems made out of transformers and a classifier.
Required Class Variables:
component_graph (list): List of components in order. Accepts strings or ComponentBase subclasses in the list
Arguments:
parameters (dict): Dictionary with component names as keys and dictionary of that component's parameters as values.
An empty dictionary {} implies using all default values for component parameters. Pipeline-level
parameters such as gap and max_delay must be specified with the "pipeline" key. For example:
Pipeline(parameters={"pipeline": {"max_delay": 4, "gap": 2}}).
random_state (int): Seed for the random number generator. Defaults to 0.
"""
if "pipeline" not in parameters:
raise ValueError("gap and max_delay parameters cannot be omitted from the parameters dict. "
"Please specify them as a dictionary with the key 'pipeline'.")
pipeline_params = parameters["pipeline"]
self.gap = pipeline_params['gap']
self.max_delay = pipeline_params['max_delay']
super().__init__(parameters, random_state)
def fit(self, X, y):
"""Fit a time series regression pipeline.
Arguments:
X (ww.DataTable, pd.DataFrame or np.ndarray): The input training data of shape [n_samples, n_features]
y (ww.DataColumn, pd.Series, np.ndarray): The target training targets of length [n_samples]
Returns:
self
"""
if X is None:
X = pd.DataFrame()
X = _convert_to_woodwork_structure(X)
y = _convert_to_woodwork_structure(y)
X = _convert_woodwork_types_wrapper(X.to_dataframe())
y = _convert_woodwork_types_wrapper(y.to_series())
X_t = self._compute_features_during_fit(X, y)
X_t = X_t.to_dataframe()
y_shifted = y.shift(-self.gap)
X_t, y_shifted = drop_rows_with_nans(X_t, y_shifted)
self.estimator.fit(X_t, y_shifted)
return self
def predict(self, X, y=None, objective=None):
"""Make predictions using selected features.
Arguments:
X (ww.DataTable, pd.DataFrame, or np.ndarray): Data of shape [n_samples, n_features]
y (ww.DataColumn, pd.Series, np.ndarray, None): The target training targets of length [n_samples]
objective (Object or string): The objective to use to make predictions
Returns:
ww.DataColumn: Predicted values.
"""
if X is None:
X = pd.DataFrame()
X = _convert_to_woodwork_structure(X)
y = _convert_to_woodwork_structure(y)
X = _convert_woodwork_types_wrapper(X.to_dataframe())
y = _convert_woodwork_types_wrapper(y.to_series())
features = self.compute_estimator_features(X, y)
features = _convert_woodwork_types_wrapper(features.to_dataframe())
features_no_nan, y = drop_rows_with_nans(features, y)
y_arg = None
if self.estimator.predict_uses_y:
y_arg = y
predictions = self.estimator.predict(features_no_nan, y_arg).to_series()
predictions = predictions.rename(self.input_target_name)
padded = pad_with_nans(predictions, max(0, features.shape[0] - predictions.shape[0]))
return _convert_to_woodwork_structure(padded)
    def score(self, X, y, objectives):
        """Evaluate model performance on current and additional objectives.

        Arguments:
            X (ww.DataTable, pd.DataFrame or np.ndarray): Data of shape [n_samples, n_features]
            y (pd.Series, ww.DataColumn): True labels of length [n_samples]
            objectives (list): Non-empty list of objectives to score on

        Returns:
            dict: Ordered dictionary of objective scores
        """
        # Only converting X for the call to _score_all_objectives
        if X is None:
            X = pd.DataFrame()
        X = _convert_to_woodwork_structure(X)
        X = _convert_woodwork_types_wrapper(X.to_dataframe())
        y = _convert_to_woodwork_structure(y)
        y = _convert_woodwork_types_wrapper(y.to_series())
        y_predicted = self.predict(X, y)
        y_predicted = _convert_woodwork_types_wrapper(y_predicted.to_series())
        # Score against the target shifted by `gap`, mirroring how the
        # estimator was fit.
        y_shifted = y.shift(-self.gap)
        # Accept objective names or instances; resolve all to instances.
        objectives = [get_objective(o, return_instance=True) for o in objectives]
        # Align the two series by dropping rows where either has NaNs
        # (introduced by the shift above and by prediction padding).
        y_shifted, y_predicted = drop_rows_with_nans(y_shifted, y_predicted)
        return self._score_all_objectives(X, y_shifted,
                                          y_predicted,
                                          y_pred_proba=None,
                                          objectives=objectives)
|
/*-
* Copyright (c) 2007 Kai Wang
* Copyright (c) 2007 Tim Kientzle
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer
* in this position and unchanged.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S) ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "test.h"
__FBSDID("$FreeBSD: soc2013/dpl/head/contrib/libarchive/libarchive/test/test_read_format_ar.c 249817 2013-03-22 13:36:03Z mm $");
/*
 * Read a reference GNU-style ar archive and verify headers and data.
 * The expected names, mtimes, uids and payloads below are fixed by the
 * contents of the reference file and must not be changed independently.
 */
DEFINE_TEST(test_read_format_ar)
{
	char buff[64];
	const char reffile[] = "test_read_format_ar.ar";
	struct archive_entry *ae;
	struct archive *a;

	extract_reference_file(reffile);
	assert((a = archive_read_new()) != NULL);
	assertA(0 == archive_read_support_filter_all(a));
	assertA(0 == archive_read_support_format_all(a));
	/* Block size of 7 bytes — presumably deliberately tiny to exercise
	 * short/partial reads in the format reader; TODO confirm intent. */
	assertA(0 == archive_read_open_filename(a, reffile, 7));

	/* Filename table.  "//" is the GNU ar long-filename table entry;
	 * it carries no ownership/time metadata of its own. */
	assertA(0 == archive_read_next_header(a, &ae));
	assertEqualString("//", archive_entry_pathname(ae));
	assertEqualInt(0, archive_entry_mtime(ae));
	assertEqualInt(0, archive_entry_uid(ae));
	assertEqualInt(0, archive_entry_gid(ae));
	assertEqualInt(0, archive_entry_size(ae));

	/* First Entry — name longer than 16 chars, so it comes from the
	 * filename table above. */
	assertA(0 == archive_read_next_header(a, &ae));
	assertEqualString("yyytttsssaaafff.o", archive_entry_pathname(ae));
	assertEqualInt(1175465652, archive_entry_mtime(ae));
	assertEqualInt(1001, archive_entry_uid(ae));
	assertEqualInt(0, archive_entry_gid(ae));
	assert(8 == archive_entry_size(ae));
	/* Ask for 10 bytes; only the entry's 8 bytes are returned. */
	assertA(8 == archive_read_data(a, buff, 10));
	assertEqualMem(buff, "55667788", 8);

	/* Second Entry */
	assertA(0 == archive_read_next_header(a, &ae));
	assertEqualString("gghh.o", archive_entry_pathname(ae));
	assertEqualInt(1175465668, archive_entry_mtime(ae));
	assertEqualInt(1001, archive_entry_uid(ae));
	assertEqualInt(0, archive_entry_gid(ae));
	assert(4 == archive_entry_size(ae));
	assertA(4 == archive_read_data(a, buff, 10));
	assertEqualMem(buff, "3333", 4);

	/* Third Entry — long name again resolved via the filename table. */
	assertA(0 == archive_read_next_header(a, &ae));
	assertEqualString("hhhhjjjjkkkkllll.o", archive_entry_pathname(ae));
	assertEqualInt(1175465713, archive_entry_mtime(ae));
	assertEqualInt(1001, archive_entry_uid(ae));
	assertEqualInt(0, archive_entry_gid(ae));
	assert(9 == archive_entry_size(ae));
	assertA(9 == archive_read_data(a, buff, 9));
	assertEqualMem(buff, "987654321", 9);

	/* Test EOF — next_header returns 1 (ARCHIVE_EOF) and four entries
	 * (table + three members) have been counted. */
	assertA(1 == archive_read_next_header(a, &ae));
	assertEqualInt(4, archive_file_count(a));
	assertEqualIntA(a, ARCHIVE_OK, archive_read_close(a));
	assertEqualInt(ARCHIVE_OK, archive_read_free(a));
}
|
/**
 * Custom jQuery extension methods.
 * Currently empty: no utility methods have been registered yet; add
 * shared `$.foo()` helpers to the object below.
 */
$.extend({
});
|
'use strict';
var _ = require('lodash'),
should = require('should'),
request = require('supertest'),
path = require('path'),
async = require('async'),
moment = require('moment'),
mongoose = require('mongoose'),
User = mongoose.model('User'),
Offer = mongoose.model('Offer'),
Tribe = mongoose.model('Tribe'),
express = require(path.resolve('./config/lib/express'));
/**
 * Globals shared across the specs below (assigned in the hooks).
 */
// Express app and the cookie-persisting supertest agent.
var app;
var agent;
// Sign-in credentials for the two authenticating users.
var credentials;
var credentials2;
// Test users and the ids assigned to them on save.
var user1;
var user2;
var user3;
var user2Id;
var user3Id;
// Hosting/meeting offers and their saved ids.
var offer1;
var offer2;
var offer2Id;
var offer3;
var offer3Id;
var offerMeet;
// Tribes and their saved ids.
var tribe1;
var tribe2;
var tribe1Id;
var tribe2Id;

// Per-region search fixtures: a ready-made bounding-box query string plus
// one coordinate that falls inside that box.
var testLocations = {
  'Europe': {
    queryBoundingBox:
      '?northEastLat=55.31212135084999' +
      '&northEastLng=18.73318142361111' +
      '&southWestLat=44.66407507240992' +
      '&southWestLng=3.689914279513889',
    location: [52.48556355813466, 13.489011526107788]
  },
  'China': {
    queryBoundingBox:
      '?northEastLat=68.58321725728176' +
      '&northEastLng=151.23828125000003' +
      '&southWestLat=-3.9332268264771106' +
      '&southWestLng=61.63281250000001',
    location: [34.632532, 103.767519]
  },
  'US': {
    queryBoundingBox:
      '?northEastLat=70.1061015189654' +
      '&northEastLng=-48.44921875000001' +
      '&southWestLat=0.021065118766989688' +
      '&southWestLng=-138.05468750000003',
    location: [40.514402, -88.990735]
  },
  'NorthPole': {
    queryBoundingBox:
      '?northEastLat=89.99703020040681' +
      '&northEastLng=145.61328125000003' +
      '&southWestLat=78.02765497223292' +
      '&southWestLng=56.00781250000001',
    location: [80.912672, 79.732322]
  }
};
/**
* Offer routes tests
*/
describe('Offer search tests', function () {
before(function (done) {
  // Get application and wrap it in a supertest agent; the agent keeps
  // session cookies so the sign-in in each spec persists across requests.
  app = express.init(mongoose.connection);
  agent = request.agent(app);
  done();
});
beforeEach(function (doneBeforeEach) {
// Create user credentials
credentials = {
username: 'loremipsum',
password: 'Password123!'
};
// Create user2 credentials
credentials2 = {
username: 'loremipsum2',
password: 'Password123!'
};
// Create a new user
user1 = new User({
firstName: 'Full',
lastName: 'Name',
displayName: 'Full Name',
email: 'test1@test.com',
member: [],
username: credentials.username,
password: credentials.password,
provider: 'local',
public: true,
seen: new Date()
});
// Create a new user
user2 = new User({
firstName: 'Full',
lastName: 'Name',
displayName: 'Full Name',
email: 'test2@test.com',
member: [],
username: credentials2.username,
password: credentials2.password,
languages: ['fin', 'ita'],
provider: 'local',
public: true,
seen: new Date()
});
// Create a new user
user3 = new User({
firstName: 'Full',
lastName: 'Name',
displayName: 'Full Name',
email: 'test3@test.com',
username: credentials.username + '3',
password: credentials.password,
languages: ['ita'],
provider: 'local',
public: true,
seen: new Date()
});
// Used only for sending via POST and thus doesn't include some data
offer1 = {
type: 'host',
status: 'yes',
description: '<p>1 I can host! :)</p>',
noOfferDescription: '<p>1 I cannot host... :(</p>',
maxGuests: 5,
location: testLocations.Europe.location
};
offer2 = new Offer({
type: 'host',
status: 'yes',
description: '<p>2 I can host! :)</p>',
noOfferDescription: '<p>2 I cannot host... :(</p>',
maxGuests: 3,
updated: new Date(),
location: [52.498981209298776, 13.418329954147339]
});
offer3 = new Offer({
type: 'host',
status: 'yes',
description: '<p>3 I can host! :)</p>',
noOfferDescription: '<p>3 I cannot host... :(</p>',
maxGuests: 1,
updated: new Date(),
location: [52.498981209298775, 13.418329954147338]
});
offerMeet = new Offer({
type: 'meet',
description: '<p>Dinner party!</p>',
validUntil: moment().add(30, 'day').toDate(),
updated: new Date(),
location: [52.498981209298887, 13.418329954147449]
});
tribe1 = new Tribe({
'slug': 'tribe1',
'label': 'tribe1',
'color': '111111',
'tribe': true,
'count': 1,
'public': true
});
tribe2 = new Tribe({
'slug': 'tribe2',
'label': 'tribe2',
'color': '222222',
'count': 1,
'public': true
});
// Save data to the test db
async.waterfall([
// Save tribe 1
function (done) {
tribe1.save(function (err, tribe1) {
tribe1Id = tribe1._id;
done(err);
});
},
// Save tribe 2
function (done) {
tribe2.save(function (err, tribe2) {
tribe2Id = tribe2._id;
done(err);
});
},
// Save user 1 (without tribe membership)
function (done) {
user1.save(function (err) {
done(err);
});
},
// Save user 2 (with tribe membership)
function (done) {
user2.member = [{
tribe: tribe2Id,
since: new Date()
}];
user2.save(function (err, user2res) {
user2Id = user2res._id;
done(err);
});
},
// Save user 3 (with tribe membership)
function (done) {
user3.member = [{
tribe: tribe1Id,
since: new Date()
}];
user3.save(function (err, user3res) {
user3Id = user3res._id;
return done(err);
});
},
// Save hosting offer 2
function (done) {
offer2.user = user2Id;
offer2.save(function (err, offer2) {
offer2Id = offer2._id;
done(err);
});
},
// Save hosting offer 3
function (done) {
offer3.user = user3Id;
offer3.save(function (err, offer3) {
offer3Id = offer3._id;
done(err);
});
}
], function (err) {
should.not.exist(err);
doneBeforeEach(err);
});
});
it('should be able to get empty list from an area where there are no offers', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (in Niger)
agent.get('/api/offers' +
'?northEastLat=32.89472514359572' +
'&northEastLng=25.598493303571427' +
'&southWestLat=-20.49068931208608' +
'&southWestLng=-12.986188616071427'
)
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.length.should.equal(0);
// Call the assertion callback
return done();
});
});
});
it('should be able to use + in front of positive coordinates', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (in Niger)
agent.get('/api/offers' +
'?northEastLat=+55.31212135084999' +
'&northEastLng=+18.73318142361111' +
'&southWestLat=+44.66407507240992' +
'&southWestLng=+3.689914279513889'
)
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.length.should.equal(2);
// Call the assertion callback
return done();
});
});
});
it('should return error when missing bounding box parameter', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Missing `southWestLng` paramter
agent.get('/api/offers' +
'?northEastLat=32.89472514359572' +
'&northEastLng=25.598493303571427' +
'&southWestLat=-20.49068931208608'
)
.expect(400)
.end(done);
});
});
it('should return error with invalid bounding box parameter (string after decimals)', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Missing `southWestLng` paramter
agent.get('/api/offers' +
'?northEastLat=25.' + '1'.repeat(30) + 'foo' + // `foo` starts at 31
'&northEastLng=25.598493303571427' +
'&southWestLat=-20.49068931208608' +
'&southWestLng=-12.986188616071427'
)
.expect(400)
.end(done);
});
});
it('should return error with invalid bounding box parameter (string instead of coordinate)', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Missing `southWestLng` paramter
agent.get('/api/offers' +
'?northEastLat=FAIL' +
'&northEastLng=25.598493303571427' +
'&southWestLat=-20.49068931208608' +
'&southWestLng=-12.986188616071427'
)
.expect(400)
.end(done);
});
});
it('should not be able to get list of offers from an area if not authenticated', function (done) {
// Get offers (around Berlin)
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox)
.expect(403)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
offersGetRes.body.message.should.equal('Forbidden.');
// Call the assertion callback
return done();
});
});
it('should be able to get list of offers from an area (Europe)', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox)
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// MongoDb returns these in random order, figure out order here
var user2Order = 1;
var user3Order = 0;
if (offersGetRes.body[0]._id === offer2Id.toString()) {
user2Order = 0;
user3Order = 1;
}
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(2);
offersGetRes.body[user2Order].status.should.equal(offer2.status);
offersGetRes.body[user2Order].location.should.be.instanceof(Array).and.have.lengthOf(2);
offersGetRes.body[user2Order].location[0].should.be.approximately(offer2.locationFuzzy[0], 0.0000000000001);
offersGetRes.body[user2Order].location[1].should.be.approximately(offer2.locationFuzzy[1], 0.0000000000001);
offersGetRes.body[user2Order]._id.should.equal(offer2Id.toString());
should.not.exist(offersGetRes.body[user2Order].locationFuzzy);
offersGetRes.body[user3Order].status.should.equal(offer3.status);
offersGetRes.body[user3Order].location.should.be.instanceof(Array).and.have.lengthOf(2);
offersGetRes.body[user3Order].location[0].should.be.approximately(offer3.locationFuzzy[0], 0.0000000000001);
offersGetRes.body[user3Order].location[1].should.be.approximately(offer3.locationFuzzy[1], 0.0000000000001);
offersGetRes.body[user3Order]._id.should.equal(offer3Id.toString());
should.not.exist(offersGetRes.body[user3Order].locationFuzzy);
// Call the assertion callback
return done();
});
});
});
// Tests different regions in the globe (Asia, USA, North Pole etc)
_.forEach(testLocations, function (testLocation, area) {
it('should be able to get offer from an area (' + area + ')', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Clean out the DB from other offers
Offer.remove().exec(function () {
// Create new offer to target location
var testLocationOffer = new Offer(offer1);
testLocationOffer.location = testLocation.location;
testLocationOffer.save(function (saveErr, saveRes) {
if (saveErr) return done(saveErr);
// Get offers (around Berlin)
agent.get('/api/offers' + testLocation.queryBoundingBox)
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(1);
offersGetRes.body[0]._id.should.equal(saveRes._id.toString());
offersGetRes.body[0].location[0].should.be.approximately(testLocation.location[0], 0.1);
offersGetRes.body[0].location[1].should.be.approximately(testLocation.location[1], 0.1);
// Call the assertion callback
return done();
});
});
});
});
});
});
it('should include both meet and host offers when getting a list of offers from an area', function (done) {
offerMeet.save(function (saveErr) {
// Handle save error
if (saveErr) return done(saveErr);
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox)
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(3);
// Count different offer types
// This produces `{'host': 2, 'meet': 1}`
var count = _.countBy(offersGetRes.body, function (offer) {
return offer.type;
});
count.host.should.equal(2);
count.meet.should.equal(1);
// Call the assertion callback
return done();
});
});
});
});
it('should not include outdated meet offers when getting a list of offers from an area', function (done) {
// Set date to past
offerMeet.validUntil = moment().subtract(1, 'minute').toDate();
offerMeet.save(function (saveErr) {
// Handle save error
if (saveErr) return done(saveErr);
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox)
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(2);
// Only "host" offers here
// Note that these are in random order from Mongo but it doesn't matter here
offersGetRes.body[0].type.should.equal('host');
offersGetRes.body[1].type.should.equal('host');
// Call the assertion callback
return done();
});
});
});
});
describe('Search offers by "types" filter', function () {
it('should be able to get list of offers from an area filtered by type "host"', function (done) {
offerMeet.save(function (saveErr) {
// Handle save error
if (saveErr) return done(saveErr);
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
var filters = {
types: ['host']
};
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// MongoDb returns these in random order, figure out order here
var user2Order = 1;
var user3Order = 0;
if (offersGetRes.body[0]._id === offer2Id.toString()) {
user2Order = 0;
user3Order = 1;
}
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(2);
offersGetRes.body[user2Order].type.should.equal(offer2.type);
offersGetRes.body[user3Order].type.should.equal(offer3.type);
// Call the assertion callback
return done();
});
});
});
});
it('should be able to get list of offers from an area filtered by type "meet"', function (done) {
offerMeet.save(function (saveErr) {
// Handle save error
if (saveErr) return done(saveErr);
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
var filters = {
types: ['meet']
};
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(1);
offersGetRes.body[0].type.should.equal(offerMeet.type);
// Call the assertion callback
return done();
});
});
});
});
it('should be able to get list of offers from an area filtered by non existing type', function (done) {
offerMeet.save(function (saveErr) {
// Handle save error
if (saveErr) return done(saveErr);
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
var filters = {
types: ['foobar']
};
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(3);
// Count different offer types
// This produces `{'host': 2, 'meet': 1}`
var count = _.countBy(offersGetRes.body, function (offer) {
return offer.type;
});
count.host.should.equal(2);
count.meet.should.equal(1);
// Call the assertion callback
return done();
});
});
});
});
});
describe('Search offers by "languages" filter', function () {
it('should be able to get list of offers from an area filtered by one language and ignore users by other language', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
var filters = {
languages: ['fin']
};
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(1);
offersGetRes.body[0]._id.should.equal(offer2._id.toString());
// Call the assertion callback
return done();
});
});
});
it('should be able to get list of offers from an area filtered by multiple languages', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
var filters = {
languages: ['fin', 'ita']
};
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// MongoDb returns these in random order, figure out order here
var user2Order = 1;
var user3Order = 0;
if (offersGetRes.body[0]._id === offer2Id.toString()) {
user2Order = 0;
user3Order = 1;
}
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(2);
offersGetRes.body[user2Order]._id.should.equal(offer2._id.toString());
offersGetRes.body[user3Order]._id.should.equal(offer3._id.toString());
// Call the assertion callback
return done();
});
});
});
});
describe('Search offers by "tribes" filter', function () {
it('should be able to get list of offers from an area filtered by one tribe', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
var filters = {
tribes: [tribe2Id]
};
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(1);
offersGetRes.body[0].status.should.equal(offer2.status);
offersGetRes.body[0].type.should.equal(offer2.type);
offersGetRes.body[0].location.should.be.instanceof(Array).and.have.lengthOf(2);
offersGetRes.body[0].location[0].should.be.approximately(offer2.locationFuzzy[0], 0.0000000000001);
offersGetRes.body[0].location[1].should.be.approximately(offer2.locationFuzzy[1], 0.0000000000001);
offersGetRes.body[0]._id.should.equal(offer2Id.toString());
should.not.exist(offersGetRes.body[0].locationFuzzy);
// Call the assertion callback
return done();
});
});
});
it('should be able to get list of offers from an area filtered by tribes and not get tribe-less offers', function (done) {
user3.member = [];
user3.save(function (err, user3res) {
should.not.exist(err);
user3res.member.length.should.equal(0);
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
// Get offers (around Berlin)
var filters = {
tribes: [tribe1Id, tribe2Id]
};
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(1);
offersGetRes.body[0].status.should.equal(offer2.status);
offersGetRes.body[0].type.should.equal(offer2.type);
offersGetRes.body[0].location.should.be.instanceof(Array).and.have.lengthOf(2);
offersGetRes.body[0].location[0].should.be.approximately(offer2.locationFuzzy[0], 0.0000000000001);
offersGetRes.body[0].location[1].should.be.approximately(offer2.locationFuzzy[1], 0.0000000000001);
// Call the assertion callback
return done();
});
});
});
});
it('should be able to get list of offers from an area filtered by many tribes', function (done) {
  agent.post('/api/auth/signin')
    .send(credentials)
    .expect(200)
    .end(function (signinErr) {
      // Handle signin error
      if (signinErr) return done(signinErr);
      // Get offers (around Berlin) whose owners belong to either tribe
      var filters = {
        tribes: [tribe1Id, tribe2Id]
      };
      agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
        .expect(200)
        .end(function (offersGetErr, offersGetRes) {
          // Handle offer get error
          if (offersGetErr) return done(offersGetErr);
          // Set assertions
          offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(2);
          // MongoDb returns these in random order, figure out order here
          var user2Order = 1;
          var user3Order = 0;
          if (offersGetRes.body[0]._id === offer2Id.toString()) {
            user2Order = 0;
            user3Order = 1;
          }
          // User 2 offer
          offersGetRes.body[user2Order].status.should.equal(offer2.status);
          offersGetRes.body[user2Order].type.should.equal(offer2.type);
          offersGetRes.body[user2Order].location.should.be.instanceof(Array).and.have.lengthOf(2);
          offersGetRes.body[user2Order].location[0].should.be.approximately(offer2.locationFuzzy[0], 0.0000000000001);
          offersGetRes.body[user2Order].location[1].should.be.approximately(offer2.locationFuzzy[1], 0.0000000000001);
          offersGetRes.body[user2Order]._id.should.equal(offer2Id.toString());
          should.not.exist(offersGetRes.body[user2Order].locationFuzzy);
          // User 3 offer
          offersGetRes.body[user3Order].status.should.equal(offer3.status);
          // Fixed copy-paste bug: user3's offer must be compared against
          // offer3.type, not offer2.type.
          offersGetRes.body[user3Order].type.should.equal(offer3.type);
          offersGetRes.body[user3Order].location.should.be.instanceof(Array).and.have.lengthOf(2);
          offersGetRes.body[user3Order].location[0].should.be.approximately(offer3.locationFuzzy[0], 0.0000000000001);
          offersGetRes.body[user3Order].location[1].should.be.approximately(offer3.locationFuzzy[1], 0.0000000000001);
          offersGetRes.body[user3Order]._id.should.equal(offer3Id.toString());
          should.not.exist(offersGetRes.body[user3Order].locationFuzzy);
          // Call the assertion callback
          return done();
        });
    });
});
it('should be able able to send empty filter request', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=&types=')
.expect(200)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(2);
// Call the assertion callback
return done();
});
});
});
it('should not be able to send non-json filter request', function (done) {
agent.post('/api/auth/signin')
.send(credentials)
.expect(200)
.end(function (signinErr) {
// Handle signin error
if (signinErr) return done(signinErr);
agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters={wrong}')
.expect(400)
.end(function (offersGetErr, offersGetRes) {
// Handle offer get error
if (offersGetErr) return done(offersGetErr);
// Set assertions
offersGetRes.body.message.should.equal('Could not parse filters.');
// Call the assertion callback
return done();
});
});
});
});
describe('Search offers by "seen" filter', function () {
it('should be able to get list of offers from an area filtered by last seen', function (done) {
  // Push user2's last-seen date outside the one-month filter window.
  user2.seen = moment().subtract({ 'months': 2 }).toDate();
  user2.save(function (user2SaveErr) {
    if (user2SaveErr) {
      return done(user2SaveErr);
    }
    agent.post('/api/auth/signin')
      .send(credentials)
      .expect(200)
      .end(function (signinErr) {
        // Handle signin error
        if (signinErr) {
          return done(signinErr);
        }
        // Only offers of users seen within the last month should remain.
        var filters = {
          seen: {
            'months': 1
          }
        };
        agent.get('/api/offers' + testLocations.Europe.queryBoundingBox + '&filters=' + encodeURIComponent(JSON.stringify(filters)))
          .expect(200)
          .end(function (offersGetErr, offersGetRes) {
            // Handle offer get error
            if (offersGetErr) return done(offersGetErr);
            // Set assertions
            // User2's offer should be filtered out
            offersGetRes.body.should.be.instanceof(Array).and.have.lengthOf(1);
            offersGetRes.body[0]._id.should.equal(offer3Id.toString());
            // Call the assertion callback
            return done();
          });
      });
  });
});
});
afterEach(function (done) {
  // Remove all users, tribes and offers between tests so every spec
  // starts against a clean collection set.
  User.remove().exec(function () {
    Tribe.remove().exec(function () {
      Offer.remove().exec(done);
    });
  });
});
});
|
import React, { useState } from "react";
//
import useFetch from "../../hooks/useFetch";
import useQuery from "../../hooks/useQuery";
import Card from "../../components/Card";
import Pagination from "../../components/PaginationC";
import SearchBox from "../../components/SearchBox/SearchBox";
import PhSlider from "../../components/Sliders/PhSlider";
import SrmSlider from "../../components/Sliders/SrmSlider";
import VolumeSlider from "../../components/Sliders/VolumeSlider";
import { buttonData } from "../../components/Sliders/buttonData";
import VolumeButton from "../../components/Sliders/VolumeButton";
/**
 * Beer catalogue page: fetches beers from the Punk API, lets the user
 * search by name and filter by ABV, pH and SRM, and paginates results
 * client-side.
 */
function Home() {
  // Pagination, search and filter state.
  const [currentPage, setCurrentPage] = useState(1);
  const [query, setQuery] = useState("");
  const [ph, setPh] = useState([0, 7]);
  const [volume, setVolume] = useState(20);
  const [srm, setSrm] = useState(0);
  const [button, setButton] = useState("none");

  // List endpoint; `button` selects which ABV query parameter
  // (e.g. abv_gt / abv_lt) gets the slider's value.
  const URL = `https://api.punkapi.com/v2/beers?page=${currentPage}&per_page=60&${button}=${volume}`;
  const SURL = `https://api.punkapi.com/v2`;

  const { data } = useFetch(URL, currentPage);
  const { results } = useQuery(SURL, query);

  const itemsCountPerPage = 3;
  // Fixed off-by-one: with 1-based pages, page 1 must start at index 0.
  // Previously `currentPage * itemsCountPerPage` skipped the first page.
  const offset = (currentPage - 1) * itemsCountPerPage;
  // Guard against `data` being undefined before the first fetch resolves.
  const filteredData = (data || []).filter(
    (item) => ph[0] <= item.ph && ph[1] >= item.ph && item.srm >= srm
  );
  const currentPageData = filteredData.slice(offset, offset + itemsCountPerPage);
  // Page count is derived from the *filtered* list, which is what gets
  // sliced above (previously used the unfiltered length).
  const itemsCount = Math.ceil(filteredData.length / itemsCountPerPage);

  function handleChangePage(pageNumber) {
    setCurrentPage(pageNumber);
  }
  function handleSubmit(e) {
    e.preventDefault();
  }
  function handleReset() {
    setQuery("");
  }
  function handleChange(e) {
    setQuery(e.target.value);
  }

  return (
    <div>
      <div className="container">
        <SearchBox
          onSubmit={handleSubmit}
          onChange={handleChange}
          onReset={handleReset}
          query={query}
        />
        <div className="row my-5 mx-0 justify-centent-center">
          <h5>Alcohol by Volume</h5>
          <VolumeSlider
            value={volume}
            onChange={(volume) => setVolume(volume)}
          />
          <div className="col-xl-4 col-lg-4">
            {buttonData.map((item) => (
              <VolumeButton
                key={item.id}
                name={item.name}
                btnName={item.btnName}
                button={button}
                isActive={item.isActive}
                setButton={setButton}
              />
            ))}
          </div>
        </div>
        <div className="row my-4 mx-0 justify-centent-center">
          <h5 className="font-small">Ph</h5>
          <PhSlider value={ph} onChange={(ph) => setPh(ph)} />
          <h5 className="font-small">Srm</h5>
          <SrmSlider value={srm} onChange={(srm) => setSrm(srm)} />
        </div>
      </div>
      <div className="container">
        <div className="row row-cols-1 row-cols-md-3 g-4 ">
          {/* Search results take precedence over the paged catalogue.
              `key` props added so React can reconcile the lists. */}
          {results
            ? results.map((item) => <Card key={item.id} item={item} />)
            : currentPageData.map((item) => <Card key={item.id} item={item} />)}
        </div>
      </div>
      <div className="container">
        <Pagination
          onChange={handleChangePage}
          activePage={currentPage}
          itemsCountPerPage={itemsCountPerPage}
          totalItemsCount={itemsCount}
        />
      </div>
    </div>
  );
}
export default Home;
|
# Generated by Django 2.2.1 on 2021-03-19 06:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make Entity.node optional while protecting referenced Nodes."""

    dependencies = [
        ('webapi', '0061_auto_20210317_1402'),
    ]

    operations = [
        # Entity.node becomes nullable/blank; on_delete=PROTECT prevents
        # deleting a Node that is still referenced by any Entity.
        migrations.AlterField(
            model_name='entity',
            name='node',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='webapi.Node'),
        ),
    ]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import transaction
def setUp():
    """Create a single depot fixture ('test.gif' owned by user 'hans')
    and commit it so the browser tests below can see it."""
    from sw.allotmentclub import Depot, User
    user = User.create(username='hans')
    # 15 bytes of (roughly) GIF-looking data; mimetype drives type checks.
    Depot.create(date=datetime.datetime(2014, 11, 27, 7, 21, 45),
                 size=15, data=b'GIF89a????!?,D;', mimetype='image/gif',
                 name='test.gif', user=user)
    transaction.commit()
def test_displays_list_of_depots(browser):
    """The depots JSON view lists the fixture row (id, name, type, size,
    formatted date, owner)."""
    setUp()
    browser.login()
    browser.open('http://localhost/depots')
    expected = [1, 'test.gif', 'GIF', '15.00 B', '27.11.2014 07:21', 'hans']
    assert expected in browser.json_result
def test_depot_can_be_added_via_json_view(browser):
    """Uploading through /depots/add creates a Depot named after the
    uploaded file (browser._upload sends 'Transparent.gif')."""
    from sw.allotmentclub import Depot
    browser.login()
    browser._upload('http://localhost/depots/add')
    assert 'success' == browser.json['status']
    assert 'Transparent.gif' == Depot.query().one().name
def test_depot_can_be_edited_via_json_view(browser):
    """Re-uploading on /depots/1/edit replaces the existing depot in place
    (still exactly one row) and updates name, size, date and owner."""
    from sw.allotmentclub import Depot
    setUp()
    browser.login()
    browser._upload('http://localhost/depots/1/edit')
    now = datetime.datetime.now()
    assert 'success' == browser.json['status']
    # Edit must not create a second row.
    assert 1 == len(Depot.query().all())
    browser.open('http://localhost/depots')
    # NOTE(review): comparing against datetime.now() is timing-sensitive if
    # the minute rolls over between upload and assertion.
    assert [1, 'Transparent.gif', 'GIF', '42.00 B',
            now.strftime('%d.%m.%Y %H:%M'), 'admin'] == browser.json_result[0]
def test_depot_can_not_edit_file_if_mimetype_is_different(browser):
    """Editing is rejected when the uploaded mimetype differs from the
    stored one; the German error message names both types."""
    from sw.allotmentclub import Depot
    setUp()
    depot = Depot.get(1)
    # Flip the stored mimetype so the uploaded image/gif no longer matches.
    depot.mimetype = 'image/png'
    transaction.savepoint()
    browser.login()
    browser._upload('http://localhost/depots/1/edit')
    assert 'error' == browser.json['status']
    # Adjacent string literals concatenate before the comparison.
    assert (
        'Kann nur Dateien vom gleichen Typ bearbeiten. '
        'alt: image/png, neu: image/gif' == browser.json['message'])
|
import codecs
import datetime
import locale
from decimal import Decimal
from urllib.parse import quote
from django.utils.functional import Promise
class DjangoUnicodeDecodeError(UnicodeDecodeError):
    """UnicodeDecodeError that also records and reports the offending object."""

    def __init__(self, obj, *args):
        # Keep the original object around so error messages can show it.
        self.obj = obj
        super().__init__(*args)

    def __str__(self):
        base = super().__str__()
        return f'{base}. You passed in {self.obj!r} ({type(self.obj)})'
def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Return a string representing 's'. Treat bytestrings using the 'encoding'
    codec.

    If strings_only is True, don't convert (some) non-string-like objects.
    """
    # Lazy translation objects (gettext_lazy results) pass through untouched.
    return s if isinstance(s, Promise) else force_text(s, encoding, strings_only, errors)
# Types preserved as-is by force_text(..., strings_only=True).
_PROTECTED_TYPES = (
    type(None),
    int,
    float,
    Decimal,
    datetime.datetime,
    datetime.date,
    datetime.time,
)


def is_protected_type(obj):
    """Determine if the object instance is of a protected type.

    Objects of protected types are preserved as-is when passed to
    force_text(strings_only=True).
    """
    return isinstance(obj, _PROTECTED_TYPES)
def force_text(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_text, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.

    If strings_only is True, don't convert (some) non-string-like objects.
    """
    # Fast path: already a str (or str subclass).
    if issubclass(type(s), str):
        return s
    if strings_only and is_protected_type(s):
        return s
    try:
        if isinstance(s, bytes):
            return str(s, encoding, errors)
        return str(s)
    except UnicodeDecodeError as e:
        # Re-raise with the offending object attached for better diagnostics.
        raise DjangoUnicodeDecodeError(s, *e.args)
def smart_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Return a bytestring version of 's', encoded as specified in 'encoding'.

    If strings_only is True, don't convert (some) non-string-like objects.
    """
    # Lazy translation objects (gettext_lazy results) pass through untouched.
    return s if isinstance(s, Promise) else force_bytes(s, encoding, strings_only, errors)
def force_bytes(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_bytes, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.

    If strings_only is True, don't convert (some) non-string-like objects.
    """
    if isinstance(s, bytes):
        # Already bytes: re-encode only when a different codec was requested.
        if encoding == 'utf-8':
            return s
        return s.decode('utf-8', errors).encode(encoding, errors)
    if strings_only and is_protected_type(s):
        return s
    if isinstance(s, memoryview):
        return bytes(s)
    return str(s).encode(encoding, errors)
# Backwards-compatible aliases from the Python 2 era: under Python 3,
# smart_str/force_str are simply smart_text/force_text.
smart_str = smart_text
force_str = force_text
smart_str.__doc__ = """
Apply smart_text in Python 3 and smart_bytes in Python 2.
This is suitable for writing to sys.stdout (for instance).
"""
force_str.__doc__ = """
Apply force_text in Python 3 and force_bytes in Python 2.
"""
def iri_to_uri(iri):
    """
    Convert an Internationalized Resource Identifier (IRI) portion to a URI
    portion that is suitable for inclusion in a URL.

    This is the algorithm from section 3.1 of RFC 3987, slightly simplified
    since the input is assumed to be a string rather than an arbitrary byte
    stream.

    Take an IRI (string or UTF-8 bytes, e.g. '/I ♥ Django/' or
    b'/I \xe2\x99\xa5 Django/') and return a string containing the encoded
    result with ASCII chars only (e.g. '/I%20%E2%99%A5%20Django/').
    """
    # The safe set below is the RFC 3986 "reserved" plus "unreserved"
    # characters (sections 2.2 / 2.3):
    #   gen-delims : / ? # [ ] @      sub-delims ! $ & ' ( ) * + , ; =
    #   unreserved ALPHA DIGIT - . _ ~
    # urllib.parse.quote() already treats the unreserved set (except ~) as
    # safe. '%' is also kept safe: the end of RFC 3987 section 3.1 says
    # existing percent-escapes must not be double-encoded.
    if iri is None:
        return iri
    value = str(iri) if isinstance(iri, Promise) else iri
    return quote(value, safe="/#%[]=:;$&()+,!?*@'~")
# List of byte values that uri_to_iri() decodes from percent encoding.
# First, the unreserved characters from RFC 3986:
#   '-', '.', '_', '~' plus A-Z (65-90) and a-z (97-122).
_ascii_ranges = [[45, 46, 95, 126], range(65, 91), range(97, 123)]
# Map both the lowercase and uppercase two-hex-digit spellings of each
# decodable byte (e.g. b'2d' and b'2D') to the raw byte.
_hextobyte = {
    (fmt % char).encode(): bytes((char,))
    for ascii_range in _ascii_ranges
    for char in ascii_range
    for fmt in ['%02x', '%02X']
}
# And then everything above 128, because bytes ≥ 128 are part of multibyte
# unicode characters. _hexdig[8:] yields the first hex digits 8-9, A-F, a-f,
# i.e. all values 0x80-0xFF in both cases.
_hexdig = '0123456789ABCDEFabcdef'
_hextobyte.update({
    (a + b).encode(): bytes.fromhex(a + b)
    for a in _hexdig[8:] for b in _hexdig
})
def uri_to_iri(uri):
    """
    Convert a Uniform Resource Identifier(URI) into an Internationalized
    Resource Identifier(IRI).

    This is the algorithm from section 3.2 of RFC 3987, excluding step 4.

    Take an URI in ASCII bytes (e.g. '/I%20%E2%99%A5%20Django/') and return
    a string containing the encoded result (e.g. '/I%20♥%20Django/').
    """
    if uri is None:
        return uri
    uri = force_bytes(uri)
    # Fast selective unquote: First, split on '%' and then starting with the
    # second block, decode the first 2 bytes if they represent a hex code to
    # decode. The rest of the block is the part after '%AB', not containing
    # any '%'. Add that to the output without further processing.
    bits = uri.split(b'%')
    if len(bits) == 1:
        # No percent-escapes at all: nothing to decode.
        iri = uri
    else:
        parts = [bits[0]]
        append = parts.append       # local alias for speed in the loop
        hextobyte = _hextobyte
        for item in bits[1:]:
            hex = item[:2]
            if hex in hextobyte:
                # Decodable escape: emit the raw byte plus the literal tail.
                append(hextobyte[item[:2]])
                append(item[2:])
            else:
                # Reserved/unknown escape: keep the '%XY' sequence verbatim.
                append(b'%')
                append(item)
        iri = b''.join(parts)
    # Re-encode any octets that no longer form legal UTF-8 (RFC 3987 step 3).
    return repercent_broken_unicode(iri).decode()
def escape_uri_path(path):
    """
    Escape the unsafe characters from the path portion of a Uniform Resource
    Identifier (URI).
    """
    # Safe set = RFC 2396 "reserved" + "unreserved" characters
    #   reserved   = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | "$" | ","
    #   unreserved = alphanum | "-" | "_" | "." | "!" | "~" | "*" | "'" | "(" | ")"
    # minus ";", "=" and "?" per section 3.3 of RFC 2396. "/" stays safe
    # because the whole path (not a single segment) is being escaped.
    safe_chars = "/:@&+$,-_.!~*'()"
    return quote(path, safe=safe_chars)
def repercent_broken_unicode(path):
    """
    As per section 3.2 of RFC 3987, step three of converting a URI into an IRI,
    repercent-encode any octet produced that is not part of a strictly legal
    UTF-8 octet sequence.

    :param path: bytes to scan
    :return: bytes with every illegal-UTF-8 octet replaced by its %XX escape

    Fix: the original recursed once per broken octet, so input with many
    broken octets could exhaust the stack (RecursionError / DoS); this
    version iterates instead. quote() returns an ASCII str, so a plain
    .encode() replaces the former force_bytes() round-trip.
    """
    changed_parts = []
    while True:
        try:
            path.decode()
        except UnicodeDecodeError as e:
            # Percent-encode just the offending octet(s) and keep scanning
            # the remainder of the byte string.
            repercent = quote(path[e.start:e.end], safe=b"/#%[]=:;$&()+,!?*@'~")
            changed_parts.append(path[:e.start] + repercent.encode())
            path = path[e.end:]
        else:
            return b''.join(changed_parts) + path
def filepath_to_uri(path):
    """Convert a file system path to a URI portion that is suitable for
    inclusion in a URL.

    Encode certain chars that would normally be recognized as special chars
    for URIs. Do not encode the ' character, as it is a valid character
    within URIs. See the encodeURIComponent() JavaScript function for details.
    """
    if path is None:
        return path
    # Backslashes are normalized to forward slashes on purpose rather than
    # using os.sep/os.altsep, to keep the separator handling hardcoded.
    normalized = path.replace("\\", "/")
    return quote(normalized, safe="/~!*()'")
def get_system_encoding():
    """
    The encoding of the default system locale. Fallback to 'ascii' if the
    encoding is unsupported by Python or could not be determined. See tickets
    #10335 and #5846.
    """
    try:
        encoding = locale.getdefaultlocale()[1] or 'ascii'
        # Raises LookupError for names Python's codec registry doesn't know.
        codecs.lookup(encoding)
        return encoding
    except Exception:
        return 'ascii'


# Computed once at import time; the system locale does not change mid-process.
DEFAULT_LOCALE_ENCODING = get_system_encoding()
|
class PublicKey(object):
    """Immutable wrapper around raw public-key bytes of a fixed length."""

    def __init__(self, keylen, data):
        """
        :param keylen: required byte length of the key material
        :param data: bytes
        :raises ValueError: when ``data`` is not exactly ``keylen`` bytes
        """
        actual = len(data)
        if actual != keylen:
            raise ValueError("Wrong length: %d" % actual)
        self._data = data

    @property
    def data(self):
        """
        :return: bytes
        """
        return self._data
|
// Ember-QUnit unit-test scaffolding for the student/class route.
import { moduleFor, test } from 'ember-qunit';

moduleFor(
  'route:student/class',
  'Unit | Route | student/class',
  {
    // Specify the other units that are required for this test.
    // needs: ['controller:foo']
  }
);

// Smoke test: the route factory can be looked up and instantiated.
test('it exists', function(assert) {
  let route = this.subject();
  assert.ok(route);
});
|
/**
* Copyright IBM Corp. 2019, 2020
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*
* Code generated by @carbon/icon-build-helpers. DO NOT EDIT.
*/
'use strict';

// Returns the CJS default export when present (Babel ESM interop shim).
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }

var Icon = require('../Icon-1f3f78db.js');
require('@carbon/icon-helpers');
require('prop-types');
var React = _interopDefault(require('react'));

// Pre-built SVG path elements shared by every render of this icon.
var _ref2 =
/*#__PURE__*/
/*#__PURE__*/
React.createElement("path", {
  d: "M26,30H14a2,2,0,0,1-2-2V25h2v3H26V4H14V7H12V4a2,2,0,0,1,2-2H26a2,2,0,0,1,2,2V28A2,2,0,0,1,26,30Z"
});
var _ref3 =
/*#__PURE__*/
/*#__PURE__*/
React.createElement("path", {
  d: "M14.59 20.59L18.17 17 4 17 4 15 18.17 15 14.59 11.41 16 10 22 16 16 22 14.59 20.59z"
});

// 32x32 "Login" Carbon icon; forwards the ref and spreads extra props onto
// the underlying <Icon> element. Generated code — do not edit by hand.
var Login32 = /*#__PURE__*/React.forwardRef(function Login32(_ref, ref) {
  var children = _ref.children,
      rest = Icon._objectWithoutProperties(_ref, ["children"]);
  return /*#__PURE__*/React.createElement(Icon.Icon, Icon._extends({
    width: 32,
    height: 32,
    viewBox: "0 0 32 32",
    xmlns: "http://www.w3.org/2000/svg",
    fill: "currentColor",
    ref: ref
  }, rest), _ref2, _ref3, children);
});

module.exports = Login32;
|
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import unittest
import os
from gzip import GzipFile
from shutil import rmtree
import cPickle as pickle
import logging
import fcntl
import time
import tempfile
from contextlib import contextmanager
from eventlet.green import subprocess
from eventlet import Timeout, tpool
from test.unit import FakeLogger
from swift.common import utils
from swift.common.utils import hash_path, mkdirs, normalize_timestamp
from swift.common import ring
from swift.obj import replicator as object_replicator
from swift.obj.server import DiskFile
def _ips():
return ['127.0.0.0']
# Monkey-patch the replicator module so it believes the local IP is 127.0.0.0.
object_replicator.whataremyips = _ips
def mock_http_connect(status):
    """Build a drop-in replacement for http_connect whose connections
    always answer with the given HTTP status and an empty pickled body."""

    class FakeConn(object):
        def __init__(self, status, *args, **kwargs):
            self.status = status
            self.reason = 'Fake'
            # Positional layout mirrors the real http_connect signature.
            self.host, self.port = args[0], args[1]
            self.method, self.path = args[4], args[5]
            self.with_exc = False
            self.headers = kwargs.get('headers', {})

        def getresponse(self):
            # Mimics bufferedhttp: the connection doubles as its response.
            if self.with_exc:
                raise Exception('test')
            return self

        def getheader(self, header):
            return self.headers[header]

        def read(self, amt=None):
            # Replication responses carry a pickled hashes dict.
            return pickle.dumps({})

        def close(self):
            return

    return lambda *args, **kwargs: FakeConn(status, *args, **kwargs)
# Argument-validation failures recorded by MockProcess; tests assert on this.
process_errors = []


class MockProcess(object):
    """Stand-in for subprocess.Popen that validates the rsync command lines
    the replicator builds.

    The class attributes are Python 2 generators primed by _mock_process():
    ret_code yields wait() exit statuses, ret_log yields stdout contents,
    and check_args yields the argv fragments each spawn must contain.
    """
    ret_code = None
    ret_log = None
    check_args = None

    class Stream(object):
        # Minimal stdout stand-in: one canned read per spawned process.
        def read(self):
            return MockProcess.ret_log.next()

    def __init__(self, *args, **kwargs):
        targs = MockProcess.check_args.next()
        for targ in targs:
            # Record (rather than raise) so the test can assert afterwards.
            if targ not in args[0]:
                process_errors.append("Invalid: %s not in %s" % (targ,
                                                                 args))
        self.stdout = self.Stream()

    def wait(self):
        return self.ret_code.next()
@contextmanager
def _mock_process(ret):
    """Swap subprocess.Popen inside object_replicator for MockProcess,
    primed with `ret`: a list of (exit_code, stdout, required_argv) tuples,
    one per expected spawn. Restores the real Popen on exit.
    """
    orig_process = subprocess.Popen
    MockProcess.ret_code = (i[0] for i in ret)
    MockProcess.ret_log = (i[1] for i in ret)
    MockProcess.check_args = (i[2] for i in ret)
    object_replicator.subprocess.Popen = MockProcess
    yield
    # NOTE(review): not wrapped in try/finally, so an exception inside the
    # block leaves MockProcess installed — confirm whether that is intended.
    object_replicator.subprocess.Popen = orig_process
def _create_test_ring(path):
    """Write a 7-device, 3-replica object ring to `path`/object.ring.gz and
    return a Ring loaded from it (part shift 30, 15s reload time)."""
    testgz = os.path.join(path, 'object.ring.gz')
    # Three replica rows, one partition-to-device-id mapping each.
    intended_replica2part2dev_id = [
        [0, 1, 2, 3, 4, 5, 6],
        [1, 2, 3, 0, 5, 6, 4],
        [2, 3, 0, 1, 6, 4, 5],
    ]
    intended_devs = [
        {'id': 0, 'device': 'sda', 'zone': 0, 'ip': '127.0.0.0', 'port': 6000},
        {'id': 1, 'device': 'sda', 'zone': 1, 'ip': '127.0.0.1', 'port': 6000},
        {'id': 2, 'device': 'sda', 'zone': 2, 'ip': '127.0.0.2', 'port': 6000},
        {'id': 3, 'device': 'sda', 'zone': 4, 'ip': '127.0.0.3', 'port': 6000},
        {'id': 4, 'device': 'sda', 'zone': 5, 'ip': '127.0.0.4', 'port': 6000},
        {'id': 5, 'device': 'sda', 'zone': 6, 'ip': '127.0.0.5', 'port': 6000},
        {'id': 6, 'device': 'sda', 'zone': 7, 'ip': '127.0.0.6', 'port': 6000},
    ]
    intended_part_shift = 30
    intended_reload_time = 15
    pickle.dump(ring.RingData(intended_replica2part2dev_id,
                              intended_devs, intended_part_shift),
                GzipFile(testgz, 'wb'))
    return ring.Ring(testgz, reload_time=intended_reload_time)
class TestObjectReplicator(unittest.TestCase):
    """Tests for swift.obj.replicator.ObjectReplicator, run against a small
    on-disk test ring with rsync and http_connect replaced by mocks."""

    def setUp(self):
        # Fixed suffix so hash_path() results are reproducible across runs.
        utils.HASH_PATH_SUFFIX = 'endcap'
        # Setup a test ring (stolen from common/test_ring.py)
        self.testdir = tempfile.mkdtemp()
        self.devices = os.path.join(self.testdir, 'node')
        rmtree(self.testdir, ignore_errors=1)
        os.mkdir(self.testdir)
        os.mkdir(self.devices)
        os.mkdir(os.path.join(self.devices, 'sda'))
        self.objects = os.path.join(self.devices, 'sda', 'objects')
        os.mkdir(self.objects)
        self.parts = {}
        for part in ['0', '1', '2', '3']:
            self.parts[part] = os.path.join(self.objects, part)
            os.mkdir(os.path.join(self.objects, part))
        self.ring = _create_test_ring(self.testdir)
        self.conf = dict(
            swift_dir=self.testdir, devices=self.devices, mount_check='false',
            timeout='300', stats_interval='1')
        self.replicator = object_replicator.ObjectReplicator(
            self.conf)

    def tearDown(self):
        # NOTE(review): this rebinds a *local* name; it does not clear the
        # module-level process_errors list — confirm `del process_errors[:]`
        # was intended.
        process_errors = []
        rmtree(self.testdir, ignore_errors=1)

    def test_run_once(self):
        # A full run_once() must spawn exactly one rsync per remote node of
        # the partition, with the expected source path and module.
        replicator = object_replicator.ObjectReplicator(
            dict(swift_dir=self.testdir, devices=self.devices,
                 mount_check='false', timeout='300', stats_interval='1'))
        was_connector = object_replicator.http_connect
        object_replicator.http_connect = mock_http_connect(200)
        cur_part = '0'
        df = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o',
                      FakeLogger())
        mkdirs(df.datadir)
        f = open(os.path.join(df.datadir,
                              normalize_timestamp(time.time()) + '.data'),
                 'wb')
        f.write('1234567890')
        f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, cur_part, data_dir)
        process_arg_checker = []
        nodes = [node for node in
                 self.ring.get_part_nodes(int(cur_part)) \
                 if node['ip'] not in _ips()]
        for node in nodes:
            rsync_mod = '[%s]::object/sda/objects/%s' % (node['ip'], cur_part)
            process_arg_checker.append(
                (0, '', ['rsync', whole_path_from, rsync_mod]))
        with _mock_process(process_arg_checker):
            replicator.run_once()
        self.assertFalse(process_errors)
        object_replicator.http_connect = was_connector

    def test_get_hashes(self):
        # get_hashes() computes suffix hashes on first call, reuses them on
        # do_listdir, and rehashes the requested suffixes on recalculate.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        with open(os.path.join(df.datadir, normalize_timestamp(
            time.time()) + '.ts'), 'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = object_replicator.get_hashes(part)
        self.assertEquals(hashed, 1)
        self.assert_('a83' in hashes)
        hashed, hashes = object_replicator.get_hashes(part, do_listdir=True)
        self.assertEquals(hashed, 0)
        self.assert_('a83' in hashes)
        hashed, hashes = object_replicator.get_hashes(part,
                                                      recalculate=['a83'])
        self.assertEquals(hashed, 1)
        self.assert_('a83' in hashes)

    def test_hash_suffix_hash_dir_is_file_quarantine(self):
        # If the hash "directory" is actually a file, hash_suffix must hand
        # it to quarantine_renamer.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(os.path.dirname(df.datadir))
        open(df.datadir, 'wb').close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        orig_quarantine_renamer = object_replicator.quarantine_renamer
        called = [False]

        def wrapped(*args, **kwargs):
            called[0] = True
            return orig_quarantine_renamer(*args, **kwargs)

        try:
            object_replicator.quarantine_renamer = wrapped
            object_replicator.hash_suffix(whole_path_from, 101)
        finally:
            object_replicator.quarantine_renamer = orig_quarantine_renamer
        self.assertTrue(called[0])

    def test_hash_suffix_one_file(self):
        # A tombstone older than reclaim_age (second call, 99 < 100s) is
        # reclaimed and the now-empty suffix dir removed.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        f = open(os.path.join(df.datadir,
                              normalize_timestamp(time.time() - 100) + '.ts'),
                 'wb')
        f.write('1234567890')
        f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        object_replicator.hash_suffix(whole_path_from, 101)
        self.assertEquals(len(os.listdir(self.parts['0'])), 1)
        object_replicator.hash_suffix(whole_path_from, 99)
        self.assertEquals(len(os.listdir(self.parts['0'])), 0)

    def test_hash_suffix_multi_file_one(self):
        # With a .ts newest for every timestamp, everything older collapses
        # down to the single newest tombstone.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        for tdiff in [1, 50, 100, 500]:
            for suff in ['.meta', '.data', '.ts']:
                f = open(os.path.join(df.datadir,
                        normalize_timestamp(int(time.time()) - tdiff) + suff),
                        'wb')
                f.write('1234567890')
                f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hsh_path = os.listdir(whole_path_from)[0]
        whole_hsh_path = os.path.join(whole_path_from, hsh_path)
        object_replicator.hash_suffix(whole_path_from, 99)
        # only the tombstone should be left
        self.assertEquals(len(os.listdir(whole_hsh_path)), 1)

    def test_hash_suffix_multi_file_two(self):
        # The newest timestamps carry only .meta/.data, so both survive the
        # cleanup instead of a tombstone.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        for tdiff in [1, 50, 100, 500]:
            suffs = ['.meta', '.data']
            if tdiff > 50:
                suffs.append('.ts')
            for suff in suffs:
                f = open(os.path.join(df.datadir,
                        normalize_timestamp(int(time.time()) - tdiff) + suff),
                        'wb')
                f.write('1234567890')
                f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hsh_path = os.listdir(whole_path_from)[0]
        whole_hsh_path = os.path.join(whole_path_from, hsh_path)
        object_replicator.hash_suffix(whole_path_from, 99)
        # only the meta and data should be left
        self.assertEquals(len(os.listdir(whole_hsh_path)), 2)

    def test_invalidate_hash(self):

        def assertFileData(file_path, data):
            with open(file_path, 'r') as fp:
                fdata = fp.read()
                self.assertEquals(pickle.loads(fdata), pickle.loads(data))

        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, '0', data_dir)
        hashes_file = os.path.join(self.objects, '0',
                                   object_replicator.HASH_FILE)
        # test that a non-existent hashes file exception is caught
        self.assertEquals(object_replicator.invalidate_hash(whole_path_from),
                          None)
        # test that hashes get cleared
        check_pickle_data = pickle.dumps({data_dir: None},
                                         object_replicator.PICKLE_PROTOCOL)
        for data_hash in [{data_dir: None}, {data_dir: 'abcdefg'}]:
            with open(hashes_file, 'wb') as fp:
                pickle.dump(data_hash, fp, object_replicator.PICKLE_PROTOCOL)
            object_replicator.invalidate_hash(whole_path_from)
            assertFileData(hashes_file, check_pickle_data)

    def test_check_ring(self):
        # check_ring() is True until both the reload window has passed and
        # the ring file mtime changed.
        self.assertTrue(self.replicator.check_ring())
        orig_check = self.replicator.next_check
        self.replicator.next_check = orig_check - 30
        self.assertTrue(self.replicator.check_ring())
        self.replicator.next_check = orig_check
        orig_ring_time = self.replicator.object_ring._mtime
        self.replicator.object_ring._mtime = orig_ring_time - 30
        self.assertTrue(self.replicator.check_ring())
        self.replicator.next_check = orig_check - 30
        self.assertFalse(self.replicator.check_ring())

    def test_collect_jobs(self):
        jobs = self.replicator.collect_jobs()
        jobs_to_delete = [j for j in jobs if j['delete']]
        jobs_to_keep = [j for j in jobs if not j['delete']]
        jobs_by_part = {}
        for job in jobs:
            jobs_by_part[job['partition']] = job
        self.assertEquals(len(jobs_to_delete), 1)
        # NOTE(review): assertTrue with two arguments treats the second as a
        # message and always passes — assertEquals was probably intended.
        self.assertTrue('1', jobs_to_delete[0]['partition'])
        self.assertEquals(
            [node['id'] for node in jobs_by_part['0']['nodes']], [1, 2])
        self.assertEquals(
            [node['id'] for node in jobs_by_part['1']['nodes']], [1, 2, 3])
        self.assertEquals(
            [node['id'] for node in jobs_by_part['2']['nodes']], [2, 3])
        self.assertEquals(
            [node['id'] for node in jobs_by_part['3']['nodes']], [3, 1])
        for part in ['0', '1', '2', '3']:
            for node in jobs_by_part[part]['nodes']:
                self.assertEquals(node['device'], 'sda')
            self.assertEquals(jobs_by_part[part]['path'],
                              os.path.join(self.objects, part))

    def test_delete_partition(self):
        # Partition '1' does not belong to this node, so replicate() must
        # remove it after handoff.
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        part_path = os.path.join(self.objects, '1')
        self.assertTrue(os.access(part_path, os.F_OK))
        self.replicator.replicate()
        self.assertFalse(os.access(part_path, os.F_OK))

    def test_run_once_recover_from_failure(self):
        replicator = object_replicator.ObjectReplicator(
            dict(swift_dir=self.testdir, devices=self.devices,
                 mount_check='false', timeout='300', stats_interval='1'))
        was_connector = object_replicator.http_connect
        try:
            object_replicator.http_connect = mock_http_connect(200)
            # Write some files into '1' and run replicate- they should be moved
            # to the other partitoins and then node should get deleted.
            cur_part = '1'
            df = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o',
                          FakeLogger())
            mkdirs(df.datadir)
            f = open(os.path.join(df.datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, cur_part, data_dir)
            process_arg_checker = []
            nodes = [node for node in
                     self.ring.get_part_nodes(int(cur_part)) \
                     if node['ip'] not in _ips()]
            for node in nodes:
                rsync_mod = '[%s]::object/sda/objects/%s' % (node['ip'],
                                                             cur_part)
                process_arg_checker.append(
                    (0, '', ['rsync', whole_path_from, rsync_mod]))
            self.assertTrue(os.access(os.path.join(self.objects,
                                                   '1', data_dir, ohash),
                                      os.F_OK))
            with _mock_process(process_arg_checker):
                replicator.run_once()
            self.assertFalse(process_errors)
            # Every partition except the handed-off '1' should have a
            # hashes file afterwards.
            for i, result in [('0', True), ('1', False),
                              ('2', True), ('3', True)]:
                self.assertEquals(os.access(
                        os.path.join(self.objects,
                                     i, object_replicator.HASH_FILE),
                        os.F_OK), result)
        finally:
            object_replicator.http_connect = was_connector

    def test_run_once_recover_from_timeout(self):
        replicator = object_replicator.ObjectReplicator(
            dict(swift_dir=self.testdir, devices=self.devices,
                 mount_check='false', timeout='300', stats_interval='1'))
        was_connector = object_replicator.http_connect
        was_get_hashes = object_replicator.get_hashes
        was_execute = tpool.execute
        self.get_hash_count = 0
        try:

            def fake_get_hashes(*args, **kwargs):
                self.get_hash_count += 1
                if self.get_hash_count == 3:
                    # raise timeout on last call to get hashes
                    raise Timeout()
                return 2, {'abc': 'def'}

            def fake_exc(tester, *args, **kwargs):
                if 'Error syncing partition' in args[0]:
                    tester.i_failed = True

            self.i_failed = False
            object_replicator.http_connect = mock_http_connect(200)
            object_replicator.get_hashes = fake_get_hashes
            replicator.logger.exception = \
                lambda *args, **kwargs: fake_exc(self, *args, **kwargs)
            # Write some files into '1' and run replicate- they should be moved
            # to the other partitoins and then node should get deleted.
            cur_part = '1'
            df = DiskFile(self.devices, 'sda', cur_part, 'a', 'c', 'o',
                          FakeLogger())
            mkdirs(df.datadir)
            f = open(os.path.join(df.datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, cur_part, data_dir)
            process_arg_checker = []
            nodes = [node for node in
                     self.ring.get_part_nodes(int(cur_part)) \
                     if node['ip'] not in _ips()]
            for node in nodes:
                rsync_mod = '[%s]::object/sda/objects/%s' % (node['ip'],
                                                             cur_part)
                process_arg_checker.append(
                    (0, '', ['rsync', whole_path_from, rsync_mod]))
            self.assertTrue(os.access(os.path.join(self.objects,
                                                   '1', data_dir, ohash),
                                      os.F_OK))
            with _mock_process(process_arg_checker):
                replicator.run_once()
            self.assertFalse(process_errors)
            # The timeout must be handled without logging a sync failure.
            self.assertFalse(self.i_failed)
        finally:
            object_replicator.http_connect = was_connector
            object_replicator.get_hashes = was_get_hashes
            tpool.execute = was_execute

    def test_run(self):
        # Smoke test: replicate() completes with all subprocesses succeeding.
        with _mock_process([(0, '')] * 100):
            self.replicator.replicate()

    def test_run_withlog(self):
        # Smoke test: rsync output on stdout is tolerated.
        with _mock_process([(0, "stuff in log")] * 100):
            self.replicator.replicate()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
/**
 * Registers the 404 route with the app.core module.
 *
 * BUG FIX: the original wrapped this in `(function () { ... });` without the
 * trailing `()`, so the IIFE was never invoked and the run block was never
 * registered with Angular. The missing invocation is added here.
 */
(function () {
    'use strict';

    angular.module('app.core')
        .run(appRun);

    /* @ngInject */
    function appRun(routeHelper) {
        // Register the catch-all 404 state via the shared route helper.
        routeHelper.configure('/404', getStates());

        // State definition for the 404 page.
        function getStates() {
            return [
                {
                    state: '404',
                    config: {
                        url: '/404',
                        templateUrl: 'app/core/404.html',
                        title: '404'
                    }
                }
            ];
        }
    }
})();
|
/**
* @file csscomb rule
* @author chris<wfsr@foxmail.com>
*/
var path = require('path');
var Module = require('module');
/**
 * Create a detached Module instance anchored at `filename`, so that
 * subsequent require resolution happens relative to that file's directory.
 *
 * @param {string} filename - path the module should pretend to live at
 * @return {Module} the prepared module instance
 */
function createModule(filename) {
  const mod = new Module(filename);
  mod.filename = filename;
  // Search node_modules starting from the file's own directory upwards.
  mod.paths = Module._nodeModulePaths(path.dirname(filename));
  return mod;
}
// Resolve csscomb from the *requiring* package, then resolve gonzales-pe
// from csscomb itself, so the AST nodes created below match the exact
// parser version csscomb uses.
var csscomb = createModule(Module._resolveFilename('csscomb', module.parent));
var gonzales = createModule(Module._resolveFilename('gonzales-pe', csscomb));
// csscomb rule: enforce (or strip) whitespace before declaration
// delimiters (';') inside blocks.
module.exports = {
    name: 'space-after-value',

    // Must run before indentation is applied, or inserted spaces would
    // fight with block-indent's output.
    runBefore: 'block-indent',

    syntax: ['css', 'less', 'sass', 'scss'],

    // Config value: a number (of spaces) or a whitespace-only string.
    accepts: {
        number: true,
        string: /^[ \t\n]*$/
    },

    /**
     * Processes tree node.
     *
     * Walks the block's children backwards (so insertions/removals do not
     * disturb the remaining indices) and normalizes the whitespace that
     * precedes each declaration delimiter.
     *
     * @param {node} node AST node
     */
    process: function (node) {
        if (!node.is || !node.is('block')) {
            return;
        }

        var value = this.getValue(module.exports.name);
        for (var i = node.length; i--;) {
            if (!node.get(i).is('declarationDelimiter')) {
                continue;
            }

            var hasSpace = node.get(i - 1).is('space');

            // Configured to no space: drop an existing space node.
            if (!value && hasSpace) {
                node.remove(i - 1);
                continue;
            }

            // Configured to a space: insert one where missing.
            if (value && !hasSpace) {
                var space = gonzales.createNode({type: 'space', content: value});
                node.insert(i, space);
                continue;
            }
        }
    },

    /**
     * Detects the value of an option at the tree node.
     *
     * Returns the whitespace found before the first declaration delimiter
     * (scanning backwards), or '' when the delimiter has none.
     *
     * @param {node} node AST node
     * @return {string} the config value
     */
    detect: function (node) {
        if (!node.is('block')) {
            return;
        }

        for (var i = node.length; i--;) {
            if (!node.get(i).is('declarationDelimiter')) {
                continue;
            }

            return node.get(i - 1).is('space') ? node.get(i - 1).content : '';
        }
    }
};
|
import Link from 'next/link'

// Static "About" page for the Appainter Net frontend.
export default function About() {
  return (
    <>
      <h1>About Appainter Net</h1>
      <h2>
        <p>Minimalistic Nextjs frontend.</p>
        {/* Pre-Next-13 Link API: the href is applied to the <a> child. */}
        <Link href="/">
          <a>Back to home</a>
        </Link>
      </h2>
    </>
  )
}
|
/** @jsx jsx */
import {jsx, Styled, Container} from 'theme-ui'
import React, {useEffect, useState} from 'react' // eslint-disable-line
// import SEO from '../components/seo'
// import RenderModules from '../lib/renderModules'
import Product from '../components/dashboard/analytics/productRow'
const Fundraiser = ({path, pageContext}) => {
// console.log(pageContext)
const {
main: {
fundraiserId,
name,
menu,
organizer,
slug
}
} = pageContext
const [totalSales, setTotalSales] = useState(0)
const addToTotalSales = (amountToAdd) => {
const newTotal = totalSales + amountToAdd
setTotalSales(newTotal)
}
return (
<section>
<Container>
{/* <SEO metaInfo={meta} pagePath={slug.current} /> */}
<Styled.h2>Sales Data Dashboard</Styled.h2>
<p>For fundraiser: {name}</p>
<Styled.h3>Menu:</Styled.h3>
<ul sx={productList}>
{menu.map(menuItem => {
// console.log(menuItem)
const {
price,
productRef: {
id,
content: {
main: {
name,
image
}
}
}
} = menuItem
const productId = `${fundraiserId.current}-${id}`
return (
<li key={id}>
<Product {...menuItem} productId={productId} addToTotalSales={addToTotalSales} />
</li>
)
})}
</ul>
<h2>Total Sales across all products: ${totalSales}</h2>
</Container>
</section>
)
}
export default Fundraiser
const productList = {
li: {
py: 3,
fontSize: 2
}
}
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\sims\sim.py
# Compiled at: 2020-10-22 18:43:51
# Size of source mod 2**32: 217029 bytes
import functools, itertools, random
from animation.animation_interaction import AnimationInteraction
from animation.animation_overlay import AnimationOverlayComponent
from animation.animation_utils import AnimationOverrides
from animation.animation_utils import AsmAutoExitInfo, flush_all_animations
from animation.arb_accumulator import with_skippable_animation_time
from animation.awareness.awareness_component import AwarenessComponent
from animation.posture_manifest import Hand
from autonomy import autonomy_modes
from autonomy.autonomy_preference import AutonomyPreferenceType
from broadcasters.environment_score.environment_score_mixin import EnvironmentScoreMixin
from buffs.tunable import TunableBuffReference
from careers.school.school_tuning import SchoolTuning
from carry.carry_utils import get_carried_objects_gen
from carry.carrying_component import CarryingComponent
from carry.pick_up_sim_liability import WaitToBePickedUpLiability
from date_and_time import DateAndTime
from distributor.ops import SetRelativeLotLocation
from distributor.system import Distributor
from element_utils import build_critical_section_with_finally, build_element, build_critical_section
from ensemble.ensemble_component import EnsembleComponent
from event_testing import test_events
from event_testing.resolver import SingleSimResolver
from interactions import priority, constraints
from interactions.aop import AffordanceObjectPair
from interactions.base.interaction import Interaction
from interactions.base.super_interaction import RallySource
from interactions.context import InteractionContext, QueueInsertStrategy, InteractionSource
from interactions.interaction_finisher import FinishingType
from interactions.interaction_queue import InteractionQueue
from interactions.priority import Priority
from interactions.privacy import PrivacyViolators
from interactions.si_state import SIState
from interactions.utils.death import DeathTracker
from interactions.utils.interaction_liabilities import FITNESS_LIABILITY, FitnessLiability
from interactions.utils.routing import FollowPath
from objects import HiddenReasonFlag, VisibilityState, ALL_HIDDEN_REASONS
from objects.base_interactions import JoinInteraction, AskToJoinInteraction
from objects.components.carryable_component import CarryTargetInteraction
from objects.components.consumable_component import ConsumableComponent
from objects.game_object import GameObject
from objects.mixins import LockoutMixin
from objects.object_enums import ItemLocation, PersistenceType, ResetReason
from objects.part import Part
from postures import ALL_POSTURES, posture_graph
from postures.posture_specs import PostureSpecVariable, get_origin_spec
from postures.posture_state import PostureState
from postures.transition_sequence import DerailReason
from protocolbuffers.Consts_pb2 import MGR_OBJECT
from routing import SurfaceType, SurfaceIdentifier
from routing.portals.portal_tuning import PortalFlags
from services.reset_and_delete_service import ResetRecord
from sims.master_controller import WorkRequest
from sims.outfits.outfit_enums import OutfitCategory, OutfitChangeReason
from sims.outfits.outfit_tuning import OutfitTuning
from sims.sim_info_mixin import HasSimInfoMixin
from sims.sim_info_types import SpeciesExtended
from sims4.callback_utils import CallableList, consume_exceptions, RemovableCallableList
from sims4.geometry import test_point_in_polygon
from sims4.localization import TunableLocalizedString
from sims4.math import Transform
from sims4.tuning.instances import lock_instance_tunables
from sims4.tuning.tunable import Tunable, TunableList, TunableReference, TunableMapping, TunableThreshold, OptionalTunable
from sims4.tuning.tunable_base import GroupNames
from sims4.utils import classproperty, flexmethod, constproperty
from singletons import DEFAULT, EMPTY_SET
from socials.social_tests import SocialContextTest
from teleport.teleport_helper import TeleportHelper
from teleport.teleport_tuning import TeleportTuning
from terrain import get_water_depth, get_water_depth_at_location
from traits.trait_quirks import TraitQuirkSet
from uid import UniqueIdGenerator
from vehicles.vehicle_tuning import get_favorite_tag_for_surface
from world import region
from world.ocean_tuning import OceanTuning
import autonomy.autonomy_request, buffs.buff, build_buy, caches, cas.cas, clock, date_and_time, distributor.fields, distributor.ops, element_utils, elements, enum, gsi_handlers.sim_timeline_handlers, interactions.constraints, objects.components.topic_component, placement, routing, services, sims.multi_motive_buff_tracker, sims.ui_manager, sims4.log, statistics.commodity
try:
import _zone
except ImportError:
class _zone:
@staticmethod
def add_sim(_):
pass
@staticmethod
def remove_sim(_):
pass
# Module-wide logger for this file.
logger = sims4.log.Logger('Sim')

def __reload__(old_module_vars):
    # Hot-reload hook: carry the live autonomy service over so reloading the
    # script module does not drop the GLOBAL_AUTONOMY instance.
    global GLOBAL_AUTONOMY
    GLOBAL_AUTONOMY = old_module_vars['GLOBAL_AUTONOMY']
class SimulationState(enum.Int, export=False):
    # Lifecycle phases of a Sim game object, from construction through
    # destruction.  (enum.Int here is the game's tunable enum type — it
    # accepts the export kwarg — not the standard-library enum module.)
    INITIALIZING = 1
    RESETTING = 2
    SIMULATING = 3
    BEING_DESTROYED = 4
class LOSAndSocialConstraintTuning:
    # Static tuning values controlling fallback social-constraint geometry
    # and the pacing of route-nearby attempts during socials.  Each Tunable
    # carries its designer-facing description inline.
    constraint_expansion_amount = Tunable(description="\n The amount, in meters, to expand the Sim's current constraint by when\n calculating fallback social constraints. This number should be equal to\n the tuned radius for the standard social group constraint minus a\n nominal amount, such as 1 meter to prevent extremely small intersections\n from being considered valid.\n ",
      tunable_type=float,
      default=5)
    num_sides_for_circle_expansion_of_point_constraint = Tunable(description='\n The number of sides to use when creating a circle for expanding point\n constraints for the fallback social constraint.\n ',
      tunable_type=int,
      default=8)
    incompatible_target_sim_route_nearby_frequency = Tunable(description='\n The number of sim minutes to delay in between routing nearby the target\n Sim of a social interaction if they are in an incompatible state (such\n as sleeping).\n ',
      tunable_type=float,
      default=5)
    maximum_intended_distance_to_route_nearby = Tunable(description="\n The maximum distance in meters from the target Sim's current position to\n their intended position where a Sim will stop the target Sim instead of\n routing to their intended position. Note: this only applies to Sims who\n are trying to socialize with a target Sim at higher-priority than the\n interaction that Sim is running.\n ",
      tunable_type=float,
      default=20)
    minimum_delay_between_route_nearby_attempts = Tunable(description="\n The minimum delay, in Sim minutes, between route nearby attempts when a\n social is in the head of a Sim's queue. NOTE: This is performance-\n critical so please don't change this unless you know what you are doing.\n ",
      tunable_type=float,
      default=5)
    minimum_adjustment_cone_radius = Tunable(description='\n The minimum radius in meters, that the Sim needs to be in front of the\n target Sim when running social adjustment before a social super\n interaction.\n ',
      tunable_type=float,
      default=0.7)
    adjustment_cone_angle = Tunable(description='\n The angle in radians of the social adjustment cone in front of the\n target sim during a social super interaction.\n ',
      tunable_type=float,
      default=1.5707)
class Sim(HasSimInfoMixin, LockoutMixin, EnvironmentScoreMixin, GameObject):
INSTANCE_TUNABLES = {'_interaction_queue':InteractionQueue.TunableFactory(tuning_group=GroupNames.COMPONENTS),
'trait_quirks':TraitQuirkSet.TunableFactory(tuning_group=GroupNames.COMPONENTS),
'initial_buff':TunableBuffReference(description='\n A buff that will be permanently added to the Sim on creation. Used\n to affect the neutral state of a Sim.\n '),
'_phone_affordances':TunableList(description="\n A list of affordances generated when the player wants to use the\n Sim's cell phone.\n ",
tunable=TunableReference(description='\n An affordance that can be run as a solo interaction.\n ',
manager=(services.affordance_manager()),
pack_safe=True)),
'_relation_panel_affordances':TunableList(description='\n A list of affordances that are shown when the player clicks on a Sim\n in the relationship panel. These affordances must be able to run as\n solo interactions, meaning they cannot have a target object or Sim.\n \n When the selected interaction runs, the Subject type \n "PickedItemId" will be set to the clicked Sim\'s id. For example,\n a relationship change loot op with Subject as Actor and Target\n Subject as PickedItemId will change the relationship between the\n Active Sim and the Sim selected in the Relationship Panel.\n ',
tunable=TunableReference(description='\n An affordance shown when the player clicks on a relation in the\n relationship panel.\n ',
manager=(services.affordance_manager()),
pack_safe=True)),
'animation_overlay_component':AnimationOverlayComponent.TunableFactory(description='\n Tune animation overlays that are constantly played on this Sim.\n ',
tuning_group=GroupNames.COMPONENTS),
'_carrying_component':CarryingComponent.TunableFactory(description='\n Define how this Sim picks up, holds, and puts down carryable\n objects.\n ',
tuning_group=GroupNames.COMPONENTS),
'_awareness_component':OptionalTunable(description='\n If enabled, this Sim will react to stimuli using the client-driven\n awareness system.\n ',
tunable=AwarenessComponent.TunableFactory(),
tuning_group=GroupNames.COMPONENTS),
'_ensemble_component':OptionalTunable(description='\n If enabled, the Sim will have specific ensemble-related\n functionality. This is not a requirement for Sims to be in\n ensembles.\n ',
tunable=EnsembleComponent.TunableFactory(),
tuning_group=GroupNames.COMPONENTS),
'_school':OptionalTunable(description='\n If enabled, this Sim is required to be enrolled in school at\n specific ages.\n ',
tunable=SchoolTuning.TunableFactory(),
tuning_group=GroupNames.COMPONENTS)}
_reaction_triggers = {}
FOREIGN_ZONE_BUFF = buffs.buff.Buff.TunableReference(description='\n This buff is applied to any sim that is not in their home zone. It is\n used by autonomy for NPCs to score the GoHome interaction.\n ')
BUFF_CLOTHING_REASON = TunableLocalizedString(description='\n The localized string used to give reason why clothing buff was added.\n Does not support any tokens.\n ')
MULTI_MOTIVE_BUFF_MOTIVES = TunableMapping(description='\n Buffs, Motives and the threshold needed for that motive to count towards\n the multi motive buff\n ',
key_type=buffs.buff.Buff.TunableReference(description='\n Buff that is added when all the motives are above their threshold\n ',
pack_safe=True),
value_type=TunableMapping(description='\n Motives and the threshold needed for that motive to count towards\n the multi motive buff\n ',
key_type=statistics.commodity.Commodity.TunableReference(description='\n Motive needed above threshold to get the buff\n ',
pack_safe=True),
value_type=TunableThreshold(description='\n Threshold at which this motive counts for the buff\n ')))
    def __init__(self, *args, **kwargs):
        """Construct the in-world Sim object: attach components and initialize
        the (large) set of per-instance bookkeeping fields.  The backing
        SimInfo is assigned later via the sim_info property."""
        self._sim_info = None
        self._simulation_state = SimulationState.INITIALIZING
        (super().__init__)(*args, **kwargs)
        # Mandatory components, then the optional tunable ones.
        self.add_component(objects.components.topic_component.TopicComponent(self))
        self.add_component(objects.components.sim_inventory_component.SimInventoryComponent(self))
        self.add_component(self.animation_overlay_component(self))
        self.add_component(self._carrying_component(self))
        if self._awareness_component is not None:
            self.add_component(self._awareness_component(self))
        if self._ensemble_component is not None:
            self.add_component(self._ensemble_component(self))
        # Interaction/posture state.
        self.queue = None
        self._is_removed = False
        self._starting_up = False
        self._persistence_group = objects.persistence_groups.PersistenceGroups.SIM
        self._route_fail_disable_count = 0
        self._voice_pitch_override = None
        self.waiting_dialog_response = None
        self._posture_state = None
        self.target_posture = None
        self._si_state = SIState(self)
        self._obj_manager = None
        self._lot_routing_restriction_ref_count = 0
        # Social-group callbacks and membership.
        self.on_social_group_changed = CallableList()
        self._social_groups = []
        self.on_social_geometry_changed = CallableList()
        self.on_posture_event = CallableList()
        self._ui_manager = sims.ui_manager.UIManager(self)
        self._posture_compatibility_filter = []
        self._mixers_locked_out = {}
        self._front_page_cooldown = {}
        self.needs_fitness_update = False
        # Animation bookkeeping.
        self.asm_auto_exit = AsmAutoExitInfo()
        self.animation_interaction = None
        self.last_affordance = None
        self.last_animation_factory = None
        self._sleeping = False
        self._buff_handles = []
        self.interaction_logging = False
        self.transition_path_logging = False
        self._multi_motive_buff_trackers = []
        self._los_constraint = None
        self._social_group_constraint = None
        self.on_start_up = RemovableCallableList()
        self.object_ids_to_ignore = set()
        self._posture_target_refs = []
        self.next_passive_balloon_unlock_time = DateAndTime(0)
        self.two_person_social_transforms = {}
        self._intended_position_on_active_lot = False
        self.active_transition = None
        self._allow_route_instantly_when_hitting_marks = False
        self.ignore_blocking_near_destination = False
        self.current_object_set_as_head = None
        self._handedness = None
        self._locked_param_cache = {}
        self._socials_locked = False
        # Affordance-lock bookkeeping (see set_affordance_lock and friends).
        self._lock_id_generator = UniqueIdGenerator()
        self._affordance_locks = None
        self.asm_last_call_time = 0
        self.zero_length_asm_calls = 0
        self._dynamic_preroll_commodity_flags_map = None
        self._teleport_style_interactions_to_inject = None
    def __repr__(self):
        # Sim info is absent while the Sim is being created or after it has
        # been destroyed; distinguish the two via the simulation state.
        if self.sim_info is None:
            if self._simulation_state == SimulationState.INITIALIZING:
                return "sim 'Creating Sim - Unknown Name' {0:#x}".format(self.id)
            return "sim 'Destroyed Sim - Unknown Name' {0:#x}".format(self.id)
        return "<sim '{0}' {1:#x}>".format(self.full_name, self.id)

    def __str__(self):
        # NOTE: the unknown-name formats intentionally differ from __repr__'s
        # (no quotes/brackets in the destroyed case); the normal case is just
        # the Sim's full name.
        if self.sim_info is None:
            if self._simulation_state == SimulationState.INITIALIZING:
                return "sim 'Creating Sim - Unknown Name' {0:#x}".format(self.id)
            return 'Destroyed Sim - Unknown Name ID: {0:#x}'.format(self.id)
        return self.full_name
    @property
    def routing_surface(self):
        # Falls back to the parent's surface when this Sim has no world
        # routing surface of its own (e.g. while parented).
        return self._location.world_routing_surface or self.parent.routing_surface

    @classproperty
    def reaction_triggers(cls):
        return cls._reaction_triggers

    @constproperty
    def is_sim():
        return True

    @property
    def _anim_overrides_internal(self):
        # Assemble per-Sim animation parameters (sex/age/mood/species,
        # current walkstyle, optional subroot and occult overrides) merged on
        # top of the GameObject-level overrides.
        routing_component = self.routing_component
        path = routing_component.current_path
        if path:
            walkstyle = routing_component.get_walkstyle_for_path(path)
        else:
            walkstyle = self.get_default_walkstyle()
        params = {'sex':self.gender.animation_gender_param, 'age':self.age.animation_age_param, 
         'mood':self.get_mood_animation_param_name(), 
         'species':SpeciesExtended.get_animation_species_param(self.extended_species), 
         'walkstyle':walkstyle.animation_parameter, 
         'walkstyle_override':walkstyle}
        subroot = self._get_current_subroot()
        if subroot is not None:
            params['subroot'] = subroot
        if self.sim_info.occult_tracker is not None:
            params.update(self.sim_info.occult_tracker.get_anim_overrides())
        params.update(self._get_animation_skill_param())
        return AnimationOverrides(overrides=(super()._anim_overrides_internal), params=params)
    @property
    def sim_info(self):
        # The persistent SimInfo backing this in-world object (None during
        # construction and after destruction).
        return self._sim_info

    @sim_info.setter
    def sim_info(self, value):
        # Assigning a new SimInfo also refreshes the render rig.
        self._sim_info = value
        self.update_rig()

    def update_rig(self):
        self._rig = self._sim_info.rig_key

    @distributor.fields.Field(op=(distributor.ops.SetThumbnail))
    def thumbnail(self):
        # Distributed field: changes replicate to the client via SetThumbnail.
        return self.sim_info.thumbnail

    @thumbnail.setter
    def thumbnail(self, value):
        self.sim_info.thumbnail = value

    @distributor.fields.Field(op=(distributor.ops.SetOverrideDialogPitch))
    def voice_pitch_override(self):
        # Distributed field: replicated via SetOverrideDialogPitch ops.
        return self._voice_pitch_override

    @voice_pitch_override.setter
    def voice_pitch_override(self, value):
        self._voice_pitch_override = value

    @property
    def socials_locked(self):
        # Plain flag; consumers elsewhere decide its effect on socials.
        return self._socials_locked

    @socials_locked.setter
    def socials_locked(self, value):
        self._socials_locked = value
    @property
    def block_id(self):
        # Build/buy block (room) containing the Sim; 0 when no valid zone.
        if self.zone_id is not None:
            if self.zone_id != 0:
                return build_buy.get_block_id(self.zone_id, self.location.transform.translation, self.level)
        return 0

    @property
    def parented_vehicle(self):
        # The vehicle this Sim is parented to, or None when unparented or the
        # parent has no vehicle component.
        parent = self.parent
        if parent is None or parent.vehicle_component is None:
            return
        return parent

    @property
    def parent_may_move(self):
        parent = self.parent
        return parent is not None and parent.may_move

    @property
    def level(self):
        # Floor level, delegating to the parent when attached; pool routing
        # surfaces store the level offset by one (hence the -1).
        if self.parent is not None:
            return self.parent.level
        if self.in_pool:
            return self.location.routing_surface.secondary_id - 1
        return self.location.routing_surface.secondary_id

    @property
    @caches.cached(maxsize=None)
    def is_dying(self):
        # Cached buff check: queried often, changes rarely.
        return self.has_buff(DeathTracker.IS_DYING_BUFF)

    @property
    def is_selected(self):
        # True only when this Sim is the active Sim of the client that
        # controls its household.
        client = services.client_manager().get_client_by_household(self.household)
        if client is not None:
            return self is client.active_sim
        return False
    @property
    def transition_controller(self):
        # None when there is no queue yet or no transition in flight.
        if self.queue is not None:
            return self.queue.transition_controller

    def get_transition_global_asm_params(self):
        # Global ASM params of the interaction driving the current
        # transition; empty dict when there is none.
        if self.transition_controller is not None:
            if self.transition_controller.interaction is not None:
                if self.transition_controller.interaction.transition_global_asm_params is not None:
                    return self.transition_controller.interaction.transition_global_asm_params
        return dict()

    def get_transition_asm_params(self):
        # As above, but for the per-transition (non-global) ASM params.
        if self.transition_controller is not None:
            if self.transition_controller.interaction is not None:
                if self.transition_controller.interaction.transition_asm_params is not None:
                    return self.transition_controller.interaction.transition_asm_params
        return dict()

    @property
    def si_state(self):
        return self._si_state

    @property
    def is_valid_posture_graph_object(self):
        return True

    @property
    def should_route_fail(self):
        # Route failure behavior is suppressed while the disable count is
        # held non-zero (see without_route_failure).
        return self._route_fail_disable_count == 0

    @property
    def should_mark_as_new(self):
        return False

    def set_allow_route_instantly_when_hitting_marks(self, allow):
        self._allow_route_instantly_when_hitting_marks = allow

    @property
    def is_simulating(self):
        return self._simulation_state == SimulationState.SIMULATING
    @property
    def is_being_destroyed(self):
        return self._simulation_state == SimulationState.RESETTING and self.reset_reason() == ResetReason.BEING_DESTROYED

    @property
    def on_home_lot(self):
        # True only when the household owns the current zone AND the Sim is
        # physically on the active lot.
        current_zone = services.current_zone()
        if self.household.home_zone_id == current_zone.id:
            active_lot = current_zone.lot
            if active_lot.is_position_on_lot(self.position):
                return True
        return False

    @property
    def affordance_locks(self):
        return self._affordance_locks

    def is_affordance_locked(self, affordance):
        # No locks registered: nothing is locked.
        if self._affordance_locks is None:
            return False
        tunable_affordance_filters = []
        for affordance_filter in self._affordance_locks.values():
            # A lock registered with no filter locks every affordance.
            if affordance_filter is None:
                return True
            tunable_affordance_filters.append(affordance_filter)
        # Locked if ANY registered filter rejects this affordance; the
        # for/else only reaches `return False` when no filter broke out.
        for affordance_filter in tunable_affordance_filters:
            if not affordance_filter(affordance):
                break
        else:
            return False
        return True
def set_affordance_lock(self, affordance_filter=None):
unique_id = self._lock_id_generator()
if self._affordance_locks is None:
self._affordance_locks = {}
self._affordance_locks[unique_id] = affordance_filter
return unique_id
def remove_affordance_lock(self, lock_id):
if self._affordance_locks is None or lock_id not in self._affordance_locks:
logger.error('Trying to remove a lock id that was already removed or never existed: {}', lock_id)
del self._affordance_locks[lock_id]
if not self._affordance_locks:
self._affordance_locks = None
    def set_location_without_distribution(self, value):
        # Guard against suspicious teleports to the origin: once the Sim has
        # a real position, moving it unparented back to (0, 0, 0) is almost
        # certainly a bug, so log a callstack before applying the move.
        if self._location.transform.translation != sims4.math.Vector3.ZERO():
            if value.parent is None:
                if value.transform.translation == sims4.math.Vector3.ZERO():
                    logger.callstack(('Attempting to move an unparented object {} to position Zero'.format(self)), level=(sims4.log.LEVEL_ERROR))
        super().set_location_without_distribution(value)
def update_intended_position_on_active_lot(self, *_, update_ui=False, **__):
arrival_spawn_point = services.current_zone().active_lot_arrival_spawn_point
if not (services.active_lot().is_position_on_lot(self.intended_position) or arrival_spawn_point) is not None or test_point_in_polygon(self.intended_position, arrival_spawn_point.get_footprint_polygon()):
new_intended_position_on_active_lot = True
else:
new_intended_position_on_active_lot = False
on_active_lot = new_intended_position_on_active_lot
parent = self.parent
if parent is not None:
if parent.is_sim:
on_active_lot = True
msg = SetRelativeLotLocation(self.id, on_active_lot, self.sim_info.lives_here, self.sim_info.is_in_travel_group())
distributor = Distributor.instance()
distributor.add_op(self, msg)
on_off_lot_update = self._intended_position_on_active_lot != new_intended_position_on_active_lot
self._intended_position_on_active_lot = new_intended_position_on_active_lot
if on_off_lot_update or update_ui:
services.get_event_manager().process_event((test_events.TestEvent.SimActiveLotStatusChanged), sim_info=(self.sim_info), on_active_lot=new_intended_position_on_active_lot)
    def preload_outdoor_streetwear_change(self, final_si, preload_outfit_set):
        # Decide which outfit to preload for an upcoming outdoor transition:
        # weather-driven outfits win; otherwise, if the current outfit is
        # inappropriate streetwear, use the portal's on-entry outfit or fall
        # back to the default-outfit clothing change.
        weather_service = services.weather_service()
        if weather_service is not None:
            weather_outfit_category_and_index = weather_service.get_weather_outfit_change(SingleSimResolver(self._sim_info))
            if weather_outfit_category_and_index is not None:
                self.transition_controller.outdoor_streetwear_change[self.id] = weather_outfit_category_and_index
                preload_outfit_set.add(weather_outfit_category_and_index)
                return
        if self.sim_info._current_outfit[0] in OutfitTuning.INAPPROPRIATE_STREETWEAR:
            if self.transition_controller is None:
                return
            remaining_transitions = self.transition_controller.get_transition_specs(self)
            # Scan the remaining transition specs for the first portal that
            # dictates an entry outfit; the for/else leaves the result None
            # when no portal is found.
            for transition_spec in remaining_transitions:
                if transition_spec.portal_obj is not None:
                    outfit_category_and_index = transition_spec.portal_obj.get_on_entry_outfit(final_si, (transition_spec.portal_id), sim_info=(self.sim_info))
                    if outfit_category_and_index is None:
                        continue
                    # Reject portal outfits that are themselves inappropriate.
                    if outfit_category_and_index[0] in OutfitTuning.INAPPROPRIATE_STREETWEAR:
                        outfit_category_and_index = None
                    break
            else:
                outfit_category_and_index = None
            if outfit_category_and_index is None:
                outfit_category_and_index = self.sim_info.get_outfit_for_clothing_change(final_si, OutfitChangeReason.DefaultOutfit)
            self.transition_controller.outdoor_streetwear_change[self.id] = outfit_category_and_index
            preload_outfit_set.add(outfit_category_and_index)
    @distributor.fields.Field(op=(distributor.ops.SetSimSleepState))
    def sleeping(self):
        # Distributed field: replicated to the client via SetSimSleepState.
        return self._sleeping

    @sleeping.setter
    def sleeping(self, value):
        self._sleeping = value

    def save_object(self, object_list, item_location, container_id):
        # Intentional no-op: Sims do not persist through the generic object
        # save path (presumably persisted via their SimInfo — TODO confirm).
        pass
    def get_location_for_save(self):
        """Return (position, orientation, level, surface_id) to persist.

        A Sim mid-route is saved at its next non-portal path node rather than
        its instantaneous transform, so it reloads at a sensible spot.
        """
        for sim_primitive in self.primitives:
            if isinstance(sim_primitive, FollowPath):
                node = sim_primitive.get_next_non_portal_node()
                if node is None:
                    continue
                position = (sims4.math.Vector3Immutable)(*node.position)
                orientation = (sims4.math.QuaternionImmutable)(*node.orientation)
                break
        else:
            # Not following a path: save the current transform.
            transform = self.transform
            position = transform.translation
            orientation = transform.orientation
        if self.location.world_routing_surface is not None:
            level = self.location.level
        else:
            level = 0
        if self.location.routing_surface is not None:
            surface_id = self.location.routing_surface.type
        else:
            surface_id = 1
        return (position, orientation, level, surface_id)
    def get_inventory_proto_for_save(self):
        # Build the inventory save message, folding in parented items (and a
        # parented vehicle) that should persist inside the Sim's inventory.
        inventory_msg = self.inventory_component.save_items()
        if inventory_msg is None:
            return
        inventory = self.inventory_component
        for parented_item in self.children:
            if inventory.should_save_parented_item_to_inventory(parented_item):
                parented_item.save_object(inventory_msg.objects, ItemLocation.SIM_INVENTORY, self.id)
        vehicle = self.parented_vehicle
        if vehicle is not None:
            if inventory.should_save_parented_item_to_inventory(vehicle):
                vehicle.save_object(inventory_msg.objects, ItemLocation.SIM_INVENTORY, self.id)
        return inventory_msg
    def get_vehicles_for_path(self, path):
        """Return inventory vehicles deployable for `path`, with the Sim's
        favorite vehicle for this surface type (if any) placed first."""
        supported_vehicles = []
        favorites_tracker = self.sim_info.favorites_tracker
        favorite_vehicle = None
        favorite_tag = get_favorite_tag_for_surface(self.routing_surface.type)
        favorite_vehicle_id = None
        if favorites_tracker is not None:
            if favorite_tag is not None:
                favorite_vehicle_id = favorites_tracker.get_favorite_object_id(favorite_tag)
        for vehicle in self.inventory_component.vehicle_objects_gen():
            if vehicle.vehicle_component.should_deploy_for_path(path, self.routing_surface):
                # Hold the favorite aside so it can be prepended below.
                if vehicle.id == favorite_vehicle_id:
                    favorite_vehicle = vehicle
                    continue
                supported_vehicles.append(vehicle)
        if favorite_vehicle is not None:
            return [
             favorite_vehicle] + supported_vehicles
        return supported_vehicles
    def get_create_after_objs(self):
        # The owning SimInfo must be created before this Sim object.
        super_objs = super().get_create_after_objs()
        return (self.sim_info,) + super_objs

    def set_build_buy_lockout_state(self, lockout_state, lockout_timer=None):
        # Sims must never be build/buy locked out; treat as programmer error.
        raise AssertionError('Trying to illegally set a Sim as locked out: {}'.format(self))

    def without_route_failure(self, sequence=None):
        # Element wrapper: route failure is suppressed for the duration of
        # `sequence` and restored even on error (critical-section finally).
        def disable_route_fail(_):
            self._route_fail_disable_count += 1

        def enable_route_fail(_):
            self._route_fail_disable_count -= 1

        return build_critical_section_with_finally(disable_route_fail, sequence, enable_route_fail)
    @property
    def rig(self):
        return self._rig

    def inc_lot_routing_restriction_ref_count(self):
        # Keep visiting NPCs off the active lot by dropping the ON_LOT
        # footprint key from their pathplan mask.  Player Sims, residents,
        # and NPCs already standing on the lot are exempt.
        if not self.is_npc or self.sim_info.lives_here:
            return
        self._lot_routing_restriction_ref_count += 1
        if services.current_zone().lot.is_position_on_lot(self.position):
            return
        if self.pathplan_context.get_key_mask() & routing.FOOTPRINT_KEY_ON_LOT:
            self.pathplan_context.set_key_mask(self.pathplan_context.get_key_mask() & ~routing.FOOTPRINT_KEY_ON_LOT)

    def dec_lot_routing_restriction_ref_count(self):
        # Mirror of inc_...: restores the ON_LOT key when the last
        # outstanding restriction is released.
        if not self.is_npc or self.sim_info.lives_here:
            return
        if self._lot_routing_restriction_ref_count > 0:
            self._lot_routing_restriction_ref_count -= 1
            if self._lot_routing_restriction_ref_count == 0:
                self.pathplan_context.set_key_mask(self.pathplan_context.get_key_mask() | routing.FOOTPRINT_KEY_ON_LOT)

    def clear_lot_routing_restrictions_ref_count(self):
        # Unconditionally drop all restrictions and re-allow lot routing.
        self._lot_routing_restriction_ref_count = 0
        self.pathplan_context.set_key_mask(self.pathplan_context.get_key_mask() | routing.FOOTPRINT_KEY_ON_LOT)
    def execute_adjustment_interaction(self, affordance, constraint, int_priority, group_id=None, **kwargs):
        # Push a social-adjustment interaction that must run next; it is
        # canceled rather than route-failing when it cannot be satisfied.
        aop = AffordanceObjectPair(affordance, None, affordance, None, constraint_to_satisfy=constraint, 
         route_fail_on_transition_fail=False, 
         is_adjustment_interaction=True, **kwargs)
        context = InteractionContext(self, (InteractionContext.SOURCE_SOCIAL_ADJUSTMENT), int_priority, insert_strategy=(QueueInsertStrategy.NEXT), 
         group_id=group_id, 
         must_run_next=True, 
         cancel_if_incompatible_in_queue=True, 
         can_derail_if_constraint_invalid=False)
        return aop.test_and_execute(context)

    @property
    def ui_manager(self):
        return self._ui_manager
    def _update_social_geometry_on_location_changed(self, *args, **kwargs):
        # Keep the main social group's geometry in sync as this Sim moves.
        social_group = self.get_main_group()
        if social_group is not None:
            social_group.refresh_social_geometry(sim=self)

    def notify_social_group_changed(self, group):
        # Track local membership, then fan out to registered listeners.
        if self in group:
            if group not in self._social_groups:
                self._social_groups.append(group)
        elif group in self._social_groups:
            self._social_groups.remove(group)
        self.on_social_group_changed(self, group)

    def in_non_adjustable_posture(self):
        # Any posture aspect that disallows social adjustment pins the Sim.
        for aspect in self._posture_state.aspects:
            if not aspect.allow_social_adjustment:
                return True
        return False
    def filter_supported_postures(self, supported_postures):
        # ALL_POSTURES is a sentinel meaning "no restriction"; otherwise run
        # the posture set through each registered compatibility filter.
        filtered_postures = supported_postures
        if filtered_postures is ALL_POSTURES:
            return ALL_POSTURES
        for filter_func in self._posture_compatibility_filter:
            filtered_postures = filter_func(filtered_postures)
        return filtered_postures

    def may_reserve(self, *args, **kwargs):
        # Sims can never be reserved the way ordinary objects can.
        return False
    def on_reset_notification(self, reset_reason):
        # Entering the reset flow: flag the state and freeze the interaction
        # queue so nothing new can start while resetting.
        super().on_reset_notification(reset_reason)
        self._simulation_state = SimulationState.RESETTING
        self.queue.lock()
    def on_reset_get_interdependent_reset_records(self, reset_reason, reset_records):
        """Collect every other object that must reset along with this Sim:
        Sims whose queued interactions require us, social-group members,
        required Sims of our running/queued interactions, posture targets,
        and any object parented to our head."""
        super().on_reset_get_interdependent_reset_records(reset_reason, reset_records)
        master_controller = services.get_master_controller()
        master_controller.add_interdependent_reset_records(self, reset_records)
        for other_sim in master_controller.added_sims():
            if other_sim is not self and other_sim.is_sim and other_sim.has_sim_in_any_queued_interactions_required_sim_cache(self):
                reset_records.append(ResetRecord(other_sim, ResetReason.RESET_EXPECTED, self, 'In required sims of queued interaction.'))
        for social_group in self.get_groups_for_sim_gen():
            for game_object in social_group.get_objects_for_reset():
                if game_object is not self:
                    reset_records.append(ResetRecord(game_object, ResetReason.RESET_EXPECTED, self, 'In social group'))
        for interaction in self.get_all_running_and_queued_interactions():
            if interaction.prepared:
                for other_sim in interaction.required_sims():
                    if other_sim is not self:
                        reset_records.append(ResetRecord(other_sim, ResetReason.RESET_EXPECTED, self, 'required sim in {}'.format(interaction)))
        if self.posture_state is not None:
            for aspect in self.posture_state.aspects:
                target = aspect.target
                if target is not None:
                    # Parts reset through their owning object.
                    if target.is_part:
                        target = target.part_owner
                    reset_records.append(ResetRecord(target, ResetReason.RESET_EXPECTED, self, 'Posture state aspect:{} target:{}'.format(aspect, target)))
        if self.current_object_set_as_head is not None:
            object_on_head = self.current_object_set_as_head()
            if object_on_head is not None:
                if not self.has_component(objects.components.types.PARENT_TO_SIM_HEAD_COMPONENT):
                    # Head object exists without the managing component:
                    # destroy it rather than leak it.
                    self.current_object_set_as_head = None
                    reset_records.append(ResetRecord(object_on_head, (ResetReason.BEING_DESTROYED), 
                     source=self, 
                     cause='Destroying object parented to head outside parent to sim head component'))
                else:
                    if reset_reason == ResetReason.BEING_DESTROYED:
                        inventory = self.inventory_component
                        if inventory.should_save_parented_item_to_inventory(object_on_head):
                            reset_records.append(ResetRecord(object_on_head, (ResetReason.BEING_DESTROYED), 
                             source=self, 
                             cause='Destroying object parented to sim, should be stored in inventory'))
    def on_reset_restart(self):
        # Rebuild the animation scaffolding after a reset.
        # NOTE(review): returning False presumably lets the reset service
        # proceed with its default restart behavior — confirm against the
        # reset_and_delete_service contract.
        self._start_animation_interaction()
        self.start_animation_overlays()
        self.update_animation_overlays()
        return False
    def on_state_changed(self, state, old_value, new_value, from_init):
        # React to fat/fit commodity state changes: sync the values onto
        # sim_info, pick a physique-change reaction affordance, and push it.
        if not self.is_simulating:
            return
        else:
            affordances = self.sim_info.PHYSIQUE_CHANGE_AFFORDANCES
            reaction_affordance = None
            # NOTE(review): `not old_value != new_value or ...` reads as
            # "value unchanged OR state is fat/fit" — likely a decompiler
            # artifact of an inverted early-out; verify against original.
            if not old_value != new_value or state == ConsumableComponent.FAT_STATE or state == ConsumableComponent.FIT_STATE:
                self.needs_fitness_update = True
                if state == ConsumableComponent.FAT_STATE:
                    reaction_affordance = affordances.FAT_CHANGE_NEUTRAL_AFFORDANCE
                    fat_commodity = ConsumableComponent.FAT_COMMODITY
                    old_fat = self.sim_info.fat
                    new_fat = self.commodity_tracker.get_value(fat_commodity)
                    midrange_fat = (fat_commodity.max_value + fat_commodity.min_value) / 2
                    self.sim_info.fat = new_fat
                    # Above midrange and gaining: negative reaction (max at
                    # the cap); otherwise positive when losing fat.
                    if new_fat > midrange_fat:
                        if old_fat < new_fat:
                            if new_fat == fat_commodity.max_value:
                                reaction_affordance = affordances.FAT_CHANGE_MAX_NEGATIVE_AFFORDANCE
                            else:
                                reaction_affordance = affordances.FAT_CHANGE_NEGATIVE_AFFORDANCE
                    elif old_fat > new_fat:
                        reaction_affordance = affordances.FAT_CHANGE_POSITIVE_AFFORDANCE
                    elif new_fat == fat_commodity.min_value:
                        reaction_affordance = affordances.FAT_CHANGE_MAX_POSITIVE_AFFORDANCE
                else:
                    reaction_affordance = affordances.FIT_CHANGE_NEUTRAL_AFFORDANCE
                    old_fit = self.sim_info.fit
                    new_fit = self.commodity_tracker.get_value(ConsumableComponent.FIT_COMMODITY)
                    self.sim_info.fit = new_fit
                    if old_fit < new_fit:
                        reaction_affordance = affordances.FIT_CHANGE_POSITIVE_AFFORDANCE
                    else:
                        reaction_affordance = affordances.FIT_CHANGE_NEGATIVE_AFFORDANCE
                if reaction_affordance is not None:
                    context = InteractionContext(self, (InteractionContext.SOURCE_SCRIPT), (Priority.Low), client=None, pick=None)
                    result = self.push_super_affordance(reaction_affordance, None, context)
                    if result:
                        # The liability updates fitness when the reaction ends.
                        result.interaction.add_liability(FITNESS_LIABILITY, FitnessLiability(self))
                    return
            self.sim_info.update_fitness_state()
    def _on_navmesh_updated(self):
        # Navmesh changed under us: re-validate where we stand, then either
        # reset (uncontrolled/invalid path or deleted portal) or derail the
        # in-flight transition so it replans.
        self.validate_current_location_or_fgl()
        if self.transition_controller is not None:
            if self.routing_component.current_path is not None and self.routing_component.current_path.nodes.needs_replan():
                if self.transition_controller.succeeded:
                    self.reset(ResetReason.RESET_EXPECTED, None, 'Traversing a path that needs replanning but is not controlled by the transition sequence.')
                elif self.routing_component.current_path.final_location.transform != self.routing_component.current_path.intended_location.transform:
                    # Canceled/derailed path: only reset when the chosen
                    # cancel/derail position is no longer valid.
                    self.validate_location(self.routing_component.current_path.intended_location) or self.reset(ResetReason.RESET_EXPECTED, None, "Traversing an path that's been canceled or derailed and the position we've chosen to cancel/derail to is not valid.")
                else:
                    zone = services.current_zone()
                    if zone.is_in_build_buy:
                        self.transition_controller.derail(DerailReason.NAVMESH_UPDATED_BY_BUILD, self)
                    else:
                        self.transition_controller.derail(DerailReason.NAVMESH_UPDATED, self)
            elif self.transition_controller.sim_is_traversing_invalid_portal(self):
                self.reset(ResetReason.RESET_EXPECTED, None, 'Transitioning through a portal that was deleted.')
        # Cached two-Sim social transforms are position-dependent; drop them.
        self.two_person_social_transforms.clear()
def validate_location(self, location, from_reset=False):
    # Return True when `location` is a spot this Sim may legally occupy.
    # NOTE(review): this block appears decompiler-mangled (see the water-depth
    # condition and the agent_radius assignments below); the branch structure
    # here is a best-effort reconstruction — confirm against original source.
    if self.is_hidden(allow_hidden_flags=(ALL_HIDDEN_REASONS & ~HiddenReasonFlag.RABBIT_HOLE)):
        # Sims hidden for any reason other than a rabbit hole skip validation.
        return True
    routing_location = routing.Location(location.transform.translation, location.transform.orientation, location.routing_surface)
    water_depth = get_water_depth_at_location(routing_location)
    wading_interval = OceanTuning.get_actor_wading_interval(self)
    # NOTE(review): suspicious parenthesization — `(A or B or water_depth) <= bound`
    # compares an or-chain to the bound, and dereferences wading_interval even when
    # it may be None. Likely a decompilation artifact; verify intended grouping.
    if not routing_location.routing_surface.type == routing.SurfaceType.SURFACETYPE_POOL or (build_buy.is_location_pool(location.transform.translation, location.level) or wading_interval is None or water_depth) <= wading_interval.upper_bound:
        return False
    else:
        if not (wading_interval is None and water_depth > 0):
            if wading_interval is not None:
                if water_depth >= wading_interval.upper_bound:
                    # Too deep to wade here.
                    return False
        else:
            # Collect objects the Sim is actively interacting with (and their
            # parents) so their footprints don't invalidate the location.
            allowed_targets = set()
            for interaction in itertools.chain((self.queue.running,), self.si_state):
                if not interaction is None:
                    if interaction.target is None:
                        continue
                    allowed_targets.add(interaction.target)
                    allowed_targets.add(interaction.target.parent)
            if self.transition_controller is not None:
                allowed_targets.update(self.transition_controller.relevant_objects)
            # NOTE(review): duplicated `if obj is not None` filter below is a
            # decompilation artifact (harmless).
            contexts = {obj.raycast_context() for obj in allowed_targets if obj is not None if obj is not None}
            contexts.add(self.routing_context)
            on_object_surface = routing_location.routing_surface.type == routing.SurfaceType.SURFACETYPE_OBJECT
            test_portal_clearance = self.posture.unconstrained and not self.in_pool and not on_object_surface
            if not from_reset:
                if on_object_surface:
                    # NOTE(review): this value is immediately clobbered by the
                    # unconditional assignment below — probably a lost `else`
                    # in decompilation; confirm against original source.
                    agent_radius = self.posture.unconstrained or routing.FAKE_AGENT_RADIUS_FOR_OBJECT_ROUTING_SURFACE_VALIDATION
            agent_radius = routing.get_default_agent_radius()
            for context in contexts:
                if placement.validate_sim_location(routing_location, agent_radius, context, test_portal_clearance):
                    return True
            return False
def validate_current_location_or_fgl(self, from_reset=False):
    # Validate the Sim's current location; if invalid, find a good location
    # (FGL) nearby and move the Sim there, falling back to the landing strip.
    # NOTE(review): heavily decompiler-mangled; indentation below is a
    # best-effort reconstruction.
    parent = self.parent
    if parent is not None:
        return parent.is_sim or from_reset or None
        # NOTE(review): unreachable after the return above — probably a lost
        # conditional in decompilation; original likely cleared the parent
        # before continuing.
        self.clear_parent(parent.transform, parent.routing_surface)
    zone = services.current_zone()
    if not zone.is_in_build_buy:
        if not from_reset:
            # Swimming Sims in open water are allowed to stay put.
            ocean_data = OceanTuning.get_actor_ocean_data(self)
            can_swim = ocean_data is not None and ocean_data.beach_portal_data is not None
            return can_swim or self.should_be_swimming_at_position((self.position), (self.location.level), check_can_swim=False) or None
    if from_reset:
        if zone.is_in_build_buy:
            services.get_event_manager().process_event((test_events.TestEvent.OnBuildBuyReset), sim_info=(self.sim_info))
    if self.routing_component.current_path is not None:
        if from_reset:
            return
        if any((sim_primitive.is_traversing_invalid_portal() for sim_primitive in self.primitives if isinstance(sim_primitive, FollowPath))):
            self.reset(ResetReason.RESET_EXPECTED, self, 'Traversing invalid portal.')
            return
    location, on_surface = self.get_location_on_nearest_surface_below()
    if self.validate_location(location, from_reset=from_reset):
        if not on_surface:
            if not from_reset:
                self.reset(ResetReason.RESET_EXPECTED, self, 'Failed to validate location.')
            self.location = location
        return
    # Current location is invalid: search for a good location, ignoring this
    # Sim, its children, and its parent chain.
    ignored_object_ids = {self.sim_id}
    ignored_object_ids.update((child.id for child in self.children_recursive_gen()))
    parent_object = self.parent_object()
    while parent_object is not None:
        ignored_object_ids.add(parent_object.id)
        # FIXME: re-fetching self.parent_object() yields the same object every
        # iteration — infinite loop whenever parent_object is not None. The
        # original almost certainly walked parent_object.parent_object().
        parent_object = self.parent_object()
    search_flags = placement.FGLSearchFlagsDefault | placement.FGLSearchFlag.USE_SIM_FOOTPRINT | placement.FGLSearchFlag.STAY_IN_CURRENT_BLOCK
    starting_location = placement.create_starting_location(location=location)
    wading_interval = OceanTuning.get_actor_wading_interval(self)
    # Constrain the search's allowed water depth based on wading tuning.
    if wading_interval is None:
        min_wading_depth = None
        max_wading_depth = 0
    else:
        if starting_location.routing_surface.type == routing.SurfaceType.SURFACETYPE_POOL:
            min_wading_depth = wading_interval.upper_bound
            max_wading_depth = None
        else:
            min_wading_depth = None
            max_wading_depth = wading_interval.upper_bound

    def get_reset_location():
        # Run FGL from starting_location with the constraints computed above.
        fgl_context = placement.FindGoodLocationContext(starting_location, ignored_object_ids=ignored_object_ids,
          additional_avoid_sim_radius=(routing.get_sim_extra_clearance_distance()),
          search_flags=search_flags,
          routing_context=(self.routing_context),
          min_water_depth=min_wading_depth,
          max_water_depth=max_wading_depth)
        return placement.find_good_location(fgl_context)

    trans, orient = get_reset_location()
    new_location_routing_surface = starting_location.routing_surface

    def _push_location_to_world_surface():
        # Retarget the search onto the world surface at the best valid level.
        new_world_surface = routing.SurfaceIdentifier(services.current_zone_id(), self._get_best_valid_level(), routing.SurfaceType.SURFACETYPE_WORLD)
        starting_location.routing_surface = new_world_surface
        return new_world_surface

    if not new_location_routing_surface.type == routing.SurfaceType.SURFACETYPE_OBJECT or trans is None or orient is None:
        new_location_routing_surface = _push_location_to_world_surface()
        trans, orient = get_reset_location()
    else:
        if not routing.test_connectivity_pt_pt(self.routing_location, routing.Location(trans, orient, routing_surface=new_location_routing_surface), self.routing_context):
            new_location_routing_surface = _push_location_to_world_surface()
            trans, orient = get_reset_location()
        else:
            if trans is None or orient is None:
                if not from_reset:
                    self.reset(ResetReason.RESET_EXPECTED, self, 'Failed to find location.')
                    return
                self.fgl_reset_to_landing_strip()
                return
            # NOTE(review): this line resets the Sim even when a location was
            # found — likely a misplaced statement from decompilation.
            from_reset or self.reset(ResetReason.RESET_EXPECTED, self, 'Failed to find location.')
    new_transform = sims4.math.Transform(trans, orient)
    self.location = location.clone(transform=new_transform, routing_surface=new_location_routing_surface)
def fgl_reset_to_landing_strip(self):
    # Reset the Sim and relocate it to the active lot's arrival spawn point,
    # finding a good nearby location there. Returns True on success.
    # NOTE(review): the unconditional reset below fires before any relocation
    # is attempted — verify this ordering against the original source.
    self.reset(ResetReason.RESET_EXPECTED, self, 'Reset to landing strip.')
    zone = services.current_zone()
    spawn_point = zone.active_lot_arrival_spawn_point
    if spawn_point is None:
        # No arrival spawn point on the lot — fall back to the generic landing strip.
        self.move_to_landing_strip()
        return
    else:
        spawn_trans, _ = spawn_point.next_spawn_spot()
        location = routing.Location(spawn_trans, routing_surface=(spawn_point.routing_surface))
    success = False
    # Temporarily drop the on-lot footprint key so FGL can place the Sim at
    # the lot's edge; restore it in the finally block.
    if self.pathplan_context.get_key_mask() & routing.FOOTPRINT_KEY_ON_LOT:
        self.pathplan_context.set_key_mask(self.pathplan_context.get_key_mask() & ~routing.FOOTPRINT_KEY_ON_LOT)
        should_have_permission = True
    else:
        should_have_permission = False
    try:
        starting_location = placement.create_starting_location(location=location)
        fgl_context = placement.create_fgl_context_for_sim(starting_location, self, additional_avoid_sim_radius=(routing.get_default_agent_radius()),
          routing_context=(self.routing_context))
        trans, orient = placement.find_good_location(fgl_context)
        if trans is not None:
            if orient is not None:
                transform = Transform(trans, orient)
                if spawn_point is not None:
                    self.location = self.location.clone(routing_surface=(spawn_point.routing_surface),
                      transform=transform)
                else:
                    self.location = self.location.clone(transform=transform)
                success = True
    finally:
        if should_have_permission:
            self.pathplan_context.set_key_mask(self.pathplan_context.get_key_mask() | routing.FOOTPRINT_KEY_ON_LOT)
    return success
def _get_best_valid_level(self):
    # Find a level at this Sim's (x, z) position that actually has a floor.
    # Scan downward from the current level first; for below-ground Sims with
    # no floor found, scan upward toward ground level. Defaults to 0.
    pos = sims4.math.Vector3(self.position.x, self.position.y, self.position.z)
    current_level = self.routing_surface.secondary_id
    lowest_allowed = build_buy.get_lowest_level_allowed()
    level = current_level
    while level >= lowest_allowed:
        if build_buy.has_floor_at_location(pos, level):
            return level
        level -= 1
    if current_level < 0:
        for level in range(current_level, 0):
            if build_buy.has_floor_at_location(pos, level):
                return level
    return 0
def get_location_on_nearest_surface_below(self):
    # Return (location, on_surface): a location snapped to the nearest valid
    # surface below the Sim, and whether the Sim was already on that surface.
    # NOTE(review): else-pairing in this decompiled block is ambiguous; the
    # indentation below is a best-effort reconstruction — confirm.
    if self.posture_state.valid:
        if self.posture.unconstrained:
            if not ((self.posture.consider_constrained_for_on_surface_checks or self.active_transition) is not None):
                # Unconstrained, no transition: current location is trusted as-is.
                return (
                 self.location, True)
    else:
        location = self.location
        level = self._get_best_valid_level()
        if self.location.routing_surface.type == routing.SurfaceType.SURFACETYPE_POOL:
            # Swimming Sims stay on the pool surface; others fall back to world.
            surface_type = self._should_be_swimming() or routing.SurfaceType.SURFACETYPE_WORLD
        else:
            surface_type = None
        if level != location.routing_surface.secondary_id or surface_type is not None:
            routing_surface = routing.SurfaceIdentifier(location.routing_surface.primary_id, level, surface_type or location.routing_surface.type)
            location = location.clone(routing_surface=routing_surface)
        else:
            on_surface = False
            snapped_y = services.terrain_service.terrain_object().get_routing_surface_height_at(location.transform.translation.x, location.transform.translation.z, location.routing_surface)
            # Tolerance for deciding the Sim is already resting on the surface.
            LEVEL_SNAP_TOLERANCE = 0.001
            if location.routing_surface == self.routing_surface and sims4.math.almost_equal(snapped_y, (location.transform.translation.y), epsilon=LEVEL_SNAP_TOLERANCE):
                on_surface = True
            translation = sims4.math.Vector3(location.transform.translation.x, snapped_y, location.transform.translation.z)
            location = location.clone(translation=translation)
            return (location, on_surface)
def move_to_landing_strip(self):
    # Teleport the Sim to the zone's spawn point and fade it back in.
    # Logs a warning and leaves the Sim in place when no spawn point exists.
    zone = services.current_zone()
    spawn_point = zone.get_spawn_point()
    if spawn_point is None:
        logger.warn('No landing strip exists in zone {}', zone)
        return
    translation, _ = spawn_point.next_spawn_spot()
    self.location = self.location.clone(translation=translation, routing_surface=spawn_point.routing_surface)
    self.fade_in()
def fade_in(self, fade_duration=None, immediate=False, additional_channels=None):
    # Fade the Sim in; when seated in a vehicle posture, fade the vehicle in
    # too, synchronizing both on shared message channels unless immediate.
    if self.posture.is_vehicle:
        vehicle = self.posture.target
        if not immediate:
            if additional_channels is None:
                additional_channels = []
            additional_channels.extend(((MGR_OBJECT, vehicle.id, None), (MGR_OBJECT, self.id, None)))
        vehicle.fade_in(fade_duration=fade_duration, immediate=immediate, additional_channels=additional_channels)
    super().fade_in(fade_duration=fade_duration, immediate=immediate, additional_channels=additional_channels)
def fade_out(self, fade_duration=None, immediate=False, additional_channels=None):
    # Fade the Sim out; when seated in a vehicle posture, fade the vehicle out
    # too, synchronizing both on shared message channels unless immediate.
    if self.posture.is_vehicle:
        vehicle = self.posture.target
        if not immediate:
            if additional_channels is None:
                additional_channels = []
            additional_channels.extend(((MGR_OBJECT, vehicle.id, None), (MGR_OBJECT, self.id, None)))
        vehicle.fade_out(fade_duration=fade_duration, immediate=immediate, additional_channels=additional_channels)
    super().fade_out(fade_duration=fade_duration, immediate=immediate, additional_channels=additional_channels)
def _start_animation_interaction(self):
    # Create and store the hidden AnimationInteraction used to drive ASMs
    # outside a normal interaction. Delegates to create_animation_interaction()
    # so the construction logic lives in exactly one place (the original body
    # duplicated it verbatim).
    self.animation_interaction = self.create_animation_interaction()
def _stop_animation_interaction(self):
if self.animation_interaction is not None:
self.animation_interaction.cancel(FinishingType.RESET, 'Sim is being reset.')
self.animation_interaction.on_removed_from_queue()
self.animation_interaction = None
def create_animation_interaction(self):
    # Build and return a fresh hidden AnimationInteraction for this Sim,
    # created from a script-sourced, high-priority context.
    context = InteractionContext(self, InteractionContext.SOURCE_SCRIPT, priority.Priority.High)
    aop = AffordanceObjectPair(AnimationInteraction, None, AnimationInteraction, None)
    return aop.interaction_factory(context).interaction
def on_reset_internal_state(self, reset_reason):
    # Tear down the Sim's runtime state (queue, SI state, posture, overlays,
    # UI, etc.) when the Sim is reset or destroyed. Always chains to the
    # superclass in the finally block, even if teardown raises.
    being_destroyed = reset_reason == ResetReason.BEING_DESTROYED
    try:
        try:
            if not being_destroyed:
                # NPCs caught mid-bath (and not in a pool) get redressed.
                if self.is_npc:
                    if self.sim_info.get_current_outfit()[0] == OutfitCategory.BATHING:
                        if not self.in_pool:
                            self.set_current_outfit((OutfitCategory.EVERYDAY, 0))
                self.set_last_user_directed_action_time()
            else:
                # NOTE(review): else-pairing reconstructed — confirm that the
                # master controller is only notified on destruction.
                services.get_master_controller().on_reset_sim(self, reset_reason)
            self.hide(HiddenReasonFlag.NOT_INITIALIZED)
            self.queue.on_reset(being_destroyed)
            self.si_state.on_reset()
            self.socials_locked = False
            if self.posture_state is not None:
                self.posture_state.on_reset(ResetReason.RESET_EXPECTED)
            if not being_destroyed:
                self.sim_info.resend_current_outfit()
            self._posture_target_refs.clear()
            self._stop_environment_score()
            self._stop_animation_interaction()
            self.stop_animation_overlays()
            self.zero_length_asm_calls = 0
            self.ui_manager.remove_all_interactions()
            self.on_sim_reset(being_destroyed)
            self.clear_all_autonomy_skip_sis()
            if being_destroyed:
                self._remove_multi_motive_buff_trackers()
            self.asm_auto_exit.clear()
            self.last_affordance = None
            self.last_animation_factory = None
            if not being_destroyed:
                if not self._is_removed:
                    # Surviving Sims get moved somewhere valid and made visible
                    # again; failures here are logged but non-fatal.
                    try:
                        self.validate_current_location_or_fgl(from_reset=True)
                        self.refresh_los_constraint()
                        self.visibility = VisibilityState()
                        self.opacity = 1
                    except Exception:
                        logger.exception('Exception thrown while finding good location for Sim on reset:')
                    services.get_event_manager().process_event((test_events.TestEvent.OnSimReset), sim_info=(self.sim_info))
            self.two_person_social_transforms.clear()
            self.current_object_set_as_head = None
        except:
            logger.exception('TODO: Exception thrown during Sim reset, possibly we should be kicking the Sim out of the game.')
            raise
    finally:
        super().on_reset_internal_state(reset_reason)
def _reset_reference_arb(self):
    # Drop the cached reference ARB so the next request rebuilds it.
    self._reference_arb = None
def _create_motives(self):
    # Apply the tuned initial buff, if any, when the Sim's motives are set up.
    # NOTE(review): despite the name, this visible body only adds the initial
    # buff — confirm motive creation happens elsewhere.
    if self.initial_buff.buff_type is not None:
        self.add_buff(self.initial_buff.buff_type, self.initial_buff.buff_reason)
def running_interactions_gen(self, affordance):
    # Yield running super-interactions whose interaction type (or linked
    # interaction type) is a subclass of `affordance`'s interaction type.
    if self.si_state is None:
        return
    base_type = affordance.get_interaction_type()

    def _matches(si):
        # Direct type match first, then the linked-interaction fallback.
        if issubclass(si.get_interaction_type(), base_type):
            return True
        linked = si.get_linked_interaction_type()
        return linked is not None and issubclass(linked, base_type)

    for si in self.si_state.sis_actor_gen():
        if _matches(si):
            yield si
def get_all_running_and_queued_interactions(self):
    # Return every running super-interaction followed by everything currently
    # queued. Returns an empty list (with an error log) for removed Sims.
    if self.si_state is None or self.queue is None:
        logger.error('Trying to get the running and queued interactions from a Sim that has likely been removed. Sim={}', self)
        return []
    result = list(self.si_state.sis_actor_gen())
    result.extend(self.queue)
    return result
def get_running_and_queued_interactions_by_tag_or_affordance_type(self, type_affordances=EMPTY_SET, tags=EMPTY_SET):
    # Return the set of running/queued interactions that either share a
    # category tag with `tags` or whose type matches one of `type_affordances`.
    # Removed Sims yield an empty list (with an error log).
    if self.si_state is None or self.queue is None:
        logger.error('Trying to get the running and queued interactions by tag from a Sim that has likely been removed. Sim={}', self)
        return []
    matches = set()
    for interaction in self.get_all_running_and_queued_interactions():
        if tags & interaction.affordance.interaction_category_tags:
            matches.add(interaction)
            continue
        running_type = interaction.get_interaction_type()
        if any(issubclass(ta.get_interaction_type(), running_type) for ta in type_affordances):
            matches.add(interaction)
    return matches
def get_running_and_queued_interactions_by_tag(self, tags):
if self.si_state is None or self.queue is None:
logger.error('Trying to get the running and queued interactions by tag from a Sim that has likely been removed. Sim={}', self)
return []
interaction_set = set()
for si in self.si_state.sis_actor_gen():
if tags & si.affordance.interaction_category_tags:
interaction_set.add(si)
for si in self.queue:
if tags & si.affordance.interaction_category_tags:
interaction_set.add(si)
return interaction_set
def has_running_and_queued_interactions_with_liability(self, liability_type):
    # True if any running or queued interaction carries a liability of
    # `liability_type`. Rewritten from a manual loop to the idiomatic any().
    return any(interaction.get_liability(liability_type) is not None
               for interaction in self.get_all_running_and_queued_interactions())
def has_any_interaction_running_or_queued_of_tags(self, tags):
    # True if any running or queued interaction's affordance category tags
    # intersect `tags`. Rewritten from a manual loop to the idiomatic any().
    return any(tags & si.affordance.interaction_category_tags
               for si in self.get_all_running_and_queued_interactions())
def has_any_interaction_running_or_queued_of_types(self, interaction_types):
    # True if any running or queued interaction has an affordance that is a
    # subclass of one of `interaction_types`. Rewritten to nested any() calls.
    return any(
        any(issubclass(a, interaction_types) for a in si.affordances)
        for si in self.get_all_running_and_queued_interactions())
def has_sim_in_any_queued_interactions_required_sim_cache(self, sim_in_question):
    # True if any queued interaction already lists `sim_in_question` in its
    # required-sim cache.
    for interaction in self.queue:
        if interaction.has_sim_in_required_sim_cache(sim_in_question):
            return True
    return False
def get_running_interactions_by_tags(self, tags):
interaction_set = set()
for si in self.si_state.sis_actor_gen():
if tags & si.affordance.interaction_category_tags:
interaction_set.add(si)
return interaction_set
def has_any_pending_or_running_interactions(self):
    # True if the Sim has a visible, unfinished transition in progress, or any
    # running/queued interaction that "counts" (visible ones, or invisible
    # autonomous-picker ones, excluding pending-complete supers).
    controller = self.transition_controller
    if controller is not None and controller.interaction.visible and not controller.interaction.is_finishing:
        return True
    for interaction in self.get_all_running_and_queued_interactions():
        # Invisible interactions only count when they are autonomous pickers.
        if not interaction.visible and not interaction.is_autonomous_picker_interaction:
            continue
        # Super-interactions that are already pending completion don't count.
        if interaction.is_super and interaction.pending_complete:
            continue
        return True
    return False
@caches.cached
def _all_affordance_targets(self):
    # Build a frozenset of (affordance, target) pairs for every non-finishing
    # running SI, including linked interaction types and potential mixer
    # targets. Cached via @caches.cached.
    results = []
    if self.si_state is not None:
        for si in self.si_state.sis_actor_gen():
            if si.is_finishing:
                continue
            affordance = si.get_interaction_type()
            results.append((affordance, si.target))
            linked_affordance = si.get_linked_interaction_type()
            if linked_affordance is not None:
                results.append((linked_affordance, si.target))
            # Mixer targets count as targets for both the base and linked types.
            for other_target in si.get_potential_mixer_targets():
                results.append((affordance, other_target))
                if linked_affordance is not None:
                    results.append((linked_affordance, other_target))
    return frozenset(results)
@caches.cached
def _shared_affordance_targets(self, sim):
    # Return a frozenset of affordances that both this Sim and `sim` are
    # running against the same target; an empty tuple when none overlap.
    # Cached via @caches.cached.
    affordance_targets_a = self._all_affordance_targets()
    affordance_targets_b = sim._all_affordance_targets()
    both = affordance_targets_a & affordance_targets_b
    if both:
        result = frozenset((affordance for affordance, _ in both))
        return result
    return ()
def is_running_interaction(self, affordance, target):
    # True if (affordance, target) is among this Sim's cached pairs of
    # running affordances and their targets.
    return (affordance, target) in self._all_affordance_targets()
def are_running_equivalent_interactions(self, sim, affordance):
    # True if this Sim and `sim` are both running `affordance` against the
    # same target (per the shared-affordance cache).
    return affordance in self._shared_affordance_targets(sim)
def _provided_interactions_gen(self, context, **kwargs):
    # Yield AOPs for affordances provided by this Sim's running interactions,
    # plus club-provided interactions and interactions provided by objects the
    # querying Sim is carrying.
    _generated_affordance = set()
    for interaction in self.si_state:
        if interaction.is_finishing:
            continue
        for affordance_data in interaction.affordance.provided_affordances:
            affordance = affordance_data.affordance
            # Each provided affordance is yielded at most once.
            if affordance in _generated_affordance:
                continue
            if context.source == InteractionSource.AUTONOMY:
                if not affordance.allow_autonomous:
                    continue
            if context.sim is self:
                if not affordance_data.allow_self:
                    continue
            if context.sim is not None:
                # Skip if the querying Sim already runs this against us, or
                # both Sims already run an equivalent interaction.
                if context.sim.is_running_interaction(affordance, self):
                    continue
                if self.are_running_equivalent_interactions(context.sim, affordance):
                    continue
            target = interaction.get_participant(affordance_data.target)
            target = target if target is not None else self
            if not affordance_data.object_filter.is_object_valid(target):
                logger.error('Provided Affordance {} from {} is not valid to run on {}', affordance, interaction, target, owner='rmccord')
                continue
            carry_target = interaction.get_participant(affordance_data.carry_target)
            provided_affordance = affordance
            if carry_target is not None:
                # Wrap so the generated interaction carries the right object.
                provided_affordance = CarryTargetInteraction.generate(affordance, carry_target)
            _generated_affordance.add(affordance)
            if affordance_data.is_linked:
                # Linked affordances are canceled with (or once) the provider SI.
                depended_on_si = interaction
                depended_on_until_running = affordance_data.unlink_if_running
            else:
                depended_on_si = None
                depended_on_until_running = False
            yield from (provided_affordance.potential_interactions)(target, context, depended_on_si=depended_on_si, depended_on_until_running=depended_on_until_running, **kwargs)
    club_service = services.get_club_service()
    if club_service is not None:
        for club, affordance in club_service.provided_clubs_and_interactions_gen(context, target=self):
            aop = AffordanceObjectPair(affordance, self, affordance, None, associated_club=club, **kwargs)
            if aop.test(context):
                yield aop
    if context.sim is not None:
        if context.sim is not self:
            # Objects carried by the querying Sim may provide AOPs targeting us.
            for _, _, carried_object in get_carried_objects_gen(context.sim):
                yield from (carried_object.get_provided_aops_gen)(self, context, **kwargs)
def get_object_provided_target_affordances_gen(self, target, context, **kwargs):
    # Yield AOPs for target super-affordances provided by objects in this
    # Sim's inventory, tagging each with the providing object's id.
    # The decompiler's trailing `if False: yield None` was removed: the
    # `yield from` below already makes this function a generator, so the dead
    # branch had no effect.
    sim_inventory_component = self.get_component(objects.components.types.INVENTORY_COMPONENT)
    if sim_inventory_component is None:
        return
    shift_held = context.shift_held if context is not None else False
    for affordance, provided_affordance_data in sim_inventory_component.get_cached_target_super_affordances_gen(context, target):
        if self._can_show_affordance(shift_held, affordance):
            kwargs_copy = kwargs.copy()
            # Record which inventory object is providing this affordance.
            kwargs_copy['object_providing_target_affordance'] = provided_affordance_data.provider_id
            yield from (affordance.potential_interactions)(target, context, **kwargs_copy)
def _potential_joinable_interactions_gen(self, context, **kwargs):
    # Yield join / invite-to-join AOPs between this Sim and context.sim, based
    # on the joinable tuning of each Sim's running interactions.

    def get_target(interaction, join_participant):
        # Resolve the join target participant; parts resolve to their owner.
        join_target = interaction.get_participant(join_participant)
        if join_target:
            if isinstance(join_target, Part):
                join_target = join_target.part_owner
        return join_target

    def get_join_affordance(default, join_info, joining_sim, target):
        # Resolve which affordance the joining Sim should run, or (None, ...)
        # when joining isn't possible / is already happening.
        if join_info.join_affordance.is_affordance:
            join_affordance = join_info.join_affordance.value
            if join_affordance is None:
                join_affordance = default
            if target is not None:
                # Already running the same interaction on the same target?
                for interaction in joining_sim.si_state:
                    if interaction.get_interaction_type() is join_affordance:
                        interaction_join_target = get_target(interaction, join_info.join_target)
                        if interaction_join_target is target:
                            return (
                             None, target)
            return (
             join_affordance, target)
        # Commodity-search joins are not available to autonomy.
        if context.source == InteractionSource.AUTONOMY:
            return (
             None, target)
        commodity_search = join_info.join_affordance.value
        for interaction in joining_sim.si_state:
            if commodity_search.commodity in interaction.commodity_flags:
                return (
                 None, target)
        # Ask autonomy for the best action satisfying the commodity within a
        # radius around the join target.
        join_context = InteractionContext(joining_sim, (InteractionContext.SOURCE_AUTONOMY), (Priority.High),
          client=None, pick=None, always_check_in_use=True)
        constraint = constraints.Circle(target.position, commodity_search.radius, target.routing_surface)
        autonomy_request = autonomy.autonomy_request.AutonomyRequest(joining_sim,
          (autonomy_modes.FullAutonomy), static_commodity_list=(commodity_search.commodity,), context=join_context,
          constraint=constraint,
          limited_autonomy_allowed=True,
          consider_scores_of_zero=True,
          allow_forwarding=False,
          autonomy_mode_label_override='Joinable')
        best_action = services.autonomy_service().find_best_action(autonomy_request)
        if best_action:
            return (
             best_action, best_action.target)
        return (
         None, target)

    def get_join_aops_gen(interaction, join_sim, joining_sim, join_factory):
        # Yield testable AOPs for one joinable interaction.
        interaction_type = interaction.get_interaction_type()
        join_target_ref = join_sim.ref()
        for joinable_info in interaction.joinable:
            # Respect per-direction availability tuning.
            if join_sim is self:
                if not joinable_info.join_available:
                    continue
            else:
                if join_sim is context.sim:
                    if not joinable_info.invite_available:
                        continue
            join_target = get_target(interaction, joinable_info.join_target)
            if join_target is None and interaction.sim is not self:
                continue
            joinable_interaction, join_target = get_join_affordance(interaction_type, joinable_info, joining_sim, join_target)
            if joinable_interaction is None:
                continue
            join_interaction = join_factory(joinable_interaction.affordance, joining_sim, interaction, joinable_info)
            for aop in (join_interaction.potential_interactions)(join_target, context, join_target_ref=join_target_ref, **kwargs):
                result = aop.test(context)
                # Yield failures too when they carry a tooltip (grayed-out UI).
                if result or result.tooltip:
                    yield aop

    def create_join_si(affordance, joining_sim, join_interaction, joinable_info):
        return JoinInteraction.generate(affordance, join_interaction, joinable_info)

    def create_invite_to_join_si(affordance, joining_sim, join_interaction, joinable_info):
        return AskToJoinInteraction.generate(affordance, joining_sim, join_interaction, joinable_info)

    if context.sim is not None:
        # context.sim joining this Sim's interactions...
        for interaction in self.si_state.sis_actor_gen():
            if interaction.joinable and not interaction.is_finishing:
                for aop in get_join_aops_gen(interaction, self, context.sim, create_join_si):
                    yield aop
        # ...and this Sim being invited into context.sim's interactions.
        for interaction in context.sim.si_state.sis_actor_gen():
            if interaction.joinable and not interaction.is_finishing:
                for aop in get_join_aops_gen(interaction, context.sim, self, create_invite_to_join_si):
                    yield aop
def potential_preroll_interactions(self, context, get_interaction_parameters=None, **kwargs):
    # Collect preroll AOPs from the superclass, then extend with the preroll
    # affordances tuned on this Sim's active role states.
    potential_affordances = (super().potential_preroll_interactions)(context, get_interaction_parameters=get_interaction_parameters, **kwargs)
    active_roles = self.active_roles()
    if active_roles is None:
        return potential_affordances
    role_affordances = set((role_affordance for active_role in active_roles for role_affordance in active_role.preroll_affordances))
    for affordance in role_affordances:
        if not affordance.is_affordance_available(context=context):
            continue
        else:
            if not self.supports_affordance(affordance):
                continue
            # Allow the caller to customize per-affordance parameters.
            if get_interaction_parameters is not None:
                interaction_parameters = get_interaction_parameters(affordance, kwargs)
            else:
                interaction_parameters = kwargs
            for aop in (affordance.potential_interactions)(self, context, **interaction_parameters):
                potential_affordances.append(aop)
    return potential_affordances
def _potential_behavior_affordances_gen(self, context, **kwargs):
    # Yield the super-affordances this Sim offers, gathered from roles, the
    # sim_info, traits, commodities, inventory, unlocks and careers, honoring
    # debug (shift-held) visibility filtering throughout.

    def _get_role_state_affordances_gen(active_roles, use_target=False):
        # Affordances tuned on role states; `use_target` selects the
        # target-facing list instead of the actor-facing one.
        if active_roles is not None:
            role_affordances = set((role_affordance for active_role in active_roles for role_affordance in (active_role.role_target_affordances if use_target else active_role.role_affordances)))
            for affordance in role_affordances:
                if self._can_show_affordance(shift_held, affordance):
                    yield affordance

    shift_held = False
    if context is not None:
        shift_held = context.shift_held
    yield from _get_role_state_affordances_gen(self.active_roles())
    for affordance in self.sim_info.get_super_affordance_availability_gen():
        if self._can_show_affordance(shift_held, affordance):
            yield affordance
    for affordance in self.sim_info.trait_tracker.get_cached_super_affordances_gen():
        if self._can_show_affordance(shift_held, affordance):
            yield affordance
    for affordance in self.sim_info.commodity_tracker.get_cached_super_affordances_gen():
        if self._can_show_affordance(shift_held, affordance):
            yield affordance
    for affordance in self.inventory_component.get_cached_super_affordances_gen():
        if self._can_show_affordance(shift_held, affordance):
            yield affordance
    if self.sim_info.unlock_tracker is not None:
        for affordance in self.sim_info.unlock_tracker.get_cached_super_affordances_gen():
            if self._can_show_affordance(shift_held, affordance):
                yield affordance
    if self.sim_info.career_tracker is not None:
        for affordance in self.sim_info.career_tracker.get_cached_super_affordances_gen():
            if self._can_show_affordance(shift_held, affordance):
                yield affordance
    if context is not None:
        if context.sim is not None:
            # Target-facing role affordances come from the *querying* Sim's roles.
            yield from _get_role_state_affordances_gen((context.sim.active_roles()), use_target=True)
    yield from (super()._potential_behavior_affordances_gen)(context, **kwargs)
def _get_interactions_gen(self, context, get_interaction_parameters, affordance, **kwargs):
    # Yield potential AOPs for a single affordance targeting this Sim, after
    # source-specific filtering.
    if context.source == InteractionSource.AUTONOMY:
        if not affordance.allow_autonomous:
            return
    else:
        # NOTE(review): the duplicate-interaction checks only run for
        # non-autonomy sources here — verify this else-pairing against the
        # original source (it may be a decompilation artifact).
        if context.sim is not None:
            if context.sim.is_running_interaction(affordance, self):
                return
            if self.are_running_equivalent_interactions(context.sim, affordance):
                return
    if get_interaction_parameters is not None:
        interaction_parameters = get_interaction_parameters(affordance, kwargs)
    else:
        interaction_parameters = kwargs
    yield from (affordance.potential_interactions)(self, context, **interaction_parameters)
    # Decompiler idiom: dead branch that only forces generator semantics.
    if False:
        yield None
def potential_interactions(self, context, get_interaction_parameters=None, ignored_objects=None, **kwargs):
    # Public entry point for gathering this Sim's potential interactions.
    # NOTE(review): `ignored_objects` is accepted but not forwarded to the
    # cached internal generator — confirm this is intentional.
    return (self._potential_interactions_internal)(context, get_interaction_parameters=get_interaction_parameters, **kwargs)
@caches.cached_generator
def _potential_interactions_internal(self, context, get_interaction_parameters=None, **kwargs):
    # Aggregate all sources of potential interactions on this Sim: its own
    # super-affordances, provided affordances, relationship-bit affordances,
    # carried/template-provided ones, ensembles, joins, SI forwarding, and
    # component-provided interactions. Results cached via @caches.cached_generator.
    for affordance in self.super_affordances(context):
        yield from (self._get_interactions_gen)(context, get_interaction_parameters, affordance, **kwargs)
    yield from (self._provided_interactions_gen)(context, **kwargs)
    if context.sim is not None:
        # Affordances granted by relationship bits toward this Sim.
        for relbit in context.sim.sim_info.relationship_tracker.get_all_bits(self.sim_id):
            for affordance in relbit.super_affordances:
                yield from (self._get_interactions_gen)(context, get_interaction_parameters, affordance, **kwargs)
    if context.sim is not None:
        yield from (context.sim.get_object_provided_target_affordances_gen)(self, context, **kwargs)
    else:
        yield from (self.sim_info.template_affordance_tracker.get_template_interactions_gen)(context, **kwargs)
    if context.source == InteractionSource.AUTONOMY:
        if context.sim is self:
            for ensemble in services.ensemble_service().get_all_ensembles_for_sim(self):
                yield from (ensemble.get_ensemble_autonomous_interactions_gen)(context, **kwargs)
    # NOTE(review): block structure below reconstructed — joins when another
    # Sim is querying, SI forwarding when querying self.
    if context.sim is not self:
        yield from (self._potential_joinable_interactions_gen)(context, **kwargs)
    else:
        for si in self.si_state.sis_actor_gen():
            for affordance in si.all_affordances_gen():
                for aop in (affordance.potential_interactions)((si.target), (si.affordance), si, **kwargs):
                    if aop.affordance.is_allowed_to_forward(self):
                        yield aop
    if context.sim is self:
        yield from (self.get_component_potential_interactions_gen)(context, get_interaction_parameters, **kwargs)
def potential_phone_interactions(self, context, **kwargs):
    # Yield AOPs for this Sim's phone affordances, then any phone
    # interactions provided by clubs.
    for affordance in self._phone_affordances:
        yield from affordance.potential_interactions(self, context, **kwargs)
    club_service = services.get_club_service()
    if club_service is None:
        return
    yield from club_service.provided_clubs_and_interactions_for_phone_gen(context)
def potential_relation_panel_interactions(self, context, **kwargs):
    # Yield AOPs for the affordances shown in the relationship panel.
    for affordance in self._relation_panel_affordances:
        yield from affordance.potential_interactions(self, context, **kwargs)
def locked_from_obj_by_privacy(self, obj):
    # True if an active privacy region that shuts this Sim out overlaps `obj`.

    def _applies_to_sim(privacy):
        # Only regions that treat Sims as violators matter.
        if not privacy.privacy_violators & PrivacyViolators.SIM:
            return False
        if self in privacy.allowed_sims:
            return False
        # Sims not explicitly disallowed may still be granted access.
        if self not in privacy.disallowed_sims and privacy.evaluate_sim(self):
            return False
        return True

    return any(_applies_to_sim(privacy) and privacy.intersects_with_object(obj)
               for privacy in services.privacy_service().privacy_instances)
@flexmethod
def super_affordances(cls, inst, context=None):
    # Flexmethod: works on either the class or an instance; delegates to the
    # implementation above GameObject in the MRO.
    inst_or_cls = inst if inst is not None else cls
    for affordance in super(GameObject, inst_or_cls).super_affordances(context):
        yield affordance
@staticmethod
def _get_mixer_key(target, affordance, sim_specific):
mixer_lockout_key = affordance.get_mixer_key_override(target)
if mixer_lockout_key is not None:
return mixer_lockout_key
if sim_specific:
if target is not None:
if target.is_sim:
return (
affordance, target.id)
return affordance
def set_sub_action_lockout(self, mixer_interaction, target=None, lock_other_affordance=False, initial_lockout=False):
    # Lock out a mixer (and optionally its tuned sibling affordances) for a
    # randomized duration, keyed per-target when the tuning says so.
    # NOTE(review): the `target` parameter is unused in this visible body —
    # keys are built from mixer_interaction.target instead; confirm.
    now = services.time_service().sim_now
    if initial_lockout:
        # Initial lockout uses its own tuned interval and is never sim-specific.
        lockout_time = mixer_interaction.lock_out_time_initial.random_float()
        sim_specific = False
    else:
        lockout_time = mixer_interaction.lock_out_time.interval.random_float()
        sim_specific = mixer_interaction.lock_out_time.target_based_lock_out
    lockout_time_span = clock.interval_in_sim_minutes(lockout_time)
    lock_out_time = now + lockout_time_span
    mixer_lockout_key = self._get_mixer_key(mixer_interaction.target, mixer_interaction.affordance, sim_specific)
    self._mixers_locked_out[mixer_lockout_key] = lock_out_time
    if not initial_lockout:
        if lock_other_affordance:
            if mixer_interaction.lock_out_affordances is not None:
                # Lock out sibling affordances for the same duration.
                for affordance in mixer_interaction.lock_out_affordances:
                    sim_specific = affordance.lock_out_time.target_based_lock_out if affordance.lock_out_time is not None else False
                    mixer_lockout_key = self._get_mixer_key(mixer_interaction.target, affordance, sim_specific)
                    self._mixers_locked_out[mixer_lockout_key] = lock_out_time
def update_last_used_interaction(self, interaction):
    # Bookkeeping after an interaction is used: apply mixer lockouts and
    # accumulate front-page (content score) cooldown penalties.
    if not interaction.is_super:
        if interaction.lock_out_time is not None:
            self.set_sub_action_lockout(interaction, lock_other_affordance=True)
    front_page_cooldown = interaction.content_score.front_page_cooldown if interaction.content_score is not None else None
    if front_page_cooldown is not None:
        cooldown_time = front_page_cooldown.interval.random_float()
        now = services.time_service().sim_now
        cooldown_time_span = clock.interval_in_sim_minutes(cooldown_time)
        cooldown_finish_time = now + cooldown_time_span
        affordance = interaction.affordance
        # Penalties stack while the affordance is still on cooldown.
        cur_penalty = self.get_front_page_penalty(affordance)
        penalty = front_page_cooldown.penalty + cur_penalty
        self._front_page_cooldown[affordance] = (
         cooldown_finish_time, penalty)
def get_front_page_penalty(self, affordance):
    """Return the accumulated front-page score penalty for an affordance.

    Expired cooldown entries are purged as a side effect; an affordance
    with no active cooldown contributes no penalty.
    """
    entry = self._front_page_cooldown.get(affordance)
    if entry is None:
        return 0
    cooldown_finish_time, penalty = entry
    if services.time_service().sim_now >= cooldown_finish_time:
        del self._front_page_cooldown[affordance]
        return 0
    return penalty
def is_sub_action_locked_out(self, affordance, target=None):
    # A mixer is locked out if either its per-target or its global lockout
    # entry is still in the future; expired entries are purged lazily here.
    if affordance is None:
        return False
    targeted_lockout_key = self._get_mixer_key(target, affordance, True)
    global_lockout_key = self._get_mixer_key(target, affordance, False)
    targeted_unlock_time = self._mixers_locked_out.get(targeted_lockout_key, None)
    global_unlock_time = self._mixers_locked_out.get(global_lockout_key, None)
    # No entries at all: not locked out, and no need to read the clock.
    if targeted_unlock_time is None:
        if global_unlock_time is None:
            return False
    now = services.time_service().sim_now
    locked_out = False
    if targeted_unlock_time is not None:
        if now >= targeted_unlock_time:
            if targeted_lockout_key in self._mixers_locked_out:
                del self._mixers_locked_out[targeted_lockout_key]
        else:
            locked_out = True
    if global_unlock_time is not None:
        if now >= global_unlock_time:
            if global_lockout_key in self._mixers_locked_out:
                del self._mixers_locked_out[global_lockout_key]
        else:
            locked_out = True
    return locked_out
def create_default_si(self, target_override=None):
    """Create the default super interaction for this Sim's current situation.

    Picks the swim AOP in pools, an affordance providing the current mobile
    posture when one exists, or the species default AOP, then instantiates
    the interaction from it.

    NOTE(review): the decompiled original nested the ``interaction_factory``
    call in an unreachable else-branch, leaving ``aop`` possibly unbound and
    most paths returning None.  Restructured so every branch assigns ``aop``
    before it is used; verify against the original bytecode.
    """
    context = InteractionContext(self, InteractionContext.SOURCE_SCRIPT, priority.Priority.Low)
    if build_buy.is_location_pool(self.position, self.location.level) or self.routing_surface.type == SurfaceType.SURFACETYPE_POOL:
        # In a pool (or on a pool routing surface), the default is to swim.
        aop = posture_graph.PostureGraphService.get_swim_aop(self.species)
    elif self.posture.mobile and self.posture.posture_type is not posture_graph.SIM_DEFAULT_POSTURE_TYPE:
        # Mobile in a non-default posture: prefer an affordance that can
        # keep providing the current posture.
        posture_type = self.posture.posture_type
        posture_graph_service = services.posture_graph_service()
        for affordance in posture_graph_service.mobile_posture_providing_affordances:
            if affordance.provided_posture_type is posture_type:
                aop = AffordanceObjectPair(affordance, target_override, affordance, None, force_inertial=True)
                break
        else:
            aop = posture_graph.PostureGraphService.get_default_aop(self.species)
    else:
        aop = posture_graph.PostureGraphService.get_default_aop(self.species)
    result = aop.interaction_factory(context)
    if not result:
        logger.error('Error creating default si: {}', result.reason)
    return result.interaction
def pre_add(self, manager, *args, **kwargs):
    # Runs before the Sim enters its object manager: create the interaction
    # queue and keep the Sim hidden until startup finishes.
    (super().pre_add)(manager, *args, **kwargs)
    self.queue = self._interaction_queue(self)
    self._obj_manager = manager
    self.hide(HiddenReasonFlag.NOT_INITIALIZED)
@property
def persistence_group(self):
    # Sims' persistence group is fixed; see the no-op setter below.
    return self._persistence_group

@persistence_group.setter
def persistence_group(self, value):
    # Intentionally a no-op: attempts to change a Sim's persistence group
    # are logged with a callstack instead of being applied.
    logger.callstack('Trying to override the persistence group of sim: {}.', self, owner='msantander')
def on_add(self):
    # Called when the Sim object enters the object manager: register with
    # zone/routing services and restore per-Sim state.  Each restore step
    # is isolated in consume_exceptions so one failure doesn't abort the rest.
    super().on_add()
    zone_id = services.current_zone_id()
    _zone.add_sim(self.sim_id, zone_id)
    self.routing_component.on_sim_added()
    with consume_exceptions('SimInfo', 'Error during motive creation'):
        self._create_motives()
    with consume_exceptions('SimInfo', 'Error during buff addition'):
        if self.sim_info.should_add_foreign_zone_buff(zone_id):
            self.add_buff(self.FOREIGN_ZONE_BUFF)
        self.Buffs.add_venue_buffs()
    with consume_exceptions('SimInfo', 'Error during inventory load'):
        self.inventory_component.load_items(self.sim_info.inventory_data)
    with consume_exceptions('SimInfo', 'Error during template affordance tracker init'):
        self.sim_info.template_affordance_tracker.on_sim_added()
    with consume_exceptions('SimInfo', 'Error updating trait effects'):
        self.sim_info.trait_tracker.update_trait_effects()
    with consume_exceptions('SimInfo', 'Error during spawn condition trigger'):
        self.manager.trigger_sim_spawn_condition(self.sim_id)
    services.get_master_controller().add_sim(self)
def _portal_added_callback(self, portal):
    # Lock any newly added portal against this Sim.
    portal.lock_sim(self)
def _should_be_swimming(self):
    """True if the Sim's current position calls for the swim posture."""
    return self.should_be_swimming_at_position(self.position, self.location.level)

def should_be_swimming_at_position(self, position, level=0, check_can_swim=True):
    """Decide whether a Sim at ``position``/``level`` ought to be swimming.

    Pools always mean swimming.  Elsewhere the answer depends on ocean
    tuning: without wading-interval tuning any positive water depth counts;
    with it, the depth must reach the interval's upper bound.  When
    ``check_can_swim`` is set, Sims lacking beach-portal ocean tuning
    never swim.
    """
    if build_buy.is_location_pool(position, level):
        return True
    ocean_data = OceanTuning.get_actor_ocean_data(self)
    if check_can_swim and (ocean_data is None or ocean_data.beach_portal_data is None):
        return False
    depth = get_water_depth(position.x, position.z, level)
    if ocean_data is None or ocean_data.wading_interval is None:
        return 0 < depth
    return ocean_data.wading_interval.upper_bound <= depth
def _update_face_and_posture_gen(self, timeline):
    # Re-derive and kick-start the Sim's posture state: swim posture in
    # water, a compatible mobile posture if one matches, otherwise the
    # species default.  Afterwards restart animation overlays and, for a
    # previous vehicle posture, try returning the vehicle to inventory.
    target_override = None
    posture_type = None
    previous_posture_type = None
    previous_posture_target = None
    if self.posture_state is not None:
        previous_posture_type = self.posture_state.body.posture_type
        previous_posture_target = self.posture_state.body.target
    try:
        if self._should_be_swimming():
            posture_type = posture_graph.SIM_SWIM_POSTURE_TYPE
            location = self.location
            routing_surface = self.routing_surface
            routing_surface = SurfaceIdentifier(routing_surface.primary_id, routing_surface.secondary_id, SurfaceType.SURFACETYPE_POOL)
            snapped_y = services.terrain_service.terrain_object().get_routing_surface_height_at(location.transform.translation.x, location.transform.translation.z, routing_surface)
            # Snap height only when already on a pool-type surface but at
            # the wrong height.  NOTE(review): condition comes from the
            # decompiler — confirm the intended polarity.
            if not (routing_surface.type != location.routing_surface.type or sims4.math.almost_equal(location.transform.translation.y, snapped_y)):
                translation = sims4.math.Vector3(location.transform.translation.x, snapped_y, location.transform.translation.z)
                self.location = self.location.clone(translation=translation, routing_surface=routing_surface)
        else:
            posture_graph_service = services.current_zone().posture_graph_service
            compatible_postures_and_targets = posture_graph_service.get_compatible_mobile_postures_and_targets(self)
            if compatible_postures_and_targets:
                # Prefer keeping the current body posture when any target
                # supports it; otherwise take the first available pairing.
                for target, compatible_postures in compatible_postures_and_targets.items():
                    if self.posture_state is None or self.posture_state.body.posture_type in compatible_postures:
                        posture_type = self.posture_state.body.posture_type if self.posture_state is not None else compatible_postures[0]
                        target_override = target
                        break
                else:
                    target_override, posture_types = next(iter(compatible_postures_and_targets.items()))
                    posture_type = next(iter(posture_types), None)
        if posture_type is None:
            posture_type = posture_graph.SIM_DEFAULT_POSTURE_TYPE
        origin_posture_spec = get_origin_spec(posture_type)
        self.posture_state = PostureState(self, None, origin_posture_spec, {PostureSpecVariable.HAND: (Hand.LEFT,)})
        yield from self.posture_state.kickstart_gen(timeline, (self.routing_surface), target_override=target_override)
    except Exception:
        # Broad fallback: any failure above resets to the default posture.
        # NOTE(review): the decompiled original swallows the exception
        # without logging — confirm this is intentional.
        posture_type = posture_graph.SIM_DEFAULT_POSTURE_TYPE
        origin_posture_spec = get_origin_spec(posture_type)
        self.posture_state = PostureState(self, None, origin_posture_spec, {PostureSpecVariable.HAND: (Hand.LEFT,)})
        yield from self.posture_state.kickstart_gen(timeline, self.routing_surface)
    self._start_animation_interaction()
    self.start_animation_overlays()
    self.update_animation_overlays()
    # If the Sim was previously in an inventoryable owned vehicle, try to
    # put that vehicle back into the Sim's inventory.
    if previous_posture_type is not None:
        if previous_posture_type.is_vehicle:
            if previous_posture_target is not None:
                if previous_posture_target.vehicle_component is not None:
                    if previous_posture_target.inventoryitem_component is not None:
                        if previous_posture_target.vehicle_component.retrieve_tuning is not None:
                            household_owner_id = previous_posture_target.household_owner_id
                            if household_owner_id == self.household_id:
                                previous_posture_target = previous_posture_target.part_owner if previous_posture_target.is_part else previous_posture_target
                                self.inventory_component.player_try_add_object(previous_posture_target)
    # Decompiler artifact: unreachable yield that keeps this function a
    # generator even when no other yield executes.
    if False:
        yield None
def _update_multi_motive_buff_trackers(self):
for multi_motive_buff_tracker in self._multi_motive_buff_trackers:
multi_motive_buff_tracker.setup_callbacks()
def _remove_multi_motive_buff_trackers(self):
for multi_motive_buff_tracker in self._multi_motive_buff_trackers:
multi_motive_buff_tracker.cleanup_callbacks()
self._multi_motive_buff_trackers.clear()
def add_callbacks(self):
    # Wire this Sim into routing, navmesh, and outfit-change notifications.
    # Each group is isolated so one failure doesn't abort the rest.
    with consume_exceptions('SimInfo', 'Error during routing initialization'):
        self.routing_component.add_callbacks()
        self.routing_component.on_intended_location_changed.append(self.refresh_los_constraint)
        self.routing_component.on_intended_location_changed.append(self._update_social_geometry_on_location_changed)
        # Cached two-Sim social transforms go stale whenever the Sim moves.
        self.routing_component.on_intended_location_changed.append(lambda *_, **__: self.two_person_social_transforms.clear())
        self.routing_component.on_intended_location_changed.append(self.update_intended_position_on_active_lot)
    with consume_exceptions('SimInfo', 'Error during navmesh initialization'):
        zone = services.get_zone(self.zone_id)
        if zone is not None:
            zone.navmesh_change_callbacks.append(self._on_navmesh_updated)
            zone.wall_contour_update_callbacks.append(self._on_navmesh_updated)
            zone.foundation_and_level_height_update_callbacks.append(self.validate_current_location_or_fgl)
    with consume_exceptions('SimInfo', 'Error during outfit initialization'):
        self.sim_info.on_outfit_changed.append(self.on_outfit_changed)
def remove_callbacks(self):
    # Mirror of add_callbacks: detach from zone, outfit, portal, and
    # routing notifications.  Membership is checked first because the
    # callbacks may never have been registered.
    zone = services.current_zone()
    if self._on_navmesh_updated in zone.navmesh_change_callbacks:
        zone.navmesh_change_callbacks.remove(self._on_navmesh_updated)
    if self._on_navmesh_updated in zone.wall_contour_update_callbacks:
        zone.wall_contour_update_callbacks.remove(self._on_navmesh_updated)
    if self.validate_current_location_or_fgl in zone.foundation_and_level_height_update_callbacks:
        zone.foundation_and_level_height_update_callbacks.remove(self.validate_current_location_or_fgl)
    if self.on_outfit_changed in self.sim_info.on_outfit_changed:
        self.sim_info.on_outfit_changed.remove(self.on_outfit_changed)
    self.manager.unregister_portal_added_callback(self._portal_added_callback)
    self.routing_component.remove_callbacks()
def _startup_sim_gen(self, timeline):
    # Master-controller work element that brings a newly instantiated (or
    # resetting) Sim fully into simulation: posture, trackers, situations,
    # callbacks, and UI notifications.  Order is significant throughout.
    if self._starting_up:
        # Re-entrancy guard: another thread is already starting this Sim up.
        logger.info('Attempting to run _startup_sim while it is already running on another thread. Request ignored.')
        return
    previous_simulation_state = self._simulation_state
    self._starting_up = True
    try:
        yield from self._update_face_and_posture_gen(timeline)
        self.queue.unlock()
        self.show(HiddenReasonFlag.NOT_INITIALIZED)
        if self._simulation_state == SimulationState.INITIALIZING:
            # First-time startup: run every one-time initialization step.
            school_data = self.sim_info.get_school_data()
            if school_data is not None:
                # Only create homework for Sims that were never saved.
                create_homework = self.sim_info.time_sim_was_saved is None
                school_data.update_school_data((self.sim_info), create_homework=create_homework)
            self.trait_tracker.on_sim_startup()
            for commodity in tuple(self.commodity_tracker):
                if commodity.needs_fixup_on_load():
                    commodity.fixup_on_sim_instantiated()
            owning_household_of_active_lot = services.owning_household_of_active_lot()
            if owning_household_of_active_lot is not None:
                for target_sim_info in owning_household_of_active_lot:
                    self.relationship_tracker.add_relationship_appropriateness_buffs(target_sim_info.id)
            services.relationship_service().on_sim_creation(self)
            self.autonomy_component.start_autonomy_alarm()
            situation_manager = services.get_zone_situation_manager()
            situation_manager.on_begin_sim_creation_notification(self)
            services.sim_spawner_service().on_sim_creation(self)
            situation_manager.on_end_sim_creation_notification(self)
            self.commodity_tracker.start_regular_simulation()
            for buff, multi_motive_buff_motives in self.MULTI_MOTIVE_BUFF_MOTIVES.items():
                self._multi_motive_buff_trackers.append(sims.multi_motive_buff_tracker.MultiMotiveBuffTracker(self, multi_motive_buff_motives, buff))
            self.sim_info.Buffs.on_sim_ready_to_simulate()
            self.sim_info.career_tracker.on_sim_startup()
            self.sim_info.occult_tracker.on_sim_ready_to_simulate(self)
            self.sim_info.trait_tracker.on_sim_ready_to_simulate()
            if self.sim_info.whim_tracker is not None:
                self.sim_info.whim_tracker.load_whims_info_from_proto()
                self.sim_info.whim_tracker.start_whims_tracker()
            self.update_sleep_schedule()
            if self.sim_info.time_sim_was_saved is None:
                if self.sim_info.degree_tracker is not None:
                    if self.sim_info.degree_tracker.get_enrolled_major() is not None:
                        self.sim_info.degree_tracker.create_university_objects()
            sims.ghost.Ghost.make_ghost_if_needed(self.sim_info)
            if self.sim_info.is_in_travel_group():
                # If the travel group's region is incompatible with the
                # current one, leave the group (or end the vacation when no
                # remaining member can live alone).
                travel_group = self.travel_group
                current_region = services.current_region()
                travel_group_region = region.get_region_instance_from_zone_id(travel_group.zone_id)
                if not current_region.is_region_compatible(travel_group_region):
                    if any((sim_info.can_live_alone for sim_info in travel_group if sim_info is not self.sim_info)):
                        travel_group.remove_sim_info(self.sim_info)
                    else:
                        travel_group.end_vacation()
            if services.current_zone().is_zone_running:
                if self.sim_info.away_action_tracker is not None:
                    self.sim_info.away_action_tracker.refresh()
                services.sim_info_manager().update_greeted_relationships_on_spawn(self.sim_info)
            if self.is_selectable:
                self.sim_info.start_aspiration_tracker_on_instantiation(force_ui_update=True)
                if self.is_selected:
                    self.client.notify_active_sim_changed(None, new_sim_info=(self.sim_info))
                self.update_portal_locks()
        else:
            if self._simulation_state == SimulationState.RESETTING:
                # Reset path: detach callbacks/buffs before re-simulating.
                self.remove_callbacks()
                self.Buffs.on_sim_reset()
        self.on_outfit_changed(self.sim_info, self._sim_info.get_current_outfit())
        self.refresh_los_constraint()
        self._simulation_state = SimulationState.SIMULATING
        self.add_callbacks()
        self.on_start_up(self)
        self._start_environment_score()
        self.update_intended_position_on_active_lot(update_ui=True)
        suntan_tracker = self.sim_info.suntan_tracker
        if suntan_tracker is not None:
            suntan_tracker.on_start_up(self)
        familiar_tracker = self.sim_info.familiar_tracker
        if familiar_tracker is not None:
            familiar_tracker.on_sim_startup()
        street_service = services.street_service()
        if street_service is not None:
            street_service.on_sim_added(self.sim_info)
        if gsi_handlers.sim_info_lifetime_handlers.archiver.enabled:
            gsi_handlers.sim_info_lifetime_handlers.archive_sim_info_event(self.sim_info, 'instantiated')
    finally:
        self._starting_up = False
        if previous_simulation_state == SimulationState.RESETTING:
            services.current_zone().service_manager.on_sim_reset(self)
    # Decompiler artifact: unreachable yield that keeps this function a
    # generator even when the early-return path is taken.
    if False:
        yield None
def on_remove(self):
    # Tear down per-Sim runtime state when the Sim leaves the world.  The
    # SimInfo survives; commodities switch to the cheap off-screen
    # (low-level) simulation.
    self.sim_info.Buffs.on_sim_removed()
    self.routing_component.on_sim_removed()
    self._stop_environment_score()
    self.trait_tracker.on_sim_removed()
    familiar_tracker = self.sim_info.familiar_tracker
    if familiar_tracker is not None:
        familiar_tracker.on_sim_removed()
    street_service = services.street_service()
    if street_service is not None:
        street_service.on_sim_removed(self.sim_info)
    self.commodity_tracker.stop_regular_simulation()
    self.commodity_tracker.start_low_level_simulation()
    self.sim_info.template_affordance_tracker.on_sim_removed()
    # Record the removal time for load-time fixups (see _startup_sim_gen).
    self.sim_info.time_sim_was_saved = services.time_service().sim_now
    self.asm_auto_exit.clear()
    zone = services.current_zone()
    zone.posture_graph_service.update_sim_node_caches(self)
    if zone.master_controller is not None:
        zone.master_controller.remove_sim(self)
    self.on_posture_event.clear()
    _zone.remove_sim(self.sim_id, zone.id)
    self._is_removed = True
    super().on_remove()
    self._posture_state = None
    self.on_start_up.clear()
    self.remove_callbacks()
    zone.sim_quadtree.remove(self.sim_id, placement.ItemType.SIM_POSITION, 0)
    zone.sim_quadtree.remove(self.sim_id, placement.ItemType.SIM_INTENDED_POSITION, 0)
    if self.refresh_los_constraint in zone.wall_contour_update_callbacks:
        zone.wall_contour_update_callbacks.remove(self.refresh_los_constraint)
    self._remove_multi_motive_buff_trackers()
    self.object_ids_to_ignore.clear()
    self._si_state = None
    self._mixers_locked_out.clear()
    self._front_page_cooldown.clear()
    if gsi_handlers.sim_info_lifetime_handlers.archiver.enabled:
        gsi_handlers.sim_info_lifetime_handlers.archive_sim_info_event(self.sim_info, 'deinstantiated')
def post_remove(self):
    # Final cleanup after removal from the object manager.
    super().post_remove()
    self._clear_clothing_buffs()
    self.queue = None
@property
def is_outside(self):
    # A running interaction tuned to count as "inside" overrides position.
    if self._is_running_interaction_counts_as_inside():
        return False
    return super().is_outside

@property
def is_inside_building(self):
    if self._is_running_interaction_counts_as_inside():
        return True
    return super().is_inside_building

@property
def is_in_shade(self):
    # Unlike the two properties above, there is no super() fallback: only
    # a running interaction tuned as "in shade" makes a Sim shaded.
    if self._is_running_interaction_counts_as_in_shade():
        return True
    return False

def _is_running_interaction_counts_as_inside(self):
    # True if any running affordance is tuned to count as inside.
    return any((affordance.counts_as_inside for affordance, _ in self._all_affordance_targets()))

def _is_running_interaction_counts_as_in_shade(self):
    # True if any running affordance is tuned to count as in shade.
    return any((affordance.counts_as_in_shade for affordance, _ in self._all_affordance_targets()))
@property
def intended_location(self):
    # While parented to another Sim (e.g. carried), follow the parent's
    # intention; during a transition, ask the transition controller;
    # otherwise the Sim is already where it intends to be.
    sim_parent = self.parent
    if sim_parent is not None:
        if sim_parent.is_sim:
            return sim_parent.intended_location
    if self.transition_controller is not None:
        return self.transition_controller.intended_location(self)
    return self.location

@property
def intended_transform(self):
    return self.intended_location.world_transform

@property
def intended_routing_surface(self):
    # Falls back to the parent's intended surface when ours is unset.
    return self.intended_location.routing_surface or self.parent.intended_routing_surface

@property
def intended_position_on_active_lot(self):
    return self._intended_position_on_active_lot

def get_intended_location_excluding_transition(self, exclude_transition):
    # Like intended_location, but treat ``exclude_transition`` as if it
    # were not running.
    if self.transition_controller is None or self.transition_controller is exclude_transition:
        return self.location
    return self.intended_location

def _should_invalidate_location(self):
    # Sims manage their own location updates; never invalidate implicitly.
    return False
@property
def preroll_commodity_flags(self):
    # Static preroll flags plus any dynamically registered flag sets.
    if self._dynamic_preroll_commodity_flags_map is None:
        dynamic_preroll_commodity_flags = frozenset()
    else:
        dynamic_preroll_commodity_flags = frozenset((flag for commodity_set in self._dynamic_preroll_commodity_flags_map.values() for flag in commodity_set))
    return super().preroll_commodity_flags | dynamic_preroll_commodity_flags

def add_dynamic_preroll_commodity_flags(self, key, commodity_flags):
    # Register (or replace) a keyed set of preroll commodity flags.
    if self._dynamic_preroll_commodity_flags_map is None:
        self._dynamic_preroll_commodity_flags_map = {}
    self._dynamic_preroll_commodity_flags_map[key] = commodity_flags

def remove_dynamic_preroll_commodity_flags(self, key):
    # Drop a keyed flag set; the map reverts to None when emptied.
    if self._dynamic_preroll_commodity_flags_map is not None:
        if key in self._dynamic_preroll_commodity_flags_map:
            del self._dynamic_preroll_commodity_flags_map[key]
            if not self._dynamic_preroll_commodity_flags_map:
                self._dynamic_preroll_commodity_flags_map = None
def commodities_gen(self):
    """Yield every commodity statistic tracked for this Sim."""
    yield from self.commodity_tracker

def static_commodities_gen(self):
    """Yield every static commodity tracked for this Sim."""
    yield from self.static_commodity_tracker

def statistics_gen(self):
    """Yield every plain statistic tracked for this Sim."""
    yield from self.statistic_tracker
def object_tags_override_off_lot_autonomy_ref_count(self, object_tag_list):
    # Thin delegation to the SimInfo.
    return self.sim_info.object_tags_override_off_lot_autonomy_ref_count(object_tag_list)

def all_skills(self):
    # Thin delegation to the SimInfo.
    return self.sim_info.all_skills()
def scored_stats_gen(self):
    """Yield every scored statistic, commodity, and static commodity that
    this Sim is currently allowed to score, in that tracker order."""
    trackers = (self.statistic_tracker, self.commodity_tracker, self.static_commodity_tracker)
    for tracker in trackers:
        for stat in tracker.all_statistics():
            if stat.is_scored and self.is_scorable(stat.stat_type):
                yield stat
def force_update_routing_location(self):
    """Ask every attached primitive that supports it to refresh its
    routing location."""
    for primitive in self.primitives:
        update = getattr(primitive, 'update_routing_location', None)
        if update is not None:
            update()
def populate_localization_token(self, *args, **kwargs):
    # Thin delegation to the SimInfo (decompiler left the extra parens).
    (self.sim_info.populate_localization_token)(*args, **kwargs)

def create_posture_interaction_context(self):
    # Posture-graph-originated interactions run at high priority.
    return InteractionContext(self, InteractionContext.SOURCE_POSTURE_GRAPH, Priority.High)
@property
def posture(self):
    # Body aspect of the current posture state, or None before one exists.
    if self.posture_state is not None:
        return self.posture_state.body

@property
def posture_target(self):
    # Target object of the current body posture, or None.
    if self.posture is not None:
        return self.posture.target
@distributor.fields.Field(op=(distributor.ops.SetActorPosture), default=None)
def posture_state(self):
    # Current PostureState; distributed to clients via SetActorPosture.
    return self._posture_state

@posture_state.setter
def posture_state(self, value):
    # NOTE(review): in the decompiled original the `self._posture_state = value`
    # assignment sat in an else-branch and was skipped whenever a previous
    # posture state existed, so the property could never change once set.
    # Hoisted so the assignment always happens (the rest of the setter
    # already treats `value` as the new current state); verify against the
    # original bytecode.
    if self._posture_state is not None:
        if value is not None:
            if self._posture_state.carry_targets != value.carry_targets:
                # Carry targets changed: restart pending transitions that
                # depend on carrying.
                for interaction in self.queue:
                    if interaction.transition is not None and not interaction.transition.running:
                        if interaction.carry_track is not None or interaction.should_carry_create_target():
                            interaction.transition.reset_sim_progress(self)
            if self._posture_state.body != value.body:
                if self.animation_interaction is not None:
                    self.animation_interaction.clear_animation_liability_cache()
                value.body.fallback_occult_on_posture_reset = self._posture_state.body.get_occult_for_posture_reset()
    self._posture_state = value
    # Recompute which no-carry portals the Sim may use given current carries.
    key_mask = PortalFlags.REQUIRE_NO_CARRY
    carry_target_found = False
    for carry_target in value.carry_targets:
        if carry_target is not None:
            carry_target_found = True
            key_mask = key_mask & ~carry_target.get_portal_key_make_for_carry()
    if carry_target_found:
        self.routing_component.clear_portal_mask_flag(key_mask)
    else:
        self.routing_component.set_portal_mask_flag(key_mask)
    # Reset the Sim if any posture target is destroyed out from under it.
    self._posture_target_refs.clear()
    for aspect in self._posture_state.aspects:
        if aspect.target is not None:
            self._posture_target_refs.append(aspect.target.ref(lambda _: self.reset(ResetReason.RESET_EXPECTED, self, 'Posture target went away.')))
    if self.posture_state is not None:
        connectivity_handles = self.posture_state.connectivity_handles
        if connectivity_handles is not None:
            self.pathplan_context.connectivity_handles = connectivity_handles
def is_surface(self, *args, **kwargs):
    # Sims never act as placement surfaces, regardless of arguments.
    return False
@caches.cached
def ignore_group_socials(self, excluded_group=None):
    # Cached: invalidated via on_social_context_changed / global cache
    # clears.  Checks whether any running SI (outside excluded_group)
    # suppresses group socials; before SI state exists, falls back to the
    # queued head interaction.
    if self.si_state is not None:
        for si in self.si_state:
            if excluded_group is not None:
                if si.social_group is excluded_group:
                    continue
            if si.ignore_group_socials:
                return True
    else:
        # No SI state yet: a missing queue conservatively suppresses socials.
        if self.queue is None:
            return True
        next_interaction = self.queue.peek_head()
        if next_interaction is not None:
            if next_interaction.is_super and next_interaction.ignore_group_socials:
                if excluded_group is None or next_interaction.social_group is not excluded_group:
                    return True
    return False
@property
def disallow_as_mixer_target(self):
    # True if any running super interaction forbids targeting this Sim
    # with mixers.
    return any((si.disallow_as_mixer_target for si in self.si_state))
def get_groups_for_sim_gen(self):
    """Yield the social groups that still count this Sim as a member."""
    yield from (group for group in self._social_groups if self in group)
def get_main_group(self):
    """Return the first non-side social group this Sim belongs to, or None."""
    for group in self.get_groups_for_sim_gen():
        if not group.is_side_group:
            return group
    return None

def get_visible_group(self):
    """Prefer a visible non-side group; otherwise fall back to the last
    visible side group (or None when nothing is visible)."""
    fallback = None
    for group in self.get_groups_for_sim_gen():
        if not group.is_visible:
            continue
        if not group.is_side_group:
            return group
        fallback = group
    return fallback
def get_ensemble_sims(self):
    # Union of members across every ensemble this Sim belongs to.
    ensemble_sims = set()
    for ensemble in services.ensemble_service().get_all_ensembles_for_sim(self):
        ensemble_sims.update(ensemble)
    return ensemble_sims

def get_sims_for_rally(self, rally_sources):
    # Collect the Sims that should rally along, drawn from the requested
    # sources (visible social group and/or ensembles).
    rally_sims = set()
    if RallySource.SOCIAL_GROUP in rally_sources:
        main_group = self.get_visible_group()
        if main_group:
            rally_sims.update(main_group)
    if RallySource.ENSEMBLE in rally_sources:
        ensemble_sims = services.ensemble_service().get_ensemble_sims_for_rally(self)
        if ensemble_sims:
            rally_sims.update(ensemble_sims)
    return rally_sims
def is_in_side_group(self):
    """True if this Sim belongs to at least one side group."""
    for group in self.get_groups_for_sim_gen():
        if self in group and group.is_side_group:
            return True
    return False

def is_in_group_with(self, target_sim):
    """True if this Sim shares any social group with ``target_sim``."""
    for group in self.get_groups_for_sim_gen():
        if target_sim in group:
            return True
    return False
@caches.cached
def get_social_context(self):
    # Overall short-term context bit across all of this Sim's groups,
    # downgraded via the bit's size-limit transformation for large crowds.
    sims = set((itertools.chain)(*self.get_groups_for_sim_gen()))
    social_context_bit = (SocialContextTest.get_overall_short_term_context_bit)(*sims)
    if social_context_bit is not None:
        size_limit = social_context_bit.size_limit
        if size_limit is not None:
            if len(sims) > size_limit.size:
                social_context_bit = size_limit.transformation
    return social_context_bit

def on_social_context_changed(self):
    # Invalidate context caches first, then notify every group.
    SocialContextTest.get_overall_short_term_context_bit.cache.clear()
    self.get_social_context.cache.clear()
    for group in self.get_groups_for_sim_gen():
        group.on_social_context_changed()
def without_social_focus(self, sequence):
    """Wrap ``sequence`` so that social focus is suspended in every group
    this Sim belongs to while the sequence runs."""
    wrapped = sequence
    for group in self.get_groups_for_sim_gen():
        wrapped = group.without_social_focus(self, self, wrapped)
    return wrapped
def set_mood_asm_parameter(self, asm, actor_name):
    """Push this Sim's current mood onto ``asm`` as the lower-cased 'mood'
    actor parameter; no-op when no mood param name is available."""
    mood_param = self.get_mood_animation_param_name()
    if mood_param is None:
        return
    asm.set_actor_parameter(actor_name, self, 'mood', mood_param.lower())
def set_trait_asm_parameters(self, asm, actor_name):
    # Apply default anim params plus each trait's ASM overrides to the ASM
    # (keyed by (param, actor)), and remember them in the locked-param cache.
    asm_param_dict = self.sim_info.trait_tracker.get_default_trait_asm_params(actor_name)
    for param_name, param_value in self._default_anim_params.items():
        asm_param_dict[(param_name, actor_name)] = param_value
    for trait in self.sim_info.get_traits():
        if trait.trait_asm_overrides.trait_asm_param is None:
            continue
        if trait.trait_asm_overrides.param_type is None:
            # Untyped override: treated as a boolean flag parameter.
            asm_param_dict[(trait.trait_asm_overrides.trait_asm_param, actor_name)] = True
        else:
            asm_param_dict[(trait.trait_asm_overrides.param_type, actor_name)] = trait.trait_asm_overrides.trait_asm_param
    asm.update_locked_params(asm_param_dict, ignore_virtual_suffix=True)
    self._locked_param_cache.update(asm_param_dict)
def _get_animation_skill_param(self):
param_dict = {}
for skill in self.all_skills():
if not skill.stat_asm_param.always_apply:
continue
asm_param_name, asm_param_value = skill.get_asm_param()
if asm_param_name is not None and asm_param_value is not None:
param_dict[asm_param_name] = asm_param_value
return param_dict
def get_sim_locked_params(self):
return self._locked_param_cache
def evaluate_si_state_and_cancel_incompatible(self, finishing_type, cancel_reason_msg):
    # Cancel staging super interactions that no longer fit the Sim's
    # current posture (and, when meaningful, transform) constraint.
    sim_posture_constraint = self.posture_state.posture_constraint_strict
    parent = self.parent
    if parent is None or parent.routing_component is None:
        sim_transform_constraint = interactions.constraints.Transform((self.transform), routing_surface=(self.routing_surface))
        sim_constraint = sim_transform_constraint.intersect(sim_posture_constraint)
    else:
        # Parented to something that routes — presumably the transform is
        # unreliable, so only the posture constraint applies (TODO confirm).
        sim_constraint = sim_posture_constraint
    _, included_sis = self.si_state.get_combined_constraint(sim_constraint, None, None, None, True, True)
    for si in self.si_state:
        if si not in included_sis and si.basic_content is not None and si.basic_content.staging:
            si.cancel(finishing_type, cancel_reason_msg)
def refresh_los_constraint(self, *args, target_position=DEFAULT, **kwargs):
    # Rebuild this Sim's line-of-sight constraint around the (intended)
    # position, then propagate transformed positions to routing-slave Sims.
    if target_position is DEFAULT:
        target_position = self.intended_position
        target_forward = self.intended_forward
        target_routing_surface = self.intended_routing_surface
    else:
        target_forward = self.forward
        target_routing_surface = self.routing_surface
    if target_routing_surface == self.lineofsight_component.routing_surface:
        if sims4.math.vector3_almost_equal_2d(target_position, self.lineofsight_component.position):
            # Nothing moved; keep the existing constraint.
            return
    target_position = target_position + target_forward * self.lineofsight_component.facing_offset
    self.lineofsight_component.generate(position=target_position, routing_surface=target_routing_surface, lock=True, build_convex=True)
    self._los_constraint = self.lineofsight_component.constraint
    zone = services.current_zone()
    # LOS depends on walls, so re-run this when wall contours change.
    if self.refresh_los_constraint not in zone.wall_contour_update_callbacks:
        zone.wall_contour_update_callbacks.append(self.refresh_los_constraint)
    # The cached social group constraint derives from LOS; invalidate it.
    self._social_group_constraint = None
    master_transform = sims4.math.Transform(target_position, sims4.math.angle_to_yaw_quaternion(sims4.math.vector3_angle(target_forward)))
    for slave_data in self.get_all_routing_slave_data_gen():
        if not slave_data.slave.is_sim:
            continue
        offset = sims4.math.Vector3.ZERO()
        for attachment_info in slave_data.attachment_info_gen():
            offset.x = offset.x + attachment_info.parent_offset.x - attachment_info.offset.x
            offset.z = offset.z + attachment_info.parent_offset.y - attachment_info.offset.y
        transformed_point = master_transform.transform_point(offset)
        slave_data.slave.refresh_los_constraint(target_position=transformed_point)
@property
def los_constraint(self):
    # Cached line-of-sight constraint; rebuilt by refresh_los_constraint.
    return self._los_constraint
def can_see(self, obj):
    """True if ``obj``'s intended (else actual) position lies inside this
    Sim's line-of-sight geometry."""
    position = obj.intended_position if obj.intended_position is not None else obj.position
    return self.los_constraint.geometry.contains_point(position)
def get_social_group_constraint(self, si):
    """Lazily build (and cache) the constraint used to position this Sim's
    social groups: the SI-state total constraint's geometry, expanded by
    the tuned amount and intersected with the LOS constraint; Anywhere()
    when no usable geometry exists.

    NOTE(review): the decompiler failed on this method (parse error at
    bytecode offset 112) and left raw disassembly in the file; this body is
    reconstructed from that disassembly — verify against the original
    bytecode before shipping.
    """
    if self._social_group_constraint is None:
        si_constraint = self.si_state.get_total_constraint(priority=(si.priority if si is not None else None), include_inertial_sis=True, to_exclude=si)
        # Find the first sub-constraint that actually carries geometry.
        for base_constraint in si_constraint:
            if base_constraint.geometry is not None:
                break
        else:
            # No geometry anywhere: while a super interaction is running,
            # fall back to the Sim's current transform.
            if self.queue.running is not None and self.queue.running.is_super:
                base_constraint = interactions.constraints.Transform(self.transform, routing_surface=self.routing_surface)
        if base_constraint.geometry is not None and base_constraint.geometry.polygon:
            los_constraint = self.los_constraint
            base_geometry = base_constraint.geometry
            expanded_polygons = []
            # Expand each sub-polygon so nearby Sims have room to join:
            # points become tuned circles, polygons are inflated about
            # their centroid.
            for sub_polygon in base_geometry.polygon:
                if len(sub_polygon) == 1:
                    new_polygon = sims4.geometry.generate_circle_constraint(LOSAndSocialConstraintTuning.num_sides_for_circle_expansion_of_point_constraint, sub_polygon[0], LOSAndSocialConstraintTuning.constraint_expansion_amount)
                elif len(sub_polygon) > 1:
                    center = sum(sub_polygon, sims4.math.Vector3.ZERO()) / len(sub_polygon)
                    new_polygon = sims4.geometry.inflate_polygon(sub_polygon, LOSAndSocialConstraintTuning.constraint_expansion_amount, centroid=center)
                else:
                    continue
                expanded_polygons.append(new_polygon)
            new_compound_polygon = sims4.geometry.CompoundPolygon(expanded_polygons)
            new_restricted_polygon = sims4.geometry.RestrictedPolygon(new_compound_polygon, [])
            base_constraint = interactions.constraints.Constraint(geometry=new_restricted_polygon, routing_surface=los_constraint.routing_surface)
            intersection = base_constraint.intersect(los_constraint)
            self._social_group_constraint = intersection
        else:
            self._social_group_constraint = interactions.constraints.Anywhere()
    return self._social_group_constraint
def get_next_work_priority(self):
    """Priority of the next piece of scheduler work for this Sim.

    Critical while the Sim is not yet simulating (startup must run first);
    otherwise the priority of the queued head interaction, or Low when the
    queue is empty.
    """
    if not self.is_simulating:
        return Priority.Critical
    head = self.queue.get_head()
    return Priority.Low if head is None else head.priority
def get_next_work(self):
    """Return the next WorkRequest for this Sim's scheduler.

    NOTE(review): this body comes from decompiled bytecode; the bare
    'return ... or WorkRequest()' below makes everything after it
    unreachable and is almost certainly a decompiler artifact of an
    early-out guard. Preserved verbatim pending the original source.
    """
    if self.is_being_destroyed:
        logger.error('sim.get_next_work() called for Sim {} when they were in the process of being destroyed.', self, owner='tastle/sscholl')
        return WorkRequest()
    if not self.is_simulating:
        # Not simulating yet: either startup is already in flight, or we
        # schedule the startup generator itself as the work.
        if self._starting_up:
            return WorkRequest()
        else:
            return WorkRequest(work_element=(elements.GeneratorElement(self._startup_sim_gen)), required_sims=(self,))
    return self.has_work_locks or services.posture_graph_service().has_built_for_zone_spin_up or WorkRequest()
    # --- unreachable below this point (see NOTE in the docstring) ---
    _ = self.queue._get_head()
    next_interaction = self.queue.get_head()
    if next_interaction is None and services.current_zone().is_zone_running:
        if any((not i.is_super for i in self.queue._autonomy)):
            # A non-super interaction leaked into the autonomy bucket;
            # cancel the whole bucket to recover.
            for i in tuple(self.queue._autonomy):
                i.cancel(FinishingType.INTERACTION_QUEUE, 'Blocked interaction in autonomy bucket, canceling all interactions in the autonomy bucket to fix.')
        else:
            self.run_subaction_autonomy()
        next_interaction = self.queue.get_head()
    if next_interaction is not None:
        wait_to_be_picked_up_liability = next_interaction.get_liability(WaitToBePickedUpLiability.LIABILITY_TOKEN)
        if wait_to_be_picked_up_liability is not None:
            # Sim is waiting to be picked up; schedule no work.
            return WorkRequest()
        next_interaction.refresh_and_lock_required_sims()
        required_sims = next_interaction.required_sims(for_threading=True)
        element = elements.GeneratorElement(functools.partial((self._process_interaction_gen), interaction=next_interaction))
        mutexed_resources = next_interaction.get_mutexed_resources()
        required_sims |= mutexed_resources
        return WorkRequest(work_element=element, required_sims=required_sims,
          additional_resources=(next_interaction.required_resources()),
          set_work_timestamp=(next_interaction.set_work_timestamp),
          debug_name=(str(next_interaction)))
    return WorkRequest()
def get_idle_element(self, duration=10):
    """Build this Sim's idle behavior.

    Returns a tuple (idle_element, cancel_callable), or (None, None) when
    the Sim is not yet simulating.

    :param duration: soft-sleep length, in sim minutes, between idles.
    """
    if self.is_being_destroyed:
        logger.error('sim.get_idle_element() called for Sim {} when they were in the process of being destroyed.', self, owner='tastle/sscholl')
    elif not self.is_simulating:
        return (None, None)
    # Prefer an idle behavior tuned on a running super interaction; fall
    # back to the posture's idle when none of the SIs provide one.
    possible_idle_behaviors = []
    for si in self.si_state:
        idle_behavior = si.get_idle_behavior()
        if idle_behavior is not None:
            possible_idle_behaviors.append((si, idle_behavior))
    if possible_idle_behaviors:
        _, idle_behavior = random.choice(possible_idle_behaviors)
    else:
        idle_behavior = self.posture.get_idle_behavior()
    # Idle animation followed by a skippable soft sleep that also services
    # the SI state between idles.
    sleep_behavior = build_element((elements.SoftSleepElement(date_and_time.create_time_span(minutes=duration)),
     self.si_state.process_gen))
    idle_sequence = build_element([
     build_critical_section(idle_behavior, flush_all_animations),
     sleep_behavior])
    idle_sequence = with_skippable_animation_time((self,), sequence=idle_sequence)
    for group in self.get_groups_for_sim_gen():
        idle_sequence = group.with_listener_focus(self, self, idle_sequence)

    def do_idle_behavior(timeline):
        # Runs the idle under GSI archiving; clears idle_sequence when done
        # so cancel_idle_behavior() becomes a no-op afterwards.
        nonlocal idle_sequence
        with gsi_handlers.sim_timeline_handlers.archive_sim_timeline_context_manager(self, 'Sim', 'Process Idle Interaction'):
            try:
                self.queue._apply_next_pressure()
                result = yield from element_utils.run_child(timeline, idle_sequence)
                return result
            finally:
                idle_sequence = None
        # Dead code retained from the decompiled original; 'yield from'
        # above already makes this a generator.
        if False:
            yield None

    def cancel_idle_behavior():
        # Soft-stop the idle if it has not already completed.
        nonlocal idle_sequence
        if idle_sequence is not None:
            idle_sequence.trigger_soft_stop()
            idle_sequence = None

    return (elements.GeneratorElement(do_idle_behavior), cancel_idle_behavior)
def _process_interaction_gen(self, timeline, interaction=None):
    """Process one queued interaction on the timeline (generator).

    Bails early if the queue head changed since the work was scheduled;
    always unlocks the interaction's required Sims on exit.
    """
    with gsi_handlers.sim_timeline_handlers.archive_sim_timeline_context_manager(self, 'Sim', 'Process Interaction', interaction):
        try:
            if self.queue.get_head() is not interaction:
                logger.info('Interaction has changed from {} to {} after work was scheduled. Bailing.', interaction, self.queue.get_head())
                return
            yield from self.queue.process_one_interaction_gen(timeline)
        finally:
            interaction.unlock_required_sims()
    # Dead code retained from the decompiled original; 'yield from' above
    # already makes this a generator.
    if False:
        yield None
def push_super_affordance(self, super_affordance, target, context, **kwargs):
    """Resolve, test, and execute a super affordance on this Sim.

    Accepts either an affordance class or its instance-manager name.
    Raises ValueError when the affordance cannot be resolved.
    """
    if isinstance(super_affordance, str):
        manager = services.get_instance_manager(sims4.resources.Types.INTERACTION)
        super_affordance = manager.get(super_affordance)
    if not super_affordance:
        raise ValueError('{0} is not a super affordance'.format(super_affordance))
    aop = interactions.aop.AffordanceObjectPair(super_affordance, target, super_affordance, None, **kwargs)
    return aop.test_and_execute(context)
def test_super_affordance(self, super_affordance, target, context, **kwargs):
    """Resolve and test (without executing) a super affordance on this Sim.

    Accepts either an affordance class or its instance-manager name.
    Raises ValueError when the affordance cannot be resolved.
    """
    if isinstance(super_affordance, str):
        manager = services.get_instance_manager(sims4.resources.Types.INTERACTION)
        super_affordance = manager.get(super_affordance)
    if not super_affordance:
        raise ValueError('{0} is not a super affordance'.format(super_affordance))
    aop = interactions.aop.AffordanceObjectPair(super_affordance, target, super_affordance, None, **kwargs)
    return aop.test(context)
def running_interactions_with_target_gen(self, target):
    """Yield the running super interaction (if it targets `target`) and any
    staged super interactions with the same target."""
    queue = self.queue
    if queue is not None and queue.running is not None:
        running = queue.running
        if running.is_super and running.target == target:
            yield running
    if self.si_state is not None:
        for staged_si in self.si_state:
            if staged_si.target == target:
                yield staged_si
def find_interaction_by_affordance(self, affordance):
    """Return the first queued SI with this affordance, falling back to the
    staged SI state when the queue has none."""
    for queued_si in self.queue.queued_super_interactions_gen():
        if queued_si.affordance is affordance:
            return queued_si
    return self.si_state.get_si_by_affordance(affordance)
def find_interaction_by_id(self, id_to_find):
    """Look up an interaction by id: queue first, then the transition
    controller's target SI, then the staged SI state."""
    id_to_find = self.ui_manager.get_routing_owner_id(id_to_find)
    interaction = None
    if self.queue is not None:
        interaction = self.queue.find_interaction_by_id(id_to_find)
        if interaction is None:
            transition_controller = self.queue.transition_controller
            if transition_controller is not None:
                target_si, _ = transition_controller.interaction.get_target_si()
                if target_si is not None and target_si.id == id_to_find:
                    return target_si
    if interaction is None and self.si_state is not None:
        interaction = self.si_state.find_interaction_by_id(id_to_find)
    return interaction
def find_continuation_by_id(self, source_id):
    """Find a continuation of `source_id` in the queue, else in si_state."""
    interaction = self.queue.find_continuation_by_id(source_id) if self.queue is not None else None
    if interaction is None and self.si_state is not None:
        interaction = self.si_state.find_continuation_by_id(source_id)
    return interaction
def find_sub_interaction_by_aop_id(self, super_id, aop_id):
    """Find a queued sub-interaction by its super id and AOP id, or None."""
    if self.queue is None:
        return None
    return self.queue.find_sub_interaction(super_id, aop_id)
def set_autonomy_preference(self, preference, obj, context):
    """Record, clear, or restrict an autonomy preference for this Sim.

    Scoring preferences and plain use preferences map tag -> object id on
    the SimInfo; use-only preferences are delegated to the household's
    object preference tracker.
    """
    if preference.is_scoring:
        if preference.should_clear:
            if preference.tag in self.sim_info.autonomy_scoring_preferences:
                del self.sim_info.autonomy_scoring_preferences[preference.tag]
        else:
            self.sim_info.autonomy_scoring_preferences[preference.tag] = obj.id
    elif preference.use_only:
        object_preference_tracker = services.object_preference_tracker(require_active_household=True)
        if object_preference_tracker is None:
            return
        target_objects = set()
        if context is not None and context.pick is not None:
            # NOTE(review): when obj.is_part is truthy this assigns that value
            # directly instead of a set of parts — looks like a decompiler
            # artifact; preserved pending the original source.
            target_objects = obj.is_part or obj.get_closest_parts_to_position((context.pick.location), restrict_autonomy_preference=True)
        else:
            target_objects = set()
        if not target_objects:
            target_objects.add(obj)
        # NOTE(review): because this is an 'elif', no clear/set happens when
        # the fallback object was just added above — likely a decompiler
        # artifact; behavior preserved.
        elif preference.should_clear:
            object_preference_tracker.clear_restriction(target_objects, preference.tag)
        else:
            object_preference_tracker.set_restriction(self.sim_info, target_objects, preference.tag, preference.should_set.should_force)
    else:
        if preference.should_clear:
            # Bug fix: the original tested membership in
            # autonomy_use_preferences[preference.tag] (the mapped value)
            # instead of the mapping itself; mirror the scoring branch.
            if preference.tag in self.sim_info.autonomy_use_preferences:
                del self.sim_info.autonomy_use_preferences[preference.tag]
        else:
            self.sim_info.autonomy_use_preferences[preference.tag] = obj.id
def is_object_scoring_preferred(self, preference_tag, obj):
    """True when obj is this Sim's scoring-preferred object for the tag."""
    scoring_prefs = self.sim_info.autonomy_scoring_preferences
    return self._check_preference(preference_tag, obj, scoring_prefs)
def is_object_use_preferred(self, preference_tag, obj):
    """True when obj is this Sim's use-preferred object for the tag."""
    use_prefs = self.sim_info.autonomy_use_preferences
    return self._check_preference(preference_tag, obj, use_prefs)
def get_autonomy_preference_type(self, preference_tag, obj, full_object, allow_test=True):
    """Classify obj for this Sim's autonomy: restriction from the object
    preference tracker if any, otherwise USE_PREFERENCE when the Sim has a
    matching use preference, otherwise ALLOWED."""
    preference_type = AutonomyPreferenceType.ALLOWED
    tracker = services.object_preference_tracker()
    if tracker is not None:
        preference_type = tracker.get_restriction((self.sim_info), obj,
          preference_tag,
          full_object=full_object,
          allow_test=allow_test)
    if preference_type == AutonomyPreferenceType.ALLOWED and self._check_preference(preference_tag, obj, self.sim_info.autonomy_use_preferences):
        preference_type = AutonomyPreferenceType.USE_PREFERENCE
    return preference_type
def get_use_only_object(self, preference_tag):
    """Return the object (or specific part) this Sim is restricted to for
    the given tag, or None when there is no restriction."""
    tracker = services.object_preference_tracker()
    if tracker is None:
        return None
    object_id, subroot_index = tracker.get_restricted_object(self.sim_info.sim_id, preference_tag)
    if object_id is None:
        return None
    restricted_obj = services.object_manager().get(object_id)
    if restricted_obj is None:
        return None
    if subroot_index is not None:
        return restricted_obj.get_part_by_index(subroot_index)
    return restricted_obj
@property
def autonomy_settings(self):
    # Convenience property wrapping get_autonomy_settings().
    return self.get_autonomy_settings()
def _check_preference(self, preference_tag, obj, preference_map):
obj_id = preference_map.get(preference_tag, None)
return obj.id == obj_id
def _clear_clothing_buffs(self):
    """Remove all outfit-driven buffs and re-enable decay on the commodity
    of every tracked buff type, then forget the handles."""
    for buff_type, handle in self._buff_handles:
        if handle is not None:
            self.remove_buff(handle)
        commodity_stat = self.get_stat_instance(buff_type.commodity)
        if commodity_stat is not None:
            commodity_stat.decay_enabled = True
    self._buff_handles.clear()
def on_outfit_changed(self, sim_info, category_and_index):
    # Re-apply outfit buffs when the Sim's outfit changes.
    # NOTE(review): the sim_info parameter is ignored and self.sim_info is
    # used instead — confirm this is intentional.
    self.apply_outfit_buffs_for_sim_info(self.sim_info, category_and_index)
def apply_outfit_buffs_for_sim_info(self, sim_info, category_and_index):
    """Clear existing clothing buffs, apply the outfit category's tuned
    buffs, then apply buffs granted by the individual CAS parts of the
    current outfit.
    """
    self._clear_clothing_buffs()
    outfit_data = (sim_info.get_outfit)(*category_and_index)
    # NOTE(review): the () default has no .buffs attribute — confirm every
    # category is present in OUTFIT_CATEGORY_TUNING.
    outfit_category_tuning = OutfitTuning.OUTFIT_CATEGORY_TUNING.get(category_and_index[0], ())
    for buff in outfit_category_tuning.buffs:
        self._add_outfit_buff(buff.buff_type, buff.buff_reason)
    # Bug fix: the decompiled original read
    # 'return outfit_data is None or outfit_data.part_ids or None', which
    # made the per-part buff application below unreachable. Restore the
    # early-out guard it stood for.
    if outfit_data is None or not outfit_data.part_ids:
        return
    buff_manager = services.get_instance_manager(sims4.resources.Types.BUFF)
    for buff_guid in cas.cas.get_buff_from_part_ids(outfit_data.part_ids):
        buff_type = buff_manager.get(buff_guid)
        if buff_type is None:
            logger.error('Error one of the parts in current outfit does not have a valid buff')
            continue
        self._add_outfit_buff(buff_type, self.BUFF_CLOTHING_REASON)
def _add_outfit_buff(self, buff_type, reason):
    """Add an outfit buff if allowed; otherwise re-enable decay on its
    commodity. Tracked in _buff_handles unless the buff has no commodity
    and could not be added."""
    if not buff_type.can_add(self):
        if buff_type.commodity is None:
            # Nothing to track for this buff; intentionally not recorded.
            return
        stat = self.get_stat_instance(buff_type.commodity)
        if stat is not None:
            stat.decay_enabled = True
        self._buff_handles.append((buff_type, None))
        return
    handle = self.add_buff(buff_type, buff_reason=reason)
    self._buff_handles.append((buff_type, handle))
def load_staged_interactions(self):
    """Restore staged interactions from the persisted SI state."""
    persisted_state = self.sim_info.si_state
    return self.si_state.load_staged_interactions(persisted_state)
def load_transitioning_interaction(self):
    """Restore the mid-transition interaction from the persisted SI state."""
    persisted_state = self.sim_info.si_state
    return self.si_state.load_transitioning_interaction(persisted_state)
def load_queued_interactions(self):
    """Restore queued interactions from the persisted SI state."""
    persisted_state = self.sim_info.si_state
    self.si_state.load_queued_interactions(persisted_state)
def update_related_objects(self, triggering_sim, forced_interaction=None):
    """Send the client the set of object ids related to this Sim's current
    (and optionally a forced) interaction."""
    if not triggering_sim.valid_for_distribution:
        return
    participant_mask = interactions.ParticipantType.Actor | interactions.ParticipantType.Object | interactions.ParticipantType.Listeners | interactions.ParticipantType.CarriedObject | interactions.ParticipantType.CraftingObject | interactions.ParticipantType.ActorSurface
    relevant_obj_ids = {self.id}

    def _collect(interaction):
        # Accumulate the ids of every masked participant of the interaction.
        for participant in interaction.get_participants(participant_mask):
            relevant_obj_ids.add(participant.id)

    if forced_interaction is not None:
        _collect(forced_interaction)
    for running in self.running_interactions_gen(Interaction):
        _collect(running)
    if self.queue is not None and self.queue.running is not None:
        _collect(self.queue.running)
    op = distributor.ops.SetRelatedObjects(relevant_obj_ids, self.id)
    Distributor.instance().add_op(triggering_sim, op)
def _is_on_spawn_point(self, use_intended_position=False):
    """True when the Sim stands inside the active lot's arrival spawn point
    footprint (optionally testing the intended rather than actual position)."""
    zone = services.current_zone()
    if not zone:
        return False
    spawn_point = zone.active_lot_arrival_spawn_point
    if spawn_point is None:
        return False
    if use_intended_position:
        position = self.intended_position
    else:
        position = self.position
    return test_point_in_polygon(position, spawn_point.get_footprint_polygon())
@caches.cached(maxsize=10)
def is_on_active_lot(self, tolerance=0, include_spawn_point=False):
    """Whether this Sim (or the object it is parented to) is on the active
    lot. Cached; optionally counts the arrival spawn point as on-lot."""
    if self.parent is not None:
        # Parented Sims inherit the answer from their parent object.
        return self.parent.is_on_active_lot(tolerance=tolerance)
    lot = services.current_zone().lot
    current_position = self.position
    if not lot.is_position_on_lot(current_position, tolerance):
        return (include_spawn_point and self._is_on_spawn_point()) or False
    intended = self.intended_position
    if intended != current_position and not lot.is_position_on_lot(intended, tolerance):
        return (include_spawn_point and self._is_on_spawn_point(use_intended_position=True)) or False
    return True
def log_sim_info(self, *args, **kwargs):
    # Thin pass-through to SimInfo's logging helper.
    (self.sim_info.log_sim_info)(*args, **kwargs)
def should_suppress_social_front_page_when_targeted(self):
    """True when any active buff requests suppressing the social front page
    while this Sim is targeted. (Simplified from 'if any: return True;
    return False' to a direct any().)"""
    return any(buff.suppress_social_front_page_when_targeted for buff in self.Buffs)
def discourage_route_to_join_social_group(self):
    # Thin pass-through to SimInfo's setting.
    return self.sim_info.discourage_route_to_join_social_group()
def bucks_trackers_gen(self):
    """Yield every bucks tracker associated with this Sim's SimInfo.

    The decompiled original appended a dead 'if False: yield None' to force
    generator status; 'yield from' already does that, so it was removed.
    """
    yield from self.sim_info.bucks_trackers_gen()
def fill_choices_menu_with_si_state_aops(self, target, context, choice_menu, scoring_gsi_handler):
    """Add mixer AOPs from every staged SI whose potential mixer targets
    include `target` (or a part owned by it) to the choice menu."""
    for si in self.si_state:
        potential_targets = si.get_potential_mixer_targets()
        for potential_target in potential_targets:
            if target is potential_target:
                break
            if potential_target.is_part and potential_target.part_owner is target:
                break
        else:
            # No potential target matched; skip this SI entirely.
            continue
        content_set = autonomy.content_sets.generate_content_set(self, (si.super_affordance),
          si,
          context,
          potential_targets=(
          target,),
          scoring_gsi_handler=scoring_gsi_handler,
          check_posture_compatibility=True,
          include_failed_aops_with_tooltip=True,
          aop_kwargs=(si.aop.interaction_parameters))
        # Tests already ran during content-set generation; add without re-testing.
        for _, aop, test_result in content_set:
            choice_menu.add_aop(aop, context, result_override=test_result, do_test=False)
def _get_current_subroot(self):
    """String subroot index of the posture target when it is a part; None
    otherwise."""
    posture_target = self.posture.target
    if posture_target is None or not posture_target.is_part:
        return None
    return str(posture_target.subroot_index)
@property
def pathplan_context(self):
    # Path-planning context owned by the routing component.
    return self.routing_component.pathplan_context
@property
def routing_context(self):
    # Returns the routing component's pathplan_context (not a distinct
    # routing context object).
    return self.routing_component.pathplan_context
def _create_routing_context(self):
    # Intentionally a no-op: the routing component owns the routing/pathplan
    # context here. NOTE(review): presumably overrides a base-class hook —
    # confirm against the parent class.
    pass
def add_teleport_style_interaction_to_inject(self, interaction):
    """Register (ref-count) a teleport style interaction for injection.

    Bug fix: the decompiled original chained these branches with 'elif', so
    the first interaction registered immediately after the dict was lazily
    created was never recorded. Dict creation is now a separate step.
    """
    if self._teleport_style_interactions_to_inject is None:
        self._teleport_style_interactions_to_inject = {}
    if interaction in self._teleport_style_interactions_to_inject:
        self._teleport_style_interactions_to_inject[interaction] += 1
    else:
        self._teleport_style_interactions_to_inject[interaction] = 1
def try_remove_teleport_style_interaction_to_inject(self, interaction):
    """Decrement the injection ref-count for `interaction`; drop the entry
    at zero and reset the dict to None when it empties. Logs (but tolerates)
    inconsistent state."""
    inject_map = self._teleport_style_interactions_to_inject
    if inject_map is None:
        logger.error('Attempted to remove a teleport style interaction to inject, but the dict is not initialized. Interaction: {}', interaction, owner='brgibson')
        return
    if interaction not in inject_map:
        logger.error('Attempted to remove a teleport style interaction to inject, but the entry for this interaction is not in the dict. Interaction: {}', interaction, owner='brgibson')
        return
    ref_count = inject_map[interaction]
    if ref_count <= 0:
        logger.error('Ref count for teleport style interaction to inject was zero or below when trying to remove it. Interaction: {}, Value: {}', interaction, ref_count, owner='brgibson')
    ref_count -= 1
    if ref_count <= 0:
        del inject_map[interaction]
        if not inject_map:
            self._teleport_style_interactions_to_inject = None
    else:
        inject_map[interaction] = ref_count
def get_teleport_style_affordance_to_inject_list(self):
    """Currently registered teleport style interactions as a list; () when
    none are registered."""
    inject_map = self._teleport_style_interactions_to_inject
    return list(inject_map) if inject_map else ()
def get_teleport_style_interaction_aop(self, interaction, override_pick=None, override_target=None):
    """Find a teleport style AOP that can be injected before `interaction`.

    Returns (aop, interaction_context, teleport_style_data) for the first
    registered teleport affordance whose test passes, else (None, None, None).
    """
    if interaction is None:
        return (None, None, None)
    sim = interaction.sim
    if sim is None:
        return (None, None, None)
    if not TeleportHelper.can_teleport_style_be_injected_before_interaction(sim, interaction):
        return (None, None, None)
    teleport_style_affordances = self.get_teleport_style_affordance_to_inject_list()
    # Bug fix: the decompiled original read
    # 'return teleport_style_affordances or (None, None, None)' here, which
    # returned the raw list and made the AOP construction below unreachable.
    # Restore the early-out guard it stood for.
    if not teleport_style_affordances:
        return (None, None, None)
    selected_pick = override_pick
    if selected_pick is None:
        selected_pick = interaction.context.pick
    selected_target = override_target
    if selected_target is None:
        selected_target = interaction.target
    interaction_context = InteractionContext(self, (InteractionContext.SOURCE_SCRIPT),
      (Priority.Critical),
      insert_strategy=(QueueInsertStrategy.FIRST),
      group_id=(interaction.group_id),
      pick=selected_pick)
    for teleport_style_affordance in teleport_style_affordances:
        aop = AffordanceObjectPair(teleport_style_affordance, selected_target,
          teleport_style_affordance,
          None,
          route_fail_on_transition_fail=False,
          allow_posture_changes=True,
          depended_on_si=interaction)
        test_result = aop.test(interaction_context)
        if test_result:
            teleport_style_data = TeleportTuning.get_teleport_data(teleport_style_affordance.teleport_style_tuning)
            return (aop, interaction_context, teleport_style_data)
    return (None, None, None)
def can_sim_teleport_using_teleport_style(self):
    """False when a routing slave or a carried Sim prevents teleporting."""
    if TeleportHelper.does_routing_slave_prevent_teleport(self):
        return False
    return not any(carry_object.is_sim for _, _, carry_object in get_carried_objects_gen(self))
@property
def object_radius(self):
    # Routing radius of this Sim, as reported by its routing component.
    return self.routing_component.object_radius
@property
def connectivity_handles(self):
    # Routing connectivity handles, delegated to the routing component.
    return self.routing_component.connectivity_handles
@property
def is_moving(self):
    # Whether the Sim is currently in motion, per the routing component.
    return self.routing_component.is_moving
# Lock persistence tunables on Sim: the Sim game object itself is never
# persisted and never written to the world file (its state lives on SimInfo).
lock_instance_tunables(Sim, _persistence=(PersistenceType.NONE),
  _world_file_object_persists=False)
|
# Needs to be run like: pytest -s test_sqlite_provider.py
# In eclipse we need to set PYGEOAPI_CONFIG, Run>Debug Configurations>
# (Arguments as py.test and set external variables to the correct config path)
import pytest
from pygeoapi.provider.sqlite import SQLiteProvider
@pytest.fixture()
def config():
    """Provider settings pointing at the Natural Earth admin-0 SQLite fixture."""
    return dict(
        name='Sqlite',
        data='./tests/data/ne_110m_admin_0_countries.sqlite',
        id_field='ogc_fid',
        table='ne_110m_admin_0_countries',
    )
def test_query(config):
    """Testing query for a valid JSON object with geometry"""
    provider = SQLiteProvider(config)
    collection = provider.query()
    assert collection.get('type', None) == "FeatureCollection"
    feature_list = collection.get('features', None)
    assert feature_list is not None
    first_feature = feature_list[0]
    assert first_feature.get("properties", None) is not None
    assert first_feature.get("geometry", None) is not None
def test_get(config):
    """Fetch a single feature by id and verify its admin name."""
    provider = SQLiteProvider(config)
    response = provider.get(118)
    assert len(response['features']) == 1
    assert "Netherlands" in response['features'][0]['properties']['admin']
|
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2014 The Bitcoin developers
// Copyright (c) 2014-2015 The Dash developers
// Copyright (c) 2015-2017 The PIVX developers
// Copyright (c) 2018 The Sprocket developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifndef BITCOIN_WALLET_H
#define BITCOIN_WALLET_H
#include "amount.h"
#include "base58.h"
#include "crypter.h"
#include "kernel.h"
#include "key.h"
#include "keystore.h"
#include "main.h"
#include "primitives/block.h"
#include "primitives/transaction.h"
#include "ui_interface.h"
#include "util.h"
#include "validationinterface.h"
#include "wallet_ismine.h"
#include "walletdb.h"
#include <algorithm>
#include <map>
#include <set>
#include <stdexcept>
#include <stdint.h>
#include <string>
#include <utility>
#include <vector>
/**
* Settings
*/
extern CFeeRate payTxFee;             // fee rate applied to outgoing transactions (-paytxfee)
extern CAmount maxTxFee;              // absolute ceiling on a single transaction fee (-maxtxfee)
extern unsigned int nTxConfirmTarget; // confirmation target used for fee estimation
extern bool bSpendZeroConfChange;     // allow spending unconfirmed change outputs
extern bool fSendFreeTransactions;    // attempt zero-fee sends when eligible
extern bool fPayAtLeastCustomFee;     // never pay less than the user-configured fee
//! -paytxfee default
static const CAmount DEFAULT_TRANSACTION_FEE = 0;
//! -paytxfee will warn if called with a higher fee than this amount (in satoshis) per KB
static const CAmount nHighTransactionFeeWarning = 0.1 * COIN;
//! -maxtxfee default
static const CAmount DEFAULT_TRANSACTION_MAXFEE = 1 * COIN;
//! -maxtxfee will warn if called with a higher fee than this amount (in satoshis)
static const CAmount nHighTransactionMaxFeeWarning = 100 * nHighTransactionFeeWarning;
//! Largest (in bytes) free transaction we're willing to create
static const unsigned int MAX_FREE_TRANSACTION_CREATE_SIZE = 1000;
class CAccountingEntry;
class CCoinControl;
class COutput;
class CReserveKey;
class CScript;
class CWalletTx;
/** (client) version numbers for particular wallet features */
enum WalletFeature {
    FEATURE_BASE = 10500,        // the earliest version new wallets supports (only useful for getinfo's clientversion output)
    FEATURE_WALLETCRYPT = 40000, // wallet encryption
    FEATURE_COMPRPUBKEY = 60000, // compressed public keys
    FEATURE_LATEST = 61000       // newest feature version defined by this build
};
// Filter applied when enumerating spendable outputs (see AvailableCoins).
enum AvailableCoinsType {
    ALL_COINS = 1,                       // no filtering
    ONLY_DENOMINATED = 2,                // obfuscation-denominated outputs only
    ONLY_NOT1000IFMN = 3,                // exclude the 1000-coin masternode collateral
    ONLY_NONDENOMINATED_NOT1000IFMN = 4, // ONLY_NONDENOMINATED and not 1000 SPRKC at the same time
    ONLY_1000 = 5                        // find masternode outputs including locked ones (use with caution)
};
struct CompactTallyItem {
CBitcoinAddress address;
CAmount nAmount;
std::vector<CTxIn> vecTxIn;
CompactTallyItem()
{
nAmount = 0;
}
};
/** A key pool entry */
class CKeyPool
{
public:
    int64_t nTime;     // creation time of this pool entry (unix time)
    CPubKey vchPubKey; // pooled public key, handed out on reservation
    CKeyPool();
    CKeyPool(const CPubKey& vchPubKeyIn);
    ADD_SERIALIZE_METHODS;
    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion)
    {
        // nVersion is omitted when serializing for hashing (SER_GETHASH).
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        READWRITE(nTime);
        READWRITE(vchPubKey);
    }
};
/** Address book entry: a label, a purpose string, and per-destination data. */
class CAddressBookData
{
public:
    std::string name;
    std::string purpose;

    CAddressBookData() : purpose("unknown") {}

    typedef std::map<std::string, std::string> StringMap;
    StringMap destdata;
};
/**
* A CWallet is an extension of a keystore, which also maintains a set of transactions and balances,
* and provides the ability to create new transactions.
*/
class CWallet : public CCryptoKeyStore, public CValidationInterface
{
private:
bool SelectCoins(const CAmount& nTargetValue, std::set<std::pair<const CWalletTx*, unsigned int> >& setCoinsRet, CAmount& nValueRet, const CCoinControl* coinControl = NULL, AvailableCoinsType coin_type = ALL_COINS, bool useIX = true) const;
//it was public bool SelectCoins(int64_t nTargetValue, std::set<std::pair<const CWalletTx*,unsigned int> >& setCoinsRet, int64_t& nValueRet, const CCoinControl *coinControl = NULL, AvailableCoinsType coin_type=ALL_COINS, bool useIX = true) const;
CWalletDB* pwalletdbEncryption;
//! the current wallet version: clients below this version are not able to load the wallet
int nWalletVersion;
//! the maximum wallet format version: memory-only variable that specifies to what version this wallet may be upgraded
int nWalletMaxVersion;
int64_t nNextResend;
int64_t nLastResend;
/**
* Used to keep track of spent outpoints, and
* detect and report conflicts (double-spends or
* mutated transactions where the mutant gets mined).
*/
typedef std::multimap<COutPoint, uint256> TxSpends;
TxSpends mapTxSpends;
void AddToSpends(const COutPoint& outpoint, const uint256& wtxid);
void AddToSpends(const uint256& wtxid);
void SyncMetaData(std::pair<TxSpends::iterator, TxSpends::iterator>);
public:
bool MintableCoins();
bool SelectStakeCoins(std::set<std::pair<const CWalletTx*, unsigned int> >& setCoins, CAmount nTargetAmount) const;
bool SelectCoinsDark(CAmount nValueMin, CAmount nValueMax, std::vector<CTxIn>& setCoinsRet, CAmount& nValueRet, int nObfuscationRoundsMin, int nObfuscationRoundsMax) const;
bool SelectCoinsByDenominations(int nDenom, CAmount nValueMin, CAmount nValueMax, std::vector<CTxIn>& vCoinsRet, std::vector<COutput>& vCoinsRet2, CAmount& nValueRet, int nObfuscationRoundsMin, int nObfuscationRoundsMax);
bool SelectCoinsDarkDenominated(CAmount nTargetValue, std::vector<CTxIn>& setCoinsRet, CAmount& nValueRet) const;
bool HasCollateralInputs(bool fOnlyConfirmed = true) const;
bool IsCollateralAmount(CAmount nInputAmount) const;
int CountInputsWithAmount(CAmount nInputAmount);
bool SelectCoinsCollateral(std::vector<CTxIn>& setCoinsRet, CAmount& nValueRet) const;
/*
* Main wallet lock.
* This lock protects all the fields added by CWallet
* except for:
* fFileBacked (immutable after instantiation)
* strWalletFile (immutable after instantiation)
*/
mutable CCriticalSection cs_wallet;
bool fFileBacked;
bool fWalletUnlockAnonymizeOnly;
std::string strWalletFile;
std::set<int64_t> setKeyPool;
std::map<CKeyID, CKeyMetadata> mapKeyMetadata;
typedef std::map<unsigned int, CMasterKey> MasterKeyMap;
MasterKeyMap mapMasterKeys;
unsigned int nMasterKeyMaxID;
// Stake Settings
unsigned int nHashDrift;
unsigned int nHashInterval;
uint64_t nStakeSplitThreshold;
int nStakeSetUpdateTime;
//MultiSend
std::vector<std::pair<std::string, int> > vMultiSend;
bool fMultiSendStake;
bool fMultiSendMasternodeReward;
bool fMultiSendNotify;
std::string strMultiSendChangeAddress;
int nLastMultiSendHeight;
std::vector<std::string> vDisabledAddresses;
//Auto Combine Inputs
bool fCombineDust;
CAmount nAutoCombineThreshold;
// Default-construct an in-memory (non-file-backed) wallet.
CWallet()
{
    SetNull();
}
// Construct a wallet backed by the given wallet file on disk.
CWallet(std::string strWalletFileIn)
{
    SetNull();
    strWalletFile = strWalletFileIn;
    fFileBacked = true;
}
~CWallet()
{
    // pwalletdbEncryption is owned by this wallet; delete on NULL is safe.
    delete pwalletdbEncryption;
}
// Reset every member to its default value; called by all constructors.
void SetNull()
{
    nWalletVersion = FEATURE_BASE;
    nWalletMaxVersion = FEATURE_BASE;
    fFileBacked = false;
    nMasterKeyMaxID = 0;
    pwalletdbEncryption = NULL;
    nOrderPosNext = 0;
    nNextResend = 0;
    nLastResend = 0;
    nTimeFirstKey = 0;
    fWalletUnlockAnonymizeOnly = false;
    // Stake Settings
    nHashDrift = 45;
    nStakeSplitThreshold = 2000;
    nHashInterval = 22;
    nStakeSetUpdateTime = 300; // 5 minutes
    //MultiSend
    vMultiSend.clear();
    fMultiSendStake = false;
    fMultiSendMasternodeReward = false;
    fMultiSendNotify = false;
    strMultiSendChangeAddress = "";
    nLastMultiSendHeight = 0;
    vDisabledAddresses.clear();
    //Auto Combine Dust
    fCombineDust = false;
    nAutoCombineThreshold = 0;
}
/** MultiSend is considered enabled when either trigger is active. */
bool isMultiSendEnabled()
{
    return fMultiSendStake || fMultiSendMasternodeReward;
}
// Turn off both MultiSend triggers (stake and masternode reward).
void setMultiSendDisabled()
{
    fMultiSendMasternodeReward = false;
    fMultiSendStake = false;
}
std::map<uint256, CWalletTx> mapWallet;
int64_t nOrderPosNext;
std::map<uint256, int> mapRequestCount;
std::map<CTxDestination, CAddressBookData> mapAddressBook;
CPubKey vchDefaultKey;
std::set<COutPoint> setLockedCoins;
int64_t nTimeFirstKey;
const CWalletTx* GetWalletTx(const uint256& hash) const;
//! check whether we are allowed to upgrade (or already support) to the named feature
bool CanSupportFeature(enum WalletFeature wf)
{
    // Caller must hold cs_wallet; upgrades are capped by nWalletMaxVersion.
    AssertLockHeld(cs_wallet);
    return nWalletMaxVersion >= wf;
}
void AvailableCoins(std::vector<COutput>& vCoins, bool fOnlyConfirmed = true, const CCoinControl* coinControl = NULL, bool fIncludeZeroValue = false, AvailableCoinsType nCoinType = ALL_COINS, bool fUseIX = false) const;
std::map<CBitcoinAddress, std::vector<COutput> > AvailableCoinsByAddress(bool fConfirmed = true, CAmount maxCoinValue = 0);
bool SelectCoinsMinConf(const CAmount& nTargetValue, int nConfMine, int nConfTheirs, std::vector<COutput> vCoins, std::set<std::pair<const CWalletTx*, unsigned int> >& setCoinsRet, CAmount& nValueRet) const;
/// Get 1000DASH output and keys which can be used for the Masternode
bool GetMasternodeVinAndKeys(CTxIn& txinRet, CPubKey& pubKeyRet, CKey& keyRet, std::string strTxHash = "", std::string strOutputIndex = "");
/// Extract txin information and keys from output
bool GetVinAndKeysFromOutput(COutput out, CTxIn& txinRet, CPubKey& pubKeyRet, CKey& keyRet);
bool IsSpent(const uint256& hash, unsigned int n) const;
bool IsLockedCoin(uint256 hash, unsigned int n) const;
void LockCoin(COutPoint& output);
void UnlockCoin(COutPoint& output);
void UnlockAllCoins();
void ListLockedCoins(std::vector<COutPoint>& vOutpts);
CAmount GetTotalValue(std::vector<CTxIn> vCoins);
// keystore implementation
// Generate a new key
CPubKey GenerateNewKey();
//! Adds a key to the store, and saves it to disk.
bool AddKeyPubKey(const CKey& key, const CPubKey& pubkey);
//! Adds a key to the store, without saving it to disk (used by LoadWallet)
bool LoadKey(const CKey& key, const CPubKey& pubkey) { return CCryptoKeyStore::AddKeyPubKey(key, pubkey); }
//! Load metadata (used by LoadWallet)
bool LoadKeyMetadata(const CPubKey& pubkey, const CKeyMetadata& metadata);
// Set the wallet's minimum version while loading (caller holds cs_wallet);
// also raises the maximum upgradable version if needed.
bool LoadMinVersion(int nVersion)
{
    AssertLockHeld(cs_wallet);
    nWalletVersion = nVersion;
    nWalletMaxVersion = std::max(nWalletMaxVersion, nVersion);
    return true;
}
//! Adds an encrypted key to the store, and saves it to disk.
bool AddCryptedKey(const CPubKey& vchPubKey, const std::vector<unsigned char>& vchCryptedSecret);
//! Adds an encrypted key to the store, without saving it to disk (used by LoadWallet)
bool LoadCryptedKey(const CPubKey& vchPubKey, const std::vector<unsigned char>& vchCryptedSecret);
bool AddCScript(const CScript& redeemScript);
bool LoadCScript(const CScript& redeemScript);
//! Adds a destination data tuple to the store, and saves it to disk
bool AddDestData(const CTxDestination& dest, const std::string& key, const std::string& value);
//! Erases a destination data tuple in the store and on disk
bool EraseDestData(const CTxDestination& dest, const std::string& key);
//! Adds a destination data tuple to the store, without saving it to disk
bool LoadDestData(const CTxDestination& dest, const std::string& key, const std::string& value);
//! Look up a destination data tuple in the store, return true if found false otherwise
bool GetDestData(const CTxDestination& dest, const std::string& key, std::string* value) const;
//! Adds a watch-only address to the store, and saves it to disk.
bool AddWatchOnly(const CScript& dest);
bool RemoveWatchOnly(const CScript& dest);
//! Adds a watch-only address to the store, without saving it to disk (used by LoadWallet)
bool LoadWatchOnly(const CScript& dest);
//! Adds a MultiSig address to the store, and saves it to disk.
bool AddMultiSig(const CScript& dest);
bool RemoveMultiSig(const CScript& dest);
//! Adds a MultiSig address to the store, without saving it to disk (used by LoadWallet)
bool LoadMultiSig(const CScript& dest);
bool Unlock(const SecureString& strWalletPassphrase, bool anonimizeOnly = false);
bool ChangeWalletPassphrase(const SecureString& strOldWalletPassphrase, const SecureString& strNewWalletPassphrase);
bool EncryptWallet(const SecureString& strWalletPassphrase);
void GetKeyBirthTimes(std::map<CKeyID, int64_t>& mapKeyBirth) const;
/**
* Increment the next transaction order id
* @return next transaction order id
*/
int64_t IncOrderPosNext(CWalletDB* pwalletdb = NULL);
typedef std::pair<CWalletTx*, CAccountingEntry*> TxPair;
typedef std::multimap<int64_t, TxPair> TxItems;
/**
* Get the wallet's activity log
* @return multimap of ordered transactions and accounting entries
* @warning Returned pointers are *only* valid within the scope of passed acentries
*/
TxItems OrderedTxItems(std::list<CAccountingEntry>& acentries, std::string strAccount = "");
//! Invalidate all per-transaction cached balances so they are recomputed on next use.
void MarkDirty();
bool AddToWallet(const CWalletTx& wtxIn, bool fFromLoadWallet = false);
void SyncTransaction(const CTransaction& tx, const CBlock* pblock);
bool AddToWalletIfInvolvingMe(const CTransaction& tx, const CBlock* pblock, bool fUpdate);
void EraseFromWallet(const uint256& hash);
int ScanForWalletTransactions(CBlockIndex* pindexStart, bool fUpdate = false);
void ReacceptWalletTransactions();
void ResendWalletTransactions();
//! Balance queries, split by maturity/confirmation status and ownership (spendable vs watch-only).
CAmount GetBalance() const;
CAmount GetUnconfirmedBalance() const;
CAmount GetImmatureBalance() const;
CAmount GetAnonymizableBalance() const;
CAmount GetAnonymizedBalance() const;
double GetAverageAnonymizedRounds() const;
CAmount GetNormalizedAnonymizedBalance() const;
CAmount GetDenominatedBalance(bool unconfirmed = false) const;
CAmount GetWatchOnlyBalance() const;
CAmount GetUnconfirmedWatchOnlyBalance() const;
CAmount GetImmatureWatchOnlyBalance() const;
//! Create a transaction paying each (scriptPubKey, amount) in vecSend; on failure
//! returns false with a human-readable reason in strFailReason.
bool CreateTransaction(const std::vector<std::pair<CScript, CAmount> >& vecSend,
    CWalletTx& wtxNew,
    CReserveKey& reservekey,
    CAmount& nFeeRet,
    std::string& strFailReason,
    const CCoinControl* coinControl = NULL,
    AvailableCoinsType coin_type = ALL_COINS,
    bool useIX = false,
    CAmount nFeePay = 0);
//! Single-recipient convenience overload of the vector version above.
bool CreateTransaction(CScript scriptPubKey, const CAmount& nValue, CWalletTx& wtxNew, CReserveKey& reservekey, CAmount& nFeeRet, std::string& strFailReason, const CCoinControl* coinControl = NULL, AvailableCoinsType coin_type = ALL_COINS, bool useIX = false, CAmount nFeePay = 0);
bool CommitTransaction(CWalletTx& wtxNew, CReserveKey& reservekey, std::string strCommand = "tx");
//! Obfuscation (mixing) helpers.
std::string PrepareObfuscationDenominate(int minRounds, int maxRounds);
int GenerateObfuscationOutputs(int nTotalValue, std::vector<CTxOut>& vout);
bool CreateCollateralTransaction(CMutableTransaction& txCollateral, std::string& strReason);
bool ConvertList(std::vector<CTxIn> vCoins, std::vector<int64_t>& vecAmounts);
//! Proof-of-stake block creation; fills txNew and its timestamp on success.
bool CreateCoinStake(const CKeyStore& keystore, unsigned int nBits, int64_t nSearchInterval, CMutableTransaction& txNew, unsigned int& nTxNewTime);
bool MultiSend();
void AutoCombineDust();
static CFeeRate minTxFee;
static CAmount GetMinimumFee(unsigned int nTxBytes, unsigned int nConfirmTarget, const CTxMemPool& pool);
//! Key pool management.
bool NewKeyPool();
bool TopUpKeyPool(unsigned int kpSize = 0);
void ReserveKeyFromKeyPool(int64_t& nIndex, CKeyPool& keypool);
void KeepKey(int64_t nIndex);
void ReturnKey(int64_t nIndex);
bool GetKeyFromPool(CPubKey& key);
int64_t GetOldestKeyPoolTime();
void GetAllReserveKeys(std::set<CKeyID>& setAddress) const;
std::set<std::set<CTxDestination> > GetAddressGroupings();
std::map<CTxDestination, CAmount> GetAddressBalances();
std::set<CTxDestination> GetAccountAddresses(std::string strAccount) const;
bool GetBudgetSystemCollateralTX(CTransaction& tx, uint256 hash, bool useIX);
bool GetBudgetSystemCollateralTX(CWalletTx& tx, uint256 hash, bool useIX);
// get the Obfuscation chain depth for a given input
int GetRealInputObfuscationRounds(CTxIn in, int rounds) const;
// respect current settings
int GetInputObfuscationRounds(CTxIn in) const;
bool IsDenominated(const CTxIn& txin) const;
bool IsDenominated(const CTransaction& tx) const;
bool IsDenominatedAmount(int64_t nInputAmount) const;
isminetype IsMine(const CTxIn& txin) const;
CAmount GetDebit(const CTxIn& txin, const isminefilter& filter) const;
//! Ownership test for a single output; delegates to the global ::IsMine on its scriptPubKey.
isminetype IsMine(const CTxOut& txout) const
{
    return ::IsMine(*this, txout.scriptPubKey);
}
//! Credit that a single output contributes to us under the given ownership filter.
//! @throws std::runtime_error if the output value is outside the valid money range.
CAmount GetCredit(const CTxOut& txout, const isminefilter& filter) const
{
    if (!MoneyRange(txout.nValue))
        throw std::runtime_error("CWallet::GetCredit() : value out of range");
    if (IsMine(txout) & filter)
        return txout.nValue;
    return 0;
}
bool IsChange(const CTxOut& txout) const;
//! Value of this output if it is change back to us, otherwise 0.
//! @throws std::runtime_error if the output value is outside the valid money range.
CAmount GetChange(const CTxOut& txout) const
{
    if (!MoneyRange(txout.nValue))
        throw std::runtime_error("CWallet::GetChange() : value out of range");
    if (IsChange(txout))
        return txout.nValue;
    return 0;
}
//! True if any output of the transaction belongs to this wallet.
bool IsMine(const CTransaction& tx) const
{
    for (unsigned int i = 0; i < tx.vout.size(); i++) {
        if (IsMine(tx.vout[i]))
            return true;
    }
    return false;
}
/** should probably be renamed to IsRelevantToMe */
//! True if the transaction spends any input we own (spendable or watch-only).
bool IsFromMe(const CTransaction& tx) const
{
    return (GetDebit(tx, ISMINE_ALL) > 0);
}
//! Sum of this transaction's inputs that spend coins we own, under the given filter.
//! @throws std::runtime_error if the running total leaves the valid money range.
CAmount GetDebit(const CTransaction& tx, const isminefilter& filter) const
{
    CAmount nDebit = 0;
    for (unsigned int i = 0; i < tx.vin.size(); i++) {
        nDebit += GetDebit(tx.vin[i], filter);
        if (!MoneyRange(nDebit))
            throw std::runtime_error("CWallet::GetDebit() : value out of range");
    }
    return nDebit;
}
//! Sum of this transaction's outputs that pay us, under the given filter.
//! @throws std::runtime_error if the running total leaves the valid money range.
CAmount GetCredit(const CTransaction& tx, const isminefilter& filter) const
{
    CAmount nCredit = 0;
    for (unsigned int i = 0; i < tx.vout.size(); i++) {
        nCredit += GetCredit(tx.vout[i], filter);
        if (!MoneyRange(nCredit))
            throw std::runtime_error("CWallet::GetCredit() : value out of range");
    }
    return nCredit;
}
//! Total change value returned to us by this transaction.
//! @throws std::runtime_error if the running total leaves the valid money range.
CAmount GetChange(const CTransaction& tx) const
{
    CAmount nChange = 0;
    for (unsigned int i = 0; i < tx.vout.size(); i++) {
        nChange += GetChange(tx.vout[i]);
        if (!MoneyRange(nChange))
            throw std::runtime_error("CWallet::GetChange() : value out of range");
    }
    return nChange;
}
//! Persist the current best-chain locator to the wallet database.
void SetBestChain(const CBlockLocator& loc);
DBErrors LoadWallet(bool& fFirstRunRet);
DBErrors ZapWalletTx(std::vector<CWalletTx>& vWtx);
//! Address book maintenance.
bool SetAddressBook(const CTxDestination& address, const std::string& strName, const std::string& purpose);
bool DelAddressBook(const CTxDestination& address);
bool UpdatedTransaction(const uint256& hashTx);
void Inventory(const uint256& hash)
{
{
LOCK(cs_wallet);
std::map<uint256, int>::iterator mi = mapRequestCount.find(hash);
if (mi != mapRequestCount.end())
(*mi).second++;
}
}
//! Number of pre-generated keys currently in the pool; caller must hold cs_wallet.
unsigned int GetKeyPoolSize()
{
    AssertLockHeld(cs_wallet); // setKeyPool
    return setKeyPool.size();
}
bool SetDefaultKey(const CPubKey& vchPubKey);
//! signify that a particular wallet feature is now used. this may change nWalletVersion and nWalletMaxVersion if those are lower
bool SetMinVersion(enum WalletFeature, CWalletDB* pwalletdbIn = NULL, bool fExplicit = false);
//! change which version we're allowed to upgrade to (note that this does not immediately imply upgrading to that format)
bool SetMaxVersion(int nVersion);
//! get the current wallet format (the oldest client version guaranteed to understand this wallet)
int GetVersion()
{
    LOCK(cs_wallet);
    return nWalletVersion;
}
//! Get wallet transactions that conflict with given transaction (spend same outputs)
std::set<uint256> GetConflicts(const uint256& txid) const;
/**
 * Address book entry changed.
 * @note called with lock cs_wallet held.
 */
boost::signals2::signal<void(CWallet* wallet, const CTxDestination& address, const std::string& label, bool isMine, const std::string& purpose, ChangeType status)> NotifyAddressBookChanged;
/**
 * Wallet transaction added, removed or updated.
 * @note called with lock cs_wallet held.
 */
boost::signals2::signal<void(CWallet* wallet, const uint256& hashTx, ChangeType status)> NotifyTransactionChanged;
/** Show progress e.g. for rescan */
boost::signals2::signal<void(const std::string& title, int nProgress)> ShowProgress;
/** Watch-only address added */
boost::signals2::signal<void(bool fHaveWatchOnly)> NotifyWatchonlyChanged;
/** MultiSig address added */
boost::signals2::signal<void(bool fHaveMultiSig)> NotifyMultiSigChanged;
};
/** A key allocated from the key pool. */
class CReserveKey
{
protected:
    CWallet* pwallet;  //!< wallet that owns the pooled key
    int64_t nIndex;    //!< keypool index; -1 while no key is reserved
    CPubKey vchPubKey; //!< the reserved public key once fetched

public:
    //! Bind to a wallet; no key is actually reserved until GetReservedKey().
    CReserveKey(CWallet* pwalletIn) : pwallet(pwalletIn), nIndex(-1)
    {
    }
    //! Any still-reserved key is returned to the pool on destruction.
    ~CReserveKey()
    {
        ReturnKey();
    }
    void ReturnKey();
    bool GetReservedKey(CPubKey& pubkey);
    void KeepKey();
};
typedef std::map<std::string, std::string> mapValue_t;
//! Restore a transaction's ordering position from its "n" metadata entry;
//! entries without one get the sentinel -1.
static void ReadOrderPos(int64_t& nOrderPos, mapValue_t& mapValue)
{
    if (mapValue.count("n"))
        nOrderPos = atoi64(mapValue["n"].c_str());
    else
        nOrderPos = -1; // TODO: calculate elsewhere
}
//! Persist a transaction's ordering position into its "n" metadata entry;
//! the sentinel -1 (unset) is deliberately not written.
static void WriteOrderPos(const int64_t& nOrderPos, mapValue_t& mapValue)
{
    if (nOrderPos != -1)
        mapValue["n"] = i64tostr(nOrderPos);
}
//! One entry in a transaction's sent/received breakdown (see CWalletTx::GetAmounts).
struct COutputEntry {
    CTxDestination destination; //!< address the amount went to / came from
    CAmount amount;             //!< value of the output
    int vout;                   //!< output index within the transaction
};
/** A transaction with a merkle branch linking it to the block chain. */
class CMerkleTx : public CTransaction
{
private:
    //! Depth computation without SwiftX/IX lock handling; used by IsInMainChain().
    int GetDepthInMainChainINTERNAL(const CBlockIndex*& pindexRet) const;

public:
    uint256 hashBlock;                  //!< hash of the containing block (0 when unlinked)
    std::vector<uint256> vMerkleBranch; //!< merkle path from this tx to the block's root
    int nIndex;                         //!< position in the block (-1 when unlinked)
    // memory only
    mutable bool fMerkleVerified; //!< cached result of merkle-branch verification
    CMerkleTx()
    {
        Init();
    }
    CMerkleTx(const CTransaction& txIn) : CTransaction(txIn)
    {
        Init();
    }
    //! Reset the block-link fields to the "not in a block" defaults.
    void Init()
    {
        hashBlock = 0;
        nIndex = -1;
        fMerkleVerified = false;
    }
    ADD_SERIALIZE_METHODS;
    //! Serializes the base transaction followed by the merkle-link fields;
    //! field order here is the on-disk/wire format and must not change.
    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion)
    {
        READWRITE(*(CTransaction*)this);
        nVersion = this->nVersion;
        READWRITE(hashBlock);
        READWRITE(vMerkleBranch);
        READWRITE(nIndex);
    }
    int SetMerkleBranch(const CBlock& block);
    /**
     * Return depth of transaction in blockchain:
     * -1  : not in blockchain, and not in memory pool (conflicted transaction)
     *  0  : in memory pool, waiting to be included in a block
     * >=1 : this many blocks deep in the main chain
     */
    int GetDepthInMainChain(const CBlockIndex*& pindexRet, bool enableIX = true) const;
    int GetDepthInMainChain(bool enableIX = true) const
    {
        const CBlockIndex* pindexRet;
        return GetDepthInMainChain(pindexRet, enableIX);
    }
    bool IsInMainChain() const
    {
        const CBlockIndex* pindexRet;
        return GetDepthInMainChainINTERNAL(pindexRet) > 0;
    }
    int GetBlocksToMaturity() const;
    bool AcceptToMemoryPool(bool fLimitFree = true, bool fRejectInsaneFee = true, bool ignoreFees = false);
    int GetTransactionLockSignatures() const;
    bool IsTransactionLockTimedOut() const;
};
/**
 * A transaction with a bunch of additional info that only the owner cares about.
 * It includes any unrecorded transactions needed to link it back to the block chain.
 */
class CWalletTx : public CMerkleTx
{
private:
    const CWallet* pwallet; //!< owning wallet; may be NULL (see BindWallet)

public:
    mapValue_t mapValue; //!< free-form string metadata, round-tripped through serialization
    std::vector<std::pair<std::string, std::string> > vOrderForm;
    unsigned int fTimeReceivedIsTxTime;
    unsigned int nTimeReceived; //! time received by this node
    unsigned int nTimeSmart;    //!< "smart" sort timestamp, stored in mapValue["timesmart"]
    char fFromMe;
    std::string strFromAccount;
    int64_t nOrderPos; //! position in ordered transaction list
    // memory only: lazily-computed balance caches; each f*Cached flag guards
    // the matching n*Cached value and is cleared by MarkDirty()
    mutable bool fDebitCached;
    mutable bool fCreditCached;
    mutable bool fImmatureCreditCached;
    mutable bool fAvailableCreditCached;
    mutable bool fAnonymizableCreditCached;
    mutable bool fAnonymizedCreditCached;
    mutable bool fDenomUnconfCreditCached;
    mutable bool fDenomConfCreditCached;
    mutable bool fWatchDebitCached;
    mutable bool fWatchCreditCached;
    mutable bool fImmatureWatchCreditCached;
    mutable bool fAvailableWatchCreditCached;
    mutable bool fChangeCached;
    mutable CAmount nDebitCached;
    mutable CAmount nCreditCached;
    mutable CAmount nImmatureCreditCached;
    mutable CAmount nAvailableCreditCached;
    mutable CAmount nAnonymizableCreditCached;
    mutable CAmount nAnonymizedCreditCached;
    mutable CAmount nDenomUnconfCreditCached;
    mutable CAmount nDenomConfCreditCached;
    mutable CAmount nWatchDebitCached;
    mutable CAmount nWatchCreditCached;
    mutable CAmount nImmatureWatchCreditCached;
    mutable CAmount nAvailableWatchCreditCached;
    mutable CAmount nChangeCached;

    CWalletTx()
    {
        Init(NULL);
    }
    CWalletTx(const CWallet* pwalletIn)
    {
        Init(pwalletIn);
    }
    CWalletTx(const CWallet* pwalletIn, const CMerkleTx& txIn) : CMerkleTx(txIn)
    {
        Init(pwalletIn);
    }
    CWalletTx(const CWallet* pwalletIn, const CTransaction& txIn) : CMerkleTx(txIn)
    {
        Init(pwalletIn);
    }
    //! Reset all metadata and caches; called by every constructor and on deserialize.
    void Init(const CWallet* pwalletIn)
    {
        pwallet = pwalletIn;
        mapValue.clear();
        vOrderForm.clear();
        fTimeReceivedIsTxTime = false;
        nTimeReceived = 0;
        nTimeSmart = 0;
        fFromMe = false;
        strFromAccount.clear();
        fDebitCached = false;
        fCreditCached = false;
        fImmatureCreditCached = false;
        fAvailableCreditCached = false;
        fAnonymizableCreditCached = false;
        fAnonymizedCreditCached = false;
        fDenomUnconfCreditCached = false;
        fDenomConfCreditCached = false;
        fWatchDebitCached = false;
        fWatchCreditCached = false;
        fImmatureWatchCreditCached = false;
        fAvailableWatchCreditCached = false;
        fChangeCached = false;
        nDebitCached = 0;
        nCreditCached = 0;
        nImmatureCreditCached = 0;
        nAvailableCreditCached = 0;
        nAnonymizableCreditCached = 0;
        nAnonymizedCreditCached = 0;
        nDenomUnconfCreditCached = 0;
        nDenomConfCreditCached = 0;
        nWatchDebitCached = 0;
        nWatchCreditCached = 0;
        nAvailableWatchCreditCached = 0;
        nImmatureWatchCreditCached = 0;
        nChangeCached = 0;
        nOrderPos = -1;
    }
    ADD_SERIALIZE_METHODS;
    //! (De)serializes the tx plus wallet metadata. Several legacy fields (fSpent,
    //! vUnused) are kept only for on-disk format compatibility; field order is
    //! the storage format and must not change.
    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion)
    {
        if (ser_action.ForRead())
            Init(NULL);
        char fSpent = false; // legacy on-disk field, value ignored on read
        if (!ser_action.ForRead()) {
            // Stash derived fields into mapValue so they travel with the record.
            mapValue["fromaccount"] = strFromAccount;
            WriteOrderPos(nOrderPos, mapValue);
            if (nTimeSmart)
                mapValue["timesmart"] = strprintf("%u", nTimeSmart);
        }
        READWRITE(*(CMerkleTx*)this);
        std::vector<CMerkleTx> vUnused; //! Used to be vtxPrev
        READWRITE(vUnused);
        READWRITE(mapValue);
        READWRITE(vOrderForm);
        READWRITE(fTimeReceivedIsTxTime);
        READWRITE(nTimeReceived);
        READWRITE(fFromMe);
        READWRITE(fSpent);
        if (ser_action.ForRead()) {
            strFromAccount = mapValue["fromaccount"];
            ReadOrderPos(nOrderPos, mapValue);
            nTimeSmart = mapValue.count("timesmart") ? (unsigned int)atoi64(mapValue["timesmart"]) : 0;
        }
        // Strip the bookkeeping keys again so mapValue only holds user metadata.
        mapValue.erase("fromaccount");
        mapValue.erase("version");
        mapValue.erase("spent");
        mapValue.erase("n");
        mapValue.erase("timesmart");
    }
    //! make sure balances are recalculated
    void MarkDirty()
    {
        fCreditCached = false;
        fImmatureCreditCached = false; // FIX: was never invalidated, leaving a stale immature-credit cache
        fAvailableCreditCached = false;
        fAnonymizableCreditCached = false;
        fAnonymizedCreditCached = false;
        fDenomUnconfCreditCached = false;
        fDenomConfCreditCached = false;
        fWatchDebitCached = false;
        fWatchCreditCached = false;
        fAvailableWatchCreditCached = false;
        fImmatureWatchCreditCached = false;
        fDebitCached = false;
        fChangeCached = false;
    }
    //! Attach this transaction to a wallet and drop all cached balances.
    void BindWallet(CWallet* pwalletIn)
    {
        pwallet = pwalletIn;
        MarkDirty();
    }
    //! filter decides which addresses will count towards the debit
    CAmount GetDebit(const isminefilter& filter) const
    {
        if (vin.empty())
            return 0;
        CAmount debit = 0;
        if (filter & ISMINE_SPENDABLE) {
            if (fDebitCached)
                debit += nDebitCached;
            else {
                nDebitCached = pwallet->GetDebit(*this, ISMINE_SPENDABLE);
                fDebitCached = true;
                debit += nDebitCached;
            }
        }
        if (filter & ISMINE_WATCH_ONLY) {
            if (fWatchDebitCached)
                debit += nWatchDebitCached;
            else {
                nWatchDebitCached = pwallet->GetDebit(*this, ISMINE_WATCH_ONLY);
                fWatchDebitCached = true;
                debit += nWatchDebitCached;
            }
        }
        return debit;
    }
    //! Total credit to us under the given filter; immature coinbases count as 0.
    CAmount GetCredit(const isminefilter& filter) const
    {
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        CAmount credit = 0;
        if (filter & ISMINE_SPENDABLE) {
            // GetBalance can assume transactions in mapWallet won't change
            if (fCreditCached)
                credit += nCreditCached;
            else {
                nCreditCached = pwallet->GetCredit(*this, ISMINE_SPENDABLE);
                fCreditCached = true;
                credit += nCreditCached;
            }
        }
        if (filter & ISMINE_WATCH_ONLY) {
            if (fWatchCreditCached)
                credit += nWatchCreditCached;
            else {
                nWatchCreditCached = pwallet->GetCredit(*this, ISMINE_WATCH_ONLY);
                fWatchCreditCached = true;
                credit += nWatchCreditCached;
            }
        }
        return credit;
    }
    //! Credit from a not-yet-mature coinbase/coinstake in the main chain; 0 otherwise.
    CAmount GetImmatureCredit(bool fUseCache = true) const
    {
        if ((IsCoinBase() || IsCoinStake()) && GetBlocksToMaturity() > 0 && IsInMainChain()) {
            if (fUseCache && fImmatureCreditCached)
                return nImmatureCreditCached;
            nImmatureCreditCached = pwallet->GetCredit(*this, ISMINE_SPENDABLE);
            fImmatureCreditCached = true;
            return nImmatureCreditCached;
        }
        return 0;
    }
    //! Spendable credit from this tx's currently-unspent outputs.
    CAmount GetAvailableCredit(bool fUseCache = true) const
    {
        if (pwallet == 0)
            return 0;
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        if (fUseCache && fAvailableCreditCached)
            return nAvailableCreditCached;
        CAmount nCredit = 0;
        uint256 hashTx = GetHash();
        for (unsigned int i = 0; i < vout.size(); i++) {
            if (!pwallet->IsSpent(hashTx, i)) {
                const CTxOut& txout = vout[i];
                nCredit += pwallet->GetCredit(txout, ISMINE_SPENDABLE);
                if (!MoneyRange(nCredit))
                    throw std::runtime_error("CWalletTx::GetAvailableCredit() : value out of range");
            }
        }
        nAvailableCreditCached = nCredit;
        fAvailableCreditCached = true;
        return nCredit;
    }
    //! Credit from outputs still eligible for (more) obfuscation mixing.
    CAmount GetAnonymizableCredit(bool fUseCache = true) const
    {
        if (pwallet == 0)
            return 0;
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        if (fUseCache && fAnonymizableCreditCached)
            return nAnonymizableCreditCached;
        CAmount nCredit = 0;
        uint256 hashTx = GetHash();
        for (unsigned int i = 0; i < vout.size(); i++) {
            const CTxOut& txout = vout[i];
            const CTxIn vin = CTxIn(hashTx, i);
            if (pwallet->IsSpent(hashTx, i) || pwallet->IsLockedCoin(hashTx, i)) continue;
            if (fMasterNode && vout[i].nValue == 1000 * COIN) continue; // do not count MN-like outputs
            const int rounds = pwallet->GetInputObfuscationRounds(vin);
            if (rounds >= -2 && rounds < nObfuscationRounds) {
                nCredit += pwallet->GetCredit(txout, ISMINE_SPENDABLE);
                if (!MoneyRange(nCredit))
                    // FIX: message previously misspelled the method as "GetAnonamizableCredit"
                    throw std::runtime_error("CWalletTx::GetAnonymizableCredit() : value out of range");
            }
        }
        nAnonymizableCreditCached = nCredit;
        fAnonymizableCreditCached = true;
        return nCredit;
    }
    //! Credit from denominated outputs that completed the configured mixing rounds.
    CAmount GetAnonymizedCredit(bool fUseCache = true) const
    {
        if (pwallet == 0)
            return 0;
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        if (fUseCache && fAnonymizedCreditCached)
            return nAnonymizedCreditCached;
        CAmount nCredit = 0;
        uint256 hashTx = GetHash();
        for (unsigned int i = 0; i < vout.size(); i++) {
            const CTxOut& txout = vout[i];
            const CTxIn vin = CTxIn(hashTx, i);
            if (pwallet->IsSpent(hashTx, i) || !pwallet->IsDenominated(vin)) continue;
            const int rounds = pwallet->GetInputObfuscationRounds(vin);
            if (rounds >= nObfuscationRounds) {
                nCredit += pwallet->GetCredit(txout, ISMINE_SPENDABLE);
                if (!MoneyRange(nCredit))
                    throw std::runtime_error("CWalletTx::GetAnonymizedCredit() : value out of range");
            }
        }
        nAnonymizedCreditCached = nCredit;
        fAnonymizedCreditCached = true;
        return nCredit;
    }
    //! Credit held in standard denominations, split by confirmed/unconfirmed.
    CAmount GetDenominatedCredit(bool unconfirmed, bool fUseCache = true) const
    {
        if (pwallet == 0)
            return 0;
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        int nDepth = GetDepthInMainChain(false);
        if (nDepth < 0) return 0;
        bool isUnconfirmed = !IsFinalTx(*this) || (!IsTrusted() && nDepth == 0);
        if (unconfirmed != isUnconfirmed) return 0;
        if (fUseCache) {
            if (unconfirmed && fDenomUnconfCreditCached)
                return nDenomUnconfCreditCached;
            else if (!unconfirmed && fDenomConfCreditCached)
                return nDenomConfCreditCached;
        }
        CAmount nCredit = 0;
        uint256 hashTx = GetHash();
        for (unsigned int i = 0; i < vout.size(); i++) {
            const CTxOut& txout = vout[i];
            if (pwallet->IsSpent(hashTx, i) || !pwallet->IsDenominatedAmount(vout[i].nValue)) continue;
            nCredit += pwallet->GetCredit(txout, ISMINE_SPENDABLE);
            if (!MoneyRange(nCredit))
                throw std::runtime_error("CWalletTx::GetDenominatedCredit() : value out of range");
        }
        if (unconfirmed) {
            nDenomUnconfCreditCached = nCredit;
            fDenomUnconfCreditCached = true;
        } else {
            nDenomConfCreditCached = nCredit;
            fDenomConfCreditCached = true;
        }
        return nCredit;
    }
    //! Watch-only analogue of GetImmatureCredit.
    //! NOTE(review): unlike GetImmatureCredit this ignores coinstakes — confirm intended.
    CAmount GetImmatureWatchOnlyCredit(const bool& fUseCache = true) const
    {
        if (IsCoinBase() && GetBlocksToMaturity() > 0 && IsInMainChain()) {
            if (fUseCache && fImmatureWatchCreditCached)
                return nImmatureWatchCreditCached;
            nImmatureWatchCreditCached = pwallet->GetCredit(*this, ISMINE_WATCH_ONLY);
            fImmatureWatchCreditCached = true;
            return nImmatureWatchCreditCached;
        }
        return 0;
    }
    //! Watch-only analogue of GetAvailableCredit.
    CAmount GetAvailableWatchOnlyCredit(const bool& fUseCache = true) const
    {
        if (pwallet == 0)
            return 0;
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        if (fUseCache && fAvailableWatchCreditCached)
            return nAvailableWatchCreditCached;
        CAmount nCredit = 0;
        for (unsigned int i = 0; i < vout.size(); i++) {
            if (!pwallet->IsSpent(GetHash(), i)) {
                const CTxOut& txout = vout[i];
                nCredit += pwallet->GetCredit(txout, ISMINE_WATCH_ONLY);
                if (!MoneyRange(nCredit))
                    // FIX: message previously named the wrong method ("GetAvailableCredit")
                    throw std::runtime_error("CWalletTx::GetAvailableWatchOnlyCredit() : value out of range");
            }
        }
        nAvailableWatchCreditCached = nCredit;
        fAvailableWatchCreditCached = true;
        return nCredit;
    }
    //! Cached total change returned to us by this transaction.
    CAmount GetChange() const
    {
        if (fChangeCached)
            return nChangeCached;
        nChangeCached = pwallet->GetChange(*this);
        fChangeCached = true;
        return nChangeCached;
    }
    void GetAmounts(std::list<COutputEntry>& listReceived,
        std::list<COutputEntry>& listSent,
        CAmount& nFee,
        std::string& strSentAccount,
        const isminefilter& filter) const;
    void GetAccountAmounts(const std::string& strAccount, CAmount& nReceived, CAmount& nSent, CAmount& nFee, const isminefilter& filter) const;
    bool IsFromMe(const isminefilter& filter) const
    {
        return (GetDebit(filter) > 0);
    }
    bool InMempool() const;
    //! Conservative "safe to spend from" test: confirmed, or our own
    //! zero-conf change whose inputs are all ours and all spendable.
    bool IsTrusted() const
    {
        // Quick answer in most cases
        if (!IsFinalTx(*this))
            return false;
        int nDepth = GetDepthInMainChain();
        if (nDepth >= 1)
            return true;
        if (nDepth < 0)
            return false;
        if (!bSpendZeroConfChange || !IsFromMe(ISMINE_ALL)) // using wtx's cached debit
            return false;
        // Trusted if all inputs are from us and are in the mempool:
        BOOST_FOREACH (const CTxIn& txin, vin) {
            // Transactions not sent by us: not trusted
            const CWalletTx* parent = pwallet->GetWalletTx(txin.prevout.hash);
            if (parent == NULL)
                return false;
            const CTxOut& parentOut = parent->vout[txin.prevout.n];
            if (pwallet->IsMine(parentOut) != ISMINE_SPENDABLE)
                return false;
        }
        return true;
    }
    bool WriteToDisk();
    int64_t GetTxTime() const;
    int GetRequestCount() const;
    void RelayWalletTransaction(std::string strCommand = "tx");
    std::set<uint256> GetConflicts() const;
};
//! A spendable (or watch-only) wallet output together with its confirmation depth.
class COutput
{
public:
    const CWalletTx* tx; //!< transaction containing the output
    int i;               //!< index of the output within tx->vout
    int nDepth;          //!< confirmation depth of tx
    bool fSpendable;     //!< whether we hold the spending key (vs watch-only)
    COutput(const CWalletTx* txIn, int iIn, int nDepthIn, bool fSpendableIn)
    {
        tx = txIn;
        i = iIn;
        nDepth = nDepthIn;
        fSpendable = fSpendableIn;
    }
    //Used with Obfuscation. Will return largest nondenom, then denominations, then very small inputs
    int Priority() const
    {
        BOOST_FOREACH (CAmount d, obfuScationDenominations)
            if (tx->vout[i].nValue == d) return 10000;
        if (tx->vout[i].nValue < 1 * COIN) return 20000;
        //nondenom return largest first
        return -(tx->vout[i].nValue / COIN);
    }
    //! Value of the referenced output.
    CAmount Value() const
    {
        return tx->vout[i].nValue;
    }
    std::string ToString() const;
};
/** Private key that includes an expiration date in case it never gets used. */
class CWalletKey
{
public:
    CPrivKey vchPrivKey;   //!< the serialized private key
    int64_t nTimeCreated;  //!< creation time (0 = unknown)
    int64_t nTimeExpires;  //!< expiry time (0 = never)
    std::string strComment;
    //! todo: add something to note what created it (user, getnewaddress, change)
    //! maybe should have a map<string, string> property map
    CWalletKey(int64_t nExpires = 0);
    ADD_SERIALIZE_METHODS;
    //! Field order is the on-disk format and must not change.
    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion)
    {
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        READWRITE(vchPrivKey);
        READWRITE(nTimeCreated);
        READWRITE(nTimeExpires);
        READWRITE(LIMITED_STRING(strComment, 65536));
    }
};
/**
 * Account information.
 * Stored in wallet with key "acc"+string account name.
 */
class CAccount
{
public:
    CPubKey vchPubKey; //!< default receiving key for the account
    CAccount()
    {
        SetNull();
    }
    //! Reset to an empty (invalid) public key.
    void SetNull()
    {
        vchPubKey = CPubKey();
    }
    ADD_SERIALIZE_METHODS;
    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion)
    {
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        READWRITE(vchPubKey);
    }
};
/**
 * Internal transfers.
 * Database key is acentry<account><counter>.
 */
class CAccountingEntry
{
public:
    std::string strAccount;      //!< account the entry belongs to (serialized in the db key)
    CAmount nCreditDebit;        //!< positive = credit, negative = debit
    int64_t nTime;
    std::string strOtherAccount; //!< counterparty account of the transfer
    std::string strComment;
    mapValue_t mapValue;
    int64_t nOrderPos; //! position in ordered transaction list
    uint64_t nEntryNo;
    CAccountingEntry()
    {
        SetNull();
    }
    void SetNull()
    {
        nCreditDebit = 0;
        nTime = 0;
        strAccount.clear();
        strOtherAccount.clear();
        strComment.clear();
        nOrderPos = -1;
        nEntryNo = 0;
    }
    ADD_SERIALIZE_METHODS;
    //! For backward compatibility, mapValue (and _ssExtra) are smuggled inside
    //! strComment after a NUL separator: old clients see only the comment text,
    //! new clients split on the first '\0' to recover the extra data.
    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion)
    {
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        //! Note: strAccount is serialized as part of the key, not here.
        READWRITE(nCreditDebit);
        READWRITE(nTime);
        READWRITE(LIMITED_STRING(strOtherAccount, 65536));
        if (!ser_action.ForRead()) {
            WriteOrderPos(nOrderPos, mapValue);
            if (!(mapValue.empty() && _ssExtra.empty())) {
                // Append '\0' + serialized mapValue + extra bytes to the comment.
                CDataStream ss(nType, nVersion);
                ss.insert(ss.begin(), '\0');
                ss << mapValue;
                ss.insert(ss.end(), _ssExtra.begin(), _ssExtra.end());
                strComment.append(ss.str());
            }
        }
        READWRITE(LIMITED_STRING(strComment, 65536));
        // find("\0", 0, 1) searches for the embedded NUL separator explicitly;
        // a plain find("\0") would treat the argument as an empty C string.
        size_t nSepPos = strComment.find("\0", 0, 1);
        if (ser_action.ForRead()) {
            mapValue.clear();
            if (std::string::npos != nSepPos) {
                CDataStream ss(std::vector<char>(strComment.begin() + nSepPos + 1, strComment.end()), nType, nVersion);
                ss >> mapValue;
                _ssExtra = std::vector<char>(ss.begin(), ss.end());
            }
            ReadOrderPos(nOrderPos, mapValue);
        }
        if (std::string::npos != nSepPos)
            strComment.erase(nSepPos);
        mapValue.erase("n");
    }

private:
    std::vector<char> _ssExtra; //!< unrecognized trailing bytes, preserved for round-tripping
};
#endif // BITCOIN_WALLET_H
|
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from GooglePretrainedWeightDownloader import GooglePretrainedWeightDownloader
from NVIDIAPretrainedWeightDownloader import NVIDIAPretrainedWeightDownloader
from WikiDownloader import WikiDownloader
from BooksDownloader import BooksDownloader
from GLUEDownloader import GLUEDownloader
from SquadDownloader import SquadDownloader
from PubMedDownloader import PubMedDownloader
class Downloader:
    """Routes a dataset name to the downloader class that can fetch it.

    Knows about the BERT pretraining corpora (BooksCorpus, Wikipedia,
    PubMed), pretrained weights (Google, NVIDIA), GLUE tasks and SQuAD.
    """

    # GLUE tasks share a single downloader parameterized by task name.
    GLUE_TASKS = ('mrpc', 'mnli', 'cola', 'sst-2')

    def __init__(self, dataset_name, save_path):
        self.dataset_name = dataset_name
        self.save_path = save_path

    def download(self):
        """Dispatch on self.dataset_name; aborts via assert on unknown names."""
        dispatch = {
            'bookscorpus': self.download_bookscorpus,
            'wikicorpus_en': lambda: self.download_wikicorpus('en'),
            'wikicorpus_zh': lambda: self.download_wikicorpus('zh'),
            'pubmed_baseline': lambda: self.download_pubmed('baseline'),
            'pubmed_daily_update': lambda: self.download_pubmed('daily_update'),
            'pubmed_fulltext': lambda: self.download_pubmed('fulltext'),
            'pubmed_open_access': lambda: self.download_pubmed('open_access'),
            'google_pretrained_weights': self.download_google_pretrained_weights,
            'nvidia_pretrained_weights': self.download_nvidia_pretrained_weights,
            'squad': self.download_squad,
        }
        if self.dataset_name in self.GLUE_TASKS:
            self.download_glue(self.dataset_name)
        elif self.dataset_name == 'all':
            self.download_bookscorpus()
            self.download_wikicorpus('en')
            self.download_wikicorpus('zh')
            self.download_pubmed('baseline')
            self.download_pubmed('daily_update')
            self.download_pubmed('fulltext')
            self.download_pubmed('open_access')
            self.download_google_pretrained_weights()
            self.download_nvidia_pretrained_weights()
            self.download_glue("cola")
            self.download_glue("mnli")
            self.download_glue("mrpc")
            self.download_glue("sst-2")
            self.download_squad()
        elif self.dataset_name in dispatch:
            dispatch[self.dataset_name]()
        else:
            print(self.dataset_name)
            assert False, 'Unknown dataset_name provided to downloader'

    def download_bookscorpus(self):
        downloader = BooksDownloader(self.save_path)
        downloader.download()

    def download_wikicorpus(self, language):
        downloader = WikiDownloader(language, self.save_path)
        downloader.download()

    def download_pubmed(self, subset):
        downloader = PubMedDownloader(subset, self.save_path)
        downloader.download()

    def download_google_pretrained_weights(self):
        downloader = GooglePretrainedWeightDownloader(self.save_path)
        downloader.download()

    def download_nvidia_pretrained_weights(self):
        downloader = NVIDIAPretrainedWeightDownloader(self.save_path)
        downloader.download()

    def download_glue(self, glue_task_name):
        downloader = GLUEDownloader(self.save_path)
        downloader.download(glue_task_name)

    def download_squad(self):
        downloader = SquadDownloader(self.save_path)
        downloader.download()
|
"""
Python ICE-CASCADE tectonic uplift-subsidence model component
Null model: Defines do-nothing methods for required interface, used to disable
the uplift-subsidence model component
"""
from .base import base_model
import numpy as np
class null_model(base_model):
    """Do-nothing uplift component used when uplift-subsidence is disabled.

    Stores a height grid but never modifies it; every model-step hook is a
    no-op so the surrounding driver can treat this like a real uplift model.
    """

    def __init__(self):
        # Nothing to configure for the disabled component.
        pass

    def set_height(self, new):
        """Store an independent float64 copy of *new* as the height grid."""
        self._height = np.copy(np.double(new))

    def get_height(self):
        """Return a defensive copy of the stored height grid."""
        return np.copy(self._height)

    def init_netcdf(self, nc, *args):
        """Record in *nc* that the uplift component is disabled.

        Writes a scalar int8 variable 'uplift_model' set to False, tagged
        with this class's name. *nc* is assumed to be a netCDF4 Dataset —
        TODO confirm against the caller.
        """
        nc.createVariable('uplift_model', np.dtype('i1'))  # scalar
        nc['uplift_model'][...] = False
        nc['uplift_model'].type = self.__class__.__name__

    # FIX: both methods below were declared as `def f(*args)` — they only
    # worked because the bound call passed the instance into *args. Give
    # them an explicit `self` like the other methods (backward compatible).
    def to_netcdf(self, *args):
        """No-op: the null model writes no per-step output."""
        pass

    def run(self, *args):
        """No-op: the null model applies no uplift."""
        pass
|