text
stringlengths 3
1.05M
|
|---|
/**
 * Maps an array of Formulate fields into an associative array, with the field
 * ID as the key and the field as the value.
 * @param fields The array of fields to map.
 * @returns {{}} The associative array of fields.
 */
function mapFields(fields) {
  // Build the lookup keyed by each field's ID. As in the original index loop,
  // a later field with a duplicate ID silently overwrites an earlier one.
  const fieldMap = {};
  for (const field of fields) {
    fieldMap[field.id] = field;
  }
  return fieldMap;
}
// Export the function that maps fields.
module.exports = mapFields;
|
module.exports = [
'0.1.0',
'0.1.1',
'0.2.0',
'0.2.1',
'0.2.2',
'0.2.3',
'0.2.4',
'0.2.5',
'0.2.6',
'0.2.7',
'0.2.8',
'0.2.9',
'0.2.10',
'0.2.11',
'0.2.12',
'0.2.13',
'0.2.14',
'0.3.0',
'0.3.1',
'0.3.2',
'0.3.3',
'0.4.0',
'0.4.1',
'0.9.9',
'0.9.10',
'0.9.11',
'1.0.0',
'1.0.1',
'1.0.2',
'1.1.0',
'1.1.1',
'1.1.2',
'1.2.0',
'1.2.1',
'1.2.2',
'1.2.3',
'2.0.0-beta.0',
'2.0.0-beta.1',
'2.0.0-beta.2',
'2.0.0-beta.3',
'2.0.0-beta.4',
'2.0.0-beta.5',
'2.0.0-beta.6',
'2.0.0-beta.7',
'2.0.0-beta.8',
'2.0.0-beta.9',
'2.0.0-beta.10',
'2.0.0-beta.11',
'2.0.0-beta.12',
'2.0.0-beta.13',
'2.0.0-beta.14',
'2.0.0-beta.15',
'2.0.0-beta.16',
'2.0.0-beta.17',
'2.0.0-beta.18',
'2.0.0-beta.19',
'2.0.0-beta.20',
'2.0.0-beta.21',
'2.0.0-beta.22',
'2.0.0-beta.23',
'2.0.0-beta.24',
'2.0.0-beta.25',
'2.0.0-beta.26',
'2.0.0-beta.27',
'2.0.0-beta.28',
'2.0.0-beta.29',
'2.0.0-beta.30',
'2.0.0-beta.31',
'2.0.0-beta.32',
'2.0.0-beta.33',
'2.0.0-beta.34',
'2.0.0-beta.35',
'2.0.0-beta.36',
'2.0.0-beta.37',
'2.0.0-beta.38',
'2.0.0-beta.39',
'2.0.0-beta.40',
'2.0.0-beta.41',
'2.0.0-beta.42',
'2.0.0',
'2.0.1',
'2.1.0',
'2.1.1',
'2.2.0',
'2.3.0-alpha1',
'2.3.0',
'2.3.1',
'2.4.0',
'2.4.1',
'2.4.2',
'2.4.3',
'2.4.4',
'2.4.5',
'2.4.6',
'2.4.7',
'2.4.8',
'2.4.9',
'3.0.0-beta1',
'3.0.0-beta10',
'3.0.0-beta11',
'3.0.0-beta12',
'3.0.0-beta13',
'3.0.0-beta14',
'3.0.0-beta15',
'3.0.0-beta16',
'3.0.0-beta2',
'3.0.0-beta3',
'3.0.0-beta4',
'3.0.0-beta5',
'3.0.0-beta6',
'3.0.0-beta7',
'3.0.0-beta8',
'3.0.0-beta9',
'3.0.1-beta.1',
'3.0.1-beta.2',
'3.0.1-beta.3',
'3.0.1-beta.4',
'3.0.1-beta.5',
'3.0.1-beta.6',
'3.0.1-beta.7',
'3.0.1-beta.8',
'3.0.1-beta.9',
'3.0.1-beta.10',
'3.0.1-beta.11',
'3.0.1-beta.12',
'3.0.1-beta.13',
'3.0.1-beta.14',
'3.0.1-beta.15',
'3.0.1-beta.16',
'3.0.1-beta.17',
'3.0.1-beta.18',
'3.0.1-beta.19',
'3.0.1-beta.20',
'3.0.1-beta.21',
'3.0.1',
'3.0.2',
'3.0.3',
'3.0.4',
'3.0.5',
'3.0.6',
'3.1.0',
'3.2.0',
'3.2.1',
'3.2.2',
'3.2.3',
'4.0.0-beta.1',
'4.0.0-beta.2',
'4.0.0-beta.3',
'4.0.0-beta.4',
'4.0.0-beta.5',
'4.0.0-beta.6',
'4.0.0',
'4.0.1',
'4.0.2',
'4.0.3',
'4.0.4',
'4.0.5',
'4.1.0',
'4.1.1',
'4.1.2',
'4.1.3',
'4.1.4-canary.1',
'4.1.4-canary.2',
'4.1.4',
'4.2.0-canary.1',
'4.2.0-zones.2',
'4.2.0',
'4.2.1',
'4.2.2',
'4.2.3',
'4.3.0-canary.1',
'4.3.0-universal-alpha.1',
'4.3.0-universal-alpha.2',
'4.3.0-universal-alpha.3',
'4.3.0-universal-alpha.4',
'4.3.0-zones.1',
'4.4.0-canary.2',
'4.4.0-canary.3',
'5.0.0-universal-alpha.1',
'5.0.0-universal-alpha.2',
'5.0.0-universal-alpha.3',
'5.0.0-universal-alpha.4',
'5.0.0-universal-alpha.5',
'5.0.0-universal-alpha.6',
'5.0.0-universal-alpha.7',
'5.0.0-universal-alpha.8',
'5.0.0-universal-alpha.9',
'5.0.0-universal-alpha.10',
'5.0.0-universal-alpha.11',
'5.0.0-universal-alpha.12',
'5.0.0-universal-alpha.13',
'5.0.0-universal-alpha.14',
'5.0.0-universal-alpha.15',
'5.0.0-universal-alpha.16',
'5.0.0-universal-alpha.17',
'5.0.0-universal-alpha.18',
'5.0.0-universal-alpha.19',
'5.0.0-universal-alpha.20',
'5.0.0-universal-alpha.21',
'5.0.0-universal-alpha.22',
'5.0.0-universal-alpha.23',
'5.0.0-zones.1',
'5.0.0',
'5.0.1-canary.1',
'5.0.1-canary.2',
'5.0.1-canary.3',
'5.0.1-canary.4',
'5.0.1-canary.5',
'5.0.1-canary.6',
'5.0.1-canary.7',
'5.0.1-canary.8',
'5.0.1-canary.9',
'5.0.1-canary.10',
'5.0.1-canary.11',
'5.0.1-canary.12',
'5.0.1-canary.13',
'5.0.1-canary.14',
'5.0.1-canary.15',
'5.0.1-canary.16',
'5.0.1-canary.17',
'5.1.0',
'6.0.0-canary.1',
'6.0.0-canary.2',
'6.0.0-canary.3',
'6.0.0-canary.4',
'6.0.0-canary.5',
'6.0.0-canary.6',
'6.0.0-canary.7',
'6.0.0',
'6.0.1-canary.0',
'6.0.1-canary.1',
'6.0.1-canary.2',
'6.0.1',
'6.0.2-canary.0',
'6.0.2',
'6.0.3-canary.0',
'6.0.3-canary.1',
'6.0.3',
'6.0.4-canary.0',
'6.0.4-canary.1',
'6.0.4-canary.2',
'6.0.4-canary.3',
'6.0.4-canary.4',
'6.0.4-canary.5',
'6.0.4-canary.6',
'6.0.4-canary.7',
'6.0.4-canary.8',
'6.0.4-canary.9',
'6.1.0-canary.0',
'6.1.0',
'6.1.1-canary.0',
'6.1.1-canary.1',
'6.1.1-canary.2',
'6.1.1-canary.3',
'6.1.1-canary.4',
'6.1.1-canary.5',
'6.1.1',
'6.1.2',
'7.0.0-canary.0',
'7.0.0-canary.1',
'7.0.0-canary.2',
'7.0.0-canary.3',
'7.0.0-canary.4',
'7.0.0-canary.5',
'7.0.0-canary.6',
'7.0.0-canary.7',
'7.0.0-canary.8',
'7.0.0-canary.9',
'7.0.0-canary.10',
'7.0.0-canary.11',
'7.0.0-canary.12',
'7.0.0-canary.13',
'7.0.0-canary.14',
'7.0.0-canary.15',
'7.0.0-canary.16',
'7.0.0-canary.18',
'7.0.0-canary.19',
'7.0.0-canary.20',
'7.0.0',
'7.0.1-canary.0',
'7.0.1-canary.1',
'7.0.1-canary.2',
'7.0.1-canary.3',
'7.0.1-canary.4',
'7.0.1-canary.5',
'7.0.1-canary.6',
'7.0.1',
'7.0.2-alpha.1',
'7.0.2-alpha.3',
'7.0.2-canary.5',
'7.0.2-canary.6',
'7.0.2-canary.7',
'7.0.2-canary.8',
'7.0.2-canary.9',
'7.0.2-canary.10',
'7.0.2-canary.11',
'7.0.2-canary.12',
'7.0.2-canary.13',
'7.0.2-canary.14',
'7.0.2-canary.15',
'7.0.2-canary.16',
'7.0.2-canary.17',
'7.0.2-canary.18',
'7.0.2-canary.19',
'7.0.2-canary.20',
'7.0.2-canary.21',
'7.0.2-canary.22',
'7.0.2-canary.23',
'7.0.2-canary.24',
'7.0.2-canary.25',
'7.0.2-canary.26',
'7.0.2-canary.27',
'7.0.2-canary.28',
'7.0.2-canary.29',
'7.0.2-canary.31',
'7.0.2-canary.33',
'7.0.2-canary.34',
'7.0.2-canary.35',
'7.0.2-canary.36',
'7.0.2-canary.37',
'7.0.2-canary.38',
'7.0.2-canary.39',
'7.0.2-canary.40',
'7.0.2-canary.41',
'7.0.2-canary.42',
'7.0.2-canary.43',
'7.0.2-canary.44',
'7.0.2-canary.45',
'7.0.2-canary.46',
'7.0.2-canary.47',
'7.0.2-canary.48',
'7.0.2-canary.49',
'7.0.2-canary.50',
'7.0.2',
];
|
from sardine.exceptions.lang.lang_exception import SardineLangException
class CannotRedefineAlias(SardineLangException):
    """Language error raised when user code attempts to redefine an existing alias."""

    def __init__(self, alias: str):
        # Build the message here so the base exception carries a ready-to-print string.
        message = f"Tried redefining alias '{alias}'"
        super().__init__(message)
|
import React from 'react'
class Footer extends React.Component {
render() {
return (
<section className='mt6'>
<footer className="pv4 ph3 ph5-ns tc">
<small className="f6 db hint-text pv3 tc">© 2019 <b className="ttu">Ahmer Malik</b>, ALL RIGHTS RESERVED.</small>
</footer>
</section>
)
}
}
export default Footer
|
const helpers = require('../../../test/test-helper');

// Filled in after mocking occurs (see beforeAll below).
let checkStatusFunction;
let token;

// Minimal Twilio Function context shared by every test case.
const baseContext = {
  ACCOUNT_SID: 'ACXXX',
  AUTH_TOKEN: 'abcdef',
  getTwilioClient: jest.fn(),
};

// Mocked statuses asset: two checks that succeed and one that throws, so the
// tests can verify that failing checks are filtered out of the response.
const mockStatuses = {
  statuses: [
    async () => 'good one',
    async () => {
      throw new Error('bad two');
    },
    async () => 'good three',
  ],
  environment: jest.fn().mockReturnValue({
    title: 'Environment example',
    valid: true,
  }),
};

describe('voice-client-javascript/admin/check-status', () => {
  beforeAll(() => {
    process.env.ADMIN_PASSWORD = 'supersekret';
    // Register the private assets that the function under test will require()
    // at load time; must happen before the handler module is loaded below.
    const runtime = new helpers.MockRuntime();
    runtime._addAsset(
      '/admin/shared.js',
      '../../assets/admin/shared.private.js'
    );
    runtime._addAsset(
      '/admin/statuses.js',
      '../../assets/admin/statuses.private.js'
    );
    helpers.setup(baseContext, runtime);
    jest.mock('../../assets/admin/statuses.private.js', () => mockStatuses);
    // Create a valid admin token so the authenticated test cases can pass it.
    const { createToken } = require('../../assets/admin/shared.private');
    token = createToken(baseContext, process.env.ADMIN_PASSWORD);
    checkStatusFunction = require('../../functions/admin/check-status').handler;
  });
  test('calls must be authenticated', (done) => {
    const callback = (err, result) => {
      expect(err).toBeNull();
      expect(result).toBeDefined();
      expect(result._statusCode).toBe(403);
      expect(result._body).toBe('Not authorized');
      done();
    };
    // Note no token
    checkStatusFunction(baseContext, {}, callback);
  });
  test('only error free statuses are returned', (done) => {
    const callback = (err, result) => {
      expect(err).toBeNull();
      expect(result).toBeDefined();
      expect(result.statuses).toBeDefined();
      // The throwing "bad two" check is dropped; only the two good ones remain.
      expect(result.statuses.length).toEqual(2);
      expect(result.statuses).toContain('good one');
      expect(result.statuses).toContain('good three');
      done();
    };
    checkStatusFunction(baseContext, { token }, callback);
  });
  test('environment function is ran', (done) => {
    const callback = (err, result) => {
      expect(err).toBeNull();
      expect(result).toBeDefined();
      expect(result.environment).toBeDefined();
      expect(result.environment.title).toBe('Environment example');
      expect(mockStatuses.environment).toHaveBeenCalled();
      done();
    };
    checkStatusFunction(baseContext, { token }, callback);
  });
});
|
/**
 * Created by bln on 16-6-28.
 *
 * File logger for the application: debug-and-above records go to the access
 * log, error-and-above records additionally go to the error log.
 */
var bunyan = require('bunyan');
var path = require('path');
var fs = require('fs');
var globalConfig = require('./config.js');
var logConfig = globalConfig.log,
    accessPath = logConfig.access(),
    errorPath = logConfig.error();
// Make sure the log directory exists before bunyan tries to open file streams.
if (!fs.existsSync(logConfig.dir())) {
    fs.mkdirSync(logConfig.dir());
}
// NOTE: a bunyan stream writes every record at its `level` AND ABOVE. The
// original configuration attached two streams per file ({info, debug} ->
// accessPath and {error, fatal} -> errorPath), which wrote each info+ record
// twice to the access log and each fatal record twice to the error log.
// One stream per file preserves the routing without the duplication.
var logger = bunyan.createLogger({
    name: 'foundationLog',
    streams: [
        {
            // debug and above (debug, info, warn, error, fatal) -> access log.
            level: 'debug',
            path: accessPath
        },
        {
            // error and above (error, fatal) -> error log.
            level: 'error',
            path: errorPath
        }
    ],
});
// Keep a handle to the original error method so the wrapper below — an
// extension point for future error-side effects — still delegates to bunyan.
logger._error = logger.error;
logger.error = function () {
    logger._error.apply(this, arguments);
};
module.exports = logger;
|
import numpy as np
def sigmoid(x):
    """Logistic function: squashes any real input into the open interval (0, 1)."""
    exp_neg = np.exp(-x)
    return 1.0 / (1 + exp_neg)
def y_hat(weights, bias, x):
    """Linear model output: dot product of the weights and inputs, plus the bias."""
    linear = np.dot(weights, x)
    return linear + bias
def cost(y, output):
    """Binary cross-entropy loss for a single example.

    cost = -(y*log(output) + (1-y)*log(1-output))

    BUG FIX: the original subtracted the second term instead of adding it,
    which yields a *negative* cost for y=0 and the wrong gradient signal.
    Assumes 0 < output < 1 (log is undefined at the endpoints).
    """
    return -(y*np.log(output) + (1-y)*np.log(1-output))
def gradient_descent(x, y, weights, bias, learnrate):
    """Perform one gradient-descent step for logistic regression.

    Updates (in place, for numpy arrays) and returns the weights and bias.

    BUG FIX: the prediction must be sigmoid(w.x + b). The original used the
    raw linear output even though `sigmoid` is defined above and the loss is
    cross-entropy, which pairs with a sigmoid activation; the error term
    (y - output) is the cross-entropy gradient only with the sigmoid applied.
    """
    output = sigmoid(y_hat(weights, bias, x))
    error = y - output
    weights += learnrate * error * x
    bias += learnrate * error
    return weights, bias
|
import React from 'react'
/**
 * Static "Our Services" page component.
 * NOTE(review): both paragraphs are lorem-ipsum placeholder copy — replace
 * with real content before launch.
 */
const ServicesPage = () => {
  return (
    <div>
      <h1>Our Services</h1>
      <p>
        Doctrina legam an proident exquisitaque, singulis enim ingeniis, malis
        et mentitum, iudicem si illum appellat id dolore e singulis, pariatur eu
        probant ad possumus sunt probant, irure incididunt do commodo. Probant
        reprehenderit est proident o aut quorum quamquam. Officia export tamen
        occaecat noster. De dolor quis ad consequat e iis quid exercitation, ad
        ipsum dolore te occaecat et esse consequat ne quamquam.Sint hic iis anim
        ingeniis te laboris o culpa occaecat, do ea multos ipsum quid, nam
        singulis sed singulis, eu export reprehenderit, aut occaecat aut
        litteris, fugiat laboris deserunt ab quo quid admodum. Nisi aliquip
        litteris. Mentitum arbitrantur in probant, duis non mentitum, nisi
        deserunt ex familiaritatem hic an quorum senserit arbitrantur quo ne
        ipsum sint sed possumus ea multos distinguantur nescius multos
        incurreret, consequat se culpa si quem te nostrud.
      </p>
      <p>
        Summis mentitum sed litteris. Do deserunt ex constias id senserit
        concursionibus ut officia, elit possumus mandaremus ex ubi aut multos
        cernantur, ingeniis summis offendit incurreret, aut ex exquisitaque,
        doctrina nulla constias, o aliquip id excepteur. Tamen vidisse et
        distinguantur, do tamen hic eram non ab ubi fugiat constias se velit eu
        iis quorum proident, ubi ita veniam magna eram ubi se cupidatat si
        incididunt, quae incurreret do multos elit an excepteur qui esse
        cernantur. Est nam elit fore legam. Irure do appellat si quis ex ullamco
        sed elit, eu mentitum instituendarum an export pariatur ne malis quem. E
        de philosophari. Ea nam cillum cillum labore, proident illum arbitror,
        mandaremus a vidisse id vidisse a laborum.
      </p>
    </div>
  )
}
export default ServicesPage
|
/**
* Copyright 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule EventPluginRegistry
* @flow
*/
'use strict';
import type {
DispatchConfig,
ReactSyntheticEvent,
} from 'ReactSyntheticEventType';
import type {AnyNativeEvent, PluginName, PluginModule} from 'PluginModuleType';
type NamesToPlugins = {[key: PluginName]: PluginModule<AnyNativeEvent>};
type EventPluginOrder = null | Array<PluginName>;
var invariant = require('invariant');
/**
 * Injectable ordering of event plugins.
 * Set exactly once via `injectEventPluginOrder`; null until injection.
 */
var eventPluginOrder: EventPluginOrder = null;

/**
 * Injectable mapping from names to event plugin modules.
 * Populated by `injectEventPluginsByName`.
 */
var namesToPlugins: NamesToPlugins = {};
/**
 * Recomputes the plugin list using the injected plugins and plugin ordering.
 * Idempotent: plugins already installed at their ordered slot are skipped.
 *
 * @private
 */
function recomputePluginOrdering(): void {
  if (!eventPluginOrder) {
    // Wait until an `eventPluginOrder` is injected.
    return;
  }
  for (var pluginName in namesToPlugins) {
    var pluginModule = namesToPlugins[pluginName];
    var pluginIndex = eventPluginOrder.indexOf(pluginName);
    invariant(
      pluginIndex > -1,
      'EventPluginRegistry: Cannot inject event plugins that do not exist in ' +
      'the plugin ordering, `%s`.',
      pluginName,
    );
    // Already installed at its ordered slot on an earlier pass; skip it.
    if (EventPluginRegistry.plugins[pluginIndex]) {
      continue;
    }
    invariant(
      pluginModule.extractEvents,
      'EventPluginRegistry: Event plugins must implement an `extractEvents` ' +
      'method, but `%s` does not.',
      pluginName,
    );
    // Install at the index dictated by the injected ordering, so `plugins`
    // stays deterministic regardless of the order modules were injected in.
    EventPluginRegistry.plugins[pluginIndex] = pluginModule;
    var publishedEvents = pluginModule.eventTypes;
    for (var eventName in publishedEvents) {
      invariant(
        publishEventForPlugin(
          publishedEvents[eventName],
          pluginModule,
          eventName,
        ),
        'EventPluginRegistry: Failed to publish event `%s` for plugin `%s`.',
        eventName,
        pluginName,
      );
    }
  }
}
/**
 * Publishes an event so that it can be dispatched by the supplied plugin.
 *
 * @param {object} dispatchConfig Dispatch configuration for the event.
 * @param {object} PluginModule Plugin publishing the event.
 * @return {boolean} True if the event was successfully published.
 * @private
 */
function publishEventForPlugin(
  dispatchConfig: DispatchConfig,
  pluginModule: PluginModule<AnyNativeEvent>,
  eventName: string,
): boolean {
  invariant(
    !EventPluginRegistry.eventNameDispatchConfigs.hasOwnProperty(eventName),
    'EventPluginHub: More than one plugin attempted to publish the same ' +
    'event name, `%s`.',
    eventName,
  );
  EventPluginRegistry.eventNameDispatchConfigs[eventName] = dispatchConfig;
  // Two-phase (capture/bubble) events expose one registration name per phase;
  // simple events expose a single `registrationName` instead.
  var phasedRegistrationNames = dispatchConfig.phasedRegistrationNames;
  if (phasedRegistrationNames) {
    for (var phaseName in phasedRegistrationNames) {
      if (phasedRegistrationNames.hasOwnProperty(phaseName)) {
        var phasedRegistrationName = phasedRegistrationNames[phaseName];
        publishRegistrationName(
          phasedRegistrationName,
          pluginModule,
          eventName,
        );
      }
    }
    return true;
  } else if (dispatchConfig.registrationName) {
    publishRegistrationName(
      dispatchConfig.registrationName,
      pluginModule,
      eventName,
    );
    return true;
  }
  // Neither phased nor simple registration names: nothing was published.
  return false;
}
/**
 * Publishes a registration name that is used to identify dispatched events and
 * can be used with `EventPluginHub.putListener` to register listeners.
 *
 * @param {string} registrationName Registration name to add.
 * @param {object} PluginModule Plugin publishing the event.
 * @private
 */
function publishRegistrationName(
  registrationName: string,
  pluginModule: PluginModule<AnyNativeEvent>,
  eventName: string,
): void {
  invariant(
    !EventPluginRegistry.registrationNameModules[registrationName],
    'EventPluginHub: More than one plugin attempted to publish the same ' +
    'registration name, `%s`.',
    registrationName,
  );
  EventPluginRegistry.registrationNameModules[registrationName] = pluginModule;
  EventPluginRegistry.registrationNameDependencies[registrationName] =
    pluginModule.eventTypes[eventName].dependencies;
  if (__DEV__) {
    // Record the lowercased name so DEV warnings can suggest the properly
    // cased handler prop when one is misspelled (e.g. `onclick` -> `onClick`).
    var lowerCasedName = registrationName.toLowerCase();
    EventPluginRegistry.possibleRegistrationNames[
      lowerCasedName
    ] = registrationName;
    if (registrationName === 'onDoubleClick') {
      // Special case: the native DOM event is `dblclick`, not `doubleclick`.
      EventPluginRegistry.possibleRegistrationNames.ondblclick = registrationName;
    }
  }
}
/**
 * Registers plugins so that they can extract and dispatch events.
 *
 * @see {EventPluginHub}
 */
var EventPluginRegistry = {
  /**
   * Ordered list of injected plugins.
   */
  plugins: [],
  /**
   * Mapping from event name to dispatch config
   */
  eventNameDispatchConfigs: {},
  /**
   * Mapping from registration name to plugin module
   */
  registrationNameModules: {},
  /**
   * Mapping from registration name to event name
   */
  registrationNameDependencies: {},
  /**
   * Mapping from lowercase registration names to the properly cased version,
   * used to warn in the case of missing event handlers. Available
   * only in __DEV__.
   * @type {Object}
   */
  possibleRegistrationNames: __DEV__ ? {} : (null: any),
  // Trust the developer to only use possibleRegistrationNames in __DEV__
  /**
   * Injects an ordering of plugins (by plugin name). This allows the ordering
   * to be decoupled from injection of the actual plugins so that ordering is
   * always deterministic regardless of packaging, on-the-fly injection, etc.
   *
   * @param {array} InjectedEventPluginOrder
   * @internal
   * @see {EventPluginHub.injection.injectEventPluginOrder}
   */
  injectEventPluginOrder: function(
    injectedEventPluginOrder: EventPluginOrder,
  ): void {
    invariant(
      !eventPluginOrder,
      'EventPluginRegistry: Cannot inject event plugin ordering more than ' +
      'once. You are likely trying to load more than one copy of React.',
    );
    // Clone the ordering so it cannot be dynamically mutated.
    eventPluginOrder = Array.prototype.slice.call(injectedEventPluginOrder);
    recomputePluginOrdering();
  },
  /**
   * Injects plugins to be used by `EventPluginHub`. The plugin names must be
   * in the ordering injected by `injectEventPluginOrder`.
   *
   * Plugins can be injected as part of page initialization or on-the-fly.
   *
   * @param {object} injectedNamesToPlugins Map from names to plugin modules.
   * @internal
   * @see {EventPluginHub.injection.injectEventPluginsByName}
   */
  injectEventPluginsByName: function(
    injectedNamesToPlugins: NamesToPlugins,
  ): void {
    var isOrderingDirty = false;
    for (var pluginName in injectedNamesToPlugins) {
      if (!injectedNamesToPlugins.hasOwnProperty(pluginName)) {
        continue;
      }
      var pluginModule = injectedNamesToPlugins[pluginName];
      // Only record plugins not seen before: re-injecting the same module is
      // a no-op, but injecting a different module under the same name throws.
      if (
        !namesToPlugins.hasOwnProperty(pluginName) ||
        namesToPlugins[pluginName] !== pluginModule
      ) {
        invariant(
          !namesToPlugins[pluginName],
          'EventPluginRegistry: Cannot inject two different event plugins ' +
          'using the same name, `%s`.',
          pluginName,
        );
        namesToPlugins[pluginName] = pluginModule;
        isOrderingDirty = true;
      }
    }
    if (isOrderingDirty) {
      recomputePluginOrdering();
    }
  },
  /**
   * Looks up the plugin for the supplied event.
   *
   * @param {object} event A synthetic event.
   * @return {?object} The plugin that created the supplied event.
   * @internal
   */
  getPluginModuleForEvent: function(
    event: ReactSyntheticEvent,
  ): null | PluginModule<AnyNativeEvent> {
    var dispatchConfig = event.dispatchConfig;
    if (dispatchConfig.registrationName) {
      return (
        EventPluginRegistry.registrationNameModules[
          dispatchConfig.registrationName
        ] || null
      );
    }
    if (dispatchConfig.phasedRegistrationNames !== undefined) {
      // pulling phasedRegistrationNames out of dispatchConfig helps Flow see
      // that it is not undefined.
      var {phasedRegistrationNames} = dispatchConfig;
      for (var phase in phasedRegistrationNames) {
        if (!phasedRegistrationNames.hasOwnProperty(phase)) {
          continue;
        }
        // Any phase's registration name resolves to the same owning plugin;
        // return the first one that is registered.
        var pluginModule =
          EventPluginRegistry.registrationNameModules[
            phasedRegistrationNames[phase]
          ];
        if (pluginModule) {
          return pluginModule;
        }
      }
    }
    return null;
  },
  /**
   * Exposed for unit testing.
   * Clears the injected ordering and every registry mapping back to the
   * initial (empty) state.
   * @private
   */
  _resetEventPlugins: function(): void {
    eventPluginOrder = null;
    for (var pluginName in namesToPlugins) {
      if (namesToPlugins.hasOwnProperty(pluginName)) {
        delete namesToPlugins[pluginName];
      }
    }
    EventPluginRegistry.plugins.length = 0;
    var eventNameDispatchConfigs = EventPluginRegistry.eventNameDispatchConfigs;
    for (var eventName in eventNameDispatchConfigs) {
      if (eventNameDispatchConfigs.hasOwnProperty(eventName)) {
        delete eventNameDispatchConfigs[eventName];
      }
    }
    var registrationNameModules = EventPluginRegistry.registrationNameModules;
    for (var registrationName in registrationNameModules) {
      if (registrationNameModules.hasOwnProperty(registrationName)) {
        delete registrationNameModules[registrationName];
      }
    }
    if (__DEV__) {
      var possibleRegistrationNames =
        EventPluginRegistry.possibleRegistrationNames;
      for (var lowerCasedName in possibleRegistrationNames) {
        if (possibleRegistrationNames.hasOwnProperty(lowerCasedName)) {
          delete possibleRegistrationNames[lowerCasedName];
        }
      }
    }
  },
};

module.exports = EventPluginRegistry;
|
(function () {
  'use strict';

  angular
    .module('app.admin')
    .controller('adminController', adminController);

  adminController.$inject = ['User', 'TaskService'];

  /* @ngInject */
  // Admin dashboard controller: loads every user and every admin task so the
  // bound view can render them (exposed on `vm` via controllerAs).
  function adminController(User, TaskService) {
    var vm = this;
    vm.title = 'adminController';

    activate();

    ////////////////

    // Kicks off the initial data loads when the controller is instantiated.
    function activate() {
      // $resource-style query: vm.users is filled in asynchronously in place.
      vm.users = User.query();
      TaskService.getAllAdmin(function(response){
        vm.tasks = response.tasks;
      });
    }
  }
})();
|
import Axios from "axios";
import { server } from "../util/Env.util";
/**
 * Registers a new local-auth user.
 * @param {Object} user - Registration payload.
 * @returns {Promise} Axios promise resolving to the server response.
 */
export function registerUser(user) {
  const endpoint = `${server}/auth/local/register`;
  return Axios.post(endpoint, user);
}
/**
 * Authenticates a user against the local-auth endpoint.
 * @param {Object} user - Credentials payload.
 * @returns {Promise} Axios promise resolving to the server response.
 */
export function loginUser(user) {
  const endpoint = `${server}/auth/local`;
  return Axios.post(endpoint, user);
}
/**
 * Fetches the list of users-permissions roles.
 * @returns {Promise} Axios promise resolving to the roles response.
 */
export function userRoles() {
  const endpoint = `${server}/users-permissions/roles`;
  return Axios.get(endpoint);
}
/**
 * Updates an existing user record.
 * @param {string|number} id - ID of the user to update.
 * @param {Object} user - Fields to persist.
 * @returns {Promise} Axios promise resolving to the server response.
 */
export function editUser(id, user) {
  const endpoint = `${server}/users/${id}`;
  return Axios.put(endpoint, user);
}
|
import { live, checked, removeValues, resetValues, prop, addClass, removeClass } from 'components/utils';
// Member "waiting" form behavior, re-bound on every Turbolinks page visit.
document.addEventListener('turbolinks:load', () => {
  // When the basket size changes, enable/disable every dependent fieldset.
  live("#member_waiting_basket_size_input input[type='radio']", 'change', event => {
    const extraPrice = '#member_waiting_basket_price_extra_input';
    const extraPriceRadios = `${extraPrice} input[type='radio']`;
    const complements = 'fieldset.members_basket_complements';
    const complementsInputs = `${complements} input[type='number']`;
    const depots = '#member_waiting_depot_input';
    const depotsRadios = `${depots} input[type='radio']`;
    const alternativeDepots = '#member_waiting_alternative_depot_ids_input';
    const alternativeDepotsCheckBoxes = `${alternativeDepots} input[type='checkbox']`;
    const billingYearDivision = '#member_billing_year_division_input';
    const billingYearDivisionRadios = `${billingYearDivision} input[type='radio']`;
    const billingYearDivision1 = '#member_billing_year_division_1';
    const billingYearDivision1Label = 'label[for=member_billing_year_division_1]';
    if (event.target.value === '0') {
      // Basket size "0": clear and disable every basket-dependent input...
      addClass(extraPrice, 'disabled');
      checked(extraPriceRadios, false);
      prop(extraPriceRadios, 'disabled', true);
      addClass(complements, 'disabled');
      removeValues(complementsInputs);
      prop(complementsInputs, 'disabled', true);
      addClass(depots, 'disabled');
      checked(depotsRadios, false);
      prop(depotsRadios, 'disabled', true);
      addClass(alternativeDepots, 'disabled');
      checked(alternativeDepotsCheckBoxes, false);
      prop(alternativeDepotsCheckBoxes, 'disabled', true);
      addClass(billingYearDivision, 'disabled');
      checked(billingYearDivisionRadios, false);
      prop(billingYearDivisionRadios, 'disabled', true);
      // ...except billing division 1, which is forced on (presumably the
      // annual/default division — TODO confirm against the form markup).
      removeClass(billingYearDivision1Label, 'disabled');
      checked(billingYearDivision1, true);
      prop(billingYearDivision1, 'disabled', false);
    } else {
      // Any real basket size: re-enable every dependent input (values are not
      // restored; complements are reset to 0).
      removeClass(extraPrice, 'disabled');
      prop(extraPriceRadios, 'disabled', false);
      removeClass(complements, 'disabled');
      resetValues(complementsInputs, 0);
      prop(complementsInputs, 'disabled', false);
      removeClass(depots, 'disabled');
      prop(depotsRadios, 'disabled', false);
      removeClass(alternativeDepots, 'disabled');
      prop(alternativeDepotsCheckBoxes, 'disabled', false);
      removeClass(billingYearDivision, 'disabled');
      prop(billingYearDivisionRadios, 'disabled', false);
    }
  });
  // When the main depot changes, re-enable all alternative-depot checkboxes,
  // then disable (and uncheck) the one matching the newly selected depot so a
  // member cannot pick their main depot as its own alternative.
  live("#member_waiting_depot_input input[type='radio']", 'change', event => {
    const alternativeDepotsLabels = "#member_waiting_alternative_depot_ids_input label";
    const alternativeDepotsCheckBoxes = "#member_waiting_alternative_depot_ids_input input[type='checkbox']";
    const alternativeDepotsLabel = `label[for='member_waiting_alternative_depot_ids_${event.target.value}']`;
    const alternativeDepotsCheckBox = `#member_waiting_alternative_depot_ids_${event.target.value}`;
    removeClass(alternativeDepotsLabels, 'disabled');
    prop(alternativeDepotsCheckBoxes, 'disabled', false);
    addClass(alternativeDepotsLabel, 'disabled');
    checked(alternativeDepotsCheckBox, false);
    prop(alternativeDepotsCheckBox, 'disabled', true);
  });
});
|
//// [unusedLocalsOnFunctionDeclarationWithinFunctionExpression2.ts]
var greeter = function (person: string, person2: string) {
var unused = 20;
function maker(child: string): void {
var unused2 = 22;
}
function maker2(child2: string): void {
var unused3 = 23;
}
maker2(person2);
}
//// [unusedLocalsOnFunctionDeclarationWithinFunctionExpression2.js]
var greeter = function (person, person2) {
var unused = 20;
function maker(child) {
var unused2 = 22;
}
function maker2(child2) {
var unused3 = 23;
}
maker2(person2);
};
|
// DETAILS: This class just routes things to the right component so the component can deal with it

// Constants (self-made)
const Debug = require("./../../Debug"),
  MimeTypes = require("./MimeTypes"),
  HttpTemplates = require("./Templates"),
  GenericResponses = require("./GenericResponses"),
  QueryHandler = require("./QueryHandler");

// Other Dependancies
const path = require('path'),
  SystemIO = require('fs'),
  { Console } = require("console"),
  { request } = require("http"),
  // Map of percent-encoded escape sequences to the characters they encode.
  // BUG FIX: the backslash entry was keyed "5C" (missing the leading "%"),
  // so it could never match an actual escape sequence; fixed to "%5C".
  QueryCodes = {
    "%0A": "\n",
    "%20": " ",
    "%21": "!",
    "%22": "\"",
    "%23": "#",
    "%24": "$",
    "%2F": "/",
    "%27": "'",
    "%28": "(",
    "%29": ")",
    "%3A": ":",
    "%3B": ";",
    "%3F": "?",
    "%40": "@",
    "%5e": "^",
    "%5C": "\\",
    "%25": "%"
  };

// Variables
var HttpServer;
// Root directory that static files are served from; requests may not escape it.
const SiteRoot = path.join(__dirname, "../../..", "wwwstatic");
/**
 * Starts the HTTP server on the given port and routes each request to the
 * query handler, a static file, or a generic error page.
 * @param port TCP port to listen on (defaults to 80).
 */
function Start(port = 80) {
  // Init all the objects
  QueryHandler.Init();
  HttpTemplates.Init();
  // Start the HTTP server
  HttpServer = require("http").createServer((_clientRequest, _serverResponse) => {
    // Once we get a request what do we do?
    // Organize all the variables and check if they are needed
    // Then do an action based on what is defined
    var requestData = {
      path: (_clientRequest.url.indexOf("?") == -1 ? _clientRequest.url : _clientRequest.url.substring(0, _clientRequest.url.indexOf("?"))),
      query: (_clientRequest.url.indexOf("?") == -1 ? undefined : _clientRequest.url.substring(_clientRequest.url.indexOf("?") + 1, _clientRequest.url.length)),
      fullPath: undefined,
      fileExt: undefined,
      Request: _clientRequest,
      Socket: _serverResponse
    };
    requestData.fullPath = path.join(SiteRoot, requestData.path);
    requestData.fileExt = (requestData.path.indexOf('.') != -1 ? requestData.path.substring(requestData.path.lastIndexOf('.')) : undefined);
    requestData.Send = data => FinishSend(requestData, data);
    requestData.SendErrorPage = (code, body) => GenericResponses.SendErrorPage(requestData, code, body);
    //#region This is to format the requestData.path
    // BUG FIX: the `%3B` loop used to call replace("%3F", ";"), which never
    // removed the `%3B` being tested for — any path containing `%3B` spun
    // this loop forever. It now replaces the escape it actually checks for.
    // BUG FIX: `%25` is now decoded LAST (matching the query chain below) so
    // a double-encoded sequence like `%2520` is not decoded twice.
    while (requestData.path.indexOf("%20") != -1) requestData.path = requestData.path.replace("%20", " ");
    // This is a query thing: while (requestData.path.indexOf("+") != -1) requestData.path = requestData.path.replace("+", " ");
    while (requestData.path.indexOf("%21") != -1) requestData.path = requestData.path.replace("%21", "!");
    while (requestData.path.indexOf("%22") != -1) requestData.path = requestData.path.replace("%22", "\"");
    while (requestData.path.indexOf("%23") != -1) requestData.path = requestData.path.replace("%23", "#");
    while (requestData.path.indexOf("%24") != -1) requestData.path = requestData.path.replace("%24", "$");
    while (requestData.path.indexOf("%28") != -1) requestData.path = requestData.path.replace("%28", "(");
    while (requestData.path.indexOf("%29") != -1) requestData.path = requestData.path.replace("%29", ")");
    while (requestData.path.indexOf("%3B") != -1) requestData.path = requestData.path.replace("%3B", ";");
    while (requestData.path.indexOf("%3F") != -1) requestData.path = requestData.path.replace("%3F", "?");
    while (requestData.path.indexOf("%40") != -1) requestData.path = requestData.path.replace("%40", "@");
    while (requestData.path.indexOf("%5e") != -1) requestData.path = requestData.path.replace("%5e", "^");
    while (requestData.path.indexOf("%25") != -1) requestData.path = requestData.path.replace("%25", "%");
    //#endregion
    // Everything below is to format the requestData.query
    requestData.parsedQuery = {};
    if (requestData.query) {
      while (requestData.query.indexOf("+") != -1) requestData.query = requestData.query.replace("+", " ");
      while (requestData.query.indexOf("%20") != -1) requestData.query = requestData.query.replace("%20", " ");
      while (requestData.query.indexOf("%21") != -1) requestData.query = requestData.query.replace("%21", "!");
      while (requestData.query.indexOf("%22") != -1) requestData.query = requestData.query.replace("%22", "\"");
      while (requestData.query.indexOf("%23") != -1) requestData.query = requestData.query.replace("%23", "#");
      while (requestData.query.indexOf("%24") != -1) requestData.query = requestData.query.replace("%24", "$");
      while (requestData.query.indexOf("%28") != -1) requestData.query = requestData.query.replace("%28", "(");
      while (requestData.query.indexOf("%29") != -1) requestData.query = requestData.query.replace("%29", ")");
      while (requestData.query.indexOf("%3B") != -1) requestData.query = requestData.query.replace("%3B", ";");
      while (requestData.query.indexOf("%3F") != -1) requestData.query = requestData.query.replace("%3F", "?");
      while (requestData.query.indexOf("%40") != -1) requestData.query = requestData.query.replace("%40", "@");
      while (requestData.query.indexOf("%5e") != -1) requestData.query = requestData.query.replace("%5e", "^");
      while (requestData.query.indexOf("%27") != -1) requestData.query = requestData.query.replace("%27", "'");
      while (requestData.query.indexOf("%0A") != -1) requestData.query = requestData.query.replace("%0A", "\n");
      while (requestData.query.indexOf("%3A") != -1) requestData.query = requestData.query.replace("%3A", ":");
      while (requestData.query.indexOf("%5C") != -1) requestData.query = requestData.query.replace("%5C", "\\");
      while (requestData.query.indexOf("%2F") != -1) requestData.query = requestData.query.replace("%2F", "/");
      while (requestData.query.indexOf("%25") != -1) requestData.query = requestData.query.replace("%25", "%");
      // NOTE(review): this hand-rolled decoding covers only a subset of
      // escapes; `decodeURIComponent` would be the standard replacement, but
      // swapping it in would also decode sequences this code deliberately
      // leaves alone — confirm before switching.
      requestData.parsedQuery = ParseQuery(requestData.query);
    }
    // Precondition 1: Is the current working directory valid?
    if (requestData.fullPath.indexOf(SiteRoot) != 0) {
      // Forward to generic responses with code 403 (Forbidden)
      // Reason? Access outside the wwwroot directory is prohibited
      // BUG FIX: this used to pass the undefined identifier `Data`, which
      // threw a ReferenceError instead of returning the 403 page.
      GenericResponses.SendErrorPage(requestData, 403);
    }
    // Precondition 2: Is there a query?
    else if (requestData.query) {
      // Send it to the query handler
      QueryHandler.Process(requestData);
    }
    else SendStaticFile(requestData);
  }).listen(port, "0.0.0.0", undefined, undefined, () => { Debug.Log("HTTP Listener Online"); });
}
var HttpCallbacks = [];
async function SendStaticFile(requestData, skipCallback = false) {
// Check callbacks first
if (!skipCallback) {
var i = 0;
while (i < HttpCallbacks.length) {
if (requestData.path.toLowerCase().indexOf(HttpCallbacks[i].Name) == 0) {
HttpCallbacks[i].Callback(requestData);
return;
}
i++;
}
}
// Condition 1: Does the file exist
if (await FileExists(requestData.fullPath)) {
// Get the file and return it to the client
SystemIO.readFile(requestData.fullPath, async function(error, data) {
if (error) {
// Error even though the file exists... return err 500 (Internal Server Error)
GenericResponses.SendErrorPage(requestData, 500);
Debug.LogNetwork("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [500] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
}
else {
// File was found then return it
Debug.LogNetwork("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [200] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
requestData.Socket.writeHead(200, { 'Content-Type': await MimeTypes.GetHeader(requestData.fileExt) });
FinishSend(requestData, data);
}
});
}
// Condition 2: Does the file exist (+ '/index.html')
else if (await FileExists(requestData.fullPath + "/index.html")) {
requestData.fileExt = ".html";
SystemIO.readFile(requestData.fullPath + "/index.html", function(error, data) {
if (error) {
// Error even though the file exists... return err 500 (Internal Server Error)
GenericResponses.SendErrorPage(requestData, 500);
Debug.LogError("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [500] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
}
else {
// File was found then return it
Debug.LogNetwork("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [200] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
requestData.Socket.writeHead(200, { 'Content-Type': MimeTypes.GetHeader(".html") });
FinishSend(requestData, data);
}
});
}
// Condition 3: Does a default image exist (made a .webp because it is predicted to become the standard of the web)
else if (await FileExists(requestData.fullPath.substring(0, requestData.fullPath.lastIndexOf('/')) + "/default.webp")) {
requestData.fileExt = ".webp";
SystemIO.readFile(requestData.fullPath.substring(0, requestData.fullPath.lastIndexOf('/')) + "/default.webp", function(error, data) {
if (error) {
// Error even though the file exists... return err 500 (Internal Server Error)
GenericResponses.SendErrorPage(requestData, 500);
Debug.LogError("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [500] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
}
else {
// File was found then return it
Debug.LogNetwork("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [200] : " + requestData.path + (requestData.query ? "?" + requestData.query : "") + " {as default.webp}");
requestData.Socket.writeHead(200, { 'Content-Type': MimeTypes.GetHeader(".webp") });
FinishSend(requestData, data);
}
});
}
// Condition 3: Does a default image exist (made a .webp because it is predicted to become the standard of the web)
else if (await FileExists(requestData.fullPath.substring(0, requestData.fullPath.lastIndexOf('\\')) + "\\default.webp")) {
requestData.fileExt = ".webp";
SystemIO.readFile(requestData.fullPath.substring(0, requestData.fullPath.lastIndexOf('\\')) + "\\default.webp", function(error, data) {
if (error) {
// Error even though the file exists... return err 500 (Internal Server Error)
GenericResponses.SendErrorPage(requestData, 500);
Debug.LogError("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [500] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
}
else {
// File was found then return it
Debug.LogNetwork("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [200] : " + requestData.path + (requestData.query ? "?" + requestData.query : "") + " {as default.webp}");
requestData.Socket.writeHead(200, { 'Content-Type': MimeTypes.GetHeader(".webp") });
FinishSend(requestData, data);
}
});
}
// Still precondition 3
else if (await FileExists(requestData.fullPath + "/default.webp")) {
requestData.fileExt = ".webp";
SystemIO.readFile(requestData.fullPath + "/default.webp", function(error, data) {
if (error) {
// Error even though the file exists... return err 500 (Internal Server Error)
GenericResponses.SendErrorPage(requestData, 500);
Debug.LogError("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [500] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
}
else {
// File was found then return it
Debug.LogNetwork("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [200] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
requestData.Socket.writeHead(200, { 'Content-Type': MimeTypes.GetHeader(".webp") });
FinishSend(requestData, data);
}
});
}
// Files do not exist: Return error 404 (Not Found)
else {
Debug.LogWarn("[HTTP] [" + requestData.Socket.connection.remoteAddress + "] [404] : " + requestData.path + (requestData.query ? "?" + requestData.query : ""));
GenericResponses.SendErrorPage(requestData, 404);
}
}
module.exports.AddHttpCallback = (rule, callback = (requestData) => { SendStaticFile(requestData, true); }) => {
HttpCallbacks.push({
Name: rule.toLowerCase(),
Callback: callback
});
}
async function FinishSend(requestData, data) {
// Format requests and etc
if (requestData.fileExt == undefined) {
requestData.Socket.writeHead(200, { 'Content-Type': 'text/html' });
requestData.fileExt = ".html";
}
if (requestData.fileExt == ".html") data = await HttpTemplates.Process(data);
requestData.Socket.end(data);
}
function FileExists(path = "") {
return new Promise(async resolve => {
await SystemIO.stat(path, (statError, stat) => {
if (statError) resolve(false);
try { resolve(stat.isFile()); }
catch { resolve(false); }
});
});
}
function ParseQuery(query = "") {
var _finalData = { };
var _splitDeclarations = query.split("&");
for (var i = 0; i < _splitDeclarations.length; i++) {
var _field = _splitDeclarations[i].split("=");
if (_field.length == 1) _finalData[_field[0]] = true;
/*else if (!isNaN(parseFloat(_field[1]))) {
_finalData[_field[0]] = parseFloat(_field[1]);
}
else if (!isNaN(parseInt(_field[1]))) {
_finalData[_field[0]] = parseInt(_field[1]);
}
else if (_field[1].toLowerCase() == 'true' || _field[1].toLowerCase() == 'false'){
_finalData[_field[0]] = (_field[1].toLowerCase() == "true");
}*/
else {
_finalData[_field[0]] = _field[1];
}
}
return _finalData;
}
module.exports.Start = Start;
module.exports.FinishSend = FinishSend;
module.exports.SendStaticFile = SendStaticFile;
|
"""Get information about an user on GitHub
Syntax: .github USERNAME"""
from telethon import events
import requests
from userbot.utils import admin_cmd
@borg.on(admin_cmd("github (.*)"))
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
url = "https://api.github.com/users/{}".format(input_str)
r = requests.get(url)
if r.status_code != 404:
b = r.json()
avatar_url = b["avatar_url"]
html_url = b["html_url"]
gh_type = b["type"]
name = b["name"]
company = b["company"]
blog = b["blog"]
location = b["location"]
bio = b["bio"]
created_at = b["created_at"]
await borg.send_file(
event.chat_id,
caption="""Name: [{}]({})
CH : @IQTHON
Type: {}
Company: {}
Blog: {}
Location: {}
Bio: {}
Profile Created: {}""".format(name, html_url, gh_type, company, blog, location, bio, created_at),
file=avatar_url,
force_document=False,
allow_cache=False,
reply_to=event
)
await event.delete()
else:
await event.edit("`{}`: {}".format(input_str, r.text))
|
const R = require('ramda')
const { log } = console
const { map, compose, equals } = R
const prompt = require('readline-sync')
class IO {
// of :: (IO f) => a -> f a
static of (value) {
return new IO(value)
}
constructor (value) {
if (typeof value !== 'function') {
throw new Error('IO Monad required a function')
}
this.value = value
}
// fmap :: (IO f) => f a ~> (a -> b) -> f b
fmap (fn) {
return new IO(() => fn( this.run() ))
}
// ap :: (IO f) => f a ~> f (a -> b) -> f b
ap (aFn) {
return IO.of(() => aFn.run()(this.run()) )
}
// chain :: (IO f) => f a ~> (a -> f b) -> f b
chain (fnA) {
// return IO.of(() => fn(this.run()).run())
return fnA(this.value())
}
// run :: (IO f) => f a ~> () -> a
run () {
return this.value()
}
}
function getUser() {
return prompt.question('What is your username?')
}
function askForPassword(username) {
return IO.of(() => {
log(`Hey there ${ username }`)
const pass = prompt.question('Password please: ')
return { username, pass }
})
}
function checkPassword({ username, pass }) {
const isCorrect = pass === '12345'
log(
isCorrect ?
`Welcome ${ username }, to the machine` :
`Whoa ${ username }! Come back correct!`
)
return isCorrect
}
IO.of(getUser)
.chain(askForPassword)
.ap(IO.of(() => checkPassword))
.run()
|
var UniJSnodeReq = require;
var requireTags, require = function(req) {
requireTags = req.split('!');
return UniJSnodeReq(requireTags.pop());
};
require.resolve = UniJSnodeReq.resolve;
'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _reactRouter = require('react-router');
var _componentsMainReact = require('./components/Main.react');
var _componentsMainReact2 = _interopRequireDefault(_componentsMainReact);
var _componentsHomeReact = require('./components/Home.react');
var _componentsHomeReact2 = _interopRequireDefault(_componentsHomeReact);
var _componentsBlogPostReact = require('./components/BlogPost.react');
var _componentsBlogPostReact2 = _interopRequireDefault(_componentsBlogPostReact);
var routes = _react2['default'].createElement(
_reactRouter.Route,
{ handler: _componentsMainReact2['default'], name: 'main', path: '/' },
_react2['default'].createElement(_reactRouter.Route, { handler: _componentsBlogPostReact2['default'], name: 'blogpost', path: '/:id' }),
_react2['default'].createElement(_reactRouter.DefaultRoute, { handler: _componentsHomeReact2['default'] })
);
exports['default'] = routes;
module.exports = exports['default'];
|
'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Cawe",
"Mvulo",
"Lwesibini",
"Lwesithathu",
"Lwesine",
"Lwesihlanu",
"Mgqibelo"
],
"MONTH": [
"Janyuwari",
"Februwari",
"Matshi",
"Epreli",
"Meyi",
"Juni",
"Julayi",
"Agasti",
"Septemba",
"Okthoba",
"Novemba",
"Disemba"
],
"SHORTDAY": [
"Caw",
"Mvu",
"Bin",
"Tha",
"Sin",
"Hla",
"Mgq"
],
"SHORTMONTH": [
"Jan",
"Feb",
"Mat",
"Epr",
"Mey",
"Jun",
"Jul",
"Aga",
"Sep",
"Okt",
"Nov",
"Dis"
],
"fullDate": "y MMMM d, EEEE",
"longDate": "y MMMM d",
"medium": "y MMM d HH:mm:ss",
"mediumDate": "y MMM d",
"mediumTime": "HH:mm:ss",
"short": "y-MM-dd HH:mm",
"shortDate": "y-MM-dd",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "R",
"DECIMAL_SEP": ",",
"GROUP_SEP": "\u00a0",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "\u00a4-",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "xh",
"pluralCat": function (n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]);
|
/**
* Original Work Copyright 2014 IBM Corp.
*
* Copyright (c) 2016, Klaus Landsdorf (http://bianco-royal.de/)
* All rights reserved.
* node-red-contrib-modbus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
var should = require("should");
var sinon = require("sinon");
var when = require("when");
var request = require('supertest');
var nock;
if (!process.version.match(/^v0\.[0-9]\./)) {
// only set nock for node >= 0.10
try {
nock = require('nock');
} catch (err) {
// nevermind, will skip nock tests
nock = null;
}
}
var RED = require("node-red/red/red.js");
var redNodes = require("node-red/red/runtime/nodes");
var flows = require("node-red/red/runtime/nodes/flows");
var credentials = require("node-red/red/runtime/nodes/credentials");
var comms = require("node-red/red/api/comms.js");
var log = require("node-red/red/runtime/log.js");
var express = require("express");
var http = require('http');
var app = express();
var address = '127.0.0.1';
var listenPort = 0; // use ephemeral port
var port;
var url;
var logSpy;
var server;
function helperNode(n) {
RED.nodes.createNode(this, n);
}
module.exports = {
load: function (testNode, testFlows, testCredentials, cb) {
logSpy = sinon.spy(log, "log");
logSpy.FATAL = log.FATAL;
logSpy.ERROR = log.ERROR;
logSpy.WARN = log.WARN;
logSpy.INFO = log.INFO;
logSpy.DEBUG = log.DEBUG;
logSpy.TRACE = log.TRACE;
logSpy.METRIC = log.METRIC;
if (typeof testCredentials === 'function') {
cb = testCredentials;
testCredentials = {};
}
var storage = {
getFlows: function () {
var defer = when.defer();
defer.resolve(testFlows);
return defer.promise;
},
getCredentials: function () {
var defer = when.defer();
defer.resolve(testCredentials);
return defer.promise;
},
saveCredentials: function () {
// do nothing
}
};
var settings = {
available: function () {
return false;
}
};
var red = {};
for (var i in RED) {
if (RED.hasOwnProperty(i) && !/^(init|start|stop)$/.test(i)) {
var propDescriptor = Object.getOwnPropertyDescriptor(RED, i);
Object.defineProperty(red, i, propDescriptor);
}
}
red["_"] = function (messageId) {
return messageId;
};
redNodes.init({settings: settings, storage: storage});
credentials.init(storage, express());
RED.nodes.registerType("helper", helperNode);
if (Array.isArray(testNode)) {
for (var i = 0; i < testNode.length; i++) {
testNode[i](red);
}
} else {
testNode(red);
}
flows.load().then(function () {
flows.startFlows();
should.deepEqual(testFlows, flows.getFlows());
cb();
});
},
unload: function () {
// TODO: any other state to remove between tests?
redNodes.clearRegistry();
if (logSpy) {
logSpy.restore();
}
return flows.stopFlows();
},
getNode: function (id) {
return flows.get(id);
},
credentials: credentials,
clearFlows: function () {
return flows.stopFlows();
},
request: function () {
return request(RED.httpAdmin);
},
startServer: function (done) {
server = http.createServer(function (req, res) {
app(req, res);
});
RED.init(server, {
SKIP_BUILD_CHECK: true,
logging: {console: {level: 'off'}}
});
server.listen(listenPort, address);
server.on('listening', function () {
port = server.address().port;
url = 'http://' + address + ':' + port;
comms.start();
done();
});
},
//TODO consider saving TCP handshake/server reinit on start/stop/start sequences
stopServer: function (done) {
if (server) {
try {
server.close(done);
} catch (e) {
done();
}
}
},
url: function () {
return url;
},
nock: nock,
log: function () {
return logSpy;
}
};
|
"""Weight of Evidence"""
import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
import category_encoders.utils as util
from sklearn.utils.random import check_random_state
__author__ = 'Jan Motl'
class WOEEncoder(BaseEstimator, TransformerMixin):
"""Weight of Evidence coding for categorical features.
Parameters
----------
verbose: int
integer indicating verbosity of output. 0 for none.
cols: list
a list of columns to encode, if None, all string columns will be encoded.
drop_invariant: bool
boolean for whether or not to drop columns with 0 variance.
return_df: bool
boolean for whether to return a pandas DataFrame from transform (otherwise it will be a numpy array).
impute_missing: bool
boolean for whether or not to apply the logic for handle_unknown, will be deprecated in the future.
handle_unknown: str
options are 'ignore', 'error' and 'impute', defaults to 'impute', which will assume WOE=0.
randomized: bool,
adds normal (Gaussian) distribution noise into training data in order to decrease overfitting (testing data are untouched).
sigma: float
standard deviation (spread or "width") of the normal distribution.
regularization: float
the purpose of regularization is mostly to prevent division by zero.
When regularization is 0, you may encounter division by zero.
Example
-------
>>> from category_encoders import *
>>> import pandas as pd
>>> from sklearn.datasets import load_boston
>>> bunch = load_boston()
>>> y = bunch.target > 22.5
>>> X = pd.DataFrame(bunch.data, columns=bunch.feature_names)
>>> enc = WOEEncoder(cols=['CHAS', 'RAD']).fit(X, y)
>>> numeric_dataset = enc.transform(X)
>>> print(numeric_dataset.info())
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 506 entries, 0 to 505
Data columns (total 13 columns):
CRIM 506 non-null float64
ZN 506 non-null float64
INDUS 506 non-null float64
CHAS 506 non-null float64
NOX 506 non-null float64
RM 506 non-null float64
AGE 506 non-null float64
DIS 506 non-null float64
RAD 506 non-null float64
TAX 506 non-null float64
PTRATIO 506 non-null float64
B 506 non-null float64
LSTAT 506 non-null float64
dtypes: float64(13)
memory usage: 51.5 KB
None
References
----------
.. [1] Weight of Evidence (WOE) and Information Value Explained. from
https://www.listendata.com/2015/03/weight-of-evidence-woe-and-information.html.
"""
def __init__(self, verbose=0, cols=None, drop_invariant=False, return_df=True, impute_missing=True,
handle_unknown='impute', random_state=None, randomized=False, sigma=0.05, regularization=1.0):
self.verbose = verbose
self.return_df = return_df
self.drop_invariant = drop_invariant
self.drop_cols = []
self.cols = cols
self._dim = None
self.mapping = None
self.impute_missing = impute_missing
self.handle_unknown = handle_unknown
self._sum = None
self._count = None
self.random_state = random_state
self.randomized = randomized
self.sigma = sigma
self.regularization = regularization
# noinspection PyUnusedLocal
def fit(self, X, y, **kwargs):
"""Fit encoder according to X and binary y.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples
and n_features is the number of features.
y : array-like, shape = [n_samples]
Binary target values.
Returns
-------
self : encoder
Returns self.
"""
# Unite parameters into pandas types
X = util.convert_input(X)
if isinstance(y, pd.DataFrame):
y = y.iloc[:,0]
else:
y = pd.Series(y, name='target')
# The lengths must be equal
if X.shape[0] != y.shape[0]:
raise ValueError("The length of X is " + str(X.shape[0]) + " but length of y is " + str(y.shape[0]) + ".")
# The label must be binary with values {0,1}
unique = y.unique()
if len(unique) != 2:
raise ValueError("The target column y must be binary. But the target contains " + str(len(unique)) + " unique value(s).")
if y.isnull().any():
raise ValueError("The target column y must not contain missing values.")
if np.max(unique) < 1:
raise ValueError("The target column y must be binary with values {0, 1}. Value 1 was not found in the target.")
if np.min(unique) > 0:
raise ValueError("The target column y must be binary with values {0, 1}. Value 0 was not found in the target.")
self._dim = X.shape[1]
# If columns aren't passed, just use every string column
if self.cols is None:
self.cols = util.get_obj_cols(X)
else:
self.cols = util.convert_cols_to_list(self.cols)
# Training
self.mapping = self._train(X, y, cols=self.cols)
# Store column names with approximately constant variance on the training data
if self.drop_invariant:
self.drop_cols = []
X_temp = self.transform(X)
generated_cols = util.get_generated_cols(X, X_temp, self.cols)
self.drop_cols = [x for x in generated_cols if X_temp[x].var() <= 10e-5]
return self
def transform(self, X, y=None):
"""Perform the transformation to new categorical data. When the data are used for model training,
it is important to also pass the target in order to apply leave one out.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
y : array-like, shape = [n_samples] when transform by leave one out
None, when transform without target information (such as transform test set)
Returns
-------
p : array, shape = [n_samples, n_numeric + N]
Transformed values with encoding applied.
"""
if self._dim is None:
raise ValueError('Must train encoder before it can be used to transform data.')
# Unite the input into pandas DataFrame
X = util.convert_input(X)
# Then make sure that it is the right size
if X.shape[1] != self._dim:
raise ValueError('Unexpected input dimension %d, expected %d' % (X.shape[1], self._dim,))
# If we are encoding the training data, we have to check the target
if y is not None:
if isinstance(y, pd.DataFrame):
y = y.iloc[:, 0]
else:
y = pd.Series(y, name='target')
if X.shape[0] != y.shape[0]:
raise ValueError("The length of X is " + str(X.shape[0]) + " but length of y is " + str(y.shape[0]) + ".")
if not self.cols:
return X
# Do not modify the input argument
X = X.copy(deep=True)
# Loop over columns and replace nominal values with WOE
X = self._score(X, y)
# Postprocessing
# Note: We should not even convert these columns.
if self.drop_invariant:
for col in self.drop_cols:
X.drop(col, 1, inplace=True)
if self.return_df:
return X
else:
return X.values
def fit_transform(self, X, y=None, **fit_params):
"""
Encoders that utilize the target must make sure that the training data are transformed with:
transform(X, y)
and not with:
transform(X)
"""
return self.fit(X, y, **fit_params).transform(X, y)
def _train(self, X, y, cols=None):
# Initialize the output
mapping = {}
# Calculate global statistics
self._sum = y.sum()
self._count = y.count()
for col in cols:
# Calculate sum and count of the target for each unique value in the feature col
stats = y.groupby(X[col]).agg(['sum', 'count']) # Count of x_{i,+} and x_i
# Create a new column with regularized WOE.
# Regularization helps to avoid division by zero.
# Pre-calculate WOEs because logarithms are slow.
nominator = (stats['sum'] + self.regularization) / (self._sum + 2*self.regularization)
denominator = ((stats['count'] - stats['sum']) + self.regularization) / (self._count - self._sum + 2*self.regularization)
woe = np.log(nominator / denominator)
# Ignore unique values. This helps to prevent overfitting on id-like columns.
woe[stats['count'] == 1] = 0
# Store WOE for transform() function
mapping[col] = woe
return mapping
def _score(self, X, y):
for col in self.cols:
# Score the column
X[col] = X[col].map(self.mapping[col])
# Replace missing values only in the computed columns
if self.impute_missing:
if self.handle_unknown == 'impute':
X[col].fillna(0, inplace=True)
elif self.handle_unknown == 'error':
if X[col].isnull().any():
raise ValueError('Unexpected categories found in column %s' % col)
# Randomization is meaningful only for training data -> we do it only if y is present
if self.randomized and y is not None:
random_state_generator = check_random_state(self.random_state)
X[col] = (X[col] * random_state_generator.normal(1., self.sigma, X[col].shape[0]))
return X
|
/**
* Copyright IBM Corp. 2016, 2021
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*
* Code generated by @carbon/icon-build-helpers. DO NOT EDIT.
*/
'use strict';
var iconPropTypes = require('./iconPropTypes-b9203099.js');
var React = require('react');
require('@carbon/icon-helpers');
require('prop-types');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var React__default = /*#__PURE__*/_interopDefaultLegacy(React);
var _path;
var Scalpel = /*#__PURE__*/React__default['default'].forwardRef(function Scalpel(_ref, ref) {
var children = _ref.children,
_ref$size = _ref.size,
size = _ref$size === void 0 ? 16 : _ref$size,
rest = iconPropTypes._objectWithoutProperties(_ref, ["children", "size"]);
return /*#__PURE__*/React__default['default'].createElement(iconPropTypes.Icon, iconPropTypes._objectSpread2({
width: size,
height: size,
ref: ref,
xmlns: "http://www.w3.org/2000/svg",
viewBox: "0 0 32 32",
fill: "currentColor"
}, rest), _path || (_path = /*#__PURE__*/React__default['default'].createElement("path", {
d: "M28.83,5.17a4.1,4.1,0,0,0-5.66,0L.34,28H9.59a5,5,0,0,0,3.53-1.46L28.83,10.83a4,4,0,0,0,0-5.66ZM12.29,18.88l2.09-2.09,2.83,2.83-2.09,2.09Zm-.58,6.24A3,3,0,0,1,9.59,26H5.17l5.71-5.71,2.83,2.83ZM27.41,9.41l-8.79,8.8-2.83-2.83,8.8-8.79a2,2,0,0,1,2.82,0,2,2,0,0,1,0,2.82Z"
})), children);
});
Scalpel.propTypes = iconPropTypes.iconPropTypes;
module.exports = Scalpel;
|
const config = {
projectName: 'taro-demos',
date: '2018-11-20',
designWidth: 750,
deviceRatio: {
'640': 2.34 / 2,
'750': 1,
'828': 1.81 / 2
},
sourceRoot: 'src',
outputRoot: 'dist',
plugins: {
babel: {
sourceMap: true,
presets: [
'env'
],
plugins: [
'transform-class-properties',
'transform-decorators-legacy',
'transform-object-rest-spread'
]
}
},
// 全局变量设置
defineConstants: {
},
copy: {
patterns: [
],
options: {
}
},
// 小程序端专用配置
weapp: {
module: {
postcss: {
autoprefixer: {
enable: true,
config: {
browsers: [
'last 3 versions',
'Android >= 4.1',
'ios >= 8'
]
}
},
pxtransform: {
enable: true,
config: {
}
},
// 小程序端样式引用本地资源内联配置
url: {
enable: true,
config: {
limit: 10240 // 设定转换尺寸上限
}
}
}
}
},
// H5 端专用配置
h5: {
publicPath: '/',
staticDirectory: 'static',
module: {
postcss: {
autoprefixer: {
enable: true
}
}
}
}
}
module.exports = function (merge) {
if (process.env.NODE_ENV === 'development') {
return merge({}, config, require('./dev'))
}
return merge({}, config, require('./prod'))
}
|
'use strict'
const dependencies = require('electron-installer-common/src/dependencies')
const spawn = require('./spawn')
const dependencyMap = {
gconf: 'GConf2',
glib2: 'glib2',
gtk2: 'gtk2',
gtk3: 'gtk3',
gvfs: 'gvfs-client',
kdeCliTools: 'kde-cli-tools',
kdeRuntime: 'kde-runtime',
notify: 'libnotify',
nss: 'nss',
trashCli: 'trash-cli',
uuid: 'libuuid',
xdgUtils: 'xdg-utils',
xss: 'libXScrnSaver',
xtst: 'libXtst'
}
/**
* Retrieves the RPM version number and determines whether it has support for boolean
* dependencies (>= 4.13.0).
*/
function rpmSupportsBooleanDependencies (logger) {
return spawn('rpmbuild', ['--version'], logger)
.then(output => rpmVersionSupportsBooleanDependencies(output.trim().split(' ')[2]))
}
/**
* Determine whether the RPM version string has support for boolean dependencies (>= 4.13.0).
*
* RPM does not follow semantic versioning, so `semver` cannot be used.
*/
function rpmVersionSupportsBooleanDependencies (rpmVersionString) {
const rpmVersion = rpmVersionString.split('.').slice(0, 3).map(n => parseInt(n))
return rpmVersion >= [4, 13, 0]
}
/**
* Transforms the list of trash requires into an RPM boolean dependency list.
*/
function trashRequiresAsBoolean (electronVersion, dependencyMap) {
const trashDepends = dependencies.getTrashDepends(electronVersion, dependencyMap)
if (trashDepends.length === 1) {
return [trashDepends[0]]
} else {
return [`(${trashDepends.join(' or ')})`]
}
}
module.exports = {
dependencyMap,
/**
* The dependencies for Electron itself, given an Electron version.
*/
forElectron: function dependenciesForElectron (electronVersion, logger) {
const requires = dependencies.getDepends(electronVersion, dependencyMap)
return module.exports.rpmSupportsBooleanDependencies(logger)
.then(supportsBooleanDependencies => {
if (supportsBooleanDependencies) {
const trashRequires = trashRequiresAsBoolean(electronVersion, dependencyMap)
return { requires: requires.concat(trashRequires) }
} else {
console.warn("You are using RPM < 4.13, which does not support boolean dependencies. This is required to express the dependencies needed for the 'shell.moveItemToTrash' API.\nIf you do not use this API, you can safely ignore this warning.\nIf you do use this API, please upgrade to RPM 4.13 or above to have the trash dependencies added to your RPM's requires section.")
return { requires }
}
})
},
rpmSupportsBooleanDependencies,
rpmVersionSupportsBooleanDependencies,
trashRequiresAsBoolean
}
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2014-2019, Lars Asplund lars.anders.asplund@gmail.com
"""
UI class Results
"""
class Results(object):
"""
Gives access to results after running tests.
"""
def __init__(self, simulator_if):
self._simulator_if = simulator_if
def merge_coverage(self, file_name, args=None):
"""
Create a merged coverage report from the individual coverage files
:param file_name: The resulting coverage file name.
:param args: The tool arguments for the merge command. Should be a list of strings.
"""
self._simulator_if.merge_coverage(file_name=file_name, args=args)
|
module.exports = {
singleQuote: true,
trailingComma: 'all',
overrides: [
{
files: '*.json',
options: { trailingComma: 'none' },
},
],
};
|
/*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("codesnippet","th",{button:"แทรกชิ้นส่วนของรหัสหรือโค้ด",codeContents:"Code content",emptySnippetError:"A code snippet cannot be empty.",language:"Language",title:"Code snippet",pathName:"code snippet"});
|
/**
* Copyright 2020 Dhiego Cassiano Fogaça Barbosa
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @file Admin plugin (restart command)
*
* @author Dhiego Cassiano Fogaça Barbosa <modscleo4@outlook.com>
*/
'use strict';
import {adminID} from "../../config.js";
import Message from "../../lib/Message.js";
import Command from "../../lib/Command.js";
import {serverConfig} from "../../global.js";
import i18n from "../../lang/lang.js";
export default new Command({
description: {
en_US: 'Restarts the bot.',
pt_BR: 'Reinicia o bot.',
},
only: [adminID],
/**
*
* @this {Command}
* @param {Object} message
* @param {import('../../lib/Client.js').default} message.client
* @param {import('discord.js').Guild} message.guild
* @param {import('discord.js').TextChannel} message.channel
* @param {import('discord.js').User} message.author
* @param {import('discord.js').GuildMember} message.member
* @param {Function} message.sendMessage
* @param {string[]} args
* @return {Promise<{content?: string, embeds?: import('discord.js').MessageEmbed[], lockAuthor?: boolean, reactions?: string[], onReact?: Function, onEndReact?: Function, timer?: number, deleteAfter?: boolean}>}{Promise<string|import('discord.js').MessageEmbed|{embed: import('discord.js').MessageEmbed, reactions: string[]}>}
*/
async fn({client, guild, channel, author, member, sendMessage}, args) {
const sc = serverConfig.get(guild.id);
setTimeout(() => {
process.exit(1);
}, 1000);
return {content: i18n('admin.restart.restarting', sc?.lang)};
},
});
|
/* global define: false */
define(['../TrackingInfo'], function(TrackingInfo) {

    'use strict';

    /**
     * Sets metadata applicable to all {@link TrackingInfo} instances.
     * This metadata will be added to any TrackingInfo instance prior
     * to being persisted to registered collectors.
     * @class Static
     */
    return function Static(collect, decorate) {

        // Internal state shared by all the instance methods below:
        // levels: the built-in context hierarchy (outer to inner);
        // metrics/dimensions/context: accumulated static metadata that
        // the decorator folds into every TrackingInfo before collection.
        var levels = ['page', 'app', 'screen'],
            metrics = {},
            dimensions = {},
            context = {},
            toString = Object.prototype.toString,
            objectKey = '[object Object]',

            // Recursive deep-merge: plain objects are merged key-by-key;
            // any other value type is overwritten by later sources.
            // Mutates and returns `target` (or a fresh object when falsy).
            merge = function merge(target) {
                target = target || {};
                return Array.prototype.slice.call(arguments, 1).reduce(function outerCopy(result, source) {
                    return Object.keys(source).reduce(function copy(result, key) {
                        if (toString.call(source[key]) === objectKey) {
                            result[key] = merge(result[key], source[key]);
                        } else {
                            result[key] = source[key];
                        }
                        return result;
                    }, result);
                }, target);
            };

        // Register a decorator that stamps the current static context,
        // metrics, and dimensions onto every TrackingInfo instance.
        decorate(function setMetaData(info) {
            info.data.context = merge(info.data.context, context);
            info.data.metrics = merge(info.data.metrics, metrics);
            info.data.dimensions = merge(info.data.dimensions, dimensions);
        });

        /**
         * Sets the global context on future {@link TrackingInfo}
         * instances before they are sent to any registered collectors.
         * There are 3 built-in context levels: page > app > screen.
         * You can only be in one context at any given level, and
         * setting an outer level will clear any inner levels. See the
         * examples for details.
         * @function Static.setContext
         * @param {String} type The type of context to set.
         *  Possible values include 'page', 'app', or 'screen', or any
         *  custom value you want, such as 'dialog'.
         * @param {String} name The name of the context to set.
         * @param {Object} [data] Any optional data to include with
         *  the TrackingInfo.
         * @example
         * // set page first
         * Tracking.static.setContext('page', '/index', {
         *   title: 'Home'
         * });
         *
         * // then set app (if applicable)
         * Tracking.static.setContext('app', 'myAppName', {
         *   'appId': 'myAppId',
         *   'appVersion': '1.0'
         * });
         *
         * // then set the screen of the app (if applicable)
         * Tracking.static.setContext('screen', 'main');
         *
         * // subsequent tracking entries will include this data
         * Tracking.events.fire('loading');
         *
         * // if we now re-set the app level, the screen will
         * // be un-set for us automatically:
         * Tracking.static.setContext('app', 'anotherApp');
         * Tracking.events.fire('loading'); // page and app values will
         *   // be sent with this and future events, but not the
         *   // previous screen value
         */
        this.setContext = function setContext(type, name, data) {
            // Clear this level (and any inner built-in levels) first.
            this.unsetContext(type);
            context[type] = name;
            collect(new TrackingInfo({
                type: 'context',
                label: name,
                data: data || {},
                category: type
            }));
        };

        /**
         * Retrieves the value set for the given context (or 'not set', if
         * no context has yet been set).
         * @function Static.getContext
         * @param {String} type The name of the context (the context type).
         * @example
         * Tracking.static.getContext('screen'); // 'main'
         * Tracking.static.getContext('app'); // 'myAppName'
         */
        this.getContext = function getContext(type) {
            return context[type] || 'not set';
        };

        /**
         * Clears any custom metrics associated with the specified
         * context type (page, app, screen, or a custom value), then
         * removes that context from the cached list. The context
         * and associated metrics will no longer be added to future
         * TrackingInfo instances.
         * @function Static.unsetContext
         * @param {String} type The type of context whose metrics
         *  should be cleared and which should be removed from the
         *  internal cache and no longer sent with future tracking
         *  data.
         * @param {Boolean} [shouldCollect=false] Set to `true` to notify
         *  any collectors that the context has been unset. Otherwise,
         *  no collectors will be notified.
         * @example
         * // automatically unsetting a context:
         * Tracking.static.setContext('screen', 'welcome');
         * // setting a new context at the same level will
         * // clear that context and any "lower" contexts,
         * // excluding any custom contexts, which must be
         * // unset manually
         * Tracking.static.setContext('screen', 'dashboard');
         * @example
         * // manually unsetting a custom context:
         * Tracking.static.setContext('dialog', 'help');
         * Tracking.static.unsetContext('dialog');
         */
        this.unsetContext = function unsetContext(type, shouldCollect) {
            // For built-in levels, clear the given level plus all inner
            // levels; a custom type only matches itself (index of the
            // appended entry).
            var allLevels = levels.concat(type),
                index = allLevels.indexOf(type);
            allLevels.slice(index).forEach(function clearContext(level) {
                context[level] = 'not set';
                if (levels.indexOf(type) === -1) {
                    // Custom contexts are removed entirely rather than
                    // left as 'not set'.
                    delete context[level];
                }
                // Unset every metric recorded for this level; calling
                // setMetric without a value deletes the metric and
                // notifies collectors.
                var mets = merge(metrics[level]);
                for(var metric in mets) {
                    if (mets.hasOwnProperty(metric)) {
                        this.setMetric(level, metric);
                    }
                }
            }.bind(this));
            if (!!shouldCollect && index !== -1) {
                // notify any collectors that the context
                // has been unset
                collect(new TrackingInfo({
                    label: '',
                    type: 'context',
                    category: type
                }));
            }
        };

        /**
         * Adds a new custom metric to the internal collection.
         * Custom metrics will be added to future {@link TrackingInfo}
         * instances automatically before persisting to collectors.
         * @function Static.setMetric
         * @param {String} type The type of metric to set.
         *  Possible values include 'page', 'app', or 'screen', as
         *  well as any custom contexts you may have set.
         * @param {String} name The name of the custom metric to set.
         * @param {*} value The value to associate with the custom
         *  metric. If `undefined` or '', the metric will no longer
         *  appear in future TrackingInfo instances.
         * @example
         * Tracking.static.setMetric('page', 'loginTime', Date.now());
         * Tracking.static.setMetric('app', 'mode', 'admin');
         * Tracking.static.setMetric('screen', 'message count', 6);
         * Tracking.events.fire('applications loaded');
         * @example
         * // to unset a previously set metric, either pass an empty
         * // string or do not specify the value:
         * Tracking.static.setMetric('app', 'mode');
         * Tracking.static.setMetric('app', 'mode', '');
         */
        this.setMetric = function setMetric(type, name, value) {
            metrics[type] = metrics[type] || {};
            metrics[type][name] = value;
            // undefined/'' acts as an "unset": remove the entry and
            // report an empty value to collectors below.
            if (typeof value === 'undefined' || value === '') {
                value = '';
                delete metrics[type][name];
            }
            // Drop empty metric buckets for custom (non built-in) types.
            if (levels.indexOf(type) === -1 &&
                !Object.keys(metrics[type]).length) {
                delete metrics[type];
            }
            collect(new TrackingInfo({
                label: name,
                type: 'metric',
                category: type,
                variable: value
            }));
        };

        /**
         * Retrieves the value set for the custom metric under the specified
         * context (or `undefined`, if no metric was set for the specified
         * context).
         * @function Static.getMetric
         * @param {String} type The name of the context (the context type).
         * @param {String} name The name of the metric.
         * @example
         * Tracking.static.getMetric('app', 'mode'); // 'admin'
         * Tracking.static.getMetric('page', 'loginTime'); // [Date]
         */
        this.getMetric = function getMetric(type, name) {
            return metrics[type] && metrics[type][name];
        };

        /**
         * Adds a new custom dimension to the internal collection. A
         * dimension represents some way you wish to segment your
         * collected tracking data. Common examples are by product
         * availability, geographic region, AB test group, etc.
         * @function Static.setDimension
         * @param {String} name The name of the custom dimension.
         * @param {String|undefined} value The value to associate
         *  with the new custom dimension. If `undefined` or '', the
         *  dimension will no longer appear in future TrackingInfo
         *  instances.
         * @example
         * Tracking.static.setDimension('region', 'northeast');
         * Tracking.static.setDimension('support-level', 'gold');
         * Tracking.events.fire('user data loaded');
         */
        this.setDimension = function setDimension(name, value) {
            dimensions[name] = value;
            if (value === undefined || value === '') {
                delete dimensions[name];
            }
            collect(new TrackingInfo({
                label: name,
                type: 'dimension',
                /* jshint -W041 */
                variable: value === undefined || value === null ?
                    '' : value.toString()
            }));
        };

        /**
         * Retrieves the value set for the custom dimension (or `undefined`,
         * if no value was set).
         * @function Static.getDimension
         * @param {String} name The name of the custom dimension.
         * @example
         * Tracking.static.getDimension('region'); // 'northeast'
         * Tracking.static.getDimension('support-level'); // 'gold'
         */
        this.getDimension = function getDimension(name) {
            return dimensions[name];
        };

        /**
         * Resets all static state; built-in levels start as 'not set'.
         * @private
         */
        this.reset = function reset() {
            metrics = {};
            dimensions = {};
            context = {};
            levels.forEach(function addContext(level) {
                metrics[level] = {};
                context[level] = 'not set';
            });
        };

    };

});
|
def pad(s, l, c=" "):
    """Return ``s`` truncated or right-padded with ``c`` to exactly ``l`` chars."""
    filled = s + c * l
    return filled[:l]
class LineBuffer:
    """A single re-drawable terminal line.

    Tracks a string ``s`` and a width ``l``; redrawing emits a carriage
    return so the line is overwritten in place rather than appended.
    """

    def __init__(self, s, l=None):
        # Width defaults to the initial string's length; the initial
        # string is NOT padded even when an explicit larger l is given.
        self.s = s
        self.l = len(s) if l is None else l
        self.draw()

    def set(self, s):
        """Replace the text, growing the width or space-padding to keep it."""
        self.s = s
        new_len = len(s)
        if new_len > self.l:
            self.l = new_len
        elif new_len < self.l:
            # Pad so redrawing fully overwrites the previous, longer text.
            self.s = pad(s, self.l)

    def get(self):
        """Return the current (possibly padded) line content."""
        return self.s

    def draw(self):
        """Redraw the line in place (carriage return, no trailing newline)."""
        print("\r{}".format(self.s), end="")
|
import React from 'react'
import Link from 'gatsby-link'
import styles from './Projects.module.css'
import Container from './'
import { evoHaxMockup, sweshareMockup, theListeningRoomMockup, websiteMockup } from '../images/projects'
import { chevronBlack } from '../images'
import {
reactImg,
bootstrapImg,
firebaseImg,
googlemapsImg,
herokuImg,
nodeImg,
postgresqlImg,
} from '../images/tech'
let marginY = 0
let destination = 0
let speed = 20
let scroller = null
/**
 * Animated scroll step toward the element with the given id.
 * Re-schedules itself via setTimeout until marginY reaches 0.
 * FIX: the original scheduled `initScroll(elementId)`, a name that is
 * never defined anywhere, so every tick threw a ReferenceError; the
 * intended recursive call is `toTop` itself.
 * NOTE(review): marginY starts at 0 here — presumably it is set to the
 * current scroll position before the animation starts; confirm callers.
 */
const toTop = elementId => {
  destination = document.getElementById(elementId).offsetTop
  scroller = setTimeout(() => {
    toTop(elementId)
  }, 1)
  marginY -= speed
  if (marginY <= 0) {
    clearTimeout(scroller)
  }
  window.scroll(0, marginY)
}
const Project = props => {
const techImgs = props.techImgs
return (
<div className={styles.project}>
<div className={styles.description}>
<div className={styles.projectname}>
<h2>{props.projectname}</h2>
</div>
<div className={styles.projectdesc}>
<h3>{props.projectdesc}</h3>
</div>
<div className={styles.contentInner}>
<ul>
{props.descriptions &&
props.descriptions.map(description => <li>{description}</li>)}
</ul>
</div>
</div>
<div className={styles.mockupAndLinks}>
<div className={styles.mockupContainer}>
<img src={props.mockup} className={styles.mockup} alt="" />
{/* <div className={styles.mockupOverlay}>
<div className={styles.techStack}>
{techImgs
? techImgs.map(image => <img src={image} alt="" />)
: 'Tech stack loading...'}
</div>
</div> */}
</div>
<div className={styles.linksContainer}>
<a href={props.githubUrl} target="_blank">
github
</a>
</div>
</div>
</div>
)
}
export default () => (
<div id="projects" className={styles.projectsSection}>
<div className={styles.projectsBar}>
<h1>#include <projects.h></h1>
<a onClick={() => toTop('top')} href="#">
<img src={chevronBlack} />
</a>
</div>
<div className={styles.projectsContainer}>
<Project
projectname="evoHax Hackathon: 1st Place Winner"
projectdesc="Augmented Speech-reader and Bluetooth module."
mockup={evoHaxMockup}
descriptions={["A portable voice recognition device augmented with bluetooth that converts speech to text to desktop and web commands.",
"Written in C++ and Python to interface with the free, open source, text to speech reader Non-visual Desktop Access (NVDA) to enable two way, hands free, audio communication between the user and various functions on their computer from opening an application to selectively reading a web page.",
"The automation of searching commands like control + F to find relevant content eliminates any need for manual keyboard and mouse input. "
]}
githubUrl="https://github.com/mai-codes/evoHax-nvda"
// techImgs={[reactImg, firebaseImg, nodeImg, googlemapsImg]}
/>
<hr className={styles.projectHR} />
<Project
projectname="FemmeHacks Hackathon: 2nd Place Winner"
projectdesc="SWEShare Goal Share Web Application."
mockup={sweshareMockup}
descriptions={["An application aimed to help Drexel’s Society of Women Engineers organize its schedule and tasks.",
"Written in Python using Django framework. Includes scripts written in Javascript and HTML/CSS.",
"The application enabled SWE members to post their goals and get matched with other members or mentors with similar goals in order to help them reach that goal through collaboration."
]}
githubUrl="https://github.com/mai-codes/FemmeHacks-SWEApp2015"
// techImgs={[reactImg, nodeImg, postgresqlImg, bootstrapImg, herokuImg]}
/>
<hr className={styles.projectHR} />
<Project
projectname="HP21 Calculator Clone"
projectdesc="*in progress*"
mockup={theListeningRoomMockup}
descriptions={["Developed a clone of the HP21 calculator.",
"Written in C# and XAML."
]}
siteUrl="https://symbalplayer.firebaseapp.com/"
githubUrl="https://github.com/TheListeningRoom/the-listening-room"
// techImgs={[reactImg, firebaseImg]}
/>
<hr className={styles.projectHR} />
<Project
projectname="Personal Web Portfolio"
projectdesc="*in progress*"
mockup={websiteMockup}
descriptions={["I developed this personal web portfolio to display my skills and resume.",
"This is a ReactJS application written in Javascript using GatsbyJS web framework. Incorporates HTML & CSS."
]}
siteUrl="https://grace-card.firebaseapp.com/"
githubUrl="https://github.com/nrl240/grace-card"
// techImgs={[reactImg, firebaseImg]}
/>
</div>
</div>
)
|
from .body_types import Transaction, Block
|
var request = require('request'),
log = require('bole')('npme-verify-trial');
module.exports = function(verificationKey, callback) {
var trialEndpoint = process.env.LICENSE_API + '/trial';
// check if a trial with this verification key exists already
request.get({
url: trialEndpoint + '/' + verificationKey,
json: true
}, function(er, resp, trial) {
if (resp.statusCode === 404) {
log.error('unable to find verification key ' + verificationKey);
// can't find a trial for that key
return callback(new Error('verification key not found'));
}
if (resp.statusCode !== 200) {
log.error('unexpected status code from hubspot; status=' + resp.statusCode + '; verificationKey=' + verificationKey);
return callback(new Error('problem verifying trial for ' + verificationKey));
}
// they already have a trial, but has it been verified?
if (trial.verified) {
return callback(null, trial);
}
request.put({
url: trialEndpoint + '/' + trial.id + '/verification',
json: true
}, function(er, resp, verifiedTrial) {
if (resp.statusCode === 200) {
return callback(null, verifiedTrial);
}
log.error('unexpected status code from hubspot; status=' + resp.statusCode + '; trial=', trial);
return callback(new Error('problem starting trial for ' + trial.id));
});
});
};
|
import React from "react"
import styled from 'styled-components'
// Full-bleed video banner wrapper: the :before padding hack reserves a
// ~16:9 box while the video itself is absolutely centered and cropped.
// FIX: the CSS declaration `sdisplay: block` was a typo for `display: block`.
const VideoWrapper = styled.div `
  background-color: #000;
  color: #fff;
  font-family: 'Roboto', sans-serif;
  font-weight: 700;
  max-height: 70rem;
  position: relative;
  overflow: hidden;
  text-align: center;
  text-transform: uppercase;

  &:before {
    content: '';
    display: block;
    padding-top: 56%;
  }

  video {
    display: block;
    position: absolute;
    z-index: 0;
    top: 50%;
    left: 50%;
    transform: translateX(-50%) translateY(-50%);
    min-width: 100%;
    min-height: 100%;
    width: auto !important;
    height: auto !important;
  }
`
// Overlay container positioned over the video, roughly one third down.
const Content = styled.div `
  left: 0;
  position: absolute;
  top: 33%;
  width: 100%;
`

// Smaller lead-in line rendered above the banner title.
const PreText = styled.div `
  text-align: center;
  font-size: 50px;
  line-height: 1em;
  margin-bottom: 2em;
`

// Two-span title treatment: the first span (all words but the last) sits
// smaller and raised; the last word is rendered larger beneath it.
const BannerTitle = styled.div `
  span {
    background-color: #78c777;
    display: inline-block;
    padding: 0.25em;
    line-height: 1em;
  }

  span:first-child {
    font-size: 50px;
    margin-right: -10%;
    position: relative;
    top: -1em;
    vertical-align: top;
  }

  span:last-child {
    font-size: 75px;
  }
`
const BannerVideo = (props) => {
const titleWords = props.title.split(` `);
const lastWord = titleWords.pop();
const otherWords = titleWords.join(` `);
return (
<VideoWrapper>
<video muted loop autoPlay>
<source src={props.src} type="video/mp4" />
</video>
<Content>
<PreText>{ props.pretext }</PreText>
<BannerTitle><span>{ otherWords }</span>
<span>{ lastWord }</span>
</BannerTitle>
</Content>
</VideoWrapper>
)
}
export default BannerVideo
|
// Expected node counts keyed by category — presumably a parser/lexer test
// fixture consumed elsewhere; TODO confirm against the consuming test.
module.exports = {
  total: 552,
  directives: 15,
  tags: 133,
  attributes: 79,
  css: 75,
  scripts: 38,
  comments: 60,
  text: 49,
  spaces: 103,
  rawTotal: 552
};
|
#!/usr/bin/env python
import glob
import os
import re
import shutil
import subprocess
import sys
import stat
from lib.config import LIBCHROMIUMCONTENT_COMMIT, BASE_URL, PLATFORM, \
get_target_arch, get_chromedriver_version, \
get_platform_key
from lib.util import scoped_cwd, rm_rf, get_electron_version, make_zip, \
execute, electron_gyp
ELECTRON_VERSION = get_electron_version()

# Repository root: two directories above this script.
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
CHROMIUM_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor',
                            'download', 'libchromiumcontent', 'static_library')

PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']

# Individual files shipped in the dist zip, per platform. On macOS the app
# bundle (in TARGET_DIRECTORIES) carries everything, so the list is empty.
TARGET_BINARIES = {
  'darwin': [
  ],
  'win32': [
    '{0}.exe'.format(PROJECT_NAME),  # 'electron.exe'
    'content_shell.pak',
    'd3dcompiler_47.dll',
    'icudtl.dat',
    'libEGL.dll',
    'libGLESv2.dll',
    'ffmpeg.dll',
    'node.dll',
    'content_resources_200_percent.pak',
    'ui_resources_200_percent.pak',
    'xinput1_3.dll',
    'natives_blob.bin',
    'snapshot_blob.bin',
  ],
  'linux': [
    PROJECT_NAME,  # 'electron'
    'content_shell.pak',
    'icudtl.dat',
    'libffmpeg.so',
    'libnode.so',
    'natives_blob.bin',
    'snapshot_blob.bin',
  ],
}
# Directories shipped (recursively) in the dist zip, per platform.
TARGET_DIRECTORIES = {
  'darwin': [
    '{0}.app'.format(PRODUCT_NAME),
  ],
  'win32': [
    'resources',
    'locales',
  ],
  'linux': [
    'resources',
    'locales',
  ],
}
def main():
  """Build Electron and assemble every release artifact under dist/."""
  # Start from a clean dist directory.
  rm_rf(DIST_DIR)
  os.makedirs(DIST_DIR)

  force_build()
  create_symbols()
  copy_binaries()
  copy_chrome_binary('chromedriver')
  copy_chrome_binary('mksnapshot')
  copy_license()

  # Only Linux builds are stripped here; macOS/Windows ship debug info
  # separately (dSYM/PDB zips below).
  if PLATFORM == 'linux':
    strip_binaries()

  create_version()
  create_dist_zip()
  create_chrome_binary_zip('chromedriver', get_chromedriver_version())
  create_chrome_binary_zip('mksnapshot', ELECTRON_VERSION)
  create_ffmpeg_zip()
  create_symbols_zip()
def force_build():
  """Run an unconditional Release build via script/build.py."""
  build_script = os.path.join(SOURCE_ROOT, 'script', 'build.py')
  execute([sys.executable, build_script, '-c', 'Release'])
def copy_binaries():
  """Copy the platform's release binaries and resource directories to dist/."""
  for name in TARGET_BINARIES[PLATFORM]:
    shutil.copy2(os.path.join(OUT_DIR, name), DIST_DIR)

  for subdir in TARGET_DIRECTORIES[PLATFORM]:
    shutil.copytree(os.path.join(OUT_DIR, subdir),
                    os.path.join(DIST_DIR, subdir),
                    symlinks=True)
def copy_chrome_binary(binary):
  """Copy a Chromium helper (chromedriver/mksnapshot) into dist/,
  re-applying the executable bit that copyfile drops."""
  name = binary + '.exe' if PLATFORM == 'win32' else binary
  src = os.path.join(CHROMIUM_DIR, name)
  dest = os.path.join(DIST_DIR, name)

  # copyfile transfers contents only; restore the executable permission.
  shutil.copyfile(src, dest)
  os.chmod(dest, os.stat(dest).st_mode | stat.S_IEXEC)
def copy_license():
  """Copy Chromium's third-party license listing and Electron's LICENSE."""
  chromium_licenses = os.path.join(CHROMIUM_DIR, '..', 'LICENSES.chromium.html')
  shutil.copy2(chromium_licenses, DIST_DIR)
  shutil.copy2(os.path.join(SOURCE_ROOT, 'LICENSE'), DIST_DIR)
def strip_binaries():
  """Strip shared libraries and extension-less executables in dist/."""
  for name in TARGET_BINARIES[PLATFORM]:
    is_strippable = name.endswith('.so') or '.' not in name
    if is_strippable:
      strip_binary(os.path.join(DIST_DIR, name))
def strip_binary(binary_path):
  """Strip one binary, using the ARM cross toolchain when targeting arm."""
  tool = 'arm-linux-gnueabihf-strip' if get_target_arch() == 'arm' else 'strip'
  execute([tool, binary_path])
def create_version():
  """Write the Electron version string to dist/version."""
  with open(os.path.join(SOURCE_ROOT, 'dist', 'version'), 'w') as out:
    out.write(ELECTRON_VERSION)
def create_symbols():
  """Dump breakpad symbols into dist/, plus native debug info per platform."""
  destination = os.path.join(DIST_DIR, '{0}.breakpad.syms'.format(PROJECT_NAME))
  dump_symbols = os.path.join(SOURCE_ROOT, 'script', 'dump-symbols.py')
  execute([sys.executable, dump_symbols, destination])

  if PLATFORM == 'darwin':
    # macOS: copy dSYM bundles alongside the breakpad symbols.
    dsyms = glob.glob(os.path.join(OUT_DIR, '*.dSYM'))
    for dsym in dsyms:
      shutil.copytree(dsym, os.path.join(DIST_DIR, os.path.basename(dsym)))
  elif PLATFORM == 'win32':
    # Windows: copy the PDB debug databases.
    pdbs = glob.glob(os.path.join(OUT_DIR, '*.pdb'))
    for pdb in pdbs:
      shutil.copy2(pdb, DIST_DIR)
def create_dist_zip():
  """Zip the dist directory contents into the main release archive."""
  dist_name = '{0}-{1}-{2}-{3}.zip'.format(PROJECT_NAME, ELECTRON_VERSION,
                                           get_platform_key(),
                                           get_target_arch())
  zip_file = os.path.join(SOURCE_ROOT, 'dist', dist_name)

  extra_files = ['LICENSE', 'LICENSES.chromium.html', 'version']
  with scoped_cwd(DIST_DIR):
    make_zip(zip_file,
             TARGET_BINARIES[PLATFORM] + extra_files,
             TARGET_DIRECTORIES[PLATFORM])
def create_chrome_binary_zip(binary, version):
  """Zip a Chromium helper binary together with the license files."""
  dist_name = '{0}-{1}-{2}-{3}.zip'.format(binary, version, get_platform_key(),
                                           get_target_arch())
  zip_file = os.path.join(SOURCE_ROOT, 'dist', dist_name)

  exe_name = binary + '.exe' if PLATFORM == 'win32' else binary
  with scoped_cwd(DIST_DIR):
    make_zip(zip_file, ['LICENSE', 'LICENSES.chromium.html', exe_name], [])
def create_ffmpeg_zip():
  """Copy the platform's ffmpeg shared library into dist/ and zip it with
  the license files.

  Raises:
      RuntimeError: if PLATFORM is not darwin/linux/win32. (The original
      code would instead hit an UnboundLocalError on ffmpeg_name later.)
  """
  dist_name = 'ffmpeg-{0}-{1}-{2}.zip'.format(
      ELECTRON_VERSION, get_platform_key(), get_target_arch())
  zip_file = os.path.join(SOURCE_ROOT, 'dist', dist_name)

  if PLATFORM == 'darwin':
    ffmpeg_name = 'libffmpeg.dylib'
  elif PLATFORM == 'linux':
    ffmpeg_name = 'libffmpeg.so'
  elif PLATFORM == 'win32':
    ffmpeg_name = 'ffmpeg.dll'
  else:
    raise RuntimeError('unsupported platform: {0}'.format(PLATFORM))

  shutil.copy2(os.path.join(CHROMIUM_DIR, '..', 'ffmpeg', ffmpeg_name),
               DIST_DIR)

  if PLATFORM == 'linux':
    strip_binary(os.path.join(DIST_DIR, ffmpeg_name))

  with scoped_cwd(DIST_DIR):
    make_zip(zip_file, [ffmpeg_name, 'LICENSE', 'LICENSES.chromium.html'], [])
def create_symbols_zip():
  """Zip the breakpad symbols, and on macOS/Windows also zip dSYMs/PDBs."""
  dist_name = '{0}-{1}-{2}-{3}-symbols.zip'.format(PROJECT_NAME,
                                                   ELECTRON_VERSION,
                                                   get_platform_key(),
                                                   get_target_arch())
  zip_file = os.path.join(DIST_DIR, dist_name)
  # License/version files are bundled into every symbols archive.
  licenses = ['LICENSE', 'LICENSES.chromium.html', 'version']

  with scoped_cwd(DIST_DIR):
    dirs = ['{0}.breakpad.syms'.format(PROJECT_NAME)]
    make_zip(zip_file, licenses, dirs)

  if PLATFORM == 'darwin':
    dsym_name = '{0}-{1}-{2}-{3}-dsym.zip'.format(PROJECT_NAME,
                                                  ELECTRON_VERSION,
                                                  get_platform_key(),
                                                  get_target_arch())
    with scoped_cwd(DIST_DIR):
      dsyms = glob.glob('*.dSYM')
      make_zip(os.path.join(DIST_DIR, dsym_name), licenses, dsyms)
  elif PLATFORM == 'win32':
    pdb_name = '{0}-{1}-{2}-{3}-pdb.zip'.format(PROJECT_NAME,
                                                ELECTRON_VERSION,
                                                get_platform_key(),
                                                get_target_arch())
    with scoped_cwd(DIST_DIR):
      pdbs = glob.glob('*.pdb')
      make_zip(os.path.join(DIST_DIR, pdb_name), pdbs + licenses, [])
# Script entry point; main() returns None, so the exit status is 0.
if __name__ == '__main__':
  sys.exit(main())
|
export default {
  // Base path prepended to API requests.
  // *** you have to run Co2VisualBackEnd to access this api ***
  BASE_URL : "api/v1"
}
|
import React, {Component} from 'react';
import GreetingView from '../views/GreetingView';
import PropTypes from 'prop-types';
// Entry screen: fetches the greeting status for the current identity and
// lets the user proceed to the main screen.
class Greeting extends Component {
    constructor(props) {
        super(props);
        // `status` is populated asynchronously in componentDidMount.
        this.state = {status: {}};
    }

    // Switches the app view via the identity service's event emitter.
    showMainScreen() {
        this.props.identityService.emitter.emit('setView', 'MainScreen');
    }

    // Loads the greeting status for the key holder's address.
    // NOTE(review): setState after the await will warn if the component
    // unmounts before the fetch resolves — confirm lifecycle handling.
    async componentDidMount() {
        const keyHolderAddress = this.props.identityService.identity.address;
        const {greetMode} = this.props.viewParameters;
        const status = await this.props.greetingService.getStatus(keyHolderAddress, greetMode);
        this.setState({status});
    }

    render() {
        const {identity} = this.props.identityService;
        return (
            <GreetingView
                identity={identity}
                onStartClick={this.showMainScreen.bind(this)}
                status={this.state.status}
            />
        );
    }
}
Greeting.propTypes = {
    identityService: PropTypes.object,
    greetingService: PropTypes.object,
    // NOTE(review): the component reads greetMode from viewParameters, not
    // from a top-level prop — confirm whether this entry is stale.
    greetMode: PropTypes.number,
    viewParameters: PropTypes.object
};

export default Greeting;
|
class SteamInventory: pass  # placeholder — no behavior implemented yet
|
/*jshint node:true, mocha:true*/
/**
* @author kecso / https://github.com/kecso
*/
var testFixture = require('../../_globals.js');
describe('core.intrapersist', function () {
'use strict';
var gmeConfig = testFixture.getGmeConfig(),
logger = testFixture.logger.fork('core.intrapersist'),
Q = testFixture.Q,
expect = testFixture.expect,
storage,
CANON = testFixture.requirejs('../src/common/util/canon');
function loadNodes(paths, next) {
var needed = paths.length,
nodes = {},
error = null,
i,
loadNode = function (path) {
core.loadByPath(root, path, function (err, node) {
error = error || err;
nodes[path] = node;
if (--needed === 0) {
next(error, nodes);
}
});
};
for (i = 0; i < paths.length; i++) {
loadNode(paths[i]);
}
}
//global variables of the test
var commit = '',
baseCommit = '',
root = null,
rootHash = '',
core = null,
projectName = 'coreIntrapersistTest',
projectId = testFixture.projectName2Id(projectName),
project = null,
gmeAuth;
before(function (done) {
testFixture.clearDBAndGetGMEAuth(gmeConfig, projectName)
.then(function (gmeAuth_) {
gmeAuth = gmeAuth_;
storage = testFixture.getMemoryStorage(logger, gmeConfig, gmeAuth);
return storage.openDatabase();
})
.then(function () {
return storage.deleteProject({projectId: projectId});
})
.then(function () {
return testFixture.importProject(storage, {
projectSeed: 'test/common/core/core/intraPersist.webgmex',
projectName: projectName,
gmeConfig: gmeConfig,
logger: logger
});
})
.then(function (result) {
project = result.project;
core = result.core;
root = result.rootNode;
commit = result.commitHash;
baseCommit = result.commitHash;
rootHash = result.rootHash;
})
.nodeify(done);
});
after(function (done) {
Q.allDone([
storage.closeDatabase(),
gmeAuth.unload()
])
.nodeify(done);
});
describe('SimpleChanges', function () {
var e1NodePath = '/1736622193/1271963336',
e1NodePrimePath = '/1710723537/1271963336',
s1NodePath = '/1736622193/274170516',
s1NodePrimePath = '/1710723537/274170516',
nodes = null;
before(function (done) {
core.loadRoot(rootHash, function (err, r) {
if (err) {
return done(err);
}
root = r;
loadNodes([e1NodePath, e1NodePrimePath, s1NodePath, s1NodePrimePath], function (err, n) {
if (err) {
return done(err);
}
nodes = n;
done();
});
});
});
it('modified attributes should be visible in already loaded nodes', function () {
if (core.getAttribute(nodes[e1NodePath], 'name') !== 'e1' ||
core.getAttribute(nodes[e1NodePrimePath], 'name') !== 'e1') {
throw new Error('initial attribute values are wrong');
}
core.setAttribute(nodes[e1NodePath], 'name', 'e1modified');
if (core.getAttribute(nodes[e1NodePath], 'name') !== 'e1modified' ||
core.getAttribute(nodes[e1NodePrimePath], 'name') !== 'e1modified') {
throw new Error('modified attribute values are wrong');
}
core.setAttribute(nodes[e1NodePrimePath], 'name', 'e1prime');
if (core.getAttribute(nodes[e1NodePath], 'name') !== 'e1modified' ||
core.getAttribute(nodes[e1NodePrimePath], 'name') !== 'e1prime') {
throw new Error('differentiated attribute values are wrong');
}
core.delAttribute(nodes[e1NodePrimePath], 'name');
if (core.getAttribute(nodes[e1NodePath], 'name') !== 'e1modified' ||
core.getAttribute(nodes[e1NodePrimePath], 'name') !== 'e1modified') {
throw new Error('removed prime attribute values are wrong');
}
core.delAttribute(nodes[e1NodePath], 'name');
if (core.getAttribute(nodes[e1NodePath], 'name') !== 'node' ||
core.getAttribute(nodes[e1NodePrimePath], 'name') !== 'node') {
throw new Error('removed attribute values are wrong');
}
core.setAttribute(nodes[e1NodePath], 'name', 'e1');
if (core.getAttribute(nodes[e1NodePath], 'name') !== 'e1' ||
core.getAttribute(nodes[e1NodePrimePath], 'name') !== 'e1') {
throw new Error('final attribute values are wrong');
}
});
it('modified registry should be visible in already loaded nodes', function () {
var pos, posPrime;
pos = core.getRegistry(nodes[e1NodePath], 'position');
posPrime = core.getRegistry(nodes[e1NodePrimePath], 'position');
if (pos.x !== 194 || pos.y !== 228 || posPrime.x !== 194 || posPrime.y !== 228) {
throw new Error('initial registry values are wrong');
}
core.setRegistry(nodes[e1NodePath], 'position', {x: 100, y: 200});
pos = core.getRegistry(nodes[e1NodePath], 'position');
posPrime = core.getRegistry(nodes[e1NodePrimePath], 'position');
if (pos.x !== 100 || pos.y !== 200 || posPrime.x !== 100 || posPrime.y !== 200) {
throw new Error('modified registry values are wrong');
}
core.setRegistry(nodes[e1NodePrimePath], 'position', {x: 200, y: 300});
pos = core.getRegistry(nodes[e1NodePath], 'position');
posPrime = core.getRegistry(nodes[e1NodePrimePath], 'position');
if (pos.x !== 100 || pos.y !== 200 || posPrime.x !== 200 || posPrime.y !== 300) {
throw new Error('separated registry values are wrong');
}
core.delRegistry(nodes[e1NodePrimePath], 'position');
pos = core.getRegistry(nodes[e1NodePath], 'position');
posPrime = core.getRegistry(nodes[e1NodePrimePath], 'position');
if (pos.x !== 100 || pos.y !== 200 || posPrime.x !== 100 || posPrime.y !== 200) {
throw new Error('removed prime registry values are wrong');
}
core.delRegistry(nodes[e1NodePath], 'position');
pos = core.getRegistry(nodes[e1NodePath], 'position');
posPrime = core.getRegistry(nodes[e1NodePrimePath], 'position');
if (pos.x !== 178 || pos.y !== 141 || posPrime.x !== 178 || posPrime.y !== 141) {
throw new Error('removed registry values are wrong');
}
core.setRegistry(nodes[e1NodePath], 'position', {x: 194, y: 228});
pos = core.getRegistry(nodes[e1NodePath], 'position');
posPrime = core.getRegistry(nodes[e1NodePrimePath], 'position');
if (pos.x !== 194 || pos.y !== 228 || posPrime.x !== 194 || posPrime.y !== 228) {
throw new Error('final registry values are wrong');
}
});
it('modified pointer targets should be visible in already loaded nodes', function () {
if (core.getPointerPath(nodes[s1NodePath], 'myNode') !== core.getPath(nodes[e1NodePath]) ||
core.getPointerPath(nodes[s1NodePrimePath], 'myNode') !== core.getPath(nodes[e1NodePrimePath])) {
throw new Error('initial target values are wrong');
}
core.setPointer(nodes[s1NodePath], 'myNode', nodes[e1NodePrimePath]);
if (core.getPointerPath(nodes[s1NodePath], 'myNode') !== core.getPath(nodes[e1NodePrimePath]) ||
core.getPointerPath(nodes[s1NodePrimePath], 'myNode') !== core.getPath(nodes[e1NodePrimePath])) {
throw new Error('modified target values are wrong');
}
core.setPointer(nodes[s1NodePrimePath], 'myNode', nodes[e1NodePath]);
if (core.getPointerPath(nodes[s1NodePath], 'myNode') !== core.getPath(nodes[e1NodePrimePath]) ||
core.getPointerPath(nodes[s1NodePrimePath], 'myNode') !== core.getPath(nodes[e1NodePath])) {
throw new Error('differentiated target values are wrong');
}
core.deletePointer(nodes[s1NodePath], 'myNode');
if (core.getPointerPath(nodes[s1NodePath], 'myNode') !== null ||
core.getPointerPath(nodes[s1NodePrimePath], 'myNode') !== core.getPath(nodes[e1NodePath])) {
throw new Error('removed target values are wrong');
}
core.deletePointer(nodes[s1NodePrimePath], 'myNode');
if (core.getPointerPath(nodes[s1NodePath], 'myNode') !== null ||
core.getPointerPath(nodes[s1NodePrimePath], 'myNode') !== null) {
throw new Error('removed prime target values are wrong');
}
core.setPointer(nodes[s1NodePath], 'myNode', nodes[e1NodePath]);
if (core.getPointerPath(nodes[s1NodePath], 'myNode') !== core.getPath(nodes[e1NodePath]) ||
core.getPointerPath(nodes[s1NodePrimePath], 'myNode') !== core.getPath(nodes[e1NodePrimePath])) {
throw new Error('final target values are wrong');
}
});
it('checks the set harmonization for member registry', function () {
expect(core.getMemberPaths(nodes[e1NodePath], 'mySpecials'))
.to.have.members(['/1736622193/1579656591', '/1736622193/274170516']);
expect(core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials'))
.to.have.members(['/1710723537/1579656591', '/1710723537/274170516']);
core.delMember(nodes[e1NodePath], 'mySpecials', s1NodePath);
expect(core.getMemberPaths(nodes[e1NodePath], 'mySpecials'))
.to.have.members(['/1736622193/1579656591']);
expect(core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials'))
.to.have.members(['/1710723537/1579656591','/1710723537/274170516']);
core.addMember(nodes[e1NodePrimePath], 'mySpecials', nodes[s1NodePrimePath]);
core.setMemberRegistry(nodes[e1NodePrimePath], 'mySpecials', s1NodePrimePath, 'position', {x: 100, y: 200});
expect(core.getMemberPaths(nodes[e1NodePath], 'mySpecials'))
.to.have.members(['/1736622193/1579656591']);
expect(core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials'))
.to.have.members(['/1710723537/1579656591', '/1710723537/274170516']);
expect(core.getMemberRegistry(nodes[e1NodePrimePath], 'mySpecials', s1NodePrimePath, 'position'))
.to.deep.equal({x: 100, y: 200});
core.addMember(nodes[e1NodePath], 'mySpecials', nodes[s1NodePath]);
core.setMemberRegistry(nodes[e1NodePath], 'mySpecials', s1NodePath, 'position', {x: 200, y: 300});
expect(core.getMemberPaths(nodes[e1NodePath], 'mySpecials'))
.to.have.members(['/1736622193/1579656591', '/1736622193/274170516']);
expect(core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials'))
.to.have.members(['/1710723537/1579656591', '/1710723537/274170516']);
expect(core.getMemberRegistry(nodes[e1NodePrimePath], 'mySpecials', s1NodePrimePath, 'position'))
.to.deep.equal({x: 100, y: 200});
expect(core.getMemberRegistry(nodes[e1NodePath], 'mySpecials', s1NodePath, 'position'))
.to.deep.equal({x: 200, y: 300});
});
// Checks that set mutations performed through the core API show up immediately
// on node objects loaded earlier (no reload required), and that the base node
// (e1) and its prime copy (e1') keep independent member lists.
it('modified set elements should be visible in already loaded nodes', function () {
    var elements, elementsPrime;
    elements = core.getMemberPaths(nodes[e1NodePath], 'mySpecials');
    elements.sort();
    elementsPrime = core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials');
    elementsPrime.sort();
    if (CANON.stringify(elements) !== CANON.stringify(['/1736622193/1579656591', '/1736622193/274170516']) ||
        CANON.stringify(elementsPrime) !== CANON.stringify(['/1710723537/1579656591',
            '/1710723537/274170516'])) {
        throw new Error('initial set members are wrong');
    }
    // Deleting the member on the prime: both member lists are expected to stay
    // intact - removing an override apparently falls back to the inherited
    // member. NOTE(review): confirm this is the intended core semantics.
    core.delMember(nodes[e1NodePrimePath], 'mySpecials', s1NodePrimePath);
    elements = core.getMemberPaths(nodes[e1NodePath], 'mySpecials');
    elements.sort();
    elementsPrime = core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials');
    elementsPrime.sort();
    if (CANON.stringify(elements) !== CANON.stringify(['/1736622193/1579656591', '/1736622193/274170516']) ||
        CANON.stringify(elementsPrime) !== CANON.stringify(['/1710723537/1579656591',
            '/1710723537/274170516'])) {
        throw new Error('prime removed set members are wrong');
    }
    // Deleting on the base removes the member from both nodes.
    core.delMember(nodes[e1NodePath], 'mySpecials', s1NodePath);
    elements = core.getMemberPaths(nodes[e1NodePath], 'mySpecials');
    elements.sort();
    elementsPrime = core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials');
    elementsPrime.sort();
    if (CANON.stringify(elements) !== CANON.stringify(['/1736622193/1579656591']) ||
        CANON.stringify(elementsPrime) !== CANON.stringify(['/1710723537/1579656591'])) {
        throw new Error('removed set members are wrong');
    }
    // Re-adding on the prime only affects the prime.
    core.addMember(nodes[e1NodePrimePath], 'mySpecials', nodes[s1NodePrimePath]);
    elements = core.getMemberPaths(nodes[e1NodePath], 'mySpecials');
    elements.sort();
    elementsPrime = core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials');
    elementsPrime.sort();
    if (CANON.stringify(elements) !== CANON.stringify(['/1736622193/1579656591']) ||
        CANON.stringify(elementsPrime) !== CANON.stringify(['/1710723537/1579656591',
            '/1710723537/274170516'])) {
        throw new Error('prime set members are wrong');
    }
    // Re-adding on the base (with a registry value) restores both member lists.
    core.addMember(nodes[e1NodePath], 'mySpecials', nodes[s1NodePath]);
    core.setMemberRegistry(nodes[e1NodePath], 'mySpecials', s1NodePath, 'position', {x: 86, y: 80});
    elements = core.getMemberPaths(nodes[e1NodePath], 'mySpecials');
    elements.sort();
    elementsPrime = core.getMemberPaths(nodes[e1NodePrimePath], 'mySpecials');
    elementsPrime.sort();
    if (CANON.stringify(elements) !== CANON.stringify(['/1736622193/1579656591', '/1736622193/274170516']) ||
        CANON.stringify(elementsPrime) !== CANON.stringify(['/1710723537/1579656591',
            '/1710723537/274170516'])) {
        // Distinct message (was a copy-paste of the previous branch) so the two
        // failure modes can be told apart in test output.
        throw new Error('base set members are wrong');
    }
});
});
// Tests that freshly created nodes (and their inherited content) are usable
// immediately, without waiting for a commit or reload.
describe('Creation', function () {
    // Paths of the fixture nodes used by these tests.
    var nodePath = '/989341553/1009293372',
        specialPath = '/989341553/138645871',
        examplePath = '/1736622193',
        examplePrimePath = '/1710723537',
        e1NodePath = '/1736622193/1271963336',
        nodes = null;

    it('sets the root and commit back to base', function (done) {
        core.loadRoot(rootHash, function (err, r) {
            if (err) {
                return done(err);
            }
            root = r;
            done();
        });
    });
    it('loads all the nodes for the test', function (done) {
        loadNodes([nodePath, specialPath, examplePath, examplePrimePath, e1NodePath], function (err, n) {
            if (err) {
                return done(err);
            }
            nodes = n;
            done();
        });
    });
    it('new node should be available instantaneously', function () {
        var newNode = core.createNode({parent: nodes[examplePath], base: nodes[specialPath]});
        // The fresh child must show up in its parent...
        if (core.getChildrenPaths(nodes[examplePath]).indexOf(core.getPath(newNode)) === -1) {
            throw new Error('new child is unavailable');
        }
        // ...and in the parent's prime/derived node as well.
        if (core.getChildrenRelids(nodes[examplePrimePath]).indexOf(core.getRelid(newNode)) === -1) {
            throw new Error('new child is unavailable in descendant');
        }
        // Attribute values are inherited from the base right away.
        if (core.getAttribute(newNode, 'mySpeciality') !== 'nothing') {
            throw new Error('new node attribute is not available');
        }
        // A change on the base's attribute must be visible through the new node
        // immediately as well.
        core.setAttribute(nodes[specialPath], 'mySpeciality', 'changed');
        if (core.getAttribute(newNode, 'mySpeciality') !== 'changed') {
            throw new Error('new node changed attribute is not available');
        }
        core.setAttribute(nodes[specialPath], 'mySpeciality', 'nothing');
        core.deleteNode(newNode);
    });
    it('newly created nodes\' set should be fully available', function () {
        var newNode = core.createNode({parent: root, base: nodes[e1NodePath]}),
            memberPaths, memberNewPaths;
        if (core.getAttribute(newNode, 'name') !== 'e1' ||
            core.getRegistry(newNode, 'position').x !== 194 ||
            core.getRegistry(newNode, 'position').y !== 228 ||
            CANON.stringify(core.getSetNames(newNode)) !== CANON.stringify(['mySpecials'])
        ) {
            throw new Error('values of the new node are wrong');
        }
        // The new instance must expose the same set members as its base.
        memberPaths = core.getMemberPaths(nodes[e1NodePath], 'mySpecials').sort();
        memberNewPaths = core.getMemberPaths(newNode, 'mySpecials').sort();
        if (CANON.stringify(memberPaths) !== CANON.stringify(memberNewPaths)) {
            throw new Error('bad set members of new node');
        }
        core.deleteNode(newNode);
    });
    it('children of new node should be visible instantaneously', function () {
        var newNode = core.createNode({parent: root, base: nodes[examplePath]}),
            childrenRelids,
            childrenNewRelids;
        childrenRelids = core.getChildrenRelids(nodes[examplePath]).sort();
        childrenNewRelids = core.getChildrenRelids(newNode).sort();
        if (CANON.stringify(childrenRelids) !== CANON.stringify(childrenNewRelids)) {
            // Fixed typo in the failure message (was 'wrong chilrdenlist').
            throw new Error('wrong children list for new node');
        }
    });
});
// Tests that moving a node takes effect immediately on the returned node object
// while previously obtained node objects keep pointing at their original spot.
describe('Move', function () {
    // Fixture node paths shared by the tests below.
    var nodePath = '/989341553/1009293372',
        specialPath = '/989341553/138645871',
        examplePath = '/1736622193',
        examplePrimePath = '/1710723537',
        e1NodePath = '/1736622193/1271963336',
        nodes = null;

    before(function (done) {
        // Reload the root, then pre-load every fixture node.
        core.loadRoot(rootHash, function (err, loadedRoot) {
            if (err) {
                done(err);
                return;
            }
            root = loadedRoot;
            loadNodes([nodePath, specialPath, examplePath, examplePrimePath, e1NodePath], function (err2, loadedNodes) {
                if (err2) {
                    done(err2);
                    return;
                }
                nodes = loadedNodes;
                done();
            });
        });
    });

    it('moved node should be available instantaneously', function () {
        // Moving e1 under the root gives it a fresh path built from its relid.
        var movedNode = core.moveNode(nodes[e1NodePath], root);

        if (core.getPath(movedNode) !== '/1271963336') {
            throw new Error('bad path of moved node');
        }
        // The node object obtained before the move must still report the old place.
        if (core.getPath(nodes[e1NodePath]) !== e1NodePath) {
            throw new Error('old object points to old place');
        }
    });
});
});
|
import typing as t
from .baseobject import BaseObject
__all__ = ("UlistLabels",)
class UlistLabels(BaseObject):
    """
    Represents a single ulist label.

    Note:
        This class is not meant to be instantiated directly.

    Note:
        Every attribute is optional and may return `None`.

    ## FLAG: NONE
    Attributes:
        id (int): The ulist's ID.

    ## FLAG: [BASIC](../enums.md#azaka.tools.enums.Flags)
    Attributes:
        uid (int): The ulist's ID.
        label (str): The label's name.
        private (bool): Whether the label is private.
    """

    __slots__ = ("uid", "label", "private")

    def __init__(self, data: t.Mapping[str, t.Any]) -> None:
        super().__init__(data)

        # Populate every slot straight from the payload; absent keys become None.
        for attribute in self.__slots__:
            setattr(self, attribute, data.get(attribute))
|
// NOTE: Minified, compiler-generated output (a Stencil web-component bundle for
// a `bkkr-checkbox` element) - do not edit by hand; change the component source
// and rebuild instead.
// The class tracks checked / indeterminate / disabled state, emits
// bkkrChange / bkkrFocus / bkkrBlur / bkkrStyle events, and renders an SVG
// check mark; the long trailing string assigned to `d.style` is the
// component's scoped CSS.
import{r,c as t,h as e,H as s,g as a}from"./p-856de026.js";import{d as o,e as i}from"./p-c2089e63.js";import{s as n,h}from"./p-c50c697b.js";const d=class{constructor(e){r(this,e),this.bkkrChange=t(this,"bkkrChange",7),this.bkkrFocus=t(this,"bkkrFocus",7),this.bkkrBlur=t(this,"bkkrBlur",7),this.bkkrStyle=t(this,"bkkrStyle",7),this.inputId="bkkr-cb-"+b++,this.name=this.inputId,this.checked=!1,this.indeterminate=!1,this.disabled=!1,this.value="on",this.onClick=r=>{r.preventDefault(),this.setFocus(),this.checked=!this.checked,this.indeterminate=!1},this.onFocus=()=>{this.bkkrFocus.emit()},this.onBlur=()=>{this.bkkrBlur.emit()}}componentWillLoad(){this.emitStyle()}checkedChanged(r){this.bkkrChange.emit({checked:r,value:this.value}),this.emitStyle()}disabledChanged(){this.emitStyle()}emitStyle(){this.bkkrStyle.emit({"state-checked":this.checked,"interactive-disabled":this.disabled})}setFocus(){this.focusEl&&this.focusEl.focus()}render(){const{color:r,checked:t,disabled:a,el:d,indeterminate:b,inputId:c,name:l,value:k}=this,{label:p,labelId:m,labelText:g}=o(d,c);i(!0,d,l,t?k:"",a);let x=e("path",b?{d:"M5 10.75L16 10.75",part:"mark"}:{d:"M5,10.75 L8.5,14.25 L19.4,2.3 C18.8333333,1.43333333 18.0333333,1 16,1 L5,1 C2.35,1 1,2.35 1,5 L1,16 C1,18.65 2.35,20 5,20 L16,20 C18.65,20 20,18.65 20,16 L20,7.99769186",part:"mark"});return e(s,{onClick:this.onClick,"aria-labelledby":p?m:null,"aria-checked":`${t}`,"aria-hidden":a?"true":null,role:"checkbox",class:n(r,{"in-item":h("bkkr-item",d),"checkbox-indeterminate":b,"state-checked":t,"state-disabled":a,"state-focusable":!0,interactive:!0})},e("svg",{class:"checkbox-button",viewBox:"0 0 21 21",part:"container"},x),e("label",{htmlFor:c},g),e("input",{type:"checkbox","aria-checked":`${t}`,disabled:a,id:c,onFocus:()=>this.onFocus(),onBlur:()=>this.onBlur(),ref:r=>this.focusEl=r}))}get el(){return a(this)}static get watchers(){return{checked:["checkedChanged"],disabled:["disabledChanged"]}}};let 
b=0;d.style=":host{--background:transparent;--border-color:var(--bkkr-border, rgba(var(--bkkr-text-color-rgb, 0, 0, 0), 0.3));--border-color-hover:var(--bkkr-border, rgba(var(--bkkr-text-color-rgb, 0, 0, 0), 0.3));--border-color-focus:var(--bkkr-border, rgba(var(--bkkr-text-color-rgb, 0, 0, 0), 0.3));--border-color-selected:var(--color-base, var(--color-primary, #3880ff));--border-width:2px;display:inline-block;position:relative;width:24px;height:24px;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;z-index:2}label{top:0;left:0;margin-left:0;margin-right:0;margin-top:0;margin-bottom:0;position:absolute;width:100%;height:100%;border:0;outline:none;background:transparent;cursor:pointer;-webkit-appearance:none;-moz-appearance:none;appearance:none;display:-ms-flexbox;display:flex;-ms-flex-align:center;align-items:center;opacity:0}label::-moz-focus-inner{border:0}input{position:absolute;top:0;right:0;bottom:0;left:0;width:100%;height:100%;margin:0;padding:0;border:0;outline:0;opacity:0;overflow:hidden;clip:rect(0 0 0 0);-webkit-appearance:none;-moz-appearance:none;appearance:none}.checkbox-button{border-radius:6px;display:block;width:24px;height:24px;-webkit-transform:scale(var(--scale, 1)) translateZ(0);transform:scale(var(--scale, 1)) translateZ(0);-webkit-transition:stroke-dasharray 0.6s, stroke-dashoffset 0.6s, -webkit-box-shadow 0.3s;transition:stroke-dasharray 0.6s, stroke-dashoffset 0.6s, -webkit-box-shadow 0.3s;transition:stroke-dasharray 0.6s, stroke-dashoffset 0.6s, box-shadow 0.3s;transition:stroke-dasharray 0.6s, stroke-dashoffset 0.6s, box-shadow 0.3s, -webkit-box-shadow 0.3s;-webkit-box-shadow:inset 0 0 0 var(--sharp, 0.55px) var(--backdrop, var(--border-color));box-shadow:inset 0 0 0 var(--sharp, 0.55px) var(--backdrop, var(--border-color));z-index:1;pointer-events:none;fill:none;stroke:var(--border-color-selected, 
var(--border-color));stroke-width:var(--border-width);stroke-linecap:round;stroke-linejoin:round;stroke-dasharray:var(--array, 86.12);stroke-dashoffset:var(--offset, 86.12)}@media (any-hover: hover){:host(:hover) .checkbox-button{--sharp:var(--border-width);--backdrop:var(--border-color-hover)}}:host(:focus) .checkbox-button,:host(.state-focused) .checkbox-button{--sharp:var(--border-width);--backdrop:var(--border-color-focus)}:host(.state-checked) .checkbox-button{--sharp:var(--border-width);--backdrop:var(--border-color-selected);--array:16.1 86.12;--offset:102.22;-webkit-transition:stroke-dasharray 0.5s, stroke-dashoffset 0.5s, -webkit-box-shadow 0.2s 0.3s;transition:stroke-dasharray 0.5s, stroke-dashoffset 0.5s, -webkit-box-shadow 0.2s 0.3s;transition:stroke-dasharray 0.5s, stroke-dashoffset 0.5s, box-shadow 0.2s 0.3s;transition:stroke-dasharray 0.5s, stroke-dashoffset 0.5s, box-shadow 0.2s 0.3s, -webkit-box-shadow 0.2s 0.3s}:host(.state-disabled){pointer-events:none;opacity:0.3}:host(.in-item){margin-left:0;margin-right:8px;margin-top:10px;margin-bottom:9px;display:block;position:static}@supports ((-webkit-margin-start: 0) or (margin-inline-start: 0)) or (-webkit-margin-start: 0){:host(.in-item){margin-left:unset;margin-right:unset;-webkit-margin-start:0;margin-inline-start:0;-webkit-margin-end:8px;margin-inline-end:8px}}:host(.in-item[slot=start]){margin-left:2px;margin-right:var(--bkkr-spacer, 16px);margin-top:8px;margin-bottom:8px}@supports ((-webkit-margin-start: 0) or (margin-inline-start: 0)) or (-webkit-margin-start: 0){:host(.in-item[slot=start]){margin-left:unset;margin-right:unset;-webkit-margin-start:2px;margin-inline-start:2px;-webkit-margin-end:var(--bkkr-spacer, 16px);margin-inline-end:var(--bkkr-spacer, 16px)}}";export{d as bkkr_checkbox}
|
const path = require('path');
/**
* NOTE:
* We are using http://localhost:2000 as a mock dev environment
*
* If you wanted make the URL slightly more realistic and use http://dev.localhost:2000, make the following changes
*
* Set this below
* `host: 'dev.localhost'`
*
* Also, run
* `sudo nano /etc/hosts`
* and add
* `127.0.0.1 dev.localhost`
*/
module.exports = (env = {}) => {
return {
mode: 'development',
devServer: {
port: 2000,
host: '0.0.0.0',
hot: false,
historyApiFallback: {
index: '/assets/core-team_site/index.html',
},
liveReload: false,
static: {
directory: path.join(__dirname, 'dist'),
},
},
};
};
|
# **** Es necesario guardar el Proyecto de QGIS antes de ejecutar el script! ****
# **** You must save the QGIS Project before executing this script! ****
# Script para descargar el WRF (Puerto Rico), importarlo en QGIS, y procesar
# Script to download WRF, import into QGIS, and process
# © Feb 18, 2019 - Chris Edwards, Jake Lewis, Hunter Williams
# WRF Data Information:
# URL: https://www.nco.ncep.noaa.gov/pmb/products/hiresw/
# Model: AWIPS 3.8km Puerto Rico ARW (NCAR Advanced Research WRF)
# (The 2.5km doesn't include the DR) (filename says it's 5km)
# Data Access: GRIB2 via http
# This model runs twice a day, at 6:00 and 18:00. We use the 6:00
# We use the 24- and 48-hr accumulated precipitation in kg/m^2
# Filename eg: hiresw.t06z.arw_5km.f24.pr.grib2
# The variable APCP (Total Precipitation) is stored in Raster Band 282
print("El proceso ha comenzado (Script Has Started).")

import os
import shutil
import datetime
import urllib.request
import qgis.core
from PyQt5.QtCore import QVariant

# NOTE(review): QgsProject, QgsRasterLayer, QgsRasterCalculator(-Entry),
# QgsVectorLayer, QgsZonalStatistics, QgsField and iface are assumed to be
# provided by the QGIS Python console environment - confirm before running
# this script standalone.

# *** Download the GRIB2 files from the internet.
# Identify today's date in the form YYYYMMDD.
now = datetime.datetime.now()
input_datetime = str(now.strftime("%Y%m%d"))

# Make a directory for raw data next to the saved QGIS project.
# If the directory already exists it is removed along with all its contents,
# so a rerun on the same day starts from a clean slate.
project_directory = QgsProject.instance().homePath()
new_folder_path = project_directory + "/wrf_" + input_datetime
if os.path.exists(new_folder_path):
    shutil.rmtree(new_folder_path)
os.mkdir(new_folder_path)

# --- 24-hour forecast -------------------------------------------------------
# Download the 24-hr file to the new folder.
data_url_24 = "https://www.ftp.ncep.noaa.gov/data/nccf/com/hiresw/prod/hiresw." + input_datetime + "/hiresw.t06z.arw_5km.f24.pr.grib2"
# 24-hr grib2 filename and path (the previous comment wrongly said 48-hr).
grib_filename_24 = "wrf_" + input_datetime + "_f24.grib2"
grib_path_24 = new_folder_path + "/" + grib_filename_24
# Download through the URL.
urllib.request.urlretrieve(data_url_24, grib_path_24)
print(grib_filename_24 + " fue descargado (was downloaded).")

# Load the GRIB2 as a raster layer.
target_ras_24 = QgsRasterLayer(grib_path_24, grib_filename_24)
# Raster-calculator entry referencing band 282 (APCP, Total Precipitation).
target_24 = QgsRasterCalculatorEntry()
target_24.raster = target_ras_24
target_24.bandNumber = 282
target_24.ref = grib_filename_24 + '@282'
# List of calculator entries.
entries = [target_24]

# 24-hr APCP accumulation GeoTIFF name and path.
tiff_filename_24 = "WRF-" + input_datetime + "_Hr_00-24"
tiff_path_24 = new_folder_path + "/" + tiff_filename_24
# Raster calculator: simply extract band 282 (multiply by 1).
calc = QgsRasterCalculator(grib_filename_24 + '@282 * 1', tiff_path_24, 'GTiff',
                           target_ras_24.extent(), target_ras_24.width(),
                           target_ras_24.height(), entries)
calc.processCalculation()
# Keep the new raster as a layer object and add it to the map.
apcp_00to24 = QgsRasterLayer(tiff_path_24, tiff_filename_24)
iface.addRasterLayer(tiff_path_24, tiff_filename_24)
print(tiff_filename_24 + " fue importado (was imported).")

# --- 48-hour forecast -------------------------------------------------------
# Download the 48-hr file to the new folder.
data_url_48 = "https://www.ftp.ncep.noaa.gov/data/nccf/com/hiresw/prod/hiresw." + input_datetime + "/hiresw.t06z.arw_5km.f48.pr.grib2"
# 48-hr grib2 filename and path.
grib_filename_48 = "wrf_" + input_datetime + "_f48.grib2"
grib_path_48 = new_folder_path + "/" + grib_filename_48
# Download through the URL.
urllib.request.urlretrieve(data_url_48, grib_path_48)
print(grib_filename_48 + " fue descargado (was downloaded).")

# Load the GRIB2 as a raster layer.
target_ras_48 = QgsRasterLayer(grib_path_48, grib_filename_48)
target_48 = QgsRasterCalculatorEntry()
target_48.raster = target_ras_48
target_48.bandNumber = 282
target_48.ref = grib_filename_48 + '@282'
# Both entries are needed because the expression references both rasters.
entries = [target_24, target_48]

# 24-to-48-hr APCP accumulation GeoTIFF name and path.
tiff_filename_48 = "WRF-" + input_datetime + "_Hr_24-48"
tiff_path_48 = new_folder_path + "/" + tiff_filename_48
# *** Raster calculator: total 48-hr accumulation minus the 24-hr accumulation
# yields the accumulation for hours 24-48.
calc = QgsRasterCalculator(grib_filename_48 + '@282 - ' + grib_filename_24 + '@282',
                           tiff_path_48, 'GTiff', target_ras_48.extent(),
                           target_ras_48.width(), target_ras_48.height(), entries)
calc.processCalculation()
apcp_24to48 = QgsRasterLayer(tiff_path_48, tiff_filename_48)
iface.addRasterLayer(tiff_path_48, tiff_filename_48)
print(tiff_filename_48 + " fue importado (was imported).")

# *** Zonal statistics: mean precipitation per FFGS polygon.
# (The raster calculator above already isolated the correct band.)
print("Ejecutando estadísticas de zona (Executing Zonal Statistics...)")
shapefile_path = project_directory + "/ffgs_wrf_shp/ffgs.shp"
ffgs_shp = QgsVectorLayer(shapefile_path, 'ffgs', 'ogr')
zoneStat = QgsZonalStatistics(ffgs_shp, apcp_00to24, '00-24', 1, QgsZonalStatistics.Mean)
zoneStat.calculateStatistics(None)
zoneStat = QgsZonalStatistics(ffgs_shp, apcp_24to48, '24-48', 1, QgsZonalStatistics.Mean)
zoneStat.calculateStatistics(None)

# *** Add new fields to the shapefile. Example: "Ind_00-24" = potential to
# flood during hours 0-24.
print("Añadiendo Campo (Adding Field...)")
layer_provider = ffgs_shp.dataProvider()
layer_provider.addAttributes([QgsField("Ind_00-24", QVariant.Double),
                              QgsField("Ind_24-48", QVariant.Double)])
ffgs_shp.updateFields()

# *** Calculate fields. Will it flood? FFGS_mm - 0-24Mean = Ind_00-24
# Negative means it will flood. Positive means there isn't enough water to flood.
print("Calculando Atributos (Calculating Attributes...)")
def calculate_attributes():
    # Fill each flood-potential indicator from the FFGS threshold minus the
    # zonal mean rainfall; one edit session per field, mirroring the original
    # two-pass flow (negative result means flooding is expected).
    for field_name, mean_field in (('Ind_00-24', '00-24mean'),
                                   ('Ind_24-48', '24-48mean')):
        with edit(ffgs_shp):
            for feature in ffgs_shp.getFeatures():
                feature.setAttribute(feature.fieldNameIndex(field_name),
                                     feature['ffgs_mm'] - feature[mean_field])
                ffgs_shp.updateFeature(feature)

calculate_attributes()

# *** Add the shapefile to the map.
iface.addVectorLayer(shapefile_path, "WRF-modificado", 'ogr')
print("Proceso terminado con éxito (Process successfully finished).")
|
import React, { Component } from 'react';
import { Switch, Route } from 'react-router-dom';
import IpfsRouter from 'ipfs-react-router';
import Leftnav from './components/leftnav/leftnav';
import Header from './components/header/header';
import Exchange from './components/exchange/exchange';
import './i18n';
import Account from './components/account/account';
import Home from './components/home/home';
import Hives from './components/hives/hives';
import Faq from './components/faq/faq';
import Resources from './components/resources/resources';
import Stake from './components/stake/stake';
import Nft from './components/nft/nft';
import NftItem from './components/nft/nftItem';
import Pools from './components/pools/pools';
import {
CONNECTION_CONNECTED,
CONNECTION_DISCONNECTED,
CONFIGURE,
CONFIGURE_RETURNED,
GET_BALANCES_PERPETUAL,
GET_BALANCES_PERPETUAL_RETURNED,
} from './constants';
import { injected } from './stores/connectors';
import Store from './stores/store';
const { emitter, dispatcher, store } = Store;
class App extends Component {
state = {
account: null,
headerValue: null,
themeType: false,
style: 'dark-mode',
};
setHeaderValue = (newValue) => {
this.setState({ headerValue: newValue });
};
componentWillMount() {
emitter.on(CONNECTION_CONNECTED, this.connectionConnected);
emitter.on(CONNECTION_DISCONNECTED, this.connectionDisconnected);
emitter.on(CONFIGURE_RETURNED, this.configureReturned);
emitter.on(GET_BALANCES_PERPETUAL_RETURNED, this.getBalancesReturned);
let style = localStorage.getItem('theme');
if (style) this.setState({ style });
injected.isAuthorized().then((isAuthorized) => {
if (isAuthorized) {
injected
.activate()
.then((a) => {
store.setStore({
account: { address: a.account },
web3context: { library: { provider: a.provider } },
});
emitter.emit(CONNECTION_CONNECTED);
// console.log(a);
})
.catch((e) => {
console.log(e);
});
} else {
}
});
}
componentWillUnmount() {
emitter.removeListener(CONNECTION_CONNECTED, this.connectionConnected);
emitter.removeListener(
CONNECTION_DISCONNECTED,
this.connectionDisconnected
);
emitter.removeListener(CONFIGURE_RETURNED, this.configureReturned);
emitter.removeListener(
GET_BALANCES_PERPETUAL_RETURNED,
this.getBalancesReturned
);
}
getBalancesReturned = () => {
window.setTimeout(() => {
dispatcher.dispatch({ type: GET_BALANCES_PERPETUAL, content: {} });
}, 300000);
};
configureReturned = () => {
dispatcher.dispatch({ type: GET_BALANCES_PERPETUAL, content: {} });
};
connectionConnected = () => {
this.setState({ account: store.getStore('account') });
dispatcher.dispatch({ type: CONFIGURE, content: {} });
};
connectionDisconnected = () => {
this.setState({ account: store.getStore('account') });
};
onSwitchThemeHandler = () => {
let style = this.state.style == 'light-mode' ? 'dark-mode' : 'light-mode';
localStorage.setItem('theme', style);
this.setState({ style });
};
render() {
const { account, style } = this.state;
return (
<div className={`main-content`}>
<IpfsRouter>
<Header />
<Leftnav
onSwitchTheme={this.onSwitchThemeHandler}
activeStyle={this.state.style}
/>
<>
<Switch>
<Route path='/stake/:address' component={Stake} />
<Route path='/hives' component={account ? Hives : Account} />
<Route path='/faq' component={Faq} />
<Route path='/resources' component={Resources} />
<Route path='/exchange' component={Exchange} />
<Route exact path='/pools/:selectedPool'>
<Pools
assetsStoreKey='exchangeAssets'
extraAssets={['ETH', 'WPE']}
disableSwap={true}
/>
</Route>
<Route path='/pools'>
<Pools
assetsStoreKey='exchangeAssets'
extraAssets={['ETH', 'WPE']}
disableSwap={true}
/>
</Route>
<Route path='/nft/:tokenAddress/:tokenId' component={NftItem} />
<Route path='/nft'>
<Nft />
</Route>
<Route path='/'>
<Home />
</Route>
</Switch>
</>
</IpfsRouter>
</div>
);
}
}
export default App;
|
import sys
from os import path
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
README = path.abspath(path.join(path.dirname(__file__), 'README.md'))
classifiers = [
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Environment :: Web Environment',
'Development Status :: 3 - Alpha',
]
class PyTest(TestCommand):
    """Custom ``setup.py test`` command that delegates to pytest."""

    def finalize_options(self):
        TestCommand.finalize_options(self)
        # Run only the plugin's own test package.
        self.test_args = ['forgot_password']
        self.test_suite = True

    def run_tests(self):
        # Imported here because outside of the command the eggs aren't loaded.
        import pytest

        sys.exit(pytest.main(self.test_args))
# Read the long description up front so the file handle is closed promptly
# (the original left an unclosed file object behind) and the encoding is explicit.
with open(README, encoding='utf-8') as readme_file:
    long_description = readme_file.read()

setup(
    name='skygear_forgot_password',
    version='0.1.0',
    packages=find_packages(),
    description='Forgot Password Plugin for Skygear',
    long_description=long_description,
    # README.md is Markdown; tell PyPI so it renders correctly.
    long_description_content_type='text/markdown',
    classifiers=classifiers,
    author='SkygearIO',
    author_email='hello@skygear.io',
    url='https://github.com/SkygearIO/forgot_password',
    license='Apache License, Version 2.0',
    install_requires=[
        'Jinja2>=2.8',
        'pyzmail36>=1.0.3',
        'skygear>=1.6.0',
        'nexmo>=2.0.0',
        'twilio>=6.10.4',
    ],
    cmdclass={'test': PyTest},
    tests_require=[
        'pytest',
    ],
    package_data={
        'forgot_password': [
            'templates/*.html',
            'templates/*.txt',
        ],
    },
)
|
/**
 * Takes elements for the specified duration from the start of the observable source sequence, using the specified scheduler to run timers.
 *
 * @example
 * 1 - res = source.takeWithTime(5000, [optional scheduler]);
 * @description
 * Mirrors the source sequence while a single timer scheduled for the given
 * duration is pending; when the timer fires the result sequence completes, so
 * only the elements produced during the initial duration window are
 * propagated. (The previous description - queueing and delaying elements -
 * described the delay-style operators, not this one.)
 * @param {Number} duration Duration for taking elements from the start of the sequence.
 * @param {Scheduler} [scheduler] Scheduler to run the timer on. If not specified, defaults to Rx.Scheduler.timeout.
 * @returns {Observable} An observable sequence with the elements taken during the specified duration from the start of the source sequence.
 */
observableProto.takeWithTime = function (duration, scheduler) {
    var source = this;
    // Fall back to the default timeout scheduler when none was supplied.
    isScheduler(scheduler) || (scheduler = timeoutScheduler);
    return new AnonymousObservable(function (observer) {
        // The timer completes the observer after `duration`; disposing the
        // composite tears down both the timer and the source subscription.
        return new CompositeDisposable(scheduler.scheduleWithRelative(duration, observer.onCompleted.bind(observer)), source.subscribe(observer));
    });
};
|
var app = angular.module("DemoApp", [])

app.controller("MainController", function ($scope, $q) {
    /**
     * Adds two numbers asynchronously (simulated 500 ms latency).
     * Resolves with the sum, or rejects with "negative value" when the sum
     * is below zero.
     */
    function add(a, b) {
        var deferred = $q.defer()

        // Simulate a slow computation.
        setTimeout(function () {
            var sum = a + b;
            if (sum < 0) {
                deferred.reject("negative value")
            } else {
                deferred.resolve(sum)
            }
        }, 500)

        return deferred.promise;
    }

    var startedAt = Date.now()

    // Chain three additions; the final step drives the running total negative
    // (3 + 7 + 5 - 20 = -5), so the chain is expected to hit the catch handler.
    add(3, 7)
        .then(function (result) {
            return add(result, 5)
        })
        .then(function (result) {
            return add(result, -20)
        })
        .then(function (result) {
            $scope.result = result
        })
        .catch(function (errMsg) {
            $scope.err = errMsg
        })
        .finally(function () {
            $scope.elaspedTime = Date.now() - startedAt
        })
})
|
"""
Unit tests for the basin hopping global minimization algorithm.
"""
from __future__ import division, print_function, absolute_import
import copy
from numpy.testing import TestCase, run_module_suite, \
assert_almost_equal, assert_
import numpy as np
from numpy import cos, sin
from scipy.optimize import basinhopping, minimize
from scipy.optimize._basinhopping import Storage, RandomDisplacement, \
Metropolis, AdaptiveStepsize
def func1d(x):
    """1-D test objective; returns the (value, gradient) pair."""
    phase = 14.5 * x - 0.3
    value = cos(phase) + (x + 0.2) * x
    gradient = np.array(-14.5 * sin(phase) + 2. * x + 0.2)
    return value, gradient
def func1d_nograd(x):
    """1-D test objective for the no-gradient tests.

    NOTE(review): despite the name this mirrors func1d and still returns the
    (value, gradient) pair - callers are apparently expected to ignore the
    gradient; confirm before changing the return shape.
    """
    arg = 14.5 * x - 0.3
    fval = cos(arg) + (x + 0.2) * x
    grad = np.array(-14.5 * sin(arg) + 2. * x + 0.2)
    return fval, grad
def func2d_nograd(x):
    """2-D test objective returning only the function value (no gradient)."""
    return (cos(14.5 * x[0] - 0.3)
            + (x[1] + 0.2) * x[1]
            + (x[0] + 0.2) * x[0])
def func2d(x):
    """2-D test objective; returns the (value, gradient) pair."""
    value = cos(14.5 * x[0] - 0.3) + (x[1] + 0.2) * x[1] + (x[0] + 0.2) * x[0]
    gradient = np.array([-14.5 * sin(14.5 * x[0] - 0.3) + 2. * x[0] + 0.2,
                         2. * x[1] + 0.2])
    return value, gradient
class Minimizer(object):
    """Callable wrapper around scipy.optimize.minimize with preset kwargs.

    Keyword arguments supplied at construction time take precedence over the
    ones supplied at call time (same merge order as the original).
    """

    def __init__(self, func, **kwargs):
        self.func = func
        self.kwargs = kwargs

    def __call__(self, x0, **newkwargs):
        # Merge the two kwarg dicts; construction-time values win.
        merged = dict(newkwargs)
        merged.update(self.kwargs)
        return minimize(self.func, x0, **merged)
class MyTakeStep1(RandomDisplacement):
    """RandomDisplacement clone that records whether it was invoked.

    Sets a flag on every call so tests can verify that a user-supplied
    take_step callable is actually being used.
    """

    def __init__(self):
        self.been_called = False
        super(MyTakeStep1, self).__init__()

    def __call__(self, x):
        # Flag the invocation, then defer to the stock displacement.
        self.been_called = True
        displaced = super(MyTakeStep1, self).__call__(x)
        return displaced
def myTakeStep2(x):
    """Function-form displacement without a ``stepsize`` attribute.

    Re-implements RandomDisplacement as a plain function to make sure
    take_step callables lacking the stepsize attribute still work.
    """
    bound = 0.5
    # In-place uniform perturbation, matching RandomDisplacement's behavior.
    x += np.random.uniform(-bound, bound, np.shape(x))
    return x
class MyAcceptTest(object):
    """Custom accept test exercising every legal return value.

    Call #1 returns False, call #2 returns 'force accept', every later call
    returns True; also records that it was invoked at all.
    """

    def __init__(self):
        self.been_called = False
        self.ncalls = 0

    def __call__(self, **kwargs):
        self.been_called = True
        self.ncalls += 1
        special = {1: False, 2: 'force accept'}
        return special.get(self.ncalls, True)
class MyCallBack(object):
    """Custom basinhopping callback.

    Records that it was invoked and returns True on the 10th call so the
    basinhopping loop stops early.
    """

    def __init__(self):
        self.been_called = False
        self.ncalls = 0

    def __call__(self, x, f, accepted):
        self.been_called = True
        self.ncalls += 1
        # Returning True asks basinhopping to stop; otherwise return None.
        return True if self.ncalls == 10 else None
class TestBasinHopping(TestCase):
    """Tests for basinhopping."""

    def setUp(self):
        """Tests setup.

        Run tests based on the 1-D and 2-D functions described above. These
        are the same functions as used in the anneal algorithm with some
        gradients added.
        """
        self.x0 = (1.0, [1.0, 1.0])
        self.sol = (-0.195, np.array([-0.195, -0.1]))
        self.upper = (3., [3., 3.])
        self.lower = (-3., [-3., -3.])
        self.tol = 3  # number of decimal places
        self.niter = 100
        self.disp = False
        # fix random seed
        np.random.seed(1234)
        self.kwargs = {"method": "L-BFGS-B", "jac": True}
        self.kwargs_nograd = {"method": "L-BFGS-B"}

    def test_TypeError(self):
        # test the TypeErrors are raised on bad input
        i = 1
        # if take_step is passed, it must be callable
        self.assertRaises(TypeError, basinhopping, func2d, self.x0[i],
                          take_step=1)
        # if accept_test is passed, it must be callable
        self.assertRaises(TypeError, basinhopping, func2d, self.x0[i],
                          accept_test=1)

        # accept_test must return bool or string "force_accept"
        def bad_accept_test1(*args, **kwargs):
            return 1

        def bad_accept_test2(*args, **kwargs):
            return "not force_accept"
        self.assertRaises(ValueError, basinhopping, func2d, self.x0[i],
                          minimizer_kwargs=self.kwargs,
                          accept_test=bad_accept_test1)
        self.assertRaises(ValueError, basinhopping, func2d, self.x0[i],
                          minimizer_kwargs=self.kwargs,
                          accept_test=bad_accept_test2)

    def test_1d_grad(self):
        # test 1d minimizations with gradient
        i = 0
        res = basinhopping(func1d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp)
        assert_almost_equal(res.x, self.sol[i], self.tol)

    def test_2d(self):
        # test 2d minimizations with gradient
        i = 1
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp)
        assert_almost_equal(res.x, self.sol[i], self.tol)
        self.assertTrue(res.nfev > 0)

    def test_njev(self):
        # test njev is returned correctly
        i = 1
        minimizer_kwargs = self.kwargs.copy()
        # L-BFGS-B doesn't use njev, but BFGS does
        minimizer_kwargs["method"] = "BFGS"
        res = basinhopping(func2d, self.x0[i],
                           minimizer_kwargs=minimizer_kwargs, niter=self.niter,
                           disp=self.disp)
        self.assertTrue(res.nfev > 0)
        self.assertEqual(res.nfev, res.njev)

    def test_2d_nograd(self):
        # test 2d minimizations without gradient
        i = 1
        res = basinhopping(func2d_nograd, self.x0[i],
                           minimizer_kwargs=self.kwargs_nograd,
                           niter=self.niter, disp=self.disp)
        assert_almost_equal(res.x, self.sol[i], self.tol)

    def test_all_minimizers(self):
        # test 2d minimizations with gradient for every local minimizer
        i = 1
        methods = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Newton-CG',
                   'L-BFGS-B', 'TNC', 'COBYLA', 'SLSQP']
        minimizer_kwargs = copy.copy(self.kwargs)
        for method in methods:
            minimizer_kwargs["method"] = method
            # BUG FIX: pass the per-method kwargs built above; the original
            # passed self.kwargs, so every iteration silently used L-BFGS-B
            # and the other methods were never exercised.
            res = basinhopping(func2d, self.x0[i],
                               minimizer_kwargs=minimizer_kwargs,
                               niter=self.niter, disp=self.disp)
            assert_almost_equal(res.x, self.sol[i], self.tol)

    def test_pass_takestep(self):
        # test that passing a custom takestep works
        # also test that the stepsize is being adjusted
        takestep = MyTakeStep1()
        initial_step_size = takestep.stepsize
        i = 1
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=self.niter, disp=self.disp,
                           take_step=takestep)
        assert_almost_equal(res.x, self.sol[i], self.tol)
        assert_(takestep.been_called)
        # make sure that the built in adaptive step size has been used
        assert_(initial_step_size != takestep.stepsize)

    def test_pass_simple_takestep(self):
        # test that passing a custom takestep without attribute stepsize works
        takestep = myTakeStep2
        i = 1
        res = basinhopping(func2d_nograd, self.x0[i],
                           minimizer_kwargs=self.kwargs_nograd,
                           niter=self.niter, disp=self.disp,
                           take_step=takestep)
        assert_almost_equal(res.x, self.sol[i], self.tol)

    def test_pass_accept_test(self):
        # test passing a custom accept test
        # makes sure it's being used and ensures all the possible return values
        # are accepted.
        accept_test = MyAcceptTest()
        i = 1
        # there's no point in running it more than a few steps.
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=10, disp=self.disp, accept_test=accept_test)
        assert_(accept_test.been_called)

    def test_pass_callback(self):
        # test passing a custom callback function
        # This makes sure it's being used. It also returns True after 10 steps
        # to ensure that it's stopping early.
        callback = MyCallBack()
        i = 1
        # there's no point in running it more than a few steps.
        res = basinhopping(func2d, self.x0[i], minimizer_kwargs=self.kwargs,
                           niter=30, disp=self.disp, callback=callback)
        assert_(callback.been_called)
        assert_("callback" in res.message[0])
        assert_(res.nit == 10)
class Test_Storage(TestCase):
    """Tests for the Storage helper that tracks the lowest minimum found."""

    def setUp(self):
        self.x0 = np.array(1)
        self.f0 = 0
        self.storage = Storage(self.x0, self.f0)

    def test_higher_f_rejected(self):
        # A candidate with a larger function value must not replace the
        # stored minimum, and update() must report the rejection.
        accepted = self.storage.update(self.x0 + 1, self.f0 + 1)
        x, f = self.storage.get_lowest()
        self.assertEqual(self.x0, x)
        self.assertEqual(self.f0, f)
        self.assertFalse(accepted)

    def test_lower_f_accepted(self):
        # A candidate with a smaller function value must replace the
        # stored minimum, and update() must report the acceptance.
        accepted = self.storage.update(self.x0 + 1, self.f0 - 1)
        x, f = self.storage.get_lowest()
        self.assertNotEqual(self.x0, x)
        self.assertNotEqual(self.f0, f)
        self.assertTrue(accepted)
class Test_RandomDisplacement(TestCase):
    """Tests for the RandomDisplacement step-taking routine."""

    def setUp(self):
        self.stepsize = 1.0
        self.displace = RandomDisplacement(stepsize=self.stepsize)
        self.N = 300000
        self.x0 = np.zeros([self.N])

    def test_random(self):
        # Displacements appear to be uniform on [-stepsize, stepsize], so
        # the sample mean should be ~0 and the sample variance
        # ~(2 * stepsize)**2 / 12.  Being statistical, this check can
        # fail by chance from time to time.
        x = self.displace(self.x0)
        expected_var = (2. * self.stepsize) ** 2 / 12
        self.assertAlmostEqual(np.mean(x), 0., 1)
        self.assertAlmostEqual(np.var(x), expected_var, 1)
class Test_Metropolis(TestCase):
    """Tests for the Metropolis acceptance criterion."""

    def setUp(self):
        self.T = 2.
        self.met = Metropolis(self.T)

    def test_boolean_return(self):
        # basinhopping requires a genuine bool, not merely something
        # truthy, or it raises an error.
        ret = self.met(f_new=0., f_old=1.)
        assert isinstance(ret, bool)

    def test_lower_f_accepted(self):
        # A downhill move is always accepted.
        self.assertTrue(self.met(f_new=0., f_old=1.))

    def test_KeyError(self):
        # Both f_old and f_new are required keyword arguments.
        self.assertRaises(KeyError, self.met, f_old=1.)
        self.assertRaises(KeyError, self.met, f_new=1.)

    def test_accept(self):
        # Uphill moves (f_new > f_old) are accepted stochastically, so
        # over many trials we expect to see at least one acceptance and
        # at least one rejection.
        seen_accept = False
        seen_reject = False
        for _ in range(1000):
            if seen_accept and seen_reject:
                break
            if self.met(f_new=1., f_old=0.5):
                seen_accept = True
            else:
                seen_reject = True
        self.assertTrue(seen_accept)
        self.assertTrue(seen_reject)
class Test_AdaptiveStepsize(TestCase):
    # Tests for AdaptiveStepsize, which grows or shrinks the wrapped
    # displacement's stepsize to steer the observed acceptance rate
    # towards ``accept_rate``.
    def setUp(self):
        self.stepsize = 1.
        self.ts = RandomDisplacement(stepsize=self.stepsize)
        self.target_accept_rate = 0.5
        self.takestep = AdaptiveStepsize(takestep=self.ts, verbose=False,
                                         accept_rate=self.target_accept_rate)

    def test_adaptive_increase(self):
        # if few steps are rejected, the stepsize should increase
        x = 0.
        self.takestep(x)
        self.takestep.report(False)
        for i in range(self.takestep.interval):
            self.takestep(x)
            self.takestep.report(True)
        self.assertTrue(self.ts.stepsize > self.stepsize)

    def test_adaptive_decrease(self):
        # if most steps are rejected, the stepsize should decrease
        # (the original comment here was a copy-paste of the one above)
        x = 0.
        self.takestep(x)
        self.takestep.report(True)
        for i in range(self.takestep.interval):
            self.takestep(x)
            self.takestep.report(False)
        self.assertTrue(self.ts.stepsize < self.stepsize)

    def test_all_accepted(self):
        # test that everything works OK if all steps were accepted
        x = 0.
        for i in range(self.takestep.interval + 1):
            self.takestep(x)
            self.takestep.report(True)
        self.assertTrue(self.ts.stepsize > self.stepsize)

    def test_all_rejected(self):
        # test that everything works OK if all steps were rejected
        x = 0.
        for i in range(self.takestep.interval + 1):
            self.takestep(x)
            self.takestep.report(False)
        self.assertTrue(self.ts.stepsize < self.stepsize)
if __name__ == "__main__":
    # Run the full test suite when this module is executed directly.
    run_module_suite()
|
/**
* Popup Component
* @author ryan.bian
*/
import React, { PureComponent } from 'react';
import PropTypes from 'prop-types';
import classnames from 'classnames';
import renderTo from '../../enhancer/render-to';
import styles from './Trigger.css';
import Animation from '../animation';
@renderTo()
export default class Popup extends PureComponent {
  static displayName = 'Popup'

  static defaultProps = {
    position: [0, 0],
    popupRef: null,
    visible: false,
    onMouseEnter() {},
    onMouseLeave() {},
  }

  // https://facebook.github.io/react/docs/typechecking-with-proptypes.html
  static propTypes = {
    position: PropTypes.arrayOf(PropTypes.number),
    popupRef: PropTypes.func,
    visible: PropTypes.bool,
    onMouseEnter: PropTypes.func,
    onMouseLeave: PropTypes.func,
  }

  constructor(props) {
    super(props);
    // popupVisible lags `props.visible` on hide so the popup stays
    // rendered until the exit animation finishes (see onExited below).
    this.state = {
      popupVisible: props.visible,
    };
  }

  // NOTE(review): componentWillReceiveProps is deprecated since React
  // 16.3 — consider getDerivedStateFromProps when upgrading.
  componentWillReceiveProps(nextProps) {
    // Show immediately on a false -> true transition; hiding is
    // deferred to the animation's onExited callback instead.
    if (nextProps.visible && !this.props.visible) {
      this.setState({
        popupVisible: true,
      });
    }
  }

  render() {
    const { children, popupRef, position, visible, ...otherProps } = this.props;
    const { popupVisible } = this.state;
    // Props forwarded to the wrapping <div>; the popup is hidden with a
    // CSS class rather than being unmounted.
    const wrapProps = {
      ref: popupRef,
      className: classnames(styles.popup, {
        [styles['popup--hidden']]: !popupVisible,
      }),
      // style: {
      //   left: position[0],
      //   top: position[1],
      // },
      ...otherProps,
    };
    return (
      <Animation
        duration={300}
        timingFunction={'ease-in-out'}
        in={visible}
        motion={'fade'}
        mountOnEnter
        appear
        onExited={() => {
          this.setState({
            popupVisible: false,
          });
        }}
        style={{
          position: 'absolute',
          left: position[0],
          top: position[1],
        }}
      >
        <div {...wrapProps}>
          {children}
        </div>
      </Animation>
    );
  }
}
|
const path = require("path");
const mode = process.env.NODE_ENV === "production" ? "production" : "development";
const base = mode === "production" ? "/" + path.basename(process.cwd()) + "/" : "/";
module.exports = {
root: "src",
base,
mode,
publicDir: "../public",
build: {
outDir: "../dist",
assetsDir: "./"
},
server: {
port: 3001
},
};
|
import React, { Component } from "react";
import PropTypes from "prop-types";
// import Button from "@reactioncommerce/components/Button/v1";
import Router from "translations/i18nRouter";
import { Button } from "@material-ui/core";
import styled from "styled-components";
import { withStyles } from "@material-ui/core/styles";
import { addTypographyStyles, applyTheme } from "@reactioncommerce/components/utils";
const Div = styled.div``;
// withStyles factory: derives the two checkout-button looks from the
// active Material-UI theme.
const styles = (theme) => {
  const { dark, light } = theme.palette.primary;
  return {
    // Primary "proceed to checkout" button.
    purchasingProcess: {
      backgroundColor: dark,
      color: light,
    },
    // Secondary "keep shopping" button.
    buyHome: {
      backgroundColor: "#F6F6F6",
      color: "#1D0D13",
      border: "1px solid #1D0D13",
    },
  };
};
class CheckoutButtons extends Component {
static propTypes = {
/**
* Set to `true` to prevent the button from calling `onClick` when clicked
*/
isDisabled: PropTypes.bool,
/**
* The NextJS route name for the primary checkout button.
*/
primaryButtonRoute: PropTypes.string,
/**
* Text to display inside the button
*/
primaryButtonText: PropTypes.string,
/**
* className for primary checkout button
*/
primaryClassName: PropTypes.string
}
static defaultProps = {
primaryButtonRoute: "/cart/checkout",
primaryButtonText: "Procesar a la compra",
secondButtonRoute: "/",
secondButtonText: "Seguir comprando"
};
handleOnClick = () => {
const { primaryButtonRoute } = this.props;
Router.push(primaryButtonRoute);
}
handleOnClickSecond = () => {
const { secondButtonRoute } = this.props;
Router.push(secondButtonRoute)
}
render() {
const {
isDisabled,
primaryClassName,
primaryButtonText,
secondButtonText,
classes:{purchasingProcess,buyHome}
} = this.props;
return (
<Div>
<Div style={{paddingBottom:'10px'}}>
<Button
fullWidth
className={buyHome}
variant="contained"
onClick={this.handleOnClickSecond}
disableRipple
>
{secondButtonText}
</Button>
</Div>
<Button
fullWidth
className={purchasingProcess}
onClick={this.handleOnClick}
variant="contained"
disableRipple
>
{primaryButtonText}
</Button>
</Div>
);
}
}
export default withStyles(styles)(CheckoutButtons);
|
import { combineReducers } from 'redux';
import {reducer as FormReducer} from 'redux-form';
import PostsReducer from './reducer_posts';
// Combine every feature reducer into the single root reducer the Redux
// store is created with.
const rootReducer = combineReducers({
  posts: PostsReducer, // blog-post state (see ./reducer_posts)
  form: FormReducer // redux-form state, mounted under its default `form` key
});

export default rootReducer;
|
# Copyright (c) 2010-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The s3api middleware will emulate the S3 REST api on top of swift.
To enable this middleware to your configuration, add the s3api middleware
in front of the auth middleware. See ``proxy-server.conf-sample`` for more
detail and configurable options.
To set up your client, ensure you are using the tempauth or keystone auth
system for swift project.
When running Swift in a SAIO environment, make sure you have set up the
tempauth middleware configuration in ``proxy-server.conf``. The access key
will be the concatenation of the account and user strings, which should look
like test:tester, and the secret access key is the account password. The host
should also point to the swift storage hostname.
The tempauth option example:
.. code-block:: ini
[filter:tempauth]
use = egg:swift#tempauth
user_admin_admin = admin .admin .reseller_admin
user_test_tester = testing
An example client using tempauth with the python boto library is as follows:
.. code-block:: python
from boto.s3.connection import S3Connection
connection = S3Connection(
aws_access_key_id='test:tester',
aws_secret_access_key='testing',
port=8080,
host='127.0.0.1',
is_secure=False,
calling_format=boto.s3.connection.OrdinaryCallingFormat())
If you are using keystone auth, you need EC2 credentials, which can
be downloaded from the API Endpoints tab of the dashboard or created with the
openstack ec2 command.
Here is an example of creating an EC2 credential:
.. code-block:: console
# openstack ec2 credentials create
+------------+---------------------------------------------------+
| Field | Value |
+------------+---------------------------------------------------+
| access | c2e30f2cd5204b69a39b3f1130ca8f61 |
| links | {u'self': u'http://controller:5000/v3/......'} |
| project_id | 407731a6c2d0425c86d1e7f12a900488 |
| secret | baab242d192a4cd6b68696863e07ed59 |
| trust_id | None |
| user_id | 00f0ee06afe74f81b410f3fe03d34fbc |
+------------+---------------------------------------------------+
An example client using keystone auth with the python boto library will be:
.. code-block:: python
from boto.s3.connection import S3Connection
connection = S3Connection(
aws_access_key_id='c2e30f2cd5204b69a39b3f1130ca8f61',
aws_secret_access_key='baab242d192a4cd6b68696863e07ed59',
port=8080,
host='127.0.0.1',
is_secure=False,
calling_format=boto.s3.connection.OrdinaryCallingFormat())
----------
Deployment
----------
Proxy-Server Setting
^^^^^^^^^^^^^^^^^^^^
Set s3api before your auth in your pipeline in ``proxy-server.conf`` file.
To enable all compatibility currently supported, you should make sure that
bulk, slo, and your auth middleware are also included in your proxy
pipeline setting.
Using tempauth, the minimum example config is:
.. code-block:: ini
[pipeline:main]
pipeline = proxy-logging cache s3api tempauth bulk slo proxy-logging \
proxy-server
When using keystone, the config will be:
.. code-block:: ini
[pipeline:main]
pipeline = proxy-logging cache authtoken s3api s3token keystoneauth bulk \
slo proxy-logging proxy-server
Finally, add the s3api middleware section:
.. code-block:: ini
[filter:s3api]
use = egg:swift#s3api
.. note::
``keystonemiddleware.authtoken`` can be located before/after s3api but
we recommend to put it before s3api because when authtoken is after s3api,
both authtoken and s3token will issue the acceptable token to keystone
(i.e. authenticate twice). And in the ``keystonemiddleware.authtoken``
middleware , you should set ``delay_auth_decision`` option to ``True``.
-----------
Constraints
-----------
Currently, the s3api is being ported from https://github.com/openstack/swift3,
so any existing issues in swift3 may still remain. Before enabling an option,
please make sure you understand its description in the example
``proxy-server.conf`` and what effect it has.
-------------
Supported API
-------------
Compatibility will continue to be improved upstream; you can keep an
eye on compatibility via a check tool built by SwiftStack. See
https://github.com/swiftstack/s3compat for details.
"""
from cgi import parse_header
import json
from paste.deploy import loadwsgi
from swift.common.constraints import valid_api_version
from swift.common.middleware.listing_formats import \
MAX_CONTAINER_LISTING_CONTENT_LENGTH
from swift.common.wsgi import PipelineWrapper, loadcontext, WSGIContext
from swift.common.middleware.s3api.bucket_db import get_bucket_db, \
BucketDbWrapper
from swift.common.middleware.s3api.etree import Element
from swift.common.middleware.s3api.exception import NotS3Request, \
InvalidSubresource
from swift.common.middleware.s3api.s3request import get_request_class
from swift.common.middleware.s3api.s3response import ErrorResponse, \
InternalError, MethodNotAllowed, S3ResponseBase, S3NotImplemented, \
InvalidRequest
from swift.common.utils import get_logger, register_swift_info, \
config_true_value, config_positive_int_value, split_path, \
closing_if_possible
from swift.common.middleware.s3api.utils import Config
from swift.common.middleware.s3api.acl_handlers import get_acl_handler
class ListingEtagMiddleware(object):
    """Rewrite JSON container listings so that an ``s3_etag`` parameter
    embedded in each object's Swift ``hash`` value is exposed as a
    separate ``s3_etag`` entry on the listing item.
    """

    def __init__(self, app):
        self.app = app

    def __call__(self, env, start_response):
        # a lot of this is cribbed from listing_formats / swob.Request
        if env['REQUEST_METHOD'] != 'GET':
            # Nothing to translate
            return self.app(env, start_response)
        try:
            # Only /<version>/<account>/<container> (exactly 3 path
            # segments with a valid API version) is a container listing.
            v, a, c = split_path(env.get('SCRIPT_NAME', '') +
                                 env['PATH_INFO'], 3, 3)
            if not valid_api_version(v):
                raise ValueError
        except ValueError:
            is_container_req = False
        else:
            is_container_req = True
        if not is_container_req:
            # pass through
            return self.app(env, start_response)

        ctx = WSGIContext(self.app)
        resp_iter = ctx._app_call(env)

        # Scan response headers for content-type and content-length,
        # remembering where Content-Length sits so it can be patched
        # after the body is rewritten.
        content_type = content_length = cl_index = None
        for index, (header, value) in enumerate(ctx._response_headers):
            header = header.lower()
            if header == 'content-type':
                content_type = value.split(';', 1)[0].strip()
                if content_length:
                    break
            elif header == 'content-length':
                cl_index = index
                try:
                    content_length = int(value)
                except ValueError:
                    pass  # ignore -- we'll bail later
                if content_type:
                    break

        # Only rewrite reasonably-sized JSON listings; everything else is
        # passed through untouched.
        if content_type != 'application/json' or content_length is None or \
                content_length > MAX_CONTAINER_LISTING_CONTENT_LENGTH:
            start_response(ctx._response_status, ctx._response_headers,
                           ctx._response_exc_info)
            return resp_iter

        # We've done our sanity checks, slurp the response into memory
        with closing_if_possible(resp_iter):
            body = b''.join(resp_iter)

        try:
            listing = json.loads(body)
            for item in listing:
                if 'subdir' in item:
                    continue
                value, params = parse_header(item['hash'])
                if 's3_etag' in params:
                    # Surface the S3 ETag as its own key and rebuild the
                    # hash value without that parameter.
                    item['s3_etag'] = '"%s"' % params.pop('s3_etag')
                    item['hash'] = value + ''.join(
                        '; %s=%s' % kv for kv in params.items())
        except (TypeError, KeyError, ValueError):
            # If anything goes wrong above, drop back to original response
            start_response(ctx._response_status, ctx._response_headers,
                           ctx._response_exc_info)
            return [body]

        body = json.dumps(listing).encode('ascii')
        # Patch Content-Length in place so it matches the re-serialized
        # body.
        ctx._response_headers[cl_index] = (
            ctx._response_headers[cl_index][0],
            str(len(body)),
        )
        start_response(ctx._response_status, ctx._response_headers,
                       ctx._response_exc_info)
        return [body]
class S3ApiMiddleware(object):
    """S3Api: S3 compatibility middleware"""

    def __init__(self, app, conf, *args, **kwargs):
        """Read the middleware options from ``conf`` into ``self.conf``
        (applying defaults), build the configured CORS rules, and verify
        the proxy pipeline ordering.
        """
        self.app = app
        self.conf = Config()

        # Set default values if they are not configured
        self.conf.s3_only = config_true_value(
            conf.get('s3_only', False))
        self.conf.allow_no_owner = config_true_value(
            conf.get('allow_no_owner', False))
        self.conf.location = conf.get('location', 'us-east-1')
        self.conf.dns_compliant_bucket_names = config_true_value(
            conf.get('dns_compliant_bucket_names', True))
        self.conf.max_bucket_listing = config_positive_int_value(
            conf.get('max_bucket_listing', 1000))
        self.conf.max_buckets_per_account = config_positive_int_value(
            conf.get('max_buckets_per_account', 100))
        self.conf.max_parts_listing = config_positive_int_value(
            conf.get('max_parts_listing', 1000))
        self.conf.max_multi_delete_objects = config_positive_int_value(
            conf.get('max_multi_delete_objects', 1000))
        self.conf.multi_delete_concurrency = config_positive_int_value(
            conf.get('multi_delete_concurrency', 2))
        self.conf.s3_acl = config_true_value(
            conf.get('s3_acl', False))
        self.conf.storage_domain = conf.get('storage_domain', '')
        self.conf.auth_pipeline_check = config_true_value(
            conf.get('auth_pipeline_check', True))
        self.conf.max_upload_part_num = config_positive_int_value(
            conf.get('max_upload_part_num', 1000))
        self.conf.check_bucket_owner = config_true_value(
            conf.get('check_bucket_owner', False))
        self.conf.force_swift_request_proxy_log = config_true_value(
            conf.get('force_swift_request_proxy_log', False))
        self.conf.allow_multipart_uploads = config_true_value(
            conf.get('allow_multipart_uploads', True))
        self.conf.min_segment_size = config_positive_int_value(
            conf.get('min_segment_size', 5242880))
        self.conf.log_s3api_command = config_true_value(
            conf.get('log_s3api_command', False))
        self.conf.allow_anonymous_path_requests = config_true_value(
            conf.get('allow_anonymous_path_requests', False))

        # Build one CORSRule element per configured allowed origin.
        self.conf.cors_rules = list()
        for allow_origin in (
                a.strip()
                for a in conf.get('cors_allow_origin', '').split(',')
                if a.strip()):
            rule = Element('CORSRule')
            allow_origin_elm = Element('AllowedOrigin')
            allow_origin_elm.text = allow_origin
            rule.append(allow_origin_elm)
            # Every standard method is allowed for the configured origins.
            for allow_method in ('GET', 'HEAD', 'PUT', 'POST', 'DELETE'):
                allow_method_elm = Element('AllowedMethod')
                allow_method_elm.text = allow_method
                rule.append(allow_method_elm)
            for expose_header in (
                    a.strip()
                    for a in conf.get('cors_expose_headers', '').split(',')
                    if a.strip()):
                expose_header_elm = Element('ExposeHeader')
                expose_header_elm.text = expose_header
                rule.append(expose_header_elm)
            # For only these origins, allow all headers requested in the
            # request. The CORS specification does leave the door open
            # for this, as mentioned in
            # http://www.w3.org/TR/cors/#resource-preflight-requests
            allowed_header_elm = Element('AllowedHeader')
            allowed_header_elm.text = '*'
            rule.append(allowed_header_elm)
            self.conf.cors_rules.append(rule)

        self.logger = get_logger(
            conf, log_route=conf.get('log_name', 's3api'))
        self.slo_enabled = self.conf.allow_multipart_uploads
        self.check_pipeline(self.conf)
        self.bucket_db = get_bucket_db(conf)

    def __call__(self, env, start_response):
        """WSGI entry point: classify the request as S3 or not, dispatch
        it, and translate errors into S3-style responses.
        """
        try:
            # XXX(FVE): this should be done in an independant middleware
            if self.bucket_db:
                env['s3api.bucket_db'] = BucketDbWrapper(self.bucket_db)
            # Raises NotS3Request when the request is not S3-styled.
            req_class = get_request_class(env, self.conf.s3_acl)
            req = req_class(
                env, self.app, self.slo_enabled, self.conf.storage_domain,
                self.conf.location, self.conf.force_swift_request_proxy_log,
                self.conf.dns_compliant_bucket_names,
                self.conf.allow_multipart_uploads, self.conf.allow_no_owner,
                self.conf.allow_anonymous_path_requests)
            env['s3api.bucket'] = req.container_name
            resp = self.handle_request(req)
        except NotS3Request:
            # Non-S3 traffic falls through to the rest of the pipeline,
            # unless s3_only is configured (then only /info is exempt).
            if self.conf.s3_only and env.get('PATH_INFO') != '/info':
                resp = InvalidRequest(reason='Not S3 request')
            else:
                resp = self.app
        except InvalidSubresource as e:
            # NOTE(review): this branch does not assign ``resp``; if
            # InvalidSubresource is raised before ``resp`` is bound above,
            # the ``isinstance(resp, ...)`` check below would fail with
            # UnboundLocalError -- verify the intended response here.
            self.logger.debug(e.cause)
        except ErrorResponse as err_resp:
            if isinstance(err_resp, InternalError):
                self.logger.exception(err_resp)
            resp = err_resp
        except Exception as e:
            self.logger.exception(e)
            resp = InternalError(reason=str(e))

        # Mirror the Swift transaction id into the AWS-style headers.
        if isinstance(resp, S3ResponseBase) and 'swift.trans_id' in env:
            resp.headers['x-amz-id-2'] = env['swift.trans_id']
            resp.headers['x-amz-request-id'] = env['swift.trans_id']

        if 's3api.backend_path' in env and 'swift.backend_path' not in env:
            env['swift.backend_path'] = env['s3api.backend_path']
        return resp(env, start_response)

    def handle_request(self, req):
        """Dispatch ``req`` to the matching controller method, applying
        the appropriate ACL handler first.
        """
        self.logger.debug('Calling S3Api Middleware')
        try:
            controller = req.controller(self.app, self.conf, self.logger)
        except S3NotImplemented:
            # TODO: Probably we should distinct the error to log this warning
            self.logger.warning('multipart: No SLO middleware in pipeline')
            raise

        acl_handler = get_acl_handler(req.controller_name)(req, self.logger)
        req.set_acl_handler(acl_handler)

        if hasattr(controller, req.method):
            handler = getattr(controller, req.method)
            if not getattr(handler, 'publicly_accessible', False):
                raise MethodNotAllowed(req.method,
                                       req.controller.resource_type())
            res = handler(req)
        else:
            raise MethodNotAllowed(req.method,
                                   req.controller.resource_type())

        return res

    def check_pipeline(self, conf):
        """
        Check that proxy-server.conf has an appropriate pipeline for s3api.
        """
        if conf.get('__file__', None) is None:
            return

        ctx = loadcontext(loadwsgi.APP, conf.__file__)
        pipeline = str(PipelineWrapper(ctx)).split(' ')

        # Add compatible with 3rd party middleware.
        self.check_filter_order(pipeline, ['s3api', 'proxy-server'])

        # Middlewares between s3api and proxy-server are treated as the
        # auth portion of the pipeline.
        auth_pipeline = pipeline[pipeline.index('s3api') + 1:
                                 pipeline.index('proxy-server')]

        # Check SLO middleware
        if self.slo_enabled and 'slo' not in auth_pipeline:
            self.slo_enabled = False
            self.logger.warning('s3api middleware requires SLO middleware '
                                'to support multi-part upload, please add it '
                                'in pipeline')

        # Check IAM middleware position: when enabled, must be before s3api
        if 'iam' in pipeline:
            self.check_filter_order(pipeline, ['iam', 's3api'])

        if not conf.auth_pipeline_check:
            self.logger.debug('Skip pipeline auth check.')
            return

        if 'tempauth' in auth_pipeline:
            self.logger.debug('Use tempauth middleware.')
        elif 'keystoneauth' in auth_pipeline:
            self.check_filter_order(
                auth_pipeline,
                ['s3token', 'keystoneauth'])
            self.logger.debug('Use keystone middleware.')
        elif len(auth_pipeline):
            self.logger.debug('Use third party(unknown) auth middleware.')
        else:
            raise ValueError('Invalid pipeline %r: expected auth between '
                             's3api and proxy-server ' % pipeline)

    def check_filter_order(self, pipeline, required_filters):
        """
        Check that required filters are present in order in the pipeline.
        """
        indexes = []
        missing_filters = []
        for required_filter in required_filters:
            try:
                indexes.append(pipeline.index(required_filter))
            except ValueError as e:
                self.logger.debug(e)
                missing_filters.append(required_filter)

        if missing_filters:
            raise ValueError('Invalid pipeline %r: missing filters %r' % (
                pipeline, missing_filters))

        if indexes != sorted(indexes):
            raise ValueError('Invalid pipeline %r: expected filter %s' % (
                pipeline, ' before '.join(required_filters)))
def filter_factory(global_conf, **local_conf):
    """Standard filter factory to use the middleware with paste.deploy"""
    conf = dict(global_conf)
    conf.update(local_conf)

    # Advertise the effective s3api limits through the /info endpoint.
    register_swift_info(
        's3api',
        # TODO: make default values as variables
        max_bucket_listing=conf.get('max_bucket_listing', 1000),
        max_parts_listing=conf.get('max_parts_listing', 1000),
        max_upload_part_num=conf.get('max_upload_part_num', 1000),
        max_multi_delete_objects=conf.get('max_multi_delete_objects', 1000),
        allow_multipart_uploads=conf.get('allow_multipart_uploads', True),
        min_segment_size=conf.get('min_segment_size', 5242880),
        s3_acl=conf.get('s3_acl', False)
    )

    def s3api_filter(app):
        # Wrap the app so container listings get their S3 ETags fixed up
        # before s3api processes them.
        return S3ApiMiddleware(ListingEtagMiddleware(app), conf)

    return s3api_filter
|
/*
Template Name: Color Admin - Responsive Admin Dashboard Template build with Twitter Bootstrap 3.3.5
Version: 1.9.0
Author: Sean Ngu
Website: http://www.seantheme.com/color-admin-v1.9/admin/
*/

// Dashboard colour palette shared by all of the demo charts below.
// (Original was minified; this is a behaviour-identical pretty-print.)
var blue = "#348fe2";
var blueLight = "#5da5e8";
var blueDark = "#1993E4";
var aqua = "#49b6d6";
var aquaLight = "#6dc5de";
var aquaDark = "#3a92ab";
var green = "#00acac";
var greenLight = "#33bdbd";
var greenDark = "#008a8a";
var orange = "#f59c1a";
var orangeLight = "#f7b048";
var orangeDark = "#c47d15";
var dark = "#2d353c";
var grey = "#b6c2c9";
var purple = "#727cb6";
var purpleLight = "#8e96c5";
var purpleDark = "#5b6392";
var red = "#ff5b57";

// Line chart: quarterly licensed vs. off-the-road counts.
var handleMorrisLineChart = function() {
  var lineData = [
    { period: "2011 Q3", licensed: 3407, sorned: 660 },
    { period: "2011 Q2", licensed: 3351, sorned: 629 },
    { period: "2011 Q1", licensed: 3269, sorned: 618 },
    { period: "2010 Q4", licensed: 3246, sorned: 661 },
    { period: "2009 Q4", licensed: 3171, sorned: 676 },
    { period: "2008 Q4", licensed: 3155, sorned: 681 },
    { period: "2007 Q4", licensed: 3226, sorned: 620 },
    { period: "2006 Q4", licensed: 3245, sorned: null },
    { period: "2005 Q4", licensed: 3289, sorned: null }
  ];
  Morris.Line({
    element: "morris-line-chart",
    data: lineData,
    xkey: "period",
    ykeys: ["licensed", "sorned"],
    labels: ["Licensed", "Off the road"],
    resize: true,
    lineColors: [dark, blue]
  });
};

// Bar chart: Geekbench scores per iPhone model.
var handleMorrisBarChart = function() {
  Morris.Bar({
    element: "morris-bar-chart",
    data: [
      { device: "iPhone", geekbench: 136 },
      { device: "iPhone 3G", geekbench: 137 },
      { device: "iPhone 3GS", geekbench: 275 },
      { device: "iPhone 4", geekbench: 380 },
      { device: "iPhone 4S", geekbench: 655 },
      { device: "iPhone 5", geekbench: 1571 }
    ],
    xkey: "device",
    ykeys: ["geekbench"],
    labels: ["Geekbench"],
    barRatio: 0.4,
    xLabelAngle: 35,
    hideHover: "auto",
    resize: true,
    barColors: [dark]
  });
};

// Area chart: quarterly unit sales per Apple device family.
var handleMorrisAreaChart = function() {
  Morris.Area({
    element: "morris-area-chart",
    data: [
      { period: "2010 Q1", iphone: 2666, ipad: null, itouch: 2647 },
      { period: "2010 Q2", iphone: 2778, ipad: 2294, itouch: 2441 },
      { period: "2010 Q3", iphone: 4912, ipad: 1969, itouch: 2501 },
      { period: "2010 Q4", iphone: 3767, ipad: 3597, itouch: 5689 },
      { period: "2011 Q1", iphone: 6810, ipad: 1914, itouch: 2293 },
      { period: "2011 Q2", iphone: 5670, ipad: 4293, itouch: 1881 },
      { period: "2011 Q3", iphone: 4820, ipad: 3795, itouch: 1588 },
      { period: "2011 Q4", iphone: 15073, ipad: 5967, itouch: 5175 },
      { period: "2012 Q1", iphone: 10687, ipad: 4460, itouch: 2028 },
      { period: "2012 Q2", iphone: 8432, ipad: 5713, itouch: 1791 }
    ],
    xkey: "period",
    ykeys: ["iphone", "ipad", "itouch"],
    labels: ["iPhone", "iPad", "iPod Touch"],
    pointSize: 2,
    hideHover: "auto",
    resize: true,
    lineColors: [red, orange, dark]
  });
};

// Donut chart: demo flavour breakdown, labelled as percentages.
var handleMorrisDonusChart = function() {
  Morris.Donut({
    element: "morris-donut-chart",
    data: [
      { label: "Jam", value: 25 },
      { label: "Frosted", value: 40 },
      { label: "Custard", value: 25 },
      { label: "Sugar", value: 10 }
    ],
    formatter: function(value) {
      return value + "%";
    },
    resize: true,
    colors: [dark, orange, red, grey]
  });
};

// Public entry point: initialise every chart on the page.
var MorrisChart = (function() {
  "use strict";
  return {
    init: function() {
      handleMorrisLineChart();
      handleMorrisBarChart();
      handleMorrisAreaChart();
      handleMorrisDonusChart();
    }
  };
})();
|
import { select } from 'd3';
import dagre from 'dagre';
import graphlib from 'graphlib';
import { logger } from '../../logger';
import classDb, { lookUpDomId } from './classDb';
import { parser } from './parser/classDiagram';
import svgDraw from './svgDraw';
import { getConfig } from '../../config';
import { render } from '../../dagre-wrapper/index.js';
// import addHtmlLabel from 'dagre-d3/lib/label/add-html-label.js';
import { curveLinear } from 'd3';
import { interpolateToCurve, getStylesFromArray, configureSvgSize } from '../../utils';
import common from '../common/common';
parser.yy = classDb; // wire the parser to the class-diagram database

// Cache of rendered node entries, keyed by generated graph id
// (reverse-searched by getGraphId below).
let idCache = {};

const padding = 20;

// Default layout settings; may be overridden via setConf().
const conf = {
  dividerMargin: 10,
  padding: 5,
  textHeight: 10
};
/**
 * Function that adds the classes found during parsing to the graph to be
 * rendered.
 * (Fixed JSDoc: the previous doc described a non-existent `vert` param.)
 * @param classes Object containing the parsed classes, keyed by class id.
 * @param g The dagre graph that is to be drawn.
 */
export const addClasses = function(classes, g) {
  const keys = Object.keys(classes);
  logger.info('keys:', keys);
  logger.info(classes);

  // Iterate through each class found in the graph definition.
  keys.forEach(function(id) {
    const vertex = classes[id];

    // Accumulated CSS classes for the vertex.
    let cssClassStr = '';
    if (vertex.cssClasses.length > 0) {
      cssClassStr = cssClassStr + ' ' + vertex.cssClasses.join(' ');
    }

    const styles = { labelStyle: '' }; //getStylesFromArray(vertex.styles);

    // Use vertex id as text in the box if no text is provided by the
    // graph definition.
    let vertexText = vertex.text !== undefined ? vertex.text : vertex.id;

    let radius = 0; // was misspelled "radious"
    let _shape = '';
    // Set the shape based parameters; every class type currently renders
    // as a class_box.
    switch (vertex.type) {
      case 'class':
        _shape = 'class_box';
        break;
      default:
        _shape = 'class_box';
    }

    // Add the node to the graph.
    g.setNode(vertex.id, {
      labelStyle: styles.labelStyle,
      shape: _shape,
      labelText: vertexText,
      classData: vertex,
      rx: radius,
      ry: radius,
      class: cssClassStr,
      style: styles.style,
      id: vertex.id,
      domId: vertex.domId,
      haveCallback: vertex.haveCallback,
      link: vertex.link,
      width: vertex.type === 'group' ? 500 : undefined,
      type: vertex.type,
      padding: getConfig().flowchart.padding
    });

    logger.info('setNode', {
      labelStyle: styles.labelStyle,
      shape: _shape,
      labelText: vertexText,
      rx: radius,
      ry: radius,
      class: cssClassStr,
      style: styles.style,
      id: vertex.id,
      width: vertex.type === 'group' ? 500 : undefined,
      type: vertex.type,
      padding: getConfig().flowchart.padding
    });
  });
};
/**
 * Add edges to graph based on the parsed graph definition.
 * @param {Object} relations The relations (edges) to add to the graph
 * @param {Object} g The graph object
 */
export const addRelations = function(relations, g) {
  let cnt = 0;
  // Diagram-wide default styles; currently never assigned here, so the
  // per-edge fallbacks below apply.
  let defaultStyle;
  let defaultLabelStyle;

  relations.forEach(function(edge) {
    cnt++;
    const edgeData = {};

    //Set relationship style and line type
    edgeData.classes = 'relation';
    edgeData.pattern = edge.relation.lineType == 1 ? 'dashed' : 'solid';
    edgeData.id = 'id' + cnt;

    // Set link type for rendering
    if (edge.type === 'arrow_open') {
      edgeData.arrowhead = 'none';
    } else {
      edgeData.arrowhead = 'normal';
    }

    logger.info(edgeData, edge);

    //Set edge extra labels ('none' means no label was given)
    edgeData.startLabelRight = edge.relationTitle1 === 'none' ? '' : edge.relationTitle1;
    edgeData.endLabelLeft = edge.relationTitle2 === 'none' ? '' : edge.relationTitle2;

    //Set relation arrow types
    edgeData.arrowTypeStart = getArrowMarker(edge.relation.type1);
    edgeData.arrowTypeEnd = getArrowMarker(edge.relation.type2);

    let style = '';
    let labelStyle = '';
    if (typeof edge.style !== 'undefined') {
      const styles = getStylesFromArray(edge.style);
      style = styles.style;
      labelStyle = styles.labelStyle;
    } else {
      style = 'fill:none';
      if (typeof defaultStyle !== 'undefined') {
        style = defaultStyle;
      }
      if (typeof defaultLabelStyle !== 'undefined') {
        labelStyle = defaultLabelStyle;
      }
    }
    edgeData.style = style;
    edgeData.labelStyle = labelStyle;

    // Pick the interpolation curve: per-edge setting, then the diagram
    // default, then the configured fallback.
    if (typeof edge.interpolate !== 'undefined') {
      edgeData.curve = interpolateToCurve(edge.interpolate, curveLinear);
    } else if (typeof relations.defaultInterpolate !== 'undefined') {
      edgeData.curve = interpolateToCurve(relations.defaultInterpolate, curveLinear);
    } else {
      edgeData.curve = interpolateToCurve(conf.curve, curveLinear);
    }

    edge.text = edge.title;
    if (typeof edge.text === 'undefined') {
      if (typeof edge.style !== 'undefined') {
        edgeData.arrowheadStyle = 'fill: #333';
      }
    } else {
      edgeData.arrowheadStyle = 'fill: #333';
      edgeData.labelpos = 'c';
      // The html-label branch is deliberately disabled ("&& false").
      if (getConfig().flowchart.htmlLabels && false) { // eslint-disable-line
        edgeData.labelType = 'html';
        edgeData.label = '<span class="edgeLabel">' + edge.text + '</span>';
      } else {
        edgeData.labelType = 'text';
        edgeData.label = edge.text.replace(common.lineBreakRegex, '\n');
        if (typeof edge.style === 'undefined') {
          edgeData.style = edgeData.style || 'stroke: #333; stroke-width: 1.5px;fill:none';
        }
        edgeData.labelStyle = edgeData.labelStyle.replace('color:', 'fill:');
      }
    }

    // Add the edge to the graph
    g.setEdge(edge.id1, edge.id2, edgeData, cnt);
  });
};
// Todo optimize (linear scan over the cache on every lookup)
const getGraphId = function(label) {
  // Find the first cached node whose label matches and return its cache key.
  const match = Object.entries(idCache).find(([, node]) => node.label === label);
  return match ? match[0] : undefined;
};
/**
 * Merges the supplied configuration into the module-level conf object.
 * @param {Object} cnf Key/value pairs to copy onto conf.
 */
export const setConf = function(cnf) {
  for (const [key, value] of Object.entries(cnf)) {
    conf[key] = value;
  }
};
/**
 * Draws a class diagram in the tag with id: id based on the graph definition in text.
 * (Legacy renderer — the newer `draw` below uses the shared dagre wrapper instead.)
 * @param text The diagram definition to parse and render
 * @param id The DOM id of the svg element to render into
 */
export const drawOld = function(text, id) {
  idCache = {};
  parser.yy.clear();
  parser.parse(text);
  logger.info('Rendering diagram ' + text);
  // Fetch the default direction, use TD if none was found
  const diagram = select(`[id='${id}']`);
  // insertMarkers(diagram);
  // Layout graph, Create a new directed graph
  const g = new graphlib.Graph({
    multigraph: true
  });
  // Set an object for the graph label
  g.setGraph({
    isMultiGraph: true
  });
  // Default to assigning a new object as a label for each new edge.
  g.setDefaultEdgeLabel(function() {
    return {};
  });
  const classes = classDb.getClasses();
  logger.info('classes:');
  logger.info(classes);
  const keys = Object.keys(classes);
  // Draw each class into the svg first, then register it as a graph node so
  // dagre can lay out the already-measured shapes.
  for (let i = 0; i < keys.length; i++) {
    const classDef = classes[keys[i]];
    const node = svgDraw.drawClass(diagram, classDef, conf);
    idCache[node.id] = node;
    // Add nodes to the graph. The first argument is the node id. The second is
    // metadata about the node. In this case we're going to add labels to each of
    // our nodes.
    g.setNode(node.id, node);
    logger.info('Org height: ' + node.height);
  }
  const relations = classDb.getRelations();
  logger.info('relations:', relations);
  relations.forEach(function(relation) {
    logger.info(
      'tjoho' + getGraphId(relation.id1) + getGraphId(relation.id2) + JSON.stringify(relation)
    );
    // Relation endpoints are labels; translate them to graph ids via the cache.
    g.setEdge(
      getGraphId(relation.id1),
      getGraphId(relation.id2),
      {
        relation: relation
      },
      relation.title || 'DEFAULT'
    );
  });
  dagre.layout(g);
  // Move every drawn class to the position dagre computed (dagre positions are
  // node centers, hence the width/2 and height/2 offsets).
  g.nodes().forEach(function(v) {
    if (typeof v !== 'undefined' && typeof g.node(v) !== 'undefined') {
      logger.debug('Node ' + v + ': ' + JSON.stringify(g.node(v)));
      select('#' + lookUpDomId(v)).attr(
        'transform',
        'translate(' +
          (g.node(v).x - g.node(v).width / 2) +
          ',' +
          (g.node(v).y - g.node(v).height / 2) +
          ' )'
      );
    }
  });
  // Draw the relation lines between the laid-out classes.
  g.edges().forEach(function(e) {
    if (typeof e !== 'undefined' && typeof g.edge(e) !== 'undefined') {
      logger.debug('Edge ' + e.v + ' -> ' + e.w + ': ' + JSON.stringify(g.edge(e)));
      svgDraw.drawEdge(diagram, g.edge(e), g.edge(e).relation, conf);
    }
  });
  // Size the svg to the drawn content plus padding on every side.
  const svgBounds = diagram.node().getBBox();
  const width = svgBounds.width + padding * 2;
  const height = svgBounds.height + padding * 2;
  configureSvgSize(diagram, height, width, conf.useMaxWidth);
  // Ensure the viewBox includes the whole svgBounds area with extra space for padding
  const vBox = `${svgBounds.x - padding} ${svgBounds.y - padding} ${width} ${height}`;
  logger.debug(`viewBox ${vBox}`);
  diagram.attr('viewBox', vBox);
};
/**
 * Draws a class diagram in the tag with id: id based on the graph definition in text,
 * using the shared dagre-wrapper renderer.
 * @param text The diagram definition to parse and render
 * @param id The DOM id of the svg element to render into
 */
export const draw = function(text, id) {
  logger.info('Drawing class');
  classDb.clear();
  // const parser = classDb.parser;
  // parser.yy = classDb;
  // Parse the graph definition
  // try {
  parser.parse(text);
  // } catch (err) {
  //   logger.debug('Parsing failed');
  // }
  // Fetch the default direction, use TD if none was found
  let dir = 'TD';
  // NOTE(review): layout options are read from the *flowchart* config section,
  // not a class-diagram-specific one — confirm this is intentional.
  const conf = getConfig().flowchart;
  logger.info('config:', conf);
  const nodeSpacing = conf.nodeSpacing || 50;
  const rankSpacing = conf.rankSpacing || 50;
  // Create the input mermaid.graph
  const g = new graphlib.Graph({
    multigraph: true,
    compound: true
  })
    .setGraph({
      rankdir: dir,
      nodesep: nodeSpacing,
      ranksep: rankSpacing,
      marginx: 8,
      marginy: 8
    })
    .setDefaultEdgeLabel(function() {
      return {};
    });
  // let subG;
  // const subGraphs = flowDb.getSubGraphs();
  // logger.info('Subgraphs - ', subGraphs);
  // for (let i = subGraphs.length - 1; i >= 0; i--) {
  //   subG = subGraphs[i];
  //   logger.info('Subgraph - ', subG);
  //   flowDb.addVertex(subG.id, subG.title, 'group', undefined, subG.classes);
  // }
  // Fetch the verices/nodes and edges/links from the parsed graph definition
  const classes = classDb.getClasses();
  const relations = classDb.getRelations();
  logger.info(relations);
  // let i = 0;
  // for (i = subGraphs.length - 1; i >= 0; i--) {
  //   subG = subGraphs[i];
  //   selectAll('cluster').append('text');
  //   for (let j = 0; j < subG.nodes.length; j++) {
  //     g.setParent(subG.nodes[j], subG.id);
  //   }
  // }
  // Populate the graph with the parsed nodes and edges.
  addClasses(classes, g, id);
  addRelations(relations, g);
  // Add custom shapes
  // flowChartShapes.addToRenderV2(addShape);
  // Set up an SVG group so that we can translate the final graph.
  const svg = select(`[id="${id}"]`);
  svg.attr('xmlns:xlink', 'http://www.w3.org/1999/xlink');
  // Run the renderer. This is what draws the final graph.
  const element = select('#' + id + ' g');
  render(element, g, ['aggregation', 'extension', 'composition', 'dependency'], 'classDiagram', id);
  // element.selectAll('g.node').attr('title', function() {
  //   return flowDb.getTooltip(this.id);
  // });
  const padding = 8;
  // Fit the svg viewBox and translation to the rendered content plus padding.
  const svgBounds = svg.node().getBBox();
  const width = svgBounds.width + padding * 2;
  const height = svgBounds.height + padding * 2;
  logger.debug(
    `new ViewBox 0 0 ${width} ${height}`,
    `translate(${padding - g._label.marginx}, ${padding - g._label.marginy})`
  );
  configureSvgSize(svg, height, width, conf.useMaxWidth);
  svg.attr('viewBox', `0 0 ${width} ${height}`);
  svg
    .select('g')
    .attr('transform', `translate(${padding - g._label.marginx}, ${padding - svgBounds.y})`);
  // Index nodes
  // flowDb.indexNodes('subGraph' + i);
  // Add label rects for non html labels
  if (!conf.htmlLabels) {
    const labels = document.querySelectorAll('[id="' + id + '"] .edgeLabel .label');
    for (let k = 0; k < labels.length; k++) {
      const label = labels[k];
      // Get dimensions of label
      const dim = label.getBBox();
      // Insert an opaque background rect behind each SVG-text edge label so the
      // edge line does not strike through the text.
      const rect = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
      rect.setAttribute('rx', 0);
      rect.setAttribute('ry', 0);
      rect.setAttribute('width', dim.width);
      rect.setAttribute('height', dim.height);
      rect.setAttribute('style', 'fill:#e8e8e8;');
      label.insertBefore(rect, label.firstChild);
    }
  }
  // If node has a link, wrap it in an anchor SVG object.
  // const keys = Object.keys(classes);
  // keys.forEach(function(key) {
  //   const vertex = classes[key];
  //   if (vertex.link) {
  //     const node = select('#' + id + ' [id="' + key + '"]');
  //     if (node) {
  //       const link = document.createElementNS('http://www.w3.org/2000/svg', 'a');
  //       link.setAttributeNS('http://www.w3.org/2000/svg', 'class', vertex.classes.join(' '));
  //       link.setAttributeNS('http://www.w3.org/2000/svg', 'href', vertex.link);
  //       link.setAttributeNS('http://www.w3.org/2000/svg', 'rel', 'noopener');
  //       const linkNode = node.insert(function() {
  //         return link;
  //       }, ':first-child');
  //       const shape = node.select('.label-container');
  //       if (shape) {
  //         linkNode.append(function() {
  //           return shape.node();
  //         });
  //       }
  //       const label = node.select('.label');
  //       if (label) {
  //         linkNode.append(function() {
  //           return label.node();
  //         });
  //       }
  //     }
  //   }
  // });
};
// Public renderer API for this diagram type: config setter and draw entry point.
export default {
  setConf,
  draw
};
/**
 * Translates a numeric relation type code into the marker name used by the renderer.
 * Unknown codes (including non-number values) map to 'none'.
 * @param {number} type Relation type code (0-3).
 * @returns {string} Marker name: aggregation, extension, composition, dependency or none.
 */
function getArrowMarker(type) {
  // Map keys are matched with SameValueZero, preserving the original switch's
  // strict comparison semantics (e.g. the string '1' does not match 1).
  const markers = new Map([
    [0, 'aggregation'],
    [1, 'extension'],
    [2, 'composition'],
    [3, 'dependency']
  ]);
  return markers.has(type) ? markers.get(type) : 'none';
}
|
// Generated mocha spec scaffold for the PutGLAccountsResponse model, wrapped in
// a UMD header so it can load the SDK under AMD, CommonJS, or browser globals.
(function(root, factory) {
  if (typeof define === 'function' && define.amd) {
    // AMD.
    define(['expect.js', process.cwd()+'/src/index'], factory);
  } else if (typeof module === 'object' && module.exports) {
    // CommonJS-like environments that support module.exports, like Node.
    factory(require('expect.js'), require(process.cwd()+'/src/index'));
  } else {
    // Browser globals (root is window)
    factory(root.expect, root.ApacheFineract);
  }
}(this, function(expect, ApacheFineract) {
  'use strict';

  // A fresh model instance is created before every test case.
  var instance;

  beforeEach(function() {
    instance = new ApacheFineract.PutGLAccountsResponse();
  });

  // Read a model property, preferring an accessor method when one is generated.
  var getProperty = function(object, getter, property) {
    // Use getter method if present; otherwise, get the property directly.
    if (typeof object[getter] === 'function')
      return object[getter]();
    else
      return object[property];
  }

  // Write a model property, preferring an accessor method when one is generated.
  var setProperty = function(object, setter, property, value) {
    // Use setter method if present; otherwise, set the property directly.
    if (typeof object[setter] === 'function')
      object[setter](value);
    else
      object[property] = value;
  }

  describe('PutGLAccountsResponse', function() {
    it('should create an instance of PutGLAccountsResponse', function() {
      // uncomment below and update the code to test PutGLAccountsResponse
      //var instane = new ApacheFineract.PutGLAccountsResponse();
      //expect(instance).to.be.a(ApacheFineract.PutGLAccountsResponse);
    });

    it('should have the property resourceId (base name: "resourceId")', function() {
      // uncomment below and update the code to test the property resourceId
      //var instane = new ApacheFineract.PutGLAccountsResponse();
      //expect(instance).to.be();
    });

    it('should have the property changes (base name: "changes")', function() {
      // uncomment below and update the code to test the property changes
      //var instane = new ApacheFineract.PutGLAccountsResponse();
      //expect(instance).to.be();
    });

  });

}));
|
requirejs(['./WorldWindShim',
        './LayerManager'],
    function (WorldWind,
              LayerManager) {
        "use strict";

        // Tell WorldWind to log only warnings and errors.
        WorldWind.Logger.setLoggingLevel(WorldWind.Logger.LEVEL_WARNING);

        // Create the WorldWindow.
        var wwd = new WorldWind.WorldWindow("canvasOne");

        // Create and add layers to the WorldWindow.
        var layers = [
            // Imagery layers.
            {layer: new WorldWind.BMNGLayer(), enabled: true},
            {layer: new WorldWind.BMNGLandsatLayer(), enabled: false},
            {layer: new WorldWind.BingAerialWithLabelsLayer(null), enabled: true},
            // Add atmosphere layer on top of all base layers.
            {layer: new WorldWind.AtmosphereLayer(), enabled: true},
            // WorldWindow UI layers.
            {layer: new WorldWind.CompassLayer(), enabled: true},
            {layer: new WorldWind.CoordinatesDisplayLayer(wwd), enabled: true},
            {layer: new WorldWind.ViewControlsLayer(wwd), enabled: true}
        ];

        for (var l = 0; l < layers.length; l++) {
            layers[l].layer.enabled = layers[l].enabled;
            wwd.addLayer(layers[l].layer);
        }

        // Create the custom image for the placemark with a 2D canvas:
        // a radial red-green-red gradient disc.
        var canvas = document.createElement("canvas"),
            ctx2d = canvas.getContext("2d"),
            size = 64, c = size / 2 - 0.5, innerRadius = 5, outerRadius = 20;
        canvas.width = size;
        canvas.height = size;
        var gradient = ctx2d.createRadialGradient(c, c, innerRadius, c, c, outerRadius);
        gradient.addColorStop(0, 'rgb(255, 0, 0)');
        gradient.addColorStop(0.5, 'rgb(0, 255, 0)');
        gradient.addColorStop(1, 'rgb(255, 0, 0)');
        ctx2d.fillStyle = gradient;
        ctx2d.arc(c, c, outerRadius, 0, 2 * Math.PI, false);
        ctx2d.fill();

        // Set placemark attributes.
        var placemarkAttributes = new WorldWind.PlacemarkAttributes(null);
        // Wrap the canvas created above in an ImageSource object to specify it as the placemarkAttributes image source.
        placemarkAttributes.imageSource = new WorldWind.ImageSource(canvas);
        // Define the pivot point for the placemark at the center of its image source.
        placemarkAttributes.imageOffset = new WorldWind.Offset(WorldWind.OFFSET_FRACTION, 0.5, WorldWind.OFFSET_FRACTION, 0.5);
        placemarkAttributes.imageScale = 1;
        placemarkAttributes.imageColor = WorldWind.Color.WHITE;

        // Set placemark highlight attributes.
        // Note that the normal attributes are specified as the default highlight attributes so that all properties
        // are identical except the image scale. You could instead vary the color, image, or other property
        // to control the highlight representation.
        var highlightAttributes = new WorldWind.PlacemarkAttributes(placemarkAttributes);
        highlightAttributes.imageScale = 1.2;

        // Create the placemark with the attributes defined above.
        var placemarkPosition = new WorldWind.Position(40.3968, -74.0916, 1e2);
        var placemark = new WorldWind.Placemark(placemarkPosition, false, placemarkAttributes);
        // Draw placemark at altitude defined above, relative to the terrain.
        placemark.altitudeMode = WorldWind.RELATIVE_TO_GROUND;
        // Assign highlight attributes for the placemark.
        placemark.highlightAttributes = highlightAttributes;

        // Create the renderable layer for placemarks.
        var placemarkLayer = new WorldWind.RenderableLayer("Custom Placemark");
        // Add the placemark to the layer.
        placemarkLayer.addRenderable(placemark);
        // Add the placemarks layer to the WorldWindow's layer list.
        // (Fixed: the original added this layer a second time at the end of the
        // script, causing it to appear twice in the layer list.)
        wwd.addLayer(placemarkLayer);

        // Now set up to handle highlighting.
        var highlightController = new WorldWind.HighlightController(wwd);

        // Create a layer manager for controlling layer visibility.
        var layerManager = new LayerManager(wwd);

        // Show the popup overlay when a click/pick hits the placemark.
        var handlePick = function (o) {
            var x = o.clientX,
                y = o.clientY;
            var pickList = wwd.pick(wwd.canvasCoordinates(x, y));
            for (var i = 0; i < pickList.objects.length; i++) {
                if (pickList.objects[i].userObject instanceof WorldWind.Placemark) {
                    $('.hover_bkgr_fricc').show();
                }
            }
        };

        // Clicking the overlay dismisses it.
        $('.hover_bkgr_fricc').click(function () {
            $('.hover_bkgr_fricc').hide();
        });

        wwd.addEventListener("click", handlePick);
    });
|
'use strict';

// Usuarios service used for communicating with the usuarios REST endpoints.
angular.module('usuarios').factory('Usuarios', ['$resource',
  function ($resource) {
    // $resource provides no PUT action by default, so declare a custom
    // "update" action alongside the built-in ones.
    var customActions = {
      update: {
        method: 'PUT'
      }
    };
    return $resource('api/usuarios/:usuarioId', { usuarioId: '@_id' }, customActions);
  }
]);
|
/*
Copyright (c) 2003-2018, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
// Thai ('th') localization strings for the CKEditor "preview" plugin.
CKEDITOR.plugins.setLang( 'preview', 'th', {
  preview: 'ดูหน้าเอกสารตัวอย่าง'
} );
|
import datetime
from django import template
register = template.Library()
@register.filter
def is_not_past_due(lesson):
    """Return True if the lesson's end date is after its scheduled day.

    The scheduled day is computed from the start of the current week
    (Monday) plus ``lesson.day`` days and ``lesson.week - 1`` whole weeks.

    NOTE(review): assumes ``lesson.day`` is 0-based from Monday and
    ``lesson.week`` is 1-based -- confirm against the Lesson model.
    """
    today = datetime.date.today()
    week_start = today - datetime.timedelta(days=today.weekday())
    scheduled_day = week_start + datetime.timedelta(
        days=lesson.day + (lesson.week - 1) * 7
    )
    # Return the comparison directly instead of the if/else True/False pattern.
    return lesson.date_end > scheduled_day
|
"""Test app signals
"""
import factory
import pytest
|
// @flow
import produce from "immer";
import * as common from "../common";
/**
* Let this declare the way for well typed records for outputs
*
* General organization is:
*
* - Declare the in-memory type
* - Declare the nbformat type (exactly matching nbformat.v4.schema.json)
* - Create a "record maker", which we _don't_ export, followed by the real `makeXRecord` function that enforces set values
* - Write a way to go from nbformat to these records
* - Write a way to go from message spec to these records
*
*/
// Discriminator values shared by all stream-output representations below.
export type StreamName = "stdout" | "stderr";
export type StreamType = "stream";
export const STDOUT: StreamName = "stdout";
export const STDERR: StreamName = "stderr";
export const STREAM = "stream";
// In-memory version (camelCase keys, plain string text)
type StreamOutput = {
  outputType: StreamType,
  name: StreamName,
  text: string
};
// On disk (nbformat v4: snake_case keys, text may be a multiline string array)
export type NbformatStreamOutput = {
  output_type: StreamType,
  name: StreamName,
  text: common.MultilineString
};
// Shape of a Jupyter "stream" message as received from the kernel.
type StreamMessage = {
  header: {
    msg_type: StreamType
  },
  content: {
    name: StreamName,
    text: string
  }
};
// NOTE(review): typed as a plain Object; presumably the frozen record produced
// by makeStreamOutputRecord -- a more precise type would be StreamOutput.
export type StreamOutputRecord = Object;
// NOTE: No export, as the values here should get overridden by an exact version
// passed into makeStreamOutputRecord
export const makeStreamOutputRecord: Function = (streamOutput: Object) => {
const defaultStreamOutput = {
outputType: STREAM,
name: STDOUT,
text: ""
};
return produce(defaultStreamOutput, draft =>
Object.assign(draft, streamOutput)
);
};
export function streamRecordFromNbformat(
s: NbformatStreamOutput
): StreamOutputRecord {
return makeStreamOutputRecord({
outputType: s.output_type,
name: s.name,
text: common.demultiline(s.text)
});
}
export function streamRecordFromMessage(
msg: StreamMessage
): StreamOutputRecord {
return makeStreamOutputRecord({
outputType: STREAM,
name: msg.content.name,
text: msg.content.text
});
}
|
import '../css/style.css';
import Game from './classes/Game.js';
{
  console.log('initialising...');

  // Replay the sound as soon as its asset has finished loading.
  document.querySelector('.sound').addEventListener('sound-loaded', ({currentTarget}) => {
    currentTarget.components.sound.playSound();
  });

  if (window.confirm('Are you on Google Chrome and do you have a PS4 controller in hand?')) {
    // Controller available: start the game once the gamepad connects
    // (the event exposes the pad via its `gamepad` property).
    window.addEventListener('gamepadconnected', ({gamepad}) => {
      console.log(`${gamepad.id} is connected, ready to play`);
      new Game();
    });
  } else {
    // No controller: hide the start screen and show the instruction prompt.
    document.querySelector('.beginstate').classList.add('hide');
    document.querySelector('.prompt').classList.remove('hide');
  }
}
|
import React from 'react'
import Link from 'gatsby-link'
import Content, { HTMLContent } from '../components/Content'
import Logo from '../assets/imgs/logo_small.png'
import HelloRobot from '../assets/imgs/hello_robot.jpg'
import FutureRobot from '../assets/imgs/future-robot.jpg'
import PlayingWRobot from '../assets/imgs/kid-playing-robot.jpg'
/**
 * Presentational template for the About page.
 * Props come from the page's markdown frontmatter (see aboutPageQuery below):
 * title/subheading/top_image for the intro, mission_section for the
 * mission/vision copy and image, plus the rendered markdown body as `content`.
 * `contentComponent` lets callers inject HTMLContent for pre-rendered HTML;
 * it defaults to the plain Content component.
 */
export const AboutPageTemplate = ({
  mission_section,
  subheading,
  title,
  top_image,
  content,
  contentComponent,
}) => {
  const PageContent = contentComponent || Content
  return (
    <main id="main" className="about-bg">
      <section className="about-holder tc-padding">
        <div className="container">
          <div className="about-content has-layout">
            <div className="row align-items-center">
              <div className="col-sm-6">
                <div className="about-img-2">
                  <img src={top_image} alt="" />
                </div>
              </div>
              <div className="col-lg-6 col-md-6 col-sm-12">
                <div className="about-text style-2">
                  <h5>{subheading}</h5>
                  <h3>{title}</h3>
                  <PageContent content={content} className="content" />
                  {/* Static strip of kid photos shown under the intro text */}
                  <div className="kids-img has-layout">
                    <ul>
                      <li>
                        <img
                          src={require('../assets/imgs/kids-imgs/img-01.png')}
                          alt=""
                        />
                      </li>
                      <li>
                        <img
                          src={require('../assets/imgs/kids-imgs/img-02.png')}
                          alt=""
                        />
                      </li>
                      <li>
                        <img
                          src={require('../assets/imgs/kids-imgs/img-03.png')}
                          alt=""
                        />
                      </li>
                      <li>
                        <img
                          src={require('../assets/imgs/kids-imgs/img-04.png')}
                          alt=""
                        />
                      </li>
                    </ul>
                  </div>
                </div>
              </div>
            </div>
            <div className="our-mission-holder">
              <div className="row align-items-center">
                {/* About Text */}
                <div className="col-lg-6 col-md-6 col-sm-12">
                  <div className="about-text has-layout">
                    <h3>Our Mission</h3>
                    <h4>{mission_section.missionText}</h4>
                    <h3>Our Vision</h3>
                    <h4>{mission_section.visionText}</h4>
                  </div>
                </div>
                {/* About Text */}
                {/* About Img */}
                <div className="col-sm-6">
                  <div className="our-mission">
                    <img src={mission_section.section_image} alt="" />
                  </div>
                </div>
                {/* About Img */}
              </div>
            </div>
          </div>
        </div>
      </section>
    </main>
  )
}
// Page component: unpacks the GraphQL query result and feeds the markdown
// frontmatter plus rendered HTML into the template above.
export default ({ data }) => {
  const { markdownRemark: post } = data
  return (
    <AboutPageTemplate
      contentComponent={HTMLContent}
      title={post.frontmatter.title}
      content={post.html}
      subheading={post.frontmatter.subheading}
      top_image={post.frontmatter.top_image}
      mission_section={post.frontmatter.mission_section}
    />
  )
}
// Page query for the About page; `graphql` is used as a global tag here
// (not imported). NOTE(review): presumably provided by Gatsby at build time
// -- confirm against the project's Gatsby version.
export const aboutPageQuery = graphql`
query AboutPage($id: String!) {
markdownRemark(id: { eq: $id }) {
html
frontmatter {
subheading
title
top_image
mission_section {
missionText
visionText
section_image
}
}
}
}
`
|
import logging
from threading import Event, Lock, Thread
from playback.recording import Recording
from playback.tape_cassette import TapeCassette
_logger = logging.getLogger(__name__)
class AsyncRecordOnlyTapeCassette(TapeCassette):
    # pylint: disable=too-many-instance-attributes
    """
    Wraps TapeCassette with asynchronous execution of the operations that change the state of the recording.
    This cassette can only be used for recording, not for playback (playback methods raise TypeError).
    """

    def __init__(self, tape_cassette, flush_interval=0.1, timeout_on_close=10):
        """
        :param tape_cassette: The storage driver to hold the recording in and wrap with asynchronous behaviour
        :type tape_cassette: playback.tape_cassette.TapeCassette
        :param flush_interval: Interval (in seconds) to flush recording to the underlying storage (tape cassette)
        :type flush_interval: float
        :param timeout_on_close: How much time (in seconds) to wait for joining the recording thread on close
        :type timeout_on_close: float
        """
        self.wrapped_tape_cassette = tape_cassette
        self._flush_interval = flush_interval
        self._timeout_on_close = timeout_on_close
        # Buffer of zero-argument callables; drained by the background thread.
        self._recording_operation_buffer = []
        self._stop_event = Event()
        # Guards _recording_operation_buffer against concurrent access.
        self._lock = Lock()
        self._update_recording_thread = Thread(target=self._recording_loop, name="AsyncTapeCassette Thread")
        # Daemon thread so a forgotten close() cannot block interpreter exit.
        # (Assigning `daemon` replaces the deprecated setDaemon() call.)
        self._update_recording_thread.daemon = True
        self._started = False

    def start(self):
        """
        Starts the recording thread
        """
        _logger.info("Starting AsyncTapeCassette")
        self._started = True
        self._update_recording_thread.start()

    def close(self):
        """
        Signal the cassette to close, this will signal the underlying thread to stop waiting for more recording and
        will join it until its completed sending remaining recordings using the given timeout
        """
        _logger.info("Shutting down AsyncTapeCassette (joining for {}s)".format(self._timeout_on_close))
        self._started = False
        self._stop_event.set()
        try:
            self._update_recording_thread.join(self._timeout_on_close)
        except RuntimeError:
            # If thread was not started
            pass
        self.wrapped_tape_cassette.close()
        _logger.info("AsyncTapeCassette has shutdown")

    def get_recording(self, recording_id):
        # Playback is unsupported by design for this cassette.
        raise TypeError("AsyncTapeCassette should only be used for recording, not playback")

    def iter_recording_ids(self, category, start_date=None, end_date=None, metadata=None, limit=None):
        # Playback is unsupported by design for this cassette.
        raise TypeError("AsyncTapeCassette should only be used for recording, not playback")

    def extract_recording_category(self, recording_id):
        # Playback is unsupported by design for this cassette.
        raise TypeError("AsyncTapeCassette should only be used for recording, not playback")

    def create_new_recording(self, category):
        """
        :param category: A category to classify the recording in (e.g operation class) (serializable)
        :type category: Any
        :return: Creates a new recording object
        :rtype: playback.recording.Recording
        """
        assert self._started, "Recording thread is not running"
        # The assumption is that create new recording is not a long running task and hence we can do it synchronously,
        # if that will not be the case the creation it self needs to become async as well
        return AsyncRecording(self.wrapped_tape_cassette.create_new_recording(category), self._add_async_operation)

    def _add_async_operation(self, func):
        """
        Adds operation to be executed asynchronously
        :param func: Operation to execute
        :type func: function
        """
        with self._lock:
            self._recording_operation_buffer.append(func)

    def _save_recording(self, recording):
        """
        Saves given recording (asynchronously, via the operation buffer)
        :param recording: Recording to save
        :type recording: AsyncRecording
        """
        self._add_async_operation(lambda: self.wrapped_tape_cassette.save_recording(recording.wrapped_recording))

    def _recording_loop(self):
        """
        Runs the recording loop in a dedicated thread: flush the buffer, then
        sleep up to flush_interval (or until close() signals the stop event).
        """
        _logger.info('Async recording thread started')
        while not self._stop_event.is_set():
            self._flush_recording()
            self._stop_event.wait(self._flush_interval)
        _logger.info('Async recording thread signaled to stop thread, flushing any pending recording')
        # Flush any pending recording
        self._flush_recording()
        _logger.info('Async recording thread stopped')

    def _flush_recording(self):
        """
        Flush current pending recording to the underlying storage, this method is blocking till recording is done
        """
        # We don't want to keep the lock while doing actual recording, hence we copy the buffer state and release
        # the lock
        _logger.debug('Flushing pending recording operations')
        with self._lock:
            current_flushed_operations = self._recording_operation_buffer
            self._recording_operation_buffer = []
        # Execute actual recording; a failing operation is logged and skipped so
        # one bad operation cannot drop the rest of the buffer.
        for recording_operation in current_flushed_operations:
            try:
                recording_operation()
            except Exception:  # pylint: disable=broad-except
                _logger.exception(u"Error running recording operation")
class AsyncRecording(Recording):
    """
    Wraps a recording with asynchronous set behaviour: every mutation is queued
    via a callback and applied to the wrapped recording by a background thread.
    Read methods raise TypeError because this recording is record-only.
    """

    def __init__(self, wrapped_recording, add_async_operation_callback):
        """
        :param wrapped_recording: Recording to wrap with asynchronous set data
        :type wrapped_recording: Recording
        :param add_async_operation_callback: A callback to add operations to be executed asynchronously
        :type add_async_operation_callback: function
        """
        # This cassette is only used for recording, hence it has no use of keeping the playback factory
        super(AsyncRecording, self).__init__(wrapped_recording.id)
        self.wrapped_recording = wrapped_recording
        self._add_async_operation_callback = add_async_operation_callback

    def _set_data(self, key, value):
        """
        Queues setting data in the wrapped recording
        :param key: data key
        :type key: basestring
        :param value: data value (serializable)
        :type value: Any
        """
        self._add_async_operation_callback(lambda: self.wrapped_recording.set_data(key, value))

    def get_data(self, key):
        # TypeError for consistency with AsyncRecordOnlyTapeCassette's playback
        # guards (backward compatible: TypeError is a subclass of Exception).
        raise TypeError("AsyncTapeCassette should only be used for recording, not playback")

    def get_all_keys(self):
        # See get_data for the choice of TypeError.
        raise TypeError("AsyncTapeCassette should only be used for recording, not playback")

    def _add_metadata(self, metadata):
        """
        Queues adding metadata to the wrapped recording
        :param metadata: Metadata to add to the recording
        :type metadata: dict
        """
        self._add_async_operation_callback(lambda: self.wrapped_recording.add_metadata(metadata))

    def get_metadata(self):
        # See get_data for the choice of TypeError.
        raise TypeError("AsyncTapeCassette should only be used for recording, not playback")
|
import sys
import matplotlib.pyplot as plt
import numpy as np
from collections import defaultdict
import pandas as pd
import statistics
from data_utils import DatasetBuilder
from metrics_utils import compute_metrics, describe_metrics, get_test_metrics, test
from plot_utils import plot
from mitigators import NullMitigator, SyntheticMitigator, DIRMitigator, ReweighMitigator, EGRMitigator, PRMitigator, CPPMitigator, ROMitigator
from test_algorithms import TestAlgorithms
from plot_utils import plot_algo_lr, plot_algo
# Metrics
from aif360.metrics import BinaryLabelDatasetMetric
# Bias insertion
from oversample import label_bias, selection_bias
# construct argument parser
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("-d", "--data", choices=['adult', 'compas', 'german', 'bank', 'meps19', 'grade'], default='compas', help="dataset: adult, compas, german, bank, meps19, grade ")
ap.add_argument("-c", "--classifier", choices=['lr', 'rf', 'svm', 'nn', 'nb'], default='lr', help="baseline model: lr, rf, svm, nn, nb")
ap.add_argument("-m", "--mitigator", choices=['dir', 'rew', 'egr', 'pr', 'cpp', 'ro'], required=False, help="mitigators: dir, rew, egr, pr, cpp, ro")
# NOTE(review): the help strings below say "o-1" -- presumably a typo for "0-1".
ap.add_argument("-b", "--bias", default=0., help="amount of bias: o-1")
ap.add_argument("-t", "--biastype", choices=['label', 'selection'], default='label', help="amount of bias: o-1")
ap.add_argument("-o", "--os", default=2, help="oversample mode: 1: privi unfav 2: unpriv fav")
args = vars(ap.parse_args())

# Experiment configuration taken from the command line.
DATASET = args["data"]
BASELINE = args["classifier"]
MITIGATOR = args["mitigator"]
BIAS = float(args["bias"])
BIAS_TYPE = args["biastype"]
OS_MODE = int(args["os"])

# global constants
# NOTE(review): both branches assign False, so SCALER is always False; the
# svm/nn branch presumably should set it to True -- confirm intended behavior.
if BASELINE == 'svm' or BASELINE == 'nn':
    SCALER = False
else:
    SCALER = False
DISPLAY = False
# Classification threshold applied to predicted probabilities.
THRESH_ARR = 0.5
# loop ten times (number of repeated train/test runs to average over)
N = 10

# percentage of favor and unfavor (accumulated across runs, before/after transform)
priv_metric_orig = defaultdict(float)
favor_metric_orig = defaultdict(float)
favor_metric_transf = defaultdict(float)

# running resutls over N runs, one list of metrics per mitigation method
orig_metrics = defaultdict(list)
transf_metrics = defaultdict(list)
reweigh_metrics = defaultdict(list)
dir_metrics = defaultdict(list)
egr_metrics = defaultdict(list)
pr_orig_metrics = defaultdict(list)
cpp_metrics = defaultdict(list)
ro_metrics = defaultdict(list)

# load dataset and set the privileged/unprivileged groups
dataset_builder = DatasetBuilder(DATASET)
dataset_orig = dataset_builder.load_data()
sens_attr = dataset_orig.protected_attribute_names[0]
unprivileged_groups = dataset_builder.unprivileged_groups
privileged_groups = dataset_builder.privileged_groups

# training data split ratio
p = 0.8
# run mitigating algorithms
for i in range(N):
# split dataset into train, validation, and test
# Split off a held-out test set; the validation set simply aliases the test set here.
dataset_orig_train, dataset_orig_test = dataset_orig.split([p], shuffle=True)
dataset_orig_val = dataset_orig_test
#print(dataset_orig_train.features)
# favorable and unfavorable labels and feature_names
f_label = dataset_orig_train.favorable_label
uf_label = dataset_orig_train.unfavorable_label
feature_names = dataset_orig_train.feature_names
# introduce label or selection biases, assuming the original data is fair
if BIAS_TYPE == 'label':
    dataset_orig_train = label_bias(dataset_orig_train, unprivileged_groups, BIAS)
elif BIAS_TYPE == 'selection':
    dataset_orig_train = selection_bias(dataset_orig_train, unprivileged_groups, BIAS)
else:
    print('no bias type specified')
# show data info
print("#### Training Dataset shape")
print(dataset_orig_train.features.shape)
print("#### Favorable and unfavorable labels")
print(dataset_orig_train.favorable_label, dataset_orig_train.unfavorable_label)
print("#### Protected attribute names")
print(dataset_orig_train.protected_attribute_names)
print("#### Privileged and unprivileged protected attribute values")
print(privileged_groups, unprivileged_groups)
#print(dataset_orig_train.privileged_protected_attributes, dataset_orig_train.unprivileged_protected_attributes)
print("#### Dataset feature names")
print(dataset_orig_train.feature_names)
print(dataset_orig_train.features[0]) #:,4])
# check fairness on the original data
# NOTE(review): BinaryLabelDatasetMetric is presumably AIF360's metric class — confirm import above.
metric_orig_train = BinaryLabelDatasetMetric(dataset_orig_train,
                                             unprivileged_groups=unprivileged_groups,
                                             privileged_groups=privileged_groups)
print("privileged vs. unprivileged: ", metric_orig_train.num_positives(privileged=True) + metric_orig_train.num_negatives(privileged=True), metric_orig_train.num_positives(privileged=False) + metric_orig_train.num_negatives(privileged=False))
# Base rates = fraction of favorable outcomes per group; reused by the oversampling mitigator below.
base_rate_unprivileged = metric_orig_train.base_rate(privileged=False)
base_rate_privileged = metric_orig_train.base_rate(privileged=True)
print('base_pos unpriv: ', base_rate_unprivileged)
print('base_pos priv: ', base_rate_privileged)
#print(np.count_nonzero(dataset_orig_train.labels==f_label))
print("Difference in mean outcomes between unprivileged and privileged groups = %f" % metric_orig_train.mean_difference())
# statistics of favored/positive class BEFORE transf
# (accumulated across repetitions; divided by N when displayed further down)
priv_metric_orig['total_priv'] += metric_orig_train.num_instances(privileged = True)
priv_metric_orig['total_unpriv'] += metric_orig_train.num_instances(privileged = False)
favor_metric_orig['total_favor'] += metric_orig_train.base_rate()
favor_metric_orig['total_unfavor'] += 1 - metric_orig_train.base_rate()
favor_metric_orig['priv_favor'] += metric_orig_train.base_rate(privileged = True)
favor_metric_orig['priv_unfavor'] += 1 - metric_orig_train.base_rate(privileged = True)
favor_metric_orig['unpriv_favor'] += metric_orig_train.base_rate(privileged = False)
favor_metric_orig['unpriv_unfavor'] += 1 - metric_orig_train.base_rate(privileged = False)
print(dataset_orig_train.features.shape, dataset_orig_val.features.shape, dataset_orig_test.features.shape)
# testing mitigation methods
# Each run_* call trains/evaluates one mitigation strategy and appends its
# metrics to the corresponding accumulator dict (one list entry per repetition).
test_cases = TestAlgorithms(BASELINE)
# null mitigator
orig_metrics = test_cases.run_original(dataset_orig_train, dataset_orig_val, dataset_orig_test, BASELINE, orig_metrics, f_label, uf_label, unprivileged_groups, privileged_groups, THRESH_ARR, DISPLAY, SCALER)
# synthetic data mitigator
transf_metrics = test_cases.run_oversample(dataset_orig_train, dataset_orig_val, dataset_orig_test, privileged_groups, unprivileged_groups, base_rate_privileged, base_rate_unprivileged, BASELINE, transf_metrics, f_label, uf_label, THRESH_ARR, DISPLAY, OS_MODE, SCALER)
# statistics of favored/positive class AFTER transf
favor_metric_transf['total_favor'] += metric_transf_train.base_rate()
favor_metric_transf['total_unfavor'] += 1 - metric_transf_train.base_rate()
favor_metric_transf['priv_favor'] += metric_transf_train.base_rate(privileged = True)
favor_metric_transf['priv_unfavor'] += 1 - metric_transf_train.base_rate(privileged = True)
favor_metric_transf['unpriv_favor'] += metric_transf_train.base_rate(privileged = False)
favor_metric_transf['unpriv_unfavor'] += 1 - metric_transf_train.base_rate(privileged = False)
# dir mitigator
dir_metrics = test_cases.run_dir(dataset_orig_train, dataset_orig_val, dataset_orig_test, sens_attr, BASELINE, dir_metrics, f_label, uf_label, unprivileged_groups, privileged_groups, THRESH_ARR, DISPLAY, SCALER)
# reweigh mitigator
reweigh_metrics = test_cases.run_rew(dataset_orig_train, dataset_orig_val, dataset_orig_test, f_label, uf_label, unprivileged_groups, privileged_groups, BASELINE, reweigh_metrics, THRESH_ARR, DISPLAY, SCALER)
# egr mitigator
egr_metrics = test_cases.run_egr(dataset_orig_train, dataset_orig_val, dataset_orig_test, egr_metrics, BASELINE, f_label, uf_label, unprivileged_groups, privileged_groups,THRESH_ARR, DISPLAY, SCALER)
# cpp mitigator
cpp_metrics = test_cases.run_cpp(dataset_orig_train, dataset_orig_val, dataset_orig_test, cpp_metrics, BASELINE, unprivileged_groups, privileged_groups, THRESH_ARR, SCALER)
# ro mitigator
ro_metrics = test_cases.run_ro(dataset_orig_train, dataset_orig_val, dataset_orig_test, ro_metrics, BASELINE, unprivileged_groups, privileged_groups, THRESH_ARR, SCALER)
# prejudice remover only supports the logistic-regression baseline
if (BASELINE == 'lr'):
    pr_orig_metrics = test_cases.run_pr(dataset_orig_train, dataset_orig_val, dataset_orig_test, pr_orig_metrics, sens_attr, f_label, uf_label, unprivileged_groups, privileged_groups, THRESH_ARR, DISPLAY, SCALER)
# display output
print('\n\n\n')
print(DATASET)
print(dataset_orig_train.features.shape[0])
print('\n\n\n')
# Average the accumulated counts over the N repetitions and tabulate them.
priv_metric_orig = {k: [v/N] for (k,v) in priv_metric_orig.items()}
results = [priv_metric_orig]
tr = pd.Series(['orig'], name='num_instance')
df = pd.concat([pd.DataFrame(metrics) for metrics in results], axis = 0).set_index([tr])
print(df)
print('\n')
favor_metric_orig = {k: [v/N] for (k,v) in favor_metric_orig.items()}
favor_metric_transf = {k: [v/N] for (k,v) in favor_metric_transf.items()}
pd.set_option('display.multi_sparse', False)
results = [favor_metric_orig, favor_metric_transf]
tr = pd.Series(['orig'] + ['transf'], name='dataset')
df = pd.concat([pd.DataFrame(metrics) for metrics in results], axis = 0).set_index([tr])
print(df)
print('\n\n\n')
# dataframe to display fairness metrics
# error metrics
# Sample standard deviation across repetitions, used as error bars in the plots.
orig_error_metrics = {k: [statistics.stdev(v)] for (k,v) in orig_metrics.items()}
transf_error_metrics = {k: [statistics.stdev(v)] for (k,v) in transf_metrics.items()}
reweigh_error_metrics = {k: [statistics.stdev(v)] for (k,v) in reweigh_metrics.items()}
dir_error_metrics = {k: [statistics.stdev(v)] for (k,v) in dir_metrics.items()}
egr_error_metrics = {k: [statistics.stdev(v)] for (k,v) in egr_metrics.items()}
pr_orig_error_metrics = {k: [statistics.stdev(v)] for (k,v) in pr_orig_metrics.items()}
cpp_error_metrics = {k: [statistics.stdev(v)] for (k,v) in cpp_metrics.items()}
ro_error_metrics = {k: [statistics.stdev(v)] for (k,v) in ro_metrics.items()}
# mean value metrics
orig_metrics_mean = {k: [sum(v)/N] for (k,v) in orig_metrics.items()}
transf_metrics_mean = {k: [sum(v)/N] for (k,v) in transf_metrics.items()}
reweigh_metrics_mean = {k:[sum(v)/N] for (k,v) in reweigh_metrics.items()}
dir_metrics_mean = {k:[sum(v)/N] for (k,v) in dir_metrics.items()}
egr_metrics_mean = {k:[sum(v)/N] for (k,v) in egr_metrics.items()}
pr_orig_metrics_mean = {k: [sum(v)/N] for (k,v) in pr_orig_metrics.items()}
cpp_metrics_mean = {k: [sum(v)/N] for (k,v) in cpp_metrics.items()}
ro_metrics_mean = {k: [sum(v)/N] for (k,v) in ro_metrics.items()}
from scipy.stats import ttest_rel
def paired_t (a, b):
    """Two-sided paired t-test on the absolute values of two metric series.

    Only the p-value is returned; the t-statistic is discarded.
    """
    abs_a = np.abs(np.asarray(a))
    abs_b = np.abs(np.asarray(b))
    result = ttest_rel(abs_a, abs_b)
    return result[1]
def acc_diff (a, b):
    """Elementwise difference a - b, summarized as [mean, sample stdev]."""
    diffs = np.asarray(a) - np.asarray(b)
    return [statistics.mean(diffs), statistics.stdev(diffs)]
# Plot all mitigators (the lr variant includes prejudice remover) and report a
# paired t-test of the oversampled metrics against the unmitigated baseline.
if BASELINE == 'lr':
    plot_algo_lr(orig_metrics_mean, transf_metrics_mean, dir_metrics_mean, reweigh_metrics_mean, egr_metrics_mean, pr_orig_metrics_mean, cpp_metrics_mean, ro_metrics_mean, orig_error_metrics, transf_error_metrics, dir_error_metrics, reweigh_error_metrics, egr_error_metrics, pr_orig_error_metrics, cpp_error_metrics, ro_error_metrics, BASELINE)
    stat = {k: [paired_t(transf_metrics[k], v)] for (k,v) in orig_metrics.items()}
    print(stat)
else:
    plot_algo(orig_metrics_mean, transf_metrics_mean, dir_metrics_mean, reweigh_metrics_mean, egr_metrics_mean, cpp_metrics_mean, ro_metrics_mean, orig_error_metrics, transf_error_metrics, dir_error_metrics, reweigh_error_metrics, egr_error_metrics, cpp_error_metrics, ro_error_metrics, BASELINE)
    stat = {k: [paired_t(transf_metrics[k], v)] for (k,v) in orig_metrics.items()}
    print(stat)
plt.show()
|
// test
// init first mock
// Each MockDataPool.when(method, url) registers a canned response; an optional
// withExpectedHeader additionally matches on a request header. Note that two
// mocks are registered for POST /test.do, differing only by content-type.
// NOTE(review): the response bodies are runtime strings (Chinese) and must not
// be altered.
MockDataPool.when("POST", "/test1.do")
    .withExpectedHeader("content-type", "application/json;charset=utf-8")
    .responseWith({status: 200, body: JSON.stringify({message: "保存成功!"})});
MockDataPool.when("POST", "/test.do")
    .withExpectedHeader("content-type", "application/json;charset=utf-8")
    .responseWith({status: 200, body: JSON.stringify({message: "保存成功!"})});
MockDataPool.when("POST", "/test.do")
    .withExpectedHeader("content-type", "application/x-www-form-urlencoded;charset=UTF-8")
    .responseWith({status: 200, body: JSON.stringify({a: 1, b: 2, c: 3})});
MockDataPool.when("GET", "/test3.do")
    .responseWith({status: 200, body: "表单保存成功!"});
MockDataPool.when("GET", "/test4.do")
    .responseWith({status: 200, body: "表单保存成功!"});
|
// Search index data — presumably generated by Doxygen for its client-side
// search box (entry format: [lowercased name, [display name, [url, …]]]).
// Do not edit by hand; regenerate with the docs build instead.
var searchData=
[
  ['labeldatathread',['LabelDataThread',['../structstp_1_1_label_data_thread.html',1,'stp']]],
  ['linearspline',['linearspline',['../classtk_1_1linearspline.html',1,'tk']]]
];
|
/*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2017 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// NOTE(review): minified, vendored OpenUI5 build artifact (sap.ui.core.Element).
// Do not hand-edit — changes belong in the upstream unminified source.
sap.ui.define(['jquery.sap.global','../base/Object','../base/ManagedObject','./ElementMetadata','../Device','jquery.sap.strings','jquery.sap.trace'],function(q,B,M,E,D){"use strict";var a=M.extend("sap.ui.core.Element",{metadata:{stereotype:"element","abstract":true,publicMethods:["getId","getMetadata","getTooltip_AsString","getTooltip_Text","getModel","setModel","hasModel","bindElement","unbindElement","getElementBinding","prop","getLayoutData","setLayoutData"],library:"sap.ui.core",aggregations:{tooltip:{name:"tooltip",type:"sap.ui.core.TooltipBase",altTypes:["string"],multiple:false},customData:{name:"customData",type:"sap.ui.core.CustomData",multiple:true,singularName:"customData"},layoutData:{name:"layoutData",type:"sap.ui.core.LayoutData",multiple:false,singularName:"layoutData"},dependents:{name:"dependents",type:"sap.ui.core.Element",multiple:true}}},constructor:function(i,s){M.apply(this,arguments);},renderer:null},E);a.defineClass=function(c,s,m){return B.defineClass(c,s,m||E);};a.prototype.getInterface=function(){return this;};a.prototype._handleEvent=function(e){var t=this,h="on"+e.type;function b(d){var i,l,o;if(d&&(l=d.length)>0){d=l===1?d:d.slice();for(i=0;i<l;i++){if(e.isImmediateHandlerPropagationStopped()){return;}o=d[i].oDelegate;if(o[h]){o[h].call(d[i].vThis===true?t:d[i].vThis||o,e);}}}}b(this.aBeforeDelegates);if(e.isImmediateHandlerPropagationStopped()){return;}if(this[h]){this[h](e);}b(this.aDelegates);};a.create=M.create;a.prototype.toString=function(){return"Element "+this.getMetadata().getName()+"#"+this.sId;};a.prototype.getDomRef=function(s){return q.sap.domById(s?this.getId()+"-"+s:this.getId());};a.prototype.$=function(s){return q(this.getDomRef(s));};a.prototype.isActive=function(){return this.oParent&&this.oParent.isActive();};a.prototype.prop=function(p,v){var P=this.getMetadata().getAllSettings()[p];if(P){if(arguments.length==1){return this[P._sGetter]();}else{this[P._sMutator](v);return 
this;}}};a.prototype.insertDependent=function(e,i){return this.insertAggregation("dependents",e,i,true);};a.prototype.addDependent=function(e){return this.addAggregation("dependents",e,true);};a.prototype.removeDependent=function(e){return this.removeAggregation("dependents",e,true);};a.prototype.removeAllDependents=function(){return this.removeAllAggregation("dependents",true);};a.prototype.destroyDependents=function(){return this.destroyAggregation("dependents",true);};a.prototype.rerender=function(){if(this.oParent){this.oParent.rerender();}};a.prototype.getUIArea=function(){return this.oParent?this.oParent.getUIArea():null;};a.prototype.destroy=function(s){a._updateFocusInfo(this);M.prototype.destroy.call(this,s);if(s!=="KeepDom"||this.getMetadata().isInstanceOf("sap.ui.core.PopupInterface")){this.$().remove();}else{q.sap.log.debug("DOM is not removed on destroy of "+this);}};a.prototype.fireEvent=function(e,p,A,b){if(this.hasListeners(e)){q.sap.interaction.notifyStepStart(this);}if(typeof p==='boolean'){b=A;A=p;p=null;}p=p||{};p.id=p.id||this.getId();return M.prototype.fireEvent.call(this,e,p,A,b);};a.prototype.addDelegate=function(d,c,t,C){if(!d){return this;}this.removeDelegate(d);if(typeof c==="object"){C=t;t=c;c=false;}if(typeof t==="boolean"){C=t;t=undefined;}(c?this.aBeforeDelegates:this.aDelegates).push({oDelegate:d,bClone:!!C,vThis:((t===this)?true:t)});return this;};a.prototype.removeDelegate=function(d){var i;for(i=0;i<this.aDelegates.length;i++){if(this.aDelegates[i].oDelegate==d){this.aDelegates.splice(i,1);i--;}}for(i=0;i<this.aBeforeDelegates.length;i++){if(this.aBeforeDelegates[i].oDelegate==d){this.aBeforeDelegates.splice(i,1);i--;}}return this;};a.prototype.addEventDelegate=function(d,t){return this.addDelegate(d,false,t,true);};a.prototype.removeEventDelegate=function(d){return this.removeDelegate(d);};a.prototype.getFocusDomRef=function(){return 
this.getDomRef()||null;};a.prototype.focus=function(){q.sap.focus(this.getFocusDomRef());};a.prototype.getFocusInfo=function(){return{id:this.getId()};};a.prototype.applyFocusInfo=function(f){this.focus();return this;};a.prototype._refreshTooltipBaseDelegate=function(t){var T=sap.ui.require('sap/ui/core/TooltipBase');if(T){var o=this.getTooltip();if(o instanceof T){this.removeDelegate(o);}if(t instanceof T){t._currentControl=this;this.addDelegate(t);}}};a.prototype.setTooltip=function(t){this._refreshTooltipBaseDelegate(t);this.setAggregation("tooltip",t);return this;};a.prototype.getTooltip=function(){return this.getAggregation("tooltip");};a.runWithPreprocessors=M.runWithPreprocessors;a.prototype.getTooltip_AsString=function(){var t=this.getTooltip();if(typeof t==="string"||t instanceof String){return t;}return undefined;};a.prototype.getTooltip_Text=function(){var t=this.getTooltip();if(t&&typeof t.getText==="function"){return t.getText();}return t;};(function(){var g=function(e,k){var d=e.getAggregation("customData");if(d){for(var i=0;i<d.length;i++){if(d[i].getKey()==k){return d[i];}}}return null;};var s=function(e,k,v,w){if(v===null){var d=g(e,k);if(!d){return;}var b=e.getAggregation("customData").length;if(b==1){e.destroyAggregation("customData",true);}else{e.removeAggregation("customData",d,true);d.destroy();}}else{var C=sap.ui.requireSync('sap/ui/core/CustomData');var d=g(e,k);if(d){d.setValue(v);d.setWriteToDom(w);}else{var d=new C({key:k,value:v,writeToDom:w});e.addAggregation("customData",d,true);}}};a.prototype.data=function(){var b=arguments.length;if(b==0){var d=this.getAggregation("customData"),r={};if(d){for(var i=0;i<d.length;i++){r[d[i].getKey()]=d[i].getValue();}}return r;}else if(b==1){var c=arguments[0];if(c===null){this.destroyAggregation("customData",true);return this;}else if(typeof c=="string"){var e=g(this,c);return e?e.getValue():null;}else if(typeof c=="object"){for(var k in c){s(this,k,c[k]);}return this;}else{throw new Error("When 
data() is called with one argument, this argument must be a string, an object or null, but is "+(typeof c)+":"+c+" (on UI Element with ID '"+this.getId()+"')");}}else if(b==2){s(this,arguments[0],arguments[1]);return this;}else if(b==3){s(this,arguments[0],arguments[1],arguments[2]);return this;}else{throw new Error("data() may only be called with 0-3 arguments (on UI Element with ID '"+this.getId()+"')");}};})();a.prototype.clone=function(I,l){var c=M.prototype.clone.apply(this,arguments);for(var i=0;i<this.aDelegates.length;i++){if(this.aDelegates[i].bClone){c.aDelegates.push(this.aDelegates[i]);}}for(var i=0;i<this.aBeforeDelegates.length;i++){if(this.aBeforeDelegates[i].bClone){c.aBeforeDelegates.push(this.aBeforeDelegates[i]);}}if(this._sapui_declarativeSourceInfo){c._sapui_declarativeSourceInfo=q.extend({},this._sapui_declarativeSourceInfo);}return c;};a.prototype.findElements=M.prototype.findAggregatedObjects;a.prototype.setLayoutData=function(l){this.setAggregation("layoutData",l,true);var L=this.getParent();if(L){var e=q.Event("LayoutDataChange");e.srcControl=this;L._handleEvent(e);}return this;};a.prototype.bindElement=M.prototype.bindObject;a.prototype.unbindElement=M.prototype.unbindObject;a.prototype.getElementBinding=M.prototype.getObjectBinding;a.prototype._getFieldGroupIds=function(){var f;if(this.getMetadata().hasProperty("fieldGroupIds")){f=this.getFieldGroupIds();}if(!f||f.length==0){var p=this.getParent();if(p&&p._getFieldGroupIds){return p._getFieldGroupIds();}}return f||[];};a.prototype._getMediaContainerWidth=function(){if(typeof this._oContextualSettings==="undefined"){return undefined;}return this._oContextualSettings.contextualWidth;};a.prototype._getCurrentMediaContainerRange=function(n){var w=this._getMediaContainerWidth();n=n||D.media.RANGESETS.SAP_STANDARD;return D.media.getCurrentRange(n,w);};a.prototype._onContextualSettingsChanged=function(){var 
w=this._getMediaContainerWidth(),s=w!==undefined,p=s^!!this._bUsingContextualWidth,l=this._aContextualWidthListeners||[];if(p){if(s){l.forEach(function(L){D.media.detachHandler(L.callback,L.listener,L.name);});}else{l.forEach(function(L){D.media.attachHandler(L.callback,L.listener,L.name);});}this._bUsingContextualWidth=s;}l.forEach(function(L){var m=this._getCurrentMediaContainerRange(L.name);if(m.from!==L.media.from){L.media=m;L.callback.call(L.listener||window,m);}},this);};a.prototype._attachMediaContainerWidthChange=function(f,l,n){n=n||D.media.RANGESETS.SAP_STANDARD;this._aContextualWidthListeners=this._aContextualWidthListeners||[];this._aContextualWidthListeners.push({callback:f,listener:l,name:n,media:this._getCurrentMediaContainerRange(n)});if(!this._bUsingContextualWidth){D.media.attachHandler(f,l,n);}};a.prototype._detachMediaContainerWidthChange=function(f,l,n){var L;n=n||D.media.RANGESETS.SAP_STANDARD;if(!this._aContextualWidthListeners){return;}for(var i=0,b=this._aContextualWidthListeners.length;i<b;i++){L=this._aContextualWidthListeners[i];if(L.callback===f&&L.listener===l&&L.name===n){if(!this._bUsingContextualWidth){D.media.detachHandler(f,l,n);}this._aContextualWidthListeners.splice(i,1);break;}}};return a;});
|
var attachCSS = require('../index.js')
var test = require('tape')
var createElement = require('base-element')
var document = require('global/document')
// Bare element selectors get scoped under the component's class: `button`
// rules come back as `button.my-button` (descendants keep their tail).
test('button -> button.my-button', function (t) {
  t.plan(2)
  var result
  setUp(function (fixture) {
    var button = createButton(fixture)
    result = attachCSS('button { color: red; }', button.vtree, { compress: true })
    t.equal(result, 'button.my-button{color:red;}')
    result = attachCSS('button em { font-style: italic; }', button.vtree, { compress: true })
    t.equal(result, 'button.my-button em{font-style:italic;}')
    tearDown(t.end)
  })
})
// Selectors that already target the component's own class pass through unchanged.
test('.my-button -> .my-button', function (t) {
  t.plan(1)
  var result
  setUp(function (fixture) {
    var button = createButton(fixture)
    result = attachCSS('.my-button { color: red; } .my-button em { font-style: italic; }', button.vtree, { compress: true })
    t.equal(result, '.my-button{color:red;}.my-button em{font-style:italic;}')
    tearDown(t.end)
  })
})
// ID selectors matching the component's id also pass through unchanged.
test('#my-button -> #my-button', function (t) {
  t.plan(2)
  var result
  setUp(function (fixture) {
    var button = createButton(fixture, { id: 'my-button' })
    result = attachCSS('#my-button { color: red; }', button.vtree, { compress: true })
    t.equal(result, '#my-button{color:red;}')
    result = attachCSS('#my-button em { font-style: italic; }', button.vtree, { compress: true })
    t.equal(result, '#my-button em{font-style:italic;}')
    tearDown(t.end)
  })
})
// The universal selector is scoped as a descendant of the component class.
test('* -> .my-button *', function (t) {
  t.plan(1)
  var result
  setUp(function (fixture) {
    var button = createButton(fixture)
    result = attachCSS('* { color: red; }', button.vtree, { compress: true })
    t.equal(result, '.my-button *{color:red;}')
    tearDown(t.end)
  })
})
// Compound selectors that include the component class are left as-is,
// including their descendant combinators.
test('.my-button.second -> .my-button.second', function (t) {
  t.plan(2)
  var result
  setUp(function (fixture) {
    var button = createButton(fixture)
    result = attachCSS('.my-button.second { color: red; }', button.vtree, { compress: true })
    t.equal(result, '.my-button.second{color:red;}')
    result = attachCSS('.my-button.second button { color: red; }', button.vtree, { compress: true })
    t.equal(result, '.my-button.second button{color:red;}')
    tearDown(t.end)
  })
})
// Render a <button><em>click me</em></button> into the fixture and return the
// base-element instance; `params` defaults to the `.my-button` class.
function createButton (fixture, params) {
  var attrs = params || { className: 'my-button' }
  var el = createElement(fixture)
  el.render(function () {
    return this.html('button', attrs, this.html('em', 'click me'))
  })
  return el
}
// Create an empty #fixture div, attach it to the document body, and hand it
// to the callback.
function setUp (cb) {
  var container = document.createElement('div')
  container.setAttribute('id', 'fixture')
  document.body.appendChild(container)
  cb(container)
}
// Remove the #fixture div (if it still exists) and continue.
function tearDown (cb) {
  var node = document.getElementById('fixture')
  if (node) node.parentNode.removeChild(node)
  cb()
}
|
import React, {useState} from 'react';
import TextField from '@material-ui/core/TextField';
//import { makeStyles } from '@material-ui/core/styles';
import {useDispatch} from 'react-redux'
import { joinChat } from '../actions/actions';
/*
const useStyles = makeStyles({
root: {
position: 'inherit',
top: '50%',
left: '50%'
}
})
*/
export default function ChatRoomForm(props) {
const [chatRoom, setChatRoom] = useState('')
const dispatch = useDispatch()
const handleKeyDown = event => {
if (event.key === 'Enter'){
event.preventDefault();
event.stopPropagation();
console.log(chatRoom);
dispatch(joinChat(chatRoom));
setChatRoom('');
}
}
return (
<TextField {...props} required placeholder='Join a chat room' onKeyDown={handleKeyDown} onChange={e => setChatRoom(e.target.value)} value={chatRoom}/>
);
}
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
# Proto module descriptor registering this file's messages under the
# google.ads.googleads.v8.enums package (generated-code convention).
__protobuf__ = proto.module(
    package="google.ads.googleads.v8.enums",
    marshal="google.ads.googleads.v8",
    manifest={"FeedItemTargetDeviceEnum",},
)
class FeedItemTargetDeviceEnum(proto.Message):
    r"""Container for enum describing possible data types for a feed
    item target device.
    """

    class FeedItemTargetDevice(proto.Enum):
        r"""Possible data types for a feed item target device."""
        # 0 is the proto3 default / unspecified value.
        UNSPECIFIED = 0
        UNKNOWN = 1
        MOBILE = 2
# Export everything declared in the proto manifest, sorted for stable order.
__all__ = tuple(sorted(__protobuf__.manifest))
|
const hamburger = document.querySelector(".navbar-mobile");
const navsub = document.querySelector(".navbar-item");

// Toggle the burger animation and the mobile nav panel together on click.
// Guard: querySelector returns null when either element is missing from the
// page, and calling addEventListener on null would throw and abort the script.
if (hamburger && navsub) {
  hamburger.addEventListener('click', () => {
    hamburger.classList.toggle("change");
    navsub.classList.toggle("nav-change");
  });
}
|
import numpy as np
import cv2
import pickle

# Haar cascade for frontal-face detection plus a pre-trained LBPH recognizer.
face_cascade = cv2.CascadeClassifier('cascades/data/haarcascade_frontalface_alt2.xml')
recognizer = cv2.face.LBPHFaceRecognizer_create()
recognizer.read("trainer.yml")

# Invert the {name: id} mapping saved at training time into {id: name}.
# (The original initialized this as the set {0}; a dict is what is used below.)
labels = {}
with open("labels.pickle", 'rb') as f:
    og_labels = pickle.load(f)
    labels = {v: k for k, v in og_labels.items()}

cap = cv2.VideoCapture(0)
while True:
    ret, frame = cap.read()
    if not ret:
        # Camera unplugged or stream ended: stop instead of crashing in cvtColor.
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=1)
    for (x, y, w, h) in faces:
        # Fix: the horizontal slice must use the width w (was x+h, which
        # cropped the wrong region whenever w != h).
        roi_gray = gray[y: y+h, x: x+w]  # region of interest
        roi_color = frame[y: y+h, x: x+w]
        # recognize ? deep learning model
        id_, conf = recognizer.predict(roi_gray)
        if conf >= 55:
            font = cv2.FONT_HERSHEY_SIMPLEX
            name = labels[id_]
            color = (255, 255, 255)  # BGR
            stroke = 2
            cv2.putText(frame, name, (x, y), font, 1, color, stroke, cv2.LINE_AA)
        #img_item = "7.png"
        #cv2.imwrite(img_item,roi_color)
        color = (255, 0, 0)  # BGR
        stroke = 2
        end_cord_x = x + w
        end_cord_y = y + h
        cv2.rectangle(frame, (x, y), (end_cord_x, end_cord_y), color, stroke)
    cv2.imshow('frame', frame)
    if cv2.waitKey(20) & 0xFF == ord('q'):
        break
cap.release()
# Fix: destroyAllWindows lives on the cv2 module, not on VideoCapture;
# cap.destroyAllWindows() raised AttributeError on shutdown.
cv2.destroyAllWindows()
|
import React from 'react';
import { connect } from 'react-redux';
import Navbar from './Navbar';
import Webpage from './Webpage';
import { IconContext } from 'react-icons';
import '../styles/App.sass';
// Top-level layout: the navbar plus one <Webpage> per open tab from the
// Redux store. All icons rendered inside get the shared 'svgIcon' class via
// the IconContext provider.
const App = (props) => {
  // @TODO 1. Get logo
  // @TODO 3. Update URL Bar and Title on update
  // @TODO 5. Config
  return (
    <IconContext.Provider value={{ className: 'svgIcon' }}>
      <div className={'appContainer'}>
        <Navbar />
        {props.tabs.map((tab) => (
          <Webpage id={tab.id} url={tab.url} key={tab.id} />
        ))}
      </div>
    </IconContext.Provider>
  );
};
// Expose only the open-tabs slice of the Redux store to <App/>.
const mapStateToProps = ({ tabs }) => ({ tabs });
// forwardRef lets parents attach a ref to the wrapped App component.
export default connect(mapStateToProps, null, null, { forwardRef: true })(App);
|
import numpy as np
from ..utils.np_utils import split_dataset
import itertools
def load_data(n_train=32 * 50, n_val=32*5, n_test=32*5):
    """Generate a synthetic polynomial-regression dataset.

    A random polynomial of degree <= 2 (including interaction terms) over a
    3-dimensional uniform input is sampled, tiny Gaussian noise is added, and
    the target is standardized to zero mean / unit variance. The result is
    partitioned via ``split_dataset`` into (n_train, n_val, n_test) rows.

    NOTE(review): output is nondeterministic (no RNG seed); the exact return
    structure is whatever utils.np_utils.split_dataset produces — confirm there.
    """
    # input_dim = np.random.randint(5, 10)
    input_dim = 3
    max_power = 2
    dropout = 0.5  # NOTE(review): unused unless the commented-out dropout branch below is re-enabled
    noise_sigma = 1e-8
    total_n = n_train + n_val + n_test
    x = np.random.uniform(-1, 1, (total_n, input_dim))
    y = np.zeros((total_n,))
    # Accumulate one random-coefficient term per monomial up to max_power.
    for i in range(1, max_power + 1):
        for combo in itertools.combinations_with_replacement(range(input_dim), i):
            #if np.random.uniform(0.0, 1.0) > dropout:
            tmp = np.ones((total_n,))
            for c in combo:
                tmp = tmp * x[:, c]
            coeff = np.random.uniform(-1, 1)
            y += coeff * tmp
    noise = np.random.normal(0, noise_sigma, (total_n,))
    y += noise
    y = y.reshape((-1, 1))
    # Standardize the target.
    sd = np.std(y, axis=None)
    m = np.mean(y, axis=None)
    y = (y - m) / sd
    assert np.all(np.isfinite(x))
    assert np.all(np.isfinite(y))
    return split_dataset(x, y, n_train, n_val, n_test)
|
#!/usr/bin/env node
// Api sub-module (server)
const fs = require('fs'),
multer = require('multer');
const { Core, Flux, Logger, Files, Utils } = require('./../../../api');
const log = new Logger(__filename);
const admin = require(Core._SECURITY + 'admin.js').init(Core._SECURITY);
// History/log file locations, derived from the configured instance name.
const FILE_REQUEST_HISTORY = Core._LOG + Core.const('name') + '_requestHistory.log';
const FILE_ERROR_HISTORY = Core._LOG + Core.const('name') + '_errorHistory.json';
const FILE_TTS_UI_HISTORY = Core._LOG + Core.const('name') + '_ttsUIHistory.json';
const FILE_VOICEMAIL_HISTORY = Core._LOG + Core.const('name') + '_voicemailHistory.json';
// Express app instance, assigned once in attachRoutes.
var uiHttp;
// Public surface: the server core calls attachRoutes(ui, modulesApi) once at boot.
module.exports = {
	attachRoutes: attachRoutes
};
/**
 * Wire every route family onto the given express app, remember it module-wide,
 * and return it. Order matters: the catch-all unmapped handler must go last.
 * @param {Object} ui - express app
 * @param {Object} modulesApi - per-module API descriptors (currently unused here)
 */
function attachRoutes(ui, modulesApi) {
	uiHttp = ui;
	// TODO attachUiRoute(uiHttp);
	[attachDefaultRoutes, attachFluxRoutes, attachUnmappedRouteHandler]
		.forEach(function (attach) { attach(uiHttp); });
	return uiHttp;
}
/**
 * POST /flux/:type/:subject/:id — turn the request into a Flux message.
 * A body wrapped as { _wrapper: value } is unwrapped before dispatch.
 */
function attachFluxRoutes(ui) {
	ui.post('/flux/:type/:subject/:id', function (req, res) {
		var payload = req.body;
		if (typeof payload === 'object' && payload.hasOwnProperty('_wrapper')) {
			payload = payload._wrapper;
		}
		var fluxId = [req.params.type, req.params.subject, req.params.id].join('|');
		new Flux(fluxId, payload);
		res.end();
	});
	return ui;
}
/**
 * Catch-all POST handler for URLs no other route claimed: log and reply 401.
 * @param {Object} ui - express app
 * @param {*} mode - unused; kept for signature compatibility with callers
 */
function attachUnmappedRouteHandler(ui, mode) {
	ui.post('/*', function (req, res) {
		// Fix: evaluate the awake state per request. Computing the message once
		// at attach time froze the sleep/awake wording at server startup.
		let errorMsg = Core.isAwake() ? 'Error UI > not mapped:' : 'Sleep mode, not allowed to interact';
		Core.error(errorMsg, req.url, false);
		res.writeHead(401);
		res.end();
	});
	return ui;
}
/**
 * Attach the built-in GET/POST routes: the dashboard snapshot, read-only
 * state/log/history endpoints, audio upload, log-level toggles, admin grant,
 * and text-to-speech. Returns the express app for chaining.
 * @param {Object} ui - express app
 */
function attachDefaultRoutes(ui) {
	/** DASHBOARD SECTION */
	// Aggregates config, runtime values and module states into one JSON blob.
	ui.get('/dashboard', function (req, res) {
		new Flux('interface|hardware|runtime', false, { log: 'debug' });
		let etatBtn = Core.run('etat');
		let cpuTemperature = Core.run('cpu.temperature');
		let cpuUsage = Core.run('cpu.usage');
		let dashboard = {
			config: Core.conf(),
			run: Core.run(),
			errors: Core.errors,
			mode: {
				value: {
					mode: Core.conf('log') == 'trace' ? 'Trace' : Core.conf('log') == 'debug' ? 'Debug' : Utils.firstLetterUpper(Core.conf('mode')),
					param: Utils.logTime('h:m (D/M)', Core.const('startDateTime')),
					switch: etatBtn == 'high' ? true : false,
					mood: Core.run('mood')
				}
			},
			switch: {
				value: etatBtn,
				active: etatBtn ? true : false
			},
			volume: {
				value: Core.run('volume'),
				active: etatBtn == 1 ? true : false
			},
			voicemail: {
				value: Core.run('voicemail'),
				active: Core.run('voicemail') > 0 ? true : false
			},
			audioRecord: {
				value: Core.run('audioRecord'),
				active: Core.run('audioRecord') > 0 ? true : false
			},
			music: {
				value: Core.run('music')
			},
			timer: {
				value: Core.run('timer'),
				active: Core.run('timer') > 0 ? true : false
			},
			hardware: {
				value: {
					usage: cpuUsage,
					temperature: cpuTemperature,
					memory: {
						framebot: Core.run('memory.framebot'),
						system: Core.run('memory.system')
					}
				},
				// Highlight the hardware tile when running hot or busy.
				active: cpuTemperature > 55 || cpuUsage >= 20 ? true : false
			},
			alarms: {
				value: Core.conf('alarms'),
				active: true
			},
			weather: { value: Core.run('weather') },
			mosquitoRepellent: { value: Core.run('mosquitoRepellent') },
			powerPlug: { value: Core.run('powerPlug') },
			rfxcom: { value: Core.run('rfxcom') },
			arduino: { value: Core.run('max') },
			video: { value: Core.run('hdmi') },
			network: { value: Core.run('network') },
			update: {
				value: Core.const('updateDateTime')
			},
			debug: {
				value: Core.conf('log') == 'debug' ? 'debug' : ''
			},
			trace: {
				value: Core.conf('log') == 'trace' ? 'trace' : ''
			},
			watcher: {
				value: Core.conf('watcher')
			}
		};
		res.end(JSON.stringify(dashboard));
	});

	/** ==> GET SECTION */
	// Tail of the main log file; ?logSize=N overrides the default 100 lines.
	ui.get('/log', function (req, res) {
		let logSize = 100;
		let params = req.query;
		if (params.hasOwnProperty('logSize') && !isNaN(params.logSize)) {
			logSize = parseInt(params.logSize);
		}
		prepareLogs(logSize)
			.then(logs => res.end(logs))
			.catch(err => Core.error("Can't retrieve logs", err));
	});

	// '/file/:filename'
	ui.get('/config.json', function (req, res) {
		log.table(Core.conf(), 'CONFIG');
		res.end(JSON.stringify(Core.conf()));
	});

	// Ask the hardware module for fresh values, then reply after a short delay.
	ui.get('/runtime', function (req, res) {
		new Flux('interface|hardware|runtime', true);
		setTimeout(() => {
			res.end(JSON.stringify(Core.run()));
		}, 500);
	});

	ui.get('/const', function (req, res) {
		res.end(JSON.stringify(Core.const()));
	});

	ui.get('/errors', function (req, res) {
		res.end(JSON.stringify(Core.errors));
	});

	// History endpoints stream the raw history files back as-is.
	ui.get('/errorHistory', function (req, res) {
		res.end(fs.readFileSync(FILE_ERROR_HISTORY, 'utf8').toString());
	});

	ui.get('/requestHistory', function (req, res) {
		res.end(fs.readFileSync(FILE_REQUEST_HISTORY, 'utf8').toString());
	});

	ui.get('/ttsUIHistory', function (req, res) {
		res.end(fs.readFileSync(FILE_TTS_UI_HISTORY, 'utf8').toString());
	});

	ui.get('/voicemailHistory', function (req, res) {
		res.end(fs.readFileSync(FILE_VOICEMAIL_HISTORY, 'utf8').toString());
	});

	// NOTE(review): _PATH is not defined in this file — presumably a global
	// set elsewhere; confirm, otherwise this route throws at request time.
	ui.get('/about', function (req, res) {
		res.end(fs.readFileSync(_PATH + 'README.md', 'utf8').toString());
	});

	/** ==> POST SECTION */
	// Multer storage for uploaded audio: files land in Core._UPLOAD as .wav.
	let audioRecordStorage = multer.diskStorage({
		destination: function (req, file, callback) {
			if (!fs.existsSync(Core._UPLOAD)) {
				fs.mkdirSync(Core._UPLOAD);
			}
			callback(null, Core._UPLOAD);
		},
		filename: function (req, file, callback) {
			callback(null, file.fieldname + '_' + new Date().toISOString() + '.wav');
		}
	});
	let audioRecordUpload = multer({ storage: audioRecordStorage }).single('audioRecord');

	ui.post('/audio', audioRecordUpload, function (req, res) {
		log.info('Audio received!');
		log.debug(req.file);
		new Flux('service|audioRecord|new', req.file.path, { delay: 1 });
		res.end();
	});

	// Log-level toggles flip between 'info' and the requested level.
	ui.post('/toggleDebug', function (req, res) {
		log.info('UI > Toggle debug');
		let newLogLevel = log.level() == 'debug' ? 'info' : 'debug';
		log.level(newLogLevel);
		res.end();
	});

	ui.post('/toggleTrace', function (req, res) {
		log.info('UI > Toggle trace');
		let newLogLevel = log.level() == 'trace' ? 'info' : 'trace';
		log.level(newLogLevel);
		new Flux('service|context|update', {
			log: newLogLevel
		});
		res.end();
	});

	// Admin grant: checks the pattern from the pwd header; the boolean reply
	// is reset immediately so a grant never leaks to the next request.
	var granted = false;
	ui.post('/grant', function (req, res) {
		let pattern = req.headers.pwd.split('#')[0]; // get security pattern without anchor character
		if (pattern && admin.checkPassword(pattern)) {
			granted = true;
			log.info('>> Admin granted !');
			let ip = Core.run('network');
			log.info('ip:', ip.local, typeof ip.public === 'string' ? '/ ' + ip.public.trim() : '');
		} else {
			Core.error('>> User NOT granted /!\\', pattern, false);
			new Flux('interface|tts|speak', { lg: 'en', msg: 'User NOT granted' }, { delay: 0.5 });
		}
		res.send(granted);
		if (granted) granted = false;
	});

	// TTS: speaks immediately when awake, otherwise queues a voicemail; with
	// no parameters a random utterance is triggered instead.
	ui.post('/tts', function (req, res) {
		let params = req.query;
		if (params.voice && params.lg && params.msg) {
			if (!Core.isAwake() || params.hasOwnProperty('voicemail')) {
				new Flux('service|voicemail|new', {
					voice: params.voice,
					lg: params.lg,
					msg: params.msg
				});
			} else {
				new Flux('interface|tts|speak', {
					voice: params.voice,
					lg: params.lg,
					msg: params.msg
				});
			}
			params.timestamp = Utils.logTime('D/M h:m:s', new Date());
			Files.appendJsonFile(FILE_TTS_UI_HISTORY, params);
		} else {
			new Flux('interface|tts|random');
		}
		res.end();
	});
	return ui;
}
const LOG_FILE_PATH = Core._LOG + Core.const('name') + '.log';

/**
 * Read the main log file and resolve with its last `lines` lines.
 * @param {number} lines - number of trailing lines to keep
 * @returns {Promise<string>} rejects with the fs error when the file is unreadable
 */
function prepareLogs(lines) {
	return new Promise((resolve, reject) => {
		fs.readFile(LOG_FILE_PATH, 'UTF-8', (err, logs) => {
			if (err) {
				reject(err);
				// Fix: without this return, execution fell through and
				// logs.toString() threw on undefined after rejecting.
				return;
			}
			logs = logs.toString().split('\n').slice(-lines).join('\n');
			resolve(logs);
		});
	});
}
|
# Test the format conversion; lets you check whether affine, hdr, data etc. changed.
import SimpleITK as sitk
import glob
from tqdm import tqdm
# Convert the data from .mhd to compressed NIfTI under mask/nii/.
mhd_list = glob.glob('mask/LNDb*.mhd')
print(mhd_list)
for i in tqdm(range(len(mhd_list))): #tqdm
    img = sitk.ReadImage(mhd_list[i])
    # NOTE(review): the [5:19] slice assumes a fixed-width 'mask/LNDb…' filename
    # layout — verify against the actual naming scheme before reuse.
    sitk.WriteImage(img, 'mask/nii/'+mhd_list[i][5:19]+'.nii.gz')
nii_list = glob.glob('mask/nii/*.nii.gz')
print(nii_list)
|
// Shared SpiderMonkey test-harness prerequisites.
load("0d8683db8b3792521a65ad1edba9cf82.js");
load("dada5190587903f93a3604016a6099ce.js");
load("01e0ec3a9a01836764c05319def52ae3.js");
load("762f4c20b6c2bf79dcf92be3017eef40.js");
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is JavaScript Engine testing utilities.
 *
 * The Initial Developer of the Original Code is
 * Mozilla Foundation.
 * Portions created by the Initial Developer are Copyright (C) 2006
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s): Reto Laemmler
 *                 Brendan Eich
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

var gTestfile = 'regress-354541-03.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 354541;
var summary = 'Regression to standard class constructors in case labels';
var actual = '';
var expect = '';

printBugNumber(BUGNUMBER);
printStatus (summary + ': top level');

// The first trim() assignment is immediately overwritten by the second;
// only the version returning 'hallo' is observable below.
String.prototype.trim = function() { print('hallo'); };
String.prototype.trim = function() { return 'hallo'; };
// Capture the constructor and prototype so we can detect if compiling the
// switch below replaces them.
const S = String;
const Sp = String.prototype;
expect = 'hallo';
var expectStringInvariant = true
var actualStringInvariant;
var expectStringPrototypeInvariant = true;
var actualStringPrototypeInvariant;

if (typeof Script == 'undefined')
{
  print('Test skipped. Script not defined.');
  reportCompare("Script not defined, Test skipped.",
                "Script not defined, Test skipped.",
                summary);
}
else
{
  // Compiling a switch with `case String:` must not clobber the String
  // constructor, its prototype, or the user-defined trim() (bug 354541).
  var s = Script('var tmp = function(o) { switch(o) { case String: case 1: return ""; } }; actualStringInvariant = (String === S); actualStringPrototypeInvariant = (String.prototype === Sp); actual = "".trim();');

  try
  {
    s();
  }
  catch(ex)
  {
    actual = ex + '';
  }

  reportCompare(expect, actual, 'trim() returned');
  reportCompare(expectStringInvariant, actualStringInvariant,
                'String invariant');
  reportCompare(expectStringPrototypeInvariant,
                actualStringPrototypeInvariant,
                'String.prototype invariant');
}
|
// Generated DITA table-of-contents data for the JDBC Tee processor docs —
// do not edit by hand; regenerate from the documentation build instead.
define({"topics" : [{"title":"Database Vendors and Drivers","shortdesc":"\n <p class=\"shortdesc\">The JDBC Tee processor can write data to a MySQL or PostgreSQL database.</p>\n ","href":"datacollector\/UserGuide\/Processors\/JDBCTee.html#concept_fd2_3rj_bhb","attributes": {"data-id":"concept_fd2_3rj_bhb",},"menu": {"hasChildren":true,},"tocID":"concept_fd2_3rj_bhb-d46e121668","next":"concept_fd2_3rj_bhb-d46e121668",},{"title":"Installing the JDBC Driver","href":"datacollector\/UserGuide\/Processors\/JDBCTee.html#concept_h35_xwq_tw","attributes": {"data-id":"concept_h35_xwq_tw",},"menu": {"hasChildren":false,},"tocID":"concept_h35_xwq_tw-d46e121767","topics":[]},{"title":"Define the CRUD Operation","href":"datacollector\/UserGuide\/Processors\/JDBCTee.html#concept_qfd_tpm_5y","attributes": {"data-id":"concept_qfd_tpm_5y",},"menu": {"hasChildren":false,},"tocID":"concept_qfd_tpm_5y-d46e121819","topics":[]},{"title":"Single and Multi-row Operations","href":"datacollector\/UserGuide\/Processors\/JDBCTee.html#concept_jnl_rmp_h1b","attributes": {"data-id":"concept_jnl_rmp_h1b",},"menu": {"hasChildren":false,},"tocID":"concept_jnl_rmp_h1b-d46e121881","topics":[]},{"title":"Configuring a JDBC Tee Processor","href":"datacollector\/UserGuide\/Processors\/JDBCTee.html#task_qpj_ncy_hw","attributes": {"data-id":"task_qpj_ncy_hw",},"menu": {"hasChildren":false,},"tocID":"task_qpj_ncy_hw-d46e121953","topics":[]}]});
|
// Once the page loads, open a socket.io connection and, on connect, ask the
// visitor for a nickname and join the chat with it.
window.onload = function () {
  const socket = io.connect();
  socket.on('connect', function () {
    const nickname = prompt('What is your nickname?');
    socket.emit('join', nickname);
  });
};
|
import multiprocessing
import platform
from abc import ABC, abstractmethod
from distutils.version import LooseVersion
from typing import Union, List, Tuple, Callable, Optional
import torch
import torch.distributed as torch_distrib
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from torch.utils.data.distributed import DistributedSampler
from pytorch_lightning.core import LightningModule
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException
try:
from torch.utils.data import IterableDataset
ITERABLE_DATASET_EXISTS = True
except ImportError:
ITERABLE_DATASET_EXISTS = False
try:
from apex import amp
except ImportError:
amp = None
try:
import torch_xla
import torch_xla.core.xla_model as xm
import torch_xla.distributed.xla_multiprocessing as xmp
except ImportError:
XLA_AVAILABLE = False
else:
XLA_AVAILABLE = True
try:
import horovod.torch as hvd
except (ModuleNotFoundError, ImportError):
HOROVOD_AVAILABLE = False
else:
HOROVOD_AVAILABLE = True
def _has_iterable_dataset(dataloader: DataLoader):
    """Return True if the dataloader wraps an ``IterableDataset`` (and the
    installed torch actually provides that class)."""
    if not ITERABLE_DATASET_EXISTS:
        return False
    # getattr with a None default mirrors the hasattr + attribute access pair;
    # isinstance(None, IterableDataset) is simply False.
    dataset = getattr(dataloader, 'dataset', None)
    return isinstance(dataset, IterableDataset)
def _has_len(dataloader: DataLoader) -> bool:
""" Checks if a given Dataloader has __len__ method implemented i.e. if
it is a finite dataloader or infinite dataloader. """
try:
# try getting the length
if len(dataloader) == 0:
raise ValueError('`Dataloader` returned 0 length.'
' Please make sure that your Dataloader at least returns 1 batch')
has_len = True
except TypeError:
has_len = False
except NotImplementedError: # e.g. raised by torchtext if a batch_size_fn is used
has_len = False
if has_len and _has_iterable_dataset(dataloader) and LooseVersion(torch.__version__) >= LooseVersion("1.4.0"):
rank_zero_warn(
'Your `IterableDataset` has `__len__` defined.'
' In combination with multi-processing data loading (e.g. batch size > 1),'
' this can lead to unintended side effects since the samples will be duplicated.'
)
return has_len
class TrainerDataLoadingMixin(ABC):
    """Mixin holding the Trainer's dataloader preparation logic: sampler
    replacement for distributed backends, worker-count sanity checks, and
    (re)initialisation of train/val/test dataloaders and batch counts."""

    # this is just a summary on variables used in this abstract class,
    # the proper values/initialisation should be done in child class
    global_rank: int
    use_ddp: bool
    use_ddp2: bool
    use_horovod: bool
    shown_warnings: ...
    val_check_interval: float
    use_tpu: bool
    tpu_local_core_rank: int
    train_dataloader: DataLoader
    num_training_batches: Union[int, float]
    val_check_batch: ...
    val_dataloaders: List[DataLoader]
    num_val_batches: List[Union[int, float]]
    test_dataloaders: List[DataLoader]
    num_test_batches: List[Union[int, float]]
    limit_train_batches: Union[int, float]
    limit_val_batches: Union[int, float]
    limit_test_batches: Union[int, float]
    replace_sampler_ddp: bool
    num_nodes: int
    num_processes: int
    distributed_backend: Optional[str]

    @abstractmethod
    def is_overridden(self, *args):
        """Warning: this is just empty shell for code implemented in other class."""

    def _worker_check(self, dataloader: DataLoader, name: str) -> None:
        """Warn about `num_workers` settings known to hurt performance for
        the active distributed backend (no-op on Windows)."""
        on_windows = platform.system() == 'Windows'

        # ddp_spawn + num_workers > 0 don't mix! tell the user
        is_dataloader = isinstance(dataloader, DataLoader)
        using_spawn = self.distributed_backend == 'ddp_spawn'
        if is_dataloader and not on_windows:
            if dataloader.num_workers > 0 and using_spawn:
                rank_zero_warn('Dataloader(num_workers>0) and ddp_spawn do not mix well!'
                               ' Your performance might suffer dramatically.'
                               ' Please consider setting distributed_backend=ddp to use num_workers > 0'
                               ' (this is a bottleneck of Python .spawn() and PyTorch')
            elif dataloader.num_workers == 0 and using_spawn:
                rank_zero_warn('You are using `distributed_backend=ddp_spawn` with num_workers=0.'
                               ' For much faster performance, switch to `distributed_backend=ddp`'
                               ' and set `num_workers>0`')
            elif dataloader.num_workers <= 2 and multiprocessing.cpu_count() > 2 and not using_spawn:
                num_cpus = multiprocessing.cpu_count()
                rank_zero_warn(f'The dataloader, {name}, does not have many workers which may be a bottleneck.'
                               ' Consider increasing the value of the `num_workers` argument`'
                               f' (try {num_cpus} which is the number of cpus on this machine)'
                               ' in the `DataLoader` init to improve performance.')

    def auto_add_sampler(self, dataloader: DataLoader, train: bool) -> DataLoader:
        """Replace the dataloader's sampler with a DistributedSampler when a
        distributed backend is in use and `replace_sampler_ddp` is enabled.
        Returns the dataloader unchanged for non-DataLoader objects and
        iterable datasets."""
        # don't do anything if it's not a dataloader
        is_dataloader = isinstance(dataloader, DataLoader)
        # don't manipulate iterable datasets
        is_iterable_ds = _has_iterable_dataset(dataloader)
        if not is_dataloader or is_iterable_ds:
            return dataloader
        need_dist_sampler = (self.use_ddp or self.use_ddp2 or self.use_horovod or self.use_tpu)

        if self.replace_sampler_ddp and need_dist_sampler:
            # A custom sampler cannot be silently replaced: refuse instead.
            if not isinstance(dataloader.sampler, (SequentialSampler, RandomSampler)):
                raise MisconfigurationException(
                    'You seem to have configured a sampler in your DataLoader. This will be replaced '
                    ' by `DistributedSampler` since `replace_sampler_ddp` is True and you are using'
                    ' distributed training. Either remove the sampler from your DataLoader or set'
                    ' `replace_sampler_ddp`=False if you want to use your custom sampler.')

            # replace with distributed sampler
            sampler = self._get_distributed_sampler(dataloader, train)
            dataloader = self.replace_sampler(dataloader, sampler)

        return dataloader

    def replace_sampler(self, dataloader, sampler):
        """Rebuild `dataloader` with `sampler`, re-using its public
        constructor arguments (private attrs and sampler-related keys are
        dropped)."""
        skip_keys = ['sampler', 'batch_sampler', 'dataset_kind']
        dl_args = {
            k: v for k, v in dataloader.__dict__.items() if not k.startswith('_') and k not in skip_keys
        }
        dl_args['sampler'] = sampler
        dataloader = type(dataloader)(**dl_args)
        return dataloader

    def _get_distributed_sampler(self, dataloader, train):
        """Build a DistributedSampler for the dataloader's dataset, sized for
        the active backend (TPU, Horovod, or one of the ddp variants).
        Shuffling is enabled only for training (`train=True`)."""
        if self.use_tpu:
            kwargs = dict(num_replicas=xm.xrt_world_size(), rank=xm.get_ordinal())
        elif self.use_horovod:
            kwargs = dict(num_replicas=hvd.size(), rank=hvd.rank())
        else:
            world_size = {
                'ddp': self.num_nodes * self.num_processes,
                'ddp_spawn': self.num_nodes * self.num_processes,
                'ddp2': self.num_nodes,
                'ddp_cpu': self.num_processes * self.num_nodes
            }
            assert self.distributed_backend is not None
            kwargs = dict(num_replicas=world_size[self.distributed_backend], rank=self.global_rank)
        kwargs['shuffle'] = train
        sampler = DistributedSampler(dataloader.dataset, **kwargs)
        return sampler

    def reset_train_dataloader(self, model: LightningModule) -> None:
        """Resets the train dataloader and initialises required variables
        (number of batches, when to validate, etc.).
        Args:
            model: The current `LightningModule`
        """
        self.train_dataloader = self.request_dataloader(model.train_dataloader)
        self.num_training_batches = 0

        # automatically add samplers
        self.train_dataloader = self.auto_add_sampler(self.train_dataloader, train=True)

        # infinite dataloaders report inf batches
        self.num_training_batches = len(self.train_dataloader) if _has_len(self.train_dataloader) else float('inf')
        self._worker_check(self.train_dataloader, 'train dataloader')

        # apply limit_train_batches: an int (or 0.0) caps the count directly,
        # a float in (0, 1] scales the finite count.
        if isinstance(self.limit_train_batches, int) or self.limit_train_batches == 0.0:
            self.num_training_batches = min(self.num_training_batches, int(self.limit_train_batches))
        elif self.num_training_batches != float('inf'):
            self.num_training_batches = int(self.num_training_batches * self.limit_train_batches)
        elif self.limit_train_batches != 1.0:
            raise MisconfigurationException(
                'When using an IterableDataset for `limit_train_batches`,'
                ' `Trainer(limit_train_batches)` must be `0.0`, `1.0` or an int. An int k specifies'
                ' `num_training_batches` to use.')

        # determine when to check validation
        # if int passed in, val checks that often
        # otherwise, it checks in [0, 1.0] % range of a training epoch
        if isinstance(self.val_check_interval, int):
            self.val_check_batch = self.val_check_interval
            if self.val_check_batch > self.num_training_batches:
                raise ValueError(
                    f'`val_check_interval` ({self.val_check_interval}) must be less than or equal '
                    f'to the number of the training batches ({self.num_training_batches}). '
                    'If you want to disable validation set `limit_val_batches` to 0.0 instead.')
        else:
            if not _has_len(self.train_dataloader):
                if self.val_check_interval == 1.0:
                    self.val_check_batch = float('inf')
                else:
                    raise MisconfigurationException(
                        'When using an IterableDataset for `train_dataloader`,'
                        ' `Trainer(val_check_interval)` must be `1.0` or an int. An int k specifies'
                        ' checking validation every k training batches.')
            else:
                self.val_check_batch = int(self.num_training_batches * self.val_check_interval)
                self.val_check_batch = max(1, self.val_check_batch)

    def _reset_eval_dataloader(
            self,
            model: LightningModule,
            mode: str
    ) -> Tuple[List[Union[int, float]], List[DataLoader]]:
        """Generic method to reset a dataloader for evaluation.
        Args:
            model: The current `LightningModule`
            mode: Either `'val'` or `'test'`
        Returns:
            Tuple (num_batches, dataloaders)
        """
        # use the training loader as val and test when overfitting
        if self.overfit_batches > 0:
            dataloaders = self.request_dataloader(getattr(model, 'train_dataloader'))
        else:
            dataloaders = self.request_dataloader(getattr(model, f'{mode}_dataloader'))

        if not isinstance(dataloaders, list):
            dataloaders = [dataloaders]

        for loader_i in range(len(dataloaders)):
            loader = dataloaders[loader_i]

            # shuffling in val and test set is bad practice
            if mode in ('val', 'test') and hasattr(loader, 'sampler') and isinstance(loader.sampler, RandomSampler):

                # when overfitting, the dataloader should not have sampler
                if self.overfit_batches > 0:
                    rank_zero_warn('You requested to overfit but enabled training dataloader shuffling.'
                                   ' We are turning it off for you.')
                    dataloaders[loader_i] = self.replace_sampler(loader, SequentialSampler(loader.dataset))
                else:
                    rank_zero_warn(f'Your {mode}_dataloader has `shuffle=True`, it is best practice to turn'
                                   ' this off for validation and test dataloaders.')

        if any([dl is None for dl in dataloaders]):
            rank_zero_warn("One of given dataloaders is None and it will be skipped.")

        # add samplers
        dataloaders = [self.auto_add_sampler(dl, train=False) for dl in dataloaders if dl is not None]

        loader_num_batches = []

        # determine number of batches
        # datasets could be none, 1 or 2+
        if len(dataloaders) != 0:
            for i, dataloader in enumerate(dataloaders):
                num_batches = len(dataloader) if _has_len(dataloader) else float('inf')
                self._worker_check(dataloader, f'{mode} dataloader {i}')

                # percent or num_steps
                limit_eval_batches = getattr(self, f'limit_{mode}_batches')

                # limit num batches either as a percent or num steps
                if isinstance(limit_eval_batches, int) or limit_eval_batches == 0.0:
                    num_batches = min(num_batches, int(limit_eval_batches))
                elif num_batches != float('inf'):
                    num_batches = int(num_batches * limit_eval_batches)
                elif limit_eval_batches != 1.0:
                    # NOTE(review): the first literal below lacks an f-prefix,
                    # so '{mode}' is emitted verbatim — confirm upstream intent.
                    raise MisconfigurationException(
                        'When using an IterableDataset for `limit_{mode}_batches`,'
                        f' `Trainer(limit_{mode}_batches)` must be `0.0`, `1.0` or an int. An int k specifies'
                        f' `num_{mode}_batches` to use.')

                if num_batches == 0 and limit_eval_batches > 0.0 and isinstance(limit_eval_batches, float):
                    min_pct = 1.0 / len(dataloader)
                    raise MisconfigurationException(
                        f'you requested to check {limit_eval_batches} of the {mode} dataloader but'
                        f' {limit_eval_batches}*{num_batches} = 0. Please increase the limit_{mode}_batches.'
                        f' Try at least limit_{mode}_batches={min_pct}'
                    )

                loader_num_batches.append(num_batches)

        return loader_num_batches, dataloaders

    def reset_val_dataloader(self, model: LightningModule) -> None:
        """Resets the validation dataloader and determines the number of batches.
        Args:
            model: The current `LightningModule`
        """
        has_loader = self.is_overridden('val_dataloader', model)
        has_step = self.is_overridden('validation_step', model)
        if has_loader and has_step:
            self.num_val_batches, self.val_dataloaders = self._reset_eval_dataloader(model, 'val')

    def reset_test_dataloader(self, model) -> None:
        """Resets the test dataloader and determines the number of batches.
        Args:
            model: The current `LightningModule`
        """
        has_loader = self.is_overridden('test_dataloader', model)
        has_step = self.is_overridden('test_step', model)
        if has_loader and has_step:
            self.num_test_batches, self.test_dataloaders =\
                self._reset_eval_dataloader(model, 'test')

    def request_dataloader(self, dataloader_fx: Callable) -> DataLoader:
        """Handles downloading data in the GPU or TPU case.
        Args:
            dataloader_fx: The bound dataloader getter
        Returns:
            The dataloader
        """
        dataloader = dataloader_fx()

        # get the function we'll use to get data
        if self.use_ddp or self.use_ddp2:
            # all processes wait until data download has happened
            torch_distrib.barrier()

        # data download/load on TPU
        elif self.use_tpu and XLA_AVAILABLE:
            # all processes wait until data download has happened
            torch_xla.core.xla_model.rendezvous('pl.TrainerDataLoadingMixin.get_dataloaders')

        elif self.use_horovod:
            # all processes wait until data download has happened
            hvd.join()

        return dataloader

    def determine_data_use_amount(self, overfit_batches: float) -> None:
        """Use less data for debugging purposes"""
        if overfit_batches > 0:
            self.limit_train_batches = overfit_batches
            self.limit_val_batches = overfit_batches
            self.limit_test_batches = overfit_batches
|
# coding=utf-8
# This is the configuration file for codeforces.com
#
# Usage:
#   1. Copy this file into the zmirror root directory (where wsgi.py lives)
#      and rename it to config.py
#   2. Change my_host_name to your own domain
#
# See the corresponding sections of config_default.py for a detailed
# description of each setting.
# This configuration file assumes your server itself is outside the firewall.
# If the server is inside the firewall (or you are testing locally), adjust
# the settings under `Proxy Settings`.
#
# Essentially full-featured.
# Github: https://github.com/aploium/zmirror

# ############## Local Domain Settings ##############
my_host_name = 'm1.codeforces.live'
my_host_scheme = 'https://'
my_host_port = None  # None means the default port; a non-standard port such as 81 also works

# ############## Target Domain Settings ##############
target_domain = 'm1.codeforces.com'
target_scheme = 'https://'

# Most of the domains below were collected automatically via
# `enable_automatic_domains_whitelist`; they were simply copy-pasted here.
# When mirroring a new site, only a handful of domains need to be added by
# hand — automatic collection (if enabled) keeps reporting new domains.
external_domains = (
    'assets.codeforces.com',
    'sta.codeforces.com',
    'userpic.codeforces.com',
    'espresso.codeforces.com',
    'www.codeforces.com',
    'codeforces.org',
    'mathjax.codeforces.org',
    'player.vimeo.com',
)

# Force HTTPS for all codeforces sites
force_https_domains = 'ALL'

# Automatically and dynamically add new domains to the whitelist
enable_automatic_domains_whitelist = True
domains_whitelist_auto_add_glob_list = ('*.codeforces.com','*.codeforces.org','*.wikipedia.org','*.codeforces.ru')

# ############## Proxy Settings ##############
# If you use this configuration inside the firewall, specify an external HTTP proxy
is_use_proxy = False
# For the proxy format and SOCKS proxies, see
# http://docs.python-requests.org/en/latest/user/advanced/#proxies
requests_proxies = dict(
    http='http://127.0.0.1:8123',
    https='https://127.0.0.1:8123',
)

custom_text_rewriter_enable = True
|
#!/usr/bin/env python3
# Copyright 2017-present, The Visdom Authors
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
"""Server"""
import argparse
import copy
import getpass
import hashlib
import inspect
import json
import jsonpatch
import logging
import math
import os
import sys
import time
import traceback
import uuid
import warnings
from os.path import expanduser
from collections import OrderedDict
try:
# for after python 3.8
from collections.abc import Mapping, Sequence
except ImportError:
# for python 3.7 and below
from collections import Mapping, Sequence
from zmq.eventloop import ioloop
ioloop.install() # Needs to happen before any tornado imports!
import tornado.ioloop # noqa E402: gotta install ioloop first
import tornado.web # noqa E402: gotta install ioloop first
import tornado.websocket # noqa E402: gotta install ioloop first
import tornado.escape # noqa E402: gotta install ioloop first
LAYOUT_FILE = 'layouts.json'  # file name used to persist window layouts
DEFAULT_ENV_PATH = '%s/.visdom/' % expanduser("~")
DEFAULT_PORT = 8097
DEFAULT_HOSTNAME = "localhost"
DEFAULT_BASE_URL = "/"
here = os.path.abspath(os.path.dirname(__file__))
COMPACT_SEPARATORS = (',', ':')  # json.dumps separators for compact output
_seen_warnings = set()  # messages already emitted by warn_once()
MAX_SOCKET_WAIT = 15  # seconds of socket silence tolerated by the wrap monitor
assert sys.version_info[0] >= 3, 'To use visdom with python 2, downgrade to v0.1.8.9'
def warn_once(msg, warningtype=None):
    """
    Raise a warning, but only once.
    :param str msg: Message to display
    :param Warning warningtype: Type of warning, e.g. DeprecationWarning
    """
    global _seen_warnings
    if msg in _seen_warnings:
        return
    _seen_warnings.add(msg)
    warnings.warn(msg, warningtype, stacklevel=2)
def check_auth(f):
    """Decorator for handler methods: stamps ``last_access`` and, when login
    is enabled but no user is authenticated, responds 400 instead of calling
    the wrapped handler."""
    def _check_auth(self, *args, **kwargs):
        self.last_access = time.time()
        authorized = (not self.login_enabled) or bool(self.current_user)
        if authorized:
            f(self, *args, **kwargs)
        else:
            self.set_status(400)
    return _check_auth
def get_rand_id():
    """Return a fresh random identifier (a UUID4 rendered as a string)."""
    return '{}'.format(uuid.uuid4())
def ensure_dir_exists(path):
    """Make sure the parent dir exists for path so we can write a file.

    Creates intermediate directories as needed; a no-op when `path` has no
    directory component or the directory already exists.
    """
    dirname = os.path.dirname(path)
    if dirname:
        # exist_ok avoids the check-then-create race. The original caught
        # OSError and asserted errno == 17 (EEXIST), which turned genuine
        # failures such as EACCES into an AssertionError and crashed on
        # paths with an empty dirname.
        os.makedirs(dirname, exist_ok=True)
def get_path(filename):
    """Get the path to an asset."""
    frame_file = inspect.getfile(inspect.currentframe())
    module_dir = os.path.dirname(os.path.abspath(frame_file))
    return os.path.join(module_dir, filename)
def escape_eid(eid):
    """Replace slashes with underscores, to avoid recognizing them
    as directories.
    """
    return '_'.join(eid.split('/'))
def extract_eid(args):
    """Extract eid from args. If eid does not exist in args,
    it returns 'main'."""
    eid = args.get('eid')
    if eid is None:
        eid = 'main'
    return escape_eid(eid)
def set_cookie(value=None):
    """Create cookie secret key for authentication"""
    # Fall back to an interactive prompt when no secret is supplied.
    cookie_secret = value if value is not None else input(
        "Please input your cookie secret key here: ")
    with open(DEFAULT_ENV_PATH + "COOKIE_SECRET", "w") as cookie_file:
        cookie_file.write(cookie_secret)
def hash_password(password):
    """Hashing Password with SHA-256"""
    digest = hashlib.sha256(password.encode("utf-8"))
    return digest.hexdigest()
# Base Tornado application settings; static assets and templates are both
# served from the bundled `static` directory.
tornado_settings = {
    "autoescape": None,
    "debug": "/dbg/" in __file__,
    "static_path": get_path('static'),
    "template_path": get_path('static'),
    "compiled_template_cache": False
}
def serialize_env(state, eids, env_path=DEFAULT_ENV_PATH):
    """Write each requested environment's state to `<env_path>/<eid>.json`.

    Ids not present in `state` are ignored; nothing is written when
    `env_path` is None. Returns the list of ids that exist in `state`.
    """
    known_ids = [eid for eid in eids if eid in state]
    if env_path is not None:
        for eid in known_ids:
            target = os.path.join(env_path, "{0}.json".format(eid))
            with open(target, 'w') as fn:
                fn.write(json.dumps(state[eid]))
    return known_ids
def serialize_all(state, env_path=DEFAULT_ENV_PATH):
    """Persist every environment currently held in `state` to disk."""
    all_ids = list(state.keys())
    serialize_env(state, all_ids, env_path=env_path)
class Application(tornado.web.Application):
    """Tornado application holding the visdom server state (environments,
    layouts, connected sockets) and the URL routing table."""

    def __init__(self, port=DEFAULT_PORT, base_url='',
                 env_path=DEFAULT_ENV_PATH, readonly=False,
                 user_credential=None, use_frontend_client_polling=False):
        self.env_path = env_path
        self.state = self.load_state()          # env id -> {'jsons', 'reload'}
        self.layouts = self.load_layouts()      # raw layout file contents
        self.subs = {}                          # web-client sockets by sid
        self.sources = {}                       # visdom-client sockets by sid
        self.port = port
        self.base_url = base_url
        self.readonly = readonly
        self.user_credential = user_credential
        self.login_enabled = False
        self.last_access = time.time()
        self.wrap_socket = use_frontend_client_polling

        if user_credential:
            # Enable auth; the cookie secret must already exist on disk.
            self.login_enabled = True
            with open(DEFAULT_ENV_PATH + "COOKIE_SECRET", "r") as fn:
                tornado_settings["cookie_secret"] = fn.read()

        tornado_settings['static_url_prefix'] = self.base_url + "/static/"
        tornado_settings['debug'] = True
        handlers = [
            (r"%s/events" % self.base_url, PostHandler, {'app': self}),
            (r"%s/update" % self.base_url, UpdateHandler, {'app': self}),
            (r"%s/close" % self.base_url, CloseHandler, {'app': self}),
            (r"%s/socket" % self.base_url, SocketHandler, {'app': self}),
            (r"%s/socket_wrap" % self.base_url, SocketWrap, {'app': self}),
            (r"%s/vis_socket" % self.base_url,
                VisSocketHandler, {'app': self}),
            (r"%s/vis_socket_wrap" % self.base_url,
                VisSocketWrap, {'app': self}),
            (r"%s/env/(.*)" % self.base_url, EnvHandler, {'app': self}),
            (r"%s/compare/(.*)" % self.base_url,
                CompareHandler, {'app': self}),
            (r"%s/save" % self.base_url, SaveHandler, {'app': self}),
            (r"%s/error/(.*)" % self.base_url, ErrorHandler, {'app': self}),
            (r"%s/win_exists" % self.base_url, ExistsHandler, {'app': self}),
            (r"%s/win_data" % self.base_url, DataHandler, {'app': self}),
            (r"%s/delete_env" % self.base_url,
                DeleteEnvHandler, {'app': self}),
            (r"%s/win_hash" % self.base_url, HashHandler, {'app': self}),
            (r"%s/env_state" % self.base_url, EnvStateHandler, {'app': self}),
            (r"%s/fork_env" % self.base_url, ForkEnvHandler, {'app': self}),
            (r"%s(.*)" % self.base_url, IndexHandler, {'app': self}),
        ]
        super(Application, self).__init__(handlers, **tornado_settings)

    def get_last_access(self):
        """Return the time of last activity, refreshing it first if any
        client is currently connected."""
        if len(self.subs) > 0 or len(self.sources) > 0:
            # update the last access time to now, as someone
            # is currently connected to the server
            self.last_access = time.time()
        return self.last_access

    def save_layouts(self):
        """Persist `self.layouts` to `<env_path>/view/layouts.json`;
        no-op (with a one-time warning) when env_path is None."""
        if self.env_path is None:
            warn_once(
                'Saving and loading to disk has no effect when running with '
                'env_path=None.',
                RuntimeWarning
            )
            return
        layout_filepath = os.path.join(self.env_path, 'view', LAYOUT_FILE)
        with open(layout_filepath, 'w') as fn:
            fn.write(self.layouts)

    def load_layouts(self):
        """Read the persisted layouts file, returning '' when it does not
        exist or when env_path is None."""
        if self.env_path is None:
            warn_once(
                'Saving and loading to disk has no effect when running with '
                'env_path=None.',
                RuntimeWarning
            )
            return ""
        layout_filepath = os.path.join(self.env_path, 'view', LAYOUT_FILE)
        ensure_dir_exists(layout_filepath)
        if os.path.isfile(layout_filepath):
            with open(layout_filepath, 'r') as fn:
                return fn.read()
        else:
            return ""

    def load_state(self):
        """Load every `<env_path>/*.json` environment from disk, skipping
        unreadable files, and guarantee a 'main' environment exists."""
        state = {}
        env_path = self.env_path
        if env_path is None:
            warn_once(
                'Saving and loading to disk has no effect when running with '
                'env_path=None.',
                RuntimeWarning
            )
            return {'main': {'jsons': {}, 'reload': {}}}
        ensure_dir_exists(env_path)
        env_jsons = [i for i in os.listdir(env_path) if '.json' in i]

        for env_json in env_jsons:
            env_path_file = os.path.join(env_path, env_json)
            try:
                with open(env_path_file, 'r') as fn:
                    env_data = tornado.escape.json_decode(fn.read())
            except Exception as e:
                # Corrupt/unreadable env files are skipped, not fatal.
                logging.warn(
                    "Failed loading environment json: {} - {}".format(
                        env_path_file, repr(e)))
                continue

            eid = env_json.replace('.json', '')
            state[eid] = {'jsons': env_data['jsons'],
                          'reload': env_data['reload']}

        if 'main' not in state and 'main.json' not in env_jsons:
            state['main'] = {'jsons': {}, 'reload': {}}
            serialize_env(state, ['main'], env_path=self.env_path)

        return state
def broadcast_envs(handler, target_subs=None):
    """Push the current list of environment ids to subscriber sockets.

    When `target_subs` is None, every subscriber of the handler is notified.
    """
    recipients = handler.subs.values() if target_subs is None else target_subs
    for sub in recipients:
        payload = {'command': 'env_update', 'data': list(handler.state.keys())}
        sub.write_message(json.dumps(payload))
def send_to_sources(handler, msg):
    """Forward `msg` (JSON-serialised) to every registered source socket."""
    encoded = json.dumps(msg)
    for source in handler.sources.values():
        source.write_message(encoded)
class BaseWebSocketHandler(tornado.websocket.WebSocketHandler):
    """Common websocket base: resolves the authenticated user from the
    secure cookie set by IndexHandler."""

    def get_current_user(self):
        """
        This method determines the self.current_user
        based on the value of cookies that are set in the POST method
        at IndexHandler by self.set_secure_cookie
        """
        try:
            return self.get_secure_cookie("user_password")
        except Exception:  # Not using secure cookies
            return None
class VisSocketHandler(BaseWebSocketHandler):
    """Websocket endpoint used by visdom *clients* (the Python API side),
    registered in `app.sources`."""

    def initialize(self, app):
        # Share the application's state containers rather than copying them.
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled

    def check_origin(self, origin):
        # Accept cross-origin websocket connections.
        return True

    def open(self):
        # Reject unauthenticated connections when login is enabled.
        if self.login_enabled and not self.current_user:
            self.close()
            return
        # sid derived from the current time; used as the key in `sources`.
        self.sid = str(hex(int(time.time() * 10000000))[2:])
        if self not in list(self.sources.values()):
            self.eid = 'main'
            self.sources[self.sid] = self
        logging.info('Opened visdom socket from ip: {}'.format(
            self.request.remote_ip))

        self.write_message(
            json.dumps({'command': 'alive', 'data': 'vis_alive'}))

    def on_message(self, message):
        logging.info('from visdom client: {}'.format(message))
        msg = tornado.escape.json_decode(tornado.escape.to_basestring(message))

        cmd = msg.get('cmd')
        if cmd == 'echo':
            # Echo the message back to every connected source socket.
            for sub in self.sources.values():
                sub.write_message(json.dumps(msg))

    def on_close(self):
        if self in list(self.sources.values()):
            self.sources.pop(self.sid, None)
class VisSocketWrapper():
    """Polling-based stand-in for VisSocketHandler: mimics the websocket
    interface but buffers outgoing messages for the client to fetch, and
    runs a periodic monitor that closes idle wrappers."""

    def __init__(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
        self.app = app
        self.messages = []          # outgoing buffer drained by get_messages()
        self.last_read_time = time.time()
        self.open()
        # Lazily create the shared idle-socket monitor on the app the first
        # time a wrapper is constructed.
        try:
            if not self.app.socket_wrap_monitor.is_running():
                self.app.socket_wrap_monitor.start()
        except AttributeError:
            self.app.socket_wrap_monitor = tornado.ioloop.PeriodicCallback(
                self.socket_wrap_monitor_thread, 15000
            )
            self.app.socket_wrap_monitor.start()

    # TODO refactor the two socket wrappers into a wrapper class
    def socket_wrap_monitor_thread(self):
        # Close any sub/source that has not been read within MAX_SOCKET_WAIT
        # seconds; stop the monitor entirely once nothing is connected.
        if len(self.subs) > 0 or len(self.sources) > 0:
            for sub in list(self.subs.values()):
                if time.time() - sub.last_read_time > MAX_SOCKET_WAIT:
                    sub.close()
            for sub in list(self.sources.values()):
                if time.time() - sub.last_read_time > MAX_SOCKET_WAIT:
                    sub.close()
        else:
            self.app.socket_wrap_monitor.stop()

    def open(self):
        # NOTE(review): unlike the real handlers, this class does not define
        # `current_user`; with login enabled this line looks like it would
        # raise AttributeError — confirm against the full file.
        if self.login_enabled and not self.current_user:
            print("AUTH Failed in SocketHandler")
            self.close()
            return
        self.sid = get_rand_id()
        if self not in list(self.sources.values()):
            self.eid = 'main'
            self.sources[self.sid] = self
        logging.info('Mocking visdom socket: {}'.format(self.sid))

        self.write_message(
            json.dumps({'command': 'alive', 'data': 'vis_alive'}))

    def on_message(self, message):
        logging.info('from visdom client: {}'.format(message))
        msg = tornado.escape.json_decode(tornado.escape.to_basestring(message))

        cmd = msg.get('cmd')
        if cmd == 'echo':
            for sub in self.sources.values():
                sub.write_message(json.dumps(msg))

    def close(self):
        if self in list(self.sources.values()):
            self.sources.pop(self.sid, None)

    def write_message(self, msg):
        # Buffer instead of sending; the polling client drains the buffer.
        self.messages.append(msg)

    def get_messages(self):
        to_send = []
        while len(self.messages) > 0:
            message = self.messages.pop()
            if isinstance(message, dict):
                # Not all messages are being formatted the same way (JSON)
                # TODO investigate
                message = json.dumps(message)
            to_send.append(message)
        self.last_read_time = time.time()
        return to_send
class SocketHandler(BaseWebSocketHandler):
    """WebSocket endpoint for browser (web client) connections."""

    def initialize(self, app):
        # Share the application's state containers rather than copying them.
        self.port = app.port
        self.env_path = app.env_path
        self.app = app
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.broadcast_layouts()
        self.readonly = app.readonly
        self.login_enabled = app.login_enabled

    def check_origin(self, origin):
        # Accept websocket connections from any origin.
        return True

    def broadcast_layouts(self, target_subs=None):
        """Push the saved layouts to target_subs (default: all subscribers)."""
        if target_subs is None:
            target_subs = self.subs.values()
        for sub in target_subs:
            sub.write_message(json.dumps(
                {'command': 'layout_update', 'data': self.app.layouts}
            ))

    def open(self):
        """Register the new connection and send its initial state."""
        if self.login_enabled and not self.current_user:
            print("AUTH Failed in SocketHandler")
            self.close()
            return
        self.sid = get_rand_id()
        if self not in list(self.subs.values()):
            self.eid = 'main'
            self.subs[self.sid] = self
        logging.info(
            'Opened new socket from ip: {}'.format(self.request.remote_ip))
        self.write_message(
            json.dumps({'command': 'register', 'data': self.sid,
                        'readonly': self.readonly}))
        self.broadcast_layouts([self])
        broadcast_envs(self, [self])

    def on_message(self, message):
        """Dispatch a command sent by the web client."""
        logging.info('from web client: {}'.format(message))
        msg = tornado.escape.json_decode(tornado.escape.to_basestring(message))
        cmd = msg.get('cmd')
        if self.readonly:
            # Read-only servers ignore every mutating command.
            return
        if cmd == 'close':
            if 'data' in msg and 'eid' in msg:
                logging.info('closing window {}'.format(msg['data']))
                p_data = self.state[msg['eid']]['jsons'].pop(msg['data'], None)
                event = {
                    'event_type': 'close',
                    'target': msg['data'],
                    'eid': msg['eid'],
                    'pane_data': p_data,
                }
                send_to_sources(self, event)
        elif cmd == 'save':
            # save localStorage window metadata
            if 'data' in msg and 'eid' in msg:
                msg['eid'] = escape_eid(msg['eid'])
                self.state[msg['eid']] = \
                    copy.deepcopy(self.state[msg['prev_eid']])
                self.state[msg['eid']]['reload'] = msg['data']
                self.eid = msg['eid']
                serialize_env(self.state, [self.eid], env_path=self.env_path)
        elif cmd == 'delete_env':
            if 'eid' in msg:
                logging.info('closing environment {}'.format(msg['eid']))
                del self.state[msg['eid']]
                if self.env_path is not None:
                    # Remove the serialized copy on disk as well.
                    p = os.path.join(
                        self.env_path,
                        "{0}.json".format(msg['eid'])
                    )
                    os.remove(p)
                broadcast_envs(self)
        elif cmd == 'save_layouts':
            if 'data' in msg:
                self.app.layouts = msg.get('data')
                self.app.save_layouts()
                self.broadcast_layouts()
        elif cmd == 'forward_to_vis':
            packet = msg.get('data')
            environment = self.state[packet['eid']]
            # pane_data False means the client explicitly opted out of
            # attaching the pane's JSON.
            if packet.get('pane_data') is not False:
                packet['pane_data'] = environment['jsons'][packet['target']]
            send_to_sources(self, msg.get('data'))
        elif cmd == 'layout_item_update':
            eid = msg.get('eid')
            win = msg.get('win')
            self.state[eid]['reload'][win] = msg.get('data')
        elif cmd == 'pop_embeddings_pane':
            # Restore the previous (cached) point set of an embeddings pane.
            packet = msg.get('data')
            eid = packet['eid']
            win = packet['target']
            p = self.state[eid]['jsons'][win]
            p['content']['selected'] = None
            p['content']['data'] = p['old_content'].pop()
            if len(p['old_content']) == 0:
                p['content']['has_previous'] = False
            p['contentID'] = get_rand_id()
            broadcast(self, p, eid)

    def on_close(self):
        # Deregister this subscriber on disconnect.
        if self in list(self.subs.values()):
            self.subs.pop(self.sid, None)
# TODO condense some of the functionality between this class and the
# original SocketHandler class
class ClientSocketWrapper():
    """
    Wraps all of the socket actions in regular request handling, thus
    allowing all of the same information to be sent via a polling interface
    """

    def __init__(self, app):
        self.port = app.port
        self.env_path = app.env_path
        self.app = app
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.readonly = app.readonly
        self.login_enabled = app.login_enabled
        self.messages = []
        self.last_read_time = time.time()
        self.open()
        # Start (or restart) the periodic reaper that closes wrappers whose
        # clients stopped polling.
        try:
            if not self.app.socket_wrap_monitor.is_running():
                self.app.socket_wrap_monitor.start()
        except AttributeError:
            # First wrapper ever created: the monitor doesn't exist yet.
            self.app.socket_wrap_monitor = tornado.ioloop.PeriodicCallback(
                self.socket_wrap_monitor_thread, 15000
            )
            self.app.socket_wrap_monitor.start()

    def socket_wrap_monitor_thread(self):
        """Close wrapped connections whose clients stopped polling."""
        # TODO mark wrapped subs and sources separately
        if len(self.subs) > 0 or len(self.sources) > 0:
            for sub in list(self.subs.values()):
                if time.time() - sub.last_read_time > MAX_SOCKET_WAIT:
                    sub.close()
            for sub in list(self.sources.values()):
                if time.time() - sub.last_read_time > MAX_SOCKET_WAIT:
                    sub.close()
        else:
            # Nothing left to monitor; stop the periodic callback.
            self.app.socket_wrap_monitor.stop()

    def broadcast_layouts(self, target_subs=None):
        """Push the saved layouts to target_subs (default: all subscribers)."""
        if target_subs is None:
            target_subs = self.subs.values()
        for sub in target_subs:
            sub.write_message(json.dumps(
                {'command': 'layout_update', 'data': self.app.layouts}
            ))

    def open(self):
        """Register this wrapper in the subscriber map and send the same
        register/layout/env bootstrap messages SocketHandler.open sends."""
        self.sid = get_rand_id()
        if self not in list(self.subs.values()):
            self.eid = 'main'
            self.subs[self.sid] = self
        logging.info('Mocking new socket: {}'.format(self.sid))
        self.write_message(
            json.dumps({'command': 'register', 'data': self.sid,
                        'readonly': self.readonly}))
        self.broadcast_layouts([self])
        broadcast_envs(self, [self])

    def on_message(self, message):
        """Dispatch a command sent by the (polling) web client."""
        logging.info('from web client: {}'.format(message))
        msg = tornado.escape.json_decode(tornado.escape.to_basestring(message))
        cmd = msg.get('cmd')
        if self.readonly:
            # Read-only servers ignore every mutating command.
            return
        if cmd == 'close':
            if 'data' in msg and 'eid' in msg:
                logging.info('closing window {}'.format(msg['data']))
                p_data = self.state[msg['eid']]['jsons'].pop(msg['data'], None)
                event = {
                    'event_type': 'close',
                    'target': msg['data'],
                    'eid': msg['eid'],
                    'pane_data': p_data,
                }
                send_to_sources(self, event)
        elif cmd == 'save':
            # save localStorage window metadata
            if 'data' in msg and 'eid' in msg:
                msg['eid'] = escape_eid(msg['eid'])
                self.state[msg['eid']] = \
                    copy.deepcopy(self.state[msg['prev_eid']])
                self.state[msg['eid']]['reload'] = msg['data']
                self.eid = msg['eid']
                serialize_env(self.state, [self.eid], env_path=self.env_path)
        elif cmd == 'delete_env':
            if 'eid' in msg:
                logging.info('closing environment {}'.format(msg['eid']))
                del self.state[msg['eid']]
                if self.env_path is not None:
                    p = os.path.join(
                        self.env_path,
                        "{0}.json".format(msg['eid'])
                    )
                    os.remove(p)
                broadcast_envs(self)
        elif cmd == 'save_layouts':
            if 'data' in msg:
                self.app.layouts = msg.get('data')
                self.app.save_layouts()
                self.broadcast_layouts()
        elif cmd == 'forward_to_vis':
            packet = msg.get('data')
            environment = self.state[packet['eid']]
            # Consistency fix: honor an explicit pane_data=False opt-out the
            # same way SocketHandler.on_message does. Previously pane_data
            # was always overwritten, which discarded the opt-out and raised
            # a KeyError when the target window was absent.
            if packet.get('pane_data') is not False:
                packet['pane_data'] = environment['jsons'][packet['target']]
            send_to_sources(self, msg.get('data'))
        elif cmd == 'layout_item_update':
            eid = msg.get('eid')
            win = msg.get('win')
            self.state[eid]['reload'][win] = msg.get('data')

    def close(self):
        """Deregister this wrapper from the subscriber map."""
        if self in list(self.subs.values()):
            self.subs.pop(self.sid, None)

    def write_message(self, msg):
        """Queue a message for the client to collect via get_messages()."""
        self.messages.append(msg)

    def get_messages(self):
        """Drain and return all queued messages, JSON-encoding any dicts.

        Refreshes last_read_time so the monitor does not reap the wrapper.
        """
        to_send = []
        while len(self.messages) > 0:
            message = self.messages.pop()
            if isinstance(message, dict):
                # Not all messages are being formatted the same way (JSON)
                # TODO investigate
                message = json.dumps(message)
            to_send.append(message)
        self.last_read_time = time.time()
        return to_send
class BaseHandler(tornado.web.RequestHandler):
    """Common base for visdom HTTP handlers: auth cookie + error rendering."""

    def __init__(self, *request, **kwargs):
        self.include_host = False
        super(BaseHandler, self).__init__(*request, **kwargs)

    def get_current_user(self):
        """
        This method determines the self.current_user
        based the value of cookies that set in POST method
        at IndexHandler by self.set_secure_cookie
        """
        try:
            return self.get_secure_cookie("user_password")
        except Exception:  # Not using secure cookies
            return None

    def write_error(self, status_code, **kwargs):
        """Log the failure; in debug mode also render a traceback page."""
        logging.error("ERROR: %s: %s" % (status_code, kwargs))
        if "exc_info" in kwargs:
            logging.info('Traceback: {}'.format(
                traceback.format_exception(*kwargs["exc_info"])))
        if self.settings.get("debug") and "exc_info" in kwargs:
            logging.error("rendering error page")
            exc_info = kwargs["exc_info"]
            # exc_info is a tuple consisting of:
            # 1. The class of the Exception
            # 2. The actual Exception that was thrown
            # 3. The traceback object
            try:
                params = {
                    'error': exc_info[1],
                    'trace_info': traceback.format_exception(*exc_info),
                    'request': self.request.__dict__
                }
                self.render("error.html", **params)
                logging.error("rendering complete")
            except Exception as e:
                # Rendering the error page itself failed; log and give up.
                logging.error(e)
def update_window(p, args):
    """Merge non-None layout entries and opts from ``args`` into pane ``p``.

    Layout values go into ``p['content']['layout']``; every other opt becomes
    a top-level key of ``p``. When a legend is supplied, the traces in
    ``p['content']['data']`` are renamed positionally.

    Returns the (mutated) pane dict.
    """
    content = p['content']
    for key, value in args.get('layout', {}).items():
        if value is not None:
            content['layout'][key] = value
    opts = args.get('opts', {})
    for key, value in opts.items():
        if value is not None:
            p[key] = value
    if 'legend' in opts:
        legend = opts['legend']
        for index, trace in enumerate(p['content']['data']):
            trace['name'] = legend[index]
    return p
def window(args):
    """Build the pane (window) dict structure that the client renders.

    A fresh 'contentID' is generated on every build so clients can detect
    that a window's content changed. The pane layout depends on the type of
    the first data element ('image_history', 'image', 'text', 'properties',
    'embeddings', or a generic plot).
    """
    uid = args.get('win', 'window_' + get_rand_id())
    if uid is None:
        uid = 'window_' + get_rand_id()
    opts = args.get('opts', {})
    ptype = args['data'][0]['type']
    p = {
        'command': 'window',
        'id': str(uid),
        'title': opts.get('title', ''),
        'inflate': opts.get('inflate', True),
        'width': opts.get('width'),
        'height': opts.get('height'),
        'contentID': get_rand_id(),  # to detected updated windows
    }
    if ptype == 'image_history':
        p['content'] = [args['data'][0]['content']]
        p['selected'] = 0
        p['type'] = ptype
        p['show_slider'] = opts.get('show_slider', True)
    elif ptype in ('image', 'text', 'properties'):
        p['content'] = args['data'][0]['content']
        p['type'] = ptype
    elif ptype == 'embeddings':
        p['content'] = args['data'][0]['content']
        p['type'] = ptype
        # Used to cache previous to prevent recompute
        p['old_content'] = []
        p['content']['has_previous'] = False
    else:
        # Generic plot pane.
        p['content'] = {'data': args['data'], 'layout': args['layout']}
        p['type'] = 'plot'
    return p
def broadcast(self, msg, eid):
    """Send ``msg`` to every subscriber watching environment ``eid``.

    A subscriber's ``eid`` attribute is either a single environment id or a
    dict of ids (used when comparing several envs); both forms are matched.
    """
    for sub in self.subs.values():
        watched = sub.eid
        if isinstance(watched, dict):
            interested = eid in watched
        else:
            interested = watched == eid
        if interested:
            sub.write_message(msg)
def register_window(self, p, eid):
    """Store pane ``p`` under environment ``eid`` and notify clients.

    Creates the environment on first use, preserves a replaced pane's
    ordering index ``i``, broadcasts the pane (and, for a brand-new env,
    the env list), then writes the pane id back to the requester.
    """
    is_new_env = eid not in self.state
    if is_new_env:
        self.state[eid] = {'jsons': {}, 'reload': {}}
    panes = self.state[eid]['jsons']
    if p['id'] in panes:
        # Keep the original ordering slot when overwriting a pane.
        p['i'] = panes[p['id']]['i']
    else:
        p['i'] = len(panes)
    panes[p['id']] = p
    broadcast(self, p, eid)
    if is_new_env:
        broadcast_envs(self)
    self.write(p['id'])
class PostHandler(BaseHandler):
    """Main POST endpoint: builds a new window from a client request."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
        # Dispatch table of sibling endpoint classes.
        # NOTE(review): populated but not read within this class — presumably
        # consumed elsewhere or vestigial; confirm before removing.
        self.handlers = {
            'update': UpdateHandler,
            'save': SaveHandler,
            'close': CloseHandler,
            'win_exists': ExistsHandler,
            'delete_env': DeleteEnvHandler,
        }

    @check_auth
    def post(self):
        req = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        # Lua client support was removed; fail loudly on a Lua-style request.
        if req.get('func') is not None:
            raise Exception(
                'Support for Lua Torch was deprecated following `v0.1.8.4`. '
                "If you'd like to use torch support, you'll need to download "
                "that release. You can follow the usage instructions there, "
                "but it is no longer officially supported."
            )
        eid = extract_eid(req)
        p = window(req)
        register_window(self, p, eid)
class ExistsHandler(BaseHandler):
    """POST endpoint answering whether a window exists in an environment."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        # Respond with the literal strings 'true'/'false' (client parses text).
        eid = extract_eid(args)
        if args['win'] in handler.state[eid]['jsons']:
            handler.write('true')
        else:
            handler.write('false')

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
def order_by_key(kv):
    """Return the key of a ``(key, value)`` pair, for use as a sort key."""
    key, _value = kv
    return key
# Based on json-stable-stringify-python from @haochi with some usecase modifications
def recursive_order(node):
    """Return a copy of ``node`` with every mapping key-sorted, recursively.

    Integral floats are normalized to ints so equivalent payloads stringify
    identically; strings and bytes pass through untouched.
    """
    if isinstance(node, Mapping):
        ordered = OrderedDict(sorted(node.items(), key=lambda item: item[0]))
        for key in ordered:
            ordered[key] = recursive_order(ordered[key])
        return ordered
    if isinstance(node, Sequence) and not isinstance(node, (str, bytes)):
        return [recursive_order(item) for item in node]
    if isinstance(node, float) and node.is_integer():
        return int(node)
    return node
def stringify(node):
    """Canonical JSON string for ``node``: keys sorted, compact separators."""
    return json.dumps(recursive_order(node), separators=COMPACT_SEPARATORS)
def hash_md_window(window_json):
    """MD5 hex digest of a window's canonical JSON (cheap change detection)."""
    json_string = stringify(window_json).encode("utf-8")
    return hashlib.md5(json_string).hexdigest()
class UpdateHandler(BaseHandler):
    """POST endpoint that applies incremental updates to an existing pane."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled

    @staticmethod
    def update_packet(p, args):
        """Apply ``args`` to pane ``p``; return (updated pane, json patch)."""
        old_p = copy.deepcopy(p)
        p = UpdateHandler.update(p, args)
        p['contentID'] = get_rand_id()
        # TODO: make_patch isn't high performance.
        # If bottlenecked we should build the patch ourselves.
        patch = jsonpatch.make_patch(old_p, p)
        return p, patch.patch

    @staticmethod
    def update(p, args):
        """Merge update ``args`` into pane ``p`` (type-dependent); return p."""
        # Update text in window, separated by a line break
        if p['type'] == 'text':
            p['content'] += "<br>" + args['data'][0]['content']
            return p
        if p['type'] == 'embeddings':
            # TODO embeddings updates should be handled outside of the regular
            # update flow, as update packets are easy to create manually and
            # expensive to calculate otherwise
            if args['data']['update_type'] == 'EntitySelected':
                p['content']['selected'] = args['data']['selected']
            elif args['data']['update_type'] == 'RegionSelected':
                # Cache the current points so the client can pop back to them.
                p['content']['selected'] = None
                print(len(p['content']['data']))
                p['old_content'].append(p['content']['data'])
                p['content']['has_previous'] = True
                p['content']['data'] = args['data']['points']
                print(len(p['content']['data']))
            return p
        if p['type'] == 'image_history':
            utype = args['data'][0]['type']
            if utype == 'image_history':
                p['content'].append(args['data'][0]['content'])
                p['selected'] = len(p['content']) - 1
            elif utype == 'image_update_selected':
                # TODO implement python client function for this
                # Bound the update to within the dims of the array
                selected = args['data']
                selected_not_neg = max(0, selected)
                selected_exists = min(len(p['content'])-1, selected_not_neg)
                p['selected'] = selected_exists
            return p
        # Generic plot update below.
        pdata = p['content']['data']
        new_data = args.get('data')
        p = update_window(p, args)
        name = args.get('name')
        if name is None and new_data is None:
            return p  # we only updated the opts or layout
        append = args.get('append')
        idxs = list(range(len(pdata)))
        if name is not None:
            assert len(new_data) == 1 or args.get('delete')
            idxs = [i for i in idxs if pdata[i]['name'] == name]
        # Delete a trace
        # NOTE(review): deleting several indices in ascending order shifts the
        # later ones; reversed(idxs) would be safer if multiple traces can
        # share a name — confirm.
        if args.get('delete'):
            for idx in idxs:
                del pdata[idx]
            return p
        # inject new trace
        if len(idxs) == 0:
            idx = len(pdata)
            pdata.append(dict(pdata[0]))  # plot is not empty, clone an entry
            idxs = [idx]
            append = False
            pdata[idx] = new_data[0]
            for k, v in new_data[0].items():
                pdata[idx][k] = v
            pdata[idx]['name'] = name
            return p
        # Update traces
        for n, idx in enumerate(idxs):
            # BUG FIX: test ``i is None`` before math.isnan — the previous
            # order called math.isnan(None) and raised a TypeError whenever
            # an update contained null x-values.
            if all(i is None or math.isnan(i) for i in new_data[n]['x']):
                continue
            # handle data for plotting
            for axis in ['x', 'y']:
                pdata[idx][axis] = (pdata[idx][axis] + new_data[n][axis]) \
                    if append else new_data[n][axis]
            # handle marker properties
            if 'marker' not in new_data[n]:
                continue
            if 'marker' not in pdata[idx]:
                pdata[idx]['marker'] = {}
            pdata_marker = pdata[idx]['marker']
            for marker_prop in ['color']:
                if marker_prop not in new_data[n]['marker']:
                    continue
                if marker_prop not in pdata[idx]['marker']:
                    pdata[idx]['marker'][marker_prop] = []
                pdata_marker[marker_prop] = (
                    pdata_marker[marker_prop] +
                    new_data[n]['marker'][marker_prop]) if append else \
                    new_data[n]['marker'][marker_prop]
        return p

    @staticmethod
    def wrap_func(handler, args):
        """Validate the target window/type, apply the update, then broadcast
        whichever of the full pane and the JSON patch is smaller."""
        eid = extract_eid(args)
        if args['win'] not in handler.state[eid]['jsons']:
            # Append to a window that doesn't exist attempts to create
            # that window
            append = args.get('append')
            if append:
                p = window(args)
                register_window(handler, p, eid)
            else:
                handler.write('win does not exist')
            return
        p = handler.state[eid]['jsons'][args['win']]
        if not (p['type'] == 'text' or p['type'] == 'image_history'
                or p['type'] == 'embeddings'
                or p['content']['data'][0]['type'] in
                ['scatter', 'scattergl', 'custom']):
            handler.write(
                'win is not scatter, custom, image_history, embeddings, or text; '
                'was {}'.format(p['content']['data'][0]['type']))
            return
        p, diff_packet = UpdateHandler.update_packet(p, args)
        # send the smaller of the patch and the updated pane
        if len(stringify(p)) <= len(stringify(diff_packet)):
            broadcast(handler, p, eid)
        else:
            hashed = hash_md_window(p)
            broadcast_packet = {
                'command': 'window_update',
                'win': args['win'],
                'env': eid,
                'content': diff_packet,
                'finalHash': hashed
            }
            broadcast(handler, broadcast_packet, eid)
        handler.write(p['id'])

    @check_auth
    def post(self):
        if self.login_enabled and not self.current_user:
            self.set_status(400)
            return
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
class CloseHandler(BaseHandler):
    """POST endpoint that closes one window — or all windows — of an env."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        eid = extract_eid(args)
        win = args.get('win')
        # win=None means "close every window in the environment".
        keys = \
            list(handler.state[eid]['jsons'].keys()) if win is None else [win]
        for win in keys:
            handler.state[eid]['jsons'].pop(win, None)
            broadcast(
                handler, json.dumps({'command': 'close', 'data': win}), eid
            )

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
class SocketWrap(BaseHandler):
    """HTTP polling fallback for the web-client socket.

    GET creates a ClientSocketWrapper and returns its sid; POST either
    queries queued messages or forwards a message to the wrapper.
    """

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
        self.app = app

    @check_auth
    def post(self):
        """Either write a message to the socket, or query what's there"""
        # TODO formalize failure reasons
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        type = args.get('message_type')
        sid = args.get('sid')
        socket_wrap = self.subs.get(sid)
        # ensure a wrapper still exists for this connection
        if socket_wrap is None:
            self.write(json.dumps({'success': False, 'reason': 'closed'}))
            return
        # handle the requests
        if type == 'query':
            messages = socket_wrap.get_messages()
            self.write(json.dumps({
                'success': True, 'messages': messages
            }))
        elif type == 'send':
            msg = args.get('message')
            if msg is None:
                self.write(json.dumps({'success': False, 'reason': 'no msg'}))
            else:
                socket_wrap.on_message(msg)
                self.write(json.dumps({'success': True}))
        else:
            self.write(json.dumps({'success': False, 'reason': 'invalid'}))

    @check_auth
    def get(self):
        """Create a new socket wrapper for this requester, return the id"""
        new_sub = ClientSocketWrapper(self.app)
        self.write(json.dumps({'success': True, 'sid': new_sub.sid}))
# TODO refactor socket wrappers to one class
class VisSocketWrap(BaseHandler):
    """Polling fallback for visdom *client* (source) connections.

    Unlike SocketWrap, a POST without a sid creates the wrapper.
    """

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
        self.app = app

    @check_auth
    def post(self):
        """Either write a message to the socket, or query what's there"""
        # TODO formalize failure reasons
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        type = args.get('message_type')
        sid = args.get('sid')
        # No sid yet: create a new wrapped source connection, return its id.
        if sid is None:
            new_sub = VisSocketWrapper(self.app)
            self.write(json.dumps({'success': True, 'sid': new_sub.sid}))
            return
        socket_wrap = self.sources.get(sid)
        # ensure a wrapper still exists for this connection
        if socket_wrap is None:
            self.write(json.dumps({'success': False, 'reason': 'closed'}))
            return
        # handle the requests
        if type == 'query':
            messages = socket_wrap.get_messages()
            self.write(json.dumps({
                'success': True, 'messages': messages
            }))
        elif type == 'send':
            msg = args.get('message')
            if msg is None:
                self.write(json.dumps({'success': False, 'reason': 'no msg'}))
            else:
                socket_wrap.on_message(msg)
                self.write(json.dumps({'success': True}))
        else:
            self.write(json.dumps({'success': False, 'reason': 'invalid'}))
class DeleteEnvHandler(BaseHandler):
    """POST endpoint that removes an environment from memory and disk."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        eid = extract_eid(args)
        if eid is not None:
            del handler.state[eid]
            if handler.env_path is not None:
                # Remove the serialized copy as well.
                p = os.path.join(handler.env_path, "{0}.json".format(eid))
                os.remove(p)
            broadcast_envs(handler)

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
class EnvStateHandler(BaseHandler):
    """POST endpoint returning the list of known environment ids."""

    def initialize(self, app):
        self.app = app
        self.state = app.state
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        # TODO if an env is provided return the state of that env
        all_eids = list(handler.state.keys())
        handler.write(json.dumps(all_eids))

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
class ForkEnvHandler(BaseHandler):
    """POST endpoint that duplicates an existing environment under a new id."""

    def initialize(self, app):
        self.app = app
        self.state = app.state
        self.subs = app.subs
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        """Deep-copy env ``prev_eid`` to ``eid``, persist it, notify clients."""
        prev_eid = escape_eid(args.get('prev_eid'))
        eid = escape_eid(args.get('eid'))
        # Message typo fix: previously read "doesn't exit".
        assert prev_eid in handler.state, 'env to be forked doesn\'t exist'
        handler.state[eid] = copy.deepcopy(handler.state[prev_eid])
        serialize_env(handler.state, [eid], env_path=handler.app.env_path)
        broadcast_envs(handler)
        handler.write(eid)

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
class HashHandler(BaseHandler):
    """POST endpoint returning the MD5 hash of a window's JSON, or 'false'."""

    def initialize(self, app):
        self.app = app
        self.state = app.state
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        eid = extract_eid(args)
        handler_json = handler.state[eid]['jsons']
        if args['win'] in handler_json:
            # The hash lets clients cheaply detect whether a window changed.
            hashed = hash_md_window(handler_json[args['win']])
            handler.write(hashed)
        else:
            handler.write('false')

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
def load_env(state, eid, socket, env_path=DEFAULT_ENV_PATH):
    """Load environment ``eid`` (from memory or disk) and stream its windows
    to ``socket``, ordered by each pane's index ``i``."""
    env = {}
    if eid in state:
        env = state.get(eid)
    elif env_path is not None:
        # BUG FIX: envs are serialized as <env_path>/<eid>.json (see the
        # delete/serialize paths). The previous
        # os.path.join(env_path, eid.strip(), '.json') built
        # '<env_path>/<eid>/.json', so saved envs could never be loaded.
        p = os.path.join(env_path, "{0}.json".format(eid.strip()))
        if os.path.exists(p):
            with open(p, 'r') as fn:
                env = tornado.escape.json_decode(fn.read())
            state[eid] = env
    if 'reload' in env:
        socket.write_message(
            json.dumps({'command': 'reload', 'data': env['reload']})
        )
    # Panes without an 'i' index sort after indexed ones.
    jsons = list(env.get('jsons', {}).values())
    windows = sorted(jsons, key=lambda k: ('i' not in k, k.get('i', None)))
    for v in windows:
        socket.write_message(v)
    socket.write_message(json.dumps({'command': 'layout'}))
    socket.eid = eid
def gather_envs(state, env_path=DEFAULT_ENV_PATH):
    """Return the sorted union of serialized env names and in-memory envs."""
    on_disk = []
    if env_path is not None:
        for entry in os.listdir(env_path):
            if '.json' in entry:
                on_disk.append(entry.replace('.json', ''))
    return sorted(set(on_disk) | set(state.keys()))
def compare_envs(state, eids, socket, env_path=DEFAULT_ENV_PATH):
    """Build a synthetic comparison env from ``eids`` and stream it to
    ``socket``.

    Plot windows sharing a title across at least two envs are merged into one
    window whose traces are renamed '<envNum>_<trace name>'; a legend window
    mapping env numbers to env ids is appended.
    """
    logging.info('comparing envs')
    eidNums = {e: str(i) for i, e in enumerate(eids)}
    env = {}
    envs = {}
    for eid in eids:
        if eid in state:
            envs[eid] = state.get(eid)
        elif env_path is not None:
            # BUG FIX: envs serialize to <env_path>/<eid>.json. The previous
            # os.path.join(env_path, eid.strip(), '.json') built
            # '<env_path>/<eid>/.json', so on-disk envs were never found.
            p = os.path.join(env_path, "{0}.json".format(eid.strip()))
            if os.path.exists(p):
                with open(p, 'r') as fn:
                    env = tornado.escape.json_decode(fn.read())
                state[eid] = env
                envs[eid] = env
    # Seed the result from the first env; suffix every window id so merged
    # windows cannot collide with real ones.
    res = copy.deepcopy(envs[list(envs.keys())[0]])
    name2Wid = {res['jsons'][wid].get('title', None): wid + '_compare'
                for wid in res.get('jsons', {})
                if 'title' in res['jsons'][wid]}
    for wid in list(res['jsons'].keys()):
        res['jsons'][wid + '_compare'] = res['jsons'][wid]
        res['jsons'][wid] = None
        res['jsons'].pop(wid)
    for ix, eid in enumerate(envs.keys()):
        env = envs[eid]
        for wid in env.get('jsons', {}).keys():
            win = env['jsons'][wid]
            if win.get('type', None) != 'plot':
                continue
            if 'content' not in win:
                continue
            if 'title' not in win:
                continue
            title = win['title']
            if title not in name2Wid or title == '':
                continue
            destWid = name2Wid[title]
            destWidJson = res['jsons'][destWid]
            # Combine plots with the same window title. If plot data source was
            # labeled "name" in the legend, rename to "envId_legend" where
            # envId is enumeration of the selected environments (not the long
            # environment id string). This makes plot lines more readable.
            if ix == 0:
                if 'name' not in destWidJson['content']['data'][0]:
                    continue  # Skip windows with unnamed data
                destWidJson['has_compare'] = False
                destWidJson['content']['layout']['showlegend'] = True
                destWidJson['contentID'] = get_rand_id()
                for dataIdx, data in enumerate(destWidJson['content']['data']):
                    if 'name' not in data:
                        break  # stop working with this plot, not right format
                    destWidJson['content']['data'][dataIdx]['name'] = \
                        '{}_{}'.format(eidNums[eid], data['name'])
            else:
                if 'name' not in destWidJson['content']['data'][0]:
                    continue  # Skip windows with unnamed data
                # has_compare will be set to True only if the window title is
                # shared by at least 2 envs.
                destWidJson['has_compare'] = True
                for _dataIdx, data in enumerate(win['content']['data']):
                    data = copy.deepcopy(data)
                    if 'name' not in data:
                        destWidJson['has_compare'] = False
                        break  # stop working with this plot, not right format
                    data['name'] = '{}_{}'.format(eidNums[eid], data['name'])
                    destWidJson['content']['data'].append(data)
    # Make sure that only plots that are shared by at least two envs are shown.
    # Check has_compare flag
    for destWid in list(res['jsons'].keys()):
        if ('has_compare' not in res['jsons'][destWid]) or \
                (not res['jsons'][destWid]['has_compare']):
            del res['jsons'][destWid]
    # create legend mapping environment names to environment numbers so one can
    # look it up for the new legend
    tableRows = ["<tr> <td> {} </td> <td> {} </td> </tr>".format(v, eidNums[v])
                 for v in eidNums]
    # NOTE(review): the quadruple quote below embeds a literal '"' at the
    # start of the legend HTML — looks unintentional; confirm before fixing.
    tbl = """"<style>
    table, th, td {{
        border: 1px solid black;
    }}
    </style>
    <table> {} </table>""".format(' '.join(tableRows))
    res['jsons']['window_compare_legend'] = {
        "command": "window",
        "id": "window_compare_legend",
        "title": "compare_legend",
        "inflate": True,
        "width": None,
        "height": None,
        "contentID": "compare_legend",
        "content": tbl,
        "type": "text",
        "layout": {"title": "compare_legend"},
        "i": 1,
        "has_compare": True,
    }
    if 'reload' in res:
        socket.write_message(
            json.dumps({'command': 'reload', 'data': res['reload']})
        )
    jsons = list(res.get('jsons', {}).values())
    windows = sorted(jsons, key=lambda k: ('i' not in k, k.get('i', None)))
    for v in windows:
        socket.write_message(v)
    socket.write_message(json.dumps({'command': 'layout'}))
    socket.eid = eids
class EnvHandler(BaseHandler):
    """GET renders the main page for one env; POST loads or creates an env."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
        self.wrap_socket = app.wrap_socket

    @check_auth
    def get(self, eid):
        items = gather_envs(self.state, env_path=self.env_path)
        # Fall back to no active env when the requested one is unknown.
        active = '' if eid not in items else eid
        self.render(
            'index.html',
            user=getpass.getuser(),
            items=items,
            active_item=active,
            wrap_socket=self.wrap_socket,
        )

    @check_auth
    def post(self, args):
        # ``args`` is the env id taken from the URL path.
        msg_args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        if 'sid' in msg_args:
            sid = msg_args['sid']
            if sid in self.subs:
                # Stream the env's windows to the requesting subscriber.
                load_env(self.state, args, self.subs[sid],
                         env_path=self.env_path)
        if 'eid' in msg_args:
            eid = msg_args['eid']
            if eid not in self.state:
                # Create an empty env on demand and announce it.
                self.state[eid] = {'jsons': {}, 'reload': {}}
                broadcast_envs(self)
class CompareHandler(BaseHandler):
    """GET renders the compare view; POST streams the merged comparison."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
        self.wrap_socket = app.wrap_socket

    @check_auth
    def get(self, eids):
        items = gather_envs(self.state)
        eids = eids.split('+')
        # Filter out eids that don't exist
        eids = [x for x in eids if x in items]
        eids = '+'.join(eids)
        self.render(
            'index.html',
            user=getpass.getuser(),
            items=items,
            active_item=eids,
            wrap_socket=self.wrap_socket,
        )

    @check_auth
    def post(self, args):
        # Body carries only the subscriber sid; env ids come from the URL.
        sid = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )['sid']
        if sid in self.subs:
            compare_envs(self.state, args.split('+'), self.subs[sid],
                         self.env_path)
class SaveHandler(BaseHandler):
    """POST endpoint that serializes the named environments to disk."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.sources = app.sources
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        envs = args['data']
        envs = [escape_eid(eid) for eid in envs]
        # this drops invalid env ids
        ret = serialize_env(handler.state, envs, env_path=handler.env_path)
        handler.write(json.dumps(ret))

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
class DataHandler(BaseHandler):
    """POST endpoint to bulk-load or dump raw window JSON for an env."""

    def initialize(self, app):
        self.state = app.state
        self.subs = app.subs
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled

    @staticmethod
    def wrap_func(handler, args):
        eid = extract_eid(args)
        if 'data' in args:
            # Load data from client
            data = json.loads(args['data'])
            if eid not in handler.state:
                handler.state[eid] = {'jsons': {}, 'reload': {}}
            # win=None replaces the whole env; otherwise just one window.
            if 'win' in args and args['win'] is None:
                handler.state[eid]['jsons'] = data
            else:
                handler.state[eid]['jsons'][args['win']] = data
            broadcast_envs(handler)
        else:
            # Dump data to client
            if 'win' in args and args['win'] is None:
                handler.write(json.dumps(handler.state[eid]['jsons']))
            else:
                assert args['win'] in handler.state[eid]['jsons'], \
                    "Window {} doesn't exist in env {}".format(args['win'], eid)
                handler.write(json.dumps(handler.state[eid]['jsons'][args['win']]))

    @check_auth
    def post(self):
        args = tornado.escape.json_decode(
            tornado.escape.to_basestring(self.request.body)
        )
        self.wrap_func(self, args)
class IndexHandler(BaseHandler):
    """Root page: renders the UI, or the login form when auth is enabled."""

    def initialize(self, app):
        self.state = app.state
        self.port = app.port
        self.env_path = app.env_path
        self.login_enabled = app.login_enabled
        self.user_credential = app.user_credential
        self.base_url = app.base_url if app.base_url != '' else '/'
        self.wrap_socket = app.wrap_socket

    def get(self, args, **kwargs):
        items = gather_envs(self.state, env_path=self.env_path)
        if (not self.login_enabled) or self.current_user:
            """self.current_user is an authenticated user provided by Tornado,
            available when we set self.get_current_user in BaseHandler,
            and the default value of self.current_user is None
            """
            self.render(
                'index.html',
                user=getpass.getuser(),
                items=items,
                active_item='',
                wrap_socket=self.wrap_socket,
            )
        elif self.login_enabled:
            self.render(
                'login.html',
                user=getpass.getuser(),
                items=items,
                active_item='',
                base_url=self.base_url
            )

    def post(self, arg, **kwargs):
        # Login: set the auth cookie on a credential match, otherwise 400.
        json_obj = tornado.escape.json_decode(self.request.body)
        username = json_obj["username"]
        password = hash_password(json_obj["password"])
        # NOTE(review): '==' comparison of secrets is not constant-time;
        # consider hmac.compare_digest here.
        if ((username == self.user_credential["username"]) and
                (password == self.user_credential["password"])):
            self.set_secure_cookie("user_password", username + password)
        else:
            self.set_status(400)
class ErrorHandler(BaseHandler):
    """Debug endpoint that deliberately raises to exercise write_error."""

    def get(self, text):
        # Fall back to a canned message when no text is supplied.
        if text:
            raise Exception(text)
        raise Exception("test error")
# function that downloads and installs javascript, css, and font dependencies:
def download_scripts(proxies=None, install_dir=None):
import visdom
print("Checking for scripts.")
# location in which to download stuff:
if install_dir is None:
install_dir = os.path.dirname(visdom.__file__)
# all files that need to be downloaded:
b = 'https://unpkg.com/'
bb = '%sbootstrap@3.3.7/dist/' % b
ext_files = {
# - js
'%sjquery@3.1.1/dist/jquery.min.js' % b: 'jquery.min.js',
'%sbootstrap@3.3.7/dist/js/bootstrap.min.js' % b: 'bootstrap.min.js',
'%sreact@16.2.0/umd/react.production.min.js' % b: 'react-react.min.js',
'%sreact-dom@16.2.0/umd/react-dom.production.min.js' % b:
'react-dom.min.js',
'%sreact-modal@3.1.10/dist/react-modal.min.js' % b:
'react-modal.min.js',
# here is another url in case the cdn breaks down again.
# https://raw.githubusercontent.com/plotly/plotly.js/master/dist/plotly.min.js
'https://cdn.plot.ly/plotly-latest.min.js': 'plotly-plotly.min.js',
# Stanford Javascript Crypto Library for Password Hashing
'%ssjcl@1.0.7/sjcl.js' % b: 'sjcl.js',
'%slayout-bin-packer@1.4.0/dist/layout-bin-packer.js.map' % b: 'layout-bin-packer.js.map',
# - css
'%sreact-resizable@1.4.6/css/styles.css' % b:
'react-resizable-styles.css',
'%sreact-grid-layout@0.16.3/css/styles.css' % b:
'react-grid-layout-styles.css',
'%scss/bootstrap.min.css' % bb: 'bootstrap.min.css',
# - fonts
'%sclassnames@2.2.5' % b: 'classnames',
'%slayout-bin-packer@1.4.0/dist/layout-bin-packer.js' % b:
'layout_bin_packer.js',
'%sfonts/glyphicons-halflings-regular.eot' % bb:
'glyphicons-halflings-regular.eot',
'%sfonts/glyphicons-halflings-regular.woff2' % bb:
'glyphicons-halflings-regular.woff2',
'%sfonts/glyphicons-halflings-regular.woff' % bb:
'glyphicons-halflings-regular.woff',
'%sfonts/glyphicons-halflings-regular.ttf' % bb:
'glyphicons-halflings-regular.ttf',
'%sfonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular' % bb: # noqa
'glyphicons-halflings-regular.svg#glyphicons_halflingsregular',
}
# make sure all relevant folders exist:
dir_list = [
'%s' % install_dir,
'%s/static' % install_dir,
'%s/static/js' % install_dir,
'%s/static/css' % install_dir,
'%s/static/fonts' % install_dir,
]
for directory in dir_list:
if not os.path.exists(directory):
os.makedirs(directory)
# set up proxy handler:
from urllib import request
from urllib.error import HTTPError, URLError
handler = request.ProxyHandler(proxies) if proxies is not None \
else request.BaseHandler()
opener = request.build_opener(handler)
request.install_opener(opener)
built_path = os.path.join(here, 'static/version.built')
is_built = visdom.__version__ == 'no_version_file'
if os.path.exists(built_path):
with open(built_path, 'r') as build_file:
build_version = build_file.read().strip()
if build_version == visdom.__version__:
is_built = True
else:
os.remove(built_path)
if not is_built:
print('Downloading scripts, this may take a little while')
# download files one-by-one:
for (key, val) in ext_files.items():
# set subdirectory:
if val.endswith('.js') or val.endswith('.js.map'):
sub_dir = 'js'
elif val.endswith('.css'):
sub_dir = 'css'
else:
sub_dir = 'fonts'
# download file:
filename = '%s/static/%s/%s' % (install_dir, sub_dir, val)
if not os.path.exists(filename) or not is_built:
req = request.Request(key,
headers={'User-Agent': 'Chrome/30.0.0.0'})
try:
data = opener.open(req).read()
with open(filename, 'wb') as fwrite:
fwrite.write(data)
except HTTPError as exc:
logging.error('Error {} while downloading {}'.format(
exc.code, key))
except URLError as exc:
logging.error('Error {} while downloading {}'.format(
exc.reason, key))
# Download MathJax Js Files
import requests
cdnjs_url = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/'
mathjax_dir = os.path.join(*cdnjs_url.split('/')[-3:])
mathjax_path = [
'config/Safe.js?V=2.7.5',
'config/TeX-AMS-MML_HTMLorMML.js?V=2.7.5',
'extensions/Safe.js?V=2.7.5',
'jax/output/SVG/fonts/TeX/fontdata.js?V=2.7.5',
'jax/output/SVG/jax.js?V=2.7.5',
'jax/output/SVG/fonts/TeX/Size1/Regular/Main.js?V=2.7.5',
'jax/output/SVG/config.js?V=2.7.5',
'MathJax.js?config=TeX-AMS-MML_HTMLorMML%2CSafe.js&ver=4.1',
]
mathjax_dir_path = '%s/static/%s/%s' % (install_dir, 'js', mathjax_dir)
for path in mathjax_path:
filename = path.split("/")[-1].split("?")[0]
extracted_directory = os.path.join(mathjax_dir_path, *path.split('/')[:-1])
if not os.path.exists(extracted_directory):
os.makedirs(extracted_directory)
if not os.path.exists(os.path.join(extracted_directory, filename)):
js_file = requests.get(cdnjs_url + path)
with open(os.path.join(extracted_directory, filename), "wb+") as file:
file.write(js_file.content)
if not is_built:
with open(built_path, 'w+') as build_file:
build_file.write(visdom.__version__)
def start_server(port=DEFAULT_PORT, hostname=DEFAULT_HOSTNAME,
                 base_url=DEFAULT_BASE_URL, env_path=DEFAULT_ENV_PATH,
                 readonly=False, print_func=None, user_credential=None,
                 use_frontend_client_polling=False, bind_local=False):
    """Create the tornado Application and run its IOLoop until stopped.

    Args:
        port: TCP port to listen on.
        hostname: host name used only in the printed URL hint.
        base_url: URL prefix the app is served under ('' or '/prefix').
        env_path: path to the serialized environments to reload.
        readonly: start the server in read-only mode.
        print_func: optional callable invoked with the port instead of
            printing the default "You can navigate to ..." message.
        user_credential: credential dict for authentication, or None.
        use_frontend_client_polling: have clients poll instead of using
            websockets.
        bind_local: when True, bind only to 127.0.0.1.
    """
    print("It's Alive!")
    app = Application(port=port, base_url=base_url, env_path=env_path,
                      readonly=readonly, user_credential=user_credential,
                      use_frontend_client_polling=use_frontend_client_polling)
    if bind_local:
        # Only reachable from this machine.
        app.listen(port, max_buffer_size=1024 ** 3, address='127.0.0.1')
    else:
        app.listen(port, max_buffer_size=1024 ** 3)
    logging.info("Application Started")
    # Prefer the HOSTNAME env var for the printed URL when the caller kept
    # the default hostname.  (The original's no-op `else: hostname =
    # hostname` branch was removed; behavior is unchanged.)
    if "HOSTNAME" in os.environ and hostname == DEFAULT_HOSTNAME:
        hostname = os.environ["HOSTNAME"]
    if print_func is None:
        print(
            "You can navigate to http://%s:%s%s" % (hostname, port, base_url))
    else:
        print_func(port)
    ioloop.IOLoop.instance().start()
    # NOTE(review): the two lines below execute only after the IOLoop stops
    # (i.e. on shutdown); presumably intended as cleanup of subscribers and
    # event sources -- confirm they are not meant to run before start().
    app.subs = []
    app.sources = []
def main(print_func=None):
    """Parse command-line flags, configure logging and (optionally)
    authentication, then start the visdom server.

    Args:
        print_func: optional callable forwarded to ``start_server``; it is
            invoked with the port instead of printing the default URL hint.
    """
    parser = argparse.ArgumentParser(description='Start the visdom server.')
    parser.add_argument('-port', metavar='port', type=int,
                        default=DEFAULT_PORT,
                        help='port to run the server on.')
    parser.add_argument('--hostname', metavar='hostname', type=str,
                        default=DEFAULT_HOSTNAME,
                        help='host to run the server on.')
    parser.add_argument('-base_url', metavar='base_url', type=str,
                        default=DEFAULT_BASE_URL,
                        help='base url for server (default = /).')
    parser.add_argument('-env_path', metavar='env_path', type=str,
                        default=DEFAULT_ENV_PATH,
                        help='path to serialized session to reload.')
    parser.add_argument('-logging_level', metavar='logger_level',
                        default='INFO',
                        help='logging level (default = INFO). Can take '
                        'logging level name or int (example: 20)')
    parser.add_argument('-readonly', help='start in readonly mode',
                        action='store_true')
    parser.add_argument('-enable_login', default=False, action='store_true',
                        help='start the server with authentication')
    parser.add_argument('-force_new_cookie', default=False,
                        action='store_true',
                        help='start the server with the new cookie, '
                        'available when -enable_login provided')
    parser.add_argument('-use_frontend_client_polling', default=False,
                        action='store_true',
                        help='Have the frontend communicate via polling '
                        'rather than over websockets.')
    parser.add_argument('-bind_local', default=False,
                        action='store_true',
                        help='Make server only accessible only from '
                        'localhost.')
    FLAGS = parser.parse_args()
    # Process base_url: the default '/' collapses to '' so the prefix can be
    # concatenated directly in front of route paths.
    base_url = FLAGS.base_url if FLAGS.base_url != DEFAULT_BASE_URL else ""
    assert base_url == '' or base_url.startswith('/'), \
        'base_url should start with /'
    assert base_url == '' or not base_url.endswith('/'), \
        'base_url should not end with / as it is appended automatically'
    # Accept either a numeric logging level or a level name (e.g. 'INFO').
    try:
        logging_level = int(FLAGS.logging_level)
    except ValueError:
        try:
            # NOTE(review): logging._checkLevel is a private stdlib API that
            # maps a level name to its numeric value -- verify it exists on
            # the supported Python versions.
            logging_level = logging._checkLevel(FLAGS.logging_level)
        except ValueError:
            raise KeyError(
                "Invalid logging level : {0}".format(FLAGS.logging_level)
            )
    logging.getLogger().setLevel(logging_level)
    if FLAGS.enable_login:
        # Credentials come either from environment variables (when
        # VISDOM_USE_ENV_CREDENTIALS is set) or an interactive prompt.
        enable_env_login = 'VISDOM_USE_ENV_CREDENTIALS'
        use_env = os.environ.get(enable_env_login, False)
        if use_env:
            username_var = 'VISDOM_USERNAME'
            password_var = 'VISDOM_PASSWORD'
            username = os.environ.get(username_var)
            password = os.environ.get(password_var)
            if not (username and password):
                # Env-credential mode was requested but the credentials are
                # missing: explain and abort rather than run half-configured.
                print(
                    '*** Warning ***\n'
                    'You have set the {0} env variable but probably '
                    'forgot to setup one (or both) {{ {1}, {2} }} '
                    'variables.\nYou should setup these variables with '
                    'proper username and password to enable logging. Try to '
                    'setup the variables, or unset {0} to input credentials '
                    'via command line prompt instead.\n'
                    .format(enable_env_login, username_var, password_var))
                sys.exit(1)
        else:
            username = input("Please input your username: ")
            password = getpass.getpass(prompt="Please input your password: ")
        user_credential = {
            "username": username,
            # The password is double-hashed before being kept in memory.
            "password": hash_password(hash_password(password))
        }
        # A new cookie secret is generated when none exists on disk or when
        # the user explicitly forces one.
        need_to_set_cookie = (
            not os.path.isfile(DEFAULT_ENV_PATH + "COOKIE_SECRET")
            or FLAGS.force_new_cookie)
        if need_to_set_cookie:
            if use_env:
                cookie_var = 'VISDOM_COOKIE'
                env_cookie = os.environ.get(cookie_var)
                if env_cookie is None:
                    print(
                        'The cookie file is not found. Please setup {0} env '
                        'variable to provide a cookie value, or unset {1} env '
                        'variable to input credentials and cookie via command '
                        'line prompt.'.format(cookie_var, enable_env_login))
                    sys.exit(1)
            else:
                # set_cookie(None) falls back to an interactive prompt.
                env_cookie = None
            set_cookie(env_cookie)
    else:
        user_credential = None
    start_server(port=FLAGS.port, hostname=FLAGS.hostname, base_url=base_url,
                 env_path=FLAGS.env_path, readonly=FLAGS.readonly,
                 print_func=print_func, user_credential=user_credential,
                 use_frontend_client_polling=FLAGS.use_frontend_client_polling,
                 bind_local=FLAGS.bind_local)
def download_scripts_and_run():
    """Fetch the static frontend assets, then start the visdom server."""
    download_scripts()
    main()
# Entry point when executed directly: download assets, then run the server.
if __name__ == "__main__":
    download_scripts_and_run()
|
import os
import yaml
from yacs.config import CfgNode as CN
# Root config node with default values; cloned (never mutated) by get_config.
_C = CN()
# Base config files to inherit from (paths relative to the config file)
_C.BASE = ['']
# -----------------------------------------------------------------------------
# Data settings
# -----------------------------------------------------------------------------
_C.DATA = CN()
# Batch size for a single GPU, could be overwritten by command line argument
_C.DATA.BATCH_SIZE = 128
# Path to dataset, could be overwritten by command line argument
_C.DATA.DATA_PATH = ''
# Dataset name
_C.DATA.DATASET = 'imagenet'
# Input image size
_C.DATA.IMG_SIZE = 224
# Interpolation to resize image (random, bilinear, bicubic)
_C.DATA.INTERPOLATION = 'bicubic'
# Use zipped dataset instead of folder dataset
# could be overwritten by command line argument
_C.DATA.ZIP_MODE = False
# Cache Data in Memory, could be overwritten by command line argument
_C.DATA.CACHE_MODE = 'part'
# Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.
_C.DATA.PIN_MEMORY = True
# Number of data loading threads
_C.DATA.NUM_WORKERS = 8
# -----------------------------------------------------------------------------
# Model settings
# -----------------------------------------------------------------------------
_C.MODEL = CN()
# Model type
_C.MODEL.TYPE = 'swin'
# Model name
_C.MODEL.NAME = 'swin_tiny_patch4_window7_224'
# Checkpoint to resume, could be overwritten by command line argument
_C.MODEL.RESUME = ''
# Number of classes, overwritten in data preparation
_C.MODEL.NUM_CLASSES = 1000
# Dropout rate
_C.MODEL.DROP_RATE = 0.0
# Drop path (stochastic depth) rate
_C.MODEL.DROP_PATH_RATE = 0.1
# Label Smoothing
_C.MODEL.LABEL_SMOOTHING = 0.1
# DWNet parameters (the original comment mislabeled this block as
# "Swin Transformer parameters")
_C.MODEL.DWNET = CN()
_C.MODEL.DWNET.PATCH_SIZE = 4
_C.MODEL.DWNET.IN_CHANS = 3
_C.MODEL.DWNET.EMBED_DIM = 96
_C.MODEL.DWNET.DEPTHS = [2, 2, 6, 2]
_C.MODEL.DWNET.WINDOW_SIZE = 7
_C.MODEL.DWNET.MLP_RATIO = 4.
_C.MODEL.DWNET.APE = False
_C.MODEL.DWNET.PATCH_NORM = True
_C.MODEL.DWNET.CONV_TYPE = "v1"
_C.MODEL.DWNET.DYNAMIC = False
# Halo transformer parameters
_C.MODEL.HALO = CN()
_C.MODEL.HALO.PATCH_SIZE = 4
_C.MODEL.HALO.IN_CHANS = 3
_C.MODEL.HALO.EMBED_DIM = 96
_C.MODEL.HALO.DEPTHS = [2, 2, 6, 2]
_C.MODEL.HALO.NUM_HEADS = [3, 6, 12, 24]
_C.MODEL.HALO.WINDOW_SIZE = [7, 7, 7, 7]
_C.MODEL.HALO.HALO_SIZE = [3, 3, 3, 3]
_C.MODEL.HALO.MLP_RATIO = 4.
_C.MODEL.HALO.QKV_BIAS = True
_C.MODEL.HALO.QK_SCALE = None
_C.MODEL.HALO.APE = False
_C.MODEL.HALO.PATCH_NORM = True
# -----------------------------------------------------------------------------
# Training settings
# -----------------------------------------------------------------------------
_C.TRAIN = CN()
_C.TRAIN.START_EPOCH = 0
_C.TRAIN.EPOCHS = 300
_C.TRAIN.WARMUP_EPOCHS = 20
_C.TRAIN.WEIGHT_DECAY = 0.05
_C.TRAIN.BASE_LR = 5e-4
_C.TRAIN.WARMUP_LR = 5e-7
_C.TRAIN.MIN_LR = 5e-6
# Clip gradient norm
_C.TRAIN.CLIP_GRAD = 5.0
# Auto resume from latest checkpoint
_C.TRAIN.AUTO_RESUME = False
# Gradient accumulation steps (0 disables accumulation)
# could be overwritten by command line argument
_C.TRAIN.ACCUMULATION_STEPS = 0
# Whether to use gradient checkpointing to save memory
# could be overwritten by command line argument
_C.TRAIN.USE_CHECKPOINT = False
# LR scheduler
_C.TRAIN.LR_SCHEDULER = CN()
_C.TRAIN.LR_SCHEDULER.NAME = 'cosine'
# Epoch interval to decay LR, used in StepLRScheduler
_C.TRAIN.LR_SCHEDULER.DECAY_EPOCHS = 30
# LR decay rate, used in StepLRScheduler
_C.TRAIN.LR_SCHEDULER.DECAY_RATE = 0.1
# Optimizer
_C.TRAIN.OPTIMIZER = CN()
_C.TRAIN.OPTIMIZER.NAME = 'adamw'
# Optimizer Epsilon
_C.TRAIN.OPTIMIZER.EPS = 1e-8
# Optimizer Betas
_C.TRAIN.OPTIMIZER.BETAS = (0.9, 0.999)
# SGD momentum
_C.TRAIN.OPTIMIZER.MOMENTUM = 0.9
# -----------------------------------------------------------------------------
# Augmentation settings
# -----------------------------------------------------------------------------
_C.AUG = CN()
# Color jitter factor
_C.AUG.COLOR_JITTER = 0.4
# AutoAugment policy string (timm-style, e.g. "v0" or "original")
_C.AUG.AUTO_AUGMENT = 'rand-m9-mstd0.5-inc1'
# Random erase prob
_C.AUG.REPROB = 0.25
# Random erase mode
_C.AUG.REMODE = 'pixel'
# Random erase count
_C.AUG.RECOUNT = 1
# Mixup alpha, mixup enabled if > 0
_C.AUG.MIXUP = 0.8
# Cutmix alpha, cutmix enabled if > 0
_C.AUG.CUTMIX = 1.0
# Cutmix min/max ratio, overrides alpha and enables cutmix if set
_C.AUG.CUTMIX_MINMAX = None
# Probability of performing mixup or cutmix when either/both is enabled
_C.AUG.MIXUP_PROB = 1.0
# Probability of switching to cutmix when both mixup and cutmix enabled
_C.AUG.MIXUP_SWITCH_PROB = 0.5
# How to apply mixup/cutmix params. Per "batch", "pair", or "elem"
_C.AUG.MIXUP_MODE = 'batch'
# -----------------------------------------------------------------------------
# Testing settings
# -----------------------------------------------------------------------------
_C.TEST = CN()
# Whether to use center crop when testing
_C.TEST.CROP = True
# -----------------------------------------------------------------------------
# Misc
# -----------------------------------------------------------------------------
# Mixed precision opt level, if O0, no amp is used ('O0', 'O1', 'O2')
# overwritten by command line argument
_C.AMP_OPT_LEVEL = ''
# Path to output folder, overwritten by command line argument
_C.OUTPUT = ''
# Tag of experiment, overwritten by command line argument
_C.TAG = 'default'
# Frequency (in epochs) to save checkpoint
_C.SAVE_FREQ = 1
# Frequency (in iterations) to log info
_C.PRINT_FREQ = 10
# Fixed random seed
_C.SEED = 0
# Perform evaluation only, overwritten by command line argument
_C.EVAL_MODE = False
# Test throughput only, overwritten by command line argument
_C.THROUGHPUT_MODE = False
# local rank for DistributedDataParallel, given by command line argument
_C.LOCAL_RANK = 0
def _update_config_from_file(config, cfg_file):
    """Merge settings from a YAML config file into ``config`` in place.

    Any files listed under the ``BASE`` key are merged first (recursively),
    so the current file's values take precedence over its bases.
    """
    config.defrost()
    with open(cfg_file, 'r') as handle:
        raw_cfg = yaml.load(handle, Loader=yaml.FullLoader)
    parent_dir = os.path.dirname(cfg_file)
    # Merge each inherited base file before this one.
    for base in raw_cfg.setdefault('BASE', ['']):
        if base:
            _update_config_from_file(config, os.path.join(parent_dir, base))
    print('=> merge config from {}'.format(cfg_file))
    config.merge_from_file(cfg_file)
    config.freeze()
def update_config(config, args):
    """Apply the config file and command-line overrides to ``config``.

    Merge order (later wins): config file from ``args.cfg`` -> ``args.opts``
    key/value list -> individual command-line arguments below.
    """
    _update_config_from_file(config, args.cfg)
    config.defrost()
    if args.opts:
        config.merge_from_list(args.opts)
    # merge from specific arguments (truthy values override the defaults)
    if args.batch_size:
        config.DATA.BATCH_SIZE = args.batch_size
    if args.data_path:
        config.DATA.DATA_PATH = args.data_path
    if args.zip:
        config.DATA.ZIP_MODE = True
    if args.cache_mode:
        config.DATA.CACHE_MODE = args.cache_mode
    if args.resume:
        config.MODEL.RESUME = args.resume
    if args.accumulation_steps:
        config.TRAIN.ACCUMULATION_STEPS = args.accumulation_steps
    if args.use_checkpoint:
        config.TRAIN.USE_CHECKPOINT = True
    if args.amp_opt_level:
        config.AMP_OPT_LEVEL = args.amp_opt_level
    if args.output:
        config.OUTPUT = args.output
    if args.tag:
        config.TAG = args.tag
    if args.eval:
        config.EVAL_MODE = True
    if args.throughput:
        config.THROUGHPUT_MODE = True
    # Map the dataset flag onto the config's dataset name.
    if args.data_set == 'CIFAR':
        config.DATA.DATASET = 'cifar'
    elif args.data_set == 'IMNET':
        config.DATA.DATASET = 'imagenet'
    # 300 is the default epoch count; any other value overrides the config.
    if args.epoch != 300:
        config.TRAIN.EPOCHS = args.epoch
    # set local rank for distributed training
    config.LOCAL_RANK = args.local_rank
    # output folder: <output>/<model name>/<tag>
    config.OUTPUT = os.path.join(config.OUTPUT, config.MODEL.NAME, config.TAG)
    config.freeze()
def get_config(args):
    """Build and return the final yacs CfgNode for this run.

    The module-level defaults are cloned first so they are never mutated,
    then the config file and command-line overrides from ``args`` are
    merged on top of the clone.
    """
    cfg = _C.clone()
    update_config(cfg, args)
    return cfg
|
// Tailwind CSS configuration.
module.exports = {
  // Files scanned for class names when purging unused styles in production.
  purge: ["./index.html", "./src/**/*.{vue,js,ts,jsx,tsx}"],
  darkMode: false, // or 'media' or 'class'
  theme: {
    extend: {},
  },
  variants: {
    extend: {},
  },
  // The forms plugin provides sensible base styles for form elements.
  plugins: [require("@tailwindcss/forms")],
};
|
/**
* Copyright (c) 2006-2012, JGraph Ltd
*/
/**
 * Constructs a new toolbar for the given editor.
 * @class
 * @param {EditorUi} editorUi - owning editor UI instance.
 * @param {Element} container - DOM element the toolbar renders into.
 */
function Toolbar(editorUi, container) {
    this.editorUi = editorUi;
    this.container = container;
    this.staticElements = [];
    this.init();
    // Hide the currently visible menu whenever a gesture starts anywhere
    // outside of that menu's DOM element.
    this.gestureHandler = mxUtils.bind(this, function (evt) {
        var menu = this.editorUi.currentMenu;
        if (menu != null && mxEvent.getSource(evt) != menu.div) {
            this.hideMenu();
        }
    });
    mxEvent.addGestureListeners(document, this.gestureHandler);
};
/**
 * Image for the dropdown arrow (GIF fallback when SVG is unsupported,
 * inline data URI otherwise).
 */
Toolbar.prototype.dropdownImage = (!mxClient.IS_SVG) ? IMAGE_PATH + '/dropdown.gif' : 'data:image/gif;base64,R0lGODlhDQANAIABAHt7e////yH/C1hNUCBEYXRhWE1QPD94cGFja2V0IGJlZ2luPSLvu78iIGlkPSJXNU0wTXBDZWhpSHpyZVN6TlRjemtjOWQiPz4gPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iQWRvYmUgWE1QIENvcmUgNS4wLWMwNjAgNjEuMTM0Nzc3LCAyMDEwLzAyLzEyLTE3OjMyOjAwICAgICAgICAiPiA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPiA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIiB4bWxuczp4bXA9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC8iIHhtbG5zOnhtcE1NPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvbW0vIiB4bWxuczpzdFJlZj0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL3NUeXBlL1Jlc291cmNlUmVmIyIgeG1wOkNyZWF0b3JUb29sPSJBZG9iZSBQaG90b3Nob3AgQ1M1IE1hY2ludG9zaCIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDpCREM1NkJFMjE0NEMxMUU1ODk1Q0M5MjQ0MTA4QjNDMSIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDpCREM1NkJFMzE0NEMxMUU1ODk1Q0M5MjQ0MTA4QjNDMSI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOkQzOUMzMjZCMTQ0QjExRTU4OTVDQzkyNDQxMDhCM0MxIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOkQzOUMzMjZDMTQ0QjExRTU4OTVDQzkyNDQxMDhCM0MxIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+Af/+/fz7+vn49/b19PPy8fDv7u3s6+rp6Ofm5eTj4uHg397d3Nva2djX1tXU09LR0M/OzczLysnIx8bFxMPCwcC/vr28u7q5uLe2tbSzsrGwr66trKuqqainpqWko6KhoJ+enZybmpmYl5aVlJOSkZCPjo2Mi4qJiIeGhYSDgoGAf359fHt6eXh3dnV0c3JxcG9ubWxramloZ2ZlZGNiYWBfXl1cW1pZWFdWVVRTUlFQT05NTEtKSUhHRkVEQ0JBQD8+PTw7Ojk4NzY1NDMyMTAvLi0sKyopKCcmJSQjIiEgHx4dHBsaGRgXFhUUExIREA8ODQwLCgkIBwYFBAMCAQAAIfkEAQAAAQAsAAAAAA0ADQAAAhGMj6nL3QAjVHIu6azbvPtWAAA7';
/**
 * HTML snippet for the dropdown arrow image, positioned for the current UI
 * density.
 */
Toolbar.prototype.dropdownImageHtml = '<img border="0" style="position:absolute;right:4px;top:' +
    ((!EditorUi.compactUi) ? 8 : 6) + 'px;" src="' + Toolbar.prototype.dropdownImage + '" valign="middle"/>';
/**
 * Defines the background for selected buttons.
 */
Toolbar.prototype.selectedBackground = '#d0d0d0';
/**
 * Defines the background for unselected buttons.
 */
Toolbar.prototype.unselectedBackground = 'none';
/**
 * Array that contains the DOM nodes that should never be removed.
 */
Toolbar.prototype.staticElements = null;
/**
 * Adds the toolbar elements.  Which groups are added depends on the
 * available screen width, so narrow screens get a reduced toolbar.
 */
Toolbar.prototype.init = function () {
    var sw = screen.width;
    // Takes into account initial compact mode
    sw -= (screen.height > 740) ? 56 : 0;
    // View/format panel dropdown (wide screens only).
    if (sw >= 700) {
        var formatMenu = this.addMenu('', mxResources.get('view') + ' (' + mxResources.get('panTooltip') + ')', true, 'viewPanels', null, true);
        this.addDropDownArrow(formatMenu, 'geSprite-formatpanel', 38, 50, -4, -3, 36, -8);
        this.addSeparator();
    }
    // Zoom dropdown showing the current scale as a percentage.
    var viewMenu = this.addMenu('', mxResources.get('zoom') + ' (Alt+Mousewheel)', true, 'viewZoom', null, true);
    viewMenu.showDisabled = true;
    viewMenu.style.whiteSpace = 'nowrap';
    viewMenu.style.position = 'relative';
    viewMenu.style.overflow = 'hidden';
    if (EditorUi.compactUi) {
        viewMenu.style.width = (mxClient.IS_QUIRKS) ? '58px' : '50px';
    } else {
        viewMenu.style.width = (mxClient.IS_QUIRKS) ? '62px' : '36px';
    }
    if (sw >= 420) {
        this.addSeparator();
        var elts = this.addItems(['zoomIn', 'zoomOut']);
        elts[0].setAttribute('title', mxResources.get('zoomIn') + ' (' + this.editorUi.actions.get('zoomIn').shortcut + ')');
        elts[1].setAttribute('title', mxResources.get('zoomOut') + ' (' + this.editorUi.actions.get('zoomOut').shortcut + ')');
    }
    // Updates the label if the scale changes
    this.updateZoom = mxUtils.bind(this, function () {
        viewMenu.innerHTML = Math.round(this.editorUi.editor.graph.view.scale * 100) + '%' +
            this.dropdownImageHtml;
        if (EditorUi.compactUi) {
            viewMenu.getElementsByTagName('img')[0].style.right = '1px';
            viewMenu.getElementsByTagName('img')[0].style.top = '5px';
        }
    });
    this.editorUi.editor.graph.view.addListener(mxEvent.EVENT_SCALE, this.updateZoom);
    this.editorUi.editor.addListener('resetGraphView', this.updateZoom);
    // Undo/redo with shortcut hints in the tooltips.
    var elts = this.addItems(['-', 'undo', 'redo']);
    elts[1].setAttribute('title', mxResources.get('undo') + ' (' + this.editorUi.actions.get('undo').shortcut + ')');
    elts[2].setAttribute('title', mxResources.get('redo') + ' (' + this.editorUi.actions.get('redo').shortcut + ')');
    if (sw >= 320) {
        var elts = this.addItems(['-', 'delete']);
        elts[1].setAttribute('title', mxResources.get('delete') + ' (' + this.editorUi.actions.get('delete').shortcut + ')');
    }
    if (sw >= 550) {
        this.addItems(['-', 'toFront', 'toBack']);
    }
    // Style buttons, added progressively as more width becomes available.
    if (sw >= 740) {
        this.addItems(['-', 'fillColor']);
        if (sw >= 780) {
            this.addItems(['strokeColor']);
            if (sw >= 820) {
                this.addItems(['shadow']);
            }
        }
    }
    if (sw >= 400) {
        this.addSeparator();
        // Edge shape dropdown (line/link/arrow/simple arrow).
        if (sw >= 440) {
            this.edgeShapeMenu = this.addMenuFunction('', mxResources.get('connection'), false, mxUtils.bind(this, function (menu) {
                this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_SHAPE, 'width'], [null, null], 'geIcon geSprite geSprite-connection', null, true).setAttribute('title', mxResources.get('line'));
                this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_SHAPE, 'width'], ['link', null], 'geIcon geSprite geSprite-linkedge', null, true).setAttribute('title', mxResources.get('link'));
                this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_SHAPE, 'width'], ['flexArrow', null], 'geIcon geSprite geSprite-arrow', null, true).setAttribute('title', mxResources.get('arrow'));
                this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_SHAPE, 'width'], ['arrow', null], 'geIcon geSprite geSprite-simplearrow', null, true).setAttribute('title', mxResources.get('simpleArrow'));
            }));
            this.addDropDownArrow(this.edgeShapeMenu, 'geSprite-connection', 44, 50, 0, 0, 22, -4);
        }
        // Edge routing style dropdown (straight/orthogonal/elbow/...).
        this.edgeStyleMenu = this.addMenuFunction('geSprite-orthogonal', mxResources.get('waypoints'), false, mxUtils.bind(this, function (menu) {
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], [null, null, null], 'geIcon geSprite geSprite-straight', null, true).setAttribute('title', mxResources.get('straight'));
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], ['orthogonalEdgeStyle', null, null], 'geIcon geSprite geSprite-orthogonal', null, true).setAttribute('title', mxResources.get('orthogonal'));
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_ELBOW, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], ['elbowEdgeStyle', null, null, null], 'geIcon geSprite geSprite-horizontalelbow', null, true).setAttribute('title', mxResources.get('simple'));
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_ELBOW, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], ['elbowEdgeStyle', 'vertical', null, null], 'geIcon geSprite geSprite-verticalelbow', null, true).setAttribute('title', mxResources.get('simple'));
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_ELBOW, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], ['isometricEdgeStyle', null, null, null], 'geIcon geSprite geSprite-horizontalisometric', null, true).setAttribute('title', mxResources.get('isometric'));
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_ELBOW, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], ['isometricEdgeStyle', 'vertical', null, null], 'geIcon geSprite geSprite-verticalisometric', null, true).setAttribute('title', mxResources.get('isometric'));
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], ['orthogonalEdgeStyle', '1', null], 'geIcon geSprite geSprite-curved', null, true).setAttribute('title', mxResources.get('curved'));
            this.editorUi.menus.edgeStyleChange(menu, '', [mxConstants.STYLE_EDGE, mxConstants.STYLE_CURVED, mxConstants.STYLE_NOEDGESTYLE], ['entityRelationEdgeStyle', null, null], 'geIcon geSprite geSprite-entity', null, true).setAttribute('title', mxResources.get('entityRelation'));
        }));
        this.addDropDownArrow(this.edgeStyleMenu, 'geSprite-orthogonal', 44, 50, 0, 0, 22, -4);
    }
    this.addSeparator();
    // Insert dropdown is always present.
    var insertMenu = this.addMenu('', mxResources.get('insert') + ' (' + mxResources.get('doubleClickTooltip') + ')', true, 'insert', null, true);
    this.addDropDownArrow(insertMenu, 'geSprite-plus', 38, 48, -4, -3, 36, -8);
};
/**
 * Configures a toolbar menu element: inserts the given sprite icon plus the
 * dropdown arrow image, and sizes the element for the current theme.
 */
Toolbar.prototype.addDropDownArrow = function (menu, sprite, width, atlasWidth, left, top, atlasDelta, atlasLeft) {
    // Default the atlas delta; outside compact mode the atlas left offset
    // replaces the compact left offset.
    if (atlasDelta == null) {
        atlasDelta = 32;
    }
    if (!EditorUi.compactUi) {
        left = atlasLeft;
    }
    menu.style.position = 'relative';
    menu.style.overflow = 'hidden';
    menu.style.whiteSpace = 'nowrap';
    menu.innerHTML = '<div class="geSprite ' + sprite + '" style="margin-left:' + left + 'px;margin-top:' + top + 'px;"></div>' +
        this.dropdownImageHtml;
    menu.style.width = (mxClient.IS_QUIRKS) ? atlasWidth + 'px' : (atlasWidth - atlasDelta) + 'px';
    if (mxClient.IS_QUIRKS) {
        // Quirks mode needs an explicit height.
        menu.style.height = (EditorUi.compactUi) ? '24px' : '26px';
    }
    // Fix for item size in kennedy theme
    if (EditorUi.compactUi) {
        var arrow = menu.getElementsByTagName('img')[0];
        arrow.style.left = '24px';
        arrow.style.top = '5px';
        menu.style.width = (mxClient.IS_QUIRKS) ? width + 'px' : (width - 10) + 'px';
    }
};
/**
 * Updates the font-family dropdown to display the given font name.
 */
Toolbar.prototype.setFontName = function (value) {
    if (this.fontMenu == null) {
        return;
    }
    this.fontMenu.innerHTML = '<div style="width:60px;overflow:hidden;display:inline-block;">' +
        mxUtils.htmlEntities(value) + '</div>' + this.dropdownImageHtml;
};
/**
 * Updates the font-size dropdown to display the given size.
 */
Toolbar.prototype.setFontSize = function (value) {
    if (this.sizeMenu == null) {
        return;
    }
    this.sizeMenu.innerHTML = '<div style="width:24px;overflow:hidden;display:inline-block;">' +
        value + '</div>' + this.dropdownImageHtml;
};
/**
 * Creates the toolbar shown while editing text in-place.
 */
Toolbar.prototype.createTextToolbar = function () {
var graph = this.editorUi.editor.graph;
var styleElt = this.addMenu('', mxResources.get('style'), true, 'formatBlock');
styleElt.style.position = 'relative';
styleElt.style.whiteSpace = 'nowrap';
styleElt.style.overflow = 'hidden';
styleElt.innerHTML = mxResources.get('style') + this.dropdownImageHtml;
if (EditorUi.compactUi) {
styleElt.style.paddingRight = '18px';
styleElt.getElementsByTagName('img')[0].style.right = '1px';
styleElt.getElementsByTagName('img')[0].style.top = '5px';
}
this.addSeparator();
this.fontMenu = this.addMenu('', mxResources.get('fontFamily'), true, 'fontFamily');
this.fontMenu.style.position = 'relative';
this.fontMenu.style.whiteSpace = 'nowrap';
this.fontMenu.style.overflow = 'hidden';
this.fontMenu.style.width = (mxClient.IS_QUIRKS) ? '80px' : '60px';
this.setFontName(Menus.prototype.defaultFont);
if (EditorUi.compactUi) {
this.fontMenu.style.paddingRight = '18px';
this.fontMenu.getElementsByTagName('img')[0].style.right = '1px';
this.fontMenu.getElementsByTagName('img')[0].style.top = '5px';
}
this.addSeparator();
this.sizeMenu = this.addMenu(Menus.prototype.defaultFontSize, mxResources.get('fontSize'), true, 'fontSize');
this.sizeMenu.style.position = 'relative';
this.sizeMenu.style.whiteSpace = 'nowrap';
this.sizeMenu.style.overflow = 'hidden';
this.sizeMenu.style.width = (mxClient.IS_QUIRKS) ? '44px' : '24px';
this.setFontSize(Menus.prototype.defaultFontSize);
if (EditorUi.compactUi) {
this.sizeMenu.style.paddingRight = '18px';
this.sizeMenu.getElementsByTagName('img')[0].style.right = '1px';
this.sizeMenu.getElementsByTagName('img')[0].style.top = '5px';
}
var elts = this.addItems(['-', 'undo', 'redo', '-', 'bold', 'italic', 'underline']);
elts[1].setAttribute('title', mxResources.get('undo') + ' (' + this.editorUi.actions.get('undo').shortcut + ')');
elts[2].setAttribute('title', mxResources.get('redo') + ' (' + this.editorUi.actions.get('redo').shortcut + ')');
elts[4].setAttribute('title', mxResources.get('bold') + ' (' + this.editorUi.actions.get('bold').shortcut + ')');
elts[5].setAttribute('title', mxResources.get('italic') + ' (' + this.editorUi.actions.get('italic').shortcut + ')');
elts[6].setAttribute('title', mxResources.get('underline') + ' (' + this.editorUi.actions.get('underline').shortcut + ')');
// KNOWN: Lost focus after click on submenu with text (not icon) in quirks and IE8. This is because the TD seems
// to catch the focus on click in these browsers. NOTE: Workaround in mxPopupMenu for icon items (without text).
var alignMenu = this.addMenuFunction('', mxResources.get('align'), false, mxUtils.bind(this, function (menu) {
elt = menu.addItem('', null, mxUtils.bind(this, function (evt) {
graph.cellEditor.alignText(mxConstants.ALIGN_LEFT, evt);
}), null, 'geIcon geSprite geSprite-left');
elt.setAttribute('title', mxResources.get('left'));
elt = menu.addItem('', null, mxUtils.bind(this, function (evt) {
graph.cellEditor.alignText(mxConstants.ALIGN_CENTER, evt);
}), null, 'geIcon geSprite geSprite-center');
elt.setAttribute('title', mxResources.get('center'));
elt = menu.addItem('', null, mxUtils.bind(this, function (evt) {
graph.cellEditor.alignText(mxConstants.ALIGN_RIGHT, evt);
}), null, 'geIcon geSprite geSprite-right');
elt.setAttribute('title', mxResources.get('right'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
document.execCommand('justifyfull', false, null);
}), null, 'geIcon geSprite geSprite-justifyfull');
elt.setAttribute('title', mxResources.get('justifyfull'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
document.execCommand('insertorderedlist', false, null);
}), null, 'geIcon geSprite geSprite-orderedlist');
elt.setAttribute('title', mxResources.get('numberedList'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
document.execCommand('insertunorderedlist', false, null);
}), null, 'geIcon geSprite geSprite-unorderedlist');
elt.setAttribute('title', mxResources.get('bulletedList'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
document.execCommand('outdent', false, null);
}), null, 'geIcon geSprite geSprite-outdent');
elt.setAttribute('title', mxResources.get('decreaseIndent'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
document.execCommand('indent', false, null);
}), null, 'geIcon geSprite geSprite-indent');
elt.setAttribute('title', mxResources.get('increaseIndent'));
}));
alignMenu.style.position = 'relative';
alignMenu.style.whiteSpace = 'nowrap';
alignMenu.style.overflow = 'hidden';
alignMenu.innerHTML = '<div class="geSprite geSprite-left" style="margin-left:-2px;"></div>' + this.dropdownImageHtml;
alignMenu.style.width = (mxClient.IS_QUIRKS) ? '50px' : '30px';
if (EditorUi.compactUi) {
alignMenu.getElementsByTagName('img')[0].style.left = '22px';
alignMenu.getElementsByTagName('img')[0].style.top = '5px';
}
var formatMenu = this.addMenuFunction('', mxResources.get('format'), false, mxUtils.bind(this, function (menu) {
elt = menu.addItem('', null, this.editorUi.actions.get('subscript').funct,
null, 'geIcon geSprite geSprite-subscript');
elt.setAttribute('title', mxResources.get('subscript') + ' (' + Editor.ctrlKey + '+,)');
elt = menu.addItem('', null, this.editorUi.actions.get('superscript').funct,
null, 'geIcon geSprite geSprite-superscript');
elt.setAttribute('title', mxResources.get('superscript') + ' (' + Editor.ctrlKey + '+.)');
// KNOWN: IE+FF don't return keyboard focus after color dialog (calling focus doesn't help)
elt = menu.addItem('', null, this.editorUi.actions.get('fontColor').funct,
null, 'geIcon geSprite geSprite-fontcolor');
elt.setAttribute('title', mxResources.get('fontColor'));
elt = menu.addItem('', null, this.editorUi.actions.get('backgroundColor').funct,
null, 'geIcon geSprite geSprite-fontbackground');
elt.setAttribute('title', mxResources.get('backgroundColor'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
document.execCommand('removeformat', false, null);
}), null, 'geIcon geSprite geSprite-removeformat');
elt.setAttribute('title', mxResources.get('removeFormat'));
}));
formatMenu.style.position = 'relative';
formatMenu.style.whiteSpace = 'nowrap';
formatMenu.style.overflow = 'hidden';
formatMenu.innerHTML = '<div class="geSprite geSprite-dots" style="margin-left:-2px;"></div>' +
this.dropdownImageHtml;
formatMenu.style.width = (mxClient.IS_QUIRKS) ? '50px' : '30px';
if (EditorUi.compactUi) {
formatMenu.getElementsByTagName('img')[0].style.left = '22px';
formatMenu.getElementsByTagName('img')[0].style.top = '5px';
}
this.addSeparator();
this.addButton('geIcon geSprite geSprite-code', mxResources.get('html'), function () {
graph.cellEditor.toggleViewMode();
if (graph.cellEditor.textarea.innerHTML.length > 0 && (graph.cellEditor.textarea.innerHTML != ' ' || !graph.cellEditor.clearOnChange)) {
window.setTimeout(function () {
document.execCommand('selectAll', false, null);
});
}
});
this.addSeparator();
// FIXME: Uses geButton here and geLabel in main menu
var insertMenu = this.addMenuFunction('', mxResources.get('insert'), true, mxUtils.bind(this, function (menu) {
menu.addItem(mxResources.get('insertLink'), null, mxUtils.bind(this, function () {
this.editorUi.actions.get('link').funct();
}));
menu.addItem(mxResources.get('insertImage'), null, mxUtils.bind(this, function () {
this.editorUi.actions.get('image').funct();
}));
menu.addItem(mxResources.get('insertHorizontalRule'), null, mxUtils.bind(this, function () {
document.execCommand('inserthorizontalrule', false, null);
}));
}));
insertMenu.style.whiteSpace = 'nowrap';
insertMenu.style.overflow = 'hidden';
insertMenu.style.position = 'relative';
insertMenu.innerHTML = '<div class="geSprite geSprite-plus" style="margin-left:-4px;margin-top:-3px;"></div>' +
this.dropdownImageHtml;
insertMenu.style.width = (mxClient.IS_QUIRKS) ? '36px' : '16px';
// Fix for item size in kennedy theme
if (EditorUi.compactUi) {
insertMenu.getElementsByTagName('img')[0].style.left = '24px';
insertMenu.getElementsByTagName('img')[0].style.top = '5px';
insertMenu.style.width = (mxClient.IS_QUIRKS) ? '50px' : '30px';
}
this.addSeparator();
// KNOWN: All table stuff does not work with undo/redo
// KNOWN: Lost focus after click on submenu with text (not icon) in quirks and IE8. This is because the TD seems
// to catch the focus on click in these browsers. NOTE: Workaround in mxPopupMenu for icon items (without text).
var elt = this.addMenuFunction('geIcon geSprite geSprite-table', mxResources.get('table'), false, mxUtils.bind(this, function (menu) {
var elt = graph.getSelectedElement();
var cell = graph.getParentByNames(elt, ['TD', 'TH'], graph.cellEditor.text2);
var row = graph.getParentByName(elt, 'TR', graph.cellEditor.text2);
if (row == null) {
this.editorUi.menus.addInsertTableItem(menu);
} else {
var table = graph.getParentByName(row, 'TABLE', graph.cellEditor.text2);
elt = menu.addItem('', null, mxUtils.bind(this, function () {
try {
graph.selectNode(graph.insertColumn(table, (cell != null) ? cell.cellIndex : 0));
} catch (e) {
this.editorUi.handleError(e);
}
}), null, 'geIcon geSprite geSprite-insertcolumnbefore');
elt.setAttribute('title', mxResources.get('insertColumnBefore'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
try {
graph.selectNode(graph.insertColumn(table, (cell != null) ? cell.cellIndex + 1 : -1));
} catch (e) {
this.editorUi.handleError(e);
}
}), null, 'geIcon geSprite geSprite-insertcolumnafter');
elt.setAttribute('title', mxResources.get('insertColumnAfter'));
elt = menu.addItem('Delete column', null, mxUtils.bind(this, function () {
if (cell != null) {
try {
graph.deleteColumn(table, cell.cellIndex);
} catch (e) {
this.editorUi.handleError(e);
}
}
}), null, 'geIcon geSprite geSprite-deletecolumn');
elt.setAttribute('title', mxResources.get('deleteColumn'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
try {
graph.selectNode(graph.insertRow(table, row.sectionRowIndex));
} catch (e) {
this.editorUi.handleError(e);
}
}), null, 'geIcon geSprite geSprite-insertrowbefore');
elt.setAttribute('title', mxResources.get('insertRowBefore'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
try {
graph.selectNode(graph.insertRow(table, row.sectionRowIndex + 1));
} catch (e) {
this.editorUi.handleError(e);
}
}), null, 'geIcon geSprite geSprite-insertrowafter');
elt.setAttribute('title', mxResources.get('insertRowAfter'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
try {
graph.deleteRow(table, row.sectionRowIndex);
} catch (e) {
this.editorUi.handleError(e);
}
}), null, 'geIcon geSprite geSprite-deleterow');
elt.setAttribute('title', mxResources.get('deleteRow'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
// Converts rgb(r,g,b) values
var color = table.style.borderColor.replace(
/\brgb\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)/g,
function ($0, $1, $2, $3) {
return '#' + ('0' + Number($1).toString(16)).substr(-2) + ('0' + Number($2).toString(16)).substr(-2) + ('0' + Number($3).toString(16)).substr(-2);
});
this.editorUi.pickColor(color, function (newColor) {
if (newColor == null || newColor == mxConstants.NONE) {
table.removeAttribute('border');
table.style.border = '';
table.style.borderCollapse = '';
} else {
table.setAttribute('border', '1');
table.style.border = '1px solid ' + newColor;
table.style.borderCollapse = 'collapse';
}
});
}), null, 'geIcon geSprite geSprite-strokecolor');
elt.setAttribute('title', mxResources.get('borderColor'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
// Converts rgb(r,g,b) values
var color = table.style.backgroundColor.replace(
/\brgb\s*\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)/g,
function ($0, $1, $2, $3) {
return '#' + ('0' + Number($1).toString(16)).substr(-2) + ('0' + Number($2).toString(16)).substr(-2) + ('0' + Number($3).toString(16)).substr(-2);
});
this.editorUi.pickColor(color, function (newColor) {
if (newColor == null || newColor == mxConstants.NONE) {
table.style.backgroundColor = '';
} else {
table.style.backgroundColor = newColor;
}
});
}), null, 'geIcon geSprite geSprite-fillcolor');
elt.setAttribute('title', mxResources.get('backgroundColor'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
var value = table.getAttribute('cellPadding') || 0;
var dlg = new FilenameDialog(this.editorUi, value, mxResources.get('apply'), mxUtils.bind(this, function (newValue) {
if (newValue != null && newValue.length > 0) {
table.setAttribute('cellPadding', newValue);
} else {
table.removeAttribute('cellPadding');
}
}), mxResources.get('spacing'));
this.editorUi.showDialog(dlg.container, 300, 80, true, true);
dlg.init();
}), null, 'geIcon geSprite geSprite-fit');
elt.setAttribute('title', mxResources.get('spacing'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
table.setAttribute('align', 'left');
}), null, 'geIcon geSprite geSprite-left');
elt.setAttribute('title', mxResources.get('left'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
table.setAttribute('align', 'center');
}), null, 'geIcon geSprite geSprite-center');
elt.setAttribute('title', mxResources.get('center'));
elt = menu.addItem('', null, mxUtils.bind(this, function () {
table.setAttribute('align', 'right');
}), null, 'geIcon geSprite geSprite-right');
elt.setAttribute('title', mxResources.get('right'));
}
}));
elt.style.position = 'relative';
elt.style.whiteSpace = 'nowrap';
elt.style.overflow = 'hidden';
elt.innerHTML = '<div class="geSprite geSprite-table" style="margin-left:-2px;"></div>' + this.dropdownImageHtml;
elt.style.width = (mxClient.IS_QUIRKS) ? '50px' : '30px';
// Fix for item size in kennedy theme
if (EditorUi.compactUi) {
elt.getElementsByTagName('img')[0].style.left = '22px';
elt.getElementsByTagName('img')[0].style.top = '5px';
}
};
/**
 * Hides the menu that is currently being displayed, if any.
 */
Toolbar.prototype.hideMenu = function () {
  var ui = this.editorUi;
  ui.hideCurrentMenu();
};
/**
 * Adds the named, pre-registered menu to the toolbar.
 * Unless ignoreState is set, the toolbar element follows the menu's
 * enabled state via the menu's 'stateChanged' event.
 * @param label element label text
 * @param tooltip tooltip text
 * @param showLabels true to render as a label, false as an icon button
 * @param name key of the menu in editorUi.menus
 * @param c optional container (defaults to the toolbar container)
 * @param showAll forwarded to the popup menu (show disabled items)
 * @param ignoreState true to skip enabled-state tracking
 * @returns the created toolbar element
 */
Toolbar.prototype.addMenu = function (label, tooltip, showLabels, name, c, showAll, ignoreState) {
  var registeredMenu = this.editorUi.menus.get(name);
  var invoke = function () {
    registeredMenu.funct.apply(registeredMenu, arguments);
  };
  var item = this.addMenuFunction(label, tooltip, showLabels, invoke, c, showAll);
  if (ignoreState) {
    return item;
  }
  registeredMenu.addListener('stateChanged', function () {
    item.setEnabled(registeredMenu.enabled);
  });
  return item;
};
/**
 * Adds a menu-producing element to the given container (or, when no
 * container is supplied, to this toolbar's own container).
 */
Toolbar.prototype.addMenuFunction = function (label, tooltip, showLabels, funct, c, showAll) {
  var target = c;
  if (target == null) {
    target = this.container;
  }
  return this.addMenuFunctionInContainer(target, label, tooltip, showLabels, funct, showAll);
};
/**
 * Creates a label or button element, initializes it (tooltip + enabled
 * state), attaches the menu handler and appends it to the container.
 * @returns the new element
 */
Toolbar.prototype.addMenuFunctionInContainer = function (container, label, tooltip, showLabels, funct, showAll) {
  // Labels render text, buttons render a sprite icon.
  var item = (showLabels) ? this.createLabel(label) : this.createButton(label);
  this.initElement(item, tooltip);
  this.addMenuHandler(item, showLabels, funct, showAll);
  container.appendChild(item);
  return item;
};
/**
 * Appends a visual separator element to the given container
 * (defaults to the toolbar container) and returns it.
 */
Toolbar.prototype.addSeparator = function (c) {
  var target = (c != null) ? c : this.container;
  var sep = document.createElement('div');
  sep.className = 'geSeparator';
  target.appendChild(sep);
  return sep;
};
/**
 * Adds one toolbar item per action key; the key '-' inserts a separator.
 * Sprite class names are derived as 'geSprite-' + lowercased key.
 * @returns array of the created elements (separators included)
 */
Toolbar.prototype.addItems = function (keys, c, ignoreDisabled) {
  var added = [];
  for (var i = 0; i < keys.length; i++) {
    var key = keys[i];
    added.push((key == '-')
      ? this.addSeparator(c)
      : this.addItem('geSprite-' + key.toLowerCase(), key, c, ignoreDisabled));
  }
  return added;
};
/**
 * Adds a button for the registered action with the given key.
 * The tooltip shows the action label plus its shortcut, if any; unless
 * ignoreDisabled is set, the button tracks the action's enabled state.
 * @returns the created button, or null when no such action exists
 */
Toolbar.prototype.addItem = function (sprite, key, c, ignoreDisabled) {
  var action = this.editorUi.actions.get(key);
  if (action == null) {
    return null;
  }
  var tooltip = action.label;
  if (action.shortcut != null) {
    tooltip += ' (' + action.shortcut + ')';
  }
  var button = this.addButton(sprite, tooltip, action.funct, c);
  if (!ignoreDisabled) {
    button.setEnabled(action.enabled);
    action.addListener('stateChanged', function () {
      button.setEnabled(action.enabled);
    });
  }
  return button;
};
/**
 * Creates a sprite button, initializes it, wires the click handler and
 * appends it to the container (defaults to the toolbar container).
 * @returns the new button element
 */
Toolbar.prototype.addButton = function (classname, tooltip, funct, c) {
  var target = (c != null) ? c : this.container;
  var button = this.createButton(classname);
  this.initElement(button, tooltip);
  this.addClickHandler(button, funct);
  target.appendChild(button);
  return button;
};
/**
 * Initializes a toolbar element: sets its tooltip (when given) and
 * installs the setEnabled state helper.
 */
Toolbar.prototype.initElement = function (elt, tooltip) {
  if (tooltip != null) {
    elt.setAttribute('title', tooltip);
  }
  // Installs elt.setEnabled and marks the element enabled.
  this.addEnabledState(elt);
};
/**
 * Adds an enabled state with a setter directly to the DOM node (avoids a
 * JS wrapper object). Disabling appends the 'mxDisabled' CSS class to the
 * class list captured at install time.
 */
Toolbar.prototype.addEnabledState = function (elt) {
  // Snapshot of the class name to restore when re-enabled.
  var baseClass = elt.className;
  elt.setEnabled = function (value) {
    elt.enabled = value;
    elt.className = value ? baseClass : baseClass + ' mxDisabled';
  };
  elt.setEnabled(true);
};
/**
 * Wires a click handler that only fires while the element is enabled,
 * and suppresses the default pointer/mouse-down action so clicking the
 * toolbar does not steal keyboard focus.
 */
Toolbar.prototype.addClickHandler = function (elt, funct) {
  if (funct == null) {
    return;
  }
  mxEvent.addListener(elt, 'click', function (evt) {
    if (elt.enabled) {
      funct(evt);
    }
    mxEvent.consume(evt);
  });
  // Prevents focus
  var downEvent = (mxClient.IS_POINTER) ? 'pointerdown' : 'mousedown';
  mxEvent.addListener(elt, downEvent, mxUtils.bind(this, function (evt) {
    evt.preventDefault();
  }));
};
/**
 * Creates and returns a new toolbar button: an anchor with class
 * 'geButton' wrapping a sprite div (sprite class optional).
 */
Toolbar.prototype.createButton = function (classname) {
  var anchor = document.createElement('a');
  anchor.className = 'geButton';
  var icon = document.createElement('div');
  if (classname != null) {
    icon.className = 'geSprite ' + classname;
  }
  anchor.appendChild(icon);
  return anchor;
};
/**
 * Creates and returns a new text label element for the toolbar.
 * NOTE(review): the tooltip parameter is accepted but never used here;
 * tooltips are applied later by initElement.
 */
Toolbar.prototype.createLabel = function (label, tooltip) {
  var anchor = document.createElement('a');
  anchor.className = 'geLabel';
  mxUtils.write(anchor, label);
  return anchor;
};
/**
 * Wires the given toolbar element so that clicking it pops up an
 * mxPopupMenu built by <funct>, positioned directly below the element.
 * A second click on the same element closes the menu instead of
 * reopening it (see the `show` flag shared between the two listeners).
 * @param elt toolbar DOM element to attach the handler to
 * @param showLabels whether menu items show text labels (true) or icons only
 * @param funct menu-factory function passed to mxPopupMenu
 * @param showAll whether disabled menu items are still rendered
 */
Toolbar.prototype.addMenuHandler = function (elt, showLabels, funct, showAll) {
if (funct != null) {
var graph = this.editorUi.editor.graph;
var menu = null;
// Gate set by the mousedown handler below: false when this element's
// own menu is already open, so the following click only closes it.
var show = true;
mxEvent.addListener(elt, 'click', mxUtils.bind(this, function (evt) {
if (show && (elt.enabled == null || elt.enabled)) {
graph.popupMenuHandler.hideMenu();
menu = new mxPopupMenu(funct);
menu.div.className += ' geToolbarMenu';
menu.showDisabled = showAll;
menu.labels = showLabels;
menu.autoExpand = true;
// Position the popup directly below the toolbar element.
var offset = mxUtils.getOffset(elt);
menu.popup(offset.x, offset.y + elt.offsetHeight, null, evt);
this.editorUi.setCurrentMenu(menu, elt);
// Workaround for scrollbar hiding menu items
if (!showLabels && menu.div.scrollHeight > menu.div.clientHeight) {
menu.div.style.width = '40px';
}
// Hiding also resets the UI's current-menu bookkeeping and
// destroys the popup so it is rebuilt fresh on the next click.
menu.hideMenu = mxUtils.bind(this, function () {
mxPopupMenu.prototype.hideMenu.apply(menu, arguments);
this.editorUi.resetCurrentMenu();
menu.destroy();
});
// Extends destroy to reset global state
menu.addListener(mxEvent.EVENT_HIDE, mxUtils.bind(this, function () {
this.currentElt = null;
}));
}
show = true;
mxEvent.consume(evt);
}));
// Hides menu if already showing and prevents focus
mxEvent.addListener(elt, (mxClient.IS_POINTER) ? 'pointerdown' : 'mousedown',
mxUtils.bind(this, function (evt) {
show = this.currentElt != elt;
evt.preventDefault();
}));
}
};
/**
 * Releases the toolbar's document-level gesture listeners, if installed.
 */
Toolbar.prototype.destroy = function () {
  if (this.gestureHandler == null) {
    return;
  }
  mxEvent.removeGestureListeners(document, this.gestureHandler);
  this.gestureHandler = null;
};
|
"use strict";
// TypeScript __decorate helper (compiler-emitted boilerplate): applies an
// array of decorators to a class or member, preferring Reflect.decorate
// when available and falling back to applying the decorators manually in
// reverse order. Do not hand-edit generated helpers.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var core_1 = require("@angular/core");
var feeds_service_1 = require("../../services/feeds/feeds.service");
// Compiled (downleveled) Angular component - the constructor/prototype
// pattern below is TypeScript emitter output for a class.
var HistoryComponent = (function () {
// @param router injected Angular Router (stored; not used in this chunk)
// @param feedService service used to load the 'history' feed list
function HistoryComponent(router, feedService) {
this.router = router;
this.feedService = feedService;
}
// Loads the history feeds when the component initializes.
// NOTE(review): the promise has no rejection handler - a failed load is
// silently dropped.
HistoryComponent.prototype.ngOnInit = function () {
var _this = this;
this.feedService.getFeedsByType('history').then(function (feeds) { return _this.feeds = feeds; });
};
return HistoryComponent;
}());
// Attach the @Component decorator metadata emitted by the TS compiler.
HistoryComponent = __decorate([
core_1.Component({
moduleId: '3',
selector: 'app-history',
providers: [feeds_service_1.FeedsService],
templateUrl: './history.component.html',
styleUrls: ['./history.component.less']
})
], HistoryComponent);
exports.HistoryComponent = HistoryComponent;
|
/*!
* kontext
* http://lab.hakim.se/kontext
* MIT licensed
*
* Copyright (C) 2013 Hakim El Hattab, http://hakim.se
*/
/**
 * kontext: manages layered slide transitions inside <container>.
 * Expects child elements with class 'layer' and returns a small API
 * object (show/prev/next/getIndex/getTotal/changed).
 */
window.kontext = function( container ) {
// Dispatched when the current layer changes
var changed = new kontext.Signal();
// All layers in this instance of kontext
var layers = Array.prototype.slice.call( container.querySelectorAll( '.layer' ) );
// Flag if the browser is capable of handling our fancy transition
var capable = 'WebkitPerspective' in document.body.style ||
'MozPerspective' in document.body.style ||
'msPerspective' in document.body.style ||
'OPerspective' in document.body.style ||
'perspective' in document.body.style;
if( capable ) {
container.classList.add( 'capable' );
}
// Create dimmer elements to fade out preceding slides
layers.forEach( function( el, i ) {
if( !el.querySelector( '.dimmer' ) ) {
var dimmer = document.createElement( 'div' );
dimmer.className = 'dimmer';
el.appendChild( dimmer );
}
} );
/**
 * Transitions to and shows the target layer.
 *
 * @param target index of layer or layer DOM element
 * @param direction optional 'left'/'right'; inferred from the index
 * delta when omitted
 */
function show( target, direction ) {
// Make sure our listing of available layers is up to date
layers = Array.prototype.slice.call( container.querySelectorAll( '.layer' ) );
// Flag to CSS that we're ready to animate transitions
container.classList.add( 'animate' );
// Flag which direction
direction = direction || ( target > getIndex() ? 'right' : 'left' );
// Accept multiple types of targets
if( typeof target === 'string' ) target = parseInt( target );
if( typeof target !== 'number' ) target = getIndex( target );
// Enforce index bounds
// NOTE(review): clamps to layers.length rather than layers.length - 1;
// an out-of-range index therefore becomes a silent no-op through the
// existence guard below rather than snapping to the last layer.
target = Math.max( Math.min( target, layers.length ), 0 );
// Only navigate if were able to locate the target
if( layers[ target ] && !layers[ target ].classList.contains( 'show' ) ) {
layers.forEach( function( el, i ) {
el.classList.remove( 'left', 'right' );
el.classList.add( direction );
if( el.classList.contains( 'show' ) ) {
el.classList.remove( 'show' );
el.classList.add( 'hide' );
}
else {
el.classList.remove( 'hide' );
}
} );
layers[ target ].classList.add( 'show' );
changed.dispatch( layers[target], target );
}
}
/**
 * Shows the previous layer (a negative index wraps to the end).
 */
function prev() {
var index = getIndex() - 1;
show( index >= 0 ? index : layers.length + index, 'left' );
}
/**
 * Shows the next layer (wraps around to the first layer).
 */
function next() {
show( ( getIndex() + 1 ) % layers.length, 'right' );
}
/**
 * Retrieves the index of the current slide.
 * NOTE: the `return` inside the forEach callback does not break the
 * loop; if several layers match, the last match wins. Defaults to 0
 * when nothing matches.
 *
 * @param of [optional] layer DOM element which index is
 * to be returned
 */
function getIndex( of ) {
var index = 0;
layers.forEach( function( layer, i ) {
if( ( of && of == layer ) || ( !of && layer.classList.contains( 'show' ) ) ) {
index = i;
return;
}
} );
return index;
}
/**
 * Retrieves the total number of layers.
 */
function getTotal() {
return layers.length;
}
// API
return {
show: show,
prev: prev,
next: next,
getIndex: getIndex,
getTotal: getTotal,
changed: changed
};
};
/**
 * Minimal utility for dispatching signals (events).
 */
kontext.Signal = function() {
  this.listeners = [];
};
/**
 * Registers a callback to be invoked on dispatch.
 */
kontext.Signal.prototype.add = function( callback ) {
  this.listeners.push( callback );
};
/**
 * Unregisters a previously added callback; no-op when not registered.
 */
kontext.Signal.prototype.remove = function( callback ) {
  var i = this.listeners.indexOf( callback );
  if( i >= 0 ) this.listeners.splice( i, 1 );
};
/**
 * Invokes every listener with the arguments passed to dispatch.
 * FIX: iterates over a snapshot of the listener array (and terminates
 * each assignment with a semicolon instead of relying on ASI); with the
 * previous live-array forEach, a listener that removed itself during
 * dispatch caused the next listener in line to be skipped.
 */
kontext.Signal.prototype.dispatch = function() {
  var args = Array.prototype.slice.call( arguments );
  this.listeners.slice().forEach( function( f ) {
    f.apply( null, args );
  } );
};
|
// Basic site configuration read from server-rendered <meta> tags.
var HOST_URL = $("meta[name='website-url']").attr('content'), ENABLE_ASIDE = $("meta[name='enable_aside']").attr('content'), ROUTE_NAME = $("meta[name='current_route_name']").attr('content'), MOBILE_DEVICE = $("meta[name='mobile_device']").attr('content'), AES_IV = $("meta[name='aes-iv']").attr('content'), AES_ENCRYPT_KEY = $("meta[name='aes-encrypt-key']").attr('content'), DEFAULT_THEME = $("meta[name='default-theme']").attr('content');
// Global jQuery ajax defaults.
$.ajaxSetup({
// Send the CSRF token with every request.
headers: {
'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content')
},
});
// Page-ready bootstrap: initializes the menus and wires up the aside
// toggle, the "refresh console nodes" button and the change-password modal.
$(function () {
// Initialize menu highlighting, permission filtering and breadcrumbs.
initMenus();
// Look up the relevant page elements once.
var aside_obj = $("#kt_aside"), change_password = $("#kt-edit_admin_password"), change_password_modal = $("#kt-edit_admin_password_modal"), refresh_nodes = $("#kt-refresh_console_nodes");
// Only wire up the toggle when the aside sidebar exists on this page.
if (typeof (aside_obj) !== 'undefined' && aside_obj.length > 0) {
// React to the quick show/hide-aside switch.
$("#show_aside").on('change', function () {
// Checked state decides visibility.
if (this.checked) {
// Show the aside.
$("#acb_body").removeClass('aside-sticky aside-enabled').addClass('aside-sticky aside-enabled');
$("#kt_aside").removeClass('d-none');
} else {
// Hide the aside.
$("#acb_body").removeClass('aside-sticky aside-enabled');
$("#kt_aside").removeClass('d-none').addClass('d-none');
}
});
// When the aside is enabled site-wide and this is not a mobile device,
if (parseInt(ENABLE_ASIDE) === 1 && parseInt(MOBILE_DEVICE) === 0) {
// show the sidebar by default.
$("#show_aside").prop('checked', true).change();
}
}
// Wire up the "refresh console nodes" button when present.
if (typeof refresh_nodes !== 'undefined' && refresh_nodes.length > 0) {
// Click handler.
refresh_nodes.on('click', function () {
// Ask for confirmation first (refreshing takes 1-2 minutes).
confirmPopup('刷新节点预计需要1-2分钟,确认后请耐心等待,是否继续?', function (res) {
// Show a loading overlay while the request runs.
var loading = loadingStart(refresh_nodes, $('body')[0], '正在刷新节点...');
// Fire the refresh request.
buildRequest(refresh_nodes.attr('data-query-url'), {}, 'post', true, function (res) {
// Success toast.
alertToast('刷新成功', 2000, 'success');
// Reload to pick up the fresh node list.
window.location.reload();
}, function (res) {
// Failure toast with the server-provided message.
alertToast(res.msg, 2000, 'error');
}, function () {
// Always dismiss the loading overlay.
loadingStop(loading, refresh_nodes);
})
});
});
}
// Wire up the change-password dialog when present.
if (typeof change_password !== 'undefined' && change_password.length > 0) {
// Modal instance (created on each click below).
var change_password_modal_object;
// Open the modal on click.
change_password.on('click', function () {
// Static backdrop, no keyboard dismissal.
change_password_modal_object = new bootstrap.Modal(change_password_modal[0], {backdrop: 'static', keyboard: false});
// Show it.
change_password_modal_object.show();
});
// Confirm button: validate and submit the new password.
$("#kt-edit_admin_password_modal_confirm_button").on('click', function () {
// Collect inputs.
var _this = $(this), password = $("#kt-edit_admin_password_modal_new_password"), password_confirmed = $("#kt-edit_admin_password_modal_new_password_confirmed"), password_value = password.val(), confirmed_password_value = password_confirmed.val();
// New password must be at least 6 characters.
if (typeof password_value === 'undefined' || password_value.length < 6) {
// Show an inline validation hint.
password.parents('.form_item').append('<div class="fs-7 fw-bold text-danger my-2 validator_tip">密码需设置至少6位,请更新此项内容后再试</span></div>');
// Auto-remove the hint after 5 seconds.
setTimeout(function () {
password.parents('.form_item').find('.validator_tip').remove();
}, 5000);
// Abort.
return false;
}
// Confirmation must be at least 6 characters as well.
if (typeof confirmed_password_value === 'undefined' || confirmed_password_value.length < 6) {
// Show an inline validation hint.
password_confirmed.parents('.form_item').append('<div class="fs-7 fw-bold text-danger my-2 validator_tip">密码需设置至少6位,请更新此项内容后再试</span></div>');
// Auto-remove the hint after 5 seconds.
setTimeout(function () {
password_confirmed.parents('.form_item').find('.validator_tip').remove();
}, 5000);
// Abort.
return false;
}
// Both inputs must match.
if (password_value !== confirmed_password_value) {
// Show an inline validation hint.
password_confirmed.parents('.form_item').append('<div class="fs-7 fw-bold text-danger my-2 validator_tip">前后密码不一致</span></div>');
// Auto-remove the hint after 5 seconds.
setTimeout(function () {
password_confirmed.parents('.form_item').find('.validator_tip').remove();
}, 5000);
// Abort.
return false;
}
// Show a loading overlay on the confirm button.
var loading = loadingStart(_this, change_password_modal[0], '正在修改...');
// Submit the password change.
buildRequest(change_password_modal.attr('data-query-url'), {password:password_value, password_confirmed:confirmed_password_value}, 'post', true, function (res) {
// Success toast.
alertToast('修改成功', 2000, 'success');
// Close the modal.
change_password_modal_object.hide();
}, function (res) {
// Failure toast with the server-provided message.
alertToast(res.msg, 2000, 'error');
}, function () {
// Always dismiss the loading overlay.
loadingStop(loading, _this);
})
});
}
});
/**
 * Initializes navigation: removes menu entries the current admin has no
 * permission for, highlights the entry matching the current route, and
 * builds the breadcrumb trail and page title.
 */
function initMenus()
{
// Locate the page elements involved; `item` will hold the matched entry.
var item, aside = $("#kt_aside_menu_wrapper"), header = $("#kt_header_navs_wrapper"), title = $('title'), routers = $("#acb_routers"), acb_permissions = $("#acb_permissions"), kt_toolbar_breadcrumb_title = $("#kt_toolbar_breadcrumb_title");
// Permission filtering only runs when the permission payload is present.
if (typeof acb_permissions !== 'undefined' && acb_permissions.length > 0) {
// Parse the JSON permission list embedded in the page.
acb_permissions = JSON.parse(acb_permissions.text().trim());
// Walk every link-type menu entry in the header.
header.find('.menu-link[data-menu-type="link"]').each(function () {
// Route names this entry maps to (comma-separated attribute).
var route_names = $(this).attr('data-route-names'), has_permission = false;
// Skip entries without route names.
if (typeof route_names !== 'undefined' && route_names.length > 0) {
// Split the list.
route_names = route_names.split(',');
// The entry stays visible when any one of its routes is permitted.
$.each(route_names, function (i, item) {
// Permitted?
if ($.inArray(item, acb_permissions) >= 0) {
// Mark as permitted.
has_permission = true;
// Stop scanning.
return false;
}
});
// Remove entries the admin cannot access.
if (!has_permission) {
// Also remove the matching aside entry when the sidebar exists.
if (typeof aside !== 'undefined' && aside.length > 0) {
// Drop the aside element with the same data-did.
aside.find('.aside-obj[data-did="'+$(this).attr('data-did')+'"]').remove();
}
// Drop the header entry itself.
$(this).parents('.menu-obj').eq(0).remove();
}
}
});
// Prune parent menus that lost all of their children.
clearWithoutPermissionMenus();
}
// Find the menu entry matching the current route.
header.find('.menu-link').each(function () {
// Route names this entry maps to.
var route_names = $(this).attr('data-route-names');
// Skip entries without route names.
if (typeof route_names !== 'undefined' && route_names.length > 0) {
// Split the list.
route_names = route_names.split(',');
// Match against 'get&<current route name>'.
if ($.inArray(('get&'+ROUTE_NAME), route_names) >= 0) {
// Remember the match.
item = $(this);
// Stop scanning.
return false;
}
}
});
// Highlight the matched entry and build the breadcrumb trail.
if (typeof (item) !== 'undefined' && item.length > 0) {
// Expand the menu chain and collect breadcrumbs (innermost first).
var breadcrumbs = showMenu(item.attr('data-did'), []), kt_toolbar_breadcrumbs = $("#kt_toolbar_breadcrumbs");
// Render the breadcrumbs when any were collected.
if (typeof (breadcrumbs) !== 'undefined' && breadcrumbs.length > 0) {
// Prepending puts the outermost ancestor first in the bar.
$.each(breadcrumbs, function (i, item) {
// The innermost entry (index 0) is plain text, ancestors are links.
if (parseInt(i) === 0) {
// Current page crumb (no link).
kt_toolbar_breadcrumbs.prepend('<li class="breadcrumb-item text-gray-500">'+item['name']+'</li>');
} else {
// Ancestor crumb with a link.
kt_toolbar_breadcrumbs.prepend('<li class="breadcrumb-item text-gray-600"><a href="'+item['link']+'" class="text-gray-600 text-hover-primary">'+item['name']+'</a></li>');
}
});
}
}
// Use the current route's display name as the breadcrumb title.
kt_toolbar_breadcrumb_title.text(JSON.parse(routers.text().trim())['get&'+ROUTE_NAME]);
// Fall back to the breadcrumb title when the document has no title yet.
if (title.text().length <= 0) {
// Copy it over.
title.text(kt_toolbar_breadcrumb_title.text());
}
// The embedded route map is no longer needed.
routers.remove();
}
/**
 * Removes menu containers that no longer have any permitted children
 * (run after the permission filtering in initMenus).
 * @returns {boolean} always true
 */
function clearWithoutPermissionMenus()
{
// Aside wrapper (may not exist on every page).
var aside = $("#kt_aside_menu_wrapper");
// Drop tab-type menus whose accordion lost all of its children.
$("#kt_header_navs_wrapper").find('.menu-obj[data-menu-type="tab"]').each(function () {
// Only accordion-style entries can have children.
if ($(this).hasClass('menu-lg-down-accordion')) {
// Menu id.
var did = $(this).attr('data-did');
// No remaining children?
if ($(this).find('.menu-obj[data-parent-did="'+did+'"]').length <= 0) {
// Also remove the matching aside entry when the sidebar exists.
if (typeof aside !== 'undefined' && aside.length > 0) {
// Drop the aside element with the same data-did.
aside.find('.aside-obj[data-did="'+did+'"]').remove();
}
// Drop the tab itself.
$(this).remove();
}
}
});
// Drop nav-type menus whose tab pane lost all of its tabs.
$("#menu_tops").find('.menu-obj[data-menu-type="nav"]').each(function () {
// Menu id.
var did = $(this).attr('data-did');
// No remaining tabs in the pane?
if ($("#kt_header_navs_tab_"+did).find('.menu-obj[data-menu-type="tab"]').length <= 0) {
// Also remove the matching aside entry when the sidebar exists.
if (typeof aside !== 'undefined' && aside.length > 0) {
// Drop the aside element with the same data-did.
aside.find('.aside-obj[data-did="'+did+'"]').remove();
}
// Drop the nav entry itself.
$(this).remove();
}
});
// Done.
return true;
}
/**
 * Recursively expands/highlights the menu chain for the given menu id,
 * collecting breadcrumb entries while walking up the parent chain.
 * @param menu_did menu id to expand (invalid/absent ids end the recursion)
 * @param breadcrumbs accumulator array of {name, link} entries
 * @returns {*} the breadcrumbs array (innermost entry first)
 */
function showMenu(menu_did, breadcrumbs)
{
// Only proceed for a valid positive menu id.
if (typeof (menu_did) !== 'undefined' && parseInt(menu_did) > 0) {
// Locate the menu element for this id.
var item, aside = $("#kt_aside_menu_wrapper");
// Mobile and desktop render the header menus in different wrappers.
if (parseInt(MOBILE_DEVICE) === 1) {
// Mobile wrapper.
item = $("#kt_header_navs").find('.menu-obj[data-did="'+parseInt(menu_did)+'"]')
} else {
// Desktop wrapper.
item = $("#kt_header").find('.menu-obj[data-did="'+parseInt(menu_did)+'"]')
}
// Highlight the matching aside entry when the sidebar exists.
if (typeof aside !== 'undefined' && aside.length > 0) {
// Matching aside element.
var aside_item = aside.find('.aside-obj[data-did="'+menu_did+'"]');
// Behaviour depends on the aside entry type.
switch (aside_item.attr('data-menu-type')) {
case 'accordion':
// Expand the accordion section.
aside_item.removeClass('here show').addClass('here show');
break;
case 'item':
aside_item.find('.menu-link').removeClass('active').addClass('active');
break;
}
}
// Highlight the header entry when it was found.
if (typeof (item) !== 'undefined' && item.length > 0) {
// Behaviour depends on the menu entry type.
switch (item.attr('data-menu-type')) {
case 'tab':
// Expand the tab.
item.removeClass('here show').addClass('here show');
// Activate the corresponding link.
$("#kt_header_navs_wrapper").find('.menu-link[data-did="'+menu_did+'"]').removeClass('active').addClass('active');
break;
case 'nav':
// Activate the nav link and its target pane.
item.find('.nav-link').removeClass('active').addClass('active');
$(item.find('.nav-link').attr('href')).removeClass('active show').addClass('active show');
break;
default:
// Accordion-style entries expand, plain links activate.
if (item.hasClass('menu-lg-down-accordion')) {
// Expand.
item.removeClass('here show').addClass('here show');
} else {
// Activate.
$("#kt_header_navs_wrapper").find('.menu-link[data-did="'+menu_did+'"]').removeClass('active').addClass('active');
}
break;
}
// Record a breadcrumb entry for this level.
breadcrumbs.push({name: item.attr('data-guard-name'), link: item.attr('data-redirect-uri')});
// Recurse into the parent menu.
showMenu(item.attr('data-parent-did'), breadcrumbs);
}
}
// Hand back the accumulated breadcrumbs.
return breadcrumbs;
}
/**
 * Prepends the given HTML to the dashboard toolbar area, optionally
 * removing matching existing items first, then re-initializes the
 * Bootstrap tooltips.
 * @param html HTML string to insert (no-op when empty/undefined)
 * @param remove_target optional selector of existing items to remove first
 * @returns {boolean} always true
 */
function moveToToolbar(html, remove_target)
{
  var toolbar = $("#kt_dashboard_toolbar_items");
  // Nothing to insert - bail out early.
  if (typeof html === 'undefined' || html.length <= 0) {
    return true;
  }
  // Remove previously inserted duplicates when a selector was given.
  if (typeof remove_target !== 'undefined' && remove_target.length > 0) {
    toolbar.find(remove_target).remove();
  }
  toolbar.prepend(html);
  // Newly inserted elements need their tooltips (re)initialized.
  KTApp.initBootstrapTooltips();
  return true;
}
/**
 * Builds a random alphanumeric string of the requested length.
 * Uses Math.random, so it is NOT suitable for security-sensitive tokens.
 * @param length desired number of characters (0 or invalid yields '')
 * @returns {string} random string drawn from [0-9a-zA-Z]
 */
function randomString(length) {
  var alphabet = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
  var out = '';
  while (out.length < length) {
    out += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
  }
  return out;
}
/**
 * Smoothly scrolls a container so the given element comes into view,
 * leaving ~160px of headroom unless the element sits near the top.
 * @param o jQuery element to scroll to
 * @param container optional jQuery scroll container; defaults to the page
 */
function scrollToObject(o, container)
{
  // BUG FIX: the original tested `container.lenhth` (typo), which is always
  // undefined, so an empty jQuery set was never replaced by the default
  // page container and animate() silently ran on the empty set.
  if (typeof container === 'undefined' || container.length <= 0) {
    // Scroll the page itself.
    container = $('html , body');
  }
  // Target offset from the top of the document.
  var of = o.offset().top;
  // Keep some headroom above the target unless it is already near the top.
  container.animate({scrollTop: parseInt(of > 200 ? (of - 160) : of)}, 1000)
}
/**
 * Shows a KTBlockUI loading overlay over <target> and, when the trigger
 * is a button, switches it into its loading-indicator state.
 * @param trigger jQuery element that triggered the load (defaults to body)
 * @param target element to cover with the overlay (defaults to body)
 * @param message loading message (defaults to '请稍后...')
 * @param theme overlay color theme (defaults to 'secondary')
 * @returns {KTBlockUI} overlay instance (pass to loadingStop later)
 */
function loadingStart(trigger, target, message, theme)
{
// Default the trigger to the page body.
if (typeof (trigger) === 'undefined' || trigger.length <= 0) {
// Fallback.
trigger = $('body');
}
// Default the overlay target to the page body as well.
if (typeof (target) === 'undefined' || target.length <= 0) {
// Fallback.
target = $('body');
}
// Default theme.
if (typeof (theme) === 'undefined' || theme.length <= 0) {
// Fallback.
theme = 'secondary';
}
// Default message.
if (typeof (message) === 'undefined' || message.length <= 0) {
// Fallback.
message = '请稍后...';
}
// Build the overlay instance and remember the trigger's tag name.
var blockUI = new KTBlockUI(target, {
overlayClass: "bg-"+theme+" bg-opacity-25",
message: '<div class="blockui-message"><span class="spinner-border text-primary"></span>Loading '+message+'</div>'
}), tagName = trigger[0].tagName;
// Only act when not already blocked.
if (!blockUI.isBlocked()) {
// Button triggers additionally get an inline progress indicator.
switch (tagName) {
case 'BUTTON':
// Inject the indicator markup only once per button.
if (trigger.find('.indicator-progress').length <= 0) {
// Keep the original label alongside the progress spinner.
trigger.html('<span class="indicator-label">'+trigger.text()+'</span><span class="indicator-progress">'+message+'<span class="spinner-border spinner-border-sm align-middle ms-2"></span></span>');
}
// Switch the button into its loading state.
trigger.attr("data-kt-indicator", "on");
break;
}
// Show the overlay.
blockUI.block();
}
// Hand the instance back for loadingStop.
return blockUI;
}
/**
 * Dismisses a loading overlay previously created by loadingStart and
 * restores a button trigger to its normal state.
 * @param blockUI KTBlockUI instance returned by loadingStart
 * @param trigger element that triggered the loading state (defaults to body)
 * @returns {boolean} always true
 */
function loadingStop(blockUI, trigger) {
  // Default the trigger to the page body.
  if (typeof (trigger) === 'undefined' || trigger.length <= 0) {
    trigger = $('body');
  }
  // Buttons carry an indicator attribute while loading; clear it again.
  var tagName = trigger[0].tagName;
  if (tagName === 'BUTTON') {
    trigger.removeAttr("data-kt-indicator");
  }
  // Release the overlay when it is still showing, then dispose of it.
  if (blockUI.isBlocked()) {
    blockUI.release();
  }
  blockUI.destroy();
  return true;
}
/**
 * Wraps request parameters into a single AES-encrypted payload.
 * @param encrypt_params plain object of request parameters
 * @returns {{}|{__encrypt__}} `{__encrypt__: <ciphertext>}` for object
 * input, `{}` for anything else
 */
function encryptFormData(encrypt_params)
{
  // Non-object input yields an empty payload.
  if (typeof (encrypt_params) !== 'object') {
    return {};
  }
  // FIX: work on a shallow copy so the caller's object is not mutated
  // (the original added __RANDOM_STRING__ directly to the caller's object).
  var payload = $.extend({}, encrypt_params);
  // Pad empty requests with a random filler key - presumably so the
  // ciphertext is never derived from a constant empty body.
  if ($.isEmptyObject(payload)) {
    payload['__RANDOM_STRING__'] = randomString(3);
  }
  // Serialize and encrypt with the site-wide key/IV.
  var encrypt_string = JSON.stringify(payload);
  return {'__encrypt__': encrypt(encrypt_string, AES_ENCRYPT_KEY, AES_IV)};
}
/**
 * AES-CBC encrypts a string via CryptoJS.
 * NOTE(review): ZeroPadding is non-standard and is ambiguous for
 * plaintexts that themselves end in NUL bytes - the decrypting side
 * must use the same padding scheme.
 * @param str plaintext to encrypt
 * @param key AES key (16 characters, per the original comments)
 * @param iv initialization vector (16 characters)
 * @returns {*} ciphertext serialized via CryptoJS's default toString()
 */
function encrypt(str, key, iv) {
// Key: 16 characters.
key = CryptoJS.enc.Utf8.parse(key);
// IV: 16 characters.
iv = CryptoJS.enc.Utf8.parse(iv);
// CBC mode with zero-byte padding.
return CryptoJS.AES.encrypt(str, key, {
iv: iv,
mode: CryptoJS.mode.CBC,
padding: CryptoJS.pad.ZeroPadding
}).toString();
}
/**
 * Shows a toastr notification.
 * @param msg message text
 * @param timeout display duration in ms; <= 0 keeps the toast open until dismissed
 * @param theme one of success|info|error|warning (default 'warning')
 * @param title optional toast title
 * @param position optional toastr position class (default 'toastr-top-center')
 * @returns {*} always true
 */
function alertToast(msg, timeout, theme, title, position) {
  // Reset the base options on every call (toastr.options is global state).
  toastr.options = {
    "closeButton": false,
    "debug": false,
    "newestOnTop": false,
    "progressBar": true,
    "positionClass": "toastr-top-center",
    "preventDuplicates": false,
    "onclick": null,
    "showDuration": "300",
    "hideDuration": "1000",
    "timeOut": "5000",
    "extendedTimeOut": "1000",
    "showEasing": "swing",
    "hideEasing": "linear",
    "showMethod": "fadeIn",
    "hideMethod": "fadeOut"
  };
  // Defaults for omitted arguments.
  if (typeof (timeout) === 'undefined') {
    timeout = 0;
  }
  if (typeof (theme) === 'undefined') {
    theme = 'warning';
  }
  if (typeof (position) === 'undefined') {
    position = 'toastr-top-center';
  }
  // BUG FIX: the position argument was previously checked but never
  // applied, so a caller-supplied position was silently ignored.
  toastr.options.positionClass = position;
  // Persistent toasts (timeout <= 0) stay open until dismissed.
  if (parseInt(timeout) <= 0) {
    toastr.options.progressBar = false;
    toastr.options.extendedTimeOut = 0;
  } else {
    // Allow tap-to-dismiss and extend slightly after hover.
    toastr.options.tapToDismiss = true;
    toastr.options.extendedTimeOut = parseInt(timeout) + 500;
  }
  // Apply the display duration.
  toastr.options.timeOut = parseInt(timeout);
  // Route to the matching toastr method; unknown themes fall back to warning.
  switch (theme) {
    case 'info':
      toastr.info(msg, title);
      break;
    case 'success':
      toastr.success(msg, title);
      break;
    case 'error':
      toastr.error(msg, title);
      break;
    default:
      toastr.warning(msg, title);
      break;
  }
  return true;
}
/**
 * Converts jQuery serializeArray()-style output ([{name, value}, ...])
 * into a plain {name: value} object. Later entries overwrite earlier
 * ones, so multi-valued fields keep only their last value.
 * @param data array of {name, value} pairs
 * @returns {{}} plain object keyed by field name
 */
function serializeFormData(data)
{
  var result = {};
  $.each(data, function (i, field) {
    result[field.name] = field.value;
  });
  return result;
}
/**
 * Loads an external CSS file once, then runs the callback.
 * @param href stylesheet URL
 * @param callback optional function invoked after the check/injection
 */
function createExtraCss(href, callback)
{
    // Inject the stylesheet only if no <link> with this href exists yet.
    if ($("head").find("link[href='"+href+"']").length <= 0) {
        $("<link>").attr({rel: "stylesheet", type: "text/css", href: href}).appendTo("head");
    }
    // The callback runs whether or not the stylesheet was newly added
    // (previously duplicated in both branches).
    callCustomerFunc(callback);
}
/**
 * Loads an external JS file unless its object has already been instantiated,
 * then runs the callback.
 * @param file script URL
 * @param object value used to detect whether the script already loaded
 * @param callback optional function invoked once the script is available
 */
function createExtraJs(file, object, callback)
{
    // Already instantiated (neither undefined nor the string 'undefined'):
    // fire the callback immediately.
    if (typeof (object) != 'undefined' && object !== 'undefined') {
        callCustomerFunc(callback);
        return;
    }
    // Otherwise fetch and run the script, then fire the callback.
    $.getScript(file, function () {
        callCustomerFunc(callback);
    });
}
/**
 * Invokes a caller-supplied function if one was provided.
 * @param func candidate callback
 * @returns {*} the callback's return value, or false when func is not a function
 */
function callCustomerFunc(func)
{
    // Reject anything that is not callable.
    if (typeof (func) != 'function') {
        return false;
    }
    return func();
}
/**
 * Builds and dispatches a request.
 * @param query_url request URL
 * @param params request parameters
 * @param method HTTP method ('POST', anything else falls back to GET)
 * @param is_ajax whether to send via ajax instead of navigating
 * @param callback success callback (ajax) / receives the final URL (non-ajax)
 * @param fail_callback invoked when an ajax response reports failure
 * @param after_ajax_callback invoked after every ajax response, before state checks
 * @returns {boolean} false when no URL was supplied
 */
function buildRequest(query_url, params, method, is_ajax, callback, fail_callback, after_ajax_callback) {
    // A non-empty URL is required.
    if (typeof (query_url) !== 'undefined' && query_url.length > 0) {
        if (is_ajax) {
            // Shared response handler for both GET and POST.
            var func = function (res) {
                // Switch ajax to synchronous while the callbacks run.
                $.ajaxSettings.async = false;
                if (typeof (after_ajax_callback) == 'function') {
                    after_ajax_callback();
                }
                // Dispatch on the server-reported state flag.
                if (res.state) {
                    // Restore asynchronous ajax before the success callback.
                    $.ajaxSettings.async = true;
                    if (typeof (callback) == 'function') {
                        return callback(res);
                    }
                } else {
                    // Restore asynchronous ajax before the failure callback.
                    $.ajaxSettings.async = true;
                    if (typeof (fail_callback) == 'function') {
                        return fail_callback(res);
                    }
                }
            }
            if (method.toUpperCase() === 'POST') {
                // POST with encrypted parameters.
                $.post(query_url, encryptFormData(params), function (res) {
                    return func(res);
                }, 'json');
            } else {
                // GET with encrypted parameters.
                $.get(query_url, encryptFormData(params), function (res) {
                    return func(res);
                }, 'json');
            }
        } else {
            // Non-ajax: encode parameters onto the URL, then callback or navigate.
            if (typeof (params) !== 'undefined' && !$.isEmptyObject(params)) {
                var url_params = [];
                $.each(params, function (i, item) {
                    url_params.push(i + '=' + item);
                })
                // BUGFIX: use '&' when the URL already has a query string
                // (previously the first parameter was glued on with no separator).
                query_url += (query_url.indexOf('?') >= 0 ? '&' : '?') + url_params.join('&');
            }
            if (typeof (callback) == 'function') {
                return callback(query_url);
            } else {
                // No callback: navigate to the built URL.
                window.location.href = query_url;
            }
        }
    }
    // Missing URL: report failure.
    return false;
}
/**
 * Shows a SweetAlert2 confirmation dialog and routes the choice to a callback.
 * @param tip message text to display
 * @param success invoked when the user confirms
 * @param fail invoked otherwise (cancel, or the timer expiring)
 * @param timeout auto-dismiss delay in milliseconds (default 10000)
 * @returns {boolean} true once the dialog has been shown
 */
function confirmPopup(tip, success, fail, timeout) {
    // Fall back to a ten-second auto-dismiss.
    if (typeof (timeout) === 'undefined') {
        timeout = 10000;
    }
    Swal.fire({
        text: tip,
        icon: "info",
        buttonsStyling: false,
        position: 'center',
        timer: timeout,
        timerProgressBar: true,
        showCancelButton: true,
        confirmButtonText: "确定",
        cancelButtonText: '取消',
        allowOutsideClick: false,
        customClass: {
            confirmButton: "btn btn-primary btn-sm",
            cancelButton: 'btn btn-danger btn-sm'
        },
    }).then(function (result) {
        // Dispatch to whichever handler matches the user's choice; missing
        // handlers default to a simple true.
        if (result.isConfirmed) {
            return typeof (success) !== 'undefined' && success ? success() : true;
        }
        return typeof (fail) !== 'undefined' && fail ? fail() : true;
    });
    return true;
}
|
var builder = require('botbuilder');
// Two-step waterfall dialog: prompt the user for change ids, then trigger the build.
module.exports = [
    function (session, args, next) {
        // Step 1: ask for change ids or numbers (space-separated, at most 4).
        builder.Prompts.text(session, "Please input change Ids or numbers. Separate by space. Max count is 4");
    },
    function (session, results) {
        // Step 2: acknowledge the input and run the build-trigger script with it.
        session.send("Start to trigger build with change id = " + results.response);
        runscript(session, results.response);
    }
];
/**
 * Parses change identifiers out of the user's message and runs the Python
 * build-trigger script with them, reporting progress back to the session.
 * @param session bot session used for user feedback
 * @param content raw user input containing change ids or numbers
 */
function runscript(session, content) {
    // Prefer change ids of the form I<40 hex chars>; fall back to bare numbers.
    var changes = content.match(/\b(I[a-f0-9]{40})\b/g);
    if (!changes) {
        changes = content.match(/\d+/g);
    }
    // Reject empty input and anything over the four-change limit.
    if (!changes || changes.length > 4) {
        session.send("change count should <= 4");
        session.endDialog();
        return;
    }
    var PythonShell = require('python-shell');
    var shell = new PythonShell('triggerbuild.py', {
        mode: 'text',
        pythonPath: '/usr/bin/python',
        scriptPath: './python',
        args: changes
    });
    // Forward "no build pass" diagnostics to the user; log everything else.
    shell.on('message', function (line) {
        if (line.startsWith('There is no build pass project')) {
            session.send(line);
        } else {
            console.log(line);
        }
    });
    // Report the final outcome and close the dialog either way.
    shell.end(function (err) {
        if (err) {
            console.error(err);
            session.send("Run script failed.");
        } else {
            session.send("Trigger build done");
        }
        session.endDialog();
    });
}
|
// @flow
/**
* This file handles all logic for converting string-based configuration references into loaded objects.
*/
import buildDebug from "debug";
import resolve from "resolve";
import path from "path";
const debug = buildDebug("babel:config:loading:files:plugins");
// Strips the explicit "module:" prefix that opts a name out of standardization.
const EXACT_RE = /^module:/;
// Bare names ("foo") that should receive the "babel-plugin-"/"babel-preset-" prefix.
const BABEL_PLUGIN_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-plugin-)/;
const BABEL_PRESET_PREFIX_RE = /^(?!@|module:|[^/]+\/|babel-preset-)/;
// "@babel/foo" names missing the "plugin-"/"preset-" segment.
const BABEL_PLUGIN_ORG_RE = /^(@babel\/)(?!plugin-|[^/]+\/)/;
const BABEL_PRESET_ORG_RE = /^(@babel\/)(?!preset-|[^/]+\/)/;
// Scoped names outside @babel that lack a babel-plugin/babel-preset segment.
const OTHER_PLUGIN_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-plugin(?:-|\/|$)|[^/]+\/)/;
const OTHER_PRESET_ORG_RE = /^(@(?!babel\/)[^/]+\/)(?![^/]*babel-preset(?:-|\/|$)|[^/]+\/)/;
// A bare scope ("@foo") that expands to "@foo/babel-plugin" or "@foo/babel-preset".
const OTHER_ORG_DEFAULT_RE = /^(@(?!babel$)[^/]+)$/;
// Resolves a user-supplied plugin name to a file path relative to dirname.
export function resolvePlugin(name: string, dirname: string): string | null {
  return resolveStandardizedName("plugin", name, dirname);
}
// Resolves a user-supplied preset name to a file path relative to dirname.
export function resolvePreset(name: string, dirname: string): string | null {
  return resolveStandardizedName("preset", name, dirname);
}
export function loadPlugin(
name: string,
dirname: string,
): { filepath: string, value: mixed } {
const filepath = resolvePlugin(name, dirname);
if (!filepath) {
throw new Error(`Plugin ${name} not found relative to ${dirname}`);
}
const value = requireModule("plugin", filepath);
debug("Loaded plugin %o from %o.", name, dirname);
return { filepath, value };
}
export function loadPreset(
name: string,
dirname: string,
): { filepath: string, value: mixed } {
const filepath = resolvePreset(name, dirname);
if (!filepath) {
throw new Error(`Preset ${name} not found relative to ${dirname}`);
}
const value = requireModule("preset", filepath);
debug("Loaded preset %o from %o.", name, dirname);
return { filepath, value };
}
function standardizeName(type: "plugin" | "preset", name: string) {
// Let absolute and relative paths through.
if (path.isAbsolute(name)) return name;
const isPreset = type === "preset";
return (
name
// foo -> babel-preset-foo
.replace(
isPreset ? BABEL_PRESET_PREFIX_RE : BABEL_PLUGIN_PREFIX_RE,
`babel-${type}-`,
)
// @babel/es2015 -> @babel/preset-es2015
.replace(
isPreset ? BABEL_PRESET_ORG_RE : BABEL_PLUGIN_ORG_RE,
`$1${type}-`,
)
// @foo/mypreset -> @foo/babel-preset-mypreset
.replace(
isPreset ? OTHER_PRESET_ORG_RE : OTHER_PLUGIN_ORG_RE,
`$1babel-${type}-`,
)
// @foo -> @foo/babel-preset
.replace(OTHER_ORG_DEFAULT_RE, `$1/babel-${type}`)
// module:mypreset -> mypreset
.replace(EXACT_RE, "")
);
}
// Resolves the standardized plugin/preset name via `resolve`. On
// MODULE_NOT_FOUND, probes alternative spellings and appends likely-intent
// hints to the error message before rethrowing.
function resolveStandardizedName(
  type: "plugin" | "preset",
  name: string,
  dirname: string = process.cwd(),
) {
  const standardizedName = standardizeName(type, name);
  try {
    return resolve.sync(standardizedName, { basedir: dirname });
  } catch (e) {
    // Only augment resolution failures; anything else propagates untouched.
    if (e.code !== "MODULE_NOT_FOUND") throw e;
    // Hint 1: the raw (unstandardized) name resolves — suggest "module:".
    if (standardizedName !== name) {
      let resolvedOriginal = false;
      try {
        resolve.sync(name, { basedir: dirname });
        resolvedOriginal = true;
      } catch (e2) {}
      if (resolvedOriginal) {
        e.message += `\n- If you want to resolve "${name}", use "module:${name}"`;
      }
    }
    // Hint 2: the @babel-scoped variant resolves — suggest it.
    let resolvedBabel = false;
    try {
      resolve.sync(standardizeName(type, "@babel/" + name), {
        basedir: dirname,
      });
      resolvedBabel = true;
    } catch (e2) {}
    if (resolvedBabel) {
      e.message += `\n- Did you mean "@babel/${name}"?`;
    }
    // Hint 3: the name resolves as the opposite type (plugin vs preset).
    let resolvedOppositeType = false;
    const oppositeType = type === "preset" ? "plugin" : "preset";
    try {
      resolve.sync(standardizeName(oppositeType, name), { basedir: dirname });
      resolvedOppositeType = true;
    } catch (e2) {}
    if (resolvedOppositeType) {
      e.message += `\n- Did you accidentally pass a ${oppositeType} as a ${type}?`;
    }
    throw e;
  }
}
// Names of modules currently being required, used to detect a plugin/preset
// that (transitively) tries to load itself while being compiled.
const LOADING_MODULES = new Set();
// Requires a resolved plugin/preset file, guarding against reentrant loads.
function requireModule(type: string, name: string): mixed {
  if (LOADING_MODULES.has(name)) {
    throw new Error(
      `Reentrant ${type} detected trying to load "${name}". This module is not ignored ` +
        "and is trying to load itself while compiling itself, leading to a dependency cycle. " +
        'We recommend adding it to your "ignore" list in your babelrc, or to a .babelignore.',
    );
  }
  try {
    LOADING_MODULES.add(name);
    // $FlowIssue
    return require(name);
  } finally {
    // Always release the guard, even when the require throws.
    LOADING_MODULES.delete(name);
  }
}
|
import React from 'react';
import Helmet from 'react-helmet';
// `graphql` is required by the exported page query below; it had been
// commented out along with `Link`, leaving `graphql` undefined at runtime.
import { graphql } from 'gatsby';
// import { Link } from 'gatsby'
import '../styles/blogpost.css';
import Layout from '../components/shared/Layout';
const BlogPosts = ({ data }) => {
const blogPosts = data.allContentfulSingleBlogPost.edges;
const formatDateTime = (dateTime) => {
let date = new Date(dateTime);
console.log(date.toString());
return date.toString();
};
return (
<div id="blog-background">
<Helmet>
<meta charSet="utf-8" />
<title>Blog by @morgan.codes</title>
</Helmet>
<Layout>
<div className="coming-soon-container">
<p className="coming-soon-text">Blog coming soon...</p>
</div>
{/* <h1 className="blog-header"><Link className="blog-link" to="/">M</Link></h1>
<div className='blogposts'>
{blogPosts.map(({ node: post }) => (
<div key={post.id} className="post-container">
<img className="post-image" src={post.previewImage.file.url}></img>
<div className="post-body">
<Link className="blogpost-title" to={`/blogpost/${post.slug}`}>{post.title}</Link>
<div className="about-blog">{post.subtitle}</div>
<div className="date-time">{formatDateTime(post.dateTime)}</div>
</div>
</div>
))}
<span className="mgBtm__24" />
</div> */}
</Layout>
</div>
);
};
export default BlogPosts;
// Gatsby page query: fetches up to 1000 Contentful blog posts; the result is
// supplied to the component above as its `data` prop.
export const query = graphql`
  query BlogPostsPageQuery {
    allContentfulSingleBlogPost(limit: 1000) {
      edges {
        node {
          title
          id
          description
          author
          subtitle
          slug
          dateTime
          previewImage {
            file {
              url
              fileName
              contentType
            }
          }
        }
      }
    }
  }
`;
|
window.onload = function () {
  // Cached user snapshot previously written to localStorage.
  var wkUserData = JSON.parse(localStorage.wkUserData);
  fullfillUserData();
  if (
    wkUserData.userPublicKey === undefined ||
    wkUserData.userPublicKey == ""
  ) {
    // First visit (no API key stored yet): show the info message.
    document.querySelector(".info").style.display = "inline";
  } else {
    // Returning user: refresh the cached data, then re-render it.
    requestUserData(false, function () {
      wkUserData = JSON.parse(localStorage.wkUserData);
      fullfillUserData();
    });
  }
  // Show the Gravatar image only if one exists (?d=404 makes missing avatars
  // return 404 instead of a placeholder).
  // NOTE(review): served over plain http — consider https to avoid mixed content.
  var xhr = new XMLHttpRequest();
  xhr.open(
    "GET",
    "http://www.gravatar.com/avatar/" + wkUserData.gravatar + "?d=404",
    true
  );
  xhr.onreadystatechange = function () {
    if (xhr.readyState == 4 && xhr.status == 200) {
      document.getElementById("gravatar").src =
        "http://www.gravatar.com/avatar/" + wkUserData.gravatar;
    }
  };
  xhr.send();
  // Links open inside the web-container page or in a new Chrome tab,
  // depending on the user's settings.
  var inApp = wkUserData.inAppNavigation;
  // Wires a button to either in-app navigation or a new Chrome tab
  // (previously duplicated for each of the three buttons).
  function bindNavigation(elementId, url) {
    document.getElementById(elementId).onclick = function () {
      if (inApp) {
        localStorage.toLink = url;
      } else {
        chrome.tabs.create({ url: url });
      }
    };
  }
  bindNavigation("toLessons", "https://www.wanikani.com/lesson/session");
  bindNavigation("toReviews", "https://www.wanikani.com/review/session");
  bindNavigation("toDashboard", "https://www.wanikani.com/login");
  // Renders the cached user data into the popup. textContent is used instead
  // of innerHTML so stored values cannot inject markup into the page.
  function fullfillUserData() {
    document.getElementById("username").textContent = wkUserData.username;
    document.getElementById("level").textContent = wkUserData.level;
    document.getElementById("nbLessons").textContent = wkUserData.nbLessons;
    document.getElementById("nbReviews").textContent = wkUserData.nbReviews;
    document.getElementById("reviewTime").textContent = wkUserData.nextReview;
    document.getElementById("srsNbApprentice").textContent =
      wkUserData.srsNbApprentice;
    document.getElementById("srsNbGuru").textContent = wkUserData.srsNbGuru;
    document.getElementById("srsNbMaster").textContent = wkUserData.srsNbMaster;
    document.getElementById("srsNbEnlighten").textContent =
      wkUserData.srsNbEnlighten;
    document.getElementById("srsNbBurned").textContent = wkUserData.srsNbBurned;
    if (wkUserData.nbReviews > 0 || !wkUserData.nextReview) {
      // Reviews are available (or there is no upcoming review to announce).
      document.querySelector("#reviews").style.display = "block";
      document.querySelector("#nextReviews").style.display = "none";
    } else {
      // No reviews available: show when the next one unlocks instead.
      document.querySelector("#reviews").style.display = "none";
      document.querySelector("#nextReviews").style.display = "block";
    }
  }
};
|
import React from 'react'
import PropTypes from 'prop-types'
import EngIqVideo from "../../static/videos/eng-iq.mp4"
function NewlineText(props) {
const text = props.text;
return text.split('\n').map(str => <p>{str}</p>);
}
// Renders each grid item as a row: heading and multi-line text on the left,
// a looping autoplay product video on the right.
const AboutGrid = ({ gridItems }) => (
  <div className="columns is-multiline">
    {gridItems.map((item) => (
      <div key={item.text} className="columns is-multiline is-vcentered">
        <div className="column is-6">
          <h1>{item.name}</h1>
          <NewlineText text={item.text}></NewlineText>
        </div>
        <div className="column is-6">
          <div className="is-desktop is-vcentered">
            <video autoPlay muted loop playsInline>
              <source src={EngIqVideo} type="video/mp4"/>
            </video>
          </div>
        </div>
      </div>
    ))}
  </div>
)
// NOTE(review): the declared shape lists `image`, but the component reads
// `name` and `text` only — confirm the intended prop contract.
AboutGrid.propTypes = {
  gridItems: PropTypes.arrayOf(
    PropTypes.shape({
      image: PropTypes.oneOfType([PropTypes.object, PropTypes.string]),
      text: PropTypes.string,
    })
  ),
}
export default AboutGrid
|
"""LintError type for use in linting."""
# Copyright 2015-2016 Capstone Team G
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import typing
# A single linting problem: the location where it occurred and a message
# describing what went wrong.
LintError = typing.NamedTuple('LintError', [
    ('line_number', int),  # line on which the problem was found
    ('column', int),       # column offset within that line
    ('msg', str),          # human-readable description of the problem
])
|
from __future__ import absolute_import
import os
from django.db import connections
from django.test import TestCase
from django.contrib.gis.gdal import Driver
from django.contrib.gis.geometry.test_data import TEST_DATA
from django.contrib.gis.utils.ogrinspect import ogrinspect
from .models import AllOGRFields
class OGRInspectTest(TestCase):
    """Tests for the Django model source generated by ``ogrinspect``."""
    maxDiff = 1024
    def test_poly(self):
        # Inspecting a polygon shapefile should produce float/str fields and a
        # PolygonField with an undefined SRID (-1).
        # NOTE(review): the 'int' column is expected to map to FloatField here
        # — presumably how OGR reports the column type; confirm.
        shp_file = os.path.join(TEST_DATA, 'test_poly', 'test_poly.shp')
        model_def = ogrinspect(shp_file, 'MyModel')
        expected = [
            '# This is an auto-generated Django model module created by ogrinspect.',
            'from django.contrib.gis.db import models',
            '',
            'class MyModel(models.Model):',
            '    float = models.FloatField()',
            '    int = models.FloatField()',
            '    str = models.CharField(max_length=80)',
            '    geom = models.PolygonField(srid=-1)',
            '    objects = models.GeoManager()',
        ]
        self.assertEqual(model_def, '\n'.join(expected))
    def test_date_field(self):
        # The cities shapefile includes a date column, which should map to a
        # DateField in the generated model.
        shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp')
        model_def = ogrinspect(shp_file, 'City')
        expected = [
            '# This is an auto-generated Django model module created by ogrinspect.',
            'from django.contrib.gis.db import models',
            '',
            'class City(models.Model):',
            '    name = models.CharField(max_length=80)',
            '    population = models.FloatField()',
            '    density = models.FloatField()',
            '    created = models.DateField()',
            '    geom = models.PointField(srid=-1)',
            '    objects = models.GeoManager()',
        ]
        self.assertEqual(model_def, '\n'.join(expected))
    def test_time_field(self):
        # Only possible to test this on PostGIS at the moment. MySQL
        # complains about permissions, and SpatiaLite/Oracle are
        # insanely difficult to get support compiled in for in GDAL.
        if not connections['default'].ops.postgis:
            self.skipTest("This database does not support 'ogrinspect'ion")
        # Getting the database identifier used by OGR, if None returned
        # GDAL does not have the support compiled in.
        ogr_db = get_ogr_db_string()
        if not ogr_db:
            self.skipTest("Your GDAL installation does not support PostGIS databases")
        # Writing shapefiles via GDAL currently does not support writing OGRTime
        # fields, so we need to actually use a database
        model_def = ogrinspect(ogr_db, 'Measurement',
                               layer_key=AllOGRFields._meta.db_table,
                               decimal=['f_decimal'])
        # The generated module must start with the standard header and class line.
        self.assertTrue(model_def.startswith(
            '# This is an auto-generated Django model module created by ogrinspect.\n'
            'from django.contrib.gis.db import models\n'
            '\n'
            'class Measurement(models.Model):\n'
        ))
        # The ordering of model fields might vary depending on several factors (version of GDAL, etc.)
        self.assertIn('    f_decimal = models.DecimalField(max_digits=0, decimal_places=0)', model_def)
        self.assertIn('    f_int = models.IntegerField()', model_def)
        self.assertIn('    f_datetime = models.DateTimeField()', model_def)
        self.assertIn('    f_time = models.TimeField()', model_def)
        self.assertIn('    f_float = models.FloatField()', model_def)
        self.assertIn('    f_char = models.CharField(max_length=10)', model_def)
        self.assertIn('    f_date = models.DateField()', model_def)
        # Geometry and manager lines always come last.
        self.assertTrue(model_def.endswith(
            '    geom = models.PolygonField()\n'
            '    objects = models.GeoManager()'
        ))
def get_ogr_db_string():
    """
    Construct the DB string that GDAL will use to inspect the database.

    GDAL will create its own connection to the database, so we re-use the
    connection settings from the Django test. Returns ``None`` when GDAL
    lacks driver support for the backend or when the database is an
    in-memory SQLite one.
    """
    db = connections.databases['default']
    # Map from the django backend into the OGR driver name and database identifier
    # http://www.gdal.org/ogr/ogr_formats.html
    #
    # TODO: Support Oracle (OCI).
    drivers = {
        'django.contrib.gis.db.backends.postgis': ('PostgreSQL', "PG:dbname='%(db_name)s'", ' '),
        'django.contrib.gis.db.backends.mysql': ('MySQL', 'MYSQL:"%(db_name)s"', ','),
        'django.contrib.gis.db.backends.spatialite': ('SQLite', '%(db_name)s', '')
    }
    drv_name, db_str, param_sep = drivers[db['ENGINE']]
    # Ensure that GDAL library has driver support for the database.
    # BUGFIX: narrowed from a bare except, which would also swallow
    # KeyboardInterrupt/SystemExit.
    try:
        Driver(drv_name)
    except Exception:
        return None
    # SQLite/Spatialite in-memory databases cannot be reached by GDAL.
    if db['NAME'] == ":memory:":
        return None
    # Build the params of the OGR database connection string
    params = [db_str % {'db_name': db['NAME']}]
    def add(key, template):
        value = db.get(key, None)
        # Don't add the parameter if it is not in django's settings
        if value:
            params.append(template % value)
    add('HOST', "host='%s'")
    add('PORT', "port='%s'")
    add('USER', "user='%s'")
    add('PASSWORD', "password='%s'")
    return param_sep.join(params)
|
const mongoose = require("mongoose");
const { Schema } = require("mongoose");

// Schema describing a user's follow relationships.
const followsSchema = new Schema({
  // Display name stored alongside the owner reference.
  _displayName: String,
  // The user this follow list belongs to.
  _owner: { type: Schema.Types.ObjectId, ref: "User" },
  // Users that the owner follows.
  follows: [{ type: Schema.Types.ObjectId, ref: "User" }]
});

// Register the model under the "Follows" name.
mongoose.model("Follows", followsSchema);
|
const { v4: uuid } = require('uuid');
const domain = require('domain');
const Logger = require('@naturacosmeticos/clio-nodejs-logger');
const AsyncHooksStorage = require('@naturacosmeticos/async-hooks-storage');
// Log-pattern wildcard enabling every namespace (used in debug mode).
const logAllPattern = '*';
// Log level applied when the debug header is present.
const logLevelDebug = 'debug';
// Output format used for normal (non-debug) requests.
const grayLogFormat = 'graylog';
/** @private */
function requestContext(req) {
return {
correlationId: AsyncHooksStorage.getEntry('correlation-id'),
customerId: req.headers['x-customer-id'] || null,
requestId: req.headers['request-id'] || uuid(),
sessionId: req.headers['x-session-id'] || null,
};
}
/** @private */
function createLogger(req) {
const context = requestContext(req);
if ('x-debug-mode-on' in req.headers) {
return new Logger({
context,
logLevel: logLevelDebug,
logPatterns: logAllPattern,
namespace: '',
});
}
return new Logger({ context, logFormat: grayLogFormat });
}
/**
 * Express middleware that runs each request inside its own domain, attaching
 * request-scoped loggers (general, database, and HTTP child loggers).
 * NOTE(review): Node's `domain` module is deprecated — confirm migration plans.
 * @param {function} req - Express req function
 * @param {function} res - Express res function
 * @param {function} next - Express next function
 */
function contextMiddleware(req, res, next) {
  const requestDomain = domain.create();
  requestDomain.context = requestContext(req);
  requestDomain.logger = createLogger(req);
  requestDomain.databaseLogger = requestDomain.logger.createChildLogger('db');
  // Expose an HTTP-scoped child logger to downstream handlers.
  res.locals.logger = requestDomain.logger.createChildLogger('http');
  requestDomain.run(next);
}
module.exports = contextMiddleware;
|
/**
* dashboard.js
* It contains scripts for dashboard.html
*/
|