text
stringlengths 3
1.05M
|
|---|
const colorNames = require(`color-names`);
const xml2js = require(`xml2js`);
module.exports.name = `e:cue`;
module.exports.version = `0.3.1`;

// Reverse lookup table built from the color-names package:
// normalized (lowercased, whitespace-stripped) color name -> hex value.
const colors = {};
for (const [hex, name] of Object.entries(colorNames)) {
  colors[name.toLowerCase().replace(/\s/g, ``)] = hex;
}
/**
 * Imports an e:cue fixture library XML string and converts it into the
 * Open Fixture Library (OFL) format.
 * @param {!string} str The XML file content.
 * @param {!string} filename The imported file's name, used in error messages.
 * @param {!Function} resolve Called with the out object (manufacturers, fixtures, warnings) on success.
 * @param {!Function} reject Called with an error description string on failure.
 */
module.exports.import = function importEcue(str, filename, resolve, reject) {
  const parser = new xml2js.Parser();
  const timestamp = new Date().toISOString().replace(/T.*/, ``); // YYYY-MM-DD

  const out = {
    manufacturers: {},
    fixtures: {},
    warnings: {}
  };

  // promisify xml2js' callback-style API
  new Promise((res, rej) => {
    parser.parseString(str, (parseError, xml) => {
      if (parseError) {
        rej(parseError);
      }
      else {
        res(xml);
      }
    });
  })
    .then(xml => {
      if (!(`Library` in xml.Document) || !(`Fixtures` in xml.Document.Library[0]) || !(`Manufacturer` in xml.Document.Library[0].Fixtures[0])) {
        throw new Error(`Nothing to import.`);
      }
      return xml.Document.Library[0].Fixtures[0].Manufacturer || [];
    })
    .then(ecueManufacturers => {
      for (const manufacturer of ecueManufacturers) {
        const manName = manufacturer.$.Name;
        const manKey = slugify(manName);

        out.manufacturers[manKey] = {
          name: manName
        };

        if (manufacturer.$.Comment !== ``) {
          out.manufacturers[manKey].comment = manufacturer.$.Comment;
        }
        if (manufacturer.$.Web !== ``) {
          out.manufacturers[manKey].website = manufacturer.$.Web;
        }

        for (const fixture of (manufacturer.Fixture || [])) {
          addFixture(fixture, manKey);
        }
      }

      resolve(out);
    })
    .catch(parseError => {
      // FIX: the message previously contained the literal text `$(unknown)`
      // instead of interpolating the `filename` parameter (which was unused).
      reject(`Error parsing '${filename}'.\n${parseError.toString()}`);
    });

  /**
   * Parses the e:cue fixture and add it to out.fixtures.
   * @param {!object} ecueFixture The e:cue fixture object.
   * @param {!string} manKey The manufacturer key of the fixture.
   */
  function addFixture(ecueFixture, manKey) {
    const fixture = {
      $schema: `https://raw.githubusercontent.com/OpenLightingProject/open-fixture-library/master/schemas/fixture.json`,
      name: ecueFixture.$.Name
    };

    // ensure fixture keys are unique by appending random characters on clash
    let fixKey = `${manKey}/${slugify(fixture.name)}`;
    if (fixKey in out.fixtures) {
      fixKey += `-${Math.random().toString(36).substr(2, 5)}`;
      out.warnings[fixKey] = [`Fixture key '${fixKey}' is not unique, appended random characters.`];
    }
    else {
      out.warnings[fixKey] = [];
    }

    if (ecueFixture.$.NameShort !== ``) {
      fixture.shortName = ecueFixture.$.NameShort;
    }

    fixture.categories = [`Other`];
    out.warnings[fixKey].push(`Please specify categories.`);

    fixture.meta = {
      authors: [],
      // strip the `#...` suffix from e:cue's date format
      createDate: ecueFixture.$._CreationDate.replace(/#.*/, ``),
      lastModifyDate: ecueFixture.$._ModifiedDate.replace(/#.*/, ``),
      importPlugin: {
        plugin: `ecue`,
        date: timestamp
      }
    };
    out.warnings[fixKey].push(`Please specify your name in meta.authors.`);

    if (ecueFixture.$.Comment !== ``) {
      fixture.comment = ecueFixture.$.Comment;
    }

    const physical = getPhysical(ecueFixture);
    if (JSON.stringify(physical) !== `{}`) {
      fixture.physical = physical;
    }

    fixture.availableChannels = {};

    // ecue does not support modes, so we generate only one
    fixture.modes = [{
      name: `${ecueFixture.$.AllocateDmxChannels}-channel`,
      shortName: `${ecueFixture.$.AllocateDmxChannels}ch`,
      channels: []
    }];

    for (const ecueChannel of getCombinedEcueChannels(ecueFixture)) {
      addChannelToFixture(ecueChannel, fixture, out.warnings[fixKey]);
    }

    out.fixtures[fixKey] = fixture;
  }
};
/**
 * Extracts the physical information from an e:cue fixture. Properties that
 * still hold e:cue's default values (10 for every dimension, 0 for weight
 * and power) are left out of the result.
 * @param {!object} ecueFixture The e:cue fixture object.
 * @returns {!object} The OFL fixture's physical object.
 */
function getPhysical(ecueFixture) {
  const attributes = ecueFixture.$;
  const physical = {};

  const dimensionStrings = [attributes.DimWidth, attributes.DimHeight, attributes.DimDepth];
  if (dimensionStrings.every(dimension => dimension !== `10`)) {
    physical.dimensions = dimensionStrings.map(dimension => parseFloat(dimension));
  }

  if (attributes.Weight !== `0`) {
    physical.weight = parseFloat(attributes.Weight);
  }

  if (attributes.Power !== `0`) {
    physical.power = parseFloat(attributes.Power);
  }

  return physical;
}
/**
 * Collects the channels of all four e:cue channel types into one array,
 * tags each channel with its type and sorts them by coarse DMX channel.
 * @param {!object} ecueFixture The e:cue fixture object.
 * @returns {!Array.<!object>} An array of all ecue channel objects.
 */
function getCombinedEcueChannels(ecueFixture) {
  const channels = [];

  for (const channelType of [`ChannelIntensity`, `ChannelColor`, `ChannelBeam`, `ChannelFocus`]) {
    for (const channel of (ecueFixture[channelType] || [])) {
      // save the channel type in the channel object
      channel._ecueChannelType = channelType;
      channels.push(channel);
    }
  }

  // sort channels by (coarse) DMX channel
  channels.sort((channelA, channelB) => parseInt(channelA.$.DmxByte0) - parseInt(channelB.$.DmxByte0));

  return channels;
}
/**
 * Parses the e:cue channel and adds it to OFL fixture's availableChannels and the first mode.
 * @param {!object} ecueChannel The e:cue channel object.
 * @param {!object} fixture The OFL fixture object.
 * @param {!Array.<!string>} warningsArray This fixture's warnings array in the `out` object.
 */
function addChannelToFixture(ecueChannel, fixture, warningsArray) {
  const channel = {};

  const channelName = ecueChannel.$.Name.trim();
  let channelKey = channelName;
  // disambiguate duplicate channel keys; keep the original name for display
  if (channelKey in fixture.availableChannels) {
    warningsArray.push(`Channel key '${channelKey}' is not unique, appended random characters.`);
    channelKey += `-${Math.random().toString(36).substr(2, 5)}`;
    channel.name = channelName;
  }

  let maxDmxValue = 255;
  // a non-zero DmxByte1 marks a 16-bit channel: register a fine channel alias
  // and place it in the mode at its own DMX offset
  if (ecueChannel.$.DmxByte1 !== `0`) {
    const shortNameFine = `${channelKey} fine`;
    channel.fineChannelAliases = [shortNameFine];
    maxDmxValue = (256 * 256) - 1;
    fixture.modes[0].channels[parseInt(ecueChannel.$.DmxByte1) - 1] = shortNameFine;
  }

  addDmxValues();

  // a channel without explicit ranges gets a single default full-range capability
  if (!(`Range` in ecueChannel)) {
    ecueChannel.Range = [{
      $: {
        Start: 0,
        End: maxDmxValue,
        Name: `0-100%`,
        AutoMenu: `1`,
        Centre: `0`
      }
    }];
  }

  channel.capabilities = ecueChannel.Range.map(getCapability);

  // collapse a single capability into the shorthand `capability` form
  if (channel.capabilities.length === 1) {
    channel.capability = channel.capabilities[0];
    delete channel.capabilities;
    delete channel.capability.dmxRange;
  }

  fixture.availableChannels[channelKey] = channel;
  fixture.modes[0].channels[parseInt(ecueChannel.$.DmxByte0) - 1] = channelKey;

  /**
   * Adds DMX value related properties to channel.
   */
  function addDmxValues() {
    if (ecueChannel.$.DefaultValue !== `0`) {
      channel.defaultValue = parseInt(ecueChannel.$.DefaultValue);
    }
    if (ecueChannel.$.Highlight !== `0`) {
      channel.highlightValue = parseInt(ecueChannel.$.Highlight);
    }
    if (ecueChannel.$.Constant === `1`) {
      channel.constant = true;
    }
    if (ecueChannel.$.Precedence === `HTP`) {
      channel.precedence = `HTP`;
    }
  }

  /**
   * Converts one e:cue range into an OFL capability object.
   * @param {!object} ecueRange The e:cue range object.
   * @param {!number} index The index of the capability / range.
   * @returns {!object} The OFL capability object.
   */
  function getCapability(ecueRange, index) {
    const cap = {
      dmxRange: getDmxRange()
    };

    const capabilityName = ecueRange.$.Name.trim();

    cap.type = getCapabilityType();

    // capability parsers can rely on the channel type as a first distinctive feature
    const capabilityTypeParsers = {
      ColorIntensity() {
        // pick the color component from the channel name, e.g. "Red" from "Red Dimmer"
        cap.color = [`Red`, `Green`, `Blue`, `Cyan`, `Magenta`, `Yellow`, `Amber`, `White`, `UV`, `Lime`].find(
          color => channelName.toLowerCase().includes(color.toLowerCase())
        );
        cap.comment = capabilityName;
      },
      ColorWheelIndex() {
        const color = capabilityName.toLowerCase().replace(/\s/g, ``);
        if (color in colors) {
          cap.color = colors[color];
        }
        cap.comment = getSpeedGuessedComment();
        // if a speed was parsed, this range describes continuous wheel rotation
        if (`speedStart` in cap) {
          cap.type = `ColorWheelRotation`;
        }
      },
      ColorPreset() {
        const color = capabilityName.toLowerCase().replace(/\s/g, ``);
        if (color in colors) {
          cap.color = colors[color];
        }
        cap.comment = capabilityName;
      },
      ShutterStrobe() {
        if (capabilityName.match(/^(?:Blackout|(?:Shutter )?Closed?)$/i)) {
          cap.shutterEffect = `Closed`;
          return;
        }
        if (capabilityName.match(/^(?:(?:Shutter )?Open|Full?)$/i)) {
          cap.shutterEffect = `Open`;
          return;
        }
        if (capabilityName.match(/puls/i)) {
          cap.shutterEffect = `Pulse`;
        }
        else if (capabilityName.match(/ramp\s*up/i)) {
          cap.shutterEffect = `RampUp`;
        }
        else if (capabilityName.match(/ramp\s*down/i)) {
          cap.shutterEffect = `RampDown`;
        }
        else {
          cap.shutterEffect = `Strobe`;
        }
        if (capabilityName.match(/random/i)) {
          cap.shutterEffect += `Random`;
        }
        cap.comment = getSpeedGuessedComment();
      },
      Pan() {
        cap.angleStart = `0%`;
        cap.angleEnd = `100%`;
        cap.comment = capabilityName;
      },
      Tilt() {
        cap.angleStart = `0%`;
        cap.angleEnd = `100%`;
        cap.comment = capabilityName;
      },
      Effect() {
        cap.effectName = ``; // set it first here so effectName is before speedStart/speedEnd
        cap.effectName = getSpeedGuessedComment();
      },
      NoFunction() {
        // don't even add a comment
      }
    };

    if (cap.type in capabilityTypeParsers) {
      capabilityTypeParsers[cap.type]();
    }
    else {
      cap.comment = getSpeedGuessedComment();
    }

    // delete unnecessary comments (empty, same as channel name, or a plain "0-100%" range)
    if (`comment` in cap && (cap.comment === channelName || cap.comment.match(/^$|^0%?\s*(?:-|to|–|…|\.{2,}|->|<->|→)\s*100%$/))) {
      delete cap.comment;
    }

    if (ecueRange.$.AutoMenu !== `1`) {
      cap.menuClick = `hidden`;
    }
    else if (ecueRange.$.Centre !== `0`) {
      cap.menuClick = `center`;
    }

    return cap;

    /**
     * @returns {!Array.<!number>} The DMX range of this capability.
     */
    function getDmxRange() {
      const dmxRangeStart = parseInt(ecueRange.$.Start);
      let dmxRangeEnd = parseInt(ecueRange.$.End);

      // an End of -1 means "up to just before the next range's Start",
      // or the channel maximum for the last range
      if (dmxRangeEnd === -1) {
        dmxRangeEnd = (index + 1 < ecueChannel.Range.length) ? parseInt(ecueChannel.Range[index + 1].$.Start) - 1 : maxDmxValue;
      }

      return [dmxRangeStart, dmxRangeEnd];
    }

    /**
     * @returns {!string} The parsed capability type.
     */
    function getCapabilityType() {
      // the channel type (saved by getCombinedEcueChannels) selects which
      // heuristic is used to guess the capability type
      const capabilityTypePerChannelType = {
        ChannelColor() {
          if (channelName.match(/\bCTO\b|\bCTB\b|temperature\b/i)) {
            return `ColorTemperature`;
          }
          if (ecueChannel.Range.length === 1 && !channelName.match(/macro|wheel\b/i)) {
            return `ColorIntensity`;
          }
          if (channelName.match(/wheel\b/i)) {
            return `ColorWheelIndex`;
          }
          return `ColorPreset`;
        },
        ChannelIntensity() {
          // fall back to default
          return capabilityTypePerChannelType.ChannelBeam();
        },
        ChannelFocus() {
          if (channelName.match(/speed/i)) {
            return `PanTiltSpeed`;
          }

          const isPan = channelName.match(/pan/i);
          const isTilt = channelName.match(/tilt/i);
          let panOrTilt = null;
          if (isPan && !isTilt) {
            panOrTilt = `Pan`;
          }
          else if (isTilt && !isPan) {
            panOrTilt = `Tilt`;
          }
          else {
            // fall back to default
            return capabilityTypePerChannelType.ChannelBeam();
          }

          if (channelName.match(/continuous/i)) {
            return `${panOrTilt}Continuous`;
          }
          return panOrTilt;
        },
        ChannelBeam() {
          // ordered by specificity: the first regex matching either the
          // capability name or the channel name wins
          const capabilityTypeRegexps = {
            NoFunction: /^(?:nothing|no func(?:tion)?|unused|not used|empty|no strobe|no prism|no frost)$/,
            StrobeSpeed: /\bstrobe speed\b/,
            StrobeDuration: /\bstrobe duration\b/,
            ShutterStrobe: /\b(?:shutter|strobe|strb|strob|strobing)\b/,
            Intensity: /\b(?:intensity|dimmer)\b/,
            PanTiltSpeed: /\b(?:pan[/ -]?tilt speed|p[/ -]?t speed)\b/,
            PanContinuous: /\bpan continuous\b/,
            TiltContinuous: /\btilt continuous\b/,
            EffectParameter: /\beffect param(?:eter)?\b/,
            EffectSpeed: /\beffect speed\b/,
            EffectDuration: /\beffect duration\b/,
            Effect: /\beffect\b/,
            SoundSensitivity: /\b(?:sound|mic|microphone) sensitivity\b/,
            GoboShake: /\bgobo shake\b/,
            GoboStencilRotation: /\bgobo rot(?:ation)?\b/,
            GoboWheelRotation: /\bgobo wheel rot(?:ation)?\b/,
            GoboIndex: /\bgobo\b/,
            Focus: /\bfocus\b/,
            Zoom: /\bzoom\b/,
            IrisEffect: /\biris effect\b/,
            Iris: /\biris\b/,
            FrostEffect: /\bfrost effect\b/,
            Frost: /\bfrost\b/,
            PrismRotation: /\bprisma? rot(?:ation)?\b/,
            Prism: /\bprisma?\b/,
            BladeInsertion: /\bblade insertion\b/,
            BladeRotation: /\bblade rot(?:ation)?\b/,
            BladeSystemRotation: /\bblade system rot(?:ation)?\b/,
            FogOutput: /\bfog output\b/,
            FogType: /\bfog type\b/,
            Fog: /\bfog\b/,
            BeamAngle: /\bbeam angle\b/,
            Rotation: /\brotation\b/,
            Speed: /\bspeed\b/,
            Time: /\btime\b/,
            Maintenance: /\b(?:reset|maintenance)\b/
          };

          return Object.keys(capabilityTypeRegexps).find(
            channelType => capabilityName.toLowerCase().match(capabilityTypeRegexps[channelType]) ||
              channelName.toLowerCase().match(capabilityTypeRegexps[channelType])
          ) || `Generic`;
        }
      };

      return capabilityTypePerChannelType[ecueChannel._ecueChannelType]();
    }

    /**
     * Try to guess speedStart / speedEnd from the capabilityName. May set cap.type to Rotation.
     * @returns {!string} The rest of the capabilityName.
     */
    function getSpeedGuessedComment() {
      // matches e.g. "CW, slow-fast", "(2Hz…20Hz)", "counterclockwise 0 to 10 Hz" at the end of the name
      return capabilityName.replace(/(?:^|,\s*|\s+)\(?((?:(?:counter-?)?clockwise|C?CW)(?:,\s*|\s+))?\(?(slow|fast|\d+|\d+\s*Hz)\s*(?:-|to|–|…|\.{2,}|->|<->|→)\s*(fast|slow|\d+\s*Hz)\)?$/i, (match, direction, start, end) => {
        const directionStr = direction ? (direction.match(/^(?:clockwise|CW),?\s+$/i) ? ` CW` : ` CCW`) : ``;

        if (directionStr !== ``) {
          cap.type = `Rotation`;
        }

        start = start.toLowerCase();
        end = end.toLowerCase();

        const startNumber = parseFloat(start);
        const endNumber = parseFloat(end);
        // plain numbers (with or without a "Hz" suffix) are normalized to "<n>Hz"
        if (!isNaN(startNumber) && !isNaN(endNumber)) {
          start = `${startNumber}Hz`;
          end = `${endNumber}Hz`;
        }

        cap.speedStart = start + directionStr;
        cap.speedEnd = end + directionStr;

        // delete the parsed part
        return ``;
      });
    }
  }
}
/**
 * @param {!string} str The string to slugify.
 * @returns {!string} A slugified version of the string, i.e. only containing lowercase letters, numbers and dashes.
 */
function slugify(str) {
  // non-alphanumeric runs become separators, then the words are dash-joined
  const normalized = str.toLowerCase().replace(/[^a-z0-9-]+/g, ` `).trim();
  return normalized.split(/\s+/).join(`-`);
}
|
import AccountPageObject from '../../../support/pages/account.page-object';
import SettingsPageObject from '../../../support/pages/module/sw-settings.page-object';
// E2E test: enables commercial account registration in the admin, makes the
// VAT id required + format-checked for Germany, then verifies the storefront
// profile form validates the VAT id accordingly.
describe('Account: Edit profile\'s Vat Id', () => {
  beforeEach(() => {
    // create a storefront customer and a default country fixture for every test
    return cy.createCustomerFixtureStorefront().then(() => {
      return cy.createDefaultFixture('country');
    });
  });

  it('@customer @package: Update profile', () => {
    // --- Admin: enable the private/commercial account type selection ---
    cy.openInitialPage(`${Cypress.env('admin')}#/sw/settings/login/registration/index`);

    cy.intercept({
      url: `${Cypress.env('apiPath')}/_action/system-config/batch`,
      method: 'POST'
    }).as('saveSettings');
    cy.intercept({
      url: `${Cypress.env('apiPath')}/country/*`,
      method: 'PATCH'
    }).as('saveCountry');

    cy.get('input[name="core.loginRegistration.showAccountTypeSelection"]').scrollIntoView();
    cy.get('input[name="core.loginRegistration.showAccountTypeSelection"]').should('exist');
    cy.get('input[name="core.loginRegistration.showAccountTypeSelection"]').click().should('have.value', 'on');
    cy.get('.smart-bar__content .sw-button--primary').click();
    cy.wait('@saveSettings')
      .its('response.statusCode').should('equal', 204);

    // --- Admin: require VAT id and VAT id format check for Germany ---
    cy.visit(`${Cypress.env('admin')}#/sw/settings/country/index`);
    const settingPage = new SettingsPageObject();

    cy.get('.sw-admin-menu__item--sw-settings').click();
    cy.get('#sw-settings-country').click();
    cy.get('.sw-skeleton').should('not.exist');
    cy.get('.sw-loader').should('not.exist');
    cy.get('.smart-bar__header').contains('Countries');

    // should wait for search result
    cy.intercept({
      method: 'POST',
      url: '/api/search/country',
    }).as('searchCountries');

    // find a country with the name is "Germany"
    cy.get('input.sw-search-bar__input').typeAndCheckSearchField('Germany');
    cy.get('input.sw-search-bar__input').type('{esc}');

    // choose "Germany"
    cy.get(`${settingPage.elements.dataGridRow}--0 ${settingPage.elements.countryColumnName} a`).should('be.visible');
    cy.get(`${settingPage.elements.dataGridRow}--0 ${settingPage.elements.countryColumnName} a`).click();
    cy.wait('@searchCountries');

    cy.get('.sw-settings-country-general__vat-id-required .sw-field--switch__input').click();
    cy.get('.sw-settings-country-general__field-check-vatid-format .sw-field--switch__input').click();
    cy.get(settingPage.elements.countrySaveAction).click();
    cy.wait('@saveCountry')
      .its('response.statusCode').should('equal', 204);

    // --- Storefront: log in as the fixture customer ---
    cy.visit('/account/login');
    const page = new AccountPageObject();

    // Login
    cy.get('.login-card').should('be.visible');
    cy.get('#loginMail').typeAndCheckStorefront('test@example.com');
    cy.get('#loginPassword').typeAndCheckStorefront('shopware');
    cy.get(`${page.elements.loginSubmit} [type="submit"]`).click();
    cy.get('.account-welcome h1').should((element) => {
      expect(element).to.contain('Overview');
    });

    // --- Storefront: exercise the profile form's VAT id validation ---
    cy.visit('/account/profile');

    const accountTypeSelector = 'select[name="accountType"]';
    const companySelector = 'input[name="company"]';
    const vatIdsSelector = 'input#vatIds';

    // company/VAT fields are hidden for private accounts ...
    cy.get(accountTypeSelector).should('be.visible');
    cy.get(accountTypeSelector).typeAndSelect('Private');
    cy.get(companySelector).should('not.be.visible');
    cy.get(vatIdsSelector).should('not.be.visible');

    // ... and shown for commercial accounts
    cy.get(accountTypeSelector).typeAndSelect('Commercial');
    cy.get(companySelector).should('be.visible');
    cy.get(companySelector).type('Company Testing');
    cy.get(vatIdsSelector).should('be.visible');

    // empty VAT id is rejected
    cy.get(vatIdsSelector).clear();
    cy.get('#profilePersonalForm button[type="submit"]').click();
    cy.get('.invalid-feedback').contains('VAT Reg.No. should not be empty.').should('be.visible');

    // malformed VAT id is rejected
    cy.get(vatIdsSelector).clearTypeAndCheck('wrong-format');
    cy.get('#profilePersonalForm button[type="submit"]').click();
    cy.get('.invalid-feedback').contains('The VAT Reg.No. you have entered does not have the correct format.').should('be.visible');

    // well-formed VAT id is accepted
    cy.get(vatIdsSelector).clearTypeAndCheck('123456789');
    cy.get('#profilePersonalForm button[type="submit"]').click();
    cy.get('.alert-success .alert-content').contains('Profile has been updated.');
  });
});
|
/**
 * Mini program configuration file.
 */

// Change this host domain to the domain assigned by your Tencent Cloud solution
var host = 'https://336423237.watersoup.club';

var config = {
  // The addresses below work together with the cloud-hosted demo backend
  service: {
    host,

    // Login URL, used to establish a session
    loginUrl: `${host}/weapp/login`,

    // Test request URL, used to verify the session
    requestUrl: `${host}/weapp/user`,

    // Image upload endpoint
    uploadUrl: `${host}/weapp/upload`,

    // Application-specific endpoints; presumably class/teacher/homework/
    // schedule management routes of the backend — verify against the server.
    groupUrl: `${host}/weapp/group`,
    init_classUrl: `${host}/weapp/init_class`,
    init_teacherUrl: `${host}/weapp/init_teacher`,
    publish_homework: `${host}/weapp/publish_homework`,
    homeworkUrl: `${host}/weapp/homework`,
    studentsUrl: `${host}/weapp/students`,
    uploadGradeUrl: `${host}/weapp/upload_grade`,
    initParentUrl: `${host}/weapp/init_parent`,
    uploadTimeUrl: `${host}/weapp/upload_time`,
    my_infoUrl: `${host}/weapp/my_info`,
    allGroupUrl: `${host}/weapp/all_group`,
    uploadClassScheduleUrl: `${host}/weapp/upload_class_schedule`,
    teacherFromGroupUrl: `${host}/weapp/teacher_from_group`,
    timetableUrl: `${host}/weapp/timetable`,
    uploadTimetableUrl: `${host}/weapp/uploadTimetable`,
    classScheduleUrl: `${host}/weapp/class_schedule`,
    deleteClassScheduleUrl: `${host}/weapp/delete_class_schedule`,
    modify_studentUrl: `${host}/weapp/modify_student`
  }
};

module.exports = config;
|
/*
* forgerock-react-native-sample
*
* kba.js
*
* Copyright (c) 2021 ForgeRock. All rights reserved.
* This software may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
import React, { useState } from 'react';
import { FormControl, Input, Select } from 'native-base';
function KBA({ callback }) {
/********************************************************************
* JAVASCRIPT SDK INTEGRATION POINT
* Summary: Utilize Callback methods
* ------------------------------------------------------------------
* Details: Because we wrap our responses in FRStep using the Javascript SDK
* we have access to helper methods to set, and retrieve information from our response.
* Referencing these helper methods allows us to avoid managing the state
* in our own application and leverage the SDK to do so
* *************************************************************** */
const [selectedQuestion, setAns] = useState('');
const updateQuestion = (question) => {
setAns(question);
callback.setQuestion(question);
};
const label = callback.getPrompt();
const isRequired = callback.isRequired ? callback.isRequired() : false;
return (
<FormControl isRequired={isRequired}>
<FormControl.Label>{label}</FormControl.Label>
<Select
accessibilityLabel={label}
placeholder={label}
selectedValue={selectedQuestion}
onValueChange={updateQuestion}
>
{callback.getPredefinedQuestions().map((question) => (
<Select.Item label={question} key={question} value={question} />
))}
</Select>
<Input onChangeText={(itemValue) => callback.setAnswer(itemValue)} />
</FormControl>
);
}
export { KBA };
|
// Demo: wire a selectron selection model to the #list element. Clicking an
// <li> selects it in the model; the "selected" CSS class mirrors model state.
var selectron = require("../index");

var sel = selectron();
var list = document.getElementById("list");

list.addEventListener("click", function (evt) {
  var clicked = evt.target;
  if (clicked.tagName === "LI") {
    sel.select(clicked);
  }
});

sel.on("select", function (li) {
  li.classList.add("selected");
});

sel.on("unselect", function (li) {
  li.classList.remove("selected");
});
|
import os
from pandaharvester.harvestercore import core_utils
from .base_messenger import BaseMessenger
from pandaharvester.harvesterconfig import harvester_config
from pandaharvester.harvestermisc.k8s_utils import k8s_Client
# from pandaharvester.harvestercore.queue_config_mapper import QueueConfigMapper
# from pandaharvester.harvestercore.work_spec import WorkSpec
# logger
_logger = core_utils.setup_logger('k8s_messenger')
# Messenger for generic Kubernetes clusters
class K8sMessenger(BaseMessenger):
    """Messenger plugin for generic Kubernetes clusters.

    On worker termination it fetches the logs of the worker's pods from the
    cluster and writes them to a file in ``logDir``.
    """

    def __init__(self, **kwargs):
        BaseMessenger.__init__(self, **kwargs)
        # logDir is expected to be injected via the plugin configuration
        # (kwargs); fail fast with a clear message if it is missing.
        try:
            self.logDir
        except AttributeError:
            print('K8sMessenger: Missing attribute logDir')
            raise
        # k8s_namespace / k8s_config_file are also configuration-injected
        self.k8s_client = k8s_Client(namespace=self.k8s_namespace, config_file=self.k8s_config_file)
        # snapshot of all pods in the namespace, filtered per worker later
        self._all_pods_list = self.k8s_client.get_pods_info()

    def post_processing(self, workspec, jobspec_list, map_type):
        """
        Do the following in post_processing, i.e. when workers terminate (finished/failed/cancelled)
        - Fetch logs of the pod from k8s
        - Store or upload logs

        Returns True on success; None on failure (the error is dumped to the
        plugin logger).
        """
        # get logger
        tmpLog = core_utils.make_logger(_logger, 'workerID={0}'.format(workspec.workerID),
                                        method_name='post_processing')
        tmpLog.debug('start')
        try:
            # fetch and store logs
            job_id = workspec.batchID
            pods_list = self.k8s_client.filter_pods_info(self._all_pods_list, job_name=job_id)
            pod_name_list = [ pods_info['name'] for pods_info in pods_list ]
            outlog_filename = os.path.join(self.logDir, 'gridK8S.{0}.{1}.out'.format(workspec.workerID, workspec.batchID))
            # concatenate the logs of every pod belonging to this job into one file
            with open(outlog_filename, 'w') as f:
                for pod_name in pod_name_list:
                    current_log_str = self.k8s_client.get_pod_logs(pod_name)
                    f.write(current_log_str)
            # upload logs
            pass
            # return
            tmpLog.debug('done')
            return True
        except Exception:
            # NOTE(review): returns None (not False) on failure — presumably
            # callers only check truthiness; confirm against the messenger API.
            core_utils.dump_error_message(tmpLog)
            return None
|
// Compiled module stub (emitted by the TypeScript compiler): marks the module
// as an ES module and exports nothing.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=Buttons.js.map
|
/**
 * Scrolls the page so the given element sits `removed_height` pixels below
 * the top of the viewport.
 * @param {(string|jQuery)} element_class Selector or jQuery object to scroll to.
 * @param {number} removed_height Pixel offset subtracted from the element's top.
 */
function scroll_to_class(element_class, removed_height) {
    var scroll_to = $(element_class).offset().top - removed_height;
    // strict comparison (both operands are numbers); only animate when we
    // are not already at the target position
    if ($(window).scrollTop() !== scroll_to) {
        $('html, body').stop().animate({scrollTop: scroll_to}, 0);
    }
}
/**
 * Moves the multi-step form's progress bar one step to the left or right.
 * @param {jQuery} progress_line_object The progress line element; carries the
 *     `number-of-steps` and `now-value` data attributes.
 * @param {string} direction Either 'right' (advance one step) or 'left' (go back).
 */
function bar_progress(progress_line_object, direction) {
    var number_of_steps = progress_line_object.data('number-of-steps');
    var now_value = progress_line_object.data('now-value');
    var new_value = 0;
    // strict equality: direction is always a string literal at the call sites
    if (direction === 'right') {
        new_value = now_value + (100 / number_of_steps);
    }
    else if (direction === 'left') {
        new_value = now_value - (100 / number_of_steps);
    }
    // write the width inline and remember the new value for the next call
    progress_line_object.attr('style', 'width: ' + new_value + '%;').data('now-value', new_value);
}
// Page bootstrap: fullscreen background plus the multi-step form's
// next/previous navigation, progress bar and empty-field validation.
jQuery(document).ready(function() {
    /*
        Fullscreen background
    */
    $.backstretch("asset/img/BG.png");

    // re-fit the background whenever the responsive navbar opens or closes
    $('#top-navbar-1').on('shown.bs.collapse', function(){
        $.backstretch("resize");
    });
    $('#top-navbar-1').on('hidden.bs.collapse', function(){
        $.backstretch("resize");
    });

    /*
        Form
    */
    // reveal the first step of the multi-step form
    $('.f1 fieldset:first').fadeIn('slow');

    // clear the error highlight as soon as a field regains focus
    $('.f1 input[type="text"], .f1 input[type="password"], .f1 textarea').on('focus', function() {
        $(this).removeClass('input-error');
    });

    // next step
    $('.f1 .btn-next').on('click', function() {
        var parent_fieldset = $(this).parents('fieldset');
        var next_step = true;
        // navigation steps / progress steps
        var current_active_step = $(this).parents('.f1').find('.f1-step.active');
        var progress_line = $(this).parents('.f1').find('.f1-progress-line');

        // fields validation: every text field of the current step must be non-empty
        parent_fieldset.find('input[type="text"], input[type="password"], textarea').each(function() {
            if( $(this).val() == "" ) {
                $(this).addClass('input-error');
                next_step = false;
            }
            else {
                $(this).removeClass('input-error');
            }
        });
        // fields validation
        if( next_step ) {
            parent_fieldset.fadeOut(400, function() {
                // change icons
                current_active_step.removeClass('active').addClass('activated').next().addClass('active');
                // progress bar
                bar_progress(progress_line, 'right');
                // show next step
                $(this).next().fadeIn();
                // scroll window to beginning of the form
                scroll_to_class( $('.f1'), 20 );
            });
        }
    });

    // previous step (no validation needed when going back)
    $('.f1 .btn-previous').on('click', function() {
        // navigation steps / progress steps
        var current_active_step = $(this).parents('.f1').find('.f1-step.active');
        var progress_line = $(this).parents('.f1').find('.f1-progress-line');

        $(this).parents('fieldset').fadeOut(400, function() {
            // change icons
            current_active_step.removeClass('active').prev().removeClass('activated').addClass('active');
            // progress bar
            bar_progress(progress_line, 'left');
            // show previous step
            $(this).prev().fadeIn();
            // scroll window to beginning of the form
            scroll_to_class( $('.f1'), 20 );
        });
    });

    // submit: block submission if any field anywhere in the form is still empty
    $('.f1').on('submit', function(e) {
        // fields validation
        $(this).find('input[type="text"], input[type="password"], textarea').each(function() {
            if( $(this).val() == "" ) {
                e.preventDefault();
                $(this).addClass('input-error');
            }
            else {
                $(this).removeClass('input-error');
            }
        });
        // fields validation
    });
});
|
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Regression test for the case where a phi has two input operands with
// the same value.
function test(start) {
  // The `if (true)` wrapper and the duplicated loop are deliberate: they make
  // the optimizing compiler build a phi for `i` whose two input operands
  // carry the same value — the exact case this regression test exercises.
  // Do not "simplify" this function.
  if (true) {
    for (var i = start; i < 10; i++) { }
  }
  for (var i = start; i < 10; i++) { }
}

// Run enough iterations that the function is optimized by the JIT.
var n = 5000000;
for (var i = 0; i < n; ++i) {
  test(0);
}
|
(window.webpackJsonp=window.webpackJsonp||[]).push([[77],{192:function(t,e,s){"use strict";s.r(e);var a=s(0),r=Object(a.a)({},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"content"},[t._m(0),t._v(" "),s("Bit"),t._v(" "),t._m(1),t._v(" "),t._m(2),t._v(" "),t._m(3),t._v(" "),t._m(4),t._v(" "),t._m(5),t._v(" "),t._m(6),t._v(" "),s("ul",[s("li",[s("router-link",{attrs:{to:"./../guide/assets.html#base-url"}},[t._v("Base URL")])],1),t._v(" "),s("li",[s("router-link",{attrs:{to:"./../guide/deploy.html#github-pages"}},[t._v("Deploy Guide > Github Pages")])],1)]),t._v(" "),t._m(7),t._v(" "),t._m(8),t._v(" "),s("p",[t._v("Title for the site. This will be the prefix for all page titles, and displayed in the navbar in the default theme.")]),t._v(" "),t._m(9),t._v(" "),t._m(10),t._v(" "),t._m(11),t._v(" "),t._m(12),t._v(" "),t._m(13),t._v(" "),t._m(14),t._v(" "),t._m(15),t._m(16),t._v(" "),t._m(17),t._v(" "),s("p",[t._v("Specify the host to use for the dev server.")]),t._v(" "),t._m(18),t._v(" "),t._m(19),t._v(" "),s("p",[t._v("Specify the port to use for the dev server.")]),t._v(" "),t._m(20),t._v(" "),t._m(21),t._v(" "),t._m(22),t._v(" "),t._m(23),t._v(" "),t._m(24),t._v(" "),s("p",[t._v("Provide the Google Analytics ID to enable integration.")]),t._v(" "),s("div",{staticClass:"tip custom-block"},[s("p",{staticClass:"custom-block-title"},[t._v("TIP")]),t._v(" "),s("p",[t._v("Please be aware of "),s("a",{attrs:{href:"https://ec.europa.eu/commission/priorities/justice-and-fundamental-rights/data-protection/2018-reform-eu-data-protection-rules_en",target:"_blank",rel:"noopener noreferrer"}},[t._v("GDPR (2018 reform of EU data protection rules)"),s("OutboundLink")],1),t._v(" and consider setting Google Analytics to "),s("a",{attrs:{href:"https://support.google.com/analytics/answer/2763052?hl=en",target:"_blank",rel:"noopener noreferrer"}},[t._v("anonymize IPs"),s("OutboundLink")],1),t._v(" where appropriate and/or needed.")])]),t._v(" 
"),t._m(25),t._v(" "),t._m(26),t._v(" "),t._m(27),t._v(" "),t._m(28),t._v(" "),t._m(29),t._v(" "),s("div",{staticClass:"tip custom-block"},[s("p",{staticClass:"custom-block-title"},[t._v("PWA NOTES")]),t._v(" "),s("p",[t._v("The "),s("code",[t._v("serviceWorker")]),t._v(" option only handles the service worker. To make your site fully PWA-compliant, you will need to provide the Web App Manifest and icons in "),s("code",[t._v(".vuepress/public")]),t._v(". For more details, see "),s("a",{attrs:{href:"https://developer.mozilla.org/en-US/docs/Web/Manifest",target:"_blank",rel:"noopener noreferrer"}},[t._v("MDN docs about the Web App Manifest"),s("OutboundLink")],1),t._v(".")]),t._v(" "),s("p",[t._v("Also, only enable this if you are able to deploy your site with SSL, since service worker can only be registered under HTTPs URLs.")])]),t._v(" "),t._m(30),t._v(" "),t._m(31),t._v(" "),s("p",[t._v("Specify locales for i18n support. For more details, see the guide on "),s("router-link",{attrs:{to:"./../guide/i18n.html"}},[t._v("Internationalization")]),t._v(".")],1),t._v(" "),t._m(32),t._v(" "),t._m(33),t._v(" "),s("p",[t._v("A function to control what files should have "),s("code",[t._v('<link rel="preload">')]),t._v(" resource hints generated. See "),s("a",{attrs:{href:"https://ssr.vuejs.org/api/#shouldprefetch",target:"_blank",rel:"noopener noreferrer"}},[t._v("shouldPrefetch"),s("OutboundLink")],1),t._v(".")]),t._v(" "),t._m(34),t._v(" "),t._m(35),t._v(" "),t._m(36),t._v(" "),t._m(37),t._v(" "),t._m(38),t._v(" "),t._m(39),t._v(" "),s("p",[t._v("Provide config options to the used theme. 
The options will vary depending on the theme you are using.")]),t._v(" "),t._m(40),t._v(" "),s("ul",[s("li",[s("router-link",{attrs:{to:"./../default-theme-config/"}},[t._v("Default Theme Configuration")]),t._v(".")],1)]),t._v(" "),t._m(41),t._v(" "),t._m(42),t._v(" "),t._m(43),t._v(" "),s("p",[t._v("Whether to show line numbers to the left of each code blocks.")]),t._v(" "),t._m(44),t._v(" "),s("ul",[s("li",[s("router-link",{attrs:{to:"./../guide/markdown.html#line-numbers"}},[t._v("Line Numbers")])],1)]),t._v(" "),t._m(45),t._v(" "),s("ul",[t._m(46),t._v(" "),s("li",[t._v("Default: "),s("a",{attrs:{href:"https://github.com/vuejs/vuepress/blob/master/lib/markdown/slugify.js",target:"_blank",rel:"noopener noreferrer"}},[t._v("source"),s("OutboundLink")],1)])]),t._v(" "),s("p",[t._v("Function for transforming header texts into slugs. This affects the ids/links generated for header anchors, table of contents and sidebar links.")]),t._v(" "),t._m(47),t._v(" "),t._m(48),t._v(" "),t._m(49),t._v(" "),t._m(50),t._v(" "),t._m(51),t._v(" "),s("p",[t._v("Options for "),s("a",{attrs:{href:"https://github.com/valeriangalliat/markdown-it-anchor",target:"_blank",rel:"noopener noreferrer"}},[t._v("markdown-it-anchor"),s("OutboundLink")],1),t._v(". (Note: prefer "),s("code",[t._v("markdown.slugify")]),t._v(" if you want to customize header ids.)")]),t._v(" "),t._m(52),t._v(" "),t._m(53),t._v(" "),s("p",[t._v("Options for "),s("a",{attrs:{href:"https://github.com/Oktavilla/markdown-it-table-of-contents",target:"_blank",rel:"noopener noreferrer"}},[t._v("markdown-it-table-of-contents"),s("OutboundLink")],1),t._v(". 
(Note: prefer "),s("code",[t._v("markdown.slugify")]),t._v(" if you want to customize header ids.)")]),t._v(" "),t._m(54),t._v(" "),t._m(55),t._v(" "),s("p",[t._v("A function to modify default config or apply additional plugins to the "),s("a",{attrs:{href:"https://github.com/markdown-it/markdown-it",target:"_blank",rel:"noopener noreferrer"}},[t._v("markdown-it"),s("OutboundLink")],1),t._v(" instance used to render source files. Example:")]),t._v(" "),t._m(56),t._m(57),t._v(" "),t._m(58),t._v(" "),t._m(59),t._v(" "),s("p",[t._v("Options for "),s("a",{attrs:{href:"https://github.com/postcss/postcss-loader",target:"_blank",rel:"noopener noreferrer"}},[t._v("postcss-loader"),s("OutboundLink")],1),t._v(". Note specifying this value will overwrite autoprefixer and you will need to include it yourself.")]),t._v(" "),t._m(60),t._v(" "),t._m(61),t._v(" "),s("p",[t._v("Options for "),s("a",{attrs:{href:"https://github.com/shama/stylus-loader",target:"_blank",rel:"noopener noreferrer"}},[t._v("stylus-loader"),s("OutboundLink")],1),t._v(".")]),t._v(" "),t._m(62),t._v(" "),t._m(63),t._v(" "),s("p",[t._v("Options for "),s("a",{attrs:{href:"https://github.com/webpack-contrib/sass-loader",target:"_blank",rel:"noopener noreferrer"}},[t._v("sass-loader"),s("OutboundLink")],1),t._v(" to load "),s("code",[t._v("*.scss")]),t._v(" files.")]),t._v(" "),t._m(64),t._v(" "),t._m(65),t._v(" "),s("p",[t._v("Options for "),s("a",{attrs:{href:"https://github.com/webpack-contrib/sass-loader",target:"_blank",rel:"noopener noreferrer"}},[t._v("sass-loader"),s("OutboundLink")],1),t._v(" to load "),s("code",[t._v("*.sass")]),t._v(" files.")]),t._v(" "),t._m(66),t._v(" "),t._m(67),t._v(" "),s("p",[t._v("Options for "),s("a",{attrs:{href:"https://github.com/webpack-contrib/less-loader",target:"_blank",rel:"noopener noreferrer"}},[t._v("less-loader"),s("OutboundLink")],1),t._v(".")]),t._v(" "),t._m(68),t._v(" "),t._m(69),t._v(" "),s("p",[t._v("Modify the internal webpack config. 
If the value is an Object, it will be merged into the final config using "),s("a",{attrs:{href:"https://github.com/survivejs/webpack-merge",target:"_blank",rel:"noopener noreferrer"}},[t._v("webpack-merge"),s("OutboundLink")],1),t._v("; If the value is a function, it will receive the config as the 1st argument and an "),s("code",[t._v("isServer")]),t._v(" flag as the 2nd argument. You can either mutate the config directly, or return an object to be merged:")]),t._v(" "),t._m(70),t._m(71),t._v(" "),t._m(72),t._v(" "),s("p",[t._v("Modify the internal webpack config with "),s("a",{attrs:{href:"https://github.com/mozilla-neutrino/webpack-chain",target:"_blank",rel:"noopener noreferrer"}},[t._v("webpack-chain"),s("OutboundLink")],1),t._v(".")]),t._v(" "),t._m(73),t._m(74),t._v(" "),t._m(75),t._v(" "),t._m(76),t._v(" "),t._m(77)],1)},[function(){var t=this.$createElement,e=this._self._c||t;return e("h1",{attrs:{id:"config-reference"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#config-reference","aria-hidden":"true"}},[this._v("#")]),this._v(" Config Reference")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h2",{attrs:{id:"basic-config"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#basic-config","aria-hidden":"true"}},[this._v("#")]),this._v(" Basic Config")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"base"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#base","aria-hidden":"true"}},[this._v("#")]),this._v(" base")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("string")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("/")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("The base URL the site will be deployed at. You will need to set this if you plan to deploy your site under a sub path, for example GitHub pages. 
If you plan to deploy your site to "),e("code",[this._v("https://foo.github.io/bar/")]),this._v(", then "),e("code",[this._v("base")]),this._v(" should be set to "),e("code",[this._v('"/bar/"')]),this._v(". It should always start and end with a slash.")])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("The "),e("code",[this._v("base")]),this._v(" is automatically prepended to all the URLs that start with "),e("code",[this._v("/")]),this._v(" in other options, so you only need to specify it once.")])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[e("strong",[this._v("Also see:")])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"title"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#title","aria-hidden":"true"}},[this._v("#")]),this._v(" title")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("string")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"description"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#description","aria-hidden":"true"}},[this._v("#")]),this._v(" description")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("string")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("Description for the site. 
This will be rendered as a "),e("code",[this._v("<meta>")]),this._v(" tag in the page HTML.")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"head"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#head","aria-hidden":"true"}},[this._v("#")]),this._v(" head")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Array")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("[]")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("Extra tags to be injected to the page HTML "),e("code",[this._v("<head>")]),this._v(". Each tag can be specified in the form of "),e("code",[this._v("[tagName, { attrName: attrValue }, innerHTML?]")]),this._v(". For example, to add a custom favicon:")])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"language-js extra-class"},[s("pre",{pre:!0,attrs:{class:"language-js"}},[s("code",[t._v("module"),s("span",{attrs:{class:"token punctuation"}},[t._v(".")]),t._v("exports "),s("span",{attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n head"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("[")]),t._v("\n "),s("span",{attrs:{class:"token punctuation"}},[t._v("[")]),s("span",{attrs:{class:"token string"}},[t._v("'link'")]),s("span",{attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" rel"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{attrs:{class:"token string"}},[t._v("'icon'")]),s("span",{attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" href"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{attrs:{class:"token string"}},[t._v("'/logo.png'")]),t._v(" "),s("span",{attrs:{class:"token 
punctuation"}},[t._v("}")]),s("span",{attrs:{class:"token punctuation"}},[t._v("]")]),t._v("\n "),s("span",{attrs:{class:"token punctuation"}},[t._v("]")]),t._v("\n"),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"host"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#host","aria-hidden":"true"}},[this._v("#")]),this._v(" host")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("string")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("'0.0.0.0'")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"port"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#port","aria-hidden":"true"}},[this._v("#")]),this._v(" port")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("number")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("8080")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"dest"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#dest","aria-hidden":"true"}},[this._v("#")]),this._v(" dest")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("string")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v(".vuepress/dist")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("Specify the output directory for "),e("code",[this._v("vuepress build")]),this._v(".")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"ga"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#ga","aria-hidden":"true"}},[this._v("#")]),this._v(" ga")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("string")])]),this._v(" 
"),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"serviceworker"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#serviceworker","aria-hidden":"true"}},[this._v("#")]),this._v(" serviceWorker")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("boolean")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("false")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("If set to "),e("code",[this._v("true")]),this._v(", VuePress will automatically generate and register a service worker that caches the content for offline use (only enabled in production).")])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("If developing a custom theme, the "),e("code",[this._v("Layout.vue")]),this._v(" component will also be emitting the following events:")])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("ul",[s("li",[s("code",[t._v("sw-ready")])]),t._v(" "),s("li",[s("code",[t._v("sw-cached")])]),t._v(" "),s("li",[s("code",[t._v("sw-updated")])]),t._v(" "),s("li",[s("code",[t._v("sw-offline")])]),t._v(" "),s("li",[s("code",[t._v("sw-error")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"locales"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#locales","aria-hidden":"true"}},[this._v("#")]),this._v(" locales")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("{ [path: string]: Object }")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"shouldprefetch"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#shouldprefetch","aria-hidden":"true"}},[this._v("#")]),this._v(" 
shouldPrefetch")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Function")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("() => true")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h2",{attrs:{id:"theming"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#theming","aria-hidden":"true"}},[this._v("#")]),this._v(" Theming")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"theme"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#theme","aria-hidden":"true"}},[this._v("#")]),this._v(" theme")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("string")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("Specify this to use a custom theme. With the value of "),e("code",[this._v('"foo"')]),this._v(", VuePress will attempt to load the theme component at "),e("code",[this._v("node_modules/vuepress-theme-foo/Layout.vue")]),this._v(".")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"themeconfig"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#themeconfig","aria-hidden":"true"}},[this._v("#")]),this._v(" themeConfig")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{}")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[e("strong",[this._v("Also see:")])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h2",{attrs:{id:"markdown"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#markdown","aria-hidden":"true"}},[this._v("#")]),this._v(" Markdown")])},function(){var t=this.$createElement,e=this._self._c||t;return 
e("h3",{attrs:{id:"markdown-linenumbers"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#markdown-linenumbers","aria-hidden":"true"}},[this._v("#")]),this._v(" markdown.lineNumbers")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("boolean")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[e("strong",[this._v("Also see:")])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"markdown-slugify"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#markdown-slugify","aria-hidden":"true"}},[this._v("#")]),this._v(" markdown.slugify")])},function(){var t=this.$createElement,e=this._self._c||t;return e("li",[this._v("Type: "),e("code",[this._v("Function")])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"markdown-externallinks"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#markdown-externallinks","aria-hidden":"true"}},[this._v("#")]),this._v(" markdown.externalLinks")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{ target: '_blank', rel: 'noopener noreferrer' }")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("The key and value pair will be added to "),e("code",[this._v("<a>")]),this._v(" tags that points to an external link. 
The default option will open external links in a new window.")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"markdown-anchor"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#markdown-anchor","aria-hidden":"true"}},[this._v("#")]),this._v(" markdown.anchor")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{ permalink: true, permalinkBefore: true, permalinkSymbol: '#' }")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"markdown-toc"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#markdown-toc","aria-hidden":"true"}},[this._v("#")]),this._v(" markdown.toc")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{ includeLevel: [2, 3] }")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"markdown-config"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#markdown-config","aria-hidden":"true"}},[this._v("#")]),this._v(" markdown.config")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Function")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"language-js extra-class"},[s("pre",{pre:!0,attrs:{class:"language-js"}},[s("code",[t._v("module"),s("span",{attrs:{class:"token punctuation"}},[t._v(".")]),t._v("exports "),s("span",{attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n markdown"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{attrs:{class:"token 
punctuation"}},[t._v("{")]),t._v("\n config"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" md "),s("span",{attrs:{class:"token operator"}},[t._v("=>")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n md"),s("span",{attrs:{class:"token punctuation"}},[t._v(".")]),s("span",{attrs:{class:"token keyword"}},[t._v("set")]),s("span",{attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v(" breaks"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{attrs:{class:"token boolean"}},[t._v("true")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),s("span",{attrs:{class:"token punctuation"}},[t._v(")")]),t._v("\n md"),s("span",{attrs:{class:"token punctuation"}},[t._v(".")]),s("span",{attrs:{class:"token function"}},[t._v("use")]),s("span",{attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{attrs:{class:"token function"}},[t._v("require")]),s("span",{attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{attrs:{class:"token string"}},[t._v("'markdown-it-xxx'")]),s("span",{attrs:{class:"token punctuation"}},[t._v(")")]),s("span",{attrs:{class:"token punctuation"}},[t._v(")")]),t._v("\n "),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n"),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h2",{attrs:{id:"build-pipeline"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#build-pipeline","aria-hidden":"true"}},[this._v("#")]),this._v(" Build Pipeline")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"postcss"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#postcss","aria-hidden":"true"}},[this._v("#")]),this._v(" postcss")])},function(){var t=this.$createElement,e=this._self._c||t;return 
e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{ plugins: [require('autoprefixer')] }")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"stylus"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#stylus","aria-hidden":"true"}},[this._v("#")]),this._v(" stylus")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{ preferPathResolver: 'webpack' }")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"scss"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#scss","aria-hidden":"true"}},[this._v("#")]),this._v(" scss")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{}")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"sass"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#sass","aria-hidden":"true"}},[this._v("#")]),this._v(" sass")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{ indentedSyntax: true }")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"less"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#less","aria-hidden":"true"}},[this._v("#")]),this._v(" less")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("{}")])])])},function(){var t=this.$createElement,e=this._self._c||t;return 
e("h3",{attrs:{id:"configurewebpack"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#configurewebpack","aria-hidden":"true"}},[this._v("#")]),this._v(" configureWebpack")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Object | Function")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"language-js extra-class"},[s("pre",{pre:!0,attrs:{class:"language-js"}},[s("code",[t._v("module"),s("span",{attrs:{class:"token punctuation"}},[t._v(".")]),t._v("exports "),s("span",{attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n configureWebpack"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("(")]),t._v("config"),s("span",{attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" isServer"),s("span",{attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),s("span",{attrs:{class:"token operator"}},[t._v("=>")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),s("span",{attrs:{class:"token keyword"}},[t._v("if")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("(")]),s("span",{attrs:{class:"token operator"}},[t._v("!")]),t._v("isServer"),s("span",{attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),s("span",{attrs:{class:"token comment"}},[t._v("// mutate the config for client")]),t._v("\n "),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n "),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n"),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n")])])])},function(){var t=this.$createElement,e=this._self._c||t;return 
e("h3",{attrs:{id:"chainwebpack"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#chainwebpack","aria-hidden":"true"}},[this._v("#")]),this._v(" chainWebpack")])},function(){var t=this.$createElement,e=this._self._c||t;return e("ul",[e("li",[this._v("Type: "),e("code",[this._v("Function")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("undefined")])])])},function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"language-js extra-class"},[s("pre",{pre:!0,attrs:{class:"language-js"}},[s("code",[t._v("module"),s("span",{attrs:{class:"token punctuation"}},[t._v(".")]),t._v("exports "),s("span",{attrs:{class:"token operator"}},[t._v("=")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n chainWebpack"),s("span",{attrs:{class:"token punctuation"}},[t._v(":")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("(")]),t._v("config"),s("span",{attrs:{class:"token punctuation"}},[t._v(",")]),t._v(" isServer"),s("span",{attrs:{class:"token punctuation"}},[t._v(")")]),t._v(" "),s("span",{attrs:{class:"token operator"}},[t._v("=>")]),t._v(" "),s("span",{attrs:{class:"token punctuation"}},[t._v("{")]),t._v("\n "),s("span",{attrs:{class:"token comment"}},[t._v("// config is an instance of ChainableConfig")]),t._v("\n "),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n"),s("span",{attrs:{class:"token punctuation"}},[t._v("}")]),t._v("\n")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("h2",{attrs:{id:"browser-compatibility"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#browser-compatibility","aria-hidden":"true"}},[this._v("#")]),this._v(" Browser Compatibility")])},function(){var t=this.$createElement,e=this._self._c||t;return e("h3",{attrs:{id:"evergreen"}},[e("a",{staticClass:"header-anchor",attrs:{href:"#evergreen","aria-hidden":"true"}},[this._v("#")]),this._v(" evergreen")])},function(){var t=this.$createElement,e=this._self._c||t;return 
e("ul",[e("li",[this._v("Type: "),e("code",[this._v("boolean")])]),this._v(" "),e("li",[this._v("Default: "),e("code",[this._v("false")])])])},function(){var t=this.$createElement,e=this._self._c||t;return e("p",[this._v("Set to "),e("code",[this._v("true")]),this._v(" if you are only targeting evergreen browsers. This will disable ES5 transpilation and polyfills for IE, and result in faster builds and smaller files.")])}],!1,null,null,null);e.default=r.exports}}]);
|
import React from "react";
function Scores(props) {
return (
<div className="scores">
<div className="score score--current">{props.score}</div>
<div className="score score--best">{props.best}</div>
</div>
);
}
export default Scores;
|
// Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.2.3.5-4-254
description: >
Object.create - one property in 'Properties' is an Arguments
object which implements its own [[Get]] method to access the 'get'
property (8.10.5 step 7.a)
---*/
// Create a genuine Arguments exotic object (invoking the IIFE with no args).
var argObj = (function () { return arguments; })();
// Give the arguments object an own data property named 'get'. When argObj is
// consumed as a property descriptor, ToPropertyDescriptor must read 'get' via
// the object's own [[Get]] method (ES5 8.10.5 step 7.a) — the scenario under
// test. The code shape here is intentional; do not simplify.
argObj.get = function () {
        return "VerifyArgumentsObject";
};
// 'prop' becomes an accessor property whose getter is argObj.get above.
var newObj = Object.create({}, {
        prop: argObj
});
assert.sameValue(newObj.prop, "VerifyArgumentsObject", 'newObj.prop');
|
$(document).ready(function () {
    /**
     * Escape a value for safe interpolation into an HTML string, so that
     * server-provided fields (client name, email, dates, ...) cannot inject
     * markup or scripts into the page (XSS hardening).
     * @param {*} value - Any value; null/undefined become the empty string.
     * @returns {string} HTML-escaped text.
     */
    function escapeHtml(value) {
        return $('<div>').text(value == null ? '' : String(value)).html();
    }

    // Re-sort the order table when the sort control is clicked.
    $('#trierCommande').on('click', function () {
        var element = $(this).val();
        $.ajax({
            type: "post",
            url: "http://127.0.0.1:8000/commercant/commandes/trier",
            data: {
                element: element
            },
            success: function (data) {
                var content = $('tbody');
                // jQuery.parseJSON is deprecated; JSON.parse is the
                // standard equivalent.
                var commandes = JSON.parse(data);
                console.log(commandes);
                content.html('');
                if (commandes == null) {
                    $('<tr><td>pas de commandes</td></tr>').appendTo(content);
                    return;
                }
                $.each(commandes, function (key, commande) {
                    console.log(commande.id);
                    var ligne = $('<tr class="tbody">' +
                        '<td>' + escapeHtml(commande.id) + '</td>' +
                        '<td>' +
                        '<p>Nom : ' + escapeHtml(commande.client.nom) + '</p>' +
                        '<p>Prénom : ' + escapeHtml(commande.client.prenom) + '</p>' +
                        '<p>Email : ' + escapeHtml(commande.client.email) + '</p>' +
                        '</td>' +
                        '<td>' + escapeHtml(commande.date) + '</td>' +
                        '<td>' + escapeHtml(commande.montantTotal) + '</td>' +
                        '</tr>');
                    // BUG FIX: the "livré" cell was previously appended with
                    // $(...).appendTo('.tbody'), which targets EVERY row with
                    // class "tbody" and therefore duplicated the cell across
                    // all earlier rows on each iteration. Append it to the
                    // current row only.
                    var bool = commande.estLivre ? "oui" : "non";
                    $('<td>' + bool + '</td>').appendTo(ligne);
                    ligne.appendTo(content);
                });
            },
            // Previously a failed request failed silently; surface it.
            error: function (xhr) {
                console.error('Tri des commandes échoué :', xhr.status, xhr.statusText);
            }
        });
    });
});
|
const HtmlWebpackPlugin = require('html-webpack-plugin');
const VuetifyLoaderPlugin = require('vuetify-loader/lib/plugin');
const VueLoaderPlugin = require('vue-loader/lib/plugin');
const path = require('path');
module.exports = {
entry: './docs/src/index.js',
module: {
rules: [
{test: /\.js$/, use: 'babel-loader'},
{test: /\.vue$/, use: 'vue-loader'},
{test: /\.css$/, use: ['style-loader', 'css-loader']},
{test: /\.s(c|a)ss$/, use: ['style-loader', 'css-loader', 'sass-loader']},
]
},
plugins: [
new HtmlWebpackPlugin({inject: true, template: './docs/src/index.html'}),
new VueLoaderPlugin(),
new VuetifyLoaderPlugin(),
],
resolve: {
extensions: ['.js', '.vue', '.json'],
alias: {
'vue$': 'vue/dist/vue.esm.js',
'@': path.join(__dirname, '.', 'docs'),
}
},
};
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Create publication quality figure."""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib.font_manager import FontProperties
from matplotlib.transforms import Bbox
from matplotlib.ticker import MultipleLocator
# from matplotlib.ticker import AutoLocator, ScalarFormatter, FuncFormatter
from pathlib import Path
from matplotlib import gridspec
from manipUtils.figmanip import setFigurePosition, getFigurePosition, cm2inch
%matplotlib qt5
plt.ion()
# %% ================== Close all other figures ===============================
# plt.close('all')
# %% ======================== Negative ========================================
# Toggle for producing a dark ("negative") version of the figure.
negative = False
plt.style.use('dark_background' if negative else 'default')
# %% ============================ Fonts =======================================
# Check system available fonts
# from matplotlib.font_manager import findSystemFonts
# font_list = findSystemFonts()
# matching = [s for s in font_list if 'cmr10' in s]
# print(matching)
# Configure fonts
# font0 = FontProperties(fname='/usr/share/fonts/truetype/lyx/cmr10.ttf')
font0 = FontProperties(fname=str(Path(r'C:\Users\carlo\github\manipUtils\templates\ttf\cmr10.ttf')))
font0.set_size(9)
# Change mathtext to Computer Modern Roman ('LaTeX font')
mpl.rcParams['mathtext.fontset'] = 'cm'
# Change text to text, and not paths. This enable to edit text on vector graphics editors
mpl.rcParams['svg.fonttype'] = 'none'
# Note: mathtext output relies on absolute glyph positioning. Therefore, text
# objects that contain mathtext are trick to edit in vector graphics editors
# (inkscape, illustrator, etc...) This was reported as a bug:
# https://github.com/matplotlib/matplotlib/issues/13200
# %% ============================ Figure ======================================
# Figure position on screen
# getFigurePosition()
xPosition = 100
yPosition = 100
# Figure size on the paper (in cm)
# PRB 2 column: Column width 8.6 cm or 3 3/8 in.
width = 8.6
height = 15
# Use the pyplot interface for creating figures, and then use the object
# methods for the rest
fig = plt.figure(figsize=cm2inch(width, height))
setFigurePosition(xPosition, yPosition)
# %% ================================ Axes ====================================
number_of_lines = 4
number_of_columns = 1
height_ratios=[1, 1, 1, 1]
width_ratios=[1]
gs = gridspec.GridSpec(number_of_lines, number_of_columns, height_ratios=height_ratios, width_ratios=width_ratios)
fig.subplots_adjust(hspace=0, wspace=.3) #Set distance between subplots
ax = list()
ax.append(fig.add_subplot(gs[0]))
for i in range(1, number_of_lines):
ax.append(fig.add_subplot(gs[i], sharex=ax[0]))
# %% =================================== Plot =================================
# Data
x = np.linspace(0, 10*np.pi, 100)
y = np.sin(x)
scale = 1e1
colors = ['black', 'red', 'blue', 'green', 'magenta']
linestyles = ['-', '--', '-.', ':', '-']
markers = [None, 'o', None, None, 'x']
color = iter(colors)
linestyle = iter(linestyles)
marker = iter(markers)
markevery = 2
markersize = 3
linewidth = 1
# Panels 0, 2 and 3 plot the scaled sine; panel 1 plots its square.
# Labels use raw strings so that '\s' in '\sin' is not parsed as an escape
# sequence (non-raw '\s' emits a DeprecationWarning on modern Python).
line1, = ax[0].plot(x, y*scale,
                    ls=next(linestyle),
                    linewidth=linewidth,
                    marker=next(marker),
                    markevery=markevery,
                    ms=markersize,
                    color=next(color),
                    markerfacecolor='blue',
                    markeredgewidth=0.5,
                    label=r'$y=\sin(x)$')
line2, = ax[1].plot(x, y**2,
                    ls=next(linestyle),
                    linewidth=linewidth,
                    marker=next(marker),
                    markevery=markevery,
                    ms=markersize,
                    color=next(color),
                    markerfacecolor='blue',
                    markeredgewidth=0.5,
                    # BUG FIX: this panel plots sin^2(x) but was labelled sin(x)
                    label=r'$y=\sin^2(x)$')
line3, = ax[2].plot(x, y*scale,
                    ls=next(linestyle),
                    linewidth=linewidth,
                    marker=next(marker),
                    markevery=markevery,
                    ms=markersize,
                    color=next(color),
                    markerfacecolor='blue',
                    markeredgewidth=0.5,
                    label=r'$y=\sin(x)$')
line4, = ax[3].plot(x, y*scale,
                    ls=next(linestyle),
                    linewidth=linewidth,
                    marker=next(marker),
                    markevery=markevery,
                    ms=markersize,
                    color=next(color),
                    markerfacecolor='blue',
                    markeredgewidth=0.5,
                    label=r'$y=\sin(x)$')
# %% ================================== Axis ==================================
# Axis Label
ax[-1].set_xlabel(r'$T$ (K)', fontproperties=font0, labelpad=None)
for a in ax:
a.set_ylabel(r'$\sigma$ ($10^{' + '{:.0f}'.format(-np.log10(scale)) + '}$ S/m)', fontproperties=font0, labelpad=None)
# Turn off scientific notation
for a in ax:
a.ticklabel_format(axis='both', style='plain')
# major ticks
if True:
for a in ax:
# ticks values
xTicks = a.get_xticks() # xTicks = np.arange(420, 508, 10)
# a.set_xticks(xTicks, minor=False)
a.set_xticklabels(['{0:.0f}'.format(i) for i in xTicks], fontproperties=font0, visible=False)
yTicks = a.get_yticks()
# a.set_yticks(yTicks, minor=False)
a.set_yticklabels(['{0:.0f}'.format(i) for i in yTicks], fontproperties=font0, visible=True)
ax[-1].set_xticklabels(['{0:.0f}'.format(i) for i in xTicks], fontproperties=font0, visible=True)
# minor ticks
if True:
for a in ax:
a.xaxis.set_minor_locator(MultipleLocator(2))
a.yaxis.set_minor_locator(MultipleLocator(2))
# a.xaxis.set_ticks(np.arange(0.5, 11.6, 0.5), minor=True)
# ticks properties
for a in ax:
a.tick_params(which='major', direction='in', top=True, left=True, right=True, labeltop=False, labelleft=True, labelright=False)
a.tick_params(which='minor', direction='in', top=True, left=True, right=True)
# Axis limits
# Freeze the current (autoscaled) limits: re-setting them with auto=False
# disables further autoscaling, so artists added later won't rescale the axes.
for a in ax:
    x_min = a.get_xlim()[0]
    x_max = a.get_xlim()[1]
    a.set_xlim((x_min, x_max), auto=False)
    y_min = a.get_ylim()[0]
    y_max = a.get_ylim()[1]
    a.set_ylim((y_min, y_max), auto=False)
# %% ======================== Axes position on figure =========================
fig.subplots_adjust(left=0.13, bottom=0.06, right=0.98, top=0.98)
# %% ========================== Legend ========================================
for a in ax:
a.legend(frameon=0, labelspacing=.1, prop=font0)
# %% ========================= Save figure ====================================
# Flip to True to write the figure to disk (pdf for quick viewing, svg for
# editing in Inkscape).
if False:
    dirpath = Path(r'C:\Users\Carlos\Desktop')
    name_prefix = 'plot'
    if negative:
        # pdf is for fast visualization (svg has to open inkscape)
        plt.savefig(str(dirpath / (name_prefix + '_negative.pdf')))
        plt.savefig(str(dirpath / (name_prefix + '_negative.svg')), transparent=False)
    else:
        # pdf is for fast visualization (svg has to open inkscape)
        plt.savefig(str(dirpath / (name_prefix + '_raw.pdf')))
        plt.savefig(str(dirpath / (name_prefix + '_raw.svg')), transparent=True)
    # BUG FIX: this `del` used to sit at module level, so whenever the save
    # branch above was disabled, dirpath/name_prefix were undefined and the
    # script crashed with NameError.  Clean up only when they exist.
    del dirpath, name_prefix
|
// Shared color palette (hex values) for themed components.
export default {
  cyan: '#0DA1C3',
  lightgrey: '#F4F5F7',
  yellow: '#EBE047',
  white: '#FFFFFF',
}
|
// @flow
import randomColor from 'randomcolor'
import React, {PureComponent} from 'react'
import {Polyline, Tooltip} from 'react-leaflet'
import * as activeActions from '../../actions/active'
import * as mapActions from '../../actions/map'
import * as tripPatternActions from '../../actions/tripPattern'
import {ensureValidCoords, isSegmentActive} from '../../util/map'
import type {ControlPoint, Coordinates, Entity, Feed, GtfsRoute, LatLng, Pattern} from '../../../types'
import type {EditSettingsState} from '../../../types/reducers'
type Props = {
activeEntity: Entity,
activePattern: Pattern,
constructControlPoint: typeof mapActions.constructControlPoint,
controlPoints: Array<ControlPoint>,
editSettings: EditSettingsState,
feedSource: Feed,
patternCoordinates: Array<Coordinates>,
patternSegment: number,
route: GtfsRoute,
setActiveEntity: typeof activeActions.setActiveEntity,
setActivePatternSegment: typeof tripPatternActions.setActivePatternSegment,
subEntityId: number,
updateEditSetting: typeof activeActions.updateEditSetting
}
type SegmentProps = {
latlng: LatLng,
pattern: Pattern,
segmentCoordinates: Coordinates,
segmentIndex: number
}
// Map layer that renders every trip pattern of the selected route: the active
// pattern as per-segment polylines (editable), inactive ones as single lines.
export default class PatternsLayer extends PureComponent<Props> {
  // Forward the clicked segment's info to the action creator together with
  // the current pattern coordinates and control points from props.
  _constructControlPoint = (segmentProps: SegmentProps) => {
    const {constructControlPoint, patternCoordinates, controlPoints} = this.props
    constructControlPoint({...segmentProps, patternCoordinates, controlPoints})
  }
  render () {
    const {
      activeEntity,
      activePattern,
      editSettings,
      feedSource,
      patternCoordinates,
      patternSegment,
      route,
      setActiveEntity,
      setActivePatternSegment,
      subEntityId,
      updateEditSetting
    } = this.props
    return (
      <div id='trip-patterns-layer'>
        {route && route.tripPatterns
          ? route.tripPatterns
            .map(tp => {
              const isActive = subEntityId === tp.id
              const isEditing = isActive && editSettings.editGeometry
              const pattern = isActive ? activePattern : tp
              // Coordinates for non-active trip patterns
              const latLngs = (
                pattern && pattern.shape && ensureValidCoords(pattern.shape.coordinates)
                  ? pattern.shape.coordinates
                  : []
              )
              // NOTE: don't render pattern if latlngs don't exist or a single pattern is active
              if (!latLngs || (!isActive && subEntityId)) {
                !latLngs && console.warn(`not rendering ${tp.id} due to missing coordinates`)
                return null
              }
              const lineColor = editSettings.editGeometry
                ? '#F3F315' // yellow if editing
                : 'blue'
              // Label shown in the segment tooltip describing the click effect.
              const clickAction = isEditing
                ? 'add control point'
                : isActive
                  ? 'edit shape'
                  : 'select'
              if (isActive) {
                // Render active pattern as collection of inter-stop segments.
                // console.log('active pattern coordinates', patternCoordinates)
                return patternCoordinates && patternCoordinates
                  .map((coordinates, index) => {
                    const segmentIsActive = isSegmentActive(patternSegment, index)
                    // Optionally hide everything but the active segment while editing.
                    if (!segmentIsActive && editSettings.hideInactiveSegments && editSettings.editGeometry) {
                      return null
                    }
                    return <PatternSegment
                      key={`${tp.id}-${index}`}
                      activeEntity={activeEntity}
                      index={index}
                      feedSource={feedSource}
                      isEditing={isEditing}
                      patternIsActive={isActive}
                      segmentIsActive={segmentIsActive}
                      setActivePatternSegment={setActivePatternSegment}
                      clickAction={clickAction}
                      color={lineColor}
                      setActiveEntity={setActiveEntity}
                      updateEditSetting={updateEditSetting}
                      editSettings={editSettings}
                      coordinates={coordinates}
                      // Pass active pattern (with up-to-date shape) as prop
                      pattern={activePattern}
                      constructControlPoint={this._constructControlPoint} />
                  })
              } else {
                // Render inactive pattern as single polyline.
                return (
                  <PatternSegment
                    activeEntity={activeEntity}
                    clickAction={clickAction}
                    color={lineColor}
                    constructControlPoint={this._constructControlPoint}
                    coordinates={latLngs}
                    editSettings={editSettings}
                    feedSource={feedSource}
                    isEditing={isEditing}
                    key={tp.id}
                    pattern={tp}
                    patternIsActive={isActive}
                    setActiveEntity={setActiveEntity}
                    setActivePatternSegment={setActivePatternSegment}
                    updateEditSetting={updateEditSetting} />
                )
              }
            })
          : null
        }
      </div>
    )
  }
}
type PatternSegmentProps = {
activeEntity: Entity,
clickAction: string,
color: string,
constructControlPoint: SegmentProps => void,
coordinates: Coordinates,
editSettings: EditSettingsState,
feedSource: Feed,
index?: number,
isEditing: boolean,
pattern: Pattern,
patternIsActive: boolean,
segmentIsActive?: boolean,
setActiveEntity: typeof activeActions.setActiveEntity,
setActivePatternSegment: typeof tripPatternActions.setActivePatternSegment,
updateEditSetting: typeof activeActions.updateEditSetting
}
class PatternSegment extends PureComponent<PatternSegmentProps> {
_onClick = (e: SyntheticInputEvent<HTMLInputElement> & {latlng: LatLng}) => {
const {
pattern,
activeEntity,
constructControlPoint,
coordinates,
editSettings,
feedSource,
index,
isEditing,
patternIsActive,
segmentIsActive,
setActiveEntity,
setActivePatternSegment,
updateEditSetting
} = this.props
if (
isEditing &&
segmentIsActive &&
editSettings.onMapClick === 'DRAG_HANDLES' &&
typeof index !== 'undefined'
) {
constructControlPoint({
pattern,
latlng: e.latlng,
segmentIndex: index,
segmentCoordinates: coordinates
})
}
if (!isEditing && patternIsActive) {
updateEditSetting({
setting: 'editGeometry',
value: true
})
}
if (!patternIsActive) {
setActiveEntity(
feedSource.id,
'route',
activeEntity,
'trippattern',
pattern
)
}
if (patternIsActive && !segmentIsActive) {
setActivePatternSegment(index)
}
}
render () {
const {color,
clickAction,
coordinates,
editSettings,
index,
isEditing,
segmentIsActive,
patternIsActive,
pattern
} = this.props
const hasShapePoints = pattern.shapePoints && pattern.shapePoints.length > 0
if (!coordinates || !coordinates.length || !ensureValidCoords(coordinates)) {
if (typeof index === 'number') {
console.warn(`Could not render segment #${index} of pattern ID ${pattern.id}`, coordinates)
} else {
// console.warn(`Could not render shape for pattern ID ${pattern.id}`, coordinates)
}
return null
}
if (coordinates.length < 2) {
coordinates.push(coordinates[0])
}
const editingSegment = segmentIsActive && isEditing
const dragEnabled = editSettings.onMapClick === 'DRAG_HANDLES'
let badCoordinates = false
const reversedCoordinates = coordinates.map(c => {
if (!c[1] || !c[0]) {
console.warn(`Coordinates are not valid`)
badCoordinates = true
}
return [c[1], c[0]]
})
if (badCoordinates) return null
return (
<Polyline
// React leaflet coordinates are [lat, lon]
positions={reversedCoordinates}
onClick={this._onClick}
lineCap='butt'
color={editingSegment
? 'green'
: isEditing && patternIsActive
? randomColor()
: color
}
dashArray={patternIsActive && !hasShapePoints ? '5, 5' : undefined}
opacity={editingSegment ? 0.8 : 0.5}
weight={5}>
<Tooltip
key={Math.random()}
opacity={editSettings.showTooltips ? 0.9 : 0}
sticky>
<span>
{dragEnabled && isEditing
? `Click to ${editingSegment ? clickAction : 'edit segment'}`
: dragEnabled
? `${pattern.name} (click to ${clickAction})`
: 'Dragging handles is disabled. Change edit mode first.'
}
</span>
</Tooltip>
</Polyline>
)
}
}
|
from hwtypes import Bit, BitVector
import magma as m
from mantle2.queue import Queue
import fault as f
import pysv
def test_queue_simple():
    """Directed test: fill the 4-entry queue, then drain it in FIFO order."""
    tester = f.SynchronousTester(Queue(4, m.Bits[2]))
    tester.advance_cycle()
    # Enqueue 0..3; enq.ready should stay asserted until the queue is full.
    for i in range(4):
        tester.circuit.enq.data = i
        tester.circuit.enq.valid = 1
        tester.circuit.enq.ready.expect(1)
        tester.advance_cycle()
    tester.circuit.enq.valid = 0
    # Queue is now full: enq.ready must be deasserted.
    tester.circuit.enq.ready.expect(0)
    # Dequeue 0..3 in insertion order; deq.valid stays asserted until empty.
    for i in range(4):
        tester.circuit.deq.data.expect(i)
        tester.circuit.deq.valid.expect(1)
        tester.circuit.deq.ready = 1
        tester.advance_cycle()
    # Queue drained: deq.valid must be deasserted.
    tester.circuit.deq.valid.expect(0)
    tester.compile_and_run("verilator")
class QueueModel:
    """Golden software model of a bounded FIFO, callable from simulation
    via pysv."""
    @pysv.sv()
    def __init__(self, num_entries):
        # TODO: How can we encode the type information as a parameter?
        self.num_entries = num_entries  # capacity of the FIFO
        self._entries = []              # index 0 is the front of the queue
    @pysv.sv(value=pysv.DataType.UInt)
    def enq(self, value):
        """Append value; raise if the queue is already at capacity."""
        # BUG FIX: was `>`, which tolerated num_entries + 1 buffered items
        # before raising.  Full means len == num_entries.
        if len(self._entries) >= self.num_entries:
            raise Exception("Queue is full")
        self._entries.append(value)
    @pysv.sv(return_type=pysv.DataType.UInt)
    def deq(self):
        """Pop and return the oldest value; raise if the queue is empty."""
        if len(self._entries) == 0:
            raise Exception("Queue is empty")
        return self._entries.pop(0)
def test_queue_model():
    """Randomized test comparing the DUT against the QueueModel golden model."""
    num_entries = 4
    tester = f.SynchronousTester(Queue(num_entries, m.UInt[32]))
    tester.step(2)
    # Instantiate the Python model inside the simulation via pysv.
    model = tester.Var("model", QueueModel)
    tester.poke(model, tester.make_call_expr(QueueModel, num_entries))
    for i in range(10):
        # Drive a random enqueue; mirror it into the model only when the
        # valid/ready handshake actually fires in simulation.
        enq_valid = Bit.random()
        enq_data = BitVector.random(32)
        tester.circuit.enq.data = enq_data
        tester.circuit.enq.valid = enq_valid
        if_tester = tester._if(
            tester.circuit.enq.valid & tester.circuit.enq.ready)
        if_tester.make_call_stmt(model.enq, enq_data)
        tester.advance_cycle()
        tester.circuit.enq.valid = 0
        # Drive a random dequeue; on a handshake, compare the DUT's data
        # against the value the model dequeues.
        deq_ready = Bit.random()
        tester.circuit.deq.ready = deq_ready
        if_tester = tester._if(
            tester.circuit.deq.valid & tester.circuit.deq.ready)
        var = if_tester.Var("deq_data", BitVector[32])
        if_tester.poke(var, if_tester.make_call_expr(model.deq))
        # TODO: Expect calls function twice (second time for error message)
        if_tester.circuit.deq.data.expect(var)
        tester.advance_cycle()
        tester.circuit.deq.ready = 0
    tester.compile_and_run("verilator", use_pysv=True, disp_type="realtime")
@f.python_monitor()
class QueueMonitor:
    """Passive monitor that mirrors the queue contents and checks each
    dequeue handshake's data."""
    @pysv.sv()
    def __init__(self, num_entries):
        self.num_entries = num_entries  # capacity of the FIFO
        self._entries = []              # mirrored queue contents
    @pysv.sv()
    def observe(self, enq: m.EnqIO[m.UInt[32]],
                deq: m.EnqIO[m.UInt[32]]):
        """Update the mirror on every valid+ready handshake observed."""
        if deq.ready & deq.valid:
            if len(self._entries) == 0:
                raise Exception("Queue is empty")
            assert deq.data == self._entries.pop(0)
        if enq.ready & enq.valid:
            # BUG FIX: was `>`, which tolerated num_entries + 1 buffered
            # items.  Full means len == num_entries.
            if len(self._entries) >= self.num_entries:
                raise Exception("Queue is full")
            self._entries.append(enq.data)
def test_queue_monitor():
    """Randomized test where a passive QueueMonitor performs all checking."""
    num_entries = 4
    tester = f.SynchronousTester(Queue(num_entries, m.UInt[32]))
    tester.step(2)
    # The monitor observes the queue's ports every cycle (@f.python_monitor).
    monitor = tester.Var("monitor", QueueMonitor)
    tester.poke(monitor, tester.make_call_expr(QueueMonitor, num_entries))
    for i in range(10):
        # Pure random stimulus; correctness is asserted inside the monitor.
        enq_valid = Bit.random()
        enq_data = BitVector.random(32)
        tester.circuit.enq.data = enq_data
        tester.circuit.enq.valid = enq_valid
        deq_ready = Bit.random()
        tester.circuit.deq.ready = deq_ready
        tester.advance_cycle()
    tester.compile_and_run("verilator", use_pysv=True, disp_type="realtime")
|
import copy
import typing as t
from functools import wraps
import inspect
import logging
from .validation import check_instance_of
def create_instance(cls: t.Any, *args):
    """
    Instantiate *cls* (via a throwaway subclass) using placeholder arguments.

    Positional arguments not supplied in ``args`` are padded with the
    integers 0, 1, 2, ... so that classes can be instantiated for
    inspection without knowing real values.  Note: classes that validate
    their inputs during __init__ may still reject the placeholders.

    :param cls: The class to instantiate
    :return: An instance of a subclass of ``cls``
    """
    # Subclass for inspection purposes, leaving the original class untouched.
    class NewClass(cls):
        pass

    # Number of __init__ parameters excluding `self`.
    expected = len(inspect.getfullargspec(NewClass.__init__).args) - 1
    supplied = list(args)
    if expected > len(supplied):
        # Pad the missing tail with 0, 1, 2, ... placeholders.
        supplied.extend(range(expected - len(supplied)))
    return NewClass(*supplied)
def get_deepcopy_args_kwargs(fn: t.Callable, args: t.Tuple, kwargs: t.Dict):
    """
    Return deep copies of (args, kwargs) with declared default values
    merged into the kwargs copy.

    :param fn: The target function to evaluate
    :param args: Positional argument values supplied to ``fn``
    :param kwargs: Keyword arguments supplied to ``fn``
    :return: (deep-copied args tuple, deep-copied kwargs dict incl. defaults)
    """
    parameters = inspect.signature(fn).parameters
    arg_count: int = len(args)
    merged_kwargs = dict(kwargs)
    # BUG FIX: the defaults used to be computed into a local dict that was
    # then discarded, so the returned kwargs never actually contained them.
    # Only fill parameters not covered positionally, not already passed by
    # keyword, and that actually declare a default.
    for i, (name, param) in enumerate(parameters.items()):
        if (i >= arg_count and
                name not in merged_kwargs and
                param.default is not inspect.Parameter.empty):
            merged_kwargs[name] = param.default
    return copy.deepcopy(args), copy.deepcopy(merged_kwargs)
def fill_default_kwargs(fn: t.Callable, args: t.Tuple, kwargs: t.Dict) -> None:
    """
    Fill ``kwargs`` in place with the default value of every parameter of
    ``fn`` that was supplied neither positionally (via ``args``) nor by
    keyword.  (At runtime kwargs is empty for arguments left at their
    defaults, so this reconstructs them.)

    :param fn: The target function whose signature supplies the defaults
    :param args: Positional argument values already supplied
    :param kwargs: Keyword arguments; mutated in place
    """
    parameters = inspect.signature(fn).parameters
    arg_count: int = len(args)
    for i, (name, param) in enumerate(parameters.items()):
        # Parameters covered positionally cannot be missing.
        if i < arg_count:
            continue
        # BUG FIX: the old loop overwrote explicitly passed kwargs with
        # their defaults and inserted inspect.Parameter.empty for
        # parameters that have no default at all.
        if name in kwargs:
            continue
        if param.default is not inspect.Parameter.empty:
            kwargs[name] = param.default
def get_shallow_default_arg_dict(fn: t.Callable, args: t.Tuple) -> t.Dict:
    """
    Map each parameter name of ``fn`` to a value: parameters covered by
    ``args`` take their positional value; every remaining parameter gets
    its declared default.

    NOTE(review): a parameter beyond ``args`` that declares no default maps
    to ``inspect.Parameter.empty`` — callers should treat that sentinel as
    "no value"; confirm this is intended.

    :param fn: The target function to evaluate
    :param args: Positional argument values, in declaration order
    :return: Dict of parameter-name -> value pairs
    """
    # Number of positional values actually supplied
    arg_count = len(args)
    args_names = []
    # Add defaults
    parameters = inspect.signature(fn).parameters
    new_kwargs = {}
    i: int = 1  # 1-based position of the current parameter
    for k, v in parameters.items():
        if i > arg_count:
            # Past the supplied positionals: fall back to the default
            new_kwargs[k] = v.default
        else:
            # Covered by a positional value: remember the name for zip below
            args_names.append(k)
        i += 1
    # Positional name/value pairs first, then the defaulted ones
    return {**dict(zip(args_names, args)), **new_kwargs}
def create_properties(valid_properties: t.Dict, **kwargs) -> t.Dict:
    """
    Build a property dict from kwargs, validated against valid_properties.

    ``valid_properties`` maps each key to a ``(data_type, default_value)``
    pair.  Values present in kwargs are type-checked (element by element
    for tuples); absent keys fall back to their default value.

    :param valid_properties: A dictionary containing valid properties
    :param kwargs: Candidate property values
    :return: Dict of validated property values
    """
    properties: t.Dict = {}
    for key, (data_type, default_value) in valid_properties.items():
        if key not in kwargs:
            # Not supplied: use the declared default as-is.
            properties[key] = default_value
            continue
        value = kwargs[key]
        # Tuples are validated element by element; scalars once.
        items = value if isinstance(value, t.Tuple) else (value,)
        for item in items:
            check_instance_of(item, data_type)
        properties[key] = value
    return properties
def get_unique_func_name(func: t.Callable) -> str:
return f'{func.__module__}.{func.__qualname__}'
def dict_is_empty(obj: t.Dict) -> bool:
    """
    Return True if *obj* (which must be a dict) has no entries.

    :param obj: The dictionary to check
    :raises TypeError: if *obj* is not a dictionary
    """
    if not isinstance(obj, t.Dict):
        raise TypeError("Object is not a dictionary. "
                        f"Passed in type: {type(obj)}")
    # Idiomatic emptiness check replaces the old "loop over keys and
    # return on the first one" construction.
    return len(obj) == 0
class LoggingLevelError(ValueError):
    """
    Raised when an invalid logging type/level is passed to a logging
    helper function.
    """

    def __init__(self, msg):
        super().__init__(msg)
def logger_factory(logger_name: str,
                   level: int = logging.DEBUG,
                   file_name: str = None):
    """
    Build (or reconfigure) a named logger, optionally appending to a file.

    :param file_name: The name of the file to store logger output in
        (no file handler is attached when None)
    :param logger_name: The name of the logger to fetch/configure
    :param level: The debug level for both the logger and the file handler
    :return: The configured logging.Logger
    """
    # This is required for logging rules to apply
    logging.basicConfig(level=level)
    logger = logging.getLogger(logger_name)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                                  '%Y-%m-%d %H:%M:%S')
    # BUG FIX: iterate over a copy — removing handlers while iterating
    # logger.handlers directly skipped every other handler.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)
    # add file logging
    if file_name is not None:
        file_handler = logging.FileHandler(file_name, 'a')
        file_handler.setFormatter(formatter)
        file_handler.setLevel(level)
        # set the new handler
        logger.addHandler(file_handler)
    return logger
class ContextDecorator:
    """
    Base class for objects usable both as decorators and as context
    managers.  Calling the instance on a function wraps every invocation
    in ``with self``; subclasses override __enter__/__exit__ to add
    setup/teardown behavior.
    """

    def __enter__(self):
        # Default no-op so the base class works stand-alone; previously
        # `with self` raised unless a subclass supplied __enter__/__exit__.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Returning False propagates any exception from the wrapped call.
        return False

    def __call__(self, func: t.Callable) -> t.Callable:
        self.wrapped_func = func

        @wraps(func)
        def inner(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return inner
def truncate(max_length: int) -> t.Callable:
    """
    Responsible for truncating a sentence based on its length.

    :param max_length: Maximum number of characters kept before ' ...'
    :return: a truncation function mapping str -> str
    """
    def do_truncate(sentence: str) -> str:
        # BUG FIX: the old code returned the tuple
        # (sentence[:max_length], ' ...') instead of a concatenated string,
        # contradicting the declared -> str return type.
        if len(sentence) > max_length:
            return sentence[:max_length] + ' ...'
        return sentence
    return do_truncate
class TraceDecorator:
    """
    Wraps a callable and can render a debug string describing a call,
    showing positional values, keyword values, and filled-in defaults.
    """

    def __init__(self, func: t.Callable, verbose: bool = False):
        self.func = func
        self.verbose = verbose
        self.default_index = 0  # kept for interface compatibility
        self.argspecs = inspect.getfullargspec(func)

    def __call__(self, *args, **kwargs):
        # Delegate straight to the wrapped callable.
        result = self.func(*args, **kwargs)
        return result

    def get_default_values(self, *args, **kwargs):
        """Return 'Debug: calling --> name(...)' with defaults filled in."""
        args_repr = [repr(a) for a in args]
        names = self.argspecs.args
        defaults = self.argspecs.defaults or ()
        # Index of the first parameter that declares a default value.
        first_default = len(names) - len(defaults)
        # BUG FIX: the old loop enumerated over `args`, so `i < len(args)`
        # was always true and the default/kwarg branches were unreachable.
        # Iterate over the declared parameter names instead.
        parts = []
        for i, name in enumerate(names):
            if i < len(args):
                parts.append(args_repr[i])
            elif name not in kwargs:
                parts.append(f"{name}={defaults[i - first_default]}")
            else:
                parts.append(f"{name}={kwargs[name]}")
        return ("Debug: calling --> " + self.func.__name__ +
                '(' + ', '.join(parts) + ')')
def attach_property(cls: t.Any,
                    prop: str,
                    getter = None,
                    setter = None):
    """
    Dynamically attach a property named *prop* to *cls*, backed by the
    name-mangled attribute ``_<ClassName>__<prop>``.  Optional getter and
    setter callables run before the backing attribute is read or written.
    """
    accessor: str = f"_{cls.__name__}__{prop}"

    # By default the hooks are plain attribute accesses on the backing slot.
    if getter is None:
        def getter(self):
            return getattr(self, accessor)
    if setter is None:
        def setter(self, v):
            setattr(self, accessor, v)

    @wraps(getter)
    def read_value(self):
        # Run the hook for its side effects, then read the backing slot.
        getter(self)
        return getattr(self, accessor)

    @wraps(setter)
    def write_value(self, value):
        # Run the hook, then persist the value on the backing slot.
        setter(self, value)
        setattr(self, accessor, value)

    setattr(cls, prop, property(read_value).setter(write_value))
def format_list_str(list_of_stuff: t.Union[t.List, t.Tuple]):
    """Join the given strings with a comma followed by a newline."""
    separator = ',\n'
    return separator.join(list_of_stuff)
|
import React from "react";
import styled from "styled-components";
// Footer section showing the caption "Workouts" above the workout count.
const ProgramNumWorkoutsSection = ({ numberOfWorkouts }) => {
  return (
    <SectionContainer>
      {/* label="true" selects the footerAddressText color in FooterLabel below */}
      <FooterLabel label="true">Workouts</FooterLabel>
      <FooterLabel>{numberOfWorkouts}</FooterLabel>
    </SectionContainer>
  );
};
export default ProgramNumWorkoutsSection;
// Single-column grid: caption row on top, value row underneath, centered.
const SectionContainer = styled.div`
  display: grid;
  grid-template-columns: 1fr;
  grid-template-rows: auto 1fr;
  justify-items: center;
`;
// Uppercase caption text; color switches on whether the `label` prop is set.
const FooterLabel = styled.p`
  font-family: Quicksand;
  font-size: 12px;
  color: ${props =>
    props.label ? props.theme.footerAddressText : props.theme.primaryAccent};
  text-transform: uppercase;
`;
|
"""
2021/01/18
This script plots the network response to basic shapes and the movement of them.
Look under load_data._load_basic_shapes for all the possibilities of basic shapes.
"""
import numpy as np
import tensorflow as tf
import os
from plots_utils.plot_cnn_output import plot_cnn_output
from utils.load_config import load_config
from utils.load_data import load_data
# load config
config = load_config("norm_base_basic_shape_t0002.json")
# make folder
# Output folder for the plots, created if missing.
folder = os.path.join("../../models/saved", config["save_name"])
# makedirs(..., exist_ok=True) avoids the check-then-create race and also
# creates intermediate directories that a bare os.mkdir would fail on.
os.makedirs(folder, exist_ok=True)
# cnn
model = tf.keras.applications.VGG19(include_top=False, weights="imagenet", input_shape=(224,224,3))
model = tf.keras.Model(inputs=model.input, outputs=model.get_layer("block3_pool").output)
# calculate and plot response
images = load_data(config, train=config["subset"])
response = model.predict(np.array(images))
for i, image in enumerate(images):
plot_cnn_output(response[i], os.path.join(folder, config["sub_folder"]), f"plot{i}.png",
title="block3_pool response", image=image)
|
(window["webpackJsonp"]=window["webpackJsonp"]||[]).push([["chunk-2de9b4ea"],{"11b2":function(t,e,s){"use strict";var a=function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",[s("div",{ref:"slotWrapper"},[t._t("default")],2)])},n=[],i={name:"relocator",props:{container:{default:"body"}},computed:{containerElem:function(){return"string"===typeof this.container?document.querySelector(this.container):this.container}},mounted:function(){var t=this;if(this.containerElem){var e=this.$refs.slotWrapper;e.parentNode.removeChild(e),this.containerElem.appendChild(e),this.$once("hook:beforeDestroy",(function(){e&&(e.parentNode.removeChild(e),t.$el.appendChild(e))}))}}},o=i,r=s("2877"),c=Object(r["a"])(o,a,n,!1,null,null,null);e["a"]=c.exports},3905:function(t,e,s){"use strict";var a=function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{ref:"popupContainer",staticClass:"popup",class:{active:t.isActive,"is-closing":t.isClosing},attrs:{tabindex:"-1"},on:{keyup:function(e){return!e.type.indexOf("key")&&t._k(e.keyCode,"esc",27,e.key,["Esc","Escape"])||e.target!==e.currentTarget?null:void(!t.closeOnEsc||t.close())}}},[s("div",{staticClass:"popup-overlay",on:{click:function(e){e.preventDefault(),!t.closeOnOverlay||t.close()}}}),s("div",{staticClass:"popup-panel"},[t.closeBtn?s("span",{staticClass:"popup-close-handle popup-close",on:{click:function(e){return e.preventDefault(),t.close()}}}):t._e(),s("div",{staticClass:"popup-header"},[t._t("header")],2),s("div",{staticClass:"popup-content"},[t._t("content")],2),s("div",{staticClass:"popup-footer"},[t._t("footer")],2)])])},n=[],i={name:"popup",props:{closeOnEsc:{type:Boolean,default:!0},closeOnOverlay:{type:Boolean,default:!0},closeBtn:{type:Boolean,default:!0}},data:function(){return{isActive:!1,isClosing:!1}},methods:{open:function(){var 
t=this;this.isActive=!0,this.isClosing=!1,this.$nextTick((function(){t.$refs.popupContainer.focus()})),document.body.classList.add("popup-active"),this.$nextTick((function(){var e=t.$el.querySelector("form input");e&&e.focus()}))},close:function(){var t=this;this.isActive&&(this.isClosing=!0,document.body.classList.remove("popup-active"),this.closingTimeoutId&&clearTimeout(this.closingTimeoutId),this.closingTimeoutId=setTimeout((function(){t.isActive=!1,t.isClosing=!1}),300))}}},o=i,r=s("2877"),c=Object(r["a"])(o,a,n,!1,null,null,null);e["a"]=c.exports},"438c":function(t,e,s){"use strict";var a=function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("footer",{staticClass:"app-footer"},[s("div",{staticClass:"info"},[s("a",{staticClass:"info-item",attrs:{href:t.$getAppConfig("VUE_APP_RELEASES_URL"),target:"_blank",rel:"noopener"}},[s("span",{staticClass:"txt"},[t._v("Presentator v2.13.1")])]),s("a",{staticClass:"info-item",attrs:{href:t.$getAppConfig("VUE_APP_REPO_URL"),target:"_blank",rel:"noopener"}},[s("i",{staticClass:"fe fe-github"}),s("span",{staticClass:"txt"},[t._v("Github")])]),t.loggedUser&&t.loggedUser.id&&t.$getAppConfig("VUE_APP_SHOW_SEND_FEEDBACK")<<0?s("div",{staticClass:"info-item handle",on:{click:function(e){return e.preventDefault(),t.openFeedbackPopup()}}},[s("i",{staticClass:"fe fe-life-buoy"}),s("span",{staticClass:"txt"},[t._v(t._s(t.$t("Send feedback")))])]):t._e(),t._l(t.getFooterLinks(),(function(e,a){return s("a",{staticClass:"info-item",attrs:{href:e,target:"_blank",rel:"noopener"}},[s("span",{staticClass:"txt"},[t._v(t._s(a))])])})),s("div",{staticClass:"info-item"},[s("languages-select")],1)],2),t.$getAppConfig("VUE_APP_SHOW_CREDITS")<<0?s("div",{staticClass:"credits"},[s("i18n",{attrs:{path:"Crafted by 
{author}"}},[s("a",{attrs:{slot:"author",href:"https://gani.bg",target:"_blank",rel:"noopener"},slot:"author"},[t._v("Gani")])])],1):t._e(),t.$getAppConfig("VUE_APP_SHOW_SEND_FEEDBACK")<<0?s("relocator",[s("feedback-popup",{ref:"feedbackPopup"})],1):t._e()],1)},n=[],i=(s("8e6e"),s("ac6a"),s("456d"),s("28a5"),s("bd86")),o=s("2f62"),r=s("11b2"),c=function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("form",{on:{submit:function(e){return e.preventDefault(),t.submitForm()}}},[s("popup",{ref:"popup",staticClass:"popup-sm",scopedSlots:t._u([{key:"header",fn:function(){return[s("h4",{staticClass:"title"},[t._v(t._s(t.$t("Help us improve Presentator")))])]},proxy:!0},{key:"content",fn:function(){return[s("div",{staticClass:"alert alert-light-border txt-center m-b-small"},[s("p",[t._v(" "+t._s(t.$t("Found a bug or have a feature request?"))+" "),s("br"),s("i18n",{attrs:{path:"Fill the form below OR {issuesLink}."}},[s("a",{attrs:{slot:"issuesLink",href:t.$getAppConfig("VUE_APP_ISSUES_URL"),target:"_blank",rel:"noopener"},slot:"issuesLink"},[t._v(t._s(t.$t("create a GitHub issue")))])])],1)]),s("form-field",{staticClass:"required",attrs:{name:"message"}},[s("label",{attrs:{for:"feedback_message"}},[t._v(t._s(t.$t("Message")))]),s("textarea",{directives:[{name:"model",rawName:"v-model.trim",value:t.message,expression:"message",modifiers:{trim:!0}}],attrs:{id:"feedback_message",required:""},domProps:{value:t.message},on:{input:function(e){e.target.composing||(t.message=e.target.value.trim())},blur:function(e){return t.$forceUpdate()}}})])]},proxy:!0},{key:"footer",fn:function(){return[s("button",{staticClass:"btn btn-light-border",attrs:{type:"button"},on:{click:function(e){return e.preventDefault(),t.close()}}},[s("span",{staticClass:"txt"},[t._v(t._s(t.$t("Cancel")))])]),s("button",{staticClass:"btn btn-primary btn-cons btn-loader",class:{"btn-loader-active":t.isProcessing},attrs:{type:"submit"}},[s("span",{staticClass:"txt"},[t._v(t._s(t.$t("Send 
feedback")))])])]},proxy:!0}])})],1)},l=[],u=s("f744"),p=s("3905"),f={name:"feedback-popup",components:{popup:p["a"]},data:function(){return{isProcessing:!1,message:""}},methods:{open:function(){this.resetForm(),this.$refs.popup.open(),this.$emit("open")},close:function(){this.$refs.popup.close(),this.$emit("close")},resetForm:function(){this.message=""},submitForm:function(){var t=this;this.isProcessing||(this.isProcessing=!0,u["a"].Users.sendFeedback(this.message).then((function(e){t.$toast(t.$t("Thank you for the feedback!")),t.close(),t.resetForm()})).catch((function(e){t.$errResponseHandler(e)})).finally((function(){t.isProcessing=!1})))}}},g=f,d=s("2877"),v=Object(d["a"])(g,c,l,!1,null,null,null),h=v.exports,m=s("f7ef");function _(t,e){var s=Object.keys(t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(t);e&&(a=a.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),s.push.apply(s,a)}return s}function b(t){for(var e=1;e<arguments.length;e++){var s=null!=arguments[e]?arguments[e]:{};e%2?_(Object(s),!0).forEach((function(e){Object(i["a"])(t,e,s[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(s)):_(Object(s)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(s,e))}))}return t}var C={name:"app-footer",components:{relocator:r["a"],"feedback-popup":h,"languages-select":m["a"]},computed:b({},Object(o["d"])({loggedUser:function(t){return t.user.user}})),methods:{getFooterLinks:function(){for(var t=(this.$getAppConfig("VUE_APP_FOOTER_LINKS")||"").split(","),e={},s=0;s<t.length;s++){var a=t[s].split("|",2),n=(a[0]||"").trim(),i=(a[1]||"").trim();n.length&&i.length&&(e[n]=i)}return e},openFeedbackPopup:function(){this.$refs.feedbackPopup&&this.$refs.feedbackPopup.open()}}},k=C,y=Object(d["a"])(k,a,n,!1,null,null,null);e["a"]=y.exports},"81e1":function(t,e,s){"use strict";s.r(e);var a=function(){var t=this,e=t.$createElement,s=t._self._c||e;return 
s("div",{staticClass:"auth-container"},[s("div",{staticClass:"flex-fill-block"}),s("div",{staticClass:"container-wrapper container-wrapper-sm"},[s("app-header"),s("div",{staticClass:"clearfix m-b-large"}),s("div",{staticClass:"panel auth-panel"},[s("h3",{staticClass:"panel-title"},[t._v(t._s(t.$t("Account activation")))]),s("div",{staticClass:"panel-content"},[t.isProcessing?s("div",{staticClass:"alert alert-transp-primary txt-center"},[s("p",[t._v(t._s(t.$t("Activating your account"))+" "),s("span",{staticClass:"loader m-l-5"})])]):[t.processSuccess?t._e():s("div",{staticClass:"alert alert-transp-danger txt-center"},[s("p",[t._v(" "+t._s(t.$t("The provided activation token is invalid or expired."))+" "),s("br"),s("i18n",{attrs:{path:"Please contact us at {supportEmail} if you need further assistance."}},[s("a",{attrs:{slot:"supportEmail",href:"mailto:"+t.$getAppConfig("VUE_APP_SUPPORT_EMAIL")},slot:"supportEmail"},[t._v(t._s(t.$getAppConfig("VUE_APP_SUPPORT_EMAIL")))])])],1)]),s("div",{staticClass:"clearfix m-b-small"}),s("router-link",{staticClass:"btn btn-primary block",attrs:{to:{name:"login"}}},[s("span",{staticClass:"txt"},[t._v(t._s(t.$t("Continue to login")))]),s("i",{staticClass:"fe fe-arrow-right-circle"})])]],2)])],1),s("div",{staticClass:"flex-fill-block m-b-base"}),s("app-footer")],1)},n=[],i=s("f744"),o=s("dbb6"),r=s("438c"),c={name:"activate",components:{"app-header":o["a"],"app-footer":r["a"]},data:function(){return{isProcessing:!1,processSuccess:!1}},beforeMount:function(){var t=this;this.$setDocumentTitle((function(){return t.$t("Account activation")})),this.activate()},methods:{activate:function(){var 
t=this;this.isProcessing||(this.isProcessing=!0,i["a"].Users.activate(this.$route.params.activateToken).then((function(e){t.processSuccess=!0,t.$loginByResponse(e)})).catch((function(e){t.processSuccess=!1})).finally((function(){t.isProcessing=!1})))}}},l=c,u=s("2877"),p=Object(u["a"])(l,a,n,!1,null,null,null);e["default"]=p.exports},"81fb":function(t,e,s){t.exports=s.p+"spa-resources/img/logogram.a11cb759.svg"},dbb6:function(t,e,s){"use strict";var a=function(){var t=this,e=t.$createElement,a=t._self._c||e;return a("header",{staticClass:"app-logo-wrapper"},[a("router-link",{staticClass:"app-logo",attrs:{to:{name:"home"}}},[a("img",{staticClass:"img",attrs:{src:s("81fb"),alt:"Presentator logo",width:"44"}}),a("span",{staticClass:"txt"},[t._v("Presentator")])])],1)},n=[],i={name:"app-header"},o=i,r=s("2877"),c=Object(r["a"])(o,a,n,!1,null,null,null);e["a"]=c.exports},f7ef:function(t,e,s){"use strict";var a=function(){var t=this,e=t.$createElement,s=t._self._c||e;return s("div",{staticClass:"languages-select",class:{loading:t.isChanging}},[s("div",{staticClass:"selected-language"},[s("span",{staticClass:"txt language-title"},[t._v(t._s(t.activeLanguageTitle))]),t._v(" ("),s("span",{staticClass:"txt language-code"},[t._v(t._s(t.activeLanguageCode))]),t._v(") ")]),s("toggler",{staticClass:"dropdown dropdown-sm"},t._l(t.languages,(function(e,a){return s("div",{key:"language_"+a,staticClass:"dropdown-item",class:{active:t.activeLanguageCode==a},on:{click:function(e){return e.preventDefault(),t.changeLanguage(a)}}},[s("small",{staticClass:"label language-code m-r-5",class:t.activeLanguageCode==a?"label-transp-primary":"label-light-border"},[t._v(t._s(a))]),s("span",{staticClass:"txt language-title"},[t._v(t._s(e))])])})),0)],1)},n=[],i=s("9225"),o={name:"languages-select",data:function(){return{isChanging:!1,languages:Object.assign({},i["c"])}},watch:{"$i18n.locale":function(t,e){"function"===typeof 
document._titleFunc&&this.$setDocumentTitle(document._titleFunc)}},computed:{activeLanguageCode:function(){return this.$i18n.locale},activeLanguageTitle:function(){return this.languages[this.activeLanguageCode]}},methods:{changeLanguage:function(t){var e=this;this.isChanging=!0,Object(i["a"])(t).finally((function(){e.isChanging=!1}))}}},r=o,c=s("2877"),l=Object(c["a"])(r,a,n,!1,null,null,null);e["a"]=l.exports}}]);
|
import { createGlobalStyle } from 'styled-components';
import fontRegular from '../assets/fonts/SourceSansPro-Regular.ttf';
import fontBold from '../assets/fonts/SourceSansPro-Bold.ttf';

/**
 * Global typography styles: registers the Source Sans Pro font faces
 * (regular and bold) and applies base text styling to the document.
 */
const Typography = createGlobalStyle`
  @font-face {
    font-family: SourceSansPro;
    src: url(${fontRegular});
    font-weight: normal;
  }
  /* Bug fix: fontBold was imported but never registered, so bold headings
     fell back to a synthesized (faux) bold of the regular face. */
  @font-face {
    font-family: SourceSansPro;
    src: url(${fontBold});
    font-weight: bold;
  }
  html {
    font-family: SourceSansPro;
    color: var(--brown);
  }
  h1,h2,h3,h4,h5,h6 {
    font-weight: bold;
    margin: 0;
  }
  h1, h2 {
    line-height: 3.5rem;
  }
  a {
    /* Bug fix: 'text-decoration' is a shorthand that resets
       'text-decoration-color', so the color must come after it. */
    text-decoration: none;
    text-decoration-color: var(--brown);
  }
`;

export default Typography;
|
// ESLint configuration for the TypeScript sources in this package.
module.exports = {
  parser: '@typescript-eslint/parser',
  extends: [
    // Uses the recommended rules from the @typescript-eslint/eslint-plugin.
    'plugin:@typescript-eslint/recommended',
    // Uses eslint-config-prettier to disable ESLint rules from
    // @typescript-eslint/eslint-plugin that would conflict with prettier.
    'prettier/@typescript-eslint',
    // Enables eslint-plugin-prettier and displays prettier errors as ESLint
    // errors. Make sure this is always the last configuration in the extends array.
    'plugin:prettier/recommended',
  ],
  plugins: ['@typescript-eslint'],
  rules: {
    // Type is enforced by callers. Not entirely, but it's good enough.
    '@typescript-eslint/explicit-function-return-type': 'off',
    // Aries protocol defines attributes with snake case.
    '@typescript-eslint/camelcase': 'off',
    // Allow forward references to functions/classes, but not to variables.
    '@typescript-eslint/no-use-before-define': ['error', { functions: false, classes: false, variables: true }],
    '@typescript-eslint/explicit-member-accessibility': 'off',
    // Disallow console output.
    'no-console': 'error',
  },
};
|
// Legacy ESLint configuration.
// NOTE(review): 'typescript-eslint-parser' and the 'typescript' plugin are the
// deprecated pre-1.0 package names; their maintained successors are
// '@typescript-eslint/parser' and '@typescript-eslint/eslint-plugin'
// (rule '@typescript-eslint/no-unused-vars'), as used by the other ESLint
// config in this repository. Consider migrating.
module.exports = {
  parser: 'typescript-eslint-parser',
  plugins: ['typescript'],
  rules: {
    // Flag unused variables using TypeScript-aware analysis.
    'typescript/no-unused-vars': 'error',
  },
};
|
// Browser ES-module script: boots an xterm.js (v3.x) terminal and bridges it
// to a socket.io "/repl" namespace.
// The CDN scripts below are side-effect imports: the xterm addons attach
// themselves to `window` (window.fit, window.fullscreen, window.webLinks)
// and socket.io exposes the global `io` factory.
import 'https://cdnjs.cloudflare.com/ajax/libs/xterm/3.14.5/xterm.min.js'
import 'https://cdnjs.cloudflare.com/ajax/libs/xterm/3.14.5/addons/fit/fit.min.js'
import 'https://cdnjs.cloudflare.com/ajax/libs/xterm/3.14.5/addons/fullscreen/fullscreen.min.js'
import 'https://cdnjs.cloudflare.com/ajax/libs/xterm/3.14.5/addons/webLinks/webLinks.min.js'
import 'https://cdnjs.cloudflare.com/ajax/libs/socket.io/2.3.0/socket.io.slim.js'

const term = new window.Terminal({ cursorBlink: true })
// Mount the terminal into the #terminal element
term.open(document.getElementById('terminal'))
term.write('Hello from \x1B[1;3;31mxterm.js\x1B[0m $ ')
// Apply the addons: toggle fullscreen first, then size the terminal to fit
// its container, then make URLs in the output clickable.
window.fullscreen.toggleFullScreen(term)
window.fit.fit(term)
window.webLinks.webLinksInit(term)

// Pipe keystrokes to the server and server output back into the terminal.
const socket = io('/repl')
term.on('data', chunk => socket.emit('terminal-in', chunk))
socket.on('terminal-out', chunk => term.write(chunk))
|
/**
 * AngularJS data service for CRUD operations against the content API
 * at `siteRoot + "api/content/{contentType}"`.
 * Every method returns the $http promise so callers can chain on it.
 */
app.factory('contentItemService', ['$http', function contentItemService($http) {
    var base_path = siteRoot + "api/content";

    var service = {
        search: search,
        get: get,
        insert: insert,
        update: update,
        remove: remove,
        save: save
    };

    return service;

    // implementation

    /**
     * Searches content items of a type with optional filtering/paging.
     * Bug fix: uses $http's `params` option so values are URL-encoded and
     * null/undefined filters are omitted from the query string; the previous
     * string concatenation sent the literal text "null" for missing values.
     */
    function search(contentType = '', name = '', verbose = null, skip = null, take = null) {
        return $http({
            method: "get",
            url: base_path + '/' + contentType + '/',
            params: { name: name, verbose: verbose, skip: skip, take: take }
        });
    }

    // Fetches a single content item by id.
    function get(contentType, id) {
        return $http({
            method: "get",
            url: base_path + '/' + contentType + '/' + id
        });
    }

    // Creates a new content item.
    function insert(contentType, item) {
        return $http({
            method: "post",
            headers: {
                'Content-Type': "application/json"
            },
            url: base_path + '/' + contentType,
            data: item
        });
    }

    // Updates an existing content item.
    function update(contentType, item) {
        return $http({
            method: "put",
            url: base_path + '/' + contentType,
            headers: {
                'Content-Type': "application/json"
            },
            data: item
        });
    }

    // Deletes a content item by id.
    function remove(contentType, id) {
        return $http({
            method: "delete",
            url: base_path + '/' + contentType + '/' + id
        });
    }

    // Inserts or updates depending on whether the item already has an id.
    function save(contentType, item) {
        if (item.id > 0) {
            return update(contentType, item);
        }
        return insert(contentType, item);
    }
}]);
|
'use strict';
const logger = require('../logger'),
validation = require('./validation'),
tokens = require('./../services/tokenSessions'),
bcrypt = require('bcryptjs'),
errors = require('../errors'),
User = require('../models').user,
saltRounds = 10,
time = require('./../../config').common.expiration;
/**
 * Builds a user object from a request body, validates it, and hashes its
 * password with bcrypt.
 *
 * Bug fix: validation failures now produce a rejected promise instead of a
 * synchronous throw. Callers chain `.then(...).catch(next)` on the result,
 * so a synchronous throw bypassed their promise `.catch` handlers.
 *
 * @param {Object} body - request body with firstName, lastName, password, email.
 * @returns {Promise<Object>} resolves to the user with the password replaced
 *   by its hash; rejects with errors.invalidUser on validation failure.
 */
const newUserObject = body => {
  const newUser = body
    ? {
        firstName: body.firstName,
        lastName: body.lastName,
        password: body.password,
        email: body.email
      }
    : {};
  const errMsg = validation.validateUser(newUser);
  if (errMsg.length > 0) {
    return Promise.reject(errors.invalidUser(errMsg));
  }
  return bcrypt.hash(newUser.password, saltRounds).then(hash => {
    newUser.password = hash;
    return newUser;
  });
};
/**
 * POST handler: validates the request body, persists the new user and
 * responds 201 on success. Failures are delegated to the error middleware.
 */
exports.newUser = (req, res, next) =>
  newUserObject(req.body)
    .then(user =>
      User.createModel(user)
        .then(created => {
          logger.info(`Successfully created new user. Welcome, ${created.firstName} ${created.lastName}!`);
          res.status(201).end();
        })
        .catch(next)
    )
    .catch(next);
/**
 * POST handler: creates a new admin user, or promotes an existing user to
 * admin, and responds 201 on success.
 */
exports.newAdmin = (req, res, next) =>
  newUserObject(req.body)
    .then(admin => {
      admin.isAdmin = true;
      return User.upsertUser(admin).then(created => {
        if (created) {
          logger.info(`Successfully created new admin.`);
        } else {
          logger.info(`Successfully granted admin status to user.`);
        }
        res.status(201).end();
      });
    })
    .catch(next);
/**
 * POST handler: authenticates a user by email + password.
 * On success, responds 200 with the session token in the `tokens.headerName`
 * response header and the token lifetime in the body; on failure, forwards an
 * invalidUser error to the error middleware.
 *
 * NOTE(review): the two distinct failure messages below reveal whether an
 * email address is registered (user enumeration); consider a uniform message.
 */
exports.signIn = (req, res, next) => {
  const credentials = req.body
    ? {
        password: req.body.password,
        email: req.body.email
      }
    : {};
  const errMsg = validation.validateSignIn(credentials);
  if (errMsg.length > 0) {
    next(errors.invalidUser(errMsg));
  } else {
    // Only fetch the columns needed for the password check.
    return User.getOneWhere(['email', 'password'], { email: credentials.email })
      .then(dbUser => {
        if (dbUser) {
          return bcrypt.compare(credentials.password, dbUser.password).then(validPass => {
            if (validPass) {
              // Issue the session token in a response header, expiry in the body.
              const auth = tokens.encode({ email: dbUser.email });
              res.status(200);
              res.set(tokens.headerName, auth);
              res.send({ expirationTime: time });
            } else {
              next(errors.invalidUser('The email/password combination you entered is invalid.'));
            }
          });
        } else {
          next(errors.invalidUser('There is no user registered with that email.'));
        }
      })
      .catch(next);
  }
};
exports.listUsers = (req, res, next) => {
const lim = req.query.limit || 5;
const page = req.query.page * lim || 0;
return User.getAllNoPassword(page, lim)
.then(userList => {
res.send(userList);
})
.catch(next);
};
exports.invalidateAll = (req, res, next) =>
User.logout(req.user.id)
.then(() => {
res.send('Logged out successfully');
})
.catch(next);
|
/*global define*/
define([
'../Core/defaultValue',
'../Core/defineProperties',
'../Core/VertexFormat',
'../Shaders/Appearances/PerInstanceFlatColorAppearanceFS',
'../Shaders/Appearances/PolylineColorAppearanceVS',
'../Shaders/PolylineCommon',
'./Appearance'
], function(
defaultValue,
defineProperties,
VertexFormat,
PerInstanceFlatColorAppearanceFS,
PolylineColorAppearanceVS,
PolylineCommon,
Appearance) {
"use strict";
var defaultVertexShaderSource = PolylineCommon + '\n' + PolylineColorAppearanceVS;
var defaultFragmentShaderSource = PerInstanceFlatColorAppearanceFS;
/**
* An appearance for {@link GeometryInstance} instances with color attributes and {@link PolylineGeometry}.
* This allows several geometry instances, each with a different color, to
* be drawn with the same {@link Primitive}.
*
* @alias PolylineColorAppearance
* @constructor
*
* @param {Object} [options] Object with the following properties:
* @param {Boolean} [options.translucent=true] When <code>true</code>, the geometry is expected to appear translucent so {@link PolylineColorAppearance#renderState} has alpha blending enabled.
* @param {String} [options.vertexShaderSource] Optional GLSL vertex shader source to override the default vertex shader.
* @param {String} [options.fragmentShaderSource] Optional GLSL fragment shader source to override the default fragment shader.
* @param {RenderState} [options.renderState] Optional render state to override the default render state.
*
*@demo {@link http://cesiumjs.org/Cesium/Apps/Sandcastle/index.html?src=Polyline%20Color.html|Cesium Sandcastle Polyline Color Appearance Demo}
*
* @example
* // A solid white line segment
* var primitive = new Cesium.Primitive({
* geometryInstances : new Cesium.GeometryInstance({
* geometry : new Cesium.PolylineGeometry({
* positions : Cesium.Cartesian3.fromDegreesArray([
* 0.0, 0.0,
* 5.0, 0.0
* ]),
* width : 10.0,
* vertexFormat : Cesium.PolylineColorAppearance.VERTEX_FORMAT
* }),
* attributes : {
* color : Cesium.ColorGeometryInstanceAttribute.fromColor(new Cesium.Color(1.0, 1.0, 1.0, 1.0))
* }
* }),
* appearance : new Cesium.PolylineColorAppearance({
* translucent : false
* })
* });
*/
function PolylineColorAppearance(options) {
options = defaultValue(options, defaultValue.EMPTY_OBJECT);
var translucent = defaultValue(options.translucent, true);
var closed = false;
var vertexFormat = PolylineColorAppearance.VERTEX_FORMAT;
/**
* This property is part of the {@link Appearance} interface, but is not
* used by {@link PolylineColorAppearance} since a fully custom fragment shader is used.
*
* @type Material
*
* @default undefined
*/
this.material = undefined;
/**
* When <code>true</code>, the geometry is expected to appear translucent so
* {@link PolylineColorAppearance#renderState} has alpha blending enabled.
*
* @type {Boolean}
*
* @default true
*/
this.translucent = translucent;
this._vertexShaderSource = defaultValue(options.vertexShaderSource, defaultVertexShaderSource);
this._fragmentShaderSource = defaultValue(options.fragmentShaderSource, defaultFragmentShaderSource);
this._renderState = Appearance.getDefaultRenderState(translucent, closed, options.renderState);
this._closed = closed;
// Non-derived members
this._vertexFormat = vertexFormat;
}
    // Read-only accessors over the private fields initialized in the constructor.
    defineProperties(PolylineColorAppearance.prototype, {
        /**
         * The GLSL source code for the vertex shader.
         *
         * @memberof PolylineColorAppearance.prototype
         *
         * @type {String}
         * @readonly
         */
        vertexShaderSource : {
            get : function() {
                return this._vertexShaderSource;
            }
        },
        /**
         * The GLSL source code for the fragment shader.
         *
         * @memberof PolylineColorAppearance.prototype
         *
         * @type {String}
         * @readonly
         */
        fragmentShaderSource : {
            get : function() {
                return this._fragmentShaderSource;
            }
        },
        /**
         * The WebGL fixed-function state to use when rendering the geometry.
         * <p>
         * The render state can be explicitly defined when constructing a {@link PolylineColorAppearance}
         * instance, or it is set implicitly via {@link PolylineColorAppearance#translucent}.
         * </p>
         *
         * @memberof PolylineColorAppearance.prototype
         *
         * @type {Object}
         * @readonly
         */
        renderState : {
            get : function() {
                return this._renderState;
            }
        },
        /**
         * When <code>true</code>, the geometry is expected to be closed so
         * {@link PolylineColorAppearance#renderState} has backface culling enabled.
         * This is always <code>false</code> for <code>PolylineColorAppearance</code>.
         *
         * @memberof PolylineColorAppearance.prototype
         *
         * @type {Boolean}
         * @readonly
         *
         * @default false
         */
        closed : {
            get : function() {
                return this._closed;
            }
        },
        /**
         * The {@link VertexFormat} that this appearance instance is compatible with.
         * A geometry can have more vertex attributes and still be compatible - at a
         * potential performance cost - but it can't have less.
         *
         * @memberof PolylineColorAppearance.prototype
         *
         * @type VertexFormat
         * @readonly
         *
         * @default {@link PolylineColorAppearance.VERTEX_FORMAT}
         */
        vertexFormat : {
            get : function() {
                return this._vertexFormat;
            }
        }
    });

    /**
     * The {@link VertexFormat} that all {@link PolylineColorAppearance} instances
     * are compatible with. This requires only a <code>position</code> attribute.
     *
     * @type VertexFormat
     *
     * @constant
     */
    PolylineColorAppearance.VERTEX_FORMAT = VertexFormat.POSITION_ONLY;

    // The remaining prototype methods are shared with the base Appearance type.

    /**
     * Procedurally creates the full GLSL fragment shader source.
     *
     * @function
     *
     * @returns {String} The full GLSL fragment shader source.
     */
    PolylineColorAppearance.prototype.getFragmentShaderSource = Appearance.prototype.getFragmentShaderSource;

    /**
     * Determines if the geometry is translucent based on {@link PolylineColorAppearance#translucent}.
     *
     * @function
     *
     * @returns {Boolean} <code>true</code> if the appearance is translucent.
     */
    PolylineColorAppearance.prototype.isTranslucent = Appearance.prototype.isTranslucent;

    /**
     * Creates a render state. This is not the final render state instance; instead,
     * it can contain a subset of render state properties identical to the render state
     * created in the context.
     *
     * @function
     *
     * @returns {Object} The render state.
     */
    PolylineColorAppearance.prototype.getRenderState = Appearance.prototype.getRenderState;

    return PolylineColorAppearance;
});
|
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 25 18:26:14 2021
@author: Emmanuel
"""
# Hexagonal board: cells are (x, y, z) triples with x, y in [-3, 3],
# z = x - y, and |z| <= 3.
board = {(col - 3, row - 3, col - row)
         for col in range(7)
         for row in range(7)
         if abs(col - row) <= 3}


def neighbors_1(x, y, z):
    """Return the board cells adjacent to (x, y, z)."""
    candidates = set()
    for step in (-1, 1):
        candidates.add((x, y - step, z + step))
        candidates.add((x + step, y + step, z))
        candidates.add((x + step, y, z + step))
    # Keep only candidates that actually lie on the board.
    return candidates & board


def neighbors_2(x, y, z):
    """Return the board cells reachable in one or two adjacency steps
    from (x, y, z), excluding the cell itself."""
    reachable = set()
    for cell in neighbors_1(x, y, z):
        reachable |= neighbors_1(*cell)
    reachable.discard((x, y, z))
    return reachable & board
|
{
"targets": [
{
"target_name": "<(module_name)",
"sources": [ "<(module_name).cc" ],
'include_dirs': [
'<(custom_include_path)',
"../../node_modules/node-addon-api/"
],
'product_dir': '<(module_path)',
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions' ],
"xcode_settings": {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
"CLANG_CXX_LIBRARY": "libc++"
},
'msvs_settings': {
'VCCLCompilerTool': { 'ExceptionHandling': 1 },
}
}
]
}
|
// Minimal static file server: serves the contents of the 'code' directory
// on port 5000 using connect + serve-static.
const connect = require('connect');
const serveStatic = require('serve-static');

connect()
    .use(serveStatic('code'))
    .listen(5000);

console.log("static file server is running at port 5000");
|
import numpy as np
from cliffwalk.cliffwalk import GridWorld
def egreedy_policy(q_values, state, epsilon=0.1, n_actions=4):
    """Pick an action epsilon-greedily from the Q-table row for ``state``.

    With probability ``epsilon`` a uniformly random action is returned,
    otherwise the greedy (argmax) action. ``n_actions`` generalizes the
    previously hard-coded action count of 4 (backward-compatible default).
    """
    if np.random.random() < epsilon:
        return np.random.choice(n_actions)
    return np.argmax(q_values[state])


def sarsa_td0(env,
              actions=('UP', 'DOWN', 'RIGHT', 'LEFT'),
              num_states=4 * 12,
              num_actions=4,
              epochs=500,
              render=True,
              exploration_rate=0.1,
              learning_rate=0.5,
              gamma=0.9):
    """Train a tabular SARSA(0) agent on ``env``.

    env must expose reset() -> state, step(action) -> (state, reward, done),
    and (only if render=True) render(q, action=..., colorize_q=...).
    Returns the learned Q-table of shape (num_states, num_actions).
    """
    q = np.zeros((num_states, num_actions))
    reward_sum_list = []
    for i in range(epochs):
        state = env.reset()
        done = False
        reward_sum = 0
        action = egreedy_policy(q, state, exploration_rate, num_actions)
        while not done:
            next_state, reward, done = env.step(action)
            reward_sum += reward
            next_action = egreedy_policy(q, next_state, exploration_rate, num_actions)
            # SARSA TD(0) target. Bug fix: the bootstrap term is dropped on
            # terminal transitions (standard SARSA); it was always included.
            td_target = reward + (0.0 if done else gamma * q[next_state][next_action])
            q[state][action] += learning_rate * (td_target - q[state][action])
            state = next_state
            action = next_action
            # Bug fix: rendering previously ignored the `render` flag and ran
            # every 100th epoch unconditionally.
            if render and i % 100 == 0:
                env.render(q, action=actions[action], colorize_q=True)
        reward_sum_list.append(reward_sum)
        if i % 3 == 0:
            print('Average scores = ', np.mean(reward_sum_list))
            reward_sum_list = []
    return q
def train():
    """Train a SARSA(0) agent on the cliff-walk grid world."""
    environment = GridWorld()
    sarsa_td0(environment, render=False, learning_rate=0.5, gamma=0.99)


if __name__ == '__main__':
    train()
|
/**
 * Renders the HTML body of the "new signup" notification e-mail (Norwegian
 * copy) sent when someone registers on kodeboksen.no.
 *
 * Bug fix: the E-post table cell used a malformed closing tag `<b/>`;
 * it is now a proper `</b>`. The export is also named (still the default
 * export, so existing imports are unaffected).
 *
 * @param {string} email - the address the visitor registered with.
 * @returns {string} a complete HTML document as a string.
 */
export default function signupNotificationEmail(email) {
  return `
<html>
<head>
<meta charset="utf-8">
<style>
.wrapper {
max-width: 600px;
margin: 0 auto;
}
.grayBg a {
color: rgba(0, 0, 0, 0.5);
}
a {
color: #fff;
}
table a {
color: rgba(0, 0, 0, 0.5);
text-decoration: none;
}
</style>
</head>
<body style="background-color: #F7F7F7; font-family: Helvetica, sans-serif; margin: 0; padding: 0; color: rgba(0, 0, 0, 0.8);">
<div class="wrapper" style="padding: 20px;">
<div style="font-size: 18px;">
<h1>Ny registrering!</h1>
</div>
</div>
<div style="padding: 20px; background-color: #ECECEC; border-top: 1px solid #cacaca; border-bottom: 1px solid #cacaca;">
<div class="wrapper">
<div class="grayBg">
<p style="color: rgba(0, 0, 0, 0.5); line-height: 24px;">
Du har mottatt en ny registrering fra kodeboksen.no!
</p>
</div>
<table>
<tr>
<td><b>E-post</b></td>
<td>:</td>
<td>${email}</td>
</tr>
</table>
<br/>
</div>
</div>
<div class="wrapper" style="padding: 20px; margin-top: 20px;">
</div>
<div style="background-color: #ff4081; padding: 20px; color: #fff; font-size: 28px; margin-top: 20px; margin-bottom: 20px;">
<div class="wrapper">
<span style="font-size: 16px; color: rgba(255, 255, 255, 0.6)">Med vennlig hilsen,</span> <br/>
Kodeboksen.no
</div>
</div>
<div class="wrapper" style="font-size: 12px; color: rgba(0, 0, 0, 0.4); padding: 0 20px; margin-bottom: 20px;">
Din e-post sendes ikke videre og vil kun motta e-poster knyttet til lanseringen av Kodeboksen.
<br/>
Dersom du ikke ønsker å motte flere e-poster fra oss, kontakt oss på <a style="color: rgba(0, 0, 0, 0.4);" href="mailto:post@kodeboksen.no">post@kodeboksen.no</a>
</div>
</body>
</html>
`;
}
|
/**
* Copyright 2015 Telerik AD
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
!function(e,define){define([],e)}(function(){return function(e){var t=e.kendo||(e.kendo={cultures:{}});t.cultures["ar-AE"]={name:"ar-AE",numberFormat:{pattern:["n-"],decimals:2,",":",",".":".",groupSize:[3],percent:{pattern:["-n %","n %"],decimals:2,",":",",".":".",groupSize:[3],symbol:"%"},currency:{pattern:["$n-","$ n"],decimals:2,",":",",".":".",groupSize:[3],symbol:"د.إ."}},calendars:{standard:{days:{names:["الأحد","الإثنين","الثلاثاء","الأربعاء","الخميس","الجمعة","السبت"],namesAbbr:["الأحد","الإثنين","الثلاثاء","الأربعاء","الخميس","الجمعة","السبت"],namesShort:["ح","ن","ث","ر","خ","ج","س"]},months:{names:["يناير","فبراير","مارس","أبريل","مايو","يونيو","يوليه","أغسطس","سبتمبر","أكتوبر","نوفمبر","ديسمبر",""],namesAbbr:["يناير","فبراير","مارس","أبريل","مايو","يونيو","يوليه","أغسطس","سبتمبر","أكتوبر","نوفمبر","ديسمبر",""]},AM:["ص","ص","ص"],PM:["م","م","م"],patterns:{d:"dd/MM/yyyy",D:"dd MMMM, yyyy",F:"dd MMMM, yyyy hh:mm:ss tt",g:"dd/MM/yyyy hh:mm tt",G:"dd/MM/yyyy hh:mm:ss tt",m:"dd MMMM",M:"dd MMMM",s:"yyyy'-'MM'-'dd'T'HH':'mm':'ss",t:"hh:mm tt",T:"hh:mm:ss tt",u:"yyyy'-'MM'-'dd HH':'mm':'ss'Z'",y:"MMMM, yyyy",Y:"MMMM, yyyy"},"/":"/",":":":",firstDay:6}}}}(this),window.kendo},"function"==typeof define&&define.amd?define:function(e,t){t()});
|
const post = require('./post');
const {
errors,
ERROR_UNKNOWN_WORD,
ERROR_REPEATED_WORD,
ERROR_CAPITALIZATION,
ERROR_TOO_MANY_ERRORS,
} = require('./errors');
const YASPELLER_HOST = 'speller.yandex.net';
const YASPELLER_PATH = '/services/spellservice.json/';
const DEFAULT_FORMAT = 'plain';
const DEFAULT_LANG = 'en,ru';
const DEFAULT_REQUEST_LIMIT = 2;
const DEFAULT_TIMEOUT = 500;
/**
 * Check text for typos.
 *
 * @param {string} text
 * @param {Function} callback - invoked as callback(error, body).
 * @param {Settings} settings
 * @see {@link https://tech.yandex.ru/speller/doc/dg/reference/checkText-docpage/}
 */
function checkText(text, callback, settings) {
    const form = prepareSettings(settings);
    form.text = text;

    post({
        host: YASPELLER_HOST,
        path: YASPELLER_PATH + 'checkText',
        form: form,
    }, function(error, response, body) {
        if (error) {
            callback(error, null);
            return;
        }

        if (response.statusCode === 200) {
            callback(null, body);
            return;
        }

        // Bug fix: the trailing `null` was previously inside the Error(...)
        // call (a misplaced parenthesis), so the callback received only one
        // argument; it is now passed as the callback's body argument. Also
        // constructs the Error with `new`.
        callback(
            new Error('Yandex.Speller API returns status code is ' +
                response.statusCode),
            null
        );
    });
}
/**
 * Check texts for typos.
 *
 * @param {string[]} texts
 * @param {Function} callback - invoked as callback(error, body).
 * @param {Settings} settings
 * @see {@link https://tech.yandex.ru/speller/doc/dg/reference/checkTexts-docpage/}
 */
function checkTexts(texts, callback, settings) {
    const form = prepareSettings(settings);
    form.text = texts;

    post({
        host: YASPELLER_HOST,
        path: YASPELLER_PATH + 'checkTexts',
        form: form,
    }, function(error, response, body) {
        if (error) {
            callback(error, null);
            return;
        }

        if (response.statusCode === 200) {
            callback(null, body);
            return;
        }

        // Bug fix: the trailing `null` was previously inside the Error(...)
        // call (a misplaced parenthesis), so the callback received only one
        // argument; it is now passed as the callback's body argument. Also
        // constructs the Error with `new`.
        callback(
            new Error('Yandex.Speller API returns status code is ' +
                response.statusCode),
            null
        );
    });
}
/**
 * Converts camelCase option flags into the numeric bit mask expected by
 * the Yandex.Speller API.
 *
 * @param {Object} [options] - map of camelCase flag names to booleans.
 * @returns {number} the combined bit mask (0 when no known flag is set).
 */
function prepareOptions(options) {
    // https://tech.yandex.ru/speller/doc/dg/reference/speller-options-docpage/
    const standartOptions = {
        IGNORE_UPPERCASE: 1,
        IGNORE_DIGITS: 2,
        IGNORE_URLS: 4,
        FIND_REPEAT_WORDS: 8,
        IGNORE_LATIN: 16,
        NO_SUGGEST: 32,
        FLAG_LATIN: 128,
        BY_WORDS: 256,
        IGNORE_CAPITALIZATION: 512,
        IGNORE_ROMAN_NUMERALS: 2048,
    };

    let result = 0;
    for (const key of Object.keys(options || {})) {
        // camelCase -> SCREAMING_SNAKE_CASE, e.g. ignoreDigits -> IGNORE_DIGITS.
        const upperCaseKey = key.replace(/([A-Z])/g, '_$1').toUpperCase();
        if (standartOptions[upperCaseKey] && options[key]) {
            result |= standartOptions[upperCaseKey];
        }
    }

    return result;
}
/**
 * Fills in defaults for any missing request settings.
 *
 * @param {Settings} [settings]
 * @returns {Object} normalized settings with format, lang, options (bit
 *   mask), requestLimit and timeout always present.
 */
function prepareSettings(settings) {
    const source = settings || {};

    return {
        format: source.format || DEFAULT_FORMAT,
        lang: source.lang || DEFAULT_LANG,
        options: prepareOptions(source.options),
        requestLimit: source.requestLimit || DEFAULT_REQUEST_LIMIT,
        timeout: source.timeout || DEFAULT_TIMEOUT,
    };
}
/**
* @typedef {Object} Settings
* @param {string} [format] Text format: plain or html.
* @param {string|Array} [lang] Language: en, ru or uk.
* @param {number} [requestLimit] Request repeat count in case internet connection issues.
* @param {number} [timeout] Timeout between request repeats in milliseconds.
* @param {Object} [options]
* @param {boolean} [options.ignoreUppercase] Ignore words written in capital letters.
* @param {boolean} [options.ignoreDigits] Ignore words with numbers, such as "avp17h4534".
* @param {boolean} [options.ignoreUrls] Ignore Internet addresses, email addresses and filenames.
* @param {boolean} [options.findRepeatWords] Highlight repetitions of words, consecutive. For example, "I flew to to to Cyprus".
* @param {boolean} [options.ignoreLatin] Ignore words, written in Latin, for example, "madrid".
* @param {boolean} [options.noSuggest] Just check the text, without giving options to replace.
* @param {boolean} [options.flagLatin] Celebrate words, written in Latin, as erroneous.
* @param {boolean} [options.byWords] Do not use a dictionary environment (context) during the scan. This is useful in cases where the service is transmitted to the input of a list of individual words.
* @param {boolean} [options.ignoreCapitalization] Ignore the incorrect use of UPPERCASE / lowercase letters, for example, in the word "moscow".
* @param {boolean} [options.ignoreRomanNumerals] Ignore Roman numerals ("I, II, III, ...").
*/
// Public API of the Yandex.Speller client.
module.exports = {
    checkText,
    checkTexts,
    defaultFormat: DEFAULT_FORMAT,
    defaultLang: DEFAULT_LANG,
    errors,
    errorCodes: errors, // Deprecated
    ERROR_UNKNOWN_WORD,
    ERROR_REPEATED_WORD,
    ERROR_CAPITALIZATION,
    ERROR_TOO_MANY_ERRORS,
    supportedFormats: ['plain', 'html']
};
|
"""
The system RoBERTa trains on the AGB dataset with softmax loss function.
At every 1000 training steps, the model is evaluated on the AGB dev set.
"""
from torch.utils.data import DataLoader
import math
from sentence_transformers import models, losses
from sentence_transformers import SentencesDataset, LoggingHandler, SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator, LabelAccuracyEvaluator
from sentence_transformers.readers import *
import logging
from datetime import datetime
#### Just some code to print debug information to stdout
logging.basicConfig(format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.INFO,
handlers=[LoggingHandler()])
#### /print debug information to stdout
# Read the dataset
model_name = 'roberta-base'
batch_size = 32
agb_reader = AGBDataReader('datasets/AGB_og')
train_num_labels = agb_reader.get_num_labels()
model_save_path = 'output/training_agb_og_'+model_name+'-'+datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
# Use RoBERTa for mapping tokens to embeddings
word_embedding_model = models.RoBERTa(model_name)
# Apply mean pooling to get one fixed sized sentence vector
pooling_model = models.Pooling(word_embedding_model.get_word_embedding_dimension(),
pooling_mode_mean_tokens=True,
pooling_mode_cls_token=False,
pooling_mode_max_tokens=False)
model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
# Convert the dataset to a DataLoader ready for training
logging.info("Read AGB train dataset")
train_data = SentencesDataset(agb_reader.get_examples('train.tsv'), model=model, shorten=True)
train_dataloader = DataLoader(train_data, shuffle=True, batch_size=batch_size)
train_loss = losses.SoftmaxLoss(model=model,
sentence_embedding_dimension=model.get_sentence_embedding_dimension(),
num_labels=train_num_labels)
logging.info("Read AGB dev dataset")
dev_data = SentencesDataset(examples=agb_reader.get_examples('dev.tsv'), model=model, shorten=True)
dev_dataloader = DataLoader(dev_data, shuffle=False, batch_size=batch_size)
evaluator = LabelAccuracyEvaluator(dev_dataloader, softmax_model=train_loss)
# Configure the training
num_epochs = 2
warmup_steps = math.ceil(len(train_data) * num_epochs / batch_size * 0.1) # 10% of train data for warm-up
logging.info("Warmup-steps: {}".format(warmup_steps))
# Train the model
model.fit(train_objectives=[(train_dataloader, train_loss)],
evaluator=evaluator,
epochs=num_epochs,
evaluation_steps=10000,
warmup_steps=warmup_steps,
output_path=model_save_path
)
##############################################################################
#
# Load the stored model and evaluate its performance on STS benchmark dataset
#
##############################################################################
model = SentenceTransformer(model_save_path)
test_data = SentencesDataset(examples=agb_reader.get_examples('test.tsv'), model=model, shorten=True)
test_dataloader = DataLoader(test_data, shuffle=False, batch_size=batch_size)
evaluator = LabelAccuracyEvaluator(test_dataloader, softmax_model=train_loss)
model.evaluate(evaluator)
|
'use strict'
/* global hexo */
const debug = require('debug')('docs')
const helpers = require('../lib/helpers')
let pathFn = require('path')
let _ = require('lodash')
/**
 * Returns true when `str` begins with `start`.
 * Delegates to the native ES6 String.prototype.startsWith instead of the
 * manual substring comparison (the file already uses ES6 features); kept
 * as a named helper for backwards compatibility with existing callers.
 */
function startsWith (str, start) {
  return str.startsWith(start)
}
// Renders the prev/next article links in the docs footer, based on the
// flattened order of pages in the sidebar data for the current section.
hexo.extend.helper.register('page_nav', function () {
  const lang = this.page.lang
  const isEnglish = lang === 'en'
  const type = this.page.canonical_path.split('/')[0]
  const sidebar = this.site.data.sidebar[type]
  const path = pathFn.basename(this.path)
  const prefix = `sidebar.${type}.`

  // Flatten the two-level sidebar (group -> title -> page) into an
  // ordered page -> { group, title } lookup.
  const list = {}
  for (const group in sidebar) {
    for (const title in sidebar[group]) {
      list[sidebar[group][title]] = { 'group': group, 'title': title }
    }
  }

  const keys = Object.keys(list)
  const index = keys.indexOf(path)

  // Builds the href for a page, prefixing the language for non-English locales.
  const hrefFor = (page, group) =>
    (!isEnglish ? [lang, type, group, page] : [type, group, page]).join('/')

  let result = ''

  if (index > 0) {
    const page = keys[index - 1]
    const { group, title } = list[page]
    result += `<a href="${this.config.root + hrefFor(page, group)}" title="Prev Article" class="article-footer-prev"><i class="fa fa-chevron-left"></i><span>${this.__(prefix + title)}</span></a>`
  }

  if (index < keys.length - 1) {
    const page = keys[index + 1]
    const { group, title } = list[page]
    result += `<a href="${this.config.root + hrefFor(page, group)}" title="Next Article" class="article-footer-next"><span>${this.__(prefix + title)}</span><i class="fa fa-chevron-right"></i></a>`
  }

  return result
})
// Renders the collapsible docs sidebar for the current section.
// NOTE: this builds the markup incrementally and then patches the already
// appended group <li> via string replace (to mark it current / expanded),
// so the order of the operations below matters.
hexo.extend.helper.register('doc_sidebar', function (className) {
  const lang = this.page.lang
  const isEnglish = lang === 'en'
  const type = this.page.canonical_path.split('/')[0]
  const sidebar = this.site.data.sidebar[type]
  const path = pathFn.basename(this.path)
  const self = this
  const prefix = `sidebar.${type}.`
  let result = ''
  let expandAll = false
  // IF the sidebar's categories aren't that many,
  // just expand them all, since it's more of a hassle to expand one by one
  if (_.keys(sidebar).length <= 6) {
    expandAll = true
  }
  _.each(sidebar, function (menu, title) {
    result += `<li class="${className}-title is-collapsed" data-target="sidebar-li-${title}" data-toggle="collapse"><strong>${self.__(prefix + title)}</strong><ul class="sidebar-links">`
    _.each(menu, function (link, text) {
      // Prefix the language segment for non-English locales.
      let href = [type, title, link].join('/')
      if (!isEnglish) {
        href = [lang, href].join('/')
      }
      let itemClass = `${className}-link`
      let currentlyActive = link === path
      if (currentlyActive) {
        itemClass += ' current'
        // remove 'is-collapsed' class from parent container
        result = result.replace(`is-collapsed" data-target="sidebar-li-${title}`, `current" data-target="sidebar-li-${title}`)
      }
      if (expandAll) {
        // remove 'is-collapsed' class from parent container
        result = result.replace(`is-collapsed" data-target="sidebar-li-${title}`, `" data-target="sidebar-li-${title}`)
      }
      result += `<li class='sidebar-li sidebar-li-${title}'><a href="${self.config.root + href}" class="${itemClass}">
${self.__(prefix + text)}</a></li>`
    })
    // close the ul containing the menus
    result += '</ul></li>'
  })
  return result
})
// Builds the API table-of-contents list from the sidebar data for the current
// section. Each category becomes an <li class="api-title"> with its links;
// non-English pages get the language code prepended to every href.
hexo.extend.helper.register('api_toc', function () {
  const self = this
  const lang = this.page.lang
  const isEnglish = lang === 'en'
  const type = this.page.canonical_path.split('/')[0]
  const sidebar = this.site.data.sidebar[type]
  const prefix = `sidebar.${type}.`
  const pieces = []
  _.each(sidebar, function (section, sectionTitle) {
    pieces.push(`<li class="api-title"><h2>${self.__(prefix + sectionTitle)}</h2><ul class="api-links">`)
    _.each(section, function (page, pageTitle) {
      const parts = isEnglish ? [type, sectionTitle, page] : [lang, type, sectionTitle, page]
      const href = parts.join('/')
      pieces.push(`<li class='api-li api-li-${sectionTitle}'><a href="${self.config.root + href}" class="api-link">
        ${self.__(prefix + pageTitle)}</a></li>`)
    })
    // close the ul containing the menus
    pieces.push('</ul></li>')
  })
  return pieces.join('')
})
// Renders the top navigation items for a given menu type. Menu data lives in
// this.site.data['<type>-menu'] as title -> path pairs; the entry whose first
// path folder matches the current page's folder is marked 'active'.
hexo.extend.helper.register('menu', function (type) {
  const self = this
  const lang = this.page.lang
  const isEnglish = lang === 'en'
  const menu = this.site.data[`${type}-menu`]
  const currentPathFolder = this.path.split('/')[0]
  const items = _.map(menu, function (menuPath, title) {
    const localizedPath = isEnglish ? menuPath : lang + menuPath
    // Sees if our current path is part of the menu's path
    // Capture the first folder
    // /guides/welcome/foo.html captures 'guides'
    const firstPathName = localizedPath.split('/')[1]
    // Does our current path match our menu?
    const isCurrent = currentPathFolder === firstPathName
    return `<li><a href="${self.url_for(localizedPath)}" class="${type}-nav-link ${isCurrent ? 'active' : ''}"> ${self.__(`menu.${title}`)}</a></li>`
  })
  return items.join('')
})
// Absolute canonical URL of the current page for the given language.
// English ('en') pages live at the site root; other languages get a prefix.
hexo.extend.helper.register('canonical_url', function (lang) {
  const basePath = this.page.canonical_path
  const localizedPath = (lang && lang !== 'en') ? `${lang}/${basePath}` : basePath
  return `${this.config.url}/${localizedPath}`
})
// Like url_for, but prefixes root-relative URLs with the page language.
// English pages and non-root-relative URLs are returned unchanged.
hexo.extend.helper.register('url_for_lang', function (path) {
  const lang = this.page.lang
  const url = this.url_for(path)
  if (lang === 'en' || url[0] !== '/') {
    return url
  }
  return `/${lang}${url}`
})
// Deep link for editing this page's source file on GitHub (develop branch).
hexo.extend.helper.register('raw_link', function () {
  return `https://github.com/cypress-io/cypress-documentation/edit/develop/source/${this.page.source}`
})
// Delegates to the shared helpers module; presumably adds anchor ids/links to
// rendered headings — confirm against `helpers.addPageAnchors` in lib/helpers.
hexo.extend.helper.register('add_page_anchors', helpers.addPageAnchors)
// Canonical path used for nav highlighting: only 'guides/' and 'api/' pages
// participate; every other page yields an empty string.
hexo.extend.helper.register('canonical_path_for_nav', function () {
  const path = this.page.canonical_path
  const hasSectionNav = startsWith(path, 'guides/') || startsWith(path, 'api/')
  return hasSectionNav ? path : ''
})
// Display name for a language code. A languages data entry either exposes a
// `name` property or is itself the display string (hence the fallback).
hexo.extend.helper.register('lang_name', function (lang) {
  const entry = this.site.data.languages[lang]
  return entry.name || entry
})
// Sorts posts case-insensitively by name; the extra 'name' iteratee acts as a
// case-sensitive tie-breaker for posts whose lowercased names are equal.
hexo.extend.helper.register('order_by_name', function (posts) {
  const byLowerName = (post) => post.name.toLowerCase()
  return _.sortBy(posts, byLowerName, 'name')
})
/**
 * Helper that creates safe url id from section title.
 * @example
 ```
 {% for pluginType in site.data.plugins %}
 <h2 id="{{ id(pluginType.name) }}">{{ pluginType.name }}</h2>
 {% endfor %}
 ```
 */
const id = (title) => {
  // Strip diacritics, then kebab-case the result into a URL-safe slug.
  const slug = _.kebabCase(_.deburr(title))
  debug('from title "%s" got id "%s"', title, slug)
  return slug
}
hexo.extend.helper.register('id', id)
|
'use strict';
module.exports = class MessageEventCommand {
constructor () {
return {
nome: 'messageDelete',
run: this.run
};
}
async run (newMessage) {
const channelDB = await global.db.get(`logs-${newMessage.guildID}`) || '927209681754132530';
const channel = await global.zuly.getRESTChannel(channelDB);
if (newMessage.author.bot) return;
if (newMessage.author.id === global.zuly.user.id) return;
const webhook = await global.zuly.getWebhook(channel);
let idioma = require('../Config/idiomas');
let lang = await global.db.get(`idioma-${newMessage.guildID}`) || 'pt_br';
lang = lang.replace(/-/g, '_');
idioma = idioma[lang];
const embed = new global.zuly.manager.Ebl();
embed.setTitle(`${newMessage.author.username}#${newMessage.author.discriminator} | ${idioma.logs.message.title}`);
embed.addField(`📝 ${idioma.logs.deleted}`, `\`\`\`${newMessage.content}\`\`\``, true);
embed.addField(`<:zu_logs_channel:910218450415255593> ${idioma.logs.channel}`, `<#${newMessage.channel.id}> \`(${newMessage.channel.name} [${newMessage.channel.id}])\``);
embed.addField(`<:zu_link:927212474573418517> ${idioma.logs.url}`, `${newMessage.jumpLink}`);
embed.setColor('#E74C3C');
embed.setFooter('⤷ zulybot.xyz', global.zuly.user.avatarURL);
try {
await global.zuly.executeWebhook(webhook.id, webhook.token, {
username: global.zuly.user.username,
avatarURL: global.zuly.user.avatarURL,
embeds: [embed.get()],
components: [
{
type: 1,
components: [
{
type: 2,
label: idioma.logs.jump,
style: 5,
url: `${newMessage.jumpLink}`,
disabled: true
}
]
}
]
});
}
catch (e) {
channel.createMessage({
embeds: [embed.get()],
components: [
{
type: 1,
components: [
{
type: 2,
label: idioma.logs.jump,
style: 5,
url: `${newMessage.jumpLink}`,
disabled: true
}
]
}
]
});
}
}
};
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import sphinx_rtd_theme # noqa: F401
# Make the package importable so autodoc can find it (two levels up from docs).
sys.path.insert(0, os.path.abspath("../.."))
# -- Project information -----------------------------------------------------
project = "term-image"
copyright = "2022, AnonymouX47"
author = "AnonymouX47"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.napoleon",
    "sphinx_rtd_theme",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- autodoc options ----------------------------------------------------------
# Render type hints in the description body instead of the signature.
autodoc_typehints = "description"
# Show fully-qualified names (module.Class) for types in hints.
autodoc_typehints_format = "fully-qualified"
# Only add type info for parameters/returns that are actually documented.
autodoc_typehints_description_target = "documented"
# List members in the order they appear in the source file.
autodoc_member_order = "bysource"
|
// Bulgarian ('bg') message catalog for the Symfony "validators" translation
// domain. Generated data: each t.add(source, translation, domain, locale)
// call registers one English source message with its Bulgarian translation
// (unicode-escaped) on the global Translator object. Do not edit by hand.
(function (t) {
    // bg
    t.add("This value should be false.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043b\u044a\u0436\u0430 (false).", "validators", "bg");
    t.add("This value should be true.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0438\u0441\u0442\u0438\u043d\u0430 (true).", "validators", "bg");
    t.add("This value should be of type {{ type }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043e\u0442 \u0442\u0438\u043f {{ type }}.", "validators", "bg");
    t.add("This value should be blank.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043f\u0440\u0430\u0437\u043d\u0430.", "validators", "bg");
    t.add("The value you selected is not a valid choice.", "\u0418\u0437\u0431\u0440\u0430\u043d\u0430\u0442\u0430 \u0441\u0442\u043e\u0439\u043d\u043e\u0441\u0442 \u0435 \u043d\u0435\u0432\u0430\u043b\u0438\u0434\u043d\u0430.", "validators", "bg");
    t.add("You must select at least {{ limit }} choice.|You must select at least {{ limit }} choices.", "\u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0438\u0437\u0431\u0435\u0440\u0435\u0442\u0435 \u043f\u043e\u043d\u0435 {{ limit }} \u043e\u043f\u0446\u0438\u044f.|\u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0438\u0437\u0431\u0435\u0440\u0435\u0442\u0435 \u043f\u043e\u043d\u0435 {{ limit }} \u043e\u043f\u0446\u0438\u0438.", "validators", "bg");
    t.add("You must select at most {{ limit }} choice.|You must select at most {{ limit }} choices.", "\u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0438\u0437\u0431\u0435\u0440\u0435\u0442\u0435 \u043d\u0430\u0439-\u043c\u043d\u043e\u0433\u043e {{ limit }} \u043e\u043f\u0446\u0438\u044f.|\u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0438\u0437\u0431\u0435\u0440\u0435\u0442\u0435 \u043d\u0430\u0439-\u043c\u043d\u043e\u0433\u043e {{ limit }} \u043e\u043f\u0446\u0438\u0438.", "validators", "bg");
    t.add("One or more of the given values is invalid.", "\u0415\u0434\u043d\u0430 \u0438\u043b\u0438 \u043f\u043e\u0432\u0435\u0447\u0435 \u043e\u0442 \u0437\u0430\u0434\u0430\u0434\u0435\u043d\u0438\u0442\u0435 \u0441\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0438 \u0435 \u043d\u0435\u0432\u0430\u043b\u0438\u0434\u043d\u0430.", "validators", "bg");
    t.add("This field was not expected.", "\u041f\u043e\u043b\u0435\u0442\u043e \u043d\u0435 \u0441\u0435 \u0435 \u043e\u0447\u0430\u043a\u0432\u0430\u043b\u043e.", "validators", "bg");
    t.add("This field is missing.", "\u041f\u043e\u043b\u0435\u0442\u043e \u043b\u0438\u043f\u0441\u0432\u0430.", "validators", "bg");
    t.add("This value is not a valid date.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u0430 \u0434\u0430\u0442\u0430.", "validators", "bg");
    t.add("This value is not a valid datetime.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u0430 \u0434\u0430\u0442\u0430 \u0438 \u0447\u0430\u0441.", "validators", "bg");
    t.add("This value is not a valid email address.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d \u0438\u043c\u0435\u0439\u043b \u0430\u0434\u0440\u0435\u0441.", "validators", "bg");
    t.add("The file could not be found.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u043d\u0435 \u0431\u0435\u0448\u0435 \u043e\u0442\u043a\u0440\u0438\u0442.", "validators", "bg");
    t.add("The file is not readable.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u043d\u0435 \u043c\u043e\u0436\u0435 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043f\u0440\u043e\u0447\u0435\u0442\u0435\u043d.", "validators", "bg");
    t.add("The file is too large ({{ size }} {{ suffix }}). Allowed maximum size is {{ limit }} {{ suffix }}.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u0433\u043e\u043b\u044f\u043c ({{ size }} {{ suffix }}). \u041c\u0430\u043a\u0441\u0438\u043c\u0430\u043b\u043d\u0438\u044f\u0442 \u0440\u0430\u0437\u043c\u0435\u0440 \u0435 {{ limit }} {{ suffix }}.", "validators", "bg");
    t.add("The mime type of the file is invalid ({{ type }}). Allowed mime types are {{ types }}.", "Mime \u0442\u0438\u043f\u0430 \u043d\u0430 \u0444\u0430\u0439\u043b\u0430 \u0435 \u043d\u0435\u0432\u0430\u043b\u0438\u0434\u0435\u043d ({{ type }}). \u0420\u0430\u0437\u0440\u0435\u0448\u0435\u043d\u0438 mime \u0442\u0438\u043f\u043e\u0432\u0435 \u0441\u0430 {{ types }}.", "validators", "bg");
    t.add("This value should be {{ limit }} or less.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 {{ limit }} \u0438\u043b\u0438 \u043f\u043e-\u043c\u0430\u043b\u043a\u043e.", "validators", "bg");
    t.add("This value is too long. It should have {{ limit }} character or less.|This value is too long. It should have {{ limit }} characters or less.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u0434\u044a\u043b\u0433\u0430. \u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043d\u0430\u0439-\u043c\u043d\u043e\u0433\u043e {{ limit }} \u0441\u0438\u043c\u0432\u043e\u043b.|\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u0434\u044a\u043b\u0433\u0430. \u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043d\u0430\u0439-\u043c\u043d\u043e\u0433\u043e {{ limit }} \u0441\u0438\u043c\u0432\u043e\u043b\u0430.", "validators", "bg");
    t.add("This value should be {{ limit }} or more.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 {{ limit }} \u0438\u043b\u0438 \u043f\u043e\u0432\u0435\u0447\u0435.", "validators", "bg");
    t.add("This value is too short. It should have {{ limit }} character or more.|This value is too short. It should have {{ limit }} characters or more.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u043a\u0440\u0430\u0442\u043a\u0430. \u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043f\u043e\u043d\u0435 {{ limit }} \u0441\u0438\u043c\u0432\u043e\u043b.|\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u043a\u0440\u0430\u0442\u043a\u0430. \u0422\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043f\u043e\u043d\u0435 {{ limit }} \u0441\u0438\u043c\u0432\u043e\u043b\u0430.", "validators", "bg");
    t.add("This value should not be blank.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043f\u0440\u0430\u0437\u043d\u0430.", "validators", "bg");
    t.add("This value should not be null.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 null.", "validators", "bg");
    t.add("This value should be null.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 null.", "validators", "bg");
    t.add("This value is not valid.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u0430.", "validators", "bg");
    t.add("This value is not a valid time.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u043e \u0432\u0440\u0435\u043c\u0435.", "validators", "bg");
    t.add("This value is not a valid URL.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d URL.", "validators", "bg");
    t.add("The two values should be equal.", "\u0414\u0432\u0435\u0442\u0435 \u0441\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0438 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0430\u0442 \u0440\u0430\u0432\u043d\u0438.", "validators", "bg");
    t.add("The file is too large. Allowed maximum size is {{ limit }} {{ suffix }}.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u0433\u043e\u043b\u044f\u043c. \u0420\u0430\u0437\u0440\u0435\u0448\u0435\u043d\u0438\u044f\u0442 \u043c\u0430\u043a\u0441\u0438\u043c\u0430\u043b\u0435\u043d \u0440\u0430\u0437\u043c\u0435\u0440 \u0435 {{ limit }} {{ suffix }}.", "validators", "bg");
    t.add("The file is too large.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u0433\u043e\u043b\u044f\u043c.", "validators", "bg");
    t.add("The file could not be uploaded.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u043d\u0435 \u043c\u043e\u0436\u0435 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043a\u0430\u0447\u0435\u043d.", "validators", "bg");
    t.add("This value should be a valid number.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d \u043d\u043e\u043c\u0435\u0440.", "validators", "bg");
    t.add("This file is not a valid image.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u043e \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435.", "validators", "bg");
    t.add("This is not a valid IP address.", "\u0422\u043e\u0432\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d IP \u0430\u0434\u0440\u0435\u0441.", "validators", "bg");
    t.add("This value is not a valid language.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d \u0435\u0437\u0438\u043a.", "validators", "bg");
    t.add("This value is not a valid locale.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u0430 \u043b\u043e\u043a\u0430\u043b\u0438\u0437\u0430\u0446\u0438\u044f.", "validators", "bg");
    t.add("This value is not a valid country.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u0430 \u0434\u044a\u0440\u0436\u0430\u0432\u0430.", "validators", "bg");
    t.add("This value is already used.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0432\u0435\u0447\u0435 \u0435 \u0432 \u0443\u043f\u043e\u0442\u0440\u0435\u0431\u0430.", "validators", "bg");
    t.add("The size of the image could not be detected.", "\u0420\u0430\u0437\u043c\u0435\u0440\u0430 \u043d\u0430 \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u043d\u0435 \u043c\u043e\u0436\u0435 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043e\u043f\u0440\u0435\u0434\u0435\u043b\u0435\u043d.", "validators", "bg");
    t.add("The image width is too big ({{ width }}px). Allowed maximum width is {{ max_width }}px.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0442\u0432\u044a\u0440\u0434\u0435 \u0448\u0438\u0440\u043e\u043a\u043e ({{ width }}px). \u0428\u0438\u0440\u043e\u0447\u0438\u043d\u0430\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043c\u0430\u043a\u0441\u0438\u043c\u0443\u043c {{ max_width }}px.", "validators", "bg");
    t.add("The image width is too small ({{ width }}px). Minimum width expected is {{ min_width }}px.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0441 \u0442\u0432\u044a\u0440\u0434\u0435 \u043c\u0430\u043b\u043a\u0430 \u0448\u0438\u0440\u043e\u0447\u0438\u043d\u0430 ({{ width }}px). \u0428\u0438\u0440\u043e\u0447\u0438\u043d\u0430\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043c\u0438\u043d\u0438\u043c\u0443\u043c {{ min_width }}px.", "validators", "bg");
    t.add("The image height is too big ({{ height }}px). Allowed maximum height is {{ max_height }}px.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0441 \u0442\u0432\u044a\u0440\u0434\u0435 \u0433\u043e\u043b\u044f\u043c\u0430 \u0432\u0438\u0441\u043e\u0447\u0438\u043d\u0430 ({{ height }}px). \u0412\u0438\u0441\u043e\u0447\u0438\u043d\u0430\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043c\u0430\u043a\u0441\u0438\u043c\u0443\u043c {{ max_height }}px.", "validators", "bg");
    t.add("The image height is too small ({{ height }}px). Minimum height expected is {{ min_height }}px.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0441 \u0442\u0432\u044a\u0440\u0434\u0435 \u043c\u0430\u043b\u043a\u0430 \u0432\u0438\u0441\u043e\u0447\u0438\u043d\u0430 ({{ height }}px). \u0412\u0438\u0441\u043e\u0447\u0438\u043d\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043c\u0438\u043d\u0438\u043c\u0443\u043c {{ min_height }}px.", "validators", "bg");
    t.add("This value should be the user's current password.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0442\u0435\u043a\u0443\u0449\u0430\u0442\u0430 \u043f\u043e\u0442\u0440\u0435\u0431\u0438\u0442\u0435\u043b\u0441\u043a\u0430 \u043f\u0430\u0440\u043e\u043b\u0430.", "validators", "bg");
    t.add("This value should have exactly {{ limit }} character.|This value should have exactly {{ limit }} characters.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0442\u043e\u0447\u043d\u043e {{ limit }} \u0441\u0438\u043c\u0432\u043e\u043b.|\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0442\u043e\u0447\u043d\u043e {{ limit }} \u0441\u0438\u043c\u0432\u043e\u043b\u0430.", "validators", "bg");
    t.add("The file was only partially uploaded.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u0435 \u043a\u0430\u0447\u0435\u043d \u0447\u0430\u0441\u0442\u0438\u0447\u043d\u043e.", "validators", "bg");
    t.add("No file was uploaded.", "\u0424\u0430\u0439\u043b\u044a\u0442 \u043d\u0435 \u0431\u0435\u0448\u0435 \u043a\u0430\u0447\u0435\u043d.", "validators", "bg");
    t.add("No temporary folder was configured in php.ini.", "\u041d\u0435 \u0435 \u043f\u043e\u0441\u043e\u0447\u0435\u043d\u0430 \u0434\u0438\u0440\u0435\u043a\u0442\u043e\u0440\u0438\u044f \u0437\u0430 \u0432\u0440\u0435\u043c\u0435\u043d\u043d\u0438 \u0444\u0430\u0439\u043b\u043e\u0432\u0435 \u0432 php.ini.", "validators", "bg");
    t.add("Cannot write temporary file to disk.", "\u041d\u0435 \u043c\u043e\u0436\u0435 \u0434\u0430 \u0437\u0430\u043f\u0438\u0448\u0435 \u0432\u0440\u0435\u043c\u0435\u043d\u0435\u043d \u0444\u0430\u0439\u043b \u043d\u0430 \u0434\u0438\u0441\u043a\u0430.", "validators", "bg");
    t.add("A PHP extension caused the upload to fail.", "PHP \u0440\u0430\u0437\u0448\u0438\u0440\u0435\u043d\u0438\u0435 \u043f\u0440\u0435\u0434\u0438\u0437\u0432\u0438\u043a\u0430 \u043f\u0440\u0435\u043a\u044a\u0441\u0432\u0430\u043d\u0435 \u043d\u0430 \u043a\u0430\u0447\u0432\u0430\u043d\u0435\u0442\u043e.", "validators", "bg");
    t.add("This collection should contain {{ limit }} element or more.|This collection should contain {{ limit }} elements or more.", "\u041a\u043e\u043b\u0435\u043a\u0446\u0438\u044f\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043f\u043e\u043d\u0435 {{ limit }} \u0435\u043b\u0435\u043c\u0435\u043d\u0442.|\u041a\u043e\u043b\u0435\u043a\u0446\u0438\u044f\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043f\u043e\u043d\u0435 {{ limit }} \u0435\u043b\u0435\u043c\u0435\u043d\u0442\u0430.", "validators", "bg");
    t.add("This collection should contain {{ limit }} element or less.|This collection should contain {{ limit }} elements or less.", "\u041a\u043e\u043b\u0435\u043a\u0446\u0438\u044f\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043d\u0430\u0439-\u043c\u043d\u043e\u0433\u043e {{ limit }} \u0435\u043b\u0435\u043c\u0435\u043d\u0442.|\u041a\u043e\u043b\u0435\u043a\u0446\u0438\u044f\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u043d\u0430\u0439-\u043c\u043d\u043e\u0433\u043e {{ limit }} \u0435\u043b\u0435\u043c\u0435\u043d\u0442\u0430.", "validators", "bg");
    t.add("This collection should contain exactly {{ limit }} element.|This collection should contain exactly {{ limit }} elements.", "\u041a\u043e\u043b\u0435\u043a\u0446\u0438\u044f\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u0442\u043e\u0447\u043d\u043e {{ limit }} \u0435\u043b\u0435\u043c\u0435\u043d\u0442.|\u041a\u043e\u043b\u0435\u043a\u0446\u0438\u044f\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u044a\u0434\u044a\u0440\u0436\u0430 \u0442\u043e\u0447\u043d\u043e {{ limit }} \u0435\u043b\u0435\u043c\u0435\u043d\u0442\u0430.", "validators", "bg");
    t.add("Invalid card number.", "\u041d\u0435\u0432\u0430\u043b\u0438\u0434\u0435\u043d \u043d\u043e\u043c\u0435\u0440 \u043d\u0430 \u043a\u0430\u0440\u0442\u0430.", "validators", "bg");
    t.add("Unsupported card type or invalid card number.", "\u041d\u0435\u043f\u043e\u0434\u0434\u044a\u0440\u0436\u0430\u043d \u0442\u0438\u043f \u043a\u0430\u0440\u0442\u0430 \u0438\u043b\u0438 \u043d\u0435\u0432\u0430\u043b\u0438\u0434\u0435\u043d \u043d\u043e\u043c\u0435\u0440 \u043d\u0430 \u043a\u0430\u0440\u0442\u0430.", "validators", "bg");
    t.add("This is not a valid International Bank Account Number (IBAN).", "\u0422\u043e\u0432\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d \u041c\u0435\u0436\u0434\u0443\u043d\u0430\u0440\u043e\u0434\u0435\u043d \u043d\u043e\u043c\u0435\u0440 \u043d\u0430 \u0431\u0430\u043d\u043a\u043e\u0432\u0430 \u0441\u043c\u0435\u0442\u043a\u0430 (IBAN).", "validators", "bg");
    t.add("This value is not a valid ISBN-10.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d ISBN-10.", "validators", "bg");
    t.add("This value is not a valid ISBN-13.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d ISBN-13.", "validators", "bg");
    t.add("This value is neither a valid ISBN-10 nor a valid ISBN-13.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u043d\u0438\u0442\u043e \u0432\u0430\u043b\u0438\u0434\u0435\u043d ISBN-10, \u043d\u0438\u0442\u043e \u0432\u0430\u043b\u0438\u0434\u0435\u043d ISBN-13.", "validators", "bg");
    t.add("This value is not a valid ISSN.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d ISSN.", "validators", "bg");
    t.add("This value is not a valid currency.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u043d\u0430 \u0432\u0430\u043b\u0443\u0442\u0430.", "validators", "bg");
    t.add("This value should be equal to {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0440\u0430\u0432\u043d\u0430 \u043d\u0430 {{ compared_value }}.", "validators", "bg");
    t.add("This value should be greater than {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043f\u043e-\u0433\u043e\u043b\u044f\u043c\u0430 \u043e\u0442 {{ compared_value }}.", "validators", "bg");
    t.add("This value should be greater than or equal to {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043f\u043e-\u0433\u043e\u043b\u044f\u043c\u0430 \u0438\u043b\u0438 \u0440\u0430\u0432\u043d\u0430 \u043d\u0430 {{ compared_value }}.", "validators", "bg");
    t.add("This value should be identical to {{ compared_value_type }} {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0438\u0434\u0435\u043d\u0442\u0438\u0447\u043d\u0430 \u0441 {{ compared_value_type }} {{ compared_value }}.", "validators", "bg");
    t.add("This value should be less than {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043f\u043e-\u043c\u0430\u043b\u043a\u0430 {{ compared_value }}.", "validators", "bg");
    t.add("This value should be less than or equal to {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043f\u043e-\u043c\u0430\u043b\u043a\u0430 \u0438\u043b\u0438 \u0440\u0430\u0432\u043d\u0430 \u043d\u0430 {{ compared_value }}.", "validators", "bg");
    t.add("This value should not be equal to {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0440\u0430\u0432\u043d\u0430 \u043d\u0430 {{ compared_value }}.", "validators", "bg");
    t.add("This value should not be identical to {{ compared_value_type }} {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u0438\u0434\u0435\u043d\u0442\u0438\u0447\u043d\u0430 \u0441 {{ compared_value_type }} {{ compared_value }}.", "validators", "bg");
    t.add("The image ratio is too big ({{ ratio }}). Allowed maximum ratio is {{ max_ratio }}.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0441 \u0442\u0432\u044a\u0440\u0434\u0435 \u0433\u043e\u043b\u044f\u043c\u0430 \u043f\u0440\u043e\u043f\u043e\u0440\u0446\u0438\u044f ({{ ratio }}). \u041c\u0430\u043a\u0441\u0438\u043c\u0430\u043b\u043d\u0430\u0442\u0430 \u043f\u0440\u043e\u043f\u043e\u0440\u0446\u0438\u044f \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0435 {{ max_ratio }}.", "validators", "bg");
    t.add("The image ratio is too small ({{ ratio }}). Minimum ratio expected is {{ min_ratio }}.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0441 \u0442\u0432\u044a\u0440\u0434\u0435 \u043c\u0430\u043b\u043a\u0430 \u043f\u0440\u043e\u043f\u043e\u0440\u0446\u0438\u044f ({{ ratio }}). \u041c\u0438\u043d\u0438\u043c\u0430\u043b\u043d\u0430\u0442\u0430 \u043f\u0440\u043e\u043f\u043e\u0440\u0446\u0438\u044f \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0435 {{ min_ratio }}.", "validators", "bg");
    t.add("The image is square ({{ width }}x{{ height }}px). Square images are not allowed.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u043a\u0432\u0430\u0434\u0440\u0430\u0442 ({{ width }}x{{ height }}px). \u0422\u0430\u043a\u0438\u0432\u0430 \u0438\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u044f \u043d\u0435 \u0441\u0430 \u0440\u0430\u0437\u0440\u0435\u0448\u0435\u043d\u0438.", "validators", "bg");
    t.add("The image is landscape oriented ({{ width }}x{{ height }}px). Landscape oriented images are not allowed.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0441 \u043f\u0435\u0439\u0437\u0430\u0436\u043d\u0430 \u043e\u0440\u0438\u0435\u043d\u0442\u0430\u0446\u0438\u044f ({{ width }}x{{ height }}px). \u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u044f \u0441 \u0442\u0430\u043a\u0430\u0432\u0430 \u043e\u0440\u0438\u0435\u043d\u0442\u0430\u0446\u0438\u044f \u043d\u0435 \u0441\u0430 \u0440\u0430\u0437\u0440\u0435\u0448\u0435\u043d\u0438.", "validators", "bg");
    t.add("The image is portrait oriented ({{ width }}x{{ height }}px). Portrait oriented images are not allowed.", "\u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u0435\u0442\u043e \u0435 \u0441 \u043f\u043e\u0440\u0442\u0440\u0435\u0442\u043d\u0430 \u043e\u0440\u0438\u0435\u043d\u0442\u0430\u0446\u0438\u044f ({{ width }}x{{ height }}px). \u0418\u0437\u043e\u0431\u0440\u0430\u0436\u0435\u043d\u0438\u044f \u0441 \u0442\u0430\u043a\u0430\u0432\u0430 \u043e\u0440\u0438\u0435\u043d\u0442\u0430\u0446\u0438\u044f \u043d\u0435 \u0441\u0430 \u0440\u0430\u0437\u0440\u0435\u0448\u0435\u043d\u0438.", "validators", "bg");
    t.add("An empty file is not allowed.", "\u041f\u0440\u0430\u0437\u043d\u0438 \u0444\u0430\u0439\u043b\u043e\u0432\u0435 \u043d\u0435 \u0441\u0430 \u0440\u0430\u0437\u0440\u0435\u0448\u0435\u043d\u0438.", "validators", "bg");
    t.add("The host could not be resolved.", "\u0425\u043e\u0441\u0442\u044a\u0442 \u0435 \u043d\u0435\u0434\u043e\u0441\u0442\u044a\u043f\u0435\u043d.", "validators", "bg");
    t.add("This value does not match the expected {{ charset }} charset.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u043d\u0435 \u0441\u044a\u0432\u043f\u0430\u0434\u0430 \u0441 \u043e\u0447\u0430\u043a\u0432\u0430\u043d\u0430\u0442\u0430 {{ charset }} \u043a\u043e\u0434\u0438\u0440\u043e\u0432\u043a\u0430.", "validators", "bg");
    t.add("This is not a valid Business Identifier Code (BIC).", "\u0422\u043e\u0432\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d \u0411\u0438\u0437\u043d\u0435\u0441 \u0438\u0434\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u043e\u043d\u0435\u043d \u043a\u043e\u0434 (BIC).", "validators", "bg");
    t.add("Error", "\u0413\u0440\u0435\u0448\u043a\u0430", "validators", "bg");
    t.add("This is not a valid UUID.", "\u0422\u043e\u0432\u0430 \u043d\u0435 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d UUID.", "validators", "bg");
    t.add("This value should be a multiple of {{ compared_value }}.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0431\u044a\u0434\u0435 \u043a\u0440\u0430\u0442\u043d\u043e \u0447\u0438\u0441\u043b\u043e \u043d\u0430 {{ compared_value }}.", "validators", "bg");
    t.add("This Business Identifier Code (BIC) is not associated with IBAN {{ iban }}.", "\u0411\u0438\u0437\u043d\u0435\u0441 \u0438\u0434\u0435\u043d\u0442\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u043e\u043d\u043d\u0438\u044f \u043a\u043e\u0434 (BIC) \u043d\u0435 \u0435 \u0441\u0432\u044a\u0440\u0437\u0430\u043d \u0441 IBAN {{ iban }}.", "validators", "bg");
    t.add("This value should be valid JSON.", "\u0421\u0442\u043e\u0439\u043d\u043e\u0441\u0442\u0442\u0430 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0435 \u0432\u0430\u043b\u0438\u0434\u0435\u043d JSON.", "validators", "bg");
})(Translator);
|
from __future__ import absolute_import
from __future__ import print_function
import logging.config
import logging
import pydoc
import os
import unittest
import sys
import re
# Look for MG5/MG4 path
# This file lives three directories below the MG5 root, hence the [:-3].
_mg5_path = os.sep.join(os.path.realpath(__file__).split(os.sep)[:-3])
sys.path.append(_mg5_path)
# Directory of this test file and the location of the stored reference pickles.
_file_path = os.path.dirname(os.path.realpath(__file__))
_pickle_path = os.path.join(_file_path, 'input_files', 'ML_parallel_saved_runs')
from madgraph import MG5DIR
from madgraph import MadGraph5Error
from madgraph.iolibs.files import cp
#import madgraph.iolibs.save_load_object as save_load_object
from . import loop_me_comparator
import madgraph.various.banner as banner_mod
import madgraph.various.misc as misc
from . import me_comparator
from .test_ML5 import procToFolderName
# The processes below are treated all together because they are relatively quick
# Each entry reads (process, coupling orders, perturbed couplings, squared-order
# constraints); the fourth element is what compare_processes probes for '^2' keys.
HCR_processes_short = []
ML5EW_processes_short = []
# Same quick process probed with different squared-order constraints, selecting
# the individual interference contributions (and, last, the full sum with '=99').
ML5EW_processes_short_sqso = [
    ('u u~ > d d~',{},['QCD QED'],{'QCD^2==':6,'QED^2==':0})
   ,('u u~ > d d~',{},['QCD QED'],{'QCD^2==':4,'QED^2==':2})
   ,('u u~ > d d~',{},['QCD QED'],{'QCD^2==':2,'QED^2==':4})
   ,('u u~ > d d~',{},['QCD QED'],{'QCD^2==':0,'QED^2==':6})
   ,('u u~ > d d~',{},['QCD QED'],{'QCD^2=':99,'QED^2=':99})
]
# The longer processes below are treated one by one so that they can be better
# independently checked/updated (especially the corresponding reference pickle.)
# Each entry reads (process, coupling orders, perturbed couplings, squared orders).
HCR_processes_long = [
    # The process below is for testing the parallel tests only
    ('g g > t t~',{'QCD':2,'QED':0},['QED'],{'QCD':4,'QED':2}),
    ('a a > t t~',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('a a > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > z a',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > z z',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > a a',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('t t~ > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('ve ve~ > e+ e-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('w+ w- > h h',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('h h > h h',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > e+ e-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('e+ e- > t t~ g',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6}),
    ('e+ e- > t t~ a',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('g g > t t~ g',{'QCD':3,'QED':0},['QED'],{'QCD':6,'QED':2}),
    ('g g > t t~ h',{'QCD':2,'QED':1},['QCD'],{'QCD':6,'QED':2}),
    ('g g > t t~ h',{'QCD':2,'QED':1},['QED'],{'QCD':4,'QED':4}),
    ('a a > t t~ a',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('h h > h h h',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('u u~ > g a',{},['QCD QED'],{}),
    # for massive b quark
    ('g b > t w-',{'QCD':1,'QED':1},['QED'],{'QCD':2,'QED':4}),
    ('a b > t w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('g g > t w- b~',{'QCD':2,'QED':1},['QED'],{'QCD':4,'QED':4}),
    ('a g > t w- b~',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6}),
    ('a a > t w- b~',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('u d~ > t b~',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u b > t d',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u g > t d b~',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6}),
    ('u a > t d b~',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    # for loop-induced
    ('g g > h h',{},['QCD'],{}),
    # for splitting orders
    ('u u~ > u u~',{},['QCD QED'],{'QCD':99,'QED':99}),
    ('u u~ > u u~ g',{},['QCD QED'],{'QCD':99,'QED':99}),
    ('u u~ > u u~ a',{},['QCD QED'],{'QCD':99,'QED':99}),
    # checking with arXiv:1307.4331
    ('u~ u > w+ w-',{},['QCD'],{}),
    ('u~ u > w+ w-',{},['QED'],{}),
    ('d~ d > w+ w-',{},['QCD'],{}),
    ('d~ d > w+ w-',{},['QED'],{}),
    ('u~ u > z z',{},['QCD'],{}),
    ('u~ u > z z',{},['QED'],{}),
    ('d~ d > z z',{},['QCD'],{}),
    ('d~ d > z z',{},['QED'],{}),
    ('u~ d > w- z',{},['QCD'],{}),
    ('u~ d > w- z',{},['QED'],{})
]
# Index the processes by '<folder_name>_<perturbed_orders>' keys (e.g.
# 'gg_ttx_QED'), which is how the individual tests below look them up.
HCR_processes_long_dic = {
    procToFolderName(elem[0]) + '_' + '_'.join(elem[2][0].split()): elem
    for elem in HCR_processes_long}
# NOTE(review): this list currently mirrors HCR_processes_long entry for entry;
# kept as an independent literal so the two suites can diverge if needed.
ML5EW_processes_long = [
    ('g g > t t~',{'QCD':2,'QED':0},['QED'],{'QCD':4,'QED':2}),
    ('a a > t t~',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('a a > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > z a',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > z z',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > a a',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('t t~ > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('ve ve~ > e+ e-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('w+ w- > h h',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('h h > h h',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u u~ > e+ e-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('e+ e- > t t~ g',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6}),
    ('e+ e- > t t~ a',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('g g > t t~ g',{'QCD':3,'QED':0},['QED'],{'QCD':6,'QED':2}),
    ('g g > t t~ h',{'QCD':2,'QED':1},['QCD'],{'QCD':6,'QED':2}),
    ('g g > t t~ h',{'QCD':2,'QED':1},['QED'],{'QCD':4,'QED':4}),
    ('a a > t t~ a',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('h h > h h h',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('u u~ > g a',{},['QCD QED'],{}),
    # for massive b quark
    ('g b > t w-',{'QCD':1,'QED':1},['QED'],{'QCD':2,'QED':4}),
    ('a b > t w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('g g > t w- b~',{'QCD':2,'QED':1},['QED'],{'QCD':4,'QED':4}),
    ('a g > t w- b~',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6}),
    ('a a > t w- b~',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    ('u d~ > t b~',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u b > t d',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6}),
    ('u g > t d b~',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6}),
    ('u a > t d b~',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8}),
    # for loop-induced
    ('g g > h h',{},['QCD'],{}),
    # for splitting orders
    ('u u~ > u u~',{},['QCD QED'],{'QCD':99,'QED':99}),
    ('u u~ > u u~ g',{},['QCD QED'],{'QCD':99,'QED':99}),
    ('u u~ > u u~ a',{},['QCD QED'],{'QCD':99,'QED':99}),
    # checking with arXiv:1307.4331
    ('u~ u > w+ w-',{},['QCD'],{}),
    ('u~ u > w+ w-',{},['QED'],{}),
    ('d~ d > w+ w-',{},['QCD'],{}),
    ('d~ d > w+ w-',{},['QED'],{}),
    ('u~ u > z z',{},['QCD'],{}),
    ('u~ u > z z',{},['QED'],{}),
    ('d~ d > z z',{},['QCD'],{}),
    ('d~ d > z z',{},['QED'],{}),
    ('u~ d > w- z',{},['QCD'],{}),
    ('u~ d > w- z',{},['QED'],{})
]
# Same '<folder_name>_<perturbed_orders>' indexing as HCR_processes_long_dic.
ML5EW_processes_long_dic = {
    procToFolderName(elem[0]) + '_' + '_'.join(elem[2][0].split()): elem
    for elem in ML5EW_processes_long}
def create_pickle(my_proc_list, pickle_file, runner, ref_runner=None,
                  model='loop_qcd_qed_sm-parallel_test', energy=1000):
    """ Create a pickle with name 'pickle_file' on the specified processes
    and also possibly using the PS points provided by the reference runner.

    my_proc_list -- list of process tuples to evaluate.
    pickle_file  -- file name created under _pickle_path.
    runner       -- the ME runner whose results are stored.
    ref_runner   -- optional runner placed first so its PS points are reused.
    """
    my_comp = loop_me_comparator.LoopMEComparator()
    # 'x is not None' instead of the non-idiomatic 'not x is None'.
    if ref_runner is not None:
        my_comp.set_me_runners(ref_runner, runner)
    else:
        my_comp.set_me_runners(runner)
    my_comp.run_comparison(my_proc_list, model=model, energy=energy)
    # Persist the processes and their results for later reference comparisons.
    loop_me_comparator.LoopPickleRunner.store_comparison(
        os.path.join(_pickle_path, pickle_file),
        [runner.proc_list, runner.res_list],
        runner.model, runner.name, energy=runner.energy)
def compare_processes(testInstance, my_proc_list=None, model='loop_qcd_qed_sm-parallel_test',
                      pickle_file="", energy=2000, tolerance=3e-06, filename="",
                      chosen_runner="ML5_opt", loop_induced=False, mu_r=0.0):
    """ A helper function to compare processes.

    testInstance  -- the unittest.TestCase performing the assertions.
    my_proc_list  -- list of (process, orders, pert orders, squared orders)
                     tuples (defaults to an empty list; None sentinel avoids a
                     mutable default argument).
    pickle_file   -- name of the reference pickle under _pickle_path; it is
                     created with 'chosen_runner' if missing ("" disables it).
    chosen_runner -- what runner should create the reference pickle if missing.
    loop_induced  -- if True, the ML5 optimized runner is left out of the
                     comparison (stored reference + ML5 default only).
    mu_r          -- renormalization scale forwarded to the runners
                     (mu_r < 0 means: use MU_R from the param_card).
    """
    if my_proc_list is None:
        my_proc_list = []
    # Print out progress if it is a run for an individual process
    if len(my_proc_list) == 1:
        print("\n== %s %s =="%(my_proc_list[0][0],my_proc_list[0][2]))
    else:
        print("\n== %s =="%filename)
    # Check if pickle exists, if not create it
    if pickle_file != "" and not os.path.isfile(os.path.join(_pickle_path, pickle_file)):
        print(" => Computing reference evaluation with %s"%chosen_runner)
        create_loop_pickle(my_proc_list, model,
                           pickle_file, energy, chosen_runner)
        print("\n => Done with %s evaluation"%chosen_runner)
    # Load the stored runner
    if pickle_file != "":
        stored_runner = me_comparator.PickleRunner.find_comparisons(
            os.path.join(_pickle_path, pickle_file))[0]
        # Reuse the energy of the stored run so PS points match.
        energy = stored_runner.energy
    # Check if the process has squared order constraints
    has_sqso = any('^2' in key for proc in my_proc_list
                   for key in proc[3].keys())
    MLCard = banner_mod.MadLoopParam(os.path.join(_mg5_path, 'Template', 'loop_material',
                                                  'StandAlone', 'Cards', 'MadLoopParams.dat'))
    # First character of the MLReductionLib setting ("1" selects the default
    # reduction for which the non-optimized output is also available).
    MLredstr = MLCard['MLReductionLib'][0:1]
    # Create a MERunner object for MadLoop 5 optimized
    ML5_opt = loop_me_comparator.LoopMG5Runner()
    ML5_opt.setup(_mg5_path, optimized_output=True, temp_dir=filename,
                  mu_r=mu_r)
    if MLredstr == "1" and not has_sqso:
        # Create a MERunner object for MadLoop 5 default
        ML5_default = loop_me_comparator.LoopMG5Runner()
        ML5_default.setup(_mg5_path, optimized_output=False, temp_dir=filename,
                          mu_r=mu_r)
    # Create and setup a comparator
    my_comp = loop_me_comparator.LoopMEComparator()
    if MLredstr == "1" and not has_sqso:
        # Always put the saved run first if you use it, so that the corresponding PS
        # points will be used.
        if pickle_file != "" and not loop_induced:
            my_comp.set_me_runners(stored_runner, ML5_opt, ML5_default)
        elif pickle_file != "" and loop_induced:
            my_comp.set_me_runners(stored_runner, ML5_default)
        elif pickle_file == "" and not loop_induced:
            my_comp.set_me_runners(ML5_opt, ML5_default)
        else:
            raise MadGraph5Error('Cannot find pickle_file for loop induced process.')
    else:
        # With a TIR reduction only the optimized output exists, so a stored
        # reference is mandatory.
        if pickle_file != "":
            my_comp.set_me_runners(stored_runner, ML5_opt)
        else:
            raise MadGraph5Error("CANNOT find the stored result with TIR")
    # Run the actual comparison
    my_comp.run_comparison(my_proc_list,
                           model=model,
                           energy=energy)
    # Print the output
    my_comp.output_result(filename=os.path.join(_mg5_path, filename + '.log'),
                          tolerance=tolerance)
    # Assert that all process comparisons passed the tolerance cut
    my_comp.assert_processes(testInstance, tolerance)
    # Do some cleanup
    my_comp.cleanup()
def create_loop_pickle(my_proc_list, model, pickle_file, energy,
                       chosen_runner):
    """ Create the pickle file for reference for the arguments here.

    chosen_runner must be one of 'ML5_opt' / 'ML5_default'; anything else
    (e.g. 'HCR') raises, since hard-coded reference pickles must be produced
    via the __main__ block instead.
    """
    allowed_chosen_runners = ['ML5_opt', 'ML5_default']
    if chosen_runner not in allowed_chosen_runners:
        raise MadGraph5Error('The reference runner can only be in %s.'%
                             allowed_chosen_runners)
    # The guard above guarantees exactly one of the branches below fires.
    runner = loop_me_comparator.LoopMG5Runner()
    if chosen_runner == 'ML5_opt':
        runner.setup(_mg5_path, optimized_output=True)
    elif chosen_runner == 'ML5_default':
        runner.setup(_mg5_path, optimized_output=False)
    create_pickle(my_proc_list, pickle_file, runner, ref_runner=None,
                  model=model, energy=energy)
    runner.cleanup()
class ML5EWTest(unittest.TestCase):
    """ A class to test ML5 EW corrections versus runs from hard-coded reference process. """
    # Reference pickles live under _pickle_path; the HCR ones can be
    # (re)generated with the __main__ block at the bottom of this file.

    test_model_name = 'loop_qcd_qed_sm-parallel_test'

    def setUp(self):
        """ Here we just copy the hidden restrict_card to a regular one.
        And we don't bother making it hidden again after the test."""
        model_path = os.path.join(_mg5_path, 'models', 'loop_qcd_qed_sm')
        if not os.path.exists(model_path):
            # Fall back to searching the model along PYTHONPATH.
            for tmp in os.environ['PYTHONPATH'].split(':'):
                model_path = os.path.join(tmp, 'loop_qcd_qed_sm')
                if os.path.exists(model_path):
                    break
        cp(os.path.join(model_path,
                        '.restrict_parallel_test.dat'),
           os.path.join(model_path,
                        'restrict_parallel_test.dat'))
        cp(os.path.join(model_path,
                        '.restrict_parallel_test_MB.dat'),
           os.path.join(model_path,
                        'restrict_parallel_test_MB.dat'))
        # Same treatment for the Gmu-scheme variant of the model.
        model_path = os.path.join(_mg5_path, 'models', 'loop_qcd_qed_sm_Gmu')
        if not os.path.exists(model_path):
            for tmp in os.environ['PYTHONPATH'].split(':'):
                model_path = os.path.join(tmp, 'loop_qcd_qed_sm_Gmu')
                if os.path.exists(model_path):
                    break
        cp(os.path.join(model_path,
                        '.restrict_parallel_test_WW.dat'),
           os.path.join(model_path,
                        'restrict_parallel_test_WW.dat'))
        cp(os.path.join(model_path,
                        '.restrict_parallel_test_ZZ.dat'),
           os.path.join(model_path,
                        'restrict_parallel_test_ZZ.dat'))
        cp(os.path.join(model_path,
                        '.restrict_parallel_test_WZ.dat'),
           os.path.join(model_path,
                        'restrict_parallel_test_WZ.dat'))

    #===========================================================================
    # First tests consisting in a list of quick 2>2 processes to be run together
    #===========================================================================
    # NOTE(review): the double underscore in the name below looks accidental,
    # but renaming would change the reported test id, so it is kept.
    def test__ML5EW_sm_vs_stored_ML5EW(self):
        if ML5EW_processes_short:
            compare_processes(self, ML5EW_processes_short, model=self.test_model_name,
                              pickle_file='ml5ew_short_parallel_tests.pkl',
                              filename='ptest_short_ml5ew_vs_old_ml5ew',
                              chosen_runner='ML5_opt')

    #===========================================================================
    # Same quick processes, but probed with squared-order (sqso) constraints
    #===========================================================================
    def test_ML5EW_sm_vs_stored_ML5EW_sqso(self):
        if ML5EW_processes_short_sqso:
            compare_processes(self, ML5EW_processes_short_sqso,
                              model=self.test_model_name,
                              pickle_file='ml5ew_short_parallel_tests_sqso.pkl',
                              filename='ptest_short_ml5ew_vs_old_ml5ew_sqso',
                              chosen_runner='ML5_opt')

    # The tests below probe one quite long process at a time individually, so
    # one can better manage them.
    #===========================================================================
    # First the long checks against results available in Hard-Coded Reference
    #===========================================================================

    # ('g g > t t~',{'QCD':2,'QED':0},['QED'],{'QCD':4,'QED':2})
    def test_long_sm_vs_stored_HCR_gg_ttx_QED(self):
        proc = 'gg_ttx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('a a > t t~',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_aa_ttx_QED(self):
        proc = 'aa_ttx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('a a > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_aa_wpwm_QED(self):
        proc = 'aa_wpwm_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('u u~ > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_uux_wpwm_QED(self):
        proc = 'uux_wpwm_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('u u~ > z a',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_uux_za_QED(self):
        proc = 'uux_za_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('u u~ > z z',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_uux_zz_QED(self):
        proc = 'uux_zz_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('u u~ > a a',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_uux_aa_QED(self):
        proc = 'uux_aa_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('t t~ > w+ w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_ttx_wpwm_QED(self):
        proc = 'ttx_wpwm_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('ve ve~ > e+ e-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_vevex_epem_QED(self):
        proc = 'vevex_epem_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('w+ w- > h h',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_wpwm_hh_QED(self):
        proc = 'wpwm_hh_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('h h > h h',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_hh_hh_QED(self):
        proc = 'hh_hh_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('u u~ > e+ e-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_uux_epem_QED(self):
        proc = 'uux_epem_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('e+ e- > t t~ g',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6})
    def test_long_sm_vs_stored_HCR_epem_ttxg_QED(self):
        proc = 'epem_ttxg_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('e+ e- > t t~ a',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8})
    def test_long_sm_vs_stored_HCR_epem_ttxa_QED(self):
        proc = 'epem_ttxa_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('g g > t t~ g',{'QCD':3,'QED':0},['QED'],{'QCD':6,'QED':2})
    def test_long_sm_vs_stored_HCR_gg_ttxg_QED(self):
        proc = 'gg_ttxg_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_HCR_%s'%proc, chosen_runner = 'HCR')

    # ('g g > t t~ h',{'QCD':2,'QED':1},['QED'],{'QCD':4,'QED':4})
    def test_long_sm_vs_stored_HCR_gg_ttxh_QED(self):
        proc = 'gg_ttxh_QED'
        # chosen_runner='HCR' added for consistency with the other HCR tests;
        # without it a missing pickle would silently be generated with ML5_opt,
        # making the comparison self-referential instead of failing loudly.
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR')

    # ('g g > t t~ h',{'QCD':2,'QED':1},['QCD'],{'QCD':6,'QED':2})
    def test_long_sm_vs_stored_HCR_gg_ttxh_QCD(self):
        proc = 'gg_ttxh_QCD'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR')

    # ('a a > t t~ a',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8})
    def test_long_sm_vs_stored_HCR_aa_ttxa_QED(self):
        proc = 'aa_ttxa_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR')

    # ('h h > h h h',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8})
    def test_long_sm_vs_stored_HCR_hh_hhh_QED(self):
        proc = 'hh_hhh_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR')

    # ('u u~ > g a',{},['QCD QED'],{})
    def test_long_sm_vs_stored_HCR_uux_ga_QCD_QED(self):
        proc = 'uux_ga_QCD_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR')

    #===========================================================================
    # Now the long checks against results previously generated in MadLoop 5.
    #===========================================================================
    # ('g g > h t t~',{'QCD':2,'QED':1},['QCD'],{'QCD':6,'QED':2})
    # it cannot be used since the parameter of loop_qcd_qed_sm is different with loop_sm
    # ml5_sm_%s.pkl is generated by loop_sm-parallel_tests
    def test_long_sm_vs_stored_ML5EW_gg_ttxh_QCD(self):
        # Deliberately disabled (see the comment above); the body below is
        # intentionally unreachable.
        return
        proc = "gg_httx"
        compare_processes(self, [ML5EW_processes_long_dic[proc + '_QCD']],
            model = self.test_model_name, pickle_file = 'ml5_sm_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_old_ml5_%s_QCD'%proc,
            chosen_runner = 'ML5_opt')

    # test loop induced processes
    # ('g g > h h',{},['QCD'],{})
    def test_long_sm_vs_stored_HCR_gg_hh_QCD(self):
        proc = 'gg_hh_QCD'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = True)

    # test splitting orders (now only test the sum)
    # ('u u~ > u u~',{},['QCD QED'],{'QCD':99,'QED':99}
    def test_long_sm_vs_stored_HCR_uux_uux_QCD_QED(self):
        proc = 'uux_uux_QCD_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # test splitting orders (now only test the sum)
    # ('u u~ > u u~ g',{},['QCD QED'],{'QCD':99,'QED':99}
    def test_long_sm_vs_stored_HCR_uux_uuxg_QCD_QED(self):
        proc = 'uux_uuxg_QCD_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # test splitting orders (now only test the sum)
    # ('u u~ > u u~ a',{},['QCD QED'],{'QCD':99,'QED':99}
    def test_long_sm_vs_stored_HCR_uux_uuxa_QCD_QED(self):
        proc = 'uux_uuxa_QCD_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = self.test_model_name, pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # checking with arXiv:1307.4331
    # thanks to Ninh for providing us the PS results
    # ('u~ u > w+ w-',{},['QCD'],{})
    def test_long_sm_vs_stored_HCR_uxu_wpwm_QCD(self):
        proc = 'uxu_wpwm_QCD'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_WW",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('u~ u > w+ w-',{},['QED'],{})
    def test_long_sm_vs_stored_HCR_uxu_wpwm_QED(self):
        proc = 'uxu_wpwm_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_WW",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('d~ d > w+ w-',{},['QCD'],{})
    def test_long_sm_vs_stored_HCR_dxd_wpwm_QCD(self):
        proc = 'dxd_wpwm_QCD'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_WW",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('d~ d > w+ w-',{},['QED'],{})
    def test_long_sm_vs_stored_HCR_dxd_wpwm_QED(self):
        proc = 'dxd_wpwm_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_WW",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('u~ u > z z',{},['QCD'],{})
    def test_long_sm_vs_stored_HCR_uxu_zz_QCD(self):
        proc = 'uxu_zz_QCD'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_ZZ",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('u~ u > z z',{},['QED'],{})
    def test_long_sm_vs_stored_HCR_uxu_zz_QED(self):
        proc = 'uxu_zz_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_ZZ",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('d~ d > z z',{},['QCD'],{})
    def test_long_sm_vs_stored_HCR_dxd_zz_QCD(self):
        proc = 'dxd_zz_QCD'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_ZZ",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('d~ d > z z',{},['QED'],{})
    def test_long_sm_vs_stored_HCR_dxd_zz_QED(self):
        proc = 'dxd_zz_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_ZZ",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('u~ d > w- z',{},['QCD'],{})
    def test_long_sm_vs_stored_HCR_uxd_wmz_QCD(self):
        proc = 'uxd_wmz_QCD'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_WZ",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # ('u~ d > w- z',{},['QED'],{})
    def test_long_sm_vs_stored_HCR_uxd_wmz_QED(self):
        proc = 'uxd_wmz_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm_Gmu-parallel_test_WZ",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False, mu_r = -1.0)  # mu_r<0, use MU_R in param_card.dat

    # check the massive b quark in loop_qcd_qed_sm
    # ('g b > t w-',{'QCD':1,'QED':1},['QED'],{'QCD':2,'QED':4})
    def test_long_sm_vs_stored_HCR_gb_twm_QED(self):
        proc = 'gb_twm_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('a b > t w-',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_ab_twm_QED(self):
        proc = 'ab_twm_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('g g > t w- b~',{'QCD':2,'QED':1},['QED'],{'QCD':4,'QED':4})
    def test_long_sm_vs_stored_HCR_gg_twmbx_QED(self):
        proc = 'gg_twmbx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('a g > t w- b~',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6})
    def test_long_sm_vs_stored_HCR_ag_twmbx_QED(self):
        proc = 'ag_twmbx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('a a > t w- b~',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8})
    def test_long_sm_vs_stored_HCR_aa_twmbx_QED(self):
        proc = 'aa_twmbx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('u d~ > t b~',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_udx_tbx_QED(self):
        proc = 'udx_tbx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('u b > t d',{'QCD':0,'QED':2},['QED'],{'QCD':0,'QED':6})
    def test_long_sm_vs_stored_HCR_ub_td_QED(self):
        proc = 'ub_td_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('u g > t d b~',{'QCD':1,'QED':2},['QED'],{'QCD':2,'QED':6})
    def test_long_sm_vs_stored_HCR_ug_tdbx_QED(self):
        proc = 'ug_tdbx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)

    # ('u a > t d b~',{'QCD':0,'QED':3},['QED'],{'QCD':0,'QED':8})
    def test_long_sm_vs_stored_HCR_ua_tdbx_QED(self):
        proc = 'ua_tdbx_QED'
        compare_processes(self, [HCR_processes_long_dic[proc]],
            model = "loop_qcd_qed_sm-parallel_test_MB",
            pickle_file = 'hcr_%s.pkl'%proc,
            filename = 'ptest_long_sm_vs_hcr_%s'%proc, chosen_runner = 'HCR',
            loop_induced = False)
if '__main__' == __name__:
    # Get full logging info
    logging.config.fileConfig(os.path.join(_mg5_path, 'tests', '.mg5_logging.conf'))
    logging.root.setLevel(logging.INFO)
    logging.getLogger('madgraph').setLevel(logging.INFO)
    logging.getLogger('cmdprint').setLevel(logging.INFO)
    logging.getLogger('tutorial').setLevel(logging.ERROR)
    logging.basicConfig(level=logging.INFO)
    # save hard-coded reference results
    # Replace here the path of your HCR output file, below is just an example.
    HCRpath = '/Users/erdissshaw/Works/FLibatM/check-ML/OutputML'
    # Replace the correct model and restrict card by (un)commenting the
    # appropriate line; the last assignment wins.
    model = 'loop_qcd_qed_sm-parallel_test'
    #model = 'loop_qcd_qed_sm_Gmu-parallel_test_WW'
    #model = 'loop_qcd_qed_sm_Gmu-parallel_test_ZZ'
    #model = 'loop_qcd_qed_sm_Gmu-parallel_test_WZ'
    model = 'loop_qcd_qed_sm-parallel_test_MB'
    for savefile in HCR_processes_long_dic.keys():
        res_list = []
        proc_list = []
        # Skip processes whose reference pickle already exists.
        if os.path.isfile(os.path.join(_pickle_path, "hcr_" + savefile + ".pkl")):
            continue
        else:
            pickle_file = "hcr_" + savefile + ".pkl"
        # Skip processes for which no hard-coded reference data is available.
        if not os.path.isfile(os.path.join(HCRpath, savefile + '.dat')):
            continue
        proc = HCR_processes_long_dic[savefile]
        proc_list.append(proc)
        # Use a context manager so the reference-data file is always closed
        # (it was previously opened and never closed).
        with open(os.path.join(HCRpath, savefile + '.dat')) as hcr_data:
            res_list.append(
                loop_me_comparator.LoopMG5Runner.parse_check_output(hcr_data))
        runner = loop_me_comparator.LoopHardCodedRefRunner()
        runner.setup(proc_list, res_list, model)
        create_pickle(proc_list, pickle_file, runner, ref_runner=None,
                      model=runner.model, energy=runner.energy)
        #loop_me_comparator.LoopPickleRunner.store_comparison(
        #    os.path.join(_pickle_path,pickle_file),
        #    [runner.proc_list,runner.res_list],
        #    runner.model,runner.name,energy=runner.energy)
    # runner=save_load_object.load_from_file(os.path.join(_pickle_path,"hcr_gg_ttxh_QED.pkl"))
    unittest.main()  # necessary for unittest
    #ml5ew = ML5EWTest()
|
import getOr from 'lodash/fp/getOr';
import { NAME } from './constants';
// State paths read by the selectors below, rooted at this module's NAME slice.
const PAGE_PATH = `${NAME}.payload.page`;
const TYPE_PATH = `${NAME}.type`;

/** Selector returning `<NAME>.payload.page` from state, or '' when absent. */
export const getPage = getOr('', PAGE_PATH);

/** Selector returning `<NAME>.type` from state, or '' when absent. */
export const getType = getOr('', TYPE_PATH);
|
'use strict';

// Template lint configuration (presumably ember-template-lint, given the
// 'octane' preset and rule name — confirm against the project's tooling).
module.exports = {
  // Start from the "octane" recommended rule set.
  extends: 'octane',

  rules: {
    // Do not flag <form> elements under the no-invalid-interactive rule.
    'no-invalid-interactive': {
      ignoredTags: ['form'],
    },
  },
};
|
/**
* Bootstrap Table English translation
* Author: Zhixin Wen<wenzhixin2010@gmail.com>
*/
(function ($) {
    'use strict';

    /**
     * Bootstrap Table English (US) locale strings.
     * Each format* hook returns the text the table displays for that feature.
     */
    var localeEnUS = {
        formatLoadingMessage: function () {
            return 'Loading, please wait...';
        },
        // `perPage` is the page-size value shown in the selector.
        formatRecordsPerPage: function (perPage) {
            return perPage + ' records per page';
        },
        formatShowingRows: function (pageFrom, pageTo, totalRows) {
            return 'Showing ' + pageFrom + ' to ' + pageTo + ' of ' + totalRows + ' rows';
        },
        formatSearch: function () {
            return 'Search';
        },
        formatNoMatches: function () {
            return 'No matching records found';
        },
        formatPaginationSwitch: function () {
            return 'Hide/Show pagination';
        },
        formatRefresh: function () {
            return 'Refresh';
        },
        formatToggle: function () {
            return 'Toggle';
        },
        formatColumns: function () {
            return 'Columns';
        },
        formatAllRows: function () {
            return 'All';
        }
    };

    // Register the locale and make it the active default wording.
    $.fn.bootstrapTable.locales['en-US'] = localeEnUS;
    $.extend($.fn.bootstrapTable.defaults, localeEnUS);
})(jQuery);
|
/* -------------------------------------------------------------------------- */
/* Copyright 2002-2021, OpenNebula Project, OpenNebula Systems */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); you may */
/* not use this file except in compliance with the License. You may obtain */
/* a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, software */
/* distributed under the License is distributed on an "AS IS" BASIS, */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */
/* See the License for the specific language governing permissions and */
/* limitations under the License. */
/* -------------------------------------------------------------------------- */
define(function(require) {
  var Files = require('utils/files');
  // NOTE(review): required but never referenced in this module — confirm it is
  // safe to remove before doing so.
  var ConnectionTypes = require("utils/guacamole/types/connection-types");

  /**
   * Wires click handlers for the Guacamole toolbar buttons: screenshot,
   * Ctrl+Alt+Del, on-screen keyboard toggle, local-cursor toggle and
   * fullscreen.
   *
   * @param {Object} $guac     - wrapper exposing the Guacamole `client` and
   *                             on-screen keyboard `osk`
   * @param {Object} $scope    - shared state; `localCursor` is written here
   * @param {Object} $elements - DOM elements for the toolbar buttons
   */
  function GuacButtons($guac, $scope, $elements) {
    // Download the current remote display canvas as an image file.
    $elements.screenshotButton.onclick = function() {
      if (!$guac.client) return;
      var canvas = $guac.client.getDisplay().getDefaultLayer().getCanvas();
      Files.downloadImage('screenshot', canvas)
    };

    // Send Ctrl+Alt+Del: press Ctrl, Alt, Del then release in reverse order.
    $elements.sendCtrlAltDelButton.onclick = function() {
      if (!$guac.client || !$guac.osk) return;
      var ctrlKey = $guac.osk.keys['LCtrl'][0].keysym;
      var altKey = $guac.osk.keys['LAlt'][0].keysym;
      var delKey = $guac.osk.keys['Del'][0].keysym;
      $guac.client.sendKeyEvent(1, ctrlKey);
      $guac.client.sendKeyEvent(1, altKey);
      $guac.client.sendKeyEvent(1, delKey);
      $guac.client.sendKeyEvent(0, delKey);
      $guac.client.sendKeyEvent(0, altKey);
      $guac.client.sendKeyEvent(0, ctrlKey);
    };

    // Both the open and close buttons toggle the on-screen keyboard panel.
    $elements.oskButton.onclick =
      $elements.closeOskButton.onclick = function() {
        if (!$guac.client) return;
        $('#osk__container').fadeToggle('fast');
      };

    // Toggle the local-cursor mode; the 'disabled' CSS class doubles as the
    // source of truth for $scope.localCursor.
    $elements.mouseButton.onclick = function() {
      // toggle disabled
      this.classList.toggle('disabled');
      $scope.localCursor = $elements.mouseButton.classList.contains('disabled');
    };

    $elements.fullscreenButton.onclick = function() {
      // If the document is not in full screen mode make the video full screen
      if (!document.fullscreenElement && document.fullscreenEnabled) {
        $elements.main.requestFullscreen();
      } else if (document.exitFullscreen) {
        document.exitFullscreen();
      }
    };

    // NOTE(review): destroy is assigned on the constructor function itself
    // inside the constructor, so it is effectively static and is overwritten
    // (capturing the latest $elements) every time GuacButtons is instantiated
    // — confirm this is intended. Also note fullscreenButton.onclick is not
    // cleared here, unlike the other handlers.
    GuacButtons.destroy = function() {
      // reset default state
      $('#osk__container').hide();
      $elements.mouseButton.classList.remove('disabled');
      $elements.sendCtrlAltDelButton.onclick =
        $elements.screenshotButton.onclick =
        $elements.mouseButton.onclick =
        $elements.oskButton.onclick =
        $elements.closeOskButton.onclick = null;
    };
  }

  return GuacButtons;
});
|
import React from "react";
import { Container, Card, Button, Row } from "react-bootstrap";
import fn from '../../Assets/Images/LandingPageFue.jpg';
import locale from '../../Assets/Images/LandingPagejpg.jpg';
import popquiz from '../../Assets/Images/popquiz.jpg';
import teamprofile from '../../Assets/Images/teamProfile.png';
import workplanner from '../../Assets/Images/workPlanner.png';
import weather from '../../Assets/Images/weatherforecast.png';
export default function Projects() {
return (
<Container>
<h1 className="text-white">Projects</h1>
<Row className="justify-content-between pb-5">
<Card className="col-md-3 col-sm-12 bg-secondary text-white pt-1">
<Card.Img variant="top" src={fn} />
<Card.Body>
<Card.Title>Fue or Nay?</Card.Title>
<Card.Text>
A website built for gamers, by gamers. Find your favorite games, read reviews, and leave your own reviews!
</Card.Text>
<Row>
<Button variant="dark" className="mb-2" href="https://github.com/Fue-Or-Nay/Fue-Or-Nay" target="_blank">GitHub</Button>
<Button variant="dark" href="https://fue-or-nay.herokuapp.com/" target="blank">Deployment</Button>
</Row>
</Card.Body>
</Card>
<div className="col-md-1 col-sm-12 pb-5"></div>
<Card className="col-md-3 col-sm-12 bg-secondary text-white pt-1">
<Card.Img variant="top" src={locale} />
<Card.Body>
<Card.Title>Loc-Ale</Card.Title>
<Card.Text>
Local brewery finder. Whether you are traveling or just relaxing in your home town, this tool will help find breweries near you!
</Card.Text>
<Row>
<Button variant="dark" className="mb-2" href="https://github.com/br-through/loc-ale" target="_blank">GitHub</Button>
<Button variant="dark" href="https://br-through.github.io/loc-ale/" target="_blank">Deployment</Button>
</Row>
</Card.Body>
</Card>
<div className="col-md-1 col-sm-12 pb-5"></div>
<Card className="col-md-3 col-sm-12 bg-secondary text-white pt-1">
<Card.Img variant="top" src={popquiz} />
<Card.Body>
<Card.Title>Code Pop Quiz!</Card.Title>
<Card.Text>
Test your basic web dev skills with this code pop quiz!
</Card.Text>
<Row>
<Button variant="dark" className="mb-2" href="https://github.com/Mheer91/Code-Pop-Quiz" target="_blank">GitHub</Button>
<Button variant="dark" href="https://mheer91.github.io/Code-Pop-Quiz/" target="_blank">Deployment</Button>
</Row>
</Card.Body>
</Card>
</Row>
<Row className="justify-content-between pb-5">
<Card className="col-md-3 col-sm-12 bg-secondary text-white pt-1">
<Card.Img variant="top" src={teamprofile} />
<Card.Body>
<Card.Title>Team Profile Generator</Card.Title>
<Card.Text>
Manage your employees/work team with this team profile generator!
</Card.Text>
<Row>
<Button variant="dark" className="mb-2" href="https://github.com/Mheer91/Team-Profile-Generator" target="_blank">GitHub</Button>
<Button variant="dark" disabled>Deployment</Button>
</Row>
</Card.Body>
</Card>
<div className="col-md-1 col-sm-12 pb-5"></div>
<Card className="col-md-3 col-sm-12 bg-secondary text-white pt-1">
<Card.Img variant="top" src={weather} />
<Card.Body>
<Card.Title>Weather Dashboard</Card.Title>
<Card.Text>
Sleek weather dashboard that displays weather information for any city upon search!
</Card.Text>
<Row>
<Button variant="dark" className="mb-2" href="https://github.com/Mheer91/weather-forecast" target="_blank">GitHub</Button>
<Button variant="dark" href="https://mheer91.github.io/weather-forecast/ " target="_blank">Deployment</Button>
</Row>
</Card.Body>
</Card>
<div className="col-md-1 col-sm-12 pb-5"></div>
<Card className="col-md-3 col-sm-12 bg-secondary text-white pt-1">
<Card.Img variant="top" src={workplanner} />
<Card.Body>
<Card.Title>Work Planner</Card.Title>
<Card.Text>
Help keep your work day organized with this daily planner!
</Card.Text>
<Row>
<Button variant="dark" className="mb-2" href="https://github.com/Mheer91/Work-Planner-" target="_blank">GitHub</Button>
<Button variant="dark" href="https://mheer91.github.io/Work-Planner-/" target="_blank">Deployment</Button>
</Row>
</Card.Body>
</Card>
</Row>
</Container>
)
};
|
module.exports = function(grunt) {
grunt.registerTask( 'default', [ 'clean', 'copy', 'hapi', 'watch'] );
grunt.registerTask( 'build', [ 'clean', 'copy' ] );
grunt.registerTask( 'run', [ 'hapi', 'watch' ]);
grunt.initConfig({
watch: {
hapi: {
files: [
'./app/assets/**/*.{png,jpg,jpeg,mp3,svg}',
'./app/scripts/**/*.js',
'./app/styles/**/*.css',
'./app/pages/**/*.html',
'./app/templates/**/*.html',
'Gruntfile.js'
],
tasks: [
'clean',
'copy'
],
options: {
spawn: false
}
}
},
copy: {
dist: {
files: [{
expand: true,
src: [ './assets/**/*.{png,jpg,jpeg,mp3,svg}' ],
dest: './dist',
cwd: './app'
}, {
expand: true,
src: [ './**/*.html' ],
dest: './dist',
cwd: './app/pages'
}, {
expand: true,
src: [ './**/*.css' ],
dest: './dist/styles',
cwd: './app/styles'
}, {
expand: true,
src: [ './**/*.js' ],
dest: './dist/scripts',
cwd: './app/scripts'
}, {
expand: true,
src: [ './**/*.html' ],
dest: './dist/templates',
cwd: './app/templates'
}]
}
},
hapi: {
custom_options: {
options: {
server: require('path').resolve('./server'),
bases: {
'/dist': require('path').resolve('./dist/')
}
}
}
},
clean: ['./dist']
});
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-hapi');
};
|
// Copyright 2018-2021 Polyaxon, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Polyaxon SDKs and REST API specification.
* Polyaxon SDKs and REST API specification.
*
* The version of the OpenAPI document: 1.8.3
* Contact: contact@polyaxon.com
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*
*/
// Auto-generated Mocha + expect.js spec scaffolding for V1AuthType, wrapped
// in a UMD loader (AMD, CommonJS, browser globals). The generator emits the
// assertions commented out; uncomment and adapt them to activate the tests.
(function(root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD.
define(['expect.js', process.cwd()+'/src/index'], factory);
} else if (typeof module === 'object' && module.exports) {
// CommonJS-like environments that support module.exports, like Node.
factory(require('expect.js'), require(process.cwd()+'/src/index'));
} else {
// Browser globals (root is window)
factory(root.expect, root.PolyaxonSdk);
}
}(this, function(expect, PolyaxonSdk) {
'use strict';
// Fresh model instance per test case (see beforeEach below).
var instance;
beforeEach(function() {
instance = new PolyaxonSdk.V1AuthType();
});
// Generic accessor helper used by generated property tests.
var getProperty = function(object, getter, property) {
// Use getter method if present; otherwise, get the property directly.
if (typeof object[getter] === 'function')
return object[getter]();
else
return object[property];
}
// Generic mutator helper used by generated property tests.
var setProperty = function(object, setter, property, value) {
// Use setter method if present; otherwise, set the property directly.
if (typeof object[setter] === 'function')
object[setter](value);
else
object[property] = value;
}
describe('V1AuthType', function() {
it('should create an instance of V1AuthType', function() {
// uncomment below and update the code to test V1AuthType
//var instance = new PolyaxonSdk.V1AuthType();
//expect(instance).to.be.a(PolyaxonSdk.V1AuthType);
});
it('should have the property user (base name: "user")', function() {
// uncomment below and update the code to test the property user
//var instance = new PolyaxonSdk.V1AuthType();
//expect(instance).to.be();
});
it('should have the property password (base name: "password")', function() {
// uncomment below and update the code to test the property password
//var instance = new PolyaxonSdk.V1AuthType();
//expect(instance).to.be();
});
});
}));
|
#!/usr/bin/python
"""Look for the sensor at ADDRESS1, change its Modbus address to ADDRESS2,
then change it back to ADDRESS1.

Exercises holding register 0 (the slave-address register) over the serial
port via minimalmodbus: reads use function code 3, writes use function
code 6. After each write the sensor must be re-opened at its new address.
"""
import minimalmodbus
import serial
from time import sleep

ADDRESS1 = 1
ADDRESS2 = 2
PORT = '/dev/ttyUSB5'

# Module-level serial/Modbus settings shared by every Instrument below.
# (The original set CLOSE_PORT_AFTER_EACH_CALL twice; once is enough.)
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY = serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE = 19200


def open_sensor(address, debug=False):
    """Return a minimalmodbus Instrument for the sensor at `address`."""
    sensor = minimalmodbus.Instrument(PORT, slaveaddress=address)
    sensor.debug = debug
    return sensor


sensor = open_sensor(ADDRESS1, debug=True)
print("reading address: " + str(sensor.read_register(0, functioncode=3)))

print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)  # give the device time to apply its new address

sensor = open_sensor(ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))

print("writing new address: " + str(ADDRESS1))
sensor.write_register(0, value=ADDRESS1, functioncode=6)
sleep(0.2)

sensor = open_sensor(ADDRESS1)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
|
import React, { Component } from 'react';
import styles from './styles/index.css';
class Bar extends Component {
render() {
const { editing, onToggle } = this.props;
const btnClass = editing ? styles.buttonActive : styles.button;
return (
<div className={styles.bar}>
<button className={btnClass} onClick={onToggle}>
{editing && 'Exit edition mode'}
{!editing && <i className='fa fa-pencil'></i>}
</button>
<div className={styles.name}>
averyverylargemailhere@company.com account of User with a very very large text
</div>
</div>
);
}
}
Bar.defaultProps = {
editing: false,
};
export default Bar;
|
const DrawCard = require('../../drawcard.js');

/**
 * Stone Crows (06009).
 * Reaction: after this card's controller wins a challenge while Stone Crows
 * is attacking against at least one defender, discard 1 gold from this card
 * to force the losing opponent to kill one of their defending characters.
 */
class StoneCrows extends DrawCard {
    setupCardAbilities(ability) {
        this.reaction({
            when: {
                afterChallenge: event => (
                    event.challenge.winner === this.controller &&
                    this.isAttacking() &&
                    event.challenge.defenders.length >= 1)
            },
            cost: ability.costs.discardGold(),
            handler: context => {
                const loser = context.event.challenge.loser;
                this.game.promptForSelect(loser, {
                    activePromptTitle: 'Select character to kill',
                    source: this,
                    gameAction: 'kill',
                    // Only defending characters are legal targets.
                    cardCondition: card => card.isDefending() && card.getType() === 'character',
                    onSelect: (player, card) => {
                        card.controller.killCharacter(card);
                        this.game.addMessage('{0} discards 1 gold from {1} to force {2} to kill {3}', this.controller, this, loser, card);
                        return true;
                    }
                });
            }
        });
    }
}

StoneCrows.code = '06009';

module.exports = StoneCrows;
|
$(document).ready(function () {
    // Pin the navbar once the page is scrolled past 20px.
    $(window).scroll(function () {
        var pastThreshold = this.scrollY > 20;
        $(".navbar").toggleClass("sticky", pastThreshold);
    });

    // Hamburger button opens/closes the mobile menu.
    $(".menu-toggler").click(function () {
        $(this).toggleClass("active");
        $(".navbar-menu").toggleClass("active");
    });
});
|
/* =================================================================
* Template JS
*
* Template: Sepia - Photography Portfolio HTML Website Template
* Author: Themetorium
* URL: http://themetorium.net
*
================================================================= */
/* Table of Content
====================
# Page transitions / preloader
# Disable right click
# Smooth scrolling
# Header
# Main menu
# Page header
# Defer videos
# Isotope
# OWL Carousel
# lightGallery
# YTPlayer
# Add to favorite button
# Universal PHP Mail Feedback Script
# Fade out element with page scroll
# Parallax effect
# Remove input placeholder on focus
# Albums
# Single gallery
# Limit number of characters/words in element
# Footer
# Scroll to top button
# Miscellaneous
*/
(function ($) {
'use strict';
// ===============================================
// Page transitions / preloader (Animsition)
// More info: http://git.blivesta.com/animsition/
// ===============================================
$(".animsition").animsition({
inClass: 'fade-in',
outClass: 'fade-out',
inDuration: 800,
outDuration: 500,
// linkElement: '.animsition-link',
linkElement: 'a:not([target="_blank"]):not([href^="#"]):not([class*="lg-trigger"]):not([class*="sksw-btn"])', // e.g. linkElement: 'a:not([target="_blank"]):not([href^="#"])'
loading: true,
loadingParentElement: 'html', //animsition wrapper element
loadingClass: 'animsition-loading',
loadingInner: '', // e.g '<img src="assets/img/loading.svg" />'
timeout: true,
timeoutCountdown: 4000,
onLoadEvent: true,
browser: ['animation-duration', '-webkit-animation-duration', '-o-animation-duration'], // "browser" option allows you to disable the "animsition" in case the css property in the array is not supported by your browser. The default setting is to disable the "animsition" in a browser that does not support "animation-duration".
overlay : false,
overlayClass : 'animsition-overlay-slide',
overlayParentElement : 'html',
transition: function(url){ window.location.href = url; }
});
// ==========================================
// Disable right click (uncomment if needed)
// ==========================================
// $(document)[0].oncontextmenu = function() { return false; }
// $(document).mousedown(function(e) {
// if( e.button == 2 ) {
// alert('Sorry, this functionality is disabled!');
// return false;
// } else {
// return true;
// }
// });
// =========================================================================
// Smooth scrolling
// Note: requires Easing plugin - http://gsgd.co.uk/sandbox/jquery/easing/
// =========================================================================
$('.sm-scroll').on("click",function() {
if (location.pathname.replace(/^\//, '') === this.pathname.replace(/^\//, '') && location.hostname === this.hostname) {
var target = $(this.hash);
target = target.length ? target : $('[name=' + this.hash.slice(1) + ']');
if (target.length) {
$('html,body').animate({
scrollTop: target.offset().top
}, 1000, 'easeInOutExpo');
return false;
}
}
});
// ===================================================
// Header
// ===================================================
// if #header contains class "header-transparent" add class "header-transparent-on" to <body>.
if ($('#header').hasClass('header-transparent')) {
$('body').addClass('header-transparent-on');
}
// Hide header on scroll down, show on scroll up
// More info: https://medium.com/@mariusc23/hide-header-on-scroll-down-show-on-scroll-up-67bbaae9a78c
// ===================================================
var didScroll;
var lastScrollTop = 0;
var delta = 120;
var navbarHeight = $('.header-show-hide-on-scroll').outerHeight();
$(window).scroll(function(event){
didScroll = true;
});
setInterval(function() {
if (didScroll) {
hasScrolled();
didScroll = false;
}
}, 50);
// Show/hide the ".header-show-hide-on-scroll" header based on scroll
// direction: scrolling down past the header hides it (class "fly-up"),
// scrolling up reveals it again. Invoked (throttled) from the interval
// above; reads/writes the shared lastScrollTop/delta/navbarHeight vars.
function hasScrolled() {
var st = $(window).scrollTop();
// Make sure they scroll more than delta
if(Math.abs(lastScrollTop - st) <= delta)
return;
// If they scrolled down and are past the header, add class .fly-up.
// This is necessary so you never see what is "behind" the header.
if (st > lastScrollTop && st > navbarHeight){
// Scroll Down
$('.header-show-hide-on-scroll').addClass('fly-up');
} else {
// Scroll Up
// Only re-show when not bouncing past the bottom of the document.
if(st + $(window).height() < $(document).height()) {
$('.header-show-hide-on-scroll').removeClass('fly-up');
}
}
lastScrollTop = st;
}
// Header Filled (cbpAnimatedHeader)
// More info: http://tympanus.net/codrops/2013/06/06/on-scroll-animated-header/
// ====================================
// Toggle the "header-filled" class on fixed/auto-hiding headers once the
// page is scrolled beyond 150px (gives transparent headers a solid
// background). Self-initializing IIFE adapted from the tympanus
// "On Scroll Animated Header" article linked above.
var cbpAnimatedHeader = (function() {
var docElem = document.documentElement,
header = document.querySelector( '#header' ),
didScroll = false,
changeHeaderOn = 1;
function init() {
// Throttle: react to at most one scroll event per 300ms window.
window.addEventListener( 'scroll', function( event ) {
if( !didScroll ) {
didScroll = true;
setTimeout( scrollPage, 300 );
}
}, false );
}
function scrollPage() {
// NOTE(review): `sy` is computed but never used — the 150px check below
// reads the scroll position via jQuery instead. `changeHeaderOn` and
// `header` above are likewise unused leftovers from the original snippet.
var sy = scrollY();
if ($(this).scrollTop() > 150){
$('#header.header-fixed-top, #header.header-show-hide-on-scroll').addClass("header-filled");
}
else{
$('#header.header-fixed-top, #header.header-show-hide-on-scroll').removeClass("header-filled");
}
didScroll = false;
}
function scrollY() {
return window.pageYOffset || docElem.scrollTop;
}
init();
})();
// Set padding-top to <body> if needed
// ====================================
$(window).resize(function() {
// Make <body> padding-top equal to "#header" height if "#header" contains one of these classes: "header-fixed-top", "header-show-hide-on-scroll".
if ($('#header').is('.header-fixed-top, .header-show-hide-on-scroll')) {
$('body').css( 'padding-top', $('#header').css('height'));
}
// Set "body" padding-top to "0" if "#header" contains class: "header-transparent".
if ($('#header').is('.header-transparent')) {
$('body').css('padding-top', 0);
}
}).resize();
// =======================
// Main menu
// =======================
// Add caret (little arrow down) if menu link contains dropdown
$('.tt-submenu-wrap > a').append('<span class="tt-caret"></span>');
// tt submenu (open submenu on hover)
$('.tt-submenu-wrap').on("mouseenter",function(){
$(this).addClass('tt-submenu-open');
}).on("mouseleave",function(){
$(this).removeClass('tt-submenu-open');
});
// Open/close mobile menu on toggle button click
// ==============================================
$('#tt-m-menu-toggle-btn').on('click',function() {
// Add class "tt-m-menu-open" to <body> if menu is open
$('body').toggleClass('tt-m-menu-open');
// Mobile menu collapse
$('.tt-menu-collapse').stop().slideToggle(300);
// Close all dropdowns on mobile menu toggle button click
$('.tt-submenu').slideUp(300);
// Remove class "tt-m-dropdown-open" on dropdown toggle button click
$('.tt-m-submenu-toggle').removeClass('tt-m-dropdown-open');
});
// Mobile submenu toggle button
// =============================
$('.tt-submenu-wrap').append('<div class="tt-m-submenu-toggle"></div>');
// Open/close mobile dropdown on dropdown toggle button click
$('.tt-m-submenu-toggle').on('click', function() {
$(this).toggleClass('tt-m-dropdown-open');
$(this).prev('ul').stop().slideToggle(300);
});
// Keeping sub-menus inside screen (useful if multi level sub-menus are used). Effect on large screens only!
// More info: http://stackoverflow.com/questions/17985334/jquery-solution-for-keeping-dropdown-dropdown-inside-screen
// ===========================
if ($(window).width() > 768) {
// If "<body>" contains class "tt-rtl".
if ($('body').hasClass('tt-rtl')) {
$('.tt-submenu-master .tt-submenu-wrap > a').parent().hover(function() {
var menu = $('> .tt-submenu',this);
var menupos = $(menu).offset();
if (menupos.left < 0 ) {
var newpos = menupos.left + $(menu).width() * 2;
menu.css({ left: $(menu).width() });
menu.css({ right: 'auto' });
}
});
} else {
$('.tt-submenu-master .tt-submenu-wrap > a').parent().hover(function() {
var menu = $('> .tt-submenu',this);
var menupos = $(menu).offset();
if (menupos.left + menu.width() > $(window).width()) {
var newpos = -$(menu).width();
menu.css({ left: newpos });
}
});
}
}
// Menu tools
// ===========
// tt dropdown (open dropdown on hover)
$('.tt-dropdown-wrap').on("mouseenter",function(){
$(this).addClass('tt-dropdown-open');
}).on("mouseleave",function(){
$(this).removeClass('tt-dropdown-open');
});
// tt clobal search toggle
$('.tt-clobal-search-trigger').on('click', function() {
$('.tt-clobal-search').fadeIn(300, function() {
$(this).addClass('search-open');
});
return false;
});
$('.tt-clobal-search-close').on('click', function() {
$('.tt-clobal-search').fadeOut(300, function() {
$(this).removeClass('search-open');
});
});
// ===============================
// Page header
// ===============================
// if #page-header exist add class "page-header-on" to <body>.
if ($('#page-header').length) {
$('body').addClass('page-header-on');
}
// if page header contains background image add class "ph-image-on" to #page-header.
if ($('.page-header-image').length) {
$('#page-header').addClass('ph-image-on');
}
// if class "hide-ph-image" exist remove class "ph-image-on".
if ($('.page-header-image').hasClass('hide-ph-image')) {
$('#page-header').removeClass('ph-image-on');
}
// =======================================================================================
// Defer videos (Youtube, Vimeo)
// Note: When you have embed videos in your webpages it causes your page to load slower.
// Deffering will allow your page to load quickly.
// Source: https://www.feedthebot.com/pagespeed/defer-videos.html
// =======================================================================================
// Swap each iframe's "data-src" into "src" after the page has loaded, so
// embedded Youtube/Vimeo players do not delay initial page rendering.
// NOTE(review): assigning window.onload here clobbers any earlier onload
// handler — confirm nothing else in the page relies on window.onload.
function init() {
var vidDefer = document.getElementsByTagName('iframe');
for (var i=0; i<vidDefer.length; i++) {
if(vidDefer[i].getAttribute('data-src')) {
vidDefer[i].setAttribute('src',vidDefer[i].getAttribute('data-src'));
} } }
window.onload = init;
// ===================================================================================
// Isotope
// Source: http://isotope.metafizzy.co
// Note: "imagesloaded" plugin is required: https://github.com/desandro/imagesloaded
// ===================================================================================
// init Isotope
var $container = $('.isotope-items-wrap');
// if RTL enabled
var origLeft = true;
if( $('body').hasClass('tt-rtl')) {
origLeft = false;
}
$container.imagesLoaded(function() {
$container.isotope({
itemSelector: '.isotope-item',
transitionDuration: '0.7s',
originLeft: origLeft,
masonry: {
columnWidth: '.grid-sizer',
horizontalOrder: false
}
});
});
// Filter
$('.isotope-filter-links a').on("click",function(){
var selector = $(this).attr('data-filter');
$container.isotope({
filter: selector
});
return false;
});
// Filter item active
var filterItemActive = $('.isotope-filter-links a');
filterItemActive.on('click', function(){
var $this = $(this);
if ( !$this.hasClass('active')) {
filterItemActive.removeClass('active');
$this.addClass('active');
}
});
// If "isotope-top-content" exist add class ".iso-top-content-on" to <body>.
if ($('.isotope-top-content').length) {
$('body').addClass('iso-top-content-on');
}
// If ".isotope-filter" contains class "fi-to-button" add class "fi-to-button-on" to ".isotope-top-content".
if ($('.isotope-filter').hasClass('fi-to-button')) {
$('.isotope-top-content').addClass('fi-to-button-on');
}
// If ".isotope-filter" contains class "fi-to-button" add class "fi-to-button-on" to ".isotope-top-content".
if ($('.gallery-share').length) {
$('.isotope-top-content').addClass('gallery-share-on');
}
// Filter button clickable/hover (clickable on small screens)
if ( $(window).width() < 992) {
// Filter button clickable (effect on small screens)
$('.isotope-filter-button').on("click",function(){
$('.isotope-filter').toggleClass('iso-filter-open');
});
// Close filter button if click on filter links (effect only on small screens)
$('ul.isotope-filter-links > li > a').on("click",function() {
$(".isotope-filter-button").click();
});
} else {
// Filter button on hover
$('.isotope-filter').on("mouseenter",function(){
$('.isotope-filter').addClass('iso-filter-open');
}).on("mouseleave",function(){
$('.isotope-filter').removeClass('iso-filter-open');
});
}
// if class "isotope" exist.
if ($('.isotope').length){
// add overflow scroll to <html> (isotope items gaps fix).
if ( document.querySelector('body').offsetHeight > window.innerHeight ) {
document.documentElement.style.overflowY = 'scroll';
}
// Add class "isotope-on" to <body>.
$('body').addClass('isotope-on');
}
// Add class "iso-gutter-*-on" to <body> if ".isotope" contains class "gutter-*".
if ($('.isotope').hasClass('gutter-1')) {
$('body').addClass('iso-gutter-1-on');
}
if ($('.isotope').hasClass('gutter-2')) {
$('body').addClass('iso-gutter-2-on');
}
if ($('.isotope').hasClass('gutter-3')) {
$('body').addClass('iso-gutter-3-on');
}
// Add class "iso-tt-wrap-on" to <body> if ".isotope-wrap" contains class "tt-wrap".
if ($('.isotope-wrap').hasClass('tt-wrap')) {
$('body').addClass('iso-tt-wrap-on');
}
// =====================================================
// OWL Carousel
// Source: http://owlcarousel2.github.io/OwlCarousel2/
// =====================================================
$(window).on('load', function() { // fixes Owl Carousel "autoWidth: true" issue (https://github.com/OwlCarousel2/OwlCarousel2/issues/1139).
$('.owl-carousel').each(function(){
var $carousel = $(this);
$carousel.owlCarousel({
items: $carousel.data("items"),
loop: $carousel.data("loop"),
margin: $carousel.data("margin"),
center: $carousel.data("center"),
startPosition: $carousel.data("start-position"),
animateIn: $carousel.data("animate-in"),
animateOut: $carousel.data("animate-out"),
autoWidth: $carousel.data("autowidth"),
autoHeight: $carousel.data("autoheight"),
autoplay: $carousel.data("autoplay"),
autoplayTimeout: $carousel.data("autoplay-timeout"),
autoplayHoverPause: $carousel.data("autoplay-hover-pause"),
autoplaySpeed: $carousel.data("autoplay-speed"),
nav: $carousel.data("nav"),
navText: ['', ''],
navSpeed: $carousel.data("nav-speed"),
dots: $carousel.data("dots"),
dotsSpeed: $carousel.data("dots-speed"),
mouseDrag: $carousel.data("mouse-drag"),
touchDrag: $carousel.data("touch-drag"),
dragEndSpeed: $carousel.data("drag-end-speed"),
lazyLoad: $carousel.data("lazy-load"),
video: true,
responsive: {
0: {
items: $carousel.data("mobile-portrait"),
center: false,
},
480: {
items: $carousel.data("mobile-landscape"),
center: false,
},
768: {
items: $carousel.data("tablet-portrait"),
center: false,
},
992: {
items: $carousel.data("tablet-landscape"),
},
1200: {
items: $carousel.data("items"),
}
}
});
});
// Mousewheel plugin
var owlMouse = $('.owl-mousewheel');
owlMouse.on('mousewheel', '.owl-stage', function (e) {
if (e.deltaY > 0) {
owlMouse.trigger('prev.owl', [800]);
} else {
owlMouse.trigger('next.owl', [800]);
}
e.preventDefault();
});
// Keyboard (prev/next arrow) events for navigating
// https://github.com/OwlCarousel2/OwlCarousel2/issues/492#issuecomment-55629470
var owlKeyboard = $('.owl-carousel');
$(document).keyup(function(i){
if(i.keyCode==37) {
owlKeyboard.trigger('prev.owl', [800]);
} else if (i.keyCode==39) {
owlKeyboard.trigger('next.owl', [800]);
}
});
});
// CC item hover
$('.cc-item').on('mouseenter',function() {
$('.owl-carousel').addClass('cc-item-hovered');
});
$('.cc-item').on('mouseleave',function() {
$('.owl-carousel').removeClass('cc-item-hovered');
});
// If ".cc-caption" exist add class "cc-caption-on" to ".cc-item".
$('.cc-item').each(function() {
if ($(this).find('.cc-caption').length) {
$(this).addClass('cc-caption-on');
}
});
// =====================================================
// lightGallery (lightbox plugin)
// Source: http://sachinchoolur.github.io/lightGallery
// =====================================================
$(".lightgallery").lightGallery({
// Please read about gallery options here: http://sachinchoolur.github.io/lightGallery/docs/api.html
// lightgallery core
selector: '.lg-trigger',
mode: 'lg-fade', // Type of transition between images ('lg-fade' or 'lg-slide').
height: '100%', // Height of the gallery (ex: '100%' or '300px').
width: '100%', // Width of the gallery (ex: '100%' or '300px').
iframeMaxWidth: '100%', // Set maximum width for iframe.
loop: true, // If false, will disable the ability to loop back to the beginning of the gallery when on the last element.
speed: 600, // Transition duration (in ms).
closable: true, // Allows clicks on dimmer to close gallery.
escKey: true, // Whether the LightGallery could be closed by pressing the "Esc" key.
keyPress: true, // Enable keyboard navigation.
hideBarsDelay: 5000, // Delay for hiding gallery controls (in ms).
controls: true, // If false, prev/next buttons will not be displayed.
mousewheel: true, // Change slide on mousewheel.
download: false, // Enable download button. By default download url will be taken from data-src/href attribute but it supports only for modern browsers. If you want you can provide another url for download via data-download-url.
counter: true, // Whether to show total number of images and index number of currently displayed image.
swipeThreshold: 50, // By setting the swipeThreshold (in px) you can set how far the user must swipe for the next/prev image.
enableDrag: true, // Enables desktop mouse drag support.
enableTouch: true, // Enables touch support.
// thumbnial plugin
thumbnail: true, // Enable thumbnails for the gallery.
showThumbByDefault: false, // Show/hide thumbnails by default.
thumbMargin: 5, // Spacing between each thumbnails.
toogleThumb: true, // Whether to display thumbnail toggle button.
enableThumbSwipe: true, // Enables thumbnail touch/swipe support for touch devices.
exThumbImage: 'data-exthumbnail', // If you want to use external image for thumbnail, add the path of that image inside "data-" attribute and set value of this option to the name of your custom attribute.
// autoplay plugin
autoplay: false, // Enable gallery autoplay.
autoplayControls: true, // Show/hide autoplay controls.
pause: 6000, // The time (in ms) between each auto transition.
progressBar: true, // Enable autoplay progress bar.
fourceAutoplay: false, // If false autoplay will be stopped after first user action
// fullScreen plugin
fullScreen: true, // Enable/Disable fullscreen mode.
// zoom plugin
zoom: true, // Enable/Disable zoom option.
scale: 0.5, // Value of zoom should be incremented/decremented.
enableZoomAfter: 50, // Some css styles will be added to the images if zoom is enabled. So it might conflict if you add some custom styles to the images such as the initial transition while opening the gallery. So you can delay adding zoom related styles to the images by changing the value of enableZoomAfter.
// video options
videoMaxWidth: '1000px', // Set limit for video maximal width.
// Youtube video options
loadYoutubeThumbnail: true, // You can automatically load thumbnails for youtube videos from youtube by setting loadYoutubeThumbnail true.
youtubeThumbSize: 'default', // You can specify the thumbnail size by setting respective number: 0, 1, 2, 3, 'hqdefault', 'mqdefault', 'default', 'sddefault', 'maxresdefault'.
youtubePlayerParams: { // Change youtube player parameters: https://developers.google.com/youtube/player_parameters
modestbranding: 0,
showinfo: 1,
controls: 1
},
// Vimeo video options
loadVimeoThumbnail: true, // You can automatically load thumbnails for vimeo videos from vimeo by setting loadYoutubeThumbnail true.
vimeoThumbSize: 'thumbnail_medium', // Thumbnail size for vimeo videos: 'thumbnail_large' or 'thumbnail_medium' or 'thumbnail_small'.
vimeoPlayerParams: { // Change vimeo player parameters: https://developer.vimeo.com/player/embedding#universal-parameters
byline : 1,
portrait : 1,
title: 1,
color : 'CCCCCC',
autopause: 1
},
// hash plugin (unique url for each slides)
hash: true, // Enable/Disable hash plugin.
hgalleryId: 1, // Unique id for each gallery. It is mandatory when you use hash plugin for multiple galleries on the same page.
// share plugin
share: false, // Enable/Disable share plugin.
facebook: true, // Enable Facebook share.
facebookDropdownText: 'Facebook', // Facebok dropdown text.
twitter: true, // Enable Twitter share.
twitterDropdownText: 'Twitter', // Twitter dropdown text.
googlePlus: true, // Enable Google Plus share.
googlePlusDropdownText: 'Google+', // Google Plus dropdown text.
pinterest: true, // Enable Pinterest share.
pinterestDropdownText: 'Pinterest' // Pinterest dropdown text.
});
// =======================================================
// YTPlayer (Background Youtube video)
// Source: https://github.com/pupunzi/jquery.mb.YTPlayer
// =======================================================
// Disabled on mobile devices, because video background doesn't work on mobile devices (instead the background image is displayed).
// NOTE(review): jQuery.browser was removed from jQuery core in 1.9 — this relies on a
// browser-detection plugin having populated jQuery.browser.mobile; confirm it loads first.
if (!jQuery.browser.mobile) {
    $(".youtube-bg").mb_YTPlayer();
}
// ==============================================================================
// Add to favorite button
// Source: http://www.webdesigncrowd.com/demo/circle-reveal-animation-12.23.13/
// ==============================================================================
// Clicking the counter itself bumps the displayed number down (when already
// favorited) or up (when not), then flips the parent's "active" state.
$(".fav-count").on("click", function() {
    var counter = $(this);
    var isActive = counter.parent().hasClass("active");
    var total = parseInt(counter.html(), 10) + (isActive ? -1 : 1);
    counter.html(total);
    counter.parent().toggleClass("active");
});
// Clicking the heart icon updates the sibling ".fav-count" counter and flips
// the "active" state on the element two levels up (same contract, restyled).
$(".icon-heart").on("click", function() {
    var countEl = $(this).parent().siblings(".fav-count").first();
    var wrapper = $(this).parent().parent();
    var total = parseInt(countEl.html(), 10) + (wrapper.hasClass("active") ? -1 : 1);
    countEl.html(total);
    wrapper.toggleClass("active");
});
// ===============================================
// Universal PHP Mail Feedback Script
// Source: https://github.com/agragregra/uniMail
// ===============================================
// E-mail Ajax Send
// POSTs the serialized form to mail.php, shows a confirmation alert on success,
// and resets the form one second later. `return false` suppresses the browser's
// native form submission.
// NOTE(review): there is no .fail() handler — a failed POST is silently ignored.
$("#contact-form").submit(function() { // Change (your contact form ID)
    var th = $(this);
    $.ajax({
        type: "POST",
        url: "mail.php", // Change (mail.php path)
        data: th.serialize()
    }).done(function() {
        alert("Thank you. Your message has been sent!");
        setTimeout(function() {
            // Done Functions
            th.trigger("reset");
        }, 1000);
    });
    return false;
});
// ==================================
// Fade out element with page scroll
// ==================================
// Same divisor progression as before (150, 250, ..., 850 for classes 1..8),
// generated in a loop instead of eight copy-pasted statements.
$(window).scroll(function() {
    for (var i = 1; i <= 8; i++) {
        $(".fade-out-scroll-" + i).css("opacity", 1 - $(window).scrollTop() / (100 * i + 50));
    }
});
// ========================
// Parallax effect
// ========================
$(window).scroll(function() {
    var offset = $(this).scrollTop();
    // parallax - image background position (factors 0.1 ... 0.6 for classes 1..6, as before)
    for (var i = 1; i <= 6; i++) {
        $('.parallax-bg-' + i).css('background-position', 'center ' + (offset * (i / 10)) + 'px');
    }
});
// Parallax via CSS transform: translate the element vertically by a fraction
// of the scroll offset (factors 0.1 ... 0.6 for classes 1..6, as before).
$(window).scroll(function() {
    var scrolled = $(this).scrollTop();
    // parallax - transform
    for (var i = 1; i <= 6; i++) {
        $('.parallax-' + i).css('transform', 'translate3d(0, ' + (scrolled * (i / 10)) + 'px, 0)');
    }
});
// ==================================
// Remove input placeholder on focus
// ==================================
// Stash the placeholder text in data() while the field is focused, and put it
// back on blur — identical behavior, written with named locals.
$('input,textarea').focus(function () {
    var field = $(this);
    field.data('placeholder', field.attr('placeholder'));
    field.attr('placeholder', '');
}).blur(function () {
    var field = $(this);
    field.attr('placeholder', field.data('placeholder'));
});
// ==================================
// Albums
// ==================================
// Rotate thumb-list items randomly (in gallery-list-carousel).
// Each item gets a random angle in the range [-25, +25) degrees.
$(".thumb-list.tl-rotate > li").each(function() {
    var angle = (Math.random() * 50) - 25;
    // Fixed: the original concatenated a stray "2" into the value
    // ('rotate(' + n + '2deg)'), silently appending a digit to every angle.
    // Also set the unprefixed `transform`, which current browsers require
    // (they ignore the -webkit-/-moz- prefixed forms).
    $(this).css({
        '-webkit-transform': 'rotate(' + angle + 'deg)',
        '-moz-transform': 'rotate(' + angle + 'deg)',
        'transform': 'rotate(' + angle + 'deg)'
    });
});
// ==================================
// Single gallery
// ==================================
// Gallery single carousel
// ========================
// Make carousel info same width as ".gs-carousel-wrap" on small devices;
// wider viewports get the fixed 440px width. The trailing .resize() runs the
// handler once immediately, exactly like the original.
$(window).resize(function() {
    var narrow = $(window).width() < 768;
    var infoWidth = narrow ? $('.gs-carousel-wrap').width() : 440;
    $('.gs-carousel-info').css({
        'width': infoWidth
    });
}).resize();
// ============================================
// Limit number of characters/words in element
// ============================================
// Limit number of characters in element (example: data-max-characters="120")
$("div, p, a").each(function() {
    var maxChars = $(this).attr('data-max-characters');
    // Skip elements that don't opt in via the attribute (previously the
    // comparison against undefined was always false, but work was still done).
    if (maxChars === undefined) {
        return;
    }
    // Fixed: `length` was assigned without `var`, creating an implicit global
    // that collides with the read-only `window.length` (frame count) in browsers.
    var textLength = $(this).text().length;
    if (textLength > maxChars) {
        $(this).text($(this).text().substr(0, maxChars) + '...');
    }
});
// Limit number of words in element (example: data-max-words="40")
$("div, p, a").each(function() {
    var maxWords = $(this).attr('data-max-words');
    // Skip elements without the opt-in attribute.
    if (maxWords === undefined) {
        return;
    }
    var text = $(this).text();
    // Fixed: `length` was assigned without `var`, leaking an implicit global
    // that collides with the read-only `window.length` in browsers.
    var wordCount = text.split(' ').length;
    if (wordCount > maxWords) {
        // Truncate at the start of the first word past the limit.
        var lastWord = text.split(' ')[maxWords];
        var lastWordIndex = text.indexOf(lastWord);
        $(this).text(text.substr(0, lastWordIndex) + '...');
    }
});
// ======================
// Footer
// ======================
// If "#footer" contains class "footer-minimal" add class "footer-minimal-on" to <body>.
// (Presumably so page-level CSS can compensate for the minimal footer's height —
// verify against the stylesheet.)
if ($('#footer').hasClass('footer-minimal')) {
    $('body').addClass('footer-minimal-on');
}
// ======================
// Scroll to top button
// ======================
// Show the button only once the page is scrolled more than 500px down;
// hide it again near the top. Same fadeIn/fadeOut behavior, dispatched
// through a computed method name instead of an if/else.
$(window).scroll(function() {
    var action = $(this).scrollTop() > 500 ? 'fadeIn' : 'fadeOut';
    $('.scrolltotop')[action]();
});
// ===============
// Miscellaneous
// ===============
// Bootstrap-3 modal fix: move modals directly under <body> so they are not
// trapped inside transformed/positioned ancestors.
$('.modal').appendTo("body"); // Fixed: statement previously relied on ASI (missing semicolon).
// Bootstrap tooltip
$('[data-toggle="tooltip"]').tooltip(); // Fixed: missing semicolon.
// Bootstrap popover. `html: true` renders markup inside the popover content —
// only safe when that content is trusted.
$('[data-toggle="popover"]').popover({
    html: true
});
// Hover fix for iOS: synthesize a hover event on touch so :hover-driven
// behaviors fire on touch devices.
$('*').on('touchstart', function() {
    $(this).trigger('hover');
}).on('touchend', function() {
    $(this).trigger('hover');
});
})(jQuery);
|
//// [tests/cases/conformance/parser/ecmascript5/ArrowFunctionExpressions/parserArrowFunctionExpression10.ts] ////
//// [fileJs.js]
a ? (b) : c => (d) : e => f // Not legal JS; "Unexpected token ':'" at last colon
//// [fileTs.ts]
a ? (b) : c => (d) : e => f
//// [fileJs.js]
a ? function (b) { return (d); } : function (e) { return f; }; // Not legal JS; "Unexpected token ':'" at last colon
//// [fileTs.js]
a ? function (b) { return (d); } : function (e) { return f; };
|
import Component from '@glimmer/component';
import { action } from '@ember/object';
// Glimmer component that forwards a changed value to the parent-supplied
// `onPropertyChange` callback, keyed by the component's `key` argument.
export default class BlockChangesPropertyComponent extends Component {
// Ignores the first positional argument and destructures the new value out of
// the second (an array), then notifies the owner.
// NOTE(review): the `(_, [value])` shape suggests this is wired to a helper
// that passes `(event, [value])` — confirm against the template.
@action
onValue(_, [ value ]) {
this.args.onPropertyChange(this.args.key, value);
}
}
|
{"mlist":[],"rlist":{},"page":{"page":1,"count":0,"size":10,"type":0,"id":5488}}
|
"""
Copyright (C) king.com Ltd 2019
https://github.com/king/s3vdc
License: MIT, https://raw.github.com/king/s3vdc/LICENSE.md
"""
from lib.data_utils import DataUtils
from lib.traindev_config_parser import TrainDevConfigParser
from lib.tf_feature_columns import TfFeatureColumns
from lib.data_source import DataSource
from lib.trainer import Trainer
from lib.flag_parser import FlagParser
from lib.file_logger import FileLogger
import shutil
import tensorflow as tf
import os
import task_inertial_har
# Locate the repository root relative to the installed task_inertial_har package.
_this_root = os.path.join(os.path.dirname(task_inertial_har.__file__), "../")
# Install the task-specific S3VDC model as the generic "user custom model" that
# lib.resolve_model_type imports. This copy must happen BEFORE the import below,
# which is why that import sits mid-file.
shutil.copyfile(
    os.path.join(_this_root, "task_inertial_har/s3vdc.py"),
    os.path.join(_this_root, "lib/user_custom_model.py"),
)
from lib.resolve_model_type import resolve_model_type
# Defaults; overridden by command-line flags where provided.
model_dir = "model"
date_time_str = None
parsed_args = FlagParser.parse_args()
if parsed_args:
    date_time_str = parsed_args.datetime
else:
    # A datetime identifier is mandatory for every training/evaluation job.
    raise RuntimeError(
        "missing datetime or pred-data string for training/evaluation job"
    )
if parsed_args.job_dir:
    model_dir = parsed_args.job_dir
# Set up file-based logging for this run (side effect of construction).
FileLogger("TRAIN_EVAL", model_dir)
data_utils = DataUtils()
# Parse the task's config.json shipped next to the task package.
conf_parser = TrainDevConfigParser(
    data_utils.get_file_contents(
        os.path.join(os.path.dirname(task_inertial_har.__file__), "config.json")
    )
)
tf_feature_cols = TfFeatureColumns(conf_parser)
ds_train = DataSource(
    _tf_feature_cols=tf_feature_cols, _ds_class="train", _dsutil=data_utils
)
# train data_sources can not be empty
if ds_train.is_empty:
    raise RuntimeError("training dataset can not be empty!")
# evaluation dataset can be empty, yet a warning will be given
ds_eval = DataSource(
    _tf_feature_cols=tf_feature_cols, _ds_class="eval", _dsutil=data_utils
)
if ds_eval.is_empty:
    # Fall back to the training data (forced into test mode) for evaluation.
    ds_eval = DataSource(
        _tf_feature_cols=tf_feature_cols,
        _ds_class="train",
        _dsutil=data_utils,
        force_test=True,
    )
    tf.logging.warn(
        "evaluation dataset is empty; will use training dataset for evaluation instead!"
    )
# Build the trainer for the resolved model type, run it, and log whatever the
# run returns.
_handle = Trainer(
    model_fn=resolve_model_type(conf_parser.model_type),
    train_data_source=ds_train,
    eval_data_source=ds_eval,
    hyper_params=conf_parser.model_params,
    model_dir=model_dir,
    date_time_str=date_time_str,
)
_result = _handle.run()
tf.logging.info(_result)
|
const { Right, Left, fold } = require('./either');
// Placeholder spec: `void` keeps the imports "used" so linters don't flag them,
// and the tautological assertion keeps the suite green until real Either tests
// are written.
test('TODO: Add either lib tests', () => {
void Right;
void Left;
void fold;
expect(1).toBe(1);
});
|
"use strict";
// This icon file is generated automatically.
// Ant Design "sync" (outlined) icon: an abstract SVG tree consumed by the icon
// renderer. Do not edit by hand — regenerate instead.
Object.defineProperty(exports, "__esModule", { value: true });
var SyncOutlined = { "icon": { "tag": "svg", "attrs": { "viewBox": "64 64 896 896", "focusable": "false" }, "children": [{ "tag": "path", "attrs": { "d": "M168 504.2c1-43.7 10-86.1 26.9-126 17.3-41 42.1-77.7 73.7-109.4S337 212.3 378 195c42.4-17.9 87.4-27 133.9-27s91.5 9.1 133.8 27A341.5 341.5 0 01755 268.8c9.9 9.9 19.2 20.4 27.8 31.4l-60.2 47a8 8 0 003 14.1l175.7 43c5 1.2 9.9-2.6 9.9-7.7l.8-180.9c0-6.7-7.7-10.5-12.9-6.3l-56.4 44.1C765.8 155.1 646.2 92 511.8 92 282.7 92 96.3 275.6 92 503.8a8 8 0 008 8.2h60c4.4 0 7.9-3.5 8-7.8zm756 7.8h-60c-4.4 0-7.9 3.5-8 7.8-1 43.7-10 86.1-26.9 126-17.3 41-42.1 77.8-73.7 109.4A342.45 342.45 0 01512.1 856a342.24 342.24 0 01-243.2-100.8c-9.9-9.9-19.2-20.4-27.8-31.4l60.2-47a8 8 0 00-3-14.1l-175.7-43c-5-1.2-9.9 2.6-9.9 7.7l-.7 181c0 6.7 7.7 10.5 12.9 6.3l56.4-44.1C258.2 868.9 377.8 932 512.2 932c229.2 0 415.5-183.7 419.8-411.8a8 8 0 00-8-8.2z" } }] }, "name": "sync", "theme": "outlined" };
exports.default = SyncOutlined;
|
(window["webpackJsonphelander-spa-test"]=window["webpackJsonphelander-spa-test"]||[]).push([[0],{66:function(e,t,a){e.exports=a(83)},71:function(e,t,a){},72:function(e,t,a){},83:function(e,t,a){"use strict";a.r(t);var n=a(0),o=a.n(n),r=a(10),i=a.n(r),l=(a(71),a(72),a(24)),s=a(115),c=a(119),m=a(120),u=a(122),d=a(118),p=a(121),h=a(126),g=a(127),y=a(40),b=a.n(y),f=Object(s.a)((function(e){return{root:{flexGrow:1},container:{display:"flex"},paper:{padding:e.spacing(2),textAlign:"center",color:e.palette.text.secondary,flex:"1 0 auto",margin:e.spacing(1)}}}));var w=Object(g.a)()((function(e){var t=f();return e.width,o.a.createElement("div",{className:t.root},o.a.createElement("div",{className:t.container},o.a.createElement(h.a,{smUp:!0},o.a.createElement(b.a,null))))})),v=a(23),E=a(125);function x(){return{top:"".concat(50,"%"),left:"".concat(50,"%"),transform:"translate(-".concat(50,"%, -").concat(50,"%)")}}var O=Object(s.a)((function(e){return{paper:{position:"absolute",width:"400",backgroundColor:e.palette.background.paper,border:"2px solid #000",boxShadow:e.shadows[5],padding:e.spacing(2,4,4)},button:{backgroundColor:"brown",color:"white",fontFamily:"open sans"},menuButton:{marginRight:e.spacing(2),color:"black",fontFamily:"open sans"}}}));function N(){var e=O(),t=o.a.useState(x),a=Object(v.a)(t,1)[0],n=o.a.useState(!1),r=Object(v.a)(n,2),i=r[0],l=r[1];return o.a.createElement("div",null,o.a.createElement(d.a,{className:e.menuButton,color:"inherit",onClick:function(){l(!0)}},"Contact"),o.a.createElement(E.a,{"aria-labelledby":"simple-modal-title","aria-describedby":"simple-modal-description",open:i,onClose:function(){l(!1)}},o.a.createElement("div",{style:a,className:e.paper},o.a.createElement("h2",{id:"simple-modal-title"},"Text in a modal"),o.a.createElement("p",{id:"simple-modal-description"},"Duis mollis, est non commodo luctus, nisi erat porttitor ligula."))))}var 
j=a(34),k=Object(s.a)((function(e){return{root:{flexGrow:1},menuButton:{marginRight:e.spacing(2),color:"black",fontFamily:"open sans"},title:{flexGrow:1,color:"brown",fontFamily:"open sans"},appBar:{backgroundColor:"white"},link:{textDecoration:"underline"},span:{}}}));function B(){var e=k();return o.a.createElement("div",{className:e.root},o.a.createElement(c.a,{position:"static",className:e.appBar},o.a.createElement(m.a,null,o.a.createElement(p.a,{edge:"start",className:e.menuButton,color:"inherit","aria-label":"menu"},o.a.createElement(w,null)),o.a.createElement(u.a,{variant:"h6",className:e.title},o.a.createElement("span",{className:e.span}," Helander")," School of Music"),o.a.createElement(h.a,{xsDown:!0},o.a.createElement(j.a,{to:"/"},o.a.createElement(d.a,{className:e.menuButton,color:"inherit"}," Home "))),o.a.createElement(h.a,{xsDown:!0},o.a.createElement(j.a,{to:"/about"}," ",o.a.createElement(d.a,{className:e.menuButton,color:"inherit"},"About"))),o.a.createElement(h.a,{xsDown:!0},o.a.createElement(j.a,{to:"/testimonials"}," ",o.a.createElement(d.a,{className:e.menuButton,color:"inherit"},"Testimonials"))),o.a.createElement(h.a,{xsDown:!0},o.a.createElement(j.a,{to:"/ratesAndPolicies"}," ",o.a.createElement(d.a,{className:e.menuButton,color:"inherit"},"Rates and Policies"))),o.a.createElement(h.a,{xsDown:!0},o.a.createElement(N,null)))))}var A=a(8),C=(a(4),a(123));function F(e,t){var a=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),a.push.apply(a,n)}return a}var J=Object(s.a)((function(e){return{container:{display:"flex",flexWrap:"wrap"},textField:{marginLeft:e.spacing(1),marginRight:e.spacing(1),width:200},dense:{marginTop:19},menu:{width:200}}}));function H(){var e,t=J(),a=o.a.useState({name:"",age:"",multiline:"Controlled",currency:"EUR"}),n=Object(v.a)(a,2),r=n[0],i=n[1];return 
o.a.createElement("form",{className:t.container,noValidate:!0,autoComplete:"off"},o.a.createElement(C.a,{id:"standard-name",label:"your email",className:t.textField,value:r.name,onChange:(e="name",function(t){i(function(e){for(var t=1;t<arguments.length;t++){var a=null!=arguments[t]?arguments[t]:{};t%2?F(a,!0).forEach((function(t){Object(A.a)(e,t,a[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(a)):F(a).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(a,t))}))}return e}({},r,Object(A.a)({},e,t.target.value)))}),margin:"normal"}))}function T(){return{top:"".concat(50,"%"),left:"".concat(50,"%"),transform:"translate(-".concat(50,"%, -").concat(50,"%)")}}var P=Object(s.a)((function(e){return{paper:{position:"absolute",width:400,backgroundColor:e.palette.background.paper,border:"2px solid #000",boxShadow:e.shadows[5],padding:e.spacing(2,4,4)},button:{backgroundColor:"brown",color:"white",fontFamily:"open sans"}}}));function S(){var e=P(),t=o.a.useState(T),a=Object(v.a)(t,1)[0],n=o.a.useState(!1),r=Object(v.a)(n,2),i=r[0],l=r[1];return o.a.createElement("div",null,o.a.createElement(d.a,{variant:"contained",size:"large",className:e.button,onClick:function(){l(!0)}},"Contact"),o.a.createElement(E.a,{"aria-labelledby":"simple-modal-title","aria-describedby":"simple-modal-description",open:i,onClose:function(){l(!1)}},o.a.createElement("div",{style:a,className:e.paper},o.a.createElement("h2",{id:"simple-modal-title"},"Send me a message!"),o.a.createElement(H,null),o.a.createElement("p",{id:"simple-modal-description"},"Duis mollis, est non commodo luctus, nisi erat porttitor ligula."))))}a(124);var D=Object(s.a)({root:{width:"100%",maxWidth:500,textAlign:"left",margin:"auto",marginTop:"40px",marginBottom:"20px"},typography:{fontFamily:"open sans"}});function W(){var e=D();return 
o.a.createElement("div",{className:e.root},o.a.createElement(u.a,{className:e.typography,variant:"body2",gutterBottom:!0},"fb.com/JoelHelanderMusic | Copyright \xa9 2019 Joel Helander"))}Object(s.a)({root:{width:"100%",height:"20px"},typography:{},title:{},box:{}});var M=Object(s.a)({root:{width:"100%",maxWidth:700,margin:"auto",marginTop:"40px",textAlign:"center"},typographyTop:{fontFamily:"open sans",textAlign:"center"},typographyOther:{color:"black",fontFamily:"open sans",textAlign:"center",marginBottom:"100px"},button:{backgroundColor:"brown",color:"white",fontFamily:"open sans",alignText:"left"}});function z(){var e=M();return o.a.createElement("div",{className:e.root},o.a.createElement(u.a,{className:e.typographyTop,variant:"h1",gutterBottom:!0},"Hello"),o.a.createElement(u.a,{className:e.typographyOther,variant:"h2",gutterBottom:!0},"I am good at ping pong and can teach you piano"),o.a.createElement(S,null))}var R=Object(s.a)({root:{width:"100%",maxWidth:500,textAlign:"left",margin:"auto",marginTop:"40px"},typography:{fontFamily:"open sans"},title:{color:"brown",fontFamily:"open sans"}});function I(){var e,t=R();return o.a.createElement("div",{className:t.root},o.a.createElement(u.a,(e={className:t.typography},Object(A.a)(e,"className",t.title),Object(A.a)(e,"variant","h3"),Object(A.a)(e,"gutterBottom",!0),e),"About"),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},"Joel Helander holds a bachelor's degree in music from Clark University and has trained as a pianist across the jazz, pop, and classical styles for over thirteen years. He is a self-taught guitarist and ukulele player of ten years, a songwriter, and a composer. Over the past five years, Joel has taught music lessons, classes, and ensembles at Worcester Music Academy, Easthampton Music Conservatory, and the Academy at Charlemont. 
Joel's first two albums Flood and Passing Frames have been featured on Forbes Online and currently reside in the top ten artist-recommended classical albums on bandcamp.com. More recently, he played keyboards as a session musician on Jake Klar's Until the Wildfire Becomes Paradise and continues to perform in Klar's touring band."),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},"As a teacher, Joel emphasizes the crucial balance between patient preparation and free improvisation. From day one, he strives to identify what specifically draws his students to music, and what rudiments they will need to learn in order to meet their goals. This approach combined with an equal focus on in-the-moment creative expression helps the student to see music not as a test or a stressor, but as a lifelong companion and expressive outlet. Whether you hope to learn jazz, pop, or classical, to work on ear training, sight-reading, improvisation, or theory, Joel is excited to share his knowledge with you in a fun and relaxed lesson environment."),o.a.createElement(W,null))}var K=Object(s.a)({root:{width:"100%",maxWidth:500,textAlign:"left",margin:"auto",marginTop:"40px"},typography:{fontFamily:"open sans"},title:{fontFamily:"open sans",color:"brown"}});function U(){var e,t=K();return o.a.createElement("div",{className:t.root},o.a.createElement(u.a,(e={className:t.typography},Object(A.a)(e,"className",t.title),Object(A.a)(e,"variant","h3"),Object(A.a)(e,"gutterBottom",!0),e),"Testimonials"),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},'"My daughter began taking guitar lessons with Joel at age nine. Joel seemed to work magic, in my eyes, instilling a love of music in my daughter that has only grown over time. All that she learned from him has helped her to excel not only in music, but also socially and academically. 
Our time with Joel was an invaluable part of my daughter\'s childhood that was filled with inspiration, creativity, and the blooming of her soul." ~Rebecca, mother of guitar student Emma'),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},'"Joel is fun and easygoing. He\'s always willing to follow my interests and knows when to insert exercises or information to improve technique. He provide a solid base for me to explore from and reliably leads me to the next step in my study." ~ Kathryn, adult piano student'),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},'"Joel was the only teacher in the area who didn\u2019t consider our 5 year old too young for lessons. Always engaging, he Incorporates games and movement activities which enable our daughter to have fun while making progress. 1 \xbd years later she proudly played a Beethoven tune at her grandparent\u2019s recent 50th anniversary party. She is looking forward to continued fun and learning." ~Marti, mother of guitar student Holland'),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},'"Joel is a fantastic teacher. Myriads of challenges face adult learners of jazz piano. Joel is great at identifying where the bottle-necks are, and coming up with exercises designed to overcome them. He is always encouraging too, helping you to inspire to get to the next level. I would wholeheartedly recommend him!" ~Yuko, adult piano student'),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},'"It was a pleasure having Joel as part of our faculty. He got along really well with his colleagues, his students and their parents. His students loved working with him and made tremendous progress during their time studying with him. They were all very sad when he moved. As a teacher, he was able to relate well with his students and was able to cater his lessons to the needs of each student. 
He was very encouraging, while also challenging students in their studies." ~Sarah Kelly, Executive Director at Worcester Music Academy'),o.a.createElement(W,null))}var $=Object(s.a)({root:{width:"100%",maxWidth:500,textAlign:"left",margin:"auto",marginTop:"40px"},typography:{fontFamily:"open sans"},title:{color:"brown"}});function q(){var e,t=$();return o.a.createElement("div",{className:t.root},o.a.createElement(u.a,(e={className:t.typography},Object(A.a)(e,"className",t.title),Object(A.a)(e,"variant","h3"),Object(A.a)(e,"gutterBottom",!0),e),"Rates and Policies"),o.a.createElement(u.a,{className:t.typography,variant:"body1",gutterBottom:!0},"Students or parents pay for a month at a time at the beginning of the month based on the number of lesson weeks during that month. Hour long lessons cost $50, forty-five minute lessons cost $40, and half hour lessons cost $30. If you need to cancel, please provide at least 24-hours notice to guarantee a makeup lesson."),o.a.createElement(W,null))}var G=Object(s.a)({root:{width:"100%",maxWidth:500,textAlign:"left",margin:"auto",marginTop:"40px"},typography:{color:"brown"}});function Y(){var e=G();return o.a.createElement("div",{className:e.root},o.a.createElement(u.a,{className:e.typography,variant:"h3",gutterBottom:!0},"Contact"),o.a.createElement(u.a,{className:e.typography,variant:"body1",gutterBottom:!0},"You may contact me at j.n.helander@gmail.com with inquiries about lessons, notation services, song licensing, or session work."))}var L=function(){return 
o.a.createElement("div",{className:"App"},o.a.createElement(B,null),o.a.createElement(l.c,null,o.a.createElement(l.a,{exact:!0,path:"/",component:z}),o.a.createElement(l.a,{path:"/about",component:I}),o.a.createElement(l.a,{path:"/testimonials",component:U}),o.a.createElement(l.a,{path:"/ratesAndPolicies",component:q}),o.a.createElement(l.a,{path:"/contact",component:Y})))};Boolean("localhost"===window.location.hostname||"[::1]"===window.location.hostname||window.location.hostname.match(/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/));var V=a(55),Q=a.n(V)()({basename:"/helander-spa-test"});i.a.render(o.a.createElement(l.b,{history:Q},o.a.createElement(L,null)),document.getElementById("root")),"serviceWorker"in navigator&&navigator.serviceWorker.ready.then((function(e){e.unregister()}))}},[[66,1,2]]]);
//# sourceMappingURL=main.d35bbbf8.chunk.js.map
|
const path = require('path');
module.exports = {
client: 'sqlite3',
connection: {
filename: path.resolve(__dirname, 'src', 'database', 'database.sqlite')
},
migrations: {
directory: path.resolve(__dirname, 'src', 'database', 'migrations')
},
useNullAsDefault: true,
};
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*/
const fs = require('fs');
const GithubSlugger = require('github-slugger');
// Depth-first listing of every file under `dir`. Directories themselves are
// not included in the result — only the files they (recursively) contain.
function walk(dir) {
  let results = [];
  for (const entry of fs.readdirSync(dir)) {
    const fullPath = dir + '/' + entry;
    const stat = fs.statSync(fullPath);
    if (stat && stat.isDirectory()) {
      /* Recurse into a subdirectory */
      results = results.concat(walk(fullPath));
    } else {
      /* Is a file */
      results.push(fullPath);
    }
  }
  return results;
}
// Replace every markdown link in `line` with its link text.
// Fixed: the original regex lacked the global flag, so only the FIRST link in
// a heading was stripped — a heading with two links produced a slug containing
// raw "](url)" markup.
function stripLinks(line) {
  return line.replace(/\[([^\]]+)\]\([^)]+\)/g, (match, p1) => p1);
}
// If `line` is a markdown heading without an explicit {#id}, append one
// generated by `slugger` from the link-stripped heading text.
function addHeaderID(line, slugger) {
  // check if we're a header at all
  if (!line.startsWith('#')) {
    return line;
  }
  // check if it already has an id
  if (/\{#[^}]+\}/.test(line)) {
    return line;
  }
  const headingText = line.slice(line.indexOf(' ')).trim();
  const headingLevel = line.slice(0, line.indexOf(' '));
  return `${headingLevel} ${headingText} {#${slugger.slug(
    stripLinks(headingText),
  )}}`;
}
// Add {#id} anchors to every heading in `lines`, skipping fenced code blocks.
function addHeaderIDs(lines) {
  // Sluggers should be per file
  const slugger = new GithubSlugger();
  let inCode = false;
  return lines.map((line) => {
    // Code fences toggle `inCode`; nothing inside a fence is touched.
    if (line.startsWith('```')) {
      inCode = !inCode;
      return line;
    }
    return inCode ? line : addHeaderID(line, slugger);
  });
}
// CLI entry: the first argument is the docs directory to process.
const [path] = process.argv.slice(2);
const files = walk(path);
files.forEach((file) => {
  // Only markdown files get header IDs.
  if (!file.endsWith('.md')) {
    return;
  }
  // Rewrite each file in place with explicit {#id} anchors added to headings.
  const content = fs.readFileSync(file, 'utf8');
  const lines = content.split('\n');
  const updatedLines = addHeaderIDs(lines);
  fs.writeFileSync(file, updatedLines.join('\n'));
});
|
// Low-level field validators. Each returns a boolean. (Comments translated to
// English; user-visible Chinese strings preserved byte-for-byte.)
const func = {
    // Must not be empty. Also rejects the literal strings "null"/"undefined",
    // which appear when such values are interpolated into templates.
    empty(val = "") {
        switch (val) {
            case '':
            case null:
            case 'null':
            case undefined:
            case 'undefined':
                return true;
            default:
                return false;
        }
    },
    // Digits only (the empty string also passes, as before).
    number(val = 0) {
        return /^[0-9]*$/.test(val);
    },
    // Chinese characters only.
    chineseChar(val = "") {
        return /^[\u4e00-\u9fa5]{0,}$/.test(val);
    },
    // Letters and digits only.
    atoBAndNum(val = "") {
        return /^[A-Za-z0-9]+$/.test(val);
    },
    // Exactly `len` characters of any kind.
    lenString(val = "", len = 0) {
        return new RegExp(`^.{${len}}$`).test(val);
    },
    // At most `max` characters of any kind.
    // Fixed: the original pattern was `^.{,max}$` — `{,n}` is NOT a quantifier
    // in JavaScript regexes (it matches the literal text "{,n}"), so this
    // validator could never succeed. The correct form is `{0,max}`.
    maxString(val = "", max = 0) {
        return new RegExp(`^.{0,${max}}$`).test(val);
    },
    // At least `min` characters of any kind.
    minString(val = "", min = 0) {
        return new RegExp(`^.{${min},}$`).test(val);
    },
    // Between `min` and `max` characters of any kind.
    rangeString(val = "", min = 0, max = 0) {
        return new RegExp(`^.{${min},${max}}$`).test(val);
    },
    // Mainland-China resident ID number (15- or 18-digit).
    idNum(code = "") {
        // Valid province/region prefixes (first two digits).
        const city = {
            11: "北京",
            12: "天津",
            13: "河北",
            14: "山西",
            15: "内蒙古",
            21: "辽宁",
            22: "吉林",
            23: "黑龙江 ",
            31: "上海",
            32: "江苏",
            33: "浙江",
            34: "安徽",
            35: "福建",
            36: "江西",
            37: "山东",
            41: "河南",
            42: "湖北 ",
            43: "湖南",
            44: "广东",
            45: "广西",
            46: "海南",
            50: "重庆",
            51: "四川",
            52: "贵州",
            53: "云南",
            54: "西藏 ",
            61: "陕西",
            62: "甘肃",
            63: "青海",
            64: "宁夏",
            65: "新疆",
            71: "台湾",
            81: "香港",
            82: "澳门",
            91: "国外 "
        };
        // Overall format: 6-digit region, birth date, sequence, optional check digit.
        if (!code || !/^\d{6}(18|19|20)?\d{2}(0[1-9]|1[012])(0[1-9]|[12]\d|3[01])\d{3}(\d|X)$/i.test(code)) {
            return false; // malformed number
        }
        if (!city[code.substr(0, 2)]) {
            return false; // unknown region code
        }
        // 18-digit IDs carry a check digit: weighted sum of the first 17 digits
        // mod 11, looked up in a parity table.
        if (code.length === 18) {
            const chars = code.split('');
            // Weight factors for the first 17 digits.
            const factor = [7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2];
            // Expected check digit, indexed by (weighted sum mod 11).
            const parity = [1, 0, 'X', 9, 8, 7, 6, 5, 4, 3, 2];
            let sum = 0;
            for (let i = 0; i < 17; i++) {
                sum += chars[i] * factor[i];
            }
            // Fixed: compare case-insensitively — the /i format regex accepts a
            // lowercase "x" check digit, but the original strict comparison then
            // rejected such IDs.
            if (String(parity[sum % 11]).toUpperCase() !== chars[17].toUpperCase()) {
                return false;
            }
        }
        return true;
    },
    // Mainland-China mobile number.
    mobile(val = "") {
        return /^1(3|4|5|6|7|8|9)\d{9}$/.test(val);
    },
    // License plate, including new-energy plates.
    plate(val = "") {
        return /^(([京津沪渝冀豫云辽黑湘皖鲁新苏浙赣鄂桂甘晋蒙陕吉闽贵粤青藏川宁琼使领][A-Z](([0-9]{5}[DF])|([DF]([A-HJ-NP-Z0-9])[0-9]{4})))|([京津沪渝冀豫云辽黑湘皖鲁新苏浙赣鄂桂甘晋蒙陕吉闽贵粤青藏川宁琼使领][A-Z][A-HJ-NP-Z0-9]{4}[A-HJ-NP-Z0-9挂学警港澳使领]))$/.test(val);
    },
    // E-mail address (lower-case local part and domain, as before).
    email(val = "") {
        return /^[a-z0-9]+([._\\-]*[a-z0-9])*@([a-z0-9]+[-a-z0-9]*[a-z0-9]+.){1,63}[a-z0-9]+$/.test(val);
    }
};
// Symbolic rule names for the validators above; the rule strings consumed by
// valid() reference these keys. (Comments translated to English.)
const funcList = {
    "empty": "empty",             // must not be empty
    "number": "number",           // digits only
    "chineseChar": "chineseChar", // Chinese characters only
    "atoBAndNum": "atoBAndNum",   // letters and digits only
    "lenString": "lenString",     // exact length
    "maxString": "maxString",     // maximum length
    "minString": "minString",     // minimum length
    "rangeString": "rangeString", // length between min and max
    "idNum": "idNum",             // resident ID number
    "mobile": "mobile",           // mobile number
    "plate": "plate",             // license plate
    "email": "email"              // e-mail address
};
// Validation façade: exposes the rule-name map plus a data validator driven by
// rule descriptors of the shape produced by getValidInfo().
export default {
    funcList,
    // Validate `data` against `validInfo`, an array of descriptors
    // ({name, param: "rule|arg1|arg2", field_name}). Stops at the first failing
    // rule and returns {status, msg, key} where `key` names the offending field
    // and `msg` is a user-facing (Chinese) message.
    valid(data = {}, validInfo = []) {
        let isOK = true;
        let msg = "";
        let key = "";
        for (let index = 0; index < validInfo.length; index++) {
            let item = validInfo[index];
            key = item.name;
            // params[0] selects the rule; params[1..] are its arguments.
            let params = item.param.split("|");
            switch (params[0]) {
                case funcList.empty:
                    isOK = !func.empty(data[item.name]);
                    // NOTE(review): the loose == also treats 0, "0" and "" as
                    // empty here — confirm rejecting numeric 0 is intended.
                    if (data[item.name] == 0) {
                        isOK = false;
                    }
                    msg = `${item.field_name}不能为空`;
                    break;
                case funcList.number:
                    isOK = func.number(data[item.name]);
                    msg = `${item.field_name}必须是数字`;
                    break;
                case funcList.chineseChar:
                    isOK = func.chineseChar(data[item.name]);
                    msg = `${item.field_name}必须是汉字`;
                    break;
                case funcList.atoBAndNum:
                    isOK = func.atoBAndNum(data[item.name]);
                    msg = `${item.field_name}只能是英文和数字`;
                    break;
                case funcList.lenString:
                    isOK = func.lenString(data[item.name], params[1]);
                    msg = `${item.field_name}的长度必须是${params[1]}位`;
                    break;
                case funcList.maxString:
                    isOK = func.maxString(data[item.name], params[1]);
                    msg = `${item.field_name}的长度最多${params[1]}位`;
                    break;
                case funcList.minString:
                    isOK = func.minString(data[item.name], params[1]);
                    msg = `${item.field_name}的长度最少${params[1]}位`;
                    break;
                case funcList.rangeString:
                    isOK = func.rangeString(data[item.name], params[1], params[2]);
                    msg = `${item.field_name}的长度最少${params[1]}位,最多${params[2]}位`;
                    break;
                case funcList.idNum:
                    isOK = func.idNum(data[item.name]);
                    // Fall back to a generic message when no display name is set.
                    if (func.empty(item.field_name)) {
                        msg = `身份证不合法`;
                    } else {
                        msg = `${item.field_name}不合法`;
                    }
                    break;
                case funcList.mobile:
                    isOK = func.mobile(data[item.name]);
                    if (func.empty(item.field_name)) {
                        msg = `手机号码不合法`;
                    } else {
                        msg = `${item.field_name}不合法`;
                    }
                    break;
                case funcList.plate:
                    isOK = func.plate(data[item.name]);
                    if (func.empty(item.field_name)) {
                        msg = `车牌号码不合法`;
                    } else {
                        msg = `${item.field_name}不合法`;
                    }
                    break;
                case funcList.email:
                    isOK = func.email(data[item.name]);
                    if (func.empty(item.field_name)) {
                        msg = `电子邮件不合法`;
                    } else {
                        msg = `${item.field_name}不合法`;
                    }
                    break;
                default:
                    // Unknown rule names are ignored (treated as passing).
                    isOK = true;
                    break;
            }
            // First failure wins: stop and report it.
            if (!isOK) {
                break;
            }
        }
        return {"status": isOK, "msg": msg, "key": key};
    },
    // Build a rule descriptor consumed by valid().
    getValidInfo(name = "", funcName = "", fieldName = "", param = "") {
        return {"name": name, "param": `${funcName}|${param}`, "field_name": fieldName};
    }
}
|
import {Page, NavController, ActionSheet, Modal} from 'ionic/ionic';
import {PreviewModal} from './preview';
import {Itunes} from '../../itunes/itunes';
@Page({
    templateUrl: 'build/pages/search/search.html',
    viewProviders: []
})
export class SearchPage {
    /**
     * @param nav - navigation controller used to present modals/action sheets
     * @param itunes - iTunes search service
     */
    constructor(nav: NavController, itunes: Itunes) {
        this.nav = nav;
        this.results = [];            // results currently displayed
        this._unfilteredResults = []; // full, unfiltered result set
        this.usesFilter = false;      // true while a kind-filter is applied
        this.keyword = '';
        this.itunes = itunes;
    }
    /** Present a modal previewing the given track. */
    openPreview(track) {
        let modal = Modal.create(PreviewModal, {
            track: track
        });
        this.nav.present(modal);
    }
    userPressedCancel() {
        console.debug('User pressed cancel');
    }
    /**
     * Run the search when the user presses Enter.
     *
     * Fix: `KeyboardEvent.keyIdentifier` is non-standard and has been removed
     * from browsers; use the standard `key` property and keep the old check
     * only as a fallback for legacy WebViews. Also log search failures
     * instead of leaving the promise rejection unhandled.
     */
    keyHasBeenPressed(e) {
        if (e.key === 'Enter' || e.keyIdentifier === 'Enter') {
            this.itunes.search(this.keyword).then((results) => {
                this.results = results;
                this._unfilteredResults = results;
                this.usesFilter = false;
            }).catch((err) => {
                console.error('iTunes search failed', err);
            });
        }
    }
    /** Present an action sheet letting the user filter results by kind. */
    openFilters() {
        let sheet = ActionSheet.create({
            title: 'Filter by...',
            buttons: [
                {
                    text: 'Movies only',
                    handler: () => {
                        this.results = this._unfilteredResults.filter((item) => item.kind === 'feature-movie');
                        this.usesFilter = true;
                    }
                },
                {
                    text: 'Songs only',
                    handler: () => {
                        this.results = this._unfilteredResults.filter((item) => item.kind === 'song');
                        this.usesFilter = true;
                    }
                },
                {
                    text: 'Clear',
                    style: 'destructive',
                    handler: () => {
                        console.debug('Clearing filter');
                        // Restore the unfiltered result set.
                        this.results = this._unfilteredResults;
                        this.usesFilter = false;
                    }
                },
                {
                    text: 'Cancel',
                    style: 'cancel'
                }
            ]
        });
        this.nav.present(sheet);
    }
}
|
const axios = require('axios')
const constants = require('core-module/constants')
const HError = require('core-module/HError')
const utils = require('core-module/utils')
const {getUploadFileConfig} = require('core-module/upload')
module.exports = function (BaaS) {
    /**
     * Upload a file.
     * @memberof BaaS
     * @param {FileParams} fileParams - file parameters; `fileObj` is required,
     *     `fileName` optionally overrides the stored name
     * @param {FileMeta} metaData - file metadata (optional)
     * @return {Promise<any>} resolves with the upload response whose `data`
     *     field is normalized to {status, path, file}
     * @throws {HError} 605 when the arguments are malformed
     */
    BaaS.uploadFile = function (fileParams, metaData) {
        let fileObj = fileParams.fileObj
        // A real file object with a name is mandatory.
        if (!fileObj || typeof fileObj !== 'object' || !fileObj.name) {
            throw new HError(605)
        }
        // fileName, when provided, must be a string.
        if(fileParams.fileName !== undefined && typeof fileParams.fileName !== 'string'){
            throw new HError(605)
        }
        if (!metaData) {
            metaData = {}
        } else if (typeof metaData !== 'object') {
            throw new HError(605)
        }
        let config = {}
        let fileName = fileParams.fileName || fileObj.name
        // Step 1: ask the backend for an upload policy/authorization, then
        // POST the file as multipart form data to the returned upload URL.
        return getUploadFileConfig(fileName, utils.replaceQueryParams(metaData)).then(res => {
            config = {
                id: res.data.id,
                fileName: fileName,
                policy: res.data.policy,
                authorization: res.data.authorization,
                uploadUrl: res.data.upload_url,
                filePath: fileObj.name,
                destLink: res.data.path,
            }
            let fd = new FormData()
            fd.append(constants.UPLOAD.UPLOAD_FILE_KEY, fileObj, fileName)
            fd.append('policy', config.policy)
            fd.append('authorization', config.authorization)
            return axios.post(config.uploadUrl, fd)
        }).then(res => {
            // Step 2: reshape the raw upload response into the public
            // result format expected by callers.
            let result = {}
            let data = res.data
            result.status = 'ok'
            result.path = config.destLink
            result.file = {
                'id': config.id,
                'path': config.destLink,
                'name': config.fileName,
                'created_at': data.time,
                'mime_type': data.mimetype,
                'cdn_path': data.url,
                'size': data.file_size,
            }
            res.data = result
            return res
        })
    }
}
|
// ESLint configuration for a TypeScript + React project.
module.exports = {
    parser: "@typescript-eslint/parser",
    extends: [
        "eslint:recommended",
        "plugin:react/recommended",
        "plugin:@typescript-eslint/recommended",
        // Prettier entries come last so they can disable conflicting
        // stylistic rules from the configs above.
        "prettier/@typescript-eslint",
        "plugin:prettier/recommended"
    ],
    plugins: [
        "react",
        "@typescript-eslint",
        "prettier",
        "react-hooks",
        "simple-import-sort",
        "sort-exports"
    ],
    env: {
        browser: true,
        node: true,
        es6: true
    },
    settings: {
        react: {
            pragma: "React",
            version: "detect" // detect the installed React version
        }
    },
    rules: {
        "react-hooks/rules-of-hooks": "error",
        "react-hooks/exhaustive-deps": "warn",
        "@typescript-eslint/explicit-function-return-type": ["off"],
        "@typescript-eslint/no-inferrable-types": 1,
        // Names prefixed with "_" may be intentionally unused.
        "@typescript-eslint/no-unused-vars": [
            "error",
            { argsIgnorePattern: "^_", varsIgnorePattern: "^_" }
        ],
        "sort-exports/sort-exports": ["error", { sortDir: "asc" }],
        "simple-import-sort/sort": [
            "error",
            {
                groups: [
                    ["^\\u0000"], // side-effect imports
                    ["^@?\\w"],   // external packages
                    // NOTE(review): "[^.|components]" is a regex character
                    // class, so it excludes the literal characters ".", "|",
                    // "c", "o", "m", ... — it does NOT mean "not relative and
                    // not components". Confirm the intended grouping.
                    ["^[^.|components]"],
                    ["^\\.|components"]
                ]
            }
        ]
    },
    overrides: [
        {
            // TSX components get their typing from TypeScript, so runtime
            // PropTypes checks are redundant.
            files: ["**/*.tsx"],
            rules: {
                "react/prop-types": "off"
            }
        }
    ]
};
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.web.static}.
"""
import errno
import inspect
import mimetypes
import os
import re
import sys
import warnings
from io import BytesIO as StringIO
from unittest import skipIf
from zope.interface.verify import verifyObject
from twisted.internet import abstract, interfaces
from twisted.python.runtime import platform
from twisted.python.filepath import FilePath
from twisted.python import compat, log
from twisted.python.compat import networkString
from twisted.trial.unittest import TestCase
from twisted.web import static, http, script, resource
from twisted.web.server import UnsupportedMethod
from twisted.web.test.requesthelper import DummyRequest
from twisted.web.test._util import _render
from twisted.web._responses import FOUND
class StaticDataTests(TestCase):
    """
    Tests for L{Data}.
    """

    def test_headRequest(self):
        """
        L{Data.render} returns an empty response body for a I{HEAD} request.
        """
        dataResource = static.Data(b"foo", "bar")
        request = DummyRequest([""])
        request.method = b"HEAD"
        rendered = _render(dataResource, request)

        def assertNothingWritten(ignored):
            self.assertEqual(b"".join(request.written), b"")

        rendered.addCallback(assertNothingWritten)
        return rendered

    def test_invalidMethod(self):
        """
        L{Data.render} raises L{UnsupportedMethod} in response to a non-I{GET},
        non-I{HEAD} request.
        """
        dataResource = static.Data(b"foo", b"bar")
        request = DummyRequest([b""])
        request.method = b"POST"
        self.assertRaises(UnsupportedMethod, dataResource.render, request)
class StaticFileTests(TestCase):
    """
    Tests for the basic behavior of L{File}.
    """

    def _render(self, resource, request):
        # Thin wrapper so test methods read uniformly; delegates to the
        # module-level _render helper.
        return _render(resource, request)

    def test_ignoredExtTrue(self):
        """
        Passing C{1} as the value to L{File}'s C{ignoredExts} argument
        issues a warning and sets the ignored extensions to the
        wildcard C{"*"}.
        """
        with warnings.catch_warnings(record=True) as caughtWarnings:
            file = static.File(self.mktemp(), ignoredExts=1)
        self.assertEqual(file.ignoredExts, ["*"])
        self.assertEqual(len(caughtWarnings), 1)

    def test_ignoredExtFalse(self):
        """
        Passing C{0} as the value to L{File}'s C{ignoredExts} argument
        issues a warning and sets the ignored extensions to the empty
        list.
        """
        with warnings.catch_warnings(record=True) as caughtWarnings:
            file = static.File(self.mktemp(), ignoredExts=0)
        self.assertEqual(file.ignoredExts, [])
        self.assertEqual(len(caughtWarnings), 1)

    def test_allowExt(self):
        """
        Passing C{True} as the value to L{File}'s C{ignoredExts} argument
        issues a warning and sets the ignored extensions to the
        wildcard C{*}.
        """
        with warnings.catch_warnings(record=True) as caughtWarnings:
            file = static.File(self.mktemp(), ignoredExts=True)
        self.assertEqual(file.ignoredExts, ["*"])
        self.assertEqual(len(caughtWarnings), 1)

    def test_invalidMethod(self):
        """
        L{File.render} raises L{UnsupportedMethod} in response to a non-I{GET},
        non-I{HEAD} request.
        """
        request = DummyRequest([b""])
        request.method = b"POST"
        path = FilePath(self.mktemp())
        path.setContent(b"foo")
        file = static.File(path.path)
        self.assertRaises(UnsupportedMethod, file.render, request)

    def test_notFound(self):
        """
        If a request is made which encounters a L{File} before a final segment
        which does not correspond to any file in the path the L{File} was
        created with, a not found response is sent.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        file = static.File(base.path)
        request = DummyRequest([b"foobar"])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(request.responseCode, 404)

        d.addCallback(cbRendered)
        return d

    def test_emptyChild(self):
        """
        The C{''} child of a L{File} which corresponds to a directory in the
        filesystem is a L{DirectoryLister}.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        file = static.File(base.path)
        request = DummyRequest([b""])
        child = resource.getChildForRequest(file, request)
        self.assertIsInstance(child, static.DirectoryLister)
        self.assertEqual(child.path, base.path)

    def test_emptyChildUnicodeParent(self):
        """
        The C{u''} child of a L{File} which corresponds to a directory
        whose path is text is a L{DirectoryLister} that renders to a
        binary listing.
        @see: U{https://twistedmatrix.com/trac/ticket/9438}
        """
        textBase = FilePath(self.mktemp()).asTextMode()
        textBase.makedirs()
        textBase.child("text-file").open("w").close()
        textFile = static.File(textBase.path)
        request = DummyRequest([b""])
        child = resource.getChildForRequest(textFile, request)
        self.assertIsInstance(child, static.DirectoryLister)
        nativePath = compat.nativeString(textBase.path)
        self.assertEqual(child.path, nativePath)
        response = child.render(request)
        self.assertIsInstance(response, bytes)

    def test_securityViolationNotFound(self):
        """
        If a request is made which encounters a L{File} before a final segment
        which cannot be looked up in the filesystem due to security
        considerations, a not found response is sent.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        file = static.File(base.path)
        # ".." would escape the served directory; File must refuse it.
        request = DummyRequest([b".."])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(request.responseCode, 404)

        d.addCallback(cbRendered)
        return d

    @skipIf(platform.isWindows(), "Cannot remove read permission on Windows")
    def test_forbiddenResource(self):
        """
        If the file in the filesystem which would satisfy a request cannot be
        read, L{File.render} sets the HTTP response code to I{FORBIDDEN}.
        """
        base = FilePath(self.mktemp())
        base.setContent(b"")
        # Make sure we can delete the file later.
        self.addCleanup(base.chmod, 0o700)
        # Get rid of our own read permission.
        base.chmod(0)
        file = static.File(base.path)
        request = DummyRequest([b""])
        d = self._render(file, request)

        def cbRendered(ignored):
            self.assertEqual(request.responseCode, 403)

        d.addCallback(cbRendered)
        return d

    def test_undecodablePath(self):
        """
        A request whose path cannot be decoded as UTF-8 receives a not
        found response, and the failure is logged.
        """
        path = self.mktemp()
        if isinstance(path, bytes):
            path = path.decode("ascii")
        base = FilePath(path)
        base.makedirs()
        file = static.File(base.path)
        # b"\xff" is not valid UTF-8.
        request = DummyRequest([b"\xff"])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(request.responseCode, 404)
            self.assertEqual(len(self.flushLoggedErrors(UnicodeDecodeError)), 1)

        d.addCallback(cbRendered)
        return d

    def test_forbiddenResource_default(self):
        """
        L{File.forbidden} defaults to L{resource.ForbiddenResource}.
        """
        self.assertIsInstance(static.File(b".").forbidden, resource.ForbiddenResource)

    def test_forbiddenResource_customize(self):
        """
        The resource rendered for forbidden requests is stored as a class
        member so that users can customize it.
        """
        base = FilePath(self.mktemp())
        base.setContent(b"")
        markerResponse = b"custom-forbidden-response"

        def failingOpenForReading():
            # Simulate a permission error when the file is opened.
            raise OSError(errno.EACCES, "")

        class CustomForbiddenResource(resource.Resource):
            def render(self, request):
                return markerResponse

        class CustomStaticFile(static.File):
            forbidden = CustomForbiddenResource()

        fileResource = CustomStaticFile(base.path)
        fileResource.openForReading = failingOpenForReading
        request = DummyRequest([b""])
        result = fileResource.render(request)
        self.assertEqual(markerResponse, result)

    def test_indexNames(self):
        """
        If a request is made which encounters a L{File} before a final empty
        segment, a file in the L{File} instance's C{indexNames} list which
        exists in the path the L{File} was created with is served as the
        response to the request.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        base.child("foo.bar").setContent(b"baz")
        file = static.File(base.path)
        file.indexNames = ["foo.bar"]
        request = DummyRequest([b""])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(b"".join(request.written), b"baz")
            self.assertEqual(
                request.responseHeaders.getRawHeaders(b"content-length")[0], b"3"
            )

        d.addCallback(cbRendered)
        return d

    def test_staticFile(self):
        """
        If a request is made which encounters a L{File} before a final segment
        which names a file in the path the L{File} was created with, that file
        is served as the response to the request.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        base.child("foo.bar").setContent(b"baz")
        file = static.File(base.path)
        request = DummyRequest([b"foo.bar"])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(b"".join(request.written), b"baz")
            self.assertEqual(
                request.responseHeaders.getRawHeaders(b"content-length")[0], b"3"
            )

        d.addCallback(cbRendered)
        return d

    @skipIf(
        sys.getfilesystemencoding().lower() not in ("utf-8", "mcbs"),
        "Cannot write unicode filenames with file system encoding of"
        " {}".format(sys.getfilesystemencoding()),
    )
    def test_staticFileUnicodeFileName(self):
        """
        A request for a existing unicode file path encoded as UTF-8
        returns the contents of that file.
        """
        name = "\N{GREEK SMALL LETTER ETA WITH PERISPOMENI}"
        content = b"content"
        base = FilePath(self.mktemp())
        base.makedirs()
        base.child(name).setContent(content)
        file = static.File(base.path)
        request = DummyRequest([name.encode("utf-8")])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(b"".join(request.written), content)
            self.assertEqual(
                request.responseHeaders.getRawHeaders(b"content-length")[0],
                networkString(str(len(content))),
            )

        d.addCallback(cbRendered)
        return d

    def test_staticFileDeletedGetChild(self):
        """
        A L{static.File} created for a directory which does not exist should
        return childNotFound from L{static.File.getChild}.
        """
        staticFile = static.File(self.mktemp())
        request = DummyRequest([b"foo.bar"])
        child = staticFile.getChild(b"foo.bar", request)
        self.assertEqual(child, staticFile.childNotFound)

    def test_staticFileDeletedRender(self):
        """
        A L{static.File} created for a file which does not exist should render
        its C{childNotFound} page.
        """
        staticFile = static.File(self.mktemp())
        request = DummyRequest([b"foo.bar"])
        request2 = DummyRequest([b"foo.bar"])
        d = self._render(staticFile, request)
        d2 = self._render(staticFile.childNotFound, request2)

        # Render both the missing file and childNotFound directly, then
        # compare that the two responses are byte-identical.
        def cbRendered2(ignored):
            def cbRendered(ignored):
                self.assertEqual(b"".join(request.written), b"".join(request2.written))

            d.addCallback(cbRendered)
            return d

        d2.addCallback(cbRendered2)
        return d2

    def test_getChildChildNotFound_customize(self):
        """
        The resource rendered for child not found requests can be customize
        using a class member.
        """
        base = FilePath(self.mktemp())
        base.setContent(b"")
        markerResponse = b"custom-child-not-found-response"

        class CustomChildNotFoundResource(resource.Resource):
            def render(self, request):
                return markerResponse

        class CustomStaticFile(static.File):
            childNotFound = CustomChildNotFoundResource()

        fileResource = CustomStaticFile(base.path)
        request = DummyRequest([b"no-child.txt"])
        child = fileResource.getChild(b"no-child.txt", request)
        result = child.render(request)
        self.assertEqual(markerResponse, result)

    def test_headRequest(self):
        """
        L{static.File.render} returns an empty response body for I{HEAD}
        requests.
        """
        path = FilePath(self.mktemp())
        path.setContent(b"foo")
        file = static.File(path.path)
        request = DummyRequest([b""])
        request.method = b"HEAD"
        d = _render(file, request)

        def cbRendered(ignored):
            self.assertEqual(b"".join(request.written), b"")

        d.addCallback(cbRendered)
        return d

    def test_processors(self):
        """
        If a request is made which encounters a L{File} before a final segment
        which names a file with an extension which is in the L{File}'s
        C{processors} mapping, the processor associated with that extension is
        used to serve the response to the request.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        base.child("foo.bar").setContent(
            b"from twisted.web.static import Data\n"
            b"resource = Data(b'dynamic world', 'text/plain')\n"
        )
        file = static.File(base.path)
        # .bar files are executed as resource scripts instead of served raw.
        file.processors = {".bar": script.ResourceScript}
        request = DummyRequest([b"foo.bar"])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(b"".join(request.written), b"dynamic world")
            self.assertEqual(
                request.responseHeaders.getRawHeaders(b"content-length")[0], b"13"
            )

        d.addCallback(cbRendered)
        return d

    def test_ignoreExt(self):
        """
        The list of ignored extensions can be set by passing a value to
        L{File.__init__} or by calling L{File.ignoreExt} later.
        """
        file = static.File(b".")
        self.assertEqual(file.ignoredExts, [])
        file.ignoreExt(".foo")
        file.ignoreExt(".bar")
        self.assertEqual(file.ignoredExts, [".foo", ".bar"])
        file = static.File(b".", ignoredExts=(".bar", ".baz"))
        self.assertEqual(file.ignoredExts, [".bar", ".baz"])

    def test_ignoredExtensionsIgnored(self):
        """
        A request for the I{base} child of a L{File} succeeds with a resource
        for the I{base<extension>} file in the path the L{File} was created
        with if such a file exists and the L{File} has been configured to
        ignore the I{<extension>} extension.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        base.child("foo.bar").setContent(b"baz")
        base.child("foo.quux").setContent(b"foobar")
        file = static.File(base.path, ignoredExts=(".bar",))
        request = DummyRequest([b"foo"])
        child = resource.getChildForRequest(file, request)
        d = self._render(child, request)

        def cbRendered(ignored):
            self.assertEqual(b"".join(request.written), b"baz")

        d.addCallback(cbRendered)
        return d

    def test_directoryWithoutTrailingSlashRedirects(self):
        """
        A request for a path which is a directory but does not have a trailing
        slash will be redirected to a URL which does have a slash by L{File}.
        """
        base = FilePath(self.mktemp())
        base.makedirs()
        base.child("folder").makedirs()
        file = static.File(base.path)
        request = DummyRequest([b"folder"])
        request.uri = b"http://dummy/folder#baz?foo=bar"
        child = resource.getChildForRequest(file, request)
        self.successResultOf(self._render(child, request))
        self.assertEqual(request.responseCode, FOUND)
        self.assertEqual(
            request.responseHeaders.getRawHeaders(b"location"),
            [b"http://dummy/folder/#baz?foo=bar"],
        )

    def _makeFilePathWithStringIO(self):
        """
        Create a L{File} that when opened for reading, returns a L{StringIO}.
        @return: 2-tuple of the opened "file" and the L{File}.
        @rtype: L{tuple}
        """
        fakeFile = StringIO()
        path = FilePath(self.mktemp())
        path.touch()
        file = static.File(path.path)
        # Open our file instead of a real one
        file.open = lambda: fakeFile
        return fakeFile, file

    def test_HEADClosesFile(self):
        """
        A HEAD request opens the file, gets the size, and then closes it after
        the request.
        """
        fakeFile, file = self._makeFilePathWithStringIO()
        request = DummyRequest([""])
        request.method = b"HEAD"
        self.successResultOf(_render(file, request))
        self.assertEqual(b"".join(request.written), b"")
        self.assertTrue(fakeFile.closed)

    def test_cachedRequestClosesFile(self):
        """
        A GET request that is cached closes the file after the request.
        """
        fakeFile, file = self._makeFilePathWithStringIO()
        request = DummyRequest([""])
        request.method = b"GET"
        # This request will always return saying that it is cached
        request.setLastModified = lambda _: http.CACHED
        self.successResultOf(_render(file, request))
        self.assertEqual(b"".join(request.written), b"")
        self.assertTrue(fakeFile.closed)
class StaticMakeProducerTests(TestCase):
    """
    Tests for L{File.makeProducer}.
    """

    def makeResourceWithContent(self, content, type=None, encoding=None):
        """
        Make a L{static.File} resource that has C{content} for its content.
        @param content: The L{bytes} to use as the contents of the resource.
        @param type: Optional value for the content type of the resource.
        @param encoding: Optional value for the content encoding of the
            resource.
        """
        fileName = FilePath(self.mktemp())
        fileName.setContent(content)
        resource = static.File(fileName._asBytesPath())
        resource.encoding = encoding
        resource.type = type
        return resource

    def contentHeaders(self, request):
        """
        Extract the content-* headers from the L{DummyRequest} C{request}.
        This returns the subset of C{request.outgoingHeaders} of headers that
        start with 'content-'.
        """
        contentHeaders = {}
        for k, v in request.responseHeaders.getAllRawHeaders():
            if k.lower().startswith(b"content-"):
                contentHeaders[k.lower()] = v[0]
        return contentHeaders

    def test_noRangeHeaderGivesNoRangeStaticProducer(self):
        """
        makeProducer when no Range header is set returns an instance of
        NoRangeStaticProducer.
        """
        resource = self.makeResourceWithContent(b"")
        request = DummyRequest([])
        with resource.openForReading() as file:
            producer = resource.makeProducer(request, file)
            self.assertIsInstance(producer, static.NoRangeStaticProducer)

    def test_noRangeHeaderSets200OK(self):
        """
        makeProducer when no Range header is set sets the responseCode on the
        request to 'OK'.
        """
        resource = self.makeResourceWithContent(b"")
        request = DummyRequest([])
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(http.OK, request.responseCode)

    def test_noRangeHeaderSetsContentHeaders(self):
        """
        makeProducer when no Range header is set sets the Content-* headers
        for the response.
        """
        length = 123
        contentType = "text/plain"
        contentEncoding = "gzip"
        resource = self.makeResourceWithContent(
            b"a" * length, type=contentType, encoding=contentEncoding
        )
        request = DummyRequest([])
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(
                {
                    b"content-type": networkString(contentType),
                    b"content-length": b"%d" % (length,),
                    b"content-encoding": networkString(contentEncoding),
                },
                self.contentHeaders(request),
            )

    def test_singleRangeGivesSingleRangeStaticProducer(self):
        """
        makeProducer when the Range header requests a single byte range
        returns an instance of SingleRangeStaticProducer.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=1-3")
        resource = self.makeResourceWithContent(b"abcdef")
        with resource.openForReading() as file:
            producer = resource.makeProducer(request, file)
            self.assertIsInstance(producer, static.SingleRangeStaticProducer)

    def test_singleRangeSets206PartialContent(self):
        """
        makeProducer when the Range header requests a single, satisfiable byte
        range sets the response code on the request to 'Partial Content'.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=1-3")
        resource = self.makeResourceWithContent(b"abcdef")
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(http.PARTIAL_CONTENT, request.responseCode)

    def test_singleRangeSetsContentHeaders(self):
        """
        makeProducer when the Range header requests a single, satisfiable byte
        range sets the Content-* headers appropriately.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=1-3")
        contentType = "text/plain"
        contentEncoding = "gzip"
        resource = self.makeResourceWithContent(
            b"abcdef", type=contentType, encoding=contentEncoding
        )
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(
                {
                    b"content-type": networkString(contentType),
                    b"content-encoding": networkString(contentEncoding),
                    b"content-range": b"bytes 1-3/6",
                    b"content-length": b"3",
                },
                self.contentHeaders(request),
            )

    def test_singleUnsatisfiableRangeReturnsSingleRangeStaticProducer(self):
        """
        makeProducer still returns an instance of L{SingleRangeStaticProducer}
        when the Range header requests a single unsatisfiable byte range.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=4-10")
        resource = self.makeResourceWithContent(b"abc")
        with resource.openForReading() as file:
            producer = resource.makeProducer(request, file)
            self.assertIsInstance(producer, static.SingleRangeStaticProducer)

    def test_singleUnsatisfiableRangeSets416ReqestedRangeNotSatisfiable(self):
        """
        makeProducer sets the response code of the request to of 'Requested
        Range Not Satisfiable' when the Range header requests a single
        unsatisfiable byte range.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=4-10")
        resource = self.makeResourceWithContent(b"abc")
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(http.REQUESTED_RANGE_NOT_SATISFIABLE, request.responseCode)

    def test_singleUnsatisfiableRangeSetsContentHeaders(self):
        """
        makeProducer when the Range header requests a single, unsatisfiable
        byte range sets the Content-* headers appropriately.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=4-10")
        contentType = "text/plain"
        resource = self.makeResourceWithContent(b"abc", type=contentType)
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(
                {
                    b"content-type": b"text/plain",
                    b"content-length": b"0",
                    b"content-range": b"bytes */3",
                },
                self.contentHeaders(request),
            )

    def test_singlePartiallyOverlappingRangeSetsContentHeaders(self):
        """
        makeProducer when the Range header requests a single byte range that
        partly overlaps the resource sets the Content-* headers appropriately.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=2-10")
        contentType = "text/plain"
        resource = self.makeResourceWithContent(b"abc", type=contentType)
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(
                {
                    b"content-type": b"text/plain",
                    b"content-length": b"1",
                    b"content-range": b"bytes 2-2/3",
                },
                self.contentHeaders(request),
            )

    def test_multipleRangeGivesMultipleRangeStaticProducer(self):
        """
        makeProducer when the Range header requests multiple byte ranges
        returns an instance of MultipleRangeStaticProducer.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=1-3,5-6")
        resource = self.makeResourceWithContent(b"abcdef")
        with resource.openForReading() as file:
            producer = resource.makeProducer(request, file)
            self.assertIsInstance(producer, static.MultipleRangeStaticProducer)

    def test_multipleRangeSets206PartialContent(self):
        """
        makeProducer when the Range header requests a multiple satisfiable
        byte ranges sets the response code on the request to 'Partial
        Content'.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=1-3,5-6")
        resource = self.makeResourceWithContent(b"abcdef")
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(http.PARTIAL_CONTENT, request.responseCode)

    def test_mutipleRangeSetsContentHeaders(self):
        """
        makeProducer when the Range header requests multiple satisfiable byte
        ranges sets the Content-* headers appropriately.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=1-3,5-6")
        resource = self.makeResourceWithContent(b"abcdefghijkl", encoding="gzip")
        with resource.openForReading() as file:
            producer = resource.makeProducer(request, file)
            contentHeaders = self.contentHeaders(request)
            # The only content-* headers set are content-type and content-length.
            self.assertEqual(
                {b"content-length", b"content-type"}, set(contentHeaders.keys())
            )
            # The content-length depends on the boundary used in the response.
            expectedLength = 5
            for boundary, offset, size in producer.rangeInfo:
                expectedLength += len(boundary)
            self.assertEqual(
                b"%d" % (expectedLength,), contentHeaders[b"content-length"]
            )
            # Content-type should be set to a value indicating a multipart
            # response and the boundary used to separate the parts.
            self.assertIn(b"content-type", contentHeaders)
            contentType = contentHeaders[b"content-type"]
            self.assertNotIdentical(
                None,
                re.match(br'multipart/byteranges; boundary="[^"]*"\Z', contentType),
            )
            # Content-encoding is not set in the response to a multiple range
            # response, which is a bit wussy but works well enough with the way
            # static.File does content-encodings...
            self.assertNotIn(b"content-encoding", contentHeaders)

    def test_multipleUnsatisfiableRangesReturnsMultipleRangeStaticProducer(self):
        """
        makeProducer still returns an instance of L{MultipleRangeStaticProducer}
        when the Range header requests multiple ranges, none of which are
        satisfiable.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=10-12,15-20")
        resource = self.makeResourceWithContent(b"abc")
        with resource.openForReading() as file:
            producer = resource.makeProducer(request, file)
            self.assertIsInstance(producer, static.MultipleRangeStaticProducer)

    def test_multipleUnsatisfiableRangesSets416ReqestedRangeNotSatisfiable(self):
        """
        makeProducer sets the response code of the request to of 'Requested
        Range Not Satisfiable' when the Range header requests multiple ranges,
        none of which are satisfiable.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=10-12,15-20")
        resource = self.makeResourceWithContent(b"abc")
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(http.REQUESTED_RANGE_NOT_SATISFIABLE, request.responseCode)

    def test_multipleUnsatisfiableRangeSetsContentHeaders(self):
        """
        makeProducer when the Range header requests multiple ranges, none of
        which are satisfiable, sets the Content-* headers appropriately.
        """
        request = DummyRequest([])
        # NOTE(review): this first header looks like a leftover from a
        # copy-paste; the second addRawHeader below is the one this test is
        # about. Confirm whether the duplicate is intentional.
        request.requestHeaders.addRawHeader(b"range", b"bytes=4-10")
        contentType = "text/plain"
        request.requestHeaders.addRawHeader(b"range", b"bytes=10-12,15-20")
        resource = self.makeResourceWithContent(b"abc", type=contentType)
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(
                {
                    b"content-length": b"0",
                    b"content-range": b"bytes */3",
                    b"content-type": b"text/plain",
                },
                self.contentHeaders(request),
            )

    def test_oneSatisfiableRangeIsEnough(self):
        """
        makeProducer when the Range header requests multiple ranges, at least
        one of which matches, sets the response code to 'Partial Content'.
        """
        request = DummyRequest([])
        request.requestHeaders.addRawHeader(b"range", b"bytes=1-3,100-200")
        resource = self.makeResourceWithContent(b"abcdef")
        with resource.openForReading() as file:
            resource.makeProducer(request, file)
            self.assertEqual(http.PARTIAL_CONTENT, request.responseCode)
class StaticProducerTests(TestCase):
    """
    Tests for the abstract L{StaticProducer} base class.
    """

    def test_stopProducingClosesFile(self):
        """
        After L{StaticProducer.stopProducing}, the file object the producer
        was reading from has been closed.
        """
        openFile = StringIO()
        static.StaticProducer(None, openFile).stopProducing()
        self.assertTrue(openFile.closed)

    def test_stopProducingSetsRequestToNone(self):
        """
        After L{StaticProducer.stopProducing}, the producer's C{request}
        attribute is L{None}, which signals to subclasses' resumeProducing
        implementations that no further data should be produced.
        """
        openFile = StringIO()
        producer = static.StaticProducer(DummyRequest([]), openFile)
        producer.stopProducing()
        self.assertIdentical(None, producer.request)
class NoRangeStaticProducerTests(TestCase):
    """
    Tests for L{NoRangeStaticProducer}.
    """

    def test_implementsIPullProducer(self):
        """
        L{NoRangeStaticProducer} implements L{IPullProducer}.
        """
        verifyObject(interfaces.IPullProducer, static.NoRangeStaticProducer(None, None))

    def test_resumeProducingProducesContent(self):
        """
        L{NoRangeStaticProducer.resumeProducing} writes content from the
        resource to the request.
        """
        request = DummyRequest([])
        content = b"abcdef"
        # NOTE(review): StringIO is fed bytes here, so it is presumably this
        # module's bytes-capable alias (BytesIO) -- confirm at the imports.
        producer = static.NoRangeStaticProducer(request, StringIO(content))
        # start calls registerProducer on the DummyRequest, which pulls all
        # output from the producer and so we just need this one call.
        producer.start()
        self.assertEqual(content, b"".join(request.written))

    def test_resumeProducingBuffersOutput(self):
        """
        L{NoRangeStaticProducer.start} writes at most
        C{abstract.FileDescriptor.bufferSize} bytes of content from the
        resource to the request at once.
        """
        request = DummyRequest([])
        bufferSize = abstract.FileDescriptor.bufferSize
        # Two full buffers plus one extra byte forces exactly three writes.
        content = b"a" * (2 * bufferSize + 1)
        producer = static.NoRangeStaticProducer(request, StringIO(content))
        # start calls registerProducer on the DummyRequest, which pulls all
        # output from the producer and so we just need this one call.
        producer.start()
        expected = [
            content[0:bufferSize],
            content[bufferSize : 2 * bufferSize],
            content[2 * bufferSize :],
        ]
        self.assertEqual(expected, request.written)

    def test_finishCalledWhenDone(self):
        """
        L{NoRangeStaticProducer.resumeProducing} calls finish() on the request
        after it is done producing content.
        """
        request = DummyRequest([])
        finishDeferred = request.notifyFinish()
        callbackList = []
        finishDeferred.addCallback(callbackList.append)
        producer = static.NoRangeStaticProducer(request, StringIO(b"abcdef"))
        # start calls registerProducer on the DummyRequest, which pulls all
        # output from the producer and so we just need this one call.
        producer.start()
        # The notifyFinish Deferred fired exactly once, with None.
        self.assertEqual([None], callbackList)
class SingleRangeStaticProducerTests(TestCase):
    """
    Tests for L{SingleRangeStaticProducer}.
    """

    def test_implementsIPullProducer(self):
        """
        L{SingleRangeStaticProducer} implements L{IPullProducer}.
        """
        verifyObject(
            interfaces.IPullProducer,
            static.SingleRangeStaticProducer(None, None, None, None),
        )

    def test_resumeProducingProducesContent(self):
        """
        L{SingleRangeStaticProducer.resumeProducing} writes the given amount
        of content, starting at the given offset, from the resource to the
        request.
        """
        request = DummyRequest([])
        content = b"abcdef"
        # Producer arguments are (request, file, offset, size): 3 bytes
        # starting at offset 1, i.e. content[1:4].
        producer = static.SingleRangeStaticProducer(request, StringIO(content), 1, 3)
        # DummyRequest.registerProducer pulls all output from the producer, so
        # we just need to call start.
        producer.start()
        self.assertEqual(content[1:4], b"".join(request.written))

    def test_resumeProducingBuffersOutput(self):
        """
        L{SingleRangeStaticProducer.start} writes at most
        C{abstract.FileDescriptor.bufferSize} bytes of content from the
        resource to the request at once.
        """
        request = DummyRequest([])
        bufferSize = abstract.FileDescriptor.bufferSize
        content = b"abc" * bufferSize
        # Requesting bufferSize + 10 bytes forces one full write plus a
        # 10-byte remainder write.
        producer = static.SingleRangeStaticProducer(
            request, StringIO(content), 1, bufferSize + 10
        )
        # DummyRequest.registerProducer pulls all output from the producer, so
        # we just need to call start.
        producer.start()
        expected = [
            content[1 : bufferSize + 1],
            content[bufferSize + 1 : bufferSize + 11],
        ]
        self.assertEqual(expected, request.written)

    def test_finishCalledWhenDone(self):
        """
        L{SingleRangeStaticProducer.resumeProducing} calls finish() on the
        request after it is done producing content.
        """
        request = DummyRequest([])
        finishDeferred = request.notifyFinish()
        callbackList = []
        finishDeferred.addCallback(callbackList.append)
        producer = static.SingleRangeStaticProducer(request, StringIO(b"abcdef"), 1, 1)
        # start calls registerProducer on the DummyRequest, which pulls all
        # output from the producer and so we just need this one call.
        producer.start()
        # The notifyFinish Deferred fired exactly once, with None.
        self.assertEqual([None], callbackList)
class MultipleRangeStaticProducerTests(TestCase):
    """
    Tests for L{MultipleRangeStaticProducer}.
    """

    def test_implementsIPullProducer(self):
        """
        L{MultipleRangeStaticProducer} implements L{IPullProducer}.
        """
        verifyObject(
            interfaces.IPullProducer,
            static.MultipleRangeStaticProducer(None, None, None),
        )

    def test_resumeProducingProducesContent(self):
        """
        L{MultipleRangeStaticProducer.resumeProducing} writes the requested
        chunks of content from the resource to the request, with the supplied
        boundaries in between each chunk.
        """
        request = DummyRequest([])
        content = b"abcdef"
        # The third argument is a list of (boundary, offset, size) tuples:
        # boundary b"1" then content[1:4], boundary b"2" then content[5:6].
        producer = static.MultipleRangeStaticProducer(
            request, StringIO(content), [(b"1", 1, 3), (b"2", 5, 1)]
        )
        # DummyRequest.registerProducer pulls all output from the producer, so
        # we just need to call start.
        producer.start()
        self.assertEqual(b"1bcd2f", b"".join(request.written))

    def test_resumeProducingBuffersOutput(self):
        """
        L{MultipleRangeStaticProducer.start} writes about
        C{abstract.FileDescriptor.bufferSize} bytes of content from the
        resource to the request at once.

        To be specific about the 'about' above: it can write slightly more,
        for example in the case where the first boundary plus the first chunk
        is less than C{bufferSize} but first boundary plus the first chunk
        plus the second boundary is more, but this is unimportant as in
        practice the boundaries are fairly small. On the other side, it is
        important for performance to bundle up several small chunks into one
        call to request.write.
        """
        request = DummyRequest([])
        content = b"0123456789" * 2
        producer = static.MultipleRangeStaticProducer(
            request, StringIO(content), [(b"a", 0, 2), (b"b", 5, 10), (b"c", 0, 0)]
        )
        # Shrink the buffer so the buffering behaviour is observable with a
        # tiny payload.
        producer.bufferSize = 10
        # DummyRequest.registerProducer pulls all output from the producer, so
        # we just need to call start.
        producer.start()
        expected = [
            b"a" + content[0:2] + b"b" + content[5:11],
            content[11:15] + b"c",
        ]
        self.assertEqual(expected, request.written)

    def test_finishCalledWhenDone(self):
        """
        L{MultipleRangeStaticProducer.resumeProducing} calls finish() on the
        request after it is done producing content.
        """
        request = DummyRequest([])
        finishDeferred = request.notifyFinish()
        callbackList = []
        finishDeferred.addCallback(callbackList.append)
        producer = static.MultipleRangeStaticProducer(
            request, StringIO(b"abcdef"), [(b"", 1, 2)]
        )
        # start calls registerProducer on the DummyRequest, which pulls all
        # output from the producer and so we just need this one call.
        producer.start()
        # The notifyFinish Deferred fired exactly once, with None.
        self.assertEqual([None], callbackList)
class RangeTests(TestCase):
    """
    Tests for I{Range-Header} support in L{twisted.web.static.File}.

    @type file: L{file}
    @ivar file: Temporary (binary) file containing the content to be served.

    @type resource: L{static.File}
    @ivar resource: A leaf web resource using C{file} as content.

    @type request: L{DummyRequest}
    @ivar request: A fake request, requesting C{resource}.

    @type catcher: L{list}
    @ivar catcher: List which gathers all log information.
    """

    def setUp(self):
        """
        Create a temporary file with a fixed payload of 64 bytes. Create a
        resource for that file and create a request which will be for that
        resource. Each test can set a different range header to test different
        aspects of the implementation.
        """
        path = FilePath(self.mktemp())
        # This is just a jumble of random stuff. It's supposed to be a good
        # set of data for this test, particularly in order to avoid
        # accidentally seeing the right result by having a byte sequence
        # repeated at different locations or by having byte values which are
        # somehow correlated with their position in the string.
        self.payload = (
            b"\xf8u\xf3E\x8c7\xce\x00\x9e\xb6a0y0S\xf0\xef\xac\xb7"
            b"\xbe\xb5\x17M\x1e\x136k{\x1e\xbe\x0c\x07\x07\t\xd0"
            b"\xbckY\xf5I\x0b\xb8\x88oZ\x1d\x85b\x1a\xcdk\xf2\x1d"
            b"&\xfd%\xdd\x82q/A\x10Y\x8b"
        )
        path.setContent(self.payload)
        self.file = path.open()
        self.resource = static.File(self.file.name)
        self.resource.isLeaf = 1
        self.request = DummyRequest([b""])
        self.request.uri = self.file.name
        self.catcher = []
        log.addObserver(self.catcher.append)

    def tearDown(self):
        """
        Clean up the resource file and the log observer.
        """
        self.file.close()
        # Bound methods of the same list compare equal, so this removes the
        # observer added in setUp.
        log.removeObserver(self.catcher.append)

    def _assertLogged(self, expected):
        """
        Asserts that a given log message occurred with an expected message.
        """
        logItem = self.catcher.pop()
        self.assertEqual(logItem["message"][0], expected)
        # No other log events may have been emitted during the test.
        self.assertEqual(
            self.catcher, [], "An additional log occurred: {!r}".format(logItem)
        )

    def test_invalidRanges(self):
        """
        L{File._parseRangeHeader} raises L{ValueError} when passed
        syntactically invalid byte ranges.
        """
        f = self.resource._parseRangeHeader
        # there's no =
        self.assertRaises(ValueError, f, b"bytes")
        # unknown isn't a valid Bytes-Unit
        self.assertRaises(ValueError, f, b"unknown=1-2")
        # there's no - in =stuff
        self.assertRaises(ValueError, f, b"bytes=3")
        # both start and end are empty
        self.assertRaises(ValueError, f, b"bytes=-")
        # start isn't an integer
        self.assertRaises(ValueError, f, b"bytes=foo-")
        # end isn't an integer
        self.assertRaises(ValueError, f, b"bytes=-foo")
        # end isn't equal to or greater than start
        self.assertRaises(ValueError, f, b"bytes=5-4")

    def test_rangeMissingStop(self):
        """
        A single bytes range without an explicit stop position is parsed into a
        two-tuple giving the start position and L{None}.
        """
        self.assertEqual(self.resource._parseRangeHeader(b"bytes=0-"), [(0, None)])

    def test_rangeMissingStart(self):
        """
        A single bytes range without an explicit start position is parsed into
        a two-tuple of L{None} and the end position.
        """
        self.assertEqual(self.resource._parseRangeHeader(b"bytes=-3"), [(None, 3)])

    def test_range(self):
        """
        A single bytes range with explicit start and stop positions is parsed
        into a two-tuple of those positions.
        """
        self.assertEqual(self.resource._parseRangeHeader(b"bytes=2-5"), [(2, 5)])

    def test_rangeWithSpace(self):
        """
        A single bytes range with whitespace in allowed places is parsed in
        the same way as it would be without the whitespace.
        """
        self.assertEqual(self.resource._parseRangeHeader(b" bytes=1-2 "), [(1, 2)])
        self.assertEqual(self.resource._parseRangeHeader(b"bytes =1-2 "), [(1, 2)])
        self.assertEqual(self.resource._parseRangeHeader(b"bytes= 1-2"), [(1, 2)])
        self.assertEqual(self.resource._parseRangeHeader(b"bytes=1 -2"), [(1, 2)])
        self.assertEqual(self.resource._parseRangeHeader(b"bytes=1- 2"), [(1, 2)])
        self.assertEqual(self.resource._parseRangeHeader(b"bytes=1-2 "), [(1, 2)])

    def test_nullRangeElements(self):
        """
        If there are multiple byte ranges but only one is non-null, the
        non-null range is parsed and its start and stop returned.
        """
        self.assertEqual(
            self.resource._parseRangeHeader(b"bytes=1-2,\r\n, ,\t"), [(1, 2)]
        )

    def test_multipleRanges(self):
        """
        If multiple byte ranges are specified their starts and stops are
        returned.
        """
        self.assertEqual(
            self.resource._parseRangeHeader(b"bytes=1-2,3-4"), [(1, 2), (3, 4)]
        )

    def test_bodyLength(self):
        """
        A correct response to a range request is as long as the length of the
        requested range.
        """
        # bytes=0-43 is inclusive on both ends, so 44 bytes are expected.
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=0-43")
        self.resource.render(self.request)
        self.assertEqual(len(b"".join(self.request.written)), 44)

    def test_invalidRangeRequest(self):
        """
        An incorrect range request (RFC 2616 defines a correct range request as
        a Bytes-Unit followed by a '=' character followed by a specific range.
        Only 'bytes' is defined) results in the range header value being logged
        and a normal 200 response being sent.
        """
        range = b"foobar=0-43"
        self.request.requestHeaders.addRawHeader(b"range", range)
        self.resource.render(self.request)
        expected = "Ignoring malformed Range header {!r}".format(range.decode())
        self._assertLogged(expected)
        # The malformed header is ignored: full payload, 200 OK.
        self.assertEqual(b"".join(self.request.written), self.payload)
        self.assertEqual(self.request.responseCode, http.OK)
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-length")[0],
            b"%d" % (len(self.payload),),
        )

    def parseMultipartBody(self, body, boundary):
        """
        Parse C{body} as a multipart MIME response separated by C{boundary}.

        Note that this with fail the calling test on certain syntactic
        problems.
        """
        sep = b"\r\n--" + boundary
        parts = body.split(sep)
        self.assertEqual(b"", parts[0])
        self.assertEqual(b"--\r\n", parts[-1])
        parsed_parts = []
        for part in parts[1:-1]:
            # Each part is CRLF, two headers, a blank line, then the body.
            before, header1, header2, blank, partBody = part.split(b"\r\n", 4)
            headers = header1 + b"\n" + header2
            self.assertEqual(b"", before)
            self.assertEqual(b"", blank)
            partContentTypeValue = re.search(
                b"^content-type: (.*)$", headers, re.I | re.M
            ).group(1)
            start, end, size = re.search(
                b"^content-range: bytes ([0-9]+)-([0-9]+)/([0-9]+)$",
                headers,
                re.I | re.M,
            ).groups()
            parsed_parts.append(
                {
                    b"contentType": partContentTypeValue,
                    b"contentRange": (start, end, size),
                    b"body": partBody,
                }
            )
        return parsed_parts

    def test_multipleRangeRequest(self):
        """
        The response to a request for multiple bytes ranges is a MIME-ish
        multipart response.
        """
        startEnds = [(0, 2), (20, 30), (40, 50)]
        rangeHeaderValue = b",".join(
            [networkString("{}-{}".format(s, e)) for (s, e) in startEnds]
        )
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=" + rangeHeaderValue)
        self.resource.render(self.request)
        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
        boundary = re.match(
            b'^multipart/byteranges; boundary="(.*)"$',
            self.request.responseHeaders.getRawHeaders(b"content-type")[0],
        ).group(1)
        parts = self.parseMultipartBody(b"".join(self.request.written), boundary)
        self.assertEqual(len(startEnds), len(parts))
        for part, (s, e) in zip(parts, startEnds):
            self.assertEqual(networkString(self.resource.type), part[b"contentType"])
            start, end, size = part[b"contentRange"]
            self.assertEqual(int(start), s)
            self.assertEqual(int(end), e)
            self.assertEqual(int(size), self.resource.getFileSize())
            # Ranges are inclusive, hence e + 1 in the slice.
            self.assertEqual(self.payload[s : e + 1], part[b"body"])

    def test_multipleRangeRequestWithRangeOverlappingEnd(self):
        """
        The response to a request for multiple bytes ranges is a MIME-ish
        multipart response, even when one of the ranged falls off the end of
        the resource.
        """
        startEnds = [(0, 2), (40, len(self.payload) + 10)]
        rangeHeaderValue = b",".join(
            [networkString("{}-{}".format(s, e)) for (s, e) in startEnds]
        )
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=" + rangeHeaderValue)
        self.resource.render(self.request)
        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
        boundary = re.match(
            b'^multipart/byteranges; boundary="(.*)"$',
            self.request.responseHeaders.getRawHeaders(b"content-type")[0],
        ).group(1)
        parts = self.parseMultipartBody(b"".join(self.request.written), boundary)
        self.assertEqual(len(startEnds), len(parts))
        for part, (s, e) in zip(parts, startEnds):
            self.assertEqual(networkString(self.resource.type), part[b"contentType"])
            start, end, size = part[b"contentRange"]
            self.assertEqual(int(start), s)
            # An over-long range is clamped to the last byte of the resource.
            self.assertEqual(int(end), min(e, self.resource.getFileSize() - 1))
            self.assertEqual(int(size), self.resource.getFileSize())
            self.assertEqual(self.payload[s : e + 1], part[b"body"])

    def test_implicitEnd(self):
        """
        If the end byte position is omitted, then it is treated as if the
        length of the resource was specified by the end byte position.
        """
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=23-")
        self.resource.render(self.request)
        self.assertEqual(b"".join(self.request.written), self.payload[23:])
        self.assertEqual(len(b"".join(self.request.written)), 41)
        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-range")[0],
            b"bytes 23-63/64",
        )
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-length")[0], b"41"
        )

    def test_implicitStart(self):
        """
        If the start byte position is omitted but the end byte position is
        supplied, then the range is treated as requesting the last -N bytes of
        the resource, where N is the end byte position.
        """
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=-17")
        self.resource.render(self.request)
        self.assertEqual(b"".join(self.request.written), self.payload[-17:])
        self.assertEqual(len(b"".join(self.request.written)), 17)
        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-range")[0],
            b"bytes 47-63/64",
        )
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-length")[0], b"17"
        )

    def test_explicitRange(self):
        """
        A correct response to a bytes range header request from A to B starts
        with the A'th byte and ends with (including) the B'th byte. The first
        byte of a page is numbered with 0.
        """
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=3-43")
        self.resource.render(self.request)
        written = b"".join(self.request.written)
        self.assertEqual(written, self.payload[3:44])
        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-range")[0],
            b"bytes 3-43/64",
        )
        self.assertEqual(
            b"%d" % (len(written),),
            self.request.responseHeaders.getRawHeaders(b"content-length")[0],
        )

    def test_explicitRangeOverlappingEnd(self):
        """
        A correct response to a bytes range header request from A to B when B
        is past the end of the resource starts with the A'th byte and ends
        with the last byte of the resource. The first byte of a page is
        numbered with 0.
        """
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=40-100")
        self.resource.render(self.request)
        written = b"".join(self.request.written)
        self.assertEqual(written, self.payload[40:])
        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-range")[0],
            b"bytes 40-63/64",
        )
        self.assertEqual(
            b"%d" % (len(written),),
            self.request.responseHeaders.getRawHeaders(b"content-length")[0],
        )

    def test_statusCodeRequestedRangeNotSatisfiable(self):
        """
        If a range is syntactically invalid due to the start being greater than
        the end, the range header is ignored (the request is responded to as if
        it were not present).
        """
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=20-13")
        self.resource.render(self.request)
        self.assertEqual(self.request.responseCode, http.OK)
        self.assertEqual(b"".join(self.request.written), self.payload)
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-length")[0],
            b"%d" % (len(self.payload),),
        )

    def test_invalidStartBytePos(self):
        """
        If a range is unsatisfiable due to the start not being less than the
        length of the resource, the response is 416 (Requested range not
        satisfiable) and no data is written to the response body (RFC 2616,
        section 14.35.1).
        """
        self.request.requestHeaders.addRawHeader(b"range", b"bytes=67-108")
        self.resource.render(self.request)
        self.assertEqual(
            self.request.responseCode, http.REQUESTED_RANGE_NOT_SATISFIABLE
        )
        self.assertEqual(b"".join(self.request.written), b"")
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-length")[0], b"0"
        )
        # Sections 10.4.17 and 14.16
        self.assertEqual(
            self.request.responseHeaders.getRawHeaders(b"content-range")[0],
            networkString("bytes */%d" % (len(self.payload),)),
        )
class DirectoryListerTests(TestCase):
    """
    Tests for L{static.DirectoryLister}.
    """

    def _request(self, uri):
        # Helper: build a fake request whose URI (bytes) is C{uri}.
        request = DummyRequest([b""])
        request.uri = uri
        return request

    def test_renderHeader(self):
        """
        L{static.DirectoryLister} prints the request uri as header of the
        rendered content.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        lister = static.DirectoryLister(path.path)
        data = lister.render(self._request(b"foo"))
        self.assertIn(b"<h1>Directory listing for foo</h1>", data)
        self.assertIn(b"<title>Directory listing for foo</title>", data)

    def test_renderUnquoteHeader(self):
        """
        L{static.DirectoryLister} unquote the request uri before printing it.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        lister = static.DirectoryLister(path.path)
        # %20 must be rendered as a literal space.
        data = lister.render(self._request(b"foo%20bar"))
        self.assertIn(b"<h1>Directory listing for foo bar</h1>", data)
        self.assertIn(b"<title>Directory listing for foo bar</title>", data)

    def test_escapeHeader(self):
        """
        L{static.DirectoryLister} escape "&", "<" and ">" after unquoting the
        request uri.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        lister = static.DirectoryLister(path.path)
        # %26 unquotes to "&", which must then be HTML-escaped to "&amp;".
        data = lister.render(self._request(b"foo%26bar"))
        self.assertIn(b"<h1>Directory listing for foo&amp;bar</h1>", data)
        self.assertIn(b"<title>Directory listing for foo&amp;bar</title>", data)

    def test_renderFiles(self):
        """
        L{static.DirectoryLister} is able to list all the files inside a
        directory.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        path.child("file1").setContent(b"content1")
        path.child("file2").setContent(b"content2" * 1000)
        lister = static.DirectoryLister(path.path)
        data = lister.render(self._request(b"foo"))
        # NOTE(review): the indentation inside this literal must match the
        # row template used by DirectoryLister._buildTableContent exactly.
        body = b"""<tr class="odd">
    <td><a href="file1">file1</a></td>
    <td>8B</td>
    <td>[text/html]</td>
    <td></td>
</tr>
<tr class="even">
    <td><a href="file2">file2</a></td>
    <td>7K</td>
    <td>[text/html]</td>
    <td></td>
</tr>"""
        self.assertIn(body, data)

    def test_renderDirectories(self):
        """
        L{static.DirectoryLister} is able to list all the directories inside
        a directory.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        path.child("dir1").makedirs()
        path.child("dir2 & 3").makedirs()
        lister = static.DirectoryLister(path.path)
        data = lister.render(self._request(b"foo"))
        # The href is URL-quoted while the link text is HTML-escaped.
        body = b"""<tr class="odd">
    <td><a href="dir1/">dir1/</a></td>
    <td></td>
    <td>[Directory]</td>
    <td></td>
</tr>
<tr class="even">
    <td><a href="dir2%20%26%203/">dir2 &amp; 3/</a></td>
    <td></td>
    <td>[Directory]</td>
    <td></td>
</tr>"""
        self.assertIn(body, data)

    def test_renderFiltered(self):
        """
        L{static.DirectoryLister} takes an optional C{dirs} argument that
        filter out the list of directories and files printed.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        path.child("dir1").makedirs()
        path.child("dir2").makedirs()
        path.child("dir3").makedirs()
        # Only dir1 and dir3 are whitelisted; dir2 must not appear.
        lister = static.DirectoryLister(path.path, dirs=["dir1", "dir3"])
        data = lister.render(self._request(b"foo"))
        body = b"""<tr class="odd">
    <td><a href="dir1/">dir1/</a></td>
    <td></td>
    <td>[Directory]</td>
    <td></td>
</tr>
<tr class="even">
    <td><a href="dir3/">dir3/</a></td>
    <td></td>
    <td>[Directory]</td>
    <td></td>
</tr>"""
        self.assertIn(body, data)

    def test_oddAndEven(self):
        """
        L{static.DirectoryLister} gives an alternate class for each odd and
        even rows in the table.
        """
        lister = static.DirectoryLister(None)
        elements = [
            {"href": "", "text": "", "size": "", "type": "", "encoding": ""}
            for i in range(5)
        ]
        content = lister._buildTableContent(elements)

        self.assertEqual(len(content), 5)
        self.assertTrue(content[0].startswith('<tr class="odd">'))
        self.assertTrue(content[1].startswith('<tr class="even">'))
        self.assertTrue(content[2].startswith('<tr class="odd">'))
        self.assertTrue(content[3].startswith('<tr class="even">'))
        self.assertTrue(content[4].startswith('<tr class="odd">'))

    def test_contentType(self):
        """
        L{static.DirectoryLister} produces a MIME-type that indicates that it is
        HTML, and includes its charset (UTF-8).
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        lister = static.DirectoryLister(path.path)
        req = self._request(b"")
        lister.render(req)
        self.assertEqual(
            req.responseHeaders.getRawHeaders(b"content-type")[0],
            b"text/html; charset=utf-8",
        )

    def test_mimeTypeAndEncodings(self):
        """
        L{static.DirectoryLister} is able to detect mimetype and encoding of
        listed files.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        path.child("file1.txt").setContent(b"file1")
        path.child("file2.py").setContent(b"python")
        path.child("file3.conf.gz").setContent(b"conf compressed")
        path.child("file4.diff.bz2").setContent(b"diff compressed")
        directory = os.listdir(path.path)
        directory.sort()

        contentTypes = {
            ".txt": "text/plain",
            ".py": "text/python",
            ".conf": "text/configuration",
            ".diff": "text/diff",
        }

        lister = static.DirectoryLister(path.path, contentTypes=contentTypes)
        dirs, files = lister._getFilesAndDirectories(directory)
        self.assertEqual(dirs, [])
        # Compressed files report both the inner content type and the
        # compression encoding.
        self.assertEqual(
            files,
            [
                {
                    "encoding": "",
                    "href": "file1.txt",
                    "size": "5B",
                    "text": "file1.txt",
                    "type": "[text/plain]",
                },
                {
                    "encoding": "",
                    "href": "file2.py",
                    "size": "6B",
                    "text": "file2.py",
                    "type": "[text/python]",
                },
                {
                    "encoding": "[gzip]",
                    "href": "file3.conf.gz",
                    "size": "15B",
                    "text": "file3.conf.gz",
                    "type": "[text/configuration]",
                },
                {
                    "encoding": "[bzip2]",
                    "href": "file4.diff.bz2",
                    "size": "15B",
                    "text": "file4.diff.bz2",
                    "type": "[text/diff]",
                },
            ],
        )

    @skipIf(not platform._supportsSymlinks(), "No symlink support")
    def test_brokenSymlink(self):
        """
        If on the file in the listing points to a broken symlink, it should not
        be returned by L{static.DirectoryLister._getFilesAndDirectories}.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        file1 = path.child("file1")
        file1.setContent(b"file1")
        file1.linkTo(path.child("file2"))
        # Removing the target makes file2 a dangling symlink.
        file1.remove()

        lister = static.DirectoryLister(path.path)
        directory = os.listdir(path.path)
        directory.sort()
        dirs, files = lister._getFilesAndDirectories(directory)
        self.assertEqual(dirs, [])
        self.assertEqual(files, [])

    def test_childrenNotFound(self):
        """
        Any child resource of L{static.DirectoryLister} renders an HTTP
        I{NOT FOUND} response code.
        """
        path = FilePath(self.mktemp())
        path.makedirs()
        lister = static.DirectoryLister(path.path)
        request = self._request(b"")
        child = resource.getChildForRequest(lister, request)
        result = _render(child, request)

        def cbRendered(ignored):
            self.assertEqual(request.responseCode, http.NOT_FOUND)

        result.addCallback(cbRendered)
        return result

    def test_repr(self):
        """
        L{static.DirectoryLister.__repr__} gives the path of the lister.
        """
        path = FilePath(self.mktemp())
        lister = static.DirectoryLister(path.path)
        self.assertEqual(repr(lister), "<DirectoryLister of {!r}>".format(path.path))
        self.assertEqual(str(lister), "<DirectoryLister of {!r}>".format(path.path))

    def test_formatFileSize(self):
        """
        L{static.formatFileSize} format an amount of bytes into a more readable
        format.
        """
        self.assertEqual(static.formatFileSize(0), "0B")
        self.assertEqual(static.formatFileSize(123), "123B")
        self.assertEqual(static.formatFileSize(4567), "4K")
        self.assertEqual(static.formatFileSize(8900000), "8M")
        self.assertEqual(static.formatFileSize(1234000000), "1G")
        # Sizes beyond a terabyte still render in gigabytes.
        self.assertEqual(static.formatFileSize(1234567890000), "1149G")
class LoadMimeTypesTests(TestCase):
    """
    Tests for the MIME type loading routine.

    @cvar UNSET: A sentinel to signify that C{self.paths} has not been set by
        the mock init.
    """

    UNSET = object()

    def setUp(self):
        # Start every test with the sentinel so we can tell whether the fake
        # init was invoked.
        self.paths = self.UNSET

    def _fakeInit(self, paths):
        """
        A mock L{mimetypes.init} that records the value of the passed C{paths}
        argument.

        @param paths: The paths that will be recorded.
        """
        self.paths = paths

    def test_defaultArgumentIsNone(self):
        """
        By default, L{None} is passed to C{mimetypes.init}.
        """
        static.loadMimeTypes(init=self._fakeInit)
        self.assertIdentical(self.paths, None)

    def test_extraLocationsWork(self):
        """
        Passed MIME type files are passed to C{mimetypes.init}.
        """
        paths = ["x", "y", "z"]
        static.loadMimeTypes(paths, init=self._fakeInit)
        self.assertIdentical(self.paths, paths)

    def test_usesGlobalInitFunction(self):
        """
        By default, C{mimetypes.init} is called.
        """
        # Checking mimetypes.inited doesn't always work, because
        # something, somewhere, calls mimetypes.init. Yay global
        # mutable state :)
        if getattr(inspect, "signature", None):
            signature = inspect.signature(static.loadMimeTypes)
            self.assertIs(signature.parameters["init"].default, mimetypes.init)
        else:
            # Legacy fallback: inspect.getargspec was removed in Python 3.11,
            # but this branch is unreachable there because inspect.signature
            # always exists on Python 3.3+.
            args, _, _, defaults = inspect.getargspec(static.loadMimeTypes)
            defaultInit = defaults[args.index("init")]
            self.assertIs(defaultInit, mimetypes.init)
class StaticDeprecationTests(TestCase):
    """
    Tests for deprecated names in L{twisted.web.static}.
    """

    def test_addSlashDeprecated(self):
        """
        L{twisted.web.static.addSlash} is deprecated.
        """
        from twisted.web.static import addSlash

        addSlash(DummyRequest([b""]))
        # Scope flushWarnings to this test method so warnings emitted by
        # other code do not leak into the assertion.
        warnings = self.flushWarnings([self.test_addSlashDeprecated])
        self.assertEqual(len(warnings), 1)
        self.assertEqual(
            warnings[0]["message"],
            "twisted.web.static.addSlash was deprecated in Twisted 16.0.0",
        )
|
// app/calc.js
/**
 * Sum the numeric elements of an array.
 *
 * @param {number[]} arr - Values to add together.
 * @returns {number} The total; 0 for an empty array (reduce's initial value).
 */
function sum(arr) {
  return arr.reduce((total, value) => total + value, 0);
}

// Guarded so the file also loads cleanly in ES-module environments,
// where `module` is not defined; CommonJS behaviour is unchanged.
if (typeof module !== "undefined" && module.exports) {
  module.exports.sum = sum;
}
|
from typing import Any, List, Literal, TypedDict
from .FHIR_Element import FHIR_Element
from .FHIR_Reference import FHIR_Reference
from .FHIR_string import FHIR_string
# Demographics and other administrative information about an individual or animal receiving care or other health-related services.
class FHIR_Patient_Link(TypedDict, total=False):
    """
    Link to another patient resource that concerns the same actual patient.
    Part of the Patient resource: demographics and other administrative
    information about an individual or animal receiving care or other
    health-related services.  All keys are optional (``total=False``).
    """

    # Unique id for the element within a resource (for internal references).
    # This may be any string value that does not contain spaces.
    id: FHIR_string
    # May be used to represent additional information that is not part of the
    # basic definition of the element.  To make the use of extensions safe and
    # manageable, there is a strict set of governance applied to the
    # definition and use of extensions.  Though any implementer can define an
    # extension, there is a set of requirements that SHALL be met as part of
    # the definition of the extension.
    extension: List[Any]
    # May be used to represent additional information that is not part of the
    # basic definition of the element and that modifies the understanding of
    # the element in which it is contained and/or the understanding of the
    # containing element's descendants.  Usually modifier elements provide
    # negation or qualification.  Applications processing a resource are
    # required to check for modifier extensions.  Modifier extensions SHALL
    # NOT change the meaning of any elements on Resource or DomainResource
    # (including cannot change the meaning of modifierExtension itself).
    modifierExtension: List[Any]
    # The other patient resource that the link refers to.
    other: FHIR_Reference
    # The type of link between this patient resource and another patient
    # resource.
    type: Literal["replaced-by", "replaces", "refer", "seealso"]
    # Extensions for type
    _type: FHIR_Element
|
# flake8: noqa
"""main function (need setup.py)"""
from .main import main
|
import { axiosWithAuth } from "../utils/axiosWithAuth";
// Action type identifiers consumed by the admin/user reducer.
export const SET_ADMIN = "SET_ADMIN";
export const GET_USERS = "GET_USERS";
export const ADMIN_DEL_USER = "ADMIN_DEL_USER";
/**
 * Thunk action creator: fetch the complete user list from the API and
 * dispatch a GET_USERS action carrying the response data.
 *
 * @param {*} data - Unused; kept for signature compatibility with callers.
 * @returns {Function} A thunk taking the Redux `dispatch` function.
 */
export const getUsers = data => dispatch => {
  axiosWithAuth()
    .get("https://bookedup-pt9.herokuapp.com/api/users/")
    .then(res => {
      // Removed a leftover debug console.log of the response payload.
      dispatch({ type: GET_USERS, payload: res.data });
    })
    // Log the whole error object (status, stack), not just err.message,
    // and use console.error so it is surfaced as a failure.
    .catch(err => console.error("getUsers failed:", err));
};
/**
 * Thunk action creator: delete the user identified by `data[0]` via the API,
 * dispatch ADMIN_DEL_USER, notify the admin, and reload the page so the
 * user list refreshes.
 *
 * @param {Array} data - Tuple whose first element is the user id to delete.
 * @returns {Function} A thunk taking the Redux `dispatch` function.
 */
export const AdminDeleteUser = (data) => dispatch => {
  axiosWithAuth()
    .delete(`https://bookedup-pt9.herokuapp.com/api/users/${data[0]}`)
    .then(() => {
      dispatch({ type: ADMIN_DEL_USER, payload: data });
      // Fixed user-facing grammar ("have" -> "has").
      alert('Account has been deleted');
      window.location.reload();
    })
    // Surface failures as errors instead of plain logs.
    .catch(err => console.error(err));
};
/**
 * Thunk action creator: record the current user's admin status in the store.
 *
 * @param {*} data - The admin flag/payload to store.
 * @returns {Function} A thunk taking the Redux `dispatch` function.
 */
export const setAdmin = (data) => (dispatch) => {
  dispatch({ type: SET_ADMIN, payload: data });
};
|
//! moment.js locale configuration
//! locale : Arabic [ar]
//! author : Abdel Said: https://github.com/abdelsaid
//! author : Ahmed Elkhatib
//! author : forabi https://github.com/forabi
import moment from '../moment';
// Western (Latin) digits -> Arabic-Indic digits; applied by postformat().
var symbolMap = {
    '1': '١',
    '2': '٢',
    '3': '٣',
    '4': '٤',
    '5': '٥',
    '6': '٦',
    '7': '٧',
    '8': '٨',
    '9': '٩',
    '0': '٠'
}, numberMap = { // inverse map, applied by preparse() before parsing input
    '١': '1',
    '٢': '2',
    '٣': '3',
    '٤': '4',
    '٥': '5',
    '٦': '6',
    '٧': '7',
    '٨': '8',
    '٩': '9',
    '٠': '0'
}, pluralForm = function (n) {
    // Arabic plural category index: 0=zero, 1=one, 2=two(dual),
    // 3=few (3-10), 4=many (11+), 5=other.
    return n === 0 ? 0 : n === 1 ? 1 : n === 2 ? 2 : n % 100 >= 3 && n % 100 <= 10 ? 3 : n % 100 >= 11 ? 4 : 5;
}, plurals = {
    // Per-unit strings indexed by pluralForm(); index 2 is a pair of dual
    // forms selected by `withoutSuffix` in pluralize() below.
    s : ['أقل من ثانية', 'ثانية واحدة', ['ثانيتان', 'ثانيتين'], '%d ثوان', '%d ثانية', '%d ثانية'],
    m : ['أقل من دقيقة', 'دقيقة واحدة', ['دقيقتان', 'دقيقتين'], '%d دقائق', '%d دقيقة', '%d دقيقة'],
    h : ['أقل من ساعة', 'ساعة واحدة', ['ساعتان', 'ساعتين'], '%d ساعات', '%d ساعة', '%d ساعة'],
    d : ['أقل من يوم', 'يوم واحد', ['يومان', 'يومين'], '%d أيام', '%d يومًا', '%d يوم'],
    M : ['أقل من شهر', 'شهر واحد', ['شهران', 'شهرين'], '%d أشهر', '%d شهرا', '%d شهر'],
    y : ['أقل من عام', 'عام واحد', ['عامان', 'عامين'], '%d أعوام', '%d عامًا', '%d عام']
}, pluralize = function (u) {
    // Builds a moment relativeTime handler for unit `u`.
    return function (number, withoutSuffix, string, isFuture) {
        var f = pluralForm(number),
            str = plurals[u][pluralForm(number)];
        if (f === 2) {
            str = str[withoutSuffix ? 0 : 1];
        }
        return str.replace(/%d/i, number);
    };
}, months = [
    // Gregorian month names; reused for both long and short forms below.
    'يناير',
    'فبراير',
    'مارس',
    'أبريل',
    'مايو',
    'يونيو',
    'يوليو',
    'أغسطس',
    'سبتمبر',
    'أكتوبر',
    'نوفمبر',
    'ديسمبر'
];
// Arabic [ar] locale: Arabic-Indic digits, Arabic comma, Saturday-first
// weeks and six-form pluralised relative times (see pluralize above).
export default moment.defineLocale('ar', {
    months : months,
    monthsShort : months,
    weekdays : 'الأحد_الإثنين_الثلاثاء_الأربعاء_الخميس_الجمعة_السبت'.split('_'),
    weekdaysShort : 'أحد_إثنين_ثلاثاء_أربعاء_خميس_جمعة_سبت'.split('_'),
    weekdaysMin : 'ح_ن_ث_ر_خ_ج_س'.split('_'),
    weekdaysParseExact : true,
    longDateFormat : {
        LT : 'HH:mm',
        LTS : 'HH:mm:ss',
        // \u200F (right-to-left mark) keeps the slashes ordered correctly
        // when the date is embedded in RTL text.
        L : 'D/\u200FM/\u200FYYYY',
        LL : 'D MMMM YYYY',
        LLL : 'D MMMM YYYY HH:mm',
        LLLL : 'dddd D MMMM YYYY HH:mm'
    },
    // AM = 'ص', PM = 'م'.
    meridiemParse: /ص|م/,
    isPM : function (input) {
        return 'م' === input;
    },
    meridiem : function (hour, minute, isLower) {
        if (hour < 12) {
            return 'ص';
        } else {
            return 'م';
        }
    },
    calendar : {
        sameDay: '[اليوم عند الساعة] LT',
        nextDay: '[غدًا عند الساعة] LT',
        nextWeek: 'dddd [عند الساعة] LT',
        lastDay: '[أمس عند الساعة] LT',
        lastWeek: 'dddd [عند الساعة] LT',
        sameElse: 'L'
    },
    relativeTime : {
        future : 'بعد %s',
        past : 'منذ %s',
        s : pluralize('s'),
        ss : pluralize('s'),
        m : pluralize('m'),
        mm : pluralize('m'),
        h : pluralize('h'),
        hh : pluralize('h'),
        d : pluralize('d'),
        dd : pluralize('d'),
        M : pluralize('M'),
        MM : pluralize('M'),
        y : pluralize('y'),
        yy : pluralize('y')
    },
    // Convert Arabic-Indic digits and the Arabic comma to Latin before parsing.
    preparse: function (string) {
        return string.replace(/[١٢٣٤٥٦٧٨٩٠]/g, function (match) {
            return numberMap[match];
        }).replace(/،/g, ',');
    },
    // Render Latin digits and commas back in their Arabic forms.
    postformat: function (string) {
        return string.replace(/\d/g, function (match) {
            return symbolMap[match];
        }).replace(/,/g, '،');
    },
    week : {
        dow : 6, // Saturday is the first day of the week.
        doy : 12 // The week that contains Jan 12th is the first week of the year.
    }
});
|
require("./Footer.tea");
const {Block,Text,Logo,BlockColor,Switch,Dialog} = require("../../internal");
// Landing-page footer block: logo, description text, an optional information
// policy dialog and a contact phone column. Markup is produced through the
// `html` tagged-template helper shared by all blocks.
class Footer extends Block {
    static get title() { return _t('Footer') }
    static get description() { return _t('The final block') }
    // Editor settings dialog: policy visibility toggle + background colour.
    configForm() {
        return html`
            <${Dialog}>
                <${Switch} name="show_policy" label="${_t("Show policy")}" showWhen=${{variant:[1]}} />
                <label value=${_t("Background color:")} />
                <${BlockColor} name="background_color" />
            <//>
        `;
    }
    // Template for variant 1; `val` holds the block's saved field values.
    tpl_1(val) {
        return html`
            <div class="container-fluid footer footer_1" style="background: ${val.background_color}">
                <div class="container">
                    <div class="row">
                        <div class="col-3">
                            <${Logo} name="logo" />
                        </div>
                        <div class="col-4">
                            <div class="desc">
                                <${Text} name="desc" options=${Text.default_text} />
                            </div>
                            ${ val.show_policy && html`
                                <div class="policy_wrap">
                                    <a class="policy" href="#" onClick=${(e)=>{e.preventDefault();this.policyDialog.open()}}>${_t("Information policy")}</a>
                                    <${Dialog} ref=${(r)=>this.policyDialog=r} class="policy_dialog" scrollable=${true} overlayColor="rgba(0,0,0,0.5)">
                                        <div class="policy_info">
                                            <${Text} name="policy_info" options=${Text.size_text} />
                                        </div>
                                    <//>
                                </div>
                            `}
                        </div>
                        <div class="col-4 before-1">
                            <div class="phone">
                                <${Text} name="phone" options=${Text.default_heading} />
                            </div>
                            <div class="phone_desc">
                                <${Text} name="phone_desc" options=${Text.default_text} />
                            </div>
                        </div>
                    </div>
                </div>
            </div>
        `
    }
    // Default content for variant 1, localised via the global `config.language`
    // (English vs Russian demo content, including the policy dialog body).
    tpl_default_1() {
        return config.language == 'en' ? {
            'show_policy': true,
            'background_color': '#FFFFFF',
            'logo': {...Logo.tpl_default(), 'size': 87 },
            'desc': "The best circus «One and the same are at the circus ring»,<br>Moscow, Color Blvd., 13",
            'phone': '+7 (495) 321-46-98',
            'phone_desc': 'Free call from Venus',
            'policy_info': `
                <h2>Privacy Policy</h2>
                <p>This privacy policy discloses the privacy practices for lpcandy.ru. This privacy policy applies solely to information collected by this web site.</p>
                <p>It will notify you of the following:</p>
                <ul>
                    <li>What personally identifiable information is collected from you through the web site, how it is used and with whom it may be shared.</li>
                    <li>What choices are available to you regarding the use of your data.</li>
                    <li>The security procedures in place to protect the misuse of your information.</li>
                </ul>
                <p>How you can correct any inaccuracies in the information.</p>
                <h3>Information Collection, Use, and Sharing</h3>
                <p>We are the sole owners of the information collected on this site. We only have access to/collect information that you voluntarily give us via email or other direct contact from you. We will not sell or rent this information to anyone.</p>
                <p>We will use your information to respond to you, regarding the reason you contacted us. We will not share your information with any third party outside of our organization, other than as necessary to fulfill your request, e.g. to ship an order.</p>
                <p>Unless you ask us not to, we may contact you via email in the future to tell you about specials, new products or services, or changes to this privacy policy.</p>
                <h3>Your Access to and Control Over Information</h3>
                <p>You may opt out of any future contacts from us at any time. You can do the following at any time by contacting us via the email address or phone number given on our website:<p>
                <ul>
                    <li>See what data we have about you, if any.</li>
                    <li>Change/correct any data we have about you.</li>
                    <li>Have us delete any data we have about you.</li>
                    <li>Express any concern you have about our use of your data.</li>
                </ul>
                <h3>Security</h3>
                <p>We take precautions to protect your information. When you submit sensitive information via the website, your information is protected both online and offline.</p>
                <p>Wherever we collect sensitive information (such as credit card data), that information is encrypted and transmitted to us in a secure way. You can verify this by looking for a closed lock icon at the bottom of your web browser, or looking for "https" at the beginning of the address of the web page.</p>
                <p>While we use encryption to protect sensitive information transmitted online, we also protect your information offline. Only employees who need the information to perform a specific job (for example, billing or customer service) are granted access to personally identifiable information. The computers/servers in which we store personally identifiable information are kept in a secure environment.</p>
                <h3>Updates</h3>
                <p>Our Privacy Policy may change from time to time and all updates will be posted on this page.</p>
                <p>If you feel that we are not abiding by this privacy policy, you should contact us immediately via email.</p>
            `,
        } : {
            'show_policy': true,
            'background_color': '#FFFFFF',
            'logo': {...Logo.tpl_default(), 'size': 87 },
            'desc': "Лучший цирк «НА МАНЕЖЕ ВСЕ ТЕ ЖЕ»,<br>г.Москва, Цветной бульвар, 13",
            'phone': '+7 (495) 321-46-98',
            'phone_desc': 'Звонок с Венеры бесплатный',
            'policy_info': `
                <h2>ПОЛИТИКА КОНФИДЕНЦИАЛЬНОСТИ</h2>
                <p>Соблюдение Вашей конфиденциальности важно для нас. По этой причине, мы разработали Политику Конфиденциальности, которая описывает, как мы используем и храним Вашу информацию. Пожалуйста, ознакомьтесь с нашими правилами соблюдения конфиденциальности и сообщите нам, если у вас возникнут какие-либо вопросы.</p>
                <h3>Сбор и использование персональной информации</h3>
                <p>Под персональной информацией понимаются данные, которые могут быть использованы для идентификации определенного лица либо связи с ним.</p>
                <p>От вас может быть запрошено предоставление вашей персональной информации в любой момент, когда вы связываетесь с нами. </p>
                <p>Ниже приведены некоторые примеры типов персональной информации, которую мы можем собирать, и как мы можем использовать такую информацию.</p>
                <h4>Какую персональную информацию мы собираем</h4>
                <ul>
                    <li>Когда вы оставляете заявку на сайте, мы можем собирать различную информацию, включая ваши имя, номер телефона, адрес электронной почты и т.д.</li>
                </ul>
                <h4>Как мы используем вашу персональную информацию</h4>
                <ul>
                    <li>Собираемая нами персональная информация позволяет нам связываться с вами и сообщать об уникальных предложениях, акциях и других мероприятиях и ближайших событиях. </li>
                    <li>Время от времени, мы можем использовать вашу персональную информацию для отправки важных уведомлений и сообщений. </li>
                    <li>Мы также можем использовать персональную информацию для внутренних целей, таких как проведения аудита, анализа данных и различных исследований в целях улучшения услуг предоставляемых нами и предоставления Вам рекомендаций относительно наших услуг.</li>
                    <li>Если вы принимаете участие в розыгрыше призов, конкурсе или сходном стимулирующем мероприятии, мы можем использовать предоставляемую вами информацию для управления такими программами.</li>
                </ul>
                <h3>Раскрытие информации третьим лицам</h3>
                <p>Мы не раскрываем полученную от Вас информацию третьим лицам. </p>
                <h4>Исключения</h4>
                <p>В случае если необходимо — в соответствии с законом, судебным порядком, в судебном разбирательстве, и/или на основании публичных запросов или запросов от государственных органов на территории РФ — раскрыть вашу персональную информацию. Мы также можем раскрывать информацию о вас если мы определим, что такое раскрытие необходимо или уместно в целях безопасности, поддержания правопорядка, или иных общественно важных случаях.</p>
                <p>В случае реорганизации, слияния или продажи мы можем передать собираемую нами персональную информацию соответствующему третьему лицу – правопреемнику.</p>
                <h3>Защита персональной информации</h3>
                <p>Мы предпринимаем меры предосторожности — включая административные, технические и физические — для защиты вашей персональной информации от утраты, кражи, и недобросовестного использования, а также от несанкционированного доступа, раскрытия, изменения и уничтожения.</p>
                <h3>Соблюдение вашей конфиденциальности на уровне компании</h3>
                <p>Для того чтобы убедиться, что ваша персональная информация находится в безопасности, мы доводим нормы соблюдения конфиденциальности и безопасности до наших сотрудников, и строго следим за исполнением мер соблюдения конфиденциальности.</p>
            `,
        }
    }
}
// Register the block with the page-builder registry under the name 'Footer'.
Block.register('Footer',exports = Footer);
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import oneflow as flow
from oneflow.python.nn.module import Module
from oneflow.python.oneflow_export import oneflow_export, experimental_api
from oneflow.python.framework.tensor import register_tensor_op
class Eq(Module):
    """Element-wise equality implemented with the ``broadcast_equal`` builtin op.

    ``forward(input, other)`` returns the op's first output: a tensor with 1
    where the elements compare equal and 0 elsewhere (see ``eq_op`` below).
    """

    def __init__(self) -> None:
        super().__init__()
        self.eq_op = (
            flow.builtin_op("broadcast_equal").Input("x").Input("y").Output("z").Build()
        )

    def forward(self, input, other):
        if isinstance(other, flow.Tensor):
            # NOTE(review): this loop indexes other.shape with input's axis
            # count, so it raises IndexError when `other` has fewer dims —
            # confirm the intended broadcast contract before relying on it.
            for i in range(len(input.size())):
                assert (
                    input.shape[i] >= other.shape[i]
                ), "The second tensor's shape should be broadcastable with the first argument."
            return self.eq_op(input, other)[0]
        if isinstance(other, (int, float)):
            # Scalar operands are documented in eq_op but not implemented yet.
            raise NotImplementedError(
                "Unsupported data type: int and float scalars are not supported yet!"
            )
        raise NotImplementedError(
            "Unsupported data type: the second argument must be a tensor whose shape is broadcastable with the first argument."
        )
@oneflow_export("eq", "equal")
@register_tensor_op("eq")
@experimental_api
def eq_op(input, other):
    r"""
    Computes element-wise equality.
    The second argument can be a number or a tensor whose shape is broadcastable with the first argument.
    (NOTE: number operands currently raise ``NotImplementedError``; see ``Eq.forward``.)
    Args:
        input (oneflow.Tensor): the tensor to compare
        other (oneflow.Tensor): the tensor to compare
    Returns:
        - A boolean tensor that is True where :attr:`input` is equal to :attr:`other` and False elsewhere
    For example:
    .. code-block:: python
        >>> import oneflow.experimental as flow
        >>> import numpy as np
        >>> flow.enable_eager_execution()
        >>> input = flow.Tensor(np.array([2, 3, 4, 5]), dtype=flow.float32)
        >>> other = flow.Tensor(np.array([2, 3, 4, 1]), dtype=flow.float32)
        >>> y = flow.eq(input, other)
        >>> y
        tensor([1, 1, 1, 0], dtype=oneflow.int8)
    """
    return Eq()(input, other)
# Run the doctest examples embedded in the docstrings above when this module
# is executed directly.
if __name__ == "__main__":
    import doctest
    doctest.testmod()
|
/*
 * Dutch (nl) localisation for the jQuery UI datepicker widget.
 * Registers through AMD when available, otherwise against the global
 * jQuery, and makes Dutch the default regional setting.
 */
!function (loader) {
    if (typeof define === "function" && define.amd) {
        define(["../widgets/datepicker"], loader);
    } else {
        loader(jQuery.datepicker);
    }
}(function (datepicker) {
    datepicker.regional.nl = {
        closeText: "Sluiten",
        prevText: "\u2190",
        nextText: "\u2192",
        currentText: "Vandaag",
        monthNames: ["januari", "februari", "maart", "april", "mei", "juni", "juli", "augustus", "september", "oktober", "november", "december"],
        monthNamesShort: ["jan", "feb", "mrt", "apr", "mei", "jun", "jul", "aug", "sep", "okt", "nov", "dec"],
        dayNames: ["zondag", "maandag", "dinsdag", "woensdag", "donderdag", "vrijdag", "zaterdag"],
        dayNamesShort: ["zon", "maa", "din", "woe", "don", "vri", "zat"],
        dayNamesMin: ["zo", "ma", "di", "wo", "do", "vr", "za"],
        weekHeader: "Wk",
        dateFormat: "dd-mm-yy",
        firstDay: 1,
        isRTL: false,
        showMonthAfterYear: false,
        yearSuffix: ""
    };
    datepicker.setDefaults(datepicker.regional.nl);
    return datepicker.regional.nl;
});
|
# Placeholder third-party API credentials — replace with real keys before use.
GOODREADS_API_KEY = 'YOUR GOODREADS API KEY'
BING_SEARCH_API_KEY = 'YOUR BING SEARCH API KEY'
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import glob
import ntpath
def get_module_name(module_path):
    """Return the filename stem (text before the first dot) of *module_path*.

    Windows-style path handling via ``ntpath`` works on any host OS.
    """
    return ntpath.basename(module_path).split(".")[0]
def snake_to_camel(word):
    """Convert *word* from snake_case to CamelCase.

    Empty segments (from consecutive underscores) are kept as a literal '_'.
    """
    parts = word.split('_')
    return ''.join(part.capitalize() if part else '_' for part in parts)
# Package metadata and resilient-circuits entry points for fn_cisco_asa.
setup(
    name="fn_cisco_asa",
    #display_name="Cisco ASA Functions for IBM Security SOAR",
    version="1.0.0",
    license="MIT",
    author="Resilient Labs",
    author_email="resil.labs@gmail.com",
    url="https://github.com/ibmresilient/resilient-community-apps",
    description="Resilient Circuits Components for 'fn_cisco_asa'",
    long_description="""Resilient Circuits Components for 'fn_cisco_asa'""",
    install_requires=[
        "resilient-circuits>=31.0.0",
        "resilient-lib>=39.0.0"
    ],
    packages=find_packages(),
    include_package_data=True,
    platforms="any",
    classifiers=[
        "Programming Language :: Python",
    ],
    entry_points={
        "resilient.circuits.components": [
            # When setup.py is executed, loop through the .py files in the components directory and create the entry points.
            "{}FunctionComponent = fn_cisco_asa.components.{}:FunctionComponent".format(snake_to_camel(get_module_name(filename)), get_module_name(filename)) for filename in glob.glob("./fn_cisco_asa/components/[a-zA-Z]*.py")
        ],
        "resilient.circuits.configsection": ["gen_config = fn_cisco_asa.util.config:config_section_data"],
        "resilient.circuits.customize": ["customize = fn_cisco_asa.util.customize:customization_data"],
        "resilient.circuits.selftest": ["selftest = fn_cisco_asa.util.selftest:selftest_function"]
    }
)
|
from gyun.iaas.connection import APIConnection
from gyun.conn import auth
from . import constants as const
from __builtin__ import str
class AppConnection(APIConnection):
    # API connection that authenticates with an app id/secret-key signature
    # (AppSignatureAuthHandler) rather than user credentials. Python 2 code:
    # note the `from __builtin__ import str` import at module level.
    def __init__(self, app_id, secret_app_key, zone,
                 host='api.qc.gyun.com', port=443, protocol='https',
                 pool=None, expires=None, retry_time=3,
                 http_socket_timeout=10, access_token=None):
        """
        @param app_id
        @param secret_app_key
        @param zone - the zone id to access
        @param host - the host to make the connection to
        @param port - the port to use when connect to host
        @param protocol - the protocol to access to web server, "http" or "https"
        @param pool - the connection pool
        @param retry_time - the retry_time when message send fail
        @param http_socket_timeout - seconds before an idle HTTP socket times out
        @param access_token - optional token forwarded to the signature handler
        """
        APIConnection.__init__(self, app_id, secret_app_key, zone, host, port,
                               protocol, pool, expires, retry_time,
                               http_socket_timeout)
        self._auth_handler = auth.AppSignatureAuthHandler(app_id,
                                                          secret_app_key,
                                                          access_token)
    def send_request(self, action, body, url='/app/', verb='GET'):
        """ Send request under the app URL prefix ('/app/').
        """
        return super(AppConnection, self).send_request(action, body, url, verb)
    def describe_users(self, **ignore):
        """ get current app user info
        """
        action = const.ACTION_DESCRIBE_USERS
        body = {}
        return self.send_request(action, body)
    def lease_app(self, service, resource=None):
        """ start lease app
        @param service: service to lease
        @param resource: related gyun resource
        """
        action = const.ACTION_LEASE_APP
        body = {"service": service}
        if resource:
            body["resource"] = resource
        return self.send_request(action, body)
    def unlease_app(self, resources):
        """ stop leasing app services
        @param resources: list of resource ids to unlease.
                          It can be id of user, app, service or appr.
                          For user id, unlease all app services for this user
                          For app id, unlease all services for this app
                          For service id, unlease all services
                          user id and other id can be conbined to unlease service for specified user
        """
        action = const.ACTION_UNLEASE_APP
        # NOTE(review): `str` is Python 2's __builtin__.str here, so a unicode
        # id would NOT be wrapped into a list and the call returns None —
        # confirm callers always pass byte strings or lists.
        if isinstance(resources, str):
            resources = [resources]
        if not isinstance(resources, list):
            return None
        body = {"resources": resources}
        return self.send_request(action, body)
|
import os
from pathlib import Path
import toml
import typer
ISWIN = os.name == "nt"  # True when running on Windows.
# pyproject.toml lives at the package root, two levels above this file.
PYPROJECT_PATH = Path(__file__).parent.parent.joinpath("pyproject.toml")
# Single-source the package metadata from the [tool.poetry] table.
pycln = toml.load(PYPROJECT_PATH)["tool"]["poetry"]
# NOTE(review): rebinding the module-level __name__ breaks any
# `if __name__ == "__main__"` check inside this module — presumably
# intentional (the value is shown in --version output), but confirm.
__name__ = pycln["name"]
__version__ = pycln["version"]
__doc__ = pycln["description"]
def version_callback(value: bool):
    """Print "<name>, version <version>" and exit 0 when the flag is set."""
    if not value:
        return
    typer.echo(f"{__name__}, version {__version__}")
    raise typer.Exit(0)
|
# pylint: disable=W0613
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
import os.path
import git
import pytest
import requests
@pytest.fixture(scope="function")
def repo_dir(tmp_path_factory, random_repo_name):
    # Fresh per-test temporary directory named after the random repo name.
    return tmp_path_factory.mktemp(random_repo_name)
@pytest.fixture(scope="function")
def mock_repo(repo_dir):
    # Local git repository with one committed empty file; used as the push
    # source in the push test below.
    repo = git.Repo.init(repo_dir)
    file_name = os.path.join(repo_dir, "test.txt")
    Path(file_name).touch()
    repo.index.add([file_name])
    repo.index.commit("initial commit")
    return repo
@pytest.mark.docker
@pytest.mark.integration
def test_apache_git_http_backend_apache_running(container_run, base_url):
    """Smoke test: the Apache container answers 200 on its base URL."""
    request = requests.get(base_url)
    assert request.status_code == 200
@pytest.mark.docker
@pytest.mark.integration
def test_apache_git_http_backend_repo_creation(
    container_run, basic_auth_creds, repo_creation_url
):
    """An authenticated request to the creation URL answers 201 (created)."""
    request = requests.get(
        repo_creation_url,
        auth=requests.auth.HTTPBasicAuth(
            basic_auth_creds["user"], basic_auth_creds["password"]
        ),
    )
    assert request.status_code == 201
@pytest.mark.docker
@pytest.mark.integration
def test_apache_git_http_backend_repo_creation_fails_without_credentials(
    container_run, repo_creation_url
):
    """Repo creation without basic-auth credentials is rejected with 401."""
    request = requests.get(repo_creation_url)
    assert request.status_code == 401
@pytest.mark.docker
@pytest.mark.integration
def test_apache_git_http_backend_repo_creation_fails_wrong_fs_permissions(
    container_run, basic_auth_creds, repo_creation_url
):
    """Repo creation answers 500 when the git dir is not writable by gerrit."""
    # Make the git directory unwritable for the service user ...
    container_run.exec_run("chown -R root:root /var/gerrit/git")
    request = requests.get(
        repo_creation_url,
        auth=requests.auth.HTTPBasicAuth(
            basic_auth_creds["user"], basic_auth_creds["password"]
        ),
    )
    # ... and restore ownership so later tests are unaffected.
    container_run.exec_run("chown -R gerrit:users /var/gerrit/git")
    assert request.status_code == 500
@pytest.mark.docker
@pytest.mark.integration
def test_apache_git_http_backend_repo_creation_push_repo(
    container_run, base_url, basic_auth_creds, mock_repo, random_repo_name
):
    """A local commit can be pushed to a bare repo served over HTTP."""
    # Create the bare target repository inside the container as user gerrit.
    container_run.exec_run(
        f"su -c 'git init --bare /var/gerrit/git/{random_repo_name}.git' gerrit"
    )
    url = f"{base_url}/git/{random_repo_name}.git"
    # Embed the basic-auth credentials into the remote URL.
    url = url.replace(
        "//", f"//{basic_auth_creds['user']}:{basic_auth_creds['password']}@"
    )
    origin = mock_repo.create_remote("origin", url)
    assert origin.exists()
    origin.fetch()
    result = origin.push(refspec="master:master")
    assert result
    # Verify the remote HEAD now points at the local commit.
    remote_refs = {}
    git_cmd = git.cmd.Git()
    for ref in git_cmd.ls_remote(url).split("\n"):
        hash_ref_list = ref.split("\t")
        remote_refs[hash_ref_list[1]] = hash_ref_list[0]
    assert remote_refs["HEAD"] == mock_repo.head.object.hexsha
|
/*
artifact generator: C:\My\wizzi\v6\node_modules\wizzi-js\lib\artifacts\js\module\gen\main.js
primary source IttfDocument: c:\my\wizzi\v6\plugins\wizzi-js\src\ittf\lib\artifacts\js\module\gen\codegen\statements\loops.js.ittf
*/
'use strict';
var util = require('util');
var verify = require('wizzi-utils').verify;
var node = require('wizzi-utils').node;
var u = require('../util/stm');
var myname = 'wizzi-js.artifacts.js.module.gen.codegen.statements.loops';
var md = module.exports = {};
// True when the model carries at least one child statement. Kept truthy-style:
// a missing `statements` property yields `undefined`, an empty list `false`.
function hasStatements(model) {
    var list = model.statements;
    return list && list.length > 0;
}
// Registers the loop/jump statement emitters on cnt.stm. Every generated
// string (emitted code and error messages) is identical to the previous
// version; the repeated callback validation and the break/continue emitters
// are factored into shared helpers.
md.load = function(cnt) {
    // Letters used for generated loop index variables; their count bounds
    // the maximum foreach/backeach nesting depth.
    var forloopLetters = "ijklmn";
    var max_for_nidif = forloopLetters.length;
    // Shared guard: every statement emitter requires a callback function.
    function checkCallback(name, callback) {
        if (typeof callback === 'undefined') {
            throw new Error('Missing callback parameter in cnt.stm: ' + myname + '.' + name);
        }
        if (typeof callback !== 'function') {
            throw new Error('The callback parameter must be a function. In ' + myname + '.' + name + '. Got: ' + callback);
        }
    }
    // Shared emitter for `break`/`continue`, with an optional label in wzName.
    function emitJump(keyword, model, ctx, callback) {
        ctx.write(keyword);
        ctx.write((model.wzName || '').trim().length > 0 ? (' ' + model.wzName) : '');
        ctx.w(';');
        return callback(null, null);
    }
    // `for (<head>) { ... }`
    cnt.stm.xfor = function(model, ctx, callback) {
        checkCallback('xfor', callback);
        u.emitBlock(cnt, 'for', model, model.statements, model.statements.length, ctx, callback);
    };
    // `foreach <item> in <coll>` — forward index loop over a collection.
    cnt.stm.foreach = function(model, ctx, callback) {
        checkCallback('foreach', callback);
        var ss = model.wzName.trim().split(' ');
        if (ss.length != 3 || ss[1] !== 'in') {
            return callback(ctx.error("Malformed foreach. Should be: foreach <item> in <coll>. Is " + model.wzName, model));
        }
        var item = ss[0],
            coll = ss[2];
        var nidif = (ctx.__for_nidif || 0);
        if (nidif >= max_for_nidif) {
            return callback(ctx.error(myname + '. Maximum number of nested foreach loop is ' + max_for_nidif, model));
        }
        var letter = forloopLetters[nidif];
        var items = (letter + '_items');
        var len = (letter + '_len');
        ctx.__for_nidif = (nidif + 1);
        ctx.w('var ' + letter + ', ' + items + '=' + coll + ', ' + len + '=' + coll + '.length, ' + item + ';');
        ctx.w('for (' + letter + '=0; ' + letter + '<' + len + '; ' + letter + '++) {');
        ctx.w('    ' + item + ' = ' + coll + '[' + letter + '];');
        cnt.genItems(model.statements, ctx, {
            indent: true
        }, function(err, notUsed) {
            if (err) {
                return callback(err);
            }
            ctx.w('}');
            ctx.__for_nidif = (ctx.__for_nidif - 1);
            return callback(null, null);
        });
    };
    // `backeach <item> in <coll>` — reverse index loop over a collection.
    cnt.stm.backeach = function(model, ctx, callback) {
        checkCallback('backeach', callback);
        var ss = model.wzName.trim().split(' ');
        if (ss.length != 3 || ss[1] !== 'in') {
            return callback(ctx.error("Malformed backeach. Should be: backeach <item> in <coll>. Is " + model.wzName, model));
        }
        var item = ss[0],
            coll = ss[2];
        var nidif = (ctx.__for_nidif || 0);
        if (nidif >= max_for_nidif) {
            return callback(ctx.error(myname + '. Maximum number of nested backeach loop is ' + max_for_nidif, model));
        }
        var letter = forloopLetters[nidif];
        var len = (letter + '_len');
        ctx.__for_nidif = (nidif + 1);
        ctx.w('var ' + letter + ', ' + len + '=' + coll + '.length, ' + item + ';');
        ctx.w('for (' + letter + '= (' + len + '-1); ' + letter + '>-1; ' + letter + '--) {');
        ctx.w('    ' + item + ' = ' + coll + '[' + letter + '];');
        cnt.genItems(model.statements, ctx, {
            indent: true
        }, function(err, notUsed) {
            if (err) {
                return callback(err);
            }
            ctx.w('}');
            ctx.__for_nidif = ctx.__for_nidif - 1;
            return callback(null, null);
        });
    };
    cnt.stm.xbreak = function(model, ctx, callback) {
        checkCallback('xbreak', callback);
        return emitJump('break', model, ctx, callback);
    };
    cnt.stm.xcontinue = function(model, ctx, callback) {
        checkCallback('xcontinue', callback);
        return emitJump('continue', model, ctx, callback);
    };
    cnt.stm.xwhile = function(model, ctx, callback) {
        checkCallback('xwhile', callback);
        u.emitBlock(cnt, 'while', model, model.statements, model.statements.length, ctx, callback);
    };
    // `do { ... } while (<cond>)`. In __preserveBlock mode the braces are
    // elided for a single non-block child statement.
    cnt.stm.xdo = function(model, ctx, callback) {
        checkCallback('xdo', callback);
        var items = model.statements;
        var count = model.statements.length;
        ctx.write('do');
        if (ctx.values.__preserveBlock) {
            if ((count > 1) || (count == 1 && items[0].wzElement === 'block')) {
                ctx.w(' {');
            }
            else {
                ctx.w('');
            }
        }
        else {
            ctx.w(' {');
        }
        cnt.genItems(model.statements, ctx, {
            indent: true
        }, function(err, notUsed) {
            if (err) {
                return callback(err);
            }
            if (ctx.values.__preserveBlock) {
                if ((count > 1) || (count == 1 && items[0].wzElement === 'block')) {
                    ctx.write('}');
                }
            }
            else {
                ctx.write('}');
            }
            ctx.w(' while (' + u.unparen(model.wzName) + ')');
            return callback(null, null);
        });
    };
    // `<label>:` followed by its child statements (no extra indent).
    cnt.stm.xlabel = function(model, ctx, callback) {
        checkCallback('xlabel', callback);
        ctx.w(model.wzName + ':');
        cnt.genItems(model.statements, ctx, {
            indent: false
        }, callback);
    };
};
|
import React from 'react';
import 'slick-carousel/slick/slick.css';
import 'slick-carousel/slick/slick-theme.css';
import Slider from 'react-slick';
import { NavLink } from 'react-router-dom';
export default function newItemsSlider(props) {
var settings = {
dots: true,
infinite: true,
speed: 500,
slidesToShow: 3,
slidesToScroll: 1,
autoplay: true,
autoplaySpeed: 2000,
draggable: false,
arrows: false,
responsive: [
{
breakpoint: 768,
settings: {
slidesToShow: 2,
},
},
{
breakpoint: 550,
settings: {
slidesToShow: 1,
},
},
],
};
return (
<div>
<Slider {...settings}>
{props.items.map((elem, id) => {
if (id < 6) {
return (
<div className="product-item-out" key={elem._id}>
<div className="product-item for-view">
<NavLink
onClick={() => {
window.scrollTo(0, 0);
props.countToOne();
}}
strict
to={`/item/${elem._id}`}
className={`product-item-img${
elem.imgName === 'default.png' ? ' no-img' : ''
}`}
>
{/* {elem.isFreeShipping && (
<div className="isFreeShipping">
<img src="/img/truck.png" alt=""/>
</div>
)} */}
<img
className="img-elem"
src={`/img/${
elem.imgName !== 'default.png' ? 'uploads/' : ''
}${elem.imgName}`}
alt=""
/>
</NavLink>
<div className="product-item-info">
<div className="product-item-title">{elem.title}</div>
<div className="product-item-price">
{elem.price}
<span className="dollar">грн</span>
</div>
<div className="cart-add-btn">
<img
className="cart-add-img"
src="/img/cart.png"
alt=""
/>
</div>
</div>
</div>
</div>
);
} else return <></>;
})}
</Slider>
</div>
);
}
|
// Factory for the home-page data layer; `client` is the content-delivery client.
const homeApi = client => ({
  // Loads the single home-page content item and maps it to a view model.
  getHomePage: async () => {
    const response = await client.item('untitled_content_item').toPromise();
    return mapHomePage(response.item);
  },
});
// Shapes a raw content item into the home-page view model.
const mapHomePage = contentItem => ({
  hero: mapHomeHero(contentItem),
});
// Maps the hero elements of the home-page item to a plain view model.
// All lookups are optional-chained end-to-end: the original chain
// (`value?.[0].slug`) still dereferenced `undefined` when the linked-item
// array was empty and threw; empty arrays now yield `undefined` fields.
const mapHomeHero = contentItem => {
  return {
    headline: contentItem.hero__title.value,
    summary: contentItem.hero__text.value,
    buttonText: contentItem.hero__button.value,
    buttonLink: contentItem.hero__button_link.value?.[0]?.slug?.value,
    image: contentItem.hero__image.value?.[0]?.url,
  };
};
export default homeApi;
|
const functions = require('firebase-functions');
// FitstTask
// Returns the next value of the sequence: the last element of `arr` plus the
// matching even number (2, 4, 6, ...) for its index; `false` for an empty
// array. Fixed: the previous version mutated the caller's array via `pop()`.
function findingNextOddInSequence (arr) {
    if (arr.length === 0) {
        return false;
    }
    const lastIndex = arr.length - 1;
    return Number(arr[lastIndex]) + findingEvenFromIndex(lastIndex);
}
// Returns the (i+1)-th positive even number (0 -> 2, 3 -> 8), or `false`
// when `i` is not numeric. The coercing global isNaN is kept deliberately
// to preserve the original acceptance of numeric-like values.
function findingEvenFromIndex(i) {
    if (isNaN(i)) {
        return false;
    }
    return 2 * (i + 1);
}
// Task 1: resolve X in "3, 5, 9, 15, X" from a comma-separated sequence.
function Task1 (str='3,5,9,15') {
    if (str === '') {
        return false;
    }
    const sequence = str.split(',');
    return formatOutput({
        "answer": findingNextOddInSequence(sequence),
        "explaination": "Each number is the result of the previous number added by the even number of the sequnece (2, 4, 6, 8, 10,...), will be referred as Arr in the example, at the similar array index. So, X is 15+Arr[3], Arr[3]=8 so X=23",
        "description": "3, 5, 9, 15, X - Please create new function for finding X value"
    });
}
// Second Task
/* Basically, it should be easily resolved by using algebra.js module ...
*/
// Task 2: solve "(Y + 24)+(10 × 2) = 99" for Y. Normalises multiplication
// signs and strips whitespace before handing the equation to equation().
function Task2 (str = '(Y+24)+(10×2)=99') {
    const args = convertMultiplySymbol(str).replace(/\s/g,'');
    return formatOutput({
        "explaination": "The solution is to split the equation firstly by '=' sign so it becomes array of factors. Then the result of the equation is the last array element (array[1]). Then, converting the operators of each fuactors to be the opposite, e.g. + beacomes -"
        + "Then, concat all the converted factors to the result and finally use ```eval``` function to execute the calculation"
        + "NB: This is the equation resolver function, which requires lots of work to support all possible cases (square root, exponential, etc). Thus this function is only focus on resolving the very basic factors according to the given task ",
        "description": "(Y + 24)+(10 × 2) = 99 - Please create new function for finding Y value",
        "answer": equation(args)
    });
}
// Solves a single-variable equation of the form "(...)+(...)=N" for Y by
// moving every non-Y factor to the right-hand side with inverted operators.
// SECURITY NOTE(review): the final value is computed with eval() on the
// rebuilt expression — safe only for trusted input; confirm callers sanitise.
function equation(str) {
    let factors = str.split('=');
    const result = factors[1],
        regx = /([+\-*/^])?\(.*?\)/g;
    let leftSide = factors[0].match(regx),
        output = result,
        varFactor='';
    // Move every factor without Y to the right-hand side; remember the one
    // containing Y for the final step.
    leftSide.forEach(element => {
        if( !findingVar(element, 'Y') ) {
            output += reversedOperator(element); //String concat
        }else{
            console.log(element);
            varFactor = removeParentheses(element);
        }
    });
    // Isolate Y inside its own factor and move its sibling term across too.
    if(varFactor.match(/^[Yy]/)) {
        let f = removeParentheses(varFactor).split('Y');
        output += reversedOperator(f[1]);
    }else{
        let g = removeParentheses(varFactor).split(/[+\-*/^][Yy]/);
        console.log(g);
        output += reversedOperator(g[0]);
    }
    return 'Given ' + str + "\r\n" + 'So, Y = ' + (output) + "\r\n" + 'Summary is ' + eval(output);
}
/**
 * Strips every '(' and ')' character from the given string.
 * @param {string} str
 * @returns {string} The string without any parentheses.
 */
function removeParentheses(str) {
    return str.replace(/\(/g, '').replace(/\)/g, '');
}
/**
 * Reports whether `phrase` contains the variable name.
 * @param {string} phrase - Text to search (e.g. an equation factor).
 * @param {string} varName - Variable name, interpreted as a regex pattern.
 * @returns {boolean} true when the variable occurs in the phrase.
 */
function findingVar(phrase, varName) {
    return phrase.match(varName) !== null;
}
/**
 * Inverts the arithmetic operator at the start of a factor so it can be
 * moved to the other side of an equation. A factor with no leading
 * operator is treated as implicitly added, so it is negated.
 * @param {string} str - Factor such as "+24" or "(10*2)".
 * @returns {string} The factor with its leading operator inverted.
 */
function reversedOperator(str) {
    const leadingOp = /^[+\-*/^]/;
    if (str.match(leadingOp) === null) {
        // No explicit operator: the implicit '+' inverts to '-'.
        return '-' + str;
    }
    const normalized = convertMultiplySymbol(str);
    const opposite = {
        "+": "-",
        "-": "+",
        "*": "/",
        "/": "*"
    };
    const op = normalized.match(leadingOp)[0];
    return normalized.replace(leadingOp, opposite[op]);
}
/**
 * Normalizes every multiplication sign ('x', 'X' or '×') to '*' so the
 * expression can be evaluated as JavaScript arithmetic.
 * @param {string} str
 * @returns {string} The normalized expression.
 */
function convertMultiplySymbol(str) {
    return str.replace(/x/gi, '*').replace(/×/g, '*');
}
// Task 3
/**
 * Task 3: computes the n-th term of the "prepend the index, seed with 5"
 * sequence (5, 25, 325, 4325, ...).
 * @param {number} [args=5] - Target term index (1-based).
 * @returns {object} formatOutput() payload with the computed term.
 */
function Task3(args = 5) {
    const ans = arraySequenceFn(args);
    return formatOutput({
        "explaination": "Each number in a sequence is the reversed concatenated previous numbers and padded with number 5. The function is to manipulate the pattern by giving the target number as an argument. fn(5)=" + ans,
        "description": "If 1 = 5 , 2 = 25 , 3 = 325 , 4 = 4325 Then 5 = X - Please create new function for finding X value",
        "answer": ans
    });
}
/**
 * Builds the sequence '5', '25', '325', '4325', ... where each term is the
 * current index prepended to the previous term, and returns the n-th term.
 * @param {number} n - Number of terms to generate (1-based).
 * @returns {string|undefined} The n-th term, or undefined when n < 1.
 */
function arraySequenceFn(n) {
    const terms = [];
    for (let i = 1; i <= n; i++) {
        terms.push(i === 1 ? '5' : String(i) + terms[i - 2]);
    }
    return terms.pop();
}
// Dispatch table: maps the `task` HTTP query parameter to its handler.
const taskMaps = {
    "1": Task1,
    "2": Task2,
    "3": Task3
};
/**
 * Projects a task result onto the canonical response shape, dropping any
 * extra properties.
 * @param {object} o - Object carrying description, explaination and answer.
 * @returns {{description: *, explaination: *, answer: *}}
 */
function formatOutput(o) {
    const { description, explaination, answer } = o;
    return { description, explaination, answer };
}
exports.Test = functions.https.onRequest((request, response) => {
const task = request.query.task,
args = request.query.args,
out = taskMaps[task].call(this,args);
response.send(out);
});
|
/*-
* Copyright (c) 2018, 2021 Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl/
*/
'use strict';
const assert = require('assert');
const PackedInteger = require('./packed_integer');
const ResizableBuffer = require('./buffer');
const NoSQLProtocolError = require('../error').NoSQLProtocolError;
const stringToUTCDate = require('../utils').stringToUTCDate;
class DataReader {
constructor(buf) {
this._buf = (buf instanceof ResizableBuffer) ? buf :
new ResizableBuffer(buf);
this._off = 0;
}
_readByte() {
return this._buf.readInt8(this._off++);
}
_handleEOF(e, ...args) {
if (e.name == 'RangeError') {
let msg = 'End of stream reached';
if (args.length > 0) {
msg += ` while reading ${args[0]}`;
if (args.length > 1) {
msg += ` of length ${args[1]}`;
}
}
throw new NoSQLProtocolError(msg);
} else {
throw e;
}
}
_readBuffer(len) {
try {
const ret = this._buf.readBuffer(this._off, len);
this._off += len;
return ret;
} catch(e) {
this._handleEOF(e, 'binary', len);
}
}
get offset() {
return this._off;
}
set offset(val) {
assert(val >= 0);
if (val > this._buf.length) {
throw new NoSQLProtocolError(`End of stream reached: offset \
${val} is past length ${this._buf.length}`);
}
this._off = val;
}
/**
* Reads a packed integer from the buffer and returns it.
*
* @return the integer that was read
* @throws NoSQLError if the input format is invalid or end of input is
* reached
*/
readInt() {
try {
//We pass ResizableBuffer instead of Buffer so that EOF checking
//is performed when reading bytes.
let { value, off } = PackedInteger.readSortedInt(
this._buf, this._off);
this._off = off;
return value;
} catch(e) {
this._handleEOF(e, 'packed int');
}
}
/**
* Reads a packed long from the buffer and returns it.
*
* @return the long that was read
* @throws NoSQLError if the input format is invalid or end of input is
* reached
*/
readLong() {
try {
let { value, off } = PackedInteger.readSortedLong(
this._buf, this._off);
this._off = off;
return value;
} catch(e) {
this._handleEOF(e, 'packed long');
}
}
/**
* Reads a string written by {@link #writeString}, using standard UTF-8
*
* @return a string or null
* @throws NoSQLError if the input format is invalid or end of input is
* reached
*/
readString() {
const len = this.readInt();
if (len < -1) {
throw new NoSQLProtocolError(`Invalid string length: ${len}`);
}
if (len == -1) {
return null;
}
if (len == 0) {
return '';
}
try {
const nextOff = this._off + len;
const ret = this._buf.slice(this._off, nextOff).toString('utf8');
this._off = nextOff;
return ret;
} catch(e) {
this._handleEOF(e, 'string', len);
}
}
readArray(readItem) {
const len = this.readInt();
if (len < -1) {
throw new NoSQLProtocolError(`Invalid array length: ${len}`);
}
if (len == -1) {
return null;
}
const a = new Array(len);
for(let i = 0; i < len; i++) {
a[i] = readItem();
}
return a;
}
readStringArray() {
return this.readArray(this.readString.bind(this));
}
/**
* Reads a possibly null binary as a {@link #readPackedInt
* sequence length} followed by the array contents.
*
* @return array the array or null
* @throws NoSQLError if the input format is invalid or end of input is
* reached
*/
readBinary() {
const len = this.readInt();
if (len < -1) {
throw new NoSQLProtocolError(`Invalid binary length: ${len}`);
}
if (len == -1) {
return null;
}
if (len == 0) {
return Buffer.allocUnsafe(0);
}
return this._readBuffer(len);
}
//Equivalent to readByteArrayWithInt() in BinaryProtocol.java
readBinary2() {
const len = this.readInt32BE();
if (len <= 0) {
return Buffer.allocUnsafe(0);
}
return this._readBuffer(len);
}
readIntArray() {
return this.readArray(this.readInt.bind(this));
}
readByte() {
try {
return this._readByte();
} catch(e) {
this._handleEOF(e, 'byte');
}
}
readBoolean() {
try {
return Boolean(this._readByte());
} catch(e) {
this._handleEOF(e, 'boolean');
}
}
readDouble() {
try {
const ret = this._buf.readDoubleBE(this._off);
this._off += 8;
return ret;
} catch(e) {
this._handleEOF(e, 'double');
}
}
readDate() {
const s = this.readString();
if (s === null) {
return null;
}
return stringToUTCDate(s);
}
readInt16BE() {
try {
const ret = this._buf.readInt16BE(this._off);
this._off += 2;
return ret;
} catch(e) {
this._handleEOF(e, 'short');
}
}
readInt32BE() {
try {
const ret = this._buf.readInt32BE(this._off);
this._off += 4;
return ret;
} catch(e) {
this._handleEOF(e, 'integer');
}
}
reset() {
this._off = 0;
return this;
}
toString(encoding = 'utf8') {
return this._buf.toString(encoding);
}
}
module.exports = DataReader;
|
const transform = require('./src/transform');
const readGeojson = require('./src/readGeojson');
const writeGtfs = require('./src/writeGtfs');
const constants = require('./src/constants');
/**
 * Converts GeoJSON into GTFS data, optionally writing a GTFS zip.
 *
 * Supports both call forms:
 *   geojsonToGtfs(geojson, outputPath, userConfig)
 *   geojsonToGtfs(geojson, userConfig)
 *
 * @param {object|string} geojson - GeoJSON object, or path to a GeoJSON file.
 * @param {string|object|null} [outputPath] - Destination for the GTFS zip,
 *   or the config object in the two-argument form.
 * @param {object} [userConfig] - Options and prepareInput/prepareOutput hooks.
 * @returns {object} The transformed GTFS data.
 */
function geojsonToGtfs(geojson, outputPath = null, userConfig = {}) {
    // Two-argument form: shift the config over BEFORE clearing outputPath.
    // (The previous code nulled outputPath first and then assigned it to
    // userConfig, discarding the caller's config — and because
    // `typeof null === "object"`, even the defaulted one-arg call set
    // userConfig to null and crashed below.)
    if (typeof outputPath === "object" && outputPath !== null) {
        userConfig = outputPath;
        outputPath = null;
    }
    const input = typeof geojson === "string"
        ? readGeojson(geojson)
        : geojson;
    if (userConfig.prepareInput) {
        userConfig.prepareInput(input);
    }
    const data = transform(input, userConfig);
    if (userConfig.prepareOutput) {
        userConfig.prepareOutput(data);
    }
    if (outputPath) {
        writeGtfs(data, outputPath, userConfig.zipCompressionLevel, userConfig.zipComment);
    }
    return data;
}
Object.assign(geojsonToGtfs, constants);
module.exports = geojsonToGtfs;
|
// knockout-amd-helpers 0.6.2 | (c) 2014 Ryan Niemeyer | http://www.opensource.org/licenses/mit-license
// NOTE: minified third-party vendor bundle — do not edit by hand; update from upstream instead.
define(["knockout"],function(a){var b=window.require||window.curl,c=a.utils.unwrapObservable,d=function(a,b){var c,d=function(){return a.apply(this,b||[])};return d.prototype=a.prototype,c=new d,c.constructor=a,c},e=function(a){return a&&a.replace(/\/?$/,"/")},f=function(b){for(var c=a.virtualElements.firstChild(b);c;){if(1===c.nodeType||8===c.nodeType)return!0;c=a.virtualElements.nextSibling(c)}return!1};a.bindingHandlers.module={init:function(g,h,i,j,k){var l,m,n=h(),o=c(n),p={},q=a.bindingHandlers.module.initializer,r=a.bindingHandlers.module.disposeMethod;return p.templateEngine=o&&o.templateEngine,p.afterRender=function(){var a=c(h());a&&"function"==typeof a.afterRender&&a.afterRender.apply(this,arguments)},f(g)||(p.name=function(){var a=c(n);return(a&&"object"==typeof a?c(a.template||a.name):a)||""}),p.data=a.observable(),p["if"]=p.data,a.applyBindingsToNode(g,{template:p},l=k.extend({$module:null})),m=function(){a.computed(function(){var a=p.data();a&&("function"==typeof a[r]&&(a[r].call(a),a=null),p.data(null))}).dispose()},a.computed({read:function(){var f,g=c(n);g&&"object"==typeof g&&(q=g.initializer||q,r=g.disposeMethod||r,f=[].concat(c(g.data)),g=c(g.name)),m(),g&&b([e(a.bindingHandlers.module.baseDir)+g],function(a){"function"==typeof a?a=d(a,f):a&&a[q]&&(a=a[q].apply(a,f||[])||a),l.$module=a,p.data(a)})},disposeWhenNodeIsRemoved:g}),a.utils.domNodeDisposal.addDisposeCallback(g,m),{controlsDescendantBindings:!0}},baseDir:"",initializer:"initialize",disposeMethod:"dispose"},a.virtualElements&&(a.virtualElements.allowedBindings.module=!0),function(a,b){var c=new a.nativeTemplateEngine,d=(c.renderTemplate,{});c.defaultPath="templates",c.defaultSuffix=".tmpl.html",c.defaultRequireTextPluginName="text",a.templateSources.requireTemplate=function(b){this.key=b,this.template=a.observable(" 
"),this.requested=!1,this.retrieved=!1},a.templateSources.requireTemplate.prototype.text=function(){return!this.requested&&this.key&&(b([c.defaultRequireTextPluginName+"!"+e(c.defaultPath)+this.key+c.defaultSuffix],function(a){this.retrieved=!0,this.template(a)}.bind(this)),this.requested=!0),this.key||this.template(""),0===arguments.length?this.template():void 0},c.makeTemplateSource=function(b,c){var e;return"string"==typeof b?(e=(c||document).getElementById(b),e&&"script"===e.tagName.toLowerCase()?new a.templateSources.domElement(e):(b in d||(d[b]=new a.templateSources.requireTemplate(b)),d[b])):!b||1!==b.nodeType&&8!==b.nodeType?void 0:new a.templateSources.anonymousTemplate(b)},c.renderTemplate=function(b,d,e,f){var g=c.makeTemplateSource(b,f),h=e&&e.afterRender;return"function"==typeof h&&g instanceof a.templateSources.requireTemplate&&!g.retrieved&&(e.afterRender=function(){g.retrieved&&h.apply(this,arguments)}),c.renderTemplateSource(g,d,e)},a.amdTemplateEngine=c,a.setTemplateEngine(c)}(a,b)});
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
import tempfile
import warnings
import numpy
from numpy import testing as npt
import tables
from tables import Atom, ClosedNodeError, NoSuchNodeError
from tables.utils import byteorders
from tables.tests import common
from tables.tests.common import allequal
from tables.tests.common import unittest
from tables.tests.common import PyTablesTestCase as TestCase
warnings.resetwarnings()
class BasicTestCase(TestCase):
    """Basic test for all the supported typecodes present in numpy.

    All of them are included on pytables.
    """

    # Subclasses set this True to also exercise the non-native byteorder path.
    # Subclasses must also define `title`, `tupleInt` and `tupleChar`.
    endiancheck = False

    def write_read(self, testarray):
        """Round-trip `testarray` through create_array()/read() and verify it."""
        a = testarray
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running test for array with type '%s'" % a.dtype.type,
                  end=' ')
            print("for class check:", self.title)

        # Create an instance of HDF5 file
        filename = tempfile.mktemp(".h5")
        try:
            with tables.open_file(filename, mode="w") as fileh:
                root = fileh.root

                # Create the array under root and name 'somearray'
                if self.endiancheck and a.dtype.kind != "S":
                    b = a.byteswap()
                    b.dtype = a.dtype.newbyteorder()
                    a = b

                fileh.create_array(root, 'somearray', a, "Some array")

            # Re-open the file in read-only mode
            with tables.open_file(filename, mode="r") as fileh:
                root = fileh.root

                # Read the saved array
                b = root.somearray.read()

                # Compare them. They should be equal.
                if common.verbose and not allequal(a, b):
                    print("Write and read arrays differ!")
                    # print("Array written:", a)
                    print("Array written shape:", a.shape)
                    print("Array written itemsize:", a.itemsize)
                    print("Array written type:", a.dtype.type)
                    # print("Array read:", b)
                    print("Array read shape:", b.shape)
                    print("Array read itemsize:", b.itemsize)
                    print("Array read type:", b.dtype.type)
                    if a.dtype.kind != "S":
                        print("Array written byteorder:", a.dtype.byteorder)
                        print("Array read byteorder:", b.dtype.byteorder)

                # Check strictly the array equality
                self.assertEqual(a.shape, b.shape)
                self.assertEqual(a.shape, root.somearray.shape)
                if a.dtype.kind == "S":
                    self.assertEqual(root.somearray.atom.type, "string")
                else:
                    self.assertEqual(a.dtype.type, b.dtype.type)
                    self.assertEqual(a.dtype.type,
                                     root.somearray.atom.dtype.type)
                    abo = byteorders[a.dtype.byteorder]
                    bbo = byteorders[b.dtype.byteorder]
                    if abo != "irrelevant":
                        self.assertEqual(abo, root.somearray.byteorder)
                        self.assertEqual(bbo, sys.byteorder)
                        if self.endiancheck:
                            self.assertNotEqual(bbo, abo)

                obj = root.somearray
                self.assertEqual(obj.flavor, 'numpy')
                self.assertEqual(obj.shape, a.shape)
                self.assertEqual(obj.ndim, a.ndim)
                self.assertEqual(obj.chunkshape, None)
                if a.shape:
                    nrows = a.shape[0]
                else:
                    # scalar
                    nrows = 1
                self.assertEqual(obj.nrows, nrows)
                self.assertTrue(allequal(a, b))
        finally:
            # Then, delete the file
            os.remove(filename)

    def write_read_out_arg(self, testarray):
        """Like write_read(), but reads via the `out=` argument of read()."""
        a = testarray
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running test for array with type '%s'" % a.dtype.type,
                  end=' ')
            print("for class check:", self.title)

        # Create an instance of HDF5 file
        filename = tempfile.mktemp(".h5")
        try:
            with tables.open_file(filename, mode="w") as fileh:
                root = fileh.root

                # Create the array under root and name 'somearray'
                if self.endiancheck and a.dtype.kind != "S":
                    b = a.byteswap()
                    b.dtype = a.dtype.newbyteorder()
                    a = b

                fileh.create_array(root, 'somearray', a, "Some array")

            # Re-open the file in read-only mode
            with tables.open_file(filename, mode="r") as fileh:
                root = fileh.root

                # Read the saved array
                b = numpy.empty_like(a, dtype=a.dtype)
                root.somearray.read(out=b)

                # Check strictly the array equality
                self.assertEqual(a.shape, b.shape)
                self.assertEqual(a.shape, root.somearray.shape)
                if a.dtype.kind == "S":
                    self.assertEqual(root.somearray.atom.type, "string")
                else:
                    self.assertEqual(a.dtype.type, b.dtype.type)
                    self.assertEqual(a.dtype.type,
                                     root.somearray.atom.dtype.type)
                    abo = byteorders[a.dtype.byteorder]
                    bbo = byteorders[b.dtype.byteorder]
                    if abo != "irrelevant":
                        self.assertEqual(abo, root.somearray.byteorder)
                        self.assertEqual(abo, bbo)
                        if self.endiancheck:
                            self.assertNotEqual(bbo, sys.byteorder)
                self.assertTrue(allequal(a, b))
        finally:
            # Then, delete the file
            os.remove(filename)

    def write_read_atom_shape_args(self, testarray):
        """Like write_read(), but creates the array from atom/shape and
        assigns the data afterwards with ptarr[...] = a."""
        a = testarray
        atom = Atom.from_dtype(a.dtype)
        shape = a.shape
        byteorder = None

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running test for array with type '%s'" % a.dtype.type,
                  end=' ')
            print("for class check:", self.title)

        # Create an instance of HDF5 file
        filename = tempfile.mktemp(".h5")
        try:
            with tables.open_file(filename, mode="w") as fileh:
                root = fileh.root

                # Create the array under root and name 'somearray'
                if self.endiancheck and a.dtype.kind != "S":
                    b = a.byteswap()
                    b.dtype = a.dtype.newbyteorder()
                    if b.dtype.byteorder in ('>', '<'):
                        byteorder = byteorders[b.dtype.byteorder]
                    a = b

                ptarr = fileh.create_array(root, 'somearray',
                                           atom=atom, shape=shape,
                                           title="Some array",
                                           # specify the byteorder explicitly
                                           # since there is no way to deduce
                                           # it in this case
                                           byteorder=byteorder)
                self.assertEqual(shape, ptarr.shape)
                self.assertEqual(atom, ptarr.atom)
                ptarr[...] = a

            # Re-open the file in read-only mode
            with tables.open_file(filename, mode="r") as fileh:
                root = fileh.root

                # Read the saved array
                b = root.somearray.read()

                # Compare them. They should be equal.
                if common.verbose and not allequal(a, b):
                    print("Write and read arrays differ!")
                    # print("Array written:", a)
                    print("Array written shape:", a.shape)
                    print("Array written itemsize:", a.itemsize)
                    print("Array written type:", a.dtype.type)
                    # print("Array read:", b)
                    print("Array read shape:", b.shape)
                    print("Array read itemsize:", b.itemsize)
                    print("Array read type:", b.dtype.type)
                    if a.dtype.kind != "S":
                        print("Array written byteorder:", a.dtype.byteorder)
                        print("Array read byteorder:", b.dtype.byteorder)

                # Check strictly the array equality
                self.assertEqual(a.shape, b.shape)
                self.assertEqual(a.shape, root.somearray.shape)
                if a.dtype.kind == "S":
                    self.assertEqual(root.somearray.atom.type, "string")
                else:
                    self.assertEqual(a.dtype.type, b.dtype.type)
                    self.assertEqual(a.dtype.type,
                                     root.somearray.atom.dtype.type)
                    abo = byteorders[a.dtype.byteorder]
                    bbo = byteorders[b.dtype.byteorder]
                    if abo != "irrelevant":
                        self.assertEqual(abo, root.somearray.byteorder)
                        self.assertEqual(bbo, sys.byteorder)
                        if self.endiancheck:
                            self.assertNotEqual(bbo, abo)

                obj = root.somearray
                self.assertEqual(obj.flavor, 'numpy')
                self.assertEqual(obj.shape, a.shape)
                self.assertEqual(obj.ndim, a.ndim)
                self.assertEqual(obj.chunkshape, None)
                if a.shape:
                    nrows = a.shape[0]
                else:
                    # scalar
                    nrows = 1
                self.assertEqual(obj.nrows, nrows)
                self.assertTrue(allequal(a, b))
        finally:
            # Then, delete the file
            os.remove(filename)

    def setup00_char(self):
        """Data integrity during recovery (character objects)"""
        if not isinstance(self.tupleChar, numpy.ndarray):
            a = numpy.array(self.tupleChar, dtype="S")
        else:
            a = self.tupleChar
        return a

    def test00_char(self):
        """Character round-trip via read()."""
        a = self.setup00_char()
        self.write_read(a)

    def test00_char_out_arg(self):
        """Character round-trip via read(out=...)."""
        a = self.setup00_char()
        self.write_read_out_arg(a)

    def test00_char_atom_shape_args(self):
        """Character round-trip via create_array(atom=..., shape=...)."""
        a = self.setup00_char()
        self.write_read_atom_shape_args(a)

    def test00b_char(self):
        """Data integrity during recovery (string objects)"""
        a = self.tupleChar

        filename = tempfile.mktemp(".h5")
        try:
            # Create an instance of HDF5 file
            with tables.open_file(filename, mode="w") as fileh:
                fileh.create_array(fileh.root, 'somearray', a, "Some array")

            # Re-open the file in read-only mode
            with tables.open_file(filename, mode="r") as fileh:
                # Read the saved array
                b = fileh.root.somearray.read()
                if isinstance(a, bytes):
                    self.assertEqual(type(b), bytes)
                    self.assertEqual(a, b)
                else:
                    # If a is not a python string, then it should be a list
                    # or ndarray
                    self.assertTrue(type(b) in [list, numpy.ndarray])
        finally:
            # Then, delete the file
            os.remove(filename)

    def test00b_char_out_arg(self):
        """Data integrity during recovery (string objects)"""
        a = self.tupleChar

        filename = tempfile.mktemp(".h5")
        try:
            # Create an instance of HDF5 file
            with tables.open_file(filename, mode="w") as fileh:
                fileh.create_array(fileh.root, 'somearray', a, "Some array")

            # Re-open the file in read-only mode
            with tables.open_file(filename, mode="r") as fileh:
                # Read the saved array
                b = numpy.empty_like(a)
                if fileh.root.somearray.flavor != 'numpy':
                    self.assertRaises(TypeError,
                                      lambda: fileh.root.somearray.read(out=b))
                else:
                    fileh.root.somearray.read(out=b)
                self.assertTrue(type(b), numpy.ndarray)
        finally:
            # Then, delete the file
            os.remove(filename)

    def test00b_char_atom_shape_args(self):
        """Data integrity during recovery (string objects)"""
        a = self.tupleChar

        filename = tempfile.mktemp(".h5")
        try:
            # Create an instance of HDF5 file
            with tables.open_file(filename, mode="w") as fileh:
                nparr = numpy.asarray(a)
                atom = Atom.from_dtype(nparr.dtype)
                shape = nparr.shape
                if nparr.dtype.byteorder in ('>', '<'):
                    byteorder = byteorders[nparr.dtype.byteorder]
                else:
                    byteorder = None

                ptarr = fileh.create_array(fileh.root, 'somearray',
                                           atom=atom, shape=shape,
                                           byteorder=byteorder,
                                           title="Some array")
                self.assertEqual(shape, ptarr.shape)
                self.assertEqual(atom, ptarr.atom)
                ptarr[...] = a

            # Re-open the file in read-only mode
            with tables.open_file(filename, mode="r") as fileh:
                # Read the saved array
                b = numpy.empty_like(a)
                if fileh.root.somearray.flavor != 'numpy':
                    self.assertRaises(TypeError,
                                      lambda: fileh.root.somearray.read(out=b))
                else:
                    fileh.root.somearray.read(out=b)
                self.assertTrue(type(b), numpy.ndarray)
        finally:
            # Then, delete the file
            os.remove(filename)

    def setup01_char_nc(self):
        """Data integrity during recovery (non-contiguous character objects)"""
        if not isinstance(self.tupleChar, numpy.ndarray):
            a = numpy.array(self.tupleChar, dtype="S")
        else:
            a = self.tupleChar
        if a.ndim == 0:
            b = a.copy()
        else:
            b = a[::2]
            # Ensure that this numpy string is non-contiguous
            if len(b) > 1:
                self.assertEqual(b.flags.contiguous, False)
        return b

    def test01_char_nc(self):
        """Non-contiguous character round-trip via read()."""
        b = self.setup01_char_nc()
        self.write_read(b)

    def test01_char_nc_out_arg(self):
        """Non-contiguous character round-trip via read(out=...)."""
        b = self.setup01_char_nc()
        self.write_read_out_arg(b)

    def test01_char_nc_atom_shape_args(self):
        """Non-contiguous character round-trip via atom/shape creation."""
        b = self.setup01_char_nc()
        self.write_read_atom_shape_args(b)

    def test02_types(self):
        """Data integrity during recovery (numerical types)"""
        typecodes = ['int8', 'int16', 'int32', 'int64',
                     'uint8', 'uint16', 'uint32', 'uint64',
                     'float32', 'float64',
                     'complex64', 'complex128']

        # Extended types are only tested when the build provides their atoms.
        for name in ('float16', 'float96', 'float128',
                     'complex192', 'complex256'):
            atomname = name.capitalize() + 'Atom'
            if atomname in globals():
                typecodes.append(name)

        for typecode in typecodes:
            a = numpy.array(self.tupleInt, typecode)
            self.write_read(a)
            b = numpy.array(self.tupleInt, typecode)
            self.write_read_out_arg(b)
            c = numpy.array(self.tupleInt, typecode)
            self.write_read_atom_shape_args(c)

    def test03_types_nc(self):
        """Data integrity during recovery (non-contiguous numerical types)"""
        typecodes = ['int8', 'int16', 'int32', 'int64',
                     'uint8', 'uint16', 'uint32', 'uint64',
                     'float32', 'float64',
                     'complex64', 'complex128', ]

        # Extended types are only tested when the build provides their atoms.
        for name in ('float16', 'float96', 'float128',
                     'complex192', 'complex256'):
            atomname = name.capitalize() + 'Atom'
            if atomname in globals():
                typecodes.append(name)

        for typecode in typecodes:
            a = numpy.array(self.tupleInt, typecode)
            if a.ndim == 0:
                b1 = a.copy()
                b2 = a.copy()
                b3 = a.copy()
            else:
                b1 = a[::2]
                b2 = a[::2]
                b3 = a[::2]
                # Ensure that this array is non-contiguous
                if len(b1) > 1:
                    self.assertEqual(b1.flags.contiguous, False)
                if len(b2) > 1:
                    self.assertEqual(b2.flags.contiguous, False)
                if len(b3) > 1:
                    self.assertEqual(b3.flags.contiguous, False)
            self.write_read(b1)
            self.write_read_out_arg(b2)
            self.write_read_atom_shape_args(b3)
class Basic0DOneTestCase(BasicTestCase):
    # Scalar case
    title = "Rank-0 case 1"
    tupleInt = 3
    tupleChar = b"3"
    endiancheck = True
class Basic0DTwoTestCase(BasicTestCase):
    # Scalar case
    title = "Rank-0 case 2"
    tupleInt = 33
    tupleChar = b"33"
    endiancheck = True
class Basic1DZeroTestCase(BasicTestCase):
    # This test case is not supported by PyTables (HDF5 limitations)
    # 1D case
    title = "Rank-1 case 0"
    tupleInt = ()
    tupleChar = ()
    endiancheck = False
class Basic1DOneTestCase(BasicTestCase):
    """Rank-1 arrays with a single element."""

    # 1D case
    title = "Rank-1 case 1"
    tupleInt = (3,)
    tupleChar = (b"a",)
    endiancheck = True
class Basic1DTwoTestCase(BasicTestCase):
    # 1D case
    title = "Rank-1 case 2"
    tupleInt = (3, 4)
    tupleChar = (b"aaa",)
    endiancheck = True
class Basic1DThreeTestCase(BasicTestCase):
    # 1D case
    title = "Rank-1 case 3"
    tupleInt = (3, 4, 5)
    tupleChar = (b"aaa", b"bbb",)
    endiancheck = True
class Basic2DOneTestCase(BasicTestCase):
    # 2D case
    title = "Rank-2 case 1"
    tupleInt = numpy.array(numpy.arange((4)**2))
    tupleInt.shape = (4,)*2
    tupleChar = numpy.array(["abc"]*3**2, dtype="S3")
    tupleChar.shape = (3,)*2
    endiancheck = True
class Basic2DTwoTestCase(BasicTestCase):
    # 2D case, with a multidimensional dtype
    title = "Rank-2 case 2"
    tupleInt = numpy.array(numpy.arange((4)), dtype=(numpy.int_, (4,)))
    tupleChar = numpy.array(["abc"]*3, dtype=("S3", (3,)))
    endiancheck = True
class Basic10DTestCase(BasicTestCase):
    # 10D case
    title = "Rank-10 test"
    tupleInt = numpy.array(numpy.arange((2)**10))
    tupleInt.shape = (2,)*10
    tupleChar = numpy.array(
        ["abc"]*2**10, dtype="S3")
    tupleChar.shape = (2,)*10
    endiancheck = True
class Basic32DTestCase(BasicTestCase):
    # 32D case (maximum)
    title = "Rank-32 test"
    tupleInt = numpy.array((32,))
    tupleInt.shape = (1,)*32
    tupleChar = numpy.array(["121"], dtype="S3")
    tupleChar.shape = (1,)*32
class ReadOutArgumentTests(common.TempFileMixin, TestCase):
    """Tests for the `out=` argument of Array.read()."""

    def setUp(self):
        super(ReadOutArgumentTests, self).setUp()
        self.size = 1000

    def create_array(self):
        """Creates and returns a (numpy array, on-disk array) pair."""
        array = numpy.arange(self.size, dtype='f8')
        disk_array = self.h5file.create_array('/', 'array', array)
        return array, disk_array

    def test_read_entire_array(self):
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size, ), 'f8')
        disk_array.read(out=out_buffer)
        numpy.testing.assert_equal(out_buffer, array)

    def test_read_contiguous_slice1(self):
        # Only the requested slice of the output buffer may be written.
        array, disk_array = self.create_array()
        out_buffer = numpy.arange(self.size, dtype='f8')
        out_buffer = numpy.random.permutation(out_buffer)
        out_buffer_orig = out_buffer.copy()
        start = self.size // 2
        disk_array.read(start=start, stop=self.size, out=out_buffer[start:])
        numpy.testing.assert_equal(out_buffer[start:], array[start:])
        numpy.testing.assert_equal(out_buffer[:start], out_buffer_orig[:start])

    def test_read_contiguous_slice2(self):
        # Middle slice: data outside [start, stop) must stay untouched.
        array, disk_array = self.create_array()
        out_buffer = numpy.arange(self.size, dtype='f8')
        out_buffer = numpy.random.permutation(out_buffer)
        out_buffer_orig = out_buffer.copy()
        start = self.size // 4
        stop = self.size - start
        disk_array.read(start=start, stop=stop, out=out_buffer[start:stop])
        numpy.testing.assert_equal(out_buffer[start:stop], array[start:stop])
        numpy.testing.assert_equal(out_buffer[:start], out_buffer_orig[:start])
        numpy.testing.assert_equal(out_buffer[stop:], out_buffer_orig[stop:])

    def test_read_non_contiguous_slice_contiguous_buffer(self):
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size // 2, ), dtype='f8')
        disk_array.read(start=0, stop=self.size, step=2, out=out_buffer)
        numpy.testing.assert_equal(out_buffer, array[0:self.size:2])

    def test_read_non_contiguous_buffer(self):
        # A non-C-contiguous output buffer must be rejected.
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size, ), 'f8')
        out_buffer_slice = out_buffer[0:self.size:2]
        # once Python 2.6 support is dropped, this could change
        # to assertRaisesRegexp to check exception type and message at once
        self.assertRaises(ValueError, disk_array.read, 0, self.size, 2,
                          out_buffer_slice)
        try:
            disk_array.read(0, self.size, 2, out_buffer_slice)
        except ValueError as exc:
            self.assertEqual('output array not C contiguous', str(exc))

    def test_buffer_too_small(self):
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size // 2, ), 'f8')
        self.assertRaises(ValueError, disk_array.read, 0, self.size, 1,
                          out_buffer)
        try:
            disk_array.read(0, self.size, 1, out_buffer)
        except ValueError as exc:
            self.assertTrue('output array size invalid, got' in str(exc))

    def test_buffer_too_large(self):
        array, disk_array = self.create_array()
        out_buffer = numpy.empty((self.size + 1, ), 'f8')
        self.assertRaises(ValueError, disk_array.read, 0, self.size, 1,
                          out_buffer)
        try:
            disk_array.read(0, self.size, 1, out_buffer)
        except ValueError as exc:
            self.assertTrue('output array size invalid, got' in str(exc))
class SizeOnDiskInMemoryPropertyTestCase(common.TempFileMixin, TestCase):
    """Checks the size_on_disk / size_in_memory properties of Array."""

    def setUp(self):
        super(SizeOnDiskInMemoryPropertyTestCase, self).setUp()
        self.array_size = (10, 10)
        self.array = self.h5file.create_array(
            '/', 'somearray', numpy.zeros(self.array_size, 'i4'))

    def test_all_zeros(self):
        # 10x10 int32 elements -> 400 bytes, both on disk and in memory.
        self.assertEqual(self.array.size_on_disk, 10 * 10 * 4)
        self.assertEqual(self.array.size_in_memory, 10 * 10 * 4)
class UnalignedAndComplexTestCase(common.TempFileMixin, TestCase):
"""Basic test for all the supported typecodes present in numpy.
Most of them are included on PyTables.
"""
def setUp(self):
super(UnalignedAndComplexTestCase, self).setUp()
self.root = self.h5file.root
def write_read(self, testArray):
if common.verbose:
print('\n', '-=' * 30)
print("\nRunning test for array with type '%s'" %
testArray.dtype.type)
# Create the array under root and name 'somearray'
a = testArray
if self.endiancheck:
byteorder = {"little": "big", "big": "little"}[sys.byteorder]
else:
byteorder = sys.byteorder
self.h5file.create_array(self.root, 'somearray', a, "Some array",
byteorder=byteorder)
if self.reopen:
self._reopen()
self.root = self.h5file.root
# Read the saved array
b = self.root.somearray.read()
# Get an array to be compared in the correct byteorder
c = a.newbyteorder(byteorder)
# Compare them. They should be equal.
if not allequal(c, b) and common.verbose:
print("Write and read arrays differ!")
print("Array written:", a)
print("Array written shape:", a.shape)
print("Array written itemsize:", a.itemsize)
print("Array written type:", a.dtype.type)
print("Array read:", b)
print("Array read shape:", b.shape)
print("Array read itemsize:", b.itemsize)
print("Array read type:", b.dtype.type)
# Check strictly the array equality
self.assertEqual(a.shape, b.shape)
self.assertEqual(a.shape, self.root.somearray.shape)
if a.dtype.byteorder != "|":
self.assertEqual(a.dtype, b.dtype)
self.assertEqual(a.dtype, self.root.somearray.atom.dtype)
self.assertEqual(byteorders[b.dtype.byteorder], sys.byteorder)
self.assertEqual(self.root.somearray.byteorder, byteorder)
self.assertTrue(allequal(c, b))
def test01_signedShort_unaligned(self):
"Checking an unaligned signed short integer array"
r = numpy.rec.array(b'a'*200, formats='i1,f4,i2', shape=10)
a = r["f2"]
# Ensure that this array is non-aligned
self.assertEqual(a.flags.aligned, False)
self.assertEqual(a.dtype.type, numpy.int16)
self.write_read(a)
def test02_float_unaligned(self):
"Checking an unaligned single precision array"
r = numpy.rec.array(b'a'*200, formats='i1,f4,i2', shape=10)
a = r["f1"]
# Ensure that this array is non-aligned
self.assertEqual(a.flags.aligned, 0)
self.assertEqual(a.dtype.type, numpy.float32)
self.write_read(a)
def test03_byte_offset(self):
"Checking an offsetted byte array"
r = numpy.arange(100, dtype=numpy.int8)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test04_short_offset(self):
"Checking an offsetted unsigned short int precision array"
r = numpy.arange(100, dtype=numpy.uint32)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test05_int_offset(self):
"Checking an offsetted integer array"
r = numpy.arange(100, dtype=numpy.int32)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test06_longlongint_offset(self):
"Checking an offsetted long long integer array"
r = numpy.arange(100, dtype=numpy.int64)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test07_float_offset(self):
"Checking an offsetted single precision array"
r = numpy.arange(100, dtype=numpy.float32)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test08_double_offset(self):
"Checking an offsetted double precision array"
r = numpy.arange(100, dtype=numpy.float64)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test09_float_offset_unaligned(self):
"Checking an unaligned and offsetted single precision array"
r = numpy.rec.array(b'a'*200, formats='i1,3f4,i2', shape=10)
a = r["f1"][3]
# Ensure that this array is non-aligned
self.assertEqual(a.flags.aligned, False)
self.assertEqual(a.dtype.type, numpy.float32)
self.write_read(a)
def test10_double_offset_unaligned(self):
"Checking an unaligned and offsetted double precision array"
r = numpy.rec.array(b'a'*400, formats='i1,3f8,i2', shape=10)
a = r["f1"][3]
# Ensure that this array is non-aligned
self.assertEqual(a.flags.aligned, False)
self.assertEqual(a.dtype.type, numpy.float64)
self.write_read(a)
def test11_int_byteorder(self):
"Checking setting data with different byteorder in a range (integer)"
# Save an array with the reversed byteorder on it
a = numpy.arange(25, dtype=numpy.int32).reshape(5, 5)
a = a.byteswap()
a = a.newbyteorder()
array = self.h5file.create_array(
self.h5file.root, 'array', a, "byteorder (int)")
# Read a subarray (got an array with the machine byteorder)
b = array[2:4, 3:5]
b = b.byteswap()
b = b.newbyteorder()
# Set this subarray back to the array
array[2:4, 3:5] = b
b = b.byteswap()
b = b.newbyteorder()
# Set this subarray back to the array
array[2:4, 3:5] = b
# Check that the array is back in the correct byteorder
c = array[...]
if common.verbose:
print("byteorder of array on disk-->", array.byteorder)
print("byteorder of subarray-->", b.dtype.byteorder)
print("subarray-->", b)
print("retrieved array-->", c)
self.assertTrue(allequal(a, c))
def test12_float_byteorder(self):
    "Checking setting data with different byteorder in a range (float)"

    # NOTE: ``ndarray.newbyteorder()`` was removed in NumPy 2.0; the
    # equivalent ``view(dtype.newbyteorder())`` idiom is used instead.
    # The subarray is written back twice on purpose: once converted to
    # the machine byteorder and once in the reversed byteorder, so both
    # assignment paths are exercised.

    # Save an array with the reversed byteorder on it
    a = numpy.arange(25, dtype=numpy.float64).reshape(5, 5)
    a = a.byteswap()
    a = a.view(a.dtype.newbyteorder())
    array = self.h5file.create_array(
        self.h5file.root, 'array', a, "byteorder (float)")

    # Read a subarray (got an array with the machine byteorder)
    b = array[2:4, 3:5]
    b = b.byteswap()
    b = b.view(b.dtype.newbyteorder())

    # Set this subarray back to the array
    array[2:4, 3:5] = b
    b = b.byteswap()
    b = b.view(b.dtype.newbyteorder())

    # Set this subarray back to the array
    array[2:4, 3:5] = b

    # Check that the array is back in the correct byteorder
    c = array[...]
    if common.verbose:
        print("byteorder of array on disk-->", array.byteorder)
        print("byteorder of subarray-->", b.dtype.byteorder)
        print("subarray-->", b)
        print("retrieved array-->", c)
    self.assertTrue(allequal(a, c))
class ComplexNotReopenNotEndianTestCase(UnalignedAndComplexTestCase):
    # No endianness checks; file is NOT reopened between write and read.
    endiancheck = False
    reopen = False
class ComplexReopenNotEndianTestCase(UnalignedAndComplexTestCase):
    # No endianness checks; file IS reopened between write and read.
    endiancheck = False
    reopen = True
class ComplexNotReopenEndianTestCase(UnalignedAndComplexTestCase):
    # Endianness checks enabled; file is NOT reopened between write and read.
    endiancheck = True
    reopen = False
class ComplexReopenEndianTestCase(UnalignedAndComplexTestCase):
    # Endianness checks enabled; file IS reopened between write and read.
    endiancheck = True
    reopen = True
class GroupsArrayTestCase(common.TempFileMixin, TestCase):
    """This test class checks combinations of arrays with groups."""

    def test00_iterativeGroups(self):
        """Checking combinations of arrays with groups."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test00_iterativeGroups..." %
                  self.__class__.__name__)

        # Get the root group
        group = self.h5file.root

        # Set the type codes to test
        # The typecodes below does expose an ambiguity that is reported in:
        # http://projects.scipy.org/scipy/numpy/ticket/283 and
        # http://projects.scipy.org/scipy/numpy/ticket/290
        typecodes = ['b', 'B', 'h', 'H', 'i', 'I', 'l', 'L', 'q', 'f', 'd',
                     'F', 'D']
        # Half/extended-precision atoms exist only in some builds, so
        # their typecodes are added conditionally.
        if 'Float16Atom' in globals():
            typecodes.append('e')
        if 'Float96Atom' in globals() or 'Float128Atom' in globals():
            typecodes.append('g')
        if 'Complex192Atom' in globals() or 'Complex256Atom' in globals():
            typecodes.append('G')

        # Create one array per typecode, descending into a freshly
        # created subgroup after each one (a "staircase" hierarchy).
        for i, typecode in enumerate(typecodes):
            a = numpy.ones((3,), typecode)
            dsetname = 'array_' + typecode
            if common.verbose:
                print("Creating dataset:", group._g_join(dsetname))
            self.h5file.create_array(group, dsetname, a, "Large array")
            group = self.h5file.create_group(group, 'group' + str(i))

        # Reopen the file
        self._reopen()

        # Get the root group
        group = self.h5file.root

        # Get the metadata on the previously saved arrays, walking the
        # same staircase of groups built above.
        for i in range(len(typecodes)):
            # Create an array for later comparison
            a = numpy.ones((3,), typecodes[i])
            # Get the dset object hanging from group
            dset = getattr(group, 'array_' + typecodes[i])
            # Get the actual array
            b = dset.read()
            if common.verbose:
                print("Info from dataset:", dset._v_pathname)
                print("  shape ==>", dset.shape, end=' ')
                print("  type ==> %s" % dset.atom.dtype)
                print("Array b read from file. Shape: ==>", b.shape, end=' ')
                print(". Type ==> %s" % b.dtype)
            self.assertEqual(a.shape, b.shape)
            self.assertEqual(a.dtype, b.dtype)
            self.assertTrue(allequal(a, b))

            # Iterate over the next group
            group = getattr(group, 'group' + str(i))

    def test01_largeRankArrays(self):
        """Checking creation of large rank arrays (0 < rank <= 32)

        It also uses arrays ranks which ranges until maxrank.
        """

        # maximum level of recursivity (deepest group level) achieved:
        # maxrank = 32 (for a effective maximum rank of 32)
        # This limit is due to HDF5 library limitations.
        minrank = 1
        maxrank = 32

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_largeRankArrays..." %
                  self.__class__.__name__)
            print("Maximum rank for tested arrays:", maxrank)

        group = self.h5file.root
        if common.verbose:
            print("Rank array writing progress: ", end=' ')
        # One array of shape (1,)*rank per nesting level.
        for rank in range(minrank, maxrank + 1):
            # Create an array of integers, with incrementally bigger ranges
            a = numpy.ones((1,) * rank, numpy.int32)
            if common.verbose:
                print("%3d," % (rank), end=' ')
            self.h5file.create_array(group, "array", a, "Rank: %s" % rank)
            group = self.h5file.create_group(group, 'group' + str(rank))

        # Reopen the file
        self._reopen()

        group = self.h5file.root
        if common.verbose:
            print()
            print("Rank array reading progress: ")
        # Get the metadata on the previously saved arrays
        for rank in range(minrank, maxrank + 1):
            # Create an array for later comparison
            a = numpy.ones((1,) * rank, numpy.int32)
            # Get the actual array
            b = group.array.read()
            if common.verbose:
                print("%3d," % (rank), end=' ')
            if common.verbose and not allequal(a, b):
                print("Info from dataset:", group.array._v_pathname)
                print("  Shape: ==>", group.array.shape, end=' ')
                print("  typecode ==> %c" % group.array.typecode)
                print("Array b read from file. Shape: ==>", b.shape, end=' ')
                print(". Type ==> %c" % b.dtype)

            self.assertEqual(a.shape, b.shape)
            self.assertEqual(a.dtype, b.dtype)
            self.assertTrue(allequal(a, b))

            # print(self.h5file)
            # Iterate over the next group
            group = self.h5file.get_node(group, 'group' + str(rank))

        if common.verbose:
            print()  # This flush the stdout buffer
class CopyTestCase(common.TempFileMixin, TestCase):
    """Checks Array.copy(): data, properties, title and user attributes.

    The ``close`` class attribute (set by subclasses) selects whether
    the file is reopened between the copy and the checks.

    Fixes: the verbose progress messages of test03/test04/test04b used
    to print the wrong test names (test04_copy/test05_copy/test05b_copy).
    """

    def test01_copy(self):
        """Checking Array.copy() method."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_copy..." % self.__class__.__name__)

        # Create an Array
        arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
        array1 = self.h5file.create_array(
            self.h5file.root, 'array1', arr, "title array1")

        # Copy to another Array
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.title, array2.title)

    def test02_copy(self):
        """Checking Array.copy() method (where specified)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_copy..." % self.__class__.__name__)

        # Create an Array
        arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
        array1 = self.h5file.create_array(
            self.h5file.root, 'array1', arr, "title array1")

        # Copy to another Array inside a different group.
        group1 = self.h5file.create_group("/", "group1")
        array2 = array1.copy(group1, 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.group1.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.title, array2.title)

    def test03_copy(self):
        """Checking Array.copy() method (checking title copying)"""

        if common.verbose:
            print('\n', '-=' * 30)
            # Was "test04_copy" — fixed to report the actual test name.
            print("Running %s.test03_copy..." % self.__class__.__name__)

        # Create an Array
        arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
        array1 = self.h5file.create_array(
            self.h5file.root, 'array1', arr, "title array1")
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        # Copy it to another Array, overriding the title.
        array2 = array1.copy('/', 'array2', title="title array2")

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        # Assert user attributes
        if common.verbose:
            print("title of destination array-->", array2.title)
        self.assertEqual(array2.title, "title array2")

    def test04_copy(self):
        """Checking Array.copy() method (user attributes copied)"""

        if common.verbose:
            print('\n', '-=' * 30)
            # Was "test05_copy" — fixed to report the actual test name.
            print("Running %s.test04_copy..." % self.__class__.__name__)

        # Create an Array
        arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
        array1 = self.h5file.create_array(
            self.h5file.root, 'array1', arr, "title array1")
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        # Copy it to another Array, requesting attribute copying.
        array2 = array1.copy('/', 'array2', copyuserattrs=True)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Assert user attributes
        self.assertEqual(array2.attrs.attr1, "attr1")
        self.assertEqual(array2.attrs.attr2, 2)

    def test04b_copy(self):
        """Checking Array.copy() method (user attributes not copied)"""

        if common.verbose:
            print('\n', '-=' * 30)
            # Was "test05b_copy" — fixed to report the actual test name.
            print("Running %s.test04b_copy..." % self.__class__.__name__)

        # Create an Array
        arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
        array1 = self.h5file.create_array(
            self.h5file.root, 'array1', arr, "title array1")
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        # Copy it to another Array, explicitly NOT copying attributes.
        array2 = array1.copy('/', 'array2', copyuserattrs=False)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Assert the user attributes were not propagated.
        self.assertFalse(hasattr(array2.attrs, "attr1"))
        self.assertFalse(hasattr(array2.attrs, "attr2"))
class CloseCopyTestCase(CopyTestCase):
    # Close and reopen the file between the copy and the checks.
    close = 1
class OpenCopyTestCase(CopyTestCase):
    # Keep the file open between the copy and the checks.
    close = 0
class CopyIndexTestCase(common.TempFileMixin, TestCase):
    """Checks Array.copy() with start/stop/step indexes supplied by the
    CopyIndexNTestCase subclasses as class attributes."""

    def test01_index(self):
        """Checking Array.copy() method with indexes."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_index..." % self.__class__.__name__)

        # Build the source data and store it in an Array.
        data = numpy.arange(200, dtype='int32').reshape(100, 2)
        array1 = self.h5file.create_array(
            self.h5file.root, 'array1', data, "title array1")

        # Copy the selected slice into a second Array.
        array2 = array1.copy("/", 'array2', start=self.start,
                             stop=self.stop, step=self.step)

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # The copied data must equal the equivalent NumPy slice.
        expected = data[self.start:self.stop:self.step]
        self.assertTrue(allequal(expected, array2.read()))

        # The row count must match the slice length.
        if common.verbose:
            print("nrows in array2-->", array2.nrows)
            print("and it should be-->", expected.shape[0])
        self.assertEqual(expected.shape[0], array2.nrows)

    def test02_indexclosef(self):
        """Checking Array.copy() method with indexes (close file version)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_indexclosef..." % self.__class__.__name__)

        # Build the source data and store it in an Array.
        data = numpy.arange(200, dtype='int32').reshape(100, 2)
        array1 = self.h5file.create_array(
            self.h5file.root, 'array1', data, "title array1")

        # Copy the selected slice into a second Array.
        array2 = array1.copy("/", 'array2', start=self.start,
                             stop=self.stop, step=self.step)

        # Close and reopen the file, then re-fetch both nodes.
        self._reopen()
        array1 = self.h5file.root.array1
        array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # The copied data must equal the equivalent NumPy slice.
        expected = data[self.start:self.stop:self.step]
        self.assertTrue(allequal(expected, array2.read()))

        # The row count must match the slice length.
        if common.verbose:
            print("nrows in array2-->", array2.nrows)
            print("and it should be-->", expected.shape[0])
        self.assertEqual(expected.shape[0], array2.nrows)
class CopyIndex1TestCase(CopyIndexTestCase):
    # Plain slice from the beginning.
    start = 0
    stop = 7
    step = 1
class CopyIndex2TestCase(CopyIndexTestCase):
    # Negative stop: all rows but the last one.
    start = 0
    stop = -1
    step = 1
class CopyIndex3TestCase(CopyIndexTestCase):
    # Non-zero start.
    start = 1
    stop = 7
    step = 1
class CopyIndex4TestCase(CopyIndexTestCase):
    # Shorter slice from the beginning.
    start = 0
    stop = 6
    step = 1
class CopyIndex5TestCase(CopyIndexTestCase):
    # Interior slice.
    start = 3
    stop = 7
    step = 1
class CopyIndex6TestCase(CopyIndexTestCase):
    # Interior slice with a stride.
    start = 3
    stop = 6
    step = 2
class CopyIndex7TestCase(CopyIndexTestCase):
    # Stride larger than the slice length (single element).
    start = 0
    stop = 7
    step = 10
class CopyIndex8TestCase(CopyIndexTestCase):
    start = 6
    stop = -1  # Negative values means starting from the end
    step = 1
class CopyIndex9TestCase(CopyIndexTestCase):
    # Single-row slice.
    start = 3
    stop = 4
    step = 1
class CopyIndex10TestCase(CopyIndexTestCase):
    # Single-row slice with a stride.
    start = 3
    stop = 4
    step = 2
class CopyIndex11TestCase(CopyIndexTestCase):
    # Negative start and stop with a stride.
    start = -3
    stop = -1
    step = 2
class CopyIndex12TestCase(CopyIndexTestCase):
    start = -1  # Should point to the last element
    stop = None  # None should mean the last element (including it)
    step = 1
class GetItemTestCase(common.TempFileMixin, TestCase):
    """Checks __getitem__ on Array against the same indexing on the
    original NumPy object.

    Subclasses supply the fixtures (``charList``, ``charListME``,
    ``numericalList``, ``numericalListME``) and the ``close`` flag that
    selects whether the file is reopened before reading.
    """

    def test00_single(self):
        "Single element access (character types)"

        # Create the array under root and name 'somearray'
        a = self.charList
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original first element:", a[0], type(a[0]))
            print("Read first element:", arr[0], type(arr[0]))
        self.assertTrue(allequal(a[0], arr[0]))
        # The flavor must round-trip: same Python/NumPy type back.
        self.assertEqual(type(a[0]), type(arr[0]))

    def test01_single(self):
        "Single element access (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalList
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original first element:", a[0], type(a[0]))
            print("Read first element:", arr[0], type(arr[0]))
        self.assertEqual(a[0], arr[0])
        self.assertEqual(type(a[0]), type(arr[0]))

    def test02_range(self):
        "Range element access (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4])
            print("Read elements:", arr[1:4])
        self.assertTrue(allequal(a[1:4], arr[1:4]))

    def test03_range(self):
        "Range element access (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4])
            print("Read elements:", arr[1:4])
        self.assertTrue(allequal(a[1:4], arr[1:4]))

    def test04_range(self):
        "Range element access, strided (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4:2])
            print("Read elements:", arr[1:4:2])
        self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))

    def test05_range(self):
        "Range element access, strided (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4:2])
            print("Read elements:", arr[1:4:2])
        self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))

    def test06_negativeIndex(self):
        "Negative Index element access (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original last element:", a[-1])
            print("Read last element:", arr[-1])
        self.assertTrue(allequal(a[-1], arr[-1]))

    def test07_negativeIndex(self):
        "Negative Index element access (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original before last element:", a[-2])
            print("Read before last element:", arr[-2])
        # Rank-1 fixtures yield scalars here, rank-2 fixtures yield
        # arrays; pick the comparison accordingly.
        if isinstance(a[-2], numpy.ndarray):
            self.assertTrue(allequal(a[-2], arr[-2]))
        else:
            self.assertEqual(a[-2], arr[-2])

    def test08_negativeRange(self):
        "Negative range element access (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original last elements:", a[-4:-1])
            print("Read last elements:", arr[-4:-1])
        self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))

    def test09_negativeRange(self):
        "Negative range element access (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Get and compare an element
        if common.verbose:
            print("Original last elements:", a[-4:-1])
            print("Read last elements:", arr[-4:-1])
        self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))
class GI1NATestCase(GetItemTestCase, TestCase):
    # Rank-1 fixtures for the __getitem__ tests.
    title = "Rank-1 case 1"
    numericalList = numpy.array([3])
    numericalListME = numpy.array([3, 2, 1, 0, 4, 5, 6])
    charList = numpy.array(["3"], 'S')
    charListME = numpy.array(
        ["321", "221", "121", "021", "421", "521", "621"], 'S')
class GI1NAOpenTestCase(GI1NATestCase):
    # Keep the file open between write and read.
    close = 0
class GI1NACloseTestCase(GI1NATestCase):
    # Close and reopen the file between write and read.
    close = 1
class GI2NATestCase(GetItemTestCase):
    # A more complex example: rank-2 multi-element fixtures.
    title = "Rank-1,2 case 2"
    numericalList = numpy.array([3, 4])
    numericalListME = numpy.array([[3, 2, 1, 0, 4, 5, 6],
                                   [2, 1, 0, 4, 5, 6, 7],
                                   [4, 3, 2, 1, 0, 4, 5],
                                   [3, 2, 1, 0, 4, 5, 6],
                                   [3, 2, 1, 0, 4, 5, 6]])

    charList = numpy.array(["a", "b"], 'S')
    charListME = numpy.array(
        [["321", "221", "121", "021", "421", "521", "621"],
         ["21", "21", "11", "02", "42", "21", "61"],
         ["31", "21", "12", "21", "41", "51", "621"],
         ["321", "221", "121", "021",
          "421", "521", "621"],
         ["3241", "2321", "13216",
          "0621", "4421", "5421", "a621"],
         ["a321", "s221", "d121", "g021", "b421", "5vvv21", "6zxzxs21"]], 'S')
class GI2NAOpenTestCase(GI2NATestCase):
    # Keep the file open between write and read.
    close = 0
class GI2NACloseTestCase(GI2NATestCase):
    # Close and reopen the file between write and read.
    close = 1
class SetItemTestCase(common.TempFileMixin, TestCase):
    """Checks __setitem__ on Array: the same update is applied to a
    NumPy object and to the on-disk Array, then both are compared.

    Subclasses supply the fixtures (``charList``, ``charListME``,
    ``numericalList``, ``numericalListME``) and the ``close`` flag that
    selects whether the file is reopened (in append mode) before the
    update.
    """

    def test00_single(self):
        "Single element update (character types)"

        # Create the array under root and name 'somearray'
        a = self.charList
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify a single element of a and arr:
        a[0] = b"b"
        arr[0] = b"b"

        # Get and compare an element
        if common.verbose:
            print("Original first element:", a[0])
            print("Read first element:", arr[0])
        self.assertTrue(allequal(a[0], arr[0]))

    def test01_single(self):
        "Single element update (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalList
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr:
        a[0] = 333
        arr[0] = 333

        # Get and compare an element
        if common.verbose:
            print("Original first element:", a[0])
            print("Read first element:", arr[0])
        self.assertEqual(a[0], arr[0])

    def test02_range(self):
        "Range element update (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr:
        a[1:3] = b"xXx"
        arr[1:3] = b"xXx"

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4])
            print("Read elements:", arr[1:4])
        self.assertTrue(allequal(a[1:4], arr[1:4]))

    def test03_range(self):
        "Range element update (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr with values shaped to the slice:
        s = slice(1, 3, None)
        rng = numpy.arange(a[s].size)*2 + 3
        rng.shape = a[s].shape
        a[s] = rng
        arr[s] = rng

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4])
            print("Read elements:", arr[1:4])
        self.assertTrue(allequal(a[1:4], arr[1:4]))

    def test04_range(self):
        "Range element update, strided (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr:
        s = slice(1, 4, 2)
        a[s] = b"xXx"
        arr[s] = b"xXx"

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4:2])
            print("Read elements:", arr[1:4:2])
        self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))

    def test05_range(self):
        "Range element update, strided (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr with values shaped to the slice:
        s = slice(1, 4, 2)
        rng = numpy.arange(a[s].size)*2 + 3
        rng.shape = a[s].shape
        a[s] = rng
        arr[s] = rng

        # Get and compare an element
        if common.verbose:
            print("Original elements:", a[1:4:2])
            print("Read elements:", arr[1:4:2])
        self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))

    def test06_negativeIndex(self):
        "Negative Index element update (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr:
        s = -1
        a[s] = b"xXx"
        arr[s] = b"xXx"

        # Get and compare an element
        if common.verbose:
            print("Original last element:", a[-1])
            print("Read last element:", arr[-1])
        self.assertTrue(allequal(a[-1], arr[-1]))

    def test07_negativeIndex(self):
        "Negative Index element update (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr:
        s = -2
        a[s] = a[s]*2 + 3
        arr[s] = arr[s]*2 + 3

        # Get and compare an element
        if common.verbose:
            print("Original before last element:", a[-2])
            print("Read before last element:", arr[-2])
        # Rank-1 fixtures yield scalars here, rank-2 fixtures yield
        # arrays; pick the comparison accordingly.
        if isinstance(a[-2], numpy.ndarray):
            self.assertTrue(allequal(a[-2], arr[-2]))
        else:
            self.assertEqual(a[-2], arr[-2])

    def test08_negativeRange(self):
        "Negative range element update (character types)"

        # Create the array under root and name 'somearray'
        a = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr:
        s = slice(-4, -1, None)
        a[s] = b"xXx"
        arr[s] = b"xXx"

        # Get and compare an element
        if common.verbose:
            print("Original last elements:", a[-4:-1])
            print("Read last elements:", arr[-4:-1])
        self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))

    def test09_negativeRange(self):
        "Negative range element update (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of a and arr with values shaped to the slice:
        s = slice(-3, -1, None)
        rng = numpy.arange(a[s].size)*2 + 3
        rng.shape = a[s].shape
        a[s] = rng
        arr[s] = rng

        # Get and compare an element
        if common.verbose:
            print("Original last elements:", a[-4:-1])
            print("Read last elements:", arr[-4:-1])
        self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))

    def test10_outOfRange(self):
        "Out of range update (numerical types)"

        # Create the array under root and name 'somearray'
        a = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', a, "Some array")

        if self.close:
            self._reopen('a')
            arr = self.h5file.root.somearray

        # Modify elements of arr that are out of range: the Array is
        # written with a far-too-large stop (clipped to the actual
        # length), while the NumPy object uses the exact stop.  Both
        # must end up with the same contents.
        s = slice(1, a.shape[0]+1, None)
        s2 = slice(1, 1000, None)
        rng = numpy.arange(a[s].size)*2 + 3
        rng.shape = a[s].shape
        a[s] = rng
        rng2 = numpy.arange(a[s2].size)*2 + 3
        rng2.shape = a[s2].shape
        arr[s2] = rng2

        # Get and compare an element
        if common.verbose:
            print("Original last elements:", a[-4:-1])
            print("Read last elements:", arr[-4:-1])
        self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))
class SI1NATestCase(SetItemTestCase, TestCase):
    # Rank-1 fixtures for the __setitem__ tests.
    title = "Rank-1 case 1"
    numericalList = numpy.array([3])
    numericalListME = numpy.array([3, 2, 1, 0, 4, 5, 6])
    charList = numpy.array(["3"], 'S')
    charListME = numpy.array(
        ["321", "221", "121", "021", "421", "521", "621"], 'S')
class SI1NAOpenTestCase(SI1NATestCase):
    # Keep the file open between write and update.
    close = 0
class SI1NACloseTestCase(SI1NATestCase):
    # Close and reopen the file (append mode) between write and update.
    close = 1
class SI2NATestCase(SetItemTestCase):
    # A more complex example: rank-2 multi-element fixtures.
    title = "Rank-1,2 case 2"
    numericalList = numpy.array([3, 4])
    numericalListME = numpy.array([[3, 2, 1, 0, 4, 5, 6],
                                   [2, 1, 0, 4, 5, 6, 7],
                                   [4, 3, 2, 1, 0, 4, 5],
                                   [3, 2, 1, 0, 4, 5, 6],
                                   [3, 2, 1, 0, 4, 5, 6]])

    charList = numpy.array(["a", "b"], 'S')
    charListME = numpy.array(
        [["321", "221", "121", "021", "421", "521", "621"],
         ["21", "21", "11", "02", "42", "21", "61"],
         ["31", "21", "12", "21", "41", "51", "621"],
         ["321", "221", "121", "021",
          "421", "521", "621"],
         ["3241", "2321", "13216",
          "0621", "4421", "5421", "a621"],
         ["a321", "s221", "d121", "g021", "b421", "5vvv21", "6zxzxs21"]], 'S')
class SI2NAOpenTestCase(SI2NATestCase):
    # Keep the file open between write and update.
    close = 0
class SI2NACloseTestCase(SI2NATestCase):
    # Close and reopen the file (append mode) between write and update.
    close = 1
class GeneratorTestCase(common.TempFileMixin, TestCase):
    """Checks that iterating over an Array yields the same sequence as
    iterating over the original NumPy object.

    Subclasses supply the fixtures and the ``close`` flag.
    """

    def test00a_single(self):
        "Testing generator access to Arrays, single elements (char)"

        # Create the array under root and name 'somearray'
        data = self.charList
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', data, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Exhaust both iterators and compare the results.
        expected = list(data)
        got = list(arr)
        if common.verbose:
            print("Result of original iterator:", expected)
            print("Result of read generator:", got)
        self.assertEqual(expected, got)

    def test00b_me(self):
        "Testing generator access to Arrays, multiple elements (char)"

        # Create the array under root and name 'somearray'
        data = self.charListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', data, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Exhaust both iterators; rows are arrays, so compare row-wise.
        expected = list(data)
        got = list(arr)
        if common.verbose:
            print("Result of original iterator:", expected)
            print("Result of read generator:", got)
        for row, original in enumerate(expected):
            self.assertTrue(allequal(original, got[row]))

    def test01a_single(self):
        "Testing generator access to Arrays, single elements (numeric)"

        # Create the array under root and name 'somearray'
        data = self.numericalList
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', data, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Exhaust both iterators and compare the results.
        expected = list(data)
        got = list(arr)
        if common.verbose:
            print("Result of original iterator:", expected)
            print("Result of read generator:", got)
        self.assertEqual(expected, got)

    def test01b_me(self):
        "Testing generator access to Arrays, multiple elements (numeric)"

        # Create the array under root and name 'somearray'
        data = self.numericalListME
        arr = self.h5file.create_array(
            self.h5file.root, 'somearray', data, "Some array")

        if self.close:
            self._reopen()
            arr = self.h5file.root.somearray

        # Exhaust both iterators; rows are arrays, so compare row-wise.
        expected = list(data)
        got = list(arr)
        if common.verbose:
            print("Result of original iterator:", expected)
            print("Result of read generator:", got)
        for row, original in enumerate(expected):
            self.assertTrue(allequal(original, got[row]))
class GE1NATestCase(GeneratorTestCase):
    # Rank-1 fixtures for the generator tests.
    title = "Rank-1 case 1"
    numericalList = numpy.array([3])
    numericalListME = numpy.array([3, 2, 1, 0, 4, 5, 6])
    charList = numpy.array(["3"], 'S')
    charListME = numpy.array(
        ["321", "221", "121", "021", "421", "521", "621"], 'S')
class GE1NAOpenTestCase(GE1NATestCase):
    # Keep the file open between write and iteration.
    close = 0
class GE1NACloseTestCase(GE1NATestCase):
    # Close and reopen the file between write and iteration.
    close = 1
class GE2NATestCase(GeneratorTestCase):
    # A more complex example: rank-2 multi-element fixtures.
    title = "Rank-1,2 case 2"
    numericalList = numpy.array([3, 4])
    numericalListME = numpy.array([[3, 2, 1, 0, 4, 5, 6],
                                   [2, 1, 0, 4, 5, 6, 7],
                                   [4, 3, 2, 1, 0, 4, 5],
                                   [3, 2, 1, 0, 4, 5, 6],
                                   [3, 2, 1, 0, 4, 5, 6]])

    charList = numpy.array(["a", "b"], 'S')
    charListME = numpy.array(
        [["321", "221", "121", "021", "421", "521", "621"],
         ["21", "21", "11", "02", "42", "21", "61"],
         ["31", "21", "12", "21", "41", "51", "621"],
         ["321", "221", "121", "021",
          "421", "521", "621"],
         ["3241", "2321", "13216",
          "0621", "4421", "5421", "a621"],
         ["a321", "s221", "d121", "g021", "b421", "5vvv21", "6zxzxs21"]], 'S')
class GE2NAOpenTestCase(GE2NATestCase):
    # Keep the file open between write and iteration.
    close = 0
class GE2NACloseTestCase(GE2NATestCase):
    # Close and reopen the file between write and iteration.
    close = 1
class NonHomogeneousTestCase(common.TempFileMixin, TestCase):

    def test(self):
        """Test for creation of non-homogeneous arrays."""

        # Regression test for ticket #12: a ragged nested list must be
        # rejected, and no node may be left behind afterwards.
        with self.assertRaises(ValueError):
            self.h5file.create_array('/', 'test', [1, [2, 3]])
        with self.assertRaises(NoSuchNodeError):
            self.h5file.remove_node('/test')
class TruncateTestCase(common.TempFileMixin, TestCase):

    def test(self):
        """Test for unability to truncate Array objects."""
        # Array nodes are fixed-size: truncate() must be rejected.
        arr = self.h5file.create_array('/', 'array1', [0, 2])
        with self.assertRaises(TypeError):
            arr.truncate(0)
class PointSelectionTestCase(common.TempFileMixin, TestCase):
def setUp(self):
    """Build the sample int32 array (shape from ``self.shape``, which
    subclasses must define — TODO confirm against the subclasses) and
    the (start, stop) value limits exercised by the tests."""
    super(PointSelectionTestCase, self).setUp()

    # Limits for selections
    self.limits = [
        (0, 1),  # just one element
        (20, -10),  # no elements
        (-10, 4),  # several elements
        (0, 10),  # several elements (again)
    ]

    # Create a sample array
    size = numpy.prod(self.shape)
    nparr = numpy.arange(size, dtype=numpy.int32).reshape(self.shape)
    self.nparr = nparr
    # Same data stored on disk, for comparing selections against NumPy.
    self.tbarr = self.h5file.create_array(self.h5file.root, 'array', nparr)
def test01a_read(self):
    """Test for point-selections (read, boolean keys)."""
    nparr = self.nparr
    tbarr = self.tbarr
    for value1, value2 in self.limits:
        key = (nparr >= value1) & (nparr < value2)
        if common.verbose:
            print("Selection to test:", key)
        a = nparr[key]
        b = tbarr[key]
        # if common.verbose:
        #     print("NumPy selection:", a)
        #     print("PyTables selection:", b)
        # ``numpy.alltrue`` was removed in NumPy 2.0; ``numpy.all`` is
        # the long-standing equivalent.
        self.assertTrue(
            numpy.all(a == b),
            "NumPy array and PyTables selections does not match.")
def test01b_read(self):
    """Test for point-selections (read, integer keys)."""
    nparr = self.nparr
    tbarr = self.tbarr
    for value1, value2 in self.limits:
        key = numpy.where((nparr >= value1) & (nparr < value2))
        if common.verbose:
            print("Selection to test:", key)
        a = nparr[key]
        b = tbarr[key]
        # if common.verbose:
        #     print("NumPy selection:", a)
        #     print("PyTables selection:", b)
        # ``numpy.alltrue`` was removed in NumPy 2.0; ``numpy.all`` is
        # the long-standing equivalent.
        self.assertTrue(
            numpy.all(a == b),
            "NumPy array and PyTables selections does not match.")
def test01c_read(self):
"""Test for point-selections (read, float keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = numpy.where((nparr >= value1) & (nparr < value2))
if common.verbose:
print("Selection to test:", key)
# a = nparr[key]
fkey = numpy.array(key, "f4")
self.assertRaises((IndexError, TypeError), tbarr.__getitem__, fkey)
def test01d_read(self):
nparr = self.nparr
tbarr = self.tbarr
for key in self.working_keyset:
if common.verbose:
print("Selection to test:", key)
a = nparr[key]
b = tbarr[key]
npt.assert_array_equal(
a, b, "NumPy array and PyTables selections does not match.")
def test01e_read(self):
tbarr = self.tbarr
for key in self.not_working_keyset:
if common.verbose:
print("Selection to test:", key)
self.assertRaises(IndexError, tbarr.__getitem__, key)
def test02a_write(self):
"""Test for point-selections (write, boolean keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = (nparr >= value1) & (nparr < value2)
if common.verbose:
print("Selection to test:", key)
s = nparr[key]
nparr[key] = s * 2
tbarr[key] = s * 2
a = nparr[:]
b = tbarr[:]
# if common.verbose:
# print("NumPy modified array:", a)
# print("PyTables modifyied array:", b)
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
def test02b_write(self):
"""Test for point-selections (write, integer keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = numpy.where((nparr >= value1) & (nparr < value2))
if common.verbose:
print("Selection to test:", key)
s = nparr[key]
nparr[key] = s * 2
tbarr[key] = s * 2
a = nparr[:]
b = tbarr[:]
# if common.verbose:
# print("NumPy modified array:", a)
# print("PyTables modifyied array:", b)
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
def test02c_write(self):
"""Test for point-selections (write, integer values, broadcast)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = numpy.where((nparr >= value1) & (nparr < value2))
if common.verbose:
print("Selection to test:", key)
# s = nparr[key]
nparr[key] = 2 # force a broadcast
tbarr[key] = 2 # force a broadcast
a = nparr[:]
b = tbarr[:]
# if common.verbose:
# print("NumPy modified array:", a)
# print("PyTables modifyied array:", b)
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
class PointSelection0(PointSelectionTestCase):
    # 1-D case: keys are plain index lists.
    shape = (3,)
    working_keyset = [
        [0, 1],
        [0, -1],
    ]
    not_working_keyset = [
        [0, 3],
        [0, 4],
        [0, -4],
    ]
class PointSelection1(PointSelectionTestCase):
    # 3-D case: each key lists one coordinate tuple per axis.
    shape = (5, 3, 3)
    working_keyset = [
        [(0, 0), (0, 1), (0, 0)],
        [(0, 0), (0, -1), (0, 0)],
    ]
    not_working_keyset = [
        [(0, 0), (0, 3), (0, 0)],
        [(0, 0), (0, 4), (0, 0)],
        [(0, 0), (0, -4), (0, 0)],
        [(0, 0), (0, -5), (0, 0)]
    ]
class PointSelection2(PointSelectionTestCase):
    # 2-D case.
    shape = (7, 3)
    working_keyset = [
        [(0, 0), (0, 1)],
        [(0, 0), (0, -1)],
        [(0, 0), (0, -2)],
    ]
    not_working_keyset = [
        [(0, 0), (0, 3)],
        [(0, 0), (0, 4)],
        [(0, 0), (0, -4)],
        [(0, 0), (0, -5)],
    ]
class PointSelection3(PointSelectionTestCase):
    # 4-D case.
    shape = (4, 3, 2, 1)
    working_keyset = [
        [(0, 0), (0, 1), (0, 0), (0, 0)],
        [(0, 0), (0, -1), (0, 0), (0, 0)],
    ]
    not_working_keyset = [
        [(0, 0), (0, 3), (0, 0), (0, 0)],
        [(0, 0), (0, 4), (0, 0), (0, 0)],
        [(0, 0), (0, -4), (0, 0), (0, 0)],
    ]
class PointSelection4(PointSelectionTestCase):
    # 5-D case.
    shape = (1, 3, 2, 5, 6)
    working_keyset = [
        [(0, 0), (0, 1), (0, 0), (0, 0), (0, 0)],
        [(0, 0), (0, -1), (0, 0), (0, 0), (0, 0)],
    ]
    not_working_keyset = [
        [(0, 0), (0, 3), (0, 0), (0, 0), (0, 0)],
        [(0, 0), (0, 4), (0, 0), (0, 0), (0, 0)],
        [(0, 0), (0, -4), (0, 0), (0, 0), (0, 0)],
    ]
class FancySelectionTestCase(common.TempFileMixin, TestCase):
    # Compares PyTables fancy selections (mixes of lists, slices, scalars
    # and Ellipsis) against NumPy, including selections that PyTables must
    # reject.  Subclasses define a 3-D ``shape``.
    def setUp(self):
        super(FancySelectionTestCase, self).setUp()
        M, N, O = self.shape
        # The next are valid selections for both NumPy and PyTables
        self.working_keyset = [
            ([1, 3], slice(1, N-1), 2),
            ([M-1, 1, 3, 2], slice(None), 2),  # unordered lists supported
            (slice(M), [N-1, 1, 0], slice(None)),
            (slice(1, M, 3), slice(1, N), [O-1, 1, 0]),
            (M-1, [2, 1], 1),
            (1, 2, 1),  # regular selection
            ([1, 2], -2, -1),  # negative indices
            ([1, -2], 2, -1),  # more negative indices
            ([1, -2], 2, Ellipsis),  # one ellipsis
            (Ellipsis, [1, 2]),  # one ellipsis
            (numpy.array(
                [1, -2], 'i4'), 2, -1),  # array 32-bit instead of list
            (numpy.array(
                [-1, 2], 'i8'), 2, -1),  # array 64-bit instead of list
        ]
        # Using booleans instead of ints is deprecated since numpy 1.8
        # Tests for keys that have to support the __index__ attribute
        #if (sys.version_info[0] >= 2 and sys.version_info[1] >= 5):
        #    self.working_keyset.append(
        #        (False, True),  # equivalent to (0,1) ;-)
        #    )
        # Valid selections for NumPy, but not for PyTables (yet)
        # The next should raise an IndexError
        self.not_working_keyset = [
            numpy.array([False, True], dtype="b1"),  # boolean arrays
            ([1, 2, 1], 2, 1),  # repeated values
            ([1, 2], 2, [1, 2]),  # several lists
            ([], 2, 1),  # empty selections
            (Ellipsis, [1, 2], Ellipsis),  # several ellipsis
            # Using booleans instead of ints is deprecated since numpy 1.8
            ([False, True]),  # boolean values with incompatible shape
        ]
        # The next should raise an IndexError in both NumPy and PyTables
        self.not_working_oob = [
            ([1, 2], 2, 1000),  # out-of-bounds selections
            ([1, 2], 2000, 1),  # out-of-bounds selections
        ]
        # The next should raise a IndexError in both NumPy and PyTables
        self.not_working_too_many = [
            ([1, 2], 2, 1, 1),
        ]
        # Create a sample array: each 2-D plane is ``data`` scaled by its
        # first-axis index.
        nparr = numpy.empty(self.shape, dtype=numpy.int32)
        data = numpy.arange(N * O, dtype=numpy.int32).reshape(N, O)
        # NOTE(review): ``xrange`` must come from a py2/py3 compatibility
        # import at the top of the file — confirm.
        for i in xrange(M):
            nparr[i] = data * i
        self.nparr = nparr
        self.tbarr = self.h5file.create_array(self.h5file.root, 'array', nparr)
    def test01a_read(self):
        """Test for fancy-selections (working selections, read)."""
        nparr = self.nparr
        tbarr = self.tbarr
        for key in self.working_keyset:
            if common.verbose:
                print("Selection to test:", key)
            a = nparr[key]
            b = tbarr[key]
            # if common.verbose:
            #     print("NumPy selection:", a)
            #     print("PyTables selection:", b)
            self.assertTrue(
                numpy.alltrue(a == b),
                "NumPy array and PyTables selections does not match.")
    def test01b_read(self):
        """Test for fancy-selections (not working selections, read)."""
        # nparr = self.nparr
        tbarr = self.tbarr
        for key in self.not_working_keyset:
            if common.verbose:
                print("Selection to test:", key)
            # a = nparr[key]
            self.assertRaises(IndexError, tbarr.__getitem__, key)
    def test01c_read(self):
        """Test for fancy-selections (out-of-bound indexes, read)."""
        nparr = self.nparr
        tbarr = self.tbarr
        for key in self.not_working_oob:
            if common.verbose:
                print("Selection to test:", key)
            self.assertRaises(IndexError, nparr.__getitem__, key)
            self.assertRaises(IndexError, tbarr.__getitem__, key)
    def test01d_read(self):
        """Test for fancy-selections (too many indexes, read)."""
        nparr = self.nparr
        tbarr = self.tbarr
        for key in self.not_working_too_many:
            if common.verbose:
                print("Selection to test:", key)
            # ValueError for numpy 1.6.x and earlier
            # IndexError in numpy > 1.8.0
            self.assertRaises((ValueError, IndexError), nparr.__getitem__, key)
            self.assertRaises(IndexError, tbarr.__getitem__, key)
    def test02a_write(self):
        """Test for fancy-selections (working selections, write)."""
        nparr = self.nparr
        tbarr = self.tbarr
        for key in self.working_keyset:
            if common.verbose:
                print("Selection to test:", key)
            s = nparr[key]
            nparr[key] = s * 2
            tbarr[key] = s * 2
            a = nparr[:]
            b = tbarr[:]
            # if common.verbose:
            #     print("NumPy modified array:", a)
            #     print("PyTables modified array:", b)
            self.assertTrue(
                numpy.alltrue(a == b),
                "NumPy array and PyTables modifications does not match.")
    def test02b_write(self):
        """Test for fancy-selections (working selections, write, broadcast)."""
        nparr = self.nparr
        tbarr = self.tbarr
        for key in self.working_keyset:
            if common.verbose:
                print("Selection to test:", key)
            # s = nparr[key]
            nparr[key] = 2  # broadcast value
            tbarr[key] = 2  # broadcast value
            a = nparr[:]
            b = tbarr[:]
            # if common.verbose:
            #     print("NumPy modified array:", a)
            #     print("PyTables modified array:", b)
            self.assertTrue(
                numpy.alltrue(a == b),
                "NumPy array and PyTables modifications does not match.")
class FancySelection1(FancySelectionTestCase):
    # Smallest shape the fancy-selection keys are valid for.
    shape = (5, 3, 3)  # Minimum values
class FancySelection2(FancySelectionTestCase):
    # shape = (5, 3, 3)  # Minimum values
    shape = (7, 3, 3)
class FancySelection3(FancySelectionTestCase):
    # shape = (5, 3, 3)  # Minimum values
    shape = (7, 4, 5)
class FancySelection4(FancySelectionTestCase):
    # shape = (5, 3, 3)  # Minimum values
    shape = (5, 3, 10)
class CopyNativeHDF5MDAtom(TestCase):
    # Copies an array with a native HDF5 multidimensional atom and checks
    # that atom and shape survive the copy, also after reopening the copy.
    def setUp(self):
        super(CopyNativeHDF5MDAtom, self).setUp()
        filename = self._testFilename("array_mdatom.h5")
        self.h5file = tables.open_file(filename, "r")
        self.arr = self.h5file.root.arr
        # NOTE(review): tempfile.mktemp is deprecated and race-prone;
        # tempfile.mkstemp would be safer here.
        self.copy = tempfile.mktemp(".h5")
        self.copyh = tables.open_file(self.copy, mode="w")
        self.arr2 = self.arr.copy(self.copyh.root, newname="arr2")
    def tearDown(self):
        self.h5file.close()
        self.copyh.close()
        os.remove(self.copy)
        super(CopyNativeHDF5MDAtom, self).tearDown()
    def test01_copy(self):
        """Checking that native MD atoms are copied as-is"""
        self.assertEqual(self.arr.atom, self.arr2.atom)
        self.assertEqual(self.arr.shape, self.arr2.shape)
    def test02_reopen(self):
        """Checking that native MD atoms are copied as-is (re-open)"""
        self.copyh.close()
        self.copyh = tables.open_file(self.copy, mode="r")
        self.arr2 = self.copyh.root.arr2
        self.assertEqual(self.arr.atom, self.arr2.atom)
        self.assertEqual(self.arr.shape, self.arr2.shape)
class AccessClosedTestCase(common.TempFileMixin, TestCase):
    # Any access to an array node after its file is closed must raise
    # ClosedNodeError.
    def setUp(self):
        super(AccessClosedTestCase, self).setUp()
        a = numpy.zeros((10, 10))
        self.array = self.h5file.create_array(self.h5file.root, 'array', a)
    def test_read(self):
        self.h5file.close()
        self.assertRaises(ClosedNodeError, self.array.read)
    def test_getitem(self):
        self.h5file.close()
        self.assertRaises(ClosedNodeError, self.array.__getitem__, 0)
    def test_setitem(self):
        self.h5file.close()
        self.assertRaises(ClosedNodeError, self.array.__setitem__, 0, 0)
class BroadcastTest(common.TempFileMixin, TestCase):
    def test(self):
        """Test correct broadcasting when the array atom is not scalar."""
        array_shape = (2, 3)
        element_shape = (3,)
        # ``numpy.int`` was a deprecated alias of the builtin ``int``
        # (removed in NumPy 1.24); use the builtin directly.
        dtype = numpy.dtype((int, element_shape))
        atom = Atom.from_dtype(dtype)
        h5arr = self.h5file.create_carray(self.h5file.root, 'array',
                                          atom, array_shape)
        size = numpy.prod(element_shape)
        nparr = numpy.arange(size).reshape(element_shape)
        # Assigning a single element-shaped value to one row must broadcast
        # over that row's atom dimensions.
        h5arr[0] = nparr
        self.assertTrue(numpy.all(h5arr[0] == nparr))
class TestCreateArrayArgs(common.TempFileMixin, TestCase):
    # Exercises the calling conventions of File.create_array: positional
    # arguments, keyword arguments, obj/atom/shape combinations, and the
    # TypeError raised on inconsistent argument combinations.
    where = '/'
    name = 'array'
    obj = numpy.array([[1, 2], [3, 4]])
    title = 'title'
    byteorder = None
    createparents = False
    atom = Atom.from_dtype(obj.dtype)
    shape = obj.shape
    def test_positional_args(self):
        """Create from obj passed positionally."""
        self.h5file.create_array(self.where, self.name, self.obj, self.title)
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(self.obj, nparr))
    def test_positional_args_atom_shape(self):
        """Create from positional atom+shape with obj=None: array is zeroed."""
        self.h5file.create_array(self.where, self.name, None, self.title,
                                 self.byteorder, self.createparents,
                                 self.atom, self.shape)
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(numpy.zeros_like(self.obj), nparr))
    def test_kwargs_obj(self):
        """Create from obj passed as keyword."""
        self.h5file.create_array(self.where, self.name, title=self.title,
                                 obj=self.obj)
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(self.obj, nparr))
    def test_kwargs_atom_shape_01(self):
        """Create from keyword atom+shape, then fill via __setitem__."""
        ptarr = self.h5file.create_array(self.where, self.name,
                                         title=self.title,
                                         atom=self.atom, shape=self.shape)
        ptarr[...] = self.obj
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(self.obj, nparr))
    def test_kwargs_atom_shape_02(self):
        """Create from keyword atom+shape without filling: array stays zeroed."""
        ptarr = self.h5file.create_array(self.where, self.name,
                                         title=self.title,
                                         atom=self.atom, shape=self.shape)
        #ptarr[...] = self.obj
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(numpy.zeros_like(self.obj), nparr))
    def test_kwargs_obj_atom(self):
        """obj plus a matching atom is accepted."""
        ptarr = self.h5file.create_array(self.where, self.name,
                                         title=self.title,
                                         obj=self.obj,
                                         atom=self.atom)
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(self.obj, nparr))
    def test_kwargs_obj_shape(self):
        """obj plus a matching shape is accepted."""
        ptarr = self.h5file.create_array(self.where, self.name,
                                         title=self.title,
                                         obj=self.obj,
                                         shape=self.shape)
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(self.obj, nparr))
    def test_kwargs_obj_atom_shape(self):
        """obj plus matching atom and shape is accepted."""
        ptarr = self.h5file.create_array(self.where, self.name,
                                         title=self.title,
                                         obj=self.obj,
                                         atom=self.atom,
                                         shape=self.shape)
        self.h5file.close()
        self.h5file = tables.open_file(self.h5fname)
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, self.shape)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertTrue(allequal(self.obj, nparr))
    def test_kwargs_obj_atom_error(self):
        """An atom whose dtype conflicts with obj must raise TypeError."""
        atom = Atom.from_dtype(numpy.dtype('complex'))
        #shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_array,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=atom)
    def test_kwargs_obj_shape_error(self):
        """A shape that conflicts with obj must raise TypeError."""
        #atom = Atom.from_dtype(numpy.dtype('complex'))
        shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_array,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          shape=shape)
    def test_kwargs_obj_atom_shape_error_01(self):
        """Conflicting atom (with matching shape) must raise TypeError."""
        atom = Atom.from_dtype(numpy.dtype('complex'))
        #shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_array,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=atom,
                          shape=self.shape)
    def test_kwargs_obj_atom_shape_error_02(self):
        """Conflicting shape (with matching atom) must raise TypeError."""
        #atom = Atom.from_dtype(numpy.dtype('complex'))
        shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_array,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=self.atom,
                          shape=shape)
    def test_kwargs_obj_atom_shape_error_03(self):
        """Conflicting atom and shape together must raise TypeError."""
        atom = Atom.from_dtype(numpy.dtype('complex'))
        shape = self.shape + self.shape
        self.assertRaises(TypeError,
                          self.h5file.create_array,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          atom=atom,
                          shape=shape)
#----------------------------------------------------------------------
def suite():
    """Return a ``unittest.TestSuite`` containing all enabled test cases
    of this module, repeated ``niter`` times."""
    theSuite = unittest.TestSuite()
    # All enabled test case classes, in their historical order.
    test_classes = [
        # The scalar case test should be refined in order to work
        Basic0DOneTestCase,
        Basic0DTwoTestCase,
        # Basic1DZeroTestCase,
        Basic1DOneTestCase,
        Basic1DTwoTestCase,
        Basic1DThreeTestCase,
        Basic2DOneTestCase,
        Basic2DTwoTestCase,
        Basic10DTestCase,
        # The 32 dimensions case is tested on GroupsArray
        # Basic32DTestCase,
        ReadOutArgumentTests,
        SizeOnDiskInMemoryPropertyTestCase,
        GroupsArrayTestCase,
        ComplexNotReopenNotEndianTestCase,
        ComplexReopenNotEndianTestCase,
        ComplexNotReopenEndianTestCase,
        ComplexReopenEndianTestCase,
        CloseCopyTestCase,
        OpenCopyTestCase,
        CopyIndex1TestCase,
        CopyIndex2TestCase,
        CopyIndex3TestCase,
        CopyIndex4TestCase,
        CopyIndex5TestCase,
        CopyIndex6TestCase,
        CopyIndex7TestCase,
        CopyIndex8TestCase,
        CopyIndex9TestCase,
        CopyIndex10TestCase,
        CopyIndex11TestCase,
        CopyIndex12TestCase,
        GI1NAOpenTestCase,
        GI1NACloseTestCase,
        GI2NAOpenTestCase,
        GI2NACloseTestCase,
        SI1NAOpenTestCase,
        SI1NACloseTestCase,
        SI2NAOpenTestCase,
        SI2NACloseTestCase,
        GE1NAOpenTestCase,
        GE1NACloseTestCase,
        GE2NAOpenTestCase,
        GE2NACloseTestCase,
        NonHomogeneousTestCase,
        TruncateTestCase,
        FancySelection1,
        FancySelection2,
        FancySelection3,
        FancySelection4,
        PointSelection0,
        PointSelection1,
        PointSelection2,
        PointSelection3,
        PointSelection4,
        CopyNativeHDF5MDAtom,
        AccessClosedTestCase,
        TestCreateArrayArgs,
        BroadcastTest,
    ]
    niter = 1
    for _ in range(niter):
        for klass in test_classes:
            theSuite.addTest(unittest.makeSuite(klass))
    return theSuite
# Allow running this test module standalone.
if __name__ == '__main__':
    common.print_versions()
    unittest.main(defaultTest='suite')
|
import cocotb
from cocotb.triggers import Timer, Edge
def gcd(a: int, b: int) -> int:
    """Greatest common divisor of two positive integers, computed with the
    subtraction-based Euclidean algorithm (mirrors the hardware design)."""
    x, y = a, b
    while x != y:
        # Repeatedly subtract the smaller value from the larger one.
        if x > y:
            x -= y
        else:
            y -= x
    return x
async def toggle(signal):
    """Invert a binary signal wire: 0 becomes 1, anything else becomes 0."""
    signal.value = 1 if signal.value == 0 else 0
async def reset(dut):
    """Drive the DUT reset sequence: assert reset, zero all inputs,
    hold 5 ns, then release reset and settle one step."""
    # Reset sequence
    dut.reset.value = 1
    dut.io_in_data_a.value = 0
    dut.io_in_data_b.value = 0
    dut.io_out_ack.value = 0
    dut.io_in_req.value = 0
    await Timer(5, "ns")
    dut.reset.value = 0
    # One unit-less simulation step to let the released reset propagate.
    await Timer(1)
async def compute_gcd(dut, a, b):
    """Performs all of the data poking necessary to compute the GCD of two values"""
    # Toggle req to start the circuit
    dut.io_in_data_a.value = a
    dut.io_in_data_b.value = b
    await toggle(dut.io_in_req)
    # Wait for the DUT to signal a result, then settle one step before reading.
    await Edge(dut.io_out_req)
    await Timer(1)
    # Both output wires are checked against the software reference model.
    assert dut.io_out_data_a == gcd(a, b)
    assert dut.io_out_data_b == gcd(a, b)
    await Timer(1, "ns")
    # Acknowledge the result so the DUT can accept the next request.
    await toggle(dut.io_out_ack)
    await Timer(2, "ns")
@cocotb.test()
async def check_in_req(dut):
    # Checks one full GCD computation, then toggles io_in_req several more
    # times without ever acknowledging — presumably to exercise the request
    # handshake while the DUT is still waiting for io_out_ack; confirm
    # against the DUT's handshake spec.
    await reset(dut)
    # Custom compute gcd without acknowledge in the end
    dut.io_in_data_a.value = 4
    dut.io_in_data_b.value = 2
    await toggle(dut.io_in_req)
    await Edge(dut.io_out_req)
    await Timer(1)
    assert dut.io_out_data_a == gcd(4, 2)
    assert dut.io_out_data_b == gcd(4, 2)
    # End of gcd
    await Timer(30, "ns")
    await toggle(dut.io_in_req)
    await Timer(30, "ns")
    await toggle(dut.io_in_req)
    await Timer(50, "ns")
    await toggle(dut.io_in_req)
    await Timer(30, "ns")
    await toggle(dut.io_in_req)
    await Timer(30, "ns")
    await toggle(dut.io_in_req)
#
# @cocotb.test()
# async def gcd_4_2(dut):
# """It should compute the GCD of 4 and 2"""
# await reset(dut)
# await compute_gcd(dut, 4, 2)
#
#
# @cocotb.test()
# async def gcd_5_15(dut):
# """It should compute the GCD of 5 and 15"""
# await reset(dut)
# await compute_gcd(dut, 5, 15)
#
#
# @cocotb.test()
# async def multiple_gcd(dut):
# """It should compute multiple GCD's in a row"""
# await reset(dut)
# await compute_gcd(dut, 9, 12)
# await compute_gcd(dut, 42, 16)
# await compute_gcd(dut, 42, 18)
#
#
# @cocotb.test()
# async def await_completion(dut):
# """It should not take in new inputs before outputs have been generated"""
# await reset(dut)
# dut.io_in_data_a.value = 18
# dut.io_in_data_b.value = 15
# await toggle(dut.io_in_req)
# await Edge(dut.io_in_ack)
# await Timer(2, "ns")
#
# # Once acknowledged, we change the input data already
# # Should not impact the output
# dut.io_in_data_a.value = 6
# dut.io_in_data_b.value = 4
# await toggle(dut.io_in_req)
#
# # Await the output
# await Edge(dut.io_out_req)
# await Timer(1)
# assert dut.io_out_data_a == gcd(18, 15)
# assert dut.io_out_data_b == gcd(18, 15)
# # Input-ack should still be high, as it hasn't taken in new data yet
# assert dut.io_in_ack.value == 1
# await Timer(1, "ns")
# # Acknowledge first data received
# await toggle(dut.io_out_ack)
# # Now, wait for next data to be received
# await Edge(dut.io_out_req)
# await Timer(1)
# assert dut.io_out_data_a == gcd(6, 4)
# assert dut.io_out_data_b == gcd(6, 4)
# await Timer(2, "ns")
|
// Generates a letter-only (sms url friendly) key for ids
const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';

/**
 * Build a random key of `len` letters drawn from A-Z/a-z.
 *
 * Bug fix: the random index was previously scaled by `len` instead of
 * `characters.length`, so keys longer than 52 characters came back short
 * (out-of-range slices yield '') and shorter keys only ever used a prefix
 * of the alphabet.
 *
 * @param {number} len - Number of letters in the generated key.
 * @returns {string} Random key of exactly `len` letters.
 */
const generateKey = (len) => {
  let result = '';
  for (let i = 0; i < len; i++) {
    const pos = Math.floor(Math.random() * characters.length);
    result += characters[pos];
  }
  return result;
};

// Guarded so the file also loads in an ES-module context (e.g. tests);
// under CommonJS the export is unchanged.
if (typeof module !== 'undefined' && module.exports) {
  module.exports = generateKey;
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Print the numbers 0-4, one per second."""
import time

# Modernized from Python-2-only syntax: ``xrange`` -> ``range`` and the
# print statement -> print(); stray trailing semicolon removed.
for i in range(5):
    print(i)
    time.sleep(1)
|
/** When your routing table is too long, you can split it into small modules **/
import Layout from '@/layout'

// Every child route of this area renders the same complex-table view,
// differing only in its report-type suffix and displayed title.
const area = 'gxq'

const makeChild = ([type, title]) => ({
  path: area + type,
  component: () => import('@/views/table/complex-table'),
  name: area + type,
  meta: { title, area, type }
})

const gxq = {
  path: '/gxq',
  component: Layout,
  redirect: '/gxq/gxqzzb',
  name: 'gxq',
  meta: {
    title: '高新区',
    icon: 'table'
  },
  children: [
    ['zzb', '早中班'],
    ['rcxc', '日常巡查'],
    ['gdhc', '工单核查'],
    ['gdcc', '工单抽查'],
    ['wtsl', '问题梳理'],
    ['zglxd', '整改联系单'],
    ['ldjb', '领导交办'],
    ['wmcs', '文明城市']
  ].map(makeChild)
}

export default gxq
|
import os
import subprocess
from typing import Optional, List
class CompilerBase(object):
    """ Base class for a compiler. """
    # Source extensions (with leading dot) this compiler accepts.
    supported_extensions: List[str] = []
    # Flags used when the caller does not override them.
    default_flags: List[str] = []
    def __init__(self, src_path: str, *, output_ext: str = '.e',
                 flags: Optional[List[str]] = None, timeout: int = 10):
        """ Instantiates a new compiler.
        :param src_path: Source file to compile.
        :param output_ext: Extension for output file.
        :param flags: Overrides default compiler flags (argv list).
        :param timeout: Timeout (seconds) for compilation to complete.
        """
        src_path = os.path.abspath(src_path)
        src_path_without_ext, src_ext = os.path.splitext(src_path)
        # Fail fast if this compiler does not handle the file type.
        assert src_ext in self.supported_extensions
        self.src_path = src_path
        self.flags = self.default_flags if flags is None else flags
        self.timeout = timeout
        # The executable is written next to the source file.
        self.exe_path = src_path_without_ext + output_ext
        # Captured compiler output (stdout+stderr) of the last `compile()`.
        self.log = ''
    def cmd(self) -> List[str]:
        """ Returns the command (argv list) to compile the code. """
        raise NotImplementedError
    def compile(self) -> bool:
        """ Runs compile command (`cmd()`) and sets `log`.
        :return: True on success
        """
        # NOTE(review): subprocess.TimeoutExpired is not caught here and
        # will propagate to the caller.
        try:
            self.log = subprocess.check_output(self.cmd(), stderr=subprocess.STDOUT,
                                               timeout=self.timeout).decode('utf-8')
            return True
        except subprocess.CalledProcessError as e:
            self.log = e.output.decode('utf-8')
            return False
class CppCompiler(CompilerBase):
    """ Compiles C++ sources with g++. """
    supported_extensions = ['.cc', '.cpp']
    default_flags = ['-O3', '--static', '--std=c++17']
    def cmd(self) -> List[str]:
        return ['g++'] + self.flags + [f'-o{self.exe_path}', f'{self.src_path}']
class PascalCompiler(CompilerBase):
    """ Compiles Pascal sources with pc. """
    supported_extensions = ['.pas']
    default_flags = ['-O3']
    def cmd(self) -> List[str]:
        return ['pc'] + self.flags + [f'-o{self.exe_path}', f'{self.src_path}']
def compiler(path, **kwargs):
    """Select and instantiate the compiler matching the extension of ``path``.

    :raises LookupError: if no registered compiler handles the extension.
    """
    ext = os.path.splitext(path)[1]
    matching = next(
        (cls for cls in CompilerBase.__subclasses__()
         if ext in cls.supported_extensions),
        None,
    )
    if matching is None:
        raise LookupError(f'Unknown source code extension {ext}')
    return matching(path, **kwargs)
|
var webpack = require('webpack');
var ExtractTextPlugin = require('extract-text-webpack-plugin');
var HtmlWebpackPlugin = require('html-webpack-plugin');
// Environment flag: 'dev' or 'online' (defaults to 'dev').
var WEBPACK_ENV = process.env.WEBPACK_ENV || 'dev';
// Builds the option object handed to html-webpack-plugin for one page.
var getHtmlConfig = function(name, title) {
    var viewFile = name + '.html';
    var options = {};
    options.template = './src/view/' + viewFile;
    options.filename = 'view/' + viewFile;
    options.favicon = './favicon.ico';
    options.title = title;
    options.inject = true;
    options.hash = true;
    // Only the shared bundle and the page's own bundle are injected.
    options.chunks = ['common', name];
    return options;
};
// webpack config
var config = {
    // One entry per page, plus a shared 'common' bundle.
    entry: {
        'common': ['./src/page/common/index.js'],
        'index': ['./src/page/index/index.js'],
        'list': ['./src/page/list/index.js'],
        'detail': ['./src/page/detail/index.js'],
        'cart': ['./src/page/cart/index.js'],
        'order-confirm': ['./src/page/order-confirm/index.js'],
        'order-list': ['./src/page/order-list/index.js'],
        'order-detail': ['./src/page/order-detail/index.js'],
        'payment': ['./src/page/payment/index.js'],
        'user-login': ['./src/page/user-login/index.js'],
        'user-register': ['./src/page/user-register/index.js'],
        'user-pass-reset': ['./src/page/user-pass-reset/index.js'],
        'user-center': ['./src/page/user-center/index.js'],
        'user-center-update': ['./src/page/user-center-update/index.js'],
        'user-pass-update': ['./src/page/user-pass-update/index.js'],
        'result': ['./src/page/result/index.js'],
        'about': ['./src/page/about/index.js'],
    },
    output: {
        path : __dirname + '/dist/',
        // Assets are served locally in dev and from the CDN host online.
        publicPath : 'dev' === WEBPACK_ENV ? '/dist/' : '//mmall.zyjblogs.cn/mmall-fe/dist/',
        filename : 'js/[name].js'
    },
    externals: {
        // jQuery is provided by the page; do not bundle it.
        'jquery': 'window.jQuery'
    },
    module: {
        loaders: [
            { test: /\.css$/, loader: ExtractTextPlugin.extract("style-loader", "css-loader") },
            { test: /\.(gif|png|jpg|woff|svg|eot|ttf|ico|woff2)\??.*$/, loader: 'url-loader?limit=100&name=resource/[name].[ext]' },
            {
                test: /\.string$/,
                loader: 'html-loader',
                query: {
                    minimize: true,
                    removeAttributeQuotes: false
                }
            }
        ]
    },
    resolve: {
        alias: {
            node_modules: __dirname + '/node_modules',
            util: __dirname + '/src/util',
            page: __dirname + '/src/page',
            service: __dirname + '/src/service',
            image: __dirname + '/src/image'
        }
    },
    plugins: [
        // Extract shared/common modules into js/base.js
        new webpack.optimize.CommonsChunkPlugin({
            name: 'common',
            filename: 'js/base.js'
        }),
        // Bundle CSS into standalone per-entry files
        new ExtractTextPlugin("css/[name].css"),
        // HTML template handling, one plugin instance per page
        new HtmlWebpackPlugin(getHtmlConfig('index', '首页')),
        new HtmlWebpackPlugin(getHtmlConfig('list', '商品列表')),
        new HtmlWebpackPlugin(getHtmlConfig('detail', '商品详情')),
        new HtmlWebpackPlugin(getHtmlConfig('cart', '购物车')),
        new HtmlWebpackPlugin(getHtmlConfig('order-confirm', '订单确认')),
        new HtmlWebpackPlugin(getHtmlConfig('order-list', '订单列表')),
        new HtmlWebpackPlugin(getHtmlConfig('order-detail', '订单详情')),
        new HtmlWebpackPlugin(getHtmlConfig('payment', '订单支付')),
        new HtmlWebpackPlugin(getHtmlConfig('user-login', '用户登录')),
        new HtmlWebpackPlugin(getHtmlConfig('user-register', '用户注册')),
        new HtmlWebpackPlugin(getHtmlConfig('user-pass-reset', '找回密码')),
        new HtmlWebpackPlugin(getHtmlConfig('user-center', '个人中心')),
        new HtmlWebpackPlugin(getHtmlConfig('user-center-update', '修改个人信息')),
        new HtmlWebpackPlugin(getHtmlConfig('user-pass-update', '修改密码')),
        new HtmlWebpackPlugin(getHtmlConfig('result', '操作结果')),
        new HtmlWebpackPlugin(getHtmlConfig('about', '关于MMall')),
    ]
    ,
    devServer: {
        port: 8088,
        inline: true,
        // Forward backend *.do requests to the local API server.
        proxy : {
            '**/*.do' : {
                target: 'http://localhost:8080/',
                changeOrigin : true
            }
        }
    }
};
module.exports = config;
|
from __future__ import absolute_import, division, print_function
import numpy as np
import scipy.linalg
from nnmnkwii.util import apply_each2d_padded, apply_each2d_trim
from nnmnkwii.util.linalg import cholesky_inv, cholesky_inv_banded
from nose.plugins.attrib import attr
def _get_windows_set():
windows_set = [
# Static
[
(0, 0, np.array([1.0])),
],
# Static + delta
[
(0, 0, np.array([1.0])),
(1, 1, np.array([-0.5, 0.0, 0.5])),
],
# Static + delta + deltadelta
[
(0, 0, np.array([1.0])),
(1, 1, np.array([-0.5, 0.0, 0.5])),
(1, 1, np.array([1.0, -2.0, 1.0])),
],
]
return windows_set
def test_function_utils():
    # Check apply_each2d_padded / apply_each2d_trim with a simple "+1" map:
    # the valid region must be transformed, the padded region must stay 0.
    def dummmy_func2d(x):
        return x + 1
    T, D = 10, 24
    np.random.seed(1234)
    X = np.random.rand(2, T, D)
    # NOTE(review): these lengths exceed T (= 10), so every [:l] slice below
    # covers the whole utterance — presumably intentional; confirm.
    lengths = [60, 100]
    # Padded case
    Y = apply_each2d_padded(dummmy_func2d, X, lengths)
    for i, l in enumerate(lengths):
        assert np.allclose(X[i][:l] + 1, Y[i][:l])
        assert np.all(Y[i][l:] == 0)
    # Trim case: zero out the padded region first, then apply.
    for i, l in enumerate(lengths):
        X[i][l:] = 0
    Y = apply_each2d_trim(dummmy_func2d, X)
    for i, l in enumerate(lengths):
        assert np.allclose(X[i][:l] + 1, Y[i][:l])
        assert np.all(Y[i][l:] == 0)
def _get_banded_test_mat(win_mats, T):
    # Build a banded test matrix P = sum_i W_i^T W_i from the window
    # matrices, in bandmat's banded storage.
    from nnmnkwii.paramgen import _bandmat as bm
    # Bandwidth wide enough to hold every W_i^T W_i product.
    sdw = max([win_mat.l + win_mat.u for win_mat in win_mats])
    P = bm.zeros(sdw, sdw, T)
    for _, win_mat in enumerate(win_mats):
        bm.dot_mm_plus_equals(win_mat.T, win_mat, target_bm=P)
    return P
@attr("requires_bandmat")
def test_linalg_choleskey_inv():
    # Validate cholesky_inv / cholesky_inv_banded against numpy.linalg.inv
    # for both lower and upper Cholesky factors of banded test matrices.
    from nnmnkwii.paramgen import build_win_mats
    for windows in _get_windows_set():
        for T in [5, 10]:
            win_mats = build_win_mats(windows, T)
            P = _get_banded_test_mat(win_mats, T).full()
            L = scipy.linalg.cholesky(P, lower=True)
            U = scipy.linalg.cholesky(P, lower=False)
            # Sanity: both factorizations reconstruct P.
            assert np.allclose(L.dot(L.T), P)
            assert np.allclose(U.T.dot(U), P)
            Pinv = np.linalg.inv(P)
            Pinv_hat = cholesky_inv(L, lower=True)
            assert np.allclose(Pinv, Pinv_hat)
            Pinv_hat = cholesky_inv(U, lower=False)
            assert np.allclose(Pinv, Pinv_hat)
            # Banded variant; width=3 presumably matches the widest window
            # used here — confirm.
            Pinv_hat = cholesky_inv_banded(L, width=3)
            assert np.allclose(Pinv, Pinv_hat)
|
/**
 * Collect the names of form inputs that currently have no value.
 *
 * The previous implementation only logged the entries and always
 * returned an empty array; it never populated `emptyInputs`.
 *
 * @param {Object} form - map of input name -> current value
 * @returns {string[]} keys whose value is empty ('' / null / undefined)
 */
export const getEmptyInputs = form => {
  const emptyInputs = [];
  for (const [key, value] of Object.entries(form)) {
    // `== null` catches both null and undefined; 0 and false count as filled.
    if (value == null || value === '') {
      emptyInputs.push(key);
    }
  }
  return emptyInputs;
};
|
"""Stuff to parse Sun and NeXT audio files.
An audio file consists of a header followed by the data. The structure
of the header is as follows.
+---------------+
| magic word |
+---------------+
| header size |
+---------------+
| data size |
+---------------+
| encoding |
+---------------+
| sample rate |
+---------------+
| # of channels |
+---------------+
| info |
| |
+---------------+
The magic word consists of the 4 characters '.snd'. Apart from the
info field, all header fields are 4 bytes in size. They are all
32-bit unsigned integers encoded in big-endian byte order.
The header size really gives the start of the data.
The data size is the physical size of the data. From the other
parameters the number of frames can be calculated.
The encoding gives the way in which audio samples are encoded.
Possible values are listed below.
The info field currently consists of an ASCII string giving a
human-readable description of the audio file. The info field is
padded with NUL bytes to the header size.
Usage.
Reading audio files:
f = sunau.open(file, 'r')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods read(), seek(), and close().
When the setpos() and rewind() methods are not used, the seek()
method is not necessary.
This returns an instance of a class with the following public methods:
getnchannels() -- returns number of audio channels (1 for
mono, 2 for stereo)
getsampwidth() -- returns sample width in bytes
getframerate() -- returns sampling frequency
getnframes() -- returns number of audio frames
getcomptype() -- returns compression type ('NONE' or 'ULAW')
getcompname() -- returns human-readable version of
compression type ('not compressed' matches 'NONE')
getparams() -- returns a namedtuple consisting of all of the
above in the above order
getmarkers() -- returns None (for compatibility with the
aifc module)
getmark(id) -- raises an error since the mark does not
exist (for compatibility with the aifc module)
readframes(n) -- returns at most n frames of audio
rewind() -- rewind to the beginning of the audio stream
setpos(pos) -- seek to the specified position
tell() -- return the current position
close() -- close the instance (make it unusable)
The position returned by tell() and the position given to setpos()
are compatible and have nothing to do with the actual position in the
file.
The close() method is called automatically when the class instance
is destroyed.
Writing audio files:
f = sunau.open(file, 'w')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods write(), tell(), seek(), and
close().
This returns an instance of a class with the following public methods:
setnchannels(n) -- set the number of channels
setsampwidth(n) -- set the sample width
setframerate(n) -- set the frame rate
setnframes(n) -- set the number of frames
setcomptype(type, name)
-- set the compression type and the
human-readable compression type
setparams(tuple)-- set all parameters at once
tell() -- return current position in output file
writeframesraw(data)
                        -- write audio frames without patching up the
file header
writeframes(data)
-- write audio frames and patch up the file header
close() -- patch up the file header and close the
output file
You should set the parameters before the first writeframesraw or
writeframes. The total number of frames does not need to be set,
but when it is set to the correct value, the header does not have to
be patched up.
It is best to first set all parameters, perhaps except for the
compression type, and then write audio frames using writeframesraw.
When all frames have been written, either call writeframes('') or
close() to patch up the sizes in the header.
The close() method is called automatically when the class instance
is destroyed.
"""
from collections import namedtuple
# Parameter record returned by getparams() on both the reader and the writer.
_sunau_params = namedtuple('_sunau_params',
                           'nchannels sampwidth framerate nframes comptype compname')

# Encoding constants, from <multimedia/audio_filehdr.h>
AUDIO_FILE_MAGIC = 0x2e736e64          # the 4 characters '.snd' as a big-endian u32
AUDIO_FILE_ENCODING_MULAW_8 = 1        # 1 byte/sample, decoded to 16-bit linear
AUDIO_FILE_ENCODING_LINEAR_8 = 2       # 8-bit linear PCM
AUDIO_FILE_ENCODING_LINEAR_16 = 3      # 16-bit linear PCM
AUDIO_FILE_ENCODING_LINEAR_24 = 4      # 24-bit linear PCM
AUDIO_FILE_ENCODING_LINEAR_32 = 5      # 32-bit linear PCM
AUDIO_FILE_ENCODING_FLOAT = 6
AUDIO_FILE_ENCODING_DOUBLE = 7
AUDIO_FILE_ENCODING_ADPCM_G721 = 23
AUDIO_FILE_ENCODING_ADPCM_G722 = 24
AUDIO_FILE_ENCODING_ADPCM_G723_3 = 25
AUDIO_FILE_ENCODING_ADPCM_G723_5 = 26
AUDIO_FILE_ENCODING_ALAW_8 = 27        # 1 byte/sample, decoded to 16-bit linear

# from <multimedia/audio_hdr.h>
AUDIO_UNKNOWN_SIZE = 0xFFFFFFFF        # ((unsigned)(~0)) -- data size not recorded

# The only encodings this module can read and write directly.
_simple_encodings = [AUDIO_FILE_ENCODING_MULAW_8,
                     AUDIO_FILE_ENCODING_LINEAR_8,
                     AUDIO_FILE_ENCODING_LINEAR_16,
                     AUDIO_FILE_ENCODING_LINEAR_24,
                     AUDIO_FILE_ENCODING_LINEAR_32,
                     AUDIO_FILE_ENCODING_ALAW_8]
class Error(Exception):
    """Raised on malformed AU files or invalid parameter use."""
    pass
def _read_u32(file):
x = 0
for i in range(4):
byte = file.read(1)
if not byte:
raise EOFError
x = x*256 + ord(byte)
return x
def _write_u32(file, x):
data = []
for i in range(4):
d, m = divmod(x, 256)
data.insert(0, int(m))
x = d
file.write(bytes(data))
class Au_read:
    """Read access to a Sun/NeXT AU audio file.

    Parses the big-endian header on construction and exposes the usual
    audio-reader API: getnchannels(), getsampwidth(), getframerate(),
    getnframes(), readframes(), rewind(), setpos(), tell(), close().
    """

    def __init__(self, f):
        if type(f) == type(''):
            import builtins
            f = builtins.open(f, 'rb')
            self._opened = True   # we own the file object; close() must close it
        else:
            self._opened = False
        self.initfp(f)

    def __del__(self):
        if self._file:
            self.close()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def initfp(self, file):
        """Parse the AU header from *file* and initialize reader state.

        Raises Error for a bad magic number, an implausible header size,
        or an unsupported encoding.
        """
        self._file = file
        self._soundpos = 0
        magic = int(_read_u32(file))
        if magic != AUDIO_FILE_MAGIC:
            raise Error('bad magic number')
        self._hdr_size = int(_read_u32(file))
        if self._hdr_size < 24:
            raise Error('header size too small')
        if self._hdr_size > 100:
            raise Error('header size ridiculously large')
        self._data_size = _read_u32(file)
        if self._data_size != AUDIO_UNKNOWN_SIZE:
            self._data_size = int(self._data_size)
        self._encoding = int(_read_u32(file))
        if self._encoding not in _simple_encodings:
            raise Error('encoding not (yet) supported')
        if self._encoding in (AUDIO_FILE_ENCODING_MULAW_8,
                              AUDIO_FILE_ENCODING_ALAW_8):
            # Stored as 1 byte per sample; readframes() expands to 16-bit linear.
            self._sampwidth = 2
            self._framesize = 1
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_8:
            self._framesize = self._sampwidth = 1
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_16:
            self._framesize = self._sampwidth = 2
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_24:
            self._framesize = self._sampwidth = 3
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_32:
            self._framesize = self._sampwidth = 4
        else:
            raise Error('unknown encoding')
        self._framerate = int(_read_u32(file))
        self._nchannels = int(_read_u32(file))
        self._framesize = self._framesize * self._nchannels
        if self._hdr_size > 24:
            self._info = file.read(self._hdr_size - 24)
            # The info field is NUL-padded; keep only the text before the
            # first NUL byte.  Bug fix: the old loop compared bytes-indexing
            # results (ints in Python 3) against b'\0', so it never matched
            # and the padding was never stripped.
            self._info = self._info.partition(b'\0')[0]
        else:
            # Use bytes for type consistency with the branch above (was '').
            self._info = b''
        try:
            self._data_pos = file.tell()
        except (AttributeError, OSError):
            # Unseekable stream: rewind()/setpos() will raise OSError.
            self._data_pos = None

    def getfp(self):
        """Return the underlying file object."""
        return self._file

    def getnchannels(self):
        """Return the number of audio channels."""
        return self._nchannels

    def getsampwidth(self):
        """Return the sample width in bytes (after decoding, for u-law/A-law)."""
        return self._sampwidth

    def getframerate(self):
        """Return the sampling frequency in frames per second."""
        return self._framerate

    def getnframes(self):
        """Return the number of audio frames, or AUDIO_UNKNOWN_SIZE."""
        if self._data_size == AUDIO_UNKNOWN_SIZE:
            return AUDIO_UNKNOWN_SIZE
        if self._encoding in _simple_encodings:
            return self._data_size // self._framesize
        return 0                # XXX--must do some arithmetic here

    def getcomptype(self):
        """Return the compression type: 'ULAW', 'ALAW' or 'NONE'."""
        if self._encoding == AUDIO_FILE_ENCODING_MULAW_8:
            return 'ULAW'
        elif self._encoding == AUDIO_FILE_ENCODING_ALAW_8:
            return 'ALAW'
        else:
            return 'NONE'

    def getcompname(self):
        """Return a human-readable description of the compression type."""
        if self._encoding == AUDIO_FILE_ENCODING_MULAW_8:
            return 'CCITT G.711 u-law'
        elif self._encoding == AUDIO_FILE_ENCODING_ALAW_8:
            return 'CCITT G.711 A-law'
        else:
            return 'not compressed'

    def getparams(self):
        """Return all parameters as a _sunau_params named tuple."""
        return _sunau_params(self.getnchannels(), self.getsampwidth(),
                             self.getframerate(), self.getnframes(),
                             self.getcomptype(), self.getcompname())

    def getmarkers(self):
        """Return None (for compatibility with the aifc module)."""
        return None

    def getmark(self, id):
        """Raise Error; AU files have no marks (aifc compatibility)."""
        raise Error('no marks')

    def readframes(self, nframes):
        """Read and return at most *nframes* frames of audio data.

        u-law data is decoded to 16-bit linear samples.
        """
        if self._encoding in _simple_encodings:
            if nframes == AUDIO_UNKNOWN_SIZE:
                data = self._file.read()
            else:
                data = self._file.read(nframes * self._framesize)
            self._soundpos += len(data) // self._framesize
            if self._encoding == AUDIO_FILE_ENCODING_MULAW_8:
                import audioop
                data = audioop.ulaw2lin(data, self._sampwidth)
            return data
        return None             # XXX--not implemented yet

    def rewind(self):
        """Seek back to the start of the audio data."""
        if self._data_pos is None:
            raise OSError('cannot seek')
        self._file.seek(self._data_pos)
        self._soundpos = 0

    def tell(self):
        """Return the current position in frames (not a file offset)."""
        return self._soundpos

    def setpos(self, pos):
        """Seek to frame *pos* (a value previously returned by tell())."""
        if pos < 0 or pos > self.getnframes():
            raise Error('position not in range')
        if self._data_pos is None:
            raise OSError('cannot seek')
        self._file.seek(self._data_pos + pos * self._framesize)
        self._soundpos = pos

    def close(self):
        """Close the instance; closes the file only if we opened it."""
        if self._opened and self._file:
            self._file.close()
        self._file = None
class Au_write:
    """Write access to a Sun/NeXT AU audio file.

    Set the parameters (channels, sample width, frame rate) before
    writing frames; close() patches the header sizes if necessary.
    """

    def __init__(self, f):
        if type(f) == type(''):
            import builtins
            f = builtins.open(f, 'wb')
            self._opened = True   # we own the file object; close() must close it
        else:
            self._opened = False
        self.initfp(f)

    def __del__(self):
        if self._file:
            self.close()
        self._file = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def initfp(self, file):
        """Initialize writer state around *file*."""
        self._file = file
        self._framerate = 0
        self._nchannels = 0
        self._sampwidth = 0
        self._framesize = 0
        self._nframes = AUDIO_UNKNOWN_SIZE
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0
        self._info = b''
        self._comptype = 'ULAW' # default is U-law

    def setnchannels(self, nchannels):
        """Set the number of channels (1, 2 or 4); only before writing."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if nchannels not in (1, 2, 4):
            raise Error('only 1, 2, or 4 channels supported')
        self._nchannels = nchannels

    def getnchannels(self):
        if not self._nchannels:
            raise Error('number of channels not set')
        return self._nchannels

    def setsampwidth(self, sampwidth):
        """Set the sample width in bytes (1, 2 or 4); only before writing."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if sampwidth not in (1, 2, 4):
            raise Error('bad sample width')
        self._sampwidth = sampwidth

    def getsampwidth(self):
        # Bug fix: this used to test self._framerate, so an unset sample
        # width went undetected whenever the frame rate had been set.
        if not self._sampwidth:
            raise Error('sample width not specified')
        return self._sampwidth

    def setframerate(self, framerate):
        """Set the frame rate; only before writing."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        self._framerate = framerate

    def getframerate(self):
        if not self._framerate:
            raise Error('frame rate not set')
        return self._framerate

    def setnframes(self, nframes):
        """Pre-declare the frame count so the header needs no patching."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if nframes < 0:
            raise Error('# of frames cannot be negative')
        self._nframes = nframes

    def getnframes(self):
        """Return the number of frames written so far."""
        return self._nframeswritten

    def setcomptype(self, type, name):
        """Set the compression type ('NONE' or 'ULAW'); *name* is ignored."""
        if type in ('NONE', 'ULAW'):
            self._comptype = type
        else:
            raise Error('unknown compression type')

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        """Return a human-readable description of the compression type."""
        if self._comptype == 'ULAW':
            return 'CCITT G.711 u-law'
        elif self._comptype == 'ALAW':
            return 'CCITT G.711 A-law'
        else:
            return 'not compressed'

    def setparams(self, params):
        """Set all parameters at once from a (nchannels, sampwidth,
        framerate, nframes, comptype, compname) tuple."""
        nchannels, sampwidth, framerate, nframes, comptype, compname = params
        self.setnchannels(nchannels)
        self.setsampwidth(sampwidth)
        self.setframerate(framerate)
        self.setnframes(nframes)
        self.setcomptype(comptype, compname)

    def getparams(self):
        """Return all parameters as a _sunau_params named tuple."""
        return _sunau_params(self.getnchannels(), self.getsampwidth(),
                             self.getframerate(), self.getnframes(),
                             self.getcomptype(), self.getcompname())

    def tell(self):
        """Return the current position in frames written."""
        return self._nframeswritten

    def writeframesraw(self, data):
        """Write audio frames without patching up the file header."""
        self._ensure_header_written()
        if self._comptype == 'ULAW':
            import audioop
            data = audioop.lin2ulaw(data, self._sampwidth)
        nframes = len(data) // self._framesize
        self._file.write(data)
        self._nframeswritten = self._nframeswritten + nframes
        self._datawritten = self._datawritten + len(data)

    def writeframes(self, data):
        """Write audio frames and patch up the file header if needed."""
        self.writeframesraw(data)
        if self._nframeswritten != self._nframes or \
                self._datalength != self._datawritten:
            self._patchheader()

    def close(self):
        """Patch up the file header and close the output file."""
        if self._file:
            try:
                self._ensure_header_written()
                if self._nframeswritten != self._nframes or \
                        self._datalength != self._datawritten:
                    self._patchheader()
                self._file.flush()
            finally:
                # Close only if we opened the file ourselves.
                if self._opened and self._file:
                    self._file.close()
                self._file = None

    #
    # private methods
    #

    def _ensure_header_written(self):
        # Write the header lazily on the first frame write.
        if not self._nframeswritten:
            if not self._nchannels:
                raise Error('# of channels not specified')
            if not self._sampwidth:
                raise Error('sample width not specified')
            if not self._framerate:
                raise Error('frame rate not specified')
            self._write_header()

    def _write_header(self):
        # Map (comptype, sampwidth) to the AU encoding and bytes/sample.
        if self._comptype == 'NONE':
            if self._sampwidth == 1:
                encoding = AUDIO_FILE_ENCODING_LINEAR_8
                self._framesize = 1
            elif self._sampwidth == 2:
                encoding = AUDIO_FILE_ENCODING_LINEAR_16
                self._framesize = 2
            elif self._sampwidth == 4:
                encoding = AUDIO_FILE_ENCODING_LINEAR_32
                self._framesize = 4
            else:
                raise Error('internal error')
        elif self._comptype == 'ULAW':
            encoding = AUDIO_FILE_ENCODING_MULAW_8
            self._framesize = 1
        else:
            raise Error('internal error')
        self._framesize = self._framesize * self._nchannels
        _write_u32(self._file, AUDIO_FILE_MAGIC)
        # 24 fixed header bytes + info + at least one NUL, rounded up to a
        # multiple of 8.
        header_size = 25 + len(self._info)
        header_size = (header_size + 7) & ~7
        _write_u32(self._file, header_size)
        if self._nframes == AUDIO_UNKNOWN_SIZE:
            length = AUDIO_UNKNOWN_SIZE
        else:
            length = self._nframes * self._framesize
        try:
            # Remember where the data-size field lives so _patchheader()
            # can rewrite it later.
            self._form_length_pos = self._file.tell()
        except (AttributeError, OSError):
            self._form_length_pos = None
        _write_u32(self._file, length)
        self._datalength = length
        _write_u32(self._file, encoding)
        _write_u32(self._file, self._framerate)
        _write_u32(self._file, self._nchannels)
        self._file.write(self._info)
        # NUL-pad the info field out to the declared header size.
        self._file.write(b'\0'*(header_size - len(self._info) - 24))

    def _patchheader(self):
        # Rewrite the data-size field with the actual byte count, then
        # return to the end of the file.
        if self._form_length_pos is None:
            raise OSError('cannot seek')
        self._file.seek(self._form_length_pos)
        _write_u32(self._file, self._datawritten)
        self._datalength = self._datawritten
        self._file.seek(0, 2)
def open(f, mode=None):
    """Open an AU file for reading or writing.

    *f* is a filename or an open file object; *mode* is 'r'/'rb' for
    reading or 'w'/'wb' for writing.  When *mode* is omitted, the file
    object's own ``mode`` attribute is used, defaulting to 'rb'.
    """
    if mode is None:
        mode = getattr(f, 'mode', 'rb')
    if mode in ('r', 'rb'):
        return Au_read(f)
    if mode in ('w', 'wb'):
        return Au_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")

# Backwards-compatible alias.
openfp = open
|
import React, { PropTypes, Component } from 'react';
import { connect } from 'react-redux';
// Import Components
import PostList from '../../components/PostList';
import PostCreateWidget from '../../components/PostCreateWidget/PostCreateWidget';
// Import Actions
import { addPostRequest, fetchPosts, deletePostRequest,
thumbUpPostRequest, thumbDownPostRequest } from '../../PostActions';
import { toggleAddPost } from '../../../App/AppActions';
// Import Selectors
import { getShowAddPost } from '../../../App/AppReducer';
import { getPosts } from '../../PostReducer';
class PostListPage extends Component {
componentDidMount() {
this.props.dispatch(fetchPosts());
}
handleDeletePost = post => {
if (confirm('Do you want to delete this post')) { // eslint-disable-line
this.props.dispatch(deletePostRequest(post));
}
};
handleAddPost = (name, title, content) => {
this.props.dispatch(toggleAddPost());
this.props.dispatch(addPostRequest({ name, title, content }));
};
// INSERT +/- FUNCTIONS HERE
handleThumbUpPost = post => {
this.props.dispatch(thumbUpPostRequest(post.cuid, post));
};
handleThumbDownPost = post => {
this.props.dispatch(thumbDownPostRequest(post.cuid, post));
};
render() {
return (
<div>
<PostCreateWidget addPost={this.handleAddPost} showAddPost={this.props.showAddPost} />
<PostList handleDeletePost={this.handleDeletePost} posts={this.props.posts} handleThumbUpPost={this.handleThumbUpPost} handleThumbDownPost={this.handleThumbDownPost} />
</div>
);
}
}
// Actions required to provide data for this component to render on the server side.
PostListPage.need = [() => fetchPosts()];

// Retrieve data from store as props.
function mapStateToProps(state) {
  return {
    showAddPost: getShowAddPost(state),
    posts: getPosts(state),
  };
}

PostListPage.propTypes = {
  posts: PropTypes.arrayOf(PropTypes.shape({
    name: PropTypes.string.isRequired,
    title: PropTypes.string.isRequired,
    content: PropTypes.string.isRequired,
  })).isRequired,
  showAddPost: PropTypes.bool.isRequired,
  dispatch: PropTypes.func.isRequired,
};

PostListPage.contextTypes = {
  // Use the imported PropTypes for consistency with propTypes above;
  // React.PropTypes is deprecated and removed in React 16.
  router: PropTypes.object,
};

export default connect(mapStateToProps)(PostListPage);
|
// Greek (el) locale strings for the widget framework (AMD/Dojo i18n bundle).
// NOTE: ${...} placeholder names must remain in English -- the runtime
// substitutes them by key (see toolTip below), so translated placeholder
// names would be rendered literally.
define(
  ({
    common: {
      ok: "ΟΚ",
      cancel: "Ακύρωση",
      next: "Επόμενο",
      back: "Πίσω"
    },
    errorCode: "Κωδικός",
    errorMessage: "Μήνυμα",
    errorDetail: "Λεπτομέρεια",
    widgetPlaceholderTooltip: "Για να το ρυθμίσετε, μεταβείτε στα Widget και κάντε κλικ στο αντίστοιχο σύμβολο",
    symbolChooser: {
      preview: "Προεπισκόπηση",
      basic: "Βασικά",
      arrows: "Βέλη",
      business: "Επιχείρηση",
      cartographic: "Χαρτογραφικά",
      nationalParkService: "Εθνική Υπηρεσία Πάρκων",
      outdoorRecreation: "Υπαίθριες δραστηριότητες αναψυχής",
      peoplePlaces: "Άνθρωποι και μέρη",
      safetyHealth: "Ασφάλεια - Υγεία",
      shapes: "Σχήματα",
      transportation: "Μεταφορές",
      symbolSize: "Μέγεθος συμβόλου",
      color: "Χρώμα",
      alpha: "Άλφα",
      outlineColor: "Χρώμα περιγράμματος",
      outlineWidth: "Πλάτος περιγράμματος",
      style: "Στυλ",
      width: "Πλάτος",
      text: "Κείμενο",
      fontColor: "Χρώμα γραμματοσειράς",
      fontSize: "Μέγεθος γραμματοσειράς",
      transparency: "Διαφάνεια",
      solid: "Συμπαγές",
      dash: "Παύλα",
      dot: "Τελεία",
      dashDot: "Παύλα τελεία",
      dashDotDot: "Παύλα τελεία τελεία"
    },
    transparency: {
      opaque: "Αδιαφανές",
      transparent: "Διαφανές"
    },
    rendererChooser: {
      domain: "Τομέας",
      use: "Χρήση",
      singleSymbol: "Ένα μόνο σύμβολο",
      uniqueSymbol: "Μοναδικά σύμβολα",
      color: "Χρώμα",
      size: "Μέγεθος",
      toShow: "Για εμφάνιση",
      colors: "Χρώματα",
      classes: "Κλάσεις",
      symbolSize: "Μέγεθος συμβόλου",
      addValue: "Προσθήκη τιμής",
      setDefaultSymbol: "Ορισμός προκαθορισμένου συμβόλου",
      defaultSymbol: "Προκαθορισμένο σύμβολο",
      selectedSymbol: "Επιλεγμένο σύμβολο",
      value: "Τιμή",
      label: "Ετικέτα",
      range: "Εύρος"
    },
    drawBox: {
      point: "Σημείο",
      line: "Γραμμή",
      polyline: "Γραμμή πολλαπλών τμημάτων",
      freehandPolyline: "Γραμμή ελεύθερης σχεδίασης",
      triangle: "Τρίγωνο",
      extent: "Έκταση",
      circle: "Κύκλος",
      ellipse: "Έλλειψη",
      polygon: "Πολύγωνο",
      freehandPolygon: "Πολύγωνο ελεύθερης σχεδίασης",
      text: "Κείμενο",
      clear: "Απαλοιφή"
    },
    popupConfig: {
      title: "Τίτλος",
      add: "Προσθήκη",
      fields: "Πεδία",
      noField: "Χωρίς πεδίο",
      visibility: "Ορατό",
      name: "Όνομα",
      alias: "Ψευδώνυμο",
      actions: "Ενέργειες"
    },
    includeButton: {
      include: "Συμπερίληψη"
    },
    loadingShelter: {
      loading: "Φόρτωση"
    },
    basicServiceBrowser: {
      noServicesFound: "Δεν εντοπίστηκε υπηρεσία.",
      unableConnectTo: "Δεν ήταν δυνατή η σύνδεση σε",
      invalidUrlTip: "Το URL που εισαγάγατε δεν είναι έγκυρο ή δεν είναι προσβάσιμο."
    },
    serviceBrowser: {
      noGpFound: "Δεν εντοπίστηκε geoprocessing service.",
      unableConnectTo: "Δεν ήταν δυνατή η σύνδεση σε"
    },
    layerServiceBrowser: {
      noServicesFound: "Δεν εντοπίστηκε map service ή feature service",
      unableConnectTo: "Δεν ήταν δυνατή η σύνδεση σε"
    },
    basicServiceChooser: {
      validate: "Επικύρωση",
      example: "Παράδειγμα",
      set: "Ορισμός"
    },
    urlInput: {
      invalidUrl: "Μη έγκυρο URL."
    },
    urlComboBox: {
      invalidUrl: "Μη έγκυρο URL."
    },
    filterBuilder: {
      addAnotherExpression: "Προσθήκη έκφρασης φίλτρου",
      addSet: "Προσθήκη συνόλου εκφράσεων",
      // Typo fix: "στοιχρία" -> "στοιχεία".
      matchMsg: "Βρείτε τα στοιχεία του θεματικού επιπέδου που συμφωνούν με ${any_or_all} τις παρακάτω εκφράσεις",
      matchMsgSet: "${any_or_all} από τις παρακάτω εκφράσεις σε αυτό το σύνολο έχουν τιμή true",
      all: "Όλες",
      any: "Κάποιες",
      value: "Τιμή",
      field: "Πεδίο",
      unique: "Μοναδικό",
      none: "Κανένα",
      and: "και",
      valueTooltip: "Εισαγωγή τιμής",
      fieldTooltip: "Επιλογή από υπάρχον πεδίο",
      uniqueValueTooltip: "Επιλογή από μοναδικές τιμές σε επιλεγμένο πεδίο",
      stringOperatorIs: "είναι", // e.g. <stringFieldName> is "California"
      stringOperatorIsNot: "δεν είναι",
      stringOperatorStartsWith: "ξεκινάει με",
      stringOperatorEndsWith: "τελειώνει με",
      stringOperatorContains: "περιέχει",
      stringOperatorDoesNotContain: "δεν περιέχει",
      stringOperatorIsBlank: "είναι κενό",
      stringOperatorIsNotBlank: "δεν είναι κενό",
      dateOperatorIsOn: "είναι στις", // e.g. <dateFieldName> is on "1/1/2012"
      dateOperatorIsNotOn: "δεν είναι στις",
      dateOperatorIsBefore: "είναι πριν από",
      dateOperatorIsAfter: "είναι μετά από",
      dateOperatorDays: "ημέρες",
      dateOperatorWeeks: "εβδομάδες", // e.g. <dateFieldName> is the last 4 weeks
      dateOperatorMonths: "μήνες",
      dateOperatorInTheLast: "τους τελευταίους/τις τελευταίες",
      dateOperatorNotInTheLast: "όχι τους τελευταίους/τις τελευταίες",
      dateOperatorIsBetween: "είναι μεταξύ",
      dateOperatorIsNotBetween: "δεν είναι μεταξύ",
      dateOperatorIsBlank: "είναι κενό",
      dateOperatorIsNotBlank: "δεν είναι κενό",
      numberOperatorIs: "είναι", // e.g. <numberFieldName> is 1000
      numberOperatorIsNot: "δεν είναι",
      numberOperatorIsAtLeast: "είναι τουλάχιστον",
      numberOperatorIsLessThan: "είναι λιγότερο από",
      numberOperatorIsAtMost: "είναι το πολύ",
      numberOperatorIsGreaterThan: "είναι μεγαλύτερο από",
      numberOperatorIsBetween: "είναι μεταξύ",
      numberOperatorIsNotBetween: "δεν είναι μεταξύ",
      numberOperatorIsBlank: "είναι κενό",
      numberOperatorIsNotBlank: "δεν είναι κενό",
      string: "Συμβολοσειρά",
      number: "Αριθμός",
      date: "Ημερομηνία",
      askForValues: "Εισαγωγή τιμών από το χρήστη",
      prompt: "Κείμενο εμφάνισης",
      hint: "Κείμενο υπόδειξης",
      error: {
        invalidParams: "Μη έγκυρες παράμετροι.",
        invalidUrl: "Μη έγκυρο URL.",
        noFilterFields: "Το θεματικό επίπεδο δεν διαθέτει πεδία που μπορούν να χρησιμοποιηθούν για φίλτρο.",
        invalidSQL: "Μη έγκυρη έκφραση SQL.",
        cantParseSQL: "Δεν είναι δυνατή η ανάλυση της έκφρασης SQL."
      },
      caseSensitive: "Διάκριση πεζών-κεφαλαίων",
      notSupportCaseSensitiveTip: "Οι hosted υπηρεσίες δεν υποστηρίζουν ερωτήματα με διάκριση πεζών-κεφαλαίων."
    },
    featureLayerSource: {
      layer: "Θεματικό Επίπεδο",
      browse: "Περιήγηση",
      selectFromMap: "Επιλογή από το χάρτη",
      selectFromPortal: "Προσθήκη από το Portal for ArcGIS",
      addServiceUrl: "Προσθήκη Service URL",
      inputLayerUrl: "URL θεματικού επιπέδου εισόδου",
      selectLayer: "Επιλέξτε ένα feature layer από τον τρέχοντα χάρτη.",
      chooseItem: "Επιλέξτε ένα αντικείμενο τύπου feature layer.",
      setServiceUrl: "Καταχωρίστε το URL του feature service ή του map service.",
      selectFromOnline: "Προσθήκη από το ArcGIS Online",
      chooseLayer: "Επιλέξτε ένα feature layer."
    },
    queryableLayerSource: {
      layer: "Θεματικό επίπεδο",
      browse: "Αναζήτηση",
      selectFromMap: "Επιλογή από το χάρτη",
      selectFromPortal: "Προσθήκη από το Portal for ArcGIS",
      addServiceUrl: "Προσθήκη Service URL",
      inputLayerUrl: "URL θεματικού επιπέδου εισόδου",
      selectLayer: "Επιλέξτε θεματικό επίπεδο από τον τρέχοντα χάρτη.",
      chooseItem: "Διαλέξτε ένα αντικείμενο.",
      setServiceUrl: "Καταχωρίστε το URL της υπηρεσίας.",
      selectFromOnline: "Προσθήκη από το ArcGIS Online",
      chooseLayer: "Διαλέξτε θεματικό επίπεδο."
    },
    gpSource: {
      selectFromPortal: "Προσθήκη από το Portal for ArcGIS",
      addServiceUrl: "Προσθήκη URL υπηρεσίας",
      selectFromOnline: "Προσθήκη από το ArcGIS Online",
      setServiceUrl: "Καταχωρίστε το URL του geoprocessing service.",
      chooseItem: "Επιλέξτε ένα αντικείμενο τύπου geoprocessing service.",
      chooseTask: "Επιλέξτε μια εργασία γεωεπεξεργασίας."
    },
    itemSelector: {
      map: "Χάρτης",
      selectWebMap: "Επιλογή Web χάρτη",
      addMapFromOnlineOrPortal: "Εντοπίστε και προσθέστε έναν web χάρτη στην εφαρμογή από τους δημόσιους πόρους του ArcGIS Online ή από το ιδιωτικό σας περιεχόμενο στο ArcGIS Online ή το Portal.",
      searchMapName: "Αναζήτηση ανά όνομα χάρτη...",
      searchNone: "Δεν ήταν δυνατή η εύρεση του στοιχείου που αναζητήσατε. Προσπαθήστε ξανά.",
      groups: "Ομάδες",
      noneGroups: "Δεν υπάρχουν ομάδες",
      signInTip: "Η περίοδος λειτουργίας έχει λήξει, ανανεώστε το πρόγραμμα περιήγησης για να συνδεθείτε ξανά στην πύλη σας.",
      signIn: "Είσοδος",
      publicMap: "Δημόσιο περιεχόμενο",
      myOrganization: "Ο Οργανισμός μου",
      myGroup: "Οι Ομάδες μου",
      myContent: "Το Περιεχόμενό μου",
      count: "Πλήθος",
      fromPortal: "από το Portal",
      fromOnline: "από το ArcGIS.com",
      noneThumbnail: "Δεν υπάρχουν διαθέσιμες μικρογραφίες",
      owner: "κάτοχος",
      signInTo: "Είσοδος σε",
      lastModified: "Τελευταία τροποποίηση",
      moreDetails: "Περισσότερες λεπτομέρειες"
    },
    featureLayerChooserFromPortal: {
      notSupportQuery: "Η υπηρεσία δεν υποστηρίζει ερωτήματα."
    },
    basicLayerChooserFromMap: {
      noLayersTip: "Δεν υπάρχει κατάλληλο θεματικό επίπεδο, διαθέσιμο στο χάρτη."
    },
    layerInfosMenu: {
      titleBasemap: "Υπόβαθρα",
      titleLayers: "Επιχειρησιακά επίπεδα",
      labelLayer: "Όνομα θεματικού επιπέδου",
      itemZoomTo: "Εστίαση",
      itemTransparency: "Διαφάνεια",
      itemTransparent: "Διαφανές",
      itemOpaque: "Αδιαφανές",
      itemMoveUp: "Μετακίνηση προς τα επάνω",
      itemMoveDown: "Μετακίνηση προς τα κάτω",
      itemDesc: "Περιγραφή",
      itemDownload: "Λήψη",
      itemToAttributeTable: "Άνοιγμα Πίνακα Περιγραφικών Γνωρισμάτων"
    },
    imageChooser: {
      unsupportReaderAPI: "TODO: Το πρόγραμμα περιήγησης δεν υποστηρίζει το API του προγράμματος ανάγνωσης αρχείων",
      readError: "Απέτυχε η ανάγνωση του αρχείου.",
      unknowError: "δεν είναι δυνατή η ολοκλήρωση ενεργειών",
      invalidType: "Μη έγκυρος τύπος αρχείου.",
      exceed: "Το μέγεθος αρχείου δεν μπορεί να υπερβαίνει τα 1024 KB",
      enableFlash: "TODO: ενεργοποιήστε το flash.",
      // Bug fix: the substitution placeholders had been translated
      // (${πλάτος}/${ύψος}); they must stay ${width}/${height} so the
      // runtime substitution can resolve them (cf. toolTip below).
      cropWaining: "Διαλέξτε μια φωτογραφία με διαστάσεις τουλάχιστον ${width} x ${height} pixel.",
      // Typo fix: "αποδεκτοι" -> "αποδεκτοί".
      toolTip: "Για το καλύτερο αποτέλεσμα, η εικόνα πρέπει να έχει πλάτος ${width} pixel και ύψος ${height} pixel. Τα άλλα μεγέθη θα προσαρμοστούν αναλόγως για σωστή εμφάνιση. Οι αποδεκτοί μορφότυποι εικόνας είναι οι εξής: PNG, GIF και JPEG."
    },
    simpleTable: {
      moveUp: "Μετακίνηση προς τα επάνω",
      moveDown: "Μετακίνηση προς τα κάτω",
      deleteRow: "Διαγραφή",
      edit: "Επεξεργασία"
    },
    urlParams: {
      invalidToken: "Μη έγκυρο διακριτικό",
      validateTokenError: "Το διακριτικό δεν είναι έγκυρο ή παρουσιάστηκε σφάλμα δικτύου"
    },
    exportTo: {
      exportTo: "Εξαγωγή",
      toCSV: "Εξαγωγή σε αρχείο CSV",
      toFeatureCollection: "Εξαγωγή σε Feature Collection",
      toGeoJSON: "Εξαγωγή σε GeoJSON"
    },
    appState: {
      title: "Κατάσταση εφαρμογής",
      restoreMap: "Κάντε κλικ για να επαναφέρετε την έκταση του χάρτη και την ορατότητα των θεματικών επιπέδων στο σημείο που σταματήσατε."
    },
    popupManager: {
      showRelatedRecords: "Εμφάνιση συσχετισμένων εγγραφών",
      edit: "Επεξεργασία",
      getDirections: "Λήψη οδηγιών",
      zoomTo: "Εστίαση"
    }
  })
);
|