text stringlengths 1 1.05M |
|---|
<filename>scripts/dll/build-dlls.js<gh_stars>1000+
#!/usr/bin/env node
/**
 * @license Copyright (c) 2003-2021, CKSource - <NAME>. All rights reserved.
 * For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
 */
/* eslint-env node */

// Builds the base DLL bundle, then the DLL-compatible builds of all packages.
// Pass `--dev` on the command line to create development (non-minified) builds.

const childProcess = require( 'child_process' );
const path = require( 'path' );
const chalk = require( 'chalk' );

const ROOT_DIRECTORY = path.resolve( __dirname, '..', '..' );
const IS_DEVELOPMENT_MODE = process.argv.includes( '--dev' );

if ( IS_DEVELOPMENT_MODE ) {
	console.log( '🛠️️ ' + chalk.yellow( 'Development mode is active.' ) );
} else {
	console.log( '⚠️ ' + chalk.magenta( 'Production mode is active. Use --dev to build in the development mode.' ) );
}

// -------------------------------------------------------------
// ------------------------------------------- Base DLL build --

console.log( '\n📍 ' + chalk.cyan.underline( 'Creating the base DLL build...\n' ) );

runInRootDirectory( 'webpack', withDevFlag( [ '--config=./scripts/dll/webpack.config.dll.js' ] ) );

// -------------------------------------------------------------
// ---------------------------- DLL-compatible package builds --

console.log( '\n📍 ' + chalk.underline( 'Creating DLL-compatible package builds...\n' ) );

runInRootDirectory( 'node', withDevFlag( [ './scripts/dll/build-packages-dlls.js' ] ) );

// Appends `--dev` to a copy of `args` when development mode was requested.
function withDevFlag( args ) {
	return IS_DEVELOPMENT_MODE ? [ ...args, '--dev' ] : args;
}

// Spawns `command` synchronously in the repository root, streaming its output
// to the current terminal. Exits the whole script with the child's status code
// when the child fails, so a broken base build is not silently followed by the
// package builds (the original script ignored the exit status entirely).
function runInRootDirectory( command, args ) {
	const result = childProcess.spawnSync( command, args, {
		encoding: 'utf8',
		shell: true,
		cwd: ROOT_DIRECTORY,
		// `stdio: 'inherit'` already covers stdin, stdout and stderr; the
		// previous separate `stderr: 'inherit'` key is not a valid
		// spawnSync option and was ignored.
		stdio: 'inherit'
	} );

	if ( result.status !== 0 ) {
		process.exit( result.status === null ? 1 : result.status );
	}
}
|
package info.u250.c2d.box2d;
import info.u250.c2d.box2d.model.b2BodyDefModel;
import info.u250.c2d.box2d.model.b2FixtureDefModel;
import info.u250.c2d.box2d.model.b2Scene;
import info.u250.c2d.box2d.model.fixture.b2CircleFixtureDefModel;
import info.u250.c2d.box2d.model.fixture.b2PolygonFixtureDefModel;
import info.u250.c2d.box2d.model.fixture.b2RectangleFixtureDefModel;
import info.u250.c2d.box2d.model.joint.b2DistanceJointDefModel;
import info.u250.c2d.box2d.model.joint.b2FrictionJointDefModel;
import info.u250.c2d.box2d.model.joint.b2PrismaticJointDefModel;
import info.u250.c2d.box2d.model.joint.b2PulleyJointDefModel;
import info.u250.c2d.box2d.model.joint.b2RevoluteJointDefModel;
import info.u250.c2d.box2d.model.joint.b2RopeJointDefModel;
import info.u250.c2d.box2d.model.joint.b2WeldJointDefModel;
import info.u250.c2d.box2d.model.joint.b2WheelJointDefModel;
import java.io.IOException;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.XmlReader;
import com.badlogic.gdx.utils.XmlReader.Element;
/**
* @author xjjdog
*/
/**
 * Reads a Box2D scene description from an XML document and turns it into a
 * {@link b2Scene} model. The document is consumed in three passes: fixture
 * definitions first, then body definitions (which reference fixtures by
 * name), then joint definitions (which reference bodies by name).
 *
 * @author xjjdog
 */
public class IOXml {
    /**
     * Parses the given XML file into a scene model.
     *
     * @param file handle of the XML scene file
     * @return the populated scene model
     * @throws IOException if the file cannot be read or parsed
     */
    public static b2Scene parse(FileHandle file) throws IOException {
        return parse(new XmlReader().parse(file));
    }
    /**
     * Builds a {@link b2Scene} from the parsed XML root element. Expects the
     * root to contain "fixtures", "bodys" and "joints" child elements; all
     * attribute reads are mandatory (a missing attribute will make the
     * underlying reader throw).
     */
    private static b2Scene parse(Element root) {
        b2Scene model = new b2Scene();
        // ---- Pass 1: fixture definitions --------------------------------
        Element eleFixtures = root.getChildByName("fixtures");
        for (int i = 0; i < eleFixtures.getChildCount(); i++) {
            Element eleFixture = eleFixtures.getChild(i);
            // The element name selects the fixture shape: "polygons", "box" or "circle".
            String fixtureType = eleFixture.getName();
            if (fixtureType.equals("polygons")) {
                b2PolygonFixtureDefModel fixture = new b2PolygonFixtureDefModel();
                fixture.friction = Float.parseFloat(eleFixture.getAttribute("friction"));
                fixture.restitution = Float.parseFloat(eleFixture.getAttribute("restitution"));
                fixture.density = Float.parseFloat(eleFixture.getAttribute("density"));
                fixture.isSensor = Boolean.parseBoolean(eleFixture.getAttribute("isSensor"));
                fixture.categoryBits = Short.parseShort(eleFixture.getAttribute("categoryBits"));
                fixture.maskBits = Short.parseShort(eleFixture.getAttribute("maskBits"));
                fixture.groupIndex = Short.parseShort(eleFixture.getAttribute("groupIndex"));
                fixture.name = eleFixture.getAttribute("name");
                fixture.mark = eleFixture.getAttribute("mark");
                /*
                <polygon>
                <vertex>{-16.0,3.0000114}</vertex>
                <vertex>{70.000015,16.000006}</vertex>
                <vertex>{80.0,77.0}</vertex>
                </polygon>
                */
                // Each child is one polygon; each of its children one "{x,y}" vertex.
                for (int j = 0; j < eleFixture.getChildCount(); j++) {
                    Element elePolygon = eleFixture.getChild(j);
                    int size = elePolygon.getChildCount();
                    Vector2[] vertexs = new Vector2[size];
                    for (int k = 0; k < size; k++) {
                        vertexs[k] = str2Vector(elePolygon.getChild(k).getText());
                    }
                    fixture.vertices.add(vertexs);
                }
                model.fixtureDefModels.add(fixture);
            } else if (fixtureType.equals("box")) {
                b2RectangleFixtureDefModel fixture = new b2RectangleFixtureDefModel();
                fixture.friction = Float.parseFloat(eleFixture.getAttribute("friction"));
                fixture.restitution = Float.parseFloat(eleFixture.getAttribute("restitution"));
                fixture.density = Float.parseFloat(eleFixture.getAttribute("density"));
                fixture.isSensor = Boolean.parseBoolean(eleFixture.getAttribute("isSensor"));
                fixture.categoryBits = Short.parseShort(eleFixture.getAttribute("categoryBits"));
                fixture.maskBits = Short.parseShort(eleFixture.getAttribute("maskBits"));
                fixture.groupIndex = Short.parseShort(eleFixture.getAttribute("groupIndex"));
                fixture.name = eleFixture.getAttribute("name");
                fixture.mark = eleFixture.getAttribute("mark");
                fixture.width = Float.parseFloat(eleFixture.getAttribute("width"));
                fixture.height = Float.parseFloat(eleFixture.getAttribute("height"));
                model.fixtureDefModels.add(fixture);
            } else if (fixtureType.equals("circle")) {
                b2CircleFixtureDefModel fixture = new b2CircleFixtureDefModel();
                fixture.friction = Float.parseFloat(eleFixture.getAttribute("friction"));
                fixture.restitution = Float.parseFloat(eleFixture.getAttribute("restitution"));
                fixture.density = Float.parseFloat(eleFixture.getAttribute("density"));
                fixture.isSensor = Boolean.parseBoolean(eleFixture.getAttribute("isSensor"));
                fixture.categoryBits = Short.parseShort(eleFixture.getAttribute("categoryBits"));
                fixture.maskBits = Short.parseShort(eleFixture.getAttribute("maskBits"));
                fixture.groupIndex = Short.parseShort(eleFixture.getAttribute("groupIndex"));
                fixture.name = eleFixture.getAttribute("name");
                fixture.mark = eleFixture.getAttribute("mark");
                fixture.radius = Float.parseFloat(eleFixture.getAttribute("radius"));
                model.fixtureDefModels.add(fixture);
            }
        }
        // ---- Pass 2: body definitions -----------------------------------
        Element eleBodys = root.getChildByName("bodys");
        for (int i = 0; i < eleBodys.getChildCount(); i++) {
            Element eleBody = eleBodys.getChild(i);
            b2BodyDefModel body = new b2BodyDefModel();
            String bType = eleBody.getAttribute("type");
            body.name = eleBody.getAttribute("name");
            // "StaticBody" -> 0, "KinematicBody" -> 1, anything else -> 2.
            body.type = bType.equals("StaticBody") ? 0 : (bType.equals("KinematicBody") ? 1 : 2);
            body.position.set(str2Vector(eleBody.getAttribute("position")));
            body.degrees = Float.parseFloat(eleBody.getAttribute("degrees"));
            body.linearVelocity.set(str2Vector(eleBody.getAttribute("linearVelocity")));
            body.angularVelocity = Float.parseFloat(eleBody.getAttribute("angularVelocity"));
            body.linearDamping = Float.parseFloat(eleBody.getAttribute("linearDamping"));
            body.angularDamping = Float.parseFloat(eleBody.getAttribute("angularDamping"));
            body.allowSleep = Boolean.parseBoolean(eleBody.getAttribute("allowSleep"));
            body.awake = Boolean.parseBoolean(eleBody.getAttribute("awake"));
            body.fixedRotation = Boolean.parseBoolean(eleBody.getAttribute("fixedRotation"));
            body.bullet = Boolean.parseBoolean(eleBody.getAttribute("bullet"));
            body.active = Boolean.parseBoolean(eleBody.getAttribute("active"));
            body.gravityScale = Float.parseFloat(eleBody.getAttribute("gravityScale"));
            body.drawableOffsetX = Float.parseFloat(eleBody.getAttribute("drawableOffsetX"));
            body.drawableOffsetY = Float.parseFloat(eleBody.getAttribute("drawableOffsetY"));
            body.drawableWidth = Float.parseFloat(eleBody.getAttribute("drawableWidth"));
            body.drawableHeight = Float.parseFloat(eleBody.getAttribute("drawableHeight"));
            body.mark = eleBody.getAttribute("mark");
            // Attach previously parsed fixtures referenced by name; unknown
            // references are silently skipped (refFixture returns null).
            for (int j = 0; j < eleBody.getChildCount(); j++) {
                b2FixtureDefModel ref = refFixture(model, eleBody.getChild(j).getAttribute("ref"));
                if (null != ref) body.fixtures.add(ref);
            }
            model.bodyDefModels.add(body);
        }
        // ---- Pass 3: joint definitions ----------------------------------
        Element eleJoints = root.getChildByName("joints");
        for (int i = 0; i < eleJoints.getChildCount(); i++) {
            Element eleJoint = eleJoints.getChild(i);
            String jointType = eleJoint.getAttribute("type");
            if (jointType.equals("DistanceJoint")) {
                b2DistanceJointDefModel def = new b2DistanceJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                // NOTE(review): joints read "mark"/"name" via get() while
                // fixtures/bodies use getAttribute(); get() also falls back
                // to a child element's text — confirm this is intentional.
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.length = Float.parseFloat(eleJoint.getAttribute("length"));
                def.frequencyHz = Float.parseFloat(eleJoint.getAttribute("frequencyHz"));
                def.dampingRatio = Float.parseFloat(eleJoint.getAttribute("dampingRatio"));
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                model.jointDefModels.add(def);
            } else if (jointType.equals("FrictionJoint")) {
                b2FrictionJointDefModel def = new b2FrictionJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.maxForce = Float.parseFloat(eleJoint.getAttribute("maxForce"));
                def.maxTorque = Float.parseFloat(eleJoint.getAttribute("maxTorque"));
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                model.jointDefModels.add(def);
            } else if (jointType.equals("PrismaticJoint")) {
                b2PrismaticJointDefModel def = new b2PrismaticJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.referenceDegrees = Float.parseFloat(eleJoint.getAttribute("referenceDegrees"));
                def.enableLimit = Boolean.parseBoolean(eleJoint.getAttribute("enableLimit"));
                def.enableMotor = Boolean.parseBoolean(eleJoint.getAttribute("enableMotor"));
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                def.localAxisA.set(str2Vector(eleJoint.getAttribute("localAxisA")));
                def.motorSpeed = Float.parseFloat(eleJoint.getAttribute("motorSpeed"));
                def.maxMotorForce = Float.parseFloat(eleJoint.getAttribute("maxMotorForce"));
                def.upperTranslation = Float.parseFloat(eleJoint.getAttribute("upperTranslation"));
                def.lowerTranslation = Float.parseFloat(eleJoint.getAttribute("lowerTranslation"));
                model.jointDefModels.add(def);
            } else if (jointType.equals("RevoluteJoint")) {
                b2RevoluteJointDefModel def = new b2RevoluteJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.referenceDegrees = Float.parseFloat(eleJoint.getAttribute("referenceDegrees"));
                def.enableLimit = Boolean.parseBoolean(eleJoint.getAttribute("enableLimit"));
                def.enableMotor = Boolean.parseBoolean(eleJoint.getAttribute("enableMotor"));
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                def.upperDegrees = Float.parseFloat(eleJoint.getAttribute("upperDegrees"));
                def.lowerDegrees = Float.parseFloat(eleJoint.getAttribute("lowerDegrees"));
                def.motorSpeed = Float.parseFloat(eleJoint.getAttribute("motorSpeed"));
                def.maxMotorTorque = Float.parseFloat(eleJoint.getAttribute("maxMotorTorque"));
                model.jointDefModels.add(def);
            } else if (jointType.equals("RopeJoint")) {
                b2RopeJointDefModel def = new b2RopeJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                def.maxLength = Float.parseFloat(eleJoint.getAttribute("maxLength"));
                model.jointDefModels.add(def);
            } else if (jointType.equals("WeldJoint")) {
                b2WeldJointDefModel def = new b2WeldJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                def.referenceDegrees = Float.parseFloat(eleJoint.getAttribute("referenceDegrees"));
                model.jointDefModels.add(def);
            } else if (jointType.equals("WheelJoint")) {
                b2WheelJointDefModel def = new b2WheelJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                def.localAxisA.set(str2Vector(eleJoint.getAttribute("localAxisA")));
                def.maxMotorTorque = Float.parseFloat(eleJoint.getAttribute("maxMotorTorque"));
                def.motorSpeed = Float.parseFloat(eleJoint.getAttribute("motorSpeed"));
                def.frequencyHz = Float.parseFloat(eleJoint.getAttribute("frequencyHz"));
                def.dampingRatio = Float.parseFloat(eleJoint.getAttribute("dampingRatio"));
                def.enableMotor = Boolean.parseBoolean(eleJoint.getAttribute("enableMotor"));
                model.jointDefModels.add(def);
            } else if (jointType.equals("PulleyJoint")) {
                b2PulleyJointDefModel def = new b2PulleyJointDefModel();
                def.collideConnected = Boolean.parseBoolean(eleJoint.getAttribute("collideConnected"));
                def.bodyA = refBody(model, eleJoint.getAttribute("bodyA"));
                def.bodyB = refBody(model, eleJoint.getAttribute("bodyB"));
                def.mark = eleJoint.get("mark");
                def.name = eleJoint.get("name");
                def.localAnchorA.set(str2Vector(eleJoint.getAttribute("localAnchorA")));
                def.localAnchorB.set(str2Vector(eleJoint.getAttribute("localAnchorB")));
                def.groundAnchorA.set(str2Vector(eleJoint.getAttribute("groundAnchorA")));
                def.groundAnchorB.set(str2Vector(eleJoint.getAttribute("groundAnchorB")));
                def.lengthA = Float.parseFloat(eleJoint.getAttribute("lengthA"));
                def.lengthB = Float.parseFloat(eleJoint.getAttribute("lengthB"));
                def.ratio = Float.parseFloat(eleJoint.getAttribute("ratio"));
                model.jointDefModels.add(def);
            } else if (jointType.equals("")) {
                //TODO: gear
            }
        }
        return model;
    }
    /**
     * Converts a string of the form "{x,y}" into a {@link Vector2}.
     * Throws NumberFormatException if either component is not a float.
     */
    static Vector2 str2Vector(String str) {
        String[] values = str.replace("{", "").replace("}", "").split(",");
        return new Vector2(Float.parseFloat(values[0]), Float.parseFloat(values[1]));
    }
    /**
     * get the reference fixture from the fixture list.
     * Returns null when no fixture with the given name exists.
     */
    static b2FixtureDefModel refFixture(b2Scene model, String ref) {
        for (b2FixtureDefModel fixture : model.fixtureDefModels) {
            if (fixture.name.equals(ref)) {
                return fixture;
            }
        }
        return null;
    }
    /**
     * Looks up a body definition by name in the already-parsed body list;
     * returns null when no body with the given name exists.
     */
    static b2BodyDefModel refBody(b2Scene model, String ref) {
        for (b2BodyDefModel body : model.bodyDefModels) {
            if (body.name.equals(ref)) {
                return body;
            }
        }
        return null;
    }
}
|
#!/bin/bash
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -ex #echo on

# This avoids requiring an internet connection for CanvasKit at runtime.
# This URL should be updated to keep in sync with the version from the engine.
# See https://github.com/flutter/engine/blob/master/lib/web_ui/lib/src/engine/canvaskit/initialization.dart#L50-L78,
# but compare with the code in master for getting the current version.
# A better solution would be to either upstream this functionality into the flutter_tools,
# (https://github.com/flutter/flutter/issues/74936), or to read this from a manifest
# provided (https://github.com/flutter/flutter/issues/74934).
function download_canvaskit() {
  local canvaskit_url=https://unpkg.com/canvaskit-wasm@0.28.1/bin/
  local flutter_bin
  flutter_bin=$(which flutter)
  # Quoting guards against a Flutter SDK installed under a path with spaces.
  local canvaskit_dart_file
  canvaskit_dart_file="$(dirname "$flutter_bin")/cache/flutter_web_sdk/lib/_engine/engine/canvaskit/initialization.dart"
  if ! grep -q "defaultValue: '$canvaskit_url'" "$canvaskit_dart_file"; then
    echo "CanvasKit $canvaskit_url does not match local web engine copy. Please update before continuing."
    # Exit statuses are 0-255; use 1 instead of the non-portable -1.
    exit 1
  fi
  mkdir -p build/web/assets/canvaskit/profiling
  # --fail makes curl exit non-zero on HTTP errors instead of silently saving
  # the error page as the asset; --location follows redirects. Combined with
  # `set -e`, a failed download now aborts the build.
  curl --fail --location "$canvaskit_url/canvaskit.js" -o build/web/assets/canvaskit/canvaskit.js
  curl --fail --location "$canvaskit_url/canvaskit.wasm" -o build/web/assets/canvaskit/canvaskit.wasm
  curl --fail --location "$canvaskit_url/profiling/canvaskit.js" -o build/web/assets/canvaskit/profiling/canvaskit.js
  curl --fail --location "$canvaskit_url/profiling/canvaskit.wasm" -o build/web/assets/canvaskit/profiling/canvaskit.wasm
}

pushd packages/devtools_app
rm -rf build
rm -rf ../devtools/build
download_canvaskit
flutter pub get
# Build a profile build rather than a release build to avoid minification
# as code size doesn't matter very much for us as minification makes some
# crashes harder to debug. For example, https://github.com/flutter/devtools/issues/2125
flutter build web \
  --pwa-strategy=none \
  --profile \
  --dart-define=FLUTTER_WEB_USE_EXPERIMENTAL_CANVAS_TEXT=true \
  --dart-define=FLUTTER_WEB_CANVASKIT_URL=assets/canvaskit/ \
  --no-tree-shake-icons
# Keep a copy of the non-Skia build before rebuilding with Skia enabled below.
cp build/web/main.dart.js build/web/main_fallback.dart.js
flutter build web \
  --pwa-strategy=none \
  --profile \
  --dart-define=FLUTTER_WEB_USE_SKIA=true \
  --dart-define=FLUTTER_WEB_CANVASKIT_URL=assets/canvaskit/ \
  --no-tree-shake-icons
mv build/web ../devtools/build
popd

pushd packages/devtools
flutter pub get
popd
|
<filename>tests/integration/test_generator.py
import datetime
import json
import os
from pathlib import Path
import pytest
import toml
from hooks import pre_gen_project
def get_cookiecutter_defaults():
    """Load the template's default context from ``cookiecutter.json``.

    The file lives three directories above this test module (the repository
    root). Returns the parsed JSON as a dict.
    """
    root = Path(__file__).resolve().parent.parent.parent
    # Explicit encoding so the test behaves identically on platforms whose
    # default text encoding is not UTF-8 (e.g. Windows).
    with open(os.path.join(root, "cookiecutter.json"), "r", encoding="utf-8") as cookie_cutter_file:
        return json.load(cookie_cutter_file)
def run_generated_project_assertions(generated_project, **kwargs):
    """Assert that a baked cookiecutter project matches the expected metadata.

    Expectations default to the values in ``cookiecutter.json``; any keyword
    argument overrides the corresponding expectation. Note that the default
    ``project_repo`` is derived from the *default* project name and
    organization, so callers overriding either of those usually pass
    ``project_repo`` explicitly as well.
    """
    defaults = get_cookiecutter_defaults()
    # Each expectation falls back to the template default unless overridden.
    project_name = kwargs.get("project_name", defaults["project_name"])
    project_description = kwargs.get(
        "project_description", defaults["project_description"]
    )
    author_full_name = kwargs.get("author_full_name", defaults["author_full_name"])
    author_email = kwargs.get("author_email", defaults["author_email"])
    github_organization = kwargs.get(
        "github_organization", defaults["github_organization"]
    )
    default_repo = (
        f"https://github.com/{defaults['github_organization']}/"
        f"{pre_gen_project.get_project_name_kebab_case(defaults['project_name'])}"
    )
    project_repo = kwargs.get("project_repo", default_repo)
    license = kwargs.get("license", defaults["license"][0])
    # Derived names always follow the (possibly overridden) project name.
    project_name_snake_case = pre_gen_project.get_project_name_snake_case(project_name)
    project_name_kebab_case = pre_gen_project.get_project_name_kebab_case(project_name)
    # Make sure correct context was passed to cookiecutter
    assert generated_project.context["project_name"] == project_name
    assert (
        generated_project.context["project_name_snake_case"] == project_name_snake_case
    )
    assert (
        generated_project.context["project_name_kebab_case"] == project_name_kebab_case
    )
    assert generated_project.context["project_description"] == project_description
    assert generated_project.context["author_full_name"] == author_full_name
    assert generated_project.context["author_email"] == author_email
    assert generated_project.context["github_organization"] == github_organization
    assert generated_project.context["project_repo"] == project_repo
    assert generated_project.context["license"] == license
    # This is so when new variables are added/removed we know to add tests for them :)
    assert len(generated_project.context) == 9
    # make sure the project was generated correctly
    assert generated_project.exit_code == 0
    assert generated_project.exception is None
    assert generated_project.project_path.name == project_name_kebab_case
    assert generated_project.project_path.is_dir()
    project_path = generated_project.project_path
    # Check for top level files/folders to be present
    toplevel_files = os.listdir(project_path)
    assert ".flake8" in toplevel_files
    if license != "Not open source":
        assert "LICENSE" in toplevel_files
    else:
        assert "LICENSE" not in toplevel_files
    assert "requirements.txt" in toplevel_files
    assert "CHANGELOG.md" in toplevel_files
    assert "Makefile" in toplevel_files
    assert "pyproject.toml" in toplevel_files
    assert "tests" in toplevel_files
    assert "docs" in toplevel_files
    assert ".editorconfig" in toplevel_files
    assert "README.md" in toplevel_files
    assert "setup.py" in toplevel_files
    assert ".gitignore" in toplevel_files
    assert "scripts" in toplevel_files
    assert ".github" in toplevel_files
    assert ".git" in toplevel_files
    assert ".vscode" in toplevel_files
    assert project_name_snake_case in toplevel_files
    # Check files in source project files
    src_files = os.listdir(os.path.join(project_path, project_name_snake_case))
    assert "cli" in src_files
    assert "__init__.py" in src_files
    assert "__main__.py" in src_files
    assert f"{project_name_snake_case}.py" in src_files
    # Check files in test folder
    top_level_test_files = os.listdir(os.path.join(project_path, "tests"))
    assert "__init__.py" in top_level_test_files
    assert "unit" in top_level_test_files
    assert "integration" in top_level_test_files
    unit_test_files = os.listdir(os.path.join(project_path, "tests", "unit"))
    assert "__init__.py" in unit_test_files
    assert f"test_{project_name_snake_case}.py" in unit_test_files
    integration_test_files = os.listdir(
        os.path.join(project_path, "tests", "integration")
    )
    assert "__init__.py" in integration_test_files
    assert f"test_{project_name_snake_case}.py" in integration_test_files
    # Check readme
    with open(
        os.path.join(project_path, "README.md"), "r", encoding="utf-8"
    ) as readme_file:
        readme_content = readme_file.read()
    assert project_name in readme_content
    assert project_description in readme_content
    assert f"{project_repo}/actions/workflows/tests.yml/badge.svg" in readme_content
    # Project.toml file assertions
    with open(
        os.path.join(project_path, "pyproject.toml"), "r", encoding="utf-8"
    ) as pyproj_file:
        project_metadata = toml.load(pyproj_file)
    assert project_metadata["project"]["name"] == project_name_snake_case.replace(
        "_", "-"
    )
    assert project_metadata["project"]["description"] == project_description
    assert project_metadata["project"]["authors"][0]["name"] == author_full_name
    assert project_metadata["project"]["authors"][0]["email"] == author_email
    assert project_metadata["project"]["maintainers"][0]["name"] == author_full_name
    assert project_metadata["project"]["maintainers"][0]["email"] == author_email
    assert project_metadata["project"]["urls"]["homepage"] == project_repo
    assert (
        project_metadata["project"]["urls"]["changelog"]
        == f"{project_repo}/blob/master/CHANGELOG.md"
    )
    assert (
        project_metadata["project"]["urls"]["documentation"]
        == f"{project_repo}/blob/master/README.md"
    )
    if license == "Not open source":
        assert "license" not in project_metadata["project"]
    else:
        assert project_metadata["project"]["license"]["file"] == "LICENSE"
    # TODO: Move this map as a private variable to cookiecutter.json when this
    # https://github.com/cookiecutter/cookiecutter/issues/1582 is resolved
    pypi_license_map = {
        "MIT License": "License :: OSI Approved :: MIT License",
        "BSD 2-Clause License": "License :: OSI Approved :: BSD License",
        "BSD 3-Clause License": "License :: OSI Approved :: BSD License",
        "ISC License": "License :: OSI Approved :: ISC License (ISCL)",
        "Apache License Version 2.0": "License :: OSI Approved :: Apache Software License",
        "GNU General Public License Version 3": "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
        "Unlicense": "License :: OSI Approved :: The Unlicense (Unlicense)",
        "Not open source": "",
    }
    if license != "Not open source":
        assert (
            pypi_license_map[license] in project_metadata["project"]["classifiers"]
        )
    else:
        for _, value in pypi_license_map.items():
            # None of these items should be in the array
            assert value not in project_metadata["project"]["classifiers"]
    assert project_name_kebab_case in project_metadata["project"]["entry-points"]
    assert (
        project_metadata["project"]["entry-points"][project_name_kebab_case]
        == f"{project_name_snake_case}.cli.entrypoint:main"
    )
    # License file assertions
    if license != "Not open source":
        with open(
            os.path.join(project_path, "LICENSE"), "r", encoding="utf-8"
        ) as license_file:
            license_content = license_file.read()
        # first line must be the license name
        assert license_content.splitlines()[0].strip().lower() == license.lower()
        # Make sure the correct year is added to the license file
        # (the Unlicense contains neither a year nor an author name).
        if license != "Unlicense":
            assert str(datetime.datetime.now().year) in license_content
            assert author_full_name in license_content
def test_bake_project_with_defaults_should_succeed(cookies):
    """Baking with no overrides must produce a valid default project."""
    run_generated_project_assertions(cookies.bake())
def test_bake_project_with_custom_project_name_should_succeed(cookies):
    """A name with mixed case, digits, spaces and underscores still bakes."""
    custom_name = "s0me Kool2_proJect"
    result = cookies.bake(extra_context={"project_name": custom_name})
    run_generated_project_assertions(
        result,
        project_name=custom_name,
        project_repo="https://github.com/janeDoe/s0me-kool2-project",
    )
@pytest.mark.parametrize(
    "project_name",
    ["my&project", "$$project", "[sdf]23", "1234", "1sdf", "cool^project"],
)
def test_bake_with_not_valid_project_name_should_fail(project_name, cookies):
    """Names with illegal characters or leading digits must abort generation."""
    result = cookies.bake(extra_context={"project_name": project_name})
    assert result.exception is not None
    assert result.exit_code == -1
# TODO: Delete this after migration to cookiecutter 2.0
def test_bake_project_with_custom_snake_case_name_should_fail(cookies):
    """Overriding the derived snake_case name directly is rejected."""
    overrides = {
        "project_name": "my project",
        "project_name_snake_case": "my_project_2",
    }
    result = cookies.bake(extra_context=overrides)
    assert result.exception is not None
    assert result.exit_code == -1
# TODO: Delete this after migration to cookiecutter 2.0
def test_bake_project_with_custom_kebab_case_name_should_fail(cookies):
    """Overriding the derived kebab-case name directly is rejected."""
    overrides = {
        "project_name": "my project",
        "project_name_kebab_case": "my-cool-project",
    }
    result = cookies.bake(extra_context=overrides)
    assert result.exception is not None
    assert result.exit_code == -1
def test_bake_with_custom_metadata_should_succeed(cookies):
    """All user-facing metadata fields can be overridden together."""
    input_data = {
        "project_name": "advanced calculator",
        "project_description": "This calculator can be used in a rocket ship",
        "author_full_name": "<NAME>",
        "author_email": "<EMAIL>",
        "github_organization": "theDarkKight",
        "project_repo": "https://onprem.com/wayneenterprises/bruce-advance-calc",
    }
    run_generated_project_assertions(
        cookies.bake(extra_context=input_data), **input_data
    )
def test_bake_with_auto_generated_project_repo_should_succeed(cookies):
    """When no repo URL is given it is derived from org and kebab-case name."""
    extra = {
        "project_name": "Mission controller",
        "github_organization": "nasa",
    }
    result = cookies.bake(extra_context=extra)
    run_generated_project_assertions(
        result,
        project_name="Mission controller",
        github_organization="nasa",
        project_repo="https://github.com/nasa/mission-controller",
    )
@pytest.mark.parametrize("license", get_cookiecutter_defaults()["license"])
def test_bake_with_different_licesens_should_succeed(license, cookies):
    """Every license choice from cookiecutter.json bakes successfully.

    NOTE(review): "licesens" is a typo for "licenses", but renaming would
    change the pytest test id, so the name is kept as-is.
    """
    run_generated_project_assertions(
        cookies.bake(extra_context={"license": license}), license=license
    )
|
import { OssService } from '@/common/services/oss';
import {
Body,
Controller,
Post,
UploadedFile,
UseInterceptors
} from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import dayjs from 'dayjs';
import nanoid from 'nanoid/generate';
import path from 'path';
// Alphabet used for the random portion of generated filenames.
const dict = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ';

/**
 * File-sharing endpoint: uploaded files are stored in OSS under a
 * timestamped, randomised name inside the requested folder.
 */
@Controller('share')
export class ShareController {
  constructor(private readonly ossService: OssService) {}

  /**
   * Accepts a multipart upload ("file" field) and stores it in OSS.
   *
   * @param file - uploaded file (provides `originalname` and `buffer`)
   * @param folderName - target folder, used as the OSS key prefix
   * @returns whatever `OssService.put` resolves to for the stored object
   */
  @Post('upload')
  @UseInterceptors(FileInterceptor('file'))
  async upload(@UploadedFile() file, @Body('folderName') folderName: string): Promise<string> {
    // 12 random characters keep same-millisecond uploads from colliding.
    const name = nanoid(dict, 12);
    const ext = path.extname(file.originalname);
    // Bug fix: the previous format string ended in "ms", which dayjs treats
    // as the minute ("m") and second ("s") tokens repeated — not
    // milliseconds. "SSS" is the dayjs token for zero-padded milliseconds.
    const filename = `${dayjs().format('YYYYMMDDHHmmssSSS')}-${name + ext}`;
    return this.ossService.put(filename, file.buffer, folderName + '/');
  }
}
|
<reponame>obino/midonet
/*
* Copyright 2017 Midokura SARL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.midonet.cluster.data.storage.cached
import java.util
import scala.concurrent.duration._
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfter, FeatureSpec, Matchers}
import org.scalatest.junit.JUnitRunner
import rx.observers.TestObserver
import org.midonet.cluster.cache.ObjectNotification.{MappedSnapshot => ObjSnapshot}
import org.midonet.cluster.data.storage.{InMemoryStorage, Storage}
import org.midonet.cluster.models.Topology.{Port, Router}
import org.midonet.cluster.util.ClassAwaitableObserver
import org.midonet.cluster.util.UUIDUtil.randomUuidProto
import org.midonet.util.concurrent._
import org.midonet.util.reactivex.{AssertableObserver, AwaitableObserver}
@RunWith(classOf[JUnitRunner])
class StorageWrapperTest extends FeatureSpec with Matchers with BeforeAndAfter {

    // TTL (in milliseconds) handed to the StorageWrapper's cache.
    private val cacheTtl = 2000

    // One router and one port, built up front so the snapshot and the
    // assertions below compare against the very same protobuf instances.
    private val router1Id = randomUuidProto
    private val router1 = Router.newBuilder().setId(router1Id).build()
    private val port1Id = randomUuidProto
    private val port1 = Port.newBuilder().setId(port1Id).build()

    // Seed snapshot for the wrapper: a class -> (id -> object) mapping
    // holding the router and the port above.
    private val objSnapshot = {
        val objSnapshot = new ObjSnapshot()
        val routerEntry = new util.HashMap[AnyRef, AnyRef]()
        routerEntry.put(router1Id, router1)
        objSnapshot.put(classOf[Router], routerEntry)
        val portEntry = new util.HashMap[AnyRef, AnyRef]()
        portEntry.put(port1Id, port1)
        objSnapshot.put(classOf[Port], portEntry)
        objSnapshot
    }

    // Backing store and the wrapper under test; recreated before each test.
    private var store: Storage = _
    private var wrapper: Storage = _

    // Observer that can be awaited on; its assert() is a no-op because the
    // scenarios make their own assertions on the received events.
    private def makeObservable[T]() =
        new TestObserver[T] with AwaitableObserver[T] with AssertableObserver[T] {
            override def assert() = {}
        }

    before {
        store = new InMemoryStorage()
        store.registerClass(classOf[Router])
        store.registerClass(classOf[Port])
        store.build()
        // The backing store stays empty: every read below is served from
        // the snapshot until invalidateCache() is called.
        wrapper = new StorageWrapper(cacheTtl, store, objSnapshot)
    }

    feature("Query cached objects in the wrapper") {
        scenario("Cached exists") {
            wrapper.exists(classOf[Router], router1Id).await() shouldBe true
            wrapper.exists(classOf[Port], port1Id).await() shouldBe true
        }

        scenario("Cached exists for non-existent ids") {
            wrapper.exists(classOf[Router], randomUuidProto).await() shouldBe false
        }

        scenario("Invalidated cache for exists") {
            // After invalidation the wrapper falls through to the (empty)
            // backing store, so the port is no longer found.
            wrapper.asInstanceOf[StorageWrapper].invalidateCache()
            wrapper.exists(classOf[Port], port1Id).await() shouldBe false
        }

        scenario("Get a cached object from the wrapper") {
            val cached = wrapper.get(classOf[Port], port1Id).await()
            cached shouldBe port1
        }

        scenario("Get all cached objects from the wrapper") {
            val cached = wrapper.getAll(classOf[Router]).await()
            cached.size shouldBe 1
            cached.head shouldBe router1
        }

        scenario("Get all cached objects by id from the wrapper") {
            val cached = wrapper.getAll(classOf[Router], Seq(router1Id)).await()
            cached.size shouldBe 1
            cached.head shouldBe router1
        }

        scenario("Observe on a class") {
            // The class observable emits one per-object observable; the
            // inner observable replays the cached port and then completes.
            val classObs = new ClassAwaitableObserver[Port](1)
            val obs = makeObservable[Port]()
            wrapper.observable(classOf[Port]).subscribe(classObs)
            classObs.await(5 seconds, 0)
            classObs.getOnNextEvents().get(0).subscribe(obs)
            obs.awaitCompletion(5 seconds)
            val ports = obs.getOnNextEvents()
            ports.size() shouldBe 1
            ports.get(0) shouldBe port1
        }

        scenario("Observe on a particular object") {
            val obs = makeObservable[Port]()
            wrapper.observable(classOf[Port], port1Id).subscribe(obs)
            obs.awaitCompletion(5 seconds)
            val ports = obs.getOnNextEvents()
            ports.size() shouldBe 1
            ports.get(0) shouldBe port1
        }
    }

    feature("Create, delete or update objects") {
        scenario("Only read operations are supported on the cache") {
            a [NotImplementedError] shouldBe thrownBy {
                wrapper.create(Port.newBuilder().build())
            }
            a [NotImplementedError] shouldBe thrownBy {
                wrapper.delete(classOf[Port], port1Id)
            }
            a [NotImplementedError] shouldBe thrownBy {
                wrapper.update(Port.newBuilder().build())
            }
        }

        scenario("After invalidation, write operations are supported") {
            // Once the cache is dropped, writes go straight to the store.
            wrapper.asInstanceOf[StorageWrapper].invalidateCache()
            val port = Port.newBuilder().setId(randomUuidProto).build()
            wrapper.create(port)
            wrapper.update(port.toBuilder.setInterfaceName("test").build())
            wrapper.delete(classOf[Port], port.getId)
        }
    }
}
|
// cotire example project main
#include <string>
#include "example.h"
#include "log.h"
int main()
{
std::string msg = example::get_message();
logging::info(msg);
}
|
#!/bin/bash -e
#
# Build a Docker image and push it tagged with the short commit hash.
# Usage: <script> <Dockerfile> <build-context> <image-name>
# Expects DOCKER_USER, DOCKER_PASS, CONTAINER_REPOSITORY and the
# BITBUCKET_* pipeline variables in the environment.

DOCKERFILE=$1
BUILD_CONTEXT=$2
IMAGE_NAME=$3

if [[ -z "$DOCKERFILE" || -z "$BUILD_CONTEXT" || -z "$IMAGE_NAME" ]]; then
    echo "Usage: $0 <Dockerfile> <build-context> <image-name>" >&2
    exit 1
fi

BITBUCKET_COMMIT_SHORT="${BITBUCKET_COMMIT::7}"
FULL_IMAGE_NAME="${IMAGE_NAME}:${BITBUCKET_COMMIT_SHORT}"

# --password-stdin keeps the password out of the process list (the
# original passed it via -p, visible to `ps`).
echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin "$CONTAINER_REPOSITORY"

docker build --force-rm -t "$FULL_IMAGE_NAME" -f "$DOCKERFILE" "$BUILD_CONTEXT" --build-arg PIP_PACKAGES=pip-packages

# Branch and build-number tags are created locally; only the commit tag
# is pushed, as in the original.
docker tag "$FULL_IMAGE_NAME" "$IMAGE_NAME:$BITBUCKET_BRANCH"
docker tag "$FULL_IMAGE_NAME" "$IMAGE_NAME:$BITBUCKET_BRANCH-$BITBUCKET_BUILD_NUMBER-$BITBUCKET_COMMIT_SHORT"

docker push "$FULL_IMAGE_NAME"
|
/*
* Copyright 2021 CloudWeGo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package main
import (
"time"
"github.com/cloudwego/netpoll"
)
// main demonstrates the two ways of dialing a netpoll connection and
// sending a single message.
func main() {
	network, address, timeout := "tcp", "127.0.0.1:8080", 50*time.Millisecond

	// Dial with the package-level default dialer.  The original ignored
	// the error, so a failed dial dereferenced a nil connection.
	conn, err := netpoll.DialConnection(network, address, timeout)
	if err != nil {
		panic(err) // example code: fail loudly
	}
	conn.Close()

	// Dial again through an explicitly constructed dialer.
	dialer := netpoll.NewDialer()
	conn, err = dialer.DialConnection(network, address, timeout)
	if err != nil {
		panic(err)
	}
	defer conn.Close() // the original leaked this second connection

	// Buffer a message and flush it onto the wire, checking each step.
	writer := conn.Writer()
	if _, err = writer.WriteString("hello world"); err != nil {
		panic(err)
	}
	if err = writer.Flush(); err != nil {
		panic(err)
	}
}
|
<filename>templates/%clib%/src/common/memord.h
/***********************************************************************
* ©2013-2016 <NAME>.
* Distributed under the simplified BSD license (see the license file that
* should have come with this header).
* Uses Jeff Preshing's semaphore implementation (under the terms of its
* separate zlib license, embedded below).
*
* Provides portable (VC++2010+, Intel ICC 13, GCC 4.7+, and anything C++11 compliant) implementation
* of low-level memory barriers, plus a few semi-portable utility macros (for inlining and alignment).
* Also has a basic atomic type (limited to hardware-supported atomics with no memory ordering guarantees).
* Uses the AE_* prefix for macros (historical reasons), and the "moodycamel" namespace for symbols.
**********************************************************************/
/**
* @filename memord.h
* Memory Order operations both for Windows and Linux.
* https://git.project-hobbit.eu/dj16/ricec/raw/c9d3dceb1c3b1c03a42077e0461e3ce5a2615a51/data/atomicops.h
* https://github.com/preshing/cpp11-on-multicore/blob/master/common/sema.h
* https://www.cnblogs.com/lizhanzhe/p/10893016.html
* @author <NAME>
* @version 1.0.0
* @create 2020-12-08 10:46:50
* @update 2021-07-20 13:12:10
*/
#ifndef _MEMORY_ORDER_H_
#define _MEMORY_ORDER_H_
#if defined(__cplusplus)
extern "C"
{
#endif
#include <assert.h>
/* Platform detection */
#if defined(__INTEL_COMPILER)
# define AE_ICC
#elif defined(_MSC_VER)
# define AE_VCPP
#elif defined(__GNUC__)
# define AE_GCC
#endif
#if defined(_M_IA64) || defined(__ia64__)
# define AE_ARCH_IA64
#elif defined(_WIN64) || defined(__amd64__) || defined(_M_X64) || defined(__x86_64__)
# define AE_ARCH_X64
#elif defined(_M_IX86) || defined(__i386__)
# define AE_ARCH_X86
#elif defined(_M_PPC) || defined(__powerpc__)
# define AE_ARCH_PPC
#else
# define AE_ARCH_UNKNOWN
#endif
#ifndef NOWARNING_UNUSED
# if defined(__GNUC__) || defined(__CYGWIN__)
# define NOWARNING_UNUSED(x) __attribute__((unused)) x
# else
# define NOWARNING_UNUSED(x) x
# endif
#endif
#ifndef STATIC_INLINE
# if defined(_MSC_VER)
# define STATIC_INLINE NOWARNING_UNUSED(static) __forceinline
# elif defined(__GNUC__) || defined(__CYGWIN__)
# define STATIC_INLINE NOWARNING_UNUSED(static) __attribute__((always_inline)) inline
# else
# define STATIC_INLINE NOWARNING_UNUSED(static)
# endif
#endif
/* AE_ALIGN */
#if defined(AE_VCPP) || defined(AE_ICC)
# define AE_ALIGN(x) __declspec(align(x))
#elif defined(AE_GCC)
# define AE_ALIGN(x) __attribute__((aligned(x)))
#else
/* Assume GCC compliant syntax... */
# define AE_ALIGN(x) __attribute__((aligned(x)))
#endif
/* Portable atomic fences implemented below */
typedef enum {
memory_order_relaxed,
memory_order_acquire,
memory_order_release,
memory_order_acq_rel,
memory_order_seq_cst,
/**
* memory_order_sync
* Forces a full sync:
* #LoadLoad, #LoadStore, #StoreStore, and most significantly, #StoreLoad
*/
memory_order_sync = memory_order_seq_cst
} memory_order;
#if defined(AE_VCPP) || defined(AE_ICC)
// VS2010 and ICC13 don't support std::atomic_*_fence, implement our own fences
# include <intrin.h>
#if defined(AE_ARCH_X64) || defined(AE_ARCH_X86)
# define AeFullSync _mm_mfence
# define AeLiteSync _mm_mfence
#elif defined(AE_ARCH_IA64)
# define AeFullSync __mf
# define AeLiteSync __mf
#elif defined(AE_ARCH_PPC)
# include <ppcintrinsics.h>
# define AeFullSync __sync
# define AeLiteSync __lwsync
#endif
#ifdef AE_VCPP
# pragma warning(push)
// Disable erroneous 'conversion from long to unsigned int, signed/unsigned mismatch' error when using `assert`
# pragma warning(disable: 4365)
# ifdef __cplusplus_cli
# pragma managed(push, off)
# endif
#endif
/* Compiler-only fence (MSVC/ICC path): stops the *compiler* from moving
 * memory accesses across this point; emits no CPU instruction. */
STATIC_INLINE void __mo_compiler_fence(memory_order order)
{
    switch (order) {
        case memory_order_relaxed:
            /* No ordering constraint at all. */
            break;
        case memory_order_acquire:
            _ReadBarrier();         /* reads may not move above this point */
            break;
        case memory_order_release:
            _WriteBarrier();        /* writes may not move below this point */
            break;
        case memory_order_acq_rel:
            _ReadWriteBarrier();
            break;
        case memory_order_seq_cst:
            /* Same as acq_rel at the compiler level; hardware ordering is
             * handled by __mo_fence. */
            _ReadWriteBarrier();
            break;
        default:
            assert(0);              /* unknown memory_order value */
    }
}
/**
* x86/x64 have a strong memory model -- all loads and stores have acquire and
* release semantics automatically (so only need compiler barriers for those).
*/
#if defined(AE_ARCH_X86) || defined(AE_ARCH_X64)
/* Hardware fence, x86/x64 path.  Per the note above, loads and stores
 * already carry acquire/release semantics on this architecture, so every
 * order except seq_cst reduces to a compiler barrier. */
STATIC_INLINE void __mo_fence(memory_order order)
{
    switch (order) {
        case memory_order_relaxed:
            break;
        case memory_order_acquire:
            _ReadBarrier();
            break;
        case memory_order_release:
            _WriteBarrier();
            break;
        case memory_order_acq_rel:
            _ReadWriteBarrier();
            break;
        case memory_order_seq_cst:
            /* Compiler barrier + full CPU fence (mfence on x86, see
             * AeFullSync above) + compiler barrier: nothing may cross. */
            _ReadWriteBarrier();
            AeFullSync();
            _ReadWriteBarrier();
            break;
        default:
            assert(0);
            break;
    }
}
#else
STATIC_INLINE void __mo_fence(memory_order order)
{
    // Non-specialized arch, use heavier memory barriers everywhere just in case :-(
    switch (order) {
        case memory_order_relaxed:
            break;
        case memory_order_acquire:
            /* Lighter-weight sync (AeLiteSync, see platform mapping above)
             * bracketed by compiler barriers. */
            _ReadBarrier();
            AeLiteSync();
            _ReadBarrier();
            break;
        case memory_order_release:
            _WriteBarrier();
            AeLiteSync();
            _WriteBarrier();
            break;
        case memory_order_acq_rel:
            _ReadWriteBarrier();
            AeLiteSync();
            _ReadWriteBarrier();
            break;
        case memory_order_seq_cst:
            /* Full sync for sequential consistency. */
            _ReadWriteBarrier();
            AeFullSync();
            _ReadWriteBarrier();
            break;
        default:
            assert(0);
            break;
    }
}
#endif
#elif defined(__cplusplus)
// Use standard library of atomics for cpp
# include <atomic>
/* C++ path: map each local memory_order onto the matching <atomic>
 * signal fence (a compiler-only barrier). */
STATIC_INLINE void __mo_compiler_fence(memory_order order)
{
    switch (order) {
        case memory_order_relaxed: break;
        case memory_order_acquire: std::atomic_signal_fence(std::memory_order_acquire); break;
        case memory_order_release: std::atomic_signal_fence(std::memory_order_release); break;
        case memory_order_acq_rel: std::atomic_signal_fence(std::memory_order_acq_rel); break;
        case memory_order_seq_cst: std::atomic_signal_fence(std::memory_order_seq_cst); break;
        default: assert(0);  /* unknown memory_order value */
    }
}
/* C++ path: map each local memory_order onto the matching <atomic>
 * thread fence (a real hardware barrier where the target needs one). */
STATIC_INLINE void __mo_fence(memory_order order)
{
    switch (order) {
        case memory_order_relaxed: break;
        case memory_order_acquire: std::atomic_thread_fence(std::memory_order_acquire); break;
        case memory_order_release: std::atomic_thread_fence(std::memory_order_release); break;
        case memory_order_acq_rel: std::atomic_thread_fence(std::memory_order_acq_rel); break;
        case memory_order_seq_cst: std::atomic_thread_fence(std::memory_order_seq_cst); break;
        default: assert(0);  /* unknown memory_order value */
    }
}
#endif
#if defined(AE_VCPP) && (_MSC_VER < 1700 || defined(__cplusplus_cli))
# pragma warning(pop)
# ifdef __cplusplus_cli
# pragma managed(pop)
# endif
#endif
#ifdef __cplusplus
}
#endif
#endif /* _MEMORY_ORDER_H_ */
|
<filename>src/common/include/memManagement.h
/*==================================================================*\
| EXIP - Embeddable EXI Processor in C |
|--------------------------------------------------------------------|
| This work is licensed under BSD 3-Clause License |
| The full license terms and conditions are located in LICENSE.txt |
\===================================================================*/
/**
* @file memManagement.h
* @brief Handles memory operations - allocation, deallocation etc.
* Except Data Values (Binary data, DateTime etc.), which are
* freed after the respective ContentHandler is called, all other
* allocations are freed at the end of the parsing/serializing.
*
* @date Oct 25, 2010
* @author <NAME>
* @version 0.5
* @par[Revision] $Id$
*/
#ifndef MEMMANAGEMENT_H_
#define MEMMANAGEMENT_H_
#include "errorHandle.h"
#include "procTypes.h"
/**
* @brief Initial setup of an AllocList
*
* @param[in, out] list a memory list to be setup
* @return Error handling code
*/
errorCode initAllocList(AllocList* list);
/**
* @brief Allocate a memory block with size size and store a copy of
* the pointer in a linked list for freeing it at the end.
*
* @param[in, out] list A list storing the memory allocations
* @param[in] size the size of the memory block to be allocated
* @return pointer to the allocated memory if successful. NULL otherwise
*/
void* memManagedAllocate(AllocList* list, size_t size);
/**
* @brief Frees all the managed memory for a particular EXI stream.
* It should be called after an error occurs in the processing or at the
* end of the parsing/serializing if the processing is successful.
* Frees the memory in the allocation list and the hashtables used.
*
* @param[in, out] strm EXI stream for which the allocations were made
*/
void freeAllMem(EXIStream* strm);
/**
* @brief Frees a particular Allocation list
*
* @param[in, out] list Allocation list to be freed
* @return Error handling code
*/
void freeAllocList(AllocList* list);
#endif /* MEMMANAGEMENT_H_ */
|
<filename>api/_bot/middleware/debug.js
import _ from 'lodash'
/**
 * Logging middleware: prints the incoming update's sender and payload to
 * the debug console, then hands control to the next handler in the chain.
 */
export default (ctx, next) => {
  // Prefer the message text; fall back to the first update sub-type label.
  const payload = _.get(ctx, 'message.text') || _.get(ctx, 'updateSubTypes[0]')
  const sender = _.get(ctx, 'from.username', 'user')
  console.debug(`> ${sender}: ${payload}`)
  return next()
}
|
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.cbean.result.grouping;
/**
 * The determiner of grouping list: decides where one grouped row ends and
 * the next begins while a result list is being scanned.
 * @param <ENTITY> The type of entity.
 * @author jflute
 */
public interface GroupingListDeterminer<ENTITY> {

    /**
     * Determine whether the grouped row is break.
     * @param rowResource The row resource of grouping list. (NotNull and the property 'groupingRowList' is not empty and the property 'currentEntity' is not null)
     * @param nextEntity The entity of next element. (NotNull and the rowResource does not contain yet)
     * @return The determination, true or false. (true if the next entity is registered to next row)
     */
    boolean isBreakRow(GroupingListRowResource<ENTITY> rowResource, ENTITY nextEntity);
}
|
<gh_stars>0
const { app, BrowserWindow, ipcMain, Tray, nativeImage, shell } = require('electron')
const path = require('path-extra')
// Environment
global.POI_VERSION = app.getVersion()
global.ROOT = __dirname
global.EXECROOT = path.join(process.execPath, '..')
global.APPDATA_PATH = path.join(app.getPath('appData'), 'poi')
global.EXROOT = global.APPDATA_PATH
global.DEFAULT_CACHE_PATH = path.join(global.EXROOT, 'MyCache')
global.DEFAULT_SCREENSHOT_PATH =
process.platform === 'darwin'
? path.join(app.getPath('home'), 'Pictures', 'Poi')
: path.join(global.APPDATA_PATH, 'screenshots')
global.MODULE_PATH = path.join(global.ROOT, 'node_modules')
const { ROOT } = global
const poiIconPath = path.join(
ROOT,
'assets',
'icons',
process.platform === 'linux' ? 'poi_32x32.png' : 'poi.ico',
)
// high sierra and above
const isModernMacOS =
process.platform === 'darwin' &&
Number(
require('os')
.release()
.split('.')[0],
) >= 17
require('./lib/module-path').setAllowedPath([global.ROOT, global.APPDATA_PATH])
const config = require('./lib/config')
const proxy = require('./lib/proxy')
const shortcut = require('./lib/shortcut')
const { warn, error } = require('./lib/utils')
const dbg = require('./lib/debug')
require('./lib/updater')
proxy.setMaxListeners(30)
// Disable HA
if (config.get('poi.misc.disablehwaccel', false)) {
app.disableHardwareAcceleration()
}
// check safe mode config
if (config.get('poi.misc.safemode', false)) {
warn('Entering SAFE MODE according to config.')
global.isSafeMode = true
config.set('poi.misc.safemode')
}
// Add shortcut to start menu when os is windows
app.setAppUserModelId('org.poooi.poi')
if (process.platform === 'win32' && config.get('poi.misc.shortcut', true)) {
const shortcutPath =
app.getPath('appData') + '\\Microsoft\\Windows\\Start Menu\\Programs\\poi.lnk'
const targetPath = app.getPath('exe')
const argPath = app.getAppPath()
const option = {
target: targetPath,
args: argPath,
appUserModelId: 'org.poooi.poi',
description: 'poi the KanColle Browser Tool',
}
if (!ROOT.includes('.asar')) {
Object.assign(option, {
icon: path.join(ROOT, 'assets', 'icons', 'poi.ico'),
iconIndex: 0,
})
}
shell.writeShortcutLink(shortcutPath, option)
const safeModeShortcutPath =
app.getPath('appData') + '\\Microsoft\\Windows\\Start Menu\\Programs\\poi (safe mode).lnk'
const safeModeOption = Object.assign({}, option)
Object.assign(safeModeOption, {
description: 'poi the KanColle Browser Tool (safe mode)',
args: `${argPath} --safe`,
appUserModelId: 'org.poooi.poi.safe',
})
shell.writeShortcutLink(safeModeShortcutPath, safeModeOption)
}
if (dbg.isEnabled()) {
global.SERVER_HOSTNAME = '127.0.0.1:17027'
} else {
global.SERVER_HOSTNAME = 'poi.0u0.moe'
process.env.NODE_ENV = 'production'
}
require('./lib/flash')
let mainWindow, appIcon
global.mainWindow = mainWindow = null
// Set FPS limit
if (config.get('poi.misc.limitFps.enabled')) {
  // Parse with an explicit radix — config values may arrive as strings.
  const value = Number.parseInt(config.get('poi.misc.limitFps.value'), 10)
  if (Number.isFinite(value)) {
    app.commandLine.appendSwitch('limit-fps', String(value))
  }
}
// Test: enable JavaScript experimental features
app.commandLine.appendSwitch('js-flags', '--harmony --harmony-do-expressions')
// enable audio autoplay
// https://github.com/electron/electron/issues/13525#issuecomment-410923391
app.commandLine.appendSwitch('autoplay-policy', 'no-user-gesture-required')
// Fix GPU acceleration
// app.commandLine.appendSwitch('enable-accelerated-2d-canvas', 'true')
// app.commandLine.appendSwitch('ignore-gpu-blacklist', 'true')
// app.commandLine.appendSwitch('enable-gpu-rasterization', 'true')
// app.commandLine.appendSwitch('enable-native-gpu-memory-buffers', 'true')
// app.commandLine.appendSwitch('enable-surface-synchronization', 'true')
// app.commandLine.appendSwitch('enable-checker-imaging', 'true')
// Cache size: the 1048576 multiplier converts the configured value
// (MiB, presumably — confirm against the settings UI) to bytes.
const cacheSize = Number.parseInt(config.get('poi.misc.cache.size'), 10)
if (Number.isInteger(cacheSize)) {
  app.commandLine.appendSwitch('disk-cache-size', `${1048576 * cacheSize}`)
}
// Quit as soon as the last window closes (no platform special-casing),
// releasing the registered global shortcuts first.
app.on('window-all-closed', () => {
  shortcut.unregister()
  app.quit()
})
app.on('ready', () => {
const { screen } = require('electron')
shortcut.register()
const { workArea } = screen.getPrimaryDisplay()
let { x, y, width, height } = config.get('poi.window', workArea)
const validate = (n, min, range) => n != null && n >= min && n < min + range
const withinDisplay = d => {
const wa = d.workArea
return validate(x, wa.x, wa.width) && validate(y, wa.y, wa.height)
}
if (!screen.getAllDisplays().some(withinDisplay)) {
x = workArea.x
y = workArea.y
}
if (width == null) {
width = workArea.width
}
if (height == null) {
height = workArea.height
}
global.mainWindow = mainWindow = new BrowserWindow({
x: x,
y: y,
width: width,
height: height,
title: 'poi',
icon: poiIconPath,
resizable: config.get('poi.content.resizable', true),
alwaysOnTop: config.get('poi.content.alwaysOnTop', false),
// FIXME: titlebarStyle and transparent: https://github.com/electron/electron/issues/14129
titleBarStyle: isModernMacOS ? 'hiddenInset' : null,
transparent: isModernMacOS,
frame: !config.get(
'poi.appearance.customtitlebar',
process.platform === 'win32' || process.platform === 'linux',
),
enableLargerThanScreen: true,
maximizable: config.get('poi.content.resizable', true),
fullscreenable: config.get('poi.content.resizable', true),
webPreferences: {
plugins: true,
nodeIntegrationInWorker: true,
nativeWindowOpen: true,
zoomFactor: config.get('poi.appearance.zoom', 1),
experimentalFeatures: true,
},
backgroundColor: '#00000000',
})
// Default menu
mainWindow.reloadArea = 'kan-game webview'
if (process.platform === 'darwin') {
const { renderMainTouchbar } = require('./lib/touchbar')
renderMainTouchbar()
if (/electron$/i.test(process.argv[0])) {
const icon = nativeImage.createFromPath(`${ROOT}/assets/icons/poi.png`)
app.dock.setIcon(icon)
}
} else {
mainWindow.setMenu(null)
}
mainWindow.loadURL(`file://${__dirname}/index.html${dbg.isEnabled() ? '?react_perf' : ''}`)
if (config.get('poi.window.isMaximized', false)) {
mainWindow.maximize()
}
if (config.get('poi.window.isFullScreen', false)) {
mainWindow.setFullScreen(true)
}
if (dbg.isEnabled()) {
mainWindow.openDevTools({
mode: 'detach',
})
}
// Never wants navigate
mainWindow.webContents.on('will-navigate', e => {
e.preventDefault()
})
mainWindow.on('closed', () => {
// Close all sub window
require('./lib/window').closeWindows()
mainWindow = null
})
// Tray icon
if (process.platform === 'win32' || process.platform === 'linux') {
global.appIcon = appIcon = new Tray(poiIconPath)
appIcon.on('click', () => {
if (mainWindow.isMinimized()) {
mainWindow.restore()
} else {
mainWindow.show()
}
})
}
// devtool
if (dbg.isEnabled() && config.get('poi.devtool.enable', false)) {
require('./lib/devtool')
}
})
// http basic auth
// HTTP basic auth: suppress Electron's default handling, ask the renderer
// for credentials, and answer the callback once they arrive.
app.on('login', (event, webContents, request, authInfo, callback) => {
  event.preventDefault()
  mainWindow.webContents.send('http-basic-auth', 'login')
  ipcMain.once('basic-auth-info', (event, usr, pwd) => {
    callback(usr, pwd)
  })
})

// Re-register global shortcuts on renderer request.
ipcMain.on('refresh-shortcut', () => {
  shortcut.unregister()
  shortcut.register()
})

// Last-resort handler: log the stack instead of letting the process die.
process.on('uncaughtException', e => {
  error(e.stack)
})
|
#!/bin/bash
#
# Bergamota-ng build scripts (c) 2018 Cassiano Martin <cassiano@polaco.pro.br>
# Copyright (c) 2018 Cassiano Martin
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
######################################
#
# CrossTool exports / Directories
#
######################################
# Cross-toolchain setup.  Paths are derived from $(pwd), so this script
# must be started from the repository root.
export ARCH=mips
export CROSS_COMPILE=$(pwd)/tools/realtek/rsdk-4.4.7-4181-EB-2.6.30-0.9.30-m32u-140129/bin/rsdk-linux-
export PATH=$PATH:$(pwd)/tools/realtek/rsdk-4.4.7-4181-EB-2.6.30-0.9.30-m32u-140129/bin

# NOTE(review): OLDPWD is a bash-managed variable (rewritten by every
# `cd`), so this assignment is clobbered below — confirm later parts of
# the script do not rely on it.
OLDPWD=$(pwd)
TOOLS_DIR=$(pwd)/tools
OUTPUT_DIR=$(pwd)/output
ROOTFS_DIR=$OUTPUT_DIR/rootfs

# iptables configure script does not detect cross compiler, need to
# force ambient variables to get it work right.
export CC=${CROSS_COMPILE}gcc
export CXX=${CROSS_COMPILE}"g++"
export AR=${CROSS_COMPILE}"ar"
export AS=${CROSS_COMPILE}"as"
export RANLIB=${CROSS_COMPILE}"ranlib"
export LD=${CROSS_COMPILE}"ld"
export STRIP=${CROSS_COMPILE}"strip"
export CROSS_PREFIX=mips-linux-
export CFLAGS="-march=4181 -Os -ffunction-sections -fdata-sections"
export LDFLAGS="-Wl,--gc-sections"
export CPPFLAGS=$CFLAGS
export CXXFLAGS=$CFLAGS

######################################
#
# Berga-CLI
#
######################################

cd "$TOOLS_DIR/berga-cli"
make clean
make

cp -va "$TOOLS_DIR/berga-cli/berga-cli" "$ROOTFS_DIR/usr/bin/berga-cli"
# -f makes re-runs idempotent: a plain `ln -s` fails when the link exists.
ln -sf berga-cli "$ROOTFS_DIR/usr/bin/udhcpc-script"
|
#!/bin/bash
readonly PROGNAME=$(basename $0)
readonly PROGDIR=$(readlink -m $(dirname $0))
readonly ARGS="$@"
usage() {
cat <<- EOF
Usage: $PROGNAME -i/--input <ISO à modifier> -o/--output <ISO de sortie> -p/--preseed-file <Fichier preseed>
<Description>
Options:
-i --input Fichier ISO à modifier.
-o --output Fichier ISO de sortie.
-p --preseed-file Fichier preseed à insérer dans l\'iso défini en --input.
-a --autostart Définir si l\'installation preseed doit se lancer automatiquement au chargement de l\'ISO
-h --help Afficher cette aide
Examples:
Run all tests:
$PROGNAME --test all
EOF
}
# Silence cleanup noise, unmount the loop device if still mounted, remove
# the temporary work directories and exit with the given code.
clean_exit() {
    exec 2>/dev/null
    local return_code=$1
    # BUG FIX: the original ran `sudo -u /tmp/loopdir` — `-u` expects a
    # user name.  The intent is to unmount the loop mount from task().
    sudo umount /tmp/loopdir
    rm -rf /tmp/{isofiles,workspace,loopdir}
    exit "$return_code"
}
# Translate long options to their short equivalents, then parse with
# getopts.  Sets the readonly globals INPUT, OUTPUT, PRESEED, AUTOSTART.
cmdline() {
    local arg=
    for arg
    do
        local delim=""
        case "$arg" in
            --input) args="${args}-i ";;
            --output) args="${args}-o ";;
            --preseed-file) args="${args}-p ";;
            # BUG FIX: usage() advertises --autostart but the original only
            # translated --auto-start; accept both spellings.
            --autostart) args="${args}-a ";;
            --auto-start) args="${args}-a ";;
            --help) args="${args}-h ";;
            *) [[ "${arg:0:1}" == "-" ]] || delim="\""
               args="${args}${delim}${arg}${delim} ";;
        esac
    done

    eval set -- $args

    while getopts "i:o:p:ah" OPTION
    do
        case $OPTION in
            i)
                readonly INPUT=$(realpath "$OPTARG")
                ;;
            o)
                readonly OUTPUT=$(realpath "$OPTARG")
                ;;
            p)
                readonly PRESEED=$(realpath "$OPTARG")
                ;;
            a)
                readonly AUTOSTART=true
                ;;
            h)
                usage
                exit 0
                ;;
        esac
    done

    # Quoted tests: unquoted -z ${VAR} misbehaves on values with spaces.
    if [ -z "${INPUT:-}" ]; then echo "--input parameter is not defined, abort";usage && clean_exit 1;fi
    if [ -z "${OUTPUT:-}" ]; then echo "--output parameter is not defined, abort";usage && clean_exit 1;fi
    if [ -z "${PRESEED:-}" ]; then echo "--preseed-file is not defined, abort";usage && clean_exit 1;fi
    if [ -z "${AUTOSTART:-}" ]; then readonly AUTOSTART=false;fi
    return 0
}
# Predicate: does $1 exist as a regular file?
is_file() {
    local file=$1
    [[ -f $file ]]
}

# Abort unless --input points at an existing file.
is_input_valid() {
    is_file $INPUT || { echo "--input parameter is not a valid file, abort"; clean_exit 1; }
}

# Abort if the --output file already exists (refuse to overwrite).
is_output_valid() {
    ! is_file $OUTPUT || { echo "--output parameter already exist, abort"; clean_exit 1; }
}

# Abort unless --preseed-file points at an existing file.
is_preseed_valid() {
    is_file $PRESEED || { echo "--preseed parameter is not a valid file, abort"; clean_exit 1; }
}
# Repack the ISO: copy its contents, inject preseed.cfg into the installer
# initrd, optionally make the preseed boot entry the default, refresh the
# md5 manifest and master a new bootable image into $OUTPUT.
task() {
    mkdir /tmp/{loopdir,isofiles,workspace}
    sudo mount -o loop $INPUT /tmp/loopdir
    # -H preserves hardlinks; TRANS.TBL files are ISO9660 name tables.
    rsync -a -H --exclude=TRANS.TBL /tmp/loopdir/ /tmp/isofiles
    sleep 1
    sudo umount /tmp/loopdir
    chmod -R u+w /tmp/isofiles
    cd /tmp/workspace
    # Unpack the installer initrd, drop the preseed file into its root...
    gzip -d < /tmp/isofiles/install.amd/initrd.gz | cpio --extract --verbose --make-directories --no-absolute-filenames
    cp ${PRESEED} ./preseed.cfg
    # ...then repack it over the original (cwd is /tmp/workspace, so
    # ../isofiles resolves to /tmp/isofiles).
    find . | cpio -H newc --create --verbose | gzip -9 | tee ../isofiles/install.amd/initrd.gz > /dev/null
    if [ "$AUTOSTART" = true ]
    then
        sed -i 's/default.*$/default auto/g' /tmp/isofiles/isolinux/isolinux.cfg
    fi
    cd ../isofiles
    chmod u+w md5sum.txt
    md5sum `find -follow -type f` > md5sum.txt
    sudo genisoimage -o ${OUTPUT} -r -J -no-emul-boot -boot-load-size 4 -boot-info-table -b isolinux/isolinux.bin -c isolinux/boot.cat .
}
# Entry point: parse arguments, validate them, build the ISO, clean up.
main() {
    cmdline $ARGS
    is_input_valid
    is_output_valid
    is_preseed_valid
    task
    clean_exit 0
}
main
|
const { expect } = require('chai')
const { closeWindow } = require('./window-helpers')
const { remote } = require('electron')
const { BrowserWindow, dialog } = remote
const isCI = remote.getGlobal('isCi')
describe('dialog module', () => {
describe('showOpenDialog', () => {
it('should not throw for valid cases', () => {
// Blocks the main process and can't be run in CI
if (isCI) return
let w
expect(() => {
dialog.showOpenDialog({ title: 'i am title' })
}).to.not.throw()
expect(() => {
w = new BrowserWindow()
dialog.showOpenDialog(w, { title: 'i am title' })
}).to.not.throw()
closeWindow(w).then(() => { w = null })
})
it('throws errors when the options are invalid', () => {
expect(() => {
dialog.showOpenDialog({ properties: false })
}).to.throw(/Properties must be an array/)
expect(() => {
dialog.showOpenDialog({ title: 300 })
}).to.throw(/Title must be a string/)
expect(() => {
dialog.showOpenDialog({ buttonLabel: [] })
}).to.throw(/Button label must be a string/)
expect(() => {
dialog.showOpenDialog({ defaultPath: {} })
}).to.throw(/Default path must be a string/)
expect(() => {
dialog.showOpenDialog({ message: {} })
}).to.throw(/Message must be a string/)
})
})
describe('showSaveDialog', () => {
it('should not throw for valid cases', () => {
// Blocks the main process and can't be run in CI
if (isCI) return
let w
expect(() => {
dialog.showSaveDialog({ title: 'i am title' })
}).to.not.throw()
expect(() => {
w = new BrowserWindow()
dialog.showSaveDialog(w, { title: 'i am title' })
}).to.not.throw()
closeWindow(w).then(() => { w = null })
})
it('throws errors when the options are invalid', () => {
expect(() => {
dialog.showSaveDialog({ title: 300 })
}).to.throw(/Title must be a string/)
expect(() => {
dialog.showSaveDialog({ buttonLabel: [] })
}).to.throw(/Button label must be a string/)
expect(() => {
dialog.showSaveDialog({ defaultPath: {} })
}).to.throw(/Default path must be a string/)
expect(() => {
dialog.showSaveDialog({ message: {} })
}).to.throw(/Message must be a string/)
expect(() => {
dialog.showSaveDialog({ nameFieldLabel: {} })
}).to.throw(/Name field label must be a string/)
})
})
describe('showMessageBox', () => {
it('should not throw for valid cases', () => {
// Blocks the main process and can't be run in CI
if (isCI) return
let w
expect(() => {
dialog.showMessageBox({ title: 'i am title' })
}).to.not.throw()
expect(() => {
w = new BrowserWindow()
dialog.showMessageBox(w, { title: 'i am title' })
}).to.not.throw()
closeWindow(w).then(() => { w = null })
})
it('throws errors when the options are invalid', () => {
expect(() => {
dialog.showMessageBox(undefined, { type: 'not-a-valid-type' })
}).to.throw(/Invalid message box type/)
expect(() => {
dialog.showMessageBox(null, { buttons: false })
}).to.throw(/Buttons must be an array/)
expect(() => {
dialog.showMessageBox({ title: 300 })
}).to.throw(/Title must be a string/)
expect(() => {
dialog.showMessageBox({ message: [] })
}).to.throw(/Message must be a string/)
expect(() => {
dialog.showMessageBox({ detail: 3.14 })
}).to.throw(/Detail must be a string/)
expect(() => {
dialog.showMessageBox({ checkboxLabel: false })
}).to.throw(/checkboxLabel must be a string/)
})
})
describe('showErrorBox', () => {
it('throws errors when the options are invalid', () => {
expect(() => {
dialog.showErrorBox()
}).to.throw(/Insufficient number of arguments/)
expect(() => {
dialog.showErrorBox(3, 'four')
}).to.throw(/Error processing argument at index 0/)
expect(() => {
dialog.showErrorBox('three', 4)
}).to.throw(/Error processing argument at index 1/)
})
})
describe('showCertificateTrustDialog', () => {
it('throws errors when the options are invalid', () => {
expect(() => {
dialog.showCertificateTrustDialog()
}).to.throw(/options must be an object/)
expect(() => {
dialog.showCertificateTrustDialog({})
}).to.throw(/certificate must be an object/)
expect(() => {
dialog.showCertificateTrustDialog({ certificate: {}, message: false })
}).to.throw(/message must be a string/)
})
})
})
|
#!/bin/bash
set -eu

# Optional local secrets file (e.g. GITHUB_TOKEN) is sourced if present.
if [[ -f secrets ]]; then
    . secrets
fi

DOCKER_IMAGE=aaronhopkinson/n102-jupyterlab-amber_nn:latest
LOCAL_MOUNT=${PWD}/mount

# BUG FIX: under `set -e`, `git config --get` exits non-zero when the key
# is unset, killing the script before the help text below could print.
# `|| true` lets the friendly check run instead.
NAME=$(git config --global --get user.name || true)
EMAIL=$(git config --global --get user.email || true)
if [[ -z ${NAME:-} ]] || [[ -z ${EMAIL:-} ]]; then
    echo "Set your git username and email with"
    echo ' git config --global user.name "Your Name"'
    echo ' git config --global user.email "your.name@email.com"'
    echo "and re-run this script"
    exit 1
fi

# Create the host-side mount point up front; otherwise the docker daemon
# creates it owned by root and the container user cannot write to it.
mkdir -p "$LOCAL_MOUNT"

docker pull "$DOCKER_IMAGE"
docker run \
    -e NAME="$NAME" \
    -e EMAIL="$EMAIL" \
    -e GITHUB_TOKEN="${GITHUB_TOKEN:-}" \
    -p 8888:8888 \
    -v "$LOCAL_MOUNT":/home/jovyan/work \
    --rm \
    "$DOCKER_IMAGE"
|
<reponame>motephyr/fzf-preview.vim
import { isGitDirectory } from "@/connector/util"
import { GIT_LOG_ACTIONS } from "@/const/git"
import { currentSessionSelector } from "@/module/selector/session"
import type { FzfCommandDefinitionDefaultOption, Resource, ResourceLine, ResourceLines, SourceFuncArgs } from "@/type"
/**
 * Build the resource for the "git log actions" fzf view: two header lines
 * describing the selected commits followed by one selectable line per
 * action from GIT_LOG_ACTIONS.
 *
 * @throws Error when there is no current session, the session holds no git
 *   logs, or the current directory is not a git repository.
 */
export const gitLogActions = async (_args: SourceFuncArgs): Promise<Resource> => {
  const currentSession = currentSessionSelector()
  if (currentSession == null) {
    throw new Error("Not exists current session")
  } else if (currentSession.gitLogs == null) {
    throw new Error("Logs is not exists in current session")
  }

  if (!(await isGitDirectory())) {
    throw new Error("The current directory is not a git project")
  }

  const logs = currentSession.gitLogs
  const selectedHashes = logs.map((log) => log.hash)

  const headers: ResourceLines = [
    {
      data: {
        command: "FzfPreviewGitLogActions",
        type: "git-log-actions",
        action: "header",
        hashes: [],
        isCurrentFile: false,
      },
      displayText: "<: Back to git log",
    },
    {
      data: {
        command: "FzfPreviewGitLogActions",
        type: "git-log-actions",
        action: "header",
        hashes: [],
        isCurrentFile: false,
      },
      displayText: `Selected log: ${selectedHashes.join(" ")}`,
    },
  ]

  const lines = [
    ...headers,
    ...GIT_LOG_ACTIONS.map<ResourceLine>((action) => ({
      data: {
        command: "FzfPreviewGitLogActions",
        type: "git-log-actions",
        action,
        hashes: logs.map((log) => log.hash),
        // BUG FIX: `gitLogs` may be an empty array, in which case the
        // original `logs[0].isCurrentFile` threw a TypeError.
        isCurrentFile: logs[0]?.isCurrentFile ?? false,
      },
      displayText: action,
    })),
  ]

  return {
    type: "json",
    lines,
    options: {
      "--header-lines": headers.length.toString(),
    },
  }
}
// Default fzf options for the GitLogActions command (prompt label only).
export const gitLogActionsDefaultOptions = (): FzfCommandDefinitionDefaultOption => ({
  "--prompt": '"GitLogActions> "',
})
|
import requests
from bs4 import BeautifulSoup
def scraper(url):
    """Scrape company details from a company profile page.

    Args:
        url: URL of the company page to fetch.

    Returns:
        dict with keys 'company_name', 'phone', 'address', 'email'.
        A missing element yields None for its key instead of raising.

    Raises:
        requests.HTTPError: if the page request fails (non-2xx status).
    """
    def _text(tag):
        # Guard: soup.find() returns None when the element is absent;
        # the old code crashed with AttributeError in that case.
        return tag.text if tag is not None else None

    # get the webpage (timeout so a dead host cannot hang the caller)
    response = requests.get(url, timeout=30)
    response.raise_for_status()
    # parse the webpage
    soup = BeautifulSoup(response.text, 'html.parser')
    # store company information
    info = {
        'company_name': _text(soup.find('h1', class_='title')),
        'phone': _text(soup.find('span', {'itemprop': 'telephone'})),
        'address': _text(soup.find('span', {'itemprop': 'address'})),
        'email': _text(soup.find('span', {'itemprop': 'email'})),
    }
    return info
<reponame>jmrafael/Streamlit-Authentication<gh_stars>10-100
import streamlit as st
# st.set_page_config must be the first Streamlit command executed in the
# script, before any other st.* call — do not reorder.
st.set_page_config(page_title="Simple Auth", layout="wide")
# Remote-debug hook; disabled here (flag=False).
import streamlit_debug
streamlit_debug.set(flag=False, wait_for_client=True, host='localhost', port=8765)
# Validate required environment configuration before the app renders.
import env
env.verify()
from authlib.auth import auth, authenticated, requires_auth
from authlib.common import trace_activity
# Render the login UI (in the sidebar) and get the username, or None when
# not logged in.
user = auth(sidebar=True, show_msgs=True)
st.title('Test App')
if authenticated():
    st.success(f'`{user}` is authenticated')
else:
    st.warning(f'Not authenticated')
st.markdown(
"""
## About
---
This is a landing page designed to showcase the simple authentication library.
This is just a single import and a function to run, and the return value is either None,
or the authenticated username.
The user login is usually based in the sidebar (though ultimately configurable by passing True or False
to the sidebar parameter of the auth function
All the user management and username and password entry should be taken care of by the library. To automatically
have creation and edit access, just run the library directly as a streamlit script.
```python
from authlib.auth import auth, authenticated
user = auth() # auth(sidebar=False, show_msgs=False) if you don't want the sidebar, and detailed login messages
\"\"\"This both displays authentication input in the sidebar, and then returns the credentials for use locally\"\"\"
if authenticated():
    st.success(f'{user} is authenticated')
else:
    st.warning(f'Not authenticated')
```
""")
|
<gh_stars>0
# -*- coding: utf-8 -*-
"""Python e GTK 4: PyGObject Gtk.StackSwitcher()."""
import gi
gi.require_version(namespace='Gtk', version='4.0')
from gi.repository import Gio, Gtk
class MainWindow(Gtk.ApplicationWindow):
    """Demo window showing a Gtk.Stack driven by a Gtk.StackSwitcher
    placed in the header bar."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.set_title(title='Python e GTK 4: PyGObject Gtk.StackSwitcher()')
        # Initial window size.
        self.set_default_size(width=int(1366 / 2), height=int(768 / 2))
        # Minimum window size.
        self.set_size_request(width=int(1366 / 2), height=int(768 / 2))
        header_bar = Gtk.HeaderBar.new()
        self.set_titlebar(titlebar=header_bar)
        vbox = Gtk.Box.new(orientation=Gtk.Orientation.VERTICAL, spacing=12)
        # GTK 3 equivalent: set_border_width().
        vbox.set_margin_top(margin=12)
        vbox.set_margin_end(margin=12)
        vbox.set_margin_bottom(margin=12)
        vbox.set_margin_start(margin=12)
        # Add the box to the main window.
        # GTK 3 equivalent: add().
        self.set_child(child=vbox)
        stack = Gtk.Stack.new()
        # Set the page-transition effect.
        stack.set_transition_type(
            transition=Gtk.StackTransitionType.SLIDE_LEFT_RIGHT
        )
        # Transition duration in milliseconds (1000 = 1 second).
        stack.set_transition_duration(duration=1000)
        page1 = Gtk.Box.new(orientation=Gtk.Orientation.VERTICAL, spacing=6)
        # Add page-1 box to the stack under the name/title used by the switcher.
        stack.add_titled(child=page1, name='pagina1', title='Página 1')
        vbox.append(child=stack)
        # Create a few demo buttons on page 1.
        for n in range(5):
            botao = Gtk.Button.new_with_label(label=f'Botão {n}')
            page1.append(child=botao)
        page2 = Gtk.Box.new(orientation=Gtk.Orientation.VERTICAL, spacing=6)
        stack.add_titled(child=page2, name='pagina2', title='Página 2')
        for n in range(5):
            label = Gtk.Label.new(str=f'Linha {n}')
            page2.append(child=label)
        # The switcher lives in the header bar and controls the stack.
        stack_switcher = Gtk.StackSwitcher.new()
        stack_switcher.set_stack(stack=stack)
        header_bar.set_title_widget(title_widget=stack_switcher)
class Application(Gtk.Application):
    """Minimal Gtk.Application wrapper: creates/presents MainWindow on activate."""

    def __init__(self):
        super().__init__(application_id='br.natorsc.Exemplo',
                         flags=Gio.ApplicationFlags.FLAGS_NONE)

    def do_startup(self):
        # Chain up to the parent implementation (required by GTK).
        Gtk.Application.do_startup(self)

    def do_activate(self):
        # Reuse the existing window when re-activated; create one otherwise.
        win = self.props.active_window
        if not win:
            win = MainWindow(application=self)
        win.present()

    def do_shutdown(self):
        # Chain up to the parent implementation (required by GTK).
        Gtk.Application.do_shutdown(self)
if __name__ == '__main__':
    import sys
    # Run the GTK application main loop with the process command line.
    app = Application()
    app.run(sys.argv)
|
#!/bin/bash --login
# Activate the project's conda environment, build C extensions in place,
# then exec the container command (docker-entrypoint style).
set -e

# BUGFIX: quote $CONDA_DIR — an installation path containing spaces broke
# the unquoted `source`.
source "$CONDA_DIR/etc/profile.d/conda.sh"
conda activate geopy_copy

python setup.py build_ext --inplace

# echo $CONDA_PREFIX
# /env/bin/pip install --requirement /tmp/requirements.txt
# python setup.py build_ext --inplace

exec "$@"
import hashlib
from bip_utils import Mnemonic
def generate_random_mnemonic_seed() -> str:
    # Function to generate a random BIP39-compliant mnemonic seed
    # Implementation not provided for brevity
    # NOTE(review): stub — currently returns None, so callers must guard
    # against a missing seed until this is implemented.
    pass
def generate_mnemonic_seed(wallet_type: str) -> str:
    """Generate a mnemonic seed whose SHA-256 hex digest starts with the
    version prefix associated with ``wallet_type``.

    Args:
        wallet_type: One of 'standard', 'segwit', '2fa', '2fa_segwit'
            (case-insensitive).

    Returns:
        The first randomly generated mnemonic whose hash matches the prefix.

    Raises:
        ValueError: if ``wallet_type`` is not a known wallet type.
        RuntimeError: if the random-seed generator yields no seed
            (it is currently a stub returning None).
    """
    prefixes = {
        'standard': '01',
        'segwit': '100',
        '2fa': '101',
        '2fa_segwit': '102'
    }
    prefix = prefixes.get(wallet_type.lower())
    if prefix is None:
        raise ValueError("Invalid wallet type")
    while True:
        mnemonic_seed = generate_random_mnemonic_seed()
        # BUGFIX: the generator stub returns None, which previously crashed
        # with AttributeError on .encode(); fail loudly instead.
        if not mnemonic_seed:
            raise RuntimeError("generate_random_mnemonic_seed() returned no seed")
        seed_hash = hashlib.sha256(mnemonic_seed.encode()).hexdigest()
        if seed_hash.startswith(prefix):
            return mnemonic_seed
# Example usage
# NOTE(review): generate_random_mnemonic_seed() is an unimplemented stub, so
# these calls will fail at runtime until it is filled in — confirm before use.
print(generate_mnemonic_seed('standard'))  # Generate a standard wallet mnemonic seed
print(generate_mnemonic_seed('segwit'))  # Generate a segwit wallet mnemonic seed
print(generate_mnemonic_seed('2fa'))  # Generate a two-factor authentication wallet mnemonic seed
print(generate_mnemonic_seed('2fa_segwit'))  # Generate a two-factor auth, using segwit wallet mnemonic seed
<gh_stars>1-10
#include <cut/2.6/cut.h>
#include "allocator.h"
#include <gc.h>
/* Test-suite bring-up: initialise the Boehm GC before any allocator test runs. */
void __CUT_allocator_Bringup(void)
{
    GC_INIT();
}
/* Evaluating an allocator expression on a token must yield a token whose
 * value field is set. */
void __CUT__Allocator_expression( void )
{
    Token request = {.value = 100L};
    Token *reply = Allocator_expression(NULL, &request);
    ASSERT(reply != NULL, "Failed to run allocator expression.");
    ASSERT(reply->value, "Value wasn't set.");
}
/* A plain 100-byte allocation must succeed. */
void __CUT__Allocator_malloc(void)
{
    void *block = Allocator_malloc(100);
    ASSERT(block != NULL, "Failed to allocate.");
}
/* Growing a 100-byte allocation to 1000 bytes must succeed. */
void __CUT__Allocator_realloc(void)
{
    void *block = Allocator_malloc(100);
    ASSERT(block != NULL, "Failed to allocate.");
    block = Allocator_realloc(block, 1000);
    ASSERT(block != NULL, "Failed to reallocate.");
}
/* Freeing a live allocation must not crash. */
void __CUT__Allocator_free(void)
{
    void *block = Allocator_malloc(100);
    Allocator_free(block);
}
/* Forcing a garbage-collection cycle must not crash. */
void __CUT__Allocator_collect(void)
{
    Allocator_collect();
}
|
<reponame>lanyusteffen/micro-api-demo<filename>src/main/java/org/quark/microapidemo/redis/RedisRateLimiter.java
package org.quark.microapidemo.redis;
import io.lettuce.core.Range;
import io.lettuce.core.ScoredValue;
import io.lettuce.core.TransactionResult;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.api.sync.RedisCommands;
import org.quark.microapidemo.config.GlobalConfig;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class RedisRateLimiter {
    /**
     * Sliding-window rate limiter backed by a Redis sorted set
     * (score = request timestamp, member = random UUID).
     *
     * @param connection Redis connection instance
     * @param method name of the rate-limited method (key suffix)
     * @param qps max requests allowed per window
     * @param interval window length in milliseconds
     * @param minTimeSinceLastRequest minimum gap between requests, milliseconds
     * @return true when the request is allowed, false when throttled
     */
    public static boolean acquireToken(StatefulRedisConnection<String, String> connection, String method, int qps, int interval, int minTimeSinceLastRequest) {
        long now = System.currentTimeMillis();
        RedisCommands<String, String> commands = connection.sync();
        long clearBefore = now - interval;
        String id = GlobalConfig.RateLimiter.TOKEN_BUCKET_IDENTITIEFER + method;
        // BUGFIX: exec() without a preceding multi() throws in Lettuce —
        // the transaction must be opened explicitly.
        commands.multi();
        // Drop tokens that fell out of the window.
        commands.zremrangebyscore(id, Range.create(0, clearBefore));
        // BUGFIX: the members are random UUIDs; only the *scores* carry the
        // request timestamps, so we must read them WITHSCORES (the old code
        // parsed UUID members with Long.valueOf and failed).
        commands.zrangeWithScores(id, 0, -1);
        commands.zadd(id, now, UUID.randomUUID().toString());
        // Expire the whole bucket as a safety net.
        commands.expire(id, interval / 1000);
        TransactionResult results = commands.exec();
        // Index 1 corresponds to the ZRANGE queued above.
        List<ScoredValue<String>> entries = results.get(1);
        List<Long> timestamps = entries.stream()
                .map(sv -> (long) sv.getScore())
                .sorted()
                .collect(Collectors.toList());
        // Too many requests already inside the window?
        if (timestamps.size() > qps) {
            return false;
        }
        // BUGFIX: first request in an empty window — the old code indexed
        // get(size - 1) on an empty list and threw.
        if (timestamps.isEmpty()) {
            return true;
        }
        long timeSinceLastRequest = now - timestamps.get(timestamps.size() - 1);
        return timeSinceLastRequest > minTimeSinceLastRequest;
    }
}
|
<reponame>Accessible-Concepts/scrumlr.io
export * from "./VoteDisplay";
|
package mg.utils.api.consumer;
import lombok.AllArgsConstructor;
import mg.utils.api.consumer.dto.ApiConsumerCreationRequestDto;
import mg.utils.api.consumer.dto.ApiConsumerResponseDto;
import mg.utils.api.consumer.dto.ApiConsumerUpdateRequestDto;
import org.springframework.web.bind.annotation.*;
import javax.validation.Valid;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
/**
 * Admin REST endpoints for managing API consumers (CRUD over UUID-keyed
 * entities, DTO-mapped via {@code ApiConsumerMapper}).
 */
@RestController
@AllArgsConstructor
@RequestMapping("/api/v1/admin/api-consumer")
public class ApiConsumerRestController {

    private final ApiConsumerService apiConsumerService;
    private final ApiConsumerMapper apiConsumerMapper;

    /** Lists every registered API consumer as response DTOs. */
    @GetMapping("/list")
    public List<ApiConsumerResponseDto> getListOfApiConsumers() {
        return apiConsumerService.getListOfApiConsumers()
                .stream()
                .map(consumer -> apiConsumerMapper.mapToResponseDto(consumer))
                .collect(Collectors.toList());
    }

    /** Fetches a single consumer by its UUID path segment. */
    @GetMapping("/{uuid}")
    public ApiConsumerResponseDto getApiConsumerByUuid(@PathVariable("uuid") String rawUuid) {
        final UUID parsedUuid = UUID.fromString(rawUuid);
        return apiConsumerMapper.mapToResponseDto(apiConsumerService.findByUuid(parsedUuid));
    }

    /** Creates a new consumer from a validated request body. */
    @PostMapping
    public ApiConsumerResponseDto createApiConsumer(@Valid @RequestBody ApiConsumerCreationRequestDto apiConsumerDto) {
        return apiConsumerMapper.mapToResponseDto(apiConsumerService.createApiConsumer(apiConsumerDto));
    }

    /** Updates an existing consumer from a validated request body. */
    @PutMapping
    public ApiConsumerResponseDto updateApiConsumer(@Valid @RequestBody ApiConsumerUpdateRequestDto apiConsumerDto) {
        return apiConsumerMapper.mapToResponseDto(apiConsumerService.updateApiConsumer(apiConsumerDto));
    }

    /** Deletes the consumer identified by the given UUID. */
    @DeleteMapping("/{uuid}")
    public void deleteApiConsumer(@PathVariable("uuid") String rawUuid) {
        apiConsumerService.deleteApiConsumer(UUID.fromString(rawUuid));
    }
}
|
import {PointMap} from '../../src/utils/PointMap'
test('PointMap', () => {
  // Exercise set/keys/keyValues/delete/has on non-integer coordinates.
  const map = new PointMap<number>()
  map.setxy(0, 0, 0)
  map.setxy(1.3, 1, 2)
  map.setxy(2, 2, 4)
  map.setxy(2, 1.2, 3)

  const keys = Array.from(map.keys())
  expect(keys.length).toBe(4)
  expect(keys[0].y < 3).toBe(true)

  const entries = Array.from(map.keyValues())
  expect(entries[0][1] < 5).toBe(true)
  expect(entries.length == 4).toBe(true)

  map.delete(1.3, 1)
  expect(map.has(1.3, 1)).toBe(false)
  expect(map.has(2, 1.2)).toBe(true)
})
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import model_utils.fields
import django.utils.timezone
class Migration(migrations.Migration):
    # Initial migration: creates the Prison table, keyed by its
    # 3-character NOMIS identifier.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Prison',
            fields=[
                # created/modified are auto-maintained timestamps from
                # django-model-utils.
                ('created', model_utils.fields.AutoCreatedField(editable=False, verbose_name='created', default=django.utils.timezone.now)),
                ('modified', model_utils.fields.AutoLastModifiedField(editable=False, verbose_name='modified', default=django.utils.timezone.now)),
                ('nomis_id', models.CharField(serialize=False, max_length=3, primary_key=True, verbose_name='NOMIS id')),
                ('name', models.CharField(max_length=500)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
|
def total_interest(bal_st, interest_rate):
    """Return the total interest across all balances, rounded to 2 decimals.

    Args:
        bal_st: iterable of balances.
        interest_rate: percentage rate applied to each balance.
    """
    total_interest_paid = 0
    for balance in bal_st:
        total_interest_paid += balance * interest_rate / 100
    return round(total_interest_paid, 2)
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.repo.isa_dom.ppa_jc2;
import com.fasterxml.jackson.annotation.JsonProperty;
import ed.robust.dom.tsprocessing.PPAResult;
import java.util.Objects;
import java.util.UUID;
/**
*
* @author tzielins
*/
/**
 * Flattened entry describing one PPA job result: job/data identifiers,
 * linked descriptor ids (raw data, biological description, environment),
 * an ignored flag and the PPA result payload itself.
 *
 * @author tzielins
 */
public class PPAFullResultEntry {

    public UUID jobId;
    public long dataId;
    public String dataType;
    public String orgId;

    @JsonProperty("rawId")
    public long rawDataId;
    @JsonProperty("biolId")
    public long biolDescId;
    @JsonProperty("envId")
    public long environmentId;

    public boolean ignored;
    public PPAResult result;

    /** Hash over the identifying pair only: jobId and dataId. */
    @Override
    public int hashCode() {
        int hash = 5;
        hash = 41 * hash + Objects.hashCode(this.jobId);
        hash = 41 * hash + Long.hashCode(this.dataId);
        return hash;
    }

    /** Field-by-field equality over every public member. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final PPAFullResultEntry other = (PPAFullResultEntry) obj;
        return this.dataId == other.dataId
                && this.rawDataId == other.rawDataId
                && this.biolDescId == other.biolDescId
                && this.environmentId == other.environmentId
                && this.ignored == other.ignored
                && Objects.equals(this.dataType, other.dataType)
                && Objects.equals(this.orgId, other.orgId)
                && Objects.equals(this.jobId, other.jobId)
                && Objects.equals(this.result, other.result);
    }
}
|
<gh_stars>0
// Returns `str` with its characters in reverse order.
// BUGFIX: split("") splits on UTF-16 code units and corrupts surrogate
// pairs (emoji, astral-plane characters); the string iterator via spread
// splits on whole code points.
function FirstReverse(str) {
  return [...str].reverse().join("");
}
// keep this function call here
// NOTE(review): `readline` is assumed to be provided by the challenge
// runner's environment — confirm it exists where this script executes.
console.log(FirstReverse(readline()));
|
<filename>src/components/forms/Input.tsx
// Props are exactly the native <input> element's attributes.
export interface InputProps
  extends React.DetailedHTMLProps<
    React.InputHTMLAttributes<HTMLInputElement>,
    HTMLInputElement
  > {}

/** Full-width rounded text input with the app's focus-ring styling. */
const Input: React.FC<InputProps> = (props) => (
  <input
    className="w-full h-12 px-5 bg-gray-light rounded-md outline-none ring-0 focus-visible:ring-2 focus-visible:ring-gray-lightest focus-visible:ring-opacity-75"
    {...props}
  />
);

export default Input;
|
#!/usr/bin/env bash
###############################################################################
# Copyright (c) 2016. All rights reserved.
# Mike Klusman IS PROVIDING THIS DESIGN, CODE, OR INFORMATION "AS IS" AS A
# COURTESY TO YOU. BY PROVIDING THIS DESIGN, CODE, OR INFORMATION AS
# ONE POSSIBLE IMPLEMENTATION OF THIS FEATURE, APPLICATION OR
# STANDARD, Mike Klusman IS MAKING NO REPRESENTATION THAT THIS IMPLEMENTATION
# IS FREE FROM ANY CLAIMS OF INFRINGEMENT, AND YOU ARE RESPONSIBLE
# FOR OBTAINING ANY RIGHTS YOU MAY REQUIRE FOR YOUR IMPLEMENTATION.
# Mike Klusman EXPRESSLY DISCLAIMS ANY WARRANTY WHATSOEVER WITH RESPECT TO
# THE ADEQUACY OF THE IMPLEMENTATION, INCLUDING BUT NOT LIMITED TO
# ANY WARRANTIES OR REPRESENTATIONS THAT THIS IMPLEMENTATION IS FREE
# FROM CLAIMS OF INFRINGEMENT, IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE.
###############################################################################
###############################################################################
#
## @Author : Mike Klusman
## @Software Package : Shell Automated Testing -- Argument Parsing
## @Application : Support Functionality
## @Language : Bourne Shell
## @Version : 1.21
#
###############################################################################
###############################################################################
#
# Functions Supplied:
#
# __handle_quoted_word
# __remove_cmdline_quotations
# contains_option
# getoptex
# opthandler
# optlistex
# remove_option
#
###############################################################################
# shellcheck disable=SC2016,SC1090,SC2039,SC2086,SC1117
# BUGFIX: '$0' was single-quoted, so dirname received the literal two-character
# string "$0" instead of this script's path; double quotes expand it.
[ -z "${SLCF_SHELL_TOP}" ] && SLCF_SHELL_TOP=$( ${__REALPATH} ${__REALPATH_OPTS} "$( \dirname "$0" )" )
# shellcheck source=/dev/null
[ -z "${PASS}" ] && . "${SLCF_SHELL_TOP}/lib/constants.sh"
# Default error log lives next to the caller's working directory.
[ -z "${__SLCF_ARGPARSING_ERROR_LOG}" ] && __SLCF_ARGPARSING_ERROR_LOG="$( \pwd -L )/.argparsing.err"
## @fn __handle_quoted_word()
__handle_quoted_word()
{
  # Re-joins a quoted word ('...' or "...") that the shell split across
  # several positional parameters.  On return OPTARG holds the re-joined
  # word (quotes included) and OPTIND has been advanced past every consumed
  # word via QUOTE_SHIFT_COUNTER.
  # NOTE(review): the initial `shift` assumes $1 is the option token and the
  # quoted value begins at $2 — confirm against the getoptex() call sites.
  QUOTE_SHIFT_COUNTER=0
  shift
  if [ "${1:0:1}" == "'" ]
  then
    QUOTE_SHIFT_COUNTER=$(( QUOTE_SHIFT_COUNTER + 1 ))
    OPTARG="$1"
    # Keep appending words until the closing single quote is reached.
    while [ "${OPTARG: -1}" != "'" ]
    do
      shift
      OPTARG+=" $1"
      QUOTE_SHIFT_COUNTER=$(( QUOTE_SHIFT_COUNTER + 1 ))
    done
    OPTIND=$(( OPTIND + QUOTE_SHIFT_COUNTER ))
  else
    if [ "${1:0:1}" == "\"" ]
    then
      QUOTE_SHIFT_COUNTER=$(( QUOTE_SHIFT_COUNTER + 1 ))
      OPTARG="$1"
      # Same re-join loop for a double-quoted word.
      while [ "${OPTARG: -1}" != "\"" ]
      do
        shift
        OPTARG+=" $1"
        QUOTE_SHIFT_COUNTER=$(( QUOTE_SHIFT_COUNTER + 1 ))
      done
      OPTIND=$(( OPTIND + QUOTE_SHIFT_COUNTER ))
    fi
  fi
  return "${PASS}"
}
## @fn __remove_cmdline_quotations()
__remove_cmdline_quotations()
{
  ###
  ### Strip away the quotations from OPTARG
  ###
  # Drops exactly one leading and one trailing character (the quote pair).
  OPTARG="$( printf "%s\\n" "${OPTARG:1:${#OPTARG}-2}" )"
  return "${PASS}"
}
## @fn contains_option()
contains_option()
{
  # Usage: contains_option "name1|name2" <args...>
  # Prints YES and returns PASS when any of the |-separated option names
  # appears among the remaining args; single-character names are matched
  # with a "-" prefix, longer names with "--".
  typeset option2find=$( printf "%s\n" "$1" | \tr '|' ' ' )
  shift
  if [ -z "${option2find}" ]
  then
    printf "%s\n" "${NO}"
    return "${FAIL}"
  fi
  typeset arg=
  # Intentionally unquoted: args are compared word by word.
  for arg in $*
  do
    for o2f in ${option2find}
    do
      typeset dash='-'
      [ ${#o2f} -gt 1 ] && dash='--'
      if [ "${arg}" == "${dash}${o2f}" ]
      then
        printf "%s\n" "${YES}"
        return "${PASS}"
      fi
    done
  done
  printf "%s\n" "${NO}"
  return "${FAIL}"
}
# Handle options which need to be addressed
# Special characters can appear at the and of option names specifying
# whether an argument is required (default is ";"):
# ";" (default) -- no argument
# ":" -- required argument
# "." -- optional argument
## @fn getoptex()
getoptex()
{
  # Extended getopt: parses the next option from the positional parameters
  # against $1 (the option list built by optlistex).  Sets OPTOPT/OPTARG/
  # OPTIND/OPTERR; returns PASS while options remain, FAIL otherwise.
  QUOTE_SHIFT_COUNTER=0
  OPTERR=
  OPTRET="${PASS}"
  let $# || return "${FAIL}"
  typeset optlist="${1#;}"
  let OPTIND || OPTIND=1
  [ ${OPTIND} -lt $# ] || return "${FAIL}"
  shift ${OPTIND}
  if [ "$1" != "-" ] && [ "$1" != "${1#-}" ]
  then
    OPTIND=$((OPTIND+1))
    if [ "$1" != "--" ]
    then
      typeset o
      o="-${1#-${OPTOFS}}"
      for opt in ${optlist#;}
      do
        QUOTE_SHIFT_COUNTER=0
        OPTOPT="${opt%[;.:]}"
        unset OPTARG
        unset OPTTYPE
        # Trailing marker: ';' no argument, ':' required, '.' optional.
        OPTTYPE="${opt##*[^;:.]}"
        [ -z "${OPTTYPE}" ] && OPTTYPE=';'
        if [ ${#OPTOPT} -gt 1 ]
        then # long-named option
          OPTSHORT="${NO}"
          case "${o}" in
            "--${OPTOPT}")
              if [ "${OPTTYPE}" != ':' ]
              then
                if [ "${OPTTYPE}" == '.' ]
                then
                  __handle_quoted_word "$@"
                  if [ "${QUOTE_SHIFT_COUNTER}" -eq 0 ]
                  then
                    OPTARG="$2"
                  else
                    shift ${QUOTE_SHIFT_COUNTER}
                  fi
                fi
                [ "${OPTTYPE}" != ';' ] && [ "${OPTARG:0:1}" != '-' ] && OPTIND=$((OPTIND+1))
                QUOTE_SHIFT_COUNTER=0
                return "${PASS}"
              fi
              __handle_quoted_word "$@"
              if [ "${QUOTE_SHIFT_COUNTER}" -eq 0 ]
              then
                OPTARG="$2"
              else
                shift ${QUOTE_SHIFT_COUNTER}
              fi
              if [ -z "${OPTARG}" ]
              then # error: must have an argument
                OPTERR="$0: error: ${OPTOPT} must have an argument"
                OPTARG="${OPTOPT}"
                OPTOPT="?"
                OPTRET="${FAIL}"
                QUOTE_SHIFT_COUNTER=0
                return "${FAIL}"
              fi
              OPTIND=$((OPTIND + 1)) # skip option argument
              QUOTE_SHIFT_COUNTER=0
              return "${PASS}"
              ;;
            "--${OPTOPT}="*)
              if [ "${OPTTYPE}" == ';' ]
              then # error: must not have arguments
                OPTERR="$0: error: ${OPTOPT} must not have arguments"
                OPTARG="${OPTOPT}"
                OPTOPT="?"
                OPTRET="${FAIL}"
                QUOTE_SHIFT_COUNTER=0
                return "${FAIL}"
              fi
              # BUGFIX: previous code expanded ${replacment} — a typo for
              # "replacement", hence an unset variable — so the "--name="
              # prefix was never stripped and OPTARG kept the whole token.
              OPTARG="${o#--${OPTOPT}=}"
              QUOTE_SHIFT_COUNTER=0
              return "${PASS}"
              ;;
          esac
        else # short-named option
          OPTSHORT="${YES}"
          case "${o}" in
            "-${OPTOPT}")
              unset OPTOFS
              if [ "${OPTTYPE}" != ':' ]
              then
                if [ "${OPTTYPE}" == '.' ]
                then
                  __handle_quoted_word "$@"
                  if [ "${QUOTE_SHIFT_COUNTER}" -eq 0 ]
                  then
                    OPTARG="$2"
                  else
                    shift ${QUOTE_SHIFT_COUNTER}
                  fi
                fi
                [ "${OPTTYPE}" != ';' ] && [ "${OPTARG:0:1}" != '-' ] && OPTIND=$((OPTIND+1))
                QUOTE_SHIFT_COUNTER=0
                return "${PASS}"
              fi
              __handle_quoted_word "$@"
              if [ "${QUOTE_SHIFT_COUNTER}" -eq 0 ]
              then
                OPTARG="$2"
              else
                shift ${QUOTE_SHIFT_COUNTER}
              fi
              if [ -z "${OPTARG}" ]
              then
                OPTERR="$0: error: -${OPTOPT} must have an argument"
                OPTARG="${OPTOPT}"
                OPTOPT="?"
                OPTRET="${FAIL}"
                return "${FAIL}"
              fi
              OPTIND=$((OPTIND + 1)) # skip option argument
              QUOTE_SHIFT_COUNTER=0
              return "${PASS}"
              ;;
            "-${OPTOPT}"*)
              if [ "${OPTTYPE}" == ';' ]
              then # an option with no argument is in a chain of options
                OPTOFS="${OPTOFS}?" # move to the next option in the chain
                OPTIND=$((OPTIND-1)) # the chain still has other options
              else
                unset OPTOFS
                OPTARG="${o#-${OPTOPT}}"
              fi
              QUOTE_SHIFT_COUNTER=0
              return "${PASS}"
              ;;
          esac
        fi
      done
      # No option matched: reject unless OPTALLOW_ALL permits unknowns.
      if [ -z "${OPTALLOW_ALL}" ] || [ "${OPTALLOW_ALL}" -eq "${NO}" ]
      then
        OPTERR="$0: error: invalid option: << ${o} >>"
        OPTRET="${FAIL}"
      else
        OPTOPT="${o}"
        QUOTE_SHIFT_COUNTER=0
        return "${PASS}"
      fi
    fi
  fi
  OPTOPT="?"
  if [ "${OPTRET}" -eq 1 ]
  then
    SAVE_OPTARG="${OPTARG}"
    SAVE_OPTOPT="${OPTOPT}"
    SAVE_OPTERR="${OPTERR}"
  fi
  QUOTE_SHIFT_COUNTER=0
  return "${FAIL}"
}
## @fn opthandler()
opthandler()
{
  # Convenience wrapper: expands the compact option spec in $1 via
  # optlistex, resets OPTIND, and delegates parsing to getoptex.
  typeset expanded_list
  expanded_list=$( optlistex "$1" )
  shift
  typeset saved_args=$*
  OPTIND=1
  getoptex "${expanded_list}" ${saved_args}
  return $?
}
## @fn optlistex()
optlistex()
{
  # Expands a compact getopt-style spec (e.g. "ab:c.") into the
  # space-separated list getoptex expects (e.g. "a b: c.").
  typeset l="$1"
  typeset m= # mask
  typeset r= # to store result
  while [ ${#m} -lt $((${#l}-1)) ]
  do
    m="$m?"
  done # create a "???..." mask
  while [ -n "$l" ]
  do
    r="${r:+"$r "}${l%$m}" # append the first character of $l to $r
    l="${l#?}" # cut the first charecter from $l
    m="${m#?}" # cut one "?" sign from m
    if [ -n "${l%%[^:.;]*}" ]
    then # a special character (";", ".", or ":") was found
      r="$r${l%$m}" # append it to $r
      l="${l#?}" # cut the special character from l
      m="${m#?}" # cut one more "?" sign
    fi
  done
  [ -n "${r}" ] && printf "%s\n" "$r"
  return "${PASS}"
}
## @fn remove_option()
remove_option()
{
  # Removes options (and their argument words, including quoted multi-word
  # arguments) from a command line and prints the filtered command line.
  # $1 is the number of removal keys; the next $1 args are keys of the form
  # "name:nargs:is_long"; the remaining args are the command line itself.
  typeset num_removals="$1"
  shift
  typeset rmkeys=
  typeset count=0
  while [ "${count}" -lt "${num_removals}" ]
  do
    typeset rk="$1"
    if [ "${rk%%:}" != "${rk}" ]
    then
      # NOTE(review): this appends the literal text ":0:1" to the key;
      # it looks like a typo for the substring expansion "${rk:0:1}" —
      # confirm the intended behaviour for keys with a trailing colon.
      rk="${rk}:0:1"
    fi
    if [ -z "${rmkeys}" ]
    then
      rmkeys="${rk}"
    else
      rmkeys+=" ${rk}"
    fi
    count=$(( count + 1 ))
    shift
  done
  typeset cmdline=$*
  typeset rk
  for rk in ${rmkeys}
  do
    # Key fields: name, number-of-arguments, long-option flag.
    typeset k=$( printf "%s" "${rk}" | \cut -f 1 -d ':' )
    typeset nopt=$( printf "%s" "${rk}" | \cut -f 2 -d ':' )
    typeset longopt=$( printf "%s" "${rk}" | \cut -f 3 -d ':' )
    typeset prefix='-'
    [ "${longopt}" -eq 1 ] && prefix='--'
    ###
    ### Find the corresponding entry in the commandline ( if it exists )
    ###
    typeset entry
    typeset begin=1
    for entry in $cmdline
    do
      [ "${prefix}${k}" == "${entry}" ] && break
      begin=$(( begin + 1 ))
    done
    # NOTE(review): `begin` starts at 1 and only increments, so this
    # "not found" guard can never fire; when the option is absent, begin
    # points one past the last word — confirm the intended guard.
    [ "${begin}" -eq 0 ] && continue
    typeset end=$(( begin + nopt ))
    ###
    ### Count represents starting location, shift_amount shows how much
    ### to remove from the "cmdline"
    ###
    typeset orig_end=${end}
    typeset word="$( printf "%s\n" "${cmdline}" | \cut -d " " -f ${end} )"
    # Extend the removal range across a single-quoted multi-word argument.
    if [ "${word:0:1}" == "'" ]
    then
      while [ "${word: -1}" != "'" ]
      do
        end=$(( end + 1 ))
        word="$( printf "%s\n" "${cmdline}" | \cut -d " " -f ${orig_end}-${end} )"
      done
    fi
    # Same extension for a double-quoted multi-word argument.
    if [ "${word:0:1}" == "\"" ]
    then
      while [ "${word: -1}" != "\"" ]
      do
        end=$(( end + 1 ))
        word="$( printf "%s\n" "${cmdline}" | \cut -d " " -f ${orig_end}-${end} )"
      done
    fi
    # Drop fields begin..end from the command line.
    cmdline=$( printf "%s\n" "${cmdline}" | \awk -v f=${begin} -v t=${end} '{for(i=1;i<=NF;i++)if(i>=f&&i<=t)continue;else printf("%s%s",$i,(i!=NF)?OFS:ORS)}' )
  done
  [ -n "${cmdline}" ] && printf "%s\n" "${cmdline}"
  return "${PASS}"
}
|
<reponame>mehdisamavat65/artfactory-frontend
import React, { useState } from "react";
import { Grid, Paper, Button, TextField } from "@material-ui/core";
import { makeStyles } from "@material-ui/core/styles";
import { IoMdAddCircleOutline } from "react-icons/io";
import FormGroup from "@material-ui/core/FormGroup";
import FormControlLabel from "@material-ui/core/FormControlLabel";
import Switch from "@material-ui/core/Switch";
import gql from "graphql-tag";
import { useMutation } from "@apollo/react-hooks";
import {GET_ALL_USER_ADMIN} from '../../graphql/graphql';
const useStyles = makeStyles(theme => ({
paper: {
padding: "50px"
},
button: {
margin: theme.spacing(1)
},
sendToRight: {
textAlign: "right"
},
topSpaceText: {
marginTop: "30px"
},
switchTop:{
marginTop:"30px"
},
successColor:{
color:"green"
}
}));
const AddUserAdmin = (props) => {
const classes = useStyles();
const [userAdminState,setUserAdminState] = useState({
name:'',
mobile:'',
password:'',
confirmPassword:'',
active:true,
access:{
admin:false,
teacher:false,
departemant:false,
course:false,
student:false,
gallery:false,
offer:false,
website:false,
online:false,
live:false
}
});
const changeHandler = e =>{
setUserAdminState({...userAdminState,[e.target.name]:e.target.value});
}
const activechangeHandler = e =>{
setUserAdminState({...userAdminState,active:e.target.checked})
}
const accessChangeHandler = e =>{
setUserAdminState({...userAdminState,access:{...userAdminState.access,[e.target.name]:e.target.checked}});
}
const [registerUserAdmin,{loading}] = useMutation(REGISTER_ADMIN,{
update(proxy,result){
const data = proxy.readQuery({
query:GET_ALL_USER_ADMIN
});
data.getAllUserAdmin = [...data.getAllUserAdmin,result.data.registerUserAdmin];
proxy.writeQuery({
query:GET_ALL_USER_ADMIN,
data
})
props.history.push('/admin/useradmin') ;
},
onError(err){
console.log(err.graphQLErrors[0].message);
},
variables:userAdminState
});
const onSubmitHandler = e =>{
e.preventDefault();
registerUserAdmin();
}
return (
<div>
<form onSubmit={onSubmitHandler}>
<Grid container spacing={3}>
<Grid item xs={12} className={classes.sendToRight}>
<Button
variant="contained"
type="submit"
color="primary"
size="large"
className={classes.button}
startIcon={<IoMdAddCircleOutline />}
>
Add Admin User
</Button>
</Grid>
</Grid>
<Grid container spacing={3}>
<Grid item xs={8}>
<Paper elevation={3} className={classes.paper}>
<h3>User Admin Information</h3>
<TextField
variant="outlined"
name="name"
placeholder="Please Insert Name"
fullWidth
label="Name"
className={classes.topSpaceText}
value={userAdminState.name}
onChange = {changeHandler}
required
/>
<TextField
variant="outlined"
name="mobile"
placeholder="Please Insert Mobile"
fullWidth
label="Mobile Number"
className={classes.topSpaceText}
value={userAdminState.mobile}
onChange = {changeHandler}
required
/>
<TextField
variant="outlined"
type="password"
name="password"
placeholder="<PASSWORD> Insert Password"
fullWidth
label="Password"
className={classes.topSpaceText}
value={userAdminState.password}
onChange = {changeHandler}
required
/>
<TextField
variant="outlined"
type="password"
name="confirmPassword"
placeholder="Please Insert Confirm Password"
fullWidth
label="Confirm Password "
className={classes.topSpaceText}
value={userAdminState.confirmPassword}
onChange = {changeHandler}
required
/>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="active"
checked={userAdminState.active}
onChange={activechangeHandler}
color="primary"
/>
}
label="Active User Admin"
/>
</Paper>
</Grid>
<Grid item xs={4}>
<Paper elevation={3} className={classes.paper}>
<h3>User Admin Access</h3>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="admin"
color="primary"
checked={userAdminState.access.admin}
onChange={accessChangeHandler}
/>
}
label="Access Admin"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="teacher"
checked={userAdminState.access.teacher}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Teacher"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="departemant"
checked={userAdminState.access.departemant}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Departemant"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="course"
checked={userAdminState.access.course}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Course"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="student"
checked={userAdminState.access.student}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Student"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="gallery"
checked={userAdminState.access.gallery}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Gallery"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="offer"
checked={userAdminState.access.offer}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Offer"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="website"
checked={userAdminState.access.website}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Webiste"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="online"
checked={userAdminState.access.online}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Online"
/>
</Grid>
</Grid>
<Grid container spacing={1}>
<Grid item xs={12}>
<FormControlLabel className={classes.switchTop}
control={
<Switch
name="live"
checked={userAdminState.access.live}
onChange={accessChangeHandler}
color="primary"
/>
}
label="Access Live"
/>
</Grid>
</Grid>
</Paper>
</Grid>
</Grid>
</form>
</div>
);
};
// GraphQL mutation used by AddUserAdmin.  The selection set mirrors the
// fields cached by GET_ALL_USER_ADMIN so the Apollo cache update succeeds.
const REGISTER_ADMIN = gql`
mutation registerUserAdmin($name:String,$password:String,$mobile:String,$confirmPassword:String,$active:Boolean,$access:InputAccess){
  registerUserAdmin(name:$name,mobile:$mobile,password:$password,confirmPassword:$confirmPassword,active:$active,access:$access){
    id
    name
    mobile
    active
    password
    access{
      admin
      teacher
      departemant
      course
      student
      gallery
      offer
      website
      online
      live
    }
  }
}
`;
export default AddUserAdmin;
|
import axios from 'axios'
import moment from 'moment'
import BaseModel, { LineItem } from './BaseModel'
import { consts } from '../utils/_consts'
// Placeholder tokens that may appear in invoice PDF templates.
export const invoice_pdf_fields = ['$invoice.number', '$invoice.po_number', '$invoice.invoice_date', '$invoice.invoice_datetime', '$invoice.invoice_agent', '$invoice.due_date',
    '$invoice.balance', '$invoice.invoice_total', '$invoice.partial_due', '$invoice.custom1', '$invoice.custom2', '$invoice.custom3',
    '$invoice.custom4', '$invoice.surcharge1', '$invoice.surcharge2', '$invoice.surcharge3', '$invoice.surcharge4'
]
export default class InvoiceModel extends BaseModel {
  /**
   * Client-side model wrapping a single invoice: default field values,
   * status helpers, dropdown-action rules and persistence calls.
   *
   * @param {Object|null} data - existing invoice attributes; merged over the defaults when given
   * @param {Object[]} customers - customer list used to resolve this invoice's customer record
   */
  constructor (data = null, customers = []) {
    super()
    this.customers = customers
    this._url = '/api/invoice'
    this.entity = 'Invoice'
    this._file_count = 0

    if (data !== null && data.files) {
      this.fileCount = data.files
    }

    // Defaults for a brand new invoice. `this.settings` is provided by BaseModel.
    this._fields = {
      line_type: null,
      project_id: null,
      is_mobile: window.innerWidth <= 768,
      modalOpen: false,
      is_amount_discount: false,
      deleted_at: null,
      assigned_to: '',
      number: '',
      invitations: [],
      emails: [],
      customer_id: '',
      user_id: null,
      contacts: [],
      due_date: moment(new Date()).add(1, 'days').format('YYYY-MM-DD'),
      quantity: '',
      id: null,
      account_id: JSON.parse(localStorage.getItem('appState')).user.account_id,
      line_items: [],
      address: {},
      customerName: '',
      tax_rate_name: '',
      tax_rate: 0,
      tax_rate_name_2: '',
      tax_rate_name_3: '',
      tax_2: 0,
      tax_3: 0,
      company_id: '',
      status_id: null,
      tasks: [],
      errors: [],
      total: 0,
      discount_total: 0,
      tax_total: 0,
      sub_total: 0,
      data: [],
      date: moment(new Date()).format('YYYY-MM-DD'),
      partial: 0,
      partial_due_date: moment(new Date()).add(1, 'days').format('YYYY-MM-DD'),
      has_partial: false,
      public_notes: '',
      private_notes: '',
      terms: '',
      footer: '',
      visible: 'collapse',
      custom_value1: '',
      custom_value2: '',
      custom_value3: '',
      custom_value4: '',
      transaction_fee_tax: false,
      shipping_cost_tax: false,
      transaction_fee: 0,
      shipping_cost: 0,
      gateway_fee: 0,
      gateway_percentage: false,
      tax: 0,
      total_custom_values: 0,
      total_custom_tax: 0,
      discount: 0,
      recurring: '',
      activeTab: '1',
      po_number: '',
      design_id: '',
      recurring_invoice_id: null,
      currency_id: this.settings.currency_id.toString().length ? this.settings.currency_id : consts.default_currency,
      exchange_rate: 1,
      success: false,
      showSuccessMessage: false,
      showErrorMessage: false,
      loading: false,
      changesMade: false,
      viewed: false
    }

    // Status ids used by the is* getters below.
    this.approved = 4 // NOTE(review): hard-coded; presumably consts has no "approved" entry — confirm
    this.reversed = consts.invoice_status_reversed
    this.cancelled = consts.invoice_status_cancelled
    this.paid = consts.invoice_status_paid
    this.sent = consts.invoice_status_sent
    this.partial = consts.invoice_status_partial
    this.customer = null

    if (data !== null) {
      this._fields = { ...this.fields, ...data }
      this.updateCustomer()
    }

    this.exchange_rate = this.currency ? this.currency.exchange_rate : 1

    // Resolve the logged-in user's account record from local storage.
    const account_id = JSON.parse(localStorage.getItem('appState')).user.account_id
    const user_account = JSON.parse(localStorage.getItem('appState')).accounts.filter(account => account.account_id === parseInt(account_id))
    this.account = user_account[0]
  }

  /**
   * Currency record for this invoice: the customer's currency when one is set,
   * otherwise the account settings' currency. Returns null when neither exists.
   */
  get currency () {
    // The customer getter falls back to [] when unset, so test for an actual
    // currency_id on the object. (Bug fix: the previous `this.customer.length`
    // check was always falsy for a real customer object — objects have no
    // `length` — so a customer's currency was silently ignored.)
    const currency_id = this.customer && this.customer.currency_id && this.customer.currency_id.toString().length ? this.customer.currency_id : this.settings.currency_id
    if (!currency_id) {
      return null
    }
    return JSON.parse(localStorage.getItem('currencies')).filter(currency => currency.id === parseInt(currency_id))[0]
  }

  // True while the invoice has not been persisted (no positive id yet).
  get isNew () {
    return !this.fields.id || !this.fields.id.toString().length || parseInt(this.fields.id) <= 0
  }

  get fields () {
    return this._fields
  }

  get exchange_rate () {
    return this.fields.exchange_rate
  }

  set exchange_rate (exchange_rate) {
    this.fields.exchange_rate = exchange_rate
  }

  // Falls back to an empty array so callers can probe safely before a customer is resolved.
  get customer () {
    return this._customer || []
  }

  set customer (customer) {
    this._customer = customer
  }

  // Sent invoices that the recipient has opened.
  get isViewed () {
    return parseInt(this.fields.status_id) === this.sent && this.fields.viewed === true
  }

  get isApproved () {
    return parseInt(this.fields.status_id) === this.approved
  }

  get isReversed () {
    return parseInt(this.fields.status_id) === this.reversed
  }

  get isCancelled () {
    return parseInt(this.fields.status_id) === this.cancelled
  }

  get isPaid () {
    return parseInt(this.fields.status_id) === this.paid
  }

  get isDraft () {
    return parseInt(this.fields.status_id) === consts.invoice_status_draft
  }

  get isSent () {
    return parseInt(this.fields.status_id) === this.sent
  }

  get isPartial () {
    return parseInt(this.fields.status_id) === this.partial
  }

  get isDeleted () {
    return this.fields.deleted_at && this.fields.deleted_at.length > 0
  }

  // Reversed, cancelled and deleted invoices are locked against edits.
  get isEditable () {
    return !this.isReversed && !this.isCancelled && !this.isDeleted
  }

  get id () {
    return this.fields.id
  }

  get fileCount () {
    return this._file_count || 0
  }

  // Accepts the raw files array and stores only its length.
  set fileCount (files) {
    this._file_count = files ? files.length : 0
  }

  get invitations () {
    return this.fields.invitations
  }

  // NOTE(review): no '/' between subdomain and "portal" — confirm the subdomain
  // value already ends with a slash.
  get invitation_link () {
    return `http://${this.account.account.subdomain}portal/invoices/$key`
  }

  // Client-facing view link for the first invitation, or '' when none exist.
  get getInvitationViewLink () {
    return !this.invitations || !this.invitations.length ? '' : `http://${this.account.account.subdomain}portal/view/invoice/${this.invitations[0].key}`
  }

  get customer_id () {
    return this.fields.customer_id
  }

  // Changing the customer id also re-resolves the cached customer record.
  set customer_id (customer_id) {
    this.fields.customer_id = customer_id
    this.updateCustomer()
  }

  // Contacts of the currently selected customer ([] when unknown).
  get contacts () {
    const index = this.customers.findIndex(customer => customer.id === this.fields.customer_id)
    const customer = this.customers[index]
    // Guard against an unmatched customer_id (findIndex returns -1).
    return customer && customer.contacts ? customer.contacts : []
  }

  get url () {
    return this._url
  }

  // Re-resolve this.customer from the customers list using the current customer_id.
  updateCustomer () {
    if (this.customers.length && this._fields.customer_id) {
      const customer = this.customers.filter(customer => customer.id === parseInt(this._fields.customer_id))
      this.customer = customer[0]
    }
  }

  /**
   * Add or remove an invitation for the given contact id.
   *
   * @param {*} contact - contact id tied to the toggled checkbox
   * @param {boolean} add - true when the checkbox was checked
   * @returns {Object[]} the invitations array (mutated in place)
   */
  buildInvitations (contact, add = false) {
    const invitations = this.fields.invitations
    // check if the check box is checked or unchecked
    if (add) {
      // add the numerical value of the checkbox to options array
      invitations.push({ contact_id: contact })
    } else {
      // Bug fix: the callback parameter used to shadow `contact`, comparing
      // each entry against its own property so the lookup never matched and
      // splice(-1, 1) dropped the wrong (last) invitation.
      const index = invitations.findIndex(invitation => invitation.contact_id === contact)
      if (index !== -1) {
        invitations.splice(index, 1)
      }
    }
    return invitations
  }

  /**
   * Build the list of dropdown actions valid for this invoice's current state.
   *
   * @returns {string[]} action identifiers consumed by the dropdown menu
   */
  buildDropdownMenu () {
    const actions = []

    if (this.fields.invitations.length) {
      actions.push('pdf')
    }

    actions.push('dispatch_note')

    if (this.fields.customer_id !== '') {
      actions.push('email')
    }

    if (!this.isPaid) {
      actions.push('newPayment')
    }

    if (!this.isSent && this.isEditable) {
      actions.push('markSent')
    }

    if (this.isCancelled || this.isReversed) {
      actions.push('reverse_status')
    }

    if ((this.isSent || this.isPartial) && !this.isPaid && this.isEditable) {
      actions.push('markPaid')
    }

    // NOTE(review): `is_deleted` vs the `deleted_at` checks below — confirm
    // both server flags are intentional.
    if (!this.fields.is_deleted) {
      actions.push('delete')
    }

    if (!this.fields.deleted_at) {
      actions.push('archive')
    }

    if (!this.fields.deleted_at && this.isSent && !this.isCancelled) {
      actions.push('cancel')
    }

    if (!this.fields.deleted_at && !this.isDraft && !this.isCancelled) {
      actions.push('portal')
    }

    if (!this.fields.deleted_at && (this.isSent || this.isPaid) && !this.isReversed) {
      actions.push('reverse')
    }

    if (this.fields.task_id && this.fields.task_id !== '' && this.isEditable) {
      actions.push('getProducts')
    }

    if (this.isEditable) {
      actions.push('cloneToInvoice')
    }

    if (this.isModuleEnabled('quotes') && this.isEditable) {
      actions.push('cloneInvoiceToQuote')
    }

    if (this.isModuleEnabled('credits') && this.isEditable) {
      actions.push('cloneToCredit')
    }

    if (!this.fields.recurring_invoice_id && this.isModuleEnabled('recurringInvoices')) {
      actions.push('cloneToRecurringInvoice')
    }

    return actions
  }

  // Append a new line item and return the updated list.
  // NOTE(review): this pushes the imported LineItem reference itself, so every
  // added row shares one object — confirm whether a copy is intended.
  addItem () {
    // const newArray = this.fields.line_items.slice()
    this.fields.line_items.push(LineItem)
    return this.fields.line_items
  }

  // Remove the line item at `index`, replacing the array immutably.
  removeItem (index) {
    const array = [...this.fields.line_items] // make a separate copy of the array
    array.splice(index, 1)
    this.fields.line_items = array
    return array
  }

  /**
   * POST a named action (e.g. markSent) for a persisted invoice.
   *
   * @returns {Object|false} response payload, or false when unsaved/failed
   */
  async completeAction (data, action) {
    if (!this.fields.id) {
      return false
    }

    this.errors = []
    this.error_message = ''

    try {
      const res = await axios.post(`${this.url}/${this.fields.id}/${action}`, data)

      if (res.status === 200) {
        // test for status you want, etc
        console.log(res.status)
      }

      // Don't forget to return something
      return res.data
    } catch (e) {
      this.handleError(e)
      return false
    }
  }

  /**
   * PUT the given data to update a persisted invoice.
   *
   * @returns {Object|false} response payload, or false when unsaved/failed
   */
  async update (data) {
    if (!this.fields.id) {
      return false
    }

    this.errors = []
    this.error_message = ''

    try {
      const res = await axios.put(`${this.url}/${this.fields.id}`, data)

      if (res.status === 200) {
        // test for status you want, etc
        console.log(res.status)
      }

      // Don't forget to return something
      return res.data
    } catch (e) {
      this.handleError(e)
      return false
    }
  }

  // True when the invoice is past due and still in a pending status.
  isLate () {
    // Bug fix: the format string was 'HH::MM:SS' — a doubled colon, plus MM is
    // moment's *month* token. 'HH:mm:ss' is the correct time-of-day format.
    const dueDate = moment(this._fields.due_date).format('YYYY-MM-DD HH:mm:ss')
    const pending_statuses = [consts.invoice_status_draft, consts.invoice_status_sent, consts.invoice_status_partial]
    return moment().isAfter(dueDate) && pending_statuses.includes(this._fields.status_id)
  }

  /**
   * Create the invoice, or delegate to update() when it already has an id.
   *
   * @returns {Object|false} response payload, or false on failure
   */
  async save (data) {
    if (this.fields.id) {
      return this.update(data)
    }

    try {
      this.errors = []
      this.error_message = ''
      const res = await axios.post(this.url, data)

      if (res.status === 200) {
        // test for status you want, etc
        console.log(res.status)
      }

      // Don't forget to return something
      return res.data
    } catch (e) {
      this.handleError(e)
      return false
    }
  }

  // Request a PDF preview of this invoice and hand it to buildPdf().
  async loadPdf () {
    try {
      this.errors = []
      this.error_message = ''
      const res = await axios.post('api/preview', { entity: this.entity, entity_id: this._fields.id })

      if (res.status === 200) {
        // test for status you want, etc
        console.log(res.status)
      }

      // Don't forget to return something
      return this.buildPdf(res.data)
    } catch (e) {
      alert(e)
      this.handleError(e)
      return false
    }
  }

  /**
   * Derive the form state for a newly selected customer.
   *
   * @param {*} customer_id - id of the customer chosen in the UI
   * @returns {Object} customer record, display name, contacts and billing address
   */
  customerChange (customer_id) {
    const index = this.customers.findIndex(customer => customer.id === parseInt(customer_id))
    const customer = this.customers[index]
    const address = customer.billing ? {
      line1: customer.billing.address_1,
      town: customer.billing.address_2,
      county: customer.billing.city,
      country: 'United Kingdom'
    } : null
    const contacts = customer.contacts ? customer.contacts : []
    return {
      customer: customer,
      customerName: customer.name,
      contacts: contacts,
      address: address
    }
  }

  /**
   * Total tax: invoice-level rates applied to the invoice total, plus each
   * line item's own tax on its (discounted) line total.
   *
   * @returns {number} tax total rounded to the currency's precision
   */
  calculateTaxes (usesInclusiveTaxes) {
    let tax_total = 0

    if (this.fields.tax_rate > 0) {
      const a_total = parseFloat(this.fields.total)
      const tax_percentage = parseFloat(a_total) * parseFloat(this.fields.tax_rate) / 100
      tax_total += tax_percentage
    }

    if (this.fields.tax_2 && this.fields.tax_2 > 0) {
      const a_total = parseFloat(this.fields.total)
      const tax_percentage = parseFloat(a_total) * parseFloat(this.fields.tax_2) / 100
      tax_total += tax_percentage
    }

    if (this.fields.tax_3 && this.fields.tax_3 > 0) {
      const a_total = parseFloat(this.fields.total)
      const tax_percentage = parseFloat(a_total) * parseFloat(this.fields.tax_3) / 100
      tax_total += tax_percentage
    }

    // forEach (not map): the callback is used purely for its side effects.
    this.fields.line_items.forEach((product) => {
      const quantity = product.quantity === 0 ? 1 : product.quantity
      let line_total = product.unit_price * quantity
      let discount_total = 0

      // Line-level discounts only apply when no invoice-level discount is set.
      if (product.unit_discount > 0 && this.fields.discount === 0) {
        const n = parseFloat(this.fields.total)

        if (this.fields.is_amount_discount === true) {
          discount_total += parseFloat(product.unit_discount)
        } else {
          const percentage = n * product.unit_discount / 100
          discount_total += percentage
        }

        line_total -= discount_total
      }

      if (product.unit_tax > 0) {
        const tax_percentage = line_total * product.unit_tax / 100
        tax_total += tax_percentage
      }
    })

    // Bug fix: Math.round takes a single argument, so the precision parameter
    // was silently ignored and totals were rounded to whole units. Scale,
    // round, then divide to round to `precision` decimal places.
    const precision = this.currency.precision || 2
    const factor = 10 ** precision
    return Math.round(tax_total * factor) / factor
  }

  /**
   * Tax for a single percentage rate applied to the invoice total,
   * rounded to the currency's precision.
   */
  calculateTax (tax_amount) {
    const a_total = parseFloat(this.fields.total)
    const tax_percentage = parseFloat(a_total) * parseFloat(tax_amount) / 100
    // Same Math.round fix as calculateTaxes().
    const precision = this.currency.precision || 2
    const factor = 10 ** precision
    return Math.round(tax_percentage * factor) / factor
  }
}
|
# Build the frontend and publish the bundle into devportalservice's public dir.
npm run build
# Drop any previous deployment output before copying the new build in.
rm -r -f "../devportalservice/public"
mkdir "../devportalservice/public"
# "build/." copies the directory's contents rather than the directory itself.
cp -r "build/." "../devportalservice/public"
|
<filename>Arrays/Atividade 16.py
#Atividade 16: Modifique o programa 6.44 para ordenar a lista em ordem decrescente.
L = [ 1, 2, 3, 4, 5 ]
# deve ser ordenada como
# L = [ 5, 4, 3, 2, 1 ]
## Forma que eu faria
L.sort()
L.reverse()
print(L)
## Forma Modificando o 6-44.py
## FONT 6-44.py
L = [ 1, 2, 3, 4, 5 ]
fim = len(L)
while fim > 1:
trocou = False
x = 0
while x < (fim-1):
if L[x] < L[x+1]:
trocou = True
temp = L[x]
L[x] = L[x+1]
L[x+1] = temp
x += 1
if not trocou:
break
fim -= 1
for e in L:
print(e)
## FONT 6-44.py |
#!/bin/sh
# author: hoojo
# email: hoojo_@126.com
# github: https://github.com/hooj0
# create: 2018-10-29
# copyright by hoojo@2018
# @changelog Added text `sed` shell command example
# ====================================================================================
# sed —— 过滤和替换文本
# ====================================================================================
# 用于过滤和替换文本的流式编辑命令
# 名字 sed 是 stream editor(流编辑器)的简称。
# 它对文本流,即一系列指定的文件或标准输入进行编辑。
# ====================================================================================
# 参数说明:
# ------------------------------------------------------------------------------------
# -e <script>或 --expression=<script> 以选项中指定的script来处理输入的文本文件。
# -f <script文件> 或 --file=<script文件> 以选项中指定的script文件来处理输入的文本文件。
# -h或--help 显示帮助。
# -n或--quiet或--silent 仅显示script处理后的结果。
# -V或--version 显示版本信息。
# ====================================================================================
# 动作说明:
# ------------------------------------------------------------------------------------
# a :新增, a 的后面可以接字串,而这些字串会在新的一行出现(目前的下一行)~
# c :取代, c 的后面可以接字串,这些字串可以取代 n1,n2 之间的行!
# d :删除,因为是删除啊,所以 d 后面通常不接任何咚咚;
# i :插入, i 的后面可以接字串,而这些字串会在新的一行出现(目前的上一行);
# p :打印,亦即将某个选择的数据印出。通常 p 会与参数 sed -n 一起运行~
# s :取代,可以直接进行取代的工作哩!通常这个 s 的动作可以搭配正规表示法!例如 1,20s/old/new/g
# ====================================================================================
# sed 地址表示法
# ------------------------------------------------------------------------------------
# 地址 说明
# ------------------------------------------------------------------------------------
# n 行号,n 是一个正整数。
# $ 最后一行。
# /regexp/ 所有匹配一个 POSIX 基本正则表达式的文本行。注意正则表达式通过 斜杠字符界定。
# 选择性地,这个正则表达式可能由一个备用字符界定,通过\cregexpc 来 指定表达式,这里 c 就是一个备用的字符。
# addr1,addr2 从 addr1 到 addr2 范围内的文本行,包含地址 addr2 在内。地址可能是上述任意 单独的地址形式。
# first~step 匹配由数字 first 代表的文本行,然后随后的每个在 step 间隔处的文本行。
# 例如 1~2 是指每个位于偶数行号的文本行,5~5 则指第五行和之后每五行位置的文本行。
# addr1,+n 匹配地址 addr1 和随后的 n 个文本行。
# addr! 匹配所有的文本行,除了 addr 之外,addr 可能是上述任意的地址形式。
# ====================================================================================
# sed 基本编辑命令
# ------------------------------------------------------------------------------------
# 命令 说明
# ------------------------------------------------------------------------------------
# = 输出当前的行号。
# a 在当前行之后追加文本。
# d 删除当前行。
# i 在当前行之前插入文本。
# p 打印当前行。默认情况下,sed 程序打印每一行,并且只是编辑文件中匹配 指定地址的文本行。
# 通过指定-n 选项,这个默认的行为能够被忽略。
# q 退出 sed,不再处理更多的文本行。如果不指定-n 选项,输出当前行。
# Q 退出 sed,不再处理更多的文本行。
# s/regexp/replacement/ 只要找到一个 regexp 匹配项,就替换为 replacement 的内容。
# replacement 可能包括特殊字符 &,其等价于由 regexp 匹配的文本。
# 另外, replacement 可能包含序列 \1到 \9,其是 regexp 中相对应的子表达式的内容。
# 更多信息,查看 下面 back references 部分的讨论。
# 在 replacement 末尾的斜杠之后,可以指定一个 可选的标志,来修改 s 命令的行为。
# y/set1/set2 执行字符转写操作,通过把 set1 中的字符转变为相对应的 set2 中的字符。
# 注意不同于 tr 程序,sed 要求两个字符集合具有相同的长度。
# ------------------------------------------------------------------------------------
# ====================================================================================
# Example: substituting text
# ====================================================================================
echo "front" | sed 's/front/back/'
# Any delimiter may be used for s///; just keep it consistent.
echo "front" | sed 's_front_back_'
# ====================================================================================
# Example: prefixing the command with address 1 restricts the substitution to
# the first line of the input stream (here the input has only one line anyway)
# ====================================================================================
echo "front" | sed '1s/front/back/'
# ====================================================================================
# Example: replace every space in example.txt with a hyphen and print the result
# ====================================================================================
sed 's/ /-/g' example.txt
# output:
#-------------------------------------------------------------------------------------
#Lorem-ipsum
#dolor-sit-amet,
#consetetur
#sadipscing-elitr,
#sed-diam-nonumy
#eirmod-tempor
#invidunt-ut-labore
#et-dolore-magna
#aliquyam-erat,-sed
#diam-voluptua.-At
#vero-eos-et
#accusam-et-justo
#duo-dolores-et-ea
#rebum.-Stet-clita
#kasd-gubergren,
#no-sea-takimata
#sanctus-est-Lorem
#ipsum-dolor-sit
#amet.
# ====================================================================================
# Example: replace every a, b and c with "_"
# ====================================================================================
sed 's/[a-c]/_/g' example.txt
# output:
#-------------------------------------------------------------------------------------
#Lorem ipsum
#dolor sit _met,
#_onsetetur
#s_dips_ing elitr,
#sed di_m nonumy
#eirmod tempor
#invidunt ut l__ore
#et dolore m_gn_
#_liquy_m er_t, sed
#di_m voluptu_. At
#vero eos et
#___us_m et justo
#duo dolores et e_
#re_um. Stet _lit_
#k_sd gu_ergren,
#no se_ t_kim_t_
#s_n_tus est Lorem
#ipsum dolor sit
#_met.
# ====================================================================================
# Example: extract lines 1-5 of a file.
# ====================================================================================
# Start at line one and continue through line five; the p command simply prints
# each matching line. The -n option (no auto-print) stops sed from printing
# every input line by default.
sed -n '1,5p' distros.txt
# ====================================================================================
# Example: exclude records containing the SUSE keyword
# ====================================================================================
sed -n '/SUSE/!p' distros.txt
# ====================================================================================
# Example: reformatting dates
# ====================================================================================
# ([0-9]{2})/([0-9]{2})/([0-9]{4})$ matches a MM/DD/YYYY date at end of line
# /\3-\1-\2/ emits the year, a dash, the month, a dash, then the day.
sed 's/\([0-9]\{2\}\)\/\([0-9]\{2\}\)\/\([0-9]\{4\}\)$/\3-\1-\2/' distros.txt
# output:
#-------------------------------------------------------------------------------------
# SUSE 10.2 2006-12-07
# Fedora 10 2008-11-25
# ====================================================================================
# Example: the s command accepts optional flags after the replacement string.
# The g flag tells sed to search and replace across the whole line instead of
# stopping at the first match (stopping at the first match is the default).
# ====================================================================================
# default: only the first match on the line is replaced
echo "aaabbbccc" | sed 's/b/B/'
# g: replace every match on the line
echo "aaabbbccc" | sed 's/b/B/g'
# ====================================================================================
# Example: generating a table
# ====================================================================================
sort -k 1,1 -k 2n distros.txt | sed -f distros-tbl.sed | groff -t > ~/Desktop/foo.ps
# ====================================================================================
# Example: append a line after the fourth line of the file, writing to stdout
# ====================================================================================
# append a new line
sed -e 4a\newLine tmp.txt
# ====================================================================================
# Example: line-oriented insertion and deletion
# ====================================================================================
# List /etc/passwd with line numbers, deleting lines 2-5
# d deletes
nl /etc/passwd | sed '2,5d'
# delete line 2 only
nl /etc/passwd | sed '2d'
# delete from line 3 through the last line
nl /etc/passwd | sed '3,$d'
# append "drink tea" after line 2 (i.e. as the new line 3)
nl /etc/passwd | sed '2a drink tea'
# insert before line 2
nl /etc/passwd | sed '2i drink tea'
# ====================================================================================
# Example: line-oriented replacement
# ====================================================================================
# Replace the content of lines 2-5 with "No 2-5 number"
nl /etc/passwd | sed '2,5c No 2-5 number'
# ====================================================================================
# Example: print only a given line range
# ====================================================================================
# Show only lines 5-7 of /etc/passwd
nl /etc/passwd | sed -n '5,7p'
# ====================================================================================
# Example: search and display
# ====================================================================================
# Search /etc/passwd for lines containing "root"; without -n every line is
# printed, and matching lines are printed a second time by p
nl /etc/passwd | sed '/root/p'
# with -n only the matching lines are printed
nl /etc/passwd | sed -n '/root/p'
# ====================================================================================
# Example: search and delete
# ====================================================================================
# Delete every line of /etc/passwd containing "root"; print the rest
nl /etc/passwd | sed '/root/d'
# ====================================================================================
# Example: search and run a command group
# ====================================================================================
# Find the line in /etc/passwd matching "root", then run the brace-enclosed
# commands on it (separated by semicolons): replace bash with blueshell,
# print the line, and finally q quits
nl /etc/passwd | sed -n '/root/{s/bash/blueshell/;p;q}'
# ====================================================================================
# Example: search and replace
# ====================================================================================
# Besides whole-line operations, sed can also search and replace parts of a line
sed 's/要被取代的字串/新的字串/g'
# Strip everything before the IP address
/sbin/ifconfig eth0 | grep 'inet addr' | sed 's/^.*addr://g'
# 192.168.1.100 Bcast:192.168.1.255 Mask:255.255.255.0
# Strip everything after the IP address as well
/sbin/ifconfig eth0 | grep 'inet addr' | sed 's/^.*addr://g' | sed 's/Bcast.*$//g'
# ====================================================================================
# Example: multiple edits — -e allows several expressions in one invocation
# ====================================================================================
# Delete from line 3 of /etc/passwd to the end, and replace bash with blueshell
nl /etc/passwd | sed -e '3,$d' -e 's/bash/blueshell/'
# ====================================================================================
# Example: editing a file in place (-i)
# ====================================================================================
cat tmp.txt
# output:
#-------------------------------------------------------------------------------------
#runoob.
#google.
#taobao.
#facebook.
#zhihu-
#weibo-
# Use sed to turn a trailing "." on each line of tmp.txt into "!"
sed -i 's/\.$/\!/g' tmp.txt
cat tmp.txt
#runoob!
#google!
#taobao!
#facebook!
#zhihu-
#weibo-
# Use sed to append "# This is a test" as the last line of tmp.txt
sed -i '$a # This is a test' tmp.txt
|
<reponame>ceanver/sourceCode<filename>src/incl/Home/About.js
import React from "react"
import Title from "../Title"
import styles from "../../css/about.module.css"
// import selfie from "../../vtk/cecil.jpeg" Default way but done with queries fluid which is much faster.
import { useStaticQuery, graphql } from "gatsby"
import Img from "gatsby-image"
// Static GraphQL query for the portrait image (cecil.jpeg), processed by
// gatsby-image into a fluid (responsive) source with a traced-SVG placeholder.
const getSelfie = graphql`
query selfie {
selfie: file(relativePath: { eq: "cecil.jpeg" }) {
childImageSharp {
fluid(maxWidth: 600) {
...GatsbyImageSharpFluid_tracedSVG
}
}
}
}
`
const About = () => {
const { selfie } = useStaticQuery(getSelfie)
return (
<section className={styles.about}>
<Title title="about" subtitle="me" />
<div className={styles.aboutCenter}>
<article className={styles.aboutImg}>
<div className={styles.imgContainer}>
{/* <img src={selfie} alt="about company" /> */}
<Img fluid={selfie.childImageSharp.fluid} alt="In Wall of China" />
</div>
</article>
<article className={styles.aboutInfo}>
<h4>Data Scientist</h4>
<p>
Hi, my name is Cecil. I'm a former Chemical Engineer, but now a
aspiring to become a data scientist. During my last few years of
engineering school, I started reading about and statistics and
implementing machine learning algorithms and decided I wanted to
make machine learning a part of my career.
</p>
<p>
Following my graduation, I promptly travelled to the UK, perfecting
my English skills whilst writing a research paper related to latent
heat storage in house heating applications. Since my return I have
been someone who spends most of their time coding and currently
learning with the Georgia Tech’s data analytics program. When I'm
not working on that, I'm generally working on something related to
python, react or training aerial straps.
</p>
<button type="button" className="btn-primary">
read more
</button>
</article>
</div>
</section>
)
}
export default About
|
#!/bin/bash
# Regenerate Bazel RBE (remote build execution) C++ toolchain configs for one
# OS family from the CI toolchain container, and optionally commit the result.
# Inputs (env): GCR_IMAGE_NAME, OS_FAMILY, COMMIT_TOOLCHAINS, SOURCE_BRANCH.
set -e
# Bazel version the generated configs target.
BAZEL_VERSION=5.0.0
export RBE_AUTOCONF_ROOT=$(bazel info workspace)
# The container tag is the last commit that touched build_container/.
CONTAINER_TAG=$(git log -1 --pretty=format:"%H" "${RBE_AUTOCONF_ROOT}/build_container")
DOCKER_IMAGE="gcr.io/envoy-ci/${GCR_IMAGE_NAME}:${CONTAINER_TAG}"
# Nothing to generate until the container for this tag has been published.
if ! docker pull ${DOCKER_IMAGE}; then
  echo "Image is not built, skip..."
  exit 0
fi
# If we are committing changes, pull before modifying to ensure no conflicts
if [[ "true" == "${COMMIT_TOOLCHAINS}" ]]; then
  git pull origin refs/heads/main --ff-only
fi
# Regenerate the config tree for this OS family from scratch.
rm -rf "${RBE_AUTOCONF_ROOT}/toolchains/configs/${OS_FAMILY}"
mkdir -p "${RBE_AUTOCONF_ROOT}/toolchains/configs/${OS_FAMILY}"
# Pick the toolchain list and the source .bazelrc for this OS family.
case ${OS_FAMILY} in
  linux)
    TOOLCHAIN_LIST="clang clang_libcxx gcc"
    BAZELRC_LATEST=${RBE_AUTOCONF_ROOT}/toolchains/linux.latest.bazelrc
    ;;
  windows)
    TOOLCHAIN_LIST="msvc-cl clang-cl"
    BAZELRC_LATEST=${RBE_AUTOCONF_ROOT}/toolchains/windows.latest.bazelrc
    ;;
esac
BAZELRC_DEST=${RBE_AUTOCONF_ROOT}/toolchains/configs/${OS_FAMILY}/.latest.bazelrc
# Fetch external dependencies
bazel fetch :all
# Build utility for generating RBE config
RBE_CONFIG_GEN_DIR=$(bazel info output_base)/external/bazel_toolchains/cmd/rbe_configs_gen
(cd "${RBE_CONFIG_GEN_DIR}" && go build)
# Generate C++ (not Java) configs for every toolchain in the list.
for TOOLCHAIN in ${TOOLCHAIN_LIST}; do
  "${RBE_CONFIG_GEN_DIR}/rbe_configs_gen" -exec_os ${OS_FAMILY} -generate_java_configs=false -generate_cpp_configs -output_src_root "${RBE_AUTOCONF_ROOT}" -output_config_path toolchains/configs/${OS_FAMILY}/${TOOLCHAIN} -target_os ${OS_FAMILY} -bazel_version ${BAZEL_VERSION} -toolchain_container ${DOCKER_IMAGE} -cpp_env_json "${RBE_AUTOCONF_ROOT}/toolchains/${TOOLCHAIN}.env.json"
done
cp "${BAZELRC_LATEST}" "${BAZELRC_DEST}"
chmod -R 755 "${RBE_AUTOCONF_ROOT}/toolchains/configs/${OS_FAMILY}"
git add "${RBE_AUTOCONF_ROOT}/toolchains/configs/${OS_FAMILY}"
# Bail out quietly when regeneration produced no changes.
if [[ -z "$(git diff HEAD --name-only)" ]]; then
  echo "No toolchain changes."
  exit 0
fi
if [[ "true" == "${COMMIT_TOOLCHAINS}" ]]; then
  # "[skip ci]" prevents the commit from retriggering the pipeline.
  COMMIT_MSG="Regenerate ${OS_FAMILY} toolchains from $(git rev-parse HEAD)
[skip ci]
$(git log --format=%B -n 1)"
  git config user.name "envoy-build-tools(Azure Pipelines)"
  git config user.email envoy-build-tools@users.noreply.github.com
  git commit -m "${COMMIT_MSG}"
  # Push back only when the build was triggered from a branch ref.
  if [[ "${SOURCE_BRANCH}" =~ ^refs/heads/.* ]]; then
    git push git@github.com:envoyproxy/envoy-build-tools.git "HEAD:${SOURCE_BRANCH}"
  fi
fi
|
class MiniBatchSampler:
    """Draws mini-batches of clusters from a fixed cluster pool.

    Attributes:
        name: human-readable identifier for this sampler.
        q: number of clusters drawn per mini-batch.
        lam: tunable parameter (updatable via update_parameter).
        clusters: pool of clusters to sample from.
    """

    def __init__(self, name, q, lam, clusters):
        self.name = name
        self.q = q
        self.lam = lam
        self.clusters = clusters

    def sample_mini_batch(self):
        """Return ``q`` clusters drawn uniformly at random, without replacement."""
        import random
        chosen = random.sample(self.clusters, self.q)
        return chosen

    def update_parameter(self, new_lam):
        """Replace the current ``lam`` value with ``new_lam``."""
        self.lam = new_lam
awk 'NF {sub(/\r/, ""); printf "%s\\n",$0;}' cert-name.pem |
#!/bin/bash ../../.port.sh
# Port definition for vim 8.1, built against ncurses.
port=vim
version=8.1
# Directory name inside the extracted source tarball.
workdir=vim81
useconfigure=true
# Cross-compile for the x86_64-k1om target; use ncurses as the terminal library.
configopts="--host=x86_64-k1om-linux --with-tlib=ncurses"
# Download URL, local archive name and SHA-1 checksum.
files="https://ftp.nluug.nl/pub/vim/unix/vim-8.1.tar.bz2 vim-8.1.tar.bz2 cbca219d11990d866976da309d7ce5b76be48b96"
depends="ncurses"
# Preseed configure test results that cannot be probed when cross-compiling.
export vim_cv_toupper_broken="no"
export vim_cv_terminfo="yes"
export vim_cv_tgetent="zero"
export vim_cv_tty_group="wheel"
export vim_cv_tty_mode="0620"
export vim_cv_getcwd_broken="no"
export vim_cv_stat_ignores_slash="yes"
export vim_cv_memmove_handles_overlap="yes"
|
// Wire up the promo modal ('.anuncio'): the CTA opens it and the close
// control hides it, by toggling the 'anuncio--show' modifier class.
const openModal = document.querySelector('.cta');
const modal = document.querySelector('.anuncio');
const closeModal = document.querySelector('.anuncio_close');

const showModal = (event) => {
  event.preventDefault();
  modal.classList.add('anuncio--show');
};

const hideModal = (event) => {
  event.preventDefault();
  modal.classList.remove('anuncio--show');
};

openModal.addEventListener('click', showModal);
closeModal.addEventListener('click', hideModal);
# Define the number of task managers
AMT_WORKERS=3
# Content for flink-taskmanager-with-env.json
# Marathon app template: "instances" is expanded from $AMT_WORKERS right here;
# (per-instance values are substituted later via envsubst).
cat <<EOF >flink-taskmanager-with-env.json
{
"id": "flink-taskmanager",
"cmd": "start-taskmanager.sh",
"cpus": 1,
"mem": 1024,
"instances": $AMT_WORKERS,
"env": {
"FLINK_CONF_DIR": "/opt/flink/conf",
"FLINK_TM_HEAP": "1024m"
}
}
EOF
# Complete the script by adding the remaining commands for environment variable substitution and task manager deployment
# Give the jobmanager time to come up before registering taskmanagers.
echo 'Waiting for Flink jobmanager to start.'
sleep 30
echo 'Starting Flink taskmanagers'
# Render one Marathon app definition per taskmanager and deploy it via dcos.
for TASKMANAGER_NB in $(seq 1 $AMT_WORKERS)
do
  export TASKMANAGER_NB=$TASKMANAGER_NB
  envsubst < flink-taskmanager-with-env.json > flink-taskmanager-without-env-${TASKMANAGER_NB}.json
  dcos marathon app add flink-taskmanager-without-env-${TASKMANAGER_NB}.json
done
<gh_stars>1-10
package com.zutubi.android.ant;
import org.apache.tools.ant.BuildException;
import java.io.File;
import java.io.FileWriter;
/**
* Ant task to dump the version details into a json file. Suitable for use
* with cwac-updater: https://github.com/commonsguy/cwac-updater.
*/
public class JsonVersionTask extends AbstractManifestTask {
    /** Path of the JSON output file. */
    private File jsonfile;
    /** Update URL embedded in the JSON payload. */
    private String updateurl;

    /**
     * Sets the path of the JSON output file.
     *
     * @param jsonfile path of the output file
     */
    public void setJsonfile(final File jsonfile) {
        this.jsonfile = jsonfile;
    }

    /**
     * Sets the update URL to include in the JSON file.
     *
     * @param updateurl value to set updateURL to
     */
    public void setUpdateurl(final String updateurl) {
        this.updateurl = updateurl;
    }

    /**
     * Writes {"versionCode": ..., "updateURL": "..."} to the configured file,
     * using the version code parsed from the Android manifest.
     *
     * @throws BuildException if configuration is missing or writing fails
     */
    @Override
    public void execute() throws BuildException {
        if (jsonfile == null) {
            throw new BuildException("jsonfile is required");
        }
        if (!Util.stringSet(updateurl)) {
            throw new BuildException("updateurl is required");
        }
        final Manifest manifest = parseManifest();
        try {
            final FileWriter writer = new FileWriter(jsonfile);
            try {
                // Bug fix: emit spec-compliant JSON. Single-quoted keys and
                // values are rejected by strict parsers (RFC 8259 requires
                // double quotes).
                writer.write("{\"versionCode\":" + manifest.getVersionCode()
                        + ", \"updateURL\":\"" + updateurl + "\"}");
            } finally {
                writer.close();
            }
        } catch (final Exception e) {
            throw new BuildException(e);
        }
    }
}
|
#!/bin/bash
# Gzip-compress and AES-256-CBC encrypt a file (or stdin) using a passphrase
# supplied via the AESPASS environment variable; output is base64 ciphertext.
#set -x
INFILE="$1"
OUTFILE="$2"
CIPHER="aes-256-cbc"
# explicitly set message digest because defaults have changed
# openssl 1.0.x uses MD5 but openssl 1.1.x uses SHA256
# see https://github.com/fastlane/fastlane/issues/9542
MDSUM="sha256"
if [ -z "$INFILE" ] || [ -z "$AESPASS" ]; then
  # Bug fix: the default output extension is .gz.aes (see below), not .gz.enc
  # as the usage text previously claimed.
  echo "usage: AESPASS=<secret> $(basename "$0") <file> [<file>.gz.aes | -]" 1>&2
  echo "note: if <outfile> is not provided, input file is encrypted to <file>.gz.aes" 1>&2
  exit 1
fi
if [ -z "$OUTFILE" ]; then
  OUTFILE="$INFILE.gz.aes"
fi
export AESPASS
# Salted encryption, base64 output, passphrase read from the environment.
ENC_ARGS="$CIPHER -e -salt -base64 -pass env:AESPASS -md $MDSUM"
if [ "$INFILE" = '-' ]; then
  # stdin -> stdout
  # shellcheck disable=SC2086
  gzip -c | openssl $ENC_ARGS
elif [ "$OUTFILE" = '-' ]; then
  # file -> stdout
  # shellcheck disable=SC2086
  gzip -c < "$INFILE" | openssl $ENC_ARGS
else
  # file -> file: stage through a temp file so a failed run cannot leave a
  # truncated output file behind; the trap cleans the temp file on any exit.
  TMPFILE="$(mktemp)"
  function finish {
    rm -rf "$TMPFILE"
  }
  trap finish EXIT
  # shellcheck disable=SC2086
  gzip -c < "$INFILE" | openssl $ENC_ARGS > "$TMPFILE" && \
    cat < "$TMPFILE" > "$OUTFILE"
fi
exit $?
|
<filename>python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_pipeline.py
import re
import sys
import pytest
from dagster.api.snapshot_pipeline import sync_get_external_pipeline_subset_grpc
from dagster.core.errors import DagsterUserCodeProcessError
from dagster.core.host_representation.external_data import ExternalPipelineSubsetResult
from dagster.core.host_representation.handle import PipelineHandle
from dagster.utils.error import serializable_error_info_from_exc_info
from .utils import get_bar_repo_repository_location
def _test_pipeline_subset_grpc(pipeline_handle, api_client, solid_selection=None):
    """Fetch the external pipeline subset for ``pipeline_handle`` over gRPC.

    Thin helper so each test only supplies the handle, client and optional
    solid selection.
    """
    return sync_get_external_pipeline_subset_grpc(
        api_client, pipeline_handle.get_external_origin(), solid_selection=solid_selection
    )
def test_pipeline_snapshot_api_grpc():
    """The full (unsubsetted) pipeline snapshot is fetched successfully."""
    with get_bar_repo_repository_location() as repository_location:
        pipeline_handle = PipelineHandle(
            "foo", repository_location.get_repository("bar_repo").handle
        )
        api_client = repository_location.client

        external_pipeline_subset_result = _test_pipeline_subset_grpc(pipeline_handle, api_client)
        assert isinstance(external_pipeline_subset_result, ExternalPipelineSubsetResult)
        # `== True` is redundant (flake8 E712); assert truthiness directly.
        assert external_pipeline_subset_result.success
        assert external_pipeline_subset_result.external_pipeline_data.name == "foo"
def test_pipeline_with_valid_subset_snapshot_api_grpc():
    """Subsetting to an existing solid returns a successful snapshot."""
    with get_bar_repo_repository_location() as repository_location:
        pipeline_handle = PipelineHandle(
            "foo", repository_location.get_repository("bar_repo").handle
        )
        api_client = repository_location.client

        external_pipeline_subset_result = _test_pipeline_subset_grpc(
            pipeline_handle, api_client, ["do_something"]
        )
        assert isinstance(external_pipeline_subset_result, ExternalPipelineSubsetResult)
        # `== True` is redundant (flake8 E712); assert truthiness directly.
        assert external_pipeline_subset_result.success
        assert external_pipeline_subset_result.external_pipeline_data.name == "foo"
def test_pipeline_with_invalid_subset_snapshot_api_grpc():
    """Selecting a solid that does not exist surfaces a user-code error."""
    with get_bar_repo_repository_location() as repo_location:
        handle = PipelineHandle("foo", repo_location.get_repository("bar_repo").handle)
        client = repo_location.client

        expected_message = "No qualified solids to execute found for solid_selection"
        with pytest.raises(DagsterUserCodeProcessError, match=expected_message):
            _test_pipeline_subset_grpc(handle, client, ["invalid_solid"])
def test_pipeline_with_invalid_definition_snapshot_api_grpc():
    """An invalid subset of 'bar' fails with chained, descriptive error messages.

    Fix: the original try/except silently passed when NO exception was raised
    (all asserts live inside the except branch). We now fail explicitly in the
    no-exception case via the try/except/else `pytest.fail`.
    """
    with get_bar_repo_repository_location() as repository_location:
        pipeline_handle = PipelineHandle(
            "bar", repository_location.get_repository("bar_repo").handle
        )
        api_client = repository_location.client

        try:
            _test_pipeline_subset_grpc(pipeline_handle, api_client, ["fail_subset"])
        except DagsterUserCodeProcessError:
            error_info = serializable_error_info_from_exc_info(sys.exc_info())
            # Top-level error describes the invalid subset...
            assert re.match(
                (
                    r".*DagsterInvalidSubsetError[\s\S]*"
                    r"The attempted subset \['fail_subset'\] for pipeline bar results in an invalid pipeline"
                ),
                error_info.message,
            )
            # ...and its cause points at the underlying definition problem.
            assert re.match(
                (
                    r".*DagsterInvalidDefinitionError[\s\S]*"
                    r'add a dagster_type_loader for the type "InputTypeWithoutHydration"'
                ),
                error_info.cause.message,
            )
        else:
            pytest.fail("Expected DagsterUserCodeProcessError to be raised")
|
# Run the MWOZ_ACT few-shot evaluation for each GPT-2 size and shot count.
# Order matches the original command list: model size outer, shots inner.
for model in gpt2 gpt2-large gpt2-xl; do
    for shots in 10 20 29; do
        python main.py --model_type=$model --model_name_or_path $model --shots $shots --task MWOZ_ACT --length 2
    done
done
|
<filename>trace-agent/trace-agent-base/src/main/java/com/wpisen/trace/agent/common/logger/log4j2/Log4j2LoggerAdapter.java<gh_stars>1-10
package com.wpisen.trace.agent.common.logger.log4j2;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.RollingFileAppender;
import org.apache.logging.log4j.core.appender.rolling.DefaultRolloverStrategy;
import org.apache.logging.log4j.core.appender.rolling.SizeBasedTriggeringPolicy;
import org.apache.logging.log4j.core.appender.rolling.TriggeringPolicy;
import org.apache.logging.log4j.core.config.AppenderRef;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.layout.PatternLayout;
import com.wpisen.trace.agent.common.logger.Logger;
import com.wpisen.trace.agent.common.logger.LoggerAdapter;
import java.io.File;

/**
 * Log4j2-backed {@link LoggerAdapter} that programmatically registers two
 * size-based rolling file appenders: one for all agent logging
 * ("com.wpisen.trace.agent") and one dedicated to the trace upload service.
 */
public class Log4j2LoggerAdapter implements LoggerAdapter {

    // Shared Log4j2 context this adapter configures and resolves loggers from.
    private final LoggerContext ctx;

    /**
     * Initializes the Log4j2 configuration.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public Log4j2LoggerAdapter() {
        // Logger names / appender names the agent writes to.
        String allLogName = "com.wpisen.trace.agent";
        String allAppenderName = "allInfo";
        String traceLogName = "com.wpisen.trace.agent.transfer.UploadServiceImpl";
        String traceAppenderName="traceInfo";
        ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();
        // Shared layout / rollover policy: roll at 200MB, keep up to 20 files.
        Layout layout = PatternLayout.createLayout("%d [%-5p][%t] %m (%C:%F:%L) %n", config, null, null, true, false, null, null);
        TriggeringPolicy tp = SizeBasedTriggeringPolicy.createPolicy("200MB");
        DefaultRolloverStrategy strategy = DefaultRolloverStrategy.createStrategy("20", null, null, null, config);
        // Appender for all agent logs; rolled files are grouped by date directory.
        Appender allAppender = RollingFileAppender.createAppender("logs/trace/all_hawkeye.log", "logs/trace/" + "%d{yyyy-MM-dd}/all_hawkeye-%d{yyyy-MM-dd-HH}-%i.log", "true", allAppenderName, null, null, null, tp,
                strategy, layout, null, null, null, null, config);
        allAppender.start();
        config.addAppender(allAppender);
        // Separate appender dedicated to trace-upload logging.
        Appender traceAppender=RollingFileAppender.createAppender("logs/trace/trace_hawkeye.log", "logs/trace/" + "%d{yyyy-MM-dd}/trace_hawkeye-%d{yyyy-MM-dd-HH}-%i.log", "true", traceAppenderName, null, null, null, tp,
                strategy, layout, null, null, null, null, config);
        traceAppender.start();
        config.addAppender(traceAppender);
        // Wire each logger to its appender (additivity "false": no bubbling to root).
        AppenderRef allRef = AppenderRef.createAppenderRef(allAppenderName, null, null);
        AppenderRef traceRef = AppenderRef.createAppenderRef(traceAppenderName, null, null);
        LoggerConfig allLoggerConfig = LoggerConfig.createLogger("false", Level.ALL, allAppenderName, "true", new AppenderRef[] {allRef}, null, config, null);
        LoggerConfig traceLoggerConfig = LoggerConfig.createLogger("false", Level.ALL, traceAppenderName, "true", new AppenderRef[] {traceRef}, null, config, null);
        allLoggerConfig.addAppender(allAppender, null, null);
        traceLoggerConfig.addAppender(traceAppender, null, null);
        config.addLogger(allLogName, allLoggerConfig);
        config.addLogger(traceLogName, traceLoggerConfig);
        // Push the new configuration to live loggers.
        ctx.updateLoggers();
        ctx.getLogger(allLogName);
        ctx.getLogger(traceLogName);
    }

    @Override
    public Logger getLogger(Class<?> key) {
        // Wrap the Log4j2 logger in the agent's Logger abstraction.
        return new Log4j2Logger(ctx.getLogger(key.getName()));
    }

    @Override
    public Logger getLogger(String key) {
        return new Log4j2Logger(ctx.getLogger(key));
    }

    @Override
    public void setLevel(com.wpisen.trace.agent.common.logger.Level level) {
        // TODO Auto-generated method stub
    }

    @Override
    public com.wpisen.trace.agent.common.logger.Level getLevel() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public File getFile() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public void setFile(File file) {
        // TODO Auto-generated method stub
    }
}
|
<reponame>anedyalkov/JS-Applications
function printDeckOfCards(cards) {
  // Parses card strings like '5S' or '10H', prints them joined by spaces with
  // the suit letter replaced by its Unicode symbol; prints an error line for
  // the first invalid card encountered.
  const VALID_FACES = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K', 'A'];
  const SUIT_SYMBOLS = {
    'S': '\u2660',
    'H': '\u2665',
    'D': '\u2666',
    'C': '\u2663'
  };

  // Builds a card object; throws (with the offending card attached) on bad input.
  const makeCard = (face, suit) => {
    const faceIsValid = VALID_FACES.includes(face);
    const suitIsValid = Object.prototype.hasOwnProperty.call(SUIT_SYMBOLS, suit);
    if (!faceIsValid || !suitIsValid) {
      const error = new Error('Invalid card!');
      error.card = `${face}${suit}`;
      throw error;
    }
    return {
      face,
      suit: SUIT_SYMBOLS[suit],
      toString() {
        return `${this.face}${this.suit}`;
      }
    };
  };

  try {
    const allCards = cards.map((entry) => {
      const characters = entry.split('');
      const suit = characters.pop();      // last character is the suit letter
      const face = characters.join('');   // everything before it is the face
      return makeCard(face, suit);
    });
    console.log(allCards.join(' ')); // join() stringifies each card via toString()
  } catch (error) {
    console.log(`Invalid card: ${error.card}`);
  }
}
printDeckOfCards(['5S', '3D', 'QD', '1C']);
<reponame>shin-kinoshita/dbflute-core
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.cbean.chelper;

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.dbflute.cbean.ConditionBean;
import org.dbflute.cbean.ConditionQuery;
import org.dbflute.cbean.dream.SpecifiedColumn;
import org.dbflute.cbean.exception.ConditionBeanExceptionThrower;
import org.dbflute.cbean.sqlclause.SqlClause;
import org.dbflute.cbean.sqlclause.join.InnerJoinNoWaySpeaker;
import org.dbflute.cbean.sqlclause.query.QueryUsedAliasInfo;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.DBMetaProvider;
import org.dbflute.dbmeta.info.ColumnInfo;
import org.dbflute.system.DBFluteSystem;

/**
 * Base class for column specifications (SpecifyColumn / DerivedReferrer support).
 * Tracks which columns have been specified on a condition-bean and enforces the
 * purpose-dependent rules (e.g. no second column for scalar-select purposes).
 * @param <CQ> The type of condition-query.
 * @author jflute
 */
public abstract class HpAbstractSpecification<CQ extends ConditionQuery> implements HpColumnSpHandler {

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    protected final ConditionBean _baseCB;
    protected final HpSpQyCall<CQ> _qyCall;
    protected HpSpQyCall<CQ> _syncQyCall;
    protected final HpCBPurpose _purpose;
    protected final DBMetaProvider _dbmetaProvider;
    protected final HpSDRFunctionFactory _sdrFuncFactory;
    protected CQ _query; // lazy-loaded
    protected boolean _alreadySpecifiedRequiredColumn; // also means specification existence
    protected Map<String, SpecifiedColumn> _specifiedColumnMap; // saves specified columns (lazy-loaded)
    protected boolean _alreadySpecifiedEveryColumn;
    protected boolean _alreadySpecifiedExceptColumn;

    // ===================================================================================
    //                                                                         Constructor
    //                                                                         ===========
    /**
     * @param baseCB The condition-bean of base level. (NotNull)
     * @param qyCall The call-back for condition-query. (NotNull)
     * @param purpose The purpose of condition-bean. (NotNull)
     * @param dbmetaProvider The provider of DB meta. (NotNull)
     * @param sdrFuncFactory The factory of (specify) derived-referrer function. (NotNull)
     */
    protected HpAbstractSpecification(ConditionBean baseCB, HpSpQyCall<CQ> qyCall, HpCBPurpose purpose, DBMetaProvider dbmetaProvider,
            HpSDRFunctionFactory sdrFuncFactory) {
        _baseCB = baseCB;
        _qyCall = qyCall;
        _purpose = purpose;
        _dbmetaProvider = dbmetaProvider;
        _sdrFuncFactory = sdrFuncFactory;
    }

    // ===================================================================================
    //                                                                              DBMeta
    //                                                                              ======
    @Override
    public DBMeta asDBMeta() {
        return _dbmetaProvider.provideDBMetaChecked(getTableDbName());
    }

    // ===================================================================================
    //                                                                Column Specification
    //                                                                ====================
    public SpecifiedColumn xspecifyColumn(String columnName) { // for interface
        return doColumn(columnName);
    }

    /**
     * Register the column as a specified select column, resolving the table
     * alias (base point or join alias) and caching the result so a second
     * specify of the same column returns the same instance.
     */
    protected SpecifiedColumn doColumn(String columnName) { // for extended class
        checkSpecifiedThemeColumnStatus(columnName);
        if (isSpecifiedColumn(columnName)) {
            // returns the same instance as the specified before
            return getSpecifiedColumn(columnName);
        }
        assertColumn(columnName);
        callQuery();
        if (isRequiredColumnSpecificationEnabled()) {
            _alreadySpecifiedRequiredColumn = true;
            doSpecifyRequiredColumn();
        }
        final SqlClause sqlClause = _baseCB.getSqlClause();
        final String tableAliasName;
        if (_query.isBaseQuery()) {
            tableAliasName = sqlClause.getBasePointAliasName();
        } else {
            // relation column: resolve the join alias from relation path and nest level
            final String relationPath = _query.xgetRelationPath();
            final int nestLevel = _query.xgetNestLevel();
            tableAliasName = sqlClause.resolveJoinAliasName(relationPath, nestLevel);
            keepDreamCruiseJourneyLogBookIfNeeds(relationPath, tableAliasName);
            reflectDreamCruiseWhereUsedToJoin(relationPath, tableAliasName);
        }
        final SpecifiedColumn specifiedColumn = createSpecifiedColumn(columnName, tableAliasName);
        sqlClause.specifySelectColumn(specifiedColumn);
        saveSpecifiedColumn(columnName, specifiedColumn);
        return specifiedColumn;
    }

    protected void checkSpecifiedThemeColumnStatus(String columnName) {
        // not check to avoid automatic specify-column exception after everyColumn() by jflute (2017/07/19)
        // (and it's hard to judge automatic specify-column so simple comment-out, not important check)
        //if (_alreadySpecifiedEveryColumn) {
        //    throwSpecifyColumnAlreadySpecifiedEveryColumnException(columnName);
        //}
        if (_alreadySpecifiedExceptColumn) {
            throwSpecifyColumnAlreadySpecifiedExceptColumnException(columnName);
        }
    }

    // Lazily resolves the condition-query on first column specification.
    protected void callQuery() {
        if (_query == null) {
            _query = qyCall().qy();
        }
    }

    /**
     * Get the query call with sync. <br>
     * This method is basically for SpecifyColumn.
     * Don't set this (or call-back that uses this) to other objects.
     * @return The instance of query call. (NotNull)
     */
    protected HpSpQyCall<CQ> qyCall() { // basically for SpecifyColumn (NOT DerivedReferrer)
        return _syncQyCall != null ? _syncQyCall : _qyCall;
    }

    protected boolean isRequiredColumnSpecificationEnabled() {
        if (_alreadySpecifiedRequiredColumn) {
            return false;
        }
        return isNormalUse(); // only normal purpose needs
    }

    protected abstract void doSpecifyRequiredColumn();

    protected abstract String getTableDbName();

    protected SpecifiedColumn createSpecifiedColumn(String columnName, String tableAliasName) {
        final ColumnInfo columnInfo = asDBMeta().findColumnInfo(columnName);
        return new SpecifiedColumn(tableAliasName, columnInfo, _baseCB);
    }

    // -----------------------------------------------------
    //                             Specified Column Handling
    //                             -------------------------
    public SpecifiedColumn getSpecifiedColumn(String columnName) {
        return _specifiedColumnMap != null ? _specifiedColumnMap.get(columnName) : null;
    }

    public boolean hasSpecifiedColumn() {
        return _specifiedColumnMap != null && !_specifiedColumnMap.isEmpty();
    }

    public boolean isSpecifiedColumn(String columnName) {
        return _specifiedColumnMap != null && _specifiedColumnMap.containsKey(columnName);
    }

    protected void saveSpecifiedColumn(String columnName, SpecifiedColumn specifiedColumn) {
        if (_specifiedColumnMap == null) {
            // LinkedHashMap keeps specification order for later processing
            _specifiedColumnMap = new LinkedHashMap<String, SpecifiedColumn>();
        }
        _specifiedColumnMap.put(columnName, specifiedColumn);
    }

    // -----------------------------------------------------
    //                                          Dream Cruise
    //                                          ------------
    protected void keepDreamCruiseJourneyLogBookIfNeeds(String relationPath, String tableAliasName) {
        if (!_baseCB.xisDreamCruiseShip()) {
            return;
        }
        _baseCB.xkeepDreamCruiseJourneyLogBook(relationPath);
    }

    protected void reflectDreamCruiseWhereUsedToJoin(String relationPath, String tableAliasName) {
        if (!_baseCB.xisDreamCruiseShip()) {
            return;
        }
        // to suppress CountLeastJoin of the relation
        // the DreamCruise might be used in where clause (not correctly but safety logic)
        final ConditionBean portCB = _baseCB.xgetDreamCruiseDeparturePort();
        final QueryUsedAliasInfo usedAliasInfo = new QueryUsedAliasInfo(tableAliasName, new InnerJoinNoWaySpeaker() {
            public boolean isNoWayInner() {
                return true; // non fact of inner-join, because judge is so difficult when DreamCruise
            }
        });
        portCB.getSqlClause().reflectWhereUsedToJoin(usedAliasInfo);
    }

    // ===================================================================================
    //                                                                        Theme Column
    //                                                                        ============
    // -----------------------------------------------------
    //                                          Every Column
    //                                          ------------
    /**
     * Specify every column of the table (except primary keys when used for
     * specified-update). Mutually exclusive with plain column specification.
     */
    protected void doEveryColumn() {
        if (hasSpecifiedColumn()) {
            throwSpecifyEveryColumnAlreadySpecifiedColumnException();
        }
        callQuery();
        final boolean specifiedUpdateUse = isSpecifiedUpdateUse();
        final List<ColumnInfo> columnInfoList = getColumnInfoList();
        for (ColumnInfo columnInfo : columnInfoList) {
            // primary key specification in BatchUpdate is not allowed
            if (!(specifiedUpdateUse && columnInfo.isPrimary())) {
                doColumn(columnInfo.getColumnDbName());
            }
        }
        _alreadySpecifiedEveryColumn = true;
    }

    public boolean isSpecifiedEveryColumn() { // for e.g. UpdateOption's check
        return _alreadySpecifiedEveryColumn;
    }

    // -----------------------------------------------------
    //                                         Except Column
    //                                         -------------
    /**
     * Specify all columns except record-meta ones (common columns and the
     * optimistic-lock column), and except primary keys for specified-update.
     */
    protected void doExceptRecordMetaColumn() {
        if (hasSpecifiedColumn()) {
            throwSpecifyExceptColumnAlreadySpecifiedColumnException();
        }
        callQuery();
        final boolean specifiedUpdateUse = isSpecifiedUpdateUse();
        final List<ColumnInfo> columnInfoList = getColumnInfoList();
        for (ColumnInfo columnInfo : columnInfoList) {
            // this specification in BatchUpdate is non-sense but just in case
            if (!isRecordMetaColumn(columnInfo) && !(specifiedUpdateUse && columnInfo.isPrimary())) {
                doColumn(columnInfo.getColumnDbName());
            }
        }
        _alreadySpecifiedExceptColumn = true;
    }

    public boolean isSpecifiedExceptColumn() { // for e.g. UpdateOption's check
        return _alreadySpecifiedExceptColumn;
    }

    protected boolean isRecordMetaColumn(ColumnInfo columnInfo) {
        return columnInfo.isCommonColumn() || columnInfo.isOptimisticLock();
    }

    // -----------------------------------------------------
    //                                         Assist Helper
    //                                         -------------
    protected List<ColumnInfo> getColumnInfoList() {
        final String tableDbName = _query.asTableDbName();
        final DBMeta dbmeta = _dbmetaProvider.provideDBMeta(tableDbName);
        return dbmeta.getColumnInfoList();
    }

    protected boolean isSpecifiedUpdateUse() {
        return HpCBPurpose.SPECIFIED_UPDATE.equals(_purpose);
    }

    // ===================================================================================
    //                                                                      Purpose Assert
    //                                                                      ==============
    // Enforces the purpose-dependent restrictions before a column is specified.
    protected void assertColumn(String columnName) {
        if (_purpose.isNoSpecifyColumnTwoOrMore()) {
            if (_specifiedColumnMap != null && _specifiedColumnMap.size() > 0) {
                throwSpecifyColumnTwoOrMoreColumnException(columnName);
            }
            // no specification is checked at an other timing
        }
        if (_purpose.isNoSpecifyColumnWithDerivedReferrer()) {
            if (hasDerivedReferrer()) {
                throwSpecifyColumnWithDerivedReferrerException(columnName, null);
            }
        }
        if (isNormalUse()) { // only normal purpose needs
            if (_query == null && !qyCall().has()) { // setupSelect check!
                throwSpecifyColumnNotSetupSelectColumnException(columnName);
            }
        }
    }

    protected void assertRelation(String relationName) {
        if (_purpose.isNoSpecifyRelation()) {
            throwSpecifyRelationIllegalPurposeException(relationName);
        }
    }

    protected void assertDerived(String referrerName) {
        if (_purpose.isNoSpecifyDerivedReferrer()) {
            throwSpecifyDerivedReferrerIllegalPurposeException(referrerName);
        }
        if (_purpose.isNoSpecifyDerivedReferrerTwoOrMore()) {
            if (hasDerivedReferrer()) {
                throwSpecifyDerivedReferrerTwoOrMoreException(referrerName);
            }
        }
        if (_purpose.isNoSpecifyColumnWithDerivedReferrer()) {
            if (_specifiedColumnMap != null && _specifiedColumnMap.size() > 0) {
                throwSpecifyColumnWithDerivedReferrerException(null, referrerName);
            }
        }
    }

    protected boolean isNormalUse() {
        return HpCBPurpose.NORMAL_USE.equals(_purpose);
    }

    // ===================================================================================
    //                                                                       Determination
    //                                                                       =============
    public boolean isAlreadySpecifiedRequiredColumn() {
        return _alreadySpecifiedRequiredColumn;
    }

    protected boolean hasDerivedReferrer() {
        return !_baseCB.getSqlClause().getSpecifiedDerivingAliasList().isEmpty();
    }

    // ===================================================================================
    //                                                                     QyCall Handling
    //                                                                     ===============
    public HpSpQyCall<CQ> xsyncQyCall() { // synchronize Query(Relation)
        return _syncQyCall;
    }

    public void xsetSyncQyCall(HpSpQyCall<CQ> qyCall) {
        _syncQyCall = qyCall;
    }

    public boolean xhasSyncQyCall() {
        return _syncQyCall != null;
    }

    protected <MYCQ extends ConditionQuery> HpSpQyCall<MYCQ> xcreateSpQyCall(HpSpQyHas<MYCQ> has, HpSpQyQy<MYCQ> qy) {
        // might be called as relation
        return new HpSpQyDelegatingCall<MYCQ>(has, qy);
    }

    // ===================================================================================
    //                                                                  Exception Throwing
    //                                                                  ==================
    protected void throwSpecifyColumnTwoOrMoreColumnException(String columnName) {
        createCBExThrower().throwSpecifyColumnTwoOrMoreColumnException(_purpose, _baseCB, columnName);
    }

    protected void throwSpecifyColumnNotSetupSelectColumnException(String columnName) {
        createCBExThrower().throwSpecifyColumnNotSetupSelectColumnException(_baseCB, columnName);
    }

    protected void throwSpecifyColumnWithDerivedReferrerException(String columnName, String referrerName) {
        createCBExThrower().throwSpecifyColumnWithDerivedReferrerException(_purpose, _baseCB, columnName, referrerName);
    }

    protected void throwSpecifyColumnAlreadySpecifiedEveryColumnException(String columnName) {
        final String tableDbName = _baseCB.asTableDbName();
        createCBExThrower().throwSpecifyColumnAlreadySpecifiedEveryColumnException(tableDbName, columnName);
    }

    protected void throwSpecifyColumnAlreadySpecifiedExceptColumnException(String columnName) {
        final String tableDbName = _baseCB.asTableDbName();
        createCBExThrower().throwSpecifyColumnAlreadySpecifiedExceptColumnException(tableDbName, columnName);
    }

    protected void throwSpecifyEveryColumnAlreadySpecifiedColumnException() {
        final String tableDbName = _baseCB.asTableDbName();
        createCBExThrower().throwSpecifyEveryColumnAlreadySpecifiedColumnException(tableDbName, _specifiedColumnMap);
    }

    protected void throwSpecifyExceptColumnAlreadySpecifiedColumnException() {
        final String tableDbName = _baseCB.asTableDbName();
        createCBExThrower().throwSpecifyExceptColumnAlreadySpecifiedColumnException(tableDbName, _specifiedColumnMap);
    }

    protected void throwSpecifyRelationIllegalPurposeException(String relationName) {
        createCBExThrower().throwSpecifyRelationIllegalPurposeException(_purpose, _baseCB, relationName);
    }

    protected void throwSpecifyDerivedReferrerIllegalPurposeException(String referrerName) {
        createCBExThrower().throwSpecifyDerivedReferrerIllegalPurposeException(_purpose, _baseCB, referrerName);
    }

    protected void throwSpecifyDerivedReferrerTwoOrMoreException(String referrerName) {
        createCBExThrower().throwSpecifyDerivedReferrerTwoOrMoreException(_purpose, _baseCB, referrerName);
    }

    // ===================================================================================
    //                                                                    Derived Referrer
    //                                                                    ================
    // creator for sub-class
    @SuppressWarnings("unchecked")
    protected <FUNC extends HpSDRFunction<REFERRER_CB, LOCAL_CQ>, REFERRER_CB extends ConditionBean, LOCAL_CQ extends ConditionQuery> FUNC cHSDRF(
            ConditionBean baseCB, LOCAL_CQ localCQ, HpSDRSetupper<REFERRER_CB, LOCAL_CQ> querySetupper, DBMetaProvider dbmetaProvider) {
        // might be database dependency so cast it
        return (FUNC) newSDRFunction(baseCB, localCQ, querySetupper, dbmetaProvider, _sdrFuncFactory);
    }

    protected <REFERRER_CB extends ConditionBean, LOCAL_CQ extends ConditionQuery> HpSDRFunction<REFERRER_CB, LOCAL_CQ> newSDRFunction(
            ConditionBean baseCB, LOCAL_CQ localCQ, HpSDRSetupper<REFERRER_CB, LOCAL_CQ> querySetupper, DBMetaProvider dbmetaProvider,
            HpSDRFunctionFactory sdrOpFactory) {
        // NOTE(review): the 'sdrOpFactory' parameter is unused here; the field
        // _sdrFuncFactory is used instead — presumably intentional, confirm.
        return _sdrFuncFactory.create(baseCB, localCQ, querySetupper, dbmetaProvider);
    }

    public HpSDRFunctionFactory xgetSDRFnFc() { // to put to relation specification
        return _sdrFuncFactory;
    }

    // ===================================================================================
    //                                                                    Exception Helper
    //                                                                    ================
    protected ConditionBeanExceptionThrower createCBExThrower() {
        return new ConditionBeanExceptionThrower();
    }

    // ===================================================================================
    //                                                                      General Helper
    //                                                                      ==============
    protected String ln() {
        return DBFluteSystem.ln();
    }
}
def process_image(input_image, color_window_width, color_level_center, viewer_size):
    """Apply window/level contrast mapping to a 2-D image (list of rows).

    Pixels below the window are clamped to 0, pixels above it to 255, and
    pixels inside the window are scaled linearly into 0..255.

    Note: ``viewer_size`` is accepted but not used by this function.
    """
    lower_bound = color_level_center - 0.5 * color_window_width
    upper_bound = color_level_center + 0.5 * color_window_width

    def _map_pixel(pixel):
        # Clamp outside the window, scale linearly inside it.
        if pixel < lower_bound:
            return 0
        if pixel > upper_bound:
            return 255
        return int((pixel - lower_bound) / color_window_width * 255)

    return [[_map_pixel(pixel) for pixel in row] for row in input_image]
nvimConfigPath="$HOME/.config/nvim/"
uname=$(whoami)

# setup config nvim file
echo '##################################################'
echo '# Setup vim'
echo '##################################################'

# Substitute the literal placeholder "username" in the template with the
# current user's login name.
sed -i -e "s/username/$uname/" ./vim/init.vim
# Quote path expansions so paths containing spaces (e.g. under $HOME) work.
mkdir -p "$nvimConfigPath"

# Install dein.vim (plugin manager), then run an initial plugin install.
curl https://raw.githubusercontent.com/Shougo/dein.vim/master/bin/installer.sh > installer.sh
sh ./installer.sh "$HOME/.vim/dein"
nvim '+call dein#install()' '+qall'

cp ./vim/init.vim "$nvimConfigPath"
|
# The path to where your personal configs will be kept
export LLOCAL_USER="${LLOCAL_DIR}/user"
package com.globalcollect.gateway.sdk.java.gc.payment.definitions;
public class CashPaymentProduct1503SpecificInput {
private String returnUrl = null;
public String getReturnUrl() {
return returnUrl;
}
public void setReturnUrl(String value) {
this.returnUrl = value;
}
}
|
<filename>polyfills/Reflect/setPrototypeOf/polyfill.js
/* global CreateMethodProperty, Reflect, Type, */
// 26.1.13 Reflect.setPrototypeOf ( target, proto )
// Polyfill: unlike Object.setPrototypeOf, Reflect.setPrototypeOf reports a
// failed set by returning false instead of throwing.
CreateMethodProperty(Reflect, 'setPrototypeOf', function setPrototypeOf(target, proto) {
	// 1. If Type(target) is not Object, throw a TypeError exception.
	if (Type(target) !== "object") {
		throw new TypeError(Object.prototype.toString.call(target) + ' is not an Object');
	}
	// 2. If Type(proto) is not Object and proto is not null, throw a TypeError exception.
	if (Type(proto) !== "object" && proto !== null) {
		throw new TypeError(Object.prototype.toString.call(proto) + ' is not an Object or null');
	}
	// An object cannot become its own prototype (prototype chain cycle),
	// so report failure without attempting the set.
	if (target === proto) {
		return false;
	}
	// 3. Return ? target.[[SetPrototypeOf]](proto).
	try {
		Object.setPrototypeOf(target, proto);
		// Verify the set actually took effect before reporting success.
		return Reflect.getPrototypeOf(target) === proto;
	} catch(_) {
		// Object.setPrototypeOf threw (e.g. immutable prototype / missing
		// support); Reflect's contract is to return false, not throw.
		return false;
	}
});
|
<reponame>despo/apply-for-teacher-training
require 'rails_helper'

# Checks that the support-interface applications table labels an "apply again"
# (phase 'apply_2') application with its recruitment cycle and phase.
RSpec.describe SupportInterface::ApplicationsTableComponent do
  let(:application_form_apply_again) { create(:application_form, updated_at: 1.day.ago, phase: 'apply_2') }
  # Four forms total: the apply-again form first, then three default-phase forms.
  let(:application_forms) { [application_form_apply_again] + create_list(:application_form, 3, updated_at: 1.day.ago) }

  it 'renders the apply again text for the first application', recruitment_cycle: 2020 do
    expect(render_result.text).to include('(2020, apply again)')
  end

  # Helper: renders the component under test with the forms defined above.
  def render_result
    render_inline(described_class.new(application_forms: application_forms))
  end
end
|
#!/bin/sh
# WARNING: rm -rf will be called on this dir, check twice!!
# use absolute paths
source_html_dir='/home/jon/projects/code/vulntracker/python/vulntracker/source_html/'
source_dir='/home/jon/projects/code/vulntracker/python/vulntracker/'
# remove current html files
#find . -iname "*html"|xargs rm
if [ ! "$source_html_dir" ];
then
	echo "Please define source_html_dir\n";
else
	rm -rf "${source_html_dir}"/*;
fi
if [ ! -d "$source_html_dir" ];
then
	mkdir "$source_html_dir";
fi;
# create new html files
for n in `find . -iname "*py"`; do python /usr/local/bin/lpy.py "$n"; done
# create correct dir structure in source_html_dir and move files
for html_file in `find . -iname "*.html"`;
do
	mydir=`dirname "$html_file"`;
	# Fix: POSIX test(1) uses '=' for string comparison; '==' is a bashism
	# that fails under /bin/sh on dash-based systems.
	if [ "$mydir" = '.' ];
	then
		mydir='';
	else
		mydir=`echo "$mydir" | sed 's#\./##'`;
	fi
	mynewdir=${source_html_dir}/$mydir;
	if [ ! -d "$mynewdir" ];
	then
		mkdir -p "$mynewdir";
	fi
	# apparently apache is set up on dreamhost to process .py files even if they have .html extension
	# only mv if *py.html, otherwise cp (template file)
	myfile=`basename "$html_file"`;
	if test `echo "$myfile" | grep "\.py\.html"`;
	then
		myfile=`echo "$myfile" | sed 's#\.py#_py#'`;
		mv "$html_file" "${mynewdir}/$myfile";
	else
		cp "$html_file" "$mynewdir";
	fi
done
cp source_html.sh "${source_html_dir}/source_html.sh.html"
sed -i "${source_html_dir}/source_html.sh.html" -e 's#$#<br />#'
|
<reponame>fabianklonsdorf/ixhh<gh_stars>0
'use strict';

// "Renew" icon descriptor: a 32x32 SVG with a single path element.
const renewIconPath =
  'M12 10H6.78A11 11 0 0 1 27 16h2A13 13 0 0 0 6 7.68V4H4v8h8zm8 12h5.22A11 11 0 0 1 5 16H3a13 13 0 0 0 23 8.32V28h2v-8h-8z';

const _32 = {
  elem: 'svg',
  attrs: {
    xmlns: 'http://www.w3.org/2000/svg',
    viewBox: '0 0 32 32',
    width: 32,
    height: 32,
  },
  content: [
    {
      elem: 'path',
      attrs: {
        d: renewIconPath,
      },
    },
  ],
  name: 'renew',
  size: 32,
};

module.exports = _32;
|
#!/bin/bash
# NOTE(review): '-o' expects an option name; as written this likely does not
# enable what was intended (perhaps 'set -e -x' or 'set -e -o xtrace') — confirm.
set -e -o -x
# print uid/gid for CI diagnostics
id
SCRIPT_DIR="$( dirname "${BASH_SOURCE[0]}" )"
YOCTO_VERSION="4.19"
# -d device, -x extra build args, -o target OS, -y yocto version
while getopts d:x:o:y: parameter_Option
do case "${parameter_Option}"
in
d) BUILD_DEVICE=${OPTARG};;
x) BUILD_EXTR_PAR=${OPTARG};;
o) BUILD_OS=${OPTARG};;
# YOCTO 4.19 + ACL 19.05, YOCTO 4.14 + ACL 19.02
y) YOCTO_VERSION=${OPTARG};;
esac
done
export PATH=$PATH:/usr/local/gradle/bin
if [ $BUILD_OS = "android" ]; then
    # Cross-compile for Android (arm64-v8a) with the NDK toolchain;
    # optionally enable the NNAPI execution provider.
    pushd /onnxruntime_src
    mkdir build-android && cd build-android
    if [ $BUILD_DEVICE = "nnapi" ]; then
        cmake -DCMAKE_TOOLCHAIN_FILE=/android-ndk/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DONNX_CUSTOM_PROTOC_EXECUTABLE=/usr/bin/protoc -Donnxruntime_USE_NNAPI=ON ../cmake
    else
        cmake -DCMAKE_TOOLCHAIN_FILE=/android-ndk/build/cmake/android.toolchain.cmake -DANDROID_ABI=arm64-v8a -DONNX_CUSTOM_PROTOC_EXECUTABLE=/usr/bin/protoc ../cmake
    fi
    make -j$(nproc)
elif [ $BUILD_OS = "yocto" ]; then
    # Cross-compile with the fsl-imx-xwayland Yocto SDK (aarch64, ACL enabled).
    YOCTO_FOLDER="4.19-warrior"
    if [ $YOCTO_VERSION = "4.14" ]; then
        YOCTO_FOLDER="4.14-sumo"
    fi
    pushd /onnxruntime_src
    if [ ! -d build ]; then
        mkdir build
    fi
    cd build
    # Source the SDK environment, then force cmake through the OE toolchain file.
    . /opt/fsl-imx-xwayland/$YOCTO_FOLDER/environment-setup-aarch64-poky-linux
    alias cmake="/usr/bin/cmake -DCMAKE_TOOLCHAIN_FILE=$OECORE_NATIVE_SYSROOT/usr/share/cmake/OEToolchainConfig.cmake"
    cmake ../cmake -Donnxruntime_RUN_ONNX_TESTS=OFF -Donnxruntime_GENERATE_TEST_REPORTS=ON -Donnxruntime_DEV_MODE=ON -DPYTHON_EXECUTABLE=/usr/bin/python3 -Donnxruntime_USE_CUDA=OFF -Donnxruntime_USE_NSYNC=OFF -Donnxruntime_CUDNN_HOME= -Donnxruntime_USE_JEMALLOC=OFF -Donnxruntime_ENABLE_PYTHON=OFF -Donnxruntime_BUILD_CSHARP=OFF -Donnxruntime_USE_EIGEN_FOR_BLAS=ON -Donnxruntime_USE_OPENBLAS=OFF -Donnxruntime_USE_ACL=ON -Donnxruntime_USE_MKLDNN=OFF -Donnxruntime_USE_MKLML=OFF -Donnxruntime_USE_OPENMP=ON -Donnxruntime_USE_TVM=OFF -Donnxruntime_USE_LLVM=OFF -Donnxruntime_ENABLE_MICROSOFT_INTERNAL=OFF -Donnxruntime_USE_BRAINSLICE=OFF -Donnxruntime_USE_NUPHAR=OFF -Donnxruntime_USE_EIGEN_THREADPOOL=OFF -Donnxruntime_BUILD_UNIT_TESTS=ON -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES:PATH=/opt/fsl-imx-xwayland/$YOCTO_FOLDER/sysroots/aarch64-poky-linux/usr/include -DCMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES:PATH=/opt/fsl-imx-xwayland/$YOCTO_FOLDER/sysroots/aarch64-poky-linux/usr/include -DONNX_CUSTOM_PROTOC_EXECUTABLE=/usr/bin/protoc
    make -j$(nproc)
else
    # Native Linux builds go through build.py with shared arguments.
    COMMON_BUILD_ARGS="--skip_submodule_sync --enable_onnx_tests --parallel --build_shared_lib --cmake_path /usr/bin/cmake --ctest_path /usr/bin/ctest"
    # For the nocontribops pipeline we don't need openmp as it is used by the Edge browser team and
    # (going forward) the vscode team. Both these teams don't want their users to install any external dependency to use
    # ORT.
    if [[ $BUILD_EXTR_PAR != *--disable_contrib_ops* ]]; then
        COMMON_BUILD_ARGS="${COMMON_BUILD_ARGS} --use_openmp "
    fi
    if [ $BUILD_OS = "manylinux2010" ]; then
        # FindPython3 does not work on manylinux2010 image, define things manually
        # ask python where to find includes
        COMMON_BUILD_ARGS="${COMMON_BUILD_ARGS} --cmake_extra_defines PYTHON_INCLUDE_DIR=$(python3 -c 'import distutils.sysconfig; print(distutils.sysconfig.get_python_inc())')"
        # Python does not provide a shared library on manylinux, use another library
        COMMON_BUILD_ARGS="${COMMON_BUILD_ARGS} PYTHON_LIBRARY=/usr/lib64/librt.so"
    fi
    if [ $BUILD_DEVICE = "gpu" ]; then
        if [ $BUILD_OS = "manylinux2010" ]; then
            python3 $SCRIPT_DIR/../../build.py --build_dir /build \
                --config Debug Release $COMMON_BUILD_ARGS \
                --use_cuda \
                --cuda_home /usr/local/cuda \
                --cudnn_home /usr/local/cuda $BUILD_EXTR_PAR
        else
            # cuDNN lives in a version-suffixed directory outside manylinux.
            _CUDNN_VERSION=$(echo $CUDNN_VERSION | cut -d. -f1-2)
            python3 $SCRIPT_DIR/../../build.py --build_dir /build \
                --config Debug Release $COMMON_BUILD_ARGS \
                --use_cuda \
                --cuda_home /usr/local/cuda \
                --cudnn_home /usr/local/cudnn-$_CUDNN_VERSION/cuda $BUILD_EXTR_PAR
        fi
    elif [ $BUILD_DEVICE = "tensorrt" ]; then
        _CUDNN_VERSION=$(echo $CUDNN_VERSION | cut -d. -f1-2)
        python3 $SCRIPT_DIR/../../build.py --build_dir /build \
            --config Release $COMMON_BUILD_ARGS \
            --use_tensorrt --tensorrt_home /workspace/tensorrt \
            --cuda_home /usr/local/cuda \
            --cudnn_home /usr/local/cuda $BUILD_EXTR_PAR
    else #cpu, ngraph and openvino
        export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
        python3 $SCRIPT_DIR/../../build.py --build_dir /build \
            --config Debug Release $COMMON_BUILD_ARGS $BUILD_EXTR_PAR
    fi
fi
|
<filename>public/content.js
/* global chrome */
// Event names used to talk to the injected page script; namespaced with the
// extension id so they cannot collide with the page's own events.
const evtToPage = chrome.runtime.id;
const evtFromPage = chrome.runtime.id + '-response';

chrome.runtime.onMessage.addListener((request, sender, sendResponse) => {
  // Fix: strict equality instead of loose `==`.
  if (request.message === 'requestInfo') {
    // Wait (exactly once) for the page script's response event, then forward
    // the parsed game state back to the extension.
    addEventListener(evtFromPage, (e) => {
      sendResponse({
        playerPai: JSON.parse(e.detail.playerPai),
        gameState: JSON.parse(e.detail.gameState),
        playerState: JSON.parse(e.detail.allPlayerState),
        doraState: JSON.parse(e.detail.doraState)
      });
    }, { once: true });
    dispatchEvent(new Event(evtToPage));
    return true; // keep the sendResponse channel open for the async reply
  }
});

// Inject page.js into the page context so it can read page-level JS state;
// pass the event names via a data attribute.
const script = document.createElement('script');
script.src = chrome.runtime.getURL('page.js');
script.dataset.args = JSON.stringify({ evtToPage, evtFromPage });
document.documentElement.appendChild(script);
<gh_stars>0
package com.finitess.code.crypto;
import org.junit.jupiter.api.Test;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Unit tests for {@code SignatureService}: sign/verify round trip and
 * rejection of an invalid signature.
 */
public class SignatureServiceTest {

    @Test
    public void testSignatureService_positiveFlow() throws Exception {
        final SignatureService signatureService = new SignatureService(2048, "RSA");
        final String plainText = "Hello signed world!!!";

        final byte[] signature = signatureService.sign(plainText);

        // A signature produced by the service must verify against the same message.
        assertTrue(signatureService.verify(plainText, signature));
    }

    @Test
    public void whenVerifyingIncorrectMessage_shouldFailVerification() throws Exception {
        final SignatureService signatureService = new SignatureService(2048, "RSA");
        final String plainText = "Hello signed world!!!";

        // 256 bytes of filler that is clearly not a valid signature for the message.
        final String bogusSignature = Stream.generate(() -> "a").limit(256).collect(Collectors.joining());

        assertFalse(signatureService.verify(plainText, bogusSignature.getBytes()));
    }
}
|
import * as numjs from 'numjs';
import { IDNA } from "./ElementInterface";
import { mapping, magnitude } from '../utils/math';
type Vector = nj.NdArray;
export default class VectorDNA implements IDNA<Vector> {
genes: Vector[] = [];
constructor(public lifetime: number, public maxForce: number) {
for (let i = 0; i < lifetime; i++) {
const randomAngle = mapping(Math.random(), 0, 1, 0, Math.PI * 2);
const newVector: Vector = numjs.array([
Math.cos(randomAngle),
Math.sin(randomAngle)
]);
this.genes.push(newVector.multiply(maxForce));
}
}
crossover(partner: VectorDNA): VectorDNA {
const newDna = new VectorDNA(this.lifetime, this.maxForce);
for (let i = 0; i < newDna.genes.length; i++) {
newDna.genes[i] = Math.random() > 0.5 ? this.genes[i] : partner.genes[i];
}
return newDna;
}
mutate(mutationRate: number): void {
if (mutationRate < 0 || mutationRate > 1) {
throw new Error('Mutation rate must between 0 and 1');
}
for (let i = 0; i < this.genes.length; i++) {
if (Math.random() < mutationRate) {
const randomAngle = mapping(Math.random(), 0, 1, 0, Math.PI * 2);
const newVector: Vector = numjs.array([
Math.cos(randomAngle),
Math.sin(randomAngle)
]);
this.genes[i] = newVector.multiply(this.maxForce);
}
}
}
isIdenticalTo(partner: VectorDNA): boolean {
let result: number = 0;
this.genes.forEach((gene: Vector, i: number) => {
const distance = magnitude(gene.subtract(partner.genes[i]));
result += distance;
if (result > 0) {
return;
}
});
return result === 0;
}
} |
#!/bin/bash -e
#
# Copyright (c) 2018 SAP SE or an SAP affiliate company. All rights reserved. This file is licensed under the Apache Software License, v. 2 except as noted otherwise in the LICENSE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Emit a log line prefixed with the current UTC timestamp.
function log() {
    local timestamp
    timestamp="$(date -u)"
    echo "[${timestamp}]: $*"
}
# Exit cleanly on termination signals.
trap 'exit' TERM SIGINT

# Tunables, all overridable via environment variables.
service_name="${SERVICE_NAME:-vpn-shoot}"
openvpn_port="${OPENVPN_PORT:-1194}"
tcp_keepalive_time="${TCP_KEEPALIVE_TIME:-7200}"
tcp_keepalive_intvl="${TCP_KEEPALIVE_INTVL:-75}"
tcp_keepalive_probes="${TCP_KEEPALIVE_PROBES:-9}"
tcp_retries2="${TCP_RETRIES2:-5}"

# Authentication mode used when querying the kube-apiserver for the
# vpn-shoot service: HTTP basic auth (default) or TLS client certificates.
APISERVER_AUTH_MODE="${APISERVER_AUTH_MODE:-basic-auth}"
APISERVER_AUTH_MODE_BASIC_AUTH_CSV="${APISERVER_AUTH_MODE_BASIC_AUTH_CSV:-/srv/auth/basic_auth.csv}"
APISERVER_AUTH_MODE_BASIC_AUTH_USERNAME="${APISERVER_AUTH_MODE_BASIC_AUTH_USERNAME:-admin}"
APISERVER_AUTH_MODE_CLIENT_CERT_CA="${APISERVER_AUTH_MODE_CLIENT_CERT_CA:-/srv/secrets/vpn-seed/ca.crt}"
APISERVER_AUTH_MODE_CLIENT_CERT_CRT="${APISERVER_AUTH_MODE_CLIENT_CERT_CRT:-/srv/secrets/vpn-seed/tls.crt}"
APISERVER_AUTH_MODE_CLIENT_CERT_KEY="${APISERVER_AUTH_MODE_CLIENT_CERT_KEY:-/srv/secrets/vpn-seed/tls.key}"
# Print the host for apiserver requests: localhost when MAIN_VPN_SEED is
# set, otherwise the in-cluster kube-apiserver service name.
function get_host() {
    if [[ -n "$MAIN_VPN_SEED" ]]; then
        echo "127.0.0.1"
    else
        echo "kube-apiserver"
    fi
}
# Discover the external endpoint (load-balancer IP or hostname) of the
# vpn-shoot service by querying the kube-apiserver, then store the first
# candidate with an open OpenVPN port in the global ENDPOINT.
# ENDPOINT is left empty when nothing usable is found.
function identify_endpoint() {
    log "trying to identify the endpoint (load balancer name of $service_name service) myself..."

    # Assemble curl flags for the configured authentication mode.
    curl_auth_flags=""
    if [[ "$APISERVER_AUTH_MODE" == "basic-auth" ]]; then
        # The first CSV column of the basic-auth file is the password.
        curl_auth_flags="--insecure --user ${APISERVER_AUTH_MODE_BASIC_AUTH_USERNAME}:$(cat ${APISERVER_AUTH_MODE_BASIC_AUTH_CSV} | sed -E 's/^([^,]*),.*$/\1/')"
    elif [[ "$APISERVER_AUTH_MODE" == "client-cert" ]]; then
        curl_auth_flags="--cacert $APISERVER_AUTH_MODE_CLIENT_CERT_CA --cert $APISERVER_AUTH_MODE_CLIENT_CERT_CRT --key $APISERVER_AUTH_MODE_CLIENT_CERT_KEY"
    fi

    # The apiserver may not be reachable yet — tolerate curl failures here
    # instead of aborting the script (it runs with -e).
    set +e
    SERVICE_STATUS="$(curl \
        --connect-timeout 5 \
        --max-time 5 \
        --silent \
        $curl_auth_flags \
        --header "Accept: application/json" \
        --request GET \
        "https://$(get_host)/api/v1/namespaces/kube-system/services/$service_name")"
    # Extract every ingress IP (falling back to hostname) from
    # .status.loadBalancer, yielding one endpoint per line (or nothing).
    ENDPOINTS="$(echo "$SERVICE_STATUS" | jq -r 'if (.status | type) == "object" and (.status.loadBalancer | type) == "object" and (.status.loadBalancer.ingress | type) == "array" and (.status.loadBalancer.ingress | length) > 0 then .status.loadBalancer.ingress | map(if(. | has("ip")) then .ip else .hostname end) | .[] else empty end')"
    set -e

    ENDPOINT=""
    if [[ -z "$ENDPOINTS" || "$ENDPOINTS" == "null" ]]; then
        log "error: could not identify any endpoints"
        return
    fi
    log "found endpoints: [ $(echo $ENDPOINTS | tr "\n" " ")]"

    # Probe each candidate and use the first one accepting TCP connections
    # on the OpenVPN port.
    for endpoint in $ENDPOINTS; do
        log "checking whether port ${openvpn_port} is open on $endpoint ..."
        if ! nc -z -v -w 3 "$endpoint" "${openvpn_port}" &> /dev/null; then
            log "error: port ${openvpn_port} on $endpoint is not open, can not use it"
        else
            log "port ${openvpn_port} on $endpoint is open, using it"
            ENDPOINT="$endpoint"
            return
        fi
    done
}
# Write value $2 into file $1, but only if the file exists — not every
# kernel exposes every /proc tunable.
function set_value() {
    # Fixed: $1/$2 were used unquoted, which breaks on values containing
    # whitespace or glob characters; use quoted locals instead.
    local file="$1"
    local value="$2"
    if [ -f "$file" ]; then
        log "Setting $value on $file"
        echo "$value" > "$file"
    fi
}
# Apply the TCP keepalive/retry tunables collected from the environment
# to the kernel via /proc (no-ops for tunables the kernel lacks).
function configure_tcp() {
    local ipv4=/proc/sys/net/ipv4
    # Fixed: arguments are now quoted so unusual values survive word
    # splitting on their way into set_value.
    set_value "$ipv4/tcp_keepalive_time"   "$tcp_keepalive_time"
    set_value "$ipv4/tcp_keepalive_intvl"  "$tcp_keepalive_intvl"
    set_value "$ipv4/tcp_keepalive_probes" "$tcp_keepalive_probes"
    set_value "$ipv4/tcp_retries2"         "$tcp_retries2"
}
configure_tcp

# For each network CIDR, precedence is: environment variable, then a local
# file under /init-config (which may be a volume mount), then a default.
baseConfigDir="/init-config"
fileServiceNetwork=
filePodNetwork=
fileNodeNetwork=
[ -e "${baseConfigDir}/serviceNetwork" ] && fileServiceNetwork=$(cat ${baseConfigDir}/serviceNetwork)
[ -e "${baseConfigDir}/podNetwork" ] && filePodNetwork=$(cat ${baseConfigDir}/podNetwork)
[ -e "${baseConfigDir}/nodeNetwork" ] && fileNodeNetwork=$(cat ${baseConfigDir}/nodeNetwork)

service_network="${SERVICE_NETWORK:-${fileServiceNetwork}}"
service_network="${service_network:-100.64.0.0/13}"
pod_network="${POD_NETWORK:-${filePodNetwork}}"
pod_network="${pod_network:-100.96.0.0/11}"
# The node network is optional; empty means "no node routes accepted".
node_network="${NODE_NETWORK:-${fileNodeNetwork}}"
node_network="${node_network:-}"
# calculate netmask for given CIDR (required by openvpn)
CIDR2Netmask() {
local cidr="$1"
local ip=$(echo $cidr | cut -f1 -d/)
local numon=$(echo $cidr | cut -f2 -d/)
local numoff=$(( 32 - $numon ))
while [ "$numon" -ne "0" ]; do
start=1${start}
numon=$(( $numon - 1 ))
done
while [ "$numoff" -ne "0" ]; do
end=0${end}
numoff=$(( $numoff - 1 ))
done
local bitstring=$start$end
bitmask=$(echo "obase=16 ; $(( 2#$bitstring )) " | bc | sed 's/.\{2\}/& /g')
for t in $bitmask ; do
str=$str.$((16#$t))
done
echo $str | cut -f2- -d\.
}
# Split each CIDR into address + netmask and render the OpenVPN client
# configuration from its template.
service_network_address=$(echo $service_network | cut -f1 -d/)
service_network_netmask=$(CIDR2Netmask $service_network)
pod_network_address=$(echo $pod_network | cut -f1 -d/)
pod_network_netmask=$(CIDR2Netmask $pod_network)
sed -e "s/\${SERVICE_NETWORK_ADDRESS}/${service_network_address}/" \
    -e "s/\${SERVICE_NETWORK_NETMASK}/${service_network_netmask}/" \
    -e "s/\${POD_NETWORK_ADDRESS}/${pod_network_address}/" \
    -e "s/\${POD_NETWORK_NETMASK}/${pod_network_netmask}/" \
    openvpn.config.template > openvpn.config

# node_network may be a bracketed, comma-separated list of CIDRs; accept a
# pushed route for each of them.
if [[ ! -z "$node_network" ]]; then
    for n in $(echo $node_network | sed 's/[][]//g' | sed 's/,/ /g')
    do
        node_network_address=$(echo $n | cut -f1 -d/)
        node_network_netmask=$(CIDR2Netmask $n)
        echo "pull-filter accept \"route ${node_network_address} ${node_network_netmask}\"" >> openvpn.config
    done
fi

# Accept the VPN-internal 192.168.123.* route, then ignore every other
# pushed route and any redirect-gateway directive (v4 and v6).
echo "pull-filter accept \"route 192.168.123.\"" >> openvpn.config
echo "pull-filter ignore \"route\"" >> openvpn.config
echo "pull-filter ignore redirect-gateway" >> openvpn.config
echo "pull-filter ignore route-ipv6" >> openvpn.config
echo "pull-filter ignore redirect-gateway-ipv6" >> openvpn.config
# Main loop: (re-)discover the tunnel endpoint and run openvpn against it;
# whenever openvpn exits — or no endpoint exists yet — retry after 5s.
while : ; do
    # identify_endpoint may get an invalid endpoint, need
    # to make sure openvpn is able to pick up the correct
    # one once it has been registered
    identify_endpoint
    if [[ ! -z $ENDPOINT ]]; then
        openvpn --remote ${ENDPOINT} --port ${openvpn_port} --config openvpn.config
    else
        log "No tunnel endpoint found"
    fi
    sleep 5
done
|
<reponame>pradeep-gr/mbed-os5-onsemi
/*
* Copyright (c) 2016, ARM Limited, All Rights Reserved
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __UVISOR_API_RPC_H__
#define __UVISOR_API_RPC_H__

#include "api/inc/rpc_exports.h"
#include "api/inc/uvisor_exports.h"
#include <stdint.h>
#include <stddef.h>

/** Wait for incoming RPC.
 *
 * @param fn_ptr_array an array of RPC function targets that this call to
 *                     `rpc_fncall_waitfor` should handle RPC to
 * @param fn_count the number of function targets in this array
 * @param box_id_caller[out] a memory location to store the box ID of the
 *                     calling box (the source box of the RPC). This is
 *                     set before the RPC is dispatched, so that the RPC
 *                     target function can read from this location to
 *                     determine the calling box ID. Optional.
 * @param timeout_ms specifies how long to wait (in ms) for an incoming
 *                     RPC message before returning
 * @returns NOTE(review): return semantics are not documented here —
 *                     presumably non-zero on error/timeout and zero on
 *                     success, mirroring `rpc_fncall_wait` below; confirm
 *                     against the implementation.
 */
UVISOR_EXTERN int rpc_fncall_waitfor(const TFN_Ptr fn_ptr_array[], size_t fn_count, int * box_id_caller, uint32_t timeout_ms);

/** Wait for an outgoing RPC to finish.
 *
 * Wait for the result of a previously started asynchronous RPC. After this
 * call, ret will contain the return value of the RPC. The return value of this
 * function may indicate that there was an error or a timeout with non-zero.
 *
 * @param result[in] The token to wait on for the result of an asynchronous RPC
 * @param timeout_ms[in] How long to wait (in ms) for the asynchronous RPC
 *                       message to finish before returning
 * @param ret[out] The return value resulting from the finished RPC to
 *                 the target function
 * @returns Non-zero on error or timeout, zero on successful wait
 */
UVISOR_EXTERN int rpc_fncall_wait(uvisor_rpc_result_t result, uint32_t timeout_ms, uint32_t * ret);

#endif /* __UVISOR_API_RPC_H__ */
|
from searchtweets import load_credentials
load_credentials(filename="./search_tweets_creds_example.yaml",
yaml_key="search_tweets_v2_example",
env_overwrite=False)
from searchtweets import ResultStream, gen_request_parameters
if __name__ == "__main__":
query = gen_request_parameters("snow", results_per_call=100)
print(query) |
<filename>src/common/socket.js<gh_stars>1-10
import Redis from 'socket.io-redis'
import socketio from '@feathersjs/socketio'

import client from './redis'

// One Redis connection is shared for both the pub and sub sides of the
// Socket.io adapter.
const adapter = Redis({
  pubClient: client,
  subClient: client,
})

/**
 * Socket.io setup: installs the Redis adapter and copies the request
 * referrer onto the feathers connection object for later use.
 */
function handler(io) {
  // Initializes the Redis Adapter for Socket.io
  io.adapter(adapter)

  io.on('connection', socket => {
    // socket.emit('message', 'PING')
    // socket.on('message', data => {})
  })

  io.use((socket, next) => {
    socket.feathers.referrer = socket.request.referrer
    next()
  })
}

export default socketio({ wsEngine: 'uws' }, handler)
|
#!/bin/sh
# file: simulate_vcs.sh
#
# (c) Copyright 2008 - 2011 Xilinx, Inc. All rights reserved.
#
# This file contains confidential and proprietary information
# of Xilinx, Inc. and is protected under U.S. and
# international copyright and other intellectual property
# laws.
#
# DISCLAIMER
# This disclaimer is not a license and does not grant any
# rights to the materials distributed herewith. Except as
# otherwise provided in a valid license issued to you by
# Xilinx, and to the maximum extent permitted by applicable
# law: (1) THESE MATERIALS ARE MADE AVAILABLE "AS IS" AND
# WITH ALL FAULTS, AND XILINX HEREBY DISCLAIMS ALL WARRANTIES
# AND CONDITIONS, EXPRESS, IMPLIED, OR STATUTORY, INCLUDING
# BUT NOT LIMITED TO WARRANTIES OF MERCHANTABILITY, NON-
# INFRINGEMENT, OR FITNESS FOR ANY PARTICULAR PURPOSE; and
# (2) Xilinx shall not be liable (whether in contract or tort,
# including negligence, or under any other theory of
# liability) for any loss or damage of any kind or nature
# related to, arising under or in connection with these
# materials, including for any direct, or any indirect,
# special, incidental, or consequential loss or damage
# (including loss of data, profits, goodwill, or any type of
# loss or damage suffered as a result of any action brought
# by a third party) even if such damage or loss was
# reasonably foreseeable or Xilinx had been advised of the
# possibility of the same.
#
# CRITICAL APPLICATIONS
# Xilinx products are not designed or intended to be fail-
# safe, or for use in any application requiring fail-safe
# performance, such as life-support or safety devices or
# systems, Class III medical devices, nuclear facilities,
# applications related to the deployment of airbags, or any
# other applications that could lead to death, personal
# injury, or severe property or environmental damage
# (individually and collectively, "Critical
# Applications"). Customer assumes the sole risk and
# liability of any use of Xilinx products in Critical
# Applications, subject only to applicable laws and
# regulations governing limitations on product liability.
#
# THIS COPYRIGHT NOTICE AND DISCLAIMER MUST BE RETAINED AS
# PART OF THIS FILE AT ALL TIMES.
#
# Remove artifacts of any previous VCS run (simulator binary, compiled
# sources, DVE session files and the assertion database).
rm -rf simv* csrc DVEfiles AN.DB

# Compile the testbench together with the post-route netlist.
# Note that -sverilog is not strictly required- You can
# remove the -sverilog if you change the type of the
# localparam for the periods in the testbench file to
# [63:0] from time
vlogan -sverilog \
    vga_clock_tb.v \
    ../../implement/results/routed.v

# Elaborate the design with maximum-delay SDF timing annotation and the
# Xilinx simprims primitive library.
vcs -sdf max:vga_clock_exdes:../../implement/results/routed.sdf +v2k -y $XILINX/verilog/src/simprims \
    +libext+.v -debug vga_clock_tb.v ../../implement/results/routed.v

# Run the simulation in UCLI mode, driven by the command file.
./simv -ucli -i ucli_commands.key

# launch the viewer
#dve -vpd vcdplus.vpd -session vcs_session.tcl
|
# Defaults for the VM configuration; each can be overridden via the
# like-named environment variable (see show_usage below).
DEFAULT_OS="Ubuntu_64"
DEFAULT_RAM="4096"                   # MB
DEFAULT_CPUS="2"
DEFAULT_VRAM="128"                   # MB of video memory
DEFAULT_DISK_SIZE="40960"            # MB, primary disk
DEFAULT_SDISK_SIZE="102400"          # MB, optional secondary disk
DEFAULT_DISK_FORMAT="VDI"
DEFAULT_DISK_CONTROLLER="sata"
DEFAULT_CD_CONTROLLER_NAME="IDE"
# Print invocation help (env-var options plus the machine name) to stderr
# and abort.
function show_usage() {
    cat >&2 <<USAGE
Usage:
$ [OS=osType \]
  [RAM=ram \]
  [CPUS=cpus \]
  [VRAM=vram \]
  [DISK_SIZE=diskSize \]
  [DISK_FORMAT=diskFormat \]
  [SDISK_SIZE=secondaryDiskSize \]
  [SDISK_NAME=secondaryDiskName \]
  [DISK_FORMAT=diskFormat \]
  [ISO=pathToIsoFile \]
  [TARGET_FOLDER=pathToFolder \]
  $0 machineName
USAGE
    exit;
}
# Echo $1 if non-empty, otherwise fall back to default $3. $2 is the
# parameter name, used only in the debug message (printed to stderr when
# DEBUG is set).
function get_or_set() {
    # Fixed: VALUE/PARAM/DEFAULT were global and the echoes were unquoted
    # (word splitting / globbing of defaulted values); use quoted locals.
    local value="$1"
    local param="$2"
    local default="$3"
    if [ -z "$value" ];
    then
        [ -n "$DEBUG" ] && echo >&2 "Defaulting $param to $default"
        echo "$default"
    else
        echo "$value"
    fi
}
# Require VirtualBox; derive the default machine folder from its settings.
if ! [ -h /usr/bin/vboxmanage ];
then
    echo "vboxmanage not found!";
    exit;
else
    # NOTE(review): -f13 assumes a fixed word position in the
    # "Default machine folder" output line; this breaks for folder paths
    # containing spaces — verify on the target system.
    DEFAULT__MACHINE_FOLDER=$(vboxmanage list systemproperties | grep "Default machine folder" | cut -d" " -f13)
fi

# Exactly one positional argument is expected: the machine name.
if [ $# != 1 ];
then
    show_usage
fi
NAME=$1

# Resolve every setting: an explicit env var wins, otherwise the default.
TARGET_FOLDER=$(get_or_set "$TARGET_FOLDER" "TARGET_FOLDER" "$DEFAULT__MACHINE_FOLDER/$NAME")
OS=$(get_or_set "$OS" "OS" "$DEFAULT_OS")
RAM=$(get_or_set "$RAM" "RAM" $DEFAULT_RAM)
CPUS=$(get_or_set "$CPUS" "CPUS" "$DEFAULT_CPUS")
VRAM=$(get_or_set "$VRAM" "VRAM" "$DEFAULT_VRAM")
DISK_SIZE=$(get_or_set "$DISK_SIZE" "PDISK" "$DEFAULT_DISK_SIZE")
DISK_FORMAT=$(get_or_set "$DISK_FORMAT" "DISK_FORMAT" "$DEFAULT_DISK_FORMAT")
DISK_EXTENSION=$(echo $DISK_FORMAT | tr A-Z a-z)
DISK_PATH="$TARGET_FOLDER/$NAME.$DISK_EXTENSION"
# A secondary disk is only created when SDISK_NAME is given.
if [ -n "$SDISK_NAME" ];
then
    SDISK_SIZE=$(get_or_set "$SDISK_SIZE" "SDISK_SIZE" "$DEFAULT_SDISK_SIZE")
    SDISK_PATH="$TARGET_FOLDER/$SDISK_NAME.$DISK_EXTENSION"
fi
DISK_CONTROLLER=$(get_or_set "$DISK_CONTROLLER" "DISK_CONTROLLER" "$DEFAULT_DISK_CONTROLLER")
DISK_CONTROLLER_NAME=$(echo $DISK_CONTROLLER | tr a-z A-Z)

# Summarize the chosen configuration for the confirmation prompt below.
echo -e "NAME:\t $NAME"
echo -e "FOLDER:\t$TARGET_FOLDER"
echo -e "OS:\t$OS"
echo -e "RAM:\t$RAM"
echo -e "CPUS:\t$CPUS"
echo -e "VRAM:\t$VRAM"
echo -e "DCTRL:\t$DISK_CONTROLLER,$DISK_CONTROLLER_NAME"
echo -e "DISK:\t$DISK_SIZE MB\t$DISK_FORMAT\t$DISK_PATH"
if [ -n "$SDISK_PATH" ];
then
    echo -e "2DISK:\t$SDISK_SIZE MB\t$DISK_FORMAT\t$SDISK_PATH"
fi
if [ -n "$ISO" ];
then
    echo -e "ISO:\t$ISO"
fi
echo ""
echo -n "Are these correct (Y/n)? "
read response
if [ "$response" = "n" ] || [ "$response" = "N" ];
then
    exit
fi

# Create and register the VM, add controllers and disks, then boot it.
echo "Creating VM..."
/usr/bin/vboxmanage createvm --name "$NAME" --ostype $OS --register
/usr/bin/vboxmanage modifyvm "$NAME" --cpus $CPUS --memory $RAM --vram $VRAM
echo "Creating CD Controller..."
/usr/bin/vboxmanage storagectl "$NAME" --name "$DEFAULT_CD_CONTROLLER_NAME" --add ide --controller PIIX4
echo "Creating disk controller..."
/usr/bin/vboxmanage storagectl "$NAME" --name "$DISK_CONTROLLER_NAME" --add "$DISK_CONTROLLER" --controller IntelAhci
echo "Creating primary disk..."
/usr/bin/vboxmanage createmedium disk --filename "$DISK_PATH" --size $DISK_SIZE --format "$DISK_FORMAT"
/usr/bin/vboxmanage storageattach "$NAME" --storagectl "$DISK_CONTROLLER_NAME" --port 0 --device 0 --type hdd --medium "$DISK_PATH"
if [ -n "$SDISK_PATH" ];
then
    echo "Creating secondary disk..."
    # NOTE(review): SDISK_PATH is recomputed here although it was already
    # set above — redundant but harmless.
    SDISK_PATH="$TARGET_FOLDER/$SDISK_NAME.$DISK_EXTENSION"
    /usr/bin/vboxmanage createmedium disk --filename "$SDISK_PATH" --size $SDISK_SIZE --format "$DISK_FORMAT"
    /usr/bin/vboxmanage storageattach "$NAME" --storagectl "$DISK_CONTROLLER_NAME" --port 1 --device 0 --type hdd --medium "$SDISK_PATH"
fi
if [ -n "$ISO" ];
then
    echo "Attaching ISO"
    /usr/bin/vboxmanage storageattach "$NAME" --storagectl "$DEFAULT_CD_CONTROLLER_NAME" --port 0 --device 0 --type dvddrive --medium "$ISO"
fi
echo "Starting VM..."
/usr/bin/vboxmanage startvm "$NAME"

# Example invocations:
# ISO=/mnt/ext/downloads/OSs/ubuntu-18.04.1-desktop-amd64.iso SDISK_NAME=LFS ./createvm.sh TestThree
# ISO=/mnt/ext/downloads/OSs/en-gb_windows_10_multiple_editions_x64_dvd_6846903.iso ./createvm.sh Windows10
# vboxmanage clonevm TestSix --mode all --name TestClone
<reponame>gujou/profiling
package jvm.launch;
import java.lang.management.ManagementFactory;
import java.lang.management.RuntimeMXBean;
/**
 * Prints the JVM input arguments (as reported by the runtime MX bean),
 * numbering each line with a running counter.
 */
public class JvmArgs {

    // Running counter shared across calls; part of the public interface.
    public static int argNb = 0;

    /** Print every JVM input argument with its sequence number. */
    public static void printOutArgs() {
        final RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
        for (final String jvmArgument : runtimeBean.getInputArguments()) {
            System.out.println("JVM arg " + argNb++ + " : " + jvmArgument);
        }
    }
}
|
import java.util.HashMap;
import java.util.Map;
/**
 * Locates the "odd man out" in an array: among all values occurring an odd
 * number of times, the smallest one.
 */
public class OddManOut {

    /**
     * Finds the smallest value that appears an odd number of times.
     *
     * @param numbers the values to inspect
     * @return the smallest value with an odd occurrence count, or
     *         {@code Integer.MAX_VALUE} when no value occurs an odd number
     *         of times (including for an empty array)
     */
    public static int findOddManOut(int[] numbers) {
        Map<Integer, Integer> countMap = new HashMap<>();
        for (int num : numbers) {
            countMap.put(num, countMap.getOrDefault(num, 0) + 1);
        }
        // Fixed: the original also tracked the count of the chosen key in a
        // dead local (`oddManOut`) that was assigned but never returned.
        int minOddManOut = Integer.MAX_VALUE;
        for (Map.Entry<Integer, Integer> entry : countMap.entrySet()) {
            if (entry.getValue() % 2 != 0 && entry.getKey() < minOddManOut) {
                minOddManOut = entry.getKey();
            }
        }
        return minOddManOut;
    }
}
<reponame>abhinandangithub/attainu
// User-facing routes: restaurant listing and current-user lookup.
const express = require("express");
// NOTE(review): `body` and `auth` are imported but not used by any route
// below — confirm whether validation/auth middleware was meant to be wired
// in (e.g. on /user) or whether these imports can be dropped.
const { body } = require("express-validator");
const userController = require("../controllers/userController");
const auth = require("../middleware/auth");
const router = express.Router();
// List restaurants.
router.get("/restaurants", userController.getRestaurants);
// Return the currently logged-in user; presumably this should be guarded
// by the `auth` middleware — TODO confirm.
router.get("/user", userController.getLoggedInUser);
module.exports = router;
|
<filename>system-consumer/src/main/java/cn/zhangjingyao/controller/system/LoginController.java
package cn.zhangjingyao.controller.system;
import cn.zhangjingyao.controller.base.BaseController;
import cn.zhangjingyao.entity.PageData;
import cn.zhangjingyao.security.service.CustomTokenServices;
import cn.zhangjingyao.util.toekn.FormTokenPool;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
/**
 * Login-related endpoints: OAuth2 logout and form-token issuing.
 *
 * @author
 */
@Controller
public class LoginController extends BaseController {

    @Autowired
    private CustomTokenServices customTokenServices;

    /**
     * Logout: revokes the caller's current OAuth2 access token.
     *
     * @return JSON payload with state "success" and a confirmation message
     */
    @RequestMapping(value = "/oauth/logout")
    @ResponseBody
    public String oauthLogout() {
        // Resolve the access token of the authenticated principal and
        // revoke it so it can no longer be used.
        OAuth2Authentication authentication = (OAuth2Authentication) SecurityContextHolder.getContext().getAuthentication();
        OAuth2AccessToken accessToken = customTokenServices.getAccessToken(authentication);
        customTokenServices.revokeToken(accessToken.getValue());
        return this.jsonContent("success", "注销成功");
    }

    /**
     * Issues a fresh form token from the shared token pool — presumably
     * used to guard against duplicate form submissions (confirm against
     * FormTokenPool's usage).
     *
     * @return JSON payload containing the new token under "formToken"
     */
    @RequestMapping(value = "/formToken")
    @ResponseBody
    public String formToken() {
        FormTokenPool formTokenPool = FormTokenPool.getInstance();
        String formToken = formTokenPool.addToken();
        PageData res = new PageData();
        res.put("formToken", formToken);
        return this.jsonContent("success", res);
    }
}
|
/*
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nz.co.testamation.common.time;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.LocalDate;
import org.joda.time.LocalTime;
/**
 * {@code Clock} implementation that delegates all time queries to a
 * {@link DateTimeProvider} and carries an optional time-zone suffix string.
 */
public class ClockImpl implements Clock {

    private final DateTimeProvider dateTimeProvider;
    // Suffix exposed via getTimeZoneSuffix(); may be the empty string.
    private final String timeZoneSuffix;

    /** Default clock: real time provider, empty time-zone suffix. */
    public ClockImpl() {
        this( new DateTimeProviderImpl(), "" );
    }

    /** Real time provider with the given time-zone suffix. */
    public ClockImpl( String timeZoneSuffix ) {
        this( new DateTimeProviderImpl(), timeZoneSuffix );
    }

    /** Fully configurable constructor (e.g. for a fake provider in tests). */
    public ClockImpl( DateTimeProvider dateTimeProvider, String timeZoneSuffix ) {
        this.dateTimeProvider = dateTimeProvider;
        this.timeZoneSuffix = timeZoneSuffix;
    }

    /** Current instant as reported by the provider. */
    public DateTime now() {
        return dateTimeProvider.getDateTime();
    }

    @Override
    public long currentTimestampMillis() {
        return now().getMillis();
    }

    @Override
    public LocalDate today() {
        return now().toLocalDate();
    }

    public String getTimeZoneSuffix() {
        return timeZoneSuffix;
    }

    /** Today's date combined with the given wall-clock time. */
    public DateTime todayAt( LocalTime time ) {
        return now().withFields( time );
    }

    /** Delegates waiting to the provider (fake providers may skip ahead). */
    public void waitFor( Duration duration ) {
        dateTimeProvider.waitFor( duration, this );
    }
}
|
package solutions
import (
"github.com/irvinlim/advent-of-code-2017-go/types"
"testing"
)
// TestDay03PartOne checks the Day 3 part-one solution against the known
// example cases and against the puzzle input file.
func TestDay03PartOne(t *testing.T) {
	cases := []types.TestCase{
		{Input: "1", Expected: "0"},
		{Input: "12", Expected: "3"},
		{Input: "23", Expected: "2"},
		{Input: "1024", Expected: "31"},
	}

	RunTestCases(t, Day03PartOne, cases)
	RunInputFileTest(t, Day03PartOne, "input03", "371")
}
|
<reponame>addam128/PV204_area51<filename>src/protobuf/pwmanager.pb.h
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: pwmanager.proto
#ifndef GOOGLE_PROTOBUF_INCLUDED_pwmanager_2eproto
#define GOOGLE_PROTOBUF_INCLUDED_pwmanager_2eproto
#include <limits>
#include <string>
#include <google/protobuf/port_def.inc>
#if PROTOBUF_VERSION < 3015000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3015006 < PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif
#include <google/protobuf/port_undef.inc>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_table_driven.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata_lite.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/message.h>
#include <google/protobuf/repeated_field.h> // IWYU pragma: export
#include <google/protobuf/extension_set.h> // IWYU pragma: export
#include <google/protobuf/unknown_field_set.h>
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>
#define PROTOBUF_INTERNAL_EXPORT_pwmanager_2eproto
PROTOBUF_NAMESPACE_OPEN
namespace internal {
class AnyMetadata;
} // namespace internal
PROTOBUF_NAMESPACE_CLOSE
// Internal implementation detail -- do not use these members.
struct TableStruct_pwmanager_2eproto {
static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTableField entries[]
PROTOBUF_SECTION_VARIABLE(protodesc_cold);
static const ::PROTOBUF_NAMESPACE_ID::internal::AuxiliaryParseTableField aux[]
PROTOBUF_SECTION_VARIABLE(protodesc_cold);
static const ::PROTOBUF_NAMESPACE_ID::internal::ParseTable schema[2]
PROTOBUF_SECTION_VARIABLE(protodesc_cold);
static const ::PROTOBUF_NAMESPACE_ID::internal::FieldMetadata field_metadata[];
static const ::PROTOBUF_NAMESPACE_ID::internal::SerializationTable serialization_table[];
static const ::PROTOBUF_NAMESPACE_ID::uint32 offsets[];
};
extern const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_pwmanager_2eproto;
::PROTOBUF_NAMESPACE_ID::Metadata descriptor_table_pwmanager_2eproto_metadata_getter(int index);
class Entry;
struct EntryDefaultTypeInternal;
extern EntryDefaultTypeInternal _Entry_default_instance_;
class Wallet;
struct WalletDefaultTypeInternal;
extern WalletDefaultTypeInternal _Wallet_default_instance_;
PROTOBUF_NAMESPACE_OPEN
template<> ::Entry* Arena::CreateMaybeMessage<::Entry>(Arena*);
template<> ::Wallet* Arena::CreateMaybeMessage<::Wallet>(Arena*);
PROTOBUF_NAMESPACE_CLOSE
// ===================================================================
class Entry PROTOBUF_FINAL :
public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:Entry) */ {
public:
inline Entry() : Entry(nullptr) {}
virtual ~Entry();
explicit constexpr Entry(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
Entry(const Entry& from);
Entry(Entry&& from) noexcept
: Entry() {
*this = ::std::move(from);
}
inline Entry& operator=(const Entry& from) {
CopyFrom(from);
return *this;
}
inline Entry& operator=(Entry&& from) noexcept {
if (GetArena() == from.GetArena()) {
if (this != &from) InternalSwap(&from);
} else {
CopyFrom(from);
}
return *this;
}
inline const ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet& unknown_fields() const {
return _internal_metadata_.unknown_fields<::PROTOBUF_NAMESPACE_ID::UnknownFieldSet>(::PROTOBUF_NAMESPACE_ID::UnknownFieldSet::default_instance);
}
inline ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet* mutable_unknown_fields() {
return _internal_metadata_.mutable_unknown_fields<::PROTOBUF_NAMESPACE_ID::UnknownFieldSet>();
}
static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
return GetDescriptor();
}
static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
return GetMetadataStatic().descriptor;
}
static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
return GetMetadataStatic().reflection;
}
static const Entry& default_instance() {
return *internal_default_instance();
}
static inline const Entry* internal_default_instance() {
return reinterpret_cast<const Entry*>(
&_Entry_default_instance_);
}
static constexpr int kIndexInFileMessages =
0;
friend void swap(Entry& a, Entry& b) {
a.Swap(&b);
}
inline void Swap(Entry* other) {
if (other == this) return;
if (GetArena() == other->GetArena()) {
InternalSwap(other);
} else {
::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
}
}
void UnsafeArenaSwap(Entry* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArena() == other->GetArena());
InternalSwap(other);
}
// implements Message ----------------------------------------------
inline Entry* New() const final {
return CreateMaybeMessage<Entry>(nullptr);
}
Entry* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
return CreateMaybeMessage<Entry>(arena);
}
void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
void CopyFrom(const Entry& from);
void MergeFrom(const Entry& from);
PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
bool IsInitialized() const final;
size_t ByteSizeLong() const final;
const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
::PROTOBUF_NAMESPACE_ID::uint8* _InternalSerialize(
::PROTOBUF_NAMESPACE_ID::uint8* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
inline void SharedCtor();
inline void SharedDtor();
void SetCachedSize(int size) const final;
void InternalSwap(Entry* other);
friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
return "Entry";
}
protected:
explicit Entry(::PROTOBUF_NAMESPACE_ID::Arena* arena);
private:
static void ArenaDtor(void* object);
inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
public:
::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
private:
static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
return ::descriptor_table_pwmanager_2eproto_metadata_getter(kIndexInFileMessages);
}
public:
// nested types ----------------------------------------------------
// accessors -------------------------------------------------------
enum : int {
kServicenameFieldNumber = 1,
kUsernameFieldNumber = 2,
kPasswordFieldNumber = 3,
};
// required string servicename = 1;
bool has_servicename() const;
private:
bool _internal_has_servicename() const;
public:
void clear_servicename();
const std::string& servicename() const;
void set_servicename(const std::string& value);
void set_servicename(std::string&& value);
void set_servicename(const char* value);
void set_servicename(const char* value, size_t size);
std::string* mutable_servicename();
std::string* release_servicename();
void set_allocated_servicename(std::string* servicename);
private:
const std::string& _internal_servicename() const;
void _internal_set_servicename(const std::string& value);
std::string* _internal_mutable_servicename();
public:
// required string username = 2;
bool has_username() const;
private:
bool _internal_has_username() const;
public:
void clear_username();
const std::string& username() const;
void set_username(const std::string& value);
void set_username(std::string&& value);
void set_username(const char* value);
void set_username(const char* value, size_t size);
std::string* mutable_username();
std::string* release_username();
void set_allocated_username(std::string* username);
private:
const std::string& _internal_username() const;
void _internal_set_username(const std::string& value);
std::string* _internal_mutable_username();
public:
// required string password = 3;
bool has_password() const;
private:
bool _internal_has_password() const;
public:
void clear_password();
const std::string& password() const;
void set_password(const std::string& value);
void set_password(std::string&& value);
void set_password(const char* value);
void set_password(const char* value, size_t size);
std::string* mutable_password();
std::string* release_password();
void set_allocated_password(std::string* password);
private:
const std::string& _internal_password() const;
void _internal_set_password(const std::string& value);
std::string* _internal_mutable_password();
public:
// @@protoc_insertion_point(class_scope:Entry)
private:
class _Internal;
// helper for ByteSizeLong()
size_t RequiredFieldsByteSizeFallback() const;
template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
typedef void InternalArenaConstructable_;
typedef void DestructorSkippable_;
::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_;
mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr servicename_;
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr username_;
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr password_;
friend struct ::TableStruct_pwmanager_2eproto;
};
// -------------------------------------------------------------------
class Wallet PROTOBUF_FINAL :
public ::PROTOBUF_NAMESPACE_ID::Message /* @@protoc_insertion_point(class_definition:Wallet) */ {
public:
inline Wallet() : Wallet(nullptr) {}
virtual ~Wallet();
explicit constexpr Wallet(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
Wallet(const Wallet& from);
Wallet(Wallet&& from) noexcept
: Wallet() {
*this = ::std::move(from);
}
inline Wallet& operator=(const Wallet& from) {
CopyFrom(from);
return *this;
}
inline Wallet& operator=(Wallet&& from) noexcept {
if (GetArena() == from.GetArena()) {
if (this != &from) InternalSwap(&from);
} else {
CopyFrom(from);
}
return *this;
}
inline const ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet& unknown_fields() const {
return _internal_metadata_.unknown_fields<::PROTOBUF_NAMESPACE_ID::UnknownFieldSet>(::PROTOBUF_NAMESPACE_ID::UnknownFieldSet::default_instance);
}
inline ::PROTOBUF_NAMESPACE_ID::UnknownFieldSet* mutable_unknown_fields() {
return _internal_metadata_.mutable_unknown_fields<::PROTOBUF_NAMESPACE_ID::UnknownFieldSet>();
}
static const ::PROTOBUF_NAMESPACE_ID::Descriptor* descriptor() {
return GetDescriptor();
}
static const ::PROTOBUF_NAMESPACE_ID::Descriptor* GetDescriptor() {
return GetMetadataStatic().descriptor;
}
static const ::PROTOBUF_NAMESPACE_ID::Reflection* GetReflection() {
return GetMetadataStatic().reflection;
}
static const Wallet& default_instance() {
return *internal_default_instance();
}
static inline const Wallet* internal_default_instance() {
return reinterpret_cast<const Wallet*>(
&_Wallet_default_instance_);
}
static constexpr int kIndexInFileMessages =
1;
friend void swap(Wallet& a, Wallet& b) {
a.Swap(&b);
}
inline void Swap(Wallet* other) {
if (other == this) return;
if (GetArena() == other->GetArena()) {
InternalSwap(other);
} else {
::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
}
}
void UnsafeArenaSwap(Wallet* other) {
if (other == this) return;
GOOGLE_DCHECK(GetArena() == other->GetArena());
InternalSwap(other);
}
// implements Message ----------------------------------------------
inline Wallet* New() const final {
return CreateMaybeMessage<Wallet>(nullptr);
}
Wallet* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
return CreateMaybeMessage<Wallet>(arena);
}
void CopyFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
void MergeFrom(const ::PROTOBUF_NAMESPACE_ID::Message& from) final;
void CopyFrom(const Wallet& from);
void MergeFrom(const Wallet& from);
PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
bool IsInitialized() const final;
size_t ByteSizeLong() const final;
const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
::PROTOBUF_NAMESPACE_ID::uint8* _InternalSerialize(
::PROTOBUF_NAMESPACE_ID::uint8* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
int GetCachedSize() const final { return _cached_size_.Get(); }
private:
inline void SharedCtor();
inline void SharedDtor();
void SetCachedSize(int size) const final;
void InternalSwap(Wallet* other);
friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
return "Wallet";
}
protected:
explicit Wallet(::PROTOBUF_NAMESPACE_ID::Arena* arena);
private:
static void ArenaDtor(void* object);
inline void RegisterArenaDtor(::PROTOBUF_NAMESPACE_ID::Arena* arena);
public:
::PROTOBUF_NAMESPACE_ID::Metadata GetMetadata() const final;
private:
static ::PROTOBUF_NAMESPACE_ID::Metadata GetMetadataStatic() {
return ::descriptor_table_pwmanager_2eproto_metadata_getter(kIndexInFileMessages);
}
public:
// nested types ----------------------------------------------------
// accessors -------------------------------------------------------
enum : int {
kEntriesFieldNumber = 3,
kMasterPasswordFieldNumber = 2,
kNumberOfEntriesFieldNumber = 1,
};
// repeated .Entry entries = 3;
int entries_size() const;
private:
int _internal_entries_size() const;
public:
void clear_entries();
::Entry* mutable_entries(int index);
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::Entry >*
mutable_entries();
private:
const ::Entry& _internal_entries(int index) const;
::Entry* _internal_add_entries();
public:
const ::Entry& entries(int index) const;
::Entry* add_entries();
const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::Entry >&
entries() const;
// required string master_password = 2;
bool has_master_password() const;
private:
bool _internal_has_master_password() const;
public:
void clear_master_password();
const std::string& master_password() const;
void set_master_password(const std::string& value);
void set_master_password(std::string&& value);
void set_master_password(const char* value);
void set_master_password(const char* value, size_t size);
std::string* mutable_master_password();
std::string* release_master_password();
void set_allocated_master_password(std::string* master_password);
private:
const std::string& _internal_master_password() const;
void _internal_set_master_password(const std::string& value);
std::string* _internal_mutable_master_password();
public:
// required uint32 number_of_entries = 1;
bool has_number_of_entries() const;
private:
bool _internal_has_number_of_entries() const;
public:
void clear_number_of_entries();
::PROTOBUF_NAMESPACE_ID::uint32 number_of_entries() const;
void set_number_of_entries(::PROTOBUF_NAMESPACE_ID::uint32 value);
private:
::PROTOBUF_NAMESPACE_ID::uint32 _internal_number_of_entries() const;
void _internal_set_number_of_entries(::PROTOBUF_NAMESPACE_ID::uint32 value);
public:
// @@protoc_insertion_point(class_scope:Wallet)
private:
class _Internal;
// helper for ByteSizeLong()
size_t RequiredFieldsByteSizeFallback() const;
template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
typedef void InternalArenaConstructable_;
typedef void DestructorSkippable_;
::PROTOBUF_NAMESPACE_ID::internal::HasBits<1> _has_bits_;
mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::Entry > entries_;
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr master_password_;
::PROTOBUF_NAMESPACE_ID::uint32 number_of_entries_;
friend struct ::TableStruct_pwmanager_2eproto;
};
// ===================================================================
// ===================================================================
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wstrict-aliasing"
#endif // __GNUC__
// Entry
// required string servicename = 1;
inline bool Entry::_internal_has_servicename() const {
bool value = (_has_bits_[0] & 0x00000001u) != 0;
return value;
}
inline bool Entry::has_servicename() const {
return _internal_has_servicename();
}
inline void Entry::clear_servicename() {
servicename_.ClearToEmpty();
_has_bits_[0] &= ~0x00000001u;
}
inline const std::string& Entry::servicename() const {
// @@protoc_insertion_point(field_get:Entry.servicename)
return _internal_servicename();
}
inline void Entry::set_servicename(const std::string& value) {
_internal_set_servicename(value);
// @@protoc_insertion_point(field_set:Entry.servicename)
}
inline std::string* Entry::mutable_servicename() {
// @@protoc_insertion_point(field_mutable:Entry.servicename)
return _internal_mutable_servicename();
}
inline const std::string& Entry::_internal_servicename() const {
return servicename_.Get();
}
inline void Entry::_internal_set_servicename(const std::string& value) {
_has_bits_[0] |= 0x00000001u;
servicename_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, value, GetArena());
}
inline void Entry::set_servicename(std::string&& value) {
_has_bits_[0] |= 0x00000001u;
servicename_.Set(
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::move(value), GetArena());
// @@protoc_insertion_point(field_set_rvalue:Entry.servicename)
}
inline void Entry::set_servicename(const char* value) {
GOOGLE_DCHECK(value != nullptr);
_has_bits_[0] |= 0x00000001u;
servicename_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(value), GetArena());
// @@protoc_insertion_point(field_set_char:Entry.servicename)
}
inline void Entry::set_servicename(const char* value,
size_t size) {
_has_bits_[0] |= 0x00000001u;
servicename_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(
reinterpret_cast<const char*>(value), size), GetArena());
// @@protoc_insertion_point(field_set_pointer:Entry.servicename)
}
inline std::string* Entry::_internal_mutable_servicename() {
_has_bits_[0] |= 0x00000001u;
return servicename_.Mutable(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, GetArena());
}
inline std::string* Entry::release_servicename() {
// @@protoc_insertion_point(field_release:Entry.servicename)
if (!_internal_has_servicename()) {
return nullptr;
}
_has_bits_[0] &= ~0x00000001u;
return servicename_.ReleaseNonDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArena());
}
inline void Entry::set_allocated_servicename(std::string* servicename) {
if (servicename != nullptr) {
_has_bits_[0] |= 0x00000001u;
} else {
_has_bits_[0] &= ~0x00000001u;
}
servicename_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), servicename,
GetArena());
// @@protoc_insertion_point(field_set_allocated:Entry.servicename)
}
// required string username = 2;
inline bool Entry::_internal_has_username() const {
bool value = (_has_bits_[0] & 0x00000002u) != 0;
return value;
}
inline bool Entry::has_username() const {
return _internal_has_username();
}
inline void Entry::clear_username() {
username_.ClearToEmpty();
_has_bits_[0] &= ~0x00000002u;
}
inline const std::string& Entry::username() const {
// @@protoc_insertion_point(field_get:Entry.username)
return _internal_username();
}
inline void Entry::set_username(const std::string& value) {
_internal_set_username(value);
// @@protoc_insertion_point(field_set:Entry.username)
}
inline std::string* Entry::mutable_username() {
// @@protoc_insertion_point(field_mutable:Entry.username)
return _internal_mutable_username();
}
inline const std::string& Entry::_internal_username() const {
return username_.Get();
}
inline void Entry::_internal_set_username(const std::string& value) {
_has_bits_[0] |= 0x00000002u;
username_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, value, GetArena());
}
inline void Entry::set_username(std::string&& value) {
_has_bits_[0] |= 0x00000002u;
username_.Set(
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::move(value), GetArena());
// @@protoc_insertion_point(field_set_rvalue:Entry.username)
}
inline void Entry::set_username(const char* value) {
GOOGLE_DCHECK(value != nullptr);
_has_bits_[0] |= 0x00000002u;
username_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(value), GetArena());
// @@protoc_insertion_point(field_set_char:Entry.username)
}
inline void Entry::set_username(const char* value,
size_t size) {
_has_bits_[0] |= 0x00000002u;
username_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(
reinterpret_cast<const char*>(value), size), GetArena());
// @@protoc_insertion_point(field_set_pointer:Entry.username)
}
inline std::string* Entry::_internal_mutable_username() {
_has_bits_[0] |= 0x00000002u;
return username_.Mutable(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, GetArena());
}
inline std::string* Entry::release_username() {
// @@protoc_insertion_point(field_release:Entry.username)
if (!_internal_has_username()) {
return nullptr;
}
_has_bits_[0] &= ~0x00000002u;
return username_.ReleaseNonDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArena());
}
inline void Entry::set_allocated_username(std::string* username) {
if (username != nullptr) {
_has_bits_[0] |= 0x00000002u;
} else {
_has_bits_[0] &= ~0x00000002u;
}
username_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), username,
GetArena());
// @@protoc_insertion_point(field_set_allocated:Entry.username)
}
// required string password = 3;
inline bool Entry::_internal_has_password() const {
bool value = (_has_bits_[0] & 0x00000004u) != 0;
return value;
}
inline bool Entry::has_password() const {
return _internal_has_password();
}
inline void Entry::clear_password() {
password_.ClearToEmpty();
_has_bits_[0] &= ~0x00000004u;
}
inline const std::string& Entry::password() const {
// @@protoc_insertion_point(field_get:Entry.password)
return _internal_password();
}
inline void Entry::set_password(const std::string& value) {
_internal_set_password(value);
// @@protoc_insertion_point(field_set:Entry.password)
}
inline std::string* Entry::mutable_password() {
// @@protoc_insertion_point(field_mutable:Entry.password)
return _internal_mutable_password();
}
inline const std::string& Entry::_internal_password() const {
return password_.Get();
}
inline void Entry::_internal_set_password(const std::string& value) {
_has_bits_[0] |= 0x00000004u;
password_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, value, GetArena());
}
inline void Entry::set_password(std::string&& value) {
_has_bits_[0] |= 0x00000004u;
password_.Set(
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::move(value), GetArena());
// @@protoc_insertion_point(field_set_rvalue:Entry.password)
}
inline void Entry::set_password(const char* value) {
GOOGLE_DCHECK(value != nullptr);
_has_bits_[0] |= 0x00000004u;
password_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(value), GetArena());
// @@protoc_insertion_point(field_set_char:Entry.password)
}
inline void Entry::set_password(const char* value,
size_t size) {
_has_bits_[0] |= 0x00000004u;
password_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(
reinterpret_cast<const char*>(value), size), GetArena());
// @@protoc_insertion_point(field_set_pointer:Entry.password)
}
inline std::string* Entry::_internal_mutable_password() {
_has_bits_[0] |= 0x00000004u;
return password_.Mutable(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, GetArena());
}
inline std::string* Entry::release_password() {
// @@protoc_insertion_point(field_release:Entry.password)
if (!_internal_has_password()) {
return nullptr;
}
_has_bits_[0] &= ~0x00000004u;
return password_.ReleaseNonDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArena());
}
inline void Entry::set_allocated_password(std::string* password) {
if (password != nullptr) {
_has_bits_[0] |= 0x00000004u;
} else {
_has_bits_[0] &= ~0x00000004u;
}
password_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), password,
GetArena());
// @@protoc_insertion_point(field_set_allocated:Entry.password)
}
// -------------------------------------------------------------------
// Wallet
// required uint32 number_of_entries = 1;
// NOTE(review): this header is protoc-generated; comments added here will be
// lost on regeneration from pwmanager.proto.
// Presence of this required field is tracked by bit 0x00000002u of _has_bits_[0].
inline bool Wallet::_internal_has_number_of_entries() const {
  bool value = (_has_bits_[0] & 0x00000002u) != 0;
  return value;
}
inline bool Wallet::has_number_of_entries() const {
  return _internal_has_number_of_entries();
}
// Clearing restores the proto default (0) and drops the has-bit.
inline void Wallet::clear_number_of_entries() {
  number_of_entries_ = 0u;
  _has_bits_[0] &= ~0x00000002u;
}
inline ::PROTOBUF_NAMESPACE_ID::uint32 Wallet::_internal_number_of_entries() const {
  return number_of_entries_;
}
inline ::PROTOBUF_NAMESPACE_ID::uint32 Wallet::number_of_entries() const {
  // @@protoc_insertion_point(field_get:Wallet.number_of_entries)
  return _internal_number_of_entries();
}
// Setting the value always raises the has-bit, even for the default value 0.
inline void Wallet::_internal_set_number_of_entries(::PROTOBUF_NAMESPACE_ID::uint32 value) {
  _has_bits_[0] |= 0x00000002u;
  number_of_entries_ = value;
}
inline void Wallet::set_number_of_entries(::PROTOBUF_NAMESPACE_ID::uint32 value) {
  _internal_set_number_of_entries(value);
  // @@protoc_insertion_point(field_set:Wallet.number_of_entries)
}
// required string master_password = 2;
inline bool Wallet::_internal_has_master_password() const {
bool value = (_has_bits_[0] & 0x00000001u) != 0;
return value;
}
inline bool Wallet::has_master_password() const {
return _internal_has_master_password();
}
inline void Wallet::clear_master_password() {
master_password_.ClearToEmpty();
_has_bits_[0] &= ~0x00000001u;
}
inline const std::string& Wallet::master_password() const {
// @@protoc_insertion_point(field_get:Wallet.master_password)
return _internal_master_password();
}
inline void Wallet::set_master_password(const std::string& value) {
_internal_set_master_password(value);
// @@protoc_insertion_point(field_set:Wallet.master_password)
}
inline std::string* Wallet::mutable_master_password() {
// @@protoc_insertion_point(field_mutable:Wallet.master_password)
return _internal_mutable_master_password();
}
inline const std::string& Wallet::_internal_master_password() const {
return master_password_.Get();
}
inline void Wallet::_internal_set_master_password(const std::string& value) {
_has_bits_[0] |= 0x00000001u;
master_password_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, value, GetArena());
}
inline void Wallet::set_master_password(std::string&& value) {
_has_bits_[0] |= 0x00000001u;
master_password_.Set(
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::move(value), GetArena());
// @@protoc_insertion_point(field_set_rvalue:Wallet.master_password)
}
inline void Wallet::set_master_password(const char* value) {
GOOGLE_DCHECK(value != nullptr);
_has_bits_[0] |= 0x00000001u;
master_password_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(value), GetArena());
// @@protoc_insertion_point(field_set_char:Wallet.master_password)
}
inline void Wallet::set_master_password(const char* value,
size_t size) {
_has_bits_[0] |= 0x00000001u;
master_password_.Set(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, ::std::string(
reinterpret_cast<const char*>(value), size), GetArena());
// @@protoc_insertion_point(field_set_pointer:Wallet.master_password)
}
inline std::string* Wallet::_internal_mutable_master_password() {
_has_bits_[0] |= 0x00000001u;
return master_password_.Mutable(::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::EmptyDefault{}, GetArena());
}
inline std::string* Wallet::release_master_password() {
// @@protoc_insertion_point(field_release:Wallet.master_password)
if (!_internal_has_master_password()) {
return nullptr;
}
_has_bits_[0] &= ~0x00000001u;
return master_password_.ReleaseNonDefault(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), GetArena());
}
inline void Wallet::set_allocated_master_password(std::string* master_password) {
if (master_password != nullptr) {
_has_bits_[0] |= 0x00000001u;
} else {
_has_bits_[0] &= ~0x00000001u;
}
master_password_.SetAllocated(&::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited(), master_password,
GetArena());
// @@protoc_insertion_point(field_set_allocated:Wallet.master_password)
}
// repeated .Entry entries = 3;
inline int Wallet::_internal_entries_size() const {
return entries_.size();
}
inline int Wallet::entries_size() const {
return _internal_entries_size();
}
inline void Wallet::clear_entries() {
entries_.Clear();
}
inline ::Entry* Wallet::mutable_entries(int index) {
// @@protoc_insertion_point(field_mutable:Wallet.entries)
return entries_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::Entry >*
Wallet::mutable_entries() {
// @@protoc_insertion_point(field_mutable_list:Wallet.entries)
return &entries_;
}
inline const ::Entry& Wallet::_internal_entries(int index) const {
return entries_.Get(index);
}
inline const ::Entry& Wallet::entries(int index) const {
// @@protoc_insertion_point(field_get:Wallet.entries)
return _internal_entries(index);
}
inline ::Entry* Wallet::_internal_add_entries() {
return entries_.Add();
}
inline ::Entry* Wallet::add_entries() {
// @@protoc_insertion_point(field_add:Wallet.entries)
return _internal_add_entries();
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::Entry >&
Wallet::entries() const {
// @@protoc_insertion_point(field_list:Wallet.entries)
return entries_;
}
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif // __GNUC__
// -------------------------------------------------------------------
// @@protoc_insertion_point(namespace_scope)
// @@protoc_insertion_point(global_scope)
#include <google/protobuf/port_undef.inc>
#endif // GOOGLE_PROTOBUF_INCLUDED_GOOGLE_PROTOBUF_INCLUDED_pwmanager_2eproto
|
<filename>src/main/java/com.sula/dao/custom/impl/QueryDAOImpl.java
package com.sula.dao.custom.impl;
import com.sula.dao.custom.QueryDAO;
import com.sula.entity.CustomEntity;
import org.hibernate.Session;
import org.hibernate.query.NativeQuery;
import org.springframework.stereotype.Repository;
import java.sql.SQLException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
@Repository
public class QueryDAOImpl implements QueryDAO {

    /** Hibernate session; must be supplied via {@link #setSession} before any query runs. */
    private Session session;

    @Override
    public void setSession(Session session) {
        this.session = session;
    }

    /**
     * Loads a summary of every order: order id, order date, customer id,
     * customer name and the order total (SUM of qty * unit_price over the
     * order's detail rows).
     *
     * @return one {@link CustomEntity} per order
     * @throws SQLException declared by the {@link QueryDAO} contract
     */
    @Override
    public ArrayList<CustomEntity> getALLOrderInformation() throws SQLException {
        ArrayList<CustomEntity> customEntities = new ArrayList<>();
        NativeQuery nativeQuery = session.createNativeQuery("select O.id, O.date, O.customer_id, C.name, SUM(OD.qty*OD.unit_price) from (`order` O INNER JOIN customer C on O.customer_id = C.id INNER JOIN orderdetail OD on O.id = OD.orderID) group by o.id");
        List<Object[]> list = nativeQuery.list();
        list.forEach(objects -> {
            String oid = objects[0].toString();
            // Native DATE columns come back as java.sql.Date; the entity expects LocalDate.
            java.sql.Date date = (java.sql.Date) objects[1];
            LocalDate d = date.toLocalDate();
            String cid = String.valueOf(objects[2]);
            String cnm = String.valueOf(objects[3]);
            // NOTE(review): depending on the SQL dialect/column types, SUM(...) may
            // be returned as BigDecimal rather than Double — confirm against the
            // live schema before relying on this cast.
            Double tot = (Double) objects[4];
            customEntities.add(new CustomEntity(oid, d, cid, cnm, tot));
        });
        return customEntities;
    }
}
|
"""
Meraki Clients API Resource
"""
import urllib
from .meraki_api_resource import MerakiAPIResource
class Clients(MerakiAPIResource):
    """Meraki Dashboard API "clients" resource.

    Exposes per-client endpoints such as traffic history. Per the API,
    usage data is reported in kilobytes, and the traffic-history endpoint
    requires detailed traffic analysis to be enabled on the
    Network-wide > General page.
    """
    resource = "clients"
    def __init__(self, key, prefix=None, resource_id=None):
        # URL assembly (prefix + resource + resource_id) is handled by the base class.
        MerakiAPIResource.__init__(self, key, prefix, resource_id)
    def traffic_history(self):
        """Return the client's network traffic data over time.

        Requires a resource id (the client id) to have been set;
        check_for_resource_id() enforces that before the GET is issued.
        """
        self.check_for_resource_id()
        return self.get("/trafficHistory")
|
<reponame>lulrai/pubbot
package pubbot;
/*
* Main Class
*/
//Logs
import java.util.logging.Level;
import java.util.logging.Logger;
//JDA Utilities
//JDA
import com.jagrosh.jdautilities.command.CommandClient;
import com.jagrosh.jdautilities.command.CommandClientBuilder;
import com.jagrosh.jdautilities.commons.waiter.EventWaiter;
import net.dv8tion.jda.core.AccountType;
import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.JDABuilder;
import net.dv8tion.jda.core.OnlineStatus;
//Class imports
import net.dv8tion.jda.core.entities.Game;
import pubbot.admin.FormatCommand;
import pubbot.fun.AskMeCommand;
import pubbot.fun.DrinkCommand;
import pubbot.fun.RoleCommand;
import pubbot.fun.SayCommand;
import pubbot.utils.Constants;
/**
 * Application entry point: builds the JDA client, attaches the
 * jda-utilities command framework and registers this bot's commands.
 */
public class Bot {

    /** The single JDA instance for this bot process. */
    private static JDA jda;

    public static void main(String[] args) throws Exception {
        // Silence two noisy third-party loggers before the client spins up.
        Logger.getLogger("org.slf4j.impl.StaticLoggerBinder").setLevel(Level.OFF);
        Logger.getLogger("org.apache.http.client.protocol.ResponseProcessCookies").setLevel(Level.OFF);
        // Shared waiter, handed to commands that need interactive follow-ups.
        EventWaiter waiter = new EventWaiter();
        jda = new JDABuilder(AccountType.BOT)
            .setToken(Constants.CLIENT_SECRET_CODE)
            .addEventListener(commandClient(waiter))
            .addEventListener(waiter)
            //.addEventListener(new CustomCheck())
            .setStatus(OnlineStatus.ONLINE)
            .setGame(Game.playing("Connecting.."))
            .build();
        jda.setAutoReconnect(true);
        // Presumably pre-generates AskMeCommand's answer data — confirm in AskMeCommand.
        AskMeCommand.generateAMA();
    }

    /**
     * Builds the command client: prefix, owner id, status emojis, presence,
     * and the full command list.
     *
     * @param waiter shared EventWaiter for commands that need follow-up events
     * @return the configured CommandClient to register with JDA
     */
    private static CommandClient commandClient(EventWaiter waiter) {
        return new CommandClientBuilder()
            .setPrefix(Constants.PREFIX)
            .setOwnerId(Constants.BOT_OWNER_IDS[0])
            .setEmojis(Constants.SUCCESS, Constants.WARNING, Constants.ERROR)
            .setGame(Game.of(Game.GameType.WATCHING,"what I want, gtfo", null))
            .setStatus(OnlineStatus.ONLINE)
            .useHelpBuilder(false)
            .addCommands(
                //Commands go here
                new SayCommand(),
                new RoleCommand(),
                new FormatCommand(waiter),
                new AskMeCommand(),
                new DrinkCommand(waiter)
            )
            .build();
    }
}
|
// load-data.js
// not being used at this time
// going to update this when I do
// the react version (since it is a serverless
// function, this will be updated then)
exports.handler = async (event, context) => {
const allBody = JSON.parse(event.body);
const { sitelink } = JSON.parse(event.body);
console.log("load data.... " + allBody);
};
|
<gh_stars>0
import React from "react";
import { BrowserRouter as Router, Switch, Route } from "react-router-dom";
import "./App.css";
import "./assets/css/reflex.css";
import Navbar from "./components/Navbar";
import Landing from "./components/Landing";
import Profile from "./components/Profile";
import Footer from "./components/Footer";
// Top-level application shell: wraps the client-side routes in the global
// chrome (Navbar above, Footer below).
function App() {
  return (
    <Router>
      <div className="App">
        <Navbar />
        <Switch>
          {/* Landing page at the exact root path. */}
          <Route path="/" exact component={Landing} />
          {/* Any other single path segment is treated as a username. */}
          <Route path="/:username" exact component={Profile} />
        </Switch>
        <Footer />
      </div>
    </Router>
  );
}
export default App;
|
#!/bin/bash
# Copyright 2021 VMware Tanzu Community Edition contributors. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0

# Abort immediately if any command fails.
set -e

# Environment variables that must be non-empty before this script may proceed:
# vSphere connection/placement settings plus SSH jump-host credentials.
declare -a required_env_vars=("VSPHERE_CONTROL_PLANE_ENDPOINT"
"VSPHERE_SERVER"
"VSPHERE_SSH_AUTHORIZED_KEY"
"VSPHERE_USERNAME"
"VSPHERE_PASSWORD"
"VSPHERE_DATACENTER"
"VSPHERE_DATASTORE"
"VSPHERE_FOLDER"
"VSPHERE_NETWORK"
"VSPHERE_RESOURCE_POOL"
"JUMPER_SSH_HOST_IP"
"JUMPER_SSH_USERNAME"
"JUMPER_SSH_PRIVATE_KEY")

# Fail fast with a descriptive message on the first unset or empty variable.
for env_var in "${required_env_vars[@]}"
do
  if [ -z "$(printenv "${env_var}")" ]; then
    echo "Environment variable ${env_var} is empty! It's a required environment variable, please set it"
    exit 1
  fi
done
|
// Managed wrapper constructor: builds a universal (2-rotational-DoF) joint
// between two rigid bodies from a shared anchor point and two rotation axes.
UniversalConstraint::UniversalConstraint(RigidBody^ rigidBodyA, RigidBody^ rigidBodyB,
    Vector3 anchor, Vector3 axis1, Vector3 axis2)
    : Generic6DofConstraint(0) // Call base class constructor with 0 as the index
{
    // Initialize the universal constraint using the provided parameters
    btTransform frameInA, frameInB;
    frameInA.setOrigin(btVector3(anchor.x, anchor.y, anchor.z));
    // NOTE(review): the basis rows are filled with axis1, axis2 and an all-zero
    // third row, which is a singular matrix, not a valid rotation. Bullet's own
    // btUniversalConstraint derives the third axis as axis1 x axis2 — confirm
    // whether that was intended here.
    frameInA.getBasis().setValue(axis1.x, axis1.y, axis1.z, axis2.x, axis2.y, axis2.z, 0, 0, 0);
    frameInB.setOrigin(btVector3(anchor.x, anchor.y, anchor.z));
    frameInB.getBasis().setValue(axis1.x, axis1.y, axis1.z, axis2.x, axis2.y, axis2.z, 0, 0, 0);
    // NOTE(review): both frames are built from the same (apparently world-space)
    // anchor/axes; typically they should be transformed into each body's local
    // frame first — verify against the callers before changing.
    Native->setFrames(frameInA, frameInB);
}
// Shared application constants.
// API root; empty string means same-origin requests.
export const BASE_URL = '';
// Brand accent colour.
export const PRIMARY_COLOR = '#417505';
// data-* attribute name used to tag a rendered component with its type.
export const PREVIEW_COMPONENT_DATA_TYPE = 'data-component-type';
// Route for the login page.
export const LOGIN_ADDRESS = '/login';
// Page width bounds in px (mobile-style layout).
export const PAGE_MAX_WIDTH = 480;
export const NORMAL_WIDTH = 375;
// Base path segment (presumably the router basename) — confirm usage.
export const BASE_NAME = 'h5';
// Host serving static assets.
export const ASSET_DOMAIN = 'https://assets.maocanhua.cn';
// DOM id of the editor mount node.
export const APP_EDITOR_CONTAINER_ID = 'app-editor-container';
// Demo account credentials and fixture ids.
// NOTE(review): USER_PWD is a scrubbed placeholder ('<PASSWORD>') — a real
// value must be supplied via configuration before the demo login can work.
export const USER_PHONE = '12252691060';
export const USER_PWD = '<PASSWORD>';
export const USER_ID = 107;
export const CATEGORY_ID = 96;
export const PROVIDE_USER_ID = 77;
export const PROVIDE_CATEGORY_ID = 90;
|
<reponame>IonThruster/ClockSim
// Doxygen-generated search-index fragment: maps the search token
// 'generatorbaseptr' to the Catch::Generators::GeneratorBasePtr docs anchor.
// Regenerated by Doxygen — do not edit by hand.
var searchData=
[
  ['generatorbaseptr_1426',['GeneratorBasePtr',['../namespaceCatch_1_1Generators.html#a1519f304113619d7d18670e2f08276c0',1,'Catch::Generators']]]
];
def compress_sequence(nums):
    """Split ``nums`` into maximal runs of same-signed neighbours.

    Two consecutive values belong to the same run when their product is
    non-negative, i.e. they share a sign or at least one of them is zero.
    Concatenating the returned runs reproduces ``nums`` exactly.

    The original implementation appended ``nums[i-1]`` on every iteration,
    which duplicated each run-boundary element and dropped the final element
    (e.g. ``[9, -2, 6, 0, -7]`` produced ``[[9], [-2, -2], [6, 6, 0]]``).

    Args:
        nums: sequence of numbers.

    Returns:
        List of runs (lists); e.g. ``[[9], [-2], [6, 0, -7]]`` for the input
        ``[9, -2, 6, 0, -7]``. Empty input yields ``[]``.
    """
    if not nums:
        return []
    output = []
    temp = [nums[0]]  # current run; always holds at least one element
    for i in range(1, len(nums)):
        if nums[i - 1] * nums[i] >= 0:
            # Same sign (or a zero involved): extend the current run.
            temp.append(nums[i])
        else:
            # Sign flip: close the current run and start a new one.
            output.append(temp)
            temp = [nums[i]]
    output.append(temp)  # flush the final run
    return output
# Manual smoke test of the run-splitting when the module is executed directly.
print(compress_sequence([9, -2, 6, 0, -7]))
#!/bin/bash
# Fail on unset variables and errors; echo each command as it runs.
set -o nounset \
    -o errexit \
    -o verbose
# See what is in each keystore and truststore
# For every component below, print only the Alias/Entry lines of its keystore
# and truststore (all demo stores share the password "confluent").
for i in kafka1 kafka2 client schemaregistry restproxy connect connectorSA controlcenter ksqlDBserver ksqlDBUser appSA badapp clientListen zookeeper mds
do
  echo "------------------------------- $i keystore -------------------------------"
  keytool -list -v -keystore kafka.$i.keystore.jks -storepass confluent | grep -e Alias -e Entry
  echo "------------------------------- $i truststore -------------------------------"
  keytool -list -v -keystore kafka.$i.truststore.jks -storepass confluent | grep -e Alias -e Entry
done
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { Host } from 'src/database/entities/host.entity';
import { User } from 'src/database/entities/user.entity';
import { HostDto } from './dto/host.dto';
import { CreateHostDto } from './dto/create-host.dto';
import { UpdateHostDto } from './dto/update-host.dto';
@Injectable()
/**
 * CRUD service for Host entities, exposed to controllers via DTOs.
 * Unknown ids are reported by throwing an Error with a stable message.
 */
export class HostsService {
  constructor(
    @InjectRepository(Host)
    private readonly hostRepository: Repository<Host>,
  ) {}

  /**
   * Loads the Host entity for `id` or throws.
   * Extracted because findOne/update/remove all triplicated this
   * lookup-or-throw logic; the error message is unchanged.
   */
  private async getHostOrFail(id: string): Promise<Host> {
    const host = await this.hostRepository.findOne(id);
    if (!host) {
      throw new Error(`The host with id: ${id} does not exist!`);
    }
    return host;
  }

  /** Persists a new host built from `dto`, owned by `user`. */
  public async create(dto: CreateHostDto, user: User): Promise<CreateHostDto> {
    return this.hostRepository
      .save(dto.toEntity(user))
      .then((e) => CreateHostDto.fromEntity(e));
  }

  /** Returns every host, mapped to DTOs. */
  public async findAll(): Promise<HostDto[]> {
    const hosts = await this.hostRepository.find();
    return hosts.map((e) => HostDto.fromEntity(e));
  }

  /**
   * Returns the host with the given id.
   * @throws Error when no host with `id` exists.
   */
  public async findOne(id: string): Promise<HostDto> {
    return HostDto.fromEntity(await this.getHostOrFail(id));
  }

  /**
   * Applies `dto` (attributed to `user`) on top of the stored entity and saves.
   * @throws Error when no host with `id` exists.
   */
  public async update(
    id: string,
    dto: UpdateHostDto,
    user: User,
  ): Promise<UpdateHostDto> {
    const host = await this.getHostOrFail(id);
    Object.assign(host, dto.toEntity(user));
    return this.hostRepository
      .save(host)
      .then((e) => UpdateHostDto.fromEntity(e));
  }

  /**
   * Deletes the host and returns its last state as a DTO.
   * @throws Error when no host with `id` exists.
   */
  public async remove(id: string): Promise<HostDto> {
    const host = await this.getHostOrFail(id);
    return this.hostRepository.remove(host).then((e) => HostDto.fromEntity(e));
  }
}
|
<reponame>smagill/opensphere-desktop<gh_stars>10-100
package io.opensphere.mantle.datasources.impl;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.datasources.IDataSource;
import io.opensphere.mantle.datasources.IDataSourceConfig;
/**
 * Config for UrlDataSource objects.
 *
 * <p>All access to the backing source list is synchronized on this instance.
 */
@XmlRootElement(name = "config")
@XmlAccessorType(XmlAccessType.FIELD)
public class UrlSourceConfig implements IDataSourceConfig
{
    /** The sources. */
    @XmlElement(name = "source")
    private final List<UrlDataSource> mySources = New.list();

    @Override
    public synchronized boolean addSource(IDataSource source)
    {
        // Only UrlDataSource instances belong in this config; reject anything else.
        if (!(source instanceof UrlDataSource))
        {
            return false;
        }
        return mySources.add((UrlDataSource)source);
    }

    @Override
    public synchronized List<IDataSource> getSourceList()
    {
        // Return a new list built from the current sources rather than the internal list.
        return New.list(mySources);
    }

    @Override
    public synchronized boolean removeSource(IDataSource source)
    {
        return mySources.remove(source);
    }

    @Override
    public synchronized void updateSource(IDataSource source)
    {
        // Replace semantics: drop any equal entry first, then (re-)add the source.
        removeSource(source);
        addSource(source);
    }

    /**
     * Determines if the source is present in the config.
     *
     * @param source the source
     * @return whether the source is present
     */
    public synchronized boolean hasSource(IDataSource source)
    {
        return mySources.contains(source);
    }
}
|
<reponame>mouchtaris/jleon<filename>src/main/scala-2.12/gv/jleon/domain/Mirror.scala<gh_stars>1-10
package gv.jleon
package domain
import scala.language.{ implicitConversions }
import shapeless.{ HList, :: }
import gv.jleon.`type`.{ TaggedType }
import Mirror.{ BaseUrl, Prefix }
/**
 * A download mirror: a tagged base URL plus a path prefix.
 * `name` is carried along for identification and defaults to empty.
 */
final case class Mirror(
  baseUrl: BaseUrl,
  prefix: Prefix,
  name: String = ""
) {
  /** Resolves `path` by appending it to this mirror's base URL path. */
  def urlFor(path: Uri.Path): Uri = baseUrl.withPath(baseUrl.path ++ path)
}

/**
 * Companion: declares the tagged field types and an `Interpretation`
 * typeclass so that any value with an interpretation converts
 * implicitly to a [[Mirror]].
 */
object Mirror extends AnyRef
  with MirrorFactory {

  // Tagged wrappers (see TaggedType) — distinct types for the two fields.
  final implicit object BaseUrl extends TaggedType[Uri]
  type BaseUrl = BaseUrl.t
  final implicit object Prefix extends TaggedType[String]
  type Prefix = Prefix.t

  /** Typeclass: how to extract a Mirror's fields from a value of type T. */
  trait Interpretation[T] extends Any {
    type Self = T
    def baseUrl(implicit self: T): BaseUrl
    def prefix(implicit self: T): Prefix
  }

  /** Implicit conversion: any T with an Interpretation becomes a Mirror (name left at its default). */
  final implicit def apply[T: Interpretation](self: T): Mirror = {
    val i: Interpretation[T] = implicitly
    // Make `self` implicitly available to the interpretation's accessors.
    implicit val _self = self
    Mirror(
      baseUrl = i.baseUrl,
      prefix = i.prefix
    )
  }

  /** Interpretation for shapeless records whose first two elements are BaseUrl :: Prefix. */
  final implicit def recordI[Rest <: HList] = new Interpretation[BaseUrl :: Prefix :: Rest] {
    override def baseUrl(implicit self: Self): BaseUrl = self match {
      case u :: _ ⇒ u
    }
    override def prefix(implicit self: Self): Prefix = self match {
      case _ :: p :: _ ⇒ p
    }
  }
}
|
<gh_stars>10-100
#ifndef VECTOR3_H
#define VECTOR3_H
#include <math.h>
#include <iostream>
using std::ostream;
#ifndef _MSC_VER
using std::max;
#endif
namespace Potree{

/**
 * Minimal 3-component vector.
 * T is expected to be an arithmetic type (float/double/int).
 */
template<class T>
class Vector3{

public:
	T x = 0;
	T y = 0;
	T z = 0;

	Vector3() = default;

	Vector3(T x, T y, T z){
		this->x = x;
		this->y = y;
		this->z = z;
	}

	// Broadcast constructor: all three components take the same value.
	Vector3(T value){
		this->x = value;
		this->y = value;
		this->z = value;
	}

	// Memberwise copy; identical to the compiler-generated version.
	Vector3(const Vector3<T> &other) = default;

	~Vector3() = default;

	// Euclidean norm. NOTE: for integral T, sqrt() computes in floating
	// point and the result is truncated back to T.
	T length() const {
		return sqrt(x*x + y*y + z*z);
	}

	// Squared norm; avoids the sqrt when only comparisons are needed.
	T squaredLength() const {
		return x*x + y*y + z*z;
	}

	T distanceTo(const Vector3<T> &p) const{
		return ((*this) - p).length();
	}

	T squaredDistanceTo(const Vector3<T> &p) const{
		return ((*this) - p).squaredLength();
	}

	// Largest of the three components (unqualified max: see the
	// MSVC-guarded `using std::max` at the top of this header).
	T maxValue() const {
		return max(x, max(y,z));
	}

	Vector3<T> operator-(const Vector3<T>& right) const {
		return Vector3<T>(x - right.x, y - right.y, z - right.z);
	}

	Vector3<T> operator+(const Vector3<T>& right) const {
		return Vector3<T>(x + right.x, y + right.y, z + right.z);
	}

	// Adds the scalar to every component.
	Vector3<T> operator+(const T right) const {
		return Vector3<T>(x + right, y + right, z + right);
	}

	// Divides every component by the scalar (no zero check).
	Vector3<T> operator/(const T &a) const{
		return Vector3<T>(x / a, y / a, z / a);
	}

	friend ostream &operator<<( ostream &output, const Vector3<T> &value ){
		output << "[" << value.x << ", " << value.y << ", " << value.z << "]" ;
		return output;
	}

};

}
#endif
|
package com.seatgeek.placesautocomplete.json;
import com.seatgeek.placesautocomplete.model.Place;
import com.seatgeek.placesautocomplete.model.PlacesAutocompleteResponse;
import com.seatgeek.placesautocomplete.model.PlacesDetailsResponse;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
/**
 * Placeholder parser used when Gson is absent from the classpath;
 * every operation fails with the same {@link UnsupportedOperationException}.
 */
class AndroidPlacesApiJsonParser implements PlacesApiJsonParser {

    /** Single failure message shared by every stubbed method. */
    private static final String NOT_IMPLEMENTED =
            "Not yet implemented, please have Gson on your classpath";

    @Override
    public PlacesAutocompleteResponse autocompleteFromStream(final InputStream is) {
        throw notImplemented();
    }

    @Override
    public PlacesDetailsResponse detailsFromStream(final InputStream is) throws JsonParsingException {
        throw notImplemented();
    }

    @Override
    public List<Place> readHistoryJson(final InputStream in) throws JsonParsingException {
        throw notImplemented();
    }

    @Override
    public void writeHistoryJson(final OutputStream os, final List<Place> places) throws JsonWritingException {
        throw notImplemented();
    }

    /** Builds the common not-implemented failure. */
    private static UnsupportedOperationException notImplemented() {
        return new UnsupportedOperationException(NOT_IMPLEMENTED);
    }
}
|
package app.monitor.job;
import core.framework.internal.log.LogManager;
import core.framework.internal.stat.Stats;
import core.framework.log.message.StatMessage;
import core.framework.util.Exceptions;
import java.time.Instant;
import java.util.Map;
/**
 * Builds {@link StatMessage} payloads for stat collection results.
 *
 * @author neo
 */
class StatMessageFactory {
    /** Successful collection: copy the collected stats/result into the message. */
    static StatMessage stats(String app, String host, Stats stats) {
        StatMessage message = baseMessage(app, host);
        message.result = stats.result();
        message.stats = stats.stats;
        message.errorCode = stats.errorCode;
        message.errorMessage = stats.errorMessage;
        return message;
    }

    /** Collection failure: report an ERROR message carrying the stack trace. */
    static StatMessage failedToCollect(String app, String host, Throwable e) {
        StatMessage message = baseMessage(app, host);
        message.result = "ERROR";
        message.errorCode = "FAILED_TO_COLLECT";
        message.errorMessage = e.getMessage();
        message.info = Map.of("stack_trace", Exceptions.stackTrace(e));
        return message;
    }

    /** Shared scaffolding: id/date/app/host are set identically by both factories. */
    private static StatMessage baseMessage(String app, String host) {
        var now = Instant.now();
        var message = new StatMessage();
        message.id = LogManager.ID_GENERATOR.next(now);
        message.date = now;
        message.app = app;
        message.host = host;
        return message;
    }
}
|
import middy from '@middy/core';
import responseResponse, { RequestResponseContext, RequestResponseMiddleWare } from '../src';
import { invoke } from '../testUtils';
describe('The middleware', () => {
  // Build a fresh event per test so no test observes another's mutations.
  const rootEvent = () => ({ path: '/' });
  // Handler that simply echoes its context back through the callback.
  const echoContext = () =>
    middy((event, context, callback) => callback(null, context));

  it('should return the objects inside the context', async () => {
    const handler = echoContext();
    handler.use(responseResponse());
    const context = await invoke(handler, rootEvent());
    expect(context.response).toBeDefined();
    expect(context.request).toBeDefined();
  });

  it('should set the correct path', async () => {
    const handler = echoContext();
    handler.use(responseResponse());
    const context = await invoke(handler, rootEvent());
    expect(context.request.url).toEqual('/');
  });

  it('should set the status code', async () => {
    const handler = echoContext();
    handler.use(responseResponse());
    const context = await invoke(handler, rootEvent());
    context.response.statusCode = 404;
    expect(context.response.statusCode).toEqual(404);
  });

  it('should call the callback when calling the end function', async () => {
    const handler = middy((event, context: RequestResponseContext) => {
      jest.spyOn(context.response, 'end')
      context.response.statusCode = 200;
      context.response.end('<html></html>');
    });
    handler.use<RequestResponseMiddleWare>(responseResponse());
    const response = await invoke(handler, rootEvent());
    expect(response.end).toHaveBeenCalled();
    expect(response.toResponse.body).toEqual('<html></html>')
    expect(response.statusCode).toEqual(200)
  });
});
|
#!/usr/bin/env bash
# Copyright (C) 2015 Red Hat, Inc. All rights reserved.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# Harness flags: this test is not applicable under clvmd or lvmpolld.
SKIP_WITH_CLVMD=1
SKIP_WITH_LVMPOLLD=1

. lib/inittest

# The test formats an LV with ext3, so mkfs.ext3 must be available.
which mkfs.ext3 || skip

aux prepare_vg 2

# Note: inittest.sh sets LVM_SYSTEM_DIR to 'just' etc
etc_lv="$DM_DEV_DIR/$vg/$lv1"

# EXIT trap handler: best-effort unmount, then tear the test environment down.
cleanup_mounted_and_teardown()
{
	umount "$mount_dir" || true
	aux teardown
}

# Keep only dev1 in the VG for now; dev2 is re-added/removed below.
vgreduce $vg "$dev2"
lvcreate -n $lv1 -l 20%FREE $vg
mkfs.ext3 -b4096 -j "$etc_lv"

#
# check read-only archive dir
#
mount_dir="etc/archive"
trap 'cleanup_mounted_and_teardown' EXIT
mkdir -p "$mount_dir"
# Mount the ext3 LV read-only over the archive dir so archiving cannot write.
mount -n -r "$etc_lv" "$mount_dir"
aux lvmconf "backup/archive = 1" "backup/backup = 1"

# cannot archive to read-only - requires user to specify -An
not lvcreate -n $lv2 -l 10%FREE $vg
lvcreate -An -n $lv2 -l 10%FREE $vg
not vgextend $vg "$dev2"
vgextend -An $vg "$dev2"
umount "$mount_dir" || true
vgreduce $vg "$dev2"

#
# check read-only backup dir
#
mount_dir="etc/backup"
mount -n -r "$etc_lv" "$mount_dir"

# Must not fail on making backup
vgscan
lvcreate -An -n $lv3 -l 10%FREE $vg
vgextend $vg "$dev2"

#
# Now check both archive & backup read-only
#
rm -rf etc/archive
ln -s backup etc/archive

# Must not fail on making backup
vgscan
lvcreate -An -n $lv4 -l 10%FREE $vg
umount "$mount_dir" || true

# TODO maybe also support --ignorelockingfailure ??
vgremove -ff $vg
|
#!/usr/bin/env bash
# Build the Jekyll site into _site.
# -x: echo each command as it runs; -e: abort on the first failing command.
set -xe
mkdir -p _site
bundle exec jekyll build
|
<reponame>DeDuckProject/react-wordle
// generated by python script:
export const WORDS = [
'SZS|-⟩=|-⟩',
'SSH|0⟩=|-⟩',
'IHX|+⟩=|0⟩',
'ZZH|1⟩=|-⟩',
'XIS|0⟩=|1⟩',
'XHI|+⟩=|1⟩',
'SXY|1⟩=|1⟩',
'ZZH|+⟩=|0⟩',
'IZS|0⟩=|0⟩',
'XX|0⟩=Z|0⟩',
'|1⟩=ZSY|0⟩',
'Z|-⟩=SS|-⟩',
'XSZ|0⟩=|1⟩',
'S|0⟩=TS|0⟩',
'SZ|-⟩=S|+⟩',
'IZZ|0⟩=|0⟩',
'TT|i-⟩=|+⟩',
'IIH|-⟩=|1⟩',
'HSI|0⟩=|+⟩',
'|+⟩=SI|i-⟩',
'I|0⟩=HI|+⟩',
'HXI|+⟩=|0⟩',
'|-⟩=HHZ|+⟩',
'XIX|-⟩=|-⟩',
'SZ|-⟩=|i+⟩',
'|0⟩=YSX|0⟩',
'|i-⟩=SH|1⟩',
'|+⟩=HII|0⟩',
'S|-⟩=TT|-⟩',
'|1⟩=XTH|+⟩',
'I|0⟩=TT|0⟩',
'HHS|0⟩=|0⟩',
'Z|0⟩=ZZ|0⟩',
'HX|0⟩=I|-⟩',
'ISS|+⟩=|-⟩',
'ZYY|0⟩=|0⟩',
'-|+⟩=ZX|-⟩',
'H|-⟩=HH|1⟩',
'|+⟩=ZS|i+⟩',
'|0⟩=TSS|0⟩',
'HH|+⟩=Z|-⟩',
'Z|0⟩=SH|+⟩',
'SXH|-⟩=|0⟩',
'ZX|0⟩=Z|1⟩',
'-|1⟩=H-|-⟩',
'|1⟩=XZH|+⟩',
'SX|1⟩=T|0⟩',
'|1⟩=XHI|+⟩',
'SS|-⟩=Z|-⟩',
'HI|0⟩=H|0⟩',
'SZH|+⟩=|0⟩',
'TZI|0⟩=|0⟩',
'XX|-⟩=H|1⟩',
'IIX|1⟩=|0⟩',
'X|+⟩=ZI|-⟩',
'HH|0⟩=H|+⟩',
'IZ|0⟩=I|0⟩',
'II|+⟩=I|+⟩',
'|-⟩=ZIZ|-⟩',
'Z|0⟩=SS|0⟩',
'TXI|1⟩=|0⟩',
'Z|-⟩=HS|0⟩',
'|-⟩=IHX|0⟩',
'XZ|-⟩=I|+⟩',
'I|0⟩=IH|+⟩',
'|1⟩=XSZ|0⟩',
'S|0⟩=TH|+⟩',
'|i+⟩=SI|+⟩',
'I|0⟩=YY|0⟩',
'HII|-⟩=|1⟩',
'|1⟩=XXH|-⟩',
'S|i+⟩=H|1⟩',
'|+⟩=XSS|-⟩',
'|1⟩=XZT|0⟩',
'ZH|+⟩=H|+⟩',
'X|0⟩=XT|0⟩',
'ITI|0⟩=|0⟩',
'II|+⟩=X|+⟩',
'HZ|+⟩=H|-⟩',
'|0⟩=ZHI|+⟩',
'|-⟩=SIS|+⟩',
'-|-⟩=H-|1⟩',
'ZT|+⟩=T|-⟩',
'|0⟩=ZSH|+⟩',
'Z|+⟩=HX|0⟩',
'SH|1⟩=S|-⟩',
'Z|0⟩=TX|1⟩',
'I|0⟩=SH|+⟩',
'IY|+⟩=Y|+⟩',
'HI|-⟩=X|0⟩',
'|-⟩=SZS|-⟩',
'II|0⟩=S|0⟩',
'YH|-⟩=Y|1⟩',
'ZS|-⟩=|i+⟩',
'|0⟩=SIT|0⟩',
'IH|-⟩=X|0⟩',
'|+⟩=YYX|+⟩',
'XT|0⟩=X|0⟩',
'Y|-⟩=YI|-⟩',
'X|1⟩=TT|0⟩',
'|0⟩=ZSS|0⟩',
'ZI|-⟩=X|+⟩',
'X|1⟩=HX|+⟩',
'S|i-⟩=H|0⟩',
'HHH|-⟩=|1⟩',
'TI|1⟩=T|1⟩',
'ZXH|-⟩=|0⟩',
'TI|0⟩=Z|0⟩',
'XX|0⟩=I|0⟩',
'II|-⟩=Z|+⟩',
'ZHX|+⟩=|0⟩',
'H|0⟩=HZ|0⟩',
'|-⟩=YXY|-⟩',
'S|0⟩=XH|-⟩',
'IY|1⟩=Y|1⟩',
'SZ|0⟩=Z|0⟩',
'ZHX|0⟩=|+⟩',
'|+⟩=ZX-|-⟩',
'ZIX|+⟩=|-⟩',
'|1⟩=SHY|+⟩',
'T-|0⟩=-|0⟩',
'ZYY|-⟩=|+⟩',
'|1⟩=IXS|0⟩',
'TZ|0⟩=I|0⟩',
'X|+⟩=XZ|-⟩',
'|0⟩=TZI|0⟩',
'Z|+⟩=ZX|+⟩',
'IH|1⟩=Z|+⟩',
'I|0⟩=SX|1⟩',
'YXY|-⟩=|-⟩',
'SHZ|-⟩=|0⟩',
'|+⟩=IXZ|-⟩',
'I|-⟩=IH|1⟩',
'TT|0⟩=T|0⟩',
'|1⟩=SY-|0⟩',
'S|+⟩=SZ|-⟩',
'Y|0⟩=IY|0⟩',
'|+⟩=ZZZ|-⟩',
'ZZS|0⟩=|0⟩',
'H|+⟩=II|0⟩',
'IXS|0⟩=|1⟩',
'Z|-⟩=XH|0⟩',
'|0⟩=ITI|0⟩',
'ZX|1⟩=H|+⟩',
'YH|+⟩=Y|0⟩',
'H|+⟩=TH|+⟩',
'H|+⟩=TI|0⟩',
'TSX|1⟩=|0⟩',
'XXH|+⟩=|0⟩',
'HH|0⟩=S|0⟩',
'|-⟩=ZYY|+⟩',
'SIS|+⟩=|-⟩',
'|0⟩=SSS|0⟩',
'|0⟩=ZTT|0⟩',
'XH|+⟩=H|-⟩',
'SHY|+⟩=|1⟩',
'YTY|1⟩=|1⟩',
'Z|+⟩=X-|-⟩',
'IIS|0⟩=|0⟩',
'HX-|-⟩=|1⟩',
'XI|+⟩=H|0⟩',
'HIX|+⟩=|0⟩',
'ZH|0⟩=H|1⟩',
'SH|+⟩=I|0⟩',
'T|1⟩=TH|-⟩',
'HH|-⟩=H|1⟩',
'I|1⟩=YY|1⟩',
'ZH|-⟩=Z|1⟩',
'H|1⟩=ZX|+⟩',
'|1⟩=HIH|1⟩',
'YZ|0⟩=Y|0⟩',
'|-⟩=TT|i+⟩',
'IS|0⟩=X|1⟩',
'|-⟩=XIX|-⟩',
'|-⟩=XXI|-⟩',
'|0⟩=SZX|1⟩',
'X|1⟩=IX|1⟩',
'S|+⟩=YS|+⟩',
'SSZ|1⟩=|1⟩',
'|-⟩=SY|i+⟩',
'X|+⟩=HZ|0⟩',
'|1⟩=HXX|-⟩',
'|0⟩=IZT|0⟩',
'IH|+⟩=Z|0⟩',
'-|0⟩=S-|0⟩',
'ZI|-⟩=I|+⟩',
'YYH|+⟩=|0⟩',
'H|0⟩=HI|0⟩',
'SH|+⟩=S|0⟩',
'I|0⟩=SZ|0⟩',
'X|+⟩=S|i-⟩',
'I|-⟩=HI|1⟩',
'IX|-⟩=X|-⟩',
'Z|0⟩=TI|0⟩',
'H|1⟩=XX|-⟩',
'|+⟩=XHZ|0⟩',
'SST|0⟩=|0⟩',
'TZH|+⟩=|0⟩',
'HSS|-⟩=|0⟩',
'I|+⟩=HI|0⟩',
'IHH|1⟩=|1⟩',
'III|-⟩=|-⟩',
'|-⟩=XXZ|+⟩',
'STI|0⟩=|0⟩',
'IZ|0⟩=Z|0⟩',
'|1⟩=ZZI|1⟩',
'Z|-⟩=HI|0⟩',
'HHT|0⟩=|0⟩',
'|-⟩=HZZ|1⟩',
'TZ|0⟩=X|1⟩',
'Z|0⟩=ZH|+⟩',
'X-|0⟩=-|1⟩',
'S|i-⟩=I|+⟩',
'TI|0⟩=T|0⟩',
'ZZI|0⟩=|0⟩',
'|+⟩=IZZ|+⟩',
'XHX|+⟩=|1⟩',
'XZ|0⟩=H|-⟩',
'|+⟩=TTS|-⟩',
'XZ|0⟩=X|0⟩',
'TI|0⟩=S|0⟩',
'|1⟩=XYS|1⟩',
'ZZ|-⟩=H|1⟩',
'|0⟩=XII|1⟩',
'HH|-⟩=I|-⟩',
'|0⟩=ZIX|1⟩',
'-|-⟩=XI|-⟩',
'HI|0⟩=I|+⟩',
'HS|i-⟩=|0⟩',
'HZZ|1⟩=|-⟩',
'|0⟩=ZIZ|0⟩',
'ZHI|1⟩=|+⟩',
'YY|+⟩=Z|-⟩',
'SHI|+⟩=|0⟩',
'SIS|0⟩=|0⟩',
'TH|+⟩=Z|0⟩',
'XZ-|+⟩=|-⟩',
'I|+⟩=YY|+⟩',
'S|0⟩=ZZ|0⟩',
'SH|+⟩=X|1⟩',
'Y|0⟩=SH|-⟩',
'-|1⟩=IZ|1⟩',
'TIH|+⟩=|0⟩',
'HX|+⟩=S|0⟩',
'IH|-⟩=I|1⟩',
'ITS|0⟩=|0⟩',
'HXX|+⟩=|0⟩',
'Z|i-⟩=S|+⟩',
'XI|-⟩=X|-⟩',
'H|+⟩=HI|+⟩',
'IH|0⟩=Z|-⟩',
'HXH|0⟩=|0⟩',
'XIH|-⟩=|0⟩',
'XS|0⟩=H|-⟩',
'|+⟩=YIY|+⟩',
'ZS|0⟩=Z|0⟩',
'ZX|1⟩=S|0⟩',
'|-⟩=IS|i+⟩',
'HIX|0⟩=|-⟩',
'TH|0⟩=T|+⟩',
'HHZ|-⟩=|+⟩',
'H|0⟩=XZ|-⟩',
'XHZ|+⟩=|0⟩',
'|+⟩=ZZI|+⟩',
'ZH|+⟩=T|0⟩',
'H|1⟩=YY|-⟩',
'X|1⟩=SX|1⟩',
'SYS|+⟩=|-⟩',
'X-|+⟩=-|+⟩',
'SY|i+⟩=|-⟩',
'TS|0⟩=T|0⟩',
'|i+⟩=TT|+⟩',
'|0⟩=XXX|1⟩',
'I|-⟩=ZZ|-⟩',
'SH|+⟩=Z|0⟩',
'I|-⟩=II|-⟩',
'HZ|0⟩=H|0⟩',
'|0⟩=STI|0⟩',
'ZSS|-⟩=|-⟩',
'Y|0⟩=SI|1⟩',
'XIX|+⟩=|+⟩',
'|1⟩=XZZ|0⟩',
'|i+⟩=Y|i+⟩',
'IS|1⟩=Y|0⟩',
'|-⟩=ZS|i-⟩',
'HZ|1⟩=-|-⟩',
'Z|0⟩=HX|+⟩',
'HZ|-⟩=I|0⟩',
'|-⟩=ISS|+⟩',
'|1⟩=XXX|0⟩',
'I|-⟩=SS|+⟩',
'|-⟩=IZH|0⟩',
'Z|1⟩=X-|0⟩',
'I|+⟩=IX|+⟩',
'TZ|0⟩=T|0⟩',
'|-⟩=TST|+⟩',
'|-⟩=XI-|-⟩',
'|+⟩=XIH|0⟩',
'|+⟩=IS|i-⟩',
'|0⟩=HHI|0⟩',
'-|+⟩=X-|+⟩',
'XT|0⟩=I|1⟩',
'ZS|+⟩=S|-⟩',
'|0⟩=SST|0⟩',
'|0⟩=IIX|1⟩',
'|i-⟩=I|i-⟩',
'HI|1⟩=Z|+⟩',
'S|0⟩=IZ|0⟩',
'Z|+⟩=SS|+⟩',
'|0⟩=XHH|1⟩',
'H|+⟩=SI|0⟩',
'TT|0⟩=X|1⟩',
'|1⟩=IZZ|1⟩',
'I|1⟩=XZ|0⟩',
'Z|-⟩=HH|+⟩',
'H-|0⟩=-|+⟩',
'H|0⟩=ZH|1⟩',
'TH|+⟩=I|0⟩',
'S|1⟩=TT|1⟩',
'SSZ|0⟩=|0⟩',
'XI|1⟩=H|+⟩',
'|0⟩=ITS|0⟩',
'SY-|0⟩=|1⟩',
'|+⟩=ZYY|-⟩',
'|+⟩=III|+⟩',
'HSH|+⟩=|+⟩',
'IHH|+⟩=|+⟩',
'IT|-⟩=T|-⟩',
'HI|+⟩=X|1⟩',
'XXI|0⟩=|0⟩',
'S|0⟩=ZT|0⟩',
'SSI|+⟩=|-⟩',
'S|0⟩=II|0⟩',
'SI|0⟩=I|0⟩',
'|0⟩=ZHZ|-⟩',
'|0⟩=XZZ|1⟩',
'|0⟩=ITT|0⟩',
'|0⟩=SIH|+⟩',
'I|-⟩=X-|-⟩',
'THX|+⟩=|0⟩',
'T|0⟩=ZS|0⟩',
'ZT|0⟩=I|0⟩',
'|+⟩=HSH|+⟩',
'X|+⟩=HS|0⟩',
'|0⟩=TSZ|0⟩',
'X|1⟩=HZ|-⟩',
'X|0⟩=IH|-⟩',
'XXS|0⟩=|0⟩',
'HXS|0⟩=|-⟩',
'HHI|1⟩=|1⟩',
'YI|-⟩=Y|-⟩',
'H|0⟩=ZI|-⟩',
'I|i+⟩=S|+⟩',
'Z|0⟩=ZX|1⟩',
'SS|+⟩=Z|+⟩',
'|0⟩=HIX|+⟩',
'|1⟩=XYY|0⟩',
'ZH|+⟩=X|1⟩',
'|1⟩=HIZ|+⟩',
'STZ|0⟩=|0⟩',
'XIH|+⟩=|1⟩',
'T|0⟩=HZ|-⟩',
'|0⟩=ITX|1⟩',
'|0⟩=ZIS|0⟩',
'HZ|0⟩=I|+⟩',
'|1⟩=IXZ|0⟩',
'I|-⟩=S|i+⟩',
'HHI|-⟩=|-⟩',
'X|1⟩=ZX|1⟩',
'IXX|-⟩=|-⟩',
'YY|0⟩=H|+⟩',
'T|0⟩=IT|0⟩',
'XI|0⟩=I|1⟩',
'II|-⟩=H|1⟩',
'H|-⟩=IX|0⟩',
'I|1⟩=ZZ|1⟩',
'T|0⟩=SH|+⟩',
'Y|1⟩=YI|1⟩',
'-|1⟩=X-|0⟩',
'HZH|1⟩=|0⟩',
'SX|1⟩=I|0⟩',
'XXZ|0⟩=|0⟩',
'Y|+⟩=YH|0⟩',
'SS|+⟩=I|-⟩',
'THH|0⟩=|0⟩',
'SZI|0⟩=|0⟩',
'|+⟩=IZH|1⟩',
'I|+⟩=ZI|-⟩',
'ZX|0⟩=-|1⟩',
'|-⟩=IHI|1⟩',
'Z|+⟩=ZI|+⟩',
'TZ|-⟩=T|+⟩',
'XI|+⟩=Z|-⟩',
'XZ|-⟩=X|+⟩',
'X|1⟩=IZ|0⟩',
'X|+⟩=IZ|-⟩',
'ZZZ|+⟩=|-⟩',
'|1⟩=XHZ|-⟩',
'XH|-⟩=Z|0⟩',
'|0⟩=ZZX|1⟩',
'I|+⟩=IZ|-⟩',
'T|1⟩=TI|1⟩',
'I|-⟩=HX|0⟩',
'TZ|0⟩=S|0⟩',
'ZIZ|0⟩=|0⟩',
'X|1⟩=HH|0⟩',
'H|-⟩=II|1⟩',
'I|1⟩=IH|-⟩',
'IZ|-⟩=Z|-⟩',
'TS|0⟩=I|0⟩',
'XXZ|+⟩=|-⟩',
'XHZ|-⟩=|1⟩',
'T|0⟩=XX|0⟩',
'XH|0⟩=I|+⟩',
'|+⟩=ZHX|0⟩',
'SZ|0⟩=T|0⟩',
'S|0⟩=HX|+⟩',
'ZZ|+⟩=X|+⟩',
'HI|1⟩=I|-⟩',
'S|0⟩=IT|0⟩',
'TX|1⟩=Z|0⟩',
'IT|0⟩=I|0⟩',
'|-⟩=ZZH|1⟩',
'-|1⟩=ZX|0⟩',
'S|0⟩=HZ|-⟩',
'|0⟩=TTX|1⟩',
'T|-⟩=IT|-⟩',
'|+⟩=XII|+⟩',
'S|1⟩=YH|+⟩',
'|0⟩=IZS|0⟩',
'I|-⟩=ZI|+⟩',
'ZIX|1⟩=|0⟩',
'|+⟩=IHI|0⟩',
'|0⟩=TZH|+⟩',
'|0⟩=IYS|1⟩',
'ZT|0⟩=T|0⟩',
'SZX|1⟩=|0⟩',
'X|0⟩=ZZ|1⟩',
'ZZ|0⟩=X|1⟩',
'ZTS|0⟩=|0⟩',
'|1⟩=ZIZ|1⟩',
'H|+⟩=ZS|0⟩',
'Z-|+⟩=-|-⟩',
'|-⟩=IXX|-⟩',
'Z|0⟩=HI|+⟩',
'ZX-|0⟩=|1⟩',
'ZSH|+⟩=|0⟩',
'H|-⟩=Z-|1⟩',
'Y|+⟩=YZ|-⟩',
'HHX|+⟩=|+⟩',
'IX|1⟩=H|+⟩',
'IIH|1⟩=|-⟩',
'ZII|-⟩=|+⟩',
'X-|1⟩=-|0⟩',
'XIT|0⟩=|1⟩',
'HS|0⟩=X|+⟩',
'XZZ|0⟩=|1⟩',
'IZ|-⟩=I|+⟩',
'S|+⟩=SX|+⟩',
'IZH|0⟩=|-⟩',
'ZX|+⟩=H|1⟩',
'|-⟩=ZHH|+⟩',
'-|0⟩=T-|0⟩',
'Z|0⟩=TT|0⟩',
'|0⟩=SHZ|-⟩',
'X|1⟩=XH|-⟩',
'Z-|1⟩=I|1⟩',
'Z|-⟩=XI|+⟩',
'S|0⟩=TX|1⟩',
'HIH|0⟩=|0⟩',
'S|+⟩=SH|0⟩',
'|+⟩=XXZ|-⟩',
'|-⟩=YYI|-⟩',
'TI|+⟩=T|+⟩',
'SX|1⟩=S|0⟩',
'|0⟩=SZS|0⟩',
'|0⟩=TIS|0⟩',
'S-|0⟩=-|0⟩',
'STT|+⟩=|-⟩',
'X-|-⟩=H|1⟩',
'HTZ|0⟩=|+⟩',
'XX|0⟩=X|1⟩',
'|1⟩=HHX|0⟩',
'|+⟩=HZZ|0⟩',
'X|1⟩=IH|+⟩',
'|1⟩=XTZ|0⟩',
'|1⟩=HZI|+⟩',
'S|0⟩=ST|0⟩',
'IZ|+⟩=I|-⟩',
'|0⟩=HZH|1⟩',
'IH|+⟩=T|0⟩',
'|+⟩=HZS|0⟩',
'XSI|0⟩=|1⟩',
'T|0⟩=TT|0⟩',
'|-⟩=ZIH|0⟩',
'ZHH|+⟩=|-⟩',
'IY|-⟩=Y|-⟩',
'X|+⟩=HH|+⟩',
'ZT|0⟩=Z|0⟩',
'YZ|0⟩=S|1⟩',
'S|1⟩=YI|0⟩',
'TZS|0⟩=|0⟩',
'YZ|-⟩=Y|+⟩',
'-|0⟩=X-|1⟩',
'XI|+⟩=X|+⟩',
'|0⟩=XXS|0⟩',
'ZZT|0⟩=|0⟩',
'IIZ|+⟩=|-⟩',
'ZT|0⟩=X|1⟩',
'Y|i+⟩=|i+⟩',
'XXX|1⟩=|0⟩',
'|1⟩=HX-|-⟩',
'X|1⟩=ZT|0⟩',
'ZZ|1⟩=X|0⟩',
'HS|0⟩=H|0⟩',
'ZII|0⟩=|0⟩',
'TIS|0⟩=|0⟩',
'|-⟩=HXS|0⟩',
'XZX|1⟩=|1⟩',
'|0⟩=HYY|+⟩',
'SH|-⟩=S|1⟩',
'XHH|1⟩=|0⟩',
'Z-|1⟩=X|0⟩',
'HIZ|0⟩=|+⟩',
'Z|-⟩=ZI|-⟩',
'HX|-⟩=Z|1⟩',
'SXX|0⟩=|0⟩',
'X-|0⟩=Z|1⟩',
'IZZ|+⟩=|+⟩',
'XYY|+⟩=|+⟩',
'ZTT|0⟩=|0⟩',
'|+⟩=XHH|+⟩',
'|0⟩=TZZ|0⟩',
'ZSI|0⟩=|0⟩',
'|0⟩=SIX|1⟩',
'TT|+⟩=S|+⟩',
'|i-⟩=SI|-⟩',
'HTX|1⟩=|+⟩',
'SZ|0⟩=I|0⟩',
'-|+⟩=Z-|-⟩',
'|+⟩=ZHH|-⟩',
'HSZ|0⟩=|+⟩',
'ZZ|0⟩=I|0⟩',
'ZH|+⟩=I|0⟩',
'YX|0⟩=Y|1⟩',
'H|0⟩=S|i-⟩',
'STS|0⟩=|0⟩',
'I|0⟩=IT|0⟩',
'Z|+⟩=HH|-⟩',
'IIH|+⟩=|0⟩',
'TIZ|0⟩=|0⟩',
'|-⟩=ZXI|+⟩',
'HX|+⟩=T|0⟩',
'I|0⟩=TI|0⟩',
'XX|-⟩=Z|+⟩',
'|+⟩=HZT|0⟩',
'I|0⟩=IS|0⟩',
'|0⟩=ZTZ|0⟩',
'S|i+⟩=I|-⟩',
'ZX|1⟩=T|0⟩',
'YYH|-⟩=|1⟩',
'|0⟩=SSZ|0⟩',
'Z|1⟩=H-|-⟩',
'TX|1⟩=X|1⟩',
'YYI|-⟩=|-⟩',
'|0⟩=ISS|0⟩',
'XIZ|-⟩=|+⟩',
'HXI|0⟩=|-⟩',
'|1⟩=HHH|-⟩',
'H|-⟩=XH|+⟩',
'H|+⟩=IH|+⟩',
'Y|0⟩=YS|0⟩',
'XX|-⟩=I|-⟩',
'HX|+⟩=Z|0⟩',
'S|1⟩=SI|1⟩',
'HHZ|0⟩=|0⟩',
'IZX|1⟩=|0⟩',
'XX|0⟩=H|+⟩',
'XTX|1⟩=|1⟩',
'HZZ|-⟩=|1⟩',
'Z|0⟩=YS|1⟩',
'IX|1⟩=S|0⟩',
'ITX|1⟩=|0⟩',
'XHS|0⟩=|+⟩',
'IZI|+⟩=|-⟩',
'H|0⟩=YY|+⟩',
'X|1⟩=TI|0⟩',
'I|0⟩=ZT|0⟩',
'IS|+⟩=S|+⟩',
'|1⟩=ZZH|-⟩',
'TZ|0⟩=H|+⟩',
'ZII|+⟩=|-⟩',
'|1⟩=HII|-⟩',
'I-|-⟩=X|-⟩',
'IX|0⟩=H|-⟩',
'II|0⟩=T|0⟩',
'SS|1⟩=Z|1⟩',
'I|+⟩=XH|0⟩',
'|-⟩=HII|1⟩',
'HYY|0⟩=|+⟩',
'T|+⟩=ZT|-⟩',
'I|0⟩=HX|+⟩',
'|0⟩=IXH|-⟩',
'|1⟩=YYX|0⟩',
'H-|1⟩=-|-⟩',
'IS|0⟩=S|0⟩',
'-|0⟩=Z-|0⟩',
'S|0⟩=TI|0⟩',
'YY|0⟩=S|0⟩',
'Z|i+⟩=S|-⟩',
'T|0⟩=ZT|0⟩',
'X|1⟩=TH|+⟩',
'SS|0⟩=Z|0⟩',
'ZZH|0⟩=|+⟩',
'|0⟩=THH|0⟩',
'XST|0⟩=|1⟩',
'H|-⟩=HZ|+⟩',
'HXZ|-⟩=|0⟩',
'SY|0⟩=Z|1⟩',
'IHZ|+⟩=|1⟩',
'I|+⟩=XZ|-⟩',
'XX|1⟩=H|-⟩',
'I|+⟩=ZZ|+⟩',
'ZY|1⟩=Y|1⟩',
'III|0⟩=|0⟩',
'T|1⟩=IT|1⟩',
'HXI|1⟩=|+⟩',
'SH|+⟩=H|+⟩',
'ZZ|1⟩=H|-⟩',
'T|0⟩=XH|-⟩',
'S|+⟩=I|i+⟩',
'HZX|+⟩=|1⟩',
'II|1⟩=H|-⟩',
'Z|1⟩=SS|1⟩',
'|0⟩=HXX|+⟩',
'TH|1⟩=T|-⟩',
'|1⟩=SSZ|1⟩',
'Z|-⟩=HZ|0⟩',
'|+⟩=IIX|+⟩',
'|0⟩=ITZ|0⟩',
'TT|+⟩=|i+⟩',
'X|+⟩=YY|+⟩',
'Z|i-⟩=|i+⟩',
'SSZ|+⟩=|+⟩',
'SI|0⟩=X|1⟩',
'|1⟩=XSI|0⟩',
'IZZ|-⟩=|-⟩',
'I-|0⟩=-|0⟩',
'|+⟩=HIT|0⟩',
'|1⟩=HZH|0⟩',
'|1⟩=IIX|0⟩',
'ISX|1⟩=|0⟩',
'HI|+⟩=I|0⟩',
'S|i+⟩=Z|+⟩',
'-|-⟩=IX|-⟩',
'|0⟩=SZH|+⟩',
'I|+⟩=HT|0⟩',
'Z|-⟩=IX|+⟩',
'-|-⟩=I-|-⟩',
'|1⟩=YZY|1⟩',
'|-⟩=IZX|+⟩',
'YY|-⟩=I|-⟩',
'IX|+⟩=H|0⟩',
'SX|1⟩=X|1⟩',
'X|0⟩=YY|1⟩',
'|+⟩=HXH|-⟩',
'IT|0⟩=H|+⟩',
'ZS|0⟩=S|0⟩',
'HS|0⟩=I|+⟩',
'HH|+⟩=I|+⟩',
'|0⟩=HZI|-⟩',
'|+⟩=XHS|0⟩',
'T|0⟩=SS|0⟩',
'STH|+⟩=|0⟩',
'YH|+⟩=S|1⟩',
'T|0⟩=TZ|0⟩',
'XXX|+⟩=|+⟩',
'|+⟩=SZS|+⟩',
'|-⟩=SI|i+⟩',
'|-⟩=SSH|0⟩',
'I|0⟩=SI|0⟩',
'TTX|1⟩=|0⟩',
'|0⟩=ZTX|1⟩',
'ZS|+⟩=|i-⟩',
'|+⟩=HZH|+⟩',
'SI|1⟩=Y|0⟩',
'XHZ|1⟩=|-⟩',
'ZHI|0⟩=|-⟩',
'IHZ|-⟩=|0⟩',
'SZ|0⟩=H|+⟩',
'|0⟩=YYI|0⟩',
'|0⟩=ZXI|1⟩',
'ZZ|1⟩=I|1⟩',
'ZZ|0⟩=H|+⟩',
'I|1⟩=Z-|1⟩',
'H|+⟩=IT|0⟩',
'S|-⟩=I|i-⟩',
'|0⟩=TTH|+⟩',
'IT|0⟩=T|0⟩',
'I|0⟩=ZI|0⟩',
'|0⟩=ISX|1⟩',
'|+⟩=HIS|0⟩',
'HIH|+⟩=|+⟩',
'|i+⟩=SX|+⟩',
'H|0⟩=IX|+⟩',
'X|1⟩=SI|0⟩',
'X|+⟩=IX|+⟩',
'I|1⟩=II|1⟩',
'-|+⟩=H-|0⟩',
'HZ|-⟩=T|0⟩',
'HTT|0⟩=|+⟩',
'|+⟩=HST|0⟩',
'T|0⟩=TI|0⟩',
'I|1⟩=XI|0⟩',
'|0⟩=ZST|0⟩',
'|+⟩=HTZ|0⟩',
'SI|-⟩=|i-⟩',
'|0⟩=TSX|1⟩',
'ZX|+⟩=Z|+⟩',
'XI|1⟩=S|0⟩',
'T|0⟩=ZX|1⟩',
'SI|1⟩=S|1⟩',
'YY|-⟩=Z|+⟩',
'S|i-⟩=Z|-⟩',
'X|-⟩=H-|1⟩',
'|1⟩=IXX|1⟩',
'IHH|0⟩=|0⟩',
'ISZ|0⟩=|0⟩',
'IZ|1⟩=-|1⟩',
'HST|0⟩=|+⟩',
'IIH|0⟩=|+⟩',
'IX|0⟩=I|1⟩',
'|0⟩=IST|0⟩',
'YS|1⟩=S|0⟩',
'X|1⟩=HI|+⟩',
'XY|-⟩=Y|-⟩',
'ZH|1⟩=X|+⟩',
'IT|+⟩=T|+⟩',
'I|0⟩=ST|0⟩',
'|0⟩=TII|0⟩',
'I-|+⟩=-|+⟩',
'IZX|+⟩=|-⟩',
'SSS|0⟩=|0⟩',
'X|0⟩=IX|0⟩',
'TS|0⟩=H|+⟩',
'XYS|1⟩=|1⟩',
'|-⟩=ZZI|-⟩',
'|0⟩=SYS|1⟩',
'H|+⟩=SX|1⟩',
'YY|0⟩=I|0⟩',
'IXZ|-⟩=|+⟩',
'YY|0⟩=X|1⟩',
'HI|+⟩=H|+⟩',
'I|-⟩=XX|-⟩',
'|+⟩=HYY|0⟩',
'ZH|1⟩=H|0⟩',
'HZI|+⟩=|1⟩',
'H|0⟩=IZ|-⟩',
'-|1⟩=ZH|-⟩',
'ZIS|0⟩=|0⟩',
'H|-⟩=IH|-⟩',
'ZHZ|0⟩=|-⟩',
'ZI|1⟩=-|1⟩',
'Y|-⟩=XY|-⟩',
'|+⟩=XHX|1⟩',
'H|+⟩=ZX|1⟩',
'|0⟩=TZT|0⟩',
'|0⟩=XIX|0⟩',
'I|0⟩=TS|0⟩',
'|0⟩=YYZ|0⟩',
'IH|-⟩=H|-⟩',
'IXZ|0⟩=|1⟩',
'T|0⟩=TX|1⟩',
'IT|0⟩=S|0⟩',
'X|+⟩=XX|+⟩',
'II|0⟩=X|1⟩',
'S|+⟩=SI|+⟩',
'TIX|1⟩=|0⟩',
'YS|1⟩=T|0⟩',
'SX|1⟩=H|+⟩',
'TI|0⟩=H|+⟩',
'IX-|-⟩=|-⟩',
'Y|+⟩=IY|+⟩',
'H|-⟩=HI|-⟩',
'|0⟩=YIS|1⟩',
'S|-⟩=IS|-⟩',
'I|+⟩=XX|+⟩',
'IS|i-⟩=|+⟩',
'|0⟩=TTS|0⟩',
'ST|0⟩=S|0⟩',
'|0⟩=HSS|-⟩',
'Y|0⟩=YI|0⟩',
'H|0⟩=XH|0⟩',
'S|-⟩=Z|i+⟩',
'ZS|0⟩=H|+⟩',
'|0⟩=XXZ|0⟩',
'Z|0⟩=XI|1⟩',
'|0⟩=TXI|1⟩',
'TIT|0⟩=|0⟩',
'YY|+⟩=I|+⟩',
'YSX|0⟩=|0⟩',
'T|0⟩=XI|1⟩',
'IZ|0⟩=T|0⟩',
'T|-⟩=TI|-⟩',
'XX|1⟩=I|1⟩',
'XZS|0⟩=|1⟩',
'|+⟩=HXX|0⟩',
'|1⟩=SXY|1⟩',
'HZT|0⟩=|+⟩',
'STT|-⟩=|+⟩',
'HT|0⟩=H|0⟩',
'TY|1⟩=Y|1⟩',
'ZI|1⟩=Z|1⟩',
'ZT|-⟩=T|+⟩',
'HX|+⟩=I|0⟩',
'|+⟩=SSI|-⟩',
'|+⟩=STT|-⟩',
'SIH|+⟩=|0⟩',
'Z|0⟩=HH|0⟩',
'XHH|+⟩=|+⟩',
'ZZX|+⟩=|+⟩',
'S|1⟩=YZ|0⟩',
'ZZ|0⟩=T|0⟩',
'|-⟩=ZXX|+⟩',
'|+⟩=HZI|0⟩',
'XHZ|0⟩=|+⟩',
'IT|0⟩=Z|0⟩',
'|0⟩=SXX|0⟩',
'|-⟩=HHH|1⟩',
'I|1⟩=IX|0⟩',
'H|+⟩=TZ|0⟩',
'Z|i+⟩=|i-⟩',
'|0⟩=HHX|1⟩',
'Z|0⟩=II|0⟩',
'H|1⟩=X-|-⟩',
'|0⟩=SHH|0⟩',
'|-⟩=HXT|0⟩',
'S|0⟩=IS|0⟩',
'YS|0⟩=S|1⟩',
'Z|0⟩=IX|1⟩',
'I|0⟩=TX|1⟩',
'XH|-⟩=I|0⟩',
'I-|1⟩=-|1⟩',
'TZ|+⟩=T|-⟩',
'YY|+⟩=X|+⟩',
'IZH|+⟩=|0⟩',
'SZ|+⟩=S|-⟩',
'|0⟩=ZTS|0⟩',
'HI|0⟩=Z|-⟩',
'H|-⟩=XT|0⟩',
'YYZ|+⟩=|-⟩',
'YYI|0⟩=|0⟩',
'|1⟩=XXI|1⟩',
'XH|0⟩=H|0⟩',
'IX|1⟩=I|0⟩',
'|1⟩=IHI|-⟩',
'IS|0⟩=Z|0⟩',
'|+⟩=HHZ|-⟩',
'IZ|0⟩=X|1⟩',
'|0⟩=SIZ|0⟩',
'ZH|0⟩=Z|+⟩',
'I|-⟩=ZH|0⟩',
'I|+⟩=IH|0⟩',
'TYY|0⟩=|0⟩',
'H|+⟩=ZH|+⟩',
'XTI|0⟩=|1⟩',
'XHH|0⟩=|1⟩',
'XS|0⟩=X|0⟩',
'Z|1⟩=ZH|-⟩',
'|0⟩=TIT|0⟩',
'XI-|-⟩=|-⟩',
'TH|+⟩=X|1⟩',
'I|0⟩=HZ|-⟩',
'|0⟩=HXI|+⟩',
'|-⟩=III|-⟩',
'Z|+⟩=HI|1⟩',
'XZ-|1⟩=|0⟩',
'|+⟩=IZI|-⟩',
'-|+⟩=I-|+⟩',
'S|0⟩=XX|0⟩',
'XI|1⟩=X|1⟩',
'|0⟩=STZ|0⟩',
'ZHT|0⟩=|-⟩',
'|0⟩=ZHX|+⟩',
'T|0⟩=SZ|0⟩',
'|0⟩=III|0⟩',
'I|-⟩=ZX|+⟩',
'TTI|0⟩=|0⟩',
'Z|-⟩=II|+⟩',
'IZ|+⟩=Z|+⟩',
'Z|+⟩=XX|-⟩',
'XH-|1⟩=|-⟩',
'|0⟩=YSI|1⟩',
'III|1⟩=|1⟩',
'TX|+⟩=T|+⟩',
'|1⟩=IYY|1⟩',
'HS|0⟩=Z|-⟩',
'|0⟩=XZ-|1⟩',
'HX|+⟩=X|1⟩',
'Z|0⟩=IS|0⟩',
'ZI|0⟩=X|1⟩',
'X|+⟩=HX|1⟩',
'XHI|-⟩=|0⟩',
'HZ|0⟩=X|+⟩',
'YYT|0⟩=|0⟩',
'|1⟩=III|1⟩',
'SH|+⟩=T|0⟩',
'T|+⟩=TI|+⟩',
'|0⟩=XXT|0⟩',
'XIX|1⟩=|1⟩',
'Y|+⟩=YX|+⟩',
'|0⟩=SXI|1⟩',
'ZSY|0⟩=|1⟩',
'I|0⟩=YS|1⟩',
'HT|0⟩=Z|-⟩',
'|+⟩=ZXX|-⟩',
'ZI|0⟩=S|0⟩',
'H|1⟩=HX|0⟩',
'IIX|+⟩=|+⟩',
'HYY|-⟩=|1⟩',
'XH|-⟩=S|0⟩',
'XXT|0⟩=|0⟩',
'I|+⟩=XI|+⟩',
'-|0⟩=H-|+⟩',
'HYY|1⟩=|-⟩',
'TTS|0⟩=|0⟩',
'|0⟩=SHX|+⟩',
'ZH|+⟩=S|0⟩',
'TS|0⟩=Z|0⟩',
'ZZX|1⟩=|0⟩',
'HZ-|1⟩=|-⟩',
'|0⟩=TZS|0⟩',
'ZS|0⟩=I|0⟩',
'|+⟩=ZHI|1⟩',
'Z|0⟩=YY|0⟩',
'H|0⟩=HH|+⟩',
'|1⟩=IHZ|+⟩',
'II|+⟩=H|0⟩',
'HII|+⟩=|0⟩',
'Z|-⟩=ZZ|+⟩',
'SX|+⟩=S|+⟩',
'Y|0⟩=TT|1⟩',
'ZIH|0⟩=|-⟩',
'II|-⟩=I|-⟩',
'SZZ|0⟩=|0⟩',
'|1⟩=HYY|-⟩',
'|0⟩=ZII|0⟩',
'SHH|0⟩=|0⟩',
'HHH|+⟩=|0⟩',
'Z|0⟩=TS|0⟩',
'YIY|-⟩=|-⟩',
'-|1⟩=SS|1⟩',
'YYX|0⟩=|1⟩',
'I|+⟩=II|+⟩',
'TT|1⟩=Y|0⟩',
'YY|-⟩=H|1⟩',
'-|0⟩=I-|0⟩',
'|i+⟩=ZS|-⟩',
'II|1⟩=I|1⟩',
'II|+⟩=Z|-⟩',
'|+⟩=XXI|+⟩',
'T|0⟩=TS|0⟩',
'ZSZ|0⟩=|0⟩',
'XS|0⟩=I|1⟩',
'ZH|0⟩=I|-⟩',
'X|1⟩=ZH|+⟩',
'SX|+⟩=|i+⟩',
'ZZX|0⟩=|1⟩',
'Z|0⟩=ST|0⟩',
'SSZ|-⟩=|-⟩',
'H|+⟩=YY|0⟩',
'|0⟩=TYS|1⟩',
'|0⟩=YTT|1⟩',
'Z|-⟩=HX|1⟩',
'|0⟩=ZTH|+⟩',
'SY|1⟩=Y|1⟩',
'|0⟩=ZYY|0⟩',
'ZHH|0⟩=|0⟩',
'TZX|1⟩=|0⟩',
'SZ|0⟩=S|0⟩',
'TTS|-⟩=|+⟩',
'XTZ|0⟩=|1⟩',
'Z|-⟩=XX|+⟩',
'H-|-⟩=Z|1⟩',
'|0⟩=ZXH|-⟩',
'IX|1⟩=X|1⟩',
'ZXI|+⟩=|-⟩',
'SS|-⟩=X|+⟩',
'YS|1⟩=I|0⟩',
'X|1⟩=SH|+⟩',
'IXI|0⟩=|1⟩',
'HH|1⟩=H|-⟩',
'ZI-|1⟩=|1⟩',
'I|+⟩=HX|1⟩',
'TZZ|0⟩=|0⟩',
'YYS|0⟩=|0⟩',
'TST|-⟩=|+⟩',
'|0⟩=SZT|0⟩',
'TZ|0⟩=Z|0⟩',
'|0⟩=IHI|+⟩',
'H|+⟩=SS|0⟩',
'HZ|-⟩=Z|0⟩',
'HIZ|-⟩=|0⟩',
'XSS|0⟩=|1⟩',
'H|0⟩=II|+⟩',
'|1⟩=HS|i+⟩',
'|+⟩=IHZ|0⟩',
'|-⟩=HHI|-⟩',
'H|+⟩=IS|0⟩',
'|i+⟩=SZ|-⟩',
'I|0⟩=IZ|0⟩',
'Z|0⟩=SZ|0⟩',
'|1⟩=HZZ|-⟩',
'ZI|0⟩=H|+⟩',
'IHI|0⟩=|+⟩',
'Z|1⟩=SY|0⟩',
'HI|-⟩=H|-⟩',
'ZI|+⟩=Z|+⟩',
'|+⟩=SSH|1⟩',
'|0⟩=IHZ|-⟩',
'IZZ|1⟩=|1⟩',
'H|1⟩=HH|-⟩',
'SZY|0⟩=|1⟩',
'IZI|0⟩=|0⟩',
'Z|1⟩=ZX|0⟩',
'|-⟩=IHH|-⟩',
'X|1⟩=TZ|0⟩',
'STT|0⟩=|0⟩',
'IHZ|0⟩=|+⟩',
'ITT|0⟩=|0⟩',
'X|1⟩=SZ|0⟩',
'|1⟩=ZHX|-⟩',
'X|1⟩=XX|0⟩',
'HI|+⟩=T|0⟩',
'XSX|1⟩=|1⟩',
'|0⟩=SYY|0⟩',
'|1⟩=IHH|1⟩',
'S|0⟩=SX|1⟩',
'Z|1⟩=I-|1⟩',
'I|0⟩=ZS|0⟩',
'S|1⟩=YX|1⟩',
'T|0⟩=HI|+⟩',
'HIH|-⟩=|-⟩',
'TX|1⟩=H|+⟩',
'|1⟩=ZH-|-⟩',
'|-⟩=STT|+⟩',
'|+⟩=IYY|+⟩',
'|+⟩=XS|i-⟩',
'ZZ|+⟩=H|0⟩',
'YTT|1⟩=|0⟩',
'XHX|1⟩=|+⟩',
'HHH|0⟩=|+⟩',
'SZ|i-⟩=|-⟩',
'S|0⟩=HH|0⟩',
'X|-⟩=XZ|+⟩',
'YSH|-⟩=|0⟩',
'IHT|0⟩=|+⟩',
'Z|+⟩=ZH|0⟩',
'X|+⟩=SS|-⟩',
'I|1⟩=XS|0⟩',
'T|-⟩=TH|1⟩',
'YS|1⟩=X|1⟩',
'|+⟩=IIZ|-⟩',
'SZS|+⟩=|+⟩',
'T|0⟩=ZI|0⟩',
'|-⟩=SZ|i-⟩',
'H|+⟩=YS|1⟩',
'S|0⟩=TT|0⟩',
'Z|0⟩=TH|+⟩',
'X|-⟩=Z-|+⟩',
'Z|-⟩=IH|0⟩',
'T|0⟩=II|0⟩',
'|+⟩=HTX|1⟩',
'ZX|1⟩=X|1⟩',
'XTH|+⟩=|1⟩',
'ZIH|1⟩=|+⟩',
'IZ|1⟩=Z|1⟩',
'ZIT|0⟩=|0⟩',
'X|-⟩=XH|1⟩',
'YH|1⟩=Y|-⟩',
'ZS|-⟩=S|+⟩',
'|+⟩=XZH|1⟩',
'I|1⟩=XX|1⟩',
'SHX|+⟩=|0⟩',
'X|+⟩=XH|0⟩',
'TII|0⟩=|0⟩',
'|1⟩=XIX|1⟩',
'HTH|+⟩=|+⟩',
'H|+⟩=HH|0⟩',
'|+⟩=HIZ|0⟩',
'SIZ|0⟩=|0⟩',
'|0⟩=IZI|0⟩',
'Y|0⟩=YH|+⟩',
'HIH|1⟩=|1⟩',
'X|1⟩=YY|0⟩',
'T|0⟩=IZ|0⟩',
'TI|0⟩=I|0⟩',
'HX|+⟩=H|+⟩',
'|i+⟩=Z|i-⟩',
'ZSX|1⟩=|0⟩',
'HI|+⟩=Z|0⟩',
'Y|0⟩=YT|0⟩',
'|0⟩=IZH|+⟩',
'ZXX|+⟩=|-⟩',
'IZ|-⟩=H|0⟩',
'|-⟩=HXH|+⟩',
'IXX|1⟩=|1⟩',
'|0⟩=TIH|+⟩',
'ZH|+⟩=Z|0⟩',
'X|1⟩=YS|1⟩',
'-|1⟩=ZI|1⟩',
'ST|0⟩=H|+⟩',
'YY|1⟩=H|-⟩',
'I|i-⟩=S|-⟩',
'TSI|0⟩=|0⟩',
'|+⟩=XXH|0⟩',
'HXX|1⟩=|-⟩',
'ZHS|0⟩=|-⟩',
'Z-|1⟩=H|-⟩',
'S|1⟩=IY|0⟩',
'SYS|1⟩=|0⟩',
'TXH|-⟩=|0⟩',
'YZ|+⟩=Y|-⟩',
'H|+⟩=ST|0⟩',
'IXI|1⟩=|0⟩',
'|0⟩=TYY|0⟩',
'T|0⟩=SI|0⟩',
'|i-⟩=IS|-⟩',
'|0⟩=TTZ|0⟩',
'HZI|-⟩=|0⟩',
'|1⟩=XTI|0⟩',
'ZZ|+⟩=Z|-⟩',
'IIT|0⟩=|0⟩',
'XH|0⟩=Z|-⟩',
'ZS|0⟩=T|0⟩',
'XX|0⟩=T|0⟩',
'Y|-⟩=YH|1⟩',
'|0⟩=ZSZ|0⟩',
'XZ|1⟩=-|0⟩',
'T|0⟩=IX|1⟩',
'I|1⟩=XH|+⟩',
'HZ|-⟩=H|+⟩',
'X|0⟩=HZ|+⟩',
'ZZ|-⟩=Z|+⟩',
'TX|0⟩=T|1⟩',
'X|+⟩=II|+⟩',
'XIZ|0⟩=|1⟩',
'ZH|-⟩=-|1⟩',
'|0⟩=TXH|-⟩',
'|0⟩=ZZZ|0⟩',
'ZHH|-⟩=|+⟩',
'ZI|-⟩=H|0⟩',
'|0⟩=IHH|0⟩',
'|0⟩=HHT|0⟩',
'ZZZ|-⟩=|+⟩',
'YYZ|0⟩=|0⟩',
'IZ|+⟩=H|1⟩',
'X|+⟩=XI|+⟩',
'|-⟩=IIZ|+⟩',
'YSI|1⟩=|0⟩',
'IZH|1⟩=|+⟩',
'XXH|-⟩=|1⟩',
'ZX-|-⟩=|+⟩',
'T|0⟩=SX|1⟩',
'IT|0⟩=X|1⟩',
'XIX|0⟩=|0⟩',
'HH|0⟩=T|0⟩',
'YY|0⟩=Z|0⟩',
'HI|0⟩=X|+⟩',
'|-⟩=ZHS|0⟩',
'YYI|1⟩=|1⟩',
'TST|+⟩=|-⟩',
'X|+⟩=HI|0⟩',
'YS|1⟩=H|+⟩',
'|0⟩=HHZ|0⟩',
'SY|0⟩=-|1⟩',
'HIZ|+⟩=|1⟩',
'|0⟩=HS|i-⟩',
'SSI|0⟩=|0⟩',
'SS|0⟩=X|1⟩',
'|-⟩=XZ-|+⟩',
'XXH|0⟩=|+⟩',
'|-⟩=ZHI|0⟩',
'|-⟩=SSZ|-⟩',
'|i-⟩=Z|i+⟩',
'SS|-⟩=I|+⟩',
'Z|+⟩=S|i+⟩',
'T|0⟩=IS|0⟩',
'|i+⟩=I|i+⟩',
'|1⟩=XHX|+⟩',
'Z-|+⟩=X|-⟩',
'I|0⟩=ZH|+⟩',
'ZI|+⟩=H|1⟩',
'XZH|1⟩=|+⟩',
'Z|0⟩=HZ|-⟩',
'ZI|0⟩=Z|0⟩',
'XZ|+⟩=-|-⟩',
'|0⟩=ITH|+⟩',
'SII|0⟩=|0⟩',
'IS|+⟩=|i+⟩',
'THI|+⟩=|0⟩',
'I|0⟩=XH|-⟩',
'HX|0⟩=H|1⟩',
'SI|0⟩=S|0⟩',
'H-|-⟩=-|1⟩',
'Y|0⟩=YX|1⟩',
'ITH|+⟩=|0⟩',
'|+⟩=YYZ|-⟩',
'|0⟩=STT|0⟩',
'|0⟩=TIZ|0⟩',
'|-⟩=IIH|1⟩',
'|+⟩=IHS|0⟩',
'ZIH|+⟩=|0⟩',
'|0⟩=ZIT|0⟩',
'|-⟩=ZIX|+⟩',
'H|1⟩=IZ|+⟩',
'ZS|i+⟩=|+⟩',
'SI|i-⟩=|+⟩',
'TT|0⟩=Z|0⟩',
'|-⟩=IZI|+⟩',
'SZ|+⟩=|i-⟩',
'|1⟩=IZ-|1⟩',
'H|1⟩=S|i+⟩',
'SX|1⟩=Z|0⟩',
'SI|+⟩=S|+⟩',
'HZZ|+⟩=|0⟩',
'XII|0⟩=|1⟩',
'|+⟩=XXX|+⟩',
'Z|+⟩=ZZ|-⟩',
'|+⟩=IIH|0⟩',
'S|0⟩=ZI|0⟩',
'SS|1⟩=-|1⟩',
'S|0⟩=HI|+⟩',
'|1⟩=XSX|1⟩',
'Z-|0⟩=-|0⟩',
'X|1⟩=TS|0⟩',
'|+⟩=XIZ|-⟩',
'|-⟩=ZHZ|0⟩',
'|0⟩=TZX|1⟩',
'|-⟩=ZII|+⟩',
'|-⟩=YIY|-⟩',
'IS|i+⟩=|-⟩',
'H|-⟩=XX|1⟩',
'HX|-⟩=-|1⟩',
'XH|+⟩=X|0⟩',
'HH|0⟩=X|1⟩',
'SZT|0⟩=|0⟩',
'II|0⟩=I|0⟩',
'S|+⟩=Y|i+⟩',
'|0⟩=IIZ|0⟩',
'HSS|0⟩=|+⟩',
'IT|1⟩=T|1⟩',
'|+⟩=SSZ|+⟩',
'YYH|1⟩=|-⟩',
'IZ|0⟩=H|+⟩',
'|0⟩=YYT|0⟩',
'H|0⟩=HT|0⟩',
'XTS|0⟩=|1⟩',
'HT|0⟩=X|+⟩',
'I|-⟩=HH|-⟩',
'IYY|0⟩=|0⟩',
'ZYY|+⟩=|-⟩',
'|+⟩=IHX|1⟩',
'T|0⟩=ZZ|0⟩',
'YIY|+⟩=|+⟩',
'SH|0⟩=|i+⟩',
'I-|-⟩=-|-⟩',
'ZTZ|0⟩=|0⟩',
'ZSS|1⟩=|1⟩',
'I|0⟩=ZX|1⟩',
'IXH|+⟩=|1⟩',
'H|0⟩=IH|0⟩',
'|0⟩=ISZ|0⟩',
'|-⟩=YYZ|+⟩',
'|+⟩=XZZ|+⟩',
'|0⟩=HIH|0⟩',
'H-|+⟩=-|0⟩',
'S|1⟩=YT|0⟩',
'T|-⟩=ZT|+⟩',
'ZYS|1⟩=|0⟩',
'HZS|0⟩=|+⟩',
'TS|0⟩=S|0⟩',
'HYY|+⟩=|0⟩',
'Y|1⟩=SY|1⟩',
'|0⟩=IZZ|0⟩',
'S|0⟩=ZX|1⟩',
'|+⟩=HSZ|0⟩',
'ZXH|0⟩=|-⟩',
'|1⟩=XTX|1⟩',
'|i+⟩=YS|+⟩',
'S|0⟩=ZH|+⟩',
'Z|0⟩=XH|-⟩',
'SX|0⟩=Y|0⟩',
'HS|i+⟩=|1⟩',
'H|+⟩=XX|0⟩',
'XX|+⟩=H|0⟩',
'|1⟩=SZY|0⟩',
'X|1⟩=ST|0⟩',
'Z|-⟩=ZH|1⟩',
'H|+⟩=IX|1⟩',
'T|0⟩=IH|+⟩',
'SSI|-⟩=|+⟩',
'IHX|0⟩=|-⟩',
'HZZ|0⟩=|+⟩',
'|i-⟩=TT|-⟩',
'X|0⟩=HH|1⟩',
'Z|1⟩=IZ|1⟩',
'S|0⟩=YY|0⟩',
'YIS|1⟩=|0⟩',
'H-|1⟩=X|-⟩',
'S|1⟩=SX|0⟩',
'|0⟩=THZ|-⟩',
'IS|0⟩=H|+⟩',
'|0⟩=HHH|+⟩',
'|-⟩=XHZ|1⟩',
'|-⟩=YYH|1⟩',
'Z|0⟩=ZT|0⟩',
'|0⟩=TTI|0⟩',
'|0⟩=XYY|1⟩',
'XI|0⟩=H|-⟩',
'HH|+⟩=H|0⟩',
'IH|1⟩=I|-⟩',
'|+⟩=HZX|1⟩',
'|+⟩=ISS|-⟩',
'T|1⟩=TX|0⟩',
'SS|0⟩=H|+⟩',
'HSS|+⟩=|1⟩',
'IIZ|0⟩=|0⟩',
'TZT|0⟩=|0⟩',
'Z|-⟩=XZ|-⟩',
'XXI|+⟩=|+⟩',
'T|0⟩=ZH|+⟩',
'ST|0⟩=T|0⟩',
'|1⟩=XIZ|0⟩',
'XTT|0⟩=|1⟩',
'TTS|+⟩=|-⟩',
'|1⟩=XIT|0⟩',
'T|+⟩=TH|0⟩',
'IZ|-⟩=X|+⟩',
'H|0⟩=HS|0⟩',
'S|-⟩=ZS|+⟩',
'|-⟩=SYS|+⟩',
'H|-⟩=XZ|0⟩',
'|1⟩=XZS|0⟩',
'SH|0⟩=S|+⟩',
'YH|0⟩=Y|+⟩',
'|0⟩=STS|0⟩',
'Y|-⟩=IY|-⟩',
'|1⟩=HZX|+⟩',
'H|-⟩=YY|1⟩',
'IH|+⟩=I|0⟩',
'ISS|-⟩=|+⟩',
'|1⟩=XZX|1⟩',
'THZ|-⟩=|0⟩',
'X|0⟩=II|1⟩',
'SSH|+⟩=|0⟩',
'IX|+⟩=X|+⟩',
'H|+⟩=TS|0⟩',
'SSX|+⟩=|-⟩',
'H|0⟩=XX|+⟩',
'H|0⟩=ZZ|+⟩',
'IX|0⟩=X|0⟩',
'S|1⟩=SH|-⟩',
'TT|0⟩=S|0⟩',
'TST|0⟩=|0⟩',
'H|+⟩=IZ|0⟩',
'II|1⟩=X|0⟩',
'|1⟩=IIH|-⟩',
'|1⟩=SS-|1⟩',
'YI|0⟩=Y|0⟩',
'X|+⟩=ZZ|+⟩',
'ST|0⟩=Z|0⟩',
'S|+⟩=Z|i-⟩',
'ZHZ|-⟩=|0⟩',
'I|1⟩=XT|0⟩',
'|+⟩=HTI|0⟩',
'IS|0⟩=I|0⟩',
'|0⟩=SXH|-⟩',
'IS|1⟩=S|1⟩',
'|0⟩=SSX|1⟩',
'S|-⟩=SZ|+⟩',
'IX|1⟩=Z|0⟩',
'IYY|+⟩=|+⟩',
'|+⟩=YYH|0⟩',
'H|1⟩=SS|+⟩',
'H|+⟩=ZI|0⟩',
'|+⟩=ZII|-⟩',
'|+⟩=IHH|+⟩',
'YY|1⟩=X|0⟩',
'Z|0⟩=IH|+⟩',
'XXI|-⟩=|-⟩',
'HTI|0⟩=|+⟩',
'H|+⟩=TX|1⟩',
'Y|+⟩=YI|+⟩',
'S|+⟩=TT|+⟩',
'IZ|0⟩=S|0⟩',
'H|+⟩=HX|+⟩',
'HI|1⟩=H|1⟩',
'|+⟩=IXX|+⟩',
'IY|0⟩=Y|0⟩',
'ZZI|1⟩=|1⟩',
'II|0⟩=H|+⟩',
'XX|0⟩=S|0⟩',
'ZTX|1⟩=|0⟩',
'H|0⟩=XI|+⟩',
'I|0⟩=HH|0⟩',
'X|1⟩=II|0⟩',
'|-⟩=HIX|0⟩',
'|0⟩=XHI|-⟩',
'|+⟩=HIH|+⟩',
'SI|i+⟩=|-⟩',
'SIS|-⟩=|+⟩',
'HZH|+⟩=|+⟩',
'XH|1⟩=X|-⟩',
'SZ|i+⟩=|+⟩',
'XXH|1⟩=|-⟩',
'HX|1⟩=X|+⟩',
'ZS|0⟩=X|1⟩',
'|+⟩=ZIZ|+⟩',
'-|-⟩=XH|1⟩',
'I|0⟩=TZ|0⟩',
'SIX|1⟩=|0⟩',
'Y|-⟩=YZ|+⟩',
'|0⟩=ISH|+⟩',
'ZZ|0⟩=Z|0⟩',
'Z|-⟩=IZ|-⟩',
'-|1⟩=HX|-⟩',
'HX|1⟩=Z|-⟩',
'ZZ|0⟩=S|0⟩',
'S|0⟩=XI|1⟩',
'X|1⟩=XI|1⟩',
'H|+⟩=XI|1⟩',
'H|+⟩=ZZ|0⟩',
'YZY|1⟩=|1⟩',
'H|1⟩=ZI|+⟩',
'XIH|0⟩=|+⟩',
'SI|+⟩=|i+⟩',
'ZT|0⟩=S|0⟩',
'ST|0⟩=X|1⟩',
'-|1⟩=SY|0⟩',
'TX|1⟩=I|0⟩',
'H|+⟩=HZ|-⟩',
'XZZ|1⟩=|0⟩',
'S|+⟩=IS|+⟩',
'ZXZ|-⟩=|-⟩',
'ZTH|+⟩=|0⟩',
'IZI|-⟩=|+⟩',
'TSH|+⟩=|0⟩',
'IX|1⟩=T|0⟩',
'YYX|+⟩=|+⟩',
'ZTI|0⟩=|0⟩',
'IIX|0⟩=|1⟩',
'X|0⟩=HI|-⟩',
'|1⟩=XSS|0⟩',
'Z|1⟩=ZI|1⟩',
'|1⟩=XTS|0⟩',
'|1⟩=XTT|0⟩',
'|0⟩=ZIH|+⟩',
'HI|+⟩=S|0⟩',
'|0⟩=HHS|0⟩',
'Z-|-⟩=-|+⟩',
'HH|1⟩=I|1⟩',
'T|0⟩=YS|1⟩',
'Z|-⟩=S|i-⟩',
'|0⟩=THI|+⟩',
'|0⟩=IXX|0⟩',
'|+⟩=XHT|0⟩',
'XZ|-⟩=H|0⟩',
'ZHI|+⟩=|0⟩',
'-|-⟩=Z-|+⟩',
'H|1⟩=ZH|0⟩',
'ZX|+⟩=I|-⟩',
'|0⟩=YYX|1⟩',
'|-⟩=ZSS|-⟩',
'X|1⟩=IT|0⟩',
'X|+⟩=ZH|1⟩',
'H|0⟩=HX|1⟩',
'HI|-⟩=I|1⟩',
'HH|0⟩=I|0⟩',
'|1⟩=XII|0⟩',
'HHX|1⟩=|0⟩',
'|+⟩=HTS|0⟩',
'HIT|0⟩=|+⟩',
'Z|0⟩=IT|0⟩',
'YIY|1⟩=|1⟩',
'S|i-⟩=X|+⟩',
'YX|1⟩=S|1⟩',
'ZXX|0⟩=|0⟩',
'|0⟩=SSH|+⟩',
'|-⟩=HXI|0⟩',
'|-⟩=ZZZ|+⟩',
'I|-⟩=IZ|+⟩',
'|1⟩=YTY|1⟩',
'X|0⟩=XH|+⟩',
'HHX|0⟩=|1⟩',
'TYS|1⟩=|0⟩',
'|0⟩=SHI|+⟩',
'IHI|+⟩=|0⟩',
'S|-⟩=SH|1⟩',
'HHI|0⟩=|0⟩',
'Z|-⟩=HT|0⟩',
'H|0⟩=SS|-⟩',
'Z|0⟩=IZ|0⟩',
'S|1⟩=IS|1⟩',
'TT|1⟩=S|1⟩',
'HX|1⟩=H|0⟩',
'XH|0⟩=X|+⟩',
'ZSS|+⟩=|+⟩',
'I|0⟩=XX|0⟩',
'|+⟩=HHH|0⟩',
'YYZ|-⟩=|+⟩',
'|1⟩=YYI|1⟩',
'X|0⟩=XS|0⟩',
'IHS|0⟩=|+⟩',
'XX|1⟩=X|0⟩',
'ITZ|0⟩=|0⟩',
'ZZI|-⟩=|-⟩',
'Z|0⟩=ZI|0⟩',
'ST|0⟩=I|0⟩',
'XT|0⟩=H|-⟩',
'|1⟩=HHI|1⟩',
'-|-⟩=HZ|1⟩',
'|i+⟩=IS|+⟩',
'X|1⟩=IS|0⟩',
'|1⟩=XHH|0⟩',
'|0⟩=ZXX|0⟩',
'XS|i-⟩=|+⟩',
'|0⟩=YIY|0⟩',
'Y|1⟩=ZY|1⟩',
'|1⟩=XZI|0⟩',
'YT|0⟩=S|1⟩',
'YY|1⟩=I|1⟩',
'|+⟩=XHI|0⟩',
'|+⟩=HYS|1⟩',
'I|1⟩=HH|1⟩',
'HII|1⟩=|-⟩',
'|0⟩=TSH|+⟩',
'X|0⟩=XZ|0⟩',
'XH|1⟩=-|-⟩',
'TT|0⟩=I|0⟩',
'Z|0⟩=ZS|0⟩',
'|0⟩=ISI|0⟩',
'|1⟩=XIH|+⟩',
'|1⟩=ZI-|1⟩',
'XHT|0⟩=|+⟩',
'XII|+⟩=|+⟩',
'|i-⟩=ZS|+⟩',
'|+⟩=HHI|+⟩',
'YS|+⟩=S|+⟩',
'|+⟩=XZI|-⟩',
'H|+⟩=SH|+⟩',
'IH|+⟩=X|1⟩',
'Y|1⟩=TY|1⟩',
'|0⟩=ZHH|0⟩',
'S|0⟩=IH|+⟩',
'SS|0⟩=S|0⟩',
'XZ|-⟩=Z|-⟩',
'Z|0⟩=XX|0⟩',
'IHX|1⟩=|+⟩',
'I|0⟩=SS|0⟩',
'|0⟩=YYH|+⟩',
'|0⟩=TTT|0⟩',
'T|+⟩=IT|+⟩',
'Z|+⟩=YY|-⟩',
'IH|1⟩=H|1⟩',
'|-⟩=TTS|+⟩',
'-|1⟩=I-|1⟩',
'XH|-⟩=X|1⟩',
'XI|1⟩=I|0⟩',
'ZXX|-⟩=|+⟩',
'|0⟩=YYS|0⟩',
'T|+⟩=TZ|-⟩',
'TXX|0⟩=|0⟩',
'|-⟩=HZ-|1⟩',
'IST|0⟩=|0⟩',
'|0⟩=ZZI|0⟩',
'|+⟩=YYI|+⟩',
'|0⟩=IZX|1⟩',
'|0⟩=SIS|0⟩',
'YI|1⟩=Y|1⟩',
'ZSS|0⟩=|0⟩',
'ZST|0⟩=|0⟩',
'ZX|-⟩=-|+⟩',
'H|-⟩=ZZ|1⟩',
'X|1⟩=ZI|0⟩',
'I|+⟩=HS|0⟩',
'|1⟩=YYH|-⟩',
'Z|+⟩=II|-⟩',
'XYY|1⟩=|0⟩',
'XI|+⟩=I|+⟩',
'|0⟩=IIT|0⟩',
'T|+⟩=TX|+⟩',
'|1⟩=ZZX|0⟩',
'SZS|1⟩=|1⟩',
'SZS|0⟩=|0⟩',
'TSZ|0⟩=|0⟩',
'ZHX|1⟩=|-⟩',
'-|-⟩=XZ|+⟩',
'IZT|0⟩=|0⟩',
'|+⟩=TST|-⟩',
'|+⟩=HSX|1⟩',
'|0⟩=ZSI|0⟩',
'I|+⟩=ZH|1⟩',
'|+⟩=XYY|+⟩',
'HZ|1⟩=X|-⟩',
'|-⟩=ZXZ|-⟩',
'HZ|+⟩=I|1⟩',
'XHI|0⟩=|+⟩',
'ZXI|1⟩=|0⟩',
'SS|+⟩=H|1⟩',
'|1⟩=IXH|+⟩',
'IXX|+⟩=|+⟩',
'H|-⟩=XI|0⟩',
'I|+⟩=HH|+⟩',
'YS|0⟩=Y|0⟩',
'XZ|0⟩=I|1⟩',
'HXZ|0⟩=|-⟩',
'SZ|0⟩=X|1⟩',
'HZ|+⟩=X|0⟩',
'ZT|0⟩=H|+⟩',
'I|1⟩=HI|-⟩',
'I|i+⟩=|i+⟩',
'S|0⟩=TZ|0⟩',
'IX|+⟩=Z|-⟩',
'|0⟩=IYY|0⟩',
'|1⟩=YIY|1⟩',
'III|+⟩=|+⟩',
'IX|-⟩=-|-⟩',
'ZZI|+⟩=|+⟩',
'II|0⟩=Z|0⟩',
'ISH|+⟩=|0⟩',
'|-⟩=ZXH|0⟩',
'ZX|1⟩=I|0⟩',
'YY|+⟩=H|0⟩',
'|0⟩=HIZ|-⟩',
'IHI|1⟩=|-⟩',
'TI|0⟩=X|1⟩',
'|-⟩=XH-|1⟩',
'|+⟩=SIS|-⟩',
'|i-⟩=SZ|+⟩',
'|1⟩=IXI|0⟩',
'HZH|0⟩=|1⟩',
'H|1⟩=II|-⟩',
'YYX|1⟩=|0⟩',
'ZZ|+⟩=I|+⟩',
'Y|0⟩=YZ|0⟩',
'SI|-⟩=S|-⟩',
'HX|1⟩=I|+⟩',
'S|-⟩=SI|-⟩',
'IS|-⟩=|i-⟩',
'S|0⟩=YS|1⟩',
'TT|-⟩=S|-⟩',
'I|-⟩=YY|-⟩',
'YYI|+⟩=|+⟩',
'SI|0⟩=T|0⟩',
'TTZ|0⟩=|0⟩',
'HZX|1⟩=|+⟩',
'HXH|-⟩=|+⟩',
'YS|+⟩=|i+⟩',
'HSX|1⟩=|+⟩',
'ZZ|-⟩=I|-⟩',
'X|-⟩=I-|-⟩',
'|0⟩=HXZ|-⟩',
'XXI|1⟩=|1⟩',
'TS|0⟩=X|1⟩',
'HXH|+⟩=|-⟩',
'|+⟩=IXI|+⟩',
'Z|-⟩=YY|+⟩',
'|-⟩=HXX|1⟩',
'IH|0⟩=H|0⟩',
'|+⟩=ZZH|0⟩',
'I|0⟩=ZZ|0⟩',
'S|0⟩=SS|0⟩',
'|0⟩=TIX|1⟩',
'|1⟩=XST|0⟩',
'TT|0⟩=H|+⟩',
'SXI|1⟩=|0⟩',
'XSS|-⟩=|+⟩',
'HXX|-⟩=|1⟩',
'|0⟩=THX|+⟩',
'Y|0⟩=SX|0⟩',
'YSY|1⟩=|1⟩',
'I|+⟩=HZ|0⟩',
'XI|-⟩=-|-⟩',
'X|0⟩=XX|1⟩',
'I|0⟩=TH|+⟩',
'|+⟩=XIX|+⟩',
'Y|i+⟩=S|+⟩',
'|+⟩=HTT|0⟩',
'IIZ|-⟩=|+⟩',
'SI|0⟩=Z|0⟩',
'Z|0⟩=SX|1⟩',
'XX|+⟩=I|+⟩',
'|0⟩=XXI|0⟩',
'X|1⟩=ZS|0⟩',
'H|+⟩=XH|-⟩',
'|1⟩=IXT|0⟩',
'|0⟩=ZSX|1⟩',
'SX|0⟩=S|1⟩',
'HXT|0⟩=|-⟩',
'|0⟩=ZZS|0⟩',
'|0⟩=HII|+⟩',
'I|0⟩=XI|1⟩',
'|-⟩=SSI|+⟩',
'IH|0⟩=X|+⟩',
'IHI|-⟩=|1⟩',
'IH|+⟩=H|+⟩',
'TH|+⟩=T|0⟩',
'|0⟩=TXX|0⟩',
'|-⟩=IYY|-⟩',
'XI|0⟩=X|0⟩',
'TT|i+⟩=|-⟩',
'ISI|0⟩=|0⟩',
'TTT|0⟩=|0⟩',
'IX|+⟩=I|+⟩',
'|0⟩=SII|0⟩',
'HHZ|+⟩=|-⟩',
'HTS|0⟩=|+⟩',
'X|0⟩=XI|0⟩',
'|1⟩=HSS|+⟩',
'I|+⟩=SS|-⟩',
'X|-⟩=IX|-⟩',
'Y|1⟩=YX|0⟩',
'TH|-⟩=T|1⟩',
'|0⟩=IIS|0⟩',
'HX|0⟩=Z|+⟩',
'IYY|1⟩=|1⟩',
'|+⟩=TT|i-⟩',
'Y|1⟩=YH|-⟩',
'YT|0⟩=Y|0⟩',
'YY|0⟩=T|0⟩',
'SSH|1⟩=|+⟩',
'Y|0⟩=IS|1⟩',
'TH|+⟩=H|+⟩',
'SH|1⟩=|i-⟩',
'|0⟩=ZTI|0⟩',
'|1⟩=XSH|+⟩',
'IH|+⟩=S|0⟩',
'H|1⟩=HI|1⟩',
'HZ|-⟩=S|0⟩',
'|-⟩=HIH|-⟩',
'|-⟩=HXZ|0⟩',
'Z|0⟩=TZ|0⟩',
'S|0⟩=ZS|0⟩',
'X|+⟩=HT|0⟩',
'T|0⟩=TH|+⟩',
'|-⟩=IX-|-⟩',
'XH|+⟩=I|1⟩',
'|-⟩=XXH|1⟩',
'X-|-⟩=Z|+⟩',
'HHH|1⟩=|-⟩',
'YX|+⟩=Y|+⟩',
'H|-⟩=XS|0⟩',
'X|0⟩=Z-|1⟩',
'ZHX|-⟩=|1⟩',
'XSH|+⟩=|1⟩',
'HHI|+⟩=|+⟩',
'|-⟩=SSX|+⟩',
'ISS|0⟩=|0⟩',
'HH|+⟩=X|+⟩',
'XZI|0⟩=|1⟩',
'TT|-⟩=|i-⟩',
'ZZZ|0⟩=|0⟩',
'S|1⟩=YS|0⟩',
'XZI|-⟩=|+⟩',
'HZ|0⟩=Z|-⟩',
'H|1⟩=ZZ|-⟩',
'YI|+⟩=Y|+⟩',
'|0⟩=XXH|+⟩',
'XZT|0⟩=|1⟩',
'SI|0⟩=H|+⟩',
'TSS|0⟩=|0⟩',
'|0⟩=STX|1⟩',
'T|0⟩=ST|0⟩',
'ZS|i-⟩=|-⟩',
'X|-⟩=XI|-⟩',
'|-⟩=IZZ|-⟩',
'ZX|1⟩=Z|0⟩',
'|1⟩=ZX-|0⟩',
'HH|-⟩=Z|+⟩',
'IXI|+⟩=|+⟩',
'HZ|-⟩=X|1⟩',
'-|0⟩=XZ|1⟩',
'HT|0⟩=I|+⟩',
'ZIZ|+⟩=|+⟩',
'IYS|1⟩=|0⟩',
'YS|1⟩=Z|0⟩',
'IHH|-⟩=|-⟩',
'IS|0⟩=T|0⟩',
'|0⟩=SZI|0⟩',
'HYS|1⟩=|+⟩',
'TX|1⟩=T|0⟩',
'|0⟩=STH|+⟩',
'IXH|-⟩=|0⟩',
'T|0⟩=YY|0⟩',
'ZI|-⟩=Z|-⟩',
'H|+⟩=ZT|0⟩',
'|0⟩=SZZ|0⟩',
'XX|+⟩=Z|-⟩',
'|-⟩=ZHX|1⟩',
'I-|1⟩=Z|1⟩',
'X|-⟩=HZ|1⟩',
'|+⟩=HSS|0⟩',
'|+⟩=SZ|i+⟩',
'|1⟩=XIS|0⟩',
'SH|-⟩=Y|0⟩',
'I|i-⟩=|i-⟩',
'X|1⟩=ZZ|0⟩',
'IY|0⟩=S|1⟩',
'Z|1⟩=HX|-⟩',
'X|1⟩=TX|1⟩',
'ZI|+⟩=I|-⟩',
'HXX|0⟩=|+⟩',
'|0⟩=ZZT|0⟩',
'YYH|0⟩=|+⟩',
'Z|0⟩=SI|0⟩',
'IYY|-⟩=|-⟩',
'|1⟩=ZSS|1⟩',
'X|+⟩=IH|0⟩',
'|0⟩=ZZH|+⟩',
'T|0⟩=HX|+⟩',
'ZH|1⟩=Z|-⟩',
'XII|1⟩=|0⟩',
'HZI|0⟩=|+⟩',
'|0⟩=IXI|1⟩',
'SSX|1⟩=|0⟩',
'IH|0⟩=I|+⟩',
'Y|1⟩=IY|1⟩',
'IS|-⟩=S|-⟩',
'|+⟩=ZZX|+⟩',
'STX|1⟩=|0⟩',
'IXT|0⟩=|1⟩',
'|1⟩=YSY|1⟩',
'SS|0⟩=T|0⟩',
'IXX|0⟩=|0⟩',
'HH|0⟩=Z|0⟩',
'S|+⟩=ZS|-⟩',
'|0⟩=ZYS|1⟩',
'HIX|1⟩=|+⟩',
'|0⟩=YSH|-⟩',
'|0⟩=IHX|+⟩',
'ZI|0⟩=T|0⟩',
'SIT|0⟩=|0⟩',
'XH|-⟩=T|0⟩',
'TX|1⟩=S|0⟩',
'SS|-⟩=H|0⟩',
'|+⟩=ZIH|1⟩',
'|0⟩=IIH|+⟩',
'|0⟩=TSI|0⟩',
'Z|+⟩=IH|1⟩',
'|1⟩=SZS|1⟩',
'TTH|+⟩=|0⟩',
'XYY|0⟩=|1⟩',
'S|0⟩=SI|0⟩',
'XI|1⟩=T|0⟩',
'|+⟩=IXH|0⟩',
'|+⟩=HXI|1⟩',
'IXH|0⟩=|+⟩',
'XZZ|+⟩=|+⟩',
'XX|+⟩=X|+⟩',
'|0⟩=XHZ|+⟩',
'XI|1⟩=Z|0⟩',
'XXX|0⟩=|1⟩',
'|0⟩=SSI|0⟩',
'HIS|0⟩=|+⟩',
'S|0⟩=SZ|0⟩',
'|0⟩=XIH|-⟩',
'XZH|+⟩=|1⟩',
'H|+⟩=SZ|0⟩',
'|+⟩=IHT|0⟩',
'SS-|1⟩=|1⟩',
'|0⟩=TST|0⟩',
'|+⟩=HTH|+⟩',
'HII|0⟩=|+⟩',
'|0⟩=HZZ|+⟩',
'ZI|0⟩=I|0⟩',
'YI|0⟩=S|1⟩',
'ZZH|-⟩=|1⟩',
'T|0⟩=HH|0⟩',
'|+⟩=ZSS|+⟩',
'|+⟩=HSI|0⟩',
'|+⟩=HHX|+⟩',
'TH|+⟩=S|0⟩',
'IZ-|1⟩=|1⟩',
'H|1⟩=IH|1⟩',
'T|-⟩=TZ|+⟩',
'X-|-⟩=I|-⟩',
'YX|1⟩=Y|0⟩',
'HH|1⟩=X|0⟩',
'I|+⟩=S|i-⟩',
'|-⟩=HYY|1⟩',
'SYY|0⟩=|0⟩',
'YIY|0⟩=|0⟩',
'ZIZ|1⟩=|1⟩',
'|i+⟩=SH|0⟩',
'|-⟩=ZHT|0⟩',
'TI|-⟩=T|-⟩',
'SS|0⟩=I|0⟩',
'XXZ|-⟩=|+⟩',
'S|0⟩=IX|1⟩',
'Z|+⟩=IZ|+⟩',
'S|0⟩=SH|+⟩',
'I|0⟩=IX|1⟩',
'H|+⟩=TT|0⟩',
'ZH-|-⟩=|1⟩',
'ZIZ|-⟩=|-⟩',
'XH|-⟩=H|+⟩',
'|+⟩=HIX|1⟩',
'I|0⟩=II|0⟩',
'|0⟩=HXH|0⟩',
'XZ|+⟩=X|-⟩',
'ZH|1⟩=I|+⟩',
'X|1⟩=SS|0⟩',
'I|1⟩=HZ|+⟩',
]
/**
 * Experimental word list: superposition-style expressions that mix a
 * coefficient prefix (e.g. `cos`) with ket notation. Kept separate from the
 * main word list while their puzzle semantics are still being evaluated.
 *
 * TODO experiment with these:
 */
export const WORDS2 = [
  'cos|0⟩+|+⟩',
  // 'cos|0⟩+eiφsin|+⟩',
];
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.