text stringlengths 1 1.05M |
|---|
# NameSpaceNetworkRuleSetCreate
#
# Creates (or updates) a Service Bus namespace network rule set via the
# generic `az resource create` command.
#
# NOTE(review): SUBSCRIPTION_ID is read below but never assigned here — it
# must be set by the caller, e.g.:
#   SUBSCRIPTION_ID=$(az account show --query id -o tsv)
RESOURCE_GROUP="myresourcegroup"
NAMESPACE_NAME="my"
NETWORK_RULE_SET_NAME="mynetworkruleset"
VIRTUAL_NETWORK_NAME="myvirtualnetwork"
SUBNET_NAME="mysubnet"

# All three virtual network rules reference the same subnet, so build its
# resource ID once.
SUBNET_ID="/subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.Network/virtualNetworks/$VIRTUAL_NETWORK_NAME/subnets/$SUBNET_NAME"

# The payload must be valid JSON: booleans are lowercase true/false and the
# shell expands the variables before the CLI sees the string.  (The original
# used Python-style "..." + VAR + "..." concatenation and True/False inside a
# single-quoted string, which is not valid JSON and would never expand.)
PROPERTIES=$(cat <<EOF
{
  "properties": {
    "defaultAction": "Deny",
    "virtualNetworkRules": [
      {
        "subnet": { "id": "$SUBNET_ID" },
        "ignoreMissingVnetServiceEndpoint": true
      },
      {
        "subnet": { "id": "$SUBNET_ID" },
        "ignoreMissingVnetServiceEndpoint": false
      },
      {
        "subnet": { "id": "$SUBNET_ID" },
        "ignoreMissingVnetServiceEndpoint": false
      }
    ],
    "ipRules": [
      { "ipMask": "1.1.1.1", "action": "Allow" },
      { "ipMask": "1.1.1.2", "action": "Allow" },
      { "ipMask": "1.1.1.3", "action": "Allow" },
      { "ipMask": "1.1.1.4", "action": "Allow" },
      { "ipMask": "1.1.1.5", "action": "Allow" }
    ]
  }
}
EOF
)

az resource create \
  --id "/subscriptions/$SUBSCRIPTION_ID/resourceGroups/$RESOURCE_GROUP/providers/Microsoft.ServiceBus/namespaces/$NAMESPACE_NAME/networkRuleSets/$NETWORK_RULE_SET_NAME" \
  --api-version 2017-04-01 \
  --is-full-object \
  --properties "$PROPERTIES"
"""
Create a program that computes the average distance between two points in a two-dimensional space
"""
import math
def avg_distance(point1, point2):
# Compute the average distance between two points in a two-dimensional space
x1, y1 = point1
x2, y2 = point2
# Calculate distance
dist = math.sqrt(((x2-x1)**2) + ((y2-y1)**2))
return dist
if __name__ == '__main__':
point1 = (2,3)
point2 = (8,6)
print(avg_distance(point1,point2)) |
/***
* Copyright (C) <NAME>. All rights reserved.
* Licensed under the MIT license. See LICENSE file in the project root
* for full license information.
*
* =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
*
* For related information - https://github.com/CodeWithRodi/Cutternet/
*
* Cutternet Backend Source Code
*
* =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
****/
const Mongoose = require('mongoose');
const { FormatError } = require('../Utils/RuntimeError');
const TextSearch = require('mongoose-partial-search');
const Validation = require('../Settings/').General.DataValidation.LinkStatistic;

// Validator tuple builders — every field below declares its [constraint,
// error] pairs through these helpers so the field definitions stay uniform.
const withMax = (limit, code) => [limit, FormatError(code)];
const withMin = (limit, code) => [limit, FormatError(code)];
const isRequired = (code) => [true, FormatError(code)];

// One document per recorded visit of a shortened link: the owning Link,
// the visitor's client details, and coarse geo information.
const LinkStatisticSchema = new Mongoose.Schema({
    Link: {
        type: Mongoose.Schema.ObjectId,
        ref: 'Link',
        required: isRequired('LINK_STATISTIC_URL_NOT_PROVIDED')
    },
    IPAddress: {
        type: String,
        searchable: true,
        maxlength: withMax(Validation.IPAddress.MaxLength, 'LINK_STATISTIC_IP_MAXLENGTH'),
        required: isRequired('LINK_STATISTIC_IP_NOT_PROVIDED')
    },
    BrowserLanguage: {
        type: String,
        maxlength: withMax(Validation.BrowserLanguage.MaxLength, 'LINK_STATISTIC_BROWSER_LANGUAGE_MAXLENGTH'),
        minlength: withMin(Validation.BrowserLanguage.MinLength, 'LINK_STATISTIC_BROWSER_LANGUAGE_MINLENGTH'),
        required: isRequired('LINK_STATISTIC_BROWSER_LANGUAGE_NOT_PROVIDED')
    },
    OperatingSystem: {
        type: String,
        searchable: true,
        maxlength: withMax(Validation.OperatingSystem.MaxLength, 'LINK_STATISTIC_OS_MAXLENGTH'),
        required: isRequired('LINK_STATISTIC_OS_NOT_PROVIDED')
    },
    Browser: {
        type: String,
        searchable: true,
        maxlength: withMax(Validation.Browser.MaxLength, 'LINK_STATISTIC_BROWSER_MAXLENGTH'),
        required: isRequired('LINK_STATISTIC_BROWSER_NOT_PROVIDED')
    },
    Country: {
        type: String,
        searchable: true,
        maxlength: withMax(Validation.Country.MaxLength, 'LINK_STATISTIC_COUNTRY_MAXLENGTH'),
        default: 'Unknown'
    },
    Region: {
        type: String,
        maxlength: withMax(Validation.Region.MaxLength, 'LINK_STATISTIC_REGION_MAXLENGTH'),
        default: 'Unknown'
    },
    City: {
        type: String,
        maxlength: withMax(Validation.City.MaxLength, 'LINK_STATISTIC_CITY_MAXLENGTH'),
        default: 'Unknown'
    },
    Timezone: {
        type: String,
        searchable: true,
        maxlength: withMax(Validation.Timezone.MaxLength, 'LINK_STATISTIC_TIMEZONE_MAXLENGTH'),
        default: 'Unknown'
    },
    Latitude: {
        type: String,
        maxlength: withMax(Validation.Latitude.MaxLength, 'LINK_STATISTIC_LATITUDE_MAXLENGTH'),
        default: 'Unknown'
    },
    Longitude: {
        type: String,
        maxlength: withMax(Validation.Longitude.MaxLength, 'LINK_STATISTIC_LONGITUDE_MAXLENGTH'),
        default: 'Unknown'
    },
    RegisteredAt: {
        type: Date,
        default: Date.now
    }
});

// Enable partial-text search over fields marked `searchable: true`.
LinkStatisticSchema.plugin(TextSearch);

// Always resolve the owning Link's `Link` field on any find* query.
// (Regular function, not arrow: mongoose binds `this` to the query.)
LinkStatisticSchema.pre(/^find/, function (NextMiddleware) {
    this.populate({
        path: 'Link',
        select: 'Link'
    });
    NextMiddleware();
});

const LinkStatistic = Mongoose.model('LinkStatistic', LinkStatisticSchema);

module.exports = LinkStatistic;
|
// GraphQL type definitions (SDL) for the User domain, exported as a
// DocumentNode for apollo-server-express.
//
// NOTE(review): EGender and EUserRole are referenced but not declared in
// this template — presumably they come from a sibling typeDef module that
// is merged at server start-up; confirm.
import { gql } from 'apollo-server-express';
// The tagged template below is runtime data (parsed into an AST at import
// time), so its contents are intentionally left byte-for-byte untouched.
export default gql`
type TUser {
id: ID
firstname: String
lastname: String
email: String
gender: EGender
role: EUserRole
isEmailVerified: Boolean
accountClosed: Boolean
}
type TPaginatedUsers {
results: [TUser]!
page: Int!
limit: Int!
totalPages: Int!
totalResults: Int!
}
input IUpdateUser {
firstname: String
lastname: String
email: String
gender: EGender
role: EUserRole
password: String
}
# Queries
type Query {
user(userId: ID!): TUser
users(
role: EUserRole,
limit: Int,
page: Int,
): TPaginatedUsers!
}
# Mutations
type Mutation {
createUser(
firstname: String!,
lastname: String!,
email: String!,
gender: EGender,
role: EUserRole,
): TUser
updateUser(userId: ID!, data: IUpdateUser!): TUser
deleteUser(userId: ID!): String!
}
`;
|
<reponame>ruritoBlogger/GameAI-FightingAI<gh_stars>0
package RHEA;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Random;
import java.util.Deque;
import enumerate.Action;
import RHEA.bandits.BanditArray;
import RHEA.bandits.BanditGene;
import RHEA.utils.Operations;
import static RHEA.utils.Constants.*;
//General Information
import RHEA.utils.GeneralInformation;
//TODO: Bandits working with inner macro-actions?
import RHEA.Heuristics.*;
import RHEA.utils.ParameterSet;
import struct.FrameData;
/**
 * One individual of the rolling-horizon evolutionary algorithm: a sequence
 * of {@link Gene}s (macro-actions), its evaluation history and a diversity
 * score.  Ordering (compareTo) is by fitness, a blend of the normalised
 * state value and the diversity score weighted by params.D.
 */
public class Individual implements Comparable {

    private Gene[] genes;              // actions in individual. length of individual = actions.length
    private int nActions;              // number of legal actions
    private LinkedList<Double> value;  // reward history; the last entry is the current value
    private double diversityScore;     // diversity contribution vs. the rest of the population
    private boolean canMut;            // false when there are no real action choices to mutate between
    private Random randomGenerator;
    private StateHeuristic heuristic;
    private RollingHorizonPlayer player;
    private RHEAAgent agent;
    int nCalls;                        // forward-model call count consumed by the last evaluate()

    // Class-wide reward bounds observed so far, used for normalisation.
    protected static double[] bounds = new double[] { Double.MAX_VALUE, -Double.MAX_VALUE };

    Individual(int nActions, Random gen, StateHeuristic heuristic, RollingHorizonPlayer player, RHEAAgent agent) {
        this.heuristic = heuristic;
        this.randomGenerator = gen;
        this.nActions = nActions;
        this.player = player;
        this.agent = agent;
        this.diversityScore = 0;
        this.value = new LinkedList<Double>();
        genes = new Gene[agent.getParameters().SIMULATION_DEPTH];
        canMut = true;
        for (int i = 0; i < genes.length; i++) {
            if (nActions <= 1) {
                // With at most one legal action there is nothing to mutate between.
                nActions = 1;
                canMut = false;
            }
            genes[i] = new Gene(randomGenerator, player.params.INNER_MACRO_ACTION_LENGTH, i);
        }
    }

    /**
     * Evaluates an individual by rolling the current state with the actions in the
     * individual and returning the value of the resulting state; random action
     * chosen for the opponent.
     *
     * @param gi - current state, root of rollouts (restored before returning)
     * @return - number of FM calls used during this call
     */
    int evaluate(GeneralInformation gi, ParameterSet params, TreeNode statsTree, BanditArray bandits,
            int[] actionDist) {
        nCalls = 0;

        // Remember the root frame so the state can be restored afterwards.
        FrameData o_fd = gi.getFrameData();

        rollout(gi, actionDist, params); // advance the state through this individual's actions

        double reward;
        if (params.ROLLOUTS) {
            reward = MCrollouts(gi, params);
        } else {
            reward = heuristic.evaluateState(gi);
        }

        // back to original state
        gi.setFrameData(o_fd);

        // Track the class-wide reward bounds used for normalisation elsewhere.
        if (reward < bounds[0])
            bounds[0] = reward;
        if (reward > bounds[1])
            bounds[1] = reward;

        double delta = updateReward(reward);

        // Update bandits; no bandits for inner macro-actions.
        if (params.BANDIT_MUTATION) {
            for (BanditGene bg : bandits.genome) {
                bg.applyReward(delta);
                if (bg.revertOrKeep(delta)) {
                    genes[bg.index].setAction(bg.x, null, 0);
                }
            }
        }

        // Update the statistics tree with the flattened action sequence.
        int[] actions = new int[params.SIMULATION_DEPTH * params.INNER_MACRO_ACTION_LENGTH];
        for (int k = 0; k < params.SIMULATION_DEPTH; k++) {
            System.arraycopy(genes[k].getMacroAction(), 0, actions, k * params.INNER_MACRO_ACTION_LENGTH,
                    params.INNER_MACRO_ACTION_LENGTH);
        }
        if (params.TREE)
            statsTree.rollout(actions, getValue());

        return nCalls;
    }

    // Returns delta (difference between the new reward and the current value).
    double updateReward(double reward) {
        // Update individual value
        this.value.add(reward);
        double avgValue = getValue();
        // NOTE(review): getValue() returns the value just appended, so this
        // delta is always 0 under the current last-value semantics — it reads
        // as if written against a mean-based getValue(); confirm intent
        // before relying on the bandit reward signal.
        return reward - avgValue;
    }

    /** Stores and returns the externally computed diversity score. */
    double updateDiversityScore(double score) {
        diversityScore = score;
        return score;
    }

    /** Advances the state through this individual's macro-actions. */
    private void rollout(GeneralInformation start, int[] actionDist, ParameterSet params) {
        Deque<Action> myActs = new LinkedList<Action>();
        for (int i = 0; i < params.SIMULATION_DEPTH; i++) {
            for (int m = 0; m < params.INNER_MACRO_ACTION_LENGTH; m++) {
                int action = genes[i].getMacroAction()[m];
                // The first gene maps through the "start" action table, later
                // genes through the "continue" table.
                if (i == 0)
                    myActs.add(player.getStartActionMapping(action));
                else
                    myActs.add(player.getContinueActionMapping(action));
                nCalls += params.MACRO_ACTION_LENGTH;
            }
        }
        player.advanceState(start, myActs, start.getFrameData().getCharacter(!start.getMyPlayer()).getAction());
    }

    /** Monte Carlo rollouts: averages the heuristic over REPEAT_ROLLOUT random continuations. */
    private double MCrollouts(GeneralInformation start, ParameterSet params) {
        double reward = 0;
        boolean ok = true;
        FrameData o = start.getFrameData();
        for (int k = 0; k < params.REPEAT_ROLLOUT; k++) {
            for (int j = 0; j < params.ROLLOUT_LENGTH; j++) {
                if (ok) {
                    LinkedList<Action> acts = start.getMySelectedActions();
                    int bound = acts.size();
                    Action action = null;
                    if (bound > 0) {
                        action = acts.get(randomGenerator.nextInt(bound));
                    }
                    ok = player.advanceState(start, action, agent);
                    nCalls += params.MACRO_ACTION_LENGTH;
                } else {
                    break;
                }
            }
            double thisReward = heuristic.evaluateState(start);
            reward += thisReward;
            start.setFrameData(o);
        }
        reward /= params.REPEAT_ROLLOUT;
        return reward;
    }

    /**
     * Returns 1 minus the average frequency of this individual's actions
     * across all generations (higher = more diverse).
     */
    double diversityDiff(Population population) {
        double diff = 0;
        int[] actionSequence = getActions();
        double maxCount = player.params.POPULATION_SIZE
                + (population.numGenerations - 1) * (player.params.POPULATION_SIZE - player.params.ELITISM);
        HashMap<Integer, Integer>[] actionCountAllGen = population.getActionCountAllGen();
        // NOTE(review): unused — the phenotype branch below is unimplemented.
        HashMap<Integer, Integer> posCellCountAllGen = population.getPosCellCountAllGen();
        for (int i = 0; i < actionSequence.length; i++) {
            if (player.params.DIVERSITY_TYPE == DIVERSITY_GENOTYPE) {
                // Compare actions
                diff += actionCountAllGen[i].get(actionSequence[i]) / (maxCount);
            } else if (player.params.DIVERSITY_TYPE == DIVERSITY_PHENOTYPE) {
                // TODO: phenotype (position based) diversity not implemented.
            }
        }
        diff /= (player.params.SIMULATION_DEPTH * player.params.INNER_MACRO_ACTION_LENGTH);
        return 1 - diff;
    }

    /**
     * Returns the current value of this individual (the most recent reward).
     */
    double getValue() {
        return value.getLast();
    }

    /** Resets (clears) the reward history of this individual. */
    void resetValue() {
        value.clear();
    }

    /** Returns the current diversity score of this individual. */
    double getDiversityScore() {
        return diversityScore;
    }

    /** Sets one action inside the macro-action of gene idx. */
    void setGene(int idx, int singleAction, int idxMacro) {
        genes[idx].setAction(singleAction, null, idxMacro);
    }

    /** Copies the contents of gene g into gene idx. */
    void setGene(int idx, Gene g) {
        genes[idx].setGene(g);
    }

    /** Re-randomises the macro-action of gene idx. */
    void setGene(int idx) {
        genes[idx].randomActions(player.params.INNER_MACRO_ACTION_LENGTH);
    }

    public Gene getGene(int idx) {
        return genes[idx];
    }

    /** Flattens all genes' macro-actions into one action sequence. */
    public int[] getActions() {
        int[] actionSequence = new int[player.params.SIMULATION_DEPTH * player.params.INNER_MACRO_ACTION_LENGTH];
        int i = 0;
        for (Gene g : genes) {
            for (int a : g.getMacroAction()) {
                actionSequence[i] = a;
                i++;
            }
        }
        return actionSequence;
    }

    /**
     * Mutate this individual (no new individual is created). Select which gene to
     * mutate and let the gene decide what to mutate to. Use default mutation (one
     * random gene mutated uniformly at random) unless a bias/diversity strategy
     * is configured.
     */
    void mutate(Population population) {
        int no_mutations = player.params.MUTATION;
        if (canMut) {
            int count = 0;
            while (count < no_mutations) {
                int idxGene; // index of gene to mutate
                int idxActionToMutate = -1; // index of action to mutate
                if (player.params.MUT_BIAS) {
                    // bias mutations towards the beginning of the array of individuals, softmax
                    int L = genes.length;
                    double[] p = new double[L];
                    double sum = 0, psum = 0;
                    for (int i = 0; i < L; i++) {
                        sum += Math.pow(Math.E, -(i + 1));
                    }
                    // Use the injected generator (the original called
                    // Math.random(), which broke seeded reproducibility).
                    double prob = randomGenerator.nextDouble();
                    idxGene = 0;
                    for (int i = 0; i < L; i++) {
                        p[i] = Math.pow(Math.E, -(i + 1)) / sum;
                        psum += p[i];
                        if (psum > prob) {
                            idxGene = i;
                            break;
                        }
                    }
                } else if (player.params.MUT_DIVERSITY && player.params.DIVERSITY_TYPE == DIVERSITY_GENOTYPE) {
                    // find action most similar to the others
                    int actionIdx = 0;
                    int max = 0;
                    HashMap<Integer, Integer>[] actionCountAllGen = population.getActionCountAllGen();
                    int[] actionSequence = getActions();
                    for (int i = 0; i < actionSequence.length; i++) {
                        int actionCount = actionCountAllGen[i].get(actionSequence[i]);
                        if (actionCount > max) {
                            max = actionCount;
                            actionIdx = i;
                        }
                    }
                    // find gene with actionIdx
                    idxGene = actionIdx / player.params.INNER_MACRO_ACTION_LENGTH;
                    idxActionToMutate = actionIdx % player.params.INNER_MACRO_ACTION_LENGTH;
                } else if (player.params.MUT_DIVERSITY && player.params.DIVERSITY_TYPE == DIVERSITY_PHENOTYPE) {
                    // TODO: phenotype-based selection is unimplemented — the loop
                    // below computes indices but never updates actionIdx/max, so
                    // this branch currently always mutates gene 0, action 0.
                    int actionIdx = 0;
                    int max = 0;
                    HashMap<Integer, Integer> posCellCountAllGen = population.getPosCellCountAllGen();
                    int[] actionSequence = getActions();
                    for (int i = 0; i < actionSequence.length; i++) {
                        int idxG = i / player.params.INNER_MACRO_ACTION_LENGTH;
                        int idxAction = i % player.params.INNER_MACRO_ACTION_LENGTH;
                    }
                    // find gene with actionIdx
                    idxGene = actionIdx / player.params.INNER_MACRO_ACTION_LENGTH;
                    idxActionToMutate = actionIdx % player.params.INNER_MACRO_ACTION_LENGTH;
                } else {
                    // random mutation of one gene
                    idxGene = randomGenerator.nextInt(genes.length);
                }
                genes[idxGene].mutate(population, player.params.MUT_DIVERSITY, player.params.DIVERSITY_TYPE, idxGene,
                        idxActionToMutate); // gene decides what the new value is
                count++;
            }
        }
    }

    /**
     * Mutate this individual (no new individual is created). Use bandit mutation.
     */
    void banditMutate(BanditArray bandits) {
        if (canMut) {
            BanditGene g = bandits.selectGeneToMutate();
            g.banditMutate();
            genes[g.index].setAction(g.x, null, 0);
        }
    }

    /**
     * Sets the actions of this individual to a (deep) copy of the given genes.
     *
     * @param a - new array of genes.
     */
    private void setGenes(Gene[] a) {
        for (int i = 0; i < a.length; i++) {
            genes[i] = a[i].copy();
        }
    }

    /** Descending order by fitness: value (normalised) blended with diversity. */
    @Override
    public int compareTo(Object o) {
        Individual a = this;
        Individual b = (Individual) o;
        double valueA = Operations.normalise(a.getValue(), bounds[0], bounds[1]);
        double valueB = Operations.normalise(b.getValue(), bounds[0], bounds[1]);
        double diversityA = a.getDiversityScore();
        double diversityB = b.getDiversityScore();
        double fitnessA, fitnessB;
        fitnessA = valueA * (1 - a.player.params.D) + diversityA * a.player.params.D;
        fitnessB = valueB * (1 - b.player.params.D) + diversityB * b.player.params.D;
        if (fitnessA < fitnessB)
            return 1;
        else if (fitnessA > fitnessB)
            return -1;
        else
            return 0;
    }

    /** Two individuals are equal when their gene contents are equal. */
    @Override
    public boolean equals(Object o) {
        if (!(o instanceof Individual))
            return false;
        Individual b = (Individual) o;
        if (genes.length != b.genes.length)
            return false;
        for (int i = 0; i < genes.length; i++) {
            // Compare gene contents, not references — the original used !=,
            // which made individuals with identical actions compare unequal.
            if (!genes[i].equals(b.genes[i]))
                return false;
        }
        return true;
    }

    /** Consistent with equals(): hash is derived from the genes only. */
    @Override
    public int hashCode() {
        int h = 1;
        for (Gene g : genes) {
            h = 31 * h + (g == null ? 0 : g.hashCode());
        }
        return h;
    }

    /** Deep-ish copy: genes and value history are copied, helpers are shared. */
    public Individual copy() {
        Individual a = new Individual(this.nActions, this.randomGenerator, this.heuristic, this.player, this.agent);
        // Defensive copy — the original aliased the value list, so resetting
        // or updating the copy silently mutated its source individual.
        a.value = new LinkedList<Double>(this.value);
        a.diversityScore = this.diversityScore;
        a.setGenes(this.genes);
        a.canMut = this.canMut;
        return a;
    }

    @Override
    public String toString() {
        double value = getValue();
        String s = "Value = " + String.format("%.2f", value) + ": NormValue = "
                + String.format("%.2f", Operations.normalise(value, bounds[0], bounds[1])) + ": DiversityScore = "
                + String.format("%.2f", diversityScore) + ": Actions = ";
        for (Gene action : genes)
            s += action + " ";
        s += "lower:" + bounds[0] + " upper:" + bounds[1];
        return s;
    }
}
|
<gh_stars>100-1000
/***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include "../ForwardTranslator.hpp"
#include "../../model/Model.hpp"
#include "../../model/Schedule.hpp"
#include "../../model/Schedule_Impl.hpp"
#include "../../model/Node.hpp"
#include "../../model/Node_Impl.hpp"
#include "../../model/CoilCoolingWaterToAirHeatPumpEquationFit.hpp"
#include "../../model/CoilCoolingWaterToAirHeatPumpEquationFit_Impl.hpp"
#include "../../model/CurveQuadLinear.hpp"
#include "../../model/CurveQuintLinear.hpp"
#include "../../utilities/core/Logger.hpp"
#include "../../utilities/core/Assert.hpp"
#include <utilities/idd/Coil_Cooling_WaterToAirHeatPump_EquationFit_FieldEnums.hxx>
#include "../../utilities/idd/IddEnums.hpp"
#include <utilities/idd/IddEnums.hxx>
#include <utilities/idd/IddFactory.hxx>
using namespace openstudio::model;
//using namespace std;
namespace openstudio {
namespace energyplus {
boost::optional<IdfObject>
  ForwardTranslator::translateCoilCoolingWaterToAirHeatPumpEquationFit(CoilCoolingWaterToAirHeatPumpEquationFit& modelObject) {

  // Make sure the modelObject gets put into the map, and the new idfObject
  // gets put into the final file.  Also sets the idfObject's name.
  IdfObject idfObject = createRegisterAndNameIdfObject(IddObjectType::Coil_Cooling_WaterToAirHeatPump_EquationFit, modelObject);

  // Writes the node name for a connection when the connected ModelObject is a Node.
  auto setNodeNameIfPresent = [&idfObject](auto field, const boost::optional<ModelObject>& mo) {
    if (mo) {
      if (boost::optional<Node> node = mo->optionalCast<Node>()) {
        idfObject.setString(field, node->name().get());
      }
    }
  };

  // A3: Water Inlet Node Name
  setNodeNameIfPresent(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::WaterInletNodeName, modelObject.waterInletModelObject());
  // A4: Water Outlet Node Name
  setNodeNameIfPresent(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::WaterOutletNodeName, modelObject.waterOutletModelObject());
  // A5: Air Inlet Node Name
  setNodeNameIfPresent(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::AirInletNodeName, modelObject.airInletModelObject());
  // A6: Air Outlet Node Name
  setNodeNameIfPresent(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::AirOutletNodeName, modelObject.airOutletModelObject());

  // Writes a numeric field, falling back to "Autosize" when the model has no value.
  auto setDoubleOrAutosize = [&idfObject](auto field, const boost::optional<double>& value) {
    if (value) {
      idfObject.setDouble(field, *value);
    } else {
      idfObject.setString(field, "Autosize");
    }
  };

  // N1: Rated Air Flow Rate
  setDoubleOrAutosize(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::RatedAirFlowRate, modelObject.ratedAirFlowRate());
  // N2: Rated Water Flow Rate
  setDoubleOrAutosize(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::RatedWaterFlowRate, modelObject.ratedWaterFlowRate());
  // N3: Gross Rated Total Cooling Capacity
  setDoubleOrAutosize(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::GrossRatedTotalCoolingCapacity, modelObject.ratedTotalCoolingCapacity());
  // N4: Gross Rated Sensible Cooling Capacity
  setDoubleOrAutosize(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::GrossRatedSensibleCoolingCapacity, modelObject.ratedSensibleCoolingCapacity());

  // N5: Rated Cooling Coefficient of Performance (no autosize fallback)
  if (boost::optional<double> cop = modelObject.ratedCoolingCoefficientofPerformance()) {
    idfObject.setDouble(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::GrossRatedCoolingCOP, *cop);
  }

  // Total Cooling Capacity Curve Name
  {
    auto curve = modelObject.totalCoolingCapacityCurve();
    if (auto _curve = translateAndMapModelObject(curve)) {
      idfObject.setString(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::TotalCoolingCapacityCurveName, _curve->nameString());
    }
  }

  // Sensible Cooling Capacity Curve Name
  {
    auto curve = modelObject.sensibleCoolingCapacityCurve();
    if (auto _curve = translateAndMapModelObject(curve)) {
      idfObject.setString(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::SensibleCoolingCapacityCurveName, _curve->nameString());
    }
  }

  // Cooling Power Consumption Curve Name
  {
    auto curve = modelObject.coolingPowerConsumptionCurve();
    if (auto _curve = translateAndMapModelObject(curve)) {
      idfObject.setString(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::CoolingPowerConsumptionCurveName, _curve->nameString());
    }
  }

  // N22: Nominal Time for Condensate Removal to Begin (written only when set)
  if (boost::optional<double> v = modelObject.nominalTimeforCondensateRemovaltoBegin()) {
    idfObject.setDouble(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::NominalTimeforCondensateRemovaltoBegin, *v);
  }

  // N23: Ratio of Initial Moisture Evaporation Rate and Steady State Latent Capacity
  if (boost::optional<double> v = modelObject.ratioofInitialMoistureEvaporationRateandSteadyStateLatentCapacity()) {
    idfObject.setDouble(Coil_Cooling_WaterToAirHeatPump_EquationFitFields::RatioofInitialMoistureEvaporationRateandSteadyStateLatentCapacity,
                        *v);
  }

  return idfObject;
}
} // namespace energyplus
} // namespace openstudio
|
<gh_stars>1-10
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
from . import const_pass, fold_scale_pass, fuse_pass
from .optimization import optimize
__all__ = ["optimize"]
|
package org.spongycastle.tls;
/**
* RFC 5056
* <p>
* Note that the values here are implementation-specific and arbitrary. It is recommended not to
* depend on the particular values (e.g. serialization).
*/
public class ChannelBinding
{
    /*
     * RFC 5929
     */

    /** The 'tls-server-end-point' channel binding type (RFC 5929). */
    public static final int tls_server_end_point = 0;

    /** The 'tls-unique' channel binding type (RFC 5929). */
    public static final int tls_unique = 1;

    /** The 'tls-unique-for-telnet' channel binding type (RFC 5929). */
    public static final int tls_unique_for_telnet = 2;
}
|
from typing import List
def generate_url_patterns(urls: List[str], views: List[type]) -> str:
    """Render Python source for a Django ``urlpatterns`` list of class-based views.

    Args:
        urls: URL path strings (e.g. ``'home/'``).
        views: class-based view classes (the original annotation said
            ``List[str]``, but the code calls ``view.__name__`` and
            ``.as_view()``, so the elements must be classes).  Paired with
            ``urls`` positionally; extras in the longer list are ignored
            (``zip`` semantics).

    Returns:
        Source text of a ``urlpatterns = [...]`` assignment, one
        ``path(...)`` entry per (url, view) pair.
    """
    url_patterns = []
    for url, view in zip(urls, views):
        # Route name defaults to the lowercased view class name.
        view_name = view.__name__.lower()
        url_patterns.append(
            f"path('{url}', {view.__name__}.as_view(), name='{view_name}')"
        )
    return "urlpatterns = [\n    " + ",\n    ".join(url_patterns) + "\n]\n"
/* Copyright (c) 2001-2014, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb.test;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.hsqldb.lib.StopWatch;
/**
* A quick test of the new CompiledStatement and batch execution facilities.
*
* @author <NAME> (<EMAIL> dot source<EMAIL>)
* @version 1.9.0
* @since 1.7.2
*/
// fredt@users - modified to do some network connection and generated result tests
public class TestBatchExecution extends TestBase {
// --- SQL fragments shared by all of the batch tests below ---
static final String drop_table_sql = "drop table test if exists";
static final String create_cached = "create cached ";
static final String create_memory = "create memory ";
static final String create_temp = "create temp ";
// Common column definition appended to one of the create_* prefixes above.
static final String table_sql = "table test(id int identity primary key,"
+ "fname varchar(20), lname "
+ "varchar(20), zip int)";
static final String insert_sql = "insert into test values(?,?,?,?)";
static final String update_sql =
"update test set fname = 'Hans' where id = ?";
static final String select_sql = "select * from test where id = ?";
static final String delete_sql = "delete from test where id = ?";
// HSQLDB built-in returning the last identity value for this session.
static final String call_sql = "call identity()";
static final String shutdown_sql = "shutdown compact";
// Defaults used when no command-line arguments are passed to main().
static final String def_db_path = "batchtest";
static final int def_runs = 5;
static final int rows = 10000;
// Shared JDBC state; reassigned as the tests reconnect after shutdowns.
static Connection conn;
static Statement stmnt;
static String url;
/** Standard TestBase constructor; forwards the test name. */
public TestBatchExecution(String name) {
super(name);
}
/**
 * JUnit entry point: opens a fresh connection, then exercises the
 * non-prepared batch path and one short prepared-statement run.
 */
public void test() throws Exception {
conn = newConnection();
stmnt = conn.createStatement();
url = super.url;
nonPreparedTest();
preparedTestOne(5);
}
/** Convenience wrapper around System.out.print. */
static void print(String s) {
System.out.print(s);
}
/** Convenience wrapper around System.out.println. */
static void println(String s) {
System.out.println(s);
}
/**
 * Prints the elapsed-time message for a command batch followed by its
 * throughput in operations per second.
 *
 * @param sw    stopwatch started before the batch ran
 * @param cmd   human-readable command label
 * @param count number of operations executed
 */
static void printCommandStats(StopWatch sw, String cmd, int count) {

    long et = sw.elapsedTime();

    print(sw.elapsedTimeToMessage(count + " " + cmd));

    // Guard against division by zero when the batch finishes in under 1 ms.
    println(" " + ((1000 * count) / Math.max(et, 1)) + " ops/s.");
}
/**
 * Standalone entry point.
 *
 * Usage: TestBatchExecution [runs] [dbPath]
 * Both arguments are optional; parsing is deliberately best-effort and
 * bad or missing arguments keep the defaults.
 */
public static void main(String[] args) throws Exception {

    int runs = def_runs;
    String db_path = def_db_path;

    try {
        runs = Integer.parseInt(args[0]);
    } catch (Exception e) {}

    // (The original re-assigned db_path = "batchtest" here, redundantly
    // overwriting the identical default — removed.)
    try {
        db_path = args[1];
    } catch (Exception e) {}

    // register the driver and open the encrypted, cached-table database
    Driver driver =
        (Driver) Class.forName("org.hsqldb.jdbc.JDBCDriver").newInstance();

    DriverManager.registerDriver(driver);

    url = "jdbc:hsqldb:file:" + db_path
          + ";crypt_key=604a6105889da65326bf35790a923932;crypt_type=blowfish;hsqldb.default_table_type=cached;hsqldb.cache_rows=100";
    conn = DriverManager.getConnection(url, "SA", "");
    stmnt = conn.createStatement();

    runTests(runs);
}
/**
 * Runs preparedTestOne against the same schema backed by three table
 * types in turn (cached, memory, temp), shutting the database down and
 * reconnecting between phases, then runs the two follow-up tests.
 */
static void runTests(int runs) throws Exception {
println("");
println("***************************************");
println("featuring cached (persistent) table");
println("***************************************");
// drop and recreate the test table
println(drop_table_sql);
stmnt.execute(drop_table_sql);
println(create_cached + table_sql);
stmnt.execute(create_cached + table_sql);
preparedTestOne(runs);
// drop the test table and shut down database
println(drop_table_sql);
stmnt.execute(drop_table_sql);
println("---------------------------------------");
println("shutting down database");
stmnt.execute(shutdown_sql);
println("---------------------------------------");
// get the connection and statement
conn = DriverManager.getConnection(url, "SA", "");
stmnt = conn.createStatement();
println("");
println("***************************************");
println("featuring memory (persistent) table");
println("***************************************");
// drop and recreate the test table
println(drop_table_sql);
stmnt.execute(drop_table_sql);
println(create_memory + table_sql);
stmnt.execute(create_memory + table_sql);
preparedTestOne(runs);
// drop the test table and shut down database
println(drop_table_sql);
stmnt.execute(drop_table_sql);
println("---------------------------------------");
println("shutting down database");
stmnt.execute(shutdown_sql);
println("---------------------------------------");
// get the connection and statement
conn = DriverManager.getConnection(url, "SA", "");
stmnt = conn.createStatement();
println("");
println("***************************************");
println("featuring temp (transient) table");
println("***************************************");
// drop and recreate the test table
println(drop_table_sql);
stmnt.execute(drop_table_sql);
println(create_temp + table_sql);
stmnt.execute(create_temp + table_sql);
preparedTestOne(runs);
// drop the test table
println(drop_table_sql);
stmnt.execute(drop_table_sql);
println("---------------------------------------");
println("shutting down database");
stmnt.execute(shutdown_sql);
println("---------------------------------------");
// NOTE(review): these run after the final shutdown with no reconnect
// here — presumably they open their own connections; confirm.
preparedTestTwo();
preparedTestThree();
}
/**
 * Recreates the memory test table using a plain (non-prepared) batch:
 * queues the drop and the create statement, then executes them together.
 */
public static void nonPreparedTest() throws Exception {
    final String[] batch = { drop_table_sql, create_memory + table_sql };
    for (String sql : batch) {
        stmnt.addBatch(sql);
    }
    stmnt.executeBatch();
}
/**
 * Times batched prepared INSERT / UPDATE / SELECT / DELETE / CALL
 * statements against the current test table, using the class-level
 * conn/stmnt and the class-level SQL strings and row count.
 *
 * NOTE(review): the runs parameter is currently unused - the test loop
 * below is hard-coded to a single iteration (i < 1); confirm intent.
 * NOTE(review): the select and call batches are populated but their
 * execution is commented out below.
 */
public static void preparedTestOne(int runs) throws Exception {
PreparedStatement insertStmnt;
PreparedStatement updateStmnt;
PreparedStatement selectStmnt;
PreparedStatement deleteStmnt;
PreparedStatement callStmnt;
StopWatch sw;
println("---------------------------------------");
println("Preparing Statements:");
println("---------------------------------------");
println(insert_sql);
println(update_sql);
println(select_sql);
println(delete_sql);
println(call_sql);
sw = new StopWatch();
// prepare the statements (insert also requests generated keys)
insertStmnt = conn.prepareStatement(insert_sql,
Statement.RETURN_GENERATED_KEYS);
updateStmnt = conn.prepareStatement(update_sql);
selectStmnt = conn.prepareStatement(select_sql);
deleteStmnt = conn.prepareStatement(delete_sql);
callStmnt = conn.prepareCall(call_sql);
println("---------------------------------------");
println(sw.elapsedTimeToMessage("statements prepared"));
println("---------------------------------------");
sw.zero();
// set up the batch data: one entry per row for each statement
for (int i = 0; i < rows; i++) {
insertStmnt.setInt(1, i);
insertStmnt.setString(2, "Julia");
insertStmnt.setString(3, "Peterson-Clancy");
insertStmnt.setInt(4, i);
updateStmnt.setInt(1, i);
selectStmnt.setInt(1, i);
deleteStmnt.setInt(1, i);
insertStmnt.addBatch();
updateStmnt.addBatch();
selectStmnt.addBatch();
deleteStmnt.addBatch();
callStmnt.addBatch();
}
println("---------------------------------------");
println(sw.elapsedTimeToMessage("" + 5 * rows
+ " batch entries created"));
sw.zero();
// run the timed test cycle once (loop bound is 1 - see NOTE above)
for (int i = 0; i < 1; i++) {
println("---------------------------------------");
// inserts
sw.zero();
insertStmnt.executeBatch();
printCommandStats(sw, "inserts", rows);
// read back the generated keys, sampling every 1000th for display
ResultSet generated = insertStmnt.getGeneratedKeys();
StringBuffer sb = new StringBuffer();
int genCount = 0;
while (generated.next()) {
int gen = generated.getInt(1);
if (gen % 1000 == 0) {
sb.append(gen).append(" - ");
}
genCount++;
}
System.out.println(sb.toString());
printCommandStats(sw, "generated reads", genCount);
// updates
sw.zero();
int[] updateCounts = updateStmnt.executeBatch();
printCommandStats(sw, "updates", updateCounts.length);
// selects (currently disabled)
sw.zero();
// selectStmnt.executeBatch();
// printCommandStats(sw, "selects");
// deletes
sw.zero();
updateCounts = deleteStmnt.executeBatch();
printCommandStats(sw, "deletes", updateCounts.length);
// calls (currently disabled)
sw.zero();
// callStmnt.executeBatch();
// printCommandStats(sw, "calls ");
}
}
/**
 * Regression test for batched prepared INSERT against an in-memory
 * database: inserts ids 1..4 one batch entry at a time, committing after
 * each executeBatch(), then dumps the table contents.
 *
 * Fix: the Connection and Statement were never closed (leaked on every
 * call); both are now managed with try-with-resources, which also closes
 * the final ResultSet.
 */
public static void preparedTestTwo() {
    System.out.println("preparedTestTwo");
    try {
        Class.forName("org.hsqldb.jdbc.JDBCDriver");
        try (Connection con = DriverManager.getConnection("jdbc:hsqldb:mem:.", "sa", "");
                Statement stmt = con.createStatement()) {
            System.out.println("con=" + con);
            try {
                stmt.executeUpdate("drop table ttt");
            } catch (Exception e) {}
            stmt.executeUpdate("create table ttt (id integer)");
            PreparedStatement prep =
                con.prepareStatement("INSERT INTO ttt (id) VALUES (?)");
            con.setAutoCommit(false);
            for (int i = 1; i <= 4; i++) { // [2, 3, 4]
                prep.setInt(1, i);
                prep.addBatch();
                System.out.println("executeBatch() for " + i);
                prep.executeBatch();
                con.commit();
                // prep.clearBatch(); // -> java.lang.NullPointerException
                // at org.hsqldb.Result.getUpdateCounts(Unknown Source)
            }
            prep.close();
            // see what we got
            ResultSet rs = stmt.executeQuery("select * from ttt");
            while (rs.next()) {
                System.out.println("id = " + rs.getInt(1));
            }
            System.out.println("bye.");
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    }
}
/**
 * Regression test for a batched prepared DELETE keyed on a VARBINARY
 * primary key: inserts one row with a NULL name and a fixed binary id,
 * deletes it via a batch, then verifies the table is empty.
 *
 * Fixes: the Connection and Statement were never closed (leaked on every
 * call) - now managed with try-with-resources; also corrected the
 * "row retreived" output typo.
 */
public static void preparedTestThree() {
    System.out.println("preparedTestThree");
    try {
        Class.forName("org.hsqldb.jdbc.JDBCDriver");
        try (Connection con = DriverManager.getConnection("jdbc:hsqldb:mem:.", "sa", "");
                Statement stmt = con.createStatement()) {
            con.setAutoCommit(false);
            System.out.println("con=" + con);
            try {
                stmt.executeUpdate("drop table node");
            } catch (Exception e) {}
            stmt.executeUpdate(
                "create table Node (id varbinary(255) not null, name varchar(255), primary key (id))");
            PreparedStatement prep = con.prepareStatement(
                "insert into Node (name, id) values (?, ?)");
            // fixed 16-byte key used for both the insert and the delete
            byte[] byteArray = null;
            try {
                byteArray =
                    org.hsqldb.lib.StringConverter.hexStringToByteArray(
                        "c0a8000a30d110808130d18080880000");
            } catch (Exception e) {
                //
            }
            prep.setNull(1, java.sql.Types.VARCHAR);
            prep.setBytes(2, byteArray);
            int result = prep.executeUpdate();
            prep.close();
            prep = con.prepareStatement("delete from Node where id=?");
            prep.setBytes(1, byteArray);
            prep.addBatch();
            System.out.println("executeBatch() for delete");
            prep.executeBatch();
            con.commit();
            // prep.clearBatch(); // -> java.lang.NullPointerException
            // at org.hsqldb.Result.getUpdateCounts(Unknown Source)
            prep.close();
            // see what we got (expect no rows after the delete)
            ResultSet rs = stmt.executeQuery("select * from Node");
            while (rs.next()) {
                System.out.println("row retrieved");
            }
            System.out.println("bye.");
        }
    } catch (SQLException e) {
        e.printStackTrace();
    } catch (ClassNotFoundException e) {
        e.printStackTrace();
    }
}
}
|
# PrimeTime power-analysis flow driver.
# Expects DESIGN, TEST, MODULE (and optionally COND) in the environment.
echo "usage: define three env variables DESIGN, TEST, MODULE beforehand"
# COND, when set, is appended to the fsdb name as "_$COND".
# Quote the expansion: an unset/empty COND would otherwise make
# `[ -z ]` see no operand at all.
[ -z "$COND" ] && cond="" || cond="_$COND"
echo "$cond"
fsdbfile="../sim/${DESIGN}_${TEST}${cond}.fsdb"
echo "$fsdbfile"
# Convert the waveform to SAIF, then run the PrimeTime script.
sh fsdb2saif.sh "$fsdbfile"
pt_shell -f primetime.tcl
|
# Termux package metadata for MPD (Music Player Daemon).
TERMUX_PKG_HOMEPAGE=https://www.musicpd.org
TERMUX_PKG_DESCRIPTION="Music player daemon"
TERMUX_PKG_LICENSE="GPL-2.0"
TERMUX_PKG_VERSION=0.22.3
TERMUX_PKG_SRCURL=https://github.com/MusicPlayerDaemon/MPD/archive/v$TERMUX_PKG_VERSION.tar.gz
TERMUX_PKG_SHA256=8ef420742647c4c6b39459545869dd3071b46780b728cf4d63b2b10d85d808eb
TERMUX_PKG_DEPENDS="libc++, libcurl, libexpat, libid3tag, libopus, pulseaudio, libmpdclient, openal-soft, libvorbis, libsqlite, ffmpeg, libmp3lame, libbz2, libogg, libnfs, zlib"
TERMUX_PKG_BUILD_DEPENDS="boost"
# Meson options disabling optional features for this build
# (ALSA, libao, epoll, iconv, ICU, MAD, PCRE, sndio).
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-Dalsa=disabled
-Dao=disabled
-Depoll=false
-Diconv=disabled
-Dicu=disabled
-Dmad=disabled
-Dpcre=disabled
-Dsndio=disabled
"
TERMUX_PKG_CONFFILES="etc/mpd.conf"
# Service wrapper: prefer a per-user config in ~/.mpd when present.
TERMUX_PKG_SERVICE_SCRIPT=("mpd" 'if [ -f "$HOME/.mpd/mpd.conf" ]; then CONFIG="$HOME/.mpd/mpd.conf"; else CONFIG="$PREFIX/etc/mpd.conf"; fi\nexec mpd --stdout --no-daemon $CONFIG 2>&1')
# Pre-configure hook: refuses on-device builds, adjusts compiler/linker
# flags for Termux, clears any stale config, and points boost detection
# at the Termux prefix. Fix: quote all $TERMUX_PREFIX expansions so a
# prefix containing spaces cannot split into multiple words.
termux_step_pre_configure() {
	# Certain packages are not safe to build on device because their
	# build.sh script deletes specific files in $TERMUX_PREFIX.
	if $TERMUX_ON_DEVICE_BUILD; then
		termux_error_exit "Package '$TERMUX_PKG_NAME' is not safe for on-device builds."
	fi
	CXXFLAGS+=" -DTERMUX -UANDROID"
	LDFLAGS+=" -lOpenSLES"
	# Remove any stale config so the install step can lay down a fresh one.
	rm -f "$TERMUX_PREFIX/etc/mpd.conf"
	export BOOST_ROOT="$TERMUX_PREFIX"
}
# Post-install hook: installs the upstream example config as the package
# default. Fix: quote both paths so directories with spaces survive
# word splitting.
termux_step_post_make_install() {
	install -Dm600 "$TERMUX_PKG_SRCDIR/doc/mpdconf.example" "$TERMUX_PREFIX/etc/mpd.conf"
}
# Writes the package's postinst maintainer script: it creates the
# playlist directory the default mpd.conf expects.
termux_step_create_debscripts() {
	{
		echo "#!$TERMUX_PREFIX/bin/sh"
		echo 'mkdir -p $HOME/.mpd/playlists'
	} > postinst
}
|
import sys
import logging
import functools
import traceback
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.glob as s_glob
import synapse.telepath as s_telepath
import synapse.lib.cmd as s_cmd
import synapse.lib.output as s_output
import synapse.lib.version as s_version
logger = logging.getLogger(__name__)
desc = '''
Manage permissions of users, roles, and objects in a remote cell.
'''

# Module-level output sink; assigned by main() before any handler runs.
outp = None

# Minimum remote Synapse version with auth-gate support (see handleModify).
min_authgate_vers = (0, 1, 33)

# Range of remote cell versions this tool supports.
reqver = '>=0.2.0,<3.0.0'
denyallow = ['deny', 'allow']

def reprrule(rule):
    '''
    Format an (allow, parts) rule tuple as "deny|allow: dotted.path".
    '''
    allowed, parts = rule
    return f'{denyallow[allowed]}: {".".join(parts)}'
async def printuser(user, details=False, cell=None):
    '''
    Print a user or role definition to the module-level outp.

    Rules are numbered with a single running index that continues across
    the auth gates, matching the indexes accepted by "modify --delrule".
    When details is True (and cell is provided), each role a user holds
    is expanded with its own rules via cell.getAuthInfo().
    '''
    authtype = user.get('type')

    outp.printf(f"{user.get('name')} ({user.get('iden')})")
    outp.printf(f'type: {authtype}')

    admin = user.get('admin')
    if admin is not None:
        outp.printf(f'admin: {admin}')

    if authtype == 'user':
        outp.printf(f"locked: {user.get('locked')}")

    seq = 0

    def _showrules(rules, pad):
        # Print each rule at the given indent, advancing the shared index.
        nonlocal seq
        for rule in rules:
            outp.printf(f'{pad}{seq} {reprrule(rule)}')
            seq += 1

    outp.printf('rules:')
    _showrules(user.get('rules'), '    ')

    for gateiden, gateinfo in user.get('authgates', {}).items():
        outp.printf(f'  auth gate: {gateiden}')
        _showrules(gateinfo.get('rules', ()), '    ')

    outp.printf('')

    if authtype == 'user':
        outp.printf('roles:')
        for rolename in sorted(user.get('roles')):
            outp.printf(f'  role: {rolename}')
            if details:
                seq = 0  # each role restarts its own rule numbering
                role = await cell.getAuthInfo(rolename)
                _showrules(role.get('rules', ()), '    ')
                for gateiden, gateinfo in role.get('authgates', {}).items():
                    outp.printf(f'    auth gate: {gateiden}')
                    _showrules(gateinfo.get('rules', ()), '      ')
async def handleModify(opts):
    '''
    Handle the "modify" subcommand: apply the requested add/del/set
    operations to the named user or role on the remote cell, then print
    the resulting definition.

    Returns 0 on success, -1 on a usage error, 1 on failure.
    '''
    cell_supports_authgate = False
    if opts.object and not opts.addrule:
        outp.printf('--object option only valid with --addrule')
        return -1
    try:
        async with await s_telepath.openurl(opts.cellurl) as cell:

            # Helpers resolving a user/role name to its iden via the cell.
            async def useriden(name):
                udef = await cell.getUserDefByName(name)
                return udef['iden']

            async def roleiden(name):
                rdef = await cell.getRoleDefByName(name)
                return rdef['iden']

            s_version.reqVersion(cell._getSynVers(), reqver)
            # Auth gates (per-object rules) require a minimum remote version.
            if cell._getSynVers() >= min_authgate_vers:
                cell_supports_authgate = True

            if opts.adduser:
                outp.printf(f'adding user: {opts.name}')
                user = await cell.addUser(opts.name)

            if opts.deluser:
                outp.printf(f'deleting user: {opts.name}')
                await cell.delUser(await useriden(opts.name))

            if opts.addrole:
                outp.printf(f'adding role: {opts.name}')
                user = await cell.addRole(opts.name)

            if opts.delrole:
                outp.printf(f'deleting role: {opts.name}')
                await cell.delRole(await roleiden(opts.name))

            if opts.passwd:
                outp.printf(f'setting passwd for: {opts.name}')
                await cell.setUserPasswd(await useriden(opts.name), opts.passwd)

            if opts.grant:
                outp.printf(f'granting {opts.grant} to: {opts.name}')
                await cell.addUserRole(await useriden(opts.name), await roleiden(opts.grant))

            if opts.revoke:
                outp.printf(f'revoking {opts.revoke} from: {opts.name}')
                await cell.delUserRole(await useriden(opts.name), await roleiden(opts.revoke))

            if opts.admin:
                outp.printf(f'granting admin status: {opts.name}')
                await cell.setAuthAdmin(opts.name, True)

            if opts.noadmin:
                outp.printf(f'revoking admin status: {opts.name}')
                await cell.setAuthAdmin(opts.name, False)

            if opts.lock:
                outp.printf(f'locking user: {opts.name}')
                await cell.setUserLocked(await useriden(opts.name), True)

            if opts.unlock:
                outp.printf(f'unlocking user: {opts.name}')
                await cell.setUserLocked(await useriden(opts.name), False)

            if opts.addrule:
                text = opts.addrule
                # TODO: syntax for index...
                # A leading "!" marks the rule as a deny rule.
                allow = True
                if text.startswith('!'):
                    allow = False
                    text = text[1:]
                rule = (allow, text.split('.'))
                outp.printf(f'adding rule to {opts.name}: {rule!r}')
                if cell_supports_authgate:
                    await cell.addAuthRule(opts.name, rule, indx=None, gateiden=opts.object)
                else:
                    await cell.addAuthRule(opts.name, rule, indx=None)

            if opts.delrule is not None:
                ruleind = opts.delrule
                outp.printf(f'deleting rule index: {ruleind}')
                user = await cell.getAuthInfo(opts.name)
                userrules = user.get('rules', ())
                delrule = None
                delgate = None
                # Rule indexes continue across the auth gates, matching
                # the numbering produced by printuser().
                if ruleind < len(userrules):
                    delrule = userrules[ruleind]
                else:
                    i = len(userrules)
                    for gateiden, gateinfo in user.get('authgates', {}).items():
                        for rule in gateinfo.get('rules', ()):
                            if i == ruleind:
                                delrule = rule
                                delgate = gateiden
                            i += 1
                if delrule is not None:
                    await cell.delAuthRule(opts.name, delrule, gateiden=delgate)
                else:
                    outp.printf(f'rule index is out of range')

            # Show the (possibly updated) definition as confirmation.
            try:
                user = await cell.getAuthInfo(opts.name)
            except s_exc.NoSuchName:
                outp.printf(f'no such user: {opts.name}')
                return 1

            await printuser(user)

    except s_exc.BadVersion as e:
        valu = s_version.fmtVersion(*e.get('valu'))
        outp.printf(f'Cell version {valu} is outside of the cellauth supported range ({reqver}).')
        outp.printf(f'Please use a version of Synapse which supports {valu}; current version is {s_version.verstring}.')
        return 1

    except Exception as e:  # pragma: no cover
        if opts.debug:
            traceback.print_exc()
        outp.printf(str(e))
        return 1

    else:
        return 0
async def handleList(opts):
    '''
    Handle the "list" subcommand: show one named user/role, or list all
    users and roles on the remote cell.

    Returns 0 on success, 1 on any error.
    '''
    try:
        async with await s_telepath.openurl(opts.cellurl) as cell:
            s_version.reqVersion(cell._getSynVers(), reqver)

            if opts.name:
                # A name was given - show just that user/role.
                udef = await cell.getAuthInfo(opts.name[0])
                if udef is None:
                    outp.printf(f'no such user: {opts.name}')
                    return 1
                await printuser(udef, cell=cell, details=opts.detail)
                return 0

            # No name - dump the full user and role listings.
            outp.printf(f'getting users and roles')
            outp.printf('users:')
            for udef in await cell.getAuthUsers():
                outp.printf(f'    {udef.get("name")}')
            outp.printf('roles:')
            for rdef in await cell.getAuthRoles():
                outp.printf(f'    {rdef.get("name")}')

    except s_exc.BadVersion as exc:
        vstr = s_version.fmtVersion(*exc.get('valu'))
        outp.printf(f'Cell version {vstr} is outside of the cellauth supported range ({reqver}).')
        outp.printf(f'Please use a version of Synapse which supports {vstr}; current version is {s_version.verstring}.')
        return 1

    except Exception as exc:  # pragma: no cover
        if opts.debug:
            traceback.print_exc()
        outp.printf(str(exc))
        return 1

    else:
        return 0
async def main(argv, outprint=None):
    '''
    Parse argv and dispatch to the selected subcommand handler.

    Returns the handler's exit code, or -1 on a parse error.
    '''
    global outp
    outp = outprint if outprint is not None else s_output.OutPut()  # pragma: no cover
    parser = makeargparser()
    try:
        opts = parser.parse_args(argv)
    except s_exc.ParserExit:
        return -1
    return await opts.func(opts)
def makeargparser():
    '''
    Build the cellauth argument parser with "list" and "modify" subcommands.

    Fixes copy-paste defects in the help strings: --revoke said "Grant",
    and --deluser/--delrole said "to the cortex" instead of "from".
    '''
    global outp
    pars = s_cmd.Parser('synapse.tools.cellauth', outp=outp, description=desc)
    pars.add_argument('--debug', action='store_true', help='Show debug traceback on error.')
    pars.add_argument('cellurl', help='The telepath URL to connect to a cell.')
    subpars = pars.add_subparsers(required=True,
                                  title='subcommands',
                                  dest='cmd',
                                  parser_class=functools.partial(s_cmd.Parser, outp=outp))
    # list
    pars_list = subpars.add_parser('list', help='List users/roles')
    pars_list.add_argument('name', nargs='*', default=None, help='The name of the user/role to list')
    pars_list.add_argument('-d', '--detail', default=False, action='store_true',
                           help='Show rule details for roles associated with a user.')
    pars_list.set_defaults(func=handleList)
    # create / modify / delete
    pars_mod = subpars.add_parser('modify', help='Create, modify, delete the named user/role')
    muxp = pars_mod.add_mutually_exclusive_group()
    muxp.add_argument('--adduser', action='store_true', help='Add the named user to the cortex.')
    muxp.add_argument('--addrole', action='store_true', help='Add the named role to the cortex.')
    muxp.add_argument('--deluser', action='store_true', help='Delete the named user from the cortex.')
    muxp.add_argument('--delrole', action='store_true', help='Delete the named role from the cortex.')
    muxp.add_argument('--admin', action='store_true', help='Grant admin powers to the user/role.')
    muxp.add_argument('--noadmin', action='store_true', help='Revoke admin powers from the user/role.')
    muxp.add_argument('--lock', action='store_true', help='Lock the user account.')
    muxp.add_argument('--unlock', action='store_true', help='Unlock the user account.')
    muxp.add_argument('--passwd', help='Set the user password.')
    muxp.add_argument('--grant', help='Grant the specified role to the user.')
    muxp.add_argument('--revoke', help='Revoke the specified role from the user.')
    muxp.add_argument('--addrule', help='Add the given rule to the user/role.')
    muxp.add_argument('--delrule', type=int, help='Delete the given rule number from the user/role.')
    pars_mod.add_argument('--object', type=str, help='The iden of the object to which to apply the new rule. Only '
                          'supported on Cells running Synapse >= 0.1.33.')
    pars_mod.add_argument('name', help='The user/role to modify.')
    pars_mod.set_defaults(func=handleModify)
    return pars
async def _main():  # pragma: no cover
    # Enable debug logging before handing off to main() with the CLI args.
    s_common.setlogging(logger, 'DEBUG')
    return await main(sys.argv[1:])

# Exit with the handler's return code when run as a script.
if __name__ == '__main__':  # pragma: no cover
    sys.exit(s_glob.sync(_main()))
|
// repository: martamedio/spring-cloud-gateway
/*
* Copyright 2013-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.gateway.config;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.net.ssl.TrustManagerFactory;
import io.netty.channel.ChannelOption;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
import org.junit.Test;
import reactor.netty.http.HttpProtocol;
import reactor.netty.http.client.HttpClient;
import reactor.netty.http.client.HttpClientConfig;
import reactor.netty.http.client.WebsocketClientSpec;
import reactor.netty.http.server.WebsocketServerSpec;
import reactor.netty.resources.ConnectionProvider;
import reactor.netty.tcp.SslProvider;
import reactor.netty.transport.ProxyProvider;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.actuate.autoconfigure.metrics.MetricsAutoConfiguration;
import org.springframework.boot.actuate.autoconfigure.metrics.export.simple.SimpleMetricsExportAutoConfiguration;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.autoconfigure.AutoConfigureBefore;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.security.oauth2.client.reactive.ReactiveOAuth2ClientAutoConfiguration;
import org.springframework.boot.autoconfigure.security.reactive.ReactiveSecurityAutoConfiguration;
import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.boot.autoconfigure.web.reactive.WebFluxAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.context.runner.ReactiveWebApplicationContextRunner;
import org.springframework.cloud.gateway.actuate.GatewayControllerEndpoint;
import org.springframework.cloud.gateway.actuate.GatewayLegacyControllerEndpoint;
import org.springframework.cloud.gateway.filter.factory.TokenRelayGatewayFilterFactory;
import org.springframework.cloud.gateway.filter.headers.GRPCRequestHeadersFilter;
import org.springframework.cloud.gateway.filter.headers.GRPCResponseHeadersFilter;
import org.springframework.cloud.gateway.route.RouteLocator;
import org.springframework.cloud.gateway.route.builder.GatewayFilterSpec;
import org.springframework.cloud.gateway.route.builder.RouteLocatorBuilder;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.security.oauth2.client.ReactiveOAuth2AuthorizedClientManager;
import org.springframework.web.filter.reactive.HiddenHttpMethodFilter;
import org.springframework.web.reactive.socket.client.ReactorNettyWebSocketClient;
import org.springframework.web.reactive.socket.server.upgrade.ReactorNettyRequestUpgradeStrategy;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class GatewayAutoConfigurationTests {
/**
 * The gateway auto-configuration disables WebFlux's HiddenHttpMethodFilter:
 * the property is forced to "false" and no filter bean is registered.
 */
@Test
public void noHiddenHttpMethodFilter() {
try (ConfigurableApplicationContext ctx = SpringApplication.run(Config.class, "--spring.jmx.enabled=false",
"--server.port=0")) {
assertThat(ctx.getEnvironment().getProperty("spring.webflux.hiddenmethod.filter.enabled"))
.isEqualTo("false");
assertThat(ctx.getBeanNamesForType(HiddenHttpMethodFilter.class)).isEmpty();
}
}
/**
 * With no httpclient properties set, the auto-configured Netty HttpClient
 * uses an elastic connection pool, no proxy, no SSL customization, no
 * gzip, no wiretap logging, and no connect timeout.
 */
@Test
public void nettyHttpClientDefaults() {
new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(WebFluxAutoConfiguration.class, MetricsAutoConfiguration.class,
SimpleMetricsExportAutoConfiguration.class, GatewayAutoConfiguration.class,
ServerPropertiesConfig.class))
.withPropertyValues("debug=true").run(context -> {
assertThat(context).hasSingleBean(HttpClient.class);
HttpClient httpClient = context.getBean(HttpClient.class);
CustomHttpClientFactory factory = context.getBean(CustomHttpClientFactory.class);
assertThat(factory.connectionProvider).isNotNull();
assertThat(factory.connectionProvider.maxConnections()).isEqualTo(Integer.MAX_VALUE); // elastic
assertThat(factory.proxyProvider).isNull();
assertThat(factory.sslConfigured).isFalse();
assertThat(httpClient.configuration().isAcceptGzip()).isFalse();
assertThat(httpClient.configuration().loggingHandler()).isNull();
assertThat(httpClient.configuration().options())
.doesNotContainKey(ChannelOption.CONNECT_TIMEOUT_MILLIS);
});
}
/**
 * Exercises the full set of spring.cloud.gateway.httpclient.* properties
 * (SSL trust manager, timeouts, fixed pool with metrics, compression,
 * wiretap, proxy, websocket frame size) and verifies each one is applied
 * to the HttpClient, the upgrade strategy, and the websocket client.
 */
@Test
public void nettyHttpClientConfigured() {
new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(WebFluxAutoConfiguration.class, MetricsAutoConfiguration.class,
SimpleMetricsExportAutoConfiguration.class, GatewayAutoConfiguration.class,
HttpClientCustomizedConfig.class, ServerPropertiesConfig.class))
.withPropertyValues("spring.cloud.gateway.httpclient.ssl.use-insecure-trust-manager=true",
"spring.cloud.gateway.httpclient.connect-timeout=10",
"spring.cloud.gateway.httpclient.response-timeout=10s",
"spring.cloud.gateway.httpclient.pool.eviction-interval=10s",
"spring.cloud.gateway.httpclient.pool.type=fixed",
"spring.cloud.gateway.httpclient.pool.metrics=true",
"spring.cloud.gateway.httpclient.compression=true",
"spring.cloud.gateway.httpclient.wiretap=true",
// greater than integer max value
"spring.cloud.gateway.httpclient.max-initial-line-length=2147483647",
"spring.cloud.gateway.httpclient.proxy.host=myhost",
"spring.cloud.gateway.httpclient.websocket.max-frame-payload-length=1024")
.run(context -> {
assertThat(context).hasSingleBean(HttpClient.class);
HttpClient httpClient = context.getBean(HttpClient.class);
CustomHttpClientFactory factory = context.getBean(CustomHttpClientFactory.class);
HttpClientProperties properties = context.getBean(HttpClientProperties.class);
// the configured value is clamped to Integer.MAX_VALUE
assertThat(properties.getMaxInitialLineLength().toBytes()).isLessThanOrEqualTo(Integer.MAX_VALUE);
assertThat(properties.isCompression()).isEqualTo(true);
assertThat(properties.getPool().getEvictionInterval()).hasSeconds(10);
assertThat(properties.getPool().isMetrics()).isEqualTo(true);
assertThat(httpClient.configuration().isAcceptGzip()).isTrue();
assertThat(httpClient.configuration().loggingHandler()).isNotNull();
assertThat(httpClient.configuration().options()).containsKey(ChannelOption.CONNECT_TIMEOUT_MILLIS);
assertThat(httpClient.configuration().options().get(ChannelOption.CONNECT_TIMEOUT_MILLIS))
.isEqualTo(10);
assertThat(factory.connectionProvider).isNotNull();
// fixed pool
assertThat(factory.connectionProvider.maxConnections())
.isEqualTo(ConnectionProvider.DEFAULT_POOL_MAX_CONNECTIONS);
assertThat(factory.proxyProvider).isNotNull();
assertThat(factory.proxyProvider.build().getAddress().get().getHostName()).isEqualTo("myhost");
assertThat(factory.sslConfigured).isTrue();
assertThat(factory.insecureTrustManagerSet).isTrue();
assertThat(context).hasSingleBean(ReactorNettyRequestUpgradeStrategy.class);
ReactorNettyRequestUpgradeStrategy upgradeStrategy = context
.getBean(ReactorNettyRequestUpgradeStrategy.class);
assertThat(upgradeStrategy.getWebsocketServerSpec().maxFramePayloadLength()).isEqualTo(1024);
assertThat(upgradeStrategy.getWebsocketServerSpec().handlePing()).isTrue();
assertThat(context).hasSingleBean(ReactorNettyWebSocketClient.class);
ReactorNettyWebSocketClient webSocketClient = context.getBean(ReactorNettyWebSocketClient.class);
assertThat(webSocketClient.getWebsocketClientSpec().maxFramePayloadLength()).isEqualTo(1024);
HttpClientCustomizedConfig config = context.getBean(HttpClientCustomizedConfig.class);
assertThat(config.called.get()).isTrue();
});
}
/**
 * With the gateway actuator endpoint enabled, the verbose controller
 * endpoint is registered by default and the legacy one is not.
 */
@Test
public void verboseActuatorEnabledByDefault() {
    String[] args = { "--spring.jmx.enabled=false", "--server.port=0",
            "--management.endpoint.gateway.enabled=true" };
    try (ConfigurableApplicationContext context = SpringApplication.run(Config.class, args)) {
        assertThat(context.getBeanNamesForType(GatewayControllerEndpoint.class)).hasSize(1);
        assertThat(context.getBeanNamesForType(GatewayLegacyControllerEndpoint.class)).isEmpty();
    }
}
/**
 * Setting spring.cloud.gateway.actuator.verbose.enabled=false swaps in
 * the legacy controller endpoint.
 */
@Test
public void verboseActuatorDisabled() {
    String[] args = { "--spring.jmx.enabled=false", "--server.port=0",
            "--spring.cloud.gateway.actuator.verbose.enabled=false",
            "--management.endpoint.gateway.enabled=true" };
    try (ConfigurableApplicationContext context = SpringApplication.run(Config.class, args)) {
        assertThat(context.getBeanNamesForType(GatewayLegacyControllerEndpoint.class)).hasSize(1);
    }
}
/**
 * With OAuth2 client registrations configured, the TokenRelay
 * configuration contributes an authorized-client manager and the
 * TokenRelay gateway filter factory.
 */
@Test
public void tokenRelayBeansAreCreated() {
new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(ReactiveSecurityAutoConfiguration.class,
ReactiveOAuth2ClientAutoConfiguration.class, GatewayReactiveOAuth2AutoConfiguration.class,
GatewayAutoConfiguration.TokenRelayConfiguration.class))
.withPropertyValues(
"spring.security.oauth2.client.provider[testprovider].authorization-uri=http://localhost",
"spring.security.oauth2.client.provider[testprovider].token-uri=http://localhost/token",
"spring.security.oauth2.client.registration[test].provider=testprovider",
"spring.security.oauth2.client.registration[test].authorization-grant-type=authorization_code",
"spring.security.oauth2.client.registration[test].redirect-uri=http://localhost/redirect",
"spring.security.oauth2.client.registration[test].client-id=login-client")
.run(context -> {
assertThat(context).hasSingleBean(ReactiveOAuth2AuthorizedClientManager.class);
assertThat(context).hasSingleBean(TokenRelayGatewayFilterFactory.class);
});
}
/**
 * When the token-relay filter is disabled via property, a route that
 * references it fails startup with an IllegalStateException naming the
 * missing TokenRelayGatewayFilterFactory bean.
 */
@Test
public void noTokenRelayFilter() {
assertThatThrownBy(() -> {
try (ConfigurableApplicationContext ctx = SpringApplication.run(RouteLocatorBuilderConfig.class,
"--spring.jmx.enabled=false", "--spring.cloud.gateway.filter.token-relay.enabled=false",
"--spring.security.oauth2.client.provider[testprovider].authorization-uri=http://localhost",
"--spring.security.oauth2.client.provider[testprovider].token-uri=http://localhost/token",
"--spring.security.oauth2.client.registration[test].provider=testprovider",
"--spring.security.oauth2.client.registration[test].authorization-grant-type=authorization_code",
"--spring.security.oauth2.client.registration[test].redirect-uri=http://localhost/redirect",
"--spring.security.oauth2.client.registration[test].client-id=login-client", "--server.port=0",
"--spring.cloud.gateway.actuator.verbose.enabled=false")) {
assertThat(ctx.getBeanNamesForType(GatewayLegacyControllerEndpoint.class)).hasSize(1);
}
}).hasRootCauseInstanceOf(IllegalStateException.class)
.hasMessageContaining("No TokenRelayGatewayFilterFactory bean was found. Did you include");
}
/**
 * Regression for gh-2159: each upgrade request must build its own
 * WebsocketServerSpec, so a per-request protocol must not leak into the
 * strategy's default spec.
 */
@Test // gh-2159
public void reactorNettyRequestUpgradeStrategyWebSocketSpecBuilderIsUniquePerRequest()
throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
ReactorNettyRequestUpgradeStrategy strategy = new GatewayAutoConfiguration.NettyConfiguration()
.reactorNettyRequestUpgradeStrategy(new HttpClientProperties());
// Method "buildSpec" was introduced for Tests, but has only default visibility
Method buildSpec = ReactorNettyRequestUpgradeStrategy.class.getDeclaredMethod("buildSpec", String.class);
buildSpec.setAccessible(true);
WebsocketServerSpec spec1 = (WebsocketServerSpec) buildSpec.invoke(strategy, "p1");
WebsocketServerSpec spec2 = strategy.getWebsocketServerSpec();
assertThat(spec1.protocols()).isEqualTo("p1");
// the default spec must not have picked up "p1"
assertThat(spec2.protocols()).isNull();
}
/**
 * Regression for gh-2215: the websocket client must build a fresh
 * WebsocketClientSpec per request so protocols are not cached between
 * requests.
 */
@Test // gh-2215
public void webSocketClientSpecBuilderIsUniquePerReactorNettyWebSocketClient()
throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
ReactorNettyWebSocketClient websocketClient = new GatewayAutoConfiguration.NettyConfiguration()
.reactorNettyWebSocketClient(new HttpClientProperties(), HttpClient.create());
// Method "buildSpec" has only private visibility
Method buildSpec = ReactorNettyWebSocketClient.class.getDeclaredMethod("buildSpec", String.class);
buildSpec.setAccessible(true);
WebsocketClientSpec spec1 = (WebsocketClientSpec) buildSpec.invoke(websocketClient, "p1");
WebsocketClientSpec spec2 = websocketClient.getWebsocketClientSpec();
assertThat(spec1.protocols()).isEqualTo("p1");
// Protocols should not be cached between requests:
assertThat(spec2.protocols()).isNull();
}
/**
 * With server.http2.enabled=true the gRPC request/response header filters
 * are registered and the HttpClient speaks both HTTP/1.1 and H2.
 */
@Test
public void gRPCFiltersConfiguredWhenHTTP2Enabled() {
new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(WebFluxAutoConfiguration.class, MetricsAutoConfiguration.class,
SimpleMetricsExportAutoConfiguration.class, GatewayAutoConfiguration.class,
HttpClientCustomizedConfig.class, ServerPropertiesConfig.class))
.withPropertyValues("server.http2.enabled=true").run(context -> {
assertThat(context).hasSingleBean(GRPCRequestHeadersFilter.class);
assertThat(context).hasSingleBean(GRPCResponseHeadersFilter.class);
HttpClient httpClient = context.getBean(HttpClient.class);
assertThat(httpClient.configuration().protocols()).contains(HttpProtocol.HTTP11, HttpProtocol.H2);
});
}
/**
 * With server.http2.enabled=false neither gRPC header filter is
 * registered.
 */
@Test
public void gRPCFiltersNotConfiguredWhenHTTP2Disabled() {
new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(WebFluxAutoConfiguration.class, MetricsAutoConfiguration.class,
SimpleMetricsExportAutoConfiguration.class, GatewayAutoConfiguration.class,
HttpClientCustomizedConfig.class, ServerPropertiesConfig.class))
.withPropertyValues("server.http2.enabled=false").run(context -> {
assertThat(context).doesNotHaveBean(GRPCRequestHeadersFilter.class);
assertThat(context).doesNotHaveBean(GRPCResponseHeadersFilter.class);
});
}
/**
 * Enabling HTTP/2 alone must not opt the client into the insecure trust
 * manager - that requires an explicit property.
 */
@Test
public void insecureTrustManagerNotEnabledByDefaultWhenHTTP2Enabled() {
new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(WebFluxAutoConfiguration.class, MetricsAutoConfiguration.class,
SimpleMetricsExportAutoConfiguration.class, GatewayAutoConfiguration.class,
HttpClientCustomizedConfig.class, ServerPropertiesConfig.class))
.withPropertyValues("server.http2.enabled=true").run(context -> {
assertThat(context).hasSingleBean(HttpClient.class);
CustomHttpClientFactory factory = context.getBean(CustomHttpClientFactory.class);
assertThat(factory.insecureTrustManagerSet).isFalse();
});
}
/**
 * A user-supplied HttpClient bean takes precedence over the
 * auto-configured one.
 */
@Test
public void customHttpClientWorks() {
new ReactiveWebApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(WebFluxAutoConfiguration.class, MetricsAutoConfiguration.class,
SimpleMetricsExportAutoConfiguration.class, GatewayAutoConfiguration.class,
HttpClientCustomizedConfig.class, CustomHttpClientConfig.class))
.run(context -> {
assertThat(context).hasSingleBean(HttpClient.class);
HttpClient httpClient = context.getBean(HttpClient.class);
assertThat(httpClient).isInstanceOf(CustomHttpClient.class);
});
}
/**
 * Test configuration that replaces the HttpClientFactory with the
 * spying CustomHttpClientFactory so assertions can inspect how the
 * factory was invoked.
 */
@Configuration
@EnableConfigurationProperties(ServerProperties.class)
@AutoConfigureBefore(GatewayAutoConfiguration.class)
protected static class ServerPropertiesConfig {
@Bean
@Primary
CustomHttpClientFactory customHttpClientFactory(HttpClientProperties properties,
ServerProperties serverProperties, List<HttpClientCustomizer> customizers) {
return new CustomHttpClientFactory(properties, serverProperties, customizers);
}
}
// Spy subclass of HttpClientFactory: delegates all work to the real factory
// but records intermediate objects and flags for the assertions above.
protected static class CustomHttpClientFactory extends HttpClientFactory {

	// True when the insecure (trust-all) trust manager was installed.
	boolean insecureTrustManagerSet;

	// True once configureSslContext has been invoked.
	boolean sslConfigured;

	private ConnectionProvider connectionProvider;

	private ProxyProvider.Builder proxyProvider;

	public CustomHttpClientFactory(HttpClientProperties properties, ServerProperties serverProperties,
			List<HttpClientCustomizer> customizers) {
		super(properties, serverProperties, customizers);
	}

	@Override
	protected ConnectionProvider buildConnectionProvider(HttpClientProperties properties) {
		// Capture the provider built by the parent for inspection.
		connectionProvider = super.buildConnectionProvider(properties);
		return connectionProvider;
	}

	@Override
	protected ProxyProvider.Builder configureProxyProvider(HttpClientProperties.Proxy proxy,
			ProxyProvider.TypeSpec proxySpec) {
		// Capture the proxy builder for inspection.
		proxyProvider = super.configureProxyProvider(proxy, proxySpec);
		return proxyProvider;
	}

	@Override
	protected void configureSslContext(HttpClientProperties.Ssl ssl, SslProvider.SslContextSpec sslContextSpec) {
		sslConfigured = true;
		super.configureSslContext(ssl, sslContextSpec);
	}

	@Override
	protected void setTrustManager(SslContextBuilder sslContextBuilder, TrustManagerFactory factory) {
		// Identity check: only the shared singleton counts as "insecure".
		insecureTrustManagerSet = factory == InsecureTrustManagerFactory.INSTANCE;
		super.setTrustManager(sslContextBuilder, factory);
	}

}
// Minimal application configuration used by tests that only need auto-configuration.
@EnableAutoConfiguration
@SpringBootConfiguration
protected static class Config {

}
// Supplies a user-defined HttpClient bean so tests can verify it overrides
// the auto-configured client.
@EnableAutoConfiguration
@SpringBootConfiguration
@EnableConfigurationProperties(ServerProperties.class)
@AutoConfigureBefore(GatewayAutoConfiguration.class)
protected static class CustomHttpClientConfig {

	@Bean
	public HttpClient customHttpClient() {
		return new CustomHttpClient();
	}

}
// Marker HttpClient subclass used only for instanceof assertions; it never
// issues requests, so the stubbed overrides are safe for these tests.
protected static class CustomHttpClient extends HttpClient {

	@Override
	public HttpClientConfig configuration() {
		return null;
	}

	@Override
	protected HttpClient duplicate() {
		// Returning `this` skips the copy; acceptable because the client is never used.
		return this;
	}

}
// Defines a single always-matching route with a token-relay filter, used to
// exercise RouteLocatorBuilder in context tests.
@EnableAutoConfiguration
@SpringBootConfiguration
protected static class RouteLocatorBuilderConfig {

	@Bean
	public RouteLocator myRouteLocator(RouteLocatorBuilder builder) {
		return builder.routes()
				.route("test", r -> r.alwaysTrue().filters(GatewayFilterSpec::tokenRelay).uri("http://localhost"))
				.build();
	}

}
// Registers an HttpClientCustomizer that records (via compareAndSet) whether
// customizers are invoked during HttpClient construction.
@Configuration
protected static class HttpClientCustomizedConfig {

	private final AtomicBoolean called = new AtomicBoolean();

	@Bean
	HttpClientCustomizer myCustomCustomizer() {
		return httpClient -> {
			called.compareAndSet(false, true);
			return httpClient;
		};
	}

}
}
|
// NOTE(review): `public class` is not valid PHP syntax (presumably this should
// be `class UserPageManager`); the method bodies use PHP's `$this->` — confirm
// the intended language/declaration.
public class UserPageManager {
    // Assume the existence of the viewUserPages() method

    // Renders the "finished" tasks page.
    public function finished(){
        return $this->viewUserPages('finished');
    }

    // Renders the page listing all tasks.
    public function alltasks(){
        return $this->viewUserPages('allTasks');
    }

    // Renders the calendar page.
    public function calendar(){
        return $this->viewUserPages('calendar');
    }
}
#!/bin/bash
# Runs ONNX-runtime inference for resnet18 on a sample image and extracts the
# fp32 input tensor from the full activation dump for later comparison.
set -e
DIR="$( cd "$(dirname "$0")" ; pwd -P )"

# onnx inference only on python3, pls pip3 install onnxruntime
run_onnx_inference.py \
    --input_file $DIR/data/dog.jpg \
    --mean 0.485,0.456,0.406 \
    --image_resize_dims 256,256 \
    --net_input_dims 224,224 \
    --raw_scale 1 \
    --output_file resnet18_out_onnx.npz \
    --dump_tensor resnet18_out_tensor_all_onnx.npz \
    --model_path $MODEL_PATH/imagenet/resnet/onnx/resnet18.onnx

# Pull the network's "input" blob out of the all-tensor dump.
cvi_npz_tool.py extract resnet18_out_tensor_all_onnx.npz resnet18_in_fp32.npz input

# VERDICT
echo $0 PASSED
|
import tensorflow as tf
import matplotlib.pyplot as plt

# Load the MNIST handwritten-digit dataset (downloaded on first use).
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

# Give each 28x28 image an explicit single channel, as Conv2D expects.
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)

# Cast to float32, then scale pixel values into [0, 1].
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255

# Small CNN: one conv/pool stage followed by a dense classifier head.
model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=(28, 28, 1)),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Dropout(0.25),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.25),
    tf.keras.layers.Dense(10, activation='softmax'),
])

# sparse_categorical_crossentropy works directly with integer class labels.
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])

# Train, then report loss/accuracy on the held-out test set.
model.fit(x_train, y_train, epochs=10)
model.evaluate(x_test, y_test, verbose=1)
<filename>src/theme/default/index.ts
import * as app from "./app.m.css";
import * as systemStatusbar from "./system-statusbar.m.css";
import * as defaultVariant from "./variants/default.m.css";

// Default theme bundle: maps widget keys to their CSS-module classes and
// exposes the available theme variants.
export default {
	theme: {
		"mini-program-component/app": app,
		"mini-program-component/system-statusbar": systemStatusbar,
	},
	variants: {
		default: defaultVariant,
	},
};
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# CI stage script: runs unit, functional and fuzz tests inside the build
# container, gated on the RUN_* environment flags.

export LC_ALL=C.UTF-8

cd "build/daipercoins-$HOST" || (echo "could not enter distdir build/daipercoins-$HOST"; exit 1)

if [ "$RUN_UNIT_TESTS" = "true" ]; then
  BEGIN_FOLD unit-tests
  DOCKER_EXEC LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/depends/$HOST/lib make $MAKEJOBS check VERBOSE=1
  END_FOLD
fi

if [ "$RUN_FUNCTIONAL_TESTS" = "true" ]; then
  BEGIN_FOLD functional-tests
  DOCKER_EXEC test/functional/test_runner.py --ci --combinedlogslen=4000 --coverage --quiet --failfast
  END_FOLD
fi

if [ "$RUN_FUZZ_TESTS" = "true" ]; then
  BEGIN_FOLD fuzz-tests
  DOCKER_EXEC test/fuzz/test_runner.py -l DEBUG ${DIR_FUZZ_IN}
  END_FOLD
fi
|
require 'celluloid/current'
require 'yaml'
require_relative 'utils/pmap'
require_relative 'utils/mini_active_support'

# Extend all objects with logger
Object.send(:include, Eye::Logger::ObjectExt)

# needs to preload
Eye::Sigar
Eye::SystemResources

# Central Celluloid actor that owns the loaded applications and current
# configuration; behavior is split across the Controller::* mixins below.
class Eye::Controller
  include Celluloid

  # Lazily load the controller sub-modules before mixing them in.
  autoload :Load, 'eye/controller/load'
  autoload :Helpers, 'eye/controller/helpers'
  autoload :Commands, 'eye/controller/commands'
  autoload :Status, 'eye/controller/status'
  autoload :Apply, 'eye/controller/apply'
  autoload :Options, 'eye/controller/options'

  include Eye::Controller::Load
  include Eye::Controller::Helpers
  include Eye::Controller::Commands
  include Eye::Controller::Status
  include Eye::Controller::Apply
  include Eye::Controller::Options

  attr_reader :applications, :current_config

  def initialize
    @applications = []
    @current_config = Eye::Config.new
    # Route Celluloid's own logging through Eye's logger.
    Celluloid.logger = Eye::Logger.new('celluloid')
    info "starting #{Eye::ABOUT} <#{$$}>"
  end

  # Convenience accessor for the settings of the active config.
  def settings
    current_config.settings
  end

  # Tag used by the logger mixin for this actor's messages.
  def logger_tag
    'Eye'
  end
end
|
# Publish a single MQTT message to "topic" using protocol v5, authenticating as "user".
mosquitto_pub -t 'topic' -m 'message' -V mqttv5 -u user
# Write your solution here
def most_common_character(my_list):
    """Return the most frequent element of a sequence (e.g. a string).

    Ties are broken in favor of the element encountered first, matching the
    original linear-scan behavior. Raises IndexError on an empty sequence,
    exactly as the original did.
    """
    from collections import Counter
    # Counter counts in one O(n) pass (the original called list.count inside
    # a loop, which is O(n^2)).  most_common() orders equal counts by first
    # encounter, so tie-breaking is unchanged.
    return Counter(my_list).most_common(1)[0][0]
if __name__ == "__main__":
first_string = "abcdbde"
print(most_common_character(first_string))
second_string = "exemplaryelementary"
print(most_common_character(second_string)) |
import React from 'react';
const OpenApi = () => {
return <div>开放接口</div>
}
export default OpenApi; |
#!/bin/sh
# CocoaPods-generated script: copies vendored frameworks into the app bundle,
# strips architectures the current build doesn't target, and re-signs.
set -e
set -u
set -o pipefail

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: framework path (absolute, or relative to BUILT_PRODUCTS_DIR).
install_framework()
{
  # Resolve the source: prefer the build products dir, then its basename there,
  # then the literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to the .framework.dSYM bundle.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means stripping ran.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op when code signing is disabled.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the command when parallel signing is enabled; the trailing
    # `wait` at the bottom of the script collects the jobs.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: binary to thin in place. Sets STRIP_BINARY_RETVAL: 1 if stripping ran,
# 0 if the binary shares no architecture with the current build.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/WBasicLibrary/WBasicLibrary.framework"
install_framework "${BUILT_PRODUCTS_DIR}/WVideoPlayer/WVideoPlayer.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/WBasicLibrary/WBasicLibrary.framework"
install_framework "${BUILT_PRODUCTS_DIR}/WVideoPlayer/WVideoPlayer.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
import { Component } from '@angular/core';
import { Store } from '@ngrx/store';
import { RiskTabHelperService } from '../services/risk-tab-helper.service';
import { selectRiskQueryResult } from '../store/selectors';
import { constants } from 'src/constants';
import { ColumnMode } from '@swimlane/ngx-datatable';
@Component({
  selector: 'scm-mitigation',
  templateUrl: './mitigation.component.html',
  styleUrls: ['./mitigation.component.scss']
})
export class MitigationComponent {
  /** Column-name constants for the BigQuery risk dataset. */
  readonly RISK_COLS = constants.bigQuery.datasets.risk.columns;
  /** Map of risk-dimension keys to display labels. */
  readonly dimensions = this.riskTabHelper.getDimensions();
  /** Exposed so the template can bind ngx-datatable's ColumnMode. */
  ColumnMode = ColumnMode;

  /** Latest raw rows from the risk query selector. */
  riskQueryResult: any[];
  /** Rows rendered by the mitigation table (one per supplier x dimension). */
  mitigationTableData: any[];

  mitigationColumns = [
    { name: 'Suppliers', prop: this.RISK_COLS.SUPPLIER_NAME },
    { name: 'DRI', prop: this.RISK_COLS.DRI },
    { name: 'Risk Category', prop: 'dimension' },
    { name: 'Risk Rating', prop: 'rating' },
    { name: 'Mitigation Plan', prop: 'mitigationPlan' },
    { name: 'Timeline/ETA', prop: 'timeline' },
    { name: 'Status', prop: 'status' }
  ]; // fixed: missing statement terminator

  public constructor(private store: Store, private riskTabHelper: RiskTabHelperService) {
    // NOTE(review): this store subscription is never unsubscribed. If this
    // component can be destroyed during the app's lifetime, switch to the
    // async pipe or unsubscribe in ngOnDestroy to avoid a leak — confirm.
    this.store.select(selectRiskQueryResult).subscribe(
      riskQueryResult => {
        this.riskQueryResult = riskQueryResult;
        // (fixed: stray double semicolon removed)
        this.mitigationTableData = this.getMitigationTable(riskQueryResult);
      }
    );
  }

  /**
   * Expands each risk-query row into one table row per risk dimension,
   * copying supplier name and DRI and starting with empty mitigation fields.
   */
  private getMitigationTable(riskQueryResult: any[]) {
    const rows = [];
    for (const row of riskQueryResult) {
      const line = {};
      line[this.RISK_COLS.SUPPLIER_NAME] = row[this.RISK_COLS.SUPPLIER_NAME];
      line[this.RISK_COLS.DRI] = row[this.RISK_COLS.DRI];
      for (const dimension of Object.keys(this.dimensions)) {
        rows.push({
          ...line,
          dimension: this.dimensions[dimension],
          rating: row[dimension],
          mitigationPlan: '',
          timeline: '',
          status: ''
        });
      }
    }
    return rows;
  }

  /**
   * Rebuilds the table from rows matching the (optional) segmentation and
   * supplier-name filter inputs; an empty input matches everything.
   */
  mitigationFilter(segmentationEl, supplierNameEl) {
    this.mitigationTableData = this.getMitigationTable(this.riskQueryResult.filter(
      row => {
        const segmentFilter = segmentationEl && segmentationEl.value ? row[this.RISK_COLS.SEGMENTATION] === segmentationEl.value : true;
        const supplierFilter = supplierNameEl && supplierNameEl.value ? row[this.RISK_COLS.SUPPLIER_NAME] === supplierNameEl.value : true;
        return segmentFilter && supplierFilter;
      }
    ));
  }

  /** CSS classes per cell: rating cells get their rating value as a class. */
  getMitigationCellClass({ row, column, value }): any {
    const classes = {
      cell: true
    };
    if (column.prop === 'rating') {
      classes[value] = true;
    } else {
      classes['Disabled'] = true;
    }
    return classes;
  }
}
|
#!/bin/bash
# Build helper: resolves its own directory, locates qmake, and defines the
# functions below used to shadow-build the project.
set -e -x -u

MY_PATH="`dirname \"$0\"`"              # relative
MY_PATH="`( cd \"$MY_PATH\" && pwd )`"  # absolutized and normalized

source "$MY_PATH/detect_qmake.sh"
# Prints number of cores to stdout
GetCPUCores() {
  case "$OSTYPE" in
    # it's GitBash under Windows
    cygwin)    echo $NUMBER_OF_PROCESSORS
               ;;
    linux-gnu) grep -c ^processor /proc/cpuinfo 2>/dev/null
               ;;
    darwin*)   sysctl -n hw.ncpu
               ;;
    *)         echo "Unsupported platform in $0"
               exit 1
               ;;
  esac
  return 0
}
# Replaces "/cygwin/c" prefix with "c:" one on Windows platform.
# Does nothing under other OS.
# 1st param: path to be modified.
StripCygwinPrefix() {
if [[ $(GetNdkHost) == "windows-x86_64" ]]; then
echo "c:`(echo "$1" | cut -c 12-)`"
return 0
fi
echo "$1"
return 0
}
# 1st param: shadow directory path
# 2nd param: mkspec
# 3rd param: additional qmake parameters
# Runs qmake + make in a shadow build directory; the subshell keeps the
# caller's working directory untouched.
BuildQt() {
  (
    SHADOW_DIR="$1"
    MKSPEC="$2"
    QMAKE_PARAMS="$3"

    mkdir -p "$SHADOW_DIR"
    cd "$SHADOW_DIR"
    echo "Launching qmake..."
    # This call is needed to correctly rebuild c++ sources after switching between branches with added or removed source files.
    # Otherwise we get build errors.
    "$QMAKE" -r CONFIG-=sdk "$QMAKE_PARAMS" -spec "$(StripCygwinPrefix $MKSPEC)" "$(StripCygwinPrefix $MY_PATH)/../../omim.pro"
    #make clean > /dev/null || true
    make -j $(GetCPUCores)
  )
}
|
import Component from 'react-pure-render/component';

// Export the real editor only in the browser; during server-side rendering
// (no `window`) fall back to a stub that renders nothing, because react-ace
// touches browser globals at import time.
export default (typeof window !== 'undefined')
  ? require('react-ace').default
  : class AceEditor extends Component {
      render() { return null }
    }
|
#!/bin/bash
# Runs the zxi interpreter over the test corpus:
#   *.zx  files must exit 0     (should-pass)
#   *.zxf files must exit non-0 (should-fail)
# Exits non-zero if any test misbehaves.

TESTDIR=$(dirname -- "$0")
ZXI=${TESTDIR}/../../target/release/examples/zxi
error=0

echo Running zx should-pass tests:
for i in "${TESTDIR}"/*.zx; do
    # Test the command's exit status directly instead of inspecting $?
    # afterwards; quoting keeps paths with spaces intact.
    if ! "${ZXI}" "$i" &>/dev/null; then
        echo "[failure: should-pass] $i"
        error=1
    fi
done
echo Done.
echo

echo Running zx should-fail tests:
for i in "${TESTDIR}"/*.zxf; do
    if "${ZXI}" "$i" &>/dev/null; then
        echo "[failure: should-fail] $i"
        error=1
    fi
done
echo Done.

exit $error
|
#!/usr/bin/env sh
# Builds the demo site and force-pushes the output to the gh-pages branch.

# abort on errors
set -e

# remove babel cache; -f so a missing cache dir doesn't abort under `set -e`
# (plain `rm -r` fails when the directory does not exist yet)
rm -rf ./node_modules/.cache/babel-loader/

# build
npm run build:demo

# navigate into the build output directory
cd build

# publish the fresh build as a single-commit history on gh-pages
git init
git add -A
git commit -m 'update'
git push -f git@github.com:XiongAmao/vue-easy-lightbox.git master:gh-pages

cd -
echo DONE!
|
-- Product catalog: one row per sellable item.
CREATE TABLE products (
  id INTEGER PRIMARY KEY,
  product_name VARCHAR(255) NOT NULL,
  price DECIMAL(10,2) NOT NULL  -- unit price
);
-- Registered customers.
CREATE TABLE customers (
  id INTEGER PRIMARY KEY,
  name VARCHAR(255) NOT NULL,
  email VARCHAR(255) NOT NULL
);
-- Orders: each row links one customer to one product.
-- Added FOREIGN KEY constraints so orphaned customer/product ids are rejected;
-- inserts that already referenced valid rows are unaffected.
CREATE TABLE orders (
  id INTEGER PRIMARY KEY,
  customer_id INTEGER NOT NULL REFERENCES customers (id),
  product_id INTEGER NOT NULL REFERENCES products (id),
  quantity INTEGER NOT NULL,
  total_price DECIMAL(10,2) NOT NULL,  -- quantity x unit price at order time
  payment_method VARCHAR(255) NOT NULL
);
import {
isDefined,
isString,
isNumber,
isArray,
toString
} from './type-checkers'
/**
 * Resolves a dot-separated `path` inside `obj`, fanning out over arrays.
 * Returns a single stringified value for a plain path, or a list of values
 * when any array was traversed along the way.
 */
export default function get(obj, path) {
  const found = []
  // Tracks whether an array was crossed: decides list vs. single return.
  let sawArray = false

  const walk = (node, remainingPath) => {
    if (!remainingPath) {
      // No path left: the node itself is the value we want.
      found.push(node)
      return
    }

    const dot = remainingPath.indexOf('.')
    const key = dot === -1 ? remainingPath : remainingPath.slice(0, dot)
    const rest = dot === -1 ? null : remainingPath.slice(dot + 1)

    const value = node[key]
    if (!isDefined(value)) {
      return
    }

    if (!rest && (isString(value) || isNumber(value))) {
      // Leaf hit: normalize to string, matching the original behavior.
      found.push(toString(value))
    } else if (isArray(value)) {
      sawArray = true
      // Fan out: search every element of the array.
      for (let i = 0; i < value.length; i += 1) {
        walk(value[i], rest)
      }
    } else if (rest) {
      // Plain object with path remaining: recurse deeper.
      walk(value, rest)
    }
  }

  walk(obj, path)

  return sawArray ? found : found[0]
}
|
package no.item.enonic.builders.mappers;
import com.enonic.cms.api.client.model.user.Address;
import com.enonic.cms.api.client.model.user.UserInfo;
import com.google.common.base.Strings;
import no.item.enonic.models.User;
import java.time.LocalDate;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Date;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Maps the internal {@link User} model onto Enonic CMS's {@link UserInfo}.
 */
public class UserInfoMapper implements Function<User, UserInfo> {

    @Override
    public UserInfo apply(User user) {
        UserInfo userInfo = new UserInfo();
        userInfo.setMemberId(user.memberId);
        userInfo.setFirstName(user.firstName);
        userInfo.setLastName(user.lastName);
        // Enonic expects the legacy java.util.Date; conversion is null-safe.
        userInfo.setBirthday(oldDateFormat(user.birthday));
        userInfo.setOrganization(user.organization);
        // NOTE(review): phone fields are cross-mapped (phone <- phonePrivate,
        // fax <- phoneWork). Presumably deliberate for this CMS — confirm.
        userInfo.setPhone(user.phonePrivate);
        userInfo.setMobile(user.phoneMobile);
        userInfo.setFax(user.phoneWork);
        userInfo.setAddresses(address(user));
        return userInfo;
    }

    // Builds one Address from up to three address lines joined with spaces;
    // returns null when no address line is set.
    private Address address(User user) {
        if(user.address1 != null || user.address2 != null || user.address3 != null) {
            Address address = new Address();
            String[] addrArr = { user.address1, user.address2, user.address3 };
            String addrStr = Arrays.stream(addrArr)
                    .filter(str -> !Strings.isNullOrEmpty(str))
                    .collect(Collectors.joining(" "));
            address.setStreet(addrStr);
            address.setPostalCode(user.zipCode);
            address.setPostalAddress(user.city);
            // Country is hard-coded to Norway.
            address.setIsoCountry("NO");
            return address;
        } else {
            return null;
        }
    }

    // Converts a LocalDate to java.util.Date at start of day in the system
    // default time zone; null-safe.
    private Date oldDateFormat(LocalDate date){
        if(Objects.nonNull(date)){
            return Date.from(date.atStartOfDay(ZoneId.systemDefault()).toInstant());
        } else {
            return null;
        }
    }
}
// Parse a JSON document and read a single field from the resulting object.
const jsonString = '{"name": "John Smith", "age": 30}';
const jsonObj = JSON.parse(jsonString);
const name = jsonObj.name;
console.log(name); // Outputs: "John Smith"
<filename>commons/validator/validator_test.go
package validator
import (
"fmt"
"github.com/eddieowens/axon"
"github.com/stretchr/testify/suite"
"os"
"path"
"testing"
)
// ValidatorTest groups the Validator test cases; `validator` holds the
// instance under test, rebuilt for every test by SetupTest.
type ValidatorTest struct {
	suite.Suite
	validator Validator
}
// SetupTest builds a fresh Validator through the axon injector before each test.
func (v *ValidatorTest) SetupTest() {
	inj := axon.NewInjector(axon.NewBinder(
		new(Package),
	))
	v.validator = inj.GetStructPtr(Key).(Validator)
}
// TestExtValid: an existing file whose extension is in the allowed list passes.
func (v *ValidatorTest) TestExtValid() {
	// -- Given
	//
	type s struct {
		Ext string `validate:"ext=json toml"`
	}

	fp := path.Join(os.TempDir(), "what.json")
	// Close the handle right away — only the file's existence matters, and
	// the original `_, _ = os.Create(fp)` leaked the descriptor.
	if f, err := os.Create(fp); err == nil {
		f.Close()
	}
	defer os.Remove(fp)

	given := s{
		Ext: fp,
	}

	// -- When
	//
	err := v.validator.Struct(given)

	// -- Then
	//
	v.NoError(err)
}
// TestExtInvalid: an existing file with a disallowed extension is rejected
// with a descriptive message.
func (v *ValidatorTest) TestExtInvalid() {
	// -- Given
	//
	type s struct {
		Ext string `validate:"ext=txt toml"`
	}

	fp := path.Join(os.TempDir(), "what.json")
	// Close the handle right away; the original `_, _ = os.Create(fp)`
	// leaked the descriptor.
	if f, err := os.Create(fp); err == nil {
		f.Close()
	}
	defer os.Remove(fp)

	given := s{
		Ext: fp,
	}
	expected := fmt.Sprintf("%s does not have a valid file extension. Valid extensions are txt toml.", fp)

	// -- When
	//
	err := v.validator.Struct(given)

	// -- Then
	//
	v.EqualError(err, expected)
}
// TestExtInvalidFileNotFound: no file is created here on purpose — the
// validator still reports the extension error for a nonexistent path.
func (v *ValidatorTest) TestExtInvalidFileNotFound() {
	// -- Given
	//
	type s struct {
		Ext string `validate:"ext=txt toml"`
	}

	fp := path.Join(os.TempDir(), "what.json")
	given := s{
		Ext: fp,
	}
	expected := fmt.Sprintf("%s does not have a valid file extension. Valid extensions are txt toml.", fp)

	// -- When
	//
	err := v.validator.Struct(given)

	// -- Then
	//
	v.EqualError(err, expected)
}
// TestExtInvalidPartial: a prefix of a valid extension (".jso" vs ".json")
// must not be accepted.
func (v *ValidatorTest) TestExtInvalidPartial() {
	// -- Given
	//
	type s struct {
		Ext string `validate:"ext=json toml"`
	}

	fp := path.Join(os.TempDir(), "what.jso")
	// Close the handle right away; the original `_, _ = os.Create(fp)`
	// leaked the descriptor.
	if f, err := os.Create(fp); err == nil {
		f.Close()
	}
	defer os.Remove(fp)

	given := s{
		Ext: fp,
	}
	expected := fmt.Sprintf("%s does not have a valid file extension. Valid extensions are json toml.", fp)

	// -- When
	//
	err := v.validator.Struct(given)

	// -- Then
	//
	v.EqualError(err, expected)
}
// TestOverrideError: an OnError hook registered for the "ext" tag can append
// to the generated message.
func (v *ValidatorTest) TestOverrideError() {
	// -- Given
	//
	type s struct {
		Ext string `validate:"ext=json toml"`
	}

	fp := path.Join(os.TempDir(), "what.jso")
	// Close the handle right away; the original `_, _ = os.Create(fp)`
	// leaked the descriptor.
	if f, err := os.Create(fp); err == nil {
		f.Close()
	}
	defer os.Remove(fp)

	given := s{
		Ext: fp,
	}
	expected := fmt.Sprintf("%s does not have a valid file extension. Valid extensions are json toml. extra text", fp)
	v.validator.OnError(func(validationError *ValidationError) {
		validationError.Msg += " extra text"
	}, "ext")

	// -- When
	//
	err := v.validator.Struct(given)

	// -- Then
	//
	v.EqualError(err, expected)
}
// TestOverrideErrorAll: an OnError hook registered with no tag filter applies
// to every validation error ("ext" and "required" alike).
func (v *ValidatorTest) TestOverrideErrorAll() {
	// -- Given
	//
	type s struct {
		Ext  string `validate:"ext=json toml"`
		File string `validate:"required"`
	}

	fp := path.Join(os.TempDir(), "what.jso")
	// Close the handle right away; the original `_, _ = os.Create(fp)`
	// leaked the descriptor.
	if f, err := os.Create(fp); err == nil {
		f.Close()
	}
	defer os.Remove(fp)

	given := s{
		Ext: fp,
	}
	expected := fmt.Sprintf("%s does not have a valid file extension. Valid extensions are json toml. extra text\n"+
		"file is required. extra text", fp)
	v.validator.OnError(func(validationError *ValidationError) {
		validationError.Msg += " extra text"
	})

	// -- When
	//
	err := v.validator.Struct(given)

	// -- Then
	//
	v.EqualError(err, expected)
}
// TestValidatorTest is the go-test entry point that runs the suite above.
func TestValidatorTest(t *testing.T) {
	suite.Run(t, new(ValidatorTest))
}
|
<filename>db/migrate/20160428201207_add_tags_to_disease_source_variants.rb<gh_stars>0
class AddTagsToDiseaseSourceVariants < ActiveRecord::Migration
  # Replaces the tags<->variants join table with a
  # tags<->disease_source_variants join table (FKs + composite index).
  #
  # NOTE(review): drop_join_table discards any existing tag associations, and
  # rolling this `change` back cannot restore that data — confirm the loss of
  # the old tags/variants links is intended.
  def change
    drop_join_table :tags, :variants
    create_join_table :tags, :disease_source_variants do |t|
      t.foreign_key :tags
      t.foreign_key :disease_source_variants
      # Composite index with a short explicit name (default name is too long).
      t.index [:tag_id, :disease_source_variant_id], name: 'idx_dsv_tags'
    end
  end
end
|
<reponame>x5z5c5/weboasis-repo.github.io
/* global VT */
window.VT = window.VT || {};

/**
 * Injects the octicon SVG named by `el.dataset.id` into `el`.
 * Each icon is fetched at most once; the in-flight promise is memoized.
 */
VT.AppIcon = function (el) {
  // Already populated — nothing to do.
  if (el.children.length > 0) return;

  var id = el.dataset.id;
  var promise = VT.AppIcon.cache[id];
  if (!promise) {
    var url = VT.AppIcon.baseUrl + id + '.svg';
    promise = VT.AppIcon.cache[id] = fetch(url).then(function (r) {
      // Treat HTTP errors as failures instead of caching an error body.
      if (!r.ok) throw new Error('Failed to load icon "' + id + '": ' + r.status);
      return r.text();
    });
  }

  promise.then(function (svg) {
    // SVG comes from the pinned octicons CDN commit; innerHTML is intentional.
    el.innerHTML = el.classList.contains('-double') ? svg + svg : svg;
  }).catch(function (err) {
    // Evict the failed promise so a later call can retry, and surface the
    // error instead of leaving an unhandled rejection (original had no catch).
    delete VT.AppIcon.cache[id];
    console.error(err);
  });
};

VT.AppIcon.baseUrl =
  'https://rawcdn.githack.com/primer/octicons/ff7f6eee63fa2f2d24d02e3aa76a87db48e4b6f6/icons/';
VT.AppIcon.cache = {};
|
<filename>public/92.js<gh_stars>0
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[92],{
/***/ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=script&lang=js&":
/*!*****************************************************************************************************************************************************************************!*\
!*** ./node_modules/babel-loader/lib??ref--4-0!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=script&lang=js& ***!
\*****************************************************************************************************************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */ __webpack_exports__["default"] = ({
data: function data() {
return {
title: "sampling done here"
};
},
mounted: function mounted() {
if (this.$route.path === '/') {
this.$router.push({
name: 'job_processing_workflow'
});
}
}
});
/***/ }),
/***/ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=template&id=1fa316f8&":
/*!*********************************************************************************************************************************************************************************************************************!*\
!*** ./node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!./node_modules/vue-loader/lib??vue-loader-options!./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=template&id=1fa316f8& ***!
\*********************************************************************************************************************************************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "render", function() { return render; });
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return staticRenderFns; });
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("div", { staticClass: "row" }, [
_c("div", { staticClass: "col-lg-12 col-xlg-12 col-md-12" }, [
_c("div", { staticClass: "card" }, [
_c(
"ul",
{
staticClass: "nav nav-tabs profile-tab",
attrs: { role: "tablist" }
},
[
_c(
"li",
{ staticClass: "nav-item" },
[
_c(
"router-link",
{
staticClass: "nav-link ",
attrs: { to: "/job-processing/workflow" }
},
[
_vm._v(
"\n Job Workflow\n "
)
]
)
],
1
),
_vm._v(" "),
_c(
"li",
{ staticClass: "nav-item" },
[
_c(
"router-link",
{
staticClass: "nav-link ",
attrs: { to: "/job-processing/stages" }
},
[
_vm._v(
"\n Stages\n "
)
]
)
],
1
),
_vm._v(" "),
_c(
"li",
{ staticClass: "nav-item" },
[
_c(
"router-link",
{
staticClass: "nav-link ",
attrs: { to: "/stage/required-documents" }
},
[
_vm._v(
"\n Required Documents\n "
)
]
)
],
1
),
_vm._v(" "),
_c(
"li",
{ staticClass: "nav-item" },
[
_c(
"router-link",
{
staticClass: "nav-link ",
attrs: { to: "/job-workflow/shipments" }
},
[
_vm._v(
"\n Shipments\n "
)
]
)
],
1
),
_vm._v(" "),
_c(
"li",
{ staticClass: "nav-item" },
[
_c(
"router-link",
{
staticClass: "nav-link ",
attrs: { to: "/job-workflow/shipments-types" }
},
[
_vm._v(
"\n Shipment Types\n "
)
]
)
],
1
),
_vm._v(" "),
_c(
"li",
{ staticClass: "nav-item" },
[
_c(
"router-link",
{
staticClass: "nav-link ",
attrs: { to: "/job-workflow/shipments-sub-types" }
},
[
_vm._v(
"\n Shipment Sub Types\n "
)
]
)
],
1
)
]
)
])
])
])
}
var staticRenderFns = []
render._withStripped = true
/***/ }),
/***/ "./resources/assets/js/views/JobWorkflow/Tabs.vue":
/*!********************************************************!*\
!*** ./resources/assets/js/views/JobWorkflow/Tabs.vue ***!
\********************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _Tabs_vue_vue_type_template_id_1fa316f8___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ./Tabs.vue?vue&type=template&id=1fa316f8& */ "./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=template&id=1fa316f8&");
/* harmony import */ var _Tabs_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./Tabs.vue?vue&type=script&lang=js& */ "./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport *//* harmony import */ var _node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ../../../../../node_modules/vue-loader/lib/runtime/componentNormalizer.js */ "./node_modules/vue-loader/lib/runtime/componentNormalizer.js");
/* normalize component */
var component = Object(_node_modules_vue_loader_lib_runtime_componentNormalizer_js__WEBPACK_IMPORTED_MODULE_2__["default"])(
_Tabs_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_1__["default"],
_Tabs_vue_vue_type_template_id_1fa316f8___WEBPACK_IMPORTED_MODULE_0__["render"],
_Tabs_vue_vue_type_template_id_1fa316f8___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"],
false,
null,
null,
null
)
/* hot reload */
if (false) { var api; }
component.options.__file = "resources/assets/js/views/JobWorkflow/Tabs.vue"
/* harmony default export */ __webpack_exports__["default"] = (component.exports);
/***/ }),
/***/ "./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=script&lang=js&":
/*!*********************************************************************************!*\
!*** ./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=script&lang=js& ***!
\*********************************************************************************/
/*! exports provided: default */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Tabs_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/babel-loader/lib??ref--4-0!../../../../../node_modules/vue-loader/lib??vue-loader-options!./Tabs.vue?vue&type=script&lang=js& */ "./node_modules/babel-loader/lib/index.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=script&lang=js&");
/* empty/unused harmony star reexport */ /* harmony default export */ __webpack_exports__["default"] = (_node_modules_babel_loader_lib_index_js_ref_4_0_node_modules_vue_loader_lib_index_js_vue_loader_options_Tabs_vue_vue_type_script_lang_js___WEBPACK_IMPORTED_MODULE_0__["default"]);
/***/ }),
/***/ "./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=template&id=1fa316f8&":
/*!***************************************************************************************!*\
!*** ./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=template&id=1fa316f8& ***!
\***************************************************************************************/
/*! exports provided: render, staticRenderFns */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony import */ var _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Tabs_vue_vue_type_template_id_1fa316f8___WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! -!../../../../../node_modules/vue-loader/lib/loaders/templateLoader.js??vue-loader-options!../../../../../node_modules/vue-loader/lib??vue-loader-options!./Tabs.vue?vue&type=template&id=1fa316f8& */ "./node_modules/vue-loader/lib/loaders/templateLoader.js?!./node_modules/vue-loader/lib/index.js?!./resources/assets/js/views/JobWorkflow/Tabs.vue?vue&type=template&id=1fa316f8&");
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "render", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Tabs_vue_vue_type_template_id_1fa316f8___WEBPACK_IMPORTED_MODULE_0__["render"]; });
/* harmony reexport (safe) */ __webpack_require__.d(__webpack_exports__, "staticRenderFns", function() { return _node_modules_vue_loader_lib_loaders_templateLoader_js_vue_loader_options_node_modules_vue_loader_lib_index_js_vue_loader_options_Tabs_vue_vue_type_template_id_1fa316f8___WEBPACK_IMPORTED_MODULE_0__["staticRenderFns"]; });
/***/ })
}]); |
<filename>src/pages/blog.js
import React from "react";
import Link from "gatsby-link";
import { graphql } from "gatsby";
import { Nav } from "../components/nav";
// import '../css/index.css'; // add some style if you want!
export default function Index({ data }) {
const { edges: posts } = data.allMarkdownRemark;
return (
<>
<Nav />
<div className="">
{posts.map(({ node: post }) => {
return (
<div className="p-4 max-w-xl" key={post.id}>
<h1 className="text-xl uppercase">
<Link to={post.fields.slug}>{post.fields.title}</Link>
</h1>
<h2 className="text-gray-500">{post.fields.date}</h2>
<p>{post.excerpt}</p>
</div>
);
})}
</div>
</>
);
}
// GraphQL page query: all markdown posts, newest first, excluding any
// whose `fields.draft` is true. Gatsby injects the result into the
// page component as the `data` prop.
export const pageQuery = graphql`
  query IndexQuery {
    allMarkdownRemark(
      sort: { order: DESC, fields: [frontmatter___date] }
      filter: { fields: { draft: { ne: true } } }
    ) {
      edges {
        node {
          fields {
            date
            slug
            title
            tags
          }
          excerpt(pruneLength: 250)
          id
          frontmatter {
            title
            date(formatString: "YYYY-MM-DD")
            path
            tags
          }
        }
      }
    }
  }
`;
|
// Barrel file: re-export the single-file component so consumers can
// simply `import VRating from '.../v-rating'`.
import VRating from './v-rating.vue';
export default VRating;
|
<reponame>Sid1000/sample-page
// Type declarations for CSS-variable based style templates. The keys
// are --chakra-* custom properties; the string values are CSS snippets
// (see the implementation this .d.ts was generated from).

/** Template string for the CSS `transform` property. */
export declare function getTransformTemplate(): string;
/** GPU-accelerated variant of the transform template (per its name -- confirm against the implementation). */
export declare function getTransformGpuTemplate(): string;
/** Custom-property defaults plus the composed `filter` value. */
export declare const filterTemplate: {
    "--chakra-blur": string;
    "--chakra-brightness": string;
    "--chakra-contrast": string;
    "--chakra-grayscale": string;
    "--chakra-hue-rotate": string;
    "--chakra-invert": string;
    "--chakra-saturate": string;
    "--chakra-sepia": string;
    "--chakra-drop-shadow": string;
    filter: string;
};
/** Custom-property defaults plus the composed `backdropFilter` value. */
export declare const backdropFilterTemplate: {
    backdropFilter: string;
    "--chakra-backdrop-blur": string;
    "--chakra-backdrop-brightness": string;
    "--chakra-backdrop-contrast": string;
    "--chakra-backdrop-grayscale": string;
    "--chakra-backdrop-hue-rotate": string;
    "--chakra-backdrop-invert": string;
    "--chakra-backdrop-opacity": string;
    "--chakra-backdrop-saturate": string;
    "--chakra-backdrop-sepia": string;
};
/** Builds the focus-ring box-shadow variables for the given ring width value. */
export declare function getRingTemplate(value: any): {
    "--chakra-ring-offset-shadow": string;
    "--chakra-ring-shadow": string;
    "--chakra-ring-width": any;
    boxShadow: string;
};
/** Space/divide adjustments for reversed flex directions. */
export declare const flexDirectionTemplate: {
    "row-reverse": {
        space: string;
        divide: string;
    };
    "column-reverse": {
        space: string;
        divide: string;
    };
};
/** Horizontal spacing applied between sibling children (`space-x`). */
export declare const spaceXTemplate: {
    "& > :not(style) ~ :not(style)": {
        marginInlineStart: string;
        marginInlineEnd: string;
    };
};
/** Vertical spacing applied between sibling children (`space-y`). */
export declare const spaceYTemplate: {
    "& > :not(style) ~ :not(style)": {
        marginTop: string;
        marginBottom: string;
    };
};
//# sourceMappingURL=templates.d.ts.map
#;**********************************************************************;
#
# Copyright (c) 2016, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
#;**********************************************************************;
#!/bin/bash
# NOTE(review): this shebang is not on line 1 of the file (the license
# header precedes it), so the kernel never sees it; run the script
# explicitly with bash, or move the shebang above the license block.

# Make the tpm2 tools in the current directory resolvable by name.
new_path=`pwd`
PATH="$PATH":"$new_path"

# Artifacts produced/consumed by the sign & verify round trip below.
file_primary_key_ctx=context.p_B1
file_signing_key_pub=opuB1_B8
file_signing_key_priv=oprB1_B8
file_signing_key_ctx=context_load_out_B1_B8
file_signing_key_name=name.load.B1_B8
file_input_data=secret.data
file_output_data=sig.4
file_verify_tk_data=tickt_verify_sig.4
file_input_data_hash=secret_hash.data
file_input_data_hash_tk=secret_hash_tk.data

# Persistent handle and TPM2 numeric algorithm identifiers.
handle_signing_key=0x81010005
alg_hash=0x000B
alg_primary_key=0x0001
alg_signing_key=0x0008

# Report which step failed and abort the whole test.
fail()
{
    echo "$1 test fail, please check the environment or parameters!"
    exit 1
}

# Record a passed step in the log file.
Pass()
{
    echo "$1 pass" >>test_getpubak_pass.log
}

# Create the input data file unless a previous run left one behind.
if [ ! -e "$file_input_data" ]
then
    echo "12345678" > "$file_input_data"
fi

# Start from a clean slate: remove artifacts of any previous run.
rm -rf "$file_primary_key_ctx" "$file_signing_key_pub" "$file_signing_key_priv" \
    "$file_signing_key_ctx" "$file_signing_key_name" "$file_output_data" \
    "$file_verify_tk_data" "$file_input_data_hash" "$file_input_data_hash_tk"

tpm2_takeownership -c

# Create a primary key, then create and load a signing key under it.
tpm2_createprimary -A e -g $alg_hash -G $alg_primary_key -C "$file_primary_key_ctx"
if [ $? != 0 ];then
    fail createprimary
fi
tpm2_create -g $alg_hash -G $alg_signing_key -o "$file_signing_key_pub" -O "$file_signing_key_priv" -c "$file_primary_key_ctx"
if [ $? != 0 ];then
    fail create
fi
tpm2_load -c "$file_primary_key_ctx" -u "$file_signing_key_pub" -r "$file_signing_key_priv" -n "$file_signing_key_name" -C "$file_signing_key_ctx"
if [ $? != 0 ];then
    fail load
fi

# Sign the raw input data, then verify against the message itself.
tpm2_sign -c "$file_signing_key_ctx" -g $alg_hash -m "$file_input_data" -s "$file_output_data"
if [ ! -e "$file_output_data" ];then
    echo "no outputfile,sign test Fail!"
    exit 1
fi
tpm2_verifysignature -c "$file_signing_key_ctx" -g $alg_hash -m "$file_input_data" -s "$file_output_data" -t "$file_verify_tk_data"
if [ $? != 0 ];then
    fail verifysignature
fi

# Hash the input externally and verify against the digest (-D) instead.
tpm2_hash -H n -g $alg_hash -I "$file_input_data" -o "$file_input_data_hash" -t "$file_input_data_hash_tk"
if [ ! -e "$file_input_data_hash" ];then
    echo "hash $file_input_data Fail!"
    exit 1
fi
rm -rf "$file_verify_tk_data"
tpm2_verifysignature -c "$file_signing_key_ctx" -D "$file_input_data_hash" -s "$file_output_data" -t "$file_verify_tk_data"
if [ $? != 0 ];then
    fail verifysignature
fi

# Reload only the public half via loadexternal and verify once more.
rm -rf "$file_verify_tk_data" "$file_signing_key_ctx"
tpm2_loadexternal -H n -u "$file_signing_key_pub" -C "$file_signing_key_ctx"
if [ ! -e "$file_signing_key_ctx" ];then
    fail Loadexternal
fi
###need debug 0x2cb error first ##
tpm2_verifysignature -c "$file_signing_key_ctx" -g $alg_hash -m "$file_input_data" -s "$file_output_data" -t "$file_verify_tk_data"
if [ $? != 0 ];then
    fail verifysignature
fi
|
-- Simple lookup table of fruits and their attributes.
CREATE TABLE Fruits (
    name TEXT NOT NULL,      -- fruit name
    color TEXT NOT NULL,     -- primary color of the fruit
    is_sweet BOOLEAN NOT NULL -- whether the fruit tastes sweet
);
package org.jooby.mongodb;
import com.google.inject.Binder;
import com.google.inject.Key;
import com.google.inject.binder.AnnotatedBindingBuilder;
import com.google.inject.name.Names;
import com.mongodb.MongoClient;
import com.mongodb.MongoClientURI;
import com.mongodb.client.MongoDatabase;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValueFactory;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.isA;
import org.jooby.Env;
import org.jooby.Env.ServiceKey;
import org.jooby.test.MockUnit;
import org.jooby.test.MockUnit.Block;
import org.jooby.funzy.Throwing;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
@RunWith(PowerMockRunner.class)
@PrepareForTest({Mongodb.class, MongoClient.class})
public class MongodbTest {

  /** Expected defaults, loaded from the module's bundled mongodb.conf. */
  private Config $mongodb = ConfigFactory.parseResources(getClass(), "mongodb.conf");

  /**
   * Shared EasyMock expectations for a successful module configuration.
   * Each toInstance/bind expectation appears twice because the module
   * binds every service both unnamed and named after the database
   * ("mydb").
   */
  @SuppressWarnings("unchecked")
  MockUnit.Block mongodb = unit -> {
    AnnotatedBindingBuilder<MongoClientURI> mcuABB = unit.mock(AnnotatedBindingBuilder.class);
    mcuABB.toInstance(isA(MongoClientURI.class));
    mcuABB.toInstance(isA(MongoClientURI.class));

    MongoClient client = unit.constructor(MongoClient.class)
        .args(MongoClientURI.class)
        .build(isA(MongoClientURI.class));

    MongoDatabase db = unit.mock(MongoDatabase.class);
    // The database name comes from the URI path (/mydb) in each test.
    expect(client.getDatabase("mydb")).andReturn(db);

    unit.registerMock(MongoClient.class, client);

    AnnotatedBindingBuilder<MongoClient> mcABB = unit.mock(AnnotatedBindingBuilder.class);
    mcABB.toInstance(client);
    mcABB.toInstance(client);

    AnnotatedBindingBuilder<MongoDatabase> dbABB = unit.mock(AnnotatedBindingBuilder.class);
    dbABB.toInstance(db);
    dbABB.toInstance(db);

    Binder binder = unit.get(Binder.class);
    expect(binder.bind(Key.get(MongoClientURI.class))).andReturn(mcuABB);
    expect(binder.bind(Key.get(MongoClientURI.class, Names.named("mydb")))).andReturn(mcuABB);
    expect(binder.bind(Key.get(MongoClient.class))).andReturn(mcABB);
    expect(binder.bind(Key.get(MongoClient.class, Names.named("mydb")))).andReturn(mcABB);
    expect(binder.bind(Key.get(MongoDatabase.class))).andReturn(dbABB);
    expect(binder.bind(Key.get(MongoDatabase.class, Names.named("mydb")))).andReturn(dbABB);

    Env env = unit.get(Env.class);
    // The module registers a stop callback; it is captured so individual
    // tests can invoke it and assert its behavior.
    expect(env.onStop(unit.capture(Throwing.Runnable.class))).andReturn(env);
  };

  /**
   * Happy path: configuring with only a connection string binds URI,
   * client and database, and the captured onStop callback closes the
   * client.
   */
  @Test
  public void defaults() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class)
        .expect(unit -> {
          Config config = unit.get(Config.class);
          expect(config.getConfig("mongodb")).andReturn($mongodb.getConfig("mongodb"));
          expect(config.hasPath("mongodb.db")).andReturn(false);
          expect(config.getString("db")).andReturn("mongodb://127.0.0.1/mydb");
        })
        .expect(serviceKey(new ServiceKey()))
        .expect(mongodb)
        .expect(unit -> {
          // Expect the stop callback to close the underlying client.
          MongoClient client = unit.get(MongoClient.class);
          client.close();
        })
        .run(unit -> {
          Mongodb mongodb = new Mongodb();
          mongodb.configure(unit.get(Env.class), unit.get(Config.class), unit.get(Binder.class));
        }, unit -> {
          unit.captured(Throwing.Runnable.class).iterator().next().run();
        });
  }

  /** A connection string without a database path must be rejected. */
  @Test(expected = IllegalArgumentException.class)
  public void shouldFaileWhenDbIsMissing() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class)
        .expect(unit -> {
          Config config = unit.get(Config.class);
          expect(config.getConfig("mongodb")).andReturn($mongodb.getConfig("mongodb"));
          expect(config.hasPath("mongodb.db")).andReturn(false);
          // No /db path segment in the URI -> expected failure.
          expect(config.getString("db")).andReturn("mongodb://127.0.0.1");
        })
        .expect(mongodb)
        .run(unit -> {
          Mongodb mongodb = new Mongodb();
          mongodb.configure(unit.get(Env.class), unit.get(Config.class), unit.get(Binder.class));
        });
  }

  /** Options under mongodb.db in the config reach the options builder. */
  @Test
  public void defaultsWithCustomOption() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class)
        .expect(unit -> {
          Config config = unit.get(Config.class);
          expect(config.getConfig("mongodb"))
              .andReturn($mongodb.getConfig("mongodb"));
          expect(config.hasPath("mongodb.db")).andReturn(true);
          expect(config.getConfig("mongodb.db")).andReturn(ConfigFactory.empty()
              .withValue("connectionsPerHost", ConfigValueFactory.fromAnyRef(50)));
          expect(config.getString("db")).andReturn("mongodb://127.0.0.1/mydb");
        })
        .expect(serviceKey(new ServiceKey()))
        .expect(mongodb)
        .run(unit -> {
          new Mongodb()
              .options((options, config) -> {
                assertEquals(50, options.build().getConnectionsPerHost());
              })
              .configure(unit.get(Env.class), unit.get(Config.class), unit.get(Binder.class));
        });
  }

  /** config() must expose exactly the bundled mongodb.conf defaults. */
  @Test
  public void defaultsConfig() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class)
        .run(unit -> {
          assertEquals($mongodb, new Mongodb().config());
        });
  }

  /** An options callback may mutate the builder without breaking setup. */
  @Test
  public void defaultsWithOptions() throws Exception {
    new MockUnit(Env.class, Config.class, Binder.class)
        .expect(unit -> {
          Config config = unit.get(Config.class);
          expect(config.getConfig("mongodb")).andReturn($mongodb.getConfig("mongodb"));
          expect(config.hasPath("mongodb.db")).andReturn(false);
          expect(config.getString("db")).andReturn("mongodb://127.0.0.1/mydb");
        })
        .expect(serviceKey(new ServiceKey()))
        .expect(mongodb)
        .run(unit -> {
          new Mongodb()
              .options((options, config) -> {
                options.connectTimeout(3000);
              })
              .configure(unit.get(Env.class), unit.get(Config.class), unit.get(Binder.class));
        });
  }

  /** Expectation helper: the env must hand out the given service key. */
  private Block serviceKey(final ServiceKey serviceKey) {
    return unit -> {
      Env env = unit.get(Env.class);
      expect(env.serviceKey()).andReturn(serviceKey);
    };
  }
}
|
<gh_stars>10-100
/* eslint-env mocha */
import { expect } from 'chai';
import getStyles from '../../src/switch/get-styles';
import styles from '../../src/switch/styles';
// Unit tests for the Switch component's style helpers. Each helper is
// checked for its raw defaults, merging of caller-supplied overrides,
// and (where applicable) the theme color applied in the active state.
describe('Switch.getStyles', () => {
  describe('knob', () => {
    it('should get styles', () => {
      const style = getStyles.knob();
      expect(style).to.deep.equal(styles.knob);
    });

    it('should combine styles', () => {
      const style = getStyles.knob('red', false, { color: 'red' });
      expect(style).to.have.property('color', 'red');
    });

    it('should change theme color', () => {
      const style = getStyles.knob('red', true);
      expect(style).to.have.property('backgroundColor', 'red');
    });

    it('should add active styles', () => {
      // When active the knob is shifted to the right.
      const style = getStyles.knob('red', true);
      expect(style).to.have.property('left', '20px');
    });
  });

  describe('track', () => {
    it('should get styles', () => {
      const style = getStyles.track();
      expect(style).to.deep.equal(styles.track);
    });

    it('should combine styles', () => {
      const style = getStyles.track('red', false, { color: 'red' });
      expect(style).to.have.property('color', 'red');
    });

    it('should change theme color', () => {
      const style = getStyles.track('red', true);
      expect(style).to.have.property('backgroundColor', 'rgba(255, 0, 0, 0.5)');
    });

    it('should add active styles', () => {
      // NOTE(review): this assertion is identical to the "change theme
      // color" case above; it may have been meant to check a different
      // active-state property -- confirm.
      const style = getStyles.track('red', true);
      expect(style).to.have.property('backgroundColor', 'rgba(255, 0, 0, 0.5)');
    });
  });

  describe('label', () => {
    it('should get styles', () => {
      const style = getStyles.label();
      expect(style).to.deep.equal(styles.label);
    });

    it('should combine styles', () => {
      const style = getStyles.label({ color: 'red' });
      expect(style).to.have.property('color', 'red');
    });
  });
});
|
<filename>test/test_haversine.c<gh_stars>0
// <NAME>
// testing of haversine formula
// filename: test_haversine.c
// attribution: https://en.wikipedia.org/wiki/Haversine_formula#The_haversine_formula
// attribution: https://www.vcalc.com/wiki/vCalc/Haversine+-+Distance
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#define max(a,b) (a>b ? a : b)
#define min(a,b) (a<b ? a : b)

/*
 * Great-circle distance between two points given in decimal degrees,
 * returned in miles, using the haversine formula
 *   hav(theta) = sin^2(theta / 2).
 * BUG FIX: the original computed sin((delta/2)^2) instead of
 * sin^2(delta/2); the two only coincide for small angles.
 */
double haversine(double lat1, double lng1, double lat2, double lng2)
{
    /* convert coordinates from degrees to radians */
    lat1 *= (M_PI / 180);
    lng1 *= (M_PI / 180);
    lat2 *= (M_PI / 180);
    lng2 *= (M_PI / 180);

    /* haversine of the latitude difference: sin^2(dlat / 2) */
    double latdiff = (lat2 - lat1);
    double lathavrs = pow(sin(latdiff / 2), 2);

    /* haversine of the longitude difference: sin^2(dlng / 2) */
    double lngdiff = (lng2 - lng1);
    double lnghavrs = pow(sin(lngdiff / 2), 2);

    /* radius of Earth in kilometers, average of equatorial and polar */
    double R = (6356.752 + 6378.137) / 2;

    /* solve the haversine formula for the central angle */
    double h = lathavrs + (cos(lat1) * cos(lat2) * lnghavrs);
    h = sqrt(h);
    /* clamp so rounding error cannot push asin's argument past 1 */
    h = min(max(0, h), 1);
    double d = 2 * R * asin(h);

    /* convert distance from kilometers to miles */
    d *= 0.62137;
    return d;
}
/*
 * Smoke test: compute the Nebraska -> Kansas distance and compare with
 * a published reference value.
 */
int main()
{
    double ptA[2] = { 41.507483, -99.436554 };
    double ptB[2] = { 38.504048, -98.315949 };
    /* NOTE(review): per the vCalc attribution this reference value
       appears to be in kilometers, while haversine() returns miles;
       the comparison below will print FAIL until the units are
       confirmed and reconciled. */
    double test = 347.32883869;
    double dist = haversine(ptA[0], ptA[1], ptB[0], ptB[1]);

    printf("Nebraska: %lf, %lf\n", ptA[0], ptA[1]);
    printf("Kansas: %lf, %lf\n", ptB[0], ptB[1]);
    printf("my haversine distance is %lf\n", dist);
    printf("actual haversine distance is %lf\n", test);

    /* BUG FIX: exact == on computed doubles essentially never holds;
       compare with a small absolute tolerance instead. */
    fabs(dist - test) < 1e-6 ? printf("PASS\n") : printf("FAIL\n");
    return 0;
}
|
<gh_stars>0
import {verifyType, TYPES, notNegative} from "../util/verifyType.js";
import {Terminable, TerminableList} from "../util/terminable.js";
// The base values for both stats
const OFFENSE = 33.73;
const HP = 107.0149;
/*
The Stat class represents one of a Warrior's
4 stats:
(1) Physical attack
(2) Elemental attack
(3) Armor
(4) Hit points
*/
/*
    The Stat class represents one of a Warrior's 4 stats:
        (1) Physical attack  (2) Elemental attack
        (3) Armor            (4) Hit points
*/
class Stat {
    /**
     * @param {number} type - one of Stat.PHYS, Stat.ELE, Stat.ARM, Stat.HP.
     * @param {number} multiplier - non-negative multiplier applied to the
     *     stat's base constant (armor uses the multiplier directly).
     * @throws {Error} when type is not one of the four known stat ids.
     */
    constructor(type, multiplier) {
        verifyType(type, TYPES.number);
        notNegative(multiplier);
        type = parseInt(type);
        switch (type) {
            case Stat.PHYS:
                this.name = "Physical attack";
                this.type = type;
                this.base = OFFENSE * multiplier;
                this.levelsUp = true;
                break;
            case Stat.ELE:
                this.name = "Elemental attack";
                this.type = type;
                this.base = OFFENSE * multiplier;
                this.levelsUp = true;
                break;
            case Stat.ARM:
                // Armor is a flat value with no base constant, and it
                // does not scale with level (see calc()).
                this.name = "Armor";
                this.type = type;
                this.base = multiplier;
                this.levelsUp = false;
                break;
            case Stat.HP:
                this.name = "HP";
                this.type = type;
                this.base = HP * multiplier;
                this.levelsUp = true;
                break;
            default:
                // (unreachable `break` after the throw removed)
                throw new Error(`type cannot be ${type}, it must be either Stat.PHYS, Stat.ELE, Stat.ARM, or Stat.HP`);
        }
        this.ctorMultiplier = multiplier;
        this.value = this.base;
        this.boosts = new Map();
    }

    /**
     * Returns a copy of this stat, optionally re-based on a new
     * multiplier (defaults to the one given at construction).
     */
    copy(newBase = null) {
        if (newBase === null) {
            newBase = this.ctorMultiplier;
        }
        let ret = new Stat(this.type, newBase);
        ret.value = this.value;
        // BUG FIX: copy the boost map so the clone's boosts evolve
        // independently of the original's (the old code shared one Map,
        // flagged by its own "need to do deep copy here" comment).
        // NOTE(review): the StatBoost objects themselves are still
        // shared; a full deep copy would clone each boost as well.
        ret.boosts = new Map(this.boosts);
        return ret;
    }

    /**
     * Recomputes the stat's value for the given level and clears all
     * boosts. Stats with levelsUp=false (armor) keep their base value.
     */
    calc(lv) {
        this.boosts.clear();
        if (this.levelsUp) {
            this.value = Math.round(this.base * Math.pow(1.07, lv));
        }
    }

    /** Applies (or replaces, by id) a StatBoost. */
    applyBoost(boost) {
        this.boosts.set(boost.id, boost);
    }

    /** The unleveled, unboosted base value. */
    getBase() {
        return this.base;
    }

    /**
     * The current effective value: armor adds boost amounts flat,
     * every other stat multiplies by (1 + sum of boost amounts).
     */
    getValue() {
        let ret;
        let mod = 1;
        for (let boost of this.boosts.values()) {
            mod += boost.amount;
        }
        if (this.type === Stat.ARM) {
            // since mod starts at 1
            ret = this.value + mod - 1;
        } else {
            ret = this.value * mod;
        }
        return Math.round(ret);
    }

    /** Ticks every boost and drops the ones that have expired. */
    update() {
        let newBoosts = new Map();
        this.boosts.forEach((v, k) => {
            v.update();
            if (!v.should_terminate) {
                newBoosts.set(k, v);
            }
        });
        this.boosts = newBoosts;
    }
}
// Stat type identifiers (see the constructor's switch).
Stat.PHYS = 0;
Stat.ELE = 1;
Stat.ARM = 2;
Stat.HP = 3;
/**
 * A temporary modifier applied to a Stat. `amount` is folded into the
 * stat's modifier in Stat.getValue(); the boost expires after `dur`
 * calls to update().
 */
class StatBoost extends Terminable{
    constructor(id, amount, dur){
        super(id, (stat)=>{
            //not implemented yet
        }, dur);
        this.id = id;
        this.amount = amount;          // contribution added to the stat's modifier
        this.max_dur = dur;            // original duration, kept for reference
        this.dur_rem = dur;            // remaining ticks before expiry
        this.should_terminate = false; // checked by Stat.update() to prune this boost
    }

    // Tick down the remaining duration and flag for removal once spent.
    update(){
        this.dur_rem -= 1;
        if(this.dur_rem <= 0){
            this.should_terminate = true;
        }
    }
}
// Public API of this module.
export {
    Stat,
    StatBoost
};
|
def reverse_string(string):
    """Return ``string`` with its characters in reverse order.

    Uses slicing with a negative step, the idiomatic O(N) reversal.
    The original built the result one character at a time with string
    concatenation, which is O(N^2) in CPython.
    """
    return string[::-1]

# Time Complexity: O(N)
# Space Complexity: O(N)
import memoize from 'memoize-one'
// create a set of handlers with a stable identity so as not to
// thwart SCU checks
// Returns a function that maps an event name (or list of names) to a
// map of handlers, reusing previously created handlers so consumers
// keep a stable identity across calls (see file comment above).
export default function createEventHandler(getHandler) {
  const getter = memoize(getHandler)
  let handlers = {}

  return (events) => {
    const next = {}
    if (events) {
      for (const event of [].concat(events)) {
        // Prefer the handler from the previous invocation; otherwise
        // create (and memoize) a new one.
        next[event] = handlers[event] || getter(event)
      }
    }
    handlers = next
    return handlers
  }
}
|
<gh_stars>1-10
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package play.libs.ws;
import play.Application;
/**
 * Asynchronous API to query web services, as an HTTP client.
*
* The value returned is a {@code Promise<Response>}, and you should use Play's asynchronous mechanisms to use this response.
*/
public class WS {

    /**
     * Returns the {@link WSClient} backed by the application's
     * {@link WSPlugin}.
     *
     * @throws IllegalStateException if the plugin is not loaded or not
     *     enabled
     */
    public static WSClient client() {
        Application app = play.Play.application();
        WSPlugin wsPlugin = app.plugin(WSPlugin.class);
        if (wsPlugin.enabled() && wsPlugin.loaded()) {
            return wsPlugin.api().client();
        } else {
            throw new IllegalStateException("WSPlugin is not loaded / enabled!");
        }
    }

    /**
     * Prepare a new request. You can then construct it by chaining calls.
     *
     * @param url the URL to request
     * @return a request holder bound to the given URL
     */
    public static WSRequestHolder url(String url) {
        return client().url(url);
    }
}
|
#!/bin/bash
# BUG FIX: the "<<<" here-string below is a bashism, so the original
# "#!/bin/sh" shebang could break on systems where sh is not bash.

# Create /lib64 symlink if it doesn't exist
if [ ! -d "/lib64" ]; then
    ln -s /lib /lib64
fi

# Remove all IOxOS cards and rescan the pci bus
# This is a workaround for hot plug
rescan=0

# Find all unique IOxOS cards by [vendorid:deviceid]
pcie_ioxos=$(dmesg | grep "\[7357\:1002\]" | sort -u)

# Remove all IOxOS cards from pci bus
while read -r line
do
    # Skip the single empty line the here-string yields when no card
    # was found (otherwise we'd write to /sys/bus/pci/devices//remove).
    [ -n "$line" ] || continue
    # NOTE(review): assumes the PCI address sits at a fixed offset in
    # the dmesg line -- confirm against the actual kernel log format.
    pcie_addr=$(expr substr "$line" 5 12)
    echo 1 > "/sys/bus/pci/devices/$pcie_addr/remove"
    rescan=1
done <<< "$pcie_ioxos"

# Rescan pci bus to add the IOxOS cards again
if [ "$rescan" = "1" ]; then
    echo 1 > /sys/bus/pci/rescan
fi
|
package com.trackorjargh.javarepository;
import java.util.List;
import javax.transaction.Transactional;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import com.trackorjargh.javaclass.Book;
import com.trackorjargh.javaclass.PointBook;
import com.trackorjargh.javaclass.User;
/**
 * Spring Data JPA repository for {@link PointBook} rows, which link a
 * {@link User} to the points they gave a {@link Book}.
 */
public interface PointBookRepository extends JpaRepository<PointBook, Long>{

    /** Finds the point entry a given user created for a given book. */
    PointBook findByUserAndBook(User user, Book book);

    /** All point entries recorded for the given book. */
    List<PointBook> findByBook(Book book);

    /** Deletes every point row belonging to the given user id. */
    @Modifying
    @Transactional
    @Query(value = "DELETE FROM POINT_BOOK WHERE USER_ID = ?1", nativeQuery = true)
    void removePointsBooksByUserId(long id);

    /** Deletes every point row belonging to the given book id. */
    @Modifying
    @Transactional
    @Query(value = "DELETE FROM POINT_BOOK WHERE BOOK_ID = ?1", nativeQuery = true)
    void removePointsBooksByBookId(long id);

    /** Looks up a point entry by its primary key. */
    PointBook findById(Long id);
}
|
// Shape of the global store's state.
export interface storeState {
  navShow: boolean; // whether the navigation bar is shown
  artSum: number;   // article count (presumably total articles -- confirm)
  name: string;     // display name -- exact semantics not visible here
  link: string;     // associated URL -- TODO confirm what it points to
  email: string;    // contact email address
  avatar: string;   // avatar image URL -- TODO confirm
  mode: number;     // numeric mode flag; meaning not visible in this file
}
|
def get_smallest_num(nums):
    """Return the smallest number in ``nums``."""
    return min(nums)


numbers = [5, 10, 15, 20]
print(get_smallest_num(numbers))
#!/bin/bash
# Upload all notebooks in the current directory to the training bucket.

BUCKET=cloud-training-demos-ml # CHANGE

# Quote the destination so an unexpected space/glob character in BUCKET
# cannot split or expand the URL.
gsutil cp *.ipynb "gs://${BUCKET}/notebooks/jupyter"
|
package io.opensphere.myplaces.importer;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Collection;
import java.util.List;
import org.apache.log4j.Logger;
import de.micromata.opengis.kml.v_2_2_0.Document;
import de.micromata.opengis.kml.v_2_2_0.Feature;
import de.micromata.opengis.kml.v_2_2_0.Folder;
import de.micromata.opengis.kml.v_2_2_0.Kml;
import de.micromata.opengis.kml.v_2_2_0.LineString;
import de.micromata.opengis.kml.v_2_2_0.Placemark;
import io.opensphere.core.Toolbox;
import io.opensphere.core.importer.ImportCallback;
import io.opensphere.core.util.collections.New;
import io.opensphere.kml.gx.Track;
import io.opensphere.kml.marshal.KmlMarshaller;
import io.opensphere.myplaces.export.ExporterUtilities;
import io.opensphere.myplaces.models.MyPlacesModel;
/**
* MyPlaces KML importer.
*/
public class MyPlacesKmlImporter extends AbstractMyPlacesImporter
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(MyPlacesKmlImporter.class);

    /** The supported file extensions. */
    private static final List<String> ourFileExtensions = New.unmodifiableList("kml");

    /**
     * Converts the KML into placemarks under a folder.
     *
     * @param kml the KML object (may be null when reading failed)
     * @param folderName the name of the folder
     * @return the folder, or null when the KML has no Document root
     */
    private static Folder getFolderWithPlacemarks(Kml kml, String folderName)
    {
        Folder folder = null;
        if (kml != null && kml.getFeature() instanceof Document)
        {
            Document importDoc = (Document)kml.getFeature();
            folder = new Folder();
            folder.setName(folderName);
            folder.setVisibility(Boolean.TRUE);
            // Convert LineStrings to gx:Track before re-parenting the
            // features (see replaceLinesWithTracks for the rationale).
            replaceLinesWithTracks(importDoc.getFeature());
            for (Feature feature : importDoc.getFeature())
            {
                folder.addToFeature(feature);
            }
        }
        return folder;
    }

    /**
     * Reads the file.
     *
     * @param file the file
     * @return the KML object, or null if the file could not be read
     */
    private static Kml readFile(File file)
    {
        Kml kml;
        try
        {
            kml = KmlMarshaller.getInstance().unmarshal(file);
        }
        catch (FileNotFoundException e)
        {
            kml = null;
            LOGGER.error(e, e);
        }
        return kml;
    }

    /**
     * Replaces all lines with tracks. This is done because newer Google Earth
     * does not show tracks without time.
     *
     * @param features the features
     */
    private static void replaceLinesWithTracks(Collection<Feature> features)
    {
        Collection<Placemark> placemarks = New.list();
        for (Feature feature : features)
        {
            ExporterUtilities.flattenToPlacemarks(feature, placemarks);
        }
        for (Placemark placemark : placemarks)
        {
            if (placemark.getGeometry() instanceof LineString)
            {
                LineString line = (LineString)placemark.getGeometry();
                Track track = new Track();
                track.getCoordinates().addAll(line.getCoordinates());
                // Pad with one (null) timestamp per coordinate so the
                // track's coordinate and "when" lists match in length.
                for (int i = 0, n = line.getCoordinates().size(); i < n; i++)
                {
                    track.getWhen().add(null);
                }
                placemark.setGeometry(track);
            }
        }
    }

    /**
     * Constructor.
     *
     * @param toolbox the toolbox
     * @param model the model
     */
    public MyPlacesKmlImporter(Toolbox toolbox, MyPlacesModel model)
    {
        super(toolbox, model);
    }

    @Override
    public int getPrecedence()
    {
        return 100;
    }

    @Override
    public List<String> getSupportedFileExtensions()
    {
        return ourFileExtensions;
    }

    @Override
    public void importFile(File aFile, ImportCallback callback)
    {
        // NOTE(review): the callback parameter is never invoked here --
        // confirm whether completion should be reported through it.
        Kml kml = readFile(aFile);
        Folder folder = getFolderWithPlacemarks(kml, aFile.getName());
        addFolderOrFail(folder, aFile);
    }
}
|
#!/bin/bash
# Developer environment setup: install Python requirements, then the
# VS Code extensions listed below.

# pip install
python3 -m pip install -r ./setup/requirements_colab.txt

# Visual Studio Code :: Package list
pkglist=(
    ms-python.python
    tabnine.tabnine-vscode
    njpwerner.autodocstring
    kevinrose.vsc-python-indent
    ms-ceintl.vscode-language-pack-ja
    sbsnippets.pytorch-snippets
    mosapride.zenkaku
)
# Quote the expansions so extension ids survive word splitting intact.
for i in "${pkglist[@]}"; do
    code --install-extension "$i"
done
package dev.shirokuro.commandutility;
import dev.shirokuro.commandutility.platform.Platform;
import dev.shirokuro.commandutility.platform.PlatformCommandHandler;
import java.util.Collections;
import java.util.Map;
/**
 * No-op {@link Platform} implementation for unit tests: registering a
 * handler does nothing and no default completers are provided.
 */
public final class TestPlatform implements Platform {

    /** Intentionally a no-op; tests need no real command registration. */
    @Override
    public void registerHandler(final String command, final PlatformCommandHandler handler) {
    }

    /** @return an empty, immutable completer map. */
    @Override
    public Map<String, CommandCompleter> defaultCompleters() {
        return Collections.emptyMap();
    }
}
|
/**
 * Builds the HTML for a mega-menu category list.
 *
 * @param {Array<{name: string}>} categories - categories to render.
 * @returns {string} a `<ul>` element with one `<li>` link per category.
 */
function generateCategoryList(categories) {
  // NOTE(review): category.name is interpolated into HTML unescaped;
  // if names can come from untrusted input they must be escaped first.
  const items = categories
    .map((category) => `<li class="menu_item_children"><a href="#">${category.name}</a></li>`)
    .join('');
  return `<ul class="categories_mega_menu">${items}</ul>`;
}
<reponame>steadylearner/code
import gzip
import shutil
def gzip_decompress(file_name: str):
    """Decompress the gzip file ``file_name`` next to itself.

    The output path is ``file_name`` with a trailing ``.gz`` removed.
    BUG FIX: the original used ``file_name.replace(".gz", '')``, which
    strips ".gz" *anywhere* in the name (e.g. "a.gz.files.gz" became
    "a.files"), not just the suffix.
    """
    if file_name.endswith(".gz"):
        out_name = file_name[: -len(".gz")]
    else:
        out_name = file_name
    with gzip.open(file_name, 'rb') as f_in:
        with open(out_name, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)
|
#!/system/bin/sh
#Copyright (c) 2015 Lenovo Co. Ltd
#Authors: yexh1@lenovo.com

# Collect the Android "events" log buffer into rotating files under the
# configured log directory.
umask 022
#yexh1 LOGFILE="/data/local/log/aplog/dmesglog"

# Log directory: first positional argument, or the persisted system
# property when none is given.
if [ -z "$1" ]; then
    LOGDIR=$(getprop persist.sys.lenovo.log.path)
else
    LOGDIR=$1
fi
LOGFILE=$LOGDIR"/events" #yexh1

# -b events: read the events buffer; -r8096 / -n 16: rotate (size in
# logcat's units, presumably kB -- confirm) keeping 16 files;
# -f: write to LOGFILE instead of stdout.
/system/bin/logcat -r8096 -b events -n 16 -v threadtime -f $LOGFILE
|
import { Space, Table } from 'antd';
import React from 'react';
import useTodoService, { TodoDataProps, TodoService } from './useTodoService';
import TableHandler from './TableRowHandler';
import TodoListInput from './TodoListInput';
import { Wrapper } from './StyledComponets/Wrapper';
// Column definitions for the todo table. The dataIndex values appear
// to correspond to TodoDataProps fields ('title', 'description') --
// confirm against useTodoService.
const TABLE_COLUMN = [
  { title: '标题', dataIndex: 'title' },
  { title: '详情', dataIndex: 'description' },
  {
    title: '操作',
    // Action column: renders the per-row handler, keyed by the row's title.
    render: (res: TodoDataProps) => {
      return <TableHandler title={res.title} />;
    },
  },
];
// Top-level todo view: exposes the todo service via context so the
// input and the table's row handlers can share the same list state.
export default function TodoList() {
  const todoService = useTodoService();
  return (
    <TodoService.Provider value={todoService}>
      <Wrapper>
        <Space direction='vertical' style={{ width: '100%' }}>
          <TodoListInput />
          {/* rowKey='title' assumes todo titles are unique -- TODO confirm */}
          <Table dataSource={todoService.todoList} columns={TABLE_COLUMN} rowKey='title' />
        </Space>
      </Wrapper>
    </TodoService.Provider>
  );
}
|
#!/usr/bin/env bash
#
# Copyright (C) 2011-2021 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
set -e
# Resolve repo-relative directories from this script's own location.
SCRIPT_DIR=$(dirname "$0")
ROOT_DIR="${SCRIPT_DIR}/../../../../"
LINUX_INSTALLER_DIR="${ROOT_DIR}/installer/linux"
LINUX_INSTALLER_COMMON_DIR="${LINUX_INSTALLER_DIR}/common"
LINUX_INSTALLER_COMMON_DCAP_PCCS_DIR="${LINUX_INSTALLER_COMMON_DIR}/sgx-dcap-pccs"
# Shared package variables (DCAP_PCCS_PACKAGE_NAME, DCAP_PCCS_VERSION, TARBALL_NAME, ...).
source ${LINUX_INSTALLER_COMMON_DCAP_PCCS_DIR}/installConfig
DEB_FOLDER=${DCAP_PCCS_PACKAGE_NAME}-${DCAP_PCCS_VERSION}
# Extract the quoted STRFILEVER version string from se_version.h.
SGX_VERSION=$(awk '/STRFILEVER/ {print $3}' ${ROOT_DIR}/common/inc/internal/se_version.h|sed 's/^\"\(.*\)\"$/\1/')
DEB_BUILD_FOLDER=${DCAP_PCCS_PACKAGE_NAME}-${SGX_VERSION}
# Orchestrate the full .deb build: stage sources, create/unpack the upstream
# tarball, rewrite metadata, build the package and clean up.
main() {
    pre_build
    create_upstream_tarball
    unpack_upstream_tarball
    generate_copyright
    update_version
    update_install_path
    rename_tarball
    build_deb_package
    post_build
}
# Recreate the working build folder from the pristine debian template folder.
pre_build() {
    rm -fR ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
    cp -fR ${SCRIPT_DIR}/${DEB_FOLDER} ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
}
# Remove the working build folder after the package has been built.
post_build() {
    rm -fR ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
}
# Produce the upstream tarball and copy it next to this script.
create_upstream_tarball() {
    ${LINUX_INSTALLER_COMMON_DCAP_PCCS_DIR}/createTarball.sh
    cp ${LINUX_INSTALLER_COMMON_DCAP_PCCS_DIR}/output/${TARBALL_NAME} ${SCRIPT_DIR}
}
# Unpack the tarball inside the build folder, then drop the copied archive.
unpack_upstream_tarball() {
    pushd ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
    cp ../${TARBALL_NAME} .
    tar xvf ${TARBALL_NAME}
    rm -f ${TARBALL_NAME}
    popd
}
# Concatenate all bundled license files into debian/copyright.
generate_copyright() {
    pushd ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
    rm -f debian/copyright
    find package/licenses/ -type f -print0 | xargs -0 -n1 cat >> debian/copyright
    popd
}
# Print the distro codename (e.g. focal), preferring lsb_release and
# falling back to VERSION_CODENAME in /etc/os-release.
get_os_code() {
    OS_CODE=$(lsb_release -cs 2> /dev/null)
    # Quote the expansion: the unquoted original broke `[` if OS_CODE ever
    # contained whitespace and only worked by accident when it was empty.
    if [ -z "${OS_CODE}" ]; then
        OS_CODE=$(grep "VERSION_CODENAME" /etc/os-release 2> /dev/null | cut -d= -f2)
    fi
    echo "${OS_CODE}"
}
# Rewrite debian/changelog so the version becomes
# <SGX_VERSION>-<distro codename><original Debian revision>.
update_version() {
    pushd ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
    INS_VERSION=$(echo $(dpkg-parsechangelog |grep "Version" | cut -d: -f2))
    DEB_VERSION=$(echo ${INS_VERSION} | cut -d- -f2)
    FULL_VERSION=${SGX_VERSION}-$(get_os_code)${DEB_VERSION}
    sed -i "s#${INS_VERSION}#${FULL_VERSION}#" debian/changelog
    popd
}
# Substitute the @pkg_path@ placeholder in the debian packaging metadata.
update_install_path() {
    pushd ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
    sed -i "s#@pkg_path@#${DCAP_PCCS_PACKAGE_PATH}/${DCAP_PCCS_PACKAGE_NAME}#" debian/conffiles
    sed -i "s#@pkg_path@#${DCAP_PCCS_PACKAGE_PATH}/${DCAP_PCCS_PACKAGE_NAME}#" debian/postrm
    popd
}
# Rename the tarball to carry the SGX version instead of the PCCS version.
rename_tarball() {
    TARBALL_NAME_NEW_VERSION=$(echo ${TARBALL_NAME} | sed "s/${DCAP_PCCS_VERSION}/${SGX_VERSION}/")
    mv ${SCRIPT_DIR}/${TARBALL_NAME} ${SCRIPT_DIR}/${TARBALL_NAME_NEW_VERSION}
}
# Build the unsigned .deb with a reproducibility timestamp pinned to now.
build_deb_package() {
    pushd ${SCRIPT_DIR}/${DEB_BUILD_FOLDER}
    SOURCE_DATE_EPOCH="$(date +%s)" dpkg-buildpackage -us -uc
    popd
}
# Quote "$@" so arguments containing whitespace are forwarded to main intact.
main "$@"
|
import React from "react";
import { Box } from "@material-ui/core";
import { FormattedMessage, useIntl } from "react-intl";
import ManagementSummaryCard from "../ManagementSummaryCard";
import { formatFloat, formatFloat2Dec } from "../../../../utils/format";
/**
* @typedef {import('../../../../services/tradeApiClient.types').DefaultProviderGetObject} DefaultProviderGetObject
* @typedef {import("../../../../services/tradeApiClient.types").SpotProviderBalanceEntity} SpotProviderBalanceEntity
* @typedef {Object} DefaultProps
* @property {DefaultProviderGetObject} provider Provider Object.
 * @property {SpotProviderBalanceEntity} summary Provider balance summary.
*/
/**
* @param {DefaultProps} props Default props.
* @returns {JSX.Element} Component JSX.
*/
const ProfitSharingSummary = ({ provider, summary }) => {
const intl = useIntl();
return (
<Box
alignItems="center"
className="managementSummary"
display="flex"
flexDirection="row"
justifyContent="space-evenly"
>
<ManagementSummaryCard
foot=" "
icon="followers"
title={<FormattedMessage id="copyt.management.totalfollowers" />}
tooltip={intl.formatMessage({ id: "copyt.management.totalfollowers.tooltip" })}
value={provider.followers}
/>
<ManagementSummaryCard
foot=" "
icon="allocated"
quote={provider.copyTradingQuote}
title={<FormattedMessage id="copyt.management.wallet" />}
tooltip={intl.formatMessage({ id: "copyt.management.wallet.tooltip" })}
value={formatFloat2Dec(summary.totalWallet)}
/>
<ManagementSummaryCard
foot=" "
icon="profit"
quote={provider.copyTradingQuote}
title={<FormattedMessage id="copyt.management.profit" />}
value={`${formatFloat2Dec(summary.totalPnl)}`}
valueColor={summary.totalPnl > 0 ? "green" : summary.totalPnl < 0 ? "red" : ""}
/>
<ManagementSummaryCard
foot=" "
icon="balance"
quote={provider.copyTradingQuote}
title={<FormattedMessage id="copyt.management.invested" />}
tooltip={intl.formatMessage({ id: "copyt.management.invested.tooltip" })}
value={formatFloat2Dec(summary.totalInvested)}
/>
<ManagementSummaryCard
foot={`${provider.copyTradingQuote} ${formatFloat(
(summary.totalFree * summary.abstractPercentage) / 100,
)}`}
icon="balance"
quote="%"
title={<FormattedMessage id="copyt.management.available" />}
tooltip={intl.formatMessage({ id: "copyt.management.available.tooltip" })}
value={formatFloat2Dec(summary.abstractPercentage)}
/>
</Box>
);
};
export default ProfitSharingSummary;
|
import json
import logging
import os
from typing import List, Dict
from spotckup.decorators import timer
from spotckup.utils import save_image_from_url, do_request_validate_response, path_or_create
@timer
def get_list_from_paginated_response(url: str, token: str, verbose: bool = False) -> List[Dict]:
    """Collect every item from a paginated API endpoint.

    Follows the ``next`` links returned by each page until the API reports
    no further page, and returns the concatenation of all ``items`` arrays.
    """
    items: List = []
    log: logging.Logger = logging.getLogger('')
    headers = {"Authorization": "Bearer " + token}
    while url is not None:
        page = do_request_validate_response('GET', url, verbose=verbose, headers=headers).json()
        log.debug(page['next'])
        items.extend(page['items'])
        url = page['next']  # None on the last page, which ends the loop
    return items
def backup_playlist(authorization_token, dir_path, debug, verbose):
    """Back up the current user's Spotify playlists into dir_path.

    Downloads cover images into ``img/``, writes changed playlists'
    metadata to ``playlists-metadata.json`` and their tracks to
    ``playlist.json``. Playlists whose ``snapshot_id`` is already present
    in the cached metadata file are skipped.

    Args:
        authorization_token: OAuth bearer token; when None it is read from
            the saved ``access_token`` file inside the backup directory.
        dir_path: Backup directory (created if missing).
        debug: Enable debug-level logging.
        verbose: Forwarded to the HTTP helper for verbose request logging.
    """
    logging.basicConfig(format='[%(levelname)s] %(message)s')
    log: logging.Logger = logging.getLogger('')
    if debug: log.setLevel('DEBUG')
    path: str = path_or_create(dir_path)
    token: str = authorization_token
    if token is None:
        # Fall back to a previously saved access token.
        with open(f'{path}/access_token', 'r') as f:
            token = f.read()
    user_id: str = do_request_validate_response('GET', 'https://api.spotify.com/v1/me',
                                                verbose=verbose,
                                                headers={
                                                    "Authorization": "Bearer " + token
                                                }).json()['id']
    playlists: List[Dict] = get_list_from_paginated_response(f'https://api.spotify.com/v1/users/{user_id}/playlists', token,
                                                     verbose=verbose)
    print('Fetched {} playlists.'.format(str(len(playlists))))
    os.makedirs(os.path.dirname(f"{path}/img/"), exist_ok=True)
    # Save each playlist's first cover image, unless already downloaded.
    for playlist_meta in playlists:
        if playlist_meta['images']:
            if not os.path.exists(f'{path}/img/{playlist_meta["id"]}.jpg'):
                save_image_from_url(playlist_meta['images'][0]['url'], playlist_meta['id'], f'{path}/img')
    try:
        with open(f'{path}/playlists-metadata.json', 'r') as f:
            cached: List[str] = [p['snapshot_id'] for p in json.load(f)]
    except FileNotFoundError:
        # First run: nothing cached yet.
        cached: List[str] = []
    new_playlists: List[Dict] = list(filter(lambda p: p['snapshot_id'] not in cached, playlists))
    print("Number of playlists that are already up to date and do not need to be re-downloaded: " +
          str(len(playlists) - len(new_playlists)))
    # NOTE(review): this rewrites the metadata file with ONLY the changed
    # playlists, dropping the previously cached entries — confirm intended.
    with open(f'{path}/playlists-metadata.json', 'w+') as f:
        json.dump(new_playlists, f, indent=4)
    log.info('Wrote {} playlists metadata in playlists-metadata.json'.format(str(len(new_playlists))))
    # Keys are "<playlist id>#<snapshot id>" so a change in snapshot forces
    # the tracks to be re-fetched on the next run.
    with open(f'{path}/playlist.json', 'w') as f:
        json.dump({
            (p['id'] + '#' + p['snapshot_id']): get_list_from_paginated_response(
                'https://api.spotify.com/v1/playlists/{}/tracks?fields=next,items(is_local,track(name,uri,album(name),artists(name),artist(name)))'
                .format(p['id']), token, verbose=verbose)
            for p in new_playlists
        }, f, indent=4)
    print('The playlists backup has completed succesfully.')
|
package main
import "C"
import (
"bufio"
"encoding/json"
"fmt"
"os"
"storyboard/backend/config"
"storyboard/backend/database"
"storyboard/backend/interfaces"
"storyboard/backend/noterepo"
"storyboard/backend/photorepo"
"storyboard/backend/server"
"storyboard/backend/slog"
"storyboard/backend/wrapper"
)
// Package-level singletons shared between Backend_Start and Backend_Stop;
// inited guards against double start/stop from the host application.
var inited bool = false
var c interfaces.ConfigService
var db interfaces.DatabaseService
var noteRepo interfaces.NoteRepo
var photoRepo interfaces.PhotoRepo
var ss interfaces.RESTService
//export Backend_Start
func Backend_Start(app *C.char) {
	// Idempotence guard: the host app may call start more than once.
	if inited {
		slog.Printf("Already Started")
		return
	}
	inited = true
	slog.Println("Hello, Backend Server")
	// config service
	// `app` is the host-provided application directory (may be nil).
	appStr := ""
	if app != nil {
		appStr = C.GoString(app)
	}
	slog.SetPath(appStr)
	c = config.NewConfigService(appStr)
	// database service
	db = database.NewDatabaseService(c)
	db.Init()
	// note & photo repo
	noteRepo = noterepo.NewNoteRepo(db)
	photoRepo = photorepo.NewPhotoRepo(db)
	// server
	httpProxy := *wrapper.NewHTTPWrapper()
	netProxy := *wrapper.NewNetWrapper()
	ss = server.NewRESTServer(netProxy, httpProxy, c, noteRepo, photoRepo)
	// The REST server runs on its own goroutine; Backend_Stop shuts it down.
	go ss.Start()
}
//export Backend_Stop
func Backend_Stop() {
	slog.Println("Closing Backend Service")
	// server
	if ss != nil {
		ss.Stop()
		ss = nil
	}
	// Repositories hold no resources of their own; just drop the references.
	noteRepo = nil
	photoRepo = nil
	// database
	if db != nil {
		db.Close()
		db = nil
	}
	c = nil
	// Allow a subsequent Backend_Start to run again.
	inited = false
	slog.Println("Goodbye, Backend Server")
}
//export Backend_GetCurrentIP
func Backend_GetCurrentIP() *C.char {
	// NOTE(review): panics if called before Backend_Start (ss is nil) —
	// confirm host callers respect the start/stop lifecycle.
	var ip = ss.GetCurrentIP()
	return C.CString(ip)
}
//export Backend_SetCurrentIP
func Backend_SetCurrentIP(ip *C.char) {
	ss.SetCurrentIP(C.GoString(ip))
}
//export Backend_GetAvailableIPs
func Backend_GetAvailableIPs() *C.char {
	// Serialize the server's IP map as JSON; "{}" on marshalling failure.
	var ipsMap = ss.GetServerIPs()
	var ipsBytes, err = json.Marshal(ipsMap)
	if err != nil {
		return C.CString("{}")
	}
	var ipsStr = string(ipsBytes)
	return C.CString(ipsStr)
}
// console blocks until the user types a line starting with 'q', or stdin
// reaches EOF. The original ignored the read error, which made it index an
// empty string (panic) and spin forever once stdin was closed.
func console() {
	reader := bufio.NewReader(os.Stdin)
	fmt.Println("Press 'q' and 'enter' to quit")
	for {
		input, err := reader.ReadString('\n')
		if len(input) > 0 && input[0] == 'q' {
			break
		}
		if err != nil {
			// EOF or read failure: stop waiting instead of busy-looping.
			break
		}
	}
}
// getCurrentPwd returns the working directory as a C string, used as the
// backend's application directory when running standalone.
// The returned C string is owned by the caller.
func getCurrentPwd() *C.char {
	dir, err := os.Getwd()
	if err != nil {
		panic("can not find current pwd")
	}
	var c = C.CString(dir)
	return c
}
// Standalone entry point: start the backend rooted at the current working
// directory, block until the user quits via the console, then stop it.
func main() {
	var c = getCurrentPwd()
	Backend_Start(c)
	console()
	Backend_Stop()
}
|
<gh_stars>1-10
const url = require('url')
const Router = require('koa-router')
const ssr = require('../ssr')
const { addSlashes, stripBasename } = require('../utils/pathUtils')
const router = new Router()
// Runtime config comes from the SSR renderer; `basename` is the value
// configured in next.config.js.
const basename = ssr.renderOpts.runtimeConfig.basename || '/'
// Set the base route: every route below is prefixed with basename.
router.prefix(addSlashes(basename))
router.get('/', async (ctx) => {
  // The third argument below must mirror the /client/pages/ directory
  // structure, with /client/pages/ as the root.
  await ssr.render(ctx.req, ctx.res, '/', ctx.query)
  ctx.respond = false
})
// Match every other route. Rewrite the URL (strip basename) so the SSR
// request handler resolves files correctly.
router.get('*', async (ctx) => {
  const parsedUrl = url.parse(stripBasename(ctx.req.url, basename), true)
  await ssr.getRequestHandler()(ctx.req, ctx.res, parsedUrl)
  ctx.respond = false
})
module.exports = router
|
const express = require('express');
const mongoose = require('mongoose');
// Database
// NOTE(review): the connection string is hard-coded to a local instance;
// consider reading it from an environment variable for deployment.
mongoose.connect(
  'mongodb://localhost/movielibrary',
  { useNewUrlParser: true, useUnifiedTopology: true }
);
// Movie model defined via the shorthand schema form (title/year/synopsis).
const Movie = mongoose.model('Movie', {
  title: String,
  year: Number,
  synopsis: String
});
const app = express();
// List every movie. Database errors are now reported as HTTP 500 instead
// of being silently ignored (the original dropped `err` and responded
// with `docs` undefined).
app.get('/movies', (req, res) => {
  Movie.find({}, (err, docs) => {
    if (err) return res.status(500).json({ error: 'Database error' });
    res.json({ movies: docs });
  });
});
// Exact-title search via ?q=<title>.
app.get('/movies/search', (req, res) => {
  Movie.find({ title: req.query.q }, (err, docs) => {
    if (err) return res.status(500).json({ error: 'Database error' });
    res.json({ movies: docs });
  });
});
// Fetch a single movie by its MongoDB _id. Registered after /movies/search
// so the literal path "search" is not treated as an id.
app.get('/movies/:id', (req, res) => {
  Movie.findOne({ _id: req.params.id }, (err, doc) => {
    if (err) return res.status(500).json({ error: 'Database error' });
    res.json({ movie: doc });
  });
});
app.listen(3000, () => {
  console.log('Server running at http://localhost:3000');
});
package org.apache.doris.ldap;
import com.google.common.collect.Lists;
import mockit.Delegate;
import mockit.Expectations;
import mockit.Mocked;
import org.apache.doris.analysis.UserIdentity;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.DdlException;
import org.apache.doris.mysql.privilege.PaloAuth;
import org.apache.doris.mysql.privilege.PaloRole;
import org.apache.doris.qe.ConnectContext;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
/**
 * Unit tests for LDAP authentication: password verification, LDAP group
 * resolution into a temporary role, and the temp-user fallback when the
 * user does not exist in Doris. External collaborators (LdapClient,
 * PaloAuth, Catalog) are mocked with JMockit.
 */
public class LdapAuthenticateTest {
    private static final String DEFAULT_CLUSTER = "default_cluster";
    private static final String USER_NAME = "user";
    private static final String IP = "192.168.1.1";
    private static final String TABLE_RD = "palo_rd";
    // Captured by the mergeRolesNoCheckName delegate so tests can assert on
    // the role that authentication stored in the context.
    private PaloRole ldapGroupsPrivs;
    @Mocked
    private LdapClient ldapClient;
    @Mocked
    private LdapPrivsChecker ldapPrivsChecker;
    @Mocked
    private Catalog catalog;
    @Mocked
    private PaloAuth auth;
    @Before
    public void setUp() throws DdlException {
        // Baseline expectations: roles always exist, merging records the
        // merged role, and the catalog singleton resolves to our mocks.
        new Expectations() {
            {
                auth.doesRoleExist(anyString);
                minTimes = 0;
                result = true;
                auth.mergeRolesNoCheckName((List<String>) any, (PaloRole) any);
                minTimes = 0;
                result = new Delegate() {
                    void fakeMergeRolesNoCheckName(List<String> roles, PaloRole savedRole) {
                        ldapGroupsPrivs = savedRole;
                    }
                };
                catalog.getAuth();
                minTimes = 0;
                result = auth;
                Catalog.getCurrentCatalog();
                minTimes = 0;
                result = catalog;
            }
        };
    }
    // Stub LdapClient.checkPassword to succeed or fail.
    private void setCheckPassword(boolean res) {
        new Expectations() {
            {
                LdapClient.checkPassword(anyString, anyString);
                minTimes = 0;
                result = res;
            }
        };
    }
    // Stub LdapClient.checkPassword to throw.
    private void setCheckPasswordException() {
        new Expectations() {
            {
                LdapClient.checkPassword(anyString, anyString);
                minTimes = 0;
                result = new RuntimeException("exception");
            }
        };
    }
    // Stub LdapClient.getGroups to return one group (res=true) or none.
    private void setGetGroups(boolean res) {
        new Expectations() {
            {
                if (res) {
                    LdapClient.getGroups(anyString);
                    minTimes = 0;
                    result = new Delegate() {
                        List<String> fakeGetGroups(String user) {
                            List<String> list = new ArrayList<>();
                            list.add(TABLE_RD);
                            return list;
                        }
                    };
                } else {
                    LdapClient.getGroups(anyString);
                    minTimes = 0;
                    result = Lists.newArrayList();
                }
            }
        };
    }
    // Stub LdapClient.getGroups to throw.
    private void setGetGroupsException() {
        new Expectations() {
            {
                LdapClient.getGroups(anyString);
                minTimes = 0;
                result = new RuntimeException("exception");
            }
        };
    }
    // Stub whether the user resolves to an existing Doris identity.
    private void setGetCurrentUserIdentity(boolean res) {
        new Expectations() {
            {
                if (res) {
                    auth.getCurrentUserIdentity((UserIdentity) any);
                    minTimes = 0;
                    result = new UserIdentity(ClusterNamespace.getFullName(DEFAULT_CLUSTER, USER_NAME), IP);
                } else {
                    auth.getCurrentUserIdentity((UserIdentity) any);
                    minTimes = 0;
                    result = null;
                }
            }
        };
    }
    // Fresh connect context bound to the mocked catalog.
    private ConnectContext getContext() {
        ConnectContext context = new ConnectContext(null);
        context.setCatalog(catalog);
        context.setThreadLocalInfo();
        return context;
    }
    @Test
    public void testAuthenticate() {
        // Happy path: valid password, groups resolved, user known in Doris.
        ConnectContext context = getContext();
        setCheckPassword(true);
        setGetGroups(true);
        setGetCurrentUserIdentity(true);
        String qualifiedUser = ClusterNamespace.getFullName(DEFAULT_CLUSTER, USER_NAME);
        Assert.assertTrue(LdapAuthenticate.authenticate(context, "123", qualifiedUser));
        Assert.assertFalse(context.getIsTempUser());
        Assert.assertSame(ldapGroupsPrivs, context.getLdapGroupsPrivs());
    }
    @Test
    public void testAuthenticateWithWrongPassword() {
        // Wrong password: authentication fails, no privileges are stored.
        ConnectContext context = getContext();
        setCheckPassword(false);
        setGetGroups(true);
        setGetCurrentUserIdentity(true);
        String qualifiedUser = ClusterNamespace.getFullName(DEFAULT_CLUSTER, USER_NAME);
        Assert.assertFalse(LdapAuthenticate.authenticate(context, "123", qualifiedUser));
        Assert.assertFalse(context.getIsTempUser());
        Assert.assertNull(context.getLdapGroupsPrivs());
    }
    @Test
    public void testAuthenticateWithCheckPasswordException() {
        // LDAP failure during password check is treated as a failed login.
        ConnectContext context = getContext();
        setCheckPasswordException();
        setGetGroups(true);
        setGetCurrentUserIdentity(true);
        String qualifiedUser = ClusterNamespace.getFullName(DEFAULT_CLUSTER, USER_NAME);
        Assert.assertFalse(LdapAuthenticate.authenticate(context, "123", qualifiedUser));
        Assert.assertFalse(context.getIsTempUser());
        Assert.assertNull(context.getLdapGroupsPrivs());
    }
    @Test
    public void testAuthenticateGetGroupsNull() {
        // No groups: login succeeds but no LDAP privileges are attached.
        ConnectContext context = getContext();
        setCheckPassword(true);
        setGetGroups(false);
        setGetCurrentUserIdentity(true);
        String qualifiedUser = ClusterNamespace.getFullName(DEFAULT_CLUSTER, USER_NAME);
        Assert.assertTrue(LdapAuthenticate.authenticate(context, "123", qualifiedUser));
        Assert.assertFalse(context.getIsTempUser());
        Assert.assertNull(context.getLdapGroupsPrivs());
    }
    @Test
    public void testAuthenticateGetGroupsException() {
        // LDAP failure during group lookup fails the login.
        ConnectContext context = getContext();
        setCheckPassword(true);
        setGetGroupsException();
        setGetCurrentUserIdentity(true);
        String qualifiedUser = ClusterNamespace.getFullName(DEFAULT_CLUSTER, USER_NAME);
        Assert.assertFalse(LdapAuthenticate.authenticate(context, "123", qualifiedUser));
        Assert.assertFalse(context.getIsTempUser());
        Assert.assertNull(context.getLdapGroupsPrivs());
    }
    @Test
    public void testAuthenticateUserNotExistInDoris() {
        // Unknown Doris user: login succeeds as a temporary user with the
        // privileges derived from LDAP groups.
        ConnectContext context = getContext();
        setCheckPassword(true);
        setGetGroups(true);
        setGetCurrentUserIdentity(false);
        String qualifiedUser = ClusterNamespace.getFullName(DEFAULT_CLUSTER, USER_NAME);
        Assert.assertTrue(LdapAuthenticate.authenticate(context, "123", qualifiedUser));
        Assert.assertTrue(context.getIsTempUser());
        Assert.assertSame(ldapGroupsPrivs, context.getLdapGroupsPrivs());
    }
}
|
import sys
def parse_command_line_arguments(args):
    """Parse known CLI options from an argv-style list.

    Short options (-h, -v, -f, -t) are boolean flags recorded under their
    long spelling; --verbose/--output-text/--output-html take a value,
    given either as ``--opt=value`` or as the following argument.

    The original implementation had this inverted: a bare flag such as
    ``-h`` consumed the next argument as its "value" (raising IndexError
    when it was the last argument), and the ``'=' in arg`` branch could
    never trigger because it tested the bare option name.

    Args:
        args: Full argument vector; args[0] (the script name) is skipped.

    Returns:
        dict: Canonical option name -> True for flags, or the supplied
        string value for value options. Unknown arguments are ignored.

    Raises:
        ValueError: If a value option is given last with no value.
    """
    flag_aliases = {
        '-h': '--help',
        '-v': '--version',
        '-f': '--fail-fast',
        '-t': '--tap',
    }
    value_options = {'--verbose', '--output-text', '--output-html'}
    parsed_options = {}
    i = 1  # Start from index 1 to skip the script name
    while i < len(args):
        arg = args[i]
        if arg in flag_aliases:
            parsed_options[flag_aliases[arg]] = True
        elif arg in value_options:
            if i + 1 >= len(args):
                raise ValueError(f'option {arg} requires a value')
            parsed_options[arg] = args[i + 1]
            i += 1  # Skip the next argument: it is this option's value
        elif '=' in arg:
            option, value = arg.split('=', 1)
            if option in value_options:
                parsed_options[option] = value
        i += 1
    return parsed_options
# Example usage
# Parses this process's own argv and prints the recognized options.
command_line_args = sys.argv
parsed_options = parse_command_line_arguments(command_line_args)
print(parsed_options)
TERMUX_PKG_HOMEPAGE=https://github.com/adrianlopezroche/fdupes
TERMUX_PKG_DESCRIPTION="Duplicates file detector"
TERMUX_PKG_LICENSE="BSD"
TERMUX_PKG_VERSION=1.6.1
TERMUX_PKG_REVISION=1
TERMUX_PKG_SRCURL=https://github.com/adrianlopezroche/fdupes/archive/v${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=9d6b6fdb0b8419815b4df3bdfd0aebc135b8276c90bbbe78ebe6af0b88ba49ea
TERMUX_PKG_BUILD_IN_SRC=true
# No configure script upstream: point the Makefile's install PREFIX at the
# Termux prefix instead of /usr/local.
termux_step_configure() {
	sed -i "s,PREFIX = /usr/local,PREFIX = ${TERMUX_PREFIX}," "$TERMUX_PKG_SRCDIR/Makefile"
}
|
<reponame>gwonsungjun/koa-api-boilerplate<gh_stars>1-10
import { Model, DataTypes } from 'sequelize'
// Sequelize factory for the soft-deleted (paranoid) Product model.
export default sequelize => {
  class Product extends Model {
    // No associations yet; kept as an explicit extension point.
    static associate() {}
  }
  Product.init(
    {
      productId: { type: DataTypes.BIGINT(20), allowNull: false, primaryKey: true, autoIncrement: true },
      productName: { type: DataTypes.STRING(255), allowNull: false },
      description: { type: DataTypes.STRING(500), allowNull: false },
      price: { type: DataTypes.DECIMAL(20, 2), allowNull: false },
      isDisplay: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
      deleted: { type: DataTypes.BOOLEAN, allowNull: false, defaultValue: false },
      createdAt: { type: DataTypes.DATE, allowNull: false },
      updatedAt: { type: DataTypes.DATE, allowNull: true },
      deletedAt: { type: DataTypes.DATE, allowNull: true },
    },
    {
      tableName: 'product',
      modelName: 'product',
      // NOTE(review): current Sequelize spells these options `underscored`
      // and `timestamps`; `underscoredAll`/`timestamp` may be silently
      // ignored — confirm against the installed Sequelize version.
      underscoredAll: true,
      paranoid: true,
      timestamp: true,
      createdAt: 'createdAt',
      updatedAt: 'updatedAt',
      deletedAt: 'deletedAt',
      sequelize,
    },
  )
  return Product
}
|
from typing import List
def take_action(state: int, action: int) -> List[int]:
    """
    Returns the next state and the reward based on the current state and action.

    Args:
        state (int): The current state, a number between 1 and 25.
        action (int): The action to be taken, one of [-1, 1, 5, -5].

    Returns:
        List[int]: [next_state, reward] on success, or ['error', 'error']
        when the state, the action or the resulting state is out of range.
    """
    # Reject actions outside the documented action set; the original code
    # silently accepted any integer step size.
    if action not in (-1, 1, 5, -5):
        return ['error', 'error']
    if state < 1 or state > 25:
        return ['error', 'error']
    next_state = state + action
    # NOTE(review): on a 5x5 grid a +/-1 step can wrap between rows
    # (e.g. 5 -> 6); confirm whether row edges should also be errors.
    if next_state < 1 or next_state > 25:
        return ['error', 'error']
    reward = 1  # Placeholder reward; replace with domain-specific logic
    return [next_state, reward]
#!/bin/bash
# Spell-check the file given as $1 against the project dictionary; print
# any misspelled words and exit 1, or exit 0 when the file is clean.
# Quote "$1" and feed the file directly: the original `cat $1 |` broke on
# paths containing whitespace and spawned a needless process.
lines=$(aspell -p ./misc/aspell_dict -x -d en_GB list < "$1")
if [[ -z "$lines" ]]; then
    exit 0
fi
echo "$lines"
exit 1
|
def rock_paper_scissors(player1_choice, player2_choice):
    """Decide one round of rock-paper-scissors.

    Returns "Invalid input", "It's a tie", "Player 1 wins" or
    "Player 2 wins" depending on the two choices.
    """
    # Each key beats its value; also serves as the set of valid choices.
    beats = {"rock": "scissors", "scissors": "paper", "paper": "rock"}
    if player1_choice not in beats or player2_choice not in beats:
        return "Invalid input"
    if player1_choice == player2_choice:
        return "It's a tie"
    if beats[player1_choice] == player2_choice:
        return "Player 1 wins"
    return "Player 2 wins"
#!/bin/bash
current_dir=${DOTFILES_CURRENT_SOURCE_DIR}
DOTNVIM="$HOME/.config/nvim"
# Create .config/nvim folder if needed.
# Bug fix: the original `[ -d "$DOTNVIM" ] && mkdir -p $DOTNVIM` only ran
# mkdir when the directory ALREADY existed; `mkdir -p` alone is both the
# test and the action.
mkdir -p "$DOTNVIM"
# Install nvim
dotfiles_install_package nvim
# Install config
dotfiles_install_component "$current_dir" "$DOTNVIM"
# Install vim plugins
# NOTE(review): this opens an interactive nvim session; `+PlugInstall +qall`
# would run headlessly — confirm which behavior is intended.
nvim +PlugInstall
|
#!/bin/bash
# Script to run all VSA on all test c-programs.
# Please specify the path to llvm and clang in the environment variables
# VSA_CLANG_PATH and VSA_LLVM_PATH.
# if one argument passed: only analyze the passed program
if [ $# == 1 ] ; then
    ARRAY=($1)
else # run all
    ARRAY=($(ls -d *.c))
fi
# if no folder is existent
mkdir -p build
# CSV header for the per-program comparison results.
printf "Program, VSAbetter, LVIbetter, Equal\n"
# for all c-files...
for f in ${ARRAY[*]};
do
    # ... print file name
    printf "$f,"
    # ... clean up for old run
    rm -f build/$f.out
    rm -f build/$f.bc
    rm -f build/$f-opt.bc
    # ... compile (unoptimized bitcode, optnone disabled so opt can work)
    clang -O0 -emit-llvm $f -Xclang -disable-O0-optnone -c -o build/$f.bc
    #$VSA_CLANG_PATH/bin/clang -O0 -emit-llvm $f -Xclang -disable-O0-optnone -c -o build/$f.bc
    # ... run mem2reg optimization
    $VSA_LLVM_PATH/bin/opt -mem2reg < build/$f.bc > build/$f-opt.bc
    # ... disassemble optimized file
    $VSA_LLVM_PATH/bin/llvm-dis build/$f-opt.bc
    # ... run VSA #MAC ONLY .dylib ->.so
    $VSA_LLVM_PATH/bin/opt -load $VSA_LLVM_PATH/lib/llvm-vsa-benchmark.so -vsabenchmarkpass < build/$f-opt.bc > /dev/null
done
|
/*
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.oakmountain.tpa.cli;
import ch.oakmountain.tpa.parser.TpaParser;
import ch.oakmountain.tpa.solver.*;
import ch.oakmountain.tpa.web.TpaWebPersistor;
import com.google.common.base.Stopwatch;
import gurobi.GRBException;
import org.apache.commons.cli.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.codehaus.plexus.util.dag.CycleDetectedException;
import java.io.*;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.List;
import java.util.Properties;
import java.util.Set;
/**
*
*/
public class TpaCLI {
private static final Logger LOGGER = LogManager.getLogger();
    /**
     * Access to the tpa parser for integration tests.
     *
     * @param args command-line arguments, as accepted by {@code main}
     * @return the fully initialized properties capsule
     * @throws IOException if reading the input or properties file fails
     */
    protected static TpaCLIPropertiesCapsule capsule(String[] args) throws IOException {
        TpaCLIPropertiesCapsule tpaCLIPropertiesCapsule = new TpaCLIPropertiesCapsule(args).setup();
        return tpaCLIPropertiesCapsule;
    }
    /**
     * The tpa CLI entry point.
     *
     * @param args command-line arguments; at minimum the input FILE to read.
     * @throws java.io.IOException When there is an error processing the FILE.
     */
    public static void main(String[] args) throws CycleDetectedException, InfeasibleTPAException, IllegalAccessException, IOException, GRBException {
        doMain(args);
    }
    /**
     * Runs the full allocation pipeline: parse the macroscopic topology,
     * slot catalogue and applications, build the train path allocation
     * problem, solve it, and write the allocations back to the output file.
     *
     * @param args command-line arguments (see {@code tpaOptions})
     * @return the computed allocations, or null when setup only printed
     *         help/configuration and no run was requested
     */
    public static TrainPathAllocations doMain(String[] args) throws IOException, CycleDetectedException, IllegalAccessException, InfeasibleTPAException, GRBException {
        TpaCLIPropertiesCapsule tpaCLIPropertiesCapsule = new TpaCLIPropertiesCapsule(args).setup();
        if (tpaCLIPropertiesCapsule.capsuleSetupOnly()) return null;
        TpaParser tpaParser = tpaCLIPropertiesCapsule.getTpaParser();
        String outputDir = tpaCLIPropertiesCapsule.getOutputDir();
        boolean clean = tpaCLIPropertiesCapsule.isClean();
        boolean ignoreinfeasibleapps = tpaCLIPropertiesCapsule.isIgnoreinfeasibleapps();
        boolean skipweboutput = tpaCLIPropertiesCapsule.isSkipweboutput();
        boolean pathbased = tpaCLIPropertiesCapsule.isPathbased();
        String fileName = tpaCLIPropertiesCapsule.getFileName();
        Periodicity requestFilterLower = tpaCLIPropertiesCapsule.getRequestFilterLower();
        Periodicity requestFilterUpper = tpaCLIPropertiesCapsule.getRequestFilterUpper();
        int globalHardMaximumEarlierDeparture = tpaCLIPropertiesCapsule.getGlobalHardMaximumEarlierDeparture();
        int globalHardMinimumDwellTime = tpaCLIPropertiesCapsule.getGlobalHardMinimumDwellTime();
        int globalHardMaximumLaterArrival = tpaCLIPropertiesCapsule.getGlobalHardMaximumLaterArrival();
        try {
            Stopwatch stopwatchParseModel = TPAUtil.startStopWatch();
            // Parse macroscopic topology
            MacroscopicTopology macroscopicTopology = tpaParser.readMacroscopicTopology();
            if (!skipweboutput) {
                TpaWebPersistor.createGraph("topology", outputDir, macroscopicTopology.toCSV(), "<h1>Macroscopic Topology</h1>");
            }
            // Parse train path catalogue
            TrainPathSlotCatalogue trainPathSlotCatalogue = tpaParser.readTrainPathCatalogue(macroscopicTopology, clean, true); // TODO make cli option for correctTrainPathIds?
            LOGGER.info("Parsed " + trainPathSlotCatalogue.getNbPeriodicalSlots() + " periodical slots and " + trainPathSlotCatalogue.getNbSlots() + " slots ...");
            trainPathSlotCatalogue.logInfo();
            // Parse train path applications
            List<TrainPathApplication> periodicalTrainPathApplicationWithPeriodicities = tpaParser.readRequests(macroscopicTopology, requestFilterLower, requestFilterUpper, clean, true, globalHardMaximumEarlierDeparture, globalHardMinimumDwellTime, globalHardMaximumLaterArrival);
            LOGGER.info("Including the following " + periodicalTrainPathApplicationWithPeriodicities.size() + " unallocated requests...");
            for (TrainPathApplication r : periodicalTrainPathApplicationWithPeriodicities) {
                LOGGER.info(" Including request " + r.getName() + " on " + r.getNbDays() + " days: " + r.getPeriodicity().getStringRepresentation());
            }
            // Expand each periodical application into one simple application per weekday.
            Set<SimpleTrainPathApplication> simpleTrainPathApplications = new HashSet<>();
            for (TrainPathApplication request : periodicalTrainPathApplicationWithPeriodicities) {
                for (Integer day : request.getPeriodicity().getWeekDays()) {
                    SimpleTrainPathApplication r = request.getRequestOnWeekDay(day);
                    simpleTrainPathApplications.add(r);
                }
            }
            TPAUtil.stopStopWatch(stopwatchParseModel, "PARSE MODEL");
            TrainPathAllocationProblem tpa = new TrainPathAllocationProblem(macroscopicTopology, simpleTrainPathApplications, trainPathSlotCatalogue);
            // Statistics?
            if (!skipweboutput) {
                Stopwatch stopwatchStats = TPAUtil.startStopWatch();
                TrainPathAllocationProblemModel.statistics(tpa, outputDir);
                TPAUtil.stopStopWatch(stopwatchStats, "COMPILING STATS");
            }
            // Choose the model formulation requested on the command line.
            TrainPathAllocationProblemModel tpaModel;
            if (pathbased) {
                tpaModel = new TrainPathAllocationProblemModelPathBased(tpa);
            } else {
                tpaModel = new TrainPathAllocationProblemModelArcNode(tpa);
            }
            TrainPathAllocations result = tpaModel.solve(outputDir, ignoreinfeasibleapps, true);
            tpaParser.allocate(result.getAllocations());
            // see the file and die
            File file = new File(fileName);
            String outFileName = outputDir + File.separator + file.getName();
            tpaParser.writeFile(outFileName);
            // TODO table with computation times?
            return result;
        } catch (InfeasibleTPAException e) {
            LOGGER.error(e);
            /*Map<String, GraphCSV> csvs = e.getCsvs();
            if (csvs != null) {
                for (String slotName : csvs.keySet()) {
                    TpaWebPersistor.createGraph(slotName, outputDir, csvs.get(slotName), "<h1>IIS Analysis for Slot" + slotName + "</h1>");
                }
            }*/
            throw e;
        }
    }
    /** Prints CLI usage for all supported options. */
    private static void printHelp(Options options) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("tpa (train path allocation)", options);
    }
private enum tpaOptions {
CLEAN("clean", "start with empty allocation"),
IGNOREINFEASIBLEAPPLICATIONS("ignoreinfeasibleapps", "ignore infeasible train path applications and try to allocation feasible applications"),
PATHBASED("pathbased", "use path-based model, default is arc-node model"),
SKIPWEBOUTPUT("skipweboutput", "do not create html pages (may save time)"),
CLEANOUTPUT("cleanoutput", "delete dthe output dir"),
HELP("help", "show this help message and terminate"),
CONFIGHELP("confighelp", "show file format configuration options and terminate"),
CONFIG("showconfig", "show effective file format configuration"),
FILE("file", true, "file", "input file", true, null),
PROPERTIES("properties", true, "file", "properties file (in ISO 8859-1 encoding)", true, null),
OUTPUT("output", true, "file", "output directory", true, "output"),
REQUESTFILTER("requestfilter", true, "pattern", "Pattern of the form 01??000", true, Periodicity.allPattern),
ENUM_MAX_ITERATION("max_iter", true, "nb", "maximum iterations", true, "5"),
GLOBALHARDMAXIMUMLATERARRIVAL("globalHardMaximumLaterArrival", true, "nb", "global hard maximum later arrival (minutes)", true, "0"),
GLOBALHARDMAXIMUMEARLIERDEPARTURE("globalHardMaximumEarlierDeparture", true, "nb", "global hard maximum earlier departure (minutes)", true, "0"),
GLOBALHARDMINIMUMDWELLTIME("globalHardMinimumDwellTime", true, "nb", "global hard minimum dwell time", true, "0");
private final String description;
private boolean hasArg = false;
private String opt;
private String argName;
private boolean optionalArg = true;
private String defaultValue = null;
tpaOptions(String opt, boolean hasArg, String argName, String description, boolean optionalArg, String defaultValue) {
this.opt = opt;
this.hasArg = hasArg;
this.argName = argName;
this.description = description;
this.optionalArg = optionalArg;
this.defaultValue = defaultValue;
}
tpaOptions(String opt, String description) {
this.opt = opt;
this.description = description;
}
public String getOpt() {
return opt;
}
public boolean isHasArg() {
return hasArg;
}
/**
 * Help text for this option, decorated with "(mandatory)" for required
 * options or with the default value when one is defined.
 */
public String getDescription() {
    if (!optionalArg) {
        return description + " (mandatory)";
    }
    if (StringUtils.isNotBlank(defaultValue)) {
        // Fixed: a space before the parenthesis, so help output reads
        // "output directory (default value: output)" instead of
        // "output directory(default value: output)".
        return description + " (default value: " + defaultValue + ")";
    }
    return description;
}
/** @return display name used for the option's argument in help output. */
public String getArgName() {
    return this.argName;
}
/** The string form of an option is simply its command-line name. */
@Override
public String toString() {
    return this.opt;
}
/** @return false when the option must be present on the command line. */
public boolean isOptional() {
    return this.optionalArg;
}
/** @return value assumed when the option is absent; may be null. */
public String getDefaultValue() {
    return this.defaultValue;
}
}
/**
 * Parses the command line and the application properties file and holds the
 * resulting configuration. The monolithic setup() has been decomposed into
 * small helpers; repeated "option value or default" blocks are factored into
 * optionValue()/intOptionValue().
 */
static class TpaCLIPropertiesCapsule {

    private boolean capsuleSetupOnly;      // true when setup() only printed help
    private String[] args;                 // raw command-line arguments
    private boolean clean;
    private boolean skipweboutput;
    private String fileName;               // input file (-file)
    private String outputDir;              // output directory (-output)
    private Periodicity requestFilterLower;
    private Periodicity requestFilterUpper;
    private TpaParser tpaParser;
    private int dagEnumeration_maxIter;
    private int globalHardMaximumLaterArrival;
    private int globalHardMaximumEarlierDeparture;
    private int globalHardMinimumDwellTime;
    private boolean ignoreinfeasibleapps;
    private boolean pathbased;

    /** @param args raw command-line arguments, parsed later by {@link #setup()}. */
    public TpaCLIPropertiesCapsule(String... args) {
        this.args = args;
    }

    public boolean isPathbased() {
        return pathbased;
    }

    public boolean isIgnoreinfeasibleapps() {
        return ignoreinfeasibleapps;
    }

    public boolean isSkipweboutput() {
        return skipweboutput;
    }

    public int getDagEnumeration_maxIter() {
        return dagEnumeration_maxIter;
    }

    /** @return true when setup() handled -help/-confighelp and nothing should run. */
    boolean capsuleSetupOnly() {
        return capsuleSetupOnly;
    }

    public boolean isClean() {
        return clean;
    }

    public String getFileName() {
        return fileName;
    }

    public String getOutputDir() {
        return outputDir;
    }

    public Periodicity getRequestFilterLower() {
        return requestFilterLower;
    }

    public Periodicity getRequestFilterUpper() {
        return requestFilterUpper;
    }

    public TpaParser getTpaParser() {
        return tpaParser;
    }

    /**
     * Parses the command line, loads the properties file and materialises every
     * setting. Returns this capsule; when only help output was requested,
     * {@link #capsuleSetupOnly()} is true afterwards.
     *
     * @throws IOException              when the output directory cannot be prepared
     *                                  or the properties file cannot be read
     * @throws IllegalArgumentException when a mandatory option is missing
     */
    public TpaCLIPropertiesCapsule setup() throws IOException {
        Options options = buildOptions();
        CommandLine commandLine = parseCommandLine(options);
        if (commandLine.hasOption(tpaOptions.HELP.getOpt())) {
            printHelp(options);
            capsuleSetupOnly = true;
            return this;
        }
        // Load parser properties (mandatory, ISO 8859-1 encoded).
        Properties applicationProps = loadApplicationProperties(commandLine, options);
        if (commandLine.hasOption(tpaOptions.CONFIGHELP.getOpt())) {
            printConfigHelp(applicationProps);
            capsuleSetupOnly = true;
            return this;
        }
        clean = commandLine.hasOption(tpaOptions.CLEAN.getOpt());
        ignoreinfeasibleapps = commandLine.hasOption(tpaOptions.IGNOREINFEASIBLEAPPLICATIONS.getOpt());
        if (!commandLine.hasOption(tpaOptions.FILE.getOpt())) {
            printHelp(options);
            throw new IllegalArgumentException("Input file has not been specified.");
        }
        fileName = commandLine.getOptionValue(tpaOptions.FILE.getOpt());
        outputDir = optionValue(commandLine, tpaOptions.OUTPUT);
        prepareOutputDirectory(commandLine);
        skipweboutput = commandLine.hasOption(tpaOptions.SKIPWEBOUTPUT.getOpt());
        pathbased = commandLine.hasOption(tpaOptions.PATHBASED.getOpt());
        Pair<Periodicity, Periodicity> periodicityBounds =
                Periodicity.parsePeriodicityBounds(optionValue(commandLine, tpaOptions.REQUESTFILTER));
        requestFilterLower = periodicityBounds.first;
        requestFilterUpper = periodicityBounds.second;
        dagEnumeration_maxIter = intOptionValue(commandLine, tpaOptions.ENUM_MAX_ITERATION);
        globalHardMaximumEarlierDeparture = intOptionValue(commandLine, tpaOptions.GLOBALHARDMAXIMUMEARLIERDEPARTURE);
        globalHardMaximumLaterArrival = intOptionValue(commandLine, tpaOptions.GLOBALHARDMAXIMUMLATERARRIVAL);
        globalHardMinimumDwellTime = intOptionValue(commandLine, tpaOptions.GLOBALHARDMINIMUMDWELLTIME);
        tpaParser = new TpaParser(applicationProps, fileName);
        capsuleSetupOnly = false;
        return this;
    }

    /** Builds the commons-cli Options object from the tpaOptions enum. */
    private static Options buildOptions() {
        Options options = new Options();
        for (tpaOptions tpaOption : tpaOptions.values()) {
            if (tpaOption.isHasArg()) {
                Option option
                        = OptionBuilder.withArgName(tpaOption.getArgName())
                        .hasArg()
                        .withDescription(tpaOption.getDescription())
                        .isRequired(!tpaOption.isOptional())
                        .create(tpaOption.getOpt());
                options.addOption(option);
            } else {
                options.addOption(tpaOption.getOpt(), tpaOption.getDescription());
            }
        }
        return options;
    }

    /** Parses args, printing usage and wrapping any ParseException. */
    private CommandLine parseCommandLine(Options options) {
        CommandLineParser parser = new DefaultParser();
        try {
            return parser.parse(options, args);
        } catch (ParseException e) {
            printHelp(options);
            throw new RuntimeException("Error parsing arguments!", e);
        }
    }

    /** Loads the mandatory -properties file; the stream is closed even on failure. */
    private Properties loadApplicationProperties(CommandLine commandLine, Options options) throws IOException {
        if (!commandLine.hasOption(tpaOptions.PROPERTIES.getOpt())) {
            printHelp(options);
            throw new IllegalArgumentException("Properties file has not been specified.");
        }
        String appPropertiesFileName = commandLine.getOptionValue(tpaOptions.PROPERTIES.getOpt());
        if (!Files.exists(FileSystems.getDefault().getPath(appPropertiesFileName))) {
            throw new IllegalArgumentException("Cannot find FILE \"" + appPropertiesFileName + "\" passed to \"-" + tpaOptions.PROPERTIES.getOpt() + "\" or the default value \"" + tpaOptions.PROPERTIES.getDefaultValue() + "\"");
        }
        Properties applicationProps = new Properties();
        InputStream in = new FileInputStream(appPropertiesFileName);
        try {
            applicationProps.load(in);
        } finally {
            // Previously the stream leaked when load() threw.
            in.close();
        }
        return applicationProps;
    }

    /** Prints the effective file-format configuration to stdout. */
    private static void printConfigHelp(Properties applicationProps) {
        PrintWriter pw = new PrintWriter(System.out); // NOSONAR
        for (TpaParser.tpaProps tpaProp : TpaParser.tpaProps.values()) {
            pw.println(tpaProp.name() + "=" + applicationProps.getProperty(tpaProp.name()));
        }
        pw.flush();
        pw.close();
    }

    /** Creates the output directory, wiping it first when -cleanoutput is set. */
    private void prepareOutputDirectory(CommandLine commandLine) throws IOException {
        Path outputDirPath = Paths.get(outputDir);
        if (!Files.exists(outputDirPath)) {
            try {
                Files.createDirectories(outputDirPath);
            } catch (IOException e) {
                LOGGER.error(e);
                throw e;
            }
        }
        if (commandLine.hasOption(tpaOptions.CLEANOUTPUT.getOpt())) {
            FileUtils.deleteDirectory(new File(outputDir));
            Files.createDirectory(outputDirPath);
        }
    }

    /** The option's command-line value, or its declared default when absent. */
    private static String optionValue(CommandLine commandLine, tpaOptions option) {
        return commandLine.hasOption(option.getOpt())
                ? commandLine.getOptionValue(option.getOpt())
                : option.getDefaultValue();
    }

    /** Integer variant of {@link #optionValue(CommandLine, tpaOptions)}. */
    private static int intOptionValue(CommandLine commandLine, tpaOptions option) {
        return Integer.parseInt(optionValue(commandLine, option));
    }
}
}
|
<reponame>Oddert/graphql-example-ref-netninja<gh_stars>0
const mongoose = require('mongoose');

// Shape of an author document: a display name and an age.
const authorSchema = new mongoose.Schema({
  name: String,
  age: Number,
});

// Model name kept verbatim — existing collections/refs depend on it.
module.exports = mongoose.model('graphql-netninja-author', authorSchema);
|
<gh_stars>0
require 'rails_helper'
# Controller specs for AlbumsController: index responds OK; create persists
# valid albums and re-renders :new for invalid ones.
RSpec.describe AlbumsController, type: :controller do
  describe "GET #index" do
    it "responds successfully with an HTTP 200 status code" do
      get :index
      # NOTE(review): `be_success` is deprecated in newer rspec-rails in
      # favour of `be_successful` — confirm the project's gem versions
      # before changing.
      expect(response).to be_success
      expect(response).to have_http_status(200)
    end
  end

  describe "POST #create" do
    # positive test - album params are valid
    context "Valid album params" do
      # A complete, valid attribute hash for Album.
      let(:album_params) do
        {
          album: {
            title: 'a title',
            label_code: 'b label',
            format: 'c format',
            released_year: '1985'
          }
        }
      end

      it "creates an Album record" do
        # NOTE(review): positional request params are the pre-Rails-5
        # controller-test style; Rails 5+ expects `post :create, params: ...`.
        post :create, album_params
        expect(Album.count).to eq 1
      end

      it "redirects to the album show page" do
        post :create, album_params
        expect(subject).to redirect_to(album_path(assigns(:album)))
      end
    end

    # negative test - album params are invalid
    context "Invalid album params" do
      let(:album_params) do
        {
          album: { # we know this is invalid because it's missing the :title key
            label_code: 'b label',
            format: 'c format',
            released_year: '1985'
          }
        }
      end

      it "does not persist invalid records" do
        post :create, album_params
        expect(Album.count).to eq 0
      end

      it "renders the :new view (to allow users to fix invalid data)" do
        post :create, album_params
        expect(response).to render_template("new")
      end
    end
  end
end
|
const express = require('express');

const app = express();

// Static catalogue rendered by the /books route.
const books = [
  { title: 'The Catcher in the Rye', author: 'J. D. Salinger' },
  { title: 'To Kill a Mockingbird', author: 'Harper Lee' },
  { title: '1984', author: 'George Orwell' },
];

// GET /books — renders the "books" view with the catalogue.
// NOTE(review): assumes a view engine is configured elsewhere — confirm.
app.get('/books', (req, res) => {
  res.render('books', { books });
});

app.listen(3000);
package com.lai.mtc.mvp.ui.cartoon.fragment;
import android.os.Bundle;
import com.lai.mtc.mvp.base.impl.BaseFragment;
/**
* @author Lai
* @time 2018/1/14 15:44
* @describe 主页动漫fragment
*/
public class CartoonMainFragment extends BaseFragment {
    /**
     * @return 0 — no layout resource is wired up yet; presumably a
     *         placeholder until the cartoon main UI exists (TODO confirm).
     */
    @Override
    public int getLayoutResId() {
        return 0;
    }

    /** Currently empty: no view initialisation is performed. */
    @Override
    public void finishCreateView(Bundle state) {
    }
}
|
import Joi from 'joi';

// siren: configuration for the watcher saved object.

// Joi validation schema for a single watcher document.
const watcherSchema = Joi.object().keys({
  title: Joi.string(),
  username: Joi.string(),
  input: Joi.any(),
  actions: Joi.any(),
  transform: Joi.any(),
  condition: Joi.any(),
  report: Joi.boolean(),
  disable: Joi.boolean(),
  save_payload: Joi.boolean(),
  impersonate: Joi.boolean(),
  spy: Joi.boolean(),
  trigger: Joi.any(),
  wizard: Joi.any(),
  dashboard_link: Joi.string(),
});

const WatchConfiguration = {
  type: 'sentinl-watcher',
  title: 'watcher_title',
  schema: watcherSchema,
};

export default WatchConfiguration;
|
<reponame>tempora-mutantur/cobertura-plugin<gh_stars>10-100
package hudson.plugins.cobertura.targets;
import java.io.Serializable;
/**
* Describes how {@link CoveragePaint} can be aggregated up a {@link CoverageResult} tree.
*
* @author <NAME>
* @since 29-Aug-2007 18:13:22
*/
public class CoveragePaintRule implements Serializable {

    private static final long serialVersionUID = 1039455096344866574L;

    /** The element kind this rule is keyed on. */
    private final CoverageElement element;

    /** How paint recorded for {@link #element} is rolled up the result tree. */
    private final CoverageAggregationMode mode;

    public CoveragePaintRule(CoverageElement element, CoverageAggregationMode mode) {
        this.element = element;
        this.mode = mode;
    }

    /** Built-in rules: method paint stays local, class paint is summed upward. */
    private static final CoveragePaintRule[] INITIAL_RULESET = {
            new CoveragePaintRule(CoverageElement.JAVA_METHOD, CoverageAggregationMode.NONE),
            new CoveragePaintRule(CoverageElement.JAVA_CLASS, CoverageAggregationMode.SUM),
    };

    /**
     * Creates a {@link CoveragePaint} when some rule matches the element
     * directly, or matches its parent with an aggregating mode; otherwise null.
     */
    public static CoveragePaint makePaint(CoverageElement element) {
        for (CoveragePaintRule rule : INITIAL_RULESET) {
            boolean matchesElement = element == rule.element;
            if (matchesElement
                    || (element == rule.element.getParent()
                            && rule.mode != CoverageAggregationMode.NONE)) {
                return new CoveragePaint(element);
            }
        }
        return null;
    }

    /** Whether paint gathered on this element should be merged into its parent. */
    public static boolean propagatePaintToParent(CoverageElement element) {
        for (CoveragePaintRule rule : INITIAL_RULESET) {
            if (element != rule.element) {
                continue;
            }
            return rule.mode != CoverageAggregationMode.NONE;
        }
        return false;
    }
}
|
<gh_stars>0
/*!
Simple unit testing for c/c++
Copyright 2012, <NAME>.
Licence: Apache 2.0
Purpose: facilitate the unit testing for programs written in c/c++
Use:
Define your tests as functions that don't take any arguments
but return "int". They should return 1 if successful otherwise, 0.
Then add them to a test suite. Finally,
if all you want to run are the tests inside the same .c/.cpp translation
unit add call to TINYTEST_MAIN_SINGLE_SUITE which takes as argument
the name of the test suite to run.
Otherwise, if you wish to gather test suites across translation units
you need to declare your test suites using TINYTEST_DECLARE_SUITE
and include the .h with the declarations inside a .c/.cpp file
which will call TINYTEST_START_MAIN/TINYTEST_END_MAIN in which
you will put all test suites.
Please see examples for more details.
Tests are meant to be in the same place as the tested code.
Declare TINYTEST_NOTESTING if you don't want to compile the test code.
Usually you will declare TINYTEST_NOTESTING together with NDEBUG.
*/
#ifndef __TINYTEST_H__
#define __TINYTEST_H__
#include <stdlib.h>
#include <stdio.h>
/* Common signature for tests and setup/teardown hooks. The argument is the
   registered test name; tests return 1 on success, 0 on failure. */
typedef int (*TinyTestFunc)(const char *);
/* One registered test: a node of a suite's singly linked list. */
typedef struct TinyTestStruct
{
TinyTestFunc m_func; /* the test function itself */
const char* m_name; /* display name (stringized function name) */
TinyTestFunc m_setup; /* optional hook run before the test; may be NULL */
TinyTestFunc m_teardown; /* optional hook run after the test; may be NULL */
struct TinyTestStruct* m_next; /* next test in the suite */
} TinyTest;
/* A suite: a named list of tests, itself a node of the registry list. */
typedef struct TinyTestSuiteStruct
{
struct TinyTestStruct* m_head; /* NOTE(review): appears unused — the macros only populate m_headTest; confirm before removing */
const char* m_name; /* suite name (stringized) */
struct TinyTestStruct* m_headTest; /* head of this suite's test list */
struct TinyTestSuiteStruct* m_next; /* next suite in the registry */
} TinyTestSuite;
/* Root object: linked list of all registered suites. */
typedef struct TinyTestRegistryStruct
{
TinyTestSuite* m_headSuite;
} TinyTestRegistry;
#ifndef TINYTEST_NOTESTING
/* Assertion macros. Each expands to an inline check that, on failure, prints
   a highlighted "ERROR: file:line ..." diagnostic to stderr, optionally
   prints a user message, and makes the enclosing test function return 0.
   They therefore only work inside a test function returning int. */
/* Fail when `actual` is truthy. */
#define TINYTEST_FALSE_MSG(actual, msg) \
if ( (actual) ) \
{ \
fprintf( \
stderr, \
"\x1b[1m" \
"\x1b[31m" \
"ERROR: " \
"\x1b[0m" \
"%s:%d false, actual: %s\n", \
__FILE__, __LINE__, #actual); \
if ( msg ) printf(msg); \
return 0; \
}
/* No-message variant. */
#define TINYTEST_FALSE(actual) \
TINYTEST_FALSE_MSG(actual, NULL)
/* XCTest-style alias, eases porting tests from Xcode. */
#define XCTAssertFalse(actual) \
TINYTEST_FALSE_MSG(actual, NULL)
/* Fail when `actual` is falsy. */
#define TINYTEST_TRUE_MSG(actual, msg) \
if ( !(actual) ) \
{ \
fprintf( \
stderr, \
"\x1b[1m" \
"\x1b[31m" \
"ERROR: " \
"\x1b[0m" \
"%s:%d true, actual: %s\n", \
__FILE__, __LINE__, #actual \
); \
if ( msg ) printf(msg); \
return 0; \
}
#define TINYTEST_TRUE(actual) \
TINYTEST_TRUE_MSG(actual, NULL)
/* XCTest-style alias. */
#define XCTAssertTrue(actual) \
TINYTEST_TRUE_MSG(actual, NULL)
/* Fail when expected != actual (compared with the != operator). */
#define TINYTEST_EQUAL_MSG(expected, actual, msg) \
if ( (expected) != (actual) ) \
{ \
fprintf( \
stderr, \
"\x1b[1m" \
"\x1b[31m" \
"ERROR: " \
"\x1b[0m" \
"%s:%d expected %s, actual: %s\n", \
__FILE__, __LINE__, #expected, #actual); \
if ( msg ) printf(msg); \
return 0; \
}
#define TINYTEST_EQUAL(expected, actual) \
TINYTEST_EQUAL_MSG(expected, actual, NULL)
/* Fail when two C strings differ (strcmp). NOTE(review): relies on
   strcmp but this header does not include <string.h> itself — confirm
   callers include it. */
#define TINYTEST_STR_EQUAL_MSG(expected, actual, msg) \
if ( strcmp((expected), (actual)) ) \
{ \
fprintf( \
stderr, \
"\x1b[1m" \
"\x1b[31m" \
"ERROR: " \
"\x1b[0m" \
"%s:%d expected \"%s\", actual: \"%s\"\n", \
__FILE__, __LINE__, expected, actual); \
if ( msg ) printf(msg); \
return 0; \
}
#define TINYTEST_STR_EQUAL(expected, actual) \
TINYTEST_STR_EQUAL_MSG(expected, actual, NULL)
/* Generic boolean assertion. */
#define TINYTEST_ASSERT_MSG(assertion, msg) \
if ( !(assertion) ) \
{ \
fprintf( \
stderr, \
"\x1b[1m" \
"\x1b[31m" \
"ERROR: " \
"\x1b[0m" \
"%s:%d assertion failed: \"%s\"\n", \
__FILE__, __LINE__, #assertion); \
if ( msg ) printf(msg); \
return 0; \
}
#define TINYTEST_ASSERT(assertion) \
TINYTEST_ASSERT_MSG(assertion, NULL)
/* Suite registration and runner scaffolding. FIX: TINYTEST_RUN_SUITE
   contained a mis-decoded HTML entity — "(®istry)" — restored to the
   intended "(&registry)". */
/* Declares a suite's registration function for use across translation units. */
#define TINYTEST_DECLARE_SUITE(suiteName) \
void Suite##suiteName(TinyTestRegistry* registry)
/* Opens a suite registration function and allocates the suite node. */
#define TINYTEST_START_SUITE(suiteName) \
void Suite##suiteName(TinyTestRegistry* registry) \
{ \
TinyTestSuite* suite = (TinyTestSuite*)malloc(sizeof(TinyTestSuite)); \
suite->m_name = #suiteName; \
suite->m_headTest = NULL; \
suite->m_next = NULL
/* Registers one test (setup/teardown may be NULL) by pushing it onto the
   suite's singly linked list. */
#define TINYTEST_ADD_TEST(test,setup,teardown) \
TinyTest* test##decl = (TinyTest*)malloc(sizeof(TinyTest)); \
test##decl->m_func = test; \
test##decl->m_name = #test; \
test##decl->m_setup = setup; \
test##decl->m_teardown = teardown; \
test##decl->m_next = suite->m_headTest; \
suite->m_headTest = test##decl
/* Links the finished suite into the registry and closes the function. */
#define TINYTEST_END_SUITE() \
suite->m_next = registry->m_headSuite; \
registry->m_headSuite = suite; \
}
/* Opens main() and initialises an empty registry. */
#define TINYTEST_START_MAIN() \
int main(int argc, char* argv[]) \
{ \
TinyTestRegistry registry; \
registry.m_headSuite = NULL
/* Invokes a suite's registration function (fixed: was "(®istry)"). */
#define TINYTEST_RUN_SUITE(suiteName) \
Suite##suiteName(&registry)
/* Frees every test and suite node in the registry. */
#define TINYTEST_INTERNAL_FREE_TESTS() \
{ \
TinyTestSuite* s = registry.m_headSuite; \
TinyTestSuite* ss = NULL; \
for ( ; s; s = ss ) \
{ \
ss = s->m_next; \
TinyTest* t = s->m_headTest; \
TinyTest* tt = NULL; \
for ( ; t; t = tt ) \
{ \
tt = t->m_next; \
free(t); \
} \
free(s); \
} \
}
/* Runs every test of every suite ('.' = pass, 'x' = fail) and prints totals. */
#define TINYTEST_INTERNAL_RUN_TESTS() \
{ \
int iRc = 0; \
int okTests = 0; \
int failedTests = 0; \
TinyTestSuite* s = registry.m_headSuite; \
for ( ; s; s = s->m_next ) \
{ \
TinyTest* t = s->m_headTest; \
for ( ; t; t = t->m_next ) \
{ \
if ( t->m_setup ) \
{ \
(*t->m_setup)(t->m_name); \
} \
iRc = (*t->m_func)(t->m_name); \
if ( t->m_teardown ) \
{ \
(*t->m_teardown)(t->m_name); \
} \
if ( iRc ) \
{ \
printf("."); \
++okTests; \
} \
else \
{ \
printf("x"); \
++failedTests; \
} \
} \
} \
printf("\nOK: %d", okTests); \
if ( failedTests ) \
{ \
printf( \
"\x1b[1m" \
"\x1b[31m" \
" FAILED: %d" \
"\x1b[0m", \
failedTests \
); \
} \
printf("\n"); \
}
/* Runs all suites, reports, frees, and closes main(). */
#define TINYTEST_END_MAIN() \
TINYTEST_INTERNAL_RUN_TESTS(); \
printf("\n"); \
TINYTEST_INTERNAL_FREE_TESTS() \
}
/* Convenience: a complete main() running a single suite. */
#define TINYTEST_MAIN_SINGLE_SUITE(suiteName) \
TINYTEST_START_MAIN(); \
TINYTEST_RUN_SUITE(suiteName); \
TINYTEST_END_MAIN();
#else /* TINYTEST_NOTESTING */
/* No-op stand-ins used when TINYTEST_NOTESTING is defined: assertions
   collapse to (void)0 and the suite/main scaffolding compiles away. */
#define TINYTEST_FALSE_MSG(actual, msg) (void)0
#define TINYTEST_FALSE(actual) (void)0
#define TINYTEST_TRUE_MSG(actual, msg) (void)0
#define TINYTEST_TRUE(actual) (void)0
#define TINYTEST_EQUAL_MSG(expected, actual, msg) (void)0
#define TINYTEST_EQUAL(expected, actual) (void)0
#define TINYTEST_STR_EQUAL_MSG(expected, actual, msg) (void)0
#define TINYTEST_STR_EQUAL(expected, actual) (void)0
#define TINYTEST_ASSERT_MSG(assertion, msg) (void)0
#define TINYTEST_ASSERT(assertion) (void)0
/* FIX: stub was missing entirely; headers using TINYTEST_DECLARE_SUITE
   failed to compile with testing disabled. A plain declaration is harmless
   because the typedefs above are defined unconditionally. */
#define TINYTEST_DECLARE_SUITE(suiteName) \
void Suite##suiteName(TinyTestRegistry* registry)
#define TINYTEST_START_SUITE(suiteName)
/* FIX: must accept the same three arguments as the testing variant
   (test, setup, teardown); the old one-argument form broke call sites
   compiled with TINYTEST_NOTESTING. */
#define TINYTEST_ADD_TEST(test, setup, teardown)
#define TINYTEST_END_SUITE()
#define TINYTEST_START_MAIN()
#define TINYTEST_RUN_SUITE(suiteName)
#define TINYTEST_INTERNAL_FREE_TESTS()
#define TINYTEST_INTERNAL_RUN_TESTS()
#define TINYTEST_END_MAIN()
#define TINYTEST_MAIN_SINGLE_SUITE(suiteName)
#endif /* TINYTEST_NOTESTING */
#endif
|
<reponame>gisikw/hashtag
require 'test_helper'
class InterpreterTest < Minitest::Test
  # Runs +code+ through the full lexer -> parser -> interpreter pipeline
  # and asserts the evaluated value equals +expected+.
  def assert_evaluates_to(code, expected)
    tokens = Hashtag::Lexer.new(code).tokens
    ast = Hashtag::Parser.new.parse(tokens)
    assert_equal expected, Hashtag::Interpreter.new.eval(ast)
  end

  def test_basic_identity
    assert_evaluates_to "3\n", 3
  end

  def test_various_expresions
    assert_evaluates_to "3 + 4\n", 7
    assert_evaluates_to "3 * 4\n", 12
    assert_evaluates_to "3 * 4 + 5\n", 17
    assert_evaluates_to "3 * (4 + 5)\n", 27
  end
end
|
/**
Copyright 2013 <NAME> project Ardulink http://www.ardulink.org/
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@author <NAME>
*/
package org.zu.ardulink.protocol;
import static java.util.Collections.synchronizedMap;
import java.util.HashMap;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.zu.ardulink.Link;
import org.zu.ardulink.event.AnalogReadChangeEvent;
import org.zu.ardulink.event.DigitalReadChangeEvent;
import org.zu.ardulink.event.IncomingMessageEvent;
/**
* [ardulinktitle] [ardulinkversion]
* This class implements the native Arduino Link protocol.<br/>
* With this class you are able to send messages to Arduino.<br/>
* <br/>
* Messages are in the format:<br/>
* <br/>
* alp://<request or response>/<variable data>?id=<numeric message id><br/>
* <br/>
* where<br/>
* requests from ardulink to arduino are: <br/>
* kprs - Key Pressed<br/>
* ppin - Power Pin Intensity<br/>
* ppsw - Power Pin Switch<br/>
* tone - Tone square wave start<br/>
* notn - Tone square wave stop<br/>
* srld - Start Listening Digital Pin<br/>
* spld - Stop Listening Digital Pin<br/>
* srla - Start Listening Analog Pin<br/>
* spla - Stop Listening Analog Pin<br/>
* cust - Custom message
* <br/>
* requests from arduino to ardulink are:<br/>
* ared - Analog Pin Read<br/>
* dred - Digital Pin Read<br/>
* <br/>
* responses (only from arduino) are:<br/>
* rply - reply message<br/>
* <br/>
* ?id=<numeric message id> is not mandatory (for requests). If is supplied then a asynchronous<br/>
* rply response will send from arduino. Otherwise arduino will not send a response.<br/>
* <br/>
* Each message from ardulink to arduino terminate with a \n<br/>
* <br/>
* See methods about variable data.<br/>
* <br/>
* Variable data:<br/>
* alp://kprs/chr<char pressed>cod<key code>loc<key location>mod<key modifiers>mex<key modifiers>?id=<message id><br/>
* alp://ppin/<pin>/<intensity>?id=<message id> intensity:0-255<br/>
* alp://ppsw/<pin>/<power>?id=<message id> power:0-1<br/>
* alp://srld/<pin>?id=<message id><br/>
* alp://spld/<pin>?id=<message id><br/>
* alp://srla/<pin>?id=<message id><br/>
* alp://spla/<pin>?id=<message id><br/>
* alp://tone/<pin>/<frequency>?id=<message id><br/>
* alp://tone/<pin>/<frequency>/<duration>?id=<message id><br/>
* alp://notn/<pin>?id=<message id><br/>
* alp://cust/<a custom message>?id=<message id><br/>
* alp://ared/<pin>/<intensity> intensity:0-1023<br/>
* alp://dred/<pin>/<power> power:0-1<br/>
* alp://rply/ok|ko?id=<message id><br/>
* <br/>
* @author <NAME>
*
* [adsense]
*/
public class ALProtocol implements IProtocol {
public static final String NAME = "ArdulinkProtocol";
private static final String OUTGOING_MESSAGE_DIVIDER = "\n";
private static final Logger logger = LoggerFactory.getLogger(ALProtocol.class);
private static long nextId = 1;
private final Map<Long, MessageInfo> messageInfos = synchronizedMap(new HashMap<Long, MessageInfo>());
public static final int NO_DURATION = -1;
@Override
public String getProtocolName() {
return NAME;
}
@Override
public MessageInfo sendKeyPressEvent(Link link, char keychar, int keycode, int keylocation, int keymodifiers, int keymodifiersex) {
return sendKeyPressEvent(link, keychar, keycode, keylocation, keymodifiers, keymodifiersex, null);
}
@Override
public MessageInfo sendPowerPinIntensity(Link link, int pin, int intensity) {
return sendPowerPinIntensity(link, pin, intensity, null);
}
@Override
public MessageInfo sendPowerPinSwitch(Link link, int pin, int power) {
return sendPowerPinSwitch(link, pin, power, null);
}
@Override
public MessageInfo sendToneMessage(Link link, int pin, int frequency) {
return sendToneMessage(link, pin, frequency, null);
}
@Override
public MessageInfo sendToneMessage(Link link, int pin, int frequency, int duration) {
return sendToneMessage(link, pin, frequency, duration, null);
}
@Override
public MessageInfo sendNoToneMessage(Link link, int pin) {
return sendNoToneMessage(link, pin, null);
}
@Override
public MessageInfo sendCustomMessage(Link link, String message) {
return sendCustomMessage(link, message, null);
}
@Override
public MessageInfo sendKeyPressEvent(Link link, char keychar, int keycode, int keylocation, int keymodifiers, int keymodifiersex, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://kprs/chr");
builder.append(keychar);
builder.append("cod");
builder.append(keycode);
builder.append("loc");
builder.append(keylocation);
builder.append("mod");
builder.append(keymodifiers);
builder.append("mex");
builder.append(keymodifiersex);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo sendPowerPinIntensity(Link link, int pin, int intensity, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://ppin/");
builder.append(pin);
builder.append("/");
builder.append(intensity);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo sendPowerPinSwitch(Link link, int pin, int power, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected() && (power == POWER_HIGH || power == POWER_LOW)) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://ppsw/");
builder.append(pin);
builder.append("/");
builder.append(power);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo sendToneMessage(Link link, int pin, int frequency, ReplyMessageCallback callback) {
return sendToneMessage(link, pin, frequency, NO_DURATION, callback);
}
@Override
public MessageInfo sendToneMessage(Link link, int pin, int frequency, int duration, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://tone/");
builder.append(pin);
builder.append("/");
builder.append(frequency);
builder.append("/");
builder.append(duration);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo sendNoToneMessage(Link link, int pin, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://notn/");
builder.append(pin);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo sendCustomMessage(Link link, String message, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://cust/");
builder.append(message);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo startListenDigitalPin(Link link, int pin) {
return startListenDigitalPin(link, pin, null);
}
@Override
public MessageInfo stopListenDigitalPin(Link link, int pin) {
return stopListenDigitalPin(link, pin, null);
}
@Override
public MessageInfo startListenAnalogPin(Link link, int pin) {
return startListenAnalogPin(link, pin, null);
}
@Override
public MessageInfo stopListenAnalogPin(Link link, int pin) {
return stopListenAnalogPin(link, pin, null);
}
@Override
public MessageInfo startListenDigitalPin(Link link, int pin, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://srld/");
builder.append(pin);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo stopListenDigitalPin(Link link, int pin, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://spld/");
builder.append(pin);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo startListenAnalogPin(Link link, int pin, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://srla/");
builder.append(pin);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public MessageInfo stopListenAnalogPin(Link link, int pin, ReplyMessageCallback callback) {
MessageInfo retvalue = new MessageInfo();
synchronized(this) {
if(link.isConnected()) {
long currentId = nextId++;
retvalue.setMessageID(currentId);
if(callback != null) {
retvalue.setCallback(callback);
messageInfos.put(currentId, retvalue);
}
StringBuilder builder = new StringBuilder("alp://spla/");
builder.append(pin);
if(callback != null) {
builder.append("?id=");
builder.append(currentId);
}
builder.append(OUTGOING_MESSAGE_DIVIDER);
String mesg = builder.toString();
logger.debug(mesg);
boolean result = link.writeSerial(mesg);
retvalue.setSent(result);
retvalue.setMessageSent(mesg);
if(!result) {
messageInfos.remove(currentId);
}
}
}
return retvalue;
}
@Override
public IncomingMessageEvent parseMessage(int[] message) {
IncomingMessageEvent retvalue = null;
try {
String msg = new String(message, 0, message.length).trim();
if(msg.startsWith("alp://")) { // OK is a message I know
String cmd = msg.substring(6,10);
if("rply".equals(cmd)) { // alp://rply/ok?id<messageid> alp://rply/ko?id<messageid>
parseReplyMessage(msg);
} else if("dred".equals(cmd)) { // alp://dred/<pin>/<value>
retvalue = parseDigitalReadMessage(msg);
} else if("ared".equals(cmd)) { // alp://ared/<pin>/<value>
retvalue = parseAnalogReadMessage(msg);
} else { // Message I don't recognize its very strange!
logger.error(
"Arduino sent to me a message in ALProtocol that I don't recognize. Msg: {}",
msg);
}
}
}
catch(Exception e) {
logger.error("Errror parsing message sent from Arduino.", e);
e.printStackTrace();
}
return retvalue;
}
/**
 * Parses an "alp://dred/&lt;pin&gt;/&lt;value&gt;" notification into a
 * {@link DigitalReadChangeEvent}. The pin/value payload starts at index 11.
 */
private IncomingMessageEvent parseDigitalReadMessage(String msg) {
    final int slashPos = msg.indexOf('/', 11);
    final int pinNumber = Integer.parseInt(msg.substring(11, slashPos));
    final int pinValue = Integer.parseInt(msg.substring(slashPos + 1));
    return new DigitalReadChangeEvent(pinNumber, pinValue, msg);
}
/**
 * Parses an "alp://ared/&lt;pin&gt;/&lt;value&gt;" notification into an
 * {@link AnalogReadChangeEvent}. The pin/value payload starts at index 11.
 */
private IncomingMessageEvent parseAnalogReadMessage(String msg) {
    final int slashPos = msg.indexOf('/', 11);
    final int pinNumber = Integer.parseInt(msg.substring(11, slashPos));
    final int pinValue = Integer.parseInt(msg.substring(slashPos + 1));
    return new AnalogReadChangeEvent(pinNumber, pinValue, msg);
}
// Handles "alp://rply/<ok|ko>?id=<messageid>": resolves the pending
// MessageInfo registered by the sender and fires its callback.
private void parseReplyMessage(String msg) {
    String result = msg.substring(11,13); // "ok" or "ko"
    int idIndex = msg.indexOf("?id=");
    String tmpId = msg.substring(idIndex + 4).trim();
    Long id = Long.parseLong(tmpId);
    MessageInfo messageInfo = messageInfos.get(id);
    if(messageInfo != null) {
        if("ok".equals(result)) {
            messageInfo.setReply(REPLY_OK);
        } else if("ko".equals(result)) {
            messageInfo.setReply(REPLY_KO);
        }
        messageInfo.setMessageReceived(msg);
        // Remove before invoking the callback so the pending map cannot leak.
        messageInfos.remove(id);
        messageInfo.getCallback().replyInfo(messageInfo); // Callback!
    }
}
// This protocol exchanges human-readable text frames (not binary).
@Override
public ProtocolType getProtocolType() {
    return ProtocolType.TEXT;
}
// Byte value (255) that terminates each frame received from the board.
@Override
public int getIncomingMessageDivider() {
    return 255;
}
// First character of the configured outgoing divider string, as a byte value.
@Override
public int getOutgoingMessageDivider() {
    return ALProtocol.OUTGOING_MESSAGE_DIVIDER.charAt(0);
}
}
|
<gh_stars>1-10
package diplomat
import (
"github.com/tony84727/diplomat/pkg/data"
"github.com/tony84727/diplomat/pkg/emit"
"github.com/tony84727/diplomat/pkg/log"
"github.com/tony84727/diplomat/pkg/selector"
"strings"
"sync"
)
// Synthesizer renders translation data to files through registered emitters.
// It embeds a data.Translation so selection can run directly against it.
type Synthesizer struct {
	data.Translation
	output          Output
	emitterRegistry emit.Registry
	logger          log.Logger
}
// NewSynthesizer builds a Synthesizer; a nil logger is wrapped by
// log.MaybeLogger so logging calls are always safe.
func NewSynthesizer(output Output, translation data.Translation, emitterRegistry emit.Registry, logger log.Logger) *Synthesizer {
	return &Synthesizer{translation, output, emitterRegistry, log.MaybeLogger(logger)}
}
// Output renders the given output spec: it filters the translation by the
// spec's selectors, then emits every template concurrently. The first error
// reported by any emitter goroutine is returned; nil when all succeed.
func (s Synthesizer) Output(output data.Output) error {
	selectors := output.GetSelectors()
	selectorInstance := make([]selector.Selector, len(selectors))
	for i, s := range selectors {
		selectorInstance[i] = selector.NewPrefixSelector(strings.Split(string(s), ".")...)
	}
	selected := data.NewSelectedTranslation(s, selector.NewCombinedSelector(selectorInstance...))
	templates := output.GetTemplates()
	errChan := make(chan error)
	var wg sync.WaitGroup
	for _, t := range templates {
		// Skip templates whose type has no registered emitter.
		if i := s.emitterRegistry.Get(t.GetType()); i != nil {
			wg.Add(1)
			go func(t data.Template) {
				defer wg.Done()
				s.logger.Info("[Emitting] %s [%s]", t.GetOptions().GetFilename(), t.GetType())
				output, err := i.Emit(selected, t.GetOptions())
				if err != nil {
					errChan <- err
					return
				}
				if err := s.output.WriteFile(t.GetOptions().GetFilename(), output); err != nil {
					errChan <- err
				}
			}(t)
		}
	}
	done := make(chan struct{})
	go func() {
		wg.Wait()
		close(done)
	}()
	defer func() {
		// dump error
		// Drain any further errors so blocked goroutines can finish,
		// then close the channel once all of them have returned.
		go func() {
			for range errChan {
			}
		}()
		wg.Wait()
		close(errChan)
	}()
	// Return the first error, or nil once every goroutine completed.
	select {
	case err := <-errChan:
		s.logger.Error(err.Error())
		return err
	case <-done:
		return nil
	}
}
|
/*
* Router configuration
* https://nuxtjs.org/api/configuration-router
*/
export default {
  // Run the auth middleware on every route.
  middleware: ['auth'],
  // App is served from the site root.
  base: '/',
  // CSS class applied to links that exactly match the current route.
  linkExactActiveClass: 'active',
  // Do not append trailing slashes to generated routes.
  trailingSlash: false
}
|
<gh_stars>1-10
/*
TITLE Animate the Gaussian Elimination Chapter24Exercise8.cpp
COMMENT
Objective: Animate the Gaussian Elimination.
Input: -
Output: -
Author: <NAME>
Date: 07.05.2017
*/
#include <iostream>
#include <iomanip>
#include <sstream>
#include <vector>
#include <array>
#include <numeric>
#include <algorithm>
#include "Chapter24Exercise8.h"
// Fills v (expected 4x4) with the fixed coefficient matrix A of the demo
// linear system used by main(). Element-by-element copy from a local array.
void init_2DVector(std::vector<std::vector<double>>& v)
{
    const double coefficients[4][4] = {
        { 3,  2,  1,  5 },
        { 1,  2,  3,  5 },
        { 7, 11, 13,  5 },
        { 3,  7, 13, 67 }
    };
    for (size_t row = 0; row < v.size(); ++row)
        for (size_t col = 0; col < v[row].size(); ++col)
            v[row][col] = coefficients[row][col];
}
// Demo driver: builds the fixed 4x4 system Ax = b, solves it with classical
// Gaussian elimination (from Chapter24Exercise8.h) and prints the result.
int main()
{
    std::array<double, 4> arr_b = { 3, 15, 27, 81 };
    Matrix A(4, std::vector<double>(4, 0));
    init_2DVector(A);
    Vector b(arr_b.begin(), arr_b.end());
    Vector x(4);
    std::cout <<"Initial System of liner equations in matrix form Ax = b:\n";
    std::cout <<"A = \n"<< A <<'\n';
    std::cout <<"b = \n"<< b <<'\n';
    try
    {
        x = classical_gaussian_elimination(A, b);
    }
    catch(std::runtime_error& r)
    {
        // Singular / ill-conditioned systems surface here.
        std::cerr <<"Runtime error::"<< r.what();
        getchar(); // keep the console window open before exiting
        exit(1);
    }
    std::cout <<"Result:\n";
    std::cout <<"x = "<< x <<'\n';
    getchar(); // keep the console window open
}
<filename>app/controllers/storageBackends/Backends.scala
/*
* Copyright 2017 - Swiss Data Science Center (SDSC)
* A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
* Eidgenössische Technische Hochschule Zürich (ETHZ).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.storageBackends
import javax.inject.{ Inject, Singleton }
import play.api.Configuration
import play.api.inject.{ BindingKey, Injector }
/**
* Created by johann on 07/07/17.
*/
@Singleton
class Backends @Inject() ( injector: Injector, configuration: Configuration ) {

  // Backend name -> instance, resolved once at construction time.
  val map: Map[String, StorageBackend] = loadBackends

  // Looks up an enabled backend by its configuration name.
  def getBackend( name: String ): Option[StorageBackend] = map.get( name )

  // Reads "storage.backend.<name>.enabled" flags from configuration and
  // resolves each enabled backend from the injector by qualifier name.
  private[this] def loadBackends: Map[String, StorageBackend] = {
    val it = for {
      conf <- configuration.getOptional[Configuration]( "storage.backend" )
    } yield for {
      name <- conf.subKeys
      if conf.getOptional[Boolean]( s"$name.enabled" ).getOrElse( false )
    } yield {
      val key = BindingKey( classOf[StorageBackend] ).qualifiedWith( name )
      name -> injector.instanceOf( key )
    }
    it.getOrElse( Seq.empty ).toMap
  }
}
|
import { Request, Response, NextFunction } from 'express';
import { validationResult } from 'express-validator/check';
import isEmpty from '../utilities/isEmpty';
import passErrorToNext from '../utilities/passErrorToNext';
import { ErrorREST, Errors } from '../utilities/ErrorREST';
import {
createComment,
getCommentById,
getCommentsBySourceId,
} from '../services/commentServices';
import { getUserById, isBanned } from '../services/userServices';
import isAuthorized from '../utilities/isAuthorized';
/**
 * Creates a comment on a post. Validates the request body, rejects banned
 * users (per community), and responds with the created comment.
 */
export const postCommentFromPost = async (
  req: Request,
  res: Response,
  next: NextFunction,
): Promise<void> => {
  try {
    isEmpty(validationResult(req));
    const { userId } = req;
    const { postId, communityId } = req.params;
    // Discriminator for the polymorphic comment source.
    const onModel = 'Post';
    const { text } = req.body;
    const user = await getUserById(userId);
    // Throws when the user is banned from this community.
    isBanned(user.bans, communityId);
    const comment = await createComment(text, userId, postId, onModel);
    res.status(200).json({ data: { comment } });
  } catch (err) {
    passErrorToNext(err, next);
  }
};
/**
 * Creates a reply to an existing comment. Mirrors postCommentFromPost but
 * with a 'Comment' source model.
 */
export const postCommentFromComment = async (
  req: Request,
  res: Response,
  next: NextFunction,
): Promise<void> => {
  try {
    isEmpty(validationResult(req));
    const { userId } = req;
    const { commentId, communityId } = req.params;
    // Discriminator for the polymorphic comment source.
    const onModel = 'Comment';
    const { text } = req.body;
    const user = await getUserById(userId);
    // Throws when the user is banned from this community.
    isBanned(user.bans, communityId);
    const comment = await createComment(text, userId, commentId, onModel);
    res.status(200).json({ data: { comment } });
  } catch (err) {
    passErrorToNext(err, next);
  }
};
/**
 * Edits a comment's text. Only the comment's author may edit it
 * (isAuthorized throws otherwise).
 */
export const patchComment = async (
  req: Request,
  res: Response,
  next: NextFunction,
): Promise<void> => {
  try {
    isEmpty(validationResult(req));
    const { text } = req.body;
    const { userId } = req;
    const { commentId } = req.params;
    const comment = await getCommentById(commentId);
    isAuthorized(comment.user, userId);
    comment.text = text;
    await comment.save();
    res.status(200).json({ data: { comment } });
  } catch (err) {
    passErrorToNext(err, next);
  }
};
/**
 * Deletes a comment. Only the comment's author may delete it
 * (isAuthorized throws otherwise). Responds 204 on success.
 */
export const deleteComment = async (
  req: Request,
  res: Response,
  next: NextFunction,
): Promise<void> => {
  try {
    const { userId } = req;
    const { commentId } = req.params;
    const comment = await getCommentById(commentId);
    isAuthorized(comment.user, userId);
    // BUG FIX: remove() returned a promise that was previously left floating,
    // so the 204 could be sent before deletion finished and failures were
    // silently unhandled. Await it so errors reach the catch block.
    await comment.remove();
    res.sendStatus(204);
  } catch (err) {
    passErrorToNext(err, next);
  }
};
/**
 * Lists comments for a post or comment (whichever param is present), with
 * sort / limit / page query options (defaults: 'top', 25, 1).
 */
export const getComments = async (
  req: Request,
  res: Response,
  next: NextFunction,
): Promise<void> => {
  try {
    const sort: string = req.query.sort || 'top';
    // BUG FIX: always pass a radix to parseInt — without it, inputs with a
    // leading "0x" (and, historically, "0") parse in the wrong base.
    const limit = parseInt(req.query.limit, 10) || 25;
    const page = parseInt(req.query.page, 10) || 1;
    // NOTE(review): checkLimit is not among this file's visible imports —
    // confirm it is imported/defined elsewhere in the module.
    checkLimit(limit);
    const sourceId = req.params.postId || req.params.commentId;
    const { comments, commentsCount } = await getCommentsBySourceId(
      sourceId,
      sort,
      limit,
      page,
    );
    res.status(200).json({ data: { comments, commentsCount } });
  } catch (err) {
    passErrorToNext(err, next);
  }
};
|
# Configures and starts an Egeria server instance against a SAS Catalog
# repository via the Kubernetes cluster's admin-services REST API.
# Fill in every variable below before running; the script aborts on the
# first failed command (set -e).

# The IP/Hostname for the Kubernetes cluster
CLUSTER_IP=""
# The user to use for Egeria
EGERIA_USER=""
# The name of the Egeria server you're starting
EGERIA_SERVER=""
# The IP/Hostname to connect to for Catalog
CATALOG_IP=""
# Catalog Username/pw credentials
CATALOG_USER=""
CATALOG_PASS=""
set -e
# Configure Catalog connection (repository-proxy pointing at SAS Catalog)
curl --location --request POST -k "https://${CLUSTER_IP}:30000/open-metadata/admin-services/users/${EGERIA_USER}/servers/${EGERIA_SERVER}/local-repository/mode/repository-proxy/connection" \
--header 'Content-Type: application/json' \
--data-raw "{
    \"class\": \"Connection\",
    \"connectorType\": {
        \"class\": \"ConnectorType\",
        \"connectorProviderClassName\": \"org.odpi.openmetadata.connector.sas.repository.connector.RepositoryConnectorProvider\"
    },
    \"endpoint\": {
        \"class\": \"Endpoint\",
        \"address\": \"${CATALOG_IP}\",
        \"protocol\": \"https\"
    },
    \"userId\": \"${CATALOG_USER}\",
    \"clearPassword\": \"${CATALOG_PASS}\"
}"
# Configure the OMRS event bus (Kafka topic connector)
curl --location --request POST -k "https://${CLUSTER_IP}:30000/open-metadata/admin-services/users/${EGERIA_USER}/servers/${EGERIA_SERVER}/event-bus?connectorProvider=org.odpi.openmetadata.adapters.eventbus.topic.kafka.KafkaOpenMetadataTopicProvider&topicURLRoot=OMRSTopic" \
--header "Content-Type: application/json" \
--data-raw "{
\"producer\": {
\"bootstrap.servers\":\"kafkahost:9092\"
},
\"consumer\": {
\"bootstrap.servers\":\"kafkahost:9092\"
}
}"
# Configure the repository event mapper; RabbitMQ credentials are pulled
# live from the sas-rabbitmq-server-secret in the current kube context.
curl --location --request POST -k "https://${CLUSTER_IP}:30000/open-metadata/admin-services/users/${EGERIA_USER}/servers/${EGERIA_SERVER}/local-repository/event-mapper-details?connectorProvider=org.odpi.openmetadata.connector.sas.event.mapper.RepositoryEventMapperProvider&eventSource=dev.ingress-nginx.tw-dev-m1.dmmdev.sashq-d.openstack.sas.com:5672" \
--header "Content-Type: application/json" \
--data-raw "{\"username\":\"$(kubectl get secret sas-rabbitmq-server-secret -o go-template='{{(index .data.RABBITMQ_DEFAULT_USER)}}' | base64 -d)\",
\"password\":\"$(kubectl get secret sas-rabbitmq-server-secret -o go-template='{{(index .data.RABBITMQ_DEFAULT_PASS)}}' | base64 -d)\"}"
# Start Egeria Server
curl --location --request POST -k "https://${CLUSTER_IP}:30000/open-metadata/admin-services/users/${EGERIA_USER}/servers/${EGERIA_SERVER}/instance" \
--header "Content-Type: application/json"
// Maps an event name to its registered handler (undefined when not set).
interface Events {
  [i: string]: undefined | ((error?: any) => any);
}
// Mock of the built-in `net` module for tests. The fake server records
// handlers registered via `once` and exposes test-only helpers
// (`_trigger`, `_clear`) to fire and reset them.
jest.mock('net', () => {
  let events: Events = {};
  let returnPort = 8080;
  const server = {
    // Mirrors net.Server#address(): reports the last port passed to listen().
    address: () => ({
      port: returnPort,
    }),
    // Registers a handler; only the latest handler per event name is kept.
    once: function once (event: string, callback: (error?: any) => any) {
      events[event] = callback;
      return this;
    },
    listen: function listen (port: number) {
      returnPort = port;
      return this;
    },
    // Invokes the registered 'close' handler, if any.
    close: function close () {
      const { close: closeCallback } = events;
      if (typeof closeCallback === 'function') {
        closeCallback();
      }
    },
  };
  return {
    createServer: () => server,
    // Test helper: fire the handler registered for `event`, if any.
    _trigger: (event: string, data: any) => {
      const callback = events[event];
      if (typeof callback === 'function') {
        callback(data);
      }
    },
    // Test helper: drop all registered handlers between tests.
    _clear: () => {
      events = {};
    },
  };
});
|
///
/// @file index.js
/// @brief The entry point for our application.
///

// Imports
const mongoose = require('mongoose');
const loadenv = require('node-env-file');

// Mongoose Promise
mongoose.Promise = global.Promise;

// Environment Variables
//
// Comment this line out when you are ready to deploy this
// application!
// loadenv('.env');

// Connect to Database
mongoose.connect(process.env.DATABASE_URL, { useMongoClient: true })
  .then(require('./server'))
  .catch(err => {
    // Report the error and exit the program.
    console.error(`[EXCEPTION!] ${err}`);
    console.error(err.stack || 'No Stack');
    // BUG FIX: previously the process only exited (with code 1) when closing
    // the connection *failed*; a successful close left the process hanging
    // with a zero exit code. Always exit non-zero after attempting cleanup.
    mongoose.connection.close()
      .catch(() => {})
      .then(() => process.exit(1));
  });
<gh_stars>0
# Standard library
import base64
import json
import os
import random
import ssl
import threading
import time
from os.path import basename, dirname

# Third-party
import redis_lock
import irods.keywords as kw
from irods.exception import NetworkException
from irods.models import Collection, DataObject, Resource
from irods.session import iRODSSession

# Local
from .sync_utils import call, get_hdlr_mod, get_redis, size
from .utils import Operation
def validate_target_collection(meta, logger):
    """Reject syncs whose target is the zone root.

    The root collection "/" may only hold zone collections, so it can never
    be a sync destination. Raises Exception when meta["target"] is "/".
    """
    if meta["target"] == "/":
        raise Exception("Root may only contain collections which represent zones")
def child_of(session, child_resc_name, resc_name):
    """Return True if resc_name is child_resc_name itself or one of its
    ancestors in the iRODS resource hierarchy; False otherwise."""
    if child_resc_name == resc_name:
        return True
    else:
        # Walk up the resource tree one parent at a time until the root.
        while True:
            child_resc = session.resources.get(child_resc_name)
            parent_resc_id = child_resc.parent
            if parent_resc_id is None:
                break
            parent_resc_name = None
            # Resolve the parent's name from its id via the catalog.
            for row in session.query(Resource.name).filter(Resource.id == parent_resc_id):
                parent_resc_name = row[Resource.name]
            if parent_resc_name == resc_name:
                return True
            child_resc_name = parent_resc_name
        return False
def create_dirs(hdlr_mod, logger, session, meta, **options):
    """Recursively create the collection meta["target"] and its missing
    ancestors, serialized across workers with a redis lock per path."""
    target = meta["target"]
    path = meta["path"]
    config = meta["config"]
    if target.startswith("/"):
        r = get_redis(config)
        if not session.collections.exists(target):
            with redis_lock.Lock(r, "create_dirs:" + path):
                # Re-check under the lock: another worker may have created it.
                if not session.collections.exists(target):
                    meta2 = meta.copy()
                    meta2["target"] = dirname(target)
                    meta2["path"] = dirname(path)
                    # Create the parent first, then this collection.
                    create_dirs(hdlr_mod, logger, session, meta2, **options)
                    call(hdlr_mod, "on_coll_create", create_dir, logger, hdlr_mod, logger, session, meta, **options)
    else:
        raise Exception("create_dirs: relative path; target:[" + target + ']; path:[' + path + ']')
def create_dir(hdlr_mod, logger, session, meta, **options):
    """Create the single iRODS collection named by meta["target"]."""
    coll_path = meta["target"]
    source_path = meta["path"]  # read for parity with the event-handler contract
    logger.info("creating collection " + coll_path)
    session.collections.create(coll_path)
def get_target_path(hdlr_mod, session, meta, **options):
    """Return the handler module's custom physical target path, or None when
    the handler does not define target_path()."""
    custom = getattr(hdlr_mod, "target_path", None)
    if custom is None:
        return None
    return custom(session, meta, **options)
def get_resource_name(hdlr_mod, session, meta, **options):
    """Return the handler module's destination resource name, or None when
    the handler does not define to_resource()."""
    resolver = getattr(hdlr_mod, "to_resource", None)
    if resolver is None:
        return None
    return resolver(session, meta, **options)
def annotate_metadata_for_special_data_objs(meta, session, source_physical_fullpath, dest_dataobj_logical_fullpath):
    """Attach AVU metadata to data objects that need special handling:
    paths that failed unicode decoding, sockets, and symlinks."""
    def add_metadata_if_not_present(obj, key, val, unit=None):
        # TODO: If updating/syncing link items, we might want to update the readlink result...
        if key not in obj.metadata.keys():
            obj.metadata.add(key, val, unit)
    b64_path_str = meta.get('b64_path_str')
    # Path contained undecodable bytes: record the base64-encoded original.
    if b64_path_str is not None:
        add_metadata_if_not_present(
            session.data_objects.get(dest_dataobj_logical_fullpath),
            'irods::automated_ingest::UnicodeEncodeError',
            b64_path_str,
            'python3.base64.b64encode(full_path_of_source_file)')
    if meta['is_socket']:
        add_metadata_if_not_present(
            session.data_objects.get(dest_dataobj_logical_fullpath),
            'socket_target',
            'socket',
            'automated_ingest')
    elif meta['is_link']:
        # Record where the symlink points (resolved relative to its directory).
        add_metadata_if_not_present(
            session.data_objects.get(dest_dataobj_logical_fullpath),
            'link_target',
            os.path.join(os.path.dirname(source_physical_fullpath), os.readlink(source_physical_fullpath)),
            'automated_ingest')
def register_file(hdlr_mod, logger, session, meta, **options):
    """Register an existing physical file into the catalog at meta["target"]
    without copying data, then annotate special objects (links/sockets)."""
    dest_dataobj_logical_fullpath = meta["target"]
    source_physical_fullpath = meta["path"]
    b64_path_str = meta.get('b64_path_str')
    # Handler may remap the physical path recorded in the catalog.
    phypath_to_register_in_catalog = get_target_path(hdlr_mod, session, meta, **options)
    if phypath_to_register_in_catalog is None:
        if b64_path_str is not None:
            # Undecodable path: register under a generated placeholder name.
            phypath_to_register_in_catalog = os.path.join(source_physical_fullpath, meta['unicode_error_filename'])
        else:
            phypath_to_register_in_catalog = source_physical_fullpath
    resc_name = get_resource_name(hdlr_mod, session, meta, **options)
    if resc_name is not None:
        options["destRescName"] = resc_name
    if b64_path_str is not None:
        source_physical_fullpath = base64.b64decode(b64_path_str)
    # Pass size/mtime so the catalog entry matches the file without a stat.
    options[kw.DATA_SIZE_KW] = str(meta['size'])
    options[kw.DATA_MODIFY_KW] = str(int(meta['mtime']))
    logger.info("registering object " + dest_dataobj_logical_fullpath + ", options = " + str(options))
    session.data_objects.register(phypath_to_register_in_catalog, dest_dataobj_logical_fullpath, **options)
    logger.info("succeeded", task="irods_register_file", path = source_physical_fullpath)
    annotate_metadata_for_special_data_objs(meta, session, source_physical_fullpath, dest_dataobj_logical_fullpath)
def upload_file(hdlr_mod, logger, session, meta, **options):
    """Upload the local file meta["path"] into iRODS at meta["target"]
    (data is copied, unlike register_file), then annotate special objects."""
    dest_dataobj_logical_fullpath = meta["target"]
    source_physical_fullpath = meta["path"]
    b64_path_str = meta.get('b64_path_str')
    resc_name = get_resource_name(hdlr_mod, session, meta, **options)
    if resc_name is not None:
        options["destRescName"] = resc_name
    # Undecodable path: recover the raw bytes for the filesystem call.
    if b64_path_str is not None:
        source_physical_fullpath = base64.b64decode(b64_path_str)
    logger.info("uploading object " + dest_dataobj_logical_fullpath + ", options = " + str(options))
    session.data_objects.put(source_physical_fullpath, dest_dataobj_logical_fullpath, **options)
    logger.info("succeeded", task="irods_upload_file", path = source_physical_fullpath)
    annotate_metadata_for_special_data_objs(meta, session, source_physical_fullpath, dest_dataobj_logical_fullpath)
def no_op(hdlr_mod, logger, session, meta, **options):
    """Deliberate no-op used when the handler's operation is Operation.NO_OP;
    still routed through call() so event-handler hooks fire."""
    pass
def sync_file(hdlr_mod, logger, session, meta, **options):
    """Synchronize the data object at meta["target"] from the local file at
    meta["path"]: append only the new bytes for PUT_APPEND, otherwise do a
    full put."""
    dest_dataobj_logical_fullpath = meta["target"]
    source_physical_fullpath = meta["path"]
    b64_path_str = meta.get('b64_path_str')
    resc_name = get_resource_name(hdlr_mod, session, meta, **options)
    if resc_name is not None:
        options["destRescName"] = resc_name
    # Undecodable path: recover the raw bytes for the filesystem call.
    if b64_path_str is not None:
        source_physical_fullpath = base64.b64decode(b64_path_str)
    logger.info("syncing object " + dest_dataobj_logical_fullpath + ", options = " + str(options))
    op = hdlr_mod.operation(session, meta, **options)
    if op == Operation.PUT_APPEND:
        BUFFER_SIZE = 1024
        logger.info("appending object " + dest_dataobj_logical_fullpath + ", options = " + str(options))
        # Seek both sides to the object's current size and copy the remainder.
        tsize = size(session, dest_dataobj_logical_fullpath)
        tfd = session.data_objects.open(dest_dataobj_logical_fullpath, "a", **options)
        tfd.seek(tsize)
        with open(source_physical_fullpath, "rb") as sfd:
            sfd.seek(tsize)
            while True:
                buf = sfd.read(BUFFER_SIZE)
                if buf == b"":
                    break
                tfd.write(buf)
        tfd.close()
        logger.info("succeeded", task="irods_append_file", path=source_physical_fullpath)
    else:
        logger.info("uploading object " + dest_dataobj_logical_fullpath + ", options = " + str(options))
        session.data_objects.put(source_physical_fullpath, dest_dataobj_logical_fullpath, **options)
        logger.info("succeeded", task="irods_update_file", path = source_physical_fullpath)
def update_metadata(hdlr_mod, logger, session, meta, **options):
    """Update catalog size/mtime for the replica of meta["target"] whose
    physical path (and optionally resource) matches the source file; raises
    when no matching replica exists."""
    dest_dataobj_logical_fullpath = meta["target"]
    source_physical_fullpath = meta["path"]
    phypath_to_register_in_catalog = get_target_path(hdlr_mod, session, meta, **options)
    b64_path_str = meta.get('b64_path_str')
    if phypath_to_register_in_catalog is None:
        if b64_path_str is not None:
            # Append generated filename to truncated fullpath because it failed to encode
            phypath_to_register_in_catalog = os.path.join(source_physical_fullpath, meta['unicode_error_filename'])
        else:
            phypath_to_register_in_catalog = source_physical_fullpath
    if b64_path_str is not None:
        source_physical_fullpath = base64.b64decode(b64_path_str)
    size = int(meta['size'])
    mtime = int(meta['mtime'])
    logger.info("updating object: " + dest_dataobj_logical_fullpath + ", options = " + str(options))
    data_obj_info = {"objPath": dest_dataobj_logical_fullpath}
    resc_name = get_resource_name(hdlr_mod, session, meta, **options)
    outdated_repl_nums = []
    found = False
    if resc_name is None:
        # No resource constraint: any replica is acceptable.
        found = True
    else:
        # Find the replica whose physical path matches and whose resource is
        # under resc_name; remember its replica number for the update.
        for row in session.query(Resource.name, DataObject.path, DataObject.replica_number).filter(DataObject.name == basename(dest_dataobj_logical_fullpath), Collection.name == dirname(dest_dataobj_logical_fullpath)):
            if row[DataObject.path] == phypath_to_register_in_catalog:
                if child_of(session, row[Resource.name], resc_name):
                    found = True
                    repl_num = row[DataObject.replica_number]
                    data_obj_info["replNum"] = repl_num
                    continue
    if not found:
        if b64_path_str is not None:
            logger.error("updating object: wrong resource or path, dest_dataobj_logical_fullpath = " + dest_dataobj_logical_fullpath + ", phypath_to_register_in_catalog = " + phypath_to_register_in_catalog + ", phypath_to_register_in_catalog = " + phypath_to_register_in_catalog + ", options = " + str(options))
        else:
            logger.error("updating object: wrong resource or path, dest_dataobj_logical_fullpath = " + dest_dataobj_logical_fullpath + ", source_physical_fullpath = " + source_physical_fullpath + ", phypath_to_register_in_catalog = " + phypath_to_register_in_catalog + ", options = " + str(options))
        raise Exception("wrong resource or path")
    # Mark all replicas current while recording the new size and mtime.
    session.data_objects.modDataObjMeta(data_obj_info, {"dataSize":size, "dataModify":mtime, "allReplStatus":1}, **options)
    if b64_path_str is not None:
        logger.info("succeeded", task="irods_update_metadata", path = phypath_to_register_in_catalog)
    else:
        logger.info("succeeded", task="irods_update_metadata", path = source_physical_fullpath)
def sync_file_meta(hdlr_mod, logger, session, meta, **options):
    """Placeholder for metadata-only file sync; currently intentionally empty."""
    pass
irods_session_map = {}
irods_session_timer_map = {}
class disconnect_timer(object):
    """One-shot timer that tears down every cached iRODS session in sess_map
    after `interval` seconds of idleness."""

    def __init__(self, logger, interval, sess_map):
        self.logger = logger
        self.interval = interval
        # threading.Timer instance; None until start() is called.
        self.timer = None
        self.sess_map = sess_map

    def callback(self):
        """Clean up every cached session and empty the map."""
        for session_key, session in self.sess_map.items():
            self.logger.info('Cleaning up session [' + session_key + ']')
            session.cleanup()
        self.sess_map.clear()

    def cancel(self):
        """Stop the pending timer; safe to call before start()."""
        if self.timer is not None:
            self.timer.cancel()

    def start(self):
        """Arm the one-shot disconnect timer."""
        self.timer = threading.Timer(self.interval, self.callback)
        self.timer.start()
def stop_timer():
    # Cancel every pending idle-disconnect timer (module-global registry).
    for k, v in irods_session_timer_map.items():
        v.cancel();
def start_timer():
    # (Re)arm every registered idle-disconnect timer (module-global registry).
    for k, v in irods_session_timer_map.items():
        v.start();
def irods_session(hdlr_mod, meta, logger, **options):
    """Return a cached (or newly created) iRODSSession.

    Connection parameters come from IRODS_* environment variables when all
    are set; otherwise from the iRODS environment file. Sessions are cached
    per connection-kwargs key, and each lookup resets the idle-disconnect
    timer for that key.

    Fixes applied:
      * the password assignment previously contained a redacted placeholder
        instead of the environment value (syntax error);
      * the environment file was opened unconditionally, crashing with a
        TypeError when all env vars were set and env_file was None;
      * the retry loop compared i < 10 inside range(10), so the final
        failure slept instead of raising, leaving `sess` unbound;
      * `time.sleep` is used, so `time` must be imported at module level.
    """
    env_irods_host = os.environ.get("IRODS_HOST")
    env_irods_port = os.environ.get("IRODS_PORT")
    env_irods_user_name = os.environ.get("IRODS_USER_NAME")
    env_irods_zone_name = os.environ.get("IRODS_ZONE_NAME")
    env_irods_password = os.environ.get("IRODS_PASSWORD")
    env_file = os.environ.get('IRODS_ENVIRONMENT_FILE')
    kwargs = {}
    if env_irods_host is None or \
            env_irods_port is None or \
            env_irods_user_name is None or \
            env_irods_zone_name is None or \
            env_irods_password is None:
        # Incomplete env-var configuration: fall back to the environment file.
        if env_file is None:
            env_file = os.path.expanduser('~/.irods/irods_environment.json')
        kwargs["irods_env_file"] = env_file
    else:
        kwargs["host"] = env_irods_host
        kwargs["port"] = env_irods_port
        kwargs["user"] = env_irods_user_name
        kwargs["zone"] = env_irods_zone_name
        kwargs["password"] = env_irods_password
    if hasattr(hdlr_mod, "as_user"):
        client_zone, client_user = hdlr_mod.as_user(meta, **options)
        kwargs["client_user"] = client_user
        kwargs["client_zone"] = client_zone
    key = json.dumps(kwargs)  # todo add timestamp of env file to key
    # Only consult the environment file for SSL settings when one is in play;
    # env_file stays None when all connection env vars were provided.
    if env_file is not None:
        with open(env_file) as irods_env:
            irods_env_as_json = json.load(irods_env)
        if 'irods_ssl_ca_certificate_file' in irods_env_as_json:
            kwargs['ssl_context'] = ssl.create_default_context(
                purpose=ssl.Purpose.SERVER_AUTH,
                cafile=irods_env_as_json['irods_ssl_ca_certificate_file'],
                capath=None, cadata=None)
    if key not in irods_session_map:
        # TODO: #42 - pull out 10 into configuration
        for i in range(10):
            try:
                sess = iRODSSession(**kwargs)
                irods_session_map[key] = sess
                break
            except NetworkException:
                # Back off briefly between attempts; re-raise on the last one
                # so the caller sees the connection failure.
                if i < 9:
                    time.sleep(0.1)
                else:
                    raise
    else:
        sess = irods_session_map.get(key)
    # =-=-=-=-=-=-=-
    # disconnect timer: cancel any pending timer for this key, then re-arm.
    if key in irods_session_timer_map:
        timer = irods_session_timer_map[key]
        timer.cancel()
        irods_session_timer_map.pop(key, None)
    idle_sec = meta['idle_disconnect_seconds']
    logger.info("iRODS Idle Time set to: " + str(idle_sec))
    timer = disconnect_timer(logger, idle_sec, irods_session_map)
    irods_session_timer_map[key] = timer
    # =-=-=-=-=-=-=-
    return sess
def sync_data_from_file(meta, logger, content, **options):
    """Synchronize one local file into iRODS according to the handler's
    Operation: register, register-as-replica, put, put-sync/append, or no-op.
    When `content` is False only metadata is synchronized."""
    target = meta["target"]
    path = meta["path"]
    hdlr_mod = get_hdlr_mod(meta)
    init = meta["initial_ingest"]
    session = irods_session(hdlr_mod, meta, logger, **options)
    if init:
        # Initial ingest: skip the existence checks entirely.
        exists = False
    else:
        if session.data_objects.exists(target):
            exists = True
        elif session.collections.exists(target):
            raise Exception("sync: cannot sync file " + path + " to collection " + target)
        else:
            exists = False
    if hasattr(hdlr_mod, "operation"):
        op = hdlr_mod.operation(session, meta, **options)
    else:
        op = Operation.REGISTER_SYNC
    if op == Operation.NO_OP:
        # Fire the create/modify hooks without touching the catalog.
        if not exists:
            call(hdlr_mod, "on_data_obj_create", no_op, logger, hdlr_mod, logger, session, meta, **options)
        else:
            call(hdlr_mod, "on_data_obj_modify", no_op, logger, hdlr_mod, logger, session, meta, **options)
    else:
        createRepl = False
        if exists and op == Operation.REGISTER_AS_REPLICA_SYNC:
            if hasattr(hdlr_mod, "to_resource"):
                resc_name = hdlr_mod.to_resource(session, meta, **options)
            else:
                raise Exception("no resource name defined")
            # Decide whether a new replica is needed under the destination
            # resource, and sanity-check paths of any existing replicas there.
            found = False
            foundPath = False
            for replica in session.data_objects.get(target).replicas:
                if child_of(session, replica.resource_name, resc_name):
                    found = True
                    if replica.path == path:
                        foundPath = True
            if found:
                if not foundPath:
                    raise Exception("there is at least one replica under resource but all replicas have wrong paths")
            else:
                createRepl = True
        put = op in [Operation.PUT, Operation.PUT_SYNC, Operation.PUT_APPEND]
        if not exists:
            # Ensure the parent collection hierarchy exists first.
            meta2 = meta.copy()
            meta2["target"] = dirname(target)
            if 'b64_path_str' not in meta2:
                meta2["path"] = dirname(path)
            create_dirs(hdlr_mod, logger, session, meta2, **options)
            if put:
                call(hdlr_mod, "on_data_obj_create", upload_file, logger, hdlr_mod, logger, session, meta, **options)
            else:
                call(hdlr_mod, "on_data_obj_create", register_file, logger, hdlr_mod, logger, session, meta, **options)
        elif createRepl:
            options["regRepl"] = ""
            call(hdlr_mod, "on_data_obj_create", register_file, logger, hdlr_mod, logger, session, meta, **options)
        elif content:
            if put:
                # PUT only re-uploads when a sync variant was requested.
                sync = op in [Operation.PUT_SYNC, Operation.PUT_APPEND]
                if sync:
                    call(hdlr_mod, "on_data_obj_modify", sync_file, logger, hdlr_mod, logger, session, meta, **options)
            else:
                call(hdlr_mod, "on_data_obj_modify", update_metadata, logger, hdlr_mod, logger, session, meta, **options)
        else:
            call(hdlr_mod, "on_data_obj_modify", sync_file_meta, logger, hdlr_mod, logger, session, meta, **options)
    # Re-arm the idle-disconnect timers after any session use.
    start_timer()
def sync_metadata_from_file(meta, logger, **options):
    """Metadata-only variant of sync_data_from_file (content=False)."""
    sync_data_from_file(meta, logger, False, **options)
def sync_dir_meta(hdlr_mod, logger, session, meta, **options):
    """Placeholder for metadata-only collection sync; currently intentionally empty."""
    pass
def sync_data_from_dir(meta, logger, content, **options):
    """Synchronize one local directory into iRODS as a collection, honoring
    the handler's Operation and firing the collection create/modify hooks."""
    target = meta["target"]
    path = meta["path"]
    hdlr_mod = get_hdlr_mod(meta)
    session = irods_session(hdlr_mod, meta, logger, **options)
    exists = session.collections.exists(target)
    if hasattr(hdlr_mod, "operation"):
        op = hdlr_mod.operation(session, meta, **options)
    else:
        op = Operation.REGISTER_SYNC
    if op == Operation.NO_OP:
        # Fire the hooks without touching the catalog.
        if not exists:
            call(hdlr_mod, "on_coll_create", no_op, logger, hdlr_mod, logger, session, meta, **options)
        else:
            call(hdlr_mod, "on_coll_modify", no_op, logger, hdlr_mod, logger, session, meta, **options)
    else:
        if not exists:
            create_dirs(hdlr_mod, logger, session, meta, **options)
        else:
            call(hdlr_mod, "on_coll_modify", sync_dir_meta, logger, hdlr_mod, logger, session, meta, **options)
    # Re-arm the idle-disconnect timers after any session use.
    start_timer()
def sync_metadata_from_dir(meta, logger, **options):
    """Metadata-only variant of sync_data_from_dir (content=False)."""
    sync_data_from_dir(meta, logger, False, **options)
|
<gh_stars>0
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package converters;
import java.util.Date;
/**
*
* @author olu
*/
public class ConverterMain {

    /**
     * Renames dataset folders using the labels file, then converts every
     * .ppm image in the dataset to the given format, reporting elapsed time.
     *
     * @param args the command line arguments (unused)
     */
    public static void main(String[] args) {
        String baseFolderDir="/home/olu/Dev/data_base/sign_base/training"; //Change as appropriate to you
        String labelingFileDir="/home/olu/Dev/data_base/sign_base/labels.txt"; //Change as appropriate to you
        String formatName = "jpeg";
        //Convert Folder Names
        Converter convert = new Converter();
        boolean converted = convert.convertFolderName(baseFolderDir, labelingFileDir);
        //Convert all of datasets .ppm to .jpeg
        long timeStart = System.currentTimeMillis();
        boolean converted2 = convert.convertAllDatasetImages(baseFolderDir, formatName);
        if(converted2==true){
            long timeEnd = System.currentTimeMillis(); //in milliseconds
            // Break the elapsed milliseconds down into days/hours/minutes/seconds.
            long diff = timeEnd - timeStart;
            long diffSeconds = diff / 1000 % 60;
            long diffMinutes = diff / (60 * 1000) % 60;
            long diffHours = diff / (60 * 60 * 1000) % 24;
            long diffDays = diff / (24 * 60 * 60 * 1000);
            System.out.println("ALL "+formatName+" CONVERSIONS NOW COMPLETED. Took "+diffDays+" Day(s), "+diffHours+" Hour(s) "+diffMinutes+" Minute(s) and "+diffSeconds+" Second(s)");
        }
    }
}
|
def max_difference(arr):
    """Return the largest absolute difference between adjacent elements.

    Args:
        arr: a sequence of numbers.

    Returns:
        The maximum of abs(arr[i] - arr[i+1]) over all adjacent pairs, or 0
        when the sequence has fewer than two elements (the original returned
        an arbitrary -100000 sentinel in that case, which is meaningless as
        a difference).
    """
    if len(arr) < 2:
        return 0
    return max(abs(a - b) for a, b in zip(arr, arr[1:]))
-- Names of all employees in the Product Development department.
SELECT name FROM employees
WHERE department = 'Product Development'
<reponame>antonjb/apple-news-format<gh_stars>1-10
import { SupportedUnits } from "./supported-units";
/**
* Signature/interface for a `Padding` object
* @see https://developer.apple.com/documentation/apple_news/padding
*/
export interface Padding {
  // Each side accepts either a supported-unit value or a plain integer.
  bottom?: SupportedUnits | number; // Integer
  left?: SupportedUnits | number; // Integer
  right?: SupportedUnits | number; // Integer
  top?: SupportedUnits | number; // Integer
}
|
package rest
import (
"errors"
"net/http"
"net/url"
"reflect"
"github.com/gin-gonic/gin"
"github.com/mitchellh/mapstructure"
)
// MustBind calls binds and aborts the request if an error is raised.
// On failure the request is aborted with 400 Bad Request and the error
// is returned so the handler can stop processing.
func MustBind(c *gin.Context, target interface{}) error {
	if err := Bind(c, target); err != nil {
		c.AbortWithError(http.StatusBadRequest, err)
		return err
	}
	return nil
}
// Bind maps a request onto a command/query/event. It merges, in order,
// the JSON body, query string, form body, and URI params into one map,
// then decodes that map into target via mapstructure using the "cqrs"
// tag. Later sources overwrite earlier ones for duplicate keys.
// target must be a pointer.
func Bind(c *gin.Context, target interface{}) error {
	if reflect.ValueOf(target).Kind() != reflect.Ptr {
		return errors.New("Bind target must be a pointer")
	}
	// Accumulate all request sources into a single untyped map first.
	intermediary := make(map[string]interface{})
	err := bindJSON(c, &intermediary)
	if err != nil {
		return err
	}
	err = bindQuery(c, &intermediary)
	if err != nil {
		return err
	}
	err = bindForm(c, &intermediary)
	if err != nil {
		return err
	}
	err = bindURI(c, &intermediary)
	if err != nil {
		return err
	}
	d, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		ZeroFields:       false,
		WeaklyTypedInput: true, // allow e.g. string -> int coercion from query params
		Result:           target,
		TagName:          "cqrs",
		Squash:           true,
		DecodeHook:       unmarshalDecodeHook,
	})
	if err != nil {
		return err
	}
	err = d.Decode(intermediary)
	if err != nil {
		return err
	}
	return nil
}
// unmarshalDecodeHook lets mapstructure delegate decoding of struct targets
// to types implementing Binder (e.g. value objects wrapping a scalar).
// Non-struct targets and non-Binder structs pass through unchanged.
func unmarshalDecodeHook(from reflect.Type, to reflect.Type, data interface{}) (interface{}, error) {
	if to.Kind() != reflect.Struct {
		return data, nil
	}
	target, ok := reflect.New(to).Interface().(Binder)
	if !ok {
		return data, nil
	}
	// BUG FIX: the error returned by Bind was previously discarded, so
	// malformed input was silently accepted. Propagate it so Decode fails.
	if err := target.Bind(data); err != nil {
		return nil, err
	}
	return target, nil
}
// Binder should be implemented on structs with embedded scalar
// to aid binding, such a value objects. Bind receives the raw decoded
// value and should populate the receiver, returning an error on
// malformed input.
type Binder interface {
	Bind(interface{}) error
}
// bindJSON merges the JSON request body into target when the request
// declares a JSON content type; any other content type is ignored.
func bindJSON(c *gin.Context, target *map[string]interface{}) error {
	if c.ContentType() != "application/json" {
		return nil
	}
	return c.BindJSON(target)
}
// bindQuery merges the request's query-string parameters into target.
func bindQuery(c *gin.Context, target *map[string]interface{}) error {
	query := c.Request.URL.Query()
	return bindURLValues(query, target)
}
// bindForm parses the request body as a form and merges the POST form
// values into target.
func bindForm(c *gin.Context, target *map[string]interface{}) error {
	err := c.Request.ParseForm()
	if err != nil {
		return err
	}
	return bindURLValues(c.Request.PostForm, target)
}
func bindURLValues(vals url.Values, target *map[string]interface{}) error {
for key, val := range vals {
if len(val) == 0 {
continue
} else if len(val) > 1 {
(*target)[key] = val
} else {
(*target)[key] = val[0]
}
}
return nil
}
// bindURI merges the route's URI parameters (e.g. /users/:id) into target.
func bindURI(c *gin.Context, target *map[string]interface{}) error {
	params := c.Params
	for _, param := range params {
		(*target)[param.Key] = param.Value
	}
	return nil
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.