text stringlengths 1 1.05M |
|---|
/*
* Copyright 2016 Exorath
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.exorath.exodata;
import com.exorath.exodata.api.ExoDatabase;
import com.github.fakemongo.Fongo;
import com.mongodb.MongoClient;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
/**
 * Unit tests for {@link ExoDatabase}, backed by an in-memory Fongo mongo server.
 *
 * Created by toonsev on 8/22/2016.
 */
public class ExoDatabaseTest {
    private static final String DB_NAME = "testdb";

    private Fongo fongo;
    private MongoClient client;
    private ExoDatabase database;

    @Before
    public void setup() {
        // Fresh in-memory server per test so cases cannot interfere.
        fongo = new Fongo("mongo server 1");
        client = fongo.getMongo();
        database = ExoDatabase.create(client.getDatabase(DB_NAME));
    }

    @Test
    public void getCollectionNotNullTest() {
        assertNotNull(database.getCollection("testcoll"));
    }

    @Test
    public void getCollectionEmitsItemTest() {
        // The observable returned by getCollection must emit at least one item.
        assertNotNull(database.getCollection("testcoll").toBlocking().first());
    }

    @Test
    public void databaseNotCreatedByDefaultTest() {
        long matches = client.getUsedDatabases().stream()
                .filter(db -> db.getName().equals(DB_NAME))
                .count();
        assertEquals(0, matches);
    }

    @Test
    public void getCollectionCreatesDatabaseTest() {
        // Consuming the first emission should lazily create the database.
        database.getCollection("testcoll").toBlocking().first();
        long matches = client.getUsedDatabases().stream()
                .filter(db -> db.getName().equals(DB_NAME))
                .count();
        assertEquals(1, matches);
    }

    @Test
    public void getMongoDatabaseNameEqualsTheActualMongoDatabaseName() {
        assertEquals(client.getDatabase(DB_NAME).getName(), database.getMongoDatabase().getName());
    }
}
|
-- Fetch all AC entries for batch 1010 in branch 030 on the given transaction date.
-- NOTE(review): TRN_DT is compared against the string literal '31/08/2012'; this
-- relies on the session date format being DD/MM/YYYY — consider an explicit
-- TO_DATE(..., 'DD/MM/YYYY') — TODO confirm.
select * from xafnfc.acvw_all_ac_entries c where c.BATCH_NO = 1010 and c.AC_BRANCH = '030' and c.TRN_DT = '31/08/2012'
|
import java.util.*;
import bnb.Configuration;
import bnb.Dataset;
import bnb.types.*;
import ilog.concert.IloException;
import ilog.concert.IloIntVar;
import ilog.concert.IloLinearNumExpr;
import ilog.concert.IloNumVar;
import ilog.cplex.IloCplex;
import static bnb.Dataset.Core.saveDataAsFile;
public class Main {
static Dataset data = new Dataset();
//import Data
static int I = data.I;
static int J = data.J;
static int T = data.T;
static int[] G = data.G;
static int[][] L = data.L;
// MAIN: BnB framework
// Branch-and-bound driver. Builds the root node (no y branched yet), solves the
// splitting-variable subproblem per scenario, derives an initial incumbent by
// rounding the scenario-averaged solution, then processes the node list until
// it is exhausted. Prints the best objective value and the elapsed time.
public static void main(String[] args) {
    final long timeStart = System.currentTimeMillis();
    //compute matrix mapping tree nodes to the scenarios that contain them
    int[][][] n = data.numberScenarioWithNode( );
    //best solution found so far (the incumbent)
    SolutionOriginal bestSol = new SolutionOriginal();
    //Initialize origin node: branched_y == 0 means "not branched yet"
    BnBNode OriginNode = new BnBNode(T, G);
    OriginNode.branched_y = new int[T][][][];
    for(int t=0; t<T; t++){
        OriginNode.branched_y[t] = new int[G[t]][][];
        for(int g=0; g<G[t]; g++){
            OriginNode.branched_y[t][g] = new int[L[t][g]][I];
            for(int l=0; l<L[t][g]; l++){
                for(int i=0; i<I; i++){
                    OriginNode.branched_y[t][g][l][i] = 0;
                }//for i
            }//for l
        }//for g
    }//for t
    //Solve Q_s^{SV-NA} for all scenarios s (indexed by the leaves of period T-1)
    SolutionScenario[][] resultScenario = new SolutionScenario[G[T-1]][];
    for(int g=0; g<G[T-1]; g++){
        resultScenario[g] = new SolutionScenario[L[T-1][g]];
        for(int l=0; l<L[T-1][g]; l++){
            resultScenario[g][l] = splittingVariable(data.ScenarioTree[g][l], OriginNode.branched_y);
        }//for l
    }//for g
    //Average the scenario solutions and round to obtain a feasible solution of
    //the original problem (this yields the first incumbent)
    SolutionOriginal result_orig = avgSolution(resultScenario); //no objective value yet
    SolutionOriginal input_rounded = roundY(result_orig); //no objective value yet
    SolutionOriginal result_rounded = originalModel(input_rounded.yval);
    bestSol = result_rounded;
    //Initialize upper bound F^U with the incumbent's objective value
    double F_U = result_rounded.result;
    OriginNode.sols = resultScenario;
    //Branch the root node k=0
    BnBNode[] ChildNodes = new BnBNode[2];
    ChildNodes = branch(OriginNode);
    //Start the BnB node list with the two children of the origin node
    Collection <BnBNode> BnBNodes = new HashSet<BnBNode>();
    BnBNodes.add(ChildNodes[0]);
    BnBNodes.add(ChildNodes[1]);
    //Branch and Bound main loop
    while(BnBNodes.isEmpty() == false){
        //Choose next node (HashSet iteration order is arbitrary)
        BnBNode node = BnBNodes.iterator().next();
        //Re-solve every scenario that contains the freshly branched y variable
        int[][] s_vt = data.scenarioContainingNode(node.current_y[0], node.current_y[1], node.current_y[2], n);
        int si=0;
        boolean callfathom = true;
        while(si<s_vt.length){
            int g = s_vt[si][0];
            int l = s_vt[si][1];
            node.sols[g][l] = splittingVariable(data.ScenarioTree[g][l],node.branched_y);
            //if one subproblem is infeasible the whole node is discarded
            if(node.sols[g][l].solvable == false){
                callfathom = false;
                break;
            }
            si = si + 1;
        }
        //Lower bound of this node: sum of all scenario objectives
        double s_result = 0;
        for(int g=0; g<G[T-1]; g++){
            for(int l=0; l<L[T-1][g]; l++){
                s_result = s_result + node.sols[g][l].result;
            }
        }
        //Check if node must be fathomed / branch further otherwise
        if(callfathom == true){
            Fathomer fathom = fathom(node, F_U, s_result);
            F_U = fathom.F_U;
            if(fathom.fathom == false){
                ChildNodes = branch(node);
                BnBNodes.add(ChildNodes[0]);
                BnBNodes.add(ChildNodes[1]);
            }
            //NOTE(review): fathom.sol may be null when the node was fathomed by
            //bound alone (s_result >= F_U), which overwrites bestSol with null
            //— TODO confirm this is intended.
            bestSol = fathom.sol;
        }
        //Remove the processed node from the list
        BnBNodes.remove(node);
    }
    final long timeEnd = System.currentTimeMillis();
    System.out.println("Optimal objective value: " + F_U);
    System.out.println("Laufzeit: " + (timeEnd-timeStart)+" millisek.");
    saveDataAsFile(data, Configuration.dataFilePath);
}
//solve Q with fixed y
//Solves the original (extensive-form) problem Q with y fixed to the given
//values: only the continuous assignment variables x are optimized; the fixed
//cost of the given y is added to the objective afterwards. On a CPLEX error
//the result carries result == 0 and xval == null (yval keeps the input).
public static SolutionOriginal originalModel(double[][][][] y){
    SolutionOriginal result = new SolutionOriginal();
    result.yval = y;
    try{
        IloCplex cplex = new IloCplex();
        cplex.setOut(null); //silence the solver log
        //DVAR: x[t][g][l][i][j] >= 0, continuous, per tree node (t,g,l)
        IloNumVar[][][][][] x = new IloNumVar[T][][][][];
        for(int t=0; t<T; t++){
            x[t] = new IloNumVar[G[t]][][][];
            for(int g=0; g<G[t]; g++){
                x[t][g] = new IloNumVar[L[t][g]][I][J];
                for(int l=0; l<L[t][g]; l++){
                    for (int i=0; i<I;i++){
                        x[t][g][l][i] = cplex.numVarArray(J, 0, Double.MAX_VALUE);
                    }//for i
                }//for l
            }
        }
        //OBJ: probability-weighted assignment costs summed over all tree nodes
        IloLinearNumExpr[][][] exprobj = new IloLinearNumExpr[T][][];
        IloLinearNumExpr summedobj = cplex.linearNumExpr();
        for(int t=0; t<T; t++){
            exprobj[t] = new IloLinearNumExpr[G[t]][];
            for(int g=0; g<G[t]; g++){
                exprobj[t][g] = new IloLinearNumExpr[L[t][g]];
                for(int l=0; l<L[t][g]; l++){
                    //for all nodes in Tree:
                    exprobj[t][g][l] = cplex.linearNumExpr();
                    for (int i=0; i<I;i++){
                        for(int j=0; j<J; j++){
                            exprobj[t][g][l].addTerm(data.DataTree[t][g][l].P*data.DataTree[t][g][l].alpha[i][j], x[t][g][l][i][j]);
                        }
                    }
                    summedobj.add(exprobj[t][g][l]);
                }//end l = L[t][g]
            }
        }
        cplex.addMinimize(summedobj); //Min
        //S.T.
        for(int t=0; t<T; t++){
            for(int g=0; g<G[t]; g++){
                for(int l=0; l<L[t][g]; l++){
                    //x <= d*y: i can only serve j when the given y opens i
                    for(int i=0; i<I; i++){
                        for(int j=0; j<J; j++){
                            cplex.addLe(x[t][g][l][i][j], data.DataTree[t][g][l].beq[j]*y[t][g][l][i]);
                        }
                    }
                    //sum(i) x = d; forall j
                    //NOTE(review): the explicit 4-term sum hard-codes I == 4 — TODO confirm
                    for(int j=0; j<J; j++){
                        cplex.addEq(cplex.sum(x[t][g][l][0][j],x[t][g][l][1][j],x[t][g][l][2][j],x[t][g][l][3][j]), data.DataTree[t][g][l].beq[j]);
                    }
                }
            }
        }
        //Solve; NOTE(review): the boolean return of solve() is ignored, and
        //getObjValue() throws if no solution exists — TODO confirm feasibility
        //is guaranteed by the caller.
        cplex.solve();
        double objval = cplex.getObjValue();
        //add the fixed-cost contribution of the given y to the objective
        for(int t=0; t<T; t++){
            for(int g=0; g<G[t]; g++){
                for(int l=0; l<L[t][g]; l++){
                    //for all nodes in Tree:
                    for (int i=0; i<I;i++){
                        objval = objval + data.DataTree[t][g][l].P*data.DataTree[t][g][l].beta[i]*y[t][g][l][i];
                    }
                }//end l = L[t][g]
            }//end g = G[t]
        }//end t = T
        result.result = objval;
        //copy the optimal x values out of the solver
        result.xval = new double[T][][][][];
        for(int t=0; t<T; t++){
            result.xval[t] = new double[G[t]][][][];
            for(int g=0; g<G[t]; g++){
                result.xval[t][g] = new double[L[t][g]][I][J];
                for(int l=0; l<L[t][g]; l++){
                    for(int i=0; i<I; i++){
                        for(int j=0; j<J; j++){
                            result.xval[t][g][l][i][j] = cplex.getValue(x[t][g][l][i][j]);
                        }
                    }
                }
            }
        }
        return result;
    }
    catch(IloException exc){
        System.err.println("Concert exception caught:" + exc);
        result.result = 0;
        result.xval = null;
        return result;
    }
}
//solve Q_s^SV-NA for a scenario s
/**
 * Solves the splitting-variable subproblem Q_s^{SV-NA} for one scenario.
 *
 * Builds a small MIP (continuous x, binary y) over the scenario's horizon,
 * fixes every y that was already branched on (code 1 -> fix to 0, code 2 ->
 * fix to 1) by walking from the scenario's leaf back to the root, solves it,
 * and copies the solution out. On a CPLEX error the result carries
 * result == 0 and null xval/yval.
 *
 * @param s          scenario data (probabilities, costs, demands, tree index)
 * @param branched_y branching state per tree node and facility
 * @return solution with solvable flag, objective value, x and y values
 */
public static SolutionScenario splittingVariable(FullScenario s, int[][][][] branched_y){
    SolutionScenario result = new SolutionScenario();
    try{
        IloCplex cplex = new IloCplex();
        cplex.setOut(null); // silence the solver log
        //DVAR: x[t][i][j] >= 0 continuous, y[t][i] binary
        IloNumVar[][][] x = new IloNumVar[T][I][J];
        IloIntVar[][] y = new IloIntVar[T][I];
        for(int t=0; t<T; t++){
            for (int i=0; i<I;i++){
                x[t][i] = cplex.numVarArray(J, 0, Double.MAX_VALUE);
            }
            y[t] = cplex.boolVarArray(I);
        }
        //OBJ: scenario probability times assignment costs plus opening costs
        IloLinearNumExpr exprobj = cplex.linearNumExpr();
        for(int t=0; t<T; t++){
            for(int i=0; i<I; i++){
                for(int j=0; j<J; j++){
                    exprobj.addTerm(s.P_s*s.alpha_s[t][i][j], x[t][i][j]);
                }
                exprobj.addTerm(s.P_s*s.beta_s[t][i], y[t][i]);
            }
        }
        cplex.addMinimize(exprobj); //Min
        //S.T.
        //fix branched y, walking from the scenario's leaf node to the root
        int tt = T-1;
        int gt = s.index[T-1][1];
        int lt = s.index[T-1][2];
        while(tt>=0){
            for(int i=0; i<I; i++){
                if(branched_y[tt][gt][lt][i]==1){
                    cplex.addEq(y[tt][i], 0);
                }else if(branched_y[tt][gt][lt][i]==2){
                    cplex.addEq(y[tt][i], 1);
                }
            }
            int[] pre = Dataset.Predecessor(tt, gt, L);
            tt = pre[0];
            gt = pre[1];
            lt = pre[2];
        }
        for(int t=0; t<T; t++){
            //x[t][i][j] <= d[t][j] * y[t][i]: i can only serve j when open
            for(int i=0; i<I; i++){
                for(int j=0; j<J; j++){
                    cplex.addLe(cplex.diff(x[t][i][j], cplex.prod(s.beq_s[t][j], y[t][i])), 0);
                }
            }
            //sum(i) x = d; forall j
            //NOTE(review): the explicit 4-term sum hard-codes I == 4 — TODO confirm
            for(int j=0; j<J; j++){
                cplex.addEq(cplex.sum(x[t][0][j],x[t][1][j],x[t][2][j],x[t][3][j]), s.beq_s[t][j]);
            }
        }
        //time period overlapping constraints: y is non-decreasing over time
        for(int t=1; t<T; t++){
            for(int i=0; i<I; i++){
                cplex.addGe(cplex.diff(y[t][i], y[t-1][i]), 0);
            }
        }
        //Solve
        result.solvable = cplex.solve();
        double objval = cplex.getObjValue();
        //copy solution values out of the solver
        result.xval = new double[T][I][J];
        result.yval = new double[T][I];
        for(int t=0; t<T; t++){
            for(int i=0; i<I; i++){
                //BUGFIX: copy all J entries; the length was J-1, which silently
                //dropped the last customer column from every x row.
                System.arraycopy(cplex.getValues(x[t][i]), 0, result.xval[t][i], 0, J);
                result.yval[t][i] = Math.round(cplex.getValue(y[t][i]));
            }
        }
        result.result = objval;
        return result;
    }
    catch(IloException exc){
        System.err.println("Concert exception caught:" + exc);
        result.result = 0;
        result.xval = null;
        result.yval = null;
        return result;
    }
}
//weighted average solution
//Computes the probability-weighted average of the per-scenario solutions at
//every tree node (the conditional expectation of x and y given the node).
//No objective value is computed here.
public static SolutionOriginal avgSolution(SolutionScenario[][] sol_s){
    SolutionOriginal avgSolution = new SolutionOriginal();
    //allocate x/y containers matching the scenario-tree shape
    avgSolution.xval = new double[T][][][][];
    avgSolution.yval = new double[T][][][];
    for(int t=0; t<T; t++){
        avgSolution.xval[t] = new double[G[t]][][][];
        avgSolution.yval[t] = new double[G[t]][][];
        for(int g=0; g<G[t]; g++){
            avgSolution.xval[t][g] = new double[L[t][g]][I][J];
            avgSolution.yval[t][g] = new double[L[t][g]][I];
        }
    }
    int[][][] n_matrix = data.numberScenarioWithNode( );
    for(int t=0; t<T; t++){
        for(int g=0; g<G[t]; g++){
            for(int l=0; l<L[t][g]; l++){
                //scenarios passing through tree node (t,g,l)
                int[][] s_vt = data.scenarioContainingNode(t, g, l, n_matrix);
                int s_index = s_vt.length;
                //P_summed: total probability of those scenarios
                //(could be replaced by dataTree[t][g][l].P)
                double P_summed = 0;
                for(int s=0; s<s_index; s++){
                    P_summed += data.ScenarioTree[s_vt[s][0]][s_vt[s][1]].P_s;
                }
                //accumulate the probability-weighted average over the scenarios
                for(int s=0; s<s_index; s++){
                    for(int i=0; i<I; i++){
                        for(int j=0; j<J; j++){
                            avgSolution.xval[t][g][l][i][j] += (data.ScenarioTree[s_vt[s][0]][s_vt[s][1]].P_s*sol_s[s_vt[s][0]][s_vt[s][1]].xval[t][i][j])/P_summed;
                        }
                        avgSolution.yval[t][g][l][i] += (data.ScenarioTree[s_vt[s][0]][s_vt[s][1]].P_s*sol_s[s_vt[s][0]][s_vt[s][1]].yval[t][i])/P_summed;
                    }
                }
            }
        }
    }
    return avgSolution;
}
//round y
/**
 * Rounds the fractional y values of an averaged solution up to integers and
 * restores monotonicity over time: whenever a rounded value would fall below
 * its predecessor node's value, it is forced to 1. Mutates and returns the
 * given solution (no new object is allocated).
 */
public static SolutionOriginal roundY (SolutionOriginal s){
    for(int t=0; t<T; t++){
        for(int g=0; g<G[t]; g++){
            for(int l=0; l<L[t][g]; l++){
                for(int i=0; i<I; i++){
                    double rounded = Math.ceil(s.yval[t][g][l][i]);
                    s.yval[t][g][l][i] = rounded;
                    if(t > 0){
                        // period t-1 was fully processed already, so the
                        // predecessor value is final at this point
                        int[] pre = Dataset.Predecessor(t, g, L);
                        if(rounded < s.yval[pre[0]][pre[1]][pre[2]][i]){
                            s.yval[t][g][l][i] = 1;
                        }
                    }
                }
            }
        }
    }
    return s;
}
//branch node k
/**
 * Branches a parent node into two children on the not-yet-branched y variable
 * whose averaged value lies closest to 0.5. Child 0 fixes that variable to 0
 * (code 1), child 1 fixes it to 1 (code 2).
 *
 * NOTE(review): if no fractional unbranched variable exists, current_y keeps
 * its default values and the children re-fix that default variable — TODO
 * confirm this cannot occur when branch() is called.
 */
public static BnBNode[] branch(BnBNode parentNode){
    BnBNode[] childNodes = new BnBNode[2];
    childNodes[0] = new BnBNode(T, G);
    childNodes[1] = new BnBNode(T, G);
    //BUGFIX: each child needs its OWN copy of branched_y. Previously both
    //children (and the parent) shared one array, so writing code 1 for child
    //0 and code 2 for child 1 below clobbered each other — both children
    //ended up with the same (last-written) branching state.
    childNodes[0].branched_y = copyBranchedY(parentNode.branched_y);
    childNodes[1].branched_y = copyBranchedY(parentNode.branched_y);
    double dist_smallest = 0.5;
    SolutionOriginal S_orig = avgSolution(parentNode.sols);
    //pick the unbranched y closest to 0.5 (most fractional)
    for(int t=0; t<T; t++){
        for(int g=0; g<G[t]; g++){
            for(int l=0; l<L[t][g]; l++){
                for(int i=0; i<I; i++){
                    if(parentNode.branched_y[t][g][l][i] == 0){
                        if(Math.abs(S_orig.yval[t][g][l][i]-0.5) < dist_smallest){
                            dist_smallest = Math.abs(S_orig.yval[t][g][l][i]-0.5);
                            childNodes[0].current_y[0] = t;
                            childNodes[0].current_y[1] = g;
                            childNodes[0].current_y[2] = l;
                            childNodes[0].current_y[3] = i;
                            childNodes[1].current_y[0] = t;
                            childNodes[1].current_y[1] = g;
                            childNodes[1].current_y[2] = l;
                            childNodes[1].current_y[3] = i;
                        }
                    }
                }
            }
        }
    }
    //BUGFIX: give each child its own scenario-solution grid. The main loop
    //replaces entries wholesale (node.sols[g][l] = ...), so a structural copy
    //of the arrays is enough — the SolutionScenario objects may stay shared.
    childNodes[0].sols = copySols(parentNode.sols);
    childNodes[1].sols = copySols(parentNode.sols);
    //fix the chosen y: child 0 -> 0 (code 1), child 1 -> 1 (code 2)
    childNodes[0].branched_y[childNodes[0].current_y[0]][childNodes[0].current_y[1]][childNodes[0].current_y[2]][childNodes[0].current_y[3]] = 1;
    childNodes[1].branched_y[childNodes[1].current_y[0]][childNodes[1].current_y[1]][childNodes[1].current_y[2]][childNodes[1].current_y[3]] = 2;
    return childNodes;
}

/** Creates a fully independent copy of a branched_y array. */
private static int[][][][] copyBranchedY(int[][][][] src){
    int[][][][] copy = new int[src.length][][][];
    for(int t=0; t<src.length; t++){
        copy[t] = new int[src[t].length][][];
        for(int g=0; g<src[t].length; g++){
            copy[t][g] = new int[src[t][g].length][];
            for(int l=0; l<src[t][g].length; l++){
                copy[t][g][l] = src[t][g][l].clone();
            }
        }
    }
    return copy;
}

/** Copies the scenario-solution grid structure (elements stay shared). */
private static SolutionScenario[][] copySols(SolutionScenario[][] src){
    SolutionScenario[][] copy = new SolutionScenario[src.length][];
    for(int g=0; g<src.length; g++){
        copy[g] = src[g].clone();
    }
    return copy;
}
/**
 * Decides whether a BnB node is fathomed and updates the upper bound.
 *
 * Cases: (1) node lower bound >= incumbent -> fathom by bound (F.sol stays
 * null); (2) y non-anticipative and x non-anticipative -> optimal for this
 * subtree, accept and fathom; (3) only y non-anticipative -> re-solve Q with
 * y fixed for a candidate incumbent, keep branching; (4) y fractional ->
 * round and re-solve for a candidate incumbent, keep branching.
 *
 * @param node     node to test
 * @param F_U      current global upper bound
 * @param s_result node lower bound (sum of scenario objectives)
 * @return fathom decision, possibly improved bound, and candidate solution
 */
public static Fathomer fathom(BnBNode node, double F_U, double s_result){
    Fathomer F = new Fathomer();
    F.fathom = false;
    F.F_U = F_U;
    if(s_result >= F_U){
        //fathom by bound: this node cannot beat the incumbent
        F.fathom = true;
    }else{
        //PERF: evaluate non-anticipativity once; the original called nonant()
        //(which re-averages every scenario solution) up to twice per node.
        boolean[] na = nonant(node);
        if(na[1]){
            if(na[0]){
                //fully non-anticipative: the averaged solution is feasible and
                //optimal for this subtree — accept it and fathom
                F.F_U = s_result;
                F.sol = avgSolution(node.sols); //TODO confirm this is the right solution object
                F.fathom = true;
            }else{
                //y integral but x disagrees across scenarios: fix y and
                //re-solve the original model for a candidate incumbent
                SolutionOriginal sol_vt = avgSolution(node.sols);
                SolutionOriginal sol_fixed = originalModel(sol_vt.yval);
                F.sol = sol_fixed;
                if(sol_fixed.result < F_U){
                    F.F_U = sol_fixed.result;
                }
                F.fathom = false;
            }
        }else{
            //fractional y: round it up and re-solve for a candidate incumbent
            SolutionOriginal sol_vt = avgSolution(node.sols);
            SolutionOriginal sol_rounded = roundY(sol_vt);
            SolutionOriginal complete_sol_rounded = originalModel(sol_rounded.yval);
            F.sol = complete_sol_rounded;
            if(complete_sol_rounded.result < F_U){
                F.F_U = complete_sol_rounded.result;
            }
            F.fathom = false;
        }
    }
    return F;
}
/**
 * Checks non-anticipativity of the averaged node solution.
 *
 * @return {xSatisfied, ySatisfied}: index 1 is true when every averaged y is
 *         integral (scenarios agree on y); index 0 is true when every
 *         scenario's x matches the average exactly at every shared tree node.
 */
public static boolean[] nonant(BnBNode node){
    boolean[] nonant = new boolean[2]; // [0] = x, [1] = y
    //BUGFIX: both flags must start true. nonant[0] previously kept its
    //default value false and was never set to true, so the x check could
    //never pass and fathom() never took the "fully non-anticipative" path.
    nonant[0] = true;
    nonant[1] = true;
    int[][][] n_matrix = data.numberScenarioWithNode( );
    SolutionOriginal sol_avg = avgSolution(node.sols);
    //check non-anticipativity for y: a fractional average means disagreement
    yCheck:
    for(int t=0; t<T; t++){
        for(int g=0; g<G[t]; g++){
            for(int l=0; l<L[t][g]; l++){
                for(int i=0; i<I; i++){
                    if(sol_avg.yval[t][g][l][i]<1 && sol_avg.yval[t][g][l][i]>0){
                        nonant[1] = false;
                        //labeled break: the old plain break only left the
                        //innermost loop and kept scanning the rest
                        break yCheck;
                    }
                }
            }
        }
    }
    //check non-anticipativity for x: every scenario sharing a node must
    //match the average. NOTE(review): exact double comparison — a tolerance
    //may be intended here; TODO confirm.
    xCheck:
    for(int t=0; t<T; t++){
        for(int g=0; g<G[t]; g++){
            for(int l=0; l<L[t][g]; l++){
                int[][] s_vt = data.scenarioContainingNode(t, g, l, n_matrix);
                for(int s=0; s<s_vt.length; s++){
                    for (int i = 0; i < I; i++) {
                        for (int j = 0; j < J; j++) {
                            if(sol_avg.xval[t][g][l][i][j] != node.sols[s_vt[s][0]][s_vt[s][1]].xval[t][i][j]){
                                nonant[0] = false;
                                break xCheck;
                            }
                        }
                    }
                }
            }
        }
    }
    return nonant;
}
}
|
import "./styles/style.css";
export { Login } from "./registration/login";
export { Register } from "./registration/registration";
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'display_window_UI.ui'
#
# Created by: PyQt5 UI code generator 5.10
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Auto-generated (pyuic5) UI definition for the display window.

    Layout: a read-only text browser on top, a single-line input box below
    it, and a Clear / Input / Quit button row at the bottom. Do not edit by
    hand beyond comments — regenerate from display_window_UI.ui instead.
    """

    def setupUi(self, MainWindow):
        """Build the widget tree and attach it to ``MainWindow``."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1280, 720)
        font = QtGui.QFont()
        font.setFamily("微软雅黑")  # Microsoft YaHei UI font (runtime string)
        MainWindow.setFont(font)
        MainWindow.setDockNestingEnabled(False)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # single-line input field under the display area
        self.input_txtbox = QtWidgets.QLineEdit(self.centralwidget)
        self.input_txtbox.setGeometry(QtCore.QRect(10, 610, 320, 26))
        self.input_txtbox.setObjectName("input_txtbox")
        # bottom button row: Clear / Input / Quit
        self.clear_button = QtWidgets.QPushButton(self.centralwidget)
        self.clear_button.setGeometry(QtCore.QRect(10, 660, 100, 30))
        self.clear_button.setObjectName("clear_button")
        self.input_button = QtWidgets.QPushButton(self.centralwidget)
        self.input_button.setGeometry(QtCore.QRect(120, 660, 100, 30))
        self.input_button.setObjectName("input_button")
        self.quit_button = QtWidgets.QPushButton(self.centralwidget)
        self.quit_button.setGeometry(QtCore.QRect(230, 660, 100, 30))
        self.quit_button.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.quit_button.setObjectName("quit_button")
        # read-only display area filling most of the window
        self.display_txtbox = QtWidgets.QTextBrowser(self.centralwidget)
        self.display_txtbox.setGeometry(QtCore.QRect(10, 10, 320, 591))
        self.display_txtbox.setObjectName("display_txtbox")
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Assign all user-visible strings (supports Qt retranslation)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        # placeholder text is Chinese for "type here" (runtime string)
        self.input_txtbox.setText(_translate("MainWindow", "在这里输入"))
        self.clear_button.setText(_translate("MainWindow", "Clear"))
        self.input_button.setText(_translate("MainWindow", "Input"))
        self.quit_button.setText(_translate("MainWindow", "Quit"))
        self.display_txtbox.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
            "<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
            "p, li { white-space: pre-wrap; }\n"
            "</style></head><body style=\" font-family:\'微软雅黑\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
            "<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
|
def equilateral_triangle_area(a):
    """Return the area of an equilateral triangle with side length ``a``.

    Uses the closed form (sqrt(3) / 4) * a**2.

    :param a: side length (any real number; negative values still square).
    :return: the triangle's area as a float.
    """
    # BUGFIX: `math` was used without any visible import, raising NameError
    # on first call; a function-local import keeps the snippet self-contained.
    import math

    return (math.sqrt(3) / 4) * a**2
area = equilateral_triangle_area(a) |
#!/bin/sh
# Install the go-swagger CLI into ./bin and fetch Go lint tooling.

# Resolve the latest go-swagger release asset matching this OS
# (linux/darwin, amd64 only) via the GitHub releases API.
download_url=$(curl -s https://api.github.com/repos/go-swagger/go-swagger/releases/latest | \
jq -r '.assets[] | select(.name | contains("'"$(uname | tr '[:upper:]' '[:lower:]')"'_amd64")) | .browser_download_url')
if [ ! -d "bin" ]; then
mkdir bin
fi
# Download the binary (-L follows GitHub's redirect, '#' shows a progress bar).
curl -o bin/swagger -L'#' "$download_url"
chmod +x bin/swagger
# Install go binaries (GO111MODULE=on forces module-aware `go get`).
GO111MODULE=on go get golang.org/x/lint/golint
GO111MODULE=on go get github.com/golangci/golangci-lint/cmd/golangci-lint
GO111MODULE=on go get github.com/gordonklaus/ineffassign
const FS = require("fs");
const REGEXP_ORDER_PRIORITY = /^\$/;
exports.iterateLine = function(buffer, iterator){
const lines = buffer.toString().split(/\r?\n/g);
const newLines = [];
for(let i = 0; i < lines.length; i++){
const R = iterator(lines[i], i, lines);
if(R === undefined){
continue;
}
if(R instanceof Array){
newLines.push(...R);
}else{
newLines.push(R);
}
}
return newLines.join('\r\n');
};
exports.orderByKey = function([ a ], [ b ]){
return a.replace(REGEXP_ORDER_PRIORITY, "__").localeCompare(b.replace(REGEXP_ORDER_PRIORITY, "__"));
};
exports.withComma = function([ , v ], i, my){
let R = v.join('\r\n');
if(i === my.length - 1){
if(R.endsWith(',')){
R = R.slice(0, R.length - 1);
}
}else{
if(!R.endsWith(',')){
R += ',';
}
}
return R;
}
/**
 * Inserts a new top-level entry into the JSON-like data file at `path` and
 * rewrites the file with all top-level entries sorted by key.
 *
 * The file is parsed line-by-line (not with JSON.parse) so formatting of the
 * existing entries is preserved verbatim.
 *
 * @param {string} path      file to rewrite in place
 * @param {string} name      key of the new entry
 * @param {...string} contents raw lines of the new entry's value
 * @throws if a line belongs to no recognizable top-level entry
 */
exports.addToDataJSON = function(path, name, ...contents){
    const table = {};   // top-level key -> array of raw lines
    let current;        // key whose lines are currently being accumulated
    let indent = 0;     // indentation width of top-level keys
    FS.writeFileSync(path, exports.iterateLine(FS.readFileSync(path), (v, i) => {
        if(v === "{"){
            return v; // opening brace of the whole file, keep as-is
        }
        if(v === "}"){
            // end of file: append the new entry, then emit everything sorted
            table[name] = contents.map(v => " ".repeat(indent) + v);
            return [
                ...Object.entries(table).sort(exports.orderByKey).map(exports.withComma),
                "}"
            ];
        }
        // match a top-level `"key": {` / `"key": [` opener; group 3 captures
        // an immediate closer for one-line entries like `"key": {}`
        const chunk = v.match(/^(\s+)"([$@\w-]+)": (?:\{|\[)(\},?|\],?|$)/);
        if(!current && chunk){
            indent = chunk[1].length;
            if(chunk[3]){
                // one-line entry — store it verbatim
                table[chunk[2]] = [ v ];
                return;
            }else{
                current = chunk[2];
                table[current] = [];
            }
        }
        if(!current){
            // line belongs to no entry: file is malformed (message is Korean:
            // "unnecessary data is present"; runtime string, kept as-is)
            throw Error(`불필요한 정보가 들어가 있습니다: #${i}: ${v}`);
        }
        table[current].push(v);
        // a closing brace/bracket at entry indentation ends the current entry
        if(v.startsWith(" ".repeat(indent) + "}") || v.startsWith(" ".repeat(indent) + "]")){
            current = null;
        }
    }));
};
import { Request, Response, NextFunction } from 'express';
// Pagination state attached to every request by the paginateResponse
// middleware below.
interface Pagination {
  perPage: number,            // page size (default 50)
  currentPage: number,        // 1-based page index (default 1)
  totalPages: number,         // total number of pages (default 1)
  totalDocumentCount: number, // total matching documents (default 1)
}
// Augment Express's Request type so `req.pagination` is known to TypeScript
// in every downstream handler.
declare global {
  namespace Express {
    interface Request {
      pagination: Pagination;
    }
  }
}
export const paginateResponse = (req: Request, res: Response, next: NextFunction) => {
req.pagination = {
perPage: 50,
currentPage: 1,
totalPages: 1,
totalDocumentCount: 1
}
if (req.query.perPage) {
//@ts-ignore
req.pagination.perPage = parseInt(req.query.perPage);
}
if (req.query.currentPage) {
//@ts-ignore
req.pagination.currentPage = parseInt(req.query.currentPage);
}
next()
} |
<filename>app/src/main/java/com/rostdev/survivalpack/ui/main/MainContract.java<gh_stars>0
package com.rostdev.survivalpack.ui.main;
import android.content.Intent;
import android.support.v4.app.Fragment;
import com.rostdev.survivalpack.mvp.MvpPresenter;
import com.rostdev.survivalpack.mvp.MvpView;
/**
 * MVP contract for the main screen: pairs the operations the presenter may
 * invoke on the view with the callbacks the view forwards to the presenter.
 *
 * Created by Rosty on 7/5/2016.
 */
public class MainContract {
    /** Operations the presenter may invoke on the main view. */
    interface View extends MvpView {
        /** Shows the given fragment as the main content. */
        void showFragment(Fragment fragment);
        /** Applies the given primary/dark color resources to the UI chrome. */
        void updateColor(int colorRes, int colorDarkRes);
        /** Launches the given intent from the view's context. */
        void startActivity(Intent intent);
    }
    /** Callbacks the view forwards to the presenter. */
    interface Presenter extends MvpPresenter<View> {
        /** Called when the tab with the given id is selected. */
        void onTabSelected(int tabId);
        /** Called when the navigation item with the given id is selected. */
        void onNavSelected(int navId);
    }
}
|
const sharp = require('sharp');
const ErrorHandler = require('../../utils/errorHandler');
/**
 * Resizes the given image data to 250x250 and re-encodes it as PNG.
 *
 * @param {Buffer|string} imageData input accepted by sharp (buffer or path)
 * @returns {Promise<{status: string, imageBuffer?: Buffer}>}
 *   `{status: 'SUCCESS', imageBuffer}` on success; on failure the error is
 *   forwarded to ErrorHandler and `{status: 'ERROR_FOUND'}` is returned
 *   (no exception escapes this function).
 */
const imageCompression = async (imageData) => {
  try {
    const buffer = await sharp(imageData)
      .resize({ width: 250, height: 250 })
      .png()
      .toBuffer();
    return { status: 'SUCCESS', imageBuffer: buffer };
  } catch (ex) {
    ErrorHandler.extractError(ex);
    return { status: 'ERROR_FOUND' };
  }
};
module.exports = { imageCompression };
|
#!/bin/bash
# Think of a method (script, scp, rsync, git, ...) to automate page generation
# (continuous integration) and automatic deployment of the page to production
# after a change is made in the development environment.
# Show the teacher an example of how modifying the page triggers the
# production deployment automatically. (3 points)
echo '######################-M A R K D O W N G E N E R A T O R-############################'
echo 'Scrip de Automatización de ficheros, Ejecutelo en Administrador para no tener posibles fallos.'
# Current date as year-month-day:
time=$(date +%Y-%m-%d)
echo "Nombre de la publicación,(cambia los espacios por '-'):"
read nom
# Create the default front-matter/markdown template to fill in (the long
# echo below writes it to content/$time-$nom.md).
echo "---
date: $time
title: "'"'"$nom"'"'"
cover: "'"'"https://img.utdstc.com/icons/brackets-.png:225"'"'"
categories:
- Escribe aqui la categoria
tags:
- Escribe aqui el tag
---
## NOTE: This "post" is based on [Markdown Cheatsheet](https://github.com/adam-p/markdown-here/wiki/Markdown-Cheatsheet) and is meant to test styling of Markdown generated documents.
This is intended as a quick reference and showcase. For more complete info, see [John Gruber's original spec](http://daringfireball.net/projects/markdown/) and the [Github-flavored Markdown info page](http://github.github.com/github-flavored-markdown/).
This cheatsheet is specifically *Markdown Here's* version of Github-flavored Markdown. This differs slightly in styling and syntax from what Github uses, so what you see below might vary a little from what you get in a *Markdown Here* email, but it should be pretty close.
You can play around with Markdown on our [live demo page](http://www.markdown-here.com/livedemo.html).
(If you're not a Markdown Here user, check out the [Markdown Cheatsheet](./Markdown-Cheatsheet) that is not specific to MDH. But, really, you should also use Markdown Here, because it's awesome. http://markdown-here.com)

##### Table of Contents
[Headers](#headers)
[Emphasis](#emphasis)
[Lists](#lists)
[Links](#links)
[Images](#images)
[Code and Syntax Highlighting](#code)
[Tables](#tables)
[Inline HTML](#html)
[Horizontal Rule](#hr)
[Line Breaks](#lines)
[YouTube Videos](#videos)
<a name="headers"/>
---
__Advertisement :)__
- __[pica](https://nodeca.github.io/pica/demo/)__ - high quality and fast image
resize in browser.
- __[babelfish](https://github.com/nodeca/babelfish/)__ - developer friendly
i18n with plurals support and easy syntax.
You will like those projects!
---
# h1 Heading 8-)
## h2 Heading
### h3 Heading
#### h4 Heading
##### h5 Heading
###### h6 Heading
## Horizontal Rules
___
---
***
## Typographic replacements
Enable typographer option to see result.
(c) (C) (r) (R) (tm) (TM) (p) (P) +-
test.. test... test..... test?..... test!....
!!!!!! ???? ,, -- ---
"Smartypants, double quotes" and 'single quotes'
## Emphasis
**This is bold text**
__This is bold text__
*This is italic text*
_This is italic text_
~~Strikethrough~~
## Blockquotes
> Blockquotes can also be nested...
>> ...by using additional greater-than signs right next to each other...
> > > ...or with spaces between arrows.
## Lists
Unordered
+ Create a list by starting a line with `+`, `-`, or `*`
+ Sub-lists are made by indenting 2 spaces:
- Marker character change forces new list start:
* Ac tristique libero volutpat at
+ Facilisis in pretium nisl aliquet
- Nulla volutpat aliquam velit
+ Very easy!
Ordered
1. Lorem ipsum dolor sit amet
2. Consectetur adipiscing elit
3. Integer molestie lorem at massa
1. You can use sequential numbers...
1. ...or keep all the numbers as `1.`
Start numbering with offset:
57. foo
1. bar
## Code
Inline `code`
Indented code
// Some comments
line 1 of code
line 2 of code
line 3 of code
Block code "fences"
```
Sample text here...
```
Syntax highlighting
``` js
var foo = function (bar) {
return bar++;
};
console.log(foo(5));
```
## Tables
| Option | Description |
| ------ | ----------- |
| data | path to data files to supply the data that will be passed into templates. |
| engine | engine to be used for processing templates. Handlebars is the default. |
| ext | extension to be used for dest files. |
Right aligned columns
| Option | Description |
| ------:| -----------:|
| data | path to data files to supply the data that will be passed into templates. |
| engine | engine to be used for processing templates. Handlebars is the default. |
| ext | extension to be used for dest files. |
## Links
[link text](http://dev.nodeca.com)
[link with title](http://nodeca.github.io/pica/demo/ "title text!")
Autoconverted link https://github.com/nodeca/pica (enable linkify to see)
## Images


Like links, Images also have a footnote style syntax
![Alt text][id]
With a reference later in the document defining the URL location:
[id]: https://octodex.github.com/images/dojocat.jpg "The Dojocat"
## Plugins
The killer feature of `markdown-it` is very effective support of
[syntax plugins](https://www.npmjs.org/browse/keyword/markdown-it-plugin).
### [Emojies](https://github.com/markdown-it/markdown-it-emoji)
> Classic markup: :wink: :crush: :cry: :tear: :laughing: :yum:
>
> Shortcuts (emoticons): :-) :-( 8-) ;)
see [how to change output](https://github.com/markdown-it/markdown-it-emoji#change-output) with twemoji.
### [Subscript](https://github.com/markdown-it/markdown-it-sub) / [Superscript](https://github.com/markdown-it/markdown-it-sup)
- 19^th^
- H~2~O
### [\<ins>](https://github.com/markdown-it/markdown-it-ins)
++Inserted text++
### [\<mark>](https://github.com/markdown-it/markdown-it-mark)
==Marked text==
### [Footnotes](https://github.com/markdown-it/markdown-it-footnote)
Footnote 1 link[^first].
Footnote 2 link[^second].
Inline footnote^[Text of inline footnote] definition.
Duplicated footnote reference[^second].
[^first]: Footnote **can have markup**
and multiple paragraphs.
[^second]: Footnote text.
### [Definition lists](https://github.com/markdown-it/markdown-it-deflist)
Term 1
: Definition 1
with lazy continuation.
Term 2 with *inline markup*
: Definition 2
{ some code, part of Definition 2 }
Third paragraph of definition 2.
_Compact style:_
Term 1
~ Definition 1
Term 2
~ Definition 2a
~ Definition 2b
### [Abbreviations](https://github.com/markdown-it/markdown-it-abbr)
This is HTML abbreviation example.
It converts "HTML", but keep intact partial entries like "xxxHTMLyyy" and so on.
*[HTML]: Hyper Text Markup Language
### [Custom containers](https://github.com/markdown-it/markdown-it-container)
::: warning
*here be dragons*
:::
" > content/$time-$nom.md
# Write the post content (opens the generated file in nano for editing).
nano content/$time-$nom.md
# Upload the files to GitHub (commit message is a runtime string, kept as-is).
sudo git add .
sudo git commit -m "Publicación añadida por el Script de Producción"
sudo git push
# When `npm run deploy` runs, the contents of the public folder are moved to
# the master branch of the repository.
npm run deploy
class Car:
    """A simple vehicle record identified by make, model, year and color."""

    def __init__(self, make, model, year, color):
        """Store the car's identifying attributes as given."""
        self.make = make
        self.model = model
        self.year = year
        self.color = color

    def get_info(self):
        """Return a one-sentence, human-readable description of the car."""
        description = 'This car is a {} {} {} {}.'.format(
            self.color, self.year, self.make, self.model
        )
        return description
#include <errno.h>
#include <math.h>
#include <stdlib.h>
#include "double.h"
#include "mesh.h"
#define EPSILON 0.00001
/* Counts the boundary nodes of the mesh: the first m->cn[0] entries of
 * m_node_curvature whose value differs from 1 by at least EPSILON. */
static int mesh_bd_nodes_a0(const mesh * m, const double * m_node_curvature)
{
  int count, i, total;

  total = m->cn[0];
  count = 0;
  for (i = 0; i < total; ++i)
  {
    if (fabs(m_node_curvature[i] - 1) >= EPSILON)
      count += 1;
  }
  return count;
}
/* Fills m_bd_nodes_a1 with the indices of all boundary nodes, in ascending
 * order. The output array must have room for mesh_bd_nodes_a0(...) entries;
 * the same EPSILON curvature test is applied as in the counting pass. */
static void mesh_bd_nodes_a1(
  int * m_bd_nodes_a1, const mesh * m, const double * m_node_curvature)
{
  int i, out, total;

  total = m->cn[0];
  out = 0;
  for (i = 0; i < total; ++i)
  {
    if (fabs(m_node_curvature[i] - 1) >= EPSILON)
    {
      m_bd_nodes_a1[out] = i;
      ++out;
    }
  }
}
/* Returns a newly allocated jagged1 listing the indices of the mesh's
 * boundary nodes (nodes whose curvature differs from 1 by >= EPSILON),
 * or NULL on failure (a diagnostic is printed to stderr). The caller owns
 * the returned structure and its a1 array. */
jagged1 * mesh_bd_nodes(const mesh * m)
{
  double * m_node_curvature;
  jagged1 * m_bd_nodes;

  /* mesh_node_curvature reports failure through errno (codebase convention) */
  m_node_curvature = mesh_node_curvature(m);
  if (errno)
  {
    fputs("mesh_bd_nodes - cannot calculate m_node_curvature\n", stderr);
    return NULL;
  }
  m_bd_nodes = (jagged1 *) malloc(sizeof(jagged1));
  /* BUGFIX: malloc signals failure by returning NULL; standard C does not
   * require a failing malloc to set errno, and a prior call may have left
   * errno nonzero even though the allocation succeeded. */
  if (m_bd_nodes == NULL)
  {
    fputs("mesh_bd_nodes - cannot allocate memory for m_bd_nodes\n", stderr);
    free(m_node_curvature);
    return NULL;
  }
  m_bd_nodes->a0 = mesh_bd_nodes_a0(m, m_node_curvature);
  m_bd_nodes->a1 = (int *) malloc(sizeof(int) * m_bd_nodes->a0);
  /* malloc(0) may legitimately return NULL, so only a0 > 0 is a failure */
  if (m_bd_nodes->a1 == NULL && m_bd_nodes->a0 > 0)
  {
    fputs("mesh_bd_nodes - cannot allocate memory for m_bd_nodes->a1\n",
      stderr);
    free(m_bd_nodes);
    free(m_node_curvature);
    return NULL;
  }
  mesh_bd_nodes_a1(m_bd_nodes->a1, m, m_node_curvature);
  free(m_node_curvature);
  return m_bd_nodes;
}
|
#!/usr/bin/env bash
# Provision digital-comms data for NISMOD.
# Expect NISMOD dir as first argument
base_path=$1
# Read remote_data, local_dir from config.ini: the three lines following the
# [digital-comms] section header are eval'd as shell variable assignments.
eval "$(grep -A3 "\[digital-comms\]" $base_path/provision/config.ini | tail -n3)"
local_path=$base_path/$local_dir
# Download data from the configured remote location into the local path
python $base_path/provision/get_data.py $remote_data $local_path
# Copy region definitions to smif's region_definitions folder
mkdir -p $base_path/data/region_definitions/assets_broadband_network
cp $base_path/data/digital_comms/processed/assets_layer3_cabinets.* $base_path/data/region_definitions/assets_broadband_network/
cp $base_path/data/digital_comms/processed/assets_layer4_distributions.* $base_path/data/region_definitions/assets_broadband_network/
// DateTime Field
import { distanceInWordsToNow, format } from 'date-fns'
import norwayLocale from 'date-fns/locale/nb'
const LOCALE = norwayLocale
const TIMEFORMAT = 'HH:mm ddd DD. MMM YYYY'
/**
 * Formats a date value either as an absolute timestamp or, when `relative`
 * is true, as a "time ago" phrase (Norwegian locale by default).
 */
const formatDateTime = (
  value,
  timeformat = TIMEFORMAT,
  locale = LOCALE,
  relative = false,
) => {
  const date = new Date(value)
  if (relative) {
    return distanceInWordsToNow(date, { addSuffix: true, locale })
  }
  return format(date, timeformat, { locale })
}
// Editable variant: a native datetime-local input bound to `value`; any
// extra props (onChange, name, ...) are spread onto the input.
export const EditableField = ({ value, ...args }) => (
  <input type="datetime-local" value={value} {...args} />
)
// Read-only variant: renders the formatted timestamp (absolute, or relative
// when `relative` is set); remaining props are spread onto the span.
export const DetailField = ({
  value,
  timeformat,
  locale,
  relative,
  ...args
}) => (
  <span {...args}>{formatDateTime(value, timeformat, locale, relative)}</span>
)
|
<filename>routes/project.js
'use strict';
const _ = require('lodash');
const ErrorHTTP = require('mapbox-error').ErrorHTTP;
const Sequelize = require('sequelize');
const db = require('../database/index');
const Project = db.Project;
const validateBody = require('../lib/helper/validateBody');
module.exports = {
getProjects: getProjects,
getProjectStats: getProjectStats,
createProject: createProject,
getProject: getProject,
updateProject: updateProject,
deleteProject: deleteProject
};
/**
* Get a list of projects.
* @name get-projects
* @param {Object} [query] - The request URL query parameters
* @param {string} [query.name] - Name of project to filter by (optional)
* @param {true|false} [query.is_archived] - default is false - set to true to return archived projects
* @example
* curl https://host/v1/projects
*
* [
* {
* id: '00000000-0000-0000-0000-000000000000',
* name: '<NAME>',
* metadata: {},
* createdAt: '2017-10-18T00:00:00.000Z',
* updatedAt: '2017-10-18T00:00:00.000Z'
* }
* ]
*/
function getProjects(req, res, next) {
  // Archived projects are excluded unless ?is_archived=true is given.
  const where = { is_archived: req.query.is_archived === 'true' };
  //FIXME: this is probably not the best way to implement this
  // since a search for a name should always return a maximum of 1 item
  if (req.query.name) {
    where.name = req.query.name;
  }
  Project.findAll({ where: where })
    .then(function(projects) {
      res.json(projects);
    })
    .catch(next);
}
/**
* Get stats for a project
* @name get-project-stats
* @example
* curl https://host/v1/projects/00000000-0000-0000-0000-000000000000/stats
*
* {
* "total": 3,
* "tags": {
* "tag1": {
* "open": 1,
* "closed": 1
* },
* "tag2": {
* "open": 1
* }
* }
*/
// Computes item statistics for one project: the total item count plus a
// per-tag breakdown of item counts grouped by status.
function getProjectStats(req, res, next) {
  const projectId = req.params.project;
  // Two independent queries run in parallel:
  //   [0] total number of items in the project
  //   [1] item counts grouped by (status, tag name)
  const countPromises = [
    db.Item.count({
      where: {
        project_id: projectId
      }
    }),
    db.Item.findAll({
      // Only the explicitly listed attributes are selected, not the
      // joined model's default columns.
      includeIgnoreAttributes: false,
      include: [
        {
          model: db.Tag,
          attributes: ['id', 'name'],
          through: {
            attributes: []
          }
        }
      ],
      attributes: [
        'tags.name',
        'status',
        Sequelize.fn('COUNT', Sequelize.col('item.id'))
      ],
      where: {
        project_id: projectId
      },
      group: ['status', 'tags.name'],
      distinct: true,
      raw: true
    })
  ];
  Promise.all(countPromises)
    .then(results => {
      const total = results[0];
      // Pivot the grouped rows into { tagName: { status: count } }.
      const tags = results[1].reduce((memo, value) => {
        const tagName = value.name;
        if (!memo.hasOwnProperty(tagName)) {
          memo[tagName] = {};
        }
        const status = value.status;
        // NOTE(review): assumes Sequelize exposes the COUNT column as
        // `count` on raw rows — confirm against the dialect in use.
        memo[tagName][status] = value.count;
        return memo;
      }, {});
      return res.json({
        total: total,
        tags: tags
      });
    })
    .catch(err => {
      return next(err);
    });
}
/**
* Create a project.
* @name create-project
* @param {Object} body - The request body
* @param {string} body.name - The project name
* @param {string} [body.quadkey_set_id] - Quadkey Set ID for this project
* @param {Object} [body.metadata={}] - The project metadata
* @example
* curl -X POST -H "Content-Type: application/json" -d '{"name":"<NAME>"}' https://host/v1/projects
*
* {
* id: '00000000-0000-0000-0000-000000000000',
* metadata: {},
* name: '<NAME>',
* updatedAt: '2017-10-19T00:00:00.000Z',
* createdAt: '2017-10-19T00:00:00.000Z'
* }
*/
function createProject(req, res, next) {
  // Only these attributes may appear in the body; `name` is mandatory.
  const validBodyAttrs = ['name', 'quadkey_set_id', 'metadata'];
  const requiredBodyAttr = ['name'];
  const validationError = validateBody(req.body, validBodyAttrs, requiredBodyAttr);
  if (validationError) return next(new ErrorHTTP(validationError, 400));
  Project.create(req.body)
    .then(function(created) {
      res.json(created);
    })
    .catch(function(err) {
      // Project names are unique; report a client error instead of a 500.
      if (err instanceof Sequelize.UniqueConstraintError) {
        return next(new ErrorHTTP('Project with name already exists', 400));
      }
      return next(err);
    });
}
/**
* Get a project.
* @name get-project
* @param {Object} params - The request URL parameters
* @param {string} params.project - The project ID
* @example
* curl https://host/v1/projects/00000000-0000-0000-0000-000000000000
*
* {
* id: '00000000-0000-0000-0000-000000000000',
* name: 'My Project',
* metadata: {},
* createdAt: '2017-10-18T00:00:00.000Z',
* updatedAt: '2017-10-18T00:00:00.000Z'
* }
*/
function getProject(req, res, next) {
  const projectId = req.params.project;
  // A missing row falls through to the default (404) handler via next().
  Project.findOne({ where: { id: projectId } })
    .then(function(project) {
      if (!project) {
        return next();
      }
      res.json(project);
    })
    .catch(next);
}
/**
* Update a project.
* @name update-project
* @param {Object} params - The request URL parameters
* @param {string} params.project - The project ID
* @param {Object} body - The request body
* @param {string} [body.name] - The project name
* @param {string} [body.quadkey_set_id] - Quadkey Set ID for this project
* @param {Object} [body.metadata] - The project metadata
* @example
* curl -X PUT -H "Content-Type: application/json" -d '{"metadata":{"key":"value"}}' https://host/v1/projects/00000000-0000-0000-0000-000000000000
*
* {
* id: '00000000-0000-0000-0000-000000000000',
* name: '<NAME>',
* metadata: {
* key: "value"
* },
* createdAt: '2017-10-18T00:00:00.000Z',
* updatedAt: '2017-10-18T00:00:00.000Z'
* }
*/
// Applies a partial update to a project after validating the body attributes.
function updateProject(req, res, next) {
  const validBodyAttrs = ['name', 'quadkey_set_id', 'metadata', 'is_archived'];
  const validationError = validateBody(req.body, validBodyAttrs);
  if (validationError) return next(new ErrorHTTP(validationError, 400));
  Project.findOne({ where: { id: req.params.project } })
    .then(function(project) {
      if (!project) return next();
      // Deep-merge the body over the current row so nested metadata keys
      // are preserved rather than replaced wholesale.
      // NOTE(review): lodash's merge treats arrays element-wise; confirm
      // that is the intended semantics for array-valued metadata.
      const updated = _.merge({}, project.toJSON(), req.body);
      return project.update(updated);
    })
    .then(function(data) {
      res.json(data);
    })
    .catch(next);
}
/**
* Delete a project. Marks a project as "soft deleted"
* @name delete-project
* @param {Object} params - Request URL params
* @param {string} params.project - Project ID
* @example curl -X DELETE https://host/v1/projects/00000000-0000-0000-0000-000000000000
* {"id": "00000000-0000-0000-0000-000000000000"}
*/
function deleteProject(req, res, next) {
  const projectId = req.params.project;
  // Soft delete: flip the archived flag instead of removing the row.
  return Project.update({ is_archived: true }, { where: { id: projectId } })
    .then(result => {
      // Sequelize's static update resolves to an array whose first member
      // is the count of affected rows:
      // http://docs.sequelizejs.com/class/lib/model.js~Model.html#static-method-update
      if (result[0] === 0) {
        return next(new ErrorHTTP('Project not found', 404));
      }
      return res.json({ id: projectId });
    })
    .catch(next);
}
|
package com.craftinginterpreters.lox

// Token types for the Lox language, modeled as case objects under a sealed
// trait so pattern matches over tokens can be checked for exhaustiveness.
sealed trait TokenType

// Single-character tokens.
case object LEFT_PAREN extends TokenType
case object RIGHT_PAREN extends TokenType
case object LEFT_BRACE extends TokenType
case object RIGHT_BRACE extends TokenType
case object COMMA extends TokenType
case object DOT extends TokenType
case object MINUS extends TokenType
case object PLUS extends TokenType
case object SEMICOLON extends TokenType
case object SLASH extends TokenType
case object STAR extends TokenType

// One- or two-character operators.
case object BANG extends TokenType
case object BANG_EQUAL extends TokenType
case object EQUAL extends TokenType
case object EQUAL_EQUAL extends TokenType
case object GREATER extends TokenType
case object GREATER_EQUAL extends TokenType
case object LESS extends TokenType
case object LESS_EQUAL extends TokenType

// Literals.
case object IDENTIFIER extends TokenType
case object STRING extends TokenType
case object NUMBER extends TokenType

// Reserved keywords.
case object AND extends TokenType
case object CLASS extends TokenType
case object ELSE extends TokenType
case object FALSE extends TokenType
case object FUN extends TokenType
case object FOR extends TokenType
case object IF extends TokenType
case object NIL extends TokenType
case object OR extends TokenType
case object PRINT extends TokenType
case object RETURN extends TokenType
case object SUPER extends TokenType
case object THIS extends TokenType
case object TRUE extends TokenType
case object VAR extends TokenType
case object WHILE extends TokenType

// End-of-input marker.
case object EOF extends TokenType
|
#!/usr/bin/env bash
set -e

# Refuse to release from a dirty working tree.
git diff-index --quiet HEAD

# Make sure master is up to date on both ends before tagging.
git checkout master
git pull origin master
git push origin master

# Extract the version from a line like: __version__ = '1.2.3'
version="$(grep "__version__ = " cymem/about.py)"
version="${version#*= }"
version="${version/\'/}"
version="${version/\'/}"

# Tag the release and publish the tag.
git tag "v$version"
git push origin --tags
|
var self = require('sdk/self');
var buttons = require("sdk/ui/button/action");
var tabs = require("sdk/tabs");

// Toolbar button that injects the bookmarklet into the active tab on click.
var button = buttons.ActionButton({
  id: "Bookmarklet-To-FF-Addon-Button",
  label: "Run the Bookmarklet",
  icon: {
    "16": "./images/icon16.png",
    "19": "./images/icon19.png",
    "32": "./images/icon32.png",
    "48": "./images/icon48.png",
    "64": "./images/icon64.png",
    "128": "./images/icon128.png"
  },
  onClick: runBookmarklet
});

// Attaches the bookmarklet content script to the currently active tab.
// `state` is the button state supplied by the SDK; it is unused here.
function runBookmarklet(state) {
  tabs.activeTab.attach({
    // Content scripts live in the add-on's data directory and must be
    // referenced via self.data.url(); the SDK loader does not resolve a
    // relative "../data/..." path.
    contentScriptFile: self.data.url("bookmarklet-code.js")
  });
}
#pragma once

// Gas species tracked by the simulation. Backed by char to keep per-tile
// gas arrays compact.
enum class Gas : char {
    Oxygen,
    Nitrogen,
    CarbonDioxide,
    NitrousOxide,
    Plasma,
    Freon,
    Count  // Number of gas kinds; keep this entry last.
};

// Pressure value; units are not specified here — confirm at call sites.
// `using` alias instead of typedef, consistent with the C++11+ style the
// header already uses (enum class).
using pressure = float;
class Node:
    """A network peer identified by node id, IP address and port."""

    def __init__(self, node_id, ip_address, port):
        self.node_id = node_id
        self.ip_address = ip_address
        self.port = port

    def __eq__(self, other):
        """Nodes are equal when all three identifying fields match."""
        return (isinstance(other, Node) and
                self.node_id == other.node_id and
                self.ip_address == other.ip_address and
                self.port == other.port)

    def __hash__(self):
        # Defining __eq__ alone sets __hash__ to None, which made Node
        # unhashable (unusable in sets / as dict keys). Hash on the same
        # fields __eq__ compares so the hash contract holds.
        return hash((self.node_id, self.ip_address, self.port))

    def __repr__(self):
        return "Node({!r}, {!r}, {!r})".format(
            self.node_id, self.ip_address, self.port)
# Test cases: nodes differing in IP or port are unequal; identical fields
# compare equal.
random_id = "random_id"
assert Node(random_id, "192.168.1.1", 8000) != Node(random_id, "192.168.1.2", 8000)
assert Node(random_id, "192.168.1.1", 8000) != Node(random_id, "192.168.1.1", 8001)
assert Node(random_id, "192.168.1.1", 8000) == Node(random_id, "192.168.1.1", 8000)
def count_non_zeroes(arr):
    """Return how many elements of arr compare unequal to zero."""
    return sum(1 for element in arr if element != 0)
#!/bin/sh
#
# This script is designed to run the configuration tool.
# The tool's configuration should be under the /etc directory.
#
# Load the prolog:
. "$(dirname "$(readlink -f "$0")")"/engine-prolog.sh
# logging configuration properties for tools
OVIRT_LOGGING_PROPERTIES="${OVIRT_LOGGING_PROPERTIES:-${ENGINE_USR}/conf/tools-logging.properties}"
usage () {
cat << __EOF__
Usage: engine-config <action> [<args>]
AVAILABLE ACTIONS
-l, --list
List available configuration keys.
-a, --all
Get all available configuration values.
-g KEY, --get=KEY
Get the value of the given key for the given version. If a version is not given, the values of all existing versions are returned.
-s KEY=VALUE, --set KEY=VALUE
Set the value of the given key for the given version. The version is required for this action only when the version is not 'general'.
-m KEY=VALUE, --merge KEY=VALUE
Merge the value of the given key for the given version with the value in the database. The version is required for this action only when the version is not 'general'.
-h, --help
Show this help message and exit.
OPTIONS
--cver=VERSION
Relevant configuration version to use.
-p PROP_FILE, --properties=PROP_FILE
Use the given alternate properties file.
-c CFG_FILE, --config=CFG_FILE
Use the given alternate configuration file.
--log-file=LOG_FILE
Sets file to write logging into (if not set nothing is logged).
--log-level=LOG_LEVEL
Sets log level, one of FINE, INFO (default), WARNING, SEVERE (case insensitive).
SETTING PASSWORDS
Passwords can be set in interactive mode:
engine-config -s PasswordEntry=interactive
or via file with one of the following options:
engine-config -s PasswordEntry --admin-pass-file=/tmp/mypass
engine-config -s PasswordEntry=/tmp/mypass
PasswordEntry varies between the different password options.
CUSTOM LOGGING
If you need custom logging setup, please create your own java.util.logging properties file,
set a path to this file into OVIRT_LOGGING_PROPERTIES environment variable and execute engine-config.
NOTE
In order for your change(s) to take effect, restart the oVirt engine.
__EOF__
return 0
}
# TODO:
# why do we need CONF_FILE here?
# we do not use any vairable
CONF_FILE="${ENGINE_ETC}/engine-config/engine-config.conf"
# Scan the command line for the options this wrapper cares about
# (-c/-configFile and help); everything else is left for the Java tool,
# which later receives the untouched "$@".
parseArgs() {
    while [ -n "$1" ]; do
        local arg="$1"
        local value="${arg#*=}"
        shift
        case "${arg}" in
            -c)
                CONF_FILE="$1"
                shift
                ;;
            -configFile=*)
                CONF_FILE="${value}"
                ;;
            -h|-help|--help)
                usage
                exit 0
                ;;
        esac
    done
}
# do this in function so we do not lose $@
parseArgs "$@"
[ -s "${CONF_FILE}" ] || die "Configuration file '${CONF_FILE}' is either empty or does not exist"
. "${CONF_FILE}"
#
# Add this option to the java command line to enable remote debugging in
# all IP addresses and port 8787:
#
# -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:8787,server=y,suspend=y
#
# Note that the "suspend=y" options is needed to suspend the execution
# of the JVM till you connect with the debugger, otherwise it is
# not possible to debug the execution of the main method.
#
# Replace this shell with the Java tool, launched inside JBoss Modules so it
# resolves the same module path and logging configuration as the engine.
exec "${JAVA_HOME}/bin/java" \
    -Xbootclasspath/p:"${ENGINE_USR}/logutils/logutils.jar" \
    -Djboss.modules.system.pkgs=org.jboss.byteman,org.ovirt.engine.core.logutils \
    -Djava.util.logging.config.file="${OVIRT_LOGGING_PROPERTIES}" \
    -jar "${JBOSS_HOME}/jboss-modules.jar" \
    -dependencies org.ovirt.engine.core.tools \
    -class org.ovirt.engine.core.config.EngineConfigExecutor \
    "$@"
|
package com.corsair.sparrow.pirate.gamma.config;
import com.corsair.sparrow.pirate.gamma.domain.properties.ThriftServerProperties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Registers this service's Thrift server endpoint (host and port) under the
 * configured ZooKeeper server list.
 *
 * @author jack
 */
@Configuration
@EnableConfigurationProperties(value = ThriftServerProperties.class)
public class ThriftServerConfig {

    /** Host/port/ZooKeeper settings bound from the application properties. */
    private final ThriftServerProperties thriftServerProperties;

    /**
     * Constructor injection (instead of field injection) keeps the dependency
     * final and makes the class testable without a Spring context.
     */
    @Autowired
    public ThriftServerConfig(ThriftServerProperties thriftServerProperties) {
        this.thriftServerProperties = thriftServerProperties;
    }

    /**
     * Builds the registry bean that announces this Thrift server in ZooKeeper.
     */
    @Bean
    public ThriftServerRegistry register() {
        ThriftServerRegistry registry = new ThriftServerRegistry();
        registry.setZkServerList(thriftServerProperties.getZkList());
        registry.setServerHost(thriftServerProperties.getHost());
        registry.setServerPort(thriftServerProperties.getPort());
        return registry;
    }
}
|
#!/bin/bash
# Copyright 2020 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# set -e
# Split a whitespace-separated string into an array and echo its elements.
function get_array() {
    # `arr` must be local: without it the array is global, so a second call
    # to get_array appends to the results of the first call.
    local arr=()
    for value in ${1}
    do
        arr[${#arr[@]}]=$value
    done
    echo ${arr[*]}
}
NS=kube-system
CONTAINER=azurefile

echo "check the driver pods if restarts ..."
# Collect pod names and restart counts for the driver pods.
# NS/CONTAINER were previously defined but the values were hardcoded again
# below; use the variables so they stay in sync.
original_pods=$(kubectl get pods -n $NS | grep $CONTAINER | awk '{print $1}')
original_restarts=$(kubectl get pods -n $NS | grep $CONTAINER | awk '{print $4}')
processed_pods=($(get_array "${original_pods[@]}"))
processed_restarts=($(get_array "${original_restarts[@]}"))
for ((i=0; i<${#processed_restarts[@]}; i++)); do
    if [ "${processed_restarts[$i]}" -ne "0" ]
    then
        echo "there is a driver pod which has restarted"
        #disable pods restart check temporarily since there is driver restart in MSI enabled cluster
        #exit 3
        if [[ "$1" == "log" ]]; then
            kubectl describe po ${processed_pods[$i]} -n $NS
            echo "======================================================================================"
            # (typo fix: "cotnainer" -> "container")
            echo "print previous $CONTAINER container logs since there is a restart"
            kubectl logs ${processed_pods[$i]} -c $CONTAINER -p -n $NS
            echo "======================================================================================"
        fi
    fi
done
echo "======================================================================================"
|
<reponame>LogoFX/aurelia-oidc-sample<filename>Client/oauth-client/aurelia_project/environments/prod.ts
// Aurelia environment configuration for production builds.
export default {
  debug: false,
  testing: false,
  // OIDC authority this client authenticates against.
  authServer: "https://jenkins.logismika.com:5000",
  // Redirect URI the auth server returns the user to after sign-in.
  callback: "https://mkt-dashboard.github.io"
};
|
<filename>src/container/Page/Container.tsx<gh_stars>1-10
import { DependencySet } from "@app/interface";
import { Parser } from "@app/infra";
import * as Domain from "@app/domain";
import * as React from "react";
import * as DependencyTableList from "../DependencyTableList";
import * as SidebarNavigation from "../SideNavigation";
import * as LinkList from "../LinkList";
import { generateStore, Store } from "./Store";
import { Page } from "@app/component";
import { useLocation, useParams, useHistory } from "react-router-dom";
const depsDataSet: DependencySet = require("@app/dataSet/deps.json");
// Maps the page-level store onto the props expected by the Page component:
// a static set of header links plus props derived from each sub-container's
// slice of the store.
const generateProps = (store: Store): Page.Props => {
  return {
    headerNavigation: {
      links: [
        {
          to: "/",
          children: "TOP",
        },
        {
          to: "/packages",
          children: "package list",
        },
        {
          to: "/ranking",
          children: "ranking",
        },
      ],
    },
    dependencyTableList: DependencyTableList.generateProps(store.dependencyTableList),
    sideNavigation: SidebarNavigation.generateProps(store.sideNavigation),
    linkList: LinkList.generateProps(store.linkList),
  };
};
// Parses the current location's query string into a URLSearchParams object.
const useQuery = () => {
  const { search } = useLocation();
  return new URLSearchParams(search);
};
// Top-level page container: derives the package/repository selection from
// the route and query string, wires up the domain reducers, and renders the
// Page component.
export const Container = () => {
  const query = useQuery();
  const history = useHistory();
  // Route shape appears to be /:owner or /:owner/:name; scoped packages use
  // both segments — confirm against the router configuration.
  const { owner, name } = useParams();
  const packageName = !!name ? [owner, name].join("/") : owner;
  // Optional query parameters describing the repository location.
  const repositoryOwner = query.get("owner") || undefined;
  const hostname = query.get("hostname") || undefined;
  const repo = query.get("repo") || undefined;
  const path = query.get("path") || undefined;
  const searchParams = Parser.parseStringSearchParams(query.get("q") || "");
  const reducers = Domain.App.createReducers(depsDataSet, { name: packageName, hostname, owner: repositoryOwner, repo, path }, searchParams);
  // Wraps React's [state, dispatch] tuple into a named-field object.
  const createReducer = <T, S>([state, dispatch]: [T, S]): { state: T; dispatch: S } => ({ state, dispatch });
  const domainStores: Domain.App.Stores = {
    app: createReducer(React.useReducer(...reducers.app({ history }))),
  };
  const store = generateStore(domainStores);
  return <Page.Component {...generateProps(store)} />;
};
|
#!/bin/bash
# Build the loraneo/docker-os image (tag 9.5a) from the Dockerfile in this directory.
docker build -t loraneo/docker-os:9.5a .
<reponame>ckpt/backend-services<filename>locations/locations.go
package locations
import (
	"errors"
	"strconv"

	"github.com/imdario/mergo"
	"github.com/m4rw3r/uuid"
)
// We use dummy in memory storage for now
var storage LocationStorage = NewRedisLocationStorage()
type Coord struct {
Lat float64 `json:lat`
Long float64 `json:long`
}
// Location describes a venue that can host games: its host player, public
// profile, attached pictures, and whether it is currently active.
type Location struct {
	UUID     uuid.UUID `json:"uuid"`
	Host     uuid.UUID `json:"host"`
	Profile  Profile   `json:"profile"`
	Pictures [][]byte  `json:"pictures"`
	Active   bool      `json:"active"`
}

// Profile holds the user-facing details of a location.
type Profile struct {
	URL         string   `json:"url"`
	Coordinates Coord    `json:"coordinates"`
	Name        string   `json:"name"`
	Description string   `json:"description"`
	Facilities  []string `json:"facilities"`
}

// A storage interface for Locations
type LocationStorage interface {
	Store(*Location) error
	Delete(uuid.UUID) error
	Load(uuid.UUID) (*Location, error)
	LoadAll() ([]*Location, error)
	LoadByPlayer(uuid.UUID) (*Location, error)
}
//
// Location related functions and methods
//
// Create a Location
// NewLocation builds a new active Location for the given host, applies the
// initial profile via mergo, persists it, and returns the stored location.
func NewLocation(host uuid.UUID, lp Profile) (*Location, error) {
	l := new(Location)
	l.UUID, _ = uuid.V4() // UUID-generation error is deliberately ignored
	l.Active = true
	l.Host = host
	if err := mergo.MergeWithOverwrite(&l.Profile, lp); err != nil {
		return nil, errors.New(err.Error() + " - Could not set initial location profile")
	}
	if err := storage.Store(l); err != nil {
		return nil, errors.New(err.Error() + " - Could not write location to storage")
	}
	return l, nil
}
// AllLocations returns every location known to the backing storage.
func AllLocations() ([]*Location, error) {
	return storage.LoadAll()
}
// DeleteByUUID removes the location with the given UUID from storage and
// reports whether the deletion succeeded.
func DeleteByUUID(uuid uuid.UUID) bool {
	return storage.Delete(uuid) == nil
}
// LocationByUUID loads a single location from storage by its UUID.
func LocationByUUID(uuid uuid.UUID) (*Location, error) {
	return storage.Load(uuid)
}
// AddPicture appends picture to the location's gallery and persists the change.
func (l *Location) AddPicture(picture []byte) error {
	l.Pictures = append(l.Pictures, picture)
	if err := storage.Store(l); err != nil {
		return errors.New(err.Error() + " - Could not add picture to location")
	}
	return nil
}
// RemovePicture deletes the picture at picIndex from the gallery and
// persists the change. picIndex must be in range; it is not validated here.
func (l *Location) RemovePicture(picIndex int) error {
	l.Pictures = append(l.Pictures[:picIndex], l.Pictures[picIndex+1:]...)
	if err := storage.Store(l); err != nil {
		// strconv.Itoa: string(picIndex) would interpret the index as a
		// rune (e.g. 65 -> "A") instead of rendering it in decimal.
		return errors.New(err.Error() + " - Could not delete picture at index " + strconv.Itoa(picIndex))
	}
	return nil
}
// UpdateProfile merges lp into the location's current profile (non-zero
// fields overwrite) and persists the result.
func (l *Location) UpdateProfile(lp Profile) error {
	if err := mergo.MergeWithOverwrite(&l.Profile, lp); err != nil {
		return errors.New(err.Error() + " - Could not update location profile")
	}
	if err := storage.Store(l); err != nil {
		return errors.New(err.Error() + " - Could not store updated location profile")
	}
	return nil
}
|
<reponame>miyamotok0105/modeldb<gh_stars>1-10
package edu.mit.csail.db.ml.modeldb.client
import edu.mit.csail.db.ml.modeldb.client.event.MetricEvent
import org.apache.spark.ml.{Model, Transformer}
import org.apache.spark.mllib.evaluation.MulticlassMetrics
import org.apache.spark.sql.DataFrame
// TODO: We should see if this is still necessary. We may be able to remove it because proper MetricEvent
// logging happens with evaluateSync. Also, The MulticlassMetrics class is part of the old Spark RDD API, rather than
// the new Spark DataFrame API.
/**
* Represents metrics that can be computed and synced to the ModelDB.
*/
object SyncableMetrics {
  /**
    * Create a Spark MulticlassMetrics object from the given data and model
    * and log MetricEvents to the ModelDB.
    *
    * @param model - The Model that is being evaluated.
    * @param df - The DataFrame we are evaluating.
    * @param labelCol - The column in the DataFrame above that contains the actual labels.
    * @param predictionCol - The column that Model will produce when it transforms
    *                      the DataFrame.
    * @param mdbs - The ModelDB Syncer.
    * @return The MulticlassMetrics object that is produced.
    */
  def ComputeMulticlassMetrics(model: Transformer,
                               df: DataFrame,
                               labelCol: String,
                               predictionCol: String)
                              (implicit mdbs: Option[ModelDbSyncer]): MulticlassMetrics = {
    // We need to convert this into an RDD because that's what MulticlassMetrics
    // expects.
    val rdd = df.select(df.col(predictionCol), df.col(labelCol)).rdd.map{ (row) =>
      // Values are round-tripped through String to coerce whatever numeric
      // type the columns hold into Double.
      val (predicted: Double, actual: Double) =
        (row(0).toString.toDouble, row(1).toString.toDouble)
      (predicted, actual)
    }
    // Create the object.
    val metrics = new MulticlassMetrics(rdd)
    // We compute three metrics and log them to the ModelDB.
    // NOTE(review): precision/recall on MulticlassMetrics were deprecated in
    // later Spark versions in favor of accuracy — confirm the target Spark
    // version before upgrading.
    val metricMap = Map[String, Double](
      "precision" -> metrics.precision,
      "recall" -> metrics.recall,
      "fMeasure" -> metrics.fMeasure
    )
    // Create a MetricEvent for each.
    // NOTE(review): mdbs.get throws if no syncer is configured — presumably
    // callers guarantee one is present; verify.
    metricMap.foreach { case (name, value) =>
      mdbs.get.buffer(new MetricEvent(df, model, labelCol, predictionCol, name, value.toFloat))
    }
    metrics
  }
}
#!/bin/bash
# Regenerate the uri2 parser: run the Flow PEG compiler from the repository
# root (three levels up), producing the parser and AST sources and logging
# compiler output to out.txt.
pushd .
cd ../../..
flowcpp lingo/pegcode/pegcompiler.flow -- file=lib/formats/uri2/uri2.lingo flowparser=lib/formats/uri2/uri2_parser flowparserast=lib/formats/uri2/uri2_ast.flow > out.txt
popd
|
#!/usr/bin/env bash
set -e

# Default to Python 3.8 when the caller did not set PYTHON_VERSIONS.
# Bug fix: the default expansion is "${VAR:-default}". The previous
# "${PYTHON_VERSIONS:3.8}" is a *substring* expansion (offset 3), which
# silently mangles the value instead of providing a default.
PYTHON_VERSIONS="${PYTHON_VERSIONS:-3.8}"

if [ -n "${PYTHON_VERSIONS}" ]; then
    for python_version in ${PYTHON_VERSIONS}; do
        # Try to activate an existing poetry environment for this version.
        if output=$(poetry env use "${python_version}" 2>&1); then
            if echo "${output}" | grep -q ^Creating; then
                # "Creating" means no environment existed yet: remove the one
                # poetry just made and skip this version.
                echo "> Environment for Python ${python_version} not created, skipping" >&2
                poetry env remove "${python_version}" &>/dev/null || true
            else
                echo "> poetry run $* (Python ${python_version})"
                poetry run "$@"
            fi
        else
            echo "> poetry env use ${python_version}: Python version not available?" >&2
        fi
    done
else
    poetry run "$@"
fi
|
<gh_stars>1-10
/**
* Copyright (c) 2018-2019 mol* contributors, licensed under MIT, See LICENSE file for more info.
*
* @author <NAME> <<EMAIL>>
*/
import { Color, ColorListEntry } from './color';
import { getColorListFromName, ColorListName } from './lists';
import { defaults } from '../../mol-util';
import { NumberArray } from '../../mol-util/type-helpers';
import { ScaleLegend } from '../legend';
import { SortedArray } from '../../mol-data/int';
import { clamp } from '../../mol-math/interpolate';
export interface ColorScale {
    /** Returns hex color for given value */
    color: (value: number) => Color
    /** Copies color to rgb int8 array */
    colorToArray: (value: number, array: NumberArray, offset: number) => void
    /** Copies normalized (0 to 1) hex color to rgb array */
    normalizedColorToArray: (value: number, array: NumberArray, offset: number) => void
    /** Rescales the input domain that values are mapped from */
    setDomain: (min: number, max: number) => void
    /** Legend */
    readonly legend: ScaleLegend
}

export const DefaultColorScaleProps = {
    /** Input value range mapped onto the color list. */
    domain: [0, 1] as [number, number],
    /** When true, the color list is applied in reverse order. */
    reverse: false,
    /** Explicit color entries, or the name of a predefined color list. */
    listOrName: 'red-yellow-blue' as ColorListEntry[] | ColorListName,
    minLabel: '' as string | undefined,
    maxLabel: '' as string | undefined,
};
export type ColorScaleProps = Partial<typeof DefaultColorScaleProps>
export namespace ColorScale {
    /**
     * Creates a color scale mapping a numeric domain onto a color list.
     * List entries are either plain colors (spaced evenly) or
     * [color, offset] pairs with explicit normalized stop positions.
     */
    export function create(props: ColorScaleProps): ColorScale {
        const { domain, reverse, listOrName } = { ...DefaultColorScaleProps, ...props };
        const list = typeof listOrName === 'string' ? getColorListFromName(listOrName).list : listOrName;

        const colors = reverse ? list.slice().reverse() : list;
        const count1 = colors.length - 1;

        let diff = 0, min = 0, max = 0;
        function setDomain(_min: number, _max: number) {
            min = _min;
            max = _max;
            diff = (max - min) || 1; // guard against a zero-width domain
        }
        setDomain(domain[0], domain[1]);

        const minLabel = defaults(props.minLabel, min.toString());
        const maxLabel = defaults(props.maxLabel, max.toString());

        let color: (v: number) => Color;

        const hasOffsets = colors.every(c => Array.isArray(c));
        if (hasOffsets) {
            // Explicit stops: sort them by offset and interpolate between
            // the two stops surrounding the normalized value.
            const sorted = [...colors] as [Color, number][];
            sorted.sort((a, b) => a[1] - b[1]);
            const src = sorted.map(c => c[0]);
            const off = SortedArray.ofSortedArray(sorted.map(c => c[1]));
            const max = src.length - 1;
            color = (v: number) => {
                const t = clamp((v - min) / diff, 0, 1);
                const i = SortedArray.findPredecessorIndex(off, t);
                if (i === 0) {
                    // Below the first stop: clamp to the first color.
                    // Bug fix: this previously returned `src[min]`, indexing
                    // by the domain minimum instead of array position 0.
                    return src[0];
                } else if (i > max) {
                    return src[max];
                }
                const o1 = off[i - 1], o2 = off[i];
                const t1 = clamp((t - o1) / (o2 - o1), 0, 1); // TODO: cache the deltas?
                return Color.interpolate(src[i - 1], src[i], t1);
            };
        } else {
            // Evenly spaced colors: interpolate between the two neighbors
            // of the scaled position.
            color = (value: number) => {
                const t = Math.min(colors.length - 1, Math.max(0, ((value - min) / diff) * count1));
                const tf = Math.floor(t);
                const c1 = colors[tf] as Color;
                const c2 = colors[Math.ceil(t)] as Color;
                return Color.interpolate(c1, c2, t - tf);
            };
        }
        return {
            color,
            colorToArray: (value: number, array: NumberArray, offset: number) => {
                Color.toArray(color(value), array, offset);
            },
            normalizedColorToArray: (value: number, array: NumberArray, offset: number) => {
                Color.toArrayNormalized(color(value), array, offset);
            },
            setDomain,
            get legend() { return ScaleLegend(minLabel, maxLabel, colors); }
        };
    }
}
|
package com.honyum.elevatorMan.hb;
import com.hanbang.netsdk.BaseNetControl;
import com.hanbang.netsdk.HBNetCtrl;
/**
 * Connection and identification details for a Hanbang device, together with
 * its network-control handle and a disconnect callback.
 *
 * Created by Administrator on 2016/4/12.
 */
public class DeviceInfo {
    /** Device login user name. */
    public String deviceUser = "";
    /** Device login password. */
    public String devicePsw = "";
    /** Device serial number. */
    public String deviceSn = "";
    /** Device id. */
    public String deviceId = "";
    /** Device display name. */
    public String deviceName = "";
    /** Device LAN IP address. */
    public String deviceLanIp = "";
    /** Device LAN port. */
    public int deviceLanPort;
    /** Device "vveye" id. */
    public String vveyeId = "";
    /** Device vveye remote port. */
    public int vveyeRemortPort = 0;
    /** Device domain name. */
    public String deviceDomain = "";
    /** Device mapped (forwarded) port. */
    public int domainPort = 0;
    /** Number of channels on the device. */
    public int channelCount;
    /** Network SDK control object for this device. */
    public HBNetCtrl hbNetCtrl;
    /** Whether the device is currently online. */
    public boolean isOnline = false;
    /** Streaming media server IP. */
    public String smsIp = "";
    /** Streaming media server port. */
    public int smsPort = 0;

    /** Disconnect callback: marks the device offline and logs it out. */
    public BaseNetControl.NetDataCallback callback = new BaseNetControl.NetDataCallback() {
        @Override
        public void onNetData(DataType dataType, byte[] bytes, int i, int i1, long l) {
        }

        @Override
        public void onDisconnected() {
            // Device went offline.
            if (isOnline) {
                isOnline = false;
                // Log the device out on a background thread.
                // Bug fix: the Thread was previously constructed but never
                // started, so logout() was never executed.
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        hbNetCtrl.logout();
                    }
                }).start();
            }
        }
    };

    @Override
    public String toString() {
        return deviceSn;
    }
}
|
<filename>website/samples/basic/controllers/home-controller.js
// Sample "home" controller registered with the E5R front-end framework.
E5R.namespace('app/controllers', function (exports) {
    "use strict";

    var $ = E5R.$jq,
        utils = E5R.require('utils'),
        register = E5R.require('@registerController'),
        BY_ID = E5R.require('@constants').VIEW_BY_ID;

    // Controller constructor: dumps the received options as pretty-printed
    // JSON into the view's <pre id="options"> element.
    function HomeController(el, options) {
        var optionsJson = JSON.stringify(options, null, 2);
        $('pre#options', el).text(optionsJson);
    };

    // Writes the configured message into the view element with id "message".
    HomeController.prototype.showMessage = function () {
        var self = $(this).controller();
        self.$view(BY_ID, 'message').text(self.$options.message);
    }

    // Sets the page title from the controller options.
    // NOTE(review): the fallback passes the function `utils.getPageTitle`
    // itself instead of calling it — possibly should be
    // `utils.getPageTitle()`; confirm the intended behavior.
    HomeController.prototype.setTitle = function () {
        var self = $(this).controller();
        utils.setPageTitle(self.$options.pageTitle || utils.getPageTitle);
    }

    exports['HomeController'] = register('home', HomeController);
});
|
#!/bin/bash
set -e

# fix for host.docker.internal not existing on linux https://github.com/docker/for-linux/issues/264
# see https://dev.to/bufferings/access-host-from-a-docker-container-4099
HOST_DOMAIN="host.docker.internal"

# check if the host exists
# see https://stackoverflow.com/a/24049165/413531
if dig ${HOST_DOMAIN} | grep -q 'NXDOMAIN'
then
    # on linux, it will fail - so we'll "manually" add the hostname in the host file
    HOST_IP=$(ip route | awk 'NR==1 {print $3}')
    # Bug fix: use printf so \t becomes a real tab. Plain `echo` (without -e)
    # wrote a literal backslash-t into /etc/hosts, producing a broken entry.
    printf '%s\t%s\n' "$HOST_IP" "$HOST_DOMAIN" >> /etc/hosts
fi

exec "$@"
package io.dronefleet.mavlink.common;
import io.dronefleet.mavlink.annotations.MavlinkEntryInfo;
import io.dronefleet.mavlink.annotations.MavlinkEnum;
/**
 * Type of landing target.
 * The {@code @MavlinkEntryInfo} numbers appear to be the MAVLink wire values
 * for this enum — confirm against the MAVLink common message definitions.
 */
@MavlinkEnum
public enum LandingTargetType {
    /**
     * Landing target signaled by light beacon (ex: IR-LOCK)
     */
    @MavlinkEntryInfo(0)
    LANDING_TARGET_TYPE_LIGHT_BEACON,

    /**
     * Landing target signaled by radio beacon (ex: ILS, NDB)
     */
    @MavlinkEntryInfo(1)
    LANDING_TARGET_TYPE_RADIO_BEACON,

    /**
     * Landing target represented by a fiducial marker (ex: ARTag)
     */
    @MavlinkEntryInfo(2)
    LANDING_TARGET_TYPE_VISION_FIDUCIAL,

    /**
     * Landing target represented by a pre-defined visual shape/feature (ex: X-marker, H-marker,
     * square)
     */
    @MavlinkEntryInfo(3)
    LANDING_TARGET_TYPE_VISION_OTHER
}
|
class Truth {
constructor(public name: string) { }
}
class Falsehood {
constructor(public name: string) { }
}
export const True = new Truth("true");
export const False = new Falsehood("false"); |
"""
Create a classification algorithm that can differentiate between healthy and unhealthy heart sounds.
"""
import numpy as np
import pandas as pd
import librosa
# Load the dataset
data = ...
X, y = ...
# Extract audio features
features = []
for fname in X:
# Load an audio signal and extract audio features
signal, sr = librosa.load(fname)
mfcc = librosa.feature.mfcc(signal, sr=sr)
chroma = librosa.feature.chroma_stft(signal, sr=sr)
mel = librosa.feature.melspectrogram(signal, sr=sr)
contrast = librosa.feature.spectral_contrast(signal, sr=sr)
tonnetz = librosa.feature.tonnetz(signal, sr=sr)
features.append(np.hstack([mfcc.flatten(), chroma.flatten(), mel.flatten(), contrast.flatten(), tonnetz.flatten()]))
# Convert features to a numpy array
X = np.asarray(features)
# Split the data into train and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)
# Create a random forest classifier
clf = RandomForestClassifier(n_estimators=100, random_state=0)
# Fit the classier to the training data and make predictions on the test set
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)
# Calculate the accuracy of the model
accuracy = accuracy_score(y_test, y_pred)
print("Model accuracy: {:.2f}%".format(accuracy*100)) |
<filename>models.js
"use strict";
var eventTypes = {
remoteOffice: 1,
outOfOffice: 2
};
function EventType (eventTypeId) {
this._type = eventTypeId;
}
EventType.prototype.toString = function() {
if (this._type === eventTypes.remoteOffice) {
return "RO";
} else if (this._type === eventTypes.outOfOffice) {
return "OOO";
} else {
return undefined;
}
};
EventType.prototype.isRemoteOffice = function() {
return (this._type === eventTypes.remoteOffice);
};
EventType.prototype.isOutOfOffice = function() {
return (this._type === eventTypes.outOfOffice);
};
var eventTypesEnum = {
remoteOffice: new EventType(eventTypes.remoteOffice),
outOfOffice: new EventType(eventTypes.outOfOffice)
};
Object.freeze(eventTypesEnum.remoteOffice);
Object.freeze(eventTypesEnum.outOfOffice);
Object.freeze(eventTypesEnum);
module.exports = {
EventTypes: eventTypesEnum
}; |
def filter_array_elements(arr):
    """Return the strings from ``arr`` that start with 'A'.

    Args:
        arr: iterable of strings.

    Returns:
        list of the elements beginning with 'A'. Empty strings are
        skipped (the previous ``string[0]`` check raised IndexError
        on an empty string).
    """
    return [s for s in arr if s.startswith('A')]
# Demo: keep only the entries beginning with 'A' and print them.
output = filter_array_elements(["Apple", "Grape", "Banana", "Aeroplane"])
print(output)
package egoscale
import (
"encoding/json"
"io/ioutil"
"net/http"
"testing"
)
var testZone = "ch-gva-2"
func testUnmarshalJSONRequestBody(t *testing.T, req *http.Request, v interface{}) {
data, err := ioutil.ReadAll(req.Body)
if err != nil {
t.Fatalf("error reading request body: %s", err)
}
if err = json.Unmarshal(data, v); err != nil {
t.Fatalf("error while unmarshalling JSON body: %s", err)
}
}
|
<gh_stars>1-10
package com.hellosign.data;
import com.hellosign.utility.SQLHandler;
import org.testng.annotations.DataProvider;
import java.sql.ResultSet;
import java.util.ArrayList;
/**
 * TestNG {@link DataProvider} sources shared by the HelloSign test suite.
 *
 * <p>Providers here return either hard-coded tables (document/signature
 * combinations, password-policy cases) or rows fetched from a PostgreSQL
 * database through the project's {@code SQLHandler} utility.
 */
public class DataProviders {
    /**
     * Combinations of signing option, document path and signature method.
     *
     * <p>Most rows are commented out, presumably to keep local runs short;
     * uncomment them to broaden coverage.
     */
    @DataProvider(name = "Different combinations of documents and signatures")
    public static Object[][] completeDocument() {
        return new Object[][]{
                {Data.signingOptionJustMe, Data.pathToPdfFile, Data.drawItIn},
//                {Data.signingOptionJustMe, Data.pathToPdfFile, Data.savedSignatures},
//                {Data.signingOptionJustMe, Data.pathToPdfFile, Data.typeItIn},
//                {Data.signingOptionJustMe, Data.pathToPdfFile, Data.uploadImage},
//                {Data.signingOptionJustMe, Data.pathToPdfFile, Data.useSmartphone},
//                {Data.signingOptionMeAndOthers, Data.pathToPdfFile, ""},
//                {Data.signingOptionJustOthers, Data.pathToPdfFile, ""},
//
//                {Data.signingOptionJustMe, Data.pathToDocFile, Data.savedSignatures},
//                {Data.signingOptionJustMe, Data.pathToDocFile, Data.drawItIn},
//                {Data.signingOptionJustMe, Data.pathToDocFile, Data.typeItIn},
//                {Data.signingOptionJustMe, Data.pathToDocFile, Data.uploadImage},
//                {Data.signingOptionJustMe, Data.pathToDocFile, Data.useSmartphone},
//                {Data.signingOptionMeAndOthers, Data.pathToDocFile, ""},
//                {Data.signingOptionJustOthers, Data.pathToDocFile, ""},
//
//                {Data.signingOptionJustMe, Data.pathToJpegFile, Data.savedSignatures},
//                {Data.signingOptionJustMe, Data.pathToJpegFile, Data.drawItIn},
//                {Data.signingOptionJustMe, Data.pathToJpegFile, Data.typeItIn},
//                {Data.signingOptionJustMe, Data.pathToJpegFile, Data.uploadImage},
//                {Data.signingOptionJustMe, Data.pathToJpegFile, Data.useSmartphone},
//
//
//                {Data.signingOptionMeAndOthers, Data.pathToJpegFile, ""},
//                {Data.signingOptionJustOthers, Data.pathToJpegFile, ""}
        };
    }

    /**
     * Streams account / secondary-owner rows from a PostgreSQL database,
     * one test case per account that has a non-null id.
     *
     * <p>NOTE(review): the JDBC URL and credentials here are placeholders
     * ("ip", "username", "password") -- confirm they are substituted by the
     * environment before relying on this provider.
     */
    @DataProvider(name = "params")
    public Object[][] getDataFromDataprovider() {
        ArrayList<Object[]> out = new ArrayList<>();
        ResultSet rs = null;
        try {
            SQLHandler.connect("org.postgresql.Driver", "jdbc:postgresql://ip/nameOfDB", "username", "password");
            rs = SQLHandler.stmt.executeQuery(
                    "SELECT account.acct_id, acct_secnd_owner.first_name, acct_secnd_owner.last_name, acct_secnd_owner.street,acct_secnd_owner.city, acct_secnd_owner.state,acct_secnd_owner.zip, acct_secnd_owner.phone, account.status_info,acct_secnd_owner.email, account.open_date, account.expire_date FROM account INNER JOIN acct_secnd_owner ON acct_secnd_owner.acct_id = account.acct_id ");
            while (rs.next()) {
                // Skip rows without an account id; the id is trimmed because
                // the column apparently carries trailing whitespace.
                if (rs.getString(1) != null) {
                    String id = rs.getString(1).trim();
                    out.add(new Object[]{id, rs.getString(2), rs.getString(3), rs.getString(4), rs.getString(5), rs.getString(6), rs.getString(7), rs.getString(8), rs.getString(9), rs.getString(10), rs.getString(11), rs.getString(12)});
                }
            }
        } catch (Exception e) {
            // Any DB failure falls through to an empty provider, so TestNG
            // simply runs zero cases instead of erroring out.
            e.printStackTrace();
        } finally {
            // Presumably closes the statement/connection opened above --
            // TODO(review): confirm SQLHandler.disconnect() also closes rs.
            SQLHandler.disconnect();
        }
        return out.toArray(new Object[out.size()][]);
    }

    /**
     * Password samples paired with the expected validity under the new
     * policy (>= 8 chars, upper + lower case, digit, special character).
     */
    @DataProvider(name = "NewRequirementsForPassword")
    public static Object[][] newRequirementsForPassword() {
        return new Object[][]{
                {"Boston1!", true},
                {"Boston2@", true},
                {"#Boston3", true},
                {"$Bos4ton", true},
                {"Bos%5ton", true},
                {"Bos6^ton", true},
                {"New York&7", true},
                {"Miami**12", true},
                {"Miami*-13", true},
                {"Miami*-(14)", true},
                {"Miami*-(13)+", true},
                {"Miami.,90", true},
                {"FLORIDA", false},
                {"***", false},
                {"36363663", false},
                {"nevermind", false},
                {"Fa1234%", false}, // less than 8 characters
                {"Nevermi@", false}, //no digits
                {"Arizona1", false}, // no spec characters
                {"GLORY12*", false}, // no lower case
                {"lowerc#1", false}, // no upper case
                {"Polock12", false}, // no spec charact
        };
    }

    // Retained sample data for the superseded password policy:
//    @DataProvider(name = "OldRequirementsForPassword")
//    public static Object[][] oldRequirementsForPassword() {
//        return new Object[][]{
//
//                {"FLORIDA12", true},
//                {"***", false},
//                {"36363663", false},
//                {"Nevermind34", true},
//                {"Fa12345", false}, // less than 8 characters
//                {"Nevermi@", false}, //no digits
//                {"Arizona1", true},
//                {"GLORY12", false},
//                {"lowerc#1", false}, // no upper case
//                {"Polock12", true}, // no spec charact
//                {"Boston1!", false},
//                {"Boston2@", false},
//                {"Boston2@", false},
//                {"#Boston3", false},
//                {"$Bos4ton", false},
//                {"Bos%5ton", false},
//                {"Bos6^ton", false},
//                {"New York&7", false},
//                {"Miami**12", false},
//                {"Miami*-13", false},
//                {"Miami*-(14)", false},
//                {"Miami*-(13)+", false},
//                {"Miami.,90", false},
//        };
//    }
}
|
import {Value} from '../values'
// Runs of characters that count as token text (punctuation and whitespace act
// as separators). CHARS_WITH_WILDCARD is identical except that `*` is allowed
// through so search patterns can carry wildcards.
const CHARS = /([^!@#$%^&*(),\\/?";:{}|[\]+<>\s-])+/g
const CHARS_WITH_WILDCARD = /([^!@#$%^&(),\\/?";:{}|[\]+<>\s-])+/g
// Dots glued to a word boundary (leading or trailing), stripped before matching.
const EDGE_CHARS = /(\b\.+|\.+\b)/g
// Search terms longer than this are truncated before being compiled to a RegExp.
const MAX_TERM_LENGTH = 1024
// A single searchable word extracted from a document.
export type Token = string
// A predicate deciding whether a token list satisfies one search term.
export type Pattern = (tokens: Token[]) => boolean
/**
 * Returns true when every pattern accepts the token list.
 * An empty token list or an empty pattern list never matches.
 */
export function matchText(tokens: Token[], patterns: Pattern[]): boolean {
  const hasInput = tokens.length > 0 && patterns.length > 0
  return hasInput && patterns.every((pattern) => pattern(tokens))
}
/** Splits text into searchable tokens, dropping separators and edge dots. */
export function matchTokenize(text: string): Token[] {
  const stripped = text.replace(EDGE_CHARS, '')
  return stripped.match(CHARS) || []
}
/** Compiles a search string into Pattern predicates, one per term. */
export function matchAnalyzePattern(text: string): Pattern[] {
  const regexes = matchPatternRegex(text)
  return regexes.map((re) => {
    return (tokens: Token[]) => tokens.some((token) => re.test(token))
  })
}
/**
 * Builds one case-insensitive, fully-anchored RegExp per term in the input,
 * expanding `*` wildcards to `.*` and truncating terms to MAX_TERM_LENGTH.
 */
export function matchPatternRegex(text: string): RegExp[] {
  const cleaned = text.replace(EDGE_CHARS, '')
  const terms = cleaned.match(CHARS_WITH_WILDCARD) || []
  return terms.map((term) => {
    const body = term.slice(0, MAX_TERM_LENGTH).replace(/\*/g, '.*')
    return new RegExp(`^${body}$`, 'i')
  })
}
/**
 * Invokes cb with every string contained in value and reports coverage.
 * Returns true only when value is itself a string, or an (async-iterable)
 * array whose elements are all strings; non-string elements are skipped
 * but force a false result.
 */
export async function gatherText(value: Value, cb: (str: string) => void): Promise<boolean> {
  if (value.type === 'string') {
    cb(value.data)
    return true
  }
  if (!value.isArray()) {
    return false
  }
  let allStrings = true
  for await (const element of value) {
    if (element.type === 'string') {
      cb(element.data)
    } else {
      allStrings = false
    }
  }
  return allStrings
}
|
<filename>spring-boot-servlet/src/main/java/com/example/servlet/package-info.java
package com.example.servlet; |
<filename>src/useCases/GetUrl/GetUrlUseCase.ts
import { IUrlRepository } from '@/repositories/IUrlRepository'
import { IGetUrlResponseDTO, IGetUrlRequestDTO } from './IGetUrlDTO'
export class GetUrlUseCase {
  constructor (
    private urlRepository: IUrlRepository
  ) {}

  /**
   * Resolves a shortened URL back to its original target.
   *
   * @param data carries the short URL to look up.
   * @returns the original URL when found and not expired.
   * @throws Error('URL not found') when the short URL is unknown.
   * @throws Error('URL expired') when the stored expiry is in the past.
   */
  async execute (data:IGetUrlRequestDTO):Promise<IGetUrlResponseDTO> {
    const originalUrl = await this.urlRepository.findUrl(data.smallUrl)
    if (!originalUrl) throw new Error('URL not found')

    // Compare as epoch timestamps. The previous code compared UTC date
    // *strings* lexicographically, which orders by weekday name rather
    // than chronologically. Assumes `expire` is a Date or a date string
    // parseable by the Date constructor -- TODO confirm repository schema.
    if (new Date(originalUrl.expire).getTime() < Date.now()) throw new Error('URL expired')

    return { originalUrl: originalUrl.url }
  }
}
|
<reponame>smagill/opensphere-desktop<filename>open-sphere-base/control-panels/src/test/java/io/opensphere/controlpanels/animation/view/TimeInstantSpanWrapperTest.java
package io.opensphere.controlpanels.animation.view;
import static org.junit.Assert.assertEquals;
import org.junit.Assert;
import org.junit.Test;
import io.opensphere.core.model.time.TimeInstant;
import io.opensphere.core.model.time.TimeSpan;
import io.opensphere.core.units.duration.Days;
import io.opensphere.core.util.ObservableValue;
import io.opensphere.core.util.StrongObservableValue;
/** Tests for {@link TimeInstantSpanWrapper}. */
public class TimeInstantSpanWrapperTest
{
    /** Test for {@link TimeInstantSpanWrapper}. */
    @Test
    public void testIt()
    {
        long spanStart = 1487314800000L; // 2017/02/17
        long spanEnd = spanStart + 86400_000L;

        ObservableValue<TimeSpan> span = new StrongObservableValue<>();
        span.set(TimeSpan.get(spanStart, Days.ONE));

        // Wrappers exposing the start and end instants of the same span.
        TimeInstantSpanWrapper wrapStart = new TimeInstantSpanWrapper(span, true);
        assertEquals(spanStart, wrapStart.get().getEpochMillis());
        TimeInstantSpanWrapper wrapEnd = new TimeInstantSpanWrapper(span, false);
        assertEquals(spanEnd, wrapEnd.get().getEpochMillis());

        // Moving the start must leave the end untouched, and vice versa.
        wrapStart.set(TimeInstant.get(spanStart + 100));
        assertEquals(spanStart + 100, span.get().getStart());
        assertEquals(spanEnd, span.get().getEnd());

        wrapEnd.set(TimeInstant.get(spanEnd + 100));
        assertEquals(spanStart + 100, span.get().getStart());
        assertEquals(spanEnd + 100, span.get().getEnd());

        // Setting an end earlier than the start must be rejected...
        try
        {
            wrapEnd.set(TimeInstant.get(spanStart));
            Assert.fail("Should have thrown exception");
        }
        catch (IllegalArgumentException e)
        {
            // expected failure path
            Assert.assertNotNull(e);
        }
        // ...and must leave the span unchanged.
        assertEquals(spanStart + 100, span.get().getStart());
        assertEquals(spanEnd + 100, span.get().getEnd());
    }
}
|
<gh_stars>1-10
/***********************************************************************
Write a function called `valuesInObject(obj)` that takes in an object and returns
an array of all the values within that Object.
Do this once using using a `for...in` loop and once using `Object.values`.
Examples:
let animals = {dog: "Wolfie", cat: "Jet", bison: "Bilbo"}
let foods = {apple: "tart", lemon: "sour", mango: "sweet"}
valuesInObject(animals); // => ["Wolfie", "Jet", "Bilbo"]
valuesInObject(foods); // => ["tart", "sour", "sweet"]
***********************************************************************/
function valuesInObject( obj ) {
return Object.values( obj );
}
// solution 2
// function valuesInObject(obj) {
// let array = [];
// for (key in obj) {
// let value = obj[key];
// array.push(value);
// }
// return array;
// }
module.exports = valuesInObject;
|
<gh_stars>1-10
/* GENERATED FILE */
import { html, svg, define } from "hybrids";
/**
 * <ph-hourglass> -- Phosphor "hourglass" icon as a hybrids custom element.
 * Configured through the properties below; `weight` selects which of the
 * pre-generated SVG variants is rendered.
 * Generated file: edit the generator, not this output.
 */
const PhHourglass = {
  color: "currentColor", // stroke/fill color; defaults to inherited text color
  size: "1em", // rendered width and height
  weight: "regular", // one of: bold | duotone | fill | light | thin | regular
  mirrored: false, // when true, flips the icon horizontally
  render: ({ color, size, weight, mirrored }) => html`
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width="${size}"
      height="${size}"
      fill="${color}"
      viewBox="0 0 256 256"
      transform=${mirrored ? "scale(-1, 1)" : null}
    >
      ${weight === "bold" &&
      svg`<path d="M128,128,67.2,82.4A8,8,0,0,1,64,76V40a8,8,0,0,1,8-8H184a8,8,0,0,1,8,8V75.6412a8,8,0,0,1-3.17594,6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>
      <path d="M128,128,67.2,173.6A8,8,0,0,0,64,180v36a8,8,0,0,0,8,8H184a8,8,0,0,0,8-8V180.3588a8,8,0,0,0-3.17594-6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>`}
      ${weight === "duotone" &&
      svg`<path d="M128,128,67.2,82.4A8,8,0,0,1,64,76V40a8,8,0,0,1,8-8H184a8,8,0,0,1,8,8V75.6412a8,8,0,0,1-3.17594,6.38188L128,128h0" opacity="0.2"/>
      <path d="M128,128,67.2,173.6A8,8,0,0,0,64,180v36a8,8,0,0,0,8,8H184a8,8,0,0,0,8-8V180.3588a8,8,0,0,0-3.17594-6.38188L128,128h0" opacity="0.2"/>
      <path d="M128,128,67.2,82.4A8,8,0,0,1,64,76V40a8,8,0,0,1,8-8H184a8,8,0,0,1,8,8V75.6412a8,8,0,0,1-3.17594,6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <path d="M128,128,67.2,173.6A8,8,0,0,0,64,180v36a8,8,0,0,0,8,8H184a8,8,0,0,0,8-8V180.3588a8,8,0,0,0-3.17594-6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
      ${weight === "fill" &&
      svg`<path d="M200,75.64392V40.0033a16.02084,16.02084,0,0,0-16-16H72a16.02084,16.02084,0,0,0-16,16v36a16.08093,16.08093,0,0,0,6.40625,12.79687l52.26563,39.20313L62.39844,167.20642A16.07348,16.07348,0,0,0,56,180.0033v36a16.02085,16.02085,0,0,0,16,16H184a16.02085,16.02085,0,0,0,16-16V180.36267a16.033,16.033,0,0,0-6.35937-12.76562l-52.377-39.59375,52.377-39.59375A16.05549,16.05549,0,0,0,200,75.64392Z"/>`}
      ${weight === "light" &&
      svg`<path d="M128,128,67.2,82.4A8,8,0,0,1,64,76V40a8,8,0,0,1,8-8H184a8,8,0,0,1,8,8V75.6412a8,8,0,0,1-3.17594,6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>
      <path d="M128,128,67.2,173.6A8,8,0,0,0,64,180v36a8,8,0,0,0,8,8H184a8,8,0,0,0,8-8V180.3588a8,8,0,0,0-3.17594-6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>`}
      ${weight === "thin" &&
      svg`<path d="M128,128,67.2,82.4A8,8,0,0,1,64,76V40a8,8,0,0,1,8-8H184a8,8,0,0,1,8,8V75.6412a8,8,0,0,1-3.17594,6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>
      <path d="M128,128,67.2,173.6A8,8,0,0,0,64,180v36a8,8,0,0,0,8,8H184a8,8,0,0,0,8-8V180.3588a8,8,0,0,0-3.17594-6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>`}
      ${weight === "regular" &&
      svg`<path d="M128,128,67.2,82.4A8,8,0,0,1,64,76V40a8,8,0,0,1,8-8H184a8,8,0,0,1,8,8V75.6412a8,8,0,0,1-3.17594,6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>
      <path d="M128,128,67.2,173.6A8,8,0,0,0,64,180v36a8,8,0,0,0,8,8H184a8,8,0,0,0,8-8V180.3588a8,8,0,0,0-3.17594-6.38188L128,128h0" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
    </svg>
  `,
};

// Register the custom element tag and expose the definition for composition.
define("ph-hourglass", PhHourglass);
export default PhHourglass;
|
#!/usr/bin/env bash
# Prefix the static maxPos definition with __managed__ in every file passed
# on the command line, editing the files in place.
# "$@" is quoted so filenames containing whitespace are passed through intact
# (the previous unquoted $@ was subject to word splitting and globbing).
sed -i 's/^static maxpos_t maxPos = 0;/__managed__ static maxpos_t maxPos = 0;/g' "$@"
|
<filename>src/main/java/net/fabrictest/block/ModBlocks.java
package net.fabrictest.block;
import net.fabricmc.fabric.api.item.v1.FabricItemSettings;
import net.fabricmc.fabric.api.object.builder.v1.block.FabricBlockSettings;
import net.fabricmc.fabric.api.tool.attribute.v1.FabricToolTags;
import net.fabrictest.fabrictest;
import net.minecraft.block.Block;
import net.minecraft.block.Material;
import net.minecraft.item.BlockItem;
import net.minecraft.item.Item;
import net.minecraft.item.ItemGroup;
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
/**
 * Declares this mod's blocks and registers them (together with their
 * inventory {@link BlockItem}s) in the Minecraft registries. Referencing any
 * constant, or calling {@link #register()}, triggers the static registration.
 */
public class ModBlocks {

    /** Stone-like ore block; requires a tool to harvest (no hand-breaking). */
    public static final Block PRUEBITA_ORE = registerBlock("pruebita_ore",
            new Block(FabricBlockSettings.of(Material.STONE)
                    .strength(1.5f)
                    .breakByHand(false)
                    .requiresTool()),
            ItemGroup.BUILDING_BLOCKS);

    /** Registers the block plus its item form, returning the registered block. */
    private static Block registerBlock(String name, Block block, ItemGroup group) {
        registerBlockItem(name, block, group);
        return Registry.register(Registry.BLOCK, new Identifier(fabrictest.MOD_ID, name), block);
    }

    /** Creates and registers the BlockItem representing the block in inventories. */
    private static Item registerBlockItem(String name, Block block, ItemGroup group) {
        BlockItem item = new BlockItem(block, new FabricItemSettings().group(group));
        return Registry.register(Registry.ITEM, new Identifier(fabrictest.MOD_ID, name), item);
    }

    /** Forces class loading so the static registrations above execute. */
    public static void register() {
        System.out.println("Registering blocks.");
    }
}
|
/** Add more elements here, along with a valid value for each key.
 * Element keys must have the same name as their element's ID.
 **/
// Default keyboard shortcut for each hotkey-configurable action.
export const defaultKeys = {
    // File / circuit actions
    "New Circuit": "Shift + N",
    "Save Online": "Ctrl + S",
    "Save Offline": "Ctrl + Alt + S",
    "Download as Image": "Ctrl + D",
    "Open Offline": "Ctrl + O",
    "Insert Sub-circuit": "Shift + C",
    "Combinational Analysis": "Shift + A",
    // "Start Plot": "Ctrl + P",
    // Element orientation
    "Direction Up": "Up",
    "Direction Down": "Down",
    "Direction Left": "Left",
    "Direction Right": "Right",
    // Labels
    "Insert Label": "Ctrl + L",
    "Label Direction Up": "Alt + Up",
    "Label Direction Down": "Alt + Down",
    "Label Direction Left": "Alt + Left",
    "Label Direction Right": "Alt + Right",
    // Element movement
    "Move Element Up": "Shift + Up",
    "Move Element Down": "Shift + Down",
    "Move Element Left": "Shift + Left",
    "Move Element Right": "Shift + Right",
    // Help / preferences
    "Hotkey Preference": "F8",
    "Open Documentation": "F1",
};
|
require File.dirname(__FILE__) + '/spec_helper.rb'
describe Punch do
it 'should load data' do
Punch.should.respond_to(:load)
end
describe 'when loading data' do
before do
@data = <<-EOD
---
rip:
- out: 2008-05-19T18:34:39.00-05:00
log:
- punch in @ 2008-05-19T17:09:05-05:00
- punch out @ 2008-05-19T18:34:39-05:00
total: "01:25:34"
in: 2008-05-19T17:09:05.00-05:00
- out: 2008-05-19T21:04:03.00-05:00
total: "00:50:22"
log:
- punch in @ 2008-05-19T20:13:41-05:00
- punch out @ 2008-05-19T21:04:03-05:00
in: 2008-05-19T20:13:41.00-05:00
ps:
- out: 2008-05-19T12:18:52.00-05:00
log:
- punch in @ 2008-05-19T11:23:35-05:00
- punch out @ 2008-05-19T12:18:52-05:00
total: "00:55:17"
in: 2008-05-19T11:23:35.00-05:00
EOD
File.stub!(:read).and_return(@data)
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.reset
end
it 'should read the ~/.punch.yml file' do
File.should.receive(:read).with(File.expand_path('~/.punch.yml')).and_return(@data)
Punch.load
end
describe 'when the file is found' do
it 'should load the data as yaml' do
Punch.load
Punch.data.should == YAML.load(@data)
end
it 'should return true' do
Punch.load.should == true
end
describe 'and is empty' do
before do
File.stub!(:read).and_return('')
end
it 'should set the data to an empty hash' do
Punch.load
Punch.data.should == {}
end
it 'should return true' do
Punch.load.should == true
end
end
end
describe 'when no file is found' do
before do
File.stub!(:read).and_raise(Errno::ENOENT)
end
it 'should set the data to an empty hash' do
Punch.load
Punch.data.should == {}
end
it 'should return true' do
Punch.load.should == true
end
end
describe 'and returning data' do
it 'should return the data if set' do
val = { 'rip' => [] }
Punch.data = val
Punch.data.should == val
end
it 'should load the data if not set' do
Punch.data = nil
Punch.data.should == YAML.load(@data)
end
end
end
it 'should reset itself' do
Punch.should.respond_to(:reset)
end
describe 'when resetting itself' do
before do
Punch.instance_eval do
class << self
public :data=
end
end
end
it 'should set its data to nil' do
Punch.data = { 'proj' => 'lots of stuff here' }
Punch.reset
Punch.instance_variable_get('@data').should.be.nil
end
end
it 'should write data' do
Punch.should.respond_to(:write)
end
describe 'when writing data' do
before do
@file = mock('file')
File.stub!(:open).and_yield(@file)
@data = { 'proj' => 'data goes here' }
Punch.instance_eval do
class << self
public :data=
end
end
Punch.data = @data
end
it 'should open the data file for writing' do
File.should.receive(:open).with(File.expand_path('~/.punch.yml'), 'w')
Punch.write
end
it 'should write the data to the file in YAML form' do
@file.should.receive(:puts).with(@data.to_yaml)
Punch.write
end
end
it 'should give project status' do
Punch.should.respond_to(:status)
end
describe "giving a project's status" do
before do
@now = Time.now
@projects = { 'out' => 'test-o', 'in' => 'testshank' }
@data = {
@projects['out'] => [ { 'in' => @now, 'out' => @now + 12 } ],
@projects['in'] => [ { 'in' => @now } ]
}
Punch.instance_eval do
class << self
public :data=
end
end
Punch.data = @data
end
it 'should accept a project name' do
lambda { Punch.status('proj') }.should.not.raise(ArgumentError)
end
it 'should not require a project name' do
lambda { Punch.status }.should.not.raise(ArgumentError)
end
it "should return 'out' if the project is currently punched out" do
Punch.status(@projects['out']).should == 'out'
end
it "should return 'in' if the project is currently punched in" do
Punch.status(@projects['in']).should == 'in'
end
it 'should return nil if the project does not exist' do
Punch.status('other project').should.be.nil
end
it 'should return nil if the project has no time data' do
project = 'empty project'
@data[project] = []
Punch.data = @data
Punch.status(project).should.be.nil
end
it 'should use the last time entry for the status' do
@data[@projects['out']].unshift *[{ 'in' => @now - 100 }, { 'in' => @now - 90, 'out' => @now - 50 }]
@data[@projects['in']].unshift *[{ 'in' => @now - 100, 'out' => @now - 90 }, { 'in' => @now - 50 }]
Punch.data = @data
Punch.status(@projects['out']).should == 'out'
Punch.status(@projects['in']).should == 'in'
end
it 'should return the status of all projects if no project name given' do
Punch.status.should == { @projects['out'] => 'out', @projects['in'] => 'in' }
end
it 'should accept options' do
lambda { Punch.status('proj', :full => true) }.should.not.raise(ArgumentError)
end
describe 'when given a :full option' do
it 'should return the status and the time of that status if the project is currently punched in' do
Punch.status(@projects['in'], :full => true).should == { :status => 'in', :time => @now }
end
it 'should return the status and the time of that status if the project is currently punched out' do
Punch.status(@projects['out'], :full => true).should == { :status => 'out', :time => @now + 12 }
end
it 'should return nil if project does not exist' do
Punch.status('other project', :full => true).should.be.nil
end
it 'should return the full status of all projects if nil is given as the project' do
Punch.status(nil, :full => true).should == {
@projects['out'] => { :status => 'out', :time => @now + 12 },
@projects['in'] => { :status => 'in', :time => @now }
}
end
it 'should return the full status of all projects if no project given' do
Punch.status(:full => true).should == {
@projects['out'] => { :status => 'out', :time => @now + 12 },
@projects['in'] => { :status => 'in', :time => @now }
}
end
it 'should include a message for a punched-in project with log messages' do
message = '<PASSWORD>'
@data[@projects['in']].last['log'] = [message]
Punch.status(@projects['in'], :full => true).should == { :status => 'in', :time => @now, :message => message }
end
it 'should use the last log message for punched-in projects' do
message = 'some <PASSWORD>'
@data[@projects['in']].last['log'] = ['some other message', message]
Punch.status(@projects['in'], :full => true).should == { :status => 'in', :time => @now, :message => message }
end
it 'should not include a message for a punched-out project with log messages' do
@data[@projects['out']].last['log'] = ['some message']
Punch.status(@projects['out'], :full => true).should == { :status => 'out', :time => @now + 12 }
end
end
describe 'when given a :short option' do
it "should return 'in' if the project is currently punched in" do
Punch.status(@projects['in'], :short => true).should == 'in'
end
it "should return 'out' if the project is currently punched out" do
Punch.status(@projects['out'], :short => true).should == 'out'
end
it 'should return nil if project does not exist' do
Punch.status('other project', :short => true).should.be.nil
end
describe 'handling multiple projects' do
before do
@projects['in2'] = 'bingbang'
@projects['out2'] = 'boopadope'
@data[@projects['in2']] = [ { 'in' => @now - 5 } ]
@data[@projects['out2']] = [ { 'in' => @now - 500, 'out' => @now - 20 } ]
Punch.data = @data
end
it 'should return just the punched-in projects if nil is given as the project' do
Punch.status(nil, :short => true).should == {
@projects['in'] => 'in',
@projects['in2'] => 'in'
}
end
it 'should return just the punched-in projects if no project given' do
Punch.status(:short => true).should == {
@projects['in'] => 'in',
@projects['in2'] => 'in'
}
end
it 'should not include empty projects' do
@data['empty_project'] = []
Punch.data = @data
Punch.status(:short => true).should == {
@projects['in'] => 'in',
@projects['in2'] => 'in'
}
end
it "should return 'out' if all projects are punched out" do
@data.delete(@projects['in'])
@data.delete(@projects['in2'])
Punch.data = @data
Punch.status(:short => true).should == 'out'
end
it "should return 'out' if all projects are punched out or empty" do
@data.delete(@projects['in'])
@data.delete(@projects['in2'])
@data['empty_project'] = []
Punch.data = @data
Punch.status(:short => true).should == 'out'
end
end
end
describe 'when given both :short and :full options' do
it 'should return the full status of a punched-in project' do
Punch.status(@projects['in'], :short => true, :full => true).should == { :status => 'in', :time => @now }
end
it 'should return the full status of a punched-out project' do
Punch.status(@projects['out'], :short => true, :full => true).should == { :status => 'out', :time => @now + 12 }
end
it 'should return nil if project does not exist' do
Punch.status('other project', :short => true, :full => true).should.be.nil
end
describe 'handling multiple projects' do
before do
@projects['in2'] = 'bingbang'
@projects['out2'] = 'boopadope'
@data[@projects['in2']] = [ { 'in' => @now - 5 } ]
@data[@projects['out2']] = [ { 'in' => @now - 500, 'out' => @now - 20 } ]
Punch.data = @data
end
it 'should return the full status of just the punched-in projects if nil is given as the project' do
Punch.status(nil, :short => true, :full => true).should == {
@projects['in'] => { :status => 'in', :time => @now },
@projects['in2'] => { :status => 'in', :time => @now - 5 }
}
end
it 'should return the full status of just the punched-in projects if no project given' do
Punch.status(:short => true, :full => true).should == {
@projects['in'] => { :status => 'in', :time => @now },
@projects['in2'] => { :status => 'in', :time => @now - 5 }
}
end
it 'should not include empty projects' do
@data['empty_project'] = []
Punch.data = @data
Punch.status(:short => true, :full => true).should == {
@projects['in'] => { :status => 'in', :time => @now },
@projects['in2'] => { :status => 'in', :time => @now - 5 }
}
end
it "should return 'out' if all projects are punched out" do
@data.delete(@projects['in'])
@data.delete(@projects['in2'])
Punch.data = @data
Punch.status(:short => true, :full => true).should == 'out'
end
it "should return 'out' if all projects are punched out or empty" do
@data.delete(@projects['in'])
@data.delete(@projects['in2'])
@data['empty_project'] = []
Punch.data = @data
Punch.status(:short => true, :full => true).should == 'out'
end
end
end
describe 'handling a sub-project' do
before do
@projects['parent'] = 'daddy'
@projects['child'] = @projects['parent'] + '/sugar'
end
it "should return 'in' for a non-existent parent project if the sub-project is punched in" do
@data[@projects['child']] = [ { 'in' => @now } ]
Punch.data = @data
Punch.status(@projects['parent']).should == 'in'
end
it "should return 'in' for an empty parent project if the sub-project is punched in" do
@data[@projects['parent']] = []
@data[@projects['child']] = [ { 'in' => @now } ]
Punch.data = @data
Punch.status(@projects['parent']).should == 'in'
end
it "should return 'in' for a punched-out parent project if the sub-project is punched in" do
@data[@projects['parent']] = [ { 'in' => @now - 13, 'out' => @now - 5 } ]
@data[@projects['child']] = [ { 'in' => @now } ]
Punch.data = @data
Punch.status(@projects['parent']).should == 'in'
end
it "should use the sub-project's punch-in time for the parent project when returning full status" do
@data[@projects['child']] = [ { 'in' => @now } ]
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should == { :status => 'in', :time => @now }
@data[@projects['parent']] = []
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should == { :status => 'in', :time => @now }
@data[@projects['parent']] = [ { 'in' => @now - 13, 'out' => @now - 5 } ]
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should == { :status => 'in', :time => @now }
end
it "should return nil for a non-existent parent project if the sub-project does not exist" do
Punch.status(@projects['parent']).should.be.nil
end
it "should return nil for an empty parent project if the sub-project does not exist" do
@data[@projects['parent']] = []
Punch.data = @data
Punch.status(@projects['parent']).should.be.nil
end
it "should return nil for a non-existent parent project if the sub-project is empty" do
@data[@projects['child']] = []
Punch.data = @data
Punch.status(@projects['parent']).should.be.nil
end
it "should return nil for an empty parent project if the sub-project is empty" do
@data[@projects['parent']] = []
@data[@projects['child']] = []
Punch.data = @data
Punch.status(@projects['parent']).should.be.nil
end
it "should return nil for the parent project when returning full status" do
Punch.status(@projects['parent'], :full => true).should.be.nil
@data[@projects['parent']] = []
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should.be.nil
@data.delete(@projects['parent'])
@data[@projects['child']] = []
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should.be.nil
@data[@projects['parent']] = []
@data[@projects['child']] = []
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should.be.nil
end
it "should return 'out' for a punched-out parent project if the sub-project does not exist" do
@data[@projects['parent']] = [ { 'in' => @now - 13, 'out' => @now - 5 } ]
Punch.data = @data
Punch.status(@projects['parent']).should == 'out'
end
it "should return 'out' for a punched-out parent project if the sub-project is empty" do
@data[@projects['parent']] = [ { 'in' => @now - 13, 'out' => @now - 5 } ]
@data[@projects['child']] = []
Punch.data = @data
Punch.status(@projects['parent']).should == 'out'
end
it "should use the parent project's punch-out time for the parent project when returning full status" do
@data[@projects['parent']] = [ { 'in' => @now - 13, 'out' => @now - 5 } ]
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should == { :status => 'out', :time => @now - 5 }
@data[@projects['child']] = []
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should == { :status => 'out', :time => @now - 5 }
@data[@projects['child']] = [ { 'in' => @now - 4, 'out' => @now - 1 } ]
Punch.data = @data
Punch.status(@projects['parent'], :full => true).should == { :status => 'out', :time => @now - 5 }
end
it 'should only see projects having the specific parent/child naming as sub-projects' do
@data[@projects['parent']] = []
non_child = @projects['parent'] + '_other'
@data[non_child] = [ { 'in' => @now - 45 } ]
Punch.data = @data
Punch.status(@projects['parent']).should.be.nil
end
end
end
it 'should indicate whether a project is punched out' do
Punch.should.respond_to(:out?)
end
describe 'indicating whether a project is punched out' do
before do
@project = 'testola'
end
it 'should accept a project name' do
lambda { Punch.out?('proj') }.should.not.raise(ArgumentError)
end
it 'should require a project name' do
lambda { Punch.out? }.should.raise(ArgumentError)
end
it "should get the project's status" do
Punch.should.receive(:status).with(@project)
Punch.out?(@project)
end
it "should return true if the project's status is 'out'" do
Punch.stub!(:status).and_return('out')
Punch.out?(@project).should == true
end
it "should return false if the project's status is 'in'" do
Punch.stub!(:status).and_return('in')
Punch.out?(@project).should == false
end
it "should return true if the project's status is nil" do
Punch.stub!(:status).and_return(nil)
Punch.out?(@project).should == true
end
end
it 'should indicate whether a project is punched in' do
Punch.should.respond_to(:in?)
end
describe 'indicating whether a project is punched in' do
before do
@project = 'testola'
end
it 'should accept a project name' do
lambda { Punch.in?('proj') }.should.not.raise(ArgumentError)
end
it 'should require a project name' do
lambda { Punch.in? }.should.raise(ArgumentError)
end
it "should get the project's status" do
Punch.should.receive(:status).with(@project)
Punch.in?(@project)
end
it "should return false if the project's status is 'out'" do
Punch.stub!(:status).and_return('out')
Punch.in?(@project).should == false
end
it "should return true if the project's status is 'in'" do
Punch.stub!(:status).and_return('in')
Punch.in?(@project).should == true
end
it "should return false if the project's status is nil" do
Punch.stub!(:status).and_return(nil)
Punch.in?(@project).should == false
end
end
it 'should punch a project in' do
Punch.should.respond_to(:in)
end
describe 'punching a project in' do
before do
@now = Time.now
Time.stub!(:now).and_return(@now)
@project = 'test project'
@data = { @project => [ {'in' => @now - 50, 'out' => @now - 25} ] }
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.data = @data
end
it 'should accept a project name' do
lambda { Punch.in('proj') }.should.not.raise(ArgumentError)
end
it 'should require a project name' do
lambda { Punch.in }.should.raise(ArgumentError)
end
it 'should accept options' do
lambda { Punch.in('proj', :time => Time.now) }.should.not.raise(ArgumentError)
end
describe 'when the project is already punched in' do
before do
@data = { @project => [ {'in' => @now - 50, 'out' => @now - 25}, {'in' => @now - 5} ] }
Punch.data = @data
end
it 'should not change the project data' do
old_data = @data.dup
Punch.in(@project)
Punch.data.should == old_data
end
it 'should return false' do
Punch.in(@project).should == false
end
end
describe 'when the project is not already punched in' do
it 'should add a time entry to the project data' do
Punch.in(@project)
Punch.data[@project].length.should == 2
end
it 'should use now for the punch-in time' do
Punch.in(@project)
Punch.data[@project].last['in'].should == @now
end
it 'should log a message about punch-in time' do
Punch.should.receive(:log).with(@project, 'punch in', :time => @now)
Punch.in(@project)
end
it 'should use a different time if given' do
time = @now + 50
Punch.in(@project, :time => time)
Punch.data[@project].last['in'].should == time
end
it 'should log a message using the given time' do
time = @now + 75
Punch.should.receive(:log).with(@project, 'punch in', :time => time)
Punch.in(@project, :time => time)
end
it 'should log an additional message if given' do
Punch.stub!(:log) # for the time-based message
message = 'working on some stuff'
Punch.should.receive(:log).with(@project, message, :time => @now)
Punch.in(@project, :message => message)
end
it 'should log the additional message with the given time' do
Punch.stub!(:log) # for the time-based message
time = @now + 75
message = 'working on some stuff'
Punch.should.receive(:log).with(@project, message, :time => time)
Punch.in(@project, :message => message, :time => time)
end
it 'should allow the different time to be specified using :at' do
time = @now + 50
Punch.in(@project, :at => time)
Punch.data[@project].last['in'].should == time
end
it 'should return true' do
Punch.in(@project).should == true
end
end
describe 'when the project does not yet exist' do
before do
@project = 'non-existent project'
end
it 'should create the project' do
Punch.in(@project)
Punch.data.should.include(@project)
end
it 'should add a time entry to the project data' do
Punch.in(@project)
Punch.data[@project].length.should == 1
end
it 'should use now for the punch-in time' do
Punch.in(@project)
Punch.data[@project].last['in'].should == @now
end
it 'should log a message about punch-in time' do
Punch.should.receive(:log).with(@project, 'punch in', :time => @now)
Punch.in(@project)
end
it 'should use a different time if given' do
time = @now + 50
Punch.in(@project, :time => time)
Punch.data[@project].last['in'].should == time
end
it 'should log a message using the given time' do
time = @now + 75
Punch.should.receive(:log).with(@project, 'punch in', :time => time)
Punch.in(@project, :time => time)
end
it 'should log an additional message if given' do
Punch.stub!(:log) # for the time-based message
message = 'working on some stuff'
Punch.should.receive(:log).with(@project, message, :time => @now)
Punch.in(@project, :message => message)
end
it 'should log the additional message with the given time' do
Punch.stub!(:log) # for the time-based message
time = @now + 75
message = 'working on some stuff'
Punch.should.receive(:log).with(@project, message, :time => time)
Punch.in(@project, :message => message, :time => time)
end
it 'should allow the different time to be specified using :at' do
time = @now + 50
Punch.in(@project, :at => time)
Punch.data[@project].last['in'].should == time
end
it 'should return true' do
Punch.in(@project).should == true
end
end
end
it 'should punch a project out' do
Punch.should.respond_to(:out)
end
describe 'punching a project out' do
before do
@now = Time.now
Time.stub!(:now).and_return(@now)
@project = 'test project'
@data = { @project => [ {'in' => @now - 50, 'out' => @now - 25} ] }
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.data = @data
end
it 'should accept a project name' do
lambda { Punch.out('proj') }.should.not.raise(ArgumentError)
end
it 'should not require a project name' do
lambda { Punch.out }.should.not.raise(ArgumentError)
end
it 'should accept a project name and options' do
lambda { Punch.out('proj', :time => Time.now) }.should.not.raise(ArgumentError)
end
it 'should accept options without a project name' do
lambda { Punch.out(:time => Time.now) }.should.not.raise(ArgumentError)
end
describe 'when the project is already punched out' do
it 'should not change the project data' do
old_data = @data.dup
Punch.out(@project)
Punch.data.should == old_data
end
it 'should return false' do
Punch.out(@project).should == false
end
end
describe 'when the project is not already punched out' do
before do
@data = { @project => [ {'in' => @now - 50} ] }
Punch.data = @data
end
it 'should not add a time entry to the project data' do
Punch.out(@project)
Punch.data[@project].length.should == 1
end
it 'should use now for the punch-out time' do
Punch.out(@project)
Punch.data[@project].last['out'].should == @now
end
it 'should log a message about punch-out time' do
Punch.should.receive(:log).with(@project, 'punch out', :time => @now)
Punch.out(@project)
end
it 'should use a different time if given' do
time = @now + 50
Punch.out(@project, :time => time)
Punch.data[@project].last['out'].should == time
end
it 'should log a message using the given time' do
time = @now + 75
Punch.should.receive(:log).with(@project, 'punch out', :time => time)
Punch.out(@project, :time => time)
end
it 'should log an additional message if given' do
Punch.stub!(:log) # for the time-based message
message = 'finished working on some stuff'
Punch.should.receive(:log).with(@project, message, :time => @now)
Punch.out(@project, :message => message)
end
it 'should log the additional message with the given time' do
Punch.stub!(:log) # for the time-based message
time = @now + 75
message = 'working on some stuff'
Punch.should.receive(:log).with(@project, message, :time => time)
Punch.out(@project, :message => message, :time => time)
end
it 'should allow the different time to be specified using :at' do
time = @now + 50
Punch.out(@project, :at => time)
Punch.data[@project].last['out'].should == time
end
it 'should return true' do
Punch.out(@project).should == true
end
end
describe 'when no project is given' do
before do
@projects = ['test project', 'out project', 'other project']
@data = {
@projects[0] => [ {'in' => @now - 50, 'out' => @now - 25} ],
@projects[1] => [ {'in' => @now - 300, 'out' => @now - 250}, {'in' => @now - 40} ],
@projects[2] => [ {'in' => @now - 50} ],
}
Punch.data = @data
end
it 'should punch out all projects that are currently punched in' do
Punch.out
Punch.data[@projects[0]].last['out'].should == @now - 25
Punch.data[@projects[1]].last['out'].should == @now
Punch.data[@projects[2]].last['out'].should == @now
end
it 'should log punch-out messages for all projects being punched out' do
time = @now.strftime('%Y-%m-%dT%H:%M:%S%z')
Punch.should.receive(:log).with(@projects[1], 'punch out', :time => @now)
Punch.should.receive(:log).with(@projects[2], 'punch out', :time => @now)
Punch.out
end
it 'should use a different time if given' do
time = @now + 50
Punch.out(:time => time)
Punch.data[@projects[0]].last['out'].should == @now - 25
Punch.data[@projects[1]].last['out'].should == time
Punch.data[@projects[2]].last['out'].should == time
end
it 'should log messages using the given time' do
time = @now + 75
Punch.should.receive(:log).with(@projects[1], 'punch out', :time => time)
Punch.should.receive(:log).with(@projects[2], 'punch out', :time => time)
Punch.out(:time => time)
end
it 'should log an additional message if given' do
Punch.stub!(:log) # for the time-based messages
message = 'finished working on some stuff'
Punch.should.receive(:log).with(@projects[1], message, :time => @now)
Punch.should.receive(:log).with(@projects[2], message, :time => @now)
Punch.out(:message => message)
end
it 'should allow the different time to be specified using :at' do
time = @now + 50
Punch.out(:at => time)
Punch.data[@projects[0]].last['out'].should == @now - 25
Punch.data[@projects[1]].last['out'].should == time
Punch.data[@projects[2]].last['out'].should == time
end
it 'should return true' do
Punch.out.should == true
end
describe 'when all projects were already punched out' do
before do
@projects = ['test project', 'out project', 'other project']
@data = {
@projects[0] => [ {'in' => @now - 50, 'out' => @now - 25} ],
@projects[1] => [ {'in' => @now - 300, 'out' => @now - 250}, {'in' => @now - 40, 'out' => @now - 20} ],
@projects[2] => [ {'in' => @now - 50, 'out' => @now - 35} ],
}
Punch.data = @data
end
it 'should not change the data' do
old_data = @data.dup
Punch.out
Punch.data.should == old_data
end
it 'should return false' do
Punch.out.should == false
end
end
end
describe 'handling a sub-project' do
before do
@projects = {}
@projects['parent'] = 'daddy'
@projects['child'] = @projects['parent'] + '/sugar'
end
it 'should actually punch out the sub-project when told to punch out the parent project' do
@data[@projects['parent']] = [ { 'in' => @now - 100, 'out' => @now - 50 } ]
@data[@projects['child']] = [ { 'in' => @now - 20 } ]
Punch.data = @data
Punch.out(@projects['parent'])
Punch.data[@projects['child']].last['out'].should == @now
end
it 'should not change the punch-out time for the parent project' do
@data[@projects['parent']] = [ { 'in' => @now - 100, 'out' => @now - 50 } ]
@data[@projects['child']] = [ { 'in' => @now - 20 } ]
Punch.data = @data
Punch.out(@projects['parent'])
Punch.data[@projects['parent']].last['out'].should == @now - 50
end
it 'should not add data for a non-existent parent project' do
@data[@projects['child']] = [ { 'in' => @now - 20 } ]
Punch.data = @data
Punch.out(@projects['parent'])
Punch.data[@projects['parent']].should.be.nil
end
it 'should not add data for an empty parent project' do
@data[@projects['parent']] = []
@data[@projects['child']] = [ { 'in' => @now - 20 } ]
Punch.data = @data
Punch.out(@projects['parent'])
Punch.data[@projects['parent']].should == []
end
it 'should only see projects having the specific parent/child naming as sub-projects' do
@data[@projects['parent']] = [ { 'in' => @now - 20 } ]
non_child = @projects['parent'] + '_other'
@data[non_child] = [ { 'in' => @now - 45 } ]
Punch.data = @data
Punch.out(@projects['parent'])
Punch.data[non_child].should == [ { 'in' => @now - 45 } ]
end
end
end
it 'should delete a project' do
Punch.should.respond_to(:delete)
end
describe 'deleting a project' do
before do
@now = Time.now
@project = 'test project'
@data = { @project => [ {'in' => @now - 50, 'out' => @now - 25} ] }
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.data = @data
end
it 'should accept a project name' do
lambda { Punch.delete('proj') }.should.not.raise(ArgumentError)
end
it 'should require a project name' do
lambda { Punch.delete }.should.raise(ArgumentError)
end
describe 'when the project exists' do
it 'should remove the project data' do
Punch.delete(@project)
Punch.data.should.not.include(@project)
end
it 'should return true' do
Punch.delete(@project).should == true
end
end
describe 'when the project does not exist' do
before do
@project = 'non-existent project'
end
it 'should return nil' do
Punch.delete(@project).should.be.nil
end
end
end
it 'should list project data' do
Punch.should.respond_to(:list)
end
describe 'listing project data' do
before do
@now = Time.now
@project = 'test project'
@data = { @project => [ {'in' => @now - 5000, 'out' => @now - 2500}, {'in' => @now - 2000, 'out' => @now - 1000}, {'in' => @now - 500, 'out' => @now - 100} ] }
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.data = @data
end
it 'should accept a project name' do
lambda { Punch.list('proj') }.should.not.raise(ArgumentError)
end
it 'should not require a project name' do
lambda { Punch.list }.should.not.raise(ArgumentError)
end
it 'should allow options' do
lambda { Punch.list('proj', :after => Time.now) }.should.not.raise(ArgumentError)
end
describe 'when the project exists' do
it 'should return the project data' do
Punch.list(@project).should == Punch.data[@project]
end
it 'should restrict returned data to times only after a certain time' do
Punch.list(@project, :after => @now - 501).should == Punch.data[@project].last(1)
end
it 'should restrict returned data to times only before a certain time' do
Punch.list(@project, :before => @now - 2499).should == Punch.data[@project].first(1)
end
it 'should restrict returned data to times only within a time range' do
Punch.list(@project, :after => @now - 2001, :before => @now - 999).should == Punch.data[@project][1, 1]
end
describe 'handling date options' do
before do
# yay ugly setup!
@date = Date.today - 4
morning = Time.local(@date.year, @date.month, @date.day, 0, 0, 1)
night = Time.local(@date.year, @date.month, @date.day, 23, 59, 59)
earlier_date = @date - 2
earlier_time = Time.local(earlier_date.year, earlier_date.month, earlier_date.day, 13, 43)
early_date = @date - 1
early_time = Time.local(early_date.year, early_date.month, early_date.day, 11, 25)
later_date = @date + 1
later_time = Time.local(later_date.year, later_date.month, later_date.day, 3, 12)
@data = { @project => [
{'in' => earlier_time - 50, 'out' => earlier_time + 100},
{'in' => early_time - 60, 'out' => early_time + 70},
{'in' => morning, 'out' => morning + 200},
{'in' => night - 500, 'out' => night},
{'in' => later_time - 30, 'out' => later_time + 70}
]
}
Punch.data = @data
end
it 'should accept an :on shortcut option to restrict returned data to times only on a certain day' do
Punch.list(@project, :on => @date).should == Punch.data[@project][2, 2]
end
it 'should allow the :on option to override the before/after options' do
Punch.list(@project, :on => @date, :before => @now - 2525, :after => @now - 7575).should == Punch.data[@project][2, 2]
end
it 'should allow the :after option to be a date instead of time' do
Punch.list(@project, :after => @date).should == Punch.data[@project][2..-1]
end
it 'should allow the :before option to be a date instead of time' do
Punch.list(@project, :before => @date).should == Punch.data[@project][0, 2]
end
end
describe 'and is punched in' do
before do
@data[@project].push({ 'in' => @now - 25 })
Punch.data = @data
end
it 'should restrict returned data to times only after a certain time' do
Punch.list(@project, :after => @now - 501).should == Punch.data[@project].last(2)
end
it 'should restrict returned data to times only before a certain time' do
Punch.list(@project, :before => @now - 2499).should == Punch.data[@project].first(1)
end
it 'should restrict returned data to times only within a time range' do
Punch.list(@project, :after => @now - 2001, :before => @now - 999).should == Punch.data[@project][1, 1]
end
end
end
describe 'when the project does not exist' do
before do
@project = 'non-existent project'
end
it 'should return nil' do
Punch.list(@project).should.be.nil
end
it 'should return nil if options given' do
Punch.list(@project, :after => @now - 500).should.be.nil
end
end
describe 'when no project is given' do
before do
@projects = ['test project', 'out project', 'other project']
@data = {
@projects[0] => [ {'in' => @now - 50, 'out' => @now - 25} ],
@projects[1] => [ {'in' => @now - 300, 'out' => @now - 250}, {'in' => @now - 40, 'out' => @now - 20} ],
@projects[2] => [ {'in' => @now - 50, 'out' => @now - 35} ],
}
Punch.data = @data
end
it 'should return data for all projects' do
Punch.list.should == @data
end
it 'should respect options' do
Punch.list(:after => @now - 51).should == { @projects[0] => @data[@projects[0]], @projects[1] => @data[@projects[1]].last(1), @projects[2] => @data[@projects[2]]}
end
it 'should not change the stored data when options are given' do
old_data = @data.dup
Punch.list(:after => @now - 51)
Punch.data.should == old_data
end
end
describe 'handling a sub-project' do
before do
@projects = {}
@projects['parent'] = 'daddy'
@projects['child'] = @projects['parent'] + '/sugar'
@data[@projects['parent']] = [ { 'in' => @now - 100, 'out' => @now - 50 } ]
@data[@projects['child']] = [ { 'in' => @now - 20 } ]
Punch.data = @data
end
it 'should return data for the parent and sub-project' do
list_data = { @projects['parent'] => @data[@projects['parent']], @projects['child'] => @data[@projects['child']] }
Punch.list(@projects['parent']).should == list_data
end
it 'should only see projects having the specific parent/child naming as sub-projects' do
non_child = @projects['parent'] + '_other'
@data[non_child] = [ { 'in' => @now - 45 } ]
Punch.data = @data
list_data = { @projects['parent'] => @data[@projects['parent']], @projects['child'] => @data[@projects['child']] }
Punch.list(@projects['parent']).should == list_data
end
it 'should respect options' do
list_data = { @projects['parent'] => [], @projects['child'] => @data[@projects['child']] }
Punch.list(@projects['parent'], :after => @now - 21).should == list_data
end
describe 'when no project is given' do
before do
@projects = ['test project', 'out project', 'other project']
@data[@projects[0]] = [ {'in' => @now - 50, 'out' => @now - 25} ]
@data[@projects[1]] = [ {'in' => @now - 300, 'out' => @now - 250}, {'in' => @now - 40, 'out' => @now - 20} ]
@data[@projects[2]] = [ {'in' => @now - 50, 'out' => @now - 35} ]
Punch.data = @data
end
it 'should return data for all projects' do
Punch.list.should == @data
end
end
end
end
it 'should get the total time for a project' do
Punch.should.respond_to(:total)
end
describe 'getting total time for a project' do
before do
@now = Time.now
Time.stub!(:now).and_return(@now)
@project = 'test project'
@data = { @project => [ {'in' => @now - 5000, 'out' => @now - 2500}, {'in' => @now - 2000, 'out' => @now - 1000}, {'in' => @now - 500, 'out' => @now - 100} ] }
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.data = @data
end
it 'should accept a project name' do
lambda { Punch.total('proj') }.should.not.raise(ArgumentError)
end
it 'should not require a project name' do
lambda { Punch.total }.should.not.raise(ArgumentError)
end
it 'should allow options' do
lambda { Punch.total('proj', :after => Time.now) }.should.not.raise(ArgumentError)
end
describe 'when the project exists' do
it 'should return the amount of time spent on the project (in seconds)' do
Punch.total(@project).should == 3900
end
it 'should restrict returned amount to times only after a certain time' do
Punch.total(@project, :after => @now - 501).should == 400
end
it 'should restrict returned amount to times only before a certain time' do
Punch.total(@project, :before => @now - 2499).should == 2500
end
it 'should restrict returned amount to times only within a time range' do
Punch.total(@project, :after => @now - 2001, :before => @now - 999).should == 1000
end
it 'should format the time spent if passed a format option' do
Punch.total(@project, :format => true).should == "1:05:00"
end
describe 'handling date options' do
before do
# yay ugly setup!
@date = Date.today - 4
morning = Time.local(@date.year, @date.month, @date.day, 0, 0, 1)
night = Time.local(@date.year, @date.month, @date.day, 23, 59, 59)
earlier_date = @date - 2
earlier_time = Time.local(earlier_date.year, earlier_date.month, earlier_date.day, 13, 43)
early_date = @date - 1
early_time = Time.local(early_date.year, early_date.month, early_date.day, 11, 25)
later_date = @date + 1
later_time = Time.local(later_date.year, later_date.month, later_date.day, 3, 12)
@data = { @project => [
{'in' => earlier_time - 50, 'out' => earlier_time + 100},
{'in' => early_time - 60, 'out' => early_time + 70},
{'in' => morning, 'out' => morning + 200},
{'in' => night - 500, 'out' => night},
{'in' => later_time - 30, 'out' => later_time + 70}
]
}
Punch.data = @data
end
it 'should accept an :on shortcut option to restrict returned amount to times only on a certain day' do
Punch.total(@project, :on => @date).should == 700
end
it 'should allow the :on option to override the before/after options' do
Punch.total(@project, :on => @date, :before => @now - 2525, :after => @now - 7575).should == 700
end
it 'should allow the :after option to be a date instead of time' do
Punch.total(@project, :after => @date).should == 800
end
it 'should allow the :before option to be a date instead of time' do
Punch.total(@project, :before => @date).should == 280
end
end
describe 'and is punched in' do
before do
@data[@project].push({ 'in' => @now - 25 })
Punch.data = @data
end
it 'give the time spent until now' do
Punch.total(@project).should == 3925
end
it 'should restrict returned amount to times only after a certain time' do
Punch.total(@project, :after => @now - 501).should == 425
end
it 'should restrict returned amount to times only before a certain time' do
Punch.total(@project, :before => @now - 2499).should == 2500
end
it 'should restrict returned amount to times only within a time range' do
Punch.total(@project, :after => @now - 2001, :before => @now - 999).should == 1000
end
end
end
describe 'when the project does not exist' do
before do
@project = 'non-existent project'
end
it 'should return nil' do
Punch.total(@project).should.be.nil
end
end
describe 'when no project is given' do
before do
@projects = ['test project', 'out project', 'other project']
@data = {
@projects[0] => [ {'in' => @now - 50, 'out' => @now - 25} ],
@projects[1] => [ {'in' => @now - 300, 'out' => @now - 250}, {'in' => @now - 40, 'out' => @now - 20} ],
@projects[2] => [ {'in' => @now - 50, 'out' => @now - 35} ],
}
Punch.data = @data
end
it 'should give totals for all projects' do
Punch.total.should == { @projects[0] => 25, @projects[1] => 70, @projects[2] => 15 }
end
it 'should respect options' do
Punch.total(:after => @now - 51).should == { @projects[0] => 25, @projects[1] => 20, @projects[2] => 15 }
end
it 'should format the time spent if passed a format option' do
Punch.total(:format => true).should == { @projects[0] => "00:25", @projects[1] => "01:10", @projects[2] => "00:15" }
end
end
describe 'handling a sub-project' do
before do
@projects = {}
@projects['parent'] = 'daddy'
@projects['child'] = @projects['parent'] + '/sugar'
@data[@projects['parent']] = [ { 'in' => @now - 100, 'out' => @now - 50 } ]
@data[@projects['child']] = [ { 'in' => @now - 20, 'out' => @now - 10 } ]
Punch.data = @data
end
it 'should return data for the parent and sub-project' do
total_data = { @projects['parent'] => 50, @projects['child'] => 10 }
Punch.total(@projects['parent']).should == total_data
end
it 'should respect options' do
total_data = { @projects['parent'] => 0, @projects['child'] => 10 }
Punch.total(@projects['parent'], :after => @now - 21).should == total_data
end
it 'should handle a non-existent parent project' do
@data.delete(@projects['parent'])
Punch.data = @data
total_data = { @projects['parent'] => nil, @projects['child'] => 10 }
Punch.total(@projects['parent']).should == total_data
end
it 'should handle an empty parent project' do
@data[@projects['parent']] = []
Punch.data = @data
total_data = { @projects['parent'] => 0, @projects['child'] => 10 }
Punch.total(@projects['parent']).should == total_data
end
it 'should handle an empty child project' do
@projects['other_child'] = @projects['parent'] + '/button'
@data[@projects['other_child']] = []
Punch.data = @data
total_data = { @projects['parent'] => 50, @projects['child'] => 10, @projects['other_child'] => 0 }
Punch.total(@projects['parent']).should == total_data
end
it 'should only see projects having the specific parent/child naming as sub-projects' do
non_child = @projects['parent'] + '_other'
@data[non_child] = [ { 'in' => @now - 45, 'out' => @now - 20 } ]
Punch.data = @data
total_data = { @projects['parent'] => 50, @projects['child'] => 10 }
Punch.total(@projects['parent']).should == total_data
end
describe 'when no project is given' do
before do
@extra_projects = ['test project', 'out project', 'other project']
@data[@extra_projects[0]] = [ {'in' => @now - 50, 'out' => @now - 25} ]
@data[@extra_projects[1]] = [ {'in' => @now - 300, 'out' => @now - 250}, {'in' => @now - 40, 'out' => @now - 20} ]
@data[@extra_projects[2]] = [ {'in' => @now - 50, 'out' => @now - 35} ]
Punch.data = @data
end
it 'should give totals for all projects' do
total_data = { @extra_projects[0] => 25, @extra_projects[1] => 70, @extra_projects[2] => 15, @projects['parent'] => 50, @projects['child'] => 10 }
Punch.total.should == total_data
end
end
end
end
it 'should log information about a project' do
Punch.should.respond_to(:log)
end
describe 'logging information about a project' do
before do
@now = Time.now
Time.stub!(:now).and_return(@now)
@project = 'test project'
@data = { @project => [ {'in' => @now - 50, 'log' => ['some earlier message']} ] }
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.data = @data
@message = 'some log message'
end
it 'should accept a project and message' do
lambda { Punch.log('proj', 'some mess') }.should.not.raise(ArgumentError)
end
it 'should require a message' do
lambda { Punch.log('proj') }.should.raise(ArgumentError)
end
it 'should require a project' do
lambda { Punch.log }.should.raise(ArgumentError)
end
it 'should accept options' do
lambda { Punch.log('proj', 'some mess', :time => Time.now) }.should.not.raise(ArgumentError)
end
it 'should require a project and message even when options are given' do
lambda { Punch.log('proj', :time => Time.now) }.should.raise(ArgumentError)
end
it 'should check if the project is punched in' do
Punch.should.receive(:in?).with(@project)
Punch.log(@project, @message)
end
describe 'when the project is punched in' do
it 'should add a log message to the last time entry for the project' do
Punch.log(@project, @message)
Punch.data[@project].last['log'].length.should == 2
end
it 'should use the given message for the log' do
Punch.log(@project, @message)
Punch.data[@project].last['log'].last.should.match(Regexp.new(Regexp.escape(@message)))
end
it 'should add the formatted time to the message' do
time = @now.strftime('%Y-%m-%dT%H:%M:%S%z')
Punch.log(@project, @message)
Punch.data[@project].last['log'].last.should.match(Regexp.new(Regexp.escape(time)))
end
it 'should format the message as "#{message} @ #{time}"' do
time = @now.strftime('%Y-%m-%dT%H:%M:%S%z')
Punch.log(@project, @message)
Punch.data[@project].last['log'].last.should == "#{@message} @ #{time}"
end
it 'should use a different time if given' do
time = @now + 50
time_str = time.strftime('%Y-%m-%dT%H:%M:%S%z')
Punch.log(@project, @message, :time => time)
Punch.data[@project].last['log'].last.should == "#{@message} @ #{time_str}"
end
it 'should allow the different time to be specified using :at' do
time = @now + 50
time_str = time.strftime('%Y-%m-%dT%H:%M:%S%z')
Punch.log(@project, @message, :at => time)
Punch.data[@project].last['log'].last.should == "#{@message} @ #{time_str}"
end
it 'should return true' do
Punch.log(@project, @message).should == true
end
describe 'and has no log' do
before do
@data = { @project => [ {'in' => @now - 50} ] }
Punch.data = @data
end
it 'should create the log' do
time = @now.strftime('%Y-%m-%dT%H:%M:%S%z')
Punch.log(@project, @message)
Punch.data[@project].last['log'].should == ["#{@message} @ #{time}"]
end
end
end
describe 'when the project is not punched in' do
before do
@data = { @project => [ {'in' => @now - 50, 'out' => @now - 25, 'log' => ['some earlier message']} ] }
Punch.data = @data
end
it 'should not change the project data' do
old_data = @data.dup
Punch.log(@project, @message)
Punch.data.should == old_data
end
it 'should return false' do
Punch.log(@project, @message).should == false
end
end
end
it 'should provide a summary of project time use' do
Punch.should.respond_to(:summary)
end
describe 'providing a summary of project time use' do
before do
@message = 'test usage'
@now = Time.now
Time.stub!(:now).and_return(@now)
@project = 'test project'
@data = { @project => [ {'in' => @now - 500, 'out' => @now - 100} ] }
@time_data = @data[@project].last
@time_format = '%Y-%m-%dT%H:%M:%S%z'
@time_data['log'] = ["punch in @ #{@time_data['in'].strftime(@time_format)}", "#{@message} @ #{@time_data['in'].strftime(@time_format)}", "punch out @ #{@time_data['out'].strftime(@time_format)}"]
Punch.instance_eval do
class << self
public :data, :data=
end
end
Punch.data = @data
end
it 'should accept a project name' do
lambda { Punch.summary('proj') }.should.not.raise(ArgumentError)
end
it 'should require a project name' do
lambda { Punch.summary }.should.raise(ArgumentError)
end
describe 'when the project exists' do
it 'should use the log message to indicate time usage' do
Punch.summary(@project).should == { @message => 400 }
end
it 'should break down a punched-in time based on log message times' do
other_message = 'some other message'
@time_data['log'][-1,0] = "#{other_message} @ #{(@time_data['out'] - 100).strftime(@time_format)}"
Punch.summary(@project).should == { @message => 300, other_message => 100 }
end
it 'should leave out any messages with empty times' do
other_message = 'some other message'
@time_data['log'][-1,0] = "#{other_message} @ #{(@time_data['out'] - 100).strftime(@time_format)}"
@time_data['log'][-1,0] = "some third message @ #{(@time_data['out']).strftime(@time_format)}"
Punch.summary(@project).should == { @message => 300, other_message => 100 }
end
it 'should record unspecified time use' do
@time_data['log'][1] = "#{@message} @ #{(@time_data['in'] + 100).strftime(@time_format)}"
Punch.summary(@project).should == { @message => 300, 'unspecified' => 100 }
end
it 'should record the block of time as unspecified if there are no log messages' do
@time_data['log'] = []
Punch.summary(@project).should == { 'unspecified' => 400 }
end
it 'should record the block of time as unspecified if there is no log' do
@time_data.delete('log')
Punch.summary(@project).should == { 'unspecified' => 400 }
end
it 'should summarize and combine all time data for the project' do
other_message = 'some other message'
@data = { @project => [
{'in' => @now - 650, 'out' => @now - 600, 'log' => ["punch in @ #{(@now - 650).strftime(@time_format)}", "#{@message} @ #{(@now - 650).strftime(@time_format)}", "punch out @ #{(@now - 600).strftime(@time_format)}"]},
{'in' => @now - 400, 'out' => @now - 350, 'log' => ["punch in @ #{(@now - 400).strftime(@time_format)}", "punch out @ #{(@now - 350).strftime(@time_format)}"]},
{'in' => @now - 300, 'out' => @now - 150, 'log' => ["punch in @ #{(@now - 300).strftime(@time_format)}", "#{@message} @ #{(@now - 200).strftime(@time_format)}", "punch out @ #{(@now - 150).strftime(@time_format)}"]},
{'in' => @now - 100, 'out' => @now + 250, 'log' => ["punch in @ #{(@now - 100).strftime(@time_format)}", "#{other_message} @ #{(@now - 50).strftime(@time_format)}", "punch out @ #{(@now + 250).strftime(@time_format)}"]}
] }
Punch.data = @data
Punch.summary(@project).should == { 'unspecified' => 200, @message => 100, other_message => 300 }
end
it 'should allow options' do
  lambda { Punch.summary('proj', :after => Time.now) }.should.not.raise(ArgumentError)
end

# :before / :after / :on / :format options filter and shape the summary.
describe 'handling options' do
  before do
    @other_message = 'some other message'
    @data = { @project => [
      {'in' => @now - 650, 'out' => @now - 600, 'log' => ["punch in @ #{(@now - 650).strftime(@time_format)}", "#{@message} @ #{(@now - 650).strftime(@time_format)}", "punch out @ #{(@now - 600).strftime(@time_format)}"]},
      {'in' => @now - 400, 'out' => @now - 350, 'log' => ["punch in @ #{(@now - 400).strftime(@time_format)}", "punch out @ #{(@now - 350).strftime(@time_format)}"]},
      {'in' => @now - 300, 'out' => @now - 150, 'log' => ["punch in @ #{(@now - 300).strftime(@time_format)}", "#{@message} @ #{(@now - 200).strftime(@time_format)}", "punch out @ #{(@now - 150).strftime(@time_format)}"]},
      {'in' => @now - 100, 'out' => @now + 250, 'log' => ["punch in @ #{(@now - 100).strftime(@time_format)}", "#{@other_message} @ #{(@now - 50).strftime(@time_format)}", "punch out @ #{(@now + 250).strftime(@time_format)}"]}
    ] }
    Punch.data = @data
  end

  it 'should restrict the summary to times only after a certain time' do
    Punch.summary(@project, :after => @now - 401).should == { 'unspecified' => 200, @message => 50, @other_message => 300 }
  end

  it 'should restrict the summary to times only before a certain time' do
    Punch.summary(@project, :before => @now - 149).should == { 'unspecified' => 150, @message => 100 }
  end

  it 'should restrict the summary to times only within a time range' do
    Punch.summary(@project, :after => @now - 401, :before => @now - 149).should == { 'unspecified' => 150, @message => 50 }
  end

  # :format => true renders seconds as an "HH:MM" string instead of an integer.
  it 'should format the time spent if passed a format option' do
    Punch.summary(@project, :format => true).should == { 'unspecified' => '03:20', @message => '01:40', @other_message => '05:00' }
  end

  describe 'handling date options' do
    before do
      # yay ugly setup!
      # Data placed on a fixed day plus blocks 2 days before, 1 day before and
      # 1 day after, so the date-based filters have edges to exercise.
      @date = Date.today - 4
      morning = Time.local(@date.year, @date.month, @date.day, 0, 0, 1)
      night = Time.local(@date.year, @date.month, @date.day, 23, 59, 59)
      earlier_date = @date - 2
      earlier_time = Time.local(earlier_date.year, earlier_date.month, earlier_date.day, 13, 43)
      early_date = @date - 1
      early_time = Time.local(early_date.year, early_date.month, early_date.day, 11, 25)
      later_date = @date + 1
      later_time = Time.local(later_date.year, later_date.month, later_date.day, 3, 12)
      @data = { @project => [
        {'in' => earlier_time - 50, 'out' => earlier_time + 100},
        {'in' => early_time - 60, 'out' => early_time + 70},
        {'in' => morning, 'out' => morning + 200},
        {'in' => night - 500, 'out' => night},
        {'in' => later_time - 30, 'out' => later_time + 70}
      ]
      }
      Punch.data = @data
    end

    it 'should accept an :on shortcut option to restrict the summary to times only on a certain day' do
      Punch.summary(@project, :on => @date).should == { 'unspecified' => 700 }
    end

    it 'should allow the :on option to override the before/after options' do
      Punch.summary(@project, :on => @date, :before => @now - 2525, :after => @now - 7575).should == { 'unspecified' => 700 }
    end

    it 'should allow the :after option to be a date instead of time' do
      Punch.summary(@project, :after => @date).should == { 'unspecified' => 800 }
    end

    it 'should allow the :before option to be a date instead of time' do
      Punch.summary(@project, :before => @date).should == { 'unspecified' => 280 }
    end
  end
end
# An open block (no 'out' key) is summarized up to the stubbed current time.
describe 'when the project is currently punched in' do
  before do
    @data = { @project => [ {'in' => @now - 500} ] }
    @time_data = @data[@project].last
    @time_data['log'] = ["punch in @ #{@time_data['in'].strftime(@time_format)}", "#{@message} @ #{(@time_data['in'] + 200).strftime(@time_format)}"]
    Punch.data = @data
  end

  it 'should summarize time up to now' do
    Punch.summary(@project).should == { 'unspecified' => 200, @message => 300 }
  end

  it 'should handle time data with no specific log messages' do
    @time_data['log'].pop
    Punch.summary(@project).should == { 'unspecified' => 500 }
  end

  it 'should handle time data with no log messages' do
    @time_data['log'] = []
    Punch.summary(@project).should == { 'unspecified' => 500 }
  end

  it 'should handle time data with no log' do
    @time_data.delete('log')
    Punch.summary(@project).should == { 'unspecified' => 500 }
  end
end
end
# Unknown projects yield nil rather than raising or returning an empty hash.
describe 'when the project does not exist' do
  before do
    @project = 'non-existent project'
  end

  it 'should return nil' do
    Punch.summary(@project).should.be.nil
  end
end
end
it 'should age a project' do
  Punch.should.respond_to(:age)
end

# Aging renames a project to "#{project}_old/1", cascading existing _old/N
# versions up by one, optionally moving only data before a cutoff time.
describe 'aging a project' do
  before do
    @now = Time.now
    Time.stub!(:now).and_return(@now)
    @project = 'pro-ject'
    @data = { @project => [ {'in' => @now - 3000} ] }
    # Expose the private data accessors so specs can seed and inspect state.
    Punch.instance_eval do
      class << self
        public :data, :data=
      end
    end
    Punch.data = @data
  end

  it 'should accept a project name' do
    lambda { Punch.age('proj') }.should.not.raise(ArgumentError)
  end

  it 'should require a project name' do
    lambda { Punch.age }.should.raise(ArgumentError)
  end

  it 'should accept options' do
    lambda { Punch.age('proj', :before => @now - 5000) }.should.not.raise(ArgumentError)
  end

  describe 'when the project exists' do
    it 'should move the project to #{project}_old/1' do
      Punch.age(@project)
      Punch.data.should == { "#{@project}_old/1" => [ {'in' => @now - 3000} ] }
    end

    it 'should simply increment the number of a project name in the x_old/1 format' do
      @data = { 'some_old/1' => [ {'in' => @now - 3900} ] }
      Punch.data = @data
      Punch.age('some_old/1')
      Punch.data.should == { "some_old/2" => [ {'in' => @now - 3900} ] }
    end

    it 'should cascade aging a project to older versions' do
      @data["#{@project}_old/1"] = [ {'in' => @now - 50000, 'out' => @now - 40000} ]
      @data["#{@project}_old/2"] = [ {'in' => @now - 90000, 'out' => @now - 85000} ]
      Punch.data = @data
      Punch.age(@project)
      Punch.data.should == { "#{@project}_old/1" => [ {'in' => @now - 3000} ], "#{@project}_old/2" => [ {'in' => @now - 50000, 'out' => @now - 40000} ], "#{@project}_old/3" => [ {'in' => @now - 90000, 'out' => @now - 85000} ] }
    end

    it 'should return true' do
      Punch.age(@project).should == true
    end

    describe 'when options given' do
      before do
        @data = { @project => [ {'in' => @now - 5000, 'out' => @now - 4000}, {'in' => @now - 3500, 'out' => @now - 3000}, {'in' => @now - 1500, 'out' => @now - 900}, {'in' => @now - 500, 'out' => @now - 400} ] }
        Punch.data = @data
      end

      it 'should only move the appropriate data to the old-version project' do
        expected = { @project => [ {'in' => @now - 1500, 'out' => @now - 900}, {'in' => @now - 500, 'out' => @now - 400} ],
          "#{@project}_old/1" => [ {'in' => @now - 5000, 'out' => @now - 4000}, {'in' => @now - 3500, 'out' => @now - 3000} ]
        }
        Punch.age(@project, :before => @now - 1500)
        Punch.data.should == expected
      end

      it 'should not create an old project if no data would be moved' do
        expected = { @project => [ {'in' => @now - 5000, 'out' => @now - 4000}, {'in' => @now - 3500, 'out' => @now - 3000}, {'in' => @now - 1500, 'out' => @now - 900}, {'in' => @now - 500, 'out' => @now - 400} ] }
        Punch.age(@project, :before => @now - 50000)
        Punch.data.should == expected
      end

      it 'should remove the project if all data would be moved' do
        expected = { "#{@project}_old/1" => [ {'in' => @now - 5000, 'out' => @now - 4000}, {'in' => @now - 3500, 'out' => @now - 3000}, {'in' => @now - 1500, 'out' => @now - 900}, {'in' => @now - 500, 'out' => @now - 400} ] }
        Punch.age(@project, :before => @now - 10)
        Punch.data.should == expected
      end

      it 'should not accept an after option' do
        lambda { Punch.age(@project, :after => @now - 500) }.should.raise
      end
    end
  end

  describe 'when the project does not exist' do
    before do
      @project = 'no dice'
    end

    it 'should return nil' do
      Punch.age(@project).should.be.nil
    end
  end
end
end
|
#!/bin/bash
# Append proxy/DNS/Laravel convenience aliases to the user's zsh config.
# NOTE(review): re-running this script appends duplicate alias lines;
# it performs no dedup check before writing.
{
  echo "alias setproxy='export ALL_PROXY=socks5://127.0.0.1:1080'"
  echo "alias unsetproxy='unset ALL_PROXY'"
  echo "alias flushdns='dscacheutil -flushcache'"
  echo "alias art='php artisan'"
} >> ~/.zshrc
exit
|
<filename>tests/unit/components/timezone-column-test.js
import { moduleForComponent, test } from 'ember-qunit';
import Ember from 'ember';
const { run } = Ember;
moduleForComponent('timezone-column', 'Unit | Component | Timezone Column', {
  unit: true
});

// Verifies the component buckets its `users` by `tzOffset` and orders the
// resulting groups from most western (lowest offset) to most eastern.
test('groups users by timezone offset', function(assert) {
  assert.expect(4);

  let component = this.subject();
  let halifax = Ember.Object.create({ name: 'Halifax', tzOffset: -10800 });
  let labrador = Ember.Object.create({ name: 'Labrador', tzOffset: -9000 });
  let manaus = Ember.Object.create({ name: 'Manaus', tzOffset: -10800 });
  let users = Ember.A([halifax, labrador, manaus]);
  // `groups` is a computed property; set users inside a runloop so it refreshes.
  run(() => component.set('users', users));

  let groups = component.get('groups');
  assert.equal(groups.length, 2,
    'three users with two timezones gives two groups');

  let group;
  group = groups.findBy('timezoneOffset', -9000);
  assert.deepEqual(group.get('users').sortBy('name'),
    Ember.A([labrador]),
    'has one user in group with offset -9000');

  group = groups.findBy('timezoneOffset', -10800);
  assert.deepEqual(group.get('users').sortBy('name'),
    Ember.A([halifax, manaus]),
    // Fixed: message previously claimed "offset 7200" but the group queried
    // (and the fixture data) uses offset -10800.
    'has two users in group with offset -10800');

  assert.deepEqual(groups.mapBy('timezoneOffset'),
    [-10800, -9000],
    'groups are sorted by timezone, most western first');
});
|
// Return a new array containing only the products whose `type` strictly
// equals the requested type. The input array is not modified.
function filterProducts(products, type) {
  const matches = [];
  for (const product of products) {
    if (product.type === type) {
      matches.push(product);
    }
  }
  return matches;
}
const laptops = filterProducts(products, 'Laptop');
// [{name: "Chromebook", type: "Laptop"}] |
import React from 'react';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Grid, Container, Button } from '@material-ui/core';
import particles2 from '../../../assets/images/hero-bg/particles-2.svg';
import MarketingHeaders1 from '../../MarketingHeaders/MarketingHeaders1';
/**
 * Marketing hero section: a dark composed-background banner with a headline,
 * two call-to-action buttons, a three-stat panel, and a decorative SVG wave.
 *
 * Purely presentational — no props, no state; both buttons are placeholder
 * links that prevent default navigation.
 */
export default function LivePreviewExample() {
  return (
    <>
      <div className="hero-wrapper bg-composed-wrapper bg-dark">
        <div className="w-100 px-4">
          <MarketingHeaders1 />
        </div>
        <div className="hero-wrapper--content">
          {/* Layered background: particle image plus two tinted overlays */}
          <div
            className="bg-composed-wrapper--image bg-composed-filter-rm opacity-9"
            style={{ backgroundImage: 'url(' + particles2 + ')' }}
          />
          <div className="bg-composed-wrapper--bg bg-second opacity-5" />
          <div className="bg-composed-wrapper--bg bg-primary opacity-7" />
          <div className="bg-composed-wrapper--content">
            <Container className="z-over text-white text-center py-5">
              <Grid item md={11} lg={10} xl={8} className="mx-auto py-5">
                <h2 className="display-3 font-weight-bold">
                  Bamburgh React Admin Dashboard with Material-UI PRO
                </h2>
                <p className="font-size-xl py-3 text-white-50">
                  Premium admin template powered by the most popular UI
                  components framework available for React: Material-UI.
                  Features hundreds of examples making web development fast and
                  easy. Start from one of the individual apps included or from
                  the general dashboard and build beautiful scalable
                  applications and presentation websites.
                </p>
                {/* Primary and secondary calls to action */}
                <div className="py-4 mb-4">
                  <Button
                    href="#/"
                    onClick={(e) => e.preventDefault()}
                    size="large"
                    className="btn-pill shadow-second-sm btn-first">
                    <span className="btn-wrapper--label">Browse gallery</span>
                    <span className="btn-wrapper--icon">
                      <FontAwesomeIcon icon={['fas', 'arrow-right']} />
                    </span>
                  </Button>
                  <Button
                    href="#/"
                    onClick={(e) => e.preventDefault()}
                    size="large"
                    className="bg-white-10 text-white btn-pill ml-3">
                    <span>Documentation</span>
                  </Button>
                </div>
                {/* Static stats strip (hard-coded demo figures) */}
                <div className="mt-5 mb-5 mb-lg-0 p-3 p-xl-5 bg-second rounded-lg modal-content">
                  <Grid container spacing={0}>
                    <Grid item md={6} lg={4} className="p-3">
                      <div className="divider-v bg-white-10 divider-v-lg" />
                      <div className="text-center">
                        <div>
                          <FontAwesomeIcon
                            icon={['far', 'envelope']}
                            className="display-3 text-info"
                          />
                        </div>
                        <div className="mt-2 line-height-sm">
                          <b className="font-size-lg">$9,693</b>
                          <span className="text-white-50 font-size-lg d-block">
                            revenue
                          </span>
                        </div>
                      </div>
                    </Grid>
                    <Grid item sm={6} lg={4} className="p-3">
                      <div className="divider-v bg-white-10 divider-v-lg" />
                      <div className="text-center">
                        <div>
                          <FontAwesomeIcon
                            icon={['far', 'lightbulb']}
                            className="display-3 text-success"
                          />
                        </div>
                        <div className="mt-2 line-height-sm">
                          <b className="font-size-lg">2,345</b>
                          <span className="text-white-50 font-size-lg d-block">
                            users
                          </span>
                        </div>
                      </div>
                    </Grid>
                    <Grid item sm={6} lg={4} className="d-none d-lg-block p-3">
                      <div className="text-center">
                        <div>
                          <FontAwesomeIcon
                            icon={['far', 'keyboard']}
                            className="display-3 text-danger"
                          />
                        </div>
                        <div className="mt-2 line-height-sm">
                          <b className="font-size-lg">1,024</b>
                          <span className="text-white-50 font-size-lg d-block">
                            orders
                          </span>
                        </div>
                      </div>
                    </Grid>
                  </Grid>
                </div>
              </Grid>
            </Container>
            {/* Decorative wave; negative margin pulls it up under the stats */}
            <div className="z-below" style={{ marginTop: '-300px' }}>
              <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1440 320">
                <path
                  fill="var(--light)"
                  fillOpacity="1"
                  d="M0,32L120,58.7C240,85,480,139,720,138.7C960,139,1200,85,1320,58.7L1440,32L1440,320L1320,320C1200,320,960,320,720,320C480,320,240,320,120,320L0,320Z"></path>
              </svg>
            </div>
          </div>
        </div>
      </div>
    </>
  );
}
|
'use strict';
Object.defineProperty(exports, '__esModule', {
value: true,
});
exports.mapAsyncIterator = mapAsyncIterator;
/**
* Given an AsyncIterable and a callback function, return an AsyncIterator
* which produces values mapped via calling the callback function.
*/
/**
 * Given an AsyncIterable and a callback function, return an AsyncIterator
 * which produces values mapped via calling the callback function.
 *
 * The returned iterator forwards `next`/`return`/`throw` to the underlying
 * iterator and maps each yielded value through `callback` (which may be
 * async). If the callback throws/rejects, the source iterator is closed via
 * `iterator.return()` before the error is re-thrown, so resources are not
 * leaked mid-iteration.
 */
function mapAsyncIterator(iterable, callback) {
  const iterator = iterable[Symbol.asyncIterator]();

  // Map one IteratorResult; done results pass through untouched.
  async function mapResult(result) {
    if (result.done) {
      return result;
    }

    try {
      return {
        value: await callback(result.value),
        done: false,
      };
    } catch (error) {
      // istanbul ignore else (FIXME: add test case)
      // Close the source iterator on callback failure; errors from the
      // close itself are deliberately ignored in favor of the original.
      if (typeof iterator.return === 'function') {
        try {
          await iterator.return();
        } catch (_e) {
          /* ignore error */
        }
      }

      throw error;
    }
  }

  return {
    async next() {
      return mapResult(await iterator.next());
    },

    async return() {
      // If iterator.return() does not exist, then type R must be undefined.
      return typeof iterator.return === 'function'
        ? mapResult(await iterator.return())
        : {
            value: undefined,
            done: true,
          };
    },

    async throw(error) {
      if (typeof iterator.throw === 'function') {
        return mapResult(await iterator.throw(error));
      }
      throw error;
    },

    // The mapped iterator is itself async-iterable.
    [Symbol.asyncIterator]() {
      return this;
    },
  };
}
|
<reponame>nepalez/separator<gh_stars>1-10
# encoding: utf-8
# Specs for combining Selector whitelists/blacklists with &, - and |.
describe "Composition" do

  # Intersection: a value passes only if both selectors accept it.
  it "& works" do
    blacklist = Selector.new except: /bar/
    whitelist = Selector.new only: /foo/
    selector  = whitelist & blacklist

    expect(selector[:foobaz]).to eql(true)
    expect(selector[:foobar]).to eql(false)
  end

  # Difference: passes the first selector but fails the second.
  it "- works" do
    whitelist = Selector.new only: /foo/
    blacklist = Selector.new except: /bar/
    selector  = whitelist - blacklist

    expect(selector[:foobar]).to eql(true)
    expect(selector[:bar]).to eql(false)
    expect(selector[:foo]).to eql(false)
  end

  # Union: passes if either selector accepts the value.
  it "| works" do
    whitelist = Selector.new only: 4..8
    blacklist = Selector.new except: 1..5
    selector  = whitelist | blacklist

    expect(selector[0.5]).to eql(true)
    expect(selector[5.5]).to eql(true)
    expect(selector[2.5]).to eql(false)
  end
end # describe Composition
|
package modelo;
import java.io.Serializable;
/**
*
* @author Helio
*/
/**
 * Simple serializable value holder for an account entry (document number,
 * amount, bank and operation type). Amounts are kept as raw strings; no
 * validation or arithmetic is performed here.
 *
 * <p>Not thread-safe: plain mutable bean.
 *
 * @author Helio
 */
public class Contas implements Serializable
{
    /** Explicit serial version so serialized form is stable across edits. */
    private static final long serialVersionUID = 1L;

    private String numeroDoc;
    private String valor;
    private String banco;
    /** Operation type; defaults to "D" (presumably debit — confirm with callers). */
    private String tipoOperacao = "D";

    /** @return the document number, or null if never set */
    public String getNumeroDoc() {
        return numeroDoc;
    }

    /**
     * Sets both document number and value in one call.
     *
     * <p>NOTE(review): despite the name ("limpar" = clear), this method
     * assigns the given values rather than clearing the fields. Kept as-is
     * for caller compatibility.
     *
     * @param numeroDoc new document number
     * @param valor new value
     */
    public void limpar (String numeroDoc, String valor) {
        this.numeroDoc = numeroDoc;
        this.valor = valor;
    }

    public void setNumeroDoc(String numeroDoc) {
        this.numeroDoc = numeroDoc;
    }

    /** @return the raw value string, or null if never set */
    public String getValor() {
        return valor;
    }

    /** @return the operation type ("D" unless overridden) */
    public String getTipoOperacao() {
        return tipoOperacao;
    }

    public void setTipoOperacao(String tipoOperacao) {
        this.tipoOperacao = tipoOperacao;
    }

    public void setValor(String valor) {
        this.valor = valor;
    }

    /** @return the bank identifier, or null if never set */
    public String getBanco() {
        return banco;
    }

    public void setBanco(String banco) {
        this.banco = banco;
    }
}
|
import Component from "@ember/component";
import { computed } from "@ember/object";
import layout from "../templates/components/value-viewer";
import {
isArray,
isPrimitive,
isObject,
} from "ember-json-viewer/utils/value-types";
import { assert } from "@ember/debug";
/**
 * Tagless Ember component that classifies a JSON `value` and exposes
 * helpers for rendering it: primitive check, `{`/`[` prefix and `}`/`]`
 * suffix, and a one-number summary (array length or object key count).
 */
export default Component.extend({
  tagName: "",
  layout,

  // passed-in
  value: null,
  showSummary: false,

  // True when `value` is a primitive (per utils/value-types).
  isPrimitive: computed("value", function () {
    return isPrimitive(this.get("value"));
  }),

  // Opening delimiter for display: "[" for arrays, "{" otherwise.
  prefix: computed("value", function () {
    return isArray(this.get("value")) ? "[" : "{";
  }),

  // Closing delimiter matching `prefix`.
  suffix: computed("value", function () {
    return isArray(this.get("value")) ? "]" : "}";
  }),

  isObj: computed("value", function () {
    return isObject(this.get("value"));
  }),

  // Element count for arrays / key count for objects; asserts on primitives.
  valueSummary: computed("value", function () {
    let v = this.get("value");
    assert(
      `valueSummary only possible for non-primitive, got ${v}`,
      !isPrimitive(v)
    );
    return isArray(v) ? v.length : Object.keys(v).length;
  }),
});
|
<reponame>wei756/NaverCafe-Addon
/**
* @author Wei756 <<EMAIL>>
* @license MIT
*/
// Context-menu entry: "block posts/comments containing the selected keyword".
chrome.contextMenus.create({"title": "선택된 키워드가 포함된 글/댓글 차단하기", "contexts":["selection"],
                            "onclick": blockKeyword});

// Context-menu handler: confirm with the user, then add the selected text
// to the keyword block list.
function blockKeyword(info, tab) {
  var str = info.selectionText;
  // Pick the Korean subject particle (이/가) based on whether the last
  // Hangul syllable of the selection has a final consonant, so the
  // confirmation sentence reads naturally.
  var iga = "이";
  var lastChar = str.charCodeAt(str.length - 1);
  if (lastChar >= 44032 && lastChar <= 55215 && (lastChar - 44032) % 28 == 0) {
    iga = "가";
  }
  if (confirm("'" + str + "'" + iga + " 포함된 제목, 댓글을 차단하시겠습니까?")) {
    // '-' means the block applies to all cafes (see pushBlockItem).
    pushBlockItem(keyword, '-', str);
  }
}
/**
 * @description Block-list type key for Naver user-ID entries.
 * @type {string}
 */
const nid = "nid";
/**
 * @description Block-list type key for keyword entries.
 * @type {string}
 */
const keyword = "keyword";
/**
 * @description Adds a user ID or keyword to the block list in chrome.storage.
 *              Duplicate entries (same cafe + key) are rejected with an alert.
 * @param {string} type    block type: `nid` (user) or `keyword`
 * @param {string} cafeid  cafe id the block applies to ('-' = all cafes)
 * @param {string} keyword keyword to block, or the nickname when type is `nid`
 * @param {string} id      user id to block (only used when type is `nid`)
 */
function pushBlockItem(type, cafeid = '-', keyword = '', id = '') {
  getBlockList(function(items) {
    if (typeof items[type] == "undefined" || items[type] == null) { // create the block list
      items['version'] = 2; // json schema version
      items[type] = new Array(); // fresh array
    }
    if ((type == nid ? indexBlockItem(items[type], cafeid, 'id', id)
                     : indexBlockItem(items[type], cafeid, 'keyword', keyword)) === -1) { // duplicate check
      if (type == nid) { // user entry
        items[type].push({
          cafeid: cafeid,
          id: id,
          nickname: keyword,
          timestamp: Date.now(),
        });
        alert("'" + keyword + "'(" + id + ") 님이 작성한 글과 댓글을 차단합니다.");
      } else { // keyword entry
        items[type].push({
          cafeid: cafeid,
          keyword: keyword,
          timestamp: Date.now(),
        });
        alert("'" + keyword + "'가 포함된 글이나 댓글을 차단합니다.");
      }
    } else { // already blocked
      alert("'" + keyword + (type == nid ? "' 님은" : "' 은(는)") + " 이미 차단한 " + (type == nid ? "사용자" : "키워드") + "입니다.");
    }
    // Persist the mutated block list back to synced storage.
    chrome.storage.sync.set(items, function() {
      //alert(data + " pushed!");
    });
  });
}
/**
 * Linear scan of a block list for an entry whose `cafeid` matches and whose
 * `key` property (e.g. 'id' or 'keyword') equals `value`.
 *
 * @returns {number} index of the first match, or -1 when none is found
 */
function indexBlockItem(arr, cafeid, key, value) {
  for (let idx = 0; idx < arr.length; idx++) {
    const entry = arr[idx];
    if (entry['' + key] != null && entry['cafeid'] === cafeid && entry['' + key] === value) {
      return idx;
    }
  }
  return -1;
}
/**
 * @description Loads the entire block list from chrome.storage.sync and
 *              hands it to the callback (null key = fetch everything).
 * @param {function} callback invoked with the stored items object
 */
function getBlockList(callback) {
  chrome.storage.sync.get(null, function(items) {
    //console.log("items: " + JSON.stringify(items));
    callback(items);
  });
}
def classify(height, weight, age):
    """Classify a person into a size bucket from height, weight and age.

    Args:
        height: height in (presumably) centimetres -- units not documented.
        weight: weight in (presumably) kilograms -- units not documented.
        age: age in years.

    Returns:
        One of "small", "medium", "large", "extra large", or None when the
        inputs fall outside every bucket (the original implementation fell
        off the end and returned None implicitly; this is now explicit).
    """
    if height < 150 and weight < 50 and age > 70:
        return "small"
    # Chained comparisons replace the redundant `x >= a and x < b` pairs;
    # branch boundaries are unchanged.
    if 150 <= height < 160 and 50 <= weight < 60 and 70 < age < 80:
        return "medium"
    if 160 <= height < 180 and 60 <= weight < 70 and 80 < age < 90:
        return "large"
    if height >= 180 and weight >= 70 and age > 90:
        return "extra large"
    return None
import inflection from 'inflection';
import { makeMigrationCall, updateSchemaInfo } from '../DataActions';
import gqlPattern, { gqlRelErrorNotif } from '../Common/GraphQLValidation';
import { showErrorNotification } from '../../Common/Notification';
import { getConfirmation } from '../../../Common/utils/jsUtils';
import suggestedRelationshipsRaw from './autoRelations';
import {
getRemoteRelPayload,
parseRemoteRelationship,
} from './RemoteRelationships/utils';
import {
getSaveRemoteRelQuery,
getDropRemoteRelQuery,
getRenameRelationshipQuery,
getCreateObjectRelationshipQuery,
getCreateArrayRelationshipQuery,
getDropRelationshipQuery,
getAddRelationshipQuery,
} from '../../../../metadata/queryUtils';
import Migration from '../../../../utils/migration/Migration';
// Redux action types for the table-relationship editor.
export const SET_MANUAL_REL_ADD = 'ModifyTable/SET_MANUAL_REL_ADD';
export const MANUAL_REL_SET_TYPE = 'ModifyTable/MANUAL_REL_SET_TYPE';
export const MANUAL_REL_SET_RSCHEMA = 'ModifyTable/MANUAL_REL_SET_RSCHEMA';
export const MANUAL_REL_SET_RTABLE = 'ModifyTable/MANUAL_REL_SET_RTABLE';
export const MANUAL_REL_RESET = 'ModifyTable/MANUAL_REL_RESET';
export const REL_RESET = 'ModifyTable/REL_RESET';
export const REL_SELECTION_CHANGED = 'ModifyTable/REL_SELECTION_CHANGED';
export const MANUAL_REL_NAME_CHANGED = 'ModifyTable/MANUAL_REL_NAME_CHANGED';
export const REL_NAME_CHANGED = 'ModifyTable/REL_NAME_CHANGED';
export const REL_ADD_NEW_CLICKED = 'ModifyTable/REL_ADD_NEW_CLICKED';
export const SET_REMOTE_RELATIONSHIPS = 'ModifyTable/SET_REMOTE_RELATIONSHIPS';

// Initial form state for a new remote relationship.
export const defaultRemoteRelationship = {
  name: '',
  remoteSchema: '',
  remoteField: [],
};
/**
 * Thunk: create (when `existingRel` is falsy) or update a remote
 * relationship from the editor form `state`, via a metadata migration.
 *
 * Validates the relationship name against the GraphQL identifier pattern
 * and requires a remote schema before building the migration. The down
 * migration either drops the new relationship (create) or restores the
 * previous definition (update).
 */
export const saveRemoteRelationship = (
  state,
  existingRel,
  successCallback,
  errorCallback
) => {
  return (dispatch, getState) => {
    const isNew = !existingRel;
    // Relationship name must be a valid GraphQL field name.
    if (!gqlPattern.test(state.name)) {
      return dispatch(
        showErrorNotification(
          gqlRelErrorNotif[0],
          gqlRelErrorNotif[1],
          gqlRelErrorNotif[2]
        )
      );
    }
    if (!state.remoteSchema) {
      return dispatch(showErrorNotification('Remote schema is required'));
    }
    const table = {
      schema: getState().tables.currentSchema,
      name: getState().tables.currentTable,
    };
    const source = getState().tables.currentDataSource;

    const errorMsg = `${
      isNew ? 'Creating' : 'Updating'
    } remote relationship failed`;

    // Building the payload can throw on malformed form state; surface that
    // as a notification instead of crashing the thunk.
    let remoteRelQueryArgs;
    try {
      remoteRelQueryArgs = getRemoteRelPayload(state);
    } catch (e) {
      if (errorCallback) {
        errorCallback();
      }
      return dispatch(showErrorNotification(errorMsg, e.message));
    }

    const upQuery = [
      getSaveRemoteRelQuery(remoteRelQueryArgs, !existingRel, source),
    ];
    let downQuery = [];
    if (isNew) {
      // Down for a create = drop the relationship we just added.
      downQuery = [getDropRemoteRelQuery(state.name, state.table)];
    } else {
      // Down for an update = re-save the pre-existing definition.
      downQuery = [
        getSaveRemoteRelQuery(
          getRemoteRelPayload(parseRemoteRelationship(existingRel)),
          isNew,
          source
        ),
      ];
    }

    // Apply migrations
    const migrationName = `table_${table.name}_${
      isNew ? 'create' : 'update'
    }_remote_relationship_${state.name}`;

    const requestMsg = `${
      isNew ? 'Creating' : 'Updating'
    } remote relationship...`;
    const successMsg = `Successfully ${
      isNew ? 'created' : 'updated'
    } remote relationship`;

    const customOnSuccess = () => {
      if (successCallback) {
        successCallback();
      }
    };
    const customOnError = () => {
      if (errorCallback) {
        errorCallback();
      }
    };

    // Rename relationship should fetch entire schema info.
    makeMigrationCall(
      dispatch,
      getState,
      upQuery,
      downQuery,
      migrationName,
      customOnSuccess,
      customOnError,
      requestMsg,
      successMsg,
      errorMsg
    );
  };
};
/**
 * Thunk: drop an existing remote relationship after user confirmation.
 * The down migration re-creates the dropped relationship from
 * `existingRel`, making the migration reversible.
 */
export const dropRemoteRelationship = (
  state,
  existingRel,
  successCallback,
  errorCallback
) => {
  return (dispatch, getState) => {
    // Destructive action: bail out (and notify the caller) if the user
    // declines the confirmation dialog.
    if (
      !getConfirmation('This will permanently delete the remote relationship')
    ) {
      if (errorCallback) {
        errorCallback();
      }
      return;
    }

    const source = getState().tables.currentDataSource;
    const table = state.table;
    const migration = new Migration();
    migration.add(
      getDropRemoteRelQuery(
        existingRel.remote_relationship_name,
        table,
        source
      ),
      // Down migration: restore the relationship as it was.
      getSaveRemoteRelQuery(
        getRemoteRelPayload(parseRemoteRelationship(existingRel)),
        true,
        source
      )
    );
    // Apply migrations
    const migrationName = `table_${table.name}_drop_remote_relationship_${state.name}`;

    const requestMsg = 'Deleting remote relationship...';
    const successMsg = 'Successfully deleted remote relationship';
    const errorMsg = 'Deleting remote relationship failed';

    const customOnSuccess = () => {
      if (successCallback) {
        successCallback();
      }
    };
    const customOnError = () => {
      if (errorCallback) {
        errorCallback();
      }
    };

    // Rename relationship should fetch entire schema info.
    makeMigrationCall(
      dispatch,
      getState,
      migration.upMigration,
      migration.downMigration,
      migrationName,
      customOnSuccess,
      customOnError,
      requestMsg,
      successMsg,
      errorMsg
    );
  };
};
// Plain action creators for the relationship editor forms.

export const setRemoteRelationships = remoteRelationships => ({
  type: SET_REMOTE_RELATIONSHIPS,
  remoteRelationships,
});

const resetRelationshipForm = () => ({ type: REL_RESET });
const resetManualRelationshipForm = () => ({ type: MANUAL_REL_RESET });
const addNewRelClicked = () => ({ type: REL_ADD_NEW_CLICKED });

const relSelectionChanged = selectedRelationship => ({
  type: REL_SELECTION_CHANGED,
  rel: selectedRelationship,
});

const relNameChanged = relName => ({
  type: REL_NAME_CHANGED,
  relName,
});

const manualRelNameChanged = relName => ({
  type: MANUAL_REL_NAME_CHANGED,
  relName,
});

const manualRelTypeChanged = relType => ({
  type: MANUAL_REL_SET_TYPE,
  relType,
});

const manualRelRSchemaChanged = rSchema => ({
  type: MANUAL_REL_SET_RSCHEMA,
  rSchema,
});
/**
 * Thunk: rename a relationship on `tableName` in the current schema/source.
 * Up migration renames old -> new; down migration renames new -> old.
 * `callback` runs after a successful migration.
 */
const saveRenameRelationship = (oldName, newName, tableName, callback) => {
  return (dispatch, getState) => {
    const currentSchema = getState().tables.currentSchema;
    const currentSource = getState().tables.currentDataSource;
    const migrateUp = [
      getRenameRelationshipQuery(
        {
          name: tableName,
          schema: currentSchema,
        },
        oldName,
        newName,
        currentSource
      ),
    ];
    // Inverse rename so the migration can be rolled back.
    const migrateDown = [
      getRenameRelationshipQuery(
        {
          name: tableName,
          schema: currentSchema,
        },
        newName,
        oldName,
        currentSource
      ),
    ];
    // Apply migrations
    const migrationName = `rename_relationship_${oldName}_to_${newName}_schema_${currentSchema}_table_${tableName}`;
    const requestMsg = 'Renaming relationship...';
    const successMsg = 'Relationship renamed';
    const errorMsg = 'Renaming relationship failed';

    const customOnSuccess = () => {
      callback();
    };
    const customOnError = () => {};
    // Rename relationship should fetch entire schema info.
    makeMigrationCall(
      dispatch,
      getState,
      migrateUp,
      migrateDown,
      migrationName,
      customOnSuccess,
      customOnError,
      requestMsg,
      successMsg,
      errorMsg
    );
  };
};
/**
 * Build the up/down metadata queries for creating a relationship described
 * by `relMeta` (object relationship when `relMeta.isObjRel`, otherwise
 * array relationship).
 *
 * Single-column FKs use `foreign_key_constraint_on`; multi-column mappings
 * (or unique object rels) fall back to `manual_configuration` with an
 * explicit column_mapping. The down query always drops the relationship.
 */
const generateRelationshipsQuery = (relMeta, currentDataSource) => {
  let _upQuery;
  let _downQuery;

  if (relMeta.isObjRel) {
    _upQuery = getCreateObjectRelationshipQuery(
      {
        name: relMeta.lTable,
        schema: relMeta.lSchema,
      },
      relMeta.relName,
      currentDataSource
    );
    const columnMaps = relMeta.lcol.map((column, index) => ({
      lcol: column,
      rcol: relMeta.rcol[index],
    }));
    if (columnMaps.length === 1 && !relMeta.isUnique) {
      // Simple case: relationship rides on a single FK column.
      _upQuery.args.using = {
        foreign_key_constraint_on: relMeta.lcol[0],
      };
    } else {
      const columnReducer = (accumulator, val) => ({
        ...accumulator,
        [val.lcol]: val.rcol,
      });
      _upQuery.args.using = {
        manual_configuration: {
          remote_table: {
            name: relMeta.rTable,
            schema: relMeta.rSchema,
          },
          // NOTE(review): this branch reads `relMeta.source` while the array
          // branch below uses `currentDataSource` — confirm both are always
          // the same value for object relationships.
          source: relMeta.source,
          column_mapping: columnMaps.reduce(columnReducer, {}),
        },
      };
    }
    _downQuery = getDropRelationshipQuery(
      { name: relMeta.lTable, schema: relMeta.lSchema },
      relMeta.relName,
      currentDataSource
    );
  } else {
    _upQuery = getCreateArrayRelationshipQuery(
      {
        name: relMeta.lTable,
        schema: relMeta.lSchema,
      },
      relMeta.relName,
      currentDataSource
    );
    const columnMaps = relMeta.rcol.map((column, index) => ({
      rcol: column,
      lcol: relMeta.lcol[index],
    }));
    if (columnMaps.length === 1) {
      // Array rel: the FK lives on the remote table's column.
      _upQuery.args.using = {
        foreign_key_constraint_on: {
          table: {
            name: relMeta.rTable,
            schema: relMeta.rSchema,
          },
          column: relMeta.rcol[0],
        },
      };
    } else {
      const columnReducer = (accumulator, val) => ({
        ...accumulator,
        [val.lcol]: val.rcol,
      });
      _upQuery.args.using = {
        manual_configuration: {
          remote_table: {
            name: relMeta.rTable,
            schema: relMeta.rSchema,
          },
          source: currentDataSource,
          column_mapping: columnMaps.reduce(columnReducer, {}),
        },
      };
    }
    _downQuery = getDropRelationshipQuery(
      { name: relMeta.lTable, schema: relMeta.lSchema },
      relMeta.relName,
      currentDataSource
    );
  }

  return { upQuery: _upQuery, downQuery: _downQuery };
};
/**
 * Thunk: drop the relationship described by `relMeta` via a migration.
 * Reuses generateRelationshipsQuery and swaps up/down — the "create" query
 * becomes the down migration so the drop is reversible.
 */
const deleteRelMigrate = relMeta => (dispatch, getState) => {
  const source = getState().tables.currentDataSource;
  const { upQuery, downQuery } = generateRelationshipsQuery(relMeta, source);
  const migration = new Migration();
  migration.add(downQuery, upQuery); // upquery from generateRelationshipsQuery used as downMigratio and vice versa

  // Apply migrations
  const migrationName = `drop_relationship_${relMeta.relName}_${relMeta.lSchema}_table_${relMeta.lTable}`;
  const requestMsg = 'Deleting Relationship...';
  const successMsg = 'Relationship deleted';
  const errorMsg = 'Deleting relationship failed';
  const customOnSuccess = () => {
    // Refresh schema metadata so the UI reflects the dropped relationship.
    dispatch(updateSchemaInfo());
  };
  const customOnError = () => {};

  // Delete relationship should fetch entire schema info.
  makeMigrationCall(
    dispatch,
    getState,
    migration.upMigration,
    migration.downMigration,
    migrationName,
    customOnSuccess,
    customOnError,
    requestMsg,
    successMsg,
    errorMsg,
    true
  );
};
/**
 * Thunk: create a relationship from the "add relationship" form state
 * (tables.modify.relAdd) and, on success, refresh schema info for both
 * involved schemas and reset the form.
 */
const addRelNewFromStateMigrate = () => (dispatch, getState) => {
  const source = getState().tables.currentDataSource;
  const state = getState().tables.modify.relAdd;
  const { upQuery, downQuery } = generateRelationshipsQuery(
    {
      lTable: state.lTable,
      lSchema: state.lSchema,
      isObjRel: state.isObjRel,
      relName: state.relName,
      lcol: state.lcol,
      rcol: state.rcol,
      rTable: state.rTable,
      rSchema: state.rSchema,
      isUnique: state.isUnique,
    },
    source
  );

  // Apply migrations
  // NOTE(review): this interpolates `state.name`, but the fields used above
  // come from `state.relName` — confirm `state.name` exists on relAdd state,
  // otherwise the migration name contains "undefined".
  const migrationName = `add_relationship_${state.name}_table_${state.lSchema}_${state.lTable}`;
  const requestMsg = 'Adding Relationship...';
  const successMsg = 'Relationship created';
  const errorMsg = 'Creating relationship failed';

  const customOnSuccess = () => {
    dispatch(
      updateSchemaInfo({
        schemas: [state.lSchema, state.rSchema],
      })
    ).then(() => {
      dispatch(resetRelationshipForm());
    });
  };
  const customOnError = () => {};

  // Rename relationship should fetch only current table schema info.
  makeMigrationCall(
    dispatch,
    getState,
    [upQuery],
    [downQuery],
    migrationName,
    customOnSuccess,
    customOnError,
    requestMsg,
    successMsg,
    errorMsg,
    true
  );
};
// Action creators for the "manual relationship" form.
const setManualRelAdd = manualRelAdd => ({
  type: SET_MANUAL_REL_ADD,
  manualRelAdd,
});

// Thunk form kept for parity with the other manual-rel change handlers.
const manualRelRTableChanged = tableName => dispatch => {
  dispatch({ type: MANUAL_REL_SET_RTABLE, rTable: tableName });
};
/**
 * Thunk: create a manual relationship (from the manualRelAdd form) on the
 * given table/view. Validates the name, builds the column mapping (empty
 * column rows are skipped), runs the migration, then refreshes schema info
 * for the table and closes the editor via `toggleEditor`.
 */
const addRelViewMigrate = (tableSchema, toggleEditor) => (
  dispatch,
  getState
) => {
  const {
    relType,
    relName,
    rSchema,
    rTable,
    colMappings,
  } = getState().tables.modify.manualRelAdd;
  const currentTableName = tableSchema.table_name;
  const currentTableSchema = tableSchema.table_schema;
  const isObjRel = relType === 'object' ? true : false;
  const columnMapping = {};
  const { currentDataSource } = getState().tables;

  // Build {localColumn: remoteColumn}, ignoring incomplete mapping rows.
  colMappings.forEach(colMap => {
    if (colMap.column === '') {
      return;
    }
    columnMapping[colMap.column] = colMap.refColumn;
  });

  const tableInfo = { name: currentTableName, schema: currentTableSchema };
  const remoteTableInfo = { name: rTable, schema: rSchema };

  const relChangesUp = [
    getAddRelationshipQuery(
      isObjRel,
      tableInfo,
      relName,
      remoteTableInfo,
      columnMapping,
      currentDataSource
    ),
  ];
  const relChangesDown = [
    getDropRelationshipQuery(tableInfo, relName, currentDataSource),
  ];

  // Apply migrations
  const migrationName = `create_relationship_${relName}_${currentTableSchema}_table_${currentTableName}`;

  const requestMsg = 'Adding Relationship...';
  const successMsg = 'Relationship created';
  const errorMsg = 'Creating relationship failed';

  const customOnSuccess = () => {
    dispatch(
      updateSchemaInfo({
        tables: [
          {
            table_schema: currentTableSchema,
            table_name: currentTableName,
          },
        ],
      })
    ).then(() => {
      toggleEditor();
    });
  };
  const customOnError = () => {};

  // perform validations and make call
  if (!relName.trim()) {
    dispatch(
      showErrorNotification(
        'Error adding relationship!',
        'Relationship name cannot be empty'
      )
    );
  } else if (!gqlPattern.test(relName)) {
    // Name must be a valid GraphQL identifier.
    dispatch(
      showErrorNotification(
        gqlRelErrorNotif[0],
        gqlRelErrorNotif[1],
        gqlRelErrorNotif[2]
      )
    );
  } else {
    makeMigrationCall(
      dispatch,
      getState,
      relChangesUp,
      relChangesDown,
      migrationName,
      customOnSuccess,
      customOnError,
      requestMsg,
      successMsg,
      errorMsg
    );
  }
};
// Normalize a relationship-name fragment by stripping surrounding whitespace.
const sanitizeRelName = arg => {
  return arg.trim();
};
/**
 * Generate a guaranteed-unique fallback relationship name of the form
 * `<singular/plural target>_by_<joined columns>[_<n>]`, camelized.
 * Recurses, incrementing the numeric suffix, until the name does not
 * collide with `existingFields`.
 */
const fallBackRelName = (relMeta, existingFields, iterNumber = 0) => {
  let relName;
  const targetTable = sanitizeRelName(relMeta.rTable);
  if (relMeta.isObjRel) {
    // Object rel: singular target named by the local join columns.
    const objLCol = sanitizeRelName(relMeta.lcol.join('_'));
    relName = `${inflection.singularize(targetTable)}_by_${objLCol}${
      iterNumber ? '_' + iterNumber : ''
    }`;
  } else {
    // Array rel: plural target named by the remote join columns.
    const arrRCol = sanitizeRelName(relMeta.rcol.join('_'));
    relName = `${inflection.pluralize(targetTable)}_by_${arrRCol}${
      iterNumber ? '_' + iterNumber : ''
    }`;
  }
  relName = inflection.camelize(relName, true);
  /*
   * Recurse until a unique relationship name is found and keep prefixing an integer at the end to fix collision
   * */
  return relName in existingFields
    ? fallBackRelName(relMeta, existingFields, ++iterNumber)
    : relName;
};
/*
 * Derives a default relationship name from relationship metadata.
 * Object relationships use the singularized target table name, array
 * relationships the pluralized one; if that clashes with an existing
 * field the guaranteed-unique fallback name is used instead.
 * Returns '' when inflection fails on the given input.
 */
const formRelName = (relMeta, existingFields) => {
  try {
    const target = sanitizeRelName(relMeta.rTable);
    let name = relMeta.isObjRel
      ? inflection.singularize(target)
      : inflection.pluralize(target);
    /* Check if it is existing, fallback to guaranteed unique name */
    if (existingFields && name in existingFields) {
      name = fallBackRelName(relMeta, existingFields);
    }
    return name;
  } catch (e) {
    return '';
  }
};
// Maps every field name of a table schema (relationship names and column
// names) to `true`, enabling O(1) name-collision lookups.
const getExistingFieldsMap = tableSchema => {
  const fields = {};
  for (const rel of tableSchema.relationships) {
    fields[rel.rel_name] = true;
  }
  for (const col of tableSchema.columns) {
    fields[col.column_name] = true;
  }
  return fields;
};
/*
 * Computes, for every tracked table in `currentSchema`, the suggested
 * (currently untracked) object/array relationships together with the
 * up/down migration queries that would track them.
 *
 * NOTE(review): `bulkRelTrackDown` is declared and returned but never
 * populated — callers always receive it empty. Confirm whether the down
 * queries were intended to be collected here separately.
 */
const getAllUnTrackedRelations = (allSchemas, currentSchema, currentSource) => {
  // Only tracked tables of the current schema can receive relationships.
  const trackedTables = allSchemas.filter(
    table => table.is_table_tracked && table.table_schema === currentSchema
  );
  // Per table: existing field names (for collision-free naming) plus the
  // raw relationship suggestions.
  const tableRelMapping = trackedTables.map(table => ({
    table_name: table.table_name,
    existingFields: getExistingFieldsMap(table),
    relations: suggestedRelationshipsRaw(
      table.table_name,
      allSchemas,
      currentSchema
    ),
  }));
  const bulkRelTrack = [];
  const bulkRelTrackDown = [];
  tableRelMapping.forEach(table => {
    // check relations.obj and relations.arr length and form queries
    if (table.relations.objectRel.length) {
      table.relations.objectRel.forEach(indivObjectRel => {
        indivObjectRel.relName = formRelName(
          indivObjectRel,
          table.existingFields
        );
        /* Added to ensure that fallback relationship name is created in case of tracking all relationship at once */
        table.existingFields[indivObjectRel.relName] = true;
        const { upQuery, downQuery } = generateRelationshipsQuery(
          indivObjectRel,
          currentSource
        );
        const objTrack = {
          upQuery,
          downQuery,
          data: indivObjectRel,
        };
        bulkRelTrack.push(objTrack);
      });
    }
    if (table.relations.arrayRel.length) {
      table.relations.arrayRel.forEach(indivArrayRel => {
        indivArrayRel.relName = formRelName(
          indivArrayRel,
          table.existingFields
        );
        /* Added to ensure that fallback relationship name is created in case of tracking all relationship at once */
        table.existingFields[indivArrayRel.relName] = true;
        const { upQuery, downQuery } = generateRelationshipsQuery(
          indivArrayRel,
          currentSource
        );
        const arrTrack = {
          upQuery,
          downQuery,
          data: indivArrayRel,
        };
        bulkRelTrack.push(arrTrack);
      });
    }
  });
  return { bulkRelTrack: bulkRelTrack, bulkRelTrackDown: bulkRelTrackDown };
};
// Thunk: tracks a batch of suggested relationships in one migration.
// `autoTrackData` is a list of { upQuery, downQuery } pairs (as produced
// by getAllUnTrackedRelations); on success the schema metadata is
// refreshed. The trailing `true` marks the call as a migration-less
// metadata request per makeMigrationCall's signature — TODO confirm.
const autoTrackRelations = autoTrackData => (dispatch, getState) => {
  const migration = new Migration();
  autoTrackData.forEach(({ upQuery, downQuery }) =>
    migration.add(upQuery, downQuery)
  );
  // Apply migrations
  const migrationName = 'track_all_relationships';
  const requestMsg = 'Adding Relationship...';
  const successMsg = 'Relationship created';
  const errorMsg = 'Creating relationship failed';
  const customOnSuccess = () => {
    dispatch(updateSchemaInfo());
  };
  const customOnError = () => {};
  makeMigrationCall(
    dispatch,
    getState,
    migration.upMigration,
    migration.downMigration,
    migrationName,
    customOnSuccess,
    customOnError,
    requestMsg,
    successMsg,
    errorMsg,
    true
  );
};
// Thunk: applies a single auto-generated relationship migration
// (up/down query pair) and refreshes the schema metadata on success.
const autoAddRelName = ({ upQuery, downQuery }) => (dispatch, getState) => {
  const currentSchema = getState().tables.currentSchema;
  const relName = upQuery.args.name;
  const migration = new Migration();
  migration.add(upQuery, downQuery);
  // Apply migrations
  const migrationName = `add_relationship_${relName}_table_${currentSchema}_${upQuery.args.table}`;
  const requestMsg = 'Adding Relationship...';
  const successMsg = 'Relationship created';
  const errorMsg = 'Creating relationship failed';
  const customOnSuccess = () => {
    // Dispatch directly: the previous Promise.all([...]) around this single
    // un-awaited dispatch was a no-op wrapper.
    dispatch(updateSchemaInfo());
  };
  const customOnError = () => {};
  makeMigrationCall(
    dispatch,
    getState,
    migration.upMigration,
    migration.downMigration,
    migrationName,
    customOnSuccess,
    customOnError,
    requestMsg,
    successMsg,
    errorMsg
  );
};
export {
deleteRelMigrate,
addNewRelClicked,
manualRelTypeChanged,
manualRelRSchemaChanged,
addRelViewMigrate,
manualRelRTableChanged,
setManualRelAdd,
relSelectionChanged,
addRelNewFromStateMigrate,
manualRelNameChanged,
relNameChanged,
resetRelationshipForm,
resetManualRelationshipForm,
autoTrackRelations,
autoAddRelName,
formRelName,
getAllUnTrackedRelations,
saveRenameRelationship,
getExistingFieldsMap,
};
|
# Developer : Hamdy Abou El Anein
import os
import sys
from easygui import *
print("IMPORTANT\n\nThis software work only if google_speech is installed on the system. To install it go to this link please : https://pypi.python.org/pypi/google_speech/\n\n")
def language():
    """Ask the user which language PySpeaking should speak.

    Sets the global ``lang`` to the google_speech language code, padded
    with spaces exactly as speak() expects, then proceeds to the text
    entry dialog. Exits the program if the dialog is cancelled or closed.
    """
    global lang
    msg = "What's the language do you want to make PySpeaking speak ?"
    title = "PySpeaking-GUI"
    # Display name -> google_speech language code (insertion order is the
    # display order of the choice box).
    codes = {
        "English": "en",
        "French": "fr",
        "German": "de",
        "Spanish": "es",
        "Japanese": "ja",
        "Chinese": "zh-CN",
        "Italian": "it",
        "Arabic": "ar",
        "Russian": "ru",
    }
    choice = choicebox(msg, title, list(codes))
    if choice is None or choice not in codes:
        # Dialog cancelled/closed -> quit, matching the original behavior.
        sys.exit(0)
    # Keep the surrounding spaces: speak() concatenates this directly
    # into the shell command after "-l".
    lang = ' %s ' % codes[choice]
    textToSpeak()
def textToSpeak():
    """Prompt for the text to speak, then hand off to speak().

    Stores the dialog result in the global ``fieldValues``. If the
    multenterbox dialog is cancelled it returns None; exit cleanly in
    that case instead of crashing on the subsequent indexing (the
    original's bare ``fieldValues[0]`` statement was a no-op and raised
    TypeError on cancel).
    """
    global fieldValues
    msg = "Enter the text to speak"
    title = "Enter the text to speak"
    fieldNames = ["Text to speak"]
    fieldValues = multenterbox(msg, title, fieldNames)
    if fieldValues is None:
        sys.exit(0)
    speak()
def speak():
    """Run google_speech on the entered text in the chosen language.

    Builds the shell command from the global ``lang`` (already padded with
    spaces) and the first dialog field. The text is shell-quoted with
    shlex.quote: the original ``replace("'", "\\'")`` was a no-op escape,
    so a single quote in the text broke the command and arbitrary shell
    metacharacters were interpreted (command injection).
    """
    import shlex  # local import keeps the module's import block unchanged
    global lang, fieldValues
    textValue = "google_speech -l" + str(lang) + shlex.quote(fieldValues[0])
    os.system(textValue)
language() |
// Simplified-Chinese (zh-CN) locale messages for the admin console,
// grouped by UI area. Keys are referenced via vue-i18n style lookups
// (e.g. route.dashboard); the string values are runtime data and must
// not be altered casually.
export default {
  // Sidebar/route titles
  route: {
    dashboard: '首页',
    fileOper:'档案管理',
    fileOperWeixi:'档案管理/包括微信绑定',
    releaseOperation:'发布管理',
    licenceOperation:'证照管理',
    recruitmentOperation:'招募人管理',
    registration:'登记管理',
    addArrivals:'进站登记',
    chartered:'包车管理',
    indexedRecord:'指标检查',
    health:'健康检查',
    consultation:'健康征询',
    physical:'体格检查',
    Xlight:'X光胸片',
    ecg:'心电图',
    noninvasive:'无创血红蛋白',
    collector:'样本采集',
    sampleHandover:'样本交接',
    handOver:'样本移交',
    receive:'样本接收',
    registrat:'样本登记',
    bloodTests:'血液检查',
    refuseInfo:'拒绝信息发布',
    bloodCheck:'血样检查',
    plasmaCheck:'浆样检查',
    plasmaRefused:'浆站拒绝',
    positiveFeed:'检疫期阳性反馈',
    plasmaManagement:'血浆管理',
    biometric:'生物识别/共性模块',
    plasmaCollect:'血浆采集',
    plasmaStorage:'血浆入库',
    weighing:'称重',
    frozen:'速冻',
    packing:'装箱',
    plasmaOutbound:'血浆出库',
    immuneManagement:'免疫管理',
    rulesSet:'规则设置',
    immuneRegister:'免疫登记',
    immunization:'免疫注射',
    feedback:'反馈/转类',
    adverseReactions:'不良反应',
    plasmaCollection:'血浆采集',
    immunization2:'免疫注射',
    moneyManagement:'费用管理',
    rulesSetMoney:'规则设置',
    feesPaid:'费用发放',
    specialFees:'特殊费用',
    giftManagement:'礼品管理',
    rulesSetGift:'规则设置',
    giftFor:'礼品申请',
    giftIssue:'礼品发放',
    reviewManagement:'回访管理',
    quarantinePeriod:'检疫期回访',
    employeesReview:'浆员回访',
    reviewQuery:'回访查询',
    suppliesManagement:'物资管理',
    warehouse:'入库管理',
    outbound:'出库管理',
    automatedOut:'自动出库',
    change:'调拔管理',
    inventoryQuery:'库存查询',
    points:'盘点管理',
    infrastructure:'基础设置',
    warehouseManag:'仓库管理',
    materialManag:'物料管理',
    bom:'物料清单',
    materialTemplate:'物料模板',
    unit:'单位管理',
    materialClass:'物料分类',
    warning:'预警管理',
    supplier:'供应商管理',
    manufacturer:'生产商管理',
    inventoryInit:'库存初始化',
    qualityManagement:'质控管理',
    sample:'样本',
    plasma:'血浆',
    material:'物料',
    scrap:'报废',
    instrumentTest:'仪器检验',
    enzymeLabeled:'酶标仪管理',
    biochemical:'生化仪管理',
    equipmentManagement:'设备管理',
    registrationEqu:'设备登记',
    queryEqu:'设备查询',
    inspectionReg:'巡检登记',
    integratedQuery:'综合查询',
    reportSet:'报表设置',
    queryInt:'查询',
    permission: '系统管理',
    role: '角色管理',
    user:'用户管理',
    department:'部门管理',
    position:'职务管理',
    permissions:'权限管理',
    parameterSetting:'业务参数设置',
    log:'日志管理',
    dictionary:'数据字典',
    credentials:'凭证管理',
    menu:'菜单管理',
    queued:'排队叫号配置',
    systemSettings:'系统设置',
  },
  // Top navigation bar
  navbar: {
    dashboard: '首页',
    github: '项目地址',
    logOut: '退出登录',
    profile: '个人中心',
    screenfull:'全屏',
    theme: '换肤',
    size: '布局大小',
    i18n: '中英文切换',
  },
  // Login page
  login: {
    title: '精英天成单采血浆站标准化业务管理系统',
    logIn: '登录',
    username: '账号',
    password: '密码',
  },
  // Role management dialogs/tables
  role:{
    rolename:'角色名称',
    remakr:'描述',
    remakrJane:'描述',
    operations:'操作',
    menus:'菜单树',
    edit:'编辑角色',
    new:'新建角色',
  },
  documentation: {
    documentation: '文档',
    github: 'Github 地址'
  },
  permission: {
    addRole: '新增角色',
    editPermission: '编辑权限',
    roles: '你的权限',
    switchRoles: '切换权限',
    tips: '在某些情况下,不适合使用 v-permission。例如:Element-UI 的 el-tab 或 el-table-column 以及其它动态渲染 dom 的场景。你只能通过手动设置 v-if 来实现。',
    delete: '删除',
    confirm: '确定',
    cancel: '取消'
  },
  guide: {
    description: '引导页对于一些第一次进入项目的人很有用,你可以简单介绍下项目的功能。本 Demo 是基于',
    button: '打开引导'
  },
  components: {
    documentation: '文档',
    tinymceTips: '富文本是管理后台一个核心的功能,但同时又是一个有很多坑的地方。在选择富文本的过程中我也走了不少的弯路,市面上常见的富文本都基本用过了,最终权衡了一下选择了Tinymce。更详细的富文本比较和介绍见',
    dropzoneTips: '由于我司业务有特殊需求,而且要传七牛 所以没用第三方,选择了自己封装。代码非常的简单,具体代码你可以在这里看到 @/components/Dropzone',
    stickyTips: '当页面滚动到预设的位置会吸附在顶部',
    backToTopTips1: '页面滚动到指定位置会在右下角出现返回顶部按钮',
    backToTopTips2: '可自定义按钮的样式、show/hide、出现的高度、返回的位置 如需文字提示,可在外部使用Element的el-tooltip元素',
    imageUploadTips: '由于我在使用时它只有vue@1版本,而且和mockjs不兼容,所以自己改造了一下,如果大家要使用的话,优先还是使用官方版本。'
  },
  // Generic table UI strings
  table: {
    dynamicTips1: '固定表头, 按照表头顺序排序',
    dynamicTips2: '不固定表头, 按照点击顺序排序',
    dragTips1: '默认顺序',
    dragTips2: '拖拽后顺序',
    title: '标题',
    importance: '重要性',
    type: '类型',
    remark: '点评',
    search: '搜索',
    add: '添加',
    export: '导出',
    reviewer: '审核人',
    id: '序号',
    date: '时间',
    author: '作者',
    readings: '阅读数',
    status: '状态',
    actions: '操作',
    edit: '编辑',
    publish: '发布',
    draft: '草稿',
    delete: '删除',
    cancel: '取 消',
    confirm: '确 定'
  },
  example: {
    warning: '创建和编辑页面是不能被 keep-alive 缓存的,因为keep-alive 的 include 目前不支持根据路由来缓存,所以目前都是基于 component name 来进行缓存的。如果你想类似的实现缓存效果,可以使用 localStorage 等浏览器缓存方案。或者不要使用 keep-alive 的 include,直接缓存所有页面。详情见'
  },
  errorLog: {
    tips: '请点击右上角bug小图标',
    description: '现在的管理后台基本都是spa的形式了,它增强了用户体验,但同时也会增加页面出问题的可能性,可能一个小小的疏忽就导致整个页面的死锁。好在 Vue 官网提供了一个方法来捕获处理异常,你可以在其中进行错误处理或者异常上报。',
    documentation: '文档介绍'
  },
  excel: {
    export: '导出',
    selectedExport: '导出已选择项',
    placeholder: '请输入文件名(默认excel-list)'
  },
  zip: {
    export: '导出',
    placeholder: '请输入文件名(默认file)'
  },
  pdf: {
    tips: '这里使用 window.print() 来实现下载pdf的功能'
  },
  theme: {
    change: '换肤',
    documentation: '换肤文档',
    tips: 'Tips: 它区别于 navbar 上的 theme-pick, 是两种不同的换肤方法,各自有不同的应用场景,具体请参考文档。'
  },
  tagsView: {
    refresh: '刷新',
    close: '关闭',
    closeOthers: '关闭其它',
    closeAll: '关闭所有'
  },
  settings: {
    title: '系统布局配置',
    theme: '主题色',
    tagsView: '开启 Tags-View',
    fixedHeader: '固定 Header',
    sidebarLogo: '侧边栏 Logo'
  },
  // Validation errors (err*) and confirmation/notification messages (msg*)
  message:{
    err1:'请输入正确的用户名',
    err2:'密码不能少于6位',
    err3:'名称不能为空',
    err4:'角色名称只能为英文字符',
    err5:'菜单树必选',
    err6:'不能有特殊符号',
    err7:'部门必选',
    msg1:'您已经登出,您可以取消以停留在此页面,或再次登录',
    msg2:'确认注销',
    msg3:'验证失败,请重新登录',
    msg4:'服务器超时',
    msg5:'语言切换成功',
    msg6:'确认删除?',
    msg7:'删除成功!',
    msg8:'警告',
    msg9:'有子部门会连同子部门一同删除 确认删除?',
    msg10:'修改成功!',
    msg11:'添加成功!',
  },
  button:{
    cancel:'取消',
    ReLogin:'重新登录',
    confirm:'确认'
  }
}
|
import forohfor.scryfall.api.Card;
import forohfor.scryfall.api.MTGCardQuery;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
/**
 * Small Swing window that turns a pasted decklist into a saved deck file.
 * Card names are parsed from the text area, resolved against Scryfall via
 * MTGCardQuery, and written out through a ListRecogStrat under the
 * application's configured "decks" directory.
 */
public class DeckGenerator extends JFrame
{
    private static final long serialVersionUID = 1L;
    // Basic-land names skipped when collecting card names (lower-cased match).
    private static ArrayList<String> ignore = new ArrayList<>();
    static
    {
        ignore.add("plains");
        ignore.add("island");
        ignore.add("swamp");
        ignore.add("mountain");
        ignore.add("forest");
    }
    private JTextArea jt;       // decklist input
    private JButton gen;        // "Generate Deck" button
    private JTextField namebox; // deck name input
    /**
     * Parses the pasted decklist into a de-duplicated list of card names.
     * Per line: an optional "SB:" sideboard prefix and a leading count are
     * stripped; for tab-separated lines the name is taken from the second
     * column; basic lands and duplicates are dropped.
     */
    public ArrayList<String> getCardNames()
    {
        String decklist = jt.getText();
        ArrayList<String> added = new ArrayList<>();
        for (String cardname : decklist.split("\n"))
        {
            cardname = cardname.trim();
            if (cardname.startsWith("SB:"))
            {
                cardname = cardname.replace("SB:", "");
                cardname = cardname.trim();
            }
            cardname = removeLeadingNumber(cardname);
            if (cardname.contains("\t"))
            {
                cardname = cardname.split("\t")[1];
            }
            if (!added.contains(cardname) && !ignore.contains(cardname.toLowerCase()))
            {
                added.add(cardname);
            }
        }
        return added;
    }
    /**
     * Strips a run of leading digits (a card count such as "4 Shock") and
     * any whitespace that follows it. Returns the line unchanged if it does
     * not start with digits.
     */
    public static String removeLeadingNumber(String line)
    {
        int lastNum = 0;
        while (lastNum < line.length() && Character.isDigit(line.charAt(lastNum)))
        {
            lastNum++;
        }
        return line.substring(lastNum).trim();
    }
    /** Builds and shows the window; the button kicks off writeDeck(). */
    public DeckGenerator()
    {
        super("Deck generator");
        gen = new JButton("Generate Deck");
        namebox = new JTextField("Enter Deck Name");
        JScrollPane scroll = new JScrollPane();
        gen.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent e)
            {
                // Disable while the (synchronous part of the) export runs to
                // avoid double-clicks.
                gen.setEnabled(false);
                writeDeck(SavedConfig.PATH);
                gen.setEnabled(true);
            }
        });
        JPanel bot = new JPanel();
        bot.setLayout(new BorderLayout());
        jt = new JTextArea(10, 50);
        jt.setText("Paste Decklist Here");
        scroll.setViewportView(jt);
        setLayout(new BorderLayout());
        add(scroll, BorderLayout.CENTER);
        bot.add(namebox, BorderLayout.CENTER);
        bot.add(gen, BorderLayout.SOUTH);
        add(bot, BorderLayout.SOUTH);
        pack();
        setVisible(true);
    }
    /**
     * Resolves the pasted card names against Scryfall and writes the deck
     * file on a background thread, reporting progress via the app's
     * OperationBar and showing a success/error dialog when done.
     *
     * @param path base path (currently unused here; output location comes
     *             from SavedConfig — NOTE(review): confirm whether the
     *             parameter is intentionally ignored)
     */
    public void writeDeck(String path)
    {
        ListRecogStrat r = new ListRecogStrat(namebox.getText());
        new File(SavedConfig.getSubPath("decks")).mkdirs();
        File f = new File(SavedConfig.getCustomSetPath("decks", namebox.getText()));
        ArrayList<String> names = getCardNames();
        ArrayList<Card> cards = MTGCardQuery.toCardList(names, true);
        final OperationBar bar = RecogApp.INSTANCE.getOpBar();
        if (bar.setTask("Generating Deck...", cards.size()))
        {
            new Thread()
            {
                public void run()
                {
                    for (Card card : cards)
                    {
                        bar.setSubtaskName(String.format("%s (%s)", card.getName(), card.getSetCode()));
                        r.addFromCard(card);
                        bar.progressTask();
                    }
                    try
                    {
                        r.writeOut(f);
                        JOptionPane.showMessageDialog(null,
                                "Deck saved with " + r.size() + " unique cards from " + names.size() + " card names.",
                                "Deck Saved", JOptionPane.INFORMATION_MESSAGE, null);
                    } catch (IOException e)
                    {
                        e.printStackTrace();
                        JOptionPane.showMessageDialog(null,
                                "Deck couldn't be saved",
                                "Error", JOptionPane.ERROR_MESSAGE, null);
                    }
                }
            }.start();
        }
    }
}
|
# Trained weights: enet_cityscapes_512x1024_20210219.pt
# Two-stage ENet training recipe on Cityscapes (512x1024, mixed precision).
# Step-1: Pre-train encoder
python main_semseg.py --epochs=300 --lr=0.0008 --batch-size=16 --weight-decay=0.0002 --dataset=city --model=enet --mixed-precision --encoder-only --exp-name=enet_cityscapes_512x1024_encoder
# Step-2: Train the entire network, continuing from the encoder checkpoint
python main_semseg.py --state=2 --continue-from=enet_cityscapes_512x1024_encoder.pt --epochs=300 --lr=0.0008 --batch-size=16 --weight-decay=0.0002 --dataset=city --model=enet --mixed-precision --exp-name=enet_cityscapes_512x1024
|
<filename>resources/js/constants/InvitationTypes.js
// Invitation target types: what kind of entity an invitation addresses.
export const CREW = 'crew';
export const EVENT = 'event';
export const USER = 'user';
|
#!/bin/bash
# Copyright 2013 Johns Hopkins University (Author: Daniel Povey)
# Apache 2.0.
# Fisher English data preparation: takes one or more LDC corpus
# directories and produces Kaldi data files under data/train_fisher.
stage=0
. utils/parse_options.sh
if [ $# -eq 0 ]; then
  echo "$0 <fisher-dir-1> [<fisher-dir-2> ...]"
  echo " e.g.: $0 /export/corpora3/LDC/LDC2004T19 /export/corpora3/LDC/LDC2005T19\\"
  echo " /export/corpora3/LDC/LDC2004S13 /export/corpora3/LDC/LDC2005S13"
  echo " (We also support a single directory that has the contents of all of them)"
  exit 1;
fi
# Check that the arguments are all absolute pathnames.
for dir in $*; do
  case $dir in /*) ;; *)
    echo "$0: all arguments must be absolute pathnames."; exit 1;
  esac
done
# First check we have the right things in there...
#
# Build a flat "links" directory of symlinks to the expected corpus
# subdirectories, accepting either the old fe_03_* or the newer
# fisher_eng_tr_sp_* naming.
rm -r data/local/data_fisher/links 2>/dev/null
mkdir -p data/local/data_fisher/links || exit 1;
for subdir in fe_03_p1_sph1 fe_03_p1_sph3 fe_03_p1_sph5 fe_03_p1_sph7 \
  fe_03_p2_sph1 fe_03_p2_sph3 fe_03_p2_sph5 fe_03_p2_sph7 fe_03_p1_sph2 \
  fe_03_p1_sph4 fe_03_p1_sph6 fe_03_p1_tran fe_03_p2_sph2 fe_03_p2_sph4 \
  fe_03_p2_sph6 fe_03_p2_tran; do
  found_subdir=false
  for dir in $*; do
    if [ -d $dir/$subdir ]; then
      found_subdir=true
      ln -s $dir/$subdir data/local/data_fisher/links/$subdir
    else
      new_style_subdir=$(echo $subdir | sed s/fe_03_p1_sph/fisher_eng_tr_sp_d/)
      if [ -d $dir/$new_style_subdir ]; then
        found_subdir=true
        ln -s $dir/$new_style_subdir data/local/data_fisher/links/$subdir
      fi
    fi
  done
  if ! $found_subdir; then
    echo "$0: could not find the subdirectory $subdir in any of $*"
    exit 1;
  fi
done
tmpdir=`pwd`/data/local/data_fisher
links=data/local/data_fisher/links
. ./path.sh # Needed for KALDI_ROOT
# sph2pipe converts the LDC .sph audio into wav on the fly (see wav.scp).
sph2pipe=$KALDI_ROOT/tools/sph2pipe_v2.5/sph2pipe
if [ ! -x $sph2pipe ]; then
  echo "Could not find (or execute) the sph2pipe program at $sph2pipe";
  exit 1;
fi
# (1) Get transcripts in one file, and clean them up ...
# Stage 0: collect the transcript and sph file lists and sanity-check
# that the full Fisher corpus (11699 conversations) is present.
if [ $stage -le 0 ]; then
  find $links/fe_03_p1_tran/data $links/fe_03_p2_tran/data -iname '*.txt'  > $tmpdir/transcripts.flist
  for dir in fe_03_p{1,2}_sph{1,2,3,4,5,6,7}; do
    find $links/$dir/ -iname '*.sph'
  done > $tmpdir/sph.flist
  n=`cat $tmpdir/transcripts.flist | wc -l`
  if [ $n -ne 11699 ]; then
    echo "Expected to find 11699 transcript files in the Fisher data, found $n"
    exit 1;
  fi
  n=`cat $tmpdir/sph.flist | wc -l`
  if [ $n -ne 11699 ]; then
    echo "Expected to find 11699 .sph files in the Fisher data, found $n"
    exit 1;
  fi
fi
dir=data/train_fisher
# Stage 1: parse every transcript into utterance lines
# (<call>-<side>-<start>-<end> <words>) in text.1 and produce
# reco2file_and_channel. Times are in centiseconds, zero-padded to 6
# digits so lexicographic sort matches temporal order.
if [ $stage -le 1 ]; then
  mkdir -p $dir
  ## fe_03_00004.sph
  ## Transcpribed at the LDC
  #
  #7.38 8.78 A: an- so the topic is
  echo -n > $tmpdir/text.1 || exit 1;
  perl -e '
   use File::Basename;
   ($tmpdir)=@ARGV;
   open(F, "<$tmpdir/transcripts.flist") || die "Opening list of transcripts";
   open(R, "|sort >data/train_fisher/reco2file_and_channel") || die "Opening reco2file_and_channel";
   open(T, ">$tmpdir/text.1") || die "Opening text output";
   while (<F>) {
     $file = $_;
     m:([^/]+)\.txt: || die "Bad filename $_";
     $call_id = $1;
     print R "$call_id-A $call_id A\n";
     print R "$call_id-B $call_id B\n";
     open(I, "<$file") || die "Opening file $_";
     $line1 = <I>;
     $line1 =~ m/# (.+)\.sph/ || die "Bad first line $line1 in file $file";
     $call_id eq $1 || die "Mismatch call-id $call_id vs $1\n";
     while (<I>) {
       if (m/([0-9.]+)\s+([0-9.]+) ([AB]):\s*(\S.*\S|\S)\s*$/) {
         $start = sprintf("%06d", $1 * 100.0);
         $end = sprintf("%06d", $2 * 100.0);
         length($end) > 6 && die "Time too long $end in file $file";
         $side = $3;
         $words = $4;
         $utt_id = "${call_id}-$side-$start-$end";
         print T "$utt_id $words\n" || die "Error writing to text file";
       }
     }
   }
   close(R); close(T) ' $tmpdir || exit 1;
fi
# Stage 2: normalize the transcripts (drop unclear '((' segments and
# empty utterances, map non-speech markers to [laughter]/[noise]) and
# derive utt2spk, segments and spk2utt from the utterance ids.
if [ $stage -le 2 ]; then
  sort $tmpdir/text.1 | grep -v '((' | \
    awk '{if (NF > 1){ print; }}' | \
    sed 's:\[laugh\]:[laughter]:g' | \
    sed 's:\[sigh\]:[noise]:g' | \
    sed 's:\[cough\]:[noise]:g' | \
    sed 's:\[mn\]:[noise]:g' | \
    sed 's:\[breath\]:[noise]:g' | \
    sed 's:\[lipsmack\]:[noise]:g' > $tmpdir/text.2
  # (the original pipeline ran the [sigh] substitution twice; the
  # duplicate was redundant and has been removed)
  cp $tmpdir/text.2 $dir/text
  # create segments file and utt2spk file...
  ! cat $dir/text | perl -ane 'm:([^-]+)-([AB])-(\S+): || die "Bad line $_;"; print "$1-$2-$3 $1-$2\n"; ' > $dir/utt2spk \
     && echo "Error producing utt2spk file" && exit 1;
  cat $dir/text | perl -ane 'm:((\S+-[AB])-(\d+)-(\d+))\s: || die; $utt = $1; $reco = $2; $s = sprintf("%.2f", 0.01*$3);
               $e = sprintf("%.2f", 0.01*$4); print "$utt $reco $s $e\n"; ' > $dir/segments
  utils/utt2spk_to_spk2utt.pl <$dir/utt2spk > $dir/spk2utt
fi
# Stage 3: build wav.scp — one entry per conversation side, each piping
# the .sph through sph2pipe to extract channel 1 (A) or 2 (B) as wav.
if [ $stage -le 3 ]; then
  for f in `cat $tmpdir/sph.flist`; do
    # convert to absolute path
    utils/make_absolute.sh $f
  done > $tmpdir/sph_abs.flist
  cat $tmpdir/sph_abs.flist | perl -ane 'm:/([^/]+)\.sph$: || die "bad line $_; "; print "$1 $_"; ' > $tmpdir/sph.scp
  cat $tmpdir/sph.scp | awk -v sph2pipe=$sph2pipe '{printf("%s-A %s -f wav -p -c 1 %s |\n", $1, sph2pipe, $2);
    printf("%s-B %s -f wav -p -c 2 %s |\n", $1, sph2pipe, $2);}' | \
   sort -k1,1 -u  > $dir/wav.scp || exit 1;
fi
# Stage 4: extract per-side speaker gender from the corpus file tables,
# with a fallback parser for older distributions of the corpus.
if [ $stage -le 4 ]; then
  # get the spk2gender information.  This is not a standard part of our
  # file formats
  # The files "filetable2fe_03_p2_sph1 fe_03_05852.sph ff
  cat $links/fe_03_p1_sph{1,2,3,4,5,6,7}/filetable.txt \
      $links/fe_03_p2_sph{1,2,3,4,5,6,7}/docs/filetable2.txt | \
   perl -ane 'm:^\S+ (\S+)\.sph ([fm])([fm]): || die "bad line $_;"; print "$1-A $2\n", "$1-B $3\n"; ' | \
   sort | uniq | utils/filter_scp.pl $dir/spk2utt > $dir/spk2gender
  if [ ! -s $dir/spk2gender ]; then
    echo "It looks like our first try at getting the spk2gender info did not work."
    echo "(possibly older distribution?)  Trying something else."
    cat $links/fe_03_p1_tran/doc/fe_03_p1_filelist.tbl  $links/fe_03_p2_tran/doc/fe_03_p2_filelist.tbl  | \
       perl -ane 'm:fe_03_p[12]_sph\d\t(\d+)\t([mf])([mf]): || die "Bad line $_";
                print "fe_03_$1-A $2\n", "fe_03_$1-B $3\n"; ' | \
        sort | uniq | utils/filter_scp.pl $dir/spk2utt > $dir/spk2gender
  fi
fi
echo "Fisher data preparation succeeded"
|
import * as NS from '../../namespace';
import { IChatMessage } from 'features/chat/chatApi/namespace';
// Action creators for the chat feature: connection lifecycle, message
// traffic, room switching and cache control. All creators are pure and
// return plain `CHAT:*`-typed actions (types declared in ../../namespace).

export function chatError(error: string): NS.IChatError {
  return { type: 'CHAT:ERROR', payload: error };
}

export function chatConnected(): NS.IChatConnected {
  return { type: 'CHAT:CONNECTED' };
}

export function chatDisconnected(): NS.IChatDisconnected {
  return { type: 'CHAT:DISCONNECTED' };
}

export function messageReceived(message: IChatMessage): NS.IMessageReceived {
  return { type: 'CHAT:MESSAGE_RECEIVED', payload: message };
}

export function messageDeleted(messageId: string, roomId: string): NS.IMessageDeleted {
  return { type: 'CHAT:MESSAGE_DELETED', payload: { messageId, roomId } };
}

export function historyReceived(roomId: string, messages: IChatMessage[]): NS.IHistoryReceived {
  return { type: 'CHAT:HISTORY_RECEIVED', payload: { roomId, messages } };
}

// Request a room switch (handled asynchronously); setCurrentRoomId is the
// corresponding success action.
export function switchRoom(roomId: string): NS.ISwitchRoom {
  return { type: 'CHAT:SWITCH_ROOM', payload: roomId };
}

export function setCurrentRoomId(roomId: string): NS.ISwitchRoomSuccess {
  return { type: 'CHAT:SET_CURRENT_ROOM_ID', payload: roomId };
}

export function joinRoom(roomId: string): NS.IJoinRoom {
  return { type: 'CHAT:JOIN_ROOM', payload: roomId };
}

export function sendMessage(): NS.ISendMessage {
  return { type: 'CHAT:SEND_MESSAGE' };
}

export function sendMessageSuccess(): NS.ISendMessageSuccess {
  return { type: 'CHAT:SEND_MESSAGE_SUCCESS' };
}

// NOTE(review): unlike the other creators this carries the error under
// `error` rather than `payload` — it matches NS.ISendMessageFail as typed.
export function sendMessageFail(error: string): NS.ISendMessageFail {
  return { type: 'CHAT:SEND_MESSAGE_FAIL', error };
}

export function editMessage(message: IChatMessage): NS.IEditMessage {
  return { type: 'CHAT:EDIT_MESSAGE', payload: message };
}

export function setCacheValidity(isValid: boolean): NS.ISetCacheValidity {
  return { type: 'CHAT:SET_CACHE_VALIDITY', payload: isValid };
}
|
package lowest_common_ancestor;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 12746번: Traffic (small)
*
* @see https://www.acmicpc.net/problem/12746/
*
*/
public class Boj12746 {
    private static ArrayList<Integer>[] tree; // adjacency list of the tree (0-based nodes)
    private static int[][] parent;            // parent[v][p] = 2^p-th ancestor of v (binary lifting)
    private static int[] deep;                // depth of each node (root = 0)
    private static int[] cost;                // per-node counters, turned into edge loads bottom-up
    private static boolean[] visit;
    private static int N;
    // Encodes an edge (a, b), a < b, as a*CIPHER + b so edges can be compared
    // as a single long for tie-breaking.
    private static final long CIPHER = 1_000_000L;
    private static final String SPACE = " ";
    private static int result = -1; // max traffic over any edge
    private static long index;      // encoded edge achieving the max (smallest on ties)

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());

        N = Integer.parseInt(st.nextToken());
        int Q = Integer.parseInt(st.nextToken());

        tree = new ArrayList[N];
        for(int i = 0; i < N; i++){
            tree[i] = new ArrayList<>();
        }

        parent = new int[N][21];
        deep = new int[N];
        visit = new boolean[N];
        cost = new int[N];

        int loop = N - 1;
        while(loop-- > 0){
            st = new StringTokenizer(br.readLine());

            int node1 = Integer.parseInt(st.nextToken()) - 1;
            int node2 = Integer.parseInt(st.nextToken()) - 1;

            tree[node1].add(node2);
            tree[node2].add(node1);
        }
        dfs(0, 0);
        connecting();

        // Each query (u, v) adds one unit of traffic along the path u..v:
        // +1 at both endpoints, -2 at their LCA (classic tree prefix-sum
        // trick; summing bottom-up later yields per-edge totals).
        while(Q-- > 0){
            st = new StringTokenizer(br.readLine());

            int node1 = Integer.parseInt(st.nextToken()) - 1;
            int node2 = Integer.parseInt(st.nextToken()) - 1;

            cost[node1]++;
            cost[node2]++;
            cost[LCA(node1, node2)] -= 2;       // make prefix sum
        }
        System.out.println(getResult());
    }

    // Rooted DFS from node 0: records depth and immediate parent of each node.
    private static void dfs(int current, int depth){
        deep[current] = depth;
        visit[current] = true;

        for(int next: tree[current]){
            if(visit[next]) continue;
            parent[next][0] = current;
            dfs(next, depth + 1);
        }
    }

    // Fills the binary-lifting table: 2^p-th ancestor via two 2^(p-1) jumps.
    private static void connecting(){
        for(int p = 1; p < 21; p++){
            for(int cur = 0; cur < N; cur++){
                parent[cur][p] = parent[parent[cur][p - 1]][p - 1];
            }
        }
    }

    private static int LCA(int x, int y){       // LCA
        // Ensure y is the deeper node.
        if(deep[x] > deep[y]){
            int tmp = x;
            x = y;
            y = tmp;
        }

        for(int i = 20; i >= 0; i--){
            int jump = 1 << i;
            if(deep[y] - deep[x] >= jump) y = parent[y][i];     // set same level
        }
        if(x == y) return x;

        for(int i = 20; i >= 0; i--){       // find ancestor
            if(parent[x][i] == parent[y][i]) continue;
            x = parent[x][i];
            y = parent[y][i];
        }
        return parent[x][0];
    }

    // Formats "<smaller endpoint> <larger endpoint> <max traffic>" (1-based).
    private static String getResult(){
        StringBuilder sb = new StringBuilder();
        filling(0);

        return sb.append(index / CIPHER + 1).append(SPACE).append(index % CIPHER + 1).append(SPACE).append(result).toString();
    }

    // Post-order accumulation: cost[next] after recursion is the traffic on
    // the edge (current, next); track the maximum (smallest encoded edge on
    // ties) while pushing sums up toward the root.
    private static void filling(int current){
        for (int next : tree[current]) {
            if (next == parent[current][0]) continue;       // pass same node
            filling(next);      // search next;

            long idx = current * CIPHER + next;
            if (current > next) idx = next * CIPHER + current;      // make index

            if (cost[next] > result || cost[next] == result && idx < index) {       // find max
                result = cost[next];
                index = idx;
            }
            cost[current] += cost[next];        // add cost bottom up
        }
    }
}
fn simulate_bingo_game() -> i32 {
let mut card: [[bool; 5]; 5] = [[false; 5]; 5];
card[2][2] = true; // Mark the center cell as "FREE"
let mut drawn_numbers: Vec<u32> = Vec::new();
let mut rng = rand::thread_rng();
// Function to check win conditions
fn check_win(card: &[[bool; 5]; 5]) -> bool {
// Check horizontal, vertical, and diagonal lines
for i in 0..5 {
if card[i].iter().all(|&x| x) || (0..5).all(|j| card[j][i]) {
return true;
}
}
// Check diagonal lines
if (0..5).all(|i| card[i][i]) || (0..5).all(|i| card[i][4 - i]) {
return true;
}
false
}
// Simulate drawing numbers and marking the card
for _ in 1..=75 {
let num = rng.gen_range(1..=75);
drawn_numbers.push(num);
let column = match num {
1..=15 => 0,
16..=30 => 1,
31..=45 => 2,
46..=60 => 3,
_ => 4,
};
for row in 0..5 {
if num == 15 && row == 2 {
continue; // Skip marking the center cell for "FREE"
}
if num == (column * 15 + row as u32 + 1) {
card[row][column] = true;
if check_win(&card) {
return drawn_numbers.len() as i32;
}
}
}
}
-1 // Return -1 if no win condition is met
} |
#pragma once

namespace nifty{
namespace graph{
namespace opt{
namespace minstcut{

    // Trait mapping a min-st-cut objective type to a human-readable name.
    // Only declared here; each concrete OBJECTIVE provides a specialization
    // (conventionally exposing a static name() function).
    template<class OBJECTIVE>
    struct MinstcutObjectiveName;

    // template<class GRAPH>
    // struct GraphName{
    //     static std::string name(){
    //
    //     }
    // };

} // namespace minstcut
} // namespace opt
}
}
|
// Authentication lifecycle states for a user account.
enum UserAuthState {
    case UserAuth
    case UserNotAuth
    case UserDeleted
}
// Returns a human-readable description for the given authentication state.
func getUserAuthMessage(state: UserAuthState) -> String {
    let message: String
    switch state {
    case .UserAuth:
        message = "User is authenticated"
    case .UserNotAuth:
        message = "User is not authenticated"
    case .UserDeleted:
        message = "User account has been deleted"
    }
    return message
}
// Example usage: map a state to its message and print it.
let authState = UserAuthState.UserAuth
let message = getUserAuthMessage(state: authState)
print(message) // Output: "User is authenticated"
import random
# Helper: draw one random integer from an inclusive range.
def generate_random(start, end):
    """Return a uniformly random integer N with start <= N <= end."""
    return random.randrange(start, end + 1)
# generate a random number in the given inclusive range and print it
start = 10
end = 15
print(generate_random(start, end))
A visualization showing the pricing history of the iPhone, consisting of a graph or chart with three data points: $499, $599, and $699.
package com.ourpalm.hbase.aries;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import com.ourpalm.hbase.aries.service.StatService;
/**
 * HBase endpoint coprocessor that scans a row-key range of the hosting
 * region and aggregates the numeric suffix of each row key (the part
 * after the last '_'): either counting matching rows ("count") or
 * summing their values ("total").
 */
public class StatEndPointServer extends BaseEndpointCoprocessor implements StatService {

    private final Log log = LogFactory.getLog(this.getClass());

    /** Supported aggregation modes; the first entry is the default. */
    private final static List<String> statTypes = new ArrayList<String>();
    static {
        // Plain static initializer instead of double-brace initialization,
        // which creates a needless anonymous subclass per use site.
        statTypes.add("count");
        statTypes.add("total");
    }

    /**
     * Shared implementation for both endpoint methods (their bodies were
     * previously duplicated verbatim).
     *
     * @param startKey  inclusive scan start row
     * @param stopKey   exclusive scan stop row
     * @param minValue  inclusive lower bound on the row-key suffix value
     * @param maxValue  inclusive upper bound on the row-key suffix value
     * @param statType  "count" or "total"; anything else falls back to "count"
     * @return the count of matching rows, or the sum of their suffix values
     * @throws IOException if the region scan fails
     */
    private long scanAndAggregate(String startKey, String stopKey, Long minValue, Long maxValue, String statType) throws IOException {
        log.info("================================== begin");
        if (!statTypes.contains(statType)) {
            statType = statTypes.get(0); // default to "count"
        }
        Scan scan = new Scan();
        /* version */
        scan.setStartRow(startKey.getBytes());
        scan.setStopRow(stopKey.getBytes());
        scan.setMaxVersions(1);
        /* batch and caching */
        scan.setBatch(0);
        scan.setCaching(100000);
        RegionCoprocessorEnvironment env = (RegionCoprocessorEnvironment) getEnvironment();
        InternalScanner scanner = env.getRegion().getScanner(scan);
        long sum = 0;
        try {
            List<KeyValue> kvList = new ArrayList<KeyValue>();
            boolean hasMore = false;
            do {
                hasMore = scanner.next(kvList);
                for (KeyValue kv : kvList) {
                    // Row keys end in "..._<numeric value>"; parse that suffix.
                    String key = new String(kv.getRow());
                    Long value = Long.parseLong(key.substring(key.lastIndexOf("_") + 1, key.length()));
                    if (value >= minValue && value <= maxValue) {
                        if ("count".equals(statType)) {
                            sum++;
                        } else {
                            sum = sum + value;
                        }
                    }
                }
                kvList.clear();
            } while (hasMore);
        } finally {
            scanner.close();
        }
        log.info("================================== end");
        return sum;
    }

    /** {@inheritDoc} Aggregates over [startKey, stopKey) per {@link #scanAndAggregate}. */
    @Override
    public Long ScannerDefinedKey(String startKey, String stopKey, Long minValue, Long maxValue, String statType) throws IOException {
        return scanAndAggregate(startKey, stopKey, minValue, maxValue, statType);
    }

    /**
     * {@inheritDoc} Identical aggregation to {@link #ScannerDefinedKey};
     * kept as a separate endpoint method for interface compatibility.
     */
    @Override
    public Long ScannerDefinedKeyWithUID(String startKey, String stopKey, Long minValue, Long maxValue, String statType) throws IOException {
        return scanAndAggregate(startKey, stopKey, minValue, maxValue, statType);
    }
}
|
# Use these commands to generate the LAMMPS input script and data file
# (and other auxiliary files):
# Create LAMMPS input files this way:
cd moltemplate_files
# run moltemplate
moltemplate.sh -atomstyle full system.lt
# This will generate various files with names ending in *.in* and *.data.
# These files are the input files directly read by LAMMPS.  Move them to
# the parent directory (or wherever you plan to run the simulation).
mv -f system.in* system.data ../
# We will also need the "Al99.eam.alloy" file:
#cp -f Al99.eam.alloy ../
# This file was downloaded from:
# http://www.ctcms.nist.gov/~cbecker/Download/Al-YM/Al99.eam.alloy
# Optional:
# The "./output_ttree/" directory is full of temporary files generated by
# moltemplate. They can be useful for debugging, but are usually thrown away.
rm -rf output_ttree/
cd ../
|
#!/bin/bash
##      .SYNOPSIS
##      Grafana Dashboard for Veeam Backup for Microsoft Office 365 v5.0 - Using RestAPI to InfluxDB Script
##
##      .DESCRIPTION
##      This Script will query the Veeam Backup for Microsoft Office 365 RestAPI and send the data directly to InfluxDB, which can be used to present it to Grafana.
##      The Script and the Grafana Dashboard it is provided as it is, and bear in mind you can not open support Tickets regarding this project. It is a Community Project
##
##      .Notes
##      NAME:  veeam_office365.sh
##      ORIGINAL NAME: veeam_office365.sh
##      LASTEDIT: 15/12/2020
##      VERSION: 5.0
##      KEYWORDS: Veeam, InfluxDB, Grafana
##      .Link
##      https://jorgedelacruz.es/
##      https://jorgedelacruz.uk/
##
# Configurations
##
# Endpoint URL for InfluxDB
veeamInfluxDBURL="YOURINFLUXSERVERIP" #Your InfluxDB Server, http://FQDN or https://FQDN if using SSL
veeamInfluxDBPort="8086" #Default Port
veeamInfluxDB="telegraf" #Default Database
veeamInfluxDBUser="USER" #User for Database
veeamInfluxDBPassword='PASSWORD' #Password for Database
# Endpoint URL for login action
veeamUsername="YOURVBOUSER"
veeamPassword="YOURVBOPASSWORD"
veeamRestServer="https://YOURVBOSERVERIP"
veeamRestPort="4443" #Default Port
# Obtain an OAuth bearer token from the VBO REST API (password grant).
veeamBearer=$(curl -X POST --header "Content-Type: application/x-www-form-urlencoded" --header "Accept: application/json" -d "grant_type=password&username=$veeamUsername&password=$veeamPassword&refresh_token=%27%27" "$veeamRestServer:$veeamRestPort/v5/token" -k --silent | jq -r '.access_token')
##
# Veeam Backup for Microsoft Office 365 Organization. This part will check on our Organization and retrieve Licensing Information
##
veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/Organizations"
veeamOrgUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
declare -i arrayorg=0
# Iterate over every organization returned by the API; spaces in names are
# escaped ("\ ") for the InfluxDB line protocol.
for id in $(echo "$veeamOrgUrl" | jq -r '.[].id'); do
    veeamOrgId=$(echo "$veeamOrgUrl" | jq --raw-output ".[$arrayorg].id")
    veeamOrgName=$(echo "$veeamOrgUrl" | jq --raw-output ".[$arrayorg].name" | awk '{gsub(/ /,"\\ ");print}')
    ## Licensing
    veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/Organizations/$veeamOrgId/LicensingInformation"
    veeamLicenseUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
    licensedUsers=$(echo "$veeamLicenseUrl" | jq --raw-output '.licensedUsers')
    newUsers=$(echo "$veeamLicenseUrl" | jq --raw-output '.newUsers')
    #echo "veeam_office365_organization,veeamOrgName=$veeamOrgName licensedUsers=$licensedUsers,newUsers=$newUsers"
    curl -i -XPOST "$veeamInfluxDBURL:$veeamInfluxDBPort/write?precision=s&db=$veeamInfluxDB" -u "$veeamInfluxDBUser:$veeamInfluxDBPassword" --data-binary "veeam_office365_organization,veeamOrgName=$veeamOrgName licensedUsers=$licensedUsers,newUsers=$newUsers"
    ##
    # Veeam Backup for Microsoft Office 365 Users. This part will check the total Users and if they are protected or not
    ##
    veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/LicensedUsers"
    veeamUsersUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
    declare -i arrayLicensed=0
    for id in $(echo "$veeamUsersUrl" | jq -r '.results[].id'); do
        veeamUserId=$(echo "$veeamUsersUrl" | jq --raw-output ".results[$arrayLicensed].id")
        veeamUserName=$(echo "$veeamUsersUrl" | jq --raw-output ".results[$arrayLicensed].name" | awk '{gsub(/ /,"\\ ");print}')
        veeamUserBackup=$(echo "$veeamUsersUrl" | jq --raw-output ".results[$arrayLicensed].isBackedUp")
        # Encode booleans as numeric states for Grafana value mappings:
        # 1 = protected/licensed, 2 = unprotected/unlicensed.
        case $veeamUserBackup in
        "true")
            protectedUser="1"
        ;;
        "false")
            protectedUser="2"
        ;;
        esac
        veeamLicensedType=$(echo "$veeamUsersUrl" | jq --raw-output ".results[$arrayLicensed].licenseState")
        case $veeamLicensedType in
        "Licensed")
            LicensedUser="1"
        ;;
        "Unlicensed")
            LicensedUser="2"
        ;;
        esac
        #echo "veeam_office365_overview_OD,veeamOrgName=$veeamOrgName,veeamUserName=$veeamUserName protectedUser=$protectedUser,licensedUser=$LicensedUser"
        curl -i -XPOST "$veeamInfluxDBURL:$veeamInfluxDBPort/write?precision=s&db=$veeamInfluxDB" -u "$veeamInfluxDBUser:$veeamInfluxDBPassword" --data-binary "veeam_office365_overview_OD,veeamOrgName=$veeamOrgName,veeamUserName=$veeamUserName protectedUser=$protectedUser,licensedUser=$LicensedUser"
        arrayLicensed=$arrayLicensed+1
    done
    arrayorg=$arrayorg+1
done
##
# Veeam Backup for Microsoft Office 365 Backup Repositories. This part will check the capacity and used space of the Backup Repositories
##
veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/BackupRepositories"
veeamRepoUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
declare -i arrayrepo=0
for id in $(echo "$veeamRepoUrl" | jq -r '.[].id'); do
    repository=$(echo "$veeamRepoUrl" | jq --raw-output ".[$arrayrepo].name" | awk '{gsub(/ /,"\\ ");print}')
    capacity=$(echo "$veeamRepoUrl" | jq --raw-output ".[$arrayrepo].capacityBytes")
    freeSpace=$(echo "$veeamRepoUrl" | jq --raw-output ".[$arrayrepo].freeSpaceBytes")
    objectStorageId=$(echo "$veeamRepoUrl" | jq --raw-output ".[$arrayrepo].objectStorageId")
    objectStorageEncryptionEnabled=$(echo "$veeamRepoUrl" | jq --raw-output ".[$arrayrepo].objectStorageEncryptionEnabled")
    #echo "veeam_office365_repository,repository=$repository capacity=$capacity,freeSpace=$freeSpace"
    curl -i -XPOST "$veeamInfluxDBURL:$veeamInfluxDBPort/write?precision=s&db=$veeamInfluxDB" -u "$veeamInfluxDBUser:$veeamInfluxDBPassword" --data-binary "veeam_office365_repository,repository=$repository capacity=$capacity,freeSpace=$freeSpace"
    # jq -r prints the literal string "null" when the key is absent/null,
    # hence the string comparison below.
    if [[ "$objectStorageId" == "null" ]]; then
        echo "It seems you are not using Object Storage offload on the Repository $repository, that's fine."
    else
        ##
        # Veeam Backup for Microsoft Office 365 Object Storage Repositories. This part will check the capacity and used space of the Object Storage Repositories
        ##
        veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/objectstoragerepositories/$objectStorageId"
        veeamObjectUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
        objectName=$(echo "$veeamObjectUrl" | jq --raw-output ".name" | awk '{gsub(/ /,"\\ ");print}')
        # NOTE(review): variable is named usedSpaceGB but holds usedSpaceBytes.
        usedSpaceGB=$(echo "$veeamObjectUrl" | jq --raw-output ".usedSpaceBytes")
        type=$(echo "$veeamObjectUrl" | jq --raw-output ".type")
        # Bucket information
        bucketname=$(echo "$veeamObjectUrl" | jq --raw-output ".bucket.name" | awk '{gsub(/ /,"\\ ");print}')
        servicePoint=$(echo "$veeamObjectUrl" | jq --raw-output ".bucket.servicePoint" | awk '{gsub(/ /,"\\ ");print}')
        customRegionId=$(echo "$veeamObjectUrl" | jq --raw-output ".bucket.customRegionId" | awk '{gsub(/ /,"\\ ");print}')
        #echo "veeam_office365_objectstorage,objectname=$objectName,type=$type,bucketname=$bucketname,servicePoint=$servicePoint,customRegionId=$customRegionId,objectStorageEncryptionEnabled=$objectStorageEncryptionEnabled usedSpaceGB=$usedSpaceGB"
        curl -i -XPOST "$veeamInfluxDBURL:$veeamInfluxDBPort/write?precision=s&db=$veeamInfluxDB" -u "$veeamInfluxDBUser:$veeamInfluxDBPassword" --data-binary "veeam_office365_objectstorage,objectname=$objectName,type=$type,bucketname=$bucketname,servicePoint=$servicePoint,customRegionId=$customRegionId,objectStorageEncryptionEnabled=$objectStorageEncryptionEnabled usedSpaceGB=$usedSpaceGB"
    fi
    arrayrepo=$arrayrepo+1
done
##
# Veeam Backup for Microsoft Office 365 Backup Proxies. This part will check the Name and Threads Number of the Backup Proxies
##
veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/Proxies"
veeamProxyUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
declare -i arrayprox=0
for id in $(echo "$veeamProxyUrl" | jq -r '.[].id'); do
    hostName=$(echo "$veeamProxyUrl" | jq --raw-output ".[$arrayprox].hostName" | awk '{gsub(/ /,"\\ ");print}')
    threadsNumber=$(echo "$veeamProxyUrl" | jq --raw-output ".[$arrayprox].threadsNumber")
    status=$(echo "$veeamProxyUrl" | jq --raw-output ".[$arrayprox].status")
    #echo "veeam_office365_proxies,proxies=$hostName,status=$status threadsNumber=$threadsNumber"
    # One measurement per proxy; status is written verbatim as a tag value.
    curl -i -XPOST "$veeamInfluxDBURL:$veeamInfluxDBPort/write?precision=s&db=$veeamInfluxDB" -u "$veeamInfluxDBUser:$veeamInfluxDBPassword" --data-binary "veeam_office365_proxies,proxies=$hostName,status=$status threadsNumber=$threadsNumber"
    arrayprox=$arrayprox+1
done
##
# Veeam Backup for Microsoft Office 365 Backup Jobs. This part will check the different Jobs, and the Job Sessions per every Job
##
veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/Jobs"
veeamJobsUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
declare -i arrayJobs=0
for id in $(echo "$veeamJobsUrl" | jq -r '.[].id'); do
    nameJob=$(echo "$veeamJobsUrl" | jq --raw-output ".[$arrayJobs].name" | awk '{gsub(/ /,"\\ ");print}')
    idJob=$(echo "$veeamJobsUrl" | jq --raw-output ".[$arrayJobs].id")
    # Backup Job Sessions
    veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/Jobs/$idJob/JobSessions"
    veeamJobSessionsUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
    declare -i arrayJobsSessions=0
    for id in $(echo "$veeamJobSessionsUrl" | jq -r '.results[].id'); do
        creationTime=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].creationTime")
        creationTimeUnix=$(date -d "$creationTime" +"%s")
        # NOTE(review): endTime may be null/empty for a still-running session;
        # "date -d" would then misbehave and totalDuration be wrong — verify.
        endTime=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].endTime")
        endTimeUnix=$(date -d "$endTime" +"%s")
        totalDuration=$(($endTimeUnix - $creationTimeUnix))
        status=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].status")
        # Map status to a numeric code (1=Success, 2=Warning, 3=Failed).
        # NOTE(review): no default branch — any other status (e.g. a running
        # session) silently reuses the value from the previous iteration.
        case $status in
            Success)
                jobStatus="1"
            ;;
            Warning)
                jobStatus="2"
            ;;
            Failed)
                jobStatus="3"
            ;;
        esac
        processingRate=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].statistics.processingRateBytesPS")
        readRate=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].statistics.readRateBytesPS")
        writeRate=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].statistics.writeRateBytesPS")
        transferredData=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].statistics.transferredDataBytes")
        processedObjects=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].statistics.processedObjects")
        bottleneck=$(echo "$veeamJobSessionsUrl" | jq --raw-output ".results[$arrayJobsSessions].statistics.bottleneck")
        #echo "veeam_office365_jobs,veeamjobname=$nameJob,bottleneck=$bottleneck totalDuration=$totalDuration,status=$jobStatus,processingRate=$processingRate,readRate=$readRate,writeRate=$writeRate,transferredData=$transferredData,processedObjects=$processedObjects $endTimeUnix"
        # The session end time is used as the point's timestamp (precision=s).
        curl -i -XPOST "$veeamInfluxDBURL:$veeamInfluxDBPort/write?precision=s&db=$veeamInfluxDB" -u "$veeamInfluxDBUser:$veeamInfluxDBPassword" --data-binary "veeam_office365_jobs,veeamjobname=$nameJob,bottleneck=$bottleneck totalDuration=$totalDuration,status=$jobStatus,processingRate=$processingRate,readRate=$readRate,writeRate=$writeRate,transferredData=$transferredData,processedObjects=$processedObjects $endTimeUnix"
        # Safety cap: stop after 1000 sessions per job.
        if [[ $arrayJobsSessions = "1000" ]]; then
            break
        else
            arrayJobsSessions=$arrayJobsSessions+1
        fi
    done
    arrayJobs=$arrayJobs+1
done
##
# Veeam Backup for Microsoft Office 365 Restore Sessions. This part will check the Number of Restore Sessions
##
veeamVBOUrl="$veeamRestServer:$veeamRestPort/v5/RestoreSessions"
veeamRestoreSessionsUrl=$(curl -X GET --header "Accept:application/json" --header "Authorization:Bearer $veeamBearer" "$veeamVBOUrl" 2>&1 -k --silent)
declare -i arrayRestoreSessions=0
for id in $(echo "$veeamRestoreSessionsUrl" | jq -r '.results[].id'); do
    name=$(echo "$veeamRestoreSessionsUrl" | jq --raw-output ".results[$arrayRestoreSessions].name")
    # Appears to extract a job name from a "<prefix>: <job> - <suffix>"-shaped
    # session title, then escape spaces for use as an InfluxDB tag value.
    nameJob=$(echo $name | awk -F": " '{print $2}' | awk -F" - " '{print $1}' | awk '{gsub(/ /,"\\ ");print}')
    organization=$(echo "$veeamRestoreSessionsUrl" | jq --raw-output ".results[$arrayRestoreSessions].organization" | awk '{gsub(/ /,"\\ ");print}')
    type=$(echo "$veeamRestoreSessionsUrl" | jq --raw-output ".results[$arrayRestoreSessions].type")
    endTime=$(echo "$veeamRestoreSessionsUrl" | jq --raw-output ".results[$arrayRestoreSessions].endTime")
    endTimeUnix=$(date -d "$endTime" +"%s")
    result=$(echo "$veeamRestoreSessionsUrl" | jq --raw-output ".results[$arrayRestoreSessions].result")
    initiatedBy=$(echo "$veeamRestoreSessionsUrl" | jq --raw-output ".results[$arrayRestoreSessions].initiatedBy")
    details=$(echo "$veeamRestoreSessionsUrl" | jq --raw-output ".results[$arrayRestoreSessions].details")
    # Pull item counts out of the free-text details; default to 0 when absent.
    itemsProcessed=$(echo $details | awk '//{ print $1 }')
    [[ ! -z "$itemsProcessed" ]] || itemsProcessed="0"
    itemsSuccess=$(echo $details | awk '//{ print $4 }' | awk '{gsub(/\(|\)/,"");print $1}')
    [[ ! -z "$itemsSuccess" ]] || itemsSuccess="0"
    #echo "veeam_office365_restoresession,organization=$organization,veeamjobname=$nameJob,type=$type,result=$result,initiatedBy=$initiatedBy itemsProcessed=$itemsProcessed,itemsSuccess=$itemsSuccess $endTimeUnix"
    curl -i -XPOST "$veeamInfluxDBURL:$veeamInfluxDBPort/write?precision=s&db=$veeamInfluxDB" -u "$veeamInfluxDBUser:$veeamInfluxDBPassword" --data-binary "veeam_office365_restoresession,organization=$organization,veeamjobname=$nameJob,type=$type,result=$result,initiatedBy=$initiatedBy itemsProcessed=$itemsProcessed,itemsSuccess=$itemsSuccess $endTimeUnix"
    arrayRestoreSessions=$arrayRestoreSessions+1
done
|
-- Most frequent table_name. Project the grouping column explicitly:
-- "SELECT *" combined with GROUP BY is invalid in standard SQL (every
-- selected column must be grouped or aggregated).
SELECT table_name, COUNT(*) AS row_count FROM table GROUP BY table_name ORDER BY row_count DESC LIMIT 1;
#!/bin/bash -e
# Copyright 2017-2018 by SDRausty. All rights reserved. 🌎 🌍 🌏 🌐 🗺
# Hosting https://sdrausty.github.io/TermuxArch courtesy https://pages.github.com
# https://sdrausty.github.io/TermuxArch/CONTRIBUTORS Thank you for your help.
# https://sdrausty.github.io/TermuxArch/README has information about this project.
################################################################################
sysinfo ()
{
	# Collect debugging information (disk space, bash version, CPU, package
	# architecture, device properties, download directories) into a
	# timestamped log file, print it, and ask the user to attach it to a
	# GitHub issue.
	ntime=`date +%N`
	# spaceinfo is presumably defined elsewhere in the full script and sets
	# $mntspace — not visible in this chunk.
	spaceinfo
	printf "\n\033[1;32m"
	# First write truncates/creates the log; all subsequent writes append.
	printf "Begin setupTermuxArch debug information.\n" > setupTermuxArchdebug$ntime.log
	printf "\nDisk report $mntspace on /storage/emulated/0 `date`\n\n" >> setupTermuxArchdebug$ntime.log
	for n in 0 1 2 3 4 5
	do
		echo "BASH_VERSINFO[$n] = ${BASH_VERSINFO[$n]}" >> setupTermuxArchdebug$ntime.log
	done
	printf "\ncat /proc/cpuinfo results:\n\n" >> setupTermuxArchdebug$ntime.log
	cat /proc/cpuinfo >> setupTermuxArchdebug$ntime.log
	printf "\ndpkg --print-architecture result:\n\n" >> setupTermuxArchdebug$ntime.log
	dpkg --print-architecture >> setupTermuxArchdebug$ntime.log
	printf "\ngetprop ro.product.cpu.abi result:\n\n" >> setupTermuxArchdebug$ntime.log
	getprop ro.product.cpu.abi >> setupTermuxArchdebug$ntime.log
	printf "\ngetprop ro.product.device result:\n\n" >> setupTermuxArchdebug$ntime.log
	getprop ro.product.device >> setupTermuxArchdebug$ntime.log
	printf "\nDownload directory information results.\n\n" >> setupTermuxArchdebug$ntime.log
	# "||:" keeps the script alive under "bash -e" when a directory is absent.
	ls -al ~/storage/downloads 2>>setupTermuxArchdebug$ntime.log >> setupTermuxArchdebug$ntime.log 2>/dev/null ||:
	ls -al ~/downloads 2>>setupTermuxArchdebug$ntime.log >> setupTermuxArchdebug$ntime.log 2>/dev/null ||:
	if [ -d /sdcard/Download ]; then echo "/sdcard/Download exists"; else echo "/sdcard/Download not found"; fi >> setupTermuxArchdebug$ntime.log
	if [ -d /storage/emulated/0/Download ]; then echo "/storage/emulated/0/Download exists"; else echo "/storage/emulated/0/Download not found"; fi >> setupTermuxArchdebug$ntime.log
	printf "\nuname -mo results:\n\n" >> setupTermuxArchdebug$ntime.log
	uname -mo >> setupTermuxArchdebug$ntime.log
	printf "\nEnd \`setupTermuxArch.sh\` debug information.\n\nPost this information along with information regarding your issue at https://github.com/sdrausty/TermuxArch/issues. Include information about input and output. This debugging information is found in $(pwd)/$(ls setupTermuxArchdebug$ntime.log). If you think screenshots will help in resolving this matter better, include them in your post please. \n" >> setupTermuxArchdebug$ntime.log
	cat setupTermuxArchdebug$ntime.log
	printf "\n\033[0mSubmit this information if you plan to open up an issue at https://github.com/sdrausty/TermuxArch/issues to improve this installation script along with a screenshot of your topic. Include information about input and output. \n"
	# printtail is defined elsewhere in the full script — not visible here.
	printtail
}
rmarch ()
{
	# Interactively uninstall Arch Linux: remove the launcher from
	# $PREFIX/bin and delete the $HOME/arch rootfs. Loops until the user
	# answers with something recognisable (y/n/e/q).
	while true; do
		printf "\n\033[1;31m"
		read -p "Uninstall Arch Linux? [y|n] " uanswer
		if [[ $uanswer = [Ee]* ]] || [[ $uanswer = [Nn]* ]] || [[ $uanswer = [Qq]* ]];then
			break
		elif [[ $uanswer = [Yy]* ]];then
			printf "\n\033[1;32mUninstalling Arch Linux...\n"
			# Paths are quoted so values containing spaces cannot expand
			# into multiple words (the originals were unquoted).
			if [ -e "$PREFIX/bin/$bin" ] ;then
				rm "$PREFIX/bin/$bin"
			else
				printf "Uninstalling Arch Linux, nothing to do for $PREFIX/bin/$bin.\n"
			fi
			if [ -d "$HOME/arch" ] ;then
				# Guard the cd: if it failed, the wildcard rm below would
				# otherwise run in whatever the current directory is.
				cd "$HOME/arch" || exit 1
				rm -rf * 2>/dev/null ||:
				find -type d -exec chmod 700 {} \; 2>/dev/null ||:
				cd ..
				rm -rf "$HOME/arch"
			else
				printf "Uninstalling Arch Linux, nothing to do for $HOME/arch.\n"
			fi
			printf "Uninstalling Arch Linux done.\n"
			printtail
		else
			printf "\nYou answered \033[33;1m$uanswer\033[1;31m.\n\nAnswer \033[32mYes\033[1;31m or No. [\033[32my\033[1;31m|n]\n"
		fi
	done
	printtail
}
|
# Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
# (header corrected: the config file and the weight path below both use the
# center-loss variant — softmax_triplet_with_center.yml / triplet_centerloss0_0005)
# Dataset 1: market1501
# imagesize: 256x128
# batchsize: 16x4
# warmup_step 10
# random erase prob 0.5
# labelsmooth: on
# last stride 1
# bnneck on
# with center loss
# without re-ranking
python3 tools/test.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('0')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005/resnet50_model_120.pth')"
|
<gh_stars>10-100
/*
* =============================================================================
*
* Copyright (c) 2011-2016, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.standard.processor;
import org.junit.Assert;
import org.junit.Test;
public class FragmentInsertionExpressionTest {

    public FragmentInsertionExpressionTest() {
        super();
    }

    /**
     * Exercises {@code shouldBeWrappedAsFragmentExpression} against a broad set
     * of template/fragment selection strings: plain template names, "this"
     * references, expression-built names with parameters, Windows/Unix file
     * paths, and strings that are already fragment expressions ({@code ~{...}})
     * or conditionals over them, which must NOT be re-wrapped.
     */
    @Test
    public void testFragmentExpressionSelection() throws Exception {
        checkExpression("template", true);
        checkExpression("template::f", true);
        checkExpression("template::frag", true);
        checkExpression("template :: frag", true);
        checkExpression(" template :: frag ", true);
        checkExpression(" :: frag ", true);
        checkExpression("::frag ", true);
        checkExpression("::frag", true);
        checkExpression("this::frag", true);
        checkExpression(" this ::frag", true);
        checkExpression(" this :: frag", true);
        checkExpression(" ${lala slatr} + 'ele' :: 'index_' + 2 * 2", true);
        checkExpression(" ${lala slatr} + 'ele' :: ('index_' + 2 * 2)", true);
        checkExpression(" ${lala slatr} + 'ele' :: ('index_' + (2 * 2)) (somePar)", true);
        checkExpression(" ${lala slatr} + 'ele' :: ('index_' + (2 * 2)) (a='something')", true);
        checkExpression(" ${lala slatr} + 'ele' :: ('index_' + (2 * 2)) (a='something',b=4123)", true);
        checkExpression(" ${lala slatr} + 'ele' :: ('index_' + (2 * 2)) (a=('something'),b=4123)", true);
        checkExpression(" ${lala slatr} + ('ele') :: ('index_' + (2 * 2)) (a=('something'),b=4123)", true);
        checkExpression(" ${lala slatr} + ('ele') :: ('index_' + (2 * 2)) (a=('something' + 23),b=4123)", true);
        checkExpression(" ${lala slatr}+'ele' :: ('index_'+(2*2)) (a=('something'+23),b=4123)", true);
        checkExpression(" ${lala slatr}+'ele' :: ('index_'+(2*2)) (${name}=('something'+23),b=4123)", true);
        checkExpression(" ${lala slatr}+'ele' :: ('index_'+(2*2)) ((${name} + 0)=('something'+23),b=4123)", true);
        checkExpression("C:\\Program Files\\apps\\templates\\WEB-INF\\temp.html", true);
        checkExpression("C:\\Program Files\\apps\\templates\\WEB-INF\\temp.html :: 'fragment number one'", true);
        checkExpression("/home/user/apps/templates/WEB-INF/temp.html :: 'fragment number one'", true);
        checkExpression("home/user :: 'fragment number one'", true);
        checkExpression("${something}", true);
        checkExpression("${this} :: ${that}", true);
        checkExpression("~{whatever}", false);
        checkExpression("${cond} ? ~{this} : ~{that}", false);
        checkExpression("${something} :: /div", true);
        checkExpression("template :: f (~{some})", true);
        checkExpression("folder/template :: f (~{some})", true);
        checkExpression("folder/template :: f (~{some})", true);
        checkExpression("~folder/template :: f (~{some})", true);
        checkExpression("~/folder/template :: f (~{some})", true);
        checkExpression("${~{impossible}} :: f (~{some})", true);
        checkExpression("'~{impossible}' :: f (~{some})", true);
        checkExpression("folder/template (title=~{some})", true);
        checkExpression("(~{some})", false);
        checkExpression("(${cond}) ? (~{this}) : (~{that})", false);
        checkExpression("folder/template (title='one',body=~{that})", true);
        checkExpression("folder/template (title=(~{some}))", true);
        checkExpression("folder/template (title=('one'),body=(~{that}))", true);
        checkExpression("folder/template (title=('one'))", true);
        checkExpression("folder/template (body=~{(that)})", true);
        checkExpression("folder/template\n (body=~{(that)})", true);
        checkExpression("~{folder/template :: f (~{some})}", false);
        checkExpression(" ~{folder/template :: f (~{some})} ", false);
        // We could think that in this case a "true" should be returned, but actually the expression below is completely
        // invalid because we cannot call a template without fragment specification using synthetic parameters.
        checkExpression("folder/template (~{some})", false);
    }

    /**
     * Asserts whether the given expression should be wrapped as a fragment
     * expression. Uses assertEquals with the expression embedded in the
     * failure message: the original bare assertTrue/assertFalse reported
     * nothing about which of the ~60 inputs actually failed.
     */
    private static void checkExpression(final String expression, final boolean result) {
        Assert.assertEquals(
                "shouldBeWrappedAsFragmentExpression(\"" + expression + "\")",
                result,
                AbstractStandardFragmentInsertionTagProcessor.shouldBeWrappedAsFragmentExpression(expression));
    }

}
|
package alexmog.apilib.managers;
import java.io.IOException;
import java.util.Properties;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.AMQP.BasicProperties;
import alexmog.apilib.Server;
import alexmog.apilib.config.DatabasesConfig;
import alexmog.apilib.managers.Managers.Manager;
import alexmog.apilib.rabbitmq.RabbitMQEncoder;
import alexmog.apilib.rabbitmq.packets.Packet;
@Manager
public class RabbitMQManager extends alexmog.apilib.managers.Manager {
    // Both stay null until init() succeeds with rabbitmq.active=true; every
    // accessor below must therefore tolerate a null channel/connection.
    private Connection mConnection = null;
    private Channel mChannel = null;

    /** Returns the underlying RabbitMQ connection, or null when inactive. */
    public Connection getConnection() {
        return mConnection;
    }

    /** Returns the shared channel, or null when inactive. */
    public Channel getChannel() {
        return mChannel;
    }

    /**
     * Closes the channel and then the connection. Each close is attempted
     * independently: in the original single try-block, a failure while closing
     * the channel skipped closing the connection, leaking it.
     */
    @Override
    public void shutdown() {
        try {
            if (mChannel != null) mChannel.close();
        } catch (IOException | TimeoutException e) {
            Server.LOGGER.log(Level.WARNING, "RabbitMQ Exception", e);
        }
        try {
            if (mConnection != null) mConnection.close();
        } catch (IOException e) {
            Server.LOGGER.log(Level.WARNING, "RabbitMQ Exception", e);
        }
    }

    /**
     * Connects to RabbitMQ using the "rabbitmq.uri" property and opens one
     * shared channel. A no-op unless the "rabbitmq.active" property is the
     * exact string "true".
     *
     * @throws Exception if the URI is invalid or the connection fails
     */
    @Override
    public void init(Properties config, DatabasesConfig databasesConfig) throws Exception {
        if (!config.getProperty("rabbitmq.active", "false").equals("true")) {
            Server.LOGGER.info("RabbitMQ active is set to false. Didn't activate the manager.");
            return;
        }
        Server.LOGGER.info("Connecting to RabbitMQ server...");
        ConnectionFactory factory = new ConnectionFactory();
        factory.setUri(config.getProperty("rabbitmq.uri"));
        mConnection = factory.newConnection();
        mChannel = mConnection.createChannel();
        Server.LOGGER.info("RabbitMQ started.");
    }

    /**
     * Encodes and publishes a packet on the shared channel. Silently does
     * nothing when the manager is inactive (null channel); publish failures
     * are logged, not rethrown.
     */
    public void basicPublish(String exchange, String routingKey, BasicProperties properties, Packet packet) {
        try {
            if (mChannel == null) return;
            mChannel.basicPublish(exchange, routingKey, properties, RabbitMQEncoder.encode(packet));
        } catch (IOException e) {
            Server.LOGGER.log(Level.WARNING, "Cannot publish on rabbitMQ", e);
        }
    }
}
|
package com.leetcode.offer;
public class Solution_60 {

    /**
     * Probability distribution of the sum of {@code n} fair six-sided dice.
     *
     * @param n number of dice (n >= 1)
     * @return array of length 5*n + 1 where entry i is the probability that
     *         the dice total n + i; entries sum to 1
     */
    public double[] dicesProbability(int n) {
        double outcomes = Math.pow(6, n);
        // ways[d][s - 1] = number of ways for (d + 1) dice to total s.
        int[][] ways = new int[n][6 * n];
        for (int face = 0; face < 6; face++) {
            ways[0][face] = 1;
        }
        // Add one die at a time; with (d + 1) dice the total spans [d+1, 6(d+1)].
        for (int d = 1; d < n; d++) {
            int lo = d + 1;
            int hi = 6 * (d + 1);
            for (int total = lo; total <= hi; total++) {
                int combos = 0;
                for (int face = 1; face <= 6; face++) {
                    int prevIndex = total - face - 1;
                    if (prevIndex >= 0) {
                        combos += ways[d - 1][prevIndex];
                    }
                }
                ways[d][total - 1] = combos;
            }
        }
        // Normalise the counts for the final dice count into probabilities.
        double[] probabilities = new double[5 * n + 1];
        for (int i = 0; i < probabilities.length; i++) {
            probabilities[i] = ways[n - 1][i + n - 1] / outcomes;
        }
        return probabilities;
    }
}
|
<gh_stars>1-10
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from tqdm import tqdm
from time import sleep
from config import URL, QUANDL_URL
from sql_declarative import Price30M, Base, StockPriceDay
from utils import get_data, get_symbols
def store(session, ticker, date, open, high, low,
          close, volume, table, *args, **kwargs):
    """Persist one OHLCV row through the given session.

    Parameters
    ----------
    session: sql session object
    ticker: str
        Symbol
    date: '%Y-%m-%d %H:%M:%S'
    open, high, low, close, volume: float
    table: str
        Target table name: "price30m" maps to Price30M, anything else to
        StockPriceDay.
    """
    # Select the ORM model once instead of duplicating the constructor call.
    model = Price30M if table == "price30m" else StockPriceDay
    row = model(ticker=ticker, date=date, open=open, high=high,
                low=low, close=close, volume=volume)
    session.add(row)
    session.commit()
def store_df(ticker, df, table="price30m"):
    """Store a DataFrame of OHLCV rows into the database, one row at a time.

    Parameters
    ----------
    ticker: str
        Symbol
    df: pd.DataFrame which contains open, high, low, close, volume
    table: str
        The name of the destination table; "stock_price_daily" routes to the
        Quandl database, anything else to the default one.
    """
    # Establish connection; the engine choice follows the destination table.
    if table == "stock_price_daily":
        engine = create_engine(QUANDL_URL)
    else:
        engine = create_engine(URL)
    Base.metadata.bind = engine
    DBSession = sessionmaker(bind=engine)
    session = DBSession()
    try:
        for val in tqdm(df.values):
            # Rebuild the kwargs for store() from the DataFrame columns.
            data = dict(session=session, ticker=ticker)
            for i, col in enumerate(df.columns):
                data[col] = val[i]
            store(**data, table=table)
    finally:
        # Always release the connection — the original leaked the session
        # when a row failed to insert.
        session.close()
def update(ticker, end=None, period="30", exchange="polo"):
    """Fetch rows for ``ticker`` newer than ``end`` and store them.

    Parameters
    ----------
    ticker: str
        Symbol
    end: '%Y-%m-%d %H:%M:%S' or None
        Timestamp of the most recent row already stored; None means fetch
        the full history (from the epoch).
    period: float or str
        Bar size in seconds
    exchange: str
        Exchange name; "stock" routes rows to the daily stock table.
    """
    since = end if end is not None else "1970-01-01 00:00:00"
    frame = get_data(ticker, start=since, end=None,
                     period=period, exchange=exchange)
    destination = "stock_price_daily" if exchange == "stock" else "price30m"
    store_df(ticker, frame, table=destination)
if __name__ == '__main__':
    # Refresh every known symbol at 30-minute (1800 s) resolution, pausing
    # between symbols — presumably to respect the exchange's rate limit.
    pairs = get_symbols()
    for pair in tqdm(pairs):
        update(pair, period=1800, exchange="polo")
        sleep(3)
|
import ast
import logging
from pygount import SourceAnalysis

# Logging configuration
logging.basicConfig(level=logging.INFO)


def find_bugs(source):
    """Return warning strings for risky calls found in the given source text.

    The original snippet only defined ``find_bugs`` inside the ``code``
    string being analysed, so the call in the loop below raised NameError.
    It is defined at module level here instead; the check itself is a plain
    substring scan of the function's source.
    """
    warnings = []
    if 'fopen' in source:
        warnings.append('Got a potential vulnerability!')
    return warnings


# Input code: the text being *analysed*, not executed.
code = """
def find_bugs(code):
    analyzed_code = SourceAnalysis.from_string(language='python', text=code)
    bugs = []
    for function in code:
        if 'fopen' in function.body:
            bugs.append('Got a potential vulnerability!')
    return bugs
"""

# Parsed syntax tree
tree = ast.parse(code)

# Scan every function definition's source segment for risky calls.
for node in ast.walk(tree):
    if isinstance(node, ast.FunctionDef):
        function_code = ast.get_source_segment(code, node)
        for bug in find_bugs(function_code):
            logging.warning(bug)
#!/bin/bash
# Render every *.var template next to this script into a file of the same
# name minus the .var suffix, substituting only a whitelist of variables.
# https://stackoverflow.com/questions/59895/can-a-bash-script-tell-which-directory-it-is-stored-in
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# PROD_ONLY becomes a '#' (comment marker) during e2e tests so prod-only
# template lines are commented out; empty otherwise.
export PROD_ONLY=
if [[ $DESECSTACK_E2E_TEST = "TRUE" ]]
then
    export PROD_ONLY='#'
fi

for file in "$DIR"/*.var; do
    # we only replace occurrences of the variables specified below as first argument
    # (paths are quoted so directories with spaces don't word-split).
    (envsubst '$DESECSTACK_IPV4_REAR_PREFIX16' | envsubst '$DESECSTACK_DOMAIN' | envsubst '$CERT_PATH' | envsubst '$PROD_ONLY' ) < "$file" > "$DIR/$(basename "$file" .var)"
done
|
def categorize(arr):
    """Split the numbers in ``arr`` into evens and odds, preserving order.

    Returns a dict with keys 'evens' and 'odds', each a list.
    """
    buckets = {'evens': [], 'odds': []}
    for value in arr:
        key = 'evens' if value % 2 == 0 else 'odds'
        buckets[key].append(value)
    return buckets


print(categorize([3, 4, 5, 6, 7]))
#!/bin/bash
#
##################################################################################################################
# Written to be used on 64 bits computers
# Author : Erik Dubois
# Website : http://www.erikdubois.be
##################################################################################################################
##################################################################################################################
#
# DO NOT JUST RUN THIS. EXAMINE AND JUDGE. RUN AT YOUR OWN RISK.
#
##################################################################################################################
program="paper-icon-theme-git"
command="paper-icon-theme-git"

# Install the AUR package with the first available AUR helper.
# 'command -v' replaces the deprecated 'which' for helper detection, and
# the package variable is quoted against word-splitting.
if command -v pacaur > /dev/null 2>&1; then
	echo "Installing with pacaur"
	pacaur -S --noconfirm --noedit "$program"
elif command -v packer > /dev/null 2>&1; then
	echo "Installing with packer"
	packer -S --noconfirm --noedit "$program"
elif command -v yaourt > /dev/null 2>&1; then
	echo "Installing with yaourt"
	yaourt -S --noconfirm "$program"
fi

# Report whether the package's command is now available.
if command -v "$command" > /dev/null 2>&1; then
	echo "################################################################"
	echo "################################## "$command" has been installed"
	echo "################################################################"
else
	echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
	echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! "$command" has NOT been installed"
	echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
fi
|
#!/bin/bash -x
#
# Generated - do not edit!
#
# Macros
# NOTE(review): IMAGE_TYPE and OUTPUT_SUFFIX are not set in this script —
# they are expected in the environment (IDE-generated packaging step); verify.
TOP=`pwd`
CND_CONF=default
CND_DISTDIR=dist
TMPDIR=build/${CND_CONF}/${IMAGE_TYPE}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=dist/${CND_CONF}/${IMAGE_TYPE}/chaomi.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
OUTPUT_BASENAME=chaomi.X.${IMAGE_TYPE}.${OUTPUT_SUFFIX}
PACKAGE_TOP_DIR=chaomi.x/
# Functions
# Exit the script immediately, propagating the last command's non-zero
# exit code (relies on being called right after the command it checks).
function checkReturnCode
{
    rc=$?
    if [ $rc != 0 ]
    then
        exit $rc
    fi
}
# Create a directory (with parents), optionally applying a permission mode.
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
    mkdir -p "$1"
    checkReturnCode
    if [ "$2" != "" ]
    then
        chmod $2 "$1"
        checkReturnCode
    fi
}
# Copy a file into the packaging tree, optionally applying a permission mode.
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
    cp "$1" "$2"
    checkReturnCode
    if [ "$3" != "" ]
    then
        chmod $3 "$2"
        checkReturnCode
    fi
}
# Setup: recreate the distribution package dir and an empty staging tmp dir.
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/package
rm -rf ${TMPDIR}
mkdir -p ${TMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory ${TMPDIR}/chaomi.x/bin
copyFileToTmpDir "${OUTPUT_PATH}" "${TMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/package/chaomi.x.tar
cd ${TMPDIR}
# Archive from inside the staging dir so the tarball unpacks under chaomi.x/.
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/package/chaomi.x.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${TMPDIR}
|
<filename>ruby/spec/ruby/library/delegate/delegator/not_equal_spec.rb
require File.expand_path('../../../../spec_helper', __FILE__)
require File.expand_path('../../fixtures/classes', __FILE__)
# Specs for Delegator#!=: inequality must short-circuit (without delegating)
# when the receiver is compared against itself, and be forwarded to the
# wrapped object in every other case.
describe "Delegator#!=" do
  # NOTE(review): the mock is created in before :all; most spec frameworks
  # reset mock expectations per example — confirm this is intended here.
  before :all do
    @base = mock('base')
    @delegator = DelegateSpecs::Delegator.new(@base)
  end
  it "is not delegated when passed self" do
    @base.should_not_receive(:"!=")
    (@delegator != @delegator).should be_false
  end
  it "is delegated when passed the delegated object" do
    @base.should_receive(:"!=").and_return(true)
    (@delegator != @base).should be_true
  end
  it "is delegated in general" do
    @base.should_receive(:"!=").and_return(false)
    (@delegator != 42).should be_false
  end
end
|
<filename>src/js/controls/HSVPalette.js
(function () {
'use strict';
var c = createjs;
// HSV palette control: a hue-coloured base rectangle under a gradient
// overlay image, with a draggable selector sprite on top.
var HSVPalette = function (overlayGradientImage, selectorImage) {
    this.Container_constructor();
    this._init(overlayGradientImage, selectorImage);
};
var p = c.extend(HSVPalette, c.Container);
// Build the display list (hue rect, selector, overlay) and cache the
// geometry later used to clamp the selector inside the palette bounds.
p._init = function (overlayGradientImage, selectorImage) {
    this._hueRect = new c.Shape();
    this.addChild(this._hueRect);
    this._selector = new selectorImage();
    this.addChild(this._selector);
    var selectorRect = nts.Painter.SpriteSheetUtil.getRect(this._selector);
    this._selectorHalfWidth = selectorRect.width / 2;
    this._selectorHalfHeight = selectorRect.height / 2;
    this._overlayImage = new overlayGradientImage();
    this.addChild(this._overlayImage);
    var overlayImageRect = nts.Painter.SpriteSheetUtil.getRect(this._overlayImage);
    // The palette adopts the overlay gradient's dimensions.
    this.width = overlayImageRect.width;
    this.height = overlayImageRect.height;
    this._maxSelectorX = this.width - selectorRect.width;
    this._maxSelectorY = this.height - selectorRect.height;
    this._vo = new nts.Painter.HSVPaletteVO();
    this._addMouseDownEvent();
    // Redraw once the control is actually attached to a stage.
    this._addedToStageListener = this.on('added', this._addedToStageHandler, this);
};
// Redraw the stage, if this control is attached to one.
p.update = function () {
    if (this.stage)
        this.stage.update();
};
// One-shot handler: refresh once after being added, then detach itself.
p._addedToStageHandler = function () {
    this.update();
    this.off('added', this._addedToStageListener);
};
// Fill the base rectangle with the given hue colour (hex string) and
// mirror its RGB components into the value object.
p.setHueColor = function (color) {
    this._hueRect.graphics.clear();
    this._hueRect.graphics
        .beginFill(color)
        .drawRect(0, 0, this.width, this.height)
        .endFill();
    var rgb = nts.Painter.ColorUtil.hexToRgb(color);
    this._vo.red = rgb.r;
    this._vo.green = rgb.g;
    this._vo.blue = rgb.b;
    this._vo.color = color;
    this.update();
};
/**
 * Derives the current color from the selector position: hue comes from the
 * external hue bar VO, saturation from the x position, value from the
 * (inverted) y position. Stores everything in the VO and fires 'colorChange'.
 * @param {Object} localPoint point in palette-local coordinates.
 */
p._colorCalculationsByPosition = function (localPoint) {
    var hue = Math.min(360, Math.max(0, this._hueBarVO.selectorY / this._hueBarVO.barHeight * 360));
    var saturation = Math.min(100, Math.max(0, localPoint.x / this.width * 100));
    // y grows downward, so value is inverted relative to the pointer.
    var value = Math.min(100, Math.max(0, -(localPoint.y / this.height * 100) + 100));
    var rgb = nts.Painter.ColorUtil.hsvToRgb(hue, saturation, value);
    this._vo.red = rgb[0];
    this._vo.green = rgb[1];
    this._vo.blue = rgb[2];
    this._vo.color = '#' + nts.Painter.ColorUtil.rgbToHex(rgb[0], rgb[1], rgb[2]);
    this._vo.hue = hue;
    this._vo.value = value;
    this._vo.saturation = saturation;
    this._dispatchChangeEvent();
};
/**
 * Updates the VO from a normalized [h, s, v] triple (each component 0..1).
 * NOTE(review): hue/saturation/value are stored normalized (0..1) here,
 * whereas _colorCalculationsByPosition stores them scaled (0..360 / 0..100).
 * Confirm which unit consumers of the VO actually expect.
 * @param {Array} hsv normalized [hue, saturation, value].
 */
p._colorCalculationsByHsv = function (hsv) {
    var rgb = nts.Painter.ColorUtil.hsvToRgb(hsv[0] * 360, hsv[1] * 100, hsv[2] * 100);
    this._vo.red = rgb[0];
    this._vo.green = rgb[1];
    this._vo.blue = rgb[2];
    this._vo.color = '#' + nts.Painter.ColorUtil.rgbToHex(rgb[0], rgb[1], rgb[2]);
    this._vo.hue = hsv[0];
    this._vo.value = hsv[2];
    this._vo.saturation = hsv[1];
};
/**
 * Centers the selector sprite on localPoint, clamped so it stays fully
 * inside the palette bounds.
 * @param {Object} localPoint point in palette-local coordinates.
 */
p._updateCursor = function (localPoint) {
    var targetX = localPoint.x - this._selectorHalfWidth;
    var targetY = localPoint.y - this._selectorHalfHeight;
    this._selector.x = Math.min(this._maxSelectorX, Math.max(0, targetX));
    this._selector.y = Math.min(this._maxSelectorY, Math.max(0, targetY));
};
/**
 * Recomputes selector position and color from a stage-space mouse event,
 * then redraws.
 * @param {Object} event mouse event carrying stageX/stageY.
 */
p._updateByStageMouseEvent = function (event) {
    var local = this.globalToLocal(event.stageX, event.stageY);
    this._vo.selectorPoint = local;
    this._updateCursor(local);
    this._colorCalculationsByPosition(local);
    this.update();
};
/** Arms the palette: a mousedown on the backdrop starts a drag. */
p._addMouseDownEvent = function () {
    // Fixed misleading name: this listener tracks 'mousedown', not mouse-over.
    this._mouseDownListener =
        this._hueRect.on('mousedown', this._mouseDownHandler, this);
};
p._removeMouseDownEvent = function () {
    this._hueRect.off('mousedown', this._mouseDownListener);
};
/**
 * While a drag is active, track the mouse on the stage so moves outside the
 * palette (and the final release) are still observed.
 * Uses this.stage instead of relying on a bare global `stage`: these methods
 * only run while this container is on a stage, so this.stage is set here.
 */
p._addMouseMoveEvent = function () {
    this._mouseUpListener =
        this.stage.on('stagemouseup', this._mouseUpHandler, this);
    this._mouseMoveListener =
        this.stage.on('stagemousemove', this._mouseMoveHandler, this);
};
p._removeMouseMoveEvent = function () {
    this.stage.off('stagemouseup', this._mouseUpListener);
    this.stage.off('stagemousemove', this._mouseMoveListener);
};
/** Notifies listeners that the palette's color VO changed. */
p._dispatchChangeEvent = function () {
    this.dispatchEvent('colorChange');
};
/** Begins a drag: switch from local mousedown tracking to stage-wide tracking. */
p._mouseDownHandler = function (event) {
    this._removeMouseDownEvent();
    this._addMouseMoveEvent();
    this._updateByStageMouseEvent(event);
};
/** Ends a drag: restore local mousedown tracking. */
p._mouseUpHandler = function () {
    this._addMouseDownEvent();
    this._removeMouseMoveEvent();
};
/** Forwards drag movement to the shared update path. */
p._mouseMoveHandler = function (event) {
    this._updateByStageMouseEvent(event);
};
/** @returns {Object} the VO of the external hue bar this palette follows. */
p.getHueBarVO = function () {
    return this._hueBarVO;
};
/** Stores the hue bar VO and repaints the backdrop with its color. */
p.setHueBarVO = function (value) {
    this._hueBarVO = value;
    this.setHueColor(value.color);
};
/** @returns {Object} the palette's current color VO. */
p.getVO = function () {
    return this._vo;
};
/** Restores a previously captured VO, moving the selector to its recorded spot. */
p.setVO = function (value) {
    this._vo = value;
    this._selector.x = value.selectorPoint.x;
    this._selector.y = value.selectorPoint.y;
};
/** Positions the selector and recomputes the VO from a normalized [h, s, v]. */
p.setSaturationAndValue = function (hsv) {
    // '|| 0' guards against NaN when an hsv component is missing.
    this._updateCursor({x: hsv[1] * this.width || 0, y: this.height - hsv[2] * this.height || 0});
    this._colorCalculationsByHsv(hsv);
};
nts.Painter.HSVPalette = createjs.promote(HSVPalette, 'Container');
})(); |
/*
* Copyright 2020 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.internal.jni;
import org.bson.BsonValue;
import org.bson.codecs.Codec;
import org.bson.codecs.Decoder;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.Encoder;
import org.bson.codecs.EncoderContext;
import org.bson.codecs.configuration.CodecConfigurationException;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.json.JsonMode;
import org.bson.json.JsonReader;
import org.bson.json.JsonWriter;
import org.bson.json.JsonWriterSettings;
import java.io.StringReader;
import java.io.StringWriter;
import io.realm.mongodb.ErrorCode;
import io.realm.mongodb.AppException;
/**
 * Protocol for passing {@link BsonValue}s to JNI.
 * <p>
 * For now this just encapsulates the BSON value in a document with key {@value VALUE}. This
 * overcomes the shortcoming of {@code org.bson.JsonWriter} not being able to serialize single
 * values.
 */
public class JniBsonProtocol {

    private static final String VALUE = "value";

    // Extended JSON mode preserves BSON type information (e.g. $numberLong)
    // across the JNI boundary.
    private static final JsonWriterSettings writerSettings = JsonWriterSettings.builder()
            .outputMode(JsonMode.EXTENDED)
            .build();

    /**
     * Encodes a value as an extended-JSON document {@code {"value": <value>}}.
     *
     * @param value    value to encode.
     * @param registry registry used to look up a codec for the value's runtime class.
     * @param <T>      type of the value.
     * @return the wrapped, extended-JSON encoded value.
     * @throws AppException with {@link ErrorCode#BSON_CODEC_NOT_FOUND} if no codec is registered
     *                      for the value's class, or the codes documented on
     *                      {@link #encode(Object, Encoder)}.
     */
    @SuppressWarnings("unchecked") // the codec registered for value.getClass() can encode value
    public static <T> String encode(T value, CodecRegistry registry) {
        // catch possible missing codecs before the actual encoding
        return encode(value, (Encoder<T>) getCodec(value.getClass(), registry));
    }

    /**
     * Encodes a value as an extended-JSON document {@code {"value": <value>}} with the given
     * encoder.
     *
     * @param value   value to encode.
     * @param encoder encoder used to convert the value to BSON.
     * @param <T>     type of the value.
     * @return the wrapped, extended-JSON encoded value.
     * @throws AppException with {@link ErrorCode#BSON_CODEC_NOT_FOUND} if a codec for a nested
     *                      type is missing, or {@link ErrorCode#BSON_ENCODING} for any other
     *                      encoding failure.
     */
    public static <T> String encode(T value, Encoder<T> encoder) {
        try {
            StringWriter stringWriter = new StringWriter();
            JsonWriter jsonWriter = new JsonWriter(stringWriter, writerSettings);
            jsonWriter.writeStartDocument();
            jsonWriter.writeName(VALUE);
            encoder.encode(jsonWriter, value, EncoderContext.builder().build());
            jsonWriter.writeEndDocument();
            return stringWriter.toString();
        } catch (CodecConfigurationException e) {
            // same exception as in the guard above, but needed here as well nonetheless as the
            // result might be wrapped inside an iterable or a map and the codec for the end type
            // might be missing
            throw new AppException(ErrorCode.BSON_CODEC_NOT_FOUND, "Could not resolve encoder for end type", e);
        } catch (Exception e) {
            throw new AppException(ErrorCode.BSON_ENCODING, "Error encoding value", e);
        }
    }

    /**
     * Decodes a value from an extended-JSON document {@code {"value": <value>}}.
     *
     * @param string   wrapped, extended-JSON encoded value.
     * @param clz      class to decode the value into.
     * @param registry registry used to look up a codec for {@code clz}.
     * @param <T>      type of the decoded value.
     * @return the decoded value.
     * @throws AppException with {@link ErrorCode#BSON_CODEC_NOT_FOUND} if no codec is registered
     *                      for {@code clz}, or the codes documented on
     *                      {@link #decode(String, Decoder)}.
     */
    public static <T> T decode(String string, Class<T> clz, CodecRegistry registry) {
        // catch possible missing codecs before the actual decoding
        return decode(string, getCodec(clz, registry));
    }

    /**
     * Decodes a value from an extended-JSON document {@code {"value": <value>}} with the given
     * decoder.
     *
     * @param string  wrapped, extended-JSON encoded value.
     * @param decoder decoder used to convert the BSON value.
     * @param <T>     type of the decoded value.
     * @return the decoded value.
     * @throws AppException with {@link ErrorCode#BSON_CODEC_NOT_FOUND} if a codec for a nested
     *                      type is missing, or {@link ErrorCode#BSON_DECODING} for any other
     *                      decoding failure.
     */
    public static <T> T decode(String string, Decoder<T> decoder) {
        try {
            StringReader stringReader = new StringReader(string);
            JsonReader jsonReader = new JsonReader(stringReader);
            jsonReader.readStartDocument();
            jsonReader.readName(VALUE);
            T value = decoder.decode(jsonReader, DecoderContext.builder().build());
            jsonReader.readEndDocument();
            return value;
        } catch (CodecConfigurationException e) {
            // same exception as in the guard above, but needed here as well nonetheless as the
            // result might be wrapped inside an iterable or a map and the codec for the end type
            // might be missing
            // (fixed: missing space before the offending payload in the message)
            throw new AppException(ErrorCode.BSON_CODEC_NOT_FOUND, "Could not resolve decoder for end type " + string, e);
        } catch (Exception e) {
            throw new AppException(ErrorCode.BSON_DECODING, "Error decoding value " + string, e);
        }
    }

    /**
     * Looks up the codec for a class, translating a missing registration into an
     * {@link AppException}.
     *
     * @param clz      class to resolve.
     * @param registry registry to resolve against.
     * @param <T>      type handled by the codec.
     * @return the registered codec.
     * @throws AppException with {@link ErrorCode#BSON_CODEC_NOT_FOUND} if no codec is registered.
     */
    public static <T> Codec<T> getCodec(Class<T> clz, CodecRegistry registry) {
        try {
            return registry.get(clz);
        } catch (CodecConfigurationException e) {
            throw new AppException(ErrorCode.BSON_CODEC_NOT_FOUND, "Could not resolve codec for " + clz.getSimpleName(), e);
        }
    }
}
|
def is_palindrome(string):
    """Return True if ``string`` reads the same forwards and backwards.

    The empty string and single characters count as palindromes. The
    comparison is exact (case- and whitespace-sensitive), matching the
    original recursive implementation, but this version runs in O(n)
    instead of O(n^2) slicing per recursion level and cannot hit the
    recursion limit on long inputs.
    """
    return string == string[::-1]
package evilcraft.blocks;
import evilcraft.api.config.BlockConfig;
/**
 * Config for {@link FluidBlockPoison}.
 * @author rubensworks
 *
 */
public class FluidBlockPoisonConfig extends BlockConfig {

    /**
     * The unique instance.
     */
    public static FluidBlockPoisonConfig _instance;

    /**
     * Make a new instance.
     */
    public FluidBlockPoisonConfig() {
        // enabled flag, unlocalized name, config property (none), block class
        super(true, "blockPoison", null, FluidBlockPoison.class);
    }

    @Override
    public boolean isDisableable() {
        // This config can not be switched off.
        return false;
    }

}
|
# Usage: <script> <bearer-token>
# Lists the pods in the 'default' namespace via the Kubernetes API server.
#
# NOTE(review): -k disables TLS certificate verification; prefer
# --cacert with the cluster CA certificate if it is available.
curl -H "Authorization: Bearer $1" \
    -k https://172.31.12.61:6443/api/v1/namespaces/default/pods

# Echo the token that was used (now quoted, so it is printed verbatim
# instead of being word-split/glob-expanded by the shell).
# NOTE(review): this prints the bearer token to stdout/logs — drop it if
# the token is sensitive.
echo "$1"
|
import { getOptions } from 'loader-utils'
import validateOptions from 'schema-utils'
import runtime from './loader-runtime'
import optionsSchema from './options.json'
const loaderName = 'AvifWebpackLoader'

/**
 * Error type thrown by the loader so failures are attributable to it by name.
 * `super(message)` already assigns `this.message`, so the previous redundant
 * re-assignment has been removed.
 */
class AvifWebpackLoaderError extends Error {
  constructor(message) {
    super(message)
    this.name = `${loaderName}Error`
    // Trim this constructor from the captured stack trace (V8/Node only).
    Error.captureStackTrace(this, AvifWebpackLoaderError)
  }
}
/**
 * Webpack loader entry point: hands the source image to the AVIF runtime.
 * @param {Buffer} source raw image contents (see the `raw` export).
 * @returns {Promise<*>} the runtime's result, or null when bypassed in debug.
 */
export default async function loader(source) {
  // Per-rule options from the webpack config, validated against the schema.
  const loaderOptions = getOptions(this)
  validateOptions(optionsSchema, loaderOptions, loaderName)

  // Global options live under `avifLoader` (or a custom `config` key).
  const configKey = loaderOptions.config || 'avifLoader'
  const globalOptions = (this.options && this.options[configKey]) || {}
  validateOptions(optionsSchema, globalOptions, loaderName)

  // NOTE(review): global options are spread last and therefore override
  // per-rule options — confirm this precedence is intended.
  const options = { ...loaderOptions, ...globalOptions }

  // Debug bypass, settable as:
  // - webpack@1.x: { bypassOnDebug: true }
  // - webpack@2.x and newer: { disable: true }
  if (this.debug === true && (options.bypassOnDebug || options.disable)) {
    return null
  }

  try {
    return await runtime({ source, options })
  } catch (error) {
    throw new AvifWebpackLoaderError('Check base image extension/integrity')
  }
}
// Marks this loader as "raw": webpack then passes the image as a Buffer
// instead of a UTF-8 string, which is required for binary image data.
export const raw = true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.