text stringlengths 1 1.05M |
|---|
import { Operation, Point } from '..'
/**
 * `PointRef` objects keep a specific point in a document synced over time as new
 * operations are applied to the editor. You can access their `current` property
 * at any time for the up-to-date point value.
 */
export interface PointRef {
  /** The up-to-date point, or `null` once the point can no longer be tracked. */
  current: Point | null
  /** Bias passed to `Point.transform` when syncing the point through an operation. */
  affinity: 'forward' | 'backward' | null
  /** Stop tracking and return the final point value (possibly `null`). */
  unref(): Point | null
}
export const PointRef = {
/**
* Transform the point ref's current value by an operation.
*/
transform(ref: PointRef, op: Operation): void {
const { current, affinity } = ref
if (current == null) {
return
}
const point = Point.transform(current, op, { affinity })
ref.current = point
if (point == null) {
ref.unref()
}
},
}
|
from pandas import DataFrame
def double_grouped_data(grouped_df):
    """Return a new DataFrame with each group's 'x' values doubled.

    The result is ordered group-by-group (groupby's key order), with the
    'g' label column kept aligned with the doubled 'x' values.

    Fixes: the original built 'g' via ``grouped_df['g'].values.repeat(2)``,
    which (a) is not a valid attribute chain on a SeriesGroupBy and
    (b) would produce 2n labels for n doubled values, so the DataFrame
    constructor would fail with a length mismatch. Collecting the labels
    from each group keeps the columns the same length and aligned.

    :param grouped_df: a pandas ``DataFrameGroupBy`` over a frame that has
        'x' (numeric) and 'g' (group label) columns.
    :return: a new ``DataFrame`` with columns 'x' (doubled) and 'g'.
    """
    modified_x = []
    modified_g = []
    # Iterate groups in the grouper's key order; within each group,
    # double the x values and carry the matching labels along.
    for group_key in grouped_df.groups:
        group = grouped_df.get_group(group_key)
        modified_x.extend(group['x'] * 2)
        modified_g.extend(group['g'])
    return DataFrame({'x': modified_x, 'g': modified_g})
$(document).ready(function() {
var elems = document.getElementsByClassName("hole");
var check = jQuery.makeArray(elems);
var playerWin = 0;
var computerWin = 0;
var tie = 0;
var win = false;
var loss = false;
var madeMove = false;
var elemsOne = document.getElementById("row-one").getElementsByClassName("hole");
var rowOne = jQuery.makeArray(elemsOne);
var elemsTwo = document.getElementById("row-two").getElementsByClassName("hole");
var rowTwo = jQuery.makeArray(elemsTwo);
var elemsThree = document.getElementById("row-three").getElementsByClassName("hole");
var rowThree = jQuery.makeArray(elemsThree);
var elemsFour = document.getElementById("row-four").getElementsByClassName("hole");
var rowFour = jQuery.makeArray(elemsFour);
var elemsFive = document.getElementById("row-five").getElementsByClassName("hole");
var rowFive = jQuery.makeArray(elemsFive);
var elemsSix = document.getElementById("row-six").getElementsByClassName("hole");
var rowSix = jQuery.makeArray(elemsSix);
var elemsSeven = document.getElementById("row-seven").getElementsByClassName("hole");
var rowSeven = jQuery.makeArray(elemsSeven);
$("#row-one").on("click", function() {
if (rowOne.length > 0) {
$(rowOne[rowOne.length - 1]).addClass("black");
rowOne.splice(rowOne.indexOf(this), 1);
checkForWin();
checkForTie();
setTimeout(computerMove(), 100);
}
})
$("#row-two").on("click", function() {
if (rowTwo.length > 0) {
$(rowTwo[rowTwo.length - 1]).addClass("black");
rowTwo.splice(rowTwo.indexOf(this), 1);
checkForWin();
checkForTie();
setTimeout(computerMove(), 100);
}
})
$("#row-three").on("click", function() {
if (rowThree.length > 0) {
$(rowThree[rowThree.length - 1]).addClass("black");
rowThree.splice(rowThree.indexOf(this), 1);
checkForWin();
checkForTie();
setTimeout(computerMove(), 100);
}
})
$("#row-four").on("click", function() {
if (rowFour.length > 0) {
$(rowFour[rowFour.length - 1]).addClass("black");
rowFour.splice(rowFour.indexOf(this), 1);
checkForWin();
checkForTie();
setTimeout(computerMove(), 100);
}
})
$("#row-five").on("click", function() {
if (rowFive.length > 0) {
$(rowFive[rowFive.length - 1]).addClass("black");
rowFive.splice(rowFive.indexOf(this), 1);
checkForWin();
checkForTie();
setTimeout(computerMove(), 100);
}
})
$("#row-six").on("click", function() {
if (rowSix.length > 0) {
$(rowSix[rowSix.length - 1]).addClass("black");
rowSix.splice(rowSix.indexOf(this), 1);
checkForWin();
checkForTie();
setTimeout(computerMove(), 100);
}
})
$("#row-seven").on("click", function() {
if (rowSeven.length > 0) {
$(rowSeven[rowSeven.length - 1]).addClass("black");
rowSeven.splice(rowSeven.indexOf(this), 1);
checkForWin();
checkForTie();
setTimeout(computerMove(), 100);
}
})
function computerMove() {
smartMove();
if (madeMove === false) {
var rowArr = [rowOne, rowTwo, rowThree, rowFour, rowFive, rowSix, rowSeven]
//removing full columns
if (rowOne.length < 1) {
rowArr.splice(rowArr.indexOf(rowOne), 1);
}
if (rowTwo.length < 1) {
rowArr.splice(rowArr.indexOf(rowTwo), 1);
}
if (rowThree.length < 1) {
rowArr.splice(rowArr.indexOf(rowThree), 1);
}
if (rowFour.length < 1) {
rowArr.splice(rowArr.indexOf(rowFour), 1);
}
if (rowFive.length < 1) {
rowArr.splice(rowArr.indexOf(rowFive), 1);
}
if (rowSix.length < 1) {
rowArr.splice(rowArr.indexOf(rowSix), 1);
}
if (rowSeven.length < 1) {
rowArr.splice(rowArr.indexOf(rowSeven), 1);
}
var randRow = Math.floor(Math.random() * rowArr.length)
//where the computer places its chip
if (rowArr[randRow] === rowOne) {
if (rowOne.length > 0) {
$(rowOne[rowOne.length - 1]).addClass("red");
rowOne.splice(rowOne.indexOf(this), 1);
}
} else if (rowArr[randRow] === rowTwo) {
if (rowTwo.length > 0) {
$(rowTwo[rowTwo.length - 1]).addClass("red");
rowTwo.splice(rowTwo.indexOf(this), 1);
}
} else if (rowArr[randRow] === rowThree) {
if (rowThree.length > 0) {
$(rowThree[rowThree.length - 1]).addClass("red");
rowThree.splice(rowThree.indexOf(this), 1);
}
} else if (rowArr[randRow] === rowFour) {
if (rowFour.length > 0) {
$(rowFour[rowFour.length - 1]).addClass("red");
rowFour.splice(rowFour.indexOf(this), 1);
}
} else if (rowArr[randRow] === rowFive) {
if (rowFive.length > 0) {
$(rowFive[rowFive.length - 1]).addClass("red");
rowFive.splice(rowFive.indexOf(this), 1);
}
} else if (rowArr[randRow] === rowSix) {
if (rowSix.length > 0) {
$(rowSix[rowSix.length - 1]).addClass("red");
rowSix.splice(rowSix.indexOf(this), 1);
}
} else if (rowArr[randRow] === rowSeven) {
if (rowSeven.length > 0) {
$(rowSeven[rowSeven.length - 1]).addClass("red");
rowSeven.splice(rowSeven.indexOf(this), 1);
}
}
madeMove = true;
}
checkForLoss();
checkForTie();
};
function checkForWin() {
for (var i = 0; i < 42; i++) {
if ($(check[i]).hasClass("black") && $(check[(i + 5)]).hasClass("black") && $(check[(i + 10)]).hasClass("black") && $(check[(i + 15)]).hasClass("black")) {
if (i !== 0 && i !== 1 && i !== 2 && i !== 6 && i !== 7 && i !== 8 && i !== 12 && i !== 13 && i !== 14 && i !== 18 && i !== 19 && i !== 20 && i !== 24 && i !== 25 && i !== 26) {
win = true;
console.log("forward")
}
} else if ($(check[i]).hasClass("black") && $(check[(i + 7)]).hasClass("black") && $(check[(i + 14)]).hasClass("black") && $(check[(i + 21)]).hasClass("black")) {
if (i !== 3 && i !== 4 && i !== 5 && i !== 9 && i !== 10 && i !== 11 && i !== 15 && i !== 16 && i !== 17 && i !== 21 && i !== 22 && i !== 23) {
win = true;
console.log("back")
}
} else if ($(check[i]).hasClass("black") && $(check[(i + 6)]).hasClass("black") && $(check[(i + 12)]).hasClass("black") && $(check[(i + 18)]).hasClass("black")) {
win = true;
console.log("horizontal")
} else if ($(check[i]).hasClass("black") && $(check[(i + 1)]).hasClass("black") && $(check[(i + 2)]).hasClass("black") && $(check[(i + 3)]).hasClass("black")) {
if (i != 3 && i != 4 && i != 5 && i != 9 && i != 10 && i != 11 && i != 15 && i != 16 && i != 17 && i != 21 && i != 22 && i != 23 && i != 27 && i != 28 && i != 29 && i != 33 && i != 34 && i != 35) {
win = true;
console.log("vertical")
}
}
madeMove = false;
}
if (win === true) {
alert("win");
clearBoard();
playerWin++;
$(".player").addClass("number");
$(".player").text(playerWin);
}
};
function checkForLoss() {
for (var i = 0; i < 42; i++) {
if ($(check[i]).hasClass("red") && $(check[(i + 5)]).hasClass("red") && $(check[(i + 10)]).hasClass("red") && $(check[(i + 15)]).hasClass("red")) {
if (i !== 0 && i !== 1 && i !== 2 && i !== 6 && i !== 7 && i !== 8 && i !== 12 && i !== 13 && i !== 14 && i !== 18 && i !== 19 && i !== 20 && i !== 24 && i !== 25 && i !== 26) {
loss = true;
console.log("forward")
}
} else if ($(check[i]).hasClass("red") && $(check[(i + 7)]).hasClass("red") && $(check[(i + 14)]).hasClass("red") && $(check[(i + 21)]).hasClass("red")) {
if (i !== 3 && i !== 4 && i !== 5 && i !== 9 && i !== 10 && i !== 11 && i !== 15 && i !== 16 && i !== 17 && i !== 21 && i !== 22 && i !== 23) {
loss = true;
console.log("back")
}
} else if ($(check[i]).hasClass("red") && $(check[(i + 6)]).hasClass("red") && $(check[(i + 12)]).hasClass("red") && $(check[(i + 18)]).hasClass("red")) {
loss = true;
console.log("horizontal")
} else if ($(check[i]).hasClass("red") && $(check[(i + 1)]).hasClass("red") && $(check[(i + 2)]).hasClass("red") && $(check[(i + 3)]).hasClass("red")) {
if (i != 3 && i != 4 && i != 5 && i != 9 && i != 10 && i != 11 && i != 15 && i != 16 && i != 17 && i != 21 && i != 22 && i != 23 && i != 27 && i != 28 && i != 29 && i != 33 && i != 34 && i != 35) {
loss = true;
console.log("vertical")
}
}
}
if (loss === true) {
alert("You Lose");
clearBoard();
computerWin++;
$(".computer").addClass("number");
$(".computer").text(computerWin);
}
};
function checkForTie() {
if (win === false && loss === false && rowOne.length === 0 && rowTwo.length === 0 && rowThree.length === 0 && rowFour.length === 0 && rowFive.length === 0 && rowSix.length === 0 && rowSeven.length === 0) {
alert("Try Again!");
tie++;
clearBoard();
$(".ties").addClass("number");
$(".ties").text(tie);
}
};
function clearBoard() {
$(".hole").removeClass("black");
$(".hole").removeClass("red");
win = false;
loss = false;
rowOne = jQuery.makeArray(elemsOne);
rowTwo = jQuery.makeArray(elemsTwo);
rowThree = jQuery.makeArray(elemsThree);
rowFour = jQuery.makeArray(elemsFour);
rowFive = jQuery.makeArray(elemsFive);
rowSix = jQuery.makeArray(elemsSix);
rowSeven = jQuery.makeArray(elemsSeven);
};
function smartMove() {
for (var i = 0; i < 42; i++) {
//forward slash
if ($(check[i]).hasClass("black") && $(check[(i + 5)]).hasClass("black") && $(check[(i + 10)]).hasClass("black") && !$(check[(i + 15)]).hasClass("black") && !$(check[(i + 15)]).hasClass("red") && madeMove === false) {
if (i !== 0 && i !== 1 && i !== 2 && i !== 6 && i !== 7 && i !== 8 && i !== 12 && i !== 13 && i !== 14 && i !== 18 && i !== 19 && i !== 20 && i !== 24 && i !== 25 && i !== 26) {
if ($(check[(i + 15) + 1]).hasClass("black") || $(check[(i + 15) + 1]).hasClass("red")) {
$(check[(i + 15)]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
if ($(check[i]).hasClass("black") && $(check[(i + 5)]).hasClass("black") && $(check[(i + 15)]).hasClass("black") && !$(check[(i + 10)]).hasClass("black") && !$(check[(i + 10)]).hasClass("red") && madeMove === false) {
if (i !== 0 && i !== 1 && i !== 2 && i !== 6 && i !== 7 && i !== 8 && i !== 12 && i !== 13 && i !== 14 && i !== 18 && i !== 19 && i !== 20 && i !== 24 && i !== 25 && i !== 26) {
if ($(check[(i + 10) + 1]).hasClass("black") || $(check[(i + 10) + 1]).hasClass("red")) {
$(check[(i + 10)]).addClass("red")
spliceIt();
madeMove = true;
}
}
}
if ($(check[i]).hasClass("black") && !$(check[(i + 5)]).hasClass("black") && !$(check[(i + 5)]).hasClass("red") && $(check[(i + 10)]).hasClass("black") && $(check[(i + 15)]).hasClass("black") && madeMove === false) {
if (i !== 0 && i !== 1 && i !== 2 && i !== 6 && i !== 7 && i !== 8 && i !== 12 && i !== 13 && i !== 14 && i !== 18 && i !== 19 && i !== 20 && i !== 24 && i !== 25 && i !== 26) {
if ($(check[(i + 5) + 1]).hasClass("black") || $(check[(i + 5) + 1]).hasClass("red")) {
$(check[(i + 5)]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
if (!$(check[i]).hasClass("black") && !$(check[i]).hasClass("red") && $(check[(i + 5)]).hasClass("black") && $(check[(i + 10)]).hasClass("black") && $(check[(i + 15)]).hasClass("black") && madeMove === false) {
if (i !== 0 && i !== 1 && i !== 2 && i !== 6 && i !== 7 && i !== 8 && i !== 12 && i !== 13 && i !== 14 && i !== 18 && i !== 19 && i !== 20 && i !== 24 && i !== 25 && i !== 26) {
if ($(check[(i) + 1]).hasClass("black") || $(check[(i) + 1]).hasClass("red")) {
$(check[i]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
//back slash
if (!$(check[i]).hasClass("black") && !$(check[i]).hasClass("red") && $(check[(i + 7)]).hasClass("black") && $(check[(i + 14)]).hasClass("black") && $(check[(i + 21)]).hasClass("black") && madeMove === false) {
if (i !== 3 && i !== 4 && i !== 5 && i !== 9 && i !== 10 && i !== 11 && i !== 15 && i !== 16 && i !== 17 && i !== 21 && i !== 22 && i !== 23) {
if ($(check[(i) + 1]).hasClass("black") || $(check[(i) + 1]).hasClass("red")) {
$(check[i]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
if ($(check[i]).hasClass("black") && !$(check[(i + 7)]).hasClass("black") && !$(check[(i + 7)]).hasClass("red") && $(check[(i + 14)]).hasClass("black") && $(check[(i + 21)]).hasClass("black") && madeMove === false) {
if (i !== 3 && i !== 4 && i !== 5 && i !== 9 && i !== 10 && i !== 11 && i !== 15 && i !== 16 && i !== 17 && i !== 21 && i !== 22 && i !== 23) {
if ($(check[(i + 7) + 1]).hasClass("black") || $(check[(i + 7) + 1]).hasClass("red")) {
$(check[(i + 7)]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
if ($(check[i]).hasClass("black") && $(check[(i + 7)]).hasClass("black") && !$(check[(i + 14)]).hasClass("black") && !$(check[(i + 14)]).hasClass("red") && $(check[(i + 21)]).hasClass("black") && madeMove === false) {
if (i !== 3 && i !== 4 && i !== 5 && i !== 9 && i !== 10 && i !== 11 && i !== 15 && i !== 16 && i !== 17 && i !== 21 && i !== 22 && i !== 23) {
if ($(check[(i + 14) + 1]).hasClass("black") || $(check[(i + 14) + 1]).hasClass("red")) {
$(check[(i + 14)]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
if ($(check[i]).hasClass("black") && $(check[(i + 7)]).hasClass("black") && $(check[(i + 14)]).hasClass("black") && !$(check[(i + 21)]).hasClass("black") && !$(check[(i + 21)]).hasClass("red") && madeMove === false) {
if (i !== 3 && i !== 4 && i !== 5 && i !== 9 && i !== 10 && i !== 11 && i !== 15 && i !== 16 && i !== 17 && i !== 21 && i !== 22 && i !== 23) {
if ($(check[(i + 21) + 1]).hasClass("black") || $(check[(i + 21) + 1]).hasClass("red")) {
$(check[(i + 21)]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
//horizontal
if (!$(check[i]).hasClass("black") && !$(check[i]).hasClass("red") && $(check[(i + 6)]).hasClass("black") && $(check[(i + 12)]).hasClass("black") && $(check[(i + 18)]).hasClass("black") && madeMove === false) {
if(i == 5 || i == 11 || i == 17 || i == 23 || i == 29 || i == 35 || i == 41){
$(check[i]).addClass("red");
spliceIt();
madeMove = true;
}else if ($(check[(i) + 1]).hasClass("black") || $(check[(i) + 1]).hasClass("red")){
$(check[i]).addClass("red");
spliceIt();
madeMove = true;
}
}
if ($(check[i]).hasClass("black") && !$(check[(i + 6)]).hasClass("black") && !$(check[(i + 6)]).hasClass("red") && $(check[(i + 12)]).hasClass("black") && $(check[(i + 18)]).hasClass("black") && madeMove === false) {
if(i == 5 || i == 11 || i == 17 || i == 23 || i == 29 || i == 35 || i == 41){
$(check[i + 6]).addClass("red");
spliceIt();
madeMove = true;
}
else if ($(check[(i + 6) + 1]).hasClass("black") || $(check[(i + 6) + 1]).hasClass("red")) {
$(check[(i + 6)]).addClass("red");
spliceIt();
madeMove = true;
}
}
if ($(check[i]).hasClass("black") && $(check[(i + 6)]).hasClass("black") && !$(check[(i + 12)]).hasClass("black") && !$(check[(i + 12)]).hasClass("red") && $(check[(i + 18)]).hasClass("black") && madeMove === false) {
if(i == 5 || i == 11 || i == 17 || i == 23 || i == 29 || i == 35 || i == 41){
$(check[i + 12]).addClass("red");
spliceIt();
madeMove = true;
}
else if ($(check[(i + 12) + 1]).hasClass("black") || $(check[(i + 12) + 1]).hasClass("red")) {
$(check[(i + 12)]).addClass("red");
spliceIt();
madeMove = true;
}
}
if ($(check[i]).hasClass("black") && $(check[(i + 6)]).hasClass("black") && $(check[(i + 12)]).hasClass("black") && !$(check[(i + 18)]).hasClass("black") && !$(check[(i + 18)]).hasClass("red") && madeMove === false) {
if(i == 5 || i == 11 || i == 17 || i == 23 || i == 29 || i == 35 || i == 41){
$(check[i + 18]).addClass("red");
spliceIt();
madeMove = true;
}
else if ($(check[(i + 18) + 1]).hasClass("black") || $(check[(i + 18) + 1]).hasClass("red")) {
$(check[(i + 18)]).addClass("red");
spliceIt();
madeMove = true;
}
}
//vertical
if (!$(check[i]).hasClass("black") && !$(check[i]).hasClass("red") && $(check[(i + 1)]).hasClass("black") && $(check[(i + 2)]).hasClass("black") && $(check[(i + 3)]).hasClass("black") && madeMove === false) {
if (i != 3 && i != 4 && i != 5 && i != 9 && i != 10 && i != 11 && i != 15 && i != 16 && i != 17 && i != 21 && i != 22 && i != 23 && i != 27 && i != 28 && i != 29 && i != 33 && i != 34 && i != 35) {
$(check[i]).addClass("red");
spliceIt();
madeMove = true;
}
}
if ($(check[i]).hasClass("black") && !$(check[(i + 1)]).hasClass("black") && !$(check[(i + 1)]).hasClass("red") && $(check[(i + 2)]).hasClass("black") && $(check[(i + 3)]).hasClass("black") && madeMove === false) {
if (i != 3 && i != 4 && i != 5 && i != 9 && i != 10 && i != 11 && i != 15 && i != 16 && i != 17 && i != 21 && i != 22 && i != 23 && i != 27 && i != 28 && i != 29 && i != 33 && i != 34 && i != 35) {
$(check[(i + 1)]).addClass("red");
spliceIt();
madeMove = true;
}
}
if ($(check[i]).hasClass("black") && $(check[(i + 1)]).hasClass("black") && !$(check[(i + 2)]).hasClass("black") && !$(check[(i + 2)]).hasClass("red") && $(check[(i + 3)]).hasClass("black") && madeMove === false) {
if (i != 3 && i != 4 && i != 5 && i != 9 && i != 10 && i != 11 && i != 15 && i != 16 && i != 17 && i != 21 && i != 22 && i != 23 && i != 27 && i != 28 && i != 29 && i != 33 && i != 34 && i != 35) {
$(check[(i + 2)]).addClass("red");
spliceIt();
madeMove = true;
}
}
if ($(check[i]).hasClass("black") && $(check[(i + 1)]).hasClass("black") && $(check[(i + 2)]).hasClass("black") && !$(check[(i + 3)]).hasClass("black") && !$(check[(i + 3)]).hasClass("red") && madeMove === false) {
if (i != 3 && i != 4 && i != 5 && i != 9 && i != 10 && i != 11 && i != 15 && i != 16 && i != 17 && i != 21 && i != 22 && i != 23 && i != 27 && i != 28 && i != 29 && i != 33 && i != 34 && i != 35) {
$(check[(i + 3)]).addClass("red");
spliceIt();
madeMove = true;
}
}
}
};
function spliceIt() {
if ($(rowOne[rowOne.length-1]).hasClass("red")) {
rowOne.splice(rowOne.indexOf(this), 1);
};
if ($(rowTwo[rowTwo.length-1]).hasClass("red")) {
rowTwo.splice(rowTwo.indexOf(this), 1);
};
if ($(rowThree[rowThree.length-1]).hasClass("red")) {
rowThree.splice(rowThree.indexOf(this), 1);
};
if ($(rowFour[rowFour.length-1]).hasClass("red")) {
rowFour.splice(rowFour.indexOf(this), 1);
};
if ($(rowFive[rowFive.length-1]).hasClass("red")) {
rowFive.splice(rowFive.indexOf(this), 1);
};
if ($(rowSix[rowSix.length-1]).hasClass("red")) {
rowSix.splice(rowSix.indexOf(this), 1);
};
if ($(rowSeven[rowSeven.length-1]).hasClass("red")) {
rowSeven.splice(rowSeven.indexOf(this), 1);
};
};
}); |
#! /bin/sh
# Launch the BEAM Graph Processing Tool (GPT) from the application bundle,
# passing all script arguments straight through to GPT.
export BEAM4_HOME=$_CIOP_APPLICATION_PATH/shared
# Fix: the original guard was `[ -z "$BEAM4_HOME" ]`, which could never fire
# because the line above always assigns a non-empty value (at minimum
# "/shared"). Validate that the installation directory actually exists.
if [ ! -d "$BEAM4_HOME" ]; then
echo
echo Error: BEAM4_HOME not found in your environment.
echo Please set the BEAM4_HOME variable in your environment to match the
echo location of the BEAM 4.x installation
echo
exit 2
fi
# detect_java.sh sets $app_java_home to a usable JRE/JDK.
. "$BEAM4_HOME/bin/detect_java.sh"
"$app_java_home/bin/java" \
-Xmx1024m \
-Dceres.context=beam \
"-Dbeam.mainClass=org.esa.beam.framework.gpf.main.GPT" \
"-Dbeam.home=$BEAM4_HOME" \
"-Dncsa.hdf.hdflib.HDFLibrary.hdflib=$BEAM4_HOME/lib/lib-hdf/lib/libjhdf.so" \
"-Dncsa.hdf.hdf5lib.H5.hdf5lib=$BEAM4_HOME/lib/lib-hdf/lib/libjhdf5.so" \
-jar "$BEAM4_HOME/lib/ceres-launcher.jar" "$@"
exit $?
|
#!/bin/bash
#######################################################################
# SEND EMAIL TO A (LIST OF) USER(S) #
#######################################################################
# usage: . send_mail.sh TO[user list] MESSAGE[file] [SUBJECT]
# bash >= 4.3
## Variables
# Some global variables
# Resolve the project layout relative to this script's location.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"
PROJECT_LIB_DIR="${PROJECT_ROOT_DIR}/lib"
PROJECT_ETC_DIR="${PROJECT_ROOT_DIR}/etc"
YAML_INTERPRETER="${PROJECT_LIB_DIR}/yaml_interpreter.py"
SEND_MAIL_APP="${PROJECT_LIB_DIR}/send_email.py"
CLUSTER_CONFIG_FILE="${PROJECT_ETC_DIR}/cluster.yaml"
# Mail settings pulled one key at a time from cluster.yaml via the helper.
MailServer="$(python3 ${YAML_INTERPRETER} ${CLUSTER_CONFIG_FILE} MailServer)"
MailPassword="$(python3 ${YAML_INTERPRETER} ${CLUSTER_CONFIG_FILE} MailPassword)"
MailAddress="$(python3 ${YAML_INTERPRETER} ${CLUSTER_CONFIG_FILE} MailAddress)"
MailDefaultSubject="$(python3 ${YAML_INTERPRETER} ${CLUSTER_CONFIG_FILE} MailDefaultSubject)"
# use printf to save \n in a bash variable: printf -v var "string\nwith\n\n"
printf -v MailSignature "$(python3 ${YAML_INTERPRETER} ${CLUSTER_CONFIG_FILE} MailSignature)"
FROM="${MailAddress}"
# Arguments
# $1 may be a recipient-list file (resolved to an absolute path) or a
# literal address. NOTE(review): the IFS='/' read trick just extracts the
# basename; `basename "$1"` would be clearer.
if [[ -f $1 ]]; then
TO_FILE_NAME="$(IFS='/' read -r -a array <<< "$1" && echo ${array[-1]})"
TO="$(cd "$(dirname "$1")" && pwd)/${TO_FILE_NAME}"
else
TO="$1"
fi
# $2 may likewise be a message file or a literal message string.
# NOTE(review): printf -v interprets %-sequences and backslashes in the
# user-supplied message — a literal '%' in $2 will be mangled. Verify this
# is intended; `MESSAGE="$2"` would take it verbatim.
if [[ -f $2 ]]; then
MESSAGE_FILE_NAME="$(IFS='/' read -r -a array <<< "$2" && echo ${array[-1]})"
MESSAGE="$(cd "$(dirname "$2")" && pwd)/${MESSAGE_FILE_NAME}"
else
#MESSAGE="$2"
printf -v MESSAGE "$2"
fi
# Optional $3 is appended to the configured default subject.
if [[ $3 == "" ]]; then
SUBJECT="${MailDefaultSubject}"
else
SUBJECT="${MailDefaultSubject}: $3"
fi
## Program begin
# If TO resolved to a file, send one mail per line; the address is expected
# in the second whitespace-separated field of each line.
# NOTE(review): ${MailServer}, ${FROM}, ${TO} and ${useremail} are expanded
# unquoted — values containing spaces would word-split. Confirm inputs.
if [[ -f ${TO} ]]; then
while IFS=" " read -ra userinfo
do
useremail=${userinfo[1]}
python3 ${SEND_MAIL_APP} ${MailServer} "${MailPassword}" ${FROM} ${useremail} "${MESSAGE}" "${SUBJECT}" "${MailSignature}"
done < "${TO}"
else
python3 ${SEND_MAIL_APP} ${MailServer} "${MailPassword}" ${FROM} ${TO} "${MESSAGE}" "${SUBJECT}" "${MailSignature}"
fi
exit
|
<filename>src/nikpack/Main.java<gh_stars>0
package nikpack;
import java.io.IOException;
import java.nio.file.*;
import static java.nio.file.StandardWatchEventKinds.ENTRY_CREATE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_DELETE;
import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY;
/**
 * Watches a fixed directory with java.nio's WatchService and logs
 * create/delete/modify events to stdout. Creating a file named
 * "exit.txt" in the watched directory terminates the watch loop.
 */
public class Main {
    public static void main(String[] args) {
        try {
            // create the watch service
            WatchService watcher = FileSystems.getDefault().newWatchService();
            // watch the "watchthis" directory (hard-coded path; created if missing)
            Path path = Paths.get("/home/sa/IdeaProjects/WatchService/src/watchthis");
            if (Files.notExists(path))
                Files.createDirectory(path);
            // register for the three event kinds and keep the WatchKey
            WatchKey watchKey = path.register(watcher,
                    ENTRY_CREATE,
                    ENTRY_DELETE,
                    ENTRY_MODIFY);
            StringBuilder message = new StringBuilder(50);
            stop_watch:
            for(;;) {
                WatchKey key;
                // blocks until at least one event is queued
                key = watcher.take();
                for(WatchEvent<?> event: key.pollEvents()) {
                    WatchEvent.Kind<?> eventKind = event.kind();
                    // context() of these event kinds is the relative Path of the entry
                    WatchEvent<Path> pathEvent = (WatchEvent<Path>) event;
                    Path entryPath = pathEvent.context();
                    switch (eventKind.toString()) {
                        case "ENTRY_CREATE": {
                            // probeContentType may return null for unknown types
                            String fileType = Files.probeContentType(entryPath);
                            message.append("Создан файл: " + entryPath + "; тип файла: ");
                            if (fileType != null)
                                message.append("[" + fileType + "]");
                            else
                                message.append("[неизвестен]");
                            System.out.println(message);
                            // reuse the builder for the next event
                            message.setLength(0);
                            break;
                        }
                        case "ENTRY_DELETE": {
                            System.out.println("Удален файл: " + entryPath);
                            break;
                        }
                        case "ENTRY_MODIFY": {
                            System.out.println("Изменен файл: " + entryPath);
                            break;
                        }
                        default:
                            break;
                    }
                    // sentinel file: stop watching entirely
                    if (entryPath.getFileName().toString().equals("exit.txt")) {
                        System.out.println("Вазафак");
                        break stop_watch;
                    }
                }
                // reset the key to receive further events; a false return
                // means the watched directory is no longer accessible
                if (!key.reset())
                    break;
            }
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
|
// AsyncOperation class to be implemented
// Holds a single progress observer and notifies it when the operation
// completes. Registering a new callback replaces the previous one.
public class AsyncOperation
{
    private Action<int> progressCallback;

    // Register the callback that receives progress percentages.
    public void Progressed(Action<int> callback)
    {
        this.progressCallback = callback;
    }

    // Simulate completion: report 100% progress to the observer, if any.
    public void Complete()
    {
        var callback = this.progressCallback;
        if (callback != null)
        {
            callback(100);
        }
    }
}
// Unit test using NUnit framework
// Verifies that a callback registered via Progressed receives the
// progress value reported by Complete.
[TestFixture]
public class AsyncOperationTests
{
    [Test]
    public void ProgressReportingTest()
    {
        // Arrange
        int expectedProgress = 100;
        int returnedProgress = 0;
        AsyncOperation async = new AsyncOperation();
        // Act: register the observer, then complete (which reports 100)
        async.Progressed((x) => returnedProgress = x);
        async.Complete();
        // Assert
        Assert.AreEqual(expectedProgress, returnedProgress);
    }
}
} |
import React from "react"
const Announcement = () => {
return (
<div className='announcementBar'>
<p><span>COVID-19</span>: Get your business online, the government hates you!</p>
</div>
)
}
export default Announcement |
/*
(c) 2012 <NAME>
*/
/*compute the inverse matrix by using the speciality of the linking number matrix*/
/*
* * * 1 * *
* * * 0 1 *
* * * 0 0 1
1 0 0 0 0 0
* 1 0 0 0 0
* * 1 0 0 0
The submatrix M(n:2n, 1:n) and M(1:n, n:2n) are triangular matrix
*/
#ifndef _INVERSE_LINK_NUMBER_MATRIX_H_
#define _INVERSE_LINK_NUMBER_MATRIX_H_
#include <iostream>
#include <vector>
#include <list>
// Computes the inverse of a 2n x 2n linking-number matrix, exploiting the
// triangular structure of the off-diagonal n x n sub-blocks sketched above.
// Rows are stored sparsely as lists of (column, value) pairs — presumably;
// confirm against the .cpp implementation.
class InverseLinkNumberMatrix {
public:
    // Empty matrix; half-dimension starts at 0 until SetMatrixHalfDim is called.
    InverseLinkNumberMatrix() {
        n = 0;
    }
    // Copy constructor: deep-copies both matrices (vector/list copy semantics).
    InverseLinkNumberMatrix(const InverseLinkNumberMatrix &lhs) {
        org_mat = lhs.org_mat;
        inv_mat = lhs.inv_mat;
        n = lhs.n;
    }
    ~InverseLinkNumberMatrix() {
        Clear();
    }
    // Install the original (to-be-inverted) matrix in sparse row form.
    void SetOrgMatrix(const std::vector<std::list<std::pair<int, int> > > &inOrgMat) {
        org_mat = inOrgMat;
    }
    // Set n, where the full matrix is 2n x 2n.
    void SetMatrixHalfDim(const int inHalfDim) {
        n = inHalfDim;
    }
    // Release both the original and inverse matrices.
    void Clear() {
        org_mat.clear();
        inv_mat.clear();
    }
    //
    // Row operation: dst_row += multiplier * src_row (in-place on mat).
    void add_rows(const int dst_row, const int src_row, const int multiplier,
            std::vector<std::list<std::pair<int, int> > > &mat);
    // Invert org_mat into inv_mat, using n/org_mat already set on this object.
    void ComputeInverseMatrix();
    // Convenience overload: set n and the matrix, then invert.
    void ComputeInverseMatrix(const int n, std::vector<std::list<std::pair<int, int> > > &inOrgMat);
    // Debug output to stdout / to a named file.
    void PrintMatrix(std::vector<std::list<std::pair<int, int> > > &mat);
    void PrintMatrix(const char *pFileName, std::vector<std::list<std::pair<int, int> > > &mat);
public:
    std::vector<std::list<std::pair<int, int> > > inv_mat;  // computed inverse
    std::vector<std::list<std::pair<int, int> > > org_mat;  // original matrix
    int n; // matrix is 2nx2n
};
#endif //_INVERSE_LINK_NUMBER_MATRIX_H_
|
#!/bin/bash
# Tear down the Vagrant environment and delete locally generated state.
vagrant destroy --parallel
# Remove Vagrant's machine metadata plus the registry directories
# (presumably Docker registry / mirror data shared into the VMs — confirm
# against the Vagrantfile).
rm -rf ./.vagrant
rm -rf ./registry
rm -rf ./registry-mirror
<gh_stars>1000+
/*
* $HeadURL: http://svn.apache.org/repos/asf/httpcomponents/httpclient/trunk/module-client/src/main/java/org/apache/http/client/methods/HttpGet.java $
* $Revision: 664505 $
* $Date: 2008-06-08 06:21:20 -0700 (Sun, 08 Jun 2008) $
*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.client.methods;
import java.net.URI;
/**
* HTTP GET method.
* <p>
* The HTTP GET method is defined in section 9.3 of
* <a href="http://www.ietf.org/rfc/rfc2616.txt">RFC2616</a>: <blockquote> The
* GET method means retrieve whatever information (in the form of an entity) is
* identified by the Request-URI. If the Request-URI refers to a data-producing
* process, it is the produced data which shall be returned as the entity in the
* response and not the source text of the process, unless that text happens to
* be the output of the process. </blockquote>
* </p>
* <p>
* GetMethods will follow redirect requests from the http server by default.
* This behavour can be disabled by calling setFollowRedirects(false).
* </p>
*
* @version $Revision: 664505 $
*
* @since 4.0
*
* @deprecated Please use {@link java.net.URL#openConnection} instead. Please
* visit <a href=
* "http://android-developers.blogspot.com/2011/09/androids-http-clients.html">this
* webpage</a> for further details.
*/
@Deprecated
public class HttpGet extends HttpRequestBase {
public final static String METHOD_NAME = "GET";
public HttpGet() {
}
public HttpGet(final URI uri) {
}
/**
* @throws IllegalArgumentException if the uri is invalid.
*/
public HttpGet(final String uri) {
}
@Override
public String getMethod() {
return METHOD_NAME;
}
} |
package org.synaptra.mlib;
/**
 * Placeholder for a perceptron model; no state or behavior implemented yet.
 */
public class Perceptron {
}
|
<filename>lib/generator.js
"use strict";
const _ = require('lodash');
const parse5 = require('parse5');
const GeneratorContext = require('./generator-context');
const ConfigurationFactory = require('./configuration-factory');
const NameExtractor = require('./name/name-extractor');
const EmitTraverser = require('./emit-traverser');
const ParserFactory = require('./parser/parser-factory');
// Shared setup for both entry points: build the configuration, parse the
// source markup, create a generator context and run the document traversal
// that populates context.page.
function prepareGeneration(source, options) {
    let config = ConfigurationFactory.create(options);
    let parseResult = new ParserFactory(source).createParser().parse();
    let context = new GeneratorContext(parseResult.document, source);
    traverseDocument(context, config);
    return { config, parseResult, context };
}

class Generator {
    /**
     * Generate page-object source for the given markup.
     * @param {string} source  markup to analyze
     * @param {object} options configuration overrides (see ConfigurationFactory)
     * @returns the emitted page object (whatever EmitTraverser produces)
     */
    generate(source, options = {}) {
        let { config, parseResult, context } = prepareGeneration(source, options);
        let emitTraverser = new EmitTraverser(context.page, config, parseResult);
        return emitTraverser.emitPageObject();
    }

    /**
     * Build and return only the in-memory page model (no code emission).
     * Same setup pipeline as generate(); the duplication in the original
     * is now factored into prepareGeneration.
     */
    generatePageModel(source, options = {}) {
        return prepareGeneration(source, options).context.page;
    }
}
// Two-phase traversal: first walk the DOM tree to build the page element
// tree on the context, then walk that page tree assigning names and locators.
function traverseDocument(context, config) {
    traverseDomElement(context, config);
    let nameExtractor = new NameExtractor(config.nameSources);
    traversePageElement(context.page, nameExtractor, config.locatorGenerator);
}
// Depth-first walk of the DOM rooted at context.domElement. Every matching
// processor gets to act on the current element; processors steer the page
// tree cursor via flags on the shared context.
function traverseDomElement(context, config) {
    let element = context.domElement;
    for (let processor of config.elementProcessors) {
        if (processor.matches(element)) {
            processor.process(context);
        }
    }
    if (context.traversePageTreeDownBeforeChildren) {
        context.traversePageTreeDown();
    }
    // Latch the "move up after children" flag NOW: recursing into children
    // reuses the same context, so processors run for descendants would
    // overwrite the flag before we read it.
    let traversePageTreeUpAfterChildren = context.traversePageTreeUpAfterChildren;
    if (element.childNodes) {
        for (let child of element.childNodes) {
            context.domElement = child;
            traverseDomElement(context, config);
        }
    }
    if (traversePageTreeUpAfterChildren) {
        context.traversePageTreeUp()
    }
    // Restore the cursor so the caller's view of the context is unchanged.
    context.domElement = element;
}
// Recursively assign names and locators over the page element tree.
// The root (no parent) and elements flagged notGenerated are skipped for
// generation but their children are still visited.
function traversePageElement(pageElement, nameExtractor, locatorGenerator) {
    const shouldGenerate = !_.isNil(pageElement.parent) && !pageElement.notGenerated;
    if (shouldGenerate) {
        // Keep an explicitly set name; only extract one when missing.
        if (_.isEmpty(pageElement.name)) {
            pageElement.name = nameExtractor.extractName(pageElement);
        }
        pageElement.locator = locatorGenerator.generate(pageElement);
    }

    const children = pageElement.elements;
    if (!_.isNil(children)) {
        for (const child of children) {
            traversePageElement(child, nameExtractor, locatorGenerator);
        }
    }
}
module.exports = Generator; |
def bubble_sort(lst):
    """Sort ``lst`` in place, ascending, using bubble sort.

    Repeatedly sweeps adjacent pairs, swapping out-of-order neighbours,
    until a full sweep makes no swap. Returns None (in-place sort).
    """
    # Start "dirty" so the first sweep always runs.
    dirty = True
    while dirty:
        dirty = False
        for idx in range(len(lst) - 1):
            if lst[idx] > lst[idx + 1]:
                lst[idx], lst[idx + 1] = lst[idx + 1], lst[idx]
                # A swap happened, so another sweep is needed.
                dirty = True
VERSION = "0.3.8"

# Filler words dropped during normalization (presumably of product/model
# names — confirm against the callers of these constants).
STRIP_WORDS = ("the", "and", "for", "with", "a", "of")

# Suffixes recognized as model designations.
# Fix: the original tuple read ("le", "pro", "premium", "edition" "standard")
# — the missing comma made Python concatenate the adjacent string literals
# into a single "editionstandard" entry, so neither "edition" nor "standard"
# would ever match on its own.
MODEL_ENDINGS = ("le", "pro", "premium", "edition", "standard")
|
package com.pharmacySystem.service.implementations;
import java.util.HashSet;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.pharmacySystem.model.appointment.Appointment;
import com.pharmacySystem.model.appointment.AppointmentStatus;
import com.pharmacySystem.model.pharmacy.Pharmacy;
import com.pharmacySystem.model.user.Patient;
import com.pharmacySystem.repository.AppointmentRepository;
import com.pharmacySystem.service.interfaces.IAppointmentService;
import com.pharmacySystem.service.interfaces.IPatientService;
import java.util.ArrayList;
import java.util.List;
import org.springframework.security.crypto.password.PasswordEncoder;
import com.pharmacySystem.Authentication.exception.ResourceConflictException;
import com.pharmacySystem.DTOs.AllergyDTO;
import com.pharmacySystem.DTOs.PatientDTO;
import com.pharmacySystem.DTOs.ExaminedPatientDTO;
import com.pharmacySystem.DTOs.PatientProfileDTO;
import com.pharmacySystem.DTOs.UserRegistrationDTO;
import com.pharmacySystem.mappers.PatientMapper;
import com.pharmacySystem.model.user.User;
import com.pharmacySystem.repository.AuthorityRepository;
import com.pharmacySystem.repository.IngredientRepository;
import com.pharmacySystem.repository.PatientRepository;
import com.pharmacySystem.repository.PharmacyRepository;
import com.pharmacySystem.repository.UserRepository;
import com.pharmacySystem.model.user.Authority;;
@Service
public class PatientService implements IPatientService{
@Autowired
private AppointmentRepository appointmentRepository;
@Autowired
private IAppointmentService appointmentService;
@Autowired
private PatientRepository patientRepository;
@Autowired
private UserRepository userRepository;
@Autowired
private EmailService emailService;
@Autowired
private AuthorityRepository authorityRepository;
@Autowired
private PasswordEncoder passwordEncoder;
@Autowired
private IngredientRepository ingredientRepository;
@Autowired
private PharmacyRepository pharmacyRepository;
public Set<ExaminedPatientDTO> findAllExaminedByExaminerId(Long id) {
Set<Patient> patients = new HashSet<Patient>();
Set<Appointment> allExaminerAppointments = appointmentRepository.findAllByExaminerId(id);
for (Appointment appointment : allExaminerAppointments) {
if(appointment.getStatus() == AppointmentStatus.SUCCEEDED) {
patients.add(appointment.getPatient());
}
}
Set<ExaminedPatientDTO> patientsDTOs = PatientMapper.PatientSetToPatientDTOsSet(patients);
setLastAppointmentDates(patientsDTOs, id);
return patientsDTOs;
}
@Override
public Patient findById(long id) {
return patientRepository.findById(id).orElse(null);
}
@Override
public Patient save(Patient patient) {
return null;
}
@Override
public String registerPatient(UserRegistrationDTO userRegistrationDTO) {
User existUser = userRepository.findByEmail(userRegistrationDTO.getEmail());
if(existUser !=null) {
throw new ResourceConflictException(userRegistrationDTO.getEmail(), "Email already exists!");
}
userRegistrationDTO.setPassword(passwordEncoder.encode(userRegistrationDTO.getPassword()));
Patient patient = new Patient();
List<Authority> authorities = new ArrayList<Authority>();
authorities.add(authorityRepository.findByName("ROLE_PATIENT"));
patient.setAuthorities(authorities);
PatientMapper.createPatientFromRegistrationDTO(userRegistrationDTO, patient);
Patient addedPatient = patientRepository.save(patient);
emailService.sendConfirmationEmail(userRegistrationDTO, addedPatient.getId());
return addedPatient.getEmail();
}
@Override
public void activatePatientAccount(Long userId) {
User userForActivation = userRepository.getOne(userId);
userForActivation.setActivated(true);
userRepository.save(userForActivation);
}
@Override
public Patient updateAllergies(AllergyDTO allergyDTO) {
Patient patient = patientRepository.findById(allergyDTO.getPatientId()).orElseGet(null);
if(patient != null) {
patient.getAllergies().add(ingredientRepository.findByName(allergyDTO.getAllergyName()));
patientRepository.save(patient);
return patient;
}
return null;
}
@Override
public PatientProfileDTO findPatientProfileInformation(long id) {
Patient patient = patientRepository.findById(id).orElseGet(null);
if(patient != null) {
PatientProfileDTO patientProfileDTO = PatientMapper.createPatientFromPatientProfileDTO(patient);
return patientProfileDTO;
}
return null;
}
@Scheduled(cron = "0 0 0 1 * ?")
@Transactional(readOnly = false)
public void resetPenaltyPoints() {
List<Patient> allPatients = patientRepository.findAll();
for(Patient patient : allPatients) {
if(patient.getPenaltyPoints() != 0) {
patient.setPenaltyPoints(0);
patientRepository.save(patient);
}
}
}
@Override
public Set<PatientDTO> findSubscribedPatientsByPharmacy(long pharmacyId) {
List<Patient> patients = patientRepository.findAll();
Set<User> subscribedUsers = new HashSet<User>();
Set<PatientDTO> subscribedPatients = new HashSet<PatientDTO>();
for(Patient patient : patients) {
for(Pharmacy pharmacy : patient.getSubscribedPharmacies()) {
if(pharmacy.getId() == pharmacyId)
subscribedUsers.add(patient);
}
}
subscribedPatients = PatientMapper.createPatientDTOFromUser(subscribedUsers);
return subscribedPatients;
}
@Override
public PatientDTO subscribeToPharmacy(Long id, long patientId) {
Pharmacy pharmacy = pharmacyRepository.findById(id).orElse(null);
Patient patient = patientRepository.findById(patientId).orElse(null);
PatientDTO patientDTO;
if(pharmacy != null && patient != null) {
Set<Pharmacy> subscribedPharmacies = patient.getSubscribedPharmacies();
subscribedPharmacies.add(pharmacy);
patientRepository.save(patient);
patientDTO = PatientMapper.patientToPatientDTO(patient);
return patientDTO;
}
return null;
}
@Override
public PatientDTO unsubscribeToPharmacy(Long id, long patientId) {
Pharmacy pharmacy = pharmacyRepository.findById(id).orElse(null);
Patient patient = patientRepository.findById(patientId).orElse(null);
PatientDTO patientDTO;
if(pharmacy != null && patient != null) {
Set<Pharmacy> subscribedPharmacies = patient.getSubscribedPharmacies();
subscribedPharmacies.remove(pharmacy);
patientRepository.save(patient);
patientDTO = PatientMapper.patientToPatientDTO(patient);
return patientDTO;
}
return null;
}
private void setLastAppointmentDates(Set<ExaminedPatientDTO> patientsDTOs, Long examinerId) {
for (ExaminedPatientDTO examinedPatientDTO : patientsDTOs) {
examinedPatientDTO.setLastExaminationDate(appointmentService.getLastAppointmentDate(examinedPatientDTO.getId(), examinerId));
}
}
} |
def test_create_report(self):
    """Create a comment report and an article report through the API helpers
    and verify the persisted fields round-trip.

    NOTE(review): the original declared this as a zero-argument function
    while reading the ``self.comment_report`` / ``self.article_report``
    fixtures, which raises NameError at runtime; it is clearly meant to be a
    test-case method, so the ``self`` parameter was added.
    """
    # Simulate the creation of a comment report
    comment_report_content = 'This is a comment report'
    comment_report_data = {
        'content': comment_report_content,
        'parent_comment_id': self.comment_report.parent_comment.id,
        'reported_by_id': self.comment_report.reported_by.id
    }
    # Call the API endpoint to create a comment report
    created_comment_report = create_comment_report(comment_report_data)
    # Validate the created comment report
    assert created_comment_report.content == comment_report_content
    assert created_comment_report.parent_comment.id == self.comment_report.parent_comment.id
    assert created_comment_report.reported_by.id == self.comment_report.reported_by.id

    # Simulate the creation of an article report
    article_report_content = 'This is an article report'
    article_report_data = {
        'content': article_report_content,
        'parent_article_id': self.article_report.parent_article.id,
        'reported_by_id': self.article_report.reported_by.id
    }
    # Call the API endpoint to create an article report
    created_article_report = create_article_report(article_report_data)
    # Validate the created article report
    assert created_article_report.content == article_report_content
    assert created_article_report.parent_article.id == self.article_report.parent_article.id
    assert created_article_report.reported_by.id == self.article_report.reported_by.id
<reponame>tenebrousedge/ruby-packer
require File.expand_path('../../spec_helper', __FILE__)
# Specs for Ruby's conditional-assignment operators (||=, &&= and the
# compound operator form) applied to local variables, attr_accessor-backed
# attributes, and constants.
describe 'Optional variable assignments' do
  describe 'using ||=' do
    describe 'using a single variable' do
      it 'assigns a new variable' do
        a ||= 10
        a.should == 10
      end
      it 're-assigns an existing variable set to false' do
        a = false
        a ||= 10
        a.should == 10
      end
      it 're-assigns an existing variable set to nil' do
        a = nil
        a ||= 10
        a.should == 10
      end
      it 'does not re-assign a variable with a truthy value' do
        a = 10
        a ||= 20
        a.should == 10
      end
      it 'does not evaluate the right side when not needed' do
        a = 10
        a ||= raise('should not be executed')
        a.should == 10
      end
      it 'does not re-assign a variable with a truthy value when using an inline rescue' do
        a = 10
        a ||= 20 rescue 30
        a.should == 10
      end
    end

    # NOTE(review): the description reads "a accessor" (sic).  Example
    # descriptions are runtime strings, so the typo is left unchanged here.
    describe 'using a accessor' do
      before do
        klass = Class.new { attr_accessor :b }
        @a = klass.new
      end
      it 'assigns a new variable' do
        @a.b ||= 10
        @a.b.should == 10
      end
      it 're-assigns an existing variable set to false' do
        @a.b = false
        @a.b ||= 10
        @a.b.should == 10
      end
      it 're-assigns an existing variable set to nil' do
        @a.b = nil
        @a.b ||= 10
        @a.b.should == 10
      end
      it 'does not re-assign a variable with a truthy value' do
        @a.b = 10
        @a.b ||= 20
        @a.b.should == 10
      end
      it 'does not evaluate the right side when not needed' do
        @a.b = 10
        @a.b ||= raise('should not be executed')
        @a.b.should == 10
      end
      it 'does not re-assign a variable with a truthy value when using an inline rescue' do
        @a.b = 10
        @a.b ||= 20 rescue 30
        @a.b.should == 10
      end
    end
  end

  describe 'using &&=' do
    describe 'using a single variable' do
      it 'leaves new variable unassigned' do
        a &&= 10
        a.should == nil
      end
      it 'leaves false' do
        a = false
        a &&= 10
        a.should == false
      end
      it 'leaves nil' do
        a = nil
        a &&= 10
        a.should == nil
      end
      it 'does not evaluate the right side when not needed' do
        a = nil
        a &&= raise('should not be executed')
        a.should == nil
      end
      it 'does re-assign a variable with a truthy value' do
        a = 10
        a &&= 20
        a.should == 20
      end
      it 'does re-assign a variable with a truthy value when using an inline rescue' do
        a = 10
        a &&= 20 rescue 30
        a.should == 20
      end
    end

    # NOTE(review): this description duplicates "using a single variable"
    # although the examples below exercise an accessor; it is a runtime
    # string, so it is left unchanged.
    describe 'using a single variable' do
      before do
        klass = Class.new { attr_accessor :b }
        @a = klass.new
      end
      it 'leaves new variable unassigned' do
        @a.b &&= 10
        @a.b.should == nil
      end
      it 'leaves false' do
        @a.b = false
        @a.b &&= 10
        @a.b.should == false
      end
      it 'leaves nil' do
        @a.b = nil
        @a.b &&= 10
        @a.b.should == nil
      end
      it 'does not evaluate the right side when not needed' do
        @a.b = nil
        @a.b &&= raise('should not be executed')
        @a.b.should == nil
      end
      it 'does re-assign a variable with a truthy value' do
        @a.b = 10
        @a.b &&= 20
        @a.b.should == 20
      end
      it 'does re-assign a variable with a truthy value when using an inline rescue' do
        @a.b = 10
        @a.b &&= 20 rescue 30
        @a.b.should == 20
      end
    end
  end

  # NOTE(review): "compunded" is a typo for "compounded"; runtime string,
  # left unchanged.
  describe 'using compunded constants' do
    # Each example starts without Object::A defined.
    before do
      Object.send(:remove_const, :A) if defined? Object::A
    end
    it 'with ||= assignments' do
      Object::A ||= 10
      Object::A.should == 10
    end
    it 'with ||= do not reassign' do
      Object::A = 20
      Object::A ||= 10
      Object::A.should == 20
    end
    it 'with &&= assignments' do
      Object::A = 20
      -> {
        Object::A &&= 10
      }.should complain(/already initialized constant/)
      Object::A.should == 10
    end
    it 'with &&= assignments will fail with non-existent constants' do
      lambda { Object::A &&= 10 }.should raise_error(NameError)
    end
    it 'with operator assignments' do
      Object::A = 20
      -> {
        Object::A += 10
      }.should complain(/already initialized constant/)
      Object::A.should == 30
    end
    it 'with operator assignments will fail with non-existent constants' do
      lambda { Object::A += 10 }.should raise_error(NameError)
    end
  end
end
|
# Long listing: all entries, classify suffixes, human-readable sizes.
alias ll="ls -laFh"
# Same long listing, sorted by size (largest first).
alias lls="ll -S"
# Print the environment sorted by name; NUL-separated sort keeps multi-line values intact.
alias envs="env -0 | sort -z | tr '\0' '\n'"
|
<reponame>waleedmashaqbeh/freequartz
/* Copyright 2010 Smartmobili SARL
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <CoreGraphics/CGContext.h>
#include "CGBasePriv.h"
#include "CGContextPriv.h"
#include "CGContextDelegatePriv.h"
#include "CGGStatePriv.h"
//static CGCallback rendering_callbacks[] =
/* CFRuntimeClass descriptor registering "CGContextDelegate" with the
 * CoreFoundation runtime; only the finalize callback is supplied. */
static CFRuntimeClass CGContextDelegateClass =
{
	0, // version
	"CGContextDelegate", // className
	0, // init
	0, // copy
	delegate_finalize, // finalize
	0, // equal
	0, // hash
	0, // copyFormattingDesc
	0 // copyDebugDesc
};

/* CFTypeID for the delegate class; registered lazily on first use
 * (see CGContextDelegateGetTypeID). */
CFTypeID __kCGContextDelegateID = _kCFRuntimeNotATypeID;
/* Returns the CFTypeID for CGContextDelegate, registering the class with
 * the CF runtime the first time it is called. */
CFTypeID CGContextDelegateGetTypeID(void)
{
	return CGTypeRegisterWithCallbacks(&__kCGContextDelegateID, &CGContextDelegateClass);
}
/* Allocates a new delegate instance through the CF runtime and stores the
 * caller-supplied info pointer.  Returns 0 on allocation failure. */
CGContextDelegateRef CGContextDelegateCreate(void* info)
{
	CGContextDelegateRef ctxDelegate;
	CFIndex size;
	/* CF allocates the CFRuntimeBase header itself; request only the
	 * extra bytes beyond it. */
	size = sizeof(CGContextDelegate) - sizeof(CFRuntimeBase);
	ctxDelegate = (CGContextDelegateRef) CGTypeCreateInstance( CGContextDelegateGetTypeID(), size );
	if (!ctxDelegate) { return 0; }
	ctxDelegate->info = (CGContextDelegateInfoRef)info;
	return ctxDelegate;
}
/* Tears down a delegate.  Currently a no-op beyond the null check; the
 * commented-out release suggests a delegate chain that is not wired up yet. */
void CGContextDelegateFinalize(CGContextDelegateRef ctxDelegate)
{
	if (!ctxDelegate) {
		return;
	}
	//CGContextDelegateRelease(ctxDelegate->delegates);
}
/* CFRuntimeClass finalize hook (installed in CGContextDelegateClass).  No
 * per-instance resources need releasing yet, so this is a deliberate no-op.
 * The original declared an unused local cast of 'c', which only produced
 * -Wunused-variable warnings; it has been removed. */
void delegate_finalize(CFTypeRef c)
{
	(void)c; /* unused until the delegate owns releasable state */
}
/* Increments the delegate's CoreFoundation retain count.  NULL-safe;
 * always returns its argument (NULL in, NULL out). */
CGContextDelegateRef CGContextDelegateRetain(CGContextDelegateRef ctxDelegate)
{
	if (ctxDelegate) {
		CFRetain((CFTypeRef)ctxDelegate);
	}
	return ctxDelegate;
}
/* Decrements the delegate's CoreFoundation retain count.  NULL-safe. */
void CGContextDelegateRelease(CGContextDelegateRef ctxDelegate)
{
	if (ctxDelegate) {
		CFRelease((CFTypeRef)ctxDelegate);
	}
}
/* Returns the caller-supplied info pointer stored at creation time, or
 * NULL when the delegate itself is NULL. */
CGContextDelegateInfoRef CGContextDelegateGetInfo(CGContextDelegateRef ctxDelegate)
{
	return ctxDelegate ? ctxDelegate->info : NULL;
}
/* Installs an array of callbacks, delegating each entry to
 * CGContextDelegateSetCallback.  NULL-safe on the delegate. */
void CGContextDelegateSetCallbacks(CGContextDelegateRef ctxDelegate, const CGCallback* dlArray, int count)
{
	const CGCallback* entry;
	const CGCallback* end;
	if (!ctxDelegate) {
		return;
	}
	for (entry = dlArray, end = dlArray + count; entry < end; ++entry) {
		CGContextDelegateSetCallback(ctxDelegate, entry->type, entry->delegateAddr);
	}
}
/* Installs a single callback into the function-pointer slot selected by
 * 'type'.  Unknown types abort inside get_callback_address. */
void CGContextDelegateSetCallback(CGContextDelegateRef ctxDelegate, CGContextDelegateType type, void* callback)
{
	CGFunctionPtr* addr;
	if (!ctxDelegate)
		return;
	addr = get_callback_address(ctxDelegate, type, "CGContextDelegateSetCallback");
	if (addr) {
		*addr = (CGFunctionPtr) callback;
	}
}
/* Maps a delegate-type enum value to the address of the matching
 * function-pointer field on the delegate.  An unsupported type logs via
 * CGPostError and aborts; the trailing return NULL is unreachable but
 * keeps compilers satisfied. */
CGFunctionPtr* get_callback_address(CGContextDelegateRef ctxDelegate, CGContextDelegateType type, const char* callerName)
{
	switch(type) {
	case kCGContextDelegateFinalize: { return (CGFunctionPtr*)&ctxDelegate->finalize; }
	case kCGContextDelegateGetColorTransform: { return (CGFunctionPtr*)&ctxDelegate->getColorTransform; }
	case kCGContextDelegateGetBounds: { return (CGFunctionPtr*)&ctxDelegate->getBounds; }
	case kCGContextDelegateDrawLines: { return (CGFunctionPtr*)&ctxDelegate->drawLines; }
	case kCGContextDelegateDrawRects: { return (CGFunctionPtr*)&ctxDelegate->drawRects; }
	case kCGContextDelegateDrawPath: { return (CGFunctionPtr*)&ctxDelegate->drawPath; }
	case kCGContextDelegateDrawImage: { return (CGFunctionPtr*)&ctxDelegate->drawImage; }
	case kCGContextDelegateDrawGlyphs: { return (CGFunctionPtr*)&ctxDelegate->drawGlyphs; }
	case kCGContextDelegateDrawShading: { return (CGFunctionPtr*)&ctxDelegate->drawShading; }
	case kCGContextDelegateDrawDisplayList: { return (CGFunctionPtr*)&ctxDelegate->drawDisplayList; }
	case kCGContextDelegateDrawImages: { return (CGFunctionPtr*)&ctxDelegate->drawImages; }
	case kCGContextDelegateBeginPage: { return (CGFunctionPtr*)&ctxDelegate->beginPage; }
	case kCGContextDelegateEndPage: { return (CGFunctionPtr*)&ctxDelegate->endPage; }
	case kCGContextDelegateOperation: { return (CGFunctionPtr*)&ctxDelegate->operation; }
	case kCGContextDelegateDrawWindowContents: { return (CGFunctionPtr*)&ctxDelegate->drawWindowContents; }
	case kCGContextDelegateDirtyWindowContents: { return (CGFunctionPtr*)&ctxDelegate->dirtyWindowContents; }
	case kCGContextDelegateBeginLayer: { return (CGFunctionPtr*)&ctxDelegate->beginLayer; }
	case kCGContextDelegateEndLayer: { return (CGFunctionPtr*)&ctxDelegate->endLayer; }
	case kCGContextDelegateGetLayer: { return (CGFunctionPtr*)&ctxDelegate->getLayer; }
	case kCGContextDelegateDrawLayer: { return (CGFunctionPtr*)&ctxDelegate->drawLayer; }
	default: {
		CGPostError("%s: Unsupported delegate callback: %d.", callerName, (int)type);
		abort();
	}
	};
	return NULL;
}
/* Forwards a draw-lines request to the delegate's drawLines callback.
 * A missing delegate is treated as silent success; a delegate without a
 * drawLines handler reports kCGErrorNotImplemented. */
CGError CGContextDelegateDrawLines(CGContextDelegateRef ctxDelegate,
	CGRenderingStateRef rendering,
	CGGStateRef state,
	const CGPoint points[], size_t count)
{
	if (!ctxDelegate)
		return kCGErrorSuccess;
	if (!ctxDelegate->drawLines)
		return kCGErrorNotImplemented;
	return ctxDelegate->drawLines(ctxDelegate, rendering, state, points, count);
}
/* Forwards a draw-path request to the delegate's drawPath callback.
 * A missing delegate is treated as silent success; a delegate without a
 * drawPath handler reports kCGErrorNotImplemented. */
CGError CGContextDelegateDrawPath(CGContextDelegateRef ctxDelegate,
	CGRenderingStateRef rendering,
	CGGStateRef state,
	CGPathDrawingMode mode,
	CGMutablePathRef path)
{
	if (!ctxDelegate)
		return kCGErrorSuccess;
	if (!ctxDelegate->drawPath)
		return kCGErrorNotImplemented;
	return ctxDelegate->drawPath(ctxDelegate, rendering, state, mode, path);
}
/* Forwards a draw-image request to the delegate's drawImage callback.
 * A missing delegate is treated as silent success; a delegate without a
 * drawImage handler reports kCGErrorNotImplemented. */
CGError CGContextDelegateDrawImage(CGContextDelegateRef ctxDelegate,
	CGRenderingStateRef rendering,
	CGGStateRef state,
	CGRect rect,
	CGImageRef image)
{
	if (!ctxDelegate)
		return kCGErrorSuccess;
	if (!ctxDelegate->drawImage)
		return kCGErrorNotImplemented;
	return ctxDelegate->drawImage(ctxDelegate, rendering, state, rect, image);
}
/* Forwards a named operation to the delegate's operation callback.
 * A missing delegate is treated as silent success; a delegate without an
 * operation handler reports kCGErrorNotImplemented. */
CGError
CGContextDelegateOperation(CGContextDelegateRef ctxDelegate,
	CGRenderingStateRef rendering,
	CGGStateRef state,
	CFStringRef op,
	void* tmp)
{
	if (!ctxDelegate)
		return kCGErrorSuccess;
	if (!ctxDelegate->operation)
		return kCGErrorNotImplemented;
	return ctxDelegate->operation(ctxDelegate, rendering, state, op, tmp);
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const b2VoronoiDiagram_1 = require("./b2VoronoiDiagram");
describe('B2VoronoiDiagram', () => {
it('should be a function', () => {
expect(typeof b2VoronoiDiagram_1.B2VoronoiDiagram).toEqual('function');
});
});
|
<filename>src/main/java/bi/ihela/client/dto/init/MerchantType.java<gh_stars>1-10
/**
*
*/
package bi.ihela.client.dto.init;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.annotation.JsonRootName;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
/**
 * Serializable DTO for a "merchant_type" entry; maps the JSON fields
 * {@code css}, {@code label} and {@code value} in that order.
 *
 * @author jeanclaude.bucumi <EMAIL>
 */
@Getter
@Setter
@ToString
@NoArgsConstructor
@JsonRootName("merchant_type")
@JsonPropertyOrder({ "css", "label", "value" })
public class MerchantType implements Serializable {
	private static final long serialVersionUID = 1L;

	// NOTE(review): fields are public even though Lombok already generates
	// accessors; consider making them private after verifying no caller
	// accesses the fields directly.

	// Free-form style payload; typed Object because its JSON shape varies.
	@JsonProperty("css")
	public Object css;

	// Human-readable label of the merchant type.
	@JsonProperty("label")
	public String label;

	// Machine-readable value/identifier of the merchant type.
	@JsonProperty("value")
	public String value;
}
#python hsc_deploy.py --target localhost:7845 --waiting-time 3 $@
# NOTE(review): the exported key and password below are hard-coded secrets
# committed to the repository; inject them via environment variables or a
# secrets store instead, and rotate the exposed credentials.
python hsc_deploy.py --target localhost:7845 --exported-key 47CqNF6VHLjr77YPtvjtxrfekDdyhJrWvy1C6qN49JfnthExWL9hcfrWZ5J3ErgSAwyPBhoUu --password pCjjd98Ha8LiEHBCEiot --waiting-time 3 $@
|
<reponame>eloymg/vulcan-checks
/*
Copyright 2019 Adevinta
*/
package main
import (
"fmt"
"log"
"net/http"
"os"
)
// changeLogPrefix/changeLogSuffix frame the version number the same way a
// real Drupal CHANGELOG.txt does, so version-detection checks can parse it.
var (
	changeLogPrefix = "Drupal "
	changeLogSuffix = ","
)

// main serves a fake Drupal CHANGELOG.txt on port 80, reporting the version
// passed as the single command-line argument.
func main() {
	if len(os.Args) != 2 {
		fmt.Fprintf(os.Stderr, "usage: %v <drupal_version>\n", os.Args[0])
		os.Exit(1)
	}
	http.HandleFunc("/CHANGELOG.txt", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "%v%v%v", changeLogPrefix, os.Args[1], changeLogSuffix)
	})
	log.Fatal(http.ListenAndServe(":80", nil))
}
|
<gh_stars>1-10
import {createApp} from 'vue'
import {
    ElAside,
    ElButton,
    ElCard,
    ElCascader,
    ElCheckbox,
    ElContainer,
    ElDialog,
    ElForm,
    ElFormItem,
    ElInput,
    ElMenu,
    ElMenuItem,
    ElMenuItemGroup,
    ElMessageBox,
    ElOption,
    ElPagination,
    ElPopconfirm,
    ElPopover,
    ElRadio,
    ElRadioGroup,
    ElSelect,
    ElSubmenu,
    ElTable,
    ElTableColumn,
    ElTag,
    ElUpload
} from 'element-plus'
import App from './App.vue'
import router from '@/router'
import '~/theme/index.css'

const app = createApp(App) // create the root Vue application instance

// Global helper methods, available on every component instance.
app.config.globalProperties.$filters = {
    // Prefix relative asset URLs with the local backend host; absolute
    // http(s) URLs pass through unchanged.
    prefix(url) {
        if (url && url.startsWith('http')) {
            return url
        } else {
            url = `http://localhost:81${url}`
            return url
        }
    }
}
// Scroll the main content pane back to the top.
app.config.globalProperties.goTop = function () {
    const main = document.querySelector('.main')
    main.scrollTop = 0
}

app.use(router) // register the router instance

// Register only the Element Plus components the app actually uses.
app.use(ElButton)
    .use(ElContainer)
    .use(ElAside)
    .use(ElMenu)
    .use(ElSubmenu)
    .use(ElMenuItemGroup)
    .use(ElMenuItem)
    .use(ElForm)
    .use(ElFormItem)
    .use(ElCheckbox)
    .use(ElInput)
    .use(ElPopover)
    .use(ElTag)
    .use(ElCard)
    .use(ElTable)
    .use(ElTableColumn)
    .use(ElPopconfirm)
    .use(ElUpload)
    .use(ElDialog)
    .use(ElPagination)
    .use(ElCascader)
    .use(ElRadioGroup)
    .use(ElRadio)
    .use(ElSelect)
    .use(ElOption)
    .use(ElMessageBox)

app.mount('#app') // mount the application onto the #app element
|
/*
 Starting point for a vizuly.core.component: a horizontal two-handle range
 input.  `data` holds the current [low, high] selection within `domain`;
 dragging a handle moves one end, dragging the center pane moves both.
 */
vizuly.ui.range_input = function (parent) {

    // This is the object that provides pseudo "protected" properties that the vizuly.viz function helps create
    var scope={};

    var properties = {
        "data" : [.25,.75], // Expects an array of two values; assumes data[0] is less than data[1]
        "margin" : { // Our margin object
            "top": "10%", // Top margin
            "bottom" : "7%", // Bottom margin
            "left" : "8%", // Left margin
            "right" : "7%" // Right margin
        },
        "domain": [0,1], // Value range the two handles can move within
        "duration": 500, // Time in ms used for any component-generated transitions
        "width": 300, // Overall width of component
        "height": 300, // Height of component
        "handleWidth": 3, // Width of each drag handle, in pixels
        "trackHeight": .1 // Track height as a fraction of the component height
    };

    //Create our viz and type it
    var viz=vizuly.core.component(parent,scope,properties,["change","handleOver","handleOut"]);
    viz.type="viz.ui.range_input";

    //Measurements
    var size; // Holds the 'size' variable as defined in viz.util.size()

    //These are all d3.selection objects we use to insert and update svg elements into
    var svg, defs, background, g, plot, leftHandle, rightHandle, leftPane, centerPane, rightPane, track;

    var xScale = d3.scale.linear(); // Maps domain values to pixel positions
    var trackHeight;
    var handleWidth;

    // Drag behaviors for the two handles and the pane between them.
    var leftDrag = d3.behavior.drag(), rightDrag = d3.behavior.drag(), centerDrag = d3.behavior.drag();
    leftDrag.on("drag",onLeftDrag);
    rightDrag.on("drag",onRightDrag);
    centerDrag.on("drag",onCenterDrag);

    initialize();

    // Here we set up all of our svg layout elements using a 'vz-XX' class namespace. This routine is only called once
    // These are all place holder groups for the invidual data driven display elements. We use these to do general
    // sizing and margin layout. The all are referenced as D3 selections.
    function initialize() {
        svg = scope.selection.append("svg").attr("id", scope.id).style("overflow","visible").attr("class","vizuly");
        defs = vizuly.core.util.getDefs(viz);
        background = svg.append("rect").attr("class","vz-background");
        g = svg.append("g").attr("class","vz-range_input");
        plot = g.append("g").attr("class","vz-plot")
        track = plot.append("rect").attr("class",'vz-range_input-track');
        leftPane = plot.append("rect").attr("class",'vz-range_input-sidepane');
        centerPane = plot.append("rect").attr("class",'vz-range_input-centerpane');
        rightPane = plot.append("rect").attr("class",'vz-range_input-sidepane');
        leftHandle = plot.append("rect").attr("class",'vz-range_input-handle');
        rightHandle = plot.append("rect").attr("class",'vz-range_input-handle');
        leftHandle.call(leftDrag);
        rightHandle.call(rightDrag);
        centerPane.call(centerDrag);
        // Tell everyone we are done initializing
        scope.dispatch.initialize();
    }

    // The measure function performs any measurement or layout calcuations prior to making any updates to the SVG elements
    function measure() {
        // Call our validate routine and make sure all component properties have been set
        viz.validate();
        // Get our size based on height, width, and margin
        size = vizuly.core.util.size(scope.margin, scope.width, scope.height);
        xScale.range([0,size.width]);
        xScale.domain(scope.domain);
        trackHeight = Math.round(size.height * scope.trackHeight);
        handleWidth = scope.handleWidth;
        // Tell everyone we are done making our measurements
        scope.dispatch.measure();
    }

    // The update function is the primary function that is called when we want to render the visualiation based on
    // all of its set properties. A developer can change properties of the components and it will not show on the screen
    // until the update function is called
    function update() {
        // Call measure each time before we update to make sure all our our layout properties are set correctly
        measure();
        // Layout all of our primary SVG d3 elements.
        svg.attr("width", scope.width).attr("height", scope.height);
        background.attr("width", scope.width).attr("height", scope.height);
        plot.style("width",size.width).style("height",size.height).attr("transform","translate(" + size.left + "," + size.top + ")");
        track.attr("width",size.width).attr("height",trackHeight).attr("y", (size.height-trackHeight)/2);
        leftHandle.attr("width",handleWidth).attr("height",size.height).attr("x",xScale(scope.data[0]));
        rightHandle.attr("width",handleWidth).attr("height",size.height).attr("x",xScale(scope.data[1]));
        leftPane.attr("width",xScale(scope.data[0])).attr("height",size.height);
        rightPane.attr("width",size.width-xScale(scope.data[1])).attr("height",size.height).attr("x",xScale(scope.data[1]));
        centerPane.attr("width",xScale(scope.data[1])-xScale(scope.data[0])).attr("height",size.height).attr("x",xScale(scope.data[0]));
        // Let everyone know we are doing doing our update
        // Typically themes will attach a callback to this event so they can apply styles to the elements
        scope.dispatch.update();
    }

    // Left handle dragged: clamp the low value between the domain minimum and
    // the right handle (minus one handle-width), then fire change and re-render.
    function onLeftDrag() {
        var newValue = xScale.invert(d3.event.x);
        newValue = Math.min(scope.data[1]-xScale.invert(handleWidth),Math.max(newValue,scope.domain[0]));
        scope.data[0] = newValue;
        scope.dispatch.change(viz);
        update();
    }

    // Right handle dragged: clamp the high value between the left handle
    // (plus one handle-width) and the domain maximum, then fire change and re-render.
    function onRightDrag() {
        var newValue = xScale.invert(d3.event.x);
        newValue = Math.max(scope.data[0]+xScale.invert(handleWidth),Math.min(newValue,scope.domain[1]));
        scope.data[1] = newValue;
        scope.dispatch.change(viz);
        update();
    }

    // Center pane dragged: shift both values by the drag delta while keeping
    // their distance constant, clamped so neither end leaves the domain.
    function onCenterDrag() {
        var newValue = xScale.invert(d3.event.dx) + scope.data[0];
        newValue = Math.min(scope.data[1],Math.max(newValue,scope.domain[0]));
        var diff = scope.data[1]-scope.data[0];
        newValue = Math.min(scope.domain[1]-diff,newValue);
        scope.data[0] = newValue;
        scope.data[1] = newValue + diff;
        scope.dispatch.change(viz);
        update();
    }

    // This is our public update call that all viz components implement
    viz.update = function () {
        update();
        return viz;
    };

    // Returns our glorious viz component :)
    return viz;
};
# update_version.py
import os
import shutil
from build import replace_text
def update_version():
    """Sync the VERSION constant in app.py with the contents of version.txt.

    Reads the target version, snapshots app.py to a version-stamped backup,
    then rewrites the VERSION assignment in place via ``replace_text``.
    """
    # Step 1: the new version number lives in version.txt.
    with open('version.txt') as fh:
        version = fh.read().strip()

    # Step 2: keep a backup copy of app.py before touching it.
    source = 'app.py'
    shutil.copyfile(source, f'app_backup_{version}.py')

    # Step 3: rewrite the VERSION assignment with the new number.
    replace_text(source, 'VERSION =', f'VERSION = "{version}"')


if __name__ == "__main__":
    update_version()
#!/bin/sh
# **********************************************************************
# *
# Delegates to the create_build_id tool, forwarding all arguments.
# PERL_BIN and BUILD_ROOT are expected to be provided by the calling environment.
"${PERL_BIN}" "${BUILD_ROOT}/actools/bin/create_build_id" "$@"
|
#!/bin/bash
# Polybar/status-bar module: print pending pacman and AUR update counts, or
# a dimmed marker when the system is fully up to date.
pac=$(checkupdates | wc -l)
aur=$(cower -u | wc -l)
total=$((pac + aur))

if [ "$total" -ne 0 ]; then
    echo "$pac %{F#7a7a7a}%{F-} $aur"
else
    echo "%{F#BB6461}%{F-}"
fi
|
#!/bin/env ruby
## encoding: utf-8

# Gem entry point: load the version constant, activate the Bundler-managed
# gem environment, then pull in the main implementation.
require File.join(File.dirname(__FILE__), './icecream/version')

require 'rubygems'
require 'bundler/setup'

# common dependencies

# internal dependencies
require File.join(File.dirname(__FILE__), './icecream/icecream')

# Top-level namespace for the gem; populated by the required files above.
module IceCream
end
#!/bin/bash
# Run the niina binary with the rendaGS9 profile from this script's own
# directory, forwarding all arguments; stderr is redirected to rendaGS9.err.
cd "$(dirname "$0")"
exec ./niina --type="rendaGS9" "$@" 2> rendaGS9.err
#!/bin/bash
# Package released UDPipe models for a given model family and version:
# create the release directory, copy the license and generated README files,
# and symlink each trained model under its long language name.
set -e

[ "$#" -ge 2 ] || { echo Usage: $0 model_family model_version >&2; exit 1; }
family="$1"
version="$2"

dir="udpipe-$family-$version"
[ -d "$dir" ] && { echo Release $dir already exists >&2; exit 1; }
mkdir "$dir"

# License plus README files generated from the doc sources.
cp LICENSE.CC-BY-NC-SA-4 "$dir"/LICENSE
make -C ../doc manual_model_${family}_readme.{txt,html}
cp ../doc/manual_model_${family}_readme.txt "$dir"/README
cp ../doc/manual_model_${family}_readme.html "$dir"/README.html
make -C ../doc clean

# Symlink every trained model, named after the long language name looked up
# in lang-names.txt (column 2, keyed by language code).
for model in ../training/models-$family/*/*.model; do
    lang=`basename $model .model`
    long_name=`awk "/^$lang /{print \\\$2}" lang-names.txt`
    [ -z "$long_name" ] && { echo Unknown language code $lang >&2; exit 1; }
    ln -s ../$model $dir/$long_name-$family-$version.udpipe
done
#!/bin/bash
# Build and install the warp-transducer RNN-T loss extension from a fresh
# git checkout: C++ core via CMake, then the PyTorch binding.
set -euo pipefail

if [ $# != 0 ]; then
    echo "Usage: $0"
    exit 1;
fi

# TODO(kamo): Consider clang case
# Note: Requires gcc>=4.9.2 to build extensions with pytorch>=1.0
if python3 -c 'import torch as t;assert t.__version__[0] == "1"' &> /dev/null; then \
    python3 -c "from distutils.version import LooseVersion as V;assert V('$(gcc -dumpversion)') >= V('4.9.2'), 'Requires gcc>=4.9.2'"; \
fi

# Always start from a clean checkout.
rm -rf warp-transducer
git clone https://github.com/HawkAaron/warp-transducer.git
(
    set -euo pipefail
    cd warp-transducer
    mkdir build
    (
        set -euo pipefail
        cd build && cmake .. && make
    )
    (
        set -euo pipefail
        cd pytorch_binding && python3 setup.py install
    )
)
/**
 * Resolves asynchronously to the given token, normalized to a string.
 * Falsy tokens (undefined, null, '', 0, false) resolve to the empty string,
 * exactly matching the original `token || ''` behavior.
 *
 * An `async` function already returns a Promise, so the explicit
 * `new Promise(...)` wrapper in the original was the redundant
 * promise-constructor anti-pattern and has been removed.
 */
async function setToken(token) {
  return token || '';
}
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !windows
package test
// direct-tcpip functional tests
import (
"io"
"net"
"testing"
)
// TestDial verifies that a direct-tcpip channel can be opened through the
// SSH connection: it starts a plain local TCP listener and dials it through
// the server-side sshConn.
func TestDial(t *testing.T) {
	server := newServer(t)
	defer server.Shutdown()
	sshConn := server.Dial(clientConfig())
	defer sshConn.Close()

	// Local listener that replies with the peer's address and hangs up;
	// the accept loop ends when the listener is closed.
	l, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		t.Fatalf("Listen: %v", err)
	}
	defer l.Close()
	go func() {
		for {
			c, err := l.Accept()
			if err != nil {
				break
			}
			io.WriteString(c, c.RemoteAddr().String())
			c.Close()
		}
	}()

	// Dialing the listener through the SSH connection must succeed.
	conn, err := sshConn.Dial("tcp", l.Addr().String())
	if err != nil {
		t.Fatalf("Dial: %v", err)
	}
	defer conn.Close()
}
|
import re
def extract_urls_from_html(html):
    """Return the href targets of all anchor (``<a>``) tags in *html*, in order.

    Generalized from the original: ``href`` no longer has to be the first
    attribute of the tag, values may be single- or double-quoted, optional
    whitespace is allowed around ``=``, and matching is case-insensitive.
    Every URL the original pattern matched is still matched.

    :param html: HTML document or fragment as a string.
    :returns: list of URL strings, in document order.
    """
    pattern = r'<a\s[^>]*?href\s*=\s*["\']([^"\']+)["\']'
    return re.findall(pattern, html, flags=re.IGNORECASE)
# Quick self-check with the provided example fragment; the two anchor hrefs
# should be extracted in document order.
html = '''
</li>
<li>
<a href="https://www.example.com">
<i class="material-icons">home</i>
</a>
</li>
<li>
<a href="https://www.example.com/about">
<i class="material-icons">info</i>
</a>
</li>
'''
print(extract_urls_from_html(html))  # Output: ['https://www.example.com', 'https://www.example.com/about']
#include<bits/stdc++.h>
using namespace std;
// Reads n from stdin and prints the first n Fibonacci numbers separated by
// single spaces (same trailing-space format as the original).
//
// BUGFIX: the original unconditionally printed the seeds "0 1 " before the
// loop, so n = 0 and n = 1 both produced two numbers.  The rewrite prints
// exactly n values: nothing for n <= 0, "0 " for n = 1.  Using long long
// also keeps values exact past fib(46), where int overflowed.
int main(void)
{
    int n;
    cin >> n;
    long long prev = 0, curr = 1;
    for (int i = 0; i < n; ++i) {
        cout << prev << " ";
        long long next = prev + curr;
        prev = curr;
        curr = next;
    }
    return 0;
}
|
<reponame>chicofariasneto/FrogHelper<gh_stars>0
const { pool } = require('../../database/connection')
const {
user,
} = require('../model/userModel')
const {
checkUser,
} = require('./checkLogic')
/**
 * Add a user to a group unless they are already a member.
 *
 * @param {number|string} userId  id of the joining user
 * @param {number|string} groupId id of the group being joined
 * @param {string} username       display name stored with the membership
 * @returns {Promise<boolean>} false when the user was already in the
 *                             group, true after a successful insert
 */
const join = async(userId, groupId, username) => {
  // checkUser reports whether the (user, group) membership already exists.
  const check = await checkUser(userId, groupId)
  if (check)
    return false
  // Parameterized insert via the shared connection pool.
  await pool.query(user.insert, [userId, groupId, username])
  return true
}

module.exports = {
  join,
}
#!/bin/bash

# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Launches F_COUNT enclave service ("eservice") instances, each with its
# own identity, config file, and log file. Defaults below can be
# overridden via command-line options parsed with getopt.
F_USAGE='-c|--count services --cfile config -l|--loglevel [debug|info|warn]'
F_LOGLEVEL='info'
F_COUNT=1
F_OUTPUTDIR=''
F_BASENAME='eservice'
# Service home is the parent of the directory containing this script.
F_SERVICEHOME="$( cd -P "$( dirname ${BASH_SOURCE[0]} )/.." && pwd )"
F_LOGDIR=$F_SERVICEHOME/logs
F_CONFDIR=$F_SERVICEHOME/etc
F_LEDGERURL=''
F_CLEAN='no'

# -----------------------------------------------------------------
# Process command line arguments
# -----------------------------------------------------------------
TEMP=`getopt -o b:c:l:o: --long base:,clean,count:,help,loglevel:,output:,ledger: \
     -n 'es-start.sh' -- "$@"`
if [ $? != 0 ] ; then echo "Terminating..." >&2 ; exit 1 ; fi

eval set -- "$TEMP"
while true ; do
    case "$1" in
        -b|--base) F_BASENAME="$2" ; shift 2 ;;
        --clean) F_CLEAN="yes" ; shift 1 ;;
        -c|--count) F_COUNT="$2" ; shift 2 ;;
        --ledger) F_LEDGERURL="--ledger $2" ; shift 2 ;;
        -l|--loglevel) F_LOGLEVEL="$2" ; shift 2 ;;
        -o|--output) F_OUTPUTDIR="$2" ; shift 2 ;;
        --help) echo $F_USAGE ; exit 1 ;;
	--) shift ; break ;;
	*) echo "Internal error!" ; exit 1 ;;
    esac
done

# With no output directory, stdout/stderr of the services are discarded.
if [ "$F_OUTPUTDIR" != "" ] ; then
    mkdir -p $F_OUTPUTDIR
    rm -f $F_OUTPUTDIR/*
else
    EFILE=/dev/null
    OFILE=/dev/null
fi

# Start one background eservice per index, cleaning old logs (and,
# with --clean, the sealed enclave state) first.
for index in `seq 1 $F_COUNT` ; do
    IDENTITY="${F_BASENAME}$index"
    echo start enclave service $IDENTITY

    if [ "${F_CLEAN}" == "yes" ]; then
        rm -f "${F_SERVICEHOME}/data/${IDENTITY}.enc"
    fi

    rm -f $F_LOGDIR/$IDENTITY.log
    if [ "$F_OUTPUTDIR" != "" ] ; then
        EFILE="$F_OUTPUTDIR/$IDENTITY.err"
        OFILE="$F_OUTPUTDIR/$IDENTITY.out"
        rm -f $EFILE $OFILE
    fi

    # NOTE(review): the sleep appears to stagger service start-up;
    # confirm whether 5s is required or can be reduced.
    sleep 5
    eservice --identity ${IDENTITY} --config ${IDENTITY}.toml --config-dir ${F_CONFDIR} ${F_LEDGERURL} \
        --loglevel ${F_LOGLEVEL} --logfile ${F_LOGDIR}/${IDENTITY}.log 2> $EFILE > $OFILE &
done
<reponame>rjarman/Submarine
import {
DateOptions,
DistanceMatrixParam,
EarthRadius,
} from './typings/Typings';
export class Services {
  /** Locale options used for long-form date rendering. */
  private dateOptions: DateOptions;

  constructor() {
    this.dateOptions = {
      weekday: 'long',
      year: 'numeric',
      month: 'long',
      day: 'numeric',
    };
  }

  /** Convert an angle from degrees to radians. */
  private convertToRad(degree: number) {
    return (degree * Math.PI) / 180;
  }

  /**
   * Render a Unix timestamp (ms) as a pair of strings:
   * [localized time, localized long-form date], using the en-BD locale.
   */
  getDatetime(timestamp: number): string[] {
    const when = new Date(timestamp);
    return [
      when.toLocaleTimeString('en-BD'),
      when.toLocaleDateString('en-BD', this.dateOptions),
    ];
  }

  /**
   * Great-circle distance between origin and destination using the
   * Haversine formula:
   *
   *   a = sin²(Δφ/2) + cos φ1 ⋅ cos φ2 ⋅ sin²(Δλ/2)
   *   c = 2 ⋅ atan2(√a, √(1−a))
   *   d = R ⋅ c
   *
   * where φ is latitude, λ is longitude, and R is the earth's radius.
   * Angles are converted to radians before the trig calls.
   *
   * @returns distance in metres, rounded to 2 decimal places
   */
  getDistance(distanceMatrixParam: DistanceMatrixParam) {
    const { origin, destination } = distanceMatrixParam;

    const lat1Rad = this.convertToRad(origin.lat1);
    const lat2Rad = this.convertToRad(destination.lat2);
    const deltaLat = this.convertToRad(destination.lat2 - origin.lat1);
    const deltaLng = this.convertToRad(destination.lng2 - origin.lng1);

    const sinHalfLat = Math.sin(deltaLat / 2);
    const sinHalfLng = Math.sin(deltaLng / 2);
    const a =
      sinHalfLat * sinHalfLat +
      Math.cos(lat1Rad) * Math.cos(lat2Rad) * sinHalfLng * sinHalfLng;
    const c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));

    return Number((EarthRadius * c).toFixed(2));
  }
}
|
/**
* WatchMeContentProvider.java
*
* The Content Provider for the WatchMe application.
*
* @author lisastenberg
* @copyright (c) 2012 <NAME>, <NAME>, <NAME>, <NAME>
* @license MIT
*/
package se.chalmers.watchme.database;
import android.content.ContentProvider;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
public class WatchMeContentProvider extends ContentProvider {

	private DatabaseHelper db;

	/**
	 * The authority of the Content Provider
	 */
	public static final String AUTHORITY = "se.chalmers.watchme.database."
			+ "providers.WatchMeContentProvider";

	private static final String BASE_PATH_MOVIES = "movies";
	private static final String BASE_PATH_TAGS = "tags";
	private static final String BASE_PATH_HAS_TAG = "hastag";

	private static final int MOVIES = 10;
	private static final int MOVIES_ID = 20;
	private static final int TAGS = 30;
	private static final int TAGS_ID = 40;
	private static final int HAS_TAG = 50;

	/**
	 * The Uri that affects the Movie table.
	 */
	public static final Uri CONTENT_URI_MOVIES = Uri.parse("content://"
			+ AUTHORITY + "/" + BASE_PATH_MOVIES);

	/**
	 * The Uri that affects the Tags table.
	 */
	public static final Uri CONTENT_URI_TAGS = Uri.parse("content://"
			+ AUTHORITY + "/" + BASE_PATH_TAGS);

	/**
	 * The Uri that affects the HasTag table.
	 */
	public static final Uri CONTENT_URI_HAS_TAG = Uri.parse("content://"
			+ AUTHORITY + "/" + BASE_PATH_HAS_TAG);

	public static final String CONTENT_ITEM_TYPE = ContentResolver.CURSOR_ITEM_BASE_TYPE
			+ "/watchme";
	public static final String CONTENT_TYPE = ContentResolver.CURSOR_DIR_BASE_TYPE
			+ "/watchme";

	private static final UriMatcher sUriMatcher = new UriMatcher(
			UriMatcher.NO_MATCH);

	static {
		sUriMatcher.addURI(AUTHORITY, BASE_PATH_MOVIES, MOVIES);
		sUriMatcher.addURI(AUTHORITY, BASE_PATH_MOVIES + "/#", MOVIES_ID);
		sUriMatcher.addURI(AUTHORITY, BASE_PATH_TAGS, TAGS);
		sUriMatcher.addURI(AUTHORITY, BASE_PATH_TAGS + "/#", TAGS_ID);
		sUriMatcher.addURI(AUTHORITY, BASE_PATH_HAS_TAG, HAS_TAG);
	};

	/**
	 * Treats a null selection as the empty string so that appending an id
	 * clause does not concatenate with the literal string "null".
	 */
	private static String safeSelection(String selection) {
		return selection == null ? "" : selection;
	}

	/**
	 * Notifies observers of all three content Uris that data may have
	 * changed. (Previously duplicated in delete/insert/update.)
	 */
	private void notifyObservers() {
		getContext().getContentResolver()
				.notifyChange(CONTENT_URI_MOVIES, null);
		getContext().getContentResolver().notifyChange(CONTENT_URI_TAGS, null);
		getContext().getContentResolver().notifyChange(CONTENT_URI_HAS_TAG,
				null);
	}

	/**
	 * Deletes rows matching the Uri and selection. Deleting a movie also
	 * deletes any of its tags that are no longer attached to any movie;
	 * deleting a HasTag row (detaching a tag) likewise garbage-collects
	 * orphaned tags.
	 *
	 * @return the number of rows deleted from the primary table
	 */
	@Override
	public int delete(Uri uri, String selection, String[] selectionArgs) {
		SQLiteDatabase sqlDB = db.getWritableDatabase();
		int deletedRows;

		switch (sUriMatcher.match(uri)) {
		case MOVIES:
			/*
			 * selection is supposed to contain "_id = <movieId>" therefore
			 * movieSel is supposed to contain: " = <movieId>"
			 */
			String movieSel = selection.split(MoviesTable.COLUMN_MOVIE_ID)[1];

			// Query for all tags that is attached to the movie.
			Cursor movieCursor = sqlDB.query(HasTagTable.TABLE_HAS_TAG, null,
					HasTagTable.COLUMN_MOVIE_ID + movieSel, null, null, null,
					null);
			// Force the lazy cursor to materialize its result set now,
			// before the rows are deleted underneath it.
			movieCursor.getCount();

			// Delete the movie
			deletedRows = sqlDB.delete(MoviesTable.TABLE_MOVIES, selection,
					selectionArgs);

			/*
			 * If the movie had any attached tag: delete tags that are no
			 * longer attached to any movie.
			 */
			while (movieCursor.moveToNext()) {
				String tagSel = " = " + movieCursor.getString(1);
				Cursor tagCursor = sqlDB.query(HasTagTable.TABLE_HAS_TAG, null,
						HasTagTable.COLUMN_TAG_ID + tagSel, null, null, null,
						null);
				if (!tagCursor.moveToFirst()) {
					// If the tag isn't connected to any Movie, delete it.
					sqlDB.delete(TagsTable.TABLE_TAGS, TagsTable.COLUMN_TAG_ID
							+ tagSel, null);
				}
				tagCursor.close();
			}
			movieCursor.close();
			break;
		case MOVIES_ID:
			selection = safeSelection(selection)
					+ MoviesTable.COLUMN_MOVIE_ID + " = "
					+ uri.getLastPathSegment();
			deletedRows = sqlDB.delete(MoviesTable.TABLE_MOVIES, selection,
					selectionArgs);
			break;
		case TAGS:
			deletedRows = sqlDB.delete(TagsTable.TABLE_TAGS, selection,
					selectionArgs);
			break;
		case TAGS_ID:
			selection = safeSelection(selection)
					+ TagsTable.COLUMN_TAG_ID + " = "
					+ uri.getLastPathSegment();
			deletedRows = sqlDB.delete(TagsTable.TABLE_TAGS, selection,
					selectionArgs);
			break;
		case HAS_TAG:
			// In practice this is the same as detaching a tag from a movie.
			deletedRows = sqlDB.delete(HasTagTable.TABLE_HAS_TAG, selection,
					selectionArgs);
			/*
			 * selection is supposed to contain
			 * "movieid = <movieid> AND tagid = <tagid>"; splitting on the
			 * tag id column leaves " = <tagId>" in tagSelection.
			 */
			String tagSelection = selection.split(HasTagTable.COLUMN_TAG_ID)[1];

			/*
			 * Query for all movies attached to the tag.
			 */
			Cursor tagCursor = sqlDB.query(HasTagTable.TABLE_HAS_TAG, null,
					HasTagTable.COLUMN_TAG_ID + tagSelection, null, null, null,
					null);
			// If the tag isn't connected to any Movie, delete it.
			if (!tagCursor.moveToFirst()) {
				sqlDB.delete(TagsTable.TABLE_TAGS, TagsTable.COLUMN_TAG_ID
						+ tagSelection, null);
			}
			// BUGFIX: this cursor was previously leaked.
			tagCursor.close();
			break;
		default:
			throw new IllegalArgumentException("Unknown URI " + uri);
		}

		notifyObservers();
		return deletedRows;
	}

	@Override
	public String getType(Uri uri) {
		return null;
	}

	/**
	 * Inserts a movie, or attaches a tag to a movie (creating the tag on
	 * demand). Inserting via the TAGS Uri is unsupported because a tag
	 * cannot exist without being attached to a movie.
	 *
	 * @return a Uri whose last segment is the new row id (0 when nothing
	 *         was inserted, e.g. the movie already existed or the tag was
	 *         already attached)
	 */
	@Override
	public Uri insert(Uri uri, ContentValues values) {
		SQLiteDatabase sqlDB = db.getWritableDatabase();
		long id = 0;
		switch (sUriMatcher.match(uri)) {
		case MOVIES:
			String movieTitle = values.getAsString(MoviesTable.COLUMN_TITLE);
			Cursor movieCursor = sqlDB.query(MoviesTable.TABLE_MOVIES, null,
					MoviesTable.COLUMN_TITLE + " = \"" + movieTitle + "\"",
					null, null, null, null);
			// If the Movie doesn't exist, create it.
			if (movieCursor.getCount() == 0) {
				id = sqlDB.insert(MoviesTable.TABLE_MOVIES, null, values);
			}
			// BUGFIX: this cursor was previously leaked.
			movieCursor.close();
			break;
		case HAS_TAG:
			/* Check if the Tag exists. If it doesn't exist
			 * insert into database
			 */
			String tagName = values.getAsString(TagsTable.COLUMN_NAME);
			Cursor tagCursor = sqlDB.query(TagsTable.TABLE_TAGS, null,
					TagsTable.COLUMN_NAME + " = \"" + tagName + "\"", null,
					null, null, null);
			if (tagCursor.moveToFirst()) {
				// If the Tag already exist. Get the Id.
				id = Long.parseLong(tagCursor.getString(0));
				/*
				 * Check if the tag is already attached to the movie. Return 0
				 * as id.
				 */
				Cursor cursor = sqlDB.query(
						HasTagTable.TABLE_HAS_TAG, null,
						HasTagTable.COLUMN_MOVIE_ID + " = "
								+ values.getAsLong(MoviesTable.COLUMN_MOVIE_ID)
								+ " AND " + HasTagTable.COLUMN_TAG_ID + " = "
								+ id, null, null, null, null);
				if (cursor.moveToFirst()) {
					// BUGFIX: close both cursors before the early break out
					// of the switch (they were previously leaked here).
					cursor.close();
					tagCursor.close();
					id = 0;
					break;
				}
				cursor.close();
			} else {
				ContentValues tagValues = new ContentValues();
				tagValues.put(TagsTable.COLUMN_NAME, tagName);
				id = sqlDB.insert(TagsTable.TABLE_TAGS, null, tagValues);
			}
			tagCursor.close();

			// Values are numeric ids, so string-building here cannot inject,
			// but a parameterized statement would still be preferable.
			String sql = "INSERT INTO " + HasTagTable.TABLE_HAS_TAG
					+ " VALUES("
					+ values.getAsLong(MoviesTable.COLUMN_MOVIE_ID) + ", " + id
					+ ")";
			sqlDB.execSQL(sql);
			break;
		case TAGS:
			throw new UnsupportedOperationException("A tag can't exist "
					+ "without being attached to a movie");
		default:
			throw new IllegalArgumentException("Unknown URI" + uri);
		}

		notifyObservers();
		return Uri.parse(BASE_PATH_MOVIES + "/" + id);
	}

	@Override
	public boolean onCreate() {
		db = new DatabaseHelper(getContext());
		return true;
	}

	/**
	 * Queries the table addressed by the Uri. The HAS_TAG Uri queries a
	 * three-way join of Movies, HasTag, and Tags so callers can filter on
	 * attributes from any of the three tables.
	 */
	@Override
	public Cursor query(Uri uri, String[] projection, String selection,
			String[] selectionArgs, String sortOrder) {
		SQLiteDatabase sqlDB = db.getReadableDatabase();
		SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder();

		switch (sUriMatcher.match(uri)) {
		case MOVIES:
			queryBuilder.setTables(MoviesTable.TABLE_MOVIES);
			break;
		case MOVIES_ID:
			selection = safeSelection(selection)
					+ MoviesTable.COLUMN_MOVIE_ID + " = "
					+ uri.getLastPathSegment();
			queryBuilder.setTables(MoviesTable.TABLE_MOVIES);
			break;
		case TAGS:
			queryBuilder.setTables(TagsTable.TABLE_TAGS);
			break;
		case TAGS_ID:
			selection = safeSelection(selection)
					+ TagsTable.COLUMN_TAG_ID + " = "
					+ uri.getLastPathSegment();
			queryBuilder.setTables(TagsTable.TABLE_TAGS);
			break;
		case HAS_TAG:
			/*
			 * Join the tables Movies, HasTag and Tags to be able to query
			 * for any attribute in any table.
			 */
			String tables = MoviesTable.TABLE_MOVIES + " LEFT OUTER JOIN "
					+ HasTagTable.TABLE_HAS_TAG + " ON "
					+ MoviesTable.TABLE_MOVIES + "."
					+ MoviesTable.COLUMN_MOVIE_ID + " = "
					+ HasTagTable.TABLE_HAS_TAG + "."
					+ HasTagTable.COLUMN_MOVIE_ID + " LEFT OUTER JOIN "
					+ TagsTable.TABLE_TAGS + " ON " + HasTagTable.TABLE_HAS_TAG
					+ "." + HasTagTable.COLUMN_TAG_ID + " = "
					+ TagsTable.TABLE_TAGS + "." + TagsTable.COLUMN_TAG_ID;
			queryBuilder.setTables(tables);
			break;
		default:
			throw new IllegalArgumentException("Unknown URI " + uri);
		}

		Cursor cursor = queryBuilder.query(sqlDB, projection, selection,
				selectionArgs, null, null, sortOrder);
		cursor.setNotificationUri(getContext().getContentResolver(), uri);
		return cursor;
	}

	/**
	 * Updates rows in the Movies or Tags table addressed by the Uri.
	 *
	 * @return the number of rows updated
	 */
	@Override
	public int update(Uri uri, ContentValues values, String selection,
			String[] selectionArgs) {
		SQLiteDatabase sqlDB = db.getWritableDatabase();
		int updatedRows;

		switch (sUriMatcher.match(uri)) {
		case MOVIES:
			// Nothing need to be added to selection
			updatedRows = sqlDB.update(MoviesTable.TABLE_MOVIES, values,
					selection, selectionArgs);
			break;
		case MOVIES_ID:
			selection = safeSelection(selection)
					+ MoviesTable.COLUMN_MOVIE_ID + " = "
					+ uri.getLastPathSegment();
			updatedRows = sqlDB.update(MoviesTable.TABLE_MOVIES, values,
					selection, selectionArgs);
			break;
		case TAGS:
			// Nothing need to be added to selection
			updatedRows = sqlDB.update(TagsTable.TABLE_TAGS, values, selection,
					selectionArgs);
			break;
		case TAGS_ID:
			selection = safeSelection(selection)
					+ TagsTable.COLUMN_TAG_ID + " = "
					+ uri.getLastPathSegment();
			updatedRows = sqlDB.update(TagsTable.TABLE_TAGS, values, selection,
					selectionArgs);
			break;
		default:
			throw new IllegalArgumentException("Unknown URI " + uri);
		}

		notifyObservers();
		return updatedRows;
	}
}
|
// Registers the AreasFilter factory on the `ki.resources` Angular module
// and exports the module's name for consumers to declare as a dependency.
var AreasFilter = require('./areas.filter.service');

module.exports = require('angular')
  .module('ki.resources', [])
  .factory('AreasFilter', AreasFilter)
  .name;
<gh_stars>0
package controller.channel.messages;
/**
 * Base class of a message. Concrete message types exchanged on a channel
 * extend this marker class; it currently declares no shared state or
 * behavior of its own.
 *
 * @author ramilmsh
 */
public abstract class Message {
}
|
def all_same_length(strings):
    """Return True if every string in ``strings`` has the same length.

    An empty or single-element sequence is trivially uniform, so the
    result is True in those cases (matching the original loop behavior).
    """
    # Comparing each length against the first is equivalent to checking
    # that there is at most one distinct length overall.
    return len({len(s) for s in strings}) <= 1
package mock.media;
import org.jitsi.service.neomedia.*;
import org.jitsi.service.neomedia.rtp.*;
import java.awt.*;
import java.util.*;
/**
 * No-op {@link MediaStreamStats} implementation for tests: every numeric
 * statistic reports 0, every object-returning accessor reports null, and
 * the mutator/listener-registration methods do nothing.
 */
public class MockMediaStreamStats
    implements MediaStreamStats
{
    @Override
    public double getDownloadJitterMs()
    {
        return 0;
    }

    @Override
    public double getDownloadPercentLoss()
    {
        return 0;
    }

    @Override
    public double getDownloadRateKiloBitPerSec()
    {
        return 0;
    }

    @Override
    public Dimension getDownloadVideoSize()
    {
        return null;
    }

    @Override
    public String getEncoding()
    {
        return null;
    }

    @Override
    public String getEncodingClockRate()
    {
        return null;
    }

    @Override
    public int getJitterBufferDelayMs()
    {
        return 0;
    }

    @Override
    public int getJitterBufferDelayPackets()
    {
        return 0;
    }

    @Override
    public String getLocalIPAddress()
    {
        return null;
    }

    @Override
    public int getLocalPort()
    {
        return 0;
    }

    @Override
    public long getNbReceivedBytes()
    {
        return 0;
    }

    @Override
    public long getNbSentBytes()
    {
        return 0;
    }

    @Override
    public long getNbDiscarded()
    {
        return 0;
    }

    @Override
    public int getNbDiscardedFull()
    {
        return 0;
    }

    @Override
    public int getNbDiscardedLate()
    {
        return 0;
    }

    @Override
    public int getNbDiscardedReset()
    {
        return 0;
    }

    @Override
    public int getNbDiscardedShrink()
    {
        return 0;
    }

    @Override
    public long getNbFec()
    {
        return 0;
    }

    @Override
    public long getNbPackets()
    {
        return 0;
    }

    @Override
    public long getNbPacketsLost()
    {
        return 0;
    }

    @Override
    public int getPacketQueueCountPackets()
    {
        return 0;
    }

    @Override
    public int getPacketQueueSize()
    {
        return 0;
    }

    @Override
    public double getPercentDiscarded()
    {
        return 0;
    }

    @Override
    public String getRemoteIPAddress()
    {
        return null;
    }

    @Override
    public int getRemotePort()
    {
        return 0;
    }

    @Override
    public RTCPReports getRTCPReports()
    {
        return null;
    }

    @Override
    public long getRttMs()
    {
        return 0;
    }

    @Override
    public double getUploadJitterMs()
    {
        return 0;
    }

    @Override
    public double getUploadPercentLoss()
    {
        return 0;
    }

    @Override
    public double getUploadRateKiloBitPerSec()
    {
        return 0;
    }

    @Override
    public Dimension getUploadVideoSize()
    {
        return null;
    }

    @Override
    public boolean isAdaptiveBufferEnabled()
    {
        return false;
    }

    // Intentionally a no-op: the mock has no live stats to refresh.
    @Override
    public void updateStats()
    {
    }

    @Override
    public double getMinDownloadJitterMs()
    {
        return 0;
    }

    @Override
    public double getMaxDownloadJitterMs()
    {
        return 0;
    }

    @Override
    public double getAvgDownloadJitterMs()
    {
        return 0;
    }

    @Override
    public double getMinUploadJitterMs()
    {
        return 0;
    }

    @Override
    public double getMaxUploadJitterMs()
    {
        return 0;
    }

    @Override
    public double getAvgUploadJitterMs()
    {
        return 0;
    }

    @Override
    public long getNbPacketsSent()
    {
        return 0;
    }

    @Override
    public long getNbPacketsReceived()
    {
        return 0;
    }

    @Override
    public long getDownloadNbPacketLost()
    {
        return 0;
    }

    @Override
    public long getUploadNbPacketLost()
    {
        return 0;
    }

    // Listener registrations are silently dropped; the mock never fires
    // NACK or REMB events.
    @Override
    public void addNackListener(NACKListener nackListener)
    {
    }

    @Override
    public void addRembListener(REMBListener rembListener)
    {
    }

    @Override
    public long getSendingBitrate()
    {
        return 0;
    }

    @Override
    public Collection<? extends MediaStreamSSRCStats> getReceivedStats()
    {
        return null;
    }

    @Override
    public Collection<? extends MediaStreamSSRCStats> getSentStats()
    {
        return null;
    }
}
|
#!/bin/bash
# SLURM array job: 12 tasks, each running one seed of the PC-DARTS search.
#SBATCH -p bosch_gpu-rtx2080 # partition (queue)
#SBATCH --mem 10000 # memory pool for all cores (8GB)
#SBATCH -t 11-00:00 # time (D-HH:MM)
#SBATCH -c 2 # number of cores
#SBATCH -a 1-12 # array size
#SBATCH --gres=gpu:1 # reserves four GPUs
#SBATCH -D /home/siemsj/projects/darts_weight_sharing_analysis # Change working_dir
# NOTE(review): `$USER_` and `%Y-%m-%d` in the filenames below are not
# expanded by SLURM's filename-pattern rules — confirm the intended log
# file names actually appear under log/.
#SBATCH -o log/log_$USER_%Y-%m-%d.out # STDOUT (the folder log has to be created prior to running or this won't work)
#SBATCH -e log/err_$USER_%Y-%m-%d.err # STDERR (the folder log has to be created prior to running or this won't work)
#SBATCH -J PC_DARTS_NASBENCH # sets the job name. If not specified, the file name will be used as job name
# #SBATCH --mail-type=END,FAIL # (recive mails about end and timeouts/crashes of your job)

# Print some information about the job to STDOUT
echo "Workingdir: $PWD";
echo "Started at $(date)";
echo "Running job $SLURM_JOB_NAME using $SLURM_JOB_CPUS_PER_NODE cpus per node with given JID $SLURM_JOB_ID on queue $SLURM_JOB_PARTITION";

# Activate conda environment
source ~/.bashrc
conda activate pytorch1.3

# Map this array task's index (1..12) to its seed: each task runs exactly
# one seed, then exits with the training script's status.
gpu_counter=1
for seed in {1..12}
do
    # Job to perform
    if [ $gpu_counter -eq $SLURM_ARRAY_TASK_ID ]; then
        PYTHONPATH=$PWD python optimizers/pc_darts/train_search.py --seed=${seed} --save=baseline --search_space=3
        exit $?
    fi
    let gpu_counter+=1
done

# Print some Information about the end-time to STDOUT
echo "DONE";
echo "Finished at $(date)";
/**
 * Perform a DNS lookup for `domain` using dig(1) and return the first
 * IPv4-looking address found in the answer section.
 *
 * @param {string} domain hostname to resolve (may be untrusted input)
 * @returns {string} the IPv4 address, or a descriptive error message
 */
function digga(domain) {
    const { execFileSync } = require('child_process');
    try {
        // SECURITY FIX: execFileSync passes `domain` as a literal argv
        // element, so shell metacharacters in untrusted input cannot
        // inject commands. The previous implementation interpolated the
        // domain into a shell command line via execSync.
        const result = execFileSync(
            'dig',
            ['+nocmd', domain, 'any', '+multiline', '+noall', '+answer'],
            { encoding: 'utf8' }
        );

        // Extract the first dotted-quad from the answer text.
        const ipRegex = /(?:[0-9]{1,3}\.){3}[0-9]{1,3}/;
        const match = result.match(ipRegex);

        if (match) {
            // Return the extracted IP address
            return match[0];
        } else {
            // If no IP address is found, return an error message
            return 'No IP address found for the domain';
        }
    } catch (error) {
        // Handle failures of the dig invocation itself (missing binary,
        // non-zero exit, etc.).
        return `Error executing dig command: ${error.message}`;
    }
}
#!/bin/bash
# Run Orthanc from the script's own directory so the relative config
# file path resolves regardless of the caller's working directory.
cd "$(dirname "$0")"
./Orthanc configOSX.json
#!/bin/bash
#
# Generates Objective-C gRPC bindings for the kin-api and google protos:
# clones the proto sources into temp dirs, copies the relevant .proto
# files, and runs protoc with the Objective-C gRPC plugin into gen/.

# Ensure we're excuting from script directory
cd "$(dirname ${BASH_SOURCE[0]})"

# exit when any command fails
set -e

GEN_PATH="gen"
GIT_TEMP_PATH=$(mktemp -d -t 'kinprotosgittmp')
KIN_API_GIT_PATH="${GIT_TEMP_PATH}/kin-api"
VALIDATE_GIT_PATH="${GIT_TEMP_PATH}/validate"
MODEL_TEMP_PATH=$(mktemp -d -t 'kinprotostmp')
IMPORT_PATHS="${MODEL_TEMP_PATH}/proto:${MODEL_TEMP_PATH}"
PODS_ROOT="../Pods"
#PROTOC="${PODS_ROOT}/!ProtoCompiler/protoc"
#PLUGIN="${PODS_ROOT}/!ProtoCompiler-gRPCPlugin/grpc_objective_c_plugin"
PROTOC="protoc"
PLUGIN="grpc_objective_c_plugin"

# Remove both temp dirs whether the script succeeds or fails.
function cleanup {
  rm -rf ${GIT_TEMP_PATH}
  rm -rf ${MODEL_TEMP_PATH}
}
# register the cleanup function to be called on the EXIT signal
trap cleanup EXIT

echo "Cloning kin-api into ${KIN_API_GIT_PATH}"
git clone -b master --single-branch git@github.com:kinecosystem/agora-api-internal.git ${KIN_API_GIT_PATH}

echo "Cloning protoc-gen-validate into ${VALIDATE_GIT_PATH}"
git clone -b master --single-branch git@github.com:envoyproxy/protoc-gen-validate.git $VALIDATE_GIT_PATH

# Find only the protos under /v3/.
for path in $(find ${KIN_API_GIT_PATH}/proto -name 'v3' -type d -print0 | xargs -0 -n1); do
  path_folder_name=$(echo ${path} | sed 's/.*\(proto\)/\1/g')
  source_folder_name=$(echo ${KIN_API_GIT_PATH}/${path_folder_name})
  dest_folder_name=$(dirname ${MODEL_TEMP_PATH}/${path_folder_name})
  mkdir -p ${dest_folder_name}
  cp -r ${source_folder_name} ${dest_folder_name}
  echo "Copying to ${dest_folder_name}"
done

# Copy validate.proto
path_folder_name=$(echo "validate")
source_folder_name=$(echo ${VALIDATE_GIT_PATH}/${path_folder_name})
dest_folder_name=$(dirname ${MODEL_TEMP_PATH}/${path_folder_name})
mkdir -p ${dest_folder_name}
cp -r ${source_folder_name} ${dest_folder_name}
echo "Copying from ${source_folder_name} to ${dest_folder_name}"

# Clean GEN_PATH
rm -rf ${GEN_PATH}
mkdir -p ${GEN_PATH}

# Build kin-api
KIN_API_MOBILE_IMPORT_PATHS=${IMPORT_PATHS}
KIN_API_GEN_PATH=${GEN_PATH}
mkdir -p ${KIN_API_GEN_PATH}
for i in $(find ${MODEL_TEMP_PATH} -name '*.proto' -print0 | xargs -0 -n1 dirname | sort | uniq); do
  # BUGFIX: was `echo $(PWD)`, which tries to run a command named `PWD`;
  # the intent is to print the current working directory.
  echo "$PWD"
  echo "Building ${i}"
  ${PROTOC} -I${KIN_API_MOBILE_IMPORT_PATHS} \
    ${i}/*.proto \
    --proto_path=. \
    --objc_out=${KIN_API_GEN_PATH} \
    --grpc_out=${KIN_API_GEN_PATH} \
    --plugin=protoc-gen-grpc=${PLUGIN}
done

# Build google
GOOGLE_MODEL_COMMON_GEN_PATH="${GEN_PATH}"
mkdir -p ${GOOGLE_MODEL_COMMON_GEN_PATH}
for i in $(find google -name '*.proto' -print0 | xargs -0 -n1 dirname | sort | uniq); do
  echo "Building ${i}"
  ${PROTOC} \
    $i/*.proto \
    --proto_path=. \
    --objc_out=${GOOGLE_MODEL_COMMON_GEN_PATH} \
    --grpc_out=${GOOGLE_MODEL_COMMON_GEN_PATH} \
    --plugin=protoc-gen-grpc=${PLUGIN}
done
#!/usr/bin/env bash
# Build the MicroPython cross-compiler from the project checkout.
cd $PROJECT_DIR
cd micropython
# BUGFIX: the cross-compiler lives in mpy-cross/; "mypy-cross" was a typo
# and make would fail with "No such file or directory".
make -C mpy-cross
package org.dimdev.rift.util;
import net.minecraft.nbt.NBTTagCompound;
import javax.annotation.Nonnull;
/**
 * Base interface for (de)serializable objects to serialize
 * themselves to and from {@link NBTTagCompound} tag compounds.
 */
public interface NBTSerializable {
    /**
     * Writes this object's data to the given compound
     * @param compound The tag compound to be written to
     * @return The written tag compound
     */
    @Nonnull
    NBTTagCompound serialize(@Nonnull NBTTagCompound compound);

    /**
     * Reads this object's data from the given compound
     * @param compound The tag compound to be read from
     * @return The given tag compound
     */
    @Nonnull
    NBTTagCompound deserialize(@Nonnull NBTTagCompound compound);

    /**
     * Writes this object's data to a new tag compound.
     * Convenience default that delegates to {@link #serialize} with a
     * freshly created, empty compound.
     * @return The written tag compound
     */
    @Nonnull
    default NBTTagCompound writeToNBT() {
        return this.serialize(new NBTTagCompound());
    }
}
|
/// <summary>
/// Basic information about an attribute; base type for detailed variants.
/// </summary>
public class AttributeInformation
{
    /// <summary>Name of the attribute.</summary>
    public string AttributeName { get; set; }
}
/// <summary>
/// Attribute information extended with call counts and the step
/// definitions generated for the attribute.
/// </summary>
public class AttributeInformationDetailed : AttributeInformation
{
    /// <summary>Number of times the attribute was invoked.</summary>
    public int NumberOfCalls { get; set; }

    /// <summary>Generated step definitions; may be null when none exist.</summary>
    public IEnumerable<StepDetails> GeneratedStepDefinitions { get; set; }

    /// <summary>
    /// Totals the generated step definitions across all attributes,
    /// treating a null <see cref="GeneratedStepDefinitions"/> as empty.
    /// </summary>
    /// <param name="detailedAttributes">Attributes to total over.</param>
    /// <returns>The combined step definition count.</returns>
    public static int GetTotalStepDefinitions(IEnumerable<AttributeInformationDetailed> detailedAttributes)
    {
        // LINQ Sum with a null-coalescing count replaces the manual
        // accumulator loop; behavior for null collections is unchanged.
        return detailedAttributes.Sum(a => a.GeneratedStepDefinitions?.Count() ?? 0);
    }
}
/// <summary>
/// Details of a single generated step definition. Currently a placeholder;
/// properties and methods are to be defined as the model grows.
/// </summary>
public class StepDetails
{
    // Define properties and methods for StepDetails class
}
class Program
{
    // Demonstrates GetTotalStepDefinitions over two sample attributes
    // (2 + 3 step definitions => prints 5).
    static void Main()
    {
        // Sample usage
        var detailedAttribute1 = new AttributeInformationDetailed
        {
            AttributeName = "Attribute1",
            NumberOfCalls = 5,
            GeneratedStepDefinitions = new List<StepDetails>
            {
                new StepDetails(),
                new StepDetails()
            }
        };

        var detailedAttribute2 = new AttributeInformationDetailed
        {
            AttributeName = "Attribute2",
            NumberOfCalls = 3,
            GeneratedStepDefinitions = new List<StepDetails>
            {
                new StepDetails(),
                new StepDetails(),
                new StepDetails()
            }
        };

        var detailedAttributes = new List<AttributeInformationDetailed> { detailedAttribute1, detailedAttribute2 };

        int totalStepDefinitions = AttributeInformationDetailed.GetTotalStepDefinitions(detailedAttributes);
        Console.WriteLine($"Total step definitions: {totalStepDefinitions}");
    }
}
# == Schema Information
#
# Table name: favorites
#
# id :bigint not null, primary key
# favoritable_type :string indexed => [favoritable_id], indexed => [favoritable_id]
# favoritable_id :bigint indexed => [favoritable_type], indexed => [favoritable_type]
# user_id :bigint indexed
#
# Indexes
#
# index_favorites_on_favoritable_id_and_favoritable_type (favoritable_id,favoritable_type) UNIQUE
# index_favorites_on_favoritable_type_and_favoritable_id (favoritable_type,favoritable_id)
# index_favorites_on_user_id (user_id)
#
# Join model recording that a user favorited a polymorphic target
# (Category, Account, or ExpenseLog).
class Favorite < ApplicationRecord
  belongs_to :favoritable, polymorphic: true
  belongs_to :user, inverse_of: :favorites

  # A user may favorite a given (favoritable_id, favoritable_type) pair
  # only once.
  validates :user_id, uniqueness: {
    scope: [:favoritable_id, :favoritable_type],
    message: 'can only favorite an item once'
  }

  # Convenience scopes per favoritable type, plus per-user filtering.
  scope :categories, -> { where(favoritable_type: 'Category') }
  scope :accounts, -> { where(favoritable_type: 'Account') }
  scope :expense_logs, -> { where(favoritable_type: 'ExpenseLog') }
  scope :for_user, -> (user) { where(user_id: user.id) }
end
|
#!/bin/bash
set -e

# Run from the script's own directory so the relative jar and
# test-vectors paths resolve.
cd `dirname "$0"`

# CLI invocation used to verify each test vector file.
check="java -jar target/boku-http-auth-tools-1.2-main.jar check"

# Verify every vector; `set -e` aborts on the first failing check.
for file in test-vectors/*; do
	echo -n "$file: "
	$check -quiet $file
done
|
import { JsonType } from '@useoptic/optic-domain';
import { ChangeType } from './changes';
import { IContribution } from './contributions';
// Types for rendering shapes and fields
/** Renderable view of a single object field and its possible shapes. */
export interface IFieldRenderer {
  fieldId: string;
  name: string;
  shapeId: string;
  // One entry per shape the field's value can take.
  shapeChoices: IShapeRenderer[];
  required: boolean;
  changes: ChangeType | null;
  contributions: Record<string, string>;
  additionalAttributes?: string[];
}

// Used to render an objects field details and contributions
export interface IFieldDetails {
  fieldId: string;
  name: string;
  contribution: IContribution;
  shapes: IShapeRenderer[];
  // Nesting depth within the parent object, for indentation.
  depth: number;
  required: boolean;
}

// Used to render query parameters
export type QueryParameters = Record<string, IFieldRenderer>;

/**
 * Discriminated union over `jsonType`: exactly one of `asObject` /
 * `asArray` is populated (objects and arrays respectively), and neither
 * is present for primitive JSON types.
 */
export type IShapeRenderer =
  | {
      shapeId: string;
      jsonType: JsonType.OBJECT;
      asArray?: undefined;
      asObject: {
        fields: IFieldRenderer[];
      };
    }
  | {
      shapeId: string;
      jsonType: JsonType.ARRAY;
      asArray: IArrayRender;
      asObject?: undefined;
    }
  | {
      shapeId: string;
      jsonType: Exclude<JsonType, JsonType.OBJECT | JsonType.ARRAY>;
      asArray?: undefined;
      asObject?: undefined;
    };

/** Shape information for array items. */
export interface IArrayRender {
  shapeChoices: IShapeRenderer[];
  shapeId: string;
}
|
package com.kevinwilde.sitecrawler.masternodesonline.service.graphql;
import com.kevinwilde.graphqljavaclient.GraphQlClient;
import com.kevinwilde.sitecrawler.masternodesonline.domain.githubInforesponse.Data;
import com.kevinwilde.sitecrawler.masternodesonline.domain.githubInforesponse.GithubInfoResponse;
import com.kevinwilde.sitecrawler.masternodesonline.domain.githubInforesponse.Repository;
import com.kevinwilde.sitecrawler.masternodesonline.factory.GithubGraphQlQueryFactory;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import static junit.framework.TestCase.assertNull;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for GithubGraphQlQueryService: verifies the commit-count
 * query is built via the factory, executed against the GitHub GraphQL
 * endpoint, and that a missing repository yields a null result.
 */
@RunWith(MockitoJUnitRunner.class)
public class GithubGraphQlQueryServiceTest {

    @InjectMocks
    private GithubGraphQlQueryService classUnderTest;

    @Mock
    private GithubGraphQlQueryFactory githubGraphQlQueryFactory;

    @Mock
    private GraphQlClient graphQlClient;

    // Deep stubs let the tests stub chained calls like
    // githubInfoResponse.getData().getRepository() in one line.
    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private GithubInfoResponse githubInfoResponse;

    @Mock(answer = Answers.RETURNS_DEEP_STUBS)
    private Repository repository;

    @Test
    public void retrieveMasternodeGithubTotalCommits_retrievesMasternodeGithubTotalCommits(){
        when(githubInfoResponse.getData().getRepository()).thenReturn(repository);
        String repositoryOwner = "repositoryOwner";
        String repositoryName = "repositoryName";
        String query = "query";
        when(githubGraphQlQueryFactory.buildCommitsQuery(repositoryOwner, repositoryName)).thenReturn(query);
        when(graphQlClient.execute(GithubGraphQlQueryService.HTTPS_API_GITHUB_COM_GRAPHQL,
                query,
                GithubInfoResponse.class)).thenReturn(githubInfoResponse);

        Integer totalCommits = classUnderTest.retrieveMasternodeGithubTotalCommits(repositoryOwner, repositoryName);

        assertNotNull(totalCommits);
        verify(githubGraphQlQueryFactory).buildCommitsQuery(repositoryOwner, repositoryName);
        verify(graphQlClient).execute(GithubGraphQlQueryService.HTTPS_API_GITHUB_COM_GRAPHQL,
                query,
                GithubInfoResponse.class);
    }

    // When the GraphQL response carries no repository (e.g. repo not
    // found), the service is expected to return null rather than throw.
    @Test
    public void retrieveMasternodeGithubTotalCommits_handlesRepositoryNotFound(){
        when(githubInfoResponse.getData().getRepository()).thenReturn(null);
        String repositoryOwner = "repositoryOwner";
        String repositoryName = "repositoryName";
        String query = "query";
        when(githubGraphQlQueryFactory.buildCommitsQuery(repositoryOwner, repositoryName)).thenReturn(query);
        when(graphQlClient.execute(GithubGraphQlQueryService.HTTPS_API_GITHUB_COM_GRAPHQL,
                query,
                GithubInfoResponse.class)).thenReturn(githubInfoResponse);

        Integer totalCommits = classUnderTest.retrieveMasternodeGithubTotalCommits(repositoryOwner, repositoryName);

        assertNull(totalCommits);
        verify(githubGraphQlQueryFactory).buildCommitsQuery(repositoryOwner, repositoryName);
        verify(graphQlClient).execute(GithubGraphQlQueryService.HTTPS_API_GITHUB_COM_GRAPHQL,
                query,
                GithubInfoResponse.class);
    }
}
|
<reponame>carpaltunnel/metalus
package com.acxiom.pipeline
import com.acxiom.pipeline.audits.{AuditType, ExecutionAudit}
import com.acxiom.pipeline.drivers.{DefaultPipelineDriver, DriverSetup}
import com.acxiom.pipeline.utils.DriverUtils
import org.apache.commons.io.FileUtils
import org.apache.hadoop.io.LongWritable
import org.apache.http.client.entity.UrlEncodedFormEntity
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.scalatest.{BeforeAndAfterAll, FunSpec, GivenWhenThen, Suite}
import java.io.File
class SparkSuiteTests extends FunSpec with BeforeAndAfterAll with GivenWhenThen with Suite {
override def beforeAll() {
Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
Logger.getLogger("org.apache.hadoop").setLevel(Level.WARN)
Logger.getLogger("com.acxiom.pipeline").setLevel(Level.DEBUG)
SparkTestHelper.sparkConf = DriverUtils.createSparkConf(Array(classOf[LongWritable], classOf[UrlEncodedFormEntity]))
.setMaster(SparkTestHelper.MASTER)
.setAppName(SparkTestHelper.APPNAME)
SparkTestHelper.sparkConf.set("spark.hadoop.io.compression.codecs",
",org.apache.hadoop.io.compress.BZip2Codec,org.apache.hadoop.io.compress.DeflateCodec," +
"org.apache.hadoop.io.compress.GzipCodec" +
"hadoop.io.compress.Lz4Codec,org.apache.hadoop.io.compress.SnappyCodec")
SparkTestHelper.sparkSession = SparkSession.builder().config(SparkTestHelper.sparkConf).getOrCreate()
// cleanup spark-warehouse and user-warehouse directories
FileUtils.deleteDirectory(new File("spark-warehouse"))
FileUtils.deleteDirectory(new File("user-warehouse"))
}
/** Stops the shared SparkSession, restores the root log level and cleans up warehouse dirs. */
override def afterAll() {
  SparkTestHelper.sparkSession.stop()
  Logger.getRootLogger.setLevel(Level.INFO)
  // cleanup spark-warehouse and user-warehouse directories
  FileUtils.deleteDirectory(new File("spark-warehouse"))
  FileUtils.deleteDirectory(new File("user-warehouse"))
}
// End-to-end tests that run canned pipelines (see SparkTestDriverSetup) through
// DefaultPipelineDriver, using custom PipelineListener implementations to record
// validations that are asserted after the driver finishes.
describe("DefaultPipelineDriver") {
  it("Should fail when there is no execution plan") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "noPipelines",
      "--globalInput", "global-input-value")
    SparkTestHelper.pipelineListener = DefaultPipelineListener()
    // An empty execution plan is expected to make the driver throw
    val thrown = intercept[IllegalStateException] {
      DefaultPipelineDriver.main(args.toArray)
    }
    assert(thrown.getMessage == "Unable to obtain valid execution plan. Please check the DriverSetup class: com.acxiom.pipeline.SparkTestDriverSetup")
  }
  it("Should run a basic pipeline") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "basic",
      "--globalInput", "global-input-value")
    val results = new ListenerValidations
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def pipelineStepFinished(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        step.id.getOrElse("") match {
          case "GLOBALVALUESTEP" =>
            results.addValidation("GLOBALVALUESTEP return value is incorrect",
              pipelineContext.parameters.getParametersByPipelineId("1").get.parameters("GLOBALVALUESTEP")
                .asInstanceOf[PipelineStepResponse].primaryReturn.get.asInstanceOf[String] == "global-input-value")
          case _ =>
        }
        None
      }
      override def registerStepException(exception: PipelineStepException, pipelineContext: PipelineContext): Unit = {
        // NOTE(review): no catch-all case — any exception other than PauseException
        // would surface here as a MatchError; confirm that is intended.
        exception match {
          case pe: PauseException =>
            results.addValidation("Pipeline Id is incorrect", pe.pipelineProgress.get.pipelineId.getOrElse("") == "1")
            results.addValidation("Step Id is incorrect", pe.pipelineProgress.get.stepId.getOrElse("") == "PAUSESTEP")
        }
      }
    }
    // Execution should complete without exception
    DefaultPipelineDriver.main(args.toArray)
    results.validate()
  }
  it("Should run two pipelines") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "two",
      "--globalInput", "global-input-value")
    val results = new ListenerValidations
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def pipelineStepFinished(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        step.id.getOrElse("") match {
          case "GLOBALVALUESTEP" =>
            results.addValidation("GLOBALVALUESTEP return value is incorrect",
              pipelineContext.parameters.getParametersByPipelineId(pipeline.id.getOrElse("-1")).get.parameters("GLOBALVALUESTEP")
                .asInstanceOf[PipelineStepResponse].primaryReturn.get.asInstanceOf[String] == "global-input-value")
          case _ => results.addValidation("Unexpected pipeline finished", valid = false)
        }
        None
      }
      override def registerStepException(exception: PipelineStepException, pipelineContext: PipelineContext): Unit = {
        exception match {
          case _ => results.addValidation("Unexpected exception registered", valid = false)
        }
      }
      override def executionFinished(pipelines: List[Pipeline], pipelineContext: PipelineContext): Option[PipelineContext] = {
        assert(pipelines.lengthCompare(2) == 0)
        None
      }
    }
    // Execution should complete without exception
    DefaultPipelineDriver.main(args.toArray)
    results.validate()
  }
  it("Should run one pipeline and pause") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "three",
      "--globalInput", "global-input-value")
    val results = new ListenerValidations
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def pipelineStepFinished(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        step.id.getOrElse("") match {
          case "GLOBALVALUESTEP" =>
            results.addValidation("GLOBALVALUESTEP return value is incorrect",
              pipelineContext.parameters.getParametersByPipelineId(pipeline.id.getOrElse("-1")).get.parameters("GLOBALVALUESTEP")
                .asInstanceOf[PipelineStepResponse].primaryReturn.get.asInstanceOf[String] == "global-input-value")
          case _ =>
        }
        None
      }
      override def registerStepException(exception: PipelineStepException, pipelineContext: PipelineContext): Unit = {
        // NOTE(review): non-exhaustive match, as in the basic pipeline test above.
        exception match {
          case pe: PauseException =>
            results.addValidation("Pipeline Id is incorrect", pe.pipelineProgress.get.pipelineId.getOrElse("") == "0")
            results.addValidation("Step Id is incorrect", pe.pipelineProgress.get.stepId.getOrElse("") == "PAUSESTEP")
        }
      }
      override def executionFinished(pipelines: List[Pipeline], pipelineContext: PipelineContext): Option[PipelineContext] = {
        results.addValidation("Pipeline completed count is incorrect", pipelines.lengthCompare(1) == 0)
        None
      }
    }
    // Execution should complete without exception
    DefaultPipelineDriver.main(args.toArray)
    results.validate()
  }
  it("Should run second step because first returns nothing") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "four",
      "--globalInput", "global-input-value")
    val results = new ListenerValidations
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def pipelineStepFinished(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        step.id.getOrElse("") match {
          case "DYNAMICBRANCHSTEP" =>
            results.addValidation("DYNAMICBRANCHSTEP return value is incorrect",
              pipelineContext.parameters.getParametersByPipelineId(pipeline.id.getOrElse("-1")).get.parameters("DYNAMICBRANCHSTEP")
                .asInstanceOf[PipelineStepResponse].primaryReturn.get.asInstanceOf[String] == "global-input-value")
          case "DYNAMICBRANCH2STEP" =>
            results.addValidation("DYNAMICBRANCH2STEP return value is incorrect",
              pipelineContext.parameters.getParametersByPipelineId(pipeline.id.getOrElse("-1")).get.parameters("DYNAMICBRANCH2STEP")
                .asInstanceOf[PipelineStepResponse].primaryReturn.get.asInstanceOf[String] == "global-input-value")
          case _ =>
        }
        None
      }
      override def registerStepException(exception: PipelineStepException, pipelineContext: PipelineContext): Unit = {
        results.addValidation("Unexpected exception registered", valid = false)
      }
      override def executionFinished(pipelines: List[Pipeline], pipelineContext: PipelineContext): Option[PipelineContext] = {
        results.addValidation("Pipeline completed count is incorrect", pipelines.lengthCompare(1) == 0)
        None
      }
    }
    // Execution should complete without exception
    DefaultPipelineDriver.main(args.toArray)
    results.validate()
  }
  it("Should accept changes to pipelineContext at the before processing a step") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "nopause",
      "--globalInput", "global-input-value")
    val results = new ListenerValidations
    // Each listener callback below returns an updated context; the following
    // callback then verifies that the driver actually propagated the change.
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def executionStarted(pipelines: List[Pipeline], pipelineContext: PipelineContext): Option[PipelineContext] = {
        results.addValidation("expect no global parameter named 'execution_started' before execution starts",
          pipelineContext.getGlobal("execution_started").isEmpty)
        Some(pipelineContext.setGlobal("execution_started", "true"))
      }
      override def pipelineStarted(pipeline: Pipeline, pipelineContext: PipelineContext): Option[PipelineContext] = {
        results.addValidation(s"expect global parameter 'execution_started' to exist when pipeline starts: ${pipeline.name.getOrElse("")}",
          pipelineContext.getGlobal("execution_started").getOrElse("") == "true")
        results.addValidation(s"expect no parameter named 'pipeline_${pipeline.id.get}_started' before pipeline starts",
          pipelineContext.getGlobal(s"pipeline_${pipeline.id.get}_started").isEmpty)
        And(s"add parameter named 'pipeline_${pipeline.id.get}_started' when pipeline starts")
        Some(pipelineContext.setGlobal(s"pipeline_${pipeline.id.get}_started", "true"))
      }
      override def pipelineStepStarted(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        results.addValidation(s"expect parameter named 'pipeline_${pipeline.id.get}_started' before each step starts",
          pipelineContext.getGlobalString(s"pipeline_${pipeline.id.get}_started").getOrElse("") == "true")
        results.addValidation(s"expect no parameter named 'step_${step.id.get}_started' before step starts",
          pipelineContext.getGlobalString(s"step_${step.id.get}_started").isEmpty)
        And(s"add parameter named 'step_${step.id.get}_started' when step starts")
        Some(pipelineContext.setGlobal(s"step_${step.id.get}_started", "true"))
      }
      override def pipelineStepFinished(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        results.addValidation(s"expect parameter named 'step_${step.id.get}_started' to exist before step finishes",
          pipelineContext.getGlobalString(s"step_${step.id.get}_started").getOrElse("") == "true")
        results.addValidation(s"expect no parameter named 'step_${step.id.get}_finished' before step finishes",
          pipelineContext.getGlobalString(s"step_${step.id.get}_finished").isEmpty)
        And(s"add parameter named 'step_${step.id.get}_finished' when finished")
        Some(pipelineContext.setGlobal(s"step_${step.id.get}_finished", "true"))
      }
      override def pipelineFinished(pipeline: Pipeline, pipelineContext: PipelineContext): Option[PipelineContext] = {
        pipeline.steps.getOrElse(List()).foreach(s => {
          results.addValidation(s"expect parameter named 'step_${s.id.get}_finished' to exist before pipeline finishes",
            pipelineContext.getGlobalString(s"step_${s.id.get}_finished").getOrElse("") == "true")
        })
        results.addValidation(s"expect no parameter named 'pipeline_${pipeline.id.get}_finished' before pipeline finishes",
          pipelineContext.getGlobalString(s"pipeline_${pipeline.id.get}_finished").isEmpty)
        And(s"add pipeline parameter named 'pipeline_${pipeline.id.get}_finished' when finished")
        Some(pipelineContext.setGlobal(s"pipeline_${pipeline.id.get}_finished", "true"))
      }
      override def executionFinished(pipelines: List[Pipeline], pipelineContext: PipelineContext): Option[PipelineContext] = {
        pipelines.foreach(p => {
          results.addValidation(s"expect parameter named 'pipeline_${p.id.get}_finished' to exist before execution finishes",
            pipelineContext.getGlobalString(s"pipeline_${p.id.get}_finished").getOrElse("") == "true")
        })
        And(s"add gparameter named 'execution_finished' when finished")
        Some(pipelineContext.setGlobal("execution_finished", "true"))
      }
    }
    // Execution should complete without exception
    DefaultPipelineDriver.main(args.toArray)
    results.validate()
  }
  it("Should retry when the pipeline fails") {
    var executionComplete = false
    var testIteration = 0
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def executionFinished(pipelines: List[Pipeline], pipelineContext: PipelineContext): Option[PipelineContext] = {
        assert(pipelines.lengthCompare(1) == 0)
        val params = pipelineContext.parameters.getParametersByPipelineId("1")
        assert(params.isDefined)
        assert(params.get.parameters.contains("RETURNNONESTEP"))
        executionComplete = true
        Some(pipelineContext)
      }
      override def registerStepException(exception: PipelineStepException, pipelineContext: PipelineContext): Unit = {
        // Exceptions are only a failure after the first retry attempt
        exception match {
          case t: Throwable if testIteration > 1 => fail(s"Pipeline Failed to run: ${t.getMessage}")
          case _ =>
        }
      }
      override def pipelineStarted(pipeline: Pipeline, pipelineContext: PipelineContext): Option[PipelineContext] = {
        // Second execution should set a global that allows the pipeline to complete
        testIteration += 1
        if (testIteration == 2) {
          Some(pipelineContext.setGlobal("passTest", true))
        } else {
          None
        }
      }
    }
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "errorTest",
      "--globalInput", "global-input-value", "--maxRetryAttempts", "2")
    DefaultPipelineDriver.main(args.toArray)
    assert(executionComplete)
  }
  it ("Should fail and not retry") {
    SparkTestHelper.pipelineListener = new PipelineListener {}
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "errorTest",
      "--globalInput", "global-input-value", "--terminateAfterFailures", "true")
    val thrown = intercept[RuntimeException] {
      DefaultPipelineDriver.main(args.toArray)
    }
    assert(thrown.getMessage.startsWith("Failed to process execution plan after 1 attempts"))
  }
  it("Should retry a step") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "retry")
    val results = new ListenerValidations
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def pipelineStepFinished(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        step.id.getOrElse("") match {
          case "RETRYSTEP" =>
            results.addValidation("RETRYSTEP return value is incorrect",
              pipelineContext.parameters.getParametersByPipelineId("1").get.parameters("RETRYSTEP")
                .asInstanceOf[PipelineStepResponse].primaryReturn.get.asInstanceOf[String] == "Retried step 3 of 3")
          case _ =>
        }
        None
      }
      override def registerStepException(exception: PipelineStepException, pipelineContext: PipelineContext): Unit = {
        // NOTE(review): valid = true means this validation can never fail; it only
        // records that retries raised exceptions along the way — confirm intent.
        exception match {
          case e: Throwable =>
            results.addValidation("Retry step failed", valid = true)
        }
      }
    }
    // Execution should complete without exception
    DefaultPipelineDriver.main(args.toArray)
    results.validate()
  }
  it("Should retry and fail a step") {
    val args = List("--driverSetupClass", "com.acxiom.pipeline.SparkTestDriverSetup", "--pipeline", "retryFailure")
    val results = new ListenerValidations
    SparkTestHelper.pipelineListener = new PipelineListener {
      override def pipelineStepFinished(pipeline: Pipeline, step: PipelineStep, pipelineContext: PipelineContext): Option[PipelineContext] = {
        step.id.getOrElse("") match {
          case "PARROTSTEP" =>
            results.addValidation("PARROTSTEP return value is incorrect",
              pipelineContext.parameters.getParametersByPipelineId("1").get.parameters("PARROTSTEP")
                .asInstanceOf[PipelineStepResponse].primaryReturn.get.asInstanceOf[String] == "error step called!")
          case "RETURNNONESTEP" =>
            // NOTE(review): message says this step "should not have been called" but
            // valid = true makes the validation a no-op — presumably valid = false
            // was intended so the suite fails if the step runs; confirm.
            results.addValidation("RETURNNONESTEP should not have been called", valid = true)
          case _ =>
        }
        None
      }
      override def registerStepException(exception: PipelineStepException, pipelineContext: PipelineContext): Unit = {
        exception match {
          case e: Throwable =>
            results.addValidation("Retry step failed", valid = true)
        }
      }
    }
    // Execution should complete without exception
    DefaultPipelineDriver.main(args.toArray)
    results.validate()
  }
}
// Unit-level checks of PipelineContext global and audit accessors (no driver involved).
describe("PipelineContext") {
  it("Should set global values") {
    val ctx = SparkTestHelper.generatePipelineContext().copy(stepMessages = None)
    assert(ctx.globals.isDefined)
    assert(ctx.globals.get.isEmpty)
    val map = Map[String, Any]("one" -> 1, "two" -> "two")
    val updatedCtx = ctx.setGlobal("three", 3).setGlobals(map)
    assert(updatedCtx.globals.isDefined)
    assert(updatedCtx.globals.get.size == 3)
    assert(updatedCtx.getGlobalString("two").isDefined)
    assert(updatedCtx.getGlobalString("two").get == "two")
    assert(updatedCtx.getGlobal("one").isDefined)
    assert(updatedCtx.getGlobalAs[Int]("one").get == 1)
    assert(updatedCtx.getGlobal("three").isDefined)
    assert(updatedCtx.getGlobalAs[Int]("three").get == 3)
    // getGlobalString yields None for globals that are not strings
    assert(updatedCtx.getGlobalString("one").isEmpty)
    assert(updatedCtx.getStepMessages.isEmpty)
  }
  it("Should set audit metrics") {
    val ctx = SparkTestHelper.generatePipelineContext()
    // Attach a pipeline audit, a nested step audit and a single step metric
    val updatedCtx = ctx.setPipelineAudit(ExecutionAudit("pipelineId", AuditType.PIPELINE, Map[String, Any](),
      System.currentTimeMillis()))
      .setStepAudit("pipelineId", ExecutionAudit("stepId", AuditType.STEP, Map[String, Any](),
        System.currentTimeMillis()))
      .setStepMetric("pipelineId", "stepId", None, "bubba", "gump")
    assert(updatedCtx.getPipelineAudit("pipelineId").isDefined)
    assert(updatedCtx.getPipelineAudit("pipelineId").get.metrics.isEmpty)
    val metricsCtx = updatedCtx.setPipelineAuditMetric("pipelineId", "fred", "redonthehead")
    assert(metricsCtx.getPipelineAudit("pipelineId").get.metrics.size == 1)
    assert(metricsCtx.getPipelineAudit("pipelineId").get.metrics.contains("fred"))
    assert(metricsCtx.getPipelineAudit("pipelineId").get.metrics("fred") == "redonthehead")
    assert(metricsCtx.getStepAudit("pipelineId", "stepId", None).get.metrics("bubba") == "gump")
  }
}
}
/** Shared Spark/test fixtures used by the suite and by SparkTestDriverSetup. */
object SparkTestHelper {
  val MASTER = "local[2]"
  val APPNAME = "file-steps-spark"
  var sparkConf: SparkConf = _
  var sparkSession: SparkSession = _
  var pipelineListener: PipelineListener = _

  /** Builds a fresh PipelineContext wired to the shared SparkSession and SparkConf. */
  def generatePipelineContext(): PipelineContext = {
    val globals = Map[String, Any]()
    // Honor a comma separated "stepPackages" entry when present, otherwise use defaults.
    val stepPackages = globals.get("stepPackages")
      .map(_.asInstanceOf[String].split(",").toList)
      .getOrElse(List("com.acxiom.pipeline", "com.acxiom.pipeline.steps"))
    PipelineContext(Some(SparkTestHelper.sparkConf), Some(SparkTestHelper.sparkSession), Some(globals),
      PipelineSecurityManager(),
      PipelineParameters(),
      Some(stepPackages),
      PipelineStepMapper(),
      Some(SparkTestHelper.pipelineListener),
      Some(SparkTestHelper.sparkSession.sparkContext.collectionAccumulator[PipelineStepMessage]("stepMessages")))
  }
}
/** DriverSetup used by the tests: maps the "--pipeline" argument onto canned pipelines. */
case class SparkTestDriverSetup(parameters: Map[String, Any]) extends DriverSetup {

  /** Resolve the requested canned pipeline list from the "pipeline" parameter. */
  override def pipelines: List[Pipeline] = parameters("pipeline") match {
    case "basic" => PipelineDefs.BASIC_PIPELINE
    case "two" => PipelineDefs.TWO_PIPELINE
    case "three" => PipelineDefs.THREE_PIPELINE
    case "four" => PipelineDefs.FOUR_PIPELINE
    case "nopause" => PipelineDefs.BASIC_NOPAUSE
    case "errorTest" => PipelineDefs.ERROR_PIPELINE
    case "retry" => PipelineDefs.RETRY_PIPELINE
    case "retryFailure" => PipelineDefs.RETRY_FAILURE_PIPELINE
    case "noPipelines" => List()
  }

  override def initialPipelineId: String = ""

  /** Context pre-seeded with empty parameter maps for pipeline ids "0" and "1". */
  override def pipelineContext: PipelineContext = {
    val stepPackages = parameters.get("stepPackages")
      .map(_.asInstanceOf[String].split(",").toList)
      .getOrElse(List("com.acxiom.pipeline.steps", "com.acxiom.pipeline"))
    PipelineContext(Some(SparkTestHelper.sparkConf), Some(SparkTestHelper.sparkSession), Some(parameters),
      PipelineSecurityManager(),
      PipelineParameters(List(PipelineParameter("0", Map[String, Any]()), PipelineParameter("1", Map[String, Any]()))),
      Some(stepPackages),
      PipelineStepMapper(),
      Some(SparkTestHelper.pipelineListener),
      Some(SparkTestHelper.sparkSession.sparkContext.collectionAccumulator[PipelineStepMessage]("stepMessages")))
  }
}
/** Step implementations referenced by the canned test pipelines. */
object MockPipelineSteps {

  /** Emits a warn-level step message containing `string` and returns `string` unchanged. */
  def globalVariableStep(string: String, pipelineContext: PipelineContext): String = {
    val currentStepId = pipelineContext.getGlobalString("stepId").getOrElse("")
    val currentPipelineId = pipelineContext.getGlobalString("pipelineId").getOrElse("")
    pipelineContext.addStepMessage(
      PipelineStepMessage(string, currentStepId, currentPipelineId, PipelineStepMessageType.warn))
    string
  }

  /** Emits a pause-level step message, signalling the driver to pause the pipeline. */
  def pauseStep(string: String, pipelineContext: PipelineContext): String = {
    val currentStepId = pipelineContext.getGlobalString("stepId").getOrElse("")
    val currentPipelineId = pipelineContext.getGlobalString("pipelineId").getOrElse("")
    pipelineContext.addStepMessage(
      PipelineStepMessage(string, currentStepId, currentPipelineId, PipelineStepMessageType.pause))
    string
  }

  /** Step that intentionally produces no primary return value. */
  def returnNothingStep(string: String): Unit = {}

  /** Echoes any input back as its string representation. */
  def parrotStep(value: Any): String = value.toString

  /** Always throws a PipelineException; exercises the error-handling paths. */
  def throwError(pipelineContext: PipelineContext): Any = {
    throw PipelineException(message = Some("This step should not be called"),
      pipelineProgress = Some(pipelineContext.getPipelineExecutionInfo))
  }

  /** Succeeds only once the driver's retry counter reaches `retryCount`; throws otherwise. */
  def retryStep(retryCount: Int, pipelineContext: PipelineContext): String = {
    val attempts = pipelineContext.getGlobalAs[Int]("stepRetryCount").getOrElse(-1)
    if (attempts == retryCount) {
      s"Retried step $attempts of $retryCount"
    } else {
      throw PipelineException(message = Some("Force a retry"),
        pipelineProgress = Some(pipelineContext.getPipelineExecutionInfo))
    }
  }
}
|
<reponame>SamuelMoffat/finalProject
package datageneration;
import com.opencsv.bean.CsvToBeanBuilder;
import gis.UkLocation;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
/**
 * Generates random {@link DataPoint}s (and labelled training points) based on the
 * bundled UK ward-boundary coordinate CSV.
 */
public class DataGenerator {
    public List<DataPoint> listOfPoints;
    public List<DataPoint> trainingListOfPoints;
    List<UkLocation> ukLocationsList;

    /**
     * Loads the coordinate CSV and generates 500 random points plus 500 training points.
     * If the CSV cannot be found, the point lists are left unset.
     */
    public DataGenerator() {
        // Fixed seed so runs are reproducible.
        // NOTE(review): 'rand' is never referenced again — presumably the
        // randomGeneration constructor seeds shared state used by DataPoint; confirm.
        int randomSeed = 123;
        randomGeneration rand = new randomGeneration(randomSeed);
        String coordinatesCsv = "src/main/resources/Wards_(December_2017)_Boundaries_in_the_UK_(WGS84).csv";
        // if file not found then datapoints cannot be made
        try {
            ukLocationsList = new CsvToBeanBuilder(new FileReader(coordinatesCsv))
                    .withType(UkLocation.class).build().parse();
            int pointCount = 500;
            generateData(pointCount);
            generateTrainingData(pointCount);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }

    /** Builds {@code numOfPoints} unlabelled points and stores them on this instance. */
    private void generateData(int numOfPoints) {
        List<DataPoint> generated = new ArrayList<>();
        for (int i = 0; i < numOfPoints; i++) {
            generated.add(createPoint());
        }
        setListOfPoints(generated);
    }

    /** Builds {@code numOfPoints} points whose correct label is explicitly set. */
    private void generateTrainingData(int numOfPoints) {
        List<DataPoint> generated = new ArrayList<>();
        for (int i = 0; i < numOfPoints; i++) {
            generated.add(createTrainingPoint());
        }
        setTrainingListOfPoints(generated);
    }

    private DataPoint createTrainingPoint() {
        return new DataPoint(ukLocationsList, true);
    }

    private DataPoint createPoint() {
        return new DataPoint(ukLocationsList);
    }

    public List<DataPoint> getListOfPoints() {
        return listOfPoints;
    }

    private void setListOfPoints(List<DataPoint> listOfPoints) {
        this.listOfPoints = listOfPoints;
    }

    public List<DataPoint> getTrainingListOfPoints() {
        return trainingListOfPoints;
    }

    public void setTrainingListOfPoints(List<DataPoint> trainingListOfPoints) {
        this.trainingListOfPoints = trainingListOfPoints;
    }
}
|
/*-
* ========================LICENSE_START=================================
* TeamApps
* ---
* Copyright (C) 2014 - 2021 TeamApps.org
* ---
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================LICENSE_END==================================
*/
/**
 * Base class for SlickGrid cell editors.
 *
 * The constructor wires the standard SlickGrid editor arguments (column, container,
 * grid); the editor lifecycle (destroy/focus/load/serialize/apply) is left to
 * concrete subclasses. Validation is delegated to an optional `validator` function
 * on the column definition.
 */
export abstract class AbstractTableEditor implements Slick.Editors.Editor<any /*TODO*/> {
	protected column: Slick.Column<any>;
	protected container: HTMLElement;
	protected grid: Slick.Grid<any>;

	constructor(args: Slick.Editors.EditorOptions<any>) {
		this.column = args.column;
		this.container = args.container;
		this.grid = args.grid;
		this.init();
	}

	// No-op: only present because the editor typings declare it.
	public init(): void {
	}

	// remove all data, events & dom elements created in the constructor
	abstract destroy(): void;

	// set the focus on the main input control (if any)
	abstract focus(): void;

	// return true if the value(s) being edited by the user has/have been changed
	abstract isValueChanged(): boolean;

	// return the value(s) being edited by the user in a serialized form
	// can be an arbitrary object
	// the only restriction is that it must be a simple object that can be passed around even
	// when the editor itself has been destroyed
	abstract serializeValue(): any /*TODO*/;

	// load the value(s) from the data item and update the UI
	// this method will be called immediately after the editor is initialized
	// it may also be called by the grid if the row/cell being edited is updated via grid.updateRow/updateCell
	abstract loadValue(item: any /*TODO*/): void;

	// deserialize the value(s) saved to "state" and apply them to the data item
	// this method may get called after the editor itself has been destroyed
	// treat it as an equivalent of a Java/C# "static" method - no instance variables should be accessed
	abstract applyValue(item: any /*TODO*/, state: any /*TODO*/): void;

	// Runs the column's optional validator against the serialized value.
	// Returns {valid: true, msg: null} when there is no validator or it passes.
	public validate(): { valid: boolean, msg: string } {
		const column = this.column as any;
		if (!column.validator) {
			return {valid: true, msg: null};
		}
		const validationResults = column.validator(this.serializeValue());
		return validationResults.valid ? {valid: true, msg: null} : validationResults;
	}

	/*********** OPTIONAL METHODS***********/

	// called if the cell being edited is scrolled out of the view — override when the
	// editor UI is not appended to the cell itself or when secondary controls are open
	// (like a calendar for a datepicker input)
	public hide() {
	}

	// pretty much the opposite of hide
	public show() {
	}

	// called by the grid if any of the cell containers are scrolled and the absolute
	// position of the edited cell changes; override when the editor UI is constructed
	// as a child of document BODY to keep it aligned with the cell
	//
	// the cellBox: { top, left, bottom, right, width, height, visible }
	public position(cellBox: any /*TODO*/) {
	}
}
|
<filename>web/old_js/app.js
// Auth form wiring (sign-in, sign-out, sign-up, password reset) for the marketing
// pages; posts go to the apex domain and successful auth redirects to app.<domain>.
(function () {
  // NOTE(review): the unused CommonJS/this "root" shim was removed.
  // `root_domain` is intentionally an implicit global — other scripts on the page
  // may read it. It is the apex domain ("app." prefix stripped) plus the port.
  root_domain = document.domain.replace(/^app\./, '') + (location.port ? ':' + location.port : '');
  $(document).ready(function () {
    // Sign-in: post credentials, toggle success/fail messages, redirect to the app.
    $('#form_signin').on('submit', function (event) {
      $.post('//' + root_domain + '/login_check', {
        '_csrf_token': $(this).find('input[name="_csrf_token"]').val(),
        '_username': $(this).find('input[name="_username"]').val(),
        '_password': $(this).find('input[name="_password"]').val()
      }).done(function (data) {
        $('#form_signin').find('._messages .alert-box').addClass('hide');
        if (data.success != null) {
          $('#form_signin').find('._signin-success').removeClass('hide');
          return setTimeout(function () {
            return window.location = '//app.' + root_domain;
          }, 2000);
        } else {
          return $('#form_signin').find('._signin-fail').removeClass('hide');
        }
      });
      return event.preventDefault();
    });
    // Sign-out: cross-domain POST with credentials, then reload the page.
    $('#auth_signout').on('click', function (event) {
      $.ajax({
        type: 'post',
        url: '//' + root_domain + '/x-sign-out',
        crossDomain: true,
        data: "type=" + 1,
        xhrFields: {
          withCredentials: true
        },
        success: function (data) {
          return setTimeout(function () {
            return location.reload();
          }, 100);
        }
      });
      return event.preventDefault();
    });
    // Sign-up: synchronous availability check, then register via the FOSUserBundle endpoint.
    $('#form_signup').on('submit', function (event) {
      var isEmailAlreadyExisting = false;
      // NOTE(review): this availability check reads input[name="email"] while the
      // registration post below reads input[name="_email"] — confirm which field
      // name the markup actually uses.
      $.ajax({
        url: '/api/v1/validations/email/availability?q=' + $(this).find('input[name="email"]').val(),
        async: false,
        dataType: 'json',
        success: function (data) {
          if ( ! data.success) {
            isEmailAlreadyExisting = true;
          }
        }
      });
      if (isEmailAlreadyExisting) {
        $.bigBox({
          title: 'Signup',
          content: 'Email account already exists, please use a different one.',
          tabicon: false,
          sound: false,
          color: "#993333",
          timeout: 3000
        });
        return false;
      }
      $.post('//' + root_domain + '/register', {
        'fos_user_registration_form[username]': $(this).find('input[name="_email"]').val(),
        // Fixed: the selector was 'input[name="_email"]"]' (trailing "]), which is an
        // invalid Sizzle expression and made jQuery throw on every signup submit.
        'fos_user_registration_form[email]': $(this).find('input[name="_email"]').val(),
        'fos_user_registration_form[plainPassword][first]': $(this).find('input[name="_password"]').val(),
        'fos_user_registration_form[plainPassword][second]': $(this).find('input[name="_password"]').val(),
        'fos_user_registration_form[_token]': $(this).find('input[name="fos_user_registration_form[_token]"]').val(),
      }).done(function (data) {
        $('#form_signup').find('._messages .alert-box').addClass('hide');
        if (data.success != null) {
          $('#form_signup').find('._signup-success').removeClass('hide');
          return setTimeout(function () {
            return window.location = '//app.' + root_domain;
          }, 2000);
        } else {
          return $('#form_signup').find('._signup-fail-values').removeClass('hide');
        }
      });
      return event.preventDefault();
    });
    // Password reset: post the email address, then redirect to the confirmation page.
    return $('#form_passreset').on('submit', function (event) {
      $.post('//' + root_domain + '/x-password-reset', {
        'email': $(this).find('input[name="email"]').val()
      }).done(function (data) {
        $('#form_passreset').find('._messages .alert-box').addClass('hide');
        if (data.success != null) {
          $('#form_passreset').find('._passreset-success').removeClass('hide');
          return setTimeout(function () {
            return window.location = '/message/password-reset-submitted';
          }, 2000);
        } else if (data.errors != null) {
          return $('#form_passreset').find('._passreset-fail-notexists').removeClass('hide');
        } else {
          return $('#form_passreset').find('._passreset-fail-values').removeClass('hide');
        }
      });
      return event.preventDefault();
    });
  });
}).call(this);
|
<reponame>chipsi007/qemu_esp32
// Control-panel definition consumed by the page's ui framework: each key becomes a
// labelled control (dropdown, checkbox or slider) whose current `value` is read by
// update() when the user changes it.
var ui = {
  // Signal source: live microphone input or a synthesized sine/square wave.
  inputType: {
    title: "Input",
    value: 2,
    values: [["Live Input (5 V peak amplitude)",1], ["Sine Wave (amplitude 5 V)",2], ["Square Wave (amplitude 5 V)",3]]
  },
  // When true, the live trace stops updating (frame is held).
  freeze: {
    title: "Freeze Live Input",
    value: false,
  },
  // Frequency of the synthesized input wave, in Hz.
  freq: {
    title: "Input Wave Frequency",
    value: 250,
    range:[1,1000],
    resolution:1,
    units: "Hz"
  },
  // Vertical amplification applied to the trace.
  gain: {
    title: "Oscilloscope gain",
    value: 1,
    range:[0,5],
    resolution:0.1,
  },
  // invert: {
  //   title: "Invert",
  //   value: false,
  // },
  // Horizontal time base (despite the generic name, this is the sec/div control).
  dropdownExample: {
    title: "Seconds / div",
    value: 1,
    values: [["50 µs", 0.05],["100 µs", 0.1],["200 µs", 0.2],["500 µs", 0.5],["1 ms", 1], ["2 ms", 2],["5 ms", 5]]
  },
  // Vertical scale.
  volts: {
    title: "Volts / div",
    value: 1,
    values: [["1 V", 0.2],["2 V", 0.4],["5 V", 1],["10 V", 2]]
  },
  // dc_offset: {
  //   title: "Vertical Offset",
  //   value: 0,
  //   range:[-300,300],
  //   resolution:0.1,
  //   input: "hidden"
  // },
  // Hidden trace-position controls (driven programmatically, not by a visible slider).
  horizOffset: {
    title: "Horizontal Offset",
    value: 0,
    range:[-100,100],
    resolution:1,
    input: "hidden"
  },
  vertOffset: {
    title: "Vertical Offset",
    value: 0,
    range:[-100,100],
    resolution:1,
    input: "hidden"
  }
};
// Once the UI controls are ready, try to attach the microphone; when getUserMedia
// is unavailable, fall back to the synthesized inputs and disable the live option.
$(document).on("uiLoaded", function () {
  // Normalize the vendor-prefixed getUserMedia implementations onto navigator.
  navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia || navigator.msGetUserMedia;
  if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true}, gotStream, function (error) {
      console.log("Capture error: ", error.code);
    });
  } else {
    animate();
    $(".preamble").append("<div class='alert'>To use Live Audio Input, please download the latest version of Chrome.</div>");
    $("#inputType-interface option[value=1]").attr("disabled", true);
  }
});
/**
 * Returns the raw (still URI-encoded) value of `variable` from a query string,
 * or undefined when the variable is absent.
 * @param {string} variable - parameter name to look up
 * @param {string} [queryString] - optional "a=1&b=2"-style string; defaults to the
 *   current page's query string (without the leading "?").
 */
function getQueryVariable(variable, queryString) {
  var query = queryString !== undefined ? queryString : window.location.search.substring(1);
  var vars = query.split("&");
  for (var i = 0; i < vars.length; i++) {
    var pair = vars[i].split("=");
    // Strict comparison: both sides are strings here.
    if (pair[0] === variable) {
      return pair[1];
    }
  }
}
// One canvas for the static gridlines (c) and a second, stacked canvas (c2) for the
// animated trace, so the grid never needs to be redrawn every frame.
demo = document.getElementById('demo');
c = document.createElement("canvas"); // for gridlines
c2 = document.createElement("canvas"); // for animated line
var w = window;
screenHeight = w.innerHeight;
screenWidth = w.innerWidth;
c.width = document.body.clientWidth*0.8;
//c.width = demo.clientWidth;
// NOTE(review): this clientHeight assignment is immediately overwritten by the
// aspect-ratio line below (same for c2) — presumably leftover from an earlier
// layout; confirm before removing.
c.height = document.body.clientHeight;
c.height = c.width * 0.57;
c2.width = document.body.clientWidth*0.8;
//c2.width = demo.clientWidth;
c2.height = document.body.clientHeight;
c2.height = c.height;
$("#demo").height((c.height + 20));
c.style.backgroundColor = "#5db1a2";
demo.appendChild(c);
demo.appendChild(c2);
// Grid/trace coordinates are computed relative to the canvas centre.
midPoint = {x: c.width/2, y: c.height/2};
ctx = c.getContext("2d");
ctx2 = c2.getContext("2d");
// Draws the oscilloscope graticule on the given 2d context: centre axes, major
// gridlines every 100 px outward from the centre, and minor tick dashes every
// 20 px along both axes. Reads the file-level globals `c` (canvas) and `midPoint`.
function createGrid(ctx){
  // Centre axes
  ctx.beginPath();
  ctx.moveTo(0, midPoint.y);
  ctx.lineTo(c.width, midPoint.y);
  ctx.moveTo(midPoint.x, 0);
  ctx.lineTo(midPoint.x, c.height);
  ctx.strokeStyle = "#196156";
  ctx.lineWidth = '2';
  ctx.globalCompositeOperation = 'source-over';
  ctx.stroke();
  ctx.closePath();
  ctx.beginPath();
  // Major vertical gridlines: walk left from centre, then right.
  gridLineX = midPoint.x - 100;
  ctx.lineWidth = '2';
  while (gridLineX >= 0){
    ctx.moveTo(gridLineX, 0);
    ctx.lineTo(gridLineX, c.height);
    gridLineX -= 100;
  }
  gridLineX = midPoint.x + 100;
  while (gridLineX <= c.width){
    ctx.moveTo(gridLineX, 0);
    ctx.lineTo(gridLineX, c.height);
    gridLineX += 100;
  }
  // Major horizontal gridlines: walk up from centre, then down.
  gridLineY = midPoint.y - 100;
  while (gridLineY >= 0){
    ctx.moveTo(0, gridLineY);
    ctx.lineTo(c.width, gridLineY);
    gridLineY -= 100;
  }
  gridLineY = midPoint.y + 100;
  while (gridLineY <= c.height){
    ctx.moveTo(0, gridLineY);
    ctx.lineTo(c.width, gridLineY);
    gridLineY += 100;
  }
  // Minor tick dashes on the horizontal axis, left of centre...
  dashesX = midPoint.x - 20;
  while (dashesX >= 0){
    ctx.moveTo(dashesX, midPoint.y-5);
    ctx.lineTo(dashesX, midPoint.y+5);
    dashesX -= 20;
  }
  // ...then right of centre. Fixed: dashesX was not reset here, so this loop
  // continued from the negative value left by the previous loop and re-drew the
  // left-hand dashes again (every other direction pair in this function resets
  // its counter before walking the opposite side).
  dashesX = midPoint.x + 20;
  while (dashesX <= c.width){
    ctx.moveTo(dashesX, midPoint.y-5);
    ctx.lineTo(dashesX, midPoint.y+5);
    dashesX += 20;
  }
  // Minor tick dashes on the vertical axis, above then below centre.
  dashesY = midPoint.y - 20;
  while (dashesY >= 0){
    ctx.moveTo(midPoint.x-5, dashesY);
    ctx.lineTo(midPoint.x+5, dashesY);
    dashesY -= 20;
  }
  dashesY = midPoint.y + 20;
  while (dashesY <= c.height){
    ctx.moveTo(midPoint.x-5, dashesY);
    ctx.lineTo(midPoint.x+5, dashesY);
    dashesY += 20;
  }
  ctx.stroke();
  // NOTE(review): gridLineX/gridLineY/dashesX/dashesY are implicit globals
  // (no var) — harmless here but worth declaring locally.
}
// Draw the static grid once and start with the scope idle.
createGrid(ctx);
var isRunning = false;

// Redraws the scope in response to a UI control change; `el` is the key of the
// control that changed (passed by the ui framework on every update).
// NOTE(review): `streaming` and `animateId` are assigned here but declared later
// in the file, and `animate`/`drawData` are also defined further down.
function update(el){
  if (el == 'inputType' && ui.inputType.value == 1){
    // Switched to live input: start the requestAnimationFrame loop.
    // NOTE(review): animate() is called directly AND scheduled via
    // requestAnimationFrame — if animate schedules its own next frame this
    // could double-drive the loop; confirm.
    streaming = true;
    animate();
    animateId = window.requestAnimationFrame(animate);
  } else if (el == 'inputType' && ui.inputType.value != 1){
    //cancel animation
    streaming = false;
    window.cancelAnimationFrame(animateId);
    drawData();
  } else if (streaming == true && ui.freeze.value == false){
    // Any other control change while live and unfrozen: refresh the live trace.
    animate();
  }
  else {
    // Frozen or synthetic input: render a single static frame.
    drawData();
  }
  // (Stale commented-out freeze/isRunning experiments removed for clarity.)
}
// Web Audio setup: mic/osc -> gainNode -> analyser. The analyser's
// time-domain byte buffer is what drawData() renders while streaming.
var AudioContext = (window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext);
if (AudioContext){
    var audioContext = new AudioContext();
    // var gainNode = audioContext.createGain() || audioContext.createGainNode();
    var gainNode = audioContext.createGain();
    var analyser = audioContext.createAnalyser();
    //confusing, gain on oscilloscope, different for gain affecting input
    // gainNode.gain.value = ui.gain.value;
    gainNode.gain.value = 3;
    analyser.smoothingTimeConstant = .9;
    // analyser.fftSize = 512;
    // analyser.fftSize = 1024;
    // analyser.fftSize = 4096;
    // Some browsers cap fftSize; fall back to 2048 if 4096 is rejected.
    try {
        analyser.fftSize = 4096;
    } catch(e) {
        analyser.fftSize = 2048;
    }
    gainNode.connect(analyser);
    // frequencyBinCount is readonly and set to fftSize/2;
    var timeDomain = new Uint8Array(analyser.frequencyBinCount);
    var streaming = false;
    var sampleRate = audioContext.sampleRate;
    var numSamples = analyser.frequencyBinCount;
} else {
    // No Web Audio support: stub out the analyser so the synthetic-signal
    // drawing path still works.
    var analyser = {};
    analyser.frequencyBinCount = 512;
}
/**
 * getUserMedia success callback: wires the microphone stream into the
 * gain node, switches the UI to "live input" mode and starts animating.
 */
function gotStream(stream) {
    // Create an AudioNode from the stream.
    window.mediaStreamSource = audioContext.createMediaStreamSource( stream );
    //for testing
    var osc = audioContext.createOscillator();
    osc.frequency.value = 200;
    osc.start(0);
    // switch these lines
    window.mediaStreamSource.connect(gainNode);
    // osc.connect(gainNode);
    streaming = true;
    $('#inputType-interface select').val(1).change();
    animate();
}
// Keep `streaming` and the frequency control in sync with the input-type
// dropdown (value 1 == live input; frequency only applies to synthetic).
$(document).on("change", '#inputType-interface select', function(){
    if ($(this).val() == 1){
        streaming = true;
        $('#freq-interface').attr('disabled', 'disabled').addClass("disabled");
    } else {
        streaming = false;
        $('#freq-interface').removeAttr('disabled').removeClass("disabled");
    }
})
/**
 * Linearly maps a value from one numeric range onto another.
 * @param {number[]} srcRange - Source range as [min, max].
 * @param {number[]} dstRange - Target range as [min, max].
 * @param {number} value - A value expressed in the source range.
 * @returns {number} The value re-expressed in the target range.
 */
var mapRange = function(srcRange, dstRange, value) {
    return dstRange[0] +
        (value - srcRange[0]) * (dstRange[1] - dstRange[0]) / (srcRange[1] - srcRange[0]);
};
// Handle of the pending requestAnimationFrame, so update() can cancel it.
var animateId;
// Pan applied on the previous frame; drawData() undoes it before clearing.
var previousTranslate = {x:0, y:0};
/**
 * Frame driver: while streaming and not frozen, refreshes the analyser's
 * time-domain buffer and schedules the next frame; always draws one frame.
 */
function animate(){
    if (streaming == true && ui.freeze.value == false){
        analyser.getByteTimeDomainData(timeDomain);
        window.requestAnimationFrame(animate);
    }
    drawData();
    // gainNode.gain.value = ui.gain.value;
}
/**
 * Renders one frame of the trace onto the overlay context ctx2.
 * While streaming, plots the analyser's time-domain bytes scaled by the
 * gain/volts/timebase controls; otherwise synthesizes a sine (or square,
 * inputType 3) from the frequency control. Horizontal/vertical offset
 * controls pan the whole trace via a canvas translate.
 */
function drawData(){
    // Undo last frame's pan before clearing so the clear covers the whole
    // canvas regardless of the current offset.
    ctx2.translate(-previousTranslate.x, -previousTranslate.y);
    ctx2.clearRect(0,0,c.width,c.height);
    ctx2.translate(ui.horizOffset.value, ui.vertOffset.value);
    ctx2.beginPath();
    ctx2.strokeStyle = '#befde5';
    ctx2.lineWidth = 1;
    // i runs symmetrically around 0 so the trace is centered on the axes.
    for (var i = -analyser.frequencyBinCount/2; i <= analyser.frequencyBinCount/2; i++) {
        index = i+analyser.frequencyBinCount/2;
        // console.log(index);
        if (streaming == true){
            // NOTE(review): 'height' and 'offset' are computed but never
            // used, and timeDomain[i] is indexed with a negative i for the
            // left half — these look like leftovers; confirm and remove.
            var height = c.height * timeDomain[i] / 256;
            var offset = c.width * (analyser.frequencyBinCount/(analyser.frequencyBinCount-1)) * i/analyser.frequencyBinCount;
            // analyser.getByteTimeDomainData(timeDomain);
            var xc = i * (c.width/analyser.frequencyBinCount);
            // Bytes are 0..255 centered at 128; scale to px via gain and
            // the volts-per-division setting (100px per division).
            var yc = ui.gain.value * ((timeDomain[index] / 255) - 0.5)*200/(ui.volts.value);
            yc += c.height/2;
            // apply dc offset
            //yc = ui.dc_offset.value*-1 + yc;
            // Rescale x so the timebase dropdown selects ms per division.
            xc = mapRange([0, 0.001*ui.dropdownExample.value], [0, 100 * (numSamples/sampleRate) / c.width], xc);
            // shift graph to middle of oscilloscope
            xc += c.width/2;
            ctx2.lineTo(xc, yc);
        } else {
            var xc = i * (c.width/analyser.frequencyBinCount);
            //dropdown value also needs mapping
            scaledRangeValue = mapRange([1,2], [1,3], ui.dropdownExample.value);
            //Hardcoding 6 is wrong! Gives incorrect values on small screens
            // var amplitude = c.height/6 / ui.volts.value;
            var amplitude = 100 / ui.volts.value; //100 pixels per division
            //so total length in seconds of graph is sampleRate*numSamples
            //console.log("total length:", numSamples/sampleRate); //=0.0053333seconds, so 1 pixel represents 0.00533/width seconds
            //by default 100px represents 100*0.00533/width = ??? seconds
            //we want it to represent 1ms
            var yc = -amplitude * Math.sin(2*Math.PI*xc*ui.freq.value*0.00001*ui.dropdownExample.value); //0.00001 is the number of seconds we want a pixel to represent, ie 1ms / 100
            // inputType 3: clip the sine into a square wave.
            if (ui.inputType.value == 3){
                if (yc > 0) yc = amplitude;
                else yc = -amplitude;
            }
            //apply gain
            yc *= ui.gain.value;
            //center vertically
            yc = c.height/2 + yc;
            // apply dc offset
            //yc = ui.dc_offset.value*-1 + yc;
            // shift graph to middle of oscilloscope
            xc += c.width/2;
            // if (ui.invert.value) yc = -yc + c.height;
            ctx2.lineTo(xc, yc);
        }
        // Remember this frame's pan so the next frame can undo it.
        previousTranslate = {x:ui.horizOffset.value,y:ui.vertOffset.value}
    }
    ctx2.stroke();
    // Re-stroke the same path with wider, translucent strokes for a glow.
    ctx2.strokeStyle = 'rgba(174,244,218,0.3)';
    ctx2.lineWidth = 3;
    ctx2.stroke();
    ctx2.strokeStyle = 'rgba(174,244,218,0.3)';
    ctx2.lineWidth = 4;
    ctx2.stroke();
}
// Draw the first frame (starts the RAF loop if already streaming).
animate();
// Bootstrap: initialize skrollr (scroll-driven animation) and wire up the
// page's resize handling and parallax background helpers.
var skrollr = require('skrollr'),
s = skrollr.init(),
setupResizeEvents = require('./resize-events'),
setupParallaxBackground = require('./background-parallax')
setupResizeEvents()
setupParallaxBackground()
|
<gh_stars>1-10
package org.slos.rating;
import java.util.Comparator;
public class PlacementRankComparator implements Comparator<PlacementRank> {
@Override
public int compare(PlacementRank o1, PlacementRank o2) {
if (o1.getRating().equals(o2.getRating())) {
return 0;
}
if (o1.getRating() < o2.getRating()) {
return 1;
}
else {
return -1;
}
}
} |
import nltk

# Tokenize the text strings.
# FIX: the original used curly "smart" quotes around the string literals,
# which is a SyntaxError in Python; replaced with ASCII quotes.
spanish_tokenizer = nltk.tokenize.WordPunctTokenizer()
spanish_tokens = spanish_tokenizer.tokenize("Gracias por su ayuda")

english_tokenizer = nltk.tokenize.TreebankWordTokenizer()
english_tokens = english_tokenizer.tokenize("Thank you for your help")


# Compute the Levenshtein distance of the two tokenized texts
def lev_dist(tokens1, tokens2):
    """Return the Levenshtein (edit) distance between two token sequences."""
    return nltk.edit_distance(tokens1, tokens2)


lev_distance = lev_dist(spanish_tokens, english_tokens)
print('Levenshtein distance between the two texts:', lev_distance)
# Run stress test on the batch system
# All run*.sh scripts in the $ALICE_ROOT test macro invoked
#
# Parameters:
# 1 - output prefix
# 2 - submit command
#
# Run example:
# $ALICE_ROOT/test/stressTest/stressTest.sh /d/alice12/miranov/streeTest/ "bsub -q proof"
#
outdir=$1/$ALICE_LEVEL/
submitcommand=$2
echo _____________________________________________________________
echo _____________________________________________________________
echo _____________________________________________________________
echo
echo outdir $outdir
# FIX: label previously read "subitcommand".
echo submitcommand $submitcommand
mkdirhier $outdir
ls -al $outdir
echo
echo _____________________________________________________________
echo _____________________________________________________________
echo _____________________________________________________________
#
# Snapshot the repository state alongside the results, then loop over all
# run*.sh macros and submit each one from its own work directory.
#
cd $ALICE_ROOT
git status > $outdir/git.status
git diff > $outdir/git.diff
cd $outdir
for tmacro in `ls $ALICE_ROOT/test/*/run*.sh` ; do
    dname=`dirname $tmacro`
    sname=`basename $dname`
    workdir=$outdir/$sname
    echo $sname $tmacro;
    mkdirhier $workdir
    cp $dname/* $workdir/
    cd $workdir
    # FIX: -f so a clean work dir (no *.root yet) does not emit an error.
    rm -f *.root
    echo $submitcommand $tmacro
    $submitcommand $tmacro
    cd $outdir;
done;
|
package utils;
import play.Logger;
import play.mvc.Action.Simple;
import play.mvc.Http;
import play.mvc.Result;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
public class VerboseAction extends Simple {
@Override
public CompletionStage<Result> call(Http.Context ctx) {
Logger.info("Calling action for {}", ctx);
Logger.info(ctx.session().toString());
if (ctx.session().isEmpty()) {
return CompletableFuture.supplyAsync(() -> unauthorized());
}
else {
return delegate.call(ctx);
}
}
} |
def process_data(filename):
    """Parse a two-section data file into susyhit/prospino value pairs.

    The file holds "mass,cross_section" lines grouped under the section
    headers '# susyhit data' and '# prospino data'.

    Args:
        filename: Path to the input text file.

    Returns:
        Dict with keys 'susyhit' and 'prospino', each a list of
        (mass, cross_section) float tuples in file order.
    """
    data = {'susyhit': [], 'prospino': []}
    with open(filename, 'r') as file:
        current_section = None
        for line in file:
            line = line.strip()
            if line.startswith('# susyhit data'):
                current_section = 'susyhit'
            elif line.startswith('# prospino data'):
                current_section = 'prospino'
            elif line.startswith('#'):
                # FIX: ignore any other comment line instead of trying to
                # parse it as numbers (the original raised ValueError here).
                continue
            elif line and current_section:
                mass, cross_section = map(float, line.split(','))
                data[current_section].append((mass, cross_section))
    return data
class InstanceManager:
    """Tracks per-instance state, a purge list, and a modification flag."""

    def __init__(self):
        self._state_info = {}           # instance_id -> state info
        self._instances_to_purge = set()  # ids marked for purging
        self._dirty = False             # True once state has been modified

    def add_instance(self, instance_id):
        """Register state information for a new instance."""
        self._state_info[instance_id] = True

    def mark_for_purge(self, instance_id):
        """Mark a known instance for purging; unknown ids are ignored."""
        if instance_id not in self._state_info:
            return
        self._instances_to_purge.add(instance_id)
        self._dirty = True

    def is_marked_for_purge(self, instance_id):
        """Return True when the instance is currently marked for purging."""
        return instance_id in self._instances_to_purge

    def remove_instance(self, instance_id):
        """Drop a known instance's state (and purge marker, if any)."""
        if instance_id not in self._state_info:
            return
        del self._state_info[instance_id]
        self._instances_to_purge.discard(instance_id)
        self._dirty = True

    def is_dirty(self):
        """Return True once state information has been modified."""
        return self._dirty
package main
import (
"fmt"
"os"
"os/signal"
"sync"
"github.com/docopt/docopt-go"
"github.com/mushorg/glutton"
)
// usage is the docopt grammar parsed in main; option names/defaults here
// are the keys looked up via the parsed args map.
// NOTE(review): column alignment inside this raw string was reconstructed
// (original whitespace was lost in extraction); the option text is unchanged.
var usage = `
Usage:
    server -i <interface> [options]
    server -h | --help
Options:
    -i --interface=<iface>  Bind to this interface [default: eth0].
    -l --logpath=<path>     Log file path [default: /dev/null].
    -c --confpath=<path>    Configuration file path [default: config/].
    -d --debug=<boolean>    Enable debug mode [default: false].
    -h --help               Show this screen.
`
// onErrorExit prints the error and terminates the process when err is
// non-nil; it is a no-op for a nil error.
//
// FIX: exit with a non-zero status so shells and supervisors can detect
// the failure — the original called os.Exit(0) on the error path.
func onErrorExit(err error) {
	if err != nil {
		fmt.Printf("[glutton ] %+v\n", err)
		os.Exit(1)
	}
}
// onInterruptSignal runs fn from a background goroutine the first time the
// process receives SIGINT (Ctrl-C). It registers the handler and returns
// immediately.
func onInterruptSignal(fn func()) {
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, os.Interrupt)
	go func() {
		<-sig
		fn()
	}()
}
// main parses the docopt CLI, initializes and starts the glutton honeypot,
// and installs a shutdown routine that runs on normal exit (via defer) or
// on Ctrl-C (via onInterruptSignal).
// NOTE(review): exitMtx is locked in exit() and never unlocked — presumably
// to block a second concurrent shutdown forever; confirm this is intended.
// NOTE(review): the banner's raw-string art alignment was reconstructed;
// the original whitespace was lost in extraction.
func main() {
	fmt.Println(`
  _____ _       _   _
 / ____| |     | | | |
| |  __| |_   _| |_| |_ ___  _ __
| | |_ | | | | | __| __/ _ \| '_ \
| |__| | | |_| | |_| || (_) | | | |
 \_____|_|\__,_|\__|\__\___/|_| |_|
`)
	args, err := docopt.Parse(usage, os.Args[1:], true, "", true)
	onErrorExit(err)
	gtn, err := glutton.New(args)
	onErrorExit(err)
	err = gtn.Init()
	onErrorExit(err)
	exitMtx := sync.RWMutex{}
	exit := func() {
		// See if there was a panic... (recover() only yields a value when
		// exit runs as the deferred function; from the signal path it is nil)
		fmt.Fprintln(os.Stderr, recover())
		exitMtx.Lock()
		println() // make it look nice after the ^C
		fmt.Println("[glutton ] shutting down...")
		onErrorExit(gtn.Shutdown())
	}
	defer exit()
	onInterruptSignal(func() {
		exit()
		os.Exit(0)
	})
	onErrorExit(gtn.Start())
}
|
#!/bin/bash
# Provision build/runtime dependencies on a RHEL/CentOS host: EPEL repo,
# common build tools, .NET runtime (CentOS 7 only), and JDK 11 + graphviz
# for documentation generation. Aborts on the first failing command.
set -e
# Major OS version, e.g. "7" from "CentOS Linux release 7.9.2009".
os_major_version=$(cat /etc/redhat-release | tr -dc '0-9.'|cut -d \. -f1)
# Install the matching EPEL release package once.
if ! rpm -q --quiet epel-release ; then
  yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-$os_major_version.noarch.rpm
fi
echo "installing for os major version : $os_major_version"
yum install -y which gdb redhat-lsb-core expat-devel libcurl-devel tar unzip curl zlib-devel make libunwind icu aria2 rsync bzip2 git bzip2-devel
if [ "$os_major_version" == "7" ]; then
  # install dotnet core dependencies
  yum install -y lttng-ust openssl-libs krb5-libs libicu libuuid
  # install dotnet runtimes
  yum install -y https://packages.microsoft.com/config/centos/7/packages-microsoft-prod.rpm
  yum install -y dotnet-sdk-2.1
fi
# Install Java
# Install automatic documentation generation dependencies
yum install -y java-11-openjdk-devel graphviz
|
package com.zyf.algorithm.sort;
import java.util.Arrays;
/**
* 查找无序数组中的第 K 大元素
*/
/**
 * Quickselect lookup of the k-th largest element in an unsorted array.
 * k is 1-based (k == 1 returns the maximum). The array is partitioned in
 * place, so the caller's array is reordered as a side effect.
 */
public class FindKth {
    /**
     * Returns the k-th largest element of {@code a}, or -1 when the
     * arguments are invalid (empty array or k outside [1, a.length]).
     * NOTE(review): -1 is also a legal array value, so callers cannot
     * distinguish "invalid arguments" from a genuine -1 result.
     */
    private int findKthMaxNum(int[] a, int k) {
        if (a.length <= 0 || k > a.length || k <= 0) {
            return -1;
        }
        int low = 0;
        int high = a.length - 1;
        int index = partition(a, low, high);
        // Quickselect: each partition places one pivot at its final
        // descending-sorted position 'index'; narrow toward position k-1.
        while (index != k -1) {
            if (k > index + 1) {
                low = index + 1;
            } else {
                high = index - 1;
            }
            index = partition(a, low, high);
        }
        return a[index];
    }
    /**
     * Descending Hoare-style partition around the pivot a[low]: elements
     * >= pivot end up on the left, smaller ones on the right. Returns the
     * pivot's final index.
     */
    private int partition(int[] a, int low, int high) {
        int tmp = a[low];
        while (low < high) {
            while (low < high && a[high] <= tmp) {
                high--;
            }
            a[low] = a[high];
            while (low < high && a[low] >= tmp) {
                low++;
            }
            a[high] = a[low];
        }
        a[low] = tmp;
        return low;
    }
    /** Demo: the 4th largest of {7,1,4,3,9,6,8,5} is 6. */
    public static void main(String[] args) {
        FindKth sort = new FindKth();
        int[] a = {7, 1, 4, 3, 9, 6, 8, 5};
        System.out.println(Arrays.toString(a));
        int num = sort.findKthMaxNum(a, 4);
        System.out.println(num);
    }
}
|
# Train a small convolutional network on MNIST and report test accuracy.
import tensorflow as tf
from tensorflow.keras import datasets, layers, models
# Data: reshape to (N, 28, 28, 1) single-channel images and scale the
# 0-255 pixel bytes into [0, 1].
(X_train, y_train), (X_test, y_test) = datasets.mnist.load_data()
X_train = X_train.reshape(X_train.shape[0], 28, 28, 1).astype('float32')
X_test = X_test.reshape(X_test.shape[0], 28, 28, 1).astype('float32')
X_train /= 255
X_test /= 255
# Model: three conv blocks (two with 2x2 max-pooling), then a dense head
# ending in a 10-way softmax over the digit classes.
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(layers.MaxPool2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPool2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.Flatten())
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(10, activation='softmax'))
# sparse_categorical_crossentropy: labels are integer class ids, not one-hot.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(X_train, y_train, epochs=5)
test_loss, test_acc = model.evaluate(X_test, y_test)
print('Test accuracy:', test_acc)
-- Accounts whose most recent login is more than 60 days ago
-- (inactivity-sweep candidates). DATE_SUB/CURDATE are MySQL/MariaDB syntax.
SELECT *
FROM Accounts
WHERE last_login_date < DATE_SUB(CURDATE(), INTERVAL 60 DAY)
# Launch the master entry point as a Python module.
# NOTE(review): 'cluster-middleware' contains a hyphen, which is not a valid
# Python package name for `-m` imports — confirm the actual package path
# (likely cluster_middleware.master.main).
python3 -m cluster-middleware.master.main
<gh_stars>10-100
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ratis.grpc.server;
import org.apache.ratis.grpc.GrpcUtil;
import org.apache.ratis.protocol.RaftPeerId;
import org.apache.ratis.server.RaftServer;
import org.apache.ratis.server.impl.ServerProtoUtils;
import org.apache.ratis.thirdparty.io.grpc.stub.StreamObserver;
import org.apache.ratis.proto.RaftProtos.*;
import org.apache.ratis.proto.grpc.RaftServerProtocolServiceGrpc.RaftServerProtocolServiceImplBase;
import org.apache.ratis.util.ProtoUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
/**
 * gRPC transport endpoint for the Raft server-to-server protocol:
 * unary requestVote plus streaming appendEntries/installSnapshot, all
 * delegated to the local {@link RaftServer}.
 */
public class GrpcServerProtocolService extends RaftServerProtocolServiceImplBase {
  public static final Logger LOG = LoggerFactory.getLogger(GrpcServerProtocolService.class);
  // Lazily resolves this server's peer id (used only for log messages).
  private final Supplier<RaftPeerId> idSupplier;
  // The local Raft server that actually processes the RPCs.
  private final RaftServer server;
  public GrpcServerProtocolService(Supplier<RaftPeerId> idSupplier, RaftServer server) {
    this.idSupplier = idSupplier;
    this.server = server;
  }
  RaftPeerId getId() {
    return idSupplier.get();
  }
  /** Unary requestVote: synchronous delegate call, errors wrapped for gRPC. */
  @Override
  public void requestVote(RequestVoteRequestProto request,
      StreamObserver<RequestVoteReplyProto> responseObserver) {
    try {
      final RequestVoteReplyProto reply = server.requestVote(request);
      responseObserver.onNext(reply);
      responseObserver.onCompleted();
    } catch (Throwable e) {
      GrpcUtil.warn(LOG, () -> getId() + ": Failed requestVote " + ProtoUtils.toString(request.getServerRequest()), e);
      responseObserver.onError(GrpcUtil.wrapException(e));
    }
  }
  /**
   * Bidirectional appendEntries stream. Replies are emitted in request
   * order: each onNext chains its reply onto the previous request's
   * completion future, so out-of-order async completions inside the server
   * cannot reorder the response stream.
   */
  @Override
  public StreamObserver<AppendEntriesRequestProto> appendEntries(
      StreamObserver<AppendEntriesReplyProto> responseObserver) {
    return new StreamObserver<AppendEntriesRequestProto>() {
      // Completion of the previous request's reply; serializes responses.
      private final AtomicReference<CompletableFuture<Void>> previousOnNext =
          new AtomicReference<>(CompletableFuture.completedFuture(null));
      // Set once the client half-closes; suppresses further replies.
      private final AtomicBoolean isClosed = new AtomicBoolean(false);
      @Override
      public void onNext(AppendEntriesRequestProto request) {
        final CompletableFuture<Void> current = new CompletableFuture<>();
        final CompletableFuture<Void> previous = previousOnNext.getAndSet(current);
        try {
          server.appendEntriesAsync(request).thenCombine(previous,
              (reply, v) -> {
                if (!isClosed.get()) {
                  if (LOG.isDebugEnabled()) {
                    LOG.debug(server.getId() + ": reply " + ServerProtoUtils.toString(reply));
                  }
                  responseObserver.onNext(reply);
                }
                // Unblock the next request's reply regardless of isClosed.
                current.complete(null);
                return null;
              });
        } catch (Throwable e) {
          GrpcUtil.warn(LOG, () -> getId() + ": Failed appendEntries " + ProtoUtils.toString(request.getServerRequest()), e);
          responseObserver.onError(GrpcUtil.wrapException(e, request.getServerRequest().getCallId()));
          current.completeExceptionally(e);
        }
      }
      @Override
      public void onError(Throwable t) {
        // for now we just log a msg
        GrpcUtil.warn(LOG, () -> getId() + ": appendEntries onError", t);
      }
      @Override
      public void onCompleted() {
        // compareAndSet guards against completing the response stream twice.
        if (isClosed.compareAndSet(false, true)) {
          LOG.info("{}: appendEntries completed", getId());
          responseObserver.onCompleted();
        }
      }
    };
  }
  /** Client-streaming installSnapshot: one synchronous reply per chunk. */
  @Override
  public StreamObserver<InstallSnapshotRequestProto> installSnapshot(
      StreamObserver<InstallSnapshotReplyProto> responseObserver) {
    return new StreamObserver<InstallSnapshotRequestProto>() {
      @Override
      public void onNext(InstallSnapshotRequestProto request) {
        try {
          final InstallSnapshotReplyProto reply = server.installSnapshot(request);
          responseObserver.onNext(reply);
        } catch (Throwable e) {
          GrpcUtil.warn(LOG, () -> getId() + ": Failed installSnapshot " + ProtoUtils.toString(request.getServerRequest()), e);
          responseObserver.onError(GrpcUtil.wrapException(e));
        }
      }
      @Override
      public void onError(Throwable t) {
        GrpcUtil.warn(LOG, () -> getId() + ": installSnapshot onError", t);
      }
      @Override
      public void onCompleted() {
        LOG.info("{}: installSnapshot completed", getId());
        responseObserver.onCompleted();
      }
    };
  }
}
|
<gh_stars>1-10
/**
 * Best-effort conversion of a string to its primitive value.
 *
 * Non-string inputs pass through untouched. 'true'/'false' become
 * booleans, optionally-signed decimal strings (e.g. "42", "-3.5", ".5")
 * become numbers, and everything else is returned unchanged.
 */
export default function fromString(value: any): any {
  if (typeof value !== 'string') {
    return value
  }
  switch (value) {
    case 'true':
      return true
    case 'false':
      return false
  }
  // Signed decimals only — scientific notation and hex stay as strings.
  const decimalPattern = /^[+-]?(?:\d*\.)?\d+$/
  return decimalPattern.test(value) ? Number(value) : value
}
|
const init = (name) => {
return `.${name} {
color: red;
}`;
};
module.exports = {
init,
};
|
package api
import "cf/net"
// FakeCurlRepository is a test double for the curl repository: Request
// records the arguments it was called with and returns pre-configured
// canned values.
type FakeCurlRepository struct {
	// Arguments recorded from the most recent Request call.
	Method string
	Path   string
	Header string
	Body   string
	// Canned values Request hands back to the caller.
	ResponseHeader string
	ResponseBody   string
	ApiResponse    net.ApiResponse
}

// Request captures the call's arguments on the fake and echoes the
// configured ResponseHeader/ResponseBody/ApiResponse via named returns.
func (repo *FakeCurlRepository) Request(method, path, header, body string) (resHeaders, resBody string, apiResponse net.ApiResponse) {
	repo.Method = method
	repo.Path = path
	repo.Header = header
	repo.Body = body

	resHeaders = repo.ResponseHeader
	resBody = repo.ResponseBody
	apiResponse = repo.ApiResponse
	return
}
|
import './utils/disableLogs';
import { dispatch } from '@rematch/core';
import React from 'react';
import { View } from 'react-native';
import Assets from './Assets';
import AudioManager from './AudioManager';
import Settings from './constants/Settings';
import AchievementToastProvider from './ExpoParty/AchievementToastProvider';
import Fire from './ExpoParty/Fire';
import Navigation from './Navigation';
import Gate from './rematch/Gate';
import AssetUtils from './universal/AssetUtils'; // eslint-disable-line
import { AppLoading } from './universal/Expo'; // eslint-disable-line
import THREE from './universal/THREE';
// Suppress the warning boxes configured for this project.
console.ignoredYellowBox = Settings.ignoredYellowBox;
/**
 * Root component: preloads fonts/images and the audio manager, showing a
 * loading screen until setup completes, then renders the navigation tree
 * inside the state gate and achievement-toast provider.
 */
export default class App extends React.Component {
  state = { loading: true };
  // In debug builds render a blank view instead of the splash so errors
  // surface immediately.
  get loadingScreen() {
    if (Settings.debug) {
      return <View />;
    }
    return <AppLoading />;
  }
  get screen() {
    return (
      <Gate>
        <AchievementToastProvider>
          <Navigation />
        </AchievementToastProvider>
      </Gate>
    );
  }
  // Font map keyed by file name without extension, wrapped in an array as
  // expected by the asset cache below.
  get fonts() {
    const items = {};
    const keys = Object.keys(Assets.fonts || {});
    for (const key of keys) {
      const item = Assets.fonts[key];
      const name = key.substr(0, key.lastIndexOf('.'));
      items[name] = item;
    }
    return [items];
  }
  get files() {
    return AssetUtils.arrayFromObject(Assets.images);
  }
  // NOTE(review): componentWillMount is deprecated in newer React versions;
  // this work would normally move to the constructor/componentDidMount.
  componentWillMount() {
    console.time('Startup');
    THREE.suppressExpoWarnings();
    this._setupExperienceAsync();
  }
  componentDidMount() {
    Fire.shared.init();
    dispatch.locale.getAsync();
  }
  componentWillUnmount() {
    THREE.suppressExpoWarnings(false);
  }
  // Preload assets, then audio; only then drop the loading screen.
  _setupExperienceAsync = async () => {
    await Promise.all([this._preloadAsync()]);
    await AudioManager.shared.setupAsync();
    console.timeEnd('Startup');
    this.setState({ loading: false });
  };
  async _preloadAsync() {
    await AssetUtils.cacheAssetsAsync({
      fonts: this.fonts,
      files: this.files,
    });
  }
  render() {
    return this.state.loading ? this.loadingScreen : this.screen;
  }
}
|
#!/bin/zsh
# Manage a comma-separated tag list stored in a media file's "comment"
# metadata field, via ffprobe (read) and ffmpeg (write).
#
# Usage:
#   <script> get <file>          print the file's tags
#   <script> set <file> <tags>   overwrite the file's tags
#   <script> add <file> <tags>   append tags that are not already present
#   <script> del <file> <tags>   remove the given tags
#
# FIXES vs. the original:
#  - read_tags/write_tags were defined *after* the code that calls them;
#    in a sequentially executed script that is a fatal "command not found".
#    They are now defined first.
#  - a leftover loop iterated over the undefined variables $string/$sep
#    and printed blank lines; it has been removed.
#  - the add/del loops indexed the wrong variable ($string instead of the
#    tag list) character-by-character; they now split the new tags on ','.
#  - `filename` is not a command; write_tags now uses `basename`.
#  - `[[ a < b ]]` compares lexicographically; byte counts now use -lt.

file=$2
new_tags=$3

# read_tags <file> - load the file's comment metadata into $tags.
read_tags() {
    tags=$(ffprobe -show_entries format_tags=comment $1 2> /dev/null | sed -n 2p)
    # Strip the leading "TAG:comment=" prefix (12 characters).
    tags=${tags:12}
}

# write_tags <file> <tags> - rewrite the file with a new comment field.
write_tags() {
    src_dir=$(dirname $1)
    src_file=$(basename $1)
    # {{{ writes copy of source file with tags in /tmp or target directory
    # itself according to space availability
    filesize=$(stat -c %s $1)
    freetmp=$(/bin/df --output=avail /tmp 2> /dev/null | tail -n 1)000
    freedir=$(/bin/df --output=avail $1 2> /dev/null | tail -n 1)000
    if [[ $filesize -lt $freetmp ]]; then
        temp_dest="/tmp"
    elif [[ $filesize -lt $freedir ]]; then
        temp_dest=$src_dir
    else
        echo "Error: not enough free space in /tmp or $src_dir for file $1"
        exit 99
    fi
    # }}}
    ffmpeg -i $1 -metadata comment=$2 -codec copy $temp_dest/$src_file.wip && /bin/mv $temp_dest/$src_file.wip $1
}

if [[ "$1" == "get" ]]; then
    read_tags $file
    echo $tags
    exit 0
elif [[ "$1" == "set" ]]; then
    write_tags $file $new_tags
    exit 0
elif [[ "$1" == "add" ]]; then
    read_tags $file
    old_tags=$tags
    # Append each requested tag that is not already present.
    for value in ${(s:,:)new_tags}; do
        if [[ "${tags/$value/}" == "$tags" ]]; then
            if [[ -n "$tags" ]]; then
                tags+=",$value"
            else
                tags=$value
            fi
        fi
    done
    if [[ "$tags" != "$old_tags" ]]; then
        write_tags $file $tags
    fi
elif [[ "$1" == "del" ]]; then
    read_tags $file
    old_tags=$tags
    # Remove each requested tag by deleting its first occurrence.
    # NOTE(review): like the original, this is plain substring removal and
    # can leave stray commas or match inside a longer tag name.
    for value in ${(s:,:)new_tags}; do
        tags=${tags/$value/}
    done
    if [[ "$tags" != "$old_tags" ]]; then
        write_tags $file $tags
    fi
else
    echo "Unrecognized command: $1"
    exit 1
fi
|
#!/bin/sh
# This is a generated file; do not edit or check into version control.
# NOTE(review): the values mix Windows backslash paths with a POSIX
# ${SOURCE_ROOT} expansion — generated on Windows; confirm this file is
# regenerated on the build host rather than edited by hand.
export "FLUTTER_ROOT=D:\flutter"
export "FLUTTER_APPLICATION_PATH=D:\projects\fluttertraining"
export "FLUTTER_TARGET=lib\main.dart"
export "FLUTTER_BUILD_DIR=build"
export "SYMROOT=${SOURCE_ROOT}/../build\ios"
export "FLUTTER_FRAMEWORK_DIR=D:\flutter\bin\cache\artifacts\engine\ios"
export "FLUTTER_BUILD_NAME=1.0.0"
export "FLUTTER_BUILD_NUMBER=1"
|
<gh_stars>1-10
//******************************************************************************
// MSP430FR6989 Demo - ADC12B, Sample A3, 2.5V Shared Ref, TLV, CRC16
//
// Based in example "tlv_ex3_calibrateTempSensor" of "MSP430 DriverLib - TI"
// This example show how to get and use TLV data to increase accuracy of
// internal reference value (real value) and ADC data using it as reference
//
// -----------------
// /|\| XIN|-
// | | |
// --|RST XOUT|-
// | |
// >---|P1.3/A3 |
//
// Author: <NAME> - <EMAIL>
// 2017/08/01
//******************************************************************************
#include <msp430.h>
#include "driverlib.h"
#define SAMPLES 64      /* readings averaged before correction math runs */
#define CRC_HW // calculate CRC using CRC16 HW of MSP430
#define CRC_SW // calculate CRC by software
/*
 * TLV -ADC Variables
 */
struct s_TLV_ADC_Cal_Data *pADCCAL;
uint8_t bADCCAL_bytes;
uint16_t ADCGain;
int16_t ADCOffset;
uint16_t ADC_12V_Ref_30C, ADC_12V_Ref_85C, ADC_20V_Ref_30C;
uint16_t ADC_20V_Ref_85C, ADC_25V_Ref_30C, ADC_25V_Ref_85C;
/*
 * TLV - REF Variables
 */
struct s_TLV_REF_Cal_Data *pREFCAL;
uint8_t bREFCAL_bytes;
uint16_t REF_12V, REF_20V, REF_25V;
/*
 * other variables
 * NOTE(review): ADC_raw (and counter) are written from the ADC12 ISR and
 * read in main's loop but are not declared volatile — confirm the
 * compiler/optimization level makes this safe.
 */
const float Vref_ideal = 2.5f;
float Vref_correct = 0;
float Vbit_ideal = 0;
float Vbit_correct = 0;
float Gain_correct = 0;
uint16_t ADC_raw = 0;
uint32_t ADC_average = 0;
float ADC_correct1 = 0;
float ADC_correct2 = 0;
float ADC_correct3 = 0;
float ADC_correct4 = 0;
float ADC_correct5 = 0;
uint16_t counter = 0;
/*
 * Prototype of functions
 */
void readADCCAL(void);
void readREFCAL(void);
uint16_t crc16(const uint8_t* data_p, uint8_t length);
/*
 * Entry point: validates the TLV checksum (hardware CRC16 and/or software
 * CRC, per the CRC_HW/CRC_SW switches), reads the REF/ADC calibration
 * tags, configures ADC12_B to sample A3 against the internal 2.5V
 * reference, then loops forever averaging SAMPLES readings and computing
 * progressively better-corrected voltage values (ADC_correct1..5).
 */
void main(void)
{
    /* Stop WDT */
    WDT_A_hold(WDT_A_BASE);
#ifdef CRC_HW
    /*
     * Calculate the CRC16 using MSP430 CRC16 HW over the TLV data area
     * (0x1A04-0x1AFF) and compare against the stored checksum at 0x1A02.
     * Based in this post
     * - https://e2e.ti.com/support/microcontrollers/msp430/f/166/p/52174/713993#713993
     */
    uint16_t i;
    uint8_t CRCRESULT_LSB, CRCRESULT_MSB;
    CRCINIRES = 0xFFFF;
    i = 0x01A04;
    for (i = 0x01A04; i <= 0x01AFF; i++) {
        CRCDIRB_L = *(unsigned char*)(i);
    };
    CRCRESULT_LSB = CRCINIRES_L;
    CRCRESULT_MSB = CRCINIRES_H;
    if (CRCRESULT_LSB != *(unsigned char*)(0x1A02))
        while (1); //crc fail
    if (CRCRESULT_MSB != *(unsigned char*)(0x1A03))
        while (1); // crc fail
    __no_operation();
#endif
#ifdef CRC_SW
    /*
     * calculate the CRC based in Software over the same TLV range
     */
    volatile uint8_t *ptr = (uint8_t *)0x01A04;
    volatile uint16_t crc_value = 0;
    crc_value = crc16(ptr, (0x01AFF - 0x01a04 + 1)); // 252 bytes
    if ((crc_value & 0xFF) != *(unsigned char*)(0x1A02))
        while (1); //crc fail
    if (((crc_value >> 8) & 0xFF) != *(unsigned char*)(0x1A03))
        while (1); // crc fail
    __no_operation(); // add breakpoint to debug value
#endif
    /*
     * Read TLV specific values
     */
    readREFCAL();
    readADCCAL();
    /* Calculate the real/correct Vref value from the factory REF_25V tag
     * (stored as a 1.15 fixed-point ratio, hence the divide by 2^15). */
    Vref_correct = ((float)REF_25V * Vref_ideal) / (uint16_t)(1<<15);
    __no_operation(); // add breakpoint to debug value
    /*
     * Select Port 1
     * Set Pin 0 to output Ternary Module Function, (V_ref+).
     * Set Pin 3 to output Ternary Module Function, (A3, C3).
     */
    GPIO_setAsPeripheralModuleFunctionOutputPin(
        GPIO_PORT_P1,
        GPIO_PIN0 + GPIO_PIN3,
        GPIO_TERNARY_MODULE_FUNCTION
        );
    /*
     * Disable the GPIO power-on default high-impedance mode to activate
     * previously configured port settings
     */
    PMM_unlockLPM5();
    /* If ref generator busy, WAIT */
    while(Ref_A_isRefGenBusy(REF_A_BASE));
    /* Select internal reference to 2.5V */
    Ref_A_setReferenceVoltage(REF_A_BASE, REF_A_VREF2_5V);
    /* Turn on Reference Voltage */
    Ref_A_enableReferenceVoltage(REF_A_BASE);
    // /* Enable Ref_out in P1.0 */
    // Ref_A_enableReferenceVoltageOutput(REF_A_BASE);
    /*
     * Configure ADC12 module - Base address of ADC12B Module
     *
     * Use internal ADC12B bit as sample/hold signal to start conversion
     * USE MODOSC 5MHZ Digital Oscillator as clock source
     * Use default clock divider/pre-divider of 1
     * Not use internal channel
     */
    ADC12_B_initParam initParam = {0};
    initParam.sampleHoldSignalSourceSelect = ADC12_B_SAMPLEHOLDSOURCE_SC;
    initParam.clockSourceSelect = ADC12_B_CLOCKSOURCE_ADC12OSC;
    initParam.clockSourceDivider = ADC12_B_CLOCKDIVIDER_1;
    initParam.clockSourcePredivider = ADC12_B_CLOCKPREDIVIDER__1;
    initParam.internalChannelMap = ADC12_B_NOINTCH;
    ADC12_B_init(ADC12_B_BASE, &initParam);
    /* Enable the ADC12B module */
    ADC12_B_enable(ADC12_B_BASE);
    /*
     * Base address of ADC12B Module
     * For memory buffers 0-7 sample/hold for 64 clock cycles
     * For memory buffers 8-15 sample/hold for 4 clock cycles (default)
     * Disable Multiple Sampling
     * NOTE(review): the code actually selects ADC12_B_CYCLEHOLD_16_CYCLES,
     * not the 64 cycles the comment above claims — confirm intent.
     */
    ADC12_B_setupSamplingTimer(ADC12_B_BASE,
                               ADC12_B_CYCLEHOLD_16_CYCLES,
                               ADC12_B_CYCLEHOLD_4_CYCLES,
                               ADC12_B_MULTIPLESAMPLESDISABLE);
    /*
     * Configure Memory Buffer
     *
     * Base address of the ADC12B Module
     * Configure memory buffer 0
     * Map input A3 to memory buffer 0
     * Vref+ = IntBuffer
     * Vref- = AVss
     * Memory buffer 0 is not the end of a sequence
     */
    ADC12_B_configureMemoryParam configureMemoryParam = {0};
    configureMemoryParam.memoryBufferControlIndex = ADC12_B_MEMORY_0;
    configureMemoryParam.inputSourceSelect = ADC12_B_INPUT_A3;
    configureMemoryParam.refVoltageSourceSelect = ADC12_B_VREFPOS_INTBUF_VREFNEG_VSS;
    configureMemoryParam.endOfSequence = ADC12_B_NOTENDOFSEQUENCE;
    configureMemoryParam.windowComparatorSelect = ADC12_B_WINDOW_COMPARATOR_DISABLE;
    configureMemoryParam.differentialModeSelect = ADC12_B_DIFFERENTIAL_MODE_DISABLE;
    ADC12_B_configureMemory(ADC12_B_BASE, &configureMemoryParam);
    ADC12_B_clearInterrupt(ADC12_B_BASE, 0, ADC12_B_IFG0);
    /* Enable memory buffer 0 interrupt */
    ADC12_B_enableInterrupt(ADC12_B_BASE, ADC12_B_IE0, 0, 0);
    __delay_cycles(75); // reference settling ~75us (assumes ~1MHz MCLK — confirm)
    while(1)
    {
        /*
         * Enable/Start sampling and conversion
         *
         * Base address of ADC12B Module
         * Start the conversion into memory buffer 0
         * Use the single-channel, single-conversion mode
         */
        ADC12_B_startConversion(ADC12_B_BASE,
                                ADC12_B_MEMORY_0,
                                ADC12_B_SINGLECHANNEL);
        __bis_SR_register(LPM0_bits + GIE); // LPM0, ADC10_ISR will force exit
        __no_operation(); // For debug only
        if (counter < SAMPLES)
        {
            /* Accumulate; the ISR stored the latest conversion in ADC_raw. */
            ADC_average += (uint32_t)ADC_raw;
            counter++;
        }
        else
        {
            ADC_average /= SAMPLES;
            Vbit_ideal = Vref_ideal / 4096; // ideal adc volts/bit resolution
            Vbit_correct = Vref_correct / 4096; // real adc volts/bit resolution
            Gain_correct = (float)ADCGain / 32768; // gain factor correction
            /* 1 - Without any calibration */
            ADC_correct1 = (float)ADC_average * Vbit_ideal;
            /* 2 - Correcting Vref */
            ADC_correct2 = (float)ADC_average * Vbit_correct;
            /* 3 - Correcting Vref and ADC Gain */
            ADC_correct3 = (float)ADC_average * Vbit_correct * Gain_correct;
            /* 4 - Correcting Vref, ADC Gain and ADC offset */
            ADC_correct4 = (float)ADC_average * Vbit_correct * Gain_correct + (ADCOffset * Vbit_correct);
            /* 5 - Correcting Vref, ADC Gain and ADC offset - less computer intensive */
            ADC_correct5 = (Vbit_ideal) * ((((((int32_t)(ADC_average << 1) * (int32_t)REF_25V) >> 16) * (int32_t)ADCGain) >> 15) + (int32_t)ADCOffset);
            ADC_average = 0;
            counter = 0;
            __no_operation();
        }
    }
}
/*
 * ADC12_B interrupt service routine. Only vector 12 (ADC12BMEM0 conversion
 * complete) does work: it stores the result in ADC_raw and clears LPM0 so
 * the main loop resumes; all other vectors are ignored.
 */
#if defined(__TI_COMPILER_VERSION__) || defined(__IAR_SYSTEMS_ICC__)
#pragma vector=ADC12_VECTOR
__interrupt
#elif defined(__GNUC__)
__attribute__((interrupt(ADC12_VECTOR)))
#endif
void ADC12_ISR(void)
{
    switch(__even_in_range(ADC12IV,12))
    {
        case 0: break;        // Vector 0: No interrupt
        case 2: break;        // Vector 2: ADC12BMEMx Overflow
        case 4: break;        // Vector 4: Conversion time overflow
        case 6: break;        // Vector 6: ADC12BHI
        case 8: break;        // Vector 8: ADC12BLO
        case 10: break;       // Vector 10: ADC12BIN
        case 12:              // Vector 12: ADC12BMEM0 Interrupt
            ADC_raw = ADC12_B_getResults(ADC12_B_BASE, ADC12_B_MEMORY_0);
            __bic_SR_register_on_exit(LPM0_bits); // Exit active CPU
            break;            // Clear CPUOFF bit from 0(SR)
        case 14: break;       // Vector 14: ADC12BMEM1
        case 16: break;       // Vector 16: ADC12BMEM2
        case 18: break;       // Vector 18: ADC12BMEM3
        case 20: break;       // Vector 20: ADC12BMEM4
        case 22: break;       // Vector 22: ADC12BMEM5
        case 24: break;       // Vector 24: ADC12BMEM6
        case 26: break;       // Vector 26: ADC12BMEM7
        case 28: break;       // Vector 28: ADC12BMEM8
        case 30: break;       // Vector 30: ADC12BMEM9
        case 32: break;       // Vector 32: ADC12BMEM10
        case 34: break;       // Vector 34: ADC12BMEM11
        case 36: break;       // Vector 36: ADC12BMEM12
        case 38: break;       // Vector 38: ADC12BMEM13
        case 40: break;       // Vector 40: ADC12BMEM14
        case 42: break;       // Vector 42: ADC12BMEM15
        case 44: break;       // Vector 44: ADC12BMEM16
        case 46: break;       // Vector 46: ADC12BMEM17
        case 48: break;       // Vector 48: ADC12BMEM18
        case 50: break;       // Vector 50: ADC12BMEM19
        case 52: break;       // Vector 52: ADC12BMEM20
        case 54: break;       // Vector 54: ADC12BMEM21
        case 56: break;       // Vector 56: ADC12BMEM22
        case 58: break;       // Vector 58: ADC12BMEM23
        case 60: break;       // Vector 60: ADC12BMEM24
        case 62: break;       // Vector 62: ADC12BMEM25
        case 64: break;       // Vector 64: ADC12BMEM26
        case 66: break;       // Vector 66: ADC12BMEM27
        case 68: break;       // Vector 68: ADC12BMEM28
        case 70: break;       // Vector 70: ADC12BMEM29
        case 72: break;       // Vector 72: ADC12BMEM30
        case 74: break;       // Vector 74: ADC12BMEM31
        case 76: break;       // Vector 76: ADC12BRDY
        default: break;
    }
}
/*******************************************************************************
 * crc16 - CRC-16/CCITT-FALSE checksum of a byte buffer
 *
 * *data_p: pointer to the data bytes
 * length:  number of bytes to process
 *
 * Polynomial 0x1021, initial value 0xFFFF, no input/output reflection,
 * no final XOR.
 *
 * Based in this post: https://stackoverflow.com/a/23726131
 ******************************************************************************/
uint16_t crc16(const uint8_t* data_p, uint8_t length)
{
    uint16_t crc = 0xFFFF;
    uint8_t i;

    for (i = 0; i < length; i++)
    {
        /* Fold the next byte in, then apply the nibble-optimised update. */
        uint8_t x = (uint8_t)((crc >> 8) ^ data_p[i]);
        x ^= (uint8_t)(x >> 4);
        crc = (uint16_t)((crc << 8) ^ ((uint16_t)x << 12) ^ ((uint16_t)x << 5) ^ (uint16_t)x);
    }
    return crc;
}
/******************************************************************************
* This function will search the TLV block for the ADC CAL tag and store the
* address of the first data in the block in the reference pointer passed as an
* argument. The data is then saved to local variables.
******************************************************************************/
/*
 * Locate the ADC calibration TLV block and cache its values in globals.
 *
 * TLV_getInfo() stores the block length in bADCCAL_bytes and points pADCCAL
 * at the factory calibration structure; the individual gain/offset/reference
 * fields are then copied into module-level variables for later use.
 */
void readADCCAL(void)
{
    /* Read ADC12 Calibration Values */
    TLV_getInfo(TLV_TAG_ADCCAL,
                0,
                &bADCCAL_bytes,
                (uint16_t **)&pADCCAL
                );
    /* Copy the factory values out of the TLV structure. */
    ADCGain = pADCCAL->adc_gain_factor;
    ADCOffset = pADCCAL->adc_offset;
    ADC_12V_Ref_30C = pADCCAL->adc_ref15_30_temp;
    ADC_12V_Ref_85C = pADCCAL->adc_ref15_85_temp;
    ADC_20V_Ref_30C = pADCCAL->adc_ref20_30_temp;
    ADC_20V_Ref_85C = pADCCAL->adc_ref20_85_temp;
    ADC_25V_Ref_30C = pADCCAL->adc_ref25_30_temp;
    ADC_25V_Ref_85C = pADCCAL->adc_ref25_85_temp;
}
/******************************************************************************
* This function will search the TLV block for the REF CAL tag and store the
* address of the first data in the block in the reference pointer passed as an
* argument. The data is then saved to local variables.
******************************************************************************/
/*
 * Locate the reference-voltage calibration TLV block and cache its values.
 *
 * Mirrors readADCCAL(): bREFCAL_bytes receives the block length, pREFCAL the
 * structure pointer, and the three reference calibration words are copied
 * into globals.
 */
void readREFCAL(void)
{
    /* Read REF Calibration Values */
    TLV_getInfo(TLV_TAG_REFCAL,
                0,
                &bREFCAL_bytes,
                (uint16_t **)&pREFCAL
                );
    REF_12V = pREFCAL->ref_ref15;
    REF_20V = pREFCAL->ref_ref20;
    REF_25V = pREFCAL->ref_ref25;
}
|
#!/bin/sh
# Synology-style init script for the NZBGet download daemon.
# Package
PACKAGE="nzbget"
DNAME="NZBGet"
# Others
# Install layout and runtime files referenced by the helper functions below.
INSTALL_DIR="/usr/local/${PACKAGE}"
PYTHON_DIR="/usr/local/python"
PATH="${INSTALL_DIR}/bin:/usr/local/bin:/bin:/usr/bin:/usr/syno/bin"
# Unprivileged account the daemon runs as.
USER="nzbget"
NZBGET="${INSTALL_DIR}/bin/nzbget"
CFG_FILE="${INSTALL_DIR}/var/nzbget.conf"
PID_FILE="${INSTALL_DIR}/var/nzbget.pid"
LOG_FILE="${INSTALL_DIR}/var/nzbget.log"
# Launch nzbget in daemon mode (-D) as the service user, with the package
# bin directory first on PATH so the bundled tools resolve.
start_daemon ()
{
    su - ${USER} -c "PATH=${PATH} ${NZBGET} -c ${CFG_FILE} -D"
}
# Ask the daemon to exit, escalate to SIGKILL if it has not stopped within
# ~20 seconds, then remove the stale PID file.
stop_daemon ()
{
    kill `cat ${PID_FILE}`
    wait_for_status 1 20 || kill -9 `cat ${PID_FILE}`
    rm -f ${PID_FILE}
}
# Report whether the daemon is alive: returns 0 when the PID file exists and
# its process answers signal 0; otherwise removes any stale PID file and
# returns 1.
daemon_status ()
{
    if [ -f ${PID_FILE} ]; then
        if kill -0 `cat ${PID_FILE}` > /dev/null 2>&1; then
            return 0
        fi
    fi
    # Stale or missing PID file: clean up and report "not running".
    rm -f ${PID_FILE}
    return 1
}
# Poll daemon_status until it returns the expected code or the timeout runs out.
#   $1 - expected daemon_status return code (0 = running, 1 = stopped)
#   $2 - maximum number of one-second polls
# Returns 0 on success, 1 on timeout.
wait_for_status ()
{
    counter=$2
    while [ ${counter} -gt 0 ]; do
        daemon_status
        [ $? -eq $1 ] && return
        # POSIX arithmetic expansion; the original 'let' is a bashism that is
        # not guaranteed to exist under #!/bin/sh.
        counter=$((counter - 1))
        sleep 1
    done
    return 1
}
# Service entry point: dispatch on the action verb supplied by the init system.
# Exit codes follow init-script convention (0 = success/already in state).
case $1 in
    start)
        if daemon_status; then
            echo ${DNAME} is already running
            exit 0
        else
            echo Starting ${DNAME} ...
            start_daemon
            exit $?
        fi
        ;;
    stop)
        if daemon_status; then
            echo Stopping ${DNAME} ...
            stop_daemon
            exit $?
        else
            echo ${DNAME} is not running
            exit 0
        fi
        ;;
    status)
        # 'status' exits 1 when stopped so callers can branch on it.
        if daemon_status; then
            echo ${DNAME} is running
            exit 0
        else
            echo ${DNAME} is not running
            exit 1
        fi
        ;;
    log)
        # Print the log file path (not its contents).
        echo ${LOG_FILE}
        exit 0
        ;;
    *)
        exit 1
        ;;
esac
|
#!/bin/sh
# To use:
# - place this script to /usr/local/etc/rc.d/syz_ci
# - chmod a+x /usr/local/etc/rc.d/syz_ci
# - add the following to /etc/rc.conf (uncommented):
#
# syz_ci_enable="YES"
# syz_ci_chdir="/syzkaller"
# syz_ci_flags="-config config-freebsd.ci"
# syz_ci_log="/syzkaller/syz-ci.log"
# syz_ci_path="/syzkaller/syz-ci"
#
# Then syz-ci will start after boot, to manually start/stop:
# service syz_ci stop
# service syz_ci start
# PROVIDE: syz_ci
# REQUIRE: LOGIN
. /etc/rc.subr
# rc.subr service definition; syz_ci_path/chdir/flags/log come from /etc/rc.conf.
command="${syz_ci_path}"
name="syz_ci"
pidfile="/var/run/${name}.pid"
rcvar="syz_ci_enable"
start_cmd="syz_ci_start"
stop_cmd="syz_ci_stop"
# syz-ci needs to be able to find the go executable.
PATH=${PATH}:/usr/local/bin
syz_ci_start()
{
    # Run from the configured working directory so relative paths in the
    # syz-ci config resolve.
    cd "${syz_ci_chdir}"
    # daemon(8): -f detaches stdio, -o redirects output to the log file,
    # -p writes the pidfile used by syz_ci_stop.
    daemon -f -o "${syz_ci_log}" -p ${pidfile} "${syz_ci_path}" ${syz_ci_flags}
}
# Gracefully stop syz-ci: send SIGINT and wait up to two minutes for exit.
syz_ci_stop()
{
    local _pid
    _pid=$(cat ${pidfile})
    # Bail out if the interrupt signal could not be delivered.
    if ! kill -INT "${_pid}"; then
        return 1
    fi
    pwait -t 120s "${_pid}"
}
load_rc_config $name
run_rc_command "$1"
|
package com.zys.paylib.pay;
import android.app.Activity;
import com.zys.paylib.alipay.AlipayUtil;
/**
 * Abstraction over a payment channel (e.g. Alipay).
 */
public interface IPay {
    /**
     * Start a payment flow.
     *
     * @param activity  host activity used to launch the payment UI
     * @param payNumber order/trade number identifying this payment
     *                  (renamed from the misspelled "paynumbe")
     * @param price     amount to charge
     * @param callback  receives the Alipay result
     */
    void pay(Activity activity, String payNumber, double price, AlipayUtil.AlipayCallBack callback);
}
|
# Replay the pre-generated node-2 job list with 6 concurrent GNU parallel workers.
parallel --jobs 6 < ./results/exp_disk_lustre/run-0/lustre_4n_6t_2d_1000f_617m_10i/jobs/jobs_n2.txt
|
-- BigQuery JavaScript UDF: number of unique H3 cells at the given resolution.
-- Requires the h3-js library bundle to be staged at the GCS path below.
CREATE OR REPLACE FUNCTION your_project_id.your_dataset.h3_num_hexagons(res NUMERIC)
RETURNS NUMERIC
LANGUAGE js AS
"""
return h3.numHexagons(res);
"""
OPTIONS (
  library=['gs://file_path']
);
|
const express = require('express');
const path = require('path');
const app = express();
app.get('/', (req, res) => {
res.sendFile(path.join(__dirname, 'index.html'));
});
app.listen(8080, () => {
console.log('Serving www.example.com on port 8080');
}); |
from diem import LocalAccount
from offchain import CommandResponseObject, jws, CommandResponseStatus
def test_serialize_deserialize():
    """JWS round-trip: serializing a response and deserializing it with the
    matching public key must yield an object equal to the original."""
    account = LocalAccount.generate()
    response = CommandResponseObject(
        status=CommandResponseStatus.success,
        cid="3185027f05746f5526683a38fdb5de98",
    )
    # Sign with the account's private key ...
    ret = jws.serialize(response, account.private_key.sign)
    # ... and verify/parse with the corresponding public key.
    resp = jws.deserialize(
        ret,
        CommandResponseObject,
        account.private_key.public_key().verify,
    )
    assert resp == response
|
<reponame>dingxiaobo/rest-api-dispatcher
package cn.dxbtech.restapidispatcher;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.util.StringUtils;
import org.springframework.web.client.HttpClientErrorException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.util.*;
/**
 * Servlet that proxies REST calls to backend hosts chosen by a URL prefix.
 *
 * Host mappings come either from the constructors or, when running purely
 * from web.xml, from the servlet init params "hosts" (inline JSON) or
 * "hosts-config-location" (classpath resource pattern). A session token is
 * forwarded to backends under the configurable {@code tokenKey} header.
 */
public class RestApiDispatcher extends HttpServlet {
    // Header name under which the session token is forwarded to backends.
    private String tokenKey = "token";
    // Path segment (e.g. "/api") that precedes the host prefix in request URIs.
    private String apiPrefix;
    // Raw mapping list as configured; indexed into hostMapping in init().
    private List<HostMapping> hostMappings;
    private final Logger logger = LoggerFactory.getLogger(getClass().getSimpleName());
    private final LocalRestTemplate template;
    // prefix -> mapping lookup built in init().
    private Map<String, HostMapping> hostMapping;
    /** No-arg constructor used by the servlet container; 5s read timeout. */
    public RestApiDispatcher() {
        SimpleClientHttpRequestFactory requestFactory = new SimpleClientHttpRequestFactory();
        requestFactory.setReadTimeout(5000);
        template = new LocalRestTemplate(requestFactory);
    }
    /** Programmatic configuration with an explicit token header name. */
    public RestApiDispatcher(String tokenKey, String apiPrefix, HostMapping... hostMappings) {
        this();
        this.apiPrefix = apiPrefix;
        this.tokenKey = tokenKey;
        this.hostMappings = new LinkedList<HostMapping>();
        this.hostMappings.addAll(Arrays.asList(hostMappings));
    }
    /** Programmatic configuration keeping the default token header name. */
    public RestApiDispatcher(String apiPrefix, HostMapping... hostMappings) {
        this();
        this.apiPrefix = apiPrefix;
        this.hostMappings = new LinkedList<HostMapping>();
        this.hostMappings.addAll(Arrays.asList(hostMappings));
    }
    /**
     * Resolves the mapping configuration (init params or config files when not
     * set programmatically) and builds the prefix lookup table, rejecting
     * duplicate prefixes.
     */
    @Override
    public void init() throws ServletException {
        if (hostMappings == null) {
            // hostMappings not set programmatically: read init params from web.xml
            apiPrefix = getInitParameter("api-prefix");
            String initParameterTokenKey = getInitParameter("token-key");
            if (!StringUtils.isEmpty(initParameterTokenKey)) {
                tokenKey = initParameterTokenKey;
            }
            try {
                TypeReference<List<HostMapping>> typeRef = new TypeReference<List<HostMapping>>() {
                };
                JsonFactory jsonFactory = new JsonFactory();
                jsonFactory.enable(JsonParser.Feature.ALLOW_COMMENTS);
                ObjectMapper objectMapper = new ObjectMapper(jsonFactory);
                try {
                    String hostsJson = getInitParameter("hosts");
                    if (StringUtils.isEmpty(hostsJson)) {
                        throw new JsonParseException(null, "Servlet init param [hosts] not found.");
                    }
                    hostMappings = objectMapper.readValue(hostsJson, typeRef);
                } catch (JsonParseException e) {
                    // Inline JSON missing or malformed: fall back to reading JSON
                    // from the configured resource location(s).
                    ResourcePatternResolver resourcePatternResolver = new PathMatchingResourcePatternResolver();
                    String hostsJsonConfigFileLocation = getInitParameter("hosts-config-location");
                    if (StringUtils.isEmpty(hostsJsonConfigFileLocation)) {
                        // hosts-config-location missing from servlet init params
                        throw new IOException("Read [hosts] and [hosts-config-location] from servlet init param failed. [hosts-config-location] not found.", e);
                    }
                    for (Resource hostsConfigLocation : resourcePatternResolver.getResources(hostsJsonConfigFileLocation)) {
                        InputStream in = hostsConfigLocation.getInputStream();
                        StringBuilder stringBuilder = new StringBuilder();
                        byte[] b = new byte[4096];
                        for (int n; (n = in.read(b)) != -1; ) {
                            stringBuilder.append(new String(b, 0, n));
                        }
                        List<HostMapping> hms = objectMapper.readValue(stringBuilder.toString(), typeRef);
                        if (hostMappings == null) hostMappings = new LinkedList<HostMapping>();
                        hostMappings.addAll(hms);
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        hostMapping = new LinkedHashMap<String, HostMapping>();
        for (HostMapping mapping : hostMappings) {
            if (hostMapping.get(mapping.getPrefix()) != null) {
                throw new ServletException("Duplicated prefix[" + mapping.getPrefix() + "]: [" + mapping + "] has same prefix with [" + hostMapping.get(mapping.getPrefix()) + "] ");
            }
            hostMapping.put(mapping.getPrefix(), mapping);
            logger.info("Api mapping{}: [/{}]\t{} -> {}", mapping.isDebug() ? "(local DEBUG)" : "", mapping.getName(), mapping.getPrefix(), mapping.getUrl());
        }
    }
    /**
     * Builds the backend URL for a request, or returns null when the request
     * does not match any configured prefix (caller then falls through to the
     * default servlet behaviour).
     */
    private String getUrl(HttpServletRequest request) {
        // Parse the service prefix out of the request URI: it is the second
        // non-empty path segment (the first is assumed to be the context/api
        // prefix -- NOTE(review): confirm for multi-segment context paths).
        int count = 0;
        String prefix = null;
        String requestURI = request.getRequestURI();
        for (String s : requestURI.split("/")) {
            if (!StringUtils.isEmpty(s)) {
                count++;
                if (count == 2) {
                    prefix = s;
                    break;
                }
            }
        }
        if (prefix == null) return null;
        // Look up the target host by prefix
        HostMapping hostMapping = this.hostMapping.get(prefix);
        if (hostMapping == null) {
            return null;
        }
        if (hostMapping.isDebug()) {
            // Debug mode: serve the local resource instead of proxying
            return requestURI;
        }
        String url = hostMapping.getUrl() + requestURI.substring((request.getContextPath() + apiPrefix + '/' + prefix).length());
        if (!StringUtils.isEmpty(request.getQueryString())) {
            url = url + "?" + request.getQueryString();
        }
        logger.debug("request uri: {} -> {}", requestURI, url);
        return url;
    }
    /**
     * Forwards the incoming request to the mapped backend and relays status,
     * headers and body back to the client. Client errors (4xx) from the
     * backend are relayed rather than rethrown.
     */
    @Override
    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        int statusCode;
        String responseBody;
        HttpHeaders responseHeaders;
        // Forward the request
        try {
            HttpEntity<String> httpEntity = new HttpEntity<String>(charReader(request), getHttpHeaders(request));
            String url = getUrl(request);
            if (StringUtils.isEmpty(url)) {
                super.service(request, response);
                return;
            }
            ResponseEntity<String> exchange = template.exchange(url, HttpMethod.resolve(request.getMethod()), httpEntity, String.class);
            // Successful response
            statusCode = exchange.getStatusCodeValue();
            responseHeaders = exchange.getHeaders();
            responseBody = exchange.getBody();
        } catch (HttpClientErrorException e) {
            // Error response from the backend: relay it as-is
            statusCode = e.getRawStatusCode();
            responseBody = e.getResponseBodyAsString();
            responseHeaders = e.getResponseHeaders();
        }
        // headers: collapse multi-valued headers into comma-separated strings
        for (String key : responseHeaders.keySet()) {
            StringBuilder value = new StringBuilder("");
            for (String v : responseHeaders.get(key)) {
                value.append(v).append(',');
            }
            // Strip the trailing comma separator
            if (!StringUtils.isEmpty(value)) {
                value.deleteCharAt(value.length() - 1);
            }
            String v = value.toString();
            response.setHeader(key, v);
        }
        PrintWriter out = response.getWriter();
        out.write(responseBody);
        response.setStatus(statusCode);
        out.flush();
    }
    /**
     * Copies request headers for forwarding, dropping cookies and appending
     * the session token under {@code tokenKey}.
     */
    private HttpHeaders getHttpHeaders(HttpServletRequest request) {
        HttpHeaders headers = new HttpHeaders();
        Enumeration<String> headerNames = request.getHeaderNames();
        while (headerNames.hasMoreElements()) {
            String headerName = headerNames.nextElement();
            if ("cookie".equalsIgnoreCase(headerName)) continue;
            StringBuilder value = new StringBuilder("");
            Enumeration<String> requestHeaders = request.getHeaders(headerName);
            while (requestHeaders.hasMoreElements()) {
                value.append(requestHeaders.nextElement()).append(',');
            }
            // Strip the trailing comma separator
            if (!StringUtils.isEmpty(value)) {
                value.deleteCharAt(value.length() - 1);
            }
            headers.add(headerName, value.toString());
        }
        String token = (String) request.getSession().getAttribute(tokenKey);
        headers.add(tokenKey, StringUtils.isEmpty(token) ? "<empty token>" : token);
        return headers;
    }
    /** Reads the request body into a single string (line separators dropped). */
    private String charReader(HttpServletRequest request) {
        String str;
        StringBuilder wholeStr = new StringBuilder();
        BufferedReader br = null;
        try {
            br = request.getReader();
            while ((str = br.readLine()) != null) {
                wholeStr.append(str);
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (br != null) {
                    br.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return wholeStr.toString();
    }
}
|
#!/usr/bin/env bash
################################################################################
# Compute Engine
################################################################################
# Create Compute Engine virtual machine instance
# In:
# MY_GCP_GCE_NAME
# MY_GCP_ZONE
# MY_GCP_GCE_TYPE
# MY_GCP_SUBNET
# MY_GCP_SA_ID
# MY_GCP_GCE_IMAGE_PROJECT
# MY_GCP_GCE_IMAGE_FAMILY
# MY_GCP_GCE_DISK_BOOT_NAME
# MY_GCP_GCE_DISK_BOOT_TYPE
# MY_GCP_GCE_DISK_BOOT_SIZE
# MY_GCP_GCE_STARTUP_SCRIPT_URL
# MY_GCP_GCE_WINDOWS_STARTUP_SCRIPT_URL
# https://cloud.google.com/compute/docs/instances/startup-scripts
#
# Allow full access to all Cloud APIs
# Note that the level of access that a service account has is determined by a combination of access scopes and IAM roles
# so you must configure both access scopes and IAM roles for the service account to work properly.
#
# https://cloud.google.com/sdk/gcloud/reference/compute/instances/create#--create-disk
# name
# Specifies the name of the disk.
# device-name
# An optional name that indicates the disk name the guest operating system will see.
function create_vm() {
    # Copy startup scripts to storage bucket
    copy_startup_storage
    # Create VM
    echo_title "Create Compute Engine virtual machine instance '$MY_GCP_GCE_NAME'"
    echo_wait
    # --no-address: no external IP; access is via IAP tunnels (see hints below).
    # --scopes=cloud-platform: effective access is scope AND IAM roles combined.
    if ! gcloud compute instances create "$MY_GCP_GCE_NAME" \
        --zone="$MY_GCP_ZONE" \
        --machine-type="$MY_GCP_GCE_TYPE" \
        --subnet="$MY_GCP_SUBNET" \
        --no-address \
        --maintenance-policy=MIGRATE \
        --scopes="cloud-platform" \
        --service-account="$MY_GCP_SA_ID" \
        --create-disk="auto-delete=yes,boot=yes,name=$MY_GCP_GCE_DISK_BOOT_NAME,device-name=$MY_GCP_GCE_DISK_BOOT_NAME,image-family=$MY_GCP_GCE_IMAGE_FAMILY,image-project=$MY_GCP_GCE_IMAGE_PROJECT,mode=rw,size=$MY_GCP_GCE_DISK_BOOT_SIZE,type=$MY_GCP_GCE_DISK_BOOT_TYPE" \
        --no-shielded-secure-boot \
        --shielded-vtpm \
        --shielded-integrity-monitoring \
        --reservation-affinity=any \
        --metadata=startup-script-url="$MY_GCP_GCE_STARTUP_SCRIPT_URL",windows-startup-script-url="$MY_GCP_GCE_WINDOWS_STARTUP_SCRIPT_URL" \
        --description="script-name:$MY_SCRIPT_NAME" \
        --labels="git-head=$MY_GIT_HEAD,config-image-family=$MY_GCP_GCE_IMAGE_FAMILY,config-image-project=$MY_GCP_GCE_IMAGE_PROJECT" \
        --project="$MY_GCP_PROJECT"; then
        echo_warning "Could not create VM instance"
        export MY_WARNING=1
    else
        echo_info "VM instance is starting... Please wait a few minutes..."
        # Print OS-appropriate connection hints (Windows vs Linux image).
        if [[ "$MY_GCP_GCE_IMAGE_PROJECT" == 'windows'* ]]; then
            echo "RDP into a Windows virtual machine instance:"
            echo "1. Start an IAP TCP forwarding tunnel for RDP"
            echo "   $ gcloud compute start-iap-tunnel $MY_GCP_GCE_NAME 3389 --local-host-port=127.33.8.9:3389 --zone=$MY_GCP_ZONE --project=$MY_GCP_PROJECT"
            echo "2. Connect to local 127.33.8.9 via RDP client"
            echo "   > mstsc /v:127.33.8.9:3389"
            echo
            echo "Connect to serial console:"
            # Windows port 2:
            # https://cloud.google.com/compute/docs/troubleshooting/troubleshooting-using-serial-console#setting_up_a_login_on_other_serial_ports
            echo "$ gcloud compute connect-to-serial-port $MY_GCP_GCE_NAME --port 2 --zone=$MY_GCP_ZONE --project=$MY_GCP_PROJECT"
        else
            echo "SSH into a Linux virtual machine instance:"
            echo "$ gcloud compute ssh $MY_GCP_GCE_NAME --tunnel-through-iap --zone=$MY_GCP_ZONE --project=$MY_GCP_PROJECT"
            echo
            # https://cloud.google.com/compute/docs/instances/startup-scripts/linux#viewing-output
            echo "View output of a Linux startup script:"
            echo "$ sudo journalctl -u google-startup-scripts.service"
            echo
            echo "Connect to serial console:"
            echo "$ gcloud compute connect-to-serial-port $MY_GCP_GCE_NAME --zone=$MY_GCP_ZONE --project=$MY_GCP_PROJECT"
        fi
    fi
}
# Create GCE virtual machine instance with already created disk
# In:
# MY_GCP_GCE_NAME
# MY_GCP_ZONE
# MY_GCP_GCE_TYPE
# MY_GCP_SUBNET
# MY_GCP_SA_ID
# MY_GCP_GCE_DISK_BOOT_NAME
function create_disk_vm() {
    # Create VM
    echo_title "Create Compute Engine virtual machine instance '$MY_GCP_GCE_NAME' with boot disk '$MY_GCP_GCE_DISK_BOOT_NAME'"
    echo_wait
    # Same shape as create_vm(), but attaches the pre-existing boot disk via
    # --disk instead of creating one with --create-disk, and sets no startup
    # script metadata.
    if ! gcloud compute instances create "$MY_GCP_GCE_NAME" \
        --zone="$MY_GCP_ZONE" \
        --machine-type="$MY_GCP_GCE_TYPE" \
        --subnet="$MY_GCP_SUBNET" \
        --no-address \
        --maintenance-policy=MIGRATE \
        --scopes="cloud-platform" \
        --service-account="$MY_GCP_SA_ID" \
        --disk="auto-delete=yes,boot=yes,name=$MY_GCP_GCE_DISK_BOOT_NAME,device-name=$MY_GCP_GCE_DISK_BOOT_NAME,mode=rw" \
        --no-shielded-secure-boot \
        --shielded-vtpm \
        --shielded-integrity-monitoring \
        --reservation-affinity=any \
        --description="script-name:$MY_SCRIPT_NAME" \
        --labels="git-head=$MY_GIT_HEAD,config-image-family=$MY_GCP_GCE_IMAGE_FAMILY,config-image-project=$MY_GCP_GCE_IMAGE_PROJECT" \
        --project="$MY_GCP_PROJECT"; then
        echo_warning "Could not create VM instance"
        export MY_WARNING=1
    else
        echo_info "VM instance is starting... Please wait a few minutes..."
    fi
}
# Start Compute Engine virtual machine instance
# Start Compute Engine virtual machine instance.
# In: MY_GCP_GCE_NAME, MY_GCP_ZONE, MY_GCP_PROJECT
# On failure: warns and sets MY_WARNING=1 instead of aborting.
function start_vm() {
    echo_title "Start virtual machine instance '$MY_GCP_GCE_NAME' in zone '$MY_GCP_ZONE'"
    if ! gcloud compute instances start "$MY_GCP_GCE_NAME" \
        --zone="$MY_GCP_ZONE" \
        --project="$MY_GCP_PROJECT"; then
        echo_warning "Could not start VM instance"
        export MY_WARNING=1
    fi
}
# Stop Compute Engine virtual machine instance
# Stop Compute Engine virtual machine instance.
# In: MY_GCP_GCE_NAME, MY_GCP_ZONE, MY_GCP_PROJECT
# On failure: warns and sets MY_WARNING=1 instead of aborting.
function stop_vm() {
    echo_title "Stop virtual machine instance '$MY_GCP_GCE_NAME' in zone '$MY_GCP_ZONE'"
    if ! gcloud compute instances stop "$MY_GCP_GCE_NAME" \
        --zone="$MY_GCP_ZONE" \
        --project="$MY_GCP_PROJECT"; then
        echo_warning "Could not stop VM instance"
        export MY_WARNING=1
    fi
}
# SSH into a Linux virtual machine instance
# In: MY_GCP_GCE_NAME, MY_GCP_ZONE
# SSH into a Linux virtual machine instance (interactive, via IAP tunnel
# since instances have no external IP).
# In: MY_GCP_GCE_NAME, MY_GCP_ZONE, MY_GCP_PROJECT
function ssh_vm() {
    echo_title "SSH into Linux virtual machine instance '$MY_GCP_GCE_NAME' in zone '$MY_GCP_ZONE'"
    if ! gcloud compute ssh "$MY_GCP_GCE_NAME" \
        --tunnel-through-iap \
        --zone="$MY_GCP_ZONE" \
        --project="$MY_GCP_PROJECT"; then
        echo_failure "Could not SSH into VM"
    fi
}
# SSH into a Linux virtual machine instance and run command
# In: MY_GCP_GCE_NAME, MY_GCP_ZONE, MY_GCP_GCE_SSH_COMMAND
# SSH into a Linux virtual machine instance and run a single command
# non-interactively (via IAP tunnel).
# In: MY_GCP_GCE_NAME, MY_GCP_ZONE, MY_GCP_GCE_SSH_COMMAND, MY_GCP_PROJECT
function ssh_command() {
    echo_title "SSH into VM '$MY_GCP_GCE_NAME' in zone '$MY_GCP_ZONE' and run command"
    echo_wait
    if gcloud compute ssh "$MY_GCP_GCE_NAME" \
        --tunnel-through-iap \
        --zone="$MY_GCP_ZONE" \
        --project="$MY_GCP_PROJECT" \
        --command="$MY_GCP_GCE_SSH_COMMAND"; then
        echo_success "Successfully run command"
    else
        echo_failure "Could not run command"
    fi
}
# Delete Compute Engine virtual machine instance
# Delete Compute Engine virtual machine instance.
# NOTE: --delete-disks=all also destroys every attached disk, including the
# boot disk -- this is irreversible.
function delete_vm() {
    echo_title "Delete virtual machine instance '$MY_GCP_GCE_NAME' in zone '$MY_GCP_ZONE'"
    if ! gcloud compute instances delete "$MY_GCP_GCE_NAME" \
        --delete-disks=all \
        --zone="$MY_GCP_ZONE" \
        --project="$MY_GCP_PROJECT"; then
        echo_warning "Could not delete VM instance"
        export MY_WARNING=1
    fi
}
# List all Compute Engine virtual machine instances
# List all Compute Engine virtual machine instances in the project as a
# table of name, zone, internal IP and status.
function list_vms() {
    echo_title "Compute Engine virtual machine instances"
    echo_web "https://console.cloud.google.com/compute/instances?project=$MY_GCP_PROJECT"
    if ! gcloud compute instances list \
        --format="table( name, zone.basename(), networkInterfaces[].networkIP, status )" \
        --project="$MY_GCP_PROJECT"; then
        echo_warning "Could not list VM instance"
        export MY_WARNING=1
    fi
}
# Enabling interactive serial console access for all VM instances that are part the project
# https://cloud.google.com/compute/docs/troubleshooting/troubleshooting-using-serial-console#gcloud
# Enable interactive serial console access for ALL VM instances in the
# project (project-wide metadata flag).
# https://cloud.google.com/compute/docs/troubleshooting/troubleshooting-using-serial-console#gcloud
function enable_serial_console() {
    echo_title "Enable serial console access for all VMs in project '$MY_GCP_PROJECT'"
    if gcloud compute project-info add-metadata \
        --metadata "serial-port-enable=TRUE" \
        --project="$MY_GCP_PROJECT"; then
        echo "Successfully enabled"
    else
        echo_warning "Could not enable serial console access"
        export MY_WARNING=1
    fi
}
# Check hostname length for SAP (max 13 characters) or exit_with_failure.
# In: MY_GCP_GCE_NAME
function check_sap_hostname() {
    if [ ${#MY_GCP_GCE_NAME} -gt 13 ]; then
        echo_title "Check hostname '$MY_GCP_GCE_NAME' for SAP"
        # Fixed user-facing message typos: "13 character" -> "13 characters",
        # "Ilegal" -> "Illegal".
        echo "The length of the hostname you have chosen exceeds 13 characters,"
        echo "this is not supported by SAP, please use a different hostname."
        exit_with_failure "Illegal hostname for SAP"
    fi
}
def is_same_tree(tree1, tree2):
    """Return True when two binary trees have identical structure and data.

    Nodes are compared by their ``data`` attribute; ``left``/``right``
    children are compared recursively. Two empty trees are the same.
    """
    # If either side is empty, they match only when both are empty.
    if tree1 is None or tree2 is None:
        return tree1 is None and tree2 is None
    return (tree1.data == tree2.data
            and is_same_tree(tree1.left, tree2.left)
            and is_same_tree(tree1.right, tree2.right))
The maximum sum of a continuous subarray is 11, with the subarray being [5, 6, -3, -4, 7].
#!/usr/bin/env bash
# Travis CI test driver: on the primary Scala/JDK matrix entry run the suite
# with coverage (and tut doc-compilation for pull requests); on every other
# entry run a plain test pass. Always exits with the relevant status code.
function run_test_suite {
    if [ "${TRAVIS_SCALA_VERSION}" == "${TARGET_SCALA_VERSION}" ] && [ "${TRAVIS_JDK_VERSION}" == "oraclejdk8" ];
    then
        echo "Running tests with coverage and report submission"
        sbt ++$TRAVIS_SCALA_VERSION coverage test coverageReport coverageAggregate coveralls
        test_exit_code=$?
        if [ "${TRAVIS_PULL_REQUEST}" == "true" ];
        then
            # Only attempt tut when the test suite itself passed.
            if [ ${test_exit_code} -eq "0" ];
            then
                echo "Running tut compilation"
                sbt ++$TRAVIS_SCALA_VERSION "project readme" "tut"
                local tut_exit_code=$?
                echo "Tut compilation exited with status $tut_exit_code"
                exit ${tut_exit_code}
            else
                echo "Unable to run tut compilation, test suite failed"
                exit ${test_exit_code}
            fi
        else
            echo "Not a pull request, no need to run tut compilation as it runs as part of sbt release"
            exit ${test_exit_code}
        fi
    else
        echo "Running tests without attempting to submit coverage reports or run tut"
        sbt "plz $TRAVIS_SCALA_VERSION test"
        exit $?
    fi
}
run_test_suite
const {
useHttps
} = require('../middleware');
/**
* Enables https redirects and strict transport security.
*
* @param {Router} router
*/
module.exports = function(router) {
if (!process.env.FORCE_HTTPS) {
return;
}
const baseUrl = process.env.BASE_URL;
if (!baseUrl) {
throw new Error('must configure BASE_URL to force https');
}
if (!baseUrl.startsWith('https://')) {
throw new Error('BASE_URL must start with https');
}
router.use(useHttps(baseUrl));
}; |
-- Fetch every customer whose name begins with the prefix "John".
SELECT customer_name FROM customers WHERE customer_name LIKE 'John%';
<gh_stars>1-10
import json
import os
import requests
from .constants import BASE_DIR
answer_key_ext = ".json"
def online_only(shift_code):
    """ Downloads a prepared answer key from a repo"""
    # Fetch the JSON answer key for this shift from the GitHub raw mirror;
    # raise_for_status turns HTTP errors (e.g. unknown shift code) into
    # requests.HTTPError instead of silently parsing an error page.
    #//print("[D] Downloading latest Answer Key")
    answer_key = requests.get('https://raw.githubusercontent.com/DevParapalli/JEE-Mains-AnswerKeys/main/' + shift_code + answer_key_ext)
    answer_key.raise_for_status()
    return answer_key.json()
    # Removed logic for saving the file.
    #shutil.copy(BASE_DIR /'temp'/'answer_key.json', BASE_DIR / 'answer_key_storage' / (shift_code +'.json'))
def create_answer_key_lookup_table():
    """ Creates an answer_key from scratch. """
    # Placeholder: building a key locally is not implemented yet.
    raise NotImplementedError
def offline_only(shift_code):
    """Load a locally cached answer key for `shift_code`.

    Raises FileNotFoundError when no cached key exists so callers (see
    `normal`) can fall back to downloading one.
    """
    path = BASE_DIR / 'answer_key_storage' / (shift_code + answer_key_ext)
    if not os.path.exists(path):
        raise FileNotFoundError(path)
    #//print("[I] Selecting Answer Key")
    # Context manager closes the handle deterministically; the original
    # called open(...).read() and leaked the file object.
    with open(path) as fh:
        return json.load(fh)
def normal(shift_code):
    """Return the answer key, preferring the local cache over the network."""
    try:
        key = offline_only(shift_code)
    except FileNotFoundError:
        # No cached copy for this shift: download it instead.
        #//print("[E] File Not Found, Downloading")
        key = online_only(shift_code)
    return key
|
import { LOAD_TEMP_USERS, LOAD_USERS, LOAD_USERS_SUCCESS } from './constants';
const defaultState = {
loading: false,
users: [],
};
/**
 * Users reducer: tracks the loading flag and the fetched user list.
 *
 * @param {{loading: boolean, users: Array}} state current slice state
 * @param {{type: string, users?: Array}} action dispatched action
 */
export default function users(state = defaultState, action) {
  switch (action.type) {
    // Both load actions only flip the loading flag on.
    case LOAD_USERS:
    case LOAD_TEMP_USERS:
      return { ...state, loading: true };
    case LOAD_USERS_SUCCESS:
      return { ...state, loading: false, users: action.users };
    default:
      return state;
  }
}
|
/* Copyright (c) 2009, <NAME>, Orbot / The Guardian Project - http://openideals.com/guardian */
/* See LICENSE for licensing information */
package com.msopentech.thali.android.toronionproxy.torinstaller;
/**
 * Shared constants used when locating and installing the bundled tor binary.
 */
public interface TorServiceConstants {
    // Log tag.
    String TAG = "TorBinary";
    //name of the tor C binary
    String TOR_ASSET_KEY = "tor.so";
    // Buffer size in bytes used when writing the binary to disk.
    int FILE_WRITE_BUFFER_SIZE = 1024;
}
|
<gh_stars>0
#ifndef WIN32WINDOW
#define WIN32WINDOW
#ifndef UNICODE
#define UNICODE
#endif
#ifndef _UNICODE
#define _UNICODE
#endif
#include <windows.h>
#include "../contextSettings.hpp"
#include "../iEvent.hpp"
#include <SFML/Window/WindowHandle.hpp>
#include <SFML/System/String.hpp>
#include <SFML/System/Vector2.hpp>
#include <queue>
#include "../iWindow.hpp"
#include <SFML/Window/VideoMode.hpp>
#include "../../../../include/odfaeg/Window/iKeyboard.hpp"
#include "../../../../include/odfaeg/Window/iMouse.hpp"
namespace odfaeg {
namespace window {
class Win32Window : public IWindow {
public :
Win32Window();
////////////////////////////////////////////////////////////
/// \brief Construct the window implementation from an existing control
///
/// \param handle Platform-specific handle of the control
///
////////////////////////////////////////////////////////////
Win32Window(sf::WindowHandle handle, const ContextSettings& settings);
////////////////////////////////////////////////////////////
/// \brief Create the window implementation
///
/// \param mode Video mode to use
/// \param title Title of the window
/// \param style Window style
/// \param settings Additional settings for the underlying OpenGL context
///
////////////////////////////////////////////////////////////
Win32Window(sf::VideoMode mode, const sf::String& title, sf::Uint32 style, const ContextSettings& settings);
void create (sf::VideoMode mode, const sf::String& title, sf::Uint32 style, const ContextSettings& settings);
void create(sf::WindowHandle handle, const ContextSettings& settings);
////////////////////////////////////////////////////////////
/// \brief Destructor
///
////////////////////////////////////////////////////////////
~Win32Window();
////////////////////////////////////////////////////////////
/// \brief Get the OS-specific handle of the window
///
/// \return Handle of the window
///
////////////////////////////////////////////////////////////
virtual sf::WindowHandle getSystemHandle() const;
////////////////////////////////////////////////////////////
/// \brief Get the position of the window
///
/// \return Position of the window, in pixels
///
////////////////////////////////////////////////////////////
virtual sf::Vector2i getPosition() const;
////////////////////////////////////////////////////////////
/// \brief Change the position of the window on screen
///
/// \param position New position of the window, in pixels
///
////////////////////////////////////////////////////////////
virtual void setPosition(const sf::Vector2i& position);
////////////////////////////////////////////////////////////
/// \brief Get the client size of the window
///
/// \return Size of the window, in pixels
///
////////////////////////////////////////////////////////////
virtual sf::Vector2u getSize() const;
////////////////////////////////////////////////////////////
/// \brief Change the size of the rendering region of the window
///
/// \param size New size, in pixels
///
////////////////////////////////////////////////////////////
virtual void setSize(const sf::Vector2u& size);
////////////////////////////////////////////////////////////
/// \brief Change the title of the window
///
/// \param title New title
///
////////////////////////////////////////////////////////////
virtual void setTitle(const sf::String& title);
////////////////////////////////////////////////////////////
/// \brief Change the window's icon
///
/// \param width Icon's width, in pixels
/// \param height Icon's height, in pixels
/// \param pixels Pointer to the pixels in memory, format must be RGBA 32 bits
///
////////////////////////////////////////////////////////////
virtual void setIcon(unsigned int width, unsigned int height, const sf::Uint8* pixels);
////////////////////////////////////////////////////////////
/// \brief Show or hide the window
///
/// \param visible True to show, false to hide
///
////////////////////////////////////////////////////////////
virtual void setVisible(bool visible);
////////////////////////////////////////////////////////////
/// \brief Show or hide the mouse cursor
///
/// \param visible True to show, false to hide
///
////////////////////////////////////////////////////////////
virtual void setMouseCursorVisible(bool visible);
////////////////////////////////////////////////////////////
/// \brief Grab or release the mouse cursor
///
/// \param grabbed True to enable, false to disable
///
////////////////////////////////////////////////////////////
virtual void setMouseCursorGrabbed(bool grabbed);
////////////////////////////////////////////////////////////
/// \brief Set the displayed cursor to a native system cursor
///
/// \param cursor Native system cursor type to display
///
////////////////////////////////////////////////////////////
virtual void setMouseCursor(const sf::Cursor& cursor) {
}
////////////////////////////////////////////////////////////
/// \brief Enable or disable automatic key-repeat
///
/// \param enabled True to enable, false to disable
///
////////////////////////////////////////////////////////////
virtual void setKeyRepeatEnabled(bool enabled);
////////////////////////////////////////////////////////////
/// \brief Request the current window to be made the active
/// foreground window
///
////////////////////////////////////////////////////////////
virtual void requestFocus();
////////////////////////////////////////////////////////////
/// \brief Check whether the window has the input focus
///
/// \return True if window has focus, false otherwise
///
////////////////////////////////////////////////////////////
virtual bool hasFocus() const;
////////////////////////////////////////////////////////////
/// \brief Tell whether the window is open
///
/// \return True if the window is open, false otherwise
///
////////////////////////////////////////////////////////////
virtual bool isOpen() const;
////////////////////////////////////////////////////////////
/// \brief Close the window
///
////////////////////////////////////////////////////////////
virtual void close();
////////////////////////////////////////////////////////////
/// \brief Destroy the window
///
/// NOTE(review): presumably releases the native window and its
/// resources (see cleanup()) — confirm against the implementation.
///
////////////////////////////////////////////////////////////
virtual void destroy();
////////////////////////////////////////////////////////////
/// \brief Return the next pending event, without blocking
///
/// \param event Event to fill
///
/// \return Presumably true if an event was returned, false if the
///        queue was empty (mirrors popEvent) — confirm in the
///        implementation
///
////////////////////////////////////////////////////////////
virtual bool pollEvent(IEvent& event);
////////////////////////////////////////////////////////////
/// \brief Wait for an event and return it
///
/// \param event Event to fill
///
/// \return Always false in this base implementation
///
////////////////////////////////////////////////////////////
virtual bool waitEvent(IEvent& event) {
// Stub: never blocks and never yields an event — always reports failure.
return false;
}
protected:
////////////////////////////////////////////////////////////
/// \brief Process incoming events from the operating system
///
////////////////////////////////////////////////////////////
virtual void processEvents();
////////////////////////////////////////////////////////////
/// \brief Pop the oldest pending event
///
/// \param event Event to fill
///
/// \return Presumably true if an event was popped, false if none was
///        pending (inferred from the m_ievents queue and the pairing
///        with pushEvent) — confirm against the implementation
///
////////////////////////////////////////////////////////////
bool popEvent (IEvent& event);
private:
////////////////////////////////////////////////////////////
/// \brief Store an event in the pending-event queue
///
/// NOTE(review): presumably appends to m_ievents for later retrieval
/// by popEvent/pollEvent — confirm against the implementation.
///
/// \param event Event to store
///
////////////////////////////////////////////////////////////
void pushEvent(IEvent& event);
////////////////////////////////////////////////////////////
/// \brief Register the window class
///
////////////////////////////////////////////////////////////
void registerWindowClass();
////////////////////////////////////////////////////////////
/// \brief Switch to fullscreen mode
///
/// \param mode Video mode to switch to
///
////////////////////////////////////////////////////////////
void switchToFullscreen(const sf::VideoMode& mode);
////////////////////////////////////////////////////////////
/// \brief Free all the graphical resources attached to the window
///
////////////////////////////////////////////////////////////
void cleanup();
////////////////////////////////////////////////////////////
/// \brief Process a Win32 event
///
/// \param message Message to process
/// \param wParam First parameter of the event
/// \param lParam Second parameter of the event
///
////////////////////////////////////////////////////////////
void processEvent(UINT message, WPARAM wParam, LPARAM lParam);
////////////////////////////////////////////////////////////
/// \brief Enables or disables tracking for the mouse cursor leaving the window
///
/// \param track True to enable, false to disable
///
////////////////////////////////////////////////////////////
void setTracking(bool track);
////////////////////////////////////////////////////////////
/// \brief Grab or release the mouse cursor
///
/// This is not to be confused with setMouseCursorGrabbed.
/// Here m_cursorGrabbed is not modified; it is used,
/// for example, to release the cursor when switching to
/// another application.
///
/// \param grabbed True to enable, false to disable
///
////////////////////////////////////////////////////////////
void grabCursor(bool grabbed);
////////////////////////////////////////////////////////////
/// \brief Convert a Win32 virtual key code to an ODFAEG key code
///
/// \param key Virtual key code to convert
/// \param flags Additional flags
///
/// \return ODFAEG key code (IKeyboard::Key) corresponding to the key
///
////////////////////////////////////////////////////////////
static IKeyboard::Key virtualKeyCodeToODFAEG(WPARAM key, LPARAM flags);
////////////////////////////////////////////////////////////
/// \brief Function called whenever one of our windows receives a message
///
/// \param handle Win32 handle of the window
/// \param message Message received
/// \param wParam First parameter of the message
/// \param lParam Second parameter of the message
///
/// \return True to discard the event after it has been processed
///
////////////////////////////////////////////////////////////
static LRESULT CALLBACK globalOnEvent(HWND handle, UINT message, WPARAM wParam, LPARAM lParam);
////////////////////////////////////////////////////////////
// Member data
////////////////////////////////////////////////////////////
HWND m_handle; ///< Win32 handle of the window
LONG_PTR m_callback; ///< Stores the original event callback function of the control
bool m_cursorVisible; ///< Is the cursor visible or hidden?
HCURSOR m_lastCursor; ///< Last cursor used -- this data is not owned by the window and is required to be always valid
HICON m_icon; ///< Custom icon assigned to the window
bool m_keyRepeatEnabled; ///< Automatic key-repeat state for keydown events
sf::Vector2u m_lastSize; ///< The last handled size of the window
bool m_resizing; ///< Is the window being resized?
sf::Uint16 m_surrogate; ///< First half of the surrogate pair, in case we're receiving a Unicode character in two events
bool m_mouseInside; ///< Mouse is inside the window?
bool m_fullscreen; ///< Is the window fullscreen?
bool m_cursorGrabbed; ///< Is the mouse cursor trapped?
std::queue<IEvent> m_ievents; ///< Queue of pending events (filled by pushEvent, drained by popEvent)
bool m_opened, m_destroyed; ///< Window state flags — presumably backing isOpen()/close()/destroy(); confirm in implementation
};
}
}
#endif // WIN32WINDOW