text
stringlengths 1
1.05M
|
|---|
#!/usr/bin/env bash
# Delete the sample-repos resources defined relative to this script's location.
set -e
scriptDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
# Quote all expansions so a checkout path containing spaces does not word-split.
source "$scriptDir/utils.sh"
scbk delete -f "$scriptDir/../k8s/sample-repos.yaml"
|
# Hyper-parameter sweep over ConvexPruning.py; one log file per configuration.
timestamp=$(date +%Y%m%d%H%M%S)
# Ensure the log directory exists; -f keeps 'rm' quiet when no old logs remain
# (the original 'rm Logs/*.log' printed an error on a clean checkout).
mkdir -p Logs
rm -f Logs/*.log
#'MUTAG' 'ENZYMES'
for dataset in 'Pubmed' 'Cora'
do
for modelName in 'ChebConvNet'
do
for NumLayers in 1
do
for LR in 0.5
do
for BatchSize in 512
do
for StartTopoCoeffi in 0.1
do
for VectorPairs in 1
do
for WeightCorrectionCoeffi in 0.001 0.01 0.1 1
do
# Quote every expansion; tee both shows progress and persists the log.
python3 ConvexPruning.py --dataset "$dataset" --modelName "$modelName" --BatchSize "$BatchSize" --NumLayers "$NumLayers" --VectorPairs "$VectorPairs" --StartTopoCoeffi "$StartTopoCoeffi" --ConCoeff 0.95 --num_pre_epochs 100 --num_epochs 200 --MonteSize 1 --LR "$LR" --WeightCorrectionCoeffi "$WeightCorrectionCoeffi" --PruningTimes 1 --resume False 2>&1 | tee "Logs/${modelName}_${dataset}_${StartTopoCoeffi}-${WeightCorrectionCoeffi}_${VectorPairs}_${timestamp}.log"
done
done
done
done
done
done
done
done
|
<filename>py-question/lv1-03.py
# Problem: given an integer n, build a dictionary mapping i -> i*i for every
# integer i from 1 to n (both inclusive), then print the dictionary.
# Example: input 8 -> {1: 1, 2: 4, 3: 9, 4: 16, 5: 25, 6: 36, 7: 49, 8: 64}
# The number n is read from console input.
print ('please enter a number')
x = int (input())
d = {i: i * i for i in range(1, x + 1)}
print (d)
|
#!/usr/bin/env bash
# Install the Dapp Tools toolchain via Nix, plus a pinned static solc 0.8.9.
# pipefail added: with plain 'set -e' a failed curl inside 'curl | sh' was
# invisible because only sh's exit status was checked.
set -euo pipefail
echo "*** Initializing Dapp Tools"
# NOTE(review): piping a remote script straight into sh executes unverified
# code; consider downloading and checksumming before running.
curl -L https://nixos.org/nix/install | sh
. ~/.nix-profile/etc/profile.d/nix.sh
curl https://dapp.tools/install | sh
nix-env -f https://github.com/dapphub/dapptools/archive/master.tar.gz -iA solc-static-versions.solc_0_8_9
|
import numpy as np
class InterventionGenerator:
    """Samples a random value for each requested variable from its allowed range."""

    def __init__(self, task_intervention_space):
        # Mapping: variable name -> (low, high) sampling bounds.
        self.task_intervention_space = task_intervention_space

    def generate_interventions(self, variables):
        """Return {variable: uniform sample within its bounds}.

        Variables not present in the intervention space are silently skipped.
        """
        space = self.task_intervention_space
        return {
            var: np.random.uniform(space[var][0], space[var][1])
            for var in variables
            if var in space
        }
# Example usage: sample random interventions for two of the three variables.
space = {'temperature': (20, 30), 'pressure': (50, 70), 'color': (0, 255)}
generator = InterventionGenerator(space)
print(generator.generate_interventions(['temperature', 'color']))
|
#!/bin/bash
# Status-bar segment: Wifi SSID and signal strength, color-coded.
# Signal quality: 4th column of /proc/net/wireless for wlo1, digits only.
STR="$(grep wlo1 /proc/net/wireless | awk '{print $4}' | sed 's/[^0-9]//g')"
STR=$((STR-30))
# NOTE(review): awk '{print $2}' keeps only the first word, so SSIDs
# containing spaces are truncated here — confirm whether that is acceptable.
SSID="$(iw dev | grep ssid | awk '{print $2}')"
ISTATE="^c#8ec07c^"
[ "$STR" -lt 35 ] && ICON=" " && ISTATE="^c#8ec07c^"
[ "$STR" -ge 35 ] && ICON="說" && ISTATE="^c#fabd2f^"
[ "$STR" -ge 55 ] && ICON="罹 " && ISTATE="^c#fe8019^"
# Connectivity probe; -m 5 bounds the wait so the bar cannot hang when offline.
curl -m 5 www.google.com.br &>/dev/null || ICON=" " ISTATE="^c#fe8019^"
# Quote $SSID: the unquoted test errored out on SSIDs containing spaces.
[ -z "$SSID" ] && SSID="off" && ICON="ﲁ " && ISTATE="^c#fb4934^"
echo " ${ISTATE}${SSID} ${ICON}^d^ "
|
#!/bin/bash
# Local Observatorium demo: downloads tooling (setup) and runs the stack (deploy).
set -e
set -o pipefail
# Kill every background child (hydra, prometheus, port-forwards, ...) on TERM.
trap 'kill 0' SIGTERM
KUBECTL="${KUBECTL:-kubectl}"
KIND="${KIND:-kind}"
# 'if ! command -v' is the robust existence test; the previous
# '[ ! $(command -v ...) ]' form relied on test's one-argument semantics and
# broke for paths containing whitespace.
if ! command -v "$KIND" > /dev/null 2>&1; then
echo "Cannot find or execute KIND binary $KIND, you can override it by setting the KIND env variable"
exit 1
fi
if ! command -v "$KUBECTL" > /dev/null 2>&1; then
echo "Cannot find or execute Kubectl binary $KUBECTL, you can override it by setting the KUBECTL env variable"
exit 1
fi
# setup: fetch all local dependencies — ORY Hydra (OIDC provider), the
# observatorium token-refresher (built from source), Prometheus, and the
# Grafana docker image — then create a local KIND cluster.
# Downloaded binaries land in ./tmp/bin.
setup() {
mkdir -p tmp/bin
echo "-------------------------------------------"
echo "- Downloading ORY Hydra... -"
echo "-------------------------------------------"
# Extract only the 'hydra' binary from the release tarball directly into tmp/bin.
curl -L "https://github.com/ory/hydra/releases/download/v1.9.1/hydra_1.9.1-sqlite_linux_64bit.tar.gz" | tar -xzf - -C tmp/bin hydra
echo "-------------------------------------------"
echo "- Cloning observatorium/token-refresher and building... -"
echo "-------------------------------------------"
git clone https://github.com/observatorium/token-refresher tmp/token-refresher
cd tmp/token-refresher
make build
mv ./token-refresher ../bin/
# Return to the previous working directory (the repo root).
cd -
echo "-------------------------------------------"
echo "- Downloading Prometheus... -"
echo "-------------------------------------------"
# Extract just the 'prometheus' binary out of the versioned tarball directory.
curl -L "https://github.com/prometheus/prometheus/releases/download/v2.24.1/prometheus-2.24.1.linux-amd64.tar.gz" | tar -xzf - -C tmp prometheus-2.24.1.linux-amd64/prometheus
mv ./tmp/prometheus-2.24.1.linux-amd64/prometheus ./tmp/bin/
echo "-------------------------------------------"
echo "- Pulling docker image for Grafana... -"
echo "-------------------------------------------"
docker pull grafana/grafana:7.3.7
echo "-------------------------------------------"
echo "- Creating KIND cluster... -"
echo "-------------------------------------------"
$KIND create cluster
}
# deploy: start the full local stack. Hydra, the token-refresher proxy,
# Prometheus and Grafana run as background processes on the host; MinIO and
# Observatorium are deployed into the KIND cluster. Background jobs are
# reaped by the top-level 'wait' and torn down by the 'kill 0' trap.
deploy() {
# Hydra
# In-memory Hydra, backgrounded; output discarded.
(DSN=memory ./tmp/bin/hydra serve all --dangerous-force-http --config ./configs/hydra.yaml &> /dev/null) &
echo "-------------------------------------------"
echo "- Waiting for Hydra to come up... -"
echo "-------------------------------------------"
# Poll the OIDC discovery endpoint until Hydra answers.
until curl --output /dev/null --silent --fail --insecure http://127.0.0.1:4444/.well-known/openid-configuration; do
printf '.'
sleep 1
done
echo ""
# Register the OAuth client (id "user" / secret "secret") used by the
# token-refresher below, via Hydra's admin API on :4445.
curl \
--output /dev/null --silent \
--header "Content-Type: application/json" \
--request POST \
--data '{"audience": ["observatorium"], "client_id": "user", "client_secret": "secret", "grant_types": ["client_credentials"], "token_endpoint_auth_method": "client_secret_basic"}' \
http://127.0.0.1:4445/clients
# MinIO
echo "-------------------------------------------"
echo "- Deploying MinIO... -"
echo "-------------------------------------------"
# create|apply via dry-run so re-running the script is idempotent.
$KUBECTL create namespace observatorium-minio --dry-run=client -o yaml | $KUBECTL apply -f -
$KUBECTL apply -f ./manifests/minio-pvc.yaml
$KUBECTL apply -f ./manifests/minio-deployment.yaml
$KUBECTL apply -f ./manifests/minio-service.yaml
echo "-------------------------------------------"
echo "- Waiting for MinIO to come up... -"
echo "-------------------------------------------"
$KUBECTL wait --for=condition=available --timeout=5m -n observatorium-minio deploy/minio
# Observatorium
echo "-------------------------------------------"
echo "- Deploying Observatorium... -"
echo "-------------------------------------------"
$KUBECTL create namespace observatorium --dry-run=client -o yaml | $KUBECTL apply -f -
$KUBECTL apply -f ./manifests/
echo "-------------------------------------------"
echo "- Waiting for Observatorium to come up... -"
echo "-------------------------------------------"
$KUBECTL wait --for=condition=available --timeout=5m -n observatorium deploy/observatorium-xyz-thanos-query-frontend
$KUBECTL wait --for=condition=available --timeout=5m -n observatorium deploy/observatorium-xyz-observatorium-api
# Expose the in-cluster API on host port 8443, backgrounded.
($KUBECTL port-forward -n observatorium svc/observatorium-xyz-observatorium-api 8443:8080 &> /dev/null) &
# Token Refresher
echo "-------------------------------------------"
echo "- Starting Token Refresher proxy... -"
echo "-------------------------------------------"
# NOTE(review): 172.17.0.1 is presumably the default docker bridge gateway
# (so containers can reach host-side Hydra) — confirm on this setup.
(./tmp/bin/token-refresher \
--oidc.issuer-url=http://172.17.0.1:4444/ \
--oidc.client-id=user \
--oidc.client-secret=secret \
--oidc.audience=observatorium \
--url=http://127.0.0.1:8443 &> /dev/null) &
sleep 1
# Prometheus
echo "-------------------------------------------"
echo "- Starting Prometheus... -"
echo "-------------------------------------------"
(./tmp/bin/prometheus --config.file=./configs/prom.yaml --storage.tsdb.path=tmp/data/ &> /dev/null) &
# Grafana
echo "-------------------------------------------"
echo "- Starting Grafana using docker... -"
echo "-------------------------------------------"
# Run as the invoking user so the bind-mounted data dir stays writable.
mkdir -p tmp/grafana
(docker run -p 3000:3000 --user $(id -u) --volume "$PWD/tmp/grafana:/var/lib/grafana" grafana/grafana:7.3.7 &> /dev/null) &
echo "Open http://localhost:3000 in your browser. Add Prometheus datasource with endpoint http://172.17.0.1:8080/api/metrics/v1/test-oidc."
}
# Dispatch on the first CLI argument; with no/unknown argument run both phases.
# "${1:-}" guards against an unset $1 (and keeps the selector quoted).
case "${1:-}" in
setup)
setup
;;
deploy)
deploy
;;
help)
echo "usage: $(basename "$0") { setup | deploy }"
;;
*)
setup
deploy
;;
esac
# Block until the background processes started by deploy() exit;
# the SIGTERM trap ('kill 0') tears them all down together.
wait
|
<reponame>darccyy/trustworthytimes
import React, { Component } from "react";
import "../scss/PostText.scss";
// Add classes for unloaded / broken images
import loadImages from "../functions/loadImages";
// Copy text to clipboard
import copyText from "../functions/copyText";
// Specific article in /news/*
// Renders the shareable text panel for a single article (routes under /news/*):
// headline, subtitle, read-more URL, hashtags, and the image alt text.
// Clicking the description or the alt text copies that text to the clipboard.
// When no article prop is supplied, placeholder content is rendered with a
// "skeleton" CSS class.
export default class PostText extends Component {
async componentDidMount() {
// Tag unloaded / broken images with CSS classes once mounted.
loadImages();
}
// Copy the .description block's visible text to the clipboard, doubling the
// line breaks (blank line between lines) for nicer pasting.
// Note: reads from the DOM, not from props, so it copies exactly what is shown.
copyDescription() {
var text = (document.querySelector(".description")?.innerText || "")
.split("\n")
.join("\n\n");
console.log(text);
copyText(text);
}
// Copy the image alt text to the clipboard.
copyAlt() {
var text = document.querySelector(".alt-text")?.innerText || "";
console.log(text);
copyText(text);
}
render() {
// Fall back to a skeleton placeholder while the article is loading.
const article = this.props.article || { skeleton: true };
return (
<div className={"PostText" + (article.skeleton ? " skeleton" : "")}>
<div className="text-contain">
<div className="description" onClick={this.copyDescription}>
{/* Big headline */}
<h1 className="text headline">
<span>{article.headline || "Important News Headline"}</span>
</h1>
{/* Subtitle */}
<h2 className="text subtitle">
<span>{article.subtitle || "Subtitle of the Article"}</span>
</h2>
<h3 className="text read">
<span>
Read more at https://trustworthytimes.herokuapp.com
{article.id ? "/news/" + article.id : ""}
</span>
</h3>
{/* Hashtags */}
<h4 className="text hashtags">
{[
"trustworthytimes",
"news",
"satire",
article.labs ? "trustworthylabs" : "",
].map((hashtag, index) => {
{/* Skip empty entries (e.g. the conditional labs tag) */}
if (!hashtag) {
return;
}
return (
<span key={index}>
{index ? " " : ""}#{hashtag}
</span>
);
})}
</h4>
</div>
<h4 className="alt-text" onClick={this.copyAlt}>
{article.alt || "Alt text"}
</h4>
</div>
</div>
);
}
}
|
#!/usr/bin/env bash
# fwhistory.sh — summarize firewall log history.
# Reads firewall log lines (file argument or stdin), extracts timestamp,
# source/destination addresses and the requested URL, then aggregates them
# with the helper awk script (helpers/fwhistory.awk).
UNAME=$(uname)
DATE=$(date)
DATEREGEX="^(...) (...) (.?.) ((..):(..):(..)) (...) (....)"
AWKCMD=awk
SEDCMD=sed
# FreeBSD installs the GNU versions of awk/sed under modified names.
if [ "$UNAME" = "FreeBSD" ]; then
AWKCMD=gawk
SEDCMD=gsed
fi
# Defaults, overridable via the options parsed below.
THRESHOLD=15       # -d: threshold in minutes
SORT=1             # -n clears this: skip sorting of incoming data
TIMERANGE="*"      # -r: time range xxxx-yyyy (24hr)
SUMMARYONLY=0      # -s: print only the summary
OUTDELIM=","       # -t: output field separator
YEAR=$(echo "$DATE" | $SEDCMD -r "s/$DATEREGEX/\9/")   # -y: defaults to current year
FILE=/dev/stdin    # positional file argument overrides

usage()
{
echo "usage: fwhistory.sh [-dnrsty] [file]"
echo " d : threshold value (min.)"
echo " n : do not sort incoming data (can speed processing large, previously sorted data sets)"
echo " r : time range xxxx-yyyy 24hr (defaults to all times)"
echo " s : display summary only"
echo " t : field separator (defaults to a comma)"
echo " y : firewall data year (defaults to current year)"
echo " file : file to process (defaults to stdin)"
exit 1
}

while getopts "d:hnr:st:y:" OPTION
do
case $OPTION in
d) THRESHOLD="$OPTARG"
;;
n) SORT=0
;;
r) TIMERANGE="$OPTARG"
;;
s) SUMMARYONLY=1
;;
t) OUTDELIM="$OPTARG"
;;
# Was '*h)': that pattern matches 'h' but NOT the '?' getopts emits for an
# invalid option, so bad flags were silently ignored. 'h|*)' catches both.
h|*) usage
;;
esac
done
shift $(($OPTIND - 1))
if [ -n "$1" ]
then
FILE="$1"
fi
# Intermediate field delimiter used while sed restructures each line;
# replaced by OUTDELIM in the final sed expression.
DELIM="\|"
LOGFILENAME="(.+):"
DATETIME="(...) (..) (..):(..):(..)\...."
NEWDATETIME="(...$DELIM..$DELIM..$DELIM..$DELIM..)"
SOURCEIP=" src=([^\/]+)\/([^ ]+) "
DESTIP=" dst=([^\/]+)\/([^ ]+) "
URL=" arg=([^ ]+) "
#DNAMEREGEX=`cat ~/Scripts/helpers/domainName.regex`
DNAMEREGEX="\b(\w((\w|-){0,61}\w)?\.)*\w((\w|-){0,61}\w)?\b"
INTERNETREGEX="^($LOGFILENAME)?$DATETIME.*$SOURCEIP.*$DESTIP.*$URL.*$"
NEWINTERNETREGEX="^$NEWDATETIME.*$SOURCEIP.*$DESTIP.*$URL.*$"
SED="s/^($LOGFILENAME)?$DATETIME/\3$DELIM\4$DELIM\5$DELIM\6$DELIM\7/;\
s/$NEWINTERNETREGEX/\1$DELIM\2$DELIM\3$DELIM\4$DELIM\5$DELIM\6/;\
s/((http|https|file|ftp):\/\/\/?($DNAMEREGEX))/\3$DELIM\1/;\
s/$DELIM/$OUTDELIM/g"
# SORTARGS/AWKARGS are intentionally left unquoted below: they hold multiple
# words that must word-split into separate arguments.
SORTARGS="-t $OUTDELIM -k 1,2M -k 2,3n -k 3,4n -k 4,5n -k 5,6n"
AWKARGS="--field-separator "$OUTDELIM" --file $(dirname "$0")/helpers/fwhistory.awk --assign year=$YEAR --assign threshold=$THRESHOLD --assign delim=$OUTDELIM --assign summaryOnly=$SUMMARYONLY --assign timeRange=$TIMERANGE"
# grep -E replaces the deprecated 'egrep'; read the file directly instead of cat |.
if [ "$SORT" = 1 ]
then
grep -E "$INTERNETREGEX" "$FILE" | $SEDCMD -r "$SED" | sort $SORTARGS | $AWKCMD $AWKARGS
else
grep -E "$INTERNETREGEX" "$FILE" | $SEDCMD -r "$SED" | $AWKCMD $AWKARGS
fi
|
<reponame>YJieZhang/Tengine
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* License); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright (c) 2017, Open AI Lab
* Author: <EMAIL>
*/
#ifndef __POOLING_KERNEL_H__
#define __POOLING_KERNEL_H__
#include <algorithm>  // std::max — used throughout; previously relied on a transitive include
#include <arm_neon.h>
/**
 * MaxPool_2x2s2: max pooling for ksize=2x2, stride=2, pad=0 (default pad=0)
 * @param[in] input input data (const float pointer)
 * @param[out] output output data (float pointer)
 * @param[in] inc input channel (int)
 * @param[in] inh input height (int)
 * @param[in] inw input width (int)
 * @param[in] outh output height (int)
 * @param[in] outw output width (int)
 * @param[in] htail htail=(inh-ksize_h)%stride_h (int)
 * @param[in] wtail wtail=(inw-ksize_w)%stride_w (int)
 * @return None
 */
// 2x2 max pooling, stride 2, no padding. htail/wtail flag a leftover input
// row/column, which is pooled with a reduced (1-wide/1-high) window.
static void MaxPool_2x2s2(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw,
int htail,int wtail)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int block_w=outw >>2;   // number of 4-output NEON iterations per row
int tail_w=outw & ~3;   // first output column handled by the scalar tail loop
int wtail_resi=inw+1;   // pointer advance to the next row pair when a tail column exists
if(wtail)
{
// Reserve the last output column for the tail; shrink the vector loop if
// the vector loop would otherwise have produced it.
if (outw%4==0)
{
block_w-=1;
tail_w-=4;
}
outw-=1;
}
if(htail)
{
// Last output row comes from a single input row; handled separately below.
outh-=1;
}
for(int c=0;c<inc;c++)
{
const float* line0=input +c*in_hw;
const float* line1=line0 + inw;
float* out_ptr=output + c*out_hw;
for(int i=0;i<outh;i++)
{
// Vector body: 8 inputs per line -> 4 outputs per iteration.
for(int j=0;j<block_w;j++)
{
float32x4_t p00=vld1q_f32(line0);
float32x4_t p10=vld1q_f32(line1);
float32x4_t max0=vmaxq_f32(p00,p10);
float32x4_t p01=vld1q_f32(line0+4);
float32x4_t p11=vld1q_f32(line1+4);
float32x4_t max1=vmaxq_f32(p01,p11);
/* pairwise max across adjacent lanes folds each 2-wide window */
float32x4_t _max=vpmaxq_f32(max0,max1);
vst1q_f32(out_ptr,_max);
line0+=8;
line1+=8;
out_ptr+=4;
}
// Scalar tail: one 2x2 window per iteration.
for(int j=tail_w;j<outw;j++)
{
float32x2_t p1=vld1_f32(line0);
float32x2_t p2=vld1_f32(line1);
float32x2_t _max=vmax_f32(p1,p2);
*out_ptr=std::max(_max[0],_max[1]);
out_ptr++;
line0+=2;
line1+=2;
}
if(wtail)
{
// Last column: 2x1 window.
*out_ptr =std::max (line0[0],line1[0]);
out_ptr++;
line0+=wtail_resi;
line1+=wtail_resi;
}
else
{
// Skip the second row of the window pair to reach the next row pair.
line0+=inw;
line1+=inw;
}
}
if(htail)
{
// Bottom row: pool over a single input row (1-high windows).
for(int j=0;j<block_w;j++)
{
float32x4_t p00=vld1q_f32(line0);
float32x4_t p01=vld1q_f32(line0+4);
p00=vpmaxq_f32(p00,p01);
vst1q_f32(out_ptr,p00);
line0+=8;
out_ptr+=4;
}
for(int j=tail_w;j<outw;j++)
{
float32x2_t p1=vld1_f32(line0);
*out_ptr=std::max(p1[0],p1[1]);
out_ptr++;
line0+=2;
}
if(wtail)
{
// Bottom-right corner: single element.
*out_ptr =line0[0];
out_ptr++;
}
}
}
}
// 2x2 average pooling, stride 2, no padding. Same traversal as MaxPool_2x2s2;
// tail windows divide by their actual element count (0.5 for 2 elements,
// the raw value for 1), not by 4.
static void AvgPool_2x2s2(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw,
int htail,int wtail)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int block_w=outw >>2;   // 4-output NEON iterations per row
int tail_w=outw & ~3;   // first output column of the scalar tail loop
int wtail_resi=inw+1;   // row-pair advance when a tail column exists
if(wtail)
{
if (outw%4==0)
{
block_w-=1;
tail_w-=4;
}
outw-=1;
}
if(htail)
{
outh-=1;
}
for(int c=0;c<inc;c++)
{
const float* line0=input +c*in_hw;
const float* line1=line0 + inw;
float* out_ptr=output + c*out_hw;
for(int i=0;i<outh;i++)
{
for(int j=0;j<block_w;j++)
{
float32x4_t p00=vld1q_f32(line0);
float32x4_t p10=vld1q_f32(line1);
float32x4_t sum0=vaddq_f32(p00,p10);
float32x4_t p01=vld1q_f32(line0+4);
float32x4_t p11=vld1q_f32(line1+4);
float32x4_t sum1=vaddq_f32(p01,p11);
// Pairwise add folds each 2-wide window; multiply by 1/4 to average.
sum0=vpaddq_f32(sum0,sum1);
sum0=vmulq_n_f32(sum0, 0.25f);
vst1q_f32(out_ptr,sum0);
line0+=8;
line1+=8;
out_ptr+=4;
}
for(int j=tail_w;j<outw;j++)
{
float32x2_t p1=vld1_f32(line0);
float32x2_t p2=vld1_f32(line1);
float32x2_t sum=vadd_f32(p1,p2);
*out_ptr=(sum[0]+sum[1])*0.25;
out_ptr++;
line0+=2;
line1+=2;
}
if(wtail)
{
// Last column: 2x1 window -> divide by 2.
*out_ptr = (line0[0] +line1[0])*0.5f;
out_ptr++;
line0+=wtail_resi;
line1+=wtail_resi;
}
else
{
line0+=inw;
line1+=inw;
}
}
if(htail)
{
// Bottom row: 1x2 windows -> divide by 2.
for(int j=0;j<block_w;j++)
{
float32x4_t p00=vld1q_f32(line0);
float32x4_t p01=vld1q_f32(line0+4);
p00=vpaddq_f32(p00,p01);
p00=vmulq_n_f32(p00, 0.5f);
vst1q_f32(out_ptr,p00);
line0+=8;
out_ptr+=4;
}
for(int j=tail_w;j<outw;j++)
{
float32x2_t p1=vld1_f32(line0);
*out_ptr=(p1[0]+p1[1])*0.5;
out_ptr++;
line0+=2;
}
if(wtail)
{
// Bottom-right corner: single element (average of 1).
*out_ptr = line0[0];
out_ptr++;
}
}
}
}
// 3x3 max pooling, stride 2, no padding. Uses vld2q_f32 to de-interleave
// even/odd columns so four overlapping 3-wide windows reduce to three
// vector max ops. htail/wtail mark a leftover row/column pooled with a
// reduced (2-wide/2-high) window.
static void MaxPool_3x3s2(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw,
int htail,int wtail)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int block_w=outw >>2;   // 4-output NEON iterations per row
int tail_w=outw & ~3;   // first output column of the scalar tail loop
int wtail_resi=inw+2;   // row advance when a tail column consumed 1 input col
int w_resi=inw+1;       // row advance on the regular path
if(wtail)
{
if (outw%4==0)
{
block_w-=1;
tail_w-=4;
}
outw-=1;
}
if(htail)
{
outh-=1;
}
for(int c=0;c<inc;c++)
{
const float* line0=input +c*in_hw;
const float* line1=line0 + inw;
const float* line2=line1 + inw;
float* out_ptr=output + c*out_hw;
for(int i=0;i<outh;i++)
{
// Pre-load the de-interleaved even/odd columns for each of the 3 rows.
float32x4x2_t p00=vld2q_f32(line0);
float32x4x2_t p10=vld2q_f32(line1);
float32x4x2_t p20=vld2q_f32(line2);
for(int j=0;j<block_w;j++)
{
/*
p00 = [1,2,3,4,5,6,7,8]
p00.val[0]=[1,3,5,7]
max0 = [2,4,6,8]
p00_new = [9,10,11,12,13,14,15,16]
p01 = [3,5,7,9]
max0=max(max0,p01)=[3,5,7,9]
*/
float32x4x2_t p00_new=vld2q_f32(line0+8);
float32x4_t max0=vmaxq_f32(p00.val[0],p00.val[1]);
float32x4_t p01=vextq_f32(p00.val[0],p00_new.val[0],1);
max0=vmaxq_f32(max0,p01);
float32x4x2_t p10_new=vld2q_f32(line1+8);
float32x4_t max1=vmaxq_f32(p10.val[0],p10.val[1]);
float32x4_t p11=vextq_f32(p10.val[0],p10_new.val[0],1);
max1=vmaxq_f32(max1,p11);
float32x4x2_t p20_new=vld2q_f32(line2+8);
float32x4_t max2=vmaxq_f32(p20.val[0],p20.val[1]);
float32x4_t p21=vextq_f32(p20.val[0],p20_new.val[0],1);
max2=vmaxq_f32(max2,p21);
// Reduce vertically over the 3 rows.
max0 = vmaxq_f32(vmaxq_f32(max0, max1), max2);
vst1q_f32(out_ptr,max0);
p00=p00_new;
p10=p10_new;
p20=p20_new;
line0+=8;
line1+=8;
line2+=8;
out_ptr+=4;
}
// Scalar tail: one full 3x3 window per iteration.
for(int j=tail_w;j<outw;j++)
{
float max0 = std::max(std::max(line0[0], line0[1]), line0[2]);
float max1 = std::max(std::max(line1[0], line1[1]), line1[2]);
float max2 = std::max(std::max(line2[0], line2[1]), line2[2]);
*out_ptr = std::max(std::max(max0, max1), max2);
out_ptr++;
line0+=2;
line1+=2;
line2+=2;
}
if(wtail)
{
// Last column: 3x2 window.
float max0 = std::max(std::max(line0[0], line0[1]),
std::max(line1[0], line1[1]));
*out_ptr = std::max(std::max(line2[0], line2[1]), max0);
out_ptr++;
line0+=wtail_resi;
line1+=wtail_resi;
line2+=wtail_resi;
}
else
{
line0+=w_resi;
line1+=w_resi;
line2+=w_resi;
}
}
if(htail)
{
// Bottom row: windows are only 2 rows high.
float32x4x2_t p00=vld2q_f32(line0);
float32x4x2_t p10=vld2q_f32(line1);
for(int j=0;j<block_w;j++)
{
float32x4x2_t p00_new=vld2q_f32(line0+8);
float32x4_t max0=vmaxq_f32(p00.val[0],p00.val[1]);
float32x4_t p01=vextq_f32(p00.val[0],p00_new.val[0],1);
max0=vmaxq_f32(max0,p01);
float32x4x2_t p10_new=vld2q_f32(line1+8);
float32x4_t max1=vmaxq_f32(p10.val[0],p10.val[1]);
float32x4_t p11=vextq_f32(p10.val[0],p10_new.val[0],1);
max1=vmaxq_f32(max1,p11);
vst1q_f32(out_ptr,vmaxq_f32(max0, max1));
p00=p00_new;
p10=p10_new;
line0+=8;
line1+=8;
out_ptr+=4;
}
for(int j=tail_w;j<outw;j++)
{
float max0 = std::max(std::max(line0[0], line0[1]), line0[2]);
float max1 = std::max(std::max(line1[0], line1[1]), line1[2]);
*out_ptr = std::max(max0, max1);
out_ptr++;
line0+=2;
line1+=2;
}
if(wtail)
{
// Bottom-right corner: 2x2 window.
*out_ptr = std::max(std::max(line0[0], line0[1]),
std::max(line1[0], line1[1]));
out_ptr++;
}
}
}
}
// 3x3 average pooling, stride 2, no padding. Same de-interleave traversal as
// MaxPool_3x3s2 with add in place of max; divisors follow the actual window
// size (1/9 full, 1/6 for 3x2 or 2x3 tails, 1/4 for the 2x2 corner).
static void AvgPool_3x3s2(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw,
int htail,int wtail)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int block_w=outw >>2;   // 4-output NEON iterations per row
int tail_w=outw & ~3;   // first output column of the scalar tail loop
int wtail_resi=inw+2;   // row advance when the tail column was emitted
int w_resi=inw+1;       // row advance on the regular path
if(wtail)
{
if (outw%4==0)
{
block_w-=1;
tail_w-=4;
}
outw-=1;
}
if(htail)
{
outh-=1;
}
for(int c=0;c<inc;c++)
{
const float* line0=input +c*in_hw;
const float* line1=line0 + inw;
const float* line2=line1 + inw;
float* out_ptr=output + c*out_hw;
for(int i=0;i<outh;i++)
{
float32x4x2_t p00=vld2q_f32(line0);
float32x4x2_t p10=vld2q_f32(line1);
float32x4x2_t p20=vld2q_f32(line2);
for(int j=0;j<block_w;j++)
{
// Per row: even + odd columns + shifted even columns = 3-wide sums.
float32x4x2_t p00_new=vld2q_f32(line0+8);
float32x4_t sum0=vaddq_f32(p00.val[0],p00.val[1]);
float32x4_t p01=vextq_f32(p00.val[0],p00_new.val[0],1);
sum0=vaddq_f32(sum0,p01);
float32x4x2_t p10_new=vld2q_f32(line1+8);
float32x4_t sum1=vaddq_f32(p10.val[0],p10.val[1]);
float32x4_t p11=vextq_f32(p10.val[0],p10_new.val[0],1);
sum1=vaddq_f32(sum1,p11);
float32x4x2_t p20_new=vld2q_f32(line2+8);
float32x4_t sum2=vaddq_f32(p20.val[0],p20.val[1]);
float32x4_t p21=vextq_f32(p20.val[0],p20_new.val[0],1);
sum2=vaddq_f32(sum2,p21);
sum0 = vaddq_f32(vaddq_f32(sum0, sum1), sum2);
sum0=vmulq_n_f32(sum0, 0.11111111f);   // 1/9
vst1q_f32(out_ptr,sum0);
p00=p00_new;
p10=p10_new;
p20=p20_new;
line0+=8;
line1+=8;
line2+=8;
out_ptr+=4;
}
for(int j=tail_w;j<outw;j++)
{
*out_ptr = (line0[0] + line0[1] + line0[2] +
line1[0] + line1[1] + line1[2] +
line2[0] + line2[1] + line2[2])*0.11111111f;
out_ptr++;
line0+=2;
line1+=2;
line2+=2;
}
if(wtail)
{
// Last column: 3x2 window -> divide by 6.
*out_ptr = (line0[0] + line0[1] +
line1[0] + line1[1] +
line2[0] + line2[1])*0.16666667f;
out_ptr++;
line0+=wtail_resi;
line1+=wtail_resi;
line2+=wtail_resi;
}
else
{
line0+=w_resi;
line1+=w_resi;
line2+=w_resi;
}
}
if(htail)
{
// Bottom row: 2x3 windows -> divide by 6.
float32x4x2_t p00=vld2q_f32(line0);
float32x4x2_t p10=vld2q_f32(line1);
for(int j=0;j<block_w;j++)
{
float32x4x2_t p00_new=vld2q_f32(line0+8);
float32x4_t sum0=vaddq_f32(p00.val[0],p00.val[1]);
float32x4_t p01=vextq_f32(p00.val[0],p00_new.val[0],1);
sum0=vaddq_f32(sum0,p01);
float32x4x2_t p10_new=vld2q_f32(line1+8);
float32x4_t sum1=vaddq_f32(p10.val[0],p10.val[1]);
float32x4_t p11=vextq_f32(p10.val[0],p10_new.val[0],1);
sum1=vaddq_f32(sum1,p11);
sum0 = vaddq_f32(sum0, sum1);
sum0=vmulq_n_f32(sum0, 0.16666667f);
vst1q_f32(out_ptr,sum0);
p00=p00_new;
p10=p10_new;
line0+=8;
line1+=8;
out_ptr+=4;
}
for(int j=tail_w;j<outw;j++)
{
*out_ptr = (line0[0] + line0[1] + line0[2] +
line1[0] + line1[1] + line1[2])*0.16666667f;
out_ptr++;
line0+=2;
line1+=2;
}
if(wtail)
{
// Bottom-right corner: 2x2 window -> divide by 4.
*out_ptr = (line0[0] + line0[1] +
line1[0] + line1[1] )*0.25f;
out_ptr++;
}
}
}
}
// 2x2 max pooling, stride 2, pad=1. Border outputs whose window overlaps the
// padding take the max of only the valid elements (padding is never compared,
// which for max pooling equals ignoring it).
static void MaxPool_2x2s2_pad1(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int wtail_resi=inw+1;   // advance to the next row pair after the right edge
for(int c=0;c<inc;c++)
{
const float* line00=input +c*in_hw;
float* out_ptr=output + c*out_hw;
//h begin: first output row, window covers a single input row (top pad).
*out_ptr = line00[0];
out_ptr++;
for(int i=1;i<inw-1;i+=2)
{
*out_ptr=std::max(line00[i],line00[i+1]);
out_ptr++;
}
*out_ptr = line00[inw-1];
out_ptr++;
// h center: full two-row windows.
const float* line0=line00 + inw;
const float* line1=line0 + inw;
for(int i=1;i<outh-1;i++)
{
//w begin: left edge, 2x1 window.
*out_ptr=std::max(line0[0],line1[0]);
out_ptr++;
line0++;
line1++;
// w center: full 2x2 windows.
for(int j=1;j<outw-1;j++)
{
float32x2_t p1=vld1_f32(line0);
float32x2_t p2=vld1_f32(line1);
float32x2_t _max=vmax_f32(p1,p2);
*out_ptr=std::max(_max[0],_max[1]);
out_ptr++;
line0+=2;
line1+=2;
}
// w end: right edge, 2x1 window.
*out_ptr =std::max (line0[0],line1[0]);
out_ptr++;
line0+=wtail_resi;
line1+=wtail_resi;
}
// h end: last output row, single input row again (bottom pad).
*out_ptr = line0[0];
out_ptr++;
for(int i=1;i<inw-1;i+=2)
{
*out_ptr=std::max(line0[i],line0[i+1]);
out_ptr++;
}
*out_ptr = line0[inw-1];
out_ptr++;
}
}
// 2x2 average pooling, stride 2, pad=1, count_include_pad semantics: every
// output divides by 4 even when part of the window lies in the zero padding.
static void AvgPool_2x2s2_pad1(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int wtail_resi=inw+1;   // advance to the next row pair after the right edge
for(int c=0;c<inc;c++)
{
const float* line00=input +c*in_hw;
float* out_ptr=output + c*out_hw;
//h begin: top row, padded rows contribute zero but the divisor stays 4.
*out_ptr = line00[0]*0.25;
out_ptr++;
for(int i=1;i<inw-1;i+=2)
{
*out_ptr=(line00[i]+line00[i+1])*0.25;
out_ptr++;
}
*out_ptr = line00[inw-1]*0.25;
out_ptr++;
// h center
const float* line0=line00 + inw;
const float* line1=line0 + inw;
for(int i=1;i<outh-1;i++)
{
//w begin: left edge (2 valid elements, still /4).
*out_ptr=(line0[0]+line1[0])*0.25;
out_ptr++;
line0++;
line1++;
// w center: full 2x2 windows.
for(int j=1;j<outw-1;j++)
{
float32x2_t p1=vld1_f32(line0);
float32x2_t p2=vld1_f32(line1);
float32x2_t sum=vadd_f32(p1,p2);
*out_ptr=(sum[0]+sum[1])*0.25;
out_ptr++;
line0+=2;
line1+=2;
}
// w end: right edge.
*out_ptr =(line0[0]+line1[0])*0.25;
out_ptr++;
line0+=wtail_resi;
line1+=wtail_resi;
}
// h end: bottom row.
*out_ptr = line0[0]*0.25;
out_ptr++;
for(int i=1;i<inw-1;i+=2)
{
*out_ptr=(line0[i]+line0[i+1])*0.25;
out_ptr++;
}
*out_ptr = line0[inw-1]*0.25;
out_ptr++;
}
}
// 3x3 max pooling, stride 2, pad=1 (scalar implementation). Border windows
// use only their valid elements. htail/wtail distinguish whether the last
// output row/column still sees two valid input rows/cols (0) or one (non-0).
static void MaxPool_3x3s2_pad1(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw,
int htail,int wtail)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int mid_w = (inw-3+wtail)/2;   // interior output columns (full 3-wide windows)
int mid_h = (inh-3+htail)/2;   // interior output rows (full 3-high windows)
int inw_2=inw+2;               // row advance when right edge used 2 columns
int inw_1=inw+1;               // row advance when right edge used 1 column
for(int c=0;c<inc;c++)
{
const float* line1=input +c*in_hw;
const float* line2=line1 + inw;
float* out_ptr=output + c*out_hw;
//h begin --------------------------------------- top row: 2 valid rows only.
*out_ptr =std::max( std::max(line1[0],line1[1]),
std::max(line2[0],line2[1]));
out_ptr++;
line1+=1;
line2+=1;
for(int j=0;j<mid_w;j++)
{
//float max0 = std::max(std::max(line0[0], line0[1]), line0[2]);
float max1 = std::max(std::max(line1[0], line1[1]), line1[2]);
float max2 = std::max(std::max(line2[0], line2[1]), line2[2]);
//*out_ptr = std::max(std::max(max0, max1), max2);
*out_ptr = std::max(max2, max1);
out_ptr++;
//line0+=2;
line1+=2;
line2+=2;
}
if(wtail==0)
{
// Right edge with 2 valid columns.
*out_ptr =std::max( std::max(line1[0],line1[1]),
std::max(line2[0],line2[1]));
out_ptr++;
line1+=2;
line2+=2;
}
else
{
// Right edge with 1 valid column.
*out_ptr =std::max(line1[0],line2[0]);
out_ptr++;
line1+=1;
line2+=1;
}
// h center --------------------------------------- full 3-row windows.
const float* line0=line1;
line1=line2;
line2=line1+inw;
for(int i=0;i<mid_h;i++)
{
// left edge: 3x2 window.
float max0=std::max( std::max(line1[0],line1[1]),
std::max(line2[0],line2[1]));
*out_ptr =std::max(std::max(line0[0],line0[1]),max0);
out_ptr++;
line0+=1;
line1+=1;
line2+=1;
//mid: full 3x3 windows.
for(int j=0;j<mid_w;j++)
{
float max0 = std::max(std::max(line0[0], line0[1]), line0[2]);
float max1 = std::max(std::max(line1[0], line1[1]), line1[2]);
float max2 = std::max(std::max(line2[0], line2[1]), line2[2]);
*out_ptr = std::max(std::max(max0, max1), max2);
out_ptr++;
line0+=2;
line1+=2;
line2+=2;
}
if(wtail==0)
{
max0 =std::max( std::max(line1[0],line1[1]),
std::max(line2[0],line2[1]));
*out_ptr =std::max(std::max(line0[0],line0[1]),max0);
out_ptr++;
line0+=inw_2;
line1+=inw_2;
line2+=inw_2;
}
else
{
*out_ptr =std::max(std::max(line0[0],line1[0]),line2[0]);
out_ptr++;
line0+=inw_1;
line1+=inw_1;
line2+=inw_1;
}
}
// h end ------------------------------------------ bottom output row.
if(htail==0)
{
// 2 valid rows remain (line0 and line1).
*out_ptr =std::max( std::max(line1[0],line1[1]),
std::max(line0[0],line0[1]));
out_ptr++;
line1+=1;
line0+=1;
for(int j=0;j<mid_w;j++)
{
float max0 = std::max(std::max(line0[0], line0[1]), line0[2]);
float max1 = std::max(std::max(line1[0], line1[1]), line1[2]);
//float max2 = std::max(std::max(line2[0], line2[1]), line2[2]);
//*out_ptr = std::max(std::max(max0, max1), max2);
*out_ptr = std::max(max0, max1);
out_ptr++;
line0+=2;
line1+=2;
//line2+=2;
}
if(wtail==0)
{
*out_ptr =std::max( std::max(line1[0],line1[1]),
std::max(line0[0],line0[1]));
out_ptr++;
}
else
{
*out_ptr = std::max(line1[0],line0[0]);
out_ptr++;
}
}
else
{
// Only a single valid row remains (line0).
*out_ptr = std::max(line0[0],line0[1]);
out_ptr++;
line0+=1;
for(int j=0;j<mid_w;j++)
{
*out_ptr = std::max(std::max(line0[0], line0[1]), line0[2]);
out_ptr++;
line0+=2;
}
if(wtail==0)
{
*out_ptr =std::max(line0[0],line0[1]);
out_ptr++;
}
else
{
// Bottom-right corner: single element.
*out_ptr = line0[0];
out_ptr++;
}
}
}
}
// 3x3 average pooling, stride 2, pad=1 (scalar implementation).
// NOTE(review): divisor conventions are mixed — some border windows divide by
// the full window area including padding (e.g. 4 elements * 1/9), others by
// other constants (2 elements * 1/6, 1 element * 1/4). Confirm against the
// framework's reference pooling before changing anything here.
static void AvgPool_3x3s2_pad1(const float*input,float* output,
int inc,int inh,int inw,
int outh,int outw,
int htail,int wtail)
{
int in_hw=inw*inh;
int out_hw=outh*outw;
int mid_w = (inw-3+wtail)/2;   // interior output columns (full 3-wide windows)
int mid_h = (inh-3+htail)/2;   // interior output rows (full 3-high windows)
int inw_2=inw+2;               // row advance when right edge used 2 columns
int inw_1=inw+1;               // row advance when right edge used 1 column
for(int c=0;c<inc;c++)
{
const float* line1=input +c*in_hw;
const float* line2=line1 + inw;
float* out_ptr=output + c*out_hw;
//h begin --------------------------------------- top row: 2 valid rows.
*out_ptr =(line1[0]+line1[1]+line2[0]+line2[1])*0.11111111f;
out_ptr++;
line1+=1;
line2+=1;
for(int j=0;j<mid_w;j++)
{
*out_ptr = (line1[0]+ line1[1]+ line1[2]+
line2[0]+ line2[1]+ line2[2])*0.11111111f;
out_ptr++;
//line0+=2;
line1+=2;
line2+=2;
}
if(wtail==0)
{
*out_ptr =(line1[0]+line1[1]+line2[0]+line2[1])*0.11111111f;
out_ptr++;
line1+=2;
line2+=2;
}
else
{
*out_ptr =(line1[0]+line2[0])*0.16666667f;
out_ptr++;
line1+=1;
line2+=1;
}
// h center --------------------------------------- full 3-row windows.
const float* line0=line1;
line1=line2;
line2=line1+inw;
for(int i=0;i<mid_h;i++)
{
// left edge: 3x2 window (6 elements, divisor 1/9 — see NOTE above).
*out_ptr =(line0[0]+line0[1]+
line1[0]+line1[1]+
line2[0]+line2[1])*0.11111111f;
out_ptr++;
line0+=1;
line1+=1;
line2+=1;
//mid: full 3x3 windows, 1/9.
for(int j=0;j<mid_w;j++)
{
*out_ptr = (line0[0] + line0[1] + line0[2] +
line1[0] + line1[1] + line1[2] +
line2[0] + line2[1] + line2[2])*0.11111111f;
out_ptr++;
line0+=2;
line1+=2;
line2+=2;
}
if(wtail==0)
{
*out_ptr =(line0[0]+line0[1]+
line1[0] + line1[1] +
line2[0] + line2[1])*0.11111111f;
out_ptr++;
line0+=inw_2;
line1+=inw_2;
line2+=inw_2;
}
else
{
*out_ptr =(line0[0]+line1[0]+line2[0])*0.16666667f;
out_ptr++;
line0+=inw_1;
line1+=inw_1;
line2+=inw_1;
}
}
// h end ------------------------------------------ bottom output row.
if(htail==0)
{
// 2 valid rows remain (line0 and line1).
*out_ptr =(line1[0]+line1[1]+line0[0]+line0[1])*0.11111111f;
out_ptr++;
line1+=1;
line0+=1;
for(int j=0;j<mid_w;j++)
{
*out_ptr = (line0[0] + line0[1] +line0[2]+
line1[0] + line1[1] +line1[2])*0.11111111f;
out_ptr++;
line0+=2;
line1+=2;
//line2+=2;
}
if(wtail==0)
{
*out_ptr =(line1[0] + line1[1]+line0[0]+line0[1])*0.11111111f;
out_ptr++;
}
else
{
*out_ptr =(line1[0] + line0[0])*0.16666667f;
out_ptr++;
}
}
else
{
// Single valid row remains (line0).
*out_ptr =(line0[0]+line0[1])*0.16666667f;
out_ptr++;
line0+=1;
for(int j=0;j<mid_w;j++)
{
*out_ptr = (line0[0] + line0[1] +line0[2])*0.16666667f;
out_ptr++;
line0+=2;
}
if(wtail==0)
{
*out_ptr =(line0[0]+line0[1])*0.16666667f;
out_ptr++;
}
else
{
*out_ptr =line0[0]*0.25f;
out_ptr++;
}
}
}
}
// TODO: parallel in channel
// Global average pooling: one output per channel = mean over the whole
// inh x inw plane. Vector loop sums 8 floats per iteration; the remainder
// (in_hw % 8) is accumulated scalar.
static void Global_AvgPool(const float*input,float* output,
int inc,int inh,int inw)
{
int in_hw=inw*inh;
int block=in_hw >>3;    // number of 8-element vector iterations
int tail=in_hw & ~7;    // index of the first scalar-tail element
for(int c=0;c<inc;c++)
{
const float* line0=input +c*in_hw;
float* out_ptr=output + c;
float sum=0.f;
for(int j=0;j<block;j++)
{
float32x4_t p00=vld1q_f32(line0);
float32x4_t p01=vld1q_f32(line0+4);
p00=vaddq_f32(p00,p01);
// p00=vpaddq_f32(p00,p00);
// sum+=(p00[0]+p00[1]);
// Horizontal reduce of the 4-lane partial sums.
sum+=(p00[0]+p00[1]+p00[2]+p00[3]);
line0+=8;
}
for(int j=tail;j<in_hw;j++)
{
sum+=line0[0];
line0++;
}
*out_ptr=sum/in_hw;
}
}
// TODO: parallel in channel
// Global max pooling: one output per channel = max over the whole
// inh x inw plane.
// NOTE(review): the initial vld1q_f32 reads 4 floats unconditionally, so
// inputs with in_hw < 4 would read past the channel — confirm callers
// guarantee in_hw >= 4.
static void Global_MaxPool(const float*input,float* output,
int inc,int inh,int inw)
{
int in_hw=inw*inh;
int block=in_hw >>3;    // number of 8-element vector iterations
int tail=in_hw & ~7;    // index of the first scalar-tail element
for(int c=0;c<inc;c++)
{
const float* line0=input +c*in_hw;
float* out_ptr=output + c;
// Seed the running vector max with the first 4 elements.
float32x4_t p00=vld1q_f32(line0);
float32x4_t res=p00;
for(int j=0;j<block;j++)
{
float32x4_t p00=vld1q_f32(line0);
float32x4_t p01=vld1q_f32(line0+4);
float32x4_t max0=vmaxq_f32(p00,p01);
res=vmaxq_f32(res,max0);
line0+=8;
}
// Horizontal reduce, then fold in the scalar tail.
float max_=std::max(std::max(res[0],res[1]),std::max(res[2],res[3]));
for(int j=tail;j<in_hw;j++)
{
max_=std::max(max_,line0[0]);
line0++;
}
*out_ptr=max_;
}
}
#endif
|
import json
import requests
def lambda_handler(event, context):
    """AWS Lambda entry point: report the current temperature for event['city'].

    Returns a dict with 'statusCode' and a JSON-encoded message 'body'.
    Raises requests.HTTPError if the weather API rejects the request
    (e.g. unknown city or bad API key) instead of a confusing KeyError.
    """
    city = event['city']
    # NOTE(review): the key should come from configuration (env var / secrets
    # manager), not source code.
    API_KEY = 'ENTER_YOUR_API_KEY_HERE'
    # Let requests build the query string so city names with spaces or
    # non-ASCII characters are properly URL-encoded (the old string
    # concatenation produced invalid URLs for such inputs).
    response = requests.get(
        'http://api.openweathermap.org/data/2.5/weather',
        params={'q': city, 'appid': API_KEY},
    )
    response.raise_for_status()
    temperature = response.json()['main']['temp']
    return {
        'statusCode': 200,
        'body': json.dumps('Temperature of ' + city + ' is ' + str(temperature))
    }
|
import os
import shutil
# Scratch directory for python integration-test artifacts (under the repo).
TEST_DIR = os.path.join(os.getcwd(), "test/python/.tmp")
# Root used by previous integration runs; removed so every run starts clean.
TEST_ROOT = "/tmp/makisu-test-integration"

# exist_ok avoids the check-then-create race of the old
# "if not os.path.exists(...): os.makedirs(...)" pattern.
os.makedirs(TEST_DIR, exist_ok=True)
if os.path.exists(TEST_ROOT):
    shutil.rmtree(TEST_ROOT)
|
<reponame>wuximing/dsshop<filename>admin/vue2/element-admin-v3/node_modules/@antv/g2plot/esm/components/conversion-tag.js
import { __assign } from "tslib";
import { DEFAULT_ANIMATE_CFG } from '@antv/g2/lib/animate';
import { each, deepMix, get } from '@antv/util';
// Convert a shape's origin points into the given coordinate system.
function parsePoints(shape, coord) {
    var rawPoints = shape.get('origin').points;
    var converted = [];
    each(rawPoints, function (point) {
        converted.push(coord.convertPoint(point));
    });
    return converted;
}
var ConversionTag = /** @class */ (function () {
function ConversionTag(cfg) {
// @ts-ignore
deepMix(this, this.constructor.getDefaultOptions(cfg), cfg);
this._init();
}
ConversionTag.getDefaultOptions = function (_a) {
var transpose = _a.transpose;
return {
visible: true,
size: transpose ? 32 : 80,
spacing: transpose ? 8 : 12,
offset: transpose ? 32 : 0,
arrow: {
visible: true,
headSize: 12,
style: {
fill: 'rgba(0, 0, 0, 0.05)',
},
},
value: {
visible: true,
style: {
fontSize: 12,
fill: 'rgba(0, 0, 0, 0.85)',
},
formatter: function (valueUpper, valueLower) { return ((100 * valueLower) / valueUpper).toFixed(2) + "%"; },
},
animation: deepMix({}, DEFAULT_ANIMATE_CFG),
};
};
ConversionTag.prototype._init = function () {
var _this = this;
var layer = this.view.backgroundGroup;
this.container = layer.addGroup();
this.draw();
this.view.on('beforerender', function () {
_this.clear();
});
};
ConversionTag.prototype.draw = function () {
var _this = this;
var transpose = this.transpose;
var values = this.view.getScaleByField(this.field).values;
var geometry = this.view.geometries[0];
var shapes = geometry.getShapes();
var shapeLower, valueLower, shapeUpper, valueUpper;
if (transpose) {
shapes.forEach(function (shapeLower, i) {
valueLower = values[i];
if (i++ > 0) {
_this._drawTag(shapeUpper, valueUpper, shapeLower, valueLower);
}
valueUpper = valueLower;
shapeUpper = shapeLower;
});
}
else {
shapes.forEach(function (shapeUpper, i) {
valueUpper = values[i];
if (i++ > 0) {
_this._drawTag(shapeUpper, valueUpper, shapeLower, valueLower);
}
valueLower = valueUpper;
shapeLower = shapeUpper;
});
}
};
ConversionTag.prototype.clear = function () {
if (this.container) {
this.container.clear();
}
};
ConversionTag.prototype.destroy = function () {
if (this.container) {
this.container.remove();
}
};
ConversionTag.prototype._drawTag = function (shapeUpper, valueUpper, shapeLower, valueLower) {
var transpose = this.transpose;
var coord = this.view.geometries[0].coordinate;
var pointUpper = parsePoints(shapeUpper, coord)[transpose ? 3 : 0];
var pointLower = parsePoints(shapeLower, coord)[transpose ? 0 : 3];
this._drawTagArrow(pointUpper, pointLower);
this._drawTagValue(pointUpper, valueUpper, pointLower, valueLower);
};
ConversionTag.prototype._drawTagArrow = function (pointUpper, pointLower) {
var spacing = this.spacing;
var _a = this, size = _a.size, offset = _a.offset, animation = _a.animation, transpose = _a.transpose;
var headSize = this.arrow.headSize;
var totalHeight = pointLower.y - pointUpper.y;
var totalWidth = pointLower.x - pointUpper.x;
var points;
if (transpose) {
if ((totalWidth - headSize) / 2 < spacing) {
// 当柱间距不足容纳箭头尖与间隔时,画三角并挤占间隔
spacing = Math.max(1, (totalWidth - headSize) / 2);
points = [
[pointUpper.x + spacing, pointUpper.y - offset],
[pointUpper.x + spacing, pointUpper.y - offset - size],
[pointLower.x - spacing, pointLower.y - offset - size / 2],
];
}
else {
// 当柱间距足够时,画完整图形并留出间隔。
points = [
[pointUpper.x + spacing, pointUpper.y - offset],
[pointUpper.x + spacing, pointUpper.y - offset - size],
[pointLower.x - spacing - headSize, pointLower.y - offset - size],
[pointLower.x - spacing, pointLower.y - offset - size / 2],
[pointLower.x - spacing - headSize, pointLower.y - offset],
];
}
}
else {
if ((totalHeight - headSize) / 2 < spacing) {
// 当柱间距不足容纳箭头尖与间隔时,画三角并挤占间隔
spacing = Math.max(1, (totalHeight - headSize) / 2);
points = [
[pointUpper.x + offset, pointUpper.y + spacing],
[pointUpper.x + offset + size, pointUpper.y + spacing],
[pointLower.x + offset + size / 2, pointLower.y - spacing],
];
}
else {
// 当柱间距足够时,画完整图形并留出间隔。
points = [
[pointUpper.x + offset, pointUpper.y + spacing],
[pointUpper.x + offset + size, pointUpper.y + spacing],
[pointLower.x + offset + size, pointLower.y - spacing - headSize],
[pointLower.x + offset + size / 2, pointLower.y - spacing],
[pointLower.x + offset, pointLower.y - spacing - headSize],
];
}
}
var tagArrow = this.container.addShape('polygon', {
name: 'arrow',
attrs: __assign(__assign({}, this.arrow.style), { points: points }),
});
if (animation !== false) {
this._fadeInTagShape(tagArrow);
}
};
ConversionTag.prototype._drawTagValue = function (pointUpper, valueUpper, pointLower, valueLower) {
var _a = this, size = _a.size, offset = _a.offset, animation = _a.animation, transpose = _a.transpose;
var text = this.value.formatter(valueUpper, valueLower);
var tagValue = this.container.addShape('text', {
name: 'value',
attrs: __assign(__assign({}, this.value.style), { text: text, x: transpose ? (pointUpper.x + pointLower.x) / 2 : pointUpper.x + offset + size / 2, y: transpose ? pointUpper.y - offset - size / 2 : (pointUpper.y + pointLower.y) / 2, textAlign: 'center', textBaseline: 'middle' }),
});
if (transpose) {
var totalWidth = pointLower.x - pointUpper.x;
var textWidth = tagValue.getBBox().width;
if (textWidth > totalWidth) {
var cWidth = textWidth / text.length;
var cEnd = Math.max(1, Math.ceil(totalWidth / cWidth) - 1);
var textAdjusted = text.slice(0, cEnd) + "...";
tagValue.attr('text', textAdjusted);
}
}
if (animation !== false) {
this._fadeInTagShape(tagValue);
}
};
ConversionTag.prototype._fadeInTagShape = function (shape) {
var animation = this.animation;
var opacity = shape.attr('opacity');
shape.attr('opacity', 0);
var duration = get(animation, 'appear', DEFAULT_ANIMATE_CFG.appear).duration;
shape.animate({ opacity: opacity }, duration);
};
return ConversionTag;
}());
export default ConversionTag;
//# sourceMappingURL=conversion-tag.js.map
|
<reponame>weltam/idylfin<filename>src/de/erichseifert/gral/plots/colors/SingleColor.java<gh_stars>10-100
/*
* GRAL: GRAphing Library for Java(R)
*
* (C) Copyright 2009-2012 <NAME> <dev[at]erichseifert.de>,
* <NAME> <michael[at]erichseifert.de>
*
* This file is part of GRAL.
*
* GRAL is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GRAL is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with GRAL. If not, see <http://www.gnu.org/licenses/>.
*/
package de.erichseifert.gral.plots.colors;
import java.awt.Paint;
/**
* Class that represents a ColorMapper with a single color.
*/
public class SingleColor extends IndexedColorMapper {
	/** Version id for serialization. */
	private static final long serialVersionUID = -3377452532555792998L;

	/** The single paint handed out for every index. */
	private Paint color;

	/**
	 * Creates a new instance with the specified color.
	 * @param color Color to use.
	 */
	public SingleColor(Paint color) {
		this.color = color;
	}

	/**
	 * Returns the Paint for the specified index; the configured color is
	 * returned regardless of the value.
	 * @param value Numeric index.
	 * @return Paint.
	 */
	@Override
	public Paint get(int value) {
		return getColor();
	}

	/**
	 * Returns the color of this ColorMapper.
	 * @return Color.
	 */
	public Paint getColor() {
		return color;
	}

	/**
	 * Sets the color of this ColorMapper.
	 * @param color Color to be set.
	 */
	public void setColor(Paint color) {
		this.color = color;
	}

	@Override
	public boolean equals(Object obj) {
		if (!(obj instanceof SingleColor)) {
			return false;
		}
		SingleColor other = (SingleColor) obj;
		return color.equals(other.color) && getMode() == other.getMode();
	}

	@Override
	public int hashCode() {
		long h = getColor().hashCode();
		h ^= getMode().hashCode() * 31;
		return ((int) h) ^ ((int) (h >> 32));
	}
}
|
// Sort in place by ascending element length. A numeric comparator is the
// idiomatic form: only the sign of the result matters, so the explicit
// -1/0/1 branches are unnecessary.
arr.sort(function(a, b) {
  return a.length - b.length;
});
|
#!/bin/sh
# Regenerate the typescript-jquery Petstore sample clients (default + npm).
# Resolves the real script location through symlinks, builds the
# swagger-codegen CLI jar if missing, then runs the generator twice.

SCRIPT="$0"
# Follow symlinks until we reach the actual script file.
while [ -h "$SCRIPT" ] ; do
  ls=$(ls -ld "$SCRIPT")
  link=$(expr "$ls" : '.*-> \(.*\)$')
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=$(dirname "$SCRIPT")/"$link"
  fi
done

if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=$(dirname "$SCRIPT")/..
  APP_DIR=$(cd "${APP_DIR}"; pwd)
fi

executable="./modules/swagger-codegen-cli/target/swagger-codegen-cli.jar"

if [ ! -f "$executable" ]
then
  mvn clean package
fi

# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256M -Xmx1024M -DloggerPath=conf/log4j.properties"

echo "Typescript Petstore API client (default)"
ags="$@ generate -i modules/swagger-codegen/src/test/resources/2_0/petstore.yaml -l typescript-jquery -o samples/client/petstore/typescript-jquery/default"
# $JAVA_OPTS and $ags intentionally unquoted: word splitting builds the argv.
java $JAVA_OPTS -jar "$executable" $ags

echo "Typescript Petstore API client (npm setting)"
ags="$@ generate -i modules/swagger-codegen/src/test/resources/2_0/petstore.yaml -l typescript-jquery -c bin/typescript-jquery-petstore-npm.json -o samples/client/petstore/typescript-jquery/npm"
# shellcheck disable=SC2086 -- intentional word splitting (see above)
java $JAVA_OPTS -jar "$executable" $ags
|
// Barrel file for the CountDowner component: re-exports the component and
// its public props interface; the component is also the default export.
import CountDowner from './CountDowner';
import { CountDownerProps } from './interface';
export { CountDownerProps, CountDowner };
export default CountDowner;
|
// Cities to query, keyed by OpenWeatherMap city ids.
// NOTE(review): ids 1/2/3 look like placeholders, not real OWM city ids --
// confirm before relying on the results.
const cities = [{id: 1, name: "New York"},{id: 2, name: "London"}, {id: 3, name: "Berlin"}];

// Fetch the current temperature (metric units) for every city in parallel.
// Assumes `axios` is in scope and a valid key replaces YOUR_API_KEY; any
// single failed request rejects the whole Promise.all.
async function getTemperatures() {
  const promises = cities.map(city => {
    return axios.get(`https://api.openweathermap.org/data/2.5/weather?id=${city.id}&units=metric&appid=YOUR_API_KEY`).then(res => {
      return {name: city.name, temperature: res.data.main.temp};
    });
  });
  const temperatures = await Promise.all(promises);
  return temperatures;
}

getTemperatures().then(temperatures => {
  console.log(temperatures);
  // output: [{name: 'New York', temperature: 19.14}, {name: 'London', temperature: 15.0}, {name: 'Berlin', temperature: 11.34}]
});
|
#!/bin/sh
#--------
# Check APC ATS load script for Icinga2
# Require: net-snmp-utils, bc
# v.20170411
#
# https://github.com/psanthoshkumar/Icinga2_labwork
#
# Reads total / bank-B1 / bank-B2 load from the ATS via SNMP (raw values
# are tenths of amps) and prints a Nagios-style status line with perfdata.
# Thresholds (raw tenths): total warn >= 280, crit >= 320;
#                          banks warn >= 120, crit >= 160.

while getopts ":V:H:C:h" optname ; do
    case "$optname" in
        "V")
            VERSC=$OPTARG
            # snmpwalk expects "2c"; keep a plain numeric copy as well.
            if [ "$VERSC" = "2c" ] ; then
                VERS="2"
            else
                VERS=$VERSC
            fi
            ;;
        "H")
            HOST=$OPTARG
            ;;
        "C")
            COMM=$OPTARG
            ;;
        "h")
            echo "Usage: check_apc_ats_load.sh -H hostname -C community"
            exit 2
            ;;
        "?")
            echo "Unknown option $OPTARG"
            exit 2
            ;;
        ":")
            echo "No argument value for option $OPTARG"
            exit 2
            ;;
        *)
            # Should not occur
            echo "Unknown error while processing options"
            exit 1
            ;;
    esac
done

# Defaults and mandatory-argument checks (quoted so empty values are safe).
[ -z "$VERSC" ] && VERSC="2c" && VERS="2"
[ -z "$HOST" ] && echo "Please specify hostname!" && exit 2
[ -z "$COMM" ] && echo "Please specify SNMP community!" && exit 2

#ATS_OID="1.3.6.1.4.1.318.1.1.8.5.4.5.1.4"
#PDU_OID=".1.3.6.1.4.1.318.1.1.12.2.3.1.1.2"
# Walk returns three gauge rows: total, bank B1, bank B2 (tenths of amps).
AMPS=$(snmpwalk -c "$COMM" -v "$VERSC" "$HOST" 1.3.6.1.4.1.318.1.1.8.5.4.5.1.4 | grep -v "No Such Instance" | awk 'BEGIN { FS = ": " } { print $2 } ')
[ -z "$AMPS" ] && echo "No such device!" && exit 2

# Flatten the three result lines into positional parameters.
set -- $AMPS
TOTAL_LOAD=$1
BANK_B1_LOAD=$2
BANK_B2_LOAD=$3

# Classify each reading; the flag assignments below give CRITICAL
# precedence over WARNING over OK.
if [ "$TOTAL_LOAD" -ge 320 ] ; then
    STATUS_CRITICAL=1
elif [ "$TOTAL_LOAD" -ge 280 ] ; then
    STATUS_WARNING=1
else
    STATUS_OK=1
fi
if [ "$BANK_B1_LOAD" -ge 160 ] ; then
    STATUS_CRITICAL=1
elif [ "$BANK_B1_LOAD" -ge 120 ] ; then
    STATUS_WARNING=1
else
    STATUS_OK=1
fi
if [ "$BANK_B2_LOAD" -ge 160 ] ; then
    STATUS_CRITICAL=1
elif [ "$BANK_B2_LOAD" -ge 120 ] ; then
    STATUS_WARNING=1
else
    STATUS_OK=1
fi

# Convert tenths of amps into amps for output and perfdata.
TOTAL_LOAD=$(echo "$TOTAL_LOAD * 0.10" | bc -l)
BANK_B1_LOAD=$(echo "$BANK_B1_LOAD * 0.10" | bc -l)
BANK_B2_LOAD=$(echo "$BANK_B2_LOAD * 0.10" | bc -l)

# Later assignments win: CRITICAL overrides WARNING overrides OK.
[ "$STATUS_OK" ] && STATUS="OK" && EXIT=0
[ "$STATUS_WARNING" ] && STATUS="WARNING" && EXIT=1
[ "$STATUS_CRITICAL" ] && STATUS="CRITICAL" && EXIT=2

echo "$STATUS: Total Load $TOTAL_LOAD A - Bank B1 Load $BANK_B1_LOAD A - Bank B2 Load $BANK_B2_LOAD A | 'total_load'=$TOTAL_LOAD;28;32;0 'bank_b1_load'=$BANK_B1_LOAD;12;16;0 'bank_b2_load'=$BANK_B2_LOAD;12;16;0"
exit "$EXIT"
|
<gh_stars>0
/**
* Copyright 2016 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iaik.privlog.sanitizers;
import java.security.MessageDigest;
import javax.crypto.Mac;
import javax.crypto.SecretKey;
import org.apache.commons.codec.binary.Base64;
import ch.qos.logback.core.spi.ContextAwareBase;
/**
* @author <NAME> <<EMAIL>>
*/
public class BlindingSanitizerFactory extends ContextAwareBase implements IParamSanitizerFactory {

	// Sanitizer produced by this factory for one log parameter. The critical
	// value is "blinded" with, in order of precedence: keyed MAC, message
	// digest, fixed mask string, or "[tagName]" as a last resort.
	public class BlindingSanitizer extends ParamSanitizerBase {

		// Cached critical representation; computed lazily in getCritical().
		protected String critical;

		public BlindingSanitizer(String tag, Object parameter, int start, int startOriginal, int endOriginal) {
			super(tag, parameter, start, startOriginal, endOriginal);
			// When the critical view is masked too, critical == sanitized.
			equal = isMaskCritical();
		}

		@Override
		public String getCritical() {
			if (critical == null) {
				critical = isMaskCritical() ? getSanitized() : super.getCritical();
			}
			return critical;
		}

		@Override
		public String getSanitized() {
			if (sanitized == null) {
				// Precedence: MAC > digest > mask string > "[tagName]".
				if (mac != null) {
					sanitized = base64.encodeToString(mac.doFinal(super.getCritical().getBytes()));
				} else if (digest != null) {
					sanitized = base64.encodeToString(digest.digest(super.getCritical().getBytes()));
				} else if (blindingMask != null) {
					sanitized = blindingMask;
				} else {
					sanitized = "[" + tagName + "]";
				}
			}
			return sanitized;
		}
	}

	/** Default mask used when neither a MAC nor a digest is configured. */
	public static final String BLINDING_MASK = "*****";

	protected String blindingMask = BLINDING_MASK;
	protected MessageDigest digest;
	protected Mac mac;
	protected Base64 base64;
	// When true, getCritical() returns the blinded value as well.
	protected boolean maskCritical = false;

	@Override
	public BlindingSanitizer create(String tagName, Object parameter, int start, int startOriginal, int endOriginal) {
		return new BlindingSanitizer(tagName, parameter, start, startOriginal, endOriginal);
	}

	public String getBlindingMask() {
		return blindingMask;
	}

	// Fluent setter: returns this factory for chained configuration.
	public BlindingSanitizerFactory setBlindingMask(String blindingMask) {
		this.blindingMask = blindingMask;
		return this;
	}

	public void setDigest(MessageDigest digest) {
		this.digest = digest;
		base64 = new Base64();
	}

	// Creates a digest by algorithm name; logs and rethrows (wrapping
	// checked exceptions in RuntimeException) on failure.
	public void setDigest(String algorithm) {
		try {
			digest = MessageDigest.getInstance(algorithm);
			base64 = new Base64();
		} catch (Exception cause) {
			addError("Failed to initialize the message digest with algorithm '" + algorithm + "' for " + getClass(), cause);
			throw cause instanceof RuntimeException ? (RuntimeException) cause : new RuntimeException(
					"Failed to initialize the message digest with algorithm '" + algorithm + "' for " + getClass(), cause);
		}
	}

	public void setMac(Mac mac) {
		this.mac = mac;
		base64 = new Base64();
	}

	// Creates and keys a MAC by algorithm name; logs and rethrows (wrapping
	// checked exceptions in RuntimeException) on failure.
	public void setMac(String algorithm, SecretKey secretKey) {
		try {
			mac = Mac.getInstance(algorithm);
			mac.init(secretKey);
			base64 = new Base64();
		} catch (Exception cause) {
			addError("Failed to initialize the MAC with algorithm '" + algorithm + "' for " + getClass(), cause);
			throw cause instanceof RuntimeException ? (RuntimeException) cause : new RuntimeException(
					"Failed to initialize the MAC with algorithm '" + algorithm + "' for " + getClass(), cause);
		}
	}

	public boolean isMaskCritical() {
		return maskCritical;
	}

	public void setMaskCritical(boolean maskCritical) {
		this.maskCritical = maskCritical;
	}
}
|
/**
 * Flatten an array exactly one level deep; non-array elements are kept
 * as-is. Array.prototype.flat() has precisely these semantics, replacing
 * the manual isArray/push/spread loop (and the eslint suppression it needed).
 */
const flattenArray = (input) => input.flat();

module.exports = {flattenArray};
|
// Server route (Sapper-style): change a user's password.
import { changePassword } from 'helpers/db.js';

// Expects a JSON body { hash, newPassword }; `hash` presumably identifies
// the account or reset request -- confirm against helpers/db.js. The raw
// result of changePassword is serialized back to the client.
export async function post(req, res) {
  const { hash, newPassword } = req.body;
  const result = await changePassword(hash, newPassword);
  res.end(JSON.stringify(result));
}
|
#!/bin/bash
# Container entry point. First start (tracked by the /debug0 marker file)
# installs python requirements and parses container options; every start
# then runs the app either directly (debug) or under nginx + uwsgi.

if [ ! -f /debug0 ]; then
  if [ -e requirements.txt ]; then
    pip2 install -r requirements.txt
  fi
  # Marker: first-start setup is done.
  touch /debug0

  while getopts 'hdo:' flag; do
    case "${flag}" in
      h)
        echo "options:"
        echo "-h show brief help"
        echo "-d debug mode, no nginx or uwsgi, direct start with 'python app.py'"
        echo "-o gid installs docker into the container, gid should be the docker group id of your docker server"
        exit 0
        ;;
      d)
        # Marker: run in debug mode on all subsequent starts.
        touch /debug1
        ;;
      o)
        # Install the docker client and align the docker group id with the
        # host so nginx can reach the host's docker socket.
        apk add --no-cache docker shadow
        groupmod -g "${OPTARG}" docker
        gpasswd -a nginx docker
        ;;
      *)
        break
        ;;
    esac
  done
fi

if [ -e /debug1 ]; then
  echo "Running app in debug mode!"
  python2 app.py
else
  echo "Running app in production mode!"
  nginx && uwsgi --ini /app.ini
fi
|
<gh_stars>1-10
/***************************************************************************
* (C) Copyright 2003-2012 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui;
import java.util.EnumSet;
import java.util.Set;
import games.stendhal.client.gui.chatlog.EventLine;
import games.stendhal.client.gui.wt.core.WtWindowManager;
import games.stendhal.common.NotificationType;
import marauroa.common.Logger;
/**
* A chat log container that allows filtering by event type.
*/
public class NotificationChannel {
	private static final Logger logger = Logger.getLogger(NotificationChannel.class);

	/** Name of the channel. */
	private final String name;
	/** Chat log where to write allowed content. */
	private final KTextEdit channel;
	/** Event types that should be displayed at the channel. */
	private final Set<NotificationType> eventTypes;

	/**
	 * Create a new NotificationChannel.
	 *
	 * @param channelName name of the channel
	 * @param channel text area for showing the event log
	 * @param blackList if <code>true</code>, the channel will default to
	 * 	showing everything that has not been explicitly blacklisted. Otherwise
	 * 	it'll show only whitelisted content. The main channel should default
	 * 	to blacklisting, as it should show types that have been added in new
	 * 	game versions
	 * @param defaultTypes default value of the saved notification type list
	 * 	(white- or blacklist depending on the value of <code>showUnknown</code>)
	 */
	public NotificationChannel(String channelName, KTextEdit channel,
			boolean blackList, String defaultTypes) {
		name = channelName;
		this.channel = channel;
		// Start from "everything" for blacklist mode, "nothing" for whitelist
		// mode; the saved list below then toggles individual types off/on.
		if (blackList) {
			eventTypes = EnumSet.allOf(NotificationType.class);
		} else {
			eventTypes = EnumSet.noneOf(NotificationType.class);
		}
		// Load the persisted comma-separated type list for this channel.
		WtWindowManager wm = WtWindowManager.getInstance();
		String value = wm.getProperty("ui.channel." + name, defaultTypes);
		for (String typeString : value.split(",")) {
			/*
			 * String.split is unfortunately unable to return empty arrays when
			 * applied on empty string. Work around it.
			 */
			if ("".equals(typeString)) {
				continue;
			}
			try {
				NotificationType type = NotificationType.valueOf(typeString);
				setTypeFiltering(type, !blackList);
			} catch (RuntimeException e) {
				logger.error("Unrecognized notification type '" + typeString + "'", e);
			}
		}
	}

	/**
	 * Get the name of the channel.
	 *
	 * @return channel name
	 */
	String getName() {
		return name;
	}

	/**
	 * Set filtering of a notification type.
	 *
	 * @param type
	 * @param allow if <code>true</code> then messages of the type are
	 * 	displayed, otherwise not
	 */
	public final void setTypeFiltering(NotificationType type, boolean allow) {
		if (allow) {
			eventTypes.add(type);
		} else {
			eventTypes.remove(type);
		}
	}

	/**
	 * Add an event line to the channel, if it's of type that should be
	 * displayed.
	 *
	 * @param line
	 * @return <code>true</code> if the channel accepted the message,
	 * 	<code>false</code> otherwise
	 */
	boolean addEventLine(final EventLine line) {
		if (eventTypes.contains(line.getType())) {
			channel.addLine(line);
			return true;
		}
		return false;
	}

	/**
	 * Clear the channel log.
	 */
	void clear() {
		channel.clear();
	}

	/**
	 * Accessor for the underlying text area ("zones add" extension point).
	 */
	public KTextEdit getChannel() {
		return channel;
	}
}
|
# On JRuby the ffi gem must be activated explicitly before requiring it.
if RUBY_PLATFORM == 'java'
  require 'rubygems'
  gem 'ffi'
end

require 'ffi'

# Windows API bindings used by the process utilities. All functions are
# attached as *private* module functions via attach_pfunc.
module Process::Functions
  module FFI::Library
    unless instance_methods.include?(:attach_pfunc)
      # Wrapper method for attach_function + private
      def attach_pfunc(*args)
        attach_function(*args)
        private args[0]
      end
    end
  end

  extend FFI::Library

  # Common Windows type aliases.
  typedef :ulong, :dword
  typedef :uintptr_t, :handle
  typedef :uintptr_t, :hwnd
  typedef :uintptr_t, :hmodule

  # kernel32: process / thread / toolhelp snapshot / job-object APIs.
  ffi_lib :kernel32

  attach_pfunc :CloseHandle, [:handle], :bool
  attach_pfunc :CreateToolhelp32Snapshot, [:dword, :dword], :handle
  attach_pfunc :GenerateConsoleCtrlEvent, [:dword, :dword], :bool
  attach_pfunc :GetCurrentProcess, [], :handle
  attach_pfunc :GetModuleHandle, :GetModuleHandleA, [:string], :hmodule
  attach_pfunc :GetProcessAffinityMask, [:handle, :pointer, :pointer], :bool
  attach_pfunc :GetPriorityClass, [:handle], :dword
  attach_pfunc :GetProcAddress, [:hmodule, :string], :pointer
  attach_pfunc :GetVersionExA, [:pointer], :bool
  attach_pfunc :Heap32ListFirst, [:handle, :pointer], :bool
  attach_pfunc :Heap32ListNext, [:handle, :pointer], :bool
  attach_pfunc :Heap32First, [:pointer, :dword, :uintptr_t], :bool
  attach_pfunc :Heap32Next, [:pointer], :bool
  attach_pfunc :Module32First, [:handle, :pointer], :bool
  attach_pfunc :Module32Next, [:handle, :pointer], :bool
  attach_pfunc :IsProcessInJob, [:handle, :pointer, :pointer], :bool # 2nd arg optional
  attach_pfunc :OpenProcess, [:dword, :int, :dword], :handle
  attach_pfunc :Process32First, [:handle, :pointer], :bool
  attach_pfunc :Process32Next, [:handle, :pointer], :bool
  attach_pfunc :SetHandleInformation, [:handle, :dword, :dword], :bool
  attach_pfunc :SetErrorMode, [:uint], :uint
  attach_pfunc :SetPriorityClass, [:handle, :dword], :bool
  attach_pfunc :TerminateProcess, [:handle, :uint], :bool
  attach_pfunc :Thread32First, [:handle, :pointer], :bool
  attach_pfunc :Thread32Next, [:handle, :pointer], :bool
  attach_pfunc :WaitForSingleObject, [:handle, :dword], :dword

  attach_pfunc :CreateRemoteThread,
    [:handle, :pointer, :size_t, :pointer, :pointer, :dword, :pointer], :handle

  attach_pfunc :GetVolumeInformationA,
    [:string, :pointer, :dword, :pointer, :pointer, :pointer, :pointer, :dword], :bool

  attach_pfunc :CreateProcessW,
    [:buffer_in, :buffer_inout, :pointer, :pointer, :int,
     :dword, :buffer_in, :buffer_in, :pointer, :pointer], :bool

  attach_pfunc :AssignProcessToJobObject, [:handle, :handle], :bool
  attach_pfunc :CreateJobObjectA, [:pointer, :string], :handle
  attach_pfunc :OpenJobObjectA, [:dword, :int, :string], :handle
  attach_pfunc :QueryInformationJobObject, [:handle, :int, :pointer, :dword, :pointer], :bool
  attach_pfunc :SetInformationJobObject, [:handle, :int, :pointer, :dword], :bool
  attach_pfunc :GetExitCodeProcess, [:handle, :pointer], :bool

  # advapi32: token / SID helpers and CreateProcessWithLogonW.
  ffi_lib :advapi32

  attach_pfunc :ConvertSidToStringSidA, [:buffer_in, :pointer], :bool
  attach_pfunc :GetTokenInformation, [:handle, :int, :pointer, :dword, :pointer], :bool
  attach_pfunc :OpenProcessToken, [:handle, :dword, :pointer], :bool

  attach_pfunc :CreateProcessWithLogonW,
    [:buffer_in, :buffer_in, :buffer_in, :dword, :buffer_in, :buffer_inout,
     :dword, :buffer_in, :buffer_in, :pointer, :pointer], :bool

  # CRT: map a CRT file descriptor to an OS handle; _get_errno is absent on
  # Windows XP and earlier, hence the rescue.
  ffi_lib FFI::Library::LIBC

  attach_pfunc :get_osfhandle, :_get_osfhandle, [:int], :intptr_t

  begin
    attach_pfunc :get_errno, :_get_errno, [:pointer], :int
  rescue FFI::NotFoundError
    # Do nothing, Windows XP or earlier.
  end
end
|
#!/bin/bash
######################################################################
# Tomatito: pomodoro timer
# Four 25-minute pomodoros; a 5-minute break after each of the first
# three, a 20-minute break after the fourth, then exit.
######################################################################

# Print help text (any argument triggers it).
ayuda () {
echo "Tomatito
Script para cronometrar pomodoros:
	* un pomodoro son 25 minutos;
	* por cada pomodoro completado hay un receso de 5 minutos;
	* al cuarto pomodoro hay un recreo de 20....
	* Ahí el programa termina (después del recreo).
Reiniciar para recomenzar el ciclo.
MarxBro. WTFPL. 2016
"
}
[ "$1" ] && ayuda && exit 0

SILENCIO=1                  # Optional: set to 0 for audible alerts.
POMODORO=1500               # 25 minutes
POMODORO_RECREO=300         # 5 minutes (fix: was 600 = 10 min, contradicting
                            # both the old comment and the user messages)
POMODORO_CONTADOR=0         # Long break after the fourth pomodoro.
POMODORO_RECREO_LARGO=1200  # Long break: 20 minutes.

# Functions
campana (){
if (( SILENCIO == 0)); then
	# Fix: '2&1>/dev/null' mis-parsed (backgrounded mplayer with a stray
	# "2" argument, then ran "1"); this is the intended silencing.
	mplayer /usr/share/sounds/alert.mp3 >/dev/null 2>&1
fi
}

notificar (){
campana
notify-send "$1"
}

notificar_comienzo (){
notificar 'Comenzando a trabajar: pomodoro!'
}

notificar_final_pomodoro (){
notificar 'Pomodoro finalizado. Ahora es tu recreo de 5 minutos.'
sleep $POMODORO_RECREO && pomodoro_start
}

notificar_FIN_CICLO (){
notificar 'Pomodoro finalizado. Este es el cuarto, así que ahora es tu recreo de 20 minutos.'
sleep $POMODORO_RECREO_LARGO && notificar_pomodoro_reinicio
}

notificar_pomodoro_reinicio (){
notificar 'Cuatro pomodoros, tres breves recreos y uno largo. Para recomenzar, reiniciar el programa!'
exit 0
}

pomodoro_start(){
let POMODORO_CONTADOR++
notificar_comienzo
sleep $POMODORO
if ((POMODORO_CONTADOR == 4));
then
	notificar_FIN_CICLO
else
	notificar_final_pomodoro
fi
}

# Main
pomodoro_start
|
package cyclops.pattern;
import cyclops.container.control.Either;
import cyclops.container.control.Option;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.reactive.ReactiveSeq;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* @author smccarron
*/
public interface OrThenIterableClause1<V, I, O> extends Clause<MatchResult1<V, Iterable<I>, O>> {

    // Adapt a supplier of match results into a clause instance.
    static <V, I, O> OrThenIterableClause1<V, I, O> of(Supplier<MatchResult1<V, Iterable<I>, O>> supplier) {
        return new OrThenIterableClause1<V, I, O>() {
            @Override
            public MatchResult1<V, Iterable<I>, O> get() {
                return supplier.get();
            }
        };
    }

    // If the current result is still unmatched but carries a captured
    // Iterable, apply `mapper` to it (as a ReactiveSeq) to produce the
    // output; an existing output passes through unchanged. When no iterable
    // was captured, the result stays unmatched with an empty Option.
    default OrMatchClause1<V, I, O> then(Function<ReactiveSeq<I>, O> mapper) {
        return OrMatchClause1.of(() -> MatchResult1.of(subject().either()
                                                                .mapLeft(Tuple2::_2)
                                                                .mapLeft(iIterOpt -> iIterOpt.map(ReactiveSeq::fromIterable)
                                                                                             .map(mapper)).<Either<Tuple2<V, Option<I>>, O>>fold(outputOpt -> outputOpt.map(Either::<Tuple2<V, Option<I>>, O>right)
                                                                                                                                                                      .orElse(Either.<Tuple2<V, Option<I>>, O>left(Tuple2.of(subject().unapply()
                                                                                                                                                                                                                                      ._1(),
                                                                                                                                                                                                                             Option.none()))),
                                                                                                                                                 existingOutput -> Either.right(existingOutput))));
    }
}
|
<filename>tests/dummy/app/components/oe-select.js
import Component from '@ember/component';
import { computed } from '@ember/object';

// Test-dummy stand-in for an <oe-select> form control.
const OESelect = Component.extend({
  // Tagless component: renders no wrapping element.
  tagName: '',
  // Currently selected value; may also be passed positionally (see below).
  value: null,
  controlId: '',
  disabled: false,
  // Computed with no dependencies so each instance gets its own array.
  options: computed(() => []),
  // No-op default for the change action hook.
  'on-change'() {}
});

OESelect.reopenClass({
  // Allows {{oe-select someValue}} to populate `value`.
  positionalParams: ['value']
});

export default OESelect;
|
# dojo.py
def primes(limit):
    """Return all prime numbers strictly below ``limit``.

    Uses the Sieve of Eratosthenes: O(limit log log limit) time,
    O(limit) space. Any ``limit`` below 2 yields an empty list.
    """
    if limit < 2:
        return []
    is_prime = [True] * limit
    is_prime[0] = is_prime[1] = False  # 0 and 1 are not prime
    for candidate in range(2, int(limit ** 0.5) + 1):
        if not is_prime[candidate]:
            continue
        # Composites below candidate*candidate were already crossed out by
        # smaller primes, so marking can start there.
        for composite in range(candidate * candidate, limit, candidate):
            is_prime[composite] = False
    return [n for n in range(limit) if is_prime[n]]
<reponame>GMcD/telar-web
package models

// ChangePasswordModel is the JSON request payload for a password change.
type ChangePasswordModel struct {
	// CurrentPassword is the password currently on record.
	CurrentPassword string `json:"currentPassword"`
	// NewPassword is the desired replacement password.
	NewPassword string `json:"newPassword"`
	// ConfirmPassword repeats NewPassword; presumably validated to match
	// by the handler -- confirm against the caller.
	ConfirmPassword string `json:"confirmPassword"`
}
|
# Build the application bundle from the PyInstaller spec file, overwriting
# any previous dist/build output without prompting.
pyinstaller --noconfirm app.spec
|
#!/bin/bash
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0

set -e

# This script will package the CloudFormation in ${CFN_TEMPLATE_DIR} directory and upload it
# to Amazon S3 in preparation for deployment using the AWS CloudFormation service.
#
# This script exists because Service Catalog products, when using relative references to
# cloudformation templates, are not properly packaged by the AWS cli. Also the full stack,
# due to 2 levels of Service Catalog deployment, will not always package properly using the
# AWS cli. This script treats the templates as source code and packages them, putting the
# results into a 'build' subdirectory.
# It assumes a Linux or MacOSX environment and relies on: AWS CLI, sed, Python 3 / pip3, zip.
# PLEASE NOTE this script will store all resources to s3://${CFN_BUCKET_NAME}/${PROJECT_NAME}

CFN_BUCKET_NAME=$1
DEPLOYMENT_REGION=$2

# Fail fast with a usage hint instead of building malformed S3 URIs.
if [ -z "$CFN_BUCKET_NAME" ] || [ -z "$DEPLOYMENT_REGION" ]; then
    echo "Usage: $0 <s3-bucket-name> <deployment-region>" >&2
    exit 1
fi

PROJECT_NAME="amazon-sagemaker-reusable-components"
CFN_TEMPLATE_DIR="cfn-templates"
SEED_CODE_DIR="project-seed-code"
CFN_OUTPUT_DIR="build/${DEPLOYMENT_REGION}"
SEED_CODE_OUTPUT_DIR="build/${DEPLOYMENT_REGION}/seed-code"
SOURCE_CODE_ZIP_NAME="amazon-sagemaker-reusable-components.zip"

# files that need to be scrubbed with sed to replace < S3 BUCKET LOCATION > with an actual S3 bucket name
SELF_PACKAGE_LIST="sm-project-sc-portfolio.yaml project-s3-fs-ingestion.yaml"
# files to be packaged using `aws cloudformation package`
AWS_PACKAGE_LIST="sm-project-sc-portfolio.yaml"
# files that wont be uploaded by `aws cloudformation package`
UPLOAD_LIST="sm-project-sc-portfolio.yaml project-s3-fs-ingestion.yaml"

# Check that S3 bucket exists, if not create a new one
if aws s3 ls "s3://${CFN_BUCKET_NAME}" 2>&1 | grep NoSuchBucket
then
    echo "Creating Amazon S3 bucket ${CFN_BUCKET_NAME}"
    aws s3 mb "s3://${CFN_BUCKET_NAME}" --region "${DEPLOYMENT_REGION}"
fi

echo "Preparing content for publication to Amazon S3 s3://${CFN_BUCKET_NAME}/${PROJECT_NAME}"

## clean away any previous builds of the CFN
rm -fr "${CFN_OUTPUT_DIR}"
rm -fr "${SEED_CODE_OUTPUT_DIR}"
mkdir -p "${CFN_OUTPUT_DIR}"
mkdir -p "${SEED_CODE_OUTPUT_DIR}"
rm -f build/*-"${DEPLOYMENT_REGION}".zip
cp "${CFN_TEMPLATE_DIR}"/*.yaml "${CFN_OUTPUT_DIR}"

# Zip the source code
echo "Zipping the CloudFormation templates and buildspec files"
rm -f "${SOURCE_CODE_ZIP_NAME}"
#zip -r ${SOURCE_CODE_ZIP_NAME} . -i "*.yaml" "*.yml" "*.sh"
zip -r "${SOURCE_CODE_ZIP_NAME}" . -x "build/*" "*.pdf" ".git/*" "img/*" "design/*" ".*"

## Zip the MLOps project seed code
echo "Zipping MLOps project seed code"
(cd "${SEED_CODE_DIR}/s3-fs-ingestion/" && zip -r "../../${SEED_CODE_OUTPUT_DIR}/s3-fs-ingestion-v1.0.zip" .)

## publish files to target AWS regions
echo "Publishing CloudFormation to ${DEPLOYMENT_REGION}"
echo "Clearing the project directory for ${PROJECT_NAME} in ${CFN_BUCKET_NAME}..."
aws s3 rm \
    "s3://${CFN_BUCKET_NAME}/${PROJECT_NAME}/" \
    --recursive \
    --region "${DEPLOYMENT_REGION}"

echo "Self-packaging the Cloudformation templates: ${SELF_PACKAGE_LIST}"
# NOTE: 'sed -ie' edits in place using 'e' as the backup suffix (it leaves
# '<file>e' backups); kept as-is for Linux/macOS compatibility.
# The unquoted list expansions below are intentional word splitting.
for fname in ${SELF_PACKAGE_LIST};
do
    sed -ie "s/< S3_CFN_STAGING_PATH >/${PROJECT_NAME}/" "${CFN_OUTPUT_DIR}/${fname}"
    sed -ie "s/< S3_CFN_STAGING_BUCKET >/${CFN_BUCKET_NAME}/" "${CFN_OUTPUT_DIR}/${fname}"
    sed -ie "s/< S3_CFN_STAGING_BUCKET_PATH >/${CFN_BUCKET_NAME}\/${PROJECT_NAME}/" "${CFN_OUTPUT_DIR}/${fname}"
done

echo "Packaging Cloudformation templates: ${AWS_PACKAGE_LIST}"
for fname in ${AWS_PACKAGE_LIST};
do
    pushd "${CFN_OUTPUT_DIR}"
    aws cloudformation package \
        --template-file "${fname}" \
        --s3-bucket "${CFN_BUCKET_NAME}" \
        --s3-prefix "${PROJECT_NAME}" \
        --output-template-file "${fname}-packaged" \
        --region "${DEPLOYMENT_REGION}"
    popd
done

# copy source code .zip file to the S3 bucket
aws s3 cp "${SOURCE_CODE_ZIP_NAME}" "s3://${CFN_BUCKET_NAME}/${PROJECT_NAME}/"

# copy all seed-code .zip files from ${SEED_CODE_OUTPUT_DIR} to S3
aws s3 cp "${SEED_CODE_OUTPUT_DIR}" "s3://${CFN_BUCKET_NAME}/${PROJECT_NAME}/seed-code/" --recursive

# put an object tag servicecatalog:provisioning=true for AmazonSageMakerServiceCatalogProductsLaunchRole access
for fname in "${SEED_CODE_OUTPUT_DIR}"/*
do
    echo "Set servicecatalog:provisioning=true tag to object: ${fname}"
    aws s3api put-object-tagging \
        --bucket "${CFN_BUCKET_NAME}" \
        --key "${PROJECT_NAME}/seed-code/$(basename "$fname")" \
        --tagging 'TagSet=[{Key=servicecatalog:provisioning,Value=true}]'
done

# push files to S3, note this does not 'package' the templates
echo "Copying cloudformation templates and files to S3: ${UPLOAD_LIST}"
for fname in ${UPLOAD_LIST};
do
    if [ -f "${CFN_OUTPUT_DIR}/${fname}-packaged" ]; then
        aws s3 cp "${CFN_OUTPUT_DIR}/${fname}-packaged" "s3://${CFN_BUCKET_NAME}/${PROJECT_NAME}/${fname}"
    else
        aws s3 cp "${CFN_OUTPUT_DIR}/${fname}" "s3://${CFN_BUCKET_NAME}/${PROJECT_NAME}/${fname}"
    fi
    echo "To validate template ${fname}:"
    echo "aws cloudformation validate-template --template-url https://s3.${DEPLOYMENT_REGION}.amazonaws.com/${CFN_BUCKET_NAME}/${PROJECT_NAME}/${fname}"
    echo "To deploy stack execute:"
    echo "aws cloudformation create-stack --template-url https://s3.${DEPLOYMENT_REGION}.amazonaws.com/${CFN_BUCKET_NAME}/${PROJECT_NAME}/${fname} --region ${DEPLOYMENT_REGION} --stack-name <STACK_NAME> --disable-rollback --capabilities CAPABILITY_IAM CAPABILITY_NAMED_IAM --parameters ParameterKey=,ParameterValue="
done

echo ==================================================
echo "Publication complete"
|
<filename>web/script/services/root.js
var sohagApp = angular.module('SohagApp');

// Root-level data service: fetches the aggregated home page payload from the
// Sohag backend.
sohagApp.factory('SohagRootService', function ($http) {
    var service = {};

    // POST to <sohagServerUrl>getHomePageData with an empty body and return
    // the $http promise to the caller.
    service.getHomePageData = function () {
        return $http({
            method: 'POST',
            url: sohagServerUrl + "getHomePageData",
            headers: {
                'Content-Type': "text/html"
            },
            data: ""
        });
    };

    return service;
});
|
<gh_stars>10-100
package io.opensphere.heatmap;
import io.opensphere.heatmap.DataRegistryHelper.HeatmapImageInfo;
/**
 * Interface to an object that knows how to recreate heatmap images but with a
 * new style.
 */
public interface HeatmapRecreator
{
    /**
     * Recreates heat map images but with a new style.
     *
     * @param dtiKey The key of the layer to create heat map images for.
     * @param style The new style to apply to the heat map.
     * @param imageInfo The previous image info, used as the basis for the
     *            recreated images.
     */
    void recreate(String dtiKey, HeatmapVisualizationStyle style, HeatmapImageInfo imageInfo);
}
|
package org.rzo.yajsw.controller.jvm;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipelineCoverage;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.rzo.yajsw.Constants;
import org.rzo.yajsw.controller.Message;
/**
 * Netty upstream handler on the controller (wrapper) side of the
 * controller/JVM connection. It validates the startup key sent by the wrapped
 * JVM, enforces a single active session per controller, and dispatches
 * wrapper-protocol {@link Message}s to the owning {@link JVMController}.
 */
@ChannelPipelineCoverage("one")
public class ControllerHandler extends SimpleChannelUpstreamHandler implements Constants
{
    /** The controller this handler feeds events into. */
    JVMController _controller;

    ControllerHandler(JVMController controller)
    {
        _controller = controller;
    }

    /**
     * Handles an incoming {@link Message} from the wrapped JVM, dispatching on
     * the message code. If the user already requested a stop, the process is
     * stopped immediately regardless of the message content.
     */
    @Override
    public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception
    {
        if (_controller.getState() == JVMController.STATE_USER_STOP)
        {
            // set the channel if not set
            _controller._channel = ctx.getChannel();
            _controller.stop(JVMController.STATE_USER_STOP, "INTERNAL");
            return;
        }
        Message message = (Message) e.getMessage();
        switch (message.getCode())
        {
        case WRAPPER_MSG_KEY:
            // check if JVM sent us correct key
            if (_controller._key.equals(message.getMessage()))
            {
                // we set the channel not in channelConnected,
                _controller._channel = ctx.getChannel();
                _controller.setState(JVMController.STATE_LOGGED_ON);
                _controller.startupOK();
                ctx.getChannel().write(new Message(Constants.WRAPPER_MSG_OKKEY, "" + _controller._wrappedProcess.getAppPid()));
                if (_controller.isDebug())
                    _controller.getLog().info("Correct key");
            }
            // if not: announce it and close session
            else
            {
                if (_controller.isDebug())
                    _controller.getLog().info("Wrong key -> closing session");
                ctx.getChannel().write(new Message(Constants.WRAPPER_MSG_BADKEY, null));
                ctx.getChannel().close();
            }
            break;
        case Constants.WRAPPER_MSG_STOP:
            // application asked to be stopped
            if (_controller._wrappedProcess != null)
                _controller._wrappedProcess.stop("APPLICATION");
            break;
        case Constants.WRAPPER_MSG_STOP_TIMER:
            if (_controller._wrappedProcess != null)
                _controller._wrappedProcess.stopTimer();
            break;
        case Constants.WRAPPER_MSG_RESTART:
            if (_controller._wrappedProcess != null)
                _controller._wrappedProcess.restartInternal();
            break;
        case Constants.WRAPPER_MSG_PING:
            _controller.pingReceived();
            // ping may carry an optional payload: "heap;minorGC;fullGC;heapBytes"
            String msg = message.getMessage();
            if (msg != null)
            {
                String[] values = msg.split(";");
                if (values.length == 4)
                    try {
                        float heap = Float.parseFloat(values[0]);
                        long minGC = Long.parseLong(values[1]);
                        long fullGC = Long.parseLong(values[2]);
                        long heapInBytes = Long.parseLong(values[3]);
                        _controller.setHeap(heap, minGC, fullGC, heapInBytes);
                    }
                    catch (Exception ex)
                    {
                        ex.printStackTrace();
                    }
            }
            break;
        case Constants.WRAPPER_MSG_SERVICE_STARTUP:
            _controller.serviceStartup();
            break;
        case Constants.WRAPPER_MSG_STOP_PENDING:
            // application announces how long its shutdown may take
            if (_controller._wrappedProcess != null) {
                _controller._wrappedProcess.signalStopping(Long.valueOf(message.getMessage()));
            }
            break;
        }
    }

    /**
     * Tracks new connections. Only one session is accepted; the channel is
     * bound to the controller only after the key handshake (see
     * {@link #messageReceived}), not here.
     */
    @Override
    public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception
    {
        synchronized (_controller)
        {
            // we accept only one session. if we already have one -> close the
            // new session
            if (_controller._channel != null && _controller._channel != ctx.getChannel())
            {
                if (_controller.isDebug())
                    _controller.getLog().info("session already established -> ignore further sessions");
                ctx.getChannel().close();
            }
            else if (_controller._channel == null)
            {
                if (_controller.getState() != JVMController.STATE_USER_STOP)
                    _controller.setState(JVMController.STATE_ESTABLISHED);
                // a hacker may establish a connection but does not send the key, thus locking the controller for the process.
                // we leave him connected, so he does not keep polling us, but we allow further connections, until we get the key from our app.
                //TODO if we have a real bandit: timeout of connections which do not send the key.
                //_controller._channel = ctx.getChannel();
            }
        }
    }

    /**
     * Cleans up when the active session closes: stops the outgoing-message
     * worker, unbinds the channel and puts the controller into the
     * waiting-closed state.
     */
    @Override
    public void channelDisconnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception
    {
        synchronized (_controller)
        {
            if (_controller._channel == ctx.getChannel())
            {
                // stop processing outgoing messages
                _controller.workerExecutor.shutdownNow();
                // stop the controller
                _controller._channel = null;
                _controller.setState(JVMController.STATE_WAITING_CLOSED);
                if (_controller.isDebug())
                    _controller.getLog().info("session closed -> waiting");
            }
        }
    }

    /** Logs channel exceptions when the controller is in debug mode. */
    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception
    {
        if (_controller.isDebug())
            _controller.getLog().info(e.getCause().getMessage());
    }
}
|
<gh_stars>0
require('dotenv').config();

const erc20abi = require('../../abi/IERC20.json');
const TransactionHandler = require('../../utils/TransactionHandler');

/**
 * Approves the UniswapV2 router to spend a given quantity of LakshmiKanth
 * tokens on behalf of the configured transaction maker account.
 */
const ApproveTokenSpendingForUniswapV2RouterContract = {
  approveTokenSpending: async (tokenQuantityForSpendingApproval) => {
    // Scale the human-readable quantity to the token's 18 decimals and
    // hex-encode it for the transaction payload.
    const approvalAmountHex = TransactionHandler.toHex(
      tokenQuantityForSpendingApproval * 10 ** 18
    );

    // ERC-20 contract instance for the LakshmiKanth token.
    const tokenContract = TransactionHandler.loadContract(
      erc20abi.abi,
      process.env.exchangeContractAddress_LakshmiKanthToken
    );

    // approve(spender, amount) call targeting the UniswapV2 router.
    const approveCall = tokenContract.methods.approve(
      process.env.uniswapV2RouterAddress,
      approvalAmountHex
    );

    // Get the number of transactions sent so far so we can create a fresh nonce.
    const nonceNumber = await TransactionHandler.getNonce(
      process.env.transactionMaker
    );

    // Construct the raw transaction data.
    const txData = {
      nonce: TransactionHandler.toHex(nonceNumber),
      gasLimit: TransactionHandler.toHex(6000000),
      gasPrice: TransactionHandler.toHex(10000000000), // 10 Gwei
      to: process.env.lakshmiKanthTokenContractAddress,
      from: process.env.transactionMaker,
      data: approveCall.encodeABI(),
    };

    return await TransactionHandler.signAndSendTransaction(
      txData,
      process.env.transactionMakerPrivateKey
    );
  },
};

module.exports = ApproveTokenSpendingForUniswapV2RouterContract;
|
<reponame>panzergame/dxfplotter<gh_stars>10-100
#include <exporter.h>
#include <serializer/task.h>
#include <cereal/cereal.hpp>
namespace Exporter::Dxfplot
{

/// Serialises the given document to the output stream.
void Exporter::operator()(const Model::Document& document, std::ostream &output) const
{
	Archive archive(output);
	save(archive, document);
}

/// Writes the document's task plus the names of its active profile and tool
/// configurations as named value pairs.
void Exporter::save(Archive &archive, const Model::Document& document) const
{
	// cereal archives process a multi-argument call left to right, so this is
	// equivalent to three separate archive(...) invocations.
	archive(cereal::make_nvp("task", document.task()),
			cereal::make_nvp("profile_name", document.profileConfig().name()),
			cereal::make_nvp("tool_name", document.toolConfig().name()));
}

}
|
#!/usr/bin/env bash
### This script imports SDK code from Element Android
set -e

elementAndroidPath="../element-android"

if [ -d "$elementAndroidPath" ]; then
  echo "Importing sdk module from Element Android located at ${elementAndroidPath}"
else
  echo "Element Android not found at ${elementAndroidPath}. Can not continue."
  exit 1
fi

# Check that Element Android is on master branch
pushd "$elementAndroidPath"
# $(...) instead of backticks; -r so read does not mangle backslashes
elementBranch=$(git rev-parse --abbrev-ref HEAD)
if [ "$elementBranch" != "master" ]; then
  read -r -p "Warning, Element Android is not on master branch but on branch '${elementBranch}' . Continue (y/n)? " -n 1 CONT
  echo
  if [ "$CONT" != "y" ]; then
    exit 0
  fi
fi
popd

# matrix SDK
# Delete existing path
echo "Importing matrix-sdk-android..."
rm -rf ./matrix-sdk-android
cp -r "${elementAndroidPath}/matrix-sdk-android" .

# Add all changes to git
git add -A

# Build the library
./gradlew clean assembleRelease

# Success
echo "Success"
echo
echo "Please check the version name before committing and update the changelog"
|
/**
 * Consumes (advances) the given iterator, discarding the yielded values.
 *
 * @param iterator - The iterator to advance.
 * @param steps - Optional number of steps to advance; when omitted, presumably
 *   the iterator is drained completely — TODO confirm against the implementation.
 */
export default function consume<T>(iterator: Iterator<T>, steps?: number): void;
|
package com.bv.eidss.model.generated;
import android.content.ContentValues;
import android.database.Cursor;
import android.os.Parcel;
import com.bv.eidss.R;
import com.bv.eidss.model.CaseStatus;
import com.bv.eidss.model.interfaces.IFieldChanged;
import com.bv.eidss.model.interfaces.ICallable;
import com.bv.eidss.model.interfaces.FieldMetadata;
import com.bv.eidss.utils.DateHelpers;
import com.bv.eidss.utils.EidssUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlSerializer;
import java.io.IOException;
import java.text.Format;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.List;
import java.util.ArrayList;
/**
 * Generated data-transfer object for an animal sample within a monitoring
 * session. Tracks per-field change state, and (de)serialises to/from Cursor,
 * ContentValues, Parcel, XML and JSON.
 *
 * Fix: {@code FromXml} previously used {@code Long.getLong(...)}, which looks
 * up a JVM <em>system property</em> by name (returning {@code null} here and
 * causing an NPE on unboxing). It now uses {@code Long.parseLong(...)} to
 * parse the attribute value.
 */
public class ASSample_object {
    // true once any tracked field has been modified since load/creation
    protected Boolean bChanged;
    public Boolean getChanged() { return bChanged; }
    // optional observer notified on every field change
    protected IFieldChanged _fieldChangedHandler;
    public IFieldChanged getFieldChangedHandler() { return _fieldChangedHandler; }
    public void setFieldChangedHandler(IFieldChanged value) { _fieldChangedHandler = value; }
    public ASSample_object()
    {
        bChanged = false;
        _datCreateDate = DateHelpers.Today();
    }
    // EIDSS field path identifiers used by the field metadata below
    public static String eidss_Farm = "AsSession.Sample.Farm";
    public static String eidss_SpeciesType = "AsSession.Sample.Species";
    public static String eidss_Animal = "AsSession.Sample.AnimalCode";
    public static String eidss_AnimalAge = "AsSession.Sample.AnimalAge";
    public static String eidss_Color = "AsSession.Sample.AnimalColor";
    public static String eidss_AnimalGender = "AsSession.Sample.AnimalGender";
    public static String eidss_Name = "AsSession.Sample.AnimalName";
    public static String eidss_Description = "AsSession.Sample.AnimalComments";
    public static String eidss_SampleType = "AsSession.Sample.SampleType";
    public static String eidss_FieldBarcode = "AsSession.Sample.FieldBarcode";
    public static String eidss_FieldCollectionDate = "AsSession.Sample.FieldCollectionDate";
    public static String eidss_SendToOffice = "AsSession.Sample.SentTo";
    // metadata mapping each EIDSS field path to its label resource and getter
    public static List<FieldMetadata> fieldMetadata = new ArrayList<FieldMetadata>() {{{{
        add(new FieldMetadata(eidss_Farm, R.string.ASSample_idfFarm, new ICallable<Long, ASSample_object>() { public Long call(ASSample_object t) { return t.getFarm(); }}));
        add(new FieldMetadata(eidss_SpeciesType, R.string.ASSample_idfsSpeciesType, new ICallable<Long, ASSample_object>() { public Long call(ASSample_object t) { return t.getSpeciesType(); }}));
        add(new FieldMetadata(eidss_Animal, R.string.ASSample_idfAnimal, new ICallable<Long, ASSample_object>() { public Long call(ASSample_object t) { return t.getAnimal(); }}));
        add(new FieldMetadata(eidss_AnimalAge, R.string.ASSample_idfsAnimalAge, new ICallable<Long, ASSample_object>() { public Long call(ASSample_object t) { return t.getAnimalAge(); }}));
        add(new FieldMetadata(eidss_Color, R.string.ASSample_strColor, new ICallable<String, ASSample_object>() { public String call(ASSample_object t) { return t.getColor(); }}));
        add(new FieldMetadata(eidss_AnimalGender, R.string.ASSample_idfsAnimalGender, new ICallable<Long, ASSample_object>() { public Long call(ASSample_object t) { return t.getAnimalGender(); }}));
        add(new FieldMetadata(eidss_Name, R.string.ASSample_strName, new ICallable<String, ASSample_object>() { public String call(ASSample_object t) { return t.getName(); }}));
        add(new FieldMetadata(eidss_Description, R.string.ASSample_strDescription, new ICallable<String, ASSample_object>() { public String call(ASSample_object t) { return t.getDescription(); }}));
        add(new FieldMetadata(eidss_SampleType, R.string.ASSample_idfsSampleType, new ICallable<Long, ASSample_object>() { public Long call(ASSample_object t) { return t.getSampleType(); }}));
        add(new FieldMetadata(eidss_FieldBarcode, R.string.ASSample_strFieldBarcode, new ICallable<String, ASSample_object>() { public String call(ASSample_object t) { return t.getFieldBarcode(); }}));
        add(new FieldMetadata(eidss_FieldCollectionDate, R.string.ASSample_datFieldCollectionDate, new ICallable<Date, ASSample_object>() { public Date call(ASSample_object t) { return t.getFieldCollectionDate(); }}));
        add(new FieldMetadata(eidss_SendToOffice, R.string.ASSample_idfSendToOffice, new ICallable<Long, ASSample_object>() { public Long call(ASSample_object t) { return t.getSendToOffice(); }}));
    }}}};
    // system fields
    protected long _id;
    public long getId() { return _id; }
    public void setId(long value) { _id = value; }
    protected String _strLastSynError;
    public String getLastSynError() { return _strLastSynError; }
    public void setLastSynError(String value) { _strLastSynError = value; }
    protected int _intStatus; // 1 - new; 2 - synchronized; 3 - changed; 4 - unloaded;
    public int getStatus() { return _intStatus; }
    public void setStatusChanged() { _intStatus = CaseStatus.CHANGED; }
    public void setStatusSyn() { _intStatus = CaseStatus.SYNCHRONIZED; }
    public void setStatusUploaded() { _intStatus = CaseStatus.UNLOADED; }
    protected int _intChanged; // 0 - not; 1 - yes;
    public void clearChanged() { _intChanged = 0; }
    protected Date _datCreateDate;
    public Date getCreateDate() { return _datCreateDate; }
    //fields (each setter updates bChanged/_intChanged and notifies the handler)
    protected String _uidOfflineCaseID;
    public String getOfflineCaseID(){return _uidOfflineCaseID;}
    public void setOfflineCaseID(String value) { _uidOfflineCaseID = value; }
    protected long _idParent;
    public long getParent(){return _idParent;}
    public void setParent(long value) { bChanged = bChanged || _idParent != value; _intChanged = ((_intChanged == 1) || _idParent != value) ? 1 : 0; if (_idParent != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idParent", _idParent, value); } _idParent = value; }}
    protected long _idfMonitoringSession;
    public long getMonitoringSession(){return _idfMonitoringSession;}
    public void setMonitoringSession(long value) { bChanged = bChanged || _idfMonitoringSession != value; _intChanged = ((_intChanged == 1) || _idfMonitoringSession != value) ? 1 : 0; if (_idfMonitoringSession != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfMonitoringSession", _idfMonitoringSession, value); } _idfMonitoringSession = value; }}
    protected long _idfFarm;
    public long getFarm(){return _idfFarm;}
    public void setFarm(long value) { bChanged = bChanged || _idfFarm != value; _intChanged = ((_intChanged == 1) || _idfFarm != value) ? 1 : 0; if (_idfFarm != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfFarm", _idfFarm, value); } _idfFarm = value; }}
    protected long _idfsSpeciesType;
    public long getSpeciesType(){return _idfsSpeciesType;}
    public void setSpeciesType(long value) { bChanged = bChanged || _idfsSpeciesType != value; _intChanged = ((_intChanged == 1) || _idfsSpeciesType != value) ? 1 : 0; if (_idfsSpeciesType != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfsSpeciesType", _idfsSpeciesType, value); } _idfsSpeciesType = value; }}
    protected long _idfAnimal;
    public long getAnimal(){return _idfAnimal;}
    public void setAnimal(long value) { bChanged = bChanged || _idfAnimal != value; _intChanged = ((_intChanged == 1) || _idfAnimal != value) ? 1 : 0; if (_idfAnimal != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfAnimal", _idfAnimal, value); } _idfAnimal = value; }}
    protected String _strAnimalCode;
    public String getAnimalCode(){return _strAnimalCode;}
    public void setAnimalCode(String value) { if(_strAnimalCode == null && value == null) return; bChanged = bChanged || _strAnimalCode == null || value == null || !_strAnimalCode.equals(value); _intChanged = ((_intChanged == 1) || _strAnimalCode == null || value == null || !_strAnimalCode.equals(value)) ? 1 : 0; if (_strAnimalCode == null || value == null || !_strAnimalCode.equals(value)) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("strAnimalCode", _strAnimalCode, value); } _strAnimalCode = value; }}
    protected long _idfsAnimalAge;
    public long getAnimalAge(){return _idfsAnimalAge;}
    public void setAnimalAge(long value) { bChanged = bChanged || _idfsAnimalAge != value; _intChanged = ((_intChanged == 1) || _idfsAnimalAge != value) ? 1 : 0; if (_idfsAnimalAge != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfsAnimalAge", _idfsAnimalAge, value); } _idfsAnimalAge = value; }}
    protected String _strColor;
    public String getColor(){return _strColor;}
    public void setColor(String value) { if(_strColor == null && value == null) return; bChanged = bChanged || _strColor == null || value == null || !_strColor.equals(value); _intChanged = ((_intChanged == 1) || _strColor == null || value == null || !_strColor.equals(value)) ? 1 : 0; if (_strColor == null || value == null || !_strColor.equals(value)) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("strColor", _strColor, value); } _strColor = value; }}
    protected long _idfsAnimalGender;
    public long getAnimalGender(){return _idfsAnimalGender;}
    public void setAnimalGender(long value) { bChanged = bChanged || _idfsAnimalGender != value; _intChanged = ((_intChanged == 1) || _idfsAnimalGender != value) ? 1 : 0; if (_idfsAnimalGender != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfsAnimalGender", _idfsAnimalGender, value); } _idfsAnimalGender = value; }}
    protected String _strName;
    public String getName(){return _strName;}
    public void setName(String value) { if(_strName == null && value == null) return; bChanged = bChanged || _strName == null || value == null || !_strName.equals(value); _intChanged = ((_intChanged == 1) || _strName == null || value == null || !_strName.equals(value)) ? 1 : 0; if (_strName == null || value == null || !_strName.equals(value)) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("strName", _strName, value); } _strName = value; }}
    protected String _strDescription;
    public String getDescription(){return _strDescription;}
    public void setDescription(String value) { if(_strDescription == null && value == null) return; bChanged = bChanged || _strDescription == null || value == null || !_strDescription.equals(value); _intChanged = ((_intChanged == 1) || _strDescription == null || value == null || !_strDescription.equals(value)) ? 1 : 0; if (_strDescription == null || value == null || !_strDescription.equals(value)) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("strDescription", _strDescription, value); } _strDescription = value; }}
    protected long _idfMaterial;
    public long getMaterial(){return _idfMaterial;}
    public void setMaterial(long value) { bChanged = bChanged || _idfMaterial != value; _intChanged = ((_intChanged == 1) || _idfMaterial != value) ? 1 : 0; if (_idfMaterial != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfMaterial", _idfMaterial, value); } _idfMaterial = value; }}
    protected long _idfsSampleType;
    public long getSampleType(){return _idfsSampleType;}
    public void setSampleType(long value) { bChanged = bChanged || _idfsSampleType != value; _intChanged = ((_intChanged == 1) || _idfsSampleType != value) ? 1 : 0; if (_idfsSampleType != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfsSampleType", _idfsSampleType, value); } _idfsSampleType = value; }}
    protected String _strFieldBarcode;
    public String getFieldBarcode(){return _strFieldBarcode;}
    public void setFieldBarcode(String value) { if(_strFieldBarcode == null && value == null) return; bChanged = bChanged || _strFieldBarcode == null || value == null || !_strFieldBarcode.equals(value); _intChanged = ((_intChanged == 1) || _strFieldBarcode == null || value == null || !_strFieldBarcode.equals(value)) ? 1 : 0; if (_strFieldBarcode == null || value == null || !_strFieldBarcode.equals(value)) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("strFieldBarcode", _strFieldBarcode, value); } _strFieldBarcode = value; }}
    protected Date _datFieldCollectionDate;
    public Date getFieldCollectionDate(){return _datFieldCollectionDate;}
    public void setFieldCollectionDate(Date value) { if(_datFieldCollectionDate == null && value == null) return; bChanged = bChanged || _datFieldCollectionDate == null || value == null || !_datFieldCollectionDate.equals(value); _intChanged = ((_intChanged == 1) || _datFieldCollectionDate == null || value == null || !_datFieldCollectionDate.equals(value)) ? 1 : 0; if (_datFieldCollectionDate == null || value == null || !_datFieldCollectionDate.equals(value)) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("datFieldCollectionDate", _datFieldCollectionDate, value); } _datFieldCollectionDate = value; }}
    protected long _idfSendToOffice;
    public long getSendToOffice(){return _idfSendToOffice;}
    public void setSendToOffice(long value) { bChanged = bChanged || _idfSendToOffice != value; _intChanged = ((_intChanged == 1) || _idfSendToOffice != value) ? 1 : 0; if (_idfSendToOffice != value) { if (_fieldChangedHandler != null) { _fieldChangedHandler.FieldChanged("idfSendToOffice", _idfSendToOffice, value); } _idfSendToOffice = value; }}
    /** Populates {@code ret} from a database cursor row; resets bChanged. */
    protected static ASSample_object FromCursor(Cursor cursor, ASSample_object ret)
    {
        Format formatterDateTime = DateHelpers.getDateTimeFormatter();
        Format formatterDate = DateHelpers.getDateFormatter();
        try {
            ret._id = cursor.getLong(cursor.getColumnIndex("id"));
            ret._strLastSynError = cursor.getString(cursor.getColumnIndex("strLastSynError"));
            ret._intStatus = cursor.getInt(cursor.getColumnIndex("intStatus"));
            ret._intChanged = cursor.getInt(cursor.getColumnIndex("intChanged"));
            String strDate = cursor.getString(cursor.getColumnIndex("datCreateDate"));
            if (strDate != null) ret._datCreateDate = (Date)formatterDateTime.parseObject(strDate);
            ret._uidOfflineCaseID = cursor.getString(cursor.getColumnIndex("uidOfflineCaseID"));
            ret._idParent = cursor.getLong(cursor.getColumnIndex("idParent"));
            ret._idfMonitoringSession = cursor.getLong(cursor.getColumnIndex("idfMonitoringSession"));
            ret._idfFarm = cursor.getLong(cursor.getColumnIndex("idfFarm"));
            ret._idfsSpeciesType = cursor.getLong(cursor.getColumnIndex("idfsSpeciesType"));
            ret._idfAnimal = cursor.getLong(cursor.getColumnIndex("idfAnimal"));
            ret._strAnimalCode = cursor.getString(cursor.getColumnIndex("strAnimalCode"));
            ret._idfsAnimalAge = cursor.getLong(cursor.getColumnIndex("idfsAnimalAge"));
            ret._strColor = cursor.getString(cursor.getColumnIndex("strColor"));
            ret._idfsAnimalGender = cursor.getLong(cursor.getColumnIndex("idfsAnimalGender"));
            ret._strName = cursor.getString(cursor.getColumnIndex("strName"));
            ret._strDescription = cursor.getString(cursor.getColumnIndex("strDescription"));
            ret._idfMaterial = cursor.getLong(cursor.getColumnIndex("idfMaterial"));
            ret._idfsSampleType = cursor.getLong(cursor.getColumnIndex("idfsSampleType"));
            ret._strFieldBarcode = cursor.getString(cursor.getColumnIndex("strFieldBarcode"));
            ret._datFieldCollectionDate = EidssUtils.ParseDate(cursor, formatterDate, "datFieldCollectionDate");
            ret._idfSendToOffice = cursor.getLong(cursor.getColumnIndex("idfSendToOffice"));
            ret.bChanged = false;
        }
        catch (ParseException e)
        {
            e.printStackTrace();
            return ret;
        }
        return ret;
    }
    /** Builds the ContentValues for persisting this object to the database. */
    protected ContentValues ContentValuesInternal()
    {
        String strDate = null;
        ContentValues ret = new ContentValues();
        Format formatterDateTime = DateHelpers.getDateTimeFormatter();
        Format formatterDate = DateHelpers.getDateFormatter();
        if (_id != 0)
            ret.put("id", _id);
        ret.put("strLastSynError", _strLastSynError);
        ret.put("intStatus", _intStatus);
        ret.put("intChanged", _intChanged);
        if (_datCreateDate != null)
            strDate = formatterDateTime.format(_datCreateDate);
        ret.put("datCreateDate", strDate);
        strDate = null;
        ret.put("uidOfflineCaseID", _uidOfflineCaseID);
        ret.put("idParent", _idParent);
        ret.put("idfMonitoringSession", _idfMonitoringSession);
        ret.put("idfFarm", _idfFarm);
        ret.put("idfsSpeciesType", _idfsSpeciesType);
        ret.put("idfAnimal", _idfAnimal);
        ret.put("strAnimalCode", _strAnimalCode);
        ret.put("idfsAnimalAge", _idfsAnimalAge);
        ret.put("strColor", _strColor);
        ret.put("idfsAnimalGender", _idfsAnimalGender);
        ret.put("strName", _strName);
        ret.put("strDescription", _strDescription);
        ret.put("idfMaterial", _idfMaterial);
        ret.put("idfsSampleType", _idfsSampleType);
        ret.put("strFieldBarcode", _strFieldBarcode);
        strDate = null;
        if (_datFieldCollectionDate != null)
            strDate = formatterDate.format(_datFieldCollectionDate);
        ret.put("datFieldCollectionDate", strDate);
        ret.put("idfSendToOffice", _idfSendToOffice);
        return ret;
    }
    /** Reads fields from a Parcel; must match the write order in ToParcel. */
    protected void FromParcel(Parcel source)
    {
        _id = source.readLong();
        _strLastSynError = source.readString();
        _intStatus = source.readInt();
        _intChanged = source.readInt();
        _datCreateDate = (Date)source.readSerializable();
        _uidOfflineCaseID = source.readString();
        _idParent = source.readLong();
        _idfMonitoringSession = source.readLong();
        _idfFarm = source.readLong();
        _idfsSpeciesType = source.readLong();
        _idfAnimal = source.readLong();
        _strAnimalCode = source.readString();
        _idfsAnimalAge = source.readLong();
        _strColor = source.readString();
        _idfsAnimalGender = source.readLong();
        _strName = source.readString();
        _strDescription = source.readString();
        _idfMaterial = source.readLong();
        _idfsSampleType = source.readLong();
        _strFieldBarcode = source.readString();
        _datFieldCollectionDate = (Date)source.readSerializable();
        _idfSendToOffice = source.readLong();
        bChanged = source.readInt() == 1;
    }
    /** Writes fields to a Parcel; must match the read order in FromParcel. */
    protected void ToParcel(Parcel dest, int flag)
    {
        dest.writeLong(_id);
        dest.writeString(_strLastSynError);
        dest.writeInt(_intStatus);
        dest.writeInt(_intChanged);
        dest.writeSerializable(_datCreateDate);
        dest.writeString(_uidOfflineCaseID);
        dest.writeLong(_idParent);
        dest.writeLong(_idfMonitoringSession);
        dest.writeLong(_idfFarm);
        dest.writeLong(_idfsSpeciesType);
        dest.writeLong(_idfAnimal);
        dest.writeString(_strAnimalCode);
        dest.writeLong(_idfsAnimalAge);
        dest.writeString(_strColor);
        dest.writeLong(_idfsAnimalGender);
        dest.writeString(_strName);
        dest.writeString(_strDescription);
        dest.writeLong(_idfMaterial);
        dest.writeLong(_idfsSampleType);
        dest.writeString(_strFieldBarcode);
        dest.writeSerializable(_datFieldCollectionDate);
        dest.writeLong(_idfSendToOffice);
        dest.writeInt(bChanged ? 1 : 0);
    }
    /** Serialises non-empty fields as XML attributes on the current element. */
    protected void ToXml(XmlSerializer serializer) throws IllegalArgumentException, IllegalStateException, IOException
    {
        serializer.attribute("", "lang", EidssUtils.getCurrentLanguage());
        serializer.attribute("", "intChanged", String.valueOf(_intChanged));
        if (_uidOfflineCaseID != null)
            serializer.attribute("", "uidOfflineCaseID", _uidOfflineCaseID);
        if (_idParent != 0)
            serializer.attribute("", "idParent", String.valueOf(_idParent));
        if (_idfMonitoringSession != 0)
            serializer.attribute("", "idfMonitoringSession", String.valueOf(_idfMonitoringSession));
        if (_idfFarm != 0)
            serializer.attribute("", "idfFarm", String.valueOf(_idfFarm));
        if (_idfsSpeciesType != 0)
            serializer.attribute("", "idfsSpeciesType", String.valueOf(_idfsSpeciesType));
        if (_idfAnimal != 0)
            serializer.attribute("", "idfAnimal", String.valueOf(_idfAnimal));
        if (_strAnimalCode != null)
            serializer.attribute("", "strAnimalCode", _strAnimalCode);
        if (_idfsAnimalAge != 0)
            serializer.attribute("", "idfsAnimalAge", String.valueOf(_idfsAnimalAge));
        if (_strColor != null)
            serializer.attribute("", "strColor", _strColor);
        if (_idfsAnimalGender != 0)
            serializer.attribute("", "idfsAnimalGender", String.valueOf(_idfsAnimalGender));
        if (_strName != null)
            serializer.attribute("", "strName", _strName);
        if (_strDescription != null)
            serializer.attribute("", "strDescription", _strDescription);
        if (_idfMaterial != 0)
            serializer.attribute("", "idfMaterial", String.valueOf(_idfMaterial));
        if (_idfsSampleType != 0)
            serializer.attribute("", "idfsSampleType", String.valueOf(_idfsSampleType));
        if (_strFieldBarcode != null)
            serializer.attribute("", "strFieldBarcode", _strFieldBarcode);
        if (_datFieldCollectionDate != null)
            serializer.attribute("", "datFieldCollectionDate", DateHelpers.FormatWithT(_datFieldCollectionDate));
        if (_idfSendToOffice != 0)
            serializer.attribute("", "idfSendToOffice", String.valueOf(_idfSendToOffice));
    }
    /** Serialises all fields to a JSON object. */
    public JSONObject ToJson() throws JSONException
    {
        JSONObject ret = new JSONObject();
        EidssUtils.putToJson(ret, "lang", EidssUtils.getCurrentLanguage());
        EidssUtils.putToJson(ret, "intChanged", _intChanged);
        EidssUtils.putToJson(ret, "uidOfflineCaseID", _uidOfflineCaseID);
        EidssUtils.putToJson(ret, "idParent", _idParent);
        EidssUtils.putToJson(ret, "idfMonitoringSession", _idfMonitoringSession);
        EidssUtils.putToJson(ret, "idfFarm", _idfFarm);
        EidssUtils.putToJson(ret, "idfsSpeciesType", _idfsSpeciesType);
        EidssUtils.putToJson(ret, "idfAnimal", _idfAnimal);
        EidssUtils.putToJson(ret, "strAnimalCode", _strAnimalCode);
        EidssUtils.putToJson(ret, "idfsAnimalAge", _idfsAnimalAge);
        EidssUtils.putToJson(ret, "strColor", _strColor);
        EidssUtils.putToJson(ret, "idfsAnimalGender", _idfsAnimalGender);
        EidssUtils.putToJson(ret, "strName", _strName);
        EidssUtils.putToJson(ret, "strDescription", _strDescription);
        EidssUtils.putToJson(ret, "idfMaterial", _idfMaterial);
        EidssUtils.putToJson(ret, "idfsSampleType", _idfsSampleType);
        EidssUtils.putToJson(ret, "strFieldBarcode", _strFieldBarcode);
        EidssUtils.putToJson(ret, "datFieldCollectionDate", _datFieldCollectionDate);
        EidssUtils.putToJson(ret, "idfSendToOffice", _idfSendToOffice);
        return ret;
    }
    /** Populates this object from a JSON object produced by ToJson. */
    public void FromJson(JSONObject json) throws JSONException, ParseException
    {
        _uidOfflineCaseID = json.getString("uidOfflineCaseID");
        _idParent = json.getLong("idParent");
        _idfMonitoringSession = json.getLong("idfMonitoringSession");
        _idfFarm = json.getLong("idfFarm");
        _idfsSpeciesType = json.getLong("idfsSpeciesType");
        _idfAnimal = json.getLong("idfAnimal");
        _strAnimalCode = json.getString("strAnimalCode");
        _idfsAnimalAge = json.getLong("idfsAnimalAge");
        _strColor = json.getString("strColor");
        _idfsAnimalGender = json.getLong("idfsAnimalGender");
        _strName = json.getString("strName");
        _strDescription = json.getString("strDescription");
        _idfMaterial = json.getLong("idfMaterial");
        _idfsSampleType = json.getLong("idfsSampleType");
        _strFieldBarcode = json.getString("strFieldBarcode");
        _datFieldCollectionDate = DateHelpers.ParseWithT(json.getString("datFieldCollectionDate"));
        _idfSendToOffice = json.getLong("idfSendToOffice");
    }
    /**
     * Populates this object from XML attributes.
     * BUGFIX: the original code called Long.getLong(...), which reads a JVM
     * system property named after the attribute value (always null here,
     * throwing NullPointerException on unboxing whenever the attribute was
     * present). Long.parseLong(...) parses the attribute text as intended.
     */
    public void FromXml(XmlPullParser parser) throws ParseException
    {
        _uidOfflineCaseID = parser.getAttributeValue("", "uidOfflineCaseID");
        _idParent = parser.getAttributeValue("", "idParent") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idParent"));
        _idfMonitoringSession = parser.getAttributeValue("", "idfMonitoringSession") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfMonitoringSession"));
        _idfFarm = parser.getAttributeValue("", "idfFarm") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfFarm"));
        _idfsSpeciesType = parser.getAttributeValue("", "idfsSpeciesType") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfsSpeciesType"));
        _idfAnimal = parser.getAttributeValue("", "idfAnimal") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfAnimal"));
        _strAnimalCode = parser.getAttributeValue("", "strAnimalCode");
        _idfsAnimalAge = parser.getAttributeValue("", "idfsAnimalAge") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfsAnimalAge"));
        _strColor = parser.getAttributeValue("", "strColor");
        _idfsAnimalGender = parser.getAttributeValue("", "idfsAnimalGender") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfsAnimalGender"));
        _strName = parser.getAttributeValue("", "strName");
        _strDescription = parser.getAttributeValue("", "strDescription");
        _idfMaterial = parser.getAttributeValue("", "idfMaterial") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfMaterial"));
        _idfsSampleType = parser.getAttributeValue("", "idfsSampleType") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfsSampleType"));
        _strFieldBarcode = parser.getAttributeValue("", "strFieldBarcode");
        _datFieldCollectionDate = DateHelpers.ParseWithT(parser.getAttributeValue("", "datFieldCollectionDate"));
        _idfSendToOffice = parser.getAttributeValue("", "idfSendToOffice") == null ? 0 : Long.parseLong(parser.getAttributeValue("", "idfSendToOffice"));
    }
}
|
<gh_stars>0
package com.atjl.validate.form.ref;
import com.atjl.validate.api.field.ListField;
import com.atjl.validate.api.field.StringField;
import com.atjl.validate.validator.noparam.Email;
import com.atjl.validate.validator.noparam.Optional;
import com.atjl.validate.validator.noparam.Require;
/**
 * Example of a validation form that contains a list field.
 * (Translated from the original Chinese comment: "validation form example".)
 */
public class FormListEg {
    // "f1": string field with Require and Email validators attached.
    StringField f1 = new StringField("f1", new Require(), new Email());
    // "f2": optional list field; FormListEg1.class presumably names the
    // element form type — TODO confirm against ListField's contract.
    ListField f2 = new ListField("f2", FormListEg1.class, new Optional());
}
|
class Vec3:
    """A minimal 3-component vector holding x, y and z values."""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def __str__(self):
        # Renders as "(x, y, z)".
        return f"({self.x}, {self.y}, {self.z})"
|
using System;
/// <summary>
/// Demonstrates sorting an array of doubles in ascending and descending order.
/// </summary>
public class ListSorter
{
    static void Main()
    {
        // Fix: the initializer was missing a comma between 8.4 and 11.2,
        // which is a compile error in C#.
        double[] array = { 4.4, 16.2, 5.2, 18.4, 8.4, 11.2 };

        // Sort the array in ascending order.
        Array.Sort(array);
        Console.WriteLine("Ascending Order:");
        foreach (double d in array)
        {
            Console.WriteLine(d);
        }

        // Reversing the already-sorted array yields descending order.
        Array.Reverse(array);
        Console.WriteLine("Descending Order:");
        foreach (double d in array)
        {
            Console.WriteLine(d);
        }
    }
}
|
#!/bin/bash
# Generates the Dart (dart-dio-next) SDK for the admin gateway API from its
# swagger JSON, opens it in VS Code, then runs the Dart build_runner codegen.
set -euo pipefail

# Resolve all paths relative to this script's own location.
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
version=v1
out_dir="$(cd "$ROOT/../.." && pwd)"
json_dir="$(cd "$ROOT/../docs/swagger/$version" && pwd)"
config_dir="$(cd "$ROOT/.." && pwd)"
flavor=dart-dio-next

openapi-generator generate -i "${json_dir}/admin.json" -g "$flavor" \
  -o "$out_dir/gateway_admin_sdk" -c "$config_dir/open-generator-admin-config.yaml" \
  --enable-post-process-file
code "$out_dir/gateway_admin_sdk"

# Fetch dependencies and run code generation inside the generated package.
# (Other maintenance commands — pub upgrade / outdated — were previously kept
# here commented out; run them manually if needed.)
cd "$out_dir/gateway_admin_sdk"
flutter pub get && flutter pub run build_runner build --delete-conflicting-outputs
cd "$ROOT"
|
<reponame>ti-co/boss-machine<filename>server/meetings.js
// Express router for the /meetings resource: list, create, and bulk-delete.
const meetingsRouter = require('express').Router();
const { getAllFromDatabase, addToDatabase, deleteAllFromDatabase, createMeeting } = require('./db');

// GET / — return every stored meeting.
meetingsRouter.get('/', (req, res, next) => {
  res.send(getAllFromDatabase('meetings'));
});

// POST / — create a fresh meeting record and return it with 201.
meetingsRouter.post('/', (req, res, next) => {
  const created = addToDatabase('meetings', createMeeting());
  res.status(201).send(created);
});

// DELETE / — remove all meetings; respond with 204 (no body).
meetingsRouter.delete('/', (req, res, next) => {
  deleteAllFromDatabase('meetings');
  res.status(204).send();
});

module.exports = meetingsRouter;
|
<filename>js/login.js
// Login form handling: on submit, hide the landing container and reveal the
// form holder instead of navigating away.
let formOne = document.getElementById('formOne');
const login = document.getElementById('login');
const container_fluid = document.getElementById('container-fluid');
// Fix: form_holder was referenced below but never defined, so every submit
// threw a ReferenceError. Looked up here by id — TODO confirm the element's
// actual id in the markup is 'form_holder'.
const form_holder = document.getElementById('form_holder');

formOne.addEventListener('submit', (e) => {
  e.preventDefault();
  container_fluid.classList.add('d-none');
  form_holder.classList.remove('d-none');
});
|
def replace_chain():
    """Adopt a longer, valid chain submitted in the request body.

    Returns True when the local chain was replaced, False otherwise.

    NOTE(review): relies on a module-level ``blockchain`` list, a Flask-style
    ``request`` object, and an ``is_valid_chain`` helper, all defined
    elsewhere in this module.
    """
    global blockchain
    candidate = request.get_json().get('new_chain')
    if candidate is None:
        # No chain submitted — nothing to do.
        return False
    if len(candidate) <= len(blockchain):
        # Only strictly longer chains are eligible replacements.
        return False
    if not is_valid_chain(candidate):
        # Longer but invalid — reject.
        return False
    blockchain = candidate
    return True
|
<filename>src/main/java/com/google/enterprise/secmgr/config/AuthnMechSaml.java
// Copyright 2009 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.enterprise.secmgr.config;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.enterprise.secmgr.json.ProxyTypeAdapter;
import com.google.gson.GsonBuilder;
import java.util.List;
import java.util.Objects;
import javax.annotation.concurrent.Immutable;
/**
 * The configuration of a SAML authentication mechanism.
 */
@Immutable
public final class AuthnMechSaml extends AuthnMechanism {
  public static final String TYPE_NAME = "SAML";
  private static final long DEFAULT_TRUST_DURATION = 30 * 60 * 1000; // 30 mins

  /** SAML entity ID of the authority; key for metadata lookup. */
  private final String entityId;
  /** Request timeout; NO_TIME_LIMIT (inherited) means unbounded. */
  private final int timeout;
  /** How long a successful verification remains trusted, in milliseconds. */
  private final long trustDuration;

  /** Full factory: all fields supplied explicitly. */
  public static AuthnMechSaml make(String name, String entityId, int timeout, long trustDuration) {
    return new AuthnMechSaml(name, entityId, timeout, trustDuration);
  }

  // Presumably the two factories below exist for backwards compatibility:
  // each fills in defaults for the trailing arguments.
  public static AuthnMechSaml make(String name, String entityId, int timeout) {
    return new AuthnMechSaml(name, entityId, timeout, getDefaultTrustDuration());
  }

  public static AuthnMechSaml make(String name, String entityId) {
    return new AuthnMechSaml(name, entityId, NO_TIME_LIMIT, DEFAULT_TRUST_DURATION);
  }

  private AuthnMechSaml(String name, String entityId, int timeout, long trustDuration) {
    super(name);
    // check* helpers (inherited) validate and normalize each field.
    this.entityId = checkString(entityId);
    this.timeout = checkTimeout(timeout);
    this.trustDuration = checkTrustDuration(trustDuration);
  }

  /** Makes a placeholder instance with a null entity ID and all defaults. */
  public static AuthnMechSaml makeEmpty() {
    return new AuthnMechSaml();
  }

  private AuthnMechSaml() {
    super();
    this.entityId = null;
    this.timeout = NO_TIME_LIMIT;
    this.trustDuration = getDefaultTrustDuration();
  }

  @Override
  public String getTypeName() {
    return TYPE_NAME;
  }

  /**
   * Get the default trust-duration value.
   */
  public static long getDefaultTrustDuration() {
    return DEFAULT_TRUST_DURATION;
  }

  @Override
  public List<CredentialTransform> getCredentialTransforms() {
    // SAML takes no input credentials and yields a verified principal.
    return ImmutableList.of(
        CredentialTransform.make(CredentialTypeSet.NONE, CredentialTypeSet.VERIFIED_PRINCIPAL));
  }

  @Override
  public AuthnMechanism copyWithNewName(String name) {
    return make(name, getEntityId(), getTimeout(), getTrustDuration());
  }

  @Override
  public boolean equals(Object object) {
    if (object == this) { return true; }
    if (!(object instanceof AuthnMechSaml)) { return false; }
    AuthnMechSaml mech = (AuthnMechSaml) object;
    // NOTE(review): trustDuration is excluded here, matching hashCode below —
    // confirm this asymmetry with the other fields is intentional.
    return super.equals(mech)
        && Objects.equals(entityId, mech.getEntityId())
        && Objects.equals(getTimeout(), mech.getTimeout());
  }

  @Override
  public int hashCode() {
    // NOTE(review): relies on a hashCode(Object...) helper on AuthnMechanism.
    return super.hashCode(entityId, timeout);
  }

  /**
   * Get the SAML entity ID for this authority.  This ID is used as a key when
   * looking up SAML metadata that describes the authority.
   *
   * @return The SAML entity ID as a string, never null or empty, normally a URI.
   */
  public String getEntityId() {
    return entityId;
  }

  @Override
  public int getTimeout() {
    return timeout;
  }

  @Override
  public long getTrustDuration() {
    return trustDuration;
  }

  /** Predicate selecting SAML mechanisms from a mixed collection. */
  public static final Predicate<AuthnMechanism> SAML_PREDICATE = new Predicate<AuthnMechanism>() {
    public boolean apply(AuthnMechanism config) {
      return (config instanceof AuthnMechSaml);
    }
  };

  /** Registers the GSON (de)serialization proxy for this type. */
  static void registerTypeAdapters(GsonBuilder builder) {
    builder.registerTypeAdapter(AuthnMechSaml.class,
        ProxyTypeAdapter.make(AuthnMechSaml.class, LocalProxy.class));
  }

  /** GSON serialization proxy: mirrors the fields, rebuilds via make(...). */
  private static final class LocalProxy extends MechanismProxy<AuthnMechSaml> {
    String entityId;
    int timeout = NO_TIME_LIMIT;
    long trustDuration = DEFAULT_TRUST_DURATION;

    @SuppressWarnings("unused")
    LocalProxy() {
    }

    @SuppressWarnings("unused")
    LocalProxy(AuthnMechSaml mechanism) {
      super(mechanism);
      entityId = mechanism.getEntityId();
      timeout = mechanism.getTimeout();
      trustDuration = mechanism.getTrustDuration();
    }

    @Override
    public AuthnMechSaml build() {
      return make(name, entityId, timeout, trustDuration);
    }
  }
}
|
"""
Create a program for converting currency from one to another
"""
def convert_currency(amount, from_currency, to_currency):
    """Convert ``amount`` from one currency to another.

    NOTE(review): depends on an external ``get_exchange_rates`` helper (not
    defined in this file) that presumably returns a rate mapping keyed by
    currency code — confirm against its definition.
    """
    exchange_rates = get_exchange_rates(from_currency, to_currency)
    ratio = exchange_rates[to_currency] / exchange_rates[from_currency]
    return amount * ratio
|
def find_total_amount(tx_list):
    """Return the total value of transactions given as (price, quantity) pairs."""
    # Generator expression avoids building an intermediate list.
    return sum(price * quantity for price, quantity in tx_list)


if __name__ == '__main__':
    # Fix: tx_list was referenced here without ever being defined, so running
    # the script raised NameError. Use a small demo list of (price, quantity).
    tx_list = [(10.0, 2), (5.0, 4)]
    print(find_total_amount(tx_list))
|
#!/bin/bash
# Smoke-test the CAS config server: build artifacts are renamed, a throwaway
# keystore is generated, the WAR is launched, and the actuator endpoint is
# probed for HTTP 200 before tearing everything down.

mv webapp/cas-server-webapp-config-server/build/libs/cas-server-webapp-config-server-*.war \
  webapp/cas-server-webapp-config-server/build/libs/casconfigserver.war

dname="${dname:-CN=cas.example.org,OU=Example,OU=Org,C=US}"
subjectAltName="${subjectAltName:-dns:example.org,dns:localhost,ip:127.0.0.1}"
keystore="./thekeystore"
echo "Generating keystore ${keystore} for CAS with DN=${dname}, SAN=${subjectAltName}"
[ -f "${keystore}" ] && rm "${keystore}"
# NOTE: hard-coded throwaway credentials (changeit/Mellon) — test-only.
keytool -genkey -noprompt -alias cas -keyalg RSA -keypass changeit -storepass changeit \
  -keystore "${keystore}" -dname "${dname}" -ext SAN="${subjectAltName}"

echo "Launching CAS config server web application..."
java -jar webapp/cas-server-webapp-config-server/build/libs/casconfigserver.war \
  --spring.security.user.password=Mellon --server.ssl.key-store="${keystore}" \
  --encrypt.key-store.location="file:${keystore}" &
pid=$!
echo "Launched CAS config server with pid ${pid}. Waiting for CAS config server to come online..."
sleep 30

# Probe the actuator endpoint; keep only the HTTP status code.
# (Backticks replaced with $(...) for readability and safe nesting.)
http_code=$(curl -L -k --user casuser:Mellon --connect-timeout 60 -s \
  -o /dev/null -I -w "%{http_code}" https://localhost:8888/casconfigserver/actuator/cas/default)

kill -9 "${pid}"
[ -f "${keystore}" ] && rm "${keystore}"
echo "CAS config server is responding with HTTP status code ${http_code}."
if [ "$http_code" == 200 ]; then
  echo "CAS config server is successfully up and running."
  exit 0
else
  echo "CAS config server failed to start successfully."
  exit 1
fi
|
#!/bin/bash
# Creates the 'sonar' database and loads its schema; exits early if it exists.

# Fix: the command substitution is now quoted. Previously, when the query
# returned nothing (database absent), [ received too few arguments and the
# test itself failed with a syntax error instead of falling through.
if [ "$( psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" -tAc "SELECT 1 FROM pg_database WHERE datname = 'sonar'" )" = '1' ]; then
  echo "Database already exists"
  exit 1
fi

psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" -c "CREATE DATABASE sonar"
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" -c "GRANT ALL PRIVILEGES ON DATABASE sonar TO postgres"
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" -f /docker-entrypoint-initdb.d/sql/sonarqube.sql sonar
|
package cim4j;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
import cim4j.WindGenTurbineType3IEC;
import java.lang.ArrayIndexOutOfBoundsException;
import java.lang.IllegalArgumentException;
import cim4j.Simple_Float;
import cim4j.PU;
import cim4j.Seconds;
/*
IEC Type 3A generator set model. Reference: IEC Standard 61400-27-1 Section 6.6.3.2.
*/
public class WindGenTurbineType3aIEC extends WindGenTurbineType3IEC
{
    // Slots for attributes whose values are other CIM classes, indexed by
    // WindGenTurbineType3aIEC_class_attributes_enum.ordinal().
    private BaseClass[] WindGenTurbineType3aIEC_class_attributes;
    // Slots for primitive-valued attributes, indexed by
    // WindGenTurbineType3aIEC_primitive_builder.ordinal().
    private BaseClass[] WindGenTurbineType3aIEC_primitive_attributes;
    // RDF identifier of this element.
    private java.lang.String rdfid;

    public void setRdfid(java.lang.String id) {
        rdfid = id;
    }

    // Factory contract: wrap a raw string value in its typed CIM primitive.
    private abstract interface PrimitiveBuilder {
        public abstract BaseClass construct(java.lang.String value);
    };

    // One builder per primitive attribute; each constant knows which CIM
    // primitive type wraps its string value.
    private enum WindGenTurbineType3aIEC_primitive_builder implements PrimitiveBuilder {
        kpc(){
            public BaseClass construct (java.lang.String value) {
                return new Simple_Float(value);
            }
        },
        xs(){
            public BaseClass construct (java.lang.String value) {
                return new PU(value);
            }
        },
        tic(){
            public BaseClass construct (java.lang.String value) {
                return new Seconds(value);
            }
        },
        // Sentinel so values().length equals the attribute count.
        LAST_ENUM() {
            public BaseClass construct (java.lang.String value) {
                return new cim4j.Integer("0");
            }
        };
    }

    // Names of the class-valued attribute slots (parallel to the enum above).
    private enum WindGenTurbineType3aIEC_class_attributes_enum {
        kpc,
        xs,
        tic,
        LAST_ENUM;
    }

    public WindGenTurbineType3aIEC() {
        // Pre-size both slot arrays from the enum cardinalities.
        WindGenTurbineType3aIEC_primitive_attributes = new BaseClass[WindGenTurbineType3aIEC_primitive_builder.values().length];
        WindGenTurbineType3aIEC_class_attributes = new BaseClass[WindGenTurbineType3aIEC_class_attributes_enum.values().length];
    }

    // Stores a class-valued attribute in its slot; logs on a bad index.
    public void updateAttributeInArray(WindGenTurbineType3aIEC_class_attributes_enum attrEnum, BaseClass value) {
        try {
            WindGenTurbineType3aIEC_class_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Stores a primitive-valued attribute in its slot; logs on a bad index.
    public void updateAttributeInArray(WindGenTurbineType3aIEC_primitive_builder attrEnum, BaseClass value) {
        try {
            WindGenTurbineType3aIEC_primitive_attributes[attrEnum.ordinal()] = value;
        }
        catch (ArrayIndexOutOfBoundsException aoobe) {
            System.out.println("No such attribute: " + attrEnum.name() + ": " + aoobe.getMessage());
        }
    }

    // Sets a class-valued attribute by name; unknown names are delegated to
    // the superclass.
    public void setAttribute(java.lang.String attrName, BaseClass value) {
        try {
            WindGenTurbineType3aIEC_class_attributes_enum attrEnum = WindGenTurbineType3aIEC_class_attributes_enum.valueOf(attrName);
            updateAttributeInArray(attrEnum, value);
            System.out.println("Updated WindGenTurbineType3aIEC, setting " + attrName);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    /* If the attribute is a String, it is a primitive and we will make it into a BaseClass */
    public void setAttribute(java.lang.String attrName, java.lang.String value) {
        try {
            WindGenTurbineType3aIEC_primitive_builder attrEnum = WindGenTurbineType3aIEC_primitive_builder.valueOf(attrName);
            updateAttributeInArray(attrEnum, attrEnum.construct(value));
            System.out.println("Updated WindGenTurbineType3aIEC, setting " + attrName + " to: " + value);
        }
        catch (IllegalArgumentException iae)
        {
            super.setAttribute(attrName, value);
        }
    }

    // Renders the attributes of this class; topClass=true also appends the
    // superclass chain, topClass=false yields just a short RDFID reference.
    public java.lang.String toString(boolean topClass) {
        java.lang.String result = "";
        java.lang.String indent = "";
        if (topClass) {
            for (WindGenTurbineType3aIEC_primitive_builder attrEnum: WindGenTurbineType3aIEC_primitive_builder.values()) {
                BaseClass bc = WindGenTurbineType3aIEC_primitive_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += "    WindGenTurbineType3aIEC." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            for (WindGenTurbineType3aIEC_class_attributes_enum attrEnum: WindGenTurbineType3aIEC_class_attributes_enum.values()) {
                BaseClass bc = WindGenTurbineType3aIEC_class_attributes[attrEnum.ordinal()];
                if (bc != null) {
                    result += "    WindGenTurbineType3aIEC." + attrEnum.name() + "(" + bc.debugString() + ")" + " " + bc.toString(false) + System.lineSeparator();
                }
            }
            result += super.toString(true);
        }
        else {
            result += "(WindGenTurbineType3aIEC) RDFID: " + rdfid;
        }
        return result;
    }

    public final java.lang.String debugName = "WindGenTurbineType3aIEC";
    public java.lang.String debugString()
    {
        return debugName;
    }

    // This class has no direct string value; log and ignore.
    public void setValue(java.lang.String s) {
        System.out.println(debugString() + " is not sure what to do with " + s);
    }

    public BaseClass construct() {
        return new WindGenTurbineType3aIEC();
    }
};
|
package cyclops.pure.typeclasses.taglessfinal;
import lombok.ToString;
import lombok.Value;
/** Sample domain objects for the tagless-final type-class examples. */
public class Cases {

    /**
     * Immutable account value object.
     * Lombok's {@code @Value} makes the fields private final with getters;
     * {@code @lombok.With} generates the {@code withBalance(...)} copy method
     * used by debit/credit below.
     */
    @Value @lombok.With @ToString
    public static class Account {
        double balance;
        long id;

        // Returns a copy of this account with the balance reduced by amount.
        public Account debit(double amount){
            return withBalance(balance-amount);
        }

        // Returns a copy of this account with the balance increased by amount.
        public Account credit(double amount){
            return withBalance(balance+amount);
        }
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.core ;
import java.util.Collections ;
import java.util.LinkedList ;
import java.util.List ;
import org.apache.jena.atlas.lib.Pair ;
import org.apache.jena.graph.Node ;
/** Capture a record of quad actions */
public class DatasetChangesCapture implements DatasetChanges {
    // ArrayLists have an annoying issue that they grow by copying the internal
    // []-array.
    // This growth is by a fixed factor of adding 50% which for an array
    // with little guidance as to likely size, can lead to undesirable GC
    // and copy-time issues.
    // Using a LinkedList avoids this although it adds overhead for list
    // entries.
    private List<Pair<QuadAction, Quad>> actions;

    final private boolean captureAdd;
    final private boolean captureDelete;
    final private boolean captureNoAdd;
    final private boolean captureNoDelete;

    /** Capture quad actions, excluding no-ops */
    public DatasetChangesCapture() {
        this(true, true, false, false);
    }

    /**
     * Capture quad actions, either including or excluding the "no ops"
     *
     * @param recordNoOps
     *            Whether to record {@link QuadAction#NO_ADD} and
     *            {@link QuadAction#NO_DELETE}
     */
    public DatasetChangesCapture(boolean recordNoOps) {
        this(true, true, recordNoOps, recordNoOps);
    }

    /** Capture quad actions, selectively by category */
    public DatasetChangesCapture(boolean captureAdd, boolean captureDelete, boolean captureNoAdd, boolean captureNoDelete) {
        this.captureAdd = captureAdd;
        this.captureDelete = captureDelete;
        this.captureNoAdd = captureNoAdd;
        this.captureNoDelete = captureNoDelete;
        this.actions = new LinkedList<>();
    }

    /** The actions recorded.
     * Only valid until the next {@code start} call.
     */
    public List<Pair<QuadAction, Quad>> getActions() {
        // Fix: after reset() the backing list is null; previously this
        // returned an unmodifiable wrapper around null, which throws
        // NullPointerException as soon as the caller touches it. Return an
        // immutable empty list instead.
        if ( actions == null )
            return Collections.emptyList();
        return Collections.unmodifiableList(actions);
    }

    @Override
    public void start() {
        // Re-create the list if a previous reset() released it.
        if ( actions == null )
            actions = new LinkedList<>();
    }

    @Override
    public void change(QuadAction qaction, Node g, Node s, Node p, Node o) {
        Quad q = new Quad(g, s, p, o);
        Pair<QuadAction, Quad> pair = Pair.create(qaction, q);
        // Record the action only if its category was enabled at construction.
        switch (qaction) {
            case ADD :
                if ( captureAdd )
                    actions.add(pair);
                break;
            case DELETE :
                if ( captureDelete )
                    actions.add(pair);
                break;
            case NO_ADD :
                if ( captureNoAdd )
                    actions.add(pair);
                break;
            case NO_DELETE :
                if ( captureNoDelete )
                    actions.add(pair);
                break;
        }
    }

    @Override
    public void finish() {}

    @Override
    public void reset() {
        // Clear and release the list; start() will lazily re-create it.
        if ( actions != null )
            actions.clear();
        actions = null;
    }
}
|
<reponame>ZhekaiLi/Code
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.Queue;
// USSS: Unweighted Single Source Shortest (path)
// Breadth-first search from a single source vertex; BFS layers give the
// shortest hop-counts in an unweighted graph.
public class USSSPath {

    private Graph graph;
    private boolean[] seen;     // seen[v]: v has been enqueued already
    private int source;
    private int[] parent;       // parent[v] in the BFS tree; -1 if unreached
    private int[] hops;         // hop count from source; -1 if unreached

    public USSSPath(Graph G, int s) {
        this.graph = G;
        this.source = s;
        int n = G.V();
        seen = new boolean[n];
        parent = new int[n];
        hops = new int[n];
        for (int v = 0; v < n; v++) {
            parent[v] = -1;
            hops[v] = -1;
        }
        bfs(s);
    }

    /** Standard BFS filling seen[], parent[] and hops[]. */
    private void bfs(int start) {
        Queue<Integer> frontier = new LinkedList<>();
        frontier.add(start);
        seen[start] = true;
        parent[start] = start;
        hops[start] = 0;
        while (!frontier.isEmpty()) {
            int current = frontier.remove();
            for (int next : graph.adj(current)) {
                if (seen[next])
                    continue;
                seen[next] = true;
                parent[next] = current;
                hops[next] = hops[current] + 1;
                frontier.add(next);
            }
        }
    }

    /** True iff vertex t is reachable from the source. */
    public boolean isConnectedTo(int t) {
        graph.validateVertex(t);
        return seen[t];
    }

    /** The shortest source-to-t path (empty if unreachable). */
    public Iterable<Integer> path(int t) {
        ArrayList<Integer> route = new ArrayList<>();
        if (!isConnectedTo(t))
            return route;
        // Walk the BFS tree back to the source, then reverse.
        for (int cur = t; cur != source; cur = parent[cur])
            route.add(cur);
        route.add(source);
        Collections.reverse(route);
        return route;
    }

    /** Hop distance from the source to t (-1 if unreachable). */
    public int dis(int t) {
        graph.validateVertex(t);
        return hops[t];
    }

    public static void main(String[] args)
    {
        Graph g = new Graph("g.txt");
        USSSPath usssPath = new USSSPath(g, 0);
        System.out.println(usssPath.path(6));
        System.out.println(usssPath.dis(6));
    }
}
|
package com.company;
public class Exercise_6_12 {
    public static void main(String[] args) {
        printChars('1', 'Z', 10);
    }

    /**
     * Prints every character from ch1 through ch2 (inclusive),
     * numberPerLine characters per line, separated by single spaces.
     */
    public static void printChars(char ch1, char ch2, int numberPerLine) {
        int printed = 0;
        for (char c = ch1; c <= ch2; c++) {
            printed++;
            boolean lineIsFull = (printed % numberPerLine == 0);
            if (lineIsFull) {
                System.out.println(c);
            } else {
                System.out.print(c + " ");
            }
        }
    }
}
|
<filename>sitewhere-java-model/src/main/java/com/sitewhere/rest/model/device/event/kafka/ProcessedEventPayload.java
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.rest.model.device.event.kafka;
import com.sitewhere.spi.device.event.IDeviceEvent;
import com.sitewhere.spi.device.event.IDeviceEventContext;
import com.sitewhere.spi.device.event.kafka.IProcessedEventPayload;
/**
 * Event payload after persistence.
 * Pairs a persisted device event with the context it was processed under,
 * for passing through the Kafka pipeline.
 */
public class ProcessedEventPayload implements IProcessedEventPayload {

    /** Extra context */
    private IDeviceEventContext eventContext;

    /** Event */
    private IDeviceEvent event;

    /*
     * @see com.sitewhere.spi.microservice.kafka.payload.IEnrichedEventPayload#
     * getEventContext()
     */
    @Override
    public IDeviceEventContext getEventContext() {
        return eventContext;
    }

    // Sets the processing context carried alongside the event.
    public void setEventContext(IDeviceEventContext eventContext) {
        this.eventContext = eventContext;
    }

    /*
     * @see
     * com.sitewhere.spi.microservice.kafka.payload.IEnrichedEventPayload#getEvent()
     */
    @Override
    public IDeviceEvent getEvent() {
        return event;
    }

    // Sets the persisted device event.
    public void setEvent(IDeviceEvent event) {
        this.event = event;
    }
}
|
<filename>dependencies/mozilla-js/1.8.0/jsd/classes/netscape/jsdebug/DebugController.java
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is mozilla.org code.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1998
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package netscape.jsdebug;
import netscape.util.Hashtable;
import netscape.security.PrivilegeManager;
import netscape.security.ForbiddenTargetException;
/**
* This is the master control panel for observing events in the VM.
* Each method setXHook() must be passed an object that extends
* the class XHook. When an event of the specified type
* occurs, a well-known method on XHook will be called (see the
* various XHook classes for details). The method call takes place
* on the same thread that triggered the event in the first place,
* so that any monitors held by the thread which triggered the hook
* will still be owned in the hook method.
* <p>
* This class is meant to be a singleton and has a private constructor.
* Call the static <code>getDebugController()</code> to get this object.
* <p>
* Note that all functions use netscape.security.PrivilegeManager to verify
* that the caller has the "Debugger" privilege. The exception
 * netscape.security.ForbiddenTargetException will be thrown if this is
* not enabled.
*
* @author <NAME>
* @author <NAME>
* @version 1.0
* @since 1.0
* @see netscape.security.PrivilegeManager
* @see netscape.security.ForbiddenTargetException
*/
public final class DebugController {
private static final int majorVersion = 1;
private static final int minorVersion = 0;
private static DebugController controller;
private ScriptHook scriptHook;
private Hashtable instructionHookTable;
private InterruptHook interruptHook;
private DebugBreakHook debugBreakHook;
private JSErrorReporter errorReporter;
/**
* Get the DebugController object for the current VM.
* <p>
* @return the singleton DebugController
*/
public static synchronized DebugController getDebugController()
throws ForbiddenTargetException
{
try {
PrivilegeManager.checkPrivilegeEnabled("Debugger");
if (controller == null)
controller = new DebugController();
return controller;
} catch (ForbiddenTargetException e) {
System.out.println("failed in check Priv in DebugController.getDebugController()");
e.printStackTrace(System.out);
throw e;
}
}
private DebugController()
{
scriptTable = new Hashtable();
_setController(true);
}
/**
* Request notification of Script loading events.
* <p>
* Whenever a Script is loaded into or unloaded from the VM
* the appropriate method of the ScriptHook argument will be called.
* Callers are responsible for chaining hooks if chaining is required.
*
* @param h new script hook
* @return the previous hook object (null if none)
*/
public synchronized ScriptHook setScriptHook(ScriptHook h)
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
ScriptHook oldHook = scriptHook;
scriptHook = h;
return oldHook;
}
/**
* Get the current observer of Script events.
* <p>
* @return the current script hook (null if none)
*/
public ScriptHook getScriptHook()
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
return scriptHook;
}
/**
* Set a hook at the given program counter value.
* <p>
* When a thread reaches that instruction, a ThreadState
* object will be created and the appropriate method
* of the hook object will be called. Callers are responsible
* for chaining hooks if chaining is required.
*
* @param pc pc at which hook should be set
* @param h new hook for this pc
* @return the previous hook object (null if none)
*/
public synchronized InstructionHook setInstructionHook(
PC pc,
InstructionHook h)
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
InstructionHook oldHook;
if (instructionHookTable == null) {
instructionHookTable = new Hashtable();
}
oldHook = (InstructionHook) instructionHookTable.get(pc);
instructionHookTable.put(pc, h);
setInstructionHook0(pc);
return oldHook;
}
private native void setInstructionHook0(PC pc);
/**
* Get the hook at the given program counter value.
* <p>
* @param pc pc for which hook should be found
* @return the hook (null if none)
*/
public InstructionHook getInstructionHook(PC pc)
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
return getInstructionHook0(pc);
}
// called by native function
private InstructionHook getInstructionHook0(PC pc)
{
if (instructionHookTable == null)
return null;
else
return (InstructionHook) instructionHookTable.get(pc);
}
/**************************************************************/
/**
* Set the hook at to be called when interrupts occur.
* <p>
* The next instruction which starts to execute after
* <code>sendInterrupt()</code> has been called will
* trigger a call to this hook. A ThreadState
* object will be created and the appropriate method
* of the hook object will be called. Callers are responsible
* for chaining hooks if chaining is required.
*
* @param h new hook
* @return the previous hook object (null if none)
* @see netscape.jsdebug.DebugController#sendInterrupt
*/
public synchronized InterruptHook setInterruptHook( InterruptHook h )
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
InterruptHook oldHook = interruptHook;
interruptHook = h;
return oldHook;
}
/**
* Get the current hook to be called on interrupt
* <p>
* @return the hook (null if none)
*/
public InterruptHook getInterruptHook()
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
return interruptHook;
}
/**
* Cause the interrupt hook to be called when the next
* JavaScript instruction starts to execute.
* <p>
* The interrupt is self clearing
* @see netscape.jsdebug.DebugController#setInterruptHook
*/
public void sendInterrupt()
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
sendInterrupt0();
}
private native void sendInterrupt0();
/**************************************************************/
/**
* Set the hook at to be called when a <i>debug break</i> is requested
* <p>
* Set the hook to be called when <i>JSErrorReporter.DEBUG</i> is returned
* by the <i>error reporter</i> hook. When that happens a ThreadState
* object will be created and the appropriate method
* of the hook object will be called. Callers are responsible
* for chaining hooks if chaining is required.
*
* @param h new hook
* @return the previous hook object (null if none)
* @see netscape.jsdebug.DebugController#setErrorReporter
* @see netscape.jsdebug.JSErrorReporter
*/
public synchronized DebugBreakHook setDebugBreakHook( DebugBreakHook h )
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
DebugBreakHook oldHook = debugBreakHook;
debugBreakHook = h;
return oldHook;
}
/**
* Get the current hook to be called on debug break
* <p>
* @return the hook (null if none)
*/
public DebugBreakHook getDebugBreakHook()
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
return debugBreakHook;
}
/**************************************************************/
/**
 * Get the 'handle' which corresponds to the native code representing the
* instance of the underlying JavaScript Debugger context.
* <p>
* This would not normally be useful in java. Some of the other classes
* in this package need this. It remains public mostly for historical
* reasons. It serves as a check to see that the native classes have been
* loaded and the built-in native JavaScript Debugger support has been
* initialized. This DebugController is not valid (or useful) when it is
* in a state where this native context equals 0.
*
* @return the native context (0 if none)
*/
public int getNativeContext()
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
// System.out.println( "nativecontext = " + _nativeContext + "\n" );
return _nativeContext;
}
    // Native helper toggling registration of this controller with the
    // built-in debugger support.
    // NOTE(review): exact native semantics of 'set' are not visible in this
    // chunk -- confirm against the native implementation.
    private native void _setController( boolean set );
    // Table of scripts; producers/consumers are not visible in this chunk.
    private Hashtable scriptTable;
    // Opaque handle to the native debugger context; 0 means uninitialized
    // (see getNativeContext()).
    private int _nativeContext;
/**
* Execute a string as a JavaScript script within a stack frame
* <p>
* This method can be used to execute arbitrary sets of statements on a
* stopped thread. It is useful for inspecting and modifying data.
* <p>
* This method can only be called while the JavaScript thread is stopped
* - i.e. as part of the code responding to a hook. Thgis method
* <b>must</b> be called on the same thread as was executing when the
* hook was called.
* <p>
* If an error occurs while execuing this code, then the error
* reporter hook will be called if present.
*
* @param frame the frame context in which to evaluate this script
* @param text the script text
* @param filename where to tell the JavaScript engine this code came
* from (it is usually best to make this the same as the filename of
* code represented by the frame)
* @param lineno the line number to pass to JS ( >=1 )
* @return The result of the script execution converted to a string.
* (null if the result was null or void)
*/
public String executeScriptInStackFrame( JSStackFrameInfo frame,
String text,
String filename,
int lineno )
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
return executeScriptInStackFrame0( frame, text, filename, lineno );
}
    // Native backend for executeScriptInStackFrame(); same parameters and
    // return contract as the public wrapper above.
    private native String executeScriptInStackFrame0( JSStackFrameInfo frame,
                                                      String text,
                                                      String filename,
                                                      int lineno );
/**
* Set the hook at to be called when a JavaScript error occurs
* <p>
* @param er new error reporter hook
* @return the previous hook object (null if none)
* @see netscape.jsdebug.JSErrorReporter
*/
public JSErrorReporter setErrorReporter(JSErrorReporter er)
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
JSErrorReporter old = errorReporter;
errorReporter = er;
return old;
}
/**
* Get the hook at to be called when a JavaScript error occurs
* <p>
* @return the hook object (null if none)
* @see netscape.jsdebug.JSErrorReporter
*/
public JSErrorReporter getErrorReporter()
throws ForbiddenTargetException
{
PrivilegeManager.checkPrivilegeEnabled("Debugger");
return errorReporter;
}
/**
* Get the major version number of this module
* <p>
* @return the version number
*/
public static int getMajorVersion() {return majorVersion;}
/**
* Get the minor version number of this module
* <p>
* @return the version number
*/
public static int getMinorVersion() {return minorVersion;}
    // Version numbers reported by the native debugger support.
    // NOTE(review): call sites are not visible in this chunk -- presumably
    // used to check that Java and native sides match; confirm.
    private static native int getNativeMajorVersion();
    private static native int getNativeMinorVersion();
}
|
import getGlobals from './getGlobals.js';
/**
 * Return the trailing boilerplate for a bundle in the given module format.
 *
 * @param {string} format - one of 'es', 'amd', 'cjs', 'iife', 'umd'
 * @param {string} name - the exported binding name
 * @param {object} options - bundle options (used to resolve globals)
 * @param {Array} imports - external imports (used for the IIFE arg list)
 * @returns {string} the closing code for the bundle
 * @throws {Error} for unsupported formats
 */
export default function getOutro ( format, name, options, imports ) {
	switch ( format ) {
		case 'es':
			return `export default ${name};`;
		case 'amd':
			return `return ${name};\n\n});`;
		case 'cjs':
			return `module.exports = ${name};`;
		case 'iife': {
			// The IIFE factory receives its dependencies as arguments.
			const globalNames = getGlobals( imports, options );
			return `return ${name};\n\n}(${globalNames.join( ', ' )}));`;
		}
		case 'umd':
			return `return ${name};\n\n})));`;
		default:
			throw new Error( `Not implemented: ${format}` );
	}
}
|
<?php
// Validate the contact-form POST data; on failure return an array of error
// strings, on success store the message and echo a confirmation.
function process_form() {
    // Collected validation errors (empty means the submission is valid).
    $errors = array();
    // Required-field checks.
    if (empty($_POST['name'])) {
        $errors[] = 'The Name field is required';
    }
    if (empty($_POST['email'])) {
        $errors[] = 'The Email Address field is required';
    }
    if (empty($_POST['message'])) {
        $errors[] = 'The Message field is required';
    }
    // Only validate the email format when an address was supplied; the
    // previous version also reported "Invalid email address" for an empty
    // field, producing two errors for one missing value.
    if (!empty($_POST['email']) && !filter_var($_POST['email'], FILTER_VALIDATE_EMAIL)) {
        $errors[] = 'Invalid email address';
    }
    // If there are errors, return them without touching the database.
    if (!empty($errors)) {
        return $errors;
    }
    // SECURITY: addslashes() is NOT a safe SQL escape and this query is
    // built by string interpolation -- it is vulnerable to SQL injection.
    // Migrate to prepared statements (mysqli/PDO). Note also that the
    // mysql_* API is deprecated since PHP 5.5 and removed in PHP 7.
    $name = addslashes($_POST['name']);
    $email = addslashes($_POST['email']);
    $message = addslashes($_POST['message']);
    // Build and run the insert.
    $query = "INSERT INTO messages (name, email, message)
    VALUES ('$name', '$email', '$message')";
    mysql_query($query);
    // Notify the user the message was sent successfully.
    echo 'Your message has been sent!';
}
?>
|
package de.unibi.agbi.biodwh2.reactome.entities;
import de.unibi.agbi.biodwh2.reactome.entities.Drug;
/**
 * Entity for a chemical drug; inherits all fields and behavior from
 * {@link Drug} and adds no members of its own.
 * <p>
 * Created by manuel on 12.12.19.
 */
public class ChemicalDrug extends Drug {
    // Explicit no-arg constructor.
    // NOTE(review): likely required for reflective instantiation by a
    // parser/mapper -- callers are not visible in this chunk; confirm.
    public ChemicalDrug() {
    }
}
|
<filename>client/src/app/settings.service.spec.ts
import { TestBed } from '@angular/core/testing';
import { SettingsService } from './settings.service';
// Unit tests for SettingsService: validation and localStorage persistence.
describe('SettingsService', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({});
    // Start every spec from a clean persisted state.
    localStorage.clear();
  });
  it('should be created', () => {
    const service: SettingsService = TestBed.get(SettingsService);
    expect(service).toBeTruthy();
  });
  // NOTE(review): areSettingsValid is read as a property here; if it is a
  // method on the service, this asserts on the function object instead of
  // its result -- confirm against the service implementation.
  it("should return false if settings are not valid", async () => {
    const service: SettingsService = TestBed.get(SettingsService);
    service.username = "";
    service.defaultCurrency = "";
    expect(service.areSettingsValid).toBeFalsy();
  });
  // Only verifies save() does not throw; there is no read-back assertion.
  it("should save to local storage", async () => {
    const service: SettingsService = TestBed.get(SettingsService);
    service.defaultCurrency = "EUR";
    service.username = "manfredo";
    service.save();
  });
  it("should read settings from local storage", async () => {
    const service: SettingsService = TestBed.get(SettingsService);
    service.defaultCurrency = "EUR";
    service.username = "manfredo";
    service.save();
    // NOTE(review): TestBed.get returns the same singleton instance within
    // one TestBed, so this may not exercise a fresh load from localStorage
    // -- confirm.
    const service2: SettingsService = TestBed.get(SettingsService);
    expect(service2.defaultCurrency).toBe("EUR");
    expect(service2.username).toBe("manfredo");
  });
});
|
#!/bin/bash
# Create a new dated Markdown post under posts/ and open it in the editor.
# Fixes vs. previous version: a stray ']' after the tr "[:lower:]" set made
# GNU tr fail ("misaligned construct"), and 'tr -dc "[a-z0-9 ]"' also kept
# literal '[' and ']' characters in the slug.

# Posts are named YYYY-MM-DD-<slug>.md
date_pattern="$(date "+%Y-%m-%d")-"

printf '%s' "Post name > "
read -r title

# Build a URL-friendly slug from the title, step by step.
clean_title=$(printf '%s' "$title" | tr '[:upper:]' '[:lower:]')               # lower case
clean_title=$(printf '%s' "$clean_title" | iconv -f utf-8 -t ascii//translit)  # remove accents
clean_title=$(printf '%s' "$clean_title" | tr -dc 'a-z0-9 ')                   # keep spaces, letters and numbers
clean_title=$(printf '%s' "$clean_title" | tr ' ' '-')                         # replace spaces by dashes

filename="${date_pattern}${clean_title}.md"
# Best effort: empty author if git user.name is unset.
author=$(git config --get user.name || true)

# Write the front-matter skeleton ($title/$author expand in the here-doc).
cat > "posts/$filename" <<EOF
---
title: $title
author: $author
tags:
---
EOF

# Open the new post; fall back to vi when EDITOR is unset.
"${EDITOR:-vi}" "posts/$filename"
|
<reponame>itsalaidbacklife/sails-react-monorepo-example
// Fixture data: one record with a string id, an owning user id, and a
// session-ish data payload.
// NOTE(review): the consumer of this module is not visible here -- confirm
// the expected schema against its loader.
module.exports = [
  {
    id: 'c', // auto-generated, but still string required
    user: 1,
    data: {
      isLoggedIn: true
    }
  }
];
|
/*
* Copyright (c) 2021. <NAME>
*/
import {Mock} from 'moq.ts';
import {WeatherStationPlatform} from '../src/platform';
import {Logging} from 'homebridge/lib/logger';
import {API} from 'homebridge';
// Smoke test: the platform constructs successfully from mocked
// logger/config/API collaborators.
describe('FJW4 Platform', () => {
  it('should be successfully created', () => {
    // The logger mock stubs only the info() call made during construction.
    const logger = new Mock<Logging>()
      .setup((instance) => instance.info('Finished initializing platform:', 'NAME'))
      .returns()
      .object();
    const config = {
      platform: 'PLATFORM',
      name: 'NAME',
      options: {
        'username': 'USERNAME',
        'password': 'PASSWORD',
      },
    };
    // Bare API mock: the constructor is not expected to call into it here.
    const api = new Mock<API>().object();
    const platform = new WeatherStationPlatform(logger, config, api);
    expect(platform).toBeInstanceOf(WeatherStationPlatform);
  });
});
|
<reponame>coboyoshi/uvicore<gh_stars>10-100
import pytest
import uvicore
import sqlalchemy as sa
from uvicore.support.dumper import dump
# These will also move into test_* as they each have _builder, _encoded, _hybrid variants inside them
@pytest.mark.asyncio
async def test_select_all(app1):
    """Select every row from app1's posts table and verify the seeded slugs."""
    from app1.database.tables.posts import Posts
    query = Posts.table.select()
    results = await uvicore.db.fetchall(query, connection='app1')
    dump(results)
    # Seed data: seven posts with predictable unique slugs.
    assert [
        'test-post1',
        'test-post2',
        'test-post3',
        'test-post4',
        'test-post5',
        'test-post6',
        'test-post7'
    ] == [x.unique_slug for x in results]
@pytest.mark.asyncio
async def test_join1(app1):
    """Join contacts to users via the implied single FK and filter by email."""
    from app1.database.tables.contacts import Contacts
    from app1.database.tables.users import Users
    # Implied join on columns works only if there is one foreign key.
    # Won't work on posts because posts has both a creator_id and owner_id
    # posts = Posts.table
    # users = Users.table
    # query = (
    #     sa.select([posts, users])
    #     .select_from(posts.join(users))
    #     .where(users.c.email == '<EMAIL>')
    #     .where(posts.c.id == 3)
    # )
    # results = await uvicore.db.fetchall(query, connection='app1')
    # assert ['<EMAIL>'] == [x.email for x in results]
    contacts = Contacts.table
    users = Users.table
    query = (
        sa.select([contacts, users])
        .select_from(contacts.join(users))
        .where(users.c.email == '<EMAIL>')
    )
    results = await uvicore.db.fetchall(query, connection='app1')
    dump(results)
    assert ['<EMAIL>'] == [x.email for x in results]
@pytest.mark.asyncio
async def test_join2(app1):
    """Three-way join (posts -> users -> contacts, plus comments).

    Duplicate slugs in the expectation come from the one-post-to-many-comments
    join fan-out.
    """
    from sqlalchemy import select
    from app1.database.tables.posts import Posts
    from app1.database.tables.users import Users
    from app1.database.tables.contacts import Contacts
    from app1.database.tables.comments import Comments
    # This looks close to my own Db Query Builder but you have to manually .fetchall()
    # and the returned RowProxy has column name collisions. You would have to select
    # each column and add a .label() to avoid collisions. So my query builder is still
    # a lot simpler and a bit better looking.
    posts, users, contacts, comments = Posts.table, Users.table, Contacts.table, Comments.table
    query = (
        select([posts, users, contacts, comments])
        .select_from(posts
            .join(users, posts.c.creator_id == users.c.id)
            .join(contacts, users.c.id == contacts.c.user_id)
            .join(comments, posts.c.id == comments.c.post_id)
        )
    )
    results = await uvicore.db.fetchall(query, connection='app1')
    dump(results)
    dump(results[0].keys())  # Notice name collisions
    assert [
        'test-post1',
        'test-post1',
        'test-post3',
        'test-post3',
        'test-post3',
    ] == [x.unique_slug for x in results]
@pytest.mark.asyncio
async def test_group_by(app1):
    """Count posts per creator_id with GROUP BY and check the seeded totals."""
    from app1.database.tables.posts import Posts
    posts = Posts.table
    query = (
        sa.select([
            posts.c.creator_id,
            sa.func.count(posts.c.title)
        ])
        .group_by(posts.c.creator_id)
    )
    results = await uvicore.db.fetchall(query, connection='app1')
    dump(results)
    # Seed data: creator 1 has 2 posts, creator 2 has 3, creator 5 has 2.
    assert results == [
        (1, 2),
        (2, 3),
        (5, 2)
    ]
|
#!/bin/bash
set -eu
function get_abs_filename() {
    # Print the absolute path of $1: resolve its directory physically
    # (cd && pwd), then re-attach the final path component.
    printf '%s/%s\n' "$(cd "$(dirname "$1")" && pwd)" "$(basename "$1")"
}
function usage() {
    # Print the expected invocation.
    printf '%s\n' "usage: ./bootstrap.sh INSTALL_PREFIX_DIRECTORY MFEXT_INSTALL_ROOT_DIRECTORY"
}
# --- argument checking --------------------------------------------------
# Both the install prefix ($1) and the mfext root ($2) are mandatory.
if test "${1:-}" = "" -o "${2:-}" = ""; then
    usage
    exit 1
fi
if test "${1:-}" = "--help"; then
    usage
    exit 0
fi
# --- resolve and validate MFEXT_HOME ($2) -------------------------------
MFEXT_HOME=$(get_abs_filename "$2")
export MFEXT_HOME
if ! test -d "${MFEXT_HOME}"; then
    usage
    echo "ERROR: ${MFEXT_HOME} is not a directory"
    exit 1
fi
# NOTE(review): MFEXT_HOME is already absolute at this point; this second
# get_abs_filename call looks redundant -- confirm before removing.
MFEXT_HOME=$(get_abs_filename "${MFEXT_HOME}")
export MFEXT_HOME
# mfext's own version file, then the guessed version for both MFDATA and
# MFMODULE (same script, so same value today).
MFEXT_VERSION=$(cat "${MFEXT_HOME}/config/version")
export MFEXT_VERSION
MFDATA_VERSION=$("${MFEXT_HOME}/bin/guess_version.sh")
export MFDATA_VERSION
MFMODULE_VERSION=$("${MFEXT_HOME}/bin/guess_version.sh")
export MFMODULE_VERSION
# --- resolve and validate MFMODULE_HOME ($1) ----------------------------
MFMODULE_HOME=$(get_abs_filename "$1")
export MFMODULE_HOME
if ! test -d "${MFMODULE_HOME}"; then
    usage
    echo "ERROR: ${MFMODULE_HOME} is not a directory"
    exit 1
fi
# Sanity check: a real mfext home must contain guess_version.sh.
if ! test -f "${MFEXT_HOME}/bin/guess_version.sh"; then
    echo "ERROR: configured mfext home (${MFEXT_HOME}) is not a mfext home"
    exit 1
fi
export MFMODULE=MFDATA
export MFMODULE_LOWERCASE=mfdata
# Directory containing this script.
SRC_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export SRC_DIR
export MODULE_HAS_HOME_DIR=1
# --- generate adm/root.mk -----------------------------------------------
rm -f adm/root.mk
touch adm/root.mk
# Paths used when the generated makefile is read with FORCED_PATHS empty.
ROOT_PATH=${MFEXT_HOME}/bin:${MFEXT_HOME}/opt/core/bin:/usr/sbin:/usr/bin:/sbin:/bin
ROOT_LD_LIBRARY_PATH=""
ROOT_PKG_CONFIG_PATH=""
ROOT_LAYERAPI2_LAYERS_PATH=${MFMODULE_HOME}/opt:${MFMODULE_HOME}:${MFEXT_HOME}/opt:${MFEXT_HOME}
echo "Making adm/root.mk..."
# NOTE(review): adm/root.mk was already reset a few lines above; this second
# rm/touch pair is redundant -- confirm before removing.
rm -f adm/root.mk
touch adm/root.mk
echo "unexport MFMODULE_RUNTIME_HOME" >>adm/root.mk
echo "unexport MFMODULE_RUNTIME_SUFFIX" >>adm/root.mk
echo "unexport MFMODULE_RUNTIME_USER" >>adm/root.mk
echo "export MFMODULE := ${MFMODULE}" >>adm/root.mk
echo "export MFMODULE_LOWERCASE := $(echo ${MFMODULE} | tr '[:upper:]' '[:lower:]')" >>adm/root.mk
echo "export LAYERAPI2_LAYERS_PATH := ${ROOT_LAYERAPI2_LAYERS_PATH}" >>adm/root.mk
echo "export MFEXT_HOME := ${MFEXT_HOME}" >>adm/root.mk
echo "export MFEXT_VERSION := ${MFEXT_VERSION}" >>adm/root.mk
echo "export MFMODULE_HOME := ${MFMODULE_HOME}" >>adm/root.mk
# NOTE(review): uses MFDATA_VERSION here although MFMODULE_VERSION was
# computed above (identical value today) -- confirm intent.
echo "export MFMODULE_VERSION := ${MFDATA_VERSION}" >>adm/root.mk
echo "export SRC_DIR := ${SRC_DIR}" >>adm/root.mk
echo "ifeq (\$(FORCED_PATHS),)" >>adm/root.mk
echo "    export PATH := ${ROOT_PATH}" >>adm/root.mk
echo "    export LD_LIBRARY_PATH := ${ROOT_LD_LIBRARY_PATH}" >>adm/root.mk
echo "    export PKG_CONFIG_PATH := ${ROOT_PKG_CONFIG_PATH}" >>adm/root.mk
echo "    LAYER_ENVS:=\$(shell env |grep '^LAYERAPI2_LAYER_.*_LOADED=1\$\$' |awk -F '=' '{print \$\$1;}')" >>adm/root.mk
echo "    \$(foreach LAYER_ENV, \$(LAYER_ENVS), \$(eval unexport \$(LAYER_ENV)))" >>adm/root.mk
echo "endif" >>adm/root.mk
echo "export ${MFMODULE}_HOME := ${MFMODULE_HOME}" >>adm/root.mk
echo "export ${MFMODULE}_VERSION := ${MFDATA_VERSION}" >>adm/root.mk
if test "${MODULE_HAS_HOME_DIR:-}" = "1"; then
    echo "export MODULE_HAS_HOME_DIR := 1" >>adm/root.mk
fi
# Forward any proxy settings from the caller's environment into the makefile.
if test "${FTP_PROXY:-}" != ""; then
    echo "export FTP_PROXY:=${FTP_PROXY:-}" >>adm/root.mk
fi
if test "${http_proxy:-}" != ""; then
    echo "export http_proxy:=${http_proxy:-}" >>adm/root.mk
fi
if test "${https_proxy:-}" != ""; then
    echo "export https_proxy:=${https_proxy:-}" >>adm/root.mk
fi
if test "${HTTPS_PROXY:-}" != ""; then
    echo "export HTTPS_PROXY:=${HTTPS_PROXY:-}" >>adm/root.mk
fi
if test "${HTTP_PROXY:-}" != ""; then
    echo "export HTTP_PROXY:=${HTTP_PROXY:-}" >>adm/root.mk
fi
echo "export PYTHON2_SHORT_VERSION := 2.7" >>adm/root.mk
echo "export PYTHON3_SHORT_VERSION := 3.9" >>adm/root.mk
echo "BOOTSTRAP DONE !"
echo "MFEXT_HOME=${MFEXT_HOME}"
|
def interpolate_point(p1, p2, p):
    """Linearly interpolate the y-value on the line through p1 and p2 at p's x.

    Args:
        p1: ``(x1, y1)`` first point defining the line.
        p2: ``(x2, y2)`` second point defining the line; ``x2`` must differ
            from ``x1``.
        p: ``(x, y)`` point whose x-coordinate is interpolated; its y value
            is ignored.

    Returns:
        Tuple ``(x, y_interpolated)`` lying on the line through p1 and p2.

    Raises:
        ValueError: if the line is vertical (``x1 == x2``), where the slope
            is undefined (previously surfaced as a bare ZeroDivisionError).
    """
    x1, y1 = p1
    x2, y2 = p2
    x, _ = p
    if x2 == x1:
        raise ValueError("cannot interpolate on a vertical line (x1 == x2)")
    # Slope of the line through p1 and p2.
    slope = (y2 - y1) / (x2 - x1)
    # Point-slope form: y = y1 + m * (x - x1).
    return x, y1 + slope * (x - x1)
|
/*
* Copyright © 2021 <NAME>, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.cdap.etl.spark.batch;
import io.cdap.cdap.etl.api.batch.SparkCompute;
import io.cdap.cdap.etl.api.batch.SparkSink;
import io.cdap.cdap.etl.api.streaming.Windower;
import io.cdap.cdap.etl.common.PhaseSpec;
import io.cdap.cdap.etl.common.RecordInfo;
import io.cdap.cdap.etl.common.StageStatisticsCollector;
import io.cdap.cdap.etl.proto.v2.spec.StageSpec;
import io.cdap.cdap.etl.spark.SparkCollection;
import io.cdap.cdap.etl.spark.SparkPairCollection;
import io.cdap.cdap.etl.spark.join.JoinExpressionRequest;
import io.cdap.cdap.etl.spark.join.JoinRequest;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
* SQLEngineBackedCollection that wraps another SQLEngineBackedCollection in order to delay the execution of a
* mapping function.
*
 * This is currently used to prevent SQL Engine pull operations unless absolutely needed.
* @param <T> Type of the wrapped collection records.
* @param <U> Type of the output collection records.
*/
public class WrappedSQLEngineCollection<T, U> implements SQLBackedCollection<U> {
  // Mapping applied lazily to turn the wrapped collection into this one.
  private final java.util.function.Function<SparkCollection<T>, SparkCollection<U>> mapper;
  // Underlying SQL-engine-backed collection whose pull is being delayed.
  private final SQLBackedCollection<T> wrapped;
  // Cache of mapper.apply(wrapped); computed at most once by unwrap().
  // NOTE(review): not synchronized -- confirm callers are single-threaded.
  private SparkCollection<U> unwrapped = null;

  public WrappedSQLEngineCollection(SQLBackedCollection<T> wrapped,
                                    java.util.function.Function<SparkCollection<T>, SparkCollection<U>> mapper) {
    this.wrapped = wrapped;
    this.mapper = mapper;
  }

  /**
   * Applies the delayed mapping function to the wrapped collection (at most
   * once) and caches the result.
   *
   * @return the mapped collection.
   */
  private SparkCollection<U> unwrap() {
    if (unwrapped == null) {
      unwrapped = mapper.apply(wrapped);
    }
    return unwrapped;
  }

  /**
   * Executes an operation on the underlying collection and then wraps it in the same mapper for this collection.
   *
   * This is useful when executing multiple operations in sequence where we need to delegate the operation to the
   * underlying SQL engine and keep delaying the pull operation.
   *
   * By calling this over all wrapped collections, we will eventually reach an instance of a
   * {@link SQLEngineCollection} where the actual operation will take place.
   *
   * @param remapper function used to re-map the underlying collection.
   * @return SQL Backed collection after re-mapping the underlying collection and re-adding the mapper.
   */
  private SparkCollection<U> rewrap(
    java.util.function.Function<SparkCollection<T>, SparkCollection<T>> remapper) {
    return new WrappedSQLEngineCollection<>((SQLBackedCollection<T>) remapper.apply(wrapped), mapper);
  }

  @Override
  public <C> C getUnderlying() {
    return unwrap().getUnderlying();
  }

  @Override
  public SparkCollection<U> cache() {
    return unwrap().cache();
  }

  @Override
  public SparkCollection<U> union(SparkCollection<U> other) {
    return unwrap().union(other);
  }

  @Override
  public SparkCollection<RecordInfo<Object>> transform(StageSpec stageSpec, StageStatisticsCollector collector) {
    return unwrap().transform(stageSpec, collector);
  }

  @Override
  public SparkCollection<RecordInfo<Object>> multiOutputTransform(StageSpec stageSpec,
                                                                  StageStatisticsCollector collector) {
    // BUG FIX: previously delegated to transform(), silently dropping the
    // multi-output semantics of the underlying collection.
    return unwrap().multiOutputTransform(stageSpec, collector);
  }

  @Override
  public <U1> SparkCollection<U1> map(Function<U, U1> function) {
    return unwrap().map(function);
  }

  @Override
  public <U1> SparkCollection<U1> flatMap(StageSpec stageSpec, FlatMapFunction<U, U1> function) {
    return unwrap().flatMap(stageSpec, function);
  }

  @Override
  public SparkCollection<RecordInfo<Object>> aggregate(StageSpec stageSpec,
                                                       @Nullable Integer partitions,
                                                       StageStatisticsCollector collector) {
    return unwrap().aggregate(stageSpec, partitions, collector);
  }

  @Override
  public SparkCollection<RecordInfo<Object>> reduceAggregate(StageSpec stageSpec,
                                                             @Nullable Integer partitions,
                                                             StageStatisticsCollector collector) {
    return unwrap().reduceAggregate(stageSpec, partitions, collector);
  }

  @Override
  public <K, V> SparkPairCollection<K, V> flatMapToPair(PairFlatMapFunction<U, K, V> function) {
    return unwrap().flatMapToPair(function);
  }

  @Override
  public <U1> SparkCollection<U1> compute(StageSpec stageSpec, SparkCompute<U, U1> compute) throws Exception {
    return unwrap().compute(stageSpec, compute);
  }

  @Override
  public Runnable createStoreTask(StageSpec stageSpec,
                                  PairFlatMapFunction<U, Object, Object> sinkFunction) {
    // Lambda (file already uses lambdas elsewhere) instead of an anonymous
    // Runnable; behavior is unchanged.
    return () -> {
      // Run direct store job. If this succeeds, complete execution.
      if (wrapped.tryStoreDirect(stageSpec)) {
        return;
      }
      // Run store task on the unwrapped collection if the direct store task could not be completed.
      unwrap().createStoreTask(stageSpec, sinkFunction).run();
    };
  }

  @Override
  public boolean tryStoreDirect(StageSpec stageSpec) {
    // Delegate directly to the wrapped collection; no pull is needed.
    return wrapped.tryStoreDirect(stageSpec);
  }

  @Override
  public Runnable createMultiStoreTask(PhaseSpec phaseSpec,
                                       Set<String> group,
                                       Set<String> sinks,
                                       Map<String, StageStatisticsCollector> collectors) {
    return unwrap().createMultiStoreTask(phaseSpec, group, sinks, collectors);
  }

  @Override
  public Runnable createStoreTask(StageSpec stageSpec,
                                  SparkSink<U> sink) throws Exception {
    return unwrap().createStoreTask(stageSpec, sink);
  }

  @Override
  public void publishAlerts(StageSpec stageSpec,
                            StageStatisticsCollector collector) throws Exception {
    unwrap().publishAlerts(stageSpec, collector);
  }

  @Override
  public SparkCollection<U> window(StageSpec stageSpec,
                                   Windower windower) {
    return unwrap().window(stageSpec, windower);
  }

  @Override
  public SparkCollection<U> join(JoinRequest joinRequest) {
    // Joins stay on the SQL engine: re-wrap instead of pulling.
    return rewrap(c -> c.join(joinRequest));
  }

  @Override
  public SparkCollection<U> join(JoinExpressionRequest joinExpressionRequest) {
    return rewrap(c -> c.join(joinExpressionRequest));
  }
}
|
<filename>sim/cpu/decoder/prefix_none_32.cpp
/*
* Copyright (C) 2016 <NAME>
*
* This file is part of IBMulator.
*
* IBMulator is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* IBMulator is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with IBMulator. If not, see <http://www.gnu.org/licenses/>.
*/
#include "../common.h"
#include "../decoder.h"
#include "../executor.h"
#define PREFIX_NONE prefix_none(_opcode, ctb_idx_, ctb_op_); return;
void CPUDecoder::prefix_none_32(uint8_t _opcode, unsigned &ctb_idx_, unsigned &ctb_op_)
{
ctb_op_ = _opcode;
ctb_idx_ = CTB_IDX_NONE;
switch(_opcode) {
/* 00 /r ADD eb,rb Add byte register into EA byte */
case 0x00: PREFIX_NONE;
/* 01 /r ADD ed,rd Add dword register to EA dword */
case 0x01:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::ADD_ed_rd;
break;
}
/* 02 /r ADD rb,eb Add EA byte into byte register */
case 0x02: PREFIX_NONE;
/* 03 /r ADD rd,ed Add EA dword into dword register */
case 0x03:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::ADD_rd_ed;
break;
}
/* 04 ib ADD AL,ib Add immediate byte into AL */
case 0x04: PREFIX_NONE;
/* 05 id ADD EAX,id Add immediate dword into EAX */
case 0x05:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::ADD_EAX_id;
break;
}
/* 06 PUSH ES Push ES */
case 0x06:
{
m_instr.reg = REGI_ES;
m_instr.fn = &CPUExecutor::PUSH_SR_dw;
break;
}
/* 07 POP ES Pop top of stack into ES */
case 0x07:
{
m_instr.reg = REGI_ES;
m_instr.fn = &CPUExecutor::POP_SR_dw;
break;
}
/* 08 /r OR eb,rb Logical-OR byte register into EA byte */
case 0x08: PREFIX_NONE;
/* 09 /r OR ed,rd Logical-OR dword register into EA dword */
case 0x09:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::OR_ed_rd;
break;
}
/* 0A /r OR rb,eb Logical-OR EA byte into byte register */
case 0x0A: PREFIX_NONE;
/* 0B /r OR rd,ed Logical-OR EA dword into dword register */
case 0x0B:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::OR_rd_ed;
break;
}
/* 0C ib OR AL,ib Logical-OR immediate byte into AL */
case 0x0C: PREFIX_NONE;
/* 0D id OR EAX,id Logical-OR immediate dword into EAX */
case 0x0D:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::OR_EAX_id;
break;
}
/* 0E PUSH CS Push CS */
case 0x0E:
{
m_instr.reg = REGI_CS;
m_instr.fn = &CPUExecutor::PUSH_SR_dw;
break;
}
/* 0F 2-byte opcode prefix */
/* 10 /r ADC eb,rb Add with carry byte register into EA byte */
case 0x10: PREFIX_NONE;
/* 11 /r ADC ed,rd Add with carry dword register into EA dword */
case 0x11:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::ADC_ed_rd;
break;
}
/* 12 /r ADC rb,eb Add with carry EA byte into byte register */
case 0x12: PREFIX_NONE;
/* 13 /r ADC rd,ed Add with carry EA dword into dword register */
case 0x13:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::ADC_rd_ed;
break;
}
/* 14 ib ADC AL,ib Add with carry immediate byte into AL */
case 0x14: PREFIX_NONE;
/* 15 id ADC EAX,id Add with carry immediate dword into EAX */
case 0x15:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::ADC_EAX_id;
break;
}
/* 16 PUSH SS Push SS */
case 0x16:
{
m_instr.reg = REGI_SS;
m_instr.fn = &CPUExecutor::PUSH_SR_dw;
break;
}
/* 17 POP SS Pop top of stack into SS */
case 0x17:
{
m_instr.reg = REGI_SS;
m_instr.fn = &CPUExecutor::POP_SR_dw;
break;
}
/* 18 /r SBB eb,rb Subtract with borrow byte register from EA byte */
case 0x18: PREFIX_NONE;
/* 19 /r SBB ed,rd Subtract with borrow dword register from EA dword */
case 0x19:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::SBB_ed_rd;
break;
}
/* 1A /r SBB rb,eb Subtract with borrow EA byte from byte register */
case 0x1A: PREFIX_NONE;
/* 1B /r SBB rd,ed Subtract with borrow EA dword from dword register */
case 0x1B:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::SBB_rd_ed;
break;
}
/* 1C ib SBB AL,ib Subtract with borrow imm. byte from AL */
case 0x1C: PREFIX_NONE;
/* 1D id SBB EAX,id Subtract with borrow imm. dword from EAX */
case 0x1D:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::SBB_EAX_id;
break;
}
/* 1E PUSH DS Push DS */
case 0x1E:
{
m_instr.reg = REGI_DS;
m_instr.fn = &CPUExecutor::PUSH_SR_dw;
break;
}
/* 1F POP DS Pop top of stack into DS */
case 0x1F:
{
m_instr.reg = REGI_DS;
m_instr.fn = &CPUExecutor::POP_SR_dw;
break;
}
/* 20 /r AND eb,rb Logical-AND byte register into EA byte */
case 0x20: PREFIX_NONE;
/* 21 /r AND ed,rd Logical-AND dword register into EA dword */
case 0x21:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::AND_ed_rd;
break;
}
/* 22 /r AND rb,eb Logical-AND EA byte into byte register */
case 0x22: PREFIX_NONE;
/* 23 /r AND rd,ed Logical-AND EA dword into dword register */
case 0x23:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::AND_rd_ed;
break;
}
/* 24 ib AND AL,ib Logical-AND immediate byte into AL */
case 0x24: PREFIX_NONE;
/* 25 id AND EAX,id Logical-AND immediate dword into EAX */
case 0x25:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::AND_EAX_id;
break;
}
/* 26 seg ovr prefix (ES) */
/* 27 DAA Decimal adjust AL after addition */
case 0x27: PREFIX_NONE;
/* 28 /r SUB eb,rb Subtract byte register from EA byte */
case 0x28: PREFIX_NONE;
/* 29 /r SUB ed,rd Subtract dword register from EA dword */
case 0x29:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::SUB_ed_rd;
break;
}
/* 2A /r SUB rb,eb Subtract EA byte from byte register */
case 0x2A: PREFIX_NONE;
/* 2B /r SUB rd,ed Subtract EA dword from dword register */
case 0x2B:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::SUB_rd_ed;
break;
}
/* 2C ib SUB AL,ib Subtract immediate byte from AL */
case 0x2C: PREFIX_NONE;
/* 2D id SUB EAX,id Subtract immediate dword from EAX */
case 0x2D:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::SUB_EAX_id;
break;
}
/* 2E seg ovr prefix (CS) */
/* 2F DAS Decimal adjust AL after subtraction */
case 0x2F: PREFIX_NONE;
/* 30 /r XOR eb,rb Exclusive-OR byte register into EA byte */
case 0x30: PREFIX_NONE;
/* 31 /r XOR ed,rd Exclusive-OR dword register into EA dword */
case 0x31:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::XOR_ed_rd;
break;
}
/* 32 /r XOR rb,eb Exclusive-OR EA byte into byte register */
case 0x32: PREFIX_NONE;
/* 33 /r XOR rd,ed Exclusive-OR EA dword into dword register */
case 0x33:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::XOR_rd_ed;
break;
}
/* 34 ib XOR AL,ib Exclusive-OR immediate byte into AL */
case 0x34: PREFIX_NONE;
/* 35 id XOR EAX,id Exclusive-OR immediate dword into AX */
case 0x35:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::XOR_EAX_id;
break;
}
/* 36 seg ovr prefix (SS) */
/* 37 AAA ASCII adjust AL after addition */
case 0x37: PREFIX_NONE;
/* 38 /r CMP eb,rb Compare byte register from EA byte */
case 0x38: PREFIX_NONE;
/* 39 /r CMP ed,rd Compare dword register from EA dword */
case 0x39:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::CMP_ed_rd;
break;
}
/* 3A /r CMP rb,eb Compare EA byte from byte register*/
case 0x3A: PREFIX_NONE;
/* 3B /r CMP rd,ed Compare EA dword from dword register */
case 0x3B:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::CMP_rd_ed;
break;
}
/* 3C ib CMP AL,ib Compare immediate byte from AL */
case 0x3C: PREFIX_NONE;
/* 3D id CMP EAX,id Compare immediate dword from EAX */
case 0x3D:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::CMP_EAX_id;
break;
}
/* 3E seg ovr prefix (DS) */
/* 3F AAS ASCII adjust AL after subtraction */
case 0x3F: PREFIX_NONE;
/* 40+ rd INC rd Increment dword register by 1 */
case 0x40: //EAX
case 0x41: //ECX
case 0x42: //EDX
case 0x43: //EBX
case 0x44: //ESP
case 0x45: //EBP
case 0x46: //ESI
case 0x47: //EDI
{
m_instr.reg = _opcode - 0x40;
m_instr.fn = &CPUExecutor::INC_rd_op;
break;
}
/* 48+ rd DEC rd Decrement dword register by 1 */
case 0x48: //EAX
case 0x49: //ECX
case 0x4A: //EDX
case 0x4B: //EBX
case 0x4C: //ESP
case 0x4D: //EBP
case 0x4E: //ESI
case 0x4F: //EDI
{
m_instr.reg = _opcode - 0x48;
m_instr.fn = &CPUExecutor::DEC_rd_op;
break;
}
/* 50+ rd PUSH rd Push dword register */
case 0x50: //EAX
case 0x51: //ECX
case 0x52: //EDX
case 0x53: //EBX
case 0x54: //ESP
case 0x55: //EBP
case 0x56: //ESI
case 0x57: //EDI
{
m_instr.reg = _opcode - 0x50;
m_instr.fn = &CPUExecutor::PUSH_rd_op;
break;
}
/* 58+ rd POP rd Pop top of stack into dword register */
case 0x58: //EAX
case 0x59: //ECX
case 0x5A: //EDX
case 0x5B: //EBX
case 0x5C: //ESP
case 0x5D: //EBP
case 0x5E: //ESI
case 0x5F: //EDI
{
m_instr.reg = _opcode - 0x58;
m_instr.fn = &CPUExecutor::POP_rd_op;
break;
}
/* 60 PUSHAD Push EAX, ECX, EDX, EBX, original ESP, EBP, ESI, and EDI */
case 0x60:
{
m_instr.fn = &CPUExecutor::PUSHAD;
break;
}
/* 61 POPAD Pop EDI, ESI, EBP, ESP, EDX, ECX, and EAX */
case 0x61:
{
m_instr.fn = &CPUExecutor::POPAD;
break;
}
/* 62 /r BOUND rd,mq INT 5 if rd not within bounds */
case 0x62:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::BOUND_rd_mq ;
break;
}
/* 63 /r ARPL ew,rw Adjust RPL of EA word not less than RPL of rw */
case 0x63: PREFIX_NONE;
/*
64 seg ovr prefix (FS)
65 seg ovr prefix (GS)
66 operand-size prefix (OS)
67 address-size prefix (AS)
*/
/* 68 id PUSH id Push immediate dword */
case 0x68:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::PUSH_id;
break;
}
/* 69 /r id IMUL rd,ed,id Signed multiply (rd = EA dword * imm. dword) */
case 0x69:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::IMUL_rd_ed_id;
break;
}
/* 6A ib PUSH ib Push immediate sign-extended byte*/
case 0x6A:
{
m_instr.ib = fetchb();
m_instr.fn = &CPUExecutor::PUSH_ib_dw;
break;
}
/* 6B /r ib IMUL rd,ed,ib Signed multiply (rd = EA dword * imm. byte) */
case 0x6B:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.ib = fetchb();
m_instr.fn = &CPUExecutor::IMUL_rd_ed_ib;
break;
}
/* 6C INSB Input byte from port DX into ES:[(E)DI] */
case 0x6C: PREFIX_NONE;
/* 6D INSD Input dword from port DX into ES:[(E)DI] */
case 0x6D:
{
if(m_instr.addr32) {
m_instr.fn = &CPUExecutor::INSD_a32;
} else {
m_instr.fn = &CPUExecutor::INSD_a16;
}
break;
}
/* 6E OUTSB Output byte DS:[(E)SI] to port number DX */
case 0x6E: PREFIX_NONE;
/* 6F OUTSD Output dword DS:[(E)SI] to port number DX */
case 0x6F:
{
if(m_instr.addr32) {
m_instr.fn = &CPUExecutor::OUTSD_a32;
} else {
m_instr.fn = &CPUExecutor::OUTSD_a16;
}
break;
}
case 0x70: /* 70 cb JO cb Jump short if overflow (OF=1) */
case 0x71: /* 71 cb JNO cb Jump short if notoverflow (OF=0) */
case 0x72: /* 72 cb JC cb Jump short if carry (CF=1) */
case 0x73: /* 73 cb JNC cb Jump short if not carry (CF=0) */
case 0x74: /* 74 cb JE cb Jump short if equal (ZF=1) */
case 0x75: /* 75 cb JNE cb Jump short if not equal (ZF=0) */
case 0x76: /* 76 cb JBE cb Jump short if below or equal (CF=1 or ZF=1) */
case 0x77: /* 77 cb JA cb Jump short if above (CF=0 and ZF=0) */
case 0x78: /* 78 cb JS cb Jump short if sign (SF=1) */
case 0x79: /* 79 cb JNS cb Jump short if not sign (SF=0) */
case 0x7A: /* 7A cb JPE cb Jump short if parity even (PF=1) */
case 0x7B: /* 7B cb JPO cb Jump short if parity odd (PF=0) */
case 0x7C: /* 7C cb JL cb Jump short if less (SF<>OF) */
case 0x7D: /* 7D cb JNL cb Jump short if not less (SF=OF) */
case 0x7E: /* 7E cb JLE cb Jump short if less or equal (ZF=1 or SF<>OF) */
case 0x7F: /* 7F cb JNLE cb Jump short if not less or equal (ZF=0 and SF=OF) */
PREFIX_NONE;
/*
80 /0 ib ADD eb,ib Add immediate byte into EA byte
80 /1 ib OR eb,ib Logical-OR immediate byte into EA byte
80 /2 ib ADC eb,ib Add with carry immediate byte into EA byte
80 /3 ib SBB eb,ib Subtract with borrow imm. byte from EA byte
80 /4 ib AND eb,ib Logical-AND immediate byte into EA byte
80 /5 ib SUB eb,ib Subtract immediate byte from EA byte
80 /6 ib XOR eb,ib Exclusive-OR immediate byte into EA byte
80 /7 ib CMP eb,ib Compare immediate byte from EA byte
*/
case 0x80:
case 0x82: PREFIX_NONE;
/*
81 /0 id ADD ed,id Add immediate dword into EA dword
81 /1 id OR ed,id Logical-OR immediate dword into EA dword
81 /2 id ADC ed,id Add with carry immediate dword into EA dword
81 /3 id SBB ed,id Subtract with borrow imm. dword from EA dword
81 /4 id AND ed,id Logical-AND immediate dword into EA wdord
81 /5 id SUB ed,id Subtract immediate dword from EA dword
81 /6 id XOR ed,id Exclusive-OR immediate dword into EA dword
81 /7 id CMP ed,id Compare immediate dword from EA dword
*/
case 0x81:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.id1 = fetchdw();
switch(m_instr.modrm.n) {
case 0:
m_instr.fn = &CPUExecutor::ADD_ed_id;
break;
case 1:
m_instr.fn = &CPUExecutor::OR_ed_id;
break;
case 2:
m_instr.fn = &CPUExecutor::ADC_ed_id;
break;
case 3:
m_instr.fn = &CPUExecutor::SBB_ed_id;
break;
case 4:
m_instr.fn = &CPUExecutor::AND_ed_id;
break;
case 5:
m_instr.fn = &CPUExecutor::SUB_ed_id;
break;
case 6:
m_instr.fn = &CPUExecutor::XOR_ed_id;
break;
case 7:
m_instr.fn = &CPUExecutor::CMP_ed_id;
break;
default:
illegal_opcode();
}
ctb_op_ = m_instr.modrm.n;
ctb_idx_ = CTB_IDX_81;
break;
}
/* 82 alias of 80 */
/*
83 /0 ib ADD ed,ib Add immediate byte into EA dword
83 /1 ib OR ed,ib Logical-OR immediate byte into EA dword
83 /2 ib ADC ed,ib Add with carry immediate byte into EA dword
83 /3 ib SBB ed,ib Subtract with borrow imm. byte from EA dword
83 /4 ib AND ed,ib Logical-AND immediate byte into EA dword
83 /5 ib SUB ed,ib Subtract immediate byte from EA dword
83 /6 ib XOR ed,ib Exclusive-OR immediate byte into EA dword
83 /7 ib CMP ed,ib Compare immediate byte from EA dword
*/
case 0x83:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.ib = fetchb();
switch(m_instr.modrm.n) {
case 0:
m_instr.fn = &CPUExecutor::ADD_ed_ib;
break;
case 1:
m_instr.fn = &CPUExecutor::OR_ed_ib;
break;
case 2:
m_instr.fn = &CPUExecutor::ADC_ed_ib;
break;
case 3:
m_instr.fn = &CPUExecutor::SBB_ed_ib;
break;
case 4:
m_instr.fn = &CPUExecutor::AND_ed_ib;
break;
case 5:
m_instr.fn = &CPUExecutor::SUB_ed_ib;
break;
case 6:
m_instr.fn = &CPUExecutor::XOR_ed_ib;
break;
case 7:
m_instr.fn = &CPUExecutor::CMP_ed_ib;
break;
default:
illegal_opcode();
}
ctb_op_ = m_instr.modrm.n;
ctb_idx_ = CTB_IDX_83;
break;
}
/* 84 /r TEST eb,rb AND byte register into EA byte for flags only */
case 0x84: PREFIX_NONE;
/* 85 /r TEST ed,rd AND dword register into EA dword for flags only */
case 0x85:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::TEST_ed_rd;
break;
}
/* 86 /r XCHG eb,rb Exchange byte register with EA byte */
case 0x86: PREFIX_NONE;
/* 87 /r XCHG ed,rd Exchange dword register with EA dword */
case 0x87:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::XCHG_ed_rd;
break;
}
/* 88 /r MOV eb,rb Move byte register into EA byte */
case 0x88: PREFIX_NONE;
/* 89 /r MOV ed,rd Move dword register into EA dword */
case 0x89:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::MOV_ed_rd;
break;
}
/* 8A /r MOV rb,eb Move EA byte into byte register */
case 0x8A: PREFIX_NONE;
/* 8B /r MOV rd,ed Move EA dword into dword register */
case 0x8B:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::MOV_rd_ed;
break;
}
/* 8C /r MOV ew,SR Move Segment Register into EA word */
case 0x8C: PREFIX_NONE;
/* 8D /r LEA rd,m Calculate EA offset given by m, place in rd */
case 0x8D:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::LEA_rd_m;
break;
}
/* 8E /r MOV SR,mw Move memory word into Segment Register */
case 0x8E: PREFIX_NONE;
/* 8F /0 POP md Pop top of stack into memory dword */
case 0x8F:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::POP_md;
if(m_instr.modrm.n != 0) {
illegal_opcode();
}
break;
}
/* 90 NOP No OPERATION */
case 0x90: PREFIX_NONE;
/* 90+ rd XCHG EAX,rd Exchange dword register with EAX */
case 0x91: //ECX
case 0x92: //EDX
case 0x93: //EBX
case 0x94: //ESP
case 0x95: //EBP
case 0x96: //ESI
case 0x97: //EDI
{
m_instr.reg = _opcode - 0x90;
m_instr.fn = &CPUExecutor::XCHG_EAX_rd;
break;
}
/* 98 CWDE Convert word into dword */
case 0x98:
{
m_instr.fn = &CPUExecutor::CWDE;
break;
}
/* 99 CDQ Convert dword to qword */
case 0x99:
{
m_instr.fn = &CPUExecutor::CDQ;
break;
}
/* 9A cp CALL cp Call inter-segment, immediate 6-byte address */
case 0x9A:
{
m_instr.id1 = fetchdw();
m_instr.iw2 = fetchw();
m_instr.fn = &CPUExecutor::CALL_ptr1632;
break;
}
/* 9B WAIT Wait until BUSY pin is inactive (HIGH) */
case 0x9B: PREFIX_NONE;
/* 9C PUSHFD Push eflags register */
case 0x9C:
{
m_instr.fn = &CPUExecutor::PUSHFD;
break;
}
/* 9D POPFD Pop top of stack into eflags register */
case 0x9D:
{
m_instr.fn = &CPUExecutor::POPFD;
break;
}
case 0x9E: /* 9E SAHF Store AH into flags */
case 0x9F: /* 9F LAHF Load flags into AH */
PREFIX_NONE;
/* A0 iw/id MOV AL,xb Move byte variable at seg:offset into AL */
case 0xA0: PREFIX_NONE;
/* A1 iw/id MOV EAX,xw Move dword variable at seg:offset into EAX */
case 0xA1:
{
if(m_instr.addr32) {
m_instr.offset = fetchdw();
} else {
m_instr.offset = fetchw();
}
m_instr.fn = &CPUExecutor::MOV_EAX_xd;
break;
}
/* A2 iw/id MOV xb,AL Move AL into byte variable at seg:offset */
case 0xA2: PREFIX_NONE;
/* A3 iw/id MOV xd,EAX Move EAX into dword register at seg:offset */
case 0xA3:
{
if(m_instr.addr32) {
m_instr.offset = fetchdw();
} else {
m_instr.offset = fetchw();
}
m_instr.fn = &CPUExecutor::MOV_xd_EAX;
break;
}
/* A4 MOVSB Move byte DS:[(E)SI] to ES:[(E)DI] */
case 0xA4: PREFIX_NONE;
/* A5 MOVSD Move dword DS:[(E)SI] to ES:[(E)DI] */
case 0xA5:
{
if(m_instr.addr32) {
m_instr.fn = &CPUExecutor::MOVSD_a32;
} else {
m_instr.fn = &CPUExecutor::MOVSD_a16;
}
break;
}
/* A6 CMPSB Compare bytes ES:[(E)DI] from DS:[(E)SI] */
case 0xA6: PREFIX_NONE;
/* A7 CMPSD Compare dwords ES:[(E)DI] from DS:[(E)SI] */
case 0xA7:
{
m_instr.rep_zf = true;
if(m_instr.addr32) {
m_instr.fn = &CPUExecutor::CMPSD_a32;
} else {
m_instr.fn = &CPUExecutor::CMPSD_a16;
}
break;
}
/* A8 ib TEST AL,ib AND immediate byte into AL for flags only */
case 0xA8: PREFIX_NONE;
/* A9 id TEST EAX,id AND immediate dword into EAX for flags only */
case 0xA9:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::TEST_EAX_id;
break;
}
/* AA STOSB Store AL to byte ES:[(E)DI], advance (E)DI */
case 0xAA: PREFIX_NONE;
/* AB STOSD Store EAX to dword ES:[(E)DI], advance (E)DI */
case 0xAB:
{
if(m_instr.addr32) {
m_instr.fn = &CPUExecutor::STOSD_a32;
} else {
m_instr.fn = &CPUExecutor::STOSD_a16;
}
break;
}
/* AC LODSB Load byte DS:[(E)SI] into AL */
case 0xAC: PREFIX_NONE;
/* AD LODSD Load dword DS:[(E)SI] into EAX */
case 0xAD:
{
if(m_instr.addr32) {
m_instr.fn = &CPUExecutor::LODSD_a32;
} else {
m_instr.fn = &CPUExecutor::LODSD_a16;
}
break;
}
/* AE SCASB Compare bytes AL - ES:[(E)DI], advance (E)DI */
case 0xAE: PREFIX_NONE;
/* AF SCASD Compare dwords EAX - ES:[(E)DI], advance (E)DI */
case 0xAF:
{
m_instr.rep_zf = true;
if(m_instr.addr32) {
m_instr.fn = &CPUExecutor::SCASD_a32;
} else {
m_instr.fn = &CPUExecutor::SCASD_a16;
}
break;
}
/* B0+ rb ib - MOV rb,ib Move imm byte into byte reg */
case 0xB0: //AL
case 0xB1: //CL
case 0xB2: //DL
case 0xB3: //BL
case 0xB4: //AH
case 0xB5: //CH
case 0xB6: //DH
case 0xB7: //BH
PREFIX_NONE;
/* B8+ rd id - MOV rd,id Move imm dword into dword reg */
case 0xB8: //EAX
case 0xB9: //ECX
case 0xBA: //EDX
case 0xBB: //EBX
case 0xBC: //ESP
case 0xBD: //EBP
case 0xBE: //ESI
case 0xBF: //EDI
{
m_instr.id1 = fetchdw();
m_instr.reg = _opcode - 0xB8;
m_instr.fn = &CPUExecutor::MOV_rd_id;
break;
}
/*
C0 /0 ib ROL eb,ib Rotate 8-bit EA byte left ib times
C0 /1 ib ROR eb,ib Rotate 8-bit EA byte right ib times
C0 /2 ib RCL eb,ib Rotate 9-bits (CF, EA byte) left ib times
C0 /3 ib RCR eb,ib Rotate 9-bits (CF, EA byte) right ib times
C0 /4 ib SAL eb,ib Multiply EA byte by 2, ib times
C0 /5 ib SHR eb,ib Unsigned divide EA byte by 2, ib times
C0 /6 ib SHL eb,ib Multiply EA byte by 2, ib times
C0 /7 ib SAR eb,ib Signed divide EA byte by 2, ib times
*/
case 0xC0: PREFIX_NONE;
/*
C1 /0 ib ROL ed,ib Rotate 16-bit EA dword left ib times
C1 /1 ib ROR ed,ib Rotate 16-bit EA dword right ib times
C1 /2 ib RCL ed,ib Rotate 17-bits (CF, EA dword) left ib times
C1 /3 ib RCR ed,ib Rotate 17-bits (CF, EA dword) right ib times
C1 /4 ib SAL ed,ib Multiply EA dword by 2, ib times
C1 /5 ib SHR ed,ib Unsigned divide EA dword by 2, ib times
C1 /6 ib SHL ed,ib Multiply EA dword by 2, ib times
C1 /7 ib SAR ed,ib Signed divide EA dword by 2, ib times
*/
case 0xC1:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.ib = fetchb();
switch(m_instr.modrm.n) {
case 0:
m_instr.fn = &CPUExecutor::ROL_ed_ib;
break;
case 1:
m_instr.fn = &CPUExecutor::ROR_ed_ib;
break;
case 2:
m_instr.fn = &CPUExecutor::RCL_ed_ib;
break;
case 3:
m_instr.fn = &CPUExecutor::RCR_ed_ib;
break;
case 4:
case 6: //SAL and SHL are the same
m_instr.fn = &CPUExecutor::SAL_ed_ib;
break;
case 5:
m_instr.fn = &CPUExecutor::SHR_ed_ib;
break;
case 7:
m_instr.fn = &CPUExecutor::SAR_ed_ib;
break;
default:
illegal_opcode();
}
ctb_op_ = m_instr.modrm.n;
ctb_idx_ = CTB_IDX_C1;
break;
}
/* C2 iw RET iw Return to near caller, pop iw bytes pushed before Call */
case 0xC2:
{
m_instr.iw1 = fetchw();
m_instr.fn = &CPUExecutor::RET_near_o32;
break;
}
/* C3 RET Return to near caller */
case 0xC3:
{
m_instr.iw1 = 0;
m_instr.fn = &CPUExecutor::RET_near_o32;
break;
}
/* C4 /r LES rd,mp Load ES:r32 with pointer from memory */
case 0xC4:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::LES_rd_mp;
if(m_instr.modrm.mod_is_reg()) {
illegal_opcode();
}
break;
}
/* C5 /r LDS rw,mp Load DS:r32 with pointer from memory */
case 0xC5:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.fn = &CPUExecutor::LDS_rd_mp;
if(m_instr.modrm.mod_is_reg()) {
illegal_opcode();
}
break;
}
/* C6 /0 ib MOV eb,ib Move immediate byte into EA byte */
case 0xC6: PREFIX_NONE;
/* C7 /0 id MOV ed,id Move immediate dword into EA dword */
case 0xC7:
{
m_instr.modrm.load(m_instr.addr32);
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::MOV_ed_id;
if(m_instr.modrm.n != 0) {
illegal_opcode();
}
break;
}
/* C8 iw ib ENTER iw,ib Make stack frame for procedure parameters */
case 0xC8:
{
m_instr.iw1 = fetchw();
m_instr.ib = fetchb();
m_instr.fn = &CPUExecutor::ENTER_o32;
break;
}
/* C9 LEAVE Set (E)SP to (E)BP, then POP (E)BP */
case 0xC9:
{
m_instr.fn = &CPUExecutor::LEAVE_o32;
break;
}
/* CA iw RET iw Return to far caller, pop iw bytes */
case 0xCA:
{
m_instr.iw1 = fetchw();
m_instr.fn = &CPUExecutor::RET_far_o32;
break;
}
/* CB RET Return to far caller */
case 0xCB:
{
m_instr.iw1 = 0;
m_instr.fn = &CPUExecutor::RET_far_o32;
break;
}
case 0xCC: /* CC INT 3 Interrupt 3 (trap to debugger) */
case 0xCD: /* CD ib INT ib Interrupt numbered by immediate byte */
case 0xCE: /* CE INTO Interrupt 4 */
PREFIX_NONE;
/* CF IRETD Interrupt return (far return and pop flags) */
case 0xCF:
{
m_instr.fn = &CPUExecutor::IRETD;
break;
}
/*
D0 /0 ROL eb,1 Rotate 8-bit EA byte left once
D0 /1 ROR eb,1 Rotate 8-bit EA byte right once
D0 /2 RCL eb,1 Rotate 9-bits (CF, EA byte) left once
D0 /3 RCR eb,1 Rotate 9-bits (CF, EA byte) right once
D0 /4 SAL eb,1 Multiply EA byte by 2, once
D0 /5 SHR eb,1 Unsigned divide EA byte by 2, once
D0 /6 SHL eb,1 Multiply EA byte by 2, once
D0 /7 SAR eb,1 Signed divide EA byte by 2, once
*/
case 0xD0: PREFIX_NONE;
/*
D1 /0 ROL ed,1 Rotate 32-bit EA dword left once
D1 /1 ROR ed,1 Rotate 32-bit EA dword right once
D1 /2 RCL ed,1 Rotate 33-bits (CF, EA dword) left once
D1 /3 RCR ed,1 Rotate 33-bits (CF, EA dword) right once
D1 /4 SAL ed,1 Multiply EA dword by 2, once
D1 /5 SHR ed,1 Unsigned divide EA dword by 2, once
D1 /6 SHL ed,1 Multiply EA dword by 2, once
D1 /7 SAR ed,1 Signed divide EA dword by 2, once
*/
case 0xD1:
{
m_instr.modrm.load(m_instr.addr32);
switch(m_instr.modrm.n) {
case 0:
m_instr.fn = &CPUExecutor::ROL_ed_1;
break;
case 1:
m_instr.fn = &CPUExecutor::ROR_ed_1;
break;
case 2:
m_instr.fn = &CPUExecutor::RCL_ed_1;
break;
case 3:
m_instr.fn = &CPUExecutor::RCR_ed_1;
break;
case 4:
case 6: //SAL and SHL are the same
m_instr.fn = &CPUExecutor::SAL_ed_1;
break;
case 5:
m_instr.fn = &CPUExecutor::SHR_ed_1;
break;
case 7:
m_instr.fn = &CPUExecutor::SAR_ed_1;
break;
default:
illegal_opcode();
}
ctb_op_ = m_instr.modrm.n;
ctb_idx_ = CTB_IDX_D1;
break;
}
/*
D2 /0 ROL eb,CL Rotate 8-bit EA byte left CL times
D2 /1 ROR eb,CL Rotate 8-bit EA byte right CL times
D2 /2 RCL eb,CL Rotate 9-bits (CF, EA byte) left CL times
D2 /3 RCR eb,CL Rotate 9-bits (CF, EA byte) right CL times
D2 /4 SAL eb,CL Multiply EA byte by 2, CL times
D2 /5 SHR eb,CL Unsigned divide EA byte by 2, CL times
D2 /6 SHR eb,CL Multiply EA byte by 2, CL times
D2 /7 SAR eb,CL Signed divide EA byte by 2, CL times
*/
case 0xD2: PREFIX_NONE;
/*
D3 /0 ROL ed,CL Rotate 32-bit EA dword left CL times
D3 /1 ROR ed,CL Rotate 32-bit EA dword right CL times
D3 /2 RCL ed,CL Rotate 33-bits (CF, EA dword) left CL times
D3 /3 RCR ed,CL Rotate 33-bits (CF, EA dword) right CL times
D3 /4 SAL ed,CL Multiply EA dword by 2, CL times
D3 /5 SHR ed,CL Unsigned divide EAd word by 2, CL times
D3 /6 SHR ed,CL Multiply EA dword by 2, CL times
D3 /7 SAR ed,CL Signed divide EA dword by 2, CL times
*/
case 0xD3:
{
m_instr.modrm.load(m_instr.addr32);
switch(m_instr.modrm.n) {
case 0:
m_instr.fn = &CPUExecutor::ROL_ed_CL;
break;
case 1:
m_instr.fn = &CPUExecutor::ROR_ed_CL;
break;
case 2:
m_instr.fn = &CPUExecutor::RCL_ed_CL;
break;
case 3:
m_instr.fn = &CPUExecutor::RCR_ed_CL;
break;
case 4:
case 6: //SAL and SHL are the same
m_instr.fn = &CPUExecutor::SAL_ed_CL;
break;
case 5:
m_instr.fn = &CPUExecutor::SHR_ed_CL;
break;
case 7:
m_instr.fn = &CPUExecutor::SAR_ed_CL;
break;
default:
illegal_opcode();
}
ctb_op_ = m_instr.modrm.n;
ctb_idx_ = CTB_IDX_D3;
break;
}
case 0xD4: /* D4 ib AAM ASCII adjust AX after multiply */
case 0xD5: /* D5 ib AAD ASCII adjust AX before division */
case 0xD6: /* D6 SALC Set AL If Carry */
case 0xD7: /* D7 XLATB Set AL to memory byte DS:[eBX + unsigned AL] */
PREFIX_NONE;
case 0xD8: /* D8-DF FPU ESC */
case 0xD9:
case 0xDA:
case 0xDB:
case 0xDC:
case 0xDD:
case 0xDE:
case 0xDF:
PREFIX_NONE;
case 0xE0: /* E0 cb LOOPNZ cb DEC eCX; jump short if eCX<>0 and ZF=0 */
case 0xE1: /* E1 cb LOOPZ cb DEC eCX; jump short if eCX<>0 and zero (ZF=1) */
case 0xE2: /* E2 cb LOOP cb DEC eCX; jump short if eCX<>0 */
case 0xE3: /* E3 cb JECXZ cb Jump short if ECX register is zero */
PREFIX_NONE;
/* E4 ib IN AL,ib Input byte from immediate port into AL */
case 0xE4: PREFIX_NONE;
/* E5 ib IN EAX,ib Input dword from immediate port into EAX */
case 0xE5:
{
m_instr.ib = fetchb();
m_instr.fn = &CPUExecutor::IN_EAX_ib;
break;
}
/* E6 ib OUT ib,AL Output byte AL to immediate port number ib */
case 0xE6: PREFIX_NONE;
/* E7 ib OUT ib,EAX Output dword EAX to immediate port number ib */
case 0xE7:
{
m_instr.ib = fetchb();
m_instr.fn = &CPUExecutor::OUT_ib_EAX;
break;
}
/* E8 cd CALL cd Call near, offset relative to next instruction */
case 0xE8:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::CALL_rel32;
break;
}
/* E9 cd JMP cd Jump near displacement relative to next instruction */
case 0xE9:
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::JMP_rel32;
break;
}
/* EA cp JMP ptr16:32 Jump far/task/call/tss */
case 0xEA:
{
m_instr.id1 = fetchdw();
m_instr.iw2 = fetchw();
m_instr.fn = &CPUExecutor::JMP_ptr1632;
break;
}
/* EB cb JMP cb Jump short */
case 0xEB: PREFIX_NONE;
/* EC IN AL,DX Input byte from port DX into AL */
case 0xEC: PREFIX_NONE;
/* ED IN EAX,DX Input dword from port DX into EAX */
case 0xED:
{
m_instr.fn = &CPUExecutor::IN_EAX_DX;
break;
}
/* EE OUT DX,AL Output byte AL to port number DX */
case 0xEE: PREFIX_NONE;
/* EF OUT DX,EAX Output dword EAX to port number DX */
case 0xEF:
{
m_instr.fn = &CPUExecutor::OUT_DX_EAX;
break;
}
/*
F0 LOCK prefix
F1 prefix, does not generate #UD; INT1 (ICEBP) on 386+ TODO?
F2 REP/REPE prefix
F3 REPNE prefix
*/
/* F4 HLT Halt */
case 0xF4: PREFIX_NONE;
/* F5 CMC Complement carry flag */
case 0xF5: PREFIX_NONE;
/*
F6 /0 ib TEST eb,ib AND immediate byte into EA byte for flags only
F6 /2 NOT eb Reverse each bit of EA byte
F6 /3 NEG eb Two's complement negate EA byte
F6 /4 MUL eb Unsigned multiply (AX = AL * EA byte)
F6 /5 IMUL eb Signed multiply (AX = AL * EA byte)
F6 /6 DIV eb Unsigned divide AX by EA byte
F6 /7 IDIV eb Signed divide AX by EA byte (AL=Quo,AH=Rem)
*/
case 0xF6: PREFIX_NONE;
/*
F7 /0 id TEST ed,id AND immediate dword into EA dword for flags only
F7 /2 NOT ed Reverse each bit of EA dword
F7 /3 NEG ed Two's complement negate EA dword
F7 /4 MUL ed Unsigned multiply (EDX:EAX = EAX * EA dword)
F7 /5 IMUL ed Signed multiply (EDX:EAX = EAX * EA dword)
F7 /6 DIV ed Unsigned divide EDX:EAX by EA dword
F7 /7 IDIV ed Signed divide EDX:EAX by EA dword (EAX=Quo,EDX=Rem)
*/
case 0xF7:
{
m_instr.modrm.load(m_instr.addr32);
switch(m_instr.modrm.n) {
case 0:
case 1: // 1: undocumented alias
{
m_instr.id1 = fetchdw();
m_instr.fn = &CPUExecutor::TEST_ed_id;
break;
}
case 2:
m_instr.fn = &CPUExecutor::NOT_ed;
break;
case 3:
m_instr.fn = &CPUExecutor::NEG_ed;
break;
case 4:
m_instr.fn = &CPUExecutor::MUL_ed;
break;
case 5:
m_instr.fn = &CPUExecutor::IMUL_ed;
break;
case 6:
m_instr.fn = &CPUExecutor::DIV_ed;
break;
case 7:
m_instr.fn = &CPUExecutor::IDIV_ed;
break;
default:
illegal_opcode();
}
ctb_op_ = m_instr.modrm.n;
ctb_idx_ = CTB_IDX_F7;
break;
}
case 0xF8: /* F8 CLC Clear carry flag */
case 0xF9: /* F9 STC Set carry flag */
case 0xFA: /* FA CLI Clear interrupt flag; interrupts disabled */
case 0xFB: /* FB STI Set interrupt enable flag, interrupts enabled */
case 0xFC: /* FC CLD Clear direction flag, (E)SI and (E)DI will increment */
case 0xFD: /* FD STD Set direction flag so (E)SI and (E)DI will decrement */
PREFIX_NONE;
/*
FE /0 INC eb Increment EA byte by 1
FE /1 DEC eb Decrement EA byte by 1
*/
case 0xFE: PREFIX_NONE;
/*
FF /0 INC ed Increment EA dword by 1
FF /1 DEC ed Decrement EA dword by 1
FF /2 CALL ed Call near, offset absolute at EA dword
FF /3 CALL ep Call inter-segment, address at EA pointer
FF /4 JMP ed Jump near to EA dword (absolute offset)
FF /5 JMP ep Jump far (6-byte effective address in memory)
FF /6 PUSH md Push memory dword
*/
case 0xFF:
{
m_instr.modrm.load(m_instr.addr32);
switch(m_instr.modrm.n) {
case 0:
m_instr.fn = &CPUExecutor::INC_ed;
break;
case 1:
m_instr.fn = &CPUExecutor::DEC_ed;
break;
case 2:
m_instr.fn = &CPUExecutor::CALL_ed;
break;
case 3:
m_instr.fn = &CPUExecutor::CALL_m1632;
break;
case 4:
m_instr.fn = &CPUExecutor::JMP_ed;
break;
case 5:
if(m_instr.modrm.mod == 3) {
illegal_opcode();
break;
}
m_instr.fn = &CPUExecutor::JMP_m1632;
break;
case 6:
m_instr.fn = &CPUExecutor::PUSH_md;
break;
default:
illegal_opcode();
break;
}
ctb_op_ = m_instr.modrm.n;
ctb_idx_ = CTB_IDX_FF;
break;
}
default:
{
illegal_opcode();
}
} //switch
}
|
//
// CategoryGroup.h
// ExpenseMobile
//
// Created by <NAME> on 28/05/15.
// Copyright (c) 2015 Shibin. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <CoreData/CoreData.h>
@class Expense;
// Core Data managed object modelling one expense/income category.
// Generated-style NSManagedObject subclass: all properties are backed by
// Core Data attributes, hence the dynamic `retain` accessors.
@interface CategoryGroup : NSManagedObject
// Display name of the category.
@property (nonatomic, retain) NSString * categoryName;
// Numeric code used to pick the category's UI colour.
@property (nonatomic, retain) NSNumber * colorCode;
// Creation (or last-modified) date — presumably creation; confirm in model.
@property (nonatomic, retain) NSDate * date;
// Boolean flag (boxed): YES = expense category, NO = income category.
@property (nonatomic, retain) NSNumber * isExpense;
// Free-form miscellaneous text.
@property (nonatomic, retain) NSString * misc;
// Stable identifier for the category.
@property (nonatomic, retain) NSNumber * categoryID;
// To-one link to the associated Expense entity.
@property (nonatomic, retain) Expense *expenseRelationship;
@end
|
#!/usr/bin/env bash
# This file is part of The RetroPie Project
#
# The RetroPie Project is the legal property of its developers, whose names are
# too numerous to list here. Please refer to the COPYRIGHT.md file distributed with this source.
#
# See the LICENSE.md file at the top-level directory of this distribution and
# at https://raw.githubusercontent.com/RetroPie/RetroPie-Setup/master/LICENSE.md
#
# RetroPie scriptmodule metadata, consumed by the RetroPie-Setup framework.
rp_module_id="ecwolf"
rp_module_desc="ECWolf - ECWolf is an advanced source port for Wolfenstein 3D, Spear of Destiny, and Super 3D Noah's Ark based off of the Wolf4SDL code base. It also supports mods from .pk3 files."
rp_module_licence="GPL2 https://bitbucket.org/ecwolf/ecwolf/raw/5065aaefe055bff5a8bb8396f7f2ca5f2e2cab27/docs/license-gpl.txt"
rp_module_help="For registered versions, replace the shareware files by adding your full Wolf3d, Spear3D 1.4 version game files to $romdir/ports/wolf3d/."
# "exp" = experimental section of the setup menu.
rp_module_section="exp"
rp_module_flags=""
# Install build/runtime dependencies: both the SDL2 and SDL1.2 stacks plus zlib.
function depends_ecwolf() {
getDepends libsdl2-dev libsdl2-mixer-dev libsdl2-net-dev zlib1g-dev libsdl1.2-dev libsdl-mixer1.2-dev libsdl-net1.2-dev
}
# Fetch (or update) the upstream ECWolf sources into the build directory.
function sources_ecwolf() {
gitPullOrClone "$md_build" "https://bitbucket.org/ecwolf/ecwolf"
}
# Build ECWolf: apply the RPi keyboard patch, then an in-tree CMake build.
function build_ecwolf() {
cd "$md_build"
# Fetch the keyboard patch fresh each build (-N re-downloads only if newer).
wget -N -q https://raw.githubusercontent.com/tpo1990/ECWolf-RPI/master/ecwolf_keyboardpatch.diff
applyPatch ecwolf_keyboardpatch.diff
# -DGPL=ON selects the GPL-licensed code paths required for distribution.
cmake . -DCMAKE_BUILD_TYPE=Release -DGPL=ON
make
# Framework checks this path exists to decide whether the build succeeded.
md_ret_require="$md_build"
}
# List the build artefacts the framework should copy into $md_inst.
function install_ecwolf() {
md_ret_files=(
'ecwolf'
'ecwolf.pk3'
)
}
# Download and unpack the shareware game data, but only when the wolf3d rom
# directory is empty (never clobber a user's registered full-version files).
function game_data_ecwolf() {
    # `ls -A` counts dotfiles as content; the quoted path and 2>/dev/null make
    # this safe if the directory contains spaces or does not exist yet.
    if [[ -z "$(ls -A "$romdir/ports/wolf3d" 2>/dev/null)" ]]; then
        cd "$__tmpdir"
        downloadAndExtract "http://maniacsvault.net/ecwolf/files/shareware/wolf3d14.zip" "$romdir/ports/wolf3d/shareware"
        downloadAndExtract "http://maniacsvault.net/ecwolf/files/shareware/soddemo.zip" "$romdir/ports/wolf3d/shareware"
    fi
}
# Scan $romdir/ports/wolf3d for known game data sets and register a launcher
# for each one found.
#   $1 - absolute path of the ecwolf binary to embed in the launch scripts
function _add_games_ecwolf(){
    local ecw_bin="$1"
    # BUGFIX: 'ecw' was not declared local and leaked into the caller's scope.
    local ext path game ecw
    # Map of data-file extension -> display name shown in EmulationStation.
    declare -A games=(
        ['wl1']="Wolfenstein 3D (demo)"
        ['wl6']="Wolfenstein 3D"
        ['sod']="Wolfenstein 3D - Spear of Destiny"
        ['sd1']="Wolfenstein 3D - Spear of Destiny"
        ['sdm']="Wolfenstein 3D - Spear of Destiny (demo)"
        ['n3d']="Wolfenstein 3D - Super Noah’s Ark 3D"
        ['sd2']="Wolfenstein 3D - SoD MP2 - Return to Danger"
        ['sd3']="Wolfenstein 3D - SoD MP3 - Ultimate Challenge"
    )
    pushd "$romdir/ports/wolf3d"
    for game in "${!games[@]}"; do
        # First file matching the data-set extension, case-insensitively.
        ecw=$(find . -iname "*.$game" -print -quit)
        [[ -n "$ecw" ]] || continue
        ext="${ecw##*.}"
        path="${ecw%/*}"; path="${path#*/}"
        addPort "$md_id" "ecwolf" "${games[$game]}" "pushd $romdir/ports/wolf3d; bash %ROM%; popd" "$romdir/ports/wolf3d/${games[$game]}.ecwolf"
        _add_ecwolf_files_ecwolf "$romdir/ports/wolf3d/${games[$game]}.ecwolf" "$path" "$ext" "$ecw_bin"
    done
    popd
}
# Write the per-game launch script.
#   $1 - launch script path   $2 - data directory (relative to rom dir)
#   $3 - data-file extension passed to ecwolf via --data   $4 - ecwolf binary
# The heredoc body below is runtime output (the generated script), so it must
# stay byte-exact. NOTE(review): the generated script runs ecwolf in the
# foreground, so its trailing 'wait $!' has no background job to wait on —
# presumably a historical leftover; confirm before removing.
function _add_ecwolf_files_ecwolf() {
cat >"$1" <<_EOF_
cd "$2"
"$4" --data $3
wait \$!
_EOF_
}
# Public entry point: register launchers using the installed ecwolf binary.
function add_games_ecwolf() {
_add_games_ecwolf "$md_inst/ecwolf"
}
# Post-install configuration: create the rom dir, relocate ECWolf's config
# dirs under the RetroPie config root, fetch shareware data and register
# launchers (install mode only), then hand ownership to the run user.
function configure_ecwolf() {
    mkRomDir "ports/wolf3d"
    moveConfigDir "$home/.local/share/ecwolf" "$md_conf_root/ecwolf"
    moveConfigDir "$home/.config/ecwolf" "$md_conf_root/ecwolf"
    # Only download shareware files / add launchers on install, not removal.
    if [[ "$md_mode" == "install" ]]; then
        game_data_ecwolf
        add_games_ecwolf
    fi
    # BUGFIX: quote the chown arguments (SC2086) so an unusual $user value
    # cannot word-split or glob.
    chown -R "$user:$user" "$romdir/ports/wolf3d"
}
|
#!/bin/bash
# Resolve the script's real location and run from there, so all relative
# paths (log/, fifo/, work/, output/) are anchored to the script directory.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")
# --- Script Init ---
# Abort on any command failure, including failures inside pipelines.
set -e
set -o pipefail
# Start from an empty log directory each run.
mkdir -p log
rm -R -f log/*
# Trap handler: on any error/signal, log the process tree, kill every child
# of this script, and exit with the original failure code.
error_handler(){
    # BUGFIX: capture the status of the command that tripped the trap FIRST.
    # Previously $? was read after the echo below, so it always saw the
    # echo's status (0) and the real exit code was lost.
    exit_code=$?
    echo 'Run Error - terminating'
    set +x
    group_pid=$(ps -p $$ -o pgid --no-headers)
    sess_pid=$(ps -p $$ -o sess --no-headers)
    printf "Script PID:%d, GPID:%s, SPID:%d" $$ $group_pid $sess_pid >> log/killout.txt
    if hash pstree 2>/dev/null; then
        # Preferred path: pstree gives the full descendant list; extract the
        # bracketed PIDs and kill everything except ourselves/our group lead.
        pstree -pn $$ >> log/killout.txt
        PIDS_KILL=$(pstree -pn $$ | grep -o "([[:digit:]]*)" | grep -o "[[:digit:]]*")
        kill -9 $(echo "$PIDS_KILL" | grep -v $group_pid | grep -v $$) 2>/dev/null
    else
        # Fallback: use the process group, excluding celery workers which
        # share the group but must survive (presumably the task runner).
        ps f -g $sess_pid > log/subprocess_list
        PIDS_KILL=$(pgrep -a --pgroup $group_pid | grep -v celery | grep -v $group_pid | grep -v $$)
        echo "$PIDS_KILL" >> log/killout.txt
        kill -9 $(echo "$PIDS_KILL" | awk 'BEGIN { FS = "[ \t\n]+" }{ print $1 }') 2>/dev/null
    fi
    # Never exit 0 from the error path.
    exit $(( 1 > $exit_code ? 1 : $exit_code ))
}
# Route fatal signals and ERR through the cleanup handler.
# NOTE(review): SIGKILL cannot actually be trapped; listing it is a harmless
# no-op but ineffective.
trap error_handler QUIT HUP INT KILL TERM ERR
touch log/stderror.err
# Background watcher that monitors this script's process tree.
ktools_monitor.sh $$ & pid0=$!
# --- Setup run dirs ---
# Clear previous outputs (keeping summary-info files) and all work/fifo state.
find output/* ! -name '*summary-info*' -type f -exec rm -f {} +
rm -R -f fifo/*
rm -R -f work/*
mkdir work/kat/
# Create one ground-up-loss FIFO per event partition (P1..P20).
# Replaces 20 hand-written mkfifo lines with a loop; creation order is
# identical to the original.
for p in {1..20}; do
    mkfifo "fifo/gul_P$p"
done
# Create the summary-set-1 FIFOs for each partition: the summarycalc output,
# its pltcalc tee branch, and the pltcalc stream. Same per-partition creation
# order (summary, summarypltcalc, pltcalc) as the original 60 lines.
for p in {1..20}; do
    mkfifo "fifo/gul_S1_summary_P$p"
    mkfifo "fifo/gul_S1_summarypltcalc_P$p"
    mkfifo "fifo/gul_S1_pltcalc_P$p"
done
# --- Do ground up loss computes ---
# One pltcalc consumer per partition, each writing its own work/kat file.
# Only P1 omits '-s': it emits the CSV header, while P2..P20 skip it, so the
# final kat concatenation yields exactly one header row. PIDs are captured
# individually because the barrier 'wait' below names each one.
pltcalc < fifo/gul_S1_summarypltcalc_P1 > work/kat/gul_S1_pltcalc_P1 & pid1=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P2 > work/kat/gul_S1_pltcalc_P2 & pid2=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P3 > work/kat/gul_S1_pltcalc_P3 & pid3=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P4 > work/kat/gul_S1_pltcalc_P4 & pid4=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P5 > work/kat/gul_S1_pltcalc_P5 & pid5=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P6 > work/kat/gul_S1_pltcalc_P6 & pid6=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P7 > work/kat/gul_S1_pltcalc_P7 & pid7=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P8 > work/kat/gul_S1_pltcalc_P8 & pid8=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P9 > work/kat/gul_S1_pltcalc_P9 & pid9=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P10 > work/kat/gul_S1_pltcalc_P10 & pid10=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P11 > work/kat/gul_S1_pltcalc_P11 & pid11=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P12 > work/kat/gul_S1_pltcalc_P12 & pid12=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P13 > work/kat/gul_S1_pltcalc_P13 & pid13=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P14 > work/kat/gul_S1_pltcalc_P14 & pid14=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P15 > work/kat/gul_S1_pltcalc_P15 & pid15=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P16 > work/kat/gul_S1_pltcalc_P16 & pid16=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P17 > work/kat/gul_S1_pltcalc_P17 & pid17=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P18 > work/kat/gul_S1_pltcalc_P18 & pid18=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P19 > work/kat/gul_S1_pltcalc_P19 & pid19=$!
pltcalc -s < fifo/gul_S1_summarypltcalc_P20 > work/kat/gul_S1_pltcalc_P20 & pid20=$!
# Fan each partition's summary stream out to its pltcalc FIFO. The tee keeps
# the plumbing uniform (extra consumers per partition can be added as more
# tee targets later); PIDs feed the barrier 'wait' below.
tee < fifo/gul_S1_summary_P1 fifo/gul_S1_summarypltcalc_P1 > /dev/null & pid21=$!
tee < fifo/gul_S1_summary_P2 fifo/gul_S1_summarypltcalc_P2 > /dev/null & pid22=$!
tee < fifo/gul_S1_summary_P3 fifo/gul_S1_summarypltcalc_P3 > /dev/null & pid23=$!
tee < fifo/gul_S1_summary_P4 fifo/gul_S1_summarypltcalc_P4 > /dev/null & pid24=$!
tee < fifo/gul_S1_summary_P5 fifo/gul_S1_summarypltcalc_P5 > /dev/null & pid25=$!
tee < fifo/gul_S1_summary_P6 fifo/gul_S1_summarypltcalc_P6 > /dev/null & pid26=$!
tee < fifo/gul_S1_summary_P7 fifo/gul_S1_summarypltcalc_P7 > /dev/null & pid27=$!
tee < fifo/gul_S1_summary_P8 fifo/gul_S1_summarypltcalc_P8 > /dev/null & pid28=$!
tee < fifo/gul_S1_summary_P9 fifo/gul_S1_summarypltcalc_P9 > /dev/null & pid29=$!
tee < fifo/gul_S1_summary_P10 fifo/gul_S1_summarypltcalc_P10 > /dev/null & pid30=$!
tee < fifo/gul_S1_summary_P11 fifo/gul_S1_summarypltcalc_P11 > /dev/null & pid31=$!
tee < fifo/gul_S1_summary_P12 fifo/gul_S1_summarypltcalc_P12 > /dev/null & pid32=$!
tee < fifo/gul_S1_summary_P13 fifo/gul_S1_summarypltcalc_P13 > /dev/null & pid33=$!
tee < fifo/gul_S1_summary_P14 fifo/gul_S1_summarypltcalc_P14 > /dev/null & pid34=$!
tee < fifo/gul_S1_summary_P15 fifo/gul_S1_summarypltcalc_P15 > /dev/null & pid35=$!
tee < fifo/gul_S1_summary_P16 fifo/gul_S1_summarypltcalc_P16 > /dev/null & pid36=$!
tee < fifo/gul_S1_summary_P17 fifo/gul_S1_summarypltcalc_P17 > /dev/null & pid37=$!
tee < fifo/gul_S1_summary_P18 fifo/gul_S1_summarypltcalc_P18 > /dev/null & pid38=$!
tee < fifo/gul_S1_summary_P19 fifo/gul_S1_summarypltcalc_P19 > /dev/null & pid39=$!
tee < fifo/gul_S1_summary_P20 fifo/gul_S1_summarypltcalc_P20 > /dev/null & pid40=$!
# One summarycalc per partition: read ground-up losses from fifo/gul_Pn and
# write summary set 1 to its FIFO; stderr collected in the shared error log.
# These background jobs are not individually waited on (the pipeline drains
# when the FIFOs close), so a loop replaces the 20 hand-written lines.
for p in {1..20}; do
    ( summarycalc -i -1 "fifo/gul_S1_summary_P$p" < "fifo/gul_P$p" ) 2>> log/stderror.err &
done
# Producer pipelines, one per partition: eve splits the event stream into 20
# slices, getmodel builds effective damage CDFs, gulcalc samples ground-up
# losses (-S100 samples, -L100 loss threshold, -r random numbers from file,
# -a1 allocation rule, item-level stream to stdout) into the partition FIFO.
# Loop replaces the 20 hand-written lines; only the slice index varies.
for p in {1..20}; do
    ( eve "$p" 20 | getmodel | gulcalc -S100 -L100 -r -a1 -i - > "fifo/gul_P$p" ) 2>> log/stderror.err &
done
# Barrier: block until every pltcalc worker and tee fan-out has finished, so
# all work/kat partials are complete before concatenation.
wait $pid1 $pid2 $pid3 $pid4 $pid5 $pid6 $pid7 $pid8 $pid9 $pid10 $pid11 $pid12 $pid13 $pid14 $pid15 $pid16 $pid17 $pid18 $pid19 $pid20 $pid21 $pid22 $pid23 $pid24 $pid25 $pid26 $pid27 $pid28 $pid29 $pid30 $pid31 $pid32 $pid33 $pid34 $pid35 $pid36 $pid37 $pid38 $pid39 $pid40
# --- Do ground up loss kats ---
# Concatenate the per-partition pltcalc outputs (in partition order) into the
# single result CSV.
kat work/kat/gul_S1_pltcalc_P1 work/kat/gul_S1_pltcalc_P2 work/kat/gul_S1_pltcalc_P3 work/kat/gul_S1_pltcalc_P4 work/kat/gul_S1_pltcalc_P5 work/kat/gul_S1_pltcalc_P6 work/kat/gul_S1_pltcalc_P7 work/kat/gul_S1_pltcalc_P8 work/kat/gul_S1_pltcalc_P9 work/kat/gul_S1_pltcalc_P10 work/kat/gul_S1_pltcalc_P11 work/kat/gul_S1_pltcalc_P12 work/kat/gul_S1_pltcalc_P13 work/kat/gul_S1_pltcalc_P14 work/kat/gul_S1_pltcalc_P15 work/kat/gul_S1_pltcalc_P16 work/kat/gul_S1_pltcalc_P17 work/kat/gul_S1_pltcalc_P18 work/kat/gul_S1_pltcalc_P19 work/kat/gul_S1_pltcalc_P20 > output/gul_S1_pltcalc.csv & kpid1=$!
wait $kpid1
# Remove intermediate state; outputs remain in output/.
rm -R -f work/*
rm -R -f fifo/*
# Stop ktools watcher
kill -9 $pid0
|
<gh_stars>0
'use strict';
var mathExt = require('../index');
var assert = require('assert');
describe('#round10', function() {
  // The alias was previously named `ceil`, which was misleading: the
  // function under test is mathExt.round, not a ceiling operation.
  var round = mathExt.round;
  it('test round ' + 1.234, function() {
    // assert.equal(actual, expected): actual value comes first so that
    // failure messages report the values in the right order.
    assert.equal(round(1.234, -2), 1.23);
  });
  it('test round ' + 1.235, function() {
    assert.equal(round(1.235, -2), 1.24);
  });
});
|
<filename>jena-3.0.1/jena-jdbc/jena-jdbc-core/src/main/java/org/apache/jena/jdbc/results/SelectResults.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.jdbc.results;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.jena.atlas.lib.Closeable;
import org.apache.jena.graph.Node ;
import org.apache.jena.jdbc.results.metadata.SelectResultsMetadata;
import org.apache.jena.jdbc.statements.JenaStatement;
import org.apache.jena.query.QueryCancelledException ;
import org.apache.jena.query.QueryExecution ;
import org.apache.jena.query.ResultSetFactory ;
import org.apache.jena.sparql.core.Var ;
import org.apache.jena.sparql.engine.binding.Binding ;
import org.apache.jena.sparql.resultset.ResultSetPeekable ;
/**
 * Represents SPARQL SELECT results as a forward-only, streamed JDBC result
 * set. Rows are pulled lazily from the underlying {@link ResultSetPeekable}.
 */
public class SelectResults extends StreamedResults<Binding> {

    // Underlying SPARQL results; set to null once the stream is closed
    private ResultSetPeekable innerResults;
    // Result variable names in declaration order; JDBC column N maps to columns.get(N - 1)
    private List<String> columns;
    private SelectResultsMetadata metadata;

    /**
     * Creates new select results
     *
     * @param statement
     *            Statement that created the result set
     * @param qe
     *            Query Execution
     * @param results
     *            SPARQL Results
     * @param commit
     *            Whether a commit is necessary when the results are closed
     * @throws SQLException
     *             Thrown if the arguments are invalid
     */
    public SelectResults(JenaStatement statement, QueryExecution qe, ResultSetPeekable results, boolean commit)
            throws SQLException {
        super(statement, qe, commit);
        if (results == null)
            throw new SQLException("SPARQL Results cannot be null");

        this.innerResults = results;
        // Diamond operator (Java 7+): element type is inferred from the field declaration
        this.columns = new ArrayList<>(this.innerResults.getResultVars());
        this.metadata = statement.getJenaConnection().applyPostProcessors(new SelectResultsMetadata(this, this.innerResults));
    }

    /**
     * Creates new select results
     *
     * @param statement
     *            Statement that created the result set
     * @param qe
     *            Query Execution
     * @param results
     *            SPARQL Results
     * @param commit
     *            Whether a commit is necessary when the results are closed
     * @throws SQLException
     *             Thrown if the arguments are invalid
     */
    public SelectResults(JenaStatement statement, QueryExecution qe, org.apache.jena.query.ResultSet results, boolean commit)
            throws SQLException {
        this(statement, qe, ResultSetFactory.makePeekable(results), commit);
    }

    @Override
    public void closeStreamInternal() {
        if (this.innerResults != null) {
            if (this.innerResults instanceof Closeable) {
                ((Closeable) this.innerResults).close();
            }
            this.innerResults = null;
        }
    }

    /**
     * Finds the 1 based index of the column with the given label
     *
     * @param columnLabel
     *            Column label
     * @return 1 based column index
     * @throws SQLException
     *             Thrown if the result set is closed or the column does not exist
     */
    public int findColumn(String columnLabel) throws SQLException {
        if (this.isClosed())
            throw new SQLException("Result Set is closed");
        // indexOf() compares with equals(), matching the original manual scan
        int index = this.columns.indexOf(columnLabel);
        if (index >= 0) {
            // Remember that JDBC uses a 1 based index
            return index + 1;
        }
        throw new SQLException("The given column does not exist in this result set");
    }

    @Override
    protected String findColumnLabel(int columnIndex) throws SQLException {
        if (this.isClosed())
            throw new SQLException("Result Set is closed");
        if (columnIndex >= 1 && columnIndex <= this.columns.size()) {
            // Remember that JDBC uses a 1 based index
            return this.columns.get(columnIndex - 1);
        } else {
            throw new SQLException("Column Index is out of bounds");
        }
    }

    @Override
    protected Node getNode(String columnLabel) throws SQLException {
        if (this.isClosed())
            throw new SQLException("Result Set is closed");
        if (this.getCurrentRow() == null)
            throw new SQLException("Not currently at a row");
        if (!this.columns.contains(columnLabel))
            throw new SQLException("The given column does not exist in the result set");
        // May legitimately return null when the variable is unbound in this row
        return this.getCurrentRow().get(Var.alloc(columnLabel));
    }

    @Override
    public ResultSetMetaData getMetaData() {
        return this.metadata;
    }

    /**
     * Gets whether there are further rows in the underlying SELECT results
     */
    @Override
    protected boolean hasNext() throws SQLException {
        // No null check here because superclass will not call us after we are
        // closed and set to null
        try {
            return this.innerResults.hasNext();
        } catch (QueryCancelledException e) {
            throw new SQLException("Query was cancelled, it is likely that your query exceeded the specified execution timeout",
                    e);
        } catch (Throwable e) {
            // Wrap as SQL exception
            throw new SQLException("Unexpected error while moving through results", e);
        }
    }

    /**
     * Gets the next row from the underlying SELECT results
     */
    @Override
    protected Binding moveNext() throws SQLException {
        // No null check here because superclass will not call us after we are
        // closed and set to null
        try {
            return this.innerResults.nextBinding();
        } catch (QueryCancelledException e) {
            throw new SQLException("Query was cancelled, it is likely that your query exceeded the specified execution timeout",
                    e);
        } catch (Throwable e) {
            // Wrap as SQL exception
            throw new SQLException("Unexpected error while moving through results", e);
        }
    }
}
|
package org.glowroot.instrumentation.hibernate;
import org.glowroot.instrumentation.api.Descriptor;
import org.glowroot.instrumentation.api.Descriptor.CaptureKind;
/**
 * Glowroot instrumentation descriptor for Hibernate.
 * <p>
 * Query-style operations (Criteria/TypedQuery, save, update, merge, persist,
 * delete) are captured as timers only; transaction boundaries (commit,
 * rollback) and session flushes are captured as local spans. All advices share
 * the "hibernate" nesting group so nested Hibernate calls are not re-captured.
 */
@Descriptor(
        id = "hibernate",
        name = "Hibernate",
        advice = {
                // --- query execution: timer-only capture ---
                @Descriptor.Advice(
                        className = "org.hibernate.Criteria",
                        methodName = "list",
                        methodParameterTypes = {
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate query"),
                @Descriptor.Advice(
                        className = "javax.persistence.TypedQuery",
                        subTypeRestriction = "org.hibernate.*",
                        methodName = "getResultList",
                        methodParameterTypes = {
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate query"),
                // --- transaction boundaries: captured as local spans ---
                @Descriptor.Advice(
                        className = "org.hibernate.Transaction",
                        methodName = "commit",
                        methodParameterTypes = {
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.LOCAL_SPAN,
                        spanMessageTemplate = "hibernate commit",
                        timerName = "hibernate commit"),
                @Descriptor.Advice(
                        className = "org.hibernate.Transaction",
                        methodName = "rollback",
                        methodParameterTypes = {
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.LOCAL_SPAN,
                        spanMessageTemplate = "hibernate rollback",
                        timerName = "hibernate rollback"),
                @Descriptor.Advice(
                        className = "javax.persistence.EntityTransaction",
                        subTypeRestriction = "org.hibernate.Transaction",
                        methodName = "commit",
                        methodParameterTypes = {
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.LOCAL_SPAN,
                        spanMessageTemplate = "hibernate commit",
                        timerName = "hibernate commit"),
                @Descriptor.Advice(
                        className = "javax.persistence.EntityTransaction",
                        subTypeRestriction = "org.hibernate.Transaction",
                        methodName = "rollback",
                        methodParameterTypes = {
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.LOCAL_SPAN,
                        spanMessageTemplate = "hibernate rollback",
                        timerName = "hibernate rollback"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "flush",
                        methodParameterTypes = {
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.LOCAL_SPAN,
                        spanMessageTemplate = "hibernate flush",
                        timerName = "hibernate flush"),
                // --- entity write operations: timer-only capture; each method is
                // instrumented in both its (Object) and (String, Object) overloads ---
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "saveOrUpdate",
                        methodParameterTypes = {
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate saveOrUpdate"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "saveOrUpdate",
                        methodParameterTypes = {
                                "java.lang.String",
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate saveOrUpdate"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "save",
                        methodParameterTypes = {
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate save"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "save",
                        methodParameterTypes = {
                                "java.lang.String",
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate save"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "merge",
                        methodParameterTypes = {
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate merge"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "merge",
                        methodParameterTypes = {
                                "java.lang.String",
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate merge"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "update",
                        methodParameterTypes = {
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate update"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "update",
                        methodParameterTypes = {
                                "java.lang.String",
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate update"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "persist",
                        methodParameterTypes = {
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate persist"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "persist",
                        methodParameterTypes = {
                                "java.lang.String",
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate persist"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "delete",
                        methodParameterTypes = {
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate delete"),
                @Descriptor.Advice(
                        className = "org.hibernate.Session",
                        methodName = "delete",
                        methodParameterTypes = {
                                "java.lang.String",
                                "java.lang.Object"
                        },
                        nestingGroup = "hibernate",
                        captureKind = CaptureKind.TIMER,
                        timerName = "hibernate delete")
        })
public class InstrumentationDescriptor {}
|
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <cstdlib>
#include <iostream>

#include "otbImageFileReader.h"
#include "otbVectorDataToMapFilter.h"
#include "otbAlphaBlendingFunctor.h"
#include "itkBinaryFunctorImageFilter.h"
#include "otbImageFileWriter.h"
// Software Guide : BeginCommandLineArgs
// INPUTS: {Scene.png}
// OUTPUTS: {LSDOutput.png}
// Software Guide : EndCommandLineArgs
// Software Guide : BeginLatex
//
// This example illustrates the use of the
// \doxygen{otb}{LineSegmentDetector}\cite{LSD}, also known as {\em Lucy in the
// Sky with Diamonds}.
// This filter is designed to extract segments in mono channel images.
//
// The first step required to use this filter is to include its header file.
//
// Software Guide : EndLatex
// Software Guide : BeginCodeSnippet
#include "otbLineSegmentDetector.h"
// Software Guide : EndCodeSnippet
int main(int argc, char * argv[])
{
  // Fail early with a usage message instead of dereferencing missing
  // argv entries (the original read argv[1]/argv[2] unconditionally).
  if (argc < 3)
    {
    std::cerr << "Usage: " << argv[0] << " inputImageFile outputImageFile" << std::endl;
    return EXIT_FAILURE;
    }

  const char * infname = argv[1];
  const char * outfname = argv[2];

  typedef unsigned char InputPixelType;
  typedef double PrecisionType;
  const unsigned int Dimension = 2;

  // Software Guide : BeginLatex
  //
  // As usual, we start by defining the types for the input image and
  // the image file reader.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef otb::Image<InputPixelType, Dimension> ImageType;
  typedef otb::ImageFileReader<ImageType> ReaderType;
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // We instantiate the reader and set the file name for the input image.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  ReaderType::Pointer reader = ReaderType::New();
  reader->SetFileName(infname);
  reader->GenerateOutputInformation();
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // We define now the type for the segment detector filter. It is
  // templated over the input image type and the precision with which
  // the coordinates of the detected segments will be given. It is
  // recommended to set this precision to a real type. The output of the
  // filter will be a \doxygen{otb}{VectorData}.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef otb::LineSegmentDetector<ImageType,
      PrecisionType> LsdFilterType;

  LsdFilterType::Pointer lsdFilter = LsdFilterType::New();
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // In order to be able to display the results, we will draw the
  // detected segments on top of the input image. For this matter, we
  // will use a \doxygen{otb}{VectorDataToMapFilter} which
  // is templated over the input vector data type and the output image
  // type, and a combination of a \doxygen{itk}{binaryFunctorImageFilter}
  // and the \doxygen{otb}{Functor}{AlphaBlendingFunctor}.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef otb::VectorData<PrecisionType> VectorDataType;
  typedef otb::VectorDataToMapFilter<VectorDataType,
      ImageType> VectorDataRendererType;
  VectorDataRendererType::Pointer vectorDataRenderer = VectorDataRendererType::New();

  typedef otb::Functor::AlphaBlendingFunctor<InputPixelType,
      InputPixelType, InputPixelType> FunctorType;
  typedef itk::BinaryFunctorImageFilter<ImageType, ImageType,
      ImageType, FunctorType> BlendingFilterType;
  BlendingFilterType::Pointer blendingFilter = BlendingFilterType::New();
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // We can now define the type for the writer, instantiate it and set
  // the file name for the output image.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  typedef otb::ImageFileWriter<ImageType> WriterType;
  WriterType::Pointer writer = WriterType::New();
  writer->SetFileName(outfname);
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // We plug the pipeline.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  lsdFilter->SetInput(reader->GetOutput());
  vectorDataRenderer->SetInput(lsdFilter->GetOutput());
  vectorDataRenderer->SetSize(reader->GetOutput()->GetLargestPossibleRegion().GetSize());
  vectorDataRenderer->SetRenderingStyleType(VectorDataRendererType::Binary);
  blendingFilter->SetInput1(reader->GetOutput());
  blendingFilter->SetInput2(vectorDataRenderer->GetOutput());
  blendingFilter->GetFunctor().SetAlpha(0.25);
  writer->SetInput(blendingFilter->GetOutput());
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  //
  // Before calling the \code{Update()} method of the writer in order to
  // trigger the pipeline execution, we call the
  // \code{GenerateOutputInformation()} of the reader, so the LSD
  // filter gets the information about image size and spacing.
  //
  // Software Guide : EndLatex

  // Software Guide : BeginCodeSnippet
  reader->GenerateOutputInformation();
  writer->Update();
  // Software Guide : EndCodeSnippet

  // Software Guide : BeginLatex
  // Figure~\ref{fig:LSD} shows the result of applying the line segment
  // detection to an image.
  // \begin{figure}
  // \center
  // \includegraphics[width=0.35\textwidth]{Scene.eps}
  // \includegraphics[width=0.35\textwidth]{LSDOutput.eps}
  // \itkcaption[LSD Application]{Result of applying the
  // \doxygen{otb}{LineSegmentDetector} to an image.}
  // \label{fig:LSD}
  // \end{figure}
  //
  // Software Guide : EndLatex

  return EXIT_SUCCESS;
}
|
#!/usr/bin/env bash

# PLEASE NOTE: This script has been automatically generated by conda-smithy. Any changes here
# will be lost next time ``conda smithy rerender`` is run. If you would like to make permanent
# changes to this script, consider a proposal to conda-smithy so that other feedstocks can also
# benefit from the improvement.

# Resolve the feedstock root relative to this script's location so the
# script works regardless of the caller's current directory.
FEEDSTOCK_ROOT=$(cd "$(dirname "$0")/.."; pwd;)
RECIPE_ROOT=$FEEDSTOCK_ROOT/recipe

docker info

# .condarc contents injected into the container below (via $config).
config=$(cat <<CONDARC
channels:
 - conda-forge
 - defaults
conda-build:
 root-dir: /feedstock_root/build_artefacts
show_channel_urls: true
CONDARC
)

# In order for the conda-build process in the container to write to the mounted
# volumes, we need to run with the same id as the host machine, which is
# normally the owner of the mounted volumes, or at least has write permission
HOST_USER_ID=$(id -u)
# Check if docker-machine is being used (normally on OSX) and get the uid from
# the VM
if hash docker-machine 2> /dev/null && docker-machine active > /dev/null; then
    HOST_USER_ID=$(docker-machine ssh $(docker-machine active) id -u)
fi

# Remove the sentinel file so a stale one cannot mask a failed build.
rm -f "$FEEDSTOCK_ROOT/build_artefacts/conda-forge-build-done"

# The here-doc below is the script executed *inside* the container; it is fed
# to `bash` on the container's stdin via `docker run -i ... -a stdin`.
cat << EOF | docker run -i \
                        -v "${RECIPE_ROOT}":/recipe_root \
                        -v "${FEEDSTOCK_ROOT}":/feedstock_root \
                        -e HOST_USER_ID="${HOST_USER_ID}" \
                        -e CLANG_VARIANT="${CLANG_VARIANT}" \
                        -e CONDA_PY="${CONDA_PY}" \
                        -a stdin -a stdout -a stderr \
                        condaforge/linux-anvil \
                        bash || exit 1

set -e
set +x
export BINSTAR_TOKEN=${BINSTAR_TOKEN}
set -x
export PYTHONUNBUFFERED=1
echo "$config" > ~/.condarc
# A lock sometimes occurs with incomplete builds. The lock file is stored in build_artefacts.
conda clean --lock
conda install --yes --quiet conda-forge-build-setup
source run_conda_forge_build_setup
conda build /recipe_root --quiet || exit 1
upload_or_check_non_existence /recipe_root conda-forge --channel=main || exit 1
touch /feedstock_root/build_artefacts/conda-forge-build-done
EOF

# double-check that the build got to the end
# see https://github.com/conda-forge/conda-smithy/pull/337
# for a possible fix
set -x
test -f "$FEEDSTOCK_ROOT/build_artefacts/conda-forge-build-done" || exit 1
|
<gh_stars>1-10
require 'csv'
require 'find'
require_relative 'download_umls_notice'
require_relative 'temp_dir'
module Inferno
  module Terminology
    module Tasks
      # Converts a UMLS MRCONSO.RRF dump into Inferno's pipe-delimited
      # `terminology_umls.txt` file, keeping selected term types for a fixed
      # set of code systems (SNOMED, LOINC, ICD9/10, CPT/HCPCS, RXNORM, CVX).
      class ProcessUMLS
        include TempDir
        include DownloadUMLSNotice

        # UMLS release version; determines which versioned temp dir is scanned
        attr_reader :version

        def initialize(version:)
          @version = version
        end

        def run # rubocop:disable Metrics/CyclomaticComplexity
          Inferno.logger.info 'Looking for `./tmp/terminology/MRCONSO.RRF`...'
          # NOTE: the dot is escaped; the original /MRCONSO.RRF$/ matched any
          # character in that position (e.g. "MRCONSOxRRF").
          input_file = Find.find(versioned_temp_dir).find { |f| /MRCONSO\.RRF$/ =~ f }
          if input_file
            start = Time.now
            output_filename = File.join(versioned_temp_dir, 'terminology_umls.txt')
            output = File.open(output_filename, 'w:UTF-8')
            line = 0
            excluded = 0
            excluded_systems = Hash.new(0)
            begin
              Inferno.logger.info "Writing to #{output_filename}..."
              # MRCONSO.RRF is pipe-delimited with no quoting; use a NUL quote
              # character so embedded quotes do not confuse the CSV parser.
              CSV.foreach(input_file, headers: false, col_sep: '|', quote_char: "\x00") do |row|
                line += 1
                include_code = false
                code_system = row[11]
                code = row[13]
                description = row[14]
                case code_system
                when 'SNOMEDCT_US'
                  code_system = 'SNOMED'
                  include_code = (row[4] == 'PF' && ['FN', 'OAF'].include?(row[12]))
                when 'LNC'
                  code_system = 'LOINC'
                  include_code = true
                when 'ICD10CM', 'ICD10PCS'
                  code_system = 'ICD10'
                  include_code = (row[12] == 'PT')
                when 'ICD9CM'
                  code_system = 'ICD9'
                  include_code = (row[12] == 'PT')
                when 'CPT', 'HCPCS'
                  include_code = (row[12] == 'PT')
                when 'MTHICD9'
                  code_system = 'ICD9'
                  include_code = true
                when 'RXNORM'
                  include_code = true
                when 'CVX'
                  include_code = ['PT', 'OP'].include?(row[12])
                when 'SRC'
                  # 'SRC' rows define the data sources in the file
                  include_code = false
                else
                  include_code = false
                  excluded_systems[code_system] += 1
                end
                if include_code
                  output.write("#{code_system}|#{code}|#{description}\n")
                else
                  excluded += 1
                end
              end
            rescue StandardError => e
              Inferno.logger.info "Error at line #{line}"
              Inferno.logger.info e.message
            ensure
              # Close in an ensure block so the file handle is released even
              # when an exception other than StandardError propagates.
              output.close
            end
            Inferno.logger.info "Processed #{line} lines, excluding #{excluded} redundant entries."
            Inferno.logger.info "Excluded code systems: #{excluded_systems}" unless excluded_systems.empty?
            finish = Time.now
            minutes = ((finish - start) / 60)
            seconds = (minutes - minutes.floor) * 60
            Inferno.logger.info "Completed in #{minutes.floor} minute(s) #{seconds.floor} second(s)."
            Inferno.logger.info 'Done.'
          else
            download_umls_notice
          end
        end
      end
    end
  end
end
|
package com.example.testng.Annotation.Test;
import com.example.testng.Annotation.DataProvider.dataProviderAnnotation;
import org.testng.annotations.Test;
/**
 * dataProviderClass = tells the framework which class supplies the test data.
 * dataProvider = tells the framework the name of the data provider to use.
 */
public class TestAnnotationData {
    // Receives one (name, age, type) tuple per row produced by "dataProviderObject".
    @Test(dataProviderClass = dataProviderAnnotation.class, dataProvider = "dataProviderObject")
    public void DataProviderTest(String name, int age, String Type) {
        System.out.println(name + "今年" + age + "岁了,是一个" + Type);
    }
}
|
// Inject jQuery immediately, then inject the music-player script once the
// page has finished loading.
const s = document.createElement('script');
s.type = 'text/javascript';
s.src = 'https://lib.baomitu.com/jquery/3.4.1/jquery.min.js';
document.body.appendChild(s);

// Use addEventListener instead of assigning window.onload: a plain
// `window.onload = ...` silently overwrites any load handler registered
// elsewhere on the page.
window.addEventListener('load', () => {
  const m = document.createElement('script');
  m.type = 'text/javascript';
  m.src = 'https://player.lmih.cn/player/js/player.js';
  m.setAttribute('id', 'myhk');
  m.setAttribute('key', '159325101761');
  m.setAttribute('m', 1);
  document.body.appendChild(m);
});
// Promise.resolve().then(() => {
// const s = document.createElement('script');
// s.type = 'text/javascript';
// s.src = 'https://lib.baomitu.com/jquery/3.4.1/jquery.min.js';
// document.body.appendChild(s);
// }).then(()=> {
// let m = document.createElement('script');
// m.type = 'text/javascript';
// m.src = 'https://player.lmih.cn/player/js/player.js';
// m.setAttribute("id", "myhk");
// m.setAttribute("key", "159325101761");
// m.setAttribute("m", 1);
// document.body.appendChild(m);
// })
|
/**
* @jest-environment jsdom
*/
import { render } from '@testing-library/react';
import { Grid } from '..';
describe('components/Grid', () => {
  // Responsive column counts per breakpoint (sm/md/lg/xl) plus the gap token.
  const props = {
    smCols: '2',
    mdCols: '2',
    lgCols: '3',
    xlCols: '4',
    space: '4',
  };

  // Smoke test: Grid mounts without throwing when all props are provided.
  test('should render the component correctly with all the props', () => {
    const rendered = render(<Grid {...props}>Hey im working fine</Grid>);
    expect(rendered.baseElement).toBeInTheDocument();
  });

  // Smoke test: every breakpoint prop is optional, so children alone suffice.
  test('should render the component correctly with just the mandatory props', () => {
    const rendered = render(<Grid>Hey im working fine</Grid>);
    expect(rendered.baseElement).toBeInTheDocument();
  });
});
|
<gh_stars>1-10
package com.bhm.sdk.demo.http;
import com.bhm.sdk.demo.entity.DoGetEntity;
import com.bhm.sdk.demo.entity.UpLoadEntity;
import io.reactivex.Observable;
import okhttp3.MultipartBody;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.Field;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Multipart;
import retrofit2.http.POST;
import retrofit2.http.Part;
import retrofit2.http.Query;
import retrofit2.http.Streaming;
import retrofit2.http.Url;
/**
 * Retrofit HTTP API definition.
 * <p>
 * Created by bhm on 2018/5/11.
 */
public interface HttpApi {

    /** GET with a token header and a query parameter. */
    @GET("/api/4/news/latest")
    Observable<DoGetEntity> getData(@Header("token") String token,
                                    @Query("type") String type);

    /** Form-encoded POST with a single field. */
    @FormUrlEncoded
    @POST("v2/movie/in_theaters")
    Observable<DoGetEntity> getDataPost(@Field("once") boolean once_no);

    /* Upload a file (multipart: separate parts for the field and the file) */
    @Multipart
    @POST("common/update-avatar")
    Observable<UpLoadEntity> upload(
            @Header("Authorization") String token,
            @Part("role_id") RequestBody role_id,
            @Part MultipartBody.Part file);

    /* Upload a file */
    // @Multipart // not needed in this variant
    @POST("common/update-avatar")
    Observable<UpLoadEntity> upload(
            @Header("Authorization") String token,
            @Body MultipartBody body);// file and fields sent together in one body

    /* Download (streamed; RANGE header enables resumable downloads) */
    @Streaming
    @GET
    Observable<ResponseBody> downLoad(@Header("RANGE") String range, @Url String url);
}
|
package com.vladmihalcea.book.hpjp.hibernate.transaction.spring.jpa.config;
import com.vladmihalcea.book.hpjp.hibernate.forum.dto.PostDTO;
import com.vladmihalcea.book.hpjp.hibernate.logging.LoggingStatementInspector;
import com.vladmihalcea.book.hpjp.util.DataSourceProxyType;
import com.vladmihalcea.book.hpjp.util.logging.InlineQueryLogEntryCreator;
import com.vladmihalcea.hibernate.type.util.ClassImportIntegrator;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import net.ttddyy.dsproxy.listener.logging.SLF4JQueryLoggingListener;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hibernate.jpa.boot.spi.IntegratorProvider;
import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.*;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
/**
*
* @author <NAME>
*/
@Configuration
@PropertySource({"/META-INF/jdbc-postgresql.properties"})
@ComponentScan(basePackages = {
"com.vladmihalcea.book.hpjp.hibernate.transaction.spring.jpa.dao",
"com.vladmihalcea.book.hpjp.hibernate.transaction.spring.jpa.service",
})
@EnableTransactionManagement
@EnableAspectJAutoProxy
public class ResourceLocalReleaseAfterStatementConfiguration {
public static final String DATA_SOURCE_PROXY_NAME = DataSourceProxyType.DATA_SOURCE_PROXY.name();
@Value("${jdbc.dataSourceClassName}")
private String dataSourceClassName;
@Value("${jdbc.url}")
private String jdbcUrl;
@Value("${jdbc.username}")
private String jdbcUser;
@Value("${jdbc.password}")
private String jdbcPassword;
@Value("${hibernate.dialect}")
private String hibernateDialect;
@Bean
public static PropertySourcesPlaceholderConfigurer properties() {
return new PropertySourcesPlaceholderConfigurer();
}
@Bean(destroyMethod = "close")
public DataSource actualDataSource() {
Properties driverProperties = new Properties();
driverProperties.setProperty("url", jdbcUrl);
driverProperties.setProperty("user", jdbcUser);
driverProperties.setProperty("password", <PASSWORD>);
Properties properties = new Properties();
properties.put("dataSourceClassName", dataSourceClassName);
properties.put("dataSourceProperties", driverProperties);
properties.setProperty("maximumPoolSize", String.valueOf(5));
HikariConfig hikariConfig = new HikariConfig(properties);
hikariConfig.setAutoCommit(false);
return new HikariDataSource(hikariConfig);
}
@Bean
public DataSource dataSource() {
SLF4JQueryLoggingListener loggingListener = new SLF4JQueryLoggingListener();
loggingListener.setQueryLogEntryCreator(new InlineQueryLogEntryCreator());
return ProxyDataSourceBuilder
.create(actualDataSource())
.name(DATA_SOURCE_PROXY_NAME)
.listener(loggingListener)
.build();
}
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
entityManagerFactoryBean.setPersistenceUnitName(getClass().getSimpleName());
entityManagerFactoryBean.setPersistenceProvider(new HibernatePersistenceProvider());
entityManagerFactoryBean.setDataSource(dataSource());
entityManagerFactoryBean.setPackagesToScan(packagesToScan());
JpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
entityManagerFactoryBean.setJpaVendorAdapter(vendorAdapter);
entityManagerFactoryBean.setJpaProperties(additionalProperties());
return entityManagerFactoryBean;
}
@Bean
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory){
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(entityManagerFactory);
return transactionManager;
}
@Bean
public TransactionTemplate transactionTemplate(EntityManagerFactory entityManagerFactory) {
return new TransactionTemplate(transactionManager(entityManagerFactory));
}
protected Properties additionalProperties() {
Properties properties = new Properties();
properties.setProperty("hibernate.dialect", hibernateDialect);
properties.setProperty("hibernate.hbm2ddl.auto", "create-drop");
properties.put(
"hibernate.session_factory.statement_inspector",
new LoggingStatementInspector("com.vladmihalcea.book.hpjp.hibernate.transaction")
);
properties.put(
"hibernate.integrator_provider",
(IntegratorProvider) () -> Collections.singletonList(
new ClassImportIntegrator(Arrays.asList(PostDTO.class))
)
);
properties.put(
AvailableSettings.CONNECTION_HANDLING,
//PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_STATEMENT
PhysicalConnectionHandlingMode.DELAYED_ACQUISITION_AND_RELEASE_AFTER_TRANSACTION
);
return properties;
}
protected String[] packagesToScan() {
return new String[]{
"com.vladmihalcea.book.hpjp.hibernate.transaction.forum"
};
}
}
|
<gh_stars>1-10
package com.github.maracas.rest;
import com.github.maracas.rest.data.PullRequestResponse;
import org.junit.jupiter.api.Test;
import org.springframework.test.context.TestPropertySource;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
// Point the breakbot config lookup at a non-existent file so the analysis
// runs without any repository-provided configuration.
@TestPropertySource(properties = {"maracas.breakbot-file=.not-found"})
class ConfigLessGithubControllerTests extends AbstractControllerTest {
	// Even without a breakbot config, a PR push analysis should succeed,
	// produce a delta with breaking changes, and report no client impact.
	@Test
	void testAnalyzePRPushConfigLess() {
		PullRequestResponse res = resultAsPR(analyzePRPush("alien-tools", "comp-changes", 3));

		assertThat(res.message(), is("ok"));
		assertThat(res.report(), is(notNullValue()));
		assertThat(res.report().delta().breakingChanges(), not(empty()));
		assertThat(res.report().clientReports(), empty());
	}
}
|
import { ApolloServer } from 'apollo-server';
import { resolvers } from './resolvers';
import { typeDefs } from './typeDefs';

// Introspection is enabled explicitly so GraphQL tooling can query the schema.
const apolloServer = new ApolloServer({ typeDefs, resolvers, introspection: true });

apolloServer
  .listen()
  .then((serverInfo) => {
    // tslint:disable-next-line no-console
    console.log(`🚀 Server ready at ${serverInfo.url}`);
  });
|
#!/bin/sh
# genext2fs wrapper calculating needed blocks/inodes values if not specified

set -e
# Byte-wise, locale-independent behavior for the text tools used below
# (du | grep, sed), keeping the parsing reproducible.
export LC_ALL=C

CALC_BLOCKS=1
CALC_INODES=1
EXT_OPTS=
EXT_OPTS_O=

# Scan the genext2fs options we care about; explicit -b / -N disable the
# corresponding automatic calculation, -d records the source directory.
while getopts x:d:D:b:i:N:m:g:e:zfqUPhVv f
do
    case $f in
	b) CALC_BLOCKS=0 ;;
	N) CALC_INODES=0; INODES=$OPTARG ;;
	d) TARGET_DIR=$OPTARG ;;
    esac
done

# Grab the first non-option positional argument (the image path) by
# indirecting through $OPTIND; eval is needed for positional indirection
# in POSIX sh.
eval IMG="\"\${${OPTIND}}\""

# calculate needed inodes
if [ $CALC_INODES -eq 1 ];
then
	# One inode per file/dir in the tree, plus 400 spare for slack.
	INODES=$(find $TARGET_DIR | wc -l)
	INODES=$(expr $INODES + 400)
	set -- $@ -N $INODES
fi

# calculate needed blocks
if [ $CALC_BLOCKS -eq 1 ];
then
	# size ~= superblock, block+inode bitmaps, inodes (8 per block), blocks
	# we scale inodes / blocks with 10% to compensate for bitmaps size + slack
	BLOCKS=$(du -s -c -k $TARGET_DIR | grep total | sed -e "s/total//")
	BLOCKS=$(expr 500 + \( $BLOCKS + $INODES / 8 \) \* 11 / 10)
	# we add 1300 blocks (a bit more than 1 MiB, assuming 1KiB blocks) for
	# the journal if ext3/4
	# Note: I came to 1300 blocks after trial-and-error checks. YMMV.
	# NOTE(review): GEN (ext generation, e.g. 2/3/4) is never assigned in
	# this script and appears to come from the caller's environment — an
	# unset GEN would make this test fail under `set -e`; confirm.
	if [ ${GEN} -ge 3 ]; then
		BLOCKS=$(expr 1300 + $BLOCKS )
	fi
	set -- $@ -b $BLOCKS
fi
e2tunefsck() {
	# Apply the requested tune2fs options (if any) to the image.
	if [ $# -ne 0 ]; then
		tune2fs "$@" "${IMG}"
	fi
	# After changing filesystem options, running fsck is required
	# (see: man tune2fs); it also keeps filesystems produced by
	# genext2fs coherent.  'e2fsck -pDf' = automatically repair,
	# optimise and de-duplicate directory entries, force checking.
	# Output goes to oblivion because e2fsck can be *very* verbose.
	# Exit codes 1 and 2 only mean that errors were successfully
	# corrected, so they count as success alongside 0.
	ret=0
	e2fsck -pDf "${IMG}" >/dev/null || ret=$?
	if [ "${ret}" -gt 2 ]; then
		exit "${ret}"
	fi
	printf "\ne2fsck was successfully run on '%s' (ext%d)\n\n" \
	       "${IMG##*/}" "${GEN}"
	# e2fsck forces a *random* UUID, which hurts reproducibility, and
	# count-/time-based checks can tremendously slow down boot on
	# embedded devices: clear the UUID and disable both checks.
	tune2fs -U clear -c 0 -i 0 "${IMG}"
}
# Check we know what generation to generate
case "${GEN}:${REV}" in
    2:0|2:1|3:1|4:1)
	;;
    *)
	printf "%s: unknown ext generation '%s' and/or revision '%s'\n" \
	       "${0##*/}" "${GEN}" "${REV}" >&2
	exit 1
	;;
esac

# Upgrade to rev1 if needed
if [ "${REV}" -ge 1 ]; then
    EXT_OPTS_O="${EXT_OPTS_O},filetype"
fi

# Add a journal for ext3 and above
if [ "${GEN}" -ge 3 ]; then
    EXT_OPTS="${EXT_OPTS} -j -J size=1"
fi

# Add ext4 specific features
if [ "${GEN}" -ge 4 ]; then
    EXT_OPTS_O="${EXT_OPTS_O},extents,uninit_bg,dir_index"
fi

# Add our -O options (there will be at most one leading comma, remove it)
if [ -n "${EXT_OPTS_O}" ]; then
    EXT_OPTS="${EXT_OPTS} -O ${EXT_OPTS_O#,}"
fi

# Generate and upgrade the filesystem
genext2fs "$@"
# EXT_OPTS is intentionally left unquoted: it carries several
# whitespace-separated options that must undergo word-splitting.
e2tunefsck ${EXT_OPTS}
|
<reponame>tdm1223/Algorithm
// 1476. 날짜 계산
// 2019.05.18
// 수학, 중국인의 나머지 정리
#include<iostream>
using namespace std;
// Find the smallest year (1-based) whose position in the 15-, 28- and
// 19-year cycles matches the given triple (BOJ 1476, Chinese remainder
// theorem by brute force).
int main()
{
    int cycle15, cycle28, cycle19;
    cin >> cycle15 >> cycle28 >> cycle19;

    int year = 0;
    for (;; ++year)
    {
        // Year 'year' (0-based) maps to positions year%15+1, year%28+1,
        // year%19+1 within the three cycles.
        if (year % 15 + 1 == cycle15 &&
            year % 28 + 1 == cycle28 &&
            year % 19 + 1 == cycle19)
        {
            break;
        }
    }
    cout << year + 1 << endl;
    return 0;
}
|
def longestIncreasingSubsequence(arr):
    """Return the length of the longest strictly increasing subsequence.

    Uses patience sorting with binary search: ``tails[k]`` holds the
    smallest possible tail value of any increasing subsequence of
    length ``k + 1``.  Runs in O(n log n) time and O(n) space instead
    of the quadratic DP; returns 0 for an empty input.

    :param arr: sequence of comparable items
    :return: length of the longest strictly increasing subsequence
    """
    from bisect import bisect_left

    tails = []
    for value in arr:
        # Leftmost slot whose tail is >= value; replacing it keeps every
        # tail minimal while enforcing *strict* increase (a duplicate
        # overwrites its slot rather than extending the subsequence).
        pos = bisect_left(tails, value)
        if pos == len(tails):
            tails.append(value)
        else:
            tails[pos] = value
    return len(tails)
|
#!/bin/sh
# Install the core/protobuf-cpp Habitat package; -b binlinks its
# binaries so they become available on the PATH.
sudo hab pkg install core/protobuf-cpp -b
|
package sysinfo
// OSRelease aggregates operating-system identification data: distribution
// fields (ID, Name, Version, ...) together with uname-style kernel fields
// (Sysname, Nodename, Machine, ...).  Populated by GetOSInfo, which is
// defined elsewhere in this package.
type OSRelease struct {
	ID            string
	Name          string
	Version       string
	Platform      string
	Arch          string
	PrettyName    string
	KernelVersion string
	// Hostname string
	Sysname       string
	Nodename      string
	UnameVersion  string
	Machine       string
	Domainname    string
}
// OSInfo allocates an OSRelease, fills it in via GetOSInfo and returns it.
func OSInfo() *OSRelease {
	release := new(OSRelease)
	release.GetOSInfo()
	return release
}
// isSystemVersion is an empty placeholder; no implementation yet.
// NOTE(review): consider removing it or implementing it — an empty
// unexported method with no callers visible here serves no purpose.
func (o *OSRelease) isSystemVersion() {
}
|
class VirtualMemory:
    """Toy model of paged virtual memory with simple page replacement.

    ``memory`` is a fixed number of frames (``None`` marks a free frame);
    ``disk`` holds pages that were evicted or explicitly swapped out.
    """

    def __init__(self, size):
        self.memory = [None] * size
        self.disk = []

    def load_page(self, page_number):
        """Make page_number resident, paging it in from disk if needed.

        The original implementation indexed ``memory`` with a page
        *number* popped from ``disk`` (corrupting unrelated frames) and
        raised ValueError via ``index(None)`` once memory was full; this
        version places the page in a free frame, or evicts the page in
        the first frame to disk when no frame is free.
        """
        if page_number in self.memory:
            return  # already resident
        if page_number in self.disk:
            self.disk.remove(page_number)  # paging back in from disk
        if None in self.memory:
            self.memory[self.memory.index(None)] = page_number
        else:
            # All frames busy: evict the occupant of frame 0 to disk.
            # NOTE(review): frame 0 is only an approximation of FIFO; a
            # real implementation would track load order.
            self.disk.append(self.memory[0])
            self.memory[0] = page_number

    def swap_page(self, page_number):
        """Move page_number from memory to disk, if it is resident."""
        if page_number in self.memory:
            self.memory[self.memory.index(page_number)] = None
            self.disk.append(page_number)

    def display_state(self):
        """Print the current contents of memory frames and the disk."""
        print("Memory:", self.memory)
        print("Disk:", self.disk)
# Example usage: fill all four frames, then swap one page out to disk.
vm = VirtualMemory(4)
vm.load_page(1)
vm.load_page(2)
vm.load_page(3)
vm.load_page(4)  # memory is now full
vm.display_state()
vm.swap_page(2)  # page 2 moves from memory to disk
vm.display_state()
|
#!/bin/bash
# Build and launch a Litecoin Core node in Docker.
# Abort on the first failure so we never try to start a container
# from an image whose build failed.
set -euo pipefail

# Step 1: Build the Docker image for Litecoin Core node
docker build -t litecoin-node -f Dockerfile .

# Step 2: Run the Docker container for Litecoin Core node
docker run -d -p 9333:9333 --name litecoin-container litecoin-node
|
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.settings;
import org.hisp.dhis.android.core.common.BaseObjectShould;
import org.hisp.dhis.android.core.common.ObjectShould;
import org.junit.Test;
import java.io.IOException;
import java.text.ParseException;
import java.util.Map;
import static com.google.common.truth.Truth.assertThat;
public class AppearanceSettingsShould extends BaseObjectShould implements ObjectShould {

    public AppearanceSettingsShould() {
        super("settings/appearance_settings.json");
    }

    @Override
    @Test
    public void map_from_json_string() throws IOException, ParseException {
        AppearanceSettings settings = objectMapper.readValue(jsonStream, AppearanceSettings.class);
        FilterSorting sorting = settings.filterSorting();

        // Home, data-set and program filters all deserialize to the same
        // defaults: no scope/type/uid, with sort and filter enabled.
        Map<HomeFilter, FilterSetting> homeFilters = sorting.home();
        assertDefaultFilter(homeFilters.get(HomeFilter.DATE));

        Map<DataSetFilter, FilterSetting> dataSetGlobal = sorting.dataSetSettings().globalSettings();
        assertDefaultFilter(dataSetGlobal.get(DataSetFilter.PERIOD));

        Map<ProgramFilter, FilterSetting> programGlobal = sorting.programSettings().globalSettings();
        assertDefaultFilter(programGlobal.get(ProgramFilter.EVENT_DATE));

        CompletionSpinnerSetting spinnerSetting = settings.completionSpinner();
        assertThat(spinnerSetting.globalSettings().uid()).isNull();

        Map<String, CompletionSpinner> specificSpinners = spinnerSetting.specificSettings();
        CompletionSpinner programSpinner = specificSpinners.get("IpHINAT79UW");
        assertThat(programSpinner.uid()).isNull();
        assertThat(programSpinner.visible()).isEqualTo(true);
    }

    /** Asserts the shared default values of a deserialized filter setting. */
    private static void assertDefaultFilter(FilterSetting filter) {
        assertThat(filter.scope()).isNull();
        assertThat(filter.filterType()).isNull();
        assertThat(filter.uid()).isNull();
        assertThat(filter.sort()).isEqualTo(true);
        assertThat(filter.filter()).isEqualTo(true);
    }
}
|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.testgrid.automation.reader;
import org.wso2.testgrid.automation.Test;
import org.wso2.testgrid.automation.TestAutomationException;
import org.wso2.testgrid.common.TestScenario;
import java.util.List;
/**
* Interface defining the behavior for test readers.
*
* @since 1.0.0
*/
public interface TestReader {

    /**
     * Reads the tests found at the given location and returns them as
     * {@link Test} instances bound to the given {@link TestScenario}.
     *
     * @param testLocation directory/path in which the tests reside
     * @param scenario     test scenario the discovered tests belong to
     * @return a list of {@link Test} instances for the given test scenario and test location
     * @throws TestAutomationException thrown when an error occurs while reading tests
     */
    List<Test> readTests(String testLocation, TestScenario scenario) throws TestAutomationException;
}
|
<filename>src/main/java/com/athena/athena/im/svc/ChatRoomController.java<gh_stars>0
package com.athena.athena.im.svc;
import com.athena.athena.im.bean.ChatMessage;
import com.athena.athena.im.dao.impl.ChatMessageDAOImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.servlet.ModelAndView;
import java.util.List;
@Controller
public class ChatRoomController {

    private static final Logger log = LoggerFactory.getLogger(ChatRoomController.class);

    @Autowired
    ChatMessageDAOImpl chatMessageDAO;

    /**
     * Renders the chat room view between two users, preloading their
     * message history under the "chatData" model attribute.
     *
     * @param myid the current user's id
     * @param toid the chat partner's id
     * @return the "chat" view populated with the conversation history
     */
    @GetMapping("/chat/{myid}/{toid}")
    public ModelAndView chatRoom(@PathVariable Integer myid, @PathVariable Integer toid) {
        ModelAndView modelAndView = new ModelAndView();
        modelAndView.setViewName("chat");
        List<ChatMessage> chatMessages =
                chatMessageDAO.findListByFromAndTo(Long.valueOf(myid), Long.valueOf(toid));
        modelAndView.addObject("chatData", chatMessages);
        // Fixed typo ("controler") and use placeholders so the ids are logged.
        log.info("/chat/{}/{} controller", myid, toid);
        return modelAndView;
    }
}
|
<filename>types/TokyoVolcano.d.ts
/// <reference types="svelte" />
import { SvelteComponentTyped } from "svelte";

// Props accepted by the TokyoVolcano SVG icon component; inherits all
// standard <svg> element attributes.
export interface TokyoVolcanoProps
  extends svelte.JSX.HTMLAttributes<HTMLElementTagNameMap["svg"]> {
  // Keyboard tab order applied to the rendered <svg> element.
  tabindex?: string;

  /**
   * Fill colour of the icon.
   * @default "currentColor"
   */
  fill?: string;
}

// Svelte component rendering the icon as an inline SVG.
export default class TokyoVolcano extends SvelteComponentTyped<
  TokyoVolcanoProps,
  {},
  {}
> {}
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Fail on the first error, and make a pipeline's status that of its
# failing stage: without pipefail the exit code of 'mvn ... | tee'
# would be tee's (always 0), silently masking build failures.
set -eo pipefail

dir=$(dirname "${0}")
cd "${dir}"
cd ..

mvn clean install -DskipTests 2>&1 | tee mci.log
mvn verify -Dhdp.version="${HDP_VERSION:-2.2.4.2-2}" -fae 2>&1 | tee mvnverify.log
|
#!/bin/bash
# Generate the Postman collection artifact into dist/.
set -e

# Ensure the output directory exists before the generator writes into it.
mkdir -p dist
node scripts/build/generatePostmanCollection.js
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.