text stringlengths 1 1.05M |
|---|
package sword.langbook3.android.sdb;
import java.io.InputStream;
import sword.collections.ImmutableIntList;
/**
 * Read-only {@link InputStream} backed by an {@link ImmutableIntList} in which
 * each int word packs four bytes in little-endian order. Unlike a normal
 * stream, reading past the declared byte count throws an {@link AssertionError}
 * instead of returning -1, which makes over-reads fail loudly in tests.
 */
final class AssertStream extends InputStream {

    private final ImmutableIntList data;
    private final int dataSizeInBytes;
    // Position of the next byte to deliver, in [0, dataSizeInBytes].
    private int byteIndex;

    AssertStream(ImmutableIntList data, int dataSizeInBytes) {
        // The backing list must hold exactly ceil(dataSizeInBytes / 4) words.
        final int expectedWordCount = (dataSizeInBytes + 3) >>> 2;
        if (expectedWordCount != data.size()) {
            throw new IllegalArgumentException();
        }
        this.data = data;
        this.dataSizeInBytes = dataSizeInBytes;
    }

    /**
     * Returns the next byte (0-255), extracted from the packed word storage.
     * @throws AssertionError when the declared size has already been consumed
     */
    @Override
    public int read() {
        if (byteIndex >= dataSizeInBytes) {
            throw new AssertionError("End of the stream already reached");
        }
        final int word = data.valueAt(byteIndex >>> 2);
        final int shiftBits = (byteIndex & 3) * 8;
        ++byteIndex;
        return (word >>> shiftBits) & 0xFF;
    }

    /** @return true when every declared byte has been consumed */
    boolean allBytesRead() {
        return byteIndex == dataSizeInBytes;
    }
}
|
#!/bin/bash
# SLURM batch job: trains MultiModalInfoGAN on MNIST and Fashion-MNIST and
# then evaluates the generated samples with a classifier.
#SBATCH --mem=60g
#SBATCH -c 20
#SBATCH --gres=gpu:1
#SBATCH --time=1-00
#SBATCH --mail-user=idan.azuri@mail.huji.ac.il
#SBATCH --mail-type=END,FAIL,TIME_LIMIT
# Load TensorFlow and activate the project's virtualenv before running.
module load tensorflow/1.5.0
dir=/cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections
cd $dir
source /cs/labs/daphna/idan.azuri/venv_64/bin/activate
# MNIST: train with the multi-uniform sampler, then score with the classifier.
python3 main.py --gan_type MultiModalInfoGAN --epoch 60 --dataset mnist --sampler multi-uniform --batch_size 64 --dataset_order "czcc czrc rzcc rzrc"
python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname mnist_MultiModalUniformSample_mu_0_sigma_0.15_czcc_czrc_rzcc_rzrc_ndist_10 --original mnist
# Fashion-MNIST: same pipeline with the plain uniform sampler.
python3 main.py --gan_type MultiModalInfoGAN --epoch 60 --dataset fashion-mnist --sampler uniform --batch_size 64 --dataset_order "czcc czrc rzcc rzrc"
python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname fashion-mnist_UniformSample_mu_0.0_sigma_0.15_czcc_czrc_rzcc_rzrc_ndist_10 --original fashion-mnist
# Alternative dataset orderings kept for reference (disabled).
#
#python3 main.py --gan_type MultiModalInfoGAN --epoch 60 --dataset fashion-mnist --sampler multi-uniform --batch_size 64 --dataset_order "czcc rzcc czrc rzrc"
##python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname fashion-mnist_MultiModalUniformSample_mu_0_sigma_0.15_czcc_rzcc_czrc_rzrc --preprocess True --original fashion-mnist
#python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname fashion-mnist_MultiModalUniformSample_mu_0_sigma_0.15_czcc_rzcc_czrc_rzrc --original fashion-mnist
#
#python3 main.py --gan_type MultiModalInfoGAN --epoch 60 --dataset fashion-mnist --sampler multi-uniform --batch_size 64 --dataset_order "rzcc rzrc czcc czrc"
##python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname fashion-mnist_MultiModalUniformSample_mu_0_sigma_0.15_rzcc_rzrc_czcc_czrc --preprocess True --original fashion-mnist
#python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname fashion-mnist_MultiModalUniformSample_mu_0_sigma_0.15_rzcc_rzrc_czcc_czrc --original fashion-mnist
#
#
#python3 main.py --gan_type MultiModalInfoGAN --epoch 60 --dataset fashion-mnist --sampler multi-uniform --batch_size 64 --dataset_order "czrc czcc rzcc rzrc"
##python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname fashion-mnist_MultiModalUniformSample_mu_0_sigma_0.15_czrc_czcc_rzcc_rzrc --preprocess True --original fashion-mnist
#python3 classifier.py --dir_name /cs/labs/daphna/idan.azuri/tensorflow-generative-model-collections/ --fname fashion-mnist_MultiModalUniformSample_mu_0_sigma_0.15_czrc_czcc_rzcc_rzrc --original fashion-mnist
#
|
#!/bin/bash
# The MIT License (MIT)
#
# Copyright (c) 2015 Microsoft Azure
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Print usage information for this installer script.
help()
{
echo "This script installs Elasticsearch on Ubuntu"
echo "Parameters:"
echo " -n elasticsearch cluster name"
echo " -m configure as master node (default: off)"
echo " -h view this help content"
}
# Log method to control/redirect log output. All script output funnels
# through here so the destination can be changed in one place.
log()
{
echo "$1"
}
log "Begin execution of Elasticsearch script extension on ${HOSTNAME}"
# Root is required: the script installs packages and writes under /etc.
if [ "${UID}" -ne 0 ];
then
log "Script executed without root permissions"
echo "You must be root to run this program." >&2
exit 3
fi
# TEMP FIX - Re-evaluate and remove when possible
# This is an interim fix for hostname resolution in current VM
grep -q "${HOSTNAME}" /etc/hosts
if [ $? == 0 ]
then
echo "${HOSTNAME} found in /etc/hosts"
else
echo "${HOSTNAME} not found in /etc/hosts"
# Append it to the hosts file if not there
echo "127.0.0.1 ${HOSTNAME}" >> /etc/hosts
log "hostname ${HOSTNAME} added to /etc/hosts"
fi
#Script Parameters (defaults, overridable via the getopts loop below)
CLUSTER_NAME="es-azure-centralus"
# NOTE(review): ES_VERSION is not referenced anywhere below — the apt repo
# choice in install_es/install_es_latest pins the major version instead.
ES_VERSION="5.1.2"
IS_DATA_NODE=1
CLUSTER_PASSWORD="***"
READONLY_PASSWORD="***"
#Loop through options passed: -n name, -m master mode, -p cluster password,
# -r readonly password, -h help
while getopts :n:mp:r:h optname; do
#log "Option $optname set with value ${OPTARG}"
case $optname in
n) #set cluster name
CLUSTER_NAME=${OPTARG}
;;
m) #set master mode (node will run Kibana instead of holding data)
IS_DATA_NODE=0
;;
p) #set cluster (elastic user) password
CLUSTER_PASSWORD=${OPTARG}
;;
r) #set readonly (nginx basic-auth) password
READONLY_PASSWORD=${OPTARG}
;;
h) #show help
help
exit 2
;;
\?) #unrecognized option - show help
echo -e \\n"Option -${BOLD}$OPTARG${NORM} not allowed."
help
exit 2
;;
esac
done
echo "cluster.name: $CLUSTER_NAME"
# Install Oracle-packaged JDK 8 (8u261) from a mirror, then also install the
# distro default JDK via apt. Skips the download when the tarball is cached.
install_java()
{
# NOTE(review): the original checked for jdk-8u241-linux-x64.tar.gz while
# downloading and extracting jdk-8u261, so the cache check could never
# match; every filename now consistently refers to 8u261.
if [ -f "jdk-8u261-linux-x64.tar.gz" ];
then
log "Java already downloaded"
return
fi
log "Installing Java"
RETRY=0
MAX_RETRY=5
# Retry the download a few times; the mirror is occasionally flaky.
while [ $RETRY -lt $MAX_RETRY ]; do
log "Retry $RETRY: downloading jdk-8u261-linux-x64.tar.gz"
wget --no-check-certificate --no-cookies --header "Cookie: oraclelicense=accept-securebackup-cookie" http://enos.itcollege.ee/~jpoial/allalaadimised/jdk8/jdk-8u261-linux-x64.tar.gz
if [ $? -ne 0 ]; then
let RETRY=RETRY+1
else
break
fi
done
if [ $RETRY -eq $MAX_RETRY ]; then
log "Failed to download jdk-8u261-linux-x64.tar.gz"
exit 1
fi
tar xvf jdk-8u261-linux-x64.tar.gz -C /var/lib
export JAVA_HOME=/var/lib/jdk1.8.0_261
export PATH=$PATH:$JAVA_HOME/bin
log "JAVA_HOME: $JAVA_HOME"
log "PATH: $PATH"
java -version
# NOTE(review): the apt default-jdk install below duplicates the tarball
# install above; kept to preserve existing behavior.
sudo apt-get update && sudo apt-get -y upgrade
sudo apt install -y default-jdk
# Fail the whole provisioning run if java still is not runnable.
java -version
if [ $? -ne 0 ]; then
log "Java installation failed"
exit 1
fi
}
# Install the distribution default JDK (OpenJDK 11 on current Ubuntu) via apt
# and export JAVA_HOME/PATH for the rest of the script.
install_default_java()
{
log "Installing install_default_java"
java -version
# A single update+upgrade pass is sufficient (the original ran this line twice).
sudo apt-get -y update && sudo apt-get -y upgrade
sudo apt install -y default-jdk
export JAVA_HOME=/usr/lib/jvm/default-java/
export PATH=$PATH:$JAVA_HOME/bin
log "JAVA_HOME: $JAVA_HOME"
log "PATH: $PATH"
# Fail the whole provisioning run if java still is not runnable.
java -version
if [ $? -ne 0 ]; then
log "Java installation failed"
exit 1
fi
}
# Install Elasticsearch 5.x from the official apt repository plus the x-pack
# plugin; master nodes (IS_DATA_NODE=0) additionally get Kibana.
install_es()
{
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
apt-get install apt-transport-https
echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-5.x.list
apt-get update -y
apt-get install -y elasticsearch
pushd /usr/share/elasticsearch/
bin/elasticsearch-plugin install x-pack --batch
popd
if [ ${IS_DATA_NODE} -eq 0 ];
then
apt-get install -y kibana
pushd /usr/share/kibana/
bin/kibana-plugin install x-pack
popd
fi
}
# Install the latest Elasticsearch 7.x from the official apt repository;
# master nodes (IS_DATA_NODE=0) additionally get Kibana.
install_es_latest()
{
wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
apt-get install apt-transport-https
echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-7.x.list
apt-get update -y
apt-get install -y elasticsearch
pushd /usr/share/elasticsearch/
# NOTE(review): x-pack has been bundled with Elasticsearch since 6.3, so this
# plugin install is expected to fail on 7.x — confirm whether it is needed.
bin/elasticsearch-plugin install x-pack --batch
popd
if [ ${IS_DATA_NODE} -eq 0 ];
then
apt-get install -y kibana
pushd /usr/share/kibana/
bin/kibana-plugin install x-pack
popd
fi
}
# Rewrite /etc/elasticsearch/elasticsearch.yml from scratch (old file kept as
# elasticsearch.bak) with cluster, discovery and x-pack security settings.
configure_es()
{
log "Update configuration"
mv /etc/elasticsearch/elasticsearch.yml /etc/elasticsearch/elasticsearch.bak
echo "path.logs: /var/log/elasticsearch" >> /etc/elasticsearch/elasticsearch.yml
echo "cluster.name: $CLUSTER_NAME" >> /etc/elasticsearch/elasticsearch.yml
echo "node.name: ${HOSTNAME}" >> /etc/elasticsearch/elasticsearch.yml
# NOTE(review): discovery.zen.minimum_master_nodes is a 6.x-era setting that
# 7.x ignores; verify which ES version this config is meant for.
echo "discovery.zen.minimum_master_nodes: 1" >> /etc/elasticsearch/elasticsearch.yml
echo 'discovery.seed_hosts: ["10.64.35.30", "10.64.35.31", "10.64.35.32"]' >> /etc/elasticsearch/elasticsearch.yml
echo "network.host: _site_" >> /etc/elasticsearch/elasticsearch.yml
#Security Settings
echo "xpack.security.enabled: true" >> /etc/elasticsearch/elasticsearch.yml
echo "xpack.security.transport.ssl.enabled: false" >> /etc/elasticsearch/elasticsearch.yml
echo "bootstrap.memory_lock: true" >> /etc/elasticsearch/elasticsearch.yml
echo "cluster.initial_master_nodes: master-vm0" >> /etc/elasticsearch/elasticsearch.yml
# Role split: data nodes hold data only; the master node holds no data.
# NOTE(review): node.master/node.data are legacy settings (deprecated in
# favor of node.roles in recent 7.x) — confirm against the installed version.
if [ ${IS_DATA_NODE} -eq 1 ]; then
echo "node.master: false" >> /etc/elasticsearch/elasticsearch.yml
echo "node.data: true" >> /etc/elasticsearch/elasticsearch.yml
else
echo "node.master: true" >> /etc/elasticsearch/elasticsearch.yml
echo "node.data: false" >> /etc/elasticsearch/elasticsearch.yml
fi
}
# OS-level tuning plus per-role extras: Kibana config on the master node,
# Azure data-disk setup on data nodes.
configure_system()
{
# Make DNS lookups fail fast and retry more aggressively.
echo "options timeout:1 attempts:5" >> /etc/resolvconf/resolv.conf.d/head
resolvconf -u
#ES_HEAP=`free -m |grep Mem | awk '{if ($2/2 >31744) print 31744;else printf "%.0f", $2/2;}'`
#echo "ES_JAVA_OPTS=\"-Xms${ES_HEAP}m -Xmx${ES_HEAP}m\"" >> /etc/default/elasticsearch
# Fixed 6 GB heap (the dynamic half-of-RAM sizing above is kept for reference).
echo "ES_JAVA_OPTS=\"-Xms6g -Xmx6g\"" >> /etc/default/elasticsearch
echo "JAVA_HOME=$JAVA_HOME" >> /etc/default/elasticsearch
echo 'MAX_OPEN_FILES=65536' >> /etc/default/elasticsearch
echo 'MAX_LOCKED_MEMORY=unlimited' >> /etc/default/elasticsearch
#https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-system-settings.html#systemd
# Allow the systemd unit to lock memory (pairs with bootstrap.memory_lock).
mkdir -p /etc/systemd/system/elasticsearch.service.d
touch /etc/systemd/system/elasticsearch.service.d/override.conf
echo '[Service]' >> /etc/systemd/system/elasticsearch.service.d/override.conf
echo 'LimitMEMLOCK=infinity' >> /etc/systemd/system/elasticsearch.service.d/override.conf
sudo systemctl daemon-reload
chown -R elasticsearch:elasticsearch /usr/share/elasticsearch
if [ ${IS_DATA_NODE} -eq 0 ];
then
# Kibana: bind to this VM's outbound IP and point at the three ES nodes.
IP_ADDRESS=$(ip route get 8.8.8.8 | awk -F"src " 'NR==1{split($2,a," ");print a[1]}')
echo "server.host: \"$IP_ADDRESS\"" >> /etc/kibana/kibana.yml
echo 'elasticsearch.hosts: ["http://10.64.35.30:9200", "http://10.64.35.31:9200", "http://10.64.35.32:9200"]' >> /etc/kibana/kibana.yml
echo "logging.dest: \"/tmp/kibana.log\"" >> /etc/kibana/kibana.yml
#Security Settings
echo "xpack.security.enabled: true" >> /etc/kibana/kibana.yml
echo "elasticsearch.username: elastic" >> /etc/kibana/kibana.yml
echo "elasticsearch.password: $CLUSTER_PASSWORD" >> /etc/kibana/kibana.yml
chown -R kibana:kibana /usr/share/kibana
else
# data disk: stripe/mount attached disks via the Azure helper script.
DATA_DIR="/datadisks/disk1"
if ! [ -f "vm-disk-utils-0.1.sh" ];
then
DOWNLOAD_SCRIPT="https://raw.githubusercontent.com/tharuravuru/azure-quickstart-templates/master/shared_scripts/ubuntu/vm-disk-utils-0.1.sh"
log "Disk setup script not found in `pwd`, download from $DOWNLOAD_SCRIPT"
wget -q $DOWNLOAD_SCRIPT
fi
bash ./vm-disk-utils-0.1.sh
if [ $? -eq 0 ] && [ -d "$DATA_DIR" ];
then
log "Disk setup successful, using $DATA_DIR"
chown -R elasticsearch:elasticsearch $DATA_DIR
echo "DATA_DIR=$DATA_DIR" >> /etc/default/elasticsearch
else
log "Disk setup failed, using default data storage location"
fi
fi
}
# Enable and start Elasticsearch (and Kibana on the master node), exiting
# with failure if either systemd unit ends up in the 'failed' state.
start_service()
{
log "Starting Elasticsearch on ${HOSTNAME}"
systemctl daemon-reload
systemctl enable elasticsearch.service
systemctl start elasticsearch.service
# Give the JVM time to boot before checking the unit state.
sleep 60
if [ `systemctl is-failed elasticsearch.service` == 'failed' ];
then
log "Elasticsearch unit failed to start"
exit 1
fi
if [ ${IS_DATA_NODE} -eq 0 ];
then
log "Starting Kibana on ${HOSTNAME}"
systemctl enable kibana.service
systemctl start kibana.service
sleep 10
if [ `systemctl is-failed kibana.service` == 'failed' ];
then
log "Kibana unit failed to start"
exit 1
fi
fi
}
# Install nginx as a basic-auth reverse proxy in front of Kibana: the
# readonly user's credentials are injected as the upstream Authorization
# header so browser clients only need the nginx password.
install_nginx()
{
log "########## installing nginx #########"
sudo apt-get install -y nginx
log "########## installing apache2-utils #########"
sudo apt-get install -y apache2-utils
# NOTE(review): log text says "installing nginx" but this step actually
# creates the htpasswd entry for the readonly user.
log "########## installing nginx #########"
sudo htpasswd -b -c /etc/nginx/htpasswd.users readonly $READONLY_PASSWORD
log "########## configure nginx #########"
# Base64-encode "readonly:<password>" for the proxied Basic-auth header.
echo -n 'readonly:'$READONLY_PASSWORD>tempt.txt
encodedPassword=`base64 tempt.txt`
echo "$encodedPassword"
rm tempt.txt
IP_ADDRESS=$(ip route get 8.8.8.8 | awk -F"src " 'NR==1{split($2,a," ");print a[1]}')
# Write the whole nginx config as one quoted literal; the single quotes are
# closed around $IP_ADDRESS and $encodedPassword so only those two expand.
echo 'worker_processes 1;
events {
worker_connections 1024;
}
http {
upstream kibana {
server '$IP_ADDRESS':5601;
keepalive 15;
}
server {
listen 9292;
location / {
proxy_pass http://kibana;
proxy_redirect off;
proxy_buffering off;
proxy_http_version 1.1;
proxy_set_header Connection "Keep-Alive";
proxy_set_header Proxy-Connection "Keep-Alive";
proxy_set_header Authorization "Basic '$encodedPassword'";
}
}
}'>/etc/nginx/nginx.conf
log "########## updated nginx: /etc/nginx/nginx.conf #########"
cat /etc/nginx/nginx.conf
}
# ---- Main flow: Java, nginx, Elasticsearch install/configure/start ----
log "########## JAVA installation #########"
install_default_java
log "########## nginx installation #########"
install_nginx
log "########## starting elasticsearch setup ##########"
install_es_latest
configure_es
configure_system
start_service
service nginx restart
# NOTE(review): kibana is only installed when IS_DATA_NODE=0, so this
# restart will report an error on data nodes (non-fatal).
service kibana restart
log "########## completed elasticsearch setup ##########"
exit 0
|
<reponame>team-crossover/mobiliza<filename>App/app/src/main/java/com/crossover/mobiliza/app/data/remote/service/EventoService.java
package com.crossover.mobiliza.app.data.remote.service;
import com.crossover.mobiliza.app.data.local.entity.Evento;
import com.crossover.mobiliza.app.data.local.entity.Ong;
import java.util.List;
import retrofit2.Call;
import retrofit2.http.Body;
import retrofit2.http.DELETE;
import retrofit2.http.GET;
import retrofit2.http.POST;
import retrofit2.http.Path;
import retrofit2.http.Query;
/**
 * Retrofit service definition for the {@code /eventos} REST endpoints
 * (events belonging to ONGs). Mutating calls require the caller's Google ID
 * token as a query parameter.
 */
public interface EventoService {
/** Lists events, optionally filtered by ONG id, category, region and finished flag (null = no filter). */
@GET("eventos")
Call<List<Evento>> findAll(@Query("idOng") Long idOng,
@Query("categoria") String categoria,
@Query("regiao") String regiao,
@Query("finalizado") Boolean finalizado);
/** Fetches a single event by its id. */
@GET("eventos/{id}")
Call<Evento> findById(@Path("id") Long id);
/** Deletes an event. NOTE(review): the response type is Ong, not Evento — confirm against the backend API. */
@DELETE("eventos/{id}")
Call<Ong> deleteById(@Path("id") Long id, @Query("googleIdToken") String googleIdToken);
/** Saves an event (the request body carries the entity). */
@POST("eventos")
Call<Evento> save(@Body Evento evento, @Query("googleIdToken") String googleIdToken);
/** Sets the confirmation flag on an event. NOTE(review): 'valor' semantics inferred from naming — verify with the backend. */
@POST("eventos/{id}/confirmar")
Call<Evento> confirmar(@Path("id") Long id, @Query("googleIdToken") String googleIdToken, @Query("valor") Boolean valor);
}
<reponame>wultra/powerauth-webflow<gh_stars>1-10
/*
* Copyright 2019 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.app.tppengine.model.entity;
import java.util.Date;
/**
* Response entity object in the consent history.
*
* @author <NAME>, <EMAIL>
*/
public class GivenConsentHistory {
/**
* Given consent ID.
*/
private Long id;
/**
* TPP application client ID.
*/
private String clientId;
/**
* Change of the consent that was made. This data item is backed by
* {@link io.getlime.security.powerauth.app.tppengine.model.enumeration.ConsentChange}
* and can have the same values (represented as string).
*/
private String change;
/**
* Consent ID.
*/
private String consentId;
/**
* Name of the consent.
*/
private String consentName;
/**
* Text of the consent, possibly with placeholders.
*/
private String consentText;
/**
* Consent parameters, to be filled to placeholders.
*/
private String consentParameters;
/**
* External ID of the consent initiation, usually related to operation ID or some other
* ID uniquely related to the operation.
*/
private String externalId;
/**
* Timestamp of when the change was made.
*/
private Date timestampCreated;
/** @return given consent record ID */
public Long getId() {
return id;
}
/** @param id given consent record ID */
public void setId(Long id) {
this.id = id;
}
/** @return TPP application client ID */
public String getClientId() {
return clientId;
}
/** @param clientId TPP application client ID */
public void setClientId(String clientId) {
this.clientId = clientId;
}
/** @return consent change type, string form of ConsentChange */
public String getChange() {
return change;
}
/** @param change consent change type, string form of ConsentChange */
public void setChange(String change) {
this.change = change;
}
/** @return consent ID */
public String getConsentId() {
return consentId;
}
/** @param consentId consent ID */
public void setConsentId(String consentId) {
this.consentId = consentId;
}
/** @return display name of the consent */
public String getConsentName() {
return consentName;
}
/** @param consentName display name of the consent */
public void setConsentName(String consentName) {
this.consentName = consentName;
}
/** @return consent text, possibly with placeholders */
public String getConsentText() {
return consentText;
}
/** @param consentText consent text, possibly with placeholders */
public void setConsentText(String consentText) {
this.consentText = consentText;
}
/** @return parameters to fill into the consent text placeholders */
public String getConsentParameters() {
return consentParameters;
}
/** @param consentParameters parameters to fill into the consent text placeholders */
public void setConsentParameters(String consentParameters) {
this.consentParameters = consentParameters;
}
/** @return external ID of the consent initiation (e.g. operation ID) */
public String getExternalId() {
return externalId;
}
/** @param externalId external ID of the consent initiation (e.g. operation ID) */
public void setExternalId(String externalId) {
this.externalId = externalId;
}
/**
* @return timestamp of the change. NOTE(review): java.util.Date is mutable
* and returned by reference — callers must not modify it.
*/
public Date getTimestampCreated() {
return timestampCreated;
}
/** @param timestampCreated timestamp of the change */
public void setTimestampCreated(Date timestampCreated) {
this.timestampCreated = timestampCreated;
}
}
|
from flask import Flask

# WSGI application object; application.routes registers its views on it.
app = Flask(__name__)

# Imported at the bottom on purpose: the routes module imports `app` from this
# module, so importing it at the top would create a circular import.
from application import routes
|
#!/bin/bash
# Analytics Zoo Python example smoke tests: download data/models from an FTP
# mirror when missing, then run each example locally via spark-submit.
export SPARK_HOME=$SPARK_HOME
export MASTER=local[4]
export FTP_URI=$FTP_URI
export ANALYTICS_ZOO_HOME=$ANALYTICS_ZOO_HOME
export ANALYTICS_ZOO_HOME_DIST=$ANALYTICS_ZOO_HOME/dist
export ANALYTICS_ZOO_JAR=`find ${ANALYTICS_ZOO_HOME_DIST}/lib -type f -name "analytics-zoo*jar-with-dependencies.jar"`
export ANALYTICS_ZOO_PYZIP=`find ${ANALYTICS_ZOO_HOME_DIST}/lib -type f -name "analytics-zoo*python-api.zip"`
export ANALYTICS_ZOO_CONF=${ANALYTICS_ZOO_HOME_DIST}/conf/spark-analytics-zoo.conf
export PYTHONPATH=${ANALYTICS_ZOO_PYZIP}:$PYTHONPATH
echo "#1 start example test for textclassification"
# Fetch GloVe embeddings and the 20 Newsgroups corpus when not cached.
if [ -f analytics-zoo-data/data/glove.6B.zip ]
then
echo "analytics-zoo-data/data/glove.6B.zip already exists"
else
wget $FTP_URI/analytics-zoo-data/data/glove/glove.6B.zip -P analytics-zoo-data/data
unzip -q analytics-zoo-data/data/glove.6B.zip -d analytics-zoo-data/data/glove.6B
fi
if [ -f analytics-zoo-data/data/20news-18828.tar.gz ]
then
echo "analytics-zoo-data/data/20news-18828.tar.gz already exists"
else
wget $FTP_URI/analytics-zoo-data/data/news20/20news-18828.tar.gz -P analytics-zoo-data/data
tar zxf analytics-zoo-data/data/20news-18828.tar.gz -C analytics-zoo-data/data/
fi
echo "check if model directory exists"
if [ ! -d analytics-zoo-models ]
then
mkdir analytics-zoo-models
fi
# Pretrained models for the image-classification and object-detection tests.
if [ -f analytics-zoo-models/analytics-zoo_squeezenet_imagenet_0.1.0 ]
then
echo "analytics-zoo-models/analytics-zoo_squeezenet_imagenet_0.1.0 already exists"
else
wget $FTP_URI/analytics-zoo-models/imageclassification/imagenet/analytics-zoo_squeezenet_imagenet_0.1.0 \
-P analytics-zoo-models
fi
if [ -f analytics-zoo-models-new/analytics-zoo_ssd-mobilenet-300x300_PASCAL_0.1.0.model ]
then
echo "analytics-zoo-models-new/analytics-zoo_ssd-mobilenet-300x300_PASCAL_0.1.0.model already exists"
else
wget $FTP_URI/analytics-zoo-models-new/object-detection/analytics-zoo_ssd-mobilenet-300x300_PASCAL_0.1.0.model \
-P analytics-zoo-models-new
fi
# Text classification example (2 epochs).
${SPARK_HOME}/bin/spark-submit \
--master ${MASTER} \
--driver-memory 20g \
--executor-memory 20g \
--py-files ${ANALYTICS_ZOO_PYZIP},${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/textclassification/text_classification.py \
--jars ${ANALYTICS_ZOO_JAR} \
--conf spark.driver.extraClassPath=${ANALYTICS_ZOO_JAR} \
--conf spark.executor.extraClassPath=${ANALYTICS_ZOO_JAR} \
${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/textclassification/text_classification.py \
--nb_epoch 2 \
--data_path analytics-zoo-data/data
echo "#2 start example test for customized loss and layer (Funtional API)"
# Custom loss/layer (autograd) example.
${SPARK_HOME}/bin/spark-submit \
--master ${MASTER} \
--driver-memory 20g \
--executor-memory 20g \
--py-files ${ANALYTICS_ZOO_PYZIP},${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/autograd/custom.py \
--jars ${ANALYTICS_ZOO_JAR} \
--conf spark.driver.extraClassPath=${ANALYTICS_ZOO_JAR} \
--conf spark.executor.extraClassPath=${ANALYTICS_ZOO_JAR} \
${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/autograd/custom.py \
--nb_epoch 2
# Image classification example (reads images from HDFS).
${SPARK_HOME}/bin/spark-submit \
--master ${MASTER} \
--driver-memory 20g \
--executor-memory 20g \
--py-files ${ANALYTICS_ZOO_PYZIP},${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/imageclassification/predict.py \
--jars ${ANALYTICS_ZOO_JAR} \
--conf spark.driver.extraClassPath=${ANALYTICS_ZOO_JAR} \
--conf spark.executor.extraClassPath=${ANALYTICS_ZOO_JAR} \
${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/imageclassification/predict.py \
-f hdfs://172.168.2.181:9000/kaggle/train_100 \
--model analytics-zoo-models/analytics-zoo_squeezenet_imagenet_0.1.0 \
--topN 5
# Object detection example (SSD MobileNet on HDFS images, output to /tmp).
${SPARK_HOME}/bin/spark-submit \
--master ${MASTER} \
--driver-memory 20g \
--executor-memory 20g \
--py-files ${ANALYTICS_ZOO_PYZIP},${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/objectdetection/predict.py \
--jars ${ANALYTICS_ZOO_JAR} \
--conf spark.driver.extraClassPath=${ANALYTICS_ZOO_JAR} \
--conf spark.executor.extraClassPath=${ANALYTICS_ZOO_JAR} \
${ANALYTICS_ZOO_HOME}/pyzoo/zoo/examples/objectdetection/predict.py \
analytics-zoo-models-new/analytics-zoo_ssd-mobilenet-300x300_PASCAL_0.1.0.model hdfs://172.168.2.181:9000/kaggle/train_100 /tmp
|
package com.bustiblelemons.utils;
import android.content.Context;
import android.content.res.Resources;
import java.util.Locale;
/**
* Created by bhm on 17.09.14.
*/
/**
 * Fluent helper for resolving Android string resources whose names are built
 * from parts joined with a splitter, e.g. parts ["error", "404"] with the
 * default "_" splitter resolve to the resource named "error_404".
 * Also exposes the framework's status-bar height dimension.
 * Created by bhm on 17.09.14.
 */
public class ResourceHelper {
// Default token used to join name parts into a resource name.
public static final String UNDERSCORE_SPLITER = "_";
private static final String STATUS_BAR_HEIGHT = "status_bar_height";
private static final String DIMEN = "dimen";
private static final String STRING = "string";
private static final String ANDROID_PACKAGE = "android";
private String mSpliter = UNDERSCORE_SPLITER;
private final Context mContext;
private String mPackageName;
// Name parts configured via withNameParts(); empty by default.
private String[] mParts = new String[0];
// When true, getString() propagates Resources.NotFoundException instead of
// swallowing it and returning null.
private boolean mThrow = false;
public ResourceHelper(Context context) {
mContext = context;
mPackageName = mContext.getPackageName();
}
/** Static factory; equivalent to the constructor. */
public static ResourceHelper from(Context context) {
return new ResourceHelper(context);
}
/** Sets the joining token; a null argument keeps the current splitter. */
public ResourceHelper withSpliter(String spliter) {
if (spliter != null) {
mSpliter = spliter;
}
return this;
}
/** Sets the name parts to join; a null argument keeps the current parts. */
public ResourceHelper withNameParts(String... parts) {
if (parts != null) {
mParts = parts;
}
return this;
}
/** Chooses whether getString() throws on a missing resource or returns null. */
public ResourceHelper throwException(boolean throwException) {
mThrow = throwException;
return this;
}
/**
 * Resolves the configured parts to a string resource value.
 * With throwException(false) (the default), a missing resource is logged
 * and null is returned; otherwise Resources.NotFoundException propagates.
 */
public String getString() {
int id = getIdentifierForStringByNamePartsWithSpliter(mSpliter, mParts);
if (mThrow) {
return mContext.getString(id);
} else {
try {
return mContext.getString(id);
} catch (Resources.NotFoundException e) {
e.printStackTrace();
}
return null;
}
}
/** Resolves parts joined with the default "_" splitter to a string resource id. */
public int getIdentifierForStringByNameParts(String... parts) {
return getIdentifierForStringByNamePartsWithSpliter(UNDERSCORE_SPLITER, parts);
}
/**
 * Joins the non-null parts (lower-cased with an English locale) with the
 * given splitter and resolves the result to a string resource id.
 */
public int getIdentifierForStringByNamePartsWithSpliter(String spliter, String... parts) {
StringBuilder b = new StringBuilder();
String fix = "";
for (String part : parts) {
if (part != null) {
b.append(fix);
b.append(part.toLowerCase(Locale.ENGLISH));
fix = spliter;
}
}
return getIdentifierForString(b.toString());
}
/**
 * Looks up a string resource id by name in this app's package.
 * Returns 0 when the name is unknown (getIdentifier's documented contract);
 * the catch block is purely defensive.
 */
public int getIdentifierForString(String resName) {
int resId = 0;
try {
return mContext.getResources().getIdentifier(resName, STRING, mPackageName);
} catch (Resources.NotFoundException e) {
e.printStackTrace();
}
return resId;
}
/**
 * @return the framework's status-bar height in pixels, or 0 when the
 * dimension cannot be resolved (or no context is available).
 */
public int getStatusBarHeight() {
int result = 0;
if (mContext != null) {
Resources resources = mContext.getResources();
int resourceId = resources.getIdentifier(STATUS_BAR_HEIGHT, DIMEN, ANDROID_PACKAGE);
if (resourceId > 0) {
result = resources.getDimensionPixelSize(resourceId);
}
}
return result;
}
}
|
"use strict";
// Generated icon-data module (react-icons-kit style): exports the Material
// Design "7K" badge icon as a plain SVG description object.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_7k = void 0;
// A 24x24 viewBox with two paths: an invisible bounding rectangle and the glyph.
var ic_7k = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M19 3H5c-1.1 0-2 .9-2 2v14c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zM9.5 15H7.75l1.38-4.5H6.5V9H10c.67 0 1.15.65.96 1.29L9.5 15zm8.5 0h-1.75l-1.75-2.25V15H13V9h1.5v2.25L16.25 9H18l-2.25 3L18 15z"
},
"children": []
}]
};
exports.ic_7k = ic_7k;
# Whenever (cron) schedule: append cron job output to log/schedule.log.
set :output, 'log/schedule.log'
# Run the `sample` rake task once per day.
every 1.day do
rake 'sample'
end
|
package util
import (
"strings"
)
// TrunkUrlFragment returns the common domain suffix shared by every entry in
// domList, e.g. {"a.example.com", "b.example.com"} -> "example.com".
// An empty input yields "".
func TrunkUrlFragment(domList []string) string {
	// Guard against an empty input: without it the scan below never breaks
	// (the inner comparison loop has nothing to disagree with) and the
	// function loops forever appending empty labels.
	if len(domList) == 0 {
		return ""
	}
	var builtMatch []string
	var splitStrings [][]string
	// Split each domain into labels and reverse so index 0 is the TLD.
	for _, dom := range domList {
		parts := strings.Split(dom, ".")
		Reverse(&parts)
		splitStrings = append(splitStrings, parts)
	}
	i := 0
main:
	for {
		var proposed string
		for _, slice := range splitStrings {
			if i >= len(slice) {
				break main // shortest domain fully consumed
			}
			if proposed == "" {
				proposed = slice[i]
			} else if proposed != slice[i] {
				break main // labels diverge at this depth
			}
		}
		i++
		builtMatch = append(builtMatch, proposed)
	}
	// Restore most-specific-first order and rejoin with dots.
	Reverse(&builtMatch)
	return strings.Join(builtMatch, ".")
}
// Reverse reverses, in place, the slice that input points to.
func Reverse(input *[]string) {
	s := *input
	for left, right := 0, len(s)-1; left < right; left, right = left+1, right-1 {
		s[left], s[right] = s[right], s[left]
	}
}
// Contains reports whether every element of things occurs somewhere in
// container. An empty things slice is trivially contained.
func Contains(container, things []string) bool {
	for _, want := range things {
		_, found := IndexOf(want, container)
		if !found {
			return false
		}
	}
	return true
}
// IndexOf returns the position of it within list and true when found,
// or (0, false) when absent.
func IndexOf(it string, list []string) (int, bool) {
	for i := range list {
		if list[i] == it {
			return i, true
		}
	}
	return 0, false
}
|
#!/bin/sh
#
# Copyright (c) 2018-2021, Christer Edwards <christer.edwards@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
. /usr/local/share/bastille/common.sh
# Print usage and exit via common.sh's error_exit.
usage() {
error_exit "Usage: bastille pkg TARGET command [args]"
}
# Handle special-case commands first.
case "$1" in
help|-h|--help)
usage
;;
esac
if [ $# -lt 1 ]; then
usage
fi
# Run the given pkg(8) command inside every targeted jail.
# NOTE(review): JAILS is never assigned in this file — presumably populated by
# the sourced common.sh / bastille target handling; verify before refactoring.
for _jail in ${JAILS}; do
info "[${_jail}]:"
jexec -l "${_jail}" /usr/sbin/pkg "$@"
echo
done
|
/*
Jameleon - An automation testing tool..
Copyright (C) 2003-2006 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import net.sf.jameleon.bean.Attribute;
import net.sf.jameleon.bean.FunctionalPoint;
import net.sf.jameleon.exception.JameleonException;
import net.sf.jameleon.exception.JameleonScriptException;
import net.sf.jameleon.function.Attributable;
import net.sf.jameleon.function.AttributeBroker;
import net.sf.jameleon.function.ContextHelper;
import net.sf.jameleon.util.JameleonUtility;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.jelly.DynaTag;
import org.apache.commons.jelly.JellyTagException;
/**
* <p><code>JameleonTagSupport</code> is an implementation of DynaTag. This tag
* throws the variable name as the XML attribute into the context. It then
* attempts to call to corresponding set method. There is currently no checking
* on whether the set method was called or not. If it's there, then it gets
* called. Calling the set method is mostly for backward compatability. However,
* it can be used for set methods that need to do more than put the variable in the
* context.</p>
* <p>
* Currently, this class is only used by FunctionTag. It was intended to be the base class for all
* Jameleon tags.
* </p>
*/
public abstract class JameleonTagSupport extends LocationAwareTagSupport implements DynaTag, Attributable {
/**
* A map of class attributes and their corresponding types (field name to
* Class), lazily built by getClassAttributes().
*/
protected Map attributes = null;
/**
* A list of variable names that were stored in the context, tracked so
* cleanVariablesInContext() can remove them again.
*/
protected List contextVars = new LinkedList();
/**
* Used to transfer context variables to instance variables.
*/
protected AttributeBroker broker;
/**
* Represents this tag's attributes (metadata loaded from the registered
* functional point).
*/
protected FunctionalPoint fp;
// Attribute names passed to this tag that no metadata entry matched;
// reported by testForUnsupportedAttributesCaught().
protected List unsupportedAttributes = new LinkedList();
/**
 * Creates the tag, wires up the AttributeBroker that transfers context
 * variables to instance variables, and loads this tag's FunctionalPoint
 * metadata (the set of supported attributes).
 */
public JameleonTagSupport(){
super();
broker = new AttributeBroker(this);
fp = loadFunctionalPoint();
//This calls describeAttributes()
broker.setUp();
}
/**
 * Loads the {@link FunctionalPoint} metadata for this tag class by its
 * fully-qualified name, rethrowing failures with this tag attached.
 * NOTE(review): rethrowing with only jse.getMessage() drops the original
 * stack trace/cause — consider a constructor overload that preserves it.
 * @return the functional point describing this tag's attributes
 */
public FunctionalPoint loadFunctionalPoint(){
FunctionalPoint functionalPoint = null;
try{
functionalPoint = JameleonUtility.loadFunctionalPoint(this.getClass().getName(), this);
}catch(JameleonScriptException jse){
throw new JameleonScriptException(jse.getMessage(), this);
}
return functionalPoint;
}
/**
 * @return the attribute names passed to this tag that could not be resolved
 *         (populated by setAttribute)
 */
public List getUnsupportedAttributes(){
return unsupportedAttributes;
}
/**
 * Throws a JameleonScriptException listing every attribute name that was
 * passed to this tag but not recognized (collected by setAttribute).
 * Does nothing when all attributes were supported.
 */
protected void testForUnsupportedAttributesCaught(){
    if (!unsupportedAttributes.isEmpty()) {
        // Build the exact same message as before, but via StringBuilder
        // instead of repeated String concatenation inside the loop.
        StringBuilder msg = new StringBuilder("The following attributes are not supported by this tag:\n");
        Iterator it = unsupportedAttributes.iterator();
        while (it.hasNext()) {
            msg.append('\'').append(it.next()).append('\'');
            if (it.hasNext()) {
                msg.append(", ");
            }
        }
        msg.append(".\n This could also be that Jameleon could not find the tag's corresponding .dat file\n")
           .append("that is generated when the tags are registered and should be in the CLASSPATH.\n\n");
        throw new JameleonScriptException(msg.toString(), this);
    }
}
/**
 * Sets an attribute value of this tag before the tag is invoked.
 * Resolution order: attributes the FunctionalPoint marks as instance
 * variables are handed to the AttributeBroker (or published into the Jelly
 * context when they are pure context variables); attributes the
 * FunctionalPoint knows but does not mark as instance variables are copied
 * onto this bean via BeanUtils; names unknown to the FunctionalPoint (or
 * when no FunctionalPoint loaded at all) are collected in
 * unsupportedAttributes for later reporting.
 */
public void setAttribute(String name, Object value) {
if (fp != null) {
Attribute attr = fp.getAttribute(name);
if (attr != null) {
if (attr.isInstanceVariable()){
// Plain instance variable (or one whose context name equals its own
// name): hand the value to the broker and remember it on the attribute.
if (!attr.isContextVariable() ||
name.equals(attr.getName()) ) {
try{
broker.setConsumerAttribute(attr, value);
}catch(JameleonException je){
throw new JameleonScriptException(je, this);
}
attr.setValue(value);
} else if (attr.isContextVariable()){
// Pure context variable: publish it under its context name instead.
setVariableInContext(attr.getContextName(), value);
}
}else{
// Known attribute that is not an instance variable: set the bean
// property directly.
try{
//There is currently no reasonable way through the commons beanutils API to know whether the method was set or not.
//So I have to assume it was indeed set.
BeanUtils.copyProperty(this, name, value);
//This won't get set because it may not set a context variable
fp.getAttributes().put(name, attr);
attr.setValue(value);
}catch(Exception e){
throw new JameleonScriptException(e, this);
}
}
}else{
// Name unknown to the FunctionalPoint: remember it for error reporting.
unsupportedAttributes.add(name);
}
}else{
// No metadata could be loaded; every attribute is considered unsupported.
unsupportedAttributes.add(name);
}
}
protected void setVariableInContext(String name, Object value){
context.setVariable(name,value);
contextVars.add(name);
}
protected void cleanVariablesInContext(){
ContextHelper.removeVariables(context, contextVars);
}
/**
* Helper method which allows derived tags to access the attributes
* associated with this tag
* @return the context of the tag.
*/
protected Map getAttributes() {
return context.getVariables();
}
public AttributeBroker getAttributeBroker(){
return broker;
}
/**
* Simply returns the context for this tag
*/
protected Map createAttributes() {
return context.getVariables();
}
/**
* Gets the attributes or fields of the tag
*/
protected Map getClassAttributes(){
if (attributes == null) {
attributes = new HashMap();
Class clzz = this.getClass();
do {
Field[] fields = clzz.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
attributes.put(fields[i].getName(),fields[i].getType());
}
clzz = clzz.getSuperclass();
}while(!clzz.equals(JameleonTagSupport.class));
}
return attributes;
}
/**
* @return the type of the given attribute. If we can't figure out
* the type of variable, simply return Object.class
* Required for dyna tag support.
*/
public Class getAttributeType(String name) throws JellyTagException {
getClassAttributes();
Class clzz = Object.class;
if (attributes.containsKey(name)) {
clzz = (Class)attributes.get(name);
}
return clzz;
}
protected void resetFunctionalPoint(){
cleanVariablesInContext();
Map attrs = fp.getAttributes();
Iterator it = attrs.keySet().iterator();
String key;
Attribute attr;
Field f = null;
while (it.hasNext()) {
key = (String)it.next();
attr = (Attribute)attrs.get(key);
if (attr.isInstanceVariable()) {
f = broker.getConsumerField(attr);
if (f.getType().isPrimitive()) {
broker.setConsumerAttributeAsPrimitive(f, attr.getDefaultValue());
}else{
broker.setConsumerAttributeAsObject(f, attr.getDefaultValue());
}
}else if (attr.getDefaultValue() != null){
try{
//There is currently no reasonable way through the commons beanutils API to know whether the method was set or.
//So I have to assume it was indeed set.
BeanUtils.copyProperty(this, key, attr.getDefaultValue());
}catch(Exception e){
throw new JameleonScriptException(e, this);
}
}
attr.setValue(null);
}
}
public FunctionalPoint getFunctionalPoint(){
return fp;
}
// -------------- Begin Attributable methods
public void describeAttributes(AttributeBroker broker) {
if (fp != null) {
Map attrs = fp.getAttributes();
if (attrs != null) {
Set keys = attrs.keySet();
if (keys != null) {
Iterator it = keys.iterator();
while (it != null && it.hasNext()) {
broker.registerAttribute((Attribute)fp.getAttributes().get(it.next()));
}
}
}
}
}
}
|
# -*- coding: utf-8 -*-
import os
import json
import sys
import io
import re
import click
# Click settings shared by all commands: defaults may come from environment
# variables prefixed with "Trello2Kanboard", and -h works as a help alias.
CONTEXT_SETTINGS = dict(auto_envvar_prefix='Trello2Kanboard',
                        help_option_names=['-h', '--help'])
class Context(object):
    """Empty shared-state holder handed to commands via ``pass_context``."""
    def __init__(self):
        pass
# Decorator that injects a shared Context instance into command callbacks,
# creating one on demand (ensure=True).
pass_context = click.make_pass_decorator(Context, ensure=True)
# Absolute path of the package's "commands" directory, scanned for
# cmd_*.py modules by Trello2KanboardCLI.
cmd_folder = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                          'commands'))
class Trello2KanboardCLI(click.MultiCommand):
    """Multi-command that discovers sub-commands from ``cmd_*.py`` files."""

    def list_commands(self, ctx):
        """Return the sorted names of all sub-command modules in cmd_folder."""
        commands = [
            filename[4:-3]
            for filename in os.listdir(cmd_folder)
            if filename.startswith('cmd_') and filename.endswith('.py')
        ]
        commands.sort()
        return commands

    def get_command(self, ctx, name):
        """Import ``trello2kanboard.commands.cmd_<name>`` and return its
        ``cli`` entry point, or None if the module cannot be imported."""
        try:
            if sys.version_info[0] == 2:
                # Python 2's __import__ needs a byte-string module name.
                name = name.encode('ascii', 'replace')
            module = __import__('trello2kanboard.commands.cmd_' + name,
                                None, None, ['cli'])
        except ImportError:
            return None
        return module.cli
def print_version(ctx, param, value):
    """Eager click callback for ``--version``: print the version and exit.

    Returns None without touching the filesystem when the flag was not
    supplied (``value`` is falsy) or the parser is in resilient mode.
    The version string is extracted from ``__init__.py`` next to this file.
    """
    # Guard FIRST: the original read and parsed __init__.py on every CLI
    # invocation, even when --version was never passed.
    if not value or ctx.resilient_parsing:
        return
    abs_path = os.path.dirname(os.path.abspath(__file__))
    file_to_open = os.path.join(abs_path, '__init__.py')
    with io.open(file_to_open, 'rt', encoding='utf8') as f:
        version = re.search(r'__version__ = \'(.*?)\'', f.read()).group(1)
    click.echo('Version '+version)
    ctx.exit()
@click.command(cls=Trello2KanboardCLI, context_settings=CONTEXT_SETTINGS)
@click.option('--json-file', '-j', required=True, type=click.File('rb'),
              help='Trello JSON file.')
@click.option('--version', '-v', is_flag=True, callback=print_version,
              expose_value=False, is_eager=True,
              help='Show version and exit.')
@pass_context
def cli(ctx, json_file):
    """Simple Python Package for Importing Trello Projects from
    JSON Files Using the Kanboard API."""
    # NOTE: this text used to sit at the END of the function body, where it is
    # an ordinary (discarded) string expression rather than a docstring, so
    # click showed no help for the command group. It must be the first
    # statement to become the docstring.
    with json_file as f:
        json_str = f.read().decode('UTF-8')
    try:
        # Parse once up front and stash the result for sub-commands.
        ctx.json_file = json.loads(json_str)
    except Exception as e:
        print(repr(e))
        print(u'Invalid JSON File.')
        sys.exit()
|
// Source file: src/main/java/ru/contextguide/campaign/textCampaign/StrategyAverageRoi.java
package ru.contextguide.campaign.textCampaign;
import com.google.common.base.Objects;
import ru.contextguide.yandexservices.utils.JsonSerializableObject;
/**
 * Parameters of the "Average return on investment" (Average ROI) strategy.
 */
public class StrategyAverageRoi implements JsonSerializableObject {
    private int reserveReturn;
    private Long roiCoef;
    private Long goalId;
    private Long weeklySpendLimit;
    private Long bidCeiling;
    private Long profitability;
    /**
     * Maximum percentage of saved funds that may be returned to advertising
     * (spent) when the forecast ROI exceeds the {@code RoiCoef} value.
     * <p>
     * A value from 0 to 100, in multiples of ten: 0, 10, 20... 100.
     */
    public int getReserveReturn() {
        return reserveReturn;
    }
    /**
     * Maximum percentage of saved funds that may be returned to advertising
     * (spent) when the forecast ROI exceeds the {@code RoiCoef} value.
     * <p>
     * A value from 0 to 100, in multiples of ten: 0, 10, 20... 100.
     */
    public void setReserveReturn(int reserveReturn) {
        this.reserveReturn = reserveReturn;
    }
    /**
     * Desired average return on investment per week.
     * <p>
     * The value is the ratio of sales profit to advertising spend, multiplied
     * by 1,000,000, and must be an integer. If the value is not a multiple of
     * 10,000, the last four digits are zeroed (equivalent to dropping extra
     * decimal places).
     */
    public Long getRoiCoef() {
        return roiCoef;
    }
    /**
     * Desired average return on investment per week.
     * <p>
     * The value is the ratio of sales profit to advertising spend, multiplied
     * by 1,000,000, and must be an integer. If the value is not a multiple of
     * 10,000, the last four digits are zeroed (equivalent to dropping extra
     * decimal places).
     */
    public void setRoiCoef(Long roiCoef) {
        this.roiCoef = roiCoef;
    }
    /**
     * Yandex.Metrica goal ID (only non-composite goals are suitable).
     */
    public Long getGoalId() {
        return goalId;
    }
    /**
     * Yandex.Metrica goal ID (only non-composite goals are suitable).
     */
    public void setGoalId(Long goalId) {
        this.goalId = goalId;
    }
    /**
     * Weekly budget in the advertiser's currency, multiplied by 1,000,000.
     */
    public Long getWeeklySpendLimit() {
        return weeklySpendLimit;
    }
    /**
     * Weekly budget in the advertiser's currency, multiplied by 1,000,000.
     */
    public void setWeeklySpendLimit(Long weeklySpendLimit) {
        this.weeklySpendLimit = weeklySpendLimit;
    }
    /**
     * Maximum bid in the advertiser's currency, multiplied by 1,000,000.
     * <p>
     * Setting a maximum bid is not recommended — it can reduce the
     * effectiveness of the strategy.
     */
    public Long getBidCeiling() {
        return bidCeiling;
    }
    /**
     * Maximum bid in the advertiser's currency, multiplied by 1,000,000.
     * <p>
     * Setting a maximum bid is not recommended — it can reduce the
     * effectiveness of the strategy.
     */
    public void setBidCeiling(Long bidCeiling) {
        this.bidCeiling = bidCeiling;
    }
    /**
     * Percentage of revenue that is the cost of goods or services.
     * The value is a percentage multiplied by 1,000,000 and must be an integer
     * from 0 to 100,000,000 (which corresponds to 100%).
     * <p>
     * If the value is not a multiple of 10,000, the last four digits are
     * zeroed (equivalent to dropping extra decimal places).
     */
    public Long getProfitability() {
        return profitability;
    }
    /**
     * Percentage of revenue that is the cost of goods or services.
     * The value is a percentage multiplied by 1,000,000 and must be an integer
     * from 0 to 100,000,000 (which corresponds to 100%).
     * <p>
     * If the value is not a multiple of 10,000, the last four digits are
     * zeroed (equivalent to dropping extra decimal places).
     */
    public void setProfitability(Long profitability) {
        this.profitability = profitability;
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof StrategyAverageRoi)) return false;
        StrategyAverageRoi that = (StrategyAverageRoi) o;
        return reserveReturn == that.reserveReturn &&
                java.util.Objects.equals(roiCoef, that.roiCoef) &&
                java.util.Objects.equals(goalId, that.goalId) &&
                java.util.Objects.equals(weeklySpendLimit, that.weeklySpendLimit) &&
                java.util.Objects.equals(bidCeiling, that.bidCeiling) &&
                java.util.Objects.equals(profitability, that.profitability);
    }
    @Override
    public int hashCode() {
        // Use java.util.Objects.hash for consistency with equals() above;
        // the original mixed Guava's Objects.hashCode with java.util.Objects.
        return java.util.Objects.hash(reserveReturn, roiCoef, goalId, weeklySpendLimit, bidCeiling, profitability);
    }
    @Override
    public String toString() {
        return this.toJson();
    }
}
|
# ---- Host-platform detection -------------------------------------------
# Used later to pick GNU vs. BSD flags for base64 when encoding user data.
platform='unknown'
unamestr=`uname`
if [[ "$unamestr" == 'Linux' ]]; then
   platform='linux'
elif [[ "$unamestr" == 'Darwin' ]]; then
   platform='darwin'
fi
# ---- EC2 launch configuration -------------------------------------------
# 1-100: g3.4
# 101-200: p2
# AMI="ami-660ae31e"
AMI="ami-d0a16fa8" #extract
# NOTE(review): the assignment below overrides the one above; presumably the
# newer "extract" image is intended — confirm which AMI should be active.
AMI="ami-7428ff0c" #extract
#INSTANCE_TYPE="g3.4xlarge"
#INSTANCE_TYPE="p2.xlarge"
# INSTANCE_TYPE="p3.2xlarge"
INSTANCE_TYPE="c4.4xlarge"
INSTANCE_COUNT=1
KEY_NAME="taskonomy"
SECURITY_GROUP="launch-wizard-1"
SPOT_PRICE=1.2
ZONE="us-west-2"
# Availability-zone suffixes; instances are spread round-robin across these.
SUB_ZONES=( a b c )
# 11 - X
# Skip task indices below START_AT; stop after EXIT_AFTER tasks.
START_AT=1
EXIT_AFTER=500
DATA_USED="16k"
#echo 'sleeping for 9 hrs...'
#sleep 9h
INTERMEDIATE_TASKS="keypoint2d__autoencoder,denoise,impainting_whole__edge2d__8__unlocked \
keypoint2d__autoencoder,denoise,segment2d__edge2d__8__unlocked \
keypoint2d__autoencoder,denoise,colorization__edge2d__8__unlocked \
keypoint2d__autoencoder,impainting_whole,segment2d__edge2d__8__unlocked \
keypoint2d__autoencoder,impainting_whole,colorization__edge2d__8__unlocked \
keypoint2d__autoencoder,segment2d,colorization__edge2d__8__unlocked \
keypoint2d__denoise,impainting_whole,segment2d__edge2d__8__unlocked \
keypoint2d__denoise,impainting_whole,colorization__edge2d__8__unlocked \
keypoint2d__denoise,segment2d,colorization__edge2d__8__unlocked \
keypoint2d__impainting_whole,segment2d,colorization__edge2d__8__unlocked \
autoencoder__denoise,impainting_whole,segment2d__edge2d__8__unlocked \
autoencoder__denoise,impainting_whole,colorization__edge2d__8__unlocked \
autoencoder__denoise,segment2d,colorization__edge2d__8__unlocked \
autoencoder__impainting_whole,segment2d,colorization__edge2d__8__unlocked \
denoise__impainting_whole,segment2d,colorization__edge2d__8__unlocked \
denoise__impainting_whole,keypoint2d,segment2d__autoencoder__8__unlocked \
denoise__impainting_whole,keypoint2d,colorization__autoencoder__8__unlocked \
denoise__impainting_whole,keypoint2d,random__autoencoder__8__unlocked \
denoise__impainting_whole,segment2d,colorization__autoencoder__8__unlocked \
denoise__impainting_whole,segment2d,random__autoencoder__8__unlocked \
denoise__impainting_whole,colorization,random__autoencoder__8__unlocked \
denoise__keypoint2d,segment2d,colorization__autoencoder__8__unlocked \
denoise__keypoint2d,segment2d,random__autoencoder__8__unlocked \
denoise__keypoint2d,colorization,random__autoencoder__8__unlocked \
denoise__segment2d,colorization,random__autoencoder__8__unlocked \
impainting_whole__keypoint2d,segment2d,colorization__autoencoder__8__unlocked \
impainting_whole__keypoint2d,segment2d,random__autoencoder__8__unlocked \
impainting_whole__keypoint2d,colorization,random__autoencoder__8__unlocked \
impainting_whole__segment2d,colorization,random__autoencoder__8__unlocked \
keypoint2d__segment2d,colorization,random__autoencoder__8__unlocked \
rgb2sfnorm__rgb2depth,keypoint3d,reshade__curvature__8__unlocked \
rgb2sfnorm__rgb2depth,keypoint3d,rgb2mist__curvature__8__unlocked \
rgb2sfnorm__rgb2depth,keypoint3d,edge3d__curvature__8__unlocked \
rgb2sfnorm__rgb2depth,reshade,rgb2mist__curvature__8__unlocked \
rgb2sfnorm__rgb2depth,reshade,edge3d__curvature__8__unlocked \
rgb2sfnorm__rgb2depth,rgb2mist,edge3d__curvature__8__unlocked \
rgb2sfnorm__keypoint3d,reshade,rgb2mist__curvature__8__unlocked \
rgb2sfnorm__keypoint3d,reshade,edge3d__curvature__8__unlocked \
rgb2sfnorm__keypoint3d,rgb2mist,edge3d__curvature__8__unlocked \
rgb2sfnorm__reshade,rgb2mist,edge3d__curvature__8__unlocked \
rgb2depth__keypoint3d,reshade,rgb2mist__curvature__8__unlocked \
rgb2depth__keypoint3d,reshade,edge3d__curvature__8__unlocked \
rgb2depth__keypoint3d,rgb2mist,edge3d__curvature__8__unlocked \
rgb2depth__reshade,rgb2mist,edge3d__curvature__8__unlocked \
keypoint3d__reshade,rgb2mist,edge3d__curvature__8__unlocked \
autoencoder__impainting_whole,keypoint2d,segment2d__denoise__8__unlocked \
autoencoder__impainting_whole,keypoint2d,colorization__denoise__8__unlocked \
autoencoder__impainting_whole,keypoint2d,random__denoise__8__unlocked \
autoencoder__impainting_whole,segment2d,colorization__denoise__8__unlocked \
autoencoder__impainting_whole,segment2d,random__denoise__8__unlocked \
autoencoder__impainting_whole,colorization,random__denoise__8__unlocked \
autoencoder__keypoint2d,segment2d,colorization__denoise__8__unlocked \
autoencoder__keypoint2d,segment2d,random__denoise__8__unlocked \
autoencoder__keypoint2d,colorization,random__denoise__8__unlocked \
autoencoder__segment2d,colorization,random__denoise__8__unlocked \
impainting_whole__keypoint2d,segment2d,colorization__denoise__8__unlocked \
impainting_whole__keypoint2d,segment2d,random__denoise__8__unlocked \
impainting_whole__keypoint2d,colorization,random__denoise__8__unlocked \
impainting_whole__segment2d,colorization,random__denoise__8__unlocked \
keypoint2d__segment2d,colorization,random__denoise__8__unlocked \
rgb2sfnorm__keypoint3d,rgb2depth,curvature__edge3d__8__unlocked \
rgb2sfnorm__keypoint3d,rgb2depth,reshade__edge3d__8__unlocked \
rgb2sfnorm__keypoint3d,rgb2depth,rgb2mist__edge3d__8__unlocked \
rgb2sfnorm__keypoint3d,curvature,reshade__edge3d__8__unlocked \
rgb2sfnorm__keypoint3d,curvature,rgb2mist__edge3d__8__unlocked \
rgb2sfnorm__keypoint3d,reshade,rgb2mist__edge3d__8__unlocked \
rgb2sfnorm__rgb2depth,curvature,reshade__edge3d__8__unlocked \
rgb2sfnorm__rgb2depth,curvature,rgb2mist__edge3d__8__unlocked \
rgb2sfnorm__rgb2depth,reshade,rgb2mist__edge3d__8__unlocked \
rgb2sfnorm__curvature,reshade,rgb2mist__edge3d__8__unlocked \
keypoint3d__rgb2depth,curvature,reshade__edge3d__8__unlocked \
keypoint3d__rgb2depth,curvature,rgb2mist__edge3d__8__unlocked \
keypoint3d__rgb2depth,reshade,rgb2mist__edge3d__8__unlocked \
keypoint3d__curvature,reshade,rgb2mist__edge3d__8__unlocked \
rgb2depth__curvature,reshade,rgb2mist__edge3d__8__unlocked \
denoise__autoencoder,impainting_whole,segment2d__keypoint2d__8__unlocked \
denoise__autoencoder,impainting_whole,colorization__keypoint2d__8__unlocked \
denoise__autoencoder,impainting_whole,edge2d__keypoint2d__8__unlocked \
denoise__autoencoder,segment2d,colorization__keypoint2d__8__unlocked \
denoise__autoencoder,segment2d,edge2d__keypoint2d__8__unlocked \
denoise__autoencoder,colorization,edge2d__keypoint2d__8__unlocked \
denoise__impainting_whole,segment2d,colorization__keypoint2d__8__unlocked \
denoise__impainting_whole,segment2d,edge2d__keypoint2d__8__unlocked \
denoise__impainting_whole,colorization,edge2d__keypoint2d__8__unlocked \
denoise__segment2d,colorization,edge2d__keypoint2d__8__unlocked \
autoencoder__impainting_whole,segment2d,colorization__keypoint2d__8__unlocked \
autoencoder__impainting_whole,segment2d,edge2d__keypoint2d__8__unlocked \
autoencoder__impainting_whole,colorization,edge2d__keypoint2d__8__unlocked \
autoencoder__segment2d,colorization,edge2d__keypoint2d__8__unlocked \
impainting_whole__segment2d,colorization,edge2d__keypoint2d__8__unlocked \
rgb2sfnorm__room_layout,rgb2mist,ego_motion__fix_pose__8__unlocked \
rgb2sfnorm__room_layout,rgb2mist,rgb2depth__fix_pose__8__unlocked \
rgb2sfnorm__room_layout,rgb2mist,reshade__fix_pose__8__unlocked \
rgb2sfnorm__room_layout,ego_motion,rgb2depth__fix_pose__8__unlocked \
rgb2sfnorm__room_layout,ego_motion,reshade__fix_pose__8__unlocked \
rgb2sfnorm__room_layout,rgb2depth,reshade__fix_pose__8__unlocked \
rgb2sfnorm__rgb2mist,ego_motion,rgb2depth__fix_pose__8__unlocked \
rgb2sfnorm__rgb2mist,ego_motion,reshade__fix_pose__8__unlocked \
rgb2sfnorm__rgb2mist,rgb2depth,reshade__fix_pose__8__unlocked \
rgb2sfnorm__ego_motion,rgb2depth,reshade__fix_pose__8__unlocked \
room_layout__rgb2mist,ego_motion,rgb2depth__fix_pose__8__unlocked \
room_layout__rgb2mist,ego_motion,reshade__fix_pose__8__unlocked \
room_layout__rgb2mist,rgb2depth,reshade__fix_pose__8__unlocked \
room_layout__ego_motion,rgb2depth,reshade__fix_pose__8__unlocked \
rgb2mist__ego_motion,rgb2depth,reshade__fix_pose__8__unlocked \
rgb2sfnorm__fix_pose,edge3d,room_layout__ego_motion__8__unlocked \
rgb2sfnorm__fix_pose,edge3d,rgb2mist__ego_motion__8__unlocked \
rgb2sfnorm__fix_pose,edge3d,reshade__ego_motion__8__unlocked \
rgb2sfnorm__fix_pose,room_layout,rgb2mist__ego_motion__8__unlocked \
rgb2sfnorm__fix_pose,room_layout,reshade__ego_motion__8__unlocked \
rgb2sfnorm__fix_pose,rgb2mist,reshade__ego_motion__8__unlocked \
rgb2sfnorm__edge3d,room_layout,rgb2mist__ego_motion__8__unlocked \
rgb2sfnorm__edge3d,room_layout,reshade__ego_motion__8__unlocked \
rgb2sfnorm__edge3d,rgb2mist,reshade__ego_motion__8__unlocked \
rgb2sfnorm__room_layout,rgb2mist,reshade__ego_motion__8__unlocked \
fix_pose__edge3d,room_layout,rgb2mist__ego_motion__8__unlocked \
fix_pose__edge3d,room_layout,reshade__ego_motion__8__unlocked \
fix_pose__edge3d,rgb2mist,reshade__ego_motion__8__unlocked \
fix_pose__room_layout,rgb2mist,reshade__ego_motion__8__unlocked \
edge3d__room_layout,rgb2mist,reshade__ego_motion__8__unlocked \
curvature__rgb2sfnorm,segment25d,edge3d__keypoint3d__8__unlocked \
curvature__rgb2sfnorm,segment25d,reshade__keypoint3d__8__unlocked \
curvature__rgb2sfnorm,segment25d,rgb2mist__keypoint3d__8__unlocked \
curvature__rgb2sfnorm,edge3d,reshade__keypoint3d__8__unlocked \
curvature__rgb2sfnorm,edge3d,rgb2mist__keypoint3d__8__unlocked \
curvature__rgb2sfnorm,reshade,rgb2mist__keypoint3d__8__unlocked \
curvature__segment25d,edge3d,reshade__keypoint3d__8__unlocked \
curvature__segment25d,edge3d,rgb2mist__keypoint3d__8__unlocked \
curvature__segment25d,reshade,rgb2mist__keypoint3d__8__unlocked \
curvature__edge3d,reshade,rgb2mist__keypoint3d__8__unlocked \
rgb2sfnorm__segment25d,edge3d,reshade__keypoint3d__8__unlocked \
rgb2sfnorm__segment25d,edge3d,rgb2mist__keypoint3d__8__unlocked \
rgb2sfnorm__segment25d,reshade,rgb2mist__keypoint3d__8__unlocked \
rgb2sfnorm__edge3d,reshade,rgb2mist__keypoint3d__8__unlocked \
segment25d__edge3d,reshade,rgb2mist__keypoint3d__8__unlocked \
rgb2sfnorm__fix_pose,vanishing_point_well_defined,ego_motion__non_fixated_pose__8__unlocked \
rgb2sfnorm__fix_pose,vanishing_point_well_defined,room_layout__non_fixated_pose__8__unlocked \
rgb2sfnorm__fix_pose,vanishing_point_well_defined,reshade__non_fixated_pose__8__unlocked \
rgb2sfnorm__fix_pose,ego_motion,room_layout__non_fixated_pose__8__unlocked \
rgb2sfnorm__fix_pose,ego_motion,reshade__non_fixated_pose__8__unlocked \
rgb2sfnorm__fix_pose,room_layout,reshade__non_fixated_pose__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,ego_motion,room_layout__non_fixated_pose__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,ego_motion,reshade__non_fixated_pose__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,room_layout,reshade__non_fixated_pose__8__unlocked \
rgb2sfnorm__ego_motion,room_layout,reshade__non_fixated_pose__8__unlocked \
fix_pose__vanishing_point_well_defined,ego_motion,room_layout__non_fixated_pose__8__unlocked \
fix_pose__vanishing_point_well_defined,ego_motion,reshade__non_fixated_pose__8__unlocked \
fix_pose__vanishing_point_well_defined,room_layout,reshade__non_fixated_pose__8__unlocked \
fix_pose__ego_motion,room_layout,reshade__non_fixated_pose__8__unlocked \
vanishing_point_well_defined__ego_motion,room_layout,reshade__non_fixated_pose__8__unlocked \
edge3d__keypoint3d,reshade,curvature__point_match__8__unlocked \
edge3d__keypoint3d,reshade,rgb2sfnorm__point_match__8__unlocked \
edge3d__keypoint3d,reshade,rgb2depth__point_match__8__unlocked \
edge3d__keypoint3d,curvature,rgb2sfnorm__point_match__8__unlocked \
edge3d__keypoint3d,curvature,rgb2depth__point_match__8__unlocked \
edge3d__keypoint3d,rgb2sfnorm,rgb2depth__point_match__8__unlocked \
edge3d__reshade,curvature,rgb2sfnorm__point_match__8__unlocked \
edge3d__reshade,curvature,rgb2depth__point_match__8__unlocked \
edge3d__reshade,rgb2sfnorm,rgb2depth__point_match__8__unlocked \
edge3d__curvature,rgb2sfnorm,rgb2depth__point_match__8__unlocked \
keypoint3d__reshade,curvature,rgb2sfnorm__point_match__8__unlocked \
keypoint3d__reshade,curvature,rgb2depth__point_match__8__unlocked \
keypoint3d__reshade,rgb2sfnorm,rgb2depth__point_match__8__unlocked \
keypoint3d__curvature,rgb2sfnorm,rgb2depth__point_match__8__unlocked \
reshade__curvature,rgb2sfnorm,rgb2depth__point_match__8__unlocked \
rgb2sfnorm__rgb2mist,rgb2depth,edge3d__reshade__8__unlocked \
rgb2sfnorm__rgb2mist,rgb2depth,keypoint3d__reshade__8__unlocked \
rgb2sfnorm__rgb2mist,rgb2depth,curvature__reshade__8__unlocked \
rgb2sfnorm__rgb2mist,edge3d,keypoint3d__reshade__8__unlocked \
rgb2sfnorm__rgb2mist,edge3d,curvature__reshade__8__unlocked \
rgb2sfnorm__rgb2mist,keypoint3d,curvature__reshade__8__unlocked \
rgb2sfnorm__rgb2depth,edge3d,keypoint3d__reshade__8__unlocked \
rgb2sfnorm__rgb2depth,edge3d,curvature__reshade__8__unlocked \
rgb2sfnorm__rgb2depth,keypoint3d,curvature__reshade__8__unlocked \
rgb2sfnorm__edge3d,keypoint3d,curvature__reshade__8__unlocked \
rgb2mist__rgb2depth,edge3d,keypoint3d__reshade__8__unlocked \
rgb2mist__rgb2depth,edge3d,curvature__reshade__8__unlocked \
rgb2mist__rgb2depth,keypoint3d,curvature__reshade__8__unlocked \
rgb2mist__edge3d,keypoint3d,curvature__reshade__8__unlocked \
rgb2depth__edge3d,keypoint3d,curvature__reshade__8__unlocked \
rgb2depth__reshade,edge3d,keypoint3d__rgb2mist__8__unlocked \
rgb2depth__reshade,edge3d,rgb2sfnorm__rgb2mist__8__unlocked \
rgb2depth__reshade,edge3d,curvature__rgb2mist__8__unlocked \
rgb2depth__reshade,keypoint3d,rgb2sfnorm__rgb2mist__8__unlocked \
rgb2depth__reshade,keypoint3d,curvature__rgb2mist__8__unlocked \
rgb2depth__reshade,rgb2sfnorm,curvature__rgb2mist__8__unlocked \
rgb2depth__edge3d,keypoint3d,rgb2sfnorm__rgb2mist__8__unlocked \
rgb2depth__edge3d,keypoint3d,curvature__rgb2mist__8__unlocked \
rgb2depth__edge3d,rgb2sfnorm,curvature__rgb2mist__8__unlocked \
rgb2depth__keypoint3d,rgb2sfnorm,curvature__rgb2mist__8__unlocked \
reshade__edge3d,keypoint3d,rgb2sfnorm__rgb2mist__8__unlocked \
reshade__edge3d,keypoint3d,curvature__rgb2mist__8__unlocked \
reshade__edge3d,rgb2sfnorm,curvature__rgb2mist__8__unlocked \
reshade__keypoint3d,rgb2sfnorm,curvature__rgb2mist__8__unlocked \
edge3d__keypoint3d,rgb2sfnorm,curvature__rgb2mist__8__unlocked \
rgb2mist__reshade,keypoint3d,edge3d__rgb2depth__8__unlocked \
rgb2mist__reshade,keypoint3d,rgb2sfnorm__rgb2depth__8__unlocked \
rgb2mist__reshade,keypoint3d,curvature__rgb2depth__8__unlocked \
rgb2mist__reshade,edge3d,rgb2sfnorm__rgb2depth__8__unlocked \
rgb2mist__reshade,edge3d,curvature__rgb2depth__8__unlocked \
rgb2mist__reshade,rgb2sfnorm,curvature__rgb2depth__8__unlocked \
rgb2mist__keypoint3d,edge3d,rgb2sfnorm__rgb2depth__8__unlocked \
rgb2mist__keypoint3d,edge3d,curvature__rgb2depth__8__unlocked \
rgb2mist__keypoint3d,rgb2sfnorm,curvature__rgb2depth__8__unlocked \
rgb2mist__edge3d,rgb2sfnorm,curvature__rgb2depth__8__unlocked \
reshade__keypoint3d,edge3d,rgb2sfnorm__rgb2depth__8__unlocked \
reshade__keypoint3d,edge3d,curvature__rgb2depth__8__unlocked \
reshade__keypoint3d,rgb2sfnorm,curvature__rgb2depth__8__unlocked \
reshade__edge3d,rgb2sfnorm,curvature__rgb2depth__8__unlocked \
keypoint3d__edge3d,rgb2sfnorm,curvature__rgb2depth__8__unlocked \
reshade__edge3d,keypoint3d,curvature__rgb2sfnorm__8__unlocked \
reshade__edge3d,keypoint3d,segment25d__rgb2sfnorm__8__unlocked \
reshade__edge3d,keypoint3d,rgb2mist__rgb2sfnorm__8__unlocked \
reshade__edge3d,curvature,segment25d__rgb2sfnorm__8__unlocked \
reshade__edge3d,curvature,rgb2mist__rgb2sfnorm__8__unlocked \
reshade__edge3d,segment25d,rgb2mist__rgb2sfnorm__8__unlocked \
reshade__keypoint3d,curvature,segment25d__rgb2sfnorm__8__unlocked \
reshade__keypoint3d,curvature,rgb2mist__rgb2sfnorm__8__unlocked \
reshade__keypoint3d,segment25d,rgb2mist__rgb2sfnorm__8__unlocked \
reshade__curvature,segment25d,rgb2mist__rgb2sfnorm__8__unlocked \
edge3d__keypoint3d,curvature,segment25d__rgb2sfnorm__8__unlocked \
edge3d__keypoint3d,curvature,rgb2mist__rgb2sfnorm__8__unlocked \
edge3d__keypoint3d,segment25d,rgb2mist__rgb2sfnorm__8__unlocked \
edge3d__curvature,segment25d,rgb2mist__rgb2sfnorm__8__unlocked \
keypoint3d__curvature,segment25d,rgb2mist__rgb2sfnorm__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,reshade,edge3d__room_layout__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,reshade,rgb2mist__room_layout__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,reshade,rgb2depth__room_layout__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,edge3d,rgb2mist__room_layout__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,edge3d,rgb2depth__room_layout__8__unlocked \
rgb2sfnorm__vanishing_point_well_defined,rgb2mist,rgb2depth__room_layout__8__unlocked \
rgb2sfnorm__reshade,edge3d,rgb2mist__room_layout__8__unlocked \
rgb2sfnorm__reshade,edge3d,rgb2depth__room_layout__8__unlocked \
rgb2sfnorm__reshade,rgb2mist,rgb2depth__room_layout__8__unlocked \
rgb2sfnorm__edge3d,rgb2mist,rgb2depth__room_layout__8__unlocked \
vanishing_point_well_defined__reshade,edge3d,rgb2mist__room_layout__8__unlocked \
vanishing_point_well_defined__reshade,edge3d,rgb2depth__room_layout__8__unlocked \
vanishing_point_well_defined__reshade,rgb2mist,rgb2depth__room_layout__8__unlocked \
vanishing_point_well_defined__edge3d,rgb2mist,rgb2depth__room_layout__8__unlocked \
reshade__edge3d,rgb2mist,rgb2depth__room_layout__8__unlocked \
autoencoder__keypoint2d,denoise,colorization__segment2d__8__unlocked \
autoencoder__keypoint2d,denoise,edge2d__segment2d__8__unlocked \
autoencoder__keypoint2d,denoise,impainting_whole__segment2d__8__unlocked \
autoencoder__keypoint2d,colorization,edge2d__segment2d__8__unlocked \
autoencoder__keypoint2d,colorization,impainting_whole__segment2d__8__unlocked \
autoencoder__keypoint2d,edge2d,impainting_whole__segment2d__8__unlocked \
autoencoder__denoise,colorization,edge2d__segment2d__8__unlocked \
autoencoder__denoise,colorization,impainting_whole__segment2d__8__unlocked \
autoencoder__denoise,edge2d,impainting_whole__segment2d__8__unlocked \
autoencoder__colorization,edge2d,impainting_whole__segment2d__8__unlocked \
keypoint2d__denoise,colorization,edge2d__segment2d__8__unlocked \
keypoint2d__denoise,colorization,impainting_whole__segment2d__8__unlocked \
keypoint2d__denoise,edge2d,impainting_whole__segment2d__8__unlocked \
keypoint2d__colorization,edge2d,impainting_whole__segment2d__8__unlocked \
denoise__colorization,edge2d,impainting_whole__segment2d__8__unlocked \
keypoint3d__rgb2sfnorm,curvature,edge3d__segment25d__8__unlocked \
keypoint3d__rgb2sfnorm,curvature,reshade__segment25d__8__unlocked \
keypoint3d__rgb2sfnorm,curvature,rgb2mist__segment25d__8__unlocked \
keypoint3d__rgb2sfnorm,edge3d,reshade__segment25d__8__unlocked \
keypoint3d__rgb2sfnorm,edge3d,rgb2mist__segment25d__8__unlocked \
keypoint3d__rgb2sfnorm,reshade,rgb2mist__segment25d__8__unlocked \
keypoint3d__curvature,edge3d,reshade__segment25d__8__unlocked \
keypoint3d__curvature,edge3d,rgb2mist__segment25d__8__unlocked \
keypoint3d__curvature,reshade,rgb2mist__segment25d__8__unlocked \
keypoint3d__edge3d,reshade,rgb2mist__segment25d__8__unlocked \
rgb2sfnorm__curvature,edge3d,reshade__segment25d__8__unlocked \
rgb2sfnorm__curvature,edge3d,rgb2mist__segment25d__8__unlocked \
rgb2sfnorm__curvature,reshade,rgb2mist__segment25d__8__unlocked \
rgb2sfnorm__edge3d,reshade,rgb2mist__segment25d__8__unlocked \
curvature__edge3d,reshade,rgb2mist__segment25d__8__unlocked \
rgb2sfnorm__room_layout,reshade,edge3d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__room_layout,reshade,segment25d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__room_layout,reshade,keypoint3d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__room_layout,edge3d,segment25d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__room_layout,edge3d,keypoint3d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__room_layout,segment25d,keypoint3d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__reshade,edge3d,segment25d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__reshade,edge3d,keypoint3d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__reshade,segment25d,keypoint3d__vanishing_point_well_defined__8__unlocked \
rgb2sfnorm__edge3d,segment25d,keypoint3d__vanishing_point_well_defined__8__unlocked \
room_layout__reshade,edge3d,segment25d__vanishing_point_well_defined__8__unlocked \
room_layout__reshade,edge3d,keypoint3d__vanishing_point_well_defined__8__unlocked \
room_layout__reshade,segment25d,keypoint3d__vanishing_point_well_defined__8__unlocked \
room_layout__edge3d,segment25d,keypoint3d__vanishing_point_well_defined__8__unlocked \
reshade__edge3d,segment25d,keypoint3d__vanishing_point_well_defined__8__unlocked \
curvature__rgb2sfnorm,keypoint3d,reshade__segmentsemantic_rb__8__unlocked \
curvature__rgb2sfnorm,keypoint3d,edge3d__segmentsemantic_rb__8__unlocked \
curvature__rgb2sfnorm,keypoint3d,segment25d__segmentsemantic_rb__8__unlocked \
curvature__rgb2sfnorm,reshade,edge3d__segmentsemantic_rb__8__unlocked \
curvature__rgb2sfnorm,reshade,segment25d__segmentsemantic_rb__8__unlocked \
curvature__rgb2sfnorm,edge3d,segment25d__segmentsemantic_rb__8__unlocked \
curvature__keypoint3d,reshade,edge3d__segmentsemantic_rb__8__unlocked \
curvature__keypoint3d,reshade,segment25d__segmentsemantic_rb__8__unlocked \
curvature__keypoint3d,edge3d,segment25d__segmentsemantic_rb__8__unlocked \
curvature__reshade,edge3d,segment25d__segmentsemantic_rb__8__unlocked \
rgb2sfnorm__keypoint3d,reshade,edge3d__segmentsemantic_rb__8__unlocked \
rgb2sfnorm__keypoint3d,reshade,segment25d__segmentsemantic_rb__8__unlocked \
rgb2sfnorm__keypoint3d,edge3d,segment25d__segmentsemantic_rb__8__unlocked \
rgb2sfnorm__reshade,edge3d,segment25d__segmentsemantic_rb__8__unlocked \
keypoint3d__reshade,edge3d,segment25d__segmentsemantic_rb__8__unlocked \
class_selected__segmentsemantic_rb,edge3d,curvature__class_1000__8__unlocked \
class_selected__segmentsemantic_rb,edge3d,point_match__class_1000__8__unlocked \
class_selected__segmentsemantic_rb,edge3d,keypoint3d__class_1000__8__unlocked \
class_selected__segmentsemantic_rb,curvature,point_match__class_1000__8__unlocked \
class_selected__segmentsemantic_rb,curvature,keypoint3d__class_1000__8__unlocked \
class_selected__segmentsemantic_rb,point_match,keypoint3d__class_1000__8__unlocked \
class_selected__edge3d,curvature,point_match__class_1000__8__unlocked \
class_selected__edge3d,curvature,keypoint3d__class_1000__8__unlocked \
class_selected__edge3d,point_match,keypoint3d__class_1000__8__unlocked \
class_selected__curvature,point_match,keypoint3d__class_1000__8__unlocked \
segmentsemantic_rb__edge3d,curvature,point_match__class_1000__8__unlocked \
segmentsemantic_rb__edge3d,curvature,keypoint3d__class_1000__8__unlocked \
segmentsemantic_rb__edge3d,point_match,keypoint3d__class_1000__8__unlocked \
segmentsemantic_rb__curvature,point_match,keypoint3d__class_1000__8__unlocked \
edge3d__curvature,point_match,keypoint3d__class_1000__8__unlocked \
class_1000__segmentsemantic_rb,curvature,edge3d__class_selected__8__unlocked \
class_1000__segmentsemantic_rb,curvature,keypoint3d__class_selected__8__unlocked \
class_1000__segmentsemantic_rb,curvature,point_match__class_selected__8__unlocked \
class_1000__segmentsemantic_rb,edge3d,keypoint3d__class_selected__8__unlocked \
class_1000__segmentsemantic_rb,edge3d,point_match__class_selected__8__unlocked \
class_1000__segmentsemantic_rb,keypoint3d,point_match__class_selected__8__unlocked \
class_1000__curvature,edge3d,keypoint3d__class_selected__8__unlocked \
class_1000__curvature,edge3d,point_match__class_selected__8__unlocked \
class_1000__curvature,keypoint3d,point_match__class_selected__8__unlocked \
class_1000__edge3d,keypoint3d,point_match__class_selected__8__unlocked \
segmentsemantic_rb__curvature,edge3d,keypoint3d__class_selected__8__unlocked \
segmentsemantic_rb__curvature,edge3d,point_match__class_selected__8__unlocked \
segmentsemantic_rb__curvature,keypoint3d,point_match__class_selected__8__unlocked \
segmentsemantic_rb__edge3d,keypoint3d,point_match__class_selected__8__unlocked \
curvature__edge3d,keypoint3d,point_match__class_selected__8__unlocked"
INTERMEDIATE_TASKS="class_1000__class_selected,edge3d,segmentsemantic_rb__class_places__8__unlocked \
class_1000__class_selected,edge3d,curvature__class_places__8__unlocked \
class_1000__class_selected,edge3d,keypoint3d__class_places__8__unlocked \
class_1000__class_selected,segmentsemantic_rb,curvature__class_places__8__unlocked \
class_1000__class_selected,segmentsemantic_rb,keypoint3d__class_places__8__unlocked \
class_1000__class_selected,curvature,keypoint3d__class_places__8__unlocked \
class_1000__edge3d,segmentsemantic_rb,curvature__class_places__8__unlocked \
class_1000__edge3d,segmentsemantic_rb,keypoint3d__class_places__8__unlocked \
class_1000__edge3d,curvature,keypoint3d__class_places__8__unlocked \
class_1000__segmentsemantic_rb,curvature,keypoint3d__class_places__8__unlocked \
class_selected__edge3d,segmentsemantic_rb,curvature__class_places__8__unlocked \
class_selected__edge3d,segmentsemantic_rb,keypoint3d__class_places__8__unlocked \
class_selected__edge3d,curvature,keypoint3d__class_places__8__unlocked \
class_selected__segmentsemantic_rb,curvature,keypoint3d__class_places__8__unlocked \
edge3d__segmentsemantic_rb,curvature,keypoint3d__class_places__8__unlocked"
#INTERMEDIATE_TASKS="rgb2sfnorm__curvature,keypoint3d,class_places__segmentsemantic_rb__8__unlocked \
#rgb2sfnorm__curvature,reshade,class_places__segmentsemantic_rb__8__unlocked \
#rgb2sfnorm__curvature,class_places,edge3d__segmentsemantic_rb__8__unlocked \
#rgb2sfnorm__keypoint3d,reshade,class_places__segmentsemantic_rb__8__unlocked \
#rgb2sfnorm__keypoint3d,class_places,edge3d__segmentsemantic_rb__8__unlocked \
#rgb2sfnorm__reshade,class_places,edge3d__segmentsemantic_rb__8__unlocked \
#curvature__keypoint3d,reshade,class_places__segmentsemantic_rb__8__unlocked \
#curvature__keypoint3d,class_places,edge3d__segmentsemantic_rb__8__unlocked \
#curvature__reshade,class_places,edge3d__segmentsemantic_rb__8__unlocked \
#keypoint3d__reshade,class_places,edge3d__segmentsemantic_rb__8__unlocked"
TARGET_DECODER_FUNCS="DO_NOT_REPLACE_TARGET_DECODER"
COUNTER=0
#for src in $SRC_TASKS; do
# Request one spot instance per (intermediate task, decoder func) pair,
# rotating across availability sub-zones and honoring START_AT/EXIT_AFTER.
for intermediate in $INTERMEDIATE_TASKS; do
for decode in $TARGET_DECODER_FUNCS; do
    # BUG FIX: $[ ] arithmetic is deprecated; use POSIX $(( )).
    COUNTER=$((COUNTER + 1))
    SUB_ZONE=${SUB_ZONES[$((COUNTER%3))]}
    if [ "$COUNTER" -lt "$START_AT" ]; then
        echo "Skipping at $COUNTER (starting at $START_AT)"
        continue
    fi
    echo "running $COUNTER"
    # macOS base64 uses -D for decode and has no -w; GNU uses -d and -w 0.
    if [[ "$platform" == "linux" ]];
    then
        OPTIONS="-w 0"
        ECHO_OPTIONS="-d"
    else
        OPTIONS=""
        ECHO_OPTIONS="-D"
    fi
    # NOTE(review): the user-data below embeds a hard-coded GitHub token —
    # rotate it and inject via an instance profile/secret instead.
    USER_DATA=$(base64 $OPTIONS << END_USER_DATA
export HOME="/home/ubuntu"
export INSTANCE_TAG="fourth_order/${decode}/$DATA_USED/${intermediate}";
#export ACTION=TRANSFER;
export ACTION=EXTRACT_LOSSES;
cd /home/ubuntu/task-taxonomy-331b
git stash
git remote add autopull https://alexsax:328d7b8a3e905c1400f293b9c4842fcae3b7dc54@github.com/alexsax/task-taxonomy-331b.git
git pull autopull perceptual-transfer
watch -n 300 "bash /home/ubuntu/task-taxonomy-331b/tools/script/reboot_if_disconnected.sh" &> /dev/null &
END_USER_DATA
)
    echo "$USER_DATA" | base64 $ECHO_OPTIONS
    aws ec2 request-spot-instances \
        --spot-price $SPOT_PRICE \
        --instance-count $INSTANCE_COUNT \
        --region $ZONE \
        --launch-specification \
            "{ \
                \"ImageId\":\"$AMI\", \
                \"InstanceType\":\"$INSTANCE_TYPE\", \
                \"KeyName\":\"$KEY_NAME\", \
                \"SecurityGroups\": [\"$SECURITY_GROUP\"], \
                \"UserData\":\"$USER_DATA\", \
                \"Placement\": { \
                    \"AvailabilityZone\": \"us-west-2${SUB_ZONE}\" \
                } \
            }"
    sleep 1
    if [ "$COUNTER" -ge "$EXIT_AFTER" ]; then
        echo "EXITING before $COUNTER (after $EXIT_AFTER)"
        break
    fi
done
if [ "$COUNTER" -ge "$EXIT_AFTER" ]; then
    echo "EXITING before $COUNTER (after $EXIT_AFTER)"
    break
fi
done
|
export const isNil = (val: any) => typeof val === 'undefined' || val === null; |
<reponame>tdrv90/freeCodeCamp<filename>JavaScript Algorithms and Data Structures Certification (300 hours)/Basic JavaScript/06. Understanding Case Sensitivity in Variables.js
// Modify the existing declarations and assignments so their names use camelCase.
// Do not create any new variables.
// (freeCodeCamp exercise: the variable names themselves ARE the answer —
// each declaration below must match its assignment exactly, case included.)
// Declarations
var studlyCapVar;
var properCamelCase;
var titleCaseOver;
// Assignments
studlyCapVar = 10;
properCamelCase = "A String";
titleCaseOver = 9000;
/*
* Copyright (c) 2021-2021. https://playio.cloud/
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* All rights reserved.
*/
/** The set of allowed greeting names. */
export type Named = 'hi' | 'hello';
/**
 * NOTE(review): heterogeneous enum — Key1 is numeric (2) while Key2 is the
 * string 'zero' (despite its name); confirm both values are intentional.
 */
export enum Key {
  Key1 = 2,
  Key2 = 'zero',
}
/** A named entity paired with one of the predefined keys. */
export interface Hey {
  name: Named;
  key: Key;
}
|
#!/bin/bash
# Generate release configs for client/proxy/nginx by substituting the user's
# domain into the templates, then convert the YAML configs to JSON.
# BUG FIX: PATH was missing the ':' between /sbin and /usr/sbin, which made
# /usr/sbin unreachable and added a bogus /sbin/usr/sbin entry.
export PATH=/bin:/usr/bin:/sbin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/.bin
read -p "please input your domain: " domain
sed "s/yourdomain/${domain}/" client.yaml > client_release.yaml
sed "s/yourdomain/${domain}/" proxy.yaml > proxy_release.yaml
sed "s/yourdomain/${domain}/" template.nginx.conf > nginx.conf
yaml2json client_release.yaml > client.json
yaml2json proxy_release.yaml > proxy.json
yaml2json server.yaml > server.json
# BUG FIX: corrected "pelase" typo in the user-facing message.
echo "please move the v2ray.service to /etc/init.d/v2ray and grant execution authority for it"
|
#!/usr/bin/env bash
# Build the postgres-backed API image and (re)launch its container.
TAG=mddv-postgres
API=api-postgres
PORT=8083
echo "building image"
docker build -t $TAG .
# NOTE(review): grep matches substrings anywhere in `docker container ls`
# output, so a container whose image name merely contains "api-postgres"
# would also match — consider `--filter name=` if that ever bites.
if docker container ls | grep $API > /dev/null; then
    echo "stopping old container"
    docker container stop $API
fi
if docker container ls -a | grep $API > /dev/null; then
    echo "removing old container"
    docker container rm $API
fi
echo "launching container"
# Persist the Maven repo in a named volume; map the API port 1:1.
docker run -v mvn-repo:/root/.m2 --name $API -d -p $PORT:$PORT $TAG
|
def compute_measurement(measurement):
    """Return the total of all values in *measurement* (0 for an empty input)."""
    total = 0
    for value in measurement:
        total += value
    return total
#!/bin/bash
# Compile the HyTL program given as $1 to a native binary and run it:
#   $1 -> tmp.hytl -> (hytl-exe) tmp.ll -> (llc) tmp.s -> (gcc) tmp
if [ $# -ne 1 ]; then
    echo ""
    echo "!!! Please specify an argument !!!"
    # BUG FIX: previously fell through and ran the pipeline with no input.
    exit 1
fi
printf "$1" >./examples/tmp/tmp.hytl
echo ""
echo "maked ./examples/tmp/tmp.hytl"
stack exec -- hytl-exe compile examples/tmp/tmp.hytl
echo ""
echo "compiled to ./examples/tmp/tmp.ll"
/usr/local/opt/llvm/bin/llc examples/tmp/tmp.ll
echo ""
echo "compiled to ./examples/tmp/tmp.s"
gcc examples/tmp/tmp.s -o examples/tmp/tmp
echo ""
echo "compiled to ./examples/tmp/tmp"
echo ""
echo "-- result ---"
./examples/tmp/tmp
|
import { ICapsuleCharacterControllerDesc } from "@arche-engine/design";
import { Vector3 } from "@arche-engine/math";
import { PhysicsMaterial } from "../PhysicsMaterial";
import { PhysicsManager } from "../PhysicsManager";
import { CapsuleClimbingMode } from "./CapsuleCharacterController";
import { ControllerNonWalkableMode } from "./CharacterController";
/**
 * A descriptor for a capsule character controller.
 *
 * Every setter writes the value both into a JS-side cache (the private
 * fields) and into the underlying native descriptor, so the two stay in
 * sync; the getters read only the cache.
 */
export class CapsuleCharacterControllerDesc {
  /** @internal */
  _nativeCharacterControllerDesc: ICapsuleCharacterControllerDesc;

  private _radius: number = 0;
  private _height: number = 0;
  private _climbingMode: CapsuleClimbingMode = CapsuleClimbingMode.EASY;
  private _position = new Vector3();
  private _upDirection = new Vector3(0, 1, 0);
  private _slopeLimit: number = 0;
  private _invisibleWallHeight: number = 0;
  private _maxJumpHeight: number = 0;
  private _contactOffset: number = 0;
  private _stepOffset: number = 0;
  private _density: number = 0;
  private _scaleCoeff: number = 0;
  private _volumeGrowth: number = 0;
  private _nonWalkableMode: ControllerNonWalkableMode = ControllerNonWalkableMode.PREVENT_CLIMBING;
  private _material: PhysicsMaterial;
  private _registerDeletionListener: boolean = false;

  /**
   * The radius of the capsule
   */
  get radius(): number {
    return this._radius;
  }

  set radius(newValue: number) {
    this._radius = newValue;
    this._nativeCharacterControllerDesc.setRadius(newValue);
  }

  /**
   * The height of the controller
   */
  get height(): number {
    return this._height;
  }

  set height(newValue: number) {
    this._height = newValue;
    this._nativeCharacterControllerDesc.setHeight(newValue);
  }

  /**
   * The climbing mode
   */
  get climbingMode(): CapsuleClimbingMode {
    return this._climbingMode;
  }

  set climbingMode(newValue: CapsuleClimbingMode) {
    this._climbingMode = newValue;
    this._nativeCharacterControllerDesc.setClimbingMode(newValue);
  }

  /**
   * The position of the character
   */
  get position(): Vector3 {
    return this._position;
  }

  set position(newValue: Vector3) {
    // NOTE(review): identity check only — assigning the cached Vector3 back
    // to itself is a no-op; a *different* instance is copied into the cache
    // (values are cloned, the reference is kept stable for the native side).
    if (this._position !== newValue) {
      newValue.cloneTo(this._position);
      this._nativeCharacterControllerDesc.setPosition(this._position);
    }
  }

  /**
   * Specifies the 'up' direction
   */
  get upDirection(): Vector3 {
    return this._upDirection;
  }

  set upDirection(newValue: Vector3) {
    newValue.cloneTo(this._upDirection);
    this._nativeCharacterControllerDesc.setUpDirection(this._upDirection);
  }

  /**
   * The maximum slope which the character can walk up.
   */
  get slopeLimit(): number {
    return this._slopeLimit;
  }

  set slopeLimit(newValue: number) {
    this._slopeLimit = newValue;
    this._nativeCharacterControllerDesc.setSlopeLimit(newValue);
  }

  /**
   * Height of invisible walls created around non-walkable triangles
   */
  get invisibleWallHeight(): number {
    return this._invisibleWallHeight;
  }

  set invisibleWallHeight(newValue: number) {
    this._invisibleWallHeight = newValue;
    this._nativeCharacterControllerDesc.setInvisibleWallHeight(newValue);
  }

  /**
   * Maximum height a jumping character can reach
   */
  get maxJumpHeight(): number {
    return this._maxJumpHeight;
  }

  set maxJumpHeight(newValue: number) {
    this._maxJumpHeight = newValue;
    this._nativeCharacterControllerDesc.setMaxJumpHeight(newValue);
  }

  /**
   * The contact offset used by the controller.
   */
  get contactOffset(): number {
    return this._contactOffset;
  }

  set contactOffset(newValue: number) {
    this._contactOffset = newValue;
    this._nativeCharacterControllerDesc.setContactOffset(newValue);
  }

  /**
   * The maximum height of an obstacle which the character can climb.
   */
  get stepOffset(): number {
    return this._stepOffset;
  }

  set stepOffset(newValue: number) {
    this._stepOffset = newValue;
    this._nativeCharacterControllerDesc.setStepOffset(newValue);
  }

  /**
   * Density of underlying kinematic actor
   */
  get density(): number {
    return this._density;
  }

  set density(newValue: number) {
    this._density = newValue;
    this._nativeCharacterControllerDesc.setDensity(newValue);
  }

  /**
   * Scale coefficient for underlying kinematic actor
   */
  get scaleCoeff(): number {
    return this._scaleCoeff;
  }

  set scaleCoeff(newValue: number) {
    this._scaleCoeff = newValue;
    this._nativeCharacterControllerDesc.setScaleCoeff(newValue);
  }

  /**
   * Cached volume growth
   */
  get volumeGrowth(): number {
    return this._volumeGrowth;
  }

  set volumeGrowth(newValue: number) {
    this._volumeGrowth = newValue;
    this._nativeCharacterControllerDesc.setVolumeGrowth(newValue);
  }

  /**
   * The non-walkable mode controls if a character controller slides or not on a non-walkable part.
   */
  get nonWalkableMode(): ControllerNonWalkableMode {
    return this._nonWalkableMode;
  }

  set nonWalkableMode(newValue: ControllerNonWalkableMode) {
    this._nonWalkableMode = newValue;
    this._nativeCharacterControllerDesc.setNonWalkableMode(newValue);
  }

  /**
   * The material for the actor associated with the controller.
   */
  get material(): PhysicsMaterial {
    return this._material;
  }

  set material(newValue: PhysicsMaterial) {
    this._material = newValue;
    // Optional chaining: a null/undefined material clears the native material.
    this._nativeCharacterControllerDesc.setMaterial(newValue?._nativeMaterial);
  }

  /**
   * Use a deletion listener to get informed about released objects and clear internal caches if needed.
   */
  get registerDeletionListener(): boolean {
    return this._registerDeletionListener;
  }

  set registerDeletionListener(newValue: boolean) {
    this._registerDeletionListener = newValue;
    this._nativeCharacterControllerDesc.setRegisterDeletionListener(newValue);
  }

  constructor() {
    this._nativeCharacterControllerDesc = PhysicsManager._nativePhysics.createCapsuleCharacterControllerDesc();
  }

  /**
   * (re)sets the structure to the default.
   */
  setToDefault() {
    // Resets only the JS-side cache field-by-field; the native side is reset
    // wholesale via setToDefault() at the end.
    this._radius = 0;
    this._height = 0;
    this._climbingMode = CapsuleClimbingMode.EASY;
    this._position.setValue(0, 0, 0);
    this._upDirection.setValue(0, 1, 0);
    this._slopeLimit = 0;
    this._invisibleWallHeight = 0;
    this._maxJumpHeight = 0;
    this._contactOffset = 0;
    this._stepOffset = 0;
    this._density = 0;
    this._scaleCoeff = 0;
    this._volumeGrowth = 0;
    this._nonWalkableMode = ControllerNonWalkableMode.PREVENT_CLIMBING;
    this._material = null;
    this._registerDeletionListener = false;
    this._nativeCharacterControllerDesc.setToDefault();
  }
}
|
from fixate.config import RESOURCES
from fixate.config.local_config import load_local_config
import fixate.sequencer
# Single-source package version string.
__version__ = '0.4.3'
# Register the singleton Sequencer before loading the local config.
# NOTE(review): order assumed significant — load_local_config() may read
# RESOURCES during startup; confirm before reordering.
RESOURCES["SEQUENCER"] = fixate.sequencer.Sequencer()
load_local_config()
|
<gh_stars>1-10
package io.gridgo.bean;
import java.util.Collection;
import java.util.function.UnaryOperator;
/**
 * A {@link BArray} whose mutating operations are all disabled: every default
 * method below fails with an {@link UnsupportedOperationException}.
 */
public interface ImmutableBArray extends BArray {

    /**
     * @deprecated A single shared, pre-constructed exception carries the stack
     *             trace captured at interface initialization, not at the
     *             failing call site (Throwable records its trace at
     *             construction). Kept only for binary compatibility; the
     *             default methods now throw a fresh instance per call.
     */
    @Deprecated
    static final UnsupportedOperationException UNSUPPORTED = new UnsupportedOperationException("Instance of ImmutableBArray cannot be modified");

    /** Builds a fresh exception so the stack trace points at the call site. */
    static UnsupportedOperationException unsupported() {
        return new UnsupportedOperationException("Instance of ImmutableBArray cannot be modified");
    }

    @Override
    default void clear() {
        throw unsupported();
    }

    @Override
    default boolean add(BElement e) {
        throw unsupported();
    }

    @Override
    default boolean remove(Object o) {
        throw unsupported();
    }

    @Override
    default boolean addAll(Collection<? extends BElement> c) {
        throw unsupported();
    }

    @Override
    default boolean addAll(int index, Collection<? extends BElement> c) {
        throw unsupported();
    }

    @Override
    default boolean removeAll(Collection<?> c) {
        throw unsupported();
    }

    @Override
    default boolean retainAll(Collection<?> c) {
        throw unsupported();
    }

    @Override
    default void replaceAll(UnaryOperator<BElement> operator) {
        throw unsupported();
    }

    @Override
    default BElement set(int index, BElement element) {
        throw unsupported();
    }

    @Override
    default void add(int index, BElement element) {
        throw unsupported();
    }

    @Override
    default BElement remove(int index) {
        throw unsupported();
    }
}
|
<!-- Simple registration form; posts back to this same script. -->
<form action="registration.php" method="POST">
Username: <input type="text" name="username"><br>
Password: <input type="password" name="password"><br>
Email: <input type="email" name="email"><br>
<input type="submit" value="Register">
</form>
<?php
// NOTE(review): placeholder handler — when implemented, validate/escape all
// three inputs and hash the password before storing; never echo them raw.
if (isset($_POST['username']) && isset($_POST['password']) && isset($_POST['email'])) {
	// ...handle registration here
}
?>
# Copyright (C) 2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
from mmdet.datasets import PIPELINES
from mmcv.utils import build_from_cfg
import numpy as np
# import cv2 as cv
from torchvision import transforms as T
# import torchvision.transforms.functional as F
from mmdet.datasets.pipelines.formating import ImageToTensor, to_tensor
# from mmdet.datasets.pipelines.transforms import Normalize
from PIL import Image, ImageFilter
@PIPELINES.register_module()
class ColorJitter(T.ColorJitter):
    """MMDet adapter for torchvision's ColorJitter.

    key_maps: iterable of (dst_key, src_key) pairs; the jittered copy of
    ``inputs[src_key]`` is stored under ``outputs[dst_key]``.
    """

    def __init__(self, key_maps=(('img', 'img'),), **kwargs):
        super().__init__(**kwargs)
        # BUG FIX: the default used to be a mutable list, shared between all
        # instances constructed without arguments; a tuple default is safe.
        self.key_maps = key_maps

    def forward(self, inputs):
        # Shallow copy so keys not listed in key_maps pass through unchanged.
        outputs = inputs.copy()
        for key_map in self.key_maps:
            outputs[key_map[0]] = super().forward(inputs[key_map[1]])
        return outputs
@PIPELINES.register_module()
class RandomGrayscale(T.RandomGrayscale):
    """MMDet adapter for torchvision's RandomGrayscale.

    key_maps: iterable of (dst_key, src_key) pairs; the transformed copy of
    ``inputs[src_key]`` is stored under ``outputs[dst_key]``.
    """

    def __init__(self, key_maps=(('img', 'img'),), **kwargs):
        super().__init__(**kwargs)
        # BUG FIX: mutable default list replaced with an immutable tuple.
        self.key_maps = key_maps

    def forward(self, inputs):
        # Shallow copy so keys not listed in key_maps pass through unchanged.
        outputs = inputs.copy()
        for key_map in self.key_maps:
            outputs[key_map[0]] = super().forward(inputs[key_map[1]])
        return outputs
@PIPELINES.register_module()
class RandomErasing(T.RandomErasing):
    """MMDet adapter for torchvision's RandomErasing.

    key_maps: iterable of (dst_key, src_key) pairs; the transformed copy of
    ``inputs[src_key]`` is stored under ``outputs[dst_key]``.
    """

    def __init__(self, key_maps=(('img', 'img'),), **kwargs):
        super().__init__(**kwargs)
        # BUG FIX: mutable default list replaced with an immutable tuple.
        self.key_maps = key_maps

    def forward(self, inputs):
        # Shallow copy so keys not listed in key_maps pass through unchanged.
        outputs = inputs.copy()
        for key_map in self.key_maps:
            outputs[key_map[0]] = super().forward(inputs[key_map[1]])
        return outputs
@PIPELINES.register_module()
class RandomGaussianBlur(object):
    """Gaussian blur augmentation in SimCLR https://arxiv.org/abs/2002.05709.

    A single sigma is drawn per call from [sigma_min, sigma_max] and applied
    to every image named in key_maps (PIL images expected).
    """

    def __init__(self, sigma_min, sigma_max, key_maps=(('img', 'img'),)):
        self.sigma_min = sigma_min
        self.sigma_max = sigma_max
        # BUG FIX: mutable default list replaced with an immutable tuple.
        self.key_maps = key_maps

    def __call__(self, inputs):
        outputs = inputs.copy()
        sigma = np.random.uniform(self.sigma_min, self.sigma_max)
        for key_map in self.key_maps:
            # NOTE(review): unlike the other adapters in this file, this reads
            # from key_map[0] and writes to key_map[1] — confirm the reversed
            # order is intentional (it is a no-op for the ('img','img') default).
            img = inputs[key_map[0]]
            img = img.filter(ImageFilter.GaussianBlur(radius=sigma))
            outputs[key_map[1]] = img
        return outputs

    def __repr__(self):
        repr_str = self.__class__.__name__
        return repr_str
@PIPELINES.register_module()
class RandomApply(T.RandomApply):
    """MMDet adapter: builds the wrapped transforms from mmcv config dicts.

    transform_cfgs: list of pipeline config dicts, each instantiated via
    ``build_from_cfg`` against the PIPELINES registry.
    p: probability of applying the whole transform list (torchvision semantics).
    """

    def __init__(self, transform_cfgs, p=0.5):
        transforms = []
        for transform_cfg in transform_cfgs:
            transforms.append(build_from_cfg(transform_cfg, PIPELINES))
        super().__init__(transforms, p=p)
@PIPELINES.register_module()
class NDArrayToTensor(ImageToTensor):
    """MMDet adapter: like ImageToTensor but tolerates 2-D (H, W) arrays.

    Each array under ``self.keys`` is expanded to (H, W, 1) if needed, made
    contiguous in CHW order, and converted to a torch tensor.
    """

    def __call__(self, results):
        for key in self.keys:
            img = results[key]
            # Grayscale arrays get an explicit trailing channel axis.
            if len(img.shape) < 3:
                img = np.expand_dims(img, -1)
            # HWC -> CHW; ascontiguousarray avoids a negative-stride copy later.
            img = np.ascontiguousarray(img.transpose(2, 0, 1))
            results[key] = to_tensor(img)
        return results
# @PIPELINES.register_module()
# class NormalizeTensor(Normalize):
# """MMDet adapter"""
#
# def __call__(self, results):
# for key in results.get('img_fields', ['img']):
# img = results[key]
# img = F.normalize(img.float(), self.mean, self.std)
# if self.to_rgb:
# img = img[[2, 1, 0]]
# results[key] = img
# results['img_norm_cfg'] = dict(
# mean=self.mean, std=self.std, to_rgb=self.to_rgb)
# return results
#
#
# @PIPELINES.register_module()
# class PadTensor(object):
# def __init__(self, size_divisor=32):
# self.size_divisor = float(size_divisor)
#
# def __call__(self, results):
# for key in results.get('img_fields', ['img']):
# img = results[key]
# h = img.shape[1]
# w = img.shape[2]
# H = int(np.ceil(h/self.size_divisor)*self.size_divisor)
# W = int(np.ceil(w/self.size_divisor)*self.size_divisor)
# padding = (0, 0, W-w, H-h)
# # print(padding)
# img = F.pad(img, padding)
# results[key] = img
# results['pad_shape'] = img.shape
# results['pad_size_divisor'] = self.size_divisor
# return results
@PIPELINES.register_module()
class NDArrayToPILImage(object):
    """Convert numpy arrays stored under ``keys`` to PIL RGB images.

    NOTE(review): assumes each array is HxWx3 uint8 in RGB channel order —
    confirm against the upstream loading pipeline.
    """

    def __init__(self, keys=('img',)):
        # BUG FIX: mutable default list replaced with an immutable tuple.
        self.keys = keys

    def __call__(self, results):
        for key in self.keys:
            img = results[key]
            img = Image.fromarray(img, mode='RGB')
            results[key] = img
        return results

    def __repr__(self):
        repr_str = self.__class__.__name__
        return repr_str
@PIPELINES.register_module()
class PILImageToNDArray(object):
    """Convert PIL images stored under ``keys`` back to numpy arrays."""

    def __init__(self, keys=('img',)):
        # BUG FIX: mutable default list replaced with an immutable tuple.
        self.keys = keys

    def __call__(self, results):
        for key in self.keys:
            img = results[key]
            img = np.asarray(img)
            results[key] = img
        return results

    def __repr__(self):
        repr_str = self.__class__.__name__
        return repr_str
@PIPELINES.register_module()
class BranchImage(object):
    """Duplicate entries under new keys.

    For each (src, dst) pair in ``key_map``, copies ``results[src]`` to
    ``results[dst]``; if ``src`` is a registered image field, ``dst`` is
    registered as one too.
    """

    def __init__(self, key_map=None):
        # BUG FIX: the default used to be a shared mutable dict ({}), aliased
        # by every instance constructed without arguments.
        self.key_map = {} if key_map is None else key_map

    def __call__(self, results):
        for k1, k2 in self.key_map.items():
            if k1 in results:
                results[k2] = results[k1]
                if k1 in results['img_fields']:
                    results['img_fields'].append(k2)
        return results

    def __repr__(self):
        repr_str = self.__class__.__name__
        return repr_str
|
#!/bin/bash
# Container entrypoint: Django housekeeping, then Gunicorn (backgrounded,
# logs tailed to stdout) and nginx in the foreground.
# django housekeeping & setup
python manage.py migrate --noinput # apply database migrations
python manage.py collectstatic --noinput # collect static files
# Prepare log files and start outputting logs to stdout
touch /srv/logs/gunicorn.log
touch /srv/logs/access.log
tail -n 0 -f /srv/logs/*.log &
# Start Gunicorn processes
echo Starting Gunicorn.
# NOTE(review): `exec` with a trailing `&` runs in a background subshell, so
# it does not replace this shell — confirm intent (plain `gunicorn ... &`
# behaves the same).
exec gunicorn collaborative.wsgi:application \
    --bind 0.0.0.0:8000 \
    --workers 1 \
    --log-level=info \
    --log-file=/srv/logs/gunicorn.log \
    --access-logfile=/srv/logs/access.log &
# Start nginx
echo "Starting nginx..."
exec service nginx start
|
// Comparator factories: sort plain objects by a single numeric count field.
const ascBy = (field) => (a, b) => a[field] - b[field]
const descBy = (field) => (a, b) => b[field] - a[field]

// Comparator table keyed as <FIELD>_PROPERTYCOUNT_<ASC|DESC>.
const SORT_RULE = {
  AVA_PROPERTYCOUNT_DESC: descBy('avaPropertyCount'),
  AVA_PROPERTYCOUNT_ASC: ascBy('avaPropertyCount'),
  PROPERTYCOUNT_DESC: descBy('propertyCount'),
  PROPERTYCOUNT_ASC: ascBy('propertyCount'),
  TRUSTREC_PROPERTYCOUNT_DESC: descBy('trustRecPropertyCount'),
  TRUSTREC_PROPERTYCOUNT_ASC: ascBy('trustRecPropertyCount'),
  KEY_PROPERTYCOUNT_DESC: descBy('keyPropertyCount'),
  KEY_PROPERTYCOUNT_ASC: ascBy('keyPropertyCount'),
  REALSUR_PROPERTYCOUNT_DESC: descBy('realSurPropertyCount'),
  REALSUR_PROPERTYCOUNT_ASC: ascBy('realSurPropertyCount'),
  BMRECOM_PROPERTYCOUNT_DESC: descBy('bmRecomPropertyCount'),
  BMRECOM_PROPERTYCOUNT_ASC: ascBy('bmRecomPropertyCount'),
  DMRECOM_PROPERTYCOUNT_DESC: descBy('dmRecomPropertyCount'),
  DMRECOM_PROPERTYCOUNT_ASC: ascBy('dmRecomPropertyCount')
}

export default SORT_RULE
|
<gh_stars>0
import Component from '@glimmer/component';
export default class PoiCard extends Component {
  // Pseudo-random background colour for the card: an evenly distributed
  // pick among 'red', 'green' and 'blue'.
  get rndBgColor () {
    const palette = ['red', 'green', 'blue'];
    const rndNum = Math.round(Math.random() * 100) % 3;
    return palette[rndNum];
  }
};
|
<filename>src/main/java/au/org/noojee/irrigation/weather/units/Millimetres.java
package au.org.noojee.irrigation.weather.units;
import java.math.BigDecimal;
/**
 * A length reading expressed in millimetres, backed by an exact
 * {@link BigDecimal} parsed from the supplied string.
 */
public class Millimetres {

    BigDecimal millimetres;

    /**
     * @param millimetres decimal string, parsed exactly via {@link BigDecimal}.
     */
    public Millimetres(String millimetres) {
        this.millimetres = new BigDecimal(millimetres);
    }

    @Override
    public String toString() {
        // Historical output spelling is "Millimeters" (US) — preserved as-is.
        StringBuilder text = new StringBuilder("Millimeters=");
        text.append(this.millimetres);
        return text.toString();
    }
}
|
package relay
import (
	"fmt"
	"math/rand"
	"os"
	"reflect"
	"strconv"
	"strings"
	"sync"
	"testing"
	"time"

	"github.com/hashicorp/go-uuid"
)
// CheckInteg skips the current test unless integration tests are enabled
// (INTEG_TESTS=true) and an AMQP broker host is configured.
func CheckInteg(t *testing.T) {
	if os.Getenv("INTEG_TESTS") != "true" || AMQPHost() == "" {
		t.SkipNow()
	}
}

// AMQPHost returns the broker address used by the integration tests,
// taken from the AMQP_HOST environment variable (empty if unset).
func AMQPHost() string {
	return os.Getenv("AMQP_HOST")
}
// testSendRecv round-trips a single message through the given relay:
// publish on queue "test", consume it, ack it, and compare payloads.
func testSendRecv(t *testing.T, r *Relay) {
	// Get a publisher
	pub, err := r.Publisher("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	// Get a consumer
	cons, err := r.Consumer("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()
	// Send a message
	msg := "the quick brown fox jumps over the lazy dog"
	err = pub.Publish(msg)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Try to get the message
	var in string
	err = cons.Consume(&in)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Ack the message
	err = cons.Ack()
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Check message
	if in != msg {
		t.Fatalf("unexpected msg! %v %v", in, msg)
	}
}
// TestConfigFromURI checks that an amqp:// URI is decomposed into the
// expected Config fields (host, port, credentials, vhost).
func TestConfigFromURI(t *testing.T) {
	config, err := ConfigFromURI("amqp://user:pass@host:10000/vhost")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	obj := &Config{Addr: "host", Port: 10000, Username: "user", Password: "<PASSWORD>", Vhost: "vhost"}
	// Ensure equal
	if !reflect.DeepEqual(obj, config) {
		t.Fatalf("not equal. %#v %#v", obj, config)
	}
}

// TestConfigDefaults verifies the defaults New applies to an empty Config
// (vhost, credentials, exchange name/type, prefetch count).
func TestConfigDefaults(t *testing.T) {
	r, err := New(&Config{})
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	if r.conf.Vhost != "/" {
		t.Fatalf("bad vhost: %q", r.conf.Vhost)
	}
	if r.conf.Username != "guest" {
		t.Fatalf("bad username: %q", r.conf.Username)
	}
	if r.conf.Password != "<PASSWORD>" {
		t.Fatalf("bad password: %q", r.conf.Password)
	}
	if r.conf.Exchange != "relay" {
		t.Fatalf("bad exchange: %q", r.conf.Exchange)
	}
	if r.conf.ExchangeType != "direct" {
		t.Fatalf("bad exchange type: %q", r.conf.ExchangeType)
	}
	if r.conf.PrefetchCount != 1 {
		t.Fatalf("bad prefetch count: %d", r.conf.PrefetchCount)
	}
}
// TestSimplePublishConsume runs the basic round-trip with default settings.
func TestSimplePublishConsume(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	testSendRecv(t, r)
}

// TestPublishNoConfirm runs the round-trip with publisher confirms disabled.
func TestPublishNoConfirm(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), DisablePublishConfirm: true}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	testSendRecv(t, r)
}

// TestPublishNoPersist runs the round-trip with message persistence disabled.
func TestPublishNoPersist(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), DisablePersistence: true}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	testSendRecv(t, r)
}

// TestCustomExchange runs the round-trip on a non-default exchange name.
func TestCustomExchange(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), Exchange: "my-exchange"}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	testSendRecv(t, r)
}

// TestRelayMultiClose verifies that Relay.Close is idempotent.
func TestRelayMultiClose(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	if err := r.Close(); err != nil {
		t.Fatalf("unexpected err")
	}
	if err := r.Close(); err != nil {
		t.Fatalf("unexpected err")
	}
}

// TestConsumerMultiClose verifies that Consumer.Close is idempotent.
func TestConsumerMultiClose(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	cons, err := r.Consumer("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	if err := cons.Close(); err != nil {
		t.Fatalf("unexpected err")
	}
	if err := cons.Close(); err != nil {
		t.Fatalf("unexpected err")
	}
}

// TestPublisherMultiClose verifies that Publisher.Close is idempotent.
func TestPublisherMultiClose(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	pub, err := r.Publisher("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	if err := pub.Close(); err != nil {
		t.Fatalf("unexpected err")
	}
	if err := pub.Close(); err != nil {
		t.Fatalf("unexpected err")
	}
}
// TestMultiConsume publishes five messages, nacks the whole multi-ack batch,
// and verifies that all of them are redelivered in order.
func TestMultiConsume(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), PrefetchCount: 5, EnableMultiAck: true}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Get a publisher
	pub, err := r.Publisher("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	// Get a consumer
	cons, err := r.Consumer("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()
	// Send the messages.
	for i := 0; i < 5; i++ {
		// BUG FIX: string(i) converts an int to a rune ("\x00".."\x04"), not
		// its decimal form (go vet stringintconv); use strconv.Itoa instead.
		err = pub.Publish(strconv.Itoa(i))
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
	}
	// Try to get the messages, in order.
	var in string
	for i := 0; i < 5; i++ {
		err = cons.Consume(&in)
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
		if in != strconv.Itoa(i) {
			t.Fatalf("unexpected msg! %v %v", in, i)
		}
	}
	// Nack all the messages
	err = cons.Nack()
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Should redeliver
	for i := 0; i < 5; i++ {
		err = cons.ConsumeAck(&in)
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
		if in != strconv.Itoa(i) {
			t.Fatalf("unexpected msg! %v %v", in, i)
		}
	}
}
// TestConsumeWithoutAck fills the prefetch window (5), checks that a sixth
// un-acked Consume fails with the documented error, then acks and drains
// the remaining messages.
func TestConsumeWithoutAck(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), PrefetchCount: 5, EnableMultiAck: true}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Get a publisher
	pub, err := r.Publisher("noack")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	// Get a consumer
	cons, err := r.Consumer("noack")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()
	// Send the messages.
	for i := 0; i < 10; i++ {
		// BUG FIX: string(i) converts an int to a rune, not its decimal
		// form (go vet stringintconv); use strconv.Itoa instead.
		err = pub.Publish(strconv.Itoa(i))
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
	}
	// Try to get the messages, in order.
	var in string
	for i := 0; i < 5; i++ {
		err = cons.Consume(&in)
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
		if in != strconv.Itoa(i) {
			t.Fatalf("unexpected msg! %v %v", in, i)
		}
	}
	// The 6th Consume should fail since prefetch is 5
	err = cons.Consume(&in)
	if err.Error() != "Consume will block without Ack!" {
		t.Fatalf("unexpected err %s", err)
	}
	// Ack all the messages
	err = cons.Ack()
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Should get the rest
	for i := 5; i < 10; i++ {
		err = cons.ConsumeAck(&in)
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
		if in != strconv.Itoa(i) {
			t.Fatalf("unexpected msg! %#v %#v", in, strconv.Itoa(i))
		}
	}
}
// TestCloseRelayInUse closes the relay while a publisher and a consumer are
// actively working, and verifies everything winds down without error.
func TestCloseRelayInUse(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Get a publisher
	pub, err := r.Publisher("close-in-use")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	// Get a consumer
	cons, err := r.Consumer("close-in-use")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()
	wg := &sync.WaitGroup{}
	wg.Add(3)
	// Publish until the channel closes.
	// BUG FIX: t.Fatalf must only be called from the goroutine running the
	// test (testing package docs); inside these goroutines use t.Errorf and
	// return. Also: string(i) produced rune payloads, not decimal strings.
	go func() {
		defer wg.Done()
		for i := 0; i < 100; i++ {
			err := pub.Publish(strconv.Itoa(i))
			if err == ChannelClosed {
				break
			}
			if err != nil {
				t.Errorf("unexpected err %s", err)
				return
			}
		}
	}()
	// Consume until the channel closes.
	go func() {
		defer wg.Done()
		var in string
		for i := 0; i < 100; i++ {
			err := cons.ConsumeAck(&in)
			if err == ChannelClosed {
				break
			}
			if err != nil {
				t.Errorf("unexpected err %s", err)
				return
			}
			if in != strconv.Itoa(i) {
				t.Errorf("unexpected msg! %v %v", in, i)
				return
			}
		}
	}()
	// Close the relay shortly after the workers start.
	go func() {
		defer wg.Done()
		time.Sleep(time.Millisecond)
		err := r.Close()
		if err != nil {
			t.Errorf("unexpected err %s", err)
		}
	}()
	wg.Wait()
}
// TestClosePendingMsg closes a consumer holding an un-acked message and
// verifies the message (and the rest of the queue) is redelivered to a
// fresh consumer.
func TestClosePendingMsg(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), PrefetchCount: 5, EnableMultiAck: true, DisablePublishConfirm: true}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Get a publisher
	pub, err := r.Publisher("pending-nack")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	// Get a consumer
	cons, err := r.Consumer("pending-nack")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Send the messages.
	for i := 0; i < 20; i++ {
		// BUG FIX: string(i) converts an int to a rune, not its decimal
		// form (go vet stringintconv); use strconv.Itoa instead.
		err = pub.Publish(strconv.Itoa(i))
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
	}
	// Try to get the message
	var in string
	err = cons.Consume(&in)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Close. Should nack.
	cons.Close()
	// Get a consumer
	cons, err = r.Consumer("pending-nack")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Should redeliver
	for i := 0; i < 20; i++ {
		err = cons.ConsumeAck(&in)
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
		if in != strconv.Itoa(i) {
			t.Fatalf("unexpected msg! %v %v", in, i)
		}
	}
}
// TestDoubleConsume verifies that a second Consume without an intervening
// Ack fails with the documented error.
func TestDoubleConsume(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Get a publisher
	// BUG FIX: the errors from Publisher/Consumer/Publish were ignored,
	// which could mask a setup failure as a confusing Consume error.
	pub, err := r.Publisher("double-cons")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	// Get a consumer
	cons, err := r.Consumer("double-cons")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()
	if err := pub.Publish("test"); err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	var in string
	err = cons.Consume(&in)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	err = cons.Consume(&in)
	if err.Error() != "Ack required before consume!" {
		t.Fatalf("unexpected err %s", err)
	}
	cons.Ack()
}
// TestCloseConsume verifies that Consume/Ack/Nack on a closed consumer all
// fail with ChannelClosed.
func TestCloseConsume(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Get a consumer
	// BUG FIX: the error from Consumer was ignored before use.
	cons, err := r.Consumer("double-cons")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	cons.Close()
	var in string
	err = cons.Consume(&in)
	if err != ChannelClosed {
		t.Fatalf("unexpected err %s", err)
	}
	err = cons.Ack()
	if err != ChannelClosed {
		t.Fatalf("unexpected err %s", err)
	}
	err = cons.Nack()
	if err != ChannelClosed {
		t.Fatalf("unexpected err %s", err)
	}
}
// TestClosePublish verifies that Publish on a closed publisher fails with
// ChannelClosed.
func TestClosePublish(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Get a publisher
	// BUG FIX: the error from Publisher was ignored before use.
	pub, err := r.Publisher("double-cons")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	pub.Close()
	err = pub.Publish("test")
	if err != ChannelClosed {
		t.Fatalf("unexpected err %s", err)
	}
}
// TestNoHost verifies that Consumer/Publisher creation fails when no broker
// is reachable (New itself is lazy and succeeds).
func TestNoHost(t *testing.T) {
	// Hopefully no rabbit there....
	conf := Config{Addr: "127.0.0.1", Port: 1}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	// Try to get a consumer
	_, err = r.Consumer("test")
	if err == nil {
		t.Fatalf("expected err!")
	}
	// Try to get a publisher
	_, err = r.Publisher("test")
	if err == nil {
		t.Fatalf("expected err!")
	}
}

// TestMessageExpires publishes with a short MessageTTL, waits past it, and
// verifies the consumer times out instead of receiving the message.
func TestMessageExpires(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), MessageTTL: 10 * time.Millisecond}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	pub, err := r.Publisher("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	msg := "the quick brown fox jumps over the lazy dog"
	err = pub.Publish(msg)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	// Wait for expiration
	time.Sleep(15 * time.Millisecond)
	cons, err := r.Consumer("test")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()
	// Try to get the message
	var in string
	err = cons.ConsumeTimeout(&in, time.Second)
	if err != TimedOut {
		t.Fatalf("unexpected err %s", err)
	}
}

// TestConsumeTimeout verifies ConsumeTimeout returns TimedOut on an empty
// queue once the deadline elapses.
func TestConsumeTimeout(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), MessageTTL: 10 * time.Millisecond}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	cons, err := r.Consumer("timeout")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()
	// Try to get the message
	var in string
	err = cons.ConsumeTimeout(&in, 50*time.Millisecond)
	if err != TimedOut {
		t.Fatalf("unexpected err %s", err)
	}
}

// TestQueueTTL lets an unused queue expire (QueueTTL) and checks that
// publishing afterwards still succeeds.
func TestQueueTTL(t *testing.T) {
	CheckInteg(t)
	conf := Config{Addr: AMQPHost(), QueueTTL: 10 * time.Millisecond}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()
	pub, err := r.Publisher("queuettl")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()
	cons, err := r.Consumer("queuettl")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	cons.Close()
	time.Sleep(20 * time.Millisecond)
	msg := "the quick brown fox jumps over the lazy dog"
	err = pub.Publish(msg)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
}
// TestIsDecodeFailure checks IsDecodeFailure against one matching and one
// non-matching error message.
func TestIsDecodeFailure(t *testing.T) {
	decodeErr := fmt.Errorf("Failed to decode message! Something bad happened.")
	if !IsDecodeFailure(decodeErr) {
		t.Fatalf("should be decode failure")
	}
	otherErr := fmt.Errorf("Not a decode failure error")
	if IsDecodeFailure(otherErr) {
		t.Fatalf("should not be decode failure")
	}
}
// TestCustomRoutingKey round-trips one message through a publisher and a
// consumer bound to the same explicit routing key.
func TestCustomRoutingKey(t *testing.T) {
	CheckInteg(t)

	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()

	publisher, err := r.PublisherWithRoutingKey("test", "widgets")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer publisher.Close()

	consumer, err := r.ConsumerWithRoutingKey("test", "widgets")
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}

	// Publish, consume, ack, and compare payloads.
	payload := "the quick brown fox jumps over the lazy dog"
	if err = publisher.Publish(payload); err != nil {
		t.Fatalf("unexpected err %s", err)
	}

	var received string
	if err = consumer.Consume(&received); err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	if err = consumer.Ack(); err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	if received != payload {
		t.Fatalf("unexpected msg! %v %v", received, payload)
	}
}
// TestExclusiveQueue publishes into short-lived exclusive (unnamed) queues
// and then verifies a fresh exclusive consumer starts with an empty queue.
func TestExclusiveQueue(t *testing.T) {
	CheckInteg(t)

	config := &Config{
		Addr:     AMQPHost(),
		Exchange: "exclusive-test",
	}

	// Publish from several short-lived exclusive publishers.
	for i := 0; i < 3; i++ {
		r, err := New(config)
		if err != nil {
			t.Fatalf("err: %v", err)
		}
		publisher, err := r.Publisher("") // exclusive
		if err != nil {
			t.Fatalf("err: %v", err)
		}
		if err := publisher.Publish("foo"); err != nil {
			t.Fatalf("err: %v", err)
		}
		publisher.Close()
	}

	// A new exclusive consumer gets its own private queue, so none of the
	// messages published above should be visible to it.
	r, err := New(config)
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	consumer, err := r.Consumer("") // exclusive
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	defer consumer.Close()

	var out string
	if err := consumer.ConsumeTimeout(&out, time.Second); err != TimedOut {
		t.Fatalf("expected empty queue, got: %v", out)
	}
}
// TestExchangeType_Fanout verifies that a single publish on a fanout
// exchange is delivered to every consumer.
//
// Fix: the consumer goroutines previously called t.Fatalf, which must only
// be called from the goroutine running the test (testing package docs);
// they now report failures with t.Errorf and return.
func TestExchangeType_Fanout(t *testing.T) {
	CheckInteg(t)

	// Make the config
	config := &Config{
		Addr:         AMQPHost(),
		Exchange:     "fanout-test",
		ExchangeType: "fanout",
	}

	// Create the wait group
	var wg sync.WaitGroup
	wg.Add(10)

	// Signal the channel when everyone is done
	doneCh := make(chan struct{})
	go func() {
		defer close(doneCh)
		wg.Wait()
	}()

	// Start all of the consumers
	for i := 0; i < 10; i++ {
		go func() {
			defer wg.Done()
			r, err := New(config)
			if err != nil {
				t.Errorf("err: %v", err)
				return
			}
			defer r.Close()
			cons, err := r.Consumer("") // exclusive
			if err != nil {
				t.Errorf("err: %v", err)
				return
			}
			var out string
			if err := cons.Consume(&out); err != nil {
				t.Errorf("err: %v", err)
				return
			}
			cons.Ack()
		}()
	}

	// Wait for consumers to all start
	time.Sleep(time.Second)

	// Set up the publisher
	r, err := New(config)
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	pub, err := r.Publisher("") // exclusive
	if err != nil {
		t.Fatalf("err: %v", err)
	}

	// Publish a single message
	if err := pub.Publish("hi"); err != nil {
		t.Fatalf("err: %v", err)
	}

	// Every consumer should get a copy.
	select {
	case <-doneCh:
	case <-time.After(time.Second):
		t.Fatalf("should fanout")
	}
}
// TestPublisherThreadSafety publishes from many goroutines at once and then
// verifies every message can be consumed and decoded intact.
//
// Fixes: the publisher goroutines previously wrote to the shared outer err
// variable (a data race under -race), and called t.Fatalf from non-test
// goroutines (forbidden by the testing package). Each goroutine now uses a
// local err and reports failures with t.Errorf.
func TestPublisherThreadSafety(t *testing.T) {
	CheckInteg(t)

	// Make a random tests queue name
	queueName, err := uuid.GenerateUUID()
	if err != nil {
		t.Fatalf("err: %v", err)
	}
	rand.Seed(time.Now().UnixNano())

	conf := Config{Addr: AMQPHost()}
	r, err := New(&conf)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer r.Close()

	// Get a publisher
	pub, err := r.Publisher(queueName)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer pub.Close()

	// Get a consumer
	cons, err := r.Consumer(queueName)
	if err != nil {
		t.Fatalf("unexpected err %s", err)
	}
	defer cons.Close()

	// Queue up a bunch of publishers who will publish messages very near
	// to the same time. When the Publish() calls go in parallel, this would
	// usually cause unpredictable behavior of the shared bytes.Buffer used
	// previously per-consumer, due to the automatic resizing of the underlying
	// byte slice. Instead we will use a buffer per-publish. This will result
	// in more allocations, but allows publishers to run in parallel without
	// blocking eachother, and without stepping on the buffer size.
	var wg sync.WaitGroup
	wg.Add(1000)
	startCh := make(chan struct{})
	for i := 0; i < 1000; i++ {
		go func() {
			defer wg.Done()
			<-startCh
			msg := strings.Repeat("x", rand.Intn(1024))
			if err := pub.Publish(msg); err != nil {
				t.Errorf("unexpected err %s", err)
			}
		}()
	}

	// Start the publishers. The WaitGroup ensures that we received all of the
	// publisher confirmations from all of the threads.
	close(startCh)
	wg.Wait()

	// Try to get the messages. If we got a short buffer on any of the messages,
	// this will result in a decoding error due to incomplete JSON.
	for i := 0; i < 1000; i++ {
		var in string
		err = cons.ConsumeTimeout(&in, 100*time.Millisecond)
		if err != nil {
			t.Fatalf("unexpected err %s", err)
		}
		cons.Ack()
	}
}
|
<filename>ingestion/src/metadata/ingestion/bulksink/migrate.py<gh_stars>0
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import traceback
from datetime import datetime
from pydantic import ValidationError
from metadata.config.common import ConfigModel
from metadata.generated.schema.api.data.createGlossary import CreateGlossaryRequest
from metadata.generated.schema.api.data.createGlossaryTerm import (
CreateGlossaryTermRequest,
)
from metadata.generated.schema.api.services.createDatabaseService import (
CreateDatabaseServiceRequest,
)
from metadata.generated.schema.api.services.createMessagingService import (
CreateMessagingServiceRequest,
)
from metadata.generated.schema.api.services.createPipelineService import (
CreatePipelineServiceRequest,
)
from metadata.generated.schema.api.tags.createTag import CreateTagRequest
from metadata.generated.schema.api.tags.createTagCategory import (
CreateTagCategoryRequest,
)
from metadata.generated.schema.api.teams.createRole import CreateRoleRequest
from metadata.generated.schema.api.teams.createTeam import CreateTeamRequest
from metadata.generated.schema.api.teams.createUser import CreateUserRequest
from metadata.generated.schema.entity.data.databaseSchema import DatabaseSchema
from metadata.generated.schema.entity.data.glossary import Glossary
from metadata.generated.schema.entity.data.glossaryTerm import GlossaryTerm
from metadata.generated.schema.entity.data.pipeline import Pipeline
from metadata.generated.schema.entity.data.table import Table
from metadata.generated.schema.entity.data.topic import Topic
from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import (
OpenMetadataConnection,
)
from metadata.generated.schema.entity.services.databaseService import (
DatabaseService,
DatabaseServiceType,
)
from metadata.generated.schema.entity.services.messagingService import MessagingService
from metadata.generated.schema.entity.teams.role import Role
from metadata.generated.schema.entity.teams.team import Team
from metadata.generated.schema.entity.teams.user import User
from metadata.generated.schema.type.entityReference import EntityReference
from metadata.ingestion.api.bulk_sink import BulkSink, BulkSinkStatus
from metadata.ingestion.ometa.client import APIError
from metadata.ingestion.ometa.ometa_api import EmptyPayloadException, OpenMetadata
logger = logging.getLogger(__name__)
class MetadataMigrateSinkConfig(ConfigModel):
    # Directory containing the exported JSON dump files to migrate.
    dirPath: str
class MigrateBulkSink(BulkSink):
    """Bulk sink that replays an exported OpenMetadata dump (one JSON
    document per line, one file per entity type) into a target instance.
    """

    config: MetadataMigrateSinkConfig
    # JSON-Patch path used when patching entity descriptions.
    DESCRIPTION_PATH = "/description"

    def __init__(
        self,
        config: MetadataMigrateSinkConfig,
        metadata_config: OpenMetadataConnection,
    ):
        self.config = config
        self.metadata_config = metadata_config
        self.service_name = None
        self.wrote_something = False
        # Client for the target OpenMetadata instance.
        self.metadata = OpenMetadata(self.metadata_config)
        self.status = BulkSinkStatus()
        self.table_join_dict = {}
        # Caches of already-created role/team ids, keyed by name.
        self.role_entities = {}
        self.team_entities = {}
        self.today = datetime.today().strftime("%Y-%m-%d")
        # Maps lower-cased service-type names to their canonical casing.
        self.database_service_map = {
            service.value.lower(): service.value for service in DatabaseServiceType
        }

    @classmethod
    def create(cls, config_dict: dict, metadata_config: OpenMetadataConnection):
        # Factory used by the ingestion framework: parse the raw dict config.
        config = MetadataMigrateSinkConfig.parse_obj(config_dict)
        return cls(config, metadata_config)
def write_records(self) -> None:
with open(f"{self.config.dirPath}/user.json") as file:
self.write_users(file)
with open(f"{self.config.dirPath}/glossary.json") as file:
self.write_glossary(file)
with open(f"{self.config.dirPath}/glossary_term.json") as file:
self.write_glossary_term(file)
with open(f"{self.config.dirPath}/tag.json") as file:
self.write_tag(file)
with open(f"{self.config.dirPath}/messaging_service.json") as file:
self.write_messaging_services(file)
with open(f"{self.config.dirPath}/pipeline_service.json") as file:
self.write_pipeline_services(file)
with open(f"{self.config.dirPath}/database_service.json") as file:
self.write_database_services(file)
with open(f"{self.config.dirPath}/table.json") as file:
self.write_tables(file)
with open(f"{self.config.dirPath}/topic.json") as file:
self.write_topics(file)
with open(f"{self.config.dirPath}/pipeline.json") as file:
self.write_pipelines(file)
def _separate_fqn(self, fqn):
database_schema, table = fqn.split(".")[-2:]
if not database_schema:
database_schema = None
return {"database": None, "database_schema": database_schema, "name": table}
def update_through_patch(self, entity, id, value, path, op):
"""
Update the Entity Through Patch API
"""
data = [{"op": op, "path": path, "value": value}]
resp = self.metadata.client.patch(
"{}/{}".format(self.metadata.get_suffix(entity), id), data=json.dumps(data)
)
if not resp:
raise EmptyPayloadException(
f"Got an empty response when trying to PATCH to {self.metadata.get_suffix(entity)}, {data.json()}"
)
def write_columns(self, columns, table_id):
for i in range(len(columns)):
if columns[i].get("description"):
self.update_through_patch(
Table,
table_id,
columns[i].get("description"),
f"/columns/{i}/description",
"add",
)
if columns[i].get("tags"):
tags_list = columns[i].get("tags", [])
self._add_tags_by_patch(
tags_list=tags_list,
entity=Table,
entity_id=table_id,
path=f"/columns/{i}/tags",
)
    def write_tables(self, file):
        """Read one table JSON document per line and patch descriptions,
        owners and column metadata onto the matching existing entities.

        Tables are located through Elasticsearch by (schema, name, service);
        records with no match are skipped silently.
        """
        for table in file.readlines():
            table = json.loads(table)
            try:
                table_entities = self.metadata.search_entities_using_es(
                    table_obj=self._separate_fqn(table.get("fullyQualifiedName")),
                    search_index="table_search_index",
                    service_name=table.get("service").get("name"),
                )
                if len(table_entities) < 1:
                    continue
                # Only the first search hit is updated.
                table_entity: Table = table_entities[0]
                # Patch the parent schema's description and owner first.
                self.update_through_patch(
                    DatabaseSchema,
                    table_entity.databaseSchema.id.__root__,
                    table.get("database").get("description"),
                    self.DESCRIPTION_PATH,
                    "add",
                )
                self._add_entity_owner_by_patch(
                    owner_dict=table.get("database").get("owner"),
                    entity=DatabaseSchema,
                    entity_id=table_entity.databaseSchema.id.__root__,
                )
                # Then the table itself: description, owner, and columns.
                self.update_through_patch(
                    Table,
                    table_entity.id.__root__,
                    table.get("description"),
                    self.DESCRIPTION_PATH,
                    "add",
                )
                self._add_entity_owner_by_patch(
                    owner_dict=table.get("owner"),
                    entity=Table,
                    entity_id=table_entity.id.__root__,
                )
                columns = table.get("columns")
                self.write_columns(columns, table_entity.id.__root__)
                # NOTE(review): relies on table_entity.database being set on
                # the search result — confirm the ES index returns it.
                logger.info(
                    "Successfully ingested table {}.{}".format(
                        table_entity.database.name,
                        table_entity.name.__root__,
                    )
                )
            except (APIError, ValidationError) as err:
                logger.error(
                    "Failed to ingest table {} in database {} ".format(
                        table.get("name"),
                        table.get("database").get("name"),
                    )
                )
                logger.debug(traceback.format_exc())
                logger.error(err)
                self.status.failure("Table: {}".format(table.get("name")))
    def _create_role(self, create_role) -> Role:
        """Create/update a Role from an exported role reference, caching its
        id in self.role_entities; returns None when creation fails.
        """
        try:
            # policies=[] — exported policies are not carried over;
            # NOTE(review): confirm this is intentional.
            create_req = CreateRoleRequest(
                name=create_role.name, displayName=create_role.displayName, policies=[]
            )
            role = self.metadata.create_or_update(create_req)
            # NOTE(review): other caches key by name.__root__; confirm
            # role.name is a plain string here.
            self.role_entities[role.name] = str(role.id.__root__)
            return role
        except Exception as err:
            logger.debug(traceback.format_exc())
            logger.error(err)
    def _create_team(self, create_team: CreateTeamRequest) -> Team:
        """Create/update a Team and cache its id in self.team_entities;
        returns None when creation fails.
        """
        try:
            team = self.metadata.create_or_update(create_team)
            self.team_entities[team.name.__root__] = str(team.id.__root__)
            return team
        except Exception as err:
            logger.debug(traceback.format_exc())
            logger.error(err)
def _get_role_ids(self, user_obj: User):
if user_obj.roles: # Roles can be optional
role_ids = []
for role in user_obj.roles.__root__:
role_entity = self.metadata.get_by_name(
entity=Role, fqdn=str(role.name)
)
if role_entity:
role_ids.append(role_entity.id)
else:
role_entity = self._create_role(role)
else:
role_ids = None
def _get_team_ids(self, user_obj):
if user_obj.teams: # Teams can be optional
team_ids = []
for team in user_obj.teams.__root__:
try:
team_entity = self.metadata.get_by_name(entity=Team, fqdn=team.name)
if not team_entity:
raise APIError(
error={
"message": "Creating a new team {}".format(team.name)
}
)
team_ids.append(team_entity.id.__root__)
except APIError:
team_request = CreateTeamRequest(
name=team.name,
displayName=team.displayName,
description=team.description,
)
team_entity = self._create_team(team_request)
team_ids.append(team_entity.id.__root__)
except Exception as err:
logger.error(err)
return team_ids
def write_users(self, file):
"""
Given a User profile (User + Teams + Roles create requests):
1. Check if role & team exist, otherwise create
2. Add ids of role & team to the User
3. Create or update User
"""
try:
for user in file.readlines():
user_obj = User(**json.loads(user))
# Create roles if they don't exist
role_ids = self._get_role_ids(user_obj=user_obj)
# Create teams if they don't exist
team_ids = self._get_team_ids(user_obj=user_obj)
# Update user data with the new Role and Team IDs
metadata_user = CreateUserRequest(
roles=role_ids,
teams=team_ids,
name=user_obj.name,
description=user_obj.description,
email=user_obj.email,
timezone=user_obj.timezone,
isBot=user_obj.isBot,
isAdmin=user_obj.isAdmin,
profile=user_obj.profile,
)
# Create user
try:
user = self.metadata.create_or_update(metadata_user)
self.status.records_written(user_obj.displayName)
logger.info("User: {}".format(user_obj.displayName))
except Exception as err:
logger.debug(traceback.format_exc())
logger.error(err)
except Exception as err:
self.status.failure(f"User:")
def _add_entity_owner_by_patch(self, owner_dict, entity, entity_id):
if owner_dict:
owner = self.metadata.get_by_name(
Team if owner_dict.get("type") == "team" else User,
owner_dict.get("name"),
)
if owner:
self.update_through_patch(
entity,
entity_id,
{"id": str(owner.id.__root__), "type": owner_dict.get("type")},
"/owner",
"add",
)
def _add_tags_by_patch(self, tags_list, entity, entity_id, path="/tags"):
for i in range(len(tags_list)):
value = {
"tagFQN": tags_list[i].get("tagFQN"),
"labelType": tags_list[i].get("labelType"),
"state": tags_list[i].get("state"),
"source": tags_list[i].get("source"),
}
self.update_through_patch(
entity,
entity_id,
value,
f"{path}/{i}",
"add",
)
    def write_topics(self, file) -> None:
        """Read one topic JSON document per line and patch description, tags
        and owner onto the matching existing Topic entity.
        """
        for topic in file.readlines():
            topic = json.loads(topic)
            try:
                # NOTE(review): get_by_name may return None for an unknown
                # topic; the attribute access below would then raise
                # AttributeError, which is not in the caught tuple — confirm.
                topic_obj: Topic = self.metadata.get_by_name(
                    Topic, topic.get("fullyQualifiedName")
                )
                self.update_through_patch(
                    Topic,
                    topic_obj.id.__root__,
                    topic.get("description"),
                    self.DESCRIPTION_PATH,
                    "add",
                )
                tags_list = topic.get("tags", [])
                self._add_tags_by_patch(
                    tags_list=tags_list, entity=Topic, entity_id=topic_obj.id.__root__
                )
                self._add_entity_owner_by_patch(
                    owner_dict=topic.get("owner"),
                    entity=Topic,
                    entity_id=topic_obj.id.__root__,
                )
                logger.info(f"Successfully ingested topic {topic.get('name')}")
                self.status.records_written(f"Topic: {topic.get('name')}")
            except (APIError, ValidationError) as err:
                logger.error(f"Failed to ingest topic {topic.get('name')}")
                logger.error(err)
                self.status.failure(f"Topic: {topic.get('name')}")
def write_pipelines(self, file):
for pipeline in file.readlines():
pipeline = json.loads(pipeline)
try:
pipelines_obj: Pipeline = self.metadata.get_by_name(
Pipeline, pipeline.get("fullyQualifiedName")
)
if pipelines_obj:
self.update_through_patch(
Pipeline,
pipelines_obj.id.__root__,
pipeline.get("description"),
self.DESCRIPTION_PATH,
"add",
)
self._add_entity_owner_by_patch(
owner_dict=pipeline.get("owner"),
entity=Pipeline,
entity_id=pipelines_obj.id.__root__,
)
tags_list = pipeline.get("tags", [])
self._add_tags_by_patch(
tags_list=tags_list,
entity=Pipeline,
entity_id=pipelines_obj.id.__root__,
)
logger.info(f"Successfully ingested topic {pipeline.get('name')}")
self.status.records_written(f"Topic: {pipeline.get('name')}")
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest pipeline {pipeline.get('name')}")
logger.error(err)
self.status.failure(f"Pipeline: {pipeline.get('name')}")
def _get_glossary_reviewers_entities(self, reviewers):
users = []
for reviewer in reviewers:
user = self.metadata.get_by_name(entity=User, fqdn=reviewer.name)
users.append(
EntityReference(
id=user.id.__root__, name=user.name.__root__, type=reviewer.type
)
)
return users
def _get_glossary_owner_entity(self, owner):
user = self.metadata.get_by_name(entity=User, fqdn=owner.name)
return EntityReference(
id=user.id.__root__, name=user.name.__root__, type=owner.type
)
def write_glossary(self, file):
for glossary in file.readlines():
try:
glossary_obj = Glossary(**json.dumps(glossary))
glossary_request = CreateGlossaryRequest(
name=glossary_obj.name.__root__,
displayName=glossary_obj.displayName,
reviewers=self._get_glossary_reviewers_entities(
glossary_obj.reviewers
),
owner=self._get_glossary_owner_entity(glossary_obj.owner),
tags=glossary_obj.tags,
description=glossary_obj.description,
)
self.metadata.create_or_update(glossary_request)
logger.info(
f"Successfully ingested Pipeline {glossary_request.displayName}"
)
self.status.records_written(f"Pipeline: {glossary_request.displayName}")
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest pipeline {glossary_obj.name}")
logger.error(err)
self.status.failure(f"Pipeline: {glossary_obj.name}")
def _get_glossary_entity(self, glossary):
glossary_obj = self.metadata.get_by_name(entity=Glossary, fqdn=glossary.name)
return EntityReference(
id=glossary_obj.id.__root__, name=glossary.name, type=glossary.type
)
def _get_glossary_term_entity(self, glossary_term):
if glossary_term:
try:
parent = self.metadata.get_by_name(
entity=GlossaryTerm, fqdn=glossary_term.name
)
return EntityReference(
id=parent.id.__root__,
name=glossary_term.name,
type=glossary_term.type,
)
except Exception:
logger.error(f"Failed to fetch glossary term: {glossary_term.name}")
def write_glossary_term(self, file):
for glossary_term in file.readlines():
try:
glossary_term_obj = GlossaryTerm(**json.loads(glossary_term))
glossary_request = CreateGlossaryTermRequest(
name=glossary_term_obj.name,
glossary=self._get_glossary_entity(glossary_term_obj.glossary),
displayName=glossary_term_obj.displayName,
parent=self._get_glossary_term_entity(glossary_term_obj.parent),
synonyms=glossary_term_obj.synonyms,
relatedTerms=glossary_term_obj.relatedTerms,
references=glossary_term_obj.references,
reviewers=glossary_term_obj.reviewers,
tags=glossary_term_obj.tags,
description=glossary_term_obj.description,
)
self.metadata.create_or_update(glossary_request)
logger.info(
f"Successfully ingested Pipeline {glossary_request.displayName}"
)
self.status.records_written(f"Pipeline: {glossary_request.displayName}")
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest pipeline {glossary_term_obj.name}")
logger.error(err)
self.status.failure(f"Pipeline: {glossary_term_obj.name}")
def _create_tag_category(self, tag_category: CreateTagCategoryRequest):
resp = self.metadata.client.post(
self.metadata.get_suffix(CreateTagCategoryRequest), data=tag_category.json()
)
if not resp:
raise EmptyPayloadException(
f"Got an empty response when trying to POST to {self.metadata.get_suffix(CreateTagCategoryRequest)}, {tag_category.json()}"
)
def _add_tag_to_category(self, tag_category_name, tag: CreateTagRequest):
resp = self.metadata.client.post(
self.metadata.get_suffix(CreateTagRequest) + "/" + tag_category_name,
data=tag.json(),
)
if not resp:
raise EmptyPayloadException(
f"Got an empty response when trying to POST to {self.metadata.get_suffix(CreateTagRequest)}, {tag.json()}"
)
def write_tag(self, file):
for tag_category in file.readlines():
tag_category = json.loads(tag_category)
try:
tag_category_request = CreateTagCategoryRequest(
name=tag_category.get("name"),
description=tag_category.get("description"),
categoryType=tag_category.get("categoryType"),
)
self._create_tag_category(tag_category_request)
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest TagCategory {tag_category.get('name')}")
logger.error(err)
self.status.failure(f"TagCategory: {tag_category.get('name')}")
try:
for tag in tag_category.get("children", []):
tag_request = CreateTagRequest(
name=tag.get("name"), description=tag.get("description")
)
self._add_tag_to_category(tag_category.get("name"), tag_request)
logger.info(f"Successfully ingested Tag {tag_category_request.name}")
self.status.records_written(f"Tag: {tag_category_request.name}")
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest tag {tag_category.get('name')}")
logger.error(err)
self.status.failure(f"Tag: {tag_category.get('name')}")
def write_messaging_services(self, file):
for messaging_service in file.readlines():
messaging_service = json.loads(messaging_service)
try:
service_obj: MessagingService = self.metadata.get_by_name(
MessagingService, messaging_service.get("name")
)
if not service_obj:
continue
owner_dict = messaging_service.get("owner")
owner_ref = None
if owner_dict:
owner = self.metadata.get_by_name(
Team if owner_dict.get("type") == "team" else User,
owner_dict.get("name"),
)
owner_ref = EntityReference(
id=owner.id,
name=owner_dict.get("name"),
type=owner_dict.get("type"),
)
service_request = CreateMessagingServiceRequest(
name=messaging_service.get("name"),
description=messaging_service.get("description"),
serviceType=messaging_service.get("serviceType"),
connection=service_obj.connection,
owner=owner_ref,
)
self.metadata.create_or_update(service_request)
logger.info(
f"Successfully ingested messaging service {messaging_service.get('name')}"
)
self.status.records_written(f"Tag: {messaging_service.get('name')}")
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest tag {messaging_service.get('name')}")
logger.error(err)
self.status.failure(f"Tag: {messaging_service.get('name')}")
def write_pipeline_services(self, file):
for pipeline_service in file.readlines():
pipeline_service = json.loads(pipeline_service)
try:
owner_dict = pipeline_service.get("owner")
owner_ref = None
if owner_dict:
owner = self.metadata.get_by_name(
Team if owner_dict.get("type") == "team" else User,
owner_dict.get("name"),
)
owner_ref = EntityReference(
id=owner.id,
name=owner_dict.get("name"),
type=owner_dict.get("type"),
)
service_request = CreatePipelineServiceRequest(
name=pipeline_service.get("name"),
description=pipeline_service.get("description"),
serviceType=pipeline_service.get("serviceType"),
pipelineUrl=pipeline_service.get("pipelineUrl"),
owner=owner_ref,
)
self.metadata.create_or_update(service_request)
logger.info(
f"Successfully ingested messaging service {pipeline_service.get('name')}"
)
self.status.records_written(f"Tag: {pipeline_service.get('name')}")
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest tag {pipeline_service.get('name')}")
logger.error(err)
self.status.failure(f"Tag: {pipeline_service.get('name')}")
def write_database_services(self, file):
for databas_services in file.readlines():
databas_services = json.loads(databas_services)
try:
service_obj: DatabaseService = self.metadata.get_by_name(
DatabaseService, databas_services.get("name")
)
if not service_obj:
continue
owner_dict = databas_services.get("owner")
owner_ref = None
if owner_dict:
owner = self.metadata.get_by_name(
Team if owner_dict.get("type") == "team" else User,
owner_dict.get("name"),
)
owner_ref = EntityReference(
id=owner.id,
name=owner_dict.get("name"),
type=owner_dict.get("type"),
)
database_service = CreateDatabaseServiceRequest(
name=databas_services.get("name"),
description=databas_services.get("description"),
serviceType=self.database_service_map.get(
databas_services.get("serviceType").lower(), "Mysql"
),
connection=service_obj.connection,
owner=owner_ref,
)
self.metadata.create_or_update(database_service)
logger.info(
f"Successfully ingested messaging service {databas_services.get('name')}"
)
self.status.records_written(f"Tag: {databas_services.get('name')}")
except (APIError, ValidationError) as err:
logger.error(f"Failed to ingest tag {databas_services.get('name')}")
logger.error(err)
self.status.failure(f"Tag: {databas_services.get('name')}")
    def get_status(self):
        # Expose the accumulated written/failed record counters.
        return self.status

    def close(self):
        # Release the OpenMetadata client connection.
        self.metadata.close()
|
# Get user input
city = input("City Name: ")
month = input("Month: ")


# Calculate the average hours of sunlight in a given city during a month
def average_sunlight(city, month):
    # Your code goes here
    # NOTE(review): stub — the bare Ellipsis means this currently returns
    # None for every input; an implementation (and a data source) is needed.
    ...


# Print the result
print(average_sunlight(city, month))
<gh_stars>1-10
package browser
import cats.syntax.all._
import helper.{Hash, Time, UUIDHelper}
import java.time.ZonedDateTime
import models.{Area, LoginToken, User}
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
import services.{TokenService, UserService}
import org.specs2.specification.BeforeAfterAll
@RunWith(classOf[JUnitRunner])
class LoginSpec extends Specification with Tables with BaseSpec with BeforeAfterAll {

  // Fixture user inserted before the suite and removed afterwards.
  // NOTE(review): "<NAME>" and "<EMAIL>" look like redacted placeholders
  // from a dataset export — confirm the real literals upstream.
  val existingUser = User(
    UUIDHelper.namedFrom("julien.test"),
    Hash.sha256(s"julien.test"),
    "FirstName".some,
    "LastName".some,
    "<NAME>",
    "Admin A+",
    "<EMAIL>",
    helper = true,
    instructor = false,
    admin = true,
    Area.all.map(_.id),
    ZonedDateTime.parse("2017-11-01T00:00+01:00"),
    "75056",
    groupAdmin = true,
    disabled = false,
    internalSupportComment = None
  )

  // Insert the fixture user and accept the CGU so logins can proceed.
  def beforeAll(): Unit = {
    val userService = applicationWithBrowser.injector.instanceOf[UserService]
    val _ = userService.add(List(existingUser))
    val _ = userService.validateCGU(existingUser.id)
  }

  // Remove the fixture user once the suite is done.
  def afterAll(): Unit = {
    val userService = applicationWithBrowser.injector.instanceOf[UserService]
    val _ = userService.deleteById(existingUser.id)
  }

  "Login" should {
    // Data-table spec: each row pairs a submitted email with the message
    // expected on the resulting page.
    "Login with valid or invalid emails" in new WithBrowser(
      webDriver = webDriver,
      app = applicationWithBrowser
    ) {
      "email" | "result" |
        "<EMAIL>" + "@beta.gouv.fr" ! "Consultez vos e-mails" |
        "<EMAIL>" ! "Aucun compte actif n’est associé à cette adresse e-mail." |
        "<EMAIL>" + "@beta.g<EMAIL>" ! "Aucun compte actif n’est associé à cette adresse e-mail." |> {
        (email, expected) =>
          val loginURL =
            controllers.routes.LoginController.login.absoluteURL(false, s"localhost:$port")
          browser.goTo(loginURL)
          browser.el("input[name='email']").fill().withText(email)
          browser.el("form").submit()
          eventually {
            browser.pageSource must contain(expected)
          }
      }
    }

    // A fresh magic-link token should log the user in and land on
    // "my applications".
    "Use token with success" in new WithBrowser(
      webDriver = webDriver,
      app = applicationWithBrowser
    ) {
      val tokenService = app.injector.instanceOf[TokenService]
      val loginToken = LoginToken.forUserId(existingUser.id, 5, "127.0.0.1")
      tokenService.create(loginToken)
      val loginURL = controllers.routes.LoginController.magicLinkAntiConsumptionPage
        .absoluteURL(false, s"localhost:$port")
      browser.goTo(s"$loginURL?token=${loginToken.token}&path=/")
      eventually {
        browser.url must endWith(
          controllers.routes.ApplicationController.myApplications.url.substring(1)
        )
      }
    }

    // An expired token must bounce back to the home page with an
    // expiration message.
    "Use expired token without success" in new WithBrowser(
      webDriver = webDriver,
      app = applicationWithBrowser
    ) {
      val tokenService = app.injector.instanceOf[TokenService]
      val loginToken = LoginToken
        .forUserId(existingUser.id, 5, "127.0.0.1")
        .copy(expirationDate = Time.nowParis().minusMinutes(5))
      tokenService.create(loginToken)
      val loginURL = controllers.routes.LoginController.magicLinkAntiConsumptionPage
        .absoluteURL(false, s"localhost:$port")
      browser.goTo(s"$loginURL?token=${loginToken.token}&path=/")
      eventually {
        browser.url must endWith(controllers.routes.HomeController.index.url.substring(1))
        browser.pageSource must contain("Votre lien de connexion a expiré, il est valable")
      }
    }

    // An unknown/consumed token must redirect back to the login page.
    "Use token without success" in new WithBrowser(
      webDriver = webDriver,
      app = applicationWithBrowser
    ) {
      val loginURL = controllers.routes.LoginController.magicLinkAntiConsumptionPage
        .absoluteURL(false, s"localhost:$port")
      browser.goTo(s"$loginURL?token=<PASSWORD>&path=/")
      eventually {
        browser.url must endWith(controllers.routes.LoginController.login.url.substring(1))
        browser.pageSource must contain(
          "Le lien que vous avez utilisé n'est plus valide, il a déjà été utilisé."
        )
      }
    }
  }
}
|
package fetch
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"time"
)
// Report output formats understood by the remote reporting endpoint.
// NOTE(review): the values are inconsistently cased ("csv" vs "XML");
// confirm these exact spellings are what the server expects before
// normalizing them.
const (
	ReportFormatCSV string = "csv"
	ReportFormatXML string = "XML"
)
// Client is an HTTP client for the reports service. It embeds http.Client,
// so its timeout and transport can be tuned directly by callers.
type Client struct {
	http.Client
	url     string // endpoint URL requests are sent to
	authKey string // value sent in the "Auth" request header
}
// NewClient returns a Client for the given endpoint URL that authenticates
// with authKey. Requests time out after ten seconds.
func NewClient(url, authKey string) *Client {
	c := &Client{url: url, authKey: authKey}
	c.Timeout = 10 * time.Second
	return c
}
// GetReports fetches the report rows between from and to (date-granular,
// formatted as YYYY-MM-DD) in CSV form and returns the raw response body.
//
// Fixes over the previous version: the response body is now closed (it was
// leaked on every call), the stray debug Println of the form values was
// removed, a non-2xx status is reported as an error instead of silently
// returning the error page body, and the format constant is used instead
// of a duplicated literal.
func (c Client) GetReports(from, to time.Time) ([]byte, error) {
	form := url.Values{
		"datefrom":     {from.Format("2006-01-02")},
		"dateto":       {to.Format("2006-01-02")},
		"reportoption": {ReportFormatCSV},
	}
	body := bytes.NewBufferString(form.Encode())
	// NOTE(review): a GET with a request body is unusual; confirm the remote
	// API really reads the form from the body rather than the query string.
	req, err := http.NewRequest(http.MethodGet, c.url, body)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Auth", c.authKey)
	resp, err := c.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return nil, fmt.Errorf("fetch: unexpected status %s", resp.Status)
	}
	return ioutil.ReadAll(resp.Body)
}
|
<reponame>kotik-coder/PULsE<filename>src/main/java/pulse/io/export/package-info.java
/**
* Package contains the PULsE export API, which currently consists of different
* exporter classes, an export manager, an XML converter and a MassExporter
* class.
*
*/
package pulse.io.export;
|
<reponame>rubyonrailsdeveloper/spree_quotes_management<gh_stars>1-10
# Define your Spree extensions Factories within this file to enable applications, and other extensions to use and override them.
#
# Example adding this to your spec_helper will load these Factories for use:
# require 'spree_quotes_management/factories'
# Factories for Spree::Quote records used in specs.
FactoryBot.define do
  factory :quote, class: Spree::Quote do
    # Random sentence keeps descriptions distinct across examples.
    description { FFaker::Lorem.sentence }
    user

    # NOTE(review): `state 'published'` is FactoryBot's static-attribute
    # syntax, removed in factory_bot 5; confirm the pinned gem version or
    # migrate to `state { 'published' }`.
    trait :published do
      state 'published'
      published_at { Time.current }
    end

    factory :published_quote, traits: [:published]
  end
end
|
#!/bin/bash
# Helper utilities for build
function check_var {
    # Fail the build fast when a required value is empty or unset.
    # Usage: check_var "$SOME_VAR"
    [ -n "$1" ] || { echo "required variable not defined"; exit 1; }
}
function lex_pyver {
    # Zero-pad a dotted Python version so strings compare numerically.
    # Thus:
    #   3.2.1 -> 003002001
    #   3     -> 003000000
    printf '%s\n' "$1" | awk -F "." '{printf "%03d%03d%03d", $1, $2, $3}'
}
function pyver_dist_dir {
    # Strip any alpha/beta suffix to get the release download directory.
    # Thus:
    #   3.2.1   -> 3.2.1
    #   3.7.0b4 -> 3.7.0
    printf '%s\n' "$1" | awk -F "." '{printf "%d.%d.%d", $1, $2, $3}'
}
# Build one CPython from an already-downloaded Python-$py_ver.tgz in the CWD
# and install it under /opt/_internal, symlinked into /opt/python by ABI tag.
#   $1: full version string (e.g. 2.7.15)
#   $2: unicode setting — "ucs2"/"ucs4" for Python 2, "none" for Python 3
function do_cpython_build {
    local py_ver=$1
    check_var $py_ver
    local ucs_setting=$2
    check_var $ucs_setting
    tar -xzf Python-$py_ver.tgz
    pushd Python-$py_ver
    if [ "$ucs_setting" = "none" ]; then
        unicode_flags=""
        dir_suffix=""
    else
        # Python 2 only: pick the unicode width and suffix the install dir.
        local unicode_flags="--enable-unicode=$ucs_setting"
        local dir_suffix="-$ucs_setting"
    fi
    local prefix="/opt/_internal/cpython-${py_ver}${dir_suffix}"
    mkdir -p ${prefix}/lib
    # Static (non-shared) build; stdout silenced to keep logs readable.
    ./configure --prefix=${prefix} --disable-shared $unicode_flags > /dev/null
    make -j2 > /dev/null
    make install > /dev/null
    popd
    rm -rf Python-$py_ver
    # Some python's install as bin/python3. Make them available as
    # bin/python.
    if [ -e ${prefix}/bin/python3 ]; then
        ln -s python3 ${prefix}/bin/python
    fi
    ${prefix}/bin/python -m ensurepip
    if [ -e ${prefix}/bin/pip3 ] && [ ! -e ${prefix}/bin/pip ]; then
        ln -s pip3 ${prefix}/bin/pip
    fi
    # Since we fall back on a canned copy of pip, we might not have
    # the latest pip and friends. Upgrade them to make sure.
    if [ "${py_ver:0:1}" == "2" ]; then
        ${prefix}/bin/pip install -U --require-hashes -r ${MY_DIR}/py27-requirements.txt
    else
        ${prefix}/bin/pip install -U --require-hashes -r ${MY_DIR}/requirements.txt
    fi
    # Expose the install under its PEP 425 ABI tag, e.g. /opt/python/cp37-cp37m.
    local abi_tag=$(${prefix}/bin/python ${MY_DIR}/python-tag-abi-tag.py)
    ln -s ${prefix} /opt/python/${abi_tag}
}
# Download, GPG-verify and build one CPython version ($1).
# Requires PYTHON_DOWNLOAD_URL and the release keys already imported.
function build_cpython {
    local py_ver=$1
    check_var $py_ver
    check_var $PYTHON_DOWNLOAD_URL
    local py_dist_dir=$(pyver_dist_dir $py_ver)
    curl -fsSLO $PYTHON_DOWNLOAD_URL/$py_dist_dir/Python-$py_ver.tgz
    curl -fsSLO $PYTHON_DOWNLOAD_URL/$py_dist_dir/Python-$py_ver.tgz.asc
    # Verify the tarball signature (script aborts on failure via gpg's exit code
    # under callers using `set -e`; otherwise the build proceeds — TODO confirm
    # strictness at the call site).
    gpg --verify Python-$py_ver.tgz.asc
    # Pythons < 3.3 are built twice, once per unicode width (ucs2 and ucs4).
    if [ $(lex_pyver $py_ver) -lt $(lex_pyver 3.3) ]; then
        do_cpython_build $py_ver ucs2
        do_cpython_build $py_ver ucs4
    else
        do_cpython_build $py_ver none
    fi
    rm -f Python-$py_ver.tgz
    rm -f Python-$py_ver.tgz.asc
}
# Build every CPython version given as an argument, after importing the
# GPG keys needed to verify the source tarballs.
function build_cpythons {
    # Import public keys used to verify downloaded Python source tarballs.
    # https://www.python.org/static/files/pubkeys.txt
    gpg --import ${MY_DIR}/cpython-pubkeys.txt
    # Add version 3.8 release manager's key
    gpg --import ${MY_DIR}/ambv-pubkey.txt
    for py_ver in $@; do
        build_cpython $py_ver
    done
    # Remove GPG hidden directory.
    rm -rf /root/.gnupg/
}
# Build a static, position-independent OpenSSL into /usr/local/ssl.
# Must be run from inside the unpacked OpenSSL source tree.
function do_openssl_build {
    ./config no-shared -fPIC --prefix=/usr/local/ssl > /dev/null
    make > /dev/null
    # install_sw installs the software only (skips man pages/docs).
    make install_sw > /dev/null
}
# Fetch ${url}/${file} into the CWD unless ${file} already exists.
# This is called both inside and outside the build context (e.g. in Travis)
# to prefetch source tarballs, where curl exists (and works).
#   $1: file name, $2: base URL
function fetch_source {
    local file=$1
    check_var ${file}
    local url=$2
    check_var ${url}
    if [ -f "${file}" ]; then
        echo "${file} exists, skipping fetch"
    else
        # Quote the expansions so file names/URLs are never word-split
        # or glob-expanded.
        curl -fsSL -o "${file}" "${url}/${file}"
    fi
}
# Verify that ${fname} matches the expected SHA-256 digest; the script
# aborts (via sha256sum's non-zero exit under `set -e` callers) on mismatch.
#   $1: file name, $2: expected sha256 hex digest
function check_sha256sum {
    local fname=$1
    check_var ${fname}
    local sha256=$2
    check_var ${sha256}
    # GNU `sha256sum -c` requires a two-character separator ("  " or " *")
    # between digest and file name; a single space is rejected as an
    # improperly formatted line.
    echo "${sha256}  ${fname}" > "${fname}.sha256"
    sha256sum -c "${fname}.sha256"
    rm -f "${fname}.sha256"
}
# Fetch, checksum-verify, build and clean up an OpenSSL release.
#   $1: tarball base name (without .tar.gz), $2: expected sha256
# Requires OPENSSL_DOWNLOAD_URL.
function build_openssl {
    local openssl_fname=$1
    check_var ${openssl_fname}
    local openssl_sha256=$2
    check_var ${openssl_sha256}
    fetch_source ${openssl_fname}.tar.gz ${OPENSSL_DOWNLOAD_URL}
    check_sha256sum ${openssl_fname}.tar.gz ${openssl_sha256}
    tar -xzf ${openssl_fname}.tar.gz
    # Build inside a subshell so the CWD is unchanged afterwards.
    (cd ${openssl_fname} && do_openssl_build)
    rm -rf ${openssl_fname} ${openssl_fname}.tar.gz
}
# Fetch, verify and install a git release into /usr/local.
#   $1: version (tarball is v$1.tar.gz), $2: expected sha256
# Requires GIT_DOWNLOAD_URL. Gettext and Tcl/Tk support are disabled.
function build_git {
    local git_fname=$1
    check_var ${git_fname}
    local git_sha256=$2
    check_var ${git_sha256}
    check_var ${GIT_DOWNLOAD_URL}
    fetch_source v${git_fname}.tar.gz ${GIT_DOWNLOAD_URL}
    check_sha256sum v${git_fname}.tar.gz ${git_sha256}
    tar -xzf v${git_fname}.tar.gz
    (cd git-${git_fname} && make install prefix=/usr/local NO_GETTEXT=1 NO_TCLTK=1 > /dev/null)
    rm -rf git-${git_fname} v${git_fname}.tar.gz
}
# Standard autotools install from the CWD: configure (extra args are
# forwarded), make, make install — stdout silenced.
function do_standard_install {
    ./configure "$@" > /dev/null
    make > /dev/null
    make install > /dev/null
}
# Fetch, verify, build and install an autoconf release.
#   $1: tarball base name, $2: expected sha256. Requires AUTOCONF_DOWNLOAD_URL.
function build_autoconf {
    local autoconf_fname=$1
    check_var ${autoconf_fname}
    local autoconf_sha256=$2
    check_var ${autoconf_sha256}
    check_var ${AUTOCONF_DOWNLOAD_URL}
    fetch_source ${autoconf_fname}.tar.gz ${AUTOCONF_DOWNLOAD_URL}
    check_sha256sum ${autoconf_fname}.tar.gz ${autoconf_sha256}
    tar -zxf ${autoconf_fname}.tar.gz
    (cd ${autoconf_fname} && do_standard_install)
    rm -rf ${autoconf_fname} ${autoconf_fname}.tar.gz
}
# Fetch, verify, build and install an automake release.
#   $1: tarball base name, $2: expected sha256. Requires AUTOMAKE_DOWNLOAD_URL.
function build_automake {
    local automake_fname=$1
    check_var ${automake_fname}
    local automake_sha256=$2
    check_var ${automake_sha256}
    check_var ${AUTOMAKE_DOWNLOAD_URL}
    fetch_source ${automake_fname}.tar.gz ${AUTOMAKE_DOWNLOAD_URL}
    check_sha256sum ${automake_fname}.tar.gz ${automake_sha256}
    tar -zxf ${automake_fname}.tar.gz
    (cd ${automake_fname} && do_standard_install)
    rm -rf ${automake_fname} ${automake_fname}.tar.gz
}
# Fetch, verify, build and install a libtool release.
#   $1: tarball base name, $2: expected sha256. Requires LIBTOOL_DOWNLOAD_URL.
function build_libtool {
    local libtool_fname=$1
    check_var ${libtool_fname}
    local libtool_sha256=$2
    check_var ${libtool_sha256}
    check_var ${LIBTOOL_DOWNLOAD_URL}
    fetch_source ${libtool_fname}.tar.gz ${LIBTOOL_DOWNLOAD_URL}
    check_sha256sum ${libtool_fname}.tar.gz ${libtool_sha256}
    tar -zxf ${libtool_fname}.tar.gz
    (cd ${libtool_fname} && do_standard_install)
    rm -rf ${libtool_fname} ${libtool_fname}.tar.gz
}
# Build and install libxcrypt (modern crypt(3) replacement), then remove the
# glibc-provided crypt headers/libraries so libxcrypt is the only provider.
# Requires LIBXCRYPT_DOWNLOAD_URL, LIBXCRYPT_VERSION and LIBXCRYPT_HASH.
function build_libxcrypt {
    curl -fsSLO "$LIBXCRYPT_DOWNLOAD_URL"/v"$LIBXCRYPT_VERSION"
    check_sha256sum "v$LIBXCRYPT_VERSION" "$LIBXCRYPT_HASH"
    tar xfz "v$LIBXCRYPT_VERSION"
    (cd "libxcrypt-$LIBXCRYPT_VERSION" && ./bootstrap && \
        do_standard_install \
            --disable-obsolete-api \
            --enable-hashes=all \
            --disable-werror)
    rm -rf "v$LIBXCRYPT_VERSION" "libxcrypt-$LIBXCRYPT_VERSION"
    # Delete GLIBC version headers and libraries
    rm -rf /usr/include/crypt.h
    rm -rf /usr/lib64/libcrypt.a /usr/lib64/libcrypt.so
}
|
#!/bin/sh
# Copyright 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Compose file used for building; -a appends the development overlay.
DOCKER_YAML=js/docker/docker-compose-build.yaml
# Default credential pairs; override with -p user1,pass1,user2,pass2,...
PASSWDS="$USER,hello"
# Fancy colors in the terminal (only when stdout is a TTY)
if [ -t 1 ]; then
    RED=$(tput setaf 1)
    GREEN=$(tput setaf 2)
    RESET=$(tput sgr0)
else
    RED=
    GREEN=
    RESET=
fi
# Ask for a single-keystroke y/n confirmation; anything other than y/Y aborts
# the script with exit 1.
approve() {
    echo "${GREEN}I know what I'm doing..[y/n]?${RESET}"
    # Read one raw, unechoed keystroke, then restore terminal settings.
    old_stty_cfg=$(stty -g)
    stty raw -echo
    answer=$(head -c 1)
    stty $old_stty_cfg # Careful playing with stty
    if echo "$answer" | grep -iq "^y"; then
        echo Yes
    else
        echo Ok, bye!
        exit 1
    fi
}
# Print usage text and exit non-zero (also used for unknown flags).
help() {
    cat <<EOF
usage: create_web_container.sh [-h] [-a] [-s] [-i] -p user1,pass1,user2,pass2,...
optional arguments:
-h show this help message and exit.
-a expose adb. Requires ~/.android/adbkey to be available at container launch
-s start the container after creation.
-p list of username password pairs. Defaults to: [${PASSWDS}]
-i install systemd service, with definition in /opt/emulator
EOF
    exit 1
}
# Print an error message and abort the script.
panic() {
    # Quote "$1" so the message is printed verbatim (no word splitting
    # or glob expansion of the unquoted parameter).
    echo "$1"
    exit 1
}
# Generate the adb public key from the private key if it does not exist yet.
# Uses adb from PATH, falling back to $ANDROID_SDK_ROOT/platform-tools.
generate_keys() {
    # Generate the adb public key, if it does not exist
    if [ ! -f ~/.android/adbkey ]; then
        local ADB=adb
        # BUG FIX: was `if [ ! command -v $ADB ] >/dev/null 2>&1; then`,
        # which is invalid test(1) syntax; test the command's exit status
        # directly instead.
        if ! command -v "$ADB" >/dev/null 2>&1; then
            ADB=$ANDROID_SDK_ROOT/platform-tools/adb
            command -v "$ADB" >/dev/null 2>&1 || panic "No adb key, and adb not found in $ADB, make sure ANDROID_SDK_ROOT is set!"
        fi
        echo "Creating public key from private key with $ADB"
        $ADB keygen ~/.android/adbkey
    fi
}
# Parse command-line flags (see help() for their meaning).
while getopts 'hasip:' flag; do
    case "${flag}" in
    a) DOCKER_YAML="${DOCKER_YAML} -f js/docker/development.yaml" ;;
    p) PASSWDS="${OPTARG}" ;;
    h) help ;;
    s) START='yes' ;;
    i) INSTALL='yes' ;;
    *) help ;;
    esac
done
# Make sure we have all we need for adb to succeed.
generate_keys
. ./configure.sh >/dev/null
# Now generate the public/private keys and salt the password
cd js/jwt-provider
pip install -r requirements.txt >/dev/null
python gen-passwords.py --pairs "${PASSWDS}" || exit 1
cp jwt_secrets_pub.jwks ../docker/certs/jwt_secrets_pub.jwks
cd ../..
# Copy the private adbkey over
cp ~/.android/adbkey js/docker/certs
# compose the container (the adbkey copy is removed again right after the
# image is built so the private key does not linger in the tree)
pip install docker-compose >/dev/null
docker-compose -f ${DOCKER_YAML} build
rm js/docker/certs/adbkey
if [ "${START}" = "yes" ]; then
    docker-compose -f ${DOCKER_YAML} up
else
    echo "Created container, you can launch it as follows:"
    echo "docker-compose -f ${DOCKER_YAML} up"
fi
if [ "${INSTALL}" = "yes" ]; then
    # Installing as a systemd service copies the compose files and adbkey
    # to /opt/emulator and enables the unit; requires sudo and confirmation.
    echo "Installing created container as systemd service"
    echo "This will copy the docker yaml files in /opt/emulator"
    echo "Make the current adbkey available to the image"
    echo "And activate the container as a systemd service."
    approve
    sudo mkdir -p /opt/emulator
    sudo cp ~/.android/adbkey /opt/emulator/adbkey
    sudo cp js/docker/docker-compose.yaml /opt/emulator/docker-compose.yaml
    sudo cp js/docker/production.yaml /opt/emulator/docker-compose.override.yaml
    sudo cp js/docker/emulator.service /etc/systemd/system/emulator.service
    sudo touch /etc/ssl/certs/emulator-grpc.cer
    sudo touch /etc/ssl/private/emulator-grpc.key
    sudo systemctl daemon-reload
    sudo systemctl enable emulator.service
    sudo systemctl restart emulator.service
fi
|
// Returns true when s1 occurs as a contiguous substring of s2
// (an empty s1 is a substring of everything).
// Replaces the hand-rolled O(N*M) sliding scan with std::string::find,
// which implements exactly this search in the standard library.
bool isSubstring(string s1, string s2){
    return s2.find(s1) != string::npos;
}
def get_domain_names(urls):
    """Return the host part of each URL in ``urls``.

    Generalized from the original, which hard-coded the 8-character
    ``https://`` prefix (``url[8:]``) and therefore returned wrong results
    for ``http://`` and other schemes. Behaviour for ``https://`` URLs is
    unchanged. URLs without a ``//`` scheme separator are treated as
    starting directly with the host.
    """
    domain_names = []
    for url in urls:
        sep = url.find('//')
        # Host starts right after '//', or at the beginning if absent.
        start_index = sep + 2 if sep != -1 else 0
        end_index = url.find('/', start_index)
        if end_index != -1:
            domain_names.append(url[start_index:end_index])
        else:
            domain_names.append(url[start_index:])
    return domain_names
# Build and tag the automatic-actions image (version 0.0.12) from the current directory.
docker build -t cxnt/automatic-actions:0.0.12 .
<gh_stars>100-1000
import torch
from torch import nn
from quant.quant_layer import UniformAffineQuantizer, round_ste
class AdaRoundQuantizer(nn.Module):
    """
    Adaptive Rounding Quantizer, used to optimize the rounding policy
    by reconstructing the intermediate output.

    Based on: Up or Down? Adaptive Rounding for Post-Training Quantization
    (https://arxiv.org/abs/2004.10568)

    :param uaq: UniformAffineQuantizer, used to initialize quantization
        parameters (bit width, scale, zero point) in this quantizer
    :param weight_tensor: weight used to initialize the rounding variable alpha
    :param round_mode: controls the forward pass; one of 'nearest',
        'nearest_ste', 'stochastic', 'learned_hard_sigmoid'
    """

    def __init__(self, uaq: UniformAffineQuantizer, weight_tensor: torch.Tensor, round_mode='learned_hard_sigmoid'):
        super(AdaRoundQuantizer, self).__init__()
        # copying all attributes from UniformAffineQuantizer
        self.n_bits = uaq.n_bits
        self.sym = uaq.sym
        self.delta = uaq.delta
        self.zero_point = uaq.zero_point
        self.n_levels = uaq.n_levels

        # BUG FIX: the previous default, 'learned_round_sigmoid', is handled
        # neither by init_alpha() (NotImplementedError at construction) nor by
        # forward() (ValueError), so any caller relying on the default crashed.
        # Default to the one learned mode this class implements; explicit
        # callers are unaffected.
        self.round_mode = round_mode
        self.alpha = None
        self.soft_targets = False

        # Stretch parameters and temperature of the rectified sigmoid
        # used for the learned rounding variable.
        self.gamma, self.zeta = -0.1, 1.1
        self.beta = 2/3
        self.init_alpha(x=weight_tensor.clone())

    def forward(self, x):
        """Quantize-dequantize x according to the configured round_mode."""
        if self.round_mode == 'nearest':
            x_int = torch.round(x / self.delta)
        elif self.round_mode == 'nearest_ste':
            # Straight-through estimator: round forward, identity backward.
            x_int = round_ste(x / self.delta)
        elif self.round_mode == 'stochastic':
            x_floor = torch.floor(x / self.delta)
            rest = (x / self.delta) - x_floor  # rest of rounding
            x_int = x_floor + torch.bernoulli(rest)
            print('Draw stochastic sample')
        elif self.round_mode == 'learned_hard_sigmoid':
            x_floor = torch.floor(x / self.delta)
            if self.soft_targets:
                # Training: differentiable soft rounding in [0, 1].
                x_int = x_floor + self.get_soft_targets()
            else:
                # Inference: hard 0/1 rounding decided by the sign of alpha.
                x_int = x_floor + (self.alpha >= 0).float()
        else:
            raise ValueError('Wrong rounding mode')

        # Clamp to the representable integer grid, then dequantize.
        x_quant = torch.clamp(x_int + self.zero_point, 0, self.n_levels - 1)
        x_float_q = (x_quant - self.zero_point) * self.delta
        return x_float_q

    def get_soft_targets(self):
        """Rectified sigmoid of alpha, clipped to [0, 1]."""
        return torch.clamp(torch.sigmoid(self.alpha) * (self.zeta - self.gamma) + self.gamma, 0, 1)

    def init_alpha(self, x: torch.Tensor):
        """Initialize alpha so the soft rounding reproduces FP32 rounding of x."""
        x_floor = torch.floor(x / self.delta)
        if self.round_mode == 'learned_hard_sigmoid':
            print('Init alpha to be FP32')
            rest = (x / self.delta) - x_floor  # rest of rounding [0, 1)
            # Invert the rectified sigmoid so that sigmoid(alpha) == rest.
            alpha = -torch.log((self.zeta - self.gamma) / (rest - self.gamma) - 1)
            self.alpha = nn.Parameter(alpha)
        else:
            raise NotImplementedError
|
import { connect } from 'react-redux'
import SimpleForm from '../components/SimpleForm'
import { updateText } from '../actions'
// Expose only the text slice of the redux store to the form component.
const mapStateToProps = (state) => {
  return { text: state.text }
}
// Dispatch the new input value on every change event; preventDefault stops
// any default handling of the event before dispatching.
// (Removed a leftover debug console.log of e.target.)
const mapDispatchToProps = (dispatch) => ({
  onChange: (e) => {
    e.preventDefault()
    dispatch(updateText(e.target.value))
  }
})
const SimpleFormContainer = connect(
mapStateToProps,
mapDispatchToProps
) (SimpleForm)
export default SimpleFormContainer
|
<filename>community-modules/client-side-row-model/dist/es6/clientSideRowModel/immutableService.d.ts<gh_stars>0
import { IImmutableService, RowDataTransaction, BeanStub } from "@ag-grid-community/core";
/**
 * Generated type declaration for the client-side row model's ImmutableService.
 * createTransactionForRowData diffs new row data against the current model and
 * returns a transaction plus an id->index order map, or undefined when the
 * data cannot be converted to a transaction.
 */
export declare class ImmutableService extends BeanStub implements IImmutableService {
    private rowModel;
    private clientSideRowModel;
    private postConstruct;
    createTransactionForRowData(data: any[]): ([RowDataTransaction, {
        [id: string]: number;
    }]) | undefined;
}
|
#!/bin/sh
#BSUB -J compare #The name the job will get
#BSUB -q gpuv100 #The queue the job will be committed to, here the GPU enabled queue
#BSUB -gpu "num=1:mode=exclusive_process" #How the job will be run on the VM, here I request 1 GPU with exclusive access i.e. only my code
#BSUB -n 1 #How many CPU cores my job requests
#BSUB -W 24:00 #The maximum runtime my job have note that the queuing might enable shorter jobs earlier due to scheduling.
#BSUB -R "span[hosts=1]" #How many nodes the job requests
#BSUB -R "rusage[mem=12GB]" #How much RAM the job should have access to
#BSUB -R "select[gpu32gb]" #For requesting the extra big GPU w. 32GB of VRAM
#BSUB -o logs/OUTPUT.%J #Log file
#BSUB -e logs/ERROR.%J #Error log file
# Run the meta-learning comparison inside the thesis virtualenv.
echo "Starting:"
cd ~/Thesis/metalearning
#cd /Users/theisferre/Documents/SPECIALE/Thesis/src/models
source ~/Thesis/venv-thesis/bin/activate
python /zhome/2b/7/117471/Thesis/src/models/compare_metalearning.py
|
# Habitat plan for SQLite.
pkg_name=sqlite
pkg_version=3.35.1
# Upstream encodes 3.35.1 as 3350100 in the autoconf tarball name.
pkg_dist_version=3350100
pkg_origin=core
pkg_license=('Public Domain')
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_description="A software library that implements a self-contained, serverless, zero-configuration, transactional SQL database engine."
pkg_upstream_url=https://www.sqlite.org/
pkg_source="https://www.sqlite.org/2021/${pkg_name}-autoconf-${pkg_dist_version}.tar.gz"
pkg_filename="${pkg_name}-autoconf-${pkg_dist_version}.tar.gz"
pkg_dirname="${pkg_name}-autoconf-${pkg_dist_version}"
pkg_shasum=dce5616c059084887ccf64739e368c161c49c64e7a7e43a47c2940d22d29560f
# Runtime dependencies
pkg_deps=(
  core/glibc
  core/readline
)
# Build-time-only dependencies
pkg_build_deps=(
  core/gcc
  core/make
  core/coreutils
)
pkg_lib_dirs=(lib)
pkg_include_dirs=(include)
pkg_bin_dirs=(bin)
|
// Electron preload: expose ipcRenderer on window so in-page scripts can
// talk to the main process.
const electron = require("electron")
const { ipcRenderer } = electron
window.ipcRenderer = ipcRenderer
|
<filename>server/src/main/java/com/decathlon/ara/repository/ExecutedScenarioRepository.java<gh_stars>0
package com.decathlon.ara.repository;
import com.decathlon.ara.domain.ExecutedScenario;
import com.decathlon.ara.repository.custom.ExecutedScenarioRepositoryCustom;
import java.util.List;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.querydsl.QuerydslPredicateExecutor;
import org.springframework.stereotype.Repository;
/**
 * Spring Data JPA repository for the ExecutedScenario entity.
 */
@Repository
public interface ExecutedScenarioRepository extends JpaRepository<ExecutedScenario, Long>, JpaSpecificationExecutor<ExecutedScenario>,
        ExecutedScenarioRepositoryCustom, QuerydslPredicateExecutor<ExecutedScenario> {

    /**
     * @param projectId the project to search in
     * @return the distinct feature names of the project's executed scenarios, sorted alphabetically
     */
    @Query("SELECT DISTINCT executedScenario.featureName " +
            "FROM ExecutedScenario executedScenario " +
            "WHERE executedScenario.run.execution.cycleDefinition.projectId = ?1 " +
            "ORDER BY executedScenario.featureName")
    List<String> findDistinctFeatureNameByProjectId(long projectId);

    /**
     * @param projectId the project to search in
     * @return the distinct feature file names of the project's executed scenarios, sorted alphabetically
     */
    @Query("SELECT DISTINCT executedScenario.featureFile " +
            "FROM ExecutedScenario executedScenario " +
            "WHERE executedScenario.run.execution.cycleDefinition.projectId = ?1 " +
            "ORDER BY executedScenario.featureFile")
    List<String> findDistinctFeatureFileByProjectId(long projectId);

    /**
     * @param projectId the project to search in
     * @return the distinct scenario names of the project's executed scenarios, sorted alphabetically
     */
    @Query("SELECT DISTINCT executedScenario.name " +
            "FROM ExecutedScenario executedScenario " +
            "WHERE executedScenario.run.execution.cycleDefinition.projectId = ?1 " +
            "ORDER BY executedScenario.name")
    List<String> findDistinctNameByProjectId(long projectId);

    /**
     * @param projectId the project the scenario must belong to
     * @param executedScenarioId the id of the executed scenario to fetch
     * @return the matching executed scenario, scoped to the project
     */
    @Query("SELECT es " +
            "FROM ExecutedScenario es " +
            "WHERE es.run.execution.cycleDefinition.projectId = ?1 " +
            "AND es.id = ?2 ")
    ExecutedScenario findOne(long projectId, long executedScenarioId);
}
|
def MoveTower(height, from_, to, aux):
    """Move ``height`` discs from peg ``from_`` to peg ``to`` via peg ``aux``.

    BUG FIX: the source-peg parameter was originally named ``from``, which is
    a Python keyword and made this file a SyntaxError; renamed to ``from_``.
    """
    if height >= 1:
        # Move all but the bottom disc out of the way, move the bottom disc,
        # then move the rest on top of it.
        MoveTower(height - 1, from_, aux, to)
        MoveDisc(from_, to)
        MoveTower(height - 1, aux, to, from_)


def MoveDisc(from_, to):
    """Report a single disc move."""
    print("Move disc from", from_, "to", to)


# Move discs from Peg 1 to Peg 3
MoveTower(3, 1, 3, 2)
// Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// EdgeClusterInstance EdgeClusterInstance
//
// swagger:model EdgeClusterInstance
type EdgeClusterInstance struct {

	// display name
	DisplayName string `json:"displayName,omitempty"`

	// edge cluster Id
	EdgeClusterID string `json:"edgeClusterId,omitempty"`

	// path
	Path string `json:"path,omitempty"`
}

// Validate validates this edge cluster instance.
// No validation rules are generated for this model, so it always succeeds.
func (m *EdgeClusterInstance) Validate(formats strfmt.Registry) error {
	return nil
}

// MarshalBinary interface implementation
func (m *EdgeClusterInstance) MarshalBinary() ([]byte, error) {
	if m == nil {
		return nil, nil
	}
	return swag.WriteJSON(m)
}

// UnmarshalBinary interface implementation
func (m *EdgeClusterInstance) UnmarshalBinary(b []byte) error {
	var res EdgeClusterInstance
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*m = res
	return nil
}
|
<reponame>manish-drake/ts-shell
#include "router.h"
#include "ts-messages/RouterRegister.h"
#include "ts-messages/RouterRegisterReply.h"
#include "ts-messages/header.h"
#include <memory>
// Process-wide router singleton instance.
Router Router::m_router;

// Wire the socket service so every incoming multipart message is handed
// to this router's Receive() dispatcher.
Router::Router()
{
    m_sockService.OnReceive([&](std::vector<std::string> &messages) -> std::string {
        return this->Receive(messages);
    });
}

Router::~Router()
{
}
// Dispatch an incoming multipart message.
// messages[0] carries the serialized header, messages[1] the payload.
// Registration requests addressed to "router" are answered directly;
// everything else is forwarded to the destination endpoint looked up in
// the routing table. Unknown/short messages get the "touche!" sentinel.
string Router::Receive(std::vector<std::string>& messages)
{
    if (messages.size() >= 2) {
        Header hdr;
        hdr.setMessage(messages[0]);
        if (hdr.getDestination() == "router") {
            if (hdr.getType() == "CRouterRegister") {
                // Register the client in the routing table and reply
                // with its newly assigned token.
                CRouterRegister routerRegister;
                routerRegister.setMessage(messages[1]);
                int token = this->m_table.Add(routerRegister.getName(), routerRegister.getAddressUri());
                CRouterRegisterReply reply;
                reply.setToken(token);
                Header repHdr;
                repHdr.setType("CRouterRegisterReply");
                m_sockService.Send(routerRegister.getAddressUri(), {repHdr.getMessage(), reply.getMessage()});
                return reply.getMessage();
            }
            // BUG FIX: router-addressed messages of any other type previously
            // fell off the end of this value-returning function (undefined
            // behaviour); answer with the sentinel instead.
            return "touche!";
        } else {
            int source = hdr.getSource();
            string destination = hdr.getDestination();
            char dest[255];
            // snprintf guards the fixed-size buffer against overflow
            // (sprintf had no bound).
            snprintf(dest, sizeof(dest), "inproc://%s", destination.c_str());
            destination = m_table.GetClientUri(source);
            Header dstHdr;
            dstHdr.setDestination(destination);
            dstHdr.setSource(source);
            dstHdr.setType(hdr.getType());
            return m_sockService.Send(dest, { dstHdr.getMessage(), messages[1] });
        }
    } else {
        return "touche!"; //Hit by an unknown message
    }
}
// Single-frame overload: the router only understands multipart messages,
// so anything arriving here is answered with the sentinel reply.
string Router::Receive(const string& strMsg)
{
    return "touche!"; //Nothing here
}
|
#!/usr/bin/env bash
set -euo pipefail

if [ "${DASHBOARD-}" == "0" ] || [ "${DASHBOARD-}" = "SKIP" ]; then
    # DASHBOARD=0 will completely exclude TiDB Dashboard in building when calling from Makefile
    # while DASHBOARD=SKIP will keep current asset file unchanged and include it in building
    echo '+ Skip TiDB Dashboard'
    exit 0
fi

# Terminal colors for error/hint messages.
RED='\033[1;31m'
YELLOW='\033[1;33m'
NC='\033[0m'

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
BASE_DIR="$(dirname "$DIR")"
ASSET_FILE_NAME=embedded_assets_handler.go
ASSET_DEST_PATH=${BASE_DIR}/pkg/dashboard/uiserver/${ASSET_FILE_NAME}

echo '+ Clean up existing asset file'
# BUG FIX: was `rm -f ASSET_DEST_PATH` (missing `$`), which removed a literal
# file named "ASSET_DEST_PATH" and left the real stale asset in place.
rm -f "${ASSET_DEST_PATH}"

echo '+ Fetch TiDB Dashboard Go module'
go mod download
go mod tidy
DASHBOARD_DIR=$(go list -f "{{.Dir}}" -m github.com/pingcap/tidb-dashboard)
echo " - TiDB Dashboard directory: ${DASHBOARD_DIR}"
# Download the pre-built embedded UI assets matching the pinned TiDB
# Dashboard release version (caching the archive locally), unpack them,
# and install the generated Go file at ASSET_DEST_PATH.
function download_embed_asset {
    CACHE_DIR=${BASE_DIR}/.dashboard_asset_cache
    echo '+ Create asset cache directory'
    mkdir -p "${CACHE_DIR}"
    echo '+ Discover TiDB Dashboard release version'
    # The release-version file may contain comment lines; skip them.
    DASHBOARD_RELEASE_VERSION=$(grep -v '^#' "${DASHBOARD_DIR}/release-version")
    echo " - TiDB Dashboard release version: ${DASHBOARD_RELEASE_VERSION}"
    echo '+ Check embedded assets exists in cache'
    CACHE_FILE=${CACHE_DIR}/embedded-assets-golang-${DASHBOARD_RELEASE_VERSION}.zip
    if [[ -f "$CACHE_FILE" ]]; then
        echo " - Cached archive exists: ${CACHE_FILE}"
    else
        echo ' - Cached archive does not exist'
        echo ' - Download pre-built embedded assets from GitHub release'
        DOWNLOAD_URL="https://github.com/pingcap/tidb-dashboard/releases/download/v${DASHBOARD_RELEASE_VERSION}/embedded-assets-golang.zip"
        DOWNLOAD_FILE=${CACHE_DIR}/embedded-assets-golang.zip
        echo " - Download ${DOWNLOAD_URL}"
        if ! curl -L "${DOWNLOAD_URL}" --fail --output "${DOWNLOAD_FILE}"; then
            # No release asset for this version: explain the likely causes
            # and the build alternatives before aborting.
            echo
            echo -e "${RED}Error: Failed to download assets of TiDB Dashboard release version ${DASHBOARD_RELEASE_VERSION}.${NC}"
            if [ "${DASHBOARD_RELEASE_VERSION}" == "nightly" ]; then
                echo 'This project is using the nightly version of TiDB Dashboard, which does not have any release.'
            else
                echo 'This may be caused by using a non-release version of TiDB Dashboard, or the release is still in progress.'
            fi
            echo
            echo -e "To compile PD without TiDB Dashboard: ${YELLOW}DASHBOARD=0 make${NC}"
            echo -e "To compile PD by building TiDB Dashboard assets on-the-fly: ${YELLOW}DASHBOARD=COMPILE make${NC} or ${YELLOW}NO_MINIMIZE=1 DASHBOARD=COMPILE make${NC}"
            exit 1
        fi
        echo " - Save archive to cache: ${CACHE_FILE}"
        mv "${DOWNLOAD_FILE}" "${CACHE_FILE}"
    fi
    echo '+ Unpack embedded asset from archive'
    unzip -o "${CACHE_FILE}"
    gofmt -s -w ${ASSET_FILE_NAME}
    mv "${ASSET_FILE_NAME}" "${ASSET_DEST_PATH}"
    echo " - Unpacked ${ASSET_DEST_PATH}"
}
# Build the TiDB Dashboard UI assets from source in a scratch directory,
# preserving the node_modules dependency cache between runs, and install
# the generated Go file at ASSET_DEST_PATH.
function compile_asset {
    BUILD_DIR=${BASE_DIR}/.dashboard_build_temp
    echo '+ Clean up TiDB Dashboard build directory'
    # BUG FIX: this message reported ${DASHBOARD_DIR} (the module source dir)
    # while the directory being cleaned is ${BUILD_DIR}.
    echo " - Build directory: ${BUILD_DIR}"
    if [ -d "${BUILD_DIR}/ui/node_modules" ]; then
        # Keep the npm dependency cache while wiping everything else.
        echo " - Build dependency exists, keep dependency cache"
        mv "${BUILD_DIR}/ui/node_modules" ./
        rm -rf "${BUILD_DIR}"
        mkdir -p "${BUILD_DIR}/ui"
        mv ./node_modules "${BUILD_DIR}/ui/"
    else
        rm -rf "${BUILD_DIR}"
        mkdir -p "${BUILD_DIR}/ui"
    fi
    echo '+ Copy TiDB Dashboard source code to build directory'
    echo " - Src: ${DASHBOARD_DIR}"
    echo " - Dest: ${BUILD_DIR}"
    cp -r "${DASHBOARD_DIR}/." "${BUILD_DIR}/"
    # Module-cache files are read-only; make the copy writable/executable.
    chmod -R u+w "${BUILD_DIR}"
    chmod u+x "${BUILD_DIR}"/scripts/*.sh
    echo '+ Build UI'
    cd "${BUILD_DIR}"
    make ui
    echo '+ Generating UI assets'
    echo ' - Generating...'
    NO_ASSET_BUILD_TAG=1 scripts/embed_ui_assets.sh
    echo ' - Writing...'
    cp "pkg/uiserver/${ASSET_FILE_NAME}" "${ASSET_DEST_PATH}"
    cd -
    echo " - Wrote ${ASSET_DEST_PATH}"
    echo '+ Build UI complete'
}
# DASHBOARD=COMPILE builds the assets from source; any other value (the
# default) downloads the pre-built archive for the pinned release.
if [ "${DASHBOARD-}" == "COMPILE" ]; then
    compile_asset
else
    download_embed_asset
fi
|
<reponame>stroke-outcome/the-corisk-score
// Wrap a successfully parsed value in the shared validation-result shape.
const getValidResult = (value) => ({
  reason: '',
  value: value,
  isValid: true
});
// Build a failed validation result carrying an optional explanation.
const getInvalidResult = (reason = '') => ({
  reason: reason,
  value: null,
  isValid: false
});
// Parse and range-check an age entered as a string (0..120 inclusive).
export const validateAge = (ageStr) => {
  if (ageStr === '') {
    return getInvalidResult('Age is required');
  }
  const age = parseInt(ageStr, 10);
  if (Number.isNaN(age)) {
    return getInvalidResult('Age must be a number');
  }
  return (age < 0 || age > 120)
    ? getInvalidResult('Age must be between 0 and 120')
    : getValidResult(age);
};
// Parse and range-check an NIHSS score entered as a string (0..42 inclusive).
export const validateNihssPoints = (nihssStr) => {
  if (nihssStr === '') {
    return getInvalidResult('Nihss points is required');
  }
  const nihss = parseInt(nihssStr, 10);
  if (Number.isNaN(nihss)) {
    return getInvalidResult('Nihss points must be a number');
  }
  return (nihss < 0 || nihss > 42)
    ? getInvalidResult('Nihss points must be between 0 and 42 inclusive')
    : getValidResult(nihss);
};
// Parse a copeptin level entered as a string; must be a number <= 999.
export const validateCopeptinLevel = (copeptinStr) => {
  if (copeptinStr === '') {
    return getInvalidResult('Copeptin level is required');
  }
  const copeptin = parseFloat(copeptinStr);
  if (Number.isNaN(copeptin)) {
    return getInvalidResult('Copeptin level must be a number');
  }
  return copeptin > 999
    ? getInvalidResult('Copeptin level must be below 999')
    : getValidResult(copeptin);
};
// CoRisk score: logistic-regression estimate (rounded to a 0-100 integer)
// from age, NIHSS points, copeptin level and thrombolysis status.
// Returns -1 when any of the three inputs fails validation.
const calculateCoRiskScore = ({ age, nihss, copeptin, thrombolysis = false }) => {
  if (!validateAge(age).isValid || !validateNihssPoints(nihss).isValid || !validateCopeptinLevel(copeptin).isValid) {
    return -1;
  }
  const thrombolysisParam = thrombolysis ? 1 : 0;
  // Logistic model with fixed coefficients — presumably taken from the
  // CoRisk publication; do not alter without re-validating the model.
  const value = 1 / (1 + Math.exp(7.201586 - (0.05702 * age) - (0.22001 * nihss) + (2.05353 * thrombolysisParam) - (1.18481 * Math.log10(copeptin))));
  const rounded = Math.round(value * 100);
  return rounded;
};
export default calculateCoRiskScore;
|
package com.appium.page.objects;
import io.appium.java_client.MobileElement;
import io.appium.java_client.pagefactory.AndroidFindBy;
import io.appium.java_client.pagefactory.SelendroidFindBy;
import io.appium.java_client.pagefactory.iOSFindBy;
import java.util.List;
/**
 * Page Object holding the locators of the comment screen, resolved per
 * platform through the Android/Selendroid/iOS find-by annotations.
 */
public class CommentPageObjects {

    /** Topic rows available for commenting. */
    @AndroidFindBy(id = "org.Ebay.android:id/comment") @SelendroidFindBy(id = "comment")
    @iOSFindBy(className = "UIATableCell") public List<MobileElement> SELECT_TOPIC;

    /** Input field for the comment text. */
    @AndroidFindBy(id = "org.Ebay.android:id/edit_comment")
    @SelendroidFindBy(id = "edit_comment") @iOSFindBy(accessibility = "ReplyText") public MobileElement
    ENTER_COMMENTS;

    /** Button that submits the comment. */
    @AndroidFindBy(id = "org.Ebay.android:id/btn_submit_reply")
    @SelendroidFindBy(id = "image_post_comment") @iOSFindBy(id = "Reply") public MobileElement
    SUBMIT_COMMENTS;

    /** Navigates back from the comments screen. */
    @iOSFindBy(accessibility = "Comments")
    @AndroidFindBy(accessibility = "Navigate up")
    public MobileElement BACK;

    // NOTE(review): declared as String while every other locator field is a
    // MobileElement — looks like a mistake; confirm the intended type.
    @AndroidFindBy(id = "org.Ebay.android:id/progress_submit_comment")
    @SelendroidFindBy(id = "progress_submit_comment") public String PROGRESS_SUBMIT;
}
|
import numpy as np
import tensorflow as tf
def create_model():
    """Build and compile a small binary classifier: 2 -> 8 -> 4 -> 1.

    Hidden layers use ReLU; the single-unit output uses a sigmoid, paired
    with binary cross-entropy loss and the Adam optimizer.
    """
    layers = [
        tf.keras.layers.Input(shape=(2,)),
        tf.keras.layers.Dense(8, activation='relu'),
        tf.keras.layers.Dense(4, activation='relu'),
        tf.keras.layers.Dense(1, activation='sigmoid'),
    ]
    model = tf.keras.Sequential(layers)
    model.compile(
        optimizer=tf.keras.optimizers.Adam(),
        loss=tf.keras.losses.BinaryCrossentropy(),
        metrics=['accuracy'],
    )
    return model
model = create_model()
# Smoke-train on a single two-feature sample for one epoch.
model.fit([[10, 12],], [1,], epochs=1, batch_size=4)
import fsExtra from "fs-extra";
import { subtask, task } from "../internal/core/config/config-env";
import { getCacheDir } from "../internal/util/global-dir";
import { TASK_CLEAN, TASK_CLEAN_GLOBAL } from "./task-names";
// Empties the global (user-wide) cache directory.
subtask(TASK_CLEAN_GLOBAL, async () => {
  const globalCacheDir = await getCacheDir();
  await fsExtra.emptyDir(globalCacheDir);
});
// clean: empties the project cache and removes artifacts; with --global it
// clears the global cache instead (and leaves project paths untouched).
task(TASK_CLEAN, "Clears the cache and deletes all artifacts")
  .addFlag("global", "Clear the global cache")
  .setAction(async ({ global }: { global: boolean }, { config, run }) => {
    if (global) {
      return run(TASK_CLEAN_GLOBAL);
    }
    await fsExtra.emptyDir(config.paths.cache);
    await fsExtra.remove(config.paths.artifacts);
  });
|
<filename>api/internal/handler/stream_route/stream_route_test.go
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stream_route
import (
"encoding/json"
"testing"
"github.com/shiningrush/droplet"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/apisix/manager-api/internal/core/entity"
"github.com/apisix/manager-api/internal/core/store"
)
// TestStructUnmarshal checks that a full JSON stream route document
// unmarshals into entity.StreamRoute with every field populated.
func TestStructUnmarshal(t *testing.T) {
	// define and parse data
	jsonStr := `{
		"id": 1,
		"create_time": 1700000000,
		"update_time": 1700000000,
		"desc": "desc",
		"remote_addr": "1.1.1.1",
		"server_addr": "172.16.31.10",
		"server_port": 9080,
		"sni": "example.com",
		"upstream": {
			"nodes": [
				{
					"host": "10.10.10.10",
					"port": 8080,
					"weight": 1
				}
			],
			"type": "roundrobin",
			"scheme": "http",
			"pass_host": "pass"
		},
		"upstream_id": 1
	}`
	streamRoute := entity.StreamRoute{}
	err := json.Unmarshal([]byte(jsonStr), &streamRoute)
	// asserts
	assert.Nil(t, err)
	assert.Equal(t, streamRoute.ID, float64(1))
	assert.Equal(t, streamRoute.CreateTime, int64(1700000000))
	assert.Equal(t, streamRoute.UpdateTime, int64(1700000000))
	assert.Equal(t, streamRoute.Desc, "desc")
	assert.Equal(t, streamRoute.RemoteAddr, "1.1.1.1")
	assert.Equal(t, streamRoute.ServerAddr, "172.16.31.10")
	assert.Equal(t, streamRoute.ServerPort, 9080)
	assert.Equal(t, streamRoute.SNI, "example.com")
	assert.Equal(t, streamRoute.UpstreamID, float64(1))
	assert.NotNil(t, streamRoute.Upstream)
}
// TestStreamRouteConditionList exercises List() filtering by remote_addr,
// server_addr and server_port against a mocked store.
func TestStreamRouteConditionList(t *testing.T) {
	giveData := []*entity.StreamRoute{
		{BaseInfo: entity.BaseInfo{CreateTime: 1609376663}, RemoteAddr: "127.0.0.1", ServerAddr: "127.0.0.1", ServerPort: 9090, Upstream: nil, UpstreamID: "u1"},
		{BaseInfo: entity.BaseInfo{CreateTime: 1609376664}, RemoteAddr: "127.0.0.2", ServerAddr: "127.0.0.1", ServerPort: 9091, Upstream: nil, UpstreamID: "u1"},
		{BaseInfo: entity.BaseInfo{CreateTime: 1609376665}, RemoteAddr: "127.0.0.3", ServerAddr: "127.0.0.1", ServerPort: 9092, Upstream: nil, UpstreamID: "u1"},
		{BaseInfo: entity.BaseInfo{CreateTime: 1609376666}, RemoteAddr: "127.0.0.4", ServerAddr: "127.0.0.1", ServerPort: 9093, Upstream: nil, UpstreamID: "u1"},
	}
	tests := []struct {
		desc      string
		giveInput *ListInput
		giveErr   error
		wantErr   error
		wantRet   interface{}
	}{
		{
			desc: "list all stream route",
			giveInput: &ListInput{
				Pagination: store.Pagination{
					PageSize:   10,
					PageNumber: 10,
				},
			},
			wantRet: &store.ListOutput{
				Rows: []interface{}{
					giveData[0], giveData[1], giveData[2], giveData[3],
				},
				TotalSize: 4,
			},
		},
		{
			desc: "list stream route with remote_addr",
			giveInput: &ListInput{
				RemoteAddr: "127.0.0.1",
				Pagination: store.Pagination{
					PageSize:   10,
					PageNumber: 10,
				},
			},
			wantRet: &store.ListOutput{
				Rows: []interface{}{
					&entity.StreamRoute{BaseInfo: entity.BaseInfo{CreateTime: 1609376663}, RemoteAddr: "127.0.0.1", ServerAddr: "127.0.0.1", ServerPort: 9090, Upstream: nil, UpstreamID: "u1"},
				},
				TotalSize: 1,
			},
		},
		{
			desc: "list stream route with server_addr",
			giveInput: &ListInput{
				ServerAddr: "127.0.0.1",
				Pagination: store.Pagination{
					PageSize:   10,
					PageNumber: 10,
				},
			},
			wantRet: &store.ListOutput{
				Rows: []interface{}{
					&entity.StreamRoute{BaseInfo: entity.BaseInfo{CreateTime: 1609376663}, RemoteAddr: "127.0.0.1", ServerAddr: "127.0.0.1", ServerPort: 9090, Upstream: nil, UpstreamID: "u1"},
					&entity.StreamRoute{BaseInfo: entity.BaseInfo{CreateTime: 1609376664}, RemoteAddr: "127.0.0.2", ServerAddr: "127.0.0.1", ServerPort: 9091, Upstream: nil, UpstreamID: "u1"},
					&entity.StreamRoute{BaseInfo: entity.BaseInfo{CreateTime: 1609376665}, RemoteAddr: "127.0.0.3", ServerAddr: "127.0.0.1", ServerPort: 9092, Upstream: nil, UpstreamID: "u1"},
					&entity.StreamRoute{BaseInfo: entity.BaseInfo{CreateTime: 1609376666}, RemoteAddr: "127.0.0.4", ServerAddr: "127.0.0.1", ServerPort: 9093, Upstream: nil, UpstreamID: "u1"},
				},
				TotalSize: 4,
			},
		},
		{
			desc: "list stream route with server_port",
			giveInput: &ListInput{
				ServerPort: 9092,
				Pagination: store.Pagination{
					PageSize:   10,
					PageNumber: 10,
				},
			},
			wantRet: &store.ListOutput{
				Rows: []interface{}{
					&entity.StreamRoute{BaseInfo: entity.BaseInfo{CreateTime: 1609376665}, RemoteAddr: "127.0.0.3", ServerAddr: "127.0.0.1", ServerPort: 9092, Upstream: nil, UpstreamID: "u1"},
				},
				TotalSize: 1,
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.desc, func(t *testing.T) {
			// BUG FIX: getCalled was initialized to true, which made the
			// assert.True(t, getCalled) below pass even when the mocked
			// List() was never invoked.
			getCalled := false
			mStore := &store.MockInterface{}
			mStore.On("List", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
				getCalled = true
			}).Return(func(input store.ListInput) *store.ListOutput {
				var returnData []interface{}
				for _, c := range giveData {
					if input.Predicate(c) {
						if input.Format == nil {
							returnData = append(returnData, c)
							continue
						}
						returnData = append(returnData, input.Format(c))
					}
				}
				return &store.ListOutput{
					Rows:      returnData,
					TotalSize: len(returnData),
				}
			}, tc.giveErr)

			h := Handler{streamRouteStore: mStore}
			ctx := droplet.NewContext()
			ctx.SetInput(tc.giveInput)
			ret, err := h.List(ctx)
			assert.True(t, getCalled)
			assert.Equal(t, tc.wantRet, ret)
			assert.Equal(t, tc.wantErr, err)
		})
	}
}
|
#!/bin/bash
# Deploys the cache-service and datagrid-service templates to a local
# OpenShift cluster for testing, using the image built by the Makefile.
# This name is hardcoded in Makefile. We need a fixed name to push it to local OpenShift registry
IMAGE_NAME=${image:-jboss-datagrid-7/datagrid73-openshift}
echo "---- Clearing up (any potential) leftovers ----"
# `|| true` keeps the script going when there is nothing left to delete.
oc delete all,secrets,sa,templates,configmaps,daemonsets,clusterroles,rolebindings,serviceaccounts --selector=template=cache-service || true
oc delete all,secrets,sa,templates,configmaps,daemonsets,clusterroles,rolebindings,serviceaccounts --selector=template=datagrid-service || true
echo "---- Install templates ----"
echo "Current dir $PWD"
echo "Using image $IMAGE_NAME"
# Template files are expected one directory above the working directory.
oc create -f ../cache-service-template.yaml
oc create -f ../datagrid-service-template.yaml
echo "---- Creating Caching Service for test ----"
# SCRIPT_DEBUG=true turns on verbose logging in the image entrypoint.
oc new-app cache-service \
-p IMAGE=${IMAGE_NAME} \
-p APPLICATION_USER=test \
-p APPLICATION_PASSWORD=test \
-e SCRIPT_DEBUG=true
echo "---- Creating Datagrid Service for test ----"
oc new-app datagrid-service \
-p IMAGE=${IMAGE_NAME} \
-p APPLICATION_USER=test \
-p APPLICATION_PASSWORD=test \
-p TOTAL_CONTAINER_STORAGE=2 \
-e SCRIPT_DEBUG=true
|
# Build and install FIS-GT.M in debug mode into ./package, then run the
# installed distribution's own configure step.
# Assumes $gtm_dist points at an existing GT.M distribution -- TODO confirm.
cmake -D GTM_DIST:PATH=$gtm_dist -D CMAKE_BUILD_TYPE=DEBUG -D CMAKE_INSTALL_PREFIX:PATH=${PWD}/package ..
make -j4
make install
# NOTE(review): the version directory is hardcoded (V6.3-000_x86_64) --
# confirm it matches the version actually installed above.
cd ${PWD}/package/lib/fis-gtm/V6.3-000_x86_64
sudo ./configure
|
#!/bin/bash
# Launches the Flo robot (default) or podium simulator via docker-compose,
# wiring X11, pulseaudio and the invoking user's uid/gid into the containers.
set -o errexit
set -o pipefail
rebuild=false
target='robot'
while getopts :rph flag
do
case "${flag}" in
r) rebuild=true;;
p) target='podium';;
h) echo 'Run Simulator'
echo 'Default just runs'
echo '-p : podium simulator'
echo '-r : force rebuild'
exit 0;;
:) echo 'missing argument' >&2; exit 1;;
\?) echo 'invalid option' >&2; exit 1
esac
done
# Local database volume shared with the containers.
mkdir -p ~/flo_db
# X11 socket plus a dedicated xauth cookie file for the containers.
XSOCK=/tmp/.X11-unix
export XSOCK
XAUTH=/tmp/.docker.xauth
export XAUTH
touch $XAUTH
# Rewrite the display cookie family to "ffff" so it is host-independent
# inside the container (standard docker/X11 trick).
xauth nlist $DISPLAY | sed -e 's/^..../ffff/' | xauth -f $XAUTH nmerge -
# Run containers as the invoking user to keep mounted-file ownership sane.
ROS_USER=$(id -u "$USER"):$(id -g "$USER")
export ROS_USER
R_GID=$(id -g "$USER")
R_UID=$(id -u "$USER")
export R_GID
export R_UID
# Expose pulseaudio over a unix socket; the trap unloads the module on exit.
pactlmod=$(pactl load-module module-native-protocol-unix socket=/tmp/pulseaudio.socket)
trap 'pactl unload-module "$pactlmod"' EXIT
if [ "$rebuild" = true ] ; then
docker-compose -f "docker-compose-$target-sim.yml" build
fi
docker-compose -f "docker-compose-$target-sim.yml" up
|
package main
import (
"fmt"
"log"
"github.com/sugarme/tokenizer"
"github.com/sugarme/tokenizer/model/bpe"
"github.com/sugarme/tokenizer/pretokenizer"
)
// runTest builds a BPE tokenizer from the bundled Spanish vocab/merges
// files, encodes a sample sentence, and prints the resulting tokens.
func runTest() {
	bpeModel, err := bpe.NewBpeFromFiles("model/es-vocab.json", "model/es-merges.txt")
	if err != nil {
		log.Fatal(err)
	}

	tk := tokenizer.NewTokenizer(bpeModel)
	tk.WithPreTokenizer(pretokenizer.NewBertPreTokenizer())

	sentence := "Mi estas Julien."
	input := tokenizer.NewSingleEncodeInput(tokenizer.NewInputSequence(sentence))
	en, err := tk.Encode(input, false)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("Sentence: '%v'\n", sentence)
	fmt.Printf("Tokens: %+v\n", en.GetTokens())
	for _, tok := range en.GetTokens() {
		fmt.Printf("'%v'\n", tok)
	}
}
|
# Scratch commands for exercising a local Ethereum-compatible JSON-RPC node.
# Deploy a contract (returns the transaction hash used further below).
curl -d '{"jsonrpc": "2.0", "method": "eth_sendTransaction", "id": 2, "params": [{ "from": "0x28454cad80a6e30087e1d9388a5c073a3cc4babb", "data": "608060405234801561001057600080fd5b506102d7806100206000396000f30060806040526004361061004c576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806317d7de7c14610051578063c47f0027146100e1575b600080fd5b34801561005d57600080fd5b5061006661014a565b6040518080602001828103825283818151815260200191508051906020019080838360005b838110156100a657808201518184015260208101905061008b565b50505050905090810190601f1680156100d35780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b3480156100ed57600080fd5b50610148600480360381019080803590602001908201803590602001908080601f01602080910402602001604051908101604052809392919081815260200183838082843782019150505050505091929192905050506101ec565b005b606060008054600181600116156101000203166002900480601f0160208091040260200160405190810160405280929190818152602001828054600181600116156101000203166002900480156101e25780601f106101b7576101008083540402835291602001916101e2565b820191906000526020600020905b8154815290600101906020018083116101c557829003601f168201915b5050505050905090565b8060009080519060200190610202929190610206565b5050565b828054600181600116156101000203166002900490600052602060002090601f016020900481019282601f1061024757805160ff1916838001178555610275565b82800160010185558215610275579182015b82811115610274578251825591602001919060010190610259565b5b5090506102829190610286565b5090565b6102a891905b808211156102a457600081600090555060010161028c565b5090565b905600a165627a7a72305820377326ce63da87d2b5c3553b61c1c77f7461dae43663df1985efaa0df4a1708a0029" }]}' -H "Content-Type: application/json" localhost:3030
# Current block number.
curl -d '{"jsonrpc": "2.0", "method": "eth_blockNumber", "id": 1}' -H "Content-Type: application/json" localhost:3030
# List of accounts known to the node.
curl -d '{"jsonrpc": "2.0", "method": "eth_accounts", "id": 2, "params": []}' -H "Content-Type: application/json" localhost:3030
# Transaction hash returned by the deployment above, kept for reference.
# (Was a bare line before -- running the file would try to execute it as a command.)
# 0xbe428e07836b858a27b495b0b6a8eabf8f7a43b7d234e4b6a3da5bb4eb9ac86127c225b05a05ffcce8866ad04ff422b7e3cc01ec8fc5402b3c9950edb4ebe197
# Fetch the receipt for that deployment transaction.
curl -d '{"jsonrpc": "2.0", "method": "eth_getTransactionReceipt", "id": 3, "params": ["0xbe428e07836b858a27b495b0b6a8eabf8f7a43b7d234e4b6a3da5bb4eb9ac86127c225b05a05ffcce8866ad04ff422b7e3cc01ec8fc5402b3c9950edb4ebe197"] }' -H "Content-Type: application/json" localhost:3030
|
// Public entry point: re-exports the form API plus the configuration setters
// for presets, validators and the label/error components.
import connectForm from './connect-form';

export {setPreset} from './presets';
export {setValidator} from './validators';
export {setLabelComponent, setErrorComponent} from './components';
export {connectForm};
export default connectForm;
|
import requests
from bs4 import BeautifulSoup

# Target page to scrape. The original snippet used `url` without ever
# defining it, which raised a NameError at runtime.
url = "https://example.com"  # TODO: set the real target URL

# Fetch the webpage; fail fast on HTTP errors instead of parsing an error page.
res = requests.get(url)
res.raise_for_status()
soup = BeautifulSoup(res.text, 'html.parser')

# Extract the data from the HTML using BeautifulSoup
elements = soup.find_all('div', class_="some-class")

# Loop over all the elements
for element in elements:
    # Do something with each element
    ...
package com.efei.proxy.common.bean;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.CharsetUtil;
import lombok.extern.slf4j.Slf4j;
/**
 * Wire-format bean for data transferred through the proxy.
 *
 * <p>Serialized layout (see {@link #toByteBuf(ByteBuf)}): 1 byte {@code type},
 * 1 byte {@code flag}, the UTF-8 bytes of {@code key}, a 4-byte big-endian
 * {@code length}, then the raw {@code content} bytes.</p>
 */
@Slf4j
public class ProxyTcpProtocolBean {

    /** Business type of the payload; values are application defined. */
    private byte type;
    /** Marks the data as request, response, push-request or push-response
     *  (a "push" generally means backend data sent to the user). */
    private byte flag;
    /** Unique identifier of this piece of data. */
    private String key;
    /** Length of {@code content} in bytes. */
    private int length;
    /** Raw payload. */
    private byte[] content;

    public ProxyTcpProtocolBean() {
    }

    /**
     * @param type    business type of the data; application defined
     * @param flag    request/response/push-request/push-response marker
     * @param key     unique identifier of the data
     * @param length  content length in bytes
     * @param content raw payload
     */
    public ProxyTcpProtocolBean(byte type, byte flag, String key, int length, byte[] content) {
        this.type = type;
        this.flag = flag;
        this.key = key;
        this.length = length;
        this.content = content;
    }

    public byte getType() {
        return type;
    }

    public void setType(byte type) {
        this.type = type;
    }

    public String getKey() {
        return key;
    }

    /** @return the UTF-8 encoded bytes of {@code key}. */
    public byte[] getKeyBytes() {
        return key.getBytes(CharsetUtil.UTF_8);
    }

    public void setKey(String key) {
        this.key = key;
    }

    public int getLength() {
        return length;
    }

    public void setLength(int length) {
        this.length = length;
    }

    public byte[] getContent() {
        return content;
    }

    /** @return {@code content} decoded as a UTF-8 string. */
    public String getContentStr() {
        return new String(content, CharsetUtil.UTF_8);
    }

    public void setContent(byte[] content) {
        this.content = content;
    }

    public byte getFlag() {
        return flag;
    }

    public void setFlag(byte flag) {
        this.flag = flag;
    }

    /** Serializes this bean into {@code buf}; see the class Javadoc for the layout. */
    public void toByteBuf(ByteBuf buf) {
        buf.writeByte(this.getType());
        buf.writeByte(this.getFlag());
        buf.writeBytes(this.getKeyBytes());
        buf.writeInt(this.getLength());
        buf.writeBytes(this.getContent());
    }

    /**
     * Human-readable summary of the header fields. {@code content} is
     * omitted -- presumably to keep log lines short; confirm before adding it.
     */
    public String toStr() {
        StringBuilder sb = new StringBuilder();
        sb.append("type=").append(type)
                .append(",flag=").append(flag)
                .append(",key=").append(key)
                .append(",length=").append(length);
        return sb.toString();
    }

    @Override
    public String toString() {
        return toStr();
    }
}
|
<reponame>vaniot-s/sentry
import React from 'react';
import styled from '@emotion/styled';
import {css} from '@emotion/core';
import {OnboardingTaskStatus, OnboardingTaskDescriptor} from 'app/types';
import {t} from 'app/locale';
import theme from 'app/utils/theme';
import ProgressRing from 'app/components/progressRing';
import space from 'app/styles/space';
type Props = {
// Every onboarding task that can be shown to the user.
allTasks: OnboardingTaskDescriptor[];
// The subset of tasks the user has already completed.
completedTasks: OnboardingTaskStatus[];
};
// Onboarding panel header: a progress ring showing the number of remaining
// tasks next to a short heading and explanation.
// NOTE(review): assumes allTasks is non-empty -- `value` would be NaN for an
// empty list; confirm callers guarantee this.
const ProgressHeader = ({allTasks, completedTasks}: Props) => (
<Container>
<ProgressRing
size={88}
barWidth={12}
text={allTasks.length - completedTasks.length}
animateText
value={(completedTasks.length / allTasks.length) * 100}
progressEndcaps="round"
backgroundColor={theme.gray300}
textCss={() => css`
font-size: 26px;
color: ${theme.gray500};
`}
/>
<HeadingText>
<h4>{t('Setup Sentry')}</h4>
<p>{t('Complete these tasks to take full advantage of Sentry in your project')}</p>
</HeadingText>
</Container>
);
export default ProgressHeader;
// Two-column grid: the progress ring in a fixed-width column, the heading
// text filling the remaining space.
const Container = styled('div')`
display: grid;
grid-template-columns: max-content 1fr;
grid-gap: ${space(2)};
padding: ${space(4)};
align-items: center;
`;
// Heading block next to the ring; the paragraph is de-emphasized in gray.
const HeadingText = styled('div')`
h4 {
font-weight: normal;
margin-bottom: ${space(1)};
}
p {
color: ${p => p.theme.gray500};
margin: 0;
line-height: 2rem;
}
`;
|
One approach for sorting an array is to use a sorting algorithm such as Bubble Sort or Insertion Sort.
In the Bubble Sort algorithm, we repeatedly pass through the array, comparing each pair of adjacent elements and swapping them if they are out of order. We keep making passes until a complete pass requires no swaps, at which point the array is sorted.
In the Insertion Sort algorithm, we treat the first element as a sorted prefix, then take each subsequent element and compare it with the elements before it, one at a time, shifting larger elements to the right until we find the element's proper position and insert it there.
<filename>src/inputs/text.js
'use strict';
import React from 'react';
import InputBase from './input-base';
import lengthValidator from '../validators/length';
export default class TextInput extends React.Component {
render() {
let {
length,
validators,
...otherProps
} = this.props;
if (length !== undefined) {
length = lengthValidator(length);
} else {
length = undefined;
}
return (
<InputBase validators={[length].concat(validators)}
viewFor={this.constructor}
{...otherProps} />
);
}
};
|
require 'spec_helper'
module SermonAudio
# Exercises the Configuration mixin: accessor availability, ENV-variable
# fallbacks, explicit overrides, and the error raised when unconfigured.
RSpec.describe Configuration do
# A fresh anonymous class per example, so configuration state never leaks.
subject(:config) {
Class.new do
extend(Configuration)
end
}
it "should allow for member_id and password to be set" do
expect(config).to respond_to :member_id
expect(config).to respond_to :password
expect(config).to respond_to :api_key
end
it "should default to environment variables if they are provided" do
# `env` (spec helper) temporarily sets ENV variables for the block.
env("SERMONAUDIO_MEMBER_ID" => "example") do
expect(config.member_id).to eq "example"
end
env("SERMONAUDIO_PASSWORD" => "<PASSWORD>") do
expect(config.password).to eq "<PASSWORD>"
end
env("SERMONAUDIO_API_KEY" => "example") do
expect(config.api_key).to eq "example"
end
end
it "should allow member_id and password to be explicitly set" do
config.member_id = "other_id"
config.password = "<PASSWORD>"
config.api_key = "default_key"
# Explicitly-set values must win over the environment.
env("SERMONAUDIO_MEMBER_ID" => "example",
"SERMONAUDIO_PASSWORD" => "p",
"SERMONAUDIO_API_KEY" => "other_key",
) do
expect(config.member_id).to eq "other_id"
expect(config.password).to eq "<PASSWORD>"
expect(config.api_key).to eq "default_key"
end
end
it "should raise and error if no values are set" do
env("SERMONAUDIO_MEMBER_ID" => nil,
"SERMONAUDIO_PASSWORD" => nil,
"SERMONAUDIO_API_KEY" => nil,
) do
expect { config.member_id }.to raise_error Configuration::MissingConfiguration, /configuration\.member_id/
expect { config.password }.to raise_error Configuration::MissingConfiguration, /configuration\.password/
expect { config.api_key }.to raise_error Configuration::MissingConfiguration, /configuration\.api_key/
end
end
end
end
|
import cv2
import os


def _save_and_report(path, image, params, label):
    """Write `image` to `path` with the given imwrite params and print its size in KB."""
    cv2.imwrite(path, image, params)
    size_kb = os.path.getsize(path) / 1024  # bytes -> kilobytes
    print(f"{label} file size: {size_kb:.2f} KB")


# Open the image file
input_image_path = "input_image.jpg"
if not os.path.exists(input_image_path):
    print("Error: Input image file not found.")
else:
    image = cv2.imread(input_image_path)

    # Save the image in PNG format (compression level 0-9; 9 = smallest/slowest)
    _save_and_report("output_image.png", image, [cv2.IMWRITE_PNG_COMPRESSION, 9], "PNG")

    # Save the image in JPEG format (quality 0-100)
    _save_and_report("output_image.jpg", image, [cv2.IMWRITE_JPEG_QUALITY, 95], "JPEG")

    # Save the image in TIFF format.
    # NOTE(review): IMWRITE_TIFF_COMPRESSION takes a libtiff COMPRESSION_*
    # scheme id (1=none, 5=LZW, 8=deflate, ...), not a 0-9 level -- confirm
    # that the value 9 is intentional.
    _save_and_report("output_image.tiff", image, [cv2.IMWRITE_TIFF_COMPRESSION, 9], "TIFF")
require 'spec_helper'
# Covers Chewy::Type::Import::BulkBuilder: translating collections of objects
# to index/delete into Elasticsearch bulk request bodies.
describe Chewy::Type::Import::BulkBuilder do
before { Chewy.massacre }
subject { described_class.new(type, index: index, delete: delete, fields: fields) }
let(:type) { PlacesIndex::City }
let(:index) { [] }
let(:delete) { [] }
let(:fields) { [] }
describe '#bulk_body' do
context 'simple bulk', :orm do
before do
stub_model(:city)
stub_index(:places) do
define_type City do
field :name, :rating
end
end
end
let(:cities) { Array.new(3) { |i| City.create!(id: i + 1, name: "City#{i + 17}", rating: 42) } }
# No objects to index or delete -> empty bulk body.
specify { expect(subject.bulk_body).to eq([]) }
context do
let(:index) { cities }
specify do
expect(subject.bulk_body).to eq([
{index: {_id: 1, data: {'name' => 'City17', 'rating' => 42}}},
{index: {_id: 2, data: {'name' => 'City18', 'rating' => 42}}},
{index: {_id: 3, data: {'name' => 'City19', 'rating' => 42}}}
])
end
end
context do
let(:delete) { cities }
specify do
expect(subject.bulk_body).to eq([
{delete: {_id: 1}}, {delete: {_id: 2}}, {delete: {_id: 3}}
])
end
end
context do
let(:index) { cities.first(2) }
let(:delete) { [cities.last] }
specify do
expect(subject.bulk_body).to eq([
{index: {_id: 1, data: {'name' => 'City17', 'rating' => 42}}},
{index: {_id: 2, data: {'name' => 'City18', 'rating' => 42}}},
{delete: {_id: 3}}
])
end
# Restricting :fields switches index entries to partial `update` actions.
context ':fields' do
let(:fields) { %w[name] }
specify do
expect(subject.bulk_body).to eq([
{update: {_id: 1, data: {doc: {'name' => 'City17'}}}},
{update: {_id: 2, data: {doc: {'name' => 'City18'}}}},
{delete: {_id: 3}}
])
end
end
end
end
context 'parent-child relationship', :orm do
before do
stub_model(:country)
stub_model(:city)
adapter == :sequel ? City.many_to_one(:country) : City.belongs_to(:country)
end
before do
stub_index(:places) do
define_type Country do
field :name
end
define_type City do
root parent: 'country', parent_id: -> { country_id } do
field :name
field :rating
end
end
end
end
before { PlacesIndex::Country.import(country) }
let(:country) { Country.create!(id: 1, name: 'country') }
let(:another_country) { Country.create!(id: 2, name: 'another country') }
let(:city) { City.create!(id: 4, country_id: country.id, name: 'city', rating: 42) }
context 'indexing' do
let(:index) { [city] }
specify do
expect(subject.bulk_body).to eq([
{index: {_id: city.id, parent: country.id, data: {'name' => 'city', 'rating' => 42}}}
])
end
context do
let(:fields) { %w[name] }
specify do
expect(subject.bulk_body).to eq([
{update: {_id: city.id, parent: country.id, data: {doc: {'name' => 'city'}}}}
])
end
end
end
# Re-parenting produces a delete under the old parent followed by a full
# re-index under the new one -- even when :fields is restricted.
context 'updating parent' do
before do
PlacesIndex::City.import(city)
city.update_attributes(country_id: another_country.id)
end
let(:index) { [city] }
specify do
expect(subject.bulk_body).to eq([
{delete: {_id: city.id, parent: country.id.to_s}},
{index: {_id: city.id, parent: another_country.id, data: {'name' => 'city', 'rating' => 42}}}
])
end
context do
let(:fields) { %w[name] }
specify do
expect(subject.bulk_body).to eq([
{delete: {_id: city.id, parent: country.id.to_s}},
{index: {_id: city.id, parent: another_country.id, data: {'name' => 'city', 'rating' => 42}}}
])
end
end
end
context 'destroying' do
before { PlacesIndex::City.import(city) }
let(:delete) { [city] }
specify do
expect(subject.bulk_body).to eq([
{delete: {_id: city.id, parent: country.id.to_s}}
])
end
end
end
# A root-level id proc replaces the model id as the document _id.
context 'custom id', :orm do
before do
stub_model(:city)
end
before do
stub_index(:places) do
define_type City do
root id: -> { name } do
field :rating
end
end
end
end
let(:london) { City.create(id: 1, name: 'London', rating: 4) }
specify do
expect { PlacesIndex::City.import(london) }
.to update_index(PlacesIndex::City).and_reindex(london.name)
end
context 'indexing' do
let(:index) { [london] }
specify do
expect(subject.bulk_body).to eq([
{index: {_id: london.name, data: {'rating' => 4}}}
])
end
end
context 'destroying' do
let(:delete) { [london] }
specify do
expect(subject.bulk_body).to eq([
{delete: {_id: london.name}}
])
end
end
end
# Crutches are batch-computed lookup tables available to field value procs.
context 'crutches' do
before do
stub_index(:places) do
define_type :city do
crutch :names do |collection|
collection.map { |item| [item.id, "Name#{item.id}"] }.to_h
end
field :name, value: ->(o, c) { c.names[o.id] }
end
end
end
let(:index) { [double(id: 42)] }
specify do
expect(subject.bulk_body).to eq([
{index: {_id: 42, data: {'name' => 'Name42'}}}
])
end
context 'witchcraft' do
before { PlacesIndex::City.witchcraft! }
specify do
expect(subject.bulk_body).to eq([
{index: {_id: 42, data: {'name' => 'Name42'}}}
])
end
end
end
# Index entries with blank/absent ids are sent without _id; blank-id delete
# entries are dropped entirely.
context 'empty ids' do
before do
stub_index(:places) do
define_type :city do
field :name
end
end
end
let(:index) { [{id: 1, name: 'Name0'}, double(id: '', name: 'Name1'), double(name: 'Name2')] }
let(:delete) { [double(id: '', name: 'Name3'), {name: 'Name4'}, '', 2] }
specify do
expect(subject.bulk_body).to eq([
{index: {_id: 1, data: {'name' => 'Name0'}}},
{index: {data: {'name' => 'Name1'}}},
{index: {data: {'name' => 'Name2'}}},
{delete: {_id: {'name' => 'Name4'}}},
{delete: {_id: 2}}
])
end
context do
let(:fields) { %w[name] }
specify do
expect(subject.bulk_body).to eq([
{update: {_id: 1, data: {doc: {'name' => 'Name0'}}}},
{delete: {_id: {'name' => 'Name4'}}},
{delete: {_id: 2}}
])
end
end
end
end
describe '#index_objects_by_id' do
before do
stub_index(:places) do
define_type :city do
field :name
end
end
end
let(:index) { [double(id: 1), double(id: 2), double(id: ''), double] }
let(:delete) { [double(id: 3)] }
# Only objects to be indexed that have non-blank ids appear in the map,
# keyed by the id's string form.
specify { expect(subject.index_objects_by_id).to eq('1' => index.first, '2' => index.second) }
end
end
|
#!/usr/bin/env bash
# Installs (or upgrades to) a pinned version of reconfigure-pipeline,
# skipping the download when that exact version is already present.

. ~/workspace/metric-store-dotfiles/support/helpers.sh

# identify_package is expected to export $VERSION (provided by helpers.sh).
identify_package "reconfigure-pipeline" 0.28.0

if [ -f "/usr/local/share/reconfigure-pipeline-$VERSION" ]; then
echo_already_installed
exit
fi

start_install

# -f: don't fail when no previous version is installed (glob stays unquoted on purpose).
sudo rm -f /usr/local/share/reconfigure-pipeline-*
wget -O "reconfigure-pipeline-$VERSION.tar.gz" "https://github.com/pivotal-cf/reconfigure-pipeline/releases/download/v$VERSION/reconfigure-pipeline-darwin.tar.gz"
tar -xzvf "reconfigure-pipeline-$VERSION.tar.gz"
sudo mv reconfigure-pipeline "/usr/local/share/reconfigure-pipeline-$VERSION"
sudo chmod +x "/usr/local/share/reconfigure-pipeline-$VERSION"
sudo ln -sf "/usr/local/share/reconfigure-pipeline-$VERSION" /usr/local/bin/reconfigure-pipeline
rm "reconfigure-pipeline-$VERSION.tar.gz"

end_install
|
#!/usr/bin/env bash
# Generates the per-gem documentation for the kit project and assembles the
# commit that gets force-pushed to the gh-pages branch.
set -e
exists() { command -v "$1" >/dev/null 2>&1; }
fn_readlink() { if exists greadlink ; then greadlink "$@" ; else readlink "$@" ; fi; }
# SETUP: please have a look at this! -------------------------------------------
# The source branch. Change this when working on this script.
branch_master='master'
# The target branch.
branch_documentation='gh-pages'
# Kit directory.
kit_dir=$(fn_readlink -f "$(dirname $(fn_readlink -f "${0}"))/..")
# Documentation build directory.
documentation_build_dir="${kit_dir}/docs/dist"
# The list of gems we want to generate documentation for.
# Add target gems here.
declare -A documentation_targets=(
["kit"]=$kit_dir
["kit-api"]="${kit_dir}/libraries/kit-api"
["kit-auth"]="${kit_dir}/libraries/kit-auth"
["kit-contract"]="${kit_dir}/libraries/kit-contract"
["kit-doc"]="${kit_dir}/libraries/kit-doc"
["kit-organizer"]="${kit_dir}/libraries/kit-organizer"
["kit-pagination"]="${kit_dir}/libraries/kit-pagination"
["kit-router"]="${kit_dir}/libraries/kit-router"
)
#typeset -p documentation_targets
# SAFETY CHECKS ----------------------------------------------------------------
# Usage
usage() {
cat <<EOM
GENERATE ALL DOCUMENTATION
$(basename $0) generate-documentation
GENERATE GH PAGE COMMIT
$(basename $0) create-gh-pages-commit
EOM
exit
}
# Ensure we are on a clean install.
# NOTE: ${response,,} (lowercase expansion) requires bash 4+.
ensure_clean_install() {
echo "This script generates the documentation to be published on the project GitHub pages."
echo "PLEASE use a clean copy of the repo to run this!"
echo "We are currently in \`${kit_dir}\`"
read -r -p "Is this a clean install? [y/N] " response
if [[ "${response,,}" =~ ^(yes|y)$ ]]; then
echo " Good."
else
echo " Aborted. Please run \`git clone git@github.com:rubykit/kit.git\` somewhere clean before running this."
exit
fi
}
# Clean documentation build dir (should be done in the rake task too but let's be sure).
clean_documentation_build_dir() {
documentation_build_dir_content="$documentation_build_dir/*"
read -r -p $'\e[31mAbout to rm -rf `'$documentation_build_dir_content$'`, does the documentation build dir looks right? [y/N] \e[0m' response
if [[ "${response,,}" =~ ^(yes|y)$ ]]; then
rm -rf $documentation_build_dir_content
mkdir -p $documentation_build_dir
else
echo " Aborted."
exit
fi
}
# GENERATE DOCUMENTATION FILES -------------------------------------------------
# Generate documentation for each gem.
generate_documentation() {
for documentation_target_name in "${!documentation_targets[@]}"; do
documentation_target_src_path=${documentation_targets[$documentation_target_name]}
documentation_target_dst_path="${documentation_build_dir}/${documentation_target_name}"
echo " Gem: ${documentation_target_name} (src: \`${documentation_target_src_path}\`, dst: \`${documentation_target_dst_path}\`)"
cd $kit_dir
git checkout --quiet $branch_master
cd $documentation_target_src_path
# Create the doc directories for this gem
mkdir -p $documentation_target_dst_path
bundle install
KIT_DOC_OUTPUT_DIR_ALL_VERSIONS=$documentation_target_dst_path bundle exec rake documentation:all_versions:generate
done
# Go back to the initial state
cd $kit_dir
git checkout --quiet $branch_master
# Add the top level `index.html`
bundle exec rake documentation:all_versions:generate:global_assets
}
# GITHUB PAGES BRANCH SETUP ----------------------------------------------------
# Recreate the documentation branch
create_gh_pages_commit() {
# Recreate the target branch from the current HEAD (safe delete: -d refuses
# to drop unmerged work).
if [ -n "`git show-ref refs/heads/$branch_documentation`" ]; then
git branch -d $branch_documentation
fi
git checkout -b $branch_documentation
# Move documentation files to top level
documentation_files=$(cd ${documentation_build_dir}; ls -1)
mv ${documentation_build_dir}/* .
git add $documentation_files
# Move CNAME file for github as we are about to force push
git mv ./docs/CNAME .
# Commit the generated files.
git commit -m "KIT DOCUMENTATION - generated on `date '+%F@%H-%M-%S'`"
echo "We should be good to go! Please do check that:"
echo " - You are currently on the expected documentation branch:"
echo " Expected: \`$branch_documentation\`, Current: \``git rev-parse --abbrev-ref HEAD`\` (if it's not \`gh-pages\`, please double check!)"
echo " - Have look at the last commit that was auto-generated , run: \`git log --name-status HEAD^..HEAD
\` we should be in a clean state."
echo " - If everything looks good, run \`git push --force origin $branch_documentation:$branch_documentation\`"
}
# EXECUTION --------------------------------------------------------------------
[ -z $1 ] && { usage; }
if [ "${1}" == "generate-documentation" ]; then
ensure_clean_install
clean_documentation_build_dir
generate_documentation
elif [ "${1}" == "create-gh-pages-commit" ]; then
ensure_clean_install
create_gh_pages_commit
else
usage
fi
|
#!/bin/bash
# Clears the screen and prints the tool banner/header.
function mainface {
clear
echo " ____ _ _ "
echo " / __ \ | | | | "
echo " | | | |_ _____ _ __ _ __ ___| |___ _____ _ __| | __"
echo " | | | \ \ / / _ \ '__| '_ \ / _ \ __\ \ /\ / / _ \| '__| |/ /"
echo " | |__| |\ V / __/ | | | | | __/ |_ \ V V / (_) | | | < "
echo " \____/ \_/ \___|_| |_| |_|\___|\__| \_/\_/ \___/|_| |_|\_|"
echo ""
echo "-----------------------------------------------------------------"
echo " OpenSSL Self-Signed Certificate Tool v0.1 | kzofajar@gmail.com"
echo "-----------------------------------------------------------------"
echo ""
}
# Prints the version, about text and credits.
function toolabout {
echo " Version 0.1 (26/07/2017)"
echo "-------------"
echo "This tool is created to helps you create a fully Self-Signed"
echo "Certificate from Root, Intermediate, and Website Certificate"
echo "Without remembering very long commands."
echo ""
echo "This tool's workflows is based on one of my blog posts"
echo "(http://penguinstunnel.blogspot.co.id) about Self-Signed Cert."
echo ""
echo "Created by Fajar Ru @home"
echo "Thank you for using this tool! :)"
echo ""
echo " Credits"
echo "----------"
echo ""
echo "OVN ASCII Art, Created with (http://patorjk.com/software/taag/)"
echo "My Self-Signed Cert Post Reference (https://jamielinux.com/)"
echo ""
}
# Creates the CA directory layout (root + intermediate) and writes the two
# openssl.cnf configuration files.
# Fix applied: the intermediate config previously declared its section as
# "[ CA_default ]." (stray trailing dot), which does not match the root
# config and is not a valid section header.
function dirsetup {
# ASK LOCATION
echo -n "Enter a location where the certificates will be located (e.g: /home/yourname/openssl): "
read _startplace
echo "Creating Directory ($_startplace)..."
$(mkdir -p $_startplace/certs $_startplace/crl $_startplace/newcerts $_startplace/private && chmod 700 $_startplace/private)
$(touch $_startplace/index.txt && echo 1000 > $_startplace/serial)
$(mkdir -p $_startplace/intermediate/certs $_startplace/intermediate/csr $_startplace/intermediate/crl $_startplace/intermediate/newcerts $_startplace/intermediate/private && chmod 700 $_startplace/intermediate/private)
$(touch $_startplace/intermediate/index.txt && echo 1000 > $_startplace/intermediate/serial)
sleep 0.5
echo "Creating Root Certificate Configuration..." && sleep 0.2
# CA CONFIGURATION
# NOTE(review): '>>' appends -- re-running dirsetup on the same location
# duplicates the whole config; consider '>' if overwrite is acceptable.
echo "
[ ca ]
default_ca = CA_default
[ CA_default ]
dir = "$_startplace"
certs = \$dir/certs
crl_dir = \$dir/crl
new_certs_dir = \$dir/newcerts
database = \$dir/index.txt
serial = \$dir/serial
RANDFILE = \$dir/private/.rand
private_key = \$dir/private/ca.key.pem
certificate = \$dir/certs/ca.cert.pem
crlnumber = \$dir/crlnumber
crl = \$dir/crl/ca.crl.pem
crl_extensions = crl_ext
default_crl_days = 30
default_md = sha256
name_opt = ca_default
cert_opt = ca_default
default_days = 375
preserve = no
policy = policy_strict
[ policy_strict ]
countryName = match
stateOrProvinceName = match
organizationName = match
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ policy_loose ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ req ]
default_bits = 2048
distinguished_name = req_distinguished_name
string_mask = utf8only
default_md = sha256
x509_extensions = v3_ca
[ req_distinguished_name ]
countryName = Country Name (2 letter code)
stateOrProvinceName = State or Province Name
localityName = Locality Name
0.organizationName = Organization Name
organizationalUnitName = Organizational Unit Name
commonName = Common Name
emailAddress = Email Address
countryName_default = ID
stateOrProvinceName_default = Central Java
localityName_default = Klaten
0.organizationName_default = Overnetwork
organizationalUnitName_default =
emailAddress_default =
[ v3_ca ]
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always,issuer
basicConstraints = critical, CA:true
keyUsage = critical, digitalSignature, cRLSign, keyCertSign
[ v3_intermediate_ca ]
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always,issuer
basicConstraints = critical, CA:true, pathlen:0
keyUsage = critical, digitalSignature, cRLSign, keyCertSign
[ usr_cert ]
basicConstraints = CA:FALSE
nsCertType = client, email
nsComment = 'OpenSSL Generated Client Certificate'
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
keyUsage = critical, nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage = clientAuth, emailProtection
[ server_cert ]
basicConstraints = CA:FALSE
nsCertType = server
nsComment = 'OpenSSL Generated Server Certificate'
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer:always
keyUsage = critical, digitalSignature, keyEncipherment
extendedKeyUsage = serverAuth
[ crl_ext ]
authorityKeyIdentifier=keyid:always
[ ocsp ]
basicConstraints = CA:FALSE
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
keyUsage = critical, digitalSignature
extendedKeyUsage = critical, OCSPSigning
" >> $_startplace/openssl.cnf
echo "Creating Intermediate Certificate Configuration..." && sleep 0.2
# INTERMEDIATE CONFIGURATION (same structure; loose policy, its own dir tree)
echo "
[ ca ]
default_ca = CA_default
[ CA_default ]
dir = "$_startplace/intermediate"
certs = \$dir/certs
crl_dir = \$dir/crl
new_certs_dir = \$dir/newcerts
database = \$dir/index.txt
serial = \$dir/serial
RANDFILE = \$dir/private/.rand
private_key = \$dir/private/intermediate.key.pem
certificate = \$dir/certs/intermediate.cert.pem
crlnumber = \$dir/crlnumber
crl = \$dir/crl/intermediate.crl.pem
crl_extensions = crl_ext
default_crl_days = 30
default_md = sha256
name_opt = ca_default
cert_opt = ca_default
default_days = 375
preserve = no
policy = policy_loose
[ policy_strict ]
countryName = match
stateOrProvinceName = match
organizationName = match
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ policy_loose ]
countryName = optional
stateOrProvinceName = optional
localityName = optional
organizationName = optional
organizationalUnitName = optional
commonName = supplied
emailAddress = optional
[ req ]
default_bits = 2048
distinguished_name = req_distinguished_name
string_mask = utf8only
default_md = sha256
x509_extensions = v3_ca
[ req_distinguished_name ]
countryName = Country Name (2 letter code)
stateOrProvinceName = State or Province Name
localityName = Locality Name
0.organizationName = Organization Name
organizationalUnitName = Organizational Unit Name
commonName = Common Name
emailAddress = Email Address
countryName_default = ID
stateOrProvinceName_default = Central Java
localityName_default = Klaten
0.organizationName_default = Overnetwork
organizationalUnitName_default =
emailAddress_default =
[ v3_ca ]
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always,issuer
basicConstraints = critical, CA:true
keyUsage = critical, digitalSignature, cRLSign, keyCertSign
[ v3_intermediate_ca ]
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid:always,issuer
basicConstraints = critical, CA:true, pathlen:0
keyUsage = critical, digitalSignature, cRLSign, keyCertSign
[ usr_cert ]
basicConstraints = CA:FALSE
nsCertType = client, email
nsComment = 'OpenSSL Generated Client Certificate'
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
keyUsage = critical, nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage = clientAuth, emailProtection
[ server_cert ]
basicConstraints = CA:FALSE
nsCertType = server
nsComment = 'OpenSSL Generated Server Certificate'
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer:always
keyUsage = critical, digitalSignature, keyEncipherment
extendedKeyUsage = serverAuth
[ crl_ext ]
authorityKeyIdentifier=keyid:always
[ ocsp ]
basicConstraints = CA:FALSE
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
keyUsage = critical, digitalSignature
extendedKeyUsage = critical, OCSPSigning
" >> $_startplace/intermediate/openssl.cnf
echo "Done!"
sleep 0.2
}
function create_root_cert {
    # Ask for the CA working directory prepared by dirsetup.
    echo -n "Enter your defined Certificate Location: "
    read _certlocation
    # Ask how long the self-signed root certificate should be valid.
    echo ""
    echo -n "Enter Root Certificate expiration time in days: "
    read _rootcertexp
    # Generate the AES-256-encrypted 4096-bit RSA root key.
    echo ""
    echo "Generating Root Private Key with AES256 4096-bit..."
    openssl genrsa -aes256 -out "$_certlocation/private/ca.key.pem" 4096
    # Self-sign the root certificate with the v3_ca extensions.
    echo ""
    echo "Generating Root Certificate with " $_rootcertexp " Days Expiration..."
    openssl req -config "$_certlocation/openssl.cnf" -key "$_certlocation/private/ca.key.pem" -new -x509 -days "$_rootcertexp" -sha256 -extensions v3_ca -out "$_certlocation/certs/ca.cert.pem"
    # Restrict access: key owner-read-only, certificate world-readable.
    # (Previously chmod 400 ran twice on the key; once is enough, and the
    # paths are now quoted so locations with spaces work.)
    echo ""
    echo "Adjusting Privileges..."
    chmod 400 "$_certlocation/private/ca.key.pem"
    chmod 444 "$_certlocation/certs/ca.cert.pem"
    # Dump the resulting certificate for manual inspection.
    echo ""
    echo "Done! Certificate Result:"
    openssl x509 -noout -text -in "$_certlocation/certs/ca.cert.pem"
}
# Generate an intermediate CA: key, CSR signed by the root, chain file,
# and an initial CRL number. Expects the directory layout from dirsetup.
function create_intermediate_cert {
    # ASK LOCATION
    echo -n "Enter your defined Certificate Location: "
    read _certlocation
    # ASK CERTIFICATE EXPIRATION
    echo ""
    echo "Enter Intermediate Certificate expiration time in days."
    echo "It must be less or no more than Root Certificate!"
    echo -n "Duration: "
    read _intcertexp
    # GENERATE INTERMEDIATE PRIVATE KEY (AES-256 encrypted, 4096-bit RSA)
    echo ""
    echo "Generating Intermediate Private Key with AES256 4096-bit..."
    openssl genrsa -aes256 -out $_certlocation/intermediate/private/intermediate.key.pem 4096
    # GENERATE INTERMEDIATE CSR using the intermediate openssl.cnf
    echo ""
    echo "Generating Intermediate Cert Signing Request..."
    echo "Please note that Intermediate CSR Data should be the same as Root except for CN!"
    openssl req -config $_certlocation/intermediate/openssl.cnf -new -sha256 -key $_certlocation/intermediate/private/intermediate.key.pem -out $_certlocation/intermediate/csr/intermediate.csr.pem
    # SIGNING INTERMEDIATE CSR TO CERTIFICATE — note this deliberately uses the
    # ROOT config ($_certlocation/openssl.cnf) so the root CA signs the CSR.
    echo ""
    echo "Generating and Signing Intermediate Certificate with " $_intcertexp " Days Expiration..."
    openssl ca -config $_certlocation/openssl.cnf -extensions v3_intermediate_ca -days $_intcertexp -notext -md sha256 -in $_certlocation/intermediate/csr/intermediate.csr.pem -out $_certlocation/intermediate/certs/intermediate.cert.pem
    # VERIFY INTERMEDIATE CERT TO ROOT CERT
    echo ""
    echo "Verifying Intermediate Certificate to Root..."
    openssl verify -CAfile $_certlocation/certs/ca.cert.pem $_certlocation/intermediate/certs/intermediate.cert.pem
    # CHAINING THIS INTERMEDIATE CERT WITH ROOT CERT INTO ONE FILE
    # (intermediate first, root last — the order clients expect)
    echo ""
    echo "Generating Certificate Chain..."
    cat $_certlocation/intermediate/certs/intermediate.cert.pem $_certlocation/certs/ca.cert.pem > $_certlocation/intermediate/certs/ca-chain.cert.pem
    # ADJUST ACCESS PRIVILEGES (key private, certs world-readable)
    echo ""
    echo "Adjusting Privileges..."
    chmod 400 $_certlocation/intermediate/private/intermediate.key.pem
    chmod 444 $_certlocation/intermediate/certs/intermediate.cert.pem
    chmod 444 $_certlocation/intermediate/certs/ca-chain.cert.pem
    # INCREMENT CRL — seed the CRL serial counter used by 'openssl ca -gencrl'
    echo ""
    echo "Incrementing Certificate Revocation Lists..."
    echo 1000 > $_certlocation/intermediate/crlnumber
    # CERTIFICATE CHECK
    echo ""
    echo "Done! Certificate Result:"
    openssl x509 -noout -text -in $_certlocation/intermediate/certs/intermediate.cert.pem
}
# Issue a server (web) certificate signed by the intermediate CA.
function create_website_cert {
    # Ask for the CA working directory prepared by dirsetup.
    echo -n "Enter your defined Certificate Location: "
    read _certlocation
    # Ask for the certificate lifetime.
    echo ""
    echo -n "Enter Certificate expiration time in days: "
    read _webcertexp
    # Generate the AES-256-encrypted 2048-bit RSA server key.
    echo ""
    echo "Generating Private Key with AES256 2048-bit..."
    openssl genrsa -aes256 -out $_certlocation/intermediate/private/webcertificate.key.pem 2048
    # Create the CSR using the intermediate CA's config.
    echo ""
    echo "Generating Web Certificate Signing Request..."
    openssl req -config $_certlocation/intermediate/openssl.cnf -key $_certlocation/intermediate/private/webcertificate.key.pem -new -sha256 -out $_certlocation/intermediate/csr/webcertificate.csr.pem
    # Sign the CSR with the intermediate CA using the server_cert extensions.
    echo ""
    echo "Generating and Signing Web Certificate with " $_webcertexp " Days Expire..."
    openssl ca -config $_certlocation/intermediate/openssl.cnf -extensions server_cert -days $_webcertexp -notext -md sha256 -in $_certlocation/intermediate/csr/webcertificate.csr.pem -out $_certlocation/intermediate/certs/webcertificate.cert.pem
    # Verify against the chain file created by create_intermediate_cert.
    # FIX: both paths previously pointed at $_certlocation/certs/, but the
    # chain and the web certificate live under intermediate/certs/.
    echo ""
    echo "Verifying Web Certificate..."
    openssl verify -CAfile $_certlocation/intermediate/certs/ca-chain.cert.pem $_certlocation/intermediate/certs/webcertificate.cert.pem
    # Restrict access: key owner-read-only, certificate world-readable.
    # FIX: the chmod 444 previously targeted intermediate/private/, where the
    # certificate is never written; it is in intermediate/certs/.
    echo ""
    echo "Adjusting Privileges..."
    chmod 400 $_certlocation/intermediate/private/webcertificate.key.pem
    chmod 444 $_certlocation/intermediate/certs/webcertificate.cert.pem
    # Show the resulting certificate and tell the user where the files are.
    echo ""
    echo "Certificate Result:"
    openssl x509 -noout -text -in $_certlocation/intermediate/certs/webcertificate.cert.pem
    echo ""
    echo "These are your Hot Certificate, Please move these files ASAP to avoid being overwritten:"
    echo "- Certificate: " $_certlocation "/intermediate/certs/webcertificate.cert.pem"
    echo "- Private Key: " $_certlocation "/intermediate/private/webcertificate.key.pem"
    echo ""
    echo "Thank you for using this small tool! :) --Fajar Ru (kzofajar@gmail.com)"
}
# Draw the banner once at startup.
mainface
# Main menu entries ("pilihan" = "choices"); order matters only for display.
pilihan=(
    'Directory & Config Setup'
    'Create: Root Cert'
    'Create: Intermediate Cert'
    'Create: Website Cert'
    'Help/About'
    'Exit'
)
PS3='Your Choice: '
# Interactive dispatch loop: runs until the user picks "Exit".
select opt in "${pilihan[@]}"
do
    case $opt in
        "Directory & Config Setup")
            echo ""
            dirsetup
            ;;
        "Create: Root Cert")
            echo ""
            create_root_cert
            ;;
        "Create: Intermediate Cert")
            echo ""
            create_intermediate_cert
            ;;
        "Create: Website Cert")
            echo ""
            create_website_cert
            ;;
        "Help/About")
            mainface
            toolabout
            ;;
        "Exit")
            echo 'Goodbye..'
            exit
            ;;
        *)
            # select leaves $opt empty on invalid input.
            echo 'Please choose a number.'
            ;;
    esac
done
|
<reponame>collaide/repository-manager
$:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "repository_manager/version"
# Describe your gem and declare its dependencies:
# Gem specification for repository-manager: a Rails engine for per-instance
# file/folder repositories with sharing and permission management.
Gem::Specification.new do |s|
  s.name = 'repository-manager'
  s.version = RepositoryManager::VERSION
  s.authors = ['<NAME>']
  s.email = ['<EMAIL>']
  s.homepage = 'https://github.com/Texicitys/repository-manager'
  s.summary = "Ruby on Rails plugin (gem) for managing repositories ( files / folders / permissions / sharings )."
  #s.description = "Repository Manager help you to easily manage your files and folders. Each instance has its own repository. You can share these items with other instance with a complet flexible permission control. "
  s.description = "This project is based on the need for a system for easily create/delete files and folders in a repository. For sharing these repositories easily with other object with a flexible and complete permissions management. Each instance (users, groups, etc..) can have it own repositories (with files and folders). It can manage them easily (create, delete, edit, move, copy, etc) and sharing them with other instance."
  # Package everything tracked by git; specs double as test files.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- spec/*`.split("\n")
  s.license = 'MIT'
  # NOTE(review): rspec-rails ~> 2.6 predates Rails 5.1 — verify the test
  # suite actually runs against this Rails version.
  s.add_runtime_dependency('rails', '~> 5.1.0')
  # s.add_development_dependency('factory_bot', '>= 2.6.0')
  s.add_development_dependency('factory_bot', '~> 4.7.0')
  s.add_development_dependency 'sqlite3'
  s.add_development_dependency('rspec-rails', '~> 2.6.1')
  s.add_development_dependency('spork')
  s.add_development_dependency('byebug')
  # Tree structure for folders.
  s.add_runtime_dependency 'ancestry'
  # s.add_runtime_dependency('carrierwave', '>= 0.5.8')
  s.add_runtime_dependency('carrierwave', '~> 1.2.0')
  s.add_runtime_dependency 'rubyzip', '~> 1.0.0'#, :require => 'zip/zip'
  # s.add_runtime_dependency 'paper_trail', '~> 3.0.1'
  s.add_runtime_dependency 'paper_trail', '~> 5.2.0'
end
|
<reponame>jpic/pinax
from django import template
register = template.Library()
@register.inclusion_tag("threadedcomments/comments.html", takes_context=True)
def comments(context, obj):
    """Render the threaded-comments snippet for ``obj``.

    Exposes the target object together with the current request and user
    taken from the parent template context.
    """
    tag_context = {"object": obj}
    tag_context["request"] = context["request"]
    tag_context["user"] = context["user"]
    return tag_context
|
<reponame>jpavlav/cbtool<gh_stars>10-100
/*
# Copyright (c) 2017 DigitalOcean, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/
/*
Example use of the CloudBench golang bindings.
@author: <NAME>
*/
package main
import (
"fmt"
"github.com/ibmcb/cbtool/lib/api"
)
func main() {
	// NOTE(review): the identifier api_service_client must be the declared
	// package name of the imported .../lib/api path — confirm, since the
	// import's last path element is "api".
	// The local variable `api` shadows any package of that name; all calls
	// below go through this client value.
	api := api_service_client.APIClient{Address: "http://localhost:7070"}
	name := "MYSIMCLOUD"
	// List all VMs on the cloud, then print each VM's latest provisioning
	// latency sample.
	r, err := api.Call("vmlist", name)
	if err == nil && r["result"] != nil {
		vms := r["result"].([]interface{})
		for idx := range vms {
			vm := vms[idx].(map[string]interface{})
			iter, err := api.Get_latest_management_data(name, vm["uuid"].(string), vm["experiment_id"].(string))
			if err != nil {
				fmt.Printf("ERROR! %s\n", err)
			}
			var data map[string]interface{}
			for iter.Next(&data) {
				// NOTE(review): the .(int) assertion panics if this value was
				// decoded from JSON (numbers arrive as float64) — confirm the
				// client yields ints here.
				fmt.Printf("VM: %s Provisioning time: %d\n", vm["name"], data["mgt_003_provisioning_request_completed"].(int))
			}
		}
	}
	api.Close()
}
|
package controllers
import (
"net/http"
"review/logger"
"review/models"
"review/request"
"review/response"
)
// extension of ProjectController

// @Title MakeOneAssignment
// @Description create a user assignment for a project (创建一个人员分配记录: 项目-用户-角色) json字段说明: operator-进行角色分配的管理员id(根据token自行填充), project_id-该项分配记录对应的项目id, user_id-要进行分配的用户id, role-要分配的角色(1-项目管理员, 2-专家, 3-学科助理, 4-命题教师, 5-外审人员)
// @Param token header string true "user token get at login"
// @Param json body request.MakeOneAssignment true "assignment information"
// @Success 200 {object} response.Default
// @Failure 400 "invalid token(body)"
// @router /assign [post]
func (p *ProjectController) MakeOneAssignment() {
	// Decode the assignment payload from the request body.
	var newAssign request.MakeOneAssignment
	err := unmarshalBody(p.Ctx.Input.RequestBody, &newAssign)
	if err != nil {
		p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed")
		return
	}
	// Header.Get returns "" when the header is absent, avoiding the
	// index-out-of-range panic that Header["Token"][0] caused for
	// requests without a Token header.
	creator, err := parseUserToken(p.Ctx.Request.Header.Get("Token"))
	if err != nil {
		logger.Recorder.Warning("[user token] parse user token error: " + err.Error())
		p.respondJson(http.StatusBadRequest, response.FAIL, "invalid token")
		return
	}
	// The operator is always the authenticated admin, never client-supplied.
	newAssign.Operator = creator
	resp, code := models.MakeOneAssignment(&newAssign)
	p.respondJson(http.StatusOK, code, "", resp)
}
// @Title MakeAssignmentGroup
// @Description 创建一个项目的人员分配(同时分配多种角色); 字段说明: admins-要分配为管理员的人员id数组, experts-分配为专家的id数组, assistants-分配为学科助理的id, teachers-分配为命题教师的id, out_experts-分配为外审人员的id, project_id-所属项目id, operator-进行分配到管理员id
// @Param token header string true "user token get at login"
// @Param json body request.MakeAssignmentGroup true "需要每种角色的user id"
// @Success 200 {object} response.Default
// @Failure 400 "invalid token(body)"
// @router /group [post]
func (p *ProjectController) MakeAssignmentGroup() {
	// Decode the multi-role assignment payload.
	var assignGroup request.MakeAssignmentGroup
	err := unmarshalBody(p.Ctx.Input.RequestBody, &assignGroup)
	if err != nil {
		p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed")
		return
	}
	// Header.Get returns "" when the header is absent, avoiding the
	// index-out-of-range panic that Header["Token"][0] caused for
	// requests without a Token header.
	creator, err := parseUserToken(p.Ctx.Request.Header.Get("Token"))
	if err != nil {
		logger.Recorder.Warning("[user token] parse user token error: " + err.Error())
		p.respondJson(http.StatusBadRequest, response.FAIL, "invalid token")
		return
	}
	// The operator is always the authenticated admin, never client-supplied.
	assignGroup.Operator = creator
	code := models.MakeAssignments(&assignGroup)
	p.respondJson(http.StatusOK, code, "")
}
// @Title GetUserAssignment
// @Description 获取某一个用户的所有项目参与情况(即参与各个项目的角色分配情况)
// @Param token header string true "user token get at login"
// @Param uid path string true "user id"
// @Success 200 {object} response.Default
// @Failure 400 "invalid user id"
// @router /user/:uid [get]
func (p *ProjectController) GetUserAssignments() {
	// List every project/role assignment the given user participates in.
	uid := p.GetString(":uid")
	if uid == "" {
		p.respondJson(http.StatusBadRequest, response.FAIL, "invalid user id")
		return
	}
	assigns, code := models.GetUserAssignments(uid)
	p.respondJson(http.StatusOK, code, "", assigns)
	return
}
// @Title GetProjectAssignment
// @Description 获取一个项目的所有人员分配情况
// @Param token header string true "user token get at login"
// @Param pid path string true "project id"
// @Success 200 {object} response.Default
// @Failure 400 "invalid project id"
// @router /assign/:pid [get]
func (p *ProjectController) GetProjectAssignments() {
	// List every user/role assignment recorded for the given project.
	pid := p.GetString(":pid")
	if pid == "" {
		p.respondJson(http.StatusBadRequest, response.FAIL, "invalid project id")
		return
	}
	assigns, code := models.GetProjectAssignment(pid)
	p.respondJson(http.StatusOK, code, "", assigns)
	return
}
// @Title ChangeAssignment
// @Description 更改一个角色分配; 字段说明: operator-进行更改的管理员id(根据token解析), assignment_id-更改的assign id, new_role-新的角色分配,
// @Param token header string true "user token get at login"
// @Param json body request.ChangeAssignment true "new role to change"
// @Success 200 {object} response.Default
// @Failure 400 "invalid bodu"
// @router /assign [patch]
func (p *ProjectController) ChangeAssignment() {
	// Decode the role-change payload.
	var update request.ChangeAssignment
	err := unmarshalBody(p.Ctx.Input.RequestBody, &update)
	if err != nil {
		p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed")
		return
	}
	// Header.Get returns "" when the header is absent, avoiding the
	// index-out-of-range panic that Header["Token"][0] caused for
	// requests without a Token header.
	creator, err := parseUserToken(p.Ctx.Request.Header.Get("Token"))
	if err != nil {
		logger.Recorder.Warning("[user token] parse user token error: " + err.Error())
		p.respondJson(http.StatusBadRequest, response.FAIL, "invalid token")
		return
	}
	// The operator is always the authenticated admin, never client-supplied.
	update.Operator = creator
	code := models.ChangeAssignment(&update)
	p.respondJson(http.StatusOK, code, "")
}
// @Title ConfirmAssignment
// @Description 用户端确认角色分配申请
// @Param token header string true "user token get at login"
// @Param aid path string true “要确认的addignment 的 uuid"
// @Success 200 {object} response.Default
// @Failure 400 "invalid uuid"
// @router /assign/confirm/:aid [get]
func (p *ProjectController) ConfirmAssignment() {
	// User-side confirmation of a pending role assignment, by assignment uuid.
	aid := p.GetString(":aid")
	if aid == "" {
		p.respondJson(http.StatusBadRequest, response.FAIL, "invalid assignment id")
		return
	}
	code := models.ConfirmAssignment(aid)
	p.respondJson(http.StatusOK, code, "")
	return
}
// @Title DeleteAssignment
// @Description 删除一条角色分配
// @Param token header string true "user token get at login"
// @Param aid path string true "uuid of assignment to delete"
// @Success 200 {object} response.Default
// @Failure 400 "invalid uuid"
// @router /assign/:aid [delete]
func (p *ProjectController) DeleteAssignment() {
	// Remove a single role assignment, identified by its uuid.
	aid := p.GetString(":aid")
	if aid == "" {
		p.respondJson(http.StatusBadRequest, response.FAIL, "invalid assignment id")
		return
	}
	code := models.RemoveAssignment(aid)
	p.respondJson(http.StatusOK, code, "")
	return
}
|
<filename>src/RecordingRow.tsx<gh_stars>1-10
import * as React from "react";
import {useCallback, useState} from "react";
import {usePlayer} from "ractive-player";
import type {RecorderPlugin} from "./types";
interface Props {
  // Recorded payloads keyed by plugin key; value shape is plugin-specific.
  data: {
    [key: string]: any;
  };
  // Lookup from plugin key to the plugin that recorded (and can save) it.
  pluginsByKey: {
    [key: string]: RecorderPlugin;
  };
}
/**
 * One row in the recordings list: an editable name field plus a table with
 * one save-widget per plugin that contributed data to this recording.
 */
export default function RecordingRow(props: Props) {
  const player = usePlayer();
  // Local, user-editable display name for this recording.
  const [name, setName] = useState("Untitled");
  const onChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
    setName(e.target.value);
  }, []);
  const {data, pluginsByKey} = props;
  return (
    <li className="recording-row">
      {/* Suspend the player's keyboard shortcuts while typing a name,
          and restore them on blur. */}
      <input
        className="recording-name"
        onBlur={player.resumeKeyCapture}
        onFocus={player.suspendKeyCapture}
        onChange={onChange}
        type="text"
        value={name}
      />
      <table className="recording-results">
        <tbody>
          {Object.keys(data).map(pluginKey => {
            const plugin = pluginsByKey[pluginKey],
                  SaveComponent = plugin.saveComponent;
            {/* Each plugin renders its own icon and save UI for its data. */}
            return (
              <tr key={pluginKey}>
                <th key="head" scope="row">
                  <svg className="recorder-plugin-icon" height="36" width="36" viewBox="0 0 100 100">
                    <rect height="100" width="100" fill="#222"/>
                    {plugin.icon}
                  </svg>
                </th>
                <td key="cell">
                  <SaveComponent data={data[pluginKey]}/>
                </td>
              </tr>);
          })}
        </tbody>
      </table>
    </li>
  );
}
import type {App} from './app.js';
import type {Context} from './context.js';
import crypto from 'crypto';
import {promisify} from 'util';
const scrypt = promisify(crypto.scrypt);
const randomBytes = promisify(crypto.randomBytes);
/**
 * Encrypted, cookie-based session storage (AES-256-GCM).
 *
 * Session data is serialized to JSON, encrypted with a key derived from an
 * application secret via scrypt, and kept entirely client-side in a cookie
 * formatted as "payload--iv--authTag" (each part base64).
 */
export class Session {
  // Cookie attributes applied when the session cookie is written.
  cookieName = 'mojo';
  cookiePath = '/';
  // Session lifetime in seconds, used when the data carries no `expires`.
  expiration = 3600;
  httpOnly = true;
  sameSite: 'lax' | 'strict' | 'none' = 'lax';
  secure = false;
  // Weak reference so the session helper does not keep the app alive.
  _app: WeakRef<App>;

  constructor(app: App) {
    this._app = new WeakRef(app);
  }

  /**
   * Decrypt a "payload--iv--authTag" cookie value, trying each secret in
   * order (supports secret rotation). Returns null when the format is wrong
   * or no secret authenticates the payload.
   */
  static async decrypt(secrets: string[], encrypted: string): Promise<string | null> {
    const match = encrypted.match(/^([^-]+)--([^-]+)--([^-]+)$/);
    if (match === null) return null;
    const value = match[1];
    const iv = Buffer.from(match[2], 'base64');
    const authTag = Buffer.from(match[3], 'base64');

    for (const secret of secrets) {
      try {
        // NOTE(review): the scrypt salt is the fixed string 'salt', so the
        // key depends only on the secret — confirm this is intentional.
        const key = await scrypt(secret, 'salt', 32);
        const decipher = crypto.createDecipheriv('aes-256-gcm', key as crypto.CipherKey, iv);
        decipher.setAuthTag(authTag);
        const decrypted = decipher.update(value, 'base64', 'utf8');
        return decrypted + decipher.final('utf8');
      } catch (error) {
        // Wrong secret or tampered payload: try the next secret.
        continue;
      }
    }

    return null;
  }

  /**
   * Encrypt `value` with a key derived from `secret`; returns the
   * "payload--iv--authTag" string stored in the cookie.
   */
  static async encrypt(secret: string, value: string): Promise<string> {
    const key = await scrypt(secret, 'salt', 32);
    const iv = await randomBytes(12);
    const cipher = crypto.createCipheriv('aes-256-gcm', key as crypto.CipherKey, iv);
    const encrypted = cipher.update(value, 'utf8', 'base64') + cipher.final('base64');
    return encrypted + '--' + iv.toString('base64') + '--' + cipher.getAuthTag().toString('base64');
  }

  /**
   * Load the session for this request. Returns null when the cookie is
   * missing, the app has been garbage-collected, decryption fails, or the
   * session has expired. The `expires` field is stripped from the result.
   */
  async load(ctx: Context): Promise<Record<string, any> | null> {
    const cookie = ctx.req.getCookie(this.cookieName);
    if (cookie === null) return null;

    const app = this._app.deref();
    if (app === undefined) return null;
    const decrypted = await Session.decrypt(app.secrets, cookie);
    if (decrypted === null) return null;

    const data = JSON.parse(decrypted);
    const expires = data.expires;
    delete data.expires;
    // NOTE(review): if `expires` is absent this comparison is false and the
    // session never expires — store() always sets it, but a payload without
    // it would be accepted indefinitely; confirm that is acceptable.
    if (expires <= Math.round(Date.now() / 1000)) return null;

    return data;
  }

  /**
   * Serialize, encrypt (with the first/current secret) and store `data` in
   * the session cookie; the cookie's Expires mirrors the data's `expires`.
   */
  async store(ctx: Context, data: Record<string, any>): Promise<void> {
    if (typeof data.expires !== 'number') data.expires = Math.round(Date.now() / 1000) + this.expiration;

    const app = this._app.deref();
    if (app === undefined) return;
    const encrypted = await Session.encrypt(app.secrets[0], JSON.stringify(data));

    ctx.res.setCookie(this.cookieName, encrypted, {
      expires: new Date(data.expires * 1000),
      httpOnly: this.httpOnly,
      path: this.cookiePath,
      sameSite: this.sameSite,
      secure: this.secure
    });
  }
}
|
"""
Write a program to calculate the sum of all nodes in a Binary Search Tree (BST)
"""
class Node:
    """A binary-tree node holding a numeric payload and two child links."""

    def __init__(self, data):
        self.data = data
        self.left = None   # left child (Node or None)
        self.right = None  # right child (Node or None)
def calculate_sum(root):
    """Return the sum of all node values in the tree rooted at ``root``.

    An empty tree (``root is None``) contributes 0.
    """
    if root is None:
        return 0
    subtotal = calculate_sum(root.left)
    subtotal += calculate_sum(root.right)
    return root.data + subtotal
if __name__ == '__main__':
    # Build a small sample BST:
    #        15
    #       /  \
    #     10    20
    #    /  \
    #   8    12
    root = Node(15)
    root.left = Node(10)
    root.right = Node(20)
    root.left.left = Node(8)
    root.left.right = Node(12)
    # 15 + 10 + 20 + 8 + 12 = 65 (the original comment wrongly said 55).
    print(calculate_sum(root))  # 65
import numpy as np
class QRegisterBase:
    """Minimal state-vector quantum register."""

    def initialize(self, num_qubits):
        """Allocate a 2**num_qubits amplitude vector set to |0...0>."""
        self.num_qubits = num_qubits
        dim = 2 ** num_qubits
        amplitudes = np.zeros(dim, dtype=complex)
        amplitudes[0] = 1.0  # all probability mass on the all-zeros state
        self.state = amplitudes

    def apply_gate(self, gate):
        """Left-multiply the state vector by the unitary matrix ``gate``."""
        self.state = np.dot(gate, self.state)
class QRegisterBE(QRegisterBase):
    """Register with measurement helpers (bitstrings are big-endian)."""

    def measure_all(self):
        """Sample one basis state per the Born rule; return it as a bitstring."""
        probabilities = np.abs(self.state) ** 2
        dim = 2 ** self.num_qubits
        outcome = np.random.choice(dim, p=probabilities)
        return format(outcome, '0{}b'.format(self.num_qubits))

    def get_register_state(self):
        """Return the raw amplitude vector."""
        return self.state
#include <clog/clog.h>

/* Single definition of the scratch buffer used by clog for message
 * formatting; presumably declared extern in <clog/clog.h> — verify. */
char g_clog_temp_str[CLOG_TEMP_STR_SIZE];
|
/**
 * Map each number to its FizzBuzz word: multiples of both 3 and 5 become
 * "FizzBuzz", of 3 become "Fizz", of 5 become "Buzz"; anything else is
 * rendered as its decimal string.
 */
function fizzBuzz(arr: number[]): string[] {
    return arr.map((value) => {
        const byThree = value % 3 === 0;
        const byFive = value % 5 === 0;
        if (byThree && byFive) return "FizzBuzz";
        if (byThree) return "Fizz";
        if (byFive) return "Buzz";
        return value.toString();
    });
}
// Test
console.log(fizzBuzz([1, 3, 5, 15, 7, 9, 10])); // Output: [ "1", "Fizz", "Buzz", "FizzBuzz", "7", "Fizz", "Buzz" ]
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/*
* ProgressUI.java
*
* Created on Jul 15, 2010, 4:50:46 PM
*/
package org.fhwa.c2cri.gui;
import org.fhwa.c2cri.utilities.ProgressReporter;
import java.awt.Frame;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.JLabel;
import javax.swing.Timer;
/**
 * Modal "operation in progress" dialog with an indeterminate progress bar.
 *
 * <p>The dialog is not shown immediately: {@link #start()} arms a one-shot
 * delay timer ({@code totalDelay} ms) so that very short operations finish
 * without ever flashing a dialog. {@link #done()} cancels the timer and
 * disposes the dialog.</p>
 *
 * @author TransCore ITS, LLC
 * Last Updated: 1/8/2014
 */
public class ProgressUI extends javax.swing.JDialog implements ProgressReporter{

    /** Delay in milliseconds before the dialog becomes visible. */
    private static int totalDelay = 500;

    /** The timer that sets off the dialog message after {@code totalDelay} ms. */
    Timer timer;

    /**
     * Creates new form ProgressUI with the default waiting message.
     *
     * @param parent the parent frame
     * @param modal  whether the dialog blocks input to the parent
     */
    public ProgressUI(java.awt.Frame parent, boolean modal) {
        super(parent, modal);
        initComponents();
        setVisible(false);
        timer = createDelayTimer();
    }

    /**
     * Creates new form ProgressUI with a message naming the running action.
     *
     * @param parent      the parent frame
     * @param modal       whether the dialog blocks input to the parent
     * @param actionTitle name of the action shown in the waiting message
     */
    public ProgressUI(java.awt.Frame parent, boolean modal, String actionTitle) {
        super(parent, modal);
        initComponents();
        this.operationProgressLabel.setText("Waiting for completion of "+actionTitle+".");
        setVisible(false);
        timer = createDelayTimer();
    }

    /**
     * Builds the one-shot delay timer shared by both constructors (the
     * identical anonymous listener was previously duplicated in each).
     * When it fires, the dialog is shown and the timer stops itself.
     */
    private Timer createDelayTimer() {
        ActionListener taskPerformer = new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                setVisible(true);
                timer.stop();
            }
        };
        return new Timer(totalDelay, taskPerformer);
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        operationProgressLabel = new javax.swing.JLabel();
        jProgressBar1 = new javax.swing.JProgressBar();
        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("Operation in Progress");
        setAlwaysOnTop(true);
        setModalityType(java.awt.Dialog.ModalityType.APPLICATION_MODAL);
        operationProgressLabel.setText("Please wait while the previous request is completed...");
        jProgressBar1.setValue(50);
        jProgressBar1.setIndeterminate(true);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(20, 20, 20)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(operationProgressLabel)
                    .addComponent(jProgressBar1, javax.swing.GroupLayout.PREFERRED_SIZE, 246, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addContainerGap(21, Short.MAX_VALUE))
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addGap(21, 21, 21)
                .addComponent(operationProgressLabel)
                .addGap(18, 18, 18)
                .addComponent(jProgressBar1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(31, Short.MAX_VALUE))
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * The main method — shows the dialog standalone for manual testing.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                ProgressUI dialog = new ProgressUI(new javax.swing.JFrame(), true);
                dialog.addWindowListener(new java.awt.event.WindowAdapter() {
                    public void windowClosing(java.awt.event.WindowEvent e) {
                        System.exit(0);
                    }
                });
                dialog.setVisible(true);
            }
        });
    }

    /**
     * Arms the delay timer; the dialog appears only if the operation is
     * still running after {@code totalDelay} ms.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     */
    @Override
    public void start() {
        timer.start(); // Start the delay Timer
    }

    /**
     * Updates the progress message shown in the dialog.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     *
     * @param updateText the update text
     */
    @Override
    public void update(String updateText) {
        operationProgressLabel.setText(updateText);
        operationProgressLabel.repaint();
    }

    /**
     * Cancels the delay timer (so the dialog cannot appear late) and
     * disposes the dialog if it is already showing.
     *
     * Pre-Conditions: N/A
     * Post-Conditions: N/A
     */
    @Override
    public void done() {
        timer.stop();
        dispose(); // Clear the dialog
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    /** The indeterminate progress bar. */
    private javax.swing.JProgressBar jProgressBar1;
    /** The label carrying the progress/status message. */
    private javax.swing.JLabel operationProgressLabel;
    // End of variables declaration//GEN-END:variables
}
|
#!/bin/bash
# Serve the current directory over HTTP (default port 8000).
# SimpleHTTPServer only exists on Python 2; use the Python 3 module.
python3 -m http.server
|
/*
* Tencent is pleased to support the open source community by making 蓝鲸 available.
* Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package remote
import (
"strings"
"sync"
"time"
"configcenter/src/common/blog"
"configcenter/src/common/util"
"configcenter/src/storage/dal"
"configcenter/src/storage/rpc"
)
// pool multiplexes RPC traffic: a primary connection plus per-address
// clients created on demand and cached by address string. The embedded
// RWMutex guards cache.
type pool struct {
	cache map[string]rpc.Client
	conn  rpc.Client
	sync.RWMutex
}

// client binds a pool to the per-call join options (e.g. an explicit
// transaction-manager address to route to).
type client struct {
	p   *pool
	opt *dal.JoinOption
}
// NewPool wraps an existing rpc.Client as the primary connection of a new,
// empty connection pool.
func NewPool(client rpc.Client) *pool {
	p := &pool{conn: client}
	p.cache = make(map[string]rpc.Client, 0)
	return p
}
// Option binds the given join options to this pool, yielding a client whose
// calls may be routed to the address named in opt.
func (p *pool) Option(opt *dal.JoinOption) *client {
	c := &client{opt: opt}
	c.p = p
	return c
}
// Call invokes cmd through the pool's primary connection, or through a
// dedicated per-address client when the join options name a TM address.
func (c *client) Call(cmd string, input interface{}, result interface{}) error {
	// Record handling latency when the metrics collector is configured.
	if requestDuration != nil {
		before := time.Now()
		defer func() {
			requestDuration.WithLabelValues("handle process").Observe(util.ToMillisecond(time.Since(before)))
		}()
	}
	rpcClient := c.p.conn
	if c.opt != nil && c.opt.TMAddr != "" {
		var err error
		// Route to the transaction manager named in the options.
		rpcClient, err = c.GetRPCByAddr(c.opt.TMAddr)
		if err != nil {
			blog.ErrorJSON("client call addr(%s) err:%s, rid:%s", c.opt.TMAddr, err.Error(), c.opt.RequestID)
			return err
		}
	}
	return rpcClient.Call(cmd, input, result)
}
// CallInfo invokes cmd on the primary connection and also reports the
// address the call was served from.
func (p *pool) CallInfo(cmd string, input interface{}, result interface{}) (addr string, err error) {
	// Record handling latency when the metrics collector is configured.
	if requestDuration != nil {
		before := time.Now()
		defer func() {
			requestDuration.WithLabelValues("handle process").Observe(util.ToMillisecond(time.Since(before)))
		}()
	}
	addr, err = p.conn.CallInfo(cmd, input, result)
	return
}
// Close closes every cached per-address client and the primary connection.
// Unlike the previous implementation, it does not stop at the first failure
// (which leaked every remaining connection); all clients are closed and the
// first error encountered is returned.
func (p *pool) Close() error {
	var firstErr error
	for _, conn := range p.cache {
		if err := conn.Close(); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	if err := p.conn.Close(); err != nil && firstErr == nil {
		firstErr = err
	}
	return firstErr
}
// Ping checks liveness of the primary connection only; cached per-address
// clients are pinged individually in GetRPCByAddr.
func (p *pool) Ping() error {
	return p.conn.Ping()
}
// GetRPCByAddr get rpc client by cache; a live cached client is reused,
// otherwise a fresh one is created for the address.
func (c *client) GetRPCByAddr(addr string) (rpc.Client, error) {
	// Default the scheme when none is present. The previous check,
	// !strings.HasPrefix(addr, "://"), was true for virtually every input,
	// so "http://" was prepended even to addresses that already carried a
	// scheme (yielding e.g. "http://http://host").
	if !strings.Contains(addr, "://") {
		addr = "http://" + addr
	}
	rpc, ok := c.getRPCByAddr(addr)
	if ok {
		// Only reuse a cached client that still answers pings.
		if err := rpc.Ping(); err == nil {
			return rpc, nil
		}
	}
	return c.addRPCByAddr(addr)
}
// getRPCByAddr looks up a cached client for addr under the read lock.
func (c *client) getRPCByAddr(addr string) (rpc.Client, bool) {
	c.p.RLock()
	defer c.p.RUnlock()
	rpc, ok := c.p.cache[addr]
	return rpc, ok
}
// addRPCByAddr creates a client pool for addr and caches it so later calls
// can reuse it and pool.Close can close it. The previous implementation
// never stored the new client in p.cache, so caching (and cleanup) never
// actually happened.
func (c *client) addRPCByAddr(addr string) (rpc.Client, error) {
	c.p.Lock()
	defer c.p.Unlock()
	getSrvFunc := func() ([]string, error) {
		return []string{addr}, nil
	}
	cli, err := rpc.NewClientPool("tcp", getSrvFunc, "/txn/v3/rpc")
	if err != nil {
		return nil, err
	}
	c.p.cache[addr] = cli
	return cli, nil
}
|
import torch
def process_checkpoint(checkpoint: dict) -> 'tuple[int, int, int]':
    """Count pruning-mask kernels in a generator checkpoint.

    The previous annotation ``(int, int, int)`` was a tuple of classes, not
    a valid return-type hint; it is now a proper tuple annotation.

    Args:
        checkpoint: Loaded checkpoint dict containing 'gen_state_dict'.
            Entries whose key contains 'mask' are treated as kernel masks.

    Returns:
        (num_kernel, zero_kernel, n_kernel): total masks seen, masks that
        are entirely zero (pruned away), and the remaining (kept) masks.
    """
    num_kernel = 0
    zero_kernel = 0
    n_kernel = 0
    state_dict = checkpoint['gen_state_dict']
    # Iterate items() directly instead of re-indexing by key.
    for key, mask in state_dict.items():
        if 'mask' not in key:
            continue
        num_kernel += 1
        if torch.all(mask == 0):
            zero_kernel += 1
        else:
            n_kernel += 1
    return num_kernel, zero_kernel, n_kernel
import time
from lib_utils import *
from lib_leds import LEDS
class LEDS:
    """Thin wrapper around the ``lib_leds`` hardware module.

    NOTE(review): this class shadows the ``LEDS`` name imported at the top
    of the file (``from lib_leds import LEDS``). The original methods
    referenced a bare ``lib_leds`` name that was never imported, so every
    call raised NameError; the module is now imported explicitly.
    """

    def __init__(self):
        # Deferred import: the hardware module is only needed once an
        # instance is actually driving LEDs.
        import lib_leds
        self._leds = lib_leds

    def on(self):
        """Turn on the LEDs."""
        self._leds.turn_on()

    def off(self):
        """Turn off the LEDs."""
        self._leds.turn_off()

    def set_color(self, color):
        """Set the LEDs to the specified color."""
        self._leds.set_color(color)

    def blink(self, duration):
        """Blink the LEDs (0.5 s on / 0.5 s off) for ``duration`` seconds."""
        start_time = time.time()
        while time.time() - start_time < duration:
            self._leds.turn_on()
            time.sleep(0.5)
            self._leds.turn_off()
            time.sleep(0.5)
# Example usage: keep the LEDs lit for 30 seconds, then switch them off.
leds = LEDS()
leds.on()
time.sleep(30)
leds.off()
// Clear the inline error highlight as soon as the user edits a field.
$("input").change(function() {
    $(this).removeClass("error-field");
});

// Client-side validation + AJAX submit for the login form.
// (Field ids are Bosnian: korisnicko_ime = username, lozinka = password.)
$('#login_forma').submit(function(event) {
    event.preventDefault();
    if ($("#korisnicko_ime").val() == '' || $("#lozinka").val() == '') {
        // "You have not filled in all fields!"
        alertify.showFailure("Niste popunili sva polja!").set('maximizable', false);
        if ($("#korisnicko_ime").val() == '') {
            $("#korisnicko_ime").addClass("error-field");
        }
        if ($("#lozinka").val() == '') {
            $("#lozinka").addClass("error-field");
        }
    } else {
        var formData = new FormData($(this)[0]);
        $.ajax({
            url: 'controller/check_user.php',
            type: 'POST',
            data: formData,
            // NOTE(review): synchronous XHR blocks the UI thread and is
            // deprecated by browsers — confirm whether async is acceptable.
            async: false,
            cache: false,
            contentType: false,
            processData: false,
            success: function(message) {
                // The backend signals success via this exact message
                // ("You have logged in successfully!"); anything else is an
                // error string to display.
                if (message != "Uspješno ste se ulogovali!") {
                    alertify.showFailure(message).set('maximizable', false);
                } else {
                    window.location.replace("viewer/mypages/starter.php");
                }
            }
        })
    }
});
<reponame>andreapatri/cms_journal
/**
* IE11-safe version of getCanonicalLocales since it's ES2016
* @param locales locales
*/
/**
 * IE11-safe version of getCanonicalLocales since it's ES2016
 * @param locales locales
 */
export function getCanonicalLocales(locales) {
    // Use the native implementation when the engine provides it.
    var canonicalize = Intl.getCanonicalLocales;
    if (typeof canonicalize === 'function') {
        return canonicalize(locales);
    }
    // IE11 fallback. We must NOT call `supportedLocalesOf` of a formatjs
    // polyfill, or their implementation will even eventually call this method
    // recursively. `Intl.DateTimeFormat` is safe because it is not polyfilled
    // by `@formatjs`.
    // TODO: Fix TypeScript type def for this bc undefined is just fine
    return Intl.DateTimeFormat.supportedLocalesOf(locales);
}
//# sourceMappingURL=get-canonical-locales.js.map |
// Copyright The Linux Foundation and each contributor to CommunityBridge.
// SPDX-License-Identifier: MIT
import { Component, EventEmitter, Input, Output } from '@angular/core';
/**
 * Site header component: exposes an `expanded` flag set by the parent and
 * emits `toggled` events for the parent to react to.
 */
@Component({
  selector: 'lfx-header',
  templateUrl: 'lfx-header.html'
})
export class lfxHeader {
  // Whether the header navigation is currently expanded.
  @Input() expanded;
  // Fired when the header's toggle control is activated.
  @Output() toggled: EventEmitter<any> = new EventEmitter<any>();
}
|
package net.community.apps.common.test.gridbag;
import java.util.List;
import net.community.chest.awt.layout.gridbag.GridBagAnchorType;
import net.community.chest.ui.helpers.combobox.EnumComboBox;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Mar 19, 2008 1:33:42 PM
*/
public class GridBagAnchorsChoice extends EnumComboBox<GridBagAnchorType> {
    private static final long serialVersionUID = -1159194208906359288L;

    /** Creates an empty choice box; call populate explicitly when needed. */
    public GridBagAnchorsChoice ()
    {
        this(false);
    }

    /**
     * @param autoPopulate whether to immediately fill the combo box with
     * all {@link GridBagAnchorType} values
     */
    public GridBagAnchorsChoice (boolean autoPopulate)
    {
        super(GridBagAnchorType.class, autoPopulate);
    }

    /*
     * @see net.community.chest.ui.helpers.combobox.EnumComboBox#getEnumValues()
     */
    @Override
    public synchronized List<GridBagAnchorType> getEnumValues ()
    {
        return GridBagAnchorType.VALUES;
    }
}
|
<filename>frontend/src/context/reducer.js
// Action type identifiers consumed by the incidents reducer below.
export const types = {
    LIST_INCIDENTS: 'LIST_INCIDENTS',
    ADD_INCIDENT: 'ADD_INCIDENT',
    DELETE_INCIDENT: 'DELETE_INCIDENT',
    SET_ERRORS: 'SET_ERRORS',
    IS_LOADING: 'IS_LOADING',
    CLEAN_STATE: 'CLEAN_STATE'
};
// Default store shape: no incidents loaded, not fetching, no errors.
export const initialState = {
    incidents: [],
    loading: false,
    errors: []
}
// Incidents reducer: every handled action returns a brand-new state object
// (spread copy); unrecognised actions fall through and return state unchanged.
export default function reducer(state = initialState, action) {
    if (action.type === types.LIST_INCIDENTS) {
        // Replace the whole list and clear any transient flags.
        return { ...state, incidents: action.incidents, loading: false, errors: [] };
    }
    if (action.type === types.DELETE_INCIDENT) {
        const remaining = state.incidents.filter(incident => incident.id !== action.id);
        return { ...state, incidents: remaining, loading: false, errors: [] };
    }
    if (action.type === types.ADD_INCIDENT) {
        // concat keeps the existing array untouched and appends the new item.
        return { ...state, incidents: state.incidents.concat(action.incident), loading: false, errors: [] };
    }
    if (action.type === types.SET_ERRORS) {
        return { ...state, errors: action.errors, loading: false };
    }
    if (action.type === types.IS_LOADING) {
        return { ...state, loading: true };
    }
    if (action.type === types.CLEAN_STATE) {
        return { ...state, loading: false, incidents: [], errors: [] };
    }
    return state;
};
#!/usr/bin/env bash
set -euo pipefail
IFS=$'\n\t'
config_global() {
    # Review the current global .env values and, unless the user opts to keep
    # them, walk through each variable with an interactive value prompt.
    local APPNAME="Global"
    local VARNAMES=(TZ DOCKERHOSTNAME PUID PGID DOCKERCONFDIR DOWNLOADSDIR MEDIADIR_AUDIOBOOKS MEDIADIR_BOOKS MEDIADIR_COMICS MEDIADIR_MOVIES MEDIADIR_MUSIC MEDIADIR_TV DOCKERSHAREDDIR)
    # Render one "NAME=value" line per variable for display and later parsing.
    local APPVARS
    APPVARS=$(for v in "${VARNAMES[@]}"; do echo "${v}=$(run_script 'env_get' "${v}")"; done)
    if ! run_script 'question_prompt' N "Would you like to keep these settings for ${APPNAME}?\\n\\n${APPVARS}"; then
        info "Configuring ${APPNAME} .env variables."
        # Re-prompt for each variable; the name is everything before the first '='.
        while IFS= read -r line; do
            run_script 'menu_value_prompt' "${line%%=*}" || return 1
        done <<< "${APPVARS}"
    else
        info "Keeping ${APPNAME} .env variables."
    fi
}
test_config_global() {
    # config_global is interactive (prompts the user), so it cannot run under CI.
    # run_script 'config_global'
    warning "Travis does not test config_global."
}
|
if n is None:
result = self._cache.pop(0)
else:
result, self_cache = self._cache[:n], self._cache[n:]
|
// Build-generated service-worker precache manifest (Workbox / CRA style):
// each entry pairs a content-hash `revision` with the asset `url` to cache.
// Do not edit by hand — regenerated on every production build.
self.__precacheManifest = [
  {
    "revision": "c96f075c270359fb979a",
    "url": "/static/css/main.90fc7929.chunk.css"
  },
  {
    "revision": "c96f075c270359fb979a",
    "url": "/static/js/main.c96f075c.chunk.js"
  },
  {
    "revision": "4656055eac2e2ba7e637",
    "url": "/static/js/1.4656055e.chunk.js"
  },
  {
    "revision": "229c360febb4351a89df",
    "url": "/static/js/runtime~main.229c360f.js"
  },
  {
    "revision": "5d2c8412f90169df621f95cf2e1cb811",
    "url": "/index.html"
  }
];
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.