text stringlengths 1 1.05M |
|---|
/*
This file is a part of DSRC software distributed under GNU GPL 2 licence.
The homepage of the DSRC project is http://sun.aei.polsl.pl/dsrc
Authors: <NAME> and <NAME>
Version: 2.00
*/
/*
This file is modified by SDU HPC lab for RabbitQC project.
Last modified: JULY2019
*/
#ifndef H_DATAQUEUE
#define H_DATAQUEUE
#include "Globals.h"
#include <queue>
#ifdef USE_BOOST_THREAD
#include <boost/thread.hpp>
namespace th = boost;
#else
#include <mutex>
#include <condition_variable>
namespace th = std;
#endif
namespace dsrc
{
namespace core
{
template <class _TDataType>
class TDataQueue
{
typedef _TDataType DataType;
typedef std::queue<std::pair<int64, DataType*> > part_queue;
const uint32 threadNum;
const uint32 maxPartNum;
uint64 completedThreadMask;
uint32 partNum;
uint64 currentThreadMask;
part_queue parts;
th::mutex mutex;
th::condition_variable queueFullCondition;
th::condition_variable queueEmptyCondition;
public:
static const uint32 DefaultMaxPartNum = 64;
static const uint32 DefaultMaxThreadtNum = 64;
TDataQueue(uint32 maxPartNum_ = DefaultMaxPartNum, uint32 threadNum_ = 1)
: threadNum(threadNum_)
, maxPartNum(maxPartNum_)
, partNum(0)
, currentThreadMask(0)
{
ASSERT(maxPartNum_ > 0);
ASSERT(threadNum_ >= 1);
ASSERT(threadNum_ < 64);
completedThreadMask = ((uint64)1 << threadNum) - 1;
}
~TDataQueue()
{}
bool IsEmpty()
{
return parts.empty();
}
bool IsCompleted()
{
return parts.empty() && currentThreadMask == completedThreadMask;
}
void SetCompleted()
{
th::lock_guard<th::mutex> lock(mutex);
ASSERT(currentThreadMask != completedThreadMask);
currentThreadMask = (currentThreadMask << 1) | 1;
queueEmptyCondition.notify_all();
}
void Push(int64 partId_, const DataType* part_)
{
th::unique_lock<th::mutex> lock(mutex);
while (partNum > maxPartNum)
queueFullCondition.wait(lock);
parts.push(std::make_pair(partId_, (DataType*)part_));
partNum++;
queueEmptyCondition.notify_one();
}
bool Pop(int64 &partId_, DataType* &part_)
{
th::unique_lock<th::mutex> lock(mutex);
while ((parts.size() == 0) && currentThreadMask != completedThreadMask)
queueEmptyCondition.wait(lock);
if (parts.size() != 0)
{
partId_ = parts.front().first;
part_ = parts.front().second;
partNum--;
parts.pop();
queueFullCondition.notify_one();
return true;
}
// assure this is impossible
ASSERT(currentThreadMask == completedThreadMask);
ASSERT(parts.size() == 0);
return false;
}
void Reset()
{
ASSERT(currentThreadMask == completedThreadMask);
partNum = 0;
currentThreadMask = 0;
}
};
} // namespace core
} // namespace dsrc
#endif // H_DATAQUEUE
|
#!/bin/sh
###header
readonly VAR_PARAMETERS='$1 script name without extension, $2 suite'
# Validate arguments before touching any $1-derived files so the usage
# message reaches the terminal rather than a half-created log file.
if [ "$#" != "2" ]; then echo "Call syntax: $(basename "$0") $VAR_PARAMETERS"; exit 1; fi
# Quote $1 expansions so names with spaces don't word-split; route stdout,
# stderr, and the tool-version report (fd 3) to per-script files.
if [ -r "${1}.ok" ]; then rm "${1}.ok"; fi
exec 1>"${1}.log"
exec 2>"${1}.err"
exec 3>"${1}.tst"
###function
# Abort the whole script if the previous command exited non-zero.
# Must be called immediately after the command being checked.
checkRetValOK(){
    [ "$?" = "0" ] || exit 1
}
#$1 suite
# Switch the machine to the package repository matching the requested suite:
# comments out the default repository entry, then re-enables the matching
# stable/testing/unstable line as a dedicated .list file.
# NOTE(review): assumes /etc/apt/sources.list.d/public-apt-dishmaev.list
# exists and contains '# ... apt <suite> main' lines — confirm on target hosts.
activeSuiteRepository(){
    #deactivate default repository: comment out its first line in place
    sudo sed '1s/^/# /' -i /etc/apt/sources.list.d/public-apt-dishmaev.list
    checkRetValOK
    #activate required repository
    if [ "$1" = "rel" ]; then
        # release -> stable suite
        cat /etc/apt/sources.list.d/public-apt-dishmaev.list | grep 'apt stable main' | sed 's/# //' | sudo tee /etc/apt/sources.list.d/public-apt-dishmaev-stable.list
        checkRetValOK
    elif [ "$1" = "tst" ]; then
        # test -> testing suite
        cat /etc/apt/sources.list.d/public-apt-dishmaev.list | grep 'apt testing main' | sed 's/# //' | sudo tee /etc/apt/sources.list.d/public-apt-dishmaev-testing.list
        checkRetValOK
    elif [ "$1" = "dev" ]; then
        # development -> unstable suite
        cat /etc/apt/sources.list.d/public-apt-dishmaev.list | grep 'apt unstable main' | sed 's/# //' | sudo tee /etc/apt/sources.list.d/public-apt-dishmaev-unstable.list
        checkRetValOK
    else #run suite: no extra repository needed
        return
    fi
}
# Wait until no process holds the dpkg lock so apt/dpkg can run safely.
# Polls with fuser every CONST_SLEEP_LONG seconds; after CONST_TRY_NUM
# attempts of CONST_TRY_LONG polls each, gives up and exits the script.
checkDpkgUnlock(){
    local CONST_LOCK_FILE='/var/lib/dpkg/lock'
    local CONST_TRY_NUM=3 #attempts before giving up
    local CONST_TRY_LONG=30 #polls per attempt
    local CONST_SLEEP_LONG=5 #seconds between polls (so one attempt = 150s)
    local VAR_COUNT=$CONST_TRY_LONG
    local VAR_TRY=$CONST_TRY_NUM
    echo "Check /var/lib/dpkg/lock"
    while sudo fuser $CONST_LOCK_FILE >/dev/null 2>&1; do
        echo -n '.'
        sleep $CONST_SLEEP_LONG
        VAR_COUNT=$((VAR_COUNT-1))
        if [ $VAR_COUNT -eq 0 ]; then
            VAR_TRY=$((VAR_TRY-1))
            if [ $VAR_TRY -eq 0 ]; then #out of attempts: abort the whole script
                echo "failed wait while unlock $CONST_LOCK_FILE. Check another long process using it"
                exit 1
            else
                echo ''
                echo "Still locked $CONST_LOCK_FILE, left $VAR_TRY attempts"
            fi;
            VAR_COUNT=$CONST_TRY_LONG
        fi
    done
    echo ''
    return 0
}
###body
echo "Current create suite: $2"
uname -a
#install build packages; "run" is the suite that actually builds and tests
if [ "$2" = "run" ]; then
    checkDpkgUnlock
    sudo apt -y update
    checkRetValOK
    checkDpkgUnlock
    sudo apt -y install build-essential
    checkRetValOK
    checkDpkgUnlock
    sudo apt -y install libboost-all-dev
    checkRetValOK
fi
#active suite repository
activeSuiteRepository "$2"
##test: record toolchain versions on fd 3 (redirected to ${1}.tst in the header)
if [ "$2" = "run" ]; then
    make --version >&3
    checkRetValOK
    gcc --version >&3
    checkRetValOK
    c++ --version >&3
    checkRetValOK
    dpkg-deb --version >&3
    checkRetValOK
fi
###finish: the .ok marker file signals success to the calling harness
echo 1 > ${1}.ok
exit 0
|
-- Fetch active Table1 rows with their (still valid) Table2 match, if any.
-- BUGFIX: the right-table predicate previously lived in WHERE
-- (t2.expired_time > NOW()), which discards rows where t2 is NULL and
-- silently turns the LEFT JOIN into an INNER JOIN. Moving it into the ON
-- clause keeps unmatched/expired t1 rows in the result with NULL t2 columns.
SELECT t1.*, t2.*
FROM Table1 t1
LEFT JOIN Table2 t2
  ON t1.id = t2.id
 AND t2.expired_time > NOW()
WHERE t1.status = 'active';
<filename>magic_shop/components/grid/index.js
// components/grid/index.js
Component({
  // Component property list
  properties: {
    grid: Array
  },
  // Initial component data (none)
  data: {},
  // Component methods
  methods: {
    // Navigate to the SPU list page for the tapped grid category.
    OnGotoSpuList(event) {
      const categoryId = event.currentTarget.dataset.cid
      wx.navigateTo({
        url: `/pages/spu-list/index?cid=${categoryId}&isroot=true`
      })
    }
  }
})
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_view_comfy_outline = void 0;
var ic_view_comfy_outline = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0V0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M3 5v14h19V5H3zm17 4h-2.25V7H20v2zM9.25 11h2.25v2H9.25v-2zm-2 2H5v-2h2.25v2zm4.25-4H9.25V7h2.25v2zm2-2h2.25v2H13.5V7zm-2 8v2H9.25v-2h2.25zm2 0h2.25v2H13.5v-2zm0-2v-2h2.25v2H13.5zm4.25-2H20v2h-2.25v-2zM7.25 7v2H5V7h2.25zM5 15h2.25v2H5v-2zm12.75 2v-2H20v2h-2.25z"
},
"children": []
}]
};
exports.ic_view_comfy_outline = ic_view_comfy_outline; |
<reponame>anotheria/moskito-control<filename>ui/src/main/java/org/moskito/control/ui/bean/DataWidgetBean.java
package org.moskito.control.ui.bean;
import java.util.HashMap;
import java.util.Map;
/**
* Represents a single data widget at runtime.
*
* @author lrosenberg
* @since 07.06.18 13:17
*/
public class DataWidgetBean {
	/**
	 * Widget type, used to select the proper renderer.
	 */
	private String type;
	/**
	 * Caption displayed with the widget.
	 */
	private String caption;
	/**
	 * Widget-specific variables and their values.
	 */
	private Map<String, String> data = new HashMap<>();

	/**
	 * Returns the widget type.
	 */
	public String getType() {
		return this.type;
	}

	/**
	 * Sets the widget type.
	 */
	public void setType(String type) {
		this.type = type;
	}

	/**
	 * Returns the widget caption.
	 */
	public String getCaption() {
		return this.caption;
	}

	/**
	 * Sets the widget caption.
	 */
	public void setCaption(String caption) {
		this.caption = caption;
	}

	/**
	 * Adds (or replaces) a single data entry.
	 */
	public void addData(String key, String value) {
		this.data.put(key, value);
	}

	/**
	 * Returns the widget-specific data map.
	 */
	public Map<String, String> getData() {
		return this.data;
	}

	/**
	 * Replaces the widget-specific data map.
	 */
	public void setData(Map<String, String> data) {
		this.data = data;
	}

	@Override
	public String toString() {
		StringBuilder out = new StringBuilder("DataWidgetBean{");
		out.append("type='").append(type).append('\'');
		out.append(", caption='").append(caption).append('\'');
		out.append(", data=").append(data);
		out.append('}');
		return out.toString();
	}
}
|
#!/usr/bin/env bash
# Strict mode: exit on errors, on use of unset variables, and on pipeline failures.
set -o errexit -o nounset -o pipefail
# A set of common functions for use in other scripts.
#
# Functions:
#
# - cromwell::build::*
# Functions for use in other scripts
#
# - cromwell::private::*
# Functions for use only within this file by cromwell::build::* functions
#
# Special Variables
#
# - CROMWELL_BUILD_*
# Variables for use in other scripts.
#
# - crmdbg
# Quick debug scripts. Example: `crmdbg=y src/ci/bin/testCentaurLocal.sh`
#
# - crmcit
# Simulate a centaur integration test build. Example: `crmcit=y src/ci/bin/testCentaurPapiV2.sh`
cromwell::private::check_debug() {
    # Turn on bash xtrace when the caller exported a non-empty `crmdbg`.
    # shellcheck disable=SC2154
    if [[ -n "${crmdbg-}" ]]; then
        set -o xtrace
    fi
    # Force a centaur integration test build when `crmcit` is exported non-empty.
    # shellcheck disable=SC2154
    if [[ -n "${crmcit-}" ]]; then
        CROMWELL_BUILD_CENTAUR_TYPE="integration"
    fi
}
# Exports environment variables used for scripts.
# Detects the CI provider (Travis / Jenkins / local), derives the build and
# backend type from provider variables or the invoking script's filename, and
# exports the CROMWELL_BUILD_* variables the other CI functions consume.
cromwell::private::create_build_variables() {
    # --- CI provider detection ---
    CROMWELL_BUILD_PROVIDER_TRAVIS="travis"
    CROMWELL_BUILD_PROVIDER_JENKINS="jenkins"
    CROMWELL_BUILD_PROVIDER_UNKNOWN="unknown"
    if [[ "${TRAVIS-false}" == "true" ]]; then
        CROMWELL_BUILD_PROVIDER="${CROMWELL_BUILD_PROVIDER_TRAVIS}"
    elif [[ "${JENKINS-false}" == "true" ]]; then
        CROMWELL_BUILD_PROVIDER="${CROMWELL_BUILD_PROVIDER_JENKINS}"
    else
        CROMWELL_BUILD_PROVIDER="${CROMWELL_BUILD_PROVIDER_UNKNOWN}"
    fi
    # --- OS detection ---
    # simplified from https://stackoverflow.com/a/18434831/3320205
    CROMWELL_BUILD_OS_DARWIN="darwin";
    CROMWELL_BUILD_OS_LINUX="linux";
    case "${OSTYPE-unknown}" in
        darwin*) CROMWELL_BUILD_OS="${CROMWELL_BUILD_OS_DARWIN}" ;;
        linux*) CROMWELL_BUILD_OS="${CROMWELL_BUILD_OS_LINUX}" ;;
        *) CROMWELL_BUILD_OS="unknown_os" ;;
    esac
    # --- Well-known paths, rooted at the current working directory ---
    CROMWELL_BUILD_HOME_DIRECTORY="${HOME}"
    CROMWELL_BUILD_ROOT_DIRECTORY="$(pwd)"
    CROMWELL_BUILD_LOG_DIRECTORY="${CROMWELL_BUILD_ROOT_DIRECTORY}/target/ci/logs"
    CROMWELL_BUILD_CROMWELL_LOG="${CROMWELL_BUILD_LOG_DIRECTORY}/cromwell.log"
    CROMWELL_BUILD_DOCKER_DIRECTORY="${CROMWELL_BUILD_ROOT_DIRECTORY}/src/ci/docker-compose"
    CROMWELL_BUILD_SCRIPTS_DIRECTORY="${CROMWELL_BUILD_ROOT_DIRECTORY}/src/ci/bin"
    CROMWELL_BUILD_RESOURCES_SOURCES="${CROMWELL_BUILD_ROOT_DIRECTORY}/src/ci/resources"
    CROMWELL_BUILD_RESOURCES_DIRECTORY="${CROMWELL_BUILD_ROOT_DIRECTORY}/target/ci/resources"
    # wait-for-it.sh is pinned to a specific commit hash to avoid upstream changes.
    CROMWELL_BUILD_WAIT_FOR_IT_FILENAME="wait-for-it.sh"
    CROMWELL_BUILD_WAIT_FOR_IT_BRANCH="db049716e42767d39961e95dd9696103dca813f1"
    CROMWELL_BUILD_WAIT_FOR_IT_URL="https://raw.githubusercontent.com/vishnubob/wait-for-it/${CROMWELL_BUILD_WAIT_FOR_IT_BRANCH}/${CROMWELL_BUILD_WAIT_FOR_IT_FILENAME}"
    CROMWELL_BUILD_WAIT_FOR_IT_SCRIPT="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/${CROMWELL_BUILD_WAIT_FOR_IT_FILENAME}"
    # $$ (this shell's pid) keeps concurrent builds from sharing the exit-functions file.
    CROMWELL_BUILD_EXIT_FUNCTIONS="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/cromwell_build_exit_functions.$$"
    if [[ -n "${VIRTUAL_ENV:+set}" ]]; then
        CROMWELL_BUILD_IS_VIRTUAL_ENV=true
    else
        CROMWELL_BUILD_IS_VIRTUAL_ENV=false
    fi
    # --- Provider-specific build metadata ---
    case "${CROMWELL_BUILD_PROVIDER}" in
        "${CROMWELL_BUILD_PROVIDER_TRAVIS}")
            CROMWELL_BUILD_IS_CI=true
            CROMWELL_BUILD_IS_SECURE="${TRAVIS_SECURE_ENV_VARS}"
            if [[ -n "${TRAVIS_PULL_REQUEST_BRANCH:+set}" ]]; then
                CROMWELL_BUILD_IS_PULL_REQUEST=true
            else
                CROMWELL_BUILD_IS_PULL_REQUEST=false
            fi
            CROMWELL_BUILD_TYPE="${BUILD_TYPE}"
            CROMWELL_BUILD_BRANCH="${TRAVIS_PULL_REQUEST_BRANCH:-${TRAVIS_BRANCH}}"
            CROMWELL_BUILD_BRANCH_PULL_REQUEST="${TRAVIS_PULL_REQUEST_BRANCH:-""}"
            CROMWELL_BUILD_EVENT="${TRAVIS_EVENT_TYPE}"
            CROMWELL_BUILD_TAG="${TRAVIS_TAG}"
            CROMWELL_BUILD_NUMBER="${TRAVIS_JOB_NUMBER}"
            CROMWELL_BUILD_URL="https://travis-ci.com/${TRAVIS_REPO_SLUG}/jobs/${TRAVIS_JOB_ID}"
            CROMWELL_BUILD_GIT_USER_EMAIL="travis@travis-ci.com"
            CROMWELL_BUILD_GIT_USER_NAME="Travis CI"
            CROMWELL_BUILD_HEARTBEAT_PATTERN="…"
            CROMWELL_BUILD_GENERATE_COVERAGE=true
            # Always run on sbt, even for 'push'.
            # This allows quick sanity checks before starting PRs *and* publishing after merges into develop.
            if [[ "${TRAVIS_EVENT_TYPE}" == "push" ]] && \
                [[ "${BUILD_TYPE}" != "sbt" ]] && \
                [[ "${TRAVIS_COMMIT_MESSAGE}" != *"[force ci]"* ]]; then
                CROMWELL_BUILD_RUN_TESTS=false
            else
                CROMWELL_BUILD_RUN_TESTS=true
            fi
            ;;
        "${CROMWELL_BUILD_PROVIDER_JENKINS}")
            # External variables must be passed through in the ENVIRONMENT of src/ci/docker-compose/docker-compose.yml
            CROMWELL_BUILD_IS_CI=true
            CROMWELL_BUILD_IS_SECURE=true
            CROMWELL_BUILD_IS_PULL_REQUEST=false
            CROMWELL_BUILD_TYPE="${JENKINS_BUILD_TYPE}"
            CROMWELL_BUILD_BRANCH="${GIT_BRANCH#origin/}"
            CROMWELL_BUILD_BRANCH_PULL_REQUEST=""
            CROMWELL_BUILD_EVENT=""
            CROMWELL_BUILD_TAG=""
            CROMWELL_BUILD_NUMBER="${BUILD_NUMBER}"
            CROMWELL_BUILD_URL="${BUILD_URL}"
            CROMWELL_BUILD_GIT_USER_EMAIL="jenkins@jenkins.io"
            CROMWELL_BUILD_GIT_USER_NAME="Jenkins CI"
            CROMWELL_BUILD_HEARTBEAT_PATTERN="…\n"
            CROMWELL_BUILD_GENERATE_COVERAGE=false
            CROMWELL_BUILD_RUN_TESTS=true
            ;;
        *)
            # Local/unknown provider: derive the build type from the invoking
            # test script's name (e.g. testCentaurLocal.sh -> centaurLocal).
            CROMWELL_BUILD_IS_CI=false
            CROMWELL_BUILD_IS_SECURE=true
            CROMWELL_BUILD_IS_PULL_REQUEST=false
            CROMWELL_BUILD_TYPE="unknown"
            CROMWELL_BUILD_BRANCH="unknown"
            CROMWELL_BUILD_BRANCH_PULL_REQUEST=""
            CROMWELL_BUILD_EVENT="unknown"
            CROMWELL_BUILD_TAG=""
            CROMWELL_BUILD_NUMBER=""
            CROMWELL_BUILD_URL=""
            CROMWELL_BUILD_GIT_USER_EMAIL="unknown.git.user@example.org"
            CROMWELL_BUILD_GIT_USER_NAME="Unknown Git User"
            CROMWELL_BUILD_HEARTBEAT_PATTERN="…"
            CROMWELL_BUILD_GENERATE_COVERAGE=true
            CROMWELL_BUILD_RUN_TESTS=true
            local bash_script
            for bash_script in "${BASH_SOURCE[@]}"; do
                # Skip this include file itself; use the first real test script.
                if [[ "${bash_script}" != */test.inc.sh ]]; then
                    local build_type_script
                    build_type_script="$(basename "${bash_script}")"
                    build_type_script="${build_type_script#test}"
                    build_type_script="${build_type_script%.sh}"
                    # lower-case the first character
                    build_type_script="$(tr '[:upper:]' '[:lower:]' <<< "${build_type_script:0:1}")${build_type_script:1}"
                    CROMWELL_BUILD_TYPE="${build_type_script}"
                    break
                fi
            done
            ;;
    esac
    # Strip the known build-type prefixes, then convert camelCase to
    # snake_case to get the backend type (e.g. centaurPapiV2 -> papi_v2).
    local backend_type
    backend_type="${CROMWELL_BUILD_TYPE}"
    backend_type="${backend_type#centaurEngineUpgrade}"
    backend_type="${backend_type#centaurPapiUpgrade}"
    backend_type="${backend_type#centaurWdlUpgrade}"
    backend_type="${backend_type#centaurHoricromtal}"
    backend_type="${backend_type#centaur}"
    backend_type="${backend_type#conformance}"
    backend_type="$(echo "${backend_type}" | sed 's/\([A-Z]\)/_\1/g' | tr '[:upper:]' '[:lower:]' | cut -c 2-)"
    CROMWELL_BUILD_BACKEND_TYPE="${backend_type}"
    if [[ "${CROMWELL_BUILD_TYPE}" == conformance* ]]; then
        CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND="server/assembly centaurCwlRunner/assembly"
    else
        CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND="assembly"
    fi
    if [[ "${CROMWELL_BUILD_TYPE}" == centaurPapiUpgrade* ]] || \
        [[ "${CROMWELL_BUILD_TYPE}" == centaurHoricromtalEngineUpgrade* ]]; then
        CROMWELL_BUILD_CROMWELL_CONFIG="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/papi_v1_v2_upgrade_application.conf"
    else
        CROMWELL_BUILD_CROMWELL_CONFIG="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/${CROMWELL_BUILD_BACKEND_TYPE}_application.conf"
    fi
    # Defaults for variables callers may pre-set in the environment.
    CROMWELL_BUILD_OPTIONAL_SECURE="${CROMWELL_BUILD_OPTIONAL_SECURE-false}"
    CROMWELL_BUILD_REQUIRES_SECURE="${CROMWELL_BUILD_REQUIRES_SECURE-false}"
    CROMWELL_BUILD_REQUIRES_PULL_REQUEST="${CROMWELL_BUILD_REQUIRES_PULL_REQUEST-false}"
    CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND="${CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND-assembly}"
    VAULT_TOKEN="${VAULT_TOKEN-vault token is not set as an environment variable}"
    local hours_to_minutes
    hours_to_minutes=60
    CROMWELL_BUILD_HEARTBEAT_MINUTES=$((20 * hours_to_minutes))
    # NOTE(review): CROMWELL_BUILD_REQUIRES_PULL_REQUEST is assigned above but
    # never exported below — confirm whether any child process needs it.
    export CROMWELL_BUILD_BACKEND_TYPE
    export CROMWELL_BUILD_BRANCH
    export CROMWELL_BUILD_BRANCH_PULL_REQUEST
    export CROMWELL_BUILD_CROMWELL_CONFIG
    export CROMWELL_BUILD_CROMWELL_LOG
    export CROMWELL_BUILD_DOCKER_DIRECTORY
    export CROMWELL_BUILD_EVENT
    export CROMWELL_BUILD_EXIT_FUNCTIONS
    export CROMWELL_BUILD_GENERATE_COVERAGE
    export CROMWELL_BUILD_GIT_USER_EMAIL
    export CROMWELL_BUILD_GIT_USER_NAME
    export CROMWELL_BUILD_HEARTBEAT_MINUTES
    export CROMWELL_BUILD_HEARTBEAT_PATTERN
    export CROMWELL_BUILD_HOME_DIRECTORY
    export CROMWELL_BUILD_IS_CI
    export CROMWELL_BUILD_IS_PULL_REQUEST
    export CROMWELL_BUILD_IS_SECURE
    export CROMWELL_BUILD_IS_VIRTUAL_ENV
    export CROMWELL_BUILD_LOG_DIRECTORY
    export CROMWELL_BUILD_NUMBER
    export CROMWELL_BUILD_OPTIONAL_SECURE
    export CROMWELL_BUILD_OS
    export CROMWELL_BUILD_OS_DARWIN
    export CROMWELL_BUILD_OS_LINUX
    export CROMWELL_BUILD_PROVIDER
    export CROMWELL_BUILD_PROVIDER_JENKINS
    export CROMWELL_BUILD_PROVIDER_TRAVIS
    export CROMWELL_BUILD_PROVIDER_UNKNOWN
    export CROMWELL_BUILD_REQUIRES_SECURE
    export CROMWELL_BUILD_RESOURCES_DIRECTORY
    export CROMWELL_BUILD_RESOURCES_SOURCES
    export CROMWELL_BUILD_ROOT_DIRECTORY
    export CROMWELL_BUILD_RUN_TESTS
    export CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND
    export CROMWELL_BUILD_SCRIPTS_DIRECTORY
    export CROMWELL_BUILD_TAG
    export CROMWELL_BUILD_TYPE
    export CROMWELL_BUILD_URL
    export CROMWELL_BUILD_WAIT_FOR_IT_BRANCH
    export CROMWELL_BUILD_WAIT_FOR_IT_FILENAME
    export CROMWELL_BUILD_WAIT_FOR_IT_SCRIPT
    export CROMWELL_BUILD_WAIT_FOR_IT_URL
}
cromwell::private::echo_build_variables() {
    # Print the key build variables as NAME='value' lines, in a fixed order,
    # using bash indirect expansion instead of one echo per variable.
    local variable_name
    for variable_name in \
        CROMWELL_BUILD_IS_CI \
        CROMWELL_BUILD_IS_SECURE \
        CROMWELL_BUILD_REQUIRES_SECURE \
        CROMWELL_BUILD_OPTIONAL_SECURE \
        CROMWELL_BUILD_TYPE \
        CROMWELL_BUILD_BRANCH \
        CROMWELL_BUILD_BRANCH_PULL_REQUEST \
        CROMWELL_BUILD_EVENT \
        CROMWELL_BUILD_TAG \
        CROMWELL_BUILD_NUMBER \
        CROMWELL_BUILD_PROVIDER \
        CROMWELL_BUILD_OS \
        CROMWELL_BUILD_URL \
        ; do
        echo "${variable_name}='${!variable_name}'"
    done
}
# Exports connection settings for the MariaDB, MySQL, and PostgreSQL test
# databases, varying by CI provider. Empty *_DOCKER_TAG means "do not start
# a container" (Jenkins' docker-compose already runs the databases; locally
# the caller is expected to provide them).
cromwell::private::create_database_variables() {
    case "${CROMWELL_BUILD_PROVIDER}" in
        "${CROMWELL_BUILD_PROVIDER_TRAVIS}")
            # On Travis the databases run in containers this script starts;
            # MariaDB uses 13306 to avoid clashing with MySQL's 3306.
            CROMWELL_BUILD_MARIADB_HOSTNAME="localhost"
            CROMWELL_BUILD_MARIADB_PORT="13306"
            CROMWELL_BUILD_MARIADB_USERNAME="cromwell"
            CROMWELL_BUILD_MARIADB_PASSWORD="test"
            CROMWELL_BUILD_MARIADB_SCHEMA="cromwell_test"
            CROMWELL_BUILD_MARIADB_DOCKER_TAG="${BUILD_MARIADB-}"
            CROMWELL_BUILD_MYSQL_HOSTNAME="localhost"
            CROMWELL_BUILD_MYSQL_PORT="3306"
            CROMWELL_BUILD_MYSQL_USERNAME="cromwell"
            CROMWELL_BUILD_MYSQL_PASSWORD="test"
            CROMWELL_BUILD_MYSQL_SCHEMA="cromwell_test"
            CROMWELL_BUILD_MYSQL_DOCKER_TAG="${BUILD_MYSQL-}"
            CROMWELL_BUILD_POSTGRESQL_HOSTNAME="localhost"
            CROMWELL_BUILD_POSTGRESQL_PORT="5432"
            CROMWELL_BUILD_POSTGRESQL_USERNAME="cromwell"
            CROMWELL_BUILD_POSTGRESQL_PASSWORD="test"
            CROMWELL_BUILD_POSTGRESQL_SCHEMA="cromwell_test"
            CROMWELL_BUILD_POSTGRESQL_DOCKER_TAG="${BUILD_POSTGRESQL-}"
            ;;
        "${CROMWELL_BUILD_PROVIDER_JENKINS}")
            # NOTE: Jenkins uses src/ci/docker-compose/docker-compose.yml.
            # We don't define a docker tag because the docker-compose has already spun up the database containers by the
            # time this script is run. Other variables here must match the database service names and settings the yaml.
            CROMWELL_BUILD_MARIADB_DOCKER_TAG=""
            CROMWELL_BUILD_MARIADB_HOSTNAME="mariadb-db"
            CROMWELL_BUILD_MARIADB_PORT="3306"
            CROMWELL_BUILD_MARIADB_USERNAME="cromwell"
            CROMWELL_BUILD_MARIADB_PASSWORD="test"
            CROMWELL_BUILD_MARIADB_SCHEMA="cromwell_test"
            CROMWELL_BUILD_MYSQL_DOCKER_TAG=""
            CROMWELL_BUILD_MYSQL_HOSTNAME="mysql-db"
            CROMWELL_BUILD_MYSQL_PORT="3306"
            CROMWELL_BUILD_MYSQL_USERNAME="cromwell"
            CROMWELL_BUILD_MYSQL_PASSWORD="test"
            CROMWELL_BUILD_MYSQL_SCHEMA="cromwell_test"
            CROMWELL_BUILD_POSTGRESQL_DOCKER_TAG=""
            CROMWELL_BUILD_POSTGRESQL_HOSTNAME="postgresql-db"
            CROMWELL_BUILD_POSTGRESQL_PORT="5432"
            CROMWELL_BUILD_POSTGRESQL_USERNAME="cromwell"
            CROMWELL_BUILD_POSTGRESQL_PASSWORD="test"
            CROMWELL_BUILD_POSTGRESQL_SCHEMA="cromwell_test"
            ;;
        *)
            # Local builds: every setting may be overridden via the environment.
            CROMWELL_BUILD_MARIADB_HOSTNAME="${CROMWELL_BUILD_MARIADB_HOSTNAME-localhost}"
            CROMWELL_BUILD_MARIADB_PORT="${CROMWELL_BUILD_MARIADB_PORT-13306}"
            CROMWELL_BUILD_MARIADB_USERNAME="${CROMWELL_BUILD_MARIADB_USERNAME-cromwell}"
            CROMWELL_BUILD_MARIADB_PASSWORD="${CROMWELL_BUILD_MARIADB_PASSWORD-test}"
            CROMWELL_BUILD_MARIADB_SCHEMA="${CROMWELL_BUILD_MARIADB_SCHEMA-cromwell_test}"
            CROMWELL_BUILD_MARIADB_DOCKER_TAG=""
            CROMWELL_BUILD_MYSQL_HOSTNAME="${CROMWELL_BUILD_MYSQL_HOSTNAME-localhost}"
            CROMWELL_BUILD_MYSQL_PORT="${CROMWELL_BUILD_MYSQL_PORT-3306}"
            CROMWELL_BUILD_MYSQL_USERNAME="${CROMWELL_BUILD_MYSQL_USERNAME-cromwell}"
            CROMWELL_BUILD_MYSQL_PASSWORD="${CROMWELL_BUILD_MYSQL_PASSWORD-test}"
            CROMWELL_BUILD_MYSQL_SCHEMA="${CROMWELL_BUILD_MYSQL_SCHEMA-cromwell_test}"
            CROMWELL_BUILD_MYSQL_DOCKER_TAG=""
            CROMWELL_BUILD_POSTGRESQL_HOSTNAME="${CROMWELL_BUILD_POSTGRESQL_HOSTNAME-localhost}"
            CROMWELL_BUILD_POSTGRESQL_PORT="${CROMWELL_BUILD_POSTGRESQL_PORT-5432}"
            CROMWELL_BUILD_POSTGRESQL_USERNAME="${CROMWELL_BUILD_POSTGRESQL_USERNAME-cromwell}"
            CROMWELL_BUILD_POSTGRESQL_PASSWORD="${CROMWELL_BUILD_POSTGRESQL_PASSWORD-test}"
            CROMWELL_BUILD_POSTGRESQL_SCHEMA="${CROMWELL_BUILD_POSTGRESQL_SCHEMA-cromwell_test}"
            CROMWELL_BUILD_POSTGRESQL_DOCKER_TAG=""
            ;;
    esac
    # JDBC URLs derived from the settings above. MariaDB is reached through
    # the MySQL driver/URL scheme.
    CROMWELL_BUILD_MARIADB_JDBC_URL="jdbc:mysql://${CROMWELL_BUILD_MARIADB_HOSTNAME}:${CROMWELL_BUILD_MARIADB_PORT}/${CROMWELL_BUILD_MARIADB_SCHEMA}?useSSL=false&rewriteBatchedStatements=true&serverTimezone=UTC"
    CROMWELL_BUILD_MYSQL_JDBC_URL="jdbc:mysql://${CROMWELL_BUILD_MYSQL_HOSTNAME}:${CROMWELL_BUILD_MYSQL_PORT}/${CROMWELL_BUILD_MYSQL_SCHEMA}?useSSL=false&rewriteBatchedStatements=true&serverTimezone=UTC"
    CROMWELL_BUILD_POSTGRESQL_JDBC_URL="jdbc:postgresql://${CROMWELL_BUILD_POSTGRESQL_HOSTNAME}:${CROMWELL_BUILD_POSTGRESQL_PORT}/${CROMWELL_BUILD_POSTGRESQL_SCHEMA}?reWriteBatchedInserts=true"
    export CROMWELL_BUILD_MARIADB_DOCKER_TAG
    export CROMWELL_BUILD_MARIADB_HOSTNAME
    export CROMWELL_BUILD_MARIADB_JDBC_URL
    export CROMWELL_BUILD_MARIADB_PASSWORD
    export CROMWELL_BUILD_MARIADB_PORT
    export CROMWELL_BUILD_MARIADB_SCHEMA
    export CROMWELL_BUILD_MARIADB_USERNAME
    export CROMWELL_BUILD_MYSQL_DOCKER_TAG
    export CROMWELL_BUILD_MYSQL_HOSTNAME
    export CROMWELL_BUILD_MYSQL_JDBC_URL
    export CROMWELL_BUILD_MYSQL_PASSWORD
    export CROMWELL_BUILD_MYSQL_PORT
    export CROMWELL_BUILD_MYSQL_SCHEMA
    export CROMWELL_BUILD_MYSQL_USERNAME
    export CROMWELL_BUILD_POSTGRESQL_DOCKER_TAG
    export CROMWELL_BUILD_POSTGRESQL_HOSTNAME
    export CROMWELL_BUILD_POSTGRESQL_JDBC_URL
    export CROMWELL_BUILD_POSTGRESQL_PASSWORD
    export CROMWELL_BUILD_POSTGRESQL_PORT
    export CROMWELL_BUILD_POSTGRESQL_SCHEMA
    export CROMWELL_BUILD_POSTGRESQL_USERNAME
}
# Derives the Centaur test type from CROMWELL_BUILD_TYPE, picks the test-case
# directory and Centaur config for it, and selects exactly one database (set
# via the *_DOCKER_TAG variables) for Centaur to run against.
cromwell::private::create_centaur_variables() {
    CROMWELL_BUILD_CENTAUR_TYPE_STANDARD="standard"
    CROMWELL_BUILD_CENTAUR_TYPE_INTEGRATION="integration"
    CROMWELL_BUILD_CENTAUR_TYPE_ENGINE_UPGRADE="engineUpgrade"
    CROMWELL_BUILD_CENTAUR_TYPE_PAPI_UPGRADE="papiUpgrade"
    CROMWELL_BUILD_CENTAUR_TYPE_PAPI_UPGRADE_NEW_WORKFLOWS="papiUpgradeNewWorkflows"
    CROMWELL_BUILD_CENTAUR_TYPE_HORICROMTAL_ENGINE_UPGRADE="horicromtalEngineUpgrade"
    CROMWELL_BUILD_CENTAUR_TYPE_HORICROMTAL="horicromtal"
    # Respect a pre-set type (e.g. via crmcit); otherwise match the most
    # specific build-type prefix first.
    if [[ -z "${CROMWELL_BUILD_CENTAUR_TYPE-}" ]]; then
        if [[ "${CROMWELL_BUILD_TYPE}" == centaurEngineUpgrade* ]]; then
            CROMWELL_BUILD_CENTAUR_TYPE="${CROMWELL_BUILD_CENTAUR_TYPE_ENGINE_UPGRADE}"
        elif [[ "${CROMWELL_BUILD_TYPE}" == centaurPapiUpgradeNewWorkflows* ]]; then
            CROMWELL_BUILD_CENTAUR_TYPE="${CROMWELL_BUILD_CENTAUR_TYPE_PAPI_UPGRADE_NEW_WORKFLOWS}"
        elif [[ "${CROMWELL_BUILD_TYPE}" == centaurPapiUpgrade* ]]; then
            CROMWELL_BUILD_CENTAUR_TYPE="${CROMWELL_BUILD_CENTAUR_TYPE_PAPI_UPGRADE}"
        elif [[ "${CROMWELL_BUILD_TYPE}" == centaurHoricromtalEngineUpgrade* ]]; then
            CROMWELL_BUILD_CENTAUR_TYPE="${CROMWELL_BUILD_CENTAUR_TYPE_HORICROMTAL_ENGINE_UPGRADE}"
        elif [[ "${CROMWELL_BUILD_TYPE}" == centaurHoricromtal* ]]; then
            CROMWELL_BUILD_CENTAUR_TYPE="${CROMWELL_BUILD_CENTAUR_TYPE_HORICROMTAL}"
        else
            CROMWELL_BUILD_CENTAUR_TYPE="${CROMWELL_BUILD_CENTAUR_TYPE_STANDARD}"
        fi
    fi
    CROMWELL_BUILD_CENTAUR_RESOURCES="${CROMWELL_BUILD_ROOT_DIRECTORY}/centaur/src/main/resources"
    if [[ "${CROMWELL_BUILD_CENTAUR_TYPE}" == "${CROMWELL_BUILD_CENTAUR_TYPE_HORICROMTAL}" ]]; then
        # Use the standard test cases despite the horicromtal Centaur build type.
        CROMWELL_BUILD_CENTAUR_TEST_DIRECTORY="${CROMWELL_BUILD_CENTAUR_RESOURCES}/standardTestCases"
        # Special horicromtal Centaur config.
        CROMWELL_BUILD_CENTAUR_CONFIG="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/centaur_application_horicromtal.conf"
    elif [[ "${CROMWELL_BUILD_CENTAUR_TYPE}" == "${CROMWELL_BUILD_CENTAUR_TYPE_HORICROMTAL_ENGINE_UPGRADE}" ]] ; then
        # Use the engine upgrade test cases despite the horicromtal Centaur build type.
        CROMWELL_BUILD_CENTAUR_TEST_DIRECTORY="${CROMWELL_BUILD_CENTAUR_RESOURCES}/engineUpgradeTestCases"
        # Special horicromtal engine upgrade Centaur config with horicromtal assertions turned off.
        CROMWELL_BUILD_CENTAUR_CONFIG="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/centaur_application_horicromtal_no_assert.conf"
    else
        CROMWELL_BUILD_CENTAUR_TEST_DIRECTORY="${CROMWELL_BUILD_CENTAUR_RESOURCES}/${CROMWELL_BUILD_CENTAUR_TYPE}TestCases"
        CROMWELL_BUILD_CENTAUR_CONFIG="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/centaur_application.conf"
    fi
    CROMWELL_BUILD_CENTAUR_TEST_RENDERED="${CROMWELL_BUILD_CENTAUR_TEST_DIRECTORY}/rendered"
    CROMWELL_BUILD_CENTAUR_LOG="${CROMWELL_BUILD_LOG_DIRECTORY}/centaur.log"
    # Pick **one** of the databases to run Centaur against
    case "${CROMWELL_BUILD_PROVIDER}" in
        "${CROMWELL_BUILD_PROVIDER_TRAVIS}")
            # Exactly one BUILD_* database tag is expected to be set by the
            # Travis job matrix; fail fast otherwise.
            if [[ -n "${CROMWELL_BUILD_MYSQL_DOCKER_TAG:+set}" ]]; then
                CROMWELL_BUILD_CENTAUR_SLICK_PROFILE="slick.jdbc.MySQLProfile$"
                CROMWELL_BUILD_CENTAUR_JDBC_DRIVER="com.mysql.cj.jdbc.Driver"
                CROMWELL_BUILD_CENTAUR_JDBC_USERNAME="${CROMWELL_BUILD_MYSQL_USERNAME}"
                CROMWELL_BUILD_CENTAUR_JDBC_PASSWORD="${CROMWELL_BUILD_MYSQL_PASSWORD}"
                CROMWELL_BUILD_CENTAUR_JDBC_URL="${CROMWELL_BUILD_MYSQL_JDBC_URL}"
            elif [[ -n "${CROMWELL_BUILD_MARIADB_DOCKER_TAG:+set}" ]]; then
                CROMWELL_BUILD_CENTAUR_SLICK_PROFILE="slick.jdbc.MySQLProfile$"
                CROMWELL_BUILD_CENTAUR_JDBC_DRIVER="com.mysql.cj.jdbc.Driver"
                CROMWELL_BUILD_CENTAUR_JDBC_USERNAME="${CROMWELL_BUILD_MARIADB_USERNAME}"
                CROMWELL_BUILD_CENTAUR_JDBC_PASSWORD="${CROMWELL_BUILD_MARIADB_PASSWORD}"
                CROMWELL_BUILD_CENTAUR_JDBC_URL="${CROMWELL_BUILD_MARIADB_JDBC_URL}"
            elif [[ -n "${CROMWELL_BUILD_POSTGRESQL_DOCKER_TAG:+set}" ]]; then
                CROMWELL_BUILD_CENTAUR_SLICK_PROFILE="slick.jdbc.PostgresProfile$"
                CROMWELL_BUILD_CENTAUR_JDBC_DRIVER="org.postgresql.Driver"
                CROMWELL_BUILD_CENTAUR_JDBC_USERNAME="${CROMWELL_BUILD_POSTGRESQL_USERNAME}"
                CROMWELL_BUILD_CENTAUR_JDBC_PASSWORD="${CROMWELL_BUILD_POSTGRESQL_PASSWORD}"
                CROMWELL_BUILD_CENTAUR_JDBC_URL="${CROMWELL_BUILD_POSTGRESQL_JDBC_URL}"
            else
                echo "Error: Unable to determine which RDBMS to use for Centaur." >&2
                exit 1
            fi
            CROMWELL_BUILD_CENTAUR_TEST_ADDITIONAL_PARAMETERS=
            ;;
        "${CROMWELL_BUILD_PROVIDER_JENKINS}")
            # Jenkins always runs Centaur against the docker-compose MySQL.
            CROMWELL_BUILD_CENTAUR_SLICK_PROFILE="slick.jdbc.MySQLProfile$"
            CROMWELL_BUILD_CENTAUR_JDBC_DRIVER="com.mysql.cj.jdbc.Driver"
            CROMWELL_BUILD_CENTAUR_JDBC_USERNAME="${CROMWELL_BUILD_MYSQL_USERNAME}"
            CROMWELL_BUILD_CENTAUR_JDBC_PASSWORD="${CROMWELL_BUILD_MYSQL_PASSWORD}"
            CROMWELL_BUILD_CENTAUR_JDBC_URL="${CROMWELL_BUILD_MYSQL_JDBC_URL}"
            CROMWELL_BUILD_CENTAUR_TEST_ADDITIONAL_PARAMETERS="${CENTAUR_TEST_ADDITIONAL_PARAMETERS-}"
            ;;
        *)
            # Local builds default to MySQL; all settings may be overridden.
            CROMWELL_BUILD_CENTAUR_SLICK_PROFILE="${CROMWELL_BUILD_CENTAUR_SLICK_PROFILE-slick.jdbc.MySQLProfile\$}"
            CROMWELL_BUILD_CENTAUR_JDBC_DRIVER="${CROMWELL_BUILD_CENTAUR_JDBC_DRIVER-com.mysql.cj.jdbc.Driver}"
            CROMWELL_BUILD_CENTAUR_JDBC_USERNAME="${CROMWELL_BUILD_CENTAUR_JDBC_USERNAME-${CROMWELL_BUILD_MYSQL_USERNAME}}"
            CROMWELL_BUILD_CENTAUR_JDBC_PASSWORD="${CROMWELL_BUILD_CENTAUR_JDBC_PASSWORD-${CROMWELL_BUILD_MYSQL_PASSWORD}}"
            CROMWELL_BUILD_CENTAUR_JDBC_URL="${CROMWELL_BUILD_CENTAUR_JDBC_URL-${CROMWELL_BUILD_MYSQL_JDBC_URL}}"
            CROMWELL_BUILD_CENTAUR_TEST_ADDITIONAL_PARAMETERS=
            ;;
    esac
    # Integration tests may read larger files via read_lines.
    case "${CROMWELL_BUILD_CENTAUR_TYPE}" in
        "${CROMWELL_BUILD_CENTAUR_TYPE_INTEGRATION}")
            CROMWELL_BUILD_CENTAUR_READ_LINES_LIMIT=512000
            ;;
        *)
            CROMWELL_BUILD_CENTAUR_READ_LINES_LIMIT=128000
            ;;
    esac
    # Fresh random 256-bit key (base64) for this build.
    CROMWELL_BUILD_CENTAUR_256_BITS_KEY="$(dd bs=1 count=32 if=/dev/urandom 2> /dev/null | base64 | tr -d '\n')"
    export CROMWELL_BUILD_CENTAUR_256_BITS_KEY
    export CROMWELL_BUILD_CENTAUR_CONFIG
    export CROMWELL_BUILD_CENTAUR_JDBC_DRIVER
    export CROMWELL_BUILD_CENTAUR_JDBC_PASSWORD
    export CROMWELL_BUILD_CENTAUR_JDBC_URL
    export CROMWELL_BUILD_CENTAUR_JDBC_USERNAME
    export CROMWELL_BUILD_CENTAUR_LOG
    export CROMWELL_BUILD_CENTAUR_TEST_ADDITIONAL_PARAMETERS
    export CROMWELL_BUILD_CENTAUR_TEST_DIRECTORY
    export CROMWELL_BUILD_CENTAUR_TEST_RENDERED
    export CROMWELL_BUILD_CENTAUR_READ_LINES_LIMIT
    export CROMWELL_BUILD_CENTAUR_RESOURCES
    export CROMWELL_BUILD_CENTAUR_SLICK_PROFILE
    export CROMWELL_BUILD_CENTAUR_TYPE
    export CROMWELL_BUILD_CENTAUR_TYPE_STANDARD
    export CROMWELL_BUILD_CENTAUR_TYPE_INTEGRATION
    export CROMWELL_BUILD_CENTAUR_TYPE_ENGINE_UPGRADE
}
# Exports the variables used by CWL conformance builds: pinned cwltool/test
# versions, the common-workflow-language checkout location, and the WDL
# wrapper workflow inputs/outputs.
cromwell::private::create_conformance_variables() {
    CROMWELL_BUILD_CWL_RUNNER_MODE="${CROMWELL_BUILD_BACKEND_TYPE}"
    # Tool and test-suite versions are pinned for reproducibility.
    CROMWELL_BUILD_CWL_TOOL_VERSION="1.0.20190228155703"
    CROMWELL_BUILD_CWL_TEST_VERSION="1.0.20190228134645"
    CROMWELL_BUILD_CWL_TEST_COMMIT="1f501e38ff692a408e16b246ac7d64d32f0822c2" # use known git hash to avoid changes
    CROMWELL_BUILD_CWL_TEST_RUNNER="${CROMWELL_BUILD_ROOT_DIRECTORY}/centaurCwlRunner/src/bin/centaur-cwl-runner.bash"
    CROMWELL_BUILD_CWL_TEST_DIRECTORY="${CROMWELL_BUILD_ROOT_DIRECTORY}/common-workflow-language"
    CROMWELL_BUILD_CWL_TEST_RESOURCES="${CROMWELL_BUILD_CWL_TEST_DIRECTORY}/v1.0/v1.0"
    CROMWELL_BUILD_CWL_TEST_WDL="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/cwl_conformance_test.wdl"
    CROMWELL_BUILD_CWL_TEST_INPUTS="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/cwl_conformance_test.inputs.json"
    CROMWELL_BUILD_CWL_TEST_OUTPUT="${CROMWELL_BUILD_LOG_DIRECTORY}/cwl_conformance_test.out.txt"
    CROMWELL_BUILD_CWL_TEST_PARALLELISM=10 # Set too high will cause false negatives due to cromwell server timeouts.
    export CROMWELL_BUILD_CWL_RUNNER_MODE
    export CROMWELL_BUILD_CWL_TOOL_VERSION
    export CROMWELL_BUILD_CWL_TEST_VERSION
    export CROMWELL_BUILD_CWL_TEST_COMMIT
    export CROMWELL_BUILD_CWL_TEST_RUNNER
    export CROMWELL_BUILD_CWL_TEST_DIRECTORY
    export CROMWELL_BUILD_CWL_TEST_RESOURCES
    export CROMWELL_BUILD_CWL_TEST_WDL
    export CROMWELL_BUILD_CWL_TEST_INPUTS
    export CROMWELL_BUILD_CWL_TEST_OUTPUT
    export CROMWELL_BUILD_CWL_TEST_PARALLELISM
}
cromwell::private::verify_secure_build() {
    # On Travis only: when the build requires secure (encrypted) variables but
    # they are unavailable, print a banner and exit successfully so the job is
    # skipped rather than failed. Other providers never short-circuit here.
    if [[ "${CROMWELL_BUILD_PROVIDER}" == "${CROMWELL_BUILD_PROVIDER_TRAVIS}" ]] && \
        [[ "${CROMWELL_BUILD_REQUIRES_SECURE}" == "true" ]] && \
        [[ "${CROMWELL_BUILD_IS_SECURE}" != "true" ]]; then
        echo "********************************************************"
        echo "********************************************************"
        echo "** **"
        echo "** WARNING: Encrypted keys are unavailable. Exiting. **"
        echo "** **"
        echo "********************************************************"
        echo "********************************************************"
        exit 0
    fi
}
cromwell::private::verify_pull_request_build() {
    # On Travis only: when the build requires a pull-request context but the
    # current build is not a PR, print a banner and exit successfully so the
    # job is skipped rather than failed.
    if [[ "${CROMWELL_BUILD_PROVIDER}" == "${CROMWELL_BUILD_PROVIDER_TRAVIS}" ]] && \
        [[ "${CROMWELL_BUILD_REQUIRES_PULL_REQUEST}" == "true" ]] && \
        [[ "${CROMWELL_BUILD_IS_PULL_REQUEST}" != "true" ]]; then
        echo "**************************************************"
        echo "**************************************************"
        echo "** **"
        echo "** NOTE: Build is not a Pull Request. Exiting. **"
        echo "** **"
        echo "**************************************************"
        echo "**************************************************"
        exit 0
    fi
}
cromwell::private::exec_test_script() {
    # Capitalize the first letter of the build type and exec (replace this
    # process with) the matching test script, e.g. "centaurLocal" ->
    # ${CROMWELL_BUILD_SCRIPTS_DIRECTORY}/testCentaurLocal.sh.
    local capitalized_type
    capitalized_type="$(tr '[:lower:]' '[:upper:]' <<< "${CROMWELL_BUILD_TYPE:0:1}")${CROMWELL_BUILD_TYPE:1}"
    exec "${CROMWELL_BUILD_SCRIPTS_DIRECTORY}/test${capitalized_type}.sh"
}
cromwell::private::stop_travis_defaults() {
    # Stop the MySQL/PostgreSQL services Travis starts by default so the
    # build's own database containers can bind the standard ports.
    # https://stackoverflow.com/questions/27382295/how-to-stop-services-on-travis-ci-running-by-default#answer-27410479
    sudo /etc/init.d/mysql stop
    sudo /etc/init.d/postgresql stop
}
cromwell::private::delete_boto_config() {
    # Remove the system boto config and point BOTO_CONFIG at /dev/null so
    # Google Cloud python tooling does not pick up Travis' stale settings.
    # https://github.com/travis-ci/travis-ci/issues/7940#issuecomment-310759657
    sudo rm -f /etc/boto.cfg
    export BOTO_CONFIG=/dev/null
}
cromwell::private::delete_sbt_boot() {
# Delete ~/.sbt/boot to fix consistent, almost immediate failures on sub-builds (usually TES but sometimes others).
# Even purging Travis caches didn't always fix the problem. Fortunately stackoverflow knew what to do:
# https://stackoverflow.com/questions/24539576/sbt-scala-2-10-4-missing-scala-tools-nsc-global
rm -rf ~/.sbt/boot/
}
# Install a pip package, adapting to the environment:
# CI uses sudo, a virtualenv installs directly, otherwise install per-user.
# $1: package spec; remaining args are passed through to pip.
cromwell::private::pip_install() {
local pip_package
pip_package="${1:?pip_install called without a package}"; shift
if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]]; then
sudo -H pip install "${pip_package}" "$@"
elif [[ "${CROMWELL_BUILD_IS_VIRTUAL_ENV}" == "true" ]]; then
pip install "${pip_package}" "$@"
else
pip install "${pip_package}" --user "$@"
fi
}
# Upgrade pip itself, then reinstall requests with its security extras.
cromwell::private::upgrade_pip() {
cromwell::private::pip_install pip --upgrade
cromwell::private::pip_install requests[security] --ignore-installed
}
# Download the wait-for-it helper script used to block until services listen.
cromwell::private::install_wait_for_it() {
    # --fail makes curl exit non-zero on HTTP errors instead of silently saving
    # the error page as the "script"; --location follows redirects.
    curl --silent --fail --location "${CROMWELL_BUILD_WAIT_FOR_IT_URL}" > "${CROMWELL_BUILD_WAIT_FOR_IT_SCRIPT}"
    chmod +x "${CROMWELL_BUILD_WAIT_FOR_IT_SCRIPT}"
}
# Start a detached docker container and register exit functions that remove it.
# $1: docker image name; remaining args are passed to `docker run`.
cromwell::private::start_docker() {
    local docker_image
    local docker_cid_file
    # Fixed: the error message previously said "foo called without a docker image".
    docker_image="${1:?start_docker called without a docker image}"; shift
    # Unique cid file per image and per shell PID; "/" and ":" are not filename-safe.
    docker_cid_file="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/$(echo "${docker_image}" | tr "/" "_" | tr ":" "-").cid.$$"
    docker run --cidfile="${docker_cid_file}" --detach "$@" "${docker_image}"
    cromwell::private::add_exit_function docker rm --force --volumes "$(cat "${docker_cid_file}")"
    cromwell::private::add_exit_function rm "${docker_cid_file}"
}
# Start a MySQL container for the tests, unless no docker tag is configured.
# Re-invokes itself through exec_silent_function when xtrace is on so the
# passwords in the --env flags are not echoed to the build log.
cromwell::private::start_docker_mysql() {
    if cromwell::private::is_xtrace_enabled; then
        cromwell::private::exec_silent_function cromwell::private::start_docker_mysql
    elif [[ -n "${CROMWELL_BUILD_MYSQL_DOCKER_TAG:+set}" ]]; then
        # Fixed: the last option line previously ended with a trailing "\",
        # which made the following `fi` an argument and broke the `if` syntax.
        cromwell::private::start_docker \
            mysql:"${CROMWELL_BUILD_MYSQL_DOCKER_TAG}" \
            --env MYSQL_ROOT_PASSWORD=private \
            --env MYSQL_USER="${CROMWELL_BUILD_MYSQL_USERNAME}" \
            --env MYSQL_PASSWORD="${CROMWELL_BUILD_MYSQL_PASSWORD}" \
            --env MYSQL_DATABASE="${CROMWELL_BUILD_MYSQL_SCHEMA}" \
            --publish "${CROMWELL_BUILD_MYSQL_PORT}":3306 \
            --volume "${CROMWELL_BUILD_DOCKER_DIRECTORY}"/mysql-conf.d:/etc/mysql/conf.d
    fi
}
# Start a MariaDB container for the tests, unless no docker tag is configured.
# Re-invokes itself through exec_silent_function when xtrace is on so the
# passwords in the --env flags are not echoed to the build log.
cromwell::private::start_docker_mariadb() {
    if cromwell::private::is_xtrace_enabled; then
        cromwell::private::exec_silent_function cromwell::private::start_docker_mariadb
    elif [[ -n "${CROMWELL_BUILD_MARIADB_DOCKER_TAG:+set}" ]]; then
        # Fixed: the last option line previously ended with a trailing "\",
        # which made the following `fi` an argument and broke the `if` syntax.
        cromwell::private::start_docker \
            mariadb:"${CROMWELL_BUILD_MARIADB_DOCKER_TAG}" \
            --env MYSQL_ROOT_PASSWORD=private \
            --env MYSQL_USER="${CROMWELL_BUILD_MARIADB_USERNAME}" \
            --env MYSQL_PASSWORD="${CROMWELL_BUILD_MARIADB_PASSWORD}" \
            --env MYSQL_DATABASE="${CROMWELL_BUILD_MARIADB_SCHEMA}" \
            --publish "${CROMWELL_BUILD_MARIADB_PORT}":3306 \
            --volume "${CROMWELL_BUILD_DOCKER_DIRECTORY}"/mariadb-conf.d:/etc/mysql/conf.d
    fi
}
# Start a PostgreSQL container for the tests, unless no docker tag is configured.
# Re-invokes itself through exec_silent_function when xtrace is on so the
# password in the --env flag is not echoed to the build log.
cromwell::private::start_docker_postgresql() {
    if cromwell::private::is_xtrace_enabled; then
        cromwell::private::exec_silent_function cromwell::private::start_docker_postgresql
    elif [[ -n "${CROMWELL_BUILD_POSTGRESQL_DOCKER_TAG:+set}" ]]; then
        # Fixed: the last option line previously ended with a trailing "\",
        # which made the following `fi` an argument and broke the `if` syntax.
        cromwell::private::start_docker \
            postgres:"${CROMWELL_BUILD_POSTGRESQL_DOCKER_TAG}" \
            --env POSTGRES_USER="${CROMWELL_BUILD_POSTGRESQL_USERNAME}" \
            --env POSTGRES_PASSWORD="${CROMWELL_BUILD_POSTGRESQL_PASSWORD}" \
            --env POSTGRES_DB="${CROMWELL_BUILD_POSTGRESQL_SCHEMA}" \
            --publish "${CROMWELL_BUILD_POSTGRESQL_PORT}":5432 \
            --volume "${CROMWELL_BUILD_DOCKER_DIRECTORY}"/postgresql-initdb.d:/docker-entrypoint-initdb.d
    fi
}
# Pre-pull docker images shared by many tests so no test pays the pull cost.
cromwell::private::pull_common_docker_images() {
# All tests use ubuntu:latest - make sure it's there before starting the tests
# because pulling the image during some of the tests would cause them to fail
# (specifically output_redirection which expects a specific value in stderr)
docker pull ubuntu
}
# Install the cwltool/cwltest pips pinned to the build's configured versions.
cromwell::private::install_cwltest() {
# TODO: No clue why these are needed for cwltool. If you know please update this comment.
sudo apt-get install procps || true
cromwell::private::pip_install cwltool=="${CROMWELL_BUILD_CWL_TOOL_VERSION}" --ignore-installed
cromwell::private::pip_install cwltest=="${CROMWELL_BUILD_CWL_TEST_VERSION}"
}
# Clone the CWL conformance repo (if absent) and pin it to the configured commit.
cromwell::private::checkout_pinned_cwl() {
if [[ ! -d "${CROMWELL_BUILD_CWL_TEST_DIRECTORY}" ]]; then
git clone \
https://github.com/common-workflow-language/common-workflow-language.git \
"${CROMWELL_BUILD_CWL_TEST_DIRECTORY}"
# The subshell keeps the directory change local; popd inside it is redundant
# but harmless.
(
pushd "${CROMWELL_BUILD_CWL_TEST_DIRECTORY}" > /dev/null
git checkout "${CROMWELL_BUILD_CWL_TEST_COMMIT}"
popd > /dev/null
)
fi
}
# Generate the JSON inputs file consumed by the CWL conformance test WDL.
cromwell::private::write_cwl_test_inputs() {
cat <<JSON >"${CROMWELL_BUILD_CWL_TEST_INPUTS}"
{
"cwl_conformance_test.cwl_dir": "${CROMWELL_BUILD_CWL_TEST_DIRECTORY}",
"cwl_conformance_test.test_result_output": "${CROMWELL_BUILD_CWL_TEST_OUTPUT}",
"cwl_conformance_test.centaur_cwl_runner": "${CROMWELL_BUILD_CWL_TEST_RUNNER}",
"cwl_conformance_test.conformance_expected_failures":
"${CROMWELL_BUILD_RESOURCES_DIRECTORY}/${CROMWELL_BUILD_BACKEND_TYPE}_conformance_expected_failures.txt",
"cwl_conformance_test.timeout": 1200
}
JSON
}
# Source the dockerhub credentials include, if it was rendered; runs silently
# when xtrace is on so the credentials are not echoed to the build log.
cromwell::private::docker_login() {
if cromwell::private::is_xtrace_enabled; then
cromwell::private::exec_silent_function cromwell::private::docker_login
else
local dockerhub_auth_include
dockerhub_auth_include="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/dockerhub_auth.inc.sh"
if [[ -f "${dockerhub_auth_include}" ]]; then
# shellcheck source=/dev/null
source "${dockerhub_auth_include}"
fi
fi
}
# Authenticate to vault via the dsde-toolbox container (Travis secure builds
# only); runs silently when xtrace is on so the token is not echoed.
cromwell::private::vault_login() {
if cromwell::private::is_xtrace_enabled; then
cromwell::private::exec_silent_function cromwell::private::vault_login
elif [[ "${CROMWELL_BUILD_IS_SECURE}" == "true" ]]; then
case "${CROMWELL_BUILD_PROVIDER}" in
"${CROMWELL_BUILD_PROVIDER_TRAVIS}")
# Login to vault to access secrets
local vault_token
vault_token="${VAULT_TOKEN}"
# Don't fail here if vault login fails
# shellcheck disable=SC2015
docker run --rm \
-v "${CROMWELL_BUILD_HOME_DIRECTORY}:/root:rw" \
broadinstitute/dsde-toolbox:dev \
vault auth "${vault_token}" < /dev/null > /dev/null && echo vault auth success \
|| true
;;
*)
;;
esac
fi
}
# Render *.ctmpl secure resources via sbt; on failure print guidance instead of
# aborting (CI continues without them, local devs are told to copy manually).
cromwell::private::render_secure_resources() {
sbt renderCiResources \
|| if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]]; then
echo
echo "Continuing without rendering secure resources."
else
echo
echo "**************************************************************"
echo "**************************************************************"
echo "**                                                          **"
echo "** WARNING: Unable to render vault resources.               **"
echo "** '*.ctmpl' files should be copied and updated manually.   **"
echo "**                                                          **"
echo "**************************************************************"
echo "**************************************************************"
fi
}
# Copy the non-secure CI resources into place via sbt.
cromwell::private::copy_all_resources() {
sbt copyCiResources
}
# Prepare secure resources appropriately for the CI provider: Travis renders
# from vault, Jenkins copies pre-provisioned files, others render best-effort.
cromwell::private::setup_secure_resources() {
if [[ "${CROMWELL_BUILD_REQUIRES_SECURE}" == "true" ]] || [[ "${CROMWELL_BUILD_OPTIONAL_SECURE}" == "true" ]]; then
case "${CROMWELL_BUILD_PROVIDER}" in
"${CROMWELL_BUILD_PROVIDER_TRAVIS}")
cromwell::private::vault_login
cromwell::private::render_secure_resources
cromwell::private::docker_login
;;
"${CROMWELL_BUILD_PROVIDER_JENKINS}")
cromwell::private::copy_all_resources
;;
*)
cromwell::private::render_secure_resources
;;
esac
else
cromwell::private::copy_all_resources
fi
}
# Create the build's log and resource directories, first making the Jenkins
# workspace writable (it may be root-owned there).
cromwell::private::make_build_directories() {
    if [[ "${CROMWELL_BUILD_PROVIDER}" == "${CROMWELL_BUILD_PROVIDER_JENKINS}" ]]; then
        sudo chmod -R a+w .
    fi
    # mkdir -p is idempotent; both directories are created in one invocation.
    mkdir -p "${CROMWELL_BUILD_LOG_DIRECTORY}" "${CROMWELL_BUILD_RESOURCES_DIRECTORY}"
}
# Locate an assembled cromwell jar and export its path (empty when none found).
cromwell::private::find_cromwell_jar() {
    # The stderr redirect now applies to `find` itself (it previously applied to
    # `head`), so a not-yet-created target directory no longer prints errors;
    # `|| true` keeps `set -e` callers alive when nothing matches.
    CROMWELL_BUILD_CROMWELL_JAR="$( \
        find "${CROMWELL_BUILD_ROOT_DIRECTORY}/server/target/scala-2.12" -name "cromwell-*.jar" \
        2> /dev/null \
        | head -n 1 \
        || true)"
    export CROMWELL_BUILD_CROMWELL_JAR
}
# Echo the cromwell version tag that precedes the version under build.
# Hotfix-branch PRs stay on the same major version; everything else uses
# current - 1. Only valid on pull request builds.
cromwell::private::calculate_prior_version_tag() {
local current_version
local prior_version
# Parse the quoted version string out of project/Version.scala.
current_version="$( \
grep 'val cromwellVersion' "${CROMWELL_BUILD_ROOT_DIRECTORY}/project/Version.scala" \
| awk -F \" '{print $2}' \
)"
# This function should only ever run on PR builds.
if [[ -z "${CROMWELL_BUILD_BRANCH_PULL_REQUEST-}" ]]; then
echo "Error: the CROMWELL_BUILD_BRANCH_PULL_REQUEST variable is not set. calculate_prior_version_tag expects to only run on Travis Pull Request builds in which this variable is set." >&2
exit 1
fi
# If this PR targets a hotfix branch, the previous version should be the same major version as this version.
# Otherwise this PR targets a non-hotfix branch so the previous version should be one less than this version.
if [[ "${CROMWELL_BUILD_BRANCH_PULL_REQUEST}" =~ ^[0-9\.]+_hotfix$ ]]; then
prior_version="$current_version"
else
prior_version=$((current_version - 1))
fi
echo "${prior_version}"
}
# Echo the path to the backend application.conf for a prior cromwell version.
# $1: the prior version tag.
cromwell::private::get_prior_version_config() {
    local prior_version
    # Fixed: prior_config was previously assigned without `local`, leaking a
    # global variable into the caller's environment.
    local prior_config
    prior_version="${1:?get_prior_version_config called without a version}"; shift
    prior_config="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/${CROMWELL_BUILD_BACKEND_TYPE}_${prior_version}_application.conf"
    echo "${prior_config}"
}
# Export the jar (and config, when present) of the prior cromwell version,
# extracting the jar from the published docker image for that version.
cromwell::private::setup_prior_version_resources() {
local prior_config
local prior_version
prior_version="$(cromwell::private::calculate_prior_version_tag)"
CROMWELL_BUILD_CROMWELL_PRIOR_VERSION_JAR="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/cromwell_${prior_version}.jar"
export CROMWELL_BUILD_CROMWELL_PRIOR_VERSION_JAR
prior_config="$(cromwell::private::get_prior_version_config "${prior_version}")"
if [[ -f "${prior_config}" ]]; then
CROMWELL_BUILD_CROMWELL_PRIOR_VERSION_CONFIG="${prior_config}"
export CROMWELL_BUILD_CROMWELL_PRIOR_VERSION_CONFIG
fi
# Copy the jar out of the versioned image; the entrypoint is cleared so the
# container runs `cp` instead of starting cromwell.
docker run \
--rm \
--entrypoint= \
-v "${PWD}:${PWD}" \
broadinstitute/cromwell:"${prior_version}" \
cp /app/cromwell.jar "${CROMWELL_BUILD_CROMWELL_PRIOR_VERSION_JAR}"
}
# Succeed when the previously located cromwell jar exists and is non-empty.
cromwell::private::exists_cromwell_jar() {
    [[ -s "${CROMWELL_BUILD_CROMWELL_JAR}" ]]
}
# Assemble the cromwell jars with coverage instrumentation enabled.
cromwell::private::assemble_jars() {
# CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND allows for an override of the default `assembly` command for assembly.
# This can be useful to reduce time and memory that might otherwise be spent assembling unused subprojects.
# shellcheck disable=SC2086
CROMWELL_SBT_ASSEMBLY_LOG_LEVEL=error sbt coverage ${CROMWELL_BUILD_SBT_ASSEMBLY_COMMAND} -error
}
# Produce sbt coverage reports and upload them to codecov (upload is best-effort).
cromwell::private::generate_code_coverage() {
sbt coverageReport -warn
sbt coverageAggregate -warn
# NOTE(review): piping a remote script into bash trusts codecov.io; failures are ignored.
bash <(curl -s https://codecov.io/bash) > /dev/null || true
}
# Publish library artifacts only (no docker images); extra args go to sbt.
cromwell::private::publish_artifacts_only() {
CROMWELL_SBT_ASSEMBLY_LOG_LEVEL=warn sbt "$@" publish -warn
}
# Publish library artifacts and build/push docker images; extra args go to sbt.
cromwell::private::publish_artifacts_and_docker() {
CROMWELL_SBT_ASSEMBLY_LOG_LEVEL=warn sbt "$@" publish dockerBuildAndPush -warn
}
# Verify artifactory credentials exist without actually publishing anything.
cromwell::private::publish_artifacts_check() {
sbt verifyArtifactoryCredentialsExist -warn
}
# Some CI environments want to know when new docker images are published. They do not currently poll dockerhub but do
# poll github. To help those environments, signal that a new set of docker images has been published to dockerhub by
# updating a well known branch in github.
cromwell::private::push_publish_complete() {
local github_private_deploy_key="${CROMWELL_BUILD_RESOURCES_DIRECTORY}/github_private_deploy_key"
local git_repo="git@github.com:broadinstitute/cromwell.git"
local git_publish_branch="${CROMWELL_BUILD_BRANCH}_publish_complete"
local git_publish_remote="publish_complete"
local git_publish_message="publish complete [skip ci]"
# Loosely adapted from https://github.com/broadinstitute/workbench-libs/blob/435a932/scripts/version_update.sh
# Build an empty throwaway repo and force-push a single empty commit to the
# signal branch, authenticating with the deploy key via core.sshCommand.
mkdir publish_complete
pushd publish_complete > /dev/null
git init
git config core.sshCommand "ssh -i ${github_private_deploy_key} -F /dev/null"
git config user.email "${CROMWELL_BUILD_GIT_USER_EMAIL}"
git config user.name "${CROMWELL_BUILD_GIT_USER_NAME}"
git remote add "${git_publish_remote}" "${git_repo}"
git checkout -b "${git_publish_branch}"
git commit --allow-empty -m "${git_publish_message}"
git push -f "${git_publish_remote}" "${git_publish_branch}"
popd > /dev/null
}
# Print a heartbeat pattern once a minute in the background so CI providers do
# not kill a quiet build; bounded so the process cannot zombie forever.
cromwell::private::start_build_heartbeat() {
# Sleep one minute between printouts, but don't zombie forever
for ((i=0; i < "${CROMWELL_BUILD_HEARTBEAT_MINUTES}"; i++)); do
sleep 60
# shellcheck disable=SC2059
printf "${CROMWELL_BUILD_HEARTBEAT_PATTERN}"
done &
CROMWELL_BUILD_HEARTBEAT_PID=$!
}
# Background-tail the cromwell log (once the file appears), recording the PID
# so the tail can be killed on exit.
cromwell::private::start_cromwell_log_tail() {
while [[ ! -f "${CROMWELL_BUILD_CROMWELL_LOG}" ]]; do
sleep 2
done && tail -n 0 -f "${CROMWELL_BUILD_CROMWELL_LOG}" 2> /dev/null &
CROMWELL_BUILD_CROMWELL_LOG_TAIL_PID=$!
}
# Background-tail the centaur log (once the file appears), recording the PID
# so the tail can be killed on exit.
cromwell::private::start_centaur_log_tail() {
while [[ ! -f "${CROMWELL_BUILD_CENTAUR_LOG}" ]]; do
sleep 2
done && tail -n 0 -f "${CROMWELL_BUILD_CENTAUR_LOG}" 2> /dev/null &
CROMWELL_BUILD_CENTAUR_LOG_TAIL_PID=$!
}
# Dump the centaur log to stdout under a header (typically run on exit).
cromwell::private::cat_centaur_log() {
    printf 'CENTAUR LOG\n'
    cat "${CROMWELL_BUILD_CENTAUR_LOG}"
}
# Dump the CWL conformance output to stdout under a header (typically run on exit).
cromwell::private::cat_conformance_log() {
    printf 'CONFORMANCE LOG\n'
    cat "${CROMWELL_BUILD_CWL_TEST_OUTPUT}"
}
# Stop the background heartbeat process, if one was started.
cromwell::private::kill_build_heartbeat() {
if [[ -n "${CROMWELL_BUILD_HEARTBEAT_PID:+set}" ]]; then
cromwell::private::kill_tree "${CROMWELL_BUILD_HEARTBEAT_PID}"
fi
}
# Stop the background cromwell log tail, if one was started.
cromwell::private::kill_cromwell_log_tail() {
if [[ -n "${CROMWELL_BUILD_CROMWELL_LOG_TAIL_PID:+set}" ]]; then
cromwell::private::kill_tree "${CROMWELL_BUILD_CROMWELL_LOG_TAIL_PID}"
fi
}
# Stop the background centaur log tail, if one was started.
cromwell::private::kill_centaur_log_tail() {
    if [[ -n "${CROMWELL_BUILD_CENTAUR_LOG_TAIL_PID:+set}" ]]; then
        # Quoted for consistency with the sibling kill_* functions (SC2086).
        cromwell::private::kill_tree "${CROMWELL_BUILD_CENTAUR_LOG_TAIL_PID}"
    fi
}
# Run each registered exit command (one per line, best-effort) and then remove
# the registry file so the functions run at most once.
cromwell::private::run_exit_functions() {
if [[ -f "${CROMWELL_BUILD_EXIT_FUNCTIONS}" ]]; then
local exit_function
while read -r exit_function; do
${exit_function} || true
done < "${CROMWELL_BUILD_EXIT_FUNCTIONS}"
rm "${CROMWELL_BUILD_EXIT_FUNCTIONS}" || true
fi
}
# Adds the function to the list of functions to run on exit.
# Requires at least one positional parameter, the function to run.
cromwell::private::add_exit_function() {
if [[ "$#" -eq 0 ]]; then
echo "Error: add_exit_function called without a function" >&2
exit 1
fi
echo "$@" >> "${CROMWELL_BUILD_EXIT_FUNCTIONS}"
trap cromwell::private::run_exit_functions TERM EXIT
}
# Run a function with xtrace temporarily disabled (e.g. to keep secrets out of
# logs), restoring the caller's xtrace setting afterwards.
cromwell::private::exec_silent_function() {
local silent_function
local xtrace_restore_function
silent_function="${1:?exec_silent_function called without a function}"; shift
# `shopt -po xtrace` echoes the `set` command that restores the current state.
xtrace_restore_function="$(shopt -po xtrace || true)"
shopt -uo xtrace
${silent_function} "$@"
${xtrace_restore_function}
}
cromwell::private::is_xtrace_enabled() {
# Succeeds (returns 0) when xtrace is enabled (set -x); fails when disabled.
# (`shopt -qo xtrace` exits 0 only while the option is on.)
shopt -qo xtrace
}
# Recursively kill a process and all of its descendants (children first).
# $1: the root pid of the tree to kill.
cromwell::private::kill_tree() {
local pid
local cpid
pid="${1:?kill_tree called without a pid}"; shift
for cpid in $(pgrep -P "${pid}"); do
cromwell::private::kill_tree "${cpid}"
done
# Suppress "no such process" noise when the pid already exited.
kill "${pid}" 2> /dev/null
}
# Start a background Cromwell server configured for CWL conformance testing,
# recording its PID for later shutdown.
cromwell::private::start_conformance_cromwell() {
# Start the Cromwell server in the directory containing input files so it can access them via their relative path
pushd "${CROMWELL_BUILD_CWL_TEST_RESOURCES}" > /dev/null
# Turn off call caching as hashing doesn't work since it sees local and not GCS paths.
# CWL conformance uses alpine images that do not have bash.
java \
-Xmx2g \
-Dconfig.file="${CROMWELL_BUILD_CROMWELL_CONFIG}" \
-Dcall-caching.enabled=false \
-Dsystem.job-shell=/bin/sh \
-jar "${CROMWELL_BUILD_CROMWELL_JAR}" \
server &
CROMWELL_BUILD_CONFORMANCE_CROMWELL_PID=$!
popd > /dev/null
}
# Stop the background conformance Cromwell server, if one was started.
cromwell::private::kill_conformance_cromwell() {
    if [[ -n "${CROMWELL_BUILD_CONFORMANCE_CROMWELL_PID+set}" ]]; then
        # Call the private helper directly instead of routing through the
        # public cromwell::build::kill_tree wrapper, matching the other
        # private kill_* functions.
        cromwell::private::kill_tree "${CROMWELL_BUILD_CONFORMANCE_CROMWELL_PID}"
    fi
}
# Run the CWL conformance WDL in single-workflow mode, from the resources
# directory so relative input paths resolve.
cromwell::private::run_conformance_wdl() {
pushd "${CROMWELL_BUILD_CWL_TEST_RESOURCES}" > /dev/null
java \
-Xmx6g \
-Dbackend.providers.Local.config.concurrent-job-limit="${CROMWELL_BUILD_CWL_TEST_PARALLELISM}" \
-jar "${CROMWELL_BUILD_CROMWELL_JAR}" \
run "${CROMWELL_BUILD_CWL_TEST_WDL}" \
-i "${CROMWELL_BUILD_CWL_TEST_INPUTS}"
popd > /dev/null
}
# Public entry point: initialize build variables and exec the per-type test
# script, honoring the "run tests?" decision for push builds.
cromwell::build::exec_test_script() {
cromwell::private::create_build_variables
if [[ "${CROMWELL_BUILD_RUN_TESTS}" == "false" ]]; then
echo "Use '[force ci]' in commit message to run tests on 'push'"
exit 0
fi
cromwell::private::exec_test_script
}
# Public entry point: prepare the environment shared by all build types,
# with provider-specific setup for Travis and Jenkins.
cromwell::build::setup_common_environment() {
cromwell::private::check_debug
cromwell::private::create_build_variables
cromwell::private::echo_build_variables
cromwell::private::create_database_variables
cromwell::private::verify_secure_build
cromwell::private::verify_pull_request_build
cromwell::private::make_build_directories
cromwell::private::setup_secure_resources
case "${CROMWELL_BUILD_PROVIDER}" in
"${CROMWELL_BUILD_PROVIDER_TRAVIS}")
cromwell::private::stop_travis_defaults
cromwell::private::delete_boto_config
cromwell::private::delete_sbt_boot
cromwell::private::upgrade_pip
cromwell::private::pull_common_docker_images
cromwell::private::install_wait_for_it
cromwell::private::start_docker_mysql
cromwell::private::start_docker_mariadb
cromwell::private::start_docker_postgresql
;;
"${CROMWELL_BUILD_PROVIDER_JENKINS}")
cromwell::private::delete_boto_config
cromwell::private::delete_sbt_boot
cromwell::private::upgrade_pip
cromwell::private::install_wait_for_it
;;
*)
cromwell::private::pull_common_docker_images
;;
esac
}
# Public entry point: prepare a centaur test run — prior-version resources for
# upgrade tests, a heartbeat, and background log tails with exit cleanup.
cromwell::build::setup_centaur_environment() {
cromwell::private::create_centaur_variables
if [[ "${CROMWELL_BUILD_CENTAUR_TYPE}" == "${CROMWELL_BUILD_CENTAUR_TYPE_ENGINE_UPGRADE}" ]] || \
[[ "${CROMWELL_BUILD_CENTAUR_TYPE}" == "${CROMWELL_BUILD_CENTAUR_TYPE_PAPI_UPGRADE}" ]]; then
cromwell::private::setup_prior_version_resources
fi
cromwell::private::start_build_heartbeat
cromwell::private::start_cromwell_log_tail
cromwell::private::start_centaur_log_tail
if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]]; then
cromwell::private::add_exit_function cromwell::private::cat_centaur_log
fi
cromwell::private::add_exit_function cromwell::private::kill_build_heartbeat
cromwell::private::add_exit_function cromwell::private::kill_cromwell_log_tail
cromwell::private::add_exit_function cromwell::private::kill_centaur_log_tail
}
# Public entry point: prepare a CWL conformance run — tools, pinned CWL repo,
# generated inputs, heartbeat, and exit-time log dump.
cromwell::build::setup_conformance_environment() {
cromwell::private::create_centaur_variables
cromwell::private::create_conformance_variables
if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]]; then
cromwell::private::install_cwltest
fi
cromwell::private::checkout_pinned_cwl
cromwell::private::write_cwl_test_inputs
cromwell::private::start_build_heartbeat
cromwell::private::add_exit_function cromwell::private::cat_conformance_log
cromwell::private::add_exit_function cromwell::private::kill_build_heartbeat
}
# Public entry point: prepare a docker-compose based build — heartbeat plus,
# on Travis, a pinned docker-compose that reports correct exit codes.
cromwell::build::setup_docker_environment() {
cromwell::private::start_build_heartbeat
cromwell::private::add_exit_function cromwell::private::kill_build_heartbeat
if [[ "${CROMWELL_BUILD_PROVIDER}" == "${CROMWELL_BUILD_PROVIDER_TRAVIS}" ]]; then
# Upgrade docker-compose so that we get the correct exit codes
docker-compose -version
sudo rm /usr/local/bin/docker-compose
curl \
-L "https://github.com/docker/compose/releases/download/1.23.2/docker-compose-$(uname -s)-$(uname -m)" \
> docker-compose
chmod +x docker-compose
sudo mv docker-compose /usr/local/bin
docker-compose -version
fi
}
# Public entry point: ensure an assembled cromwell jar exists. CI always
# rebuilds; local runs reuse an existing jar. Fails if no jar is found after
# assembly.
cromwell::build::assemble_jars() {
cromwell::private::find_cromwell_jar
if [[ "${CROMWELL_BUILD_IS_CI}" == "true" ]] || ! cromwell::private::exists_cromwell_jar; then
echo "Please wait, building jars…"
cromwell::private::assemble_jars
fi
cromwell::private::find_cromwell_jar
if ! cromwell::private::exists_cromwell_jar; then
echo "Error: find_cromwell_jar did not locate a cromwell jar even after assembly" >&2
exit 1
fi
}
# Public entry point: run the centaur test suite, forwarding any configured
# additional parameters plus the caller's arguments.
cromwell::build::run_centaur() {
local -a additional_args
additional_args=()
if [[ -n "${CROMWELL_BUILD_CENTAUR_TEST_ADDITIONAL_PARAMETERS-}" ]]; then
# Allow splitting on space to simulate an exported array
# https://stackoverflow.com/questions/5564418/exporting-an-array-in-bash-script#answer-5564589
# shellcheck disable=SC2206
additional_args=(${CROMWELL_BUILD_CENTAUR_TEST_ADDITIONAL_PARAMETERS})
fi
# Handle empty arrays in older versions of bash
# https://stackoverflow.com/questions/7577052/bash-empty-array-expansion-with-set-u#answer-7577209
"${CROMWELL_BUILD_ROOT_DIRECTORY}/centaur/test_cromwell.sh" \
-n "${CROMWELL_BUILD_CENTAUR_CONFIG}" \
-l "${CROMWELL_BUILD_LOG_DIRECTORY}" \
-g \
${additional_args[@]+"${additional_args[@]}"} \
"$@"
}
# Public entry point: start a conformance Cromwell server, register shutdown,
# then run the conformance WDL against it.
cromwell::build::run_conformance() {
cromwell::private::start_conformance_cromwell
cromwell::private::add_exit_function cromwell::private::kill_conformance_cromwell
# Give cromwell time to start up
sleep 30
cromwell::private::run_conformance_wdl
}
# Public entry point: generate and upload coverage when enabled for this build.
cromwell::build::generate_code_coverage() {
if [[ "${CROMWELL_BUILD_GENERATE_COVERAGE}" == "true" ]]; then
cromwell::private::generate_code_coverage
fi
}
# Public entry point: publish artifacts/docker images for Travis sbt push
# builds. develop publishes snapshots + both docker tags, hotfix branches
# re-publish floating docker tags, git tags publish immutable artifacts, and
# other secure builds only verify credentials.
cromwell::build::publish_artifacts() {
if [[ "${CROMWELL_BUILD_PROVIDER}" == "${CROMWELL_BUILD_PROVIDER_TRAVIS}" ]] && \
[[ "${CROMWELL_BUILD_TYPE}" == "sbt" ]] && \
[[ "${CROMWELL_BUILD_EVENT}" == "push" ]]; then
if [[ "${CROMWELL_BUILD_BRANCH}" == "develop" ]]; then
# Publish images for both the "cromwell develop branch" and the "cromwell dev environment".
CROMWELL_SBT_DOCKER_TAGS=develop,dev \
cromwell::private::publish_artifacts_and_docker \
-Dproject.isSnapshot=true
cromwell::private::push_publish_complete
elif [[ "${CROMWELL_BUILD_BRANCH}" =~ ^[0-9\.]+_hotfix$ ]]; then
# Docker tags float. "30" is the latest hotfix. Those dockers are published here on each hotfix commit.
cromwell::private::publish_artifacts_and_docker -Dproject.isSnapshot=false
elif [[ -n "${CROMWELL_BUILD_TAG:+set}" ]]; then
# Artifact tags are static. Once "30" is set that is only "30" forever. Those artifacts are published here.
cromwell::private::publish_artifacts_only \
-Dproject.version="${CROMWELL_BUILD_TAG}" \
-Dproject.isSnapshot=false
elif [[ "${CROMWELL_BUILD_IS_SECURE}" == "true" ]]; then
cromwell::private::publish_artifacts_check
fi
fi
}
# Run a function, retrying on failure.
# $1: the function to run; $2: retry count (default 3); $3: seconds to sleep
# between attempts (default 15). Returns the exit status of the last attempt.
cromwell::build::exec_retry_function() {
    local retried_function
    local retry_count
    # Fixed: sleep_seconds was previously assigned without `local`, leaking a
    # global variable into the caller's environment.
    local sleep_seconds
    local attempt
    local exit_status
    retried_function="${1:?exec_retry_function called without a function}"; shift
    retry_count="${1:-3}"; shift || true
    sleep_seconds="${1:-15}"; shift || true
    # https://unix.stackexchange.com/a/82610
    # https://stackoverflow.com/a/17336953
    for attempt in $(seq 0 "${retry_count}"); do
        [[ ${attempt} -gt 0 ]] && sleep "${sleep_seconds}"
        ${retried_function} && exit_status=0 && break || exit_status=$?
    done
    return ${exit_status}
}
# Public wrapper: run a function with xtrace suppressed only when xtrace is
# actually enabled; otherwise just run it directly.
cromwell::build::exec_silent_function() {
local silent_function
silent_function="${1:?exec_silent_function called without a function}"; shift
if cromwell::private::is_xtrace_enabled; then
cromwell::private::exec_silent_function "${silent_function}" "$@"
else
${silent_function} "$@"
fi
}
# Public wrapper around the environment-aware pip install helper.
cromwell::build::pip_install() {
cromwell::private::pip_install "$@"
}
# Public wrapper: register a command to run on exit.
# Forwards ALL arguments (previously only "$1" was forwarded, silently
# dropping the rest of a multi-word command; the private implementation and
# its internal callers use "$@").
cromwell::build::add_exit_function() {
    cromwell::private::add_exit_function "$@"
}
# Public wrapper: recursively kill a process tree rooted at pid "$1".
cromwell::build::kill_tree() {
cromwell::private::kill_tree "$1"
}
|
package lit.litfx.core.components;
import javafx.util.Duration;
/**
 * A LitFX effect that can be animated over time, resized, queried for its
 * animation state, and stopped.
 *
 * @author Birdasaur
 */
public interface AnimatedEffect {
    /** Starts animating the effect using the supplied duration. */
    void animate(Duration milliseconds);
    /** Updates the effect's length to the given value. */
    void updateLength(int length);
    /** @return {@code true} while the effect is currently animating */
    boolean isAnimating();
    /** Stops the animation. */
    void stop();
}
|
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.util.tree;
import org.apache.commons.math3.util.FastMath;
import org.moeaframework.core.Settings;
/**
* Provides many arithmetic and trigonometric functions that operate on
* {@link Number}s, performing any necessary implicit casting. An integer
* number remains an integer unless the specific function requires
* floating-point values. These methods favor {@link Long} and {@link Double}
* representations for integer and floating-point values, respectively.
* <p>
* The arithmetic functions provided herein support optional function
* protection, which is enabled by default. Function protection prevents
* values like {@code Inf} and {@code NaN} from appearing due to invalid
* inputs. For example, this protects against division-by-zero. To disable
* function protection, set the property
* {@code org.moeaframework.util.tree.protected_functions = false} in the file
* {@code global.properties}. {@code Inf} and {@code NaN} values can still
* occur with function protection enabled, so outputs should still be
* validated.
*/
public class NumberArithmetic {
/**
* Private constructor to prevent instantiation.
*/
private NumberArithmetic() {
super();
}
/**
 * Tests two numbers for equality, comparing as {@code double}s when either
 * operand is floating-point and as {@code long}s otherwise.
 *
 * @param a the first number
 * @param b the second number
 * @return {@code true} if the two numbers are equal; {@code false} otherwise
 */
public static boolean equals(Number a, Number b) {
    boolean anyFloating = isFloatingPoint(a) || isFloatingPoint(b);
    return anyFloating
            ? a.doubleValue() == b.doubleValue()
            : a.longValue() == b.longValue();
}
/**
 * Tests whether the first number is less than the second, comparing as
 * {@code double}s when either operand is floating-point and as {@code long}s
 * otherwise.
 *
 * @param a the first number
 * @param b the second number
 * @return {@code true} if the first number is less than the second;
 *         {@code false} otherwise
 */
public static boolean lessThan(Number a, Number b) {
    boolean anyFloating = isFloatingPoint(a) || isFloatingPoint(b);
    return anyFloating
            ? a.doubleValue() < b.doubleValue()
            : a.longValue() < b.longValue();
}
/**
 * Tests whether the first number is less than or equal to the second.
 *
 * @param a the first number
 * @param b the second number
 * @return {@code true} if the first number is less than or equal to the
 *         second; {@code false} otherwise
 */
public static boolean lessThanOrEqual(Number a, Number b) {
    if (lessThan(a, b)) {
        return true;
    }
    return equals(a, b);
}
/**
 * Tests whether the first number is greater than the second, comparing as
 * {@code double}s when either operand is floating-point and as {@code long}s
 * otherwise.
 *
 * @param a the first number
 * @param b the second number
 * @return {@code true} if the first number is greater than the second;
 *         {@code false} otherwise
 */
public static boolean greaterThan(Number a, Number b) {
    boolean anyFloating = isFloatingPoint(a) || isFloatingPoint(b);
    return anyFloating
            ? a.doubleValue() > b.doubleValue()
            : a.longValue() > b.longValue();
}
/**
 * Tests whether the first number is greater than or equal to the second.
 *
 * @param a the first number
 * @param b the second number
 * @return {@code true} if the first number is greater than or equal to the
 *         second; {@code false} otherwise
 */
public static boolean greaterThanOrEqual(Number a, Number b) {
    if (greaterThan(a, b)) {
        return true;
    }
    return equals(a, b);
}
/**
 * Returns the sum of the two numbers, as a {@link Double} when either operand
 * is floating-point and as a {@link Long} otherwise.
 *
 * @param a the first number
 * @param b the second number
 * @return the value of adding the two numbers
 */
public static Number add(Number a, Number b) {
    // Guard clauses instead of ?: keep the Long/Double boxing distinct — a
    // ternary would promote both branches to double.
    if (isFloatingPoint(a) || isFloatingPoint(b)) {
        double sum = a.doubleValue() + b.doubleValue();
        return sum;
    }
    long sum = a.longValue() + b.longValue();
    return sum;
}
/**
 * Returns the square root of the number. If the number is less than zero
 * and function protection is enabled, this function returns the square root
 * of the absolute value of the number.
 *
 * @param a the number
 * @return the square root of the number
 * @see Math#sqrt(double)
 */
public static Number sqrt(Number a) {
if ((a.doubleValue() < 0.0) && Settings.isProtectedFunctions()) {
return Math.sqrt(Math.abs(a.doubleValue()));
} else {
return Math.sqrt(a.doubleValue());
}
}
/**
 * Returns the value of the first number raised to the power of the second.
 *
 * @param a the first number
 * @param b the second number
 * @return the value of the first number to the power of the second
 * @see Math#pow(double, double)
 */
public static Number pow(Number a, Number b) {
    double base = a.doubleValue();
    double exponent = b.doubleValue();
    return Math.pow(base, exponent);
}
/**
 * Returns the value of subtracting the second number from the first
 * (i.e. {@code a - b}).
 *
 * @param a the first number
 * @param b the second number
 * @return the value of subtracting the second number from the first
 */
public static Number sub(Number a, Number b) {
if (isFloatingPoint(a) || isFloatingPoint(b)) {
return a.doubleValue() - b.doubleValue();
} else {
return a.longValue() - b.longValue();
}
}
/**
 * Returns the product of the two numbers, as a {@link Double} when either
 * operand is floating-point and as a {@link Long} otherwise.
 *
 * @param a the first number
 * @param b the second number
 * @return the value of multiplying the two numbers
 */
public static Number mul(Number a, Number b) {
    // Guard clauses instead of ?: keep the Long/Double boxing distinct — a
    // ternary would promote both branches to double.
    if (isFloatingPoint(a) || isFloatingPoint(b)) {
        double product = a.doubleValue() * b.doubleValue();
        return product;
    }
    long product = a.longValue() * b.longValue();
    return product;
}
/**
 * Returns the value of dividing the first number by the second. If the
 * second argument is {@code 0} and function protection is enabled, this
 * function returns {@code 1} regardless of the first argument's value.
 *
 * @param a the first number
 * @param b the second number
 * @return the value of dividing the first number by the second
 */
public static Number div(Number a, Number b) {
if (isFloatingPoint(a) || isFloatingPoint(b)) {
// Protected: any |b| below the EPS tolerance counts as division by zero.
if ((Math.abs(b.doubleValue()) < Settings.EPS) &&
Settings.isProtectedFunctions()) {
return 1.0;
} else {
return a.doubleValue() / b.doubleValue();
}
} else {
// Integer path: only an exact zero divisor triggers protection.
if ((b.longValue() == 0) && Settings.isProtectedFunctions()) {
return 1L;
} else {
return a.longValue() / b.longValue();
}
}
}
/**
 * Returns the remainder from dividing the first number by the second. If
 * the second argument is {@code 0} and function protection is enabled,
 * this function returns {@code 0} regardless of the first argument's
 * value.
 *
 * @param a the first number
 * @param b the second number
 * @return the remainder from dividing the first number by the second
 */
public static Number mod(Number a, Number b) {
if (isFloatingPoint(a) || isFloatingPoint(b)) {
// Protected: any |b| below the EPS tolerance counts as a zero divisor.
if ((Math.abs(b.doubleValue()) < Settings.EPS) &&
Settings.isProtectedFunctions()) {
return 0.0;
} else {
return a.doubleValue() % b.doubleValue();
}
} else {
// Integer path: only an exact zero divisor triggers protection.
if ((b.longValue() == 0) && Settings.isProtectedFunctions()) {
return 0L;
} else {
return a.longValue() % b.longValue();
}
}
}
/**
 * Returns the largest integer value less than or equal to the given number.
 * Integer inputs are returned unchanged (as a {@link Long}).
 *
 * @param a the number
 * @return the largest integer value less than or equal to the given number
 * @see Math#floor(double)
 */
public static Number floor(Number a) {
    // Inverted guard instead of if/else; kept as separate returns so integer
    // input stays boxed as Long (a ternary would promote to double).
    if (!isFloatingPoint(a)) {
        return a.longValue();
    }
    return Math.floor(a.doubleValue());
}
/**
 * Returns the smallest integer value greater than or equal to the given
 * number. Integer inputs are returned unchanged (as a {@link Long}).
 *
 * @param a the number
 * @return the smallest integer value greater than or equal to the given
 *         number
 * @see Math#ceil(double)
 */
public static Number ceil(Number a) {
    // Inverted guard instead of if/else; kept as separate returns so integer
    // input stays boxed as Long (a ternary would promote to double).
    if (!isFloatingPoint(a)) {
        return a.longValue();
    }
    return Math.ceil(a.doubleValue());
}
/**
 * Returns the value of the number rounded to the nearest integer. Integer
 * inputs are returned unchanged (as a {@link Long}).
 *
 * @param a the number
 * @return the value of the number rounded to the nearest integer
 * @see Math#round(double)
 */
public static Number round(Number a) {
    // Inverted guard instead of if/else; both paths yield a Long.
    if (!isFloatingPoint(a)) {
        return a.longValue();
    }
    return Math.round(a.doubleValue());
}
/**
 * Returns the absolute value of the number, as a {@link Double} for
 * floating-point input and a {@link Long} otherwise.
 *
 * @param a the number
 * @return the absolute value of the number
 * @see Math#abs(long)
 * @see Math#abs(double)
 */
public static Number abs(Number a) {
    // Inverted guard; separate returns keep the Long/Double boxing distinct.
    if (!isFloatingPoint(a)) {
        return Math.abs(a.longValue());
    }
    return Math.abs(a.doubleValue());
}
/**
 * Returns the natural logarithm of the number. If the numbers is
 * negative and function protection is enabled, then this function returns
 * the natural logarithm of the absolute value of the number. If the
 * number is near zero and function protection is enabled, this function
 * returns {@code 0.0}.
 *
 * @param a the number
 * @return the natural logarithm of the number
 * @see Math#log(double)
 */
public static Number log(Number a) {
// The guard triggers for any value below EPS — that includes all negative
// inputs and near-zero positives; the abs() below distinguishes the two.
if ((a.doubleValue() < Settings.EPS) &&
Settings.isProtectedFunctions()) {
double value = Math.abs(a.doubleValue());
if (value < Settings.EPS) {
return 0.0;
} else {
return Math.log(value);
}
} else {
return Math.log(a.doubleValue());
}
}
/**
 * Returns the base-10 logarithm of the number.
 *
 * <p>With function protection enabled, arguments below {@code Settings.EPS}
 * are replaced by their absolute value, and values still within
 * {@code Settings.EPS} of zero yield {@code 0.0} instead of
 * {@code -Infinity}/{@code NaN}.
 *
 * @param a the number
 * @return the base-10 logarithm of the number
 * @see Math#log10(double)
 */
public static Number log10(Number a) {
    final double x = a.doubleValue();
    // Unprotected mode, or argument safely above EPS: plain logarithm.
    if (!Settings.isProtectedFunctions() || x >= Settings.EPS) {
        return Math.log10(x);
    }
    final double magnitude = Math.abs(x);
    return magnitude < Settings.EPS ? 0.0 : Math.log10(magnitude);
}
/**
 * Returns Euler's number <i>e</i> raised to the power of the given number.
 *
 * @param a the exponent
 * @return {@code e} raised to {@code a}
 * @see Math#exp(double)
 */
public static Number exp(Number a) {
    final double exponent = a.doubleValue();
    return Math.exp(exponent);
}
/**
 * Returns the trigonometric sine of the number (radians).
 *
 * @param a the angle in radians
 * @return the sine of {@code a}
 * @see Math#sin(double)
 */
public static Number sin(Number a) {
    final double radians = a.doubleValue();
    return Math.sin(radians);
}
/**
 * Returns the trigonometric cosine of the number (radians).
 *
 * @param a the angle in radians
 * @return the cosine of {@code a}
 * @see Math#cos(double)
 */
public static Number cos(Number a) {
    final double radians = a.doubleValue();
    return Math.cos(radians);
}
/**
 * Returns the trigonometric tangent of the number (radians).
 *
 * @param a the angle in radians
 * @return the tangent of {@code a}
 * @see Math#tan(double)
 */
public static Number tan(Number a) {
    final double radians = a.doubleValue();
    return Math.tan(radians);
}
/**
 * Returns the arc sine of the number.
 *
 * @param a the value whose arc sine is taken
 * @return the arc sine of {@code a}, in radians
 * @see Math#asin(double)
 */
public static Number asin(Number a) {
    final double value = a.doubleValue();
    return Math.asin(value);
}
/**
 * Returns the arc cosine of the number.
 *
 * @param a the value whose arc cosine is taken
 * @return the arc cosine of {@code a}, in radians
 * @see Math#acos(double)
 */
public static Number acos(Number a) {
    final double value = a.doubleValue();
    return Math.acos(value);
}
/**
 * Returns the arc tangent of the number.
 *
 * @param a the value whose arc tangent is taken
 * @return the arc tangent of {@code a}, in radians
 * @see Math#atan(double)
 */
public static Number atan(Number a) {
    final double value = a.doubleValue();
    return Math.atan(value);
}
/**
 * Returns the hyperbolic sine of the number.
 *
 * @param a the number
 * @return the hyperbolic sine of {@code a}
 * @see Math#sinh(double)
 */
public static Number sinh(Number a) {
    final double value = a.doubleValue();
    return Math.sinh(value);
}
/**
 * Returns the hyperbolic cosine of the number.
 *
 * @param a the number
 * @return the hyperbolic cosine of {@code a}
 * @see Math#cosh(double)
 */
public static Number cosh(Number a) {
    final double value = a.doubleValue();
    return Math.cosh(value);
}
/**
 * Returns the hyperbolic tangent of the number.
 *
 * @param a the number
 * @return the hyperbolic tangent of {@code a}
 * @see Math#tanh(double)
 */
public static Number tanh(Number a) {
    final double value = a.doubleValue();
    return Math.tanh(value);
}
/**
 * Returns the inverse hyperbolic sine of the number.
 *
 * @param a the number
 * @return the inverse hyperbolic sine of {@code a}
 * @see FastMath#asinh(double)
 */
public static Number asinh(Number a) {
    final double value = a.doubleValue();
    return FastMath.asinh(value);
}
/**
 * Returns the inverse hyperbolic cosine of the number.
 *
 * @param a the number
 * @return the inverse hyperbolic cosine of {@code a}
 * @see FastMath#acosh(double)
 */
public static Number acosh(Number a) {
    final double value = a.doubleValue();
    return FastMath.acosh(value);
}
/**
 * Returns the inverse hyperbolic tangent of the number.
 *
 * @param a the number
 * @return the inverse hyperbolic tangent of {@code a}
 * @see FastMath#atanh(double)
 */
public static Number atanh(Number a) {
    final double value = a.doubleValue();
    return FastMath.atanh(value);
}
/**
 * Returns the maximum value of two numbers.
 *
 * <p>If either argument is floating-point, the comparison is performed in
 * {@code double} precision; otherwise it is performed in {@code long}
 * precision.
 *
 * @param a the first number
 * @param b the second number
 * @return the maximum value of two numbers
 * @see Math#max(long, long)
 * @see Math#max(double, double)
 */
public static Number max(Number a, Number b) {
    if (isFloatingPoint(a) || isFloatingPoint(b)) {
        return Math.max(a.doubleValue(), b.doubleValue());
    } else {
        return Math.max(a.longValue(), b.longValue());
    }
}
/**
 * Returns the minimum value of two numbers.
 *
 * <p>If either argument is floating-point, the comparison is performed in
 * {@code double} precision; otherwise it is performed in {@code long}
 * precision.
 *
 * @param a the first number
 * @param b the second number
 * @return the minimum value of two numbers
 * @see Math#min(long, long)
 * @see Math#min(double, double)
 */
public static Number min(Number a, Number b) {
    if (isFloatingPoint(a) || isFloatingPoint(b)) {
        return Math.min(a.doubleValue(), b.doubleValue());
    } else {
        return Math.min(a.longValue(), b.longValue());
    }
}
/**
 * Returns the sign of the number: -1, 0 or 1.
 *
 * @param a the number
 * @return the signum of {@code a}
 * @see Long#signum(long)
 * @see Math#signum(double)
 */
public static Number sign(Number a) {
    if (!isFloatingPoint(a)) {
        return Long.signum(a.longValue());
    }
    return Math.signum(a.doubleValue());
}
/**
 * Tells whether the number is a floating-point value.
 *
 * @param a the number
 * @return {@code true} for {@link Float} or {@link Double} instances,
 *         {@code false} otherwise
 */
public static boolean isFloatingPoint(Number a) {
    if (a instanceof Float) {
        return true;
    }
    return a instanceof Double;
}
}
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Resolve the directory containing this script so builds run from a known cwd.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Abort if we cannot enter the script directory; otherwise the docker builds
# below would silently run against whatever directory we happened to be in.
cd "${DIR}" || exit 1
echo "build image in dir ${DIR}"
echo "start build Hadoop docker image"
docker build -f Dockerfile_hadoop -t hadoop2.7-all-in-one .
docker build -f Dockerfile -t apachekylin/apache-kylin-standalone:3.1.0 .
|
#!/bin/bash
# Renders the aws-app-mesh-inject webhook manifest (inject.yaml) from a
# template, filling in the mesh name, injector image and cluster CA bundle.
# Fail fast on errors, unset variables and pipeline failures.
set -o errexit
set -o nounset
set -o pipefail
# Values substituted into inject.yaml.template below.
export MESH_NAME=dj-app
export IMAGE_NAME=602401143452.dkr.ecr.us-west-2.amazonaws.com/amazon/aws-app-mesh-inject:v0.1.0
export MESH_REGION="" # Leave this empty
# Directory containing this script and the template.
ROOT=$(cd $(dirname $0)/; pwd)
# CA bundle the API server needs to trust the webhook. Primary source: the
# extension-apiserver-authentication ConfigMap, re-encoded as one-line base64.
export CA_BUNDLE=$(kubectl get configmap -n kube-system extension-apiserver-authentication -o=jsonpath='{.data.client-ca-file}' | base64 | tr -d '\n')
# Fallback: certificate-authority-data straight from the kubeconfig (already
# base64-encoded); requires jq.
if [[ -z $CA_BUNDLE ]]; then
  export CA_BUNDLE=$(kubectl config view --raw -o json --minify | jq -r '.clusters[0].cluster."certificate-authority-data"' | tr -d '"')
fi
echo "processing templates"
# Expand ${VAR} references in the template via an eval'd here-doc.
# NOTE(review): eval also executes command substitutions embedded in the
# template — assumes the template file is trusted.
eval "cat <<EOF
$(<${ROOT}/inject.yaml.template)
EOF
" > ${ROOT}/inject.yaml
echo "Created injector manifest at:${ROOT}/inject.yaml"
|
#!/usr/bin/env sh
# This script cleans orphaned FW rules across all projects.
# A rule is considered orphaned when its target tags do not match the tag of
# the project's GKE cluster instance template.
# Enable debug output if $DEBUG is set to true
[ "${DEBUG:=false}" = 'true' ] && set -x
# Only print commands to delete rules by default; set DRY_RUN=false to delete.
DRY_RUN=${DRY_RUN:='true'}
# Get all GCP projects visible to the current credentials
PROJECTS=$(gcloud projects list --format="value(project_id)")
for p in ${PROJECTS}; do
  echo "## ${p}:"
  # Get GKE cluster tag from the first matching instance template
  GCP_TAG="$(gcloud -q --project "${p}" compute instance-templates list --filter="name : gke-k8s-cluster-terraform" --format="value(properties.tags.items[0])" --limit=1 2>/dev/null)"
  # Verify we have expected number of rules with tag (5 or 0)
  echo "GKE FWs: $(gcloud -q --project "${p}" compute firewall-rules list --filter="targetTags :( ${GCP_TAG} )" --format="value(name)" 2>/dev/null | wc -l)"
  # Count bad - orphaned - rules (target tags that do NOT match the cluster tag)
  BAD_FW_RULES="$(gcloud -q --project "${p}" compute firewall-rules list --filter="NOT targetTags :( ${GCP_TAG} )" --format="value(name)" 2>/dev/null | tr '\r\n' ' ')"
  echo "Bad FWs: $(echo "${BAD_FW_RULES}" | wc -w)"
  # If we have any bad rules, delete them.
  # Production and staging projects are always treated as dry-run as a safety net.
  if [ -n "${BAD_FW_RULES}" ]; then
    if [ "${DRY_RUN}" = 'true' ] || [ "${p}" = 'gpii-gcp-prd' ] || [ "${p}" = 'gpii-gcp-stg' ]; then
      echo gcloud -q --project "${p}" compute firewall-rules delete ${BAD_FW_RULES}
    else
      gcloud -q --project "${p}" compute firewall-rules delete ${BAD_FW_RULES}
    fi
  fi
done
|
import {Injectable} from '@angular/core';
import {BehaviorSubject} from 'rxjs';
import {CrossCodeMap} from '../shared/interfaces/cross-code-map';
// One undo/redo snapshot: display metadata plus the full map state.
export interface HistoryState {
    icon: string;   // icon identifier shown in the history UI
    name: string;   // human-readable label for the snapshot
    state: CrossCodeMap;   // complete map state captured at this point
}
@Injectable()
export class StateHistoryService {
    /** Maximum number of snapshots kept; the oldest entry is dropped beyond this. */
    maxStates = 100;
    /** All snapshots, oldest first. */
    states: BehaviorSubject<HistoryState[]> = new BehaviorSubject([]);
    /** Currently selected snapshot (undo/redo cursor). */
    selectedState: BehaviorSubject<{ state: HistoryState }> = new BehaviorSubject({state: null});

    constructor() {
    }

    /** Resets the history to a single initial snapshot and selects it. */
    init(state: HistoryState) {
        // Fix: emit through the subject instead of mutating
        // `this.selectedState.value.state` in place — in-place mutation of a
        // BehaviorSubject's current value never notifies subscribers.
        this.selectedState.next({state: state});
        this.states.next([state]);
    }

    /** Appends a snapshot after the current cursor, discarding any redo tail. */
    saveState(state: HistoryState) {
        const states = this.states.getValue();
        const selected = this.selectedState.getValue();
        const i = states.indexOf(selected.state);
        selected.state = state;
        // Truncate everything after the selected snapshot (the redo branch).
        states.length = i + 1;
        if (states.length >= this.maxStates) {
            states.shift();
        }
        states.push(state);
        this.states.next(states);
    }

    /** Moves the cursor one snapshot back, if possible. */
    undo() {
        const states = this.states.getValue();
        const selected = this.selectedState.getValue();
        let i = states.indexOf(selected.state);
        if (i <= 0) {
            return;
        }
        i--;
        this.selectedState.next({state: states[i]});
    }

    /** Moves the cursor one snapshot forward, if possible. */
    redo() {
        const states = this.states.getValue();
        const selected = this.selectedState.getValue();
        let i = states.indexOf(selected.state);
        if (i === states.length - 1) {
            return;
        }
        i++;
        this.selectedState.next({state: states[i]});
    }
}
|
#include "libft.h"
/*
** ft_strtrim - returns a freshly allocated copy of s1 with every leading and
** trailing character that appears in `set` removed.
** Returns NULL when s1 or set is NULL; otherwise propagates ft_substr's
** result (which may itself be NULL on allocation failure).
*/
char *ft_strtrim(char const *s1, char const *set)
{
	size_t i;
	if (!s1 || !set)
		return (0);
	/* Skip leading characters that belong to `set`. */
	while (*s1 && ft_strchr(set, *s1))
		s1++;
	i = ft_strlen(s1);
	/* Walk back over trailing `set` characters. Subtlety: the first
	** iteration inspects s1[i] == '\0', and ft_strchr(set, '\0') matches
	** set's own terminator, so the index steps off the NUL before any real
	** comparison happens. The `i &&` guard also makes the empty remainder
	** (everything trimmed from the front) fall through safely. */
	while (i && ft_strchr(set, s1[i]))
		i--;
	/* i indexes the last character to keep, hence length i + 1. */
	return (ft_substr(s1, 0, i + 1));
}
|
#!/bin/bash
# Submit the Slurm job script s.sh from each model directory.
for MODEL in JC JCI JCG JCIG GTR GTRI GTRG GTRIG 3JC 3JCI 3JCG 3JCIG 3GTR 3GTRI 3GTRG 3GTRIG
do
    # Abort if a model directory is missing instead of submitting s.sh from
    # the wrong working directory (unchecked `cd` would do exactly that).
    cd "$MODEL" || exit 1
    sbatch s.sh
    cd ..
done
# Kept for compatibility; sbatch submissions are synchronous, so there are no
# background jobs for `wait` to reap.
wait
echo "All jobs submitted."
|
<gh_stars>1-10
public class Rectangle extends Shape {
private double width, length;
public Rectangle() {
this.length = 2;
this.width = 1;
setLW(this.length, this.width);
}
public Rectangle(double width, double length) {
this.length = length;
this.width = width;
setLW(length, width);
}
public Rectangle(double width, double length, boolean isFilled, String color) {
super(isFilled, color);
this.length = length;
this.width = width;
setLW(length, width);
}
public void setLW(double x, double y) {
this.length = Math.max(x, y);
this.width = Math.min(x, y);
}
public String getArea() {
double a = this.length * this.width;
String area = Double.toString(a);
return (String) "Area: " + area;
}
@Override
public String toString() {
String width = (String) ("Width: " + this.width);
String length = (String) ("Length: " + this.length);
String area = (String) getArea();
String shape = super.toString();
return width + "\n" + length + "\n" + area + "\n" + shape;
}
} |
#!/usr/bin/env bash
# Restart polybar: kill any running instances, wait for X, then launch the
# "top" and "bottom" bars with logs in /tmp.
DIR="${XDG_CONFIG_HOME}/polybar"
# kill existing instances and wait until they are really gone
killall -q polybar
while pgrep -u $UID -x polybar >/dev/null; do sleep 1; done
# wait for X to become responsive before launching bars
while ! xset q &>/dev/null; do sleep 1; done
# launch one instance per connected monitor (MONITORS is consumed by the
# polybar config); -r restarts the bar on config changes
monitors=$(xrandr --query | grep " connected" | cut -d" " -f1)
top_log="/tmp/polybar_top.log"
bottom_log="/tmp/polybar_bottom.log"
# separator line so successive runs are distinguishable in the logs
echo "---" | tee -a "${top_log}" "${bottom_log}"
MONITORS="${monitors}" polybar top -r -c "${DIR}/config.ini" >>"${top_log}" 2>&1 &
MONITORS="${monitors}" polybar bottom -r -c "${DIR}/config.ini" >>"${bottom_log}" 2>&1 &
|
"""
Created on May, 2019
@author: <NAME>
Data configurations.
"""
class Relation_Config(object):
    """Hyper-parameters of the relation feature module."""

    def __init__(self):
        self.d_g = 64   # assumes geometric-feature embedding size — TODO confirm
        self.d_k = 64   # assumes key/query dimension of the relation unit — TODO confirm
        self.Nr = 16    # assumes number of relation heads/groups — TODO confirm


# Data-generation settings (module-level constants).
num_samples_on_each_model = 5000        # samples drawn per 3D model — presumably for point sampling; verify against caller
n_object_per_image_in_training = 8      # max objects taken per training image — TODO confirm
import os
import numpy as np
import pickle
# The 40 NYU-Depth-v2 semantic classes, index 0 reserved for 'void'.
NYU40CLASSES = ['void',
                'wall', 'floor', 'cabinet', 'bed', 'chair',
                'sofa', 'table', 'door', 'window', 'bookshelf',
                'picture', 'counter', 'blinds', 'desk', 'shelves',
                'curtain', 'dresser', 'pillow', 'mirror', 'floor_mat',
                'clothes', 'ceiling', 'books', 'refridgerator', 'television',
                'paper', 'towel', 'shower_curtain', 'box', 'whiteboard',
                'person', 'night_stand', 'toilet', 'sink', 'lamp',
                'bathtub', 'bag', 'otherstructure', 'otherfurniture', 'otherprop']
# Maps NYU37 class ids to Pix3D class ids; 0 means "no Pix3D counterpart".
NYU37_TO_PIX3D_CLS_MAPPING = {0:0, 1:0, 2:0, 3:8, 4:1, 5:3, 6:5, 7:6, 8:8, 9:2, 10:2, 11:0, 12:0, 13:2, 14:4,
                              15:2, 16:2, 17:8, 18:0, 19:0, 20:0, 21:0, 22:0, 23:0, 24:8, 25:8, 26:0, 27:0, 28:0,
                              29:8, 30:8, 31:0, 32:8, 33:0, 34:0, 35:0, 36:0, 37:8}
# Class ids for which 3D reconstruction is performed — presumably furniture-like
# categories; verify against the reconstruction pipeline.
RECON_3D_CLS = [3,4,5,6,7,8,10,14,15,17,24,25,29,30,32]
number_pnts_on_template = 2562   # vertices of the template mesh — TODO confirm
pix3d_n_classes = 9              # number of Pix3D classes (incl. background id 0)
cls_reg_ratio = 10               # classification-vs-regression loss weight — TODO confirm
obj_cam_ratio = 1                # object-vs-camera loss weight — TODO confirm
class Config(object):
    """Dataset paths plus the discretization bins used for regression targets.

    Currently only the 'sunrgbd' dataset is supported; any other name raises.
    """

    def __init__(self, dataset):
        """
        Configuration of data paths.

        :param dataset: dataset name; must be 'sunrgbd'
        """
        self.dataset = dataset
        if self.dataset == 'sunrgbd':
            self.metadata_path = './data/sunrgbd'
            self.train_test_data_path = os.path.join(self.metadata_path, 'sunrgbd_train_test_data')
            # Precomputed statistics (pickle files) consumed by __initiate_bins.
            self.__size_avg_path = os.path.join(self.metadata_path, 'preprocessed/size_avg_category.pkl')
            self.__layout_avg_file = os.path.join(self.metadata_path, 'preprocessed/layout_avg_file.pkl')
            self.bins = self.__initiate_bins()
            self.evaluation_path = './evaluation/sunrgbd'
            if not os.path.exists(self.train_test_data_path):
                os.mkdir(self.train_test_data_path)

    def __initiate_bins(self):
        """Build the dict of angle/size/depth bins used as regression targets.

        :return: dict with layout, camera (pitch/roll) and object bbox bins
        :raises IOError: if a required preprocessed pickle file is missing
        :raises NameError: if the dataset name is not supported
        """
        bin = {}
        if self.dataset == 'sunrgbd':
            # there are faithful priors for layout locations, we can use it for regression.
            if os.path.exists(self.__layout_avg_file):
                with open(self.__layout_avg_file, 'rb') as file:
                    layout_avg_dict = pickle.load(file)
            else:
                raise IOError('No layout average file in %s. Please check.' % (self.__layout_avg_file))
            bin['layout_centroid_avg'] = layout_avg_dict['layout_centroid_avg']
            bin['layout_coeffs_avg'] = layout_avg_dict['layout_coeffs_avg']
            '''layout orientation bin'''
            # Two bins of pi/4 each, covering [pi/4, 3*pi/4].
            NUM_LAYOUT_ORI_BIN = 2
            ORI_LAYOUT_BIN_WIDTH = np.pi / 4
            bin['layout_ori_bin'] = [[np.pi / 4 + i * ORI_LAYOUT_BIN_WIDTH, np.pi / 4 + (i + 1) * ORI_LAYOUT_BIN_WIDTH] for i in range(NUM_LAYOUT_ORI_BIN)]
            '''camera bin'''
            PITCH_NUMBER_BINS = 2
            PITCH_WIDTH = 40 * np.pi / 180
            ROLL_NUMBER_BINS = 2
            ROLL_WIDTH = 20 * np.pi / 180
            # pitch_bin = [[-60 * np.pi/180, -20 * np.pi/180], [-20 * np.pi/180, 20 * np.pi/180]]
            bin['pitch_bin'] = [[-60.0 * np.pi / 180 + i * PITCH_WIDTH, -60.0 * np.pi / 180 + (i + 1) * PITCH_WIDTH] for
                                i in range(PITCH_NUMBER_BINS)]
            # roll_bin = [[-20 * np.pi/180, 0 * np.pi/180], [0 * np.pi/180, 20 * np.pi/180]]
            bin['roll_bin'] = [[-20.0 * np.pi / 180 + i * ROLL_WIDTH, -20.0 * np.pi / 180 + (i + 1) * ROLL_WIDTH] for i in
                               range(ROLL_NUMBER_BINS)]
            '''bbox orin, size and centroid bin'''
            # orientation bin
            NUM_ORI_BIN = 6
            ORI_BIN_WIDTH = float(2 * np.pi / NUM_ORI_BIN)  # 60 degrees width for each bin.
            # orientation bin ranges from -np.pi to np.pi.
            bin['ori_bin'] = [[(i - NUM_ORI_BIN / 2) * ORI_BIN_WIDTH, (i - NUM_ORI_BIN / 2 + 1) * ORI_BIN_WIDTH] for i
                              in range(NUM_ORI_BIN)]
            if os.path.exists(self.__size_avg_path):
                with open(self.__size_avg_path, 'rb') as file:
                    avg_size = pickle.load(file)
            else:
                raise IOError('No object average size file in %s. Please check.' % (self.__size_avg_path))
            # Per-category average bbox size, stacked into a (n_classes, 3)-like array.
            bin['avg_size'] = np.vstack([avg_size[key] for key in range(len(avg_size))])
            # for each object bbox, the distance between camera and object centroid will be estimated.
            NUM_DEPTH_BIN = 6
            DEPTH_WIDTH = 1.0
            # centroid_bin = [0, 6]
            bin['centroid_bin'] = [[i * DEPTH_WIDTH, (i + 1) * DEPTH_WIDTH] for i in
                                   range(NUM_DEPTH_BIN)]
        else:
            raise NameError('Please specify a correct dataset name.')
        return bin
|
-- Count the number of books in each genre.
SELECT genre, COUNT(*) as total
FROM books
GROUP BY genre;
# LANGS DEFAULT
# Assumes everything is in your paths, except the project Python code
export PYTHONPATH=${EMEWS_PROJECT_ROOT}/python:${PYTHONPATH:-}
# Swift/T implementation flavor used by this workflow.
SWIFT_IMPL=app
# Resident task workers and ranks.
# `${VAR+x}` expands to "x" only when VAR is set, so this block applies
# defaults only if the caller has not already configured them.
if [ -z ${TURBINE_RESIDENT_WORK_WORKERS+x} ]
then
  # One resident worker; the last two ranks are reserved (server + worker),
  # so resident work runs on rank PROCS - 2.
  export TURBINE_RESIDENT_WORK_WORKERS=1
  export RESIDENT_WORK_RANKS=$(( PROCS - 2 ))
fi
|
import requests
import json

# base url as given in Reddit API documentation
base_url = 'https://www.reddit.com/search.json'

# Reddit rejects (429s) clients using the default requests User-Agent; the API
# rules require a short descriptive UA string.
headers = {
    'User-Agent': 'simple-search-script/0.1'
}

# parameters for the search query. In our case - q is the query
params = {
    'q': 'Machine Learning'
}

# send get request and fail loudly on HTTP errors instead of crashing later
# on an unexpected JSON payload
response = requests.get(base_url, params=params, headers=headers)
response.raise_for_status()

# extract data in json format
data = response.json()

# data is a nested dictionary. Get the list of results using the 'data' key
results = data['data']['children']

# iterate through the list and print each post title
for post in results:
    print(post['data']['title'])
package training.graph;
import org.junit.jupiter.api.Test;
import java.util.*;
import java.util.function.IntUnaryOperator;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * 279. Perfect Squares: https://leetcode-cn.com/problems/perfect-squares/
 *
 * Given an integer n, return the least number of perfect squares that sum to n.
 * <p>
 * A perfect square is an integer that is the square of another integer; for
 * example 1, 4, 9 and 16 are perfect squares while 3 and 11 are not.
 * <p>
 * Example 1:
 * Input: n = 12
 * Output: 3
 * Explanation: 12 = 4 + 4 + 4.
 * <p>
 * Example 2:
 * Input: n = 13
 * Output: 2
 * Explanation: 13 = 4 + 9.
 * <p>
 * Constraints:
 * - 1 <= n <= 10**4
 */
public class E279_Medium_PerfectSquares {
    // Shared assertions run against each solution method.
    static void test(IntUnaryOperator method) {
        assertEquals(method.applyAsInt(12), 3);
        assertEquals(method.applyAsInt(13), 2);
        assertEquals(method.applyAsInt(1), 1);
        assertEquals(method.applyAsInt(9), 1);
        assertEquals(method.applyAsInt(17), 2);
    }
    /**
     * BFS: each level subtracts every perfect square smaller than the current
     * remainder, and the remainders form the next level. The answer is the
     * depth at which a remainder becomes a perfect square itself.
     * Before enqueueing a remainder, check whether it is already a perfect
     * square — this early exit is a large speedup.
     *
     * LeetCode runtime: 6ms - 96.65%
     */
    public int numSquares(int n) {
        int sqr;
        // n itself is a perfect square — answer is 1.
        if ((sqr = (int) Math.sqrt(n)) * sqr == n && n % sqr == 0)
            return 1;
        // Collect every perfect square smaller than n (sorted ascending).
        List<Integer> perfectSquareList = new ArrayList<>();
        for (int i = 1; i * i < n; i++) {
            perfectSquareList.add(i * i);
        }
        int level = 0;
        Queue<Integer> queue = new LinkedList<>();
        queue.add(n);
        // BFS over remainders; `level` counts squares used so far.
        LABEL_OUTER:
        while (!queue.isEmpty()) {
            level++;
            int size = queue.size();
            for (int i = 0; i < size; i++) {
                n = queue.remove();
                // binarySearch >= 0 means the remainder is a perfect square.
                int insertion = Collections.binarySearch(perfectSquareList, n);
                if (insertion >= 0)
                    break LABEL_OUTER;
                else {
                    insertion = -insertion - 1;
                    // Try every perfect square smaller than the remainder.
                    for (int j = insertion - 1; j >= 0; j--) {
                        int m = n - perfectSquareList.get(j);
                        // Don't enqueue blindly — check first whether m is a
                        // perfect square. Without this check the method took
                        // 32ms; with it, 6ms.
                        if (Collections.binarySearch(perfectSquareList, m) >= 0)
                            return level + 1;
                        queue.add(m);
                    }
                }
            }
        }
        return level;
    }
    @Test
    public void testNumSquares() {
        test(this::numSquares);
    }
    /**
     * Dynamic-programming solution (unbounded knapsack over perfect squares):
     * dp[j] = fewest squares summing to j.
     *
     * LeetCode runtime: 78 ms - 17%
     * Memory: 37.6 MB - 42.22%
     */
    public int dpMethod(int n) {
        int sqrt = (int) Math.sqrt(n);
        if (sqrt * sqrt == n) {
            return 1;
        }
        List<Integer> perfect = new ArrayList<>(sqrt + 1);
        for (int i = 1; i * i < n; i++) {
            perfect.add(i * i);
        }
        int m = perfect.size();
        int[] dp = new int[n + 1];
        // Base case: j ones always sum to j.
        for (int j = 1; j <= n; j++) {
            dp[j] = j;
        }
        for (int i = 2; i <= m; i++) {
            for (int j = 1; j <= n; j++) {
                if (perfect.get(i - 1) <= j) {
                    dp[j] = Math.min(dp[j], dp[j - perfect.get(i - 1)] + 1);
                }
            }
        }
        return dp[n];
    }
    @Test
    public void testDpMethod() {
        test(this::dpMethod);
    }
}
|
<gh_stars>0
import json
def get_recent_block_heights(block_height):
    """Return recent block heights worth keeping, newest first.

    The last 10 heights are kept densely, then heights thin out
    quadratically (offsets 16, 25, 36, ... up to 63**2). Negative
    heights are excluded.
    """
    offsets = []
    for recent in range(10):
        offsets.append(recent)
    for base in range(4, 64):
        offsets.append(base * base)
    heights = []
    for offset in offsets:
        height = block_height - offset
        if height >= 0:
            heights.append(height)
    return heights
def load_peers_from_list(lst):
    """Build a peer map keyed by (host, port, direction) tuples.

    Each entry becomes a DisconnectedRemotePeer placeholder (no live
    connection yet).
    """
    from .peer import DisconnectedRemotePeer
    peers = {}
    for host, port, direction in lst:
        key = (host, port, direction)
        peers[key] = DisconnectedRemotePeer(host, port, direction, None)
    return peers
def load_peers():
    """Load known peers from peers.json in the working directory.

    Falls back to an empty peer set on any failure (missing file, bad
    JSON) — peer persistence is best-effort by design.
    """
    try:
        # Use a context manager so the file handle is closed even on parse
        # errors (the previous open(...).read() leaked the handle).
        with open("peers.json") as fh:
            db = [tuple(li) for li in json.load(fh)]
    except Exception:
        db = []
    return load_peers_from_list(db)
|
<reponame>missaouib/Fame
package com.designre.blog.service;
import com.designre.blog.BaseTest;
import com.designre.blog.util.FameConst;
import com.designre.blog.util.FameUtils;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * Integration test for {@code MediaService.upload}.
 *
 * @author zzzzbw
 * @since 2019/7/9 17:58
 */
@Slf4j
public class MediaServiceTest extends BaseTest {
    @Autowired
    private MediaService mediaService;
    /**
     * Uploads a minimal stub MultipartFile to the path "a/b/c".
     * NOTE(review): no assertions here — this only verifies that upload()
     * does not throw for a stub file; consider asserting on the result.
     */
    @Test
    public void test1() {
        // Anonymous stub: only getOriginalFilename()/getContentType() return
        // meaningful values; every other member is an inert default.
        MultipartFile file = new MultipartFile() {
            @Override
            public String getName() {
                return null;
            }
            @Override
            public String getOriginalFilename() {
                return "abc.txt";
            }
            @Override
            public String getContentType() {
                // deliberately not a real MIME type — exercises lenient handling
                return "123123";
            }
            @Override
            public boolean isEmpty() {
                return false;
            }
            @Override
            public long getSize() {
                return 0;
            }
            @Override
            public byte[] getBytes() throws IOException {
                return new byte[0];
            }
            @Override
            public InputStream getInputStream() throws IOException {
                return null;
            }
            @Override
            public void transferTo(File dest) throws IOException, IllegalStateException {
            }
        };
        mediaService.upload(file, "a/b/c");
    }
}
|
<filename>src/main/java/com/attributestudios/wolfarmor/client/gui/config/WolfArmorGuiFactory.java<gh_stars>0
package com.attributestudios.wolfarmor.client.gui.config;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiScreen;
import net.minecraftforge.fml.client.IModGuiFactory;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Set;
/**
 * GUI Factory for the WolfArmor Mod. Registered with Forge so the mod's
 * config screen can be opened from the mod list.
 */
@SideOnly(Side.CLIENT)
public class WolfArmorGuiFactory implements IModGuiFactory {
    //region Public / Protected Methods
    /**
     * Called when instantiated to initialize with the active minecraft instance.
     *
     * @param minecraftInstance the instance
     */
    @Override
    public void initialize(@Nonnull Minecraft minecraftInstance) {
        // No per-instance state needed; intentionally empty.
    }
    /**
     * @return {@code true}: this mod provides a config GUI
     */
    @Override
    public boolean hasConfigGui() {
        return true;
    }
    /**
     * Creates the mod's config screen with the given parent screen.
     *
     * @param guiScreen the parent screen to return to on close
     * @return the config screen instance
     */
    @Override
    public GuiScreen createConfigGui(GuiScreen guiScreen) {
        return new WolfArmorGuiConfig(guiScreen);
    }
    /**
     * @return {@code null}: no runtime GUI categories are provided
     */
    @Override
    @Nullable
    public Set<RuntimeOptionCategoryElement> runtimeGuiCategories() {
        return null;
    }
    //endregion Public / Protected Methods
}
|
#!/bin/bash
# Docker entrypoint: points ROS at the master, sources the ROS install and the
# catkin workspace, then execs the container command so it becomes PID 1.
# exit immediately if a command exits with a non-zero status (see `$ help set`)
set -e
ECHO_PREFIX="[docker-entrypoint.sh]"
# location of master (hostname "nils" must be resolvable inside the container)
export ROS_MASTER_URI=http://nils:11311/
echo "$ECHO_PREFIX" "set ROS master: " "$ROS_MASTER_URI"
# ROS installation
ROS=/opt/ros/kinetic/setup.bash
source "$ROS"
echo "$ECHO_PREFIX" "sourced ROS installation:" "$ROS"
# workspace holding custom ROS packages
workspace=/catkin_ws
source "$workspace"/devel/setup.bash
echo "$ECHO_PREFIX" "sourced workspace:" "$workspace"
# exec replaces this shell with the requested command so signals reach it
echo "$ECHO_PREFIX" "execute" "$@"
exec "$@"
|
#!/bin/bash
# Launches hs-app-template, passing each CLI flag only when the matching
# environment variable is set.
echo "Starting service"
. set-environment
# Default STATSD_HOST to the docker gateway IP if not provided.
export GATEWAY_IP=$(ip route | grep default | cut -d ' ' -f 3)
export STATSD_HOST=${STATSD_HOST:-$GATEWAY_IP}
/usr/local/bin/set-environment
# Trace the final command line for debugging.
set -x
# `${VAR+ --flag "${VAR}" }` expands to the flag and value only when VAR is
# set (even if empty); unset variables contribute nothing to the command.
hs-app-template \
  ${AWS_REGION+ --region "${AWS_REGION}" } \
  ${KAFKA_BROKER+ --kafka-broker "${KAFKA_BROKER}" } \
  ${KAFKA_GROUP_ID+ --kafka-group-id "${KAFKA_GROUP_ID}" } \
  ${KAFKA_SCHEMA_REGISTRY+ --kafka-schema-registry "${KAFKA_SCHEMA_REGISTRY}" } \
  ${KAFKA_POLL_TIMEOUT_MS+ --kafka-poll-timeout-ms "${KAFKA_POLL_TIMEOUT_MS}" } \
  ${KAFKA_QUEUED_MAX_MESSAGES_KBYTES+ --kafka-queued-max-messages-kbytes "${KAFKA_QUEUED_MAX_MESSAGES_KBYTES}" } \
  ${KAFKA_DEBUG_ENABLE+ --kafka-debug-enable "${KAFKA_DEBUG_ENABLE}" } \
  ${KAFKA_CONSUMER_COMMIT_PERIOD_SEC+ --kafka-consumer-commit-period-sec "${KAFKA_CONSUMER_COMMIT_PERIOD_SEC}" } \
  ${INPUT_TOPIC_IN+ --input-topic "${INPUT_TOPIC_IN}" } \
  ${STATSD_HOST+ --statsd-host "${STATSD_HOST}" } \
  ${STATSD_SAMPLE_RATE+ --statsd-sample-rate "${STATSD_SAMPLE_RATE}" } \
  ${STATSD_TAGS+ --statsd-tags "${STATSD_TAGS}" } \
  ${LOG_LEVEL+ --log-level "${LOG_LEVEL}" }
|
import { BLOCK_SEARCH_RESULT_PATH } from '../blocks/config';
import { EPOCH_SEARCH_RESULT_PATH } from '../epochs/config';
import { TRANSACTION_SEARCH_RESULT_PATH } from './config';
export const getTransactionRoute = (txId: string) =>
`${TRANSACTION_SEARCH_RESULT_PATH}?id=${txId}`;
export const getEpochRoute = (epoch: number) =>
`${EPOCH_SEARCH_RESULT_PATH}?number=${epoch}`;
export const getBlockRoute = (blockId: string) =>
`${BLOCK_SEARCH_RESULT_PATH}?id=${blockId}`;
|
<reponame>WAHsss/m-smartisan
const path = require("path");
const {src,dest,series,parallel,watch} = require("gulp");
const connect = require('gulp-connect');
const sass = require("gulp-sass");
const webpack = require("webpack-stream");
const proxy = require('http-proxy-middleware');
// Output directory for the dev build served by gulp-connect.
const dirPath = "../../dev/";
// Dev server on :8080 with livereload; /api, /shopapi and /single are proxied
// to the smartisan shop backends to avoid CORS during development.
function gulpServer(){
    return connect.server({
        name : "mobileApp",
        root:`${dirPath}`,
        port:"8080",
        livereload : true,
        middleware : ()=>{
            return [
                proxy('/api',{
                    target:'https://shopapi.smartisan.com/mobile',
                    changeOrigin : true,
                    pathRewrite:{
                        '^/api':''
                    }
                }),
                proxy('/shopapi',{
                    target:'https://shopapi.smartisan.com/v1/search/goods-list',
                    changeOrigin : true,
                    pathRewrite:{
                        '^/shopapi':''
                    }
                }),
                proxy('/single',{
                    target:'https://shopapi.smartisan.com/product',
                    changeOrigin : true,
                    pathRewrite : {
                        '^/single' : ''
                    }
                })
            ]
        }
    });
}
// Copy top-level HTML pages into the dev dir and trigger livereload.
function copyHTML(){
    return src("../*.html")
        .pipe(dest(`${dirPath}`))
        .pipe(connect.reload())
}
// Compile SCSS to the dev styles dir.
function copySCSS(){
    return src(["../styles/*.scss",])
        .pipe(sass().on('error', sass.logError))
        .pipe(dest(`${dirPath}styles/`))
        .pipe(connect.reload())
}
// Bundle page scripts with webpack (two entries: listing page and detail page).
function packJS(){
    return src(["../scripts/*.js"])
        .pipe(webpack({
            mode:"development",// switch to "production" for release builds
            entry: {
                app: "../scripts/app.js",
                'app-detail' : "../scripts/app-detail.js"
            },
            output:{
                path : path.resolve(__dirname,`${dirPath}`),// resolved/joined automatically
                filename : '[name].js'
            },
            module:{
                rules:[
                    {
                        test : /\.html$/,
                        loader : 'string-loader'
                    },{
                        test : /\.art$/,
                        loader : 'art-template-loader'
                    },{
                        test:/\.scss$/,
                        use:[
                            'style-loader',
                            'css-loader',
                            'sass-loader'
                        ]
                    }
                ]
            }
        }))
        .pipe(dest(`${dirPath}scripts/`))
        .pipe(connect.reload())
}
// Copy vendored libraries verbatim.
function copyLibs(){
    return src("../libs/**/*")
        .pipe(dest(`${dirPath}libs/`))
        .pipe(connect.reload())
}
// Re-run the matching task whenever sources change.
function watchFile(){
    watch('../*.html',series(copyHTML));
    watch("../styles/**/*.scss",series(copySCSS));
    watch('../scripts/**/*',series(packJS));
    watch('../libs/*',series(copyLibs));
    watch('../assets/**/*',series(copyAssets));
}
// Copy static assets (images, fonts, ...).
function copyAssets(){
    return src('../assets/**/*')
        .pipe(dest(`${dirPath}/assets`))
        .pipe(connect.reload());
}
// Default task: build everything once, then serve and watch in parallel.
exports.default = series(parallel(copyHTML,copySCSS,packJS,copyLibs,copyAssets),parallel(gulpServer,watchFile));
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
##############################################################
# This script is used to compile Apache Doris(incubating)
# Usage:
# sh build.sh build both Backend and Frontend.
# sh build.sh -clean clean previous output and build.
#
# You need to make sure all thirdparty libraries have been
# compiled and installed correctly.
##############################################################
set -eo pipefail

# Resolve the repository root from this script's location.
ROOT=`dirname "$0"`
ROOT=`cd "$ROOT"; pwd`

export DORIS_HOME=${ROOT}

# Pull in toolchain configuration (DORIS_THIRDPARTY, MVN, ...).
. ${DORIS_HOME}/env.sh

# build thirdparty libraries if necessary
if [[ ! -f ${DORIS_THIRDPARTY}/installed/lib/librdkafka.a ]]; then
    echo "Thirdparty libraries need to be build ..."
    ${DORIS_THIRDPARTY}/build-thirdparty.sh
fi

# check java home
if [[ -z ${JAVA_HOME} ]]; then
    echo "Warning: JAVA_HOME is not set, use thirdparty/installed/jdk1.8.0_131"
    export JAVA_HOME=${DORIS_THIRDPARTY}/installed/jdk1.8.0_131
fi

# Use roughly a quarter of the cores for parallel make. $(( )) replaces the
# deprecated/non-portable $[ ] arithmetic form.
PARALLEL=$(( $(nproc) / 4 + 1 ))
# Check args
usage() {
    echo "
Usage: $0 <options>
  Optional options:
     --be               build Backend
     --fe               build Frontend
     --clean            clean and build target
     --ut               build and run unit tests

  Eg.
    $0                                      build Backend and Frontend without clean
    $0 --be                                 build Backend without clean
    $0 --fe --clean                         clean and build Frontend
    $0 --fe --be --clean                    clean and build both Frontend and Backend
  "
    exit 1
}

# Fix: '--ut' was accepted in the case statement below but missing from the
# getopt option list, so passing --ut always failed argument parsing.
OPTS=$(getopt \
  -n $0 \
  -o '' \
  -l 'be' \
  -l 'fe' \
  -l 'clean' \
  -l 'ut' \
  -- "$@")

if [ $? != 0 ] ; then
    usage
fi

eval set -- "$OPTS"

BUILD_BE=
BUILD_FE=
CLEAN=
RUN_UT=
if [ $# == 1 ] ; then
    # default: no flags given — build both without cleaning
    BUILD_BE=1
    BUILD_FE=1
    CLEAN=0
    RUN_UT=0
else
    BUILD_BE=0
    BUILD_FE=0
    CLEAN=0
    RUN_UT=0
    while true; do
        case "$1" in
            --be) BUILD_BE=1 ; shift ;;
            --fe) BUILD_FE=1 ; shift ;;
            --clean) CLEAN=1 ; shift ;;
            --ut) RUN_UT=1 ; shift ;;
            --) shift ;  break ;;
            # Fix: was misspelled 'ehco', which would itself have raised
            # "command not found" instead of reporting the internal error.
            *) echo "Internal error" ; exit 1 ;;
        esac
    done
fi

# --clean on its own would wipe outputs without rebuilding anything; refuse.
if [ ${CLEAN} -eq 1 -a ${BUILD_BE} -eq 0 -a ${BUILD_FE} -eq 0 ]; then
    echo "--clean can not be specified without --fe or --be"
    exit 1
fi

echo "Get params:
    BUILD_BE -- $BUILD_BE
    BUILD_FE -- $BUILD_FE
    CLEAN    -- $CLEAN
    RUN_UT   -- $RUN_UT
"
# Clean and build generated code (thrift/protobuf stubs under gensrc)
echo "Build generated code"
cd ${DORIS_HOME}/gensrc
if [ ${CLEAN} -eq 1 ]; then
    make clean
fi
make
cd ${DORIS_HOME}
# Clean and build Backend (C++, cmake)
if [ ${BUILD_BE} -eq 1 ] ; then
    echo "Build Backend"
    if [ ${CLEAN} -eq 1 ]; then
        rm ${DORIS_HOME}/be/build/ -rf
        rm ${DORIS_HOME}/be/output/ -rf
    fi
    mkdir -p ${DORIS_HOME}/be/build/
    cd ${DORIS_HOME}/be/build/
    cmake ../
    make -j${PARALLEL}
    make install
    cd ${DORIS_HOME}
fi
# Build docs, should be built before Frontend (FE bundles help-resource.zip)
echo "Build docs"
cd ${DORIS_HOME}/docs
if [ ${CLEAN} -eq 1 ]; then
    make clean
fi
make
cd ${DORIS_HOME}
# Clean and build Frontend (Java, maven)
if [ ${BUILD_FE} -eq 1 ] ; then
    echo "Build Frontend"
    cd ${DORIS_HOME}/fe
    if [ ${CLEAN} -eq 1 ]; then
        ${MVN} clean
    fi
    ${MVN} package -DskipTests
    cd ${DORIS_HOME}
fi
# Clean and prepare output dir
DORIS_OUTPUT=${DORIS_HOME}/output/
mkdir -p ${DORIS_OUTPUT}
# Copy Frontend and Backend artifacts into the output layout
if [ ${BUILD_FE} -eq 1 ]; then
    install -d ${DORIS_OUTPUT}/fe/bin ${DORIS_OUTPUT}/fe/conf \
               ${DORIS_OUTPUT}/fe/webroot/ ${DORIS_OUTPUT}/fe/lib/
    cp -r -p ${DORIS_HOME}/bin/*_fe.sh ${DORIS_OUTPUT}/fe/bin/
    cp -r -p ${DORIS_HOME}/conf/fe.conf ${DORIS_OUTPUT}/fe/conf/
    cp -r -p ${DORIS_HOME}/fe/target/lib/* ${DORIS_OUTPUT}/fe/lib/
    cp -r -p ${DORIS_HOME}/fe/target/palo-fe.jar ${DORIS_OUTPUT}/fe/lib/
    cp -r -p ${DORIS_HOME}/docs/build/help-resource.zip ${DORIS_OUTPUT}/fe/lib/
    cp -r -p ${DORIS_HOME}/webroot/* ${DORIS_OUTPUT}/fe/webroot/
fi
if [ ${BUILD_BE} -eq 1 ]; then
    install -d ${DORIS_OUTPUT}/be/bin ${DORIS_OUTPUT}/be/conf \
               ${DORIS_OUTPUT}/be/lib/
    cp -r -p ${DORIS_HOME}/be/output/bin/* ${DORIS_OUTPUT}/be/bin/
    cp -r -p ${DORIS_HOME}/be/output/conf/* ${DORIS_OUTPUT}/be/conf/
    cp -r -p ${DORIS_HOME}/be/output/lib/* ${DORIS_OUTPUT}/be/lib/
fi
echo "***************************************"
echo "Successfully build Doris"
echo "***************************************"
# Optional user hook run after a successful build
if [[ ! -z ${DORIS_POST_BUILD_HOOK} ]]; then
    eval ${DORIS_POST_BUILD_HOOK}
fi
exit 0
|
// Doxygen-style generated navigation data for arg_min_max_test_impl.hpp:
// each entry is [display name, target page + anchor, children (null = leaf)].
// Presumably emitted by a documentation generator -- do not edit by hand.
var _arg_min_max_test_impl_8hpp =
[
    [ "ArgMaxChannelTest", "_arg_min_max_test_impl_8hpp.xhtml#abecc0d9c322f12e6318c71e1b2743c4d", null ],
    [ "ArgMaxHeightTest", "_arg_min_max_test_impl_8hpp.xhtml#a90b0a2e001da0237a81ea1ae918d767e", null ],
    [ "ArgMaxSimpleTest", "_arg_min_max_test_impl_8hpp.xhtml#a6e5d88259b3850369b5a20071011df8d", null ],
    [ "ArgMinChannelTest", "_arg_min_max_test_impl_8hpp.xhtml#ad1a3228fc6f2d00c816a6f11ff1092ec", null ],
    [ "ArgMinSimpleTest", "_arg_min_max_test_impl_8hpp.xhtml#a1fc3e309bafd760c40cec0e98d995529", null ],
    [ "ArgMinWidthTest", "_arg_min_max_test_impl_8hpp.xhtml#aba0c3336ffd8a44dc4ab7d61a32fd5c2", null ]
];
#!/bin/sh
# Start an rsync daemon serving the LWFS configuration.
# BUG FIX: an unchecked `cd` (ShellCheck SC2164) would otherwise start the
# daemon from whatever directory we happened to be in, silently picking up
# the wrong (or no) lwfs-rsyncd.conf.
cd /home/SERVERS || exit 1
sudo rsync --daemon --config=lwfs-rsyncd.conf
|
<reponame>wultra/powerauth-webflow
/*
* Copyright 2019 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.app.tppengine.model.certificate;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.security.cert.CertificateException;
import java.util.HashSet;
import java.util.Set;
/**
* @author <NAME>, <EMAIL>
*/
/**
 * Verifies that {@code ICACertificateParser} extracts every subject and
 * PSD2 field from a real certificate issued by the I.CA TEST CA.
 */
class ICACertificateParserTest {

    @Test
    public void testCertificateParser() throws CertificateException {
        // PEM-encoded TEST certificate issued to cnb.cz (Czech National Bank).
        // NOTE(review): several base64 chunks were redacted to "<KEY>"
        // upstream -- confirm the fixture still parses before relying on it.
        final String certificate = "-----BEGIN CERTIFICATE-----\n" +
                "MIIIJjCCBg6gAwIBAgIJA7fTH4NdPar6MA0GCSqGSIb3DQEBCwUAMH8xCzAJBgNV\n" +
                "BAYTAkNaMSgwJgYDVQQDDB9JLkNBIFRFU1QgU1NMIEVWIENBL1JTQSAxMC8yMDE3\n" +
                "MS0wKwYDVQQKDCRQcnZuw60gY2VydGlmaWthxI1uw60gYXV0b3JpdGEsIGEucy4x\n" +
                "FzAVBgNVBGEMDk5UUkNaLTI2NDM5Mzk1MB4XDTE5MTIwMjEwNDgwMVoXDTIwMTIw\n" +
                "MTEwNDgwMVowggEEMQswCQYDVQQGEwJDWjEPMA0GA1UEAwwGY25iLmN6MSAwHgYD\n" +
                "VQQKDBfEjEVTS8OBIE7DgVJPRE7DjSBCQU5LQTEdMBsGA1UECQwUTmEgcMWZw61r\n" +
                "b3DEmyA4NjQvMjgxEDAOBgNVBAcMB1ByYWhhIDExDjAMBgNVBBEMBTExMDAwMREw\n" +
                "DwYDVQQFEwg0ODEzNjQ1MDEdMBsGA1UEDwwUUHJpdmF0ZSBPcmdhbml6YXRpb24x\n" +
                "EzARBgsrBgEEAYI3PAIBAxMCQ1oxHTAbBgNVBAgMFEhsYXZuw60gbcSbc3RvIFBy\n" +
                "YWhhMRswGQYDVQRhDBJQU0RDWi1DTkItNDgxMzY0NTAwggEiMA0GCSqGSIb3DQEB\n" +
                "<KEY>n" +
                "4apnCRdGqeRFvdDZBZPKfYOpw1cvfk3YTAtEeh2MbGQCgdTqrl0LKBILEPKi60lT\n" +
                "rcEFtIBFxC34NhuHeUDifU9pul3y1SIGq1kYgU3zeF0IJBOEfJ5Ez9kIQ/pbjx+h\n" +
                "41VMQh0esqKu9hEMQr5QOJlUP1uILX76pMfyKgyGHlP4Dy587yMI/dSp7E2S97+n\n" +
                "1/D/zW/3fB3fC2x4NYJx8ufrwhCG/etvWk917iclR39f5GU9mu8a5pBDgGwxuNCW\n" +
                "QLnB9aDIuqOK7miQtzeXlIKR4VcwWLCrkHyrjy2KtzPhAgMBAAGjggMcMIIDGDAR\n" +
                "BgNVHREECjAIggZjbmIuY3owCQYDVR0TBAIwADCB5gYDVR0gBIHeMIHbMIHNBg0r\n" +
                "BgEEAYG4SAoDK<KEY>0GCCsGAQUFBwIBFhFodHRwOi8vd3d3LmljYS5jejCB\n" +
                "mQYIKwYBBQUHAgIwgYwagYlUZW50byBURVNUIGNlcnRpZmlrYXQgYnlsIHZ5ZGFu\n" +
                "<KEY>" +
                "<KEY>" +
                "<KEY>" +
                "<KEY>n" +
                "<KEY>n" +
                "AQUFBzABhh5odHRwOi8vdG9jc3AuaWNhLmN6L3RxY3cxN19yc2EwDgYDVR0PAQH/\n" +
                "BAQDAgWgMIH+BggrBgEFBQcBAwSB8TCB7jAIBgYEAI5GAQEwEwYGBACORgEGMAkG\n" +
                "BwQAjkYBBgMwVgYGBACORgEFMEwwJBYeaHR0cDovL3Rlc3RxLmljYS5jei9wZHNf\n" +
                "Y3MucGRmEwJjczAkFh5odHRwOi8vdGVzdHEuaWNhLmN6L3Bkc19lbi5wZGYTAmVu\n" +
                "MHUGBgQAgZgnAjBrMEwwEQYHBACBmCcBAQwGUFNQX0FTMBEGBwQAgZgnAQIMBlBT\n" +
                "UF9QSTARBgcEAIGYJwEDDAZQU1BfQUkwEQYHBACBmCcBBAwGUFNQX0lDDBNDemVj\n" +
                "aCBOYXRpb25hbCBCYW5rDAZDWi1DTkIwHwYDVR0jBBgwFoAUOv/ngSfM0sonGeca\n" +
                "odAaO8awn6owHQYDVR0OBBYEFIe6wOqAu0xteqo19vBNC1OjVvHnMB0GA1UdJQQW\n" +
                "<KEY>n" +
                "61of+TQUMxpkYSLgfTUKzZ6bJqc6ir1r4NPb4WNrAZkJSaJnIFSvej4m6z0Nit0o\n" +
                "eHeGxJDQEwWCaQFa3E9lJS/33oZQQGn0iMsgN8rj70FXbGGRE1ZcvyhhioKEmA7f\n" +
                "AbbkRlgxigrRp3cY12M7m3SfuD0Rr9fAJ30vvi1UuUBiJCUIznjbWezF2gNyd1KX\n" +
                "hroKcoqMxl5260m5DSAWwoUvwc7MxjlHyCEx28RXv2/lWij2P9hnyN8WdjnO1Py9\n" +
                "1RrJEJg9BJmfEdOfzVvtCjqAME77EqLB8wysktDe0T6BE7Ef96j/QKEFLId2kVtv\n" +
                "U9iJ6xaZwyo5Jh68cC0/tZGMJ4cTx3OES4VttRNzIcneZ8y+gtoPs4X5Ob/uqc5s\n" +
                "QrFMf+AclRFimNdAz0DN6Kv3kUS8kZtKn+XN7+Y1gkMHmbT6WSgfWB6BQUbbxG+a\n" +
                "Wj3TY+MPQ/SuAJ42hv7iiWUwapcXTyI560n5KFKKiyXHtgu+jipCAR74VBIf4or9\n" +
                "fO3E0tLGMlFvwLe2vfiAnBuiAZ1baM9a2vQWBcB/7SahqrBtKGpwGkJg6TAkYVIN\n" +
                "EruSUWJnKZlRB/wtGJ6Z/b8DI+18RGmpy4YlF9ujYTiice2GyVXD2HndNBVhqq2o\n" +
                "QYANhYtS0EAXe5o3NF2ZxkQ2fiABEPO7/RU=\n" +
                "-----END CERTIFICATE-----";

        ICACertificateParser parser = new ICACertificateParser();
        final CertInfo parse = parser.parse(certificate);

        // Check basic certificate info (subject DN fields)
        Assertions.assertEquals("48136450", parse.getSerialNumber());
        Assertions.assertEquals("cnb.cz", parse.getCommonName());
        Assertions.assertEquals("PSDCZ-CNB-48136450", parse.getPsd2License());
        Assertions.assertEquals("ČESKÁ NÁRODNÍ BANKA", parse.getOrganization());
        Assertions.assertEquals("Na příkopě 864/28", parse.getStreet());
        Assertions.assertEquals("Praha 1", parse.getCity());
        Assertions.assertEquals("Hlavní město Praha", parse.getRegion());
        Assertions.assertEquals("11000", parse.getZipCode());
        Assertions.assertEquals("CZ", parse.getCountry());
        Assertions.assertEquals("https://cnb.cz", parse.getWebsite());
        Assertions.assertEquals("Na příkopě 864/28\nPraha 1\n11000\nHlavní město Praha\nCZ", parse.getAddressUnstructured());

        // PSD2 mandates compared as a Set, so ordering is irrelevant.
        Set<CertInfo.PSD2> expected = new HashSet<>();
        expected.add(CertInfo.PSD2.PSP_AS);
        expected.add(CertInfo.PSD2.PSP_AI);
        expected.add(CertInfo.PSD2.PSP_IC);
        expected.add(CertInfo.PSD2.PSP_PI);
        Assertions.assertEquals(expected, parse.getPsd2Mandates());
    }
}
/**
*
*/
package jsr107;
import javax.cache.Cache;
import javax.cache.CacheManager;
import javax.cache.Caching;
import javax.cache.configuration.CompleteConfiguration;
import javax.cache.configuration.MutableConfiguration;
import javax.cache.expiry.CreatedExpiryPolicy;
import javax.cache.expiry.Duration;
import javax.cache.spi.CachingProvider;
import org.ehcache.config.CacheConfiguration;
import org.ehcache.config.CacheRuntimeConfiguration;
import org.ehcache.config.builders.CacheConfigurationBuilder;
import org.ehcache.config.builders.ResourcePoolsBuilder;
import org.ehcache.jsr107.Eh107Configuration;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author dzh
* @date Nov 4, 2016 5:10:25 PM
* @since 1.0
*/
public class TestJCache {

    static Logger LOG = LoggerFactory.getLogger(TestJCache.class);

    /**
     * Creates a cache via the JSR-107 provider discovered on the classpath,
     * stores one entry and reads it back.
     */
    @Test
    public void createCacheTest() {
        CachingProvider provider = Caching.getCachingProvider();
        CacheManager cacheManager = provider.getCacheManager();
        // Store-by-reference; entries expire one minute after creation.
        MutableConfiguration<Long, String> configuration = new MutableConfiguration<Long, String>()
                .setTypes(Long.class, String.class).setStoreByValue(false)
                .setExpiryPolicyFactory(CreatedExpiryPolicy.factoryOf(Duration.ONE_MINUTE));
        Cache<Long, String> cache = cacheManager.createCache("jCache", configuration);
        cache.put(1L, "one");
        String value = cache.get(1L);
        LOG.info(value);
    }

    /**
     * Shows the three configuration views of one cache: the JSR-107
     * CompleteConfiguration, the Eh107Configuration bridge, and the
     * Ehcache-native runtime configuration unwrapped from it.
     */
    @Test
    public void cacheConfigTest() {
        CachingProvider provider = Caching.getCachingProvider();
        CacheManager cacheManager = provider.getCacheManager();
        MutableConfiguration<Long, String> configuration = new MutableConfiguration<Long, String>();
        configuration.setTypes(Long.class, String.class);
        Cache<Long, String> cache = cacheManager.createCache("someCache", configuration);
        CompleteConfiguration<Long, String> completeConfiguration = cache.getConfiguration(CompleteConfiguration.class);
        Eh107Configuration<Long, String> eh107Configuration = cache.getConfiguration(Eh107Configuration.class);
        CacheRuntimeConfiguration<Long, String> runtimeConfiguration = eh107Configuration
                .unwrap(CacheRuntimeConfiguration.class);
    }

    /**
     * Builds a cache from a native Ehcache configuration wrapped for JSR-107.
     * Such a cache exposes Eh107Configuration (unwrappable to the Ehcache
     * types) but rejects CompleteConfiguration with IllegalArgumentException.
     */
    @Test
    public void configEhcache2Jsr107() {
        CachingProvider provider = Caching.getCachingProvider();
        CacheManager cacheManager = provider.getCacheManager();
        CacheConfiguration<Long, String> cacheConfiguration = CacheConfigurationBuilder
                .newCacheConfigurationBuilder(Long.class, String.class, ResourcePoolsBuilder.heap(10)).build();
        Cache<Long, String> cache = cacheManager.createCache("myCache",
                Eh107Configuration.fromEhcacheCacheConfiguration(cacheConfiguration));
        Eh107Configuration<Long, String> configuration = cache.getConfiguration(Eh107Configuration.class);
        configuration.unwrap(CacheConfiguration.class);
        configuration.unwrap(CacheRuntimeConfiguration.class);
        try {
            cache.getConfiguration(CompleteConfiguration.class);
            throw new AssertionError("IllegalArgumentException expected");
        } catch (IllegalArgumentException iaex) {
            // Expected
        }
    }

    /**
     * Placeholder for loading a JSR-107 cache from an Ehcache XML file;
     * the body is intentionally commented out.
     */
    @Test
    public void configFromEchache() {
        // CachingProvider cachingProvider = Caching.getCachingProvider();
        // CacheManager manager = cachingProvider.getCacheManager(
        // getClass().getResource("/org/ehcache/docs/ehcache-jsr107-config.xml").toURI(),
        // getClass().getClassLoader());
        // Cache<Long, Product> readyCache = manager.getCache("ready-cache", Long.class, Product.class);
    }
}
|
#!/bin/bash
# Build the ompi-toy-box image.
# BUG FIX: without `set -e` a failed `docker build` still exits 0, so CI or
# callers chaining on this script would treat a broken image as success.
set -euo pipefail
docker build -t ompi-toy-box:latest -f Dockerfile.ssh .
#docker build -t ompi-toy-box:ubi8 -f Dockerfile.ssh.ubi8 .
|
#!/bin/sh
###############################################################################
# Start MySQL service
###############################################################################
# Optional provisioning hooks baked into the image; each runs only if present.
CREATE_ADMIN=/opt/mysql_create_admin.sh
CREATE_DB=/opt/mysql_create_db.sh

## Create admin user
if [ -e "$CREATE_ADMIN" ]; then
    "$CREATE_ADMIN"
fi

## Create database
if [ -e "$CREATE_DB" ]; then
    "$CREATE_DB"
fi

## Start supervisor in the foreground (keeps the container alive).
# BUG FIX: `[ $(which supervisord) ]` is non-POSIX (`which` may be missing
# or print noise) and word-splits if the resolved path contains spaces;
# `command -v` is the portable, safe test.
if command -v supervisord >/dev/null 2>&1; then
    supervisord -n -c /etc/supervisor/supervisord.conf
fi
|
/**
 * Teaching demo: declares and prints each primitive type plus String, then
 * shows two classic conversion pitfalls (integer division, char widening).
 */
public class CCVariables {

    public static void main(String[] args) {
        int myVar = 12;
        float myFloat = 1245;   // int literal widened to float (prints 1245.0)
        char myChar = 'a';
        boolean myBool = false;
        String myString = "Testing";
        double myDouble = 122323.45;

        System.out.println(myVar);
        System.out.println(myFloat);
        System.out.println(myChar);
        System.out.println(myBool);
        System.out.println(myString);
        System.out.println(myDouble);

        // Pitfall: 15 / 2 is evaluated in int arithmetic first, so x is 7.0
        // (not 7.5); this line demonstrates truncating integer division.
        double x = 15 / 2;
        System.out.println(x);

        // A char assigned to an int stores its numeric code: 'B' prints 66.
        int grade = 'B';
        System.out.println(grade);
    }
}
<reponame>DianeYuan/introduction-to-java-programming-10th-edition
package com.company;
import java.util.Scanner;
/**
 * Exercise 2.21: compute the future value of an investment compounded
 * monthly, reading amount, annual rate (%) and years from standard input.
 */
public class Exercise_2_21 {

    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);

        System.out.print("Enter investment amount: ");
        double principal = input.nextDouble();

        // Annual percentage rate -> monthly fraction (divide by 12 * 100).
        System.out.print("Enter annual interest rate in percentage: ");
        double ratePerMonth = input.nextDouble() / 1200;

        System.out.print("Enter number of years: ");
        int numberOfYears = input.nextInt();

        // Compound once per month over the whole period.
        double accumulated = principal * Math.pow((1 + ratePerMonth), numberOfYears * 12);

        // Truncate to two decimal places, matching the textbook's output.
        System.out.println("Accumulated value is $" + (int) (accumulated * 100) / 100.0);
    }
}
|
<filename>gdx-pay-android-googleplay/src/test/java/com/badlogic/gdx/pay/android/googleplay/billing/converter/InAppPurchaseDataToTransactionConverterTest.java<gh_stars>1-10
package com.badlogic.gdx.pay.android.googleplay.billing.converter;
import com.badlogic.gdx.pay.Transaction;
import org.junit.Test;
import java.util.Date;
import static com.badlogic.gdx.pay.Transaction.REVERSAL_TEXT_CANCELLED;
import static com.badlogic.gdx.pay.Transaction.REVERSAL_TEXT_REFUNDED;
import static com.badlogic.gdx.pay.android.googleplay.billing.converter.InAppPurchaseDataToTransactionConverter.convertJSONPurchaseToTransaction;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests conversion of Google Play IN_APP_PURCHASE_DATA JSON into a gdx-pay
 * {@link Transaction}, covering purchaseState 0/1/2 (purchased, cancelled,
 * refunded). Purchase tokens were redacted to "&lt;PASSWORD&gt;" upstream.
 */
public class InAppPurchaseDataToTransactionConverterTest {

    /** purchaseState 0: identifier and time filled, no reversal info. */
    @Test
    public void fillsPurchaseFromJson() throws Exception {
        String payload = "{\"packageName\":\"com.app.name\",\"productId\":\"com.app.name.productId\",\n" +
                " \"purchaseTime\":1466539081315,\"purchaseState\":0,\n" +
                " \"developerPayload\":\"justSomePayload\",\n" +
                " \"purchaseToken\":\"<PASSWORD>\"}\n";
        Transaction transaction = convertJSONPurchaseToTransaction(payload);
        assertThat(transaction.getIdentifier()).isEqualTo("com.app.name.productId");
        // purchaseTime 1466539081315 ms is in June (month 6).
        assertThat(transaction.getPurchaseTime()).isWithinMonth(6);
        assertThat(transaction.getReversalTime()).isNull();
        assertThat(transaction.getReversalText()).isNull();
    }

    /** purchaseState 1: marked reversed "now" with the cancelled text. */
    @Test
    public void marksTransactionAsReversedWhenPurchaseStateIsCancelled() throws Exception {
        String payload = "{\"packageName\":\"com.app.name\",\"productId\":\"com.app.name.productId\",\n" +
                " \"purchaseTime\":1466539081315,\"purchaseState\":1,\n" +
                " \"developerPayload\":\"justSomePayload\",\n" +
                " \"purchaseToken\":\"<PASSWORD>\"}\n";
        Transaction transaction = convertJSONPurchaseToTransaction(payload);
        // 100 ms tolerance between conversion and the assertion's new Date().
        assertThat(transaction.getReversalTime()).isCloseTo(new Date(), 100);
        assertThat(transaction.getReversalText()).isEqualTo(REVERSAL_TEXT_CANCELLED);
    }

    /** purchaseState 2: marked reversed "now" with the refunded text. */
    @Test
    public void marksTransactionAsReversedWhenPurchaseStateIsRefunded() throws Exception {
        String payload = "{\"packageName\":\"com.app.name\",\"productId\":\"com.app.name.productId\",\n" +
                " \"purchaseTime\":1466539081315,\"purchaseState\":2,\n" +
                " \"developerPayload\":\"justSomePayload\",\n" +
                " \"purchaseToken\":\"<PASSWORD>\"}\n";
        Transaction transaction = convertJSONPurchaseToTransaction(payload);
        assertThat(transaction.getReversalTime()).isCloseTo(new Date(), 100);
        assertThat(transaction.getReversalText()).isEqualTo(REVERSAL_TEXT_REFUNDED);
    }
}
package translations;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.function.Consumer;
import java.util.stream.Stream;
import org.junit.Assert;
import org.junit.Test;
import com.google.common.reflect.ClassPath;
/**
* Detect suspicious method, e.g.
* <X extends CharSequence> X getCharSequence() {
* return (X) "hello";
* }
*
* That java allows to abuse:
* Integer i = getCharSequence();
*/
public class GenericMethodWithoutParametersTest {

    /**
     * Scans every loadable top-level class on the context classpath
     * (excluding common third-party/JDK packages) and fails if any public
     * method declares its own type parameters but takes no arguments --
     * the pattern described in the class comment above, whose return type
     * can be abused into an unchecked cast at the call site.
     *
     * @throws Exception if the classpath cannot be enumerated
     */
    @Test
    public void test() throws Exception {
        ArrayList<String> errors = new ArrayList<>();
        // Collects each suspicious signature and echoes it for the log.
        Consumer<String> errorAction = new Consumer<String>() {
            @Override
            public void accept(String suspectMethodSignature) {
                System.out.println(suspectMethodSignature);
                errors.add(suspectMethodSignature);
            }
        };
        ClassPath
                .from(Thread.currentThread().getContextClassLoader()).getTopLevelClasses().stream()
                // Only inspect this project's packages.
                .filter(
                        info -> !info.getPackageName().startsWith("com.")
                                && !info.getPackageName().startsWith("org.")
                                && !info.getPackageName().startsWith("net.")
                                && !info.getPackageName().startsWith("javax.")
                                && !info.getPackageName().startsWith("java."))
                // Classes that fail to load (missing deps, errors) are skipped.
                .flatMap(info -> {
                    try {
                        return Stream.of(info.load());
                    } catch (Throwable ignore) {
                        return Stream.empty();
                    }
                }).flatMap(c -> {
                    try {
                        return Stream.of(c.getMethods());
                    } catch (Throwable ignore) {
                        return Stream.<Method> of();
                    }
                // The suspect shape: generic method with zero parameters.
                }).filter(m -> m.getTypeParameters().length > 0 && m.getParameterCount() == 0)
                .sorted(Comparator.comparing(Method::toString)).map(Method::toGenericString)
                .forEach(errorAction);
        Assert.assertTrue(errors.isEmpty());
    }
}
|
#!/bin/bash
# Git identity used by setup_git below; override via the environment, e.g.
#   GIT_NAME="Jane Doe" GIT_MAIL=jane@example.com ./setup.sh
GIT_NAME=${GIT_NAME:-"jonz94"}
GIT_MAIL=${GIT_MAIL:-"jody16888@gmail.com"}
# Run every installer under ~/dotfiles/scripts/install in glob (sorted) order.
install() {
    for INSTALL_SCRIPT in ~/dotfiles/scripts/install/*.sh; do
        # BUG FIX: quote the path (ShellCheck SC2086) so a script whose name
        # contains whitespace is passed to bash as a single argument.
        bash "${INSTALL_SCRIPT}"
    done
}
# Move an existing regular file aside as "<file>.backup.<timestamp>".
# NOTE(review): the -f test means directories are deliberately not backed
# up here -- setup_neovim handles its (possibly directory) config itself.
backup() {
    # BUG FIX: all expansions of $1 are quoted (ShellCheck SC2086); the
    # unquoted form word-split paths containing spaces and moved the
    # wrong thing (or failed).
    if [ -f "${1}" ]; then
        mkdir -p "$(dirname "${1}")"
        TIMESTAMP=$(date +"%Y-%m-%d,%H:%M:%S")
        echo "Backup old ${1} to ${1}.backup.${TIMESTAMP}"
        mv "${1}" "${1}.backup.${TIMESTAMP}"
    fi
}
# Link the zim-based zsh configuration into $HOME and install zim modules.
setup_zsh() {
    echo 'Setting up zsh...'
    ln -s $HOME/dotfiles/zsh/zim/zshenv $HOME/.zshenv
    ln -s $HOME/dotfiles/zsh/zim/zlogin $HOME/.zlogin
    ln -s $HOME/dotfiles/zsh/zim/zimrc $HOME/.zimrc
    # .zshrc is generated (not symlinked) so the p10k line below can append to it.
    echo "source $HOME/dotfiles/zsh/zim/zshrc" > $HOME/.zshrc
    # setup local configurations for zsh
    mkdir -p ~/dotfiles/local
    touch ~/dotfiles/local/local.zsh
    touch ~/dotfiles/local/theme.zsh
    echo 'Installing zsh modules...'
    zsh ~/.zim/zimfw.zsh install
    # disable Powerlevel10k configuration wizard
    # (">>!" is run through `zsh -c` because it is zsh-only redirection syntax)
    zsh -c "echo 'POWERLEVEL9K_DISABLE_CONFIGURATION_WIZARD=true' >>! ~/.zshrc"
    echo 'zsh is ready!'
}
# Symlink ~/dotfiles/nvim as the neovim config, preserving any existing one.
# Uses -e (unlike backup()) because the existing config may be a directory.
setup_neovim() {
    echo 'Setting up neovim...'
    NVIM_CONFIG_DIR="$HOME/.config/nvim"
    # backup
    if [ -e "$NVIM_CONFIG_DIR" ]; then
        TIMESTAMP=$(date +"%Y-%m-%d,%H:%M:%S")
        echo "Backup ${NVIM_CONFIG_DIR} to ${NVIM_CONFIG_DIR}.backup.${TIMESTAMP}"
        # BUG FIX: quoted (ShellCheck SC2086) -- a $HOME containing spaces
        # previously word-split these paths and broke the mv.
        mv "${NVIM_CONFIG_DIR}" "${NVIM_CONFIG_DIR}.backup.${TIMESTAMP}"
    fi
    ln -s "$HOME/dotfiles/nvim" "$NVIM_CONFIG_DIR"
    echo 'neovim is ready!'
}
# Write the global git configuration (identity, rebase pulls, editor, branch).
setup_git() {
    echo 'Setting up git...'
    # BUG FIX: quote the identity values -- GIT_NAME may legitimately contain
    # spaces (e.g. "Jane Doe"); unquoted expansion passed only the first word
    # to `git config` and treated the rest as extra arguments.
    git config --global user.name "${GIT_NAME}"
    git config --global user.email "${GIT_MAIL}"
    git config --global pull.rebase true
    git config --global core.editor nvim
    git config --global init.defaultBranch main
    echo 'git is ready!'
}
# Hook the shared tmux config into ~/.tmux.conf, seed editable local theme
# copies, and install plugins via tpm.
setup_tmux() {
    echo 'Setting up tmux...'
    backup ~/.tmux.conf
    echo 'source-file $HOME/dotfiles/tmux/jonz94.tmux.conf' >> ~/.tmux.conf
    # setup local configurations for tmux (real copies, so they can diverge)
    mkdir -p ~/dotfiles/local
    backup ~/dotfiles/local/theme.tmux.conf
    cp ~/dotfiles/tmux/theme.tmux.conf ~/dotfiles/local/theme.tmux.conf
    backup ~/dotfiles/local/themepack.tmux.conf
    cp ~/dotfiles/tmux/themepack.tmux.conf ~/dotfiles/local/themepack.tmux.conf
    echo 'Installing tmux plugins...'
    # assumes tpm is already cloned at ~/.tmux/plugins/tpm -- TODO confirm
    ~/.tmux/plugins/tpm/bin/install_plugins
    echo 'tmux is ready!'
}
# Regenerate zsh completions for fnm; only applicable on Linux hosts.
setup_fnm_completions_for_linux() {
    # Guard clause: nothing to do on non-Linux systems.
    if [ "$(uname)" != "Linux" ]; then
        return
    fi
    export PATH="$HOME/.fnm:$PATH"
    eval "$(fnm env)"
    rm -f ~/dotfiles/zsh/functions/_fnm
    fnm completions --shell zsh > ~/dotfiles/zsh/functions/_fnm
}
# Run every setup step, in order.
install
setup_zsh
setup_neovim
setup_git
setup_tmux
setup_fnm_completions_for_linux
echo '🎉 All Done!'
echo '🙌 Some changes might need re-login to take effects.'
|
#!/bin/bash -e
################################################
# Deploy from dev to staging targets or live   #
################################################

USER="$(id -un)" # $USER empty in vscode terminal
BRANCH="$(git branch --show-current)"
# Repository root: one directory above wherever this script lives.
WORDPRESS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )"
PATH_OWID_PLUGIN="web/app/plugins/owid"
ROOT="/home/owid"

# Map the requested target ($1) to an ssh host; live gets extra safeguards.
if [[ "$1" =~ ^(staging|hans|playfair|jefferson|nightingale|explorer|exemplars|tufte|roser)$ ]]; then
    HOST="owid-staging"
elif [ "$1" == "live" ]; then
    HOST="owid-live"

    # Live deploys must run from master with a clean, up-to-date working copy.
    if [ "$BRANCH" != "master" ]; then
        echo "Please run from the master branch."
        exit 1
    else
        # Making sure we have the latest changes from the upstream
        # Also, will fail if working copy is not clean
        git pull --rebase
    fi

    # Prompt for confirmation if deploying to live
    read -p "Are you sure you want to deploy to '$1'? " -n 1 -r
else
    echo "Please select either live or a valid test target."
    exit 1
fi

# $REPLY is only set by the live-path `read` above; any non-live target
# proceeds unconditionally.
if [[ $REPLY =~ ^[Yy]$ ]] || [ "$1" != "live" ]; then
    NAME="$1-wordpress"
    OLD_REPO_BACKUP="$ROOT/tmp/$NAME-old"
    SYNC_TARGET="$ROOT/tmp/$NAME-$USER"
    TMP_NEW="$ROOT/tmp/$NAME-$USER-tmp"
    FINAL_TARGET="$ROOT/$NAME"
    FINAL_DATA="$ROOT/$1-data"
    GRAPHER_DIR="$ROOT/$1"

    # Rsync the local repository to a temporary location on the server
    echo 'Uploading files...'
    rsync -havz --progress --delete --delete-excluded --filter="merge $WORDPRESS_DIR/.rsync-filter" $WORDPRESS_DIR/ $HOST:$SYNC_TARGET

    # Remote build + atomic swap; the heredoc below is executed by bash on
    # $HOST, so all $-variables are expanded locally before being sent.
    echo 'Performing atomic copy...'
    ssh -t $HOST 'bash -e -s' <<EOF
# Ensure target directories exist
mkdir -p $ROOT/tmp
mkdir -p $FINAL_TARGET
# Remove any previous temporary repo
rm -rf $TMP_NEW
# Copy the synced repo-- this is because we're about to move it, and we want the
# original target to stay around to make future syncs faster
cp -r $SYNC_TARGET $TMP_NEW
# Install dependencies, build assets
cd $TMP_NEW
composer install --no-dev
cd $TMP_NEW/$PATH_OWID_PLUGIN
yarn install
yarn build
# Link in all the persistent stuff that needs to stay around between versions
ln -s $FINAL_DATA/wordpress/.env $TMP_NEW/.env
ln -s $FINAL_DATA/wordpress/uploads $TMP_NEW/web/app/uploads
ln -s $GRAPHER_DIR $TMP_NEW/web/wp/codelink
# Atomically swap the old and new versions
rm -rf $OLD_REPO_BACKUP
mv $FINAL_TARGET $OLD_REPO_BACKUP || true
mv $TMP_NEW $FINAL_TARGET
EOF
fi
|
# for ease in setting fancy ps1 stuff
# ANSI SGR escape sequences: regular, bold, underline, and background
# variants of the eight base colors, plus a reset.
txtblk='\033[00;30m' # Black - Regular
txtred='\033[00;31m' # Red
txtgrn='\033[00;32m' # Green
txtylw='\033[00;33m' # Yellow
txtblu='\033[00;34m' # Blue
txtpur='\033[00;35m' # Purple
txtcyn='\033[00;36m' # Cyan
txtwht='\033[00;37m' # White
bldblk='\033[01;30m' # Black - Bold
bldred='\033[01;31m' # Red
bldgrn='\033[01;32m' # Green
bldylw='\033[01;33m' # Yellow
bldblu='\033[01;34m' # Blue
bldpur='\033[01;35m' # Purple
bldcyn='\033[01;36m' # Cyan
bldwht='\033[01;37m' # White
unkblk='\033[04;30m' # Black - Underline
undred='\033[04;31m' # Red
undgrn='\033[04;32m' # Green
undylw='\033[04;33m' # Yellow
undblu='\033[04;34m' # Blue
undpur='\033[04;35m' # Purple
undcyn='\033[04;36m' # Cyan
undwht='\033[04;37m' # White
bakblk='\033[40m' # Black - Background
bakred='\033[41m' # Red
bakgrn='\033[42m' # Green
# BUG FIX: the green background was misspelled `badgrn`, breaking the
# bak* naming series; keep the old name as an alias so any prompt already
# using it keeps working.
badgrn="$bakgrn"
bakylw='\033[43m' # Yellow
bakblu='\033[44m' # Blue
bakpur='\033[45m' # Purple
bakcyn='\033[46m' # Cyan
bakwht='\033[47m' # White
txtrst='\033[00m' # Text Reset

# Set up fancy pants colors in ls
# yellow dirs, green exec, cyan sym links, rest defaulted
# man ls for the details
export CLICOLOR=true
export LSCOLORS=dxgxfxexcxegedabagacad
|
package com.alexjing.pullpushtorefresh.lib.base;
/**
* @author: <NAME>(<EMAIL>)
* @date: 2016-05-06
* @time: 14:44
*/
/**
 * Base contract for MVP-style views: a view only needs to accept the
 * presenter that drives it.
 *
 * @param <T> the presenter type for this view
 */
public interface BaseView<T> {

    /**
     * Injects the presenter that drives this view.
     *
     * @param presenter the presenter instance
     */
    void setPresenter(T presenter);
}
|
#!/bin/bash
# Fetch architecture-diagram icons, then rasterize the SVG ones to PNG
# with ImageMagick's `convert`.
wget -O spark-logo.png https://spark.apache.org/images/spark-logo-trademark.png
wget -O kafka-logo.png https://svn.apache.org/repos/asf/kafka/site/logos/kafka-logo-no-text.png
wget -O event-hub.svg https://worldvectorlogo.com/download/azure-event-hub.svg
wget -O iot-hub.svg "https://azure.microsoft.com/svghandler/iot-hub/?width=600&height=600"
wget -O databricks.svg "https://azure.microsoft.com/svghandler/databricks?width=600&height=600"
wget -O table-storage.svg "https://docs.microsoft.com/en-us/azure/media/index/storage.svg"
wget -O stream-analytics.svg "https://azure.microsoft.com/svghandler/stream-analytics?width=600&height=600"
wget -O sql-database.svg "https://azure.microsoft.com/svghandler/sql-database?width=600&height=600"
wget -O cosmos-db.svg "https://azure.microsoft.com/svghandler/cosmos-db?width=600&height=600"
wget -O webjob.png https://developersde.blob.core.windows.net/usercontent/2018/4/15734_webjobs.png
wget -O aks.png https://stephanefrechette.com/wp-content/uploads/2018/04/Azure-Container-Service_COLOR-300x300.png
wget -O dashboard.png https://www.freeiconspng.com/uploads/dashboard-icon-3.png
wget -O blob-storage.png https://azureautomation950740325.files.wordpress.com/2018/02/azure-storage-blob.png
wget -O python-logo.png https://upload.wikimedia.org/wikipedia/commons/thumb/c/c3/Python-logo-notext.svg/2000px-Python-logo-notext.svg.png
wget -O dsvm.svg "https://azure.microsoft.com/svghandler/virtual-machines-data-science-virtual-machines/?width=600&height=600"
# BUG FIX: this wordmark used to be saved as databricks.png, which the
# `convert ... databricks.svg databricks.png` step below then overwrote,
# silently discarding the download. Keep it under its own name.
wget -O databricks-header.png "https://databricks.com/wp-content/themes/databricks/assets/images/header_logo.png"
wget -O file-storage.png "https://ievgensaxblog.files.wordpress.com/2017/07/azure-storage-files.png"
wget -O aml-workbench.svg "http://azureml.azureedge.net/content/apphome/media/AML-Logo.svg"
wget -O model.png "https://vc4prod.blob.core.windows.net/catalog/Recommendations/Machine-Learning.png"
wget -O jupyter.png "http://jupyter.org/assets/try/jupyter.png"
convert -density 300 event-hub.svg event-hub.png
convert -density 300 iot-hub.svg iot-hub.png
convert -density 300 databricks.svg databricks.png
convert -density 300 table-storage.svg table-storage.png
convert -density 300 stream-analytics.svg stream-analytics.png
convert -density 300 sql-database.svg sql-database.png
convert -density 300 cosmos-db.svg cosmos-db.png
convert -density 300 dsvm.svg dsvm.png
convert -density 300 aml-workbench.svg aml-workbench.png
|
import random
def generate_matrix(length, low=0, high=100):
    """Build a square matrix of uniformly random integers.

    Generalized from the original hard-coded randint(0, 100): the bounds
    are now parameters with backward-compatible defaults.

    Args:
        length: Number of rows and columns; 0 yields an empty list.
        low: Inclusive lower bound for each entry (default 0).
        high: Inclusive upper bound for each entry (default 100).

    Returns:
        A list of ``length`` rows, each a list of ``length`` ints
        drawn from ``random.randint(low, high)``.
    """
    return [[random.randint(low, high) for _ in range(length)]
            for _ in range(length)]
print(generate_matrix(3)) |
// Prefix every element of a JSON-encoded array (argv[3]) with the string
// given in argv[2], then print the resulting array.
const prefix = process.argv[2];
const elements = JSON.parse(process.argv[3]);

const outputArray = elements.map((element) => prefix + element);

console.log(outputArray);
#!/bin/bash

# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Disconnect
# @raycast.mode silent
#
# Optional parameters:
# @raycast.packageName Mullvad
# @raycast.icon images/mullvad.png
#
# Documentation:
# @raycast.author Phil Salant
# @raycast.authorURL https://github.com/PSalant726
# @raycast.description Disconnect from the Mullvad VPN tunnel.
#
# Dependencies:
# The Mullvad CLI: https://mullvad.net/en/help/cli-command-wg/

# Bail out early when the CLI is missing entirely.
if ! command -v mullvad &> /dev/null; then
    echo "The Mullvad CLI is not installed"
    exit 1
fi

# --wait blocks until the disconnect completes; report any failure.
if ! mullvad disconnect --wait; then
    echo "Failed to disconnect from the VPN tunnel"
    exit 1
fi

echo "Disconnected and unsecured"
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# The heredoc delimiter is unquoted, so ${PLACEHOLDER}/${WIDTH}/${HEIGHT}
# are expanded at write time -- assumes the provisioner exports them
# before running this script (TODO confirm).
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ACME, best company over the world !!!
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
#include "catch.hpp"
#include "expression_helper.hpp"
#include "duckdb/planner/operator/logical_top_n.hpp"
#include "duckdb/optimizer/topn_optimizer.hpp"
#include "test_helpers.hpp"
using namespace duckdb;
using namespace std;
TEST_CASE("Test Top N optimization", "[topn]") {
	ExpressionHelper helper;
	helper.con.Query("CREATE TABLE integers(i INTEGER, j INTEGER)");

	TopN optimizer;

	// ORDER BY followed by LIMIT should be fused into a single TOP_N operator.
	auto plan = helper.ParseLogicalTree("SELECT i FROM integers ORDER BY i LIMIT 4");
	REQUIRE(plan->type == LogicalOperatorType::LIMIT);
	REQUIRE(plan->children[0]->type == LogicalOperatorType::ORDER_BY);
	auto optimized = optimizer.Optimize(std::move(plan));
	REQUIRE(optimized->type == LogicalOperatorType::TOP_N);

	// The fusion also applies when the LIMIT carries an OFFSET.
	plan = helper.ParseLogicalTree("SELECT i FROM integers ORDER BY i DESC LIMIT 4 OFFSET 5");
	REQUIRE(plan->type == LogicalOperatorType::LIMIT);
	REQUIRE(plan->children[0]->type == LogicalOperatorType::ORDER_BY);
	optimized = optimizer.Optimize(std::move(plan));
	REQUIRE(optimized->type == LogicalOperatorType::TOP_N);

	// An OFFSET without a LIMIT must be left untouched by the optimizer.
	plan = helper.ParseLogicalTree("SELECT i FROM integers ORDER BY i OFFSET 5");
	REQUIRE(plan->type == LogicalOperatorType::LIMIT);
	REQUIRE(plan->children[0]->type == LogicalOperatorType::ORDER_BY);
	optimized = optimizer.Optimize(std::move(plan));
	REQUIRE(optimized->type == LogicalOperatorType::LIMIT);
}
|
import ast
def parse_setup_file(setup_file_path):
    """Statically extract dependency information from a setup.py file.

    The file is parsed with :mod:`ast` (never executed) and two top-level
    assignments are read:

    * ``install_requires = ["name==version", ...]``
    * ``extras_require = {"extra": <subscript whose index is a string>}``

    Fixes over the original implementation:
    * uses ``ast.Constant`` instead of the ``.s`` attribute of ``ast.Str``
      (deprecated in 3.8, removed in Python 3.12);
    * handles the subscript-index shape on both Python < 3.9 (``ast.Index``
      wrapper) and >= 3.9 (plain node) -- the old ``value.slice.value.s``
      only worked pre-3.9;
    * a requirement without an ``==`` pin no longer raises IndexError; its
      version is recorded as ``None``.

    Args:
        setup_file_path: Path to the setup.py to inspect.

    Returns:
        dict with keys ``"main_dependencies"`` (name -> pinned version or
        ``None``) and ``"extras_require"`` (extra name -> string index of
        the subscript expression, mirroring the original behavior).

    Raises:
        OSError: If the file cannot be read.
        SyntaxError: If the file is not valid Python.
    """
    with open(setup_file_path, 'r') as file:
        setup_ast = ast.parse(file.read())

    def _assigns(node, name):
        # True when `node` is a single-target assignment to `name`.
        return (isinstance(node, ast.Assign)
                and len(node.targets) == 1
                and isinstance(node.targets[0], ast.Name)
                and node.targets[0].id == name)

    def _string(node):
        # Literal string value of a constant node, else None.
        if isinstance(node, ast.Constant) and isinstance(node.value, str):
            return node.value
        return None

    main_dependencies = {}
    extras_require = {}
    for node in setup_ast.body:
        if _assigns(node, 'install_requires'):
            for element in node.value.elts:
                requirement = _string(element)
                if requirement is None:
                    continue
                # "pkg==1.2" -> ("pkg", "1.2"); unpinned -> version None.
                name, _, version = requirement.partition('==')
                main_dependencies[name] = version if version else None
        elif _assigns(node, 'extras_require'):
            for key, value in zip(node.value.keys, node.value.values):
                extra = _string(key)
                if extra is None or not isinstance(value, ast.Subscript):
                    continue
                index = value.slice
                # Python < 3.9 wraps subscript indices in ast.Index.
                if isinstance(index, getattr(ast, 'Index', ())):
                    index = index.value
                extras_require[extra] = _string(index)
    return {
        "main_dependencies": main_dependencies,
        "extras_require": extras_require
    }
import { TestBed, async, fakeAsync, flushMicrotasks } from '@angular/core/testing';
import { DashboardHelperService } from './dashboard-helper.service';
import { BigQueryService } from '../../services/big-query/big-query.service';
// Test double for BigQueryService: runQuery resolves immediately with a
// stubbed (all-null-field) gapi request object, and convertResult always
// yields a single row containing one heatmap layer and one shape layer.
class MockBigQueryService {
    public runQuery(query: string): Promise<gapi.client.Request<gapi.client.bigquery.QueryResponse>> {
        // The null members only exist to satisfy the Request interface shape.
        return Promise.resolve({
            result: null,
            execute: null,
            then: null,
            catch: null,
            finally: null,
            [Symbol.toStringTag]: null
        });
    }

    public convertResult(result: gapi.client.bigquery.QueryResponse) {
        // Fixed fixture consumed by the getAdditionalLayers test below.
        return [
            {
                heatmap_layers: ['heatmap_layer'],
                shape_layers: ['shape_layer']
            }
        ];
    }
}
describe('DashboardHelperService', () => {
    let service: DashboardHelperService;
    let bigQueryService: MockBigQueryService;

    beforeEach(async(() => {
        // Substitute the mock for the real BigQueryService so no network
        // or gapi calls happen during the spec.
        TestBed.configureTestingModule({
            providers: [
                { provide: BigQueryService, useClass: MockBigQueryService }
            ]
        });
        service = TestBed.inject(DashboardHelperService);
        bigQueryService = TestBed.inject(BigQueryService);
    }));

    it('should be created', () => {
        expect(service).toBeTruthy();
    });

    it('#getAdditionalLayers should get all additional layers', fakeAsync(() => {
        let flag = null;
        service.getAdditionalLayers()
            .then(
                layers => flag = layers
            );
        // Drain the pending promise inside the fakeAsync zone before asserting.
        flushMicrotasks();
        expect(flag.heatmap).toEqual(['heatmap_layer']);
        expect(flag.shape).toEqual(['shape_layer']);
    }));
});
|
def find_shortest_path(start, end):
    """Dijkstra's algorithm: return the shortest path from ``start`` to
    ``end`` as a list of nodes ``[start, ..., end]``.

    Nodes must expose a ``neighbors`` iterable and a ``distance(other)``
    method returning a non-negative edge weight.

    Raises:
        KeyError: if ``end`` is not reachable from ``start``.
    """
    # for holding visited and non visited vertices
    visited = set()
    unvisited = set([start])
    # predecessors[n] is the node preceding n on the best-known path
    predecessors = {}
    # best-known distance from `start` to each discovered node
    distance = {start: 0}
    while unvisited:
        # pops the vertex with minimum tentative distance
        current_vertex = min(unvisited, key=distance.get)
        visited.add(current_vertex)
        unvisited.remove(current_vertex)
        # relax every edge out of the current node
        for neighbour in current_vertex.neighbors:
            if neighbour in visited:
                continue
            # make sure each unvisited node is added to the frontier
            if neighbour not in unvisited:
                unvisited.add(neighbour)
            # calculate the new distance to the neighbour node
            new_distance = distance[current_vertex] + current_vertex.distance(neighbour)
            if new_distance < distance.get(neighbour, float('inf')):
                distance[neighbour] = new_distance
                # Bug fix: record the predecessor only when this route
                # improves the best-known distance.  The original set it
                # whenever a node was first discovered, so a cheaper route
                # found later updated the distance but not the path.
                predecessors[neighbour] = current_vertex
    # trace the predecessor chain back from the end node
    path = []
    node = end
    while node != start:
        path.append(node)
        node = predecessors[node]
    path.append(start)
    # reverse so the path reads start -> end
    return path[::-1]
#!/bin/bash
set -eo pipefail

# Source the shared helpers (provides prop_replace) when available.
scripts_dir='/opt/nifi/scripts'
[ -f "${scripts_dir}/common.sh" ] && . "${scripts_dir}/common.sh"

# Override nifi.properties entries from environment variables, falling back
# to the stock defaults. The ${VAR:=default} expansions also assign the
# shell variables used by the nifi_file/nifi_dir calls further down.
prop_replace 'nifi.flow.configuration.file'                 "${NIFI_FLOW_CONFIG_FILE:=./conf/flow.xml.gz}"
prop_replace 'nifi.flow.configuration.archive.enabled'      "${NIFI_FLOW_CONFIG_ARCHIVE_ENABLED:=true}"
prop_replace 'nifi.flow.configuration.archive.dir'          "${NIFI_FLOW_CONFIG_ARCHIVE_DIR:=./conf/archive/}"
prop_replace 'nifi.flow.configuration.archive.max.time'     "${NIFI_FLOW_CONFIG_ARCHIVE_MAX_TIME:=30 days}"
prop_replace 'nifi.flow.configuration.archive.max.storage'  "${NIFI_FLOW_CONFIG_ARCHIVE_MAX_STORAGE:=500 MB}"
prop_replace 'nifi.flow.configuration.archive.max.count'    "${NIFI_FLOW_CONFIG_ARCHIVE_MAX_COUNT:=}"
prop_replace 'nifi.authorizer.configuration.file'           "${NIFI_AUTHORIZER_CONFIG_FILE:=./conf/authorizers.xml}"
prop_replace 'nifi.login.identity.provider.configuration.file' "${NIFI_LOGIN_IDENTITY_PROVIDER_CONFIG_FILE:=./conf/login-identity-providers.xml}"
prop_replace 'nifi.state.management.configuration.file'     "${NIFI_STATE_MANAGEMENT_CONFIG_FILE:=./conf/state-management.xml}"
prop_replace 'nifi.templates.directory'                     "${NIFI_TEMPLATES_DIR:=./conf/templates}"
prop_replace 'nifi.nar.library.directory'                   "${NIFI_NAR_LIBRARY_DIR:=./lib}"
prop_replace 'nifi.nar.working.directory'                   "${NIFI_NAR_WORKING_DIR:=./work/nar/}"
prop_replace 'nifi.documentation.working.directory'         "${NIFI_DOCUMENTATION_WORKING_DIR:=./work/docs/components}"
prop_replace 'nifi.database.directory'                      "${NIFI_DATABASE_DIR:=./database_repository}"
prop_replace 'nifi.flowfile.repository.directory'           "${NIFI_FLOWFILE_REPOSITORY_DIR:=./flowfile_repository}"
prop_replace 'nifi.content.repository.directory.default'    "${NIFI_CONTENT_REPOSITORY_DIR_DEFAULT:=./content_repository}"
prop_replace 'nifi.provenance.repository.directory.default' "${NIFI_PROVENANCE_REPOSITORY_DIR_DEFAULT:=./provenance_repository}"
prop_replace 'nifi.web.war.directory'                       "${NIFI_WEB_WAR_DIR:=./lib}"
prop_replace 'nifi.web.jetty.working.directory'             "${NIFI_WEB_JETTY_WORKING_DIR:=./work/jetty}"
# Resolve a path against NIFI_HOME: absolute paths pass through unchanged,
# relative paths are prefixed with the NiFi home directory.
nifi_fn() {
    case "$1" in
        /*) echo "$1" ;;
        *)  echo "${NIFI_HOME}/$1" ;;
    esac
}
# Recursively chown a directory to nifi:nifi, then walk up the directory
# tree making each ancestor traversable (go+rx) so nifi can reach it.
#   $1 - directory to fix
nifi_fix_dir_perm() {
    local d
    d=$1
    echo "Fix permission for $d"
    chown -R nifi:nifi "$d"
    # climb toward / fixing traversal permissions on every ancestor
    while [[ -n "$d" && "$d" != / ]]; do
        chmod go+rx,u+rwx "$d"
        d=$(dirname "$d")
    done
}
# Ensure the parent directory of a (possibly relative) file path exists and
# is owned by nifi, then optionally seed the file from a default source.
#   $1 - target file path
#   $2 - optional default file (resolved via nifi_fn) copied when the
#        target does not exist yet
# NOTE(review): the existence check and copy use "$1" as given while the
# directory is resolved via nifi_fn — confirm relative targets behave as
# intended when the working directory is not NIFI_HOME.
nifi_file() {
    local dn
    dn=$(dirname "$(nifi_fn "$1")")
    echo "Create directory $dn"
    mkdir -p "$dn" && nifi_fix_dir_perm "$dn"
    if [[ -n "$2" ]]; then
        local src_file
        src_file=$(nifi_fn "$2")
        # seed the target from the default only if the target is absent
        if [[ ! -e "$1" && -e "$src_file" ]]; then
            echo "Copy $src_file to $1"
            cp "$src_file" "$1"
            chown nifi:nifi "$1"
        fi
    fi
}
# Ensure a (possibly relative) directory exists with nifi ownership and
# traversable ancestors.
#   $1 - directory path, resolved against NIFI_HOME via nifi_fn
nifi_dir() {
    local dn
    dn=$(nifi_fn "$1")
    echo "Create directory $dn"
    mkdir -p "$dn" && nifi_fix_dir_perm "$dn"
}
# Materialize every configured file and directory before starting NiFi.
nifi_file "${NIFI_FLOW_CONFIG_FILE}" './conf/flow.xml.gz'
nifi_dir "${NIFI_FLOW_CONFIG_ARCHIVE_DIR}"
nifi_file "${NIFI_AUTHORIZER_CONFIG_FILE}" './conf/authorizers.xml'
nifi_file "${NIFI_LOGIN_IDENTITY_PROVIDER_CONFIG_FILE}" './conf/login-identity-providers.xml'
nifi_file "${NIFI_STATE_MANAGEMENT_CONFIG_FILE}" './conf/state-management.xml'
nifi_dir "${NIFI_TEMPLATES_DIR}"
nifi_dir "${NIFI_NAR_LIBRARY_DIR}"
nifi_dir "${NIFI_NAR_WORKING_DIR}"
nifi_dir "${NIFI_DOCUMENTATION_WORKING_DIR}"
nifi_dir "${NIFI_DATABASE_DIR}"
nifi_dir "${NIFI_FLOWFILE_REPOSITORY_DIR}"
nifi_dir "${NIFI_CONTENT_REPOSITORY_DIR_DEFAULT}"
nifi_dir "${NIFI_PROVENANCE_REPOSITORY_DIR_DEFAULT}"
nifi_dir "${NIFI_WEB_WAR_DIR}"
nifi_dir "${NIFI_WEB_JETTY_WORKING_DIR}"
set -x
mkdir -p "${NIFI_HOME}/logs"
touch "${NIFI_HOME}/logs/nifi-app.log"
chown -R nifi:nifi \
  "${NIFI_LOG_DIR}" \
  "${NIFI_HOME}/conf" \
  "${NIFI_HOME}/database_repository" \
  "${NIFI_HOME}/flowfile_repository" \
  "${NIFI_HOME}/content_repository" \
  "${NIFI_HOME}/provenance_repository" \
  "${NIFI_HOME}/state"

# Bug fix: the original left a trailing backslash after ".../state", which
# turned the exec line into additional arguments of the chown command —
# NiFi was never started. Drop privileges and hand control to NiFi here.
exec gosu nifi "${NIFI_HOME}/../scripts/start.sh"
|
import sys
import numpy as np
from mdldictionary import ImportMS
def calculate_average_visibility_amplitude(target: str) -> float:
    """Return the mean visibility amplitude for a target's measurement set,
    rounded to two decimal places.

    The measurement set is loaded from ``data/<target>.ms`` via ImportMS.
    """
    # Load the measurement-set data for the requested target.
    measurement_set = ImportMS('data/' + target + '.ms')
    # Pull out the amplitude column, average it, and round for reporting.
    amplitudes = measurement_set['amplitudes']
    return round(np.mean(amplitudes), 2)
#!/bin/bash
# Sync with the remote, stage everything, commit with a user-supplied
# message, and push to origin/master.
git pull
git add --all .
echo "Please write your commit message."
# -r keeps backslashes in the message literal instead of treating them as
# escape characters.
read -r input
git commit -m "$input"
sleep 0.5
# Bug fix: `git push --all origin master` is invalid — `--all` cannot be
# combined with an explicit refspec (git aborts with an error). Push the
# intended branch explicitly instead.
git push origin master
echo "Pushed!"
#!/usr/bin/env bash
# Bootstrap script for Rustlings: verify prerequisites, then clone and
# install the latest release (later in this file).
echo "Let's get you set up with Rustlings!"

echo "Checking requirements..."
# Each check uses `command -v` so we probe PATH without running the tool.
if [ -x "$(command -v git)" ]
then
    echo "SUCCESS: Git is installed"
else
    echo "WARNING: Git does not seem to be installed."
    echo "Please download Git using your package manager or over https://git-scm.com/!"
    exit 1
fi

if [ -x "$(command -v rustc)" ]
then
    echo "SUCCESS: Rust is installed"
else
    echo "WARNING: Rust does not seem to be installed."
    echo "Please download Rust using https://rustup.rs!"
    exit 1
fi

if [ -x "$(command -v cargo)" ]
then
    echo "SUCCESS: Cargo is installed"
else
    echo "WARNING: Cargo does not seem to be installed."
    echo "Please download Rust and Cargo using https://rustup.rs!"
    exit 1
fi
# Function that compares two version strings v1 and v2 given in arguments
# (e.g. 1.31 and 1.33.0).
# Returns 1 if v1 > v2, 0 if v1 == v2, 2 if v1 < v2.
function vercomp() {
    # fast path: identical strings are equal
    if [[ $1 == $2 ]]
    then
        return 0
    fi
    # keep working variables out of the caller's scope
    local v1 v2 len1 len2 max_len i
    # split on dots into numeric components
    v1=( ${1//./ } )
    v2=( ${2//./ } )
    len1=${#v1[@]}
    len2=${#v2[@]}
    max_len=$len1
    if [[ $max_len -lt $len2 ]]
    then
        max_len=$len2
    fi
    # Bug fix: iterate indices 0..max_len-1. The original used
    # `seq 0 $max_len`, which ran one extra (zero-padded vs zero-padded)
    # comparison — harmless but wrong.
    for ((i = 0; i < max_len; i++))
    do
        # Pad missing fields with zeros so e.g. 1.31 compares as 1.31.0
        if [ -z "${v1[$i]}" ]
        then
            v1[$i]=0
        fi
        if [ -z "${v2[$i]}" ]
        then
            v2[$i]=0
        fi
        if [ ${v1[$i]} -gt ${v2[$i]} ]
        then
            return 1
        fi
        if [ ${v1[$i]} -lt ${v2[$i]} ]
        then
            return 2
        fi
    done
    return 0
}
# Extract the installed rustc version ("rustc X.Y.Z ..." -> "X.Y.Z").
RustVersion=$(rustc --version | cut -d " " -f 2)
MinRustVersion=1.31
# vercomp returns 2 when the installed version is older than the minimum.
vercomp "$RustVersion" "$MinRustVersion"
if [ $? -eq 2 ]
then
    echo "WARNING: Rust version is too old: $RustVersion - needs at least $MinRustVersion"
    echo "Please update Rust with 'rustup update'"
    exit 1
else
    echo "SUCCESS: Rust is up to date"
fi
# Clone destination defaults to ./rustlings/ but can be given as $1.
Path=${1:-rustlings/}
echo "Cloning Rustlings at $Path..."
git clone -q https://github.com/rust-lang/rustlings "$Path"
# Resolve the latest release tag via the GitHub API.
Version=$(curl -s https://api.github.com/repos/rust-lang/rustlings/releases/latest | python -c "import json,sys;obj=json.load(sys.stdin);print(obj['tag_name']);")
echo "Checking out version $Version..."
cd "$Path"
git checkout -q "tags/$Version"
echo "Installing the 'rustlings' executable..."
cargo install --force --path .
# Bug fix: the original ran `rustlings` itself inside $( ) and tested
# whether its *output* was an executable path. Instead, warn when the
# installed binary is NOT found on PATH.
if ! [ -x "$(command -v rustlings)" ]
then
    echo "WARNING: Please check that you have '~/.cargo/bin' in your PATH environment variable!"
fi
echo "All done! Run 'rustlings' to get started."
|
#!/usr/bin/dumb-init /bin/bash
# Run dummy web server in background for Cloud Run
python3 /server.py &
# Invoke myoung34/github-runner default entrypoint.sh
# Bug fix: quote "$@" so arguments containing whitespace or glob
# characters are forwarded intact instead of being re-split/expanded.
/entrypoint.sh "$@"
#!/bin/bash
#
#SBATCH --job-name=iwslt_grid_0116
#SBATCH --partition=1080ti-short
#SBATCH --gres=gpu:1
#SBATCH --ntasks-per-node=24
#SBATCH --mem=47GB
#SBATCH -d singleton
#SBATCH --open-mode append
#SBATCH -o /mnt/nfs/work1/miyyer/simengsun/synst/experiments/iwslt_grid_0116/output_eval.txt
#SBATCH --mail-type=ALL
#SBATCH --mail-user=simengsun@cs.umass.edu

# Experiment locations on the shared filesystem.
BASE_PATH=/mnt/nfs/work1/miyyer
PROJECT_PATH=$BASE_PATH/simengsun/synst
EXPERIMENT_PATH=$PROJECT_PATH/experiments/iwslt_grid_0116

# Bug fix: the line below was a bare token sequence
# ("left right left left center center center center") which the shell
# would try to execute as a command named `left` and abort the job.
# Preserved as a comment since it appears to be leftover attn-position notes.
# left right left left center center center center

# Load in python3 and source the venv
module load python3/3.6.6-1810
source /mnt/nfs/work1/miyyer/wyou/py36/bin/activate

# Need to include the venv path upfront...
# otherwise it defaults to the loaded slurm module which breaks for pynvml with python3
#PYTHONPATH=$BASE_PATH/simengsun/synst/bin/lib/python3.6/site-packages/:$PYTHONPATH
PYTHONPATH=/mnt/nfs/work1/miyyer/wyou/py36/lib/python3.6/site-packages:$PYTHONPATH

# Translate the dev split with the averaged checkpoint and beam search.
CUDA_VISIBLE_DEVICES=0 python main.py --dataset iwslt_en_de --span 1 --model new_transformer \
	--attn-param 1 \
	--attn-type normal \
	--attn-position left right left right left right left right left left \
	--attn-displacement 1 \
	--dec-attn-param 1 \
	--dec-attn-type normal \
	--dec-attn-position center center left center left center left center center center \
	--dec-attn-displacement 1 \
	--embedding-size 286 --hidden-dim 507 --num-heads 2 --num-layers 5 \
	-d /mnt/nfs/work1/miyyer/wyou/iwslt -p /mnt/nfs/work1/miyyer/wyou/iwslt \
	--batch-size 1 --batch-method example --split dev \
	--restore $EXPERIMENT_PATH/checkpoint.pt \
	--average-checkpoints 5 \
	translate \
	--beam-width 4 --max-decode-length 50 --length-basis input_lens --order-output \
	--output-directory $EXPERIMENT_PATH
|
#!/bin/bash
# Print the name of every directory in the current working directory.
for file in *
do
    if [ -d "$file" ]; then
        # Bug fix: quote the expansion so names containing whitespace or
        # glob characters print intact instead of being word-split.
        echo "$file"
    fi
done
import { fillIn } from '@ember/test-helpers';
import getByLabel from './get-by-label';
// Fill in the form control associated with the given label text.
// Resolves the control via getByLabel, then delegates to Ember's fillIn;
// returns the promise fillIn produces.
export default function fillInByLabel(labelText, value) {
  return fillIn(getByLabel(labelText), value);
}
|
<filename>Demos/src/Aspose.Email.Live.Demos.UI/wwwroot/viewer/qi/doc/js/app.js
(function () {
    'use strict';
    // Root AngularJS module for the document viewer.
    var app = angular.module('myApp', [
        'ngSanitize',
        'ngAnimate',
        'ngQuantum',
        'ngResource'
    ]);
    // Expose the shared (global) page list as an injectable value.
    app.value('PageList', customPageList);
    // Disable template caching so viewer templates are always fetched fresh.
    app.run(['$templateCache', '$cacheFactory',
        function ($templateCache, $cacheFactory) {
            $templateCache = false;
        }]);
    // Disable $http response caching.
    app.config(['$httpProvider',
        function ($httpProvider) {
            $httpProvider.defaults.cache = false;
        }]);
    // Limit digest-loop retries to 5 iterations.
    app.config(['$rootScopeProvider',
        function ($rootScopeProvider) {
            $rootScopeProvider.digestTtl(5);
        }]);
    // Directive: evaluate the bound expression when the element has been
    // scrolled to its bottom (infinite-scroll trigger).
    app.directive('infinityscroll', function () {
        return {
            restrict: 'A',
            link: function (scope, element, attrs) {
                element.bind('scroll', function () {
                    if ((element[0].scrollTop + element[0].offsetHeight) >= element[0].scrollHeight) {
                        //scroll reach to end
                        scope.$apply(attrs.infinityscroll);
                    }
                });
            }
        }
    });
    // Directive: evaluate the bound expression when Enter is pressed.
    app.directive('myEnter', function () {
        return function ($scope, element, attrs) {
            element.bind("keydown keypress", function (event) {
                if (event.which === 13) {
                    $scope.$apply(function () {
                        $scope.$eval(attrs.myEnter);
                    });
                    event.preventDefault();
                }
            });
        };
    });
    // Service wrapping the DocumentPages API call for the current file.
    // (apiURL, fileName and folderName are globals defined outside this file.)
    app.factory('apiService', function ($http) {
        var getData = function () {
            return $http({ method: "GET", url: apiURL + 'api/AsposeEmailViewer/DocumentPages?' + 'file=' + fileName + '&folderName=' + folderName }).then(function (result) {
                return result;
            });
        };
        return { getData: getData };
    });
// Return the index of `node` in `arr` (matched by ImageName via
// CompareImageNode), or -1 when no entry matches.
function GetNodeIndex(arr, node) {
    var idx = 0;
    while (idx < arr.length) {
        if (CompareImageNode(arr[idx], node)) {
            return idx;
        }
        idx++;
    }
    return -1;
}
// Two page nodes are considered the same page when their image file
// names match exactly.
function CompareImageNode(node1, node2) {
    var sameName = node1.ImageName === node2.ImageName;
    return sameName;
}
// Fallback handler for non-JSON page entries: collect strings that look
// like local drive paths (contain "c:") into the global page list,
// skipping duplicates.
function ProcessStringListResponse(value, $scope) {
    var looksLikeDrivePath = value.toLowerCase().includes('c:');
    if (!looksLikeDrivePath) {
        return;
    }
    if (!customPageList.includes(value)) {
        customPageList.push(value);
    }
}
// Parse one page payload from the API. Numeric strings are ignored
// (the first response entry is the page count, handled elsewhere).
// JSON payloads are deduplicated into the global customPageList by image
// name, and their dimensions registered via the global SetPageSize.
// Non-JSON strings fall back to ProcessStringListResponse.
function ProcessJsonResponse(value, $scope) {
    if (isNaN(value)) {
        var item;
        try {
            item = JSON.parse(value);
        } catch (e) {
            // Not JSON — treat as a plain string entry.
            return ProcessStringListResponse(value, $scope);
        }
        // Default page dimensions when no ImageSize is supplied.
        item.ImageWidth = 640;
        item.ImageHeight = 480;
        if (item.ImageSize) {
            // ImageSize arrives as "WIDTHxHEIGHT".
            var dimensions = item.ImageSize.split('x');
            if (dimensions.length === 2) {
                item.ImageWidth = Number(dimensions[0]);
                item.ImageHeight = Number(dimensions[1]);
            }
        }
        // Reuse an existing entry with the same image name, else append.
        var nodeIdx = GetNodeIndex(customPageList, item);
        if (nodeIdx < 0) {
            nodeIdx = customPageList.length;
            customPageList.push(item);
        }
        SetPageSize(nodeIdx, item.ImageWidth, item.ImageHeight);
    }
}
// Fetch all page data for the current document, populate the global
// customPageList, and update the pager/UI as results arrive.
function GetPagesData($scope, apiService) {
    var myDataPromise = apiService.getData();
    $scope.loading.show();
    myDataPromise.then(function (result) {
        var i = 0;
        angular.forEach(result.data, function (value) {
            // The first array entry (when more than one) is the total page count.
            if (i === 0 && result.data.length > 1) {
                totalPages = parseInt(value);
                UpdatePager();
                i++;
            }
            else {
                // Remaining entries are per-page payloads.
                ProcessJsonResponse(value, $scope);
                if (customPageList.length > 0 && currentPageCount === 1) {
                    // First page arrived: style and reveal the pages container.
                    var dvPages = document.getElementsByName("dvPages")[0];
                    dvPages.style.cssText = "height: 100vh; padding-top: 55px; width: auto!important; overflow: auto!important; background-color: #777; background-image: none!important;";
                    dvPages.getElementsByClassName('container-fluid')[0].classList.remove('hidden');
                    // $scope.navigatePage('+');
                }
                i++;
            }
            $scope.loading.hide();
        });
        /*if (currentPageCount < totalPages) {
            $scope.NextPage();
        } */
    });
}
    // Main viewer controller: wires paging, zooming, printing, selection
    // and page-image loading onto the scope. Relies on file-level globals
    // (customPageList, currentPageCount, totalPages, currentSelectedPage,
    // imagedata, callbackURL, featureName, fileName, folderName, apiURL).
    app.controller('ViewerAPIController',
        function ViewerAPIController($scope, $sce, $http, $window, apiService, $loading, $timeout, $q, $alert) {
            var $that = this;
            // Resolve to true after 2s (used by loading-button widgets).
            $scope.loadingButtonSucces = function () {
                return $timeout(function () {
                    return true;
                }, 2000)
            }
            // Leave the viewer: prefer the host iframe's close handler,
            // else fall back to the callback URL or a feature landing page.
            $scope.exitApp = function () {
                if (window.parent && window.parent.closeIframe) {
                    window.history.back();
                    window.parent.closeIframe();
                }
                else {
                    if (callbackURL !== '')
                        window.location = callbackURL + '?folderName=' + folderName + '&fileName=' + fileName;
                    else if (featureName !== '')
                        window.location = '/email/viewer/' + featureName;
                    else
                        window.location = '/email/viewer';
                }
            }
            // Shared busy indicator for page loads.
            $scope.loading = new $loading({
                busyText: ' Please wait while page loading...',
                theme: 'info',
                timeout: false,
                showSpinner: true
            });
            $scope.print = function() {
                window.print();
            }
            // Demo promise that rejects after 1s.
            $scope.getError = function () {
                var deferred = $q.defer();
                setTimeout(function () {
                    deferred.reject('Error');
                }, 1000);
                return deferred.promise;
            }
            $scope.displayAlert = function (title, message, theme) {
                $alert(message, title, theme)
            }
            if (customPageList.length <= 0) {
                $scope.PageList = customPageList;
            }
            // Kick off loading of all pages for the current document.
            GetPagesData($scope, apiService);
            /*$scope.NextPage = function () {
                if (currentPageCount > totalPages) {
                    currentPageCount = totalPages;
                    return;
                }
                if (currentPageCount <= totalPages) {
                    currentPageCount += 1;
                    currentSelectedPage = currentPageCount;
                    if ($scope.PageList.length < currentPageCount) {
                        GetPageData($scope, apiService, currentPageCount);
                        currentSelectedPage = currentPageCount - 2;
                    }
                }
            }*/
            $scope.selected = false;
            // Handle clicks on page/thumbnail images; thumbnail clicks
            // ("imgt-page-" ids) also jump to the full-size page view.
            $scope.slectedPageImage = function (event, pageData) {
                var domId = event.target.id.replace('img-page-', '').replace('imgt-page-', '');
                currentSelectedPage = parseInt(event.target.id.replace('img-page-', '').replace('imgt-page-', ''));
                UpdatePager();
                if (event.target.id.startsWith('imgt-page-')) {
                    location.hash = 'page-view-' + domId;
                    $scope.selected = pageData;
                }
            }
            // Navigate pages: '+' next, '-' previous, 'f' first, 'e' end,
            // anything else reads the page-number input; clamps to
            // [1, totalPages] and scrolls to the selected page anchor.
            $scope.navigatePage = function (options) {
                if (options === '+') {
                    currentPageCount += 1;
                    if (currentPageCount > totalPages) {
                        currentPageCount = totalPages;
                    }
                }
                else if (options === '-') {
                    currentPageCount -= 1;
                    if (currentPageCount < 1) {
                        currentPageCount = 1;
                    }
                }
                else if (options === 'f') {
                    currentPageCount = 1;
                }
                else if (options === 'e') {
                    currentPageCount = totalPages;
                }
                else {
                    if (document.getElementById('inputcurrentpage').value !== '')
                        currentPageCount = parseInt(document.getElementById('inputcurrentpage').value);
                    if (currentPageCount > totalPages) {
                        currentPageCount = totalPages;
                    }
                    if (currentPageCount < 1) {
                        currentPageCount = 1;
                    }
                }
                currentSelectedPage = currentPageCount;
                if ($scope.PageList.length < currentSelectedPage) {
                    //GetPageData($scope, apiService, currentPageCount);
                    $scope.$broadcast('UpdatePages');
                    $scope.$broadcast('UpdateThumbnails');
                }
                UpdatePager();
                location.hash = 'page-view-' + currentSelectedPage;
            };
            // Resolve (and memoize in the global imagedata array) the
            // trusted image URL for the page at the given index.
            $scope.createPageImage = function (indx, pageData) {
                if (!pageData) {
                    return pageData;
                }
                if (indx <= (imagedata.length - 1)) {
                    return imagedata[indx];
                }
                else {
                    prevoiusIndx = indx;
                    var imgData = $sce.trustAsResourceUrl(apiURL + 'api/AsposeEmailViewer/pageimage?imageFolderName=' + encodeURIComponent(pageData.ImageFolderName) + '&imageFileName=' + encodeURIComponent(pageData.ImageName));
                    imagedata.push(imgData);
                    return imagedata[indx];
                }
            }
            // Zoom state: '1.00' means 100%.
            $scope.itemSelected = '1.00';
            $scope.zoomPage = function (zoomOption) {
                $scope.itemSelected = zoomOption;
                ZoomPages(zoomOption);
            }
            $scope.getPageCss = function(pageIndex) {
                return GetPageCss(pageIndex);
            }
        }
    );
})();
package de.fraunhofer.fit.train.persistence;
import java.util.List;
import org.springframework.data.mongodb.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.QueryByExampleExecutor;
import de.fraunhofer.fit.train.model.EnvironmentProperties;
/**
 * Spring Data MongoDB repository for {@link EnvironmentProperties}
 * documents, exposing several field/regex query variants.
 * (Parameter names fixed from the original typo "cotent"; method names are
 * left unchanged since callers bind to them.)
 */
public interface IEnvironmentPropertiesRepository extends CrudRepository<EnvironmentProperties, String>, QueryByExampleExecutor<EnvironmentProperties> {

	/**
	 * Regex-match {@code content} against the field named by {@code fieldStr}.
	 * Note: the quoted '?0' binds the field name as a quoted key in the query.
	 */
	@Query("{ '?0' : { $regex: ?1 } }")
	List<EnvironmentProperties> findOneByQuery(String fieldStr, String content);

	/** Unquoted-field regex query; identical to {@link #findOneByRegexQuery}. */
	@Query("{ ?0 : { $regex: ?1 } }")
	List<EnvironmentProperties> findOneBySmampleQuery(String fieldStr, String content);

	/** Match-all query projecting only the field named by {@code fieldStr}. */
	@Query(value="{}", fields="{ '?0' : ?1}")
	List<EnvironmentProperties> findOneBySimpleQuery(String fieldStr, String content);

	/** Regex query on the given field (duplicate of findOneBySmampleQuery). */
	@Query("{ ?0 : { $regex: ?1 } }")
	List<EnvironmentProperties> findOneByRegexQuery(String fieldStr, String content);
}
|
class CustomList:
    """A minimal list wrapper supporting indexed access.

    Implementing ``__getitem__`` also makes instances iterable and usable
    with ``in`` via Python's sequence protocol; ``__len__`` additionally
    enables ``len()`` (backward-compatible addition).
    """

    def __init__(self, elements):
        # Underlying storage; kept as given (no defensive copy) to match
        # the original behavior.
        self.elements = elements

    def __getitem__(self, index):
        """Return the element at ``index``.

        Negative indices and slices are supported, delegated to the
        underlying sequence; raises IndexError when out of range.
        """
        return self.elements[index]

    def __len__(self):
        """Number of stored elements, so ``len()`` works on the wrapper."""
        return len(self.elements)


# Example usage
custom_list = CustomList([1, 2, 3, 4, 5])
print(custom_list[2])  # Output: 3
print(custom_list[4])  # Output: 5
package main
import "fmt"
// main demonstrates the plain-return greeting helper.
func main() {
	fmt.Println(greet("Jane ", "Doe"))
}
// greet returns the concatenation of fname and lname.
// Note the stylistic contrast with greet2 below, which uses a named
// return value and a naked return.
func greet(fname string, lname string) string {
	full := fname + lname
	return full
}
// greet2 behaves exactly like greet but is written with a named return
// value (s) and a naked return — see the note below on why this style is
// generally avoided.
func greet2(fname string, lname string) (s string) {
	s = fmt.Sprint(fname, lname)
	return
}
/*
IMPORTANT
Avoid using named returns.
Occasionally named returns are useful. Read this article for more information:
https://www.goinggo.net/2013/10/functions-and-naked-returns-in-go.html
*/
|
<filename>EIDSS v6/android.java/workspace/EIDSS.tests/src/com/bv/eidss/model/test/HumanCaseTest.java
package com.bv.eidss.model.test;
/*
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Matchers.any;
import java.io.Serializable;
import java.util.Date;
import java.util.Stack;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import android.os.Parcel;
*/
import com.bv.eidss.model.HumanCase;
import com.bv.eidss.model.CaseStatus;
import com.bv.eidss.model.interfaces.ValidateCode;
import com.bv.eidss.utils.DateHelpers;
import junit.framework.TestCase;
/**
 * Unit tests for {@link HumanCase} creation defaults and validation order.
 */
public class HumanCaseTest extends TestCase {

	/**
	 * A freshly created case starts in status NEW, has the placeholder
	 * case ID "(new)", gets an offline case ID, and is flagged as changed.
	 */
	public void testHumanCaseCreate(){
		HumanCase hc = HumanCase.CreateNew();
		assertNotNull(hc);
		assertEquals(CaseStatus.NEW, hc.getStatus());
		assertEquals("(new)", hc.getCaseID());
		assertNotNull(hc.getOfflineCaseID());
		assertEquals(true, hc.getChanged().booleanValue());
	}

	/**
	 * Validation is reported in a fixed order: tentative diagnosis, then a
	 * non-blank last name, then region, then rayon. Once those pass, a
	 * date of birth or symptom-onset date one year in the future trips the
	 * corresponding date check, while the current date is accepted.
	 * (DateHelpers.Date takes a java.util.Date-style 1900-based year, hence
	 * the "+ 1900" adjustments.)
	 */
	public void testHumanCaseValidate(){
		HumanCase hc = HumanCase.CreateNew();
		assertNotNull(hc);
		assertEquals(ValidateCode.DiagnosisMandatory, hc.Validate());
		hc.setTentativeDiagnosis(1);
		assertEquals(ValidateCode.LastNameMandatory, hc.Validate());
		// empty and whitespace-only names are both rejected
		hc.setFamilyName("");
		assertEquals(ValidateCode.LastNameMandatory, hc.Validate());
		hc.setFamilyName(" ");
		assertEquals(ValidateCode.LastNameMandatory, hc.Validate());
		hc.setFamilyName("name");
		assertEquals(ValidateCode.RegionMandatory, hc.Validate());
		hc.setRegionCurrentResidence(1);
		assertEquals(ValidateCode.RayonMandatory, hc.Validate());
		hc.setRayonCurrentResidence(1);
		assertEquals(ValidateCode.OK, hc.Validate());
		// date of birth one year in the future -> rejected; today -> OK
		hc.setDateofBirth(DateHelpers.Date(DateHelpers.Today().getYear() + 1900 + 1, DateHelpers.Today().getMonth(), DateHelpers.Today().getDate()));
		assertEquals(ValidateCode.DateOfBirthCheckCurrent, hc.Validate());
		hc.setDateofBirth(DateHelpers.Date(DateHelpers.Today().getYear() + 1900, DateHelpers.Today().getMonth(), DateHelpers.Today().getDate()));
		assertEquals(ValidateCode.OK, hc.Validate());
		// onset date one year in the future -> rejected; today -> OK
		hc.setOnSetDate(DateHelpers.Date(DateHelpers.Today().getYear() + 1900 + 1, DateHelpers.Today().getMonth(), DateHelpers.Today().getDate()));
		assertEquals(ValidateCode.DateOfSymptomCheckCurrent, hc.Validate());
		hc.setOnSetDate(DateHelpers.Date(DateHelpers.Today().getYear() + 1900, DateHelpers.Today().getMonth(), DateHelpers.Today().getDate()));
		assertEquals(ValidateCode.OK, hc.Validate());
	}

	/*
	public void testHumanCaseParcelable(){
		final Stack<Object> s = new Stack<Object>();
		Parcel p = mock(Parcel.class);
		doAnswer(new Answer<Parcel>(){
			@Override
			public Parcel answer(InvocationOnMock invocation) throws Throwable {
				s.push((Integer)invocation.getArguments()[0]);
				return (Parcel)invocation.getMock();
			}}).when(p).writeInt(any(int.class));
		doAnswer(new Answer<Parcel>(){
			@Override
			public Parcel answer(InvocationOnMock invocation) throws Throwable {
				s.push((Long)invocation.getArguments()[0]);
				return (Parcel)invocation.getMock();
			}}).when(p).writeLong(any(long.class));
		doAnswer(new Answer<Parcel>(){
			@Override
			public Parcel answer(InvocationOnMock invocation) throws Throwable {
				s.push((String)invocation.getArguments()[0]);
				return (Parcel)invocation.getMock();
			}}).when(p).writeString(any(String.class));
		doAnswer(new Answer<Parcel>(){
			@Override
			public Parcel answer(InvocationOnMock invocation) throws Throwable {
				s.push((Date)invocation.getArguments()[0]);
				return (Parcel)invocation.getMock();
			}}).when(p).writeSerializable(any(Date.class));
		when(p.readInt()).thenAnswer(new Answer<Integer>(){
			@Override
			public Integer answer(InvocationOnMock invocation) throws Throwable {
				return (Integer)s.pop();
			}});
		when(p.readLong()).thenAnswer(new Answer<Long>(){
			@Override
			public Long answer(InvocationOnMock invocation) throws Throwable {
				return (Long)s.pop();
			}});
		when(p.readString()).thenAnswer(new Answer<String>(){
			@Override
			public String answer(InvocationOnMock invocation) throws Throwable {
				return (String)s.pop();
			}});
		when(p.readSerializable()).thenAnswer(new Answer<Serializable>(){
			@Override
			public Serializable answer(InvocationOnMock invocation) throws Throwable {
				return (Date)s.pop();
			}});
		HumanCase hc = HumanCase.CreateNew();
		assertNotNull(hc);
		hc.writeToParcel(p, 0);
		HumanCase hcp = HumanCase.CREATOR.createFromParcel(p);
		assertEquals(hc.getCaseID(), hcp.getCaseID());
	}
	*/
}
|
/**
* Copyright (c) 2020 Intel Corporation
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Inject, Injectable, InjectionToken, OnDestroy} from '@angular/core';
import {interval, Observable, Observer, Subject, Subscriber, SubscriptionLike} from 'rxjs';
import {distinctUntilChanged, filter, map, share, takeWhile} from 'rxjs/operators';
import {WebSocketSubject, WebSocketSubjectConfig} from 'rxjs/webSocket';
// Injection token under which the websocket configuration (IWSConfig)
// is provided.
export const WS_CONFIG: InjectionToken<string> = new InjectionToken('websocket');

// Envelope for every message exchanged over the socket: an event name
// plus an optional typed payload.
export interface IWSMessage<T> {
    event: string;
    data?: T;
}

// Connection settings; the reconnect values fall back to service defaults
// (5000 ms / 10 attempts) when omitted.
export interface IWSConfig {
    url: string;
    reconnectInterval?: number;
    reconnectAttempts?: number;
}

// Public surface of the websocket service: subscribe to an event stream,
// or send an event with a payload.
export interface IWSService {
    on<T>(event: string): Observable<T>;
    send(event: string, data: any): void;
}
@Injectable({
    providedIn: 'root'
})
export class WebsocketService implements IWSService, OnDestroy {
    // rxjs WebSocketSubject configuration (url + open/close observers).
    private config: WebSocketSubjectConfig<IWSMessage<any>>;
    private websocketSub: SubscriptionLike;
    private statusSub: SubscriptionLike;
    // Emits true/false as the underlying socket opens/closes.
    private status$: Observable<boolean>;
    private connection$: Observer<boolean>;
    // Non-null while a reconnect timer is running.
    private reconnection$: Observable<number>;
    private websocket$: WebSocketSubject<IWSMessage<any>>;
    // All inbound messages are re-published through this subject.
    private wsMessages$: Subject<IWSMessage<any>>;
    private reconnectInterval: number;
    private reconnectAttempts: number;
    private isConnected: boolean;
    // Messages queued while disconnected; flushed once connected.
    private deferredMessagesQueue: IWSMessage<any>[] = [];

    constructor(@Inject(WS_CONFIG) private webSocketConfig: IWSConfig) {
        this.wsMessages$ = new Subject<IWSMessage<any>>();
        // Fall back to 5s interval / 10 attempts when not configured.
        this.reconnectInterval = webSocketConfig.reconnectInterval || 5000;
        this.reconnectAttempts = webSocketConfig.reconnectAttempts || 10;
        this.config = {
            url: webSocketConfig.url,
            closeObserver: {
                next: (): void => {
                    // Clear the subject so the reconnect logic knows the
                    // socket is gone, then publish the disconnected status.
                    this.websocket$ = null;
                    this.connection$.next(false);
                    // eslint-disable-next-line no-console
                    console.log('WebSocket disconnected');
                }
            },
            openObserver: {
                next: (): void => {
                    this.connection$.next(true);
                    // eslint-disable-next-line no-console
                    console.log('WebSocket connected');
                }
            }
        };
        // Shared, de-duplicated connection status stream; subscribing
        // captures the observer so the open/close observers can push to it.
        this.status$ = new Observable<boolean>((observer: Subscriber<boolean>) => {
            this.connection$ = observer;
        })
            .pipe(
                share(),
                distinctUntilChanged()
            );
        this.statusSub = this.status$
            .subscribe((isConnected: boolean) => {
                this.isConnected = isConnected;
                // Start reconnecting when disconnected and no timer runs.
                if (!this.reconnection$ && typeof (isConnected) === 'boolean' && !isConnected) {
                    this.reconnect();
                    return;
                }
                // Connected: flush any messages queued while offline.
                while (this.deferredMessagesQueue.length) {
                    const deferredMessage = this.deferredMessagesQueue.shift();
                    this.websocket$.next({event: deferredMessage.event, data: deferredMessage.data});
                }
            });
        this.websocketSub = this.wsMessages$
            .subscribe(null, (error: ErrorEvent) => {
                // eslint-disable-next-line no-console
                console.error('WebSocket error', error);
            });
        this.connect();
    }

    ngOnDestroy(): void {
        this.websocketSub.unsubscribe();
        this.statusSub.unsubscribe();
    }

    // Stream of payloads for a single named event.
    // Note: returns undefined when `event` is falsy.
    on<T>(event: string): Observable<T> {
        if (event) {
            return this.wsMessages$
                .pipe(
                    filter((message: IWSMessage<T>) => message.event === event),
                    map((message: IWSMessage<T>) => message.data)
                );
        }
    }

    // Send an event now if connected, otherwise queue it for delivery
    // after the next successful (re)connect.
    send(event: string, data: any = {}): void {
        if (event) {
            if (this.isConnected) {
                this.websocket$.next({event, data});
            } else {
                this.deferredMessagesQueue.push({event, data});
            }
        } else {
            // eslint-disable-next-line no-console
            console.error('Message sending error');
        }
    }

    // Open the socket and pump inbound messages into wsMessages$;
    // on error, reconnect if the socket has been cleared.
    private connect(): void {
        this.websocket$ = new WebSocketSubject(this.config);
        this.websocket$
            .subscribe(
                (message: IWSMessage<any>) => this.wsMessages$.next(message),
                () => !this.websocket$ && this.reconnect()
            );
    }

    // Retry connect() on a fixed interval until either the socket is back
    // or the attempt budget is exhausted; when all attempts fail, complete
    // the message and status streams.
    private reconnect(): void {
        // eslint-disable-next-line no-console
        console.log('WebSocket reconnecting');
        this.reconnection$ = interval(this.reconnectInterval)
            .pipe(takeWhile((v: number, index: number) => index < this.reconnectAttempts && !this.websocket$));
        this.reconnection$
            .subscribe(
                () => this.connect(),
                null,
                () => {
                    this.reconnection$ = null;
                    if (!this.websocket$) {
                        this.wsMessages$.complete();
                        this.connection$.complete();
                    }
                });
    }
}
|
<reponame>1aurabrown/ervell<filename>webpack.config.js
const path = require('path');
const webpack = require('webpack');
const { BundleAnalyzerPlugin } = require('webpack-bundle-analyzer');
const FriendlyErrorsWebpackPlugin = require('friendly-errors-webpack-plugin');
const ProgressBarPlugin = require('progress-bar-webpack-plugin');
const WebpackNotifierPlugin = require('webpack-notifier');
const helpers = require('./webpack.helpers.js');
// Build-mode flags derived from the environment.
const { NODE_ENV, PORT, ANALYZE_BUNDLE } = process.env;
const isDevelopment = NODE_ENV === 'development';
const isStaging = NODE_ENV === 'staging';
const isProduction = NODE_ENV === 'production';
// Staging and production share the same deploy configuration.
const isDeploy = isStaging || isProduction;
// Webpack build configuration shared across development and deploys.
const config = {
  mode: NODE_ENV,
  entry: {
    // Hot-reload client bundle, injected alongside the app entrypoints.
    webpack: [
      'webpack-hot-middleware/client?reload=true',
    ],
    ...helpers.getEntrypoints(),
  },
  output: {
    filename: '[name].js',
    path: path.resolve(__dirname, 'public/assets'),
    publicPath: '/assets',
    sourceMapFilename: '[file].map?[contenthash]',
  },
  module: {
    rules: [
      {
        test: /\.(gif|svg|jpg|png)$/,
        loader: 'file-loader',
      },
      {
        test: /\.coffee$/,
        exclude: /node_modules/,
        loader: 'coffee-loader',
      },
      {
        test: /\.(jade|pug)$/,
        exclude: /node_modules/,
        loader: 'pug-loader',
        options: {
          doctype: 'html',
          root: __dirname,
        },
      },
      {
        test: /\.(js|jsx)$/,
        exclude: /node_modules/,
        use: [
          {
            loader: 'babel-loader',
            // NOTE(review): `query` is the legacy key for loader options —
            // confirm this webpack version still honours it (newer
            // versions expect `options`).
            query: {
              cacheDirectory: true,
              env: {
                // React HMR transforms are applied only in development.
                development: {
                  presets: ['react-hmre'],
                  plugins: [
                    ['react-transform', {
                      transforms: [{
                        transform: 'react-transform-hmr',
                        imports: ['react'],
                        locals: ['module'],
                      }],
                    }],
                  ],
                },
              },
            },
          },
        ],
      },
    ],
  },
  optimization: {
    // Extract the webpack runtime into its own chunk.
    runtimeChunk: {
      name: 'runtime',
    },
    splitChunks: {
      cacheGroups: {
        // NOTE(review): `runtimeChunk: 'single'` looks misplaced — it is a
        // top-level optimization option (already configured above), not a
        // cache group. Confirm whether it has any effect here and remove
        // if unused.
        runtimeChunk: 'single',
        // All node_modules code shared by initial chunks.
        vendor: {
          name: 'vendor',
          chunks: 'initial',
          test: /[\\/]node_modules[\\/]/,
        },
        // App code used by at least two chunks.
        default: {
          name: 'common',
          chunks: 'all',
          minChunks: 2,
        },
      },
    },
  },
  plugins: [
    new FriendlyErrorsWebpackPlugin({
      compilationSuccessInfo: {
        messages: [`[Ervell] Listening on http://localhost:${PORT} \n`],
      },
    }),
    new ProgressBarPlugin(),
    new WebpackNotifierPlugin(),
    // Inline NODE_ENV into the client bundles.
    new webpack.DefinePlugin({
      'process.env': {
        NODE_ENV: JSON.stringify(NODE_ENV),
      },
    }),
    // Ignore moment locales
    new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
    // Auto-provide legacy globals expected by older modules.
    new webpack.ProvidePlugin({
      $: 'jquery',
      jQuery: 'jquery',
      'window.jQuery': 'jquery',
      jade: 'jade/runtime.js',
      waypoints: 'jquery-waypoints/waypoints.js',
    }),
  ],
  resolve: {
    alias: {
      'jquery.ui.widget': 'blueimp-file-upload/js/vendor/jquery.ui.widget.js',
      Images: path.join(__dirname, 'public', 'images'),
    },
    extensions: ['.js', '.jsx', '.json', '.jade', '.coffee'],
    modules: [
      'node_modules',
    ],
    symlinks: false,
  },
  // `request` is resolved at runtime on the server, not bundled.
  externals: {
    request: 'request',
  },
};
// Optionally visualize bundle composition.
if (ANALYZE_BUNDLE) {
  config.plugins.push(new BundleAnalyzerPlugin());
}

// Hot module replacement is a development-only plugin.
if (isDevelopment) {
  config.plugins.push(new webpack.HotModuleReplacementPlugin());
  // Staging/Production
} else if (isDeploy) {
  // Source maps for deploys are currently disabled.
  // config.devtool = '#source-map';
}

module.exports = config;
|
#!/bin/bash
# Reset the Apache configuration directory to a clean baseline: wipe the
# site configs and ports file, install our apache2.conf, fix ownership and
# permissions, and enable the rewrite/ssl modules.
# Requires APACHE_DIR and WEB_ROOT to be set by the caller.
apache-initialize () {
    # Quote the expansions so paths containing whitespace survive.
    cd "$APACHE_DIR"
    rm -rfv sites-available/*
    rm -rfv sites-enabled/*
    rm -rfv ports.conf
    cp "$WEB_ROOT/sh/res/apache2.conf" .
    chown -R www-data:www-data "$APACHE_DIR"
    chmod -R 0755 "$APACHE_DIR"
    a2enmod rewrite
    a2enmod ssl
    cd "$WEB_ROOT"
}
# Generate and enable an Apache vhost for every entry in sh/gen/sites.list.
# Each line has the form: PORT NAME DOCROOT VHOST_TEMPLATE
# Requires WEB_ROOT and APACHE_DIR to be set by the caller.
apache-install-sites () {
    cd $WEB_ROOT
    while read LINE;
    do
        # Split the line into positional parameters on spaces.
        # NOTE(review): IFS is changed here and never restored — confirm no
        # later code in the sourcing shell depends on the default IFS.
        IFS=" "
        set - $LINE
        LINE_PORT=$1
        LINE_NAME=$2
        LINE_PATH=$3
        LINE_VHOST_CONFIG=$4
        # Fill the template placeholders with this site's values.
        DATA="$(<sh/vhost.conf.d/$LINE_VHOST_CONFIG)"
        DATA=${DATA//__SERVER_PORT__/$LINE_PORT}
        DATA=${DATA//__SERVER_NAME__/$LINE_NAME}
        DATA=${DATA//__DOCUMENT_ROOT__/$LINE_PATH}
        CONF_FILE="${APACHE_DIR}/sites-available/${LINE_NAME}.conf"
        CONF_LINK="${APACHE_DIR}/sites-enabled/${LINE_NAME}.conf"
        echo "$DATA" > "$CONF_FILE"
        echo "Created '$CONF_FILE'"
        # Enable the site by symlinking into sites-enabled.
        ln -sf "$CONF_FILE" "$CONF_LINK"
        echo "Linked '$CONF_FILE' to '$CONF_LINK'"
    done < sh/gen/sites.list
}
# Restart Apache so freshly installed vhosts take effect.
apache-restart () {
    service apache2 restart
}
# Install miniconda or load cached version

# Path to the conda distribution
export PATH="$HOME/miniconda/bin:$PATH"

# Check if the conda command exists, and if not,
# download and install miniconda
if ! command -v conda > /dev/null; then
  wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
  bash miniconda.sh -b -p $HOME/miniconda -u;
  conda config --add channels conda-forge;
  conda config --set always_yes yes;
  conda update --all;
  # Build the test environment pinned to the CI-provided Python version.
  conda create --yes -n test python=$PYTHON_VERSION;
  # NOTE(review): `conda activate` normally requires the conda shell hook
  # (`source .../conda.sh`) in non-interactive shells — confirm this works
  # in the CI image being used.
  conda activate test;
  conda install tectonic;
  conda install -c conda-forge numpy=$NUMPY_VERSION scipy matplotlib setuptools pytest pytest-cov pip;
fi

# Display some debugging info
conda info -a

# DEBUG [November 2 2018]
# Attempt to resolve issues with SSL certificate expiring for purl.org:
# https://tectonic.newton.cx/t/how-to-use-tectonic-if-you-can-t-access-purl-org/44
# https://github.com/tectonic-typesetting/tectonic/issues/131
mkdir -p $HOME/.config/Tectonic
cat > $HOME/.config/Tectonic/config.toml << EOL
[[default_bundles]]
url = "https://tectonic.newton.cx/bundles/tlextras-2018.1r0/bundle.tar"
EOL
#!/bin/bash
# Create a GitHub release for web3j-unit: builds a changelog from commits
# since the previous release, posts the release via the GitHub API, then
# uploads the built artifacts.
set -eo pipefail
# Resolve the real location of this script, following symlinks.
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
  DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
  SOURCE="$(readlink "$SOURCE")"
  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
# Track the last executed command so the EXIT trap can report it.
trap 'last_command=$current_command; current_command=$BASH_COMMAND' DEBUG
# echo an error message before exiting
# (typo fix: "filed" -> "failed". NOTE(review): this trap also fires on a
# successful exit — consider checking $? before printing.)
trap 'echo "\"${last_command}\" command failed with exit code $?."' EXIT
export SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
source "$SCRIPTS_DIR/common.bash"
ensure_version
export PRODUCT=web3j-unit
# Commit the previous release points to, used as the changelog lower bound.
export PREVIOUS_RELEASE=$(curl -H "Authorization: token ${GITHUB_PERSONAL_ACCESS_TOKEN}" -s https://api.github.com/repos/web3j/${PRODUCT}/releases/latest | jq -r '.target_commitish' )
# jq --slurp --raw-input turns the commit list into one JSON string.
export CHANGELOG=$(git rev-list --format=oneline --abbrev-commit --max-count=50 ${PREVIOUS_RELEASE}..HEAD | jq --slurp --raw-input . )
echo "Creating a new release on GitHub with changes"
# ${CHANGELOG:1:-1} strips the surrounding JSON quotes for display.
echo -e "\n${CHANGELOG:1:-1}"
API_JSON="{
  \"tag_name\": \"v${VERSION}\",
  \"target_commitish\": \"$(git rev-parse HEAD)\",
  \"name\": \"v${VERSION}\",
  \"body\": \"Release of version ${VERSION}: \n\n ${CHANGELOG:1:-1}\",
  \"draft\": false,
  \"prerelease\": false
}"
export RESULT=$(curl -H "Authorization: token ${GITHUB_PERSONAL_ACCESS_TOKEN}" --data "$API_JSON" -s https://api.github.com/repos/web3j/${PRODUCT}/releases)
export UPLOAD_URL=$(echo ${RESULT} | jq -r ".upload_url")
# ${UPLOAD_URL:0:-13} removes the trailing "{?name,label}" URI template.
for FILE in `find ./build/libs -type f -name "${PRODUCT}-${VERSION}.*"`;
do
  curl -H "Authorization: token ${GITHUB_PERSONAL_ACCESS_TOKEN}" -s "${UPLOAD_URL:0:-13}?name=$(basename -- $FILE)" -H "Content-Type: $(file -b --mime-type $FILE)" --data-binary @"${FILE}"
done
echo "Release finished"
#!/bin/sh
# Copyright 2020 ZUP IT SERVICOS EM TECNOLOGIA E INOVACAO SA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
URL_DOWNLOAD=""
VERSION_DOWNLOAD=$1
# Newest release-candidate / beta tags, taken from the version-sorted remote tag list.
LATEST_RC=$(git ls-remote --exit-code --sort='v:refname' --tags https://github.com/ZupIT/horusec.git --ref 'v*.*.*-rc.*' | cut --delimiter='/' --fields=3 | tail --lines=1 | sed 's/.*\///; s/\^{}//')
LATEST_BETA=$(git ls-remote --exit-code --sort='v:refname' --tags https://github.com/ZupIT/horusec.git --ref 'v*.*.*-beta.*' | cut --delimiter='/' --fields=3 | tail --lines=1 | sed 's/.*\///; s/\^{}//')
IS_NEW_URL=false
# Semver pattern written as POSIX ERE for `grep -E`. The previous pattern used
# the PCRE-only tokens \d and (?:...), which grep -E does not support, so
# explicit version inputs never matched.
regex='^v(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-((0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?$'
# Validate/normalize $VERSION_DOWNLOAD. Accepts the keywords latest-rc,
# latest-beta and latest (resolved via the LATEST_* globals), or an explicit
# semver tag matching $regex; exits 1 on anything else.
horusecSetVersion() {
    if [ -z "$VERSION_DOWNLOAD" ]; then
        echo "invalid input, empty string"
        exit 1
    elif [ "$VERSION_DOWNLOAD" = "latest-rc" ]; then
        echo "Version set to $LATEST_RC"
        VERSION_DOWNLOAD=$LATEST_RC
    elif [ "$VERSION_DOWNLOAD" = "latest-beta" ]; then
        echo "Version set to $LATEST_BETA"
        VERSION_DOWNLOAD=$LATEST_BETA
    elif [ "$VERSION_DOWNLOAD" = "latest" ]; then
        echo "Version set to latest"
        VERSION_DOWNLOAD='latest'
    elif echo $VERSION_DOWNLOAD | grep -Eq $regex; then
        echo "Version set to $VERSION_DOWNLOAD"
    else
        echo "input not match required params: 'latest-rc' 'latest-beta' 'latest' or a semantic version compliant, check https://github.com/ZupIT/horusec/releases"
        exit 1
    fi
}
# Select the "latest"-release download URL matching the current OS/arch,
# storing it in URL_DOWNLOAD. Exits 1 on unsupported platforms.
horusecIdentifyOSLatest() {
    os_name=$(uname)
    os_arch=$(uname -m)
    if [ "$os_name" = "Linux" ]; then
        case "$os_arch" in
            x86_64)
                echo "Installing Horusec for Linux amd64"
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/${VERSION_DOWNLOAD}/download/horusec_linux_amd64"
                ;;
            aarch64)
                echo "Installing Horusec for Linux arm64"
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/${VERSION_DOWNLOAD}/download/horusec_linux_arm64"
                ;;
            *)
                echo "Installing Horusec for Linux x86"
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/${VERSION_DOWNLOAD}/download/horusec_linux_x86"
                ;;
        esac
    elif [ "$os_name" = "Darwin" ]; then
        case "$os_arch" in
            x86_64)
                echo "Installing Horusec for Mac amd64"
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/${VERSION_DOWNLOAD}/download/horusec_mac_amd64"
                ;;
            arm64)
                echo "Installing Horusec for Mac arm64"
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/${VERSION_DOWNLOAD}/download/horusec_mac_arm64"
                ;;
            *)
                echo "Not enable Horusec to Mac x86"
                exit 1
                ;;
        esac
    else
        echo "Unable to identify which OS you're using"
        exit 1
    fi
}
# Select the download URL for an explicit version, accounting for the binary
# rename after v2.6.4 (IS_NEW_URL — see isOldURLVersion).
horusecIdentifyOSWithVersion() {
    if [ "$(uname)" = "Linux" ]; then
        if [ "$(uname -m)" = "x86_64" ]; then
            echo "Installing Horusec for Linux amd64"
            if [ $IS_NEW_URL = true ]; then
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/download/${VERSION_DOWNLOAD}/horusec_linux_amd64"
            else
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/download/${VERSION_DOWNLOAD}/horusec_linux_x64"
            fi
        elif [ "$(uname -m)" = "aarch64" ]; then
            echo "Installing Horusec for Linux arm64"
            URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/download/${VERSION_DOWNLOAD}/horusec_linux_arm64"
        else
            echo "Installing Horusec for Linux x86"
            URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/download/${VERSION_DOWNLOAD}/horusec_linux_x86"
        fi
    elif [ "$(uname)" = "Darwin" ]; then
        if [ "$(uname -m)" = "x86_64" ]; then
            echo "Installing Horusec for Mac amd64"
            if [ $IS_NEW_URL = true ]; then
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/download/${VERSION_DOWNLOAD}/horusec_mac_amd64"
            else
                URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/download/${VERSION_DOWNLOAD}/horusec_mac_x64"
            fi
        # Bug fix: this branch previously re-tested "x86_64" (a duplicate of
        # the branch above, so it was unreachable); it must detect Apple
        # Silicon, matching horusecIdentifyOSLatest. Also fixes the "amr64"
        # typo in the message.
        elif [ "$(uname -m)" = "arm64" ]; then
            echo "Installing Horusec for Mac arm64"
            URL_DOWNLOAD="https://github.com/ZupIT/horusec/releases/download/${VERSION_DOWNLOAD}/horusec_mac_arm64"
        else
            echo "Not enable Horusec to Mac x86"
            exit 1
        fi
    else
        echo "Unable to identify which OS you're using"
        exit 1
    fi
}
# Download the binary at $URL_DOWNLOAD into /usr/local/bin and print the
# installed version as a smoke test.
horusecDownloadAndInstall() {
    INSTALL_PATH="/usr/local/bin"
    if [ ! -d "$INSTALL_PATH" ]; then
        mkdir -p $INSTALL_PATH
    fi
    # Best-effort removal of a previous install; errors are ignored.
    rm -r $INSTALL_PATH/horusec >/dev/null 2>&1
    echo "Downloading horusec..."
    echo $URL_DOWNLOAD
    # -f: fail (non-zero) on HTTP errors instead of saving the error page.
    curl -fsSL "$URL_DOWNLOAD" -o ./horusec
    chmod +x ./horusec
    sudo mv ./horusec "$INSTALL_PATH"
    echo "Horusec was downloaded and moved to $INSTALL_PATH/horusec"
    $INSTALL_PATH/horusec version
}
# Dispatch: "latest" uses the latest-release URL layout; any explicit
# version uses the versioned layout.
horusecIdentifyOS() {
    case "$VERSION_DOWNLOAD" in
        latest) horusecIdentifyOSLatest ;;
        *) horusecIdentifyOSWithVersion ;;
    esac
}
# After version 2.6.4 binaries have name changes, which cause a change in the download link.
# This function checks if it is earlier or later than version 2.6.4 and through this we can decide which is the
# correct download link for the version informed by the user.
# NOTE(review): despite its name, this function sets IS_NEW_URL=true for
# versions *after* 2.6.4.
isOldURLVersion() {
    if [ $VERSION_DOWNLOAD != "latest" ]; then
        # Strip the leading "v", any -beta.N / -rc.N suffix, then the dots,
        # e.g. v2.6.5-rc.1 -> 265.
        VERSION_WITHOUT_V_PREFIX=$(echo "$VERSION_DOWNLOAD" | sed -e "s/v//g")
        VERSION_WITHOUT_BETA_PREFIX=$(echo "$VERSION_WITHOUT_V_PREFIX" | sed -r "s/-beta\.[0-9]+//g")
        VERSION_WITHOUT_RC_PREFIX=$(echo "$VERSION_WITHOUT_BETA_PREFIX" | sed -r "s/-rc\.[0-9]+//g")
        VERSION_WITHOUT_DOTS=$(echo "$VERSION_WITHOUT_RC_PREFIX" | sed -e "s/\.//g")
        # NOTE(review): comparing concatenated digits only orders correctly
        # while each component stays single-digit (e.g. 2.6.10 vs 2.7.1 can
        # misorder) — confirm this holds for the published tags.
        if [ "$VERSION_WITHOUT_DOTS" -gt 264 ]; then
            IS_NEW_URL=true
        fi
    fi
}
# Main flow: validate the requested version, decide which URL scheme applies,
# pick the binary for this OS/arch, then download and install it.
horusecSetVersion
isOldURLVersion
horusecIdentifyOS
horusecDownloadAndInstall
|
<reponame>jihene-sahli/tt<gh_stars>0
package org.wildfly.swarm.microprofile.config.tck;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;
import org.testng.IAnnotationTransformer;
import org.testng.annotations.ITestAnnotation;
/**
 * TestNG annotation transformer that disables tests listed in the
 * {@code TCK-exclusions.txt} classpath resource (one fully-qualified test
 * class name per line).
 *
 * Created by hbraun on 10.01.18.
 */
public class TestNGAnnotationTransformer implements IAnnotationTransformer {

    /** Fully-qualified names of test classes to disable. */
    private final Set<String> exclusions = new HashSet<>();

    public TestNGAnnotationTransformer() {
        try {
            InputStream in = TestNGAnnotationTransformer.class.getClassLoader().getResourceAsStream("TCK-exclusions.txt");
            if (in != null) {
                read(in);
            }
        } catch (IOException e) {
            // Failure to load the list just means no tests get excluded.
            e.printStackTrace();
        }
    }

    /**
     * Reads exclusions from the given stream, one entry per line. The
     * stream is closed via the try-with-resources reader.
     *
     * @param input stream containing the exclusion list
     * @throws IOException if reading fails
     */
    public void read(InputStream input) throws IOException {
        try (BufferedReader buffer = new BufferedReader(new InputStreamReader(input))) {
            buffer.lines().forEach(exclusions::add);
        }
    }

    @Override
    public void transform(ITestAnnotation annotation, Class testClass, Constructor testConstructor, Method testMethod) {
        if (testMethod == null || annotation == null) {
            return;
        }
        String clazzName = testMethod.getDeclaringClass().getName();
        // O(1) set membership check instead of iterating every exclusion.
        if (exclusions.contains(clazzName)) {
            annotation.setEnabled(false);
        }
    }
}
|
/// <summary>
/// Returns the cached DTOs when caching is enabled; otherwise returns the
/// freshly fetched list supplied by the caller.
/// </summary>
/// <param name="dtos">DTOs fetched from the source.</param>
/// <param name="isCachingEnabled">Whether the cache should be used.</param>
/// <returns>The cached or the supplied DTO list.</returns>
public List<DTO> GetCachedOrFetchedData(List<DTO> dtos, bool isCachingEnabled)
{
    return isCachingEnabled
        ? CachedData.GetCachedDTOs()
        : dtos;
}
<gh_stars>0
import { Vue, Component } from 'vue-property-decorator'
import router from './router'
import App from './App.vue'
// Root Vue instance. Mounting is deferred until the router has resolved the
// initial route (router.onReady) so the first render sees the final route.
@Component({ router, render: h => h(App) })
class Vm extends Vue {
  created() {
    console.log(`created`)
    router.onReady(route => {
      this.$mount('#app')
    })
  }
  mounted() {
    console.log(`mounted`)
  }
}
const vm = new Vm()
export default vm
// Expose the instance for console debugging in development builds only.
// (=== instead of ==: strict comparison — identical result for string env
// values, and the project's lint-friendly idiom.)
if (process.env.NODE_ENV === 'development') {
  Object.assign(window, { vm })
}
|
import matplotlib.pyplot as plt
# Bug fix: the script used `np` without importing numpy, raising NameError.
import numpy as np

# Use randomly generated data: 1000 samples from a standard normal.
data = np.random.normal(size=1000)
# Generate histogram with 30 bins; edgecolor/alpha make the bars readable.
plt.hist(data, bins=30, edgecolor='k', alpha=0.7)
plt.xlabel("Value")
plt.ylabel("Frequency")
# Show histogram
plt.show()
# Count lines of code, excluding third-party and build-output directories.
# (Removed the duplicate "CMakeFiles" entry from --exclude-dir.)
cloc . --exclude-dir=thirdparty,Debug,CMakeFiles,build,.git
|
<reponame>melkishengue/cpachecker
package pack;
/**
 * Minimal program exercising an {@code assert} on a condition that is
 * always true.
 */
public class AssertCondition_true_assert {
    public static void main(String[] args) {
        boolean condition = true;
        // startAssert / endAssert are intentionally unused: they presumably
        // serve as line markers bracketing the assert for the verification
        // tooling — TODO confirm against the analysis harness's conventions.
        int startAssert;
        assert condition;
        int endAssert;
    }
}
|
# Rasterize a GeoJSON layer (with r/g/b attributes) into an RGB MBTiles
# pyramid and upload it. $prefix names the dataset; $ts is the raster size.
prefix=LA
ts=10000
# Reproject the source GeoJSON to Web Mercator. Now uses $prefix consistently
# (was hard-coded "LA", which broke the script for any other prefix).
ogr2ogr -f GeoJSON -t_srs EPSG:3857 $prefix-3857.geojson $prefix.geojson
# -f: do not fail/warn when no .tif is left over from a previous run.
rm -f $prefix.tif
# Burn the r/g/b attributes into bands 1-3 of a GeoTIFF.
gdal_rasterize -ot UInt32 -a r -ts $ts $ts -of GTiff $prefix-3857.geojson $prefix.tif
gdal_rasterize -a g -b 2 $prefix-3857.geojson $prefix.tif
gdal_rasterize -a b -b 3 $prefix-3857.geojson $prefix.tif
# Convert to MBTILES with nearest-neighbour resampling, then build overviews.
gdal_translate -ot Byte -r nearest -of MBTILES -co RESAMPLING=NEAREST $prefix.tif $prefix.mbtiles
sqlite3 $prefix.mbtiles "select zoom_level, count(*) from tiles GROUP BY zoom_level"
gdaladdo -r nearest $prefix.mbtiles 2 4 8 16 32 64 128 256 512 1024 2048 4096
sqlite3 $prefix.mbtiles "select zoom_level, count(*) from tiles GROUP BY zoom_level"
scp $prefix.mbtiles grunnkart@hydra:tilesdata/indexed/
#scp $prefix.colors.json grunnkart@hydra:tilesdata/raster/
|
#!/bin/bash
# Output to stdout the per-table, per-hour row counts for every table of the
# `data` schema over the date range below.

# Inclusive date range, formatted yyyymmdd. (exit 1, not exit -1: exit
# statuses are 0-255 and -1 is non-portable.)
date_start=$(date '+%Y%m%d' -d "2021-1-1") || exit 1
date_end=$(date '+%Y%m%d' -d "2021-8-25") || exit 1
# Collect table names, skipping metadata tables.
mapfile -t table < <(hive -S -e 'use data; show tables;' 2>/dev/null |grep -v "meta" |uniq)
# Build one large query covering all tables so hive is invoked only once.
hql=""
# Quoted expansion: table names are not word-split or glob-expanded.
for i in "${table[@]}"; do
    hql+=" select '${i}' as table_name, from_unixtime( cast(substr(started,1,10) as INT) ,'yyyyMMddHH')||'0000' as tr_to_hour, count(1) as cnt_ from ${i} where pdate>='${date_start}' and pdate<='${date_end}' group by from_unixtime( cast(substr(started,1,10) as INT) ,'yyyyMMddHH');"
done
hive -S -e "use data;${hql}" 2>./stat_debug.txt
|
import {DestructableView} from "../lib/numbersLab/DestructableView";
import {VueVar, VueWatched} from "../lib/numbersLab/VueAnnotate";
import {TransactionsExplorer} from "../model/TransactionsExplorer";
import {WalletRepository} from "../model/WalletRepository";
import {DependencyInjectorInstance} from "../lib/numbersLab/DependencyInjector";
import {Constants} from "../model/Constants";
import {Wallet} from "../model/Wallet";
import {AppState, WalletWorker} from "../model/AppState";
import {Password} from "../model/Password";
import {BlockchainExplorerProvider} from "../providers/BlockchainExplorerProvider";
import {BlockchainExplorer} from "../model/blockchain/BlockchainExplorer";
import {WalletWatchdog} from "../model/WalletWatchdog";
// Resolve shared singletons from the DI container. The trailing `false`
// presumably means "do not create if absent" — TODO confirm against
// DependencyInjector; each of these may therefore be null.
let wallet : Wallet = DependencyInjectorInstance().getInstance(Wallet.name, 'default', false);
let blockchainExplorer : BlockchainExplorer = BlockchainExplorerProvider.getInstance();
let walletWatchdog : WalletWatchdog = DependencyInjectorInstance().getInstance(WalletWatchdog.name,'default', false);
class ChangeWalletPasswordView extends DestructableView{
@VueVar('') oldPassword !: string;
@VueVar(false) invalidOldPassword !: boolean;
@VueVar('') walletPassword !: string;
@VueVar('') walletPassword2 !: string;
@VueVar(false) insecurePassword !: boolean;
@VueVar(false) forceInsecurePassword !: boolean;
constructor(container : string){
super(container);
}
@VueWatched()
oldPasswordWatch(){
let wallet = WalletRepository.getLocalWalletWithPassword(this.oldPassword);
if(wallet !== null) {
this.invalidOldPassword = false;
}else
this.invalidOldPassword = true;
}
forceInsecurePasswordCheck(){
let self = this;
self.forceInsecurePassword = true;
}
@VueWatched()
walletPasswordWatch(){
if(!Password.checkPasswordConstraints(this.walletPassword, false)){
this.insecurePassword = true;
}else
this.insecurePassword = false;
}
changePassword(){
let walletWorker : WalletWorker = DependencyInjectorInstance().getInstance(WalletWorker.name,'default', false);
if(walletWorker !== null){
walletWorker.password = <PASSWORD>;
walletWorker.save();
swal({
type:'success',
title:i18n.t('changeWalletPasswordPage.modalSuccess.title'),
confirmButtonText:i18n.t('changeWalletPasswordPage.modalSuccess.confirmText'),
});
this.oldPassword = '';
this.walletPassword = '';
this.walletPassword2 = '';
this.insecurePassword = false;
this.forceInsecurePassword = false;
this.invalidOldPassword = false;
}
}
}
// Only show this page when a wallet is open and the explorer is available;
// otherwise bounce back to the index page.
if(wallet !== null && blockchainExplorer !== null)
	new ChangeWalletPasswordView('#app');
else
	window.location.href = '#index';
|
<gh_stars>10-100
package io.opensphere.osh.aerialimagery.results;
import io.opensphere.core.math.Vector2d;
import io.opensphere.core.math.WGS84EarthConstants;
import io.opensphere.core.model.GeographicConvexQuadrilateral;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.projection.GeographicBody3D;
import io.opensphere.osh.aerialimagery.model.PlatformMetadata;
/**
 * Given a platforms location and orientation and a camera's orientation, this
 * class knows how to calculate the camera's footprint on the earth.
 */
public class FootprintCalculator
{
    /**
     * Calculates the footprint of the camera.
     *
     * @param metadata Contains the camera information.
     * @param fieldOfViewWidth The field of view width (degrees — converted
     *            with Math.toRadians below).
     * @param fieldOfViewHeight The field of view height (degrees).
     * @return The footprint.
     */
    public GeographicConvexQuadrilateral calculateFootprint2(PlatformMetadata metadata, double fieldOfViewWidth,
            double fieldOfViewHeight)
    {
        double altitude = metadata.getLocation().getAltM();
        // Half-angles of the field of view, kept in both degrees and radians.
        double widthAngleDeg = fieldOfViewWidth / 2;
        double heightAngleDeg = fieldOfViewHeight / 2;
        double widthAngle = Math.toRadians(widthAngleDeg);
        double heightAngle = Math.toRadians(heightAngleDeg);
        // Combined platform + camera (gimbal) orientation.
        double yawAngle = metadata.getCameraYawAngle() + metadata.getYawAngle();
        double pitchAngle = metadata.getPitchAngle() + metadata.getCameraPitchAngle();
        // 90 degree offset: presumably converts pitch from horizon-relative
        // to nadir-relative for the tan() projections — TODO confirm against
        // PlatformMetadata's angle conventions.
        double pitchRads = Math.toRadians(90 + pitchAngle);
        double yawRads = Math.toRadians(-yawAngle);
        // Ground distances from the point under the platform to the far
        // (top) and near (bottom) footprint edges.
        double groundDistanceTop = altitude * Math.tan(pitchRads + heightAngle);
        double groundDistanceBottom = altitude * Math.tan(pitchRads - heightAngle);
        // Slant ranges from the platform to those edges.
        double aircraftToTopDistance = Math.sqrt(Math.pow(groundDistanceTop, 2) + Math.pow(altitude, 2));
        double aircraftToBottomDistance = Math.sqrt(Math.pow(groundDistanceBottom, 2) + Math.pow(altitude, 2));
        // Half-widths (meters) of the footprint at each edge.
        double lonDeltaTopM = aircraftToTopDistance * Math.tan(widthAngle);
        double lonDeltaBottomM = aircraftToBottomDistance * Math.tan(widthAngle);
        LatLonAlt pos = LatLonAlt.createFromDegrees(metadata.getLocation().getLatD(), metadata.getLocation().getLonD());
        // Centers of the near/far edges, computed with yaw 0 (unrotated);
        // the yaw rotation is applied to all four corners afterwards.
        LatLonAlt bottomLat = calculateIntersectionPoint(pos, altitude, 0, pitchAngle - heightAngleDeg);
        LatLonAlt topLat = calculateIntersectionPoint(pos, altitude, 0, pitchAngle + heightAngleDeg);
        // Expand each edge west (270 deg) and east (90 deg) by its half-width.
        LatLonAlt topLeft = GeographicBody3D.greatCircleEndPosition(topLat, Math.toRadians(270),
                WGS84EarthConstants.RADIUS_EQUATORIAL_M, lonDeltaTopM);
        LatLonAlt topRight = GeographicBody3D.greatCircleEndPosition(topLat, Math.toRadians(90),
                WGS84EarthConstants.RADIUS_EQUATORIAL_M, lonDeltaTopM);
        LatLonAlt bottomLeft = GeographicBody3D.greatCircleEndPosition(bottomLat, Math.toRadians(270),
                WGS84EarthConstants.RADIUS_EQUATORIAL_M, lonDeltaBottomM);
        LatLonAlt bottomRight = GeographicBody3D.greatCircleEndPosition(bottomLat, Math.toRadians(90),
                WGS84EarthConstants.RADIUS_EQUATORIAL_M, lonDeltaBottomM);
        // Rotate the unrotated corners around the platform position by the
        // combined yaw (2D rotation in lon/lat space).
        Vector2d posVector = pos.asVec2d();
        Vector2d topLeftVector = topLeft.asVec2d().subtract(posVector).rotateAroundOrigin(yawRads).add(posVector);
        Vector2d bottomLeftVector = bottomLeft.asVec2d().subtract(posVector).rotateAroundOrigin(yawRads).add(posVector);
        Vector2d bottomRightVector = bottomRight.asVec2d().subtract(posVector).rotateAroundOrigin(yawRads).add(posVector);
        Vector2d topRightVector = topRight.asVec2d().subtract(posVector).rotateAroundOrigin(yawRads).add(posVector);
        topLeft = LatLonAlt.createFromDegrees(topLeftVector.getY(), topLeftVector.getX());
        bottomLeft = LatLonAlt.createFromDegrees(bottomLeftVector.getY(), bottomLeftVector.getX());
        bottomRight = LatLonAlt.createFromDegrees(bottomRightVector.getY(), bottomRightVector.getX());
        topRight = LatLonAlt.createFromDegrees(topRightVector.getY(), topRightVector.getX());
        return new GeographicConvexQuadrilateral(new GeographicPosition(topLeft), new GeographicPosition(topRight),
                new GeographicPosition(bottomRight), new GeographicPosition(bottomLeft));
    }

    /**
     * Calculates the geographic point at which the ray intersects with the
     * earth.
     *
     * @param cameraPos The position of the camera.
     * @param terrainRelativeAlt The altitude of the camera relative to the
     *            terrain.
     * @param yawAngle The yaw angle of the camera.
     * @param pitchAngle The pitch angle of the camera.
     * @return The geographic position.
     */
    private LatLonAlt calculateIntersectionPoint(LatLonAlt cameraPos, double terrainRelativeAlt, double yawAngle,
            double pitchAngle)
    {
        // Same 90-degree pitch offset as in calculateFootprint2.
        double angle = Math.toRadians(90 + pitchAngle);
        double yawRads = Math.toRadians(yawAngle);
        // Horizontal distance to the intersection, then walk a great circle
        // from the camera position along the yaw heading.
        double groundDistance = terrainRelativeAlt * Math.tan(angle);
        LatLonAlt pos = LatLonAlt.createFromDegrees(cameraPos.getLatD(), cameraPos.getLonD());
        LatLonAlt groundPos = GeographicBody3D.greatCircleEndPosition(pos, yawRads, WGS84EarthConstants.RADIUS_EQUATORIAL_M,
                groundDistance);
        return groundPos;
    }
}
|
import { __assign, __extends } from "tslib";
import { deepMix, every } from '@antv/util';
import * as EventParser from './event';
import ViewLayer from '../../base/view-layer';
import { getGeom } from '../../geoms/factory';
import { getPieLabel } from './component/label';
import SpiderLabel from './component/label/spider-label';
import { registerPlotType } from '../../base/global';
import './theme';
// Synthetic field injected when every datum's angle value is 0 so equal
// slices can still be drawn (each gets 1/n — see processData below).
export var percentageField = '$$percentage$$';
// Maps the plot geometry name to the underlying G2 geometry type.
var G2_GEOM_MAP = {
    pie: 'interval',
};
// Maps the plot geometry name to the plot-level geometry type.
var PLOT_GEOM_MAP = {
    pie: 'column',
};
// NOTE(review): this file is compiled output (see the sourceMappingURL
// below) — behavioral changes belong in the TypeScript source, not here.
// @ts-ignore
var PieLayer = /** @class */ (function (_super) {
    __extends(PieLayer, _super);
    function PieLayer() {
        var _this = _super !== null && _super.apply(this, arguments) || this;
        _this.type = 'pie';
        return _this;
    }
    /** Default pie configuration, deep-merged over the base layer defaults. */
    PieLayer.getDefaultOptions = function () {
        return deepMix({}, _super.getDefaultOptions.call(this), {
            width: 400,
            height: 400,
            title: {
                visible: false,
            },
            description: {
                visible: false,
            },
            forceFit: true,
            padding: 'auto',
            radius: 0.8,
            label: {
                visible: true,
                type: 'inner',
                autoRotate: false,
                adjustPosition: true,
                allowOverlap: false,
                line: {
                    visible: true,
                    smooth: true,
                },
            },
            legend: {
                visible: true,
                position: 'right-center',
            },
            tooltip: {
                visible: true,
                shared: false,
                showCrosshairs: false,
                showMarkers: false,
            },
            pieStyle: {
                stroke: 'white',
                lineWidth: 1,
            },
        });
    };
    // When every angle value is 0 the slices are positioned by the synthetic
    // percentage field (see processData); point the tooltip back at the real
    // color/angle fields so it shows meaningful values.
    PieLayer.prototype.afterInit = function () {
        _super.prototype.afterInit.call(this);
        var _a = this.options, angleField = _a.angleField, colorField = _a.colorField, data = _a.data;
        var allZero = every(data, function (d) { return d[angleField] === 0; });
        if (allZero) {
            var pieGeom = this.view.geometries[0];
            pieGeom.tooltip(colorField + "*" + angleField);
        }
    };
    PieLayer.prototype.afterRender = function () {
        _super.prototype.afterRender.call(this);
        var options = this.options;
        /** Spider-layout labels. */
        if (options.label && options.label.visible) {
            // Clear any previous label component to avoid rendering twice.
            if (this.labelComponent) {
                this.labelComponent.clear();
            }
            var labelConfig = options.label;
            if (labelConfig.type === 'spider') {
                var data = options.data, colorField = options.colorField, angleField_1 = options.angleField;
                var allZero = every(data, function (d) { return d[angleField_1] === 0; });
                var valueField = allZero ? percentageField : angleField_1;
                this.labelComponent = new SpiderLabel(__assign({ view: this.view, fields: colorField ? [valueField, colorField] : [valueField], angleField: angleField_1,
                    allZero: allZero }, this.options.label));
                this.labelComponent.render();
            }
            else {
                var LabelCtor = getPieLabel(labelConfig.type);
                this.labelComponent = new LabelCtor(this, options.label);
                this.labelComponent.render();
            }
        }
    };
    // Returns undefined when no angle field is configured.
    PieLayer.prototype.getAngleScale = function () {
        var angleField = this.options.angleField;
        if (angleField) {
            return this.view.getScaleByField(angleField);
        }
    };
    /** Translates the geometry name for either the g2 or the plot level. */
    PieLayer.prototype.geometryParser = function (dim, type) {
        if (dim === 'g2') {
            return G2_GEOM_MAP[type];
        }
        return PLOT_GEOM_MAP[type];
    };
    // Angle field keeps its default scale; color field is categorical.
    PieLayer.prototype.scale = function () {
        var props = this.options;
        _super.prototype.scale.call(this);
        var scales = {};
        scales[props.angleField] = {};
        scales[props.colorField] = { type: 'cat' };
        scales = deepMix({}, this.config.scales, scales);
        this.setConfig('scales', scales);
    };
    // Coerce angle values to numbers; when every value is 0, additionally
    // assign each datum an equal 1/n share in the synthetic percentage field.
    PieLayer.prototype.processData = function (data) {
        var key = this.options.angleField;
        var allZero = every(data, function (d) { return d[key] === 0; });
        if (allZero) {
            return data.map(function (item) {
                var _a;
                return (__assign(__assign({}, item), (_a = {}, _a[key] = typeof item[key] === 'string' ? Number.parseFloat(item[key]) : item[key], _a[percentageField] = 1 / data.length, _a)));
            });
        }
        return data.map(function (item) {
            var _a;
            var value = item[key];
            return __assign(__assign({}, item), (_a = {}, _a[key] = value && !isNaN(value) ? +value : value, _a));
        });
    };
    // Pie charts have no axes.
    PieLayer.prototype.axis = function () {
        return;
    };
    PieLayer.prototype.coord = function () {
        var props = this.options;
        var coordConfig = {
            type: 'theta',
            cfg: {
                radius: props.radius,
                // @ts-ignore internal customization, not exposed as public config
                innerRadius: props.innerRadius || 0,
            },
        };
        this.setConfig('coordinate', coordConfig);
    };
    PieLayer.prototype.addGeometry = function () {
        var props = this.options;
        var data = props.data, angleField = props.angleField;
        var pie;
        var allZero = every(data, function (d) { return d[angleField] === 0; });
        // Position on the percentage field when all angle values are zero.
        if (allZero) {
            pie = getGeom('interval', 'main', {
                plot: this,
                positionFields: [1, percentageField],
            });
        }
        else {
            pie = getGeom('interval', 'main', {
                plot: this,
                positionFields: [1, props.angleField],
            });
        }
        pie.adjust = [{ type: 'stack' }];
        this.pie = pie;
        if (props.label) {
            this.label();
        }
        if (props.tooltip && (props.tooltip.fields || props.tooltip.formatter)) {
            this.geometryTooltip();
        }
        this.setConfig('geometry', pie);
    };
    // Tooltip fields default to [angleField, colorField] unless overridden.
    PieLayer.prototype.geometryTooltip = function () {
        this.pie.tooltip = {};
        var tooltipOptions = this.options.tooltip;
        if (tooltipOptions.fields) {
            this.pie.tooltip.fields = tooltipOptions.fields;
        }
        else {
            this.pie.tooltip.fields = [this.options.angleField, this.options.colorField];
        }
        if (tooltipOptions.formatter) {
            this.pie.tooltip.callback = tooltipOptions.formatter;
            if (!tooltipOptions.fields) {
                this.pie.tooltip.fields = [this.options.angleField, this.options.colorField];
            }
        }
    };
    PieLayer.prototype.animation = function () {
        _super.prototype.animation.call(this);
        var props = this.options;
        if (props.animation === false) {
            /** Animation disabled. */
            this.pie.animate = false;
        }
    };
    PieLayer.prototype.annotation = function () {
        return;
    };
    PieLayer.prototype.parseEvents = function (eventParser) {
        if (eventParser) {
            _super.prototype.parseEvents.call(this, eventParser);
        }
        else {
            _super.prototype.parseEvents.call(this, EventParser);
        }
    };
    PieLayer.prototype.label = function () {
        // Do not use G2's built-in labels; this layer renders its own.
        this.pie.label = false;
    };
    return PieLayer;
}(ViewLayer));
export default PieLayer;
registerPlotType('pie', PieLayer);
//# sourceMappingURL=layer.js.map |
# Shared Capybara step definitions for clicking links/buttons and asserting
# their presence. The named steps further down delegate to the generic steps
# at the top, resolving their labels through I18n.
When(/^I click on the "([^\"]*)" button$/) do |button|
  first(:link_or_button, button).click
end
When(/^I click on the "([^"]+)" link$/) do |label|
  first(:link, label).click
end
Then(/^I should see an? "([^"]+)" link$/) do |link|
  expect(page).to have_selector(:link, link)
end
Then(/^I should not see an? "([^"]+)" link$/) do |link|
  expect(page).to_not have_selector(:link, link)
end
# exact: true — the button label must match exactly, not as a substring.
Then(/^I should see an? "([^"]+)" button$/) do |button|
  expect(page).to have_selector(:link_or_button, button, exact: true)
end
Then(/^I should not see an? "([^"]*)" button$/) do |button|
  expect(page).to_not have_selector(:link_or_button, button, exact: true)
end
# The winning bid amount is rendered as a currency string and used as the
# link text.
When(/^I click on the link to the bids$/) do
  bid = WinningBid.new(@auction).find
  bid_amount = Currency.new(bid.amount).to_s
  click_on(bid_amount)
end
When(/^I click on the "Edit" link for the auction$/) do
  within('.auction-title') { first(:link_or_button, "Edit").click }
end
When(/^I click on the update button$/) do
  update_button = I18n.t('helpers.submit.auction.update')
  step("I click on the \"#{update_button}\" button")
end
When(/^I click on the Publish button$/) do
  button = I18n.t('links_and_buttons.auctions.publish')
  step("I click on the \"#{button}\" button")
end
Then(/^I should see the Archive button$/) do
  button = I18n.t('links_and_buttons.auctions.archive')
  step("I should see a \"#{button}\" button")
end
Then(/^I should not see the Archive button$/) do
  button = I18n.t('links_and_buttons.auctions.archive')
  step("I should not see a \"#{button}\" button")
end
When(/^I click on the Archive button$/) do
  button = I18n.t('links_and_buttons.auctions.archive')
  step("I click on the \"#{button}\" button")
end
When(/^I click on the unpublish button$/) do
  unpublish_button = I18n.t('statuses.admin_auction_status_presenter.future.published.actions.unpublish')
  step("I click on the \"#{unpublish_button}\" button")
end
When(/^I click on the I'm done button$/) do
  button = I18n.t('statuses.bid_status_presenter.over.winner.work_in_progress.action')
  step("I click on the \"#{button}\" button")
end
Then(/^I should see a button to mark as delivered$/) do
  button = I18n.t('statuses.admin_auction_status_presenter.work_in_progress.actions.mark_delivered')
  step("I should see a \"#{button}\" button")
end
When(/^I click the mark as delivered button$/) do
  button = I18n.t('statuses.admin_auction_status_presenter.work_in_progress.actions.mark_delivered')
  step("I click on the \"#{button}\" button")
end
When(/^I click on the add auction link$/) do
  add_link = I18n.t('links_and_buttons.auctions.add')
  step("I click on the \"#{add_link}\" link")
end
When(/^I click on the add customer button$/) do
  add_link = I18n.t('links_and_buttons.customers.add')
  step("I click on the \"#{add_link}\" link")
end
# Stubs the background job so confirming payment doesn't hit the C2 API.
When(/^I click on the confirm received payment button$/) do
  allow(UpdateC2ProposalJob).to receive(:perform_later)
    .with(@auction.id, 'AddReceiptToC2ProposalAttributes')
  link = I18n.t('statuses.bid_status_presenter.over.winner.pending_payment_confirmation.action')
  step("I click on the \"#{link}\" button")
end
When(/^I click on the create customer button$/) do
  create_button = I18n.t('helpers.submit.create', model: 'Customer')
  step("I click on the \"#{create_button}\" button")
end
When(/^I click on the Download CSV link$/) do
  link = I18n.t('links_and_buttons.users.download_csv')
  step("I click on the \"#{link}\" link")
end
When(/^I click on the needs attention link$/) do
  link = I18n.t('links_and_buttons.auctions.needs_attention')
  step("I click on the \"#{link}\" link")
end
Then(/^I should see a Download CSV link$/) do
  link = I18n.t('links_and_buttons.users.download_csv')
  step("I should see a \"#{link}\" link")
end
When(/^I click on the add skill link$/) do
  add_link = I18n.t('links_and_buttons.skills.add')
  step("I click on the \"#{add_link}\" link")
end
When(/^I click on the create skill button$/) do
  create_button = I18n.t('helpers.submit.create', model: 'Skill')
  step("I click on the \"#{create_button}\" button")
end
When(/^I click on the Masquerade link$/) do
  link = I18n.t('links_and_buttons.users.masquerade')
  step("I click on the \"#{link}\" link")
end
Then(/^I should not see the Masquerade link$/) do
  link = I18n.t('links_and_buttons.users.masquerade')
  expect(page).not_to have_link(link)
end
When(/^I click on the Stop Masquerading link$/) do
  link = I18n.t('links_and_buttons.header.stop_masquerading')
  step("I click on the \"#{link}\" link")
end
# These rely on @auction / @user having been set by earlier Given steps.
When(/^I click on the auction's title$/) do
  click_on(@auction.title)
end
When(/^I click on the name of the first user$/) do
  click_on(@user.name)
end
Then(/^I should see the "Edit" link for the auction$/) do
  within('.auction-title') { expect(page).to have_selector(:link, "Edit") }
end
Then(/^I should not see an "Edit" link for the auction$/) do
  within('.auction-title') { expect(page).not_to have_selector(:link, "Edit") }
end
Then(/^I should see a link to the delivery URL$/) do
  expect(page).to have_link('pull request', href: @auction.delivery_url)
end
|
/* FCE Ultra - NES/Famicom Emulator
*
* Copyright notice for this file:
* Copyright (C) 2002 Xodnizel
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <string.h>
#include <stdlib.h>
#include "share.h"
/* State for the emulated mouse device. */
typedef struct {
	int32 mzx, mzy, mzxold, mzyold;	/* current and previous pointer coordinates */
	uint32 readbit;			/* index of the next bit of 'data' to shift out */
	uint32 data;			/* serialized report: ptr[2] bits plus motion flags (see UpdateMOUSE) */
} MOUSE;
static MOUSE Mouse;
/* Controller strobe: reset the serial cursor so reads restart at bit 0. */
static void StrobeMOUSE(int w)
{
	Mouse.readbit=0;
}
/* Serially returns one bit of the latched mouse byte per read.
 * Once all 8 data bits have been consumed, further reads return 1. */
static uint8 ReadMOUSE(int w)
{
uint8 ret=0;
if(Mouse.readbit>=8)
ret|=1;	/* past the data byte: report 1s */
else
{
ret|=(Mouse.data>>Mouse.readbit)&1;
/* fceuindbg is set externally -- presumably while the debugger inspects
 * memory; in that case the read must not consume a bit. TODO confirm. */
if(!fceuindbg)
Mouse.readbit++;
}
return(ret);
}
/* Latches fresh input once per frame. Caller passes data as uint32[3]:
 * ptr[0]=x position, ptr[1]=y position, ptr[2]=button bits.
 * Movement direction is encoded into Mouse.data: 0x04/0x0C for horizontal,
 * 0x10/0x30 for vertical (polarity per the comparisons below -- assumed to
 * match the peripheral's serial protocol, not verifiable from this file). */
static void UpdateMOUSE(int w, void *data, int arg)
{
uint32 *ptr=(uint32*)data;
Mouse.data=0;
/* keep the previous position so a movement delta can be derived */
Mouse.mzxold=Mouse.mzx;
Mouse.mzyold=Mouse.mzy;
Mouse.mzx=ptr[0];
Mouse.mzy=ptr[1];
Mouse.data|=ptr[2];	/* button state passes through unchanged */
if((Mouse.mzxold-Mouse.mzx)>0)
Mouse.data|=0x0C;	/* moved left (old > new) */
else if((Mouse.mzxold-Mouse.mzx)<0)
Mouse.data|=0x04;	/* moved right */
if((Mouse.mzyold-Mouse.mzy)>0)
Mouse.data|=0x30;	/* moved up (old > new) */
else if((Mouse.mzyold-Mouse.mzy)<0)
Mouse.data|=0x10;	/* moved down */
}
/* Driver table: read, strobe and per-frame update hooks; unused slots are 0. */
static INPUTC MOUSEC={ReadMOUSE,0,StrobeMOUSE,UpdateMOUSE,0,0};
/* Resets all mouse state and returns the driver table for input port w. */
INPUTC *FCEU_InitMouse(int w)
{
Mouse.mzx=0;
Mouse.mzy=0;
Mouse.mzxold=0;
Mouse.mzyold=0;
Mouse.data=0;
return(&MOUSEC);
}
|
/// Sums the amounts of every transaction belonging to the given account.
/// - Parameters:
///   - transactions: All transactions to scan.
///   - accountID: The account whose balance is requested.
/// - Returns: The total of matching transaction amounts (0.0 when none match).
func calculateBalance(transactions: [Transaction], forAccount accountID: String) -> Double {
    var balance = 0.0
    // Single pass; `where` keeps only this account's transactions.
    for transaction in transactions where transaction.accountID == accountID {
        balance += transaction.amount
    }
    return balance
}
import React from 'react';
import { mount } from 'enzyme';
import Wrapper from './Wrapper';
import ViewStore from '../../stores/ViewStore';
// Smoke test: Wrapper must mount with a fresh ViewStore and arbitrary
// children without throwing. No assertions needed -- mount() throwing
// fails the test.
it('renders without crashing', () => {
  mount(
    <Wrapper view={ new ViewStore() }>
      <div>smack</div>
    </Wrapper>
  );
});
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2952-1
#
# Security announcement date: 2016-04-21 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:21 UTC
#
# Operating System: Ubuntu 14.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - php5-cli:5.5.9+dfsg-1ubuntu4.16
# - libapache2-mod-php5:5.5.9+dfsg-1ubuntu4.16
# - php5-cgi:5.5.9+dfsg-1ubuntu4.16
# - php5-gd:5.5.9+dfsg-1ubuntu4.16
# - php5-snmp:5.5.9+dfsg-1ubuntu4.16
# - php5-mysqlnd:5.5.9+dfsg-1ubuntu4.16
# - php5-fpm:5.5.9+dfsg-1ubuntu4.16
#
# Last versions recommended by security team:
# - php5-cli:5.5.9+dfsg-1ubuntu4.20
# - libapache2-mod-php5:5.5.9+dfsg-1ubuntu4.20
# - php5-cgi:5.5.9+dfsg-1ubuntu4.20
# - php5-gd:5.5.9+dfsg-1ubuntu4.20
# - php5-snmp:5.5.9+dfsg-1ubuntu4.20
# - php5-mysqlnd:5.5.9+dfsg-1ubuntu4.16
# - php5-fpm:5.5.9+dfsg-1ubuntu4.16
#
# CVE List:
# - CVE-2014-9767
# - CVE-2015-8835
# - CVE-2016-3185
# - CVE-2015-8838
# - CVE-2016-1903
# - CVE-2016-2554
# - CVE-2016-3141
# - CVE-2016-3142
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Pin each affected package to the version published for USN-2952-1
# (same seven upgrades as before, in the same order).
for pkg in \
  php5-cli=5.5.9+dfsg-1ubuntu4.20 \
  libapache2-mod-php5=5.5.9+dfsg-1ubuntu4.20 \
  php5-cgi=5.5.9+dfsg-1ubuntu4.20 \
  php5-gd=5.5.9+dfsg-1ubuntu4.20 \
  php5-snmp=5.5.9+dfsg-1ubuntu4.20 \
  php5-mysqlnd=5.5.9+dfsg-1ubuntu4.16 \
  php5-fpm=5.5.9+dfsg-1ubuntu4.16
do
  sudo apt-get install --only-upgrade "$pkg" -y
done
|
import { html, TemplateResult } from "lit-html";
import { OperandStack } from "../models/OperandStack";
import { Value } from "../models/runtime";
// Renders one operand-stack slot: a typed, tooltipped cell when a value is
// present, or an empty placeholder for a vacant slot. (Template whitespace
// left untouched -- it is part of the rendered output.)
const renderStackValue = (value: Value | null): TemplateResult =>
  value
    ? html`
        <div class="type-${value.type}" title="${value.value}">
          ${value.value}
        </div>
      `
    : html`
        <div class="empty"></div>
      `;
// Renders the whole operand stack panel; each entry is delegated to
// renderStackValue above.
export const renderStack = (stack: OperandStack): TemplateResult =>
  html`
    <div class="stack">
      <header>Stack</header>
      <div>
        ${stack.list.map(renderStackValue)}
      </div>
    </div>
  `;
|
#!/bin/bash -ex
# For host setup as Kubernetes master
# Usage: $0 <mgmt-ip> [pod-cidr] [service-cidr]
MGMT_IP=$1
POD_NETWORK_CIDR=${2:-192.168.0.0/16}
SERVICE_CIDR=${3:-172.16.1.0/24}
if [ -z "${MGMT_IP}" ]; then
echo "Please specify a management IP!"
exit 1
fi
# Only initialize if this node is not already part of a cluster
# (kubectl get nodes fails when no kubeconfig/cluster exists yet).
if ! kubectl get nodes; then
sudo kubeadm config images pull
sudo kubeadm init \
--pod-network-cidr="${POD_NETWORK_CIDR}" \
--apiserver-advertise-address="${MGMT_IP}" \
--service-cidr="${SERVICE_CIDR}"
# Running as root: point kubectl at the admin config permanently.
if [ "$(id -u)" = 0 ]; then
echo "export KUBECONFIG=/etc/kubernetes/admin.conf" | \
tee -a "${HOME}/.bashrc"
# shellcheck disable=SC1090
source "${HOME}/.bashrc"
fi
# Non-root path: copy the admin config into ~/.kube for kubectl.
mkdir -p "${HOME}/.kube"
sudo cp /etc/kubernetes/admin.conf "${HOME}/.kube/config"
sudo chown "$(id -u)":"$(id -g)" "${HOME}/.kube/config"
sleep 5
# NOTE(review): kubelet normally requires swap to be DISABLED; re-enabling it
# here with 'swapon -a' looks suspicious. Confirm this is intentional (e.g.
# kubelet runs with failSwapOn=false) -- otherwise 'swapoff -a' was likely meant.
sudo swapon -a
fi
|
#!/bin/bash
# Update Repositories
# Adds the beineri Qt 5.8 PPA for Ubuntu Trusty and quietly refreshes the
# package index so the Qt packages become installable.
sudo add-apt-repository ppa:beineri/opt-qt58-trusty -y
sudo apt-get update -qq
/* **** Notes
Read.
//*/
# define CAR
# include "../../../incl/config.h"
/* Validates the client described by argp, then delegates the actual page
 * read on descriptor fd to rd_pages_r(). Returns 0 on any validation
 * failure; otherwise rd_pages_r's result.
 * NOTE(review): heavily obfuscated legacy style ('auto' storage class,
 * parenthesized declarators). R() and CLI_BASE/CLI_INIT come from config.h
 * and are not visible here; 'flag' is passed to R() before being assigned --
 * verify against the macro definition that this is intentional. Locals
 * b/page/i/r are declared but unused in this function. */
signed(__cdecl rd_pages(page_t(*argp),signed(fd))) {
auto signed char *b;
auto page_t *page;
auto signed i,r;
auto signed short flag;
if(!argp) return(0x00);	/* no page descriptor supplied */
if(fd<(0x00)) return(0x00);	/* invalid file descriptor */
flag = (*(CLI_BASE+(R(flag,*argp))));	/* fetch client flag word -- see NOTE */
if(!(CLI_INIT&(flag))) return(0x00);	/* client not initialized */
return(rd_pages_r(argp,fd));
}
|
import axios from 'axios'
import history from '../history'
/**
* ACTION TYPES
*/
// Action type constants consumed by the thunks and reducer in this file.
export const GET_USER = 'GET_USER'
export const REMOVE_USER = 'REMOVE_USER'
export const EDIT_PROFILE = 'EDIT_PROFILE'
/**
 * INITIAL STATE
 */
// currentUser stays {} until `me`/`auth` populates it.
const initialState = {
  currentUser: {}
}
// Sentinel empty user restored after logout/removal.
const defaultUser = {}
/**
 * ACTION CREATORS
 */
// Plain (synchronous) action creators.
export const getUser = user => ({type: GET_USER, user})
export const removeUser = () => ({type: REMOVE_USER})
export const editProfile = user => ({type: EDIT_PROFILE, user})
/**
* THUNK CREATORS
*/
/**
 * Deletes a user account on the server, clears the current user from the
 * store, and navigates home.
 *
 * NOTE(review): axios.delete's second argument is a *config* object, not a
 * request body, so `user` is effectively ignored by this request; if the API
 * expects a payload it must be sent as `{data: user}` -- confirm against the
 * server route before changing the call.
 *
 * @param {string|number} userId id of the user to delete
 * @param {Object} user user payload (currently unused -- see note above)
 */
export const deleteUserProfile = (userId, user) => async dispatch => {
  try {
    await axios.delete(`/api/users/${userId}`, user)
    dispatch(removeUser())
    history.push(`/home`)
  } catch (error) {
    // console.error for consistency with every other thunk in this file
    // (was console.log).
    console.error(error)
  }
}
/**
 * Loads a single user by id and stores it as the current user.
 * Failures are logged, not rethrown.
 * @param {string|number} userId
 */
export const fetchUser = userId => dispatch =>
  axios
    .get(`/api/users/${userId}`)
    .then(res => dispatch(getUser(res.data)))
    .catch(err => console.error(err))
/**
 * Persists profile edits for a user and reflects them in the store.
 * @param {string|number} userId
 * @param {Object} user updated user fields
 */
export const editUser = (userId, user) => async dispatch => {
  try {
    const {data} = await axios.put(`/api/users/${userId}`, user)
    dispatch(editProfile(data))
  } catch (err) {
    console.error(err)
  }
}
/**
 * Creates a new user on the server.
 * @param {Object} user
 * @returns {Promise<Object|undefined>} the created user, or undefined on error
 */
export const postUser = user => async dispatch => {
  try {
    const res = await axios.post('/api/users', user)
    return res.data
  } catch (err) {
    console.error(err)
  }
}
/**
 * Fetches the currently authenticated user from the session endpoint and
 * stores it; falls back to the empty default user when none is returned.
 */
export const me = () => async dispatch => {
  try {
    const {data} = await axios.get('/auth/me')
    dispatch(getUser(data || defaultUser))
  } catch (err) {
    console.error(err)
  }
}
/**
 * Logs a user in or signs one up (method selects the /auth/:method route)
 * and navigates home on success. On auth failure the error is stored on the
 * user slice so the UI can render it.
 * @param {string} email
 * @param {string} password
 * @param {string} method auth route segment (e.g. 'login'/'signup' -- confirm)
 */
export const auth = (email, password, method) => async dispatch => {
  let res
  try {
    res = await axios.post(`/auth/${method}`, {email, password})
  } catch (authError) {
    // Surface the failure through the store instead of throwing.
    return dispatch(getUser({error: authError}))
  }
  try {
    dispatch(getUser(res.data))
    history.push('/home')
    // NOTE(review): clears a cached 'user' localStorage entry; nothing in
    // this file writes that key -- confirm which component owns it.
    localStorage.removeItem('user')
  } catch (dispatchOrHistoryErr) {
    console.error(dispatchOrHistoryErr)
  }
}
/**
 * Variant of `auth` for an existing guest account: PUTs credentials to
 * /auth/:method/:userId -- presumably upgrading the guest record to a
 * registered user (confirm against the auth routes).
 * @param {string|number} userId guest account id
 * @param {string} email
 * @param {string} password
 * @param {string} method auth route segment
 */
export const guestAuth = (
  userId,
  email,
  password,
  method
) => async dispatch => {
  let res
  try {
    res = await axios.put(`/auth/${method}/${userId}`, {email, password})
  } catch (authError) {
    // Surface the failure through the store instead of throwing.
    return dispatch(getUser({error: authError}))
  }
  try {
    dispatch(getUser(res.data))
    history.push('/home')
    // NOTE(review): clears a cached 'user' localStorage entry; nothing in
    // this file writes that key -- confirm which component owns it.
    localStorage.removeItem('user')
  } catch (dispatchOrHistoryErr) {
    console.error(dispatchOrHistoryErr)
  }
}
/**
 * Ends the session server-side, clears the current user from the store, and
 * returns to the landing page. Failures are logged, not rethrown.
 */
export const logout = () => dispatch =>
  axios
    .post('/auth/logout')
    .then(() => {
      dispatch(removeUser())
      history.push('/')
    })
    .catch(err => console.error(err))
/**
* REDUCER
*/
/**
 * User slice reducer: tracks the signed-in (or currently viewed) user.
 * GET_USER and EDIT_PROFILE both replace currentUser with the action payload;
 * REMOVE_USER resets it to the empty default user.
 */
export default function(state = initialState, action) {
  switch (action.type) {
    case GET_USER:
      return {...state, currentUser: action.user}
    case REMOVE_USER:
      return {...state, currentUser: defaultUser}
    case EDIT_PROFILE:
      return {...state, currentUser: action.user}
    default:
      return state
  }
}
|
#!/bin/bash
#
# Script used to build each application from the WMCore repo and upload to pypi.
#
# Usage
# Build a single package:
# sh etc/build_pypi_packages.sh <package name>
# Build all WMCore packages:
# sh etc/build_pypi_packages.sh all
#
set -x
# package passed as parameter, can be one of PACKAGES or "all"
# NOTE(review): TOBUILD is hard-coded to "t0" rather than reading "$1", so the
# usage message and the regex validation below can never trigger. Confirm
# whether the CLI argument should be honored (e.g. TOBUILD=${1:-t0}).
TOBUILD="t0"
# list of packages that can be built and uploaded to pypi
#PACKAGES="wmagent wmcore reqmon reqmgr2 reqmgr2ms global-workqueue acdcserver t0"
PACKAGES="t0"
# Turns the space-separated PACKAGES list into an alternation regex like
# ^(t0|all)$ used for argument validation.
PACKAGE_REGEX="^($(echo $PACKAGES | sed 's/\ /|/g')|all)$"
if [[ -z $TOBUILD ]]; then
  echo "Usage: sh etc/build_testpypi_packages.sh <package name>"
  echo "Usage: sh etc/build_testpypi_packages.sh all"
  exit 1
fi
# check to make sure a valid package name was passed
if [[ ! $TOBUILD =~ $PACKAGE_REGEX ]]; then
  echo "$TOBUILD is not a valid package name"
  echo "Supported packages are $PACKAGES"
  exit 1
fi
# update package list when building all packages
if [[ $TOBUILD == "all" ]]; then
  TOBUILD=$PACKAGES
fi
# loop through packages to build
for package in $TOBUILD; do
  echo "==========" $package "=========="
  # Skip the upload if this version already exists on TestPyPI -- file names
  # can never be reused there.
  released="$( curl -X GET https://test.pypi.org/pypi/${package}/json | jq -r '.releases' | jq 'keys' )"
  # Extract the version string from the T0 package's __init__.py.
  tag=$( grep -m 1 version ../T0/src/python/T0/__init__.py | sed -E "s/version|_|\ |=|'//g")
  if [[ ${released} =~ "\"${tag}\"" ]]; then
    echo "$package-$tag file already exists. See https://test.pypi.org/help/#file-name-reuse for more information."
    exit 0
  fi
  # make a copy of requirements.txt to reference for each build
  cp requirements.txt requirements.t0.txt
  # update the setup script template with package name
  sed "s/PACKAGE_TO_BUILD/$package/" setup_template.py > setup.py
  # build requirements.txt file (keep only lines tagged for this package)
  awk "/($package$)|($package,)/ {print \$1}" requirements.t0.txt > requirements.txt
  # build the package
  python3 setup.py clean sdist
  if [[ $? -ne 0 ]]; then
    echo "Error building package $package"
    exit 1
  fi
  # upload the package to pypi
  echo "Uploading package $package to PyPI"
  #twine upload dist/$package-*
  twine upload --repository testpypi dist/$package-*
  # replace requirements.txt contents
  cp requirements.t0.txt requirements.txt
done
|
#include <stdio.h>
#include <pthread.h>
#include <unistd.h>
/* Shared stop flag polled by the worker thread.
 * NOTE(review): 'volatile' stops the compiler from caching the load, but it
 * is NOT a synchronization primitive -- no happens-before edge exists here.
 * A C11 atomic (or a mutex-protected flag) is the portable way to do this;
 * confirm before relying on this pattern beyond a demo. */
volatile int opt = 1;
/* Worker thread body: busy-waits until main clears 'opt', then exits. */
void *threadFn(void *arg) {
while (opt == 1) {
// Perform some operations
}
printf("Thread exiting\n");
pthread_exit(NULL);
}
int main() {
pthread_t thread;
/* Start the worker; it spins on the shared 'opt' flag. */
pthread_create(&thread, NULL, threadFn, NULL);
// Simulate an external operation changing the value of opt
// For demonstration purposes, sleep for a short duration before changing the value
sleep(2); /* declared in <unistd.h>; without that include it is implicitly declared */
opt = 0; /* signal the worker to stop */
pthread_join(thread, NULL); /* wait for the worker to observe the flag and exit */
printf("Main thread exiting\n");
return 0;
}
#__DIR__="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#================================================================================
# Utils.sh
# by Ben Younes Ousama <benyounes.ousama@gmail.com>
#================================================================================
# Shell-convention booleans used as function return codes throughout this
# file: 0 means success/true, 1 means failure/false.
declare -r TRUE=0
declare -r FALSE=1
##################################################################
# Purpose: Create a directory (idempotent) and set its ownership
# Arguments:
# $1 (DIR) -> Directory you want to create
# $2 (USER_GROUP) -> user:group owner of the new directory
##################################################################
function create_dir()
{
DIR=$1
USER_GROUP=$2
if [ -d "$DIR" ]; then
# Fixed log typo ("allready" -> "already").
mylog "[INFO] Directory already exists: $DIR"
else
mylog "[INFO] Creating directory: $DIR"
launch_cmd "mkdir -p $DIR"
launch_cmd "chown -R $USER_GROUP $DIR"
fi
}
##################################################################
# Purpose: Log or simply Echo information
# Arguments:
# $1 (INFO) -> String to print
# Uses: LOG_TYPE ('echo' for stdout, anything else appends to
#       $LOG_DIR/$HOST)
##################################################################
function mylog()
{
local INFO=$1
# Quote all expansions: the old unquoted forms broke when LOG_TYPE was
# empty ([ == 'echo' ] is a syntax error) and collapsed whitespace in
# the message via word splitting.
if [ "$LOG_TYPE" == 'echo' ]; then
echo "$INFO"
else
echo -e "$INFO" >> "$LOG_DIR/$HOST"
fi
}
##################################################################
# Purpose: Show given title
# Arguments:
# $1 (TITLE) -> String to print
##################################################################
function show_title()
{
# Print a separator line, then the title, through the shared logger.
mylog "--------------------------------------------"
mylog "$1"
}
##################################################################
# Purpose: Check if user is allow to use this script
# Arguments:
# $1 -> String to convert to lower case
# Behavior: if not run as root, re-executes itself under sudo with the
# same arguments; if invoked via "sudo" while already logged in as root
# (SUDO_USER=root), refuses to run.
##################################################################
function check_sudo ()
{
local CMD=$1
# Not root yet: re-run this script under sudo and exit; the sudo'd copy
# does the real work.
if [ `whoami` != 'root' ]; then
echo "This script is only allowed for superuser."
echo "Enter your password to continue..."
sudo $0 $* || exit 1
exit 0
fi
# Running as root but the invoking account was root itself -- the script
# expects a regular account using sudo (so SUDO_USER identifies them).
if [ "$SUDO_USER" = "root" ]; then
echo "You must start this under your regular user account (not root) using sudo."
echo "Rerun using: sudo $0 $*"
exit 1
fi
}
##################################################################
# Purpose: Launch given command, print it or exit if error occurs
# Arguments:
# $1 (CMD) -> the given command
# Note: the command string is run through eval, so it may contain
# shell syntax (pipes, redirections); callers must pass trusted input.
##################################################################
function launch_cmd()
{
local CMD=$1
now=$(date)
# Log the command (with timestamp) before running it.
mylog "[INFO] [$now] cmd => $CMD"
eval $CMD
retval=$?
# Abort the whole script on the first failing command, propagating
# the command's exit code.
if [ $retval -ne 0 ]; then
mylog "[Error] failed. Exiting..."
exit $retval
fi
}
##################################################################
# Purpose: Return TRUE (0) if $INF exists in $FILE
# Arguments:
# $1 (INF) -> The searched term (matched at line start)
# $2 (FILE) -> The file where we need to search
# $3 (TYPE) -> The object type (group, user, hostile
# Return: TRUE (0) when a line starting with $INF is found,
#         FALSE (1) otherwise
##################################################################
function check_existing_inf()
{
local INF="$1"
local FILE="$2"
# grep -q exits 0 on a match; map that directly onto the file's
# TRUE=0/FALSE=1 convention. (The previous version returned 1 on a
# match, inverting the documented contract.)
grep -q "^${INF}" "$FILE" && return 0 || return 1
}
|
/**
 * Returns true when the string equals its lower-cased form, i.e. it contains
 * no upper-case letters. Digits and punctuation are unchanged by
 * toLowerCase() and therefore count as "lower case".
 * @param {string} str
 * @returns {boolean}
 */
function isLowerCase(str) {
  // Strict equality: both operands are strings here, so === gives identical
  // results to == while avoiding any implicit coercion surprises.
  return str === str.toLowerCase();
}
console.log(isLowerCase("loremipsum"));
def detect_fraud(transactions):
    """Flag transactions that look suspicious relative to the whole batch.

    For every transaction, counts across *all* transactions (the original
    behavior, which includes the transaction itself in its own counts):
    same-location occurrences, much larger amounts, same-merchant
    occurrences, and same-card transactions exactly one hour earlier.
    A transaction is flagged when any count exceeds its threshold.

    :param transactions: list of dicts with keys 'card_number', 'amount',
        'merchant', 'location', 'time' (time is a numeric epoch value)
    :return: list of flagged transaction dicts, in input order
    """
    flagged = []
    for txn in transactions:
        card = txn['card_number']
        amount = txn['amount']
        merchant = txn['merchant']
        location = txn['location']
        ts = txn['time']
        # One pass per signal; generator expressions keep this declarative.
        same_location = sum(1 for p in transactions if p['location'] == location)
        high_amount = sum(1 for p in transactions if p['amount'] > amount * 2)
        same_merchant = sum(1 for p in transactions if p['merchant'] == merchant)
        consecutive = sum(
            1 for p in transactions
            if p['card_number'] == card and p['time'] == ts - 3600
        )
        if (same_location > 5 or high_amount > 2
                or same_merchant > 3 or consecutive > 3):
            flagged.append(txn)
    return flagged
package com.roadmapper.deviceid;
import android.os.AsyncTask;
import android.util.Log;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.util.ArrayList;
/**
 * Background task that searches pdadb.net for a device (params[0] is the
 * search term) and extracts its cellular band lists from the spec table.
 */
public class GetDeviceTask extends AsyncTask<String, Void, String> {
    private static final String TAG = "GETDEVICETASK";
    //private TextView bands_t;

    public GetDeviceTask() {
    }

    /**
     * Returns a comma-separated band string, or a human-readable error
     * message when the site is unreachable or the device is not found.
     */
    @Override
    protected String doInBackground(String... params) {
        Document doc = null;
        try {
            String url = "http://pdadb.net/index.php?m=search&quick=1&exp=" + params[0];
            Log.d(TAG, url);
            doc = Jsoup.connect(url).get();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // Bug fix: the old code dereferenced 'doc' even when the connection
        // failed, causing a NullPointerException instead of an error string.
        if (doc == null) {
            return "Cannot connect to pdadb.net";
        }
        // Bug fix: Jsoup's select() never returns null -- it returns an empty
        // Elements -- so the old 'devices != null' check was always true and
        // the not-found branch was unreachable. first() is null when empty.
        Element device = doc.select("table > tbody > tr > td > h1 > a").first();
        if (device == null) {
            return "Could not find device at pdadb.net";
        }
        Log.d(TAG, device.attr("href"));
        Log.d(TAG, device.attr("abs:href"));
        Document deviceDoc = null;
        try {
            deviceDoc = Jsoup.connect(device.attr("abs:href")).get();
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (deviceDoc == null) {
            return "Cannot connect to pdadb.net";
        }
        // The cell immediately after a "Cellular Networks:" label holds the
        // band list; guard i + 1 against a label being the last cell.
        StringBuilder bandsString = new StringBuilder();
        Elements cells = deviceDoc.select("tr > td");
        for (int i = 0; i + 1 < cells.size(); i++) {
            if (cells.get(i).text().equals("Cellular Networks:")) {
                Log.d(TAG, "" + i);
                Log.d(TAG, cells.get(i + 1).text());
                bandsString.append(cells.get(i + 1).text());
            }
            if (cells.get(i).text().equals("Secondary Cellular Networks:")) {
                Log.d(TAG, "" + i);
                Log.d(TAG, cells.get(i + 1).text());
                bandsString.append(", ").append(cells.get(i + 1).text());
            }
        }
        return bandsString.toString();
    }

    /**
     * Stores the raw band string and parses each comma-separated entry into
     * a Band via the Helper factory methods, dispatching on technology.
     */
    @Override
    protected void onPostExecute(String bands) {
        //bands_t.setText(bands);
        DeviceFragment.insertData("Bands", bands);
        String[] bandsArr = bands.split(",");
        ArrayList<Band> parsedBands = new ArrayList<>();
        for (int i = 0; i < bandsArr.length; i++) {
            Band band = null;
            // Drop the leading space left by splitting on ",".
            bandsArr[i] = bandsArr[i].replaceFirst(" ", "");
            if (bandsArr[i].contains(Helper.Technology.GSM)) {
                band = Helper.createGSM_TDSCDMABand(bandsArr[i], Helper.Technology.GSM);
            } else if (bandsArr[i].contains(Helper.Technology.TD_SCDMA)) {
                band = Helper.createGSM_TDSCDMABand(bandsArr[i], Helper.Technology.TD_SCDMA);
            } else if (bandsArr[i].contains(Helper.Technology.UMTS)) {
                band = Helper.createUMTS_LTEBand(bandsArr[i], Helper.Technology.UMTS);
            } else if (bandsArr[i].contains(Helper.Technology.LTE)) { // Covers TD-LTE and LTE
                band = Helper.createUMTS_LTEBand(bandsArr[i], Helper.Technology.LTE);
            } else if (bandsArr[i].contains(Helper.Technology.CDMA)) {
                band = Helper.createCDMABand(bandsArr[i]);
            }
            // NOTE(review): an unrecognized entry adds a null Band here, as
            // the original code did -- confirm downstream handles nulls.
            parsedBands.add(band);
        }
        Log.d(TAG, parsedBands.toString());
        //DeviceFragment.setBands();
    }
}
|
// <gh_stars>0
// Package inlet_http declares the templated error table for the HTTP inlet.
package inlet_http

import (
	"github.com/gogap/errors"
)

// Namespace under which all inlet_http error codes are registered.
var INLET_HTTP_ERR_NS = "INLET_HTTP"

// Templated errors (errors.TN); the {{.x}} placeholders are filled in by
// callers when the error is instantiated.
var (
	ERR_MESSAGE_ID_IS_EMPTY                = errors.TN(INLET_HTTP_ERR_NS, 1, "message id is empty")
	ERR_PAYLOAD_CHAN_NOT_EXIST             = errors.TN(INLET_HTTP_ERR_NS, 2, "payload chan not exist, message id: {{.id}}")
	ERR_ERROR_CHAN_NOT_EXIST               = errors.TN(INLET_HTTP_ERR_NS, 3, "error chan not exist, message id: {{.id}}")
	ERR_READ_HTTP_BODY_FAILED              = errors.TN(INLET_HTTP_ERR_NS, 4, "read http body failed,error: {{.err}}")
	ERR_UNMARSHAL_HTTP_BODY_FAILED         = errors.TN(INLET_HTTP_ERR_NS, 5, "unmarshal http body failed, error: {{.err}}")
	ERR_PARSE_COMMAND_TO_OBJECT_FAILED     = errors.TN(INLET_HTTP_ERR_NS, 6, "parse command {{.cmd}} error, error: {{.err}}")
	ERR_REQUEST_TIMEOUT                    = errors.TN(INLET_HTTP_ERR_NS, 7, "request timeout, graph: {{.graphName}}, message id: {{.msgId}}")
	ERR_GATHER_RESPONSE_TIMEOUT            = errors.TN(INLET_HTTP_ERR_NS, 8, "gather response timeout, graph: {{.graphName}}")
	ERR_REQUEST_TIMEOUT_VALUE_FORMAT_WRONG = errors.TN(INLET_HTTP_ERR_NS, 9, "request timeout value format wrong, value: {{.value}}")
	ERR_MARSHAL_STAT_DATA_FAILED           = errors.TN(INLET_HTTP_ERR_NS, 10, "marshal stat data failed, err: {{.err}}")
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.