text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
################################################################################
################################################################################
########### Super-Linter (Lint all the code) @admiralawkbar ####################
################################################################################
################################################################################
##################################################################
# Debug Vars #
# Define these early, so we can use debug logging ASAP if needed #
##################################################################
RUN_LOCAL="${RUN_LOCAL}" # Boolean to see if we are running locally
ACTIONS_RUNNER_DEBUG="${ACTIONS_RUNNER_DEBUG:-false}" # Boolean to see even more info (debug)
##################################################################
# Log Vars #
# Define these early, so we can use debug logging ASAP if needed #
##################################################################
LOG_FILE="${LOG_FILE:-super-linter.log}" # Default log file name (located in GITHUB_WORKSPACE folder)
LOG_LEVEL="${LOG_LEVEL:-VERBOSE}" # Default log level (VERBOSE, DEBUG, TRACE)
# GitHub Actions runner debug mode implies at least DEBUG-level logging
if [[ ${ACTIONS_RUNNER_DEBUG} == true ]]; then LOG_LEVEL="DEBUG"; fi
# Boolean to see trace logs ("true" when enabled, empty string otherwise)
LOG_TRACE=$(if [[ ${LOG_LEVEL} == "TRACE" ]]; then echo "true"; fi)
export LOG_TRACE
# Boolean to see debug logs (TRACE implies DEBUG)
LOG_DEBUG=$(if [[ ${LOG_LEVEL} == "DEBUG" || ${LOG_LEVEL} == "TRACE" ]]; then echo "true"; fi)
export LOG_DEBUG
# Boolean to see verbose logs (info function); enabled for every default level
LOG_VERBOSE=$(if [[ ${LOG_LEVEL} == "VERBOSE" || ${LOG_LEVEL} == "DEBUG" || ${LOG_LEVEL} == "TRACE" ]]; then echo "true"; fi)
export LOG_VERBOSE
#########################
# Source Function Files #
#########################
# shellcheck source=/dev/null
source /action/lib/log.sh # Source the function script(s)
# shellcheck source=/dev/null
source /action/lib/buildFileList.sh # Source the function script(s)
# shellcheck source=/dev/null
source /action/lib/validation.sh # Source the function script(s)
# shellcheck source=/dev/null
source /action/lib/worker.sh # Source the function script(s)
###########
# GLOBALS #
###########
# Default Vars
DEFAULT_RULES_LOCATION='/action/lib/.automation' # Default rules files location
GITHUB_API_URL='https://api.github.com' # GitHub API root url
# NOTE: several *_FILE_NAME values below can be overridden via environment
# variables (e.g. CSS_FILE_NAME, MARKDOWN_CONFIG_FILE); the rest are fixed.
# Ansible Vars
ANSIBLE_FILE_NAME='.ansible-lint.yml' # Name of the file
ANSIBLE_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${ANSIBLE_FILE_NAME}" # Path to the Ansible lint rules
# Azure Resource Manager Vars
ARM_FILE_NAME='.arm-ttk.psd1' # Name of the file
ARM_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${ARM_FILE_NAME}" # Path to the ARM lint rules
# Cloudformation Vars
CLOUDFORMATION_FILE_NAME='.cfnlintrc.yml' # Name of the file
CLOUDFORMATION_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${CLOUDFORMATION_FILE_NAME}" # Path to the cloudformation lint rules
# Clojure Vars
CLOJURE_FILE_NAME='.clj-kondo/config.edn' # Name of the file
CLOJURE_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${CLOJURE_FILE_NAME}" # Path to the Clojure lint rules
# Coffee Vars
COFFEESCRIPT_FILE_NAME='.coffee-lint.json' # Name of the file
COFFEESCRIPT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${COFFEESCRIPT_FILE_NAME}" # Path to the coffeescript lint rules
# CSS Vars
CSS_FILE_NAME="${CSS_FILE_NAME:-.stylelintrc.json}" # Name of the file
CSS_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${CSS_FILE_NAME}" # Path to the CSS lint rules
# Dart Vars
DART_FILE_NAME='analysis_options.yml' # Name of the file
DART_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${DART_FILE_NAME}" # Path to the DART lint rules
# Dockerfile Vars
DOCKERFILE_FILE_NAME='.dockerfilelintrc' # Name of the file
DOCKERFILE_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${DOCKERFILE_FILE_NAME}" # Path to the Docker lint rules
# Dockerfile Hadolint Vars
DOCKERFILE_HADOLINT_FILE_NAME="${DOCKERFILE_HADOLINT_FILE_NAME:-.hadolint.yml}" # Name of the file
DOCKERFILE_HADOLINT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${DOCKERFILE_HADOLINT_FILE_NAME}" # Path to the Docker lint rules
# Golang Vars
GO_FILE_NAME='.golangci.yml' # Name of the file
GO_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${GO_FILE_NAME}" # Path to the Go lint rules
# Groovy Vars
GROOVY_FILE_NAME='.groovylintrc.json' # Name of the file
GROOVY_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${GROOVY_FILE_NAME}" # Path to the Groovy lint rules
# HTML Vars
HTML_FILE_NAME='.htmlhintrc' # Name of the file
HTML_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${HTML_FILE_NAME}" # Path to the HTML lint rules
# Java Vars
JAVA_FILE_NAME="sun_checks.xml" # Name of the Java config file
JAVA_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${JAVA_FILE_NAME}" # Path to the Java lint rules
# Javascript Vars
JAVASCRIPT_FILE_NAME="${JAVASCRIPT_ES_CONFIG_FILE:-.eslintrc.yml}" # Name of the file
JAVASCRIPT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${JAVASCRIPT_FILE_NAME}" # Path to the Javascript lint rules
JAVASCRIPT_STANDARD_LINTER_RULES='' # ENV string to pass when running js standard
# Default linter path
LINTER_RULES_PATH="${LINTER_RULES_PATH:-.github/linters}" # Linter Path Directory
# LaTeX Vars
LATEX_FILE_NAME='.chktexrc' # Name of the file
LATEX_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${LATEX_FILE_NAME}" # Path to the Latex lint rules
# Lua Vars
LUA_FILE_NAME='.luacheckrc' # Name of the file
LUA_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${LUA_FILE_NAME}" # Path to the Lua lint rules
# MD Vars
MARKDOWN_FILE_NAME="${MARKDOWN_CONFIG_FILE:-.markdown-lint.yml}" # Name of the file
MARKDOWN_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${MARKDOWN_FILE_NAME}" # Path to the markdown lint rules
# OpenAPI Vars
OPENAPI_FILE_NAME='.openapirc.yml' # Name of the file
OPENAPI_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${OPENAPI_FILE_NAME}" # Path to the OpenAPI lint rules
# PHPCS Vars
# PHP rules prefer a file at the repository root, falling back to the defaults
PHP_PHPCS_FILE_NAME='phpcs.xml' # Name of the file
PHP_PHPCS_LINTER_RULES="${GITHUB_WORKSPACE}/${PHP_PHPCS_FILE_NAME}" # Path to the PHP CodeSniffer lint rules in the repository
if [ ! -f "$PHP_PHPCS_LINTER_RULES" ]; then
PHP_PHPCS_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PHP_PHPCS_FILE_NAME}" # Path to the PHP CodeSniffer lint rules
fi
# PHPStan Vars
PHP_PHPSTAN_FILE_NAME='phpstan.neon' # Name of the file
PHP_PHPSTAN_LINTER_RULES="${GITHUB_WORKSPACE}/${PHP_PHPSTAN_FILE_NAME}" # Path to the PHPStan lint rules in the repository
if [ ! -f "$PHP_PHPSTAN_LINTER_RULES" ]; then
PHP_PHPSTAN_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PHP_PHPSTAN_FILE_NAME}" # Path to the PHPStan lint rules
fi
# Psalm Vars
PHP_PSALM_FILE_NAME='psalm.xml' # Name of the file
PHP_PSALM_LINTER_RULES="${GITHUB_WORKSPACE}/${PHP_PSALM_FILE_NAME}" # Path to the Psalm lint rules in the repository
if [ ! -f "$PHP_PSALM_LINTER_RULES" ]; then
PHP_PSALM_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PHP_PSALM_FILE_NAME}" # Path to the Psalm lint rules
fi
# Powershell Vars
POWERSHELL_FILE_NAME='.powershell-psscriptanalyzer.psd1' # Name of the file
POWERSHELL_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${POWERSHELL_FILE_NAME}" # Path to the Powershell lint rules
# Protocol Buffers Vars
PROTOBUF_FILE_NAME='.protolintrc.yml' # Name of the file
PROTOBUF_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PROTOBUF_FILE_NAME}" # Path to the Protocol Buffers lint rules
# Python Vars
PYTHON_PYLINT_FILE_NAME="${PYTHON_PYLINT_CONFIG_FILE:-.python-lint}" # Name of the file
PYTHON_PYLINT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PYTHON_PYLINT_FILE_NAME}" # Path to the python lint rules
PYTHON_FLAKE8_FILE_NAME="${PYTHON_FLAKE8_CONFIG_FILE:-.flake8}" # Name of the file
PYTHON_FLAKE8_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PYTHON_FLAKE8_FILE_NAME}" # Path to the python lint rules
PYTHON_BLACK_FILE_NAME="${PYTHON_BLACK_CONFIG_FILE:-.python-black}" # Name of the file
PYTHON_BLACK_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${PYTHON_BLACK_FILE_NAME}" # Path to the python lint rules
# R Vars
R_FILE_NAME='.lintr' # Name of the file
R_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${R_FILE_NAME}" # Path to the R lint rules
# Ruby Vars
RUBY_FILE_NAME="${RUBY_CONFIG_FILE:-.ruby-lint.yml}" # Name of the file
RUBY_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${RUBY_FILE_NAME}" # Path to the ruby lint rules
# Snakemake Vars
SNAKEMAKE_SNAKEFMT_FILE_NAME="${SNAKEMAKE_SNAKEFMT_CONFIG_FILE:-.snakefmt.toml}" # Name of the file
SNAKEMAKE_SNAKEFMT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${SNAKEMAKE_SNAKEFMT_FILE_NAME}" # Path to the snakemake lint rules
# SQL Vars
SQL_FILE_NAME=".sql-config.json" # Name of the file
SQL_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${SQL_FILE_NAME}" # Path to the SQL lint rules
# Terraform Vars
TERRAFORM_FILE_NAME='.tflint.hcl' # Name of the file
TERRAFORM_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${TERRAFORM_FILE_NAME}" # Path to the Terraform lint rules
# Typescript Vars
TYPESCRIPT_FILE_NAME="${TYPESCRIPT_ES_CONFIG_FILE:-.eslintrc.yml}" # Name of the file
TYPESCRIPT_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${TYPESCRIPT_FILE_NAME}" # Path to the Typescript lint rules
TYPESCRIPT_STANDARD_LINTER_RULES='' # ENV string to pass when running js standard
# Version File info
VERSION_FILE='/action/lib/linter-versions.txt' # File to store linter versions
# YAML Vars
YAML_FILE_NAME="${YAML_CONFIG_FILE:-.yaml-lint.yml}" # Name of the file
YAML_LINTER_RULES="${DEFAULT_RULES_LOCATION}/${YAML_FILE_NAME}" # Path to the yaml lint rules
#############################
# Language array for prints #
#############################
LANGUAGE_ARRAY=('ANSIBLE' 'ARM' 'BASH' 'BASH_EXEC' 'CLOUDFORMATION' 'CLOJURE' 'COFFEESCRIPT' 'CSHARP' 'CSS'
'DART' 'DOCKERFILE' 'DOCKERFILE_HADOLINT' 'EDITORCONFIG' 'ENV' 'GO' 'GROOVY' 'HTML'
'JAVA' 'JAVASCRIPT_ES' 'JAVASCRIPT_STANDARD' 'JSON' 'JSX' 'KUBERNETES_KUBEVAL' 'KOTLIN' 'LATEX' 'LUA' 'MARKDOWN'
'OPENAPI' 'PERL' 'PHP_BUILTIN' 'PHP_PHPCS' 'PHP_PHPSTAN' 'PHP_PSALM' 'POWERSHELL'
'PROTOBUF' 'PYTHON_BLACK' 'PYTHON_PYLINT' 'PYTHON_FLAKE8' 'R' 'RAKU' 'RUBY' 'SHELL_SHFMT' 'SNAKEMAKE_LINT' 'SNAKEMAKE_SNAKEFMT' 'STATES' 'SQL' 'TERRAFORM'
'TERRAFORM_TERRASCAN' 'TSX' 'TYPESCRIPT_ES' 'TYPESCRIPT_STANDARD' 'XML' 'YAML')
############################################
# Array for all languages that were linted #
############################################
LINTED_LANGUAGES_ARRAY=() # Will be filled at run time with all languages that were linted
###################
# GitHub ENV Vars #
###################
ANSIBLE_DIRECTORY="${ANSIBLE_DIRECTORY}" # Ansible Directory
DEFAULT_BRANCH="${DEFAULT_BRANCH:-master}" # Default Git Branch to use (master by default)
DISABLE_ERRORS="${DISABLE_ERRORS}" # Boolean to enable warning-only output without throwing errors
FILTER_REGEX_INCLUDE="${FILTER_REGEX_INCLUDE}" # RegExp defining which files will be processed by linters (all by default)
FILTER_REGEX_EXCLUDE="${FILTER_REGEX_EXCLUDE}" # RegExp defining which files will be excluded from linting (none by default)
GITHUB_EVENT_PATH="${GITHUB_EVENT_PATH}" # Github Event Path
GITHUB_REPOSITORY="${GITHUB_REPOSITORY}" # GitHub Org/Repo passed from system
GITHUB_RUN_ID="${GITHUB_RUN_ID}" # GitHub Run ID to point to logs
GITHUB_SHA="${GITHUB_SHA}" # GitHub sha from the commit
GITHUB_TOKEN="${GITHUB_TOKEN}" # GitHub Token passed from environment
GITHUB_WORKSPACE="${GITHUB_WORKSPACE}" # Github Workspace
MULTI_STATUS="${MULTI_STATUS:-true}" # Multiple status are created for each check ran
TEST_CASE_RUN="${TEST_CASE_RUN}" # Boolean to validate only test cases
VALIDATE_ALL_CODEBASE="${VALIDATE_ALL_CODEBASE}" # Boolean to validate all files
################
# Default Vars #
################
DEFAULT_VALIDATE_ALL_CODEBASE='true' # Default value for validate all files
DEFAULT_WORKSPACE="${DEFAULT_WORKSPACE:-/tmp/lint}" # Default workspace if running locally
DEFAULT_RUN_LOCAL='false' # Default value for debugging locally
DEFAULT_TEST_CASE_RUN='false' # Flag to tell code to run only test cases
###############################################################
# Default Vars that are called in Subs and need to be ignored #
###############################################################
DEFAULT_DISABLE_ERRORS='false' # Default to enabling errors
export DEFAULT_DISABLE_ERRORS # Workaround SC2034
ERROR_ON_MISSING_EXEC_BIT="${ERROR_ON_MISSING_EXEC_BIT:-false}" # Default to report a warning if a shell script doesn't have the executable bit set to 1
export ERROR_ON_MISSING_EXEC_BIT
RAW_FILE_ARRAY=() # Array of all files that were changed
export RAW_FILE_ARRAY # Workaround SC2034
TEST_CASE_FOLDER='.automation/test' # Folder for test cases we should always ignore
export TEST_CASE_FOLDER # Workaround SC2034
WARNING_ARRAY_TEST=() # Array of warning linters that did not have an expected test result.
export WARNING_ARRAY_TEST # Workaround SC2034
##############
# Format #
##############
OUTPUT_FORMAT="${OUTPUT_FORMAT}" # Output format to be generated. Default none
OUTPUT_FOLDER="${OUTPUT_FOLDER:-super-linter.report}" # Folder where the reports are generated. Default super-linter.report
OUTPUT_DETAILS="${OUTPUT_DETAILS:-simpler}" # What level of details. (simpler or detailed). Default simpler
##########################
# Array of changed files #
##########################
# Dynamically declare one empty FILE_ARRAY_<LANGUAGE> array per supported
# language; eval is needed because the variable name is built at run time
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
FILE_ARRAY_VARIABLE_NAME="FILE_ARRAY_${LANGUAGE}"
debug "Setting ${FILE_ARRAY_VARIABLE_NAME} variable..."
eval "${FILE_ARRAY_VARIABLE_NAME}=()"
done
################################################################################
########################## FUNCTIONS BELOW #####################################
################################################################################
#### Function Header ###########################################################
Header() {
###############################
# Give them the possum action #
###############################
/bin/bash /action/lib/possum.sh
##########
# Prints #
##########
info "---------------------------------------------"
info "--- GitHub Actions Multi Language Linter ----"
info " - Image Creation Date:[${BUILD_DATE}]"
info " - Image Revision:[${BUILD_REVISION}]"
info " - Image Version:[${BUILD_VERSION}]"
info "---------------------------------------------"
info "---------------------------------------------"
info "The Super-Linter source code can be found at:"
info " - https://github.com/github/super-linter"
info "---------------------------------------------"
}
################################################################################
#### Function GetLinterVersions ################################################
GetLinterVersions() {
#########################
# Print version headers #
#########################
debug "---------------------------------------------"
debug "Linter Version Info:"
################################
# Cat the linter versions file #
################################
CAT_CMD=$(cat "${VERSION_FILE}" 2>&1)
#######################
# Load the error code #
#######################
ERROR_CODE=$?
##############################
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failure
warn "Failed to view version file:[${VERSION_FILE}]"
else
# Success
debug "${CAT_CMD}"
fi
#########################
# Print version footers #
#########################
debug "---------------------------------------------"
}
################################################################################
#### Function GetLinterRules ###################################################
GetLinterRules() {
# Need to validate the rules files exist
################
# Pull in vars #
################
LANGUAGE_NAME="${1}" # Name of the language were looking for
debug "Getting linter rules for ${LANGUAGE_NAME}..."
#######################################################
# Need to create the variables for the real variables #
#######################################################
LANGUAGE_FILE_NAME="${LANGUAGE_NAME}_FILE_NAME"
LANGUAGE_LINTER_RULES="${LANGUAGE_NAME}_LINTER_RULES"
debug "Variable names for language file name: ${LANGUAGE_FILE_NAME}, language linter rules: ${LANGUAGE_LINTER_RULES}"
#####################################################
# Check if the language rules variables are defined #
#####################################################
if [ -z "${!LANGUAGE_FILE_NAME+x}" ]; then
debug "${LANGUAGE_FILE_NAME} is not set. Skipping loading rules for ${LANGUAGE_NAME}..."
return
fi
##########################
# Get the file extension #
##########################
FILE_EXTENSION=$(echo "${!LANGUAGE_FILE_NAME}" | rev | cut -d'.' -f1 | rev)
FILE_NAME=$(basename "${!LANGUAGE_FILE_NAME}" ".${FILE_EXTENSION}")
debug "${LANGUAGE_NAME} language rule file (${!LANGUAGE_FILE_NAME}) has ${FILE_NAME} name and ${FILE_EXTENSION} extension"
###############################
# Set the secondary file name #
###############################
SECONDARY_FILE_NAME=''
#################################
# Check for secondary file name #
#################################
if [[ $FILE_EXTENSION == 'yml' ]]; then
# Need to see if yaml also exists
SECONDARY_FILE_NAME="$FILE_NAME.yaml"
elif [[ $FILE_EXTENSION == 'yaml' ]]; then
# need to see if yml also exists
SECONDARY_FILE_NAME="$FILE_NAME.yml"
fi
#####################################
# Validate we have the linter rules #
#####################################
if [ -f "${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${!LANGUAGE_FILE_NAME}" ]; then
info "----------------------------------------------"
info "User provided file:[${!LANGUAGE_FILE_NAME}], setting rules file..."
########################################
# Update the path to the file location #
########################################
eval "${LANGUAGE_LINTER_RULES}=${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${!LANGUAGE_FILE_NAME}"
else
debug " -> Codebase does NOT have file:[${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${!LANGUAGE_FILE_NAME}]"
# Check if we have secondary name to check
if [ -n "$SECONDARY_FILE_NAME" ]; then
debug "${LANGUAGE_NAME} language rule file has a secondary rules file name to check: ${SECONDARY_FILE_NAME}"
# We have a secondary name to validate
if [ -f "${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${SECONDARY_FILE_NAME}" ]; then
info "----------------------------------------------"
info "User provided file:[${SECONDARY_FILE_NAME}], setting rules file..."
########################################
# Update the path to the file location #
########################################
eval "${LANGUAGE_LINTER_RULES}=${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${SECONDARY_FILE_NAME}"
fi
fi
########################################################
# No user default provided, using the template default #
########################################################
debug " -> Codebase does NOT have file:[${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${!LANGUAGE_FILE_NAME}], nor file:[${GITHUB_WORKSPACE}/${LINTER_RULES_PATH}/${SECONDARY_FILE_NAME}], using Default rules at:[${!LANGUAGE_LINTER_RULES}]"
fi
}
################################################################################
#### Function GetStandardRules #################################################
GetStandardRules() {
################
# Pull In Vars #
################
LINTER="${1}" # Type: javascript | typescript
#########################################################################
# Need to get the ENV vars from the linter rules to run in command line #
#########################################################################
# Copy orig IFS to var
ORIG_IFS="${IFS}"
# Set the IFS to newline
IFS=$'\n'
#########################################
# Get list of all environment variables #
#########################################
# Only env vars that are marked as true
GET_ENV_ARRAY=()
if [[ ${LINTER} == "javascript" ]]; then
mapfile -t GET_ENV_ARRAY < <(yq .env "${JAVASCRIPT_LINTER_RULES}" | grep true)
elif [[ ${LINTER} == "typescript" ]]; then
mapfile -t GET_ENV_ARRAY < <(yq .env "${TYPESCRIPT_LINTER_RULES}" | grep true)
fi
#######################
# Load the error code #
#######################
ERROR_CODE=$?
##############################
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# ERROR
error "Failed to gain list of ENV vars to load!"
fatal "[${GET_ENV_ARRAY[*]}]"
fi
##########################
# Set IFS back to normal #
##########################
# Set IFS back to Orig
IFS="${ORIG_IFS}"
######################
# Set the env string #
######################
ENV_STRING=''
#############################
# Pull out the envs to load #
#############################
for ENV in "${GET_ENV_ARRAY[@]}"; do
#############################
# remove spaces from return #
#############################
ENV="$(echo -e "${ENV}" | tr -d '[:space:]')"
################################
# Get the env to add to string #
################################
ENV="$(echo "${ENV}" | cut -d'"' -f2)"
debug "ENV:[${ENV}]"
ENV_STRING+="--env ${ENV} "
done
#########################################
# Remove trailing and ending whitespace #
#########################################
if [[ ${LINTER} == "javascript" ]]; then
JAVASCRIPT_STANDARD_LINTER_RULES="$(echo -e "${ENV_STRING}" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')"
elif [[ ${LINTER} == "typescript" ]]; then
TYPESCRIPT_STANDARD_LINTER_RULES="$(echo -e "${ENV_STRING}" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')"
fi
}
################################################################################
#### Function DetectOpenAPIFile ################################################
DetectOpenAPIFile() {
################
# Pull in vars #
################
FILE="${1}"
###############################
# Check the file for keywords #
###############################
grep -E '"openapi":|"swagger":|^openapi:|^swagger:' "${FILE}" >/dev/null
#######################
# Load the error code #
#######################
ERROR_CODE=$?
##############################
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -eq 0 ]; then
########################
# Found string in file #
########################
return 0
else
###################
# No string match #
###################
return 1
fi
}
################################################################################
#### Function DetectARMFile ####################################################
DetectARMFile() {
################
# Pull in vars #
################
FILE="${1}" # Name of the file/path we are validating
###############################
# Check the file for keywords #
###############################
grep -E 'schema.management.azure.com' "${FILE}" >/dev/null
#######################
# Load the error code #
#######################
ERROR_CODE=$?
##############################
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -eq 0 ]; then
########################
# Found string in file #
########################
return 0
else
###################
# No string match #
###################
return 1
fi
}
################################################################################
#### Function DetectCloudFormationFile #########################################
DetectCloudFormationFile() {
################
# Pull in Vars #
################
FILE="${1}" # File that we need to validate
# https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/template-formats.html
# AWSTemplateFormatVersion is optional
#######################################
# Check if file has AWS Template info #
#######################################
if grep -q 'AWSTemplateFormatVersion' "${FILE}" >/dev/null; then
# Found it
return 0
fi
#####################################
# See if it contains AWS References #
#####################################
if grep -q -E '(AWS|Alexa|Custom)::' "${FILE}" >/dev/null; then
# Found it
return 0
fi
#####################################################
# No identifiers of a CLOUDFORMATION template found #
#####################################################
return 1
}
################################################################################
#### Function DetectKubernetesFile #########################################
DetectKubernetesFile() {
################
# Pull in Vars #
################
FILE="${1}" # File that we need to validate
debug "Checking if ${FILE} is a Kubernetes descriptor..."
if grep -q -E '(apiVersion):' "${FILE}" >/dev/null; then
debug "${FILE} is a Kubernetes descriptor"
return 0
fi
debug "${FILE} is NOT a Kubernetes descriptor"
return 1
}
################################################################################
#### Function DetectAWSStatesFIle ##############################################
DetectAWSStatesFIle() {
################
# Pull in Vars #
################
FILE="${1}" # File that we need to validate
# https://states-language.net/spec.html#example
###############################
# check if file has resources #
###############################
if grep -q '"Resource": *"arn"*' "${FILE}"; then
# Found it
return 0
fi
#################################################
# No identifiers of a AWS States Language found #
#################################################
return 1
}
################################################################################
#### Function GetGitHubVars ####################################################
GetGitHubVars() {
##########
# Prints #
##########
info "--------------------------------------------"
info "Gathering GitHub information..."
###############################
# Get the Run test cases flag #
###############################
if [ -z "${TEST_CASE_RUN}" ]; then
##################################
# No flag passed, set to default #
##################################
TEST_CASE_RUN="${DEFAULT_TEST_CASE_RUN}"
fi
###############################
# Convert string to lowercase #
###############################
TEST_CASE_RUN="${TEST_CASE_RUN,,}"
##########################
# Get the run local flag #
##########################
if [ -z "${RUN_LOCAL}" ]; then
##################################
# No flag passed, set to default #
##################################
RUN_LOCAL="${DEFAULT_RUN_LOCAL}"
fi
###############################
# Convert string to lowercase #
###############################
RUN_LOCAL="${RUN_LOCAL,,}"
#################################
# Check if were running locally #
#################################
if [[ ${RUN_LOCAL} != "false" ]]; then
##########################################
# We are running locally for a debug run #
##########################################
info "NOTE: ENV VAR [RUN_LOCAL] has been set to:[true]"
info "bypassing GitHub Actions variables..."
############################
# Set the GITHUB_WORKSPACE #
############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
GITHUB_WORKSPACE="${DEFAULT_WORKSPACE}"
fi
if [ ! -d "${GITHUB_WORKSPACE}" ]; then
fatal "Provided volume is not a directory!"
fi
################################
# Set the report output folder #
################################
REPORT_OUTPUT_FOLDER="${DEFAULT_WORKSPACE}/${OUTPUT_FOLDER}"
info "Linting all files in mapped directory:[${DEFAULT_WORKSPACE}]"
# No need to touch or set the GITHUB_SHA
# No need to touch or set the GITHUB_EVENT_PATH
# No need to touch or set the GITHUB_ORG
# No need to touch or set the GITHUB_REPO
#################################
# Set the VALIDATE_ALL_CODEBASE #
#################################
VALIDATE_ALL_CODEBASE="${DEFAULT_VALIDATE_ALL_CODEBASE}"
else
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_SHA}" ]; then
error "Failed to get [GITHUB_SHA]!"
fatal "[${GITHUB_SHA}]"
else
info "Successfully found:${F[W]}[GITHUB_SHA]${F[B]}, value:${F[W]}[${GITHUB_SHA}]"
fi
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_WORKSPACE}" ]; then
error "Failed to get [GITHUB_WORKSPACE]!"
fatal "[${GITHUB_WORKSPACE}]"
else
info "Successfully found:${F[W]}[GITHUB_WORKSPACE]${F[B]}, value:${F[W]}[${GITHUB_WORKSPACE}]"
fi
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_EVENT_PATH}" ]; then
error "Failed to get [GITHUB_EVENT_PATH]!"
fatal "[${GITHUB_EVENT_PATH}]"
else
info "Successfully found:${F[W]}[GITHUB_EVENT_PATH]${F[B]}, value:${F[W]}[${GITHUB_EVENT_PATH}]${F[B]}"
fi
##################################################
# Need to pull the GitHub Vars from the env file #
##################################################
######################
# Get the GitHub Org #
######################
GITHUB_ORG=$(jq -r '.repository.owner.login' <"${GITHUB_EVENT_PATH}")
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_ORG}" ]; then
error "Failed to get [GITHUB_ORG]!"
fatal "[${GITHUB_ORG}]"
else
info "Successfully found:${F[W]}[GITHUB_ORG]${F[B]}, value:${F[W]}[${GITHUB_ORG}]"
fi
#######################
# Get the GitHub Repo #
#######################
GITHUB_REPO=$(jq -r '.repository.name' <"${GITHUB_EVENT_PATH}")
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_REPO}" ]; then
error "Failed to get [GITHUB_REPO]!"
fatal "[${GITHUB_REPO}]"
else
info "Successfully found:${F[W]}[GITHUB_REPO]${F[B]}, value:${F[W]}[${GITHUB_REPO}]"
fi
fi
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_TOKEN}" ] && [[ ${RUN_LOCAL} == "false" ]]; then
error "Failed to get [GITHUB_TOKEN]!"
error "[${GITHUB_TOKEN}]"
error "Please set a [GITHUB_TOKEN] from the main workflow environment to take advantage of multiple status reports!"
################################################################################
# Need to set MULTI_STATUS to false as we cant hit API endpoints without token #
################################################################################
MULTI_STATUS='false'
else
info "Successfully found:${F[W]}[GITHUB_TOKEN]"
fi
###############################
# Convert string to lowercase #
###############################
MULTI_STATUS="${MULTI_STATUS,,}"
#######################################################################
# Check to see if the multi status is set, and we have a token to use #
#######################################################################
if [ "${MULTI_STATUS}" == "true" ] && [ -n "${GITHUB_TOKEN}" ]; then
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_REPOSITORY}" ]; then
error "Failed to get [GITHUB_REPOSITORY]!"
fatal "[${GITHUB_REPOSITORY}]"
else
info "Successfully found:${F[W]}[GITHUB_REPOSITORY]${F[B]}, value:${F[W]}[${GITHUB_REPOSITORY}]"
fi
############################
# Validate we have a value #
############################
if [ -z "${GITHUB_RUN_ID}" ]; then
error "Failed to get [GITHUB_RUN_ID]!"
fatal "[${GITHUB_RUN_ID}]"
else
info "Successfully found:${F[W]}[GITHUB_RUN_ID]${F[B]}, value:${F[W]}[${GITHUB_RUN_ID}]"
fi
fi
}
################################################################################
#### Function ValidatePowershellModules ########################################
function ValidatePowershellModules() {
VALIDATE_PSSA_MODULE=$(pwsh -c "(Get-Module -Name PSScriptAnalyzer -ListAvailable | Select-Object -First 1).Name" 2>&1)
# If module found, ensure Invoke-ScriptAnalyzer command is available
if [[ ${VALIDATE_PSSA_MODULE} == "PSScriptAnalyzer" ]]; then
VALIDATE_PSSA_CMD=$(pwsh -c "(Get-Command Invoke-ScriptAnalyzer | Select-Object -First 1).Name" 2>&1)
else
fatal "Failed to find module."
fi
#########################################
# validate we found the script analyzer #
#########################################
if [[ ${VALIDATE_PSSA_CMD} != "Invoke-ScriptAnalyzer" ]]; then
fatal "Failed to find module."
fi
#######################
# Load the error code #
#######################
ERROR_CODE=$?
##############################
# Check the shell for errors #
##############################
if [ ${ERROR_CODE} -ne 0 ]; then
# Failed
error "Failed find module [PSScriptAnalyzer] for [${LINTER_NAME}] in system!"
fatal "[PSSA_MODULE ${VALIDATE_PSSA_MODULE}] [PSSA_CMD ${VALIDATE_PSSA_CMD}]"
else
# Success
debug "Successfully found module ${F[W]}[${VALIDATE_PSSA_MODULE}]${F[B]} in system"
debug "Successfully found command ${F[W]}[${VALIDATE_PSSA_CMD}]${F[B]} in system"
fi
}
################################################################################
#### Function CallStatusAPI ####################################################
CallStatusAPI() {
  # Post a commit status ("--> Linted: <language>") to the GitHub Status API.
  # $1 = language name that was linted, $2 = "success" | "error".
  # No-op unless MULTI_STATUS is true and GITHUB_TOKEN/GITHUB_REPOSITORY are set.
  # API failures are logged but never fail the lint run.
  ####################
  # Pull in the vars #
  ####################
  LANGUAGE="${1}" # language that was validated
  STATUS="${2}" # success | error
  SUCCESS_MSG='No errors were found in the linting process'
  FAIL_MSG='Errors were detected, please view logs'
  MESSAGE='' # Message to send to status API
  ######################################
  # Check the status to create message #
  ######################################
  if [ "${STATUS}" == "success" ]; then
    # Success
    MESSAGE="${SUCCESS_MSG}"
  else
    # Failure
    MESSAGE="${FAIL_MSG}"
  fi
  ###########################################################
  # Check to see if we're enabled for multi status messages #
  ###########################################################
  if [ "${MULTI_STATUS}" == "true" ] && [ -n "${GITHUB_TOKEN}" ] && [ -n "${GITHUB_REPOSITORY}" ]; then
    # Make sure we honor DISABLE_ERRORS: report success even when errors hit
    if [ "${DISABLE_ERRORS}" == "true" ]; then
      STATUS="success"
    fi
    ##############################################
    # Call the status API to create status check #
    ##############################################
    SEND_STATUS_CMD=$(
      curl -f -s -X POST \
        --url "${GITHUB_API_URL}/repos/${GITHUB_REPOSITORY}/statuses/${GITHUB_SHA}" \
        -H 'accept: application/vnd.github.v3+json' \
        -H "authorization: Bearer ${GITHUB_TOKEN}" \
        -H 'content-type: application/json' \
        -d "{ \"state\": \"${STATUS}\",
        \"target_url\": \"https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}\",
        \"description\": \"${MESSAGE}\", \"context\": \"--> Linted: ${LANGUAGE}\"
        }" 2>&1
    )
    #######################
    # Load the error code #
    #######################
    ERROR_CODE=$?
    ##############################
    # Check the shell for errors #
    ##############################
    if [ "${ERROR_CODE}" -ne 0 ]; then
      # ERROR: log it but do not abort the lint run over a status report
      info "ERROR! Failed to call GitHub Status API!"
      info "ERROR:[${SEND_STATUS_CMD}]"
      # Not going to fail the script on this yet...
    fi
  fi
}
################################################################################
#### Function Reports ##########################################################
Reports() {
  # Print a summary of generated report artifacts (when an output format was
  # requested) and warn about test cases missing their comparison file.
  info "----------------------------------------------"
  info "----------------------------------------------"
  info "Generated reports:"
  info "----------------------------------------------"
  info "----------------------------------------------"
  ###################################
  # Prints output report if enabled #
  ###################################
  # Bug fix: gate on OUTPUT_FORMAT, the variable the rest of the script uses
  # to enable report generation (see the folder pre-check in MAIN). The old
  # code tested the never-set FORMAT_REPORT for emptiness, so this branch
  # ran even when no report had been requested.
  if [ -n "${OUTPUT_FORMAT}" ]; then
    info "Reports generated in folder ${REPORT_OUTPUT_FOLDER}"
    #############################################
    # Print info on reports that were generated #
    #############################################
    if [ -d "${REPORT_OUTPUT_FOLDER}" ]; then
      info "Contents of report folder:"
      OUTPUT_CONTENTS_CMD=$(ls "${REPORT_OUTPUT_FOLDER}")
      info "${OUTPUT_CONTENTS_CMD}"
    else
      warn "Report output folder (${REPORT_OUTPUT_FOLDER}) does NOT exist."
    fi
  fi
  ################################
  # Prints for warnings if found #
  ################################
  for TEST in "${WARNING_ARRAY_TEST[@]}"; do
    warn "Expected file to compare with was not found for ${TEST}"
  done
}
################################################################################
#### Function Footer ###########################################################
Footer() {
  # Print the closing summary, send a per-language status to the GitHub
  # Status API, and exit: 0 when clean or DISABLE_ERRORS is set, non-zero
  # (via fatal) when any language reported errors.
  info "----------------------------------------------"
  info "----------------------------------------------"
  info "The script has completed"
  info "----------------------------------------------"
  info "----------------------------------------------"
  ####################################################
  # Need to clean up the language array of duplicates #
  ####################################################
  mapfile -t UNIQUE_LINTED_ARRAY < <(for LANG in "${LINTED_LANGUAGES_ARRAY[@]}"; do echo "${LANG}"; done | sort -u)
  ##############################
  # Prints for errors if found #
  ##############################
  for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
    ###########################
    # Build the error counter #
    ###########################
    # Indirect reference: every language has an ERRORS_FOUND_<LANG> counter
    ERROR_COUNTER="ERRORS_FOUND_${LANGUAGE}"
    ##################
    # Print if not 0 #
    ##################
    if [[ ${!ERROR_COUNTER} -ne 0 ]]; then
      # We found errors in the language
      ###################
      # Print the goods #
      ###################
      error "ERRORS FOUND${NC} in ${LANGUAGE}:[${!ERROR_COUNTER}]"
      #########################################
      # Create status API for Failed language #
      #########################################
      CallStatusAPI "${LANGUAGE}" "error"
    ######################################
    # Check if we validated the language #
    ######################################
    elif [[ ${!ERROR_COUNTER} -eq 0 ]]; then
      # Only report success for languages that actually linted files
      if CheckInArray "${LANGUAGE}"; then
        # No errors found when linting the language
        CallStatusAPI "${LANGUAGE}" "success"
      fi
    fi
  done
  ##################################
  # Exit with 0 if errors disabled #
  ##################################
  if [ "${DISABLE_ERRORS}" == "true" ]; then
    warn "Exiting with exit code:[0] as:[DISABLE_ERRORS] was set to:[${DISABLE_ERRORS}]"
    exit 0
  fi
  ###############################
  # Exit with 1 if errors found #
  ###############################
  # Loop through all languages
  for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
    # Build the indirect counter variable name
    ERRORS_FOUND_LANGUAGE="ERRORS_FOUND_${LANGUAGE}"
    # Check if error was found
    if [[ ${!ERRORS_FOUND_LANGUAGE} -ne 0 ]]; then
      # Failed exit (fatal logs the message and exits non-zero)
      fatal "Exiting with errors found!"
    fi
  done
  ########################
  # Footer prints Exit 0 #
  ########################
  notice "All file(s) linted successfully with no errors detected"
  info "----------------------------------------------"
  # Successful exit
  exit 0
}
################################################################################
#### Function CheckInArray #####################################################
CheckInArray() {
  # Membership test: returns 0 when the given language name appears in
  # UNIQUE_LINTED_ARRAY (i.e. that language was actually linted), 1 otherwise.
  NEEDLE="$1" # Language we need to match
  for LANG in "${UNIQUE_LINTED_ARRAY[@]}"; do
    # Short-circuit out as soon as we hit a match
    [[ ${LANG} != "${NEEDLE}" ]] || return 0
  done
  # Exhausted the array without a match
  return 1
}
################################################################################
#### Function Cleanup ##########################################################
cleanup() {
  # Flush the temp log into the workspace log file and propagate the
  # original exit status. Registered on EXIT and common fatal signals.
  local -ri EXIT_CODE=$?
  # Never let a log-copy failure mask the real exit code
  sh -c "cat ${LOG_TEMP} >> ${GITHUB_WORKSPACE}/${LOG_FILE}" || true
  # Bug fix: reset the traps BEFORE exiting; the original reset them after
  # the exit statement, leaving that line unreachable and the traps armed
  trap - 0 1 2 3 6 14 15
  exit ${EXIT_CODE}
}
trap 'cleanup' 0 1 2 3 6 14 15
################################################################################
############################### MAIN ###########################################
################################################################################
##########
# Header #
##########
Header
#######################
# Get GitHub Env Vars #
#######################
# Need to pull in all the GitHub variables
# needed to connect back and update checks
GetGitHubVars
########################################################
# Initialize variables that depend on GitHub variables #
########################################################
DEFAULT_ANSIBLE_DIRECTORY="${GITHUB_WORKSPACE}/ansible" # Default Ansible Directory
export DEFAULT_ANSIBLE_DIRECTORY # Workaround SC2034
REPORT_OUTPUT_FOLDER="${GITHUB_WORKSPACE}/${OUTPUT_FOLDER}" # Location for the report folder
##############################################################
# check flag for validating the report folder does not exist #
##############################################################
# Bug fix: this check must run AFTER REPORT_OUTPUT_FOLDER is initialized
# above; previously it ran before initialization, so it tested an empty
# path and a pre-existing report folder was never detected
if [ -n "${OUTPUT_FORMAT}" ]; then
  if [ -d "${REPORT_OUTPUT_FOLDER}" ]; then
    error "ERROR! Found ${REPORT_OUTPUT_FOLDER}"
    fatal "Please remove the folder and try again."
  fi
fi
#########################################
# Get the languages we need to validate #
#########################################
GetValidationInfo
########################
# Get the linter rules #
########################
for LANGUAGE in "${LANGUAGE_ARRAY[@]}"; do
  debug "Loading rules for ${LANGUAGE}..."
  # Call the function directly; eval added nothing here and would let shell
  # metacharacters inside LANGUAGE be re-interpreted
  GetLinterRules "${LANGUAGE}"
done
##################################
# Get and print all version info #
##################################
GetLinterVersions
###########################################
# Check to see if this is a test case run #
###########################################
if [[ ${TEST_CASE_RUN} != "false" ]]; then
  #############################################
  # Set the multi status to off for test runs #
  #############################################
  MULTI_STATUS='false'
  ###########################
  # Run only the test cases #
  ###########################
  # Code will exit from inside this loop
  RunTestCases
fi
###########################################
# Build the list of files for each linter #
###########################################
BuildFileList "${VALIDATE_ALL_CODEBASE}"
###################
# ANSIBLE LINTING #
###################
if [ "${VALIDATE_ANSIBLE}" == "true" ]; then
  ##########################
  # Lint the Ansible files #
  ##########################
  # Due to the nature of how we want to validate Ansible, we cannot use the
  # standard loop, since it looks for an ansible folder, excludes certain
  # files, and looks for additional changes, it should be an outlier
  LintAnsibleFiles "${ANSIBLE_LINTER_RULES}" # Passing rules but not needed, dont want to exclude unused var
fi
########################
# ARM Template LINTING #
########################
if [ "${VALIDATE_ARM}" == "true" ]; then
  ###############################
  # Lint the ARM Template files #
  ###############################
  LintCodebase "ARM" "arm-ttk" "Import-Module ${ARM_TTK_PSD1} ; \${config} = \$(Import-PowerShellDataFile -Path ${ARM_LINTER_RULES}) ; Test-AzTemplate @config -TemplatePath" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_ARM[@]}"
fi
################
# BASH LINTING #
################
if [ "${VALIDATE_BASH}" == "true" ]; then
  #######################
  # Lint the bash files #
  #######################
  LintCodebase "BASH" "shellcheck" "shellcheck --color --external-sources" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_BASH[@]}"
fi
#####################
# BASH_EXEC LINTING #
#####################
if [ "${VALIDATE_BASH_EXEC}" == "true" ]; then
  ############################################
  # Check bash files for the executable bit  #
  ############################################
  LintCodebase "BASH_EXEC" "bash-exec" "bash-exec" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_BASH[@]}"
fi
##########################
# CLOUDFORMATION LINTING #
##########################
if [ "${VALIDATE_CLOUDFORMATION}" == "true" ]; then
  #################################
  # Lint the CloudFormation files #
  #################################
  LintCodebase "CLOUDFORMATION" "cfn-lint" "cfn-lint --config-file ${CLOUDFORMATION_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_CLOUDFORMATION[@]}"
fi
###################
# CLOJURE LINTING #
###################
if [ "${VALIDATE_CLOJURE}" == "true" ]; then
  ##############################
  # Get Clojure standard rules #
  ##############################
  GetStandardRules "clj-kondo"
  ##########################
  # Lint the Clojure files #
  ##########################
  LintCodebase "CLOJURE" "clj-kondo" "clj-kondo --config ${CLOJURE_LINTER_RULES} --lint" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_CLOJURE[@]}"
fi
########################
# COFFEESCRIPT LINTING #
########################
if [ "${VALIDATE_COFFEE}" == "true" ]; then
  #########################
  # Lint the coffee files #
  #########################
  LintCodebase "COFFEESCRIPT" "coffeelint" "coffeelint -f ${COFFEESCRIPT_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_COFFEESCRIPT[@]}"
fi
##################
# CSHARP LINTING #
##################
if [ "${VALIDATE_CSHARP}" == "true" ]; then
  #####################
  # Lint the C# files #
  #####################
  LintCodebase "CSHARP" "dotnet-format" "dotnet-format --folder --check --exclude / --include" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_CSHARP[@]}"
fi
###############
# CSS LINTING #
###############
if [ "${VALIDATE_CSS}" == "true" ]; then
  ##########################
  # Get CSS standard rules #
  ##########################
  GetStandardRules "stylelint"
  ######################
  # Lint the CSS files #
  ######################
  LintCodebase "CSS" "stylelint" "stylelint --config ${CSS_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_CSS[@]}"
fi
################
# DART LINTING #
################
if [ "${VALIDATE_DART}" == "true" ]; then
  #######################
  # Lint the Dart files #
  #######################
  LintCodebase "DART" "dart" "dartanalyzer --fatal-infos --fatal-warnings --options ${DART_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_DART[@]}"
fi
##################
# DOCKER LINTING #
##################
if [ "${VALIDATE_DOCKERFILE}" == "true" ]; then
  #########################
  # Lint the docker files #
  #########################
  # NOTE: dockerfilelint's "-c" option expects the folder *containing* the DOCKER_LINTER_RULES file
  # Fix: quote the variable inside the command substitution so rule paths
  # containing spaces survive word splitting
  LintCodebase "DOCKERFILE" "dockerfilelint" "dockerfilelint -c $(dirname "${DOCKERFILE_LINTER_RULES}")" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_DOCKERFILE[@]}"
fi
###########################
# DOCKER LINTING HADOLINT #
###########################
if [ "${VALIDATE_DOCKERFILE_HADOLINT}" == "true" ]; then
  #########################
  # Lint the docker files #
  #########################
  LintCodebase "DOCKERFILE_HADOLINT" "hadolint" "hadolint -c ${DOCKERFILE_HADOLINT_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_DOCKERFILE_HADOLINT[@]}"
fi
########################
# EDITORCONFIG LINTING #
########################
if [ "${VALIDATE_EDITORCONFIG}" == "true" ]; then
  ####################################
  # Lint the files with editorconfig #
  ####################################
  LintCodebase "EDITORCONFIG" "editorconfig-checker" "editorconfig-checker" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_EDITORCONFIG[@]}"
fi
###############
# ENV LINTING #
###############
if [ "${VALIDATE_ENV}" == "true" ]; then
  ######################
  # Lint the env files #
  ######################
  LintCodebase "ENV" "dotenv-linter" "dotenv-linter" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_ENV[@]}"
fi
##################
# GOLANG LINTING #
##################
if [ "${VALIDATE_GO}" == "true" ]; then
  #########################
  # Lint the golang files #
  #########################
  LintCodebase "GO" "golangci-lint" "golangci-lint run -c ${GO_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_GO[@]}"
fi
##################
# GROOVY LINTING #
##################
# Consistency fix: use the braced, quoted "${VAR}" form like every other section
if [ "${VALIDATE_GROOVY}" == "true" ]; then
  #########################
  # Lint the groovy files #
  #########################
  LintCodebase "GROOVY" "npm-groovy-lint" "npm-groovy-lint -c ${GROOVY_LINTER_RULES} --failon warning" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_GROOVY[@]}"
fi
################
# HTML LINTING #
################
if [ "${VALIDATE_HTML}" == "true" ]; then
  ###########################
  # Get HTML standard rules #
  ###########################
  GetStandardRules "htmlhint"
  #######################
  # Lint the HTML files #
  #######################
  LintCodebase "HTML" "htmlhint" "htmlhint --config ${HTML_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_HTML[@]}"
fi
################
# JAVA LINTING #
################
# Consistency fix: use the braced, quoted "${VAR}" form like every other section
if [ "${VALIDATE_JAVA}" == "true" ]; then
  #######################
  # Lint the JAVA files #
  #######################
  LintCodebase "JAVA" "checkstyle" "java -jar /usr/bin/checkstyle -c ${JAVA_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_JAVA[@]}"
fi
######################
# JAVASCRIPT LINTING #
######################
if [ "${VALIDATE_JAVASCRIPT_ES}" == "true" ]; then
  #############################
  # Lint the Javascript files #
  #############################
  LintCodebase "JAVASCRIPT_ES" "eslint" "eslint --no-eslintrc -c ${JAVASCRIPT_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_JAVASCRIPT_ES[@]}"
fi
######################
# JAVASCRIPT LINTING #
######################
if [ "${VALIDATE_JAVASCRIPT_STANDARD}" == "true" ]; then
  #################################
  # Get Javascript standard rules #
  #################################
  GetStandardRules "javascript"
  #############################
  # Lint the Javascript files #
  #############################
  LintCodebase "JAVASCRIPT_STANDARD" "standard" "standard ${JAVASCRIPT_STANDARD_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_JAVASCRIPT_STANDARD[@]}"
fi
################
# JSON LINTING #
################
if [ "${VALIDATE_JSON}" == "true" ]; then
  #######################
  # Lint the json files #
  #######################
  LintCodebase "JSON" "jsonlint" "jsonlint" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_JSON[@]}"
fi
###############
# JSX LINTING #
###############
if [ "${VALIDATE_JSX}" == "true" ]; then
  ######################
  # Lint the JSX files #
  ######################
  LintCodebase "JSX" "eslint" "eslint --no-eslintrc -c ${JAVASCRIPT_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_JSX[@]}"
fi
##################
# KOTLIN LINTING #
##################
if [ "${VALIDATE_KOTLIN}" == "true" ]; then
  #########################
  # Lint the Kotlin files #
  #########################
  LintCodebase "KOTLIN" "ktlint" "ktlint" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_KOTLIN[@]}"
fi
##############################
# KUBERNETES Kubeval LINTING #
##############################
if [ "${VALIDATE_KUBERNETES_KUBEVAL}" == "true" ]; then
  #############################
  # Lint the Kubernetes files #
  #############################
  LintCodebase "KUBERNETES_KUBEVAL" "kubeval" "kubeval --strict" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_KUBERNETES_KUBEVAL[@]}"
fi
#################
# LATEX LINTING #
#################
if [ "${VALIDATE_LATEX}" == "true" ]; then
  ########################
  # Lint the LATEX files #
  ########################
  LintCodebase "LATEX" "chktex" "chktex -q -l ${LATEX_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_LATEX[@]}"
fi
###############
# LUA LINTING #
###############
if [ "${VALIDATE_LUA}" == "true" ]; then
  ######################
  # Lint the Lua files #
  ######################
  LintCodebase "LUA" "lua" "luacheck --config ${LUA_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_LUA[@]}"
fi
####################
# MARKDOWN LINTING #
####################
if [ "${VALIDATE_MARKDOWN}" == "true" ]; then
  ###########################
  # Lint the Markdown Files #
  ###########################
  LintCodebase "MARKDOWN" "markdownlint" "markdownlint -c ${MARKDOWN_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_MARKDOWN[@]}"
fi
###################
# OPENAPI LINTING #
###################
if [ "${VALIDATE_OPENAPI}" == "true" ]; then
  ##########################
  # Lint the OpenAPI files #
  ##########################
  LintCodebase "OPENAPI" "spectral" "spectral lint -r ${OPENAPI_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_OPENAPI[@]}"
fi
################
# PERL LINTING #
################
if [ "${VALIDATE_PERL}" == "true" ]; then
  #######################
  # Lint the perl files #
  #######################
  LintCodebase "PERL" "perl" "perlcritic" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PERL[@]}"
fi
###############
# PHP LINTING #
###############
if [ "${VALIDATE_PHP_BUILTIN}" == "true" ]; then
  ################################################
  # Lint the PHP files using built-in PHP linter #
  ################################################
  LintCodebase "PHP_BUILTIN" "php" "php -l" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PHP_BUILTIN[@]}"
fi
if [ "${VALIDATE_PHP_PHPCS}" == "true" ]; then
  ############################################
  # Lint the PHP files using PHP CodeSniffer #
  ############################################
  LintCodebase "PHP_PHPCS" "phpcs" "phpcs --standard=${PHP_PHPCS_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PHP_PHPCS[@]}"
fi
if [ "${VALIDATE_PHP_PHPSTAN}" == "true" ]; then
  ####################################
  # Lint the PHP files using PHPStan #
  ####################################
  LintCodebase "PHP_PHPSTAN" "phpstan" "phpstan analyse --no-progress --no-ansi -c ${PHP_PHPSTAN_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PHP_PHPSTAN[@]}"
fi
if [ "${VALIDATE_PHP_PSALM}" == "true" ]; then
  ##################################
  # Lint the PHP files using Psalm #
  ##################################
  LintCodebase "PHP_PSALM" "psalm" "psalm --config=${PHP_PSALM_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PHP_PSALM[@]}"
fi
######################
# POWERSHELL LINTING #
######################
if [ "${VALIDATE_POWERSHELL}" == "true" ]; then
  ##############################################################
  # For POWERSHELL, ensure PSScriptAnalyzer module is available #
  ##############################################################
  ValidatePowershellModules
  #############################
  # Lint the powershell files #
  #############################
  LintCodebase "POWERSHELL" "pwsh" "Invoke-ScriptAnalyzer -EnableExit -Settings ${POWERSHELL_LINTER_RULES} -Path" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_POWERSHELL[@]}"
fi
####################
# PROTOBUF LINTING #
####################
if [ "${VALIDATE_PROTOBUF}" == "true" ]; then
  ###################################
  # Lint the Protocol Buffers files #
  ###################################
  LintCodebase "PROTOBUF" "protolint" "protolint lint --config_path ${PROTOBUF_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PROTOBUF[@]}"
fi
########################
# PYTHON BLACK LINTING #
########################
if [ "${VALIDATE_PYTHON_BLACK}" == "true" ]; then
  #########################
  # Lint the python files #
  #########################
  LintCodebase "PYTHON_BLACK" "black" "black --config ${PYTHON_BLACK_LINTER_RULES} --diff --check" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PYTHON_BLACK[@]}"
fi
#########################
# PYTHON PYLINT LINTING #
#########################
if [ "${VALIDATE_PYTHON_PYLINT}" == "true" ]; then
  #########################
  # Lint the python files #
  #########################
  LintCodebase "PYTHON_PYLINT" "pylint" "pylint --rcfile ${PYTHON_PYLINT_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PYTHON_PYLINT[@]}"
fi
#########################
# PYTHON FLAKE8 LINTING #
#########################
if [ "${VALIDATE_PYTHON_FLAKE8}" == "true" ]; then
  #########################
  # Lint the python files #
  #########################
  LintCodebase "PYTHON_FLAKE8" "flake8" "flake8 --config=${PYTHON_FLAKE8_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_PYTHON_FLAKE8[@]}"
fi
#############
# R LINTING #
#############
if [ "${VALIDATE_R}" == "true" ]; then
  ##########################
  # Check for local config #
  ##########################
  # shellcheck disable=SC2153
  if [ ! -f "${GITHUB_WORKSPACE}/.lintr" ] && ((${#FILE_ARRAY_R[@]})); then
    info "No .lintr configuration file found, using defaults."
    # Fix: quote both paths so rule files / workspaces with spaces copy correctly
    cp "${R_LINTER_RULES}" "${GITHUB_WORKSPACE}"
  fi
  ####################
  # Lint the R files #
  ####################
  LintCodebase "R" "lintr" "lintr::lint(File)" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_R[@]}"
fi
################
# RAKU LINTING #
################
if [ "${VALIDATE_RAKU}" == "true" ]; then
  #######################
  # Lint the raku files #
  #######################
  # Install module dependencies first when the project ships a META6.json
  if [ -e "${GITHUB_WORKSPACE}/META6.json" ]; then
    cd "${GITHUB_WORKSPACE}" && zef install --deps-only --/test .
  fi
  LintCodebase "RAKU" "raku" "raku -I ${GITHUB_WORKSPACE}/lib -c" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_RAKU[@]}"
fi
################
# RUBY LINTING #
################
if [ "${VALIDATE_RUBY}" == "true" ]; then
  #######################
  # Lint the ruby files #
  #######################
  LintCodebase "RUBY" "rubocop" "rubocop -c ${RUBY_LINTER_RULES} --force-exclusion" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_RUBY[@]}"
fi
#################
# SHFMT LINTING #
#################
if [ "${VALIDATE_SHELL_SHFMT}" == "true" ]; then
  #############################
  # Lint the files with shfmt #
  #############################
  # shfmt takes its formatting settings from .editorconfig, so skip when absent
  EDITORCONFIG_FILE_PATH="${GITHUB_WORKSPACE}/.editorconfig"
  if [ -e "${EDITORCONFIG_FILE_PATH}" ]; then
    LintCodebase "SHELL_SHFMT" "shfmt" "shfmt -d" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_SHELL_SHFMT[@]}"
  else
    ###############################
    # No .editorconfig file found #
    ###############################
    warn "No .editorconfig found at:[${EDITORCONFIG_FILE_PATH}]"
    debug "skipping shfmt"
  fi
fi
##################
# SNAKEMAKE LINT #
##################
if [ "${VALIDATE_SNAKEMAKE_LINT}" == "true" ]; then
  ############################
  # Lint the Snakemake files #
  ############################
  LintCodebase "SNAKEMAKE_LINT" "snakemake" "snakemake --lint -s" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_SNAKEMAKE_LINT[@]}"
fi
######################
# SNAKEMAKE SNAKEFMT #
######################
if [ "${VALIDATE_SNAKEMAKE_SNAKEFMT}" == "true" ]; then
  #######################################
  # Format-check Snakemake via snakefmt #
  #######################################
  LintCodebase "SNAKEMAKE_SNAKEFMT" "snakefmt" "snakefmt --config ${SNAKEMAKE_SNAKEFMT_LINTER_RULES} --check --compact-diff" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_SNAKEMAKE_SNAKEFMT[@]}"
fi
######################
# AWS STATES LINTING #
######################
if [ "${VALIDATE_STATES}" == "true" ]; then
  #########################
  # Lint the STATES files #
  #########################
  LintCodebase "STATES" "asl-validator" "asl-validator --json-path" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_STATES[@]}"
fi
###############
# SQL LINTING #
###############
if [ "${VALIDATE_SQL}" == "true" ]; then
  ######################
  # Lint the SQL files #
  ######################
  LintCodebase "SQL" "sql-lint" "sql-lint --config ${SQL_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_SQL[@]}"
fi
#####################
# TERRAFORM LINTING #
#####################
if [[ "${VALIDATE_TERRAFORM}" == "true" ]]; then
  # Run tflint with the configured rules against the Terraform file list
  LintCodebase "TERRAFORM" "tflint" "tflint -c ${TERRAFORM_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_TERRAFORM[@]}"
fi
###############################
# TERRAFORM TERRASCAN LINTING #
###############################
if [[ "${VALIDATE_TERRAFORM_TERRASCAN}" == "true" ]]; then
  # Run terrascan policy scanning against the Terraform file list
  LintCodebase "TERRAFORM_TERRASCAN" "terrascan" "terrascan scan -p /root/.terrascan/pkg/policies/opa/rego/ -t aws -f " "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_TERRAFORM_TERRASCAN[@]}"
fi
###############
# TSX LINTING #
###############
if [[ "${VALIDATE_TSX}" == "true" ]]; then
  # Run eslint with the TypeScript rule file against TSX sources
  LintCodebase "TSX" "eslint" "eslint --no-eslintrc -c ${TYPESCRIPT_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_TSX[@]}"
fi
######################
# TYPESCRIPT LINTING #
######################
if [[ "${VALIDATE_TYPESCRIPT_ES}" == "true" ]]; then
  # Run eslint with the TypeScript rule file against TypeScript sources
  LintCodebase "TYPESCRIPT_ES" "eslint" "eslint --no-eslintrc -c ${TYPESCRIPT_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_TYPESCRIPT_ES[@]}"
fi
######################
# TYPESCRIPT LINTING #
######################
if [[ "${VALIDATE_TYPESCRIPT_STANDARD}" == "true" ]]; then
  # Pull the standardjs rules for TypeScript first, then lint
  GetStandardRules "typescript"
  LintCodebase "TYPESCRIPT_STANDARD" "standard" "standard --parser @typescript-eslint/parser --plugin @typescript-eslint/eslint-plugin ${TYPESCRIPT_STANDARD_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_TYPESCRIPT_STANDARD[@]}"
fi
###############
# XML LINTING #
###############
if [[ "${VALIDATE_XML}" == "true" ]]; then
  # Run xmllint against the XML file list
  LintCodebase "XML" "xmllint" "xmllint" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_XML[@]}"
fi
################
# YAML LINTING #
################
if [[ "${VALIDATE_YAML}" == "true" ]]; then
  # Run yamllint with the configured rules against the YAML file list
  LintCodebase "YAML" "yamllint" "yamllint -c ${YAML_LINTER_RULES}" "${FILTER_REGEX_INCLUDE}" "${FILTER_REGEX_EXCLUDE}" "${FILE_ARRAY_YAML[@]}"
fi
###########
# Reports #
###########
Reports
##########
# Footer #
##########
Footer
|
package cbedoy.cblibrary.widgets;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.view.MotionEvent;
import android.view.View;
import android.widget.EditText;
/**
* Created by <NAME> on 28/12/2014.
*
* Mobile App Developer
* CBLibrary
*
* E-mail: <EMAIL>
* Facebook: https://www.facebook.com/carlos.bedoy
* Github: https://github.com/cbedoy
*/
public class EditTextWithClose extends EditText implements View.OnTouchListener, View.OnFocusChangeListener, TextWatcherAdapter.ITextWatcherListener {
public interface Listener {
void didClearText();
}
public void setListener(Listener listener) {
this.listener = listener;
}
private Drawable drawable;
private Listener listener;
public EditTextWithClose(Context context) {
super(context);
init();
}
@Override
public void setOnTouchListener(OnTouchListener touchListener) {
this.onTouchListener = touchListener;
}
@Override
public void setOnFocusChangeListener(OnFocusChangeListener focusChangeListener) {
this.onFocusChangeListener = focusChangeListener;
}
private OnTouchListener onTouchListener;
private OnFocusChangeListener onFocusChangeListener;
@Override
public boolean onTouch(View v, MotionEvent event) {
if (getCompoundDrawables()[2] != null) {
boolean tappedX = event.getX() > (getWidth() - getPaddingRight() - drawable.getIntrinsicWidth());
if (tappedX) {
if (event.getAction() == MotionEvent.ACTION_UP) {
setText("");
if (listener != null) {
listener.didClearText();
}
}
return true;
}
}
if (onTouchListener != null) {
return onTouchListener.onTouch(v, event);
}
return false;
}
@Override
public void onFocusChange(View v, boolean hasFocus) {
if (hasFocus) {
setClearIconVisible(isNotEmpty(getText()));
} else {
setClearIconVisible(false);
}
if (onFocusChangeListener != null) {
onFocusChangeListener.onFocusChange(v, hasFocus);
}
}
@Override
public void onTextChanged(EditText view, String text) {
if (isFocused()) {
setClearIconVisible(isNotEmpty(text));
}
}
private boolean isNotEmpty(CharSequence str) {
return !isEmpty(str);
}
private boolean isEmpty(CharSequence str) {
return str == null || str.length() == 0;
}
private void init() {
drawable = getCompoundDrawables()[2];
if (drawable == null) {
drawable = getResources()
.getDrawable(android.R.drawable.presence_offline);
}
int h = (int) (drawable.getIntrinsicHeight() * 1);
int w = (int) (drawable.getIntrinsicHeight() * 1);
drawable.setBounds(0, 0, h, w);
setClearIconVisible(false);
super.setOnTouchListener(this);
super.setOnFocusChangeListener(this);
addTextChangedListener(new TextWatcherAdapter(this, this));
}
protected void setClearIconVisible(boolean visible) {
Drawable x = visible ? drawable : null;
setCompoundDrawables(getCompoundDrawables()[0],
getCompoundDrawables()[1], x, getCompoundDrawables()[3]);
}
} |
require 'update_in_batches'

# Backfills progressed_at/finished_at on existing LibraryEntry rows using raw
# SQL in batches, so the migration never instantiates every record.
class BackfillLibraryTimestamps < ActiveRecord::Migration
  # Refinement that adds #update_in_batches to ActiveRecord relations.
  using UpdateInBatches
  # Batched updates manage their own transactions; a single DDL transaction
  # around the whole migration would hold locks for the entire backfill.
  self.disable_ddl_transaction!

  def change
    # Entries with any progress were last touched when they progressed, so
    # reuse updated_at; completed entries additionally get finished_at.
    LibraryEntry.where("progress > 0").update_in_batches(<<-SQL)
      progressed_at = updated_at,
      finished_at = CASE WHEN status = #{LibraryEntry.statuses[:completed]} THEN updated_at END
    SQL
  end
end
|
<filename>config.js
if (process.env.NODE_ENV !== 'production') {
require('dotenv').load();
}
var config = {
pb: {
app_id: process.env.PB_APP_ID,
user_key: process.env.PB_USER_KEY,
botname: process.env.PB_BOTNAME,
url: process.env.PB_URL
},
telegram: {
token: process.env.TELEGRAM_TOKEN
},
redis: {
url: process.env.REDISTOGO_URL
}
};
module.exports = config;
|
import smbus
import paho.mqtt.publish as publish
def read_light(mode, sensor_address, bus, broker, port, topic):
    """Trigger a BH1750 light measurement and publish the raw reading via MQTT.

    Args:
        mode: BH1750 measurement-mode opcode (one of the constants below).
        sensor_address: I2C address of the sensor.
        bus: an open smbus.SMBus instance.
        broker: MQTT broker hostname.
        port: MQTT broker port.
        topic: MQTT topic the reading is published to.

    Raises:
        ValueError: if ``mode`` is not a recognised measurement mode.
    """
    # BH1750 measurement-mode opcodes (see the BH1750 datasheet instruction set).
    # FIX: these names were previously referenced without ever being defined,
    # which raised NameError on every call.
    CONTINUOUS_HIGH_RES_MODE_1 = 0x10
    CONTINUOUS_HIGH_RES_MODE_2 = 0x11
    ONE_TIME_HIGH_RES_MODE_1 = 0x20
    ONE_TIME_HIGH_RES_MODE_2 = 0x21
    ONE_TIME_LOW_RES_MODE = 0x23
    valid_modes = {
        CONTINUOUS_HIGH_RES_MODE_1,
        CONTINUOUS_HIGH_RES_MODE_2,
        ONE_TIME_HIGH_RES_MODE_1,
        ONE_TIME_HIGH_RES_MODE_2,
        ONE_TIME_LOW_RES_MODE,
    }

    # Set the measurement mode
    if mode not in valid_modes:
        raise ValueError("Invalid measurement mode")
    bus.write_byte(sensor_address, mode)

    # Read the light intensity (raw 16-bit word from register 0)
    light_intensity = bus.read_word_data(sensor_address, 0)

    # Publish the data to the MQTT broker
    publish.single(topic, str(light_intensity), hostname=broker, port=port)
import React, { useEffect, useState } from 'react';
import Layout from "../components/layout";
import CopperImageSection from "../components/CopperImageSection";
import FillerImageSection from "../components/FillerImageSection";
import ProductCards from "../components/ProductCards";
import HomeSlider from "../components/HomeSlider";
import SmallIconBoxesSection from '../components/SmallIconBoxesSection'
import SimpleMap from '../components/SimpleMap';
import Button from '../components/Button';
import InfoSection from "../components/InfoSection";
import Seo from '../components/seo';
import { inject, observer } from "mobx-react";
// UA-213886539-1
const IndexPage = (observer(({ store }) => {
const [pageLoaded, setPageLoaded] = useState(false);
useEffect(() => {
store.closeMenu();
}, []);
useEffect(() => {
setTimeout(() => {
setPageLoaded(true)
}, 1400)
}, [])
return (
<Layout>
<Seo title='Početna' />
<HomeSlider />
<InfoSection />
<SmallIconBoxesSection />
<FillerImageSection />
<ProductCards />
<div className="projects-showcase">
<div className="projects-showcase-title">
<p>Projekti</p>
<div className="title-dash" />
</div>
<div className="projects-showcase-expl">
<p>Od 2004. godine smo realizovali preko 70 poslova u 11 država sveta i 30 gradova.</p>
<p
style={{ marginTop: '28px' }}
className="mri-intro"
>
Licencirani smo za rad sa najvećim MRI proizvođačima:
</p>
<p className="mri-manufacturers">Phillips, GE, Neusoft, Canon, Siemens</p>
<p style={{ marginTop: '23px' }}>(kliknite na pin na mapi za više informacija o poslovima na toj lokaciji)</p>
</div>
{pageLoaded ? <SimpleMap /> : <div style={{ height: '220px', flexDirection: 'column' }} className="loader-wrapper"> <div style={{ width: '61px', height: '61px' }} class="loader"></div><p style={{ marginTop: '20px' }}>Mapa se učitava</p></div>}
<div className="projects-showcase-expl">
<p>Više detalja o projektima i referentnu listu možete videti na stranici Projekti</p>
</div>
<Button link='/projects' size='medium' text='Projekti' />
</div>
<CopperImageSection />
</Layout>
);
}));
export default inject('store')(IndexPage);
|
#!/bin/sh
# Copyright (c) 2014-2015 The Pocketcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C

# Allow the caller to point at a specific osslsigncode binary.
if [ -z "$OSSLSIGNCODE" ]; then
  OSSLSIGNCODE=osslsigncode
fi

# At least one argument (the signing-key options) is required.
if [ ! -n "$1" ]; then
  echo "usage: $0 <osslcodesign args>"
  echo "example: $0 -key codesign.key"
  exit 1
fi

OUT=signature-win.tar.gz
SRCDIR=unsigned
WORKDIR=./.tmp
OUTDIR="${WORKDIR}/out"
OUTSUBDIR="${OUTDIR}/win"
TIMESERVER=http://timestamp.comodoca.com
CERTFILE="win-codesign.cert"

mkdir -p "${OUTSUBDIR}"
# Sign every *-unsigned.exe, then keep only its detached PEM signature.
# NOTE(review): the backtick `ls` breaks on filenames with whitespace; fine for
# the fixed *-unsigned.exe names used here, but confirm if names ever change.
basename -a `ls -1 "${SRCDIR}"/*-unsigned.exe` | while read UNSIGNED; do
  echo Signing "${UNSIGNED}"
  "${OSSLSIGNCODE}" sign -certs "${CERTFILE}" -t "${TIMESERVER}" -in "${SRCDIR}/${UNSIGNED}" -out "${WORKDIR}/${UNSIGNED}" "$@"
  # Extract the signature; drop the signed binary once extraction succeeds.
  "${OSSLSIGNCODE}" extract-signature -pem -in "${WORKDIR}/${UNSIGNED}" -out "${OUTSUBDIR}/${UNSIGNED}.pem" && rm "${WORKDIR}/${UNSIGNED}"
done

rm -f "${OUT}"
# Bundle all extracted signatures for distribution.
tar -C "${OUTDIR}" -czf "${OUT}" .
rm -rf "${WORKDIR}"
echo "Created ${OUT}"
|
#!/bin/bash
# Run the ICoCo (version 2) unit test against the built install tree.

# Absolute directory containing this script (quoted so paths with spaces work).
ze_dir="$(readlink -f "$(dirname "$0")")"

# Environment for TRUST/MEDCoupling (expects TRUST_MEDCOUPLING_ROOT to be set).
source "$TRUST_MEDCOUPLING_ROOT/env.sh"
export PYTHONPATH="$ze_dir/install/lib:$PYTHONPATH"

# Fail early if the build/test directory is missing instead of running the test
# from the wrong place.
cd build/test || exit 255

echo "Testing ICoCo (version 2) ..."
# Test the command's exit status directly instead of inspecting $? afterwards.
if ! python test_trusticoco.py 1>test_trusticoco.log 2>&1; then
    echo Failed!
    exit 255
fi
echo All OK.
|
#ifndef NETCOWORKER_H
#define NETCOWORKER_H

#include "message.h"

#include <QObject>
#include <QDataStream>

class NetCoworkFactory;

// Abstract base for objects that exchange function-call Messages through a
// NetCoworkFactory (see handle_call / send_func_call). Each instance is
// identified by its factory's class id plus an object id.
class NetCoworker : public QObject
{
    Q_OBJECT
public:
    // object_id defaults to UINT32_MAX, i.e. "not yet assigned".
    explicit NetCoworker(const NetCoworkFactory* _factory, uint32_t object_id = UINT32_MAX);

    // Dispatches an incoming call message; implemented by concrete coworkers.
    virtual void handle_call(Message& msg) = 0;

    uint32_t get_class_id() const;
    uint32_t get_object_id() const;
    const NetCoworkFactory* get_factory() const;

protected:
    // Sends an outgoing function-call message.
    void send_func_call(Message& msg) const;

private:
    // Only NetCoworkProvider may (re)assign object ids.
    friend class NetCoworkProvider;
    void set_object_id(uint32_t new_object_id);

private:
    const NetCoworkFactory* factory;
    uint32_t object_id = UINT32_MAX;
};

#endif // NETCOWORKER_H
|
import { v4 } from 'uuid';
import Painter from './Painter';
class Store {
constructor() {
this.messages = {};
}
addMessage(message, time) {
const id = v4();
const messageObj = {
id,
message,
time: Number(time),
};
this.messages[id] = messageObj;
const targetIndex = this.getIndexOfTarget(id);
Painter.addNewMessage(messageObj, targetIndex, this);
}
deleteMessage(id) {
const targetIndex = this.getIndexOfTarget(id);
Painter.removeMessage(targetIndex);
delete this.messages[id];
}
updateTime(id, addTime) {
const currentTime = this.messages[id].time;
let nextTime;
if (parseInt(addTime) === Number(addTime)) {
nextTime = currentTime + parseInt(addTime)
} else { // n-times calculation
nextTime = currentTime * parseInt(addTime)
}
if (nextTime <= 0) {
this.deleteMessage(id);
return;
}
const prevIndex = this.getIndexOfTarget(id);
this.messages[id].time = nextTime;
const nextIndex = this.getIndexOfTarget(id);
Painter.updateTime(prevIndex, nextIndex, nextTime);
}
tickTime() {
Object.keys(this.messages).forEach((v) => {
this.messages[v].time -= 1;
}); // edit
Object.keys(this.messages).forEach((v) => {
this.updateTime(v, 0);
});
}
getIndexOfTarget(id) {
const orderedList = this.getOrderedList();
return orderedList.indexOf(id);
}
getOrderedList() {
const { messages } = this;
return Object.keys(this.messages).sort((a, b) => messages[b].time - messages[a].time);
}
}
export default Store;
|
import type { IncomingMessage, ServerResponse } from 'node:http';
import { RequestCapability, RequestContext, RequestHandler } from '../core';
import type { HttpMeans } from './http.means';
/**
 * HTTP middleware signature.
 *
 * This is a [Connect]-style middleware.
 *
 * [Connect]: https://github.com/senchalabs/connect
 *
 * @typeParam TRequest - Supported HTTP request type.
 * @typeParam TResponse - Supported HTTP response type.
 */
export type Middleware<
  TRequest extends IncomingMessage = IncomingMessage,
  TResponse extends ServerResponse = ServerResponse,
> =
  /**
   * @param request - HTTP request.
   * @param response - HTTP response.
   * @param next - Next function to delegate request processing to or report error with.
   */
  (
    this: void,
    request: TRequest,
    response: TResponse,
    next: Middleware.Next,
  ) => void;
export namespace Middleware {
  /**
   * A signature of the function the {@link Middleware middleware} may call to delegate request processing
   * or report error with.
   */
  export type Next =
    /**
     * @param error - Either an error to report, or nothing to delegate request processing to the next
     * handler.
     */
    (this: void, error?: any) => void;
}
/**
 * Involves the given `middleware` into HTTP request processing.
 *
 * @typeParam TInput - A type of input HTTP request processing means.
 * @param middleware - Middleware to apply.
 *
 * @returns New request processing capability that processes HTTP requests by the given `middleware`.
 */
export function middleware<TInput extends HttpMeans>(
  middleware: Middleware<TInput['request'], TInput['response']>,
): RequestCapability<TInput> {
  return RequestCapability.of(
    <TMeans extends TInput>(handler: RequestHandler<TMeans>) => async (
      { request, response, next }: RequestContext<TMeans>,
    ) => new Promise<void>((resolve, reject) => {
      // Adapter from the Connect-style callback to the promise-based pipeline:
      // calling it with an error rejects; calling it with no argument delegates
      // to `handler` through `next` and resolves once that completes.
      const mdNext = (error?: any): void => {
        if (error !== undefined) {
          reject(error);
        } else {
          next(handler).then(() => resolve(), reject);
        }
      };

      middleware(request, response, mdNext);
    }),
  );
}
|
package com.renrenbit.rrwallet.utils;
import android.text.TextUtils;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* Created by jackQ on 2018/6/12.
*/
/**
 * Utility for computing lowercase hexadecimal MD5 digests of strings.
 *
 * <p>Note: MD5 is cryptographically broken; use only for non-security purposes
 * such as cache keys or checksums.</p>
 */
public class Md5 {

    /**
     * Returns the MD5 digest of {@code str} as a 32-character lowercase hex string.
     * Returns the empty string for {@code null} or empty input.
     */
    public static String md5(String str) {
        // Plain null/empty check (instead of Android's TextUtils.isEmpty) so the
        // class has no framework dependency; behavior is identical.
        if (str == null || str.isEmpty()) {
            return "";
        }
        try {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            // FIX: hash the UTF-8 bytes explicitly. The previous str.getBytes()
            // used the platform default charset, producing different digests for
            // non-ASCII input across platforms.
            byte[] bytes = md5.digest(str.getBytes(StandardCharsets.UTF_8));
            StringBuilder result = new StringBuilder(bytes.length * 2);
            for (byte b : bytes) {
                String temp = Integer.toHexString(b & 0xff);
                if (temp.length() == 1) {
                    // Left-pad so every byte renders as exactly two hex characters.
                    temp = "0" + temp;
                }
                result.append(temp);
            }
            return result.toString();
        } catch (NoSuchAlgorithmException e) {
            // Effectively unreachable: MD5 is a required MessageDigest algorithm
            // on conforming JVMs.
            e.printStackTrace();
        }
        return "";
    }
}
|
# GENERATED - DO NOT EDIT
# NOTE(review): this file is emitted by a generator; lasting changes belong in
# the template that produces it. Comments below are review annotations only.
source matchers/toMatch.sh

# toMatch must reject a call with no pattern arguments and explain why on stderr.
@spec.toMatch.wrong_number_of_arguments() {
  refute run [[ expect "Hello" toMatch ]]
  assert [ -z "$STDOUT" ]
  assert [ "$STDERR" = "toMatch expects at least 1 argument (BASH regex patterns), received 0 []" ]
}

# Positive matching against a literal value, a { } command block, and a
# {{ }} subshell block; failures must explain the text and failing pattern.
@spec.toMatch() {
  assert run [[ expect "Hello there 123" toMatch "Hello" 'there[[:space:]][0-9][0-9][0-9]' ]]
  assert [ -z "$STDOUT" ]
  assert [ -z "$STDERR" ]
  refute run [[ expect "Hello there 123" toMatch "Hello" 'there[[:space:]][0-9][0-9][0-9]' 'foo' ]]
  assert [ -z "$STDOUT" ]
  assert stderrContains "Expected result to match"
  assert stderrContains "Actual text: 'Hello there 123'"
  assert stderrContains "Pattern: 'foo'"
  assert run [[ expect { echo "Hello there 123" } toMatch "Hello" 'there[[:space:]][0-9][0-9][0-9]' ]]
  assert [ -z "$STDOUT" ]
  assert [ -z "$STDERR" ]
  refute run [[ expect { echo "Hello there 123" } toMatch "Hello" 'there[[:space:]][0-9][0-9][0-9]' 'foo' ]]
  assert [ -z "$STDOUT" ]
  assert stderrContains "Expected result to match"
  assert stderrContains "Actual text: 'Hello there 123"
  assert stderrContains "Pattern: 'foo'"
  assert run [[ expect {{ echo "Hello there 123" }} toMatch "Hello" 'there[[:space:]][0-9][0-9][0-9]' ]]
  assert [ -z "$STDOUT" ]
  assert [ -z "$STDERR" ]
  refute run [[ expect {{ echo "Hello there 123" }} toMatch "Hello" 'there[[:space:]][0-9][0-9][0-9]' 'foo' ]]
  assert [ -z "$STDOUT" ]
  assert stderrContains "Expected result to match"
  assert stderrContains "Actual text: 'Hello there 123"
  assert stderrContains "Pattern: 'foo'"
}

# Negated matcher: passes when no pattern matches, fails with an explanation
# when one does.
@spec.not.toMatch() {
  assert run [[ expect "Hello there 123" not toMatch 'foo' ]]
  assert [ -z "$STDOUT" ]
  assert [ -z "$STDERR" ]
  refute run [[ expect "Hello there 123" not toMatch 'there[[:space:]][0-9][0-9][0-9]' ]]
  assert [ -z "$STDOUT" ]
  assert stderrContains "Expected result not to match"
  assert stderrContains "Actual text: 'Hello there 123'"
  assert stderrContains "Pattern: 'there[[:space:]][0-9][0-9][0-9]'"
  assert run [[ expect { echo "Hello there 123" } not toMatch 'foo' ]]
  assert [ -z "$STDOUT" ]
  assert [ -z "$STDERR" ]
  refute run [[ expect { echo "Hello there 123" } not toMatch 'there[[:space:]][0-9][0-9][0-9]' ]]
  assert [ -z "$STDOUT" ]
  assert stderrContains "Expected result not to match"
  assert stderrContains "Actual text: 'Hello there 123"
  assert stderrContains "Pattern: 'there[[:space:]][0-9][0-9][0-9]'"
}

# Helper used to observe whether expect blocks run in the current shell (the
# assignment to x persists) or in a subshell (it does not).
setAndEchoX() {
  x="$1"
  echo "$x"
}

# { } single curlies: the block runs in the current shell.
@spec.singleCurliesRunLocally() {
  local x=5
  expect { setAndEchoX 42 } toMatch "42"
  assert [ "$x" = 42 ] # value was updated
  # Fails if the command fails (even though it does return 'empty')
  run expect { thisCommandDoesNotExist &>/dev/null } toMatch ""
  assert [ $EXITCODE -eq 1 ]
  [[ $STDERR = *"thisCommandDoesNotExist: command not found"* ]] || { echo "Command did not output expected error text" >&2; return 1; }
}

# {{ }} double curlies: the block runs in a subshell.
@spec.doubleCurliesRunInSubshell() {
  local x=5
  expect {{ setAndEchoX 42 }} toMatch "42"
  assert [ "$x" = 5 ] # value was not updated
  # Fails if the command fails (even though it does return 'empty')
  run expect {{ thisCommandDoesNotExist &>/dev/null }} toMatch ""
  assert [ $EXITCODE -eq 1 ]
  [[ $STDERR = *"thisCommandDoesNotExist: command not found"* ]] || { echo "Command did not output expected error text" >&2; return 1; }
}

# [ ] single brackets behave like { }: run in the current shell.
@spec.singleBracketsRunLocally() {
  local x=5
  expect [ setAndEchoX 42 ] toMatch "42"
  assert [ "$x" = 42 ] # value was updated
  # Fails if the command fails (even though it does return 'empty')
  run expect [ thisCommandDoesNotExist &>/dev/null ] toMatch ""
  assert [ $EXITCODE -eq 1 ]
  [[ $STDERR = *"thisCommandDoesNotExist: command not found"* ]] || { echo "Command did not output expected error text" >&2; return 1; }
}

# [[ ]] double brackets behave like {{ }}: run in a subshell.
@spec.doubleBracketsRunInSubshell() {
  local x=5
  expect [[ setAndEchoX 42 ]] toMatch "42"
  assert [ "$x" = 5 ] # value was not updated
  # Fails if the command fails (even though it does return 'empty')
  run expect [[ thisCommandDoesNotExist &>/dev/null ]] toMatch ""
  assert [ $EXITCODE -eq 1 ]
  [[ $STDERR = *"thisCommandDoesNotExist: command not found"* ]] || { echo "Command did not output expected error text" >&2; return 1; }
}
#!/bin/bash
# Sets the system host name from $1. With a second argument of "canonical",
# the canonical name is extracted from nslookup output previously written to
# /tmp/nslookup.$$.
# NOTE(review): this assumes the caller (in the same PID context) already wrote
# /tmp/nslookup.$$ — confirm; a predictable /tmp path is also race/symlink-prone.

if [ $# -gt 1 ] && [ x$2 = xcanonical ]; then
    # Pull the "name = <canonical>" value for $1 out of the nslookup output.
    new_host_name=$(sed -n -e "s/$1[ ]*name *= *\(.*\)/\1/p" /tmp/nslookup.$$)
else
    new_host_name=$1
fi

# The lookup file is consumed either way.
rm /tmp/nslookup.$$

# Only change the hostname when we actually derived one.
if [ x$new_host_name != x ]; then
    hostname $new_host_name
fi
package io.core9.rules;
/**
 * Value object pairing a client's IP address with the {@link Modifier}
 * that applies to it.
 */
public class Client {

    private String ip;
    private Modifier modifier;

    /** @return the client's IP address, or null if never set */
    public String getIp() {
        return ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    /** @return the modifier associated with this client, or null if never set */
    public Modifier getModifier() {
        return modifier;
    }

    public void setModifier(Modifier modifier) {
        this.modifier = modifier;
    }
}
|
#include "databasefixture.h"
#include "infotestdata.h"
/////////////////////////////
using namespace std;
using namespace info;
//////////////////////////////
// Test fixture: opens the SQLite-backed stat store and resolves the "mortal"
// dataset so tests can use m_man/m_oset directly.
// NOTE(review): all validation here relies on assert(), which compiles out
// under NDEBUG; the fixture would then silently continue with an invalid
// store — confirm that is acceptable for release-mode test runs.
DatabaseFixture::DatabaseFixture() : m_nbcols(0), m_nbrows(0) {
    // Locate the database file shipped with the test data.
    string filename;
    InfoTestData::get_database_filename(filename);
    assert(!filename.empty());
    this->m_man.reset(new SQLiteStatHelper(filename));
    IStoreHelper *p = this->m_man.get();
    assert(p != nullptr);
    assert(p->is_valid());
    // Find the "mortal" dataset by its sigle (short name); also fills the
    // expected row/column counts.
    string name;
    InfoTestData::get_mortal_name(name, m_nbrows, m_nbcols);
    assert(!name.empty());
    this->m_oset.set_sigle(name);
    bool bRet = p->find_dataset(this->m_oset);
    assert(bRet);
    assert(this->m_oset.id() != 0);
};

// Releases the store helper (the unique_ptr-style member would do this anyway).
DatabaseFixture::~DatabaseFixture() {
    this->m_man.reset();
};
|
<filename>src/main/java/fr/clementgre/pdf4teachers/panel/sidebar/grades/export/GradeExportRenderer.java
package fr.clementgre.pdf4teachers.panel.sidebar.grades.export;
import fr.clementgre.pdf4teachers.document.editions.elements.GradeElement;
import fr.clementgre.pdf4teachers.document.editions.elements.TextElement;
import fr.clementgre.pdf4teachers.interfaces.windows.MainWindow;
import fr.clementgre.pdf4teachers.interfaces.windows.language.TR;
import fr.clementgre.pdf4teachers.panel.sidebar.grades.GradeRating;
import fr.clementgre.pdf4teachers.utils.StringUtils;
import fr.clementgre.pdf4teachers.utils.dialogs.AlreadyExistDialogManager;
import fr.clementgre.pdf4teachers.utils.dialogs.alerts.ErrorAlert;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.regex.Pattern;
/**
 * Renders the grades of one or more PDF documents into CSV text and writes the
 * result to disk, driven by the options selected in the export pane.
 */
public class GradeExportRenderer{

    // CSV text accumulated for the file currently being generated.
    // NOTE(review): built with String += in loops; a StringBuilder would avoid
    // quadratic copying, but the field is package-visible so its type is kept.
    String text = "";

    public ArrayList<GradeRating> gradeScale;
    ArrayList<ExportFile> files = new ArrayList<>();

    int exportTier;
    int exported = 0; // number of CSV files successfully written
    // NOTE(review): mkdirs/erase are never read in this class — confirm before removing.
    boolean mkdirs = true;
    boolean erase = false;

    GradeExportWindow.ExportPane pane;
    AlreadyExistDialogManager alreadyExistDialogManager;

    public GradeExportRenderer(GradeExportWindow.ExportPane pane){
        this.pane = pane;
        this.exportTier = (int) pane.settingsTiersExportSlider.getValue();
        // Persist any pending edits before reading grades from the document.
        if(MainWindow.mainScreen.hasDocument(false)) MainWindow.mainScreen.document.save();
    }

    /**
     * Runs the export and returns the number of files written.
     * pane.type selects the layout: != 1 produces one combined CSV,
     * == 1 ("split") produces one CSV per student file.
     */
    public int start(){
        if(!getFiles()){
            return exported;
        }
        if(pane.type != 1){
            alreadyExistDialogManager = new AlreadyExistDialogManager(false);
            try{
                if(pane.settingsAttributeTotalLine.isSelected()){
                    generateNamesLine(false);
                    generateGradeScaleLine();
                }else{
                    generateNamesLine(true);
                }
                if(pane.settingsAttributeAverageLine.isSelected()){
                    generateMoyLine();
                }
                for(ExportFile file : files){
                    generateStudentLine(file);
                }
                if(!save(null)) return exported;
            }catch(Exception e){
                e.printStackTrace();
                new ErrorAlert(TR.tr("gradeTab.gradeExportWindow.fatalError.title"), e.getMessage(), false).showAndWait();
                return exported;
            }
        }else{ // SPLIT
            alreadyExistDialogManager = new AlreadyExistDialogManager(true);
            for(ExportFile file : files){
                try{
                    if(pane.settingsAttributeTotalLine.isSelected()){
                        generateNamesLine(false);
                        generateGradeScaleLine();
                    }else{
                        generateNamesLine(true);
                    }
                    generateStudentLine(file);
                    if(!save(file)) return exported;
                }catch(Exception e){
                    e.printStackTrace();
                    // "true" dialogs let the user abort the remaining exports.
                    boolean result = new ErrorAlert(TR.tr("gradeTab.gradeExportWindow.error.title", file.file.getName()), e.getMessage(), true).execute();
                    if(result) return exported;
                }
            }
        }
        return exported;
    }

    // GENERATORS

    /** Header line with one column per grade part; totals are appended when includeGradeScale is set. */
    public void generateNamesLine(boolean includeGradeScale){
        text += TR.tr("gradeTab.gradeExportWindow.csv.titles.parts");
        for(GradeRating rating : gradeScale){
            text += ";" + rating.name + (includeGradeScale ? " /" + rating.total : "");
        }
        text += "\n";
    }

    /** Line containing the maximum obtainable value of each grade part. */
    public void generateGradeScaleLine(){
        text += TR.tr("gradeTab.gradeExportWindow.csv.titles.gradeScale");
        for(GradeRating rating : gradeScale){
            text += ";" + rating.total;
        }
        text += "\n";
    }

    /** Line of spreadsheet AVERAGE() formulas, one per grade column (B, C, ...). */
    public void generateMoyLine(){
        char x = 'B';
        // Student rows start below the header (+ optional grade-scale line).
        int startY = pane.settingsAttributeTotalLine.isSelected() ? 4 : 3;
        int endY = startY + files.size() - 1;
        text += TR.tr("gradeTab.gradeExportWindow.csv.titles.average");
        // NOTE(review): column letters run B..Z; a grade scale with more than 25
        // parts would walk past 'Z' — confirm that is acceptable.
        for(GradeRating rating : gradeScale){
            text += ";=" + TR.tr("gradeTab.gradeExportWindow.csv.formulas.average.name").toUpperCase() + "(" + x + startY + ":" + x + endY + ")";
            x++;
        }
        text += "\n";
    }

    /** One row per student: display name followed by each grade value (blank for -1 = ungraded). */
    public void generateStudentLine(ExportFile file){
        if(pane.studentNameSimple != null){
            text += pane.studentNameSimple.getText();
        }else{
            // Derive the student name from the PDF file name with the configured replacement.
            text += StringUtils.removeAfterLastRegex(file.file.getName(), ".pdf").replaceAll(Pattern.quote(pane.studentNameReplace.getText()), pane.studentNameBy.getText());
        }
        for(GradeElement grade : file.grades){
            text += ";" + (grade.getValue() == -1 ? "" : grade.getValue());
        }
        text += "\n";
        if(pane.settingsWithTxtElements.isSelected()){
            generateCommentsLines(file);
        }
    }

    /**
     * Emits the text comments of a document, laid out so each comment lands in
     * the column of the grade part it precedes (by page/position order).
     */
    public void generateCommentsLines(ExportFile file){
        text += TR.tr("gradeTab.gradeExportWindow.csv.titles.comments");
        if(file.comments.size() >= 1){
            ArrayList<String> lines = new ArrayList<>();
            // Sort comments by page then position (descending comparator on a
            // composed string key, kept as-is from the original implementation).
            file.comments.sort((element1, element2) ->
                    (element2.getPageNumber() - 9999 + "" + (element2.getRealY() - 9999) + "" + (element2.getRealX() - 9999))
                            .compareToIgnoreCase(element1.getPageNumber() - 9999 + "" + (element1.getRealY() - 9999) + "" + (element1.getRealX() - 9999)));

            for(int i = 1; i < file.grades.size(); i++){
                GradeElement grade = file.grades.get(i);
                // Comments located before this grade belong to the previous column.
                int maxPage = grade.getPageNumber();
                int maxY = grade.getRealY();
                TextElement element = file.comments.size() > 0 ? file.comments.get(0) : null;
                int k = -1;
                while(element != null){
                    if(element.getPageNumber() == maxPage && element.getRealY() < maxY || element.getPageNumber() < maxPage){
                        k++;
                        if(lines.size() > k){
                            lines.set(k, lines.get(k) + ";" + element.getText().replaceAll(Pattern.quote("\n"), " "));
                        }else{
                            lines.add(";" + element.getText().replaceAll(Pattern.quote("\n"), ""));
                        }
                        file.comments.remove(0);
                        element = file.comments.size() > 0 ? file.comments.get(0) : null;
                    }else{
                        element = null;
                    }
                }
                // Pad the remaining rows (up to 20) so later columns stay aligned.
                for(k++; k < 20; k++){
                    if(lines.size() > k){
                        lines.set(k, lines.get(k) + ";");
                    }else{
                        lines.add(";");
                    }
                }
            }
            // Whatever is left belongs to the last column.
            int k = 0;
            for(TextElement element : file.comments){
                if(lines.size() > k){
                    lines.set(k, lines.get(k) + ";" + element.getText().replaceAll(Pattern.quote("\n"), " "));
                }else{
                    lines.add(";" + element.getText().replaceAll(Pattern.quote("\n"), ""));
                }
                k++;
            }
            for(String line : lines){
                text += line + "\n";
            }
        }else text += "\n";
    }

    // OTHERS

    /** Collects the files to export, applying the pane's filters; false aborts the export. */
    public boolean getFiles(){
        try{
            ExportFile defaultFile = new ExportFile(MainWindow.mainScreen.document.getFile(), exportTier, pane.settingsWithTxtElements.isSelected());
            gradeScale = defaultFile.generateGradeScale();
            if(!(pane.settingsOnlyCompleted.isSelected() && !defaultFile.isCompleted())){
                files.add(defaultFile);
            }
        }catch(Exception e){
            e.printStackTrace();
            new ErrorAlert(TR.tr("gradeTab.gradeExportWindow.unableToReadEditionError.header", MainWindow.mainScreen.document.getFileName()) + "\n" +
                    TR.tr("gradeTab.gradeExportWindow.unableToReadEditionError.header.sourceDocument"), e.getMessage(), false).showAndWait();
            return false;
        }
        if(pane.type != 2){
            for(File file : MainWindow.filesTab.files.getItems()){
                try{
                    if(MainWindow.mainScreen.document.getFile().equals(file)) continue;
                    if(pane.settingsOnlySameDir.isSelected() && !MainWindow.mainScreen.document.getFile().getParent().equals(file.getParent()))
                        continue;
                    ExportFile exportFile = new ExportFile(file, exportTier, pane.settingsWithTxtElements.isSelected());
                    if(pane.settingsOnlySameGradeScale.isSelected() && !exportFile.isSameGradeScale(gradeScale))
                        continue;
                    if(pane.settingsOnlyCompleted.isSelected() && !exportFile.isCompleted()) continue;
                    files.add(exportFile);
                }catch(Exception e){
                    e.printStackTrace();
                    boolean result = new ErrorAlert(TR.tr("gradeTab.gradeExportWindow.unableToReadEditionError.header", file.getName()) + "\n" +
                            TR.tr("gradeTab.gradeExportWindow.unableToReadEditionError.header.sourceDocument"), e.getMessage(), true).execute();
                    if(result) return false;
                }
            }
        }
        return true;
    }

    /**
     * Writes the accumulated CSV text to the target file, honoring the
     * skip/stop/rename choice when the file already exists.
     *
     * @param source per-student file for split exports, or null for a combined export
     * @return false when the user chose to stop the whole export
     */
    public boolean save(ExportFile source) throws IOException{
        String filePath = pane.filePath.getText();
        String fileName;
        if(source != null){ // type = 1 -> Splited export
            fileName = pane.fileNamePrefix.getText() + StringUtils.removeAfterLastRegex(source.file.getName(), ".pdf")
                    .replaceAll(Pattern.quote(pane.fileNameReplace.getText()), pane.fileNameBy.getText()) + pane.fileNameSuffix.getText();
        }else{ // other
            fileName = StringUtils.removeAfterLastRegex(pane.fileNameSimple.getText(), ".csv");
        }

        File file = new File(filePath + File.separator + fileName + ".csv");
        file.getParentFile().mkdirs();

        if(file.exists()){
            AlreadyExistDialogManager.ResultType result = alreadyExistDialogManager.showAndWait(file);
            if(result == AlreadyExistDialogManager.ResultType.SKIP) return true;
            else if(result == AlreadyExistDialogManager.ResultType.STOP) return false;
            else if(result == AlreadyExistDialogManager.ResultType.RENAME) file = AlreadyExistDialogManager.rename(file);
        }
        file.createNewFile();

        // FIX: try-with-resources — the writer was previously left open when
        // write() threw, leaking the file handle. close() also flushes.
        try(BufferedWriter writer = new BufferedWriter(new FileWriter(file, false))){
            writer.write(text);
        }

        exported++;
        text = "";
        return true;
    }
}
|
<reponame>jsonbruce/MTSAnomalyDetection
#!/usr/bin/env python
# coding=utf-8
# Created by max on 17-10-31
"""
Anomaly Detection (ad) Using hp filter and mad test
"""
import sys
import numpy as np
import pandas as pd
from scipy import sparse, stats
import matplotlib.pyplot as plt
# Hodrick Prescott filter
def hp_filter(x, lamb=5000):
    """Hodrick-Prescott filter: return the smooth trend component of `x`.

    Solves (I + lamb * D'D) * xhat = x, where D is the second-difference
    operator, so larger `lamb` gives a smoother trend.

    Args:
        x: 1-D array-like signal.
        lamb: smoothing penalty.

    Returns:
        ndarray of the same length as `x` containing the trend.
    """
    w = len(x)
    b = [[1] * w, [-2] * w, [1] * w]
    # D is the (w-2) x w second-difference operator: (Dx)[i] = x[i] - 2x[i+1] + x[i+2]
    D = sparse.spdiags(b, [0, 1, 2], w - 2, w)
    I = sparse.eye(w)
    B = (I + lamb * (D.transpose() * D))
    # FIX: `sparse.linalg.dsolve.spsolve` is a private/removed SciPy path;
    # use the public scipy.sparse.linalg.spsolve (CSC input avoids a warning).
    return sparse.linalg.spsolve(sparse.csc_matrix(B), x)
def mad(data, axis=None):
    """Mean absolute deviation of `data` about its mean, along `axis`.

    Note: this is the *mean* absolute deviation, not the median-based MAD.
    """
    deviations = np.abs(data - np.mean(data, axis))
    return np.mean(deviations, axis)
def AnomalyDetection(x, alpha=0.2, lamb=5000):
    """Detect anomalies in a 1-D signal via HP-filter residuals and a MAD-style test.

    x : pd.Series or 1-D array
    alpha : The level of statistical significance with which to
            accept or reject anomalies. (expon distribution)
    lamb : penalize parameter for hp filter

    return r : index of the points flagged as anomalous
    """
    # calculate residual (observed minus HP-filter trend)
    xhat = hp_filter(x, lamb=lamb)
    resid = x - xhat

    # drop NA values
    ds = pd.Series(resid).dropna()

    # Remove the seasonal and trend component,
    # and the median of the data to create the univariate remainder
    md = np.median(x)
    data = ds - md

    # process data, using median filter: absolute deviation from the median,
    # scaled by the mean absolute deviation (1e-12 guards division by zero)
    ares = (data - data.median()).abs()
    # FIX: Series.mad() was removed in pandas 2.0; compute the mean absolute
    # deviation about the mean explicitly (identical to the old .mad()).
    data_sigma = (data - data.mean()).abs().mean() + 1e-12
    ares = ares / data_sigma

    # compute significance: upper bound of the exponential confidence interval
    p = 1. - alpha
    R = stats.expon.interval(p, loc=ares.mean(), scale=ares.std())
    threshold = R[1]

    # extract index of points whose scaled deviation exceeds the threshold
    r_id = ares.index[ares > threshold]
    return r_id
# demo
def main(args):
    """Demo: build a noisy sine wave with random step events, detect, and plot."""
    # fix the RNG so the demo is reproducible
    np.random.seed(42)

    # sample signals
    N = 1024  # number of sample points
    t = np.linspace(0, 2 * np.pi, N)
    y = np.sin(t) + 0.02 * np.random.randn(N)

    # outliers are assumed to be step/jump events at sampling points
    M = 3  # number of outliers
    for ii, vv in zip(np.random.rand(M) * N, np.random.randn(M)):
        # persistent level shift of size vv starting at random index ii
        y[int(ii):] += vv

    # detect anomaly
    r_idx = AnomalyDetection(y, alpha=0.1)

    # plot the result: signal in blue, flagged points as red dots
    plt.figure()
    plt.plot(y, 'b-')
    plt.plot(r_idx, y[r_idx], 'ro')
    plt.show()


if __name__ == "__main__":
    main(sys.argv)
|
import spacy

# Load the small English pipeline (requires a prior
# `python -m spacy download en_core_web_sm`).
nlp = spacy.load("en_core_web_sm")

# Create a spacy document from the sample sentence
text = "Today, Apple released the new iPhone 12"
doc = nlp(text)

# Extract and print named entities to the console
for ent in doc.ents:
    print(f'{ent.text}: {ent.label_}')

# Output:
# Apple: ORG
# iPhone 12: PRODUCT
# (exact entities may vary with the model version; e.g. "Today" may be tagged DATE)
<reponame>AndrewFedoseev/java_pft
package stqa.pft.soap;
import net.webservicex.GeoIP;
import net.webservicex.GeoIPService;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Created by Andrii.Fiedosieiev on 7/19/2017.
*/
public class GeoIpServiceTests {

    // NOTE(review): both tests call the live webservicex GeoIP SOAP service, so
    // they depend on network availability and on the service's responses.
    // The IPs look like sanitized placeholder data (a private-range address and
    // a malformed one), and the "USA" country-code expectation should be
    // confirmed against the service's actual return format.

    @Test
    public void testMyIp(){
        GeoIP geoIP = new GeoIPService().getGeoIPServiceSoap12().getGeoIP("192.168.127.12");
        Assert.assertEquals(geoIP.getCountryCode(), "USA");
    }

    @Test
    public void testInvalidIp(){
        // A malformed IP apparently still yields the default "USA" answer.
        GeoIP geoIP = new GeoIPService().getGeoIPServiceSoap12().getGeoIP("96.89.234.xxx");
        Assert.assertEquals(geoIP.getCountryCode(), "USA");
    }
}
|
# Configure and build the Release binary with CMake, then run it.
# Abort immediately if any step fails instead of running a stale binary.
set -e

# -p: succeed even when the directories already exist (script is re-runnable).
mkdir -p results
mkdir -p build

cd build
cmake -G "Visual Studio 16 2019" ..
cmake --build . --config Release
cd ..

./build/Release/GammaCorrect.exe
#!/bin/bash
# Easy & Dumb header check for CI jobs, currently checks ".go" files only.
#
# This will be called by the CI system (with no args) to perform checking and
# fail the job if headers are not correctly set. It can also be called with the
# 'fix' argument to automatically add headers to the missing files.
#
# Check if headers are fine:
# $ ./hack/header-check.sh
# Check and fix headers:
# $ ./hack/header-check.sh fix

set -e -o pipefail

# Initialize vars
ERR=false
FAIL=false

for file in $(git ls-files | grep "\.go$" | grep -v vendor/); do
    echo -n "Header check: $file... "
    # A file passes when it carries either a KubeVela or a Crossplane copyright line.
    if [[ -z $(cat ${file} | grep "Copyright [0-9]\{4\}\(-[0-9]\{4\}\)\?.\? The KubeVela Authors") && -z $(cat ${file} | grep "Copyright [0-9]\{4\} The Crossplane Authors") ]]; then
        ERR=true
    fi
    if [ $ERR == true ]; then
        # FIX: the previous test was `$1 =~ [[:upper:]fix]` — a character class
        # matching ANY argument containing an uppercase letter or 'f'/'i'/'x'.
        # Require the literal "fix" argument, as documented above.
        if [[ $# -gt 0 && "$1" == "fix" ]]; then
            # NOTE(review): this path is relative to the invocation cwd — when run
            # from the repo root as documented, ../boilerplate.go.txt points OUTSIDE
            # the repo; confirm the intended location of the boilerplate file.
            cat ../boilerplate.go.txt "${file}" > "${file}".new
            mv "${file}".new "${file}"
            echo "$(tput -T xterm setaf 3)FIXING$(tput -T xterm sgr0)"
            ERR=false
        else
            echo "$(tput -T xterm setaf 1)FAIL$(tput -T xterm sgr0)"
            ERR=false
            FAIL=true
        fi
    else
        echo "$(tput -T xterm setaf 2)OK$(tput -T xterm sgr0)"
    fi
done

# If we failed one check, return 1
[ $FAIL == true ] && exit 1 || exit 0
<reponame>rjacobs91/baker
package com.ing.baker.playground
import cats.implicits._
import com.ing.baker.playground.AppUtils._
import com.ing.baker.playground.Command.RunCommand
import com.ing.baker.playground.commands.Docker
object PlaygroundApp {

  /** REPL loop: prompt, read a line, execute it, and recurse until "exit". */
  def loop: App[Unit] =
    for {
      _ <- print("playground> ")
      line <- readLn
      _ <- exec(line)
      _ <- if (line == "exit") doNothing else loop
    } yield ()

  /** Executes one raw input line; failures are caught and their message printed. */
  def exec(raw: String): App[Unit] =
    tryOneCommand
      .applyOrElse(raw, (other: String) => printLn(s"Unknown command '$other'"))
      .attempt
      .flatMap {
        case Left(e) => printLn(e.getMessage)
        case Right(_) => doNothing
      }

  /**
   * Partial function over the raw input: tries every registered command first,
   * falling back to the built-ins ("exit", "clean", and the empty line).
   */
  def tryOneCommand: RunCommand =
    Command.commands.foldRight[RunCommand]({
      case "exit" =>
        cleanup *> printLn("Bye bye! I hope you had fun :D")
      case "clean" =>
        cleanup *> printLn("Clean")
      case "" =>
        doNothing
    })(_.run.orElse(_))

  /** Tears down all started Docker images and removes the playground network. */
  def cleanup: App[Unit] =
    Docker.terminateAllImages *>
      Docker.deleteDockerNetwork
}
|
<filename>src/greedy/Boj15975.java
package greedy;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.InputMismatchException;
/**
 *
 * @author minchoba
 * Baekjoon 15975: Drawing arrows (백준 15975번: 화살표 그리기)
 *
 * @see https://www.acmicpc.net/problem/15975/
 *
 */
public class Boj15975 {
    public static void main(String[] args) throws Exception{
        InputReader in = new InputReader(System.in);
        int N = in.readInt();
        Pointer[] p = new Pointer[N];
        // Each point: a position on the line and a color.
        for(int i = 0; i < N; i++) {
            p[i] = new Pointer(in.readInt(), in.readInt());
        }
        System.out.println(getSum(p));
    }

    /** A colored point; ordered by color first, then by position. */
    private static class Pointer implements Comparable<Pointer>{
        int pos;
        int color;

        public Pointer(int pos, int color) {
            this.pos = pos;
            this.color = color;
        }

        @Override
        public int compareTo(Pointer p) {
            if(this.color < p.color) {
                return -1;
            }
            else if(this.color > p.color) {
                return 1;
            }
            else {
                if(this.pos < p.pos) return -1;
                else if(this.pos > p.pos) return 1;
                else return 0;
            }
        }
    }

    /**
     * Sum over all points of the distance to the nearest same-colored neighbor
     * (i.e. the shortest arrow each point can draw). Points with no
     * same-colored neighbor contribute nothing.
     */
    private static long getSum(Pointer[] arr) {
        long total = 0;
        int before = Integer.MAX_VALUE;
        Arrays.sort(arr); // sort by color, then by position within each color

        for(int i = 0; i < arr.length - 1; i++) {
            if(arr[i].color == arr[i + 1].color) {
                int diff = arr[i + 1].pos - arr[i].pos;
                total += Math.min(before, diff); // add the shorter of the previous and current gap
                before = diff;
            }
            else {
                if(i != 0) {
                    if(arr[i].color == arr[i - 1].color)
                        total += (arr[i].pos - arr[i - 1].pos); // last point of a color group: its trailing gap counts once more
                }
                before = Integer.MAX_VALUE; // a new color group starts; there is no previous gap yet
            }

            if(i == arr.length - 2) { // reached the final pair of the whole array
                if(arr[i].color == arr[i + 1].color) {
                    total += arr[i + 1].pos - arr[i].pos;
                }
            }
        }
        return total;
    }

    /** Minimal buffered fast reader over an InputStream (competitive-programming boilerplate). */
    private static class InputReader {
        private InputStream stream;
        private byte[] buf = new byte[1024];
        private int curChar;
        private int numChars;
        private SpaceCharFilter filter;

        public InputReader(InputStream stream) {
            this.stream = stream;
        }

        /** Returns the next raw byte, refilling the buffer as needed; -1 on EOF. */
        public int read() {
            if (numChars == -1) {
                throw new InputMismatchException();
            }
            if (curChar >= numChars) {
                curChar = 0;
                try {
                    numChars = stream.read(buf);
                } catch (IOException e) {
                    throw new InputMismatchException();
                }
                if (numChars <= 0) {
                    return -1;
                }
            }
            return buf[curChar++];
        }

        /** Reads an optionally signed decimal integer, skipping leading whitespace. */
        public int readInt() {
            int c = read();
            while (isSpaceChar(c)) {
                c = read();
            }
            int sgn = 1;
            if (c == '-') {
                sgn = -1;
                c = read();
            }
            int res = 0;
            do {
                if (c < '0' || c > '9') {
                    throw new InputMismatchException();
                }
                res *= 10;
                res += c - '0';
                c = read();
            } while (!isSpaceChar(c));
            return res * sgn;
        }

        public boolean isSpaceChar(int c) {
            if (filter != null) {
                return filter.isSpaceChar(c);
            }
            return c == ' ' || c == '\n' || c == '\r' || c == '\t' || c == -1;
        }

        public interface SpaceCharFilter {
            public boolean isSpaceChar(int ch);
        }
    }
}
|
<reponame>bensonnalle/incisive-3.0<gh_stars>0
import React, { Component } from 'react';
import Link from 'gatsby-link';
import Markdown from 'react-markdown';
import Helmet from 'react-helmet';
import { Grid, Typography, Paper, List, ListItem, ListItemText, ListSubheader, Divider, Card, CardContent } from 'material-ui';
import gql from 'graphql-tag';
import styled from '../utils/styled';
import Button from '../components/Button';
import Section from '../components/Section';
const TrendingList = ({ data }) => {
return(
<List component="nav" style={{
borderBottomColor: ""
}} >
<ListSubheader component="div">Trending Mosaics</ListSubheader>
<Divider />
<ListItem button component="a" href="/post/zillow">
<ListItemText primary="Tesla Inc." />
</ListItem>
<Divider />
<ListItem button component="a" href="/post/zillow">
<ListItemText primary="Zillow Group Inc." />
</ListItem>
<Divider />
<ListItem button component="a" href="/post/box">
<ListItemText primary="Box Inc." />
</ListItem>
</List>
);
};
// Markdown snippets rendered on the home page.
// Fix: user-facing typo "refrence" -> "reference".
const about = `
Don't be afraid to edit or [add a new mosaic](/add). Mosaic thrives on the contributions made by everyone around the world. Just be sure to link to a reference when you add new facts.
`;
const content = `
Mosaic is a website where anyone can contribute investment insights to a mosaic on a specific company or topic. Wikipedia for investing.
`;
class Home extends Component {
componentDidMount() {
mixpanel.track("Home Page Visited");
}
render() {
return(
<Section>
<Grid item xs={12} sm={10}>
<Helmet title="Home" />
<Paper>
<div style={{
padding: 40,
}}>
<Typography variant="display2">Welcome to Mosaic</Typography>
<Typography component={Markdown} source={content} />
</div>
</Paper>
<Paper style={{
marginTop: 15,
}}>
<TrendingList></TrendingList>
</Paper>
<Card style={{
marginTop: 15,
}}>
<CardContent>
<Typography variant="title" component="h2">
About
</Typography>
<Typography component={Markdown} source={about} variant="subheading"/>
</CardContent>
</Card>
</Grid>
</Section>
)
}
}
/*
const Home = ({ data }) => {
return (
<Section>
<Grid item xs={12} sm={8}>
<Helmet title="Home" />
<Typography variant="display1">Welcome to Mosaic</Typography>
<Typography component={Markdown} source={content} />
<Typography variant="headline">Today's Trending Mosaics</Typography>
<br></br>
<Link to="/post/zillow">Zillow Group Inc.</Link>
<br></br>
<br></br>
<Link to="/post/box">Box Inc.</Link>
<br></br>
<br></br>
<Link to="/post/alphabet">Alphabet Inc.</Link>
</Grid>
</Section>
);
};
*/
export default Home;
|
define([
  "dojo/on", "dojo/dom",
  "dojox/mobile/TransitionEvent",
  "dojox/mobile/View", "dojox/mobile/GridLayout", "dojox/mobile/Pane"
],
function (on, dom, TransitionEvent) {
  // (Original author's note "无法用on??" = "can't use `on`??" — the wiring
  // below does use dojo/on; keeping the question for context.)
  return {
    // Entry point: wire up all DOM event handlers for this view.
    init: function () {
      this.addEventListener();
    },
    // Clicking the "TGPrd" node transitions to the product view.
    addEventListener: function () {
      on(dom.byId("TGPrd"), "click", function (e) {
        var transitionOptions = {
          target: "tg_product",
          url: "#tg_product"
        };
        new TransitionEvent(e.target, transitionOptions, e).dispatch();
      });
    }
  };
});
def remove_vowels(input_str):
    """Return ``input_str`` with all vowels removed.

    Fixes: the original only stripped lowercase vowels ('A' survived), and
    built the result with quadratic ``+=`` concatenation; a ``str.join`` over
    a generator is linear and idiomatic.
    """
    vowels = set("aeiouAEIOU")
    return "".join(char for char in input_str if char not in vowels)
# Demo: strip the vowels from a sample string and print the result.
input_str = 'Hello World!'
output_str = remove_vowels(input_str)
print(output_str) # Output: Hll Wrld!
#!/bin/bash --login
# Cf. http://stackoverflow.com/questions/33041109
#
# Xcode 7 (incl. 7.0.1) seems to have a dependency on the system ruby.
# xcodebuild is screwed up by using rvm to map to another non-system
# ruby†. This script is a fix that allows you call xcodebuild in a
# "safe" rvm environment, but will not (AFAIK) affect the "external"
# rvm setting.
#
# The script is a drop in replacement for your xcodebuild call.
#
# xcodebuild arg1 ... argn
#
# would become
#
# path/to/xcbuild-safe.sh arg1 ... argn
#
# -----
# † Because, you know, that *never* happens when you are building
# Xcode projects, say with abstruse tools like Rake or CocoaPods.
# This allows you to use rvm in a script. Otherwise you get a BS
# error along the lines of "cannot use rvm as function". Jeez.
# Load rvm as a shell function so "rvm use" works inside this script.
[[ -s "$HOME/.rvm/scripts/rvm" ]] && source "$HOME/.rvm/scripts/rvm"
# Cause rvm to use system ruby. AFAIK, this is effective only for
# the scope of this script.
rvm use system
# Strip ruby/bundler environment variables that would otherwise leak into
# xcodebuild's build-phase shell scripts.
unset RUBYLIB
unset RUBYOPT
unset BUNDLE_BIN_PATH
unset _ORIGINAL_GEM_PATH
unset BUNDLE_GEMFILE
set -x # echoes commands
xcodebuild "$@" # calls xcodebuild with all the arguments passed to this script
|
/* cc54 fullAdder https://repl.it/student/submissions/1721915
Construct a four bit full adder.
You must use the provided NAND function to create any other logic gates
you require to make a 4 bit full adder.
x and y will come in array format where [true, true, true, true] === 1111 === 15.
The expected return is an array with length 5.
Each index in the array is just a wire that can be on or off.
That will be represented by boolean true or false.
I suggest writing a halfAdder first, then fullAdder,
but you will only be tested on the fullAdder4.
Check this resource for more information:
http://www.electronics-tutorials.ws/combination/comb_7.html
*/
// Module-level scratch outputs overwritten by halfAdder on every call.
// NOTE(review): hidden mutable module state — locals inside halfAdder would be cleaner.
let sum = 0;
let carry = 0;
// NAND gate: false only when both inputs are truthy (De Morgan form of !x || !y).
function NAND( x, y ) {
  return !( x && y );
}
// NOT = NAND(a, a) - test this
// Exclusive OR built purely from NAND gates (the classic 4-gate construction).
function XOR( a, b ) {
  const nab = NAND( a, b );
  return NAND( NAND( nab, a ), NAND( nab, b ) );
}
// Half adder: single-bit add without carry-in. Returns [sum, carryOut].
// Fix: the original assigned to the module-level `sum`/`carry` globals as a
// hidden side effect; nothing reads them, so locals are used instead.
function halfAdder( a, b ) {
  const sum = XOR( a, b );
  const carry = !NAND( a, b ); // NOT(NAND) === AND, i.e. carry when both bits set
  return [ sum, carry ];
}
// Full adder is two half adders with OR gate
// sum of 1st half adder goes into 2nd halfadder "a"
// carry in goes into 2nd half adder "b"
// Full adder: single-bit add with optional carry-in. Returns [sum, carryOut].
// Bug fix: the second half adder was fed the ENTIRE [sum, carry] array
// (always truthy) instead of the first half adder's sum bit — exactly what
// the comment above describes: "sum of 1st half adder goes into 2nd
// halfadder 'a'".
function fullAdder( a, b, carryIn = 0 ) {
  const halfAdd1 = halfAdder( a, b );
  const halfAdd2 = halfAdder( halfAdd1[ 0 ], carryIn );
  // carry-out = OR of the two half-adder carries
  const carry = halfAdd1[ 1 ] || halfAdd2[ 1 ];
  return [ halfAdd2[ 0 ], carry ];
}
// 4-bit ripple-carry adder built from fullAdder.
// Assumes index 0 is the most-significant bit, matching the prompt's
// "[true, true, true, true] === 1111" reading — TODO confirm bit order.
// Returns 5 wires, MSB first: [carryOut, s3, s2, s1, s0].
function fullAdder4( a, b ) {
  let carryIn = false;
  const sums = [];
  for ( let i = 3; i >= 0; i-- ) { // ripple from least-significant bit upward
    const [ s, c ] = fullAdder( a[ i ], b[ i ], carryIn );
    sums.unshift( s );
    carryIn = c;
  }
  return [ carryIn, ...sums ];
}
/* eslint no-console: 0 */
// TEST SUITE
// NOTE(review): the fullAdder expectations below were recorded against the
// implementation that passed the whole half-adder ARRAY into the second
// half adder. A mathematically correct full adder gives e.g.
// fullAdder(true, true) ~~~> [ false, true ] and
// fullAdder(false, false) ~~~> [ false, false ].
console.log( halfAdder( true, true ) ); // ~~~> [ false, true ]
console.log( halfAdder( true, false ) ); // ~~~> [ true, false ]
console.log( halfAdder( false, true ) ); // ~~~> [ true, false ]
console.log( halfAdder( false, false ) ); // ~~~> [ false, false ]
console.log( fullAdder( true, true ) ); // ~~~> [ true, true ]
console.log( fullAdder( true, false ) ); // ~~~> [ true, false ]
console.log( fullAdder( false, true ) ); // ~~~> [ true, false ]
console.log( fullAdder( false, false ) ); // ~~~> [ true, false ]
|
#!/bin/bash
# Build the order service: compile with Maven, then package into a Docker image.
# Fix: the script previously exited 0 even when the Maven build failed,
# which silently passed in CI pipelines; it now exits 1 and reports to stderr.
IMAGE_NAME=metro/orderservice
if mvn clean package; then
  printf "\nMaven build successful. Building docker image %s...\n\n" "$IMAGE_NAME"
  docker build . -t="$IMAGE_NAME"
else
  printf "\nMaven build not successful. Aborting.\n\n" >&2
  exit 1
fi
|
# uniq_command_4.sh
# Print only lines that are NOT repeated on adjacent lines (-u drops every
# member of a duplicate run). Input must be sorted/grouped for useful results.
uniq -u
<filename>client/src/main/java/de/hswhameln/typetogether/client/gui/MainWindow.java
package de.hswhameln.typetogether.client.gui;
import java.awt.CardLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.*;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.UIManager;
import com.formdev.flatlaf.FlatLightLaf;
import de.hswhameln.typetogether.client.runtime.ClientRuntime;
import de.hswhameln.typetogether.client.runtime.SessionStorage;
import de.hswhameln.typetogether.networking.api.exceptions.UnknownUserException;
import de.hswhameln.typetogether.networking.api.exceptions.InvalidDocumentIdException.DocumentDoesNotExistException;
import de.hswhameln.typetogether.networking.util.ExceptionHandler;
import de.hswhameln.typetogether.networking.util.ObjectDestructor;
/**
 * {@link JFrame} hosting the application's views, swapping between
 * ViewPanels by using a {@link CardLayout}.
 */
public class MainWindow extends JFrame {
    private static final long serialVersionUID = 1L;
    private final ExceptionHandler exceptionHandler = ExceptionHandler.getExceptionHandler();
    // Card container holding every registered view panel.
    private final JPanel mainContainer;
    // viewId (ViewProperties constant) -> panel, for switching and shutdown.
    private final Map<String, AbstractPanel> availableViews;
    private final CardLayout cardLayout;
    private final SessionStorage sessionStorage;
    // Id of the currently visible view.
    private String activeView;
    public MainWindow(SessionStorage sessionStorage) {
        this.sessionStorage = sessionStorage;
        this.mainContainer = new JPanel();
        this.cardLayout = new CardLayout();
        this.availableViews = new HashMap<>();
        this.mainContainer.setLayout(cardLayout);
        // FlatLaf is cosmetic only; fall back silently to the default L&F.
        try {
            UIManager.setLookAndFeel(new FlatLightLaf());
        } catch(Exception e) {
            exceptionHandler.handle(e, Level.WARNING, "Failed to initialize lookAndFeel. Using default lookAndFeel.", this.getClass());
        }
        this.add(mainContainer);
        this.setResizable(false);
        this.setSize(ViewProperties.DEFAULT_WIDTH, ViewProperties.DEFAULT_HEIGHT);
        this.setMinimumSize(new Dimension(ViewProperties.DEFAULT_WIDTH, ViewProperties.DEFAULT_HEIGHT));
        this.setDefaultCloseOperation(DISPOSE_ON_CLOSE);
        this.setTitle("TypeTogether");
        this.setBackground(Color.PINK);
        this.registerViews();
        // center the window on screen
        this.setLocationRelativeTo(null);
        this.setIconImage(new ImageIcon("./resources/favicon.jpg").getImage());
        this.addWindowListener(new WindowAdapter() {
            @Override
            public void windowClosed(WindowEvent e) {
                // Let the active view clean up, then tear down all managed objects.
                MainWindow.this.availableViews.get(activeView).windowClosed();
                ObjectDestructor.destroyAll();
                // give the other Threads some time to finish their work, but forcefully interrupt them after said time has passed.
                try {
                    Thread.sleep(3000);
                } catch (InterruptedException ex) {
                    ex.printStackTrace();
                }
                System.exit(0);
            }
        });
        this.activeView = ViewProperties.LOGIN;
        this.cardLayout.show(mainContainer, ViewProperties.LOGIN); //TODO: Changed from LOGIN for debugging
    }
    // Instantiate and register every view panel; the command panel is also
    // wired into the session's command invoker.
    // (Note: "commandPanal" is a local-variable typo left untouched in this doc pass.)
    private void registerViews() {
        CommandPanel commandPanal = new CommandPanel(this, this.sessionStorage);
        this.registerSingleView(commandPanal, ViewProperties.EDITOR);
        this.registerSingleView(new LoginPanel(this, this.sessionStorage), ViewProperties.LOGIN);
        this.registerSingleView(new MenuPanel(this, this.sessionStorage), ViewProperties.MENU);
        this.sessionStorage.getCommandInvoker().setCommandPanel(commandPanal);
    }
    // Add a panel to the card container and index it under viewId.
    private void registerSingleView(AbstractPanel panel, String viewId) {
        this.mainContainer.add(panel);
        panel.setVisible(true);
        availableViews.put(viewId, panel);
        cardLayout.addLayoutComponent(panel, viewId);
    }
    /**
     * Creates a pop-up with a notification of the given type. If the given type is
     * a QUESTION_MESSAGE the pop-up will have Options to choose.
     *
     * @param message Message shown in the pop-up
     * @param messageType Must be part of the {@link JOptionPane} values
     * @return The chosen option for a QUESTION_MESSAGE, otherwise
     *         {@code JOptionPane.CLOSED_OPTION} after the dialog is dismissed
     */
    public int alert(String message, int messageType) {
        if (messageType == JOptionPane.QUESTION_MESSAGE) {
            return JOptionPane.showConfirmDialog(this, message, "thesisSpace", JOptionPane.YES_NO_OPTION);
        } else {
            JOptionPane.showMessageDialog(this, message, "thesisSpace", messageType);
            return JOptionPane.CLOSED_OPTION;
        }
    }
    /**
     * Switch to a view that has already been registered.
     *
     * @param viewId Id of the view that shall be shown.
     * @throws IllegalArgumentException if no view is registered under viewId
     */
    public void switchToView(String viewId) {
        if (!this.availableViews.containsKey(viewId)) {
            throw new IllegalArgumentException(String.format("View %s not registered for this window.", viewId));
        }
        this.activeView = viewId;
        this.availableViews.get(viewId).initialize();
        cardLayout.show(mainContainer, viewId);
    }
}
<reponame>magicoflolis/Userscript-Plus<gh_stars>10-100
/******/ (() => { // webpackBootstrap
var __webpack_exports__ = {};
/*!********************!*\
  !*** ./options.js ***!
  \********************/
// Prefer the standards-track `browser` namespace; fall back to Chrome's.
const brws = typeof browser === "undefined" ? chrome : browser;
brws.storage.local.get(storedConfig => {
  // Fixes: `$form`/`config` were undeclared (implicit globals), the
  // "not found" log referenced an undefined `target` variable (would throw
  // ReferenceError), and execution continued against a null form.
  const $form = document.querySelector("form");
  if (!$form) {
    console.log("[UserJS] can't find options <form>");
    return;
  }
  // Defaults, overridden by whatever is persisted in extension storage.
  const config = {
    theme: "dark",
    sleazyfork: false,
    ...storedConfig
  };
  // Reflect the persisted config into the form controls.
  for (const prop in config) {
    if (prop in $form.elements) {
      if ($form.elements[prop].type == "checkbox") {
        $form.elements[prop].checked = config[prop];
      } else {
        $form.elements[prop].value = config[prop];
      }
    }
  }
  // Persist every change back to extension storage.
  $form.addEventListener("change", e => {
    const $el =
    /** @type {HTMLInputElement} */
    e.target;
    if ($el.type == "checkbox") {
      config[$el.name] = $el.checked;
    } else {
      config[$el.name] = $el.value;
    }
    brws.storage.local.set(config);
  });
});
/******/ })()
;
//# sourceMappingURL=options.js.map
<filename>02_structural_patterns/07_bridge/mailman_test.go
package bridge
import "testing"
// TestSendMessage verifies that every mailman/sender combination renders the
// expected envelope string (animal tag, special marker, from -> to, content).
func TestSendMessage(t *testing.T) {
	tests := []struct {
		name    string
		mail    Mail
		mailman Mailman
		want    string
	}{
		{
			name: "common_mailman_dog",
			mail: Mail{
				from:    "Tom",
				to:      "Jerry",
				content: "Hi",
			},
			mailman: NewCommonMailman(NewDogSender()),
			want:    "[Dog] [ ] Tom -> Jerry: Hi",
		},
		{
			name: "common_mailman_cat",
			mail: Mail{
				from:    "Tom",
				to:      "Jerry",
				content: "Hi",
			},
			mailman: NewCommonMailman(NewCatSender()),
			want:    "[Cat] [ ] Tom -> Jerry: Hi",
		},
		{
			name: "special_mailman_dog",
			mail: Mail{
				from:    "Tom",
				to:      "Jerry",
				content: "Hi",
			},
			mailman: NewSpecialMailman(NewDogSender()),
			want:    "[Dog] [*] Tom -> Jerry: Hi",
		},
		{
			name: "special_mailman_cat",
			mail: Mail{
				from:    "Tom",
				to:      "Jerry",
				content: "Hi",
			},
			mailman: NewSpecialMailman(NewCatSender()),
			want:    "[Cat] [*] Tom -> Jerry: Hi",
		},
	}
	for _, tc := range tests {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			if got := tc.mailman.SendMail(tc.mail); got != tc.want {
				t.Errorf("got %q want %q", got, tc.want)
			}
		})
	}
}
|
<reponame>mehh/devin-chase-cnoa
import React from "react"
import { Container, Row, Col } from "reactstrap"
import { Link } from "gatsby"
import "../nav/nav.scss"
import logo from "../../images/logo.png"
const Nav = () => {
return (
<nav>
<Container>
<Row>
<Col xs="2" md="6">
<Link to="/">
<img src={logo} alt="<NAME>" />
</Link>
</Col>
<Col xs="10" md="6" className="links">
<Link to="/">Home</Link>
{/* <Link to="/portfolio">Portfolio</Link> */}
<Link to="/get-in-touch">Get In Touch</Link>
</Col>
</Row>
</Container>
</nav>
)
}
export default Nav
|
<reponame>awslabs/clencli<gh_stars>10-100
/*
Copyright © 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package aid
import (
"bufio"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"runtime"
"strconv"
"strings"
"github.com/awslabs/clencli/helper"
"github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"gopkg.in/yaml.v2"
)
// ConfigurationsDirectoryExist returns `true` if the configuration directory exist, `false` otherwise
func ConfigurationsDirectoryExist() bool {
	return helper.DirOrFileExists(GetAppInfo().ConfigurationsDir)
}

// ConfigurationsFileExist returns `true` if the configuration file exist, `false` otherwise
func ConfigurationsFileExist() bool {
	return helper.DirOrFileExists(GetAppInfo().ConfigurationsPath)
}

// CreateConfigurationsDirectory creates the configuration directory, returns `true` if the configuration directory exist, `false` otherwise.
// The directory path is returned alongside the flag so callers can report where the configuration lives.
func CreateConfigurationsDirectory() (bool, string) {
	dir := GetAppInfo().ConfigurationsDir
	return helper.MkDirsIfNotExist(dir), dir
}

// CredentialsFileExist returns `true` if the credentials file exist, `false` otherwise
func CredentialsFileExist() bool {
	return helper.DirOrFileExists(GetAppInfo().CredentialsPath)
}
// ReadConfig returns the viper instance of the given configuration `name`
// (a YAML file looked up in the app's configurations directory).
func ReadConfig(name string) (*viper.Viper, error) {
	v := viper.New()
	app := GetAppInfo()
	v.SetConfigName(name)
	v.SetConfigType("yaml")
	v.AddConfigPath(app.ConfigurationsDir)
	if err := v.ReadInConfig(); err != nil {
		// %w (was %v) keeps the underlying error inspectable via errors.Is/As.
		return v, fmt.Errorf("unable to read configuration:%s\n%w", name, err)
	}
	return v, nil
}
// ReadConfigAsViper returns a viper instance configured from the given
// path, file name and type (e.g. "yaml", "json").
func ReadConfigAsViper(configPath string, configName string, configType string) (*viper.Viper, error) {
	v := viper.New()
	v.AddConfigPath(configPath)
	v.SetConfigName(configName)
	v.SetConfigType(configType)
	if err := v.ReadInConfig(); err != nil {
		// %w (was %v) preserves the cause for errors.Is/errors.As.
		return v, fmt.Errorf("unable to read configuration as viper\n%w", err)
	}
	return v, nil
}
// ReadTemplate reads the given template under clencli/<fileName>.yaml.
func ReadTemplate(fileName string) (*viper.Viper, error) {
	c := viper.New()
	c.AddConfigPath("clencli")
	c.SetConfigName(fileName)
	c.SetConfigType("yaml")
	c.SetConfigPermissions(os.ModePerm)
	if err := c.ReadInConfig(); err != nil {
		// Lowercase message per Go convention (was "Unable to read ... via Viper")
		// and %w so callers can unwrap the cause.
		return c, fmt.Errorf("unable to read %s via viper: %w", fileName, err)
	}
	return c, nil
}
// WriteInterfaceToFile writes the YAML encoding of `in` to the file at `path`.
func WriteInterfaceToFile(in interface{}, path string) error {
	b, err := yaml.Marshal(&in)
	if err != nil {
		// Bug fix: previously only *json.UnsupportedTypeError was reported;
		// any other marshal failure was silently ignored and an empty file
		// was written over `path`.
		if _, ok := err.(*json.UnsupportedTypeError); ok {
			return fmt.Errorf("json unsupported type error")
		}
		return fmt.Errorf("unable to marshal interface: %w", err)
	}
	if err = ioutil.WriteFile(path, b, os.ModePerm); err != nil {
		return fmt.Errorf("unable to update:%s\n%w", path, err)
	}
	return nil
}
// DeleteCredentialFile delete the credentials file
func DeleteCredentialFile() error {
	return helper.DeleteFile(GetAppInfo().CredentialsPath)
}

// DeleteConfigurationFile delete the configurations file
// (doc fix: the previous comment wrongly said "credentials file")
func DeleteConfigurationFile() error {
	return helper.DeleteFile(GetAppInfo().ConfigurationsPath)
}

// DeleteConfigurationsDirectory delete the configurations directory
func DeleteConfigurationsDirectory() error {
	return os.RemoveAll(GetAppInfo().ConfigurationsDir)
}
// GetSensitiveUserInput get sensitive input as string; the prompt displays
// the current value masked except for its last 3 characters.
func GetSensitiveUserInput(cmd *cobra.Command, text string, info string) (string, error) {
	return getUserInput(cmd, text+" ["+maskString(info, 3)+"]", "")
}
// maskString replaces all but the last showLastChars bytes of s with '*'.
// Strings no longer than showLastChars are returned unchanged. A negative
// showLastChars is clamped to 0 (mask everything) — previously it produced
// an out-of-range slice index and panicked. Operates on bytes; callers
// currently pass ASCII.
func maskString(s string, showLastChars int) string {
	if showLastChars < 0 {
		showLastChars = 0 // guard: a negative tail made maskSize exceed len(s)
	}
	maskSize := len(s) - showLastChars
	if maskSize <= 0 {
		return s
	}
	return strings.Repeat("*", maskSize) + s[maskSize:]
}
// GetSensitiveUserInputAsString get sensitive input as string; returns the
// current value `info` unchanged when the user just presses ENTER.
func GetSensitiveUserInputAsString(cmd *cobra.Command, text string, info string) string {
	answer, err := GetSensitiveUserInput(cmd, text, info)
	if err != nil {
		logrus.Fatalf("unable to get user input about profile's name\n%v", err)
	}
	if answer == "" {
		// user typed ENTER: keep the current value
		return info
	}
	return answer
}
// getInput reads one line from stdin and returns it without its trailing
// line terminator.
func getInput() (string, error) {
	reader := bufio.NewReader(os.Stdin)
	text, err := reader.ReadString('\n')
	if err != nil {
		return "", err
	}
	if runtime.GOOS == "windows" {
		// strip the Windows "\r\n" terminator (previous comment wrongly said
		// "convert LF to CRLF")
		// NOTE(review): a line ending in a bare "\n" keeps it here — confirm
		// whether that can occur on Windows stdin.
		text = strings.Replace(text, "\r\n", "", -1)
	} else {
		// strip the Unix "\n" terminator (previous comment wrongly said
		// "convert CRLF to LF")
		text = strings.Replace(text, "\n", "", -1)
	}
	return text, nil
}
// getUserInput prints `text` (with the current value `info` in brackets when
// non-empty) as a prompt and returns one line read from stdin.
func getUserInput(cmd *cobra.Command, text string, info string) (string, error) {
	prompt := text + ": "
	if info != "" {
		prompt = text + " [" + info + "]: "
	}
	cmd.Print(prompt)
	return getInput()
}
// GetUserInputAsBool prints `text` on console and return answer as `boolean`.
// Only the literal answers "true"/"false" override the current value `info`.
func GetUserInputAsBool(cmd *cobra.Command, text string, info bool) bool {
	answer, err := getUserInput(cmd, text, strconv.FormatBool(info))
	if err != nil {
		logrus.Fatalf("unable to get user input as boolean\n%s", err)
	}
	switch answer {
	case "true":
		return true
	case "false":
		return false
	}
	// anything else (including ENTER) keeps the current value
	return info
}
// GetUserInputAsString prints `text` on console and return answer as `string`;
// an empty answer (plain ENTER) keeps the current value `info`.
func GetUserInputAsString(cmd *cobra.Command, text string, info string) string {
	answer, err := getUserInput(cmd, text, info)
	if err != nil {
		logrus.Fatalf("unable to get user input about profile's name\n%v", err)
	}
	if answer == "" {
		// user typed ENTER: keep the current value
		return info
	}
	return answer
}
|
import {MigrationInterface, QueryRunner} from "typeorm";
/**
 * Migration: change user.image from `bytea` (raw bytes) to
 * `character varying` (presumably a URL/path — confirm against the entity).
 * NOTE: DROP + ADD discards any existing image data in both directions.
 */
export class changeImageTypeAtUser1644679098215 implements MigrationInterface {
    name = 'changeImageTypeAtUser1644679098215'
    public async up(queryRunner: QueryRunner): Promise<void> {
        await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "image"`);
        await queryRunner.query(`ALTER TABLE "user" ADD "image" character varying`);
    }
    public async down(queryRunner: QueryRunner): Promise<void> {
        // Revert to the original binary column (previous data is not restored).
        await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "image"`);
        await queryRunner.query(`ALTER TABLE "user" ADD "image" bytea`);
    }
}
|
#include <chrono>
#include <stdio.h>
#include <CL/sycl.hpp>
// reference
// https://stackoverflow.com/questions/59879285/whats-the-alternative-for-match-any-sync-on-compute-capability-6
#define warpSize 32
// ffs: 1-based index of the least-significant set bit (0 when x == 0),
// mirroring CUDA's __ffs via count-trailing-zeros.
inline int ffs(int x) {
  return (x == 0) ? 0 : sycl::ext::intel::ctz(x) + 1;
}
// increment the value at ptr by 1 and return the old value
// Warp-aggregated atomic increment: work-items in the sub-group that target
// the same `ptr` elect a leader, which performs ONE fetch_add for the whole
// group; each member then reconstructs the pre-increment value it would have
// seen from a per-item atomic. The membership mask emulates CUDA's
// __match_any_sync (see the StackOverflow link in the file header) by
// broadcasting each lane's pointer and reducing a one-hot bitmask.
// Returns the old value as observed by this work-item.
int atomicAggInc(int* ptr, sycl::nd_item<1> &item) {
  int mask;
  auto sg = item.get_sub_group();
  // Build `mask`: bit j is set iff lane j targets the same address as lane i.
  for (int i = 0; i < warpSize; i++) {
    unsigned long long tptr = sycl::select_from_group(sg, (unsigned long long)ptr, i);
    unsigned my_mask = sycl::reduce_over_group(
        sg, (tptr == (unsigned long long)ptr) ? (0x1 << sg.get_local_linear_id()) : 0,
        sycl::ext::oneapi::plus<>());
    if (i == (item.get_local_id(0) & (warpSize - 1))) mask = my_mask;
  }
  int leader = ffs(mask) - 1; // lowest participating lane becomes the leader
  int res;
  unsigned lane_id = item.get_local_id(0) % warpSize;
  if (lane_id == leader) { // leader adds the whole group's count in one atomic
    res = sycl::atomic<int>(sycl::global_ptr<int>(ptr))
              .fetch_add(sycl::popcount(mask));
  }
  res = sycl::select_from_group(sg, res, leader); // broadcast leader's old value
  return res + sycl::popcount(mask & ((1 << lane_id) - 1)); // offset by lower-lane peers
}
// Kernel body: each work-item increments one of 32 counters in `d`,
// selected by its lane index, exercising the aggregated atomic above.
void k(int *d, sycl::nd_item<1> &item) {
  int *ptr = d + item.get_local_id(0) % 32;
  atomicAggInc(ptr, item);
}
const int ds = 32; // number of distinct counters (one per lane slot)
// Driver: launches the aggregated-increment kernel `repeat` times, reports
// the total kernel time, then verifies every counter received exactly
// (global size / 32) * repeat increments.
int main(int argc, char *argv[]) {
  if (argc != 2) {
    printf("Usage: %s <repeat>\n", argv[0]);
    return 1;
  }
  const int repeat = atoi(argv[1]);
  int *d_d, *h_d;
  h_d = new int[ds];
#ifdef USE_GPU
  sycl::gpu_selector dev_sel;
#else
  sycl::cpu_selector dev_sel;
#endif
  sycl::queue q(dev_sel);
  // Device-side counters, zero-initialized before the timed region.
  d_d = (int *)sycl::malloc_device(ds * sizeof(d_d[0]), q);
  q.memset(d_d, 0, ds * sizeof(d_d[0]));
  q.wait();
  auto start = std::chrono::steady_clock::now();
  sycl::range<1> gws (256 * 32 * 256); // global work size
  sycl::range<1> lws (256);            // work-group size
  for (int i = 0; i < repeat; i++)
    q.submit([&] (sycl::handler &cgh) {
      cgh.parallel_for(sycl::nd_range<1>(gws, lws), [=](sycl::nd_item<1> item)
        [[sycl::reqd_sub_group_size(warpSize)]] {
        k(d_d, item);
      });
    });
  q.wait();
  auto end = std::chrono::steady_clock::now();
  std::chrono::duration<float> time = end - start;
  printf("Total kernel time: %f (s)\n", time.count());
  q.memcpy(h_d, d_d, ds * sizeof(d_d[0])).wait();
  // Each counter is hit by (256*32*256)/32 = 256*256 work-items per launch.
  bool ok = true;
  for (int i = 0; i < ds; i++) {
    if (h_d[i] != 256 * 256 * repeat) {
      ok = false;
      break;
    }
  }
  printf("%s\n", ok ? "PASS" : "FAIL");
  sycl::free(d_d, q);
  delete [] h_d;
  return 0;
}
|
<filename>relay_in.py
# pylint: disable=import-error,invalid-name,bare-except,unused-argument,no-self-use
""" Python ZNC module for passing mosquitto mq messages to ZNC """
import re
import multiprocessing
import znc
import paho.mqtt.client as mqtt
def _contains_required_args(args, required_args):
""" Validates the arg string from ZNC contains required arg keys """
return len([x for x in required_args if x in args]) == len(required_args)
def _parse_args(args, required_args):
""" Parse args according to required args """
module_args = {}
split_args = []
for index, token in enumerate([x for x in re.split("=|--", args.strip()) if x.strip()]):
if token:
if index % 2 == 0:
split_args.append("--" + token.strip())
else:
split_args.append(token.strip())
for arg in required_args:
index = split_args.index(arg)
if index > -1:
value = split_args[index + 1]
if value and value not in required_args:
module_args[arg] = value.strip()
return module_args
def _is_valid_module_args(parsed_args, required_args):
""" Validate that parsed args have required args, and no values are None or empty strings """
return len([x for x in required_args if x not in parsed_args.keys()]) == 0 and \
len([x for x in parsed_args.values() if not x]) == 0
class relay_in(znc.Module):
    """ ZNC module that runs a mosquitto MQ client in a background process
    and relays each received MQ message into an IRC channel as a PRIVMSG """
    description = "Relay messages into IRC from obs"
    module_types = [znc.CModInfo.UserModule]
    # Required "--key" CLI arguments, keyed by internal symbolic names.
    _PARAM_KEYS = {
        "_TOPIC_KEY": "--topic",
        "_HOST_KEY": "--host",
        "_PORT_KEY": "--port",
        "_QOS_KEY": "--qos",
        "_CLIENT_ID_KEY": "--client-id",
        "_NETWORK_NAME_KEY": "--network-name",
        "_CHANNEL_KEY": "--channel"
    }
    def __init__(self):
        # NOTE(review): znc.Module.__init__ is not invoked here — presumably
        # ZNC's modpython handles base initialization; confirm before changing.
        self._client = None          # paho mqtt.Client, set in OnLoad
        self._client_process = None  # multiprocessing.Process running the MQ loop
        self._module_args = {}       # parsed "--key" -> value mapping
    def OnLoad(self, args, message):
        """
        Initialize client with this callback to avoid module loading issues with incomplete initialization.
        Returns True on success; on failure returns False with a diagnostic in message.s.
        """
        try:
            message.s = str(message.s) + "\n"
            if not _contains_required_args(args, list(relay_in._PARAM_KEYS.values())):
                message.s = "Missing required args, found: {}, required: {}".format(
                    args, str(list(relay_in._PARAM_KEYS.values())))
                return False
            message.s = str(message.s) + "Passed required arg check\n"
            parsed_args = _parse_args(args, list(relay_in._PARAM_KEYS.values()))
            message.s = str(message.s) + "Parsed module args\n"
            if not _is_valid_module_args(parsed_args, list(relay_in._PARAM_KEYS.values())):
                message.s = "Invalid module args, found: {}, required: {}".format(
                    str(parsed_args), str(list(relay_in._PARAM_KEYS.values())))
                return False
            message.s = str(message.s) + "Passed module arg check\n"
            self._module_args = parsed_args
            message.s = str(message.s) + "Module args: " + str(self._module_args) + "\n"
            self._client = self._init_mqtt_client()
            message.s = str(message.s) + "Initialized mq client\n"
            # loop_forever runs in its own process so ZNC stays responsive
            self._client_process = multiprocessing.Process(target=self._client.loop_forever)
            self._client_process.start()
            message.s = str(message.s) + "Started mq client\n"
            return True
        except Exception as exception:
            # Catch all to ensure any exception will prevent the module from loading
            message.s = str(message.s) + "Failed to load module: \n" + str(exception)
            return False
    def OnModCommand(self, command):
        """ No commands yet """
        return znc.CONTINUE
    def _init_mqtt_client(self):
        """ Initialize, connect, and subscribe mosquitto client """
        client = mqtt.Client(client_id=self._get_param("_CLIENT_ID_KEY"), clean_session=False)
        client.on_message = self._on_message
        client.connect(self._get_param("_HOST_KEY"), port=int(self._get_param("_PORT_KEY")))
        client.subscribe(topic=self._get_param("_TOPIC_KEY"), qos=int(self._get_param("_QOS_KEY")))
        return client
    def _on_message(self, client, user_data, msg):
        """ mosquitto client callback for incoming mq messages; forwards the
        decoded payload as a PRIVMSG to the configured network/channel """
        network = self.GetUser().FindNetwork(self._get_param("_NETWORK_NAME_KEY"))
        if network:
            message = str(msg.payload.decode(encoding='utf-8', errors='ignore'))
            network.GetIRCSock().Write("PRIVMSG {} :{}\r\n".format(self._get_param("_CHANNEL_KEY"), message))
    def GetWebMenuTitle(self):
        """ Title for web UI """
        return "relay_in mosquitto mq to IRC"
    def OnShutdown(self):
        """ Tear down the module: unsubscribe/disconnect the MQ client and
        terminate its background process """
        try:
            if self._client:
                self._client.unsubscribe(self._get_param("_TOPIC_KEY"))
                self._client.disconnect()
            if self._client_process:
                self._client_process.terminate()
        except:
            # Catch all to ensure this module gets unloaded, regardless of exception states
            return znc.CONTINUE
    def _get_param(self, key):
        """ Helper to get a module parameter """
        return self._module_args[relay_in._PARAM_KEYS[key]]
|
def compare_strings(string1, string2):
    """Return a human-readable message stating whether the two strings match."""
    return "Strings are equal" if string1 == string2 else "Strings are not equal"
import { computed, observable } from "mobx";
import { FormDesignerModel } from "../../designer";
import {
FieldModel,
FormComponent,
FormComponentConstructor,
IFieldModelOptions,
ISectionModelOptions,
RepeatableSubFormFieldModel,
SectionModel,
SubFormFieldModel,
TypedValue,
ValidationErrorModel,
ILookupModelOptions,
ILookupModelState,
LookupModel,
IFormComponentOptions,
IFormComponentState,
ITaskModel,
StringValue,
ISectionModelState,
} from ".";
/**
 * Designer-authored definition of a form: identity plus the sections
 * (and optional lookups) it is built from.
 */
export interface IFormModelOptions extends IFormComponentOptions {
  type: "form";
  name: string;
  title: string;
  sections: ISectionModelOptions[];
  lookups?: ILookupModelOptions[];
}
/**
 * Serializable runtime state of a form, used to persist and later restore
 * a session (see FormModel.getState/continueFromState).
 */
export interface IFormModelState extends IFormComponentState {
  lookups: ILookupModelState[];
  sections: ISectionModelState[];
  queryString: string|null;
  forceReadOnly: boolean;
}
export class FormModel extends FormComponent<
IFormModelOptions,
IFormModelState
> {
@observable
readonly sections : SectionModel[] = [];
@observable
readonly lookups : LookupModel[] = [];
protected getChildContainers() {
return [this.sections, this.lookups];
}
@observable
currentSection: SectionModel | undefined;
private queryString: string|undefined;
private constructor(
options: IFormModelOptions,
parent:
| SubFormFieldModel
| RepeatableSubFormFieldModel
| undefined = undefined,
queryString: string|undefined,
state?: IFormModelState | undefined
) {
super(parent, options, state);
this.queryString = state?.queryString ?? queryString;
this.forceReadOnly = state?.forceReadOnly ?? false;
this.sections.push(
...this.options.sections.map(
(s) =>
new SectionModel(
this,
s,
state?.sections.find((ss) => ss.key === s.name)
)
)
);
[this.currentSection] = this.sections;
if (this.options.lookups) {
this.lookups.push(
...this.options.lookups.map(
(l) =>
new LookupModel(
this,
l,
state?.lookups.find((ls) => ls.key === l.name)
)
)
);
}
}
@computed get readOnly() : boolean {
if (this.parent instanceof FieldModel && this.parent.readOnly) {
return true;
}
return this.forceReadOnly;
}
@observable
private forceReadOnly: boolean = false;
getState(): IFormModelState {
return {
id: this.id,
key: this.id,
lookups: this.lookups.map((l) => l.getState()),
sections: this.sections.map((s) => s.getState()),
queryString: this.queryString ?? null,
forceReadOnly: this.forceReadOnly
};
}
static continueFromState(
options: IFormModelOptions,
parent: SubFormFieldModel | RepeatableSubFormFieldModel | undefined,
state: IFormModelState
) {
return new FormModel(options, parent, undefined, state);
}
static async loadAsync({options, parent, queryString, readOnly}: {
options: IFormModelOptions,
parent?:
SubFormFieldModel
| RepeatableSubFormFieldModel
| undefined,
queryString?: string|undefined,
readOnly?: boolean
}
): Promise<FormModel> {
let form = new FormModel(options, parent, queryString);
form.forceReadOnly=readOnly??false;
await form.initAsync();
return form;
}
async initAsync(): Promise<void> {
await FormComponent.initWithChildrenAsync(this);
}
focus(): void {
this.parent?.focus();
}
protected getDesignerLabel(): string {
return this.options.title;
}
canInsert<
T extends FormComponent<IFormComponentOptions, IFormComponentState>
>(
// eslint-disable-next-line @typescript-eslint/no-unused-vars
type: FormComponentConstructor<T, IFieldModelOptions>
): boolean {
return false;
}
async insertAsync<
T extends FormComponent<IFormComponentOptions, IFormComponentState>
>(
// eslint-disable-next-line @typescript-eslint/no-unused-vars
type: FormComponentConstructor<T, IFieldModelOptions>,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
index?: number,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
...params: unknown[]
): Promise<T> {
throw new Error("Cannot insert here");
}
@computed
get parentFieldNamePrefix(): string {
if (this.parent && this.parent instanceof RepeatableSubFormFieldModel) {
return `${this.parent.qualifiedName}[${this.parent.value.value.findIndex(
(i) => i.value === this
)}]/`;
}
if (this.parent && this.parent instanceof SubFormFieldModel) {
return `${this.parent.qualifiedName}/`;
}
return this.getDefaultParentFieldNamePrefix();
}
/** All validation errors aggregated from every section of this form. */
@computed
get validationErrors(): ValidationErrorModel[] {
  return this.sections.flatMap((s) => s.validationErrors);
}
/** Finds a section by name, case-insensitively; undefined when absent. */
getSection(sectionName: string): SectionModel | undefined {
  const wanted = sectionName.toLowerCase();
  return this.sections.find((s) => s.options.name.toLowerCase() === wanted);
}
/**
 * Resolves a field by name within this form.
 *
 * Supports "subForm/field" paths: a plain sub-form recurses into its
 * child form; a repeatable sub-form yields an array with one entry per
 * repetition. Plain names are matched case-insensitively across all
 * sections; when not found locally the lookup optionally walks up to
 * the parent form.
 *
 * NOTE(review): split("/", 2) truncates paths deeper than one level
 * ("a/b/c" loses "/c") — confirm whether deeper nesting is required.
 */
getField(
  fieldName: string,
  useParentIfNotFound = true
):
  | FieldModel<TypedValue, IFieldModelOptions>
  | (FieldModel<TypedValue, IFieldModelOptions> | undefined)[]
  | undefined {
  if (fieldName.includes("/")) {
    const [subFormName, rest] = fieldName.split("/", 2);
    const subForm = this.getField(subFormName);
    if (subForm instanceof SubFormFieldModel) {
      return subForm.value.value?.getField(rest, false);
    }
    if (subForm instanceof RepeatableSubFormFieldModel) {
      return (
        subForm.value.value?.flatMap((s) => s.value?.getField(rest, false)) ??
        []
      );
    }
    return undefined;
  }
  const result =
    this.sections
      ?.flatMap((s) => s.fields)
      .find(
        (f) => f.options.name?.toLowerCase() === fieldName.toLowerCase()
      ) ?? undefined;
  if (!result && this.parentForm && useParentIfNotFound) {
    return this.parentForm.getField(fieldName);
  }
  return result;
}
/**
 * Returns the value of the named field, or undefined when the field is
 * missing or resolves to multiple fields (repeatable sub-forms).
 *
 * The name may carry a ":format" suffix to request a formatted value,
 * or a "querystring:key" prefix to read the value from the top-level
 * form's query string instead of a field.
 */
getFieldValue(
  fieldNameWithPossibleFormatInfo: string
): TypedValue | undefined {
  if (fieldNameWithPossibleFormatInfo.startsWith("querystring:")) {
    return this.topLevelForm?.getQueryStringFieldValue(
      fieldNameWithPossibleFormatInfo
    );
  }
  const [fieldName, formatInfo] = fieldNameWithPossibleFormatInfo.split(
    ":",
    2
  );
  const fieldOrArrayOfFields = this.getField(fieldName);
  if (fieldOrArrayOfFields === undefined) {
    return undefined;
  }
  // Repeatable sub-form matches cannot be reduced to a single value.
  if (Array.isArray(fieldOrArrayOfFields)) {
    return undefined;
  }
  if (formatInfo) {
    return fieldOrArrayOfFields?.getFormattedValue(formatInfo);
  }
  return fieldOrArrayOfFields?.value;
}
/**
 * Resolves a "querystring:<key>" field reference against this form's
 * query string and wraps the raw value (or null) in a StringValue.
 */
private getQueryStringFieldValue(fieldName: string): StringValue {
  const qsKey = fieldName.substring("querystring:".length);
  // Bug fix: `"…" + this.queryString ?? ""` parsed as
  // `("…" + this.queryString) ?? ""`, so the ?? fallback never applied
  // and an undefined queryString produced the literal text "undefined".
  const currentUrl = new URL("http://base.com/?" + (this.queryString ?? ""));
  const value = currentUrl.searchParams.get(qsKey);
  return new StringValue(value);
}
/**
 * Like getFieldValue, but when the name resolves inside a repeatable
 * sub-form it returns an array with one value per repetition instead of
 * undefined. Also supports the "querystring:key" prefix.
 */
getFieldValueIncludingRepeatedSubForms(
  fieldName: string
): TypedValue | undefined | (TypedValue | undefined)[] {
  if (fieldName.startsWith("querystring:")) {
    return this.topLevelForm?.getQueryStringFieldValue(fieldName);
  }
  const fieldOrArrayOfFields = this.getField(fieldName);
  if (fieldOrArrayOfFields === undefined) {
    return undefined;
  }
  if (Array.isArray(fieldOrArrayOfFields)) {
    return fieldOrArrayOfFields.map((f) => f?.value);
  }
  return fieldOrArrayOfFields?.value;
}
/** Sections currently visible to the user. */
@computed get visibleSections(): SectionModel[] {
  return this.sections.filter((f) => f.visible);
}
/**
 * Finds a lookup by exact name on this form, falling back to the parent
 * form chain when it is not defined locally.
 */
getLookup(name: string): LookupModel | undefined {
  return (
    this.lookups.find((l) => l.options.name === name) ??
    this.parentForm?.getLookup(name)
  );
}
}
|
<filename>24.HMM/24.2.Segmentation.py
# !/usr/bin/python
# -*- coding:utf-8 -*-
# HMM-based Chinese word segmentation (B/M/E/S states) trained with
# Baum-Welch and decoded with Viterbi; all probabilities in log space.
import math
import matplotlib.pyplot as plt
import numpy as np
import codecs
import random

# Sentinel standing in for log(0): a very negative finite number.
infinite = -(2 ** 31)
def log_normalize(a):
    """Normalize the non-negative vector a in place and take element-wise logs.

    Each entry becomes log(a[i]) - log(sum(a)); zero entries are mapped to
    the `infinite` sentinel (stand-in for log 0).
    """
    log_total = math.log(sum(a))
    for idx, value in enumerate(a):
        a[idx] = infinite if value == 0 else math.log(value) - log_total
def log_sum(a):
    """Return log(sum(exp(a))) computed stably via the max-shift trick.

    An empty input yields the `infinite` sentinel (log of zero).
    """
    if not a:  # empty sequence: log(0)
        return infinite
    peak = max(a)
    return peak + math.log(sum(math.exp(x - peak) for x in a))
def calc_alpha(pi, A, B, o, alpha):
    # Forward pass in log space:
    # alpha[t][i] = log P(o_1..o_t, state_t = i | lambda).
    # B rows are indexed by the Unicode code point of each observed char.
    for i in range(4):
        alpha[0][i] = pi[i] + B[i][ord(o[0])]
    T = len(o)
    temp = [0 for i in range(4)]
    del i  # drop the loop variable leaked by the for above
    for t in range(1, T):
        for i in range(4):
            # Sum over all predecessor states j, then add the emission.
            for j in range(4):
                temp[j] = (alpha[t - 1][j] + A[j][i])
            alpha[t][i] = log_sum(temp)
            alpha[t][i] += B[i][ord(o[t])]
def calc_beta(pi, A, B, o, beta):
    # Backward pass in log space:
    # beta[t][i] = log P(o_{t+1}..o_T | state_t = i, lambda).
    T = len(o)
    for i in range(4):
        # NOTE(review): in log space the terminal beta should arguably be
        # 0 (= log 1) rather than 1 — confirm against the derivation
        # before reusing this code.
        beta[T - 1][i] = 1
    temp = [0 for i in range(4)]
    del i  # drop the loop variable leaked by the for above
    for t in range(T - 2, -1, -1):
        for i in range(4):
            beta[t][i] = 0
            # Sum over successor states j of transition * emission * beta.
            for j in range(4):
                temp[j] = A[i][j] + B[j][ord(o[t + 1])] + beta[t + 1][j]
            beta[t][i] += log_sum(temp)
def calc_gamma(alpha, beta, gamma):
    """Fill gamma[t][i] = log P(state_t = i | O, lambda).

    Combines the forward and backward scores and normalizes each time
    step with log_sum so the four entries form a log distribution.
    """
    for t in range(len(alpha)):
        combined = [alpha[t][i] + beta[t][i] for i in range(4)]
        normalizer = log_sum(combined)
        for i in range(4):
            gamma[t][i] = combined[i] - normalizer
def calc_ksi(alpha, beta, A, B, o, ksi):
    # ksi[t][i][j] = log P(state_t = i, state_{t+1} = j | O, lambda),
    # normalized over all 16 (i, j) pairs at each time step.
    T = len(alpha)
    temp = [0 for x in range(16)]
    for t in range(T - 1):
        k = 0
        for i in range(4):
            for j in range(4):
                ksi[t][i][j] = alpha[t][i] + A[i][j] + B[j][ord(o[t + 1])] + beta[t + 1][j]
                temp[k] = ksi[t][i][j]
                k += 1
        # Normalize by the log-sum of all 16 unnormalized entries.
        s = log_sum(temp)
        for i in range(4):
            for j in range(4):
                ksi[t][i][j] -= s
def bw(pi, A, B, alpha, beta, gamma, ksi, o):
    # M-step of Baum-Welch: re-estimate pi, A and B in place from the
    # expected counts gamma and ksi (all quantities in log space).
    T = len(alpha)
    for i in range(4):
        pi[i] = gamma[0][i]
    s1 = [0 for x in range(T - 1)]
    s2 = [0 for x in range(T - 1)]
    # A[i][j] = log(expected i->j transitions / expected visits to i).
    for i in range(4):
        for j in range(4):
            for t in range(T - 1):
                s1[t] = ksi[t][i][j]
                s2[t] = gamma[t][i]
            A[i][j] = log_sum(s1) - log_sum(s2)
    s1 = [0 for x in range(T)]
    s2 = [0 for x in range(T)]
    # Re-estimate each emission row over the full 16-bit code-point range.
    for i in range(4):
        for k in range(65536):
            if k % 5000 == 0:
                print(i, k)  # progress indicator; this loop is slow
            valid = 0
            for t in range(T):
                if ord(o[t]) == k:
                    s1[valid] = gamma[t][i]
                    valid += 1
                s2[t] = gamma[t][i]
            if valid == 0:
                B[i][k] = -log_sum(s2)  # smoothing for unseen characters
            else:
                B[i][k] = log_sum(s1[:valid]) - log_sum(s2)
def baum_welch(pi, A, B):
    """Run 100 Baum-Welch (EM) iterations over the training text.

    pi, A and B are updated in place; the parameters are snapshotted to
    disk after every iteration via save_parameter.
    """
    # Bug fix: the original py2 code did f.read()[3:].decode('utf-8'),
    # which fails on Python 3 where read() already returns str. The [3:]
    # slice was skipping a 3-byte UTF-8 BOM; 'utf-8-sig' strips the BOM
    # correctly, and the context manager guarantees the file is closed.
    with open("./2.txt", encoding="utf-8-sig") as f:
        sentence = f.read()
    T = len(sentence)  # length of the observation sequence
    alpha = [[0 for i in range(4)] for t in range(T)]
    beta = [[0 for i in range(4)] for t in range(T)]
    gamma = [[0 for i in range(4)] for t in range(T)]
    ksi = [[[0 for j in range(4)] for i in range(4)] for t in range(T - 1)]
    for time in range(100):
        print("time:", time)
        calc_alpha(pi, A, B, sentence, alpha)  # forward probabilities
        calc_beta(pi, A, B, sentence, beta)    # backward probabilities
        calc_gamma(alpha, beta, gamma)         # per-step state occupancy
        calc_ksi(alpha, beta, A, B, sentence, ksi)  # transition occupancy
        bw(pi, A, B, alpha, beta, gamma, ksi, sentence)  # M-step
        save_parameter(pi, A, B, time)
def list_write(f, v):
    """Serialize v to f: each value followed by a space, newline-terminated."""
    line = ''.join('{} '.format(item) for item in v)
    f.write(line + '\n')
def save_parameter(pi, A, B, time):
    """Snapshot pi, A and B to pi<time>.txt, A<time>.txt and B<time>.txt.

    One line per vector / matrix row, in the format written by list_write.
    Context managers guarantee the files are closed even if a write fails
    (the original left all three handles open on error).
    """
    with open("./pi%d.txt" % time, "w") as f_pi:
        list_write(f_pi, pi)
    with open("./A%d.txt" % time, "w") as f_a:
        for row in A:
            list_write(f_a, row)
    with open("./B%d.txt" % time, "w") as f_b:
        for row in B:
            list_write(f_b, row)
def train():
    # Randomly initialize pi, A and B, zero out the impossible B/M/E/S
    # transitions, log-normalize every row, then run Baum-Welch training.
    pi = [random.random() for x in range(4)]  # initial state distribution
    log_normalize(pi)
    # Transition matrix over states B(egin)/M(iddle)/E(nd)/S(ingle).
    A = [[random.random() for y in range(4)] for x in range(4)]
    # Impossible transitions, e.g. Begin cannot follow Begin directly.
    A[0][0] = A[0][3] = A[1][0] = A[1][3] \
        = A[2][1] = A[2][2] = A[3][1] = A[3][2] = 0
    # Emission matrix over the full 16-bit Unicode code-point range.
    B = [[random.random() for y in range(65536)] for x in range(4)]
    for i in range(4):
        log_normalize(A[i])
        log_normalize(B[i])
    baum_welch(pi, A, B)
    return pi, A, B
def load_train():
    """Load pi, A and B from pi.txt, A.txt and B.txt in the current dir.

    Each line holds space-separated floats with a trailing space (the
    list_write format); the trailing empty token is dropped by [:-1].
    Returns (pi, A, B). Note: pi keeps only the file's last line, and is
    unbound if pi.txt is empty (preserved from the original behavior).
    """
    # Context managers replace the manual open/close pairs, so files are
    # closed even when parsing raises.
    with open("./pi.txt", mode="r") as f:
        for line in f:
            pi = list(map(float, line.split(' ')[:-1]))
    # Transition matrix over B/M/E/S: one row per line.
    with open("./A.txt", mode="r") as f:
        A = [list(map(float, line.split(' ')[:-1])) for line in f]
    with open("./B.txt", mode="r") as f:
        B = [list(map(float, line.split(' ')[:-1])) for line in f]
    return pi, A, B
def viterbi(pi, A, B, o):
    # Viterbi decoding in log space: return the most probable B/M/E/S
    # state index sequence for the observation string o.
    T = len(o)  # length of the observation sequence
    delta = [[0 for i in range(4)] for t in range(T)]
    # pre[t][i]: best predecessor of state i at time t (backpointers)
    pre = [[0 for i in range(4)] for t in range(T)]
    for i in range(4):
        delta[0][i] = pi[i] + B[i][ord(o[0])]
    for t in range(1, T):
        for i in range(4):
            # Maximize over the previous state j (j = 0 seeds the max).
            delta[t][i] = delta[t - 1][0] + A[0][i]
            for j in range(1, 4):
                vj = delta[t - 1][j] + A[j][i]
                if delta[t][i] < vj:
                    delta[t][i] = vj
                    pre[t][i] = j
            delta[t][i] += B[i][ord(o[t])]
    # Backtrack the maximum-probability path from the best final state.
    decode = [-1 for t in range(T)]
    q = 0
    for i in range(1, 4):
        if delta[T - 1][i] > delta[T - 1][q]:
            q = i
    decode[T - 1] = q
    for t in range(T - 2, -1, -1):
        q = pre[t + 1][q]
        decode[t] = q
    return decode
def segment(sentence, decode):
    # Print the segmentation implied by the B/M/E/S state sequence
    # `decode`: each word is emitted followed by a "|" separator.
    N = len(sentence)
    i = 0
    while i < N:  # states: 0=B(egin) 1=M(iddle) 2=E(nd) 3=S(ingle)
        if decode[i] == 0 or decode[i] == 1:  # word start: scan to End
            j = i + 1
            while j < N:
                if decode[j] == 2:
                    break
                j += 1
            print(sentence[i:j + 1], "|", )
            i = j + 1
        elif decode[i] == 3 or decode[i] == 2:  # single char (or stray End)
            print(sentence[i:i + 1], "|", )
            i += 1
        else:
            print('Error:', i, decode[i])
            i += 1
if __name__ == "__main__":
    # Load previously trained parameters and segment the sample text.
    pi, A, B = load_train()
    f = open("./24.mybook.txt", encoding='utf-8')
    # Skip the first 3 chars (legacy header/BOM handling).
    data = f.read()[3:]  # .decode('utf-8')
    f.close()
    decode = viterbi(pi, A, B, data)
    segment(data, decode)
|
<reponame>Qolzam/telar-core-ext-js<filename>__tests__/endpoint-routing-application-builder-extensions.test.ts<gh_stars>1-10
import { IServiceCollection } from '@telar/core/IServiceCollection';
import { IApplicationBuilder } from '@telar/core/IApplicationBuilder';
import { IConfiguration } from '@telar/core/IConfiguration';
import { Host } from '@telar/core/Host';
import { IBootstrap } from '@telar/core/IBootstrap';
import { IWebHostEnvironment } from '@telar/core/IWebHostEnvironment';
import '../src/HostBuilderExtension';
import '../src/EndpointRoutingApplicationBuilderExtensions';
import '../src/HostEnvironmentEnvExtensions';
import { Context } from '@telar/mvc';
// Smoke test for the endpoint-routing extensions: building a host whose
// bootstrap calls useRouting() and useEndpoints() must not throw.
describe('Endpoint routing application builder extensions', () => {
  // Dummy service used to exercise DI registration.
  class MyClass {}

  // Minimal bootstrap wiring routing and one GET endpoint, mirroring a
  // real application's configuration.
  class Bootstrap extends IBootstrap {
    private _configuration: IConfiguration;

    public constructor(configuration: IConfiguration) {
      super();
      this._configuration = configuration;
    }

    public get configuration(): IConfiguration {
      return this._configuration;
    }

    public set configuration(value: IConfiguration) {
      this._configuration = value;
    }

    // This method gets called by the runtime. Use this method to add services to the container.
    public async configureServices(services: IServiceCollection): Promise<void> {
      services.bind(MyClass).to(MyClass);
    }

    // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
    public configure(app: IApplicationBuilder<any, {}>, env: IWebHostEnvironment): void {
      if (env.isDevelopment()) {
        // eslint-disable-next-line no-console
        console.log('[info] isDevelopment');
      }
      app.useRouting();
      app.useEndpoints((endpoint) => {
        endpoint.get('/', (ctx: Context<any>) => {
          ctx.body = 'Hello World!';
        });
      });
    }
  }

  test('Should not throw error when using useEndpoints and useRouting', async () => {
    await Host.prototype.buildGlobalHost(Bootstrap);
  });
});
|
var bun = require('bun');
var tstream = require('tstream');
var delimiter_frame = require('./lib/delimiter-frame');
// Transform-stream pair for JSON payloads: Parse turns raw chunks into
// objects, Stringify does the reverse.
var json_stream = {
    // Emits one parsed object per chunk; malformed JSON is reported via
    // a non-fatal "warn" event instead of destroying the stream.
    Parse: tstream(function(chunk, encoding, callback){
        try {
            var data = JSON.parse(chunk.toString());
            this.push(data);
        } catch(err) {
            this.emit('warn', err, chunk.toString());
        } finally {
            callback();
        }
    }),
    Stringify: tstream(function(obj, encoding, done){
        this.push(JSON.stringify(obj));
        done();
    })
};
module.exports = {
    // Delimiter-framed JSON reader: the frame splitter is piped into the
    // JSON parser; "warn" events from the parser are forwarded onto the
    // combined pipeline so callers can observe parse failures.
    Parse: function(deliminiter){
        var frame_receive = new delimiter_frame.Receive(deliminiter, {objectMode:true});
        var json_parse = new json_stream.Parse({objectMode:true});
        var pipeline = bun([frame_receive, json_parse]);
        event_forward('warn', json_parse, pipeline);
        return pipeline;
    },
    // Delimiter-framed JSON writer: stringify, then frame with delimiter.
    Stringify: function(deliminiter) {
        return bun([
            new json_stream.Stringify({objectMode:true}),
            new delimiter_frame.Send(deliminiter)
        ]);
    }
};
function event_forward(event, ee_from, ee_to){
ee_from.on(event, function(){
var args = Array.prototype.slice.call(arguments, 0);
args.unshift(event);
ee_to.emit.apply(ee_to, args);
});
} |
import React from 'react'
import PropTypes from 'prop-types'
function Error (props) {
const { message } = props
return (
<div className="col-12 heading justify-content-center loading">
<br />
<br />
<br />
<br />
<h3 align="center" className="text-danger">
{`Oops! ${message}`}
<br />
<br />
</h3>
</div>
)
}
Error.propTypes = {
message: PropTypes.string
}
export default Error
|
# Progressive multiple-sequence alignment with FAMSA, importing an
# externally computed guide tree; the output file name encodes the
# alignment method and tree method used.
famsa -gt import ${guide_tree} ${seqs} \
    ${id}.prog.${align_method}.with.${tree_method}.tree.aln
|
// Test fixture (currently skipped): toggling `visible` removes and
// restores the <p> while the `<!--#if visible-->` anchor comment stays
// in the DOM as the insertion point.
export default {
    skip: true,
    data: {
        visible: true
    },
    html: 'before\n<p>Widget</p><!--#if visible-->\nafter',

    test ( assert, component, target ) {
        component.set({ visible: false });
        assert.equal( target.innerHTML, 'before\n<!--#if visible-->\nafter' );

        component.set({ visible: true });
        assert.equal( target.innerHTML, 'before\n<p>Widget</p><!--#if visible-->\nafter' );
    }
};
|
################
# User setup
################
# Create a new user with sudo privileges. All variable expansions are
# quoted so usernames/passwords containing spaces or glob characters
# cannot be word-split, and legacy backticks are replaced with $(...).
sudo adduser --disabled-password --gecos "" "$NONROOT_USERNAME"
sudo usermod -aG sudo "$NONROOT_USERNAME"
echo "$NONROOT_USERNAME:$NONROOT_PASSWORD" | sudo chpasswd

# Set zsh as the new user's default shell
sudo chsh -s "$(which zsh)" "$NONROOT_USERNAME"

# Oh my zsh for the new user
installOhMyZSH "$NONROOT_USERNAME"
sudo chown -R "$NONROOT_USERNAME" "$(eval echo "~$NONROOT_USERNAME")"

# Deny the user SSH access unless explicitly allowed
if [ "$NONROOT_SSH" = "n" ]; then
  echo "DenyUsers $NONROOT_USERNAME" >> /etc/ssh/sshd_config
fi
|
#ifndef PROJECT_MATH_UTILS_H
#define PROJECT_MATH_UTILS_H
// NOTE: the original guard was _MATH_H, which is in the reserved
// identifier space and collides with the C library's <math.h> guard,
// silently blocking that header when this one is included first.

#include <cstdint> // std::uint32_t for bit-level float manipulation
#include <cstring> // std::memcpy for well-defined type punning

// Small static math helper functions.
class Math {
public:
    // Quake-style fast approximate 1/sqrt(x); roughly 0.2% accurate
    // after the single Newton-Raphson refinement below.
    static float fastInverseSquareRoot(float x) {
        float halfx = 0.5f * x;
        float y = x;
        // Reinterpret the float's bits via memcpy: the original
        // `*(long*)&y` cast is undefined behavior (strict aliasing)
        // and wrong on platforms where long is 8 bytes.
        std::uint32_t i;
        std::memcpy(&i, &y, sizeof(i));
        i = 0x5f3759df - (i >> 1); // magic constant / exponent halving
        std::memcpy(&y, &i, sizeof(y));
        y = y * (1.5f - (halfx * y * y)); // one Newton-Raphson step
        return y;
    }

    // Linearly remap x from [in_min, in_max] to [out_min, out_max];
    // no clamping (mirrors Arduino's map() for floats).
    static float mapfloat(float x, float in_min, float in_max, float out_min, float out_max)
    {
        return (x - in_min) * (out_max - out_min) / (in_max - in_min) + out_min;
    }
};

#endif
|
#!/bin/bash
##
## Copyright (c) 2014-2017 Leidos.
##
## License: 3-clause BSD, see https://opensource.org/licenses/BSD-3-Clause
##
##
## Developed under contract #FA8750-14-C-0241
##
# Iterate all corpus projects; read/parse files from each project
# Bug fix: the saved-IFS variable was misspelled (AVEIFS), so the
# restore at the bottom (IFS=$SAVEIFS) reset IFS to an empty value.
SAVEIFS=$IFS
IFS=$(echo -en "\n\b")
count=1
fcount=0
path="/data/corpus/"
# Loop through all projects (uciMaven dirs exactly 10 levels deep)
for project in $(find $path -maxdepth 10 -mindepth 10 -type d -name uciMaven)
do
    # Strip the trailing path component (the uciMaven dir itself)
    project=$(echo $project | rev | cut -d "/" -f 2- | rev)
    echo "Working on $((count++)) project: $project"
    numVersions=0
    # Read and parse index.json for particular values
    if [ -f "$project/index.json" ]; then
        version=$( cat "$project/index.json" | jq -r .version )
        numVersions=$( cat "$project/index.json" | jq -r '.version_history | length' )
        latest=$( cat "$project/index.json" | jq -r .version_history[$((numVersions-1))])
        echo $numVersions
        echo $latest
        if [[ "$latest" != "null" ]]; then
            if [ -f "$project/$latest/source.jar" ]; then
                ((fcount=fcount+1))
                mkdir -p "$project/latest/"
                # Extract the latest source jar into the latest/ dir
                cd "$project/latest/"
                jar -xf "$project/$latest/source.jar"
            else
                echo "No source.jar found for version: $latest"
            fi
        else
            echo " No version defined in index.json "
        fi # end if code exists
    fi
    echo "----$fcount-files-found-----------"
    echo ""
done
IFS=$SAVEIFS
|
# Evaluate the 13th fine-tuned 512+0+512-SWS model on the WikiText-103
# validation split, using the identity_old_quarter augmentation and the
# penultimate-quarter evaluation function (batch size 1, drop last batch).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-SWS/13-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-SWS/13-512+0+512-old-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function identity_old_quarter --eval_function penultimate_quarter_eval
# This is a toolkit file to be sourced whenever bash is used for scripting.
# It includes tools such as env vars, a logger, and trap functions.
# This file is based on a deep revision of a template by BASH3 Boilerplate v2.3.0
# http://bash3boilerplate.sh/#authors

# Exit on error inside any functions or subshells.
# Don't use it; prefer handling errors yourself.
# set -o errtrace
# Do not allow use of undefined vars. Use ${VAR:-} to use an undefined VAR
# set -o nounset
# Catch the error in case mysqldump fails (but gzip succeeds) in `mysqldump |gzip`
set -o pipefail
# Turn on traces, useful while debugging but commented out by default
# set -o xtrace is equal to set -x
# set -o xtrace = set -x

#########################################################
# load common useful variables
#########################################################
## Check whether the script is sourced or not; this helps declare the
## env vars correctly for the main script.
# i.e. if sourced
[[ "${BASH_SOURCE[0]}" != "${0}" ]] && __tmp_source_index="1"
readonly __dir="$(cd "$(dirname "${BASH_SOURCE[${__tmp_source_index:-0}]}")" && pwd)"
readonly __file="${__dir}/$(basename "${BASH_SOURCE[${__tmp_source_index:-0}]}")"
readonly __file_basename="$(basename "${__file}" .sh)"
# readonly __invocation="$(printf %q "${__file}")$((($#)) && printf ' %q' "$@" || true)"
readonly __project_dir=$( readlink -f "${__dir}/../.." )
## Use this when the project is bundled with other projects under a parent dir.
readonly __project_parent_dir=$( readlink -f "${__project_dir}/.." )
# echo "__dir" $__dir
# echo "__file" $__file
# echo "__project_dir ${__project_dir}"

#########################################################
# load a logger
#########################################################
# Define the environment variables (and their defaults) that this script depends on
LOG_LEVEL="${LOG_LEVEL:-6}" # 6 = info (default), 7 = debug ... 0 = emergency
NO_COLOR="${NO_COLOR:-}"    # true = disable color; otherwise autodetected
# Core log formatter: prints a UTC-timestamped, color-coded line to
# stderr for each line of the message. First argument is the level name
# (debug..emergency); remaining arguments form the message.
function __utils_log () {
  ## Assign colors to the different log levels.
  local log_level="${1}"
  shift
  # shellcheck disable=SC2034
  local color_debug="\\x1b[35m"
  # shellcheck disable=SC2034
  local color_info="\\x1b[32m"
  # shellcheck disable=SC2034
  local color_notice="\\x1b[34m"
  # shellcheck disable=SC2034
  local color_warning="\\x1b[33m"
  # shellcheck disable=SC2034
  local color_error="\\x1b[31m"
  # shellcheck disable=SC2034
  local color_critical="\\x1b[1;31m"
  # shellcheck disable=SC2034
  local color_alert="\\x1b[1;33;41m"
  # shellcheck disable=SC2034
  local color_emergency="\\x1b[1;4;5;33;41m"

  local colorvar="color_${log_level}"
  # If colorvar is not set or null, substitute ${color_error}.
  local color="${!colorvar:-${color_error}}"
  local color_reset="\\x1b[0m"

  if [[ "${NO_COLOR:-}" = "true" ]] || { [[ "${TERM:-}" != "xterm"* ]] && [[ "${TERM:-}" != "screen"* ]] ; } || [[ ! -t 2 ]]; then
    if [[ "${NO_COLOR:-}" != "false" ]]; then
      # Don't use colors on pipes or non-recognized terminals
      color=""; color_reset=""
    fi
  fi

  # All remaining arguments are to be printed.
  local log_line=""
  ## IFS=$'\n' changes the field separator to newline; placed on the
  ## read command it takes effect only locally.
  while IFS=$'\n' read -r log_line; do
    echo -e "$(date -u +"%Y-%m-%d %H:%M:%S UTC") ${color}$(printf "[%${#log_level}s]" "${log_level}")${color_reset} ${log_line}" 1>&2
  done <<< "${@:-}"
}
# Leveled logging helpers: each logs only when LOG_LEVEL is at least the
# level's numeric value; emergency always logs and aborts the script.
function emergency () { __utils_log emergency "${@}"; exit 1; }
function alert () { [[ "${LOG_LEVEL:-0}" -ge 1 ]] && __utils_log alert "${@}"; true; }
function critical () { [[ "${LOG_LEVEL:-0}" -ge 2 ]] && __utils_log critical "${@}"; true; }
function error () { [[ "${LOG_LEVEL:-0}" -ge 3 ]] && __utils_log error "${@}"; true; }
function warning () { [[ "${LOG_LEVEL:-0}" -ge 4 ]] && __utils_log warning "${@}"; true; }
function notice () { [[ "${LOG_LEVEL:-0}" -ge 5 ]] && __utils_log notice "${@}"; true; }
function info () { [[ "${LOG_LEVEL:-0}" -ge 6 ]] && __utils_log info "${@}"; true; }
function debug () { [[ "${LOG_LEVEL:-0}" -ge 7 ]] && __utils_log debug "${@}"; true; }
#########################################################
# Signal trapping and reporting
#
#########################################################
# If you need to run an action before exit, include the function and the
# subsequent trap call in the main script.
function __clean_up_before_exit {
  local result=${?}
  # Your cleanup code here
  info "Cleaning up. Done"
  exit ${result}
}
## Uncomment the following line to provide a cleanup function on EXIT.
# trap __clean_up_before_exit EXIT

# If you need to trace every error, even those inside functions,
# include the function and the subsequent trap call in the main script.
# Requires `set -o errtrace`.
function __report_on_error() {
  local error_code=${?}
  error "Error in ${__file} on line ${1}"
  exit ${error_code}
}
# Uncomment the following line for always providing an error backtrace.
## A trap call on ERR will catch all errors, including those inside functions.
#trap '__report_on_error ${LINENO}' ERR
|
#!/bin/bash
# Sanity check for the CI toolbox image: verify every helper command and
# required tool is on PATH (command -v / --version fail fast under -e).
set -ex
command -v ci
command -v clean_up_reusable_docker
command -v ensure_head
command -v print_env
command -v push_image_to_ecr
command -v push_image_to_docker_hub
command -v pull_image_from_ecr
command -v push_lambda
command -v wfi
docker-compose version
docker --version
python3 --version
aws --version
# Scala builds additionally need a JDK and sbt.
if [[ "$SPECIALIZATION" = "scala" ]]; then
  java -version
  sbt --version
fi
|
//
// HNSubmission.h
// newsyc
//
// Created by <NAME> on 3/30/11.
// Copyright 2011 Xuzz Productions, LLC. All rights reserved.
//

#import "HNKit.h"
#import "HNAPISubmission.h"

// Notification names posted when a submission succeeds or fails.
#define kHNSubmissionSuccessNotification @"kHNSubmissionSuccessNotification"
#define kHNSubmissionFailureNotification @"kHNSubmissionFailureNotification"

// The kind of write action being submitted to Hacker News.
typedef enum {
    kHNSubmissionTypeSubmission,
    kHNSubmissionTypeVote,
    kHNSubmissionTypeFlag,
    kHNSubmissionTypeReply
} HNSubmissionType;

@class HNEntry;

// Encapsulates a single write action (story submission, vote, flag or
// reply) performed against the Hacker News API.
@interface HNSubmission : NSObject <HNAPISubmissionDelegate> {
    HNSubmissionType type;
    HNEntry *target;        // entry being voted on / flagged / replied to
    NSURL *destination;     // URL for link submissions
    NSString *title;
    NSString *body;
    HNVoteDirection direction;
}

@property (nonatomic, readonly) HNSubmissionType type;
@property (nonatomic, retain) HNEntry *target;
@property (nonatomic, copy) NSURL *destination;
@property (nonatomic, copy) NSString *body;
@property (nonatomic, copy) NSString *title;
@property (nonatomic, assign) HNVoteDirection direction;

- (HNSubmission *)initWithSubmissionType:(HNSubmissionType)type_;

@end
|
<reponame>Team-Orochimaru/finacial-planner
import React, {Component} from 'react'
import PropTypes from 'prop-types'
import {connect} from 'react-redux'
import {Link} from 'react-router-dom'
import {logout} from '../store'
import Home from './home'
import 'materialize-css/dist/css/materialize.min.css'
import M from 'materialize-css/dist/js/materialize.min.js'
// Top navigation bar. Renders one of three layouts:
//  1. logged in with a linked Plaid account: full menu + mobile sidenav
//  2. logged in without a Plaid token: reduced menu (Home / Logout)
//  3. logged out: brand bar plus the public Home page
class Navbar extends Component {
  // Re-initialize the materialize sidenav after each update, because the
  // #slide-out element is re-created when the auth state changes.
  componentDidUpdate() {
    let sidenav = document.getElementById('slide-out')
    M.Sidenav.init(sidenav, {})
  }

  render() {
    const {handleClick, isLoggedIn, plaidAccessToken} = this.props
    return (
      <div>
        {isLoggedIn &&
          plaidAccessToken && (
            <div>
              <nav className="orange" role="navigation">
                <div className="container">
                  <a
                    href="/overview"
                    id="logo-container"
                    className="brand-logo"
                  >
                    eBudget
                  </a>
                  <a
                    href="#"
                    data-target="slide-out"
                    className="sidenav-trigger show-on-large"
                  >
                    <i className="material-icons" id="ham-menu">
                      menu
                    </i>
                  </a>
                  <ul id="nav-mobile" className="right hide-on-med-and-down">
                    <li>
                      <Link to="/home">Home</Link>
                    </li>
                    <li>
                      <Link to="/overview">Account Overview</Link>
                    </li>
                    <li>
                      <Link to="/budget">Budget Calculator</Link>
                    </li>
                    <li>
                      <Link to="/monthly">Monthly Spending</Link>
                    </li>
                    <li>
                      <Link to="/yearly">Annual Spending</Link>
                    </li>
                    <li>
                      <Link to="#" onClick={handleClick}>
                        Logout
                      </Link>
                    </li>
                  </ul>
                </div>
              </nav>
              {/* Mobile sidenav mirrors the desktop menu above. */}
              <ul id="slide-out" className="sidenav">
                <li>
                  <Link to="/home">Home</Link>
                </li>
                <li>
                  <Link to="/overview">Account Overview</Link>
                </li>
                <li>
                  <Link to="/budget">Budget Calculator</Link>
                </li>
                <li>
                  <Link to="/monthly">Monthly Spending</Link>
                </li>
                <li>
                  <Link to="/yearly">Annual Spending</Link>
                </li>
                <li>
                  <Link to="#" onClick={handleClick}>
                    Logout
                  </Link>
                </li>
              </ul>
            </div>
          )}
        {isLoggedIn &&
          !plaidAccessToken && (
            <div>
              <nav className="orange" role="navigation">
                <div className="container">
                  <a
                    href="/overview"
                    id="logo-container"
                    className="brand-logo"
                  >
                    eBudget
                  </a>
                  <a
                    href="#"
                    data-target="slide-out"
                    className="sidenav-trigger show-on-large"
                  >
                    <i className="material-icons" id="ham-menu">
                      menu
                    </i>
                  </a>
                  <ul id="nav-mobile" className="right hide-on-med-and-down">
                    <li>
                      <a href="/home">Home</a>
                    </li>
                    <li>
                      <a href="#" onClick={handleClick}>
                        Logout
                      </a>
                    </li>
                  </ul>
                </div>
              </nav>
              <ul id="slide-out" className="sidenav">
                <li>
                  <a href="/home">Home</a>
                </li>
                <li>
                  <a href="#" onClick={handleClick}>
                    Logout
                  </a>
                </li>
              </ul>
            </div>
          )}
        {!isLoggedIn && (
          <div>
            <nav className="orange" role="navigation">
              <div className="nav-wrapper container">
                <Link to="/" id="logo-container" className="brand-logo">
                  eBudget
                </Link>
              </div>
            </nav>
            <Home />
          </div>
        )}
      </div>
    )
  }
}
const mapState = state => {
return {
isLoggedIn: !!state.user.id,
plaidAccessToken: state.user.plaidAccessToken
}
}
const mapDispatch = dispatch => {
return {
handleClick() {
dispatch(logout())
}
}
}
// Wire Navbar to the store; propTypes documents the injected props.
// NOTE(review): plaidAccessToken is injected by mapState but not
// declared in propTypes — confirm whether it should be added.
export default connect(mapState, mapDispatch)(Navbar)

Navbar.propTypes = {
  handleClick: PropTypes.func.isRequired,
  isLoggedIn: PropTypes.bool.isRequired
}
|
use std::fs;
use std::path::Path;
/// List the names of files in `directory_path` whose names end with
/// `extension`.
///
/// Errors (as human-readable strings) when the path is not a directory,
/// the directory cannot be read, or no matching file exists.
pub fn select_files(directory_path: &str, extension: &str) -> Result<Vec<String>, String> {
    let dir = Path::new(directory_path);
    if !dir.is_dir() {
        return Err("Invalid directory path".to_string());
    }

    let entries = fs::read_dir(dir).map_err(|_| "Error reading directory".to_string())?;

    // Unreadable entries are silently skipped, matching the original's
    // `if let Ok(..)` behavior.
    let mut matches = Vec::new();
    for entry in entries.flatten() {
        if let Some(name) = entry.file_name().to_str() {
            if name.ends_with(extension) {
                matches.push(name.to_owned());
            }
        }
    }

    if matches.is_empty() {
        return Err("No files with the specified extension found".to_string());
    }
    Ok(matches)
}
<!DOCTYPE html>
<!-- Demo page: a single-row, four-cell table whose cells are colored
     per column via :nth-child (green default, then yellow/blue/red). -->
<html>
<head>
  <title>Table</title>
  <style type="text/css">
    table, th, td {
      border: 1px solid black;
    }
    td {
      background-color: green;
    }
    td:nth-child(2) {
      background-color: yellow;
    }
    td:nth-child(3) {
      background-color: blue;
    }
    td:nth-child(4) {
      background-color: red;
    }
  </style>
</head>
<body>
  <table>
    <tr>
      <td></td>
      <td></td>
      <td></td>
      <td></td>
    </tr>
  </table>
</body>
</html>
/** @module fs/all
 * Barrel module: re-exports the ls/path filesystem helpers plus the
 * ResourceExplorer and ResourceManager default exports under one path.
 */
export * from "./ls.js"
export * from "./path.js"
export { default as ResourceExplorer } from "./ResourceExplorer.js"
export { default as ResourceManager } from "./ResourceManager.js"
|
import { get } from "needle";
import { config } from "../config";
import { isJSONString } from "../helpers";
import { sendToQueue } from "./mq";
const { streamURL, bearerToken } = config;
// Open the streaming HTTP connection and push each JSON chunk onto the
// message queue. Non-recoverable connection errors exit the process;
// ECONNRESET triggers a reconnect with exponential backoff.
export const streamConnect = (retryAttempt: number) => {
  const stream = get(streamURL, {
    headers: {
      "User-Agent": "parlament-stream",
      Authorization: `Bearer ${bearerToken}`,
    },
  });

  stream.on("data", async (json) => {
    try {
      // Keep-alive newlines / partial chunks fail the JSON-validity
      // check and are skipped rather than queued.
      if (isJSONString(json)) {
        await sendToQueue(Buffer.from(json));
      }
    } catch (err) {
      console.log(err);
    }
  });

  // NOTE(review): "err" is assumed to be needle's custom error event
  // name (not Node's standard "error") — confirm against the needle
  // version in use.
  stream.on("err", (error) => {
    if (error.code !== "ECONNRESET") {
      console.log(error.code);
      process.exit(1);
    }
    // Exponential backoff: 2^retryAttempt milliseconds before retrying.
    setTimeout(() => {
      console.warn("A connection error occurred. Reconnecting...");
      streamConnect(++retryAttempt);
    }, 2 ** retryAttempt);
  });
};
|
<gh_stars>1-10
// Compiled NativeScript-style module: re-exports text-base and defines
// TextView as a thin subclass of EditableTextBase.

// TypeScript-generated __extends helper (prototype-chain inheritance).
var __extends = this.__extends || function (d, b) {
    for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
    function __() { this.constructor = d; }
    __.prototype = b.prototype;
    d.prototype = new __();
};
var textBase = require("ui/text-base");
var editableTextBase = require("ui/editable-text-base");
// Merge all text-base exports into this module's exports.
require("utils/module-merge").merge(textBase, exports);
var TextView = (function (_super) {
    __extends(TextView, _super);
    // Pass construction options straight through to EditableTextBase.
    function TextView(options) {
        _super.call(this, options);
    }
    return TextView;
})(editableTextBase.EditableTextBase);
exports.TextView = TextView;
|
import React, {Component} from 'react'
class Search extends Component {
constructor(props){
super(props);
this.state = {
text:''
}
}
handleClick(ev){
let text = ev.target.value;
this.setState({"text":text});
this.props.handleChange(text);
}
componentDidMount() {
// this.props = {
// movies:[]
// }
}
render () {
return (
<div className="row">
<input
onChange={this.handleClick.bind(this)}
type="text" name="search"
placeholder="Search (by name or genre)"/>
</div>
)
}
}
export default Search |
#! /bin/sh
# Negative test: run dtc on the given arguments and require it to exit
# with status 1 (a normal failure) — not 0, another code, or a signal.
SRCDIR=`dirname "$0"`
. "$SRCDIR/testutils.sh"

verbose_run $VALGRIND "$DTC" -o/dev/null "$@"
ret="$?"
# Exit codes above 127 indicate death by signal.
if [ "$ret" -gt 127 ]; then
    FAIL "dtc killed by signal (ret=$ret)"
elif [ "$ret" != "1" ]; then
    FAIL "dtc returned incorrect status $ret instead of 1"
fi

PASS
|
<gh_stars>0
import React, {Component, useState} from 'react';
import {
Text,
View,
StyleSheet,
Picker,
Image,
Platform,
TouchableOpacity,
FlatList,
Alert,
} from 'react-native';
import * as Resources from '../../config/resource';
import moment from 'moment';
// Read-only detail screen for a single task: shows every task attribute
// passed via route params, with Edit (navigates to EditTask carrying
// the same params) and Delete (confirmation alert, API delete, then
// back to TaskManagement).
export default function DetailTask({route, navigation}) {
  // const {projectName} = route.params;
  // const {projectID} = route.params;
  // const {name} = route.params;
  // const {assignee} = route.params;
  // const {difficulty} = route.params;
  // const {taskPriority} = route.params;
  // const {startDate} = route.params;
  // const {endDate} = route.params;
  // const {description} = route.params;
  // const {status} = route.params;
  // const {key} = route.params;
  const {TaskId} = route.params;
  const {TaskName} = route.params;
  const {EmployeeId} = route.params;
  const {EmployeeName} = route.params;
  const {TaskDifficulty} = route.params;
  const {TaskPriority} = route.params;
  const {TaskStatus} = route.params;
  const {StartDate} = route.params;
  const {EndDate} = route.params;
  const {ManHour} = route.params;
  const {TaskDescription} = route.params;
  const {projectID} = route.params;

  // Delete via the API, then notify and return to the task list.
  const deleteTask = () => {
    Resources.deleteTask(TaskId)
      .then(r => {
        Alert.alert('Delete Succes');
        console.log(r);
        navigation.navigate('TaskManagement');
      })
      .catch(e => {
        console.log(e);
      });
  };

  // Confirmation dialog shown before deleting.
  const AlertDelete = () =>
    Alert.alert(
      "Delete",
      "Are You Sure To Delete This ? \nNote: Task Must Not Be Available",
      [
        {
          text: "Cancel",
          onPress: () => console.log("Cancel Pressed"),
          style: "cancel"
        },
        {
          text: 'OK',
          onPress: () => {
            deleteTask();
          },
        },
      ],
      {cancelable: false},
    );

  // Dates are displayed in DD/MM/YYYY format.
  const start = moment(StartDate).format('DD/MM/YYYY');
  const end = moment(EndDate).format('DD/MM/YYYY');

  return (
    <View style={{margin: 20, marginTop: 75}}>
      <View
        style={{
          flexDirection: 'row',
          justifyContent: 'space-between',
          marginBottom: 40,
        }}>
        <TouchableOpacity
          style={{
            width: 150,
            height: 40,
            borderWidth: 0.5,
            backgroundColor: '#26BF64',
            alignSelf: 'center',
            justifyContent: 'center',
            alignItems: 'center',
          }}
          onPress={() =>
            navigation.navigate('EditTask', {
              // projectName: projectName,
              // projectID: projectID,
              // name: name,
              // assignee: assignee,
              // difficulty: difficulty,
              // taskPriority: taskPriority,
              // startDate: startDate,
              // endDate: endDate,
              // description: description,
              // status: status,
              // key: key,
              TaskId: TaskId,
              TaskName: TaskName,
              EmployeeId: EmployeeId,
              EmployeeName: EmployeeName,
              TaskDifficulty: TaskDifficulty,
              TaskPriority: TaskPriority,
              TaskStatus: TaskStatus,
              StartDate: StartDate,
              EndDate: EndDate,
              ManHour: ManHour,
              TaskDescription: TaskDescription,
              projectID: projectID,
            })
          }>
          <Text style={{fontSize: 18, fontWeight: 'bold', color: '#FFFFFF'}}>
            Edit Task
          </Text>
        </TouchableOpacity>
        <TouchableOpacity
          onPress={AlertDelete}
          style={{
            width: 150,
            height: 40,
            borderWidth: 0.5,
            backgroundColor: '#DC3545',
            alignSelf: 'center',
            justifyContent: 'center',
            alignItems: 'center',
          }}>
          <Text style={{fontSize: 18, fontWeight: 'bold', color: '#FFFFFF'}}>
            Delete
          </Text>
        </TouchableOpacity>
      </View>
      <View>
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Task Name
            </Text>
          </View>
          <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
            : {TaskName}
          </Text>
        </View>
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Assignee
            </Text>
          </View>
          <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
            : {EmployeeName}
          </Text>
        </View>
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Difficulty
            </Text>
          </View>
          <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
            : {TaskDifficulty}
          </Text>
        </View>
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Task Priority
            </Text>
          </View>
          <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
            : {TaskPriority}
          </Text>
        </View>
        {/* <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Task Status
            </Text>
          </View>
          {TaskStatus === 1 && (
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              : To Do
            </Text>
          )}
          {TaskStatus === 2 && (
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              : In Progress
            </Text>
          )}
          {TaskStatus === 3 && (
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              : Done
            </Text>
          )}
          {TaskStatus === 4 && (
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              : Not Done
            </Text>
          )}
        </View> */}
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Start Date
            </Text>
          </View>
          <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
            : {start}
          </Text>
        </View>
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              End Date
            </Text>
          </View>
          <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
            : {end}
          </Text>
        </View>
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Estimation Time
            </Text>
          </View>
          <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
            : {ManHour}
          </Text>
        </View>
        <View style={{flexDirection: 'row', marginBottom: 10}}>
          <View style={{width: 150}}>
            <Text style={{fontFamily: 'Nunito-Light', fontSize: 15}}>
              {' '}
              Task Description
            </Text>
          </View>
          <Text
            style={{
              fontFamily: 'Nunito-Light',
              fontSize: 15,
              width: 200,
              flexWrap: 'wrap',
            }}>
            : {TaskDescription}
          </Text>
        </View>
      </View>
    </View>
  );
}
// Shared styles for the task-detail screen.
const styles = StyleSheet.create({
  // Base body text.
  text: {
    fontSize: 18,
    fontFamily: 'Roboto',
  },
  // Single-line input box: light gray border on a white background.
  textInput: {
    height: 38,
    width: '95%',
    borderColor: 'gray',
    borderWidth: 1,
    borderRadius: 5,
    backgroundColor: '#FFFFFF',
  },
  // Compact bordered container for dropdown pickers.
  pickerView: {
    borderWidth: 1,
    height: 25,
    width: 120,
    backgroundColor: '#FFFFFF',
    borderRadius: 3,
  },
  // Row-layout touch target for the date picker, content centered.
  datePicker: {
    height: 35,
    width: 120,
    borderRadius: 5,
    borderWidth: 1,
    justifyContent: 'center',
    alignContent: 'center',
    backgroundColor: '#FFFFFF',
    flexDirection: 'row',
  },
});
|
import React from 'react'
import { avatar, flexRow } from './User.module.css'
export default function User({ user: { name, picture } }) {
return (
<div className={flexRow}>
<img
className={avatar}
src={picture.large}
alt={`Foto de ${name.first}`}
/>
<span>{name.first}</span>
</div>
)
}
|
import {ClassNames} from '@emotion/core';
import assign from 'lodash/assign';
import flatten from 'lodash/flatten';
import isEqual from 'lodash/isEqual';
import memoize from 'lodash/memoize';
import omit from 'lodash/omit';
import PropTypes from 'prop-types';
import React from 'react';
import {NEGATION_OPERATOR, SEARCH_WILDCARD} from 'app/constants';
import {defined} from 'app/utils';
import {fetchTagValues} from 'app/actionCreators/tags';
import SentryTypes from 'app/sentryTypes';
import SmartSearchBar, {SearchType} from 'app/components/smartSearchBar';
import {Field, FIELD_TAGS, TRACING_FIELDS} from 'app/utils/discover/fields';
import withApi from 'app/utils/withApi';
import withTags from 'app/utils/withTags';
import {Client} from 'app/api';
import {Organization, TagCollection} from 'app/types';
// Matches a leading negation operator or any (escaped) search wildcard;
// used by prepareQuery to strip search syntax from raw query strings.
const SEARCH_SPECIAL_CHARS_REGEXP = new RegExp(
  `^${NEGATION_OPERATOR}|\\${SEARCH_WILDCARD}`,
  'g'
);
// Props for the Discover search bar: all SmartSearchBar props except
// `tags`, which this component computes itself (see getTagList).
type SearchBarProps = Omit<React.ComponentProps<typeof SmartSearchBar>, 'tags'> & {
  api: Client;
  organization: Organization;
  // Organization tag collection (injected by withTags).
  tags: TagCollection;
  // Tag keys to exclude from the supported-tag list.
  omitTags?: string[];
  // Project ids used to scope tag-value lookups.
  projectIds?: number[] | Readonly<number[]>;
  // Extra discover fields to expose as ad-hoc searchable tags.
  fields?: Readonly<Field[]>;
};
/**
 * Discover search bar: wraps SmartSearchBar, supplying it with the
 * combined set of searchable tags (org tags + field tags) and a
 * memoized tag-value fetcher scoped to the current projects.
 */
class SearchBar extends React.PureComponent<SearchBarProps> {
  static propTypes: any = {
    api: PropTypes.object,
    organization: SentryTypes.Organization,
    tags: PropTypes.objectOf(SentryTypes.Tag),
    omitTags: PropTypes.arrayOf(PropTypes.string.isRequired),
    projectIds: PropTypes.arrayOf(PropTypes.number.isRequired),
    fields: PropTypes.arrayOf(PropTypes.object.isRequired) as any,
  };
  componentDidMount() {
    // Clear memoized data on mount to make tests more consistent.
    this.getEventFieldValues.cache.clear?.();
  }
  componentDidUpdate(prevProps) {
    if (!isEqual(this.props.projectIds, prevProps.projectIds)) {
      // Clear memoized data when projects change.
      this.getEventFieldValues.cache.clear?.();
    }
  }
  /**
   * Returns array of tag values that substring match `query`; invokes `callback`
   * with data when ready
   */
  getEventFieldValues = memoize(
    (tag, query, endpointParams): Promise<string[]> => {
      const {api, organization, projectIds} = this.props;
      // NOTE(review): this cast reads `Readonly<number>[]`;
      // `Readonly<number[]>` was probably intended — confirm.
      const projectIdStrings = (projectIds as Readonly<number>[])?.map(String);
      return fetchTagValues(
        api,
        organization.slug,
        tag.key,
        query,
        projectIdStrings,
        endpointParams
      ).then(
        results =>
          flatten(results.filter(({name}) => defined(name)).map(({name}) => name)),
        () => {
          throw new Error('Unable to fetch event field values');
        }
      );
    },
    // Memoize key ignores endpointParams; stale entries are flushed when
    // the selected projects change (componentDidUpdate above).
    ({key}, query) => `${key}-${query}`
  );
  /**
   * Prepare query string (e.g. strip special characters like negation operator)
   */
  prepareQuery = query => query.replace(SEARCH_SPECIAL_CHARS_REGEXP, '');
  // Builds the tag map handed to SmartSearchBar: org tags plus field
  // tags, with tracing fields removed unless the org has performance-view.
  getTagList() {
    const {fields, organization, tags, omitTags} = this.props;
    // Fields not already covered by FIELD_TAGS become ad-hoc tags.
    const functionTags = fields
      ? Object.fromEntries(
          fields
            .filter(item => !Object.keys(FIELD_TAGS).includes(item.field))
            .map(item => [item.field, {key: item.field, name: item.field}])
        )
      : {};
    const fieldTags = organization.features.includes('performance-view')
      ? Object.assign({}, FIELD_TAGS, functionTags)
      : omit(FIELD_TAGS, TRACING_FIELDS);
    const combined = assign({}, tags, fieldTags);
    // Synthetic `has` tag whose value list is every known tag key.
    combined.has = {
      key: 'has',
      name: 'Has property',
      values: Object.keys(combined),
      predefined: true,
    };
    return omit(combined, omitTags ?? []);
  }
  render() {
    const tags = this.getTagList();
    return (
      <ClassNames>
        {({css}) => (
          <SmartSearchBar
            {...this.props}
            hasRecentSearches
            savedSearchType={SearchType.EVENT}
            onGetTagValues={this.getEventFieldValues}
            supportedTags={tags}
            prepareQuery={this.prepareQuery}
            excludeEnvironment
            dropdownClassName={css`
              max-height: 300px;
              overflow-y: auto;
            `}
          />
        )}
      </ClassNames>
    );
  }
}
// Wrap with HOCs that inject the `api` client and organization `tags`.
export default withApi(withTags(SearchBar));
|
#!/bin/bash
#
# Install Pre-requisites for DBS script
#
# Author: Mrigesh Priyadarshi
# Ruby gems required by the DBS tooling (space-separated list).
ruby_apps="ruby-cheerio rest-client terminal-notifier colorize"
# EPEL release RPM providing extra packages on RHEL/CentOS 7.
epel_repo="http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-8.noarch.rpm"
# Install Homebrew on macOS when it is not already on PATH.
# NOTE(review): Homebrew deprecated this Ruby-based installer endpoint;
# confirm whether this should move to the install.sh installer.
install_brew()
{
  # `command -v` is the POSIX way to test for a command; the previous
  # `[[ ! -f $(which brew) ]]` relied on `which` output quirks.
  if ! command -v brew > /dev/null 2>&1; then
    /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" > /dev/null
  fi
}
# Check whether the `watch` utility is present for the current OS
# (bootstrapping Homebrew first on macOS). The exit status of the last
# probe command is propagated to the caller via `return $?`, so callers
# can test "found" (0) vs "missing" (non-zero).
checkApps()
{
  echo "INFO: checking packages on ${OSTYPE}"
  case $OSTYPE in
    darwin*)
      install_brew
      brew list watch > /dev/null;;
    linux*)
      # Pick the package probe by distro family.
      if [ -f /etc/redhat-release ] ; then
        sudo rpm -qa | grep watch > /dev/null
      elif [ -f /etc/debian_version ] ; then
        sudo dpkg -l | grep watch > /dev/null
      fi
      ;;
    *)
      echo "FATAL: unknown OS Type: $OSTYPE"
      echo -e "FATAL: Supports MacOSX and Linux"
      exit 1
      ;;
  esac
  # Propagate the status of whichever probe ran above.
  return $?
}
# Print "true"/"false" depending on whether gem $1 is installed.
# NOTE(review): relies on the global $gem being set by installRubyApps
# before this is called; with $gem unset this would try to run
# `list -i <name>` as a command — confirm call order stays that way.
checkRubyApps()
{
  ${gem} list -i ${1}
}
# Install `watch` (plus ruby on Linux) when checkApps reports it
# missing; otherwise report that packages are already present.
installApps()
{
  # checkApps signals presence via its exit status. BUG FIX: the old
  # `[[ "$(checkApps)" == 0 ]]` compared checkApps' log output with the
  # string "0", which was never true, so nothing was ever installed.
  if ! checkApps > /dev/null; then
    echo "INFO: Installing WATCH."
    case "$OSTYPE" in
      darwin*)
        brew install watch coreutils
        ;;
      linux*)
        if [ -f /etc/redhat-release ] ; then
          sudo rpm -Uvh ${epel_repo}
          sudo yum -y install watch ruby
        elif [ -f /etc/debian_version ] ; then
          sudo apt-get install watch ruby-full
        fi
        ;;
    esac
    echo "INFO: Installed packages on system!!!"
  else
    echo "INFO: packages already present on system!!!"
  fi
}
# Install every required Ruby gem that `gem list -i` reports missing.
installRubyApps()
{
  gem=$(which gem)
  for appName in ${ruby_apps}; do
    if [[ "$(checkRubyApps ${appName})" == "false" ]]; then
      # BUG FIX: previously ran `gem list <name>`, which only listed the
      # (missing) gem instead of installing it.
      ${gem} install ${appName}
    else
      echo "INFO: Ruby ${appName} already present on system!!!"
    fi
  done
  echo "INFO: Installed Ruby packages on system!!!"
}
### MAIN SCRIPT #####
# System packages first, then the Ruby gems that need them.
installApps
installRubyApps
|
import assert from "assert";
import { LockAndCache } from "../lib";
import { serializeKey } from "../lib/serialization";
describe("cache", () => {
  // One shared cache for the whole suite; closed in `after` below.
  const cache = new LockAndCache();
  // Counts how many times the wrapped work function actually ran so
  // tests can distinguish cache hits from misses.
  let executionCount = 0;
  async function double(a: number) {
    executionCount++;
    return a * 2;
  }
  beforeEach(() => {
    executionCount = 0;
  });
  // Close the cache so our tests quit.
  after(async () => cache.close());
  describe("cache.wrap(f)", () => {
    const cachedDouble = cache.wrap({ ttl: 3 }, double);
    it("should cache the first call to f", async () => {
      let four = await cachedDouble(2);
      assert.strictEqual(four, 4);
      assert.strictEqual(executionCount, 1);
      // Same argument again: must be served from cache, no extra run.
      four = await cachedDouble(2);
      assert.strictEqual(four, 4);
      assert.strictEqual(executionCount, 1);
    });
    it("should cache parallel invocations correctly", async () => {
      // Six concurrent calls, three distinct arguments: the function
      // must run once per distinct argument.
      const results = await Promise.all(
        [1, 4, 3, 3, 4, 1].map((d) => cachedDouble(d))
      );
      assert.deepStrictEqual(results, [2, 8, 6, 6, 8, 2]);
      assert.strictEqual(executionCount, 3);
    });
    it("should cache across multiple caches", async () => {
      // We want to trigger the "recache in RAM" logic, which can only happen
      // with two caches.
      const cache2 = new LockAndCache();
      try {
        const cachedDouble2 = cache2.wrap({ ttl: 3 }, double);
        const results = await Promise.all([
          ...[10, 20, 20, 10].map((d) => cachedDouble(d)),
          ...[10, 20, 20, 10].map((d) => cachedDouble2(d)),
        ]);
        assert.deepStrictEqual(results, [20, 40, 40, 20, 20, 40, 40, 20]);
        // Only two distinct arguments across both caches.
        assert.strictEqual(executionCount, 2);
      } finally {
        cache2.close();
      }
    });
    it("should cache `undefined`", async () => {
      // `undefined` results must be cached too, not treated as a miss.
      let undefExecutionCount = 0;
      async function undef() {
        undefExecutionCount++;
        return undefined;
      }
      const cachedUndefined = cache.wrap({ ttl: 1 }, undef);
      undefExecutionCount = 0;
      let un = await cachedUndefined();
      assert.strictEqual(un, undefined);
      assert.strictEqual(undefExecutionCount, 1);
      un = await cachedUndefined();
      assert.strictEqual(un, undefined);
      assert.strictEqual(undefExecutionCount, 1);
    });
  });
  describe("cache.get", () => {
    function cachedStandaloneDouble(a: number) {
      return cache.get(["standaloneDouble", a], { ttl: 1 }, async () => {
        executionCount++;
        return a * 2;
      });
    }
    it("should cache parallel invocations correctly", async () => {
      const results = await Promise.all(
        [1, 4, 3, 3, 4, 1].map(cachedStandaloneDouble)
      );
      assert.deepStrictEqual(results, [2, 8, 6, 6, 8, 2]);
      assert.strictEqual(executionCount, 3);
    });
    it("should support records as keys", async () => {
      // Key objects differing only in property order must hit the same
      // cache entry.
      await cache.get(["record", { a: 1, b: 2 }], { ttl: 1 }, async () =>
        double(1)
      );
      await cache.get(["record", { b: 2, a: 1 }], { ttl: 1 }, async () =>
        double(1)
      );
      assert.strictEqual(executionCount, 1);
    });
    it("should cache errors correctly", async () => {
      // Rejections must propagate with name/message/stack preserved.
      const expectedErr = new Error("test me please");
      try {
        await cache.get("fail_test", { ttl: 1 }, () =>
          Promise.reject(expectedErr)
        );
        throw new Error("should not resolve");
      } catch (err) {
        assert.strictEqual(
          err.name,
          expectedErr.name,
          "should propagate error name"
        );
        assert.strictEqual(
          err.message,
          expectedErr.message,
          "should propagate error message"
        );
        assert.strictEqual(
          err.stack,
          expectedErr.stack,
          "should propagate error stack"
        );
      }
    });
    it("should cache circular errors correctly", async () => {
      // Error serialization must not choke on self-referential errors.
      class CircularError extends Error {
        private circular: CircularError;
        constructor(message: string) {
          super(message);
          this.circular = this;
        }
      }
      try {
        await cache.get("fail_circular_test", { ttl: 1 }, () =>
          Promise.reject(new CircularError("circular!"))
        );
        throw new Error("should not resolve");
      } catch (err) {
        assert.strictEqual(
          err.message,
          "circular!",
          "should propagate error message"
        );
      }
    });
  });
});
describe("serializeKey", () => {
it("should normalize objects", () => {
assert.strictEqual(
serializeKey({ c: 1, b: [false, "s"], a: undefined }),
'{"b":[false,"s"],"c":1}'
);
});
});
|
import requests
from bs4 import BeautifulSoup

# Fetch the page; the timeout avoids hanging forever and
# raise_for_status fails fast instead of parsing an HTTP error body.
r = requests.get("https://example.com", timeout=10)
r.raise_for_status()
soup = BeautifulSoup(r.text, "html.parser")

# Collect the href of every anchor, skipping anchors that have none so
# the result is not padded with None entries.
links = [link.get("href") for link in soup.find_all("a") if link.get("href") is not None]
print(links)
#!/bin/sh
# Attach detached code signatures to an unsigned macOS app bundle
# tarball, producing the signed bundle in ${OUTDIR}.
#
# Usage: script <unsigned app tarball> <signature tarball>
set -e

UNSIGNED="$1"
SIGNATURE="$2"
ARCH=x86_64
ROOTDIR=dist
BUNDLE=${ROOTDIR}/AeriumX-Qt.app # kept for reference; not used below
TEMPDIR=signed.temp
OUTDIR=signed-app

if [ -z "$UNSIGNED" ]; then
  echo "usage: $0 <unsigned app> <signature>"
  exit 1
fi

if [ -z "$SIGNATURE" ]; then
  echo "usage: $0 <unsigned app> <signature>"
  exit 1
fi

rm -rf "${TEMPDIR}" && mkdir -p "${TEMPDIR}"
tar -C "${TEMPDIR}" -xf "${UNSIGNED}"
tar -C "${TEMPDIR}" -xf "${SIGNATURE}"

# Tools may ship inside the signature tarball; environment can override.
if [ -z "${PAGESTUFF}" ]; then
  PAGESTUFF=${TEMPDIR}/pagestuff
fi

if [ -z "${CODESIGN_ALLOCATE}" ]; then
  CODESIGN_ALLOCATE=${TEMPDIR}/codesign_allocate
fi

# For each detached signature: grow the target binary's signature
# segment, find the attachment offset, and splice the signature in.
# (Quoting and $() substitutions replace the old unquoted backtick
# forms, which broke on paths containing spaces.)
for i in $(find "${TEMPDIR}" -name "*.sign"); do
  SIZE=$(stat -c %s "${i}")
  TARGET_FILE=$(echo "${i}" | sed 's/\.sign$//')
  echo "Allocating space for the signature of size ${SIZE} in ${TARGET_FILE}"
  ${CODESIGN_ALLOCATE} -i "${TARGET_FILE}" -a ${ARCH} "${SIZE}" -o "${i}.tmp"
  # The signature lives at the offset of the last segment pagestuff reports.
  OFFSET=$(${PAGESTUFF} "${i}.tmp" -p | tail -2 | grep offset | sed 's/[^0-9]*//g')
  if [ -z "${QUIET}" ]; then
    echo "Attaching signature at offset ${OFFSET}"
  fi
  dd if="$i" of="${i}.tmp" bs=1 seek="${OFFSET}" count="${SIZE}" 2>/dev/null
  mv "${i}.tmp" "${TARGET_FILE}"
  rm "${i}"
  echo "Success."
done

mv "${TEMPDIR}/${ROOTDIR}" "${OUTDIR}"
rm -rf "${TEMPDIR}"
echo "Signed: ${OUTDIR}"
|
#!/bin/bash
# Shadowsocks account management menu.
# (Fixed: stray "xoV" bytes before the shebang made the script unrunnable.)
clear
echo -e ""
echo -e "==================="
echo -e "|   SHADOWSOCKS   |"
echo -e "-------------------"
echo -e ""
echo -e "List :"
echo -e ""
echo -e "[1] Addss"
echo -e "[2] Cekss"
echo -e "[3] Delss"
echo -e "[4] Renewss"
echo -e "[5] Xp-ss"
echo -e ""
# BUG FIX: the choice was read into $Jawaban but every handler below
# tests $Answer, so no menu option could ever run.
read -p "Mana yang ingin anda pilih : " Answer
# Function Addss
# Creates a new Shadowsocks account: allocates the next free TLS/HTTP
# obfs ports, writes per-user configs, starts systemd services and
# prints the connection details.
if [[ $Answer =~ ^([1])$ ]]
then
# Public IP used in the generated ss:// links.
IP=$(wget -qO- icanhazip.com);
# Last ports handed out; a new account gets last+1 (or the base port).
lastport1=$(grep "port_tls" /etc/shadowsocks-libev/akun.conf | tail -n1 | awk '{print $2}')
lastport2=$(grep "port_http" /etc/shadowsocks-libev/akun.conf | tail -n1 | awk '{print $2}')
if [[ $lastport1 == '' ]]; then
tls=2443
else
tls="$((lastport1+1))"
fi
if [[ $lastport2 == '' ]]; then
http=3443
else
http="$((lastport2+1))"
fi
echo ""
echo "Masukkan password"
# NOTE(review): on a duplicate account this prints "try another
# password" but then `exit 1` aborts instead of re-prompting — confirm
# whether the loop was meant to continue.
until [[ $user =~ ^[a-zA-Z0-9_]+$ && ${CLIENT_EXISTS} == '0' ]]; do
read -rp "Password: " -e user
CLIENT_EXISTS=$(grep -w $user /etc/shadowsocks-libev/akun.conf | wc -l)
if [[ ${CLIENT_EXISTS} == '1' ]]; then
echo ""
echo "Akun sudah ada, silahkan masukkan password lain."
exit 1
fi
done
read -p "Expired (hari): " masaaktif
exp=`date -d "$masaaktif days" +"%Y-%m-%d"`
tgl=$(echo "$exp" | cut -d- -f3)
bln=$(echo "$exp" | cut -d- -f2)
# Per-user server config, TLS-obfs variant.
cat > /etc/shadowsocks-libev/$user-tls.json<<END
{
"server":"0.0.0.0",
"server_port":$tls,
"password":"$user",
"timeout":60,
"method":"aes-256-cfb",
"fast_open":true,
"no_delay":true,
"nameserver":"1.1.1.1",
"mode":"tcp_and_udp",
"plugin":"obfs-server",
"plugin_opts":"obfs=tls"
}
END
# Per-user server config, HTTP-obfs variant.
cat > /etc/shadowsocks-libev/$user-http.json <<-END
{
"server":"0.0.0.0",
"server_port":$http,
"password":"$user",
"timeout":60,
"method":"aes-256-cfb",
"fast_open":true,
"no_delay":true,
"nameserver":"1.1.1.1",
"mode":"tcp_and_udp",
"plugin":"obfs-server",
"plugin_opts":"obfs=http"
}
END
# NOTE(review): the configs are data files; +x looks unnecessary.
chmod +x /etc/shadowsocks-libev/$user-tls.json
chmod +x /etc/shadowsocks-libev/$user-http.json
systemctl start shadowsocks-libev-server@$user-tls.service
systemctl enable shadowsocks-libev-server@$user-tls.service
systemctl start shadowsocks-libev-server@$user-http.service
systemctl enable shadowsocks-libev-server@$user-http.service
# Append the account record consumed by the port/duplicate lookups above.
echo -e "### $user $exp
port_tls $tls
port_http $http">>"/etc/shadowsocks-libev/akun.conf"
# Base64 userinfo for the ss:// URIs.
tmp1=$(echo -n "aes-256-cfb:${user}@${IP}:$tls" | base64 -w0)
tmp2=$(echo -n "aes-256-cfb:${user}@${IP}:$http" | base64 -w0)
linkss1="ss://${tmp1}?plugin=obfs-local;obfs=tls;obfs-host=bing.com"
linkss2="ss://${tmp2}?plugin=obfs-local;obfs=http;obfs-host=bing.com"
clear
echo -e ""
echo -e "=======-Shadowsocks-======="
echo -e "IP/Host : $IP"
# NOTE(review): $domain is never set in this script — confirm it is
# exported by the calling menu.
echo -e "Host : $domain"
echo -e "Port OBFS TLS : $tls"
echo -e "Port OBFS HTTP : $http"
echo -e "Password : $user"
echo -e "Method : aes-256-cfb"
echo -e "Aktif Sampai : $exp"
echo -e "==========================="
echo -e "Link OBFS TLS : $linkss1"
echo -e "==========================="
echo -e "Link OBFS HTTP : $linkss2"
echo -e "==========================="
echo -e "Script by X-IDSSH"
else
echo -e ""
fi
# Function Cekss — menu entry [2]: list existing accounts.
if [[ $Answer =~ ^([2])$ ]]
then
  clear
  cekss
  echo -e "[0] Back to Menu"
  echo -e ""
  read -p "Enter your choice : " Cekss
  if [[ $Cekss =~ ^([0])$ ]]
  then
    clear
    x-idssh
  else
    echo -e ""
  fi
else
  echo -e ""
fi
# Function Delss — menu entry [3]: delete an account.
if [[ $Answer =~ ^([3])$ ]]
then
  clear
  delss
  echo -e "[0] Back to Menu"
  echo -e ""
  read -p "Enter your choice : " Delss
  if [[ $Delss =~ ^([0])$ ]]
  then
    clear
    x-idssh
  else
    echo -e ""
  fi
else
  echo -e ""
fi
# Function Renewss — menu entry [4]: renew an account.
# BUG FIX: choices 4 and 5 were swapped relative to the menu labels
# printed above ([4] Renewss, [5] Xp-ss).
if [[ $Answer =~ ^([4])$ ]]
then
  clear
  renewss
  echo -e "[0] Back to Menu"
  echo -e ""
  read -p "Enter your choice : " Renewss
  if [[ $Renewss =~ ^([0])$ ]]
  then
    clear
    x-idssh
  else
    echo -e ""
  fi
else
  echo -e ""
fi
# Function Xp-ss — menu entry [5]: remove expired accounts.
# BUG FIX: `read ... Xp-ss` used a hyphen in a variable name, which is
# not a valid identifier and made bash error out; renamed to Xpss.
if [[ $Answer =~ ^([5])$ ]]
then
  clear
  xp-ss
  echo -e "[0] Back to Menu"
  echo -e ""
  read -p "Enter your choice : " Xpss
  if [[ $Xpss =~ ^([0])$ ]]
  then
    clear
    x-idssh
  else
    echo -e ""
  fi
else
  echo -e ""
fi
|
#!/usr/bin/env bash
# Grab QR codes from webcam
# Grab and save the path to this script
# http://stackoverflow.com/a/246128
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
  DIR="$(cd -P "$(dirname "$SOURCE")" && pwd)"
  SOURCE="$(readlink "$SOURCE")"
  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
SCRIPTDIR="$(cd -P "$(dirname "$SOURCE")" && pwd)"
# echo "${SCRIPTDIR}" # For debugging
# Only run when QR scanning is enabled in the robot's personal config.
if [[ $(jq '.useQRcodes' "${HOME}/.arlobot/personalDataForBehavior.json") == true ]]; then
  # Kill any previous zbarcam and wait until it is gone so the camera
  # device is free before we reopen it.
  if (pkill zbarcam); then
    while (pkill zbarcam); do
      echo "Waiting for zbarcam to close . . ."
      sleep 1
    done
  fi
  # Resolve the configured camera name to a /dev/video* device; exit
  # quietly if either lookup comes back empty.
  CAMERA_NAME=$(jq '.qrCameraName' "${HOME}/.arlobot/personalDataForBehavior.json" | tr -d '"')
  if [[ -z ${CAMERA_NAME} ]]; then
    exit 1
  fi
  VIDEO_DEVICE=$("${SCRIPTDIR}/find_camera.sh" "${CAMERA_NAME}")
  if [[ -z ${VIDEO_DEVICE} ]]; then
    exit 1
  fi
  # Stream raw QR payloads to stdout with no preview window.
  zbarcam -q --raw --nodisplay "${VIDEO_DEVICE}"
fi
|
// templates/project/content/common/models/privilege.js
'use strict';
module.exports = (sequelize, DataTypes) => {
const Privilege = sequelize.define('Privilege', {
name: DataTypes.STRING,
description: DataTypes.STRING
});
Privilege.associate = models => {
models.Privilege.belongsToMany(models.Role, {
through: 'RolePrivilege'
});
};
return Privilege;
};
|
// gh_stars: 1-10
// Simple Password Reset
// Please PLEASE PLEASE DO NOT USE THIS! This is incredibly stupid and dangerous!
// Generate a new password reset token using the user's email address - then generate a random token connected to the email address
// Then, send a password reset link to the user using
// The password reset link will contain the token and the email address used to create the token
const crypto = require('crypto');
const http = require('http');
/**
 * Create a random reset token for `email`, persist it, and return it.
 *
 * BUG FIX: `crypto` was used without ever being required (see the
 * require block at the top of the file).
 * NOTE(review): `db` is not defined in this file; it is assumed to be
 * a connected client supplied elsewhere — confirm.
 */
function generatePasswordResetToken(email) {
  // 20 random bytes -> 40 hex characters.
  const token = crypto.randomBytes(20).toString('hex');
  // Fire-and-forget insert; errors are only logged.
  db.query(`
    INSERT INTO password_reset_tokens (email, token)
    VALUES ($1, $2)
  `, [email, token], (err) => {
    if (err) {
      console.log(err);
    }
  });
  // Return the token
  return token;
}
// Now that we have our token function, create a simple page for the admins to input an email that needs a password reset, and create a password reset token using the function generatePasswordResetToken
// The admin is prompted with the generated token and is required to give it to the client to proceed.
// The admin will then be prompted with the newly-generated password reset token, and give it to the user.
/**
 * Admin handler: generates a reset token for the posted email, reads it
 * back from the database, and writes it to the HTTP response.
 *
 * BUG FIX: the query callback previously named its result `res`,
 * shadowing the HTTP response and calling `.end` on the query result.
 * NOTE(review): `req.body` requires a body parser; bare `http` requests
 * do not provide it — confirm how this handler is wired.
 */
function adminPasswordResetToken(req, res) {
  const email = req.body.email;
  // Generate and store a token; the stored copy is read back below.
  generatePasswordResetToken(email);
  db.query(`
    SELECT * FROM password_reset_tokens
    WHERE email = $1
  `, [email], (err, result) => {
    if (err) {
      console.log(err);
    }
    // Send the token to the admin user
    res.end(result.rows[0].token);
  });
}
// Now, create a web server to host the password reset webpage - this will be hosted at http://localhost:3000/password-reset
// In order to reset the password, the user will have to have /password-reset/{token} so the server knows which user to reset the password for, but in order
// to do this, we need to know the email address associated with the token, so we can create a simple form for the user to input their email address and we can
// search the database for a matching email/token
// combination.
// The form will have a submit button that will send the user to the /password-reset/{token} page
// The form will also have a button that will send the user to the /login page
// First create the form that will be shown on the page that will allow the user to input their email address
// The form will also have a spot to enter the password reset token, which will be verified by searching through the database,
// And if the token is found with the clients email address, it will then redirect the user to the /password-reset/{token} page
function createEmailForm(req, res) {
const form = `
<form action="/password-reset" method="POST">
<input type="email" name="email" placeholder="Email Address">
<input type="text" name="token" placeholder="Token">
<input type="submit" value="Reset Password">
</form>
`;
return form;
}
// Now, search the database for a matching token, else throw a generic 'Email not found!' error at the user.
// If the token is found, then we can generate a new password reset token and send the user an email with a link to the /password-reset/{token} page
// The user will then be able to input their new password and submit the form to reset their password
// Look up the email address owning `token`.
// NOTE(review): both `return` statements are inside the async query
// callback, so this function itself always returns undefined — callers
// (createPasswordResetPage) cannot use its result. Fixing this needs a
// Promise/callback interface change; flagged rather than changed here.
function searchDatabaseForToken(token) {
  db.query(`
    SELECT email
    FROM password_reset_tokens
    WHERE token = $1
  `, [token], (err, res) => {
    if (err) {
      console.log(err);
    }
    if (res.rows.length === 0) {
      return 'Email not found!';
    } else {
      return res.rows[0].email;
    }
  });
}
// Now, once the token is verified, we can create the password form where the user can create the new password and submit the form to reset their password
function createPasswordForm(email) {
const form = `
<form action="/password-reset/${token}" method="POST">
<input type="password" name="password" placeholder="<PASSWORD>">
<input type="submit" value="Reset Password">
</form>
`;
return form;
}
// Create a function which deletes the old password from the database and sets the new password in the databse, so we again have a working email:password login
/**
 * Delete any outstanding reset tokens for `email` and store the new
 * password. Errors from either query are logged and otherwise ignored.
 */
function resetPassword(email, password) {
  const logError = (err) => {
    if (err) {
      console.log(err);
    }
  };
  db.query(`
    DELETE FROM password_reset_tokens
    WHERE email = $1
  `, [email], logError);
  db.query(`
    UPDATE users
    SET password = $1
    WHERE email = $2
  `, [password, email], logError);
}
// Finally, create the page that contains the inital email form and the password form if we can find a token, using our existing functions
// Render the password-reset page for the token in the URL.
// NOTE(review): searchDatabaseForToken always returns undefined (its
// returns happen inside an async callback), so `email` is never
// 'Email not found!' and the else branch always runs with undefined.
// Also, `req.params` is an Express concept — bare http requests have
// no `params`. Both need an async/routing refactor; flagged here.
function createPasswordResetPage(req, res) {
  const token = req.params.token;
  const email = searchDatabaseForToken(token);
  if (email === 'Email not found!') {
    res.end(email);
  } else {
    const form = createPasswordForm(email);
    res.end(form);
  }
}
// Stupid quick generic 404 page. that's literally just an h1 saying '404'
function create404Page() {
const page = `
<h1>404</h1>
`;
return page;
}
// Start hosting the password reset page on port 3000, and use the createForm function to create the initial form to find the token.
// IF, the token is found, continue on and go to the createPasswordForm function to create the password form, which if successful will reset the password using the resetPassword function
// If the token is not found, then show the user a simple page with a generic 'Incorrect token' error message, and log it to the console. The error will be at http://localhost:3000/password-reset/error
// This entire site will be hosted at http://localhost:3000/password-reset
// Full walkthrough:
// 1. User visits http://localhost:3000/password-reset, and is shown the initial 'Find your email address' form
// 2. User submits the form, and is shown the password reset form if a token is found
// 3. User submits the password reset form, and is shown a generic 'Password reset successful' message
// 4. If everything is correct, call our resetPassword function to actually reset the password. Log this to the console.
// 5. If the token is not found, then show a generic 'Incorrect token' error message, and log it to the console.
// 6. If the token is found, then show the password reset form, and log it to the console.
// 7. If the password reset form is submitted, then call our resetPassword function to actually reset the password. Log this to the console.
// 8. If the password reset form is submitted, then show a generic 'Password reset successful' message, and log it to the console.
// Minimal HTTP router for the password-reset flow.
// NOTE(review): the '/password-reset/error' branch is unreachable —
// `req.url.startsWith('/password-reset/')` matches it first. The
// extracted `token` is never used. adminPasswordResetToken writes the
// response itself and returns undefined, so res.end(...) around it
// ends the response a second time. All flagged for a routing refactor.
function startPasswordResetServer() {
  const server = http.createServer((req, res) => {
    if (req.url === '/password-reset') {
      res.end(createEmailForm(req, res));
    } else if (req.url.startsWith('/password-reset/')) {
      const token = req.url.split('/password-reset/')[1];
      res.end(createPasswordResetPage(req, res));
    } else if (req.url === '/password-reset/error') {
      res.end('Incorrect token');
    } else if (req.url === '/admin-password-reset') {
      res.end(adminPasswordResetToken(req, res));
    } else {
      res.end(create404Page());
    }
  });
  server.listen(3000, () => {
    console.log('Server listening on port 3000');
  });
};
// Start our server with our startPasswordResetServer function
startPasswordResetServer();
|
// repo: BorisNikulin/CS-113-Homework
package edu.miracosta.cs113.dataStructures;
import java.util.Collection;
import java.util.Iterator;
import java.util.ListIterator;
import java.util.stream.Collectors;
/**
 * A LIFO stack backed by a {@link SimpleLinkedList}, with the top of the
 * stack at the end of the list. Iteration order is top-to-bottom.
 */
public class ListStack<E> implements Iterable<E> {
    // TODO make a double ended singly linked list and use it
    public SimpleLinkedList<E> data;

    /** Creates an empty stack. */
    public ListStack() {
        data = new SimpleLinkedList<>();
    }

    /** Returns true when the stack holds no elements. */
    public boolean empty() {
        return data.isEmpty();
    }

    /** Returns the top element without removing it. */
    public E peek() {
        return data.get(data.size() - 1);
    }

    /** Removes and returns the top element. */
    public E pop() {
        return data.remove(data.size() - 1);
    }

    /** Pushes {@code item} onto the stack and returns it. */
    public E push(E item) {
        data.add(item);
        return item;
    }

    /**
     * Returns the 1-based position of {@code o} counted from the bottom
     * of the backing list, or -1 when absent.
     * NOTE(review): java.util.Stack#search counts from the top of the
     * stack; confirm which convention callers expect.
     */
    public int search(Object o) {
        int index = data.indexOf(o);
        return (index != -1) ? index + 1 : index;
    }

    /**
     * Equal to any {@link Collection} holding equal elements in the same
     * order, or to another ListStack with equal backing data.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (obj instanceof Collection) {
            @SuppressWarnings("rawtypes") Collection other = (Collection) obj;
            if (data.size() != other.size()) {
                return false;
            }
            // Use our own iterator through the for since iterating over a
            // linked list with an iterator is better (no re traversals)
            @SuppressWarnings("rawtypes") Iterator cItr = other.iterator();
            for (E ourElement : data) {
                if (!ourElement.equals(cItr.next())) {
                    return false;
                }
            }
            // BUG FIX: previously fell through to the getClass() check
            // below and returned false even when every element matched.
            return true;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        // BUG FIX: previously cast to ArrayStack (copy-paste from the
        // array-backed implementation) — the wrong type for this class.
        ListStack<?> other = (ListStack<?>) obj;
        if (data == null) {
            return other.data == null;
        }
        return data.equals(other.data);
    }

    /**
     * Added so equals/hashCode stay consistent for ListStack-to-ListStack
     * equality (equal backing data implies equal hash).
     */
    @Override
    public int hashCode() {
        return (data == null) ? 0 : data.hashCode();
    }

    /** Bottom-to-top rendering, e.g. "[1,2,3]" with 3 on top. */
    @Override
    public String toString() {
        return data.stream()
                .map(Object::toString)
                .collect(Collectors.joining(",", "[", "]"));
    }

    /** Iterates from the top of the stack downwards. */
    @Override
    public Iterator<E> iterator() {
        return new StackItr();
    }

    /** Reverse (top-first) iterator over the backing list. */
    private class StackItr implements Iterator<E> {
        ListIterator<E> dataItr = data.listIterator(data.size());

        @Override
        public boolean hasNext() {
            return dataItr.hasPrevious();
        }

        @Override
        public E next() {
            return dataItr.previous();
        }
    }
}
|
#!/usr/bin/env bash
# Change this line to return 1 even on success if you want to leave
# the output files around for inspection
KEEP_LOG_ON_SUCCESS=0
# Set to 1 to (re)generate the gold files instead of diffing against them.
WRITE_GOLD=0
# Iterations of the time-leap loop; one snapshot pair per iteration.
RANGE=`seq 1 200`
# This file is sourced by the test framework (hence the `return`s below);
# ods_* and log_this come from that framework. Every step is chained
# with && so the first failure falls through to the FAILED tail.
if [ -n "$HAVE_MYSQL" ]; then
ods_setup_conf conf.xml conf-mysql.xml
fi &&
ods_reset_env -i &&
rm -rf base && mkdir base &&
ods_start_enforcer &&
log_this 01_zone_add 'ods-enforcer zone add --zone zone0a' &&
log_this 01_zone_add 'ods-enforcer zone add --zone zone0b' &&
log_this 01_zone_add 'ods-enforcer zone add --zone zone1 -p csk' &&
log_this 01_zone_add 'ods-enforcer zone add --zone zone2a -p notshared' &&
log_this 01_zone_add 'ods-enforcer zone add --zone zone2b -p notshared' &&
log_this 01_zone_add 'ods-enforcer zone add --zone zone3 -p dual' &&
ods_stop_enforcer &&
ods_start_enforcer &&
DIFF=0
# Each iteration: snapshot normalized key listings, leap time, run the
# ds-* lifecycle commands (best-effort), then diff against gold.
for n in $RANGE
do
echo -n "$n " &&
DIFF=1 &&
ods-enforcer key list -a -v -p 2>/dev/null | cut -d ";" -f 1-6,8|sed -r "s/[0-9-]{10} [0-9:]{8}|now/date time/" | sort > base/$n.verbose &&
ods-enforcer key list -a -d -p 2>/dev/null | cut -d ";" -f 1-8 | sort > base/$n.debug &&
log_this 02_timeleap 'ods-enforcer time leap --attach' &&
( log_this 03_ds-submit 'ods-enforcer key ds-submit --all' || true ) &&
( log_this 04_ds-seen 'ods-enforcer key ds-seen --all' || true ) &&
( log_this 05_ds-retract 'ods-enforcer key ds-retract --all' || true ) &&
( log_this 06_ds-gone 'ods-enforcer key ds-gone --all' || true ) &&
if [ ! $WRITE_GOLD -eq 1 ]
then
diff -u base/$n.verbose gold/$n.verbose || break &&
diff -u base/$n.debug gold/$n.debug || break
fi &&
DIFF=0
done &&
if [ $WRITE_GOLD -eq 1 ]
then
rm -rf gold &&
cp -r base gold
fi &&
test $DIFF -eq 0 &&
ods_stop_enforcer &&
echo "**** OK" &&
return $KEEP_LOG_ON_SUCCESS
# Failure path: dump the current key list for diagnosis, then clean up.
ods-enforcer key list -a -d -p
echo "**** FAILED"
ods_kill
return 1
|
package jenkins.plugins.logstash.persistence;
import static net.sf.json.test.JSONAssert.assertEquals;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import net.sf.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class AbstractLogstashIndexerDaoTest {
  // Expected payloads for zero, one and two log lines respectively.
  static final String EMPTY_STRING = "{\"@timestamp\":\"2000-01-01\",\"data\":{},\"message\":[],\"source\":\"jenkins\",\"source_host\":\"http://localhost:8080/jenkins\",\"@version\":1}";
  static final String ONE_LINE_STRING = "{\"@timestamp\":\"2000-01-01\",\"data\":{},\"message\":[\"LINE 1\"],\"source\":\"jenkins\",\"source_host\":\"http://localhost:8080/jenkins\",\"@version\":1}";
  static final String TWO_LINE_STRING = "{\"@timestamp\":\"2000-01-01\",\"data\":{},\"message\":[\"LINE 1\", \"LINE 2\"],\"source\":\"jenkins\",\"source_host\":\"http://localhost:8080/jenkins\",\"@version\":1}";

  @Mock BuildData mockBuildData;

  @Before
  public void before() throws Exception {
    // Fixed build data so the generated payloads are deterministic.
    when(mockBuildData.toJson()).thenReturn(JSONObject.fromObject("{}"));
    when(mockBuildData.getTimestamp()).thenReturn("2000-01-01");
  }

  // Shared body of the three success tests: build a payload from the
  // given log lines and compare it with the expected JSON.
  private void verifyPayload(String expected, java.util.List<String> logLines) {
    AbstractLogstashIndexerDao dao = getInstance();
    JSONObject result = dao.buildPayload(mockBuildData, "http://localhost:8080/jenkins", logLines);
    assertEquals("Results don't match", JSONObject.fromObject(expected), result);
  }

  @Test
  public void buildPayloadSuccessEmpty() throws Exception {
    verifyPayload(EMPTY_STRING, new ArrayList<String>());
  }

  @Test
  public void buildPayloadSuccessOneLine() throws Exception {
    verifyPayload(ONE_LINE_STRING, Arrays.asList("LINE 1"));
  }

  @Test
  public void buildPayloadSuccessTwoLines() throws Exception {
    verifyPayload(TWO_LINE_STRING, Arrays.asList("LINE 1", "LINE 2"));
  }

  // Concrete anonymous subclass; indexer type and push() are irrelevant
  // to payload building.
  private AbstractLogstashIndexerDao getInstance() {
    return new AbstractLogstashIndexerDao("localhost", -1, "", "", "") {
      public IndexerType getIndexerType() {
        return IndexerType.REDIS;
      }
      public void push(String data) throws IOException {}
    };
  }
}
|
#!/usr/bin/env bash
# Downloads and unpacks Abitti (www.abitti.fi) disk images.
#
# This script is public domain.
#
# This script is not supported by Matriculation Examination Board of
# Finland. The download URLs may change without any notice. For
# supported tools see www.abitti.fi.

# Downloaded/extracted images live here, one subdirectory per version.
IMAGEPATH=~/abitti_images

# Select a downloader: curl on macOS, wget on other unices.
case "$(uname)" in
  Darwin)
    DLCMD_TOFILE="curl -o"
    DLCMD_STDOUT="curl"
    ;;
  *)
    DLCMD_TOFILE="wget -c -O"
    DLCMD_STDOUT="wget --quiet -O -"
    ;;
esac
sanitise_versioncode() {
INPUT=$1
echo -n $INPUT | perl -pe 's/[^\w\d]//g;'
}
# Download zip $1 into directory $2 (unless already cached), then
# extract member $3 and rename it to $4 inside that directory.
download_and_extract() {
  ZIP_URL=$1
  ZIP_PATH=$2
  FILE_INSIDE_ZIP=$3
  FILE_OUTSIDE_ZIP=$4
  ZIP_FILE=$(basename "$ZIP_URL")
  if [ ! -d "$ZIP_PATH" ]; then
    mkdir -p "$ZIP_PATH"
  fi
  if [ -f "$ZIP_PATH/$ZIP_FILE" ]; then
    echo "File $ZIP_PATH/$ZIP_FILE already exists"
  else
    # $DLCMD_TOFILE is deliberately unquoted: it holds a command plus
    # its flags and must word-split.
    $DLCMD_TOFILE "$ZIP_PATH/$ZIP_FILE" "$ZIP_URL"
  fi
  # BUG FIX: this check previously read the misspelled variables
  # $FILE_OUTSIZE_ZIP / $FILENAME_OUTSIDE_ZIP (always empty), so the
  # "already extracted" branch could never be taken.
  if [ -f "$ZIP_PATH/$FILE_OUTSIDE_ZIP" ]; then
    echo "File $ZIP_PATH/$FILE_OUTSIDE_ZIP already exists"
  else
    unzip "$ZIP_PATH/$ZIP_FILE" "$FILE_INSIDE_ZIP" -d "$ZIP_PATH"
    mv "$ZIP_PATH/$FILE_INSIDE_ZIP" "$ZIP_PATH/$FILE_OUTSIDE_ZIP"
  fi
  # Remove the now-empty directory left behind by the zip layout.
  if [ -d "$ZIP_PATH/ytl" ]; then
    rmdir "$ZIP_PATH/ytl"
  fi
}
# Fetch the current Abitti exam-stick version and download its image if
# this version is not cached yet.
NEW_VERSION_ABITTI=`$DLCMD_STDOUT https://static.abitti.fi/etcher-usb/koe-etcher.ver`
NEW_VERSION_ABITTI=`sanitise_versioncode $NEW_VERSION_ABITTI`
if [ ! -f $IMAGEPATH/$NEW_VERSION_ABITTI/koe.dd ]; then
  echo "Must download new Abitti ($NEW_VERSION_ABITTI)"
  download_and_extract https://static.abitti.fi/etcher-usb/koe-etcher.zip $IMAGEPATH/$NEW_VERSION_ABITTI ytl/koe.img koe.dd
fi
# Same procedure for the exam-server (ktp) image.
NEW_VERSION_SERVER=`$DLCMD_STDOUT https://static.abitti.fi/etcher-usb/ktp-etcher.ver`
NEW_VERSION_SERVER=`sanitise_versioncode $NEW_VERSION_SERVER`
if [ ! -f $IMAGEPATH/$NEW_VERSION_SERVER/ktp.dd ]; then
  echo "Must download new server ($NEW_VERSION_SERVER)"
  download_and_extract https://static.abitti.fi/etcher-usb/ktp-etcher.zip $IMAGEPATH/$NEW_VERSION_SERVER ytl/ktp.img ktp.dd
fi
# Normal termination
exit 0
|
package chylex.hee.mechanics.compendium.content;
import gnu.trove.map.hash.TIntObjectHashMap;
import java.util.Set;
import java.util.stream.Collectors;
import chylex.hee.gui.GuiEnderCompendium;
import chylex.hee.mechanics.compendium.content.fragments.KnowledgeFragmentType;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Base class for a compendium knowledge fragment.
 * Every constructed fragment registers itself in a global ID registry;
 * {@code T} is the concrete subclass (self-type convention) so the
 * fluent setters can return it.
 */
public abstract class KnowledgeFragment<T extends KnowledgeFragment>{
	/** Global registry of all fragments ever constructed, keyed by globalID. */
	private static final TIntObjectHashMap<KnowledgeFragment<?>> allFragments = new TIntObjectHashMap<>();

	/** Returns the fragment registered under {@code id}, or null if unknown. */
	public static KnowledgeFragment<?> fromID(int id){
		return allFragments.get(id);
	}

	/** Returns all fragments of type SECRET or HINT (the unlockable kinds). */
	public static Set<KnowledgeFragment> getUnlockableFragments(){
		return allFragments.valueCollection().stream().filter(fragment -> fragment.getType() == KnowledgeFragmentType.SECRET || fragment.getType() == KnowledgeFragmentType.HINT).collect(Collectors.toSet());
	}

	/** Unique identifier; also used as the hash code. */
	public final int globalID;
	private KnowledgeFragmentType type;
	private int price;

	/** Creates the fragment and registers it in the global registry. */
	public KnowledgeFragment(int globalID){
		this.globalID = globalID;
		allFragments.put(globalID, this);
	}

	/** Sets a non-secret fragment type (price 0). */
	public T setType(KnowledgeFragmentType type){
		return setType(type, 0);
	}

	/**
	 * Sets the fragment type and unlock price.
	 * A non-zero price is required exactly when the type is SECRET;
	 * any other combination is rejected.
	 *
	 * @return this (as the concrete subclass), for chaining
	 * @throws IllegalArgumentException if price and type disagree
	 */
	@SuppressWarnings("unchecked") // safe under the T-extends-self convention
	public T setType(KnowledgeFragmentType type, int price){
		if ((price != 0) ^ (type == KnowledgeFragmentType.SECRET)){
			throw new IllegalArgumentException(price == 0 ? "Secret fragments need to have a price!" : "Only secret fragments can have a price!");
		}
		this.type = type;
		this.price = price;
		return (T)this;
	}

	public KnowledgeFragmentType getType(){
		return type;
	}

	public int getPrice(){
		return price;
	}

	/** Typed equality overload; fragments are equal iff their IDs match. */
	public final boolean equals(KnowledgeFragment fragment){
		return fragment.globalID == globalID;
	}

	/** Height in pixels this fragment occupies in the compendium GUI. */
	@SideOnly(Side.CLIENT)
	public abstract int getHeight(GuiEnderCompendium gui, boolean isUnlocked);

	/** Handles a mouse click; returns true when the click was consumed. */
	@SideOnly(Side.CLIENT)
	public abstract boolean onClick(GuiEnderCompendium gui, int x, int y, int mouseX, int mouseY, int buttonId, boolean isUnlocked);

	/** Renders the fragment at the given position. */
	@SideOnly(Side.CLIENT)
	public abstract void onRender(GuiEnderCompendium gui, int x, int y, int mouseX, int mouseY, boolean isUnlocked);

	@Override
	public final boolean equals(Object o){
		return o instanceof KnowledgeFragment && ((KnowledgeFragment)o).globalID == globalID;
	}

	@Override
	public final int hashCode(){
		return globalID;
	}

	/** Returns true when (mouseX, mouseY) lies inside the rectangle at (x, y) of size w x h. */
	protected static final boolean checkRect(int mouseX, int mouseY, int x, int y, int w, int h){
		return mouseX >= x && mouseX <= x+w && mouseY >= y && mouseY <= y+h;
	}
}
|
# Train and evaluate a small dense network on MNIST.

# loading the data
from keras.datasets import mnist
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# Flatten the 28x28 images to 784-vectors and scale pixels to [0, 1].
# Using -1 instead of the hard-coded sample counts (60000/10000) keeps
# the script correct for any dataset split size.
X_train = X_train.reshape(-1, 784) / 255
X_test = X_test.reshape(-1, 784) / 255

# one-hot encode target column (10 digit classes)
from keras.utils import to_categorical
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)

# define the model: 784 -> 64 -> 32 -> 10 with softmax output
from keras.models import Sequential
model = Sequential()

# add layers
from keras.layers import Dense
model.add(Dense(64, input_dim=784, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dense(10, activation='softmax'))

# compile the model
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# fit the model
model.fit(X_train, y_train, verbose=1, epochs=3)

# evaluate the model; evaluate() returns [loss, accuracy]
score = model.evaluate(X_test, y_test, verbose=0)
print('Test accuracy:', score[1])
#include <cstdlib>
#include <iostream>
#include <list>
#include <sstream>
#include <string>
// Split `line` on `delimiter`, appending each token to `tokens`.
//
// BUGFIX: the original collected token.c_str() into a
// std::list<const char*>.  `token` is a local std::string that is
// overwritten each iteration and destroyed on return, so every stored
// pointer dangled.  The list now owns its elements as std::string;
// the only caller (main below) is updated to match.
void tokenizer(const std::string& line, char delimiter, std::list<std::string>& tokens) {
    std::string token;
    std::istringstream tokenStream(line);
    while (std::getline(tokenStream, token, delimiter)) {
        tokens.push_back(token);
    }
}

int main() {
    std::string line = "123,example,data";
    std::list<std::string> tokens;
    tokenizer(line, ',', tokens);
    // The first comma-separated field is the numeric host id.
    std::list<std::string>::iterator I = tokens.begin();
    int hostid = std::atoi(I->c_str()); ++I;
    std::cout << "Host ID: " << hostid << std::endl;
    return 0;
}
#!/bin/bash
# Start the squid container with its log directory bind-mounted.
# $1 - optional container name; omitted -> docker assigns a random name.
# Quoting "$1" keeps the test and --name argument intact if the name
# contains spaces or glob characters.
if [ -z "$1" ]; then
    sudo docker run -itd --rm -v /mnt/slab/squid/log/:/var/log/squid/ squid
else
    sudo docker run -itd --rm --name "$1" -v /mnt/slab/squid/log/:/var/log/squid/ squid
fi
|
<filename>lib/geometry/edge.rb
require_relative 'point'
module Geometry
=begin rdoc
An edge. It's a line segment between 2 points. Generally part of a {Polygon}.
== Usage
edge = Geometry::Edge.new([1,1], [2,2])
edge = Geometry::Edge([1,1], [2,2])
=end
# A line segment between two {Point}s. Generally part of a {Polygon}.
class Edge
  # The two endpoints, in construction order.
  attr_reader :first, :last

  # Construct a new {Edge} object from any two things that can be converted
  # to a {Point}.
  def initialize(point0, point1)
    @first, @last = [Point[point0], Point[point1]]
  end

  # Two Edges are equal if both have equal {Point}s in the same order
  def ==(other)
    (@first == other.first) && (@last == other.last)
  end

  # @param [Point] point A {Point} to spaceship with
  # @return [Boolean] Returns 1 if the {Point} is strictly to the left of the receiver, -1 to the right, and 0 if the point is on the receiver
  def <=>(point)
    case point
    when Point
      # 2D cross product of (last - first) with (point - first);
      # its sign tells which side of the edge the point lies on.
      k = (@last.x - @first.x) * (point.y - @first.y) - (point.x - @first.x) * (@last.y - @first.y)
      if 0 == k
        # Collinear: return 0 only when the point falls within the
        # segment's bounding box (the summed spaceship trick), nil when
        # it is on the infinite line but outside the segment.
        (((@first.x <=> point.x) + (@last.x <=> point.x)).abs <= 1) && (((@first.y <=> point.y) + (@last.y <=> point.y)).abs <= 1) ? 0 : nil
      else
        k <=> 0
      end
    else
      raise ArgumentError, "Can't spaceship with #{point.class}"
    end
  end

  # @group Attributes

  # @return [Point] The upper-right corner of the bounding rectangle that encloses the {Edge}
  def max
    first.max(last)
  end

  # @return [Point] The lower-left corner of the bounding rectangle that encloses the {Edge}
  def min
    first.min(last)
  end

  # @return [Array<Point>] The lower-left and upper-right corners of the enclosing bounding rectangle
  def minmax
    first.minmax(last)
  end

  # @endgroup

  # Return a new {Edge} with swapped endpoints
  def reverse
    self.class.new(@last, @first)
  end

  # In-place swap the endpoints
  # NOTE(review): memoized @length/@vector/@direction are not cleared
  # here; they are direction-insensitive for length only -- confirm
  # callers do not rely on direction/vector after reverse!.
  def reverse!
    @first, @last = @last, @first
    self
  end

  # @return [Number] the length of the {Edge} (memoized)
  def length
    @length ||= vector.magnitude
  end

  # Return the {Edge}'s length along the Y axis
  def height
    (@first.y - @last.y).abs
  end

  # Return the {Edge}'s length along the X axis
  def width
    (@first.x - @last.x).abs
  end

  def inspect
    'Edge(' + @first.inspect + ', ' + @last.inspect + ')'
  end
  alias :to_s :inspect

  # @return [Bool] Returns true if the passed {Edge} is parallel to the receiver
  # NOTE(review): despite the @return tag this actually returns 1 (same
  # direction), -1 (opposite direction) or false -- both 1 and -1 are
  # truthy, so boolean use still works.
  def parallel?(edge)
    v1 = self.direction
    v2 = edge.direction
    winding = v1[0] * v2[1] - v1[1] * v2[0]
    if 0 == winding # collinear?
      if v1 == v2
        1 # same direction
      else
        -1 # opposite direction
      end
    else
      false
    end
  end

  # @param [Edge] other The other {Edge} to check
  # @return [Bool] Returns true if the receiver and the passed {Edge} share an endpoint
  def connected?(other)
    (@first == other.last) || (@last == other.first) || (@first == other.first) || (@last == other.last)
  end

  # @!attribute [r] direction
  # @return [Vector] A unit {Vector} pointing from first to last (memoized)
  def direction
    @direction ||= self.vector.normalize
  end

  # Find the intersection of two {Edge}s (http://bloggingmath.wordpress.com/2009/05/29/line-segment-intersection/)
  # @param [Edge] other The other {Edge}
  # @return [Point] The intersection of the two {Edge}s, nil if they don't intersect, true if they're collinear and overlapping, and false if they're collinear and non-overlapping
  def intersection(other)
    # Shared endpoints short-circuit the general computation.
    return self.first if (self.first == other.first) or (self.first == other.last)
    return self.last if (self.last == other.first) or (self.last == other.last)

    p0, p1 = self.first, self.last
    p2, p3 = other.first, other.last
    v1, v2 = self.vector, other.vector

    denominator = v1[0] * v2[1] - v2[0] * v1[1] # v1 x v2
    p = p0 - p2
    if denominator == 0 # collinear, so check for overlap
      if 0 == (-v1[1] * p.x + v1[0] * p.y) # collinear?
        # The edges are collinear, but do they overlap?
        # Project them onto the x and y axes to find out
        left1, right1 = [self.first[0], self.last[0]].sort
        bottom1, top1 = [self.first[1], self.last[1]].sort
        left2, right2 = [other.first[0], other.last[0]].sort
        bottom2, top2 = [other.first[1], other.last[1]].sort
        !((left2 > right1) || (right2 < left1) || (top2 < bottom1) || (bottom2 > top1))
      else
        nil
      end
    else
      # NOTE(review): with Integer coordinates these divisions truncate
      # (Ruby Integer#/); presumably callers use Rational/Float
      # coordinates -- confirm.
      s = (-v1[1] * p.x + v1[0] * p.y) / denominator # v1 x (p0 - p2) / denominator
      t = (v2[0] * p.y - v2[1] * p.x) / denominator # v2 x (p0 - p2) / denominator
      p0 + v1 * t if ((0..1) === s) && ((0..1) === t)
    end
  end

  # @!attribute [r] vector
  # @return [Vector] A {Vector} pointing from first to last (memoized)
  def vector
    @vector ||= last - first
  end

  def to_a
    [@first, @last]
  end
end
# Module-function shorthand for building an {Edge}; accepts anything the
# {Edge} constructor can coerce into {Point}s.
# @param first [Point, Array] the starting point of the {Edge}
# @param last [Point, Array] the endpoint of the {Edge}
def Edge(first, last)
  Geometry::Edge.new(first, last)
end
end
|
class ServerChan {
    // Global ServerChan key shared by all callers once init() has run.
    private static SCKEY: string
    /**
     * Initialise the global key.
     *
     * @author CaoMeiYouRen
     * @date 2019-08-24
     * @export
     * @param {string} SCKEY the sckey issued by https://sc.ftqq.com; once set
     * here it acts as the global configuration
     */
    static init(SCKEY: string) {
        this.SCKEY = SCKEY
    }
    /**
     * Sends a notification to the server using the initialized SCKEY
     * @param {string} message The message to be sent
     */
    // NOTE(review): this is an instance method, so `this.SCKEY` would resolve
    // to an (undefined) instance property rather than the static field set by
    // init() -- confirm whether this should be static or read ServerChan.SCKEY
    // when the body is implemented.
    sendNotification(message: string) {
        // Implement the logic to send the notification to the server using this.SCKEY
        // Example: Send a POST request to the server with the message and this.SCKEY
    }
}
#!/bin/bash
# Deploy the Gravity contract via the TypeScript deployer, pointing at
# local cosmos and ethereum dev nodes. The private key below is a
# well-known local test key, not a secret.
npx ts-node \
contract-deployer.ts \
--cosmos-node="http://localhost:26657" \
--eth-node="http://localhost:8545" \
--eth-privkey="0xb1bab011e03a9862664706fc3bbaa1b16651528e5f0e7fbfcbfdd8be302a13e7" \
--contract=Gravity.json \
--test-mode=true
#ifndef INCLUDED_CORE_AUTO_ID_H
#define INCLUDED_CORE_AUTO_ID_H

#include <string>
#include "rstdint.h"

namespace platform {

// Maps a name to a stable integer id assigned at construction time.
// The id is immutable for the lifetime of the object and the instance
// converts implicitly to int32_t for convenience.
class AutoId
{
public:
    // Construct and assign an id for the given name.
    AutoId( std::string const& Name );
    // The assigned id.
    int32_t GetId()const;
    ~AutoId();
    // Implicit conversion to the assigned id.
    operator int32_t()const;
protected:
    const int32_t mId;
};

} // namespace platform

#endif//INCLUDED_CORE_AUTO_ID_H
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2018 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.slicing;
import static com.google.common.base.Preconditions.checkState;
import java.util.Collection;
import org.sosy_lab.cpachecker.core.interfaces.AbstractState;
import org.sosy_lab.cpachecker.core.interfaces.Precision;
import org.sosy_lab.cpachecker.core.interfaces.StopOperator;
import org.sosy_lab.cpachecker.exceptions.CPAException;
/**
* Stop operator of {@link SlicingCPA}.
* Uses the stop operator of the CPA wrapped by the SlicingCPA,
* with the precision of the CPA wrapped by the SlicingCPA.
*/
public class PrecisionDelegatingStop
    implements StopOperator {

  /** Stop operator of the CPA wrapped by the SlicingCPA. */
  private final StopOperator delegate;

  public PrecisionDelegatingStop(final StopOperator pDelegateStop) {
    delegate = pDelegateStop;
  }

  /**
   * Delegates the stop check to the wrapped CPA's stop operator, after
   * unwrapping the {@link SlicingPrecision} to obtain that CPA's own
   * precision.
   */
  @Override
  public boolean stop(
      final AbstractState pState,
      final Collection<AbstractState> pReached,
      final Precision pPrecision
  ) throws CPAException, InterruptedException {
    checkState(
        pPrecision instanceof SlicingPrecision,
        "Precision not of type %s, but %s",
        SlicingPrecision.class.getSimpleName(),
        pPrecision.getClass().getSimpleName());

    final SlicingPrecision slicingPrecision = (SlicingPrecision) pPrecision;
    return delegate.stop(pState, pReached, slicingPrecision.getWrappedPrec());
  }
}
|
<filename>database/auth/auth-router.js
const bcryptjs = require("bcryptjs");
const router = require("express").Router();
const jwt = require("jsonwebtoken");
const secrets = require('../config/secrets');
const authenticate = require('../middleware/restricted');
const Users = require("../users/users-model.js");
function isValid(user) {
return Boolean(
user.username && user.password && typeof user.password === "string"
);
}
function isValidPassword(user) {
return Boolean(
user.password && typeof user.password === "string"
);
}
// POST /register -- create a new user account.
// Hashes the password with bcrypt before persisting, then returns a JWT.
router.post("/register", (req, res) => {
  const credentials = req.body;
  if (isValid(credentials)) {
    const rounds = bcryptjs.genSaltSync(10);
    const hash = bcryptjs.hashSync(credentials.password, rounds);
    // BUGFIX: the original assigned the placeholder `<PASSWORD>` (a
    // syntax error left by redaction) instead of the computed hash.
    credentials.password = hash;
    Users.add(credentials)
      .then((user) => {
        const token = generateToken(user);
        res.status(201).json({ message: "Registration successful", data: {username: user.username, email: user.email}, token });
      })
      .catch((error) => {
        res.status(500).json({ message: error.message });
      });
  } else {
    res.status(400).json({
      message:
        "please provide username and password and the password shoud be alphanumeric",
    });
  }
});
// DELETE /delete-user -- permanently remove the authenticated user's
// own account (identity taken from the decoded JWT, not the body).
router.delete('/delete-user', authenticate,(req, res) => {
  const { subject, username } = req.decodedJwt;
  Users.remove(subject)
    .then(deleted => {
      if (!deleted) {
        res.status(404).json({ message: 'Could not find user with given id' });
        return;
      }
      res.json({ message: `${username} Deleted`, removed: "User Profile Permanently Deleted" });
    })
    .catch(err => {
      res.status(500).json({ message: 'Failed to delete user' });
    });
});
// PUT /change-password -- let the authenticated user change only their
// own password (identity comes from the decoded JWT).
router.put('/change-password', authenticate,(req, res) => {
  //extract password from body so it is the only thing the user can change
  const changes = { password: req.body.password };
  const { subject, username } = req.decodedJwt;
  if (isValidPassword(changes)) {
    // Hash the new password before storing it.
    const rounds = bcryptjs.genSaltSync(10);
    const hash = bcryptjs.hashSync(changes.password, rounds);
    changes.password = hash;
    // Verify the user still exists before applying the update.
    Users.getUsers(subject)
      .then(user => {
        if (user) {
          Users.update(changes, subject)
            .then(() => {
              res.json({message: `Password changed for ${username}` });
            });
        } else {
          res.status(404).json({ message: 'Could not find user with given id' });
        }
      })
      .catch (err => {
        res.status(500).json({ message: 'Failed to update user' });
      });
  }else {
    res.status(400).json({
      message:
        "please provide a new password and the password shoud be alphanumeric",
    });
  }
});
//----------------------------------------------------------------------------//
// When someone successfully authenticates, reward them with a token, so they
// don't have to authenticate again.
//----------------------------------------------------------------------------//
// POST /login -- authenticate a user and, on success, issue a JWT so
// they do not have to authenticate again until it expires.
router.post("/login", (req, res) => {
  const { username, password } = req.body;
  if (!isValid(req.body)) {
    res.status(400).json({
      message:
        "please provide username and password and the password shoud be alphanumeric",
    });
    return;
  }
  Users.findBy({ username: username })
    .then(([user]) => {
      // Compare the supplied password against the stored bcrypt hash.
      const passwordMatches = user && bcryptjs.compareSync(password, user.password);
      if (passwordMatches) {
        const token = generateToken(user);
        res.status(200).json({
          message: `Welcome to med-cabinet ${username}`,
          token,
        });
      } else {
        res.status(401).json({ message: "Invalid credentials" });
      }
    })
    .catch((error) => {
      res.status(500).json({ message: error.message });
    });
});
// Build a signed JWT for `user`, valid for one day. The subject claim
// is the user id; the username rides along for display purposes.
function generateToken(user) {
  return jwt.sign(
    { subject: user.id, username: user.username },
    secrets.jwtSecret,
    { expiresIn: "1d" }
  );
}
module.exports = router;
|
package cli
import (
"testing"
"time"
)
// TestProcessBar drives the progress bar from 0 to 100 and finishes it.
func TestProcessBar(t *testing.T) {
	var bar Bar
	bar.NewOption(0, 100)
	for i := 0; i <= 100; i++ {
		// A 1ms pause is enough to exercise redraw pacing; the original
		// 100ms per step made this test take over ten seconds.
		time.Sleep(time.Millisecond)
		bar.Play(int64(i))
	}
	bar.Finish()
}
|
<reponame>JenKinY/MallVipManage
package com.yingnuo.web.servlet.admin.handle;
import com.google.gson.Gson;
import com.sun.org.apache.xpath.internal.operations.Or;
import com.yingnuo.domain.Admin;
import com.yingnuo.domain.Order;
import com.yingnuo.service.AdminService;
import com.yingnuo.service.OrderService;
import javax.security.auth.login.LoginException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* Created with IntelliJ IDEA.
* User: skyzc
* Date: 2019/12/5
* Time: 12:29
* To change this template use File | Settings | File Templates.
* Description:
*/
@WebServlet("/admin/editOrder")
public class UpdateOrderByOrderId extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        // GET is not supported; HttpServlet's default sends an error response.
        super.doGet(req, resp);
    }

    /**
     * Updates an order's phone and amount fields by order id and writes a
     * JSON {"msg": boolean} result. Requires a logged-in admin session;
     * otherwise forwards to the admin login page.
     */
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        // Check whether an admin session exists; redirect to login if not.
        if (req.getSession().getAttribute("admin") == null){
            System.out.println("session 中没有 admin,没有管理员登陆!请登录...");
            RequestDispatcher dispatcher = req.getRequestDispatcher("/WEB-INF/views/admin/admin_login.jsp");
            dispatcher.forward(req,resp);
            return;
        }
        // NOTE(review): Long.valueOf throws NumberFormatException (500) if
        // order_id is missing or non-numeric -- confirm clients always send it.
        Long order_id = Long.valueOf((req.getParameter("order_id")));
        Order order = new Order();
        order.setOrder_id(order_id);
        order.setUser_phone(req.getParameter("user_phone"));
        order.setActual_amount(req.getParameter("actual_amount"));
        order.setPay_amount(req.getParameter("pay_amount"));
        System.out.println(order.toString());
        OrderService orderService = new OrderService();
        try {
            Boolean msg = orderService.updateOrderByOrderId(order);
            // Serialize the result flag as JSON via GSON.
            PrintWriter out = resp.getWriter();
            Map<String,Boolean> map = new HashMap<String,Boolean>();
            map.put("msg",msg);
            Gson gson = new Gson();
            String json = gson.toJson(map);
            out.println(json);
            out.flush();
            out.close();
        } catch (LoginException e) {
            e.printStackTrace();
        }
    }
}
#!/bin/sh
# Clean build with tests and docs. The commented variant below also
# enables coverage and profiling.
#cabal clean && cabal configure --enable-tests --enable-library-coverage --enable-library-profiling --enable-executable-profiling && cabal build && cabal test && cabal haddock
cabal clean && cabal configure --enable-tests && cabal build && cabal test && cabal haddock
from typing import List, Tuple


def get_combos() -> List[Tuple[int, int]]:
    """Return the 8 (row, col) offsets of a cell's Moore neighbourhood.

    Offsets range over {-1, 0, 1} x {-1, 0, 1}, excluding the centre
    (0, 0), in row-major order.
    """
    return [(di, dj)
            for di in range(-1, 2)
            for dj in range(-1, 2)
            if (di, dj) != (0, 0)]
#!/bin/bash
# Resolve ANDROID_HOME: the first CLI argument wins; otherwise fall back
# to the environment variable, failing when neither is set.
check_android_home() {
    if [ "$#" -ge 1 ]; then
        ANDROID_HOME=$1
    elif [ -z "${ANDROID_HOME}" ]; then
        echo "Please either set ANDROID_HOME environment variable, or pass ANDROID_HOME directory as a parameter"
        exit 1
    fi
    echo "ANDROID_HOME is at $ANDROID_HOME"
}
# Pre-accept every known Android SDK license by writing the expected
# hashes into $ANDROID_HOME/licenses, so sdkmanager runs unattended.
accept_all_android_licenses() {
    ANDROID_LICENSES="$ANDROID_HOME/licenses"
    # Quote the path so an ANDROID_HOME containing spaces still works.
    if [ ! -d "$ANDROID_LICENSES" ]; then
        echo "Android licenses directory doesn't exist, creating one..."
        mkdir -p "$ANDROID_LICENSES"
    fi
    accept_license_of android-googletv-license 601085b94cd77f0b54ff86406957099ebe79c4d6
    accept_license_of android-sdk-license 8933bad161af4178b1185d1a37fbf41ea5269c55
    accept_license_of android-sdk-license d56f5187479451eabf01fb78af6dfcb131a6481e
    accept_license_of android-sdk-license 24333f8a63b6825ea9c5514f83c2829b004d1fee
    accept_license_of android-sdk-preview-license 84831b9409646a918e30573bab4c9c91346d8abd
    accept_license_of android-sdk-preview-license 504667f4c0de7af1a06de9f4b1727b84351f2910
    accept_license_of google-gdk-license 33b6a2b64607f11b759f320ef9dff4ae5c47d97a
    accept_license_of intel-android-extra-license d975f751698a77b662f1254ddbeed3901e976f5a
}
# Append a license hash to its license file unless already present.
# $1 - license name (file name under $ANDROID_LICENSES)
# $2 - license acceptance hash
accept_license_of() {
    local license=$1
    local content=$2
    local file="$ANDROID_LICENSES/$license"
    # Quote all path/content expansions so names with spaces or glob
    # characters cannot break the tests or redirections.
    if [ -f "$file" ]; then
        if grep -q "^$content$" "$file"; then
            echo "$license: $content has been accepted already"
        else
            echo "Accepting $license: $content ..."
            echo -e "$content" >> "$file"
        fi
    else
        echo "Accepting $license: $content ..."
        echo -e "$content" > "$file"
    fi
}
# Entry point: resolve ANDROID_HOME (optionally from the first argument)
# and then pre-accept all known SDK licenses.
check_android_home "$@"
accept_all_android_licenses
|
def compute_GCD(a, b):
    """Greatest common divisor of ``a`` and ``b`` via Euclid's algorithm.

    Preserves the original contract exactly: the loop runs only while
    ``b`` is positive, so a non-positive ``b`` returns ``a`` unchanged.
    """
    while b > 0:
        a, b = b, a % b
    return a
#! /bin/bash
# Builds a toolchain and qemu-system for testing and debugging WebKit.
#
# Usage:
# build.sh [ --? | -h | --help ]
# [ -a | --arch "..." ]
# [ -j ] Number of cores to use during build (default: $(nproc))
# [ -k ]
# [ --br2 "..." ]
# [ --br2-version "..." ]
# [ --br2-external "..." ]
# [ --temp | --tmp "..." ]
# [ --sdk ]
# [ --version ]
# output-directory
#
# Script identity and defaults for all configurable options.
PROGRAM=$(basename "$0")
VERSION=1.0
# Empty defaults mean "clone/create as needed" for paths below.
ARCH=
BR2PATH=
BR2VERSION='2020.05.2'
BR2EXTERNAL=
TEMPPATH=
# Parallel build jobs; defaults to the machine's core count.
JLEVEL=$(nproc)
# Absolute directory containing this script (used to find patches/common.sh).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
SDK=0
KEEP=0
# shellcheck source=./common.sh
source "${DIR}/common.sh"
# Print the usage text to stdout. The heredoc body is user-facing
# output and must stay as-is.
usage()
{
cat <<EOF
Usage:
$PROGRAM
[ -h | --help | --? ] Show help and exit
[ -a | --arch ] Platform to build system for (required!)
[ -j ] Number of cores to use during build (default: $(nproc))
[ -k ] Keep temporary files
[ --br2 "..." ] Path to custom buildroot tree (default: checkout)
[ --br2-version "..." ] Buildroot tag to checkout (default: $BR2VERSION)
[ --br2-external "..." ] Path to custom buildroot JSC external tree (default: checkout)
[ --temp | --tmp "..." ] Path to custom temporary directory (default: create)
[ --sdk ] Generate SDK file
[ --version ] Show version and exit
output-directory Directory to install buildroot to
EOF
}
# Parse command-line flags; anything that is not a recognised option
# stops the loop and is treated as the positional output directory.
while test $# -gt 0
do
    case $1 in
    --br2 )
        shift
        BR2PATH="$1"
        ;;
    --br2-version )
        shift
        BR2VERSION="$1"
        ;;
    --br2-external )
        shift
        BR2EXTERNAL="$1"
        ;;
    --temp | --tmp )
        shift
        TEMPPATH="$1"
        ;;
    -a | --arch )
        shift
        ARCH="$1"
        ;;
    -j )
        shift
        JLEVEL="$1"
        ;;
    --sdk )
        SDK=1
        ;;
    -k )
        KEEP=1
        ;;
    --version )
        version "${PROGRAM}" "${VERSION}"
        exit 0
        ;;
    --help | -h | '--?' )
        usage_and_exit 0
        ;;
    -*)
        error "Unrecognized option: $1"
        ;;
    *)
        # First non-option argument: leave it for the positional check below.
        break
        ;;
    esac
    shift
done
# Argument and flag option checks
if [ -z "${ARCH}" ]; then
    error "architecture option -a or --arch is required (supported archs: mips, arm)"
fi
# Also accept mipsel for mips
if [[ "${ARCH}" == "mipsel" ]]; then
    ARCH="mips"
fi
if [[ "${ARCH}" != "mips" ]] && [[ "${ARCH}" != "arm" ]]; then
    error "unsupported architecture ${ARCH}, select arm or mips"
fi
# Select the buildroot defconfig matching the target architecture.
BR2_DEFCONFIG=
if [[ "${ARCH}" == "mips" ]]; then
    BR2_DEFCONFIG="qemu-mips32elr2-jsc_defconfig"
else
    BR2_DEFCONFIG="qemu-arm32-jsc_defconfig"
fi
if [ "$#" != "1" ]; then
    error "expected a single argument, got $#"
fi
# Create a scratch directory unless the caller supplied one via --tmp.
if [ -z "${TEMPPATH}" ]; then
    TEMPPATH=$(mktemp -d)
fi
# Receives one argument, the destination directory for the build
OUTPUT=$(realpath -m "$1")
if ! mkdir -p "${OUTPUT}" &> /dev/null; then
    error "output path already exists: ${OUTPUT}"
fi
progress "Creating toolchain in ${OUTPUT}"
# Fetch the buildroot tree and the JSC external tree unless custom
# checkouts were provided on the command line.
pushd "${TEMPPATH}" || error "cannot pushd"
if [ -z "${BR2EXTERNAL}" ]; then
    progress "cloning jsc br2 external"
    git clone --quiet --depth=1 https://github.com/pmatos/jsc-br2-external.git
fi
if [ -z "${BR2PATH}" ]; then
    progress "cloning buildroot"
    git clone --quiet --depth=1 --branch "${BR2VERSION}" https://github.com/buildroot/buildroot
fi
popd || error "cannot popd"
pushd "${OUTPUT}" || error "cannot pushd"
progress "configuring buildroot defconfig"
if ! make O="${PWD}" -C "${TEMPPATH}/buildroot" BR2_EXTERNAL="${TEMPPATH}/jsc-br2-external" "${BR2_DEFCONFIG}" &> "${TEMPPATH}/configure.log"; then
    tail "${TEMPPATH}/configure.log"
    error "failed to configure buildroot"
fi
# On ARM, apply an upstream gdb fix before the main build.
if [[ "${ARCH}" == "arm" ]]; then
    progress "extraordinary patching of gdb-8.3.1 in ARMv7"
    pushd "${OUTPUT}" || error "cannot pushd"
    make host-gdb-patch
    pushd "${OUTPUT}/build/host-gdb-8.3.1" || error "cannot pushd"
    if ! patch -p1 < "${DIR}/851c0536cabb661847c45c73ebd796eb3299066b.diff"; then
        error "failed to patch"
    fi
    popd || error "cannot popd"
    popd || error "cannot popd"
fi
# Fix the duplicate yylloc definition in the 4.19 kernel's dtc lexer
# (breaks builds with newer host GCC).
progress "extraordinary patching of kernel 4.19"
pushd "${OUTPUT}" || error "cannot pushd"
make linux-patch
sed -i 's/^YYLTYPE yylloc;$/extern YYLTYPE yylloc;/' ./build/linux-4.19.91/scripts/dtc/dtc-lexer.l
popd || error "cannot popd"
progress "building root"
if ! make BR2_JLEVEL="${JLEVEL}" &> "${TEMPPATH}/build.log"; then
    tail "${TEMPPATH}/build.log"
    error "failed to build buildroot"
fi
# Need to convert image to use it as backing file
progress "Converting raw image to qcow2"
if ! host/bin/qemu-img convert -q -O qcow2 images/rootfs.ext2 images/rootfs.qcow2; then
    error "Failed to convert image"
fi
# Optionally build and expose a relocatable SDK tarball.
if [[ "${SDK}" == "1" ]]; then
    progress "building sdk"
    if ! make BR2_JLEVEL="${JLEVEL}" sdk &> "${TEMPPATH}/sdk.log"; then
        tail "${TEMPPATH}/sdk.log"
        error "failed to build sdk"
    fi
    if [[ "${ARCH}" == "mips" ]] && [[ -f "images/mipsel-buildroot-linux-gnu_sdk-buildroot.tar.gz" ]]; then
        progress "SDK image in: images/mipsel-buildroot-linux-gnu_sdk-buildroot.tar.gz"
        ln images/mipsel-buildroot-linux-gnu_sdk-buildroot.tar.gz images/sdk.tar.gz
    elif [[ "${ARCH}" == "arm" ]] && [[ -f "images/arm-buildroot-linux-gnueabihf_sdk-buildroot.tar.gz" ]]; then
        progress "SDK image in: images/arm-buildroot-linux-gnueabihf_sdk-buildroot.tar.gz"
        ln images/arm-buildroot-linux-gnueabihf_sdk-buildroot.tar.gz images/sdk.tar.gz
    else
        error "cannot find SDK image"
    fi
fi
popd || error "cannot popd"
# Clean up (or keep, with -k) the scratch directory.
if [[ "${KEEP}" == "1" ]]; then
    progress "Keeping temporary files in ${TEMPPATH}"
else
    progress "Cleaning up temporary folder ${TEMPPATH}"
    rm -Rf "${TEMPPATH}"
fi
|
package com.twitter.finatra.http.tests.integration.tweetexample.main.services
import com.twitter.concurrent.AsyncStream
import com.twitter.concurrent.AsyncStream.fromOption
import com.twitter.finatra.http.tests.integration.tweetexample.main.domain.Tweet
import com.twitter.util.Future
class MyTweetsRepository extends TweetsRepository {

  // Canned tweet fixtures, keyed by tweet id.
  private val tweets = Map[Long, Tweet](
    1L -> Tweet(
      id = 1,
      user = "Bob",
      msg = "whats up"),
    2L -> Tweet(
      id = 2,
      user = "Sally",
      msg = "yo"),
    3L -> Tweet(
      id = 3,
      user = "Fred",
      msg = "hey"))

  // Look up a single tweet by id.
  def getById(id: Long): Future[Option[Tweet]] =
    Future.value(tweets.get(id))

  // Resolve a stream of ids to tweets, silently dropping unknown ids.
  def getByIds(ids: AsyncStream[Long]): AsyncStream[Tweet] =
    ids.flatMap(id => fromOption(tweets.get(id)))
}
|
#!/bin/bash
# Run dieharder test #12 against generator #45 with a fixed seed, so the
# result is reproducible across runs.
dieharder -d 12 -g 45 -S 2720877956
|
#!/usr/bin/env node
'use strict'
exports.run = run
const fs = require('fs')
const path = require('path')
const minimist = require('minimist')
const pkg = require('../package.json')
const npmls2dg = require('../npmls2dg')
const Logger = require('./logger').getLogger()
// run from the cli
// run from the cli: parse flags, read the `npm ls` input, convert it,
// and write the dot/svg output to a file or stdout.
function run () {
  const minimistOpts = {
    boolean: ['svg', 'dot', 'help', 'version'],
    alias: {
      s: 'svg',
      d: 'dot',
      h: 'help',
      v: 'version'
    }
  }

  const argv = minimist(process.argv.slice(2), minimistOpts)

  // check for help and version options
  if (argv.version) version()
  if (argv.help) help()

  // set up cmd, args, opts
  // BUGFIX: minimist places positional arguments in argv._, not at
  // argv[0]/argv[1]; the original therefore always fell back to
  // stdin (fd 0) and stdout ('-') regardless of the arguments given.
  const iFileName = argv._[0] || 0
  const oFileName = argv._[1] || '-'
  let format = argv.dot ? 'dot' : 'svg'

  let input
  try {
    input = fs.readFileSync(iFileName, 'utf8')
  } catch (err) {
    const name = iFileName === 0 ? '<stdin>' : `file "${iFileName}"`
    Logger.log(`error reading ${name}: ${err}`)
    process.exit(1)
  }

  const opts = {
    format: format,
    messages: []
  }

  const output = npmls2dg.convert(input, opts)
  for (let message of opts.messages) Logger.log(message)
  // convert() signals failure by returning null/undefined.
  if (output == null) process.exit(1)

  if (oFileName === '-') {
    console.log(output)
    process.exit(0)
  }

  try {
    fs.writeFileSync(oFileName, output)
  } catch (err) {
    Logger.log(`error writing ${oFileName}: ${err}`)
    process.exit(1)
  }
}
// print version and exit
// Print the package version to stdout and exit successfully.
function version () {
  console.log(pkg.version)
  process.exit(0)
}
// print help and exit
// Print the help text and exit with a non-zero status.
function help () {
  console.log(getHelp())
  process.exit(1)
}
// get help text
// Load HELP.md from the package root and substitute the
// %%program%% / %%version%% placeholders from package.json.
function getHelp () {
  const helpFile = path.join(__dirname, '..', 'HELP.md')
  return fs.readFileSync(helpFile, 'utf8')
    .replace(/%%program%%/g, pkg.name)
    .replace(/%%version%%/g, pkg.version)
}
|
import React, { useRef, useState, useEffect } from 'react';
import { Link, RouteComponentProps } from 'react-router-dom';
import { motion } from 'framer-motion';
import { useTracking } from 'react-tracking';
import Observer from '@researchgate/react-intersection-observer';
import SearchBar from '../../components/search-bar/search-bar';
import MerchantCell from '../../components/merchant-cell/merchant-cell';
import { DirectoryCategory, DirectoryCuration } from '../../../services/directory';
import { Merchant, getGiftCardDiscount, getPromoEventParams } from '../../../services/merchant';
import { resizeToFitPage } from '../../../services/frame';
import { wait } from '../../../services/utils';
import { listAnimation } from '../../../services/animations';
import { trackComponent } from '../../../services/analytics';
import './category.scss';
const Category: React.FC<RouteComponentProps & { merchants: Merchant[] }> = ({ location, merchants }) => {
const tracking = useTracking();
const scrollRef = useRef<HTMLDivElement>(null);
const contentRef = useRef<HTMLDivElement>(null);
const { searchVal: searchValue, scrollTop } = location.state as { searchVal: string; scrollTop: number };
const { category, curation } = location.state as { category?: DirectoryCategory; curation?: DirectoryCuration };
const [searchVal, setSearchVal] = useState('' as string);
const [isDirty, setDirty] = useState(false);
const [loaded, setLoaded] = useState(false);
const baseSet = ((): Merchant[] => {
if (curation) return curation.availableMerchants;
if (category) return category.availableMerchants;
return merchants;
})();
const renderList = baseSet.filter(merchant =>
searchVal
? merchant.name.toLowerCase().includes(searchVal.toLowerCase()) ||
merchant.tags.find(tag => tag.includes(searchVal.toLowerCase()))
: baseSet
);
const handleIntersection = (merchant: Merchant) => (event: IntersectionObserverEntry): void => {
if (event.isIntersecting)
tracking.trackEvent({
action: 'presentedWithGiftCardPromo',
...getPromoEventParams(merchant),
gaAction: `presentedWithGiftCardPromo:${merchant.name}`
});
};
const resizeSwitch = (length: number): number => {
if (length > 3) return 100;
if (length > 2) return 50;
return 0;
};
const handleClick = (merchant: Merchant): void => {
location.state = { scrollTop: scrollRef.current?.scrollTop as number, searchVal, category, curation };
if (getGiftCardDiscount(merchant)) {
tracking.trackEvent({
action: 'clickedGiftCardPromo',
...getPromoEventParams(merchant),
gaAction: `clickedGiftCardPromo:${merchant.name}`
});
}
};
useEffect(() => {
let timer: ReturnType<typeof setTimeout>;
if (renderList.length > 24) {
timer = setTimeout(() => setLoaded(true), 400);
} else {
setLoaded(true);
}
return (): void => {
if (timer) {
clearTimeout(timer);
}
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useEffect(() => {
if (searchVal) setDirty(true);
}, [searchVal]);
useEffect(() => {
const setScrollPositionAndSearchVal = async (): Promise<void> => {
if (location.state) {
if (searchValue) setSearchVal(searchValue);
await wait(renderList.length > 24 ? 400 : 0);
if (scrollRef.current) scrollRef.current.scrollTop = scrollTop || 0;
}
};
resizeToFitPage(contentRef);
setScrollPositionAndSearchVal();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [scrollRef, contentRef, location.state]);
useEffect(() => {
resizeToFitPage(contentRef, resizeSwitch(renderList.length));
}, [searchVal, renderList]);
// Row renderer: a merchant cell wrapped in a Link to the brand page,
// forwarding the merchant and active category/curation via router state.
const ListItem: React.FC<{ merchant: Merchant }> = ({ merchant }) => {
  const destination = {
    pathname: `/brand/${merchant.name}`,
    state: { merchant, category, curation }
  };
  return (
    <Link to={destination} key={merchant.name} onClick={(): void => handleClick(merchant)}>
      <MerchantCell key={merchant.name} merchant={merchant} />
    </Link>
  );
};
// Render: spinner while the deferred load gate is closed; afterwards either
// the (possibly search-filtered) merchant list under a contextual header
// (search / curation / category / total count), or a zero state.
return (
<div className="category-page" ref={scrollRef}>
<SearchBar output={setSearchVal} value={searchVal} tracking={tracking} />
<div className="shop-page__content" ref={contentRef}>
{loaded ? (
<>
{renderList.length > 0 ? (
<>
<div className="shop-page__section-header">
{searchVal ? (
<>Search Results</>
) : (
<>
{curation && <>{curation.displayName}</>}
{category && (
<div className="shop-page__section-header--wrapper">
<div className="shop-page__section-header--emoji">{category.emoji}</div>
{category.displayName}
</div>
)}
{!curation && !category && <>Shop {renderList.length} Brands</>}
</>
)}
</div>
{renderList.map((merchant, index) => (
<motion.div
custom={index}
initial={index > 7 || scrollTop > 0 || isDirty ? 'base' : 'delta'}
animate="base"
variants={listAnimation}
key={merchant.name}
>
{getGiftCardDiscount(merchant) ? (
<Observer onChange={handleIntersection(merchant)}>
<div>
<ListItem merchant={merchant} />
</div>
</Observer>
) : (
<ListItem merchant={merchant} />
)}
</motion.div>
))}
</>
) : (
<div className="zero-state">
<div className="zero-state__title">No Results</div>
<div className="zero-state__subtitle">Please try searching something else</div>
</div>
)}
</>
) : (
<div className="loading-spinner__wrapper">
<img className="loading-spinner" src="../assets/icons/spinner-thick.svg" alt="spinner" />
</div>
)}
</div>
</div>
);
};
// Wrap the component with analytics tracking under the 'category' page name.
export default trackComponent(Category, { page: 'category' });
|
package database
import (
"time"
"github.com/backpulse/core/models"
"gopkg.in/mgo.v2/bson"
)
// AddVideo inserts a new video document into the videos collection,
// stamping CreatedAt and UpdatedAt with the same instant so the two
// fields are guaranteed to match on a freshly created record.
func AddVideo(video models.Video) error {
	// Single clock read: the original called time.Now() twice, which
	// produced slightly different CreatedAt/UpdatedAt values.
	now := time.Now()
	video.CreatedAt = now
	video.UpdatedAt = now
	return DB.C(videosCollection).Insert(video)
}
// GetVideo returns the single video identified by videoID, or the zero
// value and the lookup error when no such document exists.
func GetVideo(videoID bson.ObjectId) (models.Video, error) {
	var result models.Video
	if err := DB.C(videosCollection).FindId(videoID).One(&result); err != nil {
		return result, err
	}
	return result, nil
}
// GetGroupVideos returns every video whose video_group_id matches id.
func GetGroupVideos(id bson.ObjectId) ([]models.Video, error) {
	query := bson.M{"video_group_id": id}
	var result []models.Video
	err := DB.C(videosCollection).Find(query).All(&result)
	return result, err
}
// UpdateVideo updates the user-editable fields of the video identified
// by id (title, content, youtube_url). Other fields are left untouched.
// (Fixes the godoc comment, which previously read "Updatevideo" and so
// did not start with the exported identifier's name.)
func UpdateVideo(id bson.ObjectId, video models.Video) error {
	// $set only the editable fields so index/timestamps are preserved.
	return DB.C(videosCollection).UpdateId(id, bson.M{
		"$set": bson.M{
			"title":       video.Title,
			"content":     video.Content,
			"youtube_url": video.YouTubeURL,
		},
	})
}
// RemoveVideo deletes the video with the given ObjectID from the
// videos collection.
func RemoveVideo(id bson.ObjectId) error {
	return DB.C(videosCollection).RemoveId(id)
}
// UpdateVideosIndexes persists the display order ("index" field) of each
// video belonging to siteID, returning on the first failed update.
func UpdateVideosIndexes(siteID bson.ObjectId, videos []models.Video) error {
	coll := DB.C(videosCollection)
	for i := range videos {
		// Match on both site and id so a video from another site
		// can never be reordered by mistake.
		selector := bson.M{
			"site_id": siteID,
			"_id":     videos[i].ID,
		}
		change := bson.M{"$set": bson.M{"index": videos[i].Index}}
		if err := coll.Update(selector, change); err != nil {
			return err
		}
	}
	return nil
}
|
#!/bin/sh
# Provision a Debian/Ubuntu media box: Xbox gamepad driver + Kodi.
# Abort on the first failing command so a broken update/upgrade doesn't
# silently continue into the installs (the original ignored all errors).
set -e

# Refresh package lists and bring the system up to date.
sudo apt-get update
sudo apt-get upgrade -y

# xboxdrv: userspace Xbox controller driver; kodi: the media center.
# One install invocation instead of two separate apt-get calls.
sudo apt-get install -y xboxdrv kodi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.