text
stringlengths 1
1.05M
|
|---|
#!/bin/bash
: "
This script waits for the creation of a stack for a given ticket id.
IMPORTANT: It requires that you have active session via assume-role. If you
don't, it will not give any error leaving you waiting for nothing.
Note: For Team City, sleep first for about 5 minutes before triggering this
script because that's how long before the build gets picked up.
Usage:
./scriptname.sh <ticket-id>
@ticket-id - the JIRA ticket ID, e.g. MT-3070
"

# Lowercase the ticket id so it matches the stack naming convention.
TICKET_ID=$(echo "$1" | tr '[:upper:]' '[:lower:]')

if [ -z "$TICKET_ID" ]; then
  echo "NOTE: Assume Role as needed."
  echo "Usage:"
  echo "./wait-for-stack-provision.sh <ticket-id>"
  exit 1
fi

# Print the first "ec-" stack name matching the ticket. The awk filter keeps
# only stacks whose 10th dash-delimited field is a number greater than 6000
# (presumably a build id -- TODO confirm the naming convention).
find_stack() {
  aws cloudformation describe-stacks \
    | jq -r '.Stacks[] | .StackName' \
    | grep -- "$TICKET_ID" \
    | grep -- 'ec-' \
    | awk -F- '$10 > 6000 {print $0}' \
    | sed '1q;d'
}

# 1. Wait up to 10 minutes for stack to be provisioned (60 polls x 10s).
STACK_NAME=$(find_stack)
COUNT=0
while [ -z "$STACK_NAME" ]; do
  echo "Waiting to provision the stack for ticket $TICKET_ID..."
  sleep 10
  COUNT=$((COUNT + 1))
  if [ "$COUNT" -gt 60 ]; then
    echo "Error: Timeout on stack provision: $TICKET_ID" >&2
    exit 1
  fi
  STACK_NAME=$(find_stack)
done
echo "I found a stack: $STACK_NAME"
|
#!/bin/bash
set -u
# First check if the OS is Linux.
if [[ "$(uname)" = "Linux" ]]; then
HOMEBREW_ON_LINUX=1
fi
# On macOS, this script installs to /usr/local only.
# On Linux, it installs to /home/linuxbrew/.linuxbrew if you have sudo access
# and ~/.linuxbrew otherwise.
# To install elsewhere (which is unsupported)
# you can untar https://github.com/Homebrew/brew/tarball/master
# anywhere you like.
# Per-OS paths and tool locations: BSD userland on macOS, GNU on Linux.
if [[ -z "${HOMEBREW_ON_LINUX-}" ]]; then
HOMEBREW_PREFIX="/usr/local"
HOMEBREW_REPOSITORY="/usr/local/Homebrew"
HOMEBREW_CACHE="${HOME}/Library/Caches/Homebrew"
STAT="stat -f"
CHOWN="/usr/sbin/chown"
CHGRP="/usr/bin/chgrp"
GROUP="admin"
else
HOMEBREW_PREFIX_DEFAULT="/home/linuxbrew/.linuxbrew"
HOMEBREW_CACHE="${HOME}/.cache/Homebrew"
STAT="stat --printf"
CHOWN="/bin/chown"
CHGRP="/bin/chgrp"
GROUP="$(id -gn)"
fi
# The git remote to install from; overridable via the environment.
BREW_REPO=${BREW_REPO:-"https://github.com/Homebrew/brew"}
# TODO: bump version when new macOS is released
MACOS_LATEST_SUPPORTED="10.15"
# TODO: bump version when new macOS is released
MACOS_OLDEST_SUPPORTED="10.13"
# no analytics during installation
export HOMEBREW_NO_ANALYTICS_THIS_RUN=1
export HOMEBREW_NO_ANALYTICS_MESSAGE_OUTPUT=1
# string formatters
# Only emit ANSI escape sequences when stdout is a terminal.
if [[ -t 1 ]]; then
tty_escape() { printf "\033[%sm" "$1"; }
else
tty_escape() { :; }
fi
tty_mkbold() { tty_escape "1;$1"; }
tty_underline="$(tty_escape "4;39")"
tty_blue="$(tty_mkbold 34)"
tty_red="$(tty_mkbold 31)"
tty_bold="$(tty_mkbold 39)"
tty_reset="$(tty_escape 0)"
have_sudo_access() {
# Probe sudo once and cache the result in HAVE_SUDO_ACCESS (0 = has access).
if [[ -z "${HAVE_SUDO_ACCESS-}" ]]; then
/usr/bin/sudo -l mkdir &>/dev/null
HAVE_SUDO_ACCESS="$?"
fi
# macOS installs require sudo; bail out immediately when it is unavailable.
if [[ -z "${HOMEBREW_ON_LINUX-}" ]] && [[ "$HAVE_SUDO_ACCESS" -ne 0 ]]; then
abort "Need sudo access on macOS!"
fi
return "$HAVE_SUDO_ACCESS"
}
shell_join() {
  # Join all arguments into one space-separated string, backslash-escaping
  # any spaces embedded inside the arguments themselves.
  local first="$1"
  shift
  printf "%s" "$first"
  local word
  for word in "$@"; do
    printf " %s" "${word// /\ }"
  done
}
chomp() {
  # Remove the first newline from the argument (used to tidy single-line
  # messages). The previous pattern was written as "$'\n'" -- the quotes
  # stop bash from applying ANSI-C quoting inside the substitution pattern,
  # so it matched the literal text $'\n' and never stripped real newlines.
  printf "%s" "${1/$'\n'/}"
}
ohai() {
  # Print a highlighted "==>" status line composed of all arguments.
  local joined
  joined="$(shell_join "$@")"
  printf "${tty_blue}==>${tty_bold} %s${tty_reset}\n" "$joined"
}
warn() {
  # Print a red "Warning:" prefix followed by the newline-stripped message.
  local message
  message="$(chomp "$1")"
  printf "${tty_red}Warning${tty_reset}: %s\n" "$message"
}
abort() {
  # Print the given message and terminate the script with failure status.
  local message="$1"
  printf '%s\n' "$message"
  exit 1
}
execute() {
  # Run the given command; abort the whole script if it fails, naming the
  # command that failed in the abort message.
  if "$@"; then
    return 0
  fi
  abort "$(printf "Failed during: %s" "$(shell_join "$@")")"
}
execute_sudo() {
local -a args=("$@")
# Honor SUDO_ASKPASS by passing -A so sudo uses the askpass helper.
if [[ -n "${SUDO_ASKPASS-}" ]]; then
args=("-A" "${args[@]}")
fi
# With sudo access, echo and run the command under sudo; otherwise run it
# directly as the current user (the Linux ~/.linuxbrew path needs no sudo).
if have_sudo_access; then
ohai "/usr/bin/sudo" "${args[@]}"
execute "/usr/bin/sudo" "${args[@]}"
else
ohai "${args[@]}"
execute "${args[@]}"
fi
}
getc() {
# Read a single raw keystroke (into the variable named by "$@") without
# echoing it, restoring cooked terminal mode afterwards.
/bin/stty raw -echo
IFS= read -r -n 1 -d '' "$@"
/bin/stty -raw -echo
}
wait_for_user() {
# Block until the user confirms with RETURN; any other key aborts the script.
local c
echo
echo "Press RETURN to continue or any other key to abort"
getc c
# we test for \r and \n because some stuff does \r instead
if ! [[ "$c" == $'\r' || "$c" == $'\n' ]]; then
exit 1
fi
}
major_minor() {
  # Reduce a dotted version string such as "10.15.7" to "major.minor".
  local major="${1%%.*}"
  local rest="${1#*.}"
  printf '%s.%s\n' "$major" "${rest%%.*}"
}
# Capture the host macOS version as "major.minor" (macOS only; the Linux
# path never references macos_version).
if [[ -z "${HOMEBREW_ON_LINUX-}" ]]; then
macos_version="$(major_minor "$(/usr/bin/sw_vers -productVersion)")"
fi
version_gt() {
  # True when "major.minor" version $1 is strictly newer than $2.
  local lhs_major="${1%.*}" rhs_major="${2%.*}"
  local lhs_minor="${1#*.}" rhs_minor="${2#*.}"
  [[ "$lhs_major" -gt "$rhs_major" ]] ||
    [[ "$lhs_major" -eq "$rhs_major" && "$lhs_minor" -gt "$rhs_minor" ]]
}
version_ge() {
  # True when "major.minor" version $1 is the same as or newer than $2.
  local lhs_major="${1%.*}" rhs_major="${2%.*}"
  local lhs_minor="${1#*.}" rhs_minor="${2#*.}"
  [[ "$lhs_major" -gt "$rhs_major" ]] ||
    [[ "$lhs_major" -eq "$rhs_major" && "$lhs_minor" -ge "$rhs_minor" ]]
}
version_lt() {
  # True when "major.minor" version $1 is strictly older than $2.
  local lhs_major="${1%.*}" rhs_major="${2%.*}"
  local lhs_minor="${1#*.}" rhs_minor="${2#*.}"
  [[ "$lhs_major" -lt "$rhs_major" ]] ||
    [[ "$lhs_major" -eq "$rhs_major" && "$lhs_minor" -lt "$rhs_minor" ]]
}
should_install_git() {
  # Status 0 ("should install") when git is absent; 1 when it is already
  # available on PATH.
  if command -v git >/dev/null; then
    return 1
  fi
}
should_install_command_line_tools() {
# Linux never needs the Xcode Command Line Tools.
if [[ -n "${HOMEBREW_ON_LINUX-}" ]]; then
return 1
fi
# On newer macOS the CLT git binary alone suffices; on 10.13 and older the
# system headers (checked via iconv.h) must be present as well.
if version_gt "$macos_version" "10.13"; then
! [[ -e "/Library/Developer/CommandLineTools/usr/bin/git" ]]
else
! [[ -e "/Library/Developer/CommandLineTools/usr/bin/git" ]] ||
! [[ -e "/usr/include/iconv.h" ]]
fi
}
get_permission() {
# Print the permission field of the given path via the per-OS $STAT command.
# NOTE(review): BSD stat's %A is octal, but GNU stat's %A is the rwx string
# (octal is %a) -- on Linux the "755" comparison below may never match; verify.
$STAT "%A" "$1"
}
user_only_chmod() {
# True for an existing directory whose permission field is not "755".
[[ -d "$1" ]] && [[ "$(get_permission "$1")" != "755" ]]
}
exists_but_not_writable() {
[[ -e "$1" ]] && ! [[ -r "$1" && -w "$1" && -x "$1" ]]
}
get_owner() {
# Print the numeric uid owning the given path.
$STAT "%u" "$1"
}
file_not_owned() {
# True when the path is not owned by the current user.
[[ "$(get_owner "$1")" != "$(id -u)" ]]
}
get_group() {
# Print the numeric gid of the given path.
$STAT "%g" "$1"
}
file_not_grpowned() {
# True when the current user is not a member of the path's owning group.
# The surrounding spaces make the substring match whole gids only.
[[ " $(id -G "$USER") " != *" $(get_group "$1") "* ]]
}
# USER isn't always set so provide a fall back for the installer and subprocesses.
if [[ -z "${USER-}" ]]; then
USER="$(chomp "$(id -un)")"
export USER
fi
# Invalidate sudo timestamp before exiting (if it wasn't active before).
if ! /usr/bin/sudo -n -v 2>/dev/null; then
trap '/usr/bin/sudo -k' EXIT
fi
# Things can fail later if `pwd` doesn't exist.
# Also sudo prints a warning message for no good reason
cd "/usr" || exit 1
####################################################################### script
if should_install_git; then
abort "$(cat <<EOABORT
You must install Git before installing Homebrew. See:
${tty_underline}https://docs.brew.sh/Installation${tty_reset}
EOABORT
)"
fi
# On Linux, pick the install prefix: the shared default when it is already
# writable (or under CI), via sudo when available, else fall back to ~/.linuxbrew.
if [[ -n "${HOMEBREW_ON_LINUX-}" ]]; then
if [[ -n "${CI-}" ]] || [[ -w "$HOMEBREW_PREFIX_DEFAULT" ]] || [[ -w "/home/linuxbrew" ]] || [[ -w "/home" ]]; then
HOMEBREW_PREFIX="$HOMEBREW_PREFIX_DEFAULT"
else
trap exit SIGINT
# Probe sudo non-interactively to decide whether a password prompt is coming.
sudo_output="$(/usr/bin/sudo -n -l mkdir 2>&1)"
sudo_exit_code="$?"
if [[ "$sudo_exit_code" -ne 0 ]] && [[ "$sudo_output" = "sudo: a password is required" ]]; then
ohai "Select the Homebrew installation directory"
echo "- ${tty_bold}Enter your password${tty_reset} to install to ${tty_underline}${HOMEBREW_PREFIX_DEFAULT}${tty_reset} (${tty_bold}recommended${tty_reset})"
echo "- ${tty_bold}Press Control-D${tty_reset} to install to ${tty_underline}$HOME/.linuxbrew${tty_reset}"
echo "- ${tty_bold}Press Control-C${tty_reset} to cancel installation"
fi
if have_sudo_access; then
HOMEBREW_PREFIX="$HOMEBREW_PREFIX_DEFAULT"
else
HOMEBREW_PREFIX="$HOME/.linuxbrew"
fi
trap - SIGINT
fi
HOMEBREW_REPOSITORY="${HOMEBREW_PREFIX}/Homebrew"
fi
if [[ "$UID" == "0" ]]; then
abort "Don't run this as root!"
elif [[ -d "$HOMEBREW_PREFIX" && ! -x "$HOMEBREW_PREFIX" ]]; then
abort "$(cat <<EOABORT
The Homebrew prefix, ${HOMEBREW_PREFIX}, exists but is not searchable. If this is
not intentional, please restore the default permissions and try running the
installer again:
sudo chmod 775 ${HOMEBREW_PREFIX}
EOABORT
)"
fi
# macOS-only preflight checks: OS version support and admin-group membership.
if [[ -z "${HOMEBREW_ON_LINUX-}" ]]; then
if version_lt "$macos_version" "10.7"; then
abort "$(cat <<EOABORT
Your Mac OS X version is too old. See:
${tty_underline}https://github.com/mistydemeo/tigerbrew${tty_reset}
EOABORT
)"
elif version_lt "$macos_version" "10.9"; then
abort "Your OS X version is too old"
elif ! [[ "$(dsmemberutil checkmembership -U "$USER" -G "$GROUP")" = *"user is a member"* ]]; then
abort "This script requires the user $USER to be an Administrator."
elif version_gt "$macos_version" "$MACOS_LATEST_SUPPORTED" || \
version_lt "$macos_version" "$MACOS_OLDEST_SUPPORTED"; then
who="We"
what=""
if version_gt "$macos_version" "$MACOS_LATEST_SUPPORTED"; then
what="pre-release version"
else
who+=" (and Apple)"
what="old version"
fi
ohai "You are using macOS ${macos_version}."
ohai "${who} do not provide support for this ${what}."
echo "$(cat <<EOS
This installation may not succeed.
After installation, you will encounter build failures with some formulae.
Please create pull requests instead of asking for help on Homebrew\'s GitHub,
Discourse, Twitter or IRC. You are responsible for resolving any issues you
experience while you are running this ${what}.
EOS
)
"
fi
fi
ohai "This script will install:"
echo "${HOMEBREW_PREFIX}/bin/brew"
echo "${HOMEBREW_PREFIX}/share/doc/homebrew"
echo "${HOMEBREW_PREFIX}/share/man/man1/brew.1"
echo "${HOMEBREW_PREFIX}/share/zsh/site-functions/_brew"
echo "${HOMEBREW_PREFIX}/etc/bash_completion.d/brew"
echo "${HOMEBREW_REPOSITORY}"
# Keep relatively in sync with
# https://github.com/Homebrew/brew/blob/master/Library/Homebrew/keg.rb
directories=(bin etc include lib sbin share opt var
Frameworks
etc/bash_completion.d lib/pkgconfig
share/aclocal share/doc share/info share/locale share/man
share/man/man1 share/man/man2 share/man/man3 share/man/man4
share/man/man5 share/man/man6 share/man/man7 share/man/man8
var/log var/homebrew var/homebrew/linked
bin/brew)
# Existing directories that the current user cannot fully access will be
# made group writable later.
group_chmods=()
for dir in "${directories[@]}"; do
if exists_but_not_writable "${HOMEBREW_PREFIX}/${dir}"; then
group_chmods+=("${HOMEBREW_PREFIX}/${dir}")
fi
done
# zsh refuses to read from these directories if group writable
directories=(share/zsh share/zsh/site-functions)
zsh_dirs=()
for dir in "${directories[@]}"; do
zsh_dirs+=("${HOMEBREW_PREFIX}/${dir}")
done
# Directories that must exist and will be created if missing.
directories=(bin etc include lib sbin share var opt
share/zsh share/zsh/site-functions
var/homebrew var/homebrew/linked
Cellar Caskroom Homebrew Frameworks)
mkdirs=()
for dir in "${directories[@]}"; do
if ! [[ -d "${HOMEBREW_PREFIX}/${dir}" ]]; then
mkdirs+=("${HOMEBREW_PREFIX}/${dir}")
fi
done
user_chmods=()
if [[ "${#zsh_dirs[@]}" -gt 0 ]]; then
for dir in "${zsh_dirs[@]}"; do
if user_only_chmod "${dir}"; then
user_chmods+=("${dir}")
fi
done
fi
chmods=()
if [[ "${#group_chmods[@]}" -gt 0 ]]; then
chmods+=("${group_chmods[@]}")
fi
if [[ "${#user_chmods[@]}" -gt 0 ]]; then
chmods+=("${user_chmods[@]}")
fi
# Of the paths being chmod-ed, note which also need their owner or group fixed.
chowns=()
chgrps=()
if [[ "${#chmods[@]}" -gt 0 ]]; then
for dir in "${chmods[@]}"; do
if file_not_owned "${dir}"; then
chowns+=("${dir}")
fi
if file_not_grpowned "${dir}"; then
chgrps+=("${dir}")
fi
done
fi
# Summarize every planned filesystem change before asking for confirmation.
if [[ "${#group_chmods[@]}" -gt 0 ]]; then
ohai "The following existing directories will be made group writable:"
printf "%s\n" "${group_chmods[@]}"
fi
if [[ "${#user_chmods[@]}" -gt 0 ]]; then
ohai "The following existing directories will be made writable by user only:"
printf "%s\n" "${user_chmods[@]}"
fi
if [[ "${#chowns[@]}" -gt 0 ]]; then
ohai "The following existing directories will have their owner set to ${tty_underline}${USER}${tty_reset}:"
printf "%s\n" "${chowns[@]}"
fi
if [[ "${#chgrps[@]}" -gt 0 ]]; then
ohai "The following existing directories will have their group set to ${tty_underline}${GROUP}${tty_reset}:"
printf "%s\n" "${chgrps[@]}"
fi
if [[ "${#mkdirs[@]}" -gt 0 ]]; then
ohai "The following new directories will be created:"
printf "%s\n" "${mkdirs[@]}"
fi
if should_install_command_line_tools; then
ohai "The Xcode Command Line Tools will be installed."
fi
# Only prompt when stdin is a terminal and we are not running under CI.
if [[ -t 0 && -z "${CI-}" ]]; then
wait_for_user
fi
if [[ -d "${HOMEBREW_PREFIX}" ]]; then
# Prefix already exists: apply the permission/ownership fixes planned above.
if [[ "${#chmods[@]}" -gt 0 ]]; then
execute_sudo "/bin/chmod" "u+rwx" "${chmods[@]}"
fi
if [[ "${#group_chmods[@]}" -gt 0 ]]; then
execute_sudo "/bin/chmod" "g+rwx" "${group_chmods[@]}"
fi
if [[ "${#user_chmods[@]}" -gt 0 ]]; then
execute_sudo "/bin/chmod" "755" "${user_chmods[@]}"
fi
if [[ "${#chowns[@]}" -gt 0 ]]; then
execute_sudo "$CHOWN" "$USER" "${chowns[@]}"
fi
if [[ "${#chgrps[@]}" -gt 0 ]]; then
execute_sudo "$CHGRP" "$GROUP" "${chgrps[@]}"
fi
else
# Prefix missing: create it with platform-appropriate ownership.
execute_sudo "/bin/mkdir" "-p" "${HOMEBREW_PREFIX}"
if [[ -z "${HOMEBREW_ON_LINUX-}" ]]; then
execute_sudo "$CHOWN" "root:wheel" "${HOMEBREW_PREFIX}"
else
execute_sudo "$CHOWN" "$USER:$GROUP" "${HOMEBREW_PREFIX}"
fi
fi
if [[ "${#mkdirs[@]}" -gt 0 ]]; then
execute_sudo "/bin/mkdir" "-p" "${mkdirs[@]}"
execute_sudo "/bin/chmod" "g+rwx" "${mkdirs[@]}"
execute_sudo "$CHOWN" "$USER" "${mkdirs[@]}"
execute_sudo "$CHGRP" "$GROUP" "${mkdirs[@]}"
fi
# Ensure the download cache exists and is usable by the current user.
if ! [[ -d "${HOMEBREW_CACHE}" ]]; then
execute_sudo "/bin/mkdir" "-p" "${HOMEBREW_CACHE}"
fi
if exists_but_not_writable "${HOMEBREW_CACHE}"; then
execute_sudo "/bin/chmod" "g+rwx" "${HOMEBREW_CACHE}"
fi
if file_not_owned "${HOMEBREW_CACHE}"; then
execute_sudo "$CHOWN" "$USER" "${HOMEBREW_CACHE}"
fi
if file_not_grpowned "${HOMEBREW_CACHE}"; then
execute_sudo "$CHGRP" "$GROUP" "${HOMEBREW_CACHE}"
fi
if [[ -d "${HOMEBREW_CACHE}" ]]; then
execute "/usr/bin/touch" "${HOMEBREW_CACHE}/.cleaned"
fi
if should_install_command_line_tools && version_ge "$macos_version" "10.13"; then
ohai "Searching online for the Command Line Tools"
# This temporary file prompts the 'softwareupdate' utility to list the Command Line Tools
clt_placeholder="/tmp/.com.apple.dt.CommandLineTools.installondemand.in-progress"
execute_sudo "/usr/bin/touch" "$clt_placeholder"
# Pick the newest Command Line Tools label that softwareupdate offers.
clt_label_command="/usr/sbin/softwareupdate -l |
grep -B 1 -E 'Command Line Tools' |
awk -F'*' '/^ *\\*/ {print \$2}' |
sed -e 's/^ *Label: //' -e 's/^ *//' |
sort -V |
tail -n1"
clt_label="$(chomp "$(/bin/bash -c "$clt_label_command")")"
if [[ -n "$clt_label" ]]; then
ohai "Installing $clt_label"
execute_sudo "/usr/sbin/softwareupdate" "-i" "$clt_label"
execute_sudo "/bin/rm" "-f" "$clt_placeholder"
execute_sudo "/usr/bin/xcode-select" "--switch" "/Library/Developer/CommandLineTools"
fi
fi
# Headless install may have failed, so fallback to original 'xcode-select' method
if should_install_command_line_tools && test -t 0; then
ohai "Installing the Command Line Tools (expect a GUI popup):"
execute_sudo "/usr/bin/xcode-select" "--install"
echo "Press any key when the installation has completed."
getc
execute_sudo "/usr/bin/xcode-select" "--switch" "/Library/Developer/CommandLineTools"
fi
# A failing 'xcrun clang' that mentions the license means Xcode's EULA is unaccepted.
if [[ -z "${HOMEBREW_ON_LINUX-}" ]] && ! output="$(/usr/bin/xcrun clang 2>&1)" && [[ "$output" == *"license"* ]]; then
abort "$(cat <<EOABORT
You have not agreed to the Xcode license.
Before running the installer again please agree to the license by opening
Xcode.app or running:
sudo xcodebuild -license
EOABORT
)"
fi
ohai "Downloading and installing Homebrew..."
(
cd "${HOMEBREW_REPOSITORY}" >/dev/null || return
# we do it in four steps to avoid merge errors when reinstalling
execute "git" "init" "-q"
# "git remote add" will fail if the remote is defined in the global config
execute "git" "config" "remote.origin.url" "${BREW_REPO}"
execute "git" "config" "remote.origin.fetch" "+refs/heads/*:refs/remotes/origin/*"
# ensure we don't munge line endings on checkout
execute "git" "config" "core.autocrlf" "false"
execute "git" "fetch" "origin" "--force"
execute "git" "fetch" "origin" "--tags" "--force"
execute "git" "reset" "--hard" "origin/master"
execute "ln" "-sf" "${HOMEBREW_REPOSITORY}/bin/brew" "${HOMEBREW_PREFIX}/bin/brew"
execute "${HOMEBREW_PREFIX}/bin/brew" "update" "--force"
)
if [[ ":${PATH}:" != *":${HOMEBREW_PREFIX}/bin:"* ]]; then
warn "${HOMEBREW_PREFIX}/bin is not in your PATH."
fi
ohai "Installation successful!"
echo
# Use the shell's audible bell.
if [[ -t 1 ]]; then
printf "\a"
fi
# Use an extra newline and bold to avoid this being missed.
ohai "Homebrew has enabled anonymous aggregate formulae and cask analytics."
echo "$(cat <<EOS
${tty_bold}Read the analytics documentation (and how to opt-out) here:
${tty_underline}https://docs.brew.sh/Analytics${tty_reset}
No analytics data has been sent yet (or will be during this \`install\` run).
EOS
)
"
ohai "Homebrew is run entirely by unpaid volunteers. Please consider donating:"
echo "$(cat <<EOS
${tty_underline}https://github.com/Homebrew/brew#donations${tty_reset}
EOS
)
"
(
cd "${HOMEBREW_REPOSITORY}" >/dev/null || return
execute "git" "config" "--replace-all" "homebrew.analyticsmessage" "true"
execute "git" "config" "--replace-all" "homebrew.caskanalyticsmessage" "true"
)
ohai "Next steps:"
echo "- Run \`brew help\` to get started"
echo "- Further documentation: "
echo " ${tty_underline}https://docs.brew.sh${tty_reset}"
# On Linux, point the user at the shell profile appropriate for their $SHELL.
if [[ -n "${HOMEBREW_ON_LINUX-}" ]]; then
case "$SHELL" in
*/bash*)
if [[ -r "$HOME/.bash_profile" ]]; then
shell_profile="$HOME/.bash_profile"
else
shell_profile="$HOME/.profile"
fi
;;
*/zsh*)
shell_profile="$HOME/.zprofile"
;;
*)
shell_profile="$HOME/.profile"
;;
esac
cat <<EOS
- Install the Homebrew dependencies if you have sudo access:
${tty_bold}Debian, Ubuntu, etc.${tty_reset}
sudo apt-get install build-essential
${tty_bold}Fedora, Red Hat, CentOS, etc.${tty_reset}
sudo yum groupinstall 'Development Tools'
See ${tty_underline}https://docs.brew.sh/linux${tty_reset} for more information.
- Configure Homebrew in your ${tty_underline}${shell_profile}${tty_reset} by running
echo 'eval \$(${HOMEBREW_PREFIX}/bin/brew shellenv)' >> ${shell_profile}
- Add Homebrew to your ${tty_bold}PATH${tty_reset}
eval \$(${HOMEBREW_PREFIX}/bin/brew shellenv)
- We recommend that you install GCC by running:
brew install gcc
EOS
fi
|
#!/bin/bash
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Script to update etcd objects as per the latest API Version.
# This just reads all objects and then writes them back as is to ensure that
# they are written using the latest API version.
#
# Steps to use this script to upgrade the cluster to a new version:
# https://kubernetes.io/docs/tasks/administer-cluster/cluster-management/#upgrading-to-a-different-api-version
# Fail fast on errors, unset variables, and failures anywhere in a pipeline.
set -o errexit
set -o nounset
set -o pipefail
# NOTE(review): ${BASH_SOURCE} should probably be "${BASH_SOURCE[0]}" -- verify.
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"
KUBECTL="${KUBE_OUTPUT_HOSTBIN}/kubectl"
# List of resources to be updated.
# TODO: Get this list of resources from server once
# http://issue.k8s.io/2057 is fixed.
declare -a resources=(
"endpoints"
"events"
"limitranges"
"namespaces"
"nodes"
"pods"
"persistentvolumes"
"persistentvolumeclaims"
"replicationcontrollers"
"resourcequotas"
"secrets"
"services"
"jobs"
"horizontalpodautoscalers"
"storageclasses"
"roles.rbac.authorization.k8s.io"
"rolebindings.rbac.authorization.k8s.io"
"clusterroles.rbac.authorization.k8s.io"
"clusterrolebindings.rbac.authorization.k8s.io"
"networkpolicies.networking.k8s.io"
)
# Find all the namespaces.
namespaces=( $("${KUBECTL}" get namespaces -o go-template="{{range.items}}{{.metadata.name}} {{end}}"))
if [ -z "${namespaces:-}" ]
then
echo "Unexpected: No namespace found. Nothing to do."
exit 1
fi
# Rewrite every instance of every resource so it is stored under the latest
# API version. all_failed tracks whether at least one "kubectl get" succeeded.
all_failed=1
for resource in "${resources[@]}"; do
  for namespace in "${namespaces[@]}"; do
    # If get fails, assume it's because the resource hasn't been installed in the apiserver.
    # TODO hopefully we can remove this once we use dynamic discovery of gettable/updateable
    # resources.
    set +e
    instances=( $("${KUBECTL}" get "${resource}" --namespace="${namespace}" -o go-template="{{range.items}}{{.metadata.name}} {{end}}"))
    result=$?
    set -e
    if [[ "${all_failed}" -eq 1 && "${result}" -eq 0 ]]; then
      all_failed=0
    fi
    # Nothing to do if there is no instance of that resource.
    if [[ -z "${instances:-}" ]]; then
      continue
    fi
    for instance in "${instances[@]}"; do
      # Read and then write it back as is.
      # Update can fail if the object was updated after we fetched the
      # object, but before we could update it. We, hence, try the update
      # operation multiple times. But 5 continuous failures indicate some other
      # problem.
      success=0
      # BUG FIX: this scratch path was referenced via a bogus "$(unknown)"
      # command substitution; use the filename variable it was meant to be.
      filename="/tmp/k8s-${namespace}-${resource}-${instance}.json"
      for (( tries=0; tries<5; ++tries )); do
        ( "${KUBECTL}" get "${resource}" "${instance}" --namespace="${namespace}" -o json > "${filename}" ) || true
        if [[ ! -s "${filename}" ]]; then
          # This happens when the instance has been deleted. We can hence ignore
          # this instance. ('continue 2' skips to the next instance instead of
          # burning the remaining retries and reporting a spurious failure.)
          echo "Looks like ${instance} got deleted. Ignoring it"
          continue 2
        fi
        output=$("${KUBECTL}" replace -f "${filename}" --namespace="${namespace}") || true
        rm "${filename}"
        if [ -n "${output:-}" ]; then
          success=1
          break
        fi
      done
      if [[ "${success}" -eq 0 ]]; then
        echo "Error: failed to update ${resource}/${instance} in ${namespace} namespace after 5 tries"
        exit 1
      fi
    done
    if [[ "${resource}" == "namespaces" ]] || [[ "${resource}" == "nodes" ]]; then
      # These resources are namespace agnostic. No need to update them for every
      # namespace.
      break
    fi
  done
done
if [[ "${all_failed}" -eq 1 ]]; then
  echo "kubectl get failed for all resources"
  exit 1
fi
echo "All objects updated successfully!!"
exit 0
|
package org.cloudfoundry.samples.music.config.data;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.data.gemfire.config.annotation.EnableEntityDefinedRegions;
import org.springframework.data.gemfire.repository.config.EnableGemfireRepositories;
/**
 * Spring configuration enabling GemFire/PCC persistence: scans the domain
 * package for entity-defined regions and activates the GemFire repositories
 * in the pcc repositories package.
 */
@EnableEntityDefinedRegions(basePackages = "org.cloudfoundry.samples.music.domain")
@EnableGemfireRepositories(basePackageClasses = org.cloudfoundry.samples.music.repositories.pcc.PccAlbumRepository.class)
@Configuration
public class PccConfig {
}
|
/**
* Created by <NAME> on 11/8/15.
*/
/*globals define*/
/*jshint node:true, browser:true*/
define([
'plugin/PluginConfig',
'plugin/PluginBase',
'jszip',
'xmljsonconverter'
], function (
PluginConfig,
PluginBase,
JSZip,
Converter) {
'use strict';
/**
* Initializes a new instance of FMImporter.
* @class
* @augments {PluginBase}
* @classdesc This class represents the plugin FMImporter.
* @constructor
*/
var FMImporter = function () {
// Call base class' constructor.
PluginBase.call(this);
};
// Prototypal inheritance from PluginBase.
FMImporter.prototype = Object.create(PluginBase.prototype);
// Restore the constructor reference clobbered by Object.create above.
FMImporter.prototype.constructor = FMImporter;
/**
* Gets the name of the FMImporter.
* @returns {string} The name of the plugin.
* @public
*/
FMImporter.prototype.getName = function () {
return 'FM Importer';
};
/**
* Gets the semantic version (semver.org) of the FMImporter.
* @returns {string} The version of the plugin.
* @public
*/
FMImporter.prototype.getVersion = function () {
return '0.1.0';
};
/**
* Gets the configuration structure for the FMImporter.
* The ConfigurationStructure defines the configuration for the plugin
* and will be used to populate the GUI when invoking the plugin from webGME.
* @returns {object[]} The configuration descriptors of the plugin.
* @public
*/
FMImporter.prototype.getConfigStructure = function () {
return [
{
name: 'file',
displayName: 'FM model',
description: 'Click and drag existing FM models from Eclipse Papyrus',
value: '',
valueType: 'asset',
readOnly: false
}
];
};
/**
* Main function for the plugin to execute. This will perform the execution.
* Notes:
* - Always log with the provided logger.[error,warning,info,debug].
* - Do NOT put any user interaction logic UI, etc. inside this method.
* - callback always has to be called even if error happened.
*
* @param {function(string, plugin.PluginResult)} callback - the result callback
*/
FMImporter.prototype.main = function (callback) {
// Use self to access core, project, result, logger etc from PluginBase.
// These are all instantiated at this point.
var self = this,
currentConfig = self.getCurrentConfig();
if (!currentConfig.file) {
callback(new Error('No file provided.'), self.result);
return;
}
// Fetch the uploaded model from blob storage, normalize it to a JSON
// object, then build the diagram and persist the result.
self.blobClient.getObject(currentConfig.file, function (err, jsonOrBuf) {
var dataModel;
if (err) {
// NOTE(review): other error paths pass self.result as second argument;
// this one omits it -- confirm whether that is intentional.
callback(err);
return;
}
if (typeof Buffer !== 'undefined' && jsonOrBuf instanceof Buffer) {
// This clause is entered when the plugin in executed in a node process (on the server) rather than
// in a browser. Then the getObject returns a Buffer and we need to convert it to string and then
// parse it into an object.
try {
jsonOrBuf = String.fromCharCode.apply(null, new Uint8Array(jsonOrBuf));
dataModel = JSON.parse(jsonOrBuf);
} catch (err) {
callback(err, self.result);
return;
}
} else {
// In the browser the getObject automatically returns a json object.
dataModel = jsonOrBuf;
}
self.logger.info('Obtained dataModel', dataModel);
self.buildUpFMDiagram(dataModel, function (err) {
if (err) {
callback(err, self.result);
return;
}
self.save('FSM Importer created new model.', function (err) {
if (err) {
callback(err, self.result);
return;
}
self.result.setSuccess(true);
callback(null, self.result);
});
})
});
};
/**
 * Builds the graph model from the imported data: one Graph container,
 * one Node per entry in dataModel.nodes, and one Edge per unique edge id
 * gathered from the nodes' inE/outE lists.
 * @param {object} dataModel - parsed model ({nodes: [{id, label, inE, outE}]}).
 * @param {function(Error|null)} callback - invoked when the model is built.
 */
FMImporter.prototype.buildUpFMDiagram = function (dataModel, callback) {
    var self = this,
        fmData = dataModel,
        i,
        idToNode = {},
        nodeNode,
        edgeNode,
        stateId,
        edges = {},
        smNode,
        _addEdge;

    // Accumulate edge descriptors into `edges` keyed by the original edge id
    // so an edge seen from both of its end-points is only recorded once.
    // `inOrOutV` names the property ('inV' or 'outV') holding the far vertex.
    _addEdge = function (nodeId, edge, inOrOutV) {
        var k,
            e;
        if (edge.created) {
            for (k = 0; k < edge.created.length; ++k) {
                e = edge.created[k];
                if (!edges.hasOwnProperty(e.id)) {
                    edges[e.id] = {
                        src: nodeId,
                        dst: e[inOrOutV],
                        label: 'created'
                    };
                }
            }
        }
        // BUG FIX: this previously tested `edges.knows` (the accumulator map)
        // instead of `edge.knows` (the current node's edge list), which could
        // throw when iterating an undefined `edge.knows`.
        if (edge.knows) {
            for (k = 0; k < edge.knows.length; ++k) {
                e = edge.knows[k];
                if (!edges.hasOwnProperty(e.id)) {
                    edges[e.id] = {
                        src: nodeId,
                        dst: e[inOrOutV],
                        label: 'knows'
                    };
                }
            }
        }
    };

    // Create the stateMachine (graph container).
    smNode = self.core.createNode({
        parent: self.activeNode,
        base: self.META.Graph
    });
    self.core.setAttribute(smNode, 'name', 'graph');
    self.core.setRegistry(smNode, 'position', {x: 200, y: 200});

    // Create the states and gather data about the transitions.
    for (i = 0; i < fmData.nodes.length; i += 1) {
        stateId = fmData.nodes[i].id;
        nodeNode = self.core.createNode({
            parent: smNode,
            base: self.META.Node
        });
        self.core.setAttribute(nodeNode, 'name', fmData.nodes[i].label);
        // BUG FIX: position the newly created node (previously this was
        // applied to the containing graph node). This could be more fancy.
        self.core.setRegistry(nodeNode, 'position', {x: 50 + (100 * i), y: 200});
        // Add the node with its old id to the map (will be used when creating the transitions)
        idToNode[stateId] = nodeNode;
        // Gather the outgoing transitions from the current state and store the info.
        if (fmData.nodes[i].outE) {
            _addEdge(stateId, fmData.nodes[i].outE, 'inV');
        } else if (fmData.nodes[i].inE) {
            _addEdge(stateId, fmData.nodes[i].inE, 'outV');
        }
    }

    // With all state created, we will now create the transitions and connect them between the states.
    for (i in edges) {
        edgeNode = self.core.createNode({
            parent: smNode,
            base: self.META.Edge
        });
        self.core.setAttribute(edgeNode, 'label', edges[i].label);
        self.core.setPointer(edgeNode, 'src', idToNode[edges[i].src]);
        self.core.setPointer(edgeNode, 'dst', idToNode[edges[i].dst]);
    }
    callback(null);
};
return FMImporter;
});
|
package dbtest
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestOpen verifies that a test database can be opened and queried: the
// gorp_migrations table must be present and contain at least one row,
// confirming migrations were applied to the fixture database.
func TestOpen(t *testing.T) {
db := Open(t)
session := db.Open()
count := 0
err := session.Get(&count, `SELECT COUNT(*) FROM gorp_migrations`)
require.NoError(t, err)
assert.Greater(t, count, 0)
}
|
<reponame>opentaps/opentaps-1<filename>opentaps/opentaps-common/src/common/org/opentaps/common/domain/order/SalesOrderLookupRepository.java
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.common.domain.order;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
import org.hibernate.Criteria;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.criterion.MatchMode;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilValidate;
import org.opentaps.base.constants.OrderTypeConstants;
import org.opentaps.base.constants.RoleTypeConstants;
import org.opentaps.base.constants.StatusItemConstants;
import org.opentaps.base.entities.InventoryItem;
import org.opentaps.base.entities.OrderHeader;
import org.opentaps.base.entities.OrderItem;
import org.opentaps.base.entities.OrderRole;
import org.opentaps.base.entities.PostalAddress;
import org.opentaps.base.entities.TrackingCodeOrder;
import org.opentaps.common.util.UtilDate;
import org.opentaps.domain.order.OrderViewForListing;
import org.opentaps.domain.order.SalesOrderLookupRepositoryInterface;
import org.opentaps.foundation.entity.hibernate.HibernateUtil;
import org.opentaps.foundation.entity.hibernate.Session;
import org.opentaps.foundation.infrastructure.InfrastructureException;
import org.opentaps.foundation.repository.RepositoryException;
import org.opentaps.foundation.repository.ofbiz.CommonLookupRepository;
/**
* Repository to lookup Sales Orders.
*/
public class SalesOrderLookupRepository extends CommonLookupRepository implements SalesOrderLookupRepositoryInterface {
@SuppressWarnings("unused")
private static final String MODULE = SalesOrderLookupRepository.class.getName();
private String orderId;
private String statusId;
private String orderName;
private String organizationPartyId;
private String createdBy;
private String customerPartyId;
private String externalOrderId;
private String fromDateStr;
private Timestamp fromDate;
private String thruDateStr;
private Timestamp thruDate;
private String lotId;
private String productStoreId;
private String purchaseOrderId;
private String serialNumber;
private String shippingAddress;
private String shippingCountry;
private String shippingStateProvince;
private String shippingCity;
private String shippingPostalCode;
private String shippingToName;
private String shippingAttnName;
private String userLoginId;
private String viewPref;
private String productId;
private boolean findActiveOnly = false;
private boolean findDesiredOnly = false;
private Locale locale;
private TimeZone timeZone;
private List<String> orderBy;
/**
* Default constructor.
*/
public SalesOrderLookupRepository() {
super();
}
/**
 * {@inheritDoc}
 *
 * Builds a single Hibernate Criteria query over OrderHeader from whichever
 * filter fields were set on this repository, applies ordering and
 * pagination, then converts each projected row into an
 * {@link OrderViewForListing}.
 *
 * @return the matching sales orders, page-limited when pagination is enabled
 * @throws RepositoryException if the Hibernate session cannot be obtained
 */
public List<OrderViewForListing> findOrders() throws RepositoryException {
    // convert fromDateStr / thruDateStr into Timestamps if the string versions were given
    if (UtilValidate.isNotEmpty(fromDateStr)) {
        fromDate = UtilDate.toTimestamp(fromDateStr, timeZone, locale);
    }
    if (UtilValidate.isNotEmpty(thruDateStr)) {
        thruDate = UtilDate.toTimestamp(thruDateStr, timeZone, locale);
    }
    Session session = null;
    try {
        // get a hibernate session
        session = getInfrastructure().getSession();
        Criteria criteria = session.createCriteria(OrderHeader.class);
        // always filter by the current organization
        criteria.add(Restrictions.eq(OrderHeader.Fields.billFromPartyId.name(), organizationPartyId));
        // filters by order type, we only want sales order
        criteria.add(Restrictions.eq(OrderHeader.Fields.orderTypeId.name(), OrderTypeConstants.SALES_ORDER));
        // set the from/thru date filter if they were given
        if (fromDate != null) {
            criteria.add(Restrictions.ge(OrderHeader.Fields.orderDate.name(), fromDate));
        }
        if (thruDate != null) {
            criteria.add(Restrictions.le(OrderHeader.Fields.orderDate.name(), thruDate));
        }
        // filter the role assoc, there is only one customer role per order
        Criteria roleCriteria = criteria.createAlias("orderRoles", "or");
        roleCriteria.add(Restrictions.eq("or.id." + OrderRole.Fields.roleTypeId.name(), RoleTypeConstants.BILL_TO_CUSTOMER));
        // filter orders created by the given user (TODO: what use is viewPref as a string here, should be a boolean flag instead ?)
        // NOTE(review): any non-empty viewPref value triggers the createdBy == userLoginId
        // restriction; the actual viewPref content is never inspected.
        if (UtilValidate.isNotEmpty(viewPref)) {
            criteria.add(Restrictions.eq(OrderHeader.Fields.createdBy.name(), userLoginId));
        }
        // filter by order status
        if (findActiveOnly || findDesiredOnly) {
            List<String> statuses = UtilMisc.toList(StatusItemConstants.OrderStatus.ORDER_APPROVED, StatusItemConstants.OrderStatus.ORDER_CREATED, StatusItemConstants.OrderStatus.ORDER_HOLD);
            // "active" additionally includes orders still being processed
            if (findActiveOnly) {
                statuses.add(StatusItemConstants.OrderStatus.ORDER_PROCESSING);
            }
            criteria.add(Restrictions.in(OrderHeader.Fields.statusId.name(), statuses));
        }
        // filter by the given orderId string (prefix match, case-insensitive)
        if (UtilValidate.isNotEmpty(orderId)) {
            criteria.add(Restrictions.ilike(OrderHeader.Fields.orderId.name(), orderId, MatchMode.START));
        }
        // filter by the given externalOrderId string
        if (UtilValidate.isNotEmpty(externalOrderId)) {
            criteria.add(Restrictions.ilike(OrderHeader.Fields.externalId.name(), externalOrderId, MatchMode.START));
        }
        // filter by exact matching status, if a statusId was given
        if (UtilValidate.isNotEmpty(statusId)) {
            criteria.add(Restrictions.eq(OrderHeader.Fields.statusId.name(), statusId));
        }
        // filter by product store if given
        if (UtilValidate.isNotEmpty(productStoreId)) {
            criteria.add(Restrictions.eq(OrderHeader.Fields.productStoreId.name(), productStoreId));
        }
        // filter by the user who created the order if given
        if (UtilValidate.isNotEmpty(createdBy)) {
            criteria.add(Restrictions.eq(OrderHeader.Fields.createdBy.name(), createdBy));
        }
        // filter by the given orderName string
        if (UtilValidate.isNotEmpty(orderName)) {
            criteria.add(Restrictions.ilike(OrderHeader.Fields.orderName.name(), orderName, MatchMode.START));
        }
        // filter by the given customerPartyId string, from the OrderRole entity
        if (UtilValidate.isNotEmpty(customerPartyId)) {
            roleCriteria.add(Restrictions.ilike("or.id." + OrderRole.Fields.partyId.name(), customerPartyId, MatchMode.START));
        }
        // filter by the given purchaseOrderId string, from the OrderItem entity
        // (the alias is created unconditionally because the projection below reads oi.correspondingPoId)
        criteria.createAlias("orderItems", "oi");
        if (UtilValidate.isNotEmpty(purchaseOrderId)) {
            criteria.add(Restrictions.ilike("oi." + OrderItem.Fields.correspondingPoId.name(), purchaseOrderId, MatchMode.START));
        }
        // filter by the given productId string, from the OrderItem entity
        if (UtilValidate.isNotEmpty(productId)) {
            criteria.add(Restrictions.ilike("oi." + OrderItem.Fields.productId.name(), productId, MatchMode.START));
        }
        // filter by the given shippingAddress string, from the OrderItemShipGroup entity
        criteria.createAlias("orderItemShipGroups", "oisg");
        Criteria address = criteria.createCriteria("oisg.postalAddress");
        if (UtilValidate.isNotEmpty(shippingAddress)) {
            address.add(Restrictions.ilike(PostalAddress.Fields.address1.name(), shippingAddress, MatchMode.ANYWHERE));
        }
        if (UtilValidate.isNotEmpty(shippingCountry)) {
            address.add(Restrictions.ilike(PostalAddress.Fields.countryGeoId.name(), shippingCountry, MatchMode.EXACT));
        }
        if (UtilValidate.isNotEmpty(shippingStateProvince)) {
            address.add(Restrictions.ilike(PostalAddress.Fields.stateProvinceGeoId.name(), shippingStateProvince, MatchMode.EXACT));
        }
        if (UtilValidate.isNotEmpty(shippingCity)) {
            address.add(Restrictions.ilike(PostalAddress.Fields.city.name(), shippingCity, MatchMode.START));
        }
        if (UtilValidate.isNotEmpty(shippingPostalCode)) {
            address.add(Restrictions.ilike(PostalAddress.Fields.postalCode.name(), shippingPostalCode, MatchMode.START));
        }
        if (UtilValidate.isNotEmpty(shippingToName)) {
            address.add(Restrictions.ilike(PostalAddress.Fields.toName.name(), shippingToName, MatchMode.START));
        }
        if (UtilValidate.isNotEmpty(shippingAttnName)) {
            address.add(Restrictions.ilike(PostalAddress.Fields.attnName.name(), shippingAttnName, MatchMode.START));
        }
        // filter by the given lotId and serialNumber, which may come either from
        //  OrderItemShipGrpInvRes -> InventoryItem
        // or
        //  ItemIssuance -> InventoryItem
        // (left joins so orders without reservations/issuances are not dropped)
        criteria.createCriteria("orderItemShipGrpInvReses", Criteria.LEFT_JOIN).createCriteria("inventoryItem", "rii", Criteria.LEFT_JOIN);
        criteria.createCriteria("itemIssuances", Criteria.LEFT_JOIN).createCriteria("inventoryItem", "iii", Criteria.LEFT_JOIN);
        if (UtilValidate.isNotEmpty(lotId)) {
            criteria.add(Restrictions.or(
                             Restrictions.ilike("rii." + InventoryItem.Fields.lotId.name(), lotId, MatchMode.START),
                             Restrictions.ilike("iii." + InventoryItem.Fields.lotId.name(), lotId, MatchMode.START)));
        }
        if (UtilValidate.isNotEmpty(serialNumber)) {
            criteria.add(Restrictions.or(
                             Restrictions.ilike("rii." + InventoryItem.Fields.serialNumber.name(), serialNumber, MatchMode.START),
                             Restrictions.ilike("iii." + InventoryItem.Fields.serialNumber.name(), serialNumber, MatchMode.START)));
        }
        criteria.createCriteria("trackingCodeOrders", "tco" ,Criteria.LEFT_JOIN);
        // specify the fields to return
        criteria.setProjection(Projections.projectionList()
                               .add(Projections.distinct(Projections.property(OrderHeader.Fields.orderId.name())))
                               .add(Projections.property(OrderHeader.Fields.orderName.name()))
                               .add(Projections.property(OrderHeader.Fields.statusId.name()))
                               .add(Projections.property(OrderHeader.Fields.grandTotal.name()))
                               .add(Projections.property(OrderHeader.Fields.orderDate.name()))
                               .add(Projections.property(OrderHeader.Fields.currencyUom.name()))
                               .add(Projections.property("or.id." + OrderRole.Fields.partyId.name()))
                               .add(Projections.property("oi." + OrderItem.Fields.correspondingPoId.name()))
                               .add(Projections.property("tco." + TrackingCodeOrder.Fields.trackingCodeId.name()))
                               );
        Debug.logInfo("criteria.toString() : " + criteria.toString(), MODULE);
        // set the order by
        if (orderBy == null) {
            orderBy = Arrays.asList(OrderHeader.Fields.orderDate.desc());
        }
        // some substitution is needed to fit the hibernate field names
        // this also maps the calculated fields and indicates the non sortable fields
        Map<String, String> subs = new HashMap<String, String>();
        subs.put("partyId", "or.id.partyId");
        subs.put("partyName", "or.id.partyId");
        subs.put("orderDateString", "orderDate");
        subs.put("shipByDateString", null);
        subs.put("orderNameId", "orderId");
        subs.put("statusDescription", "statusId");
        subs.put("correspondingPoId", "oi.correspondingPoId");
        subs.put("trackingCodeId", "tco.trackingCodeId");
        HibernateUtil.setCriteriaOrder(criteria, orderBy, subs);
        ScrollableResults results = null;
        List<OrderViewForListing> results2 = new ArrayList<OrderViewForListing>();
        try {
            // fetch the paginated results
            results = criteria.scroll(ScrollMode.SCROLL_INSENSITIVE);
            if (usePagination()) {
                results.setRowNumber(getPageStart());
            } else {
                results.first();
            }
            // convert them into OrderViewForListing objects which will also calculate or format some fields for display
            // the column order below must match the projection list above
            Object[] o = results.get();
            int n = 0; // number of results actually read
            while (o != null) {
                OrderViewForListing r = new OrderViewForListing();
                r.initRepository(this);
                int i = 0;
                r.setOrderId((String) o[i++]);
                r.setOrderName((String) o[i++]);
                r.setStatusId((String) o[i++]);
                r.setGrandTotal((BigDecimal) o[i++]);
                r.setOrderDate((Timestamp) o[i++]);
                r.setCurrencyUom((String) o[i++]);
                r.setPartyId((String) o[i++]);
                r.setCorrespondingPoId((String) o[i++]);
                r.setTrackingCodeId((String) o[i++]);
                r.calculateExtraFields(getDelegator(), timeZone, locale);
                results2.add(r);
                n++;
                if (!results.next()) {
                    break;
                }
                if (usePagination() && n >= getPageSize()) {
                    break;
                }
                o = results.get();
            }
            // scroll to the end so the total result count can be recorded
            results.last();
            // note: row number starts at 0
            setResultSize(results.getRowNumber() + 1);
        } finally {
            if (results != null) {
                results.close();
            }
        }
        return results2;
    } catch (InfrastructureException e) {
        Debug.logError(e, MODULE);
        throw new RepositoryException(e);
    } finally {
        if (session != null) {
            session.close();
        }
    }
}
// ---- Plain setters for the lookup filter fields ----
/** {@inheritDoc} */
public void setCreatedBy(String createdBy) {
    this.createdBy = createdBy;
}
/** {@inheritDoc} */
public void setOrderBy(List<String> orderBy) {
    this.orderBy = orderBy;
}
/** {@inheritDoc} */
public void setCustomerPartyId(String customerPartyId) {
    this.customerPartyId = customerPartyId;
}
/**
 * {@inheritDoc}
 * NOTE(review): the method name is misspelled ("Exteral" instead of
 * "External") but cannot be renamed here without breaking the interface
 * it implements and its callers.
 */
public void setExteralOrderId(String externalOrderId) {
    this.externalOrderId = externalOrderId;
}
/** {@inheritDoc} */
public void setFromDate(String fromDate) {
    this.fromDateStr = fromDate;
}
/** {@inheritDoc} */
public void setFromDate(Timestamp fromDate) {
    this.fromDate = fromDate;
}
/** {@inheritDoc} */
public void setThruDate(String thruDate) {
    this.thruDateStr = thruDate;
}
/** {@inheritDoc} */
public void setThruDate(Timestamp thruDate) {
    this.thruDate = thruDate;
}
/** {@inheritDoc} */
public void setLotId(String lotId) {
    this.lotId = lotId;
}
/** {@inheritDoc} */
public void setOrderId(String orderId) {
    this.orderId = orderId;
}
/** {@inheritDoc} */
public void setProductId(String productId) {
    this.productId = productId;
}
/** {@inheritDoc} */
public void setStatusId(String statusId) {
    this.statusId = statusId;
}
/** {@inheritDoc} */
public void setOrderName(String orderName) {
    this.orderName = orderName;
}
/** {@inheritDoc} */
public void setOrganizationPartyId(String organizationPartyId) {
    this.organizationPartyId = organizationPartyId;
}
/** {@inheritDoc} */
public void setProductStoreId(String productStoreId) {
    this.productStoreId = productStoreId;
}
/** {@inheritDoc} */
public void setPurchaseOrderId(String purchaseOrderId) {
    this.purchaseOrderId = purchaseOrderId;
}
/** {@inheritDoc} */
public void setSerialNumber(String serialNumber) {
    this.serialNumber = serialNumber;
}
/** {@inheritDoc} */
public void setShippingAddress(String address) {
    this.shippingAddress = address;
}
/** {@inheritDoc} */
public void setShippingCountry(String countryGeoId) {
    this.shippingCountry = countryGeoId;
}
/** {@inheritDoc} */
public void setShippingStateProvince(String stateProvinceGeoId) {
    this.shippingStateProvince = stateProvinceGeoId;
}
/** {@inheritDoc} */
public void setShippingCity(String city) {
    this.shippingCity = city;
}
/** {@inheritDoc} */
public void setShippingPostalCode(String postalCode) {
    this.shippingPostalCode = postalCode;
}
/** {@inheritDoc} */
public void setShippingToName(String toName) {
    this.shippingToName = toName;
}
/** {@inheritDoc} */
public void setShippingAttnName(String attnName) {
    this.shippingAttnName = attnName;
}
/** {@inheritDoc} */
public void setUserLoginId(String userLoginId) {
    this.userLoginId = userLoginId;
}
/** {@inheritDoc} */
public void setViewPref(String viewPref) {
    this.viewPref = viewPref;
}
/** {@inheritDoc} */
public void setLocale(Locale locale) {
    this.locale = locale;
}
/** {@inheritDoc} */
public void setTimeZone(TimeZone timeZone) {
    this.timeZone = timeZone;
}
/** {@inheritDoc} */
public void setFindActiveOnly(boolean findActiveOnly) {
    this.findActiveOnly = findActiveOnly;
}
/** {@inheritDoc} */
public void setFindDesiredOnly(boolean findDesiredOnly) {
    this.findDesiredOnly = findDesiredOnly;
}
}
|
#!/bin/bash
# Restart the OvenSpace service: stop any running instance, then launch it.

# pgrep replaces the fragile "ps -eaf | grep | grep -v grep | awk" pipeline;
# "|| true" keeps the script going when no instance is running.
PIDS=$(pgrep -f "OvenSpace" || true)
if [[ -n "$PIDS" ]]; then
    echo "killing $PIDS"
    # Intentional word-splitting: one PID per word. SIGKILL kept from the
    # original behavior so a hung instance cannot block the restart.
    # shellcheck disable=SC2086
    sudo kill -9 $PIDS
fi
./run.sh
|
//给你一份旅游线路图,该线路图中的旅行线路用数组 paths 表示,其中 paths[i] = [cityAi, cityBi] 表示该线路将会从
//cityAi 直接前往 cityBi 。请你找出这次旅行的终点站,即没有任何可以通往其他城市的线路的城市。
//
// 题目数据保证线路图会形成一条不存在循环的线路,因此恰有一个旅行终点站。
//
//
//
// 示例 1:
//
//
//输入:paths = [["London","New York"],["New York","Lima"],["Lima","Sao Paulo"]]
//输出:"Sao Paulo"
//解释:从 "London" 出发,最后抵达终点站 "Sao Paulo" 。本次旅行的路线是 "London" -> "New York" ->
//"Lima" -> "Sao Paulo" 。
//
//
// 示例 2:
//
//
//输入:paths = [["B","C"],["D","B"],["C","A"]]
//输出:"A"
//解释:所有可能的线路是:
//"D" -> "B" -> "C" -> "A".
//"B" -> "C" -> "A".
//"C" -> "A".
//"A".
//显然,旅行终点站是 "A" 。
//
//
// 示例 3:
//
//
//输入:paths = [["A","Z"]]
//输出:"Z"
//
//
//
//
// 提示:
//
//
// 1 <= paths.length <= 100
// paths[i].length == 2
// 1 <= cityAi.length, cityBi.length <= 10
// cityAi != cityBi
// 所有字符串均由大小写英文字母和空格字符组成。
//
// Related Topics 哈希表 字符串 👍 97 👎 0
package algorithm_1400
// destCity returns the terminal city of the itinerary: the only city that
// appears as a destination in paths but never as a departure point.
// The problem guarantees the routes form a single acyclic chain, so exactly
// one such city exists.
func destCity(paths [][]string) string {
	departures := make(map[string]bool, len(paths))
	for _, path := range paths {
		departures[path[0]] = true
	}
	for _, path := range paths {
		if !departures[path[1]] {
			return path[1]
		}
	}
	return ""
}
|
// Russian localisation strings for the Com.AbstractFormField validation
// messages. The '%count%' token is substituted with the relevant length
// limit when the message is displayed.
cm.setMessages('Com.AbstractFormField', {
    'required' : 'Пожалуйста, заполните поле выше.',
    'too_short' : 'Значение должно содержать минимум %count% символов.',
    'too_long' : 'Значение не должно быть больше %count% символов.',
    '*' : '*'
});
|
<filename>src/example-components/ElementsButtons/Buttons7/index.js
import React from 'react';
import { Button } from '@material-ui/core';
export default function LivePreviewExample() {
return (
<>
<div className="d-flex align-items-center justify-content-center flex-wrap">
<Button variant="contained" className="btn-pill m-2 btn-primary">
Primary
</Button>
<Button variant="contained" className="btn-pill m-2 btn-first">
First
</Button>
<Button variant="contained" className="btn-pill m-2 btn-second">
Second
</Button>
<Button variant="contained" className="btn-pill m-2 btn-info">
Info
</Button>
<Button variant="contained" className="btn-pill m-2 btn-success">
Success
</Button>
<Button variant="contained" className="btn-pill m-2 btn-warning">
Warning
</Button>
<Button variant="contained" className="btn-pill m-2 btn-danger">
Danger
</Button>
<Button variant="contained" className="btn-pill m-2 btn-dark">
Dark
</Button>
</div>
<div className="divider my-3" />
<div className="d-flex align-items-center justify-content-center flex-wrap">
<Button variant="text" className="btn-pill m-2 btn-outline-primary">
Primary
</Button>
<Button variant="text" className="btn-pill m-2 btn-outline-first">
First
</Button>
<Button variant="text" className="btn-pill m-2 btn-outline-second">
Second
</Button>
<Button variant="text" className="btn-pill m-2 btn-outline-info">
Info
</Button>
<Button variant="text" className="btn-pill m-2 btn-outline-success">
Success
</Button>
<Button variant="text" className="btn-pill m-2 btn-outline-warning">
Warning
</Button>
<Button variant="text" className="btn-pill m-2 btn-outline-danger">
Danger
</Button>
<Button variant="text" className="btn-pill m-2 btn-outline-dark">
Dark
</Button>
</div>
</>
);
}
|
<reponame>ddallaire/Adaptone-app
import Controller from '@ember/controller';
import {inject as service} from '@ember/service';
import {readOnly} from '@ember/object/computed';
export default Controller.extend({
  // Injected 'connection' service owning the app's connection state.
  connection: service('connection'),
  // Read-only alias of the service's isConnected flag, for use in templates.
  isConnected: readOnly('connection.isConnected')
});
|
#!/bin/sh
# createTeleporterPodest.sh
#
# Creates a 5x5 teleporter podest around the given position by printing
# Minecraft "setblock" commands on stdout.
# The acting component, the command_block has to be set seperatly!
# (See subscript setCommand)
#
# Use setCommand at the very end of your script - sometimes the inbuild
# command fires instantly, your player is teleported to the given location
# and later setblock orders might no longer be executed!
#
# Created by fex on 09.10.18.
#
X=""
Y=""
Z=""

# Read parameters
#   x: = x coordinate (east <-> west)
#   y: = height (up <-> down)
#   z: = z coordinate (south <-> north)
USAGE="Usage: $0 [-x x coordinate (east <-> west)] [-y height (up <-> down)] [-z z coordinate (south <-> north)]"

# Start processing options at index 1.
OPTIND=1
while getopts ":x:y:z:" VALUE "$@" ; do
    case "$VALUE" in
        x) X="$OPTARG";;
        y) Y="$OPTARG";;
        z) Z="$OPTARG";;
        # Diagnostics go to stderr: stdout is the command stream consumed by
        # the caller and must stay clean.
        :) echo "Option -$OPTARG requires an argument." >&2; echo "$USAGE" >&2; exit 1;;
        ?) echo "Unknown flag -$OPTARG detected." >&2; echo "$USAGE" >&2; exit 1;;
    esac
done

# Verify parameters
if [ -z "$X" ]; then echo "x coordinate missing" >&2; exit 1; fi
if [ -z "$Y" ]; then echo "y coordinate missing" >&2; exit 1; fi
if [ -z "$Z" ]; then echo "z coordinate missing" >&2; exit 1; fi

# Emit one setblock command: sb <x> <y> <z> <block-spec>
sb() {
    echo "setblock $1 $2 $3 $4"
}

# create podest: a ring of quartz stairs facing inwards, corners use the
# matching outer shapes
sb "$((X - 2))" "$Y" "$((Z - 2))" "quartz_stairs[facing=south,shape=outer_left]"
sb "$((X - 1))" "$Y" "$((Z - 2))" "quartz_stairs[facing=south]"
sb "$X"         "$Y" "$((Z - 2))" "quartz_stairs[facing=south]"
sb "$((X + 1))" "$Y" "$((Z - 2))" "quartz_stairs[facing=south]"
sb "$((X + 2))" "$Y" "$((Z - 2))" "quartz_stairs[facing=south,shape=outer_right]"
sb "$((X + 2))" "$Y" "$((Z - 1))" "quartz_stairs[facing=west]"
sb "$((X + 2))" "$Y" "$Z"         "quartz_stairs[facing=west]"
sb "$((X + 2))" "$Y" "$((Z + 1))" "quartz_stairs[facing=west]"
sb "$((X + 2))" "$Y" "$((Z + 2))" "quartz_stairs[facing=west,shape=outer_right]"
sb "$((X + 1))" "$Y" "$((Z + 2))" "quartz_stairs[facing=north]"
sb "$X"         "$Y" "$((Z + 2))" "quartz_stairs[facing=north]"
sb "$((X - 1))" "$Y" "$((Z + 2))" "quartz_stairs[facing=north]"
sb "$((X - 2))" "$Y" "$((Z + 2))" "quartz_stairs[facing=north,shape=outer_right]"
sb "$((X - 2))" "$Y" "$((Z + 1))" "quartz_stairs[facing=east]"
sb "$((X - 2))" "$Y" "$Z"         "quartz_stairs[facing=east]"
sb "$((X - 2))" "$Y" "$((Z - 1))" "quartz_stairs[facing=east]"

# fill podest: obsidian interior with a lit lamp and pressure plate on top
sb "$((X - 1))" "$Y" "$((Z - 1))" "obsidian"
sb "$X"         "$Y" "$((Z - 1))" "obsidian"
sb "$((X + 1))" "$Y" "$((Z - 1))" "obsidian"
sb "$((X + 1))" "$Y" "$Z"         "obsidian"
sb "$((X + 1))" "$Y" "$((Z + 1))" "obsidian"
sb "$X"         "$Y" "$((Z + 1))" "obsidian"
sb "$((X - 1))" "$Y" "$((Z + 1))" "obsidian"
sb "$((X - 1))" "$Y" "$Z"         "obsidian"
sb "$X"         "$Y" "$Z"         "redstone_lamp"
sb "$X" "$((Y + 1))" "$Z"         "light_weighted_pressure_plate"
echo ""
|
#!/usr/bin/env bash
# Run the test_multiple_invocations integration test under gem5 syscall
# emulation with an Aladdin accelerator attached (aladdin_se.py config).
# Requires ALADDIN_HOME to point at the Aladdin checkout inside gem5.
bmk_home=${ALADDIN_HOME}/integration-test/with-cpu/test_multiple_invocations
gem5_dir=${ALADDIN_HOME}/../..
# HybridDatapath/Aladdin debug traces go to stdout and are gzipped to
# stdout.gz; simulator outputs land in ${bmk_home}/outputs.
${gem5_dir}/build/X86/gem5.opt \
  --debug-flags=HybridDatapath,Aladdin \
  --outdir=${bmk_home}/outputs \
  ${gem5_dir}/configs/aladdin/aladdin_se.py \
  --num-cpus=1 \
  --enable_prefetchers \
  --mem-size=4GB \
  --mem-type=DDR3_1600_8x8  \
  --sys-clock=1GHz \
  --cpu-type=detailed \
  --caches \
  --cacheline_size=32 \
  --accel_cfg_file=${bmk_home}/gem5.cfg \
  -c ${bmk_home}/test_multiple_invocations \
  | gzip -c > stdout.gz
|
<reponame>bentlyedyson/HEARTY-HEARTY
"""Simple script to read wfdb file and outputs it as json"""
from wfdb import rdsamp
from json import dumps
from sys import argv, stdout
file_dir = argv[1]
stdout.write(dumps(rdsamp(file_dir)[0].T.tolist(), separators=(',', ':')))
stdout.flush()
|
#!/bin/sh
set -e

# Ensure the destination Frameworks folder exists before any rsync below.
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Location of the Swift runtime dylibs for the current platform/toolchain.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies a built framework into the app's Frameworks folder, then strips
# invalid architectures, re-signs it, and (Xcode < 7 only) embeds the Swift
# runtime dylibs it links against.
# $1 - framework path, absolute or relative to BUILT_PRODUCTS_DIR.
install_framework()
{
  # Resolve the source: prefer the full path under BUILT_PRODUCTS_DIR, then
  # just its basename there, finally the argument taken as a literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  # Locate the framework's main binary; fall back to a bare-dylib layout.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # otool lists linked dylibs; sed keeps only the @rpath/libswift* names.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity when the build settings allow
# code signing.
# $1 - path to the framework/dylib to sign.
code_sign_if_enabled() {
  # Replaces the deprecated, ambiguous "[ ... -a ... ]" test with explicit
  # "&&"-joined tests (POSIX marks -a/-o obsolescent).
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" ] && [ "${CODE_SIGNING_REQUIRED}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
    # Identity is now quoted so names containing spaces don't word-split.
    /usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes from the binary at $1 every architecture slice not listed in
# VALID_ARCHS, so fat simulator/device frameworks pass App Store validation.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  # Report removed slices for build-log debugging.
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "Pods-EgoiPushLibrary_Tests/EgoiPushLibrary.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "Pods-EgoiPushLibrary_Tests/EgoiPushLibrary.framework"
fi
|
<reponame>achintya-kumar/mq-consume-to-file
package com.ultratendency;
import com.ibm.mq.jms.JMSC;
import com.ibm.mq.jms.MQConnectionFactory;
import com.ibm.mq.jms.MQQueue;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import javax.jms.*;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManagerFactory;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.*;
import java.security.cert.CertificateException;
import java.util.Map;
public class MqConnectionManager {
private static final Logger LOG = LogManager.getLogger(MqConnectionManager.class);
private String mqHostname;
private Integer mqPort;
private String mqChannel;
private String mqQueueManager;
private String mqQueueName;
private String keystoreType;
private String keystorePassword;
private String keystoreLocation;
private String cipherSuite;
private Connection connection;
private Session session;
private MessageProducer producer;
private MessageConsumer consumer;
public MqConnectionManager(String mqHostname, Integer mqPort,
String mqChannel, String mqQueueManager, String mqQueueName,
String keystoreType, String keystoreLocation, String keystorePassword,
String cipherSuite) throws Exception {
this.mqHostname = mqHostname;
this.mqPort = mqPort;
this.mqChannel = mqChannel;
this.mqQueueManager = mqQueueManager;
this.mqQueueName = mqQueueName;
this.cipherSuite = cipherSuite;
System.setProperty("com.ibm.mq.cfg.useIBMCipherMappings", "false");
this.keystoreType = keystoreType;
this.keystorePassword = <PASSWORD>;
this.keystoreLocation = keystoreLocation;
LOG.info("MqConnectionManager - Creating session ...");
ConnectionFactory cof = getConnectionFactory();
connection = cof.createConnection();
connection.start();
session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
LOG.info("MqConnectionManager - Initializing producer ...");
Destination sendDest = getSendDest();
producer = session.createProducer(sendDest);
LOG.info("MqConnectionManager - Initializing consumer ...");
consumer = session.createConsumer(sendDest);
}
public MqConnectionManager(Map<String, String> mqConnectionConfigs) throws Exception {
this(mqConnectionConfigs.get("mqHostName"),
Integer.parseInt(mqConnectionConfigs.get("mqPort")),
mqConnectionConfigs.get("mqChannel"),
mqConnectionConfigs.get("mqQueueManager"),
mqConnectionConfigs.get("mqQueueName"),
mqConnectionConfigs.get("keystoreType"),
mqConnectionConfigs.get("keystoreLocation"),
mqConnectionConfigs.get("keystorePassword"),
mqConnectionConfigs.get("cipherSuite"));
}
public void sendMQ(String message) throws JMSException {
TextMessage txtMessage = session.createTextMessage();
txtMessage.setText(message);
long duration = System.currentTimeMillis();
producer.send(txtMessage);
duration = System.currentTimeMillis() - duration;
LOG.info("MqConnectionManager - Message sent in " + duration + " [ms]!");
}
public String consume() throws JMSException {
TextMessage textMessage = (TextMessage) consumer.receive();
return textMessage.getText();
}
/**
* @return
* @throws JMSException
*/
private ConnectionFactory getConnectionFactory()
throws JMSException, NoSuchAlgorithmException, KeyManagementException, CertificateException,
KeyStoreException, IOException, UnrecoverableKeyException {
MQConnectionFactory qcf = new MQConnectionFactory();
qcf.setTransportType(JMSC.MQJMS_TP_CLIENT_MQ_TCPIP);
qcf.setQueueManager(mqQueueManager);
qcf.setHostName(mqHostname);
qcf.setChannel(mqChannel);
qcf.setPort(mqPort);
// qcf.setClientID("SVC_MQSeries");
qcf.setFailIfQuiesce(JMSC.MQJMS_FIQ_YES);
if(!cipherSuite.isEmpty()) {
qcf.setSSLCipherSuite(cipherSuite);
qcf.setSSLSocketFactory(buildSocketFactory());
}
return qcf;
}
private Destination getSendDest()
throws JMSException {
MQQueue dest = new MQQueue(mqQueueName);
dest.setTargetClient(JMSC.MQJMS_CLIENT_NONJMS_MQ);
dest.setPersistence(DeliveryMode.NON_PERSISTENT);
return dest;
}
@Override
protected void finalize() {
try {
LOG.info("Closing MQ connection...");
connection.close();
session.close();
producer.close();
consumer.close();
} catch (JMSException jmsEx) {
jmsEx.printStackTrace();
}
}
private SSLSocketFactory buildSocketFactory() throws KeyManagementException, NoSuchAlgorithmException,
KeyStoreException, IOException, CertificateException, UnrecoverableKeyException {
SSLContext ctx = SSLContext.getInstance("TLS");
if(keystoreType.equalsIgnoreCase("jks") && !keystoreLocation.isEmpty()) {
KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType());
truststore.load(new FileInputStream(keystoreLocation), null);
TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
tmf.init(truststore);
KeyStore keystore = KeyStore.getInstance(KeyStore.getDefaultType());
keystore.load(new FileInputStream(keystoreLocation), keystorePassword.toCharArray());
KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
kmf.init(keystore, keystorePassword.toCharArray());
ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
} else {
throw new RuntimeException("Currently only JKS format is supported");
}
return ctx.getSocketFactory();
}
}
|
<reponame>manoela-reis/pxt-calliope
// Exercise i2c read/write with and without the repeated-start flag.
// BUG FIX: the original declared "let item" twice in the same scope, which
// is a TypeScript compile error; the second result now has its own name.
let item = pins.i2cReadNumber(123, NumberFormat.Int8LE)
pins.i2cWriteNumber(123, 0, NumberFormat.Int8LE)
let item2 = pins.i2cReadNumber(123, NumberFormat.Int8LE, true)
pins.i2cWriteNumber(123, 0, NumberFormat.Int8LE, true)
|
<filename>go/comments_test.go<gh_stars>0
// Package swagger: comment-handler tests.
// NOTE(review): the entire test body below is commented out (disabled);
// re-enable once CreateComments is testable in isolation.
package swagger
/*
import (
	"fmt"
	//my "github.com/simple-web-app/Server/go"
	"testing"
)

func TestCreateComment(t *testing.T) {
	fmt.Println("Testing for creating comments...")
	test := []struct {
		name string
	}{
		{name: "testcase1: "},
	}

	for _, tt := range test {
		t.Run(tt.name, func(t *testing.T) {
			fmt.Println(tt.name)
			CreateComments()
		})
	}
}
*/
|
<reponame>GuRuGuMaWaRu/CodeProblems<gh_stars>0
/*
Determine whether the given string can be obtained by one concatenation of some string to itself.
Example
For inputString = "tandemtandem", the output should be
isTandemRepeat(inputString) = true;
For inputString = "qqq", the output should be
isTandemRepeat(inputString) = false;
For inputString = "2w2ww", the output should be
isTandemRepeat(inputString) = false.
[execution time limit] 4 seconds (js)
[input] string inputString
Guaranteed constraints:
2 ≤ inputString.length ≤ 20.
[output] boolean
true if inputString represents a string concatenated to itself, false otherwise.
*/
/**
 * Whether inputString is some string concatenated once with itself
 * (e.g. "abcabc"). Odd-length inputs can never qualify: the fractional
 * midpoint makes the two slices unequal in length.
 */
function isTandemRepeat(inputString) {
  const midpoint = inputString.length / 2;
  const firstHalf = inputString.slice(0, midpoint);
  const secondHalf = inputString.slice(midpoint);
  return firstHalf === secondHalf;
}
const q1 = "tandemtandem"; // true
const q2 = "qqq"; // false
const q3 = "2w2ww"; // false
const q4 = "hophey"; // false
const q5 = "CodeSignalCodeSignal"; // true
const q6 = "interestinterest"; // true
const q7 = "aa"; // true
const q8 = "ab"; // false
const q9 = "stringString"; // false
const q10 = "truetruetrue"; // false
console.log(isTandemRepeat(q1));
console.log(isTandemRepeat(q2));
console.log(isTandemRepeat(q3));
console.log(isTandemRepeat(q4));
console.log(isTandemRepeat(q5));
console.log(isTandemRepeat(q6));
console.log(isTandemRepeat(q7));
console.log(isTandemRepeat(q8));
console.log(isTandemRepeat(q9));
console.log(isTandemRepeat(q10));
|
#ifndef B2G_TIMER_H
#define B2G_TIMER_H

/*
 * Initialise the timer subsystem.
 * rate: requested timer rate (units defined by the implementation — TODO confirm).
 * Returns an int status code; semantics defined by the implementation.
 */
int timer_Init(long rate);

#endif
|
<gh_stars>10-100
package io.opensphere.wfs.state.activate;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.awt.Color;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.easymock.EasyMock;
import org.junit.Test;
import io.opensphere.core.MapManager;
import io.opensphere.core.NetworkConfigurationManager;
import io.opensphere.core.PluginToolboxRegistry;
import io.opensphere.core.SecurityManager;
import io.opensphere.core.SystemToolbox;
import io.opensphere.core.Toolbox;
import io.opensphere.core.api.Envoy;
import io.opensphere.core.control.ui.MenuBarRegistry;
import io.opensphere.core.control.ui.UIRegistry;
import io.opensphere.core.data.DataRegistry;
import io.opensphere.core.event.EventManager;
import io.opensphere.core.metrics.MetricsRegistry;
import io.opensphere.core.order.OrderManager;
import io.opensphere.core.order.OrderManagerRegistry;
import io.opensphere.core.order.impl.DefaultOrderCategory;
import io.opensphere.core.preferences.Preferences;
import io.opensphere.core.preferences.PreferencesRegistry;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.registry.GenericRegistry;
import io.opensphere.mantle.MantleToolbox;
import io.opensphere.mantle.controller.DataGroupController;
import io.opensphere.mantle.controller.DataTypeController;
import io.opensphere.mantle.data.BasicVisualizationInfo;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfoPreferenceAssistant;
import io.opensphere.mantle.data.LoadsTo;
import io.opensphere.mantle.data.MapVisualizationInfo;
import io.opensphere.mantle.data.MapVisualizationType;
import io.opensphere.mantle.data.geom.style.MutableVisualizationStyle;
import io.opensphere.mantle.data.geom.style.VisualizationStyleRegistry;
import io.opensphere.mantle.data.impl.DefaultDataGroupInfo;
import io.opensphere.mantle.data.impl.DefaultDataTypeInfo;
import io.opensphere.server.control.DefaultServerDataGroupInfo;
import io.opensphere.server.control.ServerConnectionParamsImpl;
import io.opensphere.server.services.ServerConnectionParams;
import io.opensphere.server.source.OGCServerSource;
import io.opensphere.server.state.StateConstants;
import io.opensphere.server.toolbox.LayerConfiguration;
import io.opensphere.server.toolbox.ServerSourceController;
import io.opensphere.server.toolbox.ServerSourceControllerManager;
import io.opensphere.server.toolbox.ServerToolbox;
import io.opensphere.server.toolbox.ServerToolboxUtils;
import io.opensphere.server.toolbox.WFSLayerConfigurationManager;
import io.opensphere.server.util.ServerConstants;
import io.opensphere.wfs.WFSPlugin;
import io.opensphere.wfs.envoy.WFSEnvoy;
import io.opensphere.wfs.envoy.WFSToolbox;
import io.opensphere.wfs.envoy.WFSTools;
import io.opensphere.wfs.layer.WFSDataType;
import io.opensphere.wfs.layer.WFSLayerColumnManager;
import io.opensphere.wfs.layer.WFSMapVisualizationInfo;
import io.opensphere.wfs.layer.WFSMetaDataInfo;
import io.opensphere.wfs.state.model.BasicFeatureStyle;
import io.opensphere.wfs.state.model.WFSLayerState;
import io.opensphere.wfs.state.model.WFSStateParameters;
import io.opensphere.wfs.util.WFSConstants;
/**
 * Tests building a WFSDataType from an existing WFSDataType.
 */
public class WFSDataTypeBuilderTest
{
    /** The first test data layer name. */
    private static final String ourDataLayer1 = "dataLayer1";

    /** The second test data layer name. */
    private static final String ourDataLayer2 = "dataLayer2";

    /**
     * The expected server protocol.
     */
    private static final String ourProtocol = "http://";

    /** The server title (host/path portion of the server URLs built below). */
    private static final String ourServerTitle = "somehost/ogc";

    /** The state id passed to the builder when cloning the types. */
    private static final String STATE1 = "state1";

    /** The Layer1 URL (note: field name typo "Laye1" kept as-is). */
    private String myLaye1Url1;

    /** The Layer key1. */
    private String myLayerKey1;

    /** The Layer key2. */
    private String myLayerKey2;

    /** The Layer2 URL. */
    private String myLayerUrl2;

    /**
     * Tests that {@code createWFSTypes} clones two existing WFS data types and
     * that each clone matches its original (see {@link #compare}).
     */
    @SuppressWarnings("unchecked")
    @Test
    public void test()
    {
        // Layer states to restore; creating them also initialises the
        // myLayerKey*/my*Url* fields used further down.
        List<WFSLayerState> states = New.list();
        states.add(createState1());
        states.add(createState2());

        // --- Core toolbox mock graph: preferences, ordering, registries. ---
        Toolbox toolbox = EasyMock.createNiceMock(Toolbox.class);
        PreferencesRegistry preferencesRegistry = EasyMock.createNiceMock(PreferencesRegistry.class);
        EasyMock.expect(toolbox.getPreferencesRegistry()).andReturn(preferencesRegistry).anyTimes();
        Preferences serverPrefs = EasyMock.createNiceMock(Preferences.class);
        EasyMock.expect(preferencesRegistry.getPreferences(ServerConstants.class)).andReturn(serverPrefs).anyTimes();
        Preferences dataTypePrefs = EasyMock.createNiceMock(Preferences.class);
        EasyMock.expect(preferencesRegistry.getPreferences(DefaultDataTypeInfo.class)).andReturn(dataTypePrefs).anyTimes();
        OrderManagerRegistry orderManagerRegistry = EasyMock.createNiceMock(OrderManagerRegistry.class);
        EasyMock.expect(toolbox.getOrderManagerRegistry()).andReturn(orderManagerRegistry).anyTimes();
        OrderManager manager = EasyMock.createNiceMock(OrderManager.class);
        EasyMock.expect(orderManagerRegistry.getOrderManager(DefaultOrderCategory.DEFAULT_FEATURE_LAYER_FAMILY,
                DefaultOrderCategory.FEATURE_CATEGORY)).andReturn(manager).anyTimes();
        PluginToolboxRegistry toolboxRegistry = EasyMock.createNiceMock(PluginToolboxRegistry.class);
        EasyMock.expect(toolbox.getPluginToolboxRegistry()).andReturn(toolboxRegistry).anyTimes();
        DataRegistry dataRegistry = EasyMock.createNiceMock(DataRegistry.class);
        EasyMock.expect(toolbox.getDataRegistry()).andReturn(dataRegistry).anyTimes();

        // --- Mantle plugin toolbox mocks. ---
        MantleToolbox mantleToolbox = EasyMock.createNiceMock(MantleToolbox.class);
        EasyMock.expect(toolboxRegistry.getPluginToolbox(EasyMock.eq(MantleToolbox.class))).andReturn(mantleToolbox).anyTimes();
        DataTypeInfoPreferenceAssistant dtiPrefAssistant = EasyMock.createNiceMock(DataTypeInfoPreferenceAssistant.class);
        EasyMock.expect(mantleToolbox.getDataTypeInfoPreferenceAssistant()).andReturn(dtiPrefAssistant).anyTimes();
        DataGroupController dataGroupController = EasyMock.createNiceMock(DataGroupController.class);
        EasyMock.expect(mantleToolbox.getDataGroupController()).andReturn(dataGroupController).anyTimes();
        DataTypeController dataTypeController = EasyMock.createNiceMock(DataTypeController.class);
        EasyMock.expect(mantleToolbox.getDataTypeController()).andReturn(dataTypeController).anyTimes();
        VisualizationStyleRegistry visReg = EasyMock.createNiceMock(VisualizationStyleRegistry.class);
        EasyMock.expect(mantleToolbox.getVisualizationStyleRegistry()).andReturn(visReg).anyTimes();
        WFSToolbox wfsToolbox = EasyMock.createNiceMock(WFSToolbox.class);
        EasyMock.expect(toolboxRegistry.getPluginToolbox(EasyMock.eq(WFSToolbox.class))).andReturn(wfsToolbox).anyTimes();
        MutableVisualizationStyle style = EasyMock.createNiceMock(MutableVisualizationStyle.class);
        EasyMock.expect(visReg.getDefaultStyleInstanceForStyleClass(EasyMock.isA(Class.class))).andReturn(style).anyTimes();

        // --- Remaining core services used by the builder / envoy. ---
        EventManager eventManager = EasyMock.createNiceMock(EventManager.class);
        EasyMock.expect(toolbox.getEventManager()).andReturn(eventManager).anyTimes();
        MetricsRegistry metricsRegistry = EasyMock.createNiceMock(MetricsRegistry.class);
        EasyMock.expect(toolbox.getMetricsRegistry()).andReturn(metricsRegistry).anyTimes();
        MapManager mapManager = EasyMock.createNiceMock(MapManager.class);
        EasyMock.expect(toolbox.getMapManager()).andReturn(mapManager).anyTimes();
        UIRegistry uiRegistry = EasyMock.createNiceMock(UIRegistry.class);
        EasyMock.expect(toolbox.getUIRegistry()).andReturn(uiRegistry).anyTimes();
        SecurityManager secManager = EasyMock.createNiceMock(SecurityManager.class);
        EasyMock.expect(toolbox.getSecurityManager()).andReturn(secManager).anyTimes();
        SystemToolbox sysToolbox = EasyMock.createNiceMock(SystemToolbox.class);
        EasyMock.expect(toolbox.getSystemToolbox()).andReturn(sysToolbox).anyTimes();
        NetworkConfigurationManager ncm = EasyMock.createNiceMock(NetworkConfigurationManager.class);
        EasyMock.expect(sysToolbox.getNetworkConfigurationManager()).andReturn(ncm).anyTimes();
        ServerToolbox serverToolbox = EasyMock.createNiceMock(ServerToolbox.class);
        EasyMock.expect(toolboxRegistry.getPluginToolbox(EasyMock.eq(ServerToolbox.class))).andReturn(serverToolbox).anyTimes();
        MenuBarRegistry mbr = EasyMock.createNiceMock(MenuBarRegistry.class);
        EasyMock.expect(uiRegistry.getMenuBarRegistry()).andReturn(mbr).anyTimes();
        EasyMock.replay(uiRegistry);
        GenericRegistry<Envoy> envoyRegistry = new GenericRegistry<>();
        EasyMock.expect(toolbox.getEnvoyRegistry()).andReturn(envoyRegistry).anyTimes();

        // --- Server plugin mocks and the WFS server source under test. ---
        ServerSourceControllerManager serverMgr = EasyMock.createNiceMock(ServerSourceControllerManager.class);
        EasyMock.expect(serverToolbox.getServerSourceControllerManager()).andReturn(serverMgr).anyTimes();
        WFSLayerConfigurationManager layerConfigurationManager = new WFSLayerConfigurationManager();
        EasyMock.expect(serverToolbox.getLayerConfigurationManager()).andReturn(layerConfigurationManager).anyTimes();
        ServerSourceController serverCtrl = EasyMock.createNiceMock(ServerSourceController.class);
        EasyMock.expect(serverMgr.getServerSourceController("serverType")).andReturn(serverCtrl).anyTimes();
        OGCServerSource serverSource = new OGCServerSource();
        serverSource.setName("source1");
        serverSource.setServerType("serverType");
        serverSource.setWFSServerURL(ourProtocol + ourServerTitle + "/wfsServer");
        EasyMock.replay(toolbox, toolboxRegistry, preferencesRegistry, sysToolbox, secManager, mapManager, ncm);
        EasyMock.replay(serverPrefs, dataTypePrefs, serverToolbox, serverMgr, serverCtrl, mbr);
        ServerConnectionParams scp = new ServerConnectionParamsImpl(serverSource, uiRegistry.getMainFrameProvider(), toolbox,
                null);
        WFSTools wfsTools = new WFSTools(toolbox);
        WFSEnvoy wfsEnvoy = new WFSEnvoy(toolbox, preferencesRegistry.getPreferences(WFSPlugin.class), scp, wfsTools);
        envoyRegistry.addObjectsForSource(this, Collections.singleton(wfsEnvoy));
        EasyMock.replay(manager);
        EasyMock.replay(eventManager);
        EasyMock.replay(metricsRegistry);
        EasyMock.replay(dtiPrefAssistant);
        EasyMock.replay(dataTypeController);
        EasyMock.replay(visReg);
        EasyMock.replay(style);
        EasyMock.replay(mantleToolbox, dataRegistry, orderManagerRegistry);

        // Create the types to clone
        WFSDataType type1 = createTypeToClone(toolbox, manager, ourServerTitle, myLayerKey1, ourDataLayer1, myLaye1Url1, "ALT1");
        WFSDataType type2 = createTypeToClone(toolbox, manager, ourServerTitle, myLayerKey2, ourDataLayer2, myLayerUrl2, "ALT2");
        DefaultServerDataGroupInfo rootGroup = new DefaultServerDataGroupInfo(false, toolbox, "rootGroup");
        DefaultDataGroupInfo group1 = new DefaultDataGroupInfo(false, toolbox, "WFSDataTypeBuilder", "layer1_group");
        group1.addMember(type1, this);
        DefaultDataGroupInfo group2 = new DefaultDataGroupInfo(false, toolbox, "WFSDataTypeBuilder", "layer2_group");
        group2.addMember(type2, this);
        // NOTE(review): 'members' is built but never used afterwards --
        // possibly leftover from an earlier version of the test.
        Set<DataGroupInfo> members = New.set();
        members.add(group1);
        members.add(group2);
        rootGroup.addChild(group1, this);
        rootGroup.addChild(group2, this);
        EasyMock.expect(dataGroupController.findMemberById(type1.getTypeKey())).andReturn(type1).anyTimes();
        EasyMock.expect(dataGroupController.findMemberById(type2.getTypeKey())).andReturn(type2).anyTimes();
        EasyMock.replay(dataGroupController);
        // NOTE(review): this mock call happens after replay(); the nice mock
        // tolerates it, but no expectation is recorded -- confirm intentional.
        dataGroupController.addRootDataGroupInfo(rootGroup, this);

        // --- Exercise the builder and compare each clone to its original. ---
        WFSDataTypeBuilder builder = new WFSDataTypeBuilder(toolbox);
        List<WFSDataType> types = builder.createWFSTypes(STATE1, states);
        assertEquals(2, types.size());
        Map<String, WFSDataType> expected = New.map();
        expected.put(type1.getDisplayName(), type1);
        expected.put(type2.getDisplayName(), type2);
        // Clone display names gain a suffix (presumably the state id -- see
        // compare()); pair each clone with its original via the first token.
        compare(expected.get(types.get(0).getDisplayName().split(" ")[0]), types.get(0));
        compare(expected.get(types.get(1).getDisplayName().split(" ")[0]), types.get(1));
        // NOTE(review): 'actualTypes' is built but never asserted against.
        Map<String, WFSDataType> actualTypes = New.map();
        for (WFSDataType type : types)
        {
            actualTypes.put(type.getDisplayName().split(" ")[0], type);
        }
    }

    /**
     * Compares the original data type to the cloned data type.
     *
     * @param type1 the original data type
     * @param type2 the cloned data type
     */
    private void compare(WFSDataType type1, WFSDataType type2)
    {
        // The clone's key/display name extend the original's.
        assertTrue(type2.getTypeKey().startsWith(type1.getTypeKey()));
        assertEquals(type1.getTypeName(), type2.getTypeName());
        assertEquals(type1.getDisplayName(), type2.getDisplayName().split(" ")[0]);
        assertEquals(type1.getSourcePrefix(), type2.getSourcePrefix());
        assertEquals(Boolean.valueOf(type1.isVisible()), Boolean.valueOf(type2.isVisible()));
        assertEquals(type1.getUrl(), type2.getUrl());
        assertEquals(Boolean.valueOf(type1.isQueryable()), Boolean.valueOf(type2.isQueryable()));
        assertEquals(type1.getOrderKey(), type2.getOrderKey());
        assertEquals(type1.getOutputFormat(), type2.getOutputFormat());
        assertEquals(Boolean.valueOf(type1.isLatBeforeLon()), Boolean.valueOf(type2.isLatBeforeLon()));
        assertEquals(type1.getTimeExtents(), type2.getTimeExtents());
        BasicVisualizationInfo type1BasicVisInfo = type1.getBasicVisualizationInfo();
        BasicVisualizationInfo newTypeBasicVisInfo = type2.getBasicVisualizationInfo();
        assertEquals(type1BasicVisInfo.getTypeColor(), newTypeBasicVisInfo.getTypeColor());
        assertEquals(type1BasicVisInfo.getTypeOpacity(), newTypeBasicVisInfo.getTypeOpacity());
        assertEquals(type1BasicVisInfo.getLoadsTo(), newTypeBasicVisInfo.getLoadsTo());
        WFSMetaDataInfo mdi = (WFSMetaDataInfo)type1.getMetaDataInfo();
        WFSMetaDataInfo newMdi = (WFSMetaDataInfo)type2.getMetaDataInfo();
        assertEquals(Boolean.valueOf(mdi.isDynamicTime()), Boolean.valueOf(newMdi.isDynamicTime()));
        // assertEquals(type1_mdi.getDeselectedColumns(),
        // newType1_mdi.getDeselectedColumns());
        // assertEquals(Boolean.valueOf(type1_mdi.automaticallyDisableEmptyColumns()),
        // Boolean.valueOf(newType1_mdi.automaticallyDisableEmptyColumns()));
        assertEquals(mdi.getGeometryColumn(), newMdi.getGeometryColumn());
        assertEquals(mdi.getAltitudeKey(), newMdi.getAltitudeKey());
    }

    /**
     * Creates the first state. Side effect: initialises {@code myLaye1Url1}
     * and {@code myLayerKey1}.
     *
     * @return the WFS layer state
     */
    private WFSLayerState createState1()
    {
        StringBuilder sb = new StringBuilder(17);
        sb.append(ourProtocol);
        sb.append(ourServerTitle);
        sb.append("/wfsServer");
        myLaye1Url1 = sb.toString();
        sb.append(WFSConstants.LAYERNAME_SEPARATOR);
        sb.append(ourDataLayer1);
        myLayerKey1 = sb.toString();
        WFSStateParameters params = new WFSStateParameters();
        params.setTypeName(ourDataLayer1);
        params.setVersion("1.0.0");
        WFSLayerState state1 = new WFSLayerState();
        state1.setUrl(myLaye1Url1);
        state1.setId(myLayerKey1);
        state1.setDisplayName(ourDataLayer1);
        state1.setWFSParameters(params);
        BasicFeatureStyle style1 = new BasicFeatureStyle();
        style1.setPointColor("aaff00");
        style1.setPointOpacity(255);
        style1.setAltitudeColumn("ALT1");
        state1.setBasicFeatureStyle(style1);
        return state1;
    }

    /**
     * Creates the second state. Side effect: initialises {@code myLayerUrl2}
     * and {@code myLayerKey2}.
     *
     * @return the WFS layer state
     */
    private WFSLayerState createState2()
    {
        StringBuilder sb = new StringBuilder(17);
        sb.append(ourProtocol);
        sb.append(ourServerTitle);
        sb.append("/wfsServer");
        myLayerUrl2 = sb.toString();
        sb.append(WFSConstants.LAYERNAME_SEPARATOR);
        sb.append(ourDataLayer2);
        myLayerKey2 = sb.toString();
        WFSStateParameters params = new WFSStateParameters();
        params.setTypeName(ourDataLayer2);
        params.setVersion("1.0.0");
        WFSLayerState state2 = new WFSLayerState();
        state2.setUrl(myLayerUrl2);
        state2.setId(myLayerKey2);
        state2.setDisplayName(ourDataLayer2);
        state2.setWFSParameters(params);
        BasicFeatureStyle style2 = new BasicFeatureStyle();
        style2.setPointColor("aaff00");
        style2.setPointOpacity(255);
        style2.setAltitudeColumn("ALT2");
        state2.setBasicFeatureStyle(style2);
        return state2;
    }

    /**
     * Creates a WFS data type to clone.
     *
     * @param toolbox the toolbox
     * @param manager the order manager
     * @param serverTitle the server title
     * @param layerKey the layer key
     * @param layerName the layer name
     * @param layerURL the layer URL.
     * @param altCol the altitude column (empty/null selects the "no altitude"
     *        variant: yellow color and TIMELINE loads-to)
     * @return the WFS data type
     */
    private WFSDataType createTypeToClone(Toolbox toolbox, OrderManager manager, String serverTitle, String layerKey,
            String layerName, String layerURL, String altCol)
    {
        WFSLayerColumnManager columnManager = new WFSLayerColumnManager(toolbox);
        WFSMetaDataInfo type1Mdi = new WFSMetaDataInfo(toolbox, columnManager);
        if (StringUtils.isNotEmpty(altCol))
        {
            type1Mdi.setAltitudeKey(altCol, this);
        }
        LayerConfiguration configuration = ServerToolboxUtils.getServerToolbox(toolbox).getLayerConfigurationManager()
                .getConfigurationFromName(StateConstants.WFS_LAYER_TYPE);
        WFSDataType wfsDataType = new WFSDataType(toolbox, serverTitle, layerKey, layerName, layerName, type1Mdi, configuration);
        wfsDataType.setQueryable(true);
        wfsDataType.setUrl(layerURL);
        MapVisualizationInfo mapInfo1 = new WFSMapVisualizationInfo(MapVisualizationType.UNKNOWN, manager);
        wfsDataType.setMapVisualizationInfo(mapInfo1);
        wfsDataType.setVisible(true, this);
        if (StringUtils.isNotEmpty(altCol))
        {
            // Matches the "aaff00" point color used in the layer states.
            wfsDataType.getBasicVisualizationInfo().setTypeColor(new Color(170, 255, 0), this);
        }
        else
        {
            wfsDataType.getBasicVisualizationInfo().setTypeColor(new Color(255, 255, 0), this);
            wfsDataType.getBasicVisualizationInfo().setLoadsTo(LoadsTo.TIMELINE, this);
        }
        wfsDataType.getStreamingSupport().setStreamingEnabled(true);
        return wfsDataType;
    }
}
|
package ch.raiffeisen.openbank.common.repository.model;
import java.math.BigDecimal;
import javax.persistence.Column;
import javax.persistence.Embeddable;
/**
 * This embeddable represents the fee value object.
 *
 * @author <NAME>
 */
@Embeddable
public class Fee {

    /** Monetary amount of the fee. */
    @Column(name = "FEE", nullable = false)
    private BigDecimal fee;

    /** Currency the fee is expressed in (presumably an ISO 4217 code -- confirm). */
    @Column(name = "CURRENCY", nullable = false)
    private String currency;

    public BigDecimal getFee() {
        return fee;
    }

    public void setFee(BigDecimal fee) {
        this.fee = fee;
    }

    public String getCurrency() {
        return currency;
    }

    public void setCurrency(String currency) {
        this.currency = currency;
    }

    /**
     * Value-object equality over both fields. Embeddables should define
     * equals/hashCode so containing entities and collections compare
     * correctly. Note: BigDecimal.equals is scale-sensitive (2.0 != 2.00).
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Fee other = (Fee) o;
        return (fee == null ? other.fee == null : fee.equals(other.fee))
                && (currency == null ? other.currency == null : currency.equals(other.currency));
    }

    @Override
    public int hashCode() {
        int result = fee == null ? 0 : fee.hashCode();
        return 31 * result + (currency == null ? 0 : currency.hashCode());
    }
}
|
import Promise from 'bluebird';
import async from 'async';
import jwt from 'jsonwebtoken';
import moment from 'moment';
import { getUser } from './firebase-admin';
import settings from '../../config';
import { createClient } from './redisClient';
import api from './server';
// Redis publisher client (resolved asynchronously below) and the certificate
// used to verify incoming JWTs (injected via setCert()).
let pubClient;
let jwtCert;
// NOTE(review): pubClient is only assigned once the redis client promise
// resolves; any publish before that throws on undefined -- confirm callers
// only run after startup completes.
createClient().then(
  (client) => {
    pubClient = client;
  }
);
export default {
  /** Stores the certificate later used by validateJwt() to verify tokens. */
  setCert(cert) {
    jwtCert = cert;
  },

  /**
   * Fetches the latest attendance and publishes it on the redis channel.
   * (Method name typo "Attenance" kept: it is part of the public interface.)
   */
  pushLast8Attenance() {
    return api
      .attendance
      .latest()
      .then(
        (results) => {
          pubClient.publish('evangelize:attendance.UPDATE_LATEST_ATTENDANCE_FULFILLED', JSON.stringify(results));
          return results;
        },
        (err) => {
          console.log(err);
          return err;
        }
      );
  },

  /** Fetches the average attendance and publishes it on the redis channel. */
  pushAvgAttendance() {
    return api
      .attendance
      .average()
      .then(
        (results) => {
          pubClient.publish('evangelize:attendance.UPDATE_AVG_ATTENDANCE_FULFILLED', JSON.stringify(results));
          return results;
        },
        (err) => {
          console.log(err);
          return err;
        }
      );
  },

  /** Publishes an arbitrary message on an outgoing channel. */
  pushMessage(channel, message) {
    pubClient.publish(`evangelize:outgoing:${channel}`, JSON.stringify(message));
    return null;
  },

  /**
   * Looks up a person by email and, if found, resolves with the person plus
   * the associated user record; resolves with null when no person matches.
   */
  getPeoplePassword(email) {
    return new Promise((resolve, reject) => {
      api
        .people
        .find(
          'emailAddress',
          email
        )
        .then(
          (people) => {
            if (people.length) {
              api
                .users
                .get(people[0].id)
                .then(
                  (results) => {
                    const payload = {
                      person: people[0].toJSON(),
                      user: results.toJSON(),
                    };
                    resolve(payload);
                    return null;
                  },
                  (err) => {
                    reject(err);
                    return null;
                  }
                );
            } else {
              resolve(null);
              return null;
            }
          },
          (err) => {
            reject(err);
            return null;
          }
        );
    });
  },

  /**
   * Collects all table contents (optionally filtered by entity and change
   * time) keyed by table name. Member-scoped tables are queried per person.
   */
  getAllTables(entityId, personId, lastUpdate) {
    const update = (lastUpdate) ? moment(lastUpdate, 'X').format('YYYY-MM-DD HH:mm:ss') : lastUpdate;
    // Buffer.from() replaces the deprecated (and unsafe) `new Buffer()` API.
    const bEntityId = (entityId) ? Buffer.from(entityId, 'hex') : null;
    const exclude = [
      'thirdPartyLogins',
      'errors',
    ];
    const member = [
      'memberSettings',
    ];
    const keys = Object.keys(api).filter(k => !exclude.includes(k));
    const getTable = async (key) => {
      let table;
      if (member.includes(key)) {
        table = await api[key].allPerson(personId, update);
      } else {
        table = await api[key].all(bEntityId, update);
      }
      const values = table.map(t => t.get());
      return {
        key,
        values,
      };
    };
    return Promise.map(keys, getTable).then(
      (values) => {
        const retVal = values.reduce(
          (m, v) => {
            m[v.key] = v.values;
            return m;
          },
          {}
        );
        return retVal;
      }
    ).catch(e => e); // NOTE(review): resolves with the error object instead of rejecting -- confirm callers expect this.
  },

  /**
   * Verifies a JWT against the configured certificate, then enriches the
   * callback payload with the firebase user and person record.
   */
  validateJwt(token, callback) {
    const payload = {
      decoded: null,
      person: null,
      firebase: null,
    };
    console.log('validateJwt');
    jwt.verify(
      token,
      jwtCert,
      {
        // jsonwebtoken's verify option is `algorithms` (an array). The
        // previous `algorithm` key was silently ignored, which left the
        // accepted algorithm up to the token issuer.
        algorithms: ['RS256'],
      },
      (err, decoded) => {
        console.log('jwt', decoded);
        payload.decoded = decoded;
        if (err) {
          console.log(decoded);
          return callback(err, false, decoded);
        }
        const uid = decoded.uid;
        getUser(uid)
          .then(
            (firebase) => {
              if (firebase) {
                payload.firebase = firebase;
              }
              return api.people.get(decoded.peopleId);
            }
          )
          .then(
            (results) => {
              if (results) {
                payload.person = results.toJSON();
              }
              return callback(null, true, payload);
            },
          )
          .catch((error) => callback(error, false, null));
      }
    );
  },
}
|
#!/usr/bin/env bash
# Build and (when credentials exist) push the repo's Docker image, tagged with
# the Travis build number.
# fail fast settings from https://dougrichardson.org/2018/08/03/fail-fast-bash-scripting.html
# Note: the original `set -eov pipefail` enabled verbose mode (-v) instead of
# nounset (-u); the referenced article uses -euo pipefail.
set -euo pipefail

ORIG_DIR="$(pwd)"
cd "$(dirname "$0")"
BIN_DIR="$(pwd)"
trap 'cd "${ORIG_DIR}"' EXIT

# Check presence of environment variables
TRAVIS_BUILD_NUMBER="${TRAVIS_BUILD_NUMBER:-0}"

# obtain current repository name
REPO_LOCAL_PATH="$(git rev-parse --show-toplevel)"
REPO_NAME="$(basename "${REPO_LOCAL_PATH}")"

# Create a Docker image and tag it as 'travis_<build number>'
buildTag="travis_${TRAVIS_BUILD_NUMBER}" # We use a temporary build number for tagging, since this is a transient artefact
docker build -t "eoepca/${REPO_NAME}" ..
docker tag "eoepca/${REPO_NAME}" "eoepca/${REPO_NAME}:${buildTag}" # Tags container in EOEPCA repository with buildTag

# Push only when both credentials are provided (defaults keep `set -u` happy).
if [[ -n "${DOCKER_USERNAME:-}" && -n "${DOCKER_PASSWORD:-}" ]]
then
  echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
  docker push "eoepca/${REPO_NAME}:${buildTag}" # defaults to docker hub EOEPCA repository
else
  echo "WARNING: No credentials - Cannot push to docker hub"
fi
|
package io.github.vampirestudios.obsidian.minecraft.obsidian;
import net.minecraft.entity.EquipmentSlot;
import net.minecraft.item.ArmorMaterial;
import net.minecraft.recipe.Ingredient;
import net.minecraft.sound.SoundEvent;
import net.minecraft.util.registry.Registry;
/**
 * Adapter exposing a data-driven Obsidian armor material definition through
 * Minecraft's {@code ArmorMaterial} interface.
 */
public record CustomArmorMaterial(io.github.vampirestudios.obsidian.api.obsidian.item.ArmorMaterial material) implements ArmorMaterial {
    @Override
    public int getDurability(EquipmentSlot slot) {
        return this.material.maxDamageFactor;
    }

    @Override
    public int getProtectionAmount(EquipmentSlot slot) {
        return this.material.protection_amount;
    }

    // @Override added for consistency: these four implement ArmorMaterial
    // methods just like the annotated ones above.
    @Override
    public int getEnchantability() {
        return this.material.enchantability;
    }

    @Override
    public SoundEvent getEquipSound() {
        // Resolve the configured identifier through the sound registry.
        return Registry.SOUND_EVENT.get(this.material.sound_event);
    }

    @Override
    public Ingredient getRepairIngredient() {
        return Ingredient.ofItems(Registry.ITEM.get(this.material.repair_item));
    }

    @Override
    public String getName() {
        return this.material.name;
    }

    @Override
    public float getToughness() {
        return this.material.toughness;
    }

    @Override
    public float getKnockbackResistance() {
        return this.material.knockback_resistance;
    }
}
|
import sys
def usage(exit_code=1):
    """Print command-line usage and terminate the process.

    Args:
        exit_code: Process exit status. Defaults to 1 (error) to preserve
            existing call sites; pass 0 when usage is shown because the user
            explicitly asked for help.
    """
    print("Usage: utility.py [options]")
    print("Options:")
    print(" --help, -h Display usage information and exit.")
    print(" --version, -v Display the version of the utility and exit.")
    print(" --action, -a Perform a specific action based on the provided argument.")
    sys.exit(exit_code)
def main(args):
    """Dispatch on the first CLI argument and terminate via sys.exit().

    Recognised options: --help/-h, --version/-v, --action/-a <arg>.
    Anything else (or no arguments) prints usage and exits non-zero.
    """
    if not args:
        usage()

    option = args[0]
    if option in ('--help', '-h'):
        print("Displaying usage information...")
        usage()
    elif option in ('--version', '-v'):
        print("Utility version 1.0")
        sys.exit(0)
    elif option in ('--action', '-a'):
        # --action requires exactly one follow-up argument.
        if len(args) < 2:
            print("Error: Missing argument for --action")
            usage()
        action = args[1]
        print(f"Performing action: {action}")
        # Perform the action based on the provided argument
        # Your implementation here
        sys.exit(0)
    else:
        print(f"Error: Unsupported option '{option}'")
        usage()
if __name__ == "__main__":
    main(sys.argv[1:])
    # Unreachable: main() terminates via sys.exit() on every branch.
    sys.exit(0)
|
<gh_stars>1-10
import * as t from "io-ts";
import { optional } from "../../../../util/io-ts";
import { rpcUnsignedInteger } from "../base-types";
/**
 * io-ts codec for the optional forking configuration: the JSON-RPC endpoint
 * to fork from and, optionally, a block number to pin the fork to.
 */
export const rpcForkConfig = optional(
  t.type(
    {
      jsonRpcUrl: t.string,
      blockNumber: optional(t.number),
    },
    "RpcForkConfig"
  )
);

/** Static type derived from the codec above. */
export type RpcForkConfig = t.TypeOf<typeof rpcForkConfig>;
/** io-ts codec for the Hardhat network RPC config (optional forking section). */
export const rpcHardhatNetworkConfig = t.type(
  {
    forking: optional(rpcForkConfig),
  },
  "HardhatNetworkConfig"
);

export type RpcHardhatNetworkConfig = t.TypeOf<typeof rpcHardhatNetworkConfig>;

/** Same codec, but also accepting undefined. */
export const optionalRpcHardhatNetworkConfig = optional(
  rpcHardhatNetworkConfig
);
// Type guard: value is an array of exactly two integers.
const isNumberPair = (x: unknown): x is [number, number] => {
  if (!Array.isArray(x) || x.length !== 2) {
    return false;
  }
  const [first, second] = x;
  return Number.isInteger(first) && Number.isInteger(second);
};
// TODO: This can be simplified
// Codec for an interval-mining [min, max] range: validation additionally
// requires both bounds non-negative and max >= min (the guard alone does not).
const rpcIntervalMiningRange = new t.Type<[number, number]>(
  "Interval mining range",
  isNumberPair,
  (u, c) =>
    isNumberPair(u) && u[0] >= 0 && u[1] >= u[0]
      ? t.success(u)
      : t.failure(u, c),
  t.identity
);
/** Interval mining setting: a fixed unsigned interval or a [min, max] range. */
export const rpcIntervalMining = t.union([
  rpcUnsignedInteger,
  rpcIntervalMiningRange,
]);

export type RpcIntervalMining = t.TypeOf<typeof rpcIntervalMining>;
|
#!/bin/bash
#
# Copyright 2019 IBM Corp. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -e
# Initialise an empty repository in the current directory and fetch only the
# releases/v0.1.0/ subtree of the admission-control repo via sparse checkout.
git init
git config core.sparsecheckout true
echo releases/v0.1.0/ >> .git/info/sparse-checkout
git remote add -f origin https://github.com/IBM/admission-control.git
git pull origin master
|
<filename>app/src/main/java/com/acmvit/acm_app/ui/profile/ProfileViewModel.java
package com.acmvit.acm_app.ui.profile;
import android.app.Application;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MutableLiveData;
import com.acmvit.acm_app.model.User;
import com.acmvit.acm_app.model.UserData;
import com.acmvit.acm_app.network.BackendResponse;
import com.acmvit.acm_app.network.BackendService;
import com.acmvit.acm_app.network.ServiceGenerator;
import com.acmvit.acm_app.repository.AuthRepository;
import com.acmvit.acm_app.ui.ActivityViewModel;
import com.acmvit.acm_app.ui.base.BaseViewModel;
import com.acmvit.acm_app.util.Resource;
import com.acmvit.acm_app.util.reactive.SingleTimeObserver;
import java.util.Objects;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.NotNull;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class ProfileViewModel extends BaseViewModel {
private enum State {
STANDBY,
LOGOUT,
}
private State state = State.STANDBY;
private final AuthRepository authRepository;
public final MutableLiveData<String> name = new MutableLiveData<>("");
public final MutableLiveData<String> disp = new MutableLiveData<>("");
public final MutableLiveData<String> dp = new MutableLiveData<>("");
private final MutableLiveData<User> user = new MutableLiveData<>();
public ProfileViewModel(
ActivityViewModel activityViewModel,
Application application
) {
super(activityViewModel, application);
authRepository = AuthRepository.getInstance();
}
public void initializeData() {
User user = activityViewModel.getSessionManager().getUserDetails();
if (user != null) {
this.user.setValue(user);
name.setValue(user.getName());
disp.setValue(user.getDisp());
dp.setValue(user.getDp());
}
}
public void fetchData(String uid) {
BackendService service = ServiceGenerator
.getInstance()
.createTokenizedService(BackendService.class);
service
.fetchUserById(uid)
.enqueue(
new Callback<BackendResponse<UserData>>() {
@Override
public void onResponse(
@NotNull Call<BackendResponse<UserData>> call,
@NotNull Response<BackendResponse<UserData>> response
) {
assert response.body() != null;
user.setValue(response.body().getData().getUser());
name.setValue(
Objects.requireNonNull(user.getValue()).getName()
);
dp.setValue(user.getValue().getDp());
disp.setValue(user.getValue().getDisp());
}
@Override
public void onFailure(
@NotNull Call<BackendResponse<UserData>> call,
@NotNull Throwable t
) {}
}
);
}
public void logout() {
if (
activityViewModel.canRunAuthenticatedNetworkTask() &&
state == State.STANDBY
) {
state = State.LOGOUT;
activityViewModel.setIsLoading(true);
LiveData<Resource<Void>> status = authRepository.logout();
new SingleTimeObserver<Resource<Void>>() {
@Override
public void onReceived(Resource<Void> resource) {
activityViewModel.setIsLoading(false);
state = State.STANDBY;
}
}
.attachTo(status);
}
}
}
|
def check_permission(user, action, user_permissions):
    """Return True iff `user` appears in `user_permissions` and `action`
    is among that user's granted actions; False for unknown users."""
    try:
        granted = user_permissions[user]
    except KeyError:
        return False
    return action in granted
# Example usage: each user maps to the list of actions they may perform.
users = {
    "user1": ["read", "write"],
    "user2": ["read"],
    "user3": ["write"]
}
print(check_permission("user1", "read", users))  # Output: True
print(check_permission("user2", "write", users))  # Output: False
print(check_permission("user3", "read", users))  # Output: False
|
<filename>src/main/java/seedu/address/model/util/SampleDataUtil.java
package seedu.address.model.util;
import java.time.DayOfWeek;
import java.time.LocalTime;
import java.time.format.DateTimeFormatter;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import seedu.address.model.AddressBook;
import seedu.address.model.ReadOnlyAddressBook;
import seedu.address.model.TripDay;
import seedu.address.model.TripTime;
import seedu.address.model.person.Name;
import seedu.address.model.person.Phone;
import seedu.address.model.person.driver.Driver;
import seedu.address.model.person.passenger.Address;
import seedu.address.model.person.passenger.Passenger;
import seedu.address.model.person.passenger.Price;
import seedu.address.model.pool.Pool;
import seedu.address.model.tag.Tag;
/**
* Contains utility methods for populating {@code AddressBook} with sample data.
*/
public class SampleDataUtil {
private static final Optional<Price> NO_PRICE = Optional.empty();
private static final DateTimeFormatter timeFormat = DateTimeFormatter.ofPattern("HHmm");
/**
* Prevents SampleDataUtil from being instantiated.
*/
private SampleDataUtil() {}
public static Passenger[] getSamplePassengers() {
return new Passenger[] {
new Passenger(new Name("<NAME>"), new Phone("87438807"),
new Address("Blk 30 Geylang Street 29, #06-40"),
new TripDay(DayOfWeek.valueOf("MONDAY")), new TripTime(LocalTime.parse("1400", timeFormat)),
NO_PRICE,
getTagSet("marketing")),
new Passenger(new Name("<NAME>"), new Phone("99272758"),
new Address("Blk 30 Lorong 3 Serangoon Gardens, #07-18"),
new TripDay(DayOfWeek.valueOf("WEDNESDAY")), new TripTime(LocalTime.parse("2130", timeFormat)),
NO_PRICE,
getTagSet("finance", "marketing")),
new Passenger(new Name("<NAME>"), new Phone("93210283"),
new Address("Blk 11 Ang Mo Kio Street 74, #11-04"),
new TripDay(DayOfWeek.valueOf("THURSDAY")), new TripTime(LocalTime.parse("0845", timeFormat)),
NO_PRICE,
getTagSet("management")),
new Passenger(new Name("<NAME>"), new Phone("91031282"),
new Address("Blk 436 Serangoon Gardens Street 26, #16-43"),
new TripDay(DayOfWeek.valueOf("FRIDAY")), new TripTime(LocalTime.parse("1305", timeFormat)),
NO_PRICE,
getTagSet("operations")),
new Passenger(new Name("<NAME>"), new Phone("92492021"),
new Address("Blk 47 Tampines Street 20, #17-35"),
new TripDay(DayOfWeek.valueOf("TUESDAY")), new TripTime(LocalTime.parse("1930", timeFormat)),
NO_PRICE,
getTagSet("sales")),
new Passenger(new Name("<NAME>"), new Phone("92624417"),
new Address("Blk 45 Aljunied Street 85, #11-31"),
new TripDay(DayOfWeek.valueOf("MONDAY")), new TripTime(LocalTime.parse("0915", timeFormat)),
NO_PRICE,
getTagSet("finance")),
new Passenger(new Name("<NAME>"), new Phone("85313089"),
new Address("Blk 37 Toa Payoh Street 28, #15-37"),
new TripDay(DayOfWeek.valueOf("MONDAY")), new TripTime(LocalTime.parse("0900", timeFormat)),
NO_PRICE,
getTagSet("operations")),
new Passenger(new Name("<NAME>"), new Phone("99750221"),
new Address("Blk 27 Aljunied Street 78, #14-42"),
new TripDay(DayOfWeek.valueOf("THURSDAY")), new TripTime(LocalTime.parse("1200", timeFormat)),
NO_PRICE,
getTagSet("management")),
new Passenger(new Name("<NAME>"), new Phone("98121532"),
new Address("Blk 25 Woodlands Street 70, #10-19"),
new TripDay(DayOfWeek.valueOf("THURSDAY")), new TripTime(LocalTime.parse("1215", timeFormat)),
NO_PRICE,
getTagSet("marketing"))
};
}
public static Pool[] getSamplePools() {
return new Pool[] {
new Pool(
new Driver(new Name("<NAME>"), new Phone("92492021")),
new TripDay(DayOfWeek.MONDAY),
new TripTime(LocalTime.of(12, 0)),
List.of(
getSamplePassengers()[5],
getSamplePassengers()[6]
),
new HashSet<>()
),
new Pool(
new Driver(new Name("<NAME>"), new Phone("88526716")),
new TripDay(DayOfWeek.THURSDAY),
new TripTime(LocalTime.of(12, 0)),
List.of(
getSamplePassengers()[7],
getSamplePassengers()[8]
),
getTagSet("lunch")
),
};
}
public static ReadOnlyAddressBook getSampleAddressBook() {
AddressBook sampleAb = new AddressBook();
for (Passenger samplePassenger : getSamplePassengers()) {
sampleAb.addPassenger(samplePassenger);
}
for (Pool samplePool : getSamplePools()) {
sampleAb.addPool(samplePool);
}
return sampleAb;
}
/**
* Returns a tag set containing the list of strings given.
*/
public static Set<Tag> getTagSet(String... strings) {
return Arrays.stream(strings)
.map(Tag::new)
.collect(Collectors.toSet());
}
}
|
#!/usr/bin/env bash
# Inject e2e coverage test arguments into the kubevela controller chart's
# "args:" list, then print the patched manifest for CI logs.
set -euo pipefail

# NB: the heavy single-quote escaping below produces a *literal* $(go list ...)
# in the YAML on purpose -- do not "fix" the quoting.
{
  echo ' - "-test.coverprofile=/workspace/data/e2e-profile.out"'
  echo ' - "__DEVEL__E2E"'
  echo ' - "-test.run=E2EMain"'
  echo ' - "-test.coverpkg=$(go list ./pkg/...| tr '"'"'\n'"'"' '"'"','"'"'| sed '"'"'s/,$//g'"'"')"'
} > tmp_add.txt

# Append the new args after the template's "args:" line.
sed '/ args:/r tmp_add.txt' ./charts/vela-core/templates/kubevela-controller.yaml > tmp.yaml
rm -f tmp_add.txt   # clean up the temp fragment (previously leaked)
cat tmp.yaml        # echo the patched manifest for CI logs
# mv overwrites the target; the separate `rm` of the target was redundant.
mv tmp.yaml ./charts/vela-core/templates/kubevela-controller.yaml
|
<filename>navigation/arena_local_planner/learning_based/arena_local_planner_drl/rl_agent/utils/reward.py
import numpy as np
from numpy.lib.utils import safe_eval
import rospy
from typing import Tuple
class RewardCalculator():
    def __init__(self, robot_radius: float, safe_dist: float, goal_radius: float, rule: str = 'rule_00'):
        """A class for calculating reward based on various rules.

        Args:
            robot_radius (float): Radius of the robot footprint. Unit [m].
            safe_dist (float): The minimum distance to obstacles or wall that robot is in safe status.
                If the robot gets too close to them it will be punished. Unit [m].
            goal_radius (float): The minimum distance to goal that goal position is considered to be reached.
            rule (str): Key selecting the reward rule ('rule_00' or 'rule_01').
        """
        # Reward accumulated for the current step; cleared by _reset().
        self.curr_reward = 0
        # additional info will be stored here and be returned along with reward.
        self.info = {}
        self.robot_radius = robot_radius
        self.goal_radius = goal_radius
        # Goal distance from the previous step; None until the first step after
        # reset(), so progress-based terms are skipped on that step.
        self.last_goal_dist = None
        self.safe_dist = safe_dist
        # Dispatch table of unbound rule functions (called with self explicitly
        # in get_reward()).
        self._cal_funcs = {
            'rule_00': RewardCalculator._cal_reward_rule_00,
            'rule_01': RewardCalculator._cal_reward_rule_01
        }
        self.cal_func = self._cal_funcs[rule]
def reset(self):
"""reset variables related to the episode
"""
self.last_goal_dist = None
def _reset(self):
"""reset variables related to current step
"""
self.curr_reward = 0
self.info = {}
def get_reward(self, laser_scan:np.ndarray, goal_in_robot_frame: Tuple[float,float], *args, **kwargs):
"""
Args:
laser_scan (np.ndarray):
goal_in_robot_frame (Tuple[float,float]: position (rho, theta) of the goal in robot frame (Polar coordinate)
"""
self._reset()
self.cal_func(self,laser_scan,goal_in_robot_frame,*args,**kwargs)
return self.curr_reward, self.info
def _cal_reward_rule_00(self, laser_scan: np.ndarray, goal_in_robot_frame: Tuple[float,float],*args,**kwargs):
self._reward_goal_reached(goal_in_robot_frame)
self._reward_safe_dist(laser_scan)
self._reward_collision(laser_scan)
self._reward_goal_approached(goal_in_robot_frame)
def _cal_reward_rule_01(self, laser_scan: np.ndarray, goal_in_robot_frame: Tuple[float,float],*args,**kwargs):
self._reward_goal_reached(goal_in_robot_frame)
self._reward_safe_dist(laser_scan)
self._reward_collision(laser_scan)
self._reward_goal_approached2(goal_in_robot_frame)
def _reward_goal_reached(self,goal_in_robot_frame, reward = 15):
if goal_in_robot_frame[0] < self.goal_radius:
self.curr_reward = reward
self.info['is_done'] = True
self.info['done_reason'] = 2
else:
self.info['is_done'] = False
def _reward_goal_approached(self, goal_in_robot_frame,reward = 1, punishment = 0.0001):
if self.last_goal_dist is not None:
#goal_in_robot_frame : [rho, theta]
"""
if goal_in_robot_frame[0] < self.last_goal_dist:
self.curr_reward += reward
else:
self.curr_reward -=punishment
"""
# if current goal distance shorter than last one, positive weighted reward - otherwise negative wegihted reward
w = 0.25
reward = round(w*(self.last_goal_dist - goal_in_robot_frame[0]), 3)
# punishment for not moving
if self.last_goal_dist == goal_in_robot_frame[0]:
reward = -punishment
self.curr_reward += reward
self.last_goal_dist = goal_in_robot_frame[0]
def _reward_goal_approached2(self, goal_in_robot_frame,reward = 1, punishment = 0.01):
if self.last_goal_dist is not None:
#goal_in_robot_frame : [rho, theta]
# if current goal distance shorter than last one, positive weighted reward - otherwise negative wegihted reward
w = 0.25
reward = round(w*(self.last_goal_dist - goal_in_robot_frame[0]), 3)
# higher negative weight when moving away from goal (to avoid driving unnecessary circles when train in contin. action space)
if (self.last_goal_dist - goal_in_robot_frame[0]) > 0:
w = 0.25
elif (self.last_goal_dist - goal_in_robot_frame[0]) < 0:
w = 0.4
reward = round(w*(self.last_goal_dist - goal_in_robot_frame[0]), 3)
# punishment for not moving
if self.last_goal_dist == goal_in_robot_frame[0]:
reward = -punishment
self.curr_reward += reward
self.last_goal_dist = goal_in_robot_frame[0]
def _reward_collision(self,laser_scan, punishment = 10):
if laser_scan.min() <= self.robot_radius:
self.curr_reward -= punishment
self.info['is_done'] = True
self.info['done_reason'] = 1
def _reward_safe_dist(self, laser_scan, punishment = 0.15):
if laser_scan.min() < self.safe_dist:
self.curr_reward -= punishment
|
from time import time
from tables.check.base import CheckBase
from tables.models import SimpleTable
class CheckUpdate(CheckBase):
    """Benchmark check: time how long `rows` consecutive UPDATEs take."""

    name = 'update'
    graph_title = 'Update'

    def check_rows(self, rows):
        # A single row is created up front; each iteration renames and
        # re-saves it, so every save() issues an UPDATE statement.
        record = SimpleTable.objects.create(name='testname')
        started = time()
        for index in range(rows):
            record.name = f'name{index}'
            record.save()
        return time() - started
|
/*******************************************************************************
* This file is part of the Symfony eclipse plugin.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
******************************************************************************/
package com.dubture.symfony.core.goals.evaluator;
import org.eclipse.dltk.ti.GoalState;
import org.eclipse.dltk.ti.ISourceModuleContext;
import org.eclipse.dltk.ti.goals.IGoal;
import org.eclipse.dltk.ti.types.IEvaluatedType;
import org.eclipse.php.internal.core.typeinference.PHPClassType;
import org.eclipse.php.internal.core.typeinference.evaluators.AbstractPHPGoalEvaluator;
import com.dubture.symfony.core.goals.ServiceTypeGoal;
import com.dubture.symfony.core.model.Service;
import com.dubture.symfony.core.model.SymfonyModelAccess;
@SuppressWarnings("restriction")
public class ServiceTypeGoalEvaluator extends AbstractPHPGoalEvaluator {

    protected ServiceTypeGoal goal;
    private IEvaluatedType result = null;

    public ServiceTypeGoalEvaluator(ServiceTypeGoal goal) {
        super(goal);
        this.goal = goal;
    }

    @Override
    public IGoal[] init() {
        // Resolve the service id to its implementing class, if the goal was
        // raised inside a source module.
        if (goal.getContext() instanceof ISourceModuleContext) {
            ISourceModuleContext context = (ISourceModuleContext) goal.getContext();
            Service service = SymfonyModelAccess.getDefault().findService(
                    goal.getServiceId(),
                    context.getSourceModule().getScriptProject().getPath());
            if (service != null) {
                result = new PHPClassType(service.getFullyQualifiedName());
            }
        }
        return IGoal.NO_GOALS;
    }

    @Override
    public Object produceResult() {
        return result;
    }

    @Override
    public IGoal[] subGoalDone(IGoal subgoal, Object result, GoalState state) {
        return IGoal.NO_GOALS;
    }
}
|
#!/usr/bin/env bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
####################################################################
## This script contains util functions. The util functions are used
## in modules/agent-policy/scripts/create-update-script.sh and
## modules/agent-policy/scripts/delete-script.sh
####################################################################
# Action names accepted by the gcloud ops-agents policies commands.
CREATE="create"
UPDATE="update"
# Release track of the "gcloud compute instances ops-agents" surface in use.
LAUNCH_STAGE="alpha"
# Params:
#   $1 = JSON formatted list(string)
# Return:
#   A well-formatted command line flag value for a list of strings
#   (comma-joined, e.g. '["a","b"]' -> 'a,b')
function get_formatted_list_of_strings() {
  local formatted
  # Assemble a one-liner Python program as a string; the JSON arrives on stdin.
  local python="python -c 'import json, sys;"
  python="$python list_of_strings = json.load(sys.stdin);"
  python="$python print (\",\".join(x for x in list_of_strings))'"
  # eval is needed because the command line itself was built up above.
  formatted="$(echo "$1" | eval "$python")"
  echo "$formatted"
}
# Params:
#   $1 = JSON formatted list(object)
# Return:
#   A well-formatted command line flag value for a list of objects: objects
#   are ';'-joined, fields within an object are 'key=value' pairs ','-joined;
#   '_' in keys becomes '-', and booleans are lowercased.
function get_formatted_list_of_objects() {
  local formatted
  # Assemble a one-liner Python program as a string; the JSON arrives on stdin.
  local python="python -c 'import json, sys;"
  python="$python list_of_objs = json.load(sys.stdin);"
  python="$python print (\";\".join(\",\".join([\"{}={}\".format(k.replace(\"_\", \"-\"),"
  python="$python str(v).lower() if type(v) is bool else v) for k, v in obj.items()])"
  python="$python for obj in list_of_objs))'"
  formatted="$(echo "$1" | eval "$python")"
  echo "$formatted"
}
# Params:
#   $1 = JSON formatted list(map)
# Return:
#   A well-formatted command line flag value for a list of maps: maps are
#   ';'-joined, entries within a map are 'key=value' pairs ','-joined
#   (keys are used verbatim, unlike get_formatted_list_of_objects).
function get_formatted_list_of_map() {
  local formatted
  # Assemble a one-liner Python program as a string; the JSON arrives on stdin.
  local python="python -c 'import json, sys;"
  python="$python list_of_objs = json.load(sys.stdin);"
  python="$python print (\";\".join(\",\".join([\"{}={}\".format(k, v)"
  python="$python for k, v in obj.items()]) for obj in list_of_objs))'"
  formatted="$(echo "$1" | eval "$python")"
  echo "$formatted"
}
# Params:
#   $1 = output of successful describe command (json format)
# Return:
#   the etag in the given string
function get_etag() {
  # Declare the result variable local (the original leaked it as a global).
  local formatted
  local python="python -c 'import json, sys;"
  python="$python json_dump = json.load(sys.stdin);"
  # BUG FIX: 'print json_dump[...]' is Python-2-only statement syntax and is
  # a SyntaxError under python3; use the print() function like the sibling
  # helpers in this file do.
  python="$python print (json_dump[\"etag\"])'"
  formatted="$(echo "$1" | eval "$python")"
  echo "$formatted"
}
# Params:
#   $1 = flag name
#   $2 = flag value
# Return:
#   "" when the value is empty, otherwise " --<name>='<value>'"
#   (note the single leading space, used when concatenating flags)
function get_flag() {
  local name="$1"
  local value="$2"
  if [ -z "$value" ]; then
    echo ""
  else
    echo " --$name='$value'"
  fi
}
# Params:
#   $1 = flag name
#   $2 = flag value
# Return:
#   " --clear-<name>" when the value is empty (so an update wipes the field),
#   otherwise an empty string
function get_update_flag() {
  local name="$1"
  local value="$2"
  local flag=""
  [ -n "$value" ] || flag=" --clear-$name"
  echo "$flag"
}
# Params:
#   $1 = group labels flag name
#   $2 = group labels flag value
#   $3 = zones flag name
#   $4 = zones flag value
#   $5 = instances flag name
#   $6 = instances flag value     (fixed: was mislabeled "$5" twice)
# Return:
#   The appropriate --clear-x flags (where x is group-labels, zones, or instances)
#   based on group labels flag value, zones flag value, and instances flag value
function get_update_flags() {
  local group_labels_flag_name="$1"
  local group_labels_flag_value="$2"
  local zones_flag_name="$3"
  local zones_flag_value="$4"
  local instances_flag_name="$5"
  local instances_flag_value="$6"
  local clear_group_labels_flag
  local clear_zones_flag
  # FIX: clear_instances_flag was never declared local (it leaked into the
  # caller's scope), and update_flags was declared 'local' twice.
  local clear_instances_flag
  local update_flags
  clear_group_labels_flag="$(get_update_flag "$group_labels_flag_name" \
    "$group_labels_flag_value")"
  clear_zones_flag=$(get_update_flag "$zones_flag_name" "$zones_flag_value")
  clear_instances_flag=$(get_update_flag "$instances_flag_name" "$instances_flag_value")
  update_flags="$clear_group_labels_flag$clear_zones_flag$clear_instances_flag"
  echo "$update_flags"
}
# Params:
#   $1 = project id
# Return:
#   The appropriate global flags (--project and --quiet)
function get_global_flags() {
  local project_flag
  project_flag=$(get_flag "project" "$1")
  # get_flag already prefixes its output with a space (or returns "").
  echo "$project_flag --quiet"
}
# Params:
#   $1 = action (create or update)
#   $2 = policy id
#   $3 = description of the agent policy
#   $4 = agent rules, in json format
#   $5 = group labels, in json format
#   $6 = os types, in json format
#   $7 = zones, in json format
#   $8 = instances, in json format
# Return:
#   the appropriate gcloud create or update command, given the args
function get_base_upsert_command() {
  local action="$1"
  local policy_id="$2"
  local description="$3"
  local agent_rules_json="$4"
  local group_labels_json="$5"
  local os_types_json="$6"
  local zones_json="$7"
  local instances_json="$8"
  local description_flag_name="description"
  local agent_rules_flag_name="agent-rules"
  local group_labels_flag_name="group-labels"
  local os_types_flag_name="os-types"
  local zones_flag_name="zones"
  local instances_flag_name="instances"
  local agent_rules_flag_value
  local group_labels_flag_value
  local os_types_flag_value
  local zones_flag_value
  local instances_flag_value
  # Convert each JSON input into the gcloud flag-value syntax.
  agent_rules_flag_value=$(get_formatted_list_of_objects "$agent_rules_json")
  group_labels_flag_value=$(get_formatted_list_of_map "$group_labels_json")
  os_types_flag_value=$(get_formatted_list_of_objects "$os_types_json")
  zones_flag_value=$(get_formatted_list_of_strings "$zones_json")
  instances_flag_value=$(get_formatted_list_of_strings "$instances_json")
  local description_flag
  local agent_rules_flag
  local group_labels_flag
  local os_types_flag
  local zones_flag
  local instances_flag
  local project_flag
  # Empty values yield empty flags (get_flag returns "" for empty input).
  description_flag=$(get_flag "$description_flag_name" "$description")
  agent_rules_flag=$(get_flag "$agent_rules_flag_name" "$agent_rules_flag_value")
  group_labels_flag=$(get_flag "$group_labels_flag_name" "$group_labels_flag_value")
  os_types_flag=$(get_flag "$os_types_flag_name" "$os_types_flag_value")
  zones_flag=$(get_flag "$zones_flag_name" "$zones_flag_value")
  instances_flag=$(get_flag "$instances_flag_name" "$instances_flag_value")
  local update_flags=""
  # Updates additionally need --clear-* flags for list values that became empty.
  if [ "$action" = "$UPDATE" ]; then
    update_flags="$(get_update_flags "$group_labels_flag_name" \
      "$group_labels_flag_value" "$zones_flag_name" "$zones_flag_value" \
      "$instances_flag_name" "$instances_flag_value")"
  fi
  local command="gcloud $LAUNCH_STAGE compute instances ops-agents policies $action"
  command="$command $policy_id$description_flag$agent_rules_flag$group_labels_flag"
  command="$command$os_types_flag$zones_flag$instances_flag$update_flags"
  echo "$command"
}
# Params:
#   $1 = project id
#   $2 = policy id
#   $3 = description of the agent policy
#   $4 = agent rules, in json format
#   $5 = group labels, in json format
#   $6 = os types, in json format
#   $7 = zones, in json format
#   $8 = instances, in json format
# Return:
#   the appropriate gcloud create command, given the args
function get_create_command() {
  local base_create_command
  local global_flags
  # $2..$8 map straight onto get_base_upsert_command's parameters.
  base_create_command="$(get_base_upsert_command "$CREATE" \
    "$2" "$3" "$4" "$5" "$6" "$7" "$8")"
  global_flags=$(get_global_flags "$1")
  echo "$base_create_command$global_flags"
}
# Params:
#   $1 = project id
#   $2 = policy id
#   $3 = description of the agent policy
#   $4 = agent rules, in json format
#   $5 = group labels, in json format
#   $6 = os types, in json format
#   $7 = zones, in json format
#   $8 = instances, in json format
#   $9 = etag
# Return:
#   the appropriate gcloud update command, given the args
function get_update_command() {
  local base_update_command
  local etag_flag
  local global_flags
  # $2..$8 map straight onto get_base_upsert_command's parameters.
  base_update_command="$(get_base_upsert_command "$UPDATE" \
    "$2" "$3" "$4" "$5" "$6" "$7" "$8")"
  # The etag guards against concurrent modification of the policy.
  etag_flag=$(get_flag etag "$9")
  global_flags=$(get_global_flags "$1")
  echo "$base_update_command$etag_flag$global_flags"
}
# Params:
#   $1 = project id
#   $2 = policy id
# Return:
#   the appropriate gcloud describe command, given the args
function get_describe_command() {
  local project_flag
  project_flag=$(get_flag "project" "$1")
  local command="gcloud $LAUNCH_STAGE compute instances ops-agents policies describe"
  echo "$command $2$project_flag --quiet"
}
# Params:
#   $1 = project id
#   $2 = policy id
# Return:
#   the appropriate gcloud delete command, given the args
function get_delete_command() {
  local project_flag
  project_flag=$(get_flag "project" "$1")
  local command="gcloud $LAUNCH_STAGE compute instances ops-agents policies delete"
  echo "$command $2$project_flag --quiet"
}
|
import requests
import asyncio
class ProjectManager:
    def create_project(self, project, body, **kwargs):
        """Create a project with the given request body.

        Makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, pass ``async_req=True``.

        Args:
            project: Project name/identifier (not used by this method body).
            body: JSON-serializable request payload.
            **kwargs: Supports ``async_req`` (bool, default False).

        Returns:
            The decoded JSON response body.
        """
        async_req = kwargs.get('async_req', False)
        if async_req:
            # BUG FIX: the original used aiohttp here without importing it,
            # so the async path always raised NameError. Instead, run the
            # blocking requests call on a worker thread via the event loop's
            # executor -- this only needs the modules the file already imports.
            async def make_async_request():
                loop = asyncio.get_running_loop()
                return await loop.run_in_executor(
                    None,
                    lambda: requests.post(
                        'http://api.example.com/projects', json=body).json())
            return asyncio.run(make_async_request())
        else:
            # Make a synchronous HTTP request
            response = requests.post('http://api.example.com/projects', json=body)
            return response.json()
|
// Previews the first file selected in the given <input type="file"> element
// by loading it as a data URL into the element with id "blah".
function readURL(input) {
  // Bail out unless the user actually selected a file.
  if (!(input.files && input.files[0])) {
    return;
  }
  var fileReader = new FileReader();
  fileReader.onload = function (event) {
    document.getElementById('blah').setAttribute('src', event.target.result);
  };
  fileReader.readAsDataURL(input.files[0]);
}
|
<gh_stars>100-1000
package example;
import javax.persistence.AttributeConverter;
import javax.persistence.Converter;
import java.time.LocalDate;
import java.time.MonthDay;
import java.sql.Date;
/**
 * JPA converter persisting a {@link MonthDay} as a {@link java.sql.Date}
 * anchored in the year 2000 (a leap year, so Feb 29 round-trips).
 */
@Converter
public class MonthDayDateAttributeConverter implements AttributeConverter<MonthDay, Date> {

    @Override
    public Date convertToDatabaseColumn(MonthDay monthDay) {
        // Null-safe: anchor the month/day in the fixed year 2000.
        return monthDay == null ? null : Date.valueOf(monthDay.atYear(2000));
    }

    @Override
    public MonthDay convertToEntityAttribute(Date date) {
        if (date == null) {
            return null;
        }
        // MonthDay.from extracts month and day-of-month from the LocalDate.
        return MonthDay.from(date.toLocalDate());
    }
}
|
def generate_squares(perimeter):
    """Return all ordered pairs (i, j) with i, j >= 2 and i + j == perimeter // 4.

    NOTE(review): despite the name, this pairs side lengths against
    perimeter / 4; a rectangle with perimeter p satisfies w + h == p / 2 --
    confirm the intended formula with the caller.
    """
    pairs = []
    # Integer division instead of int(perimeter / 4): same result for the
    # usual non-negative inputs, without a float round trip.
    target = perimeter // 4
    for i in range(2, target + 1):
        for j in range(2, target + 1):
            if i + j == target:
                pairs.append((i, j))
    return pairs


# FIX: the original comment claimed this prints [(2, 7), (3, 6), (4, 5)],
# which is wrong -- for perimeter 16, target is 4 and the only pair is (2, 2).
print(generate_squares(16))  # Prints [(2, 2)]
|
#!/bin/bash
set -e

# Tests that run for every image unless excluded in globalExcludeTests below.
globalTests+=(
	utc
	cve-2014--shellshock
	no-hard-coded-passwords
	override-cmd
)

# for "explicit" images, only run tests that are explicitly specified for that image/variant
explicitTests+=(
	[:onbuild]=1
	[:nanoserver]=1
	[:windowsservercore]=1
)

# Extra tests for any image's ":onbuild" variant.
imageTests[:onbuild]+='
	override-cmd
'
# Map derivative images onto the test suite of the image they behave like.
testAlias+=(
	[amazoncorretto]='openjdk'
	[adoptopenjdk]='openjdk'
	[sapmachine]='openjdk'
	[iojs]='node'
	[jruby]='ruby'
	[pypy]='python'
	[ubuntu]='debian'
	[ubuntu-debootstrap]='debian'
	[mariadb]='mysql'
	[percona]='mysql'
	[percona:psmdb]='mongo'
	[hola-mundo]='hello-world'
	[hello-seattle]='hello-world'
)
# Per-image test lists (whitespace-separated test names). Empty entries mean
# the image only runs the applicable global tests.
imageTests+=(
	[aerospike]='
	'
	[busybox]='
	'
	[cassandra]='
		cassandra-basics
	'
	[celery]='
	'
	[clojure]='
	'
	[crate]='
	'
	[composer]='
		composer
	'
	[convertigo]='
		convertigo-hello-world
	'
	[debian]='
		debian-apt-get
	'
	[docker:dind]='
		docker-dind
		docker-registry-push-pull
	'
	[django]='
	'
	[elasticsearch]='
		elasticsearch-basics
	'
	[elixir]='
		elixir-hello-world
	'
	[erlang]='
		erlang-hello-world
	'
	[fsharp]='
		fsharp-hello-world
	'
	[gcc]='
		gcc-c-hello-world
		gcc-cpp-hello-world
		golang-hello-world
	'
	[ghost]='
		ghost-basics
	'
	[golang]='
		golang-hello-world
	'
	[haproxy]='
		haproxy-basics
	'
	[haskell]='
		haskell-cabal
		haskell-stack
		haskell-ghci
		haskell-runhaskell
	'
	[haxe]='
		haxe-hello-world
		haxe-haxelib-install
	'
	[hylang]='
		hylang-sh
		hylang-hello-world
	'
	[jetty]='
		jetty-hello-web
	'
	[julia]='
		julia-hello-world
		julia-downloads
	'
	[logstash]='
		logstash-basics
	'
	[memcached]='
		memcached-basics
	'
	[mongo]='
		mongo-basics
		mongo-auth-basics
		mongo-tls-basics
		mongo-tls-auth
	'
	[mono]='
	'
	[mysql]='
		mysql-basics
		mysql-initdb
		mysql-log-bin
	'
	[nextcloud]='
		nextcloud-cli-mysql
		nextcloud-cli-postgres
		nextcloud-cli-sqlite
	'
	[nextcloud:apache]='
		nextcloud-apache-run
	'
	[nextcloud:fpm]='
		nextcloud-fpm-run
	'
	[node]='
		node-hello-world
	'
	[nuxeo]='
		nuxeo-conf
		nuxeo-basics
	'
	[openjdk]='
		java-hello-world
		java-uimanager-font
		java-ca-certificates
	'
	[open-liberty]='
		open-liberty-hello-world
	'
	[percona]='
		percona-tokudb
		percona-rocksdb
	'
	[perl]='
		perl-hello-world
	'
	[php]='
		php-ext-install
		php-hello-world
		php-argon2
	'
	[php:apache]='
		php-apache-hello-web
	'
	[php:fpm]='
		php-fpm-hello-web
	'
	[plone]='
		plone-basics
		plone-addons
		plone-zeoclient
	'
	[postgres]='
		postgres-basics
		postgres-initdb
	'
	[python]='
		python-hy
		python-imports
		python-pip-requests-ssl
		python-sqlite3
		python-stack-size
	'
	[rabbitmq]='
		rabbitmq-basics
		rabbitmq-tls
	'
	[r-base]='
	'
	[rails]='
	'
	[rapidoid]='
		rapidoid-hello-world
		rapidoid-load-balancer
	'
	[redis]='
		redis-basics
		redis-basics-config
		redis-basics-persistent
	'
	[redmine]='
		redmine-basics
	'
	[registry]='
		docker-registry-push-pull
	'
	[rethinkdb]='
	'
	[ruby]='
		ruby-hello-world
		ruby-standard-libs
		ruby-gems
		ruby-bundler
		ruby-nonroot
	'
	[rust]='
		rust-hello-world
	'
	[silverpeas]='
		silverpeas-basics
	'
	[swipl]='
		swipl-modules
	'
	[swift]='
		swift-hello-world
	'
	[tomcat]='
		tomcat-hello-world
	'
	[wordpress:apache]='
		wordpress-apache-run
	'
	[wordpress:fpm]='
		wordpress-fpm-run
	'
	[znc]='
		znc-basics
	'
	[zookeeper]='
		zookeeper-basics
	'

	# example onbuild
	#[python:onbuild]='
	#	py-onbuild
	#'
)
# Global tests skipped for specific images/variants, keyed "<image>_<test>".
globalExcludeTests+=(
	# single-binary images
	[hello-world_utc]=1
	[nats_utc]=1
	[nats-streaming_utc]=1
	[swarm_utc]=1
	[traefik_utc]=1

	[hello-world_no-hard-coded-passwords]=1
	[nats_no-hard-coded-passwords]=1
	[nats-streaming_no-hard-coded-passwords]=1
	[swarm_no-hard-coded-passwords]=1
	[traefik_no-hard-coded-passwords]=1

	# clearlinux has no /etc/password
	# https://github.com/docker-library/official-images/pull/1721#issuecomment-234128477
	[clearlinux_no-hard-coded-passwords]=1

	# alpine/slim openjdk images are headless and so can't do font stuff
	[openjdk:alpine_java-uimanager-font]=1
	[openjdk:slim_java-uimanager-font]=1
	# and adoptopenjdk has opted not to
	[adoptopenjdk_java-uimanager-font]=1

	# no "native" dependencies
	[ruby:alpine_ruby-bundler]=1
	[ruby:alpine_ruby-gems]=1
	[ruby:slim_ruby-bundler]=1
	[ruby:slim_ruby-gems]=1
	[percona:psmdb_percona-tokudb]=1
	[percona:psmdb_percona-rocksdb]=1

	# the Swift slim images are not expected to be able to run the swift-hello-world test because it involves compiling Swift code. The slim images are for running an already built binary.
	# https://github.com/docker-library/official-images/pull/6302#issuecomment-512181863
	[swift:slim_swift-hello-world]=1
)
|
#!/bin/bash
# removesmalls.sh -- keep only FASTA records whose sequence meets a minimum length.
#
# Usage: ./removesmalls.sh <fasta-file> <threshold>
if [ $# != 2 ]; then
	echo "USAGE: ./removesmalls.sh <fasta-file> <threshold>"
	exit 1   # FIX: was a bare 'exit' (status 0); a usage error should fail
fi
filename=$1
threshold=$2
# RS=">" makes each FASTA record one awk record: $1 is the header token and
# $2 the first sequence line. NOTE(review): length($2) measures only the first
# sequence line -- confirm inputs are single-line FASTA.
# FIX: "$filename" is now quoted so paths with spaces/globs work (SC2086).
awk -v min="$threshold" 'BEGIN {RS = ">" ; ORS = ""} length($2) >= min {print ">"$0}' "$filename"
|
struct PuzzleSolver {
    moves: Vec<char>,
}

impl PuzzleSolver {
    /// Creates a solver with an empty move list.
    fn new() -> PuzzleSolver {
        PuzzleSolver { moves: Vec::new() }
    }

    /// Records the move sequence to be solved.
    fn solve(&mut self, moves: &str) {
        self.moves = moves.chars().collect();
        // Puzzle-solving logic intentionally omitted (as in the original).
    }

    /// A sequence is treated as solvable when it contains an even number of moves.
    fn is_solvable(&self) -> bool {
        self.moves.len() % 2 == 0
    }

    /// Prints "<half-count> <moves>" when solvable, "Not solvable" otherwise.
    fn print_result(&self) {
        if !self.is_solvable() {
            println!("Not solvable");
            return;
        }
        let sequence: String = self.moves.iter().collect();
        println!("{} {}", self.moves.len() / 2, sequence);
    }
}
fn main() {
    // Example input move sequence.
    let input_moves = "UDLR";
    let mut puzzle = PuzzleSolver::new();
    puzzle.solve(input_moves);
    puzzle.print_result();
}
|
package alvi17.klooni1010.game;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.audio.Sound;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.math.Interpolation;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.utils.Array;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import alvi17.klooni1010.Klooni;
import alvi17.klooni1010.serializer.BinSerializable;
// A holder of pieces that can be drawn on screen.
// Pieces can be picked up from it and dropped on a board.
public class PieceHolder implements BinSerializable {

    //region Members

    // Screen-space rectangle this holder occupies; set by the GameLayout.
    final Rectangle area;
    private final Piece[] pieces;

    private final Sound pieceDropSound;
    private final Sound invalidPieceDropSound;
    private final Sound takePiecesSound;

    // Count of pieces to be shown
    private final int count;

    // Currently held piece index (picked by the user), -1 when none is held.
    private int heldPiece;

    // When false, dropped pieces are never put on the board.
    public boolean enabled;

    // Needed after a piece is dropped, so it can go back
    private final Rectangle[] originalPositions;

    // The size the cells will adopt once picked
    private final float pickedCellSize;

    // Every piece holder belongs to a specific board
    private final Board board;

    //endregion

    //region Static members

    public static final float DRAG_SPEED = 0.5f; // Interpolation value ((pos -> new) / frame)

    //endregion

    //region Constructor

    public PieceHolder(final GameLayout layout, final Board board,
                       final int pieceCount, final float pickedCellSize) {
        this.board = board;
        enabled = true;
        count = pieceCount;
        pieces = new Piece[count];
        originalPositions = new Rectangle[count];

        pieceDropSound = Gdx.audio.newSound(Gdx.files.internal("sound/piece_drop.mp3"));
        invalidPieceDropSound = Gdx.audio.newSound(Gdx.files.internal("sound/invalid_drop.mp3"));
        takePiecesSound = Gdx.audio.newSound(Gdx.files.internal("sound/take_pieces.mp3"));

        heldPiece = -1;
        this.pickedCellSize = pickedCellSize;

        area = new Rectangle();
        layout.update(this);

        // takeMore depends on the layout to be ready
        // TODO So, how would pieces handle a layout update?
        takeMore();
    }

    //endregion

    //region Private methods

    // Determines whether all the pieces have been put (and the "hand" is finished)
    private boolean handFinished() {
        for (int i = 0; i < count; ++i)
            if (pieces[i] != null)
                return false;

        return true;
    }

    // Takes a new set of pieces. Should be called when there are no more piece left
    private void takeMore() {
        for (int i = 0; i < count; ++i)
            pieces[i] = Piece.random();
        updatePiecesStartLocation();

        if (Klooni.soundsEnabled()) {
            // Random pitch so it's not always the same sound
            takePiecesSound.play(1, MathUtils.random(0.8f, 1.2f), 0);
        }
    }

    // Lays the pieces out evenly across the holder area and remembers each
    // piece's rest position/size so it can animate back after being dragged.
    private void updatePiecesStartLocation() {
        float perPieceWidth = area.width / count;
        Piece piece;
        for (int i = 0; i < count; ++i) {
            piece = pieces[i];
            if (piece == null)
                continue;

            // Set the absolute position on screen and the cells' cellSize
            // Also clamp the cell size to be the picked size as maximum, or
            // it would be too big in some cases.
            piece.pos.set(area.x + i * perPieceWidth, area.y);
            piece.cellSize = Math.min(Math.min(
                    perPieceWidth / piece.cellCols,
                    area.height / piece.cellRows), pickedCellSize);

            // Center the piece on the X and Y axes. For this we see how
            // much up we can go, this is, (area.height - piece.height) / 2
            Rectangle rectangle = piece.getRectangle();
            piece.pos.y += (area.height - rectangle.height) * 0.5f;
            piece.pos.x += (perPieceWidth - rectangle.width) * 0.5f;

            originalPositions[i] = new Rectangle(
                    piece.pos.x, piece.pos.y,
                    piece.cellSize, piece.cellSize);

            // Now that we have the original positions, reset the size so it animates and grows
            piece.cellSize = 0f;
        }
    }

    //endregion

    //region Public methods

    // Picks the piece below the finger/mouse, returning true if any was picked
    public boolean pickPiece() {
        Vector2 mouse = new Vector2(
                Gdx.input.getX(),
                Gdx.graphics.getHeight() - Gdx.input.getY()); // Y axis is inverted

        final float perPieceWidth = area.width / count;
        for (int i = 0; i < count; ++i) {
            if (pieces[i] != null) {
                Rectangle maxPieceArea = new Rectangle(
                        area.x + i * perPieceWidth, area.y, perPieceWidth, area.height);

                if (maxPieceArea.contains(mouse)) {
                    heldPiece = i;
                    return true;
                }
            }
        }

        heldPiece = -1;
        return false;
    }

    // Returns the non-null pieces currently available in the holder.
    public Array<Piece> getAvailablePieces() {
        Array<Piece> result = new Array<Piece>(count);
        for (int i = 0; i < count; ++i)
            if (pieces[i] != null)
                result.add(pieces[i]);

        return result;
    }

    // If no piece is currently being held, the area will be 0
    public int calculateHeldPieceArea() {
        return heldPiece > -1 ? pieces[heldPiece].calculateArea() : 0;
    }

    // Gravity center of the held piece, or null if none is held.
    public Vector2 calculateHeldPieceCenter() {
        return heldPiece > -1 ? pieces[heldPiece].calculateGravityCenter() : null;
    }

    // Tries to drop the piece on the given board. As a result, it
    // returns one of the following: NO_DROP, NORMAL_DROP, ON_BOARD_DROP
    public DropResult dropPiece() {
        DropResult result;

        if (heldPiece > -1) {
            boolean put;
            put = enabled && board.putScreenPiece(pieces[heldPiece]);
            if (put) {
                if (Klooni.soundsEnabled()) {
                    // The larger the piece size, the smaller the pitch
                    // Considering 10 cells to be the largest, 1.1 highest pitch, 0.7 lowest
                    float pitch = 1.104f - pieces[heldPiece].calculateArea() * 0.04f;
                    pieceDropSound.play(1, pitch, 0);
                }
                // Capture area/center BEFORE clearing the slot.
                result = new DropResult(calculateHeldPieceArea(), calculateHeldPieceCenter());
                pieces[heldPiece] = null;
            } else {
                if (Klooni.soundsEnabled())
                    invalidPieceDropSound.play();

                result = new DropResult(true);
            }

            heldPiece = -1;
            if (handFinished())
                takeMore();
        } else
            result = new DropResult(false);

        return result;
    }

    // Updates the state of the piece holder (and the held piece)
    public void update() {
        Piece piece;
        if (heldPiece > -1) {
            piece = pieces[heldPiece];

            Vector2 mouse = new Vector2(
                    Gdx.input.getX(),
                    Gdx.graphics.getHeight() - Gdx.input.getY()); // Y axis is inverted

            if (Klooni.onDesktop) {
                // Center the piece to the mouse
                mouse.sub(piece.getRectangle().width * 0.5f, piece.getRectangle().height * 0.5f);
            } else {
                // Center the new piece position horizontally
                // and push it up by it's a cell (arbitrary) vertically, thus
                // avoiding to cover it with the finger (issue on Android devices)
                mouse.sub(piece.getRectangle().width * 0.5f, -pickedCellSize);
            }

            if (Klooni.shouldSnapToGrid())
                mouse.set(board.snapToGrid(piece, mouse));

            piece.pos.lerp(mouse, DRAG_SPEED);
            piece.cellSize = Interpolation.linear.apply(piece.cellSize, pickedCellSize, DRAG_SPEED);
        }

        // Return the pieces to their original position
        // TODO This seems somewhat expensive, can't it be done any better?
        Rectangle original;
        for (int i = 0; i < count; ++i) {
            if (i == heldPiece)
                continue;

            piece = pieces[i];
            if (piece == null)
                continue;

            original = originalPositions[i];
            piece.pos.lerp(new Vector2(original.x, original.y), 0.3f);
            piece.cellSize = Interpolation.linear.apply(piece.cellSize, original.width, 0.3f);
        }
    }

    public void draw(SpriteBatch batch) {
        for (int i = 0; i < count; ++i) {
            if (pieces[i] != null) {
                pieces[i].draw(batch);
            }
        }
    }

    //endregion

    //region Serialization

    @Override
    public void write(DataOutputStream out) throws IOException {
        // Piece count, false if piece == null, true + piece if piece != null
        out.writeInt(count);
        for (int i = 0; i < count; ++i) {
            if (pieces[i] == null) {
                out.writeBoolean(false);
            } else {
                out.writeBoolean(true);
                pieces[i].write(out);
            }
        }
    }

    @Override
    public void read(DataInputStream in) throws IOException {
        // If the saved piece count does not match the current piece count,
        // then an IOException is thrown since the data saved was invalid
        final int savedPieceCount = in.readInt();
        if (savedPieceCount != count)
            throw new IOException("Invalid piece count saved.");

        for (int i = 0; i < count; i++)
            pieces[i] = in.readBoolean() ? Piece.read(in) : null;

        updatePiecesStartLocation();
    }

    //endregion

    //region Sub-classes

    // Outcome of a dropPiece() call: whether anything was dropped, whether it
    // landed on the board, and (if so) the piece's area and gravity center.
    public class DropResult {

        public final boolean dropped;
        public final boolean onBoard;

        public final int area;
        public final Vector2 pieceCenter;

        DropResult(final boolean dropped) {
            this.dropped = dropped;
            onBoard = false;
            area = 0;
            pieceCenter = null;
        }

        DropResult(final int area, final Vector2 pieceCenter) {
            dropped = onBoard = true;
            this.area = area;
            this.pieceCenter = pieceCenter;
        }
    }

    //endregion
}
|
import { Column, Entity, JoinColumn, OneToOne, PrimaryGeneratedColumn } from 'typeorm';
import { Tweeter } from '../twitter/tweeter';
import { DiscordChannelsEntity } from './discord-channels.entity';
// Table of Twitter accounts being streamed, each tied to a Discord channel.
@Entity('twitter-streamers')
export class TwitterStreamersEntity {
  // Surrogate primary key.
  @PrimaryGeneratedColumn() id: number;
  // Twitter handle to stream, max 30 characters.
  @Column({ type: 'varchar', nullable: false, length: 30 }) handle: string;
  // Streaming rate, defaulting to 1. NOTE(review): the unit is not evident
  // from this file -- confirm against the consumer.
  @Column({ type: 'int', nullable: false, default: 1 }) rate: number;
  // FK column holding the id of the target Discord channel.
  @Column({ type: 'int', nullable: false }) discord_channel: number;
  // Relation resolved through the discord_channel column.
  @OneToOne(() => DiscordChannelsEntity)
  @JoinColumn({
    name: 'discord_channel',
    referencedColumnName: 'id',
  })
  channel?: DiscordChannelsEntity;
  // Not a column (no decorator): presumably the live Tweeter instance for
  // this handle, populated at runtime -- confirm with the caller.
  tweeter: Tweeter | undefined;
}
|
import { action, Action } from 'easy-peasy';
/** easy-peasy store slice holding the signed-in user's id. */
export interface UserChapter {
  /** Current user's id, or null when no user is set. */
  userId: string | null;
  /** Action that replaces the stored user id. */
  setUserId: Action<UserChapter, string | null>;
}

export const userChapter: UserChapter = {
  userId: null,
  // easy-peasy actions may mutate the draft state directly.
  setUserId: action((state, payload) => {
    state.userId = payload;
  })
};
|
package com.netcracker.ncstore.config;
import org.springframework.boot.web.servlet.ServletListenerRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.web.session.HttpSessionEventPublisher;
@Configuration
public class SessionConfiguration {

    /**
     * Registers Spring Security's {@code HttpSessionEventPublisher} as a
     * servlet listener so HTTP session lifecycle events are published to the
     * application context.
     */
    @Bean
    public ServletListenerRegistrationBean<HttpSessionEventPublisher> httpSessionEventPublisher() {
        // Diamond operator: the generic type is inferred from the return type.
        return new ServletListenerRegistrationBean<>(new HttpSessionEventPublisher());
    }
}
|
/// <summary>
/// For every empty tile (value 0) in <paramref name="map"/>, counts how many
/// of its four orthogonal neighbours are walls (value 1). Wall tiles keep the
/// default count of 0. Out-of-bounds neighbours are ignored.
/// </summary>
public int[,] CountSurroundingWalls(int[,] map)
{
    int rowCount = map.GetLength(0);
    int colCount = map.GetLength(1);
    int[,] result = new int[rowCount, colCount];

    // Orthogonal neighbour offsets: up, down, left, right.
    int[] dRow = { -1, 1, 0, 0 };
    int[] dCol = { 0, 0, -1, 1 };

    for (int r = 0; r < rowCount; r++)
    {
        for (int c = 0; c < colCount; c++)
        {
            if (map[r, c] != 0) // Only empty space tiles are scored.
                continue;

            int walls = 0;
            for (int k = 0; k < 4; k++)
            {
                int nr = r + dRow[k];
                int nc = c + dCol[k];
                if (nr >= 0 && nr < rowCount && nc >= 0 && nc < colCount && map[nr, nc] == 1)
                    walls++;
            }
            result[r, c] = walls;
        }
    }
    return result;
}
|
#!/bin/sh
# {# jinja-parse #}
# Rendered at build time: INSTALL_PREFIX is substituted by the template engine.
INSTALL_PREFIX={{INSTALL_PREFIX}}
# udhcpc invokes this script with the event name in $1 and the DHCP option
# values (e.g. $interface, $ip, $router, $dns, ...) exported in the environment.
[ -z "$1" ] && echo "Error: should be run by udhcpc" && exit 1
# Per-interface file where the received options are persisted.
OPTS_FILE=/var/run/udhcpc_$interface.opts
# Install RFC 3442 classless static routes.
# Arguments: pairs of "<dest>/<prefix> <gateway>", consumed two at a time.
# Caps at 128 routes as a safety limit. Uses $interface from the environment.
set_classless_routes()
{
	local max=128
	local type
	# Separate [ ] tests joined with && instead of the obsolete, ambiguous -a.
	while [ -n "$1" ] && [ -n "$2" ] && [ "$max" -gt 0 ]; do
		# A /32 destination is a host route; anything else is a net route.
		[ "${1##*/}" -eq 32 ] && type=host || type=net
		echo "udhcpc: adding route for $type $1 via $2"
		route add -$type "$1" gw "$2" dev "$interface"
		max=$((max-1))
		shift 2
	done
}
# Print the DHCP options received from the server, one key=value per line.
# Reads the variables exported by udhcpc; only options that are set are printed.
print_opts()
{
	[ -n "$router" ] && echo "gateway=$router"
	[ -n "$timesrv" ] && echo "timesrv=$timesrv"
	[ -n "$namesrv" ] && echo "namesrv=$namesrv"
	[ -n "$dns" ] && echo "dns=$dns"
	[ -n "$logsrv" ] && echo "logsrv=$logsrv"
	[ -n "$cookiesrv" ] && echo "cookiesrv=$cookiesrv"
	[ -n "$lprsrv" ] && echo "lprsrv=$lprsrv"
	[ -n "$hostname" ] && echo "hostname=$hostname"
	[ -n "$domain" ] && echo "domain=$domain"
	[ -n "$swapsrv" ] && echo "swapsrv=$swapsrv"
	[ -n "$ntpsrv" ] && echo "ntpsrv=$ntpsrv"
	[ -n "$lease" ] && echo "lease=$lease"
	# vendorspec may contain all sorts of binary characters, convert it to base64.
	# Quoted so embedded whitespace is passed through verbatim instead of being
	# collapsed by word splitting.
	[ -n "$vendorspec" ] && echo "vendorspec=$(echo "$vendorspec" | base64)"
}
# Bring the interface up with the leased address and (re)install routes.
# $1 ("ifup"/"update") is currently unused; all inputs come from udhcpc's
# exported environment ($interface, $ip, $subnet, $broadcast, $router, ...).
setup_interface()
{
	echo "udhcpc: ifconfig $interface $ip netmask ${subnet:-255.255.255.0} broadcast ${broadcast:-+}"
	ifconfig $interface $ip netmask ${subnet:-255.255.255.0} broadcast ${broadcast:-+}
	# Skip bogus gateways: a server may hand out 0.0.0.0 or 255.255.255.255.
	[ -n "$router" ] && [ "$router" != "0.0.0.0" ] && [ "$router" != "255.255.255.255" ] && {
		echo "udhcpc: setting default routers: $router"
		local valid_gw=""
		for i in $router ; do
			route add default gw $i dev $interface
			valid_gw="${valid_gw:+$valid_gw|}$i"
		done
		# Remove stale default routes whose gateway is not in $valid_gw:
		# awk emits "route del ..." commands which are executed via eval.
		eval $(route -n | awk '
			/^0.0.0.0\W{9}('$valid_gw')\W/ {next}
			/^0.0.0.0/ {print "route del -net "$1" gw "$2";"}
			')
	}
	# CIDR STATIC ROUTES (rfc3442)
	[ -n "$staticroutes" ] && set_classless_routes $staticroutes
	# Microsoft variant of the classless-routes option (option 249).
	[ -n "$msstaticroutes" ] && set_classless_routes $msstaticroutes
	#
	# Save the options list
	#
	print_opts > $OPTS_FILE
}
# Event dispatch: udhcpc calls this script with deconfig/renew/bound in $1.
# (The unused leftover "applied=" variable has been removed.)
case "$1" in
	deconfig)
		# Lease lost or interface being reconfigured: clear address and options.
		ifconfig "$interface" 0.0.0.0
		rm -f "$OPTS_FILE"
		;;
	renew)
		setup_interface update
		;;
	bound)
		setup_interface ifup
		;;
esac

# custom scripts
for x in ${INSTALL_PREFIX}/scripts/udhcpc.d/[0-9]*
do
	# Skip non-executables (also covers the unmatched-glob literal pattern).
	[ ! -x "$x" ] && continue
	# Execute custom scripts
	"$x" "$1"
done

# user rules
[ -f /etc/udhcpc.user ] && . /etc/udhcpc.user

exit 0
|
// This file is part of BenchExec, a framework for reliable benchmarking:
// https://github.com/sosy-lab/benchexec
//
// SPDX-FileCopyrightText: 2019-2020 <NAME> <https://www.sosy-lab.org>
//
// SPDX-License-Identifier: Apache-2.0
import React from "react";
import { faTrash } from "@fortawesome/free-solid-svg-icons";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import Slider, { createSliderWithTooltip } from "rc-slider";
import "rc-slider/assets/index.css";
import {
without,
pathOr,
emptyStateValue,
getStep,
NumberFormatterBuilder,
} from "../../utils/utils";
// Range slider with value tooltips, used by numeric/measure filter bodies.
const Range = createSliderWithTooltip(Slider.Range);
// Shared debounce timer for the text-filter input; replaced on every keystroke.
let debounceHandler = setTimeout(() => {}, 500);
/**
 * Card UI for a single table filter. Supports three filter shapes:
 *  - "status":  category/status checkbox lists,
 *  - "text":    debounced substring search,
 *  - numeric ("measure"/"number"): min/max range slider plus number inputs.
 * Selected values are reported upward via props.onFilterUpdate; numeric
 * filters are serialized as "min:max" where an empty side means "unbounded".
 */
export default class FilterCard extends React.PureComponent {
  constructor(props) {
    super(props);
    const {
      values,
      min,
      max,
      type,
      number_of_significant_digits: significantDigits,
    } = props.filter || { values: [] };
    let currentMin = 0;
    let currentMax = 0;
    if (type === "measure" || type === "number") {
      const builder = new NumberFormatterBuilder(significantDigits).build();
      currentMin = builder(min);
      currentMax = builder(max);
      const value = values && values[0];
      // An existing "min:max" value overrides the column's own bounds.
      if (value && value.includes(":")) {
        const res = this.handleMinMaxValue(value, significantDigits);
        currentMin = res.min;
        currentMax = res.max;
      }
    }
    this.state = {
      title:
        props.availableFilters && props.availableFilters.length
          ? props.availableFilters[0].title
          : "",
      values: [],
      idx: pathOr(["availableFilters", 0, "idx"], 0, props),
      active: true,
      selectedDistincts: [],
      currentMin,
      currentMax,
    };
  }

  // Push new filter values to the parent. An emptied status filter is
  // reported as [emptyStateValue] rather than an empty array.
  sendFilterUpdate(values) {
    const { type } = this.props.filter;
    if (values.length === 0 && type === "status") {
      this.props.onFilterUpdate({
        values: [emptyStateValue],
        title: this.state.title || this.props.title,
      });
    } else {
      this.props.onFilterUpdate({
        values,
        title: this.state.title || this.props.title,
      });
    }
  }

  // Re-sync the displayed min/max when the parent swaps in new filter values.
  componentDidUpdate(prevProps, prevState) {
    if (!this.props.filter) {
      return;
    }
    if (
      !prevProps.filter ||
      prevProps.filter.values !== this.props.filter.values
    ) {
      const {
        values,
        number_of_significant_digits: significantDigits,
      } = this.props.filter;
      const [value] = values;
      if (value && value.includes(":")) {
        const { min, max } = this.handleMinMaxValue(value, significantDigits);
        this.setState({ currentMin: min, currentMax: max });
      }
    }
  }

  // Split a "min:max" string; an empty side falls back to the column bound.
  handleMinMaxValue(value, significantDigits) {
    const builder = new NumberFormatterBuilder(significantDigits).build();
    const { min: propMin, max: propMax } = this.props.filter || {
      min: 0,
      max: Infinity,
    };
    const [vMin, vMax] = value.split(":");
    return {
      min: vMin.trim() !== "" ? builder(vMin) : builder(propMin),
      max: vMax.trim() !== "" ? builder(vMax) : builder(propMax),
    };
  }

  render() {
    const { filter, editable, availableFilters } = this.props;
    const selectRef = React.createRef();
    // Dropdown shown while the card is in "add a filter" mode.
    const filterAddSelection = () => (
      <>
        <span style={{ marginLeft: "12px" }}>Add filter for: </span>
        <select
          className="filter-selection"
          defaultValue="-1"
          ref={selectRef}
          onChange={({ target: { value: idx } }) => {
            // NOTE(review): target.value is a string, so idx === -1 can never
            // be true and this guard looks ineffective — confirm intent.
            if (idx === -1) {
              return;
            }
            this.setState({ idx: -1, active: true });
            selectRef.current.value = "-1"; // Reset preselected option to "Column"
            this.props.addFilter(idx);
          }}
        >
          <option value="-1" disabled>
            Column
          </option>
          {availableFilters.map(({ idx, display_title }) => (
            <option key={idx} value={idx}>
              {display_title}
            </option>
          ))}
        </select>
      </>
    );
    // Header: either the add-filter dropdown (editable) or title + delete icon.
    const makeHeader = (name, editable) => (
      <div className="filter-card--header">
        {editable ? (
          filterAddSelection()
        ) : (
          <>
            <h4 className="title">{`${filter.display_title} ${
              filter.unit ? "(" + filter.unit + ")" : ""
            }`}</h4>
            <FontAwesomeIcon
              className="delete-button"
              icon={faTrash}
              onClick={() => {
                this.props.removeFilter();
              }}
            />
          </>
        )}
      </div>
    );
    // Body renderer; dispatches on the filter type.
    const makeFilterBody = (filter) => {
      if (!filter) {
        return null;
      }
      const {
        title,
        type,
        number_of_significant_digits: significantDigits,
        categories,
        statuses,
        values = [],
      } = filter;
      let { min, max } = filter;
      let body;
      if (type === "status") {
        body = (
          <>
            Category
            <ul className="filter-card--body--list">
              {categories.map((category) => {
                const ref = React.createRef();
                return (
                  <li key={category}>
                    <input
                      type="checkbox"
                      name={`cat-${category}`}
                      checked={values.includes(category)}
                      ref={ref}
                      onChange={({ target: { checked } }) => {
                        if (checked) {
                          const newValues = [...values, category];
                          this.setState({ values: newValues });
                          this.sendFilterUpdate(newValues);
                        } else {
                          const newValues = without(category, values);
                          this.setState({ values: newValues });
                          this.sendFilterUpdate(newValues);
                        }
                      }}
                    />
                    <label
                      htmlFor={`cat-${category}`}
                      onClick={() => ref.current.click()}
                    >
                      {category}
                    </label>
                  </li>
                );
              })}
            </ul>
            Status
            <ul className="filter-card--body--list">
              {statuses.map((status) => {
                const ref = React.createRef();
                return (
                  <li key={status}>
                    <input
                      type="checkbox"
                      name={`stat-${status}`}
                      ref={ref}
                      checked={values.includes(status)}
                      onChange={({ target: { checked } }) => {
                        if (checked) {
                          const newValues = [...values, status];
                          this.setState({ values: newValues });
                          this.sendFilterUpdate(newValues);
                        } else {
                          const newValues = without(status, values);
                          this.setState({ values: newValues });
                          this.sendFilterUpdate(newValues);
                        }
                      }}
                    />
                    <label
                      htmlFor={`stat-${status}`}
                      onClick={() => ref.current.click()}
                    >
                      {status}
                    </label>
                  </li>
                );
              })}
            </ul>
          </>
        );
      } else if (type === "text") {
        const [value] = values;
        body = (
          <input
            type="text"
            name={`text-${title}`}
            placeholder="Search for value"
            value={value}
            onChange={({ target: { value: textValue } }) => {
              // Debounce: only report to the parent after 500 ms of quiet.
              clearTimeout(debounceHandler);
              this.setState({ values: [textValue] });
              debounceHandler = setTimeout(() => {
                this.sendFilterUpdate([textValue]);
              }, 500);
            }}
          />
        );
      } else {
        // Numeric ("measure"/"number"): slider plus min/max number inputs.
        const builder = new NumberFormatterBuilder(significantDigits).build();
        min = builder(min);
        max = builder(max);
        const minStep = getStep(min);
        const maxStep = getStep(max);
        // get the bigger step by length of string (== smaller step)
        const step = minStep.length > maxStep.length ? minStep : maxStep;
        //shift the decimal
        body = (
          <>
            <div className="filter-card--range-container">
              <b>{min}</b>
              <b>{max}</b>
            </div>
            <Range
              min={Number(min)}
              max={Number(max)}
              step={step}
              defaultValue={[Number(min), Number(max)]}
              value={[
                Number(this.state.currentMin),
                Number(this.state.currentMax),
              ]}
              onChange={([nMin, nMax]) => {
                this.setState({
                  currentMin: builder(nMin),
                  currentMax: builder(nMax),
                });
              }}
              onAfterChange={([nMin, nMax]) => {
                // A side equal to the column bound serializes as "" = unbounded.
                const fMin = builder(nMin);
                const fMax = builder(nMax);
                const stringRepMin = fMin === min ? "" : fMin;
                const stringRepMax = fMax === max ? "" : fMax;
                this.setState({
                  currentMin: fMin,
                  currentMax: fMax,
                  values: [`${stringRepMin}:${stringRepMax}`],
                });
                this.sendFilterUpdate([`${stringRepMin}:${stringRepMax}`]);
              }}
            />
            <div className="filter-card--range-input-fields">
              <label
                className="range-input-fields--min"
                htmlFor={`inp-${title}-min`}
              >
                minimum
              </label>
              <label
                className="range-input-fields--max"
                htmlFor={`inp-${title}-max`}
              >
                maximum
              </label>
              <input
                type="number"
                name={`inp-${title}-min`}
                value={this.state.currentMin}
                lang="en-US"
                step={step}
                onChange={({ target: { value } }) => {
                  const { currentMin, currentMax } = this.state;
                  // If the new minimum exceeds the maximum, swap the bounds.
                  if (value > this.state.currentMax) {
                    const stringRepMin = currentMin === min ? "" : currentMin;
                    const stringRepMax = value === max ? "" : value;
                    this.setState({
                      currentMax: value,
                      currentMin: this.state.currentMax,
                      values: [`${stringRepMin}:${stringRepMax}`],
                    });
                  } else {
                    const stringRepMin = value === min ? "" : value;
                    const stringRepMax = currentMax === max ? "" : currentMax;
                    this.setState({
                      currentMin: value,
                      values: [`${stringRepMin}:${stringRepMax}`],
                    });
                  }
                }}
              />
              <input
                type="number"
                name={`inp-${title}-max`}
                step={step}
                lang="en-US"
                value={this.state.currentMax}
                onChange={({ target: { value } }) => {
                  const { currentMin, currentMax } = this.state;
                  // If the new maximum undercuts the minimum, swap the bounds.
                  if (value < this.state.currentMin) {
                    const stringRepMin = value === min ? "" : value;
                    const stringRepMax = currentMax === max ? "" : currentMax;
                    this.setState({
                      currentMax: this.state.currentMin,
                      currentMin: value,
                      values: [`${stringRepMin}:${stringRepMax}`],
                    });
                  } else {
                    const stringRepMin = currentMin === min ? "" : currentMin;
                    const stringRepMax = value === max ? "" : value;
                    this.setState({
                      currentMax: value,
                      values: [`${stringRepMin}:${stringRepMax}`],
                    });
                  }
                }}
              />
            </div>
          </>
        );
      }
      return <div className="filter-card--body">{body}</div>;
    };
    return (
      <div className="filter-card">
        {makeHeader(this.props.name, editable)}
        {makeFilterBody(this.props.filter)}
      </div>
    );
  }
}
|
/*
==============================================================================
This file was auto-generated!
It contains the basic framework code for a JUCE plugin processor.
==============================================================================
*/
#include "PluginProcessor.h"
#include "PluginEditor.h"
#include "utils/UtilityFunctions.h"
// Default bus layout. Ableton Live 10's VST3 hosting needs an explicit
// stereo in/out pair, so the VST3 wrapper bypasses the JucePlugin_* flags.
AudioProcessor::BusesProperties TickAudioProcessor::getDefaultLayout()
{
    // workaround to Ableton Live 10
    if (PluginHostType::getPluginLoadedAs() == AudioProcessor::wrapperType_VST3)
        return BusesProperties()
            .withInput ("Input", AudioChannelSet::stereo(), true)
            .withOutput ("Output", AudioChannelSet::stereo(), true);
    return BusesProperties()
#if ! JucePlugin_IsMidiEffect
 #if ! JucePlugin_IsSynth
        .withInput ("Input", AudioChannelSet::stereo(), true)
 #endif
        .withOutput ("Output", AudioChannelSet::stereo(), true)
#endif
        ;
}
//==============================================================================
// Constructor: declares the filter-cutoff parameter, loads the factory
// preset and initializes the transport defaults.
TickAudioProcessor::TickAudioProcessor()
#ifndef JucePlugin_PreferredChannelConfigurations
    : AudioProcessor (getDefaultLayout())
#endif
    ,
    settings (ticks),
    parameters (*this, nullptr, Identifier (JucePlugin_Name),
                {
                    // Low-pass cutoff, 100 Hz .. 20 kHz on a logarithmic range.
                    std::make_unique<AudioParameterFloat> (IDs::filterCutoff.toString(), // parameter ID
                                                           "Filter Cutoff", // parameter name
                                                           TickUtils::makeLogarithmicRange<float> (100.0, 20000.0f),
                                                           20000.0f,
                                                           "Hz",
                                                           AudioProcessorParameter::genericParameter,
                                                           [=] (int val, int maxLen) {
                                                               return String (roundToInt (val)) + "Hz";
                                                           }) // default value
                })
{
    // init samples reading
    ticks.clear();
    filterCutoff = parameters.getRawParameterValue (IDs::filterCutoff.toString());
    // load default preset
    setStateInformation (BinaryData::factory_default_preset, BinaryData::factory_default_presetSize);
    // Standalone drives its own transport; plugin builds follow the host.
    settings.useHostTransport.setValue (wrapperType != WrapperType::wrapperType_Standalone, nullptr);
    lastKnownPosition_.resetToDefault();
    settings.isDirty = false;
}
// Destructor: drop in-flight tick playback state, then the sample bank.
TickAudioProcessor::~TickAudioProcessor()
{
    tickState.clear();
    ticks.clear();
}
// Hands the standalone-only PropertySet to the active editor.
// NOTE(review): assumes the active editor exists and is a
// TickAudioProcessorEditor — a null or different editor would crash; confirm
// callers guarantee this.
void TickAudioProcessor::setExternalProps (juce::PropertySet* s)
{
    static_cast<TickAudioProcessorEditor*> (getActiveEditor())->standaloneProps = s;
}
//==============================================================================
// Plugin name as reported to the host (from the JUCE build configuration).
const String TickAudioProcessor::getName() const
{
    return JucePlugin_Name;
}
// Compile-time flag: whether the plugin consumes MIDI input.
bool TickAudioProcessor::acceptsMidi() const
{
#if JucePlugin_WantsMidiInput
    return true;
#else
    return false;
#endif
}
// Compile-time flag: whether the plugin emits MIDI output.
bool TickAudioProcessor::producesMidi() const
{
#if JucePlugin_ProducesMidiOutput
    return true;
#else
    return false;
#endif
}
// Compile-time flag: whether the plugin is a pure MIDI effect.
bool TickAudioProcessor::isMidiEffect() const
{
#if JucePlugin_IsMidiEffect
    return true;
#else
    return false;
#endif
}
// Decay tail reported to the host, in seconds.
double TickAudioProcessor::getTailLengthSeconds() const
{
    constexpr double kTailSeconds = 2.0;
    return kTailSeconds;
}
// Program (preset slot) count exposed to the host.
int TickAudioProcessor::getNumPrograms()
{
    return 1; // NB: some hosts don't cope very well if you tell them there are 0 programs,
    // so this should be at least 1, even if you're not really implementing programs.
}
// Single-program plugin: the current program index is always 0.
int TickAudioProcessor::getCurrentProgram()
{
    return 0;
}
// Programs are not implemented; intentionally a no-op.
void TickAudioProcessor::setCurrentProgram (int index)
{
}
// Programs are not implemented; returns an empty name.
const String TickAudioProcessor::getProgramName (int index)
{
    return {};
}
// Programs are not implemented; intentionally a no-op.
void TickAudioProcessor::changeProgramName (int index, const String& newName)
{
}
//==============================================================================
// Called before playback starts: propagate the sample rate and reset any
// in-flight tick playback.
void TickAudioProcessor::prepareToPlay (double sampleRate, int samplesPerBlock)
{
    getState().samplerate = sampleRate;
    ticks.setSampleRate (sampleRate);
    tickState.clear();
}
// Nothing to free: tick samples stay loaded between playback sessions.
void TickAudioProcessor::releaseResources()
{
    // When playback stops, you can use this as an opportunity to free up any
    // spare memory, etc.
}
#ifndef JucePlugin_PreferredChannelConfigurations
// Accepts mono/stereo outputs (with matching input when not a synth);
// standalone builds accept everything to avoid host-side asserts.
bool TickAudioProcessor::isBusesLayoutSupported (const BusesLayout& layouts) const
{
    // standalone can assert due to different layouts!
    if (wrapperType == wrapperType_Standalone)
        return true;
#if JucePlugin_IsMidiEffect
    ignoreUnused (layouts);
    return true;
#else
    // This is the place where you check if the layout is supported.
    // In this template code we only support mono or stereo.
    if (layouts.getMainOutputChannelSet() != AudioChannelSet::mono()
        && layouts.getMainOutputChannelSet() != AudioChannelSet::stereo())
        return false;
    // This checks if the input layout matches the output layout
#if ! JucePlugin_IsSynth
    if (layouts.getMainOutputChannelSet() != layouts.getMainInputChannelSet())
        return false;
#endif
    return true;
#endif
}
#endif
// Host-transport sync only makes sense when running inside a host.
bool TickAudioProcessor::isHostSyncSupported()
{
    const bool isStandalone = (wrapperType == AudioProcessor::wrapperType_Standalone);
    return ! isStandalone;
}
// Render one audio block: advance (standalone) or read (host) the transport,
// then schedule metronome tick samples at every tick boundary that falls
// inside this buffer.
void TickAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    ScopedNoDenormals noDenormals;
    //    if (wrapperType != wrapperType_Standalone)
    //        lastKnownPosition_.resetToDefault();
    buffer.clear();
    // standalone mode
    if (! isHostSyncSupported() || ! getState().useHostTransport.get())
    {
#if JUCE_IOS
        // Collect pending changes to forward to Ableton Link below.
        AbletonLink::Requests requests;
        if (lastKnownPosition_.isPlaying != settings.transport.isPlaying.get())
            requests.isPlaying = settings.transport.isPlaying.get();
        if (lastKnownPosition_.bpm != settings.transport.bpm.get())
            requests.bpm = settings.transport.bpm.get();
#endif
        lastKnownPosition_.isPlaying = settings.transport.isPlaying.get();
        lastKnownPosition_.timeSigNumerator = settings.transport.numerator.get();
        lastKnownPosition_.timeSigDenominator = settings.transport.denumerator.get();
        lastKnownPosition_.bpm = settings.transport.bpm.get();
        if (lastKnownPosition_.isPlaying && ! tickState.isClear)
        {
            // Advance our own transport by the duration of this buffer.
            const double bufInSecs = buffer.getNumSamples() / getSampleRate();
            const double iqps = lastKnownPosition_.bpm / 60.0; // quarter-per-second
            lastKnownPosition_.ppqPosition += iqps * bufInSecs;
            lastKnownPosition_.timeInSamples += buffer.getNumSamples();
            lastKnownPosition_.timeInSeconds += bufInSecs;
        }
        else
        {
            // Stopped: rewind the transport and cancel any pending tick.
            lastKnownPosition_.ppqPosition = 0.0;
            lastKnownPosition_.timeInSamples = 0;
            lastKnownPosition_.timeInSeconds = 0;
            tickState.clear();
        }
#if JUCE_IOS
        if (m_link.isLinkConnected())
        {
            m_link.linkPosition (lastKnownPosition_, requests);
            settings.transport.isPlaying.setValue (lastKnownPosition_.isPlaying, nullptr);
        }
#endif
    }
    else if (getPlayHead())
    {
        // Host mode: mirror the host's transport position.
        getPlayHead()->getCurrentPosition (lastKnownPosition_);
    }
    // setValue only triggers if value is different
    settings.transport.bpm.setValue (lastKnownPosition_.bpm, nullptr);
    settings.transport.numerator.setValue (lastKnownPosition_.timeSigNumerator, nullptr);
    settings.transport.denumerator.setValue (lastKnownPosition_.timeSigDenominator, nullptr);
    if (lastKnownPosition_.isPlaying)
    {
        // Non-blocking: skip this block rather than stall the audio thread.
        if (! ticks.getLock().try_lock())
            return;
        // calculate where tick starts in samples...
        const auto pos = lastKnownPosition_.ppqPosition;
        const auto bps = lastKnownPosition_.bpm / 60.0;
        const auto bpSmp = getSampleRate() / bps;
        const auto ttq = (4.0 / lastKnownPosition_.timeSigDenominator); // tick to quarter
        const auto tickAt = ttq / tickMultiplier; // tick every (1.0 = 1/4, 0.5 = 1/8, ...)
        const auto tickLengthInSamples = tickAt * bpSmp;
        const auto ppqFromBufStart = fmod (pos, tickAt);
        const double ppqOffset = tickAt - ppqFromBufStart;
        const auto bufStartInSecs = lastKnownPosition_.timeInSeconds;
        const auto bufEndInSecs = bufStartInSecs + (buffer.getNumSamples() / getSampleRate());
        ppqEndVal = pos + ((bufEndInSecs - bufStartInSecs) * bps);
        const auto bufLengthInPPQ = bps * (buffer.getNumSamples() / getSampleRate());
        auto ppqToBufEnd = bufLengthInPPQ;
        auto ppqPosInBuf = ppqOffset;
        auto currentSampleToTick = 0;
        // reset tick state
        tickState.tickStartPosition = 0;
        if (ppqFromBufStart == 0)
        {
            // Buffer starts exactly on a tick boundary.
            ppqPosInBuf = 0.0;
        }
        if (ticks.getNumOfTicks() == 0)
        {
            tickState.clear();
            return;
        }
        // Schedule every tick whose start falls inside this buffer.
        while (ppqToBufEnd > ppqPosInBuf)
        {
            jassert (ppqToBufEnd >= ppqPosInBuf);
            // add tick(s) to current buffer
            currentSampleToTick = roundToInt (ppqPosInBuf * bpSmp);
            ppqPosInBuf += tickAt; // next sample
            tickState.beat = floor (fmod ((pos + ppqPosInBuf) / ttq, lastKnownPosition_.timeSigNumerator)); // + 1;
            if (tickState.beat == 0)
                tickState.beat = lastKnownPosition_.timeSigNumerator;
            // Pick the sound/gain assigned to this beat (1-based assignments).
            const auto& beatAssign = settings.beatAssignments[jlimit (1, TickSettings::kMaxBeatAssignments, tickState.beat) - 1];
            const auto tickIdx = jlimit (0, jmax ((int) ticks.getNumOfTicks() - 1, 0), beatAssign.tickIdx.get());
            tickState.refer[0] = ticks[tickIdx].getTickAudioBuffer();
            tickState.sample.makeCopyOf (AudioSampleBuffer (tickState.refer, 1, ticks[tickIdx].getLengthInSamples()));
            tickState.sample.applyGain (ticks[tickIdx].getGain());
            // LPF is per beat, less responsive but more optimized
            lpfFilter.setCoefficients (IIRCoefficients::makeLowPass (getSampleRate(), filterCutoff->load()));
            lpfFilter.processSamples (tickState.sample.getWritePointer (0), tickState.sample.getNumSamples());
            // Fade out the previous tick if it is still sounding.
            if (tickState.currentSample >= 0)
                TickUtils::fadeOut (tickState.sample);
            // hard-clip if needed
            TickUtils::processClip (tickState.sample);
            tickState.beatGain = beatAssign.gain.get();
            tickState.addTickSample (buffer, currentSampleToTick, tickLengthInSamples);
        }
        // Continue rendering a tick that spills over from a previous block.
        tickState.fillTickSample (buffer);
        ticks.getLock().unlock();
    }
}
//==============================================================================
// The plugin provides a GUI editor.
bool TickAudioProcessor::hasEditor() const
{
    return true;
}
// Creates the editor; ownership passes to the host/wrapper.
AudioProcessorEditor* TickAudioProcessor::createEditor()
{
    return new TickAudioProcessorEditor (*this);
}
//==============================================================================
// Serialize current settings + tick bank into destData (host save).
void TickAudioProcessor::getStateInformation (MemoryBlock& destData)
{
    // save
    MemoryOutputStream writeStream (destData, false);
    // Persist the current parameter value alongside the settings.
    settings.cutoffFilter.setValue (filterCutoff->load(), nullptr);
    settings.saveToArchive (writeStream, ticks, false, false);
}
// Restore settings + tick bank from a saved archive (host restore).
void TickAudioProcessor::setStateInformation (const void* data, int sizeInBytes)
{
    // ZipFile takes ownership of the stream (second argument = true), so the
    // bare 'new' here does not leak.
    auto* stream = new MemoryInputStream (data, sizeInBytes, false);
    ZipFile archive (stream, true);
    settings.loadFromArchive (archive, ticks, false);
    // Re-publish the restored cutoff through the parameter system.
    auto* cutOff = parameters.getParameter (IDs::filterCutoff);
    cutOff->setValueNotifyingHost (cutOff->convertTo0to1 (settings.cutoffFilter.get()));
}
// Current beat position as beat index + fractional progress through the beat.
double TickAudioProcessor::getCurrentBeatPos()
{
    // Quarter-notes per beat for the current time-signature denominator.
    const auto quartersPerBeat = 4.0 / lastKnownPosition_.timeSigDenominator;
    // Fractional progress through the current beat, in [0, 1).
    const auto beatFraction = fmod (lastKnownPosition_.ppqPosition, quartersPerBeat) / quartersPerBeat;
    return tickState.beat + beatFraction;
}
// Begin playback of a new tick at startPos within the current buffer and
// immediately render the portion that fits into this block.
void TickAudioProcessor::TickState::addTickSample (AudioBuffer<float>& bufferToFill, int startPos, int length)
{
    isClear = false;
    currentSample = 0;
    tickStartPosition = startPos;
    tickLengthInSamples = length;
    fillTickSample (bufferToFill);
    // Mark consumed; the caller resets tickStartPosition to 0 next block so a
    // spilled-over tick continues from the buffer start.
    tickStartPosition = -1;
}
// Copy the pending tick sample into the output buffer, resuming from
// currentSample when a tick spans multiple blocks.
void TickAudioProcessor::TickState::fillTickSample (AudioBuffer<float>& bufferToFill)
{
    if (tickStartPosition < 0)
        return; // fillTick was consumed
    if (currentSample < 0)
        return; // not active tick.
    // Clamp the copy length to the tick length, the remaining sample data and
    // the space left in the destination buffer.
    auto constrainedLength = jmin (tickLengthInSamples - currentSample, sample.getNumSamples() - currentSample, bufferToFill.getNumSamples() - tickStartPosition);
    const auto maxSampleChannelIndex = sample.getNumChannels() - 1;
    for (auto ch = 0; ch < bufferToFill.getNumChannels(); ch++)
    {
        // A mono tick source is duplicated across all output channels.
        bufferToFill.copyFrom (ch, tickStartPosition, sample, jlimit (0, maxSampleChannelIndex, ch), currentSample, constrainedLength);
    }
    // NOTE(review): gain is applied to the whole buffer, not just the copied
    // region — confirm that is intended.
    bufferToFill.applyGain (beatGain);
    currentSample += constrainedLength;
    if (currentSample == sample.getNumSamples())
    {
        currentSample = -1; // mark as not valid.
    }
}
// Reset all playback state; a negative currentSample marks "no active tick".
void TickAudioProcessor::TickState::clear()
{
    isClear = true;
    currentSample = -1;
    beat = 0;
    tickStartPosition = 0;
    tickLengthInSamples = 0;
}
//==============================================================================
// This creates new instances of the plugin..
// (Factory entry point called by the JUCE plugin wrappers.)
AudioProcessor* JUCE_CALLTYPE createPluginFilter()
{
    return new TickAudioProcessor();
}
|
<reponame>a1098832322/JWSysAssistant-2.0<filename>login/src/main/java/com/wishes/assistant/net/LibraryCrawler.java
package com.wishes.assistant.net;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import com.wishes.assistant.Constants;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.FormBody;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
/**
 * Utility class for library-related network requests: login, captcha
 * retrieval and user-info scraping.
 * Created by 郑龙 on 2018/5/6.
 */
public class LibraryCrawler {
    // Shared OkHttp client instance.
    private static OkHttpClient mClient = OKHttpUtils.getInstanceClient();

    /**
     * Log in to the library site with account number, password and captcha.
     *
     * NOTE(review): "<PASSWORD>" below is a redaction placeholder in this
     * snapshot (presumably the {@code passwd} parameter) — restore before use.
     *
     * @return true when login succeeded (the login prompt is no longer shown)
     */
    public static boolean loginLibrary(String account, String passwd, String captcha) {
        RequestBody body = new FormBody.Builder().add("number", account).add("passwd", <PASSWORD>).add("captcha", captcha)
                .add("select", "cert_no").add("returnUrl", "").build();
        Request request = new Request.Builder()
                .addHeader("Accept",
                        "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8")
                .addHeader("Content-Type", "application/x-www-form-urlencoded")
                .addHeader("User-Agent",
                        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3355.4 Safari/537.36")
                .post(body).url(Constants.libLoginUrl).build();
        try {
            Response response = mClient.newCall(request).execute();
            if (response.code() == 200) {
                String result = response.body().string();
                Document document = Jsoup.parse(result);
                Elements elements = document.getElementsByTag("strong");
                // Decide success by the text shown inside <strong> tags: if the
                // login-page heading is still present, the login failed.
                for (Element element : elements) {
                    String values = element.text().toString().trim();
                    if (values.equals("登录我的图书馆")) {
                        Log.d("Library Login Fail", "登陆失败!");
                        return false;
                    }
                }
                return true;
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return false;
    }

    /**
     * Fetch the captcha image asynchronously.
     * Results are delivered via the Handler: what=1 with the Bitmap in
     * {@code msg.obj} on success, what=0 on failure.
     */
    public static void getValidateCode(final Handler handler) {
        Request request = new Request.Builder().url(Constants.libCaptchaUrl).build();
        mClient.newCall(request).enqueue(new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                Log.e("Library getValidateCode", "fail");
                Message message = Message.obtain();
                message.what = 0;
                handler.sendMessage(message);
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                InputStream is = response.body().byteStream();
                Bitmap code = BitmapFactory.decodeStream(is);
                // Hand the bitmap out via the Handler.
                Message message = Message.obtain();
                message.what = 1;
                message.obj = code;
                handler.sendMessage(message);
            }
        });
    }

    /**
     * Fetch the user's basic library info by scraping the "mylib_info" table.
     * NOTE(review): NPE if the page has no element with id "mylib_info"
     * (e.g. when the session is not logged in) — confirm callers handle this.
     *
     * @param url page to scrape
     * @return the table's cell texts, row by row
     */
    public static List<String> getUserLibInfo(String url) {
        List<String> userInfoList = new ArrayList<>();
        // Issue the HTTP request.
        String result = getter(url, "");
        // Parse the returned HTML.
        Document document = Jsoup.parse(result);
        // Locate the info div.
        Element div = document.getElementById("mylib_info");
        // Extract table rows from the div.
        Elements trs = div.select("table").select("tr");
        for (int i = 0; i < trs.size(); i++) {
            Elements tds = trs.get(i).select("td");
            for (int j = 0; j < tds.size(); j++) {
                String text = tds.get(j).text();
                System.out.println(text);
                userInfoList.add(text);
            }
        }
        return userInfoList;
    }

    /**
     * Some get function
     * <br>
     * <p>
     * <p>params eg. <br></p>
     * <p>?id=123&pwd=<PASSWORD></p>
     *
     * @param url    base URL
     * @param params query string appended verbatim (may be empty)
     * @return the response body, or null on non-2xx / IO error
     */
    public static String getter(String url, String params) {
        Request request = new Request.Builder().url(url + params).get().build();
        try {
            Response response = mClient.newCall(request).execute();
            if (response.isSuccessful()) {
                return response.body().string();
            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return null;
    }
}
|
#ifndef ACCOUNT_H
#define ACCOUNT_H
#include <string>
#include <vector>
#include "date.h"
using namespace std;
class Loan;
// Exception type thrown by Account operations; carries an optional message.
class AccountException : public exception {
public:
    AccountException() : message{""} {}
    explicit AccountException(const string& msg) : message{msg} {}
    explicit AccountException(const char* msg) : message{msg} {}
    virtual ~AccountException() {}
    // Returns the stored message (empty when default-constructed).
    virtual const char* what() const noexcept { return message.c_str(); }
private:
    string message;
};
class Bank;
// A bank account owned by a customer of a Bank, optionally linked to a Loan.
// Balance is stored as int64_t — presumably currency minor units; confirm.
class Account {
public:
    // Constructors with increasing detail; parameter meanings are not visible
    // in this header — see the implementation file.
    Account(const string&, const string&, const Date&, Bank*);
    Account(const string&, const string&, const Date&, const int64_t&, Bank*);
    Account(const string&, const string&, const Date&, const int64_t&, const int&, const bool&, Bank*);
    Account(const Account&);
    Account(Account&&) noexcept;
    ~Account();
    void changeStatus(const bool&);   // set activated flag
    void setBalance(const int64_t&);
    void setValidationCount(const int&);
    void setAccountNumber(const string&);
    string getAccountNumber() const;
    string getOwnersId() const;
    bool getStatus() const;           // true when activated
    int64_t getBalance() const;
    int getValidationCount() const;
    Bank* getBank() const;
    Loan* getLoan() const;
    void setLoan(Loan*);
    Date getCreationDate() const;
    Account& operator=(const Account&);
    // String conversion — behavior defined in the implementation file.
    operator string() const;
private:
    string _accountNumber;
    string _ownersId;
    Date _creationDate;
    int64_t _balance;          // account balance
    int _validationCount;
    bool _activated;           // account active flag
    Bank* _bank;               // owning bank (raw pointer, not owned here)
    Loan* _loan;               // associated loan, may be null
    // Threshold constant (1e6); exact purpose not visible here — confirm.
    const static int64_t validAmount = 1e6;
    Account();                 // default construction is disallowed externally
    void reset();
};
#endif // ACCOUNT_H
|
#!/bin/sh
# Bootstrap the dev environment: create a Python venv, install Python
# dependencies into it, then install JS dependencies with yarn.
#
# Fix: 'source' is a bashism — under the #!/bin/sh shebang (dash/ash) it is
# not available; POSIX sh uses '.' to source a file.
python3 -m venv py_env &&
. py_env/bin/activate &&
pip3 install -r requirements.txt &&
deactivate &&
yarn
|
<gh_stars>1-10
package processor.misc.operands;
/**
 * Operand kinds recognized by the processor: a memory address or a literal
 * number.
 * Created by lionell on 24.02.16.
 *
 * @author <NAME>
 */
public enum Operands {
    // NOTE(review): constants use PascalCase rather than the conventional
    // UPPER_SNAKE_CASE; kept as-is since callers reference these names.
    Address, Number
}
|
/** Props for a link-like clickable component. */
export type Props = {
  // Optional content rendered inside the component.
  children?: JSX.Element
  // Optional click callback.
  onClickHandler?: () => void
  // Target URL (required).
  href: string
}
|
module Embulk
  module Parser
    # Parses Rails ActiveSupport-style log lines and merges consecutive lines
    # that share a PID into one record (keeping first/last timestamps), then
    # emits each record to the Embulk page builder.
    class ActiveSupportParser
      attr_accessor :log_format
      attr_accessor :decoder
      attr_accessor :current_data_record

      # Container structs for the data being handled.
      DataRecord = Struct.new(:pid, :message, :start_at, :end_at)
      DataItem = Struct.new(:severity_id, :timestamp, :pid, :severity, :message)

      # log_format_type selects the line regex; decoder supplies raw lines.
      def initialize(log_format_type, decoder)
        @log_format = select_format(log_format_type)
        @decoder = decoder
        @current_data_record = nil
      end

      # Stream every file/line from the decoder, emitting one row per PID run.
      def parse(page_builder)
        while decoder.nextFile
          while line = decoder.poll
            # Strip ANSI escape characters before matching.
            line = line.gsub(/\e/, '')
            data = line_to_item(line)
            next if data.nil?
            next if current_data_record.nil? && set_current_data_record(data)
            # PID changed = the log moved on to a different process's activity.
            if current_data_record.pid != data.pid
              page_builder.add(current_data_record.values)
              set_current_data_record(data)
            else
              current_data_record.message = current_data_record.message + "<br />" + data.message
              current_data_record.end_at = data.timestamp
            end
          end
        end
      end

      private

      # In practice this branching would be driven by a value from config.yml.
      def select_format(format_type)
        case format_type
        when 'simple'
          /\A([A-Z])*, \[([0-9T:.-]*) #([0-9]*)\] ([A-Z\s]*) -- : (.*)/
        # when 'detail'
        #   /\A([A-Z])*, \[([0-9T:.-]*) #([0-9]*)\] ([A-Z\s]*) -- : (.*)/)
        end
      end

      # Parse one raw line into a DataItem; returns nil when it doesn't match.
      def line_to_item(line)
        parsed = line.match(log_format)
        return nil if parsed.nil?
        severity_id = parsed[1].strip # apparently means "severity"
        timestamp = Time.new(parsed[2]).to_i # won't be passed through to Embulk otherwise
        pid = parsed[3].strip
        severity = parsed[4].gsub(/\s/, '')
        message = parsed[5].strip
        DataItem.new(*[severity_id, timestamp, pid, severity, message])
      end

      # Start a new record from this item (start_at and end_at = its timestamp).
      def set_current_data_record(data)
        @current_data_record = DataRecord.new(data.pid,
                                              data.message,
                                              data.timestamp,
                                              data.timestamp)
      end
    end
  end
end
|
#!/bin/bash
########################################################################
#
# Linux on Hyper-V and Azure Test Code, ver. 1.0.0
# Copyright (c) Microsoft Corporation
#
# All rights reserved.
# Licensed under the Apache License, Version 2.0 (the ""License"");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
# ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR
# PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
#
# See the Apache Version 2.0 License for specific language governing
# permissions and limitations under the License.
#
########################################################################
#
# STOR_Large_Disk_CopyFile.sh
# Description:
# This script will verify if you can copy 5G files on the disk, perform dd, wget, cp, nfs
#
# The test performs the following steps:
# 1. Creates partition
# 2. Creates filesystem
# 3. Performs copy operations by copy locally, wget, copy from nfs
# 4. Unmounts partition
# 5. Deletes partition
#
########################################################################
# Test-state strings written to ~/state.txt and read by the LIS test harness.
ICA_TESTRUNNING="TestRunning"
ICA_TESTCOMPLETED="TestCompleted"
ICA_TESTABORTED="TestAborted"
ICA_TESTFAILED="TestFailed"
# File sourced for test parameters (e.g. $Wget_Path, $fs, $driveName).
CONSTANTS_FILE="constants.sh"
# Log a message to stdout prefixed with a timestamp.
# ${1} - message text; quoted so embedded whitespace survives intact
# (the old unquoted backtick form collapsed runs of whitespace).
function LogMsg()
{
    echo "$(date "+%a %b %d %T %Y") : ${1}" # To add the timestamp to the log file
}
# Append a line to ~/summary.log (collected by the test harness).
# $1 - the message to record (quoted so spaces/globs survive intact)
function UpdateSummary()
{
    echo "$1" >> ~/summary.log
}
# Overwrite ~/state.txt with the current test state (one of the
# ICA_TEST* constants) so the ICA framework can track progress.
# $1 - the state string (quoted so it is written verbatim)
function UpdateTestState()
{
    echo "$1" > ~/state.txt
}
# test dd 5G files, dd one 5G file locally, then copy to /mnt which is mounted to disk
# Creates /root/data (bs=2048 * count=2500000 = 5,120,000,000 bytes, ~5 GB),
# copies it to /mnt and verifies the copy by comparing byte sizes.
# Globals read: driveName, fs, ICA_TESTFAILED. Exits 80 on failure.
function TestLocalCopyFile()
{
    LogMsg "Start to dd file"
    echo "start to dd file"
    # dd the 5G file; abort if dd itself fails, otherwise both size probes
    # below would come back empty and compare equal, masking the failure.
    if ! dd if=/dev/zero of=/root/data bs=2048 count=2500000; then
        LogMsg "dd of /root/data failed"
        echo "dd of /root/data failed" >> ~/summary.log
        UpdateTestState "$ICA_TESTFAILED"
        exit 80
    fi
    file_size=$(ls -l /root/data | awk '{ print $5}' | tr -d '\r')
    LogMsg "Successful dd file as /root/data"
    LogMsg "Start to copy file to /mnt"
    echo "start to copy file to /mnt"
    if ! cp /root/data /mnt; then
        LogMsg "cp /root/data to /mnt failed"
        echo "cp /root/data to /mnt failed" >> ~/summary.log
        rm -f /root/data
        UpdateTestState "$ICA_TESTFAILED"
        exit 80
    fi
    rm -f /root/data
    file_size1=$(ls -l /mnt/data | awk '{ print $5}' | tr -d '\r')
    echo "file_size after dd=$file_size"
    echo "file_size after copyed= $file_size1"
    if [[ "$file_size1" = "$file_size" ]]; then
        LogMsg "Successful copy file"
        LogMsg "Listing directory: ls /mnt/"
        ls /mnt/
        df -h
        # Leave /mnt empty for the next sub-test.
        rm -rf /mnt/*
        LogMsg "Disk test completed for ${driveName}1 with filesystem for copying 5G files ${fs} successfully"
        echo "Disk test completed for ${driveName}1 with filesystem for copying 5G files ${fs} successfully" >> ~/summary.log
    else
        LogMsg "Copying 5G file for ${driveName}1 with filesystem ${fs} failed"
        echo "Copying 5G file for ${driveName}1 with filesystem ${fs} failed" >> ~/summary.log
        UpdateTestState "$ICA_TESTFAILED"
        exit 80
    fi
}
# test wget file, wget one 5G file to /mnt which is mounted to disk
# Verifies the download by comparing the on-disk size with the server's
# Content-Length header. Globals read: Wget_Path, driveName, fs.
function TestWgetFile()
{
    file_basename=$(basename "$Wget_Path")
    wget -O "/mnt/$file_basename" "$Wget_Path"
    # grep -i: HTTP header names are case-insensitive and many servers
    # (and all HTTP/2 responses) send "content-length"; a case-sensitive
    # match would yield an empty expected size and fail spuriously.
    file_size=$(curl -sI "$Wget_Path" | grep -i Content-Length | awk '{print $2}' | tr -d '\r')
    file_size1=$(ls -l "/mnt/$file_basename" | awk '{ print $5}' | tr -d '\r')
    echo "file_size before wget=$file_size"
    echo "file_size after wget=$file_size1"
    if [[ "$file_size" = "$file_size1" ]]; then
        LogMsg "Drive wget to ${driveName}1 with filesystem ${fs} successfully"
        echo "Drive wget to ${driveName}1 with filesystem ${fs} successfully" >> ~/summary.log
    else
        LogMsg "Drive wget to ${driveName}1 with filesystem ${fs} failed"
        echo "Drive wget to ${driveName}1 with filesystem ${fs} failed" >> ~/summary.log
        UpdateTestState "$ICA_TESTFAILED"
        exit 80
    fi
    rm -rf /mnt/*
}
# test copy from nfs path, dd one file to /mnt_2 which is mounted to nfs, then copy to /mnt
# which is mounted to disk
# Globals read: NFS_Path, File_DD_Bs, File_DD_Count, ICA_TESTFAILED.
# Exits 80 on any failure (mount, size mismatch).
function TestNFSCopyFile()
{
    # /mnt_2 is the NFS mount point; /mnt stays on the disk under test.
    if [ ! -d "/mnt_2" ]; then
        mkdir /mnt_2
    fi
    mount -t nfs $NFS_Path /mnt_2
    if [ "$?" = "0" ]; then
        LogMsg "Mount nfs successfully from $NFS_Path"
        # Create the source file on the NFS share; size is driven by
        # File_DD_Bs * File_DD_Count from constants.sh.
        dd if=/dev/zero of=/mnt_2/data bs=$File_DD_Bs count=$File_DD_Count
        sleep 2
        LogMsg "Finish dd file in nfs path, start to copy to drive..."
        cp /mnt_2/data /mnt/
        sleep 2
        # Compare source and copy sizes (ls -l column 5 = size in bytes).
        file_size=`ls -l /mnt_2/data | awk '{ print $5}' | tr -d '\r'`
        file_size1=`ls -l /mnt/data | awk '{ print $5}' | tr -d '\r'`
        echo "file_size after dd=$file_size"
        echo "file_size after copy=$file_size1"
        # Leave /mnt empty for subsequent sub-tests.
        rm -rf /mnt/*
        if [ $file_size = $file_size1 ]; then
            LogMsg "Drive mount nfs and copy file successfully"
            echo "Drive mount nfs and copy file successfully">> ~/summary.log
        else
            LogMsg "Drive mount nfs and copy file failed"
            echo "Drive mount nfs and copy file failed" >> ~/summary.log
            UpdateTestState $ICA_TESTFAILED
            exit 80
        fi
        umount /mnt_2
    else
        LogMsg "Mount nfs ... from $NFS_Path failed"
        echo "Mount nfs ... from $NFS_Path failed" >> ~/summary.log
        UpdateTestState $ICA_TESTFAILED
        exit 80
    fi
}
# Format the disk and create a file system, mount and create file on it.
# $1 - block device to test (e.g. /dev/sdb)
# $2 - filesystem type to create (e.g. ext4)
# Which sub-tests run is gated by the TestLocalCopy / TestWget /
# TestNFSCopy "True" flags from constants.sh.
# NOTE(review): the partition is addressed as ${driveName}1 (global set by
# the caller's loop), which assumes sdX-style naming; NVMe-style devices
# (/dev/nvme0n1p1) would not be handled — confirm deployment targets.
function TestFileSystemCopy()
{
    drive=$1
    fs=$2
    # GPT label plus one primary partition spanning the disk,
    # 64-sector aligned at both ends.
    parted -s -- $drive mklabel gpt
    parted -s -- $drive mkpart primary 64s -64s
    if [ "$?" = "0" ]; then
        sleep 5
        # Clear stale filesystem signatures before formatting.
        wipefs -a "${driveName}1"
        # IntegrityCheck $driveName
        mkfs.$fs ${driveName}1
        if [ "$?" = "0" ]; then
            LogMsg "mkfs.${fs} ${driveName}1 successful..."
            mount ${driveName}1 /mnt
            if [ "$?" = "0" ]; then
                LogMsg "Drive mounted successfully..."
                # step 1: test for local copy file
                if [[ $TestLocalCopy = "True" ]]; then
                    LogMsg "Start to test local copy file"
                    TestLocalCopyFile
                fi
                if [[ $TestWget = "True" ]]; then
                    # step 2: wget 5GB file to disk
                    LogMsg "Start to test wget file"
                    TestWgetFile
                fi
                # step 3: mount nfs file, then copy file to disk
                if [[ $TestNFSCopy = "True" ]]; then
                    LogMsg "Start to test copy file from nfs mout point"
                    TestNFSCopyFile
                fi
                df -h
                # umount /mnt files
                umount /mnt
                if [ "$?" = "0" ]; then
                    LogMsg "Drive unmounted successfully..."
                fi
            else
                LogMsg "Error in mounting drive..."
                echo "Drive mount : Failed" >> ~/summary.log
                UpdateTestState $ICA_TESTFAILED
            fi
        else
            LogMsg "Error in creating file system ${fs}.."
            echo "Creating Filesystem : Failed ${fs}" >> ~/summary.log
            UpdateTestState $ICA_TESTFAILED
        fi
    else
        LogMsg "Error in executing parted ${driveName}1 for ${fs}"
        echo "Error in executing parted ${driveName}1 for ${fs}" >> ~/summary.log
        UpdateTestState $ICA_TESTFAILED
    fi
}
# Source the constants file (provides TC_COVERED, fileSystems, Wget_Path,
# NFS_Path and the TestLocalCopy/TestWget/TestNFSCopy flags).
if [ -e ~/${CONSTANTS_FILE} ]; then
    source ~/${CONSTANTS_FILE}
else
    msg="Error: in ${CONSTANTS_FILE} file"
    LogMsg "$msg"
    echo "$msg" >> ~/summary.log
    UpdateTestState "$ICA_TESTABORTED"
    exit 10
fi
# Create the state.txt file so ICA knows we are running
UpdateTestState "$ICA_TESTRUNNING"
# Cleanup any old summary.log files.
# Fixed: the original wrote "Covers: ..." to summary.log BEFORE this
# cleanup (losing the line) and then again after; write it only once,
# after the cleanup.
if [ -e ~/summary.log ]; then
    LogMsg "Cleaning up previous copies of summary.log"
    rm -f ~/summary.log
fi
# Make sure the constants.sh file exists in the working directory too
# (it is parsed below to count disk entries).
if [ ! -e ./constants.sh ];
then
    echo "Cannot find constants.sh file."
    UpdateTestState "$ICA_TESTABORTED"
    exit 1
fi
#Check for Testcase count
if [ -z "${TC_COVERED}" ]; then
    LogMsg "Warning: The TC_COVERED variable is not defined."
    echo "Warning: The TC_COVERED variable is not defined." >> ~/summary.log
fi
echo "Covers: ${TC_COVERED}" >> ~/summary.log
# Check for call trace log (runs in the background for the whole test)
dos2unix check_traces.sh
chmod +x check_traces.sh
./check_traces.sh &
# Count the number of SCSI= and IDE= entries in constants
diskCount=0
for entry in $(cat ./constants.sh)
do
    # Convert to lower case.
    # Fixed: the original used tr '[A-Z]' '[a-z' — the missing closing
    # bracket made the sets unequal and mis-mapped ']'.
    lowStr="$(tr '[:upper:]' '[:lower:]' <<<"$entry")"
    # does it start with ide or scsi
    if [[ $lowStr == ide* ]];
    then
        diskCount=$((diskCount+1))
    fi
    if [[ $lowStr == scsi* ]];
    then
        diskCount=$((diskCount+1))
    fi
done
echo "constants disk count= $diskCount"
# Compute the number of sd* drives on the system ([^0-9] skips partitions).
# NOTE(review): assumes sdX-style names; NVMe disks (/dev/nvme0n1) are not
# matched — confirm the target platforms.
for driveName in /dev/sd*[^0-9];
do
    # Skip /dev/sda (the OS disk)
    if [ "${driveName}" = "/dev/sda" ]; then
        continue
    fi
    for fs in "${fileSystems[@]}"; do
        LogMsg "Start testing filesystem: $fs"
        StartTst=$(date +%s.%N)
        command -v "mkfs.$fs"
        if [ $? -ne 0 ]; then
            echo "File-system tools for $fs not present. Skipping filesystem $fs.">> ~/summary.log
            LogMsg "File-system tools for $fs not present. Skipping filesystem $fs."
        else
            TestFileSystemCopy "$driveName" "$fs"
            EndTst=$(date +%s.%N)
            DiffTst=$(echo "$EndTst - $StartTst" | bc)
            LogMsg "End testing filesystem: $fs; Test duration: $DiffTst seconds."
        fi
    done
done
UpdateTestState "$ICA_TESTCOMPLETED"
exit 0
|
<filename>frontend/src/routes/Deployer/DeployPage/DeployConsole/SignerManager/SignerManager.tsx
import { UserOutlined, UserSwitchOutlined } from '@ant-design/icons';
import { useQuery } from '@apollo/client';
import { BeaconWallet } from '@taquito/beacon-wallet';
import { TezosToolkit } from '@taquito/taquito';
import { importKey, InMemorySigner } from '@taquito/signer';
import { Button } from 'antd';
import React, { useCallback, useEffect } from 'react';
import { useTranslation } from 'react-i18next';
import { Endpoint, GET_ENDPOINT } from '../../../../../graphql/endpoint';
import { useDeployState } from '../../../state';
import "./SignerManager.css";
import { SignerCard } from './SignerCard';
/**
 * Chooses and wires up the signing mechanism for contract deployments.
 *
 * Depending on the endpoint configuration fetched via GET_ENDPOINT, signing
 * happens through a Beacon wallet ('wallet'), a faucet account ('faucet'),
 * or an in-memory private key ('privateKey'). The chosen signer and mode
 * are pushed into the shared deploy state (useDeployState).
 */
export const SignerManager: React.FC = () => {
    const { t } = useTranslation();
    const { data, loading, error } = useQuery<{ endpoint: Endpoint }>(GET_ENDPOINT);
    const [Tezos, setTezosToolkit] = useDeployState('Tezos');
    const [wallet, setWallet] = useDeployState('wallet');
    const [signer, setSigner] = useDeployState('signer');
    const [signWith, setSignedWith] = useDeployState('signWith');
    const [opHash] = useDeployState('operationHash');
    // Lazily create the TezosToolkit once the endpoint URL is known.
    useEffect(() => {
        if (Tezos) {
            return;
        }
        if (!data || loading || error) {
            return;
        }
        setTezosToolkit(new TezosToolkit(data.endpoint.url));
    }, [Tezos, data, loading, error, setTezosToolkit]);
    // Create the Beacon wallet (once) and mark 'wallet' as the signing mode.
    const setupWallet = useCallback(() => {
        if (wallet) {
            return;
        }
        const w = new BeaconWallet({
            name: t('deployer.signer.wallet.name'),
            disclaimerText: t('deployer.signer.wallet.disclaimerText')
        });
        setSignedWith('wallet');
        setWallet(w);
    }, [wallet, setWallet, setSignedWith, t]);
    // Pick the signing mode from the endpoint config:
    // - mainnet, or testnet without faucet/private key -> Beacon wallet
    // - faucet account -> importKey with the faucet credentials
    // - configured private key -> InMemorySigner
    useEffect(() => {
        if (wallet || signer) {
            return;
        }
        if (!Tezos) {
            return;
        }
        if (!data || loading || error) {
            return;
        }
        const endpoint = data.endpoint;
        if (endpoint.scope === 'mainnet' || (endpoint.scope === 'testnet' && !endpoint.faucet && !endpoint.signerPrivateKey)) {
            setupWallet();
        } else {
            if (endpoint.faucet) {
                const { email, password, mnemonic, secret } = endpoint.faucet;
                // Mnemonic gets always as string from backend
                importKey(Tezos, email, password, mnemonic as string, secret);
                setSigner(email);
                setSignedWith('faucet');
            } else if (endpoint.signerPrivateKey) {
                const pk = endpoint.signerPrivateKey;
                Tezos.setProvider({
                    signer: new InMemorySigner(pk)
                });
                // Display an abbreviated key (first 3 / last 4 chars) as the
                // signer label instead of leaking the full private key.
                setSigner(`${pk.substring(0, 3)}...${pk.substring(pk.length - 4)}`);
                setSignedWith('privateKey');
            }
        }
    }, [t, wallet, signer, Tezos, data, loading, error, setupWallet, setSigner, setSignedWith]);
    // Wallet flow: ask the user for permissions, then use the wallet's
    // public key hash as the signer label.
    const handleSetupSigner = useCallback(async () => {
        if (!Tezos || !wallet || !data) {
            return;
        }
        await wallet.requestPermissions({
            network: { type: data.endpoint.protocolVersion },
        });
        const address = await wallet.getPKH();
        // Tezos.setWalletProvider(wallet);
        Tezos.setProvider({ wallet });
        setSigner(address);
    }, [Tezos, wallet, data, setSigner]);
    if (loading) {
        return <>{t('loadingEndpointSettings')}</>;
    }
    if (error) {
        return <>
            <h1>{t('endpointSettingsError')}</h1>
            <code>{JSON.stringify(error, null, 2)}</code>
        </>;
    }
    // The choose/change button is only shown in wallet mode before an
    // operation has been submitted (no opHash yet).
    return (
        <div className="signer-manager">
            <SignerCard signer={signer} signWith={signWith} />
            { (signWith === 'wallet' && !opHash) &&
                <div className="call-to-action">
                    { !signer
                        ? <Button
                            type="primary"
                            size="large"
                            onClick={handleSetupSigner}
                            disabled={!!signer}
                            loading={loading || !!error || !Tezos}
                            icon={<UserOutlined />}
                        >{t('deployer.chooseSigner')}</Button>
                        : <Button
                            type="default"
                            size="middle"
                            onClick={handleSetupSigner}
                            loading={loading || !!error || !Tezos}
                            icon={<UserSwitchOutlined />}
                        >{t('deployer.changeSigner')}</Button>
                    }
                </div>
            }
        </div>
    );
}
|
<filename>src/main/java/top/luozhou/classpath/impl/DirClassEntry.java
package top.luozhou.classpath.impl;
import top.luozhou.classpath.ClassEntry;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
 * ClassEntry backed by a plain directory: class files are read directly
 * from the filesystem, resolved against the directory's absolute path.
 *
 * @author luozhou
 * @since 2020-02-16
 */
public class DirClassEntry implements ClassEntry {

    /** Absolute path of the directory holding the class files. */
    private final Path absPath;

    public DirClassEntry(String path) {
        this.absPath = Paths.get(path).toAbsolutePath();
    }

    @Override
    public byte[] readClass(String className) throws IOException {
        Path classFile = absPath.resolve(className);
        return Files.readAllBytes(classFile);
    }
}
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { CrudService } from '@app/core';
import { Device } from '../entities';
import { OmitType } from '@nestjs/swagger';
import { ServiceName } from '../enums';
// DTO for device upserts: a Device without its server-managed fields
// (id, authId, service, ip) — those are filled in by the service itself.
export class NewDevice extends OmitType(Device, [
  'id',
  'authId',
  'service',
  'ip',
]) {}
@Injectable()
export class DevicesService extends CrudService<Device> {
  constructor(
    @InjectRepository(Device)
    repo: Repository<Device>,
  ) {
    super(repo);
  }

  /**
   * Upserts a device keyed by its udid.
   *
   * An existing device is merged with the incoming data; otherwise a new
   * entity is created. The service is always overwritten; authId (with a
   * fresh authedAt timestamp) and ip are only set when provided.
   */
  async updateDevice(
    data: NewDevice,
    service: ServiceName,
    ip?: string,
    authId?: number,
  ): Promise<Device> {
    const existing = await this.findOneBy('udid', data.udid);
    const device = existing
      ? this.repo.merge(existing, data)
      : this.create(data);
    device.service = service;
    if (authId) {
      device.authId = authId;
      device.authedAt = new Date();
    }
    if (ip) {
      device.ip = ip;
    }
    return this.save(device);
  }
}
|
import {Injectable} from '@angular/core';
import {HttpClient, HttpHeaders} from '@angular/common/http';
import {environment} from '../../environments/environment';
import {Connection} from '../util/connection';
import {Project} from '../db/entities/project';
import {Observable} from 'rxjs';
import {map} from 'rxjs/operators';
import {Converter} from '../util/Converter';
import {SpinnerOverlayService} from './spinner-overlay.service';
@Injectable()
export class ProjectService {
  // True while a create/update round-trip is pending a refetch.
  public isWaitingForNextFetch = false;
  // Backend base URL, chosen once from the build configuration.
  private readonly currentConnectionURL = null;

  constructor(private http: HttpClient, private spinnerService: SpinnerOverlayService) {
    console.log('ProjectService constructor is called');
    if (environment.production) {
      this.currentConnectionURL = Connection.prodURLConnection;
    } else {
      this.currentConnectionURL = Connection.localURLConnection;
    }
  }

  /**
   * returns the loaded projects of the server
   *
   * @return array of Project
   */
  public fetchData(): Observable<Project[]> {
    const httpOptions = {
      headers: new HttpHeaders({
        'Content-Type': 'application/json'
      })
    };
    try {
      return this.http.get<Project[]>(this.currentConnectionURL + '/project', httpOptions).pipe(
        map((data: Project[]) => data.map(res => {
          return new Project(res.orderNumber, res.projectDescription, res.start,
            res.end, res.reminder, res.startReminder, res.endReminder, Converter.convertToNormalUserName(res.responsiblePersonName));
        })));
    } finally {
      // NOTE(review): this `finally` runs synchronously when the Observable
      // is CREATED, not when the request completes — the spinner is hidden
      // before any data arrives. Confirm whether this is intended.
      this.spinnerService.hide();
      this.isWaitingForNextFetch = false;
    }
  }

  /** POSTs a new project; callers are expected to refetch afterwards. */
  public create(project: Project) {
    this.http.post<Project>(this.currentConnectionURL + '/project', project).subscribe(res => {
    });
    this.isWaitingForNextFetch = true;
    this.spinnerService.show();
  }

  /** PUTs an updated project; callers are expected to refetch afterwards. */
  public update(project: Project) {
    // Fixed: removed a leftover `debugger;` statement that froze the app
    // on every update whenever devtools were open.
    this.http.put<Project>(this.currentConnectionURL + '/project', project).subscribe(res => {
    });
    this.isWaitingForNextFetch = true;
    this.spinnerService.show();
  }

  /** DELETEs a project by order number; caller subscribes to the result. */
  public delete(orderNumber: number): Observable<Project> {
    return this.http.delete<Project>(this.currentConnectionURL + '/project/' + orderNumber);
  }
}
|
package nl.dulsoft.demo.schedulingjobs;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SchedulingJobsApplicationTests {

    // Kept for ad-hoc debugging when the sleep below is re-enabled.
    private static final Logger LOGGER = LoggerFactory.getLogger(SchedulingJobsApplicationTests.class);

    /**
     * Smoke test: passes as long as the Spring application context starts.
     *
     * @throws InterruptedException declared for the (commented-out) sleep
     *         that lets the scheduled job fire while observing the logs
     */
    @Test
    public void contextLoads() throws InterruptedException {
        // Sleep for 2 minutes allowing the job to be executed twice
        // Thread.sleep(60 * 1000 * 2);
    }
}
|
#!/bin/bash
# Menu-driven browser for an mpd music library (intended to feed a
# dmenu/rofi-style selector: each invocation prints the next menu).
#
# WARNING: this script is SELF-MODIFYING — set_current() rewrites the
# `current=""` assignment line of this very file ($0) with sed -i so the
# browsing position persists between invocations.

# Persist the new browsing position into this script file.
# Escapes sed metacharacters ('/' and '&') in the path before patching
# the current="..." line in-place.
set_current() {
	current="$1"
	current_replacement=$(sed 's/[/&]/\\&/g' <<< "$1")
	sed -i "s/\(^current[^\"]*\"\)[^\"]*/\1$current_replacement/" $0
}
# Go up one directory level; an empty string means the library root.
back() {
	[[ $current =~ / ]] && current="${current%/*}" || current=''
	set_current "$current"
}
# Poll until the backgrounded mpc update process ($pid) exits, then
# pop a desktop notification.
notify_on_finish() {
	while kill -0 $pid 2> /dev/null; do
		sleep 1
	done && ~/.orw/scripts/notify.sh "Music library updated."
}
# Browsing position; the value between the quotes is rewritten in-place
# by set_current() on every navigation.
current=""
if [[ -z $@ ]]; then
	set_current ''
else
	case "$@" in
		back) back;;
		update)
			# Detach mpc update via a coprocess; COPROC_PID + 1 presumably
			# targets the backgrounded child inside the coprocess subshell —
			# TODO confirm (PID adjacency is not guaranteed).
			coproc (mpc -q update &)
			pid=$((COPROC_PID + 1))
			coproc (notify_on_finish &);;
		refresh);;
		add_all)
			# Queue the whole current directory, then step back up.
			mpc add "$current"
			back;;
		*.mp3)
			# A track was picked: queue it (spaces re-escaped for mpc).
			[[ $current ]] && current+='/'
			mpc add "$current${@// /\ }";;
		*)
			# Anything else is a directory: descend into it.
			file="${@// /\ }"
			[[ $current ]] && current+="/$file" || current="$file"
			set_current "$current";;
	esac
fi
# Emit the next menu: optional 'back', the fixed actions, a separator,
# then the current directory listing (playlists *.m3u filtered out,
# paths reduced to their basename).
[[ $current ]] && echo -e 'back'
echo -e 'update\nrefresh\nadd_all\n━━━━━━━'
mpc ls "$current" | awk -F '/' '! /m3u$/ { print $NF }'
|
import 'directives/queryBuilder/queryBuilder.directive';
declare var CPALS: any;
declare var autosize: any;
declare var document: any;
let moduleName = CPALS.modules.directives.MAIN,
directiveName = 'queryBuilder',
tpl = require('directives/queryBuilder/queryBuilder.html');
// Unit tests for the queryBuilder directive.
// The real QueryBuilderController is replaced with a jasmine-spy stub via
// $controllerProvider, so these tests exercise only the directive's
// template wiring and DOM event plumbing.
describe(moduleName + '.' + directiveName, () => {
    let $scope = null,
        element = null,
        // Stub orchestrator handed out by the stubbed controller.
        orchestrator = {
            getQuery: jasmine.createSpy("getQuery").and.returnValue([])
        },
        field = '',
        // Spy-only stand-in for the directive's controller.
        QueryBuilderController = {
            setNextState: jasmine.createSpy("setNextState"),
            filterText: jasmine.createSpy("filterText").and.returnValue(field),
            deleteAllFragments: jasmine.createSpy("deleteAllFragments"),
            setPasteEvent: jasmine.createSpy("setPasteEvent"),
            setTypedQuery: jasmine.createSpy("setTypedQuery"),
            setDeleteStatement: jasmine.createSpy("setDeleteStatement"),
            keyValidations: jasmine.createSpy("keyValidations"),
            setCursorPositionWhenRange: function () { },
            setStatusValidity: jasmine.createSpy("setStatusValidity"),
            instanceOrchestrator: jasmine.createSpy("instanceOrchestrator"),
            orchestrator: jasmine.createSpy("orchestrator").and.returnValue(orchestrator),
            selected: ''
        },
        $httpBackend = null,
        directiveHtml = '<query-builder query-value="queryValue"></query-builder>';
    // Register the stub controller before the module is loaded.
    beforeEach(angular.mock.module(
        moduleName, ($controllerProvider) => {
            $controllerProvider.register('QueryBuilderController', () => {
                return QueryBuilderController;
            });
        }));
    // Compile the directive against a fresh scope; the template fetch is
    // answered from the local `tpl` fixture via $httpBackend.
    beforeEach(angular.mock.inject(($compile, $rootScope, _$httpBackend_) => {
        function onNewSearch () {
            return true;
        }
        $scope = $rootScope.$new();
        $scope.onNewSearch = onNewSearch;
        $scope.queryValue = "";
        $httpBackend = _$httpBackend_;
        $httpBackend.whenGET('scripts/app/directives/queryBuilder/queryBuilder.html').respond(200, tpl);
        element = $compile(directiveHtml)($scope);
        // Attached to the real document so click/focus events propagate.
        document.body.appendChild(element[0]);
        $scope.$digest();
        $httpBackend.flush();
    }));
    it('Should compile the directive' + directiveHtml, () => {
        expect(element.html()).not.toBe('');
    });
    it("Should setNextState method have to been colled when search Button is pressed", () => {
        var searchButton,
            selectionStart = 0;
        searchButton = angular.element(element[0].querySelector("#glassSearch"));
        searchButton.trigger('click');
        expect(QueryBuilderController.setNextState).toHaveBeenCalledWith(field, selectionStart, true);
        $scope.$apply();
    });
    it('should detect whether the input has value' + directiveHtml, () => {
        expect(element.isolateScope().isInputEmpty()).toBe(true);
        element.find('textarea').val('somenthing');
        expect(element.isolateScope().isInputEmpty()).toBe(false);
    });
    it('Should setPasteEvent method have been called when keydown on input tag', () => {
        var input = element.find('textarea');
        var event = document.createEvent('KeyboardEvent');
        event.initEvent("keydown", true, true);
        input[0].dispatchEvent(event);
        expect(QueryBuilderController.setTypedQuery).toHaveBeenCalled();
    });
    // xit: disabled — keyup validation flow under review.
    xit('Should call keyValidations method when keyup on input', () => {
        var input = element.find('textarea');
        var event = document.createEvent('KeyboardEvent');
        spyOn(QueryBuilderController, 'setCursorPositionWhenRange');
        event.initEvent("keyup", true, true);
        input[0].dispatchEvent(event);
        expect(QueryBuilderController.keyValidations).toHaveBeenCalled();
    });
    // xit: disabled — depends on the cursor-range behavior above.
    xit('Should set selection range when setCursorPositionWhenRange is true', () => {
        var input = element.find('textarea'),
            initalValue = "somenthing",
            event = document.createEvent('KeyboardEvent');
        spyOn(QueryBuilderController, 'setCursorPositionWhenRange').and.returnValue(true);
        event.initEvent("keyup", true, true);
        input.val(initalValue);
        input[0].dispatchEvent(event);
        expect(QueryBuilderController.setCursorPositionWhenRange).toHaveBeenCalled();
        expect(input.val()).toBe(initalValue + "''");
    });
    it('Should call instanceOrchestrator method when is focus on input', () => {
        var input = element.find('textarea');
        var event = document.createEvent('KeyboardEvent');
        event.initEvent("focus", true, true);
        input[0].dispatchEvent(event);
        expect(QueryBuilderController.instanceOrchestrator).toHaveBeenCalled();
    });
    it('Should modify the input value on model change, when selected exist', () => {
        var selectedItem = 'MyItem';
        QueryBuilderController.selected = selectedItem;
        $scope.model = "AnyValue";
        $scope.$digest();
        $scope.model = "OtherValue";
        $scope.$digest();
        expect(element.find('textarea').val()).toBe(selectedItem);
    });
    it('Should clear the input value on model change, when selected doesn´t exist', () => {
        var selectedItem = null;
        QueryBuilderController.selected = selectedItem;
        $scope.model = "AnyValue";
        $scope.$digest();
        $scope.model = "OtherValue";
        $scope.$digest();
        expect(element.find('textarea').val()).toBe('');
    });
    it('Should autosize update method have been called when keydown on input tag', () => {
        var input = element.find('textarea');
        var event = document.createEvent('KeyboardEvent');
        spyOn(autosize, 'update');
        event.initEvent("keydown", true, true);
        input[0].dispatchEvent(event);
        expect(autosize.update).toHaveBeenCalled();
    });
    it('Should update status when keyup is fired', () => {
        var input = element.find('textarea');
        var event = document.createEvent('KeyboardEvent');
        event.initEvent("keyup", true, true);
        input[0].dispatchEvent(event);
        expect(QueryBuilderController.keyValidations).toHaveBeenCalled();
    });
    it("Should delete All fragments when new search is pressed", () => {
        var newSearchLink,
            selectionStart = 0;
        newSearchLink = element.find("a.new-search");
        var event = document.createEvent('MouseEvents');
        spyOn(autosize, 'update');
        event.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null);
        newSearchLink[0].dispatchEvent(event);
        expect(QueryBuilderController.deleteAllFragments).toHaveBeenCalled();
        expect(autosize.update).toHaveBeenCalled();
    });
});
|
TERMUX_PKG_HOMEPAGE=https://xorg.freedesktop.org/
TERMUX_PKG_DESCRIPTION="X.org 75dpi fonts"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=1.0.3
TERMUX_PKG_REVISION=24
# Five upstream font tarballs bundled into one package;
# TERMUX_PKG_SRCURL[i] pairs with TERMUX_PKG_SHA256[i].
TERMUX_PKG_SRCURL=("https://xorg.freedesktop.org/releases/individual/font/font-adobe-75dpi-1.0.3.tar.bz2"
		   "https://xorg.freedesktop.org/releases/individual/font/font-adobe-utopia-75dpi-1.0.4.tar.bz2"
		   "https://xorg.freedesktop.org/releases/individual/font/font-bh-75dpi-1.0.3.tar.bz2"
		   "https://xorg.freedesktop.org/releases/individual/font/font-bh-lucidatypewriter-75dpi-1.0.3.tar.bz2"
		   "https://xorg.freedesktop.org/releases/individual/font/font-bitstream-75dpi-1.0.3.tar.bz2")
TERMUX_PKG_SHA256=("c6024a1e4a1e65f413f994dd08b734efd393ce0a502eb465deb77b9a36db4d09"
		   "8732719c61f3661c8bad63804ebfd54fc7de21ab848e9a26a19b1778ef8b5c94"
		   "3486aa51ac92c646a448fe899c5c3dae0024b1fef724d5100d52640d1cac721c"
		   "4ac16afbe205480cc5572e2977ea63488c543d05be0ea8e5a94c845a6eebcb31"
		   "ba3f5e4610c07bd5859881660753ec6d75d179f26fc967aa776dbb3d5d5cf48e")
TERMUX_PKG_DEPENDS="fontconfig-utils, xorg-font-util, xorg-fonts-alias, xorg-fonts-encodings, xorg-mkfontscale"
TERMUX_PKG_CONFLICTS="xorg-fonts-lite"
# Fonts only — no architecture-specific artifacts.
TERMUX_PKG_PLATFORM_INDEPENDENT=true
TERMUX_PKG_BUILD_IN_SRC=true
# Download all five font tarballs (verified against their SHA256) and
# unpack each into the shared source directory.
termux_step_get_source() {
	mkdir -p "$TERMUX_PKG_SRCDIR"
	local idx tarball
	for idx in "${!TERMUX_PKG_SRCURL[@]}"; do
		tarball=$(basename "${TERMUX_PKG_SRCURL[idx]}")
		termux_download "${TERMUX_PKG_SRCURL[idx]}" "$tarball" "${TERMUX_PKG_SHA256[idx]}"
		tar xf "$tarball" -C "${TERMUX_PKG_SRCDIR}"
	done
}
# Configure, build and install each unpacked font package in turn.
# The per-package directory name is the tarball name minus .tar.bz2.
termux_step_make_install() {
	local idx tarball pkgdir
	for idx in "${!TERMUX_PKG_SRCURL[@]}"; do
		tarball=$(basename "${TERMUX_PKG_SRCURL[idx]}")
		pkgdir="${TERMUX_PKG_SRCDIR}/${tarball%%.tar.bz2}"
		pushd "$pkgdir"
		./configure \
			--prefix="${TERMUX_PREFIX}" \
			--host="${TERMUX_HOST_PLATFORM}" \
			--with-fontdir="${TERMUX_PREFIX}/share/fonts/75dpi"
		make -j "${TERMUX_MAKE_PROCESSES}"
		make install
		popd
	done
}
# Remove generated font index files (fonts.dir / fonts.scale / fonts.alias);
# presumably they are regenerated on-device by xorg-mkfontscale, which this
# package depends on — TODO confirm.
termux_step_post_make_install() {
	rm -f "${TERMUX_PREFIX}"/share/fonts/75dpi/fonts.*
}
# Install the package COPYING file into the doc directory.
# Quoted expansions: the original left $TERMUX_PREFIX and
# $TERMUX_PKG_BUILDER_DIR unquoted, breaking on paths with spaces/globs.
termux_step_install_license() {
	install -Dm600 -t "$TERMUX_PREFIX/share/doc/xorg-fonts-75dpi" "$TERMUX_PKG_BUILDER_DIR/COPYING"
}
|
import pickle

import numpy as np
# Fixed: `import sklearn` does not expose the `sklearn.svm` submodule
# (AttributeError at sklearn.svm.SVC); import the submodule explicitly.
# The SVC import also ensures the class is resolvable when unpickling.
from sklearn import svm  # noqa: F401

# Load the pre-trained classifier.
# Fixed: `pickle` was never imported (NameError on pickle.load), and the
# throwaway `SVC()` created beforehand was dead code (immediately
# overwritten) — removed.
# NOTE(review): unpickling requires a compatible scikit-learn version and
# is unsafe on untrusted files.
with open('classifier.pkl', 'rb') as f:
    clf = pickle.load(f)


def classify_text(texts):
    """Predict a label for each input text.

    NOTE(review): a bare SVC cannot consume raw strings — this assumes the
    pickled object is a Pipeline that includes a text vectorizer. TODO
    confirm against the training code.
    """
    # compile the inputs into a single array
    data = np.array(texts)
    # predict the labels
    labels = clf.predict(data)
    return labels


labels = classify_text([
    "This article discusses politics in the United States.",
    "This article discusses the history of the British Empire.",
    "This article discusses the effects of climate change."
])
print(labels)  # -> ['Politics', 'History', 'Climate Change']
|
<reponame>Kristopher38/LuaCPU
/******************************************************************************
* *
* License Agreement *
* *
* Copyright (c) 2009 Altera Corporation, San Jose, California, USA. *
* All rights reserved. *
* *
* Permission is hereby granted, free of charge, to any person obtaining a *
* copy of this software and associated documentation files (the "Software"), *
* to deal in the Software without restriction, including without limitation *
* the rights to use, copy, modify, merge, publish, distribute, sublicense, *
* and/or sell copies of the Software, and to permit persons to whom the *
* Software is furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *
* DEALINGS IN THE SOFTWARE. *
* *
* This agreement shall be governed in all respects by the laws of the State *
* of California and by the laws of the United States of America. *
* *
* Altera does not recommend, suggest or require that this reference design *
* file be used in conjunction or combination with any other product. *
******************************************************************************/
#include <errno.h>
#include "system.h"
/*
* Provides an interrupt registry mechanism for the any CPUs internal interrupt
* controller (IIC) when the enhanced interrupt API is active.
*/
#ifndef ALT_CPU_EIC_PRESENT
#ifdef ALT_ENHANCED_INTERRUPT_API_PRESENT
#include "alt_types.h"
#include "sys/alt_irq.h"
#include "priv/alt_iic_isr_register.h"
/*
* The header, alt_irq_entry.h, contains the exception entry point, and is
* provided by the processor component. It is included here, so that the code
* will be added to the executable only if alt_irq_register() is present, i.e.
* if no interrupts are registered - there's no need to provide any
* interrupt handling.
*/
#include "sys/alt_irq_entry.h"
/*
* The header, alt_irq_table.h contains a table describing which function
* handles each interrupt.
*/
#include "priv/alt_irq_table.h"
/** @Function Description: This function registers an interrupt handler.
* If the function is succesful, then the requested interrupt will be enabled
* upon return. Registering a NULL handler will disable the interrupt.
*
* @API Type: External
* @param ic_id Interrupt controller ID
* @param irq IRQ ID number
* @param isr Pointer to interrupt service routine
* @param isr_context Opaque pointer passed to ISR
* @param flags
* @return 0 if successful, else error (-1)
*/
int alt_iic_isr_register(alt_u32 ic_id, alt_u32 irq, alt_isr_func isr,
  void *isr_context, void *flags)
{
  int rc = -EINVAL;
  int id = irq; /* IRQ interpreted as the interrupt ID. */
  alt_irq_context status;
  /* Out-of-range IDs leave rc at -EINVAL; `flags` is accepted for API
   * compatibility but unused here. */
  if (id < ALT_NIRQ)
  {
    /*
     * interrupts are disabled while the handler tables are updated to ensure
     * that an interrupt doesn't occur while the tables are in an inconsistent
     * state.
     */
    status = alt_irq_disable_all();
    alt_irq[id].handler = isr;
    alt_irq[id].context = isr_context;
    /* A non-NULL handler enables the IRQ; a NULL handler disables it
     * (per the contract in the function header above). */
    rc = (isr) ? alt_ic_irq_enable(ic_id, id) : alt_ic_irq_disable(ic_id, id);
    /* Restore the interrupt-enable state captured before the update. */
    alt_irq_enable_all(status);
  }
  return rc;
}
#endif /* ALT_ENHANCED_INTERRUPT_API_PRESENT */
#endif /* ALT_CPU_EIC_PRESENT */
|
import java.util.*;
/**
 * Tracks active API sessions and indexes them by chat room (CHAT sessions)
 * or by user (SNITCH sessions) for fast lookup.
 */
public class APISessionManager {

    private Logger logger;
    private Set<APISession> sessions;
    private Map<String, List<APISession>> chatTakers;
    private Map<String, List<APISession>> snitchTakers;

    public APISessionManager() {
        this.logger = new Logger();
        this.sessions = new HashSet<>();
        this.chatTakers = new HashMap<>();
        this.snitchTakers = new HashMap<>();
    }

    /** Registers a session and indexes it under its room or user key. */
    public void addSession(APISession session) {
        sessions.add(session);
        if (session.getType() == SessionType.CHAT) {
            indexSession(chatTakers, session.getChatRoomId(), session);
        } else if (session.getType() == SessionType.SNITCH) {
            indexSession(snitchTakers, session.getUserId(), session);
        }
        logger.log("Session added: " + session.getId());
    }

    /** Unregisters a session, dropping its index bucket once empty. */
    public void removeSession(APISession session) {
        sessions.remove(session);
        if (session.getType() == SessionType.CHAT) {
            unindexSession(chatTakers, session.getChatRoomId(), session);
        } else if (session.getType() == SessionType.SNITCH) {
            unindexSession(snitchTakers, session.getUserId(), session);
        }
        logger.log("Session removed: " + session.getId());
    }

    // Adds the session to the bucket for `key`, creating it on demand.
    private static void indexSession(Map<String, List<APISession>> index,
                                     String key, APISession session) {
        index.computeIfAbsent(key, k -> new ArrayList<>()).add(session);
    }

    // Removes the session from the bucket for `key`; empty buckets are
    // discarded so the index map does not accumulate stale keys.
    private static void unindexSession(Map<String, List<APISession>> index,
                                       String key, APISession session) {
        List<APISession> bucket = index.get(key);
        if (bucket == null) {
            return;
        }
        bucket.remove(session);
        if (bucket.isEmpty()) {
            index.remove(key);
        }
    }

    // NOTE(review): returns the live internal list when present, matching
    // the original behavior — callers must not mutate it.
    public List<APISession> getSessionsByChatRoom(String chatRoomId) {
        return chatTakers.getOrDefault(chatRoomId, Collections.emptyList());
    }

    public List<APISession> getSessionsByUser(String userId) {
        return snitchTakers.getOrDefault(userId, Collections.emptyList());
    }
}
|
<reponame>mohamedkhairy/dhis2-android-sdk
/*
* Copyright (c) 2004-2021, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.core.data.attribute;
import static org.hisp.dhis.android.core.data.utils.FillPropertiesTestUtils.fillNameableProperties;
import org.hisp.dhis.android.core.attribute.Attribute;
import org.hisp.dhis.android.core.common.ValueType;
public class AttributeSamples {
    /**
     * Builds a sample TEXT-valued, non-mandatory, non-unique Attribute for
     * tests. Every entity-assignment flag is false except
     * {@code programStageAttribute}, so the sample attaches to program
     * stages only. Nameable fields are filled by
     * {@link org.hisp.dhis.android.core.data.utils.FillPropertiesTestUtils#fillNameableProperties}.
     */
    public static Attribute getAttribute() {
        Attribute.Builder attributeBuilder = Attribute.builder();
        fillNameableProperties(attributeBuilder);
        attributeBuilder
                .id(1L)
                .valueType(ValueType.TEXT)
                .mandatory(false)
                .unique(false)
                .indicatorAttribute(false)
                .indicatorGroupAttribute(false)
                .userGroupAttribute(false)
                .dataElementAttribute(false)
                .constantAttribute(false)
                .categoryOptionAttribute(false)
                .optionSetAttribute(false)
                .sqlViewAttribute(false)
                .legendSetAttribute(false)
                .trackedEntityAttributeAttribute(false)
                .organisationUnitAttribute(false)
                .dataSetAttribute(false)
                .documentAttribute(false)
                .validationRuleGroupAttribute(false)
                .dataElementGroupAttribute(false)
                .sectionAttribute(false)
                .trackedEntityTypeAttribute(false)
                .userAttribute(false)
                .categoryOptionGroupAttribute(false)
                // the single entity type this sample attribute applies to
                .programStageAttribute(true)
                .programAttribute(false)
                .categoryAttribute(false)
                .categoryOptionComboAttribute(false)
                .categoryOptionGroupSetAttribute(false)
                .validationRuleAttribute(false)
                .programIndicatorAttribute(false)
                .organisationUnitGroupAttribute(false)
                .dataElementGroupSetAttribute(false)
                .organisationUnitGroupSetAttribute(false)
                .optionAttribute(false);
        return attributeBuilder.build();
    }
}
|
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import block from 'bem-cn';
import { bind } from 'decko';
import './Menu.scss';
/** What an entry may render: a React element or plain text. */
type EntryContent = JSX.Element | string;

/** A clickable menu entry. */
interface IEntry {
  content: EntryContent;
  onClick(): void;
}

/** A non-clickable entry; the consumer decides how its disabled state looks. */
interface IDisabledEntry {
  content: EntryContent;
  renderDisabledEntry(content: EntryContent): EntryContent;
}

type Entry = IEntry | IDisabledEntry;

type EntryHeight = 'small' | 'normal' | 'big';

interface IProps {
  /** Groups of entries, rendered as visually separated sections. */
  entriesSections: Entry[][];
  /** Scroll container watched to keep the portal-rendered list in place. */
  scrollableParent?: HTMLElement;
  entryHeight?: EntryHeight;
  withVerticallyArrangedIcon?: boolean;
  /** Which side the list opens towards; defaults to 'right'. */
  menuPosition?: 'left' | 'right';
  /** Invoked on every open/close transition. */
  onToggle?(): void;
}

interface IState {
  open: boolean;
  /** Absolute page coordinates of the portal-rendered dropdown list. */
  listPosition: {
    left: number;
    top: number;
  };
}

// BEM class-name generator for the "menu" block.
const b = block('menu');
/**
 * Dropdown menu toggled by an icon (three circles by default, or custom
 * children). The entry list is rendered into document.body through a portal
 * and positioned absolutely next to the icon; document-level click and scroll
 * listeners close or reposition it while it is open.
 */
class Menu extends React.PureComponent<IProps, IState> {
  public state: IState = {
    open: false,
    listPosition: this.getListPosition(),
  };

  // Refs to the toggle icon and the portal-rendered list element.
  private icon: HTMLDivElement | null = null;
  private list: HTMLDivElement | null = null;

  // NOTE(review): componentWillMount is deprecated in newer React versions;
  // consider componentDidMount when upgrading.
  public componentWillMount() {
    document.body.addEventListener('click', this.handleDocumentBodyClick);
  }

  public componentWillUnmount() {
    document.body.removeEventListener('click', this.handleDocumentBodyClick);
  }

  public render() {
    const { open } = this.state;
    const { children } = this.props;
    return (
      <div className={b({ open })()}>
        <div
          className={b('icon', { highlighted: open })()}
          onClick={this.handleIconClick}
          ref={this.initIcon}
        >
          {children ? children : this.renderCircles()}
        </div>
        {/* Rendered into document.body so ancestor overflow/stacking contexts
            cannot clip the list. */}
        {ReactDOM.createPortal(this.renderList(), document.body)}
      </div>
    );
  }

  private renderList() {
    const { open } = this.state;
    const { entriesSections, menuPosition = 'right' } = this.props;
    return (
      <div
        className={b('list', { open, position: menuPosition })()}
        ref={this.initList}
        style={{ ...this.state.listPosition }}
      >
        {entriesSections.map(this.renderEntriesSection)}
      </div>
    );
  }

  // Computes the absolute page position for the list: directly under the icon,
  // left-aligned for a right-opening menu, right-aligned otherwise. Falls back
  // to (0, 0) until both refs are available.
  @bind
  private getListPosition() {
    if (this.icon && this.list) {
      const { menuPosition } = this.props;
      const iconBoundingClientRect = this.icon.getBoundingClientRect();
      const iconLeft = iconBoundingClientRect.left;
      const iconTop = iconBoundingClientRect.top;
      const iconWidth = iconBoundingClientRect.width;
      const iconHeight = iconBoundingClientRect.height;
      const listWidth = this.list.getBoundingClientRect().width;
      const left = menuPosition === 'right' ? iconLeft : iconLeft - (listWidth - iconWidth);
      return {
        left,
        top: iconTop + iconHeight,
      };
    }
    return {
      left: 0,
      top: 0,
    };
  }

  @bind
  private updateListPosition(callback?: () => void) {
    const listPosition = this.getListPosition();
    this.setState(() => ({ listPosition }), callback);
  }

  // Default icon: three small circles, horizontal or vertical.
  @bind
  private renderCircles() {
    const { withVerticallyArrangedIcon } = this.props;
    return (
      <div className={b('circles', { 'vertically-arranged': !!withVerticallyArrangedIcon })()}>
        {[1, 2, 3].map((key) => (
          <div className={b('circles-item')()} key={key} />
        ))}
      </div>
    );
  }

  @bind
  private initIcon(x: HTMLDivElement | null) {
    this.icon = x;
  }

  @bind
  private initList(x: HTMLDivElement | null) {
    this.list = x;
  }

  // Closes the menu when a click lands outside of the toggle icon.
  @bind
  private handleDocumentBodyClick(event: MouseEvent) {
    const { open } = this.state;
    if (this.icon) {
      if (open && !this.icon.contains(event.target as Node)) {
        this.toggleMenu();
      }
    } else {
      console.error('selected option not initialized');
    }
  }

  @bind
  private renderEntriesSection(x: Entry[], index: number) {
    return (
      <section className={b('entries-section')()} key={index}>
        {x.map(this.renderEntry)}
      </section>
    );
  }

  @bind
  private renderEntry(x: Entry, index: number) {
    const { entryHeight = 'normal' } = this.props;
    return isDisabledEntry(x)
      ? (
        <div className={b('disabled-entry', { height: entryHeight })()} key={index}>
          {x.renderDisabledEntry(x.content)}
        </div>
      )
      : (
        <div className={b('entry', { height: entryHeight })()} onClick={x.onClick} key={index}>
          {x.content}
        </div>
      );
  }

  @bind
  private handleIconClick(event: React.MouseEvent<HTMLDivElement>) {
    this.toggleMenu();
    // to avoid event handling in wrappers above, because it causes menu closing
    // (handleDocumentBodyClick invokes)
    event.stopPropagation();
    event.preventDefault();
  }

  @bind
  private toggleMenu() {
    const { onToggle } = this.props;
    if (onToggle) {
      onToggle();
    }
    this.setState(({ open }: IState) => ({ open: !open }), this.toggleScrollListeners);
  }

  // Scroll listeners are attached only while the menu is open (so the list
  // follows the icon) and removed again on close.
  @bind
  private toggleScrollListeners() {
    const { scrollableParent } = this.props;
    const addListeners = () => {
      scrollableParent && scrollableParent.addEventListener('scroll', this.handleScroll);
      window.addEventListener('scroll', this.handleScroll);
    };
    if (this.state.open) {
      this.updateListPosition(addListeners);
    } else {
      scrollableParent && scrollableParent.removeEventListener('scroll', this.handleScroll);
      window.removeEventListener('scroll', this.handleScroll);
    }
  }

  // Repositions the open list on scroll; when the list would overflow the
  // scrollable parent horizontally, the menu is closed instead.
  // Fix: updateListPosition() was previously also called unconditionally after
  // the overflow branch — i.e. immediately after toggleMenu() closed the menu,
  // and a second time in the non-overflow case — causing a redundant extra
  // setState on every scroll event. The early return removes that.
  @bind
  private handleScroll() {
    const { scrollableParent } = this.props;
    if (scrollableParent && this.list) {
      const parentBoundingClientRect = scrollableParent.getBoundingClientRect();
      const listBoundingClientRect = this.list.getBoundingClientRect();
      const parentLeft = parentBoundingClientRect.left;
      const parentWidth = parentBoundingClientRect.width;
      const listLeft = listBoundingClientRect.left;
      const listWidth = listBoundingClientRect.width;
      const isParentOverflown = listLeft > parentLeft + parentWidth - listWidth;
      if (isParentOverflown) {
        this.toggleMenu();
        return;
      }
    }
    this.updateListPosition();
  }
}
/**
 * Type guard distinguishing the two entry variants: an entry counts as
 * disabled when it carries a (truthy) renderDisabledEntry callback.
 */
function isDisabledEntry(entry: Entry): entry is IDisabledEntry {
  const candidate = entry as IDisabledEntry;
  return Boolean(candidate.renderDisabledEntry);
}
export { IProps as IMenuProps, Entry as IMenuEntry };
export default Menu;
|
import json
def read_and_write_config(config: dict, path: str = "config.json") -> int:
    """Serialize ``config`` with int-coerced keys to a JSON file.

    Args:
        config: Mapping whose keys are ints or numeric strings; values may be
            any JSON-serializable object.
        path: Output file path. Defaults to ``config.json`` in the current
            working directory, preserving the original hard-coded behavior.

    Returns:
        The number of entries written.

    Raises:
        ValueError: If any key cannot be converted with ``int()``.

    Note:
        ``json.dump`` writes int keys back out as strings, so the ``int(k)``
        coercion mainly validates that keys are numeric (and merges duplicates
        such as ``"1"`` and ``1``).
    """
    modified_config = {int(k): v for k, v in config.items()}
    with open(path, "w", encoding="utf-8") as f:
        json.dump(modified_config, f)
    return len(modified_config)
|
#include<iostream>
#include<stack>
#include<string>
using namespace std;
// Returns the precedence of a binary operator (higher binds tighter).
static int precedenceOf(char op) {
    return (op == '*' || op == '/') ? 2 : 1;
}

// Pops the top operator and its two operands, applies it, and pushes the
// result back onto the value stack. Requires at least two values and one
// operator on the stacks.
static void applyTopOperator(std::stack<int>& values, std::stack<char>& ops) {
    char op = ops.top();
    ops.pop();
    int rhs = values.top();
    values.pop();
    int lhs = values.top();
    values.pop();
    switch (op) {
        case '+': values.push(lhs + rhs); break;
        case '-': values.push(lhs - rhs); break;
        case '*': values.push(lhs * rhs); break;
        case '/': values.push(lhs / rhs); break;  // integer division, truncates
    }
}

/**
 * Evaluates an arithmetic expression of non-negative integers, the binary
 * operators + - * / (with the usual precedence: * and / bind tighter), and
 * parentheses. Unrecognized characters (e.g. whitespace) are skipped.
 *
 * Fix: the previous version applied each operator immediately after reading
 * it — before its right-hand operand existed — popping an empty stack
 * (undefined behavior) on any valid input, and it had no parenthesis
 * handling at all, even though the sample input is "5*(4+6)".
 *
 * Limitations: no unary minus, no overflow checking, division by zero is
 * undefined (as in plain C++ integer division).
 */
int evaluateExpression(std::string expression) {
    std::stack<int> values;  // operand stack
    std::stack<char> ops;    // operator/parenthesis stack

    for (std::string::size_type i = 0; i < expression.length();) {
        char c = expression[i];
        if (c >= '0' && c <= '9') {
            // Accumulate a multi-digit number.
            int value = 0;
            while (i < expression.length() && expression[i] >= '0' && expression[i] <= '9') {
                value = value * 10 + (expression[i] - '0');
                ++i;
            }
            values.push(value);
        } else if (c == '(') {
            ops.push(c);
            ++i;
        } else if (c == ')') {
            // Reduce everything back to the matching '('.
            while (!ops.empty() && ops.top() != '(') {
                applyTopOperator(values, ops);
            }
            if (!ops.empty()) {
                ops.pop();  // discard the '('
            }
            ++i;
        } else if (c == '+' || c == '-' || c == '*' || c == '/') {
            // Reduce pending operators of equal or higher precedence first,
            // which also yields left-to-right evaluation for equal precedence.
            while (!ops.empty() && ops.top() != '(' && precedenceOf(ops.top()) >= precedenceOf(c)) {
                applyTopOperator(values, ops);
            }
            ops.push(c);
            ++i;
        } else {
            ++i;  // skip whitespace / unknown characters
        }
    }

    // Apply any remaining operators.
    while (!ops.empty()) {
        applyTopOperator(values, ops);
    }
    return values.top();
}
// Demo driver: evaluate one sample expression and print the result.
int main() {
    const std::string expression = "5*(4+6)";
    std::cout << evaluateExpression(expression) << std::endl;
    return 0;
}
// Expected output:
// 50
|
package com.blog.board.service;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.springframework.stereotype.Service;
import com.blog.board.dao.MemberDAO;
import com.blog.board.domain.MemberVO;
/**
 * Default {@link MemberService} implementation. Every operation is a thin
 * delegation to {@link MemberDAO}; no additional business logic lives here.
 */
@Service("MemberService")
public class MemberServiceImpl implements MemberService {

    /** Member data-access object, injected by bean name. */
    @Resource(name="MemberDAO")
    private MemberDAO memberDAO;

    /** Looks up the member record matching the submitted login form data. */
    public MemberVO selectLoginForm(MemberVO memberVO)
    {
        return this.memberDAO.selectLoginForm(memberVO);
    }

    /**
     * Checks whether the given member id already exists (duplicate-id check).
     * NOTE(review): the capitalized method name breaks Java conventions but is
     * dictated by the MemberService interface, so it is kept as-is.
     */
    public MemberVO SelectIdCheck(MemberVO checkidVO)
    {
        return this.memberDAO.SelectIdCheck(checkidVO);
    }

    /** Persists a new member from the sign-up form. */
    public void InsertSignUpForm(MemberVO signVO)
    {
        this.memberDAO.InsertSignUpForm(signVO);
    }

    /**
     * Fetches the member record for the login-password flow — presumably used
     * for password verification; confirm against the MemberDAO mapper.
     */
    public MemberVO selectLoginPasswordForm(MemberVO memberPVO)
    {
        return this.memberDAO.selectLoginPasswordForm(memberPVO);
    }
}
|
<reponame>rav3r/flutter
//
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN

/**
 * A protocol for actions that are performed on accessibility elements.
 * Conformers supply both the action's behavior (-perform:error:) and a
 * human-readable -name used in failure output.
 */
@protocol GREYAction<NSObject>

/**
 * Perform the action specified by the GREYAction object on an @c element if and only if the
 * @c element matches the constraints of the action.
 *
 * @param element The element the action is to be performed on. This must not be @c nil.
 * @param[out] errorOrNil Error that will be populated on failure. The implementing class should
 * handle the behavior when it is @c nil by, for example, logging the error
 * or throwing an exception.
 *
 * @return @c YES if the action succeeded, else @c NO. If an action returns @c NO, it does not
 * mean that the action was not performed at all but somewhere during the action execution
 * the error occurred and so the UI may be in an unrecoverable state.
 */
- (BOOL)perform:(id)element error:(__strong NSError *_Nullable *)errorOrNil;

/**
 * A method to get the name of this action.
 *
 * @return The name of the action. If the action fails, then the name is printed along with all
 * other relevant information.
 */
- (NSString *)name;

@end

NS_ASSUME_NONNULL_END
|
import pprint
import os.path
import json
import time
import ipdb  # NOTE(review): imported but unused here — likely a leftover debug import

# Raw city records, loaded once at import time from a JSON-lines datasource
# (one independent JSON document per line).
all_cities_raw = None
with open(os.path.join("datasource", "all_cities_full.json"), "r") as f:
    all_cities_raw = list(map(lambda x: json.loads(x), f.readlines()))
# --------------------------------------------------
# Find cities with best matches to the char and highest ratio of salaray / cost
# Input:
# - occupation: string
# - city_char: object structure as defined in datasource/city_char.py
# output:
# - List of top 10 matches, with details as indicated in datasource/city_char.py
# And expected salary and cost
# --------------------------------------------------
def rate_city_single_core(occupation, city_char):
    """Rank all loaded cities by characteristics match plus salary/cost score.

    Args:
        occupation: Occupation name, matched against each city's cached
            salary entries (see rate_city_job).
        city_char: Preference dict (structure per datasource/city_char.py);
            must contain the keys read by rate_city_char: "temp_low",
            "temp_high", "precip" and "settle_type".

    Returns:
        List of (city_record, combined_score) tuples, best match first.
    """
    global all_cities_raw
    # Pre-filter: keep the 500 best cities by characteristics score alone.
    # (An earlier comment said "top 50", but the slice below takes 500.)
    score_cities_char = map(lambda x: rate_city_char(x, city_char), all_cities_raw)
    best_cities_char = sorted(score_cities_char, key=lambda x: x[1], reverse=True)[:500]
    # Add the job/cost score on top of the characteristics score and re-sort.
    job_cities_score = map(lambda x: (x[0], rate_city_job(x[0], occupation) + x[1]), best_cities_char)
    best_cities_cost = sorted(job_cities_score, key=lambda x: x[1], reverse=True)
    return best_cities_cost
def rampf(x):
    """Ramp function: pass positive values through, clamp everything else to 0."""
    if x > 0:
        return x
    return 0
def rate_city_char(city_obj, city_char):
    """Score one city against the desired characteristics (0-50 points).

    Climate contributes up to 25 points (temperature-range fit plus
    precipitation closeness) and urbanization up to 25 (density bucket vs.
    the closest preferred settlement type).
    """
    # Penalize temperatures that fall outside the preferred [low, high] band.
    temp_penalty = rampf(city_char["temp_low"] - city_obj["temp_low"]) + rampf(city_obj["temp_high"] - city_char["temp_high"])
    temp_score = rampf(12.5 - temp_penalty)
    precip_score = rampf(12.5 - abs(city_char["precip"] - city_obj["precip"]) / 2)
    climate_score = temp_score + precip_score

    # Bucket the city by population density: 0=rural, 1=suburban, 2=urban.
    density = city_obj["density"]
    if density < 300:
        city_type = 0
    elif density < 1000:
        city_type = 1
    else:
        city_type = 2

    # Compare with the closest of the preferred settlement types.
    preferred_types = [get_settle_type(t) for t in city_char["settle_type"]]
    urban_score = 25 - min(abs(t - city_type) for t in preferred_types) * 12.5

    return (city_obj, climate_score + urban_score)
def rate_city_job(city_obj, occupation):
    """Score a city's salary-to-cost ratio for the occupation on a 0-50 scale."""
    # Cities without cached job data, or with a zero cost index, cannot be scored.
    if "job" not in city_obj or city_obj["cost"] == 0:
        return 0

    # First cached salary entry matching the occupation, if any.
    salary = next((job["salary"] for job in city_obj["job"] if job["name"] == occupation), 0)
    if salary == 0:
        # will call indeed api here
        # for now just hard code value
        salary = 30000

    average_household_income = 55000
    # "cost" is a percentage index relative to the average household income.
    ratio = salary / (city_obj["cost"] / 100.0 * average_household_income)

    # Clamp to [0, 50]: ratios <= 0.5 score 0, ratios >= 2 score the maximum,
    # anything in between scales linearly.
    if ratio > 2:
        return 50
    if ratio < 0.5:
        return 0
    return (ratio - 0.5) / 1.5 * 50
def get_settle_type(type_str):
    """Map a settlement-type name to its numeric bucket.

    "rural" -> 0, "suburban" -> 1, anything else (e.g. "urban") -> 2.
    """
    return {"rural": 0, "suburban": 1}.get(type_str, 2)
if __name__ == "__main__":
    # load_data_to_memory()
    # Fix: rate_city_char reads city_char["precip"], so a single preferred
    # precipitation value must be supplied — the previous precip_low/precip_high
    # pair raised a KeyError as soon as scoring started. The midpoint of the
    # old 0-20 range is used here.
    city_char = {
        "temp_low": 40,
        "temp_high": 120,
        "precip": 10,
        "settle_type": ["urban"],
    }
    occupation = "Physical Therapist"
    pp = pprint.PrettyPrinter(indent=4)
    start_time = time.time()
    all_cities = rate_city_single_core(occupation, city_char)
    print("--- finished in %s seconds ---" % (time.time() - start_time))
    all_cities_result = all_cities
    # Dump the full ranked result for offline inspection.
    with open(".tmp.txt", "w") as f:
        pprint.pprint(all_cities_result, stream=f, indent=4)
|
-- List name and age of every employee earning more than 5000.
SELECT name, age FROM employees WHERE salary > 5000;
|
<gh_stars>0
import defineDependentFunctions from './internal/react/defineDependentFunctions.js';
import defineStandardComponent from './api/defineStandardComponent.js';
import defineAdvancedComponent from './api/defineAdvancedComponent.js';
import hyperscript from './api/hyperscript.js';
import Component from './api/Component.js';
import Constraints from './api/Constraints.js';
import React from 'react';
import ReactDOM from 'react-dom';
const {
defineBasicComponent,
defineFunctionalComponent,
isElement
} = defineDependentFunctions({
Component: React.Component,
createElement: createElement,
createFactory: React.createFactory,
isValidElement: React.isValidElement
});
export {
createElement,
defineAdvancedComponent,
defineStandardComponent,
defineFunctionalComponent,
defineBasicComponent,
hyperscript,
isElement,
render,
Component,
Constraints,
};
const createElement = React.createElement;
/**
 * Renders a virtual element into a DOM node.
 *
 * @param content    A valid element (validated with isElement).
 * @param targetNode A DOM node, or the id of one to look up.
 * @returns whatever ReactDOM.render returns.
 * @throws {TypeError} when content is not a valid element.
 */
function render(content, targetNode) {
  if (!isElement(content)) {
    throw new TypeError(
      "[render] First argument 'content' has to be a valid element");
  }

  const target = typeof targetNode === 'string'
    ? document.getElementById(targetNode)
    : targetNode;

  return ReactDOM.render(content, target);
}
|
#!/usr/bin/env bash
# Benchmark the currently checked-out git ref against another ref and compare
# the results with benchcmp.
#
# Usage: ./script.sh <ref-to-compare>

# exit immediately when a command fails
set -e
# only exit with zero if all commands of the pipeline exit successfully
set -o pipefail
# error on unset variables
set -u

# Fix: the argument check previously only echoed and fell through, after which
# set -u killed the script with a confusing "unbound variable" error on $1.
if [ "$#" -ne 1 ]; then
  echo "One argument required, $# provided." >&2
  exit 1
fi

REF_CURRENT="$(git rev-parse --abbrev-ref HEAD)"
REF_TO_COMPARE="$1"

# Benchmark outputs go to throwaway temp files.
RESULT_CURRENT="$(mktemp)"
RESULT_TO_COMPARE="$(mktemp)"

echo ""
echo "### Testing ${REF_CURRENT}"
go test -benchmem -run=NONE -bench=. ./... | tee "${RESULT_CURRENT}"
echo ""
echo "### Done testing ${REF_CURRENT}"

echo ""
echo "### Testing ${REF_TO_COMPARE}"
git checkout "${REF_TO_COMPARE}"
go test -benchmem -run=NONE -bench=. ./... | tee "${RESULT_TO_COMPARE}"
echo ""
echo "### Done testing ${REF_TO_COMPARE}"
# return to the ref we started from
git checkout -

echo ""
echo "### Result"
echo "old=${REF_TO_COMPARE} new=${REF_CURRENT}"
benchcmp "${RESULT_TO_COMPARE}" "${RESULT_CURRENT}"
|
<reponame>smagill/opensphere-desktop
package io.opensphere.core;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Enumeration;
import java.util.List;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.ValidationEvent;
import javax.xml.bind.ValidationEventHandler;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import org.apache.log4j.Logger;
import org.xml.sax.SAXException;
import io.opensphere.core.util.collections.New;
/**
* Loader for plug-in configurations.
*/
/**
 * Loader for plug-in configurations.
 */
public class PluginConfigLoader
{
    /** Logger reference. */
    private static final Logger LOGGER = Logger.getLogger(PluginConfigLoader.class);

    /** Name of the plug-in configuration XML file, resolved on the classpath. */
    private static final String PLUGIN_LOADER_PROPERTIES_FILENAME = "pluginLoader.xml";

    /** Classpath path to the XSD file used to validate the configuration XML. */
    private static final String PLUGIN_LOADER_XSD_FILENAME = "/pluginLoader.xsd";

    /**
     * Get the list of plug-in configurations, aggregated from every
     * pluginLoader.xml found on the classpath. Entries already contributed by
     * an earlier file are dropped from later ones.
     *
     * @return The list of plug-in configurations.
     * @throws IllegalStateException if the unmarshaller cannot be created.
     */
    public List<PluginLoaderData> getPluginConfigurations()
    {
        URL schemaURL = getSchemaURL();
        Schema schema = getSchema(schemaURL);
        Unmarshaller unmarshaller;
        try
        {
            unmarshaller = setUpUnmarshaller(schema);
        }
        catch (JAXBException e)
        {
            LOGGER.error("Failed to create unmarshaller for plugin configurations: " + e, e);
            throw new IllegalStateException("Plugin configurations could not be loaded.", e);
        }

        List<PluginLoaderData> configurations = New.list();
        try
        {
            Enumeration<URL> urls = PluginConfigLoader.class.getClassLoader().getResources(PLUGIN_LOADER_PROPERTIES_FILENAME);
            while (urls.hasMoreElements())
            {
                URL url = urls.nextElement();
                // try-with-resources replaces the previous manual close() in a
                // finally block; the stream is now closed on every exit path.
                try (InputStream is = url.openStream())
                {
                    List<PluginLoaderData> pluginLoaderData = ((PluginLoaderCollection)unmarshaller.unmarshal(is))
                            .getPluginLoaderData();
                    if (!configurations.isEmpty())
                    {
                        // Drop duplicates already collected from another file.
                        pluginLoaderData.removeAll(configurations);
                    }
                    configurations.addAll(pluginLoaderData);
                }
                catch (JAXBException e)
                {
                    // A single bad config file should not abort loading the rest.
                    LOGGER.error("Failed to unmarshal plugin config at " + url + ": " + e, e);
                }
            }
        }
        catch (IOException e)
        {
            LOGGER.error("Error trying to read plugin configurations: " + e, e);
        }
        return configurations;
    }

    /**
     * Get the schema at the specified URL.
     *
     * @param schemaURL The URL for the schema.
     * @return The schema.
     * @throws IllegalStateException if the schema cannot be parsed.
     */
    private Schema getSchema(URL schemaURL)
    {
        Schema schema;
        SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        try
        {
            schema = schemaFactory.newSchema(schemaURL);
        }
        catch (SAXException e)
        {
            String errorStr = "Error parsing plugin schema file: " + PLUGIN_LOADER_XSD_FILENAME;
            LOGGER.error(errorStr, e);
            throw new IllegalStateException(errorStr, e);
        }
        return schema;
    }

    /**
     * Get a URL for the XSD.
     *
     * @return The XSD URL.
     * @throws IllegalStateException if the XSD is not on the classpath.
     */
    private URL getSchemaURL()
    {
        URL schemaURL = PluginConfigLoader.class.getResource(PLUGIN_LOADER_XSD_FILENAME);
        if (schemaURL == null)
        {
            String errorStr = "Unable to load plugin schema file: " + PLUGIN_LOADER_XSD_FILENAME.substring(1)
                    + " not found in the classpath.";
            LOGGER.error(errorStr);
            throw new IllegalStateException(errorStr);
        }
        return schemaURL;
    }

    /**
     * Set up the unmarshaller for the XML.
     *
     * @param schema The XML schema.
     * @return The unmarshaller.
     * @throws JAXBException if the unmarshaller cannot be created.
     */
    private Unmarshaller setUpUnmarshaller(Schema schema) throws JAXBException
    {
        JAXBContext jc = JAXBContext.newInstance(PluginLoaderCollection.class.getPackage().getName());
        Unmarshaller unmarshaller = jc.createUnmarshaller();
        unmarshaller.setSchema(schema);
        unmarshaller.setEventHandler(new PluginXMLValidatorHandler());
        return unmarshaller;
    }

    /** Handler for validation errors: logs and continues unmarshalling. */
    private static final class PluginXMLValidatorHandler implements ValidationEventHandler
    {
        @Override
        public boolean handleEvent(ValidationEvent event)
        {
            String error = "Problem validating xml, continuing but objects may be invalid. " + event.getMessage();
            LOGGER.error(error);
            // Returning true tells JAXB to keep going despite the problem.
            return true;
        }
    }
}
|
<reponame>muthukumaravel7/armnn
// Appears to be Doxygen-generated navigation data (symbol name -> xhtml
// anchor) for Serializer.cpp; do not edit by hand.
var _serializer_8cpp =
[
    [ "GetFlatBufferActivationFunction", "_serializer_8cpp.xhtml#aac3bf4453f8a909ca23f290089df8ff1", null ],
    [ "GetFlatBufferArgMinMaxFunction", "_serializer_8cpp.xhtml#a6fcb1eefde815b0b7465a689c8d26b50", null ]
];
|
// Smoke tests for the existy/falsy helpers in scripts/utils.js.
// Run with node; a failed expectation throws.
const expect = require('expect.js');
const $ = require('../scripts/utils.js');

// for existy: anything other than null/undefined counts as existing,
// including other falsy JS values (false, 0, '', ...).
expect($.existy(null)).to.be(false);
expect($.existy(undefined)).to.be(false);
expect($.existy(false)).to.be(true);
expect($.existy({})).to.be(true);
expect($.existy([])).to.be(true);
expect($.existy(0)).to.be(true);

// for falsy: looser than plain JS truthiness — the strings '0' and 'false'
// are treated as falsy here even though they are truthy in JS.
expect($.falsy(null)).to.be(true);
expect($.falsy(undefined)).to.be(true);
expect($.falsy(false)).to.be(true);
expect($.falsy(0)).to.be(true);
expect($.falsy('0')).to.be(true);
expect($.falsy(0.0)).to.be(true);
expect($.falsy('false')).to.be(true);
expect($.falsy('')).to.be(true);
expect($.falsy(true)).to.be(false);
expect($.falsy([])).to.be(false);
expect($.falsy({})).to.be(false);
expect($.falsy(1)).to.be(false);
|
package baubles.api;
/**
 * The equipment slot categories a bauble item can occupy.
 */
public enum BaubleType {
    /** Ring slot. */
    RING,
    /** Amulet slot. */
    AMULET,
    /** Belt slot. */
    BELT
}
|
<reponame>MissionBit/missionbit.org<gh_stars>1-10
import * as React from "react";
import { SvgIconProps } from "@material-ui/core/SvgIcon";
import { createSvgIcon } from "@material-ui/core";
// Windows logo glyph wrapped as a Material-UI SvgIcon; the path data is
// authored against a 448x512 viewBox (set on the component below).
const Windows = createSvgIcon(
  <path d="M0 93.7l183.6-25.3v177.4H0V93.7zm0 324.6l183.6 25.3V268.4H0v149.9zm203.8 28L448 480V268.4H203.8v177.9zm0-380.6v180.1H448V32L203.8 65.7z" />,
  "Windows"
);

/** Renders the Windows logo; all SvgIconProps are forwarded to the icon. */
const WindowsLogo = (props: SvgIconProps): JSX.Element => (
  <Windows viewBox="0 0 448 512" {...props} />
);

export default WindowsLogo;
|
#!/usr/bin/env bash
# Starts jphp-site.jar in the background unless an instance is already running.
# The PID of the running instance is tracked in app.pid next to the jar.

# Resolve an absolute path without depending on GNU realpath being installed.
realpath() {
  [[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"
}

APP_HOME=$(dirname "$(realpath "$0")")

# Refuse to start a second instance when the recorded PID is still alive.
# A missing pid file simply means the app has never been started here
# (previously `$(<file)` spewed an error in that case).
if [[ -f "$APP_HOME/app.pid" ]]; then
  APP_PID=$(<"$APP_HOME/app.pid")
  if ps -p "$APP_PID" > /dev/null; then
    echo "Failed to start, service already started!"
    exit 1
  fi
fi

# Fix: the encoding flag was misspelled `-Dfile.ecoding` and therefore had no
# effect; JVM options are also placed before -jar for clarity.
nohup java -Dfile.encoding=UTF-8 -Xmx512M -jar "$APP_HOME/jphp-site.jar" > "$APP_HOME/application.log" 2>&1 &
APP_PID=$!

echo "$APP_PID" > "$APP_HOME/app.pid"
echo "App has been started, pid = $APP_PID."
package com.uumind.log4j.appender.redis;
import org.apache.log4j.Logger;
/**
 * Manual smoke test: floods the log4j-configured Redis appender with a burst
 * of 10,000 INFO events and prints "Done" when finished.
 */
public class RedisAppenderTest {

    /** Logger routed (via log4j configuration) to the appender under test. */
    private final static Logger log = Logger.getLogger(RedisAppenderTest.class);

    public static void main(String[] args) throws Exception {
        // Fixed-size burst so appender behavior under load can be observed.
        final int messageCount = 10000;
        for (int i = 0; i < messageCount; i++) {
            log.info("Log Test");
        }
        System.out.println("Done");
    }
}
|
<reponame>Ian3110/stock-diary-lab-stock-diary-server<gh_stars>0
const axios = require('axios');
const cheerio = require('cheerio');
/*
const getHtml = async () => {
try {
return await axios.get(
'https://finance.naver.com/sise/sise_index.naver?code=KOSPI',
);
} catch (error) {
console.error(error);
}
};
*/
/**
 * Fetches the Naver Finance KOSPI index page.
 *
 * Fix: the response was previously only logged and then discarded, so no
 * caller could use the data; it is now also returned. On request failure the
 * error is logged and undefined is returned (best-effort, as before).
 *
 * @returns {Promise<object|undefined>} the axios response, or undefined on error.
 */
async function getStockIndex() {
  try {
    const response = await axios.get(
      'https://finance.naver.com/sise/sise_index.naver?code=KOSPI',
    );
    console.log(response);
    return response;
  } catch (error) {
    console.error(error);
  }
}
|
#!/usr/bin/env bash
# Git pre-push hook: block the push unless Brakeman AND the test suite pass.
echo "Running pre-push hook"

# Fix: the old version only inspected $? of the last command, so a Brakeman
# failure was silently ignored. Run both checks (tests still run even when
# Brakeman fails, as before) and remember whether either one failed.
status=0
./scripts/run-brakeman.bash || status=1
./scripts/run-tests.bash || status=1

if [ "$status" -ne 0 ]; then
  echo "Brakeman and Tests must pass before pushing!"
  exit 1
fi
package com.company.project.common.result;
import cn.hutool.core.util.StrUtil;
import com.company.project.common.exception.BusinessException;
import com.company.project.common.util.RequestContextHolderUtil;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.springframework.http.HttpStatus;
import java.util.Date;
/**
 * Created with IntelliJ IDEA.
 * Description:
 * Default error result returned globally for failed requests.
 * @author LErry.li
 * Date: 2018-06-15
 * Time: 14:41
 */
@Builder
@AllArgsConstructor
@NoArgsConstructor
@Data
public class DefaultErrorResult implements Result {
    private static final long serialVersionUID = 1899083570489722793L;

    /**
     * HTTP response status code {@link org.springframework.http.HttpStatus}
     */
    private Integer status;

    /**
     * English reason phrase for the HTTP response status code
     */
    private String error;

    /**
     * Condensed message taken from the exception
     */
    private String message;

    /**
     * Application-internal result code, {@link ResultCode}; a finer-grained
     * classification of the error than the HTTP status.
     *
     * Note: when Spring Boot returns its default error response, this field is null.
     */
    private Integer code;

    /**
     * URI of the invoked endpoint
     */
    private String path;

    /**
     * Name of the exception class
     */
    private String exception;

    /**
     * Extra error payload carried by the exception
     */
    private Object errors;

    /**
     * Timestamp of when this error result was produced
     */
    private Date timestamp;

    /** Builds an error result and attaches an extra error payload. */
    public static DefaultErrorResult failure(ResultCode resultCode, Throwable e, HttpStatus httpStatus, Object errors) {
        DefaultErrorResult result = DefaultErrorResult.failure(resultCode, e, httpStatus);
        result.setErrors(errors);
        return result;
    }

    /**
     * Builds an error result from the result code, the thrown exception and the
     * HTTP status; path and timestamp are filled from the current request context.
     */
    public static DefaultErrorResult failure(ResultCode resultCode, Throwable e, HttpStatus httpStatus) {
        DefaultErrorResult result = new DefaultErrorResult();
        result.setCode(resultCode.code());
        result.setMessage(resultCode.message());
        result.setStatus(httpStatus.value());
        result.setError(httpStatus.getReasonPhrase());
        result.setException(e.getClass().getName());
        result.setPath(RequestContextHolderUtil.getRequest().getRequestURI());
        result.setTimestamp(new Date());
        return result;
    }

    /**
     * Builds an error result from a {@link BusinessException}; the exception's
     * own message, when non-empty, overrides the result-code message.
     * NOTE(review): a null result code falls back to ResultCode.SUCCESS while
     * still producing an error body with HTTP 200 — confirm this is intentional.
     */
    public static DefaultErrorResult failure(BusinessException e) {
        DefaultErrorResult defaultErrorResult = DefaultErrorResult.failure(e.getResultCode() == null ? ResultCode.SUCCESS : e.getResultCode(), e, HttpStatus.OK, e.getData());
        if (StrUtil.isNotEmpty(e.getMessage())) {
            defaultErrorResult.setMessage(e.getMessage());
        }
        return defaultErrorResult;
    }

    // NOTE(review): the accessors below duplicate what Lombok's @Data generates,
    // but these setters return `this` (fluent style) rather than void, so they
    // take precedence and are part of the public interface — do not remove.
    public Integer getStatus() {
        return status;
    }

    public DefaultErrorResult setStatus(Integer status) {
        this.status = status;
        return this;
    }

    public String getError() {
        return error;
    }

    public DefaultErrorResult setError(String error) {
        this.error = error;
        return this;
    }

    public String getMessage() {
        return message;
    }

    public DefaultErrorResult setMessage(String message) {
        this.message = message;
        return this;
    }

    public Integer getCode() {
        return code;
    }

    public DefaultErrorResult setCode(Integer code) {
        this.code = code;
        return this;
    }

    public String getPath() {
        return path;
    }

    public DefaultErrorResult setPath(String path) {
        this.path = path;
        return this;
    }

    public String getException() {
        return exception;
    }

    public DefaultErrorResult setException(String exception) {
        this.exception = exception;
        return this;
    }

    public Object getErrors() {
        return errors;
    }

    public DefaultErrorResult setErrors(Object errors) {
        this.errors = errors;
        return this;
    }

    public Date getTimestamp() {
        return timestamp;
    }

    public DefaultErrorResult setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
        return this;
    }
}
|
<reponame>stephenhu/nbad
package main
import (
"encoding/json"
//"fmt"
//"log"
"net/http"
"github.com/gorilla/mux"
"github.com/stephenhu/stats"
)
// liveApiHandler serves live game data.
//
// GET /.../{id}: looks up the in-memory LiveMap entry for the requested game
// and writes it as JSON. 404 is returned when stats.RedisLastGame reports no
// last game, when the {id} path variable is missing, or when the id has no
// LiveMap entry; 500 when the entry fails to marshal.
//
// PUT, POST and DELETE are empty cases (Go cases do not fall through), so
// they currently respond 200 with no body — NOTE(review): possibly they
// should return 405/501 until implemented. Any other method gets 405.
func liveApiHandler(w http.ResponseWriter, r *http.Request) {
	switch r.Method {
	case http.MethodPut:
	case http.MethodGet:
		// Cheap probe: an empty "last game" means there is no live data at all.
		d := stats.RedisLastGame()
		if d == "" {
			w.WriteHeader(http.StatusNotFound)
		} else {
			vars := mux.Vars(r)
			id := vars["id"]
			if id == "" {
				w.WriteHeader(http.StatusNotFound)
			} else {
				// Guard concurrent access to the shared LiveMap.
				liveMutex.Lock()
				game, ok := LiveMap[id]
				liveMutex.Unlock()
				if ok {
					j, err := json.Marshal(game)
					if err != nil {
						logf("liveApiHandler", err.Error())
						w.WriteHeader(http.StatusInternalServerError)
					} else {
						w.Header().Set("Content-Type", "application/json")
						w.Write(j)
					}
				} else {
					w.WriteHeader(http.StatusNotFound)
				}
			}
		}
	case http.MethodDelete:
	case http.MethodPost:
	default:
		w.WriteHeader(http.StatusMethodNotAllowed)
	}
} // liveApiHandler
|
#!/usr/bin/env bash
# Copyright 2020 Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -x
set -o errexit
set -o nounset
set -o pipefail

# Positional arguments: repository name, git clone URL, and the tag to build.
REPO="$1"
CLONE_URL="$2"
TAG="$3"
BIN_ROOT="_output/bin"
BIN_PATH="$BIN_ROOT/$REPO"

readonly SUPPORTED_PLATFORMS=(
  linux/amd64
  linux/arm64
)

MAKE_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd -P)"
source "${MAKE_ROOT}/../../../build/lib/common.sh"

# Clones the repo at the requested tag and cross-compiles its binaries for
# every supported platform into $BIN_PATH/<os>-<arch>, gathering licenses
# along the way. (Previously expansions were unquoted; all are now quoted so
# paths and arguments containing spaces or glob characters cannot break the
# build.)
function build::external-attacher::binaries(){
  mkdir -p "$BIN_PATH"
  git clone "$CLONE_URL" "$REPO"
  cd "$REPO"
  git checkout "$TAG"
  for platform in "${SUPPORTED_PLATFORMS[@]}"; do
    OS="$(cut -d '/' -f1 <<< "${platform}")"
    ARCH="$(cut -d '/' -f2 <<< "${platform}")"
    make BUILD_PLATFORMS="$OS $ARCH"
    mkdir -p "../${BIN_PATH}/${OS}-${ARCH}"
    mv bin/* "../${BIN_PATH}/${OS}-${ARCH}"
    make clean
  done
  # Collect license information for everything that was just built.
  build::gather_licenses ./ "$MAKE_ROOT/LICENSES"
  cd ..
  rm -rf -- "$REPO"
}

build::external-attacher::binaries
|
def powers_of_two(n):
    """Return the powers of two from 2**0 through 2**n inclusive.

    Args:
        n: Highest exponent (inclusive). Negative values yield an empty list,
           matching ``range()`` semantics.

    Returns:
        List ``[1, 2, 4, ..., 2**n]`` of length ``n + 1`` (for ``n >= 0``).
    """
    # A comprehension replaces the manual append loop — same result, idiomatic.
    return [2 ** i for i in range(n + 1)]
|
<gh_stars>1-10
import { ExternalProxyConfigObject } from '../external-proxy-config';
/**
 * Base class for proxy request contexts: carries the optional external-proxy
 * configuration and simple start/end timing markers.
 */
export abstract class AbstractContext {
  /** External proxy configuration for this context, if any. */
  public externalProxy: ExternalProxyConfigObject | undefined | null;

  /** Epoch milliseconds recorded by markStart; undefined until then. */
  protected status_startTime: number | undefined;

  /** Epoch milliseconds recorded by markEnd; set at most once. */
  protected status_endTime: number | undefined;

  /** Records the start time (overwrites any previous value). */
  public markStart(): void {
    this.status_startTime = Date.now();
  }

  /** Records the end time; subsequent calls are no-ops. */
  public markEnd(): void {
    if (!this.status_endTime) this.status_endTime = Date.now();
  }
}
|
<reponame>alexis35115/graphical-dice-microbit
# Stub of a micro:bit-style ``compass`` module API (repo name suggests a
# micro:bit project): fixed placeholder readings so device code can be
# imported and exercised off-device.

def calibrate():
    """Starts the calibration process. An instructive message will be scrolled to the user after which they will need to rotate the device in order to draw a circle on the LED display."""

def is_calibrated():
    """Returns True if the compass has been successfully calibrated, and returns False otherwise."""
    return True  # stub: always reports a calibrated compass

def clear_calibration():
    """Undoes the calibration, making the compass uncalibrated again."""

def get_x():
    """Gives the reading of the magnetic field strength on the x axis in nano tesla, as a positive or negative integer, depending on the direction of the field."""
    return 0  # stub reading

def get_y():
    """Gives the reading of the magnetic field strength on the y axis in nano tesla, as a positive or negative integer, depending on the direction of the field."""
    return 0  # stub reading

def get_z():
    """Gives the reading of the magnetic field strength on the z axis in nano tesla, as a positive or negative integer, depending on the direction of the field."""
    return 0  # stub reading

def heading():
    """Gives the compass heading, calculated from the above readings, as an integer in the range from 0 to 360, representing the angle in degrees, clockwise, with north as 0."""
    return 0  # stub heading

def get_field_strength():
    """Returns an integer indication of the magnitude of the magnetic field around the device in nano tesla."""
    return 0  # stub magnitude
|
// Hardhat build configuration: plugins, Solidity compiler versions, networks
// and reporting options.
require("@nomiclabs/hardhat-waffle");
require("@nomiclabs/hardhat-etherscan");
require('dotenv').config();

// This is a sample Hardhat task. To learn how to create your own go to
// https://hardhat.org/guides/create-task.html
task("accounts", "Prints the list of accounts", async (taskArgs, hre) => {
  const accounts = await hre.ethers.getSigners();
  for (const account of accounts) {
    console.log(account.address);
  }
});

// You need to export an object to set up your config
// Go to https://hardhat.org/config/ to learn more

/**
 * @type import('hardhat/config').HardhatUserConfig
 */
module.exports = {
  defaultNetwork: 'ganache',
  // Multiple compiler versions (identical optimizer settings) so contracts
  // pinned to 0.5.x, 0.7.x and 0.8.x all build.
  solidity: {
    compilers: [
      {
        version: "0.5.16",
        settings: {
          optimizer: {
            enabled: true,
            runs: 200
          }
        }
      },
      {
        version: "0.8.4",
        settings: {
          optimizer: {
            enabled: true,
            runs: 200
          }
        }
      },
      {
        version: "0.7.0",
        settings: {
          optimizer: {
            enabled: true,
            runs: 200
          }
        }
      },
    ],
  },
  networks: {
    // Commented-out hardhat/localhost presets kept for reference.
    // hardhat: {
    //   gas: 9000000000,
    //   blockGasLimit: 9000000000,
    //   allowUnlimitedContractSize: true,
    // },
    // localhost: {
    //   url: "http://127.0.0.1:8545",
    //   gas: 150000000,
    //   blockGasLimit: 150000000,
    //   allowUnlimitedContractSize: true,
    //   throwOnTransactionFailures: true,
    //   throwOnCallFailures: true,
    //   accounts: ["59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"]
    // },
    // Local Ganache instance (default network).
    ganache: {
      url: "http://127.0.0.1:7545",
      gas: 20000000000,
      // blockGasLimit: 6721975,
      throwOnTransactionFailures: true,
      throwOnCallFailures: true,
      timeout: 1800000,
      allowUnlimitedContractSize: true,
    },
    rinkeby: {
      url: "https://rinkeby.infura.io/v3/9aa3d95b3bc440fa88ea12eaa4456161",
    },
    truffdash: {
      url: "http://localhost:24012/rpc",
    }
  },
  etherscan: {
    apiKey: {
      // NOTE(review): '<KEY>' is a placeholder — supply a real API key,
      // e.g. from process.env via dotenv, before using contract verification.
      bscTestnet: '<KEY>'
    }
  },
  paths: {
    sources: './contracts',
    tests: './test',
    artifacts: './artifacts'
  },
  gasReporter: {
    currency: 'USD',
    gasPrice: 1,
    enabled: false
  },
};
|
import React from "react";
import { bazel_config } from "../../proto/bazel_config_ts_proto";
import authService from "../auth/auth_service";
import capabilities from "../capabilities/capabilities";
import rpcService from "../service/rpc_service";
import SetupCodeComponent from "./setup_code";
interface Props {}

interface State {
  /** Whether the setup menu is expanded. */
  menuExpanded: boolean;
  /** Backend response used to render the setup snippet; null until loaded. */
  bazelConfigResponse: bazel_config.GetBazelConfigResponse;
}
/**
 * Renders the BuildBuddy setup instructions page. Fetches a generated
 * .bazelrc configuration from the server (GetBazelConfig RPC) and renders
 * it via SetupCodeComponent once the response arrives.
 */
export default class SetupComponent extends React.Component<Props, State> {
  state: State = {
    menuExpanded: false,
    bazelConfigResponse: null,
  };

  // Was componentWillMount, which is deprecated; componentDidMount is the
  // correct place to start an async request whose callback calls setState.
  componentDidMount() {
    document.title = `Setup | BuildBuddy`;

    // Request a .bazelrc snippet tailored to this page's host and protocol.
    let request = new bazel_config.GetBazelConfigRequest();
    request.host = window.location.host;
    request.protocol = window.location.protocol;
    request.includeCertificate = true;
    rpcService.service.getBazelConfig(request).then((response: bazel_config.GetBazelConfigResponse) => {
      console.log(response);
      this.setState({ ...this.state, bazelConfigResponse: response });
    });
  }

  render() {
    return (
      <div className="home">
        <div className="container narrow">
          <div className="title">Setup instructions</div>
          {this.props.children}
          To get started, select options below then copy the results to your <b>.bazelrc</b> file.
          <br />
          <br />
          If you don't have a <b>.bazelrc</b> file - create one in the same directory as your Bazel <b>WORKSPACE</b>{" "}
          file.
          <h2>1. Configure your .bazelrc</h2>
          {this.state.bazelConfigResponse && (
            <SetupCodeComponent bazelConfigResponse={this.state.bazelConfigResponse} />
          )}
          <h2>2. Verify your installation</h2>
          Once you've added those lines to your <b>.bazelrc</b>, kick off a bazel build.
          <br />
          <br />
          You'll get a BuildBuddy URL printed at the beginning and the end of every Bazel invocation like this:
          <code>
            bazel build //...
            <br />
            INFO: Streaming build results to: {window.location.protocol}//
            {window.location.host}
            /invocation/7bedd84e-525e-4b93-a5f5-53517d57752b
            <br />
            ...
          </code>
          Now you can ⌘ click / double click on these urls to see the results of your build!
          <br />
          <br />
          {capabilities.enterprise && (
            <>
              Visit your <a href="/">build history</a> to make sure that your builds are associated with your account.
              <br />
              <br />
              {capabilities.anonymous && (
                <span>
                  Note: Builds using the <b>No auth</b> option will not appear in your history, as they can't be
                  associated with your account.
                </span>
              )}
            </>
          )}
          {!capabilities.enterprise && (
            <div>
              <h2>Enterprise BuildBuddy</h2>
              Want enterprise features like SSO, organization build history, trends, remote build execution and more?
              <br />
              <br />
              <b>
                <a target="_blank" href="https://buildbuddy.typeform.com/to/wIXFIA">
                  Click here
                </a>
              </b>{" "}
              to upgrade to enterprise BuildBuddy.
            </div>
          )}
          <h2>Documentation</h2>
          Visit our <a href="https://www.buildbuddy.io/docs/introduction">documentation</a> for more information on
          setting up, configuring, and using BuildBuddy.
          <h2>Get in touch!</h2>
          Join our <a href="https://slack.buildbuddy.io">Slack channel</a> or email us at{" "}
          <a href="mailto:<EMAIL>"><EMAIL></a> if you have any questions or feature requests!
        </div>
      </div>
    );
  }
}
|
// repository: seratch/junithelper
/*
* Copyright 2009-2010 junithelper.org.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.junithelper.plugin.constant;
/**
 * Message-bundle key constants for the JUnitHelper plugin's dialogs.
 * Each field holds a lookup key (e.g. "Dialog.Common.title"), not the
 * displayed text; the localized strings are resolved elsewhere at runtime.
 */
public final class Dialog {
    // Shared key prefix for every entry in the Common group.
    private static final String common = "Dialog.Common.";
    /** Keys for messages used by dialogs across plugin features. */
    public static class Common {
        public static final String title = common + "title";
        public static final String invalidPreference = common + "invalidPreference";
        public static final String confirmToExecute = common + "confirmToExecute";
        public static final String required = common + "required";
        public static final String selectOneOnly = common + "selectOneOnly";
        public static final String alreadyExist = common + "alreadyExist";
        public static final String notExist = common + "notExist";
        public static final String confirmToProceed = common + "confirmToProceed";
        public static final String notToCreateNewFilePreference = common + "notToCreateNewFilePreference";
        public static final String confirmToCreateNewFile = common + "confirmToCreateNewFile";
        public static final String resourceRefreshError = common + "resourceRefreshError";
        public static final String confirmToChangeToJUnitVersion3 = common + "confirmToChangeToJUnitVersion3";
        public static final String confirmToChangeToJUnitVersion4 = common + "confirmToChangeToJUnitVersion4";
        public static final String selectJavaFile = common + "selectJavaFile";
        public static final String notTestClass = common + "notTestClass";
    }
}
|
// repository: Ashindustry007/competitive-programming
// https://www.aceptaelreto.com/problem/statement.php?id=442
#include<bits/stdc++.h>
using namespace std;
// Converts identifiers between cases (aceptaelreto problem 442).
// Each input line is "<identifier> <target-style>"; a blank line ends input.
// Target styles: "snake_case", "kebab-case", otherwise CamelCase output.
int main() {
    for (;;) {
        string a, b, s, r;
        getline(cin, s);
        if (s.empty()) break;  // blank line terminates input
        stringstream in(s);
        in >> a >> b;  // a = identifier, b = target style name
        vector<string> w;  // identifier split into lowercase words
        int i = 0, j = 1;  // i = word start, j = scan position
        if (a[0] >= 'A' && a[0] <= 'Z') {
            // CamelCase input: each word is a capital followed by a
            // maximal run of lowercase letters.
            while (j <= a.size()) {
                while (j < a.size() && a[j] >= 'a' && a[j] <= 'z') j++;
                a[i] += 32;  // lowercase the word's leading capital (ASCII)
                w.push_back(a.substr(i, j - i));
                i = j, j++;
            }
        } else {
            // snake_case / kebab-case input: words separated by '_' or '-'.
            while (j <= a.size()) {
                while (j < a.size() && a[j] != '-' && a[j] != '_') j++;
                w.push_back(a.substr(i, j - i));
                i = j + 1, j += 2;  // skip past the delimiter
            }
        }
        // "_\n"[flag] picks '_' between words and '\n' after the last one.
        if (b == "snake_case")
            for (int i = 0; i < w.size(); i++)
                cout << w[i] << "_\n"[i == w.size() - 1];
        else if (b == "kebab-case")
            for (int i = 0; i < w.size(); i++)
                cout << w[i] << "-\n"[i == w.size() - 1];
        else {
            // CamelCase output: capitalize each word, no separators.
            for (int i = 0; i < w.size(); i++) {
                w[i][0] -= 32;  // uppercase first letter (ASCII)
                cout << w[i];
            }
            cout << endl;
        }
    }
}
|
#!/bin/sh
#######################################
# Feed one estimate() call to bc, using the analyze.bc library file.
# Arguments:
#   $1 - decimal power (dpow)
#   $2 - base, given in hexadecimal (bs is read with ibase=16)
#   $3 - power (pow), read back in decimal (ibase=A)
# Outputs: whatever estimate(dpow, bs, pow) prints, at scale=400.
#######################################
checkside() {
  # printf instead of echo: portable, and the bc program text is built
  # from arguments, so avoid echo's option/backslash pitfalls.
  printf 'scale=400;ibase=16;bs=%s;ibase=A;pow=%s;dpow=%s;estimate(dpow,bs,pow);\n' "$2" "$3" "$1" | bc -l analyze.bc
}
# Read "decpower base power" triples from stdin and run checkside on each,
# printing a blank separator line after every fourth result.
i=0
while read -r decpower base power; do
  # POSIX arithmetic expansion; the original $[...] form is a deprecated
  # bashism and invalid under this script's /bin/sh shebang.
  i=$((i + 1))
  # echo "decpower: $decpower"
  # echo "base: $base"
  # echo "power: $power"
  checkside "$decpower" "$base" "$power"
  if [ $((i % 4)) -eq 0 ]; then
    echo ""
  fi
done
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.