text
stringlengths 1
1.05M
|
|---|
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
# Parse positional arguments, with sensible defaults when omitted.
# $1: install zsh ("true"/"false"); $2: username, or "automatic"/"none";
# $3/$4: UID/GID, or "automatic"; $5: upgrade packages; $6: install "Oh My *sh";
# $7: enable Debian contrib/non-free repos (needed for manpages-posix*).
INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
USER_GID=${4:-"automatic"}
UPGRADE_PACKAGES=${5:-"true"}
INSTALL_OH_MYS=${6:-"true"}
ADD_NON_FREE_PACKAGES=${7:-"false"}
# Abort on the first unhandled command failure from here on.
set -e
# The script creates users, edits apt sources and writes under /usr/local,
# so it must run as root.
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
# Substitute the login-shell default PATH (as reported by "sh -lc") inside the
# current PATH with a literal "$PATH" reference, so login shells keep any
# Dockerfile ENV additions instead of clobbering them.
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# If in automatic mode, determine if a user already exists, if not use vscode
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
USERNAME=""
# Candidates: common devcontainer users plus whichever account owns UID 1000.
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
USERNAME=${CURRENT_USER}
break
fi
done
# Fall back to creating "vscode" when no candidate account exists yet.
if [ "${USERNAME}" = "" ]; then
USERNAME=vscode
fi
elif [ "${USERNAME}" = "none" ]; then
# "none" means: do not create a dedicated user; operate as root.
USERNAME=root
USER_UID=0
USER_GID=0
fi
# Load markers to see which steps have already run
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
if [ -f "${MARKER_FILE}" ]; then
echo "Marker file found:"
cat "${MARKER_FILE}"
# Sourcing restores the *_ALREADY_* flags so completed steps are skipped on re-run.
source "${MARKER_FILE}"
fi
# Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive
# Refresh the apt package lists only when the cache directory is absent or
# empty, avoiding a redundant (and slow) "apt-get update" on images that
# already have a populated cache.
apt-get-update-if-needed()
{
if [ -d "/var/lib/apt/lists" ] && [ "$(ls /var/lib/apt/lists/ | wc -l)" != "0" ]; then
echo "Skipping apt-get update."
else
echo "Running apt-get update..."
apt-get update
fi
}
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
# Base tool set, kept as one whitespace-separated string that is passed
# unquoted to apt-get below so each name becomes a separate argument.
PACKAGE_LIST="apt-utils \
git \
openssh-client \
gnupg2 \
iproute2 \
procps \
lsof \
htop \
net-tools \
psmisc \
curl \
wget \
rsync \
ca-certificates \
unzip \
zip \
nano \
vim-tiny \
less \
jq \
lsb-release \
apt-transport-https \
dialog \
libc6 \
libgcc1 \
libkrb5-3 \
libgssapi-krb5-2 \
libicu[0-9][0-9] \
liblttng-ust0 \
libstdc++6 \
zlib1g \
locales \
sudo \
ncdu \
man-db \
strace \
manpages \
manpages-dev \
init-system-helpers"
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
CODENAME="$(cat /etc/os-release | grep -oE '^VERSION_CODENAME=.+$' | cut -d'=' -f2)"
# Append "contrib non-free" to every standard Debian source entry.
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${CODENAME} main contrib non-free/" /etc/apt/sources.list
# Fixed: the pattern previously misspelled "httpredir" as "httredir" and the
# replacement dropped the "-src" suffix, corrupting deb-src lines.
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${CODENAME} main/deb-src http:\/\/\1\.debian\.org\/debian ${CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
# Fixed: the replacement previously dropped the "-src" suffix.
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${CODENAME}-updates main/deb-src http:\/\/\1\.debian\.org\/debian ${CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${CODENAME}\/updates main/deb-src http:\/\/security\.debian\.org\/debian-security ${CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${CODENAME}-backports main/deb-src http:\/\/deb\.debian\.org\/debian ${CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
echo "Running apt-get update..."
apt-get update
PACKAGE_LIST="${PACKAGE_LIST} manpages-posix manpages-posix-dev"
else
apt-get-update-if-needed
fi
# Install libssl1.1 if available
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
PACKAGE_LIST="${PACKAGE_LIST} libssl1.1"
fi
# Install appropriate version of libssl1.0.x if available
LIBSSL=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
if [ "$(echo "$LIBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
# Debian 9
PACKAGE_LIST="${PACKAGE_LIST} libssl1.0.2"
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
# Ubuntu 18.04, 16.04, earlier
PACKAGE_LIST="${PACKAGE_LIST} libssl1.0.0"
fi
fi
echo "Packages to verify are installed: ${PACKAGE_LIST}"
# Filter the harmless debconf warning out of stderr while preserving other errors.
apt-get -y install --no-install-recommends ${PACKAGE_LIST} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
PACKAGES_ALREADY_INSTALLED="true"
fi
# Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
apt-get-update-if-needed
apt-get -y upgrade --no-install-recommends
apt-get autoremove -y
fi
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
# Common need for both applications and things like the agnoster ZSH theme.
# Skipped if a previous run already set it (marker flag) or the locale is
# already uncommented in /etc/locale.gen.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
locale-gen
LOCALE_ALREADY_SET="true"
fi
# Create or update a non-root user to match UID/GID.
if id -u ${USERNAME} > /dev/null 2>&1; then
# User exists, update if needed
# Fixed: use "id -g" (primary GID) rather than "id -G" (list of ALL group
# IDs), which made this comparison always differ for users belonging to
# more than one group and caused needless groupmod/usermod calls.
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
groupmod --gid $USER_GID $USERNAME
usermod --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
usermod --uid $USER_UID $USERNAME
fi
else
# Create user (and matching group), honoring explicit UID/GID when given.
if [ "${USER_GID}" = "automatic" ]; then
groupadd $USERNAME
else
groupadd --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" = "automatic" ]; then
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
else
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
fi
fi
# Add add sudo support for non-root user
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
# Grant passwordless sudo via a dedicated sudoers drop-in (0440 per sudoers convention).
echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
chmod 0440 /etc/sudoers.d/$USERNAME
EXISTING_NON_ROOT_USER="${USERNAME}"
fi
# ** Shell customization section **
# Pick the home directory whose rc files will be modified below.
if [ "${USERNAME}" = "root" ]; then
USER_RC_PATH="/root"
else
USER_RC_PATH="/home/${USERNAME}"
fi
# .bashrc/.zshrc snippet
# The quoted 'EOF' delimiter captures the snippet verbatim; its expansions run
# later, each time an interactive shell sources the rc file. The snippet sets
# USER, adds ~/.local/bin to PATH, and shows a one-time first-run notice.
RC_SNIPPET="$(cat << 'EOF'
if [ -z "${USER}" ]; then export USER=$(whoami); fi
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
# Display optional first run image specific notice if configured and terminal is interactive
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
fi
mkdir -p "$HOME/.config/vscode-dev-containers"
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
fi
EOF
)"
# code shim, it fallbacks to code-insiders if code is not available
# Installed at /usr/local/bin/code so that "code" works inside the container.
# The helper resolves the next "code" on the PATH *after* this shim (the
# grep -A1/-v pair skips the shim's own path to avoid exec'ing itself).
cat << 'EOF' > /usr/local/bin/code
#!/bin/sh
get_in_path_except_current() {
which -a "$1" | grep -A1 "$0" | grep -v "$0"
}
code="$(get_in_path_except_current code)"
if [ -n "$code" ]; then
exec "$code" "$@"
elif [ "$(command -v code-insiders)" ]; then
exec code-insiders "$@"
else
echo "code or code-insiders is not installed" >&2
exit 127
fi
EOF
chmod +x /usr/local/bin/code
# systemctl shim - tells people to use 'service' if systemd is not running
# When systemd IS running (PID 1), forward to the real binary; otherwise print
# guidance. Installed in /usr/local/bin, which precedes /bin on the PATH.
cat << 'EOF' > /usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
exec /bin/systemctl "$@"
else
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
# Bash prompt: "@GITHUB_USER" (or \u) plus an arrow colored by the last exit
# code, then the cwd and, inside a git repo, the branch with a dirty marker.
# The quoted 'EOF' keeps the snippet verbatim; the embedded backtick commands
# run later, every time bash renders the prompt.
CODESPACES_BASH="$(cat \
<<'EOF'
# Codespaces bash prompt theme
__bash_prompt() {
local userpart='`export XIT=$? \
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
local gitbranch='`\
export BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null); \
if [ "${BRANCH}" = "HEAD" ]; then \
export BRANCH=$(git describe --contains --all HEAD 2>/dev/null); \
fi; \
if [ "${BRANCH}" != "" ]; then \
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
echo -n " \[\033[1;33m\]✗"; \
fi \
&& echo -n "\[\033[0;36m\]) "; \
fi`'
local lightblue='\[\033[1;34m\]'
local removecolor='\[\033[0m\]'
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
unset -f __bash_prompt
}
__bash_prompt
EOF
)"
# Zsh theme with the same layout, expressed via Oh My Zsh! prompt helpers.
CODESPACES_ZSH="$(cat \
<<'EOF'
__zsh_prompt() {
local prompt_username
if [ ! -z "${GITHUB_USER}" ]; then
prompt_username="@${GITHUB_USER}"
else
prompt_username="%n"
fi
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
PROMPT+='$(git_prompt_info)%{$fg[white]%}$ %{$reset_color%}' # Git status
unset -f __zsh_prompt
}
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
__zsh_prompt
EOF
)"
# Add notice that Oh My Bash! has been removed from images and how to provide information on how to install manually
OMB_README="$(cat \
<<'EOF'
"Oh My Bash!" has been removed from this image in favor of a simple shell prompt. If you
still wish to use it, remove "~/.oh-my-bash" and install it from: https://github.com/ohmybash/oh-my-bash
You may also want to consider "Bash-it" as an alternative: https://github.com/bash-it/bash-it
See here for infomation on adding it to your image or dotfiles: https://aka.ms/codespaces/omb-remove
EOF
)"
# Stub sourced in place of Oh My Bash!; prints the notice above when the
# terminal is interactive.
OMB_STUB="$(cat \
<<'EOF'
#!/usr/bin/env bash
if [ -t 1 ]; then
cat $HOME/.oh-my-bash/README.md
fi
EOF
)"
# Add RC snippet and custom bash prompt
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
echo "${RC_SNIPPET}" >> /etc/bash.bashrc
echo "${CODESPACES_BASH}" >> "${USER_RC_PATH}/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "${USER_RC_PATH}/.bashrc"
# Mirror the prompt setup into root's .bashrc when a non-root user is in play.
if [ "${USERNAME}" != "root" ]; then
echo "${CODESPACES_BASH}" >> "/root/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
fi
chown ${USERNAME}:${USERNAME} "${USER_RC_PATH}/.bashrc"
RC_SNIPPET_ALREADY_ADDED="true"
fi
# Add stub for Oh My Bash!
# Fixed: the directory test previously checked "${USER_RC_PATH}/.oh-my-bash}"
# (stray trailing brace), so the check never matched the directory actually
# created below and the stub was re-appended on every run.
if [ ! -d "${USER_RC_PATH}/.oh-my-bash" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
mkdir -p "${USER_RC_PATH}/.oh-my-bash" "/root/.oh-my-bash"
echo "${OMB_README}" >> "${USER_RC_PATH}/.oh-my-bash/README.md"
echo "${OMB_STUB}" >> "${USER_RC_PATH}/.oh-my-bash/oh-my-bash.sh"
chmod +x "${USER_RC_PATH}/.oh-my-bash/oh-my-bash.sh"
if [ "${USERNAME}" != "root" ]; then
echo "${OMB_README}" >> "/root/.oh-my-bash/README.md"
echo "${OMB_STUB}" >> "/root/.oh-my-bash/oh-my-bash.sh"
chmod +x "/root/.oh-my-bash/oh-my-bash.sh"
fi
chown -R "${USERNAME}:${USERNAME}" "${USER_RC_PATH}/.oh-my-bash"
fi
# Optionally install and configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then
# Install zsh itself only if it is not already on the PATH.
if ! type zsh > /dev/null 2>&1; then
apt-get-update-if-needed
apt-get install -y zsh
fi
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
echo "${RC_SNIPPET}" >> /etc/zsh/zshrc
ZSH_ALREADY_INSTALLED="true"
fi
# Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
OH_MY_INSTALL_DIR="${USER_RC_PATH}/.oh-my-zsh"
if [ ! -d "${OH_MY_INSTALL_DIR}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
TEMPLATE_PATH="${OH_MY_INSTALL_DIR}/templates/zshrc.zsh-template"
USER_RC_FILE="${USER_RC_PATH}/.zshrc"
# Keep the clone group/other non-writable (zsh's compaudit complains otherwise).
umask g-w,o-w
mkdir -p ${OH_MY_INSTALL_DIR}
# Shallow clone; the fsck overrides tolerate legacy zero-padded file modes
# present in the upstream repository history.
git clone --depth=1 \
-c core.eol=lf \
-c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \
"https://github.com/ohmyzsh/ohmyzsh" "${OH_MY_INSTALL_DIR}" 2>&1
# Seed .zshrc from the template and disable OMZ auto-update prompts.
echo -e "$(cat "${TEMPLATE_PATH}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${USER_RC_FILE}
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${USER_RC_FILE}
mkdir -p ${OH_MY_INSTALL_DIR}/custom/themes
echo "${CODESPACES_ZSH}" > "${OH_MY_INSTALL_DIR}/custom/themes/codespaces.zsh-theme"
# Shrink git while still enabling updates
cd "${OH_MY_INSTALL_DIR}"
git repack -a -d -f --depth=1 --window=1
# Copy to non-root user if one is specified
if [ "${USERNAME}" != "root" ]; then
cp -rf "${USER_RC_FILE}" "${OH_MY_INSTALL_DIR}" /root
chown -R ${USERNAME}:${USERNAME} "${USER_RC_PATH}"
fi
fi
fi
# Persist image metadata info, script if meta.env found in same directory
# The generated 'devcontainer-info' helper sources meta.env and prints either
# a single field (version/release/content) or a full human-readable summary.
META_INFO_SCRIPT="$(cat << 'EOF'
#!/bin/sh
. /usr/local/etc/vscode-dev-containers/meta.env
# Minimal output
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
echo "${VERSION}"
exit 0
elif [ "$1" = "release" ]; then
echo "${GIT_REPOSITORY_RELEASE}"
exit 0
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
echo "${CONTENTS_URL}"
exit 0
fi
#Full output
echo
echo "Development container image information"
echo
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
echo
EOF
)"
SCRIPT_DIR="$(cd $(dirname $0) && pwd)"
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
mkdir -p /usr/local/etc/vscode-dev-containers/
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
echo "${META_INFO_SCRIPT}" > /usr/local/bin/devcontainer-info
chmod +x /usr/local/bin/devcontainer-info
fi
# Write marker file
# Record the step flags so a re-run of this script can skip completed work
# (the file is sourced near the top of the script).
mkdir -p "$(dirname "${MARKER_FILE}")"
echo -e "\
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
echo "Done!"
|
<filename>src-rx/src/components/Utils.js
/**
* Copyright 2018-2021 bluefox <<EMAIL>>
*
* MIT License
*
**/
import React from 'react';
import I18n from '@iobroker/adapter-react/i18n';
// Custom-settings namespace used under obj.common.custom.
const NAMESPACE = 'material';
// Keys for weekday/month lookups; resolved through I18n as 'ra_dow_*' / 'ra_month_*'.
// NOTE(review): 'Mai' (not 'May') — presumably matches an existing 'ra_month_Mai'
// translation key; verify before "fixing".
const days = ['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa'];
const months = ['Jan', 'Feb', 'Mar', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
class Utils {
static namespace = NAMESPACE;
static INSTANCES = 'instances';
// Day/month order used by date2string; the year token is stripped by setDataFormat().
static dateFormat = ['DD', 'MM'];
// Matches any character outside letters, digits and a safe punctuation set.
static FORBIDDEN_CHARS = /[^._\-/ :!#$%&()+=@^{}|~\p{Ll}\p{Lu}\p{Nd}]+/gu;
/**
* Capitalize words.
* @param {string | undefined} name
* @returns {string}
*/
static CapitalWords(name) {
return (name || '').split(/[\s_]/)
.filter(item => item)
.map(word => word ? word[0].toUpperCase() + word.substring(1).toLowerCase() : '')
.join(' ');
}
static formatSeconds(seconds) {
const days = Math.floor(seconds / (3600 * 24));
seconds %= 3600 * 24;
let hours = Math.floor(seconds / 3600);
if (hours < 10) {
hours = '0' + hours;
}
seconds %= 3600;
let minutes = Math.floor(seconds / 60);
if (minutes < 10) {
minutes = '0' + minutes;
}
seconds %= 60;
seconds = Math.floor(seconds);
if (seconds < 10) {
seconds = '0' + seconds;
}
let text = '';
if (days) {
text += days + ' ' + I18n.t('daysShortText') + ' ';
}
text += hours + ':' + minutes + ':' + seconds;
return text;
}
/**
 * Get the name of the object by id from the name or description.
 * Resolution order: explicit settings.name → obj.common.name/desc (with
 * language fallback) → the last ID segment, capitalized.
 * @param {Record<string, ioBroker.Object>} objects
 * @param {string} id
 * @param {{ name: any; } | ioBroker.Languages | null} settings settings object, or just a language code
 * @param {{ language?: ioBroker.Languages; }} options
 * @param {boolean} [isDesc] Set to true to get the description.
 * @returns {string}
 */
static getObjectName(objects, id, settings, options, isDesc) {
let item = objects[id];
let text = id;
const attr = isDesc ? 'desc' : 'name';
// Allow calling with a bare language code in place of settings.
if (typeof settings === 'string' && !options) {
options = {language: settings};
settings = null;
}
options = options || {};
if (!options.language) {
// Fall back to the system config language, then the page language, then English.
options.language = (objects['system.config'] && objects['system.config'].common && objects['system.config'].common.language) || window.sysLang || 'en';
}
if (settings && settings.name) {
text = settings.name;
if (typeof text === 'object') {
text = text[options.language] || text.en;
}
} else
if (item && item.common && item.common[attr]) {
text = item.common[attr];
// Use the description as a fallback when the name attribute is empty.
if (attr !== 'desc' && !text && item.common.desc) {
text = item.common.desc;
}
if (typeof text === 'object') {
text = text[options.language] || text.en || text.de || text.ru || '';
}
// Normalize separators and de-shout ALL-CAPS names.
text = (text || '').toString().replace(/[_.]/g, ' ');
if (text === text.toUpperCase()) {
text = text[0] + text.substring(1).toLowerCase();
}
} else {
// No usable name anywhere: derive one from the last segment of the ID.
let pos = id.lastIndexOf('.');
text = id.substring(pos + 1).replace(/[_.]/g, ' ');
text = Utils.CapitalWords(text);
}
return text.trim();
}
/**
 * Get the name of the object from the name or description.
 * Same resolution logic as getObjectName, but operating on an object
 * instance directly instead of an objects map + id. Falls back to obj._id
 * when no name can be resolved.
 * @param {ioBroker.PartialObject} obj
 * @param {{ name: any; } | ioBroker.Languages | null } settings or language
 * @param {{ language?: ioBroker.Languages; } } options
 * @param {boolean} [isDesc] Set to true to get the description.
 * @returns {string}
 */
static getObjectNameFromObj(obj, settings, options, isDesc) {
let item = obj;
let text = (obj && obj._id) || '';
const attr = isDesc ? 'desc' : 'name';
// Allow calling with a bare language code in place of settings.
if (typeof settings === 'string' && !options) {
options = {language: settings};
settings = null;
}
options = options || {};
if (settings && settings.name) {
text = settings.name;
if (typeof text === 'object') {
text = text[options.language] || text.en;
}
} else
if (item && item.common && item.common[attr]) {
text = item.common[attr];
// Use the description as a fallback when the name attribute is empty.
if (attr !== 'desc' && !text && item.common.desc) {
text = item.common.desc;
}
if (typeof text === 'object') {
text = text[options.language] || text.en;
}
// Normalize separators and de-shout ALL-CAPS names.
text = (text || '').toString().replace(/[_.]/g, ' ');
if (text === text.toUpperCase()) {
text = text[0] + text.substring(1).toLowerCase();
}
}
return text.trim();
}
/**
* @param {ioBroker.PartialObject | ioBroker.ObjectCommon} obj
* @param {string} forEnumId
* @param {{ user: string; }} options
* @returns {string | null}
*/
static getSettingsOrder(obj, forEnumId, options) {
if (obj && obj.hasOwnProperty('common')) {
obj = obj.common;
}
let settings;
if (obj && obj.custom) {
settings = (obj.custom || {})[NAMESPACE];
const user = options.user || 'admin';
if (settings && settings[user]) {
if (forEnumId) {
if (settings[user].subOrder && settings[user].subOrder[forEnumId]) {
return JSON.parse(JSON.stringify(settings[user].subOrder[forEnumId]));
}
} else {
if (settings[user].order) {
return JSON.parse(JSON.stringify(settings[user].order));
}
}
}
}
return null;
}
/**
* @param {ioBroker.PartialObject | ioBroker.ObjectCommon} obj
* @param {string} forEnumId
* @param {{ user: string; }} options
*/
static getSettingsCustomURLs(obj, forEnumId, options) {
if (obj && obj.hasOwnProperty('common')) {
obj = obj.common;
}
let settings;
if (obj && obj.custom) {
settings = (obj.custom || {})[NAMESPACE];
const user = options.user || 'admin';
if (settings && settings[user]) {
if (forEnumId) {
if (settings[user].subURLs && settings[user].subURLs[forEnumId]) {
return JSON.parse(JSON.stringify(settings[user].subURLs[forEnumId]));
}
} else {
if (settings[user].URLs) {
return JSON.parse(JSON.stringify(settings[user].URLs));
}
}
}
}
return null;
}
/**
* Reorder the array items in list between source and dest.
* @param {Iterable<any> | ArrayLike<any>} list
* @param {number} source
* @param {number} dest
*/
static reorder(list, source, dest) {
const result = Array.from(list);
const [removed] = result.splice(source, 1);
result.splice(dest, 0, removed);
return result;
};
/**
 * Collect the material-adapter settings for an object, merging in defaults
 * from options and from the object's common attributes, and normalizing the
 * resulting name.
 * @param {any} obj
 * @param {{ id: any; user: any; name: any; icon: any; color: any; language: ioBroker.Languages; }} options
 * @param {boolean} [defaultEnabling] default for settings.enabled when not stored
 */
static getSettings(obj, options, defaultEnabling) {
let settings;
const id = (obj && obj._id) || (options && options.id);
if (obj && obj.hasOwnProperty('common')) {
obj = obj.common;
}
if (obj && obj.custom) {
// Deep-copy the stored per-user settings so callers can mutate the result.
settings = obj.custom || {};
settings = settings[NAMESPACE] && settings[NAMESPACE][options.user || 'admin'] ? JSON.parse(JSON.stringify(settings[NAMESPACE][options.user || 'admin'])) : {enabled: true};
} else {
settings = {enabled: defaultEnabling === undefined ? true : defaultEnabling, useCustom: false};
}
if (!settings.hasOwnProperty('enabled')) {
settings.enabled = defaultEnabling === undefined ? true : defaultEnabling;
}
// NOTE(review): deliberately disabled branch ("false &&") — useCommon
// handling is switched off, so the else path below always runs.
if (false && settings.useCommon) {
if (obj.color) settings.color = obj.color;
if (obj.icon) settings.icon = obj.icon;
if (obj.name) settings.name = obj.name;
} else {
// Fill gaps first from options, then from the object's common attributes.
if (options) {
if (!settings.name && options.name) settings.name = options.name;
if (!settings.icon && options.icon) settings.icon = options.icon;
if (!settings.color && options.color) settings.color = options.color;
}
if (obj) {
if (!settings.color && obj.color) settings.color = obj.color;
if (!settings.icon && obj.icon) settings.icon = obj.icon;
if (!settings.name && obj.name) settings.name = obj.name;
}
}
if (typeof settings.name === 'object') {
// NOTE(review): assumes options is provided when a multilingual name is
// present; a missing options here would throw — confirm with callers.
settings.name = settings.name[options.language] || settings.name.en;
settings.name = (settings.name || '').toString().replace(/_/g, ' ');
// De-shout ALL-CAPS names.
if (settings.name === settings.name.toUpperCase()) {
settings.name = settings.name[0] + settings.name.substring(1).toLowerCase();
}
}
// Last resort: derive a name from the final segment of the ID.
if (!settings.name && id) {
let pos = id.lastIndexOf('.');
settings.name = id.substring(pos + 1).replace(/[_.]/g, ' ');
settings.name = (settings.name || '').toString().replace(/_/g, ' ');
settings.name = Utils.CapitalWords(settings.name);
}
return settings;
}
/**
* @param {any} obj
* @param {any} settings
* @param {{ user: any; language: ioBroker.Languages; }} options
*/
static setSettings(obj, settings, options) {
if (obj) {
obj.common = obj.common || {};
obj.common.custom = obj.common.custom || {};
obj.common.custom[NAMESPACE] = obj.common.custom[NAMESPACE] || {};
obj.common.custom[NAMESPACE][options.user || 'admin'] = settings;
const s = obj.common.custom[NAMESPACE][options.user || 'admin'];
if (s.useCommon) {
if (s.color !== undefined) {
obj.common.color = s.color;
delete s.color;
}
if (s.icon !== undefined) {
obj.common.icon = s.icon;
delete s.icon;
}
if (s.name !== undefined) {
if (typeof obj.common.name !== 'object') {
obj.common.name = {};
obj.common.name[options.language] = s.name;
} else{
obj.common.name[options.language] = s.name;
}
delete s.name;
}
}
return true;
} else {
return false;
}
}
/**
 * Get the icon for the given settings as a React element.
 * @param {{ icon: string | undefined; name: string | undefined; prefix: string | undefined}} settings
 * @param {any} style inline CSS style object applied to the produced element
 * @returns {JSX.Element | null} span for short character icons, img otherwise, null when no icon is set
 */
static getIcon(settings, style) {
if (settings && settings.icon) {
// If UTF-8 icon (a string of up to two UTF-16 units is treated as a character/emoji icon)
if (settings.icon.length <= 2) {
return <span style={style || {}}>{settings.icon}</span>;
} else
if (settings.icon.startsWith('data:image')) {
// Inline data URL — use as-is.
return <img alt={settings.name} src={settings.icon} style={style || {}}/>;
} else { // may be later some changes for second type
// Relative/remote icon path, optionally prefixed.
return <img alt={settings.name} src={(settings.prefix || '') + settings.icon} style={style || {}}/>;
}
}
return null;
}
/**
 * Get the icon for the given object.
 * Character and data-URL icons are returned unchanged; file-based icons are
 * resolved to an "adapter/<name>/..." path, relative to the current page.
 * @param {string} id object ID, or the object itself
 * @param {{ common: { icon: any; }; }} obj
 * @returns {string | null}
 */
static getObjectIcon(id, obj) {
// If id is Object
if (typeof id === 'object') {
obj = id;
id = obj._id;
}
if (obj && obj.common && obj.common.icon) {
let icon = obj.common.icon;
// If UTF-8 icon
if (typeof icon === 'string' && icon.length <= 2) {
return icon;
} else
if (icon.startsWith('data:image')) {
return icon;
} else {
// File-based icon: build the adapter path from the object ID.
// 'system.adapter.<name>...' IDs use the third segment, others the first.
const parts = id.split('.');
if (parts[0] === 'system') {
icon = 'adapter/' + parts[2] + (icon.startsWith('/') ? '' : '/') + icon;
} else {
icon = 'adapter/' + parts[0] + (icon.startsWith('/') ? '' : '/') + icon;
}
// Adjust relative depth depending on where the current page is served from.
if (window.location.pathname.match(/adapter\/[^/]+\/[^/]+\.html/)) {
icon = '../../' + icon;
} else if (window.location.pathname.match(/material\/[.\d]+/)) {
icon = '../../' + icon;
} else
if (window.location.pathname.match(/material\//)) {
icon = '../' + icon;
}
return icon;
}
} else {
return null;
}
}
/**
 * Splits CamelCase into words.
 * NOTE(review): the camel-case splitting branch is disabled via "false &&",
 * so this currently always delegates to Utils.CapitalWords(text). The dead
 * branch is kept below, presumably for possible re-activation — confirm
 * before removing.
 * @param {string | undefined} text
 * @returns {string}
 */
static splitCamelCase(text) {
if (false && text !== text.toUpperCase()) {
const words = text.split(/\s+/);
for (let i = 0; i < words.length; i++) {
let word = words[i];
// Only split mixed-case words (skip all-lower and all-upper ones).
if (word.toLowerCase() !== word && word.toUpperCase() !== word) {
let z = 0;
const ww = [];
let start = 0;
// Break the word before every upper-case letter (incl. umlauts/Cyrillic).
while (z < word.length) {
if (word[z].match(/[A-ZÜÄÖА-Я]/)) {
ww.push(word.substring(start, z));
start = z;
}
z++;
}
if (start !== z) {
ww.push(word.substring(start, z));
}
for (let k = 0; k < ww.length; k++) {
words.splice(i + k, 0, ww[k]);
}
i += ww.length;
}
}
return words.map(w => {
w = w.trim();
if (w) {
return w[0].toUpperCase() + w.substring(1).toLowerCase();
}
return '';
}).join(' ');
} else {
return Utils.CapitalWords(text);
}
}
/**
* Check if the given color is bright.
* https://stackoverflow.com/questions/35969656/how-can-i-generate-the-opposite-color-according-to-current-color
* @param {string | null | undefined} color
* @param {boolean} [defaultValue]
* @returns {boolean}
*/
static isUseBright(color, defaultValue) {
if (color === null || color === undefined || color === '') {
return defaultValue === undefined ? true : defaultValue;
}
color = color.toString();
if (color.indexOf('#') === 0) {
color = color.slice(1);
}
let r;
let g;
let b;
const rgb = color.match(/^rgba?[\s+]?\([\s+]?(\d+)[\s+]?,[\s+]?(\d+)[\s+]?,[\s+]?(\d+)[\s+]?/i);
if (rgb && rgb.length === 4) {
r = parseInt(rgb[1], 10);
g = parseInt(rgb[2], 10);
b = parseInt(rgb[3], 10);
} else {
// convert 3-digit hex to 6-digits.
if (color.length === 3) {
color = color[0] + color[0] + color[1] + color[1] + color[2] + color[2];
}
if (color.length !== 6) {
return false;
}
r = parseInt(color.slice(0, 2), 16);
g = parseInt(color.slice(2, 4), 16);
b = parseInt(color.slice(4, 6), 16);
}
// http://stackoverflow.com/a/3943023/112731
return (r * 0.299 + g * 0.587 + b * 0.114) <= 186;
};
/**
* Get the time string in the format 00:00.
* @param {string | number} seconds
*/
static getTimeString(seconds) {
seconds = parseFloat(seconds);
if (isNaN(seconds)) {
return '--:--';
}
const hours = Math.floor(seconds / 3600);
let minutes = Math.floor((seconds % 3600) / 60);
let secs = seconds % 60;
if (hours) {
if (minutes < 10) minutes = '0' + minutes;
if (secs < 10) secs = '0' + secs;
return hours + ':' + minutes + ':' + secs;
} else {
if (secs < 10) secs = '0' + secs;
return minutes + ':' + secs;
}
}
/**
* Gets the wind direction with the given angle (degrees).
* @param {number} angle in degrees.
* @returns {string | undefined}
*/
static getWindDirection(angle) {
if (angle >= 0 && angle < 11.25) {
return 'N'
} else if (angle >= 11.25 && angle < 33.75) {
return 'NNE'
} else if (angle >= 33.75 && angle < 56.25) {
return 'NE'
} else if (angle >= 56.25 && angle < 78.75) {
return 'ENE'
} else if (angle >= 78.75 && angle < 101.25) {
return 'E'
} else if (angle >= 101.25 && angle < 123.75) {
return 'ESE'
} else if (angle >= 123.75 && angle < 146.25) {
return 'SE'
} else if (angle >= 146.25 && angle < 168.75) {
return 'SSE'
} else if (angle >= 168.75 && angle < 191.25) {
return 'S'
} else if (angle >= 191.25 && angle < 213.75) {
return 'SSW'
} else if (angle >= 213.75 && angle < 236.25) {
return 'SW'
} else if (angle >= 236.25 && angle < 258.75) {
return 'WSW'
} else if (angle >= 258.75 && angle < 281.25) {
return 'W'
} else if (angle >= 281.25 && angle < 303.75) {
return 'WNW'
} else if (angle >= 303.75 && angle < 326.25) {
return 'NW'
} else if (angle >= 326.25 && angle < 348.75) {
return 'NNW'
} else if (angle >= 348.75) {
return 'N'
}
}
/**
* Pad the given number with a zero if its not 2 digits long.
* @param {string | number} num
*/
static padding(num) {
if (typeof num === 'string') {
if (num.length < 2) {
return '0' + num;
} else {
return num;
}
} else if (num < 10) {
return '0' + num;
} else {
return num;
}
}
/**
* Sets the date format.
* @param {string} format
*/
static setDataFormat(format) {
if (format) {
Utils.dateFormat = format.toUpperCase().split(/[.-/]/);
Utils.dateFormat.splice(Utils.dateFormat.indexOf('YYYY'), 1);
}
}
/**
* Converts the date to a string.
* @param {string | number | Date} now
* @returns {string}
*/
static date2string(now) {
if (typeof now === 'string') {
now = now.trim();
if (!now) return '';
// only letters
if (now.match(/^[\w\s]+$/)) {
// Day of week
return now;
}
let m = now.match(/(\d{1,4})[-./](\d{1,2})[-./](\d{1,4})/);
if (m) {
let a = [parseInt(m[1], 10), parseInt(m[2], 10), parseInt(m[3], 10)];
let year = a.find(y => y > 31);
a.splice(a.indexOf(year), 1);
let day = a.find(m => m > 12);
if (day) {
a.splice(a.indexOf(day), 1);
now = new Date(year, a[0] - 1, day);
} else {
// MM DD
if (Utils.dateFormat[0][0] === 'M' && Utils.dateFormat[1][0] === 'D') {
now = new Date(year, a[0] - 1, a[1]);
if (Math.abs(now.getTime - Date.now()) > 3600000 * 24 * 10) {
now = new Date(year, a[1] - 1, a[0]);
}
} else
// DD MM
if (Utils.dateFormat[0][0] === 'D' && Utils.dateFormat[1][0] === 'M') {
now = new Date(year, a[1] - 1, a[0]);
if (Math.abs(now.getTime - Date.now()) > 3600000 * 24 * 10) {
now = new Date(year, a[0] - 1, a[1]);
}
} else {
now = new Date(now);
}
}
} else {
now = new Date(now);
}
} else {
now = new Date(now);
}
let date = I18n.t('ra_dow_' + days[now.getDay()]).replace('ra_dow_', '');
date += '. ' + now.getDate() + ' ' + I18n.t('ra_month_' + months[now.getMonth()]).replace('ra_month_', '');
return date;
}
/**
 * Render a text as a link.
 * Splits the text on embedded <a ...>...</a> fragments and turns each into a
 * real anchor element; the plain segments around them become spans.
 * @param {string} text
 * @returns {string | JSX.Element[]} the original string when it contains no anchors
 */
static renderTextWithA(text) {
let m = text.match(/<a [^<]+<\/a>/);
if (m) {
const result = [];
let key = 1;
do {
// Extract attributes from the matched anchor tag (double- or single-quoted).
let href = m[0].match(/href="([^"]+)"/) || m[0].match(/href='([^']+)'/);
let target = m[0].match(/target="([^"]+)"/) || m[0].match(/target='([^']+)'/);
let rel = m[0].match(/rel="([^"]+)"/) || m[0].match(/rel='([^']+)'/);
const title = m[0].match(/>([^<]*)</);
// Split the remaining text around the current anchor match.
const p = text.split(m[0]);
p[0] && result.push(<span key={'a' + (key++)}>{p[0]}</span>);
// eslint-disable-next-line
result.push(<a key={'a' + (key++)} href={href ? href[1] : ''} target={target ? target[1] : '_blank'} rel={rel ? rel[1] : ''}>{title ? title[1] : ''}</a>);
// Continue scanning in the tail for further anchors.
text = p[1];
m = text && text.match(/<a [^<]+<\/a>/);
if (!m) {
p[1] && result.push(<span key={'a' + (key++)}>{p[1]}</span>);
}
} while (m);
return result;
} else {
return text;
}
}
/**
* Get the smart name of the given state.
* @param {Record<string, ioBroker.StateObject> | ioBroker.StateObject} states
* @param {string} id
* @param {string} instanceId
* @param {boolean} [noCommon]
*/
static getSmartName(states, id, instanceId, noCommon) {
if (!id) {
if (!noCommon) {
if (!states.common) {
return states.smartName;
} else {
if (states && !states.common) {
return states.smartName;
} else {
return states.common.smartName;
}
}
} else {
if (states && !states.common) {
return states.smartName;
} else {
return (states &&
states.common &&
states.common.custom &&
states.common.custom[instanceId]) ?
states.common.custom[instanceId].smartName : undefined;
}
}
} else
if (!noCommon) {
return states[id].common.smartName;
} else {
return (states[id] &&
states[id].common &&
states[id].common.custom &&
states[id].common.custom[instanceId]) ?
states[id].common.custom[instanceId].smartName || null : null;
}
}
/**
* Get the smart name from a state.
* @param {ioBroker.StateObject} obj
* @param {string} instanceId
* @param {boolean} [noCommon]
*/
static getSmartNameFromObj(obj, instanceId, noCommon) {
if (!noCommon) {
if (!obj.common) {
return obj.smartName;
} else {
if (obj && !obj.common) {
return obj.smartName;
} else {
return obj.common.smartName;
}
}
} else {
if (obj && !obj.common) {
return obj.smartName;
} else {
return (obj &&
obj.common &&
obj.common.custom &&
obj.common.custom[instanceId]) ?
obj.common.custom[instanceId].smartName : undefined;
}
}
}
/**
* Enable smart name for a state.
* @param {ioBroker.StateObject} obj
* @param {string} instanceId
* @param {boolean} [noCommon]
*/
static enableSmartName(obj, instanceId, noCommon) {
if (noCommon) {
obj.common.custom = obj.common.custom || {};
obj.common.custom[instanceId] = obj.common.custom[instanceId] || {};
obj.common.custom[instanceId].smartName = {};
} else {
obj.common.smartName = {};
}
}
/**
* Completely remove smart name from a state.
* @param {ioBroker.StateObject} obj
* @param {string | number} instanceId
* @param {boolean} [noCommon]
*/
static removeSmartName(obj, instanceId, noCommon) {
if (noCommon) {
if (obj.common && obj.common.custom && obj.common.custom[instanceId]) {
obj.common.custom[instanceId] = null;
}
} else {
obj.common.smartName = null;
}
}
/**
 * Update the smart name of a state.
 * Migrates legacy formats (plain-string smartName, native.byON), then applies
 * the new per-language name, byON and smartType values, and finally prunes
 * empty smartName structures.
 * @param {ioBroker.StateObject} obj state object (mutated in place)
 * @param {string} newSmartName per-language name for the current UI language
 * @param {string | undefined} byON
 * @param {string | undefined} smartType
 * @param {string} instanceId
 * @param {boolean} [noCommon] store under common.custom[instanceId] instead of common
 */
static updateSmartName(obj, newSmartName, byON, smartType, instanceId, noCommon) {
    const language = I18n.getLanguage();
    // convert Old format: plain string => {<language>: string}
    if (typeof obj.common.smartName === 'string') {
        const nnn = obj.common.smartName;
        obj.common.smartName = {};
        obj.common.smartName[language] = nnn;
    }
    // convert old settings: native.byON is obsolete; ensure smartName is an object
    if (obj.native && obj.native.byON) {
        delete obj.native.byON;
        let _smartName = obj.common.smartName;
        if (!_smartName || typeof _smartName !== 'object') {
            _smartName = {en: _smartName};
            _smartName[language] = _smartName.en;
        }
        obj.common.smartName = _smartName;
    }
    // Apply smartType; falsy (but defined) smartType deletes the attribute.
    if (smartType !== undefined) {
        if (noCommon) {
            obj.common.custom = obj.common.custom || {};
            obj.common.custom[instanceId] = obj.common.custom[instanceId] || {};
            obj.common.custom[instanceId].smartName = obj.common.custom[instanceId].smartName || {};
            if (!smartType) {
                delete obj.common.custom[instanceId].smartName.smartType;
            } else {
                obj.common.custom[instanceId].smartName.smartType = smartType;
            }
        } else {
            obj.common.smartName = obj.common.smartName || {};
            if (!smartType) {
                delete obj.common.smartName.smartType;
            } else {
                obj.common.smartName.smartType = smartType;
            }
        }
    }
    // Apply byON unconditionally when provided.
    if (byON !== undefined) {
        if (noCommon) {
            obj.common.custom = obj.common.custom || {};
            obj.common.custom[instanceId] = obj.common.custom[instanceId] || {};
            obj.common.custom[instanceId].smartName = obj.common.custom[instanceId].smartName || {};
            obj.common.custom[instanceId].smartName.byON = byON;
        } else {
            obj.common.smartName = obj.common.smartName || {};
            obj.common.smartName.byON = byON;
        }
    }
    if (newSmartName !== undefined) {
        let smartName;
        if (noCommon) {
            obj.common.custom = obj.common.custom || {};
            obj.common.custom[instanceId] = obj.common.custom[instanceId] || {};
            obj.common.custom[instanceId].smartName = obj.common.custom[instanceId].smartName || {};
            smartName = obj.common.custom[instanceId].smartName;
        } else {
            obj.common.smartName = obj.common.smartName || {};
            smartName = obj.common.smartName;
        }
        smartName[language] = newSmartName;
        // If smart name deleted (empty, or identical to the object name for
        // non-button roles), remove the language entry again.
        if (smartName && (!smartName[language] ||
            (smartName[language] === obj.common.name &&
            (!obj.common.role || obj.common.role.indexOf('button') >= 0)))) {
            delete smartName[language];
            let empty = true;
            // Check if structure has any definitions
            for (const key in smartName) {
                if (smartName.hasOwnProperty(key)) {
                    empty = false;
                    break;
                }
            }
            // If empty => delete smartName completely
            if (empty) {
                if (noCommon) {
                    if (obj.common.custom[instanceId].smartName.byON === undefined) {
                        delete obj.common.custom[instanceId];
                    } else {
                        // NOTE(review): these deletes target custom[instanceId]
                        // directly, while the mirrored branch below deletes the
                        // language keys from the smartName object itself. This
                        // looks like it should be
                        // obj.common.custom[instanceId].smartName.<lang> —
                        // confirm against callers before changing.
                        delete obj.common.custom[instanceId].en;
                        delete obj.common.custom[instanceId].de;
                        delete obj.common.custom[instanceId].ru;
                        delete obj.common.custom[instanceId].nl;
                        delete obj.common.custom[instanceId].pl;
                        delete obj.common.custom[instanceId].it;
                        delete obj.common.custom[instanceId].fr;
                        delete obj.common.custom[instanceId].pt;
                        delete obj.common.custom[instanceId].es;
                        delete obj.common.custom[instanceId]['zh-cn'];
                    }
                } else {
                    if (obj.common.smartName.byON !== undefined) {
                        // byON present: keep the structure, drop only languages.
                        delete obj.common.smartName.en;
                        delete obj.common.smartName.de;
                        delete obj.common.smartName.ru;
                        delete obj.common.smartName.nl;
                        delete obj.common.smartName.pl;
                        delete obj.common.smartName.it;
                        delete obj.common.smartName.fr;
                        delete obj.common.smartName.pt;
                        delete obj.common.smartName.es;
                        delete obj.common.smartName['zh-cn'];
                    } else {
                        obj.common.smartName = null;
                    }
                }
            }
        }
    }
}
/**
* Disable the smart name of a state.
* @param {ioBroker.StateObject} obj
* @param {string} instanceId
* @param {boolean} [noCommon]
*/
static disableSmartName(obj, instanceId, noCommon) {
if (noCommon) {
obj.common.custom = obj.common.custom || {};
obj.common.custom[instanceId] = obj.common.custom[instanceId] || {};
obj.common.custom[instanceId].smartName = false;
} else {
obj.common.smartName = false;
}
}
/**
* Copy text to the clipboard.
* @param {string} text
* @param {Event} [e]
*/
static copyToClipboard(text, e) {
const el = window.document.createElement('textarea');
el.value = text;
window.document.body.appendChild(el);
el.select();
window.document.execCommand('copy');
window.document.body.removeChild(el);
console.log(text);
e && e.stopPropagation();
e && e.preventDefault();
}
/**
* Gets the extension of a file name.
* @param {string | null} [fileName] the file name.
* @returns {string | null} The extension in lower case.
*/
static getFileExtension(fileName) {
const pos = (fileName || '').lastIndexOf('.');
if (pos !== -1) {
return fileName.substring(pos + 1).toLowerCase();
} else {
return null;
}
}
/**
* Format number of bytes as a string with B, KB, MB or GB.
* The base for all calculations is 1024.
* @param {number} bytes The number of bytes.
* @returns {string} The formatted string (e.g. '723.5 KB')
*/
static formatBytes(bytes) {
if (Math.abs(bytes) < 1024) {
return bytes + ' B';
}
const units = ['KB','MB','GB'];
//const units = ['KiB','MiB','GiB','TiB','PiB','EiB','ZiB','YiB'];
let u = -1;
do {
bytes /= 1024;
++u;
} while (Math.abs(bytes) >= 1024 && u < units.length - 1);
return bytes.toFixed(1) + ' ' + units[u];
}
// Big thanks to : https://stackoverflow.com/questions/35969656/how-can-i-generate-the-opposite-color-according-to-current-color
/**
* Invert the given color
* @param {string} hex Color in the format '#rrggbb' or '#rgb' (or without hash)
* @param {boolean} [bw] Set to black or white.
* @returns {string}
*/
static invertColor(hex, bw) {
if (hex.indexOf('#') === 0) {
hex = hex.slice(1);
}
// convert 3-digit hex to 6-digits.
if (hex.length === 3) {
hex = hex[0] + hex[0] + hex[1] + hex[1] + hex[2] + hex[2];
}
if (hex.length !== 6) {
throw new Error('Invalid HEX color.');
}
let r = parseInt(hex.slice(0, 2), 16);
let g = parseInt(hex.slice(2, 4), 16);
let b = parseInt(hex.slice(4, 6), 16);
if (bw) {
// http://stackoverflow.com/a/3943023/112731
return (r * 0.299 + g * 0.587 + b * 0.114) > 186
? '#000000'
: '#FFFFFF';
}
// invert color components
r = (255 - r).toString(16);
g = (255 - g).toString(16);
b = (255 - b).toString(16);
// pad each with zeros and return
return '#' + r.padStart(2, '0') + g.padStart(2, '0') + b.padStart(2, '0');
}
// https://github.com/lukeed/clsx/blob/master/src/index.js
// License
// MIT © <NAME>
/**
* @private
* @param {any} mix
* @returns {string}
*/
static _toVal(mix) {
let k, y, str='';
if (typeof mix === 'string' || typeof mix === 'number') {
str += mix;
} else if (typeof mix === 'object') {
if (Array.isArray(mix)) {
for (k=0; k < mix.length; k++) {
if (mix[k]) {
if ((y = Utils._toVal(mix[k]))) {
str && (str += ' ');
str += y;
}
}
}
} else {
for (k in mix) {
if (mix[k]) {
str && (str += ' ');
str += k;
}
}
}
}
return str;
}
// https://github.com/lukeed/clsx/blob/master/src/index.js
// License
// MIT © <NAME>
/**
* Convert any object to a string with its values.
* @returns {string}
*/
static clsx () {
let i = 0;
let tmp;
let x;
let str = '';
while (i < arguments.length) {
if ((tmp = arguments[i++])) {
if ((x = Utils._toVal(tmp))) {
str && (str += ' ');
str += x
}
}
}
return str;
}
/**
* Get the current theme name (either from local storage or the browser settings).
* @param {string} [themeName]
* @returns {string}
*/
static getThemeName(themeName = '') {
return themeName ? themeName : window.localStorage && window.localStorage.getItem('App.themeName') ?
window.localStorage.getItem('App.themeName') : window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'colored';
}
/**
* Get the type of theme.
* @param {string} [themeName]
* @returns {'dark' | 'light'}
*/
static getThemeType(themeName = '') {
themeName = themeName || (window.localStorage && window.localStorage.getItem('App.themeName'));
return themeName === 'dark' || themeName === 'blue' ? 'dark' : 'light';
}
/**
* Set the theme name and theme type.
* @param {string} themeName
*/
static setThemeName(themeName) {
window.localStorage.setItem('App.themeName', themeName);
window.localStorage.setItem('App.theme', themeName === 'dark' || themeName === 'blue' ? 'dark' : 'light');
}
/**
* Toggle the theme name between 'dark' and 'colored'.
* @param {string | null} themeName
* @returns {string} the new theme name.
*/
static toggleTheme(themeName) {
themeName = themeName || (window.localStorage && window.localStorage.getItem('App.themeName'));
// dark => blue => colored => light => dark
const newThemeName = themeName === 'dark' ? 'blue' :
(themeName === 'blue' ? 'colored' :
(themeName === 'colored' ? 'light' : 'dark'));
Utils.setThemeName(newThemeName);
return newThemeName;
}
/**
* Parse a query string into its parts.
* @param {string} query
* @returns {Record<string, string | boolean | number>}
*/
static parseQuery(query) {
query = (query || '').toString().replace(/^\?/, '');
/** @type {Record<string, string | boolean | number>} */
const result = {};
query.split('&').forEach(part => {
part = part.trim();
if (part) {
const parts = part.split('=');
const attr = decodeURIComponent(parts[0]).trim();
if (parts.length > 1) {
result[attr] = decodeURIComponent(parts[1]);
if (result[attr] === 'true') {
result[attr] = true;
} else if (result[attr] === 'false') {
result[attr] = false;
} else {
const f = parseFloat(result[attr]);
if (f.toString() === result[attr]) {
result[attr] = f;
}
}
} else {
result[attr] = true;
}
}
});
return result;
}
/**
* Returns parent ID.
* @param {string} id
* @returns {string | null} parent ID or null if no parent
*/
static getParentId(id) {
const p = (id || '').toString().split('.');
if (p.length > 1) {
p.pop();
return p.join('.');
} else {
return null;
}
}
static formatDate(dateObj, dateFormat) {
// format could be DD.MM.YYYY, YYYY.MM.DD or MM/DD/YYYY
if (!dateObj) {
return '';
}
let text;
let mm = dateObj.getMonth() + 1;
if (mm < 10) {
mm = '0' + mm;
}
let dd = dateObj.getDate();
if (dd < 10) {
dd = '0' + dd;
}
if (dateFormat === 'MM/DD/YYYY') {
text = mm + '/' + dd + '/' + dateObj.getFullYear();
} else {
text = dateObj.getFullYear() + '-' + mm + '-' + dd;
}
// time
let v = dateObj.getHours();
if (v < 10) {
text += ' 0' + v;
} else {
text += ' ' + v;
}
v = dateObj.getMinutes();
if (v < 10) {
text += ':0' + v;
} else {
text += ':' + v;
}
v = dateObj.getSeconds();
if (v < 10) {
text += ':0' + v;
} else {
text += ':' + v;
}
v = dateObj.getMilliseconds();
if (v < 10) {
text += '.00' + v;
} else if (v < 100) {
text += '.0' + v;
} else {
text += '.' + v;
}
return text;
}
/**
 * Convert a markdown heading text into an anchor-link slug:
 * keeps only latin/cyrillic letters and digits, lower-cased.
 * @param {string} text
 * @returns {string}
 */
static MDtext2link(text) {
    // NOTE(review): the replacement below swaps a whitespace char for what
    // renders as a plain space — presumably this was a non-breaking space
    // (\u00A0) originally and got normalized during copy/paste; verify
    // against the upstream source.
    const m = text.match(/\d+\.\)\s/);
    if (m) {
        text = text.replace(m[0], m[0].replace(/\s/, ' '));
    }
    // After stripping every non-alphanumeric char there can be no whitespace
    // left; the trailing trim()/replace are defensive no-ops.
    return text.replace(/[^a-zA-Zа-яА-Я0-9]/g, '').trim().replace(/\s/g, '').toLowerCase();
}
static openLink(url, target) {
if (target === 'this') {
window.location = url;
} else {
window.open(url, target || '_blank');
}
}
static MDgetTitle(text) {
let {body, header} = Utils.extractHeader(text);
if (!header.title) {
// remove {docsify-bla}
body = body.replace(/{[^}]*}/g, '');
body = body.trim();
const lines = body.replace(/\r/g, '').split('\n');
for (let i = 0; i < lines.length; i++) {
if (lines[i].startsWith('# ')) {
return lines[i].substring(2).trim();
}
}
return '';
} else {
return header.title;
}
}
/**
 * Split a markdown document into its front-matter header and body.
 * The header is the block between a leading '---' line and the next '---';
 * 'key: value' lines become attributes ('true'/'false' => boolean, numeric
 * strings => number), bare lines become attributes set to true.
 * @param {string} text
 * @returns {{header: Record<string, any>, body: string}}
 */
static MDextractHeader(text) {
    const attrs = {};
    if (text.substring(0, 3) === '---') {
        const pos = text.substring(3).indexOf('\n---');
        if (pos !== -1) {
            const _header = text.substring(3, pos + 3);
            const lines = _header.replace(/\r/g, '').split('\n');
            lines.forEach(line => {
                if (!line.trim()) {
                    return;
                }
                const pos = line.indexOf(':');
                if (pos !== -1) {
                    const attr = line.substring(0, pos).trim();
                    attrs[attr] = line.substring(pos + 1).trim();
                    // strip surrounding quotes
                    attrs[attr] = attrs[attr].replace(/^['"]|['"]$/g, '');
                    if (attrs[attr] === 'true') {
                        attrs[attr] = true;
                    } else if (attrs[attr] === 'false') {
                        attrs[attr] = false;
                    } else if (parseFloat(attrs[attr]).toString() === attrs[attr]) {
                        attrs[attr] = parseFloat(attrs[attr]);
                    }
                } else {
                    attrs[line.trim()] = true;
                }
            });
            // skip past leading '---' (3 chars) + header + '\n---' (4 chars)
            text = text.substring(pos + 7);
        }
    }
    return {header: attrs, body: text};
}
static MDremoveDocsify(text) {
const m = text.match(/{docsify-[^}]*}/g);
if (m) {
m.forEach(doc => text = text.replace(doc, ''));
}
return text;
}
/**
 * Trigger a browser download of the given JSON structure.
 * (Fixed JSDoc: the function returns nothing; the previous doc claimed it
 * returned the json structure and omitted the json parameter.)
 * @param {string} filename name for the downloaded file
 * @param {object} json structure to serialize (pretty-printed, 2-space indent)
 */
static generateFile(filename, json) {
    // Create a temporary invisible <a download> element and click it.
    let el = document.createElement('a');
    el.setAttribute('href', 'data:application/json;charset=utf-8,' + encodeURIComponent(JSON.stringify(json, null, 2)));
    el.setAttribute('download', filename);
    el.style.display = 'none';
    document.body.appendChild(el);
    el.click();
    document.body.removeChild(el);
}
}
export default Utils;
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IllegalArgumentError = void 0;
const client_error_js_1 = require("./client.error.js");
/**
 * Error raised when a function receives an invalid argument.
 * Message shape: "Illegal argument[ (caused by <paramName>)][: <message>]".
 */
class IllegalArgumentError extends client_error_js_1.ClientError {
    constructor(message, paramName) {
        const causedBy = paramName ? ` (caused by ${paramName})` : '';
        const details = message ? `: ${message}` : '';
        super(`Illegal argument${causedBy}${details}`);
    }
}
exports.IllegalArgumentError = IllegalArgumentError;
//# sourceMappingURL=illegalArgument.error.js.map
|
// pages/destination/destination.js
Page({
data:{
hotcity:["热门","周边","香港","澳门","海南","云南"],
nearbyCity:["昆明","红河","西双版纳","大理","文山","楚雄","丽江","香港"],
active:5,
hotView:[{
title:"大理三塔",
imgUrl:"/images/destination/view1.png"
},{
title:"丽江古城",
imgUrl:"/images/destination/view2.png"
},{
title:"昆明石林",
imgUrl:"/images/destination/view3.png"
},{
title:"丘北普者黑",
imgUrl:"/images/destination/view4.png"
}]
},
activeClick (e){
let index=e.currentTarget.dataset.index;
this.setData({
active:index
})
},
// 进入景点详情
enterDetail(e) {
let sid=e.currentTarget.dataset.id;
wx.navigateTo({
url: '../index/view-detail/view-detail?sid='+sid+''
})
},
enterCity(e){
let cityname=e.currentTarget.dataset.cityname;
wx.navigateTo({
url: 'cityView/cityView?cityname='+cityname+''
})
}
})
|
import torch
from torchvision.datasets import FashionMNIST
from torchvision import transforms
# Human-readable class names for the 10 FashionMNIST labels, index-aligned
# with the integer labels the dataset produces.
FASHION_MNIST_CLASSES = [
    "T-shirt/top", "Trouser", "Pullover", "Dress", "Coat",
    "Sandal", "Shirt", "Sneaker", "Bag", "Ankle boot"
]


def output_label(label):
    """Map an integer label (or a 0-d torch.Tensor) to its class name.

    Args:
        label: int-like label in [0, 9], or a torch.Tensor holding one.

    Returns:
        str: the FashionMNIST class name.
    """
    # isinstance is the idiomatic type check (also accepts Tensor subclasses);
    # indexing the list directly avoids rebuilding a dict on every call.
    value = label.item() if isinstance(label, torch.Tensor) else label
    return FASHION_MNIST_CLASSES[value]
def get_fashion_mnist_dataset(is_train_dataset: bool = True) -> FashionMNIST:
    """Load the FashionMNIST train or test split, downloading to ./data if needed.

    Images are converted to tensors and normalized with mean/std 0.5 for the
    single channel (values end up roughly in [-1, 1]).
    """
    transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (0.5,))])
    return FashionMNIST(root='./data', train=is_train_dataset, download=True, transform=transform)
|
#!/usr/bin/env bash
# Interactive release helper: builds, commits, tags and publishes $VERSION.
set -e

echo "Enter release version: "
# -r: do not mangle backslashes in the typed version
read -r VERSION

read -p "Releasing $VERSION - are you sure? (y/n)" -n 1 -r
echo # move to a new line
if [[ $REPLY =~ ^[Yy]$ ]]; then
  echo "Releasing $VERSION ..."
  # npm test

  # build (quote $VERSION everywhere so odd input cannot word-split)
  VERSION="$VERSION" npm run build

  # commit
  git add -A
  git commit -m "[build] $VERSION"
  npm version "$VERSION" --message "[release] $VERSION"

  # publish
  git push origin "refs/tags/v$VERSION"
  git push
  npm publish --access public
fi
|
import { formatFlux } from '../util/format';
export interface IAccount {
id: string,
username: string,
idToken: string,
//TODO source of bug? on check auth need to check expiresIn also?
//Actually get a 401 so better with a account action that triggers a redirect to login and resets all state
expiresIn: string,
// TODO: good way to do it? (function with expiresCheck, not sure)
isAuthenticated: boolean
}
/**
 * POST the credentials to the accounts login endpoint.
 * Returns the parsed JSON body (expected to hold idToken and expiresIn).
 * NOTE(review): non-2xx responses are not handled here — the caller inspects
 * result.error; confirm the API always returns JSON.
 */
export async function postLogin({username, password}) {
    // Login - holds idToken and expiresIn
    const response = await fetch(process.env.REACT_APP_API_URL + '/accounts/login', {
        method: 'POST',
        headers: {'Content-Type': 'application/json'},
        body: JSON.stringify({username, password})
    });
    return await response.json();
}
/**
 * Build the HTTP Authorization header for an authenticated API request.
 */
export function getAuthHeader({idToken}) {
    const headerValue = `Bearer ${idToken}`;
    return {'Authorization': headerValue};
}
/**
 * Whether the account is currently authenticated.
 * TODO: add check for isExpired? maybe other stuff?
 */
export function isAuth(account: IAccount) {
    return !!account.isAuthenticated;
}
// Note: could group into an object also
export const LOGIN_REQUEST = 'LOGIN_REQUEST';
export const LOGIN_SUCCESS = 'LOGIN_SUCCESS';
export const LOGIN_ERROR = 'LOGIN_ERROR';
export function loginAction({username, password}) {
// Note: assuming arrow func is to closure this scope for, hmm something
return async (dispatch) => {
if (!username || !password) {
dispatch(formatFlux({
type: LOGIN_ERROR,
payload: new Error('Login missing username or password')
}));
}
dispatch({
type: LOGIN_REQUEST,
payload: {}
});
try {
const result = await postLogin({
username: username.trim(),
password: password.<PASSWORD>()
});
if (result.error) {
throw new Error(result.payload.message);
}
dispatch(formatFlux({
type: LOGIN_SUCCESS,
payload: result.payload
}));
} catch(err) {
dispatch(formatFlux({
type: LOGIN_ERROR,
payload: err
}));
}
};
}
const INITIAL_STATE:IAccount = {
id: '',
username: '',
idToken: '',
expiresIn: '',
isAuthenticated: false
};
/**
 * Account state reducer: tracks login lifecycle (request/success/error)
 * on top of the IAccount shape.
 */
export function accountReducer(state = INITIAL_STATE, action) {
    if (action.type === LOGIN_REQUEST) {
        return {
            ...state,
            ...action.payload,
            isLoading: true
        };
    }
    if (action.type === LOGIN_SUCCESS) {
        return {
            ...state,
            ...action.payload,
            // authenticated only when the payload actually carries a token
            isAuthenticated: Boolean(action.payload.idToken),
            isLoading: false
        };
    }
    if (action.type === LOGIN_ERROR) {
        return {
            ...state,
            error: action.payload.message,
            isAuthenticated: false,
            isLoading: false,
        };
    }
    return state;
}
// TODO
//
/**
* Account Service uses Session Storage. Mainly just used for Login APIs to do
* an initial check for any sessions - then Redux is used from then on.
*
* So sessionStorage:
* - data in sessionStorage is cleared when the page session ends
* - page session continues: page reload
* - page session ends: open a new tab (same URL - each gets sessionStore)
* - page session ends: closing tab
*
* BIG BONUS?
* - No cleanup, all session data cleared on tab/browser close.
*/
/**
 * Read any persisted account session from sessionStorage.
 * Missing keys default to ''; the auth flag derives from idToken presence.
 */
export function getAccountSession() {
    const read = (key: string) => window.sessionStorage.getItem(key) || '';
    const idToken = read('idToken');
    return {
        id: read('id'),
        username: read('username'),
        idToken,
        expiresIn: read('expiresIn'),
        isAuthenticated: idToken ? true : false
    };
}
/**
 * Persist the account session fields into sessionStorage.
 */
export function setAccountSession({id, username, idToken, expiresIn}) {
    const entries: Record<string, string> = {id, username, idToken, expiresIn};
    Object.keys(entries).forEach(key => window.sessionStorage.setItem(key, entries[key]));
}
|
import { css } from '@emotion/react';
import { useTheme } from '@mui/material';
/**
 * Emotion style factory for a themed text-input component (MUI theme).
 * Returns css generators (label/container/input take state flags) plus
 * static styles and the theme itself.
 */
export const useStyles = () => {
    const theme = useTheme();
    // Label above the input; switches to the error color when invalid.
    const getLabel = ({ hasError }: { hasError: boolean }) => css`
display: block;
margin-bottom: 4px;
${hasError && `color: ${theme.palette.error.main};`};
`;
    // Input wrapper; the border lights up (error/primary) on focus-within.
    const getInputContainer = ({ hasError }: { hasError: boolean }) => css`
display: flex;
align-items: center;
padding: ${theme.spacing(1, 1, 1, 2)};
border-radius: 12px;
border: 2px solid transparent;
background-color: ${theme.palette.background.default};
&:focus-within {
border-color: ${hasError
? theme.palette.interactive.error
: theme.palette.interactive.primary};
}
`;
    const leftIcon = css`
margin-right: ${theme.spacing(1)};
`;
    // The <input> itself; reserves space when a right adornment is shown.
    const getInput = ({ hasRightAdornment }: { hasRightAdornment: boolean }) => css`
background-color: transparent;
flex: 1;
font-weight: 600;
line-height: ${theme.spacing(3)};
height: ${theme.spacing(5)};
padding-top: 4px; /* Vertically align input content */
border: 0;
${hasRightAdornment && `margin-right: ${theme.spacing(1)}`};
&:focus {
outline: 0;
}
`;
    const rightButton = css`
margin-right: ${theme.spacing(1)};
`;
    // Helper/description text rendered under the input.
    const description = css`
display: block;
color: ${theme.palette.text.secondary};
margin-top: 4px;
`;
    return { getLabel, getInputContainer, leftIcon, getInput, rightButton, description, theme };
};
|
#!/bin/bash
# Collect the static libraries needed for a macOS LTSMin release build and
# export the build flags expected by travis/build-release-generic.sh.
set -e
#set -o xtrace

# -p: do not fail under `set -e` when re-running on a partially prepared machine
mkdir -p "$HOME/static-libs"

cp "$HOME/ltsmin-deps/lib/libzmq.a" "$HOME/static-libs"
cp "$HOME/ltsmin-deps/lib/libczmq.a" "$HOME/static-libs"
cp /usr/local/lib/libgmp.a "$HOME/static-libs"
cp /usr/local/lib/libpopt.a "$HOME/static-libs"

# Resolve the Homebrew keg path for the currently installed libxml2.
libxml2_version=$(brew list --versions libxml2 | cut -d' ' -f2)
cp "/usr/local/Cellar/libxml2/$libxml2_version/lib/libxml2.a" \
    "$HOME/static-libs"

export LTSMIN_LDFLAGS="-Wl,-search_paths_first"
# libiconv is necessary for libpopt
export LTSMIN_LDFLAGS="$LTSMIN_LDFLAGS -L$HOME/static-libs -weak-liconv"
export LTSMIN_CFLAGS=""
export LTSMIN_CXXFLAGS=""
export STRIP_FLAGS=""
export MCRL2_LIB_DIR="/mCRL2.app/Contents"

# Sourced (not exec'd) so the exported flags stay available to the build script.
. travis/build-release-generic.sh

set +e
|
#! /bin/bash

# This script expects to be executed with the current working directory:
#
# kgtk/datasets/time-machine-20101201

source common.sh

# ==============================================================================
echo -e "\nCount the properties in ${DATADIR}/${WIKIDATA_ALL_EDGES}-sorted.tsv."
kgtk ${KGTK_FLAGS} \
     unique ${VERBOSE} \
     --input-file ${DATADIR}/${WIKIDATA_ALL}-properties.tsv \
     --output-file ${DATADIR}/${WIKIDATA_ALL}-property-counts.tsv \
     --column label \
     --label total-count \
    |& tee ${LOGDIR}/${WIKIDATA_ALL}-property-counts.log
# BUGFIX: the --output-file above (and the --input-file in the lift step
# below) were missing the '/' between ${DATADIR} and the file name, unlike
# every other path in this script.

# ==============================================================================
echo -e "\nLift the property labels:"
#
# CMR: This step takes 10 minutes on my home workstation because
# ${DATADIR}/${WIKIDATA_ALL}-labels-en-only-sorted.tsv is fairly large (6.6G
# as of 05-Oct-2020).
kgtk ${KGTK_FLAGS} \
     lift ${VERBOSE} \
     --input-file ${DATADIR}/${WIKIDATA_ALL}-property-counts.tsv \
     --label-file ${DATADIR}/${WIKIDATA_ALL}-labels-en-only-sorted.tsv \
     --output-file ${DATADIR}/${WIKIDATA_ALL}-property-counts-with-labels.tsv \
     --columns-to-lift node1 \
     --prefilter-labels \
    |& tee ${LOGDIR}/${WIKIDATA_ALL}-property-counts-with-labels.log

# ==============================================================================
echo -e "\nCompress the data product files."
time gzip --keep --force --verbose \
     ${DATADIR}/${WIKIDATA_ALL}-property-counts-with-labels.tsv \
    |& tee ${LOGDIR}/count-properties-compress.log

# ==============================================================================
echo -e "\nDeliver the compressed data products to the KGTK Google Drive."
time rsync --archive \
     ${DATADIR}/${WIKIDATA_ALL}-property-counts-with-labels.tsv.gz \
     ${PRODUCTDIR}/ \
    |& tee ${LOGDIR}/count-properties-deliver.log
|
#!/usr/bin/env bash
# Clean old Nightly draft releases from the Slicer package server.
#
# Usage: $0 API_URL API_KEY OFFSET [-y]
#   $1 API_URL, $2 API_KEY, $3 number of Nightly revisions to keep,
#   $4 optional '-y' to skip the confirmation prompt.
#
# BUGFIX: the shebang was /bin/sh, but the script uses bash-only constructs
# ([[ ]], read -p -n, =~, BASH_SOURCE) which break under dash/ash.

echo
echo "### Cleaning the Draft folder"
echo
echo "* offset: $3 (Number of Nightly revision to keep)"
echo

revisionToDelete=$(slicer_package_manager_client --api-url "$1" --api-key "$2" draft list Slicer --offset "$3" | tail -n +3 | cut -d' ' -f1)

echo "List of resource to delete:"
echo
# $revisionToDelete is intentionally unquoted: one whitespace-separated ID per word.
for rev in $revisionToDelete
do
    echo "$rev"
done

if [[ ! $4 = "-y" ]]
then
    read -p "Do you really want to delete all of these revision? [Yy]" -n 1 -r
    echo
    if [[ ! $REPLY =~ ^[Yy]$ ]]
    then
        echo
        echo "CANCELED"
        echo
        # handle exits from shell or function but don't exit interactive shell
        [[ "$0" = "${BASH_SOURCE[0]}" ]] && exit 1 || return 1
    fi
    echo
fi

echo
for rev in $revisionToDelete
do
    slicer_package_manager_client --api-url "$1" --api-key "$2" draft delete Slicer "$rev"
done
echo
|
<reponame>huangjianqin/bigdata<filename>kin-jraft/kin-jraft-starter/src/test/java/org/kin/jraft/springboot/counter/server/CounterRaftServiceSpringBootTest.java<gh_stars>1-10
package org.kin.jraft.springboot.counter.server;
import org.kin.jraft.NodeStateChangeListener;
import org.kin.jraft.RaftServiceFactory;
import org.kin.jraft.StateMachineFactory;
import org.kin.jraft.springboot.EnableJRaftServer;
import org.kin.jraft.springboot.counter.processor.GetValueRequestProcessor;
import org.kin.jraft.springboot.counter.processor.IncrementAndGetRequestProcessor;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import java.util.concurrent.TimeUnit;
/**
 * Spring Boot smoke test for the counter JRaft service: boots the application
 * with an embedded raft node, keeps it alive for five minutes, then stops the
 * context.
 *
 * @author huangjianqin
 * @date 2021/11/8
 */
@EnableJRaftServer
@SpringBootApplication
public class CounterRaftServiceSpringBootTest {
    public static void main(String[] args) throws InterruptedException {
        ConfigurableApplicationContext context = SpringApplication.run(CounterRaftServiceSpringBootTest.class, args);
        // Keep the node running long enough for manual raft-cluster testing.
        Thread.sleep(TimeUnit.MINUTES.toMillis(5));
        context.stop();
    }

    /** Logs leadership changes of the local raft node. */
    @Bean
    public NodeStateChangeListener printListener() {
        return new NodeStateChangeListener() {
            @Override
            public void onBecomeLeader(long term) {
                System.out.println("[CounterBootstrap] Leader start on term: " + term);
            }

            @Override
            public void onStepDown(long oldTerm) {
                System.out.println("[CounterBootstrap] Leader step down: " + oldTerm);
            }
        };
    }

    /** Builds the counter service and registers its RPC request processors. */
    @Bean
    public RaftServiceFactory<CounterRaftService> counterRaftServiceFactory() {
        return (b, s) -> {
            CounterRaftService counterService = new CounterRaftServiceImpl(b);
            s.registerProcessor(new GetValueRequestProcessor(counterService));
            s.registerProcessor(new IncrementAndGetRequestProcessor(counterService));
            return counterService;
        };
    }

    /** State machine backing the replicated counter. */
    @Bean
    public StateMachineFactory<CounterStateMachine, CounterRaftService> counterStateMachineFactory() {
        return (b, s) -> new CounterStateMachine();
    }
}
|
#!/bin/bash
# Run the 'play' binary with a fixed rack and board, optionally under a
# wrapper command (e.g. a debugger/profiler) supplied after the project dir.
set -eo pipefail

# Absolute directory containing this script (works from any CWD).
SCRIPT_DIR=$(cd "$(dirname "$0")"; pwd)

# $1: project root holding boards/; remaining args form the wrapper command
# that the play invocation is appended to ("$@" may legitimately be empty).
PROJECT_DIR=$1
shift

"$@" ./src/play/play \
RQEJDEN \
"${SCRIPT_DIR}/tiles.txt" \
"${PROJECT_DIR}/boards/wwf_challenge.txt"
|
<reponame>gcusnieux/jooby
package morphia;
import javax.inject.Inject;
import org.mongodb.morphia.annotations.PreLoad;
/**
 * Morphia entity-lifecycle listener: invokes a service hook whenever a
 * {@code Beer} entity is about to be loaded from the datastore.
 */
public class MyListener {
    // Collaborator invoked on each pre-load event (injected).
    private Service service;

    @Inject
    public MyListener(final Service service) {
        this.service = service;
    }

    /** Called by Morphia before a {@code Beer} is hydrated. */
    @PreLoad void preLoad(final Beer object) {
        service.doSomething(object);
    }
}
|
#!/bin/bash
dr=/home/stream_vid/Dokumente/latex/Script_Diff_Gal
if [ "$1" == "" ]
then
dr="$dr"07/
echo "No path adjunct, checking:"
echo $dr
fl=$dr/script_diff_gal.tks
if [ -e $fl ]
then
echo "opening $fl as texmaker session file"
texmaker $fl &
else
echo "No such file found: $fl"
fi
else
dr="$dr"$1
echo $dr
texmaker $dr/script_diff_gal.tks &
fi
|
<filename>kattis/backspace.cc
// https://open.kattis.com/problems/backspace
#include <iostream>
using namespace std;
// Apply '<' as a backspace over the input token and print the result.
int main() {
    string s;
    cin >> s;
    string s2;
    s2.reserve(s.size());
    for (auto c : s) {
        if (c == '<') {
            // BUGFIX: guard the OUTPUT buffer. The original tested !s.empty()
            // (always true inside this loop), so pop_back() could run on an
            // empty s2 — undefined behavior, e.g. for input "a<<".
            if (!s2.empty()) s2.pop_back();
        } else {
            s2.push_back(c);
        }
    }
    cout << s2 << endl;
}
|
<gh_stars>1-10
# encoding: utf-8
require 'logstash/devutils/rspec/spec_helper'
require 'logstash/outputs/adls'
# Unit specs for the ADLS (Azure Data Lake Store) Logstash output plugin:
# registration failure without required settings, and documented config defaults.
describe 'outputs/adls' do
  # Dummy connection settings — never contacted by these specs.
  let(:adls_fqdn) { 'XXXXXXXXXXX.azuredatalakestore.net' }
  let(:adls_token_endpoint) { 'https://login.microsoftonline.com/<KEY>' }
  let(:adls_client_id) { '00000000-0000-0000-0000-000000000000' }
  let(:adls_client_key) { '<KEY>' }
  let(:path) { '/test.log' }
  let(:config) { { 'adls_fqdn' =>adls_fqdn, 'adls_token_endpoint' => adls_token_endpoint, 'adls_client_id' => adls_client_id, 'adls_client_key' => adls_client_key, 'path' => path } }
  subject(:plugin) { LogStash::Plugin.lookup("output", "adls").new(config) }
  describe '#initializing' do
    it 'should fail to register without %{[@metadata][cid]} in the path' do
      plugin = LogStash::Plugin.lookup("output", "adls")
      # Instantiating with no config at all must raise a configuration error.
      expect { plugin.new }.to raise_error(error=LogStash::ConfigurationError)
    end
    # Each example below pins one documented default of the plugin config.
    context "default values" do
      it 'should have default line_separator' do
        expect(subject.line_separator).to eq("\n")
      end
      it 'should have default created_files_permission' do
        expect(subject.created_files_permission).to eq(755)
      end
      it 'should have default adls_token_expire_security_margin' do
        expect(subject.adls_token_expire_security_margin).to eq(300)
      end
      it 'should have default single_file_per_thread' do
        expect(subject.single_file_per_thread).to eq(true)
      end
      it 'should have default retry_interval' do
        expect(subject.retry_interval).to eq(1)
      end
      it 'should have default max_retry_interval' do
        expect(subject.max_retry_interval).to eq(10)
      end
      it 'should have default retry_times' do
        expect(subject.retry_times).to eq(3)
      end
      it 'should have default exit_if_retries_exceeded' do
        expect(subject.exit_if_retries_exceeded).to eq(false)
      end
    end
  end
end
|
<gh_stars>10-100
import createFakeElement from 'tests/createFakeElement'
import { Anchor, createAnchorWithPoint } from './anchor';
import { DIRECTION } from './consts';
test('createAnchorWithPoint direction top', () => {
const anchor: Anchor = {
node: createFakeElement({
x: 750,
y: 450,
width: 50,
height: 150,
}),
direction: DIRECTION.TOP,
translation: [0.5, 1],
};
const anchorWithPoint = createAnchorWithPoint(anchor);
expect(anchorWithPoint).toMatchObject({ x: 775, y: 450 });
});
test('createAnchorWithPoint node function with direction top', () => {
const anchor: Anchor = {
node: () => createFakeElement({
x: 750,
y: 450,
width: 50,
height: 150,
}),
direction: DIRECTION.TOP,
translation: [0.5, 1],
};
const anchorWithPoint = createAnchorWithPoint(anchor);
expect(anchorWithPoint).toMatchObject({ x: 775, y: 450 });
});
test('createAnchorWithPoint test all directions', () => {
Object.values(DIRECTION).forEach(direction => {
const anchor: Anchor = {
node: () => createFakeElement({
x: 750,
y: 450,
width: 50,
height: 150,
}),
direction,
translation: [0.5, 1],
};
const anchorWithPoint = createAnchorWithPoint(anchor);
switch (direction) {
case DIRECTION.TOP_LEFT:
expect(anchorWithPoint).toMatchObject({ x: 750, y: 450 });
break
case DIRECTION.TOP_RIGHT:
expect(anchorWithPoint).toMatchObject({ x: 800, y: 450 });
break
case DIRECTION.RIGHT:
expect(anchorWithPoint).toMatchObject({ x: 800, y: 525 });
break
case DIRECTION.BOTTOM_LEFT:
expect(anchorWithPoint).toMatchObject({ x: 750, y: 600 });
break
case DIRECTION.BOTTOM:
expect(anchorWithPoint).toMatchObject({ x: 775, y: 600 });
break
case DIRECTION.BOTTOM_RIGHT:
expect(anchorWithPoint).toMatchObject({ x: 800, y: 600 });
break
case DIRECTION.LEFT:
expect(anchorWithPoint).toMatchObject({ x: 750, y: 525 });
break
case DIRECTION.TOP:
expect(anchorWithPoint).toMatchObject({ x: 775, y: 450 });
break
default:
expect(anchorWithPoint).toMatchObject({ x: 0, y: 0 });
break
}
})
});
test('createAnchorWithPoint test with wrong direction', () => {
const anchor: Anchor = {
node: () => createFakeElement({
x: 750,
y: 450,
width: 50,
height: 150,
}),
direction: 'test',
translation: [0.5, 1],
};
const anchorWithPoint = createAnchorWithPoint(anchor);
expect(anchorWithPoint).toMatchObject({ x: 775, y: 525 });
});
test('createAnchorWithPoint null point', () => {
  const anchor: Anchor = {
    node: () => null,
    direction: DIRECTION.TOP,
    translation: [0.5, 1],
  };
  // A node resolver returning null must make createAnchorWithPoint throw.
  // Jest's toThrow replaces the hand-rolled try/catch flag: it is shorter
  // and fails the test when no exception is raised, with a clearer message.
  expect(() => createAnchorWithPoint(anchor)).toThrow();
});
test('createAnchorWithPoint getBoundingClientRect x, y', () => {
const measure = {
x: 750,
y: 450,
width: 50,
height: 150,
}
const node = createFakeElement(measure)
node.getBoundingClientRect = () => <DOMRect>({
...measure,
left: undefined,
top: undefined,
})
const anchor: Anchor = {
node,
direction: DIRECTION.TOP,
translation: [0.5, 1],
};
const anchorWithPoint = createAnchorWithPoint(anchor);
expect(anchorWithPoint).toMatchObject({ x: 775, y: 450 });
});
// Fix: the original title claimed "width, height undefined", but the test
// actually stubs x/y to undefined and supplies left/top instead — renamed
// so the title matches what is exercised.
test('createAnchorWithPoint getBoundingClientRect x, y undefined', () => {
  const measure = {
    x: 750,
    y: 450,
    width: 50,
    height: 150,
  }
  const node = createFakeElement(measure)
  node.getBoundingClientRect = () => <DOMRect>({
    ...measure,
    x: undefined,
    y: undefined,
    left: measure.x,
    top: measure.y,
  })
  const anchor: Anchor = {
    node,
    direction: DIRECTION.TOP,
    translation: [0.5, 1],
  };
  const anchorWithPoint = createAnchorWithPoint(anchor);
  expect(anchorWithPoint).toMatchObject({ x: 775, y: 450 });
});
// Verifies measuring still works when `window` is unavailable (e.g. server-
// side rendering): the spy makes the global `window` getter return undefined
// for the duration of the test and is restored afterwards.
test('createAnchorWithPoint window undefined', () => {
  const windowSpy = jest.spyOn(window, "window", "get");
  windowSpy.mockImplementation(() => undefined);
  const anchor: Anchor = {
    node: createFakeElement({
      x: 750,
      y: 450,
      width: 50,
      height: 150,
    }),
    direction: DIRECTION.TOP,
    translation: [0.5, 1],
  };
  const anchorWithPoint = createAnchorWithPoint(anchor);
  expect(anchorWithPoint).toMatchObject({ x: 775, y: 450 });
  windowSpy.mockRestore();
})
|
package tree.symbols;
import tree.DefaultTreeNodeSymbol;
/**
 * Tree-node symbol representing the right curly brace ("}").
 * BRACE_RIGHT is a constant inherited from the DefaultTreeNodeSymbol
 * hierarchy — presumably the token id for this symbol; confirm in the
 * parent class.
 */
public class TSBraceRight extends DefaultTreeNodeSymbol {
    // Token id and literal text shared by all instances of this symbol.
    public static int id = BRACE_RIGHT;
    public static String text = "}";

    public TSBraceRight() {
        super(text, id);
    }
}
|
<gh_stars>1-10
import { Component, OnInit } from '@angular/core';
import { FormBuilder,FormGroup,Validators } from '@angular/forms';
import Swal from 'sweetalert2';
import { flyInOut , expand} from '../../Utilities/animations/animation';
import { SharingService } from 'src/app/services/sharing.service';
@Component({
  selector: 'app-forget-pas',
  templateUrl: './forget-pas.component.html',
  styleUrls: ['./forget-pas.component.scss'],
  animations: [
    flyInOut(),
    expand()
  ]
})
export class ForgetPasComponent implements OnInit {
  /** Reactive form backing the "forgot password" dialog. */
  HForgotForm!: FormGroup;
  /** Theme value obtained from SharingService — assumed 'light'/'dark'; confirm. */
  light!: string;

  constructor(private fb: FormBuilder, private sharingService: SharingService) { }

  ngOnInit(): void {
    // Two controls: a validated e-mail address and a mandatory consent box.
    this.HForgotForm = this.fb.group({
      email: ['', [
        Validators.required,
        Validators.email,
        Validators.maxLength(40)
      ]],
      agree: [false, [
        Validators.requiredTrue
      ]]
    });
    this.light = this.sharingService.getData();
  }

  /** Convenience accessor for the email control (used by the template). */
  get email() {
    return this.HForgotForm.get('email');
  }

  /** Convenience accessor for the agree control (used by the template). */
  get agree() {
    return this.HForgotForm.get('agree');
  }

  /** Shows a confirmation dialog and clears the form. */
  submit() {
    console.log(this.HForgotForm.value);
    Swal.fire({
      icon: 'success',
      title: 'Email Sent',
      text: 'Please check your inbox',
    });
    // Fix: the form's controls are `email` and `agree` — the original reset
    // a nonexistent `password` control and left `agree` checked.
    this.HForgotForm.reset({
      email: '',
      agree: false,
    });
  }
}
|
<gh_stars>0
from clip import Clip
class User:
    """A user with recent/trending/ignored clip collections.

    Two construction modes:
      * ``User("login")`` — fresh user with empty clip lists and zeroed stats.
      * ``User({...})``   — rehydrates every key of the dict as an attribute
        (e.g. when loading a previously serialized user).
    """

    def __init__(self, *args):
        first = args[0]
        if isinstance(first, str):
            self.login = first
            self.clips_recent = []
            self.clips_trending = []
            self.clips_ignored = []
            self.mu = 0
            self.std = 0
        elif isinstance(first, dict):
            for key, value in first.items():
                setattr(self, key, value)

    def load_clips(self, data):
        """Replace ``clips_recent`` with Clip dicts parsed from ``data``."""
        # Every call discards the previous batch before loading fresh data.
        self.clips_recent.clear()
        for group in data:
            self.clips_recent.extend(Clip(clip).__dict__ for clip in group['data'])

    def __str__(self):
        parts = [f'login: {self.login} \nclips: \n']
        parts.extend(f'{clip}\n' for clip in self.clips_recent)
        return ''.join(parts)
|
package sportsstore.dto;
/**
 * Pairing of a product and the quantity received in an import (stock
 * delivery). Plain data-transfer object: getters/setters only, no
 * validation — callers are expected to check that quantity is sensible.
 */
public class ImportedProductDTO {
    private ProductDTO product;
    private int quantity;

    public ProductDTO getProduct() {
        return product;
    }

    public void setProduct(ProductDTO product) {
        this.product = product;
    }

    public int getQuantity() {
        return quantity;
    }

    public void setQuantity(int quantity) {
        this.quantity = quantity;
    }
}
|
<filename>tournament/origins.js
module.exports = function(deck) {
allowedCards = ("4sa 4sj 4sk 4sl 4sm 4sn 4so 4sp 4sq 4sr 4ss 4st 4su 4t3 4t4 4t5 4vc 4vd 4ve 4vf 4vg 4vh 4vi 4vj 4vk 4vl 4vm 52g 52h 52i 52j 52k 52l 52m 52n 52o 52p 52q 52r 55k 55l 55m 55n 55o 55p 55q 55r 55s 55t 55u 58o 58p 58q 58r 58s 58t 58u 58v 590 591 " +
"592 593 5bs 5bt 5bu 5bv 5c0 5c1 5c2 5c4 5c5 5c6 5f0 5f1 5f2 5f3 5f4 5f5 5f6 5f7 5f8 5f9 5fa 5i4 5i5 5i6 5i7 5i8 5i9 5ia 5ib 5ic 5id 5ie 5if 5l8 5l9 5la 5lb 5lc 5ld 5le 5lf 5lg 5lh 5li 5oc 5od 5oe 5of 5og 5oh 5oi 5oj 5ok 5ol 5rg 5rh 5ri 5rj " +
"5rk 5rl 5rm 5rn 5ro 5uk 5ul 5um 5un 5uo 5up 5uq 5ur 5us 5ut 61o 61p 61q 61r 61s 61t 61u 61v 620").split(" ");
for (var i = 0;i < deck.length;i++) {
var card = deck[i];
if (card.upped) return "Upgraded cards are banned";
if (!~allowedCards.indexOf(card.code)) return card.name + " is not allowed";
}
return "Legal"
}
|
/*
recast4j copyright (c) 2021 <NAME> <EMAIL>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
package org.recast4j.demo.tool;
import static org.lwjgl.nuklear.Nuklear.*;
import static org.lwjgl.system.MemoryStack.stackPush;
import static org.recast4j.demo.draw.DebugDraw.duRGBA;
import static org.recast4j.demo.draw.DebugDrawPrimitives.LINES;
import static org.recast4j.demo.math.DemoMath.vCross;
import static org.recast4j.detour.DetourCommon.vNormalize;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.lwjgl.BufferUtils;
import org.lwjgl.PointerBuffer;
import org.lwjgl.nuklear.NkContext;
import org.lwjgl.system.MemoryStack;
import org.lwjgl.util.tinyfd.TinyFileDialogs;
import org.recast4j.demo.RecastBuilderThreadFactory;
import org.recast4j.demo.builder.SampleAreaModifications;
import org.recast4j.demo.draw.GLU;
import org.recast4j.demo.draw.NavMeshRenderer;
import org.recast4j.demo.draw.RecastDebugDraw;
import org.recast4j.demo.geom.DemoInputGeomProvider;
import org.recast4j.demo.io.ObjImporter;
import org.recast4j.demo.sample.Sample;
import org.recast4j.demo.ui.NuklearUIHelper;
import org.recast4j.detour.Tupple2;
import org.recast4j.dynamic.DynamicNavMesh;
import org.recast4j.dynamic.collider.BoxCollider;
import org.recast4j.dynamic.collider.CapsuleCollider;
import org.recast4j.dynamic.collider.Collider;
import org.recast4j.dynamic.collider.CompositeCollider;
import org.recast4j.dynamic.collider.ConvexTrimeshCollider;
import org.recast4j.dynamic.collider.CylinderCollider;
import org.recast4j.dynamic.collider.SphereCollider;
import org.recast4j.dynamic.collider.TrimeshCollider;
import org.recast4j.dynamic.io.VoxelFile;
import org.recast4j.dynamic.io.VoxelFileReader;
import org.recast4j.dynamic.io.VoxelFileWriter;
import org.recast4j.recast.RecastConstants.PartitionType;
public class DynamicUpdateTool implements Tool {
    /** Tool operating modes: build the navmesh, place colliders, or raycast. */
    private enum ToolMode {
        BUILD, COLLIDERS, RAYCAST
    }

    /** Collider shapes selectable in the COLLIDERS palette. */
    private enum ColliderShape {
        SPHERE, CAPSULE, BOX, CYLINDER, COMPOSITE, CONVEX, TRIMESH_BRIDGE, TRIMESH_HOUSE
    }

    private Sample sample;
    private ToolMode mode = ToolMode.BUILD;
    // Build parameters live in 1-element NIO buffers so the Nuklear property
    // widgets can edit them in place.
    private final FloatBuffer cellSize = BufferUtils.createFloatBuffer(1).put(0, 0.3f);
    private PartitionType partitioning = PartitionType.WATERSHED;
    private boolean filterLowHangingObstacles = true;
    private boolean filterLedgeSpans = true;
    private boolean filterWalkableLowHeightSpans = true;
    private final FloatBuffer walkableHeight = BufferUtils.createFloatBuffer(1).put(0, 2f);
    private final FloatBuffer walkableRadius = BufferUtils.createFloatBuffer(1).put(0, 0.6f);
    private final FloatBuffer walkableClimb = BufferUtils.createFloatBuffer(1).put(0, 0.9f);
    private final FloatBuffer walkableSlopeAngle = BufferUtils.createFloatBuffer(1).put(0, 45f);
    private final FloatBuffer minRegionArea = BufferUtils.createFloatBuffer(1).put(0, 6);
    private final FloatBuffer regionMergeSize = BufferUtils.createFloatBuffer(1).put(0, 36);
    private final FloatBuffer maxEdgeLen = BufferUtils.createFloatBuffer(1).put(0, 12f);
    private final FloatBuffer maxSimplificationError = BufferUtils.createFloatBuffer(1).put(0, 1.3f);
    private final IntBuffer vertsPerPoly = BufferUtils.createIntBuffer(1).put(0, 6);
    private boolean buildDetailMesh = true;
    private boolean compression = true;
    private final FloatBuffer detailSampleDist = BufferUtils.createFloatBuffer(1).put(0, 6f);
    private final FloatBuffer detailSampleMaxError = BufferUtils.createFloatBuffer(1).put(0, 1f);
    private boolean showColliders = false;
    // Timings (ms) shown in the UI footer.
    private long buildTime;
    private long raycastTime;
    private ColliderShape colliderShape = ColliderShape.SPHERE;
    private DynamicNavMesh dynaMesh;
    private final ExecutorService executor;
    // Colliders and their debug-draw gizmos, keyed by the id assigned by
    // DynamicNavMesh.addCollider; both maps must stay in sync.
    private final Map<Long, Collider> colliders = new HashMap<>();
    private final Map<Long, ColliderGizmo> colliderGizmos = new HashMap<>();
    private final Random random = new Random();
    // Pre-loaded demo geometries used by the trimesh/convex collider shapes.
    private final DemoInputGeomProvider bridgeGeom;
    private final DemoInputGeomProvider houseGeom;
    private final DemoInputGeomProvider convexGeom;
    // Raycast tool state: start/end markers and the last hit (if any).
    private boolean sposSet;
    private boolean eposSet;
    private float[] spos;
    private float[] epos;
    private boolean raycastHit;
    private float[] raycastHitPos;
public DynamicUpdateTool() {
executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() / 2, new RecastBuilderThreadFactory());
bridgeGeom = new ObjImporter().load(getClass().getClassLoader().getResourceAsStream("bridge.obj"));
houseGeom = new ObjImporter().load(getClass().getClassLoader().getResourceAsStream("house.obj"));
convexGeom = new ObjImporter().load(getClass().getClassLoader().getResourceAsStream("convex.obj"));
}
    /** Supplies the sample (scene, settings UI, navmesh holder) this tool operates on. */
    @Override
    public void setSample(Sample sample) {
        this.sample = sample;
    }
@Override
public void handleClick(float[] s, float[] p, boolean shift) {
if (mode == ToolMode.COLLIDERS) {
if (!shift) {
Tupple2<Collider, ColliderGizmo> colliderWithGizmo = null;
if (dynaMesh != null) {
if (colliderShape == ColliderShape.SPHERE) {
colliderWithGizmo = sphereCollider(p);
} else if (colliderShape == ColliderShape.CAPSULE) {
colliderWithGizmo = capsuleCollider(p);
} else if (colliderShape == ColliderShape.BOX) {
colliderWithGizmo = boxCollider(p);
} else if (colliderShape == ColliderShape.CYLINDER) {
colliderWithGizmo = cylinderCollider(p);
} else if (colliderShape == ColliderShape.COMPOSITE) {
colliderWithGizmo = compositeCollider(p);
} else if (colliderShape == ColliderShape.TRIMESH_BRIDGE) {
colliderWithGizmo = trimeshBridge(p);
} else if (colliderShape == ColliderShape.TRIMESH_HOUSE) {
colliderWithGizmo = trimeshHouse(p);
} else if (colliderShape == ColliderShape.CONVEX) {
colliderWithGizmo = convexTrimesh(p);
}
}
if (colliderWithGizmo != null) {
long id = dynaMesh.addCollider(colliderWithGizmo.first);
colliders.put(id, colliderWithGizmo.first);
colliderGizmos.put(id, colliderWithGizmo.second);
}
}
}
if (mode == ToolMode.RAYCAST) {
if (shift) {
sposSet = true;
spos = Arrays.copyOf(p, p.length);
} else {
eposSet = true;
epos = Arrays.copyOf(p, p.length);
}
if (sposSet && eposSet && dynaMesh != null) {
float[] sp = { spos[0], spos[1] + 1.3f, spos[2] };
float[] ep = { epos[0], epos[1] + 1.3f, epos[2] };
long t1 = System.nanoTime();
Optional<Float> hitPos = dynaMesh.voxelQuery().raycast(sp, ep);
long t2 = System.nanoTime();
raycastTime = (t2 - t1) / 1_000_000L;
raycastHit = hitPos.isPresent();
raycastHitPos = hitPos.map(t -> new float[] { sp[0] + t * (ep[0] - sp[0]), sp[1] + t * (ep[1] - sp[1]),
sp[2] + t * (ep[2] - sp[2]) }).orElse(ep);
}
}
}
private Tupple2<Collider, ColliderGizmo> sphereCollider(float[] p) {
float radius = 1 + random.nextFloat() * 10;
return new Tupple2<>(
new SphereCollider(p, radius, SampleAreaModifications.SAMPLE_POLYAREA_TYPE_WATER, dynaMesh.config.walkableClimb),
ColliderGizmo.sphere(p, radius));
}
private Tupple2<Collider, ColliderGizmo> capsuleCollider(float[] p) {
float radius = 0.4f + random.nextFloat() * 4f;
float[] a = new float[] { (1f - 2 * random.nextFloat()), 0.01f + random.nextFloat(), (1f - 2 * random.nextFloat()) };
vNormalize(a);
float len = 1f + random.nextFloat() * 20f;
a[0] *= len;
a[1] *= len;
a[2] *= len;
float[] start = new float[] { p[0], p[1], p[2] };
float[] end = new float[] { p[0] + a[0], p[1] + a[1], p[2] + a[2] };
return new Tupple2<>(new CapsuleCollider(start, end, radius, SampleAreaModifications.SAMPLE_POLYAREA_TYPE_WATER,
dynaMesh.config.walkableClimb), ColliderGizmo.capsule(start, end, radius));
}
private Tupple2<Collider, ColliderGizmo> boxCollider(float[] p) {
float[] extent = new float[] { 0.5f + random.nextFloat() * 6f, 0.5f + random.nextFloat() * 6f,
0.5f + random.nextFloat() * 6f };
float[] forward = new float[] { (1f - 2 * random.nextFloat()), 0, (1f - 2 * random.nextFloat()) };
float[] up = new float[] { (1f - 2 * random.nextFloat()), 0.01f + random.nextFloat(), (1f - 2 * random.nextFloat()) };
float[][] halfEdges = BoxCollider.getHalfEdges(up, forward, extent);
return new Tupple2<>(
new BoxCollider(p, halfEdges, SampleAreaModifications.SAMPLE_POLYAREA_TYPE_WATER, dynaMesh.config.walkableClimb),
ColliderGizmo.box(p, halfEdges));
}
private Tupple2<Collider, ColliderGizmo> cylinderCollider(float[] p) {
float radius = 0.7f + random.nextFloat() * 4f;
float[] a = new float[] { (1f - 2 * random.nextFloat()), 0.01f + random.nextFloat(), (1f - 2 * random.nextFloat()) };
vNormalize(a);
float len = 2f + random.nextFloat() * 20f;
a[0] *= len;
a[1] *= len;
a[2] *= len;
float[] start = new float[] { p[0], p[1], p[2] };
float[] end = new float[] { p[0] + a[0], p[1] + a[1], p[2] + a[2] };
return new Tupple2<>(new CylinderCollider(start, end, radius, SampleAreaModifications.SAMPLE_POLYAREA_TYPE_WATER,
dynaMesh.config.walkableClimb), ColliderGizmo.cylinder(start, end, radius));
}
    /**
     * Builds a composite "house with a tree" collider at the click point:
     * a box base, a rotated box roof (tilted 45° about the random forward
     * axis), and a tree made of a capsule trunk plus a sphere crown placed
     * 15 units behind / 6 units beside the base. Gizmos mirror each part.
     */
    private Tupple2<Collider, ColliderGizmo> compositeCollider(float[] p) {
        float[] baseExtent = new float[] { 5, 3, 8 };
        float[] baseCenter = new float[] { p[0], p[1] + 3, p[2] };
        float[] baseUp = new float[] { 0, 1, 0 };
        // Random horizontal facing shared by base and roof.
        float[] forward = new float[] { (1f - 2 * random.nextFloat()), 0, (1f - 2 * random.nextFloat()) };
        vNormalize(forward);
        float[] side = vCross(forward, baseUp);
        BoxCollider base = new BoxCollider(baseCenter, BoxCollider.getHalfEdges(baseUp, forward, baseExtent),
                SampleAreaModifications.SAMPLE_POLYAREA_TYPE_ROAD, dynaMesh.config.walkableClimb);
        float[] roofExtent = new float[] { 4.5f, 4.5f, 8f };
        // Tilt the roof's up vector 45 degrees around the forward axis.
        float[] rx = GLU.build_4x4_rotation_matrix(45, forward[0], forward[1], forward[2]);
        float[] roofUp = mulMatrixVector(new float[3], rx, baseUp);
        float[] roofCenter = new float[] { p[0], p[1] + 6, p[2] };
        BoxCollider roof = new BoxCollider(roofCenter, BoxCollider.getHalfEdges(roofUp, forward, roofExtent),
                SampleAreaModifications.SAMPLE_POLYAREA_TYPE_ROAD, dynaMesh.config.walkableClimb);
        float[] trunkStart = new float[] { baseCenter[0] - forward[0] * 15 + side[0] * 6, p[1],
                baseCenter[2] - forward[2] * 15 + side[2] * 6 };
        float[] trunkEnd = new float[] { trunkStart[0], trunkStart[1] + 10, trunkStart[2] };
        CapsuleCollider trunk = new CapsuleCollider(trunkStart, trunkEnd, 0.5f, SampleAreaModifications.SAMPLE_POLYAREA_TYPE_ROAD,
                dynaMesh.config.walkableClimb);
        float[] crownCenter = new float[] { baseCenter[0] - forward[0] * 15 + side[0] * 6, p[1] + 10,
                baseCenter[2] - forward[2] * 15 + side[2] * 6 };
        SphereCollider crown = new SphereCollider(crownCenter, 4f, SampleAreaModifications.SAMPLE_POLYAREA_TYPE_GRASS,
                dynaMesh.config.walkableClimb);
        CompositeCollider collider = new CompositeCollider(base, roof, trunk, crown);
        ColliderGizmo baseGizmo = ColliderGizmo.box(baseCenter, BoxCollider.getHalfEdges(baseUp, forward, baseExtent));
        ColliderGizmo roofGizmo = ColliderGizmo.box(roofCenter, BoxCollider.getHalfEdges(roofUp, forward, roofExtent));
        ColliderGizmo trunkGizmo = ColliderGizmo.capsule(trunkStart, trunkEnd, 0.5f);
        ColliderGizmo crownGizmo = ColliderGizmo.sphere(crownCenter, 4f);
        ColliderGizmo gizmo = ColliderGizmo.composite(baseGizmo, roofGizmo, trunkGizmo, crownGizmo);
        return new Tupple2<>(collider, gizmo);
    }
    /** Drops the pre-loaded bridge mesh as a trimesh collider at {@code p}. */
    private Tupple2<Collider, ColliderGizmo> trimeshBridge(float[] p) {
        return trimeshCollider(p, bridgeGeom);
    }

    /** Drops the pre-loaded house mesh as a trimesh collider at {@code p}. */
    private Tupple2<Collider, ColliderGizmo> trimeshHouse(float[] p) {
        return trimeshCollider(p, houseGeom);
    }

    /**
     * Drops the convex demo mesh as a convex-trimesh collider at {@code p},
     * with a random rotation of up to 360 degrees about the X axis (the
     * other shapes use 0 — see transformVertices).
     */
    private Tupple2<Collider, ColliderGizmo> convexTrimesh(float[] p) {
        float[] verts = transformVertices(p, convexGeom, 360);
        ConvexTrimeshCollider collider = new ConvexTrimeshCollider(verts, convexGeom.faces,
                SampleAreaModifications.SAMPLE_POLYAREA_TYPE_ROAD, dynaMesh.config.walkableClimb * 10);
        return new Tupple2<>(collider, ColliderGizmo.trimesh(verts, convexGeom.faces));
    }

    /** Shared builder for the bridge/house trimesh colliders. */
    private Tupple2<Collider, ColliderGizmo> trimeshCollider(float[] p, DemoInputGeomProvider geom) {
        float[] verts = transformVertices(p, geom, 0);
        TrimeshCollider collider = new TrimeshCollider(verts, geom.faces, SampleAreaModifications.SAMPLE_POLYAREA_TYPE_ROAD,
                dynaMesh.config.walkableClimb * 10);
        return new Tupple2<>(collider, ColliderGizmo.trimesh(verts, geom.faces));
    }
private float[] transformVertices(float[] p, DemoInputGeomProvider geom, float ax) {
float[] rx = GLU.build_4x4_rotation_matrix(random.nextFloat() * ax, 1, 0, 0);
float[] ry = GLU.build_4x4_rotation_matrix(random.nextFloat() * 360, 0, 1, 0);
float[] m = GLU.mul(rx, ry);
float[] verts = new float[geom.vertices.length];
float[] v = new float[3];
float[] vr = new float[3];
for (int i = 0; i < geom.vertices.length; i += 3) {
v[0] = geom.vertices[i];
v[1] = geom.vertices[i + 1];
v[2] = geom.vertices[i + 2];
mulMatrixVector(vr, m, v);
vr[0] += p[0];
vr[1] += p[1] - 0.1f;
vr[2] += p[2];
verts[i] = vr[0];
verts[i + 1] = vr[1];
verts[i + 2] = vr[2];
}
return verts;
}
private float[] mulMatrixVector(float[] resultvector, float[] matrix, float[] pvector) {
resultvector[0] = matrix[0] * pvector[0] + matrix[4] * pvector[1] + matrix[8] * pvector[2];
resultvector[1] = matrix[1] * pvector[0] + matrix[5] * pvector[1] + matrix[9] * pvector[2];
resultvector[2] = matrix[2] * pvector[0] + matrix[6] * pvector[1] + matrix[10] * pvector[2];
return resultvector;
}
@Override
public void handleClickRay(float[] start, float[] dir, boolean shift) {
if (mode == ToolMode.COLLIDERS) {
if (shift) {
for (Entry<Long, Collider> e : colliders.entrySet()) {
if (hit(start, dir, e.getValue().bounds())) {
dynaMesh.removeCollider(e.getKey());
colliders.remove(e.getKey());
colliderGizmos.remove(e.getKey());
break;
}
}
}
}
}
private boolean hit(float[] point, float[] dir, float[] bounds) {
float cx = 0.5f * (bounds[0] + bounds[3]);
float cy = 0.5f * (bounds[1] + bounds[4]);
float cz = 0.5f * (bounds[2] + bounds[5]);
float dx = 0.5f * (bounds[3] - bounds[0]);
float dy = 0.5f * (bounds[4] - bounds[1]);
float dz = 0.5f * (bounds[5] - bounds[2]);
float rSqr = dx * dx + dy * dy + dz * dz;
float mx = point[0] - cx;
float my = point[1] - cy;
float mz = point[2] - cz;
float c = mx * mx + my * my + mz * mz - rSqr;
if (c <= 0.0f) {
return true;
}
float b = mx * dir[0] + my * dir[1] + mz * dir[2];
if (b > 0.0f) {
return false;
}
float disc = b * b - c;
return disc >= 0.0f;
}
    /**
     * Per-frame debug rendering: collider gizmos in COLLIDERS mode (when
     * enabled), and in RAYCAST mode the start/end agent markers plus the
     * last ray segment (red-ish when it hit, blue-ish when it did not).
     */
    @Override
    public void handleRender(NavMeshRenderer renderer) {
        if (mode == ToolMode.COLLIDERS) {
            if (showColliders) {
                colliderGizmos.values().forEach(g -> g.render(renderer.getDebugDraw()));
            }
        }
        if (mode == ToolMode.RAYCAST) {
            RecastDebugDraw dd = renderer.getDebugDraw();
            int startCol = duRGBA(128, 25, 0, 192);
            int endCol = duRGBA(51, 102, 0, 129);
            if (sposSet) {
                drawAgent(dd, spos, startCol);
            }
            if (eposSet) {
                drawAgent(dd, epos, endCol);
            }
            // Draw the ray segment on top of geometry (depth test off).
            dd.depthMask(false);
            if (raycastHitPos != null) {
                int spathCol = raycastHit ? duRGBA(128, 32, 16, 220) : duRGBA(64, 128, 240, 220);
                dd.begin(LINES, 2.0f);
                // The ray is cast 1.3 units above the stored start marker.
                dd.vertex(spos[0], spos[1] + 1.3f, spos[2], spathCol);
                dd.vertex(raycastHitPos[0], raycastHitPos[1], raycastHitPos[2], spathCol);
                dd.end();
            }
            dd.depthMask(true);
        }
    }
    /**
     * Draws an agent marker at {@code pos}: a wireframe cylinder sized from
     * the sample's agent radius/height, a circle at the max-climb height,
     * and a small cross + vertical tick at the base.
     */
    private void drawAgent(RecastDebugDraw dd, float[] pos, int col) {
        float r = sample.getSettingsUI().getAgentRadius();
        float h = sample.getSettingsUI().getAgentHeight();
        float c = sample.getSettingsUI().getAgentMaxClimb();
        dd.depthMask(false);
        // Agent dimensions.
        dd.debugDrawCylinderWire(pos[0] - r, pos[1] + 0.02f, pos[2] - r, pos[0] + r, pos[1] + h, pos[2] + r, col, 2.0f);
        dd.debugDrawCircle(pos[0], pos[1] + c, pos[2], r, duRGBA(0, 0, 0, 64), 1.0f);
        int colb = duRGBA(0, 0, 0, 196);
        dd.begin(LINES);
        dd.vertex(pos[0], pos[1] - c, pos[2], colb);
        dd.vertex(pos[0], pos[1] + c, pos[2], colb);
        dd.vertex(pos[0] - r / 2, pos[1] + 0.02f, pos[2], colb);
        dd.vertex(pos[0] + r / 2, pos[1] + 0.02f, pos[2], colb);
        dd.vertex(pos[0], pos[1] + 0.02f, pos[2] - r / 2, colb);
        dd.vertex(pos[0], pos[1] + 0.02f, pos[2] + r / 2, colb);
        dd.end();
        dd.depthMask(true);
    }
@Override
public void handleUpdate(float dt) {
if (dynaMesh != null) {
updateDynaMesh();
}
}
    /**
     * Runs one incremental update pass of the dynamic navmesh on the worker
     * pool (blocking until it finishes) and, when something changed, pushes
     * the fresh results into the sample and records the build time in ms.
     */
    private void updateDynaMesh() {
        long t = System.nanoTime();
        try {
            boolean updated = dynaMesh.update(executor).get();
            if (updated) {
                buildTime = (System.nanoTime() - t) / 1_000_000;
                sample.update(null, dynaMesh.recastResults(), dynaMesh.navMesh());
                sample.setChanged(false);
            }
        } catch (InterruptedException | ExecutionException e) {
            // Best-effort tool update: log and keep the previous mesh.
            e.printStackTrace();
        }
    }
    /**
     * Builds the tool's immediate-mode Nuklear UI: mode selector, then the
     * panel for the active mode (build parameters / collider palette), and
     * a footer with raycast or build timings. Widget order is significant
     * in immediate-mode UI — do not reorder rows casually.
     */
    @Override
    public void layout(NkContext ctx) {
        // --- mode selector ---
        nk_layout_row_dynamic(ctx, 18, 1);
        if (nk_option_label(ctx, "Build", mode == ToolMode.BUILD)) {
            mode = ToolMode.BUILD;
        }
        nk_layout_row_dynamic(ctx, 18, 1);
        if (nk_option_label(ctx, "Colliders", mode == ToolMode.COLLIDERS)) {
            mode = ToolMode.COLLIDERS;
        }
        nk_layout_row_dynamic(ctx, 18, 1);
        if (nk_option_label(ctx, "Raycast", mode == ToolMode.RAYCAST)) {
            mode = ToolMode.RAYCAST;
        }
        nk_layout_row_dynamic(ctx, 1, 1);
        nk_spacing(ctx, 1);
        // --- BUILD panel: load/save plus all recast build parameters ---
        if (mode == ToolMode.BUILD) {
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_button_text(ctx, "Load Voxels...")) {
                load();
            }
            if (dynaMesh != null) {
                nk_layout_row_dynamic(ctx, 18, 1);
                compression = nk_check_text(ctx, "Compression", compression);
                if (nk_button_text(ctx, "Save Voxels...")) {
                    save();
                }
            }
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Rasterization", NK_TEXT_ALIGN_LEFT);
            nk_layout_row_dynamic(ctx, 18, 2);
            // Cell size comes from the loaded voxel file and is read-only.
            nk_label(ctx, "Cell Size", NK_TEXT_ALIGN_LEFT);
            nk_label(ctx, String.format("%.2f", cellSize.get(0)), NK_TEXT_ALIGN_RIGHT);
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Agent", NK_TEXT_ALIGN_LEFT);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Height", 0f, walkableHeight, 5f, 0.01f, 0.01f);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Radius", 0f, walkableRadius, 10f, 0.01f, 0.01f);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Max Climb", 0f, walkableClimb, 10f, 0.01f, 0.01f);
            nk_layout_row_dynamic(ctx, 18, 2);
            // Slope angle also comes from the voxel file and is read-only.
            nk_label(ctx, "Max Slope", NK_TEXT_ALIGN_LEFT);
            nk_label(ctx, String.format("%.0f", walkableSlopeAngle.get(0)), NK_TEXT_ALIGN_RIGHT);
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Partitioning", NK_TEXT_ALIGN_LEFT);
            partitioning = NuklearUIHelper.nk_radio(ctx, PartitionType.values(), partitioning,
                    p -> p.name().substring(0, 1) + p.name().substring(1).toLowerCase());
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Filtering", NK_TEXT_ALIGN_LEFT);
            nk_layout_row_dynamic(ctx, 18, 1);
            filterLowHangingObstacles = nk_option_text(ctx, "Low Hanging Obstacles", filterLowHangingObstacles);
            nk_layout_row_dynamic(ctx, 18, 1);
            filterLedgeSpans = nk_option_text(ctx, "Ledge Spans", filterLedgeSpans);
            nk_layout_row_dynamic(ctx, 18, 1);
            filterWalkableLowHeightSpans = nk_option_text(ctx, "Walkable Low Height Spans", filterWalkableLowHeightSpans);
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Region", NK_TEXT_ALIGN_LEFT);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Min Region Size", 0, minRegionArea, 150, 0.1f, 0.1f);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Merged Region Size", 0, regionMergeSize, 400, 0.1f, 0.1f);
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Polygonization", NK_TEXT_ALIGN_LEFT);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Max Edge Length", 0f, maxEdgeLen, 50f, 0.1f, 0.1f);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Max Edge Error", 0.1f, maxSimplificationError, 10f, 0.1f, 0.1f);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_int(ctx, "Verts Per Poly", 3, vertsPerPoly, 12, 1, 1);
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Detail Mesh", NK_TEXT_ALIGN_LEFT);
            nk_layout_row_dynamic(ctx, 20, 1);
            buildDetailMesh = nk_check_text(ctx, "Enable", buildDetailMesh);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Sample Distance", 0f, detailSampleDist, 16f, 0.1f, 0.1f);
            nk_layout_row_dynamic(ctx, 20, 1);
            nk_property_float(ctx, "Max Sample Error", 0f, detailSampleMaxError, 16f, 0.1f, 0.1f);
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 20, 1);
            if (nk_button_text(ctx, "Build")) {
                if (dynaMesh != null) {
                    buildDynaMesh();
                    sample.setChanged(false);
                }
            }
        }
        // --- COLLIDERS panel: shape palette ---
        if (mode == ToolMode.COLLIDERS) {
            nk_layout_row_dynamic(ctx, 1, 1);
            nk_spacing(ctx, 1);
            nk_layout_row_dynamic(ctx, 18, 1);
            nk_label(ctx, "Colliders", NK_TEXT_ALIGN_LEFT);
            nk_layout_row_dynamic(ctx, 20, 1);
            showColliders = nk_check_text(ctx, "Show", showColliders);
            nk_layout_row_dynamic(ctx, 20, 1);
            if (nk_option_label(ctx, "Sphere", colliderShape == ColliderShape.SPHERE)) {
                colliderShape = ColliderShape.SPHERE;
            }
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_option_label(ctx, "Capsule", colliderShape == ColliderShape.CAPSULE)) {
                colliderShape = ColliderShape.CAPSULE;
            }
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_option_label(ctx, "Box", colliderShape == ColliderShape.BOX)) {
                colliderShape = ColliderShape.BOX;
            }
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_option_label(ctx, "Cylinder", colliderShape == ColliderShape.CYLINDER)) {
                colliderShape = ColliderShape.CYLINDER;
            }
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_option_label(ctx, "Composite", colliderShape == ColliderShape.COMPOSITE)) {
                colliderShape = ColliderShape.COMPOSITE;
            }
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_option_label(ctx, "Convex Trimesh", colliderShape == ColliderShape.CONVEX)) {
                colliderShape = ColliderShape.CONVEX;
            }
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_option_label(ctx, "Trimesh Bridge", colliderShape == ColliderShape.TRIMESH_BRIDGE)) {
                colliderShape = ColliderShape.TRIMESH_BRIDGE;
            }
            nk_layout_row_dynamic(ctx, 18, 1);
            if (nk_option_label(ctx, "Trimesh House", colliderShape == ColliderShape.TRIMESH_HOUSE)) {
                colliderShape = ColliderShape.TRIMESH_HOUSE;
            }
        }
        // --- footer: timing info for the active mode ---
        nk_layout_row_dynamic(ctx, 2, 1);
        nk_spacing(ctx, 1);
        nk_layout_row_dynamic(ctx, 18, 1);
        if (mode == ToolMode.RAYCAST) {
            nk_label(ctx, String.format("Raycast Time: %d ms", raycastTime), NK_TEXT_ALIGN_LEFT);
            if (sposSet) {
                nk_layout_row_dynamic(ctx, 18, 1);
                nk_label(ctx, String.format("Start: %.3f, %.3f, %.3f", spos[0], spos[1] + 1.3f, spos[2]), NK_TEXT_ALIGN_LEFT);
            }
            if (eposSet) {
                nk_layout_row_dynamic(ctx, 18, 1);
                nk_label(ctx, String.format("End: %.3f, %.3f, %.3f", epos[0], epos[1] + 1.3f, epos[2]), NK_TEXT_ALIGN_LEFT);
            }
            if (raycastHit) {
                nk_layout_row_dynamic(ctx, 18, 1);
                nk_label(ctx, String.format("Hit: %.3f, %.3f, %.3f", raycastHitPos[0], raycastHitPos[1], raycastHitPos[2]),
                        NK_TEXT_ALIGN_LEFT);
            }
        } else {
            nk_label(ctx, String.format("Build Time: %d ms", buildTime), NK_TEXT_ALIGN_LEFT);
        }
    }
    /** Shows an "open file" dialog filtered to *.voxels and loads the selection. */
    private void load() {
        try (MemoryStack stack = stackPush()) {
            PointerBuffer aFilterPatterns = stack.mallocPointer(1);
            aFilterPatterns.put(stack.UTF8("*.voxels"));
            aFilterPatterns.flip();
            String filename = TinyFileDialogs.tinyfd_openFileDialog("Open Voxel File", "", aFilterPatterns, "Voxel File", false);
            if (filename != null) {
                load(filename);
            }
        }
    }
private void load(String filename) {
File file = new File(filename);
if (file.exists()) {
VoxelFileReader reader = new VoxelFileReader();
try (FileInputStream fis = new FileInputStream(file)) {
VoxelFile voxelFile = reader.read(fis);
dynaMesh = new DynamicNavMesh(voxelFile);
dynaMesh.config.keepIntermediateResults = true;
updateUI();
buildDynaMesh();
colliders.clear();
} catch (Exception e) {
e.printStackTrace();
dynaMesh = null;
}
}
}
    /** Shows a "save file" dialog filtered to *.voxels and saves to the selection. */
    private void save() {
        try (MemoryStack stack = stackPush()) {
            PointerBuffer aFilterPatterns = stack.mallocPointer(1);
            aFilterPatterns.put(stack.UTF8("*.voxels"));
            aFilterPatterns.flip();
            String filename = TinyFileDialogs.tinyfd_saveFileDialog("Save Voxel File", "", aFilterPatterns, "Voxel File");
            if (filename != null) {
                save(filename);
            }
        }
    }
    /**
     * Serializes the current dynamic navmesh to {@code filename}, honoring
     * the "Compression" checkbox. Failures are logged and swallowed.
     */
    private void save(String filename) {
        File file = new File(filename);
        try (FileOutputStream fos = new FileOutputStream(file)) {
            VoxelFile voxelFile = VoxelFile.from(dynaMesh);
            VoxelFileWriter writer = new VoxelFileWriter();
            writer.write(fos, voxelFile, compression);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Pushes the UI parameters into the mesh config, runs a full (blocking)
     * rebuild on the worker pool, records the build time in ms, and hands
     * the results to the sample for display.
     */
    private void buildDynaMesh() {
        configDynaMesh();
        long t = System.nanoTime();
        try {
            dynaMesh.build(executor).get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
        }
        buildTime = (System.nanoTime() - t) / 1_000_000;
        sample.update(null, dynaMesh.recastResults(), dynaMesh.navMesh());
    }
/**
 * Copies the values currently shown in the UI widgets into the dynamic nav
 * mesh build configuration (inverse of {@link #updateUI()}).
 */
private void configDynaMesh() {
    dynaMesh.config.partitionType = partitioning;
    dynaMesh.config.walkableHeight = walkableHeight.get(0);
    dynaMesh.config.walkableSlopeAngle = walkableSlopeAngle.get(0);
    dynaMesh.config.walkableRadius = walkableRadius.get(0);
    dynaMesh.config.walkableClimb = walkableClimb.get(0);
    dynaMesh.config.filterLowHangingObstacles = filterLowHangingObstacles;
    dynaMesh.config.filterLedgeSpans = filterLedgeSpans;
    dynaMesh.config.filterWalkableLowHeightSpans = filterWalkableLowHeightSpans;
    dynaMesh.config.minRegionArea = minRegionArea.get(0);
    dynaMesh.config.regionMergeArea = regionMergeSize.get(0);
    dynaMesh.config.maxEdgeLen = maxEdgeLen.get(0);
    dynaMesh.config.maxSimplificationError = maxSimplificationError.get(0);
    dynaMesh.config.vertsPerPoly = vertsPerPoly.get(0);
    dynaMesh.config.buildDetailMesh = buildDetailMesh;
    dynaMesh.config.detailSampleDistance = detailSampleDist.get(0);
    dynaMesh.config.detailSampleMaxError = detailSampleMaxError.get(0);
}
/**
 * Copies the build configuration of the loaded dynamic nav mesh into the UI
 * widgets (inverse of {@link #configDynaMesh()}), e.g. after loading a file.
 */
private void updateUI() {
    cellSize.put(0, dynaMesh.config.cellSize);
    partitioning = dynaMesh.config.partitionType;
    walkableHeight.put(0, dynaMesh.config.walkableHeight);
    walkableSlopeAngle.put(0, dynaMesh.config.walkableSlopeAngle);
    walkableRadius.put(0, dynaMesh.config.walkableRadius);
    walkableClimb.put(0, dynaMesh.config.walkableClimb);
    minRegionArea.put(0, dynaMesh.config.minRegionArea);
    regionMergeSize.put(0, dynaMesh.config.regionMergeArea);
    maxEdgeLen.put(0, dynaMesh.config.maxEdgeLen);
    maxSimplificationError.put(0, dynaMesh.config.maxSimplificationError);
    vertsPerPoly.put(0, dynaMesh.config.vertsPerPoly);
    buildDetailMesh = dynaMesh.config.buildDetailMesh;
    detailSampleDist.put(0, dynaMesh.config.detailSampleDistance);
    detailSampleMaxError.put(0, dynaMesh.config.detailSampleMaxError);
    filterLowHangingObstacles = dynaMesh.config.filterLowHangingObstacles;
    filterLedgeSpans = dynaMesh.config.filterLedgeSpans;
    filterWalkableLowHeightSpans = dynaMesh.config.filterWalkableLowHeightSpans;
}
/** Display name of this sample tool as shown in the demo tool list. */
@Override
public String getName() {
    final String displayName = "Dynamic Updates";
    return displayName;
}
}
|
#!/bin/bash
# Recreate the Python 3 virtualenv under env/occo and install the test
# requirements exclusively from the LPDS package mirror (--no-index).
PDIR=env/occo

# fix: "Reseting" -> "Resetting" in the user-visible message
echo "Resetting '$PDIR'"
rm -rf "$PDIR"
virtualenv -p python3 "$PDIR"
source "$PDIR"/bin/activate
pip install -r requirements_test.txt --find-links https://pip3.lpds.sztaki.hu/packages --no-index
#cat /etc/grid-security/certificates/*.pem >> $(python -m requests.certs)
# NOTE(review): 'set +ex' disables tracing/exit-on-error, but neither option is
# enabled earlier in this script — presumably leftover; confirm before removing.
set +ex
echo "It's dangerous to go alone. Take these:"
echo "source '$PDIR/bin/activate'"
|
#!/usr/bin/env bash
# Copyright 2020 Chaos Mesh Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is a script to quickly install chaos-mesh.
# This script will check if docker and kubernetes are installed. If local mode is set and kubernetes is not installed,
# it will use kind or minikube to install the kubernetes cluster according to the configuration.
# Finally, when all dependencies are installed, chaos-mesh will be installed.
# Print the CLI help text for install.sh to stdout.
usage() {
    cat << EOF
This script is used to install chaos-mesh.
Before running this script, please ensure that:
* have installed docker if you run chaos-mesh in local.
* have installed Kubernetes if you run chaos-mesh in normal Kubernetes cluster
USAGE:
    install.sh [FLAGS] [OPTIONS]
FLAGS:
    -h, --help               Prints help information
    -d, --dependency-only    Install dependencies only, including kind, kubectl, local-kube.
        --force              Force reinstall all components if they are already installed, include: kind, local-kube, chaos-mesh
        --force-chaos-mesh   Force reinstall chaos-mesh if it is already installed
        --force-local-kube   Force reinstall local Kubernetes cluster if it is already installed
        --force-kubectl      Force reinstall kubectl client if it is already installed
        --force-kind         Force reinstall Kind if it is already installed
        --docker-mirror      Use docker mirror to pull image, dockerhub.azk8s.cn => docker.io, gcr.azk8s.cn => gcr.io
        --volume-provisioner Deploy volume provisioner in local Kubernetes cluster
        --local-registry     Deploy local docker registry in local Kubernetes cluster
        --template           Locally render templates
        --k3s                Install chaos-mesh in k3s environment
        --microk8s           Install chaos-mesh in microk8s environment
        --host-network       Install chaos-mesh using hostNetwork
OPTIONS:
    -v, --version            Version of chaos-mesh, default value: latest
    -l, --local [kind]       Choose a way to run a local kubernetes cluster, supported value: kind,
                             If this value is not set and the Kubernetes is not installed, this script will exit with 1.
    -n, --name               Name of Kubernetes cluster, default value: kind
    -c  --crd                The path of the crd files. Get the crd file from "https://mirrors.chaos-mesh.org" if the crd path is empty.
    -r  --runtime            Runtime specifies which container runtime to use. Currently we only supports docker and containerd. default value: docker
        --kind-version       Version of the Kind tool, default value: v0.7.0
        --node-num           The count of the cluster nodes,default value: 3
        --k8s-version        Version of the Kubernetes cluster,default value: v1.17.2
        --volume-num         The volumes number of each kubernetes node,default value: 5
        --release-name       Release name of chaos-mesh, default value: chaos-mesh
        --namespace          Namespace of chaos-mesh, default value: chaos-testing
        --timezone           Specifies timezone to be used by chaos-dashboard, chaos-daemon and controller.
EOF
}
# Parse command-line flags, validate the requested configuration, then drive
# the installation: optionally create a local kind cluster, install Chaos Mesh
# and wait until its pods are ready.
main() {
    local local_kube=""
    local cm_version="latest"
    local kind_name="kind"
    local kind_version="v0.7.0"
    local node_num=3
    local k8s_version="v1.17.2"
    local volume_num=5
    local release_name="chaos-mesh"
    local namespace="chaos-testing"
    local timezone="UTC"
    local force_chaos_mesh=false
    local force_local_kube=false
    local force_kubectl=false
    local force_kind=false
    local docker_mirror=false
    local volume_provisioner=false
    local local_registry=false
    local crd=""
    local runtime="docker"
    local template=false
    local install_dependency_only=false
    local k3s=false
    local microk8s=false
    # NOTE: host_network is also read by install_chaos_mesh /
    # gen_chaos_mesh_manifests through bash dynamic scoping.
    local host_network=false
    local docker_registry="docker.io"

    while [[ $# -gt 0 ]]
    do
        key="$1"
        case "$key" in
            -h|--help)
                usage
                exit 0
                ;;
            -l|--local)
                local_kube="$2"
                shift
                shift
                ;;
            -v|--version)
                cm_version="$2"
                shift
                shift
                ;;
            -n|--name)
                kind_name="$2"
                shift
                shift
                ;;
            -c|--crd)
                crd="$2"
                shift
                shift
                ;;
            -r|--runtime)
                runtime="$2"
                shift
                shift
                ;;
            -d|--dependency-only)
                install_dependency_only=true
                shift
                ;;
            --force)
                force_chaos_mesh=true
                force_local_kube=true
                force_kubectl=true
                force_kind=true
                shift
                ;;
            --force-local-kube)
                force_local_kube=true
                shift
                ;;
            --force-kubectl)
                force_kubectl=true
                shift
                ;;
            --force-kind)
                force_kind=true
                shift
                ;;
            --force-chaos-mesh)
                force_chaos_mesh=true
                shift
                ;;
            --template)
                template=true
                shift
                ;;
            --docker-mirror)
                docker_mirror=true
                shift
                ;;
            --volume-provisioner)
                volume_provisioner=true
                shift
                ;;
            --local-registry)
                local_registry=true
                shift
                ;;
            --kind-version)
                kind_version="$2"
                shift
                shift
                ;;
            --node-num)
                node_num="$2"
                shift
                shift
                ;;
            --k8s-version)
                k8s_version="$2"
                shift
                shift
                ;;
            --volume-num)
                volume_num="$2"
                shift
                shift
                ;;
            --release-name)
                release_name="$2"
                shift
                shift
                ;;
            --namespace)
                namespace="$2"
                shift
                shift
                ;;
            --k3s)
                k3s=true
                shift
                ;;
            --microk8s)
                microk8s=true
                shift
                ;;
            --host-network)
                host_network=true
                shift
                ;;
            --timezone)
                timezone="$2"
                shift
                shift
                ;;
            --docker-registry)
                docker_registry="$2"
                shift
                shift
                ;;
            *)
                echo "unknown flag or option $key"
                usage
                exit 1
                ;;
        esac
    done

    if [ "${runtime}" != "docker" ] && [ "${runtime}" != "containerd" ]; then
        # fix: report the offending runtime value (previously printed ${local_kube})
        printf "container runtime %s is not supported\n" "${runtime}"
        exit 1
    fi

    if [ "${local_kube}" != "" ] && [ "${local_kube}" != "kind" ]; then
        printf "local Kubernetes by %s is not supported\n" "${local_kube}"
        exit 1
    fi

    # kind, k3s and microk8s all run containers through containerd
    if [ "${local_kube}" == "kind" ]; then
        runtime="containerd"
    fi
    if [ "${k3s}" == "true" ]; then
        runtime="containerd"
    fi
    if [ "${microk8s}" == "true" ]; then
        runtime="containerd"
    fi

    # default CRD source: the public chaos-mesh mirror for the chosen version
    if [ "${crd}" == "" ]; then
        crd="https://mirrors.chaos-mesh.org/${cm_version}/crd.yaml"
    fi

    # --template: only render the manifests to stdout, install nothing
    if $template; then
        ensure gen_crd_manifests "${crd}"
        ensure gen_chaos_mesh_manifests "${runtime}" "${k3s}" "${cm_version}" "${timezone}" "${host_network}" "${docker_registry}" "${microk8s}"
        exit 0
    fi

    need_cmd "sed"
    need_cmd "tr"

    if [ "${local_kube}" == "kind" ]; then
        prepare_env
        install_kubectl "${k8s_version}" ${force_kubectl}
        check_docker
        install_kind "${kind_version}" ${force_kind}
        install_kubernetes_by_kind "${kind_name}" "${k8s_version}" "${node_num}" "${volume_num}" ${force_local_kube} ${docker_mirror} ${volume_provisioner} ${local_registry}
    fi

    if [ "${install_dependency_only}" = true ]; then
        exit 0
    fi

    check_kubernetes

    install_chaos_mesh "${release_name}" "${namespace}" "${local_kube}" ${force_chaos_mesh} ${docker_mirror} "${crd}" "${runtime}" "${k3s}" "${cm_version}" "${timezone}" "${docker_registry}" "${microk8s}"
    ensure_pods_ready "${namespace}" "app.kubernetes.io/component=controller-manager" 100
    ensure_pods_ready "${namespace}" "app.kubernetes.io/component=chaos-daemon" 100
    ensure_pods_ready "${namespace}" "app.kubernetes.io/component=chaos-dashboard" 100

    printf "Chaos Mesh %s is installed successfully\n" "${release_name}"
}
# Create ${HOME}/local/bin and make sure it is on PATH for login shells,
# persisting the export line in ~/.bash_profile (idempotently) and sourcing
# the profile so the current shell picks it up immediately.
prepare_env() {
    mkdir -p "$HOME/local/bin"
    local set_path="export PATH=$HOME/local/bin:\$PATH"
    local env_file="$HOME/.bash_profile"
    if [[ ! -e "${env_file}" ]]; then
        ensure touch "${env_file}"
    fi
    # only append the export line if it is not already present (-F: literal match)
    grep -qF -- "${set_path}" "${env_file}" || echo "${set_path}" >> "${env_file}"
    ensure source "${env_file}"
}
# Verify that kubectl exists and can reach a Kubernetes API server, then
# enforce the minimum supported cluster version. Exits non-zero on failure.
check_kubernetes() {
    need_cmd "kubectl"
    # capture only stderr: stdout (the version info) is discarded
    kubectl_err_msg=$(kubectl version 2>&1 1>/dev/null)
    if [ "$kubectl_err_msg" != "" ]; then
        printf "check Kubernetes failed, error: %s\n" "${kubectl_err_msg}"
        exit 1
    fi

    check_kubernetes_version
}
# Require every version reported by `kubectl version` (client and server
# lines both match the GitVersion pattern) to be at least 1.12.0.
check_kubernetes_version() {
    version_info=$(kubectl version | sed 's/.*GitVersion:\"v\([0-9.]*\).*/\1/g')

    # intentional word-splitting: one extracted version per loop iteration
    for v in $version_info
    do
        if version_lt "$v" "1.12.0"; then
            printf "Chaos Mesh requires Kubernetes cluster running 1.12 or later\n"
            exit 1
        fi
    done
}
# Install the kubectl client into ${HOME}/local/bin.
#   $1 - required kubectl version (e.g. "v1.17.2")
#   $2 - "true" to reinstall even when a suitable version is already present
install_kubectl() {
    local kubectl_version=$1
    local force_install=$2

    printf "Install kubectl client\n"

    # Skip the download when a recent-enough kubectl already exists, unless forced.
    err_msg=$(kubectl version --client=true 2>&1 1>/dev/null)
    if [ "$err_msg" == "" ]; then
        v=$(kubectl version --client=true | sed 's/.*GitVersion:\"v\([0-9.]*\).*/\1/g')
        target_version=$(echo "${kubectl_version}" | sed s/v//g)
        if version_lt "$v" "${target_version}"; then
            # fix: "Chaos Mesg" -> "Chaos Mesh" in the user-visible message
            printf "Chaos Mesh requires kubectl version %s or later\n" "${target_version}"
        else
            printf "kubectl Version %s has been installed\n" "$v"
            if [ "$force_install" != "true" ]; then
                return
            fi
        fi
    fi

    need_cmd "curl"
    local KUBECTL_BIN="${HOME}/local/bin/kubectl"
    # declaration split from assignment so a lowercase()/uname failure isn't masked
    local target_os
    target_os=$(lowercase "$(uname)")

    ensure curl -Lo /tmp/kubectl "https://storage.googleapis.com/kubernetes-release/release/${kubectl_version}/bin/${target_os}/amd64/kubectl"
    ensure chmod +x /tmp/kubectl
    ensure mv /tmp/kubectl "${KUBECTL_BIN}"
}
# Create (or reuse) a local Kubernetes cluster with kind.
#   $1 cluster name        $2 node image version (e.g. v1.17.2)
#   $3 worker node count   $4 local volumes per worker node
#   $5 "true" to delete and recreate an existing cluster
#   $6 "true" to pre-pull the node image via the azk8s mirror
#   $7 "true" to deploy the local volume provisioner afterwards
#   $8 local registry flag (accepted but not used in this function)
# Side effects: writes cluster state under ${HOME}/kind/<name> and exports
# KUBECONFIG for the rest of the script.
install_kubernetes_by_kind() {
    local cluster_name=$1
    local cluster_version=$2
    local node_num=$3
    local volume_num=$4
    local force_install=$5
    local docker_mirror=$6
    local volume_provisioner=$7
    local local_registry=$8

    printf "Install local Kubernetes %s\n" "${cluster_name}"
    need_cmd "kind"

    work_dir=${HOME}/kind/${cluster_name}
    kubeconfig_path=${work_dir}/config
    data_dir=${work_dir}/data
    clusters=$(kind get clusters)
    cluster_exist=false
    # intentional word-splitting: one cluster name per iteration
    for c in $clusters
    do
        if [ "$c" == "$cluster_name" ]; then
            printf "Kind cluster %s has been installed\n" "${cluster_name}"
            cluster_exist=true
            break
        fi
    done

    if [ "$cluster_exist" == "true" ]; then
        if [ "$force_install" == "true" ]; then
            printf "Delete Kind Kubernetes cluster %s\n" "${cluster_name}"
            kind delete cluster --name="${cluster_name}"
            status=$?
            if [ $status -ne 0 ]; then
                printf "Delete Kind Kubernetes cluster %s failed\n" "${cluster_name}"
                exit 1
            fi
        else
            # reuse the existing cluster: just refresh the kubeconfig and stop here
            ensure kind get kubeconfig --name="${cluster_name}" > "${kubeconfig_path}"
            return
        fi
    fi

    ensure mkdir -p "${work_dir}"

    printf "Clean data dir: %s\n" "${data_dir}"
    if [ -d "${data_dir}" ]; then
        ensure rm -rf "${data_dir}"
    fi

    # Render the kind cluster config: control plane first (port 5000 mapped
    # for a local registry), then one entry per worker node.
    config_file=${work_dir}/kind-config.yaml
    cat <<EOF > "${config_file}"
kind: Cluster
apiVersion: kind.sigs.k8s.io/v1alpha3
kubeadmConfigPatches:
- |
  apiVersion: kubeadm.k8s.io/v1alpha3
  kind: ClusterConfiguration
  metadata:
    name: config
  apiServerExtraArgs:
    enable-admission-plugins: NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook
nodes:
- role: control-plane
  extraPortMappings:
  - containerPort: 5000
    hostPort: 5000
    listenAddress: 127.0.0.1
    protocol: TCP
EOF

    for ((i=0;i<"${node_num}";i++))
    do
        ensure mkdir -p "${data_dir}/worker${i}"
        cat <<EOF >>  "${config_file}"
- role: worker
  extraMounts:
EOF
        # each worker mounts volume_num host directories under /mnt/disks
        for ((k=1;k<="${volume_num}";k++))
        do
            ensure mkdir -p "${data_dir}/worker${i}/vol${k}"
            cat <<EOF >> "${config_file}"
  - containerPath: /mnt/disks/vol${k}
    hostPath: ${data_dir}/worker${i}/vol${k}
EOF
        done
    done

    local kind_image="kindest/node:${cluster_version}"
    if [ "$docker_mirror" == "true" ]; then
        azk8spull "${kind_image}" || true
    fi

    printf "start to create kubernetes cluster %s" "${cluster_name}"
    ensure kind create cluster --config "${config_file}" --image="${kind_image}" --name="${cluster_name}" --retain -v 1
    ensure kind get kubeconfig --name="${cluster_name}" > "${kubeconfig_path}"
    ensure export KUBECONFIG="${kubeconfig_path}"

    if [ "$volume_provisioner" == "true" ]; then
        deploy_volume_provisioner "${work_dir}" ${docker_mirror}
    fi
}
# Deploy the sig-storage local-volume-provisioner into kube-system so the
# /mnt/disks volumes created for the kind workers become usable PVs.
#   $1 - directory in which to write the rendered manifest
#   $2 - "true" to pre-pull the provisioner image via the azk8s mirror
deploy_volume_provisioner() {
    local data_dir=$1
    local docker_mirror=$2

    local config_file=${data_dir}/local-volume-provisionser.yaml

    volume_provisioner_image="quay.io/external_storage/local-volume-provisioner:v2.3.2"
    if [ "$docker_mirror" == "true" ]; then
        # fix: pass the image VALUE, not the literal string "volume_provisioner_image"
        azk8spull "${volume_provisioner_image}" || true
        kind load docker-image "${volume_provisioner_image}" > /dev/null 2>&1 || true
    fi

    cat <<EOF >"${config_file}"
apiVersion: storage.k8s.io/v1
kind: StorageClass
metadata:
  name: "local-storage"
provisioner: "kubernetes.io/no-provisioner"
volumeBindingMode: "WaitForFirstConsumer"

---
apiVersion: v1
kind: ConfigMap
metadata:
  name: local-provisioner-config
  namespace: kube-system
data:
  nodeLabelsForPV: |
    - kubernetes.io/hostname
  storageClassMap: |
    local-storage:
      hostDir: /mnt/disks
      mountDir: /mnt/disks

---
apiVersion: apps/v1
kind: DaemonSet
metadata:
  name: local-volume-provisioner
  namespace: kube-system
  labels:
    app: local-volume-provisioner
spec:
  selector:
    matchLabels:
      app: local-volume-provisioner
  template:
    metadata:
      labels:
        app: local-volume-provisioner
    spec:
      serviceAccountName: local-storage-admin
      containers:
        - image: ${volume_provisioner_image}
          name: provisioner
          securityContext:
            privileged: true
          env:
          - name: MY_NODE_NAME
            valueFrom:
              fieldRef:
                fieldPath: spec.nodeName
          - name: MY_NAMESPACE
            valueFrom:
              fieldRef:
                fieldPath: metadata.namespace
          - name: JOB_CONTAINER_IMAGE
            value: "quay.io/external_storage/local-volume-provisioner:v2.3.2"
          resources:
            requests:
              cpu: 100m
              memory: 100Mi
            limits:
              cpu: 100m
              memory: 100Mi
          volumeMounts:
            - mountPath: /etc/provisioner/config
              name: provisioner-config
              readOnly: true
            # mounting /dev in DinD environment would fail
            # - mountPath: /dev
            #   name: provisioner-dev
            - mountPath: /mnt/disks
              name: local-disks
              mountPropagation: "HostToContainer"
      volumes:
        - name: provisioner-config
          configMap:
            name: local-provisioner-config
        # - name: provisioner-dev
        #   hostPath:
        #     path: /dev
        - name: local-disks
          hostPath:
            path: /mnt/disks

---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: local-storage-admin
  namespace: kube-system

---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: local-storage-provisioner-pv-binding
  namespace: kube-system
subjects:
- kind: ServiceAccount
  name: local-storage-admin
  namespace: kube-system
roleRef:
  kind: ClusterRole
  name: system:persistent-volume-provisioner
  apiGroup: rbac.authorization.k8s.io

---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: local-storage-provisioner-node-clusterrole
  namespace: kube-system
rules:
- apiGroups: [""]
  resources: ["nodes"]
  verbs: ["get"]

---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: local-storage-provisioner-node-binding
  namespace: kube-system
subjects:
- kind: ServiceAccount
  name: local-storage-admin
  namespace: kube-system
roleRef:
  kind: ClusterRole
  name: local-storage-provisioner-node-clusterrole
  apiGroup: rbac.authorization.k8s.io
EOF

    ensure kubectl apply -f "${config_file}"
}
# Install the kind CLI into ${HOME}/local/bin.
#   $1 - required kind version (e.g. "v0.7.0")
#   $2 - "true" to reinstall even when a suitable version is already present
install_kind() {
    local kind_version=$1
    local force_install=$2

    printf "Install Kind tool\n"

    # Skip the download when a recent-enough kind already exists, unless forced.
    err_msg=$(kind version 2>&1 1>/dev/null)
    if [ "$err_msg" == "" ]; then
        v=$(kind version | awk '{print $2}' | sed s/v//g)
        target_version=$(echo "${kind_version}" | sed s/v//g)
        if version_lt "$v" "${target_version}"; then
            printf "Chaos Mesh requires Kind version %s or later\n" "${target_version}"
        else
            printf "Kind Version %s has been installed\n" "$v"
            if [ "$force_install" != "true" ]; then
                return
            fi
        fi
    fi

    # consistency fix: require curl like install_kubectl does, and use the
    # named local ${kind_version} instead of the raw positional "$1"
    need_cmd "curl"
    local KIND_BIN="${HOME}/local/bin/kind"
    local target_os
    target_os=$(lowercase "$(uname)")
    ensure curl -Lo /tmp/kind "https://github.com/kubernetes-sigs/kind/releases/download/${kind_version}/kind-${target_os}-amd64"
    ensure chmod +x /tmp/kind
    ensure mv /tmp/kind "$KIND_BIN"
}
# Deploy Chaos Mesh into the target cluster: optionally pre-pull the images
# through the azk8s mirror (and load them into kind), then apply the CRD and
# component manifests with kubectl.
#   $1 release_name   $2 namespace   $3 local_kube   $4 force_install
#   $5 docker_mirror  $6 crd         $7 runtime      $8 k3s
#   $9 version        ${10} timezone ${11} docker_registry ${12} microk8s
# NOTE(review): force_install ($4) is declared but never used in this body;
# ${host_network} below is not a parameter — it is inherited from main() via
# bash dynamic scoping. Confirm both before refactoring.
install_chaos_mesh() {
    local release_name=$1
    local namespace=$2
    local local_kube=$3
    local force_install=$4
    local docker_mirror=$5
    local crd=$6
    local runtime=$7
    local k3s=$8
    local version=$9
    local timezone=${10}
    local docker_registry=${11}
    local microk8s=${12}

    printf "Install Chaos Mesh %s\n" "${release_name}"

    local chaos_mesh_image="${docker_registry}/pingcap/chaos-mesh:${version}"
    local chaos_daemon_image="${docker_registry}/pingcap/chaos-daemon:${version}"
    local chaos_dashboard_image="${docker_registry}/pingcap/chaos-dashboard:${version}"
    if [ "$docker_mirror" == "true" ]; then
        # best-effort mirror pulls; failures fall back to the normal registry
        azk8spull "${chaos_mesh_image}" || true
        azk8spull "${chaos_daemon_image}" || true
        azk8spull "${chaos_dashboard_image}" || true
        if [ "${local_kube}" == "kind" ]; then
            kind load docker-image "${chaos_mesh_image}" > /dev/null 2>&1 || true
            kind load docker-image "${chaos_daemon_image}" > /dev/null 2>&1 || true
            kind load docker-image "${chaos_dashboard_image}" > /dev/null 2>&1 || true
        fi
    fi

    gen_crd_manifests "${crd}" | kubectl apply -f - || exit 1
    gen_chaos_mesh_manifests "${runtime}" "${k3s}" "${version}" "${timezone}" "${host_network}" "${docker_registry}" "${microk8s}" | kubectl apply -f - || exit 1
}
# Succeed (return 0) when version $1 is strictly lower than version $2.
version_lt() {
    local cmp_result
    vercomp "$1" "$2"
    cmp_result=$?
    # vercomp returns 2 exactly when the first argument is the smaller version
    [ "${cmp_result}" -eq 2 ]
}
# Compare two dotted version strings numerically, field by field.
# Returns: 0 if $1 == $2, 1 if $1 > $2, 2 if $1 < $2.
vercomp () {
    if [[ $1 == $2 ]]
    then
        return 0
    fi
    # Split each argument on '.' into an array by making '.' the separator.
    local IFS=.
    local i ver1=($1) ver2=($2)
    # fill empty fields in ver1 with zeros
    for ((i=${#ver1[@]}; i<${#ver2[@]}; i++))
    do
        ver1[i]=0
    done
    for ((i=0; i<${#ver1[@]}; i++))
    do
        if [[ -z ${ver2[i]} ]]
        then
            # fill empty fields in ver2 with zeros
            ver2[i]=0
        fi
        # 10# forces base-10 so fields like "08" are not parsed as octal
        if ((10#${ver1[i]} > 10#${ver2[i]}))
        then
            return 1
        fi
        if ((10#${ver1[i]} < 10#${ver2[i]}))
        then
            return 2
        fi
    done
    return 0
}
# Verify that the docker CLI exists and can reach the daemon; exit on failure.
check_docker() {
    need_cmd "docker"
    # capture only stderr: stdout (the version info) is discarded
    docker_err_msg=$(docker version 2>&1 1>/dev/null)
    if [ "$docker_err_msg" != "" ]; then
        printf "check docker failed:\n"
        echo "$docker_err_msg"
        exit 1
    fi
}
# Print a progress/diagnostic message prefixed with the installer name.
say() {
    local message="$1"
    printf 'install chaos-mesh: %s\n' "$message"
}
# Report an error message on stderr (via say) and terminate the script.
err() {
    local message="$1"
    say "$message" >&2
    exit 1
}
# Abort with a helpful message unless command $1 is available on PATH.
need_cmd() {
    check_cmd "$1" && return
    err "need '$1' (command not found)"
}
# Succeed iff $1 resolves to a runnable command (binary, builtin or function).
check_cmd() {
    if command -v "$1" > /dev/null 2>&1; then
        return 0
    fi
    return 1
}
# Convert all letters in the arguments to lower case.
lowercase() {
    # [:upper:]/[:lower:] character classes are the idiomatic tr form; the old
    # "[A-Z]" style also translated literal '[' and ']' as a side effect.
    echo "$@" | tr '[:upper:]' '[:lower:]'
}
# Run a command that should never fail; if it does, terminate immediately
# with an error naming the exact command line that failed.
ensure() {
    "$@" && return 0
    err "command failed: $*"
}
# Block until every pod matching the label selector in the namespace reports
# Running, polling every 10 seconds.
#   $1 - namespace
#   $2 - label selector (empty string matches all pods in the namespace)
#   $3 - maximum number of polling iterations before giving up (exit 1)
ensure_pods_ready() {
    local namespace=$1
    local labels=""
    local limit=$3
    if [ "$2" != "" ]; then
        labels="-l $2"
    fi

    count=0
    # ${labels} is intentionally unquoted so "-l <selector>" splits into two
    # kubectl arguments (and disappears entirely when empty).
    while [ -n "$(kubectl get pods -n "${namespace}" ${labels} --no-headers | grep -v Running)" ];
    do
        echo "Waiting for pod running" && sleep 10;
        # show the not-yet-Running pods on stderr for progress visibility
        kubectl get pods -n "${namespace}" ${labels} --no-headers | >&2 grep -v Running || true
        ((count=count+1))
        if [ $count -gt $limit ]; then
            printf "Waiting for pod status running timeout\n"
            exit 1
        fi
    done
}
# Pull a container image through the Azure China mirrors (*.azk8s.cn) and
# re-tag it with its original reference so later steps can use the original
# name. Best-effort: prints a message and does nothing for unsupported refs.
#   $1 - image reference, e.g. "quay.io/foo/bar:tag", "foo/bar" or "nginx"
azk8spull() {
    image=$1
    if [ -z "$image" ]; then
        echo "## azk8spull image name cannot be null."
    else
        # split "domain/repo/name" on '/' to decide which mirror applies
        array=(`echo $image | tr '/' ' '` )

        domainName=""
        repoName=""
        imageName=""

        if [ ${#array[*]} -eq 3 ]; then
            repoName=${array[1]}
            imageName=${array[2]}
            if [ "${array[0]}"x = "docker.io"x ]; then
                domainName="dockerhub.azk8s.cn"
            elif [ "${array[0]}"x = "gcr.io"x ]; then
                domainName="gcr.azk8s.cn"
            elif [ "${array[0]}"x = "quay.io"x ]; then
                domainName="quay.azk8s.cn"
            else
                # fix: double quotes so the actual image name appears in the message
                echo "## azk8spull can not support pulling $image right now."
            fi
        elif [ ${#array[*]} -eq 2 ]; then
            if [ "${array[0]}"x = "k8s.gcr.io"x ]; then
                domainName="gcr.azk8s.cn"
                repoName="google_containers"
                imageName=${array[1]}
            else
                # bare "repo/name" refs default to Docker Hub
                domainName="dockerhub.azk8s.cn"
                repoName=${array[0]}
                imageName=${array[1]}
            fi
        elif [ ${#array[*]} -eq 1 ]; then
            # single-word official images live under docker.io/library
            domainName="dockerhub.azk8s.cn"
            repoName="library"
            imageName=${array[0]}
        else
            echo "## azk8spull can not support pulling $image right now."
        fi

        # fix: quote $domainName — the unquoted test was a syntax error when empty
        if [ "$domainName" != "" ]; then
            echo "## azk8spull try to pull image from mirror $domainName/$repoName/$imageName."
            docker pull "$domainName/$repoName/$imageName"
            if [ $? -eq 0 ]; then
                echo "## azk8spull try to tag $domainName/$repoName/$imageName to $image."
                docker tag "$domainName/$repoName/$imageName" "$image"
                if [ $? -eq 0 ]; then
                    echo '## azk8spull finish pulling. '
                fi
            fi
        fi
    fi
}
# Emit the CRD manifests on stdout: fetched with curl when the argument looks
# like a URL, otherwise read from the given local file path.
gen_crd_manifests() {
    local crd_source=$1
    if ! check_url "$crd_source"; then
        ensure cat "$crd_source"
        return
    fi
    need_cmd curl
    ensure curl -sSL "$crd_source"
}
# Succeed when $1 looks like an http(s)/ftp/file URL, fail otherwise.
check_url() {
    local candidate=$1
    local url_pattern='^(https?|ftp|file)://[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]\.[-A-Za-z0-9\+&@#/%?=~_|!:,.;]*[-A-Za-z0-9\+&@#/%=~_|]$'
    # [[ =~ ]] already yields the desired 0/1 status; no explicit if needed
    [[ $candidate =~ $url_pattern ]]
}
gen_chaos_mesh_manifests() {
local runtime=$1
local k3s=$2
local version=$3
local timezone=$4
local host_network=$5
local docker_registry=$6
local microk8s=$7
local socketPath="/var/run/docker.sock"
local mountPath="/var/run/docker.sock"
if [ "${runtime}" == "containerd" ]; then
socketPath="/run/containerd/containerd.sock"
mountPath="/run/containerd/containerd.sock"
fi
if [ "${k3s}" == "true" ]; then
socketPath="/run/k3s/containerd/containerd.sock"
mountPath="/run/containerd/containerd.sock"
fi
if [ "${microk8s}" == "true" ]; then
socketPath="/var/snap/microk8s/common/run/containerd.sock"
mountPath="/run/containerd/containerd.sock"
fi
need_cmd mktemp
need_cmd openssl
need_cmd curl
K8S_SERVICE="chaos-mesh-controller-manager"
K8S_NAMESPACE="chaos-testing"
VERSION_TAG="${version}"
DOCKER_REGISTRY_PREFIX="${docker_registry}"
tmpdir=$(mktemp -d)
ensure openssl genrsa -out ${tmpdir}/ca.key 2048 > /dev/null 2>&1
ensure openssl req -x509 -new -nodes -key ${tmpdir}/ca.key -subj "/CN=${K8S_SERVICE}.${K8S_NAMESPACE}.svc" -days 1875 -out ${tmpdir}/ca.crt > /dev/null 2>&1
ensure openssl genrsa -out ${tmpdir}/server.key 2048 > /dev/null 2>&1
cat <<EOF > ${tmpdir}/csr.conf
[req]
prompt = no
req_extensions = v3_req
distinguished_name = dn
[dn]
CN = ${K8S_SERVICE}.${K8S_NAMESPACE}.svc
[v3_req]
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage = serverAuth
subjectAltName = @alt_names
[alt_names]
DNS.1 = ${K8S_SERVICE}
DNS.2 = ${K8S_SERVICE}.${K8S_NAMESPACE}
DNS.3 = ${K8S_SERVICE}.${K8S_NAMESPACE}.svc
EOF
ensure openssl req -new -key ${tmpdir}/server.key -out ${tmpdir}/server.csr -config ${tmpdir}/csr.conf > /dev/null 2>&1
ensure openssl x509 -req -in ${tmpdir}/server.csr -CA ${tmpdir}/ca.crt -CAkey ${tmpdir}/ca.key -CAcreateserial -out ${tmpdir}/server.crt -days 1875 -extensions v3_req -extfile ${tmpdir}/csr.conf > /dev/null 2>&1
TLS_KEY=$(openssl base64 -A -in ${tmpdir}/server.key)
TLS_CRT=$(openssl base64 -A -in ${tmpdir}/server.crt)
CA_BUNDLE=$(openssl base64 -A -in ${tmpdir}/ca.crt)
# chaos-mesh.yaml start
cat <<EOF
---
apiVersion: v1
kind: Namespace
metadata:
name: chaos-testing
---
# Source: chaos-mesh/templates/chaos-daemon-rbac.yaml
kind: ServiceAccount
apiVersion: v1
metadata:
namespace: chaos-testing
name: chaos-daemon
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-daemon
---
# Source: chaos-mesh/templates/controller-manager-rbac.yaml
kind: ServiceAccount
apiVersion: v1
metadata:
namespace: chaos-testing
name: chaos-controller-manager
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
---
# Source: chaos-mesh/templates/webhook-configuration.yaml
kind: Secret
apiVersion: v1
metadata:
name: chaos-mesh-webhook-certs
namespace: chaos-testing
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: webhook-secret
type: Opaque
data:
tls.crt: "${TLS_CRT}"
tls.key: "${TLS_KEY}"
---
# Source: chaos-mesh/templates/controller-manager-rbac.yaml
# roles
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: chaos-mesh:chaos-controller-manager-target-namespace
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
rules:
- apiGroups: [ "" ]
resources: [ "pods" ]
verbs: [ "get", "list", "watch", "delete", "update" ]
- apiGroups:
- ""
resources:
- events
verbs:
- patch
- create
- apiGroups: [ "" ]
resources: [ "configmaps" ]
verbs: [ "*" ]
- apiGroups: [ "chaos-mesh.org" ]
resources:
- "*"
verbs: [ "*" ]
---
# Source: chaos-mesh/templates/controller-manager-rbac.yaml
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: chaos-mesh:chaos-controller-manager-cluster-level
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
rules:
- apiGroups: [ "" ]
resources:
- nodes
- namespaces
- services
verbs: [ "get", "list", "watch" ]
---
# Source: chaos-mesh/templates/controller-manager-rbac.yaml
# bindings cluster level
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: chaos-mesh:chaos-controller-manager-cluster-level
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: chaos-mesh:chaos-controller-manager-cluster-level
subjects:
- kind: ServiceAccount
name: chaos-controller-manager
namespace: chaos-testing
---
# Source: chaos-mesh/templates/controller-manager-rbac.yaml
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: chaos-mesh:chaos-controller-manager-target-namespace
namespace: chaos-testing
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: chaos-mesh:chaos-controller-manager-target-namespace
subjects:
- kind: ServiceAccount
name: chaos-controller-manager
namespace: chaos-testing
---
# Source: chaos-mesh/templates/controller-manager-rbac.yaml
kind: Role
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: chaos-mesh:chaos-controller-manager-control-plane
namespace: chaos-testing
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
rules:
- apiGroups: [ "" ]
resources: [ "configmaps", "services" ]
verbs: [ "get", "list", "watch" ]
---
# Source: chaos-mesh/templates/controller-manager-rbac.yaml
# binding for control plane namespace
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: chaos-mesh:chaos-controller-manager-control-plane
namespace: chaos-testing
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: chaos-mesh:chaos-controller-manager-control-plane
subjects:
- kind: ServiceAccount
name: chaos-controller-manager
namespace: chaos-testing
---
# Source: chaos-mesh/templates/chaos-dashboard-deployment.yaml
apiVersion: v1
kind: Service
metadata:
namespace: chaos-testing
name: chaos-dashboard
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-dashboard
spec:
selector:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-dashboard
type: NodePort
ports:
- protocol: TCP
port: 2333
targetPort: 2333
name: http
---
# Source: chaos-mesh/templates/controller-manager-service.yaml
apiVersion: v1
kind: Service
metadata:
namespace: chaos-testing
name: chaos-mesh-controller-manager
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
spec:
type: ClusterIP
ports:
- port: 10081
targetPort: pprof
protocol: TCP
name: pprof
- port: 10080
targetPort: http
protocol: TCP
name: http
- port: 443
targetPort: webhook
protocol: TCP
name: webhook
selector:
app.kubernetes.io/component: controller-manager
app.kubernetes.io/instance: chaos-mesh
---
# Source: chaos-mesh/templates/chaos-daemon-daemonset.yaml
apiVersion: apps/v1
kind: DaemonSet
metadata:
namespace: chaos-testing
name: chaos-daemon
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-daemon
spec:
selector:
matchLabels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-daemon
template:
metadata:
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-daemon
spec:
hostNetwork: ${host_network}
serviceAccount: chaos-daemon
hostIPC: true
hostPID: true
containers:
- name: chaos-daemon
image: ${DOCKER_REGISTRY_PREFIX}/pingcap/chaos-daemon:${VERSION_TAG}
imagePullPolicy: IfNotPresent
command:
- /usr/local/bin/chaos-daemon
- --runtime
- ${runtime}
- --http-port
- !!str 31766
- --grpc-port
- !!str 31767
- --pprof
env:
- name: TZ
value: ${timezone}
securityContext:
capabilities:
add:
- SYS_PTRACE
- NET_ADMIN
- MKNOD
- SYS_CHROOT
- SYS_ADMIN
- KILL
# CAP_IPC_LOCK is used to lock memory
- IPC_LOCK
volumeMounts:
- name: socket-path
mountPath: ${mountPath}
- name: sys-path
mountPath: /sys
ports:
- name: grpc
containerPort: 31767
hostPort: 31767
- name: http
containerPort: 31766
volumes:
- name: socket-path
hostPath:
path: ${socketPath}
- name: sys-path
hostPath:
path: /sys
---
# Source: chaos-mesh/templates/chaos-dashboard-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: chaos-testing
name: chaos-dashboard
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-dashboard
spec:
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-dashboard
template:
metadata:
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: chaos-dashboard
spec:
serviceAccount: chaos-controller-manager
containers:
- name: chaos-dashboard
image: ${DOCKER_REGISTRY_PREFIX}/pingcap/chaos-dashboard:${VERSION_TAG}
imagePullPolicy: IfNotPresent
resources:
limits: {}
requests:
cpu: 25m
memory: 256Mi
command:
- /usr/local/bin/chaos-dashboard
env:
- name: DATABASE_DATASOURCE
value: "/data/core.sqlite"
- name: DATABASE_DRIVER
value: "sqlite3"
- name: LISTEN_HOST
value: "0.0.0.0"
- name: LISTEN_PORT
value: "2333"
- name: TZ
value: ${timezone}
- name: TARGET_NAMESPACE
value: chaos-testing
- name: CLUSTER_SCOPED
value: "true"
volumeMounts:
- name: storage-volume
mountPath: /data
subPath: ""
ports:
- name: http
containerPort: 2333
volumes:
- name: storage-volume
emptyDir: {}
---
# Source: chaos-mesh/templates/controller-manager-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: chaos-testing
name: chaos-controller-manager
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
spec:
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
template:
metadata:
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: controller-manager
annotations:
rollme: "install.sh"
spec:
hostNetwork: ${host_network}
serviceAccount: chaos-controller-manager
containers:
- name: chaos-mesh
image: ${DOCKER_REGISTRY_PREFIX}/pingcap/chaos-mesh:${VERSION_TAG}
imagePullPolicy: IfNotPresent
resources:
limits: {}
requests:
cpu: 25m
memory: 256Mi
command:
- /usr/local/bin/chaos-controller-manager
env:
- name: NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
- name: TEMPLATE_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
- name: TARGET_NAMESPACE
value: chaos-testing
- name: CLUSTER_SCOPED
value: "true"
- name: TZ
value: ${timezone}
- name: CHAOS_DAEMON_PORT
value: !!str 31767
- name: BPFKI_PORT
value: !!str 50051
- name: TEMPLATE_LABELS
value: "app.kubernetes.io/component:template"
- name: CONFIGMAP_LABELS
value: "app.kubernetes.io/component:webhook"
- name: PPROF_ADDR
value: ":10081"
- name: CHAOS_DNS_SERVICE_NAME
value: chaos-mesh-dns-server
- name: CHAOS_DNS_SERVICE_PORT
value: !!str 9288
volumeMounts:
- name: webhook-certs
mountPath: /etc/webhook/certs
readOnly: true
ports:
- name: webhook
containerPort: 9443 # Customize containerPort
- name: http
containerPort: 10080
- name: pprof
containerPort: 10081
volumes:
- name: webhook-certs
secret:
secretName: chaos-mesh-webhook-certs
---
# Source: chaos-mesh/templates/webhook-configuration.yaml
apiVersion: admissionregistration.k8s.io/v1beta1
kind: MutatingWebhookConfiguration
metadata:
name: chaos-mesh-mutation
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: admission-webhook
webhooks:
- name: admission-webhook.chaos-mesh.org
clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: "/inject-v1-pod"
rules:
- operations: [ "CREATE" ]
apiGroups: [""]
apiVersions: ["v1"]
resources: ["pods"]
namespaceSelector:
matchLabels:
admission-webhook: enabled
failurePolicy: Ignore
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-podchaos
failurePolicy: Fail
name: mpodchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- podchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-iochaos
failurePolicy: Fail
name: miochaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- iochaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-timechaos
failurePolicy: Fail
name: mtimechaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- timechaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-networkchaos
failurePolicy: Fail
name: mnetworkchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- networkchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-kernelchaos
failurePolicy: Fail
name: mkernelchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- kernelchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-stresschaos
failurePolicy: Fail
name: mstresschaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- stresschaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-podiochaos
failurePolicy: Fail
name: mpodiochaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- podiochaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-podnetworkchaos
failurePolicy: Fail
name: mpodnetworkchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- podnetworkchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /mutate-chaos-mesh-org-v1alpha1-dnschaos
failurePolicy: Fail
name: mdnschaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- dnschaos
---
# Source: chaos-mesh/templates/webhook-configuration.yaml
apiVersion: admissionregistration.k8s.io/v1beta1
kind: ValidatingWebhookConfiguration
metadata:
name: chaos-mesh-validation
labels:
app.kubernetes.io/name: chaos-mesh
app.kubernetes.io/instance: chaos-mesh
app.kubernetes.io/component: admission-webhook
webhooks:
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-podchaos
failurePolicy: Fail
name: vpodchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- podchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-iochaos
failurePolicy: Fail
name: viochaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- iochaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-timechaos
failurePolicy: Fail
name: vtimechaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- timechaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-networkchaos
failurePolicy: Fail
name: vnetworkchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- networkchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-kernelchaos
failurePolicy: Fail
name: vkernelchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- kernelchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-stresschaos
failurePolicy: Fail
name: vstresschaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- stresschaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-podnetworkchaos
failurePolicy: Fail
name: vpodnetworkchaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- podnetworkchaos
- clientConfig:
caBundle: "${CA_BUNDLE}"
service:
name: chaos-mesh-controller-manager
namespace: chaos-testing
path: /validate-chaos-mesh-org-v1alpha1-dnschaos
failurePolicy: Fail
name: vdnschaos.kb.io
rules:
- apiGroups:
- chaos-mesh.org
apiVersions:
- v1alpha1
operations:
- CREATE
- UPDATE
resources:
- dnschaos
EOF
# chaos-mesh.yaml end
}
main "$@" || exit 1
|
package com.github.shimmerjordan.common.security.utils;
import com.github.shimmerjordan.common.core.utils.SpringContextHolder;
import com.github.shimmerjordan.common.security.constant.SecurityConstant;
import com.github.shimmerjordan.common.security.tenant.TenantContextHolder;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.bouncycastle.util.encoders.Base64;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.provider.authentication.OAuth2AuthenticationDetails;
import org.springframework.security.oauth2.provider.token.ResourceServerTokenServices;
import org.springframework.security.web.authentication.WebAuthenticationDetails;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.Principal;
/**
 * System utility class: static helpers for reading the currently
 * authenticated user, the system code and the tenant code out of the
 * Spring Security context.
 *
 * @author shimmerjordan
 * @date 2021/01/13 20:50
 */
@Slf4j
public class SysUtil {

    /**
     * Returns the username of the currently logged-in user.
     * NOTE(review): assumes an authenticated context — getAuthentication()
     * can return null for anonymous requests, which would raise an NPE here.
     *
     * @return String
     * @author shimmerjordan
     * @date 2021/03/17 11:46
     */
    public static String getUser() {
        Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal();
        if (principal instanceof UserDetails)
            return ((UserDetails) principal).getUsername();
        if (principal instanceof Principal)
            return ((Principal) principal).getName();
        return String.valueOf(principal);
    }

    /**
     * Returns the system code constant.
     *
     * @return String
     */
    public static String getSysCode() {
        return SecurityConstant.SYS_CODE;
    }

    /**
     * Returns the tenant code. Resolution order: thread-local tenant
     * context, then the current user's token/request, then the default.
     *
     * @return String
     */
    public static String getTenantCode() {
        String tenantCode = TenantContextHolder.getTenantCode();
        if (StringUtils.isBlank(tenantCode))
            tenantCode = getCurrentUserTenantCode();
        if (StringUtils.isBlank(tenantCode))
            tenantCode = SecurityConstant.DEFAULT_TENANT_CODE;
        return tenantCode;
    }

    /**
     * Resolves the tenant code of the currently logged-in user. For OAuth2
     * requests it is read from the access token's additional information;
     * for plain web authentications it falls back to the request parameter.
     * Returns "" when it cannot be determined.
     *
     * @return String
     */
    private static String getCurrentUserTenantCode() {
        String tenantCode = "";
        try {
            ResourceServerTokenServices resourceServerTokenServices = SpringContextHolder.getApplicationContext().getBean(ResourceServerTokenServices.class);
            Object details = SecurityContextHolder.getContext().getAuthentication().getDetails();
            if (details instanceof OAuth2AuthenticationDetails) {
                OAuth2AuthenticationDetails oAuth2AuthenticationDetails = (OAuth2AuthenticationDetails) details;
                OAuth2AccessToken oAuth2AccessToken = resourceServerTokenServices.readAccessToken(oAuth2AuthenticationDetails.getTokenValue());
                Object tenantObj = oAuth2AccessToken.getAdditionalInformation().get(SecurityConstant.TENANT_CODE);
                tenantCode = tenantObj == null ? "" : tenantObj.toString();
            } else if (details instanceof WebAuthenticationDetails) {
                // Not an OAuth2 authentication: read the tenant code from
                // the current request's parameter instead.
                Object requestObj = RequestContextHolder.getRequestAttributes();
                if (requestObj != null) {
                    HttpServletRequest request = ((ServletRequestAttributes) requestObj).getRequest();
                    tenantCode = request.getParameter(SecurityConstant.TENANT_CODE);
                }
            }
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        return tenantCode;
    }

    /**
     * Returns the Authentication of the current user.
     *
     * @return Authentication
     * @author shimmerjordan
     * @date 2021/03/17 19:18
     */
    public static Authentication getCurrentAuthentication() {
        return SecurityContextHolder.getContext().getAuthentication();
    }

    /**
     * Returns the principal of the currently logged-in user.
     *
     * @return Object
     * @author shimmerjordan
     * @date 2021/03/17 11:48
     */
    public static Object getCurrentPrincipal() {
        return SecurityContextHolder.getContext().getAuthentication().getPrincipal();
    }

    /**
     * Extracts and decodes the clientId/clientSecret pair from a Basic
     * authentication header of the form "Basic base64(clientId:clientSecret)".
     *
     * @param header the Authorization header value; must start with "Basic "
     * @throws RuntimeException if the Basic header is not present or is not valid
     *                          Base64
     */
    public static String[] extractAndDecodeHeader(String header) throws IOException {
        byte[] base64Token = header.substring(6).getBytes(StandardCharsets.UTF_8);
        byte[] decoded;
        try {
            // BUG FIX: the BouncyCastle decoder throws DecoderException
            // (an IllegalStateException), so the catch below never fired
            // and raw decoder exceptions escaped. java.util.Base64 throws
            // IllegalArgumentException, matching this method's contract.
            // (Fully qualified to avoid clashing with the BouncyCastle import.)
            decoded = java.util.Base64.getDecoder().decode(base64Token);
        } catch (IllegalArgumentException e) {
            throw new RuntimeException("Failed to decode basic authentication token");
        }
        String token = new String(decoded, StandardCharsets.UTF_8);
        int delim = token.indexOf(":");
        if (delim == -1)
            throw new RuntimeException("Invalid basic authentication token");
        return new String[]{token.substring(0, delim), token.substring(delim + 1)};
    }
}
|
<reponame>rockenbf/ze_oss<filename>imp_core/include/imp/core/roi.hpp
// Copyright (c) 2015-2016, ETH Zurich, <NAME>, Zurich Eye
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of the ETH Zurich, Wyss Zurich, Zurich Eye nor the
// names of its contributors may be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL ETH Zurich, Wyss Zurich, Zurich Eye BE LIABLE FOR ANY
// DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#pragma once
#include <cstdint>
#include <array>
#include <algorithm>
#include <imp/core/size.hpp>
namespace ze {
//------------------------------------------------------------------------------
/**
 * @brief The class RoiBase defines the templated base class of an \a DIM-dimensional
 * region-of-interest utilizing the CRTP pattern.
 */
template<typename T, std::uint8_t DIM, typename Derived>
struct RoiBase
{
  std::array<T, DIM> pt; //!< internal data storage for all dimensions' 'left-upper' corner
  ze::Size<T, DIM> sz;   //!< size of the given roi

  //! Default constructor: zero-fills the corner; the size is default-constructed.
  RoiBase()
  {
    std::fill(pt.begin(), pt.end(), 0);
  }

  /**
   * @brief RoiBase Constructor given the left-upper corner and the ROI's size
   * @param lu array of the left-upper corner of the format {a1, a2, a3, ...., aN}
   * @param sz Size of the \a DIM-dimensional ROI
   */
  RoiBase(const std::array<T,DIM>& lu, const ze::Size<T,DIM>& sz)
    : pt(lu)
    , sz(sz)
  {
  }

  /**
   * @brief RoiBase initialized with max size only. The top-left corner will be ZEROS
   * @param sz The ROI's size in all dimensions.
   */
  RoiBase(const ze::Size<T,DIM>& sz)
    : sz(sz)
  {
    std::fill(pt.begin(), pt.end(), 0);
  }

  virtual ~RoiBase() = default;

  // Copy operations are plain memberwise copies; defaulting them is
  // equivalent to the previous hand-written versions.
  RoiBase(const RoiBase& from) = default;
  RoiBase& operator= (const RoiBase& from) = default;

  /**
   * @brief dim Returns the dimension of the Roi object.
   * @return Dimension.
   */
  std::uint8_t dim() const {return DIM;}

  /**
   * @brief luRaw gives access to the underlying (raw) data storage
   * @return Pointer address to the buffer of the underlying data storage.
   */
  T* luRaw() {return pt.data();}

  /**
   * @brief luRaw gives access to the underlying (raw) const data storage.
   * Note: std::array::data() is already const-qualified on a const object,
   * so the reinterpret_cast of the original implementation was unnecessary.
   * @return Pointer address to the buffer of the underlying data storage.
   */
  const T* luRaw() const {return pt.data();}

  /**
   * @brief array with the internal data storing the values for the left-upper corner
   */
  std::array<T, DIM>& lu() {return pt;}

  /**
   * @brief const array with the internal data storing the values for the left-upper corner
   */
  const std::array<T, DIM>& lu() const {return pt;}

  /**
   * @brief size of the ROI
   */
  ze::Size<T, DIM>& size() {return sz;}

  /**
   * @brief size of the ROI (const)
   */
  const ze::Size<T, DIM>& size() const {return sz;}
};
// Streams an ROI as "(p1,p2,...,pN)" followed by its size.
template<typename T, std::uint8_t DIM, typename Derived>
inline std::ostream& operator<<(std::ostream &os, const RoiBase<T, DIM, Derived>& rhs)
{
  const std::array<T, DIM>& corner = rhs.lu();
  os << "(";
  for (std::uint8_t i = 0; i < DIM; ++i)
  {
    if (i > 0)
    {
      os << ",";
    }
    os << corner[i];
  }
  os << ")" << rhs.size();
  return os;
}
//------------------------------------------------------------------------------
// relational operators
// Two ROIs compare equal iff both the corner and the size match.
template<typename T, std::uint8_t DIM, typename Derived>
inline bool operator==(const RoiBase<T, DIM, Derived>& lhs,
                       const RoiBase<T, DIM, Derived>& rhs)
{
  if (lhs.pt != rhs.pt)
  {
    return false;
  }
  return lhs.sz == rhs.sz;
}
// Negation of operator==: ROIs differ when either corner or size differs.
template<typename T, std::uint8_t DIM, typename Derived>
inline bool operator!=(const RoiBase<T, DIM, Derived>& lhs,
                       const RoiBase<T, DIM, Derived>& rhs)
{
  return !((lhs.pt == rhs.pt) && (lhs.sz == rhs.sz));
}
//------------------------------------------------------------------------------
/**
* @brief The class Roi defines region of interest for \a DIM dimensions
*/
// Generic DIM-dimensional ROI: inherits all storage and accessors from
// RoiBase and pulls in its constructors unchanged.
template<typename T, std::uint8_t DIM>
struct Roi
    : public RoiBase<T, DIM, Roi<T, DIM> >
{
  using Base = RoiBase<T, DIM, Roi<T, DIM> >;
  using Base::Base;  // inherit all RoiBase constructors

  Roi() = default;
  virtual ~Roi() = default;
};
//------------------------------------------------------------------------------
/**
* @brief The Roi<T, 1> is a special Roi for e.g. an 1D array defining its start element and length
*/
// 1D specialization: an ROI over e.g. an array, described by a start
// element `x` and a `length`.
template<typename T>
struct Roi<T, 1>
    : public RoiBase<T, 1, Roi<T, 1> >
{
  using Base = RoiBase<T, 1, Roi<T, 1> >;
  using Base::Base;  // inherit all RoiBase constructors

  Roi() = default;
  virtual ~Roi() = default;

  // Convenience constructor from a start element and a length.
  Roi(const T& x, const T& length)
    : Base({x}, {length})
  {
  }

  /**
   * @brief x returns the ROI's x coordinate of the left-upper corner
   */
  T& x() noexcept {return this->pt[0];}
  constexpr const T& x() const noexcept {return this->pt[0];}

  /**
   * @brief length returns the extent of the 1d Roi
   */
  T& length() noexcept {return this->sz[0];}
  constexpr const T& length() const noexcept {return this->sz[0];}
};
//------------------------------------------------------------------------------
/**
* @brief The Roi<T, 2> is a special Roi for a 2D shape defining its width and height
*/
// 2D specialization: a rectangular ROI described by its left-upper corner
// (x, y) and its extents (width, height).
template<typename T>
struct Roi<T, 2>
    : public RoiBase<T, 2, Roi<T, 2> >
{
  using Base = RoiBase<T, 2, Roi<T, 2> >;
  using Base::Base;  // inherit all RoiBase constructors

  Roi() = default;
  virtual ~Roi() = default;

  // Convenience constructor from corner coordinates and extents.
  Roi(const T& x, const T& y, const T& width, const T& height)
    : Base({x,y}, {width, height})
  {
  }

  /**
   * @brief x returns the ROI's x coordinate of the left-upper corner
   */
  T& x() noexcept {return this->pt[0];}
  constexpr const T& x() const noexcept {return this->pt[0];}

  /**
   * @brief y returns the ROI's y coordinate of the left-upper corner
   */
  T& y() noexcept {return this->pt[1];}
  constexpr const T& y() const noexcept {return this->pt[1];}

  /**
   * @brief width returns the width of the 2d Roi
   */
  T& width() noexcept {return this->sz[0];}
  constexpr const T& width() const noexcept {return this->sz[0];}

  /**
   * @brief height returns the length of the second dimension of the 2d Roi
   * @return
   */
  T& height() noexcept {return this->sz[1];}
  constexpr const T& height() const noexcept {return this->sz[1];}
};
//------------------------------------------------------------------------------
// some convenience typedefs
// 1D
using Roi1u = Roi<uint32_t, 1>;
using Roi1i = Roi<std::int32_t, 1>;
using Roi1f = Roi<float, 1>;
using Roi1d = Roi<double, 1>;  // BUG FIX: the 'd' (double) aliases were typedef'd to float
// 2D
using Roi2u = Roi<uint32_t, 2>;
using Roi2i = Roi<std::int32_t, 2>;
using Roi2f = Roi<float, 2>;
using Roi2d = Roi<double, 2>;  // BUG FIX: was Roi<float, 2>
//3D
using Roi3u = Roi<uint32_t, 3>;
using Roi3i = Roi<std::int32_t, 3>;
using Roi3f = Roi<float, 3>;
using Roi3d = Roi<double, 3>;  // BUG FIX: was Roi<float, 3>
} // namespace ze
|
#!/bin/bash
# Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# runs benchmark and reports time to convergence
# to use the script:
#   run_and_time.sh
set -e

# start timing
start=$(date +%s)
start_fmt=$(date +%Y-%m-%d\ %r)
echo "STARTING TIMING RUN AT $start_fmt"

# run benchmark
set -x

# Tunables, all overridable from the environment.
BATCHSIZE=${BATCHSIZE:-32}
EVALBATCHSIZE=${EVALBATCHSIZE:-${BATCHSIZE}}
NUMEPOCHS=${NUMEPOCHS:-120}
LOG_INTERVAL=${LOG_INTERVAL:-100}
DATASET_SIZE=${DATASET_SIZE:-117266}
EVAL_DATASET_SIZE=${EVAL_DATASET_SIZE:-5000}
PRETRAINED_BACKBONE=${PRETRAINED_BACKBONE:-"./resnet34-333f7ec4.pickle"}
DATASET_DIR=${DATASET_DIR:-"/data/coco2017"}
DATA_LAYOUT=${DATA_LAYOUT:-NHWC}
ASYNC_EVAL=${ASYNC_EVAL:-1}

echo "running benchmark"

# CMD is the launcher prefix for the training process.
declare -a CMD
if [ -n "${SLURM_LOCALID-}" ]; then
  # Mode 1: Slurm launched a task for each GPU and set some envvars; no need for parallel launch
  if [ "${SLURM_NTASKS}" -gt "${SLURM_JOB_NUM_NODES}" ]; then
    cluster=''
    if [[ "${DGXSYSTEM}" == DGX2* ]]; then
      cluster='circe'
    fi
    if [[ "${DGXSYSTEM}" == DGXA100* ]]; then
      cluster='selene'
    fi
    CMD=( './bind.sh' "--cluster=${cluster}" '--ib=single' '--' 'python' '-u' )
  else
    CMD=( 'python' '-u' )
  fi
else
  # Mode 2: Single-node Docker; need to launch tasks with Pytorch's distributed launch
  # TODO: use bind.sh instead of bind_launch.py
  #       torch.distributed.launch only accepts Python programs (not bash scripts) to exec
  CMD=( 'python' '-u' '-m' 'bind_launch' "--nsockets_per_node=${DGXNSOCKET}" \
    "--ncores_per_socket=${DGXSOCKETCORES}" "--nproc_per_node=${DGXNGPU}" )
fi

#if [ "$PMIX_RANK" = "0" ]; then
#  echo "*****************************printing env********************************"
#  env
#  echo "***************************done printing env*****************************"
#fi

# Arguments forwarded to the training program.
PARAMS=(
  --batch-size              "${BATCHSIZE}"
  --eval-batch-size         "${EVALBATCHSIZE}"
  --epochs                  "${NUMEPOCHS}"
  --log-interval            "${LOG_INTERVAL}"
  --coco-root               "${DATASET_DIR}"
  --dataset-size            "${DATASET_SIZE}"
  --eval-dataset-size       "${EVAL_DATASET_SIZE}"
  --pretrained-backbone     "${PRETRAINED_BACKBONE}"
  --data-layout             "${DATA_LAYOUT}"
)

if [[ ${ASYNC_EVAL} -eq 1 ]]; then
  PARAMS+=(
    --async-val
  )
fi

if [ "$LOGGER" = "apiLog.sh" ];
then
  LOGGER="${LOGGER} -p MLPerf/${MODEL_NAME} -v ${FRAMEWORK}/train/${DGXSYSTEM}"
  # TODO(ahmadki): track the apiLog.sh bug and remove the workaround
  # there is a bug in apiLog.sh preventing it from collecting
  # NCCL logs, the workaround is to log a single rank only
  # LOCAL_RANK is set with an enroot hook for Pytorch containers
  # SLURM_LOCALID is set by Slurm
  # OMPI_COMM_WORLD_LOCAL_RANK is set by mpirun
  readonly node_rank="${SLURM_NODEID:-0}"
  readonly local_rank="${LOCAL_RANK:=${SLURM_LOCALID:=${OMPI_COMM_WORLD_LOCAL_RANK:-}}}"
  if [ "$node_rank" -eq 0 ] && [ "$local_rank" -eq 0 ];
  then
    : # rank 0 keeps the logger (the original `LOGGER=$LOGGER` was a no-op)
  else
    LOGGER=""
  fi
fi

# run training
# BUG FIX: with `set -e`, the original `cmd ; ret_code=$?` aborted the script
# as soon as training failed, before ret_code was captured — so `set +x`, the
# sleep and the explicit exit-code propagation below were dead code on failure.
# `|| ret_code=$?` captures the status without tripping `set -e`.
# ${LOGGER:-} and ${EXTRA_PARAMS} are intentionally unquoted so they word-split
# into separate arguments (both may be empty or contain multiple flags).
ret_code=0
${LOGGER:-} "${CMD[@]}" ssd_main_async.py "${PARAMS[@]}" ${EXTRA_PARAMS} || ret_code=$?

set +x
sleep 3
if [[ $ret_code != 0 ]]; then exit $ret_code; fi

# end timing
end=$(date +%s)
end_fmt=$(date +%Y-%m-%d\ %r)
echo "ENDING TIMING RUN AT $end_fmt"

# report result
result=$(( end - start ))
result_name="SINGLE_STAGE_DETECTOR"
echo "RESULT,$result_name,,$result,nvidia,$start_fmt"
|
#include "app_mainwindow.h"
#include <lib/fpconv.h>
namespace M
{
namespace App
{
// Builds the main window: sets up the Designer UI, sizes the window, and
// wires every input widget to updateNumber() so the formatted output
// refreshes on any change.
MainWindow::MainWindow ()
{
  setupUi (this);
  // Minimum width of ~3 inches (logicalDpiX() is pixels per logical inch).
  setMinimumWidth (3 * logicalDpiX ());
  // Re-render whenever the input text, the precision spinbox or the trim
  // checkbox changes. QSpinBox::valueChanged is overloaded, hence the cast.
  connect (_input_wgt, &QLineEdit::textChanged, this, &MainWindow::updateNumber);
  connect (_prec_wgt, static_cast <void (QSpinBox::*) (int)> (&QSpinBox::valueChanged), this, &MainWindow::updateNumber);
  connect (_trim_wgt, &QCheckBox::stateChanged, this, &MainWindow::updateNumber);
  // Populate the output label for the initial widget state.
  updateNumber ();
}
// Parses the input field as a double and shows it formatted with the
// selected precision/trim settings; clears the label on parse failure.
void MainWindow::updateNumber ()
{
  bool ok = false;
  double value = _input_wgt->text ().toDouble (&ok);
  if (!ok)
    _output_lbl->setText ("");
  else
  {
    int prec = _prec_wgt->value ();
    bool trim = _trim_wgt->isChecked ();
    Lib::DoubleToString double_to_string (prec, trim);
    QString result = double_to_string (value);
    // BUG FIX: removed the unused `QChar x = result[0];` — besides being
    // dead code, non-const QString::operator[] on index 0 of an empty
    // result would be out-of-bounds.
    _output_lbl->setText (result);
  }
}
}
}
|
<filename>app/src/main/java/sample/sadashiv/examplerealmmvp/ui/adapter/BookGridAdapter.java
package sample.sadashiv.examplerealmmvp.ui.adapter;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import butterknife.BindView;
import butterknife.ButterKnife;
import io.realm.RealmList;
import sample.sadashiv.examplerealmmvp.R;
import sample.sadashiv.examplerealmmvp.model.Book;
/**
 * RecyclerView adapter that renders a publisher's books in a grid;
 * each cell shows the book title.
 */
public class BookGridAdapter extends RecyclerView.Adapter<BookGridAdapter.ViewHolder> {

    // Backing data; null until setBooks() is first called.
    private RealmList<Book> mBooks;

    @Override
    public ViewHolder onCreateViewHolder(final ViewGroup parent, final int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_publisher_book, parent, false);
        return new ViewHolder(view);
    }

    @Override
    public void onBindViewHolder(final ViewHolder holder, final int position) {
        holder.mTextTitle.setText(mBooks.get(position).getTitle());
    }

    @Override
    public int getItemCount() {
        // BUG FIX: mBooks is null before setBooks() runs; report an empty
        // list instead of throwing a NullPointerException during the first
        // layout pass. With a count of 0, onBindViewHolder is never reached.
        return mBooks == null ? 0 : mBooks.size();
    }

    /**
     * Replaces the adapter data and refreshes the whole grid.
     *
     * @param books the books to display
     */
    public void setBooks(final RealmList<Book> books) {
        mBooks = books;
        notifyDataSetChanged();
    }

    /** Holds the views of a single grid cell. */
    public class ViewHolder extends RecyclerView.ViewHolder {
        @BindView(R.id.text_title) TextView mTextTitle;

        public ViewHolder(final View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
        }
    }
}
|
package db
import (
"context"
"errors"
"github.com/golark/utaskdaemon/dbcontainer"
log "github.com/sirupsen/logrus"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
)
// MongoConn holds the state of one MongoDB connection: the target URI,
// database and collection names, plus the live driver handles which are
// populated by connect().
type MongoConn struct {
	uri            string             // mongodb connection URI
	database       string             // database name to use
	collectionName string             // collection name within the database
	collection     *mongo.Collection  // set by connect(); nil until then
	client         *mongo.Client      // set by connect(); nil until then
}

const (
	// ContainerName is the name of the container that hosts MongoDB.
	ContainerName = "utaskmongo"
)
// NewMongoConn builds a connection handle for the given URI, database and
// collection. If the backing MongoDB container is not running it is
// (re)created first, then a connection is established via connect().
// NOTE(review): CreateMongodbContainer's outcome is not checked here —
// connect() is relied upon to surface a failure if the container did not
// come up. TODO confirm that is intentional.
func NewMongoConn(URI string, database string, colName string) (*MongoConn, error) {
	// first check if the container exists
	if !dbcontainer.IsContainerRunning(ContainerName) {
		dbcontainer.CreateMongodbContainer(ContainerName)
	}
	db := &MongoConn{uri: URI, database: database, collectionName: colName}
	return db, db.connect()
}
// Disconnect closes the db connection. Returns an error when the client
// was never connected or when the driver fails to disconnect.
func (db *MongoConn) Disconnect() error {
	// first check if we have a valid client
	if db.client == nil {
		log.Error("can not disconnect from nil client")
		// BUG FIX: the error was previously created but never returned,
		// which let execution fall through to db.client.Disconnect on a
		// nil client and panic.
		return errors.New("nil client, can not disconnect")
	}
	if err := db.client.Disconnect(context.TODO()); err != nil {
		log.WithFields(log.Fields{"err": err}).Error("error while disconnecting from db")
		return err
	}
	log.WithFields(log.Fields{"db": db}).Trace("disconnected from db")
	return nil
}
// connect connects to mongo db: validates the configuration, dials the
// server, pings it for sanity, and binds the database/collection handles.
func (db *MongoConn) connect() error {
	// step 1 - check URI
	if db.uri == "" {
		log.Error("URI can not be empty")
		return errors.New("URI can not be empty")
	}
	log.WithFields(log.Fields{"uri": db.uri}).Info("URI")
	// step 2 - connect to mongodb
	log.WithFields(log.Fields{"uri": db.uri}).Info("attempting connection to db")
	clientOpts := options.Client().ApplyURI(db.uri)
	client, err := mongo.Connect(context.TODO(), clientOpts)
	if err != nil {
		log.WithFields(log.Fields{"err": err, "URI": db.uri}).Error("error while trying to connect to db")
		// BUG FIX: the connect error was only logged and the method carried
		// on with an unusable client; propagate it to the caller.
		return err
	}
	log.Info("connected to mongodb")
	db.client = client
	// step 3 - try to ping
	err = db.client.Ping(context.TODO(), nil)
	if err != nil {
		log.WithFields(log.Fields{"err": err}).Error("cant ping db")
		// BUG FIX: a failed ping means the server is unreachable; report it.
		return err
	}
	log.Trace("successfully pinged mongodb")
	// step 4 - create a database
	if db.database == "" {
		log.Error("database name can not be empty string")
		return errors.New("database name can not be empty string")
	}
	if db.collectionName == "" {
		log.Error("collection name can not be empty")
		return errors.New("collection name can not be empty")
	}
	db.collection = db.client.Database(db.database).Collection(db.collectionName)
	log.WithFields(log.Fields{"Database": db.database, "CollectionName": db.collectionName}).Info("created database & collection")
	return nil
}
// InsertNewDocument inserts a single document into the collection.
// Returns an error when the collection is not initialized or the insert fails.
func (db *MongoConn) InsertNewDocument(intf interface{}) error {
	if db.collection == nil {
		log.Error("collection is nil")
		return errors.New("nil collection")
	}
	res, err := db.collection.InsertOne(context.TODO(), intf)
	if err != nil {
		log.WithFields(log.Fields{"err": err}).Error("error occured while trying to insert to collection")
		// BUG FIX: the insert error was swallowed and nil returned, so
		// callers could never detect a failed write.
		return err
	}
	log.WithFields(log.Fields{"result": res, "db": db}).Trace("inserted single document to collection")
	return nil
}
// DeleteAll removes every document from the collection.
func (db *MongoConn) DeleteAll() error {
	if db.collection == nil {
		log.Error("collection is nil")
		// CONSISTENCY FIX: the sibling methods (InsertNewDocument,
		// GetAllDocuments) report a nil collection as an error; returning
		// nil here silently pretended the delete succeeded.
		return errors.New("nil collection")
	}
	log.Info("deleting all documents")
	res, err := db.collection.DeleteMany(context.TODO(), bson.M{})
	if err != nil {
		log.WithFields(log.Fields{"err": err}).Error("can not delete documents")
		return err
	}
	log.WithFields(log.Fields{"res": res}).Info("delete result")
	return nil
}
// GetAllDocuments fetches every document in the collection and returns
// each one decoded into a generic map. Individual decode failures are
// logged and skipped; the remaining documents are still returned.
func (db *MongoConn) GetAllDocuments() ([]map[string]interface{}, error) {
	if db.collection == nil {
		log.Error("collection is nil")
		return nil, errors.New("nil collection")
	}
	// query all
	cur, findErr := db.collection.Find(context.TODO(), bson.M{})
	if findErr != nil {
		log.WithFields(log.Fields{"err": findErr}).Error("errored while querying collection")
		return nil, errors.New("errored while querying collection")
	}
	defer cur.Close(context.TODO())
	// decode
	var docs []map[string]interface{}
	for cur.Next(context.TODO()) {
		var doc map[string]interface{}
		if decodeErr := cur.Decode(&doc); decodeErr != nil {
			log.Error("errored while reading the cursor")
			continue
		}
		log.WithFields(log.Fields{"data": doc}).Trace("get document")
		docs = append(docs, doc)
	}
	return docs, nil
}
|
#include <gtest/gtest.h>
#include <algorithm>
#include <set>
#include <unordered_set>
#include <vector>
#include <gbwtgraph/algorithms.h>
#include <gbwtgraph/gfa.h>
#include "shared.h"
using namespace gbwtgraph;
namespace
{
//------------------------------------------------------------------------------
// Fixture for the connected-component tests: loads gfas/components.gfa
// into a GBWT index and a GBWTGraph before each test.
class ComponentTest : public ::testing::Test
{
public:
  gbwt::GBWT index;   // GBWT built from the GFA file
  GBWTGraph graph;    // graph view backed by `index`
  size_t components;  // expected number of weakly connected components
  // NOTE(review): correct_paths is never used in the visible tests.
  std::vector<std::set<gbwt::vector_type>> correct_paths;

  ComponentTest()
  {
  }

  void SetUp() override
  {
    auto gfa_parse = gfa_to_gbwt("gfas/components.gfa");
    this->index = *(gfa_parse.first);
    this->graph = GBWTGraph(this->index, *(gfa_parse.second));
    this->components = 2;  // expected component count for this input file
  }
};
// Verifies that weakly_connected_components() finds exactly the two known
// components of components.gfa, with the expected nodes in each.
TEST_F(ComponentTest, Components)
{
  std::vector<std::vector<nid_t>> correct_components =
  {
    { 11, 12, 13, 14, 15, 16, 17 },
    { 21, 22, 23, 24, 25 }
  };

  std::vector<std::vector<nid_t>> result = weakly_connected_components(this->graph);
  ASSERT_EQ(result.size(), correct_components.size()) << "Wrong number of components";
  for(size_t i = 0; i < result.size(); i++)
  {
    ASSERT_EQ(result[i].size(), correct_components[i].size()) << "Wrong number of nodes in component " << i;
    for(size_t j = 0; j < result[i].size(); j++)
    {
      EXPECT_EQ(result[i][j], correct_components[i][j]) << "Incorrect node in component " << i;
    }
  }
}
// Verifies is_nice_and_acyclic(): the first component has head node 11, the
// second has none, and adding a nonexistent node id does not change the result.
TEST_F(ComponentTest, HeadNodes)
{
  std::vector<std::vector<nid_t>> correct_heads =
  {
    { 11 },
    { }
  };

  std::vector<std::vector<nid_t>> components = weakly_connected_components(this->graph);
  ASSERT_EQ(components.size(), correct_heads.size()) << "Wrong number of components";

  // For both components, add a version with a nonexistent node id.
  for(size_t i = 0; i < 2; i++)
  {
    std::vector<nid_t> padded = components[i];
    padded.push_back(42);
    components.push_back(padded);
    correct_heads.push_back(correct_heads[i]);
  }

  for(size_t i = 0; i < components.size(); i++)
  {
    std::vector<nid_t> heads = is_nice_and_acyclic(this->graph, components[i]);
    ASSERT_EQ(heads.size(), correct_heads[i].size()) << "Wrong number of head nodes in component " << i;
    for(size_t j = 0; j < heads.size(); j++)
    {
      EXPECT_EQ(heads[j], correct_heads[i][j]) << "Incorrect head node in component " << i;
    }
  }
}
//------------------------------------------------------------------------------
// Fixture for topological_order() tests: loads a graph that contains cycles
// so that different node subsets can be acyclic or cyclic.
class TopologicalOrderTest : public ::testing::Test
{
public:
  gbwt::GBWT index;   // GBWT built from gfas/cyclic.gfa.
  GBWTGraph graph;    // Graph backed by `index`.
  TopologicalOrderTest()
  {
  }
  void SetUp() override
  {
    auto gfa_parse = gfa_to_gbwt("gfas/cyclic.gfa");
    this->index = *(gfa_parse.first);
    this->graph = GBWTGraph(this->index, *(gfa_parse.second));
  }
  // Computes topological_order() over `subgraph` and checks:
  //  - a cyclic subgraph yields an empty order;
  //  - an acyclic subgraph yields an order containing both orientations of
  //    every existing node, with every in-subgraph edge going forward.
  void check_subgraph(const std::unordered_set<nid_t>& subgraph, bool acyclic) const
  {
    std::vector<handle_t> order = topological_order(this->graph, subgraph);
    if(!acyclic)
    {
      ASSERT_TRUE(order.empty()) << "Non-empty order for a subgraph containing cycles";
      return;
    }
    // Determine the node ids that do not exist in the graph.
    size_t missing_nodes = 0;
    for(nid_t node : subgraph)
    {
      if(!(this->graph.has_node(node))) { missing_nodes++; }
    }
    // Two handles (forward + reverse orientation) per existing node.
    ASSERT_EQ(order.size(), 2 * (subgraph.size() - missing_nodes)) << "Wrong number of handles in the order";
    for(nid_t node : subgraph)
    {
      if(!(this->graph.has_node(node))) { continue; }
      for(bool orientation : { false, true })
      {
        handle_t from = this->graph.get_handle(node, orientation);
        auto from_iter = std::find(order.begin(), order.end(), from);
        ASSERT_NE(from_iter, order.end()) << "Node " << node << ", orientation " << orientation << " not found in the order";
        // Every successor inside the subgraph must appear later in the order.
        bool ok = this->graph.follow_edges(from, false, [&](const handle_t& to) -> bool
        {
          if(subgraph.find(this->graph.get_id(to)) == subgraph.end()) { return true; }
          auto to_iter = std::find(order.begin(), order.end(), to);
          if(to_iter == order.end()) { return false; }
          return (from_iter < to_iter);
        });
        EXPECT_TRUE(ok) << "Constraints not satisfied for node " << node << ", orientation " << orientation;
      }
    }
  }
};
// Acyclic subset drawn from a single component: a valid order must exist.
TEST_F(TopologicalOrderTest, SingleComponent)
{
  std::unordered_set<nid_t> subgraph;
  for(nid_t node : { 1, 2, 4, 5, 6 }) { subgraph.insert(node); }
  this->check_subgraph(subgraph, true);
}
// Acyclic subset spanning two components: ordering must still succeed.
TEST_F(TopologicalOrderTest, TwoComponents)
{
  std::unordered_set<nid_t> subgraph;
  for(nid_t node : { 1, 2, 4, 6, 7, 8, 9 }) { subgraph.insert(node); }
  this->check_subgraph(subgraph, true);
}
// Subset that retains a cycle: topological_order() must return empty.
TEST_F(TopologicalOrderTest, CyclicComponent)
{
  std::unordered_set<nid_t> subgraph;
  for(nid_t node : { 2, 4, 5, 6, 8 }) { subgraph.insert(node); }
  this->check_subgraph(subgraph, false);
}
// Acyclic subset containing a node id (42) that is absent from the graph:
// missing ids must be ignored, not break the ordering.
TEST_F(TopologicalOrderTest, MissingNodes)
{
  std::unordered_set<nid_t> subgraph;
  for(nid_t node : { 1, 2, 4, 5, 6, 42 }) { subgraph.insert(node); }
  this->check_subgraph(subgraph, true);
}
//------------------------------------------------------------------------------
} // namespace
|
#!/usr/bin/env bash
#
# Vagrant provisioning script for a CentOS 7 box: installs PHP 7.3 (remi
# repository) and nginx, then reconfigures both to run as "vagrant".
#
# Fail fast: an aborted yum/sed step should stop provisioning.
set -euo pipefail

#== Import script args ==
# Fix: `timezone=$(echo "$1")` was a useless echo round-trip.
# NOTE(review): $timezone is never used below — confirm whether timezone
# configuration was intended here.
timezone="${1:-}"

#== Bash helpers ==
# Print a highlighted progress message.
info () {
  echo " "
  echo "--> $1"
  echo " "
}

#== Provision script ==
info "Provision-script user: $(whoami)"

# NOTE(review): DEBIAN_FRONTEND has no effect on yum-based systems — confirm
# whether this was copied from a Debian template.
export DEBIAN_FRONTEND=noninteractive

info "Adding EPEL repos"
yum update -y
yum install epel-release yum-utils -y
yum install http://rpms.remirepo.net/enterprise/remi-release-7.rpm -y
yum update -y

info "Update OS software"
yum upgrade -y

info "Install additional software (php and nginx)"
yum-config-manager --enable remi-php73
yum install php php-common php-opcache php-mcrypt php-cli php-gd php-curl php-mysqlnd php-xml php-mbstring php-intl php-fpm php-zip unzip nginx -y

info "Configure NGINX"
# Run nginx workers as the vagrant user.
sed -i 's/.*user nginx/user vagrant/g' /etc/nginx/nginx.conf
echo "Done!"

info "Configure PHP-FPM"
sed -i 's/user = apache/user = vagrant/g' /etc/php-fpm.d/www.conf
sed -i 's/group = apache/group = vagrant/g' /etc/php-fpm.d/www.conf
sed -i 's/owner = apache/owner = vagrant/g' /etc/php-fpm.d/www.conf
# Switch PHP-FPM from a TCP port to a unix socket.
sed -i 's/listen = 127.0.0.1:9000/listen = \/var\/run\/php-fpm\/php-fpm.sock/g' /etc/php-fpm.d/www.conf
# Handle both the commented and uncommented listen.owner/group defaults.
sed -i 's/;listen.owner = nobody/listen.owner = vagrant/g' /etc/php-fpm.d/www.conf
sed -i 's/;listen.group = nobody/listen.group = vagrant/g' /etc/php-fpm.d/www.conf
sed -i 's/listen.owner = nobody/listen.owner = vagrant/g' /etc/php-fpm.d/www.conf
sed -i 's/listen.group = nobody/listen.group = vagrant/g' /etc/php-fpm.d/www.conf
systemctl enable php-fpm
echo "Done!"

info "Install composer"
curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer
chown vagrant:vagrant /var/lib/php -R

info "Enabling site configuration"
ln -s /app/vagrant/nginx/app.conf /etc/nginx/conf.d/app.conf
echo "Done!"

info "Disabling SElinux"
# Best-effort: setenforce exits non-zero if SELinux is already disabled.
setenforce 0 || true
sed -i 's/SELINUX=enforcing/SELINUX=disabled/g' /etc/sysconfig/selinux

info "Change owners"
chown vagrant:vagrant /var/lib/nginx -R
|
// NOTE(review): this looks like a lint/parser test fixture (unused vars,
// implicit globals) rather than production code — confirm its purpose
// before "fixing" the issues flagged below.
(function() {
  var APP_name = 343,
    test_name = 4,
    me,
    that,
    self;
  // NOTE(review): A, Gone and Expected are not defined in any visible scope —
  // this line throws a ReferenceError if executed.
  console.log( A, Gone, Expected );
  // NOTE(review): "That" (capital T) is never declared, so these assignments
  // create an implicit global (or throw in strict mode); the declared
  // lowercase "that" above is never assigned.
  That = this;
  self = this;
  That = self = this;
  me = this;
  try {
  } catch( e ) {
  }
});
|
#!/bin/bash
# Slurm batch job: one DDPG training run (RoboschoolHopper-v1, epsilon-greedy
# exploration, seed 4, run 9) inside the ~/tf_cpu virtualenv.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=23:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolHopper-v1_ddpg_softcopy_epsilon_greedy_seed4_run9_%N-%j.out # %N for node name, %j for jobID
# Toolchain + CUDA stack required by the training script.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
# Activate the pre-built virtualenv.
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env RoboschoolHopper-v1 --random-seed 4 --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolHopper-v1/ddpg_softcopy_epsilon_greedy_seed4_run9 --continuous-act-space-flag --double-ddpg-flag
|
<reponame>isandlaTech/cohorte-runtime<gh_stars>1-10
/**
* Copyright 2014 isandlaTech
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.psem2m.isolates.ui.admin.impl;
import java.awt.Color;
import java.awt.Dimension;
import org.osgi.framework.BundleContext;
import org.psem2m.utilities.CXObjectBase;
import org.psem2m.utilities.CXStringUtils;
/**
* @author ogattaz
*
*/
/**
 * Configuration of the main admin-UI frame: position, size and background
 * color, read from framework properties with system properties as fallback.
 *
 * @author ogattaz
 */
public class CFrameMainConfig extends CXObjectBase {

    // Property names used to look up the frame configuration.
    private final static String COLOR = "psem2m.demo.ui.viewer.color";
    private final static String DIM_HEIGHT = "psem2m.demo.ui.viewer.height";
    private final static String DIM_LEFT = "psem2m.demo.ui.viewer.left";
    private final static String DIM_TOP = "psem2m.demo.ui.viewer.top";
    private final static String DIM_WIDTH = "psem2m.demo.ui.viewer.width";

    /** The bundle context (to access properties); may be null. */
    private final BundleContext pContext;

    private CFrameSizeValue pHeight;
    private EHtmlColor pHtmlColor;
    private CFrameSizeValue pLeft;
    private CFrameSizeValue pTop;
    private CFrameSizeValue pWidth;

    /**
     * Builds the configuration, immediately reading all properties.
     *
     * @param aContext
     *            bundle context used for property lookup (may be null)
     */
    public CFrameMainConfig(final BundleContext aContext) {
        super();
        pContext = aContext;
        init();
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * org.psem2m.utilities.CXObjectBase#addDescriptionInBuffer(java.lang.Appendable
     * )
     */
    @Override
    public Appendable addDescriptionInBuffer(final Appendable aBuffer) {
        CXStringUtils.appendIXDescriberInBuff(aBuffer, pTop);
        CXStringUtils.appendIXDescriberInBuff(aBuffer, pLeft);
        CXStringUtils.appendIXDescriberInBuff(aBuffer, pHeight);
        CXStringUtils.appendIXDescriberInBuff(aBuffer, pWidth);
        CXStringUtils.appendIXDescriberInBuff(aBuffer, pHtmlColor);
        return aBuffer;
    }

    /**
     * @return the configured background color
     */
    public Color getColor() {
        return pHtmlColor.getColor();
    }

    /**
     * @return the frame height in pixels
     */
    public int getHeight() {
        return pHeight.getPixels();
    }

    /**
     * @return the frame left offset in pixels
     */
    public int getLeft() {
        return pLeft.getPixels();
    }

    /**
     * Retrieves the value of the given property from the framework or the
     * system.
     *
     * @param aKey
     *            Property name
     * @return Property value (or null)
     */
    private String getProperty(final String aKey) {
        String value = null;
        if (pContext != null) {
            // Try the framework property
            value = pContext.getProperty(aKey);
        }
        if (value == null) {
            // Try the system property
            value = System.getProperty(aKey);
        }
        return value;
    }

    /**
     * @return the frame top offset in pixels
     */
    public int getTop() {
        return pTop.getPixels();
    }

    /**
     * @return the frame width in pixels
     */
    public int getWidth() {
        return pWidth.getPixels();
    }

    /**
     * @return the frame size as an AWT dimension
     */
    public Dimension getWidthHeight() {
        // Consistency fix: Dimension is already imported — no need for the
        // fully-qualified java.awt.Dimension.
        return new Dimension(getWidth(), getHeight());
    }

    /**
     * Reads all configuration properties and populates the fields.
     */
    private void init() {
        setHeight(new CFrameSizeValue(EFrameSize.HEIGHT,
                getProperty(DIM_HEIGHT)));
        setWidth(new CFrameSizeValue(EFrameSize.WIDTH, getProperty(DIM_WIDTH)));
        setTop(new CFrameSizeValue(EFrameSize.TOP, getProperty(DIM_TOP)));
        setLeft(new CFrameSizeValue(EFrameSize.LEFT, getProperty(DIM_LEFT)));
        setHtmlColor(EHtmlColor.getHtmlColor(getProperty(COLOR)));
    }

    // Private setters: the configuration is effectively immutable after init().
    // (Consistency fix: field access style unified — the original mixed
    // "this.pX = ..." and bare "pX = ...".)

    private void setHeight(final CFrameSizeValue aHeight) {
        pHeight = aHeight;
    }

    private void setHtmlColor(final EHtmlColor aColor) {
        pHtmlColor = aColor;
    }

    private void setLeft(final CFrameSizeValue aLeft) {
        pLeft = aLeft;
    }

    private void setTop(final CFrameSizeValue aTop) {
        pTop = aTop;
    }

    private void setWidth(final CFrameSizeValue aWidth) {
        pWidth = aWidth;
    }
}
|
#!/bin/bash
# Build the ROS 2 native workspace with colcon.
# Fix: abort if the workspace is missing instead of silently running
# `colcon build` in whatever directory the script was started from.
set -e

cd /home/ros2/leogate/ros2-native
source /home/ros2/leogate/ros2-native/install/setup.bash
colcon build
|
// All commands converted to Javascript by using "tsc"
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const discord_js_1 = require("discord.js");
exports.default = {
name: 'customembed',
description: 'Customembed | code in desc!',
usage: 'customembed <title> <desc>',
aliases: ['ce'],
category: 'misc',
run: ({
// welcome back to another video! code will be in desc!
args, message }) => {
if (!args[0]) {
return message.reply('You will need to specify the title of the embed.');
}
;
if (!args[1]) {
return message.reply('Please specify the description of the embed.');
}
;
const desc = args.slice(1).join(' ');
const embed = new discord_js_1.MessageEmbed()
.setTitle(args[0])
.setDescription(desc)
// Set color to hex or favorite color!
.setColor('RED');
message.channel.send({
embeds: [embed]
});
}
};
|
package com.digirati.taxman.rest.server.taxonomy.mapper;
import com.digirati.taxman.common.rdf.RdfModelException;
import com.digirati.taxman.common.rdf.RdfModelFactory;
import com.digirati.taxman.common.taxonomy.ConceptSchemeModel;
import com.digirati.taxman.rest.server.taxonomy.identity.ConceptIdResolver;
import com.digirati.taxman.rest.server.taxonomy.identity.ConceptSchemeIdResolver;
import com.digirati.taxman.rest.server.taxonomy.storage.ConceptDataSet;
import com.digirati.taxman.rest.server.taxonomy.storage.ConceptSchemeDataSet;
import com.digirati.taxman.rest.server.taxonomy.storage.record.ConceptSchemeRecord;
import org.apache.commons.lang3.StringUtils;
import org.apache.jena.vocabulary.DCTerms;
import org.apache.jena.vocabulary.SKOS;
import java.util.stream.Collectors;
/**
* A POJO mapper that can convert between the representation of a {@code skos:ConceptScheme} in the database, and an RDF
* object graph.
*/
public class ConceptSchemeMapper {

    private final ConceptIdResolver conceptIdResolver;
    private final ConceptSchemeIdResolver schemeIdResolver;
    private final RdfModelFactory modelFactory;
    private final ConceptMapper conceptMapper;

    /**
     * @param schemeIdResolver  resolves scheme UUIDs to URIs (and back)
     * @param conceptIdResolver resolves concept URIs to UUIDs
     * @param modelFactory      factory used to build typed RDF models
     * @param conceptMapper     delegate mapper for embedded concepts
     */
    public ConceptSchemeMapper(ConceptSchemeIdResolver schemeIdResolver, ConceptIdResolver conceptIdResolver,
            RdfModelFactory modelFactory, ConceptMapper conceptMapper) {
        this.schemeIdResolver = schemeIdResolver;
        this.conceptIdResolver = conceptIdResolver;
        this.modelFactory = modelFactory;
        this.conceptMapper = conceptMapper;
    }

    /**
     * Convert a database data representation to a typed RDF model.
     *
     * @param dataset The database dataset to map.
     * @return A RDF representation of the provided database records.
     * @throws RdfModelException if an error occurred building an RDF model.
     */
    public ConceptSchemeModel map(ConceptSchemeDataSet dataset) throws RdfModelException {
        var builder = modelFactory.createBuilder(ConceptSchemeModel.class);
        var record = dataset.getRecord();
        builder.setUri(schemeIdResolver.resolve(record.getUuid()));
        builder.addPlainLiteral(DCTerms.title, record.getTitle());
        // dcterms:source is optional — only emit it when present.
        if (StringUtils.isNotBlank(record.getSource())) {
            builder.addStringProperty(DCTerms.source, record.getSource());
        }
        // Embed each top concept as a skos:hasTopConcept sub-model.
        dataset.getTopConcepts()
                .stream()
                .map(conceptRecord -> conceptMapper.map(new ConceptDataSet(conceptRecord)))
                .forEach(conceptModel -> builder.addEmbeddedModel(SKOS.hasTopConcept, conceptModel));
        return builder.build();
    }

    /**
     * Convert a typed RDF model to database data representation.
     *
     * @param model The typed RDF model to map.
     * @return A {@link ConceptSchemeDataSet} representing records to be passed to the database.
     */
    public ConceptSchemeDataSet map(ConceptSchemeModel model) {
        var uuid = model.getUuid();
        var record = new ConceptSchemeRecord(uuid, model.getProjectId());
        record.setTitle(model.getTitle());
        record.setSource(model.getSource());
        var topConcepts = model.getTopConcepts()
                .map(concept -> {
                    // Backfill the UUID from the URI when the model lacks one.
                    // NOTE(review): original comment said "Unsure if this is
                    // needed" — confirm whether callers always set the UUID.
                    var id = concept.getUuid();
                    if (id == null) {
                        concept.setUuid(conceptIdResolver.resolve(concept.getUri()).orElse(null));
                    }
                    return conceptMapper.map(concept).getRecord();
                })
                .collect(Collectors.toList());
        return new ConceptSchemeDataSet(record, topConcepts);
    }
}
|
#!/bin/bash
# sb --gres=gpu:titan_xp:rtx --cpus-per-task=16 --mem=100G coco_run.sh
#
# Fix: sbatch only parses #SBATCH directives that appear BEFORE the first
# executable command; the original placed them after the export lines, so
# they were silently ignored. They must stay at the top.
#SBATCH --mail-type=ALL                 # mail configuration: NONE, BEGIN, END, FAIL, REQUEUE, ALL
#SBATCH --output=%j.out                 # where to store the output ( %j is the JOBID )
#SBATCH --gres=gpu:geforce_rtx_3090:1
#SBATCH --cpus-per-task=5
#SBATCH --mem=30G

# Distributed-training rendezvous settings.
export MASTER_ADDR="0.0.0.0"
export MASTER_PORT="8088"
export NODE_RANK=0

/bin/echo Running on host: $(hostname)
/bin/echo In directory: $(pwd)
/bin/echo Starting on: $(date)
# Fix: variable was misspelled as $SLURM_JOB_IdD (always expanded empty).
/bin/echo SLURM_JOB_ID: $SLURM_JOB_ID
#
# binary to execute
set -o errexit
source /itet-stor/fencai/net_scratch/anaconda3/bin/activate diora
export PYTHONPATH=/itet-stor/fencai/net_scratch/diora/pytorch/:$PYTHONPATH

# Run-id notes from earlier experiments (kept for reference):
# ebdde512
# e2bd512b
# bag: 30cb2364
# bed: a4df82a5
# table: 49d65919
# table: model not frozen 6cce0009
# chair not frozen 4d9e6436
# chair frozen: bb6f3ac1: model.step_50800.pt
### new loss
# table
# frozen: e7ed6d25
# not frozen: 0fe669fa
# c3c9f330
# 61d9004a: checkpoint 14300
# 7c2531b3; model 16800
# kl + freecls: 4d391393
# correct the number of classes: 472643c9 model.step_14300.pt
# use the classfication result: 0f271581 model.step_3100.pt
# a0d66393 12400
# b6d3adf7
# ecb61fe6 model.step_12400.pt
# 8de11564: model.
srun python diora/scripts/parse_combine.py \
    --batch_size 1 \
    --data_type partit \
    --emb resnet18 \
    --load_model_path ../log/53fd0a27/model.step_10400.pt \
    --model_flags ../log/53fd0a27/flags.json \
    --validation_path ./data/partit_data/3.bag/test \
    --validation_filter_length 20 \
    --word2idx './data/partit_data/partnet.dict.pkl' \
    --k_neg 5 \
    --freeze_model 1 \
    --cuda \
    --vision_type "bag" \
    --level_attn 1 \
    --diora_shared 0 \
    --mixture 1 \
    --txt2img 1 \
    --outside_attn 1

echo finished at: $(date)
exit 0
|
def is_armstrong_number(num):
    """Return True if ``num`` equals the sum of its digits each raised to
    the power of the digit count (an Armstrong/narcissistic number)."""
    text = str(num)
    power = len(text)
    total = sum(int(ch) ** power for ch in text)
    return total == num
|
#!/usr/bin/env bash
# Wrapper around the sibling "pass" tool: runs it with the given window /
# threshold arguments, then collapses runs of tabs in the output file.
# Fix: strict mode + quoted $(dirname "$0") (the unquoted form broke when
# the script path contained spaces).
set -euo pipefail

# Make sibling executables (e.g. "pass") reachable regardless of CWD.
export PATH="$PATH:$(dirname "$0")"

input=$1
min_window=$2
max_window=$3
false_num=$4
output=$5

pass "$input" "$min_window" "$max_window" "$false_num" "$output" >/dev/null

# Squash consecutive tabs into one (GNU sed; use `sed -i ''` on BSD/macOS).
sed -i -e 's/\t\t*/\t/g' "$output"
|
package com.yoga.points.summary.service;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.yoga.core.base.BaseService;
import com.yoga.core.data.tuple.TwoTuple;
import com.yoga.core.exception.BusinessException;
import com.yoga.core.mybatis.MapperQuery;
import com.yoga.logging.annotation.LoggingPrimary;
import com.yoga.points.adjust.service.PointsAdjustService;
import com.yoga.points.summary.mapper.PointsSummaryMapper;
import com.yoga.points.summary.mapper.PointsYearMapper;
import com.yoga.points.summary.model.PointsSummary;
import com.yoga.points.summary.model.PointsYear;
import com.yoga.points.summary.model.SummarySetting;
import com.yoga.setting.service.SettingService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.Calendar;
import java.util.List;
/**
 * Service for yearly points summaries (积分统计): listing/paging per-user
 * totals, flagging the lowest-scoring users and managing the summary
 * settings for a tenant.
 */
@Service
@LoggingPrimary(module = PointsAdjustService.ModuleName, name = "积分统计")
public class PointsSummaryService extends BaseService {

    @Autowired
    private PointsYearMapper yearMapper;
    @Autowired
    private PointsSummaryMapper summaryMapper;
    @Autowired
    private SettingService settingService;
    @Autowired
    private PointsYearService yearService;

    /** Settings module name used for this service's entries. */
    public final static String ModuleName = "points_summary";
    /** Setting key: percentage of users flagged as lowest-score. */
    public final static String PointsLowestCount = "points.lowest.count";
    /** Setting key: serialized {@link SummarySetting}. */
    public final static String PointsSummary = "points.summary";

    /**
     * Pages through summaries for a tenant/year with optional filters.
     * When {@code penaltyOnly} is set, results are ordered by "penalty"
     * regardless of the requested {@code orderBy}.
     */
    public PageInfo<PointsSummary> list(long tenantId, int year, Long userId, Long branchId, Long dutyId, String keyword, boolean penaltyOnly, int pageIndex, int pageSize, String orderBy) {
        // PageHelper pages are 1-based; callers pass a 0-based index.
        PageHelper.startPage(pageIndex + 1, pageSize);
        List<PointsSummary> summaries = summaryMapper.list(tenantId, year, userId, branchId, dutyId, keyword, penaltyOnly, penaltyOnly ? "penalty" : orderBy);
        return new PageInfo<>(summaries);
    }

    /** Non-paged variant of {@link #list(long, int, Long, Long, Long, String, boolean, int, int, String)}. */
    public List<PointsSummary> list(long tenantId, int year, Long userId, Long branchId, Long dutyId, String keyword, boolean penaltyOnly, String orderBy) {
        return summaryMapper.list(tenantId, year, userId, branchId, dutyId, keyword, penaltyOnly, penaltyOnly ? "penalty" : orderBy);
    }

    /**
     * Recomputes the lowest-score warning list for the given year and stores
     * the flagged user ids on the {@link PointsYear} record. No-ops when the
     * percentage setting is unset, the year is unknown, or the year is not
     * the currently configured annual period.
     */
    public void setLowestWarning(long tenantId, int year) {
        int percent = getScoreLowestCount(tenantId);
        if (percent < 1) return;
        PointsYear pointsYear = yearService.getYear(tenantId, year);
        if (pointsYear == null) return;
        SummarySetting setting = getSetting(tenantId);
        if (setting == null) return;
        // Fix: removed a stray second semicolon after this statement.
        if (setting.getAnnualNum() != year) return;
        // Number of users that make up the bottom `percent` of all summaries.
        long count = new MapperQuery<>(PointsSummary.class)
                .andEqualTo("tenantId", tenantId)
                .andEqualTo("year", year)
                .count(summaryMapper) * percent / 100;
        List<Long> ids = summaryMapper.lowestPoints(tenantId, year, count);
        // Produces ",id1,id2,..." so membership can be tested via contains(",id,").
        String allIds = ids.stream().map(id -> id.toString()).reduce(",", (l, r) -> l + r + ",");
        PointsYear updated = new PointsYear(pointsYear.getId(), allIds);
        yearMapper.updateByPrimaryKeySelective(updated);
    }

    /**
     * @return true when the user is on the lowest-score warning list of the
     *         currently configured annual period.
     */
    public boolean isScoreLowest(long tenantId, long userId) {
        SummarySetting setting = getSetting(tenantId);
        if (setting == null) return false;
        int year = setting.getAnnualNum();
        PointsYear pointsYear = yearService.getYear(tenantId, year);
        if (pointsYear == null) return false;
        if (pointsYear.getWarningUserIds() == null) return false;
        // Matches the ",id," encoding written by setLowestWarning().
        return pointsYear.getWarningUserIds().contains("," + userId + ",");
    }

    /**
     * Returns the user's summary for the configured annual period together
     * with the time the summary was last updated.
     *
     * @throws BusinessException when no annual period is configured or the
     *         user has no summary yet.
     */
    public TwoTuple<LocalDateTime, PointsSummary> myScore(long tenantId, long userId) {
        SummarySetting setting = getSetting(tenantId);
        if (setting == null) throw new BusinessException("尚未配置积分年度");
        int year = setting.getAnnualNum();
        PointsYear pointsYear = yearService.getYear(tenantId, year);
        if (pointsYear == null) throw new BusinessException("尚未配置积分年度");
        PointsSummary summaryItem = new MapperQuery<>(PointsSummary.class)
                .andEqualTo("tenantId", tenantId)
                .andEqualTo("userId", userId)
                .andEqualTo("year", year)
                .queryFirst(summaryMapper);
        if (summaryItem == null) throw new BusinessException("尚未汇总积分");
        return new TwoTuple<>(pointsYear.getUpdateTime(), summaryItem);
    }

    /** @return all yearly summaries of the user, across all years. */
    public List<PointsSummary> myScores(long tenantId, long userId) {
        List<PointsSummary> summaryItems = new MapperQuery<>(PointsSummary.class)
                .andEqualTo("tenantId", tenantId)
                .andEqualTo("userId", userId)
                .query(summaryMapper);
        return summaryItems;
    }

    /** @return the configured lowest-score percentage (0 when unset). */
    public int getScoreLowestCount(long tenantId) {
        return settingService.get(tenantId, ModuleName, PointsLowestCount, 0);
    }

    /** Persists the summary setting for the tenant. */
    public void setSetting(long tenantId, SummarySetting value, String showValue) {
        settingService.save(tenantId, ModuleName, PointsSummary, value.toString(), showValue);
    }

    /**
     * Returns the summary setting for the tenant, lazily creating a default
     * one (current calendar year, week day 0) — and, when no PointsYear row
     * exists at all, inserting one that spans the current calendar year.
     */
    public SummarySetting getSetting(long tenantId) {
        SummarySetting setting = settingService.get(tenantId, ModuleName, PointsSummary, SummarySetting.class);
        if (setting == null) {
            Calendar calendar = Calendar.getInstance();
            int year = calendar.get(Calendar.YEAR);
            PointsYear pointsYear = yearService.getYear(tenantId, year);
            if (pointsYear == null) {
                // Fall back to the most recent recorded year.
                pointsYear = yearMapper.maxYear(tenantId);
            }
            if (pointsYear == null) {
                // No year recorded yet: create one covering the current year.
                pointsYear = new PointsYear();
                pointsYear.setTenantId(tenantId);
                pointsYear.setYear(calendar.get(Calendar.YEAR));
                calendar.set(calendar.get(Calendar.YEAR), 0, 1, 0, 0, 0);
                pointsYear.setBeginDate(LocalDate.of(calendar.get(Calendar.YEAR), 1, 1));
                pointsYear.setEndDate(LocalDate.of(calendar.get(Calendar.YEAR), 1, 1).plusYears(1).minusDays(1));
                yearMapper.insert(pointsYear);
            }
            setting = new SummarySetting();
            setting.setAnnualNum(pointsYear.getYear());
            setting.setWeekAt(0);
        }
        return setting;
    }
}
|
import Foundation
/// Tracks log-file metadata (byte count, last-modified time) using a
/// concurrent queue: concurrent `sync` reads, `.barrier` async writes.
class ThreadSafeLogger {
    private var privateByteCounter: UInt64 = 0
    private var privateModificationTracker: TimeInterval = 0
    private let lockQueue = DispatchQueue(label: "com.example.logQueue", attributes: .concurrent)

    /// The size of this log file in bytes.
    var sizeInBytes: UInt64? {
        return self.lockQueue.sync { self.privateByteCounter }
    }

    /// The date when this log file was last modified.
    var modificationDate: Date? {
        let interval = self.lockQueue.sync { self.privateModificationTracker }
        return Date(timeIntervalSinceReferenceDate: interval)
    }

    func appendLogMessage(_ message: String) {
        // Barrier write: exclusive access while mutating the counters.
        self.lockQueue.async(flags: .barrier) {
            self.privateByteCounter += UInt64(message.utf8.count)
            self.privateModificationTracker = Date().timeIntervalSinceReferenceDate
            // Append the log message to the log file and update privateByteCounter and privateModificationTracker
            // Ensure thread-safe access to the log file data
        }
    }

    func clearLogFile() {
        // Barrier write: exclusive access while resetting the counters.
        self.lockQueue.async(flags: .barrier) {
            self.privateByteCounter = 0
            self.privateModificationTracker = Date().timeIntervalSinceReferenceDate
            // Clear the contents of the log file by resetting privateByteCounter and privateModificationTracker
            // Ensure thread-safe access to the log file data
        }
    }
}
|
import React, { Component } from 'react';
import { Card, Icon, Image } from 'semantic-ui-react'
import DefaultAvatar from '../../assets/default-avatar.png'
class UserInfo extends Component {
render(){
return(
<Card>
<Image src={DefaultAvatar} />
<Card.Content>
<Card.Header>{this.props.currentUser.userName}</Card.Header>
<Card.Meta>
<span className='date'>{this.props.currentUser.userEmail}</span>
</Card.Meta>
</Card.Content>
</Card>
)
}
}
export default UserInfo;
|
<reponame>Codernauti/Sweetie<filename>app/src/main/java/com/codernauti/sweetie/couple/CoupleDetailsContract.java
package com.codernauti.sweetie.couple;
import android.net.Uri;
import java.util.Date;
/**
 * MVP contract between the couple-details screen and its presenter.
 */
public interface CoupleDetailsContract {

    interface View {

        void setPresenter(Presenter presenter);

        // Renders the couple image, partner names and anniversary/creation info.
        void updateCoupleData(String imageUri, String partnerOneName, String partnerTwoName,
                              Date anniversary, String anniversaryString, String creationTime);

        // Image-upload progress value — presumably percent; confirm in presenter impl.
        void updateUploadProgress(int progress);

        void hideUploadProgress();
    }

    interface Presenter {

        void deleteCouple();

        // Uploads a new couple image from a local Uri.
        void sendCoupleImage(Uri stringUriLocal);

        void sendNewAnniversaryData(Date newAnniversary);
    }
}
|
<reponame>Hannah-Abi/python-pro-21
import unittest
from unittest.mock import patch
from tmc import points
from tmc.utils import load, load_module, reload_module, get_stdout, check_source
from functools import reduce
import os
import textwrap
exercise = 'src.everything_reversed'
function = 'everything_reversed'


def get_correct(test_case: list) -> list:
    """Reference solution: reverse each string, then reverse the list order."""
    result = []
    for item in reversed(test_case):
        result.append(item[::-1])
    return result
@points('4.everything_reversed')
class EverythingReversedTest(unittest.TestCase):
    # TMC grader test suite for the student's everything_reversed() function.

    @classmethod
    def setUpClass(cls):
        # Any input() call during import is treated as an error.
        with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
            cls.module = load_module(exercise, 'en')

    def test_0_main_program_ok(self):
        # Top-level test code must live inside the __main__ guard.
        ok, line = check_source(self.module)
        message = """The code for testing the functions should be placed inside
if __name__ == "__main__":
block. The following row should be moved:
"""
        self.assertTrue(ok, message+line)

    def test_1_function_exists(self):
        # The function must exist and be callable with a single list argument.
        try:
            from src.everything_reversed import everything_reversed
        except:
            self.assertTrue(False, 'Your code should contain function named as everything_reversed(my_list: list)')
        try:
            everything_reversed = load(exercise, function, 'en')
            everything_reversed(["abc"])
        except:
            self.assertTrue(False, 'Make sure, that function can be called as follows everything_reversed(["abc"])')

    def test_2_type_of_return_value(self):
        everything_reversed = load(exercise, function, 'en')
        val = everything_reversed(["abc"])
        self.assertTrue(type(val) == list, "Function everything_reversed does not return list when calling everything_reversed(['abc'])")

    def test_3_short_ones(self):
        # Short inputs: result matches the reference and the input list is
        # not mutated (a copy is kept in test_case2 for comparison).
        for test_case in [["abc","def"], ["first","second","third"], ["one","two","three"]]:
            with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
                reload_module(self.module)
                output_at_start = get_stdout()
                everything_reversed = load(exercise, function, 'en')
                correct = get_correct(test_case)
                test_case2 = test_case[:]
                test_result = everything_reversed(test_case)
                self.assertEqual(correct, test_result, f"The result {test_result} does not match with the expected result {correct} when calling formatted everything_reversed({test_case2})")
                self.assertEqual(test_case, test_case2, f"Function should not change the original list. The list should be {test_case2} but it is {test_case}.")

    def test_4_longer_ones(self):
        # Same checks with longer lists and longer strings.
        for test_case in [["here", "is", "a", "little", "longer", "list", "with", "more", "words"],
                          ["abcd", "efghijklmnopqrstu", "vwxyz"]]:
            with patch('builtins.input', side_effect=[AssertionError("Asking input from the user was not expected")]):
                reload_module(self.module)
                output_at_start = get_stdout()
                everything_reversed = load(exercise, function, 'en')
                correct = get_correct(test_case)
                test_case2 = test_case[:]
                test_result = everything_reversed(test_case)
                self.assertEqual(correct, test_result, f"The result {test_result} does not match with the expected result {correct} when calling formatted everything_reversed({test_case2})")
                self.assertEqual(test_case, test_case2, f"Function should not change the original list. The list should be {test_case2} but it is {test_case}.")
# Allow running this test module directly (python <file>.py).
if __name__ == '__main__':
    unittest.main()
|
<reponame>liuzhiyi1992/UCToutiaoClone
//
// UCTWebViewController.h
// UCToutiaoClone
//
// Created by zhiyi on 16/10/12.
// Copyright © 2016年 lzy. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "UCTViewController.h"

// View controller that presumably displays the given URL in a web view —
// confirm in the implementation file.
@interface UCTWebViewController : UCTViewController

// Initializes the controller with the URL string to load and the
// navigation title to display.
- (instancetype)initWithRequestUrlString:(NSString *)requestUrlString title:(NSString *)title;

@end
|
/*
* Copyright (c) CERN 2013-2015
*
* Copyright (c) Members of the EMI Collaboration. 2010-2013
* See http://www.eu-emi.eu/partners for details on the copyright
* holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef LISTTRANSFERCLI_H_
#define LISTTRANSFERCLI_H_
#include "DnCli.h"
#include "VoNameCli.h"
#include "TransferCliBase.h"
namespace fts3
{
namespace cli
{
/**
* Command line utility for specifying the list of states of interest
*
* - state, positional parameter, allows for specifying several
* states of interest
*/
class ListTransferCli : public DnCli, public VoNameCli, public TransferCliBase
{
public:
    /**
     * Default constructor.
     *
     * Creates the transfer-list specific command line options. State is
     * marked as both: hidden and positional.
     */
    ListTransferCli();

    /**
     * Destructor.
     */
    virtual ~ListTransferCli();

    /**
     * Validates command line options.
     * Calls the validate methods of the base classes.
     */
    virtual void validate();

    /**
     * Gives the instruction how to use the command line tool.
     *
     * @return a string with instruction on how to use the tool
     */
    std::string getUsageString(std::string tool) const;

    /**
     * @return an array of statuses by which the user wants to filter
     */
    std::vector<std::string> getStatusArray();

    /**
     * @return the source endpoint filter given on the command line
     */
    std::string source();

    /**
     * @return the destination endpoint filter given on the command line
     */
    std::string destination();

    /**
     * @return true if user requested a deletion query, false otherwise
     */
    bool deletion();
};
}
}
#endif /* LISTTRANSFERCLI_H_ */
|
import {AnimationData, AsynchronousAnimator, SynchronousAnimator, TextAnimation} from "../index";
class GSAPAsyncAnimator implements AsynchronousAnimator {
private readonly sync: SynchronousAnimator;
constructor(syncAnimator: SynchronousAnimator) {
this.sync = syncAnimator;
}
AnimateText(e: Element, text: string, data?: TextAnimation): Promise<Element> {
return this.p("AnimateText", [text], e, data);
}
BackgroundColor(e: Element, ori: string, to: string, data?: AnimationData): Promise<Element> {
return this.p("BackgroundColor", [ori, to], e, data);
}
Blur(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Blur", [ori, to], e, data);
}
Brightness(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Brightness", [ori, to], e, data);
}
Contrast(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Contrast", [ori, to], e, data);
}
FontColor(e: Element, ori: string, to: string, data?: AnimationData): Promise<Element> {
return this.p("FontColor", [ori, to], e, data);
}
Greyscale(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Greyscale", [ori, to], e, data);
}
H(e: Element, ori: string | number, to: string | number, data?: AnimationData): Promise<Element> {
return this.p("H", [ori, to], e, data);
}
HueRotation(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("HueRotation", [ori, to], e, data);
}
Invert(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Invert", [ori, to], e, data);
}
Opacity(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Opacity", [ori, to], e, data);
}
Rotate(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Rotate", [ori, to], e, data);
}
Saturate(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Saturate", [ori, to], e, data);
}
ScaleX(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("ScaleX", [ori, to], e, data);
}
ScaleY(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("ScaleY", [ori, to], e, data);
}
Sepia(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("Sepia", [ori, to], e, data);
}
SkewX(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("SkewX", [ori, to], e, data);
}
SkewY(e: Element, ori: number, to: number, data?: AnimationData): Promise<Element> {
return this.p("SkewY", [ori, to], e, data);
}
W(e: Element, ori: string | number, to: string | number, data?: AnimationData): Promise<Element> {
return this.p("W", [ori, to], e, data);
}
X(e: Element, ori: string | number, to: string | number, data?: AnimationData): Promise<Element> {
return this.p("X", [ori, to], e, data);
}
Y(e: Element, ori: string | number, to: string | number, data?: AnimationData): Promise<Element> {
return this.p("Y", [ori, to], e, data);
}
Wait(e: Element, data?: AnimationData): Promise<Element> {
return this.p("Wait", [], e, data);
}
BorderColor(e: Element, ori: string, to: string, data?: AnimationData): Promise<Element> {
return this.p('BorderColor', [ori, to], e, data);
}
BorderRadius(e: Element, ori: number | string, to: number | string, data?: AnimationData): Promise<Element> {
return this.p('BorderRadius', [ori, to], e, data);
}
/**
 * Shared dispatcher for all animation methods: invokes the synchronous
 * animator method named `type` and wraps completion in a Promise that
 * resolves with the element. Any caller-supplied `data.callback` is chained
 * and runs before the promise resolves.
 * NOTE(review): `[e].Add(args).Add(data)` relies on a project-defined
 * Array.prototype.Add extension — presumably append-and-return-self; confirm.
 */
private p(type: string, args: any[], e: Element, data: AnimationData = {}) {
return new Promise<Element>((resolve: (e: Element) => void) => {
// Preserve the user's callback, then resolve once it has run.
let original: Function = data.callback || (() => {});
data.callback = () => {
original();
resolve(e);
};
let arg: any[] = [e].Add(args).Add(data as any);
(this.sync as any)[type].apply(this.sync, arg);
});
}
}
export {GSAPAsyncAnimator};
|
package com.chankin.ssms.core.feature.orm.dataSources;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class DataSourceTypeManager extends AbstractRoutingDataSource {

    /**
     * Per-thread selected data source. Defaults to {@code DataSources.MYSQL}
     * for any thread that has not chosen one explicitly.
     */
    private static final ThreadLocal<DataSources> dataSourceTypes = new ThreadLocal<DataSources>() {
        @Override
        protected DataSources initialValue() {
            return DataSources.MYSQL;
        }
    };

    /** @return the data source currently bound to the calling thread. */
    public static DataSources get() {
        return dataSourceTypes.get();
    }

    /** Bind the given data source type to the calling thread. */
    public static void set(DataSources dataSourceType) {
        dataSourceTypes.set(dataSourceType);
    }

    /**
     * Restore the default for the calling thread.
     * BUG FIX: uses {@code remove()} instead of {@code set(DataSources.MYSQL)}.
     * The next {@code get()} still yields MYSQL via {@code initialValue()},
     * but the thread-local entry is actually cleared — avoiding stale
     * references (and potential classloader leaks) in pooled threads.
     */
    public static void reset() {
        dataSourceTypes.remove();
    }

    /** Routing key consulted by Spring's AbstractRoutingDataSource. */
    @Override
    protected Object determineCurrentLookupKey() {
        return DataSourceTypeManager.get();
    }
}
|
package org.jeecg.modules.bim.mapper;
import java.util.List;
import org.jeecg.modules.bim.entity.BimModelAttrsCategoriesProps;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;
/**
 * @Description: Mapper for model attribute category properties
 * @Author: jeecg-boot
 * @Date: 2021-12-25
 * @Version: V1.0
 */
public interface BimModelAttrsCategoriesPropsMapper extends BaseMapper<BimModelAttrsCategoriesProps> {
/** Delete all property rows that belong to the given parent record id. */
public boolean deleteByMainId(@Param("mainId") String mainId);
/** List all property rows that belong to the given parent record id. */
public List<BimModelAttrsCategoriesProps> selectByMainId(@Param("mainId") String mainId);
}
|
#!/usr/bin/env bash
# Provision a Django REST API (profiles-rest-api) on a Debian/Ubuntu host:
# clone the repo, build a virtualenv, run migrations, then serve the app
# behind supervisor + nginx. Expects to run as root.
set -e

# TODO: Set to URL of git repo.
PROJECT_GIT_URL='https://github.com/jyojk/DjangoREST.git'
PROJECT_BASE_PATH='/usr/local/apps/profiles-rest-api'

echo "Installing dependencies..."
apt-get update
# NOTE(review): on current Debian/Ubuntu these packages are named 'sqlite3'
# and 'python3-pip'; 'sqlite'/'python-pip' only resolve on older releases —
# confirm the target distribution before changing them.
apt-get install -y python3-dev python3-venv sqlite python-pip supervisor nginx git

# Create project directory and fetch sources (expansions quoted throughout
# so the script stays correct should the base path ever contain spaces).
mkdir -p "$PROJECT_BASE_PATH"
git clone "$PROJECT_GIT_URL" "$PROJECT_BASE_PATH"

# Create virtual environment
mkdir -p "$PROJECT_BASE_PATH/env"
python3 -m venv "$PROJECT_BASE_PATH/env"

# Install python packages
"$PROJECT_BASE_PATH/env/bin/pip" install -r "$PROJECT_BASE_PATH/requirements.txt"
"$PROJECT_BASE_PATH/env/bin/pip" install uwsgi==2.0.18

# Run migrations and collectstatic
cd "$PROJECT_BASE_PATH"
"$PROJECT_BASE_PATH/env/bin/python" manage.py migrate
"$PROJECT_BASE_PATH/env/bin/python" manage.py collectstatic --noinput

# Configure supervisor
cp "$PROJECT_BASE_PATH/deploy/supervisor_profiles_api.conf" /etc/supervisor/conf.d/profiles_api.conf
supervisorctl reread
supervisorctl update
supervisorctl restart profiles_api

# Configure nginx
cp "$PROJECT_BASE_PATH/deploy/nginx_profiles_api.conf" /etc/nginx/sites-available/profiles_api.conf
# BUG FIX: plain 'rm' aborts the whole script under 'set -e' when the default
# site was already removed (e.g. on a re-run); -f and -sf make this idempotent.
rm -f /etc/nginx/sites-enabled/default
ln -sf /etc/nginx/sites-available/profiles_api.conf /etc/nginx/sites-enabled/profiles_api.conf
systemctl restart nginx.service

echo "DONE! :)"
|
<gh_stars>0
import { Vector2 } from '@daign/math';
import { SvgConstants } from '../svg-constants';
import { SvgNodeObject } from '../svg-node-object';
export class BasicCircle extends SvgNodeObject {
  public node: any;

  /**
   * Create an SVG <circle> node styled as 'basicCircle' and position it.
   * @param c Center of the circle.
   * @param r Radius of the circle.
   */
  public constructor( c: Vector2, r: number ) {
    super();
    this.node = document.createElementNS( SvgConstants.SVGNS, 'circle' );
    this.baseStyle = 'basicCircle';
    this.repaint( c, r );
  }

  /**
   * Write center and radius onto the underlying node's attributes.
   * @param c Center of the circle.
   * @param r Radius of the circle.
   */
  public repaint( c: Vector2, r: number ): void {
    const attributes: Array<[ string, number ]> = [
      [ 'cx', c.x ],
      [ 'cy', c.y ],
      [ 'r', r ]
    ];
    for ( const [ name, value ] of attributes ) {
      this.node.setAttribute( name, String( value ) );
    }
  }
}
|
/* eslint-disable no-console */
// Minimal dev-server bootstrap: compile the app with the dev config and
// serve it with hot module replacement on http://localhost:3000.
const webpack = require('webpack');
const WebpackDevServer = require('webpack-dev-server');
const config = require('./webpack.dev');
// NOTE(review): the (compiler, options) constructor and server.listen() match
// webpack-dev-server v3; v4+ changed both APIs — confirm the installed version.
const server = new WebpackDevServer(webpack(config), {
publicPath: config.output.publicPath,
hot: true,
historyApiFallback: true,
stats: { colors: true, chunks: false },
});
server.listen(3000, 'localhost', err => { // eslint-disable-line consistent-return
if (err) {
return console.log(err);
}
console.log('Listening at http://localhost:3000/');
});
|
package edu.mdamle.beans;
import java.util.Map;
import edu.mdamle.beans.User.Role;
public abstract class TrmsMessage {
public static enum MessageTypes {
INFOREQ, DENIALRES, TRRCHANGE
}
//protected String head;
protected String body;
protected String senderUsername;
protected Role senderRole;
protected Map<String, Role> declaredRecipients;
protected Map<String, Role> validRecipients;
protected MessageTypes messageType;
protected int id;
public String getBody() {
return body;
}
public void setBody(String body) {
this.body = body;
}
public String getSenderUsername() {
return senderUsername;
}
public void setSenderUsername(String senderUsername) {
this.senderUsername = senderUsername;
}
public Role getSenderRole() {
return senderRole;
}
public void setSenderRole(Role senderRole) {
this.senderRole = senderRole;
}
public Map<String, Role> getDeclaredRecipients() {
return declaredRecipients;
}
public void setDeclaredRecipients(Map<String, Role> declaredRecipients) {
this.declaredRecipients = declaredRecipients;
}
public Map<String, Role> getValidRecipients() {
return validRecipients;
}
public void setValidRecipients(Map<String, Role> validRecipients) {
this.validRecipients = validRecipients;
}
public MessageTypes getMessageType() {
return messageType;
}
public void setMessageType(MessageTypes messageType) {
this.messageType = messageType;
}
}
|
<gh_stars>10-100
package com.gank.gankly.ui.discovered.video;
import com.gank.gankly.bean.ResultsBean;
import com.gank.gankly.mvp.IFetchPresenter;
import com.gank.gankly.mvp.IFetchView;
import java.util.List;
/**
 * MVP contract for the video list screen.
 * Created by LingYan on 2017-01-03
 * Email:<EMAIL>
 */
public interface VideoContract {
/** View half of the contract: receives fetched result lists. */
interface View extends IFetchView {
/** Replace the current list contents (e.g. after refresh). */
void refillData(List<ResultsBean> list);
/** Append a further page of results to the current list. */
void appendData(List<ResultsBean> list);
}
/** Presenter half; inherits fetch behavior from IFetchPresenter. */
interface Presenter extends IFetchPresenter {
}
}
|
import threading

# Lock serializing session creation/use across interruptor threads.
session_lock = threading.Lock()

def interruptor():
    """Open a second SERIALIZABLE session and add a participant through it.

    Relies on module-level names defined elsewhere: ``interrupted`` (guard
    flag), ``db_session`` (session factory holder) and ``add_participant``.
    """
    if not interrupted:
        # Acquire the lock before creating the session
        with session_lock:
            # BUG FIX: bind session2 before the try block. In the original,
            # if session_factory() raised, the finally clause referenced an
            # unbound name and masked the real error with a NameError.
            session2 = None
            try:
                session2 = db_session.session_factory()
                session2.connection(execution_options={"isolation_level": "SERIALIZABLE"})
                add_participant(session2)
            except Exception as e:
                # Handle any exceptions and log the error
                print(f"Error in interruptor: {e}")
            finally:
                # Ensure the session is closed to prevent resource leaks
                if session2:
                    session2.close()
|
package com.wpisen.trace.server.dao.entity;
import java.util.Date;
public class Project {
private Integer proId;
private String name;
private String proKey;
private String proSecret;
private String describes;
private String belongsWay;
private Integer belongsId;
private Date createTime;
private Date lastUpdateTime;
private Boolean disable;
public Integer getProId() {
return proId;
}
public void setProId(Integer proId) {
this.proId = proId;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name == null ? null : name.trim();
}
public String getProKey() {
return proKey;
}
public void setProKey(String proKey) {
this.proKey = proKey == null ? null : proKey.trim();
}
public String getProSecret() {
return proSecret;
}
public void setProSecret(String proSecret) {
this.proSecret = proSecret == null ? null : proSecret.trim();
}
public String getDescribes() {
return describes;
}
public void setDescribes(String describes) {
this.describes = describes == null ? null : describes.trim();
}
public String getBelongsWay() {
return belongsWay;
}
public void setBelongsWay(String belongsWay) {
this.belongsWay = belongsWay == null ? null : belongsWay.trim();
}
public Integer getBelongsId() {
return belongsId;
}
public void setBelongsId(Integer belongsId) {
this.belongsId = belongsId;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getLastUpdateTime() {
return lastUpdateTime;
}
public void setLastUpdateTime(Date lastUpdateTime) {
this.lastUpdateTime = lastUpdateTime;
}
public Boolean getDisable() {
return disable;
}
public void setDisable(Boolean disable) {
this.disable = disable;
}
}
|
<gh_stars>1-10
require 'open3'
require 'tempfile'
# Black-box test driver for the mrdb command-line debugger: compiles a Ruby
# snippet with mrbc, feeds scripted commands to mrdb over stdin (both source
# and compiled-binary modes), and asserts on expected/unexpected output lines.
class BinTest_MrubyBinDebugger
# Debug switches: @debug1 dumps raw mrdb output; @debug2/@debug3 print mismatches.
@debug1=false
@debug2=true
@debug3=true
# rubysource: program text to debug.
# testcase: array of {:cmd=>..., :exp=>..., :unexp=>...} hashes; :exp must
# appear in the corresponding output line, :unexp must not.
def self.test(rubysource, testcase)
script, bin = Tempfile.new(['test', '.rb']), Tempfile.new(['test', '.mrb'])
# .rb
script.write rubysource
script.flush
# compile
`./bin/mrbc -g -o "#{bin.path}" "#{script.path}"`
# add mrdb quit
testcase << {:cmd=>"quit"}
stdin_data = testcase.map{|t| t[:cmd]}.join("\n") << "\n"
["bin/mrdb #{script.path}","bin/mrdb -b #{bin.path}"].each do |cmd|
o, s = Open3.capture2(cmd, :stdin_data => stdin_data)
exp_vals = testcase.map{|t| t.fetch(:exp, nil)}
unexp_vals = testcase.map{|t| t.fetch(:unexp, nil)}
if @debug1
# NOTE(review): each_with_index yields (element, index) — these block params
# look swapped, so this prints [line, index]; harmless debug-only output.
o.split("\n").each_with_index do |i,actual|
p [i,actual]
end
end
# compare actual / expected
o.split("\n").each do |actual|
next if actual.empty?
exp = exp_vals.shift
if @debug2
a = true
a = actual.include?(exp) unless exp.nil?
p [actual, exp] unless a
end
assert_true actual.include?(exp) unless exp.nil?
end
# compare actual / unexpected
o.split("\n").each do |actual|
next if actual.empty?
unexp = unexp_vals.shift
if @debug3
a = false
a = actual.include?(unexp) unless unexp.nil?
p [actual, unexp] if a
end
assert_false actual.include?(unexp) unless unexp.nil?
end
end
end
end
INVCMD = "invalid command"
assert('mruby-bin-debugger(mrdb) command line') do
# ruby source
src = "foo = 'foo'\n"
str = ""
103.times {
str += "1234567890"
}
cmd = "p a=#{str}"
# test case
BinTest_MrubyBinDebugger.test(src, [{:cmd=>cmd[0...1023], :unexp=>'command line too long.'}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>cmd[0...1024], :unexp=>'command line too long.'}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>cmd[0...1025], :exp=>'command line too long.'}])
end
assert('mruby-bin-debugger(mrdb) command: "break"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"b", :unexp=>INVCMD}
tc << {:cmd=>"br", :unexp=>INVCMD}
tc << {:cmd=>"brea", :unexp=>INVCMD}
tc << {:cmd=>"break", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"bl", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"breaka", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "continue"') do
# ruby source
src = "foo = 'foo'\n"
# test case
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"c", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"co", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"continu", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"continue", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"cn", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"continuee", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "delete"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"d 1", :unexp=>INVCMD}
tc << {:cmd=>"de 1", :unexp=>INVCMD}
tc << {:cmd=>"delet 1", :unexp=>INVCMD}
tc << {:cmd=>"delete 1", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"dd 1", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"deletee 1", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "disable"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"dis", :unexp=>INVCMD}
tc << {:cmd=>"disa", :unexp=>INVCMD}
tc << {:cmd=>"disabl", :unexp=>INVCMD}
tc << {:cmd=>"disable", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"di", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"disb", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"disablee", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "enable"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"en", :unexp=>INVCMD}
tc << {:cmd=>"ena", :unexp=>INVCMD}
tc << {:cmd=>"enabl", :unexp=>INVCMD}
tc << {:cmd=>"enable", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"e", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"enb", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"enablee", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "eval"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"ev", :unexp=>INVCMD}
tc << {:cmd=>"eva", :unexp=>INVCMD}
tc << {:cmd=>"eval", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"e", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"evl", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"evall", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "help"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"h", :unexp=>INVCMD}
tc << {:cmd=>"he", :unexp=>INVCMD}
tc << {:cmd=>"hel", :unexp=>INVCMD}
tc << {:cmd=>"help", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"hl", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"helpp", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "info breakpoints"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"i b", :unexp=>INVCMD}
tc << {:cmd=>"in b", :unexp=>INVCMD}
tc << {:cmd=>"i br", :unexp=>INVCMD}
tc << {:cmd=>"inf breakpoint", :unexp=>INVCMD}
tc << {:cmd=>"info breakpoints", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"ii b", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"i bb", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"infoo breakpoints", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"info breakpointss", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "list"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"l", :unexp=>INVCMD}
tc << {:cmd=>"li", :unexp=>INVCMD}
tc << {:cmd=>"lis", :unexp=>INVCMD}
tc << {:cmd=>"list", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"ll", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"listt", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "print"') do
# ruby source
src = "foo = 'foo'\n"
# test case
tc = []
tc << {:cmd=>"p", :unexp=>INVCMD}
tc << {:cmd=>"pr", :unexp=>INVCMD}
tc << {:cmd=>"prin", :unexp=>INVCMD}
tc << {:cmd=>"print", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"pp", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"printt", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "quit"') do
# ruby source
src = "foo = 'foo'\n"
# test case
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"q", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"qu", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"qui", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"quit", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"qq", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"quitt", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "run"') do
# ruby source
src = "foo = 'foo'\n"
# test case
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"r", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"ru", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"run", :unexp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"rr", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"runn", :exp=>INVCMD}])
end
assert('mruby-bin-debugger(mrdb) command: "step"') do
# ruby source
src = <<"SRC"
while true
foo = 'foo'
end
SRC
# test case
tc = []
tc << {:cmd=>"s", :unexp=>INVCMD}
tc << {:cmd=>"st", :unexp=>INVCMD}
tc << {:cmd=>"ste", :unexp=>INVCMD}
tc << {:cmd=>"step", :unexp=>INVCMD}
BinTest_MrubyBinDebugger.test(src, tc)
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"ss", :exp=>INVCMD}])
BinTest_MrubyBinDebugger.test(src, [{:cmd=>"stepp", :exp=>INVCMD}])
end
|
# Import python libs
import secrets
# Import local libs
import rend.exc
# Called by pop at plugin load time; registers the 'output' dynamic subsystem on the hub.
def __init__(hub):
hub.pop.sub.add(dyne_name='output')
def standalone(hub):
'''
Execute the render system onto a single file, typically to test basic
functionality
'''
# Load rend's CLI config onto hub.OPT, then drive the async entry point.
hub.pop.conf.integrate('rend', cli='rend')
hub.pop.loop.start(_standalone(hub))
# Render the configured file and print it through the selected outputter plugin.
async def _standalone(hub):
outputter = hub.OPT['rend']['output']
ret = await hub.rend.init.parse(hub.OPT['rend']['file'], hub.OPT['rend']['pipe'])
print(getattr(hub, f'output.{outputter}.display')(ret))
async def parse(hub, fn, pipe=None):
'''
Pass in the render pipe to use to render the given file. If no pipe is
passed in then the file will be checked for a render shebang line. If
no render shebang line is present then the system will raise an
Exception
If a file defines a shebang render pipe and a pipe is passed in, the
shebang render pipe line will be used
'''
with open(fn, 'rb') as rfh:
data = rfh.read()
if data.startswith(b'#!'):
# File's shebang pipe wins over the caller-supplied one.
# NOTE(review): the shebang line itself stays in `data` — presumably the
# first renderer strips/ignores it; confirm against the renderer plugins.
dpipe = data[2:data.index(b'\n')].split(b'|')
elif pipe:
dpipe = pipe.split('|')
else:
raise rend.exc.RendPipeException(f'File {fn} passed in without a render pipe defined')
for render in dpipe:
if isinstance(render, bytes):
render = render.decode()
# Each renderer consumes the previous stage's output (bytes in, data out).
data = await getattr(hub, f'rend.{render}.render')(data)
return data
async def parse_bytes(hub, block, pipe=None):
'''
Send in a block from a render file and render it using the named pipe

block: a dict as produced by blocks() — keys 'fn', 'ln', 'bytes' and an
optional 'pipe' that, when present, overrides the pipe argument.
'''
# Accept the pipe as a delimited str or bytes and normalize to a list.
if isinstance(pipe, str):
pipe = pipe.split('|')
if isinstance(pipe, bytes):
pipe = pipe.split(b'|')
fn = block.get('fn')
ln = block.get('ln')
data = block.get('bytes')
pipe = block.get('pipe', pipe)
if pipe is None:
raise rend.exc.RendPipeException(f'File {fn} at block line {ln} passed in without a render pipe defined')
for render in pipe:
if isinstance(render, bytes):
render = render.decode()
data = await getattr(hub, f'rend.{render}.render')(data)
return data
def blocks(hub, fn):
    '''
    Pull the render blocks out of a file along with the render metadata
    stored in shebang lines.

    Returns a dict mapping block name -> {'ln', 'fn', 'bytes', and for
    shebang-introduced blocks 'keys' plus optionally 'pipe'}. Shebang lines
    look like ``#!<pipe>[;key:value;...]``; ``#!END`` pops back to the
    enclosing block. Blocks that collected no bytes (e.g. the implicit
    leading 'raw' block when the file starts with a shebang) are dropped.
    '''
    bname = 'raw'
    ret = {bname: {'ln': 0, 'fn': fn, 'bytes': b''}}
    bnames = [bname]  # stack of open block names
    rm_bnames = set()
    with open(fn, 'rb') as rfh:
        for num, line in enumerate(rfh):
            if line.startswith(b'#!'):
                # Found metadata tag
                root = line[2:].strip()
                if root == b'END':
                    bnames.pop(-1)
                    if not bnames:
                        raise rend.exc.RenderException(f'Unexpected End of file line {num}')
                    bname = bnames[-1]
                    continue
                else:
                    # Random suffix keeps names unique across repeated shebangs.
                    bname = f'{fn}|{secrets.token_hex(2)}'
                    ret[bname] = {'ln': num, 'fn': fn, 'keys': {}, 'bytes': b''}
                    bnames.append(bname)
                    parts = root.split(b';')
                    # (cleanup: dropped unused enumerate index and dead 'bind' counter)
                    for part in parts:
                        if b':' in part:
                            # key:value metadata entry
                            req = part.split(b':')
                            ret[bname]['keys'][req[0].decode()] = req[1].decode()
                        else:
                            # pipe specification, possibly '|'-delimited
                            if b'|' in part:
                                pipes = part.split(b'|')
                            else:
                                pipes = [part]
                            ret[bname]['pipe'] = pipes
            else:
                ret[bname]['bytes'] += line
    # Drop blocks that gathered no content.
    for name, data in ret.items():
        if not data['bytes']:
            rm_bnames.add(name)
    for name in rm_bnames:
        ret.pop(name)
    return ret
|
<gh_stars>1-10
package de.ids_mannheim.korap.constant;
/** Defines some predefined roles used in the system.
 *
 * @author margaretha
 *
 */
public enum PredefinedRole {
    USER_GROUP_ADMIN(1), USER_GROUP_MEMBER(2), VC_ACCESS_ADMIN(3), VC_ACCESS_MEMBER(4),
    QUERY_ACCESS_ADMIN(5), QUERY_ACCESS_MEMBER(6);

    // Numeric id of the role. Made final: enum constants are shared singletons,
    // so mutable fields would be a thread-safety hazard.
    private final int id;
    // Human-readable name derived from the constant, e.g. "user group admin".
    private final String name;

    PredefinedRole (int i) {
        this.id = i;
        this.name = name().toLowerCase().replace("_", " ");
    }

    /** @return the numeric id of this role. */
    public int getId () {
        return id;
    }

    /** @return the lower-cased, space-separated role name. */
    @Override
    public String toString () {
        return this.name;
    }
}
|
package json
import (
"time"
"github.com/go-faster/jx"
)
const (
dateLayout = "2006-01-02"
timeLayout = "15:04:05"
)
// DecodeDate reads a JSON string from the decoder and parses it as a
// "2006-01-02" calendar date.
func DecodeDate(i *jx.Decoder) (v time.Time, err error) {
s, err := i.Str()
if err != nil {
return v, err
}
return time.Parse(dateLayout, s)
}
// EncodeDate writes v as a JSON string in "2006-01-02" format.
func EncodeDate(s *jx.Writer, v time.Time) {
s.Str(v.Format(dateLayout))
}
// DecodeTime reads a JSON string from the decoder and parses it as a
// "15:04:05" wall-clock time (date fields are zero).
func DecodeTime(i *jx.Decoder) (v time.Time, err error) {
s, err := i.Str()
if err != nil {
return v, err
}
return time.Parse(timeLayout, s)
}
// EncodeTime writes v as a JSON string in "15:04:05" format.
func EncodeTime(s *jx.Writer, v time.Time) {
s.Str(v.Format(timeLayout))
}
// DecodeDateTime reads a JSON string from the decoder and parses it as an
// RFC 3339 timestamp.
func DecodeDateTime(i *jx.Decoder) (v time.Time, err error) {
s, err := i.Str()
if err != nil {
return v, err
}
return time.Parse(time.RFC3339, s)
}
// EncodeDateTime writes v as an RFC 3339 JSON string.
func EncodeDateTime(s *jx.Writer, v time.Time) {
s.Str(v.Format(time.RFC3339))
}
// DecodeDuration reads a JSON string from the decoder and parses it with
// time.ParseDuration (e.g. "1h30m", "250ms").
func DecodeDuration(i *jx.Decoder) (v time.Duration, err error) {
s, err := i.Str()
if err != nil {
return v, err
}
return time.ParseDuration(s)
}
// EncodeDuration writes v in Go duration notation (time.Duration.String).
func EncodeDuration(s *jx.Writer, v time.Duration) {
s.Str(v.String())
}
|
module Geometry
  # Bézier curves are like lines, but curvier.
  # http://en.wikipedia.org/wiki/Bézier_curve
  #
  # == Constructors
  #   Bezier.new [0,0], [1,1], [2,2]  # From control points
  #
  # == Usage
  # To get a point on the curve for a particular value of t, use the
  # subscript operator:
  #   bezier[0.5]  # => [1,1]
  class Bezier
    # @!attribute points
    #   @return [Array<Point>] The control points for the Bézier curve
    attr_reader :points

    # @param points [Array] control points; each is converted with Point[]
    def initialize(*points)
      @points = points.map { |cp| Point[cp] }
    end

    # @!attribute degree
    #   @return [Number] The degree of the curve (one less than the point count)
    def degree
      points.size - 1
    end

    # Binomial coefficient C(degree, k).
    # http://en.wikipedia.org/wiki/Binomial_coefficient
    # http://rosettacode.org/wiki/Evaluate_binomial_coefficients#Ruby
    def binomial_coefficient(k)
      (0...k).inject(1) { |acc, j| (acc * (degree - j)) / (j + 1) }
    end

    # Evaluate the curve at parameter t.
    # @param t [Float] the input parameter; must lie in 0..1, else nil
    def [](t)
      return nil unless (0..1).include?(t)
      start = Point.zero(points.first.size)
      points.each_with_index.inject(start) do |sum, (cp, i)|
        sum + cp * binomial_coefficient(i) * ((1 - t) ** (degree - i)) * (t ** i)
      end
    end
  end
end
|
//
// IUpgradeViewController.h
// IUpgrade
//
// Created by felix.lin on 07/31/2016.
// Copyright (c) 2016 felix.lin. All rights reserved.
//
@import UIKit;
// Root view controller for the IUpgrade example app; declares no API beyond UIViewController.
@interface IUpgradeViewController : UIViewController
@end
|
from django.http import HttpResponse
import json
def validate_data(request_data):
"""Placeholder validator: return (validated_data, errors) for the payload.

Both return values are dicts; a non-empty ``errors`` dict signals that
``register`` should respond with HTTP 400.
"""
# Your implementation of data validation logic goes here
# Return validated data and any validation errors
# For example:
validated_data = {} # Placeholder for validated data
errors = {} # Placeholder for validation errors
# Your validation logic
# ...
return validated_data, errors
def register(request):
    """Handle a JSON registration POST.

    Returns 400 with {'data', 'errors'} on validation failure, 200 with a
    success message otherwise, and 405 for non-POST methods.
    """
    if request.method != 'POST':
        # BUG FIX: the original fell off the end (returned None) for non-POST
        # requests, which Django turns into a server error; answer 405 instead.
        response = HttpResponse(content_type='application/json', status=405)
        response.write(json.dumps({'errors': {'method': 'POST required'}}))
        return response
    request_data = json.loads(request.body)
    validated_data, errors = validate_data(request_data)
    if errors:
        response_data = {'data': validated_data, 'errors': errors}
        response = HttpResponse(content_type='application/json')
        response.write(json.dumps(response_data))
        response.status_code = 400  # Set status code to 400 for validation errors
        return response
    # Process the validated data (e.g., save to database)
    # Example success response:
    success_response_data = {'message': 'Registration successful'}
    success_response = HttpResponse(content_type='application/json')
    success_response.write(json.dumps(success_response_data))
    return success_response
|
<reponame>youngzhu/golab<filename>effective/iprint/iprint.go
package iprint
import "fmt"
// Sprintf invokes the type's own String method, so formatting the receiver
// with %s inside String would recurse forever. Converting to the underlying
// string first breaks the cycle.
type MyString string
func (m MyString) String() string {
// go vet flags this at compile time:
//return fmt.Sprintf("MyString=%s", m)
return fmt.Sprintf("MyString=%s", string(m)) // correct: plain string, no recursion
}
|
const express = require('express')
const Keto = require('../src/ketogenic')
console.verbose = console.info
const app = express()
const keto = Keto({
logger: console,
verbose: true,
chaos: true
})
const {
__KETO: { utils: { loadRoutes, set } }
} = keto(app)
set('myExtra', function () {
console.log('Hello from inside keto!')
})
set('logger', console)
loadRoutes({
root: __dirname,
dir: 'v1',
mountPath: '/',
router: express.Router(),
app
})
app.use(Keto.processStandardError)
app.listen(8080, function () {
console.verbose(`Listening on port 8080`)
})
|
#!/bin/bash -i
#####################################################################################################
### CONFIG VARS #####################################################################################
declare LLTEST_CMD="/app/srcds_run -game tf2classic +map ctf_2fort -insecure -norestart +sv_lan 1";
declare LLTEST_NAME="gamesvr-tf2classic-$(date '+%H%M%S')";
#####################################################################################################
#####################################################################################################
# Runtime vars
declare LLCOUNTER=0;
declare LLBOOT_ERRORS="";
declare LLTEST_HASFAILURES=false;
declare LLTEST_LOGFILE="$LLTEST_NAME"".log";
declare LLTEST_RESULTSFILE="$LLTEST_NAME"".results";
# Record PASS when pattern $1 appears (case-insensitively) in the server log,
# FAIL otherwise; reason $2 is written to the results file either way.
should_have() {
  local pattern=$1 reason=$2
  if grep -i -q "$pattern" "$LLTEST_LOGFILE"; then
    printf '[PASS] - %s\n' "$reason" >> "$LLTEST_RESULTSFILE"
  else
    printf '[FAIL] - %s\n' "$reason" >> "$LLTEST_RESULTSFILE"
    LLTEST_HASFAILURES=true
  fi
}
# Record FAIL when pattern $1 appears (case-insensitively) in the server log,
# PASS otherwise; reason $2 is written to the results file either way.
should_lack() {
  local pattern=$1 reason=$2
  if grep -i -q "$pattern" "$LLTEST_LOGFILE"; then
    printf '[FAIL] - %s\n' "$reason" >> "$LLTEST_RESULTSFILE"
    LLTEST_HASFAILURES=true
  else
    printf '[PASS] - %s\n' "$reason" >> "$LLTEST_RESULTSFILE"
  fi
}
# Send console command $1 to the tmux session; the server log should then
# produce a line containing $2 (checked via should_have). Change detection is
# a log-file md5 comparison polled every 0.5s, with a ~15s timeout.
function should_echo() {
  if tmux has-session -t "$LLTEST_NAME" 2>/dev/null; then
    LLCOUNTER=0;
    LLTMP=$(md5sum "$LLTEST_LOGFILE");
    tmux send -t "$LLTEST_NAME" C-z "$1" Enter;
    while true; do
      sleep 0.5;
      if (( "$LLCOUNTER" > 30 )); then
        # BUG FIX: original interpolated "$!" (PID of last background job)
        # instead of the command "$1", and wrote the failure to stdout
        # instead of appending it to the results file like every other check.
        echo $"[FAIL] - Command '$1' TIMED OUT" >> "$LLTEST_RESULTSFILE";
        LLTEST_HASFAILURES=true;
        break;
      fi;
      if [[ $(md5sum "$LLTEST_LOGFILE") != "$LLTMP" ]]; then
        should_have "$2" "'$1' should result in '$2' (loop iterations: $LLCOUNTER)";
        break;
      fi;
      (( LLCOUNTER++ ));
    done;
  else
    echo $"[ERROR]- Could not run command '$1'; tmux session not found" >> "$LLTEST_RESULTSFILE";
    LLTEST_HASFAILURES=true;
  fi;
}
# Dump the captured server log with a '»» ' prefix on every line, or abort
# the test run (exit 1) when the log is empty.
print_log() {
  if [ -s "$LLTEST_LOGFILE" ]; then
    echo $'\n[LOGFILE OUTPUT]'
    awk '{print "»» " $0}' "$LLTEST_LOGFILE"
  else
    echo $'\nOUTPUT LOG IS EMPTY!\n'
    exit 1
  fi
}
# Check prereqs
command -v awk > /dev/null 2>&1 || echo "awk is missing";
command -v md5sum > /dev/null 2>&1 || echo "md5sum is missing";
command -v sleep > /dev/null 2>&1 || echo "sleep is missing";
command -v tmux > /dev/null 2>&1 || echo "tmux is missing";
# Prep log file
: > "$LLTEST_LOGFILE"
if [ ! -f "$LLTEST_LOGFILE" ]; then
echo 'Failed to create logfile: '"$LLTEST_LOGFILE"'. Verify file system permissions.';
exit 2;
fi;
# Prep results file
: > "$LLTEST_RESULTSFILE"
if [ ! -f "$LLTEST_RESULTSFILE" ]; then
echo 'Failed to create logfile: '"$LLTEST_RESULTSFILE"'. Verify file system permissions.';
exit 2;
fi;
echo $'\n\nRUNNING TEST: '"$LLTEST_NAME";
echo $'Command: '"$LLTEST_CMD";
echo "Running under $(id)"$'\n';
# Execute test command in tmux session
tmux new -d -s "$LLTEST_NAME" "sleep 0.5; $LLTEST_CMD";
sleep 0.3;
tmux pipe-pane -t "$LLTEST_NAME" -o "cat > $LLTEST_LOGFILE";
# Boot monitor: poll once per second until the server process exits, the log
# goes quiet (boot finished), or the hard timeout trips.
while true; do
tmux has-session -t "$LLTEST_NAME" 2>/dev/null;
if [ "$?" != 0 ] ; then
echo $'terminated.\n';
LLBOOT_ERRORS="Test process self-terminated";
break;
fi;
# Only start judging success after ~30s of warm-up.
if (( "$LLCOUNTER" >= 29 )); then
# Non-empty log that has been idle for >20s => boot finished successfully.
if [ -s "$LLTEST_LOGFILE" ] && ((( $(date +%s) - $(stat -L --format %Y "$LLTEST_LOGFILE") ) > 20 )); then
echo $'succeeded.\n';
break;
fi;
# Hard cap: give up entirely after ~120s.
if (( "$LLCOUNTER" > 120 )); then
echo $'timed out.\n';
LLBOOT_ERRORS="Test timed out";
break;
fi;
fi;
# Progress heartbeat every 5 ticks.
if (( LLCOUNTER % 5 == 0 )); then
echo -n "$LLCOUNTER...";
fi;
(( LLCOUNTER++ ));
sleep 1;
done;
if [ ! -s "$LLTEST_LOGFILE" ]; then
echo $'\nOUTPUT LOG IS EMPTY!\n';
exit 1;
fi;
if [ ! -z "${LLBOOT_ERRORS// }" ]; then
echo "Boot error: $LLBOOT_ERRORS";
print_log;
exit 1;
fi;
#####################################################################################################
### TESTS ###########################################################################################
## Stock TF2 Classic server tests
should_have 'Setting breakpad minidump AppID = 244310' 'Sever started executing';
should_lack 'Server restart in 10 seconds' 'Server is not boot-looping';
should_lack 'Running the dedicated server as root' 'Server is not running under root';
should_have 'server_srv.so loaded for "Team Fortress 2 Classic"' 'srcds_run loaded TF2 Classic';
should_lack 'Your server needs to be restarted in order to receive the latest update.' 'Server is not reporting itself as out of date';
should_lack 'AppFramework : Unable to load module' 'no linux shared objects failed to load';
## Verify server responds to commands
should_echo "say STARTING COMMAND TESTS" 'Console: STARTING COMMAND TESTS';
#####################################################################################################
#####################################################################################################
tmux has-session -t "$LLTEST_NAME" 2>/dev/null;
if [ "$?" == 0 ] ; then
tmux kill-session -t "$LLTEST_NAME";
fi;
print_log;
echo $'\n[TEST RESULTS]\n';
cat "$LLTEST_RESULTSFILE";
echo $'\n[OUTCOME]\n';
if [ $LLTEST_HASFAILURES = true ]; then
echo $'Checks have failures!\n\n';
exit 1;
fi;
echo $'All checks passed!\n\n';
exit 0;
|
package com.gu.mediaservice.lib.elasticsearch
import com.sksamuel.elastic4s.requests.analysis.{Analysis, CustomAnalyzer, PathHierarchyTokenizer, StandardTokenizer, StemmerTokenFilter, StopTokenFilter, TokenFilter}
import com.sksamuel.elastic4s.requests.analyzers.{AsciiFoldingTokenFilter, LowercaseTokenFilter}
import org.elasticsearch.index.analysis.ASCIIFoldingTokenFilterFactory
object IndexSettings {
private val s_stemmer = "s_stemmer"
private val english_possessive_stemmer = "english_possessive_stemmer"
private val gu_stopwords = "gu_stopwords"
private val standard = "standard"
private val path_hierarchy = "path_hierarchy"
// TODO rename `english_s_stemmer` as its an analyzer not a stemmer - would require a reindex.
val englishSStemmerAnalyzerName = "english_" + s_stemmer
val hierarchyAnalyserName = "hierarchyAnalyzer"
def analysis: Analysis = {
val tokenizers = List(
StandardTokenizer(standard),
PathHierarchyTokenizer(path_hierarchy)
)
val filters: List[TokenFilter] = List(
// I (Justin) don't think we need to specify these, but can just refer to them by name (below)
// LowercaseTokenFilter,
// AsciiFoldingTokenFilter,
StemmerTokenFilter(name = english_possessive_stemmer, lang = "possessive_english"),
StopTokenFilter(name = gu_stopwords, stopwords = Seq("_english_")),
StemmerTokenFilter(name = s_stemmer, lang = "minimal_english")
)
val englishSStemmerAnalyzer = CustomAnalyzer(
englishSStemmerAnalyzerName,
standard,
List(),
List(
LowercaseTokenFilter.name,
AsciiFoldingTokenFilter.name,
english_possessive_stemmer,
gu_stopwords,
s_stemmer
)
)
val hierarchyAnalyzer = CustomAnalyzer(
hierarchyAnalyserName,
path_hierarchy,
List(),
List(LowercaseTokenFilter.name)
)
val analyzers = List(englishSStemmerAnalyzer, hierarchyAnalyzer)
Analysis(
analyzers,
tokenizers,
filters,
)
}
}
|
import pandas as pd

# Build a small demo DataFrame from per-row records (equivalent to the
# per-column dict form; explicit column order keeps the output identical).
records = [
    {'Name': 'John', 'Age': 30, 'Gender': 'Male'},
    {'Name': 'Jane', 'Age': 25, 'Gender': 'Female'},
]
df = pd.DataFrame(records, columns=['Name', 'Age', 'Gender'])
print(df)
|
docker build -t "justinrmiller/github-actions-test" .
|
In a Java for loop, the update expression (for example, the increment) runs after each execution of the loop body, and the loop condition is re-checked before the next iteration begins. Because the body always runs with the current value of the control variable before it changes, every item in the series — including the last one — is processed exactly once, and the loop terminates as soon as the condition evaluates to false.
|
#!/bin/bash
#SBATCH --job-name=/data/unibas/boittier/test-neighbours2
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --partition=short
#SBATCH --output=/data/unibas/boittier/test-neighbours2_%A-%a.out

# Fit distributed charges for one scan frame, seeded from the previous
# frame's refined fit, then analyse the result.
# Abort on the first failed command or unset variable so a failed cd or
# fit step cannot silently corrupt downstream results.
set -euo pipefail

hostname

# Path to scripts and executables
cubefit=/home/unibas/boittier/fdcm_project/mdcm_bin/cubefit.x
fdcm=/home/unibas/boittier/fdcm_project/fdcm.x
ars=/home/unibas/boittier/fdcm_project/ARS.py

# Variables for the job
n_steps=10
n_charges=24
scan_name=SCAN_amide1.pdb-
suffix=.xyz.chk
cubes_dir=/data/unibas/boittier/fdcm/amide/scan-large
output_dir=/data/unibas/boittier/test-neighbours2
frames=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/frames.txt
initial_fit=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/24_charges_refined.xyz
initial_fit_cube=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/amide1.pdb.chk
prev_frame=37
start_frame=38
next_frame=57
acd=/home/unibas/boittier/fdcm_project/0_fit.xyz.acd

start=$start_frame
next=$next_frame
dir="frame_${next}"
output_name="${output_dir}/${dir}/${dir}-${start}-${next}.xyz"
# NOTE(review): this deliberately overrides the model-directory initial_fit
# above with the previous frame's refined fit — confirm for chained refits.
initial_fit="${output_dir}/frame_${start}/frame_${start}-${prev_frame}-${start}.xyz"

# Go to the output directory (quoted; cd failure aborts under set -e)
mkdir -p "$output_dir"
cd "$output_dir"
mkdir -p "$dir"
cd "$dir"

# Cube files: *1 = reference frame (start), unsuffixed = target frame (next).
# NOTE(review): esp1 is defined but never used below — confirm before removing.
esp1="${cubes_dir}/${scan_name}${start}${suffix}.p.cube"
dens1="${cubes_dir}/${scan_name}${start}${suffix}.d.cube"
esp="${cubes_dir}/${scan_name}${next}${suffix}.p.cube"
dens="${cubes_dir}/${scan_name}${next}${suffix}.d.cube"

# adjust reference frame
python "$ars" -charges "$initial_fit" -pcube "$dens1" -pcube2 "$dens" -frames "$frames" -output "$output_name" -acd "$acd" > "$output_name.ARS.log"
# do gradient descent fit
"$fdcm" -xyz "$output_name.global" -dens "$dens" -esp "$esp" -stepsize 0.2 -n_steps "$n_steps" -learning_rate 0.5 -output "$output_name" > "$output_name.GD.log"
# adjust reference frame
python "$ars" -charges "$output_name" -pcube "$esp" -pcube2 "$esp" -frames "$frames" -output "$output_name" -acd "$acd" > "$output_name.ARS-2.log"
# make a cube file for the fit
"$cubefit" -v -generate -esp "$esp" -dens "$dens" -xyz refined.xyz > "$output_name.cubemaking.log"
# do analysis
"$cubefit" -v -analysis -esp "$esp" -esp2 "${n_charges}charges.cube" -dens "$dens" > "$output_name.analysis.log"
echo "$PWD"
|
import * as Promise from "bluebird"
import * as utils from "./utils"
import * as WinReg from "winreg"
import * as which from "which"
import {each as asyncEach} from "async"
import {join, basename} from "path"
import {unique} from "underscore"
import {execFile} from "child_process"
import {inspect} from "util"
/**
 * Parsed `major.minor.patch[_update]` Java version string, e.g. "1.8.0_144".
 * A missing `_update` suffix defaults to 0.
 */
export class JavaVersion {
    major: number
    minor: number
    patch: number
    update: number

    constructor(version: string) {
        const matches = version.match(/(\d+?)\.(\d+?)\.(\d+?)(?:_(\d+))?/)
        // BUG FIX: an unparseable string previously crashed with a cryptic
        // TypeError on `matches[1]`; fail with a descriptive error instead.
        if (!matches) {
            throw new Error(`Unparseable Java version string: ${version}`)
        }
        // Explicit radix 10 so strings with leading zeros cannot surprise us.
        this.major = parseInt(matches[1], 10)
        this.minor = parseInt(matches[2], 10)
        this.patch = parseInt(matches[3], 10)
        this.update = parseInt(matches[4] || "0", 10)
    }
}
/**
 * One candidate Java executable on disk. Version and architecture are
 * unknown until ensureInfo() has probed the binary; installs whose probe
 * fails are flagged `invalid` rather than throwing.
 */
export class JavaInstall {
    private _path: string
    private _arch: string
    private _version: JavaVersion
    // True once the probe in ensureInfo() has completed successfully.
    private _gotInfo: boolean
    // True when the probe failed (bad path, timeout, non-Java binary, ...).
    private _invalid: boolean

    constructor(path: string) {
        this._path = path
        this._gotInfo = false
        this._invalid = false
    }

    get path() {
        return this._path
    }

    // Undefined until ensureInfo() has run.
    get arch() {
        return this._arch
    }

    // Undefined until ensureInfo() has run.
    get version() {
        return this._version
    }

    get invalid() {
        return this._invalid
    }

    /**
     * @internal
     *
     * Probes the executable by running the bundled PrintJavaVersion.jar
     * (1 s timeout). Expects two stdout lines: version string, then "32"
     * or "64" for the data model. Any execution error marks the install
     * invalid and resolves — it never rejects.
     */
    ensureInfo(): Promise<void> {
        if (!this._gotInfo && !this._invalid) {
            return new Promise<void>((resolve, reject) => {
                var jarFile = __dirname + "/java/PrintJavaVersion.jar"
                debug(`jarFile: ${jarFile}`)
                execFile(this._path, ["-jar", jarFile], {timeout: 1000}, (err, stdoutBuf, stderrBuf) => {
                    if (err) {
                        debug(`[${this._path}] Err: ${err}`)
                        // Not a usable Java; remember that so getJavas can filter it.
                        this._invalid = true
                        resolve()
                        return
                    }
                    const stdout = stdoutBuf.toString().trim()
                    debug(`[${this._path}] stdout: ${inspect(stdout)}`)
                    const lines = stdout.split("\n")
                    // Line 1: JVM data model ("32"/"64") → arch label.
                    const arch = lines[1]
                    switch (arch) {
                        case "32":
                            this._arch = "x86"
                            break
                        case "64":
                            this._arch = "x64"
                            break
                        default:
                            this._arch = "unknown"
                            break
                    }
                    // Line 0: the version string, parsed into JavaVersion.
                    const version = lines[0]
                    this._version = new JavaVersion(version)
                    debug(`[${this._path}] ${inspect(this)}`)
                    this._gotInfo = true
                    resolve()
                })
            })
        } else {
            // Already probed (or already known bad): nothing to do.
            return Promise.resolve()
        }
    }
}
// Module-level trace sink; a no-op until a consumer installs one.
let debug: (message: string) => void = (_message: string) => {}

// Install a custom debug sink (e.g. console.log) for this module's tracing.
export function setDebug(debugFn: (debug: string) => void) {
    debug = debugFn
}
/**
 * Discovers all usable Java installations on this machine (cached via
 * utils.PromiseCache, so the search runs at most once).
 *
 * NOTE: the chain relies on bluebird collection helpers
 * (tap/filter/each), so the platform finders must return bluebird promises.
 */
export const getJavas = utils.PromiseCache((): Promise<Array<JavaInstall>> => {
    debug(`getJavas start (${process.platform})`)
    let javas: Promise<Array<JavaInstall>>
    // Platform-specific search; anything else falls back to the PATH lookup.
    switch (process.platform) {
        case "win32":
            javas = findJavasWindows()
            break
        case "darwin":
            javas = findJavasMac()
            break
        case "linux":
            javas = findJavasLinux()
            break
        default:
            javas = findJavaOnPath()
            break
    }
    // Pipeline: drop non-executable candidates, dedupe by path, probe each
    // remaining install, then drop the ones whose probe failed.
    return javas
        .tap(v => {debug(`Versions Raw: ${inspect(v.map(v => v.path))}`)})
        .filter<JavaInstall>(version => utils.canExecute(version.path))
        .tap(v => {debug(`Versions Existing: ${inspect(v.map(v => v.path))}`)})
        .then(versions => unique(versions, v => v.path))
        .each<JavaInstall, void>(version => version.ensureInfo())
        .filter<JavaInstall>(version => !version.invalid)
        .tap(v => {debug(`Versions Final: ${inspect(v)}`)})
})
//region Linux
// Well-known install locations checked in addition to the PATH; paths that
// do not exist are filtered out later by getJavas (canExecute).
const defaultJavasLinux = [
    new JavaInstall("/opt/java/bin/java"),
    new JavaInstall("/usr/bin/java")
]

// Candidate Javas on Linux: the fixed locations plus whatever `which java` finds.
function findJavasLinux(): Promise<Array<JavaInstall>> {
    return Promise.all([defaultJavasLinux, findJavaOnPath()]).then(utils.flatten)
}
//endregion
//region Mac
// Fixed macOS locations (Xcode's bundled JRE, the browser plug-in JRE and
// the legacy Apple framework java). Missing paths are filtered later.
const defaultJavasMac = [
    new JavaInstall("/Applications/Xcode.app/Contents/Applications/Application Loader.app/Contents/MacOS/itms/java/bin/java"),
    new JavaInstall("/Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Home/bin/java"),
    new JavaInstall("/System/Library/Frameworks/JavaVM.framework/Versions/Current/Commands/java")
]

// Candidate Javas on macOS: PATH, fixed locations, and every bundle under
// the user and system JavaVirtualMachines directories (JDK and JRE layouts).
// Directory-listing failures degrade to an empty list rather than rejecting.
function findJavasMac(): Promise<Array<JavaInstall>> {
    let javaVersionPromises: Array<Promise<Array<JavaInstall>>> = []
    javaVersionPromises.push(findJavaOnPath())
    javaVersionPromises.push(Promise.resolve(defaultJavasMac))
    javaVersionPromises.push(
        utils.allDirectories("/Library/Java/JavaVirtualMachines/").catch(err => [])
            .map(dir => [
                new JavaInstall(join(dir, "Contents/Home/bin/java")),
                new JavaInstall(join(dir, "Contents/Home/jre/bin/java"))
            ]).then(utils.flatten)
    )
    javaVersionPromises.push(
        utils.allDirectories("/System/Library/Java/JavaVirtualMachines/").catch(err => [])
            .map(dir => [
                new JavaInstall(join(dir, "Contents/Home/bin/java")),
                new JavaInstall(join(dir, "Contents/Commands/java"))
            ]).then(utils.flatten)
    )
    return Promise.all(javaVersionPromises).then(utils.flatten)
}
//endregion
//region Windows
// Registry roots where Oracle JRE/JDK installs register themselves.
const javaRegKeys = [
    "SOFTWARE\\JavaSoft\\Java Runtime Environment",
    "SOFTWARE\\JavaSoft\\Java Development Kit"
]
// Fallback install paths for JRE 6-8, 64-bit and 32-bit Program Files.
const defaultJavasWindows = [
    new JavaInstall("C:/Program Files/Java/jre8/bin/javaw.exe"),
    new JavaInstall("C:/Program Files/Java/jre7/bin/javaw.exe"),
    new JavaInstall("C:/Program Files/Java/jre6/bin/javaw.exe"),
    new JavaInstall("C:/Program Files (x86)/Java/jre8/bin/javaw.exe"),
    new JavaInstall("C:/Program Files (x86)/Java/jre7/bin/javaw.exe"),
    new JavaInstall("C:/Program Files (x86)/Java/jre6/bin/javaw.exe")
]

// Candidate Javas on Windows: fixed paths, PATH lookup, and both the
// 32-bit and 64-bit registry views of each JavaSoft key. Registry
// failures degrade to an empty list rather than rejecting.
function findJavasWindows(): Promise<Array<JavaInstall>> {
    let javaVersionPromises: Array<Promise<Array<JavaInstall>>> = []
    javaVersionPromises.push(Promise.resolve(defaultJavasWindows))
    javaVersionPromises.push(findJavaOnPath())
    javaRegKeys.forEach(key => {
        javaVersionPromises.push(findJavasFromRegistryKey(key, "x64").catch(err => []))
        javaVersionPromises.push(findJavasFromRegistryKey(key, "x86").catch(err => []))
    })
    return Promise.all(javaVersionPromises).then(utils.flatten)
}
//endregion
function findJavasFromRegistryKey(keyName: string, arch: string): Promise<Array<JavaInstall>> {
return new Promise<Array<JavaInstall>>((resolve, reject) => {
let key = new WinReg({ key: keyName, arch: arch })
// For each subkey of the given key, each of which should be
key.keys((err: Error, javaKeys: Array<WinReg>) => {
if (err) {
resolve([])
return
}
let javaVersions: Array<JavaInstall> = []
asyncEach<WinReg>(javaKeys, (javaKey, cb) => {
javaKey.get("JavaHome", (err, javaHome) => {
if (err) return
let path = join(javaHome.value, "bin", "javaw.exe")
javaVersions.push(new JavaInstall(path))
cb()
})
}, (err) => {
debug(`Reg key ${keyName} arch ${arch}, got ${javaVersions}`)
if (err) reject(err)
else resolve(javaVersions)
})
})
})
}
//endregion
// Promisified node-which lookup.
const whichP = Promise.promisify(which)

// Locates `java` on the PATH; resolves to an empty list when not found
// instead of rejecting.
function findJavaOnPath(): Promise<Array<JavaInstall>> {
    return whichP("java")
        .then(path => [new JavaInstall(path)], err => [])
}
|
<gh_stars>100-1000
#include <stdlib.h>
#include <stdint.h>
#include <arm_neon.h>
#include <assert.h>
/* Routine optimized for shuffling a buffer for a type size of 4 bytes. */
/* Routine optimized for shuffling a buffer for a type size of 4 bytes.
 *
 * Transposes `vectorizable_elements` 4-byte elements from AoS (src) to
 * SoA (dest): byte b of every element is gathered into the b-th plane of
 * dest, each plane being `total_elements` bytes long. Processes 16
 * elements (64 bytes) per iteration, so vectorizable_elements must be a
 * multiple of 16; any tail is left for the caller's scalar fallback. */
static void
shuffle4_neon(uint8_t* const dest, const uint8_t* const src,
              const size_t vectorizable_elements, const size_t total_elements)
{
  size_t i, k;  /* fixed: dropped the unused variable `j` */
  static const size_t bytesoftype = 4;
  uint8x16x4_t r0;

  for (i = 0, k = 0; i < vectorizable_elements * bytesoftype; i += 64, k++) {
    /* Load (and de-interleave) 64 bytes into the structure r0 */
    r0 = vld4q_u8(src + i);
    /* Store each de-interleaved byte plane into its destination stripe */
    vst1q_u8(dest + total_elements * 0 + k * 16, r0.val[0]);
    vst1q_u8(dest + total_elements * 1 + k * 16, r0.val[1]);
    vst1q_u8(dest + total_elements * 2 + k * 16, r0.val[2]);
    vst1q_u8(dest + total_elements * 3 + k * 16, r0.val[3]);
  }
}
/* Routine optimized for unshuffling a buffer for a type size of 4 bytes. */
/* Routine optimized for unshuffling a buffer for a type size of 4 bytes.
 *
 * Inverse of shuffle4_neon: gathers one 16-byte chunk from each of the
 * four byte planes of src and re-interleaves them into 4-byte elements in
 * dest. Processes 16 elements (64 bytes) per iteration; any tail is left
 * for the caller's scalar fallback. */
static void
unshuffle4_neon(uint8_t* const dest, const uint8_t* const src,
                const size_t vectorizable_elements, const size_t total_elements)
{
  size_t i, k;  /* fixed: dropped the unused variable `j` */
  static const size_t bytesoftype = 4;
  uint8x16x4_t r0;

  for (i = 0, k = 0; i < vectorizable_elements * bytesoftype; i += 64, k++) {
    /* load one 16-byte chunk from each byte plane into r0 */
    r0.val[0] = vld1q_u8(src + total_elements * 0 + k * 16);
    r0.val[1] = vld1q_u8(src + total_elements * 1 + k * 16);
    r0.val[2] = vld1q_u8(src + total_elements * 2 + k * 16);
    r0.val[3] = vld1q_u8(src + total_elements * 3 + k * 16);
    /* Store (with re-interleaving) the results in the destination vector */
    vst4q_u8(dest + k * 64, r0);
  }
}
/* Round-trip smoke test: shuffle then unshuffle must reproduce the input.
 * Fixes vs. the original: explicit `int main(void)` (implicit int is
 * invalid in C99+), const-correct cast of the string literal (a string
 * literal is char[], not uint8_t*), checked allocations, and a return
 * value. */
int main(void)
{
    /* 144 bytes of fixed input: total_elements (36) x 4 bytes each. */
    const uint8_t *src = (const uint8_t *)
        "\xcb\xff\xf1\x79\x24\x7c\xb1\x58\x69\xd2\xee\xdd\x99\x9a\x7a\x86"
        "\x45\x3e\x5f\xdf\xa2\x43\x41\x25\x77\xae\xfd\x22\x19\x1a\x38\x2b"
        "\x56\x93\xab\xc3\x61\xa8\x7d\xfc\xbb\x98\xf6\xd1\x29\xce\xe7\x58"
        "\x73\x4c\xd3\x12\x3f\xcf\x46\x94\xba\xfa\x49\x83\x71\x1e\x35\x5f"
        "\xbc\x2d\x3f\x7c\xf8\xb4\xb9\xa8\xc9\x9f\x8d\x9d\x11\xc4\xc3\x23"
        "\x44\x3a\x11\x4f\xf2\x41\x31\xb8\x19\xbe\xad\x72\xdc\x3a\xbc\x34"
        "\x53\xa7\xc6\xb3\x71\xc8\x83\x27\xb3\x45\x82\xd8\x95\x9e\x71\x92"
        "\x88\x4f\xdd\x66\xbf\xc5\xd6\x42\x33\x18\x33\xf7\xaf\xab\x42\x47"
        "\x13\x21\x17\xc8\xc9\x34\x25\x11\x67\x74\x4e\xe8\x67\x74\x4e\xe8";
    size_t vectorizable_elements = 32;  /* 32*4 = 128 bytes go through NEON */
    size_t total_elements = 36;
    size_t i;
    uint8_t *dest1 = calloc(144, 2);
    uint8_t *dest2 = calloc(144, 2);

    if (dest1 == NULL || dest2 == NULL) {
        free(dest1);
        free(dest2);
        return EXIT_FAILURE;
    }

    shuffle4_neon(dest1, src, vectorizable_elements, total_elements);
    unshuffle4_neon(dest2, dest1, vectorizable_elements, total_elements);

    /* Only the 128 vectorized bytes are defined in dest2. */
    for (i = 0; i < 128; i++) {
        assert(dest2[i] == src[i]);
    }

    free(dest1);
    free(dest2);
    return 0;
}
|
#!/usr/bin/env bash
# Install/overwrite the "metrics*" index template in Elasticsearch.
# Requires ES_HOST to point at the cluster host (port 9200 is appended).
set -euo pipefail

# Fail fast with a clear message instead of curling "":9200 when unset.
: "${ES_HOST:?ES_HOST must be set to the Elasticsearch host}"

curl -XPUT "${ES_HOST}:9200/_template/metrics?pretty" -H 'Content-Type: application/json' -d'
{
  "template": "metrics*",
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 0
  },
  "mappings": {
    "measurement": {
      "_source": {
        "enabled": true
      },
      "properties": {
        "temperature": {
          "type": "half_float"
        },
        "humidity": {
          "type": "half_float"
        },
        "pressure": {
          "type": "half_float"
        },
        "co2": {
          "type": "integer"
        },
        "uncertainty": {
          "type": "half_float"
        }
      }
    }
  }
}
'
|
<filename>src/app/dashboard/student-dashboard/student-reportcard/student-report-card.component.ts
import {Component, OnDestroy, OnInit} from '@angular/core'
import {ActivatedRoute} from '@angular/router'
import {ReportCardEntryService} from '../../../services/report-card-entry.service'
import {EMPTY, of, Subscription, zip} from 'rxjs'
import {ReportCardEntryAtom} from '../../../models/report-card-entry.model'
import {mergeAll, switchMap, toArray} from 'rxjs/operators'
import {TableHeaderColumn} from '../../../abstract-crud/abstract-crud.component'
import {MatTableDataSource} from '@angular/material'
import {distinctEntryTypeColumns} from '../../../report-card-table/report-card-table-utils'
import {format, formatTime} from '../../../utils/lwmdate-adapter'
import {ReportCardTableModel} from '../../../report-card-table/report-card-table.component'
import {dateOrderingASC, subscribe} from '../../../utils/functions'
import {AnnotationService} from '../../../services/annotation.service'
import {AnnotationAtom} from '../../../models/annotation'
import {groupBy, mapMap} from '../../../utils/group-by'
import {fullUserName} from '../../../utils/component.utils'
import {RescheduleService} from '../../../services/reschedule.service'
import {ReportCardRescheduledAtom} from '../../../models/report-card-rescheduled.model'
// Header view model shown above the report-card table.
interface LabworkView {
    title: string
}

// Row model for the annotations table.
interface AnnotationView {
    index: number
    label: string
    content: string[]
}

/**
 * Read-only view of one student's report card for one labwork:
 * scheduled entries (with reschedules) plus supervisor annotations.
 * Route params: 'lid' = labwork id, 'sid' = student id.
 */
@Component({
    selector: 'lwm-student-report-card',
    templateUrl: './student-report-card.component.html',
    styleUrls: ['./student-report-card.component.scss']
})
export class StudentReportCardComponent implements OnInit, OnDestroy {

    tableModel: ReportCardTableModel
    annotationDataSource: MatTableDataSource<AnnotationView>
    labworkView: LabworkView
    displayedColumns: string[] = ['index', 'label', 'content']

    // Open subscriptions; torn down in ngOnDestroy.
    private subs: Subscription[] = []

    constructor(
        private readonly route: ActivatedRoute,
        private readonly reportCardEntryService: ReportCardEntryService,
        private readonly annotationService: AnnotationService,
        private readonly rescheduledService: RescheduleService,
    ) {
    }

    // Load the cards first, then fan out to reschedules and annotations.
    ngOnInit(): void {
        this.fetchReportCards(cards => {
            this.fetchReschedules(cards, res => this.updateReportCardEntryTableUI(res))
            this.fetchAnnotations(cards)
        })
    }

    ngOnDestroy() {
        this.subs.forEach(_ => _.unsubscribe())
    }

    // Pairs every card with its reschedules; `completion` fires once after
    // all per-card requests have finished (mergeAll + toArray).
    private fetchReschedules = (cards: ReportCardEntryAtom[], completion: (rs: [ReportCardEntryAtom, ReportCardRescheduledAtom[]][]) => void) => {
        this.subs.push(
            subscribe(
                of(cards).pipe(
                    switchMap(xs => xs.map(x => zip(of(x), this.rescheduledService.all(x.id)))),
                    mergeAll(),
                    toArray()
                ),
                rs => completion(rs)
            )
        )
    }

    // Loads the student's cards for the routed labwork; also sets the title
    // from the first card. Emits nothing if either route param is missing.
    private fetchReportCards = (completion: (cards: ReportCardEntryAtom[]) => void) => {
        this.subs.push(subscribe(
            this.route.paramMap.pipe(
                switchMap(map => {
                    const labwork = map.get('lid')
                    const student = map.get('sid')
                    return labwork && student ? this.reportCardEntryService.fromStudent(student, labwork) : EMPTY
                })
            ),
            cards => {
                this.updateTitle(cards[0])
                completion(cards)
            }
        ))
    }

    // Loads annotations for the routed labwork and renders them against the
    // given cards. NOTE(review): only 'lid' is passed to getForStudent —
    // presumably the backend scopes to the current user; confirm.
    private fetchAnnotations = (cards: ReportCardEntryAtom[]) => {
        this.subs.push(subscribe(
            this.route.paramMap.pipe(
                switchMap(map => {
                    const labwork = map.get('lid')
                    return labwork ? this.annotationService.getForStudent(labwork) : EMPTY
                })
            ),
            this.updateAnnotationsUI(cards)
        ))
    }

    private updateTitle = (card: ReportCardEntryAtom) =>
        this.labworkView = {
            title: `Praktikumsdaten zu ${card.labwork.label}`,
        }

    private updateReportCardEntryTableUI = (cards: [ReportCardEntryAtom, ReportCardRescheduledAtom[]][]) => {
        this.tableModel = this.makeTableModel(cards)
    }

    // Groups annotations by report-card entry, renders each group as one
    // row (messages sorted oldest-first), rows sorted by assignment index.
    private updateAnnotationsUI = (cards: ReportCardEntryAtom[]): (annotations: AnnotationAtom[]) => void => annotations => {
        this.annotationDataSource = new MatTableDataSource<AnnotationView>(
            mapMap(
                groupBy(annotations, a => a.reportCardEntry),
                (k, v) => {
                    // tslint:disable-next-line:no-non-null-assertion
                    const card = cards.find(_ => _.id === k)!!
                    return {
                        index: card.assignmentIndex + 1,
                        label: card.label,
                        content: v
                            .sort((a, b) => dateOrderingASC(a.lastModified, b.lastModified))
                            .map(a => `${format(a.lastModified, 'dd.MM.yyyy - HH:mm')}: ${a.message} (${fullUserName(a.author)})`)
                    }
                }
            ).sort((a, b) => a.index - b.index)
        )
    }

    // Static columns plus one column per distinct entry type across all cards.
    private makeTableModel = (cards: [ReportCardEntryAtom, ReportCardRescheduledAtom[]][]): ReportCardTableModel => {
        const basicColumns: TableHeaderColumn[] = [
            {attr: 'assignmentIndex', title: '#'},
            {attr: 'date', title: 'Datum'},
            {attr: 'start', title: 'Start'},
            {attr: 'end', title: 'Ende'},
            {attr: 'room.label', title: 'Raum'},
            {attr: 'label', title: 'Bezeichnung'},
        ]
        return {
            dataSource: new MatTableDataSource(
                cards
                    .sort(([a], [b]) => a.assignmentIndex - b.assignmentIndex)
                    .map(([entry, reschedules]) => ({entry, reschedules, annotationCount: 0}))
            ),
            columns: basicColumns.concat(distinctEntryTypeColumns(cards.flatMap(([e]) => e.entryTypes)))
        }
    }

    // Formats one cell; dates/times are rendered, the index is 1-based,
    // anything else falls through to the raw attribute value.
    tableContentFor = (e: ReportCardEntryAtom, attr: string) => {
        switch (attr) {
            case 'date':
                return format(e.date, 'dd.MM.yyyy')
            case 'start':
                return formatTime(e.start, 'HH:mm')
            case 'end':
                return formatTime(e.end, 'HH:mm')
            case 'assignmentIndex':
                return e.assignmentIndex + 1
            case 'room.label':
                return e.room.label
            default:
                return e[attr]
        }
    }
}
|
<reponame>Frayo44/WikiGame---A-Wikipedia-Game<filename>app/src/main/java/com/yoavfranco/wikigame/adapters/AboutAdapter.java<gh_stars>1-10
package com.yoavfranco.wikigame.adapters;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.ArrayList;
import com.yoavfranco.wikigame.R;
import com.yoavfranco.wikigame.utils.Item;
/**
* Created by tomer aka rosenpin on 2/9/16.
*/
/**
 * List adapter for the About screen: renders each {@link Item} as an
 * icon + title + description row (R.layout.about_item).
 */
public class AboutAdapter extends ArrayAdapter<Item> {

    public AboutAdapter(Context context, ArrayList<Item> items) {
        super(context, 0, items);
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        Item item = getItem(position);
        // BUG FIX: reuse the recycled row when the ListView hands one back
        // instead of inflating a fresh layout on every call — the original
        // ignored convertView, defeating view recycling and causing jank
        // on scroll.
        if (convertView == null) {
            convertView = LayoutInflater.from(getContext()).inflate(R.layout.about_item, parent, false);
        }
        TextView tvName = (TextView) convertView.findViewById(R.id.tvTitle);
        TextView tvDescription = (TextView) convertView.findViewById(R.id.tvDescription);
        ImageView ivIcon = (ImageView) convertView.findViewById(R.id.ivIcon);
        tvName.setText(item.getTitle());
        tvDescription.setText(item.getDescription());
        ivIcon.setImageDrawable(item.getImg());
        return convertView;
    }
}
|
def array_sum(arr):
    """Return the sum of every element in a 2-D (sequence-of-rows) array."""
    total = 0  # avoids shadowing the builtin `sum`
    for row in arr:
        for value in row:
            total += value
    return total

array_sum([[1, 2, 3], [4, 5, 6]])
|
"""
#Develop a code generation task to perform a linear search through an unsorted array for a given Integer
def linear_search(array, search_element):
for i in range(len(array)):
if array[i] == search_element:
return i
return -1
if __name__ == '__main__':
array = [20, 15, 25, 11, 55]
search_element = 25
x = linear_search(array, search_element)
if x == -1:
print("Element not found")
else:
print("Element found at index " + str(x))
"""
Element found at index 2
|
<reponame>Jose-Bustamante/EmailsFieldVanilla
// Classify the current browser family from the user-agent string.
// Returns "Chrome", "Firefox", "IE11", or undefined when none match
// (Chrome wins over Firefox when both substrings appear, as before).
export function checkBrowser() {
  const agent = navigator.userAgent;
  let detected;
  if (agent.search("Chrome") > -1) {
    detected = "Chrome";
  } else if (agent.search("Firefox") > -1) {
    detected = "Firefox";
  } else if (agent.indexOf("Trident/7.0") > -1) {
    detected = "IE11";
  }
  return detected;
}
|
<gh_stars>0
//
//  FSInventoryController.h
//  myhome
//
//  Created by FudonFuchina on 2018/2/3.
//  Copyright © 2018年 fuhope. All rights reserved.
//

#import "FSShakeBaseController.h"

// Inventory screen controller; inherits shake-gesture handling from
// FSShakeBaseController.
@interface FSInventoryController : FSShakeBaseController

// Backing table identifier for this screen. NOTE(review): semantics
// inferred from the name only — confirm against the .m implementation.
@property (nonatomic,copy) NSString *table;

@end
|
package timely.api.response.timeseries;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
/**
 * Bean for a time-series search/lookup response: the query parameters
 * echoed back (type, metric, tags, limit, time) plus the matching series.
 * {@code startIndex} is fixed at 0 and exists only so it appears in the
 * serialized output. Equality and hash code cover all serialized fields.
 */
public class SearchLookupResponse {

    /** One matching time series: metric name, TSUID and its tag set. */
    public static class Result {

        private Map<String, String> tags = new HashMap<>();
        private String metric;
        private String tsuid;

        public Map<String, String> getTags() {
            return tags;
        }

        public void setTags(Map<String, String> tags) {
            this.tags = tags;
        }

        public void putTag(String key, String value) {
            this.tags.put(key, value);
        }

        public String getMetric() {
            return metric;
        }

        public void setMetric(String metric) {
            this.metric = metric;
        }

        public String getTsuid() {
            return tsuid;
        }

        public void setTsuid(String tsuid) {
            this.tsuid = tsuid;
        }

        @Override
        public boolean equals(Object obj) {
            if (null == obj) {
                return false;
            }
            if (this == obj) {
                return true;
            }
            if (obj instanceof Result) {
                Result other = (Result) obj;
                EqualsBuilder builder = new EqualsBuilder();
                builder.append(this.metric, other.metric);
                builder.append(this.tsuid, other.tsuid);
                builder.append(this.tags, other.tags);
                return builder.isEquals();
            } else {
                return false;
            }
        }

        @Override
        public int hashCode() {
            HashCodeBuilder hcb = new HashCodeBuilder();
            hcb.append(this.metric);
            hcb.append(this.tsuid);
            hcb.append(this.tags);
            return hcb.toHashCode();
        }
    }

    private String type;
    private String metric;
    private Map<String, String> tags = new HashMap<>();
    private int limit;
    private int time;
    private int totalResults;
    private List<Result> results = new ArrayList<>();
    private int startIndex = 0; // Does not change, here for serialization

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getMetric() {
        return metric;
    }

    public void setMetric(String metric) {
        this.metric = metric;
    }

    public Map<String, String> getTags() {
        return tags;
    }

    public void setTags(Map<String, String> tags) {
        this.tags = tags;
    }

    public void putTag(String key, String value) {
        this.tags.put(key, value);
    }

    public int getTime() {
        return time;
    }

    public void setTime(int time) {
        this.time = time;
    }

    public int getTotalResults() {
        return totalResults;
    }

    public void setTotalResults(int totalResults) {
        this.totalResults = totalResults;
    }

    public List<Result> getResults() {
        return results;
    }

    public void setResults(List<Result> results) {
        this.results = results;
    }

    public int getLimit() {
        return limit;
    }

    public void setLimit(int limit) {
        this.limit = limit;
    }

    @Override
    public boolean equals(Object obj) {
        if (null == obj) {
            return false;
        }
        if (obj == this) {
            return true;
        }
        if (obj instanceof SearchLookupResponse) {
            SearchLookupResponse other = (SearchLookupResponse) obj;
            EqualsBuilder builder = new EqualsBuilder();
            builder.append(this.type, other.type);
            builder.append(this.metric, other.metric);
            builder.append(this.time, other.time);
            builder.append(this.totalResults, other.totalResults);
            builder.append(this.tags, other.tags);
            builder.append(this.results, other.results);
            builder.append(this.startIndex, other.startIndex);
            builder.append(this.limit, other.limit);
            return builder.isEquals();
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        HashCodeBuilder hcb = new HashCodeBuilder();
        hcb.append(this.type);
        hcb.append(this.metric);
        hcb.append(this.time);
        hcb.append(this.totalResults);
        hcb.append(this.tags);
        hcb.append(this.results);
        hcb.append(this.startIndex);
        hcb.append(this.limit);
        return hcb.toHashCode();
    }
}
|
import XCTest
/// Minimal integer calculator exercised by the unit tests below.
class Calculator {

    /// Returns the sum of `a` and `b`.
    func add(_ a: Int, _ b: Int) -> Int {
        return a + b
    }

    /// Returns `a` minus `b`.
    func subtract(_ a: Int, _ b: Int) -> Int {
        return a - b
    }

    /// Returns the product of `a` and `b`.
    func multiply(_ a: Int, _ b: Int) -> Int {
        return a * b
    }

    /// Integer (truncating) division. Note: Swift's `/` traps at runtime
    /// when `b` is zero.
    func divide(_ a: Int, _ b: Int) -> Int {
        return a / b
    }
}
/// Unit tests covering each Calculator operation plus one micro-benchmark.
class CalculatorTests: XCTestCase {

    var calculator: Calculator!

    // Fresh calculator per test.
    override func setUp() {
        super.setUp()
        calculator = Calculator()
    }

    override func tearDown() {
        calculator = nil
        super.tearDown()
    }

    func testAddition() {
        XCTAssertEqual(calculator.add(3, 5), 8, "Addition should return the correct sum")
    }

    func testSubtraction() {
        XCTAssertEqual(calculator.subtract(8, 3), 5, "Subtraction should return the correct difference")
    }

    func testMultiplication() {
        XCTAssertEqual(calculator.multiply(4, 6), 24, "Multiplication should return the correct product")
    }

    func testDivision() {
        XCTAssertEqual(calculator.divide(10, 2), 5, "Division should return the correct quotient")
    }

    // NOTE(review): the product (~1e18) fits a 64-bit Int but would trap
    // on a 32-bit platform — confirm supported targets.
    func testPerformanceMultiplication() {
        self.measure {
            _ = calculator.multiply(999999999, 999999999) // Large number multiplication
        }
    }
}
|
#!/usr/bin/env bash
# -*- coding: utf-8 -*-
# author: Hanzhang Yang
# Run the CSC corrector trainer from a saved checkpoint on CPU
# (CUDA_VISIBLE_DEVICES is cleared deliberately).
set -euo pipefail

REPO_PATH=/Users/yuang/PA_tech/text_corrector/ChineseBert/csc_correct_task_yuang
BERT_PATH=/Users/yuang/PA_tech/text_corrector/ChineseBert/ChineseBERT-base
CHECKPOINT_PATH=/Users/yuang/PA_tech/text_corrector/ChineseBert/csc_correct_task_yuang/results/0810/corrector_glyce_base_1000_2_3e-5_0.002_0.02_256_0.1_1_0.1/checkpoint/epoch=29_v1.ckpt
MAX_LEN=256
CLASSIFIER=multi

# All expansions quoted so paths with spaces (common on macOS) cannot
# split into multiple arguments.
CUDA_VISIBLE_DEVICES="" python "${REPO_PATH}/corrector_trainer.py" \
    --max_length "${MAX_LEN}" \
    --bert_path "${BERT_PATH}" \
    --classifier "${CLASSIFIER}" \
    --checkpoint_path "${CHECKPOINT_PATH}"
|
<reponame>nabeelkhan/Oracle-DBA-Life
REM FILE NAME:  db_tbl8.sql
REM LOCATION:   Object Management\Tables\Reports
REM FUNCTION:   Generate table report
REM CATEGORY:
REM TESTED ON:  8.0.4.1, 8.1.5, 8.1.7, 9.0.1
REM PLATFORM:   non-specific
REM REQUIRES:   dba_tables
REM
REM This is a part of the Knowledge Xpert for Oracle Administration library.
REM Copyright (C) 2001 Quest Software
REM All rights reserved.
REM
REM******************** Knowledge Xpert for Oracle Administration ********************

REM Column layout for the 132-column report; owner/tablespace repeat
REM suppressed via BREAK.
CLEAR columns
COLUMN owner format a15 heading 'Table | Owner'
COLUMN table_name heading Table
COLUMN tablespace_name format A15 heading Tablespace
COLUMN pct_increase heading 'Pct|Increase'
COLUMN init heading 'Initial|Extent'
COLUMN next heading 'Next|Extent'
COLUMN partitioned format a15 heading 'Partitioned?'
BREAK on owner on tablespace_name
SET pages 48 lines 132 echo off

REM TITLE132 is a site-supplied title script; output lands in rep_out\db_tbl8.
START TITLE132 "ORACLE TABLE REPORT"
SPOOL rep_out\db_tbl8

REM Storage parameters for every non-dictionary table.
SELECT owner, tablespace_name, table_name, initial_extent init,
       next_extent NEXT, pct_increase, partitioned
  FROM sys.dba_tables
 WHERE owner NOT IN ('SYSTEM', 'SYS')
 ORDER BY owner, tablespace_name, table_name;
SPOOL off

REM Restore default session settings.
CLEAR columns
SET pages 22 lines 80
TTITLE off
CLEAR columns
CLEAR breaks
|
<reponame>marcocanopoli/laravel-boolpress<gh_stars>0
import Vue from 'vue'
import VueRouter from 'vue-router'
Vue.use(VueRouter)
import PageBlog from './pages/PageBlog.vue';
import PageBlogPost from './pages/PageBlogPost.vue';
import PageHome from './pages/PageHome.vue';
import PageAbout from './pages/PageAbout.vue';
import PageCategory from './pages/PageCategory.vue';
import PageTag from './pages/PageTag.vue';
import PageNotFound from './pages/PageNotFound.vue';
// Client-side routes for the blog front-end. History mode drops the '#'
// from URLs (requires server-side fallback to index.html).
const router = new VueRouter({
    mode: 'history',
    routes: [
        {
            path: '/',
            name: 'home',
            component: PageHome
        },
        {
            path: '/blog',
            name: 'blog',
            component: PageBlog
        },
        // Single post by slug.
        {
            path: '/blog/:slug',
            name: 'blog-post',
            component: PageBlogPost
        },
        // Post listings filtered by category / tag slug.
        {
            path: '/blog/category/:slug',
            name: 'category',
            component: PageCategory
        },
        {
            path: '/blog/tag/:slug',
            name: 'tag',
            component: PageTag
        },
        {
            path: '/about',
            name: 'about',
            component: PageAbout
        },
        // Catch-all 404 — must stay last.
        {
            path: '*',
            name: 'not-found',
            component: PageNotFound
        }
    ]
});

export default router;
|
def multiply(nums):
    """Return the products of every unordered pair, in first-index order."""
    return [first * second
            for i, first in enumerate(nums)
            for second in nums[i + 1:]]
|
# Colored output for BSD `ls` and CLI tools; oh-my-zsh completion spinner.
export LSCOLORS="exfxcxdxbxegedabagacad"
export CLICOLOR=true
export COMPLETION_WAITING_DOTS=true

# Autoload every custom function shipped in $ZSH/functions.
fpath=($ZSH/functions $fpath)
autoload -U $ZSH/functions/*(:t)

# Persistent shared history, 10k entries.
HISTFILE=~/.zsh_history
HISTSIZE=10000
SAVEHIST=10000

setopt NO_BG_NICE # don't nice background tasks
setopt NO_HUP
setopt NO_LIST_BEEP
setopt LOCAL_OPTIONS # allow functions to have local options
setopt LOCAL_TRAPS # allow functions to have local traps
setopt HIST_VERIFY
setopt SHARE_HISTORY # share history between sessions ???
setopt EXTENDED_HISTORY # add timestamps to history
setopt PROMPT_SUBST
setopt CORRECT
setopt COMPLETE_IN_WORD
setopt IGNORE_EOF
setopt APPEND_HISTORY # adds history
setopt INC_APPEND_HISTORY SHARE_HISTORY  # adds history incrementally and share it across sessions
setopt HIST_IGNORE_ALL_DUPS  # don't record dupes in history
setopt HIST_REDUCE_BLANKS

# don't expand aliases _before_ completion has finished
#   like: git comm-[tab]
setopt complete_aliases

# Key bindings: Alt+arrows for word motion, Ctrl-style arrows for line
# start/end, plus sane Delete/Backspace behavior.
bindkey '^[^[[D' backward-word
bindkey '^[^[[C' forward-word
bindkey '^[[5D' beginning-of-line
bindkey '^[[5C' end-of-line
bindkey '^[[3~' delete-char
bindkey '^?' backward-delete-char
|
#!/usr/bin/env bash
# Fetch and build ab-mruby inside ./build, reusing an existing checkout.
source default-build-config
set -eux

CUR_DIR=$(pwd)
# NOTE(review): MAKE_DIR is unused here — presumably consumed by
# default-build-config or the Makefile; confirm before removing.
MAKE_DIR=${CUR_DIR}/../
BUILD_DIR=${CUR_DIR}/build

# mkdir -p is idempotent, replacing the old `test -e || mkdir` dance.
mkdir -p "${BUILD_DIR}"

# BUG FIX: the original `cd X && test -e ab-mruby || git clone ...` would
# run the clone in the WRONG directory whenever the cd itself failed.
# Separate the steps so a failed cd aborts (set -e) instead.
cd "${BUILD_DIR}"
if [ ! -e ab-mruby ]; then
  git clone --recursive https://github.com/matsumoto-r/ab-mruby.git
fi
cd ab-mruby
make
|
<filename>js/consortium.js<gh_stars>0
// Populates the library selector from the kirjastot.fi API and reloads the
// page content when the user picks another library.
// NOTE(review): relies on page-level globals (lang, city, consortium,
// library, divClone, the various *IsEmpty flags and the bind*/fetch*/
// getWeekSchelude helpers) defined elsewhere — confirm load order.
$(document).ready(function() {
    var libraryList = [];

    // Sort the collected libraries by name, fill the <select>, preselect the
    // current library and initialise the niceSelect widget.
    function finalizeSelect() {
        // Sort alphabetically. https://stackoverflow.com/questions/6712034/sort-array-by-firstname-alphabetically-in-javascript
        libraryList.sort(function(a, b){
            var nameA=a.name.toLowerCase(), nameB=b.name.toLowerCase();
            if (nameA < nameB) //sort string ascending
                return -1;
            if (nameA > nameB)
                return 1;
            return 0; //default return value (no sorting)
        });
        // Add items to the list
        for (var i=0; i<libraryList.length; i++) {
            var x = document.getElementById("librarySelector");
            var option = document.createElement("option");
            option.text = libraryList[i].name;
            option.value = libraryList[i].id;
            x.add(option, x[i]);
        }
        // Set selected & init niceSelect
        $("#librarySelector option[value='" + library + "']").attr("selected","selected");
        setTimeout(function(){
            $('#librarySelector').niceSelect();
            $('#librarySelectorContainer').addClass("always-visible");
        }, 50);
    }

    // Fetch libraries of city, that belong to the same consortium
    if(consortium !== undefined && city !== undefined) {
        $.getJSON("https://api.kirjastot.fi/v3/organisation?lang=" + lang + "&city.name=" + city, function(data) {
            for (var i=0; i<data.items.length; i++) {
                // Ignore mobile libraries & other consortiums.
                if(data.items[i].branch_type !== "mobile" && data.items[i].consortium == consortium) {
                    libraryList.push({name: data.items[i].name, id: data.items[i].id});
                }
            }
            finalizeSelect();
        });
    }
    // Fetch libraries of city
    else if(consortium === undefined && city !== undefined) {
        $.getJSON("https://api.kirjastot.fi/v3/organisation?lang=" + lang + "&city.name=" + city, function(data) {
            for (var i=0; i<data.items.length; i++) {
                // Ignore mobile libraries
                if(data.items[i].branch_type !== "mobile") {
                    libraryList.push({name: data.items[i].name, id: data.items[i].id});
                }
            }
            finalizeSelect();
        });
    }
    // Fetch libraries of consortium
    else if(consortium !== undefined && city === undefined) {
        $.getJSON("https://api.kirjastot.fi/v3/organisation?lang=" + lang + "&consortium=" + consortium, function(data) {
            for (var i=0; i<data.items.length; i++) {
                if(data.items[i].branch_type !== "mobile") {
                    libraryList.push({name: data.items[i].name, id: data.items[i].id});
                }
            }
            finalizeSelect();
        });
    }

    // On library change: restore the pristine DOM, reset page state, refetch
    // all content for the new library and re-bind the UI handlers.
    $("#librarySelector").change(function(){
        $("#pageContainer").replaceWith(divClone.clone()); // Restore main with a copy of divClone
        // Reset variables.
        accessibilityIsEmpty = true;
        transitIsEmpty = true;
        descriptionIsEmpty = true;
        transitAccessibilityTextSet = false;
        mapLoaded = false;
        sliderNeedsToRestart = true;
        contactsIsEmpty = true;
        noServices = true;
        indexItemClicked = false;
        isReFetching = false;
        // Set the global library parameter, so schedule switching won't mess things up.
        library = $(this).val();
        // Fetch data
        getWeekSchelude(0, library);
        fetchInformation(lang, $(this).val());
        fetchImagesAndSocialMedia($(this).val());
        // Re-bind navigation and other stuff.
        bindActions();
        bindScheduleKeyNavigation();
        // Add swiping detection for schedules & sliderbox if available.
        detectswipe("schedules", swipeNavigation);
        if(document.getElementById("sliderBox") != null) {
            detectswipe("sliderBox", swipeNavigation);
        }
    });
}); // OnReady
|
<reponame>zettca/pacex2
import React from 'react';
import DataStore from '../stores/DataStore';
// Displays the current pace from DataStore, formatted as min/km (or min/mi)
// — e.g. "05:30/km" — and re-renders on store changes.
class InputPace extends React.Component {
    constructor(props) {
        super(props);
        this.state = {
            units: DataStore.getUnits(),
            input: DataStore.getPace(),
        };
    }

    // NOTE(review): componentWillMount is deprecated in modern React —
    // consider componentDidMount for the listener registration.
    componentWillMount() {
        this.handleDataChange = this.handleDataChange.bind(this);
        DataStore.on('changed', this.handleDataChange);
    }

    componentWillUnmount() {
        DataStore.removeListener('changed', this.handleDataChange);
    }

    // NOTE(review): this merges getPace()'s object into the TOP level of
    // state, while the constructor stores it under `input` (which render
    // reads). Looks inconsistent — confirm DataStore.getPace()'s shape.
    handleDataChange() {
        this.setState(DataStore.getPace());
    }

    // Convert seconds-per-meter pace into a zero-padded "MM:SS/unit" string.
    parsePace(pace) {
        const f00 = (n) => (n < 10) ? '0' + n : String(n);
        const { units } = this.state;
        // 1610 m per mile vs 1000 m per km.
        const multi = (units === 'mi') ? 1610 : 1000;
        const mpk = pace * (multi / 60);
        return [mpk, (mpk % 1) * 60].map(Math.floor).map(f00).join(':') + '/' + units;
    }

    // Convert seconds-per-meter pace into "<speed> unit/h" (1 decimal).
    parseSpeed(pace) {
        const { units } = this.state;
        const mps = 1 / pace; // meter/sec
        const kph = mps * (3600 / 1000);
        return `${kph.toFixed(1)} ${units}/h`;
    }

    // Pace formatting is currently hard-enabled; parseSpeed is kept for a
    // future toggle.
    formatValue(value) {
        const isPace = true;
        return (isPace) ? this.parsePace(value) : this.parseSpeed(value);
    }

    render() {
        const { value } = this.state.input;
        return (
            <section className='bordered'>
                <h2>{`Pace ${this.formatValue(value)}`}</h2>
            </section>
        );
    }
}

export default InputPace;
|
from . util import _get_Z, _get_name, _get_isotopes
class Element:
    """A chemical element identified by name or atomic number Z.

    Indexing with a mass number ``A`` returns that isotope; iterating
    yields the element's isotopes in order of neutron number ``N``.
    """

    def __init__(self, constructor):
        # Accept either an element name (str) or an atomic number (int/float).
        # isinstance() is preferred over type() == so subclasses also work.
        if isinstance(constructor, str):
            self.Z = _get_Z(constructor)
            self.name = constructor
        elif isinstance(constructor, (int, float)):
            self.name = _get_name(int(constructor))
            self.Z = int(constructor)
        else:
            raise TypeError("Unknown type of nuclide specifier")
        self.isotopes = sorted(_get_isotopes(self.Z), key=lambda nu: nu.N)
        self.isomers = sorted(_get_isotopes(self.Z, isomer=True), key=lambda nu: nu.N)  # TODO: This is failing!
        self._first_avail = self.isotopes[0].A  # lowest available mass number
        self.n_nuclides = len(self.isotopes)
        self._idx = 0  # iteration cursor for __next__

    def __getitem__(self, A):
        """Return the isotope with mass number ``A``.

        Raises:
            ValueError: if no isotope with that mass number is known.
        """
        for nuc in self.isotopes:
            if nuc.A == A:
                return nuc
        raise ValueError('No data for this nuclide available')

    def __iter__(self):
        # BUG FIX: reset the cursor so the element can be iterated more
        # than once (previously a second iteration yielded nothing).
        self._idx = 0
        return self

    def __next__(self):
        if self._idx < self.n_nuclides:
            res = self.isotopes[self._idx]
            self._idx += 1
            return res
        raise StopIteration
|
<reponame>Himenon/dependents-view
import { View, OriginLibrary } from "@app/interface";
// Type guard: of the three union members, only View.Library is both
// truthy and not an array.
export const isViewLibrary = (displayLibrary: View.Library | OriginLibrary[] | undefined): displayLibrary is View.Library => {
  return Boolean(displayLibrary) && !Array.isArray(displayLibrary);
};
|
def find_second_largest(arr):
    """Return the second-largest distinct value in ``arr``.

    If no second distinct value exists (all elements equal, or a single
    element), returns ``-inf``, matching the original behavior.

    Raises:
        ValueError: if ``arr`` is empty (previously this crashed with an
        opaque ``IndexError``).
    """
    if not arr:
        raise ValueError("find_second_largest() requires a non-empty sequence")
    largest = arr[0]
    second_largest = -float('inf')
    for num in arr:
        if num > largest:
            # Previous maximum becomes the runner-up.
            second_largest = largest
            largest = num
        elif second_largest < num < largest:
            # Strictly between: duplicates of the maximum are ignored.
            second_largest = num
    return second_largest
|
package cassandra;
import com.datastax.driver.mapping.annotations.PartitionKey;
import com.datastax.driver.mapping.annotations.Table;
/**
 * Entity mapped to the Cassandra "beer" table via the DataStax object mapper.
 * Fields are public because the mapper accesses them directly.
 */
@Table(name = "beer")
public class Beer {
// Partition key column: uniquely identifies the row's partition.
@PartitionKey
public String id;
public String name;
public String getId() {
return id;
}
public void setId(final String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
}
|
// NOTE(review): Function.prototype's `length` and `name` properties are
// non-writable, so in sloppy mode these assignments fail SILENTLY and
// parseFloat is left unchanged (in strict mode they would throw a
// TypeError). Presumably this is conformance-test fixture code — confirm
// intent before removing.
parseFloat.length = {};
parseFloat.name = {};
|
// interaction with the graph
// set up svg
// click location
// click label
var svgMode = false; // true once an SVG graph has been rendered (used to decide whether to clear before redrawing)
// Click-selection buffer; reset via clearBuffer().
var automataIndex;
var stateIndex;
var labelIndex;
// Record that an SVG graph is now present in the DOM.
function setSvgMode() {
    svgMode = true;
}
// Forget the current click selection (automaton, state and label indices).
function clearBuffer() {
    automataIndex = null;
    stateIndex = null;
    labelIndex = null;
}
// Drawing-surface dimensions in pixels.
var width = 640,
height = 740;
// D3 handles shared by drawAutomata*() and restart():
var svg;      // root <g> inside the <svg> element
var nodes;    // array of node objects ({name, ...}); mutated IN PLACE by the update/delete helpers
var links;    // array of link objects ({source, target, type, probability?}); also mutated in place
var force;    // d3.layout.force simulation bound to nodes/links
var circle;   // selection of node circles
var text;     // selection of node labels
var path;     // selection of link paths
var linktext; // selection of link probability labels
// public methods:
/*
drawAutomata()
restart()
sets gets
updateLocationNode
*/
// function called by app.js
// Rename location `locationName` to `locationNewName`.
// Strategy: replace the location node and each child edge node
// ("<location>.<edge>") with renamed deep copies, then rebuild every link
// touching a replaced node so its endpoints reference the NEW node objects
// (d3.force relies on object identity, so stale endpoint references cannot
// simply be renamed in place). Finishes with restart() to re-run the joins.
function updateLocationNode(locationName, locationNewName){
//console.log(locationNewName);
// Replace the location node itself with a renamed deep copy.
var pos1 = nodes.map(function(e){return e.name}).indexOf(locationName);
var new_node = JSON.parse(JSON.stringify(nodes[pos1]));
new_node.name = locationNewName;
nodes.splice(pos1, 1);
nodes.push(new_node);
// Collect the indices of all child nodes prefixed "<locationName>.".
var indices = [];
var array = nodes.map(function(e){return e.name.split(".")[0]});
var idx = array.indexOf(locationName);
while (idx != -1) {
indices.push(idx);
idx = array.indexOf(locationName, idx + 1);
}
// Splice from the highest index down so earlier indices stay valid.
indices.reverse().forEach(function(pos){
var new_node = JSON.parse(JSON.stringify(nodes[pos]));
new_node.name = locationNewName + "." + new_node.name.split(".")[1];
nodes.splice(pos, 1);
nodes.push(new_node);
});
// Rebuild affected links: source/target flags record whether an endpoint
// was the location itself (1) or one of its children (2).
var links_to_remove = [];
var links_to_add = [];
links.forEach(function(link, index){
var remove_flag = false;
var source_flag = 0;
var target_flag = 0;
if (link.source.name == locationName){
remove_flag = true;
source_flag = 1;
//link.source = new_node;
}
else if(link.source.name.split(".")[0] == locationName){
remove_flag = true;
source_flag = 2;
//var pos = nodes.map(function(e){return e.name}).indexOf(locationNewName + "." + link.source.name.split(".")[1]);
//link.source = nodes[pos];
}
if (link.target.name == locationName){
remove_flag = true;
target_flag = 1;
//link.target = new_node;
}
else if(link.target.name.split(".")[0] == locationName){
remove_flag = true;
target_flag = 2;
//var pos = nodes.map(function(e){return e.name}).indexOf(locationNewName + "." + link.target.name.split(".")[1]);
//link.target = nodes[pos];
}
if(remove_flag){
// Schedule the stale link for removal and append a replacement whose
// endpoints point at the renamed node objects.
links_to_remove.push(index);
var new_link = new Object();
new_link.type = link.type;
if (link.probability != null){
new_link.probability = link.probability;
}
var new_source = new Object();
var new_target = new Object();
if(source_flag == 1){
new_source = new_node;
}
else if(source_flag == 2){
var pos = nodes.map(function(e){return e.name}).indexOf(locationNewName + "." + link.source.name.split(".")[1]);
new_source = nodes[pos];
}
else {
new_source = link.source;
}
if(target_flag == 1){
new_target = new_node;
}
else if(target_flag == 2){
var pos = nodes.map(function(e){return e.name}).indexOf(locationNewName + "." + link.target.name.split(".")[1]);
new_target = nodes[pos];
}
else{
new_target = link.target;
}
new_link.source = new_source;
new_link.target = new_target;
//var new_link = JSON.parse(JSON.stringify(link));
//console.log(JSON.stringify(link));
links.push(new_link);
}
});
// Remove stale links from the highest index down.
links_to_remove.reverse().forEach(function(pos){
links.splice(pos, 1);
});
//links.push.apply(links, links_to_add);
restart();
}
// Remove the location node `locationName`, all of its child edge nodes
// ("<location>.<edge>"), and every link that touches any of them.
// Mutates `nodes` and `links` IN PLACE so the force layout keeps seeing
// the same arrays, then re-runs the data joins via restart().
function deleteLocationNode(locationName) {
    // Drop the location node itself.
    var mainPos = nodes.map(function(e) { return e.name; }).indexOf(locationName);
    nodes.splice(mainPos, 1);
    // Drop all child nodes whose name prefix (before the first dot)
    // matches the location; iterate backwards so indices stay valid.
    for (var i = nodes.length - 1; i >= 0; i--) {
        if (nodes[i].name.split(".")[0] == locationName) {
            nodes.splice(i, 1);
        }
    }
    // Drop every link with an endpoint at the location or one of its children.
    for (var j = links.length - 1; j >= 0; j--) {
        var link = links[j];
        if (link.source.name == locationName ||
            link.source.name.split(".")[0] == locationName ||
            link.target.name == locationName ||
            link.target.name.split(".")[0] == locationName) {
            links.splice(j, 1);
        }
    }
    restart();
}
// Append a fresh, unlinked location node named `locationAddName`
// and re-run the data joins.
function addLocationNode(locationAddName) {
    nodes.push({ name: locationAddName });
    restart();
}
// Rename edge node `edgeName` ("<location>.<edge>") to
// "<location>.<edgeNewName>". As in updateLocationNode(), the node is
// replaced by a renamed deep copy and every link touching it is rebuilt so
// its endpoints reference the NEW node object (object identity matters to
// the force layout). Ends with restart() to re-run the data joins.
function updateEdgeNode(edgeName, edgeNewName){
// Replace the edge node with a renamed deep copy (keep the location prefix).
var pos1 = nodes.map(function(e){return e.name}).indexOf(edgeName);
var new_node = JSON.parse(JSON.stringify(nodes[pos1]));
new_node.name = edgeName.split(".")[0] + "." + edgeNewName;
nodes.splice(pos1, 1);
nodes.push(new_node);
var links_to_remove = [];
var links_to_add = [];
links.forEach(function(link, index){
// Flags record which endpoint(s) referenced the renamed node.
var remove_flag = false;
var source_flag = 0;
var target_flag = 0;
if (link.source.name == edgeName){
remove_flag = true;
source_flag = 1;
//link.source = new_node;
}
if (link.target.name == edgeName){
remove_flag = true;
target_flag = 1;
//link.target = new_node;
}
if(remove_flag){
// Schedule the stale link for removal and append a replacement that
// points at the renamed node object.
links_to_remove.push(index);
var new_link = new Object();
new_link.type = link.type;
if (link.probability != null){
new_link.probability = link.probability;
}
var new_source = new Object();
var new_target = new Object();
if(source_flag == 1){
new_source = new_node;
}
else {
new_source = link.source;
}
if(target_flag == 1){
new_target = new_node;
}
else{
new_target = link.target;
}
new_link.source = new_source;
new_link.target = new_target;
//var new_link = JSON.parse(JSON.stringify(link));
//console.log(JSON.stringify(link));
links.push(new_link);
}
});
// Remove stale links from the highest index down so indices stay valid.
links_to_remove.reverse().forEach(function(pos){
links.splice(pos, 1);
});
//links.push.apply(links, links_to_add);
restart();
}
// Add an edge node named "<location>.<edge>" plus a type-0 link from its
// parent location node (the part of the name before the first dot).
function addEdgeNode(edgeAddName) {
    var edgeNode = { name: edgeAddName };
    nodes.push(edgeNode);
    // Locate the parent location node by name prefix.
    var parentPos = nodes.map(function(e) { return e.name; }).indexOf(edgeAddName.split(".")[0]);
    links.push({ type: 0, source: nodes[parentPos], target: edgeNode });
    restart();
}
// Remove the edge node `edgeName` and every link attached to it.
// Mutates `nodes`/`links` in place, then re-runs the data joins.
function deleteEdgeNode(edgeName) {
    var pos = nodes.map(function(e) { return e.name; }).indexOf(edgeName);
    nodes.splice(pos, 1);
    // Iterate backwards so splicing does not shift unvisited indices.
    for (var i = links.length - 1; i >= 0; i--) {
        if (links[i].source.name == edgeName || links[i].target.name == edgeName) {
            links.splice(i, 1);
        }
    }
    restart();
}
// Replace all outgoing links of edge node `edgeName` with new
// probability-weighted (type 1) links. `destinationList` is a flat array of
// [targetName, probability, targetName, probability, ...] pairs — presumably
// built by the app.js caller; TODO confirm the pair ordering.
function editEdgeNode(edgeName, destinationList){
var pos = nodes.map(function(e){return e.name}).indexOf(edgeName);
// Collect and remove every link currently leaving this edge node.
var links_to_remove = [];
links.forEach(function(link, index){
if (link.source.name == edgeName){
links_to_remove.push(index);
}
});
// Splice from the highest index down so indices stay valid.
links_to_remove.reverse().forEach(function(pos){
links.splice(pos, 1);
});
// a trick
// NOTE(review): restart() is invoked twice — once after the removal and
// once after the additions; presumably needed so the exit selection is
// flushed before the new links enter. Confirm before consolidating.
restart();
// Walk the (name, probability) pairs from the end of the list.
var index = destinationList.length/2;
while (index > 0){
var new_link = new Object();
new_link.type = 1;
new_link.source = nodes[pos];
new_link.probability = destinationList[2*index-1];
var target_pos = nodes.map(function(e){return e.name}).indexOf(destinationList[2*index-2]);
new_link.target = nodes[target_pos];
links.push(new_link);
--index;
}
restart();
}
// Build the global `nodes`/`links` arrays for `automatonName` from `data`
// (via the external getEdges()/getLocations() helpers) and render the graph.
// On any failure while deriving links the graph falls back to empty arrays.
function drawAutomata(data, automatonName) {
console.log("show: " + automatonName);
var result = getEdges(data, automatonName);
var A_links = result.actions;
var B_links = result.links;
try{
links = A_links.concat(B_links);
// Temporarily use `nodes` as a name->object map while deduplicating.
nodes = {};
// Compute the distinct nodes from the links.
links.forEach(function(link) {
// Replace the string endpoint with a shared node object, creating it
// on first sight so both directions reference the same instance.
link.source = nodes[link.source] || (nodes[link.source] = {name: link.source});
link.target = nodes[link.target] || (nodes[link.target] = {name: link.target});
});
// Convert the map back into the array form the force layout expects.
nodes = d3.values(nodes);
}
catch(e){
links = [];
nodes = [];
}
// add nodes that have no links
try{
// NOTE(review): `locations` is assigned without var/let — it leaks into
// the global scope. Confirm nothing else depends on it before scoping.
locations = getLocations(data, automatonName);
locations.forEach(function(location){
var pos = nodes.map(function(e){return e.name}).indexOf(location.name);
if (pos == -1){
var new_node = new Object();
new_node.name = location.name;
nodes.push(new_node);
}
});
}
catch(e){
}
//console.log(links);
//console.log(nodes);
drawAutomata1();
}
// Clear any previously rendered graph, flag SVG mode, then draw.
function drawAutomata1() {
    if (svgMode) {
        // clear() lives in dom.js; removes the old <svg> from .graph.
        clear("svg", "graph");
    }
    setSvgMode();
    drawAutomata2();
}
// Render the automaton with a d3 (v3) force layout: creates the <svg>,
// arrow markers, link paths, probability labels, node circles and node
// labels, and wires up zoom, drag and the per-tick position updates.
// Reads the module-level `nodes`/`links`/`width`/`height`; writes the
// module-level `force`/`svg`/`path`/`linktext`/`circle`/`text` selections
// that restart() later re-joins.
function drawAutomata2() {
// Force simulation: short/strong links for type 0 (location->edge),
// longer/looser links for type 1 (probabilistic transitions).
force = d3.layout.force()
.nodes(nodes)
.links(links)
.size([width, height])
.linkDistance(function(d) {
if (d.type == 0)
return 20;
else
return 60;
})
.linkStrength(function(d) {
if (d.type == 0)
return 1;
else
return 0.5;
})
.charge(-200)
.on("tick", tick)
.start();
// Root SVG with a zoomable <g>.
svg = d3.select("#app-body .graph").append("svg")
.attr("id", "svg")
.attr("width", width)
.attr("height", height)
.append("g")
.call(d3.behavior.zoom().scaleExtent([1, 8]).on("zoom", zoom));
function zoom() {
svg.attr("transform", "translate(" + d3.event.translate + ")scale(" + d3.event.scale + ")");
}
// Transparent rect that catches zoom/pan gestures on empty space.
svg.append("rect")
.attr("class", "overlay")
.attr("width", width)
.attr("height", height);
// Per-type markers, as they don't inherit styles.
// NOTE(review): the trailing .text() on a marker path has no visible
// effect (markers render geometry, not text) — confirm before removing.
svg.append("defs").selectAll("marker")
.data(["suit"])
.enter().append("marker")
.attr("id", function(d) { return d; })
.attr("viewBox", "0 -5 10 10")
.attr("refX", 15)
.attr("refY", -1.5)
.attr("markerWidth", 6)
.attr("markerHeight", 6)
.attr("orient", "auto")
.append("path")
.attr("d", "M0,-5L10,0L0,5")
.text(function(d) { return d.name; });
// Link paths, keyed "source.target"; only type-1 links get an arrowhead.
path = svg.append("g").selectAll("path")
.data(links, function(d) { return d.source.name + "." + d.target.name; })
.enter().append("path")
.attr("class", function(d) { return "link " + "suit"; })
.attr("id",function(d) { console.log(d.source.name + "." + d.target.name); return "linkId_" + d.source.name + "." + d.target.name; })
.attr("marker-end", function(d) {
if (d.type == 1)
return "url(#" + "suit" + ")";
else
return;
});
// Probability labels, rendered along each link's path via <textPath>.
linktext = svg.append("g").selectAll("linklabelholder").data(links, function(d) { return d.source.name + "." + d.target.name; })
.enter().append("g").attr("class", "linklabelholder")
.append("text")
.attr("class", "linklabel")
.style("font-size", "8")
.attr("dx", "30")
.attr("text-anchor", "start")
.style("fill","#000")
.append("textPath")
.attr("xlink:href",function(d) { return "#linkId_" + d.source.name + "." + d.target.name;})
.text(function(d) {
return d.probability;
});
// Node circles: big (r=8) for locations ("name"), small (r=3) for edge
// nodes ("location.edge").
circle = svg.append("g").selectAll("circle")
.data(nodes, function(d) { return d.name; })
.enter().append("circle")
.attr("r", function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return 8;
else
return 3;
})
.call(force.drag)
.on("mousedown", function() { d3.event.stopPropagation(); });
// Node labels: locations show their full name, edge nodes only the suffix.
text = svg.append("g").selectAll("text")
.data(nodes, function(d) { return d.name; })
.enter().append("text")
.attr("x", "8")
.attr("y", ".31em")
.text(function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return d.name;
else
return res[1];
});
// Use elliptical arc path segments to doubly-encode directionality.
function tick() {
path.attr("d", linkArc);
circle.attr("transform", transform);
text.attr("transform", transform);
}
// Straight lines for type-0 links; arcs for type-1, with a tighter loop
// when a transition returns to its own parent location.
function linkArc(d) {
var dx = d.target.x - d.source.x,
dy = d.target.y - d.source.y,
dr = Math.sqrt(dx * dx + dy * dy);
var dx1 = d.source.x,
dx2 = d.target.x,
dy1 = d.source.y,
dy2 = d.target.y;
if (d.type == 0){
return "M" + dx1 + "," + dy1 + "L" + dx2 + "," + dy2;
}
else{
var str = d.source.name;
var res = str.split(".");
if (d.target.name == res[0]){
return "M" + dx1 + "," + dy1 + "A" + dr/2 + "," + dr/2 + " 0 1,1 " + dx2 + "," + dy2;
}
else{
return "M" + dx1 + "," + dy1 + "A" + dr + "," + dr + " 0 0,1 " + dx2 + "," + dy2;
}
}
}
function transform(d) {
return "translate(" + d.x + "," + d.y + ")";
}
}
// untested helper functions
// Return the live node array (a reference, not a copy).
function getNodes(){
return nodes;
}
// Return the live link array (a reference, not a copy).
function getLinks(){
return links;
}
// Replace the node array wholesale (callers must re-run the joins themselves).
function setNodes(new_nodes){
nodes = new_nodes;
}
// Replace the link array wholesale (callers must re-run the joins themselves).
function setLinks(new_links){
links = new_links;
}
/*
// tested helper functions
var linkDistance = function(d) {
return (d.type == 0) ? 20 : 60;
};
var linkStrength = function(d) {
return (d.type == 0) ? 1 : 0.5;
};
var circleR = function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return 8;
else
return 3;
};
var textR = function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return d.name;
else
return res[1];
}; */
// public function: very important
// Re-run the d3 (v3) data joins after `nodes`/`links` have been mutated:
// for each of the circle/text/path/linktext selections, re-bind the data
// (keyed by name), append entering elements, remove exiting ones, then
// kick the force simulation so positions settle again.
// NOTE(review): the "update existing" branches use e.g.
// circle.selectAll("circle"), which selects circle DESCENDANTS of the
// bound circles — i.e. nothing — so those attr/style calls appear to be
// no-ops; only the enter/exit branches take effect. Confirm before
// relying on restyling of surviving elements.
function restart(){
// add a circle
/* var m2 = new Object();
m2.name = "m2";
nodes.push(m2); */
//nodes.splice(0, 1);
// --- node circles ---
circle = circle.data(nodes, function(d) { return d.name; });
circle.selectAll("circle")
.attr("r", function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return 8;
else
return 3;
})
.call(force.drag);
// Entering circles: r=8 for locations, r=3 for edge nodes.
circle
.enter()
.append("circle")
.attr("r", function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return 8;
else
return 3;
})
.call(force.drag);
circle.exit().remove();
// --- node labels ---
text = text.data(nodes, function(d) { return d.name; });
text.selectAll("text")
.attr("x", "8")
.attr("y", ".31em")
.text(function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return d.name;
else
return res[1];
});
// Entering labels: full name for locations, suffix only for edge nodes.
text
.enter().append("text")
.attr("x", "8")
.attr("y", ".31em")
.text(function(d) {
var str = d.name;
var res = str.split(".");
if(res.length == 1)
return d.name;
else
return res[1];
});
text.exit().remove();
/* var l2 = new Object();
l2.type = 1;
l2.probability = 0;
l2.source = nodes[1];
l2.target = nodes[9];
links.push(l2); */
//links.splice(0, 1);
// --- link paths (keyed "source.target") ---
path = path.data(links, function(d) { console.log(d.source.name + "." + d.target.name); return d.source.name + "." + d.target.name; });
path.selectAll("path")
.attr("class", function(d) { return "link " + "suit"; })
.attr("id",function(d,i) { console.log(d.source.name + "." + d.target.name); return "linkId_" + d.source.name + "." + d.target.name; })
.attr("marker-end", function(d) {
if (d.type == 1)
return "url(#" + "suit" + ")";
else
return;
});
// Entering paths: only type-1 links receive an arrowhead marker.
path.enter().append("path")
.attr("class", function(d) { return "link " + "suit"; })
.attr("id",function(d,i) { console.log(d.source.name + "." + d.target.name); return "linkId_" + d.source.name + "." + d.target.name; })
.attr("marker-end", function(d) {
if (d.type == 1)
return "url(#" + "suit" + ")";
else
return;
});
path.exit().remove();
// --- probability labels rendered along their link via <textPath> ---
linktext = linktext.data(links, function(d) { return d.source.name + "." + d.target.name; });
linktext.selectAll("linklabelholder")
.attr("class", "linklabelholder")
.append("text")
.attr("class", "linklabel")
.style("font-size", "8")
.attr("dx", "30")
.attr("text-anchor", "start")
.style("fill","#000")
.append("textPath")
.attr("xlink:href",function(d,i) { return "#linkId_" + d.source.name + "." + d.target.name;})
.text(function(d) {
return d.probability;
});
linktext.enter().append("g")
.attr("class", "linklabelholder")
.append("text")
.attr("class", "linklabel")
.style("font-size", "8")
.attr("dx", "30")
.attr("text-anchor", "start")
.style("fill","#000")
.append("textPath")
.attr("xlink:href",function(d,i) { return "#linkId_" + d.source.name + "." + d.target.name;})
.text(function(d) {
return d.probability;
});
linktext.exit().remove();
// Re-heat the simulation so the new topology settles into place.
force.start();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.