text stringlengths 1 1.05M |
|---|
// Compares two dot-separated version strings component by component,
// treating missing components as 0 (so "1.2" equals "1.2.0") and
// non-numeric components as 0 as well.
// - Returns: a human-readable description of the ordering.
func compareVersions(_ version1: String, _ version2: String) -> String {
    let parts1 = version1.components(separatedBy: ".")
    let parts2 = version2.components(separatedBy: ".")
    var index = 0
    while index < max(parts1.count, parts2.count) {
        let left = index < parts1.count ? Int(parts1[index]) ?? 0 : 0
        let right = index < parts2.count ? Int(parts2[index]) ?? 0 : 0
        if left != right {
            return left < right
                ? "Version 1 is less than Version 2"
                : "Version 1 is greater than Version 2"
        }
        index += 1
    }
    return "Version 1 is equal to Version 2"
}
// Test cases (each line's expected output shown in the trailing comment)
print(compareVersions("1.2.3", "1.2.4")) // Output: "Version 1 is less than Version 2"
print(compareVersions("1.2.3", "1.2.3")) // Output: "Version 1 is equal to Version 2"
print(compareVersions("1.2.4", "1.2")) // Output: "Version 1 is greater than Version 2"
# WARNING: Please make this shell not working-directory dependant, for example
# instead of using 'ls blabla', use 'ls "${REPO_DIR}/blabla"'
#
# WARNING: Don't use "cd" in this shell, use it in a subshell instead,
# for example ( cd blabla && do_blabla ) or $( cd .. && do_blabla )
###############################################################################
# VARIABLES #
###############################################################################
source "${REPO_DIR}/lib-core.sh"
source "${REPO_DIR}/lib-flatpak.sh"
WHITESUR_SOURCE+=("lib-install.sh")
###############################################################################
# DEPENDENCIES #
###############################################################################
# Be careful of some distro mechanism, some of them use rolling-release
# based installation instead of point-release, e.g., Arch Linux
# Rolling-release based distros don't have a separate repo for each different
# build. This can cause a system call error, since an app requires compatible
# versions of its dependencies. In other words, if you install a new app
# (which definitely reinstalls/upgrades that app's dependencies) while your
# other dependencies are old/expired, you'll end up with a broken system.
# That's why we need a full system upgrade there
#---------------------SWUPD--------------------#
# 'swupd' bundles just don't make any sense. It takes about 30GB of space only
# for installing a util, e.g. 'sassc' (from 'desktop-dev' bundle, or
# 'os-utils-gui-dev' bundle, or any other 'sassc' provider bundle)
# Manual package installation is needed for that, but please don't use 'dnf'.
# The known worst impact of using 'dnf' is you install 'sassc' and then you
# remove it, and you run 'sudo dnf upgrade', and boom! Your 'sudo' and other
# system utilities have gone!
#----------------------APT---------------------#
# Some apt versions don't update the repo lists before installing an app.
# This may cause "unable to fetch..." errors when you're trying to install them
#--------------------PACMAN--------------------#
# 'Syu' (with a single y) may cause "could not open ... decompression failed"
# and "target not found <package>". We got to force 'pacman' to update the repos
#--------------------OTHERS--------------------#
# Sometimes, some Ubuntu distro doesn't enable automatic time. This can cause
# 'Release file for ... is not valid yet'. This may also happen on other distros
#============================================#
#-------------------Prepare------------------#
# Warn that this distro has no automated dependency handling, then give the
# user a 15-second animated countdown to abort (Ctrl+C) before continuing.
installation_sorry() {
prompt -w "WARNING: We're sorry, your distro isn't officially supported yet."
prompt -i "INSTRUCTION: Please make sure you have installed all of the required dependencies. We'll continue the installation in 15 seconds"
prompt -i "INSTRUCTION: Press 'ctrl'+'c' to cancel the installation if you haven't install them yet"
start_animation; sleep 15; stop_animation
}
# Check internet connectivity by fetching the current UTC epoch time, and
# correct a badly skewed system clock (a wrong clock breaks APT with
# "Release file for ... is not valid yet"). Exits with status 1 when the
# remote time cannot be fetched.
prepare_deps() {
local remote_time=""
local local_time=""
prompt -i "DEPS: Checking your internet connection..."
# Local clock, as seconds since the epoch (UTC)
local_time="$(date -u "+%s")"
if ! remote_time="$(get_utc_epoch_time)"; then
prompt -e "DEPS ERROR: You have an internet connection issue\n"; exit 1
fi
# 5 minutes is the maximum reasonable time delay, so we choose '4' here just
# in case
if (( local_time < remote_time-(4*60) )); then
prompt -w "DEPS: Your system clock is wrong"
prompt -i "DEPS: Updating your system clock..."
# Add "+ 25" here to accommodate potential time delay by the sudo prompt
sudo date -s "@$((remote_time + 25))"; sudo hwclock --systohc
fi
}
# One-time Clear Linux preparation: optionally remove 'dnf' (dangerous on
# Clear Linux — see the SWUPD note above the function group), repair a broken
# 'swupd' by regenerating /usr/lib/os-release, run a full update, and make
# sure 'bsdtar' is available for manual package extraction.
# Guarded by the global ${swupd_prepared} so it only runs once per session.
prepare_swupd() {
[[ "${swupd_prepared}" == "true" ]] && return 0
local remove=""
local ver=""
local conf=""
local dist=""
if has_command dnf; then
prompt -w "CLEAR LINUX: You have 'dnf' installed in your system. It may break your system especially when you remove a package"
confirm remove "CLEAR LINUX: You wanna remove it?"; echo
fi
# 'swupd update' fails when the distro version detection is broken; rebuild
# the os-release file with the latest version number and retry
if ! sudo swupd update -y; then
ver="$(curl -s -o - "${swupd_ver_url}")"
dist="NAME=\"Clear Linux OS\"\nVERSION=1\nID=clear-linux-os\nID_LIKE=clear-linux-os\n"
dist+="VERSION_ID=${ver}\nANSI_COLOR=\"1;35\"\nSUPPORT_URL=\"https://clearlinux.org\"\nBUILD_ID=${ver}"
prompt -w "\n CLEAR LINUX: Your 'swupd' is broken"
prompt -i "CLEAR LINUX: Patching 'swupd' distro version detection and try again...\n"
sudo rm -rf "/etc/os-release"; echo -e "${dist}" | sudo tee "/usr/lib/os-release" > /dev/null
sudo ln -s "/usr/lib/os-release" "/etc/os-release"
sudo swupd update -y
fi
# 'bsdtar' is required by install_swupd_packages for manual extraction
if ! has_command bsdtar; then sudo swupd bundle-add libarchive; fi
if [[ "${remove}" == "y" ]]; then sudo swupd bundle-remove -y dnf; fi
swupd_prepared="true"
}
# Manually fetch Clear Linux packages whose names contain any of the given
# keys and unpack them straight into the root filesystem. Used instead of
# swupd bundles, which are far too heavy for single utilities.
install_swupd_packages() {
  # Lazily build the package index: every "-bin-" / "-lib-" package name
  # scraped from the repository listing (cached in the global for reuse)
  if [[ ! "${swupd_packages}" ]]; then
    swupd_packages="$(curl -s -o - "${swupd_url}" | awk -F '"' '/-bin-|-lib-/{print $2}')"
  fi
  for key in "${@}"; do
    # Every package matching this key gets streamed through bsdtar into "/"
    for pkg in $(grep -F "${key}" <<< "${swupd_packages}"); do
      curl -s -o - "${swupd_url}/${pkg}" | sudo bsdtar -xf - -C "/"
    done
  done
}
# Refresh the APT package lists and install the given packages. An 'apt
# install' exit status of 100 usually means the cached repo lists are
# corrupted, so in that case wipe them and retry the whole sequence once.
prepare_install_apt_packages() {
  local rc="0"
  sudo apt update -y
  sudo apt install -y "${@}" || rc="${?}"
  if [[ "${rc}" == "100" ]]; then
    prompt -w "\n APT: Your repo lists might be broken"
    prompt -i "APT: Full-cleaning your repo lists and try again...\n"
    sudo apt clean -y
    sudo rm -rf /var/lib/apt/lists
    sudo apt update -y
    sudo apt install -y "${@}"
  fi
}
# One-time Void Linux preparation: update 'xbps' itself, purge old kernels,
# then run a full system upgrade. Guarded by the global ${xbps_prepared}.
prepare_xbps() {
[[ "${xbps_prepared}" == "true" ]] && return 0
# 'xbps-install' requires 'xbps' to be always up-to-date
sudo xbps-install -Syu xbps
# System upgrading can't remove the old kernel files by itself. It eats the
# boot partition and may cause a kernel panic when there is not enough space
sudo vkpurge rm all; sudo xbps-install -Syu
xbps_prepared="true"
}
#-----------------Deps-----------------#
# Make sure the theme build tools ('glib-compile-resources', 'sassc',
# 'xmllint') are installed, using whichever package manager this distro
# provides. Unknown distros fall through to installation_sorry().
install_theme_deps() {
if ! has_command glib-compile-resources || ! has_command sassc || ! has_command xmllint; then
prompt -w "DEPS: 'glib2.0', 'sassc', and 'xmllint' are required for theme installation."
prepare_deps
if has_command zypper; then
sudo zypper in -y sassc glib2-devel libxml2-tools
elif has_command swupd; then
# Rolling release
prepare_swupd && sudo swupd bundle-add libglib libxml2 && install_swupd_packages sassc libsass
elif has_command apt; then
prepare_install_apt_packages sassc libglib2.0-dev-bin libxml2-utils
elif has_command dnf; then
sudo dnf install -y sassc glib2-devel libxml2
elif has_command yum; then
sudo yum install -y sassc glib2-devel libxml2
elif has_command pacman; then
# Rolling release
sudo pacman -Syyu --noconfirm --needed sassc glib2 libxml2
elif has_command xbps-install; then
# Rolling release
# 'libxml2' is already included here; adding it explicitly would break
# the installation
prepare_xbps && sudo xbps-install -Sy sassc glib-devel
elif has_command eopkg; then
# Rolling release
sudo eopkg -y upgrade; sudo eopkg -y install sassc glib2 libxml2
else
installation_sorry
fi
fi
}
# Make sure ImageMagick's 'convert' is available; it's needed only when a
# custom background has to be blurred/darkened (see install_beggy).
install_beggy_deps() {
if ! has_command convert; then
prompt -w "DEPS: 'imagemagick' is required for background editing."
prepare_deps; stop_animation
if has_command zypper; then
sudo zypper in -y ImageMagick
elif has_command swupd; then
# Rolling release
prepare_swupd && sudo swupd bundle-add ImageMagick
elif has_command apt; then
prepare_install_apt_packages imagemagick
elif has_command dnf; then
sudo dnf install -y ImageMagick
elif has_command yum; then
sudo yum install -y ImageMagick
elif has_command pacman; then
# Rolling release
sudo pacman -Syyu --noconfirm --needed imagemagick
elif has_command xbps-install; then
# Rolling release
prepare_xbps && sudo xbps-install -Sy ImageMagick
elif has_command eopkg; then
# Rolling release
sudo eopkg -y upgrade; sudo eopkg -y install imagemagick
else
installation_sorry
fi
fi
}
# Make sure 'dialog' is available for the interactive option pickers.
# Skipped entirely in silent mode (no dialogs will be shown).
install_dialog_deps() {
[[ "${silent_mode}" == "true" ]] && return 0
if ! has_command dialog; then
prompt -w "DEPS: 'dialog' is required for this option."
prepare_deps
if has_command zypper; then
sudo zypper in -y dialog
elif has_command swupd; then
# Rolling release
prepare_swupd && install_swupd_packages dialog
elif has_command apt; then
prepare_install_apt_packages dialog
elif has_command dnf; then
sudo dnf install -y dialog
elif has_command yum; then
sudo yum install -y dialog
elif has_command pacman; then
# Rolling release
sudo pacman -Syyu --noconfirm --needed dialog
elif has_command xbps-install; then
# Rolling release
prepare_xbps && sudo xbps-install -Sy dialog
elif has_command eopkg; then
# Rolling release
sudo eopkg -y upgrade; sudo eopkg -y install dialog
else
installation_sorry
fi
fi
}
# Make sure 'ostree' and 'appstream-compose' (appstream-util) are available;
# both are required for building/installing the flatpak theme package.
install_flatpak_deps() {
if ! has_command ostree || ! has_command appstream-compose; then
prompt -w "DEPS: 'ostree' and 'appstream-util' is required for flatpak installing."
prepare_deps; stop_animation
if has_command zypper; then
sudo zypper in -y libostree appstream-glib
elif has_command swupd; then
# Rolling release
# BUGFIX: the 'bundle-add' subcommand was missing here ('swupd ostree ...'
# is not a valid swupd invocation)
prepare_swupd && sudo swupd bundle-add libostree libappstream-glib
elif has_command apt; then
prepare_install_apt_packages ostree appstream-util
elif has_command dnf; then
sudo dnf install -y ostree libappstream-glib
elif has_command yum; then
sudo yum install -y ostree libappstream-glib
elif has_command pacman; then
# Rolling release
sudo pacman -Syyu --noconfirm --needed ostree appstream-glib
elif has_command xbps-install; then
# Rolling release
prepare_xbps && sudo xbps-install -Sy ostree appstream-glib
elif has_command eopkg; then
# Rolling release
# BUGFIX: the 'install' subcommand was missing here ('eopkg -y ostree ...'
# is not a valid eopkg invocation)
sudo eopkg -y upgrade; sudo eopkg -y install ostree appstream-glib
else
installation_sorry
fi
fi
}
###############################################################################
# THEME MODULES #
###############################################################################
# Produce the GNOME Shell background image "${WHITESUR_TMP_DIR}/beggy.png"
# from the chosen ${background} option, optionally blurred and/or darkened.
# Pre-rendered variants are used for the stock background; ImageMagick is
# only needed for user-supplied images.
install_beggy() {
local CONVERT_OPT=""
[[ "${no_blur}" == "false" ]] && CONVERT_OPT+=" -scale 1280x -blur 0x50 "
[[ "${no_darken}" == "false" ]] && CONVERT_OPT+=" -fill black -colorize 45% "
case "${background}" in
blank)
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/common-assets/background-blank.png" "${WHITESUR_TMP_DIR}/beggy.png" ;;
default)
# Pick the matching pre-rendered image for each blur/darken combination
if [[ "${no_blur}" == "false" && "${no_darken}" == "true" ]]; then
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/common-assets/background-blur.png" "${WHITESUR_TMP_DIR}/beggy.png"
elif [[ "${no_blur}" == "false" && "${no_darken}" == "false" ]]; then
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/common-assets/background-blur-darken.png" "${WHITESUR_TMP_DIR}/beggy.png"
elif [[ "${no_blur}" == "true" && "${no_darken}" == "true" ]]; then
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/common-assets/background-default.png" "${WHITESUR_TMP_DIR}/beggy.png"
else
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/common-assets/background-darken.png" "${WHITESUR_TMP_DIR}/beggy.png"
fi
;;
*)
# BUGFIX: this condition previously read '"${darken}" == "true"', but no
# variable named 'darken' is set anywhere in this file — the option flag
# is 'no_darken' (see the CONVERT_OPT lines above) — so a custom
# background was never darkened
if [[ "${no_blur}" == "false" || "${no_darken}" == "false" ]]; then
install_beggy_deps
# ${CONVERT_OPT} is deliberately unquoted: it holds multiple words
convert "${background}" ${CONVERT_OPT} "${WHITESUR_TMP_DIR}/beggy.png"
else
cp -r "${background}" "${WHITESUR_TMP_DIR}/beggy.png"
fi
;;
esac
}
# Install the xfwm4 window-manager theme in three DPI flavours (normal,
# -hdpi, -xhdpi) under ${dest}. The three variants are identical except for
# the directory-name suffix, so build them in one loop.
# ${1}: color variant (run through destify)
install_xfwmy() {
  local color="$(destify ${1})"
  local suffix
  for suffix in "" "-hdpi" "-xhdpi"; do
    local target="${dest}/${name}${color}${colorscheme}${suffix}/xfwm4"
    mkdir -p "${target}"
    # Button/decoration assets for this DPI, then the shared themerc
    cp -r "${THEME_SRC_DIR}/assets/xfwm4/assets${color}${colorscheme}${suffix}/"*".png" "${target}"
    cp -r "${THEME_SRC_DIR}/main/xfwm4/themerc${color}" "${target}/themerc"
  done
}
# Build and install one GNOME Shell theme variant.
# ${1}..${5}: color, opacity, alt, theme, icon (raw option values, each run
# through destify). ${6} (optional): explicit target dir; when empty, the
# standard per-variant location under ${dest} is used.
install_shelly() {
local color="$(destify ${1})"
local opacity="$(destify ${2})"
local alt="$(destify ${3})"
local theme="$(destify ${4})"
local icon="$(destify ${5})"
local TARGET_DIR=
if [[ -z "${6}" ]]; then
TARGET_DIR="${dest}/${name}${color}${opacity}${alt}${theme}${colorscheme}/gnome-shell"
else
TARGET_DIR="${6}"
fi
# Fall back to the 42-0 stylesheet when no GNOME version was detected
if [[ "${GNOME_VERSION}" == 'none' ]]; then
local GNOME_VERSION='42-0'
fi
mkdir -p "${TARGET_DIR}"
mkdir -p "${TARGET_DIR}/assets"
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/icons" "${TARGET_DIR}"
cp -r "${THEME_SRC_DIR}/main/gnome-shell/pad-osd.css" "${TARGET_DIR}"
# ${SASSC_OPT} is deliberately unquoted (it may carry several options)
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/gnome-shell/shell-${GNOME_VERSION}/gnome-shell${color}.scss" "${TARGET_DIR}/gnome-shell.css"
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/common-assets/"*".svg" "${TARGET_DIR}/assets"
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/assets${color}/"*".svg" "${TARGET_DIR}/assets"
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/theme${theme}${colorscheme}/"*".svg" "${TARGET_DIR}/assets"
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/activities/activities${icon}.svg" "${TARGET_DIR}/assets/activities.svg"
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/activities/activities${icon}.svg" "${TARGET_DIR}/assets/activities-white.svg"
cp -r "${WHITESUR_TMP_DIR}/beggy.png" "${TARGET_DIR}/assets/background.png"
# Move these three SVGs up to the theme root — presumably the stylesheet
# references them there (TODO confirm). The subshell keeps the cwd change
# from leaking into the caller.
(
cd "${TARGET_DIR}"
mv -f "assets/no-events.svg" "no-events.svg"
mv -f "assets/process-working.svg" "process-working.svg"
mv -f "assets/no-notifications.svg" "no-notifications.svg"
)
# Light panels (or solid Light variants) get the black activities icon
if [[ "${black_font:-}" == 'true' || "${opacity}" == '-solid' ]] && [[ "${color}" == '-Light' ]]; then
cp -r "${THEME_SRC_DIR}/assets/gnome-shell/activities-black/activities${icon}.svg" "${TARGET_DIR}/assets/activities.svg"
fi
}
# Build and install one full GTK theme variant: index.theme metadata plus the
# GTK 2/3/4, Cinnamon, Metacity, and Plank pieces.
# ${1}..${4}: color, opacity, alt, theme (raw option values, each run through
# destify). GTK 3/4 css is compiled from scss in a temp dir and packed into a
# gresource bundle; the installed gtk.css files just import from it.
install_theemy() {
local color="$(destify ${1})"
local opacity="$(destify ${2})"
local alt="$(destify ${3})"
local theme="$(destify ${4})"
# The matching icon-theme name only varies by light/dark
if [[ "${color}" == '-Light' ]]; then
local iconcolor=''
elif [[ "${color}" == '-Dark' ]]; then
local iconcolor='-Dark'
fi
local TARGET_DIR="${dest}/${name}${color}${opacity}${alt}${theme}${colorscheme}"
local TMP_DIR_T="${WHITESUR_TMP_DIR}/gtk-3.0${color}${opacity}${alt}${theme}${colorscheme}"
local TMP_DIR_F="${WHITESUR_TMP_DIR}/gtk-4.0${color}${opacity}${alt}${theme}${colorscheme}"
mkdir -p "${TARGET_DIR}"
# index.theme: desktop-entry style metadata describing this variant
local desktop_entry="[Desktop Entry]\n"
desktop_entry+="Type=X-GNOME-Metatheme\n"
desktop_entry+="Name=${name}${color}${opacity}${alt}${theme}${colorscheme}\n"
desktop_entry+="Comment=A MacOS BigSur like Gtk+ theme based on Elegant Design\n"
desktop_entry+="Encoding=UTF-8\n\n"
desktop_entry+="[X-GNOME-Metatheme]\n"
desktop_entry+="GtkTheme=${name}${color}${opacity}${alt}${theme}${colorscheme}\n"
desktop_entry+="MetacityTheme=${name}${color}${opacity}${alt}${theme}${colorscheme}\n"
desktop_entry+="IconTheme=${name}${iconcolor}\n"
desktop_entry+="CursorTheme=WhiteSur-cursors\n"
desktop_entry+="ButtonLayout=close,minimize,maximize:menu\n"
echo -e "${desktop_entry}" > "${TARGET_DIR}/index.theme"
#--------------------GTK-3.0--------------------#
# Stage assets + compiled css in the temp dir, then pack into a gresource;
# ${SASSC_OPT} is deliberately unquoted (it may carry several options)
mkdir -p "${TMP_DIR_T}"
cp -r "${THEME_SRC_DIR}/assets/gtk/common-assets/assets" "${TMP_DIR_T}"
cp -r "${THEME_SRC_DIR}/assets/gtk/common-assets/sidebar-assets/"*".png" "${TMP_DIR_T}/assets"
cp -r "${THEME_SRC_DIR}/assets/gtk/scalable" "${TMP_DIR_T}/assets"
cp -r "${THEME_SRC_DIR}/assets/gtk/windows-assets/titlebutton${alt}${colorscheme}" "${TMP_DIR_T}/windows-assets"
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/gtk-3.0/gtk${color}.scss" "${TMP_DIR_T}/gtk.css"
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/gtk-3.0/gtk-Dark.scss" "${TMP_DIR_T}/gtk-dark.css"
mkdir -p "${TARGET_DIR}/gtk-3.0"
cp -r "${THEME_SRC_DIR}/assets/gtk/thumbnails/thumbnail${color}${theme}${colorscheme}.png" "${TARGET_DIR}/gtk-3.0/thumbnail.png"
# The installed css files only import from the compiled gresource bundle
echo '@import url("resource:///org/gnome/theme/gtk.css");' > "${TARGET_DIR}/gtk-3.0/gtk.css"
echo '@import url("resource:///org/gnome/theme/gtk-dark.css");' > "${TARGET_DIR}/gtk-3.0/gtk-dark.css"
glib-compile-resources --sourcedir="${TMP_DIR_T}" --target="${TARGET_DIR}/gtk-3.0/gtk.gresource" "${THEME_SRC_DIR}/main/gtk-3.0/gtk.gresource.xml"
#--------------------GTK-4.0--------------------#
# Same scheme as GTK 3, reusing the staged GTK-3 assets
mkdir -p "${TMP_DIR_F}"
cp -r "${TMP_DIR_T}/assets" "${TMP_DIR_F}"
cp -r "${TMP_DIR_T}/windows-assets" "${TMP_DIR_F}"
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/gtk-4.0/gtk${color}.scss" "${TMP_DIR_F}/gtk.css"
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/gtk-4.0/gtk-Dark.scss" "${TMP_DIR_F}/gtk-dark.css"
mkdir -p "${TARGET_DIR}/gtk-4.0"
cp -r "${THEME_SRC_DIR}/assets/gtk/thumbnails/thumbnail${color}${theme}${colorscheme}.png" "${TARGET_DIR}/gtk-4.0/thumbnail.png"
echo '@import url("resource:///org/gnome/theme/gtk.css");' > "${TARGET_DIR}/gtk-4.0/gtk.css"
echo '@import url("resource:///org/gnome/theme/gtk-dark.css");' > "${TARGET_DIR}/gtk-4.0/gtk-dark.css"
glib-compile-resources --sourcedir="${TMP_DIR_F}" --target="${TARGET_DIR}/gtk-4.0/gtk.gresource" "${THEME_SRC_DIR}/main/gtk-4.0/gtk.gresource.xml"
#----------------Cinnamon-----------------#
mkdir -p "${TARGET_DIR}/cinnamon"
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/cinnamon/cinnamon${color}.scss" "${TARGET_DIR}/cinnamon/cinnamon.css"
cp -r "${THEME_SRC_DIR}/assets/cinnamon/common-assets" "${TARGET_DIR}/cinnamon/assets"
cp -r "${THEME_SRC_DIR}/assets/cinnamon/assets${color}${colorscheme}/"*".svg" "${TARGET_DIR}/cinnamon/assets"
cp -r "${THEME_SRC_DIR}/assets/cinnamon/theme${theme}${colorscheme}/"*".svg" "${TARGET_DIR}/cinnamon/assets"
cp -r "${THEME_SRC_DIR}/assets/cinnamon/thumbnails/thumbnail${color}${theme}${colorscheme}.png" "${TARGET_DIR}/cinnamon/thumbnail.png"
#----------------Misc------------------#
# GTK 2, Metacity, and Plank pieces
mkdir -p "${TARGET_DIR}/gtk-2.0"
cp -r "${THEME_SRC_DIR}/main/gtk-2.0/gtkrc${color}${theme}${colorscheme}" "${TARGET_DIR}/gtk-2.0/gtkrc"
cp -r "${THEME_SRC_DIR}/main/gtk-2.0/menubar-toolbar${color}.rc" "${TARGET_DIR}/gtk-2.0/menubar-toolbar.rc"
cp -r "${THEME_SRC_DIR}/main/gtk-2.0/common/"*".rc" "${TARGET_DIR}/gtk-2.0"
cp -r "${THEME_SRC_DIR}/assets/gtk-2.0/assets-common${color}${colorscheme}" "${TARGET_DIR}/gtk-2.0/assets"
cp -r "${THEME_SRC_DIR}/assets/gtk-2.0/assets${color}${theme}${colorscheme}/"*".png" "${TARGET_DIR}/gtk-2.0/assets"
mkdir -p "${TARGET_DIR}/metacity-1"
cp -r "${THEME_SRC_DIR}/main/metacity-1/metacity-theme${color}.xml" "${TARGET_DIR}/metacity-1/metacity-theme-1.xml"
cp -r "${THEME_SRC_DIR}/main/metacity-1/metacity-theme-3.xml" "${TARGET_DIR}/metacity-1"
cp -r "${THEME_SRC_DIR}/assets/metacity-1/titlebuttons${color}${colorscheme}" "${TARGET_DIR}/metacity-1/titlebuttons"
cp -r "${THEME_SRC_DIR}/assets/metacity-1/thumbnail${color}${colorscheme}.png" "${TARGET_DIR}/metacity-1/thumbnail.png"
# metacity-theme-2 is just a symlink alias; subshell keeps the cwd intact
( cd "${TARGET_DIR}/metacity-1" && ln -s "metacity-theme-1.xml" "metacity-theme-2.xml" )
mkdir -p "${TARGET_DIR}/plank"
cp -r "${THEME_SRC_DIR}/other/plank/theme${color}/"*".theme" "${TARGET_DIR}/plank"
}
# Remove one installed theme variant (plus its hdpi/xhdpi xfwm4 flavours and
# the libadwaita files placed in ~/.config/gtk-4.0).
# ${1}..${4}: color, opacity, alt, theme (raw option values, destify'd here)
remove_packy() {
rm -rf "${dest}/${name}$(destify ${1})$(destify ${2})$(destify ${3})$(destify ${4})${colorscheme}"
rm -rf "${HOME}/.config/gtk-4.0/"{gtk.css,gtk-dark.css,assets,windows-assets}
rm -rf "${dest}/${name}$(destify ${1})${colorscheme}-hdpi"
rm -rf "${dest}/${name}$(destify ${1})${colorscheme}-xhdpi"
# Backward compatibility
# rm -rf "${dest}/${name}$(destify ${1})-mdpi"
}
# Remove a theme variant installed under a legacy naming scheme.
# ${1}: raw (un-destify'd) color; ${2}..${4}: opacity, alt, theme;
# ${5}: raw color-scheme suffix
remove_old_packy() {
rm -rf "${dest}/${name}${1}$(destify ${2})$(destify ${3})$(destify ${4})${5}"
rm -rf "${dest}/${name}${1}${5}-hdpi"
rm -rf "${dest}/${name}${1}${5}-xhdpi"
}
# Install the GTK4 theme straight into ~/.config/gtk-4.0 so libadwaita apps
# (which ignore the system theme dir) pick it up.
# ${1}: color variant; ${2}: alt (titlebutton) variant
config_gtk4() {
local color="$(destify ${1})"
local alt="$(destify ${2})"
local TARGET_DIR="${HOME}/.config/gtk-4.0"
# Install gtk4.0 into config for libadwaita
mkdir -p "${TARGET_DIR}"
# Clear any previously installed copy first
rm -rf "${TARGET_DIR}/"{gtk.css,gtk-dark.css,assets,windows-assets}
# ${SASSC_OPT} is deliberately unquoted (it may carry several options)
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/gtk-4.0/gtk${color}.scss" "${TARGET_DIR}/gtk.css"
sassc ${SASSC_OPT} "${THEME_SRC_DIR}/main/gtk-4.0/gtk-Dark.scss" "${TARGET_DIR}/gtk-dark.css"
cp -r "${THEME_SRC_DIR}/assets/gtk/common-assets/assets" "${TARGET_DIR}"
cp -r "${THEME_SRC_DIR}/assets/gtk/common-assets/sidebar-assets/"*".png" "${TARGET_DIR}/assets"
cp -r "${THEME_SRC_DIR}/assets/gtk/scalable" "${TARGET_DIR}/assets"
cp -r "${THEME_SRC_DIR}/assets/gtk/windows-assets/titlebutton${alt}${colorscheme}" "${TARGET_DIR}/windows-assets"
}
###############################################################################
# THEMES #
###############################################################################
# Build and install every requested combination of opacity/alt/theme/color
# (the option arrays are globals set by the argument parser).
install_themes() {
# "install_theemy" and "install_shelly" require "gtk_base", so multithreading
# isn't possible
install_theme_deps; start_animation; install_beggy
for opacity in "${opacities[@]}"; do
for alt in "${alts[@]}"; do
for theme in "${themes[@]}"; do
for color in "${colors[@]}"; do
# gtk_base regenerates the shared scss base for this combination
gtk_base "${color}" "${opacity}" "${theme}" "${compact}"
install_theemy "${color}" "${opacity}" "${alt}" "${theme}"
install_shelly "${color}" "${opacity}" "${alt}" "${theme}" "${icon}"
install_xfwmy "${color}"
done
done
done
done
stop_animation
}
# Build the primary theme flavour and install its GTK4 css into
# ~/.config/gtk-4.0 so libadwaita applications pick it up.
install_libadwaita() {
gtk_base "${colors[0]}" "${opacities[0]}" "${themes[0]}" "${compact[0]}"
# BUGFIX: was "${alt[0]}" — the options array is named 'alts' everywhere
# else in this file (see install_themes), so the alt variant passed to
# config_gtk4 was silently empty
config_gtk4 "${colors[0]}" "${alts[0]}"
}
# Delete every installed theme variant — all combinations of the supported
# option values — including variants installed under the legacy (lowercase)
# naming scheme. Current-name removals run as background jobs for speed.
remove_themes() {
process_ids=()
for color in "${COLOR_VARIANTS[@]}"; do
for opacity in "${OPACITY_VARIANTS[@]}"; do
for alt in "${ALT_VARIANTS[@]}"; do
for theme in "${THEME_VARIANTS[@]}"; do
remove_packy "${color}" "${opacity}" "${alt}" "${theme}" &
process_ids+=("${!}")
done
done
done
done
# Legacy installs used raw lowercase color names and an explicit scheme
# suffix in the directory name
for color in '-light' '-dark'; do
for opacity in "${OPACITY_VARIANTS[@]}"; do
for alt in "${ALT_VARIANTS[@]}"; do
for theme in "${THEME_VARIANTS[@]}"; do
for scheme in '' '-nord'; do
remove_old_packy "${color}" "${opacity}" "${alt}" "${theme}" "${scheme}"
done
done
done
done
done
# Wait for the background removals before returning
wait ${process_ids[*]} &> /dev/null
}
# Install the theme for the GDM login screen. Depending on the distro, GDM
# styles come either from plain CSS files (symlinked over) or from a compiled
# gresource bundle (recompiled over the distro's file). Originals are backed
# up first so revert_gdm_theme can restore them.
install_gdm_theme() {
local TARGET=
# Let's go!
install_theme_deps
rm -rf "${WHITESUR_GS_DIR}"; install_beggy
gtk_base "${colors[0]}" "${opacities[0]}" "${themes[0]}"
if check_theme_file "${COMMON_CSS_FILE}"; then # CSS-based theme
install_shelly "${colors[0]}" "${opacities[0]}" "${alts[0]}" "${themes[0]}" "${icon}" "${WHITESUR_GS_DIR}"
# Rewrite relative asset paths into absolute ones, since GDM loads the
# css from outside the theme dir
sed $SED_OPT "s|assets|${WHITESUR_GS_DIR}/assets|" "${WHITESUR_GS_DIR}/gnome-shell.css"
if check_theme_file "${UBUNTU_CSS_FILE}"; then
TARGET="${UBUNTU_CSS_FILE}"
elif check_theme_file "${ZORIN_CSS_FILE}"; then
TARGET="${ZORIN_CSS_FILE}"
fi
backup_file "${COMMON_CSS_FILE}"; backup_file "${TARGET}"
ln -sf "${WHITESUR_GS_DIR}/gnome-shell.css" "${COMMON_CSS_FILE}"
ln -sf "${WHITESUR_GS_DIR}/gnome-shell.css" "${TARGET}"
# Fix previously installed WhiteSur
restore_file "${ETC_CSS_FILE}"
else # GR-based theme
install_shelly "${colors[0]}" "${opacities[0]}" "${alts[0]}" "${themes[0]}" "${icon}" "${WHITESUR_TMP_DIR}/shelly"
# Point asset paths at the gresource bundle instead of the filesystem
sed $SED_OPT "s|assets|resource:///org/gnome/shell/theme/assets|" "${WHITESUR_TMP_DIR}/shelly/gnome-shell.css"
if check_theme_file "$POP_OS_GR_FILE"; then
TARGET="${POP_OS_GR_FILE}"
elif check_theme_file "$YARU_GR_FILE"; then
TARGET="${YARU_GR_FILE}"
elif check_theme_file "$ZORIN_GR_FILE"; then
TARGET="${ZORIN_GR_FILE}"
elif check_theme_file "$MISC_GR_FILE"; then
TARGET="${MISC_GR_FILE}"
fi
backup_file "${TARGET}"
glib-compile-resources --sourcedir="${WHITESUR_TMP_DIR}/shelly" --target="${TARGET}" "${GS_GR_XML_FILE}"
# Fix previously installed WhiteSur
restore_file "${ETC_GR_FILE}"
fi
}
# Undo install_gdm_theme: delete the installed shell theme directory and
# restore every distro CSS / gresource file that may have been overwritten
# from its backup.
revert_gdm_theme() {
  local file
  rm -rf "${WHITESUR_GS_DIR}"
  for file in "${COMMON_CSS_FILE}" "${UBUNTU_CSS_FILE}" \
              "${ZORIN_CSS_FILE}" "${ETC_CSS_FILE}" \
              "${POP_OS_GR_FILE}" "${YARU_GR_FILE}" \
              "${MISC_GR_FILE}" "${ETC_GR_FILE}" "${ZORIN_GR_FILE}"; do
    restore_file "${file}"
  done
}
###############################################################################
# FIREFOX #
###############################################################################
# Install the Firefox chrome theme (Monterey or WhiteSur flavour) into the
# theme dir matching the Firefox packaging in use (snap/flatpak/native),
# then wire it into every profile via config_firefox.
# NOTE(review): 'udo' appears to be a run-as-user helper from lib-core.sh —
# confirm; it is used consistently for all user-owned file operations here.
install_firefox_theme() {
if has_snap_app firefox; then
local TARGET_DIR="${FIREFOX_SNAP_THEME_DIR}"
elif has_flatpak_app org.mozilla.firefox; then
local TARGET_DIR="${FIREFOX_FLATPAK_THEME_DIR}"
else
local TARGET_DIR="${FIREFOX_THEME_DIR}"
fi
# Start from a clean slate in all three possible locations
remove_firefox_theme
udo mkdir -p "${TARGET_DIR}"
udo cp -rf "${FIREFOX_SRC_DIR}"/customChrome.css "${TARGET_DIR}"
if [[ "${monterey}" == 'true' ]]; then
# Monterey reuses WhiteSur's icons and titlebuttons
udo cp -rf "${FIREFOX_SRC_DIR}"/Monterey "${TARGET_DIR}"
udo cp -rf "${FIREFOX_SRC_DIR}"/WhiteSur/{icons,titlebuttons} "${TARGET_DIR}"/Monterey
if [[ "${alttheme}" == 'true' ]]; then
udo cp -rf "${FIREFOX_SRC_DIR}"/userChrome-Monterey-alt.css "${TARGET_DIR}"/userChrome.css
else
udo cp -rf "${FIREFOX_SRC_DIR}"/userChrome-Monterey.css "${TARGET_DIR}"/userChrome.css
fi
else
udo cp -rf "${FIREFOX_SRC_DIR}"/WhiteSur "${TARGET_DIR}"
udo cp -rf "${FIREFOX_SRC_DIR}"/userChrome-WhiteSur.css "${TARGET_DIR}"/userChrome.css
fi
config_firefox
}
# Point every Firefox profile at the installed theme: symlink each profile's
# "chrome" dir to the theme dir and append the prefs that userChrome.css
# styling requires. Firefox is killed first so it doesn't overwrite prefs.js
# on exit.
config_firefox() {
if has_snap_app firefox; then
local TARGET_DIR="${FIREFOX_SNAP_THEME_DIR}"
local FIREFOX_DIR="${FIREFOX_SNAP_DIR_HOME}"
elif has_flatpak_app org.mozilla.firefox; then
local TARGET_DIR="${FIREFOX_FLATPAK_THEME_DIR}"
local FIREFOX_DIR="${FIREFOX_FLATPAK_DIR_HOME}"
else
local TARGET_DIR="${FIREFOX_THEME_DIR}"
local FIREFOX_DIR="${FIREFOX_DIR_HOME}"
fi
killall "firefox" "firefox-bin" &> /dev/null || true
# Only dirs containing prefs.js are real profiles
for d in "${FIREFOX_DIR}/"*"default"*; do
if [[ -f "${d}/prefs.js" ]]; then
rm -rf "${d}/chrome"
udo ln -sf "${TARGET_DIR}" "${d}/chrome"
udoify_file "${d}/prefs.js"
# legacyUserProfileCustomizations is what makes Firefox load
# userChrome.css at all; the rest tune rendering for the theme
echo "user_pref(\"toolkit.legacyUserProfileCustomizations.stylesheets\", true);" >> "${d}/prefs.js"
echo "user_pref(\"browser.tabs.drawInTitlebar\", true);" >> "${d}/prefs.js"
echo "user_pref(\"browser.uidensity\", 0);" >> "${d}/prefs.js"
echo "user_pref(\"layers.acceleration.force-enabled\", true);" >> "${d}/prefs.js"
echo "user_pref(\"mozilla.widget.use-argb-visuals\", true);" >> "${d}/prefs.js"
fi
done
}
# Open the theme's userChrome.css and customChrome.css in the user's editor
# (falling back to nano), installing the theme first when it's missing.
edit_firefox_theme_prefs() {
if has_snap_app firefox; then
local TARGET_DIR="${FIREFOX_SNAP_THEME_DIR}"
elif has_flatpak_app org.mozilla.firefox; then
local TARGET_DIR="${FIREFOX_FLATPAK_THEME_DIR}"
else
local TARGET_DIR="${FIREFOX_THEME_DIR}"
fi
# NOTE(review): the ';' means config_firefox runs unconditionally, even
# when the theme dir already exists — presumably intentional (re-sync prefs)
[[ ! -d "${TARGET_DIR}" ]] && install_firefox_theme ; config_firefox
udo ${EDITOR:-nano} "${TARGET_DIR}/userChrome.css"
udo ${EDITOR:-nano} "${TARGET_DIR}/customChrome.css"
}
# Remove the Firefox theme from all three packaging layouts (native,
# flatpak, snap): each per-profile "chrome" dir plus the shared theme dir.
remove_firefox_theme() {
  local profile_root
  for profile_root in "${FIREFOX_DIR_HOME}" "${FIREFOX_FLATPAK_DIR_HOME}" "${FIREFOX_SNAP_DIR_HOME}"; do
    rm -rf "${profile_root}/"*"default"*"/chrome"
  done
  rm -rf "${FIREFOX_THEME_DIR}" "${FIREFOX_FLATPAK_THEME_DIR}" "${FIREFOX_SNAP_THEME_DIR}"
}
###############################################################################
# DASH TO DOCK #
###############################################################################
# Install the bundled Dash-to-Dock extension into the user's GNOME Shell
# extensions dir (backing up any existing copy) and enable its custom theme.
install_dash_to_dock() {
if [[ -d "${DASH_TO_DOCK_DIR_HOME}" ]]; then
backup_file "${DASH_TO_DOCK_DIR_HOME}" "udo"
rm -rf "${DASH_TO_DOCK_DIR_HOME}"
fi
udo cp -rf "${DASH_TO_DOCK_SRC_DIR}/dash-to-dock@micxgx.gmail.com" "${GNOME_SHELL_EXTENSION_DIR}"
# dbus-launch is needed so dconf works outside a desktop session
udo dbus-launch dconf write /org/gnome/shell/extensions/dash-to-dock/apply-custom-theme true
}
# Compile and install the themed stylesheet into an already-installed
# Dash-to-Dock extension (user copy preferred over the system copy), then
# enable the custom theme. The GNOME-version check picks the matching scss.
install_dash_to_dock_theme() {
gtk_base "${colors[0]}" "${opacities[0]}" "${themes[0]}"
if [[ -d "${DASH_TO_DOCK_DIR_HOME}" ]]; then
backup_file "${DASH_TO_DOCK_DIR_HOME}/stylesheet.css" "udo"
udoify_file "${DASH_TO_DOCK_DIR_HOME}/stylesheet.css"
# ${SASSC_OPT} is deliberately unquoted (it may carry several options)
if [[ "${GNOME_VERSION}" == 'new' ]]; then
udo sassc ${SASSC_OPT} "${DASH_TO_DOCK_SRC_DIR}/stylesheet-4.scss" "${DASH_TO_DOCK_DIR_HOME}/stylesheet.css"
else
udo sassc ${SASSC_OPT} "${DASH_TO_DOCK_SRC_DIR}/stylesheet-3.scss" "${DASH_TO_DOCK_DIR_HOME}/stylesheet.css"
fi
elif [[ -d "${DASH_TO_DOCK_DIR_ROOT}" ]]; then
backup_file "${DASH_TO_DOCK_DIR_ROOT}/stylesheet.css" "sudo"
if [[ "${GNOME_VERSION}" == 'new' ]]; then
sudo sassc ${SASSC_OPT} "${DASH_TO_DOCK_SRC_DIR}/stylesheet-4.scss" "${DASH_TO_DOCK_DIR_ROOT}/stylesheet.css"
else
sudo sassc ${SASSC_OPT} "${DASH_TO_DOCK_SRC_DIR}/stylesheet-3.scss" "${DASH_TO_DOCK_DIR_ROOT}/stylesheet.css"
fi
fi
# dbus-launch is needed so dconf works outside a desktop session
udo dbus-launch dconf write /org/gnome/shell/extensions/dash-to-dock/apply-custom-theme true
}
# Restore the original Dash-to-Dock stylesheet from its backup (user copy
# preferred over the system copy) and disable the custom theme.
revert_dash_to_dock_theme() {
if [[ -d "${DASH_TO_DOCK_DIR_HOME}" ]]; then
restore_file "${DASH_TO_DOCK_DIR_HOME}/stylesheet.css" "udo"
elif [[ -d "${DASH_TO_DOCK_DIR_ROOT}" ]]; then
restore_file "${DASH_TO_DOCK_DIR_ROOT}/stylesheet.css" "sudo"
fi
# dbus-launch is needed so dconf works outside a desktop session
udo dbus-launch dconf write /org/gnome/shell/extensions/dash-to-dock/apply-custom-theme false
}
###############################################################################
# FLATPAK & SNAP #
###############################################################################
# Package and expose every requested theme variant to flatpak apps
# (pakitheme_gtk3 comes from lib-flatpak.sh). The loop variables are globals
# that the callee may read, so their names must stay as-is.
connect_flatpak() {
install_flatpak_deps
for opacity in "${opacities[@]}"; do
for alt in "${alts[@]}"; do
for theme in "${themes[@]}"; do
for color in "${colors[@]}"; do
pakitheme_gtk3 "${color}" "${opacity}" "${alt}" "${theme}"
done
done
done
done
}
# Remove every requested theme variant from flatpak (flatpak_remove comes
# from lib-flatpak.sh). Mirrors the loop structure of connect_flatpak.
disconnect_flatpak() {
for opacity in "${opacities[@]}"; do
for alt in "${alts[@]}"; do
for theme in "${themes[@]}"; do
for color in "${colors[@]}"; do
flatpak_remove "${color}" "${opacity}" "${alt}" "${theme}"
done
done
done
done
}
# Install the theme snap and connect its gtk-3/icon theme content slots to
# every installed snap that consumes the gtk-common-themes interfaces.
connect_snap() {
  sudo snap install whitesur-gtk-theme
  local consumer
  # One connection pair per unique snap name found in 'snap connections'
  for consumer in $(snap connections | awk '/gtk-common-themes/ {print $2}' | cut -d: -f1 | sort -u); do
    sudo snap connect "${consumer}:gtk-3-themes" "whitesur-gtk-theme:gtk-3-themes"
    sudo snap connect "${consumer}:icon-themes" "whitesur-gtk-theme:icon-themes"
  done
}
# Undo connect_snap: disconnect the theme snap's content slots from every
# snap currently wired to the gtk-common-themes interfaces.
disconnect_snap() {
  local consumer
  for consumer in $(snap connections | awk '/gtk-common-themes/ {print $2}' | cut -d: -f1 | sort -u); do
    sudo snap disconnect "${consumer}:gtk-3-themes" "whitesur-gtk-theme:gtk-3-themes"
    sudo snap disconnect "${consumer}:icon-themes" "whitesur-gtk-theme:icon-themes"
  done
}
#########################################################################
# GTK BASE #
#########################################################################
# Regenerate the temp scss base file (_gtk-base-temp.scss) from the pristine
# _gtk-base.scss, patching in the current option values. Reads the globals
# ${compact}, ${opacity}, ${theme}; the positional args passed by callers are
# not referenced directly here.
gtk_base() {
cp -rf "${THEME_SRC_DIR}/sass/_gtk-base"{".scss","-temp.scss"}
# Theme base options
if [[ "${compact}" == 'false' ]]; then
sed $SED_OPT "/\$laptop/s/true/false/" "${THEME_SRC_DIR}/sass/_gtk-base-temp.scss"
fi
if [[ "${opacity}" == 'solid' ]]; then
sed $SED_OPT "/\$trans/s/true/false/" "${THEME_SRC_DIR}/sass/_gtk-base-temp.scss"
fi
if [[ "${theme}" != '' ]]; then
sed $SED_OPT "/\$theme/s/default/${theme}/" "${THEME_SRC_DIR}/sass/_gtk-base-temp.scss"
fi
}
###############################################################################
# CUSTOMIZATIONS #
###############################################################################
# Regenerate the temp scss options file (_theme-options-temp.scss) from the
# pristine _theme-options.scss, patching in every user customization flag.
# Each sed replaces a default value on the matching "$variable" line.
customize_theme() {
cp -rf "${THEME_SRC_DIR}/sass/_theme-options"{".scss","-temp.scss"}
# Nord color scheme
if [[ "${colorscheme}" == '-nord' ]]; then
prompt -s "Changing color scheme style to nord style ..."
sed $SED_OPT "/\$colorscheme/s/default/nord/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Darker dark colors
if [[ "${darker}" == 'true' ]]; then
prompt -s "Changing dark color style to darker one ..."
sed $SED_OPT "/\$darker/s/false/true/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change Nautilus sidebar size
if [[ "${sidebar_size}" != 'default' ]]; then
prompt -s "Changing Nautilus sidebar size ... \n"
sed $SED_OPT "/\$sidebar_size/s/200px/${sidebar_size}px/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change Nautilus style
if [[ "${nautilus_style}" != 'stable' ]]; then
prompt -s "Changing Nautilus style ..."
sed $SED_OPT "/\$nautilus_style/s/stable/${nautilus_style}/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change Nautilus titlebutton placement style
if [[ "${right_placement}" == 'true' ]]; then
prompt -s "Changing Nautilus titlebutton placement style ..."
sed $SED_OPT "/\$placement/s/left/right/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change maximized window radius
if [[ "${max_round}" == 'true' ]]; then
prompt -s "Changing maximized window style ..."
sed $SED_OPT "/\$max_window_style/s/square/round/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change gnome-shell panel transparency
if [[ "${panel_opacity}" != 'default' ]]; then
prompt -s "Changing panel transparency ..."
sed $SED_OPT "/\$panel_opacity/s/0.15/0.${panel_opacity}/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change gnome-shell panel height size
if [[ "${panel_size}" != 'default' ]]; then
prompt -s "Changing panel height size to '${panel_size}'..."
sed $SED_OPT "/\$panel_size/s/default/${panel_size}/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change gnome-shell show apps button style
if [[ "${showapps_normal}" == 'true' ]]; then
prompt -s "Changing gnome-shell show apps button style ..."
sed $SED_OPT "/\$showapps_button/s/bigsur/normal/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Monterey style (also forces a black panel font and a 0.5 panel opacity)
if [[ "${monterey}" == 'true' ]]; then
black_font="true"
prompt -s "Changing to Monterey style ..."
sed $SED_OPT "/\$monterey/s/false/true/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
sed $SED_OPT "/\$panel_opacity/s/0.15/0.5/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
# Change panel font color
if [[ "${black_font}" == 'true' ]]; then
prompt -s "Changing panel font color ..."
sed $SED_OPT "/\$panel_font/s/white/black/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
if [[ "${compact}" == 'false' ]]; then
prompt -s "Changing Definition mode to HD (Bigger font, Bigger size) ..."
#FIXME: @vince is it not implemented yet? (Only Gnome-shell and Gtk theme finished!)
fi
# Change GDM scaling
if [[ "${scale}" == 'x2' ]]; then
prompt -s "Changing GDM scaling to 200% ..."
sed $SED_OPT "/\$scale/s/default/x2/" "${THEME_SRC_DIR}/sass/_theme-options-temp.scss"
fi
}
#-----------------------------------DIALOGS------------------------------------#
# The default values here should get manually set and updated. Some of default
# values are taken from _variables.scss
# Ask the user to pick a panel opacity when "-p" was not fully specified.
# Depends on install_dialog_deps/dialogify (defined elsewhere in this lib)
# and the PANEL_OPACITY_VARIANTS array.
show_panel_opacity_dialog() {
  install_dialog_deps
  # "${...[@]}" passes each variant as its own word; the original unquoted
  # ${...[*]} form joined the list and re-split it on IFS (and would glob).
  dialogify panel_opacity "${THEME_NAME}" "Choose your panel opacity (Default is 15)" "${PANEL_OPACITY_VARIANTS[@]}"
}
# Ask the user to pick the Nautilus minimum sidebar width when "-s" was not
# fully specified. Depends on install_dialog_deps/dialogify and
# SIDEBAR_SIZE_VARIANTS.
show_sidebar_size_dialog() {
  install_dialog_deps
  # Quoted [@] expansion keeps each variant intact (no IFS splitting/globbing)
  dialogify sidebar_size "${THEME_NAME}" "Choose your Nautilus minimum sidebar size (default is 200px)" "${SIDEBAR_SIZE_VARIANTS[@]}"
}
# Ask the user to pick the Nautilus style when "-N" was not fully specified.
# Depends on install_dialog_deps/dialogify and NAUTILUS_STYLE_VARIANTS.
show_nautilus_style_dialog() {
  install_dialog_deps
  # Quoted [@] expansion keeps each variant intact (no IFS splitting/globbing)
  dialogify nautilus_style "${THEME_NAME}" "Choose your Nautilus style (default is BigSur-like style)" "${NAUTILUS_STYLE_VARIANTS[@]}"
}
# Run each interactive dialog whose corresponding CLI flag was marked as
# needing one (see the need_dialog associative array).
show_needed_dialogs() {
  [[ "${need_dialog["-p"]}" == "true" ]] && show_panel_opacity_dialog
  [[ "${need_dialog["-s"]}" == "true" ]] && show_sidebar_size_dialog
  [[ "${need_dialog["-N"]}" == "true" ]] && show_nautilus_style_dialog
  # Keep the function's exit status at 0 even when the last guard is false,
  # matching the original if-statement behavior.
  return 0
}
|
def find_first_duplicate(arr):
    """Return the index of the first element that has already appeared
    earlier in ``arr``, or -1 if all elements are unique.

    Runs in O(n) time and O(n) space.
    """
    # A set is the idiomatic membership structure here; the original used a
    # dict whose values were never read.
    seen = set()
    for i, n in enumerate(arr):
        if n in seen:
            return i
        seen.add(n)
    return -1


arr = [5, 7, 9, 3, 1, 3]
print(find_first_duplicate(arr))
# Output: 5  (the repeated value 3 first recurs at index 5; the original
# comment claiming 4 was wrong — index 4 holds the unique value 1)
package android.rest.client.entity;
/**
 * Plain data entity describing an employee record exchanged with the REST
 * backend (id, full name, and last login/logout timestamps as strings).
 *
 * NOTE(review): the fields deliberately keep their PascalCase/underscore
 * names ("Id", "Full_Name", ...) — a reflection-based (de)serializer may
 * match wire keys to these exact field names (see EMPLOYEE_ID /
 * EMPLOYEE_NAME below); confirm before renaming.
 */
public class Employee {
    // Primary key of the employee record.
    private int Id;
    // Employee's display name.
    private String Full_Name;
    // Last login timestamp; string-typed, format defined by the server.
    private String Login_Date;
    // Last logout timestamp; string-typed, format defined by the server.
    private String Logout_Date;

    // External names used to address the id and name attributes; note they
    // mirror the field names above exactly.
    public static final String EMPLOYEE_ID = "Id";
    public static final String EMPLOYEE_NAME = "Full_Name";

    /** @return the employee's primary key */
    public int getId() {
        return Id;
    }

    /** @param id the employee's primary key */
    public void setId(int id) {
        this.Id = id;
    }

    /** @return the employee's full name */
    public String getName() {
        return Full_Name;
    }

    /** @param name the employee's full name */
    public void setName(String name) {
        this.Full_Name = name;
    }

    /** @return the last login timestamp string */
    public String getLogin_Date() {
        return Login_Date;
    }

    /** @param login_Date the last login timestamp string */
    public void setLogin_Date(String login_Date) {
        Login_Date = login_Date;
    }

    /** @return the last logout timestamp string */
    public String getLogout_Date() {
        return Logout_Date;
    }

    /** @param logout_Date the last logout timestamp string */
    public void setLogout_Date(String logout_Date) {
        Logout_Date = logout_Date;
    }
}
|
import React, { Component, createRef, useEffect, useState } from "react";
import ReactDOM from 'react-dom';
import PropTypes from 'prop-types';
export default function Modal(props) {
const [show, setShow] = useState(false);
console.log(props);
const wrapperRef = createRef();
const closeClick = (event) => {
const elementTarget = event.target.dataset.ref;
setShow({
[elementTarget]: { show: false }
});
}
useEffect(() => {
document.addEventListener('click', handleClickOutside, true);
}, []);
const handleClickOutside = event => {
// event.preventDefault();
if (!event.target.dataset.target && !event.target.dataset.toggle) return false;
const elementTarget = event.target.dataset.target.substring(1);
const domNode = document.getElementById(elementTarget);
if (!domNode) {
setShow({ [elementTarget]: { show: true } });
return false;
} else {
setShow({ [elementTarget]: { show: true } });
}
}
let showClass, elementTarget = show[props.id];
if (show[props.id] != undefined) {
elementTarget = show[props.id];
showClass = elementTarget.show ? 'modal flipX open' : 'modal flipX';
} else {
showClass = 'modal flipX'
}
const renderClass = { id: props.id, className: showClass };
return (
<div {...renderClass} ref={wrapperRef}>
<div className="modal-backdrop"></div>
<div className="modal-content">
<div className="modal-header">
{props.id}
<button className="close" onClick={closeClick} data-ref={props.id}>×</button>
</div>
<hr></hr>
<div className="modal-body">
{props.body}
</div>
<div className="modal-footer"></div>
</div>
</div>
);
}
Modal.propTypes = {
  // `id` is rendered as header text and used as a DOM id / dataset key, so
  // it is a string; the original `PropTypes.element` declaration was wrong
  // and produced spurious dev-mode warnings.
  id: PropTypes.string.isRequired,
  body: PropTypes.string.isRequired
};
|
/*
* Copyright [2020-2030] [https://www.stylefeng.cn]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Guns采用APACHE LICENSE 2.0开源协议,您在使用过程中,需要注意以下几点:
*
* 1.请不要删除和修改根目录下的LICENSE文件。
* 2.请不要删除和修改Guns源码头部的版权声明。
* 3.请保留源码和相关描述文件的项目出处,作者声明等。
* 4.分发源码时候,请注明软件出处 https://gitee.com/stylefeng/guns
* 5.在修改包名,模块名称,项目代码等时,请注明软件出处 https://gitee.com/stylefeng/guns
* 6.若您的项目无法满足以上几点,可申请商业授权
*/
package cn.stylefeng.roses.kernel.dict.modular.service.impl;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.util.ObjectUtil;
import cn.stylefeng.roses.kernel.auth.api.context.LoginContext;
import cn.stylefeng.roses.kernel.db.api.factory.PageFactory;
import cn.stylefeng.roses.kernel.db.api.factory.PageResultFactory;
import cn.stylefeng.roses.kernel.db.api.pojo.page.PageResult;
import cn.stylefeng.roses.kernel.dict.api.enums.DictTypeClassEnum;
import cn.stylefeng.roses.kernel.dict.api.exception.DictException;
import cn.stylefeng.roses.kernel.dict.api.exception.enums.DictExceptionEnum;
import cn.stylefeng.roses.kernel.dict.modular.entity.SysDict;
import cn.stylefeng.roses.kernel.dict.modular.entity.SysDictType;
import cn.stylefeng.roses.kernel.dict.modular.mapper.DictTypeMapper;
import cn.stylefeng.roses.kernel.dict.modular.pojo.request.DictTypeRequest;
import cn.stylefeng.roses.kernel.dict.modular.service.DictService;
import cn.stylefeng.roses.kernel.dict.modular.service.DictTypeService;
import cn.stylefeng.roses.kernel.pinyin.api.PinYinApi;
import cn.stylefeng.roses.kernel.rule.enums.StatusEnum;
import cn.stylefeng.roses.kernel.rule.enums.YesOrNotEnum;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.LambdaUpdateWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.util.List;
/**
 * Service implementation for the dictionary type table.
 *
 * @author fengshuonan
 * @date 2020/12/26 22:36
 */
@Service
public class DictTypeServiceImpl extends ServiceImpl<DictTypeMapper, SysDictType> implements DictTypeService {

    @Resource
    private PinYinApi pinYinApi;

    @Override
    public void add(DictTypeRequest dictTypeRequest) {
        // System-class dictionary types may only be operated on by admins
        validateSystemTypeClassOperate(dictTypeRequest);
        SysDictType sysDictType = new SysDictType();
        BeanUtil.copyProperties(dictTypeRequest, sysDictType);
        // Newly created types start out enabled
        sysDictType.setStatusFlag(StatusEnum.ENABLE.getCode());
        // Store the first letter of each pinyin syllable of the name for search
        sysDictType.setDictTypeNamePinyin(pinYinApi.parseEveryPinyinFirstLetter(sysDictType.getDictTypeName()));
        this.save(sysDictType);
    }

    @Override
    @Transactional(rollbackFor = Exception.class)
    public void del(DictTypeRequest dictTypeRequest) {
        // System-class dictionary types may only be operated on by admins
        validateSystemTypeClassOperate(dictTypeRequest);
        // Logical (soft) delete: flip the delete flag rather than removing the row
        SysDictType sysDictType = this.querySysDictType(dictTypeRequest);
        sysDictType.setDelFlag(YesOrNotEnum.Y.getCode());
        this.updateById(sysDictType);
    }

    @Override
    public void edit(DictTypeRequest dictTypeRequest) {
        // System-class dictionary types may only be operated on by admins
        validateSystemTypeClassOperate(dictTypeRequest);
        // Update the record
        SysDictType sysDictType = this.querySysDictType(dictTypeRequest);
        BeanUtil.copyProperties(dictTypeRequest, sysDictType);
        // Null out the type code so the update never overwrites it
        // (MyBatis-Plus skips null fields on updateById)
        sysDictType.setDictTypeCode(null);
        // Refresh the pinyin initials of the (possibly changed) name
        sysDictType.setDictTypeNamePinyin(pinYinApi.parseEveryPinyinFirstLetter(sysDictType.getDictTypeName()));
        this.updateById(sysDictType);
    }

    @Override
    @Transactional(rollbackFor = Exception.class)
    public void editStatus(DictTypeRequest dictTypeRequest) {
        // System-class dictionary types may only be operated on by admins
        validateSystemTypeClassOperate(dictTypeRequest);
        // Update only the status flag of the existing record
        SysDictType oldSysDictType = this.querySysDictType(dictTypeRequest);
        oldSysDictType.setStatusFlag(dictTypeRequest.getStatusFlag());
        this.updateById(oldSysDictType);
    }

    @Override
    public SysDictType detail(DictTypeRequest dictTypeRequest) {
        // `false` suppresses the exception when more than one row matches
        return this.getOne(this.createWrapper(dictTypeRequest), false);
    }

    @Override
    public List<SysDictType> findList(DictTypeRequest dictTypeRequest) {
        return this.list(this.createWrapper(dictTypeRequest));
    }

    @Override
    public PageResult<SysDictType> findPage(DictTypeRequest dictTypeRequest) {
        Page<SysDictType> page = this.page(PageFactory.defaultPage(), this.createWrapper(dictTypeRequest));
        return PageResultFactory.createPageResult(page);
    }

    /**
     * Checks whether dictTypeClass denotes a system-class dictionary; system
     * dictionaries may only be operated on by the super administrator.
     *
     * @author fengshuonan
     * @date 2020/12/25 15:57
     */
    private void validateSystemTypeClassOperate(DictTypeRequest dictTypeRequest) {
        if (DictTypeClassEnum.SYSTEM_TYPE.getCode().equals(dictTypeRequest.getDictTypeClass())) {
            if (!LoginContext.me().getSuperAdminFlag()) {
                throw new DictException(DictExceptionEnum.SYSTEM_DICT_NOT_ALLOW_OPERATION);
            }
        }
    }

    /**
     * Loads the entity by its primary key, raising a DictException when the
     * record does not exist.
     *
     * @author chenjinlong
     * @date 2021/1/26 13:28
     */
    private SysDictType querySysDictType(DictTypeRequest dictTypeRequest) {
        SysDictType sysDictType = this.getById(dictTypeRequest.getDictTypeId());
        if (ObjectUtil.isEmpty(sysDictType)) {
            throw new DictException(DictExceptionEnum.DICT_TYPE_NOT_EXISTED, dictTypeRequest.getDictTypeId());
        }
        return sysDictType;
    }

    /**
     * Builds the query wrapper for detail/list/page queries from the
     * request's optional filter fields.
     *
     * @author fengshuonan
     * @date 2021/1/24 22:03
     */
    private LambdaQueryWrapper<SysDictType> createWrapper(DictTypeRequest dictTypeRequest) {
        LambdaQueryWrapper<SysDictType> queryWrapper = new LambdaQueryWrapper<>();
        Long dictTypeId = dictTypeRequest.getDictTypeId();
        String dictTypeCode = dictTypeRequest.getDictTypeCode();
        String dictTypeName = dictTypeRequest.getDictTypeName();
        // Assemble the WHERE clauses; each condition applies only when its value is present
        queryWrapper.eq(ObjectUtil.isNotNull(dictTypeId), SysDictType::getDictTypeId, dictTypeId);
        queryWrapper.eq(ObjectUtil.isNotNull(dictTypeCode), SysDictType::getDictTypeCode, dictTypeCode);
        queryWrapper.like(ObjectUtil.isNotNull(dictTypeName), SysDictType::getDictTypeName, dictTypeName);
        // Exclude logically deleted rows
        queryWrapper.eq(SysDictType::getDelFlag, YesOrNotEnum.N.getCode());
        return queryWrapper;
    }
}
|
// Sample books collection (in-memory stand-in for a real datastore)
const books = [
  {
    id: 1,
    title: 'The Great Gatsby',
    author: 'F. Scott Fitzgerald',
    genre: 'Novel',
  },
  {
    id: 2,
    title: 'The Catcher in the Rye',
    author: 'J.D. Salinger',
    genre: 'Novel',
  },
  {
    id: 3,
    title: 'To Kill a Mockingbird',
    author: 'Harper Lee',
    genre: 'Novel',
  },
];

const express = require("express");
const app = express();

// GET /search?term=<text> — case-sensitive substring match against title,
// author, or genre. Responds with { results: [...] }.
app.get("/search", (req, res) => {
  // Default to "" so a request without ?term= returns every book instead of
  // throwing (the original called .includes(undefined), a 500 per request).
  const searchTerm = req.query.term || "";
  const results = books.filter(book =>
    book.title.includes(searchTerm) ||
    book.author.includes(searchTerm) ||
    book.genre.includes(searchTerm)
  );
  res.json({
    results,
  });
});

app.listen(3000);
// Minimal ambient typings for the native 'spi-device' package — only the
// surface this project actually calls is declared.
declare module 'spi-device' {
  // Synchronously opens an SPI bus/device pair; `c` carries the options
  // object. NOTE(review): the return value is the opened device handle but
  // is left as `any` here — tighten to SpiDevice if that matches usage.
  export function openSync(a: number, b: number ,c: any): any
  export class SpiDevice {
    // Performs a blocking SPI transfer with the given message data.
    transferSync(data: any): void
  }
}
|
<reponame>knkgun/curve25519-voi
// Copyright (c) 2019 <NAME>. All rights reserved.
// Copyright (c) 2020-2021 Oasis Labs Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package curve
import "github.com/oasisprotocol/curve25519-voi/curve/scalar"
// edwardsMultiscalarMulPippengerVartime computes the multiscalar multiply
// of scalars and points via Pippenger's algorithm, dispatching to the
// vectorized backend when the target supports it. Variable-time.
func edwardsMultiscalarMulPippengerVartime(out *EdwardsPoint, scalars []*scalar.Scalar, points []*EdwardsPoint) *EdwardsPoint {
	if supportsVectorizedEdwards {
		return edwardsMultiscalarMulPippengerVartimeVector(out, nil, nil, scalars, points)
	}
	return edwardsMultiscalarMulPippengerVartimeGeneric(out, nil, nil, scalars, points)
}
// expandedEdwardsMultiscalarMulPippengerVartime handles the "expanded"
// (precomputed) entry point. There is no actual precomputed Pippenger's
// implementation; the expanded static points are unwrapped and fed to the
// ordinary routine, which saves memory and time when we have to fall back
// to the non-precomputed version anyway.
func expandedEdwardsMultiscalarMulPippengerVartime(out *EdwardsPoint, staticScalars []*scalar.Scalar, staticPoints []*ExpandedEdwardsPoint, dynamicScalars []*scalar.Scalar, dynamicPoints []*EdwardsPoint) *EdwardsPoint {
	unwrapped := make([]*EdwardsPoint, 0, len(staticPoints))
	for _, expanded := range staticPoints {
		unwrapped = append(unwrapped, &expanded.point)
	}

	if supportsVectorizedEdwards {
		return edwardsMultiscalarMulPippengerVartimeVector(out, staticScalars, unwrapped, dynamicScalars, dynamicPoints)
	}
	return edwardsMultiscalarMulPippengerVartimeGeneric(out, staticScalars, unwrapped, dynamicScalars, dynamicPoints)
}
// edwardsMultiscalarMulPippengerVartimeGeneric computes
// sum(scalars[i] * points[i]) over the concatenation of the static and
// dynamic inputs using Pippenger's bucket method (portable scalar
// implementation). Variable-time: must not be used with secret scalars.
func edwardsMultiscalarMulPippengerVartimeGeneric(out *EdwardsPoint, staticScalars []*scalar.Scalar, staticPoints []*EdwardsPoint, dynamicScalars []*scalar.Scalar, dynamicPoints []*EdwardsPoint) *EdwardsPoint {
	size := len(staticScalars) + len(dynamicScalars)

	// Digit width in bits. As digit width grows,
	// number of point additions goes down, but amount of
	// buckets and bucket additions grows exponentially.
	var w uint
	switch {
	case size < 500:
		w = 6
	case size < 800:
		w = 7
	default:
		w = 8
	}

	maxDigit := 1 << w
	digitsCount := scalar.ToRadix2wSizeHint(w)
	bucketsCount := maxDigit / 2 // digits are signed+centered hence 2^w/2, excluding 0-th bucket.

	// Collect optimized scalars and points in buffers for repeated access
	// (scanning the whole set per digit position).
	// NOTE: the inner loop variable deliberately shadows the imported
	// `scalar` package inside its body.
	optScalars := make([][43]int8, 0, size)
	for _, scalars := range [][]*scalar.Scalar{staticScalars, dynamicScalars} {
		for _, scalar := range scalars {
			optScalars = append(optScalars, scalar.ToRadix2w(w))
		}
	}
	optPoints, off := make([]projectiveNielsPoint, size), 0
	for _, points := range [][]*EdwardsPoint{staticPoints, dynamicPoints} {
		for i, point := range points {
			optPoints[i+off].SetEdwards(point)
		}
		off += len(points)
	}

	// Prepare 2^w/2 buckets.
	// buckets[i] corresponds to a multiplication factor (i+1).
	//
	// No need to initialize the buckets since calculateColumn initializes
	// them as needed as the first thing in the routine.
	buckets := make([]EdwardsPoint, bucketsCount)

	// calculateColumn accumulates one radix-2^w digit position (idx) of
	// every scalar into the buckets and returns the bucket-weighted sum.
	calculateColumn := func(idx int) EdwardsPoint {
		// Clear the buckets when processing another digit.
		for i := 0; i < bucketsCount; i++ {
			buckets[i].Identity()
		}

		// Iterate over pairs of (point, scalar)
		// and add/sub the point to the corresponding bucket.
		// Note: if we add support for precomputed lookup tables,
		// we'll be adding/subtracting point premultiplied by `digits[i]` to buckets[0].
		var tmp completedPoint
		for i := 0; i < size; i++ {
			digit := int16(optScalars[i][idx])
			if digit > 0 {
				b := uint(digit - 1)
				buckets[b].setCompleted(tmp.AddEdwardsProjectiveNiels(&buckets[b], &optPoints[i]))
			} else if digit < 0 {
				b := uint(-digit - 1)
				buckets[b].setCompleted(tmp.SubEdwardsProjectiveNiels(&buckets[b], &optPoints[i]))
			}
		}

		// Add the buckets applying the multiplication factor to each bucket.
		// The most efficient way to do that is to have a single sum with two running sums:
		// an intermediate sum from last bucket to the first, and a sum of intermediate sums.
		//
		// For example, to add buckets 1*A, 2*B, 3*C we need to add these points:
		//   C
		//   C B
		//   C B A   Sum = C + (C+B) + (C+B+A)
		bucketsIntermediateSum := buckets[bucketsCount-1]
		bucketsSum := buckets[bucketsCount-1]
		for i := int((bucketsCount - 1) - 1); i >= 0; i-- {
			bucketsIntermediateSum.Add(&bucketsIntermediateSum, &buckets[i])
			bucketsSum.Add(&bucketsSum, &bucketsIntermediateSum)
		}

		return bucketsSum
	}

	// Take the high column as an initial value to avoid wasting time doubling
	// the identity element.
	sum := calculateColumn(int(digitsCount - 1))
	for i := int(digitsCount-1) - 1; i >= 0; i-- {
		var sumMul EdwardsPoint
		p := calculateColumn(i)
		// Horner's rule over digit columns: sum = sum * 2^w + column[i].
		sum.Add(sumMul.mulByPow2(&sum, w), &p)
	}

	return out.Set(&sum)
}
// edwardsMultiscalarMulPippengerVartimeVector is the vectorized-backend
// counterpart of edwardsMultiscalarMulPippengerVartimeGeneric: the same
// Pippenger bucket algorithm, but accumulating in the extended/cached
// point representations used by the vector code path. Variable-time.
func edwardsMultiscalarMulPippengerVartimeVector(out *EdwardsPoint, staticScalars []*scalar.Scalar, staticPoints []*EdwardsPoint, dynamicScalars []*scalar.Scalar, dynamicPoints []*EdwardsPoint) *EdwardsPoint {
	size := len(staticScalars) + len(dynamicScalars)

	// Digit width in bits; see the generic implementation for the
	// additions-vs-buckets trade-off behind these thresholds.
	var w uint
	switch {
	case size < 500:
		w = 6
	case size < 800:
		w = 7
	default:
		w = 8
	}

	maxDigit := 1 << w
	digitsCount := scalar.ToRadix2wSizeHint(w)
	bucketsCount := maxDigit / 2 // signed+centered digits, excluding the 0-th bucket

	// Pre-convert all scalars to radix-2^w digits for repeated per-column scans.
	optScalars := make([][43]int8, 0, size)
	for _, scalars := range [][]*scalar.Scalar{staticScalars, dynamicScalars} {
		for _, scalar := range scalars {
			optScalars = append(optScalars, scalar.ToRadix2w(w))
		}
	}
	// Pre-convert all points to the cached representation, static first.
	optPoints, off := make([]cachedPoint, size), 0
	for _, points := range [][]*EdwardsPoint{staticPoints, dynamicPoints} {
		for i, point := range points {
			var ep extendedPoint
			optPoints[i+off].SetExtended(ep.SetEdwards(point))
		}
		off += len(points)
	}

	// buckets[i] accumulates all points whose current digit is ±(i+1);
	// calculateColumn re-initializes them before each use.
	buckets := make([]extendedPoint, bucketsCount)

	// calculateColumn processes one digit position (idx) of every scalar
	// and returns the bucket-weighted column sum.
	calculateColumn := func(idx int) extendedPoint {
		for i := 0; i < bucketsCount; i++ {
			buckets[i].Identity()
		}

		// Add/sub each point into the bucket selected by its signed digit.
		for i := 0; i < size; i++ {
			digit := int16(optScalars[i][idx])
			if digit > 0 {
				b := uint(digit - 1)
				buckets[b].AddExtendedCached(&buckets[b], &optPoints[i])
			} else if digit < 0 {
				b := uint(-digit - 1)
				buckets[b].SubExtendedCached(&buckets[b], &optPoints[i])
			}
		}

		// Weight bucket i by (i+1) with two running sums, as in the generic
		// implementation: Sum = C + (C+B) + (C+B+A).
		bucketsIntermediateSum := buckets[bucketsCount-1]
		bucketsSum := buckets[bucketsCount-1]
		for i := int((bucketsCount - 1) - 1); i >= 0; i-- {
			var cp cachedPoint
			bucketsIntermediateSum.AddExtendedCached(&bucketsIntermediateSum, cp.SetExtended(&buckets[i]))
			bucketsSum.AddExtendedCached(&bucketsSum, cp.SetExtended(&bucketsIntermediateSum))
		}

		return bucketsSum
	}

	// Start from the high column to avoid doubling the identity, then apply
	// Horner's rule: sum = sum * 2^w + column[i].
	sum := calculateColumn(int(digitsCount - 1))
	for i := int(digitsCount-1) - 1; i >= 0; i-- {
		var (
			sumMul extendedPoint
			cp     cachedPoint
		)
		ep := calculateColumn(i)
		sum.AddExtendedCached(sumMul.MulByPow2(&sum, w), cp.SetExtended(&ep))
	}

	return out.setExtended(&sum)
}
|
<filename>3rdparty/astc-codec/src/decoder/test/weight_infill_test.cc<gh_stars>1000+
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "src/decoder/weight_infill.h"
#include "src/decoder/footprint.h"
#include <gtest/gtest.h>
#include <vector>
namespace astc_codec {
namespace {
// Make sure that the physical size of the bit representations for certain
// dimensions of weight grids matches our expectations.
// CountBitsForWeights arguments are (grid width, grid height, max weight
// value); the max value selects the bit/trit/quint ISE encoding.
TEST(ASTCWeightInfillTest, TestGetBitCount) {
  // Bit encodings
  EXPECT_EQ(32, CountBitsForWeights(4, 4, 3));
  EXPECT_EQ(48, CountBitsForWeights(4, 4, 7));
  EXPECT_EQ(24, CountBitsForWeights(2, 4, 7));
  EXPECT_EQ(8, CountBitsForWeights(2, 4, 1));

  // Trit encodings
  EXPECT_EQ(32, CountBitsForWeights(4, 5, 2));
  EXPECT_EQ(26, CountBitsForWeights(4, 4, 2));
  EXPECT_EQ(52, CountBitsForWeights(4, 5, 5));
  EXPECT_EQ(42, CountBitsForWeights(4, 4, 5));

  // Quint encodings
  EXPECT_EQ(21, CountBitsForWeights(3, 3, 4));
  EXPECT_EQ(38, CountBitsForWeights(4, 4, 4));
  EXPECT_EQ(49, CountBitsForWeights(3, 7, 4));
  EXPECT_EQ(52, CountBitsForWeights(4, 3, 19));
  EXPECT_EQ(70, CountBitsForWeights(4, 4, 19));
}
// Make sure that we bilerp our weights properly: a 3x3 weight grid is
// bilinearly infilled across a 5x5 footprint, so each interior texel is the
// interpolation of its surrounding grid weights.
TEST(ASTCWeightInfillTest, TestInfillBilerp) {
  std::vector<int> weights = InfillWeights(
      {{ 1, 3, 5, 3, 5, 7, 5, 7, 9 }}, Footprint::Get5x5(), 3, 3);

  std::vector<int> expected_weights = {
      1, 2, 3, 4, 5,
      2, 3, 4, 5, 6,
      3, 4, 5, 6, 7,
      4, 5, 6, 7, 8,
      5, 6, 7, 8, 9 };

  ASSERT_EQ(weights.size(), expected_weights.size());
  // size_t index: the original `int i` compared signed against the
  // unsigned weights.size(), triggering -Wsign-compare.
  for (size_t i = 0; i < weights.size(); ++i) {
    EXPECT_EQ(weights[i], expected_weights[i]);
  }
}
} // namespace
} // namespace astc_codec
|
/**
 * For SSR with Emotion the cache is what collects the generated styles;
 * this builds a fresh cache keyed with the default "css" prefix.
 */
import createCache from "@emotion/cache";

export function createEmotionCache() {
  const options = { key: "css" };
  return createCache(options);
}
|
-- ***************************************************************************
-- File: 4_5.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
-- Write the report below to 4_5.lis: every synonym (any owner) that
-- resolves to an object in the current user's schema, together with the
-- underlying object's type (truncated to 6 chars for column width).
SPOOL 4_5.lis
SELECT ds.owner owner, table_owner,
       SUBSTR(dc.table_type,1,6) table_type,
       ds.table_name table_name, synonym_name
-- Join synonyms to the current user's catalog by object name
  FROM dba_synonyms ds, user_catalog dc
 WHERE ds.table_name = dc.table_name
 ORDER BY ds.owner, table_owner, table_type, ds.table_name;
SPOOL OFF
|
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# What to do
sign=false
verify=false
build=false
# NOTE(review): setupenv is never read; the --setup option sets `setup`
# instead (checked later) — confirm whether this variable is dead.
setupenv=false

# Systems to build
linux=true
windows=true
osx=true

# Other Basic variables
SIGNER=
VERSION=
commit=false
url=https://github.com/dachcoin/dach
proc=2
mem=2000
# Virtualization backend for gitian: LXC by default, KVM via --kvm
lxc=true
osslTarUrl=http://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz
osslPatchUrl=https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch
scriptName=$(basename -- "$0")
# Signing command passed to gsign; --detach-sign replaces it with a no-op
signProg="gpg --detach-sign"
commitFiles=true

# Help Message
read -d '' usage <<- EOF
Usage: $scriptName [-c|u|v|b|s|B|o|h|j|m|] signer version
Run this script from the directory containing the dach, gitian-builder, gitian.sigs, and dach-detached-sigs.
Arguments:
signer GPG signer to sign each build assert file
version Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified
Options:
-c|--commit Indicate that the version argument is for a commit or branch
-u|--url Specify the URL of the repository. Default is https://github.com/dachcoin/dach
-v|--verify Verify the gitian build
-b|--build Do a gitian build
-s|--sign Make signed binaries for Windows and Mac OSX
-B|--buildsign Build both signed and unsigned binaries
-o|--os Specify which Operating Systems the build is for. Default is lwx. l for linux, w for windows, x for osx, a for aarch64
-j Number of processes to use. Default 2
-m Memory to allocate in MiB. Default 2000
--kvm Use KVM instead of LXC
--setup Setup the gitian building environment. Uses KVM. If you want to use lxc, use the --lxc option. Only works on Debian-based systems (Ubuntu, Debian)
--detach-sign Create the assert file for detached signing. Will not commit anything.
--no-commit Do not commit anything to git
-h|--help Print this help message
EOF
# Get options and arguments
# Each case consumes its flag; flags taking a value also `shift` the value.
# Parsing stops at the first unrecognized word, which is then treated as the
# positional signer/version arguments further below.
while :; do
    case $1 in
        # Verify
        -v|--verify)
            verify=true
            ;;
        # Build
        -b|--build)
            build=true
            ;;
        # Sign binaries
        -s|--sign)
            sign=true
            ;;
        # Build then Sign
        -B|--buildsign)
            sign=true
            build=true
            ;;
        # PGP Signer
        -S|--signer)
            if [ -n "$2" ]
            then
                SIGNER=$2
                shift
            else
                echo 'Error: "--signer" requires a non-empty argument.'
                exit 1
            fi
            ;;
        # Operating Systems
        -o|--os)
            if [ -n "$2" ]
            then
                linux=false
                windows=false
                osx=false
                aarch64=false
                if [[ "$2" = *"l"* ]]
                then
                    linux=true
                fi
                if [[ "$2" = *"w"* ]]
                then
                    windows=true
                fi
                if [[ "$2" = *"x"* ]]
                then
                    osx=true
                fi
                if [[ "$2" = *"a"* ]]
                then
                    aarch64=true
                fi
                shift
            else
                echo 'Error: "--os" requires an argument containing an l (for linux), w (for windows), x (for Mac OSX), or a (for aarch64)\n'
                exit 1
            fi
            ;;
        # Help message
        -h|--help)
            echo "$usage"
            exit 0
            ;;
        # Commit or branch
        -c|--commit)
            commit=true
            ;;
        # Number of Processes
        -j)
            if [ -n "$2" ]
            then
                proc=$2
                shift
            else
                echo 'Error: "-j" requires an argument'
                exit 1
            fi
            ;;
        # Memory to allocate
        -m)
            if [ -n "$2" ]
            then
                mem=$2
                shift
            else
                echo 'Error: "-m" requires an argument'
                exit 1
            fi
            ;;
        # URL
        # Fix: the help text advertises -u|--url, but only -u was accepted
        -u|--url)
            if [ -n "$2" ]
            then
                url=$2
                shift
            else
                echo 'Error: "-u" requires an argument'
                exit 1
            fi
            ;;
        # kvm
        --kvm)
            lxc=false
            ;;
        # Detach sign
        --detach-sign)
            signProg="true"
            commitFiles=false
            ;;
        # Commit files
        --no-commit)
            commitFiles=false
            ;;
        # Setup
        --setup)
            setup=true
            ;;
        *) # Default case: If no more options then break out of the loop.
            break
    esac
    shift
done
# Set up LXC
if [[ $lxc = true ]]
then
    export USE_LXC=1
    export LXC_BRIDGE=lxcbr0
    sudo ifconfig lxcbr0 up 10.0.2.2
fi

# Check for OSX SDK
if [[ ! -e "gitian-builder/inputs/MacOSX10.11.sdk.tar.gz" && $osx == true ]]
then
    echo "Cannot build for OSX, SDK does not exist. Will build for other OSes"
    osx=false
fi

# Get signer
# Fix: the original wrote [[ -n"$1" ]]; without the space the expression is
# the single word "-n<arg>", which is always non-empty and therefore always
# true — it never actually tested whether a signer argument was supplied.
if [[ -n "$1" ]]
then
    SIGNER=$1
    shift
fi

# Get version
if [[ -n "$1" ]]
then
    VERSION=$1
    COMMIT=$VERSION
    shift
fi

# Check that a signer is specified
if [[ $SIGNER == "" ]]
then
    echo "$scriptName: Missing signer."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Check that a version is specified
if [[ $VERSION == "" ]]
then
    echo "$scriptName: Missing version."
    echo "Try $scriptName --help for more information"
    exit 1
fi

# Add a "v" if no -c
if [[ $commit = false ]]
then
    COMMIT="v${VERSION}"
fi
echo ${COMMIT}

# Setup build environment
if [[ $setup = true ]]
then
    sudo apt-get install ruby apache2 git apt-cacher-ng python-vm-builder qemu-kvm qemu-utils
    git clone https://github.com/dachcoin/gitian.sigs.git
    git clone https://github.com/dachcoin/dach-detached-sigs.git
    git clone https://github.com/devrandom/gitian-builder.git
    pushd ./gitian-builder
    if [[ -n "$USE_LXC" ]]
    then
        sudo apt-get install lxc
        bin/make-base-vm --suite trusty --arch amd64 --lxc
    else
        bin/make-base-vm --suite trusty --arch amd64
    fi
    popd
fi

# Set up build
pushd ./dach
git fetch
git checkout ${COMMIT}
popd
# Build
if [[ $build = true ]]
then
    # Make output folder
    mkdir -p ./dach-binaries/${VERSION}

    # Build Dependencies
    echo ""
    echo "Building Dependencies"
    echo ""
    pushd ./gitian-builder
    mkdir -p inputs
    wget -N -P inputs $osslPatchUrl
    wget -N -P inputs $osslTarUrl
    make -C ../dach/depends download SOURCES_PATH=`pwd`/cache/common

    # Linux
    if [[ $linux = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Linux"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit dach=${COMMIT} --url dach=${url} ../dach/contrib/gitian-descriptors/gitian-linux.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-linux --destination ../gitian.sigs/ ../dach/contrib/gitian-descriptors/gitian-linux.yml
        mv build/out/dach-*.tar.gz build/out/src/dach-*.tar.gz ../dach-binaries/${VERSION}
    fi
    # Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Windows"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit dach=${COMMIT} --url dach=${url} ../dach/contrib/gitian-descriptors/gitian-win.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-unsigned --destination ../gitian.sigs/ ../dach/contrib/gitian-descriptors/gitian-win.yml
        mv build/out/dach-*-win-unsigned.tar.gz inputs/dach-win-unsigned.tar.gz
        mv build/out/dach-*.zip build/out/dach-*.exe ../dach-binaries/${VERSION}
    fi
    # Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit dach=${COMMIT} --url dach=${url} ../dach/contrib/gitian-descriptors/gitian-osx.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-unsigned --destination ../gitian.sigs/ ../dach/contrib/gitian-descriptors/gitian-osx.yml
        mv build/out/dach-*-osx-unsigned.tar.gz inputs/dach-osx-unsigned.tar.gz
        mv build/out/dach-*.tar.gz build/out/dach-*.dmg ../dach-binaries/${VERSION}
    fi
    # AArch64
    if [[ $aarch64 = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} AArch64"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit dach=${COMMIT} --url dach=${url} ../dach/contrib/gitian-descriptors/gitian-aarch64.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-aarch64 --destination ../gitian.sigs/ ../dach/contrib/gitian-descriptors/gitian-aarch64.yml
        mv build/out/dach-*.tar.gz build/out/src/dach-*.tar.gz ../dach-binaries/${VERSION}
    # Fix: this `fi` was missing, leaving the if/fi counts unbalanced (a
    # bash syntax error) and nesting the commit step inside the AArch64 branch
    fi
    popd

    if [[ $commitFiles = true ]]
    then
        # Commit to gitian.sigs repo
        echo ""
        echo "Committing ${VERSION} Unsigned Sigs"
        echo ""
        pushd gitian.sigs
        git add ${VERSION}-linux/${SIGNER}
        git add ${VERSION}-aarch64/${SIGNER}
        git add ${VERSION}-win-unsigned/${SIGNER}
        git add ${VERSION}-osx-unsigned/${SIGNER}
        git commit -a -m "Add ${VERSION} unsigned sigs for ${SIGNER}"
        popd
    fi
fi
# Verify the build
if [[ $verify = true ]]
then
    # Linux
    pushd ./gitian-builder
    echo ""
    echo "Verifying v${VERSION} Linux"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-linux ../dach/contrib/gitian-descriptors/gitian-linux.yml
    # Windows
    echo ""
    echo "Verifying v${VERSION} Windows"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-unsigned ../dach/contrib/gitian-descriptors/gitian-win.yml
    # Mac OSX
    echo ""
    echo "Verifying v${VERSION} Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-unsigned ../dach/contrib/gitian-descriptors/gitian-osx.yml
    # AArch64
    echo ""
    echo "Verifying v${VERSION} AArch64"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-aarch64 ../dach/contrib/gitian-descriptors/gitian-aarch64.yml
    # Signed Windows
    echo ""
    echo "Verifying v${VERSION} Signed Windows"
    echo ""
    # Fix: this step verified the osx-signed release against the OSX signer
    # descriptor (copy-paste from the block below); it must verify the
    # Windows signed release against the Windows signer descriptor.
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-signed ../dach/contrib/gitian-descriptors/gitian-win-signer.yml
    # Signed Mac OSX
    echo ""
    echo "Verifying v${VERSION} Signed Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-signed ../dach/contrib/gitian-descriptors/gitian-osx-signer.yml
    popd
fi
# Sign binaries
# Attaches the detached code signatures (expected to be present at the
# signature=${COMMIT} ref) to the unsigned Windows/OSX build outputs, then
# optionally commits the resulting assert files to gitian.sigs.
if [[ $sign = true ]]
then
    pushd ./gitian-builder
    # Sign Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Windows"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../dach/contrib/gitian-descriptors/gitian-win-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-signed --destination ../gitian.sigs/ ../dach/contrib/gitian-descriptors/gitian-win-signer.yml
        mv build/out/dach-*win64-setup.exe ../dach-binaries/${VERSION}
        mv build/out/dach-*win32-setup.exe ../dach-binaries/${VERSION}
    fi
    # Sign Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../dach/contrib/gitian-descriptors/gitian-osx-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-signed --destination ../gitian.sigs/ ../dach/contrib/gitian-descriptors/gitian-osx-signer.yml
        mv build/out/dach-osx-signed.dmg ../dach-binaries/${VERSION}/dach-${VERSION}-osx.dmg
    fi
    popd

    if [[ $commitFiles = true ]]
    then
        # Commit Sigs
        pushd gitian.sigs
        echo ""
        echo "Committing ${VERSION} Signed Sigs"
        echo ""
        git add ${VERSION}-win-signed/${SIGNER}
        git add ${VERSION}-osx-signed/${SIGNER}
        git commit -a -m "Add ${VERSION} signed binary sigs for ${SIGNER}"
        popd
    fi
fi
|
#!/bin/bash
# Bring up the docker-compose stack (from the docker/ subdirectory of the
# current working directory) in detached mode.
# Abort on any failure — in particular, never run docker-compose in the
# wrong directory when the `cd` fails.
set -e
cd docker/
docker-compose up -d
#!/bin/bash
# Copyright 2020 CERN for the benefit of the ATLAS collaboration.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# - Benedikt Ziemons <benedikt.ziemons@cern.ch>, 2020
set -eo pipefail

# Move into the Rucio source checkout and export RUCIO_HOME for the tools
# invoked below.
function srchome() {
    export RUCIO_HOME=/usr/local/src/rucio
    # Quoted so the cd stays a single word even if the path ever contains
    # spaces (the original unquoted $RUCIO_HOME would word-split).
    cd "$RUCIO_HOME"
}

# Dispatch on the test suite selected by the $SUITE environment variable.
if [[ $SUITE == "syntax" ]]; then
    srchome
    tools/test/check_syntax.sh
fi

if [[ $SUITE == "python3" ]]; then
    srchome
    tools/test/check_python_3.sh
fi

if [[ $SUITE == "client" ]]; then
    srchome
    nosetests -v lib/rucio/tests/test_clients.py
    nosetests -v lib/rucio/tests/test_bin_rucio.py
    nosetests -v lib/rucio/tests/test_module_import.py
fi

if [[ $SUITE == "all" ]]; then
    tools/run_tests_docker.sh
fi

if [[ $SUITE == "multi_vo" ]]; then
    tools/run_multi_vo_tests_docker.sh
fi
|
/**
 * Minimal file-management service: a registration hook plus upload/delete
 * operations. All method bodies are intentionally stubbed.
 */
class FilesService
{
    // Files registered with this service (in-memory only).
    private $registeredFiles = [];
    /**
     * Hook invoked when a file is registered; emits the registration event.
     */
    public function onRegistered()
    {
        // Emit event when a file is registered
        // Implementation details omitted for brevity
    }
    /**
     * Upload a file identified by name.
     *
     * @param string $fileName name of the file to upload
     */
    public function uploadFile($fileName)
    {
        // Simulate uploading a file with the given file name
        // Implementation details omitted for brevity
    }
    /**
     * Delete a file identified by name.
     *
     * @param string $fileName name of the file to delete
     */
    public function deleteFile($fileName)
    {
        // Simulate deleting a file with the given file name
        // Implementation details omitted for brevity
    }
}
/**
 * Unit tests for FilesService; a fresh, registered service instance is
 * created before every test.
 */
class FilesServiceTest extends TestCase
{
    // Service under test; rebuilt in setUp() for each test.
    private ?FilesService $files;
    protected function setUp(): void
    {
        parent::setUp();
        $this->files = new FilesService();
        $this->files->onRegistered();
    }
    /**
     * Uploading a named file should succeed.
     */
    public function testUploadFile()
    {
        $fileName = 'example.txt';
        $this->files->uploadFile($fileName);
        // Add assertions to test the upload functionality
    }
    /**
     * Deleting a previously uploaded file should succeed.
     */
    public function testDeleteFile()
    {
        $fileName = 'example.txt';
        $this->files->uploadFile($fileName);
        $this->files->deleteFile($fileName);
        // Add assertions to test the delete functionality
    }
}
#!/bin/bash
# Show the device's Wi-Fi status (IP address + SSID) on the attached display.
# Shebang changed to bash: `source` is a bashism and is not guaranteed to
# exist under a plain POSIX /bin/sh.
source lib.sh
source text.sh
init_display
# First IPv4 address reported for wlan0.
# BUG FIX: in the original regex the dot was grouped with only the *last*
# octet alternative, i.e. (a|b|c\.){3}, so dotted quads were not matched
# reliably. Group each octet with its trailing dot: ((octet)\.){3}octet.
ip=`ifconfig wlan0 | grep -oE "\b((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b" | head -n1`
write_text "IP: $ip"
ssid=`iwgetid -r`
write_text "SSID: $ssid"
<filename>docs/html/search/related_4.js
// Auto-generated Doxygen search index (identifiers and the pages that
// document them). Do not edit by hand; regenerate the docs instead.
var searchData=
[
  ['stringtoatomtype',['stringToAtomType',['../class_smol_dock_1_1_atom.html#abe46f23e1f169949c8d3916b7e0ad38b',1,'SmolDock::Atom']]],
  ['stringtorestype',['stringToResType',['../class_smol_dock_1_1_amino_acid.html#a196e09203295c01d2a5fc157ee89f7ee',1,'SmolDock::AminoAcid']]]
];
|
package com.jiulong.eureka.application;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.cloud.netflix.eureka.EnableEurekaClient;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
/**
 * Eureka client starter: Spring Boot application registering itself with
 * the Eureka service registry.
 * @author kaishui
 */
@Configuration
@ComponentScan({"com.jiulong"})
@EnableAutoConfiguration
@EnableEurekaClient
public class EurekaClientApplication {
    /**
     * Spring Eureka client application entry point.
     * (Original comment translated from Chinese: "spring eureka server
     * startup entry point".)
     * @param args command-line arguments passed through to Spring Boot
     */
    public static void main(String[] args) {
        new SpringApplicationBuilder(EurekaClientApplication.class).web(true).run(args);
    }
}
|
#include<stdio.h>
#include<string.h>

/* Copy a C string into a separately allocated character array and print both. */
int main(){
    char str[] = "hello";
    size_t len = strlen(str);          /* hoisted: the original re-ran strlen
                                          on every loop check and again for the
                                          terminator index */
    char char_array[len + 1];
    memcpy(char_array, str, len + 1);  /* copies the terminating '\0' too,
                                          replacing the manual char-by-char loop */
    printf("String = %s\n", str);
    printf("Character array = %s\n", char_array);
    return 0;
}
// http://www.cplusplus.com/forum/general/16532/
// GB boost header files compile error
#include <cstdio>
#include <string>
#include <iostream>
#include <boost/iostreams/device/file_descriptor.hpp>
#include <boost/iostreams/stream.hpp>
namespace io = boost::iostreams;

// Stream the output of `find . -type f` to stdout, one line at a time,
// by wrapping the popen pipe in a boost.iostreams stream buffer.
int main()
{
    FILE* pipe = popen("find . -type f", "r");
    if (pipe == NULL)
    {
        // popen can fail (e.g. fork/pipe failure); the original dereferenced
        // the handle unconditionally.
        std::perror("popen");
        return 1;
    }
    io::stream_buffer<io::file_descriptor_source> fpstream(fileno(pipe));
    std::istream in(&fpstream);
    std::string line;
    // Test the stream as the loop condition so the final failed getline does
    // not print a spurious empty line (the original checked before reading).
    while (std::getline(in, line))
    {
        std::cout << line << "\n";
    }
    pclose(pipe);  // the original leaked the child process / pipe handle
    return 0;
}
def sum_matrix_layers(arr):
    """Return the sum of every element across all layers (rows) of ``arr``.

    Args:
        arr: an iterable of iterables of numbers (e.g. a 2-D list).

    Returns:
        The total of all elements; 0 for an empty input.
    """
    # Built-in sum over per-layer sums replaces the manual double loop.
    return sum(sum(layer) for layer in arr)
# Demo: sum every element of a 2x2 matrix and print the total (10).
total = sum_matrix_layers([[1, 2], [3, 4]])
print(total)
import { createSlice, PayloadAction } from "@reduxjs/toolkit";
import { DocNote, DocBeat, DocPattern, DocTrack } from "../firebase/types";
import {
Track,
Tracks,
TrackId,
Pattern,
Patterns,
PatternId,
Note,
Notes,
NoteId,
Beat,
Beats,
BeatId,
} from "./types";
// Redux state for the track editor: the current selection plus the entity
// maps the reducers below populate.
export interface TrackStore {
    selectedTrackId?: TrackId; // set by selectTrack
    selectedPatternId?: PatternId; // set by selectPattern / auto-picked in setPatterns
    tracks: Tracks; // all tracks, keyed by id
    track?: Track; // detail document for the selected track, if loaded
    patterns: Patterns; // patterns loaded for the selected track, keyed by id
}
// Empty store: nothing selected, no entities loaded.
const initialState: TrackStore = {
    selectedTrackId: undefined,
    selectedPatternId: undefined,
    tracks: {},
    track: undefined,
    patterns: {},
};
const trackSlice = createSlice({
name: "track",
initialState,
reducers: {
selectTrack(state, { payload }: PayloadAction<TrackId>) {
state.selectedTrackId = payload;
state.track = undefined;
state.selectedPatternId = undefined;
state.patterns = {};
},
selectPattern(state, { payload }: PayloadAction<PatternId>) {
state.selectedPatternId = payload;
},
setTracks(state, { payload }: PayloadAction<Tracks>) {
state.tracks = payload;
},
setTrack(state, { payload }: PayloadAction<DocTrack>) {
state.track = payload;
},
setPatterns(
state,
{ payload }: PayloadAction<Record<PatternId, DocPattern>>,
) {
for (const patternId in payload) {
state.patterns[patternId] = {
...payload[patternId]!,
notes: state.patterns[patternId]?.notes || {},
};
}
if (state.selectedTrackId && !state.selectedPatternId) {
const firstPattern = Object.keys(state.patterns).reduce<
[PatternId, Pattern] | undefined
>(
(acc, patternId) =>
acc === undefined ||
state.patterns[patternId]!.order < acc[1].order
? [patternId, state.patterns[patternId]! as Pattern]
: acc,
undefined,
);
if (firstPattern) {
state.selectedPatternId = firstPattern[0];
}
}
},
setPatternNotes(
state,
{
payload: { patternId, notes },
}: PayloadAction<{
patternId: PatternId;
notes: Record<NoteId, DocNote>;
}>,
) {
const pattern = state.patterns[patternId];
if (pattern) {
let noteId: NoteId;
for (noteId in notes) {
pattern.notes[noteId] = {
...notes[noteId]!,
beats: pattern.notes[noteId]?.beats || {},
};
}
} else {
console.error(
`Can't set notes for pattern ${patternId} because pattern doesn't exist`,
);
}
},
updatePatternNotes(
state,
{
payload: { patternId, notes },
}: PayloadAction<{
patternId: PatternId;
notes: { [note in NoteId]?: DocNote | null };
}>,
) {
const pattern = state.patterns[patternId];
if (pattern) {
let noteId: NoteId;
for (noteId in notes) {
const note = notes[noteId];
if (note) {
pattern.notes[noteId] = {
...note,
beats: pattern.notes[noteId]?.beats || {},
};
} else {
delete pattern.notes[noteId];
}
}
} else {
console.error(
`Can't update notes for pattern ${patternId} because pattern doesn't exist`,
);
}
},
updatePatternBeats(
state,
{
payload: { patternId, noteId, beats },
}: PayloadAction<{
patternId: PatternId;
noteId: NoteId;
beats: Record<BeatId, DocBeat | null>;
}>,
) {
const pattern = state.patterns[patternId];
if (pattern) {
const note = pattern.notes[noteId];
if (note) {
for (const beatIdS in beats) {
const beatId = parseInt(beatIdS) ?? -1;
const beat = beats[beatId];
if (beat) {
note.beats[beatId] = beat;
} else {
if (note.beats[beatId]) {
delete note.beats[beatId];
}
}
}
} else {
console.error(
`Can't update beats for note ${noteId} in pattern ${patternId} because note doesn't exist`,
);
}
} else {
console.error(
`Can't update beats for pattern ${patternId} because pattern doesn't exist`,
);
}
},
},
});
// Action creators generated by the slice, plus the slice itself for the store.
export const actions = trackSlice.actions;
export default trackSlice;
|
public class MatrixOptimization {
    /**
     * Computes the maximum path sum from the top-left to the bottom-right
     * cell of a square matrix, moving only right or down.
     *
     * @param matrix N x N grid of values (may be empty)
     * @return the maximum achievable path sum, or 0 for an empty matrix
     */
    public static int optimize(int[][] matrix) {
        int N = matrix.length;
        if (N == 0) {
            // Guard: the original indexed matrix[0][0] unconditionally and
            // threw ArrayIndexOutOfBoundsException for an empty input.
            return 0;
        }
        int[][] dp = new int[N][N];
        dp[0][0] = matrix[0][0];
        // First row/column admit a single path: straight along the edge.
        for (int i = 1; i < N; i++) {
            dp[0][i] = dp[0][i - 1] + matrix[0][i];
            dp[i][0] = dp[i - 1][0] + matrix[i][0];
        }
        // Each inner cell extends the better of its top/left neighbours.
        for (int i = 1; i < N; i++) {
            for (int j = 1; j < N; j++) {
                dp[i][j] = Math.max(dp[i][j - 1], dp[i - 1][j]) + matrix[i][j];
            }
        }
        return dp[N - 1][N - 1];
    }
}
const mongodb = require('mongodb');
const multer = require('multer');
const uuidv4 = require('uuid/v4');
const base64URL = require('base64url');
const handleError = require('./../utility/handleError');
const handleInternalError = require('./../utility/handleInternalError');
const isString = require('./../../validator/isString');
const isStencilData = require('./../../validate/stencilData/isStencilData');
// Multer disk storage: uploads land in tmpFiles/ under a collision-resistant
// name built from the base64url-encoded original name plus a random UUID.
const storage = multer.diskStorage({
    destination: 'tmpFiles',
    filename: (_req, file, nameFile) => {
        const fileName = base64URL.encode(file.originalname);
        const uuid = uuidv4();
        // nameFile is multer's (err, filename) callback.
        nameFile(null, fileName + uuid);
    }
});
// Single-file upload middleware for the 'archive' form field, with strict
// limits (1 file, 1 non-file field, 100 MB max) to bound request size.
const upload = multer({
    storage,
    limits: {
        fieldNameSize: 20,
        fieldSize: 10000,
        fields: 1,
        fileSize: 100000000,
        files: 1,
        parts: 2,
        headerPairs: 200
    }
}).single('archive');
// const data = JSON.parse(req.body.data)
// const url = req.file.archive.path
// const mimeType = req.file.acrive.mimetype
// <a href={url} download={data.fileName} type={mimeType} />
/**
 * Express handler: accepts a multipart order upload ('archive' file plus a
 * JSON string in the 'data' field), validates the payload and hands it to
 * handleData for persistence.
 */
function makeOrder(req, res) {
    upload(req, res, err => {
        if(err) {
            if(err instanceof multer.MulterError) {
                handleError(res, { invalidData: 'notAValidOrderData' });
            } else {
                handleInternalError(
                    { data: req.body.data, file: req.file },
                    // BUG FIX: the original passed `error`, an undefined
                    // identifier in this scope (ReferenceError); the upload
                    // callback's error is `err`.
                    err,
                    'Fail to receive order data',
                    res, 'notReceivedOrderData'
                );
            }
            return;
        }
        if(!isString(req.body.data)) {
            handleError(res, { invalidData: 'notAString' });
            return;
        }
        try {
            const data = JSON.parse(req.body.data);
            handleData(data, extractFileData(req.file), req.user, req.appShared.db, res);
        } catch(error) {
            // JSON.parse failed — the 'data' field was not valid JSON.
            handleError(res, { invalidData: 'notAJSON' });
        }
    });
}
/**
 * Validate the parsed order payload, then persist the order and its file
 * metadata as two cross-referencing documents. Responds with the new
 * order id on success.
 */
async function handleData(orderData, fileData, user, db, res) {
    const validationResult = isStencilData(orderData);
    if(!validationResult.valid) {
        handleError(res, validationResult);
        return;
    }
    // Pre-generate both ids so each document can reference the other.
    const orderID = new mongodb.ObjectID();
    const fileID = new mongodb.ObjectID();
    const fileDocument = {
        _id: fileID,
        orderID,
        organizationID: user.organizationID,
        ...fileData
    };
    const orderDocument = {
        _id: orderID,
        fileID,
        ...orderData,
        userID: user._id,
        organizationID: user.organizationID,
        date: new Date(),
        status: 'waiting'
    };
    try {
        const orderCollection = db.collection('order');
        const fileCollection = db.collection('file');
        const _result = await Promise.all([
            orderCollection.insertOne(orderDocument),
            fileCollection.insertOne(fileDocument)
        ]);
        res.status(200).json({ createdOrder: orderID });
    } catch(error) {
        // TODO: roll back any partial insert and remove the uploaded file.
        // BUG FIX: the original logged `{ document }` — an undefined
        // identifier that would itself throw inside this catch block —
        // and misspelled "insert" in the log message.
        handleInternalError(
            { orderDocument, fileDocument },
            error,
            '[mongodb] Failed to insert',
            res, 'makeOrder'
        );
    }
}
// Pick only the whitelisted metadata fields off a multer file object so
// internal/temporary properties (e.g. the tmp path) never reach the database.
function extractFileData(file) {
    const fields = ['encoding', 'mimetype', 'filename', 'size'];
    const data = {};
    for (const key of fields) {
        data[key] = file[key];
    }
    return data;
}
module.exports = makeOrder; |
/**
* @license
* Copyright (c) 2018 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
/// <reference path="../../../node_modules/@types/mocha/index.d.ts" />
/// <reference path="../../../node_modules/@types/chai/index.d.ts" />
import {StyleInfo, styleMap} from '../../directives/styleMap.js';
import {render} from '../../lib/render.js';
import {html} from '../../lit-html.js';
const assert = chai.assert;
// Tests for the styleMap directive: applying, updating and removing inline
// styles, cooperating with static style text, and rejecting unsupported
// binding positions.
suite('styleMap', () => {
  let container: HTMLDivElement;
  // Render a <div> whose entire style attribute comes from styleMap.
  function renderStyleMap(cssInfo: StyleInfo) {
    render(html`<div style="${styleMap(cssInfo)}"></div>`, container);
  }
  // Render with static style text surrounding the styleMap binding.
  function renderStyleMapStatic(cssInfo: StyleInfo) {
    render(
        html`<div style="height: 1px; ${styleMap(cssInfo)} color: red"></div>`,
        container);
  }
  setup(() => {
    container = document.createElement('div');
  });
  test('adds and updates styles', () => {
    renderStyleMap({marginTop: '2px', paddingBottom: '4px', opacity: '0.5'});
    const el = container.firstElementChild! as HTMLElement;
    assert.equal(el.style.marginTop, '2px');
    assert.equal(el.style.paddingBottom, '4px');
    assert.equal(el.style.opacity, '0.5');
    // Re-render with new values: existing properties must be updated in place.
    renderStyleMap({marginTop: '4px', paddingBottom: '8px', opacity: '0.55'});
    assert.equal(el.style.marginTop, '4px');
    assert.equal(el.style.paddingBottom, '8px');
    assert.equal(el.style.opacity, '0.55');
  });
  test('removes styles', () => {
    renderStyleMap({marginTop: '2px', paddingBottom: '4px'});
    const el = container.firstElementChild! as HTMLElement;
    assert.equal(el.style.marginTop, '2px');
    assert.equal(el.style.paddingBottom, '4px');
    // Properties absent from the next render must be cleared.
    renderStyleMap({});
    assert.equal(el.style.marginTop, '');
    assert.equal(el.style.paddingBottom, '');
  });
  test('works with static styles', () => {
    renderStyleMapStatic({marginTop: '2px', paddingBottom: '4px'});
    const el = container.firstElementChild! as HTMLElement;
    assert.equal(el.style.height, '1px');
    assert.equal(el.style.color, 'red');
    assert.equal(el.style.marginTop, '2px');
    assert.equal(el.style.paddingBottom, '4px');
    // Clearing the dynamic part must leave the static declarations intact.
    renderStyleMapStatic({});
    assert.equal(el.style.height, '1px');
    assert.equal(el.style.color, 'red');
    assert.equal(el.style.marginTop, '');
    assert.equal(el.style.paddingBottom, '');
  });
  test('throws when used on non-style attribute', () => {
    assert.throws(() => {
      render(html`<div id="${styleMap({})}"></div>`, container);
    });
  });
  test('throws when used in attribute with more than 1 part', () => {
    assert.throws(() => {
      render(
          html`<div style="${'height: 2px;'} ${styleMap({})}"></div>`,
          container);
    });
  });
  test('throws when used in NodePart', () => {
    assert.throws(() => {
      render(html`<div>${styleMap({})}</div>`, container);
    });
  });
});
<filename>src/main/java/patron/mains/guis/main/MainStage.java<gh_stars>0
package patron.mains.guis.main;
import patron.mains.managers.app.AppManager;
import com.appscharles.libs.fxer.exceptions.FxerException;
import com.appscharles.libs.fxer.factories.FXStageFactory;
import com.appscharles.libs.fxer.factories.IFXStageFactory;
import com.appscharles.libs.fxer.stages.FXStage;
/**
 * The type Main stage: builds and shows the application's main window.
 */
public class MainStage {
    /**
     * Launch: wires the FXML view, resource bundle, icon, stylesheet and
     * controller through the FXStage factory, then shows the stage modally.
     *
     * @throws FxerException the fxer exception
     */
    public static void launch() throws FxerException {
        IFXStageFactory stageFactory = new FXStageFactory("/patron/mains/guis/main/MainView.fxml",
        "patron/mains/guis/main/translations/Main");
        stageFactory.setIcon(AppManager.getResourceIcon());
        stageFactory.addStylesheet("patron/mains/guis/main/style.css");
        stageFactory.setController(new MainController());
        FXStage stage = stageFactory.create();
        // Starts fully transparent — presumably faded in elsewhere; confirm.
        stage.setOpacity(0);
        stage.showAndWaitFX();
    }
}
|
import numpy as np
import re
# tokenize user input
def tokenize(sentence):
return re.findall('[\d\w]+', sentence.lower())
# create a dictionary to hold the words and their respective vectors
word2vec = {}
# add words and vectors to the dictionary
word2vec['hello'] = np.array([1,2,3,4])
word2vec['world'] = np.array([2,3,4,5])
# create a function to get the word vector corresponding to the user input
def word_vector(sentence):
words = tokenize(sentence)
# if the word is in our dictionary, return the corresponding vector
if words in word2vec:
return word2vec[words]
# otherwise, return a vector of zeroes
else:
return np.zeros((1, len(word2vec[words])))
# create a function to parse a user's input
def parse_user_input(sentence):
words = tokenize(sentence)
# get the corresponding vector for each word in the sentence
vectors = [word_vector(word) for word in words]
# return an average of all the vectors
return np.mean(vectors, axis=0) |
package org.springsource.html5expense;
import android.os.Bundle;
import com.phonegap.DroidGap;
/** PhoneGap/Cordova entry activity: loads the bundled HTML5 expense app. */
public class Html5expense extends DroidGap {
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Serve the packaged web app shipped in the APK's assets directory.
        super.loadUrl("file:///android_asset/www/index.html");
    }
}
#!/bin/bash
# Build the solution and the bundled Tesseract tool, then run the app.
# set -e: stop on the first failing step instead of running a stale build.
set -e
dotnet build
(
    # Subshell keeps the directory change local (replaces cd ... && cd ..).
    cd SiegeClipHighlighter.TesseractTool
    ./build.sh
)
dotnet run --project SiegeClipHighlighter
#!/bin/sh
# Launch the bundled application from /root.
# Guard the cd: without it, a missing directory would silently run ./app
# from the caller's working directory instead.
cd /root || exit 1
./app
<reponame>r-kapoor/Crawlersforweb
// Trip-input screen controller: collects origin, destinations, dates and
// start/end times, validates them, and estimates a baseline budget before
// routing to the budget step.
inputModule.controller('KuberController', ['$scope', '$rootScope', '$http', '$q', '$location', '$window', '$timeout', 'formData', 'cityData', function($scope, $rootScope, $http, $q, $location, $window, $timeout, formData, cityData) {
    $scope.isDetailsCollapsed = false;
    $scope.isOverviewCollapsed = false;
    // Whether the "suggest destinations" toggle is on (persisted in formData).
    $scope.isSuggestDestinationsOn = formData.getSuggestDestinationOn();
    $scope.destinationCityList = formData.getDestinations();
    $scope.helpLabel="Don't know where to go";
    $scope.tripStartTime = formData.getTripStartTime();
    $scope.tripEndTime = formData.getTripEndTime();
    // Mirror time-picker changes back into the shared formData service
    // (deep-watched so flag changes inside the object are caught).
    $scope.$watch('tripStartTime', function startTimeSelected() {
        formData.setTripStartTime($scope.tripStartTime);
    }, true);
    $scope.$watch('tripEndTime', function endTimeSelected() {
        formData.setTripEndTime($scope.tripEndTime);
    }, true);
    // Once at least one destination exists, soften the helper label.
    $rootScope.$on('destinationAdded',function()
    {
        $scope.helpLabel="Don't know where else to go";
    });
$scope.proceed = function checkAndShowOtherInputs() {
console.log('PROCEED');
var startTimeSet=(formData.getTripStartTime()!=null)&&(formData.getTripStartTime().morning == true || formData.getTripStartTime().evening == true);
var endTimeSet=(formData.getTripEndTime()!=null)&&(formData.getTripEndTime().morning == true || formData.getTripEndTime().evening == true);
var originSet=formData.getOrigin()!=null;
var destinationSet=formData.getDestinations().length;
var startDateSet = (formData.getStartDate() !== null);
var endDateSet = (formData.getEndDate() !== null);
console.log("isSuggestDestinationsOn:"+$scope.isSuggestDestinationsOn);
formData.setSuggestDestinationOn($scope.isSuggestDestinationsOn);
if( startDateSet && endDateSet && startTimeSet && endTimeSet && originSet && (destinationSet > 0 || $scope.isSuggestDestinationsOn)) {
var url = $location.path('/setBudget');
$scope.isOverviewCollapsed = true;
$scope.setBudgetLimits();
//MixPanel Tracking
var destinations = formData.getDestinations();
var trackObject = {
"Origin": formData.getOrigin().CityName,
"Destinations": destinationNames(destinations),
"StartDate": formData.getStartDate().toISOString(),
"EndDate": formData.getEndDate().toISOString(),
"NumberOfDestinations": destinations.length,
"DestinationGroups": appendNames(destinations),
"suggestDestination":formData.getSuggestDestinationOn()
};
mixObjectsTag(formData.getTripStartTime(), trackObject, 'Start');
mixObjectsTag(formData.getTripEndTime(), trackObject, 'End');
console.log('trackObject:'+JSON.stringify(trackObject));
//mixpanel.track('Cities Input', trackObject);
mixPanelTrack('Cities Input', trackObject);
$rootScope.$on('$viewContentLoaded', function() {
$rootScope.$broadcast('formComplete');
//MixPanel Timing
//mixpanel.time_event('Budget Input');
mixPanelTimeEvent('Budget Input');
});
}
else
{
if(!originSet){
//Origin not valid
var orgElement=angular.element(document.querySelector("#originId"));
orgElement.addClass("has-error");
}
if(!(destinationSet > 0 || $scope.isSuggestDestinationsOn)) {
var destElement=angular.element(document.querySelector("#destinationId"));
destElement.addClass("has-error");
}
if(!startDateSet){
var startDateElement=angular.element(document.querySelector("#depart-date"));
startDateElement.removeClass("btn-primary");
startDateElement.addClass("btn-danger");
}
if(!endDateSet){
var endDateElement=angular.element(document.querySelector("#arrival-date"));
endDateElement.removeClass("btn-primary");
endDateElement.addClass("btn-danger");
}
if(!startTimeSet){
var startTimeElement = angular.element(document.querySelectorAll(".startTimeE"));
startTimeElement.addClass("time-not-selected");
}
if(!endTimeSet){
var endTimeElement = angular.element(document.querySelectorAll(".endTimeE"));
endTimeElement.addClass("time-not-selected");
}
}
};
function destinationNames(destinations){
var destinationNamesList = [];
for(var i = 0; i < destinations.length; i++){
destinationNamesList.push(destinations[i].CityName);
}
return destinationNamesList;
}
function appendNames(destinations){
var appendedDestinations = "";
for(var i = 0; i < destinations.length; i++){
appendedDestinations += destinations[i].CityName;
if(i != destinations.length - 1){
appendedDestinations += '-';
}
}
return appendedDestinations;
}
function cleanDetailsPanelData(){
formData.resetBudget();
formData.resetNumPersons();
formData.resetTastes();
}
function mixObjectsTag(source, target, tag) {
for(var key in source) {
if (source.hasOwnProperty(key)) {
target[tag+key] = source[key];
}
}
}
$scope.getLocation = function(queryString, deferred) {
$http.get(queryString)
.success(
function onLocationFound(data, status) {
deferred.resolve(data);
})
.error(
function(data, status) {
console.log(data || "Request failed");
deferred.reject("Request Failed for:" + queryString);
});
};
$scope.locationQueryString = function(city) {
console.log('http://maps.googleapis.com/maps/api/geocode/json?address='+city+'&sensor=true&callback=JSON_CALLBACK');
return 'http://maps.googleapis.com/maps/api/geocode/json?address='+city+'&sensor=true';
};
$scope.getDistance = function(originLat, originLong, destinationLat, destinationLong) {
var R = 6371;
var dLat = (destinationLat - originLat) * Math.PI / 180;
var dLon = (destinationLong - originLong) * Math.PI / 180;
var a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
Math.cos(originLat * Math.PI / 180) * Math.cos(destinationLat * Math.PI / 180) *
Math.sin(dLon / 2) * Math.sin(dLon / 2);
var c = 2 * Math.asin(Math.sqrt(a));
var d = R * c;
return d;
};
$scope.getTravelFare = function (totalDistance, numOfDays) {
var travelFare = 0;
var avgSpeed = 60;//kmph
//calculate average non-flight travel time round trip
var nonFlightTime = (totalDistance / avgSpeed);
var totalTime = 24 * numOfDays;
//if avg non-flight travel time is 50% more than non-travel time then go with flight else train or bus or cab
if ((nonFlightTime * 100) / totalTime >= 50) {
//travel by flight
if (totalDistance < 1500) {
travelFare += 7000;
}
else if (totalDistance < 2000) {
travelFare += 10000;
}
else if (totalDistance < 2500) {
travelFare += 13000;
}
else if (totalDistance < 3000) {
travelFare += 18000;
}
else {
travelFare += 25000;
}
}
else {
if (totalDistance < 1500) {
travelFare += 1000;//round trip min travelFare
}
else if (totalDistance < 2000) {
travelFare += 3000;
}
else if (totalDistance < 2500) {
travelFare += 4000;
}
else if (totalDistance < 3000) {
travelFare += 5000;
}
else {
travelFare += 8000;
}
}
console.log('travelFare:'+travelFare);
return travelFare;
};
$scope.getFareFromTier = function(tier) {
switch(tier){
case 1:
return 1500;
case 2:
return 1000;
case 3:
return 750;
}
};
$scope.getAccommodationFoodFare = function(cities, numOfDays) {
var accommodationFoodFare = 0;
var count=0;
var numOfDaysInEachCity = Math.ceil(numOfDays / cities.length); //Equally dividing the days in each city
angular.forEach(cities, function(city, indexCity) {
if(city.tier != undefined) {
accommodationFoodFare += numOfDaysInEachCity * $scope.getFareFromTier(city.tier);
count++;
}
});
while(count < cities.length) {
//Assuming all rest cities are from tier 3
accommodationFoodFare += $scope.getFareFromTier(3);
count++;
}
console.log('accommodationFoodFare:'+accommodationFoodFare);
return accommodationFoodFare;
};
$scope.getBudget = function(origin,destinations,totalDistance,numOfDays)
{
console.log(totalDistance,numOfDays);
console.log(destinations[0]);
var fare=0;
//Add the Approximate Travel Fare
fare += $scope.getTravelFare(totalDistance, numOfDays, fare);
//Now calculate approx. accommodation and food fare according to the destination city
fare += $scope.getAccommodationFoodFare(destinations, numOfDays);
return fare;
};
$scope.setBudgetLimits = function() {
console.log("in setBudgetLimits");
console.log($scope.isSuggestDestinationsOn);
console.log(formData.getDestinations().length);
if((!$scope.isSuggestDestinationsOn)||(($scope.isSuggestDestinationsOn)&&(formData.getDestinations().length>0))) {
console.log("in if");
var origin = formData.getOrigin();
var destinations = formData.getDestinations();
var startDate = formData.getStartDate();
var endDate = formData.getEndDate();
console.log(startDate+","+endDate);
if (origin != null && destinations.length != 0 && startDate != null && endDate != null) {
console.log('3');
var diff = Math.abs(endDate - startDate);
var numOfDays = diff / (1000 * 60 * 60 * 24);
var deferred = [];
var promise = [];
deferred[0] = $q.defer();
promise[0] = deferred[0].promise;
$scope.getLocation($scope.locationQueryString(origin.CityName), deferred[0]);
angular.forEach(destinations, function (destination, index) {
deferred[index + 1] = $q.defer();
promise[index + 1] = deferred[index + 1].promise;
$scope.getLocation($scope.locationQueryString(destination.CityName), deferred[index + 1]);
});
$q.all(promise)
.then(
function onQuerySuccessful(results) {
var latitudes = [];
var longitudes = [];
var totalDistance = 0;
var destinationsData = formData.getDestinations();
angular.forEach(results, function (result, index) {
latitudes[index] = result.results[0].geometry.location.lat;
longitudes[index] = result.results[0].geometry.location.lng;
if (index > 0) {
destinationsData[index-1].Latitude = latitudes[index];
destinationsData[index-1].Longitude = longitudes[index];
totalDistance += parseInt($scope.getDistance(latitudes[index - 1], longitudes[index - 1], latitudes[index], longitudes[index]));
}
else
{
formData.setOriginGeoCoordinates({
orgLat:latitudes[index],
orgLong:longitudes[index]
});
}
});
formData.setDestinations(destinationsData);
console.log(latitudes, longitudes, latitudes.length, longitudes.length);
totalDistance += parseInt($scope.getDistance(latitudes[0], longitudes[0], latitudes[latitudes.length-1], longitudes[longitudes.length-1]));
var totalFare = $scope.getBudget(origin, destinations, totalDistance, numOfDays);
console.log('totalFare:'+totalFare);
//$scope.options.from=parseInt(totalFare);
//$scope.value1 = $scope.options.from;
formData.setMinimumBudget(parseInt(totalFare));
formData.setBudget(parseInt(totalFare));
//console.log('$scope.value1:'+$scope.value1);
//console.log('$scope.options.from:'+$scope.options.from);
$rootScope.$on('$viewContentLoaded', function() {
$rootScope.$emit('BudgetSet');
console.log('BUDGET SET EMITTED');
});
//$scope.sliderOptions.min = parseInt(totalFare);
//$scope.$render();
//console.log($scope.sliderOptions.min);
//$scope.sliders.sliderValue = $scope.sliderOptions.min;
},
function onQueryFailure(result) {
console.log('At least one request for location failed');
}
);
}
else {
console.log('Some Problem with the inputs. Opening the upper part to fix them');
console.log(origin.CityName+','+destinations+','+startDate+','+endDate);
$scope.isDetailsCollapsed = true;
$scope.isOverviewCollapsed = false;
}
}
else {
console.log('Will suggest destinations');
}
};
}]);
|
#!/bin/bash
#
# Copyright (c) 2018 The BitcoinUnlimited developers
# Copyright (c) 2019 The Eccoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C.UTF-8
# Print the commit range/log Travis exposes, for build-log debugging.
echo $TRAVIS_COMMIT_RANGE
echo $TRAVIS_COMMIT_LOG
|
<reponame>fredx/pivotaltracker
/*
Source, Integration resource structure:
https://www.pivotaltracker.com/help/api/rest/v5#integration_resource
*/
var ptutil = require('./utils'),
util = require('util');
/**
 * Data holder for a Pivotal Tracker "integration" resource. Every field is
 * exposed as a coercing accessor (defined below) over a private backing
 * variable; construction seeds the fields from the raw API payload.
 * @param {Object} data raw API payload (optional)
 */
function Integration(data) {
    // Support calling without `new`.
    if (!(this instanceof Integration)){
        return new Integration(data);
    }
    data = data || {};
    // Private backing fields for the accessor properties defined below.
    var _kind = null,
        _id = null,
        _externalProjectId = null,
        _externalApiToken = null,
        _apiUsername = null,
        _apiPassword = null,
        _basicAuthUsername = null,
        _basicAuthPassword = null,
        _zendeskUserPassword = null,
        _zendeskUserEmail = null,
        _statusesToExclude = null,
        _filterId = null,
        _component = null,
        _binId = null,
        _product = null,
        _viewId = null,
        _name = null,
        _storyName = null,
        _projectId = null,
        _importApiUrl = null,
        _account = null,
        _company = null,
        _baseUrl = null,
        _canImport = null,
        _commentsPrivate = null,
        _updateComments = null,
        _updateState = null,
        _isOther = null,
        _active = null,
        _createdAt = null,
        _updatedAt = null;
Object.defineProperties(this, {
"kind": {
enumerable: true,
configurable: false,
get: function() {
return _kind;
},
set: function(val) {
_kind = ptutil.stringOrNull(val);
}
},
"id": {
enumerable: true,
configurable: false,
get: function() {
return _id;
},
set: function(val) {
_id = ptutil.intOrNull(val);
}
},
"externalProjectId": {
enumerable: true,
configurable: false,
get: function() {
return _externalProjectId;
},
set: function(val) {
_externalProjectId = ptutil.intOrNull(val);
}
},
"externalApiToken": {
enumerable: true,
configurable: false,
get: function() {
return _externalApiToken;
},
set: function(val) {
_externalApiToken = ptutil.stringOrNull(val);
}
},
"apiUsername": {
enumerable: true,
configurable: false,
get: function() {
return _apiUsername;
},
set: function(val) {
_apiUsername = ptutil.stringOrNull(val);
}
},
"apiPassword": {
enumerable: true,
configurable: false,
get: function() {
return _apiPassword;
},
set: function(val) {
_apiPassword = ptutil.stringOrNull(val);
}
},
"basicAuthUsername": {
enumerable: true,
configurable: false,
get: function() {
return _basicAuthUsername;
},
set: function(val) {
_basicAuthUsername = ptutil.stringOrNull(val);
}
},
"basicAuthPassword": {
enumerable: true,
configurable: false,
get: function() {
return _basicAuthPassword;
},
set: function(val) {
_basicAuthPassword = ptutil.stringOrNull(val);
}
},
"zendeskUserPassword": {
enumerable: true,
configurable: false,
get: function() {
return _zendeskUserPassword;
},
set: function(val) {
_zendeskUserPassword = ptutil.stringOrNull(val);
}
},
"zendeskUserEmail": {
enumerable: true,
configurable: false,
get: function() {
return _zendeskUserEmail;
},
set: function(val) {
_zendeskUserEmail = ptutil.stringOrNull(val);
}
},
"statusesToExclude": {
enumerable: true,
configurable: false,
get: function() {
return _statusesToExclude;
},
set: function(val) {
_statusesToExclude = ptutil.stringOrNull(val);
}
},
"filterId": {
enumerable: true,
configurable: false,
get: function() {
return _filterId;
},
set: function(val) {
_filterId = ptutil.stringOrNull(val);
}
},
"component": {
enumerable: true,
configurable: false,
get: function() {
return _component;
},
set: function(val) {
_component = ptutil.stringOrNull(val);
}
},
"binId": {
enumerable: true,
configurable: false,
get: function() {
return _binId;
},
set: function(val) {
_binId = ptutil.intOrNull(val);
}
},
"product": {
enumerable: true,
configurable: false,
get: function() {
return _product;
},
set: function(val) {
_product = ptutil.stringOrNull(val);
}
},
"viewId": {
enumerable: true,
configurable: false,
get: function() {
return _viewId;
},
set: function(val) {
_viewId = ptutil.stringOrNull(val);
}
},
"name": {
enumerable: true,
configurable: false,
get: function() {
return _name;
},
set: function(val) {
_name = ptutil.stringOrNull(val);
}
},
"storyName": {
enumerable: true,
configurable: false,
get: function() {
return _storyName;
},
set: function(val) {
_storyName = ptutil.stringOrNull(val);
}
},
"projectId": {
enumerable: true,
configurable: false,
get: function() {
return _projectId;
},
set: function(val) {
_projectId = ptutil.intOrNull(val);
}
},
"importApiUrl": {
enumerable: true,
configurable: false,
get: function() {
return _importApiUrl;
},
set: function(val) {
_importApiUrl = ptutil.stringOrNull(val);
}
},
"account": {
enumerable: true,
configurable: false,
get: function() {
return _account;
},
set: function(val) {
_account = ptutil.stringOrNull(val);
}
},
"company": {
enumerable: true,
configurable: false,
get: function() {
return _company;
},
set: function(val) {
_company = ptutil.stringOrNull(val);
}
},
"baseUrl": {
enumerable: true,
configurable: false,
get: function() {
return _baseUrl;
},
set: function(val) {
_baseUrl = ptutil.stringOrNull(val);
}
},
"canImport": {
enumerable: true,
configurable: false,
get: function() {
return _canImport;
},
set: function(val) {
_canImport = ptutil.booleanOrNull(val);
}
},
"commentsPrivate": {
enumerable: true,
configurable: false,
get: function() {
return _commentsPrivate;
},
set: function(val) {
_commentsPrivate = ptutil.booleanOrNull(val);
}
},
"updateComments": {
enumerable: true,
configurable: false,
get: function() {
return _updateComments;
},
set: function(val) {
_updateComments = ptutil.booleanOrNull(val);
}
},
"updateState": {
enumerable: true,
configurable: false,
get: function() {
return _updateState;
},
set: function(val) {
_updateState = ptutil.booleanOrNull(val);
}
},
"isOther": {
enumerable: true,
configurable: false,
get: function() {
return _isOther;
},
set: function(val) {
_isOther = ptutil.booleanOrNull(val);
}
},
"active": {
enumerable: true,
configurable: false,
get: function() {
return _active;
},
set: function(val) {
_active = ptutil.booleanOrNull(val);
}
},
"createdAt": {
enumerable: true,
configurable: false,
get: function() {
return _createdAt;
},
set: function(val) {
_createdAt = ptutil.dateOrNull(val);
}
},
"updateState": {
enumerable: true,
configurable: false,
get: function() {
return _updatedAt;
},
set: function(val) {
_updatedAt = ptutil.dateOrNull(val);
}
}
});
Object.seal(this);
for (var key in data) {
if (data.hasOwnProperty(key) && this.hasOwnProperty(key)) {
this[key] = data[key];
}
}
}
// Custom console.log/util.inspect rendering via the shared helper.
Integration.prototype.inspect = function() {
    return ptutil.inspect(this);
};
module.exports = {
    Integration : Integration
};
|
def get_current_address(serial_connection, variables):
    """Query the LoRa module for its configured address.

    Sends the ``GET_ADDR`` AT command over ``serial_connection`` and parses
    the delimited reply, which is expected to look like ``AT,<addr>,OK``.

    Args:
        serial_connection: transport wrapper exposing ``execute_command``,
            ``str_to_bytes``/``bytes_to_str`` and a ``response_q`` queue.
        variables: configuration object providing ``GET_ADDR``,
            ``COMMAND_VERIFICATION_TIMEOUT`` and ``LORA_MODULE_DELIMITER``.

    Returns:
        str: the address field extracted from the module's response.

    Raises:
        ValueError: if the response does not have the expected AT/OK framing.
    """
    # Ask the module for its address.
    serial_connection.execute_command(serial_connection.str_to_bytes(variables.GET_ADDR))
    # Wait for the reply. Pass the timeout as a keyword: the first positional
    # parameter of Queue.get() is `block`, not `timeout`, so the original
    # positional call never actually applied the timeout.
    addr = serial_connection.response_q.get(timeout=variables.COMMAND_VERIFICATION_TIMEOUT)
    # Convert the raw bytes to a string and split on the module delimiter.
    addr = serial_connection.bytes_to_str(addr)
    addr_as_list = addr.split(variables.LORA_MODULE_DELIMITER)
    # Guard against short/garbled replies before indexing into the list.
    if len(addr_as_list) < 3 or addr_as_list[0].strip() != 'AT' or addr_as_list[2].strip() != 'OK':
        raise ValueError('Could not get address of module')
    # The middle field carries the address itself.
    return addr_as_list[1]
import { Provider, DynamicModule, Module } from '@nestjs/common';
import { ConfigModule } from '@nestjs/config';
import { UserService } from './user.service';
import { Neo4jModule } from '../neo4j/neo4j.module';
import { UserResolver } from './user.resolver';
import { USER_CONFIG } from './user.constants';
import { UserNeo4jRepository } from './user.neo4j.repository';
// Dynamic Nest module wiring the user domain (service, GraphQL resolver,
// Neo4j repository) together with an externally supplied configuration.
@Module({})
export class UserModule {
  /**
   * Registers the module globally with a caller-supplied provider for
   * USER_CONFIG.
   *
   * @param configProvider partial provider definition (e.g. `useValue`,
   *        `useFactory`, ...) spread into the USER_CONFIG provider entry.
   *        NOTE(review): parameter is untyped — presumably
   *        `Omit<Provider, 'provide'>`; confirm against callers.
   * @returns the configured DynamicModule.
   */
  static forRootAsync(configProvider): DynamicModule {
    return {
      module: UserModule,
      // Make the exported providers available app-wide without re-importing.
      global: true,
      imports: [
        ConfigModule,
        Neo4jModule,
      ],
      providers: [
        UserService,
        UserResolver,
        UserNeo4jRepository,
        {
          provide: USER_CONFIG,
          // Spread lets callers choose useValue/useFactory/useClass etc.
          ...configProvider,
        } as Provider<any>,
      ],
      exports: [UserService, UserResolver, UserNeo4jRepository],
    };
  }
}
// old simple module, with moke data
// @Module({
// providers: [UserService, UserResolver],
// exports: [UserService, UserResolver],
// })
// export class UserModule { }
|
# Bill: a payable item shared by several users through the user_bills
# join table.
class Bill < ActiveRecord::Base
  has_many :user_bills
  # NOTE(review): on a has_many :through association, `dependent: :delete_all`
  # removes the join records (user_bills), not the users — confirm this is the
  # intended clean-up; it is more commonly placed on `has_many :user_bills`.
  has_many :users, through: :user_bills, dependent: :delete_all
  # Lets bill forms create/update/destroy the join rows in one request.
  accepts_nested_attributes_for :user_bills, allow_destroy: true
  # Bills that have not been settled yet.
  scope :unpaid, -> { where(paid: false) }
end
package app.habitzl.elasticsearch.status.monitor.tool.client.params;

/**
 * Parameter list of all values offered by any Elasticsearch API.
 * Constants-only holder; not meant to be instantiated.
 */
public final class GeneralParams {

    private GeneralParams() {
        // instantiation protection
    }

    /** Query parameter selecting the response format of an API call. */
    public static final String PARAM_FORMAT = "format";
}
|
#!/bin/bash
#
# Copyright 2019-present Dell EMC
# Copyright 2019-present Open Networking Foundation
# SPDX-License-Identifier: Apache-2.0
#
set -e
set -x

# Directory containing this script; anchors the default config/log paths.
DOCKERFILE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Host directories mounted into the container (overridable via environment).
CONFIG_DIR=${CONFIG_DIR:-${DOCKERFILE_DIR}/configs}
LOG_DIR=${LOG_DIR:-${DOCKERFILE_DIR}/logs}
# Image to run (overridable via environment).
DOCKER_IMAGE=${DOCKER_IMAGE:-stratumproject/stratum-np4intel}
# Defaults; may be replaced by the CLI flags parsed below.
ENTRYPOINT="/stratum-entrypoint.sh"
CMD=""

#
# This script is used to start the stratum container
#
# Print usage/help text for this launcher script.
print_help() {
cat << EOF
The script starts the containerized version of Stratum for NP4 Intel based devices.
Usage: $0
[--debug] Start the debug stratum binary
[--bash] Run a bash shell in the container
Example:
$0
EOF
}
# Parse command-line flags; anything after "--" is ignored by this loop.
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
    -h|--help)
    print_help
    exit 0
    ;;
    --debug)
    # Run the debug build of the stratum binary inside the container.
    CMD="--debug"
    shift
    ;;
    --bash)
    # Drop into an interactive shell instead of starting stratum.
    ENTRYPOINT="/bin/bash"
    shift
    ;;
    "--")
    shift
    break
    ;;
    *) # unknown option
    print_help
    exit 1
    ;;
esac
done
# Start the Stratum container with device and sysfs access. Path variables
# are quoted so directories containing spaces do not break the mounts.
# $CMD is intentionally left unquoted: when empty it must expand to no
# argument at all rather than an empty-string argument.
docker run -it --rm --privileged \
    -v /dev:/dev -v /sys:/sys \
    -p 28000:28000 \
    -p 9339:9339 \
    -p 9559:9559 \
    -v "$CONFIG_DIR":/stratum_configs \
    -v "$LOG_DIR":/stratum_logs \
    --entrypoint="$ENTRYPOINT" \
    "$DOCKER_IMAGE" $CMD
|
<reponame>strictd/squareup_v2<filename>models/endpoints/refund/i-squareup-create-refund.ts
import { ISquareupError } from '../../i-squareup-error';
import { ISquareupMoney } from '../../i-squareup-money';
// NOTE(review): the file is named i-squareup-create-refund.ts but the class
// is named ISquareupCaptureTransaction — presumably a copy/paste leftover,
// since the fields match Square's CreateRefund request body. Renaming would
// break importers, so the mismatch is only flagged here.
export class ISquareupCaptureTransaction {
  location_id: string; // The ID of the original transaction's associated location.
  transaction_id: string; // The ID of the original transaction that includes the tender to refund.
  idempotency_key: string; // A value you specify that uniquely identifies this refund among refunds you've created for the tender.
  // If you're unsure whether a particular refund succeeded, you can reattempt it with the same idempotency key without worrying about duplicating the refund.
  // See Idempotency keys for more information.
  tender_id: string; // The ID of the tender to refund.
  // A Transaction has one or more tenders (i.e., methods of payment) associated with it, and you refund each tender separately with the Connect API.
  reason?: string; // A description of the reason for the refund.
  // Default value: Refund via API
  amount_money: ISquareupMoney; // The amount of money to refund.
  // Note that you specify the amount in the smallest denomination of the applicable currency. For example, US dollar amounts are specified in cents. See Working with monetary amounts for details.
  // This amount cannot exceed the amount that was originally charged to the tender that corresponds to tender_id.
}
// Response shape of the Connect CreateRefund endpoint.
export class ISquareupCreateRefundResponse {
  errors?: ISquareupError[]; // Any errors that occurred during the request.
  // NOTE(review): `Refund` is neither imported nor declared in this file, so
  // this will not compile as-is — confirm the intended type and its import.
  refund?: Refund; // The created refund.
}
|
# Demonstrations that F (a limited-precision decimal float type defined
# earlier in this file, not visible here) violates the usual field axioms.
# Show failure of the associative law
u, v, w = F(11111113), F(-11111111), F(7.51111111)
assert (u+v)+w == 9.5111111
assert u+(v+w) == 10
# Show failure of the commutative law for addition
# NOTE(review): reordering u+v+w vs v+w+u also changes the implicit grouping,
# so this exercises rounding under reordering, not commutativity alone.
assert u+v+w != v+w+u
# Show failure of the distributive law
u, v, w = F(20000), F(-6), F(6.0000003)
assert u*v == -120000
assert u*w == 120000.01
assert v+w == .0000003
assert (u*v) + (u*w) == .01
assert u * (v+w) == .006
|
#!/bin/bash
# Conda build script for the R package TH.data. On platforms with a native
# toolchain, build and install normally; otherwise copy the (data-only)
# package contents into the R library tree and, on macOS, patch the install
# names of shared libraries to point into $PREFIX.
if [[ $target_platform =~ linux.* ]] || [[ $target_platform == win-32 ]] || [[ $target_platform == win-64 ]] || [[ $target_platform == osx-64 ]]; then
  export DISABLE_AUTOBREW=1
  $R CMD INSTALL --build .
else
  mkdir -p "$PREFIX"/lib/R/library/TH.data
  mv * "$PREFIX"/lib/R/library/TH.data
  if [[ $target_platform == osx-64 ]]; then
    pushd "$PREFIX"
      for libdir in lib/R/lib lib/R/modules lib/R/library lib/R/bin/exec sysroot/usr/lib; do
        pushd "$libdir" || exit 1
          # Parenthesise the -iname alternatives: without the grouping,
          # "-type f" bound only to the first -iname, so non-regular entries
          # matching "*.so" or "R" could slip into the list.
          # NOTE(review): the word-split $(find ...) loop still breaks on
          # paths containing whitespace — acceptable here only because conda
          # prefixes never contain spaces.
          for SHARED_LIB in $(find . -type f \( -iname "*.dylib" -or -iname "*.so" -or -iname "R" \)); do
            echo "fixing SHARED_LIB $SHARED_LIB"
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5.0-MRO/Resources/lib/libR.dylib "$PREFIX"/lib/R/lib/libR.dylib "$SHARED_LIB" || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libR.dylib "$PREFIX"/lib/R/lib/libR.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/local/clang4/lib/libomp.dylib "$PREFIX"/lib/libomp.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/local/gfortran/lib/libgfortran.3.dylib "$PREFIX"/lib/libgfortran.3.dylib "$SHARED_LIB" || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libquadmath.0.dylib "$PREFIX"/lib/libquadmath.0.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/local/gfortran/lib/libquadmath.0.dylib "$PREFIX"/lib/libquadmath.0.dylib "$SHARED_LIB" || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libgfortran.3.dylib "$PREFIX"/lib/libgfortran.3.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/lib/libgcc_s.1.dylib "$PREFIX"/lib/libgcc_s.1.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/lib/libiconv.2.dylib "$PREFIX"/sysroot/usr/lib/libiconv.2.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/lib/libncurses.5.4.dylib "$PREFIX"/sysroot/usr/lib/libncurses.5.4.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/lib/libicucore.A.dylib "$PREFIX"/sysroot/usr/lib/libicucore.A.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/lib/libexpat.1.dylib "$PREFIX"/lib/libexpat.1.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/lib/libcurl.4.dylib "$PREFIX"/lib/libcurl.4.dylib "$SHARED_LIB" || true
            install_name_tool -change /usr/lib/libc++.1.dylib "$PREFIX"/lib/libc++.1.dylib "$SHARED_LIB" || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libc++.1.dylib "$PREFIX"/lib/libc++.1.dylib "$SHARED_LIB" || true
          done
        popd
      done
    popd
  fi
fi
#!/bin/bash
# This rewrites the grok patterns file
# in case of changes to the existing patterns
# this file can be regenerated
(
echo "class GrokPatterns"
echo ""
echo " @@global_patterns = {"

# ensure we parse the common grok patterns first
GLOBIGNORE="*grok-patterns"

for i in src/patterns/grok-patterns src/patterns/* ; do
  echo " # ${i/*\/}"
  # Keep only pattern definitions (lines starting with an upper-case name),
  # drop the known-broken Rails patterns, and rewrite each definition as a
  # Ruby hash entry. Reads the file with grep directly instead of the
  # original useless-use-of-cat pipeline, and quotes "$i" for safety.
  grep '^[A-Z]' "$i" | grep -vE '(RAILS3PROFILE|RAILS3)' | sed -e 's/^\([A-Z0-9_]*\) \(.*\)/ "\1" => %q(\2),/g'
  echo ""
done
echo " }"
echo
echo " def self.patterns"
echo " @@global_patterns"
echo " end"
echo
echo "end"
) > src/patterns.cr
|
// Use touch events when the device supports them, otherwise mouse events.
var xxEvents = ('ontouchstart' in window) ? { start: 'touchstart', move: 'touchmove', end: 'touchend'} : { start: 'mousedown', move: 'mousemove', end: 'mouseup' };
// Global page-state namespace (reused if already defined elsewhere).
var _xx = _xx || {
    astrict:true,   // true while a page transition is running (input lock)
    z:2,            // running z-index counter for page stacking
    pIndex:0        // index of the currently visible page
};
(function ($) {
    /**
     * Show the element and play its "in" animation class; the class is
     * removed and `complete` fired when the animation ends.
     * @param {Object} [options] - {classIn, classOut, complete}
     * @returns {jQuery} this, for chaining.
     */
    $.fn.moveIn = function (options) {
        var defaults = {
            classIn: 'moveIn',
            classOut: 'moveOut',
            complete: function () { }
            // CALLBACKS
        };
        // Merge into a fresh object so the defaults literal is not mutated.
        var settings = $.extend({}, defaults, options);
        this.show().addClass(settings.classIn);
        // NOTE(review): only the webkit-prefixed event is handled — add
        // 'animationend' if non-WebKit browsers must be supported.
        this.one('webkitAnimationEnd', function () {
            $(this).removeClass(settings.classIn);
            settings.complete();
        });
        return this;
    };
    /**
     * Play the "out" animation class, then hide the element and fire
     * `complete`.
     * @param {Object} [options] - {classIn, classOut, complete}
     * @returns {jQuery} this, for chaining.
     */
    $.fn.moveOut = function (options) {
        var defaults = {
            classIn: 'moveIn',
            classOut: 'moveOut',
            complete: function () { }
            // CALLBACKS
        };
        var settings = $.extend({}, defaults, options);
        this.show().addClass(settings.classOut);
        this.one('webkitAnimationEnd', function () {
            $(this).removeClass(settings.classOut).hide();
            settings.complete();
        });
        return this;
    };
    /**
     * Toggle an 'on' class for press feedback using the unified
     * touch/mouse events chosen in xxEvents.
     * @returns {jQuery} this — fixed: the original returned undefined,
     * breaking jQuery chaining unlike the sibling plugins.
     */
    $.fn.btnHover = function () {
        this.on(xxEvents.start, function (e) {
            $(this).addClass('on');
            e.preventDefault();
        });
        this.on(xxEvents.end, function (e) {
            $(this).removeClass('on');
            e.preventDefault();
        });
        return this;
    };
})(jQuery);
// Animate page 3 into view: background fades in, the circle images and the
// text pop in with staggered delays, then the footer rows slide in.
// `delay` (ms, optional) postpones the whole sequence. When the last step
// finishes it records the page index and releases the transition lock.
_xx.page3_in=function(delay){
    var delay=delay||0;
    _xx.z++;
    // Raise page 3 above the current page before animating.
    $('#page3').css('zIndex',_xx.z).addClass('animate').show();
    $('#page3 .bg').transition({opacity:1,scale:1,delay:delay},1000);
    var aImg=$('#page3 .p3Circle img');
    // Each circle image starts 200ms after the previous one.
    aImg.each(function(i,d){
        delay+=200;
        $(this).css({opacity:0,scale:5}).transition({ opacity: 1, scale: 1,delay:delay},1000,'easeOutQuart');
    });
    delay+=200;
    $('#page3 .p3Text').css({opacity:0,scale:5}).transition({ opacity: 1, scale: 1,delay:delay},1000,'easeOutQuart');
    var aFt=$('#page3 .p3_ft li');
    delay+=200;
    aFt.eq(0).css({width:'0%'}).transition({width:'100%',delay:delay},1000,'easeOutQuart');
    delay+=500;
    aFt.eq(1).css({width:'0%'}).transition({width:'100%',delay:delay},1000,'easeOutQuart');
    delay+=200;
    // Final step: slide in the last footer row, then unlock navigation.
    aFt.eq(2).css({opacity:0,x:100}).transition({opacity:1,x:0,delay:delay},1000,'easeOutQuart',function(){
        _xx.pIndex=1;
        _xx.astrict=false;
    });
};
// Reverse of page3_in: shrink/fade the page-3 contents, then fade the
// background and hide the page container once its transition completes.
_xx.page3_out=function(){
    var delay=0;
    var aImg=$('#page3 .p3Circle img');
    aImg.transition({ opacity: 0, scale: 0.1,delay:delay},1000,'easeOutQuart');
    $('#page3 .p3Text').transition({ opacity: 0, scale: 0.1,delay:delay},1000,'easeOutQuart');
    var aFt=$('#page3 .p3_ft li');
    aFt.eq(0).transition({opacity:0,x:100,delay:delay},1000,'easeOutQuart');
    delay+=200;
    // The background fade is the last animation; hide the page when done.
    $('#page3 .bg').transition({opacity:0,delay:delay},1000,function(){
        $('#page3').removeClass('animate').hide();
    });
};
//#####################################################################
// Copyright 2012, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Namespace INTERSECTION
//#####################################################################
#include <PhysBAM_Tools/Vectors/VECTOR.h>
#include <PhysBAM_Geometry/Basic_Geometry/RAY.h>
#include <PhysBAM_Geometry/Basic_Geometry/SEGMENT_3D.h>
#include <PhysBAM_Geometry/Basic_Geometry/TRIANGLE_3D.h>
#include <PhysBAM_Geometry/Basic_Geometry_Intersections/RAY_TRIANGLE_3D_INTERSECTION.h>
#include <PhysBAM_Geometry/Basic_Geometry_Intersections/SEGMENT_3D_TRIANGLE_3D_INTERSECTION.h>
namespace PhysBAM{
namespace INTERSECTION{
//#####################################################################
// Function Intersects
//#####################################################################
// Triangle/triangle intersection: tests each edge of one triangle against the
// face of the other, in both directions.
// NOTE(review): thickness_over_two is accepted but never forwarded to the
// SEGMENT_3D/TRIANGLE_3D Intersects calls below, so no thickening is actually
// applied — confirm whether the segment/triangle overload should receive it.
template<class T> bool Intersects(const TRIANGLE_3D<T>& triangle1,const TRIANGLE_3D<T>& triangle2,const T thickness_over_two)
{
    if(Intersects(SEGMENT_3D<T>(triangle1.x1,triangle1.x2),triangle2)) return true;
    if(Intersects(SEGMENT_3D<T>(triangle1.x2,triangle1.x3),triangle2)) return true;
    if(Intersects(SEGMENT_3D<T>(triangle1.x3,triangle1.x1),triangle2)) return true;
    if(Intersects(SEGMENT_3D<T>(triangle2.x1,triangle2.x2),triangle1)) return true;
    if(Intersects(SEGMENT_3D<T>(triangle2.x2,triangle2.x3),triangle1)) return true;
    if(Intersects(SEGMENT_3D<T>(triangle2.x3,triangle2.x1),triangle1)) return true;
    return false;
}
//#####################################################################
// Explicit instantiations for the supported scalar types.
template bool Intersects(const TRIANGLE_3D<float>&,const TRIANGLE_3D<float>&,const float);
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template bool Intersects(const TRIANGLE_3D<double>&,const TRIANGLE_3D<double>&,const double);
#endif
};
};
|
#! /bin/bash
#SBATCH -J scheduler
#SBATCH -t 5-00:00:00
#SBATCH -N 1
#SBATCH -n 1
#SBATCH -c 1
#SBATCH -p all
#SBATCH --mem=2gb

# Launch the rna-seq-star-deseq2 Snakemake workflow as the long-running
# SLURM scheduler job. Abort if the workflow directory is missing instead
# of running snakemake from whatever directory sbatch started in.
cd /cluster/home/quever/workflows/rna-seq-star-deseq2 || exit 1

# Conda environments are cached inside the workflow's .snakemake directory.
condaprefix='/cluster/home/quever/workflows/rna-seq-star-deseq2/.snakemake/conda'

snakemake \
  --jobs 6 \
  --profile slurm \
  --cluster-config slurm/cluster.json \
  --conda-frontend conda \
  --use-conda \
  --use-singularity \
  --conda-prefix "${condaprefix}" \
  --wrapper-prefix 'file:///cluster/home/quever/downloads/snakemake-wrappers/' \
  --rerun-incomplete
import importlib
def get_core_plugins(module_name):
    """Return the names of every class exposed in *module_name*'s top-level namespace.

    Classes merely imported into the module are included as well, not only
    those defined there. An empty list is returned when the module cannot
    be imported.
    """
    try:
        loaded = importlib.import_module(module_name)
        return [attr_name for attr_name, attr in vars(loaded).items()
                if isinstance(attr, type)]
    except (ModuleNotFoundError, AttributeError):
        return []
# Demo: list the classes exposed at the top level of the `robotide` package
# (prints [] when robotide is not installed, since the function swallows
# ModuleNotFoundError).
print(get_core_plugins('robotide'))  # e.g. ['RunAnything', 'RecentFilesPlugin', 'PreviewPlugin', ...]
<reponame>cschladetsch/KAI<gh_stars>10-100
#pragma once

KAI_BEGIN

// Minimal compile-time metaprogramming utilities (pre-C++11 style).
namespace meta
{
    /// Null is used to represent the equivalent to a 'null-pointer' in compile-time space
    struct Null
    {
        typedef Null Next, Prev, Value, Type;
    };

    /// Compile-time conditional evaluation: If<C, A, B>::Type is A when C
    /// is true, otherwise B (via the partial specialisation below).
    template <bool T, class A, class B>
    struct If
    {
        typedef A Type;
    };

    template <class A, class B>
    struct If<false, A, B>
    {
        typedef B Type;
    };

    /// SameType<A, B>::value is 1 iff A and B are the same type.
    /// NOTE(review): uses lower-case `value` while IsNull/Arity use `Value` —
    /// inconsistent casing callers must be aware of.
    template <class A, class B>
    struct SameType
    {
        enum { value = 0 };
    };

    template <class A>
    struct SameType<A, A>
    {
        enum { value = 1 };
    };

    /// IsNull<T>::Value is 1 only for the Null sentinel type.
    template <class T>
    struct IsNull { enum { Value = 0 }; };

    template <>
    struct IsNull<Null> { enum { Value = 1 }; };

    /// Calculate the number of non-Null types in a list of up to four types;
    /// only trailing Nulls are detected by the specialisations below.
    template <class T0 = Null, class T1 = Null, class T2 = Null, class T3 = Null>
    struct Arity { enum { Value = 4 }; };

    template <class T0, class T1, class T2>
    struct Arity<T0, T1, T2, Null> { enum { Value = 3 }; };

    template <class T0, class T1>
    struct Arity<T0, T1, Null, Null> { enum { Value = 2 }; };

    template <class T0>
    struct Arity<T0, Null, Null, Null> { enum { Value = 1 }; };

    template <>
    struct Arity<Null, Null, Null, Null> { enum { Value = 0 }; };
}

KAI_END
|
package com.ozeanly.kafka.exception;
public class ConsumerNonRecoverableException extends RuntimeException {
public ConsumerNonRecoverableException(String message) {
super(message);
}
public ConsumerNonRecoverableException(Throwable cause) {
super(cause);
}
}
|
<gh_stars>0
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: pkg/scheduler/scheduler.proto
/*
Package scheduler is a generated protocol buffer package.
It is generated from these files:
pkg/scheduler/scheduler.proto
It has these top-level messages:
Schedule
ScheduleRequest
Action
AbortAction
InvokeTaskAction
NotifyTaskAction
*/
package scheduler
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import fission_workflows_types1 "github.com/fission/fission-workflows/pkg/types"
import fission_workflows_types "github.com/fission/fission-workflows/pkg/types/typedvalues"
import google_protobuf "github.com/golang/protobuf/ptypes/timestamp"
import google_protobuf1 "github.com/golang/protobuf/ptypes/any"
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// ActionType contains all possible types of actions the scheduler can undertake.
//
// Each action can have a seperate datastructure defined here.
// Note about a RETURN/COMPLETE/FAIL action: The controller decides the final status of the workflow. In order to avoid
// inconsistencies in the replay.
type ActionType int32
const (
// Invokes a task
ActionType_INVOKE_TASK ActionType = 0
// Abort cancels the invocation, leading to an ABORTED status.
ActionType_ABORT ActionType = 1
// Notify a task
ActionType_NOTIFY_TASK ActionType = 2
)
var ActionType_name = map[int32]string{
0: "INVOKE_TASK",
1: "ABORT",
2: "NOTIFY_TASK",
}
var ActionType_value = map[string]int32{
"INVOKE_TASK": 0,
"ABORT": 1,
"NOTIFY_TASK": 2,
}
func (x ActionType) String() string {
return proto.EnumName(ActionType_name, int32(x))
}
func (ActionType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
type Schedule struct {
InvocationId string `protobuf:"bytes,1,opt,name=invocationId" json:"invocationId,omitempty"`
CreatedAt *google_protobuf.Timestamp `protobuf:"bytes,2,opt,name=createdAt" json:"createdAt,omitempty"`
Actions []*Action `protobuf:"bytes,3,rep,name=actions" json:"actions,omitempty"`
}
func (m *Schedule) Reset() { *m = Schedule{} }
func (m *Schedule) String() string { return proto.CompactTextString(m) }
func (*Schedule) ProtoMessage() {}
func (*Schedule) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
func (m *Schedule) GetInvocationId() string {
if m != nil {
return m.InvocationId
}
return ""
}
func (m *Schedule) GetCreatedAt() *google_protobuf.Timestamp {
if m != nil {
return m.CreatedAt
}
return nil
}
func (m *Schedule) GetActions() []*Action {
if m != nil {
return m.Actions
}
return nil
}
type ScheduleRequest struct {
Workflow *fission_workflows_types1.Workflow `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"`
Invocation *fission_workflows_types1.WorkflowInvocation `protobuf:"bytes,2,opt,name=invocation" json:"invocation,omitempty"`
}
func (m *ScheduleRequest) Reset() { *m = ScheduleRequest{} }
func (m *ScheduleRequest) String() string { return proto.CompactTextString(m) }
func (*ScheduleRequest) ProtoMessage() {}
func (*ScheduleRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
func (m *ScheduleRequest) GetWorkflow() *fission_workflows_types1.Workflow {
if m != nil {
return m.Workflow
}
return nil
}
func (m *ScheduleRequest) GetInvocation() *fission_workflows_types1.WorkflowInvocation {
if m != nil {
return m.Invocation
}
return nil
}
// Action is the generic container of an action (signalled by ActionType) and
// the relevant message as a payload.
type Action struct {
Type ActionType `protobuf:"varint,1,opt,name=type,enum=fission.workflows.scheduler.ActionType" json:"type,omitempty"`
Payload *google_protobuf1.Any `protobuf:"bytes,4,opt,name=payload" json:"payload,omitempty"`
}
func (m *Action) Reset() { *m = Action{} }
func (m *Action) String() string { return proto.CompactTextString(m) }
func (*Action) ProtoMessage() {}
func (*Action) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
func (m *Action) GetType() ActionType {
if m != nil {
return m.Type
}
return ActionType_INVOKE_TASK
}
func (m *Action) GetPayload() *google_protobuf1.Any {
if m != nil {
return m.Payload
}
return nil
}
type AbortAction struct {
Reason string `protobuf:"bytes,1,opt,name=reason" json:"reason,omitempty"`
}
func (m *AbortAction) Reset() { *m = AbortAction{} }
func (m *AbortAction) String() string { return proto.CompactTextString(m) }
func (*AbortAction) ProtoMessage() {}
func (*AbortAction) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
func (m *AbortAction) GetReason() string {
if m != nil {
return m.Reason
}
return ""
}
type InvokeTaskAction struct {
// Id of the task in the workflow
Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
Inputs map[string]*fission_workflows_types.TypedValue `protobuf:"bytes,2,rep,name=inputs" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
}
func (m *InvokeTaskAction) Reset() { *m = InvokeTaskAction{} }
func (m *InvokeTaskAction) String() string { return proto.CompactTextString(m) }
func (*InvokeTaskAction) ProtoMessage() {}
func (*InvokeTaskAction) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
func (m *InvokeTaskAction) GetId() string {
if m != nil {
return m.Id
}
return ""
}
func (m *InvokeTaskAction) GetInputs() map[string]*fission_workflows_types.TypedValue {
if m != nil {
return m.Inputs
}
return nil
}
type NotifyTaskAction struct {
Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
ExpectedAt *google_protobuf.Timestamp `protobuf:"bytes,2,opt,name=expectedAt" json:"expectedAt,omitempty"`
}
func (m *NotifyTaskAction) Reset() { *m = NotifyTaskAction{} }
func (m *NotifyTaskAction) String() string { return proto.CompactTextString(m) }
func (*NotifyTaskAction) ProtoMessage() {}
func (*NotifyTaskAction) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
func (m *NotifyTaskAction) GetId() string {
if m != nil {
return m.Id
}
return ""
}
func (m *NotifyTaskAction) GetExpectedAt() *google_protobuf.Timestamp {
if m != nil {
return m.ExpectedAt
}
return nil
}
func init() {
proto.RegisterType((*Schedule)(nil), "fission.workflows.scheduler.Schedule")
proto.RegisterType((*ScheduleRequest)(nil), "fission.workflows.scheduler.ScheduleRequest")
proto.RegisterType((*Action)(nil), "fission.workflows.scheduler.Action")
proto.RegisterType((*AbortAction)(nil), "fission.workflows.scheduler.AbortAction")
proto.RegisterType((*InvokeTaskAction)(nil), "fission.workflows.scheduler.InvokeTaskAction")
proto.RegisterType((*NotifyTaskAction)(nil), "fission.workflows.scheduler.NotifyTaskAction")
proto.RegisterEnum("fission.workflows.scheduler.ActionType", ActionType_name, ActionType_value)
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// Client API for Scheduler service
type SchedulerClient interface {
Evaluate(ctx context.Context, in *ScheduleRequest, opts ...grpc.CallOption) (*Schedule, error)
}
type schedulerClient struct {
cc *grpc.ClientConn
}
func NewSchedulerClient(cc *grpc.ClientConn) SchedulerClient {
return &schedulerClient{cc}
}
func (c *schedulerClient) Evaluate(ctx context.Context, in *ScheduleRequest, opts ...grpc.CallOption) (*Schedule, error) {
out := new(Schedule)
err := grpc.Invoke(ctx, "/fission.workflows.scheduler.Scheduler/evaluate", in, out, c.cc, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// Server API for Scheduler service
type SchedulerServer interface {
Evaluate(context.Context, *ScheduleRequest) (*Schedule, error)
}
func RegisterSchedulerServer(s *grpc.Server, srv SchedulerServer) {
s.RegisterService(&_Scheduler_serviceDesc, srv)
}
func _Scheduler_Evaluate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ScheduleRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(SchedulerServer).Evaluate(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/fission.workflows.scheduler.Scheduler/Evaluate",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(SchedulerServer).Evaluate(ctx, req.(*ScheduleRequest))
}
return interceptor(ctx, in, info, handler)
}
var _Scheduler_serviceDesc = grpc.ServiceDesc{
ServiceName: "fission.workflows.scheduler.Scheduler",
HandlerType: (*SchedulerServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "evaluate",
Handler: _Scheduler_Evaluate_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "pkg/scheduler/scheduler.proto",
}
func init() { proto.RegisterFile("pkg/scheduler/scheduler.proto", fileDescriptor0) }
var fileDescriptor0 = []byte{
// 547 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x52, 0x5d, 0x6f, 0xd3, 0x30,
0x14, 0x5d, 0xb2, 0xad, 0x6b, 0x6f, 0xd0, 0x56, 0x59, 0x08, 0x95, 0x22, 0x44, 0xc9, 0x34, 0x51,
0xf1, 0xe1, 0x4a, 0xe5, 0x65, 0x2d, 0xe2, 0x21, 0x93, 0x86, 0x54, 0x15, 0xb5, 0xc2, 0x8b, 0x86,
0xe0, 0x61, 0xc8, 0x4d, 0xdc, 0x2e, 0x6a, 0x1b, 0x87, 0xd8, 0xd9, 0xc8, 0x8f, 0xe1, 0x99, 0x5f,
0xc4, 0xff, 0x41, 0x71, 0x9c, 0xb6, 0x2b, 0x50, 0x2a, 0x5e, 0x12, 0x7f, 0x9c, 0x73, 0x7c, 0xee,
0xb9, 0x17, 0x1e, 0x47, 0xd3, 0x49, 0x4b, 0x78, 0xd7, 0xcc, 0x4f, 0x66, 0x2c, 0x5e, 0xae, 0x70,
0x14, 0x73, 0xc9, 0xd1, 0xa3, 0x71, 0x20, 0x44, 0xc0, 0x43, 0x7c, 0xcb, 0xe3, 0xe9, 0x78, 0xc6,
0x6f, 0x05, 0x5e, 0x40, 0xea, 0xdd, 0x49, 0x20, 0xaf, 0x93, 0x11, 0xf6, 0xf8, 0xbc, 0xa5, 0x71,
0xc5, 0xff, 0xd5, 0x02, 0xdf, 0xca, 0x1e, 0x90, 0x69, 0xc4, 0x44, 0xfe, 0xcd, 0x85, 0xeb, 0xef,
0xff, 0x83, 0xeb, 0xdf, 0xd0, 0x59, 0x72, 0x77, 0xad, 0xd5, 0x9e, 0x4c, 0x38, 0x9f, 0xcc, 0x58,
0x4b, 0xed, 0x46, 0xc9, 0xb8, 0x25, 0x83, 0x39, 0x13, 0x92, 0xce, 0x23, 0x0d, 0x78, 0xb8, 0x0e,
0xa0, 0x61, 0x9a, 0x5f, 0xd9, 0x3f, 0x0c, 0x28, 0x5f, 0xe8, 0x9a, 0x90, 0x0d, 0xf7, 0x82, 0xf0,
0x86, 0x7b, 0x54, 0x06, 0x3c, 0xec, 0xf9, 0x35, 0xa3, 0x61, 0x34, 0x2b, 0xe4, 0xce, 0x19, 0x3a,
0x85, 0x8a, 0x17, 0x33, 0x2a, 0x99, 0xef, 0xc8, 0x9a, 0xd9, 0x30, 0x9a, 0x56, 0xbb, 0x8e, 0x73,
0x7d, 0x5c, 0xe8, 0x63, 0xb7, 0x30, 0x40, 0x96, 0x60, 0xf4, 0x16, 0x0e, 0xa8, 0x97, 0xa9, 0x88,
0xda, 0x6e, 0x63, 0xb7, 0x69, 0xb5, 0x8f, 0xf1, 0x86, 0x7c, 0xb1, 0xa3, 0xb0, 0xa4, 0xe0, 0xd8,
0xdf, 0x0d, 0x38, 0x2a, 0x9c, 0x12, 0xf6, 0x35, 0x61, 0x22, 0x93, 0x2c, 0x17, 0x54, 0x65, 0xd6,
0x6a, 0x3f, 0xfd, 0x83, 0x66, 0x9e, 0xfc, 0x47, 0xbd, 0x27, 0x0b, 0x0a, 0xea, 0x03, 0x2c, 0x6b,
0xd3, 0xc5, 0xbc, 0xf8, 0xa7, 0x40, 0x6f, 0x41, 0x21, 0x2b, 0x74, 0x3b, 0x81, 0x52, 0x6e, 0x19,
0xbd, 0x81, 0xbd, 0x8c, 0xa1, 0x1c, 0x1d, 0xb6, 0x9f, 0x6d, 0x51, 0xa5, 0x9b, 0x46, 0x8c, 0x28,
0x12, 0xc2, 0x70, 0x10, 0xd1, 0x74, 0xc6, 0xa9, 0x5f, 0xdb, 0x53, 0x86, 0xee, 0xff, 0x96, 0xae,
0x13, 0xa6, 0xa4, 0x00, 0xd9, 0x27, 0x60, 0x39, 0x23, 0x1e, 0x4b, 0xfd, 0xf6, 0x03, 0x28, 0xc5,
0x8c, 0x0a, 0x1e, 0xea, 0xe6, 0xe9, 0x9d, 0xfd, 0xd3, 0x80, 0x6a, 0x66, 0x7c, 0xca, 0x5c, 0x2a,
0xa6, 0x1a, 0x7c, 0x08, 0x66, 0x50, 0x74, 0xd9, 0x0c, 0x7c, 0xf4, 0x01, 0x4a, 0x41, 0x18, 0x25,
0x52, 0xd4, 0x4c, 0xd5, 0xa0, 0xce, 0x46, 0xeb, 0xeb, 0x72, 0xb8, 0xa7, 0xb8, 0xe7, 0xa1, 0x8c,
0x53, 0xa2, 0x85, 0xea, 0x57, 0x60, 0xad, 0x1c, 0xa3, 0x2a, 0xec, 0x4e, 0x59, 0xaa, 0x9f, 0xcc,
0x96, 0xa8, 0x03, 0xfb, 0x6a, 0x98, 0x75, 0xfc, 0xc7, 0x7f, 0x8d, 0x3f, 0xcb, 0xc8, 0xbf, 0xcc,
0xa0, 0x24, 0x67, 0x74, 0xcd, 0x53, 0xc3, 0xbe, 0x82, 0xea, 0x80, 0xcb, 0x60, 0x9c, 0x6e, 0x28,
0xab, 0x0b, 0xc0, 0xbe, 0x45, 0xcc, 0xdb, 0x76, 0x66, 0x57, 0xd0, 0xcf, 0x3b, 0x00, 0xcb, 0x16,
0xa1, 0x23, 0xb0, 0x7a, 0x83, 0xcb, 0x61, 0xff, 0xfc, 0x8b, 0xeb, 0x5c, 0xf4, 0xab, 0x3b, 0xa8,
0x02, 0xfb, 0xce, 0xd9, 0x90, 0xb8, 0x55, 0x23, 0xbb, 0x1b, 0x0c, 0xdd, 0xde, 0xbb, 0x4f, 0xf9,
0x9d, 0xd9, 0x0e, 0xa1, 0x52, 0xcc, 0x6b, 0x8c, 0x28, 0x94, 0x59, 0xe6, 0x9a, 0x4a, 0x86, 0x5e,
0x6e, 0x8c, 0x75, 0x6d, 0xc6, 0xeb, 0x27, 0x5b, 0xa1, 0xed, 0x9d, 0x33, 0xeb, 0x73, 0x65, 0x71,
0x3e, 0x2a, 0xa9, 0xba, 0x5e, 0xff, 0x0a, 0x00, 0x00, 0xff, 0xff, 0xb4, 0xa5, 0xdf, 0x7d, 0xe2,
0x04, 0x00, 0x00,
}
|
<gh_stars>1-10
/**
 * @namespace PIXI.mesh
 */
// Barrel module: re-export the public mesh API from its implementation files.
export { default as Mesh } from './Mesh';
export { default as MeshRenderer } from './webgl/MeshRenderer';
export { default as CanvasMeshRenderer } from './canvas/CanvasMeshRenderer';
export { default as Plane } from './Plane';
export { default as NineSlicePlane } from './NineSlicePlane';
export { default as Rope } from './Rope';
|
import stringcase
from ariadne import convert_camel_case_to_snake
from django import forms
class GraphQLForm(forms.Form):
    """Django form adapted for GraphQL input dictionaries.

    Incoming keys are converted from camelCase (GraphQL convention) to
    snake_case (Django convention), and optional fields the client did not
    send are removed so their absence is not reported as an error.
    """

    def __init__(self, data=None, *args, **kwargs):
        # TODO: convert back the field name in the errors to CamelCase
        # TODO: provide an escape hatch for more flexible renaming
        # Guard against data=None (unbound form): the original crashed on
        # data.items() even though None is the declared default.
        if data is not None:
            data = {convert_camel_case_to_snake(k): v for k, v in data.items()}
        super().__init__(data, *args, **kwargs)
        if data is not None:
            # Drop optional fields the client did not send. Iterate over a
            # snapshot of the names since self.fields is mutated below.
            for field_name in list(self.base_fields.keys()):
                if field_name not in data and not self.fields[field_name].required:
                    self.fields.pop(field_name)

    @property
    def graphql_errors(self):
        """Form errors reshaped for a GraphQL payload, with camelCase field names."""
        errors = []
        for field_name, error_list in self.errors.get_json_data().items():
            for error in error_list:
                errors.append(
                    {
                        "field": stringcase.camelcase(field_name),
                        "message": error["message"],
                        "code": error["code"],
                    }
                )
        return errors
class GraphQLMultipleChoiceField(forms.MultipleChoiceField):
    """MultipleChoiceField accepting a list of GraphQL objects.

    Each incoming item is expected to be a mapping; the value under
    ``key_name`` (default ``"id"``) is extracted before the normal
    choice validation runs.
    """

    def __init__(self, key_name="id", *args, **kwargs):
        # key_name: the dict key that holds the actual choice value.
        self.key_name = key_name
        super().__init__(*args, **kwargs)

    def to_python(self, value):
        # TODO: ValueError/ValidationError if not a list of dict ? Still accept a list of int/str ?
        if not value:
            return []
        # NOTE(review): assumes every item is a mapping containing key_name —
        # a plain list of ids raises TypeError here; confirm callers.
        value = [x[self.key_name] for x in value]
        return super().to_python(value)
class GraphQLChoiceField(forms.ChoiceField):
    """ChoiceField accepting a single GraphQL object.

    The value under ``key_name`` (default ``"id"``) is extracted from the
    incoming mapping before the normal choice validation runs.
    """

    def __init__(self, key_name="id", *args, **kwargs):
        # key_name: the dict key that holds the actual choice value.
        self.key_name = key_name
        super().__init__(*args, **kwargs)

    def to_python(self, value):
        # TODO: ValueError/ValidationError if not a dict ? Still accept a single str/int ?
        # Treat a missing value the way ChoiceField itself does (yields '')
        # instead of crashing on None[self.key_name].
        if value is None:
            return super().to_python(value)
        return super().to_python(value[self.key_name])
class EmptyValue:
    """
    Can be used in forms.CharField like `CharField(required=False, min_length=3, empty_value=EmptyValue())`.
    This is useful to force the min_length validator to trigger if you send it an empty string,
    without the `empty_value=EmptyValue` and when `required=False` it would be skipped.
    Technical explanation: in `.run_validators()` (inherited from Field), the validators are short circuited
    when `value in self.empty_values` evaluates to True and `empty_value=''` for CharField so the value `''`
    (the empty string) bypasses all validation.
    Deeper technical explanation: this is needed only when `required=False` because otherwise `Field.validate()`
    raises a Validation error when the field value equals to `empty_value`.
    "We need to go deeper" technical explanation: by setting `empty_value=EmptyValue`, there is no chance that
    the field value will ever be equal to EmptyValue.
    """

    def __eq__(self, other):
        # Deliberately never equal to anything (including '' and other
        # EmptyValue instances) so `value in field.empty_values` is never True.
        return False

    # Defining __eq__ implicitly sets __hash__ to None (unhashable in
    # Python 3); restore the identity-based default so instances can still
    # be placed in sets or used as dict keys.
    __hash__ = object.__hash__

    def __len__(self):
        # Report zero length so the sentinel still behaves like "empty" text.
        return 0
|
<reponame>arcology-network/consensus-engine<filename>proto/tendermint/types/block.pb.go
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: tendermint/types/block.proto
package types
import (
fmt "fmt"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
math_bits "math/bits"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package

// Block is the generated representation of tendermint.types.Block: a header,
// the transaction data, an evidence list, and (except for the first block)
// the commit certifying the previous block.
// NOTE: generated by protoc-gen-gogo from tendermint/types/block.proto;
// only comments may be edited by hand.
type Block struct {
	Header     Header       `protobuf:"bytes,1,opt,name=header,proto3" json:"header"`
	Data       Data         `protobuf:"bytes,2,opt,name=data,proto3" json:"data"`
	Evidence   EvidenceList `protobuf:"bytes,3,opt,name=evidence,proto3" json:"evidence"`
	LastCommit *Commit      `protobuf:"bytes,4,opt,name=last_commit,json=lastCommit,proto3" json:"last_commit,omitempty"`
}

// Reset clears the message to its zero value.
func (m *Block) Reset() { *m = Block{} }

// String renders the message in the compact proto text format.
func (m *Block) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks *Block as a protobuf message.
func (*Block) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path within it.
func (*Block) Descriptor() ([]byte, []int) {
	return fileDescriptor_70840e82f4357ab1, []int{0}
}

// The XXX_* methods below are internal plumbing used by the gogo/proto runtime.
func (m *Block) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *Block) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		return xxx_messageInfo_Block.Marshal(b, m, deterministic)
	} else {
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *Block) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Block.Merge(m, src)
}
func (m *Block) XXX_Size() int {
	return m.Size()
}
func (m *Block) XXX_DiscardUnknown() {
	xxx_messageInfo_Block.DiscardUnknown(m)
}

var xxx_messageInfo_Block proto.InternalMessageInfo
// GetHeader returns the block header, or the zero Header if m is nil.
func (m *Block) GetHeader() Header {
	if m != nil {
		return m.Header
	}
	return Header{}
}

// GetData returns the transaction data, or the zero Data if m is nil.
func (m *Block) GetData() Data {
	if m != nil {
		return m.Data
	}
	return Data{}
}

// GetEvidence returns the evidence list, or the zero EvidenceList if m is nil.
func (m *Block) GetEvidence() EvidenceList {
	if m != nil {
		return m.Evidence
	}
	return EvidenceList{}
}

// GetLastCommit returns the commit for the previous block, or nil if unset.
func (m *Block) GetLastCommit() *Commit {
	if m != nil {
		return m.LastCommit
	}
	return nil
}

// Register the message type and the compressed file descriptor with the
// proto runtime at package initialisation.
func init() {
	proto.RegisterType((*Block)(nil), "tendermint.types.Block")
}
func init() { proto.RegisterFile("tendermint/types/block.proto", fileDescriptor_70840e82f4357ab1) }

// fileDescriptor_70840e82f4357ab1 is the gzipped FileDescriptorProto for
// tendermint/types/block.proto (generated data; do not edit).
var fileDescriptor_70840e82f4357ab1 = []byte{
	// 296 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0x41, 0x4b, 0xf3, 0x30,
	0x18, 0xc7, 0x9b, 0xf7, 0xad, 0x43, 0xb2, 0x8b, 0x14, 0x91, 0x32, 0x24, 0x13, 0x4f, 0x5e, 0x6c,
	0x44, 0x41, 0xf0, 0x26, 0x9d, 0xc2, 0x04, 0x0f, 0x32, 0x3d, 0x09, 0x22, 0x69, 0xfa, 0xd0, 0x06,
	0xdb, 0x64, 0x34, 0x99, 0xe0, 0xb7, 0xf0, 0x63, 0xed, 0xb8, 0xa3, 0x27, 0x91, 0xf6, 0xee, 0x67,
	0x90, 0xa6, 0x51, 0x61, 0xc5, 0x4b, 0x78, 0xc8, 0xef, 0xff, 0x4b, 0xfe, 0x24, 0x78, 0xd7, 0x80,
	0x4c, 0xa1, 0x2a, 0x85, 0x34, 0xd4, 0xbc, 0xcc, 0x41, 0xd3, 0xa4, 0x50, 0xfc, 0x29, 0x9a, 0x57,
	0xca, 0xa8, 0x60, 0xeb, 0x97, 0x46, 0x96, 0x8e, 0xb6, 0x33, 0x95, 0x29, 0x0b, 0x69, 0x3b, 0x75,
	0xb9, 0x51, 0xff, 0x14, 0xbb, 0x3a, 0x3a, 0xee, 0x51, 0x78, 0x16, 0x29, 0x48, 0x0e, 0x5d, 0x60,
	0xff, 0x13, 0xe1, 0x8d, 0xb8, 0xbd, 0x36, 0x38, 0xc5, 0x83, 0x1c, 0x58, 0x0a, 0x55, 0x88, 0xf6,
	0xd0, 0xc1, 0xf0, 0x38, 0x8c, 0xd6, 0x1b, 0x44, 0x53, 0xcb, 0x63, 0x7f, 0xf9, 0x3e, 0xf6, 0x66,
	0x2e, 0x1d, 0x1c, 0x61, 0x3f, 0x65, 0x86, 0x85, 0xff, 0xac, 0xb5, 0xd3, 0xb7, 0x2e, 0x98, 0x61,
	0xce, 0xb1, 0xc9, 0xe0, 0x1c, 0x6f, 0x7e, 0xb7, 0x08, 0xff, 0x5b, 0x8b, 0xf4, 0xad, 0x4b, 0x97,
	0xb8, 0x16, 0xda, 0x38, 0xfb, 0xc7, 0x0a, 0xce, 0xf0, 0xb0, 0x60, 0xda, 0x3c, 0x72, 0x55, 0x96,
	0xc2, 0x84, 0xfe, 0x5f, 0x85, 0x27, 0x96, 0xcf, 0x70, 0x1b, 0xee, 0xe6, 0xf8, 0x61, 0x59, 0x13,
	0xb4, 0xaa, 0x09, 0xfa, 0xa8, 0x09, 0x7a, 0x6d, 0x88, 0xb7, 0x6a, 0x88, 0xf7, 0xd6, 0x10, 0xef,
	0x7e, 0x92, 0x09, 0x93, 0x2f, 0x92, 0x88, 0xab, 0x92, 0x4e, 0x6f, 0xae, 0x6e, 0xef, 0x80, 0xe7,
	0x52, 0x15, 0x2a, 0x13, 0xa0, 0x29, 0x57, 0x52, 0x83, 0xd4, 0x0b, 0x7d, 0x08, 0x32, 0x13, 0x12,
	0x68, 0xf7, 0x11, 0xeb, 0xcf, 0x9b, 0x0c, 0xec, 0xfe, 0xc9, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff,
	0x1d, 0x9d, 0xa3, 0xae, 0xdd, 0x01, 0x00, 0x00,
}
// Marshal encodes the message into a freshly allocated, exactly-sized buffer.
func (m *Block) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo encodes the message into dAtA, which must be at least Size() bytes.
func (m *Block) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}

// MarshalToSizedBuffer encodes the message backwards from the end of dAtA,
// emitting fields in reverse field-number order (gogoproto convention).
func (m *Block) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	// Field 4 (last_commit); tag byte 0x22 = (4<<3)|2.
	if m.LastCommit != nil {
		{
			size, err := m.LastCommit.MarshalToSizedBuffer(dAtA[:i])
			if err != nil {
				return 0, err
			}
			i -= size
			i = encodeVarintBlock(dAtA, i, uint64(size))
		}
		i--
		dAtA[i] = 0x22
	}
	// Field 3 (evidence); tag byte 0x1a.
	{
		size, err := m.Evidence.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintBlock(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0x1a
	// Field 2 (data); tag byte 0x12.
	{
		size, err := m.Data.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintBlock(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0x12
	// Field 1 (header); tag byte 0x0a.
	{
		size, err := m.Header.MarshalToSizedBuffer(dAtA[:i])
		if err != nil {
			return 0, err
		}
		i -= size
		i = encodeVarintBlock(dAtA, i, uint64(size))
	}
	i--
	dAtA[i] = 0xa
	return len(dAtA) - i, nil
}

// encodeVarintBlock writes v as a protobuf varint ending just before offset
// and returns the index of the varint's first byte.
func encodeVarintBlock(dAtA []byte, offset int, v uint64) int {
	offset -= sovBlock(v)
	base := offset
	for v >= 1<<7 {
		dAtA[offset] = uint8(v&0x7f | 0x80)
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v)
	return base
}

// Size returns the encoded size of the message in bytes.
func (m *Block) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	l = m.Header.Size()
	n += 1 + l + sovBlock(uint64(l))
	l = m.Data.Size()
	n += 1 + l + sovBlock(uint64(l))
	l = m.Evidence.Size()
	n += 1 + l + sovBlock(uint64(l))
	if m.LastCommit != nil {
		l = m.LastCommit.Size()
		n += 1 + l + sovBlock(uint64(l))
	}
	return n
}

// sovBlock returns the number of bytes the varint encoding of x occupies.
func sovBlock(x uint64) (n int) {
	return (math_bits.Len64(x|1) + 6) / 7
}

// sozBlock is sovBlock applied to the zig-zag encoding of x.
func sozBlock(x uint64) (n int) {
	return sovBlock(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
// Unmarshal decodes the protobuf wire-format bytes in dAtA into m.
// Generated code: loops over tag/value pairs, dispatching on field number;
// unknown fields are skipped and retained-length checks guard every read.
func (m *Block) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field key (tag) varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowBlock
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: Block: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: Block: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// header (embedded message, length-delimited)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Header", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBlock
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBlock
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBlock
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Header.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 2:
			// data (embedded message, length-delimited)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Data", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBlock
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBlock
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBlock
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Data.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			// evidence (embedded message, length-delimited)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Evidence", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBlock
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBlock
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBlock
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if err := m.Evidence.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 4:
			// last_commit (optional embedded message; allocated on demand)
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field LastCommit", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowBlock
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthBlock
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				return ErrInvalidLengthBlock
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.LastCommit == nil {
				m.LastCommit = &Commit{}
			}
			if err := m.LastCommit.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: skip it, preserving wire-format validity.
			iNdEx = preIndex
			skippy, err := skipBlock(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if (skippy < 0) || (iNdEx+skippy) < 0 {
				return ErrInvalidLengthBlock
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipBlock returns the number of bytes occupied by the field that starts at
// the beginning of dAtA, honouring (possibly nested) group wire types.
func skipBlock(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0
	for iNdEx < l {
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowBlock
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// varint: advance past its terminating byte
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowBlock
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// fixed64
			iNdEx += 8
		case 2:
			// length-delimited: read the length varint, then skip the payload
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowBlock
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthBlock
			}
			iNdEx += length
		case 3:
			// group start
			depth++
		case 4:
			// group end
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroupBlock
			}
			depth--
		case 5:
			// fixed32
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		if iNdEx < 0 {
			return 0, ErrInvalidLengthBlock
		}
		if depth == 0 {
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}

// Sentinel errors returned by the generated (un)marshalling code.
var (
	ErrInvalidLengthBlock        = fmt.Errorf("proto: negative length found during unmarshaling")
	ErrIntOverflowBlock          = fmt.Errorf("proto: integer overflow")
	ErrUnexpectedEndOfGroupBlock = fmt.Errorf("proto: unexpected end of group")
)
|
#!/bin/bash
# https://devcenter.heroku.com/articles/build-and-release-using-the-api
#
# Deploys the checked-out application ($HOME/clone) to Heroku using the
# "sources" and "builds" API endpoints.
#
# To run this script on Codeship, add the following
# command as a custom deployment script:
# \curl -sSL https://raw.githubusercontent.com/codeship/scripts/master/deployments/heroku.sh | bash -s $HEROKU_APP_NAME
#
# Add the following environment variables to your project configuration:
# * HEROKU_APP_NAME
# * HEROKU_API_KEY
#
# If you need to deploy multiple Heroku apps you can setup multiple app name variables like
# HEROKU_APP_NAME_1 and HEROKU_APP_NAME_2 and then call the script twice passing in the different names

# Fail loudly if required configuration is missing.
HEROKU_APP_NAME=${1:?'You need to provide your Heroku app name.'}
HEROKU_API_KEY=${HEROKU_API_KEY:?'Set the HEROKU_API_KEY environment variable. Get the key from https://dashboard.heroku.com/account'}

APPLICATION_FOLDER=$HOME/clone
# Seconds to wait so Heroku can persist the build result before we query it.
AFTER_DEPLOYMENT_WAIT_TIME=5

echo "STARTING DEPLOYMENT"

# Report the line of this script on which a command failed (via the ERR trap).
function error_message() {
  echo -e "DEPLOYMENT FAILED on line $1 of the deployment script"
}

set -e
# Sanity check: the API key must grant access to the app.
heroku apps:info "${HEROKU_APP_NAME}"
echo -e "\e[32mThe application \"${HEROKU_APP_NAME}\" can be accessed.\e[39m"

trap 'error_message $LINENO' ERR
set -o pipefail
set -e

echo "CHANGING Directory to $APPLICATION_FOLDER"
cd "$APPLICATION_FOLDER"

#echo "CHECKING Access to Heroku application $HEROKU_APP_NAME"
#codeship_heroku check_access $HEROKU_APP_NAME

ARTIFACT_PATH=/tmp/deployable_artifact.tar.gz
echo "PACKAGING tar.gz for deployment"
tar -pczf "$ARTIFACT_PATH" ./

echo "PREPARING Heroku source for upload"
# $(...) replaces legacy backticks; quoting expansions prevents word splitting.
sources=$(curl -sS -X POST "https://api.heroku.com/apps/$HEROKU_APP_NAME/sources" -H 'Accept: application/vnd.heroku+json; version=3' -H "Authorization: Bearer $HEROKU_API_KEY")
get_url=$(echo "$sources" | jq -r .source_blob.get_url)
put_url=$(echo "$sources" | jq -r .source_blob.put_url)

echo "UPLOADING tar.gz file to Heroku"
curl -sS -X PUT "$put_url" -H 'Content-Type:' --data-binary @"$ARTIFACT_PATH"

echo "STARTING Build process on Heroku"
deployment=$(curl -sS -X POST "https://api.heroku.com/apps/$HEROKU_APP_NAME/builds" -d "{\"source_blob\":{\"url\":\"$get_url\", \"version\": \"$CI_COMMIT_ID\"}}" -H 'Accept: application/vnd.heroku+json; version=3' -H 'Content-Type: application/json' -H "Authorization: Bearer $HEROKU_API_KEY")
deployment_id=$(echo "$deployment" | jq -r .id)
echo "DEPLOYMENT: $deployment_id"

# Stream the build output so it appears in the CI log.
output_stream_url=$(echo "$deployment" | jq -r .output_stream_url)
curl -sS "$output_stream_url"

# Sleep to allow Heroku to store the result of the deployment
sleep "$AFTER_DEPLOYMENT_WAIT_TIME"

echo "CHECKING API for deployment success"
deployment_result_json=$(curl -sS "https://api.heroku.com/apps/$HEROKU_APP_NAME/builds/$deployment_id" -H 'Accept: application/vnd.heroku+json; version=3' -H "Authorization: Bearer $HEROKU_API_KEY")
deployment_status=$(echo "$deployment_result_json" | jq -r .status)
echo "DEPLOYMENT STATUS: $deployment_status"

# Any status other than "succeeded" fails this test and, via set -e, the script.
[ "$deployment_status" = "succeeded" ]
echo "DEPLOYMENT SUCCESSFUL"
#!/bin/sh
# Creates a throw-away MySQL instance on the given port/datadir (or stops it
# when the third argument is "stop"), then creates an empty "test" database.
set -eu

[ $# -ge 2 ] || {
	echo "Usage: debian/setup-mysql.sh port data-dir" >&2
	exit 1
}

# CLI arguments #
port=$1
datadir=$2
action=${3:-start}

# mysqld must not run as root; use the service account when invoked as root.
if [ "$(id -u)" -eq 0 ]; then
	user="mysql"
else
	user="$(whoami)"
fi

# Some vars #
socket=$datadir/mysql.sock

# Commands (word splitting on these variables is intentional):
mysql="mysql --no-defaults --user root --socket=$socket --no-beep" # kept for interactive debugging; not used below
mysqladmin="mysqladmin --no-defaults --user root --port $port --host 127.0.0.1 --socket=$socket --no-beep"
mysqld="/usr/sbin/mysqld --no-defaults --user=$user --bind-address=127.0.0.1 --port=$port --socket=$socket --datadir=$datadir"
# Extract "major.minor" from the server version banner, e.g. "5.7".
mysqld_version=$($mysqld -V 2>/dev/null | sed -ne 's/.*Ver \([0-9]\+\.[0-9]\+\).*/\1/p')

# Main code #
if [ "$action" = "stop" ]; then
	$mysqladmin shutdown
	exit
fi

# Re-create the data directory from scratch.  Quoting "$datadir" guards the
# destructive rm -rf against word splitting if the path contains spaces.
rm -rf "$datadir"
mkdir -p "$datadir"
chmod go-rx "$datadir"
chown "$user": "$datadir"

case "$mysqld_version" in
10.0)
	echo "MariaDB is not supported yet in the test script"
	exit 1
	;;
5.7)
	# 5.7 dropped mysql_install_db in favour of --initialize-insecure.
	$mysqld --initialize-insecure
	;;
5.5|5.6|*)
	mysql_install_db --no-defaults --user="$user" --datadir="$datadir"
	;;
esac

# Start the daemon
$mysqld &
pid=$!

# Wait for the server to be actually available (up to ~60 seconds).
c=0
while ! nc -z 127.0.0.1 "$port"; do
	c=$((c+1))
	sleep 3
	if [ "$c" -gt 20 ]; then
		echo "Timed out waiting for mysql server to be available" >&2
		if [ "$pid" ]; then
			kill $pid || :
			sleep 2
			kill -s KILL $pid || :
		fi
		exit 1
	fi
done

# Check if the server is running
$mysqladmin status
# Drop the database if it exists
$mysqladmin --force --silent drop test || true
# Create new empty database
$mysqladmin create test
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Unit tests using the pytest framework.
@file test_serialreader.py
@author: <NAME> (juusokorhonen on github.com)
@license: MIT License
"""
import pytest
import os
import pty
import string
# pyserial is optional: without it the serial tests are skipped via the
# skipif marks further down instead of erroring at import time.
try:
    import serial
except ModuleNotFoundError:
    print("Cannot test serial functionality without 'pyserial'. Skipping tests.")
    serial = None
@pytest.fixture
def fake_serial_ports():
    """Creates a fake pair of tty's.

    Returns
    -------
    tuple(master_pty, slave_serial)
        File descriptor (int) for the master end of the pty.
        TTY device name (str) for the slave end.
    """
    master, slave = pty.openpty()
    return (master, os.ttyname(slave))
@pytest.mark.skipif(serial is None, reason="Cannot test without 'serial' module.")
@pytest.mark.skipif(pty is None, reason="Cannot test without 'pty' module (available on " +
                    "unix-like platforms).")
def test_serialreader(fake_serial_ports):
    """Tests importing and basic functionality of serialreader.

    Uses a pty pair: the test writes to the master end while SerialReader
    reads from the slave tty.
    """
    from readersender.readers import SerialReader

    master_pty, slave_tty = fake_serial_ports
    assert master_pty is not None
    assert slave_tty is not None

    # At minimum the fake slave tty should be enumerable.
    available_ports = SerialReader.available_ports()
    assert available_ports is not None
    assert isinstance(available_ports, list)
    assert len(available_ports) > 0

    # NOTE(review): `sr.connected` is asserted before connect() is called --
    # SerialReader appears to open the port on construction; confirm.
    sr = SerialReader(port=slave_tty)
    assert sr.connected
    sr.connect()
    assert sr.connected

    sr.serial.reset_input_buffer()
    assert sr.serial.in_waiting == 0

    with os.fdopen(master_pty, "wb") as fd:
        assert sr.encoding == 'utf-8'
        # NOTE(review): "".join(...) of a single str is a no-op here.
        probe_msg = bytes("".join(string.ascii_letters + string.digits + string.punctuation),
                          encoding='utf-8')
        fd.write(probe_msg)
        fd.flush()
        assert sr.serial.in_waiting == len(probe_msg)
        assert sr.read() == probe_msg
        assert sr.serial.in_waiting == 0

        sr.disconnect()
        assert not sr.connected

        # Switch to a legacy 8-bit encoding and configure a read command.
        probe_msg = bytes("Sáhtán borrat lása, dat ii leat bávččas.",
                          encoding='cp1250')
        sr.encoding = 'cp1250'
        assert sr.encoding == 'cp1250'
        assert sr.read_command is None
        sr.read_command = 'p'
        assert sr.read_command == 'p'

        sr.connect()
        assert sr.serial.in_waiting == 0
        fd.write(probe_msg)
        fd.flush()
        assert sr.serial.in_waiting == len(probe_msg)
        # read(flush=True) discards the pending input rather than returning it.
        assert len(sr.read(flush=True)) == 0
        fd.write(probe_msg)
        fd.flush()
        assert sr.read() == probe_msg
|
#!/bin/bash
# Deployment script for WebApp
#
# Builds the React application in ./web and syncs the build output to the
# S3 bucket named by $APPLICATION_BUCKET.

echo "APPLICATION_BUCKET=$APPLICATION_BUCKET"
export APPLICATION_DIR="web"
echo "APPLICATION_DIR=$APPLICATION_DIR"
# export REACT_APP_API_DOMAIN="https://xxx.execute-api.us-east-1.amazonaws.com/dev"
echo "REACT_APP_API_DOMAIN=$REACT_APP_API_DOMAIN"

echo -e "Deploying Web Application..."
echo -ne "\tApp bucket name: ${APPLICATION_BUCKET}\n"
echo -ne "\tPACKAGE_VERSION: " &&
# Pull the "version" field out of package.json (grep directly; cat was redundant).
PACKAGE_VERSION=$(grep version "$APPLICATION_DIR/package.json" | head -1 | awk -F: '{ print $2 }' | sed 's/[",]//g' | tr -d '[[:space:]]') &&
echo -e "$PACKAGE_VERSION"

# FIX: "-e" was missing here, so a literal "\t" was printed instead of a tab.
echo -e "\tBuilding application... "
cd "$APPLICATION_DIR"
rm -rf build  # -f: do not fail when no previous build exists
npm install
npm run build
echo "Done"

echo -ne "\tUploading files... " &&
aws s3 sync build "s3://$APPLICATION_BUCKET" --delete >/dev/null &&
echo -e "Done"

# Re-copy index.html onto itself to set a no-cache header so new releases
# are picked up immediately.
echo -ne "\tSetting cache-control header... " &&
aws s3api copy-object --copy-source "$APPLICATION_BUCKET/index.html" --bucket "$APPLICATION_BUCKET" --key index.html --metadata-directive REPLACE --cache-control "max-age=0" --content-type "text/html" >/dev/null &&
echo -e "Done"
-- Cassandra schema bootstrap for the webapp keyspace.
-- NOTE(review): this drops keyspace "test" but the statements below create
-- keyspace "webapp" -- confirm the DROP target is intentional.
DROP KEYSPACE IF EXISTS test;

-- Single-node development setup: SimpleStrategy with one replica.
CREATE KEYSPACE IF NOT EXISTS webapp WITH replication = {
  'class':'SimpleStrategy',
  'replication_factor':1
};

-- Fruit catalogue, partitioned by store; rows within a partition are
-- clustered (and therefore sorted) by fruit name.
CREATE TABLE IF NOT EXISTS webapp.fruit(
  store_id text,
  name text,
  description text,
  PRIMARY KEY(
    (store_id),
    name
  )
);

-- Simple message table keyed by integer id.
CREATE TABLE IF NOT EXISTS webapp.msg(
  id int,
  msg text,
  PRIMARY KEY(id)
);
|
// Generated type declaration for the couriers feature module (presumably a
// NestJS module given the repo name -- the implementation lives in the
// compiled JS output; this .d.ts only exposes the type).
export declare class CouriersModule {
}
|
->
E
/ \
% D
/ \
/ \
A /
B C |
<reponame>smartao/estudos_python
#!/usr/bin/python3
'''
'''
def spam():
    # Calls bacon(), which raises; exists to demonstrate how a traceback
    # records the whole call chain.
    bacon()

def bacon():
    # Always fails -- this raise site appears at the bottom of the traceback.
    raise Exception('This is the error message.')

# Trigger the example error at import/run time (intentionally uncaught).
spam()
'''
Exemplo de saida, leia-se de cima para baixo!
Execao ocorreu na linha 12, mostrando o tipo que foi
que foi chamado na linha 8, pela funcao bacon()
que foi chamado pela linha 15 pela funcao spam()
Traceback (most recent call last):
File "03_errorExample.py", line 15, in <module>
spam()
File "03_errorExample.py", line 8, in spam
bacon()
File "03_errorExample.py", line 12, in bacon
raise Exception('This is the error message.')
Exception: This is the error message.
'''
# Fonte
# Livro Automatiando tarefas maçantes com python, capitulo 10
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The BitRub Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check for duplicate includes.
# Guard against accidental introduction of new Boost dependencies.
# Check includes: Check for duplicate includes. Enforce bracket syntax includes.

export LC_ALL=C
IGNORE_REGEXP="/(leveldb|secp256k1|univalue)/"

# cd to root folder of git repo for git ls-files to work properly
# (FIX: "$0" is now quoted so paths containing spaces do not word-split).
cd "$(dirname "$0")/../.." || exit 1

# List tracked src/ files with the given extension, excluding vendored code.
filter_suffix() {
    git ls-files | grep -E "^src/.*\.${1}"'$' | grep -Ev "${IGNORE_REGEXP}"
}

EXIT_CODE=0

# 1. Fail on duplicate #include lines within a single header.
for HEADER_FILE in $(filter_suffix h); do
    DUPLICATE_INCLUDES_IN_HEADER_FILE=$(grep -E "^#include " < "${HEADER_FILE}" | sort | uniq -d)
    if [[ ${DUPLICATE_INCLUDES_IN_HEADER_FILE} != "" ]]; then
        echo "Duplicate include(s) in ${HEADER_FILE}:"
        echo "${DUPLICATE_INCLUDES_IN_HEADER_FILE}"
        echo
        EXIT_CODE=1
    fi
done

# 2. Same check for implementation files.
for CPP_FILE in $(filter_suffix cpp); do
    DUPLICATE_INCLUDES_IN_CPP_FILE=$(grep -E "^#include " < "${CPP_FILE}" | sort | uniq -d)
    if [[ ${DUPLICATE_INCLUDES_IN_CPP_FILE} != "" ]]; then
        echo "Duplicate include(s) in ${CPP_FILE}:"
        echo "${DUPLICATE_INCLUDES_IN_CPP_FILE}"
        echo
        EXIT_CODE=1
    fi
done

# 3. Never #include a .cpp file.
INCLUDED_CPP_FILES=$(git grep -E "^#include [<\"][^>\"]+\.cpp[>\"]" -- "*.cpp" "*.h")
if [[ ${INCLUDED_CPP_FILES} != "" ]]; then
    echo "The following files #include .cpp files:"
    echo "${INCLUDED_CPP_FILES}"
    echo
    EXIT_CODE=1
fi

# 4. Whitelist of the Boost headers the project is allowed to depend on.
EXPECTED_BOOST_INCLUDES=(
    boost/algorithm/string.hpp
    boost/algorithm/string/classification.hpp
    boost/algorithm/string/replace.hpp
    boost/algorithm/string/split.hpp
    boost/chrono/chrono.hpp
    boost/date_time/posix_time/posix_time.hpp
    boost/filesystem.hpp
    boost/filesystem/fstream.hpp
    boost/multi_index/hashed_index.hpp
    boost/multi_index/ordered_index.hpp
    boost/multi_index/sequenced_index.hpp
    boost/multi_index_container.hpp
    boost/optional.hpp
    boost/preprocessor/cat.hpp
    boost/preprocessor/stringize.hpp
    boost/signals2/connection.hpp
    boost/signals2/last_value.hpp
    boost/signals2/signal.hpp
    boost/test/unit_test.hpp
    boost/thread.hpp
    boost/thread/condition_variable.hpp
    boost/thread/mutex.hpp
    boost/thread/thread.hpp
    boost/variant.hpp
    boost/variant/apply_visitor.hpp
    boost/variant/static_visitor.hpp
)

# Flag any Boost header in use that is not on the whitelist.
for BOOST_INCLUDE in $(git grep '^#include <boost/' -- "*.cpp" "*.h" | cut -f2 -d: | cut -f2 -d'<' | cut -f1 -d'>' | sort -u); do
    IS_EXPECTED_INCLUDE=0
    for EXPECTED_BOOST_INCLUDE in "${EXPECTED_BOOST_INCLUDES[@]}"; do
        if [[ "${BOOST_INCLUDE}" == "${EXPECTED_BOOST_INCLUDE}" ]]; then
            IS_EXPECTED_INCLUDE=1
            break
        fi
    done
    if [[ ${IS_EXPECTED_INCLUDE} == 0 ]]; then
        EXIT_CODE=1
        echo "A new Boost dependency in the form of \"${BOOST_INCLUDE}\" appears to have been introduced:"
        git grep "${BOOST_INCLUDE}" -- "*.cpp" "*.h"
        echo
    fi
done

# Conversely, fail (on purpose) when a whitelisted header is no longer used,
# so the whitelist shrinks over time.
for EXPECTED_BOOST_INCLUDE in "${EXPECTED_BOOST_INCLUDES[@]}"; do
    if ! git grep -q "^#include <${EXPECTED_BOOST_INCLUDE}>" -- "*.cpp" "*.h"; then
        echo "Good job! The Boost dependency \"${EXPECTED_BOOST_INCLUDE}\" is no longer used."
        echo "Please remove it from EXPECTED_BOOST_INCLUDES in $0"
        echo "to make sure this dependency is not accidentally reintroduced."
        echo
        EXIT_CODE=1
    fi
done

# 5. Enforce bracket-syntax includes everywhere (outside vendored code).
QUOTE_SYNTAX_INCLUDES=$(git grep '^#include "' -- "*.cpp" "*.h" | grep -Ev "${IGNORE_REGEXP}")
if [[ ${QUOTE_SYNTAX_INCLUDES} != "" ]]; then
    echo "Please use bracket syntax includes (\"#include <foo.h>\") instead of quote syntax includes:"
    echo "${QUOTE_SYNTAX_INCLUDES}"
    echo
    EXIT_CODE=1
fi

exit ${EXIT_CODE}
|
<filename>src/main/java/com/qdynasty/security/validate/impl/SessionValidateCodeRepository.java
/**
 * Session-based storage for generated validation codes.
 */
package com.qdynasty.security.validate.impl;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang3.StringUtils;
import org.springframework.social.connect.web.HttpSessionSessionStrategy;
import org.springframework.social.connect.web.SessionStrategy;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.ServletWebRequest;

import com.qdynasty.constants.AppConstants;
import com.qdynasty.security.validate.ValidateCodeRepository;
import com.qdynasty.security.validate.entity.ValidateCode;
import com.qdynasty.security.validate.enums.ValidateCodeType;

/**
 * {@link ValidateCodeRepository} implementation that keeps validation codes
 * in the user's HTTP session, keyed by code type.
 *
 * @author fei.qin
 */
@Component("sessionValidateCodeRepository")
public class SessionValidateCodeRepository implements ValidateCodeRepository {

	/**
	 * spring-social helper used to read/write HTTP session attributes.
	 */
	private SessionStrategy sessionStrategy = new HttpSessionSessionStrategy();

	/**
	 * Saves the code in the session, under a key derived from the code type.
	 *
	 * NOTE(review): this stores {@code validateCode.getCode()}, while
	 * {@link #get} casts the attribute back to {@code ValidateCode}; if
	 * {@code getCode()} does not return a {@code ValidateCode}, get() will
	 * throw {@code ClassCastException} -- please verify.
	 */
	@Override
	public void save(HttpServletRequest request, ValidateCode validateCode, ValidateCodeType validateCodeType) {
		sessionStrategy.setAttribute(new ServletWebRequest(request), getSessionKey(validateCodeType),
				validateCode.getCode());
	}

	/**
	 * Retrieves the validation code of the given type from the session,
	 * or null when none was stored.
	 */
	@Override
	public ValidateCode get(HttpServletRequest request, ValidateCodeType validateCodeType) {
		return (ValidateCode) sessionStrategy.getAttribute(new ServletWebRequest(request),
				getSessionKey(validateCodeType));
	}

	/**
	 * Removes the validation code of the given type from the session.
	 */
	@Override
	public void remove(HttpServletRequest request, ValidateCodeType validateCodeType) {
		sessionStrategy.removeAttribute(new ServletWebRequest(request), getSessionKey(validateCodeType));
	}

	/**
	 * Builds the session attribute key for the given code type:
	 * a fixed prefix followed by the upper-cased type name.
	 *
	 * @return the session key for this validation-code type
	 */
	private String getSessionKey(ValidateCodeType validateCodeType) {
		return AppConstants.SESSION_VALIDATE_KEY_PREFIX + StringUtils.upperCase(validateCodeType.toString());
	}
}
|
def convertStringToInt(s):
    """Convert a decimal string to an int by positional accumulation.

    Generalized to accept an optional leading '+'/'-' sign; digit-only input
    behaves exactly as before. An empty string yields 0 (matching the
    original behaviour). Raises ValueError if any character is not a digit.
    (Also renamed the local `pow`, which shadowed the builtin.)
    """
    sign = 1
    if s and s[0] in '+-':
        if s[0] == '-':
            sign = -1
        s = s[1:]
    res = 0
    place = 1  # value of the current digit position (1, 10, 100, ...)
    for char in s[::-1]:
        res += int(char) * place
        place *= 10
    return sign * res

print (convertStringToInt("12345"))
#Output: 12345
<reponame>martbrugg/loopback-offline-example<filename>client/lbclient/boot/replication.js
// Copyright IBM Corp. 2014,2015. All Rights Reserved.
// Node module: loopback-example-offline-sync
// This file is licensed under the MIT License.
// License text available at https://opensource.org/licenses/MIT
'use strict';
// TODO(bajtos) Move the bi-di replication to loopback core,
// add model settings to enable the replication.
// Example:
// LocalTodo: { options: {
// base: 'Todo',
// replicate: {
// target: 'Todo',
// mode: 'push' | 'pull' | 'bidi'
// }}}
var proquint = require('proquint');
module.exports = function(client) {
var LocalTodo = client.models.LocalTodo;
var RemoteTodo = client.models.RemoteTodo;
client.network = {
_isConnected: true,
get isConnected() {
console.log('isConnected?', this._isConnected);
return this._isConnected;
},
set isConnected(value) {
this._isConnected = value;
}
};
// setup model replication
var since = { push: -1, pull: -1 };
function sync(cb) {
LocalTodo.replicate(
since.push,
RemoteTodo,
function pushed(err, conflicts, cps) {
since.push = cps;
RemoteTodo.replicate(
since.pull,
LocalTodo,
function pulled(err, conflicts, cps) {
since.pull = cps;
if (cb) cb();
});
});
}
// sync local changes if connected
LocalTodo.on('after save', function(ctx, next) {
next();
sync();
});
LocalTodo.on('after delete', function(ctx, next) {
next();
sync();
});
client.sync = sync;
client.getReadableModelId = function(modelId) {
return proquint.encode(new Buffer(modelId.substring(0, 8), 'binary'));
};
};
|
<reponame>Yonggyu-Jeong/-GraduationAssignment<gh_stars>0
-- --------------------------------------------------------
-- Host: 192.168.3.11
-- Server version: 10.2.11-MariaDB-10.2.11+maria~xenial-log - mariadb.org binary distribution
-- Server OS: debian-linux-gnu
-- HeidiSQL version: 11.0.0.5919
-- --------------------------------------------------------

-- The /*!NNNNN ... */ blocks are executable MySQL/MariaDB hint comments
-- (run only on servers >= version NNNNN); do not remove them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8 */;
/*!50503 SET NAMES utf8mb4 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;

-- Exporting structure of table shareonfoot.member
CREATE TABLE IF NOT EXISTS `member` (
  `id` varchar(256) NOT NULL,
  `password` varchar(45) NOT NULL,
  `name` varchar(45) NOT NULL,
  `gender` varchar(45) NOT NULL,
  `birth` varchar(100) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

-- No data selected for export.

-- Restore the client/session settings saved above.
/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IF(@OLD_FOREIGN_KEY_CHECKS IS NULL, 1, @OLD_FOREIGN_KEY_CHECKS) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
|
#!/bin/bash
#
# Copyright (c) 2017-2018 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Resolve this script's directory so lib.sh is sourced regardless of the
# caller's working directory.
cidir=$(dirname "$0")
source "${cidir}/lib.sh"
# collect_logs: gather the journal logs of every Kata component plus CRI-O,
# Docker, kubelet and the kernel.  With an argument, the logs are written
# under that directory (split into 5 MiB chunks and gzipped); without one,
# they are simply printed to stdout.
collect_logs()
{
	local -r log_copy_dest="$1"

	local -r kata_runtime_log_filename="kata-runtime.log"
	local -r kata_runtime_log_path="${log_copy_dest}/${kata_runtime_log_filename}"
	local -r kata_runtime_log_prefix="kata-runtime_"

	local -r proxy_log_filename="kata-proxy.log"
	local -r proxy_log_path="${log_copy_dest}/${proxy_log_filename}"
	local -r proxy_log_prefix="kata-proxy_"

	local -r shim_log_filename="kata-shim.log"
	local -r shim_log_path="${log_copy_dest}/${shim_log_filename}"
	local -r shim_log_prefix="kata-shim_"

	local -r ksm_throttler_log_filename="kata-ksm_throttler.log"
	local -r ksm_throttler_log_path="${log_copy_dest}/${ksm_throttler_log_filename}"
	local -r ksm_throttler_log_prefix="kata-ksm_throttler_"

	local -r vc_throttler_log_filename="kata-vc_throttler.log"
	local -r vc_throttler_log_path="${log_copy_dest}/${vc_throttler_log_filename}"
	local -r vc_throttler_log_prefix="kata-vc_throttler_"

	local -r crio_log_filename="crio.log"
	local -r crio_log_path="${log_copy_dest}/${crio_log_filename}"
	local -r crio_log_prefix="crio_"

	local -r docker_log_filename="docker.log"
	local -r docker_log_path="${log_copy_dest}/${docker_log_filename}"
	local -r docker_log_prefix="docker_"

	local -r collect_data_filename="kata-collect-data.log"
	local -r collect_data_log_path="${log_copy_dest}/${collect_data_filename}"
	local -r collect_data_log_prefix="kata-collect-data_"

	local -r kubelet_log_filename="kubelet.log"
	local -r kubelet_log_path="${log_copy_dest}/${kubelet_log_filename}"
	local -r kubelet_log_prefix="kubelet_"

	local -r kernel_log_filename="kernel.log"
	local -r kernel_log_path="${log_copy_dest}/${kernel_log_filename}"
	local -r kernel_log_prefix="kernel_"

	local -r collect_script="kata-collect-data.sh"

	# If available, procenv will be run twice - once as the current user
	# and once as the superuser.
	local -r procenv_user_log_filename="procenv-${USER}.log"
	local -r procenv_user_log_path="${log_copy_dest}/${procenv_user_log_filename}"
	local -r procenv_root_log_filename="procenv-root.log"
	local -r procenv_root_log_path="${log_copy_dest}/${procenv_root_log_filename}"

	# Detect which optional tools are installed.
	have_collect_script="no"
	collect_script_path="$(command -v $collect_script)" && have_collect_script="yes"
	have_procenv="no"
	[ -n "$(command -v procenv)" ] && have_procenv="yes"

	# Copy log files if a destination path is provided, otherwise simply
	# display them.
	if [ "${log_copy_dest}" ]; then
		# Create directory if it doesn't exist
		[ -d "${log_copy_dest}" ] || mkdir -p "${log_copy_dest}"
		# Create the log files
		sudo journalctl --no-pager -t kata-runtime > "${kata_runtime_log_path}"
		sudo journalctl --no-pager -t kata-proxy > "${proxy_log_path}"
		sudo journalctl --no-pager -t kata-shim > "${shim_log_path}"
		sudo journalctl --no-pager -u kata-ksm-throttler > "${ksm_throttler_log_path}"
		sudo journalctl --no-pager -u kata-vc-throttler > "${vc_throttler_log_path}"
		sudo journalctl --no-pager -u crio > "${crio_log_path}"
		sudo journalctl --no-pager -u docker > "${docker_log_path}"
		sudo journalctl --no-pager -u kubelet > "${kubelet_log_path}"
		sudo journalctl --no-pager -t kernel > "${kernel_log_path}"
		[ "${have_collect_script}" = "yes" ] && sudo -E PATH="$PATH" "${collect_script_path}" > "${collect_data_log_path}"
		# Split them in 5 MiB subfiles to avoid too large files.
		local -r subfile_size=5242880
		pushd "${log_copy_dest}"
		split -b "${subfile_size}" -d "${kata_runtime_log_path}" "${kata_runtime_log_prefix}"
		split -b "${subfile_size}" -d "${proxy_log_path}" "${proxy_log_prefix}"
		split -b "${subfile_size}" -d "${shim_log_path}" "${shim_log_prefix}"
		split -b "${subfile_size}" -d "${ksm_throttler_log_path}" "${ksm_throttler_log_prefix}"
		split -b "${subfile_size}" -d "${vc_throttler_log_path}" "${vc_throttler_log_prefix}"
		split -b "${subfile_size}" -d "${crio_log_path}" "${crio_log_prefix}"
		split -b "${subfile_size}" -d "${docker_log_path}" "${docker_log_prefix}"
		split -b "${subfile_size}" -d "${kubelet_log_path}" "${kubelet_log_prefix}"
		split -b "${subfile_size}" -d "${kernel_log_path}" "${kernel_log_prefix}"
		[ "${have_collect_script}" = "yes" ] && split -b "${subfile_size}" -d "${collect_data_log_path}" "${collect_data_log_prefix}"
		local prefixes=""
		prefixes+=" ${kata_runtime_log_prefix}"
		prefixes+=" ${proxy_log_prefix}"
		prefixes+=" ${shim_log_prefix}"
		prefixes+=" ${crio_log_prefix}"
		prefixes+=" ${docker_log_prefix}"
		prefixes+=" ${kubelet_log_prefix}"
		prefixes+=" ${ksm_throttler_log_prefix}"
		prefixes+=" ${vc_throttler_log_prefix}"
		prefixes+=" ${kernel_log_prefix}"
		[ "${have_collect_script}" = "yes" ] && prefixes+=" ${collect_data_log_prefix}"
		if [ "${have_procenv}" = "yes" ]
		then
			procenv --file "${procenv_user_log_path}"
			sudo -E procenv --file "${procenv_root_log_path}" && \
			sudo chown ${USER}: "${procenv_root_log_path}"
		fi
		local prefix
		# Compress log files
		for prefix in $prefixes
		do
			gzip -9 "$prefix"*
		done
		# The procenv logs are tiny so don't require chunking
		gzip -9 "${procenv_user_log_path}" "${procenv_root_log_path}"
		# Remove *.log files, which contain the uncompressed data.
		rm -f *".log"
		popd
	else
		echo "Kata Containers Runtime Log:"
		sudo journalctl --no-pager -t kata-runtime
		echo "Kata Containers Proxy Log:"
		sudo journalctl --no-pager -t kata-proxy
		echo "Kata Containers Shim Log:"
		sudo journalctl --no-pager -t kata-shim
		echo "Kata Containers KSM Throttler Log:"
		sudo journalctl --no-pager -u kata-ksm-throttler
		echo "Kata Containers Virtcontainers Throttler Log:"
		sudo journalctl --no-pager -u kata-vc-throttler
		echo "CRI-O Log:"
		sudo journalctl --no-pager -u crio
		echo "Docker Log:"
		sudo journalctl --no-pager -u docker
		echo "Kubelet Log:"
		sudo journalctl --no-pager -u kubelet
		echo "Kernel Log:"
		sudo journalctl --no-pager -t kernel
		if [ "${have_collect_script}" = "yes" ]
		then
			echo "Kata Collect Data script output"
			sudo -E PATH="$PATH" $collect_script
		fi
		if [ "${have_procenv}" = "yes" ]
		then
			echo "Procenv output (user $USER):"
			procenv
			echo "Procenv output (superuser):"
			sudo -E procenv
		fi
	fi
}
# check_log_files: dump each component's journal to a local file, run the
# kata log parser over them, and scan for fatal-error patterns.  Exits
# non-zero when any fatal pattern is found or the parser fails.
check_log_files()
{
	info "Checking log files"

	make log-parser

	local component
	local unit
	local file
	local args
	local cmd

	# Components logged by syslog identifier (-t).
	for component in \
		kata-proxy \
		kata-runtime \
		kata-shim
	do
		file="${component}.log"
		args="--no-pager -q -o cat -a -t \"${component}\""
		cmd="sudo journalctl ${args} > ${file}"
		eval "$cmd" || true
	done

	# Services logged by systemd unit (-u); keep only structured lines.
	for unit in \
		kata-ksm-throttler \
		kata-vc-throttler
	do
		file="${unit}.log"
		args="--no-pager -q -o cat -a -u \"${unit}\""
		cmd="sudo journalctl ${args} |grep ^time= > ${file}"
		eval "$cmd" || true
	done

	local -r logs=$(ls "$(pwd)"/*.log || true)
	local ret

	cmd="kata-log-parser"
	args="--debug --check-only --error-if-no-records"
	{ $cmd $args $logs; ret=$?; } || true

	local errors=0
	local log

	for log in $logs
	do
		local pattern
		local results
		# Display *all* errors caused by runtime exceptions and fatal
		# signals.
		for pattern in "fatal error" "fatal signal" "segfault at [0-9]"
		do
			# Search for pattern and print all subsequent lines with specified log
			# level.
			results=$(grep "${pattern}" "$log" || true )
			if [ -n "$results" ]
			then
				errors=1
				echo >&2 -e "ERROR: detected ${pattern} in '${log}'\n${results}"
			fi
		done
	done

	# Always remove logs since:
	#
	# - We don't want to waste disk-space.
	# - collect_logs() will save the full logs anyway.
	# - the log parser tool shows full details of what went wrong.
	rm -f $logs

	[ "$errors" -ne 0 ] && exit 1
	[ $ret -eq 0 ] && true || false
}
# check_collect_script: run the Kata data-collection script (if installed)
# under chronic, so output only appears when it fails.
check_collect_script()
{
	local -r cmd="kata-collect-data.sh"
	local -r cmdpath=$(command -v "$cmd" || true)
	local msg="Kata data collection script"
	# Silently skip when the script is not installed on this system.
	[ -z "$cmdpath" ] && info "$msg not found" && return
	info "Checking $msg"
	sudo -E PATH="$PATH" chronic $cmd
}
# Teardown entry point: collect logs first (never fails the run), then run
# the checks that are allowed to fail the phase.
main()
{
	# We always want to try to collect the logs at the end of a test run,
	# so don't run with "set -e".
	collect_logs "$@"

	# The following tests can fail and should fail the teardown phase
	# (but only after we've collected the logs).
	set -e
	check_log_files
	check_collect_script
}

main "$@"
|
<filename>code/assignment6.py
# assignment 6
# due Sunday Dec 2, 11:59pm EST
# 1. explore how (a) number of hidden units, and (b) the cost on the
# sum of squared weights, affects the network's classification of
# the input space
#
# 2. (bonus): implement an additional hidden layer and explore how
# this affects the capability of the network to divide up the input
# space into classes
# choose a different random seed each time we run the code
import time
import random  # FIX: was missing -- random.seed() below raised NameError

myseed = int(time.time())
random.seed(myseed)

# we will need this for conjugate gradient descent
from scipy.optimize import fmin_cg

# FIX: numpy names (shape, zeros, exp, tanh, multiply, hstack, reshape, ...)
# are used unqualified throughout this script; previously they were only in
# scope when the script was run under pylab.
from numpy import *

############################################################
# IMPORT TRAINING DATA #
############################################################
# inputs: 100 x 2 matrix (100 examples, 2 inputs each)
# outputs: 100 x 1 matrix (100 examples, 1 output each)
# we will want to change outputs to 4:
# outputs: 100 x 4 matrix (100 examples, 4 outputs each)
# "1" = [1,0,0,0]
# "2" = [0,1,0,0]
# "3" = [0,0,1,0]
# "4" = [0,0,0,1]
#
import pickle

# FIX: binary mode is required by pickle under Python 3 ('r' only worked on
# Python 2; 'rb' is correct on both).
fid = open('traindata.pickle', 'rb')
traindata = pickle.load(fid)
fid.close()

train_in = traindata['inputs']
n_examples = shape(train_in)[0]
out1 = traindata['outputs']

# convert one output value {1,2,3,4} into four binary outputs [o1,o2,o3,o4] {0,1}
train_out = zeros((n_examples, 4))
for i in range(n_examples):
    out_i = out1[i, 0]
    train_out[i, out_i - 1] = 1.0
############################################################
# UTILITY FUNCTIONS #
############################################################
# The output layer transfer function will be logsig [ 0, +1 ]
def logsig(x):
    """Logistic sigmoid activation, mapping inputs into (0, 1)."""
    denominator = 1.0 + exp(-x)
    return 1.0 / denominator
def dlogsig(x):
    """Derivative of logsig, expressed in terms of the activation x: x*(1-x)."""
    return multiply(x, 1.0 - x)
# The hidden layer transfer function will be tansig [-1, +1 ]
def tansig(x):
    """Hyperbolic-tangent activation, mapping inputs into (-1, +1)."""
    return tanh(x)
def dtansig(x):
    """Derivative of tansig in terms of the activation x: 1 - x*x (element-wise)."""
    squared = multiply(x, x)
    return 1.0 - squared
def pack_weights(w_hid, b_hid, w_out, b_out, params):
    """Flatten the network's weight and bias matrices into one 1-D vector.

    Layout (matching unpack_weights): hidden weights, hidden biases,
    output weights, output biases.

    params: [n_in, n_hid, n_out, ...] -- only the first three entries
    (layer sizes) are used here.
    Returns a 1-D numpy array of length n_in*n_hid + n_hid + n_hid*n_out + n_out.
    """
    n_in, n_hid, n_out = params[0], params[1], params[2]
    g_j = hstack((reshape(w_hid,(1,n_in*n_hid)),
                  reshape(b_hid,(1,n_hid)),
                  reshape(w_out,(1,n_hid*n_out)),
                  reshape(b_out,(1,n_out))))
    # array(...).ravel() flattens to 1-D for both matrix and plain ndarray
    # inputs; the original double-indexing (`[0]` then `array(g_j[0,:])[0]`)
    # only worked when the inputs were numpy matrices.
    return array(g_j).ravel()
def unpack_weights(x, params):
    """Slice the flat weight vector x back into the four weight/bias matrices.

    Inverse of pack_weights. params supplies the layer sizes
    [n_in, n_hid, n_out, ...].
    Returns (w_hid, b_hid, w_out, b_out) with shapes
    (n_in,n_hid), (1,n_hid), (n_hid,n_out), (1,n_out).
    """
    n_in, n_hid, n_out = params[0], params[1], params[2]
    # NOTE: the original also read params[3]/params[4] (training patterns)
    # into unused locals; they play no role in unpacking and were removed.
    i1,i2 = 0,n_in*n_hid
    w_hid = reshape(x[i1:i2], (n_in,n_hid))
    i1,i2 = i2,i2+n_hid
    b_hid = reshape(x[i1:i2],(1,n_hid))
    i1,i2 = i2,i2+(n_hid*n_out)
    w_out = reshape(x[i1:i2], (n_hid,n_out))
    i1,i2 = i2,i2+n_out
    b_out = reshape(x[i1:i2],(1,n_out))
    return w_hid, b_hid, w_out, b_out
def net_forward(x, params, ret_hids=False):
    """Forward-propagate the inputs in params[3] through the network.

    x is the flat weight vector; hidden layer uses tansig, output layer
    logsig (matrix products assume params[3] is a numpy matrix).
    Returns the output activations, plus the hidden activations when
    ret_hids is True.
    """
    w_hid, b_hid, w_out, b_out = unpack_weights(x, params)
    inputs = params[3]
    hidden = tansig((inputs * w_hid) + b_hid)
    outputs = logsig((hidden * w_out) + b_out)
    return (outputs, hidden) if ret_hids else outputs
def f(x,params):
    """ returns the cost (SSE) of a given weight vector """
    # targets (one-hot output matrix) and network predictions
    t = params[4]
    y = net_forward(x,params)
    # sum of squared errors over all examples and output units
    sse = sum(square(t-y))
    # L2 weight-decay penalty: params[5] scales the sum of squared weights
    # (biases included, since x packs them too)
    w_cost = params[5]*sum(square(x))
    cost = sse + w_cost
    # NOTE(review): prints on every cost evaluation -- noisy/slow inside
    # fmin_cg; consider removing for long runs. (Python 2 print statement.)
    print "sse=%7.5f wcost=%7.5f" % (sse,w_cost)
    return cost
def fd(x,params):
    """Gradient dE/dW of the cost f() with respect to the flat weight vector.

    Standard backprop for one hidden layer (tansig) and a logsig output
    layer, plus the gradient of the weight-decay term.
    Returns the gradients packed into a single 1-D vector (same layout
    as pack_weights).
    """
    n_in, n_hid, n_out = params[0], params[1], params[2]
    pat_in, pat_out = params[3], params[4]
    w_cost = params[5]
    w_hid,b_hid,w_out,b_out = unpack_weights(x, params)
    # forward pass
    act_hid = tansig( (pat_in * w_hid) + b_hid )
    act_out = logsig( (act_hid * w_out) + b_out )
    # backward pass: delta terms for output and hidden layers
    err_out = act_out - pat_out
    deltas_out = multiply(dlogsig(act_out), err_out)
    err_hid = deltas_out * transpose(w_out)
    deltas_hid = multiply(dtansig(act_hid), err_hid)
    # FIX: f() adds w_cost*sum(x^2) to the cost, so the decay term's
    # gradient is 2*w_cost*w (the weights themselves).  The original code
    # added 2*w_cost*grad instead, which merely rescaled the SSE gradient
    # and made fprime inconsistent with f for fmin_cg.
    grad_w_out = transpose(act_hid)*deltas_out + (2*w_cost*w_out)
    grad_b_out = sum(deltas_out,0) + (2*w_cost*b_out)
    grad_w_hid = transpose(pat_in)*deltas_hid + (2*w_cost*w_hid)
    grad_b_hid = sum(deltas_hid,0) + (2*w_cost*b_hid)
    return pack_weights(grad_w_hid, grad_b_hid, grad_w_out, grad_b_out, params)
############################################################
#                  TRAIN THE SUCKER                        #
############################################################
# network parameters
n_in = shape(train_in)[1]
n_hid = 4
n_out = shape(train_out)[1]
# weight-decay coefficient on the sum of squared weights (see f())
w_cost = 0.01
params = [n_in, n_hid, n_out, train_in, train_out, w_cost]
# initialize weights to small random (uniformly distributed)
# values between -0.10 and +0.10
# NOTE(review): rand()*0.1 - 0.05 actually yields [-0.05, +0.05),
# not +/-0.10 as the comment above claims.
nw = n_in*n_hid + n_hid + n_hid*n_out + n_out
w0 = random.rand(nw)*0.1 - 0.05
# optimize using conjugate gradient descent
out = fmin_cg(f, w0, fprime=fd, args=(params,),
              full_output=True, retall=True, disp=True,
              gtol=1e-3, maxiter=1000)
# unpack optimizer outputs
wopt,fopt,func_calls,grad_calls,warnflag,allvecs = out
# net performance
netout = net_forward(wopt,params)
# fraction of examples whose most-active output unit matches the target
pc = array(netout.argmax(1).T) == params[4].argmax(1) # I hate munging numpy matrices/arrays
pc, = where(pc[0,:]) # hate hate hate
pc = float(len(pc)) / float(shape(params[4])[0]) # more hate
# (Python 2 print statement)
print "percent correct = %6.3f" % (pc)
############################################################
#                     PRETTY PLOTS                         #
############################################################
# test our network on the entire range of inputs
# and visualize the results
#
n_grid = 100
min_grid,max_grid = -10.0, 20.0
g_grid = linspace(min_grid, max_grid, n_grid)
g1,g2 = meshgrid(g_grid, g_grid)
# all (input1, input2) pairs on the grid as an (n_grid^2 x 2) matrix
grid_inputs = matrix(hstack((reshape(g1,(n_grid*n_grid,1)),
                             reshape(g2,(n_grid*n_grid,1)))))
# copy params but substitute the grid for the training inputs
params_grid = list(params)
params_grid[3] = grid_inputs
act_grid,hid_grid = net_forward(wopt,params_grid,ret_hids=True)
# choose which neuron has greatest activity
cat_grid = reshape(act_grid.argmax(1),(n_grid,n_grid))
figure()
# plot the network performance
imshow(cat_grid,extent=[min_grid,max_grid,min_grid,max_grid])
# now overlay the training data
i1 = where(traindata['outputs']==1)[0]
i2 = where(traindata['outputs']==2)[0]
i3 = where(traindata['outputs']==3)[0]
i4 = where(traindata['outputs']==4)[0]
plot(traindata['inputs'][i1,0],traindata['inputs'][i1,1],'ys',markeredgecolor='k')
plot(traindata['inputs'][i2,0],traindata['inputs'][i2,1],'rs',markeredgecolor='k')
plot(traindata['inputs'][i3,0],traindata['inputs'][i3,1],'bs',markeredgecolor='k')
plot(traindata['inputs'][i4,0],traindata['inputs'][i4,1],'cs',markeredgecolor='k')
axis([min_grid,max_grid,min_grid,max_grid])
xlabel('INPUT 1')
ylabel('INPUT 2')
# hidden neuron activations for entire range of inputs
#
figure()
# subplot grid just big enough for one panel per hidden unit
ncols = ceil(sqrt(n_hid))
nrows = ceil(float(n_hid)/float(ncols))
w_hid, b_hid, w_out, b_out = unpack_weights(wopt, params)
for i in range(n_hid):
    cgi = reshape(hid_grid[:,i], (n_grid,n_grid))
    subplot(nrows,ncols,i+1)
    imshow(cgi, extent=[min_grid,max_grid,min_grid,max_grid])
    axis([min_grid,max_grid,min_grid,max_grid])
    axis('off')
    title('HID_%d' % i)
# output neuron activations for entire range of inputs
#
figure(figsize=(16,4))
for i in range(4):
    cgi = reshape(act_grid[:,i], (n_grid,n_grid))
    subplot(1,4,i+1)
    imshow(cgi, extent=[min_grid,max_grid,min_grid,max_grid])
    plot(traindata['inputs'][i1,0],traindata['inputs'][i1,1],'ys',markeredgecolor='k')
    plot(traindata['inputs'][i2,0],traindata['inputs'][i2,1],'rs',markeredgecolor='k')
    plot(traindata['inputs'][i3,0],traindata['inputs'][i3,1],'bs',markeredgecolor='k')
    plot(traindata['inputs'][i4,0],traindata['inputs'][i4,1],'cs',markeredgecolor='k')
    axis([min_grid,max_grid,min_grid,max_grid])
    xlabel('INPUT 1')
    ylabel('INPUT 2')
    title('OUT_%d' % i)
|
<reponame>kdubiel/bh-events
import React from 'react';
import { useTranslation } from 'react-i18next';
import { Typography, Box } from '@material-ui/core';
import VisibilityOffRoundedIcon from '@material-ui/icons/VisibilityOffRounded';
import { ErrorMessage } from 'components';
interface Props {
  /** When true, render the "visibility off" icon beside the message. */
  showIcon?: boolean;
  /** Optional error text shown below the no-data message. */
  error?: string | null;
}
/**
 * Placeholder shown when a view has no data: an optional icon, a
 * translated "no data" message, and an optional error message below.
 */
export const NoData = ({ showIcon, error }: Props) => {
  const { t } = useTranslation();

  return (
    <Box display="flex" flexDirection="column" alignItems="center">
      <Box display="flex" flexDirection="row" alignItems="center">
        {/* FIX: dropped the redundant <>...</> fragment that wrapped a single child. */}
        {showIcon && <VisibilityOffRoundedIcon fontSize="large" />}
        <Typography>{t('common:nodata')}</Typography>
      </Box>
      {error && <ErrorMessage>{error}</ErrorMessage>}
    </Box>
  );
};
|
# === {{CMD}} "{.. JSON .. }" /path
# === Used for DEV env.
POST () {
  # $1: JSON request body; $2: URL path (e.g. /users).
  # -k skips TLS verification (dev only); -D - dumps response headers.
  # FIX: quote the URL so a path in $2 is not word-split or glob-expanded.
  curl -k -D - -X POST --header "Content-Type:application/json" -d "$1" "http://localhost:4567$2"
  echo ""
} # === end function
|
#!/bin/bash -eux
## https://access.redhat.com/site/solutions/58625 (subscription required)
# http://www.linuxquestions.org/questions/showthread.php?p=4399340#post4399340
# add 'single-request-reopen' so it is included when /etc/resolv.conf is generated
# NOTE(review): '>>' appends unconditionally, so re-running this script
# duplicates the line in /etc/sysconfig/network -- confirm idempotence matters.
echo 'RES_OPTIONS="single-request-reopen"' >> /etc/sysconfig/network
# restart networking so the resolver option takes effect immediately
service network restart
echo '==> Slow DNS fix applied (single-request-reopen)'
|
<gh_stars>0
import firebase from 'firebase'
// Shared Firebase app instance for this project.
// NOTE(review): apiKey is a redacted placeholder ('<KEY>') -- it must be
// replaced with the real key before authentication can work.
const fb = firebase.initializeApp({
  apiKey: '<KEY>',
  authDomain: 'nodecasts-a9904.firebaseapp.com',
  databaseURL: 'https://nodecasts-a9904.firebaseio.com',
  storageBucket: 'nodecasts-a9904.appspot.com',
  messagingSenderId: '679261507638'
})
export default fb
|
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.rest.model.batch.request;
import java.io.Serializable;
import java.util.Date;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.sitewhere.spi.batch.ElementProcessingStatus;
import com.sitewhere.spi.batch.request.IBatchElementCreateRequest;
/**
* Holds information needed to create/update a batch operation element.
*/
@JsonInclude(Include.NON_NULL)
public class BatchElementCreateRequest implements IBatchElementCreateRequest, Serializable {

    /** Serialization version identifier */
    private static final long serialVersionUID = -3369336266183401785L;

    /** Token for affected device */
    private String deviceToken;

    /** Processing status for update */
    private ElementProcessingStatus processingStatus;

    /** Date element was processed */
    private Date processedDate;

    /** Metadata values */
    private Map<String, String> metadata;

    /*
     * @see
     * com.sitewhere.spi.batch.request.IBatchElementCreateRequest#getDeviceToken()
     */
    @Override
    public String getDeviceToken() {
	return deviceToken;
    }

    /** @param deviceToken token of the device this element targets */
    public void setDeviceToken(String deviceToken) {
	this.deviceToken = deviceToken;
    }

    /*
     * @see com.sitewhere.spi.batch.request.IBatchElementCreateRequest#
     * getProcessingStatus()
     */
    @Override
    public ElementProcessingStatus getProcessingStatus() {
	return processingStatus;
    }

    /** @param processingStatus new processing status for the element */
    public void setProcessingStatus(ElementProcessingStatus processingStatus) {
	this.processingStatus = processingStatus;
    }

    /*
     * @see
     * com.sitewhere.spi.batch.request.IBatchElementCreateRequest#getProcessedDate()
     */
    @Override
    public Date getProcessedDate() {
	return processedDate;
    }

    /** @param processedDate timestamp at which the element was processed */
    public void setProcessedDate(Date processedDate) {
	this.processedDate = processedDate;
    }

    /*
     * @see com.sitewhere.spi.batch.request.IBatchElementCreateRequest#getMetadata()
     */
    @Override
    public Map<String, String> getMetadata() {
	return metadata;
    }

    /** @param metadata arbitrary key/value metadata for the element */
    public void setMetadata(Map<String, String> metadata) {
	this.metadata = metadata;
    }
}
import xml.etree.ElementTree as ET
def process_xml_ticket(xml_data):
# Parse the XML data
root = ET.fromstring(xml_data)
# Extract information from the XML
ticket_id = root.find('id').text
customer_name = root.find('customer').text
issue_description = root.find('issue').text
priority = root.find('priority').text
# Create a dictionary with the extracted information
extracted_info = {
'ticket_id': ticket_id,
'customer_name': customer_name,
'issue_description': issue_description,
'priority': priority
}
return extracted_info |
from . import TestReader, time_function_call
from reader.importer.Diogenes import DiogenesLemmataImporter, DiogenesAnalysesImporter
from reader.models import WordDescription, Lemma, WordForm
from reader.language_tools.greek import Greek
class TestDiogenesAnalysesImport(TestReader):
    """Tests for importing Diogenes analyses and linking them to lemmata."""

    @time_function_call
    def test_import_file(self):
        """Analyses imported from file should link to the matching lemma."""
        # Get the lemmas so that we can match up the analyses
        DiogenesLemmataImporter.import_file(self.get_test_resource_file_name("greek-lemmata.txt"), return_created_objects=True)
        # Import the analyses
        DiogenesAnalysesImporter.import_file(self.get_test_resource_file_name("greek-analyses2.txt"), return_created_objects=True)
        # See if the analyses match up with the lemmas
        # Find the word description and make sure the lemma matches
        descriptions = WordDescription.objects.filter(meaning="favourite slave")
        self.assertEqual(descriptions[0].lemma.reference_number, 537850)
        self.assertEqual(descriptions[0].meaning, "favourite slave")

    def test_import_file_no_match(self):
        """Import should still create analyses when no lemma matches."""
        # Get the lemmas so that we can match up the analyses
        DiogenesLemmataImporter.import_file(self.get_test_resource_file_name("greek-lemmata-no-match.txt"), return_created_objects=True)
        # Import the analyses
        analyses = DiogenesAnalysesImporter.import_file(self.get_test_resource_file_name("greek-analyses-no-match.txt"), return_created_objects=True, raise_exception_on_match_failure=True)
        self.assertEqual(len(analyses), 4)

    @time_function_call
    def test_lookup_by_form(self):
        """A word description should be retrievable by its (unicode) form."""
        # Get the lemmas so that we can match up the
        DiogenesLemmataImporter.import_file(self.get_test_resource_file_name("greek-lemmata.txt"), return_created_objects=True)
        # Import the analyses
        DiogenesAnalysesImporter.import_file(self.get_test_resource_file_name("greek-analyses2.txt"), return_created_objects=True)
        # See if the analyses match up with the lemmas
        # Find the word description and make sure the lemma matches
        descriptions = WordDescription.objects.filter(word_form__form=Greek.beta_code_str_to_unicode("a(/bra"))
        self.assertEqual(descriptions[0].lemma.reference_number, 537850)
        self.assertEqual(descriptions[0].meaning, "favourite slave")

    def make_lemma(self):
        """Helper: create and persist the test lemma (beta-code a(/bra)."""
        lemma = Lemma(lexical_form=Greek.beta_code_str_to_unicode("a(/bra"), language="Greek", reference_number=537850)
        lemma.save()
        return lemma

    def make_form(self):
        """Helper: create and persist a WordForm bound to the test lemma."""
        lemma = self.make_lemma()
        word_form = WordForm()
        word_form.lemma = lemma
        word_form.form = Greek.beta_code_str_to_unicode("a(/bra")
        word_form.save()
        return word_form

    def test_handle_multiple_genders(self):
        """A 'masc/neut' entry should set both genders but not feminine."""
        # Get the lemmas so that we can match up the analyses
        DiogenesLemmataImporter.import_file(self.get_test_resource_file_name("greek-lemmata.txt"), return_created_objects=True)
        word_form = self.make_form()
        description = DiogenesAnalysesImporter.import_analysis_entry("537850 9 a_)ghko/ti,a)ga/w\t \tperf part act masc/neut dat sg (attic doric ionic aeolic)}" , word_form)
        self.assertTrue(description.masculine)
        self.assertTrue(description.neuter)
        self.assertFalse(description.feminine)

    def test_handle_no_match(self):
        """An entry whose lemma is absent should still produce a description."""
        # Get the lemmas so that we can match up the analyses
        DiogenesLemmataImporter.import_file(self.get_test_resource_file_name("greek-lemmata-no-match.txt"), return_created_objects=True)
        word_form = self.make_form()
        #full_entry = "e)pamfie/sasqai\t{31475848 9 e)pamfi+e/sasqai,e)pi/,a)mfi/-e(/zomai\tseat oneself\taor inf mid (attic epic doric ionic aeolic parad_form prose)}[40532015][6238652]{6264700 9 e)pi/-a)mfia/zw\tciothe\taor inf mid}[40532015]{6365952 9 e)pi/-a)mfie/nnumi\tput round\taor inf mid (attic)}[40532015]"
        word_description = DiogenesAnalysesImporter.import_analysis_entry("e)pamfie/sasqai\t{31475848 9 e)pamfi+e/sasqai,e)pi/,a)mfi/-e(/zomai\tseat oneself\taor inf mid (attic epic doric ionic aeolic parad_form prose)}[40532015][6238652]", word_form)
        self.assertNotEqual(word_description, None)

    def test_parse_no_match(self):
        """Unparseable entries should yield None rather than raising."""
        word_form = WordForm()
        word_form.form = "test_parse_no_match"
        word_form.save()
        desc = "Wont match regex"
        # Make sure this does not trigger an exception
        # FIX: assertEquals is a deprecated alias of assertEqual.
        self.assertEqual(DiogenesAnalysesImporter.import_analysis_entry(desc, word_form), None)
|
<reponame>dcoloma/gaia<filename>apps/camera/build/configure.js
var fs = require('fs');
var config = require('./customizeConfig.js');
// NOTE(review): maxImagePixelSize and configurationFile are never used in
// this file -- candidates for removal; confirm no external tooling reads them.
var maxImagePixelSize = 5 * 1024 * 1024;
// Optional per-distribution override directory set by the Gaia build.
var gaiaDistributionDirectory = process.env.GAIA_DISTRIBUTION_DIR;
var configurationObject = {};
var configurationFile;
// Render js/config.js from configurationObject (empty object -> defaults).
var generateConfigurationFile = function() {
  var content = config.customizeMaximumImageSize(configurationObject);
  fs.writeFile('js/config.js', content, function(err) {
    if (err) {
      console.log(err);
    } else {
      console.log('Configuration file has been generated: js/config.js');
    }
  });
};
// If a distribution dir is configured, load camera.json from it; a missing
// file falls back to defaults, any other read error aborts generation.
if (gaiaDistributionDirectory) {
  fs.readFile(gaiaDistributionDirectory + '/camera.json',
    'utf8', function(err, data) {
      if (err) {
        if (err.code === 'ENOENT') {
          console.log('The configuration file :' + gaiaDistributionDirectory +
            '/camera.json doesn\'t exist');
        } else {
          return console.log(err);
        }
      } else {
        configurationObject = JSON.parse(data);
      }
      generateConfigurationFile();
    });
} else {
  generateConfigurationFile();
}
|
import cv2
import numpy as np
def save_image_as_bgr(array: np.ndarray, path: str) -> None:
bgr_image = cv2.cvtColor(array, cv2.COLOR_RGB2BGR)
cv2.imwrite(path, bgr_image) |
# Tear down a local Hyperledger Fabric dev environment.

# Kill and remove all containers (running or stopped).
docker kill $(docker ps -qa)
docker rm $(docker ps -qa)

# Remove all docker networks that can be removed.
docker network rm $(docker network ls -q)

# Remove chaincode images built for dev peers (image names contain "dev-peer<N>-").
# FIX: dropped the trailing "| less" -- an interactive pager serves no
# purpose inside a command substitution and only obscured the pipeline.
docker rmi $(docker images | grep "\dev-peer[0-9]*\-" | tr -s ' ' | cut -d ' ' -f 1)

#rm -rf /var/hyperledger/
|
const { expect } = require('chai')
const { validate } = require('./index')
// Unit tests for validate.string and validate.email.
describe('validate', () => {
  describe('.string', () => {
    it('should not throw on string target', () => {
      const name = 'name'
      const target = 'target'

      expect(() => validate.string(target, name)).not.to.throw()
    })

    it('should throw on non-string target', () => {
      const name = 'name'
      let target = 1

      expect(() => validate.string(target, name)).to.throw(TypeError, 'name 1 is not a string')

      target = true
      expect(() => validate.string(target, name)).to.throw(TypeError, 'name true is not a string')

      target = [1, 2, 3]
      expect(() => validate.string(target, name)).to.throw(TypeError, 'name 1,2,3 is not a string')
    })

    it('should throw on empty string by default', () => {
      const target = ''
      const name = 'name'

      expect(() => validate.string(target, name)).to.throw(Error, 'name is empty')
    })

    it('should not throw on empty string with empty flag to false', () => {
      const target = ''
      const name = 'name'

      expect(() => validate.string(target, name, false)).not.to.throw()
    })
  })

  describe('.email', () => {
    it('should not fail on valid email target', () => {
      const target = '<EMAIL>'

      expect(() => validate.email(target)).not.to.throw()
    })

    it('should fail on non-vaild email target', () => {
      const target = 'notvalidemail'

      expect(() => validate.email(target)).to.throw(Error, 'notvalidemail is not an email')
    })

    it('should fail on non-string target', () => {
      let target = 1

      expect(() => validate.email(target)).to.throw(Error, '1 is not an email')

      target = true
      expect(() => validate.email(target)).to.throw(Error, 'true is not an email')

      target = {}
      expect(() => validate.email(target)).to.throw(Error, `${target} is not an email`)
    })

    it('should fail on empty target', () => {
      // FIX: `target` was assigned without declaration, leaking an implicit
      // global (and throwing a ReferenceError under strict mode).
      const target = ''

      expect(() => validate.email(target)).to.throw(Error, `${target} is not an email`)
    })
  })
})
<gh_stars>0
// Appears to be Doxygen-generated navigation data for parameter_window.h:
// each entry is [display name, target page, anchor/page id or null].
// Do not edit by hand -- regenerate with the docs build instead.
var _parameter_window_8h =
[
    [ "ParameterWindow", "class_parameter_window.html", "class_parameter_window" ],
    [ "Scalar", "_parameter_window_8h.html#ae52308b3630bba23d07d4fd32347bbce", null ]
];
import * as React from 'react';
import { css } from '@patternfly/react-styles';
import styles from '@patternfly/react-styles/css/components/Topology/topology-components';
import SvgDropShadowFilter from '../svg/SvgDropShadowFilter';
import { createSvgIdUrl, useHover } from '../../utils';
import { DEFAULT_DECORATOR_PADDING } from '../nodes';
/** Props for the Decorator badge rendered on a topology node. */
interface DecoratorTypes {
  className?: string;
  /** Center x of the decorator circle. */
  x: number;
  /** Center y of the decorator circle. */
  y: number;
  /** Radius of the background circle. */
  radius: number;
  /** Inner padding between circle edge and icon (defaults to DEFAULT_DECORATOR_PADDING). */
  padding?: number;
  showBackground?: boolean;
  icon?: React.ReactNode;
  /** FIX: the component destructures `children`, but the prop was not declared here. */
  children?: React.ReactNode;
  onClick?(event: React.MouseEvent<SVGGElement, MouseEvent>): void;
  ariaLabel?: string;
  circleRef?: React.Ref<SVGCircleElement>;
}
// Filter id for the drop shadow applied while the decorator is hovered.
const HOVER_FILTER_ID = 'DecoratorDropShadowHoverFilterId';

// Circular badge drawn on a topology node: optional background circle,
// centered icon, and optional click behavior (exposed as a button role).
const Decorator: React.FunctionComponent<DecoratorTypes> = ({
  className,
  x,
  y,
  showBackground,
  radius,
  padding = DEFAULT_DECORATOR_PADDING,
  children,
  icon,
  onClick,
  ariaLabel,
  circleRef
}) => {
  const [hover, hoverRef] = useHover();
  // The icon must fit inside the padded circle.
  const iconRadius = radius - padding;
  return (
    <g
      ref={hoverRef}
      className={css(styles.topologyNodeDecorator, className)}
      {...(onClick
        ? {
            onClick: e => {
              // Keep node-level click handlers from also firing.
              e.stopPropagation();
              onClick(e);
            },
            role: 'button',
            'aria-label': ariaLabel
          }
        : null)}
    >
      <SvgDropShadowFilter id={HOVER_FILTER_ID} dy={3} stdDeviation={5} floodOpacity={0.5} />
      {showBackground && (
        <circle
          key={hover ? 'circle-hover' : 'circle'} // update key on hover to force update of shadow filter
          ref={circleRef}
          className={css(styles.topologyNodeDecoratorBg)}
          cx={x}
          cy={y}
          r={radius}
          filter={hover ? createSvgIdUrl(HOVER_FILTER_ID) : undefined}
        />
      )}
      <g transform={`translate(${x}, ${y})`}>
        {icon ? (
          <g
            className={css(styles.topologyNodeDecoratorIcon)}
            style={{ fontSize: `${iconRadius * 2}px` }}
            transform={`translate(-${iconRadius}, -${iconRadius})`}
          >
            {icon}
          </g>
        ) : null}
        {children}
      </g>
    </g>
  );
};

export default Decorator;
|
#!/bin/bash
# Bootstrap a single-node ELK demo: wait for services, create geo_point
# mappings, bulk-load sample logs, and register the Kibana index pattern.

# Wait until Elasticsearch answers on its HTTP port.
until curl -s http://elasticsearch:9200/_cat/health -o /dev/null; do
    echo 'Waiting for Elasticsearch...'
    sleep 10
done

# Wait until Kibana's login page is reachable.
until curl -s http://kibana:5601/login -o /dev/null; do
    echo 'Waiting for Kibana...'
    sleep 10
done

echo 'Extra settings for single node cluster...'
# A single node cannot allocate replicas; disable them so indices go green.
curl -XPUT -H 'Content-Type: application/json' 'http://elasticsearch:9200/_settings' -d '
{
    "index" : {
        "number_of_replicas" : 0
    }
}'

echo 'Setup mappings...'
# Create an index with a geo_point mapping for geo.coordinates; the mapping
# must exist before the bulk load so the field is not inferred as text.
# (The original repeated this identical request body three times.)
put_geo_mapping() {
    curl -X PUT "http://elasticsearch:9200/$1?pretty" -H 'Content-Type: application/json' -d'
{
    "mappings": {
        "log": {
            "properties": {
                "geo": {
                    "properties": {
                        "coordinates": {
                            "type": "geo_point"
                        }
                    }
                }
            }
        }
    }
}
'
}

for index in logstash-2020.05.18 logstash-2020.05.19 logstash-2020.05.20; do
    put_geo_mapping "$index"
done
echo 'Setup mappings done...'

echo 'Load data...'
curl -s -H 'Content-Type: application/x-ndjson' -XPOST 'http://elasticsearch:9200/_bulk?pretty' --data-binary @/tmp/data/logs.jsonl
echo 'Data loaded...'

echo 'Create index pattern...'
curl -X POST "http://kibana:5601/api/saved_objects/index-pattern/logstash" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d'
{
    "attributes": {
        "title": "logstash-*",
        "timeFieldName": "@timestamp"
    }
}'
echo 'Index pattern created...'
|
#!/usr/bin/env bash
# Author: 潘维吉
# Kubernetes install: production-grade container orchestration
# (automated deployment, scaling and management of containers).
# Run `chmod +x k8s-install.sh` to make this script executable.

#./docker-install.sh

echo "国内网络原因 设置阿里镜像源"
# Use the Aliyun mirror because the default repos are slow/blocked in China.
cat <<EOF >/etc/yum.repos.d/kubernetes.repo
[kubernetes]
name=Kubernetes
baseurl=http://mirrors.aliyun.com/kubernetes/yum/repos/kubernetes-el7-x86_64
enabled=1
gpgcheck=1
repo_gpgcheck=1
gpgkey=http://mirrors.aliyun.com/kubernetes/yum/doc/yum-key.gpg
http://mirrors.aliyun.com/kubernetes/yum/doc/rpm-package-key.gpg
EOF

# Configure hostname/hosts per node (enable the relevant lines per role).
#hostnamectl set-hostname k8s-master01 # set hostname (enable on the master node)
#hostnamectl set-hostname k8s-node01 # set hostname (enable on worker nodes)
#echo '127.0.0.1 k8s-master01
#127.0.0.1 k8s-node01' >>/etc/hosts

echo "安装Kubernetes"
sudo dnf install -y kubelet kubeadm kubectl --disableexcludes=kubernetes
# apt-get install -y kubelet kubeadm kubectl

echo "启动Kubernetes服务并加入开机自启动"
systemctl enable --now kubelet

echo "关闭防火墙"
systemctl disable --now firewalld

echo "关闭Swap分区 "
swapoff -a && sysctl -w vm.swappiness=0 # disable swap now
sed -ri '/^[^#]*swap/s@^@#@' /etc/fstab # stop mounting swap at boot
free # kubeadm requires swap off; the Swap row should show 0

echo "禁用SELinux"
sed -ri 's/SELINUX=enforcing/SELINUX=disabled/' /etc/selinux/config # persist across reboots
sestatus # verify current status

echo "桥接流量"
cat <<EOF >/etc/sysctl.d/k8s.conf
net.bridge.bridge-nf-call-ip6tables = 1
net.bridge.bridge-nf-call-iptables = 1
EOF
sysctl --system

echo "所有节点同步时间"
ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
# FIX: the original ran `echo 'Asia/Shanghai' /etc/timezone`, which merely
# printed both words; the '>' redirect is required to write the file.
echo 'Asia/Shanghai' > /etc/timezone

echo "创建一个守护程序文件 docker与k8s一致 使用systemd"
cat <<EOF >/etc/docker/daemon.json
{
  "exec-opts": ["native.cgroupdriver=systemd"],
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "100m"
  },
  "storage-driver": "overlay2",
  "storage-opts": [
    "overlay2.override_kernel_check=true"
  ]
}
EOF
systemctl restart docker # restart so the new daemon.json takes effect

echo "Kubernetes版本"
sudo kubectl version

# Optional: Dashboard UI for managing cluster resources.
#echo "安装Dashboard UI管理集群资源 扩展部署,启动滚动更新,重新启动Pod或部署新应用程序等"
#wget https://raw.githubusercontent.com/kubernetes/dashboard/v2.0.0-beta4/aio/deploy/recommended.yaml
#kubectl apply -f recommended.yaml
#kubectl get pods --all-namespaces | grep dashboard

# Optional: initialize the control plane.
#echo "初始化集群"
#sudo kubeadm config images pull
#sudo kubeadm init

echo "重启配置生效"
sleep 3s
#reboot
|
<gh_stars>100-1000
(function UMD(context,definition){
/* istanbul ignore next */if (typeof define === "function" && define.amd) { define(definition); }
/* istanbul ignore next */else if (typeof module !== "undefined" && module.exports) { module.exports = definition(); }
/* istanbul ignore next */else { Object.assign(context,definition()); }
})(this,function DEF(){
"use strict";
var publicAPI = {
any, undef, nul, string, bool, number, finite, int, bint,
symb, array, object, func, regex,
};
return publicAPI;
// ***********************************
// Template-tag that accepts anything.  A lone literal passes through as a
// string ("" becomes undefined); a lone interpolated value passes through
// with its type intact; mixed content is stitched into one string.
function any(strs,...v) {
	// Single literal chunk: no interpolated values at all.
	if (strs.length == 1) {
		return (strs[0] === "") ? undefined : strs[0];
	}
	// Exactly one interpolated value with no surrounding literal text:
	// return the value itself, preserving its type.
	var soloValue = (
		strs.length == 2 &&
		strs[0].length == 0 &&
		strs[1].length == 0
	);
	if (soloValue) {
		return v[0];
	}
	// Otherwise interleave literals and values into a single string.
	return String.raw({ raw: strs, },...v);
}
// Template-tag for `undefined`: an empty literal or the literal text
// "undefined" coerces to undefined; an interpolated value must already be
// undefined, otherwise the type assertion fails.
function undef(...v) {
	v = parseSingleInput(v,checkUndefined,"undefined");
	if (
		typeof v == "string" &&
		(
			v === "" ||
			v === "undefined"
		)
	) {
		v = undefined;
	}
	if (!checkUndefined(v)) {
		failedTypeAssertion(v,"undefined");
	}
	return v;
}

// Template-tag for `null`: an empty literal or the literal text "null"
// coerces to null; an interpolated value must already be null.
function nul(...v) {
	v = parseSingleInput(v,checkNull,"null");
	if (
		typeof v == "string" &&
		(
			v === "" ||
			v === "null"
		)
	) {
		v = null;
	}
	if (!checkNull(v)) {
		failedTypeAssertion(v,"null");
	}
	return v;
}
// Template-tag for strings: a single literal passes through unchecked;
// otherwise every interpolated value must itself be a string, and the
// result is the stitched-together string (delegated to any()).
function string(strs,...v) {
	if (strs.length == 1) {
		return strs[0];
	}
	else {
		// validate the types of all values
		for (let val of v) {
			if (typeof val != "string") {
				failedTypeAssertion(val,"string");
			}
		}
		return any(strs,...v);
	}
}
// Template-tag for booleans: "true"/"false" literals coerce; an empty
// literal is an error (there is no sensible boolean default).
function bool(...v) {
	v = parseSingleInput(v,checkBoolean,"boolean");
	if (typeof v == "string") {
		if (v === "") throw new Error("No default for type: boolean");
		else if (v === "true") v = true;
		else if (v === "false") v = false;
	}
	if (!checkBoolean(v)) {
		failedTypeAssertion(v,"boolean");
	}
	return v;
}

// Template-tag for numbers: "NaN" coerces to NaN, other numeric strings
// are converted via Number(); non-numeric strings fail the assertion.
function number(...v) {
	v = parseSingleInput(v,checkNumber,"number");
	if (typeof v == "string") {
		if (v === "") throw new Error("No default for type: number");
		else if (v == "NaN") v = NaN;
		else {
			let t = Number(v);
			// NOTE(review): `typeof t == "number"` is always true for a
			// Number() result -- only the NaN check does any work here.
			if (typeof t == "number" && !Number.isNaN(t)) {
				v = t;
			}
		}
	}
	if (!checkNumber(v)) {
		failedTypeAssertion(v,"number");
	}
	return v;
}

// Template-tag for finite numbers: like number(), but the final value
// must also pass Number.isFinite (rejects NaN and +/-Infinity).
function finite(...v) {
	v = parseSingleInput(v,checkNumber,"finite number");
	if (typeof v == "string") {
		if (v === "") throw new Error("No default for type: 'finite number'");
		let t = Number(v);
		if (!Number.isNaN(t)) {
			v = t;
		}
	}
	if (!Number.isFinite(v)) {
		failedTypeAssertion(v,"finite number");
	}
	return v;
}

// Template-tag for integers: must be a safe integer, and negative zero
// is explicitly rejected (Object.is distinguishes -0 from 0).
function int(...v) {
	v = parseSingleInput(v,checkNumber,"integer");
	if (typeof v == "string") {
		if (v === "") throw new Error("No default for type: integer");
		let t = Number(v);
		if (!Number.isNaN(t)) {
			v = t;
		}
	}
	if (!Number.isSafeInteger(v) || Object.is(v,-0)) {
		failedTypeAssertion(v,"integer");
	}
	return v;
}

// Template-tag for bigints: string forms are evaluated via safeEval
// (e.g. "42n"); the final value must be a bigint.
function bint(...v) {
	v = parseSingleInput(v,checkBigInt,"bigint");
	if (typeof v == "string") {
		if (v === "") throw new Error("No default for type: bigint");
		v = safeEval(v) || v;
	}
	if (typeof v != "bigint") {
		failedTypeAssertion(v,"bigint");
	}
	return v;
}

// Template-tag for symbols: an empty literal defaults to "Symbol()",
// string forms are evaluated via safeEval; must end up a symbol.
function symb(...v) {
	v = parseSingleInput(v,checkSymbol,"symbol");
	if (typeof v == "string") {
		v = (v != "") ? v : "Symbol()";
		v = safeEval(v) || v;
	}
	if (typeof v != "symbol") {
		failedTypeAssertion(v,"symbol");
	}
	return v;
}
// Template-tag for arrays.  Accepts either an array value / "[...]"-style
// literal, or an "array shape" description (e.g. "int[]", "<string,int>"),
// in which case it returns a validator function for that shape.  The
// { parseShapeOnly, v } override returns the parsed shape object itself.
function array(...v) {
	var parseShapeOnly = false;
	var shape = null;
	// manual control override?
	if (
		v.length == 1 &&
		typeof v[0] == "object" &&
		"parseShapeOnly" in v[0]
	) {
		parseShapeOnly = true;
		v[0] = v[0].v;
	}
	var parsedV = parseSingleInput(v,checkArray,"array");
	// a non-empty string that is NOT a "[...]" literal is a shape description
	var shapeLiteral = (
		typeof parsedV == "string" &&
		!(
			parsedV == "" ||
			/^\[.*\]$/s.test(parsedV)
		)
	);
	// array shape to parse?
	if (shapeLiteral) {
		shape = parseArrayShape(parsedV);
		parsedV = null;
		// manual control override?
		if (parseShapeOnly) {
			return shape;
		}
	}
	// missing shape while trying to parse shape?
	else if (parseShapeOnly) {
		throw new Error("Array shape missing");
	}
	var arrValReady = (parsedV != null);
	var getArrayLiteralFn = getShapeAndArrayLiteral(shape,!arrValReady);
	if (arrValReady) {
		// NOTE: `parsedV` doesn't need to be passed in
		return getArrayLiteralFn();
	}
	else {
		return getArrayLiteralFn;
	}

	// **********************************

	// Builds the actual validator: coerces string forms via safeEval
	// (empty literal -> "[]"), asserts array-ness, then (optionally)
	// validates the contents against the captured shape.
	function getShapeAndArrayLiteral(shape,needToParseV) {
		return function array(...v) {
			v = needToParseV ? parseSingleInput(v,checkArray,"array") : parsedV;
			if (typeof v == "string") {
				v = (v != "") ? v : "[]";
				v = safeEval(v) || v;
			}
			if (!Array.isArray(v)) {
				failedTypeAssertion(v,"array");
			}
			else {
				// array shape to verify against?
				if (shape) {
					validateArrayOfShape(shape,v);
				}
				return v;
			}
		};
	}
}
// Template-tag for objects: an empty literal defaults to "{}", string
// forms are evaluated via safeEval; must end up a non-null object.
function object(...v) {
	v = parseSingleInput(v,checkObject,"object");
	if (typeof v == "string") {
		v = (v != "") ? v : "{}";
		v = safeEval(v) || v;
	}
	if (!(v && typeof v == "object")) {
		failedTypeAssertion(v,"object");
	}
	return v;
}

// Template-tag for functions: an empty literal defaults to a no-op arrow
// function source, string forms are evaluated via safeEval.
function func(...v) {
	v = parseSingleInput(v,checkFunction,"function");
	if (typeof v == "string") {
		v = (v != "") ? v : "()=>undefined";
		v = safeEval(v) || v;
	}
	if (typeof v != "function") {
		failedTypeAssertion(v,"function");
	}
	return v;
}

// Template-tag for regular expressions: a lone interpolated value is
// validated directly; otherwise the stitched text is trimmed (empty
// defaults to the always-matching "/(?:)/"), evaluated, and validated.
function regex(strs,...v) {
	// single value (no literals)?
	if (
		strs.length == 2 &&
		strs[0].length == 0 &&
		strs[1].length == 0
	) {
		return validateRegex(v[0]);
	}
	else {
		let t = any(strs,...v) || "";
		t = (t != "") ? t.trim() : "/(?:)/";
		t = safeEval(t) || t;
		return validateRegex(t);
	}
}
// ***********************************
// Normalize a string for use in an exception message: trim it and make
// newlines / carriage-returns visible as escape text; optionally wrap
// the result in single quotes.
function prepareStr(s,includeQuotes = false) {
	var cleaned = s.trim()
		.replace(/[\n]/g,"\\n")
		.replace(/[\r]/g,"\\r");
	return includeQuotes ? `'${cleaned}'` : cleaned;
}
// True if the string contains at least one non-whitespace character
// (\S is equivalent to the original [^\s] character class).
function isNonTrivialStr(s) {
	return /\S/.test(s);
}
// Shared tag-call parser: enforces that the tag received either a single
// literal chunk or exactly one interpolated value with no surrounding
// literal text.  Returns the trimmed literal, or the value (after an
// optional primitive-type validation); anything else throws.
function parseSingleInput([strs,...v],validatePrimitiveType,typeDesc) {
	// are the inputs invalid?
	if (
		strs.length > 2 ||
		(
			strs.length == 2 &&
			(
				isNonTrivialStr(strs[0]) ||
				isNonTrivialStr(strs[1])
			)
		)
	) {
		// stringify all (invalid) inputs for exception message
		v = v.map(safeString);
		v = prepareStr(safeString(any(strs,...v)));
		throw new Error(`Invalid input: ${v}`);
	}
	// single literal?
	if (strs.length == 1) {
		return strs[0].trim();
	}
	// else single value
	else {
		// need to validate the primitive type?
		if (
			validatePrimitiveType &&
			!validatePrimitiveType(v[0])
		) {
			// NOTE(review): `t` is computed but never used -- the message
			// formatting appears to happen inside failedTypeAssertion().
			let t = prepareStr(safeString(v[0]),/*includeQuotes=*/(typeof v[0] == "string"));
			failedTypeAssertion(v[0],typeDesc);
		}
		return v[0];
	}
}
// Parse an array-shape annotation string -- e.g. "int[]", "<int,string>[+]",
// "(int[])[]" -- into a nested descriptor object of the form:
//   { type: "array", contains: <type-ID string | descriptor | list>,
//     description: <string>, nonEmpty?: true }
// Throws a descriptive Error on any malformed token stream.
function parseArrayShape(str) {
	str = str.trim();
	// tokenizeRE: scanning regex; tolerates whitespace inside "[ ]" / "[ + ]"
	var tokenizeRE = /[()<>,]|(?:\[\s*\+?\s*\])/g;
	// tokenRE: recognizes already-normalized delimiter tokens
	var tokenRE = /[()<>,]|(?:\[\+?\])/;
	var nextMatchIdx = 0;
	// NOTE(review): hasTuple is set below but never read in this function --
	// confirm whether it was meant to be returned or can be dropped
	var hasTuple = false;
	// stateStack: one frame per open "(" group or "<" tuple; frame.curr
	// collects the shapes parsed at that nesting level
	var stateStack = [ { curr: [], }, ];
	var prevToken = null;
	while (nextMatchIdx < str.length) {
		// 1. TOKENIZE (next tokens)
		let nextTokens = [];
		let match = tokenizeRE.exec(str);
		if (match) {
			if (match.index > nextMatchIdx) {
				// text preceding the delimiter is a type-ID token
				let prevToken = str.substring(nextMatchIdx,match.index).trim();
				if (prevToken != "") {
					nextTokens.push(prevToken);
				}
			}
			// normalize the array postfix?
			let token = (/^\[\s*\]$/.test(match[0])) ? "[]" : match[0];
			nextTokens.push(token);
			nextMatchIdx = tokenizeRE.lastIndex;
		}
		else {
			// no more delimiters: the remaining text is the final token
			nextTokens.push(str.substring(nextMatchIdx,str.length));
			nextMatchIdx = str.length;
		}
		// 2. PARSE (next tokens)
		for (let token of nextTokens) {
			let state = stateStack[stateStack.length - 1];
			// a type may appear at the very start, or right after "(", "<", ","
			let typeAllowedPosition = (
				prevToken == null ||
				/^[(<,]$/.test(prevToken)
			);
			// array postfix?
			if (
				/^\[\+?\]$/.test(token) &&
				state.curr.length > 0 &&
				!typeAllowedPosition
			) {
				// wrap the preceding shape in an array descriptor
				let last = state.curr[state.curr.length - 1];
				state.curr[state.curr.length - 1] = {
					type: "array",
					contains: last,
					description: `${last.description || last}${token}`,
				};
				if (token == "[+]") {
					// "[+]" additionally requires the array to be non-empty
					state.curr[state.curr.length - 1].nonEmpty = true;
				}
			}
			// opening a grouping (for readability only)?
			else if (
				token == "(" &&
				typeAllowedPosition
			) {
				stateStack.push({
					type: token,
					curr: [],
				});
			}
			// opening a tuple?
			else if (
				token == "<" &&
				(
					state.type == "<" ||
					state.curr.length == 0
				) &&
				typeAllowedPosition
			) {
				hasTuple = true;
				stateStack.push({
					type: token,
					curr: [],
				});
			}
			// closing a grouping?
			else if (
				token == ")" &&
				state &&
				state.type == "(" &&
				state.curr.length > 0
			) {
				// a group adds no structure: splice its contents into the parent
				stateStack.pop();
				stateStack[stateStack.length - 1].curr.push(...state.curr);
			}
			// closing a tuple?
			else if (
				token == ">" &&
				state &&
				state.type == "<" &&
				state.curr.length > 0 &&
				prevToken != ","
			) {
				// materialize the tuple as an array descriptor whose `contains`
				// is the ordered list of element shapes
				let contents = state.curr.slice();
				let tuple = {
					type: "array",
					contains: contents,
					description: `<${
						contents.map(function getDesc(elem){
							return elem.description || elem;
						})
						.join(",")
					}>`,
				};
				stateStack.pop();
				stateStack[stateStack.length - 1].curr.push(tuple);
			}
			// comma in sequence?
			else if (
				token == "," &&
				state &&
				state.type == "<" &&
				state.curr.length > 0 &&
				!typeAllowedPosition
			) {
				// comma allowed here (no-op; just advances prevToken)
				token;
			}
			// non-delimiter token (aka, a type-ID)?
			else if (
				!tokenRE.test(token) &&
				typeAllowedPosition
			) {
				state.curr.push(token);
			}
			// otherwise, invalid token stream
			else {
				throw new Error(`Array shape invalid ('${str}'): '${token}' not allowed`);
			}
			prevToken = token;
		}
	}
	// every "(" / "<" must have been closed
	if (stateStack.length != 1) {
		throw new Error(`Array shape invalid ('${str}'): '${stateStack[stateStack.length - 1].type}' unterminated`);
	}
	var shape = stateStack[0].curr[0];
	// valid final shape? (a bare type-ID with no postfix stays a string,
	// which is rejected here)
	if (
		shape &&
		typeof shape == "object"
	) {
		return shape;
	}
	else {
		throw new Error(`Array shape invalid: '${str}' not an array`);
	}
}
// Validate that `arr` conforms to a shape descriptor produced by
// parseArrayShape(). Throws a descriptive Error at the first mismatch;
// returns true when the whole array conforms.
function validateArrayOfShape(shape,arr) {
	// array must contain elements of only this type?
	if (typeof shape.contains == "string") {
		// "[+]" shapes reject empty arrays
		if (
			arr.length == 0 &&
			shape.nonEmpty
		) {
			throw new Error(`Empty array missing required element(s) of type: '${shape.contains}'`);
		}
		for (let elem of arr) {
			if (!validateElemAsType(shape.contains,elem)) {
				throw new Error(`Array element ${prepareStr(safeString(elem),/*includeQuotes=*/(typeof elem == "string"))} not of type: '${shape.contains}'`);
			}
		}
	}
	// array is a tuple and must contain all and only these elements (of types)
	else if (Array.isArray(shape.contains)) {
		for (let [idx,elem,] of arr.entries()) {
			// positional check: each element against the shape at its index
			if (idx >= shape.contains.length) {
				throw new Error(`Array-Tuple element ${prepareStr(safeString(elem),/*includeQuotes=*/(typeof elem == "string"))} beyond the tuple definition`);
			}
			else if (typeof shape.contains[idx] == "string") {
				if (!validateElemAsType(shape.contains[idx],elem)) {
					throw new Error(`Array element ${prepareStr(safeString(elem),/*includeQuotes=*/(typeof elem == "string"))} not of type: '${shape.contains[idx]}'`);
				}
			}
			else {
				// nested shape: wrap a non-array elem so recursion reports cleanly
				validateArrayOfShape(shape.contains[idx],[].concat(elem));
			}
		}
		// too-short tuples are also an error
		if (arr.length < shape.contains.length) {
			throw new Error(`Array-Tuple missing expected element of type '${shape.contains[arr.length]}'`);
		}
	}
	// otherwise, must include nested arrays-of-type
	else {
		for (let elem of arr) {
			if (Array.isArray(elem)) {
				validateArrayOfShape(shape.contains,elem);
			}
			else {
				throw new Error(`${prepareStr(safeString(elem),/*includeQuotes=*/(typeof elem == "string"))} is not an array`);
			}
		}
	}
	return true;
}
// Returns true if `elem` passes the type assertion named by `type`: either a
// built-in assertion on the public API, or a globally resolvable assertion
// function. Any thrown assertion (or unresolvable name) yields false.
function validateElemAsType(type,elem) {
	var assertion;
	if (type in publicAPI) {
		assertion = publicAPI[type];
	}
	else {
		try {
			// resolve `type` as a global expression
			assertion = Function(`return ${type};`)();
		}
		catch (err) {
			return false;
		}
		if (typeof assertion != "function") {
			return false;
		}
	}
	try {
		// run the type assertion (tagged-template calling convention)
		assertion(["","",],elem);
		return true;
	}
	catch (err) {}
	return false;
}
// Pass genuine RegExp instances through unchanged; anything else triggers a
// type-assertion failure (which throws).
function validateRegex(val) {
	if (!(val && typeof val == "object" && val instanceof RegExp)) {
		failedTypeAssertion(val,"regular expression");
	}
	return val;
}
// Evaluate source text as a single expression, returning its value; any
// compile or runtime error is swallowed and yields undefined.
function safeEval(exprSrc) {
	try {
		var compiled = new Function(`return (${exprSrc.trim()});`);
		return compiled();
	}
	catch (e) {
		// intentionally swallowed: caller treats undefined as "not evaluable"
	}
}
// Stringify a value without ever throwing: String(v) can fail (e.g. objects
// with a null prototype), in which case the Unicode replacement character
// is returned instead.
function safeString(v) {
	var s = "\ufffd";
	try {
		s = String(v);
	}
	catch (e) {}
	return s;
}
// Throw a readable "<value> is not type: 'x'" error. Negative zero needs
// special handling because String(-0) renders as "0".
function failedTypeAssertion(v,expectedType) {
	var rendered = Object.is(v,-0) ? "-0" : safeString(v);
	rendered = prepareStr(rendered,/*includeQuotes=*/(typeof v == "string"));
	throw new Error(`${rendered} is not type: '${expectedType}'`);
}
// Primitive type predicates used by the tagged-template type assertions.
// These stay hoisted function declarations because they are referenced by
// assertion helpers defined earlier in the file.
function checkUndefined(v) {
	return v === undefined;
}
function checkNull(v) {
	return v === null;
}
function checkBoolean(v) {
	return typeof v == "boolean";
}
function checkNumber(v) {
	return typeof v == "number";
}
function checkBigInt(v) {
	return typeof v == "bigint";
}
function checkSymbol(v) {
	return typeof v == "symbol";
}
function checkFunction(v) {
	return typeof v == "function";
}
function checkObject(v) {
	// note: deliberately returns `v` itself for falsy inputs (null, etc.),
	// matching the original truthiness-based contract
	return v && typeof v == "object";
}
function checkArray(v) {
	return Array.isArray(v);
}
});
|
"""Example usage:
$ py.py -o thunk
>>> from __pypy__ import thunk, lazy, become
>>> def f():
... print 'computing...'
... return 6*7
...
>>> x = thunk(f)
>>> x
computing...
42
>>> x
42
>>> y = thunk(f)
>>> type(y)
computing...
<pypy type 'int'>
>>> @lazy
... def g(n):
... print 'computing...'
... return n + 5
...
>>> y = g(12)
>>> y
computing...
17
"""
from pypy.objspace.proxy import patch_space_in_place
from pypy.interpreter import gateway, baseobjspace, argument
from pypy.interpreter.error import OperationError
from pypy.interpreter.function import Method
# __________________________________________________________________________
# 'w_obj.w_thunkalias' points to another object that 'w_obj' has turned into
baseobjspace.W_Root.w_thunkalias = None
# adding a name in __slots__ after class creation doesn't "work" in Python,
# but in this case it has the effect of telling the annotator that this
# attribute is allowed to be moved up to this class.
baseobjspace.W_Root.__slots__ += ('w_thunkalias',)
class W_Thunk(baseobjspace.W_Root, object):
    # Wrapped object representing a lazily computed value.
    def __init__(w_self, w_callable, args):
        # the pending call; both are reset to None once forcing begins
        w_self.w_callable = w_callable
        w_self.args = args
        # remembered exception from a failed forcing, re-raised on retry
        w_self.operr = None
# special marker to say that w_self has not been computed yet
w_NOT_COMPUTED_THUNK = W_Thunk(None, None)
# class-level default: every freshly created W_Thunk's alias chain ends at
# the NOT_COMPUTED marker, flagging it as not yet forced
W_Thunk.w_thunkalias = w_NOT_COMPUTED_THUNK
def _force(space, w_self):
    # Resolve a (possibly chained) thunk alias to its final value.
    # Hitting the NOT_COMPUTED marker means the thunk's callable still has
    # to be run; failures are remembered in .operr and replayed on retry.
    w_alias = w_self.w_thunkalias
    while w_alias is not None:
        if w_alias is w_NOT_COMPUTED_THUNK:
            assert isinstance(w_self, W_Thunk)
            if w_self.operr is not None:
                # a previous forcing attempt failed: replay its exception
                raise w_self.operr
            w_callable = w_self.w_callable
            args = w_self.args
            if w_callable is None or args is None:
                # the fields are cleared below while computing, so reaching
                # this point means a re-entrant forcing of the same thunk
                raise OperationError(space.w_RuntimeError,
                                space.wrap("thunk is already being computed"))
            w_self.w_callable = None
            w_self.args = None
            try:
                w_alias = space.call_args(w_callable, args)
            except OperationError, operr:
                w_self.operr = operr
                raise
            if _is_circular(w_self, w_alias):
                operr = OperationError(space.w_RuntimeError,
                                       space.wrap("circular thunk alias"))
                w_self.operr = operr
                raise operr
            w_self.w_thunkalias = w_alias
        # XXX do path compression?
        w_self = w_alias
        w_alias = w_self.w_thunkalias
    return w_self
def _is_circular(w_obj, w_alias):
    # Detect whether aliasing w_obj to w_alias would create a cycle in the
    # chain of thunk aliases (which would make forcing loop forever).
    assert (w_obj.w_thunkalias is None or
            w_obj.w_thunkalias is w_NOT_COMPUTED_THUNK)
    w_cursor = w_alias
    while True:
        if w_cursor is w_obj:
            return True
        w_next = w_cursor.w_thunkalias
        if w_next is None or w_next is w_NOT_COMPUTED_THUNK:
            # reached the end of the chain without meeting w_obj
            return False
        w_cursor = w_next
def force(space, w_self):
    # Resolve w_self through its alias chain, if it has one; plain wrapped
    # objects (alias is None) are returned untouched.
    if w_self.w_thunkalias is None:
        return w_self
    return _force(space, w_self)
def thunk(w_callable, __args__):
    """thunk(f, *args, **kwds) -> an object that behaves like the
    result of the call f(*args, **kwds). The call is performed lazily."""
    return W_Thunk(w_callable, __args__)
# app-level entry point, exposed as __pypy__.thunk by Space() below
app_thunk = gateway.interp2app(thunk)
def is_thunk(space, w_obj):
    """Check if an object is a thunk that has not been computed yet."""
    # Walk the alias chain: a trailing NOT_COMPUTED marker means "still lazy",
    # reaching None means the object (or its final alias) is concrete.
    w_cur = w_obj
    while True:
        w_alias = w_cur.w_thunkalias
        if w_alias is None:
            return space.w_False
        if w_alias is w_NOT_COMPUTED_THUNK:
            return space.w_True
        w_cur = w_alias
app_is_thunk = gateway.interp2app(is_thunk)
def become(space, w_target, w_source):
    """Globally replace the target object with the source one."""
    # force first so the alias is installed on the final object; if doing so
    # would create a cycle the replacement is silently skipped
    w_target = force(space, w_target)
    if not _is_circular(w_target, w_source):
        w_target.w_thunkalias = w_source
    return space.w_None
app_become = gateway.interp2app(become)
def lazy(space, w_callable):
    """Decorator to make a callable return its results wrapped in a thunk."""
    # bind the app-level `thunk` as a method with w_callable pre-bound, so
    # calling the decorated function builds a W_Thunk of the original call
    meth = Method(space, space.w_fn_thunk,
                  w_callable, space.type(w_callable))
    return space.wrap(meth)
app_lazy = gateway.interp2app(lazy)
# __________________________________________________________________________
# nb_forcing_args[opname] == how many leading arguments of the space
# operation must be forced (resolved from thunks) before dispatching
nb_forcing_args = {}
def setup():
    # explicit overrides first, then fall back to each regular operation's
    # arity from the ObjSpace method table
    nb_forcing_args.update({
        'setattr': 2,   # instead of 3
        'setitem': 2,   # instead of 3
        'get': 2,       # instead of 3
        # ---- irregular operations ----
        'wrap': 0,
        'str_w': 1,
        'int_w': 1,
        'float_w': 1,
        'uint_w': 1,
        'unicode_w': 1,
        'bigint_w': 1,
        'interpclass_w': 1,
        'unwrap': 1,
        'is_true': 1,
        'is_w': 2,
        'newtuple': 0,
        'newlist': 0,
        'newdict': 0,
        'newslice': 0,
        'call_args': 1,
        'marshal_w': 1,
        'log': 1,
        })
    for opname, _, arity, _ in baseobjspace.ObjSpace.MethodTable:
        nb_forcing_args.setdefault(opname, arity)
    # sanity check: every irregular operation must have an explicit entry
    for opname in baseobjspace.ObjSpace.IrregularOpTable:
        assert opname in nb_forcing_args, "missing %r" % opname
setup()
del setup
# __________________________________________________________________________
def proxymaker(space, opname, parentfn):
    # Build a wrapper for space operation `opname` that forces its first
    # nb_args wrapped arguments before delegating to `parentfn`.
    # NOTE(review): the fixed-arity wrappers are spelled out instead of a
    # single *args version -- presumably so the annotator sees concrete
    # signatures; confirm before collapsing them.
    nb_args = nb_forcing_args[opname]
    if nb_args == 0:
        # nothing to force: keep the parent implementation as-is
        proxy = None
    elif nb_args == 1:
        def proxy(w1, *extra):
            w1 = force(space, w1)
            return parentfn(w1, *extra)
    elif nb_args == 2:
        def proxy(w1, w2, *extra):
            w1 = force(space, w1)
            w2 = force(space, w2)
            return parentfn(w1, w2, *extra)
    elif nb_args == 3:
        def proxy(w1, w2, w3, *extra):
            w1 = force(space, w1)
            w2 = force(space, w2)
            w3 = force(space, w3)
            return parentfn(w1, w2, w3, *extra)
    elif nb_args == 4:
        def proxy(w1, w2, w3, w4, *extra):
            w1 = force(space, w1)
            w2 = force(space, w2)
            w3 = force(space, w3)
            w4 = force(space, w4)
            return parentfn(w1, w2, w3, w4, *extra)
    else:
        raise NotImplementedError("operation %r has arity %d" %
                                  (opname, nb_args))
    return proxy
def Space(*args, **kwds):
    # Entry point for the 'thunk' objspace: build a StdObjSpace, patch every
    # operation so thunk arguments are forced transparently, and expose the
    # thunk/is_thunk/become/lazy helpers on the __pypy__ module.
    # for now, always make up a wrapped StdObjSpace
    from pypy.objspace import std
    space = std.Space(*args, **kwds)
    patch_space_in_place(space, 'thunk', proxymaker)
    space.resolve_target = lambda w_arg: _force(space, w_arg)
    w___pypy__ = space.getbuiltinmodule("__pypy__")
    # w_fn_thunk is also consumed by lazy() above when building bound methods
    space.w_fn_thunk = space.wrap(app_thunk)
    space.setattr(w___pypy__, space.wrap('thunk'),
                  space.w_fn_thunk)
    space.setattr(w___pypy__, space.wrap('is_thunk'),
                  space.wrap(app_is_thunk))
    space.setattr(w___pypy__, space.wrap('become'),
                  space.wrap(app_become))
    space.setattr(w___pypy__, space.wrap('lazy'),
                  space.wrap(app_lazy))
    return space
|
#!/bin/bash
# Install Homebrew on OS X. Idempotent: exits early when brew already exists.

# Stop script if errors occur
trap 'echo Error: $0:$LINENO stopped; exit 1' ERR INT
set -eu

# Load vital library that is most important and
# constructed with many minimal functions
# For more information, see etc/README.md
. "$DOTPATH"/etc/lib/vital.sh

# This script is only supported with OS X
if ! is_osx; then
    log_fail "error: this script is only supported with osx"
    exit 1
fi

if has "brew"; then
    log_pass "brew: already installed"
    exit
fi

# The script is dependent on curl to fetch the installer
if ! has "curl"; then
    log_fail "error: require: curl"
    exit 1
fi

# Homebrew's Ruby-based installer (Homebrew/install/master/install) was
# deprecated and removed upstream; the official installer is now a Bash
# script fetched from Homebrew/install.
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"

if has "brew"; then
    brew doctor
else
    log_fail "error: brew: failed to install"
    exit 1
fi

log_pass "brew: installed successfully"
|
#!/bin/bash
# Score TAC slot-filling candidates across the 2012-2015 evaluations, run
# KDE-based accuracy estimation, and sweep threshold values for tuning.
#
# Positional parameters:
#   $1 candidates file            $2 parent dir for scored output
#   $3 expanded query file        $4 initial tuning params
#   $5 output params path         $6 KDE output params path
#   $7 trained model              $8 vocab file
#   $9 GPU id                     ${10}... extra eval args (word-split on purpose)
candidates=$1
scored_candidates_dir_parent=$2
query_expanded=$3
init_params=$4
output_params=$5
KDE_output_params=$6
model=$7
vocab=$8
gpu=$9
# NOTE(review): intentionally unquoted so the args word-split when passed on
eval_args=${@:10}
scoring_output_all_years=$scored_candidates_dir_parent/scoring_output
# site-specific roots for the TAC / torch-relation-extraction checkouts
export TAC_ROOT=/iesl/canvas/hschang/TAC_2016/codes/tackbp2016-sf
export TAC_CONFIG=$TAC_ROOT/config/coldstart2015_updated.config
export TH_RELEX_ROOT=/iesl/canvas/hschang/TAC_2016/codes/torch-relation-extraction
export TAC_EVAL_ROOT=${TH_RELEX_ROOT}/bin/tac-evaluation
mkdir -p $scored_candidates_dir_parent
# truncate the aggregate scoring file before the per-year runs append to it
>$scoring_output_all_years
MAX_SEQ=20
# score 2012-2014 candidates without a decision threshold
$TH_RELEX_ROOT/bin/tac-evaluation/generate_response_wo_threshold.sh 2012 $vocab $model $gpu $scored_candidates_dir_parent $MAX_SEQ $eval_args
$TH_RELEX_ROOT/bin/tac-evaluation/generate_response_wo_threshold.sh 2013 $vocab $model $gpu $scored_candidates_dir_parent $MAX_SEQ $eval_args
$TH_RELEX_ROOT/bin/tac-evaluation/generate_response_wo_threshold.sh 2014 $vocab $model $gpu $scored_candidates_dir_parent $MAX_SEQ $eval_args
# 2015 is scored separately and post-processed into a CS-SF response
scored_candidates_dir=$scored_candidates_dir_parent/2015
echo $scored_candidates_dir
mkdir -p $scored_candidates_dir
source ${TAC_EVAL_ROOT}/scoring_function.sh ${candidates} ${scored_candidates_dir} ${vocab} ${model} $gpu $MAX_SEQ $eval_args
#CAND_SCORE_CMD="th ${TH_RELEX_ROOT}/src/eval/ScoreCandidateFile.lua -candidates $candidates -vocabFile $vocab -model $model -gpuid $gpu -threshold 0 -outFile $scored_candidates_dir/scored_candidates -maxSeq $MAX_SEQ $eval_args"
#echo $CAND_SCORE_CMD
#$CAND_SCORE_CMD
$TAC_ROOT/components/bin/response_inv.sh $query_expanded $scored_candidates_dir/scored_candidates $scored_candidates_dir/response_full
$TAC_ROOT/components/bin/postprocess2015.sh $scored_candidates_dir/response_full $query_expanded /dev/null $scored_candidates_dir/response_full_pp15
$TAC_ROOT/components/bin/response_cs_sf.sh $scored_candidates_dir/response_full_pp15 $scored_candidates_dir/response_full_pp15_noNIL
ASSESSMENTS=$TAC_ROOT/evaluation/resources/2015/batch_00_05_poolc.assessed.fqec
RELCONFIG=/iesl/canvas/beroth/workspace/relationfactory_iesl/config/relations_coldstart2015.config
# build a tab-delimited list of inverse relations for the scoring scripts
grep inverse $RELCONFIG \
  | cut -d' ' -f1 \
  | sed $'s#\(.*\)#\t\\1\t#g' \
  > $scored_candidates_dir/inverses_with_tabs.tmp
REL_INV_CONFIG=/iesl/canvas/hschang/TAC_2016/codes/tackbp2016-kb/config/coldstart_relations2015_inverses.config
python $TH_RELEX_ROOT/bin/tac-evaluation/eval-scripts/scoring_outputs_for_tuning_2015.py $scored_candidates_dir/response_full_pp15_noNIL $ASSESSMENTS $scored_candidates_dir/scoring_output $scored_candidates_dir/inverses_with_tabs.tmp $REL_INV_CONFIG
cat $scored_candidates_dir/scoring_output >> $scoring_output_all_years
NUM_ITER=3
output_params_dir=`dirname $output_params`
mkdir -p $output_params_dir
PERFORMANCE_LOG=$output_params_dir/training_loss_log
KDE_dir=$scored_candidates_dir_parent/KDE
mkdir -p $KDE_dir
# estimate per-relation accuracy from the aggregated scores via KDE
/home/hschang/anaconda2/bin/python $TH_RELEX_ROOT/bin/tac-evaluation/eval-scripts/KDE_accuracy_estimation_local.py $scoring_output_all_years $REL_INV_CONFIG $KDE_dir/accuracy_estimations
python $TH_RELEX_ROOT/bin/tac-evaluation/eval-scripts/tune_based_on_pred_distribution_2015.py $scored_candidates_dir/response_full_pp15_noNIL $scored_candidates_dir/inverses_with_tabs.tmp $REL_INV_CONFIG $KDE_dir/accuracy_estimations $KDE_dir/KDE_scoring
# sweep threshold floors; each run logs its loss and seeds the KDE-based pass
LOWEST_THRESHOLD_LIST="0.0 0.05 0.1 0.15 0.2 0.25 0.3 0.35 0.4"
for t in ${LOWEST_THRESHOLD_LIST}; do
  echo "python $TH_RELEX_ROOT/bin/tac-evaluation/eval-scripts/tune_threshold_2012-2015.py $scoring_output_all_years $init_params ${output_params}_t$t $NUM_ITER $REL_INV_CONFIG $t | tee ${PERFORMANCE_LOG}_t$t"
  python $TH_RELEX_ROOT/bin/tac-evaluation/eval-scripts/tune_threshold_2012-2015.py $scoring_output_all_years $init_params ${output_params}_t$t $NUM_ITER $REL_INV_CONFIG $t | tee ${PERFORMANCE_LOG}_t$t
  INIT_RECALL=`tail -n 1 ${PERFORMANCE_LOG}_t$t`
  echo "python $TH_RELEX_ROOT/bin/tac-evaluation/eval-scripts/tune_threshold_2012-2015.py $KDE_dir/KDE_scoring ${output_params}_t$t ${KDE_output_params}_t$t $NUM_ITER $REL_INV_CONFIG $t $INIT_RECALL | tee $KDE_dir/training_loss_log_t$t"
  python $TH_RELEX_ROOT/bin/tac-evaluation/eval-scripts/tune_threshold_2012-2015.py $KDE_dir/KDE_scoring ${output_params}_t$t ${KDE_output_params}_t$t $NUM_ITER $REL_INV_CONFIG $t $INIT_RECALL | tee $KDE_dir/training_loss_log_t$t
done
rm $scored_candidates_dir/inverses_with_tabs.tmp
|
#!/bin/bash
# Debug build-and-run helper: configure with CMake, build, then launch the
# debug binary. `set -e` makes every step fail fast -- in particular a
# missing build directory no longer lets cmake/make run in the wrong place.
set -e
cd ../build/debug
cmake ../..
make
clear
../../TXTencrypter_dbg
<reponame>ShaolinDeng/SDK-Android
/*
* Copyright (C) 2018 iFLYTEK CO.,LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iflytek.cyber.iot.show.core.widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import com.iflytek.cyber.iot.show.core.R;
/**
 * Bottom bar hosting a circular wake-up button plus an animated voice-wave
 * overlay ({@link RecognizeWave}); tapping the button cross-fades between
 * the two states via the registered {@link OnWakeUpButtonClickListener}.
 */
public class RecognizeBar extends FrameLayout {
    private ImageView recognizeButton;
    private RecognizeWave recognizeWave;
    private OnWakeUpButtonClickListener onWakeUpButtonClickListener;
    // true while the wave overlay is showing (button faded out)
    private boolean isWakeUpState = false;
    public RecognizeBar(@NonNull Context context) {
        super(context);
        init();
    }
    public RecognizeBar(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        init();
    }
    public RecognizeBar(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }
    // Builds the button and wave children programmatically (no layout XML).
    private void init() {
        // NOTE(review): alpha of 1 (out of 255) is almost fully transparent --
        // confirm this is intended rather than Color.argb(255, 0, 0, 0)
        setBackgroundColor(Color.argb(1, 0, 0, 0));
        recognizeButton = new ImageView(getContext());
        recognizeButton.setImageResource(R.drawable.ic_logo_border);
        int recognizeButtonSize = getResources().getDimensionPixelSize(R.dimen.bottom_bar_logo_size);
        int recognizeButtonPadding = getResources().getDimensionPixelSize(R.dimen.bottom_bar_logo_padding);
        recognizeButton.setPadding(recognizeButtonPadding, recognizeButtonPadding, recognizeButtonPadding, recognizeButtonPadding);
        LayoutParams layoutParams = new LayoutParams(recognizeButtonSize, recognizeButtonSize);
        layoutParams.gravity = Gravity.CENTER;
        // themed ripple background for the button
        int[] attrs = new int[]{R.attr.selectableItemBackgroundBorderless};
        TypedArray ta = getContext().getTheme().obtainStyledAttributes(attrs);
        Drawable selectableItemBackgroundBorderless = ta.getDrawable(0);
        recognizeButton.setBackground(selectableItemBackgroundBorderless);
        recognizeButton.setOnClickListener(v -> {
            if (onWakeUpButtonClickListener != null) {
                // listener returning false vetoes the state toggle
                boolean result = onWakeUpButtonClickListener.onWakeUpButtonClick(this);
                if (!result)
                    return;
                if (!isWakeUpState)
                    animateToWakeUp();
                else
                    animateFromWakeUp();
                isWakeUpState = !isWakeUpState;
            }
        });
        addView(recognizeButton, layoutParams);
        recognizeWave = new RecognizeWave(getContext());
        LayoutParams waveParams = new LayoutParams(LayoutParams.MATCH_PARENT, recognizeButtonSize);
        waveParams.gravity = Gravity.CENTER;
        addView(recognizeWave, waveParams);
    }
    // Fade the button out and start the wave; the alpha guard makes repeated
    // calls while mid/post-animation no-ops.
    private void animateToWakeUp() {
        if (recognizeButton.getAlpha() != 1)
            return;
        recognizeButton.setEnabled(false);
        recognizeButton.animate().alpha(0).setDuration(200)
                .withEndAction(() -> recognizeButton.setEnabled(true)).start();
        recognizeWave.animateToWaving();
    }
    // Inverse of animateToWakeUp(): fade the button back in, stop the wave.
    private void animateFromWakeUp() {
        if (recognizeButton.getAlpha() != 0)
            return;
        recognizeButton.setEnabled(false);
        recognizeButton.animate().alpha(1).setDuration(200)
                .withEndAction(() -> recognizeButton.setEnabled(true)).start();
        recognizeWave.animateFromWaving();
    }
    public OnWakeUpButtonClickListener getOnWakeUpButtonClickListener() {
        return onWakeUpButtonClickListener;
    }
    public void setOnWakeUpButtonClickListener(OnWakeUpButtonClickListener onWakeUpButtonClickListener) {
        this.onWakeUpButtonClickListener = onWakeUpButtonClickListener;
    }
    public void stopWaving() {
        animateFromWakeUp();
    }
    public void startWaving() {
        animateToWakeUp();
    }
    // Forward microphone volume updates to the wave animation.
    public void updateVolume(int level) {
        recognizeWave.updateVolume(level);
    }
    public interface OnWakeUpButtonClickListener {
        /**
         * if do nothing, should return false
         *
         * @param view recognizeBar
         * @return result
         */
        boolean onWakeUpButtonClick(View view);
    }
}
|
// Simple demo object literal mapping three string keys to string values.
let myObj = {
  key1: "value1",
  key2: "value2",
  key3: "value3"
};
// Babel helper: shallow-merges own enumerable properties of later arguments
// into `target` (Object.assign fallback for older environments).
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
/* THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. */
import * as React from 'react';
// Forward-ref "swap" icon component: two opposing arrows, sized in em units
// and colored via currentColor so it inherits from the surrounding text.
var SvgSwap = React.forwardRef(function (props, svgRef) {
  return React.createElement("svg", _extends({
    width: "1em",
    height: "1em",
    viewBox: "0 0 32 32",
    ref: svgRef
  }, props), React.createElement("path", {
    fill: "currentColor",
    d: "M20.026 9.289H6V6.643h14.043l-4.988-4.988L16.709.001l7.984 7.984-7.859 7.859-1.681-1.681 4.872-4.872zM10.65 22.611h14.043v2.646H10.667l4.872 4.872-1.681 1.681-7.859-7.859 7.984-7.984 1.654 1.654-4.988 4.988z"
  }));
});
export default SvgSwap;
<template>
  <!-- Static product/price table rendered from the component's local data -->
  <table>
    <thead>
      <tr>
        <th>Name</th>
        <th>Price</th>
      </tr>
    </thead>
    <tbody>
      <tr v-for="product in products" :key="product.name">
        <td>{{product.name}}</td>
        <td>{{product.price}}</td>
      </tr>
    </tbody>
  </table>
</template>
<script>
export default {
  // Hard-coded demo catalogue; each entry supplies the row's name and price.
  data() {
    return {
      products: [
        { name: 'Product 1', price: 19.99 },
        { name: 'Product 2', price: 29.99 },
        { name: 'Product 3', price: 39.99 },
        { name: 'Product 4', price: 49.99 },
        { name: 'Product 5', price: 59.99 }
      ]
    }
  }
}
</script>
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.util.internal.ssh.sshj;
import static com.google.common.base.Objects.equal;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.File;
import java.io.IOException;
import net.schmizz.sshj.SSHClient;
import net.schmizz.sshj.transport.verification.PromiscuousVerifier;
import net.schmizz.sshj.userauth.keyprovider.OpenSSHKeyFile;
import net.schmizz.sshj.userauth.password.PasswordUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.util.GroovyJavaMethods;
import brooklyn.util.internal.ssh.SshAbstractTool.SshAction;
import com.google.common.base.Objects;
import com.google.common.net.HostAndPort;
/** based on code from jclouds */
public class SshjClientConnection implements SshAction<SSHClient> {

    private static final Logger LOG = LoggerFactory.getLogger(SshjClientConnection.class);

    public static Builder builder() {
        return new Builder();
    }

    /** Fluent builder; authentication falls back password -> key data -> key file -> ~/.ssh defaults. */
    public static class Builder {
        protected HostAndPort hostAndPort;
        protected String username;
        protected String password;
        protected String privateKeyPassphrase;
        protected String privateKeyData;
        protected File privateKeyFile;
        protected long connectTimeout;
        protected long sessionTimeout;
        protected boolean strictHostKeyChecking;

        public Builder hostAndPort(HostAndPort hostAndPort) {
            this.hostAndPort = hostAndPort;
            return this;
        }
        public Builder username(String username) {
            this.username = username;
            return this;
        }
        public Builder password(String val) {
            this.password = val;
            return this;
        }
        /** @deprecated use privateKeyData */
        public Builder privateKey(String val) {
            this.privateKeyData = val;
            return this;
        }
        public Builder privateKeyPassphrase(String val) {
            this.privateKeyPassphrase = val;
            return this;
        }
        public Builder privateKeyData(String val) {
            this.privateKeyData = val;
            return this;
        }
        public Builder privateKeyFile(File val) {
            this.privateKeyFile = val;
            return this;
        }
        public Builder strictHostKeyChecking(boolean val) {
            this.strictHostKeyChecking = val;
            return this;
        }
        public Builder connectTimeout(long connectTimeout) {
            this.connectTimeout = connectTimeout;
            return this;
        }
        public Builder sessionTimeout(long sessionTimeout) {
            this.sessionTimeout = sessionTimeout;
            return this;
        }
        public SshjClientConnection build() {
            return new SshjClientConnection(this);
        }
        protected static Builder fromSSHClientConnection(SshjClientConnection in) {
            return new Builder().hostAndPort(in.getHostAndPort()).connectTimeout(in.getConnectTimeout()).sessionTimeout(
                    in.getSessionTimeout()).username(in.username).password(in.password).privateKey(in.privateKeyData).privateKeyFile(in.privateKeyFile);
        }
    }

    private final HostAndPort hostAndPort;
    private final String username;
    private final String password;
    private final String privateKeyPassphrase;
    private final String privateKeyData;
    private final File privateKeyFile;
    private final boolean strictHostKeyChecking;
    private final int connectTimeout;
    private final int sessionTimeout;
    // current client; non-null only between create() and clear()
    SSHClient ssh;

    private SshjClientConnection(Builder builder) {
        this.hostAndPort = checkNotNull(builder.hostAndPort);
        this.username = builder.username;
        this.password = builder.password;
        this.privateKeyPassphrase = builder.privateKeyPassphrase;
        this.privateKeyData = builder.privateKeyData;
        this.privateKeyFile = builder.privateKeyFile;
        this.strictHostKeyChecking = builder.strictHostKeyChecking;
        // timeouts are configured as longs but sshj takes ints; clamp with a warning
        this.connectTimeout = checkInt("connectTimeout", builder.connectTimeout, Integer.MAX_VALUE);
        this.sessionTimeout = checkInt("sessionTimeout", builder.sessionTimeout, Integer.MAX_VALUE);
    }

    /**
     * Narrows {@code value} to an int, substituting {@code ifTooLarge} (with a
     * warning) when it exceeds {@link Integer#MAX_VALUE}.
     */
    static Integer checkInt(String context, long value, Integer ifTooLarge) {
        if (value > Integer.MAX_VALUE) {
            // fix: previously logged the oversized value itself after "using",
            // instead of the substituted fallback actually returned
            LOG.warn("Value '"+value+"' for "+context+" too large in SshjClientConnection; using "+ifTooLarge);
            return ifTooLarge;
        }
        return (int)value;
    }

    public boolean isConnected() {
        return ssh != null && ssh.isConnected();
    }

    public boolean isAuthenticated() {
        return ssh != null && ssh.isAuthenticated();
    }

    /** Disconnects and discards the current client, logging (not raising) IO failures. */
    @Override
    public void clear() {
        if (ssh != null && ssh.isConnected()) {
            try {
                if (LOG.isTraceEnabled()) LOG.trace("Disconnecting SshjClientConnection {} ({})", this, System.identityHashCode(this));
                ssh.disconnect();
            } catch (IOException e) {
                if (LOG.isDebugEnabled()) LOG.debug("<< exception disconnecting from {}: {}", e, e.getMessage());
            }
            ssh = null;
        }
    }

    /**
     * Connects and authenticates a fresh {@link SSHClient}.
     * Auth preference: password, then in-memory key data, then key file,
     * then the user's default keys in ~/.ssh.
     */
    @Override
    public SSHClient create() throws Exception {
        if (LOG.isTraceEnabled()) LOG.trace("Connecting SshjClientConnection {} ({})", this, System.identityHashCode(this));
        ssh = new net.schmizz.sshj.SSHClient();
        if (!strictHostKeyChecking) {
            // accept any host key
            ssh.addHostKeyVerifier(new PromiscuousVerifier());
        }
        // 0 means "use sshj's defaults"
        if (connectTimeout != 0) {
            ssh.setConnectTimeout(connectTimeout);
        }
        if (sessionTimeout != 0) {
            ssh.setTimeout(sessionTimeout);
        }
        ssh.connect(hostAndPort.getHostText(), hostAndPort.getPortOrDefault(22));
        if (password != null) {
            ssh.authPassword(username, password);
        } else if (privateKeyData != null) {
            OpenSSHKeyFile key = new OpenSSHKeyFile();
            key.init(privateKeyData, null,
                    GroovyJavaMethods.truth(privateKeyPassphrase) ?
                            PasswordUtils.createOneOff(privateKeyPassphrase.toCharArray())
                            : null);
            ssh.authPublickey(username, key);
        } else if (privateKeyFile != null) {
            OpenSSHKeyFile key = new OpenSSHKeyFile();
            key.init(privateKeyFile,
                    GroovyJavaMethods.truth(privateKeyPassphrase) ?
                            PasswordUtils.createOneOff(privateKeyPassphrase.toCharArray())
                            : null);
            ssh.authPublickey(username, key);
        } else {
            // Accept defaults (in ~/.ssh)
            ssh.authPublickey(username);
        }
        return ssh;
    }

    /**
     * @return host and port, where port if not present defaults to {@code 22}
     */
    public HostAndPort getHostAndPort() {
        return hostAndPort;
    }

    /**
     * @return username used in this ssh
     */
    public String getUsername() {
        return username;
    }

    /**
     *
     * @return how long to wait for the initial connection to be made
     */
    public int getConnectTimeout() {
        return connectTimeout;
    }

    /**
     *
     * @return how long to keep the ssh open, or {@code 0} for indefinitely
     */
    public int getSessionTimeout() {
        return sessionTimeout;
    }

    /**
     *
     * @return the current ssh or {@code null} if not connected
     */
    public SSHClient getSSHClient() {
        return ssh;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        SshjClientConnection that = SshjClientConnection.class.cast(o);
        return equal(this.hostAndPort, that.hostAndPort) && equal(this.username, that.username)
                && equal(this.password, that.password) && equal(this.privateKeyData, that.privateKeyData)
                && equal(this.privateKeyFile, that.privateKeyFile) && equal(this.ssh, that.ssh);
    }

    @Override
    public int hashCode() {
        // uses a subset of the equals() fields (omits privateKeyFile), which
        // is still consistent with equals: equal objects share these fields
        return Objects.hashCode(hostAndPort, username, password, privateKeyData, ssh);
    }

    @Override
    public String toString() {
        // secrets are redacted rather than printed
        return Objects.toStringHelper("")
                .add("hostAndPort", hostAndPort)
                .add("user", username)
                .add("ssh", ssh != null ? ssh.hashCode() : null)
                .add("password", (password != null ? "<PASSWORD>" : null))
                .add("privateKeyFile", privateKeyFile)
                .add("privateKey", (privateKeyData != null ? "xxxxxx" : null))
                .add("connectTimeout", connectTimeout)
                .add("sessionTimeout", sessionTimeout).toString();
    }
}
|
#!/bin/bash
# Run the kinect-fusion test harness on the rgbd_scene dataset, logging all
# output to a timestamped file under experiments/logs/.
set -x
set -e
export PYTHONUNBUFFERED="True"
# first CLI argument selects which GPU is visible to CUDA
export CUDA_VISIBLE_DEVICES=$1
LOG="experiments/logs/test_kinect_fusion.txt.`date +'%Y-%m-%d_%H-%M-%S'`"
# mirror stdout/stderr of everything below into the log file
exec &> >(tee -a "$LOG")
echo Logging output to "$LOG"
# test icp
time ./tools/test_kinect_fusion.py --gpu 0 \
  --imdb rgbd_scene_trainval \
  --cfg experiments/cfgs/rgbd_scene.yml \
  --rig data/RGBDScene/camera.json
<reponame>whois-api-llc/whois-api-java<filename>src/main/java/com/whoisxmlapi/whoisapi/ApiClient.java
package com.whoisxmlapi.whoisapi;
import com.whoisxmlapi.whoisapi.exception.*;
import com.whoisxmlapi.whoisapi.json.ErrorMessageParser;
import com.whoisxmlapi.whoisapi.json.WhoisRecordParser;
import com.whoisxmlapi.whoisapi.model.*;
import com.whoisxmlapi.whoisapi.net.HttpClient;
import okhttp3.OkHttpClient;
import java.util.ArrayList;
import java.util.Objects;
public class ApiClient {
protected HttpClient client;
protected RequestParameters rp;
    /** Client authenticated with the given API key, default HTTP settings. */
    public ApiClient(String apiKey) {
        Objects.requireNonNull(apiKey);
        this.rp = new RequestParameters(apiKey);
        this.client = new HttpClient(this.rp);
    }
    /** As above, but with caller-supplied network timeouts. */
    public ApiClient(String apiKey, NetworkTimeouts networkTimeouts) {
        Objects.requireNonNull(apiKey);
        Objects.requireNonNull(networkTimeouts);
        this.rp = new RequestParameters(apiKey);
        this.client = new HttpClient(this.rp, networkTimeouts);
    }
    /** As above, but reusing a caller-supplied OkHttpClient instance. */
    public ApiClient(String apiKey, OkHttpClient okHttpClient) {
        Objects.requireNonNull(apiKey);
        Objects.requireNonNull(okHttpClient);
        this.rp = new RequestParameters(apiKey);
        this.client = new HttpClient(this.rp);
        this.client.setClient(okHttpClient);
    }
    /** Client driven entirely by pre-built request parameters. */
    public ApiClient(RequestParameters rp) {
        Objects.requireNonNull(rp);
        this.rp = rp;
        this.client = new HttpClient(this.rp);
    }
    /** Pre-built request parameters plus explicit network timeouts. */
    public ApiClient(RequestParameters rp, NetworkTimeouts networkTimeouts) {
        Objects.requireNonNull(rp);
        Objects.requireNonNull(networkTimeouts);
        this.rp = rp;
        this.client = new HttpClient(this.rp, networkTimeouts);
    }
protected BaseRecord parseRecord(String record) throws UnparsableRecordException {
if (record.contains("WhoisRecord")) {
return WhoisRecordParser.parse(record);
}
if (record.contains("ErrorMessage")) {
return ErrorMessageParser.parse(record);
}
throw new UnparsableRecordException(record);
}
protected String handleErrorResponse(String response) throws ApiErrorMessageException {
if (response.contains("WhoisRecord")) {
return response;
}
ErrorMessage error = ErrorMessageParser.parse(response);
String msg = "";
if (error.getErrorCode().isPresent()) {
msg += "Code: " + error.getErrorCode().get();
}
if (error.getMsg().isPresent()) {
msg += " Message: " + error.getMsg().get();
}
throw new ApiErrorMessageException(msg, error);
}
public WhoisRecord getWhois(String domain)
throws EmptyApiKeyException, NetworkException, ApiEndpointException, ApiErrorMessageException, ApiAuthorizationException {
Objects.requireNonNull(domain);
RequestParameters temp = new RequestParameters(this.rp);
temp.setOutputFormat(HttpClient.ResponseFormat.JSON.format());
return WhoisRecordParser.parse(this.handleErrorResponse(this.client.get(domain, temp)));
}
public WhoisRecord getWhois(String domain, RequestParameters rp)
throws EmptyApiKeyException, NetworkException, ApiEndpointException, ApiErrorMessageException, ApiAuthorizationException {
Objects.requireNonNull(domain);
Objects.requireNonNull(rp);
rp.setOutputFormat(HttpClient.ResponseFormat.JSON.format());
return WhoisRecordParser.parse(this.handleErrorResponse(this.client.get(domain, rp)));
}
public String getRawResponse(String domain)
throws EmptyApiKeyException, NetworkException, ApiEndpointException, ApiAuthorizationException {
Objects.requireNonNull(domain);
return this.client.get(domain);
}
public String getRawResponse(String domain, RequestParameters rp)
throws EmptyApiKeyException, NetworkException, ApiEndpointException, ApiAuthorizationException {
Objects.requireNonNull(domain);
Objects.requireNonNull(rp);
return this.client.get(domain, rp);
}
public String getRawResponse(String domain, HttpClient.ResponseFormat dataFormat)
throws EmptyApiKeyException, NetworkException, ApiEndpointException, ApiAuthorizationException {
Objects.requireNonNull(domain);
Objects.requireNonNull(dataFormat);
RequestParameters temp = new RequestParameters(this.rp);
temp.setOutputFormat(dataFormat.format());
return this.client.get(domain, temp);
}
public BaseRecord[] getWhois(String[] domains) throws Exception {
Objects.requireNonNull(domains);
String[] records = this.client.get(domains);
ArrayList<BaseRecord> recordsList = new ArrayList<>();
for (String record : records) {
recordsList.add(this.parseRecord(record));
}
return recordsList.toArray(new BaseRecord[0]);
}
public BaseRecord[] getWhois(String[] domains, RequestParameters rp) throws Exception {
Objects.requireNonNull(domains);
Objects.requireNonNull(rp);
String[] records = this.client.get(domains, rp);
ArrayList<BaseRecord> recordsList = new ArrayList<>();
for (String record : records) {
recordsList.add(this.parseRecord(record));
}
return recordsList.toArray(new BaseRecord[0]);
}
public String[] getRawResponse(String[] domains) throws Exception {
Objects.requireNonNull(domains);
return this.client.get(domains);
}
public String[] getRawResponse(String[] domains, RequestParameters rp) throws Exception {
Objects.requireNonNull(domains);
Objects.requireNonNull(rp);
return this.client.get(domains, rp);
}
public String[] getRawResponse(String[] domains, HttpClient.ResponseFormat dataFormat) throws Exception {
Objects.requireNonNull(domains);
Objects.requireNonNull(dataFormat);
RequestParameters temp = new RequestParameters(this.rp);
temp.setOutputFormat(dataFormat.format());
return this.client.get(domains, temp);
}
public void setRetries(int retries) {
this.client.setRetries(retries);
}
public int getRetries() {
return this.client.getRetries();
}
public void setPoolSize(int poolSize) {
this.client.setPoolSize(poolSize);
}
public int getPoolSize() {
return this.client.getPoolSize();
}
public void setOkHttpClient(OkHttpClient okHttpClient) {
this.client.setClient(okHttpClient);
}
public void forceShutdown() {
this.client.forceShutdown();
}
}
|
<reponame>krcourville/odata-qraphql<gh_stars>0
import { DisableNoLoginDirective } from './disable-no-login.directive';
describe('DisableNoLoginDirective', () => {
  it('should create an instance', () => {
    // The directive takes no constructor dependencies, so a plain `new`
    // is enough to smoke-test instantiation.
    const instance = new DisableNoLoginDirective();
    expect(instance).toBeTruthy();
  });
});
|
def triage_ticket(ticket):
    """Decide how to route a support ticket.

    ``ticket`` is a 6-tuple:
    ``(ticket_type, urgency, escalation, redirect_status, <unused>, no_redirect)``.

    Returns ``200`` when the ticket is an urgent, escalated problem ticket
    with a truthy redirect status; otherwise returns the result of calling
    the ``no_redirect`` callback.
    """
    ticket_type, urgency, escalation, redirect_status, _, no_redirect = ticket
    should_redirect = (
        ticket_type == "PROBLEM_TICKET_TYPE"
        and urgency
        and escalation
        and redirect_status
    )
    return 200 if should_redirect else no_redirect()
# Point origin at the library's GitHub repo (placeholders are meant to be
# substituted with the real handle/repo name).
git remote set-url origin git@github.com:AUTHOR-HANDLE/LIBRARY-NAME.git
# Reset (or create) the gh-pages branch at the current commit.
git checkout -B gh-pages
# Install Jazzy and generate the API docs via the repo's doc script.
gem install jazzy
./.docs.sh
# Delete every file except .git internals, the generated docs/ tree, and
# the LICENSE, leaving only the documentation to publish. DESTRUCTIVE.
find . -type f ! -path "./.git/*" ! -path "./docs*" ! -path "./LICENSE" -exec rm -f {} +
# Promote the generated docs to the branch root.
mv docs/* ./
# Tell GitHub Pages not to run Jekyll over the output.
touch .nojekyll
# Stage everything (-f forces past .gitignore), commit, and force-push
# the rebuilt gh-pages branch.
git add -f .
git commit -m "Documentation update."
git push -f origin gh-pages
|
<reponame>saviorocha/freeCodeCamp-study
import React, { useState, useReducer } from 'react';
import Modal from './Modal';
import { data } from '../../../data';
// reducer function
const Index = () => {
return <h2>useReducer</h2>;
};
export default Index;
|
#!/bin/bash
# Build the architecture deck (PPT + draw.io diagram) and publish the
# artifacts to the repo's gh-pages branch.

# Install JS dependencies, then render the architecture description.
yarn
node architecture.js
# Generate drawio.xml from the diagram definition in ./drawio.js.
node -e "require('./drawio.js')().generate('drawio.xml')"
# get the current gh-pages branch (shallow clone into ./build; GITHUB_TOKEN
# must be set in the environment for the authenticated HTTPS URL)
git clone --depth=1 --branch=gh-pages "https://l2fprod:${GITHUB_TOKEN}@github.com/l2fprod/myarchitecture" build
# copy over the PPT template and the freshly generated diagram
cp mycatalog-architecture-diagram-template.pptx build
cp drawio.xml build
# Git identity/config for the CI commit below.
git config --global push.default simple
git config --global user.email "autobuild@not-a-dom.ain"
git config --global user.name "autobuild"
# commit to gh-pages (done in a subshell so the caller's working
# directory is left unchanged)
(cd build && git add . && git commit -m "new deck" && git push)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.