text stringlengths 1 1.05M |
|---|
/*
* LangPi basic language processing tasks
*
* Copyright 2015-2016 <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kariminf.langpi.basic;
import kariminf.ktoolja.plugins.Info;
/**
 * Contains the information about the preprocessed language: its English
 * name and its native (original-alphabet) name. Each language plugin
 * implements this interface.
 *
 * @author <NAME>
 *
 */
public interface BasicInfo extends Info {
// Interface version tag. NOTE(review): Java convention would name this
// constant VERSION, but renaming would break existing implementors.
static final String version = "1.0";
/**
 * Returns the English name of the language, for example "Arabic".
 *
 * @return English name of the language
 */
public String getLangEnglishName();
/**
 * Returns the original name of the language, for example: "العربية".
 * @return language's name with the original alphabet
 */
public String getLangName();
}
|
# Computes a migration Plan for Sequent's view-schema migrations: given the
# application's declared versions hash, works out which projectors are
# involved and which table migrations (replay/alter) actually need to run
# when moving between two versions.
module Sequent
module Migrations
class Planner
# Value object pairing the affected projector classes with the concrete
# migrations to execute.
Plan = Struct.new(:projectors, :migrations) do
# Only the migrations that rebuild a table by replaying events.
def replay_tables
migrations.select { |m| m.class == ReplayTable }
end
# Only the migrations that change a table in place via an SQL file.
def alter_tables
migrations.select { |m| m.class == AlterTable }
end
# True when there is nothing to migrate.
def empty?
migrations.empty?
end
end
# Hash of version (String key) => Array of AlterTable instances and/or
# Sequent::Projector subclasses, as declared by the application.
attr_reader :versions
def initialize(versions)
@versions = versions
end
# Builds the Plan covering versions (old, new] — old is exclusive, new is
# inclusive. Version keys are looked up as strings.
def plan(old, new)
migrations = versions.slice(*Range.new(old + 1, new).to_a.map(&:to_s))
Plan.new(
migrations.yield_self(&method(:select_projectors)),
migrations
.yield_self(&method(:create_migrations))
.yield_self(&method(:remove_redundant_migrations))
)
end
private
# Unique Sequent::Projector subclasses mentioned anywhere in the selected
# versions (non-Class entries such as AlterTable instances are filtered out).
def select_projectors(migrations)
migrations
.values
.flatten
.select { |v| v.is_a?(Class) && v < Sequent::Projector }.uniq
end
# Drops ReplayTable migrations superseded by a later replay of the same
# record class, then drops AlterTables made moot by a later replay.
def remove_redundant_migrations(migrations)
redundant_migrations = migrations
.yield_self(&method(:group_identical_migrations))
.yield_self(&method(:select_redundant_migrations))
.yield_self(&method(:remove_redundancy))
.values
.flatten
(migrations - redundant_migrations)
.yield_self(&method(:remove_alter_tables_before_replay_table))
end
# Groups by {migration_type:, record_class:} so duplicates can be detected.
def group_identical_migrations(migrations)
migrations
.group_by { |migration| {migration_type: migration.class, record_class: migration.record_class} }
end
# Only ReplayTable groups with more than one entry contain redundancy.
def select_redundant_migrations(grouped_migrations)
grouped_migrations.select { |type, ms| type[:migration_type] == ReplayTable && ms.length > 1 }
end
# An AlterTable is pointless when the same record class is replayed later
# in the sequence: the replay recreates the table from scratch anyway.
def remove_alter_tables_before_replay_table(migrations)
migrations - migrations
.each_with_index
.select { |migration, _index| migration.class == AlterTable }
.select { |migration, index| migrations
.slice((index + 1)..-1)
.find { |m| m.class == ReplayTable && m.record_class == migration.record_class }
}.map(&:first)
end
# Keeps only the highest-version migration in each redundant group; the
# remainder (slice(1..-1) after the descending sort) are the redundant ones.
def remove_redundancy(grouped_migrations)
grouped_migrations.reduce({}) { |memo, (key, ms)|
memo[key] = ms
.yield_self(&method(:order_by_version_desc))
.slice(1..-1)
memo
}
end
# Sorts newest-version first. Versions are compared numerically via to_i.
def order_by_version_desc(migrations)
migrations.sort_by { |m| m.version.to_i }
.reverse
end
# Expands each version's declarations into concrete migration instances.
def create_migrations(migrations)
migrations
.yield_self(&method(:map_to_migrations))
.values
.compact
.flatten
end
# AlterTable declarations require the matching versioned SQL file to exist;
# Projector classes expand into one ReplayTable per managed table.
def map_to_migrations(migrations)
migrations.reduce({}) do |memo, (version, _migrations)|
fail "Declared migrations for version #{version} must be an Array. For example: {'3' => [FooProjector]}" unless _migrations.is_a?(Array)
memo[version] = _migrations.flat_map do |migration|
if migration.is_a?(AlterTable)
alter_table_sql_file_name = "#{Sequent.configuration.migration_sql_files_directory}/#{migration.table_name}_#{version}.sql"
fail "Missing file #{alter_table_sql_file_name} to apply for version #{version}" unless File.exist?(alter_table_sql_file_name)
migration.copy(version)
elsif migration < Sequent::Projector
migration.managed_tables.map { |table| ReplayTable.create(table, version) }
else
fail "Unknown Migration #{migration}"
end
end
memo
end
end
end
end
end
|
/// Marker protocol for comment-like model objects.
/// NOTE(review): currently empty — a placeholder for future requirements.
protocol CommentProtocol {
// Define protocol requirements here
}
/// Core Data managed object representing a comment attached to a Post.
final class Comment: NSManagedObject, CommentProtocol {
/// Server-side identifier of the comment.
@NSManaged private(set) var identifier: Int64
/// Text content of the comment.
@NSManaged private(set) var body: String
/// Display name of the comment author.
@NSManaged private(set) var name: String
/// Parent post; optional, so a comment may exist detached from a post.
@NSManaged private(set) var post: Post?
/// Populates this managed object from an API payload.
/// NOTE(review): not implemented yet — body is an empty stub.
func configure(commentFromApi: CommentFromApi) {
// Implement configuration from API data
}
/// A comment body is valid when it is non-empty and at most 280 characters.
func validateCommentBody() -> Bool {
return !body.isEmpty && body.count <= 280
}
}
#! /bin/bash
# Pin PATH to the standard system locations so external commands used below
# (curl, wget, iptables, service, ...) resolve predictably.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
# Defines the ANSI color escape codes and the colored status prefixes
# ([OK]/[Error]/[Warning]/[Info]) used by every other function. These are
# deliberately global; this must run before anything else prints.
function set_fonts_colors(){
clear
# Font colors
default_fontcolor="\033[0m"
red_fontcolor="\033[31m"
green_fontcolor="\033[32m"
warning_fontcolor="\033[33m"
info_fontcolor="\033[36m"
# Background colors
red_backgroundcolor="\033[41;37m"
green_backgroundcolor="\033[42;37m"
yellow_backgroundcolor="\033[43;37m"
# Fonts
error_font="${red_fontcolor}[Error]${default_fontcolor}"
ok_font="${green_fontcolor}[OK]${default_fontcolor}"
warning_font="${warning_fontcolor}[Warning]${default_fontcolor}"
info_font="${info_fontcolor}[Info]${default_fontcolor}"
}
# Verifies root privileges, then detects the distribution (CentOS/Debian/
# Ubuntu families), its major version, the CPU architecture tag used for
# release downloads, and the available init system. Sets the globals
# System_OS, OS_Version, System_Bit and daemon_name; exits on anything
# unsupported.
function check_os(){
clear
echo -e "正在检测当前是否为ROOT用户..."
if [ "${EUID}" -eq "0" ]; then
clear
echo -e "${ok_font}检测到当前为Root用户。"
else
clear
echo -e "${error_font}当前并非ROOT用户,请先切换到ROOT用户后再使用本脚本。"
exit 1
fi
clear
echo -e "正在检测此系统是否被支持..."
# RHEL-family (incl. Aliyun/Amazon Linux) are all treated as "CentOS".
if [ -n "$(grep 'Aliyun Linux release' /etc/issue)" -o -e "/etc/redhat-release" ]; then
System_OS="CentOS"
[ -n "$(grep ' 7\.' /etc/redhat-release)" ] && OS_Version="7"
[ -n "$(grep ' 6\.' /etc/redhat-release)" -o -n "$(grep 'Aliyun Linux release6 15' /etc/issue)" ] && OS_Version="6"
[ -n "$(grep ' 5\.' /etc/redhat-release)" -o -n "$(grep 'Aliyun Linux release5' /etc/issue)" ] && OS_Version="5"
if [ -z "${OS_Version}" ]; then
# Fall back to lsb_release for versions not matched above.
[ ! -e "$(command -v lsb_release)" ] && { yum -y update; yum -y install redhat-lsb-core; clear; }
OS_Version="$(lsb_release -sr | awk -F. '{print $1}')"
fi
elif [ -n "$(grep 'Amazon Linux AMI release' /etc/issue)" -o -e /etc/system-release ]; then
System_OS="CentOS"
OS_Version="6"
elif [ -n "$(grep Debian /etc/issue)" -o "$(lsb_release -is 2>/dev/null)" == 'Debian' ]; then
System_OS="Debian"
[ ! -e "$(command -v lsb_release)" ] && { apt-get -y update; apt-get -y install lsb-release; clear; }
OS_Version="$(lsb_release -sr | awk -F. '{print $1}')"
elif [ -n "$(grep Deepin /etc/issue)" -o "$(lsb_release -is 2>/dev/null)" == 'Deepin' ]; then
System_OS="Debian"
[ ! -e "$(command -v lsb_release)" ] && { apt-get -y update; apt-get -y install lsb-release; clear; }
OS_Version="$(lsb_release -sr | awk -F. '{print $1}')"
elif [ -n "$(grep Ubuntu /etc/issue)" -o "$(lsb_release -is 2>/dev/null)" == 'Ubuntu' ]; then
System_OS="Ubuntu"
[ ! -e "$(command -v lsb_release)" ] && { apt-get -y update; apt-get -y install lsb-release; clear; }
OS_Version="$(lsb_release -sr | awk -F. '{print $1}')"
else
clear
echo -e "${error_font}目前暂不支持您使用的操作系统。"
exit 1
fi
clear
echo -e "${ok_font}该脚本支持您的系统。"
clear
echo -e "正在检测系统构架是否被支持..."
# System_Bit is interpolated into the gost release asset name later.
if [[ "$(uname -m)" == "i686" ]] || [[ "$(uname -m)" == "i386" ]]; then
System_Bit="386"
elif [[ "$(uname -m)" == *"x86_64"* ]]; then
System_Bit="amd64"
elif [[ "$(uname -m)" == *"armv7"* ]] || [[ "$(uname -m)" == "armv6l" ]]; then
System_Bit="arm"
elif [[ "$(uname -m)" == *"armv8"* ]] || [[ "$(uname -m)" == "aarch64" ]]; then
# NOTE(review): mapping armv8/aarch64 to "amd64" looks wrong — an x86-64
# binary cannot run on ARM64. Presumably this should be the ARM64 asset
# tag of the gost release; verify against the actual release file names.
System_Bit="amd64"
else
clear
echo -e "${error_font}目前暂不支持此系统的构架。"
exit 1
fi
clear
echo -e "${ok_font}该脚本支持您的系统构架。"
clear
echo -e "正在检测进程守护安装情况..."
# daemon_name drives how the service is registered later (systemd vs sysv).
if [ -n "$(command -v systemctl)" ]; then
clear
daemon_name="systemd"
echo -e "${ok_font}您的系统中已安装 systemctl。"
elif [ -n "$(command -v chkconfig)" ]; then
clear
daemon_name="sysv"
echo -e "${ok_font}您的系统中已安装 chkconfig。"
elif [ -n "$(command -v update-rc.d)" ]; then
clear
daemon_name="sysv"
echo -e "${ok_font}您的系统中已安装 update-rc.d。"
else
clear
echo -e "${error_font}您的系统中没有配置进程守护工具,安装无法继续!"
exit 1
fi
clear
echo -e "${ok_font}Support OS: ${System_OS}${OS_Version} ${System_Bit} with ${daemon_name}."
}
# Inspects the current installation and sets the globals the menu displays:
# install_status, gost_status, gost_pid and gost_use_command — the latter is
# a Telegram socks deep link built from the public IP and socks5.json.
function check_install_status(){
if [ ! -f "/usr/local/gost/gost" ]; then
install_status="${red_fontcolor}未安装${default_fontcolor}"
gost_use_command="${red_fontcolor}未安装${default_fontcolor}"
else
install_status="${green_fontcolor}已安装${default_fontcolor}"
# PID of the running gost process, filtering out this script and wrappers.
gost_pid="$(ps -ef |grep "gost" |grep -v "grep" | grep -v ".sh"| grep -v "init.d" |grep -v "service" |awk '{print $2}')"
if [ -z "${gost_pid}" ]; then
gost_status="${red_fontcolor}未运行${default_fontcolor}"
gost_use_command="${red_fontcolor}未运行${default_fontcolor}"
gost_pid="0"
else
gost_status="${green_fontcolor}正在运行${default_fontcolor} | ${green_fontcolor}${gost_pid}${default_fontcolor}"
# Public IPv4, with a fallback service if ip.sb yields nothing.
ip_address="$(curl -4 ip.sb)"
if [ -z "${ip_address}" ]; then
ip_address="$(curl -4 https://ipinfo.io/ip)"
fi
# Fix: query-string parameters after the first must be joined with "&",
# not a second "?" — "?server=x?port=y" makes Telegram ignore the port.
if [ -n "$(grep -Eo "[0-9a-zA-Z\_\-]+:[0-9a-zA-Z\_\-]+" "/usr/local/gost/socks5.json")" ]; then
gost_use_command="\n${green_backgroundcolor}https://t.me/socks?server=${ip_address}&port=$(grep -Eo "@\:[0-9]+" /usr/local/gost/socks5.json | sed "s/@://g")&user=$(grep -Eo "[0-9a-zA-Z\_\-]+:[0-9a-zA-Z\_\-]+" /usr/local/gost/socks5.json | awk -F : '{print $1}')&pass=$(grep -Eo "[0-9a-zA-Z\_\-]+:[0-9a-zA-Z\_\-]+" /usr/local/gost/socks5.json | awk -F : '{print $2}')${default_fontcolor}"
else
gost_use_command="\n${green_backgroundcolor}https://t.me/socks?server=${ip_address}&port=$(grep -Eo "\:[0-9]+" /usr/local/gost/socks5.json | sed "s/://g")${default_fontcolor}"
fi
fi
fi
}
# Prints the interactive main menu and dispatches the selection (stored in
# the global ${determine_type}) to data_processing; rejects anything else.
function echo_install_list(){
clear
echo -e "脚本当前安装状态:${install_status}
--------------------------------------------------------------------------------------------------
1.安装Gost
--------------------------------------------------------------------------------------------------
Gost当前运行状态:${gost_status}
2.更新脚本
3.更新程序
4.卸载程序
5.启动程序
6.关闭程序
7.重启程序
--------------------------------------------------------------------------------------------------
Telegram代理链接:${gost_use_command}
--------------------------------------------------------------------------------------------------"
stty erase '^H' && read -r -p "请输入序号:" determine_type
# The menu only offers options 1-7. The previous bound (10) accepted 8-10,
# which then fell through data_processing as silent no-ops.
if [ "${determine_type}" -ge "1" ] && [ "${determine_type}" -le "7" ]; then
data_processing
else
clear
echo -e "${error_font}请输入正确的序号!"
exit 1
fi
}
# Central dispatcher for the menu choice in ${determine_type}:
#   2=self-update, 3=upgrade gost, 4=uninstall, 5/6/7=start/stop/restart,
#   1=full install (download binary, ask for port/credentials, optionally
#   restrict to Telegram IP ranges, write socks5.json, register the service
#   with systemd or sysv, then start it).
# On any install failure it sets ${clear_install_reason} and rolls back via
# clear_install.
function data_processing(){
clear
echo -e "正在处理请求中..."
if [ "${determine_type}" = "2" ]; then
upgrade_shell_script
elif [ "${determine_type}" = "3" ]; then
stop_service
prevent_uninstall_check
upgrade_program
restart_service
clear
echo -e "${ok_font}Gost更新成功。"
elif [ "${determine_type}" = "4" ]; then
prevent_uninstall_check
uninstall_program
elif [ "${determine_type}" = "5" ]; then
prevent_uninstall_check
start_service
elif [ "${determine_type}" = "6" ]; then
prevent_uninstall_check
stop_service
elif [ "${determine_type}" = "7" ]; then
prevent_uninstall_check
restart_service
else
if [ "${determine_type}" = "1" ]; then
prevent_install_check
os_update
generate_base_config
clear
mkdir -p /usr/local/gost
cd /usr/local/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}创建文件夹成功。"
else
clear
echo -e "${error_font}创建文件夹失败!"
clear_install_reason="创建文件夹失败。"
clear_install
exit 1
fi
# Scrape the newest release tag from the GitHub tags page, then download
# and unpack the matching binary for this architecture.
gost_version="$(wget -qO- "https://github.com/ginuerzh/gost/tags"|grep "/gost/releases/tag/"|head -n 1|awk -F "/tag/" '{print $2}'|sed 's/\">//'|sed 's/v//g')"
wget "https://github.com/ginuerzh/gost/releases/download/v${gost_version}/gost_${gost_version}_linux_${System_Bit}.tar.gz"
tar -zxvf "gost_${gost_version}_linux_${System_Bit}.tar.gz"
mv "gost_${gost_version}_linux_${System_Bit}/gost" "./gost"
rm -f "gost_${gost_version}_linux_${System_Bit}.tar.gz"
rm -rf "gost_${gost_version}_linux_${System_Bit}"
if [ -f "/usr/local/gost/gost" ]; then
clear
echo -e "${ok_font}下载Gost成功。"
else
clear
echo -e "${error_font}下载Gost文件失败!"
clear_install_reason="下载Gost文件失败。"
clear_install
exit 1
fi
clear
chmod +x "/usr/local/gost/gost"
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}设置Gost执行权限成功。"
else
clear
echo -e "${error_font}设置Gost执行权限失败!"
clear_install_reason="设置Gost执行权限失败。"
clear_install
exit 1
fi
clear
input_port
clear
echo -e "${info_font}温馨提示:用户名和密码仅支持大小写字母、数字、下划线和横线,输入其他字符会导致控制台输出的TG代理链接出现问题,届时请手动执行下面的命令以查看链接:\n${green_backgroundcolor}cat /usr/local/gost/telegram_link.info${default_fontcolor}\n\n"
stty erase '^H' && read -r -p "请输入连接用户名(可空):" connect_username
if [ -n "${connect_username}" ]; then
stty erase '^H' && read -r -p "请输入连接密码:" connect_password
if [ -z "${connect_password}" ]; then
clear
echo -e "${error_font}连接密码不能为空!"
clear_install_reason="连接密码不能为空!"
clear_install
exit 1
fi
fi
clear
echo -e "${info_font}Gost拥有路由控制功能,可以指定代理的内容,借助此功能可实现只代理Telegram,无法用其代理其他内容,例如Google、Youtube等。\n${info_font}温馨提示:脚本默认设置只能用于Telegram,如需取消请输入N。\n\n"
stty erase '^H' && read -r -p "是否需要设定为只能用于Telegram?(Y/n):" install_for_tgonly
case "${install_for_tgonly}" in
[nN][oO]|[nN])
clear
echo -e "${ok_font}已取消设定为Telegram专用。"
;;
*)
# Telegram's published IPv4 and IPv6 ranges from its two ASNs (AS59930,
# AS62041), deduplicated; used as a gost bypass whitelist.
telegram_iprange="$(echo -e "$(echo -e "$(curl https://ipinfo.io/AS59930 | grep -Eo "[0-9]+.[0-9]+.[0-9]+.[0-9]+/[0-9]+")\n$(curl https://ipinfo.io/AS62041 | grep -Eo "[0-9]+.[0-9]+.[0-9]+.[0-9]+/[0-9]+")" | sort -u -r)\n$(echo -e "$(curl https://ipinfo.io/AS59930 | grep -Eo "[0-9a-z]+\:[0-9a-z]+\:[0-9a-z]+\:\:/[0-9]+")\n$(curl https://ipinfo.io/AS62041 | grep -Eo "[0-9a-z]+\:[0-9a-z]+\:[0-9a-z]+\:\:/[0-9]+")" | sort -u)")"
if [ -n "${telegram_iprange}" ]; then
clear
echo -e "${ok_font}获取Telegram IP段成功。"
else
clear
echo -e "${error_font}获取Telegram IP段失败!"
clear_install_reason="获取Telegram IP段失败!"
clear_install
exit 1
fi
# "reverse true" inverts the bypass list: only listed ranges are proxied.
echo -e "reverse true\n${telegram_iprange}" > "/usr/local/gost/telegram_iprange.info"
if [ -n "$(cat "/usr/local/gost/telegram_iprange.info")" ]; then
clear
echo -e "${ok_font}写入路由控制配置成功。"
else
clear
echo -e "${error_font}写入路由控制配置失败!"
clear_install_reason="写入路由控制配置失败!"
clear_install
exit 1
fi
;;
esac
# Assemble socks5.json piecewise: credentials (optional), port, and the
# bypass file when it exists.
# NOTE(review): this cat also matches a telegram_iprange.info left behind
# by a previous install even when the user just declined TG-only mode.
socks5_config="$(echo -e "
{
\"Debug\": false,
\"Retries\": 3,
\"ServeNodes\": [")"
if [ -n "${connect_username}" ] && [ -n "${connect_password}" ]; then
socks5_config="$(echo -e "${socks5_config}
\"socks5://${connect_username}:${connect_password}@:${install_port}")"
else
socks5_config="$(echo -e "${socks5_config}
\"socks5://:${install_port}")"
fi
if [ -n "$(cat "/usr/local/gost/telegram_iprange.info")" ]; then
socks5_config="$(echo -e "${socks5_config}?bypass=/usr/local/gost/telegram_iprange.info\"")"
else
socks5_config="$(echo -e "${socks5_config}\"")"
fi
socks5_config="$(echo -e "${socks5_config}
]
}")"
echo -e "${socks5_config}" > "/usr/local/gost/socks5.json"
if [ -n "$(cat "/usr/local/gost/socks5.json")" ]; then
clear
echo -e "${ok_font}写入配置文件成功。"
else
clear
echo -e "${error_font}写入配置文件失败!"
clear_install_reason="写入配置文件失败。"
clear_install
exit 1
fi
# Register with the detected init system so gost starts on boot.
if [ "${daemon_name}" == "systemd" ]; then
curl "https://raw.githubusercontent.com/shell-script/gost-socks5-onekey/master/gost.service" -o "/etc/systemd/system/gost.service"
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}下载进程守护文件成功。"
else
clear
echo -e "${error_font}下载进程守护文件失败!"
clear_install_reason="下载进程守护文件失败。"
clear_install
exit 1
fi
systemctl daemon-reload
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}重载进程守护文件成功。"
else
clear
echo -e "${error_font}重载进程守护文件失败!"
clear_install_reason="重载进程守护文件失败。"
clear_install
exit 1
fi
systemctl enable gost.service
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}设置Gost开启自启动成功。"
else
clear
echo -e "${error_font}设置Gost开启自启动失败!"
clear_install_reason="设置Gost开启自启动失败。"
clear_install
exit 1
fi
elif [ "${daemon_name}" == "sysv" ]; then
curl "https://raw.githubusercontent.com/shell-script/gost-socks5-onekey/master/gost.sh" -o "/etc/init.d/gost"
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}下载进程守护文件成功。"
else
clear
echo -e "${error_font}下载进程守护文件失败!"
clear_install_reason="下载进程守护文件失败。"
clear_install
exit 1
fi
chmod +x "/etc/init.d/gost"
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}设置进程守护文件执行权限成功。"
else
clear
echo -e "${error_font}设置进程守护文件执行权限失败!"
clear_install_reason="设置进程守护文件执行权限失败。"
clear_install
exit 1
fi
if [ "${System_OS}" == "CentOS" ]; then
chkconfig --add gost
chkconfig gost on
elif [ "${System_OS}" == "Debian" -o "${System_OS}" == "Ubuntu" ]; then
update-rc.d -f gost defaults
fi
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}设置Gost开启自启动成功。"
else
clear
echo -e "${error_font}设置Gost开启自启动失败!"
clear_install_reason="设置Gost开启自启动失败。"
clear_install
exit 1
fi
fi
clear
# Start and give the daemon a moment before checking for a live process.
service gost start
sleep 3s
if [ -n "$(ps -ef |grep "gost" |grep -v "grep" | grep -v ".sh"| grep -v "init.d" |grep -v "service" |awk '{print $2}')" ]; then
clear
echo -e "${ok_font}Gost 启动成功。"
echo_gost_config
else
clear
echo -e "${error_font}Gost 启动失败!"
echo_gost_config
echo -e "\n\n${error_font}Gost 启动失败!"
fi
fi
fi
echo -e "\n${ok_font}请求处理完毕。"
}
# Downloads the latest version of this script into the directory the running
# script lives in and reports where it was saved.
function upgrade_shell_script(){
clear
echo -e "正在更新脚本中..."
# Absolute directory containing the currently running script.
filepath="$(cd "$(dirname "$0")"; pwd)"
filename="$(echo -e "${filepath}"|awk -F "$0" '{print $1}')"
# Fix: the target path previously used a bogus "$(unknown)" command
# substitution; download next to the running script instead.
curl "https://raw.githubusercontent.com/shell-script/gost-socks5-onekey/master/gost-go.sh" -o "${filepath}/gost-go.sh"
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}脚本更新成功,脚本位置:\"${green_backgroundcolor}${filepath}/gost-go.sh${default_fontcolor}\",使用:\"${green_backgroundcolor}bash ${filepath}/gost-go.sh${default_fontcolor}\"。"
else
clear
echo -e "${error_font}脚本更新失败!"
fi
}
# Aborts with an error unless Gost is installed; used as a precondition for
# the update/uninstall/start/stop/restart actions.
function prevent_uninstall_check(){
clear
echo -e "正在检查安装状态中..."
# Guard clause: bail out immediately when Gost is not installed.
if [ "${install_status}" != "${green_fontcolor}已安装${default_fontcolor}" ]; then
clear
echo -e "${error_font}检测到您的系统中未安装Gost。"
exit 1
fi
echo -e "${ok_font}您已安装本程序,正在执行相关命令中..."
}
# Starts Gost via the init wrapper, but only when it is installed and not
# already running (check_install_status sets gost_pid to "0" when no
# process was found).
function start_service(){
clear
echo -e "正在启动服务中..."
if [ "${install_status}" = "${green_fontcolor}已安装${default_fontcolor}" ]; then
if [ "${gost_pid}" -eq "0" ]; then
service gost start
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}Gost 启动成功。"
else
clear
echo -e "${error_font}Gost 启动失败!"
fi
else
clear
echo -e "${error_font}Gost 正在运行。"
fi
else
clear
echo -e "${error_font}检测到您的系统中未安装Gost。"
exit 1
fi
}
# Stops Gost via the init wrapper when it is installed and currently running
# (gost_pid is "0" when check_install_status found no process).
function stop_service(){
clear
echo -e "正在停止服务中..."
if [ "${install_status}" = "${green_fontcolor}已安装${default_fontcolor}" ]; then
if [ "${gost_pid}" -eq "0" ]; then
echo -e "${error_font}Gost 未在运行。"
else
service gost stop
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}Gost 停止成功。"
else
clear
echo -e "${error_font}Gost 停止失败!"
fi
fi
else
clear
echo -e "${error_font}检测到您的系统中未安装Gost。"
exit 1
fi
}
# Restarts Gost via the init wrapper; refuses to run when Gost is not
# installed.
function restart_service(){
clear
echo -e "正在重启服务中..."
# Guard clause: abort early when Gost is not installed.
if [ "${install_status}" != "${green_fontcolor}已安装${default_fontcolor}" ]; then
clear
echo -e "${error_font}检测到您的系统中未安装Gost。"
exit 1
fi
service gost restart
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}Gost 重启成功。"
else
clear
echo -e "${error_font}Gost 重启失败!"
fi
}
# Pre-install guard (option 1 only): when Gost is already installed, asks
# whether to force-reinstall and, if confirmed, tears the old installation
# down (service, firewall port, files, init registration); otherwise aborts.
function prevent_install_check(){
clear
echo -e "正在检测安装状态中..."
if [ "${determine_type}" = "1" ]; then
if [ "${install_status}" = "${green_fontcolor}已安装${default_fontcolor}" ]; then
clear
# NOTE(review): "-r" is passed to read twice; harmless but redundant.
stty erase '^H' && read -r -r -p "您已经安装Gost,是否需要强制重新安装?[y/N]" install_force
case "${install_force}" in
[yY][eE][sS]|[yY])
service gost stop
close_port
rm -rf /usr/local/gost
if [ "${daemon_name}" == "systemd" ]; then
systemctl disable gost
rm -rf "/etc/systemd/system/gost.service"
elif [ "${daemon_name}" == "sysv" ]; then
if [ "${System_OS}" == "CentOS" ]; then
chkconfig --del gost
elif [ "${System_OS}" == "Debian" -o "${System_OS}" == "Ubuntu" ]; then
update-rc.d -f gost remove
fi
rm -rf /etc/init.d/gost
fi
;;
*)
# Any answer other than yes cancels the installation.
clear
echo -e "${error_font}安装已取消。"
exit 1
;;
esac
else
clear
echo -e "${ok_font}检测到您的系统中未安装Gost,正在执行相关命令中..."
fi
fi
}
# Full uninstall: stops the service, closes the firewall port, unregisters
# from the init system (systemd or sysv), and removes /usr/local/gost.
# Individual step failures are reported but do not abort the teardown.
function uninstall_program(){
clear
echo -e "正在卸载中..."
if [ "${install_status}" = "${green_fontcolor}已安装${default_fontcolor}" ]; then
service gost stop
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}停止Gost成功。"
else
clear
echo -e "${error_font}停止Gost失败!"
fi
close_port
if [ "${daemon_name}" == "systemd" ]; then
systemctl disable gost.service
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}取消开机自启动成功。"
else
clear
echo -e "${error_font}取消开机自启动失败!"
fi
rm -f /etc/systemd/system/gost.service
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}删除进程守护文件成功。"
else
clear
echo -e "${error_font}删除进程守护文件失败!"
fi
elif [ "${daemon_name}" == "sysv" ]; then
if [ "${System_OS}" == "CentOS" ]; then
chkconfig --del gost
elif [ "${System_OS}" == "Debian" -o "${System_OS}" == "Ubuntu" ]; then
update-rc.d -f gost remove
fi
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}取消开机自启动成功。"
else
clear
echo -e "${error_font}取消开机自启动失败!"
fi
rm -f /etc/init.d/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}删除进程守护文件成功。"
else
clear
echo -e "${error_font}删除进程守护文件失败!"
fi
fi
rm -rf /usr/local/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}删除Gost文件夹成功。"
else
clear
echo -e "${error_font}删除Gost文件夹失败!"
fi
clear
echo -e "${ok_font}Gost卸载成功。"
fi
}
# Upgrades the gost binary in place: backs up the current binary, downloads
# the latest release for this architecture, makes it executable, and restores
# the backup on any failure.
# NOTE(review): the gost.bak backup is intentionally left behind on success.
function upgrade_program(){
clear
echo -e "正在更新程序中..."
if [ "${install_status}" = "${green_fontcolor}已安装${default_fontcolor}" ]; then
clear
cd /usr/local/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}进入Gost目录成功。"
else
clear
echo -e "${error_font}进入Gost目录失败!"
exit 1
fi
mv /usr/local/gost/gost /usr/local/gost/gost.bak
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}备份旧文件成功。"
else
clear
echo -e "${error_font}备份旧文件失败!"
exit 1
fi
echo -e "更新Gost主程序中..."
clear
# Scrape the newest release tag, then download and unpack the binary.
gost_version="$(wget -qO- "https://github.com/ginuerzh/gost/tags"|grep "/gost/releases/tag/"|head -n 1|awk -F "/tag/" '{print $2}'|sed 's/\">//'|sed 's/v//g')"
wget "https://github.com/ginuerzh/gost/releases/download/v${gost_version}/gost_${gost_version}_linux_${System_Bit}.tar.gz"
tar -zxvf "gost_${gost_version}_linux_${System_Bit}.tar.gz"
mv "gost_${gost_version}_linux_${System_Bit}/gost" "./gost"
rm -f "gost_${gost_version}_linux_${System_Bit}.tar.gz"
rm -rf "gost_${gost_version}_linux_${System_Bit}"
if [ -f "/usr/local/gost/gost" ]; then
clear
echo -e "${ok_font}下载Gost成功。"
else
clear
echo -e "${error_font}下载Gost文件失败!"
mv /usr/local/gost/gost.bak /usr/local/gost/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}恢复备份文件成功。"
else
clear
echo -e "${error_font}恢复备份文件失败!"
fi
clear
echo -e "${error_font}Gost升级失败!"
echo -e "${error_font}失败原因:下载Gost文件失败。"
echo -e "${info_font}如需获得更详细的报错信息,请在shell窗口中往上滑动。"
exit 1
fi
clear
chmod +x "/usr/local/gost/gost"
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}设置Gost执行权限成功。"
else
clear
# Fix: this branch is the chmod failure, but it previously printed the
# copy-pasted download-failure message, which misled the user.
echo -e "${error_font}设置Gost执行权限失败!"
mv /usr/local/gost/gost.bak /usr/local/gost/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}恢复备份文件成功。"
else
clear
echo -e "${error_font}恢复备份文件失败!"
fi
clear
echo -e "${error_font}Gost升级失败!"
echo -e "${error_font}失败原因:设置Gost执行权限失败。"
echo -e "${info_font}如需获得更详细的报错信息,请在shell窗口中往上滑动。"
exit 1
fi
clear
echo -e "${ok_font}Gost更新成功。"
fi
}
# Rolls back a failed installation (option 1 only): stops the service,
# closes the firewall port, unregisters from the init system, removes
# /usr/local/gost, and reports the failure cause stored in
# ${clear_install_reason}. Step failures are reported but do not abort.
function clear_install(){
clear
echo -e "正在卸载中..."
if [ "${determine_type}" -eq "1" ]; then
service gost stop
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}停止Gost成功。"
else
clear
echo -e "${error_font}停止Gost失败!"
fi
close_port
if [ "${daemon_name}" == "systemd" ]; then
systemctl disable gost.service
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}取消开机自启动成功。"
else
clear
echo -e "${error_font}取消开机自启动失败!"
fi
rm -f /etc/systemd/system/gost.service
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}删除进程守护文件成功。"
else
clear
echo -e "${error_font}删除进程守护文件失败!"
fi
elif [ "${daemon_name}" == "sysv" ]; then
if [ "${System_OS}" == "CentOS" ]; then
chkconfig --del gost
elif [ "${System_OS}" == "Debian" -o "${System_OS}" == "Ubuntu" ]; then
update-rc.d -f gost remove
fi
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}取消开机自启动成功。"
else
clear
echo -e "${error_font}取消开机自启动失败!"
fi
rm -f /etc/init.d/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}删除进程守护文件成功。"
else
clear
echo -e "${error_font}删除进程守护文件失败!"
fi
fi
rm -rf /usr/local/gost
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}删除Gost文件夹成功。"
else
clear
echo -e "${error_font}删除Gost文件夹失败!"
fi
echo -e "${error_font}Gost安装失败。"
echo -e "\n${error_font}失败原因:${clear_install_reason}"
echo -e "${info_font}如需获得更详细的报错信息,请在shell窗口中往上滑动。"
fi
}
# Updates system packages and installs the script's prerequisites (wget,
# curl, unzip, lsof, daemon, CA certs, plus iptables or firewalld depending
# on OS/version). Exits on any failure. Relies on System_OS/OS_Version set
# by check_os.
function os_update(){
clear
echo -e "正在更新系统组件中..."
if [ "${System_OS}" == "CentOS" ]; then
yum update -y
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}系统源更新失败!"
exit 1
else
clear
echo -e "${ok_font}系统源更新成功。"
fi
yum upgrade -y
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}系统组件更新失败!"
exit 1
else
clear
echo -e "${ok_font}系统组件更新成功。"
fi
# CentOS <= 6 uses iptables; >= 7 uses firewalld (started right away so
# open_port can talk to it later).
if [ "${OS_Version}" -le "6" ]; then
yum install -y wget curl unzip lsof daemon iptables ca-certificates
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}所需组件安装失败!"
exit 1
else
clear
echo -e "${ok_font}所需组件安装成功。"
fi
elif [ "${OS_Version}" -ge "7" ]; then
yum install -y wget curl unzip lsof daemon firewalld ca-certificates
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}所需组件安装失败!"
exit 1
else
clear
echo -e "${ok_font}所需组件安装成功。"
fi
systemctl start firewalld
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}启动firewalld失败!"
exit 1
else
clear
echo -e "${ok_font}启动firewalld成功。"
fi
else
clear
echo -e "${error_font}目前暂不支持您使用的操作系统的版本号。"
exit 1
fi
elif [ "${System_OS}" == "Debian" -o "${System_OS}" == "Ubuntu" ]; then
apt-get update -y
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}系统源更新失败!"
exit 1
else
clear
echo -e "${ok_font}系统源更新成功。"
fi
apt-get upgrade -y
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}系统组件更新失败!"
exit 1
else
clear
echo -e "${ok_font}系统组件更新成功。"
fi
apt-get install -y wget curl unzip lsof daemon iptables ca-certificates
if [[ $? -ne 0 ]]; then
clear
echo -e "${error_font}所需组件安装失败!"
exit 1
else
clear
echo -e "${ok_font}所需组件安装成功。"
fi
fi
clear
echo -e "${ok_font}相关组件 安装/更新 完毕。"
}
# Determines the server's public IPv4 address into the global ${ip_address}:
# tries ip.sb, then ipinfo.io, then falls back to asking the user; aborts
# when all three yield nothing.
function generate_base_config(){
clear
echo "正在生成基础信息中..."
ip_address="$(curl -4 ip.sb)"
if [ -z "${ip_address}" ]; then
ip_address="$(curl -4 https://ipinfo.io/ip)"
fi
if [ -z "${ip_address}" ]; then
clear
echo -e "${warning_font}获取服务器公网IP失败,请手动输入服务器公网IP地址!"
stty erase '^H' && read -r -p "请输入您服务器的公网IP地址:" ip_address
fi
if [[ -z "${ip_address}" ]]; then
clear
echo -e "${error_font}获取服务器公网IP地址失败,安装无法继续。"
exit 1
else
clear
echo -e "${ok_font}您的vps_ip为:${ip_address}"
fi
}
# Prompts for the listen port (defaulting to 1080), verifies it is free via
# check_port (which also opens it in the firewall), and records it in
# /usr/local/gost/install_port.info for later uninstalls.
function input_port(){
clear
stty erase '^H' && read -r -p "请输入监听端口(默认监听1080端口):" install_port
if [ -z "${install_port}" ]; then
install_port="1080"
fi
check_port
# $? here reflects only whether the redirection itself succeeded.
echo -e "${install_port}" > "/usr/local/gost/install_port.info"
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}Gost端口配置成功。"
else
clear
echo -e "${error_font}Gost端口配置失败!"
clear_install_reason="Gost端口配置失败。"
clear_install
exit 1
fi
}
# Checks whether ${install_port} is free (via lsof). If occupied, offers to
# force-kill the occupants — stopping nginx/apache/caddy gracefully first —
# and aborts the install (via clear_install) when the port stays busy or the
# user declines. On a free port it proceeds to open_port.
function check_port(){
clear
echo -e "正在检测端口占用情况中..."
if [[ 0 -eq "$(lsof -i:"${install_port}" | wc -l)" ]]; then
clear
echo -e "${ok_font}${install_port}端口未被占用"
open_port
else
clear
echo -e "${error_font}检测到${install_port}端口被占用,以下为端口占用信息:"
lsof -i:"${install_port}"
# NOTE(review): "-r" is passed to read twice; harmless but redundant.
stty erase '^H' && read -r -r -p "是否尝试强制终止该进程?[Y/n]" install_force
case "${install_force}" in
[nN][oO]|[nN])
clear
echo -e "${error_font}取消安装。"
clear_install_reason="${install_port}端口被占用。"
clear_install
exit 1
;;
*)
clear
echo -e "正在尝试强制终止该进程..."
# Known web servers are stopped via their init scripts before the
# remaining occupants are killed outright.
if [ -n "$(lsof -i:"${install_port}" | awk '{print $1}' | grep -v "COMMAND" | grep "nginx")" ]; then
service nginx stop
fi
if [ -n "$(lsof -i:"${install_port}" | awk '{print $1}' | grep -v "COMMAND" | grep "apache")" ]; then
service apache stop
service apache2 stop
fi
if [ -n "$(lsof -i:"${install_port}" | awk '{print $1}' | grep -v "COMMAND" | grep "caddy")" ]; then
service caddy stop
fi
lsof -i:"${install_port}" | awk '{print $2}'| grep -v "PID" | xargs kill -9
if [ "0" -eq "$(lsof -i:"${install_port}" | wc -l)" ]; then
clear
echo -e "${ok_font}强制终止进程成功,${install_port}端口已变为未占用状态。"
open_port
else
clear
echo -e "${error_font}尝试强制终止进程失败,${install_port}端口仍被占用!"
clear_install_reason="尝试强制终止进程失败,${install_port}端口仍被占用。"
clear_install
exit 1
fi
;;
esac
fi
}
# Opens ${install_port}/tcp in the active firewall and verifies the rule
# took effect: firewalld on CentOS 7+, raw iptables on CentOS <= 6, and
# iptables persisted via /etc/iptables.up.rules + an if-pre-up.d hook on
# Debian/Ubuntu. Any failure rolls the installation back via clear_install.
function open_port(){
clear
echo -e "正在设置防火墙中..."
if [ "${System_OS}" == "CentOS" ] && [ "${OS_Version}" -ge "7" ]; then
firewall-cmd --permanent --zone=public --add-port="${install_port}"/tcp
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}开放 ${install_port}端口tcp协议 请求成功。"
else
clear
echo -e "${error_font}开放 ${install_port}端口tcp协议 请求失败!"
clear_install_reason="开放 ${install_port}端口tcp协议 请求失败。"
clear_install
exit 1
fi
# Reload so the permanent rule becomes active, then verify it.
firewall-cmd --complete-reload
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}重载firewalld规则成功。"
else
clear
echo -e "${error_font}重载firewalld规则失败!"
clear_install_reason="重载firewalld规则失败。"
clear_install
exit 1
fi
if [ "$(firewall-cmd --query-port="${install_port}"/tcp)" == "yes" ]; then
clear
echo -e "${ok_font}开放 ${install_port}端口tcp协议 成功。"
else
clear
echo -e "${error_font}开放 ${install_port}端口tcp协议 失败!"
clear_install_reason="开放 ${install_port}端口tcp协议 失败。"
clear_install
exit 1
fi
elif [ "${System_OS}" == "CentOS" ] && [ "${OS_Version}" -le "6" ]; then
service iptables save
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存当前iptables规则成功。"
else
clear
echo -e "${warning_font}保存当前iptables规则失败!"
fi
iptables -I INPUT -m state --state NEW -m tcp -p tcp --dport "${install_port}" -j ACCEPT
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}开放 ${install_port}端口tcp协议 请求成功。"
else
clear
echo -e "${error_font}开放 ${install_port}端口tcp协议 请求失败!"
clear_install_reason="开放 ${install_port}端口tcp协议 请求失败。"
clear_install
exit 1
fi
service iptables save
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存iptables规则成功。"
else
clear
echo -e "${error_font}保存iptables规则失败!"
clear_install_reason="保存iptables规则失败。"
clear_install
exit 1
fi
service iptables restart
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}重启iptables成功。"
else
clear
echo -e "${error_font}重启iptables失败!"
clear_install_reason="重启iptables失败。"
clear_install
exit 1
fi
if [ -n "$(iptables -L -n | grep ACCEPT | grep tcp |grep "${install_port}")" ]; then
clear
echo -e "${ok_font}开放 ${install_port}端口tcp协议 成功。"
else
clear
echo -e "${error_font}开放 ${install_port}端口tcp协议 失败!"
clear_install_reason="开放 ${install_port}端口tcp协议 失败。"
clear_install
exit 1
fi
elif [ "${System_OS}" == "Debian" -o "${System_OS}" == "Ubuntu" ]; then
# Persist current rules and install a pre-up hook so they survive reboot.
iptables-save > /etc/iptables.up.rules
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存当前iptables规则成功。"
else
clear
echo -e "${error_font}保存当前iptables规则失败!"
clear_install_reason="保存当前iptables规则失败。"
clear_install
exit 1
fi
echo -e '#!/bin/bash\n/sbin/iptables-restore < /etc/iptables.up.rules' > /etc/network/if-pre-up.d/iptables
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}配置iptables启动规则成功。"
else
clear
echo -e "${error_font}配置iptables启动规则失败!"
clear_install_reason="配置iptables启动规则失败。"
clear_install
exit 1
fi
chmod +x /etc/network/if-pre-up.d/iptables
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}设置iptables启动文件执行权限成功。"
else
clear
echo -e "${error_font}设置iptables启动文件执行权限失败!"
clear_install_reason="设置iptables启动文件执行权限失败。"
clear_install
exit 1
fi
iptables-restore < /etc/iptables.up.rules
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}导入iptables规则成功。"
else
clear
echo -e "${error_font}导入iptables规则失败!"
clear_install_reason="导入iptables规则失败。"
clear_install
exit 1
fi
iptables -I INPUT -m state --state NEW -m tcp -p tcp --dport "${install_port}" -j ACCEPT
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}开放 ${install_port}端口tcp协议 请求成功。"
else
clear
echo -e "${error_font}开放 ${install_port}端口tcp协议 请求失败!"
clear_install_reason="开放 ${install_port}端口tcp协议 请求失败。"
clear_install
exit 1
fi
iptables-save > /etc/iptables.up.rules
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存iptables规则成功。"
else
clear
echo -e "${error_font}保存iptables规则失败!"
clear_install_reason="保存iptables规则失败。"
clear_install
exit 1
fi
if [ -n "$(iptables -L -n | grep ACCEPT | grep tcp |grep "${install_port}")" ]; then
clear
echo -e "${ok_font}开放 ${install_port}端口tcp协议 成功。"
else
clear
echo -e "${error_font}开放 ${install_port}端口tcp协议 失败!"
clear_install_reason="开放 ${install_port}端口tcp协议 失败。"
clear_install
exit 1
fi
fi
clear
echo -e "${ok_font}防火墙配置完毕。"
}
# Remove the firewall rule for the TCP port gost was installed on.
# The port is recovered from the active service definition (systemd reads
# /usr/local/gost/socks5.json; sysv embeds it in /etc/init.d/gost), with a
# fallback to the install_port.info file written at install time.
function close_port(){
clear
echo -e "正在设置防火墙中..."
# Recover the installed port from the active service definition.
if [ "${daemon_name}" == "systemd" ] && [ -f "/etc/systemd/system/gost.service" ]; then
uninstall_port="$(grep -Eo "@\:[0-9]+" /usr/local/gost/socks5.json | sed "s/@://g")"
elif [ "${daemon_name}" == "sysv" ] && [ -f "/etc/init.d/gost" ]; then
uninstall_port="$(grep -Eo "@\:[0-9]+" /etc/init.d/gost | sed "s/@://g")"
fi
# Fall back to the port recorded at install time.
if [ -z "${uninstall_port}" ]; then
uninstall_port="$(cat "/usr/local/gost/install_port.info")"
fi
# CentOS 7+: firewalld. Failures only warn; the uninstall continues.
if [ "${System_OS}" == "CentOS" ] && [ "${OS_Version}" -ge "7" ]; then
firewall-cmd --permanent --zone=public --remove-port="${uninstall_port}"/tcp
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}关闭 ${uninstall_port}端口tcp协议 请求成功。"
else
clear
echo -e "${error_font}关闭 ${uninstall_port}端口tcp协议 请求失败!"
fi
firewall-cmd --complete-reload
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}重载firewalld规则成功。"
else
clear
echo -e "${error_font}重载firewalld规则失败!"
fi
# Verify the port is actually closed now.
if [ "$(firewall-cmd --query-port="${uninstall_port}"/tcp)" == "no" ]; then
clear
echo -e "${ok_font}关闭 ${uninstall_port}端口tcp协议 成功。"
else
clear
echo -e "${error_font}关闭 ${uninstall_port}端口tcp协议 失败!"
fi
# CentOS 6 and earlier: iptables managed through the iptables service.
elif [ "${System_OS}" == "CentOS" ] && [ "${OS_Version}" -le "6" ]; then
service iptables save
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存当前iptables规则成功。"
else
clear
echo -e "${warning_font}保存当前iptables规则失败!"
fi
iptables -D INPUT -m state --state NEW -m tcp -p tcp --dport "${uninstall_port}" -j ACCEPT
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}关闭 ${uninstall_port}端口tcp协议 请求成功。"
else
clear
echo -e "${error_font}关闭 ${uninstall_port}端口tcp协议 请求失败!"
fi
service iptables save
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存iptables规则成功。"
else
clear
echo -e "${error_font}保存iptables规则失败!"
fi
service iptables restart
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}重启iptables成功。"
else
clear
echo -e "${error_font}重启iptables失败!"
fi
# Success here means the ACCEPT rule for the port is gone.
if [ -z "$(iptables -L -n | grep ACCEPT | grep tcp |grep "${uninstall_port}")" ]; then
clear
echo -e "${ok_font}关闭 ${uninstall_port}端口tcp协议 成功。"
else
clear
echo -e "${error_font}关闭 ${uninstall_port}端口tcp协议 失败!"
fi
# Debian/Ubuntu: raw iptables persisted via /etc/network/if-pre-up.d.
# NOTE(review): unlike the CentOS branches above, the persistence steps in
# this branch call clear_install and exit 1 on failure, aborting an
# *uninstall* flow — confirm this asymmetry is intended.
elif [ "${System_OS}" == "Debian" -o "${System_OS}" == "Ubuntu" ]; then
iptables-save > /etc/iptables.up.rules
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存当前iptables规则成功。"
else
clear
echo -e "${error_font}保存当前iptables规则失败!"
clear_install_reason="保存当前iptables规则失败。"
clear_install
exit 1
fi
echo -e '#!/bin/bash\n/sbin/iptables-restore < /etc/iptables.up.rules' > /etc/network/if-pre-up.d/iptables
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}配置iptables启动规则成功。"
else
clear
echo -e "${error_font}配置iptables启动规则失败!"
clear_install_reason="配置iptables启动规则失败。"
clear_install
exit 1
fi
chmod +x /etc/network/if-pre-up.d/iptables
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}设置iptables启动文件执行权限成功。"
else
clear
echo -e "${error_font}设置iptables启动文件执行权限失败!"
clear_install_reason="设置iptables启动文件执行权限失败。"
clear_install
exit 1
fi
iptables-restore < /etc/iptables.up.rules
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}导入iptables规则成功。"
else
clear
echo -e "${error_font}导入iptables规则失败!"
clear_install_reason="导入iptables规则失败。"
clear_install
exit 1
fi
iptables -D INPUT -m state --state NEW -m tcp -p tcp --dport "${uninstall_port}" -j ACCEPT
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}关闭 ${uninstall_port}端口tcp协议 请求成功。"
else
clear
echo -e "${error_font}关闭 ${uninstall_port}端口tcp协议 请求失败!"
fi
iptables-save > /etc/iptables.up.rules
if [ "$?" -eq "0" ]; then
clear
echo -e "${ok_font}保存iptables规则成功。"
else
clear
echo -e "${error_font}保存iptables规则失败!"
fi
if [ -z "$(iptables -L -n | grep ACCEPT | grep tcp |grep "${uninstall_port}")" ]; then
clear
echo -e "${ok_font}关闭 ${uninstall_port}端口tcp协议 成功。"
else
clear
echo -e "${error_font}关闭 ${uninstall_port}端口tcp协议 失败!"
fi
else
clear
echo -e "${error_font}目前暂不支持您使用的操作系统。"
fi
clear
echo -e "${ok_font}防火墙配置完毕。"
}
# Print the gost (socks5) connection info and persist the Telegram
# quick-set link. Only runs for install type 1.
# Fix: the link file was previously written unconditionally, so when
# determine_type != 1 an unset ${telegram_link} overwrote
# /usr/local/gost/telegram_link.info with an empty line. The write now
# happens only after a link was actually generated.
function echo_gost_config(){
if [ "${determine_type}" = "1" ]; then
clear
# Include credentials in the link only when both username and password are set.
if [ -n "${connect_username}" ] && [ -n "${connect_password}" ]; then
telegram_link="https://t.me/socks?server=${ip_address}&port=${install_port}&user=${connect_username}&pass=${connect_password}"
else
telegram_link="https://t.me/socks?server=${ip_address}&port=${install_port}"
fi
echo -e "您的连接信息如下:"
echo -e "服务器地址:${ip_address}"
echo -e "端口:${install_port}"
if [ -n "${connect_username}" ] && [ -n "${connect_password}" ]; then
echo -e "用户名:${connect_username}"
echo -e "密码:${connect_password}"
fi
echo -e "Telegram设置指令:${green_backgroundcolor}${telegram_link}${default_fontcolor}"
echo -e "${telegram_link}" > /usr/local/gost/telegram_link.info
fi
}
# Entry point: initialize terminal colors, detect the OS, check whether
# gost is already installed, then show the interactive install menu.
function main(){
set_fonts_colors
check_os
check_install_status
echo_install_list
}
main
|
#!/bin/bash
set -e
# R system library to user library migration script for Mac OS X
#
# Date: January 14, 2014
# Author: Joe Cheng <joe@rstudio.com>
#
# From http://cran.r-project.org/bin/macosx/RMacOSX-FAQ.html:
# The official CRAN binaries come pre-packaged in such a way that
# administrator have sufficient privileges to update R and install
# packages system-wide.
#
# This means that any install.packages() call, or using Install Package
# from RStudio, causes packages to be installed in the system library
# (e.g. /Library/Frameworks/R.framework/Versions/3.0/Resources/library).
# The system library contains base and recommended packages as well.
#
# We believe it's more hygienic to keep base/recommended packages
# separate from user-installed packages, and this separation is
# necessary for the Packrat[0] dependency management system to provide
# isolation benefits.
#
# This script creates a personal library directory, and migrates any
# non-base, non-recommended packages from the system library into
# it. It then sets the permissions on the system library to only be
# writable by root. This will ensure that future install.packages calls
# will not add more packages to the system library.
#
# [0] http://rstudio.github.io/packrat/
# The system-wide library
# NOTE(review): assumes the system library is the *last* entry of
# .libPaths() — confirm for non-default R installations.
RLIBDIR=`R --vanilla --slave -e "cat(tail(.libPaths(), 1))"`
# The user library (which might not exist yet)
RLIBSUSER=`R --vanilla --slave -e "cat(path.expand(head(Sys.getenv('R_LIBS_USER'), 1)))"`
# The list of non-base, non-recommended packages in the system-wide library
# (base/recommended packages have a non-NA Priority field)
PKGS=`R --vanilla --slave -e "cat(with(as.data.frame(installed.packages(tail(.libPaths(), 1))), paste(Package[is.na(Priority)])))"`
if [ "$RLIBDIR" == "" ]; then
echo "ERROR: Couldn't detect system library directory, aborting" >&2
exit 1
fi
if [ "$RLIBSUSER" == "" ]; then
echo "ERROR: Couldn't detect R_LIBS_USER directory, aborting" >&2
exit 1
fi
echo "Saving backup of $RLIBDIR to ./SystemLibBackup.tar.gz"
if [ -f ./SystemLibBackup.tar.gz ]; then
echo "SystemLibBackup.tar.gz exists. Press Enter to overwrite, or Ctrl-C to abort:" >&2
read -s < /dev/tty
echo "Backing up..."
fi
# -P stores absolute paths so the archive can be restored in place.
tar -czPf SystemLibBackup.tar.gz "$RLIBDIR"
#tar -czf SystemLibBackup.tar.gz -C "$RLIBDIR" $(ls $RLIBDIR)
echo "Backup successful."
echo "Migrating user-installed packages to $RLIBSUSER"
echo "Press Enter to continue, or Ctrl-C to abort"
read -s < /dev/tty
mkdir -p "$RLIBSUSER"
for pkg in $PKGS; do
echo "Moving $pkg"
# Refuse to clobber a package that already exists in the user library.
if [ -d "$RLIBSUSER/$pkg" ]; then
echo "ERROR: The directory $RLIBSUSER/$pkg already exists, aborting" >&2
echo "Please delete the package $pkg from either $RLIBDIR or $RLIBSUSER."
exit 3
fi
# Do a copy to get default permissions
cp -R "$RLIBDIR/$pkg" "$RLIBSUSER"
sudo rm -rf "$RLIBDIR/$pkg"
done
echo
# 755: only the directory owner (root) keeps write access.
echo "Making $RLIBDIR writable only by root (chmod 755)"
sudo chmod -R 755 "$RLIBDIR"
echo
echo Success!
|
require 'spec_helper'
# Testing private nfs::server::config class via nfs class
describe 'nfs' do
describe 'private nfs::server::config' do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts) {
# to workaround service provider issues related to masking haveged
# when tests are run on GitLab runners which are docker containers
os_facts.merge( { :haveged__rngd_enabled => false } )
}
context 'with default nfs and nfs::server parameters' do
let(:params) {{ :is_server => true }}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to_not create_concat__fragment('nfs_conf_exportfs') }
it { is_expected.to create_concat__fragment('nfs_conf_mountd').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[mountd]
port = 20048
EOM
} ) }
it { is_expected.to create_concat__fragment('nfs_conf_nfsd').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[nfsd]
port = 2049
vers2 = false
vers3 = false
vers4 = true
vers4.0 = false
vers4.1 = true
vers4.2 = true
EOM
} ) }
it { is_expected.to_not create_concat__fragment('nfs_conf_nfsdcltrack') }
# the next 4 omissions are true for EL > 7, always, and for EL7 in
# this case, because there is no custom config
it { is_expected.to_not create_concat__fragment('nfs_RPCIDMAPDARGS') }
it { is_expected.to_not create_concat__fragment('nfs_RPCMOUNTDARGS') }
it { is_expected.to_not create_concat__fragment('nfs_RPCNFSDCOUNT') }
it { is_expected.to_not create_concat__fragment('nfs_RPCNFSDARGS') }
it { is_expected.to create_file('/etc/sysconfig/rpc-rquotad').with( {
:owner => 'root',
:group => 'root',
:mode => '0644',
:content => <<~EOM
# This file is managed by Puppet (simp-nfs module). Changes will be overwritten
# at the next puppet run.
#
RPCRQUOTADOPTS="-p 875"
EOM
} ) }
it { is_expected.to create_concat('/etc/exports').with( {
:owner => 'root',
:group => 'root',
:mode => '0644'
} ) }
it { is_expected.to create_systemd__unit_file('simp_etc_exports.path').with( {
:enable => true,
:active => true,
:content => <<~EOM
# This file is managed by Puppet (simp-nfs module). Changes will be overwritten
# at the next puppet run.
[Path]
Unit=simp_etc_exports.service
PathChanged=/etc/exports
[Install]
WantedBy=multi-user.target
EOM
} ) }
it { is_expected.to create_systemd__unit_file('simp_etc_exports.service').with( {
:enable => true,
:content => <<~EOM
# This file is managed by Puppet (simp-nfs module). Changes will be overwritten
# at the next puppet run.
[Service]
Type=simple
ExecStart=/usr/sbin/exportfs -ra
EOM
} ) }
end
context 'when nfsv3 only enabled for the NFS client' do
let(:hieradata) { 'nfs_nfsv3_and_not_nfs_server_nfsd_vers3' }
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_conf_nfsd').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[nfsd]
port = 2049
vers2 = false
vers3 = false
vers4 = true
vers4.0 = false
vers4.1 = true
vers4.2 = true
EOM
} ) }
end
context 'when stunnel enabled' do
# Fix: grammar in the example description ("explicitly enabled" ->
# "explicitly enable"). Assertion content is unchanged.
context 'when nfsd tcp and udp are not specified in custom config' do
let(:params) {{
:is_server => true,
:stunnel => true
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it 'should explicitly enable tcp and disable udp in nfsd config' do
is_expected.to create_concat__fragment('nfs_conf_nfsd').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[nfsd]
port = 2049
tcp = true
udp = false
vers2 = false
vers3 = false
vers4 = true
vers4.0 = false
vers4.1 = true
vers4.2 = true
EOM
} )
end
end
context 'when nfsd tcp and udp are specified with bad settings for stunnel in custom config' do
let(:params) {{
:is_server => true,
:stunnel => true,
:custom_nfs_conf_opts => {
'nfsd' => {
# ask for protocol settings that are the opposite of those
# required for stunnnel
'tcp' => false,
'udp' => true
}
}
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it 'should override tcp and udp settings in nfsd config' do
is_expected.to create_concat__fragment('nfs_conf_nfsd').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[nfsd]
port = 2049
tcp = true
udp = false
vers2 = false
vers3 = false
vers4 = true
vers4.0 = false
vers4.1 = true
vers4.2 = true
EOM
} )
end
end
end
context 'with nfs::custom_nfs_conf_opts set' do
context "when nfs::custom_nfs_conf_opts has 'exportfs' key" do
let(:params) {{
:is_server => true,
:custom_nfs_conf_opts => {
'exportfs' => {
'debug' => 'all'
}
}
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_conf_exportfs').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[exportfs]
debug = all
EOM
} ) }
end
context "when nfs::custom_nfs_conf_opts has 'mountd' key" do
let(:params) {{
:is_server => true,
:custom_nfs_conf_opts => {
'mountd' => {
'threads' => 16
}
}
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_conf_mountd').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[mountd]
port = 20048
threads = 16
EOM
} ) }
end
context "when nfs::custom_nfs_conf_opts has 'nfsd' key" do
let(:params) {{
:is_server => true,
:custom_nfs_conf_opts => {
'nfsd' => {
'threads' => 32
}
}
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_conf_nfsd').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[nfsd]
port = 2049
threads = 32
vers2 = false
vers3 = false
vers4 = true
vers4.0 = false
vers4.1 = true
vers4.2 = true
EOM
} ) }
end
if os_facts[:os][:release][:major].to_i < 8
context "when nfs::custom_nfs_conf_opts has 'nfsd' key with 'threads' key on EL7" do
let(:params) {{
:is_server => true,
:custom_nfs_conf_opts => {
'nfsd' => {
'threads' => 32
}
}
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it 'should also set RPCNFSDCOUNT in /etc/sysconfig/nfs' do
is_expected.to create_concat__fragment('nfs_RPCNFSDCOUNT').with( {
:target => '/etc/sysconfig/nfs',
:content => 'RPCNFSDCOUNT="32"'
} )
end
end
end
context "when nfs::custom_nfs_conf_opts has 'nfsdcltrack' key" do
let(:params) {{
:is_server => true,
:custom_nfs_conf_opts => {
'nfsdcltrack' => {
'storagedir' => '/some/path'
}
}
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_conf_nfsdcltrack').with( {
:target => '/etc/nfs.conf',
:content => <<~EOM
[nfsdcltrack]
storagedir = /some/path
EOM
} ) }
end
end
if os_facts[:os][:release][:major].to_i < 8
context 'when nfs::custom_daemon_args set' do
# Fix: description typo 'RCIDMAPDARGS' -> 'RPCIDMAPDARGS', matching the
# parameter key this example actually exercises.
context "when nfs::custom_daemon_args has 'RPCIDMAPDARGS' key" do
let(:params) {{
:is_server => true,
:custom_daemon_args => { 'RPCIDMAPDARGS' => '-C' }
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_RPCIDMAPDARGS').with( {
:target => '/etc/sysconfig/nfs',
:content => 'RPCIDMAPDARGS="-C"'
} ) }
end
context "when nfs::custom_daemon_args has 'RPCMOUNTDARGS' key" do
let(:params) {{
:is_server => true,
:custom_daemon_args => { 'RPCMOUNTDARGS' => '-f /some/export/file' }
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_RPCMOUNTDARGS').with( {
:target => '/etc/sysconfig/nfs',
:content => 'RPCMOUNTDARGS="-f /some/export/file"'
} ) }
end
context "when nfs::custom_daemon_args has 'RPCNFSDARGS' key" do
let(:params) {{
:is_server => true,
:custom_daemon_args => { 'RPCNFSDARGS' => '--syslog' }
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_concat__fragment('nfs_RPCNFSDARGS').with( {
:target => '/etc/sysconfig/nfs',
:content => 'RPCNFSDARGS="--syslog"'
} ) }
end
end
end
# Verifies that custom rpc-rquotad options from hieradata are rendered
# into /etc/sysconfig/rpc-rquotad ahead of the default port option.
context 'when nfs::server::custom_rpcrquotad_opts set' do
let(:hieradata) { 'nfs_server_custom_rpcrquotad_opts' }
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_file('/etc/sysconfig/rpc-rquotad').with_content(
<<~EOM
# This file is managed by Puppet (simp-nfs module). Changes will be overwritten
# at the next puppet run.
#
RPCRQUOTADOPTS="--setquota -p 875"
EOM
) }
end
# Verifies the server tcpwrappers class is included when tcpwrappers is
# enabled alongside the server role.
context 'when tcpwrappers enabled' do
let(:params) {{
:is_server => true,
:tcpwrappers => true
}}
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('nfs::server::config') }
it { is_expected.to create_class('nfs::server::tcpwrappers') }
end
end
end
end
end
|
#!/bin/bash
# Generate a new ed25519 SSH key pair and register it with a fresh
# ssh-agent. Interactive: ssh-keygen prompts for file path and passphrase.
echo "Creating a new ed25519 SSH key..."
# -C '' leaves the key comment empty.
ssh-keygen -t ed25519 -C ''
eval "$(ssh-agent -s)"
# Assumes the default key location was accepted at the ssh-keygen prompt.
ssh-add ~/.ssh/id_ed25519
echo "Done."
|
import { HostedZone } from '@aws-cdk/aws-route53';
import { IVpc } from '@aws-cdk/aws-ec2';
import { RemoteZone } from '../../components/remote';
import { CosmosCoreStack, ICosmosCore } from '../../cosmos/cosmos-core-stack';
import { CosmosExtensionStack, ICosmosExtension } from '../../cosmos/cosmos-extension-stack';
export interface DomainProps {
/**
 * Top Level Domain Name
 *
 * You can add Sub Domain names via `addSubDomain()`
 */
readonly tld: string;
/**
 * Export name under which this domain's hosted zone is published
 * (consumed by `RemoteZone`).
 */
readonly exportName: string;
/**
 * A VPC that you want to associate with this hosted zone.
 *
 * When you specify
 * this property, a private hosted zone will be created.
 *
 * You can associate additional VPCs to this private zone using `addVpc(vpc)`.
 *
 * @default public (no VPCs associated)
 */
readonly vpcs?: IVpc[];
/**
 * The Amazon Resource Name (ARN) for the log group that you want Amazon Route 53 to send query logs to.
 *
 * @default disabled
 */
readonly queryLogsLogGroupArn?: string;
}
/**
 * A Route53 hosted zone for a top-level domain, scoped to either a
 * Cosmos core or extension stack and exported via `RemoteZone` under
 * `props.exportName` so other stacks can import it.
 */
export class Domain extends HostedZone {
readonly scope: ICosmosCore | ICosmosExtension;
readonly export: RemoteZone;
constructor(scope: ICosmosCore | ICosmosExtension, id: string, props: DomainProps) {
super(scope, id, {
zoneName: props.tld,
vpcs: props.vpcs,
queryLogsLogGroupArn: props.queryLogsLogGroupArn,
// Fix: typo in the zone comment ("form" -> "for").
comment: `Root Domain for ${props.tld}`,
});
const { exportName } = props;
this.scope = scope;
this.export = new RemoteZone(this, exportName);
}
}
// Declaration merging: advertise addDomain() on CosmosCoreStack.
declare module '../../cosmos/cosmos-core-stack' {
export interface CosmosCoreStack {
addDomain(id: string, tld: string): Domain;
}
}
// Declaration merging: advertise addDomain() on CosmosExtensionStack.
declare module '../../cosmos/cosmos-extension-stack' {
export interface CosmosExtensionStack {
addDomain(id: string, tld: string): Domain;
}
}
// Core stacks derive the export name via singletonId(id).
CosmosCoreStack.prototype.addDomain = function (id, tld): Domain {
const resource = new Domain(this, id, {
tld: tld,
exportName: this.singletonId(id),
});
return resource;
};
// Extension stacks derive the export name via nodeId(id).
CosmosExtensionStack.prototype.addDomain = function (id, tld): Domain {
const resource = new Domain(this, id, {
tld: tld,
exportName: this.nodeId(id),
});
return resource;
};
|
// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "biod/fp_seed_command.h"
namespace biod {
// Destructor scrubs the seed from the request buffer so the secret does
// not linger in process memory after the command object is destroyed.
FpSeedCommand::~FpSeedCommand() {
ClearSeedBuffer();
}
// Sends the seed command, then zeroes the request buffer and re-issues
// the command so intermediate copies lower in the stack are overwritten.
// Returns the result of the first (real) run.
bool FpSeedCommand::Run(int fd) {
bool ret = EcCommandRun(fd);
// Clear intermediate buffers throughout the stack. We expect running the
// command to fail since the SBP will reject the new seed.
ClearSeedBuffer();
EcCommandRun(fd);
return ret;
}
// Zero the seed field of the request using brillo::SecureMemset.
void FpSeedCommand::ClearSeedBuffer() {
brillo::SecureMemset(Req()->seed, 0, sizeof(Req()->seed));
}
// Thin wrapper over EcCommand::Run — presumably kept separate so tests
// can override the actual EC call; confirm against the header.
bool FpSeedCommand::EcCommandRun(int fd) {
return EcCommand::Run(fd);
}
}  // namespace biod
|
<reponame>sinoaube/course-scheduling-system<gh_stars>0
package controllers
import (
"log"
"net"
"github.com/cc14514/go-geoip2"
geoip2db "github.com/cc14514/go-geoip2-db"
)
// getOffset converts a 1-based page index and a page size into a
// SQL-style row offset.
// Fix: a pageIndex below 1 previously produced a negative offset; it is
// now clamped to 0 so downstream queries stay valid. Valid inputs are
// unaffected.
func getOffset(pageIndex int, pageSize int) int {
	if pageIndex < 1 {
		return 0
	}
	return (pageIndex - 1) * pageSize
}
// getIPLoc resolves the Chinese (zh-CN) country name for an IP address
// using the embedded GeoIP2 database. Returns "" when the database cannot
// be opened, the address cannot be parsed, or the lookup fails.
// Fix: the original discarded both errors with blank identifiers and
// could dereference a nil record returned by City().
func getIPLoc(addr string) string {
	db, err := geoip2db.NewGeoipDbByStatik()
	if err != nil {
		log.Println(err)
		return ""
	}
	defer func(db *geoip2.DBReader) {
		if err := db.Close(); err != nil {
			log.Println(err)
		}
	}(db)
	ip := net.ParseIP(addr)
	if ip == nil {
		return ""
	}
	record, err := db.City(ip)
	if err != nil {
		log.Println(err)
		return ""
	}
	if record == nil {
		return ""
	}
	return record.Country.Names["zh-CN"]
}
|
# Function to remove all vowels from a string
def remove_vowels(text):
    """Return ``text`` with every ASCII vowel (either case) removed.

    All other characters, including whitespace and punctuation, are kept
    in their original order.
    """
    vowels = 'aeiouAEIOU'
    # str.join over a generator avoids the O(n^2) cost of repeated
    # string concatenation in a loop.
    return ''.join(char for char in text if char not in vowels)
# Demo usage.
# Fix: `text` was previously undefined here, so this script raised a
# NameError at runtime; define a sample input before calling the helper.
text = "Hello World"
new_string = remove_vowels(text)
print("The new string with all the vowels removed is:", new_string)
<reponame>minodisk/vscode-bigquery
import { format as formatBytes } from "bytes";
import { createFlat, Output } from "core";
import { OutputChannel } from ".";
import { RunJobResponse } from "./runJobManager";
import { ErrorWithId } from "./runner";
import { StatusManager } from "./statusManager";
export type Renderer = ReturnType<typeof createRenderer>;
/**
 * Create a renderer that writes a BigQuery run result to an `Output`,
 * echoes row/billing summaries to the output channel, and reflects the
 * billing state through the status manager.
 *
 * Fix: removed leftover `console.log` debug statements from the routine
 * branch, along with dead commented-out code.
 */
export function createRenderer({
  outputChannel,
  statusManager,
}: {
  readonly outputChannel: OutputChannel;
  readonly statusManager: StatusManager;
}) {
  return {
    /**
     * Render a job response for `fileName`. Routine responses are written
     * directly; query responses emit headers, rows, and billing info.
     * Errors are re-thrown, tagged with the job id when one is known.
     */
    async render({
      fileName,
      output,
      response,
    }: {
      readonly fileName: string;
      readonly output: Output;
      readonly response: RunJobResponse;
    }) {
      if (response.type === "routine") {
        const { metadata, routine } = response;
        output.writeRoutine({
          routine,
          metadata,
        });
        return;
      }
      try {
        const { metadata, structs, table, page } = response;
        statusManager.loadBilled({ fileName });
        outputChannel.appendLine(`Result: ${structs.length} rows`);
        const bytes = formatBytes(
          parseInt(metadata.statistics.query.totalBytesBilled, 10)
        );
        outputChannel.appendLine(
          `Result: ${bytes} to be billed (cache: ${metadata.statistics.query.cacheHit})`
        );
        if (table.schema.fields === undefined) {
          throw new Error("fields is not defined");
        }
        const flat = createFlat(table.schema.fields);
        await output.writeHeads({ flat });
        await output.writeRows({
          structs,
          flat,
          metadata,
          table,
          page,
        });
        statusManager.succeedBilled({
          fileName,
          billed: { bytes, cacheHit: metadata.statistics.query.cacheHit },
        });
      } catch (err) {
        statusManager.errorBilled({ fileName });
        // Tag the error with the job id (when known) so it can be traced
        // back to the BigQuery job.
        if (response.jobId) {
          throw new ErrorWithId(err, response.jobId);
        } else {
          throw err;
        }
      }
    },
    dispose() {
      // do nothing
    },
  };
}
|
#!/usr/bin/env bash
# 1. Create folder
#    Fix: "mkDIR" is not a valid command (shell command names are
#    case-sensitive), so the original line always failed. -p also makes
#    the step idempotent when the folder already exists.
mkdir -p output
# 2. Download website recursively into output/
wget -r -P output/ http://example.com
<gh_stars>0
package com.twu.biblioteca;
import com.twu.Helpers.*;
import com.twu.Models.*;
/**
 * Console entry point for the Biblioteca library application.
 * Wires up the printer/reader helpers, seeds a demo in-memory database,
 * then runs a login loop followed by an interactive menu loop.
 */
public class BibliotecaApp {
private static Biblioteca biblioteca;
private static BibliotecaManager bibliotecaManager;
private static Authenticator authenticator;
private static InputReader inputReader;
private static Menu menu;
private static Printer printer;
// Bootstraps collaborators, seeds demo data, greets the user, starts login.
public static void main(String[] args) {
printer = new Printer();
inputReader = new InputReader(printer);
biblioteca = new Biblioteca("The Bangalore Public Library");
bibliotecaManager = new BibliotecaManager(biblioteca, printer);
authenticator = new Authenticator(bibliotecaManager);
addDB(biblioteca);
bibliotecaManager.giveWelcome();
login();
}
// Prompts for library number and password until the authenticator grants
// access, then hands control to the menu loop.
public static void login(){
boolean accessGranted = false;
while(!accessGranted){
printer.print(Messages.GET_USER_LIBRARYNUMBER);
String libraryNumber = inputReader.getInput();
printer.print(Messages.GET_USER_PASSWORD);
String password = inputReader.getInput();
accessGranted = authenticator.giveAccess(libraryNumber, password);
if(accessGranted){
init();
}else{
printer.printWithColor(Messages.INCORRECT_LOGIN, "RED");
}
}
}
// Menu loop; never returns. Unknown options surface as a
// NullPointerException from the command-map lookup and are reported as
// invalid input.
// NOTE(review): an explicit containsKey/null check would be clearer than
// using NullPointerException for control flow.
public static void init(){
menu = new Menu(bibliotecaManager, inputReader);
while (true) {
menu.printMenu();
String input = inputReader.getInput();
try{
menu.comandoLines.get(input).executeAction();
}
catch (NullPointerException e){
printer.print(Messages.INVALID_MENU_MESSAGE);
}
}
}
// Seeds the demo catalog: books, movies, two users (one librarian) and a
// single active loan (randomBook checked out to randomUser).
public static void addDB(Biblioteca biblioteca){
Book randomBook = new Book("Something Random", "<NAME>", 2018);
User randomUser = new User("Mariana", "Perez","123-123","password", "<EMAIL>", "555-55-55", false );
biblioteca.getBookList().add(new Book("Living la vida loca", "<NAME>", 1989));
biblioteca.getBookList().add(new Book("What life is all about", "Spongebob squarepants", 2010));
biblioteca.getBookList().add(randomBook);
biblioteca.getBookList().add(new Book("<NAME> ", "<NAME>", 1980));
biblioteca.getMovieList().add(new Movie("50 first Dates", "<NAME>", 2004, 5.0f));
biblioteca.getMovieList().add(new Movie("Random", "<NAME>", 2018, 2.0f));
biblioteca.getUserList().add(randomUser);
biblioteca.getUserList().add(new User("Librarian", "User","111-111","admin", "<EMAIL>", "666-55-55", true ));
randomBook.setCheckedOut();
Loan loan = new Loan(randomUser, randomBook);
biblioteca.getLoanList().add(loan);
}
}
|
import requests
from bs4 import BeautifulSoup

# Scrape all "article-title" elements from a page and print their text.
url = "http://example.com"

# Get the HTML page
r = requests.get(url)
# Fail fast on HTTP errors (4xx/5xx) instead of parsing an error page.
r.raise_for_status()

# Parse the HTML
soup = BeautifulSoup(r.text, 'html.parser')

# Get all elements with the class "article-title"
titles = soup.find_all(class_="article-title")

# Collect the title texts (comprehension replaces the manual
# accumulate-with-append loop).
titles_list = [title.text for title in titles]

# Print out the titles
for title in titles_list:
    print(title)
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Signs a framework with the provided identity
# $1 — path of the framework/binary to sign.
# No-op when no signing identity is set or code signing is disabled.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
# When parallel signing is enabled, run the job in the background; the
# top-level script waits for all background signs before exiting.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Cache/Cache.framework"
install_framework "${BUILT_PRODUCTS_DIR}/CacheManagerSwift/CacheManagerSwift.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "${BUILT_PRODUCTS_DIR}/Cache/Cache.framework"
install_framework "${BUILT_PRODUCTS_DIR}/CacheManagerSwift/CacheManagerSwift.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
#!/bin/bash
#
# Copyright (c) 2020, 2021 Red Hat, IBM Corporation and others.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
############################### v MiniKube v #################################
# One-time cluster setup: creates the autotune service account, RBAC and all
# CRDs in ${autotune_ns}. Also sets the global kubectl_cmd used by the other
# minikube_* functions. check_err (defined elsewhere) aborts on failure.
function minikube_first() {
	kubectl_cmd="kubectl -n ${autotune_ns}"
	echo "Info: One time setup - Create a service account to deploy autotune"
	${kubectl_cmd} apply -f ${AUTOTUNE_SA_MANIFEST}
	check_err "Error: Failed to create service account and RBAC"
	${kubectl_cmd} apply -f ${AUTOTUNE_OPERATOR_CRD}
	check_err "Error: Failed to create autotune CRD"
	${kubectl_cmd} apply -f ${AUTOTUNE_CONFIG_CRD}
	check_err "Error: Failed to create autotuneconfig CRD"
	${kubectl_cmd} apply -f ${AUTOTUNE_QUERY_VARIABLE_CRD}
	check_err "Error: Failed to create autotunequeryvariable CRD"
	${kubectl_cmd} apply -f ${AUTOTUNE_ROLE_MANIFEST}
	check_err "Error: Failed to create role"
	# Render the role-binding template with the target namespace before applying.
	sed -e "s|{{ AUTOTUNE_NAMESPACE }}|${autotune_ns}|" ${AUTOTUNE_RB_MANIFEST_TEMPLATE} > ${AUTOTUNE_RB_MANIFEST}
	${kubectl_cmd} apply -f ${AUTOTUNE_RB_MANIFEST}
	check_err "Error: Failed to create role binding"
	${kubectl_cmd} apply -f ${SERVICE_MONITOR_MANIFEST}
	check_err "Error: Failed to create service monitor for Prometheus"
}
# Deploy the autotune operator plus its config objects to the minikube cluster
# and print the service URL.
# NOTE(review): relies on ${kubectl_cmd} having been set by minikube_first,
# which minikube_start always calls first — do not call this standalone.
function minikube_deploy() {
	echo
	echo "Creating environment variable in minikube cluster using configMap"
	${kubectl_cmd} apply -f ${AUTOTUNE_CONFIGMAPS}/${cluster_type}-config.yaml
	echo
	echo "Deploying AutotuneConfig objects"
	${kubectl_cmd} apply -f ${AUTOTUNE_CONFIGS}
	echo
	echo "Deploying AutotuneQueryVariable objects"
	${kubectl_cmd} apply -f ${AUTOTUNE_QUERY_VARIABLES}
	echo "Info: Deploying autotune yaml to minikube cluster"
	# Replace autotune docker image in deployment yaml
	sed -e "s|{{ AUTOTUNE_IMAGE }}|${AUTOTUNE_DOCKER_IMAGE}|" ${AUTOTUNE_DEPLOY_MANIFEST_TEMPLATE} > ${AUTOTUNE_DEPLOY_MANIFEST}
	${kubectl_cmd} apply -f ${AUTOTUNE_DEPLOY_MANIFEST}
	sleep 2
	# check_running sets ${err}; non-zero means the pod never became ready.
	check_running autotune
	if [ "${err}" != "0" ]; then
		# Indicate deploy failed on error
		exit 1
	fi
	# Get the Autotune application port in minikube
	MINIKUBE_IP=$(minikube ip)
	AUTOTUNE_PORT=$(${kubectl_cmd} get svc autotune --no-headers -o=custom-columns=PORT:.spec.ports[*].nodePort)
	echo "Info: Access Autotune at http://${MINIKUBE_IP}:${AUTOTUNE_PORT}/listAutotuneTunables"
	echo
}
# Entry point: install autotune on a minikube cluster.
# Verifies prerequisites, performs one-time setup, then deploys.
function minikube_start() {
	echo
	echo "### Installing autotune for minikube"
	echo
	# Default the namespace to "monitoring" when the user did not provide one.
	autotune_ns="${autotune_ns:-monitoring}"
	check_prometheus_installation
	minikube_first
	minikube_deploy
}
# Verify minikube prerequisites: kubectl must exist and support kustomize,
# and a Prometheus pod must be running somewhere in the cluster.
# Exits the script when Prometheus is missing.
function check_prometheus_installation() {
	echo
	echo "Info: Checking pre requisites for minikube..."
	kubectl_tool=$(which kubectl)
	check_err "Error: Please install the kubectl tool"
	# Check to see if kubectl supports kustomize
	kubectl kustomize --help >/dev/null 2>/dev/null
	check_err "Error: Please install a newer version of kubectl tool that supports the kustomize option (>=v1.12)"
	kubectl_cmd="kubectl"
	# NOTE(review): matches the hard-coded pod name "prometheus-k8s-1"; this
	# breaks if Prometheus is deployed under a different naming scheme — verify.
	prometheus_pod_running=$(${kubectl_cmd} get pods --all-namespaces | grep "prometheus-k8s-1")
	if [ "${prometheus_pod_running}" == "" ]; then
		echo "Prometheus is not running, use 'scripts/prometheus_on_minikube.sh' to install."
		exit 1
	fi
	echo "Prometheus is installed and running."
}
# Tear down everything minikube_start created, in reverse dependency order
# (workload first, CRDs last), then remove generated manifests and downloads.
# Deletion errors are suppressed so termination is idempotent.
function minikube_terminate() {
	# If autotune_ns was not set by the user
	if [ -z "$autotune_ns" ]; then
		autotune_ns="monitoring"
	fi
	echo -n "### Removing autotune for minikube"
	kubectl_cmd="kubectl -n ${autotune_ns}"
	echo
	echo "Removing autotune"
	${kubectl_cmd} delete -f ${AUTOTUNE_DEPLOY_MANIFEST} 2>/dev/null
	echo
	echo "Removing autotune service account"
	${kubectl_cmd} delete -f ${AUTOTUNE_SA_MANIFEST} 2>/dev/null
	echo
	echo "Removing autotune role"
	${kubectl_cmd} delete -f ${AUTOTUNE_ROLE_MANIFEST} 2>/dev/null
	echo
	echo "Removing autotune rolebinding"
	${kubectl_cmd} delete -f ${AUTOTUNE_RB_MANIFEST} 2>/dev/null
	echo
	echo "Removing autotune serviceMonitor"
	${kubectl_cmd} delete -f ${SERVICE_MONITOR_MANIFEST} 2>/dev/null
	echo
	echo "Removing AutotuneConfig objects"
	${kubectl_cmd} delete -f ${AUTOTUNE_CONFIGS} 2>/dev/null
	echo
	echo "Removing AutotuneQueryVariable objects"
	${kubectl_cmd} delete -f ${AUTOTUNE_QUERY_VARIABLES} 2>/dev/null
	echo
	echo "Removing Autotune configmap"
	${kubectl_cmd} delete -f ${AUTOTUNE_CONFIGMAPS}/${cluster_type}-config.yaml 2>/dev/null
	echo
	echo "Removing Autotune CRD"
	${kubectl_cmd} delete -f ${AUTOTUNE_OPERATOR_CRD} 2>/dev/null
	echo
	echo "Removing AutotuneConfig CRD"
	${kubectl_cmd} delete -f ${AUTOTUNE_CONFIG_CRD} 2>/dev/null
	echo
	echo "Removing AutotuneQueryVariables CRD"
	${kubectl_cmd} delete -f ${AUTOTUNE_QUERY_VARIABLE_CRD} 2>/dev/null
	# Clean up generated manifests and cached downloads.
	rm ${AUTOTUNE_DEPLOY_MANIFEST}
	rm ${AUTOTUNE_RB_MANIFEST}
	rm -rf minikube_downloads
}
|
import { ModuleWithProviders } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { BasketComponent } from './basket/basket.component';
import { LoginComponent } from './login/login.component';
import { ProfileComponent } from './profile/profile.component';
import { BooksComponent } from './books/books.component';
import { BookDetailsComponent } from './book-details/book-details.component';
import { HomeComponent } from './home/home.component';
import { ContactComponent } from './contact/contact.component';
import { LoggedUserGuards } from './services/logged-user-guards';
// Application route table. Order is significant: parameterised and literal
// paths come first; the empty path redirects to /home.
const appRoutes: Routes = [
  { path: 'home',     component: HomeComponent },
  { path: 'books',    component: BooksComponent },
  { path: 'book/:id', component: BookDetailsComponent },
  // Basket requires an authenticated user.
  { path: 'basket',   component: BasketComponent, canActivate: [LoggedUserGuards] },
  { path: 'profile',  component: ProfileComponent },
  { path: 'login',    component: LoginComponent },
  { path: 'contact',  component: ContactComponent },
  { path: '',         redirectTo: 'home', pathMatch: 'full' }
];

// Router module pre-configured with the application routes.
export const routing: ModuleWithProviders = RouterModule.forRoot(appRoutes);
|
<reponame>abhilashkrishnan/synaptra
package org.synaptra.mlib;
/**
 * Placeholder for a decision-tree learner in the synaptra ML library.
 * No behavior is implemented yet.
 */
public class DecisionTree {
}
|
<reponame>MarcelBraghetto/AndroidNanoDegree2016<filename>AndroidNanoDegreeProject2/PopularMovies/src/main/java/com/lilarcor/popularmovies/framework/foundation/network/models/MovieVideosDTO.java<gh_stars>0
package com.lilarcor.popularmovies.framework.foundation.network.models;
import android.support.annotation.Nullable;
import com.google.gson.annotations.SerializedName;
import com.lilarcor.popularmovies.framework.movies.models.MovieVideo;
/**
* Created by <NAME> on 8/08/15.
*
* DTO representing the data response for a given
* movie when requesting the videos for that movie.
*/
public class MovieVideosDTO {
  // Populated reflectively by Gson from the "results" JSON array;
  // never assigned in code, hence the unused-warning suppression.
  @SerializedName("results")
  @SuppressWarnings("unused")
  private MovieVideo[] mVideos;

  /**
   * @return the videos parsed from the response, or null when the
   *         "results" field was absent from the JSON payload.
   */
  @Nullable
  public MovieVideo[] getVideos() {
    return mVideos;
  }
}
#!/bin/sh
# Grow the root filesystem on each rock64 node, highest-numbered first.
for node in pc2 pc1 pc0
do
    echo "resizing $node"
    ssh "$node" 'sudo resize_rock64.sh'
done
|
<gh_stars>0
package befaster.solutions.HLO;
import org.junit.Test;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
 * Unit tests for HelloSolution#hello.
 * Contract under test: null and empty names both degrade to "Hello, !";
 * any other name is greeted verbatim.
 */
public class HelloSolutionTest {
  private HelloSolution helloSolution = new HelloSolution();

  @Test
  public void shouldReturnHelloWorldToEmpty() {
    assertThat(helloSolution.hello(""), is("Hello, !"));
  }

  @Test
  public void shouldReturnHelloWorldToNull() {
    // null must not throw — it is treated like an empty name.
    assertThat(helloSolution.hello(null), is("Hello, !"));
  }

  @Test
  public void shouldReturnHelloWorldToCraftsman() {
    assertThat(helloSolution.hello("Craftsman"), is("Hello, Craftsman!"));
  }

  @Test
  public void shouldReturnHelloWorldToMtX() {
    // Names containing spaces and punctuation pass through unchanged.
    assertThat(helloSolution.hello("Mr. X"), is("Hello, Mr. X!"));
  }
}
|
<gh_stars>0
package com.weikwer.market.aop;
import com.weikwer.market.bean.Users;
import com.weikwer.market.common.bean.Result;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.HttpServletRequest;
@Aspect
@Component
/**
 * AOP aspect enforcing login checks on controller methods.
 * Methods annotated with @IsLogin require any logged-in session; methods
 * annotated with @IsManager additionally require a manager user (the
 * user-type check is currently disabled, see below). When the check passes,
 * any Users-typed argument is replaced by the session's "user" attribute.
 */
@Aspect
@Component
public class LoginHelper {

  /** Matches controller methods annotated with @IsLogin. */
  @Pointcut(value = "execution(public * com.weikwer.market.controller.*Controller.*(..)) && @annotation(com.weikwer.market.annotation.IsLogin)")
  public void userPointcut(){
  }

  /** Matches controller methods annotated with @IsManager. */
  @Pointcut(value = "execution(public * com.weikwer.market.controller.*Controller.*(..)) && @annotation(com.weikwer.market.annotation.IsManager)")
  public void managerPointcut(){
  }

  /**
   * Proceeds only when the session attribute "login" is a positive int;
   * otherwise short-circuits with a failure Result.
   */
  @Around("userPointcut()")
  public Object isLogin(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
    ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
    if (requestAttributes == null) {
      // No request bound to this thread (e.g. async dispatch) — treat as not logged in.
      return new Result<>(0).setDescription("未登录或权限不够");
    }
    HttpServletRequest request = requestAttributes.getRequest();
    Object loginO = request.getSession().getAttribute("login");
    int logins = loginO != null ? (int) loginO : 0;
    if (logins > 0) {
      Object[] args = proceedingJoinPoint.getArgs();
      for (int i = 0; i < args.length; i++) {
        // instanceof is null-safe; the original args[i].getClass() threw
        // NullPointerException whenever a controller argument was null.
        if (args[i] instanceof Users) {
          args[i] = (Users) request.getSession().getAttribute("user");
        }
      }
      return proceedingJoinPoint.proceed(args);
    }
    return new Result<>(0).setDescription("未登录或权限不够");
  }

  /**
   * Manager gate: requires a logged-in session and a non-null session user.
   * NOTE(review): the user-type (==2) manager check is intentionally commented
   * out below, so any logged-in user currently passes — confirm before release.
   */
  @Around("managerPointcut()")
  public Object ManagerLogin(ProceedingJoinPoint proceedingJoinPoint) throws Throwable {
    ServletRequestAttributes requestAttributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
    if (requestAttributes == null) {
      return new Result<>(0);
    }
    HttpServletRequest request = requestAttributes.getRequest();
    Object loginO = request.getSession().getAttribute("login");
    int logins = loginO != null ? (int) loginO : 0;
    if (logins > 0) {
      Users users = (Users) request.getSession().getAttribute("user");
      if (users == null) return new Result<>(0).setDescription("非管理员,无此操作权限");
      //if(user==null || users.getUserType()!=2) return new Result<>(0).setDescription("非管理员,无此操作权限");
      Object[] args = proceedingJoinPoint.getArgs();
      for (int i = 0; i < args.length; i++) {
        // Null-safe substitution of the session user into Users-typed parameters.
        if (args[i] instanceof Users) {
          args[i] = users;
        }
      }
      return proceedingJoinPoint.proceed(args);
    }
    return new Result<>(0);
  }
}
|
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (C) 2014-2018 <NAME> (aka Lothrazar)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.lothrazar.cyclicmagic.block.clockredstone;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.lothrazar.cyclicmagic.ModCyclic;
import com.lothrazar.cyclicmagic.block.clockredstone.TileEntityClock.Fields;
import com.lothrazar.cyclicmagic.gui.GuiTextFieldInteger;
import com.lothrazar.cyclicmagic.gui.button.ButtonTileEntityField;
import com.lothrazar.cyclicmagic.gui.core.ButtonTriggerWrapper.ButtonTriggerType;
import com.lothrazar.cyclicmagic.gui.core.CheckboxFacingComponent;
import com.lothrazar.cyclicmagic.gui.core.GuiBaseContainer;
import com.lothrazar.cyclicmagic.net.PacketTileSetField;
import com.lothrazar.cyclicmagic.util.Const;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.GuiTextField;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.util.EnumFacing;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
 * Client GUI for the redstone clock tile entity. Exposes three integer tile
 * fields — TON (on-duration), TOFF (off-delay) and POWER (output strength) —
 * via +/- buttons and validated integer textboxes, plus one checkbox per
 * block face that toggles output on that side.
 */
public class GuiClock extends GuiBaseContainer {
  // Button size and layout constants for the grid of +/- buttons.
  int w = 18, h = 15;
  int rowOffset = Const.PAD / 2;
  int colOffset = Const.PAD / 4;
  int xCol1 = 4;
  int xCol2 = xCol1 + w + colOffset;
  int xColText = xCol2 + 34;
  int xCol3 = xColText + 24;
  int xCol4 = xCol3 + w + colOffset;
  int yRow1 = Const.PAD * 3 + rowOffset;
  int yRow2 = yRow1 + h + colOffset;
  int yRow3 = yRow2 + h + colOffset;
  int xColFacing = xCol4 + w + Const.PAD;
  // One checkbox per EnumFacing, each bound to the matching tile field.
  CheckboxFacingComponent checkboxes;
  TileEntityClock tileClock;

  public GuiClock(InventoryPlayer inventoryPlayer, TileEntityClock tileEntity) {
    super(new ContainerClock(inventoryPlayer, tileEntity), tileEntity);
    tileClock = (TileEntityClock) this.tile;
    this.fieldRedstoneBtn = Fields.REDSTONE.ordinal();
    checkboxes = new CheckboxFacingComponent(this);
    checkboxes.setX(140);
    checkboxes.setY(Const.PAD * 4);
    // Map each facing to its tile-entity field so checkbox toggles sync to the TE.
    Map<EnumFacing, Integer> facingFields = new HashMap<EnumFacing, Integer>();
    for (EnumFacing side : EnumFacing.values()) {
      switch (side) {
        case DOWN:
          facingFields.put(side, Fields.D.ordinal());
          break;
        case EAST:
          facingFields.put(side, Fields.E.ordinal());
          break;
        case NORTH:
          facingFields.put(side, Fields.N.ordinal());
          break;
        case SOUTH:
          facingFields.put(side, Fields.S.ordinal());
          break;
        case UP:
          facingFields.put(side, Fields.U.ordinal());
          break;
        case WEST:
          facingFields.put(side, Fields.W.ordinal());
          break;
      }
    }
    checkboxes.setFacingFields(facingFields);
  }

  /**
   * Builds the widget layout: a row of -5/-1/textbox/+1/+5 controls for TON,
   * the same for TOFF, a -1/+1 pair for POWER, then the facing checkboxes.
   */
  @Override
  public void initGui() {
    super.initGui();
    int id = 30;
    int xColTextbox = xCol2 + 22;
    addButton(xCol1, yRow1, Fields.TON.ordinal(), -5, "duration");
    addButton(xCol2, yRow1, Fields.TON.ordinal(), -1, "duration");
    // here
    GuiTextFieldInteger txtPower = addTextbox(id++, xColTextbox, yRow1, tile.getField(Fields.TON.ordinal()) + "", 4);
    txtPower.setMaxVal(9999);
    txtPower.setMinVal(1);
    txtPower.height = 16;
    txtPower.width = 32;
    txtPower.setTileFieldId(TileEntityClock.Fields.TON.ordinal());
    //
    addButton(xCol3, yRow1, Fields.TON.ordinal(), 1, "duration");
    addButton(xCol4, yRow1, Fields.TON.ordinal(), 5, "duration");
    addButton(xCol1, yRow2, Fields.TOFF.ordinal(), -5, "delay");
    addButton(xCol2, yRow2, Fields.TOFF.ordinal(), -1, "delay");
    //
    GuiTextFieldInteger txtTOFF = addTextbox(id++, xColTextbox, yRow2, tile.getField(Fields.TOFF.ordinal()) + "", 4);
    txtTOFF.setMaxVal(9999);
    txtTOFF.setMinVal(1);
    txtTOFF.height = 16;
    txtTOFF.width = 32;
    txtTOFF.setTileFieldId(TileEntityClock.Fields.TOFF.ordinal());
    //
    addButton(xCol3, yRow2, Fields.TOFF.ordinal(), 1, "delay");
    addButton(xCol4, yRow2, Fields.TOFF.ordinal(), 5, "delay");
    addButton(xCol2, yRow3, Fields.POWER.ordinal(), -1, "power");
    addButton(xCol3, yRow3, Fields.POWER.ordinal(), 1, "power");
    //checkboxes
    checkboxes.initGui();
  }

  // Creates an integer-only textbox, registers it for drawing/key handling.
  private GuiTextFieldInteger addTextbox(int id, int x, int y, String text, int maxLen) {
    int width = 10 * maxLen, height = 20;
    GuiTextFieldInteger txt = new GuiTextFieldInteger(id, this.fontRenderer, x, y, width, height);
    txt.setMaxStringLength(maxLen);
    txt.setText(text);
    txtBoxes.add(txt);
    return txt;
  }

  /**
   * After a +/- button press, mirror the new value into the textbox that is
   * bound to the same tile field so the display stays in sync.
   */
  @Override
  protected void actionPerformed(GuiButton button) throws IOException {
    super.actionPerformed(button);
    if (button instanceof ButtonTileEntityField) {
      ButtonTileEntityField btn = (ButtonTileEntityField) button;
      for (GuiTextField t : txtBoxes) { //push value to the matching textbox
        GuiTextFieldInteger txt = (GuiTextFieldInteger) t;
        if (txt.getTileFieldId() == btn.getFieldId()) {
          int val = btn.getValue() + txt.getCurrent();
          txt.setText(val + "");
        }
      }
    }
  }

  // Adds a +/- increment button bound to a tile field, with disable triggers
  // so the button greys out at the field's min/max bound.
  private void addButton(int x, int y, int field, int value, String tooltip) {
    ButtonTileEntityField btn = new ButtonTileEntityField(field + 50,
        this.guiLeft + x,
        this.guiTop + y, this.tile.getPos(), field, value,
        w, h);
    if (value > 0) {
      btn.displayString = "+" + value;
      if (field == Fields.POWER.ordinal()) {
        //TODO: setup/find magic numbers for redstone, 15 is max
        this.registerButtonDisableTrigger(btn, ButtonTriggerType.EQUAL, field, 15);
      }
    }
    else {
      btn.displayString = "" + value;
      // POWER may go to 0; TON/TOFF bottom out at 1.
      int min = (field == Fields.POWER.ordinal()) ? 0 : 1;
      this.registerButtonDisableTrigger(btn, ButtonTriggerType.EQUAL, field, min);
    }
    btn.setTooltip("tile.clock." + tooltip);
    this.addButton(btn);
  }

  @SideOnly(Side.CLIENT)
  @Override
  protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY) {
    super.drawGuiContainerForegroundLayer(mouseX, mouseY);
    for (GuiTextField txt : txtBoxes) {
      if (txt != null) {
        txt.drawTextBox();
      }
    }
    // POWER has no textbox; render its current value as plain text.
    this.drawString("" + this.tile.getField(Fields.POWER.ordinal()), xColText, yRow3 + rowOffset);
  }

  /**
   * Validates textbox edits: a keystroke is kept only when the resulting text
   * parses as an int within [min, max]; the value is then applied locally and
   * sent to the server. Invalid non-empty input is rolled back.
   */
  @Override
  protected void keyTyped(char pchar, int keyCode) throws IOException {
    super.keyTyped(pchar, keyCode);
    for (GuiTextField t : txtBoxes) {
      GuiTextFieldInteger txt = (GuiTextFieldInteger) t;
      String oldval = txt.getText();
      txt.textboxKeyTyped(pchar, keyCode);
      String newval = txt.getText();
      boolean yes = false;
      try {
        int val = Integer.parseInt(newval);
        if (val <= txt.getMaxVal() && val >= txt.getMinVal()) {
          yes = true;
          //also set it clientisde to hopefully prevent desycn
          tile.setField(txt.getTileFieldId(), val);
          ModCyclic.network.sendToServer(new PacketTileSetField(tile.getPos(), txt.getTileFieldId(), val));
        }
      }
      catch (NumberFormatException e) {}
      if (!yes && !newval.isEmpty()) {//allow empty string in case user is in middle of deleting all and retyping
        txt.setText(oldval);//rollback to the last valid value. ex if they type 'abc' revert to valid
      }
    }
  }
}
|
<reponame>GrantMeStrength/SkyTime
//
// Universe.h
// skytime
//
// Time Calculations..
#import <Foundation/Foundation.h>
// Class-level astronomy/time helpers. Values are computed by calculateTime
// and read back through the string getters; configure the observer with
// setLocation:/set24Hour: first.
@interface Universe : NSObject
// Recompute all derived time values from the current clock; call before the getters.
+(void) calculateTime;
+(NSString *) getJulian;
+(NSString *) getModifiedJulian;
+(NSString *) getTime;
+(NSString *) getDate;
// Local sidereal time (depends on the configured longitude).
+(NSString *) getLST;
+(NSString *) getUT;
+(NSString *) getLocation;
+(NSString *) getAMPM;
+(NSString *) getGMTDelta;
+(NSString *)getHour;
+(NSString *)getMinute;
+(NSString *)getSecond;
// Numeric clock components (for drawing clock hands etc.).
+(int) getHourD;
+(int) getMinuteD;
+(int) getSecondD;
// Observer position in decimal degrees: latitude then longitude.
+(void) setLocation: (double) lat : (double) lon;
+(void) set24Hour: (BOOL) is24Hour;
@end
|
#!/bin/sh
# Rebuild the CrewMate server jar and launch it, forwarding any CLI args.
# Abort immediately if any step (clean, build, cd) fails.
set -e
echo "Removing Existing Builds!!!"
# -f: do not fail on the first run, when no jar exists yet.
rm -f build/libs/*.jar
echo "Building New Build!!!"
./gradlew shadowJar
echo "Starting Server!!!"
cd build/libs
java -jar CrewMate-*.jar "$@"
#!/bin/bash
# Update the ksbot checkout and its bundled yt-dlp, then restart via pm2.
# Quote the path so directories with spaces work; bail out if cd fails.
cd "$(dirname "$(dirname "$0")")" || exit 1
git pull
# Clone yt-dlp only on first run; the original cloned unconditionally and
# failed noisily on every subsequent run because the directory already existed.
if [ ! -d yt-dlp ]; then
	git clone https://github.com/yt-dlp/yt-dlp.git
fi
npm install
cd yt-dlp || exit 1
git pull
pm2 restart ksbot
|
<gh_stars>0
package org.jooby.issues;
import static org.junit.Assert.assertNotEquals;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import org.jooby.Deferred;
import org.jooby.test.ServerFeature;
import org.junit.Test;
import com.google.inject.Key;
import com.google.inject.name.Names;
/**
 * Regression test for issue 488: a Deferred must resolve on the named
 * executor ("ste"), i.e. on a different thread than the request caller.
 */
public class Issue488c extends ServerFeature {
  // Instance initializer configures the app under test.
  {
    // Route deferred execution to the executor registered under "ste".
    executor("ste");
    use((env, conf, binder) -> {
      binder.bind(Key.get(Executor.class, Names.named("ste")))
          .toInstance(Executors.newSingleThreadExecutor());
    });
    // Responds with "<caller thread>:<resolver thread>" for both API styles.
    get("/488", req -> {
      return new Deferred(deferred -> {
        deferred.resolve(deferred.callerThread() + ":" + Thread.currentThread().getName());
      });
    });
    get("/488/promise", promise((req, deferred) -> {
      deferred.resolve(deferred.callerThread() + ":" + Thread.currentThread().getName());
    }));
  }

  @Test
  public void deferredWithExecutorReference() throws Exception {
    // Caller and resolver thread names must differ for both endpoints.
    request()
        .get("/488")
        .expect(rsp -> {
          String[] threads = rsp.split(":");
          assertNotEquals(threads[0], threads[1]);
        });
    request()
        .get("/488/promise")
        .expect(rsp -> {
          String[] threads = rsp.split(":");
          assertNotEquals(threads[0], threads[1]);
        });
  }
}
|
#!/usr/bin/env bash
set -ex
# Perform a release.
# See the release process documentation for details.
# Work in a throwaway directory so the release runs from a clean checkout.
cd "$(mktemp -d)"
git clone git@github.com:mesosphere/dcos-e2e.git
cd dcos-e2e
# Isolated Python 3 environment for the release tooling.
virtualenv -p python3 release
source release/bin/activate
# Install the project with dev extras, then run the release automation.
pip install --editable .[dev]
python admin/release.py
|
<gh_stars>0
"use strict";
// Babel-compiled CommonJS interop module: bundles ./data and ./stats into a
// single default export. Generated code — edit the ES-module source instead.
Object.defineProperty(exports, "__esModule", {
  value: true
});
var _data = require("./data");
var _data2 = _interopRequireDefault(_data);
var _stats = require("./stats");
var _stats2 = _interopRequireDefault(_stats);
// Wraps plain CommonJS exports so `import x from` consumers see `.default`.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = { data: _data2.default, stats: _stats2.default };
<filename>src/index.js
#!/usr/bin/env node
// Interactive CLI that writes a framework-appropriate tsconfig.json into the
// current working directory.
const inquirer = require("inquirer");
const { writeFileSync } = require('fs');
const { join } = require('path');
const reactTsConfig = require('./config/tsconfig.react.json')
const reactNativeTsConfig = require('./config/tsconfig.react-native.json')
const nodeTsConfig = require('./config/tsconfig.node.json')

// Map each prompt choice to its config template (replaces the if/else chain
// and keeps the prompt choices in sync with the supported frameworks).
const CONFIGS = {
  'react': reactTsConfig,
  'react-native': reactNativeTsConfig,
  'node': nodeTsConfig,
};

inquirer
  .prompt([
    {type: 'list' , message: 'Pick the framework you are using', name: 'framework' ,choices: Object.keys(CONFIGS)}
  ])
  .then(({ framework }) => {
    // Pretty-print (2-space indent) so the generated tsconfig.json is
    // human-readable; the original wrote a single minified line.
    const tsConfigToWrite = JSON.stringify(CONFIGS[framework], null, 2);
    // path.join handles the separator portably instead of string concat.
    writeFileSync(join(process.cwd(), 'tsconfig.json'), tsConfigToWrite, 'utf8');
  })
  .catch((error) => {
    // Surface failures instead of swallowing them silently.
    if (error.isTtyError) {
      // Prompt couldn't be rendered in the current environment
      console.error('Interactive prompt is not supported in this terminal.');
    } else {
      // Something else went wrong
      console.error('Failed to write tsconfig.json:', error.message);
    }
    process.exitCode = 1;
  });
|
<filename>cipher.java
// Caesar-cipher console fragment: prompts for mode (e/d), shift and message,
// then prints the shifted message. Non-letters other than spaces are dropped.
Scanner scanner = new Scanner(System.in);
String[] letters = {"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"};
ArrayList<String> message = new ArrayList<String>();
List<String> arrCheck = Arrays.asList(letters);
System.out.println("e/d?");
String eOrD = scanner.nextLine();
System.out.println("#-1+25?");
int shift = scanner.nextInt();
// nextInt() leaves the trailing newline in the buffer; consume it so the
// following nextLine() reads the actual message instead of an empty string.
scanner.nextLine();
System.out.println("Message?");
String msg = scanner.nextLine();
// Decryption is encryption with the opposite shift.
if (eOrD.equalsIgnoreCase("d")) {
    shift = -shift;
}
for (int a1 = 0; a1 < msg.length(); a1++) {
    // Compare as a String: the letters table holds Strings, so the original
    // char-vs-String comparisons (contains/equals) could never match.
    String letN = String.valueOf(msg.charAt(a1));
    if (letN.equals(" ")) {
        message.add(" ");
    } else if (arrCheck.contains(letN)) {
        // Modular arithmetic replaces the old manual wrap-around loops, which
        // mis-wrapped at index 25 and only handled positive shifts.
        int index = arrCheck.indexOf(letN);
        index = ((index + shift) % 26 + 26) % 26;
        message.add(letters[index]);
    }
}
// Print once at the end instead of once per processed character.
System.out.println(message);
|
<reponame>jpic/pinax
from django.contrib import admin
from pinax.apps.account.models import Account, OtherServiceInfo, PasswordReset
class PasswordResetAdmin(admin.ModelAdmin):
    # Columns shown on the PasswordReset changelist in the Django admin.
    list_display = ["user", "temp_key", "timestamp", "reset"]

# Account and OtherServiceInfo use the default ModelAdmin; PasswordReset
# gets the customised changelist above.
admin.site.register(Account)
admin.site.register(OtherServiceInfo)
admin.site.register(PasswordReset, PasswordResetAdmin)
# Mixin that syntax-checks every Ruby file under the current directory.
module DeepCheck
  # Runs `ruby -c` over each .rb file found below the working directory.
  # Returns true when the whole pipeline exits successfully.
  #
  # NOTE(review): the original body executed `include DeepCheck` inside this
  # method, which raises NoMethodError when called on a plain instance
  # (Module#include is not available there); that line was removed. The find
  # pattern was also tightened to `-name "*.rb"` — the old grep regex
  # ("/.*\.*\.rb$") matched unintended paths.
  def check
    system('find . -name "*.rb" -print0 | xargs -0 -n 1 ruby -c')
  end
end
#! /usr/bin/env python
# -*- coding:UTF-8
# Python 2 UDP multicast receiver: joins a multicast group on port 10000,
# echoes each datagram to stderr and replies 'ack' to the sender.
# NOTE(review): 172.16.58.3 is not in the multicast range 224.0.0.0/4 —
# verify the group address before use (it may be a sanitised placeholder).
import socket
import struct
import sys
multicast_group = '172.16.58.3'
server_address = ('',10000)
sock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
sock.bind(server_address)
# Join the socket to the multicast group (IP_ADD_MEMBERSHIP takes the packed
# group address plus the interface; INADDR_ANY = default interface).
group = socket.inet_aton(multicast_group)
mreq = struct.pack('4sL',group,socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
# Receive/acknowledge loop (blocks forever)
while True:
    print >>sys.stderr,"\nwaiting to receive message"
    data, address = sock.recvfrom(1024)
    print >>sys.stderr, "received %s bytes from %s"%\
    (len(data),address)
    print >> sys.stderr,data
    print >> sys.stderr,"sending acknowledgement to",address
    sock.sendto('ack',address)
|
<reponame>parmam/e-commerce<filename>src/components/dashboard/Budget.js
import {
Avatar,
Box,
Card,
CardContent,
Grid,
Typography
} from '@material-ui/core'
import MoneyIcon from '@material-ui/icons/Money'
import { red } from '@material-ui/core/colors'
import { useSelector } from 'react-redux'
import { useEffect, useState } from 'react'
const Budget = () => {
const products = useSelector(store => store.products.allProducts)
const [stock, setStocks] = useState(0)
useEffect(() => {
let stocks = 0
products.map((product) => {
stocks += product.stock
return stocks
}
)
setStocks(stocks)
}, [])
return (
<Card
sx={{ height: '100%' }}
>
<CardContent>
<Grid
container
spacing={3}
sx={{ justifyContent: 'space-between' }}
>
<Grid item>
<Typography
color='textSecondary'
gutterBottom
variant='h6'
>
STOCK
</Typography>
<Typography
color='textPrimary'
variant='h3'
>{`${stock} productos`}
</Typography>
</Grid>
<Grid item>
<Avatar
sx={{
backgroundColor: red[600],
height: 56,
width: 56
}}
>
<MoneyIcon />
</Avatar>
</Grid>
</Grid>
<Box
sx={{
pt: 2,
display: 'flex',
alignItems: 'center'
}}
/>
</CardContent>
</Card>
)
}
export default Budget
|
// These are the pages you can go to.
// They are all wrapped in the App component, which should contain the navbar etc
// See http://blog.mxstbr.com/2016/01/react-apps-with-pages for more information
// about the code splitting business
import { getAsyncInjectors } from 'utils/asyncInjectors';
// Shared failure handler for all dynamic route imports below.
const errorLoading = (err) => {
  console.error('Dynamic page loading failed', err); // eslint-disable-line no-console
};
// Adapts react-router's async callback API: cb(err, Component).
const loadModule = (cb) => (componentModule) => {
  cb(null, componentModule.default);
};
/**
 * Builds the react-router route tree with code splitting: each route lazily
 * imports its reducers/sagas, injects them into the store, then renders.
 * Several paths alias the home page to support different deployment prefixes.
 */
export default function createRoutes(store) {
  // Create reusable async injectors using getAsyncInjectors factory
  const { injectReducer, injectSagas } = getAsyncInjectors(store); // eslint-disable-line no-unused-vars

  // Shared loader for every home-page alias: reducers must be injected
  // before sagas and before the component renders.
  function homePageDisplay(nextState, cb) {
    const importModules = Promise.all([
      import('containers/Lists/reducer'),
      import('containers/SavedSearch/reducer'),
      import('containers/HomePage/sagas'),
      import('containers/Lists/sagas'),
      import('containers/SavedSearch/sagas'),
      import('containers/HomePage'),
    ]);
    const renderRoute = loadModule(cb);
    // Destructuring order must match the Promise.all order above.
    importModules.then(([listReducer, savedSearchReducer, sagas, listSaga, savedSearchSaga, component]) => {
      injectReducer('lists', listReducer.default);
      injectReducer('savedSearch', savedSearchReducer.default);
      injectSagas(sagas.default);
      injectSagas(listSaga.default);
      injectSagas(savedSearchSaga.default);
      renderRoute(component);
    });
    importModules.catch(errorLoading);
  }

  const childRoutes = [
    // Home-page aliases for the various URL prefixes the app is served under.
    {
      path: '/s/leadbook-database-react',
      name: 'home',
      getComponent: homePageDisplay,
    },
    {
      path: '/',
      name: 'home',
      getComponent: homePageDisplay,
    },{
      path: '/leadbook-database-react',
      name: 'home',
      getComponent: homePageDisplay,
    },
    {
      path: '/index.php/s/leadbook-database-react',
      name: 'home',
      getComponent: homePageDisplay,
    },
    {
      path: '/app/v3/organization/:id',
      name: 'organization',
      getComponent(nextState, cb) {
        const importModules = Promise.all([
          import('containers/OrganizationView/reducer'),
          import('containers/OrganizationView/sagas'),
          import('containers/HomePage/sagas'),
          import('containers/OrganizationView'),
        ]);
        const renderRoute = loadModule(cb);
        importModules.then(([reducer, sagas, homepageSaga, component]) => {
          injectReducer('organizationView', reducer.default);
          injectSagas(sagas.default);
          injectSagas(homepageSaga.default);
          renderRoute(component);
        });
        importModules.catch(errorLoading);
      },
    },
    // Fallback: unknown paths render the home page rather than a 404 view.
    {
      path: '*',
      name: 'notfound',
      getComponent: homePageDisplay,
    },
  ];

  return {
    // Root route: loads the App shell plus app-wide reducers/sagas.
    getComponent(nextState, cb) {
      const importModules = Promise.all([
        import('containers/HomePage/reducer'),
        import('containers/Integrations/reducer'),
        import('containers/Integrations/sagas'),
        import('containers/App/sagas'),
        import('containers/App'),
      ]);
      const renderRoute = loadModule(cb);
      importModules.then(
        ([
          homepageReducer,
          integrationReducer,
          integrationSaga,
          sagas,
          component,
        ]) => {
          injectReducer('homePage', homepageReducer.default);
          injectReducer('integrations', integrationReducer.default);
          injectSagas(integrationSaga.default);
          injectSagas(sagas.default);
          renderRoute(component);
        }
      );
      importModules.catch(errorLoading);
    },
    childRoutes,
  };
}
|
# Installs the KFServing stack on an existing Kubernetes cluster:
# a minimal Istio, Knative Serving, cert-manager, then KFServing itself.
# NOTE(review): heredoc YAML indentation below was reconstructed after
# whitespace mangling — verify against upstream istio-minimal-operator.yaml.
set -e
export ISTIO_VERSION=1.6.2
export KNATIVE_VERSION=v0.15.0
export KFSERVING_VERSION=v0.4.1
curl -L https://git.io/getLatestIstio | sh -
cd istio-${ISTIO_VERSION}
# Create istio-system namespace
cat <<EOF | kubectl apply -f -
apiVersion: v1
kind: Namespace
metadata:
  name: istio-system
  labels:
    istio-injection: disabled
EOF
cat << EOF > ./istio-minimal-operator.yaml
apiVersion: install.istio.io/v1alpha1
kind: IstioOperator
spec:
  values:
    global:
      proxy:
        autoInject: disabled
      useMCP: false
      # The third-party-jwt is not enabled on all k8s.
      # See: https://istio.io/docs/ops/best-practices/security/#configure-third-party-service-account-tokens
      jwtPolicy: first-party-jwt
  addonComponents:
    pilot:
      enabled: true
    tracing:
      enabled: true
    kiali:
      enabled: true
    prometheus:
      enabled: true
  components:
    ingressGateways:
      - name: istio-ingressgateway
        enabled: true
      - name: cluster-local-gateway
        enabled: true
        label:
          istio: cluster-local-gateway
          app: cluster-local-gateway
        k8s:
          service:
            type: ClusterIP
            ports:
              - port: 15020
                name: status-port
              - port: 80
                name: http2
              - port: 443
                name: https
EOF
bin/istioctl manifest apply -f istio-minimal-operator.yaml
# Install Knative
kubectl apply --filename https://github.com/knative/serving/releases/download/${KNATIVE_VERSION}/serving-crds.yaml
kubectl apply --filename https://github.com/knative/serving/releases/download/${KNATIVE_VERSION}/serving-core.yaml
kubectl apply --filename https://github.com/knative/net-istio/releases/download/${KNATIVE_VERSION}/release.yaml
# Install Cert Manager
kubectl apply --validate=false -f https://github.com/jetstack/cert-manager/releases/download/v0.15.1/cert-manager.yaml
kubectl wait --for=condition=available --timeout=600s deployment/cert-manager-webhook -n cert-manager
cd ..
# Install KFServing
# CRD validation must be skipped on k8s < 1.16 (older CRD schema support).
K8S_MINOR=$(kubectl version | perl -ne 'print $1."\n" if /Server Version:.*?Minor:"(\d+)"/')
if [[ $K8S_MINOR -lt 16 ]]; then
  kubectl apply -f install/${KFSERVING_VERSION}/kfserving.yaml --validate=false
else
  kubectl apply -f install/${KFSERVING_VERSION}/kfserving.yaml
fi
# Clean up
rm -rf istio-${ISTIO_VERSION}
|
package server
import (
"fmt"
"github.com/Xhofe/alist/conf"
"github.com/Xhofe/alist/utils"
"github.com/gin-gonic/gin"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
"net/http/httputil"
"net/url"
"path/filepath"
"strings"
)
// Down handles a download request: it validates the per-directory download
// password, resolves the driver for the path, then either serves the local
// file, proxies it, or 302-redirects to the provider's link.
func Down(c *gin.Context) {
	rawPath := c.Param("path")
	rawPath = utils.ParsePath(rawPath)
	log.Debugf("down: %s", rawPath)
	pw := c.Query("pw")
	if !CheckDownLink(utils.Dir(rawPath), pw, utils.Base(rawPath)) {
		ErrorResp(c, fmt.Errorf("wrong password"), 401)
		return
	}
	account, path, driver, err := ParsePath(rawPath)
	if err != nil {
		ErrorResp(c, err, 500)
		return
	}
	// Google Drive links cannot be redirected to directly; stream them instead.
	if account.Type == "GoogleDrive" {
		Proxy(c)
		return
	}
	link, err := driver.Link(path, account)
	if err != nil {
		ErrorResp(c, err, 500)
		return
	}
	// Local files are served from disk; remote ones get a 302 to the real URL.
	// (Redundant else-after-return removed per Go style.)
	if account.Type == "Native" {
		c.File(link)
		return
	}
	c.Redirect(302, link)
}
// Proxy streams the remote file through this server instead of redirecting.
// Text files are always allowed (and transcoded via Text); other types
// require the account's Proxy flag.
func Proxy(c *gin.Context) {
	rawPath := c.Param("path")
	rawPath = utils.ParsePath(rawPath)
	log.Debugf("proxy: %s", rawPath)
	pw := c.Query("pw")
	if !CheckDownLink(utils.Dir(rawPath), pw, utils.Base(rawPath)) {
		ErrorResp(c, fmt.Errorf("wrong password"), 401)
		return
	}
	account, path, driver, err := ParsePath(rawPath)
	if err != nil {
		ErrorResp(c, err, 500)
		return
	}
	// Non-text content may only be proxied when the account opts in.
	if !account.Proxy && utils.GetFileType(filepath.Ext(rawPath)) != conf.TEXT {
		ErrorResp(c, fmt.Errorf("[%s] not allowed proxy", account.Name), 403)
		return
	}
	link, err := driver.Link(path, account)
	if err != nil {
		ErrorResp(c, err, 500)
		return
	}
	if account.Type == "Native" {
		c.File(link)
		return
	}
	if utils.GetFileType(filepath.Ext(rawPath)) == conf.TEXT {
		Text(c, link)
		return
	}
	// Let the driver adjust the request (e.g. auth headers) before proxying.
	driver.Proxy(c, account)
	r := c.Request
	w := c.Writer
	target, err := url.Parse(link)
	if err != nil {
		ErrorResp(c, err, 500)
		return
	}
	protocol := "http://"
	if strings.HasPrefix(link, "https://") {
		protocol = "https://"
	}
	targetHost, err := url.Parse(fmt.Sprintf("%s%s", protocol, target.Host))
	if err != nil {
		// The original ignored this error, which would leave targetHost nil
		// and panic inside NewSingleHostReverseProxy.
		ErrorResp(c, err, 500)
		return
	}
	proxy := httputil.NewSingleHostReverseProxy(targetHost)
	r.URL = target
	r.Host = target.Host
	proxy.ServeHTTP(w, r)
}
// Shared HTTP client for fetching text file contents (see Text).
var client *resty.Client

// init configures the package-level client with automatic retries.
func init() {
	client = resty.New()
	client.SetRetryCount(3)
}
func Text(c *gin.Context, link string) {
res, err := client.R().Get(link)
if err != nil {
ErrorResp(c, err, 500)
return
}
text := res.String()
t := utils.GetStrCoding(res.Body())
log.Debugf("text type: %s", t)
if t != utils.UTF8 {
body, err := utils.GbkToUtf8(res.Body())
if err != nil {
ErrorResp(c, err, 500)
return
}
text = string(body)
}
c.String(200, text)
}
|
<gh_stars>0
package com.decathlon.ara.postman.bean;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Wither;
/**
 * A single Postman test assertion as deserialized from a Postman/Newman JSON
 * report. Only the assertion's display name is kept; unknown JSON fields are
 * ignored.
 */
// NOTE(review): @Wither is deprecated in recent Lombok releases in favor of
// @With — consider migrating when the Lombok dependency is upgraded.
@Data
@Wither
@NoArgsConstructor
@AllArgsConstructor
@JsonIgnoreProperties(ignoreUnknown = true)
public class Assertion {
/**
* Eg. "Status code is 200".
*/
@JsonProperty("assertion")
private String name;
}
|
# Ensure Firefox locks dom.disable_window_open_feature.status to true in
# every known install location's mozilla.cfg.
FIREFOX_DIRS="/usr/lib/firefox /usr/lib64/firefox /usr/local/lib/firefox /usr/local/lib64/firefox"
for FIREFOX_DIR in ${FIREFOX_DIRS}; do
  if [ -d "${FIREFOX_DIR}" ]; then
    # If the pref already exists (with ANY value), force it to true;
    # otherwise append it. Matching only the key fixes the previous bug
    # where a pref set to false failed the grep-for-true and a second,
    # conflicting lockPref line was appended instead of rewriting it.
    if grep -q '^lockPref("dom.disable_window_open_feature.status"' "${FIREFOX_DIR}"/mozilla.cfg 2>/dev/null; then
      sed -i 's/lockPref("dom.disable_window_open_feature.status".*/lockPref("dom.disable_window_open_feature.status", true);/g' "${FIREFOX_DIR}"/mozilla.cfg
    else
      echo 'lockPref("dom.disable_window_open_feature.status", true);' >> "${FIREFOX_DIR}"/mozilla.cfg
    fi
  fi
done
|
<gh_stars>0
// Mongoose models for the booking app: service catalog (Category, Service),
// bookable time slots (Slot), bookings (Appointment) and accounts (User).
const mongoose = require('mongoose');
const Schema = mongoose.Schema,
model = mongoose.model.bind(mongoose),
ObjectId = mongoose.Schema.Types.ObjectId;
// A service category; referenced by Service and Appointment.
const categorySchema = new Schema ({
category: String,
});
const Category = model('Category', categorySchema);
// A bookable service with its price and time-slot duration in minutes.
const serviceSchema = new mongoose.Schema(
{
id: ObjectId,
name: {type:String, required: true},
categories: {type: ObjectId, ref: 'Category'},
price:{type: Number, default: 0, required:true},
description: { type: String,required:true},
timeSlotMinutes: {type: Number, required:true},
});
const Service = model('Service', serviceSchema);
// A time slot. NOTE(review): slot_time/slot_date are plain Strings, which
// prevents range queries and sorting by date — consider Date; confirm with
// the callers before changing.
const slotSchema = new Schema ({
slot_time: String,
slot_date: String,
created_at: Date
});
const Slot = model('Slot', slotSchema);
// A customer booking. NOTE(review): `unique: true` on email means a given
// address can only ever have ONE appointment document — confirm this is
// intended rather than a copy-paste from userSchema.
const appointmentSchema = new Schema({
id: ObjectId,
name: {type: String, required:true},
email:{type:String, required:true, match: /.+\@.+\..+/, unique: true},
phone:{type: Number, trim: true, required:true},
categories:{type: ObjectId, ref: 'Category'},
slots:{type: ObjectId, ref: 'Slot'},
created_at: Date
});
const Appointment = model('Appointment', appointmentSchema);
// An application user. NOTE(review): `password` is a plain String field —
// verify it is hashed before save elsewhere in the app.
const userSchema = new Schema({
name: {type: String, required:true},
email:{type:String, required:true, match: /.+\@.+\..+/, unique: true},
phone:{type: Number, trim: true, required:true},
password:{type: String, required: true},
isAdmin: {type: Boolean, required: true, default:false}
});
const User = model("User", userSchema);
module.exports = {
Appointment, Slot, Category, Service, User
};
|
'use strict'
const { Client } = require('@kyeotic/airtable')
module.exports = {
configure
}
// Build an Airtable API client from the app configuration.
// `config` must provide an `apiKey` string; all other config keys are ignored.
function configure({ config }) {
return new Client({
apiKey: config.apiKey
})
}
|
list1.append(max(list1)+1) # Output: [1.4, 0.7, 3.2, 6.7, 7.7] |
awk 'BEGIN {RS=""} {printf("0x%s;0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s\n", $1, $2, $3, $4, $5, $6, $7, $8, $9)}' $1
|
package com.segmentify.segmentifysdk;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import com.segmentify.segmentifyandroidsdk.SegmentifyManager;
import com.segmentify.segmentifyandroidsdk.model.SearchPageModel;
import com.segmentify.segmentifyandroidsdk.model.SearchResponseModel;
import com.segmentify.segmentifyandroidsdk.utils.SegmentifyCallback;
/**
 * Demo activity for the Segmentify search-event integration: on creation it
 * sends a search page view (empty query, language "EN"), logs any non-null
 * response, and then reports a search click for one product.
 */
public class SearchEventActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// NOTE(review): empty query — presumably requests the default search
// page content; confirm the empty-query semantics with the SDK docs.
SearchPageModel searchPage = new SearchPageModel();
searchPage.setQuery("");
searchPage.setLang("EN");
SegmentifyManager.INSTANCE.sendSearchPageView(searchPage, new SegmentifyCallback<SearchResponseModel>() {
@Override
public void onDataLoaded(SearchResponseModel data) {
if(data!=null){
System.out.println(data);
}
}
});
// Report a click on item type "product", id "21046".
SegmentifyManager.INSTANCE.sendSearchClickView("product","21046");
}
}
#!/usr/bin/env bash
set -euo pipefail
IFS=$'\n\t'

DOCKER_COMPOSE_FILE="docker-compose.yml"
DOCKER_TEMPLATE_FILE="template-docker-compose.yml"
TRAEFIK_COMPOSE_FILE="traefik-compose.yml"
TRAEFIK_TEMPLATE_FILE="template-traefik-compose.yml"

# Creates a self-signed wildcard cert for local test and dev
# EXAMPLE: ./cert.sh something.com
# Three files are created:
#   something.com.key - secret key, good for proxy configs
#   something.com.crt - public cert, good for proxy configs
#   something.com.pem - combo of those two, good for browser/OS import

# Fail early with a usage hint instead of set -u's opaque "unbound variable".
DOMAIN_NAME=${1:?usage: $0 <domain-name>}

# Self-signed 10-year cert with a SAN covering both the apex domain and
# *.domain (modern clients validate the SAN, not just the CN).
openssl req \
  -newkey rsa:2048 \
  -x509 \
  -nodes \
  -keyout "$DOMAIN_NAME.key" \
  -new \
  -out "$DOMAIN_NAME.crt" \
  -subj "/CN=*.$DOMAIN_NAME" \
  -reqexts SAN \
  -extensions SAN \
  -config <(cat /etc/ssl/openssl.cnf \
    <(printf "[SAN]\nsubjectAltName=DNS:*.%s, DNS:%s" "$DOMAIN_NAME" "$DOMAIN_NAME")) \
  -sha256 \
  -days 3650

cat "$DOMAIN_NAME.crt" "$DOMAIN_NAME.key" > "$DOMAIN_NAME.pem"

# Generate compose file(s) from templates unless they already exist.
# Quoted expansions avoid word-splitting; the domain is used as a sed
# replacement, so it must not contain '/'.
if [[ ! -f $DOCKER_COMPOSE_FILE ]]; then
  sed "s/_DOMAIN_NAME_/${DOMAIN_NAME}/g" "$DOCKER_TEMPLATE_FILE" > "$DOCKER_COMPOSE_FILE"
fi
if [[ ! -f $TRAEFIK_COMPOSE_FILE ]]; then
  sed "s/_DOMAIN_NAME_/${DOMAIN_NAME}/g" "$TRAEFIK_TEMPLATE_FILE" > "$TRAEFIK_COMPOSE_FILE"
fi
|
<reponame>burongtz/select2-autocomplete<gh_stars>0
/**
 * Invoke `cb(index)` `size` times (indices 0..size-1), stopping early as
 * soon as the callback returns exactly `false`.
 *
 * @param {number} [size=0] - number of iterations; non-positive means none.
 * @param {function} [cb=null] - callback receiving the current index.
 */
const repeat = function(size = 0, cb = null) {
  // BUG FIX: the previous version crashed with a TypeError when called
  // without a callback (the default is null); treat a missing or
  // non-function cb as a no-op instead.
  if (typeof cb !== 'function') {
    return;
  }
  for (let index = 0; index < size; index++) {
    if (cb(index) === false) {
      break;
    }
  }
};
export default repeat;
package gonymizer
import (
"fmt"
"math/rand"
"strconv"
"strings"
"time"
"unicode/utf8"
"github.com/google/uuid"
"github.com/icrowley/fake"
)
// All processors are designed to work "unseeded"
// Make sure something seeds the RNG before you call the top level process function.
// in order for the processor to "find" the functions it's got to
// 1. conform to ProcessorFunc
// 2. be in the processor map
// There are fancy ways for the reflection/runtime system to find functions
// that match certain text patterns, like how the testing system finds TestXxx(*testing.T) funcs,
// but we don't need that. Just put them in the map to keep things simple.
// The number of times to check the input string for similarity to the output string. We want to keep this at a distance
// of 0.4 or higher. Please see: https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance
//const jaroWinklerAttempts = 1000
// lookup string for random lowercase letters
const lowercaseSet = "abcdefghijklmnopqrstuvwxyz"
// lookup string for random uppercase letters
const uppercaseSet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
// lookup string for random digits
const numericSet = "0123456789"
// cached lengths of the lookup strings above (must stay in sync with them)
const lowercaseSetLen = 26
const uppercaseSetLen = 26
const numericSetLen = 10
// ProcessorCatalog is the function map that points each Processor name to its entry function. All Processors are
// listed in this map (populated in init below).
var ProcessorCatalog map[string]ProcessorFunc
// AlphaNumericMap is used to keep consistency with scrambled alpha numeric strings.
// For example, if we need to scramble things such as Social Security Numbers, it is nice to keep track of these
// changes so if we run across the same SSN again we can scramble it to what we already have.
// Keyed first by the parent column's "schema.table.column", then by original value.
var AlphaNumericMap = map[string]map[string]string{}
// UUIDMap is the global UUID map for all UUIDs that we anonymize. Similar to AlphaNumericMap this map contains all
// UUIDs and what they are changed to. Some tables use UUIDs as the primary key and this allows us to keep consistency
// in the data set when anonymizing it.
var UUIDMap = map[uuid.UUID]uuid.UUID{}
// init initializes the ProcessorCatalog map for all processors. A processor must be listed here to be accessible.
// NOTE: the processors that use math/rand are unseeded by design; the caller
// is expected to seed the global RNG first (see the file-header note).
func init() {
ProcessorCatalog = map[string]ProcessorFunc{
"AlphaNumericScrambler": ProcessorAlphaNumericScrambler,
"FakeStreetAddress":     ProcessorAddress,
"FakeCity":              ProcessorCity,
"FakeCompanyName":       ProcessorCompanyName,
"FakeEmailAddress":      ProcessorEmailAddress,
"FakeFirstName":         ProcessorFirstName,
"FakeFullName":          ProcessorFullName,
"FakeIPv4":              ProcessorIPv4,
"FakeLastName":          ProcessorLastName,
"FakePhoneNumber":       ProcessorPhoneNumber,
"FakeState":             ProcessorState,
"FakeStateAbbrev":       ProcessorStateAbbrev,
"FakeUsername":          ProcessorUserName,
"FakeZip":               ProcessorZip,
"Identity":              ProcessorIdentity, // Default: Does not modify field
"RandomDate":            ProcessorRandomDate,
"RandomDigits":          ProcessorRandomDigits,
"RandomUUID":            ProcessorRandomUUID,
"ScrubString":           ProcessorScrubString,
}
}
// ProcessorFunc is the function prototype for all entries in ProcessorCatalog.
// Each processor takes the column metadata and the column's current value and
// returns the anonymized replacement value (or an error).
type ProcessorFunc func(*ColumnMapper, string) (string, error)
// fakeFuncPtr is a simple function prototype for function pointers to the Fake package's fake functions.
//type fakeFuncPtr func() string
// ProcessorAlphaNumericScrambler scrambles every alphanumeric character in
// input while leaving non-alphanumerics untouched. When the column has a
// fully-specified parent (schema, table and column), results are memoized in
// the global AlphaNumericMap so the same input always maps to the same
// output — useful for preserving PK/FK relationships.
//
// Example:
//   "PUI-7x9vY" = ProcessorAlphaNumericScrambler("ABC-1a2bC")
func ProcessorAlphaNumericScrambler(cmap *ColumnMapper, input string) (string, error) {
	// Unmapped column: a fresh scramble every time, nothing cached.
	if cmap.ParentSchema == "" || cmap.ParentTable == "" || cmap.ParentColumn == "" {
		return scrambleString(input), nil
	}
	// Mapped columns share one scramble table keyed by the parent column.
	key := fmt.Sprintf("%s.%s.%s", cmap.ParentSchema, cmap.ParentTable, cmap.ParentColumn)
	if AlphaNumericMap[key] == nil {
		AlphaNumericMap[key] = map[string]string{}
	}
	if cached, ok := AlphaNumericMap[key][input]; ok && cached != "" {
		// Seen before: reuse the consistent value.
		return cached, nil
	}
	out := scrambleString(input)
	AlphaNumericMap[key][input] = out
	return out, nil
}
// ProcessorAddress returns a randomly generated fake street address.
// NOTE: the Jaro-Winkler similarity check mentioned elsewhere in this file is
// currently disabled, so the output of these Fake* processors is unrelated to
// the input value.
func ProcessorAddress(cmap *ColumnMapper, input string) (string, error) {
return fake.StreetAddress(), nil
}
// ProcessorCity returns a randomly generated city name.
func ProcessorCity(cmap *ColumnMapper, input string) (string, error) {
return fake.City(), nil
}
// ProcessorEmailAddress returns a randomly generated e-mail address.
func ProcessorEmailAddress(cmap *ColumnMapper, input string) (string, error) {
return fake.EmailAddress(), nil
}
// ProcessorFirstName returns a randomly generated first name.
func ProcessorFirstName(cmap *ColumnMapper, input string) (string, error) {
return fake.FirstName(), nil
}
// ProcessorFullName returns a randomly generated full name.
func ProcessorFullName(cmap *ColumnMapper, input string) (string, error) {
return fake.FullName(), nil
}
// ProcessorIdentity will skip anonymization and leave output === input.
func ProcessorIdentity(cmap *ColumnMapper, input string) (string, error) {
return input, nil
}
// ProcessorIPv4 returns a randomly generated IPv4 address.
func ProcessorIPv4(cmap *ColumnMapper, input string) (string, error) {
return fake.IPv4(), nil
}
// ProcessorLastName returns a randomly generated last name.
func ProcessorLastName(cmap *ColumnMapper, input string) (string, error) {
return fake.LastName(), nil
}
// ProcessorPhoneNumber returns a randomly generated phone number.
func ProcessorPhoneNumber(cmap *ColumnMapper, input string) (string, error) {
return fake.Phone(), nil
}
// ProcessorState returns a randomly generated state name.
func ProcessorState(cmap *ColumnMapper, input string) (string, error) {
return fake.State(), nil
}
// ProcessorStateAbbrev returns a randomly generated state abbreviation.
func ProcessorStateAbbrev(cmap *ColumnMapper, input string) (string, error) {
return fake.StateAbbrev(), nil
}
// ProcessorUserName returns a randomly generated username.
func ProcessorUserName(cmap *ColumnMapper, input string) (string, error) {
return fake.UserName(), nil
}
// ProcessorZip returns a randomly generated zip code.
func ProcessorZip(cmap *ColumnMapper, input string) (string, error) {
return fake.Zip(), nil
}
// ProcessorCompanyName returns a randomly generated company name.
func ProcessorCompanyName(cmap *ColumnMapper, input string) (string, error) {
return fake.Company(), nil
}
// ProcessorRandomDate randomizes the month and day of an ISO-8601 / SQL-style
// date ("2018-08-28") while keeping the year unchanged — HIPAA only requires
// that day and month be scrambled, not the year.
func ProcessorRandomDate(cmap *ColumnMapper, input string) (string, error) {
	parts := strings.Split(input, "-")
	// Expect exactly three components: YYYY, MM, DD.
	if len(parts) != 3 {
		return "", fmt.Errorf("Date format is not ISO-8601: %q", parts)
	}
	year, err := strconv.Atoi(parts[0])
	if err != nil {
		return "", fmt.Errorf("Unable to parse year from date: %q", parts)
	}
	return randomizeDate(year), nil
}
// ProcessorRandomDigits returns a random string of digits with the same
// length as the input.
func ProcessorRandomDigits(cmap *ColumnMapper, input string) (string, error) {
return fake.DigitsN(len(input)), nil
}
// ProcessorRandomUUID will generate a random UUID and replace the input with the new UUID. The input however will be
// mapped to the output so every occurrence of the input UUID will replace it with the same output UUID that was
// originally created during the first occurrence of the input UUID.
// When the input is not a valid UUID, an empty string is returned along with
// the parse error.
func ProcessorRandomUUID(cmap *ColumnMapper, input string) (string, error) {
var scrambledUUID string
inputID, err := uuid.Parse(input)
if err != nil {
scrambledUUID = ""
} else {
scrambledUUID, err = randomizeUUID(inputID)
}
return scrambledUUID, err
}
// ProcessorScrubString will replace the input string with asterisks (*). Useful for blanking out password fields.
func ProcessorScrubString(cmap *ColumnMapper, input string) (string, error) {
return scrubString(input), nil
}
/*
func jaroWinkler(input string, jwDistance float64, faker fakeFuncPtr) (output string, err error) {
for counter := 0; counter < jaroWinklerAttempts; counter++ {
output = faker()
if jw := matchr.JaroWinkler(input, output, true); jw > jwDistance {
return output, nil
}
}
return output, fmt.Errorf("Jaro-Winkler: distance < %e for %d attempts. Input: %s, Output: %s",
jwDistance, jaroWinklerAttempts, input, output)
}
*/
// randomizeUUID creates a random UUID and adds it to the map of input->output. If input already exists it returns
// the output that was previously calculated for input, keeping anonymized
// data consistent across repeated occurrences of the same UUID.
func randomizeUUID(input uuid.UUID) (string, error) {
var (
finalUUID uuid.UUID
err       error
)
// First occurrence: generate and remember a fresh random UUID.
if _, ok := UUIDMap[input]; !ok {
finalUUID, err = uuid.NewRandom()
if err != nil {
return "", err
}
UUIDMap[input] = finalUUID
} else {
finalUUID = UUIDMap[input]
}
return finalUUID.String(), nil
}
// randomizeDate returns a random, valid "YYYY-MM-DD" date within the given
// year. Month lengths — including leap-year February — are respected.
func randomizeDate(year int) string {
	randMonth := rand.Intn(12) + 1
	// Day 0 of the *following* month normalizes to the last day of randMonth.
	// See: https://yourbasic.org/golang/last-day-month-date/
	// BUG FIX: the previous code used date(year, randMonth, 0), which yields
	// the last day of the month *before* randMonth — so e.g. a February draw
	// could pick day 29-31 and time.Date would silently normalize the result
	// into early March, and December could never produce day 31.
	monthMaxDay := date(year, randMonth+1, 0).Day()
	randDay := rand.Intn(monthMaxDay) + 1
	fullDateTime := date(year, randMonth, randDay).Format("2006-01-02")
	return fullDateTime
}
// date returns the time.Time for a given year, month, day at midnight UTC.
// Out-of-range values are normalized by time.Date (e.g. day 0 is the last
// day of the previous month), which randomizeDate relies on.
func date(year, month, day int) time.Time {
return time.Date(year, time.Month(month), day, 0, 0, 0, 0, time.UTC)
}
// scrambleString replaces every ASCII letter and digit in input with a random
// character of the same class (lowercase, uppercase, numeric). All other
// bytes are copied through unchanged, so the output has the same length and
// the same punctuation layout as the input.
func scrambleString(input string) string {
	var out strings.Builder
	out.Grow(len(input))
	for _, c := range []byte(input) {
		switch {
		case 'a' <= c && c <= 'z':
			out.WriteString(randomLowercase())
		case 'A' <= c && c <= 'Z':
			out.WriteString(randomUppercase())
		case '0' <= c && c <= '9':
			out.WriteString(randomNumeric())
		default:
			// Non-alphanumeric byte: keep as-is.
			out.WriteByte(c)
		}
	}
	return out.String()
}
// scrubString masks input by producing one '*' per Unicode code point, so the
// masked value has the same rune length as the original.
func scrubString(input string) string {
	var masked strings.Builder
	masked.Grow(utf8.RuneCountInString(input))
	for range input {
		masked.WriteByte('*')
	}
	return masked.String()
}
// randomLowercase picks a random position in the lowercase lookup string and returns the letter at that position.
func randomLowercase() string {
return string(lowercaseSet[rand.Intn(lowercaseSetLen)])
}
// randomUppercase picks a random position in the uppercase lookup string and returns the letter at that position.
func randomUppercase() string {
return string(uppercaseSet[rand.Intn(uppercaseSetLen)])
}
// randomNumeric picks a random position in the numeric lookup string and returns the digit at that position.
func randomNumeric() string {
return string(numericSet[rand.Intn(numericSetLen)])
}
|
#! /bin/bash
#SBATCH -o /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd/run_rexi_par_m000128_t001_n0128_r0224_a1.txt
###SBATCH -e /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd/run_rexi_par_m000128_t001_n0128_r0224_a1.err
#SBATCH -J rexi_par_m000128_t001_n0128_r0224_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=224
#SBATCH --cpus-per-task=1
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=08:00:00
# SLURM batch script for a SWEET REXI scalability benchmark run:
# 224 MPI ranks (28 per node via -ppn), 1 OpenMP thread per rank.
#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin threads compactly for consistent placement across runs.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=1
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo
# Load the exact toolchain the prebuilt binary expects.
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5
cd /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd
# Run from the repository root so relative paths (build/, local_software/) resolve.
cd ../../../
. local_software/env_vars.sh
# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"
time -p mpiexec.hydra -genv OMP_NUM_THREADS 1 -envall -ppn 28 -n 224 ./build/rexi_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 1 --rexi-h 0.2 --timestepping-mode 1 --staggering 0 --rexi-m=128 -C -5.0
// Morse code for 'a'..'z', indexed by letter offset from 'a'.
const CodeMap = [".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."];
const IndexA = 'a'.charCodeAt(0);
// Translate a lowercase word into its concatenated Morse transformation.
const decode = (word) => Array.from(word, (ch) => CodeMap[ch.charCodeAt(0) - IndexA]).join('');
/**
 * Count how many distinct Morse transformations the given words produce.
 * @param {string[]} words
 * @return {number}
 */
var uniqueMorseRepresentations = function (words) {
  const transformations = new Set(words.map(decode));
  return transformations.size;
};
|
#!/bin/bash
# Install tb-profiler and build the reference indices for the bundled tbdb
# FASTA. PREFIX is expected in the environment (NOTE(review): presumably set
# by conda-build — confirm the packaging context).
python -m pip install --no-deps --ignore-installed .
# GATK sequence dictionary (.dict) for the reference.
gatk CreateSequenceDictionary -R $PREFIX/share/tbprofiler/tbdb.fasta
# samtools FASTA index (.fai).
samtools faidx $PREFIX/share/tbprofiler/tbdb.fasta
# BWA alignment index files.
bwa index $PREFIX/share/tbprofiler/tbdb.fasta
|
<gh_stars>10-100
var demonHunter = [
{
name_en:'<NAME>',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_thrillofthehunt.png',
text_en:'Every {6} seconds, your next skill that costs Hatred will immobilize all enemies hit for {2} seconds.',
flavor_en:'"The first three were dead before they hit the ground. The others weren\'t so lucky." —<NAME>'
},
{
name_en:'Tactical Advantage',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_tacticaladvantage.png',
text_en:'Whenever you use Vault, Smoke Screen, or backflip with Evasive Fire you gain {60%} movement speed for {2} seconds.',
flavor_en:'Never start a fair fight.'
},
{
name_en:'Blood Vengeance',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_vengeance.png',
text_en:'Your maximum Hatred is increased by {25}. In addition, gain {30} Hatred and {3} Discipline when you are healed by a health globe.',
flavor_en:'"If it bleeds you can kill it. Kill this one slowly." —Greyscarr, Veteran Demon Hunter'
},
{
name_en:'Steady Aim',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_steadyaim.png',
text_en:'As long as there are no enemies within {10} yards, all damage is increased by {20%}.',
flavor_en:'"Flee if you can, demon. I have a quiver full of friends who fly faster than you." —<NAME>'
},
{
name_en:'Cull the Weak',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_culltheweak.png',
text_en:'Increase damage against Slowed or Chilled enemies by {20%}.',
flavor_en:'"I\'ll show you the same mercy you showed my helpless family." —<NAME>'
},
{
name_en:'<NAME>',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_nightstalker.png',
text_en:'Your primary skills generate an additional {4} Hatred.',
flavor_en:'"How did I learn to hate, to kill, to flay the flesh from the bones of my prey without pity? You taught me." —<NAME>\'s final words to Naragh the Sin Eater'
},
{
name_en:'Brooding',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_brooding.png',
text_en:'Gain {3.0%} Life regeneration per second for every second you remain stationary, stacking up to {3} times. This bonus is reset {5} seconds after you move.',
flavor_en:'Never forget where you came from.'
},
{
name_en:'<NAME>',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_hotpursuit.png',
text_en:'Increase movement speed by {20%} for {2} seconds when you hit an enemy.',
flavor_en:'"Don\'t make me chase you back to Hell, vermin. I know a quicker way to send you there." —<NAME>'
},
{
name_en:'Archery',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_archery.png',
text_en:'Gain a bonus based on your weapon type:\nBow: 8% increased damage\nCrossbow: 50% Critical Hit Damage\nHand Crossbow: 5% Critical Hit Chance\n2nd Hand Crossbow: 1 Hatred per Second',
flavor_en:'"Feel the weight of the crossbow in your hand. Now... the wind, the distance, the target\'s speed. Good. Now try it with blood in your eyes and a demon at your throat." —Greyscarr, Veteran D<NAME>'
},
{
name_en:'Numbing Traps',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_numbingtraps.png',
text_en:'Enemies you Slow or hit with Fan of Knives, Spike Trap, Caltrops, Grenades, and Sentry fire have their damage reduced by {25%} for {3} seconds.',
flavor_en:'"Demonic anatomy is strange and diverse. At times they defy sanity, but in other ways they are just as vulnerable as we are." —Meditations on the Enemy'
},
{
name_en:'Perfectionist',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_perfectionist.png',
text_en:'Reduce the Discipline cost of all skills by {10%}. Increase your Armor and resistance to all damage types by {10%}.',
flavor_en:'Once the trap is sprung you must not relent until every last enemy has fallen. To hesitate is to invite disaster.'
},
{
name_en:'Custom Engineering',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_customengineering.png',
text_en:'Increase the duration of your Caltrops, Marked for Death, Spike Trap, and Sentry by {100%}.\nIncrease the maximum number and charges of Sentries to {3} and number of Spike Traps to {6}.',
flavor_en:'The self-loading mechanisms built into most hand crossbows and traps are delicate and difficult to maintain. Extensive practice is required to use them properly.'
},
{
name_en:'Grenadier',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_grenadier.png',
text_en:'Increase the damage of grenades by {10%}.\nIncrease the explosion size of grenades by {20%}.\n\nUpon death, you drop a giant grenade that explodes for {1000%} weapon damage as Fire.',
flavor_en:'"Do not construct these devices by candlelight, nor by firelight. Once armed, be no closer than ten paces upon the count of three." —Explicit instructions from Quang the Chemist'
},
{
name_en:'Sharpshooter',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_sharpshooter.png',
text_en:'Gain {4%} Critical Hit Chance every second. This bonus is reset {1} seconds after you successfully critically hit.',
flavor_en:'"Every creature has a vulnerability. Find it, and put an arrow in it." —Greyscarr, Veteran Demon Hunter'
},
{
name_en:'Ballistics',
image:'//media.blizzard.com/d3/icons/skills/64/demonhunter_passive_ballistics.png',
text_en:'Increase damage of rockets by {100%}.\n\nIn addition, you have a {20%} chance to fire a homing rocket for {150%} weapon damage when you attack.',
flavor_en:'"How should I know who first invented these devices? Just make sure the correct end is pointed at the enemy and they\'ll do the rest." —Greyscarr, Veteran Demon Hunter'
},
{
name_en:'Ambush',
image:'//media.blizzard.com/d3/icons/skills/64/x1_demonhunter_passive_ambush.png',
text_en:'You deal {40%} additional damage to enemies above {75%} health.',
flavor_en:'"The art of surprise is difficult, but rewarding to master." —Mynton, <NAME>'
},
{
name_en:'Awareness',
image:'//media.blizzard.com/d3/icons/skills/64/x1_demonhunter_passive_awareness.png',
text_en:'When you receive fatal damage, you instead vanish for {2} seconds and regenerate {50%} of maximum Life.\n\nThis effect may occur once every {60} seconds.',
flavor_en:''
},
{
name_en:'Single Out',
image:'//media.blizzard.com/d3/icons/skills/64/x1_demonhunter_passive_singleout.png',
text_en:'Gain {25%} Critical Hit Chance against enemies who are more than {20} yards away from any other enemies.',
flavor_en:'Wield your focus like a weapon.'
}
];
module.exports = demonHunter; |
#ifndef NLIB_THREADPOOL_H
#define NLIB_THREADPOOL_H
#include "header.h"
#include "Condition.h"
#include "Mutex.h"
#include "Thread.h"
// 1.通过类成员函数启动多个线程+
// 线程回调设置为类的成员函数好处
//
// 一个控制线程通过线程池对象
// 构造多个Thread对象,并启动多个并发执行的线程
//
// 由于线程池对象所启动的线程
// 最后的线程函数是线程池类型的成员函数
// 且std::bind时绑定了this
//
// 所有线程池启动的每个线程均可通过
// 隐式的this获得线程需要的语境.
//
// 线程池对象的成员既包含由其启动的线程执行所需要的语境信息
// 也包含它启动的每个线程的属性信息,
// 这些信息对线程池启动的线程可能没用.
// 但对于可访问到线程池对象的访问者,
// 可以提供线程池所管理的所有线程的属性,状态,
// 这些信息可能正是访问者需要的信息
//
// 2.线程池工作模型
// 线程池内每个线程可访问到一个共享的线程池对象
// 所有需要提交执行任务&获取线程池属性/状态的访问者
// 也可以访问到共享的线程池对象
//
//
// 访问者,可以提交任务
// 线程池内每个执行线程,
// 要么,在执行任务
// 要么,取出一个任务并执行
// 要么,阻塞,等待有任务可取
//
// 任务队列存储在多个线程可共享的线程池对象内
// 线程池中每个线程对任务队列来说,是消费者
// 任何需借助线程池对象提交任务的访问线程,是生产者
//
//
// 这样形成了一个
// 多线程下对共享资源的多生产者多消费者问题
// 需要线程间互斥/同步机制.
//
// A fixed-size pool of worker threads consuming tasks from a shared,
// mutex-protected queue (multi-producer / multi-consumer).
class ThreadPool
{
public:
// Task is any void() callable executed by a worker thread.
typedef std::function<void ()> Task;
explicit ThreadPool(
const string& nameArg = string("ThreadPool"));
~ThreadPool();
// Set the task-queue capacity. Call before start().
// NOTE(review): the meaning of maxSize == 0 (likely "unbounded") depends
// on isFull()'s definition in the .cpp — confirm there.
void setMaxQueueSize(int maxSize)
{
m_nMaxQueueSize = maxSize;
}
// Callback run once in each worker thread before it starts taking tasks.
void setThreadInitCallback(const Task& cb)
{
m_nThreadInitCallback = cb;
}
// Spawn numThreads worker threads.
void start(int numThreads);
// Stop the pool and join all workers.
void stop();
const string& name() const
{
return m_strName;
}
// Number of tasks currently waiting in the queue.
size_t queueSize() const;
// Submit a task for execution.
// NOTE(review): likely blocks on m_nNotFull while the bounded queue is
// full — confirm against the .cpp implementation.
void run(Task f);
private:
bool isFull() const;
// Worker-thread entry point: loop taking and executing tasks.
void runInThread();
// Pop one task, waiting on m_nNotEmpty while the queue is empty.
Task take();
private:
mutable MutexLock m_nMutex;
Condition m_nNotEmpty; // signaled when the queue gains a task
Condition m_nNotFull;  // signaled when the queue has room again
string m_strName;
Task m_nThreadInitCallback;
std::vector<std::unique_ptr<Thread>> m_vecThreads;
std::deque<Task> m_nQueue;
size_t m_nMaxQueueSize;
bool m_bRunning;
};
#endif
|
/* eslint-disable class-methods-use-this */
/* eslint-disable func-names */
/* eslint-disable no-restricted-syntax */
import Phaser from 'phaser';
import sceneoptions from '../config/sceneoptions';
import bg from '../assets/bg.png';
import gamoraWalk from '../assets/gamora_walk.png';
import platform1 from '../assets/bigplatform1.png';
import platform2 from '../assets/platform2.png';
import platform3 from '../assets/platform3.png';
import coin from '../assets/coin.png';
import tresure from '../assets/chest.png';
import orc1 from '../assets/orc.png';
import orc2 from '../assets/orcHunter.png';
import orc3 from '../assets/orcWarrior.png';
import tree1 from '../assets/tree1.png';
import tree2 from '../assets/tree2.png';
import tree3 from '../assets/tree3.png';
import tree4 from '../assets/tree4.png';
import tree5 from '../assets/tree5.png';
import tree6 from '../assets/tree6.png';
import bush1 from '../assets/bush1.png';
import bush2 from '../assets/bush2.png';
import bush3 from '../assets/bush3.png';
import flower from '../assets/flower.png';
import sign from '../assets/sign.png';
import stone from '../assets/stone1.png';
import block from '../assets/block.png';
// Shared, mutable game-wide state. `width`/`height` are the level's pixel
// dimensions. NOTE(review): `speed` and `ups` look like horizontal movement
// speed and jump velocity in Phaser pixels/second — confirm against the
// player-movement code (outside this chunk).
export const gameState = {
score: 0,
speed: 240,
ups: 380,
width: 2332,
height: 585,
};
export default class MainScene extends Phaser.Scene {
// Registers the scene under the 'MainScene' key. NOTE(review): `heights`
// is presumably a per-column platform-height lookup (index 0 unused) and
// `levelKey` a self-reference used for scene restarts — confirm against the
// level-generation/restart code outside this chunk.
constructor() {
super({ key: 'MainScene' });
this.heights = [null, 5, 2, 6, 4, 3, 5, 2, 8, 6];
this.levelKey = MainScene;
}
// Loads every image and spritesheet asset the scene needs before create().
preload() {
this.load.image('bg', bg);
this.load.image('bigplatform1', platform1);
this.load.image('platform2', platform2);
this.load.image('platform3', platform3);
this.load.spritesheet('gamora_walk', gamoraWalk, { frameWidth: 30, frameHeight: 32 });
// NOTE(review): a fractional frameWidth (9.5) can cause frame bleeding in
// Phaser spritesheets — confirm the coin sheet's true frame size.
this.load.spritesheet('coin', coin, { frameWidth: 9.5, frameHeight: 10 });
this.load.spritesheet('chest', tresure, { frameWidth: 32, frameHeight: 32 });
this.load.spritesheet('orc1', orc1, { frameWidth: 48, frameHeight: 64 });
this.load.spritesheet('orc2', orc2, { frameWidth: 48, frameHeight: 64 });
this.load.spritesheet('orc3', orc3, { frameWidth: 48, frameHeight: 64 });
this.load.spritesheet('block', block, { frameWidth: 95, frameHeight: 42 });
this.load.image('tree1', tree1);
this.load.image('tree2', tree2);
this.load.image('tree3', tree3);
this.load.image('tree4', tree4);
this.load.image('tree5', tree5);
this.load.image('tree6', tree6);
this.load.image('bush1', bush1);
this.load.image('bush2', bush2);
this.load.image('bush3', bush3);
this.load.image('sign', sign);
this.load.image('stone', stone);
this.load.image('flower', flower);
}
// Build the level: background art, scenery, static platforms, the player,
// enemies, the moving block, coins, cameras, physics colliders and input.
create() {
  gameState.active = true;
  this.add.image(630, 292, 'bg');
  this.add.image(53, 415, 'sign').setScale(0.3);
  // Two static platform groups: a scene-local one used below and a shared
  // one on gameState that levelSetup()/createPlatform() fill in later.
  const platforms = this.physics.add.staticGroup();
  gameState.platforms = this.physics.add.staticGroup();
  const trees = this.physics.add.staticGroup();
  trees.create(450, 430, 'tree1').setScale(0.5).refreshBody();
  trees.create(1100, 430, 'tree2').setScale(0.5).refreshBody();
  trees.create(1690, 430, 'tree3').setScale(0.5).refreshBody();
  const bush = this.physics.add.staticGroup();
  const stone = this.physics.add.staticGroup();
  const flower = this.physics.add.staticGroup();
  // Decorative scenery: each positions array below is stamped out with the
  // named texture and scale. These groups are never given colliders.
  const trees1Positions = [
    { x: 1190, y: 470 }, { x: 1620, y: 480 }, { x: 1760, y: 470 },
  ];
  trees1Positions.forEach(plat => {
    trees.create(plat.x, plat.y, 'tree4').setScale(0.3).refreshBody();
  });
  const trees2Positions = [
    { x: 490, y: 480 }, { x: 1160, y: 487 }, { x: 1670, y: 480 },
  ];
  trees2Positions.forEach(plat => {
    trees.create(plat.x, plat.y, 'tree5').setScale(0.2).refreshBody();
  });
  const trees3Positions = [
    { x: 530, y: 477 }, { x: 400, y: 482 }, { x: 1050, y: 485 },
  ];
  trees3Positions.forEach(plat => {
    trees.create(plat.x, plat.y, 'tree6').setScale(0.3).refreshBody();
  });
  const trees4Positions = [
    { x: 230, y: 290 }, { x: 340, y: 290 }, { x: 440, y: 75 }, { x: 630, y: 375 },
    { x: 870, y: 220 }, { x: 1060, y: 160 }, { x: 1240, y: 290 }, { x: 1450, y: 75 },
    { x: 1870, y: 375 },
  ];
  trees4Positions.forEach(plat => {
    trees.create(plat.x, plat.y, 'tree5').setScale(0.3).refreshBody();
  });
  const trees5Positions = [
    { x: 250, y: 296 }, { x: 530, y: 75 }, { x: 660, y: 367 }, { x: 840, y: 228 },
    { x: 1350, y: 289 }, { x: 1972, y: 370 }, { x: 1526, y: 89 },
  ];
  trees5Positions.forEach(plat => {
    trees.create(plat.x, plat.y, 'tree4').setScale(0.3).refreshBody();
  });
  const trees6Positions = [
    { x: 497, y: 87 }, { x: 735, y: 367 }, { x: 950, y: 228 }, { x: 1150, y: 160 },
    { x: 1280, y: 295 }, { x: 1560, y: 80 },
  ];
  trees6Positions.forEach(plat => {
    trees.create(plat.x, plat.y, 'tree6').setScale(0.3).refreshBody();
  });
  const bush1Positions = [
    { x: 76, y: 440 }, { x: 960, y: 490 }, { x: 1100, y: 180 },
  ];
  bush1Positions.forEach(plat => {
    bush.create(plat.x, plat.y, 'bush1').setScale(0.4).refreshBody();
  });
  const bush2Positions = [
    { x: 296, y: 490 }, { x: 2150, y: 245 },
  ];
  bush2Positions.forEach(plat => {
    bush.create(plat.x, plat.y, 'bush2').setScale(0.4).refreshBody();
  });
  const bush3Positions = [
    { x: 576, y: 495 }, { x: 1260, y: 490 }, { x: 1950, y: 490 },
  ];
  bush3Positions.forEach(plat => {
    bush.create(plat.x, plat.y, 'bush3').setScale(0.4).refreshBody();
  });
  const stonesPositions = [
    { x: 710, y: 395 }, { x: 1320, y: 325 }, { x: 1840, y: 500 },
  ];
  stonesPositions.forEach(plat => {
    stone.create(plat.x, plat.y, 'stone').setScale(0.3).refreshBody();
  });
  const flowersPositions = [
    { x: 364, y: 491 }, { x: 300, y: 317 }, { x: 282, y: 319 }, { x: 470, y: 103 },
    { x: 321, y: 319 }, { x: 510, y: 494 }, { x: 635, y: 492 }, { x: 900, y: 248 },
    { x: 920, y: 244 }, { x: 1110, y: 180 }, { x: 1084, y: 493 }, { x: 1329, y: 493 },
    { x: 1342, y: 491 }, { x: 1260, y: 321 }, { x: 1500, y: 110 }, { x: 1474, y: 104 },
    { x: 1578, y: 494 }, { x: 1589, y: 491 }, { x: 1650, y: 493 }, { x: 1732, y: 494 },
    { x: 1782, y: 493 }, { x: 1890, y: 491 }, { x: 1910, y: 387 }, { x: 1930, y: 384 },
  ];
  flowersPositions.forEach(plat => {
    flower.create(plat.x, plat.y, 'flower').setScale(0.3).refreshBody();
  });
  // Walkable ground platforms (these DO get colliders below).
  const plat1Positions = [
    { x: 340, y: 585 }, { x: 580, y: 585 }, { x: 1010, y: 585 }, { x: 1250, y: 585 },
    { x: 1660, y: 585 }, { x: 1900, y: 585 }, { x: 2200, y: 330 },
  ];
  plat1Positions.forEach(plat => {
    platforms.create(plat.x, plat.y, 'bigplatform1').setScale(0.4).refreshBody();
  });
  const plat3Positions = [
    { x: 70, y: 585 },
  ];
  plat3Positions.forEach(plat => {
    platforms.create(plat.x, plat.y, 'platform3').setScale(0.6).refreshBody();
  });
  gameState.player = this.physics.add.sprite(90, 420, 'gamora_walk').setScale(1.5);
  // NOTE(review): physics debug rendering left enabled on the player —
  // looks like leftover debugging; confirm before shipping.
  gameState.player.body.debug = true;
  gameState.exit = this.physics.add.sprite(2250, 200, 'chest').setScale(1.5);
  gameState.enemy1 = this.physics.add.sprite(320, 400, 'orc1');
  gameState.enemy2 = this.physics.add.sprite(990, 400, 'orc2');
  gameState.enemy3 = this.physics.add.sprite(1640, 400, 'orc3');
  gameState.movil = this.physics.add.sprite(1700, 265, 'block').setImmovable(true);
  this.createAnimations();
  this.levelSetup();
  gameState.movil.body.allowGravity = false;
  // Slide the moving block between x=1700 and x=2000 forever (yoyo + repeat).
  const timeline = this.tweens.createTimeline();
  timeline.add({
    targets: gameState.movil,
    x: 2000,
    ease: Phaser.Math.Easing.Sine.InOut,
    duration: 1500,
    yoyo: true,
    repeat: -1,
  });
  timeline.play();
  // Marks the player as riding the block when landing on its top face.
  const collisionMovingPlatform = (sprite, platform) => {
    if (platform.body.touching.up && sprite.body.touching.down) {
      sprite.isOnPlatform = true;
      sprite.currentPlatform = platform;
    }
  };
  // set Cameras here
  this.cameras.main.setBounds(0, 0, gameState.width, gameState.height, true, true, true, false);
  this.physics.world.setBounds(0, 0, gameState.width, gameState.height, true, true, true, false);
  this.cameras.main.startFollow(gameState.player, true, 0.5, 0.5);
  // World bounds are open at the bottom so the player can fall off-screen.
  gameState.player.setCollideWorldBounds(true, true, true, false);
  // Makes a collision between the character and the platforms
  this.physics.add.collider(gameState.player, platforms);
  this.physics.add.collider(gameState.player, gameState.platforms);
  this.physics.add.collider(gameState.enemy1, platforms);
  this.physics.add.collider(gameState.enemy2, platforms);
  this.physics.add.collider(gameState.enemy3, platforms);
  this.physics.add.collider(gameState.player, gameState.movil, collisionMovingPlatform);
  gameState.player.body.bounce.y = 0.2;
  gameState.cursors = this.input.keyboard.createCursorKeys();
  // 53 coins laid out in a horizontal strip; they fall onto the platforms.
  const coins = this.physics.add.group({
    key: 'coin',
    repeat: 52,
    setXY: { x: 200, y: 0, stepX: 38 },
  });
  coins.children.iterate((child) => {
    child.setBounceY(Phaser.Math.FloatBetween(0.4, 0.8)).setScale(2.3);
    child.anims.play('rotate');
  });
  // Displays initial Score: 0 text
  gameState.scoreText = this.add.text(1150, 30, 'Score: 0', { fontFamily: 'Arial', fontSize: '24px', fill: '#000000' });
  this.physics.add.collider(coins, platforms);
  this.physics.add.collider(coins, gameState.platforms);
  this.physics.add.collider(gameState.exit, platforms);
  gameState.exit.anims.play('movement');
  gameState.enemy1.anims.play('orc1Alert');
  gameState.enemy2.anims.play('orc2Alert');
  gameState.enemy3.anims.play('orc3Alert');
  // makes an overlap event for when the player gets an item
  this.physics.add.overlap(gameState.player, coins, this.collectCoin, null, this);
  this.physics.add.overlap(gameState.player, gameState.exit, () => {
    // Add in the collider that will fade out to the next level here
    // NOTE(review): plain function callback — `this` inside depends on the
    // context Phaser invokes the fade callback with; confirm it is the scene.
    this.cameras.main.fade(800, 0, 0, 0, false, function (camera, progress) {
      if (progress > 0.9) {
        this.scene.restart(this.levelKey);
      }
    });
  }, null, this);
  // Adding events to interact with the character
  this.input.keyboard.on('keydown-UP', this.jump, this);
  this.input.on('pointerdown', this.jump, this);
  // Sets the jumps to 0 for the double jump
  this.jumps = 0;
  // Patrol: all three orcs drift 250px right and back, forever.
  gameState.moveTween = this.tweens.add({
    targets: [gameState.enemy1, gameState.enemy2, gameState.enemy3],
    props: {
      x: { value: '+=250', duration: 3000, ease: 'Power2' },
    },
    yoyo: true,
    repeat: -1,
  });
  // Touching any enemy: knockback + red tint while the camera fades to GameOver.
  this.physics.add.overlap(gameState.player,
    [gameState.enemy1, gameState.enemy2, gameState.enemy3], function () {
      // Add in the collider that will fade out to the next level here
      this.cameras.main.shake(290, 0.01, false);
      this.add.text(1120, 100, 'Game Over', { fontSize: '24px', fill: '#000000' });
      this.cameras.main.fade(800, 0, 0, 0, false, function (camera, progress) {
        if (progress > 0.9) {
          this.scene.start('GameOver');
        } else {
          gameState.player.body.setVelocityY(-200);
          gameState.player.setTint(0xff0000);
          gameState.player.anims.play('idle');
        }
      });
    }, null, this);
}
createPlatform(xIndex, yIndex) {
// Creates a platform evenly spaced along the two indices.
// If either is not a number it won't make a platform
if (typeof yIndex === 'number' && typeof xIndex === 'number') {
gameState.platforms.create((205 * xIndex), yIndex * 70, 'platform2').setOrigin(0, 0.5).setScale(0.3).refreshBody();
}
}
createAnimations() {
this.anims.create({
key: 'left',
frames: this.anims.generateFrameNumbers('gamora_walk', { start: 0, end: 3 }),
frameRate: 10,
repeat: -1,
});
this.anims.create({
key: 'idle',
frames: [{ key: 'gamora_walk', frame: 4 }],
frameRate: 0.4,
repeat: -1,
});
this.anims.create({
key: 'right',
frames: this.anims.generateFrameNumbers('gamora_walk', { start: 5, end: 8 }),
frameRate: 10,
repeat: -1,
});
this.anims.create({
key: 'rotate',
frames: this.anims.generateFrameNumbers('coin', {
start: 0,
end: 3,
}),
frameRate: 15,
yoyo: true,
repeat: -1,
});
this.anims.create({
key: 'movement',
frames: this.anims.generateFrameNumbers('chest', { start: 0, end: 3 }),
frameRate: 10,
yoyo: true,
repeat: -1,
});
this.anims.create({
key: 'orc1Alert',
frames: this.anims.generateFrameNumbers('orc1', { start: 3, end: 5 }),
frameRate: 4,
repeat: -1,
});
this.anims.create({
key: 'orc2Alert',
frames: this.anims.generateFrameNumbers('orc2', { start: 3, end: 5 }),
frameRate: 4,
repeat: -1,
});
this.anims.create({
key: 'orc3Alert',
frames: this.anims.generateFrameNumbers('orc3', { start: 3, end: 5 }),
frameRate: 4,
repeat: -1,
});
}
levelSetup() {
for (const [xIndex, yIndex] of this.heights.entries()) {
// call createPlatform here with xIndex and yIndex
this.createPlatform(xIndex, yIndex);
}
}
// Per-frame loop: horizontal movement, the grounded jump, and the
// fell-off-the-world death check.
update() {
  if (gameState.active) {
    if (gameState.cursors.left.isDown) {
      gameState.player.setVelocityX(-280);
      gameState.player.anims.play('left', true);
    } else if (gameState.cursors.right.isDown) {
      gameState.player.setVelocityX(280);
      gameState.player.anims.play('right', true);
    } else {
      gameState.player.setVelocityX(0);
      gameState.player.anims.play('idle', true);
    }
  }
  // NOTE(review): the two checks below run even when gameState.active is
  // false — confirm whether they were meant to sit inside the guard above.
  if (gameState.cursors.up.isDown && gameState.player.body.touching.down) {
    gameState.player.setVelocityY(-350);
    this.jumps = 0;
  }
  if (gameState.player.y > gameState.height) {
    // NOTE(review): the plain callback function has no bound context, so
    // `this.scene` inside it depends on what context Phaser's shake effect
    // supplies — verify against the camera-effects API.
    this.cameras.main.shake(240, 0.01, false, function (camera, progress) {
      if (progress > 0.9) {
        this.scene.start('GameOver');
      }
    });
  }
}
jump() {
if (gameState.player.body.touching.down || this.jumps < 2) {
gameState.player.setVelocityY(sceneoptions.jumpForce * -1);
this.jumps += 1;
}
}
collectCoin(player, coin) {
coin.disableBody(true, true);
player.refreshBody();
this.sys.game.globals.score += 100;
gameState.scoreText.setText(`Score: ${this.sys.game.globals.score}`);
}
}
|
package controller
import (
"fmt"
"time"
)
// HelloWorld func say hi
// HelloWorld builds a short greeting for name that also reports the current
// local time. The layout string is Go's reference time ("2006-01-02 15:04:05",
// the date the time package was designed around).
func HelloWorld(name string) string {
	currentTime := time.Now().Format("2006-01-02 15:04:05")
	// Fixed: the original format string was missing the space after the
	// comma, producing "Hi, Bob,today is ...".
	return fmt.Sprintf("Hi, %s, today is %s", name, currentTime)
}
|
<filename>src/main/java/dev/fiki/forgehax/main/mods/player/ElytraFlight.java<gh_stars>100-1000
package dev.fiki.forgehax.main.mods.player;
import dev.fiki.forgehax.api.Switch.Handle;
import dev.fiki.forgehax.api.cmd.settings.BooleanSetting;
import dev.fiki.forgehax.api.cmd.settings.EnumSetting;
import dev.fiki.forgehax.api.cmd.settings.FloatSetting;
import dev.fiki.forgehax.api.event.SubscribeListener;
import dev.fiki.forgehax.api.events.entity.LocalPlayerUpdateEvent;
import dev.fiki.forgehax.api.extension.LocalPlayerEx;
import dev.fiki.forgehax.api.mod.Category;
import dev.fiki.forgehax.api.mod.ToggleMod;
import dev.fiki.forgehax.api.modloader.RegisterMod;
import dev.fiki.forgehax.asm.events.movement.ClampMotionSpeedEvent;
import dev.fiki.forgehax.asm.events.movement.ElytraFlyMovementEvent;
import dev.fiki.forgehax.main.Common;
import net.minecraft.network.play.client.CEntityActionPacket;
import static dev.fiki.forgehax.main.Common.getGameSettings;
import static dev.fiki.forgehax.main.Common.getLocalPlayer;
@RegisterMod(
  name = "ElytraFlight",
  description = "Elytra flight",
  category = Category.PLAYER
)
public class ElytraFlight extends ToggleMod {
  // FLIGHT drives movement through the shared fly switch (creative-style
  // flight abilities); SLOW_FALL cancels vanilla elytra physics and steers
  // the player manually in onLocalPlayerUpdate.
  enum FlyMode {
    FLIGHT,
    SLOW_FALL,
    ;
  }
  // When enabled, immediately send the START_FALL_FLYING action so the
  // player deploys the elytra without jumping first.
  public final BooleanSetting flyOnEnable = newBooleanSetting()
    .name("fly-on-enable")
    .description("Start flying when enabled")
    .defaultTo(false)
    .build();
  // Fly-ability speed used only by FLIGHT mode (applied every tick below).
  public final FloatSetting speed = newFloatSetting()
    .name("speed")
    .description("Movement speed")
    .defaultTo(0.05f)
    .build();
  private final EnumSetting<FlyMode> mode = newEnumSetting(FlyMode.class)
    .name("mode")
    .description("Elytra flight mode")
    .defaultTo(FlyMode.SLOW_FALL)
    .build();
  // Handle on the shared fly switch; enabling it is how FLIGHT mode flies.
  private final Handle flying = LocalPlayerEx.getFlySwitch().createHandle(getName());
  @Override
  protected void onEnabled() {
    if (flyOnEnable.getValue()) {
      Common.addScheduledTask(() -> {
        if (getLocalPlayer() != null && !getLocalPlayer().isFallFlying()) {
          Common.sendNetworkPacket(new CEntityActionPacket(getLocalPlayer(), CEntityActionPacket.Action.START_FALL_FLYING));
        }
      });
    }
  }
  @Override
  public void onDisabled() {
    flying.disable();
    // Are we still here?
    // NOTE(review): re-sending START_FALL_FLYING on disable appears intended
    // to resume a legitimate elytra glide after the fly switch is released —
    // confirm this is the desired behavior.
    if (FlyMode.FLIGHT.equals(mode.getValue()) && getLocalPlayer() != null) {
      // Ensure the player starts flying again.
      Common.sendNetworkPacket(new CEntityActionPacket(getLocalPlayer(), CEntityActionPacket.Action.START_FALL_FLYING));
    }
  }
  // In SLOW_FALL mode, cancel vanilla elytra movement so our manual motion
  // in onLocalPlayerUpdate is the only thing moving the player.
  @SubscribeListener
  public void onElytraMovement(ElytraFlyMovementEvent event) {
    if(!FlyMode.FLIGHT.equals(mode.getValue())) {
      event.setCanceled(true);
    }
  }
  // Likewise skip the game's motion-speed clamp in SLOW_FALL mode.
  @SubscribeListener
  public void onClampMotion(ClampMotionSpeedEvent event) {
    if(!FlyMode.FLIGHT.equals(mode.getValue())) {
      event.setCanceled(true);
    }
  }
  @SubscribeListener
  public void onLocalPlayerUpdate(LocalPlayerUpdateEvent event) {
    if(FlyMode.FLIGHT.equals(mode.getValue())) {
      if (getLocalPlayer().isFallFlying()) {
        flying.enable();
      }
      getLocalPlayer().abilities.setFlyingSpeed(speed.getValue());
    } else {
      if (!getLocalPlayer().isFallFlying()) {
        return;
      }
      double motionX = 0.0D;
      // Tiny downward drift; presumably keeps the elytra deployed while
      // hovering — confirm against game mechanics.
      double motionY = -0.0001D;
      double motionZ = 0.0D;
      final float speed = (float) (1.7F * 1.06);
      double forward = getLocalPlayer().input.forwardImpulse;
      double strafe = getLocalPlayer().input.leftImpulse;
      float yaw = getLocalPlayer().yRot;
      if ((forward == 0.0D) && (strafe == 0.0D)) {
        motionX = 0.0D;
        motionZ = 0.0D;
      } else {
        // Combined forward+strafe input is folded into a single 45-degree
        // yaw offset so diagonal movement is not faster than straight.
        if (forward != 0.0D) {
          if (strafe > 0.0D) {
            yaw += (forward > 0.0D ? -45 : 45);
          } else if (strafe < 0.0D) {
            yaw += (forward > 0.0D ? 45 : -45);
          }
          strafe = 0.0D;
          if (forward > 0.0D) {
            forward = 1.0D;
          } else if (forward < 0.0D) {
            forward = -1.0D;
          }
        }
        // Project the input onto the horizontal plane along the view yaw.
        final double cos = Math.cos(Math.toRadians(yaw + 90.0F));
        final double sin = Math.sin(Math.toRadians(yaw + 90.0F));
        motionX = (forward * speed * cos + strafe * speed * sin);
        motionZ = (forward * speed * sin - strafe * speed * cos);
      }
      // Sneak key descends at full speed.
      if (getGameSettings().keyShift.isDown()) {
        motionY = -1.0D;
      }
      getLocalPlayer().setDeltaMovement(motionX, motionY, motionZ);
    }
  }
}
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/md/ic_pan_tool.js
"use strict";

// Auto-generated icon definition (react-icons-kit, Material Design
// "pan tool"). The object mirrors the source SVG: nested group elements
// plus the path data; do not edit the path strings by hand.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_pan_tool = void 0;
var ic_pan_tool = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "rect",
      "attribs": {
        "fill": "none",
        "height": "24",
        "width": "24"
      },
      "children": [{
        "name": "rect",
        "attribs": {
          "fill": "none",
          "height": "24",
          "width": "24"
        },
        "children": []
      }]
    }]
  }, {
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "g",
      "attribs": {},
      "children": [{
        "name": "g",
        "attribs": {},
        "children": [{
          "name": "g",
          "attribs": {},
          "children": [{
            "name": "g",
            "attribs": {},
            "children": [{
              "name": "path",
              "attribs": {
                "d": "M23,5.5V20c0,2.2-1.8,4-4,4h-7.3c-1.08,0-2.1-0.43-2.85-1.19L1,14.83c0,0,1.26-1.23,1.3-1.25 c0.22-0.19,0.49-0.29,0.79-0.29c0.22,0,0.42,0.06,0.6,0.16C3.73,13.46,8,15.91,8,15.91V4c0-0.83,0.67-1.5,1.5-1.5S11,3.17,11,4v7 h1V1.5C12,0.67,12.67,0,13.5,0S15,0.67,15,1.5V11h1V2.5C16,1.67,16.67,1,17.5,1S19,1.67,19,2.5V11h1V5.5C20,4.67,20.67,4,21.5,4 S23,4.67,23,5.5z"
              },
              "children": [{
                "name": "path",
                "attribs": {
                  "d": "M23,5.5V20c0,2.2-1.8,4-4,4h-7.3c-1.08,0-2.1-0.43-2.85-1.19L1,14.83c0,0,1.26-1.23,1.3-1.25 c0.22-0.19,0.49-0.29,0.79-0.29c0.22,0,0.42,0.06,0.6,0.16C3.73,13.46,8,15.91,8,15.91V4c0-0.83,0.67-1.5,1.5-1.5S11,3.17,11,4v7 h1V1.5C12,0.67,12.67,0,13.5,0S15,0.67,15,1.5V11h1V2.5C16,1.67,16.67,1,17.5,1S19,1.67,19,2.5V11h1V5.5C20,4.67,20.67,4,21.5,4 S23,4.67,23,5.5z"
                },
                "children": []
              }]
            }]
          }]
        }]
      }]
    }]
  }]
};
exports.ic_pan_tool = ic_pan_tool;
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, MaxPooling2D
# Create model: a small CNN for 28x28 single-channel images (e.g. MNIST).
model = Sequential()
model.add(Conv2D(64, kernel_size=3, activation='relu', input_shape=(28, 28, 1)))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Flatten())
# 10-way softmax head; categorical_crossentropy below implies one-hot labels.
model.add(Dense(10, activation='softmax'))
# Compile model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Fit model
# NOTE(review): x and y are not defined in this snippet — the surrounding
# code must supply them (x: (N, 28, 28, 1), y: one-hot (N, 10) — confirm).
model.fit(x, y, epochs=5)
#!/bin/bash
# Provision an Android NDK build environment (Debian/Ubuntu): install build
# tooling, download the pinned NDK release, verify it and unpack under /opt.
set -eo pipefail
export NDK_VERSION=android-ndk-r21d
export NDK_FILENAME=${NDK_VERSION}-linux-x86_64.zip
export ACCEPT_EULA=Y
# Expected SHA-256 of the NDK zip; the download is verified before unpacking.
sha256_file=dd6dc090b6e2580206c64bcee499bc16509a5d017c6952dcd2bed9072af67cbd
sudo apt-get -yqq update
#sudo apt-get -yqq upgrade
sudo apt-get -yqq install python3 python3-{pip,virtualenv,mako} curl build-essential libtool autotools-dev automake pkg-config bsdmainutils unzip git gettext
python3 -m pip install virtualenv
mkdir -p /opt
cd /opt && curl -sSO https://dl.google.com/android/repository/${NDK_FILENAME}
# Fixed: the checksum-line format requires TWO spaces between digest and
# filename; with a single space shasum rejects the line as improperly
# formatted and the check fails.
echo "${sha256_file}  ${NDK_FILENAME}" | shasum -a 256 --check
unzip -qq ${NDK_FILENAME}
rm ${NDK_FILENAME}
# Inside a container, trim caches/docs/locales to shrink the image.
if [ -f /.dockerenv ]; then
sudo apt-get -yqq --purge autoremove unzip
sudo apt-get -yqq clean
rm -rf /var/lib/apt/lists/* /var/cache/* /tmp/* /usr/share/locale/* /usr/share/man /usr/share/doc /lib/xtables/libip6*
fi
|
# Train a support-vector classifier with a cubic polynomial kernel.
from sklearn.svm import SVC
# NOTE(review): X_train and y_train are not defined in this snippet — the
# surrounding script must create them before this runs.
clf = SVC(kernel='poly', degree=3)
clf.fit(X_train, y_train)
# Listing shortcuts: classify entries (-F) and include dotfiles (-A).
alias ls="ls -F -A"
alias ll="ls -lAh"
alias l="ls -lh"
# Copy the SSH public key to the macOS clipboard, then print a confirmation.
# NOTE(review): echo ignores stdin, so the final pipe only prints the message;
# the copy itself is done by pbcopy — confirm this chain is intentional.
alias pubkey="more ~/.ssh/id_rsa.pub | pbcopy | echo '=> Public key copied to pasteboard.'"
# Append a small spacer tile to the macOS Dock and restart the Dock.
alias spacer="defaults write com.apple.dock persistent-apps -array-add '{tile-data={}; tile-type='spacer-tile';}' && killall Dock"
|
# Regenerate the openEHR ITS-REST UML artefacts from the MagicDraw project,
# clearing the previously generated class and diagram files first.
rm docs/UML/classes/*.*
rm docs/UML/diagrams/*.*
# NOTE(review): "{its_rest_release}" looks like an unexpanded template
# placeholder — confirm whether a shell variable (${its_rest_release}) or a
# literal release tag was intended here.
../specifications-AA_GLOBAL/bin/uml_generate.sh -i {its_rest_release} -r ITS-REST -o docs/UML computable/UML/openEHR_UML-ITS-REST.mdzip
|
#!/bin/bash
# Execute sudo bash ./createtopo.sh
# ----------------------- CREATE TOPOLOGY STEP --------------------------------
# Two OVS bridges on switch ovs1, both pointed at the same controller,
# patched together, with one host attached to each bridge (same /16 subnet).
sudo ./sdntool bridge --create --switchname ovs1 --bridgename br1 --controllerip tcp:172.10.1.2:6633
sudo ./sdntool bridge --create --switchname ovs1 --bridgename br2 --controllerip tcp:172.10.1.2:6633
sudo ./sdntool patch --create --switchname ovs1 --bridgeA br1 --bridgeB br2
#sudo ./sdntool patch --delete --switchname ovs1 --bridgeA br1 --bridgeB br2
sudo ./sdntool link --create --switchname ovs1 --bridge br1 --host h1 --iphost 10.0.0.1/16
sudo ./sdntool link --create --switchname ovs1 --bridge br2 --host h2 --iphost 10.0.0.2/16
# Teardown counterparts kept for reference:
#sudo ./sdntool link --delete --switchname ovs1 --bridge br1 --host h1 //revisar
#sudo ./sdntool link --delete --switchname ovs1 --bridge br2 --host h2
|
<gh_stars>10-100
package sentry
import (
"testing"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// ExampleAddInternalPrefixes shows how to register extra package prefixes so
// their stacktrace frames are tagged as in-app.
func ExampleAddInternalPrefixes() {
	// This adds the provided prefixes to your list of internal
	// package prefixes used to tag stacktrace frames as in-app.
	AddInternalPrefixes("github.com/SierraSoftworks/sentry-go")
}
// ExampleStackTrace shows how to attach a stacktrace to a captured event,
// optionally sourcing frames from a pkg/errors error and marking internal
// frames by prefix.
func ExampleStackTrace() {
	cl := NewClient()
	cl.Capture(
		// You can specify that a StackTrace should be included when
		// sending your event to Sentry
		StackTrace().
			// You can also gather the stacktrace frames from a specific
			// error if it is created using `pkg/errors`
			ForError(errors.New("example error")).
			// And you can mark frames as "internal" by specifying the
			// internal frame prefixes here.
			WithInternalPrefixes(
				"github.com/SierraSoftworks/sentry-go",
			),
	)
}
// TestAddInternalPrefixes verifies that AddInternalPrefixes appends to the
// package-global defaultInternalPrefixes list (note: this mutates shared
// state for the rest of the test run).
func TestAddInternalPrefixes(t *testing.T) {
	assert.Contains(t, defaultInternalPrefixes, "main")
	AddInternalPrefixes("github.com/SierraSoftworks/sentry-go")
	assert.Contains(t, defaultInternalPrefixes, "github.com/SierraSoftworks/sentry-go")
}
// TestStackTrace exercises the StackTrace option end to end: construction,
// frame collection from an error, internal-prefix registration, and the
// Finalize step that flags in-app frames.
func TestStackTrace(t *testing.T) {
	o := StackTrace()
	require.NotNil(t, o, "it should return a non-nil option")
	assert.Implements(t, (*Option)(nil), o, "it should implement the Option interface")
	assert.Equal(t, "stacktrace", o.Class(), "it should use the right option class")
	// Reach into the concrete type to inspect collected frames directly.
	sti, ok := o.(*stackTraceOption)
	require.True(t, ok, "it should actually be a *stackTraceOption")
	assert.NotEmpty(t, sti.Frames, "it should start off with your current stack frames")
	originalFrames := sti.Frames
	err := errors.New("example error")
	assert.Same(t, o, o.ForError(err), "it should reuse the same instance when adding error information")
	assert.NotEmpty(t, sti.Frames, "it should have loaded frame information from the error")
	assert.NotEqual(t, originalFrames, sti.Frames, "the frames should not be the original ones it started with")
	assert.Equal(t, defaultInternalPrefixes, sti.internalPrefixes, "it should start out with the default internal prefixes")
	o.WithInternalPrefixes("github.com/SierraSoftworks")
	assert.Contains(t, sti.internalPrefixes, "github.com/SierraSoftworks", "it should allow you to add new internal prefixes")
	// Finalize is what flips InApp on frames matching an internal prefix.
	if assert.Implements(t, (*FinalizeableOption)(nil), o, "it should implement the FinalizeableOption interface") {
		for i, frame := range sti.Frames {
			assert.False(t, frame.InApp, "all frames should initially be marked as external (frame index=%d)", i)
		}
		sti.Finalize()
		if assert.NotEmpty(t, sti.Frames, "the frames list should not be empty") {
			assert.True(t, sti.Frames[len(sti.Frames)-1].InApp, "the final frame should be marked as internal")
		}
	}
}
|
def longest_common_subsequence(s1, s2):
    """Return ``(lcs, length)``: a longest common subsequence of s1 and s2.

    Classic O(len(s1) * len(s2)) dynamic programme: ``m[i][j]`` holds the LCS
    length of prefixes ``s1[:i]`` and ``s2[:j]``; the subsequence itself is
    recovered by walking the table backwards from the bottom-right corner.
    """
    rows, cols = len(s1), len(s2)
    # One extra row/column so m[0][*] and m[*][0] represent empty prefixes.
    m = [[0] * (cols + 1) for _ in range(rows + 1)]
    for i in range(1, rows + 1):
        for j in range(1, cols + 1):
            if s1[i - 1] == s2[j - 1]:
                # Characters match: extend the best prefix-pair subsequence.
                m[i][j] = m[i - 1][j - 1] + 1
            else:
                # No match: carry the better of dropping a char from s1 or s2.
                m[i][j] = max(m[i - 1][j], m[i][j - 1])
    lcs_length = m[rows][cols]
    # Backtrack, collecting matched characters into a list (O(n) join beats
    # the original per-character string concatenation, which is O(n^2)).
    chars = []
    i, j = rows, cols
    while i > 0 and j > 0:
        if s1[i - 1] == s2[j - 1]:
            chars.append(s1[i - 1])
            i -= 1
            j -= 1
        elif m[i - 1][j] > m[i][j - 1]:
            i -= 1
        else:
            j -= 1
    # Characters were collected back-to-front; reverse into reading order.
    return ''.join(reversed(chars)), lcs_length
# Demo driver for longest_common_subsequence.
# NOTE(review): s1 and s2 are not defined in this snippet — the surrounding
# script must supply them before this code runs.
lcs, lcs_length = longest_common_subsequence(s1, s2)
print("LCS: " + lcs)
print("LCS length: " + str(lcs_length))
#include "Game.h"
#include <algorithm>
#include <limits.h>
using namespace std;
// Construct the game with its window title and empty best-score records.
SnakeGame::SnakeGame()
:gameName{"~Snake Game by VissaM~"},highScore{0}, bestPlayer{"None"}
{}
// Nothing to release explicitly; members clean themselves up.
SnakeGame::~SnakeGame(){}
// Register a new player under the given name.
void SnakeGame::addPlayer(string playerName){
    players.push_back(Player{playerName});
}
unsigned int SnakeGame::getHighScore(void){
return highScore;
}
// Name of the player who holds the current high score.
string SnakeGame::getBestPlayer(void){
    return this->bestPlayer;
}
// Run one game for the named player and fold the result into the session
// high score. The player must already have been added via addPlayer().
void SnakeGame::play(string playerName){
    pair<string, unsigned int> curBest{playerName, 0};
    // Fixed: iterate by reference. The original `for(auto player : players)`
    // copied each Player, so any state Player::play() mutates on the player
    // (stats, history) was silently discarded after the loop.
    for(auto& player : players){
        if(player.getName() == playerName){
            initializeGraphics((char *)gameName.c_str());
            curBest = player.play();
            endGraphics();
            break;
        }
    }
    if(curBest.second > highScore){
        highScore = curBest.second;
        bestPlayer = curBest.first;
    }
    cout << "Highscore: " << highScore << " by " << bestPlayer << "\n" <<endl;
}
// Interactive session loop: prompt for a player name, auto-register unknown
// names, run a game, and repeat until the user declines.
void SnakeGame::play(void){
    while(1){
        string playerName;
        cout << "Who's playing: ";
        cin >> playerName;
        cout << endl;
        // Linear search for an existing player with this name.
        list<Player>::iterator p;
        for(p=players.begin(); p!=players.end(); p++){
            if(p->getName() == playerName)
                break;
        }
        if(p == players.end()){
            //if the player isn't in the list, add him/her
            addPlayer(playerName);
        }
        play(playerName); //get the player to play the game
        // Drop any leftover input before the yes/no prompt.
        cin.clear();
        cin.ignore(numeric_limits<streamsize>::max(), '\n');
        cout << "Do you want to play again? (yes or no): ";
        string ans;
        cin >> ans;
        if(ans != "yes"){
            cout << "Exiting ..." << endl;
            break;
        }
        cout << "Perfect...\n" << endl;
    }
}
<gh_stars>1-10
package br.indie.fiscal4j.cte200.classes;
import br.indie.fiscal4j.DFAmbiente;
/**
* <h1>URLs dos serviços</h1><br>
* <a href="http://hom.nfe.fazenda.gov.br/portal/webServices.aspx?tipoConteudo=Wak0FwB7dKs=">NFE Homologação</a><br>
* <a href="http://www.nfe.fazenda.gov.br/portal/webServices.aspx?tipoConteudo=Wak0FwB7dKs=">NFE Produção</a><br>
* <br>
* <a href="http://nfce.encat.org/desenvolvedor/webservices-h">NFCE Homologação</a><br>
* <a href="http://nfce.encat.org/desenvolvedor/webservices-p">NFCE Produção</a>
*/
public enum CTAutorizador {
AN {
@Override
public String getDistribuicaoDFe(final DFAmbiente ambiente) {
return DFAmbiente.HOMOLOGACAO.equals(ambiente) ? "https://hom1.cte.fazenda.gov.br/CTeDistribuicaoDFe/CTeDistribuicaoDFe.asmx" : "https://www1.cte.fazenda.gov.br/CTeDistribuicaoDFe/CTeDistribuicaoDFe.asmx";
}
};
public abstract String getDistribuicaoDFe(final DFAmbiente ambiente);
} |
# A minimal shopping cart: holds products, totals their prices, and
# delegates payment to a configured payment object.
class ShoppingCart
  def initialize
    @products = []
    @payment = nil
  end

  # Add a product (anything responding to #price) to the cart.
  def add_product(product)
    @products << product
  end

  # Remove the first matching product; a no-op when the product is absent.
  def remove_product(product)
    @products.delete(product)
  end

  # Sum of all product prices currently in the cart (0 when empty).
  def get_total_price
    @products.sum(&:price)
  end

  # Set the payment strategy (anything responding to #pay).
  def set_payment(payment)
    @payment = payment
  end

  # Charge the configured payment object for the cart total.
  # NOTE(review): raises NoMethodError when no payment has been set —
  # confirm whether an explicit guard is wanted before relying on this.
  def checkout
    @payment.pay(get_total_price)
  end
end
# Simple value object describing an item for sale.
class Product
  attr_reader :name, :price

  # @param name  [String]  display name
  # @param price [Numeric] unit price
  def initialize(name, price)
    @name, @price = name, price
  end
end
# Payment strategy stub: concrete gateways are expected to implement #pay.
class Payment
  # Charge the given amount; intentionally left unimplemented here.
  def pay(amount)
    # payment logic
  end
end
# Empty build script, such that it works from the common execution flow.
# NOTE(review): nothing is compiled here; the Python sources are merely
# copied in — and the copy runs AFTER the DONE message. Confirm the order
# is intentional.
echo Build of LSTM DONE.
cp ../*.py .
|
// Russian UI strings for the Com.AbstractContainer component
// (title / close / save labels; the help text is intentionally empty).
cm.setMessages('Com.AbstractContainer', {
  'title': 'Контейнер',
  'close': 'Закрыть',
  'save': 'Сохранить',
  'help': '',
});
|
#!/bin/bash
# OSSIM build-environment setup: resolves source/build/install paths, version
# info and per-module BUILD_* flags. Every export is guarded so values already
# present in the environment are never overwritten.
pushd `dirname ${BASH_SOURCE[0]}` >/dev/null
export SCRIPT_DIR=`pwd -P`
popd >/dev/null
. $SCRIPT_DIR/git-prompt.sh
# All [ -z ... ] / [ -d ... ] operands are quoted so empty or space-containing
# values cannot break the tests.
if [ -z "$OSSIM_GIT_BRANCH" ] ; then
export OSSIM_GIT_BRANCH=`__git_ps1 "%s"`
fi
# Jenkins provides WORKSPACE; otherwise derive the dev home from this script.
if [ -z "$WORKSPACE" ] ; then
if [ -z "$OSSIM_DEV_HOME" ]; then
pushd $SCRIPT_DIR/../.. >/dev/null
export OSSIM_DEV_HOME=$PWD
popd >/dev/null
fi
else
export OSSIM_DEV_HOME=$WORKSPACE
fi
if [ -z "$OSSIM_MAKE_JOBS" ]; then
export OSSIM_MAKE_JOBS=4
fi
if [ -z "$OSSIM_INSTALL_PREFIX" ]; then
export OSSIM_INSTALL_PREFIX=$OSSIM_DEV_HOME/install
fi
if [ -z "$OSSIM_BUILD_DIR" ]; then
export OSSIM_BUILD_DIR=$OSSIM_DEV_HOME/build
fi
export CMAKE_CONFIG_SCRIPT=$OSSIM_DEV_HOME/ossim/cmake/scripts/ossim-cmake-config.sh
# Setup JAVA Home
#
# If not explicitly set then try to set. Add more for other OS's
# this should work with OpenJDK installation.
#
if [ -z "$JAVA_HOME" ] ; then
if [ -d "/usr/lib/jvm/java" ] ; then
export JAVA_HOME="/usr/lib/jvm/java"
elif [ -f "/usr/libexec/java_home" ] ; then
export JAVA_HOME=`/usr/libexec/java_home`
fi
fi
# for packaging and general version number
#
if [ -z "$OSSIM_VERSION" ] ; then
export OSSIM_VERSION=1.9.0
fi
if [ -z "$OSSIM_VERSION_TAG" ] ; then
if [ "${OSSIM_GIT_BRANCH}" == "dev" ] ; then
export OSSIM_VERSION_TAG="SNAPSHOT"
else
export OSSIM_VERSION_TAG="RELEASE"
fi
fi
# For RPM packaging
#
if [ -z "$OSSIM_BUILD_RELEASE" ] ; then
export OSSIM_BUILD_RELEASE=1
fi
if [ -z "$BUILD_OSSIM_APPS" ] ; then
export BUILD_OSSIM_APPS=ON
fi
if [ -z "$BUILD_OSSIM_CURL_APPS" ] ; then
export BUILD_OSSIM_CURL_APPS=OFF
fi
if [ -d "$OSSIM_DEV_HOME/ossim-video" ] ; then
if [ -z "$BUILD_OSSIM_VIDEO" ] ; then
export BUILD_OSSIM_VIDEO=ON
fi
else
# NOTE(review): every sibling module defaults to OFF when its directory is
# absent; ON here looks like a copy/paste slip — confirm before changing.
export BUILD_OSSIM_VIDEO=ON
fi
if [ -d "$OSSIM_DEV_HOME/ossim-oms" ] ; then
if [ -z "$BUILD_OMS" ] ; then
export BUILD_OMS=ON
fi
else
export BUILD_OMS=OFF
fi
if [ -d "$OSSIM_DEV_HOME/ossim-gui" ] ; then
if [ -z "$BUILD_OSSIM_GUI" ] ; then
export BUILD_OSSIM_GUI=ON
fi
else
export BUILD_OSSIM_GUI=OFF
fi
if [ -d "$OSSIM_DEV_HOME/ossim-planet" ] ; then
if [ -z "$BUILD_OSSIM_PLANET" ] ; then
export BUILD_OSSIM_PLANET=ON
fi
else
export BUILD_OSSIM_PLANET=OFF
fi
if [ -d "$OSSIM_DEV_HOME/ossim-wms" ] ; then
if [ -z "$BUILD_OSSIM_WMS" ] ; then
export BUILD_OSSIM_WMS=ON
fi
else
export BUILD_OSSIM_WMS=OFF
fi
# Plugin defaults (only meaningful when the plugins tree is checked out).
if [ -d "$OSSIM_DEV_HOME/ossim-plugins" ] ; then
if [ -z "$BUILD_CNES_PLUGIN" ] ; then
export BUILD_CNES_PLUGIN=ON
fi
if [ -z "$BUILD_CSM_PLUGIN" ] ; then
export BUILD_CSM_PLUGIN=OFF
fi
if [ -z "$BUILD_WEB_PLUGIN" ] ; then
export BUILD_WEB_PLUGIN=OFF
fi
if [ -z "$BUILD_SQLITE_PLUGIN" ] ; then
export BUILD_SQLITE_PLUGIN=OFF
fi
if [ -z "$BUILD_KAKADU_PLUGIN" ] ; then
export BUILD_KAKADU_PLUGIN=OFF
fi
if [ -z "$BUILD_KML_PLUGIN" ] ; then
export BUILD_KML_PLUGIN=OFF
fi
if [ -z "$BUILD_GDAL_PLUGIN" ] ; then
export BUILD_GDAL_PLUGIN=OFF
fi
#if [ -z $BUILD_HDF5_PLUGIN ] ; then
# export BUILD_HDF5_PLUGIN=ON
#fi
if [ -z "$BUILD_POTRACE_PLUGIN" ] ; then
export BUILD_POTRACE_PLUGIN=OFF
fi
if [ -z "$BUILD_FFTW3_PLUGIN" ] ; then
export BUILD_FFTW3_PLUGIN=OFF
fi
if [ -z "$BUILD_GEOPDF_PLUGIN" ] ; then
export BUILD_GEOPDF_PLUGIN=OFF
fi
if [ -z "$BUILD_OPENCV_PLUGIN" ] ; then
export BUILD_OPENCV_PLUGIN=OFF
fi
if [ -z "$BUILD_OPENJPEG_PLUGIN" ] ; then
export BUILD_OPENJPEG_PLUGIN=OFF
fi
if [ -z "$BUILD_PNG_PLUGIN" ] ; then
export BUILD_PNG_PLUGIN=OFF
fi
if [ -z "$BUILD_JPEG12_PLUGIN" ] ; then
export BUILD_JPEG12_PLUGIN=ON
fi
if [ -z "$BUILD_OSSIM_HDF5_SUPPORT" ] ; then
export BUILD_OSSIM_HDF5_SUPPORT=OFF
fi
fi
if [ -z "$OSSIM_BUILD_ADDITIONAL_DIRECTORIES" ] ; then
if [ -d "$OSSIM_DEV_HOME/ossim-private/ossim-kakadu-jpip-server" ]; then
export OSSIM_BUILD_ADDITIONAL_DIRECTORIES=$OSSIM_DEV_HOME/ossim-private/ossim-kakadu-jpip-server
fi
fi
# Fixed: the original test was [ \( "${BUILD_KAKADU_PLUGIN}"="ON" \) -o ... ].
# Without spaces around '=', "x"="ON" collapses to a single non-empty word,
# so the test was ALWAYS true and the Kakadu paths were probed unconditionally.
if [ "${BUILD_KAKADU_PLUGIN}" = "ON" ] || [ -d "$OSSIM_DEV_HOME/ossim-private/ossim-kakadu-jpip-server" ] ; then
if [ -d "${OSSIM_DEV_HOME}/kakadu-${KAKADU_VERSION}" ] ; then
if [ -z "$KAKADU_ROOT_SRC" ] ; then
export KAKADU_ROOT_SRC="${OSSIM_DEV_HOME}/kakadu-${KAKADU_VERSION}"
fi
if [ -d "${KAKADU_ROOT_SRC}/lib/Linux-x86-64-gcc" ] ; then
if [ -z "$KAKADU_LIBRARY" ] ; then
export KAKADU_LIBRARY="${KAKADU_ROOT_SRC}/lib/Linux-x86-64-gcc/libkdu_v75R.so"
fi
if [ -z "$KAKADU_AUX_LIBRARY" ] ; then
export KAKADU_AUX_LIBRARY="${KAKADU_ROOT_SRC}/lib/Linux-x86-64-gcc/libkdu_a75R.so"
fi
fi
fi
fi
|
package io.opensphere.mantle.data.impl.dgset.v1;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.EqualsHelper;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataGroupInfoActiveHistoryRecord;
import io.opensphere.mantle.data.DataGroupInfoActiveSet;
/**
* The Class JAXBDataGroupInfoActiveSetConfig.
*/
@XmlRootElement(name = "DataGroupInfoActiveSetConfig")
@XmlAccessorType(XmlAccessType.FIELD)
public class JAXBDataGroupInfoActiveSetConfig
{
/** The history. */
@XmlElement(name = "activeHistory")
private final JAXBDataGroupInfoActiveHistoryList myHistoryList;
/** The set lock. */
private final transient ReentrantLock mySetLock = new ReentrantLock();
/** The sets. */
@XmlElement(name = "activeSet", required = true)
private final List<JAXBDataGroupInfoActiveSet> mySets;
/**
* Instantiates a new jAXB data group info active set config.
*/
public JAXBDataGroupInfoActiveSetConfig()
{
mySets = New.list();
myHistoryList = new JAXBDataGroupInfoActiveHistoryList();
}
/**
* Instantiates a new jAXB data group info active set config.
*
* @param other the other
*/
public JAXBDataGroupInfoActiveSetConfig(JAXBDataGroupInfoActiveSetConfig other)
{
Utilities.checkNull(other, "other");
mySets = New.list();
other.mySetLock.lock();
try
{
other.mySets.stream().map(JAXBDataGroupInfoActiveSet::new).forEach(mySets::add);
}
finally
{
other.mySetLock.unlock();
}
myHistoryList = new JAXBDataGroupInfoActiveHistoryList(other.myHistoryList);
}
/**
* Adds the set.
*
* @param setToAdd the set to add
*/
public void addSet(DataGroupInfoActiveSet setToAdd)
{
mySetLock.lock();
try
{
mySets.add(new JAXBDataGroupInfoActiveSet(setToAdd));
}
finally
{
mySetLock.unlock();
}
}
/**
* Clear activity history.
*/
public void clearActivityHistory()
{
myHistoryList.clearActivityHistory();
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
{
return true;
}
if (obj == null || getClass() != obj.getClass())
{
return false;
}
JAXBDataGroupInfoActiveSetConfig other = (JAXBDataGroupInfoActiveSetConfig)obj;
return EqualsHelper.equals(mySets, other.mySets, myHistoryList, other.myHistoryList);
}
/**
* Gets the activity history.
*
* @return the activity history
*/
public List<DataGroupInfoActiveHistoryRecord> getActivityHistory()
{
return myHistoryList.getActivityHistory();
}
/**
* Returns the first set found that has the given name.
*
* @param name the name to search with
* @return the first found {@link DataGroupInfoActiveSet} with the given
* name.
*/
public DataGroupInfoActiveSet getSetByName(String name)
{
Utilities.checkNull(name, "name");
mySetLock.lock();
try
{
return mySets.stream().filter(set -> name.equals(set.getName())).findFirst().orElse(null);
}
finally
{
mySetLock.unlock();
}
}
/**
* Gets the names of all the sets.
*
* @return the names
*/
public List<String> getSetNames()
{
List<String> result = New.list();
mySetLock.lock();
try
{
mySets.stream().map(s -> s.getName()).forEach(result::add);
}
finally
{
mySetLock.unlock();
}
return result.isEmpty() ? Collections.<String>emptyList() : Collections.unmodifiableList(result);
}
/**
* Gets an unmodifiable copy of the sets list.
*
* @return the sets
*/
public List<DataGroupInfoActiveSet> getSets()
{
List<DataGroupInfoActiveSet> result = null;
mySetLock.lock();
try
{
result = Collections.unmodifiableList(new ArrayList<DataGroupInfoActiveSet>(mySets));
}
finally
{
mySetLock.unlock();
}
return result;
}
    /**
     * {@inheritDoc}
     *
     * Hash is derived from the same two fields used by equals().
     */
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = 1;
        result = prime * result + (mySets == null ? 0 : mySets.hashCode());
        result = prime * result + (myHistoryList == null ? 0 : myHistoryList.hashCode());
        return result;
    }
    /**
     * Checks for name.
     *
     * Stops on first found, may be more than one.
     *
     * @param name the name
     * @return true, if a set with the given name exists
     */
    public boolean hasName(String name)
    {
        return getSetByName(name) != null;
    }
    /**
     * Note a set of data group info have been active in the history.
     *
     * @param dgiCollection the dgi collection to note active.
     */
    public void noteActive(Collection<DataGroupInfo> dgiCollection)
    {
        myHistoryList.noteActive(dgiCollection);
    }
    /**
     * Note that a data group info has been active.
     *
     * @param dgi the DataGroupInfo
     */
    public void noteActive(DataGroupInfo dgi)
    {
        myHistoryList.noteActive(dgi);
    }
/**
* Removes the set by name. Note will remove all sets with the specified
* name.
*
* @param setName the set name
* @return the sets that were removed.
*/
public Set<DataGroupInfoActiveSet> removeSet(String setName)
{
Utilities.checkNull(setName, "setName");
Set<DataGroupInfoActiveSet> removeSet = New.set();
mySetLock.lock();
try
{
mySets.stream().filter(set -> setName.equals(set.getName())).forEach(removeSet::add);
if (!removeSet.isEmpty())
{
mySets.removeAll(removeSet);
}
}
finally
{
mySetLock.unlock();
}
return removeSet.isEmpty() ? Collections.<DataGroupInfoActiveSet>emptySet() : Collections.unmodifiableSet(removeSet);
}
    /**
     * Sort and de-duplicate activity history. Delegates to the history list.
     */
    public void sortAndDeduplicateActivityHistory()
    {
        myHistoryList.sortAndDeduplicateActivityHistory();
    }
}
|
<filename>commands/down_test.go
package commands_test
import (
"testing"
"time"
. "github.com/metrue/fx/commands"
"github.com/metrue/fx/pkg/server"
"github.com/stretchr/testify/assert"
)
// TestDown starts an fx service server, waits for it to come up, and
// verifies that Down succeeds even when the requested function id does
// not exist.
func TestDown(t *testing.T) {
	addr := "localhost:23451"
	// Deliberately nonexistent id: Down is expected to return nil anyway.
	functions := []string{"id-should-not-exist"}
	s := server.NewFxServiceServer(addr)
	go func() {
		s.Start()
	}()
	// NOTE(review): fixed sleeps make this test slow and potentially flaky;
	// polling the address until it accepts connections would be more robust.
	time.Sleep(2 * time.Second)
	err := Down(addr, functions)
	assert.Nil(t, err)
	s.Stop()
	time.Sleep(2 * time.Second)
}
|
<reponame>ranliu98/ISYE-6669-Deterministic-Optimization<gh_stars>0
# Implement the Subset Selection problem
import gurobipy as gp
import numpy as np
from gurobipy import *
# Student id used to seed data generation. (Shadows the `id` builtin at
# module scope — harmless here, but worth knowing.)
id = 903515184
# Invoke Gurobi to solve the IP
def solve_ip(id, m=700, n=400, time_limit=30):
    """Build and solve the subset-selection IP with Gurobi.

    Args:
        id: seed passed to get_data (the student id).
        m, n: data dimensions; h is (m, n), c has length n.
        time_limit: Gurobi time limit in seconds.

    Returns:
        The optimized gurobipy Model.
    """
    gm = gp.Model("subset")
    h, c = get_data(id, m, n)
    # print(h.shape,c.shape) # (700, 400) (400,)
    # F[ni] = 1 iff column ni is selected (binary via lb/ub bounds on INTEGER vars).
    F = gm.addVars(n, lb=0, ub=1, vtype=GRB.INTEGER)
    # F_a = gm.addVars(m, n, lb=-GRB.INFINITY)
    # F_max[mi, ni] = 1 iff row mi is assigned to column ni.
    F_max = gm.addVars(m, n, lb=0, ub=1, vtype=GRB.INTEGER)
    # Maximize the h-value of each row's assigned column minus the cost of
    # the selected columns.
    gm.setObjective(sum([F_max[mi,ni]*h[mi,ni] for mi in range(m) for ni in range(n)]) - sum([c[ni]*F[ni] for ni in range(n)]), GRB.MAXIMIZE)
    # A row may only be assigned to a selected column.
    gm.addConstrs(F_max[mi, ni] <= F[ni] for ni in range(n) for mi in range(m))
    # Each row is assigned to exactly one column.
    gm.addConstrs(sum([F_max[mi,ni] for ni in range(n)]) == 1 for mi in range(m))
    #gm.addConstrs(F_max[mi] == max_([F_a[mi,ni] for ni in range(n)]) for mi in range(m))
    # Solve the model
    gm.Params.TimeLimit = time_limit
    gm.update()
    gm.optimize()
    return(gm)
# Generate the data (DO NOT MODIFY)
def get_data(id, m, n):
    """Generate deterministic data seeded by `id`.

    Returns:
        (h, c): h is an (m, n) array of |N(0, 1)| samples; c is a length-n
        array of |N(0, 1)| costs.
    """
    np.random.seed(id)
    h = abs(np.random.normal(size=(m, n)))
    c = abs(np.random.normal(size=n))
    return(h, c)
# Solve with the default problem size for this student id.
solve_ip(id=id)
package com.crowdin.client.sourcestrings.model;
import lombok.Data;
import java.util.Date;
@Data
public class SourceString {
    // API model: field names mirror the Crowdin source-strings REST schema;
    // Lombok @Data generates getters/setters/equals/hashCode/toString.
    private Long id;
    private Long projectId;
    private Long fileId;
    private String identifier;
    // Object (not String): presumably the API may return either a plain
    // string or a plural-forms map here — confirm against the API docs.
    private Object text;
    private String type;
    private String context;
    private Integer maxLength;
    private boolean isHidden;
    private Integer revision;
    private boolean hasPlurals;
    // Object for the same reason as `text` — shape depends on plural forms.
    private Object plurals;
    private boolean isIcu;
    private Date createdAt;
    private Date updatedAt;
}
|
#!/bin/bash
# Demo of bash indexed arrays.
# For more information on arrays see:
# https://www.gnu.org/software/bash/manual/html_node/Arrays.html
# http://tldp.org/LDP/Bash-Beginners-Guide/html/sect_10_02.html
# you can use `declare -a distros` as well but is not required
distros=(
'wheezy' # Debian 7
'jessie' # Debian 8
'sid' # Debian unstable
'quantal' # Ubuntu 12.10
'raring' # Ubuntu 13.04
'saucy' # Ubuntu 13.10
'trusty' # Ubuntu 14.04
'utopic' # Ubuntu 14.10
)
# remove first element
# unset distros[0]
# Referencing an array without an index expands to element 0.
echo "First value: $distros"
echo "Second value: ${distros[1]}"
echo
# ${distros[@]} expands to every element.
echo "All the values, in the same line: ${distros[@]}"
echo
echo "List of values, one per line:"
# Quoting "${distros[@]}" preserves elements containing whitespace.
for p in "${distros[@]}"; do
    echo "- $p"
done
|
import $axios from '@/core/services/api.service'
// Vuex module state factory (function form so each store instance gets a
// fresh object). `lecture` holds the currently edited/viewed form model.
const state = () => ({
  lectures: {
    data: []
  },
  classroom_lectures: [],
  comments: [],
  lecture: {
    id: '',
    title: '',
    body: '',
    subject_id: '',
    isactive: false,
    created_at: ''
  },
  // Current page for the lectures list and the classroom-lecture list.
  page: 1,
  classroom_lecture_page: 1
})
// Synchronous state mutations for the lectures module.
const mutations = {
  // Replace the paginated lectures payload wholesale.
  ASSIGN_DATA_LECTURES(state, payload) {
    state.lectures = payload
  },
  ASSIGN_DATA_COMMENTS(state, payload) {
    state.comments = payload
  },
  ASSIGN_DATA_LECTURES_CLASSROOM(state, payload) {
    state.classroom_lectures = payload
  },
  // Populate the edit form from a fetched lecture record.
  ASSIGN_FORM(state, payload) {
    state.lecture = {
      id: payload.id,
      title: payload.title,
      body: payload.body,
      subject_id: payload.subject_id,
      isactive: payload.isactive,
      addition: payload.addition,
      created_at: payload.created_at
    }
  },
  // Reset the form to its pristine state. Fix: `created_at` was previously
  // omitted here, so a reset after ASSIGN_FORM left a stale timestamp behind.
  CLEAR_FORM_LECTURE(state, payload) {
    state.lecture = {
      id: '',
      title: '',
      body: '',
      subject_id: '',
      isactive: false,
      created_at: ''
    }
  },
  SET_PAGE(state, payload) {
    state.page = payload
  },
  SET_CLASSROOM_LECTURE_PAGE(state, payload) {
    state.classroom_lecture_page = payload
  }
}
// Async actions: each toggles the root loading flag, performs the API call
// via $axios, and resolves/rejects with the server payload.
// NOTE(review): wrapping an async executor inside `new Promise` is an
// anti-pattern (a throw before try/catch would be swallowed); kept as-is to
// preserve the module's established style and behavior.
const actions = {
  getDataLectures({ commit, state }, payload) {
    commit('SET_LOADING', true, { root: true })
    let perPage = typeof payload.perPage != 'undefined' ? payload.perPage : ''
    let search = typeof payload.search != 'undefined' ? payload.search : ''
    return new Promise(async(resolve, reject) => {
      try {
        let network = await $axios.get(`lectures?page=${state.page}&perPage=${perPage}&q=${search}`)
        commit('ASSIGN_DATA_LECTURES', network.data.data)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  // Posts the current form state as multipart/form-data (supports uploads).
  createNewLecture({ commit, state }) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async(resolve, reject) => {
      try {
        let formData = new FormData()
        for(let key in state.lecture) {
          formData.append(key, state.lecture[key])
        }
        let network = await $axios.post(`lectures`, formData, {
          headers: {
            'content-type': 'multipart/form-data'
          }
        })
        commit('CLEAR_ERROR', true, { root: true })
        commit('CLEAR_FORM_LECTURE', true)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        // 422 = validation failure; surface field errors to the root store.
        if (error.response && error.response.status == 422) {
          commit('SET_ERRORS', error.response.data.errors, { root: true })
        }
        commit('SET_LOADING', false , { root: true })
        reject(error.response.data)
      }
    })
  },
  getDataLecture({ commit }, payload) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async(resolve, reject) => {
      try {
        let network = await $axios.get(`lectures/${payload}`)
        commit('ASSIGN_FORM', network.data.data)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  deleteDataLecture({ commit }, payload) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async(resolve, reject) => {
      try {
        let network = await $axios.delete(`lectures/${payload}`)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  updateDataLecture({ commit, state }, payload) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async(resolve, reject) => {
      try {
        let network = await $axios.put(`lectures/${state.lecture.id}`, state.lecture)
        commit('CLEAR_ERROR', true, { root: true })
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        if (error.response && error.response.status == 422) {
          commit('SET_ERRORS', error.response.data.errors, { root: true })
        }
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  getDataComments({ commit }, payload) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async (resolve, reject) => {
      try {
        let network = await $axios.get(`lectures/${payload}/comment`)
        commit('ASSIGN_DATA_COMMENTS', network.data.data)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  createNewComment({ commit }, payload) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async (resolve, reject) => {
      try {
        let network = await $axios.post(`lectures/${payload.lecture_id}/comment`, payload)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  getDataLectureClassroom({ commit, state }, payload) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async (resolve, reject) => {
      try {
        let network = await $axios.get(`classrooms/${payload}/lecture?page=${state.classroom_lecture_page}`)
        commit('ASSIGN_DATA_LECTURES_CLASSROOM', network.data.data)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  deleteShareLecture({ commit }, payload) {
    // Fix: this action cleared the loading flag but never set it, unlike
    // every sibling action — the spinner state could get out of sync.
    commit('SET_LOADING', true, { root: true })
    return new Promise(async (resolve, reject) => {
      try {
        let network = await $axios.delete(`lectures/sharee/${payload}`)
        commit('SET_LOADING', false, { root: true })
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  },
  shareeLectureToClassroom({ commit }, payload) {
    commit('SET_LOADING', true, { root: true })
    return new Promise(async (resolve, reject) => {
      try {
        let network = await $axios.post(`lectures/${payload.id}/sharee`, payload.data)
        commit('SET_LOADING', false, { root: true})
        resolve(network.data)
      } catch (error) {
        commit('SET_LOADING', false, { root: true })
        reject(error.response.data)
      }
    })
  }
}
// Namespaced Vuex module export (no getters defined).
export default {
  namespaced: true,
  state,
  mutations,
  actions
}
// Copyright 2021 the Exposure Notifications Verification Server authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package database
import (
"testing"
"time"
keyserver "github.com/google/exposure-notifications-server/pkg/api/v1"
"github.com/google/go-cmp/cmp"
)
// TestCompositeStats_MarshalCSV verifies both the CSV and JSON marshaling of
// CompositeStats across three shapes of input: realm + keyserver stats,
// keyserver-only, and realm-only. The expected strings are golden fixtures
// and must match byte-for-byte (including the trailing newline in the CSV).
func TestCompositeStats_MarshalCSV(t *testing.T) {
	t.Parallel()
	cases := []struct {
		name    string
		stats   CompositeStats
		expCSV  string
		expJSON string
	}{
		{
			// nil stats marshal to an empty CSV and an empty JSON object.
			name:    "empty",
			stats:   nil,
			expCSV:  ``,
			expJSON: `{}`,
		},
		{
			// Both realm and keyserver stats present for the same day.
			name: "single",
			stats: []*CompositeDay{
				{
					Day: time.Date(2020, 2, 3, 0, 0, 0, 0, time.UTC),
					RealmStats: &RealmStat{
						Date:                     time.Date(2020, 2, 3, 0, 0, 0, 0, time.UTC),
						RealmID:                  1,
						CodesIssued:              10,
						CodesClaimed:             9,
						CodesInvalid:             1,
						CodesInvalidByOS:         []int64{0, 1, 0},
						UserReportsIssued:        3,
						UserReportsClaimed:       2,
						TokensClaimed:            7,
						TokensInvalid:            2,
						UserReportTokensClaimed:  2,
						CodeClaimMeanAge:         FromDuration(time.Minute),
						CodeClaimAgeDistribution: []int32{1, 3, 4},
					},
					KeyServerStats: &keyserver.StatsDay{
						Day: time.Date(2020, 2, 3, 0, 0, 0, 0, time.UTC),
						PublishRequests: keyserver.PublishRequests{
							UnknownPlatform: 2,
							Android:         39,
							IOS:             12,
						},
						TotalTEKsPublished:        49,
						RevisionRequests:          3,
						TEKAgeDistribution:        []int64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14},
						OnsetToUploadDistribution: nil,
						RequestsMissingOnsetDate:  2,
					},
				},
			},
			expCSV: `date,codes_issued,codes_claimed,codes_invalid,tokens_claimed,tokens_invalid,code_claim_mean_age_seconds,code_claim_age_distribution,publish_requests_unknown,publish_requests_android,publish_requests_ios,total_teks_published,requests_with_revisions,requests_missing_onset_date,tek_age_distribution,onset_to_upload_distribution,user_reports_issued,user_reports_claimed,user_report_tokens_claimed,codes_invalid_unknown_os,codes_invalid_ios,codes_invalid_android
2020-02-03,10,9,1,7,2,60,1|3|4,2,39,12,49,3,2,0|1|2|3|4|5|6|7|8|9|10|11|12|13|14,,3,2,2,0,1,0
`,
			expJSON: `{"realm_id":1,"has_key_server_stats":true,"statistics":[{"date":"2020-02-03T00:00:00Z","data":{"codes_issued":10,"codes_claimed":9,"codes_invalid":1,"codes_invalid_by_os":{"unknown_os":0,"ios":1,"android":0},"user_reports_issued":3,"user_reports_claimed":2,"tokens_claimed":7,"tokens_invalid":2,"user_report_tokens_claimed":2,"code_claim_mean_age_seconds":60,"code_claim_age_distribution":[1,3,4],"day":"0001-01-01T00:00:00Z","publish_requests":{"unknown":2,"android":39,"ios":12},"total_teks_published":49,"requests_with_revisions":3,"tek_age_distribution":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],"onset_to_upload_distribution":null,"requests_missing_onset_date":2,"total_publish_requests":53}}]}`,
		},
		{
			// Keyserver stats only: realm columns are empty in the CSV.
			name: "no_realm_stats",
			stats: []*CompositeDay{
				{
					Day: time.Date(2020, 2, 3, 0, 0, 0, 0, time.UTC),
					KeyServerStats: &keyserver.StatsDay{
						Day: time.Date(2020, 2, 3, 0, 0, 0, 0, time.UTC),
						PublishRequests: keyserver.PublishRequests{
							UnknownPlatform: 2,
							Android:         39,
							IOS:             12,
						},
						TotalTEKsPublished:        49,
						RevisionRequests:          3,
						TEKAgeDistribution:        []int64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14},
						OnsetToUploadDistribution: nil,
						RequestsMissingOnsetDate:  2,
					},
				},
			},
			expCSV: `date,codes_issued,codes_claimed,codes_invalid,tokens_claimed,tokens_invalid,code_claim_mean_age_seconds,code_claim_age_distribution,publish_requests_unknown,publish_requests_android,publish_requests_ios,total_teks_published,requests_with_revisions,requests_missing_onset_date,tek_age_distribution,onset_to_upload_distribution,user_reports_issued,user_reports_claimed,user_report_tokens_claimed,codes_invalid_unknown_os,codes_invalid_ios,codes_invalid_android
2020-02-03,,,,,,,,2,39,12,49,3,2,0|1|2|3|4|5|6|7|8|9|10|11|12|13|14,,,,,,,
`,
			expJSON: `{"realm_id":0,"has_key_server_stats":true,"statistics":[{"date":"2020-02-03T00:00:00Z","data":{"codes_issued":0,"codes_claimed":0,"codes_invalid":0,"codes_invalid_by_os":{"unknown_os":0,"ios":0,"android":0},"user_reports_issued":0,"user_reports_claimed":0,"tokens_claimed":0,"tokens_invalid":0,"user_report_tokens_claimed":0,"code_claim_mean_age_seconds":0,"code_claim_age_distribution":null,"day":"0001-01-01T00:00:00Z","publish_requests":{"unknown":2,"android":39,"ios":12},"total_teks_published":49,"requests_with_revisions":3,"tek_age_distribution":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14],"onset_to_upload_distribution":null,"requests_missing_onset_date":2,"total_publish_requests":53}}]}`,
		},
		{
			// Realm stats only: keyserver columns are empty in the CSV.
			name: "no_keyserver_stats",
			stats: []*CompositeDay{
				{
					Day: time.Date(2020, 2, 3, 0, 0, 0, 0, time.UTC),
					RealmStats: &RealmStat{
						Date:                     time.Date(2020, 2, 3, 0, 0, 0, 0, time.UTC),
						RealmID:                  1,
						CodesIssued:              10,
						CodesClaimed:             9,
						CodesInvalid:             1,
						CodesInvalidByOS:         []int64{0, 1, 0},
						UserReportsIssued:        3,
						UserReportsClaimed:       2,
						TokensClaimed:            7,
						TokensInvalid:            2,
						UserReportTokensClaimed:  2,
						CodeClaimMeanAge:         FromDuration(time.Minute),
						CodeClaimAgeDistribution: []int32{1, 3, 4},
					},
				},
			},
			expCSV: `date,codes_issued,codes_claimed,codes_invalid,tokens_claimed,tokens_invalid,code_claim_mean_age_seconds,code_claim_age_distribution,publish_requests_unknown,publish_requests_android,publish_requests_ios,total_teks_published,requests_with_revisions,requests_missing_onset_date,tek_age_distribution,onset_to_upload_distribution,user_reports_issued,user_reports_claimed,user_report_tokens_claimed,codes_invalid_unknown_os,codes_invalid_ios,codes_invalid_android
2020-02-03,10,9,1,7,2,60,1|3|4,,,,,,,,,3,2,2,0,1,0
`,
			expJSON: `{"realm_id":1,"has_key_server_stats":false,"statistics":[{"date":"2020-02-03T00:00:00Z","data":{"codes_issued":10,"codes_claimed":9,"codes_invalid":1,"codes_invalid_by_os":{"unknown_os":0,"ios":1,"android":0},"user_reports_issued":3,"user_reports_claimed":2,"tokens_claimed":7,"tokens_invalid":2,"user_report_tokens_claimed":2,"code_claim_mean_age_seconds":60,"code_claim_age_distribution":[1,3,4],"day":"0001-01-01T00:00:00Z","publish_requests":{"unknown":0,"android":0,"ios":0},"total_teks_published":0,"requests_with_revisions":0,"tek_age_distribution":null,"onset_to_upload_distribution":null,"requests_missing_onset_date":0,"total_publish_requests":0}}]}`,
		},
	}
	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()
			b, err := tc.stats.MarshalCSV()
			if err != nil {
				t.Fatal(err)
			}
			if diff := cmp.Diff(string(b), tc.expCSV); diff != "" {
				t.Errorf("bad csv (+got, -want): %s", diff)
			}
			b, err = tc.stats.MarshalJSON()
			if err != nil {
				t.Fatal(err)
			}
			if got, want := string(b), tc.expJSON; got != want {
				t.Errorf("bad json, expected \n%s\nto be\n%s\n", got, want)
			}
		})
	}
}
|
'use strict';

// Register French (fr_FR) locale strings for angular-timeago.
// %d is substituted with the numeric quantity at render time; the string
// values are user-facing runtime data and must stay in French.
angular.module('yaru22.angular-timeago').config(function(timeAgoSettings) {
  timeAgoSettings.strings['fr_FR'] = {
    prefixAgo: 'il y a',
    prefixFromNow: 'dans',
    suffixAgo: null,
    suffixFromNow: null,
    seconds: 'moins d\'une minute',
    minute: 'environ une minute',
    minutes: '%d minutes',
    hour: 'environ une heure',
    hours: 'environ %d heures',
    day: 'un jour',
    days: '%d jours',
    month: 'environ un mois',
    months: '%d mois',
    year: 'environ un an',
    years: '%d ans',
    numbers: []
  };
});
|
<reponame>ChrisWcs/sprites<gh_stars>0
import React from 'react';
import PropTypes from 'prop-types';
import Column from '../styledcomps/Column';
import Row from '../styledcomps/Row';
import Input from '../styledcomps/Input';
// Stateless RGB color picker: three text inputs bound to color.r/g/b.
// `handleChange` is a curried handler — handleChange('r') must return the
// onChange callback for that channel.
const ColorPicker = ({handleChange, color}) => (
  <Column>
    <Row>r =><Input type="text" value={color.r} onChange={handleChange('r')}/></Row>
    <Row>g =><Input type="text" value={color.g} onChange={handleChange('g')}/></Row>
    <Row>b =><Input type="text" value={color.b} onChange={handleChange('b')}/></Row>
  </Column>
);
ColorPicker.propTypes = {
  handleChange: PropTypes.func,
  color: PropTypes.object
};
export default ColorPicker;
<reponame>anticipasean/girakkafunc
package cyclops.reactor.container.transformer;
import cyclops.container.filterable.Filterable;
import cyclops.container.MonadicValue;
import com.oath.cyclops.anym.transformers.ValueTransformer;
import cyclops.container.transformable.To;
import cyclops.container.transformable.Transformable;
import cyclops.container.control.Eval;
import cyclops.function.enhanced.Function3;
import cyclops.function.enhanced.Function4;
import cyclops.monads.AnyM;
import cyclops.monads.WitnessType;
import cyclops.reactive.ReactiveSeq;
import cyclops.container.immutable.tuple.Tuple;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.container.immutable.tuple.Tuple3;
import cyclops.container.immutable.tuple.Tuple4;
import org.reactivestreams.Publisher;
import java.util.Iterator;
import java.util.function.*;
/**
* Monad Transformer for Eval's
*
* EvalT allows the deeply wrapped Eval to be manipulating within it's nested /contained context
*
* @author johnmcclean
*
* @param <T> Type of data stored inside the nested Eval(s)
*/
public final class EvalT<W extends WitnessType<W>,T> extends ValueTransformer<W,T>
implements To<EvalT<W,T>>,
Transformable<T>, Filterable<T> {
private final AnyM<W,Eval<T>> run;
    @Override
    public Iterator<T> iterator() {
        return stream().iterator();
    }
    @Override
    public ReactiveSeq<T> stream() {
        // Forces each nested Eval via get() while streaming.
        return run.stream().map(Eval::get);
    }
    /**
     * @return The wrapped AnyM
     */
    @Override
    public AnyM<W,Eval<T>> unwrap() {
        return run;
    }
    /**
     * Unwrap and convert in one step.
     *
     * @param fn conversion applied to the wrapped AnyM
     * @return result of the conversion
     */
    public <R> R unwrapTo(Function<? super AnyM<W,Eval<T>>, ? extends R> fn) {
        return unwrap().to(fn);
    }
    // Private: instances are created via the static factories (of/fromAnyM).
    private EvalT(final AnyM<W,Eval<T>> run) {
        this.run = run;
    }
    @Override @Deprecated (/*DO NOT USE INTERNAL USE ONLY*/)
    protected <R> EvalT<W,R> unitAnyM(AnyM<W,? super MonadicValue<R>> traversable) {
        // Unchecked raw cast — internal plumbing required by ValueTransformer.
        return of((AnyM) traversable);
    }
    @Override
    public AnyM<W,? extends MonadicValue<T>> transformerStream() {
        return run;
    }
    @Override
    public EvalT<W,T> filter(final Predicate<? super T> test) {
        // Pairs each value with its predicate result inside the Eval, filters
        // on the flag (forcing the Eval via get()), then drops the flag.
        return of(run.map(f->f.map(in->Tuple.tuple(in,test.test(in))))
                     .filter( f->f.get()._2() )
                     .map( f->f.map(in->in._1())));
    }
    /**
     * Peek at the current value of the Eval
     * <pre>
     * {@code
     *    EvalWT.of(AnyM.fromStream(Arrays.asEvalW(10))
     *             .peek(System.out::println);
     *
     *     //prints 10
     * }
     * </pre>
     *
     * @param peek Consumer to accept current value of Eval
     * @return EvalWT with peek call
     */
    @Override
    public EvalT<W,T> peek(final Consumer<? super T> peek) {
        // Implemented via map: apply the side effect, return the value unchanged.
        return map(e->{
            peek.accept(e);
            return e;
        });
    }
    /**
     * Map the wrapped Eval
     *
     * <pre>
     * {@code
     *  EvalWT.of(AnyM.fromStream(Arrays.asEvalW(10))
     *             .map(t->t=t+1);
     *
     *
     *  //EvalWT<AnyMSeq<Stream<Eval[11]>>>
     * }
     * </pre>
     *
     * @param f Mapping function for the wrapped Eval
     * @return EvalWT that applies the transform function to the wrapped Eval
     */
    @Override
    public <B> EvalT<W,B> map(final Function<? super T, ? extends B> f) {
        return new EvalT<W,B>(
                              run.map(o -> o.map(f)));
    }
    /**
     * Flat Map the wrapped Eval
     * <pre>
     * {@code
     *  EvalWT.of(AnyM.fromStream(Arrays.asEvalW(10))
     *             .flatMap(t->Eval.completedEval(20));
     *
     *
     *  //EvalWT<AnyMSeq<Stream<Eval[20]>>>
     * }
     * </pre>
     * @param f FlatMap function
     * @return EvalWT that applies the flatMap function to the wrapped Eval
     */
    public <B> EvalT<W,B> flatMapT(final Function<? super T, EvalT<W,B>> f) {
        // NOTE(review): materializes the inner monad and takes element 0 —
        // this would throw on an empty inner monad; confirm callers only use
        // non-empty contexts.
        return of(run.map(Eval -> Eval.flatMap(a -> f.apply(a).run.stream()
                                                                  .toList()
                                                                  .get(0))));
    }
    // Unchecked covariant narrowing; safe because Eval is read-only here.
    private static <W extends WitnessType<W>,B> AnyM<W,Eval<B>> narrow(final AnyM<W,Eval<? extends B>> run) {
        return (AnyM) run;
    }
    public <B> EvalT<W,B> flatMap(final Function<? super T, ? extends MonadicValue<? extends B>> f) {
        final AnyM<W,Eval<? extends B>> mapped = run.map(o -> o.flatMap(f));
        return of(narrow(mapped));
    }
    /**
     * Lift a function into one that accepts and returns an EvalWT
     * This allows multiple monad types to add functionality to existing function and methods
     *
     * e.g. to add list handling / iteration (via Eval) and iteration (via Stream)  to an existing function
     * <pre>
     * {@code
        Function<Integer,Integer> add2 = i -> i+2;
        Function<EvalWT<Integer>, EvalWT<Integer>> optTAdd2 = EvalWT.lift(add2);
        Stream<Integer> withNulls = Stream.of(1,2,3);
        AnyMSeq<Integer> stream = AnyM.fromStream(withNulls);
        AnyMSeq<Eval<Integer>> streamOpt = stream.map(Eval::completedEval);
        List<Integer> results = optTAdd2.applyHKT(EvalWT.of(streamOpt))
                                        .unwrap()
                                        .<Stream<Eval<Integer>>>unwrap()
                                        .map(Eval::join)
                                        .collect(CyclopsCollectors.toList());
        //Eval.completedEval(List[3,4]);
     *
     *
     * }</pre>
     *
     *
     * @param fn Function to enhance with functionality from Eval and another monad type
     * @return Function that accepts and returns an EvalWT
     */
    public static <W extends WitnessType<W>,U, R> Function<EvalT<W,U>, EvalT<W,R>> lift(final Function<? super U, ? extends R> fn) {
        return optTu -> optTu.map(input -> fn.apply(input));
    }
    /**
     * Lift a BiFunction into one that accepts and returns  EvalWTs
     * This allows multiple monad types to add functionality to existing function and methods
     *
     * e.g. to add list handling / iteration (via Eval), iteration (via Stream)  and asynchronous execution (Eval)
     * to an existing function
     *
     * <pre>
     * {@code
        BiFunction<Integer,Integer,Integer> add = (a,b) -> a+b;
        BiFunction<EvalWT<Integer>,EvalWT<Integer>,EvalWT<Integer>> optTAdd2 = EvalWT.lift2(add);
        Stream<Integer> withNulls = Stream.of(1,2,3);
        AnyMSeq<Integer> stream = AnyM.ofMonad(withNulls);
        AnyMSeq<Eval<Integer>> streamOpt = stream.map(Eval::completedEval);
        Eval<Eval<Integer>> two = Eval.completedEval(Eval.completedEval(2));
        AnyMSeq<Eval<Integer>> Eval=  AnyM.fromEvalW(two);
        List<Integer> results = optTAdd2.applyHKT(EvalWT.of(streamOpt),EvalWT.of(Eval))
                                        .unwrap()
                                        .<Stream<Eval<Integer>>>unwrap()
                                        .map(Eval::join)
                                        .collect(CyclopsCollectors.toList());
        //Eval.completedEval(List[3,4,5]);
      }
      </pre>
     * @param fn BiFunction to enhance with functionality from Eval and another monad type
     * @return Function that accepts and returns an EvalWT
     */
    public static <W extends WitnessType<W>, U1, U2, R> BiFunction<EvalT<W,U1>, EvalT<W,U2>, EvalT<W,R>> lift2(
            final BiFunction<? super U1, ? super U2, ? extends R> fn) {
        // Implemented monadically: flatMapT over the first, map over the second.
        return (optTu1, optTu2) -> optTu1.flatMapT(input1 -> optTu2.map(input2 -> fn.apply(input1, input2)));
    }
    /**
     * Construct an EvalWT from an AnyM that contains a monad type that contains type other than Eval
     * The values in the underlying monad will be mapped to Eval<A>
     *
     * @param anyM AnyM that doesn't contain a monad wrapping an Eval
     * @return EvalWT
     */
    public static <W extends WitnessType<W>,A> EvalT<W,A> fromAnyM(final AnyM<W,A> anyM) {
        // Wraps each raw value in an already-evaluated Eval (Eval::now).
        return of(anyM.map(Eval::now));
    }
    /**
     * Construct an EvalWT from an AnyM that wraps a monad containing  EvalWs
     *
     * @param monads AnyM that contains a monad wrapping an Eval
     * @return EvalWT
     */
    public static <W extends WitnessType<W>,A> EvalT<W,A> of(final AnyM<W,Eval<A>> monads) {
        return new EvalT<>(
                           monads);
    }
    /*
     * (non-Javadoc)
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return String.format("EvalT[%s]", run.unwrap().toString());
    }
    /**
     * Lift an Iterable into this transformer's context, wrapping each element
     * in an evaluated Eval.
     */
    public <R> EvalT<W,R> unitIterable(final Iterable<R> it) {
        return of(run.unitIterable(it)
                     .map(i -> Eval.now(i)));
    }
    @Override
    public <R> EvalT<W,R> unit(final R value) {
        return of(run.unit(Eval.now(value)));
    }
    @Override
    public <R> EvalT<W,R> empty() {
        // Empty Eval inside a unit outer context.
        return of(run.unit(Eval.<R>eval()));
    }
    @Override
    public int hashCode() {
        // Delegates to the wrapped AnyM, consistent with equals below.
        return run.hashCode();
    }
    @Override
    public boolean equals(final Object o) {
        // Raw-type comparison: any EvalT with an equal wrapped AnyM is equal.
        if (o instanceof EvalT) {
            return run.equals(((EvalT) o).run);
        }
        return false;
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#iterate(java.util.function.UnaryOperator)
     */
    // The overrides below exist only to narrow the superclass return types
    // back to EvalT; the logic lives in ValueTransformer.
    @Override
    public AnyM<W, ? extends ReactiveSeq<T>> iterate(UnaryOperator<T> fn, T alt) {
        return super.iterate(fn,alt);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#generate()
     */
    @Override
    public AnyM<W, ? extends ReactiveSeq<T>> generate(T alt) {
        return super.generate(alt);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#zip(java.lang.Iterable, java.util.function.BiFunction)
     */
    @Override
    public <T2, R> EvalT<W, R> zip(Iterable<? extends T2> iterable,
                                   BiFunction<? super T, ? super T2, ? extends R> fn) {
        return (EvalT<W, R>)super.zip(iterable, fn);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#zip(java.util.function.BiFunction, org.reactivestreams.Publisher)
     */
    @Override
    public <T2, R> EvalT<W, R> zip(BiFunction<? super T, ? super T2, ? extends R> fn, Publisher<? extends T2> publisher) {
        return (EvalT<W, R>)super.zip(fn, publisher);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#zip(java.lang.Iterable)
     */
    @Override
    public <U> EvalT<W, Tuple2<T, U>> zip(Iterable<? extends U> other) {
        return (EvalT)super.zip(other);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#forEach4(java.util.function.Function, java.util.function.BiFunction, com.oath.cyclops.util.function.TriFunction, com.oath.cyclops.util.function.QuadFunction)
     */
    // forEach2/3/4: nested for-comprehension support; these overrides only
    // narrow the superclass return type to EvalT.
    @Override
    public <T2, R1, R2, R3, R> EvalT<W, R> forEach4(Function<? super T, ? extends MonadicValue<R1>> value1,
                                                    BiFunction<? super T, ? super R1, ? extends MonadicValue<R2>> value2,
                                                    Function3<? super T, ? super R1, ? super R2, ? extends MonadicValue<R3>> value3,
                                                    Function4<? super T, ? super R1, ? super R2, ? super R3, ? extends R> yieldingFunction) {
        return (EvalT<W, R>)super.forEach4(value1, value2, value3, yieldingFunction);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#forEach4(java.util.function.Function, java.util.function.BiFunction, com.oath.cyclops.util.function.TriFunction, com.oath.cyclops.util.function.QuadFunction, com.oath.cyclops.util.function.QuadFunction)
     */
    @Override
    public <T2, R1, R2, R3, R> EvalT<W, R> forEach4(Function<? super T, ? extends MonadicValue<R1>> value1,
                                                    BiFunction<? super T, ? super R1, ? extends MonadicValue<R2>> value2,
                                                    Function3<? super T, ? super R1, ? super R2, ? extends MonadicValue<R3>> value3,
                                                    Function4<? super T, ? super R1, ? super R2, ? super R3, Boolean> filterFunction,
                                                    Function4<? super T, ? super R1, ? super R2, ? super R3, ? extends R> yieldingFunction) {
        return (EvalT<W, R>)super.forEach4(value1, value2, value3, filterFunction, yieldingFunction);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#forEach3(java.util.function.Function, java.util.function.BiFunction, com.oath.cyclops.util.function.TriFunction)
     */
    @Override
    public <T2, R1, R2, R> EvalT<W, R> forEach3(Function<? super T, ? extends MonadicValue<R1>> value1,
                                                BiFunction<? super T, ? super R1, ? extends MonadicValue<R2>> value2,
                                                Function3<? super T, ? super R1, ? super R2, ? extends R> yieldingFunction) {
        return (EvalT<W, R>)super.forEach3(value1, value2, yieldingFunction);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#forEach3(java.util.function.Function, java.util.function.BiFunction, com.oath.cyclops.util.function.TriFunction, com.oath.cyclops.util.function.TriFunction)
     */
    @Override
    public <T2, R1, R2, R> EvalT<W, R> forEach3(Function<? super T, ? extends MonadicValue<R1>> value1,
                                                BiFunction<? super T, ? super R1, ? extends MonadicValue<R2>> value2,
                                                Function3<? super T, ? super R1, ? super R2, Boolean> filterFunction,
                                                Function3<? super T, ? super R1, ? super R2, ? extends R> yieldingFunction) {
        return (EvalT<W, R>)super.forEach3(value1, value2, filterFunction, yieldingFunction);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#forEach2(java.util.function.Function, java.util.function.BiFunction)
     */
    @Override
    public <R1, R> EvalT<W, R> forEach2(Function<? super T, ? extends MonadicValue<R1>> value1,
                                        BiFunction<? super T, ? super R1, ? extends R> yieldingFunction) {
        return (EvalT<W, R>)super.forEach2(value1, yieldingFunction);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#forEach2(java.util.function.Function, java.util.function.BiFunction, java.util.function.BiFunction)
     */
    @Override
    public <R1, R> EvalT<W, R> forEach2(Function<? super T, ? extends MonadicValue<R1>> value1,
                                        BiFunction<? super T, ? super R1, Boolean> filterFunction,
                                        BiFunction<? super T, ? super R1, ? extends R> yieldingFunction) {
        return (EvalT<W, R>)super.forEach2(value1, filterFunction, yieldingFunction);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#concatMap(java.util.function.Function)
     */
    @Override
    public <R> EvalT<W, R> concatMap(Function<? super T, ? extends Iterable<? extends R>> mapper) {
        return (EvalT<W, R>)super.concatMap(mapper);
    }
    /* (non-Javadoc)
     * @see cyclops2.monads.transformers.values.ValueTransformer#flatMapP(java.util.function.Function)
     */
    @Override
    public <R> EvalT<W, R> mergeMap(Function<? super T, ? extends Publisher<? extends R>> mapper) {
        return (EvalT<W, R>)super.mergeMap(mapper);
    }
public <T2, R1, R2, R3, R> EvalT<W,R> forEach4M(Function<? super T, ? extends EvalT<W,R1>> value1,
BiFunction<? super T, ? super R1, ? extends EvalT<W,R2>> value2,
Function3<? super T, ? super R1, ? super R2, ? extends EvalT<W,R3>> value3,
Function4<? super T, ? super R1, ? super R2, ? super R3, ? extends R> yieldingFunction) {
return this.flatMapT(in->value1.apply(in)
.flatMapT(in2-> value2.apply(in,in2)
.flatMapT(in3->value3.apply(in,in2,in3)
.map(in4->yieldingFunction.apply(in,in2,in3,in4)))));
}
/**
 * Monad-transformer four-level for-comprehension with filtering: the
 * four bound values are only yielded when {@code filterFunction}
 * returns true.
 */
public <T2, R1, R2, R3, R> EvalT<W,R> forEach4M(Function<? super T, ? extends EvalT<W,R1>> value1,
                                                BiFunction<? super T, ? super R1, ? extends EvalT<W,R2>> value2,
                                                Function3<? super T, ? super R1, ? super R2, ? extends EvalT<W,R3>> value3,
                                                Function4<? super T, ? super R1, ? super R2, ? super R3, Boolean> filterFunction,
                                                Function4<? super T, ? super R1, ? super R2, ? super R3, ? extends R> yieldingFunction) {
    return this.flatMapT(a -> value1.apply(a)
            .flatMapT(b -> value2.apply(a, b)
                    .flatMapT(c -> value3.apply(a, b, c)
                            .filter(d -> filterFunction.apply(a, b, c, d))
                            .map(d -> yieldingFunction.apply(a, b, c, d)))));
}
/**
 * Monad-transformer three-level for-comprehension over EvalT values;
 * {@code yieldingFunction} combines the three bound values.
 */
public <T2, R1, R2, R> EvalT<W,R> forEach3M(Function<? super T, ? extends EvalT<W,R1>> value1,
                                            BiFunction<? super T, ? super R1, ? extends EvalT<W,R2>> value2,
                                            Function3<? super T, ? super R1, ? super R2, ? extends R> yieldingFunction) {
    return this.flatMapT(a -> value1.apply(a)
            .flatMapT(b -> value2.apply(a, b)
                    .map(c -> yieldingFunction.apply(a, b, c))));
}
/**
 * Monad-transformer three-level for-comprehension with filtering: the
 * triple is only yielded when {@code filterFunction} returns true.
 */
public <T2, R1, R2, R> EvalT<W,R> forEach3M(Function<? super T, ? extends EvalT<W,R1>> value1,
                                            BiFunction<? super T, ? super R1, ? extends EvalT<W,R2>> value2,
                                            Function3<? super T, ? super R1, ? super R2, Boolean> filterFunction,
                                            Function3<? super T, ? super R1, ? super R2, ? extends R> yieldingFunction) {
    return this.flatMapT(a -> value1.apply(a)
            .flatMapT(b -> value2.apply(a, b)
                    .filter(c -> filterFunction.apply(a, b, c))
                    .map(c -> yieldingFunction.apply(a, b, c))));
}
/**
 * Monad-transformer two-level for-comprehension: binds this value into
 * {@code value1} and combines the pair with {@code yieldingFunction}.
 */
public <R1, R> EvalT<W,R> forEach2M(Function<? super T, ? extends EvalT<W,R1>> value1,
                                    BiFunction<? super T, ? super R1, ? extends R> yieldingFunction) {
    return this.flatMapT(a -> value1.apply(a)
            .map(b -> yieldingFunction.apply(a, b)));
}
/**
 * Monad-transformer two-level for-comprehension with filtering: the
 * pair is only yielded when {@code filterFunction} returns true.
 */
public <R1, R> EvalT<W,R> forEach2M(Function<? super T, ? extends EvalT<W,R1>> value1,
                                    BiFunction<? super T, ? super R1, Boolean> filterFunction,
                                    BiFunction<? super T, ? super R1, ? extends R> yieldingFunction) {
    return this.flatMapT(a -> value1.apply(a)
            .filter(b -> filterFunction.apply(a, b))
            .map(b -> yieldingFunction.apply(a, b)));
}
/**
 * Returns a String rendering of this transformer; currently just
 * delegates to {@link #toString()}.
 */
public String mkString(){
return toString();
}
/**
 * Keeps only values that are instances of {@code type}; delegates to
 * the {@code Filterable} default and narrows the result to EvalT.
 */
@Override
public <U> EvalT<W,U> ofType(Class<? extends U> type) {
return (EvalT<W,U>) Filterable.super.ofType(type);
}
/**
 * Keeps only values for which {@code predicate} is false; delegates to
 * the {@code Filterable} default and narrows the result to EvalT.
 */
@Override
public EvalT<W,T> filterNot(Predicate<? super T> predicate) {
return (EvalT<W,T>) Filterable.super.filterNot(predicate);
}
/**
 * Filters out null values; delegates to the {@code Filterable} default
 * and narrows the result to EvalT.
 */
@Override
public EvalT<W,T> notNull() {
return (EvalT<W,T>) Filterable.super.notNull();
}
/**
 * Zips this transformer's value with the first value emitted by
 * {@code other}, producing a Tuple2. Delegates to the superclass.
 */
@Override
public <U> EvalT<W,Tuple2<T, U>> zipWithPublisher(Publisher<? extends U> other) {
return (EvalT)super.zipWithPublisher(other);
}
/**
 * Zips this value with one element from each Iterable into a Tuple3.
 * Delegates to the superclass.
 */
@Override
public <S, U> EvalT<W,Tuple3<T, S, U>> zip3(Iterable<? extends S> second, Iterable<? extends U> third) {
return (EvalT)super.zip3(second,third);
}
/**
 * Zips this value with one element from each Iterable, combining the
 * three values with {@code fn3}. Delegates to the superclass.
 */
@Override
public <S, U, R> EvalT<W,R> zip3(Iterable<? extends S> second, Iterable<? extends U> third, Function3<? super T, ? super S, ? super U, ? extends R> fn3) {
return (EvalT<W,R>)super.zip3(second,third, fn3);
}
/**
 * Zips this value with one element from each of three Iterables into a
 * Tuple4. Delegates to the superclass.
 */
@Override
public <T2, T3, T4> EvalT<W,Tuple4<T, T2, T3, T4>> zip4(Iterable<? extends T2> second, Iterable<? extends T3> third, Iterable<? extends T4> fourth) {
return (EvalT)super.zip4(second,third,fourth);
}
/**
 * Zips this value with one element from each of three Iterables,
 * combining the four values with {@code fn}. Delegates to the
 * superclass.
 */
@Override
public <T2, T3, T4, R> EvalT<W,R> zip4(Iterable<? extends T2> second, Iterable<? extends T3> third, Iterable<? extends T4> fourth, Function4<? super T, ? super T2, ? super T3, ? super T4, ? extends R> fn) {
return (EvalT<W,R>)super.zip4(second,third,fourth,fn);
}
}
|
# Train the localisation-score RefineDet variant (input size 512) on COCO
# using GPUs 0 and 1; checkpoints go to weights_locscore/RefineDet512/.
CUDA_VISIBLE_DEVICES=0,1 python train_refinedet_locscore.py --save_folder weights_locscore/RefineDet512/ --input_size 512 --dataset COCO --dataset_root ../pytorch-retinanet-master_new/datasets/COCO
|
#!/bin/bash
# Ensemble-member setup: links namelists and (possibly perturbed)
# initial/climatology fields for one OpenIFS ensemble member.
# NOTE(review): relies on caller-exported variables: cdate, SUBDIR_NAME,
# WORK, SCRI, INIPERT, IFSDATA, RES, EXPS, GRIBTOOLS, OIFSv,
# INIPERT_TYPE (and optionally INIFAIR, INIPERT_AMPLITUDE_SV/EDA)
# -- confirm with the driving script.
# Get date (first argument, falling back to $cdate from the environment)
date=${1:-$cdate}
# Get process id: the member number is the suffix of the current
# working directory after $SUBDIR_NAME
nid=$(pwd | grep -o -P "$SUBDIR_NAME.{0,5}" | sed -e "s/$SUBDIR_NAME//g")
# Log
echo `date +%H:%M:%S` link " " $SUBDIR_NAME${nid} >> $WORK/master.log
# pert000 is ctrl
if [ $nid -eq 0 ]; then
name=ctrl
else
name=$SUBDIR_NAME$nid
fi
# Copy namelist (OpenIFS reads its namelist from fort.4)
cp -f $SCRI/namelist.$name fort.4
# Under which alias the unperturbed ini is
ename=oifs
# Set initial perturbations off if INIPERT variable is not set
if [ -z $INIPERT ]; then
INIPERT=0
fi
# Clean directory
rm -f ICM*+00*
# Link or create day specific fields
if [ $RES -ne 21 ]; then # T21 does not need the climate file
ln -sf ${IFSDATA}/t${RES}/$date/ICMCL${ename}INIT.1 ICMCL${EXPS}INIT
fi
# Control member (nid 0) or perturbations disabled: link the
# unperturbed initial fields directly.  Otherwise build perturbed
# initial fields from EDA and/or SV perturbation files below.
if [ $nid -eq 0 ] || [ $INIPERT -eq 0 ]; then
ln -sf ${IFSDATA}/t${RES}/$date/ICMGG${ename}INIT ICMGG${EXPS}INIT
ln -sf ${IFSDATA}/t${RES}/$date/ggml$RES ICMGG${EXPS}INIUA
ln -sf ${IFSDATA}/t${RES}/$date/ICMSH${ename}INIT ICMSH${EXPS}INIT
else
# Pick odd initial state pertubations
if [ ! -z $INIFAIR ] && [ $INIFAIR -eq 1 ]; then
nid=$((10#$nid * 2 - 1)) # force into 10-base number with 10#
nid=$(printf '%03d' $nid)
fi
# Change initial state perturbation amplitude if defined.
# swi_* stays false when the amplitude is unset or exactly 1.0
# (i.e. no rescaling needed); amp_* holds the scaling factor.
# SV
if [ -z $INIPERT_AMPLITUDE_SV ] ; then
swi_sv=false
elif [ 1 -eq $(echo "$INIPERT_AMPLITUDE_SV == 1.0" | bc) ]; then
swi_sv=false
else
amp_sv=$INIPERT_AMPLITUDE_SV
swi_sv=true
fi
# EDA
if [ -z $INIPERT_AMPLITUDE_EDA ] ; then
swi_eda=false
elif [ 1 -eq $(echo "$INIPERT_AMPLITUDE_EDA == 1.0" | bc) ]; then
swi_eda=false
else
amp_eda=$INIPERT_AMPLITUDE_EDA
swi_eda=true
fi
# Paths to the unperturbed fields and this member's perturbation files
gginit=${IFSDATA}/t${RES}/$date/ICMGG${ename}INIT
gginiua=${IFSDATA}/t${RES}/$date/ggml$RES
shinit=${IFSDATA}/t${RES}/$date/ICMSH${ename}INIT
gginit_pert=${IFSDATA}/t${RES}/$date/psu_$nid
gginiua_pert=${IFSDATA}/t${RES}/$date/pan_$nid
shinit_pert=${IFSDATA}/t${RES}/$date/pua_$nid
sv_pert=${IFSDATA}/t${RES}/$date/pert_$nid
# GGINIT (grid-point surface fields)
if [ $INIPERT_TYPE == 'sv' ]; then
# Grid space perturbations are only from EDA.
# If only SVs requested use control fields instead.
ln -sf $gginit ICMGG${EXPS}INIT
elif $swi_eda ; then
# get perturbations (GRIB edition 1 needed for cdo arithmetic)
$GRIBTOOLS/grib_set -s edition=1 $gginit gginit
$GRIBTOOLS/grib_set -s edition=1 $gginit_pert gginit_pert
cdo -sub gginit_pert gginit pert
# change perturbation magnitude
cdo -mulc,$amp_eda pert pert_dot
# add back to unperturbed fields
cdo -add gginit pert_dot pert_fin
$GRIBTOOLS/grib_set -s edition=2 -s gridType=reduced_gg pert_fin ICMGG${EXPS}INIT
rm -f pert pert_dot gginit gginit_pert pert_fin
else
# Link original perturbation field
ln -sf $gginit_pert ICMGG${EXPS}INIT
fi
# GGINIUA (grid-point upper-air fields), same pattern as GGINIT
if [ $INIPERT_TYPE == 'sv' ]; then
# Grid space perturbations are only from EDA.
# If only SVs requested use control fields instead.
ln -sf $gginiua ICMGG${EXPS}INIUA
elif $swi_eda ; then
# get perturbations
$GRIBTOOLS/grib_set -s edition=1 $gginiua gginiua
$GRIBTOOLS/grib_set -s edition=1 $gginiua_pert gginiua_pert
cdo -sub gginiua_pert gginiua pert
# change perturbation magnitude
cdo -mulc,$amp_eda pert pert_dot
# add back to unperturbed fields
cdo -add gginiua pert_dot pert_fin
$GRIBTOOLS/grib_set -s edition=2 -s gridType=reduced_gg pert_fin ICMGG${EXPS}INIUA
rm -f pert pert_dot gginiua gginiua_pert pert_fin
else
# Link original perturbation field
ln -sf $gginiua_pert ICMGG${EXPS}INIUA
fi
# SHINIT (spectral fields): combine EDA and/or SV perturbations
# Get SV structures if not requesting both perturbations
if [ ! $INIPERT_TYPE == 'both' ] || $swi_eda || $swi_sv ; then
# get perturbations (work in grid-point space via sp2gpl)
cdo -sp2gpl $shinit tmp_init
cdo -sp2gpl $shinit_pert tmp_pert
cdo -sub tmp_pert tmp_init pert
rm -f tmp_pert
# extract SV structures
cdo -sp2gp $sv_pert tmp_pert
# need to separate the fields, CDO bugs out in multi-field substraction
cdo -selvar,t tmp_pert sv_t
cdo -selvar,d tmp_pert sv_d
cdo -selvar,vo tmp_pert sv_vo
cdo -selvar,lnsp tmp_pert sv_lnsp
rm -f tmp_pert
# separate input fields to match those in SVs
cdo -selvar,t pert pert_t
cdo -selvar,d pert pert_d
cdo -selvar,vo pert pert_vo
cdo -selvar,lnsp pert pert_lnsp
cdo -selvar,z pert pert_z
# change resolution to match other fields (bilinear remap weights)
cdo -genbil,pert_t sv_t grid
cdo -remap,pert_t,grid sv_t sv_t_hr
cdo -remap,pert_t,grid sv_d sv_d_hr
cdo -remap,pert_t,grid sv_vo sv_vo_hr
cdo -remap,pert_t,grid sv_lnsp sv_lnsp_hr
rm -f tmp grid sv_t sv_d sv_vo sv_lnsp
if [ $INIPERT_TYPE != 'sv' ]; then
# remove SVs from perts
cdo -sub pert_t sv_t_hr tmp_t
cdo -sub pert_d sv_d_hr tmp_d
cdo -sub pert_vo sv_vo_hr tmp_vo
cdo -sub pert_lnsp sv_lnsp_hr tmp_lnsp
cdo -merge tmp_t tmp_vo tmp_d tmp_lnsp pert_z pert_eda
rm -f tmp_t tmp_d tmp_vo tmp_lnsp pert_z
# Increase amplitude if requested
if $swi_eda ; then
cdo -mulc,$amp_eda pert_eda pert_eda_dot
rm -f pert_eda
else
mv pert_eda pert_eda_dot
fi
# Change name if not requesting both perturbation types
if [ $INIPERT_TYPE != 'both' ]; then
mv pert_eda_dot pert_dot
rm -f sv_*_hr
fi
fi
if [ $INIPERT_TYPE != 'eda' ]; then
# create a zero-field for z (SV files carry no z field)
cdo -selvar,z tmp_init ini_z
cdo -sub ini_z ini_z zero_z
rm -f ini_z
# merge fields
cdo -merge sv_t_hr sv_vo_hr sv_d_hr sv_lnsp_hr zero_z pert_sv
rm -f sv_*_hr zero_z
# Increase amplitude if requested
if $swi_sv ; then
cdo -mulc,$amp_sv pert_sv pert_sv_dot
rm -f pert_sv
else
mv pert_sv pert_sv_dot
fi
# Change name if not requesting both perturbation types
if [ $INIPERT_TYPE != 'both' ]; then
mv pert_sv_dot pert_dot
fi
fi
rm -f pert_t pert_d pert_vo pert_lnsp pert_z
# Add perturbations together
if [ $INIPERT_TYPE == 'both' ]; then
cdo -add pert_eda_dot pert_sv_dot pert_dot
rm -f pert_eda_dot pert_sv_dot
fi
# add back to unperturbed fields and return to spectral space
cdo -add tmp_init pert_dot pert_fin
cdo -gp2spl pert_fin ICMSH${EXPS}INIT
rm -f tmp_init pert pert_dot pert_fin
else
# Link original perturbation field
ln -sf $shinit_pert ICMSH${EXPS}INIT
fi
fi
# Link climatologies.  Single-argument `ln -sf TARGET` creates a link in
# the current directory named after the target's basename.
if [ $OIFSv != "43r3v1" ]; then
ln -sf ${IFSDATA}/rtables rtables
ln -sf ${IFSDATA}/vtables vtables
ln -sf ${IFSDATA}/climatology/ifsdata .
if [ $RES -eq 21 ]; then
ln -sf ${IFSDATA}/38r1/climate/${RES}_full
else
ln -sf ${IFSDATA}/38r1/climate/${RES}l_2
fi
else
# OpenIFS 43r3v1 ships its own tables/climatology layout
ln -sf ${IFSDATA}/43r3/rtables
ln -sf ${IFSDATA}/43r3/vtables
ln -sf ${IFSDATA}/43r3/ifsdata
ln -sf ${IFSDATA}/43r3/climate.v015/${RES}l_2
fi
|
# Build and run the TensorFlow compilation container for Ubuntu 20.04.
LINUX_DISTRO="ubuntu-20.04"
cd "dist/$LINUX_DISTRO"
# Set env variables consumed by the docker-compose build
export PYTHON_VERSION=3.7.5
export TF_VERSION_GIT_TAG="v1.14.0"
export BAZEL_VERSION=0.24.1
export USE_GPU=0
# install docker-compose 1.27.2 locally using pip
pip install docker-compose==1.27.2
USR_BIN=$(python -m site --user-base)/bin
# Build the Docker image
"$USR_BIN/docker-compose" build
# Start the compilation.  Bug fix: the original invoked whatever
# `docker-compose` happened to be on PATH here, bypassing the version
# pinned above; use the locally installed binary for both commands.
"$USR_BIN/docker-compose" run tf
|
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
#  http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# Downloads, builds and installs util-linux (for the prlimit tool).
set -e
usage() {
    echo "install_prlimit.sh download_dir install_dir"
    exit 1
}
if [ "$#" -ne "2" ]; then
    usage
fi
BUILD_DIR=$1
INSTALL_DIR=$2
# Provides $JOBS (parallel build width)
source codebuild/bin/jobs.sh
sudo apt-get install -y libncurses5-dev
cd "$BUILD_DIR"
# Originally from: https://www.kernel.org/pub/linux/utils/util-linux/v2.25/util-linux-2.25.2.tar.gz
curl --retry 3 https://s3-us-west-2.amazonaws.com/s2n-public-test-dependencies/2017-08-29_util-linux-2.25.2.tar.gz --output util-linux-2.25.2.tar.gz
tar -xzvf util-linux-2.25.2.tar.gz
cd util-linux-2.25.2
# Bug fix: the original used `... || cat config.log`, which made a
# failed configure look successful to `set -e` and let the build
# continue.  Dump the log for diagnostics, then fail explicitly.
./configure ADJTIME_PATH=/var/lib/hwclock/adjtime \
    --disable-chfn-chsh \
    --disable-login \
    --disable-nologin \
    --disable-su \
    --disable-setpriv \
    --disable-runuser \
    --disable-pylibmount \
    --disable-static \
    --without-python \
    --without-systemd \
    --disable-makeinstall-chown \
    --without-systemdsystemunitdir \
    --without-ncurses \
    --prefix="$INSTALL_DIR" || { cat config.log; exit 1; }
make -j "$JOBS" > /dev/null
make -j "$JOBS" install > /dev/null
|
<?php
// Character pools from which passwords are built.
$uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';
$lowercase = 'abcdefghijklmnopqrstuvwxyz';
$numbers = '0123456789';
$special_characters = '!"#$%&\'()*+,-./:;<=>?@[\]^_`{|}~';

/**
 * Generate a random password of the given length.
 *
 * Security fix: uses random_int() (a CSPRNG, PHP 7+) instead of
 * mt_rand(), whose Mersenne Twister output is predictable and must not
 * be used for secrets.
 *
 * @param int $length Desired password length (default 10).
 * @return string The generated password.
 */
function generateRandomPassword($length = 10) {
    global $uppercase, $lowercase, $numbers, $special_characters;
    // Full pool of candidate characters (same set as before).
    $password_characters = $uppercase . $lowercase . $numbers . $special_characters;
    // Highest valid index into the pool.
    $password_characters_length = strlen($password_characters) - 1;
    // Build the password one securely chosen character at a time.
    $random_password = '';
    for ($i = 0; $i < $length; $i++) {
        $random_index = random_int(0, $password_characters_length);
        $random_password .= $password_characters[$random_index];
    }
    return $random_password;
}
// Generate random password
$password = generateRandomPassword();
// Print the generated password
echo $password;
?>
from collections import OrderedDict
import sqlite3
from exceptions import *
class TimeReportDatabase(object):
    """
    Blueprint (abstract base class) for a time-report database.

    Holds the table schema plus generic validation/conversion helpers.
    Storage-specific commands (``create_database``,
    ``add_record_to_table``) are implemented by subclasses.
    """
    def __init__(self, db_name, *args, **kwargs):
        self.db_name = db_name
        # Table schema: table name -> OrderedDict of attribute name ->
        # SQL datatype.  The first attribute of each table is used as
        # its primary key by the sqlite3 implementation.
        self.tables = dict(Employee=OrderedDict(employee_nr='INTEGER',
                                                first_name='TEXT',
                                                last_name='TEXT',
                                                department='TEXT'),
                           Project=OrderedDict(project_nr='INTEGER',
                                               ksts='INTEGER',
                                               project_name='TEXT',
                                               hours_total='INTEGER'),
                           Staffing=OrderedDict(staffing_id='INTEGER',
                                                project_id='INTEGER',
                                                employee_nr='INTEGER',
                                                hours_planed='INTEGER'),
                           TimeReport=OrderedDict(time_report_id='INTEGER',
                                                  date='TEXT',
                                                  project_nr='INTEGER',
                                                  employee_nr='INTEGER',
                                                  hours_reported='INTEGER'))
        # Attributes that use SQL AUTOINCREMENT (not supplied on insert).
        self.autoincrement = ['time_report_id']

    def __str__(self):
        return f'Database: {self.db_name}'

    def __repr__(self):
        # Bug fix: the original f-string was missing the closing ')'.
        return f'{self.__class__.__name__}({self.db_name})'

    def get_tables(self):
        """
        Returns a sorted list of the tables in the database.

        :return: list
        """
        return sorted(self.tables)

    def get_attributes_for_table(self, table, all=False):
        """
        Returns the attributes for the given table.

        With all=False, autoincrement attributes are excluded and a
        list is returned; with all=True the table's attribute mapping
        is returned unchanged (iterating it yields attribute names).

        :param table: str
        :param all: boolean
        :return: iterable of attribute names
        """
        return_list = self.tables.get(table)
        if not all:
            return_list = [item for item in return_list if item not in self.autoincrement]
        return return_list

    def create_database(self):
        """
        Creates the database.  Implemented by subclasses.

        :return: None
        """
        raise NotImplementedError

    def add_record_to_table(self, table, **kwargs):
        """
        Adds a record to the given table.  Implemented by subclasses.

        :param table: str
        :param kwargs: keys=attributes
        :return:
        """
        raise NotImplementedError

    def check_attributes(self, table, **kwargs):
        """
        Checks that all attributes are valid.  Raises if not:
            MissingAttribute   - a required attribute was not given
            NonValidAttribute  - an unknown attribute was given

        :param table: str
        :param kwargs: attribute values keyed by attribute name
        :return: True when the attribute set matches exactly
        """
        if sorted(self.get_attributes_for_table(table)) == sorted(kwargs):
            return True
        attr_not_given = [a for a in self.get_attributes_for_table(table) if a not in kwargs]
        if attr_not_given:
            raise MissingAttribute(attr_not_given)
        non_valid_attr = [a for a in kwargs if a not in self.get_attributes_for_table(table)]
        raise NonValidAttribute(non_valid_attr)

    def convert_kwargs(self, table, **kwargs):
        """
        Coerces kwargs to the datatype declared for each attribute:
        INTEGER -> int (None when conversion fails), TEXT -> str
        ('' for falsy values).  Undeclared keys pass through unchanged.

        :param table: str
        :param kwargs: attribute values keyed by attribute name
        :return: dict of converted values
        """
        def get_int(value):
            # Bug fix: the original bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit; only conversion errors
            # should map to None.
            try:
                return int(value)
            except (TypeError, ValueError):
                return None

        def get_str(value):
            if value:
                return str(value)
            return ''

        table_dict = self.tables.get(table)
        kw = {}
        for key, val in kwargs.items():
            if table_dict.get(key) == 'INTEGER':
                val = get_int(val)
            elif table_dict.get(key) == 'TEXT':
                val = get_str(val)
            kw[key] = val
        return kw
class TimeReportDatabaseSqlite3(TimeReportDatabase):
    """
    sqlite3-backed implementation of TimeReportDatabase.

    A fresh connection is opened and closed around every single
    command (no connection state is kept between calls).
    """
    def __init__(self, file_path):
        super().__init__(file_path)
        self.file_path = file_path

    def _connect(self):
        # Open a new connection/cursor for one command.
        self.conn = sqlite3.connect(self.file_path)
        self.c = self.conn.cursor()

    def _disconnect(self):
        self.conn.close()
        self.c = None

    def _create_command(self, command):
        """
        Executes a DDL (CREATE ...) statement and commits.

        :param command: str
        :return: None
        """
        # The original `except Exception as e: raise e` was a no-op
        # re-raise; try/finally alone guarantees the disconnect.
        try:
            self._connect()
            self.c.execute(command)
            self.conn.commit()
        finally:
            self._disconnect()

    def _insert_command(self, command, value_dict):
        """
        Inserts a record using a parameterized INSERT.

        :param command: str with named placeholders
        :param value_dict: mapping of placeholder -> value
        :return: None
        """
        try:
            self._connect()
            self.c.execute(command, value_dict)
            self.conn.commit()
        finally:
            self._disconnect()

    def _select_command(self, command):
        """
        Runs a SELECT and returns all fetched rows.

        :param command: str
        :return: list of row tuples (or None if execution failed early)
        """
        data = None
        try:
            self._connect()
            self.c.execute(command)
            data = self.c.fetchall()
        finally:
            self._disconnect()
        return data

    def _get_select_string(self, tables=None, keys=None):
        """
        Builds the SELECT ... FROM ... part of a query.

        :param tables: str table name or list of tables (joined)
        :param keys: list of column names; falsy or '*' selects all
        :return: str
        """
        # Bug fix: the original left key_string unassigned when
        # keys == '*' (a latent UnboundLocalError); treat it like
        # "select all columns".
        if keys == '*' or not keys:
            key_string = '*'
        else:
            key_string = ',\n'.join(keys)
        if isinstance(tables, list):
            tables = ' JOIN '.join(tables)
        select_line = f"""SELECT \n{key_string} \n\nFROM {tables}"""
        return select_line

    # ===========================================================================
    def _get_where_string(self, match, match_table=None):
        """
        Builds a WHERE clause from match criteria.  Values are matched
        with IS NULL (for empty/None markers) or IN (...).

        NOTE(review): values are interpolated directly into the SQL
        string, so this is only safe for trusted input; consider
        parameterized queries for untrusted data.

        :param match: dict of column -> value or list of values
        :param match_table: optional table name to qualify columns
        :return: str ('' when match is empty)
        """
        match_string = ''
        if match:
            for key, items in match.items():
                if match_table:
                    key = f'{match_table}.{key}'
                if items in ['', None, 'NULL', 'null', 'None']:
                    match_string = f"""{match_string}AND {key} IS NULL\n"""
                else:
                    if not isinstance(items, list):
                        items = [items]
                    if len(items) == 1:
                        item = items[0]
                        if isinstance(item, str):
                            item_str = f"('{item}')"
                        else:
                            item_str = f"({item})"
                    else:
                        # (The original applied .replace("'", "'"),
                        # which was a no-op and has been removed.)
                        item_str = str(tuple(items))
                    match_string = f"""{match_string}AND {key} IN {item_str}\n"""
            # Bug fix: the original used match_string.lstrip('AND '),
            # which strips *characters* (A/N/D/space) and would eat the
            # start of a column name beginning with any of them.  Strip
            # the exact 'AND ' prefix instead.
            if match_string.startswith('AND '):
                match_string = match_string[4:]
            where_line = f"""\n\nWHERE\n{match_string}"""
        else:
            where_line = """"""
        return where_line

    def create_database(self):
        """
        Sets up the tables in the database.  The first attribute of
        each table becomes the PRIMARY KEY (with AUTOINCREMENT when
        listed in self.autoincrement).

        :return:
        """
        for table in self.tables:
            content = ''
            for i, [attribute, datatype] in enumerate(self.tables[table].items()):
                if i == 0:
                    pk = ' PRIMARY KEY'
                    ai = ''
                    if attribute in self.autoincrement:
                        ai = ' AUTOINCREMENT'
                else:
                    pk = ''
                    ai = ''
                content = content + f'{attribute} {datatype}{pk}{ai}, '
            command = f"""CREATE TABLE {table} ({content.strip(', ')})"""
            self._create_command(command)

    def add_record_to_table(self, table, **kwargs):
        """
        Validates, converts and inserts one record into the table.
        Autoincrement columns are inserted as None so sqlite assigns
        them.

        :param table: str
        :param kwargs: attribute values keyed by attribute name
        :return:
        """
        self.check_attributes(table, **kwargs)
        kw = self.convert_kwargs(table, **kwargs)
        values = ''
        for key in self.get_attributes_for_table(table, all=True):
            if key in self.autoincrement:
                kw[key] = None
            values = values + f':{key}, '
        command = f"""INSERT INTO {table} VALUES ({values.strip(', ')})"""
        self._insert_command(command, kw)

    def _get_data_string(self, table=None, keys=None, match=None, match_table=None, **kwargs):
        """
        Builds the full SELECT ... WHERE ... query string.
        (Mutable default arguments keys=[]/match={} replaced with None
        sentinels; behavior is unchanged.)
        """
        if keys is None:
            keys = []
        if match is None:
            match = {}
        if not isinstance(keys, list):
            keys = [keys]
        # SELECT statement
        select_line = self._get_select_string(table, keys)
        # WHERE statement
        where_line = self._get_where_string(match, match_table)
        return select_line + where_line

    def _get_data(self, tables=None, columns=['*'], **kwargs):
        """
        kwargs are match criteria.

        :param tables: str or list (a list produces a JOIN, with match
                       columns qualified by the first table)
        :param columns: str or list
        :param kwargs:
        :return: list of row tuples
        """
        match_table = None
        if isinstance(tables, list):
            # Qualify match columns with the first table of the JOIN
            match_table = tables[0]
        string = self._get_data_string(table=tables, keys=columns, match=kwargs, match_table=match_table)
        return self._select_command(string)

    def get_data(self, tables=None, columns=None, as_dict=True, **kwargs):
        """
        kwargs are match criteria.

        :param tables: str or list
        :param columns: str or list
        :param as_dict: boolean; when True, returns column -> value-list
        :param kwargs:
        :return: list of rows, or dict of column -> list of values
        """
        data = self._get_data(tables=tables, columns=columns, **kwargs)
        if not as_dict:
            return data
        if not columns or columns == '*':
            # Reconstruct the column order from the schema
            if isinstance(tables, list):
                columns = []
                for table in tables:
                    columns.extend(self.get_attributes_for_table(table))
            else:
                columns = self.get_attributes_for_table(tables)
        data_dict = {item: [] for item in columns}
        for line in data:
            for item, value in zip(columns, line):
                data_dict[item].append(value)
        return data_dict

    def get_test_data(self):
        """Sample JOIN query used for manual testing."""
        command = """SELECT *
                    FROM Employee JOIN TimeReport
                    WHERE Employee.employee_nr=1"""
        return self._select_command(command)
|
import { createSelector } from 'reselect'
// Action type for requesting the pokemon list.
export const GET_POKEMONS = 'GET_POKEMONS'

// Action creator: build the "fetch pokemons" action.
export function getPokemons () {
  return { type: GET_POKEMONS }
}
// Error slice when no error is present.
const initialError = {
  code: '',
  message: '',
  values: []
}

// Default pokemon state.
const initialState = {
  error: initialError,
  name: '',
  weight: 0,
  id: 0,
  order: 0
}

// TODO hook in correctly
// Pokemon reducer: currently only reacts to GET_POKEMON_COMPLETE.
export default function pokemonReducer (state = initialState, action) {
  if (action.type === 'GET_POKEMON_COMPLETE') {
    return getPokemonResult(state, action)
  }
  return state
}

// Merge a fetched pokemon payload into state, substituting neutral
// defaults for any missing field.
const getPokemonResult = (state, action) => {
  const payload = action.response || {}
  return {
    ...state,
    loading: false,
    name: payload.name || '',
    weight: payload.weight || 0,
    id: payload.id || 0,
    order: payload.order || 0
  }
}
// Memoised identity selector over the `pokemon` state slice:
// consumers only recompute/re-render when the slice reference changes.
export const selectPokemon = createSelector(
  state => state.pokemon,
  pokemon => pokemon
)
|
# Loads the corpus and generates the learning database.
#
# usage: generate_corpus_db.sh {$BIOSCOPE | $BIOSCOPED}
# Requires $SRC to point at the project source tree.
WORKING_DIR=$1
if [ -z "$WORKING_DIR" ]; then
    echo "usage: $0 {\$BIOSCOPE | \$BIOSCOPED}" >&2
    exit 1
fi
# Recreate the tables: generate them empty, dropping any that existed
echo "$0: Generating table structure..."
sqlite3 "$WORKING_DIR/bioscope.db" < "$SRC/scripts/create_db.sql"
sqlite3 "$WORKING_DIR/bioscope.db" < "$SRC/scripts/create_error_log_db.sql"
# Populate the corpus tables
echo "$0: populating corpus tables"
python "$SRC/scripts/gen_corpus_db.py" "$WORKING_DIR"
# Update the values for discontinuous hedge cues
python "$SRC/scripts/update_discontinuous_hc.py" "$WORKING_DIR"
|
/*==================================================================*\
| EXIP - Embeddable EXI Processor in C |
|--------------------------------------------------------------------|
| This work is licensed under BSD 3-Clause License |
| The full license terms and conditions are located in LICENSE.txt |
\===================================================================*/
/**
* @file staticOutputUtils.c
* @brief Implement utility functions for storing EXIPSchema instances as static code
* @date May 7, 2012
* @author <NAME>
* @version 0.5
* @par[Revision] $Id$
*/
#include "schemaOutputUtils.h"
#include "hashtable.h"
/**
 * Fill indexStrings with printable forms of a production's typeId and
 * qname (uri/ln) ids: the sentinel macro name (INDEX_MAX / URI_MAX /
 * LN_MAX) when the id equals that sentinel, the decimal value
 * otherwise.  The rendered strings are embedded in generated C source.
 */
static void setProdStrings(IndexStrings *indexStrings, Production *prod)
{
char *indexMaxStr = "INDEX_MAX";
char *uriMaxStr = "URI_MAX";
char *lnMaxStr = "LN_MAX";
if (prod->typeId == INDEX_MAX)
{
strcpy(indexStrings->typeIdStr, indexMaxStr);
}
else
{
sprintf(indexStrings->typeIdStr, "%u", (unsigned int) prod->typeId);
}
if (prod->qnameId.uriId == URI_MAX)
{
strcpy(indexStrings->uriIdStr, uriMaxStr);
}
else
{
sprintf(indexStrings->uriIdStr, "%u", (unsigned int) prod->qnameId.uriId);
}
if (prod->qnameId.lnId == LN_MAX)
{
strcpy(indexStrings->lnIdStr, lnMaxStr);
}
else
{
sprintf(indexStrings->lnIdStr, "%u", (unsigned int) prod->qnameId.lnId);
}
}
/**
 * Emit a CONST CharType array definition named varName for the given
 * string, as hex character codes, followed by the readable text in a
 * trailing comment.  Empty strings produce no output.
 * NOTE(review): strncpy into displayStr assumes str->length <
 * VAR_BUFFER_MAX_LENGTH and that CharType is byte-sized -- confirm the
 * upstream bounds.
 */
void staticStringDefOutput(String* str, char* varName, FILE* out)
{
Index charIter, charMax;
char displayStr[VAR_BUFFER_MAX_LENGTH];
charMax = str->length;
if(charMax > 0)
{
fprintf(out, "CONST CharType %s[] = {", varName);
for(charIter = 0; charIter < charMax; charIter++)
{
fprintf(out, "0x%x", str->str[charIter]);
/* comma-separate all but the last element */
if(charIter < charMax - 1)
fprintf(out, ", ");
}
strncpy(displayStr, str->str, str->length);
displayStr[str->length] = '\0';
fprintf(out, "}; /* %s */\n", displayStr);
}
}
/**
 * Emit CharType array definitions for every string in the URI table:
 * each URI string, its prefix strings (when a prefix table exists) and
 * all local names, named <prefix>URI_<u>, <prefix>PFX_<u>_<p> and
 * <prefix>LN_<u>_<l> respectively.
 */
void staticStringTblDefsOutput(UriTable* uriTbl, char* prefix, FILE* out)
{
Index uriIter, pfxIter, lnIter;
char varName[VAR_BUFFER_MAX_LENGTH];
fprintf(out, "/** START_STRINGS_DEFINITONS */\n\n");
for(uriIter = 0; uriIter < uriTbl->count; uriIter++)
{
// Printing of a uri string
sprintf(varName, "%sURI_%u", prefix, (unsigned int) uriIter);
staticStringDefOutput(&uriTbl->uri[uriIter].uriStr, varName, out);
// Printing of a pfx strings if any
if(uriTbl->uri[uriIter].pfxTable != NULL)
{
for(pfxIter = 0; pfxIter < uriTbl->uri[uriIter].pfxTable->count; pfxIter++)
{
sprintf(varName, "%sPFX_%u_%u", prefix, (unsigned int) uriIter, (unsigned int) pfxIter);
staticStringDefOutput(&uriTbl->uri[uriIter].pfxTable->pfxStr[pfxIter], varName, out);
}
}
// Printing of all local names for that uri
for(lnIter = 0; lnIter < uriTbl->uri[uriIter].lnTable.count; lnIter++)
{
sprintf(varName, "%sLN_%u_%u", prefix, (unsigned int) uriIter, (unsigned int) lnIter);
staticStringDefOutput(&uriTbl->uri[uriIter].lnTable.ln[lnIter].lnStr, varName, out);
}
}
fprintf(out, "\n/** END_STRINGS_DEFINITONS */\n\n");
}
/**
 * For each non-empty rule of the grammar, emit a static CONST
 * Production array named <prefix>prod_<grId>_<rule> listing each
 * production's content, typeId and qname ids (using the sentinel-name
 * strings prepared by setProdStrings).
 */
void staticProductionsOutput(EXIGrammar* gr, char* prefix, Index grId, FILE* out)
{
Index ruleIter;
char varName[VAR_BUFFER_MAX_LENGTH];
Index prodIter;
IndexStrings indexStrings;
for(ruleIter = 0; ruleIter < gr->count; ruleIter++)
{
/* rules without productions get no array (rules output emits NULL) */
if (gr->rule[ruleIter].pCount)
{
// Printing of the Production variable string
sprintf(varName, "%sprod_%u_%u", prefix, (unsigned int) grId, (unsigned int) ruleIter);
fprintf(out, "static CONST Production %s[%u] =\n{\n", varName, (unsigned int) gr->rule[ruleIter].pCount);
for(prodIter = 0; prodIter < gr->rule[ruleIter].pCount; prodIter++)
{
setProdStrings(&indexStrings, &gr->rule[ruleIter].production[prodIter]);
fprintf(out,
"    {\n        %u, %s,\n        {%s, %s}}%s",
gr->rule[ruleIter].production[prodIter].content,
indexStrings.typeIdStr,
indexStrings.uriIdStr,
indexStrings.lnIdStr,
prodIter==(gr->rule[ruleIter].pCount - 1) ? "\n};\n\n" : ",\n");
}
}
}
}
/**
 * Emit the static CONST GrammarRule array <prefix>rule_<grId> for the
 * grammar: each entry references the rule's production array generated
 * by staticProductionsOutput (or NULL for an empty rule), its
 * production count and its meta field.
 */
void staticRulesOutput(EXIGrammar* gr, char* prefix, Index grId, FILE* out)
{
Index ruleIter;
fprintf(out,
"static CONST GrammarRule %srule_%u[%u] =\n{",
prefix,
(unsigned int) grId,
(unsigned int) gr->count);
for(ruleIter = 0; ruleIter < gr->count; ruleIter++)
{
fprintf(out, "\n    {");
if (gr->rule[ruleIter].pCount > 0)
{
fprintf(out,
"%sprod_%u_%u, ",
prefix,
(unsigned int) grId,
(unsigned int) ruleIter);
}
else
fprintf(out, "NULL, ");
fprintf(out, "%u, ", (unsigned int) gr->rule[ruleIter].pCount);
fprintf(out, "%u", (unsigned int) gr->rule[ruleIter].meta);
fprintf(out, "}%s", ruleIter != (gr->count-1)?",":"");
}
fprintf(out, "\n};\n\n");
}
/**
 * Emit the built-in document grammar: the DocContent production array
 * (from docGr), a one-entry DocEnd production array, and the two-rule
 * GrammarRule array <prefix>docGrammarRule tying them together.
 * NOTE(review): the hard-coded DocEnd content value 0xAFFFFFF has
 * seven F's -- verify against the Production content encoding.
 */
void staticDocGrammarOutput(EXIGrammar* docGr, char* prefix, FILE* out)
{
char varNameContent[VAR_BUFFER_MAX_LENGTH];
char varNameEnd[VAR_BUFFER_MAX_LENGTH];
Index prodIter;
IndexStrings indexStrings;
// Printing of the Production variable string
sprintf(varNameContent, "%sprod_doc_content", prefix);
/* Build the document grammar, DocContent productions */
fprintf(out, "static CONST Production %s[%u] =\n{\n", varNameContent, (unsigned int) docGr->rule[GR_DOC_CONTENT].pCount);
for(prodIter = 0; prodIter < docGr->rule[GR_DOC_CONTENT].pCount; prodIter++)
{
setProdStrings(&indexStrings, &docGr->rule[GR_DOC_CONTENT].production[prodIter]);
fprintf(out,
"    {\n        %u, %s,\n        {%s, %s}}%s",
(unsigned int) docGr->rule[GR_DOC_CONTENT].production[prodIter].content,
indexStrings.typeIdStr,
indexStrings.uriIdStr,
indexStrings.lnIdStr,
prodIter==(docGr->rule[GR_DOC_CONTENT].pCount - 1) ? "\n};\n\n" : ",\n");
}
// Printing of the Production variable string
sprintf(varNameEnd, "%sprod_doc_end", prefix);
/* Build the document grammar, DocEnd productions */
fprintf(out, "static CONST Production %s[%u] =\n{\n", varNameEnd, 1);
fprintf(out,"    {\n        0xAFFFFFF, INDEX_MAX,\n        {URI_MAX, LN_MAX}}\n};\n\n");
/* Build the document grammar rules */
fprintf(out, "static CONST GrammarRule %sdocGrammarRule[2] =\n{\n", prefix);
fprintf(out, "    {%s, %u, 0},\n\
    {%s, 1, 0}\n};\n\n", varNameContent, (unsigned int) docGr->rule[GR_DOC_CONTENT].pCount, varNameEnd);
}
/**
 * Emit the static CONST PfxTable <prefix>pfxTable_<uriId> for one URI:
 * one {string, length} entry per stored prefix, then {NULL, 0} padding
 * up to MAXIMUM_NUMBER_OF_PREFIXES_PER_URI.  No output when pfxTbl is
 * NULL.
 */
void staticPrefixOutput(PfxTable* pfxTbl, char* prefix, Index uriId, FILE* out)
{
Index pfxIter;
if(pfxTbl != NULL)
{
fprintf(out, "static CONST PfxTable %spfxTable_%u =\n{\n    %u,\n    {\n", prefix, (unsigned int) uriId, (unsigned int) pfxTbl->count);
for(pfxIter = 0; pfxIter < pfxTbl->count; pfxIter++)
{
if(pfxTbl->pfxStr[pfxIter].length > 0)
fprintf(out, "        {%sPFX_%u_%u, %u},\n", prefix, (unsigned int) uriId, (unsigned int) pfxIter, (unsigned int) pfxTbl->pfxStr[pfxIter].length);
else
fprintf(out, "        {NULL, 0},\n");
}
/* pad the fixed-size prefix array with empty entries */
for(; pfxIter < MAXIMUM_NUMBER_OF_PREFIXES_PER_URI; pfxIter++)
{
fprintf(out, "        {NULL, 0}%s", pfxIter==MAXIMUM_NUMBER_OF_PREFIXES_PER_URI-1 ? "\n    }\n};\n\n" : ",\n");
}
}
}
/**
 * Emit the static CONST LnEntry array <prefix>LnEntry_<uriId> for one
 * URI's local-name table: each entry carries the local-name string
 * reference and its element/type grammar indexes (rendered as
 * INDEX_MAX when unset).  Empty tables produce no output.
 */
void staticLnEntriesOutput(LnTable* lnTbl, char* prefix, Index uriId, FILE* out)
{
Index lnIter;
char elemGrammar[20];
char typeGrammar[20];
if(lnTbl->count > 0)
{
fprintf(out, "static CONST LnEntry %sLnEntry_%u[%u] =\n{\n", prefix, (unsigned int) uriId, (unsigned int) lnTbl->count);
for(lnIter = 0; lnIter < lnTbl->count; lnIter++)
{
if(lnTbl->ln[lnIter].elemGrammar == INDEX_MAX)
strcpy(elemGrammar, "INDEX_MAX");
else
sprintf(elemGrammar, "%u", (unsigned int) lnTbl->ln[lnIter].elemGrammar);
if(lnTbl->ln[lnIter].typeGrammar == INDEX_MAX)
strcpy(typeGrammar, "INDEX_MAX");
else
sprintf(typeGrammar, "%u", (unsigned int) lnTbl->ln[lnIter].typeGrammar);
/* the value cross-table slot is only compiled in conditionally */
fprintf(out, "    {\n#if VALUE_CROSSTABLE_USE\n      NULL,\n#endif\n");
if(lnTbl->ln[lnIter].lnStr.length > 0)
fprintf(out, "      {%sLN_%u_%u, %u},\n      %s, %s\n", prefix, (unsigned int) uriId, (unsigned int) lnIter, (unsigned int) lnTbl->ln[lnIter].lnStr.length, elemGrammar, typeGrammar);
else
fprintf(out, "      {NULL, 0},\n      %s, %s\n", elemGrammar, typeGrammar);
fprintf(out, "%s", lnIter==(lnTbl->count-1)?"    }\n};\n\n":"    },\n");
}
} /* END if(lnTableSize > 0) */
}
/**
 * Emit the static CONST UriEntry array <prefix>uriEntry: for each URI,
 * its (possibly empty) local-name dynamic-array header, a pointer to
 * its prefix table (or NULL), and the URI string reference.
 */
void staticUriTableOutput(UriTable* uriTbl, char* prefix, FILE* out)
{
Index uriIter;
fprintf(out, "static CONST UriEntry %suriEntry[%u] =\n{\n", prefix, (unsigned int) uriTbl->count);
for(uriIter = 0; uriIter < uriTbl->count; uriIter++)
{
if(uriTbl->uri[uriIter].lnTable.count > 0)
{
fprintf(out,
"    {\n        {{sizeof(LnEntry), %u, %u}, %sLnEntry_%u, %u},\n",
(unsigned int) uriTbl->uri[uriIter].lnTable.count,
(unsigned int) uriTbl->uri[uriIter].lnTable.count,
prefix,
(unsigned int) uriIter,
(unsigned int) uriTbl->uri[uriIter].lnTable.count);
}
else
{
fprintf(out, "    {\n        {{sizeof(LnEntry), 0, 0}, NULL, 0},\n");
}
if(uriTbl->uri[uriIter].pfxTable != NULL)
{
fprintf(out, "        &%spfxTable_%u,\n", prefix, (unsigned int) uriIter);
}
else
{
fprintf(out, "        NULL,\n");
}
if(uriTbl->uri[uriIter].uriStr.length > 0)
fprintf(out, "        {%sURI_%u, %u}%s", prefix, (unsigned int) uriIter, (unsigned int) uriTbl->uri[uriIter].uriStr.length,
uriIter==(uriTbl->count-1)?"\n    }\n};\n\n":"\n    },\n");
else
fprintf(out, "        {NULL, 0}%s", uriIter==(uriTbl->count-1)?"\n    }\n};\n\n":"\n    },\n");
}
}
/**
 * Emit the schema's enumeration tables: per-enum value arrays
 * (<prefix>enumValues_<i>) followed by the combined
 * <prefix>enumTable array.  Only string and non-negative-integer
 * value types are implemented; all other EXI value types hit
 * assert(FALSE) as explicit NOT_IMPLEMENTED markers.
 */
void staticEnumTableOutput(EXIPSchema* schema, char* prefix, FILE* out)
{
EnumDefinition* tmpDef;
char varName[VAR_BUFFER_MAX_LENGTH];
Index i, j;
if(schema->enumTable.count == 0)
return;
for(i = 0; i < schema->enumTable.count; i++)
{
tmpDef = &schema->enumTable.enumDef[i];
switch(GET_EXI_TYPE(schema->simpleTypeTable.sType[tmpDef->typeId].content))
{
case VALUE_TYPE_STRING:
{
String* tmpStr;
/* first the character arrays, then the String descriptors */
for(j = 0; j < tmpDef->count; j++)
{
tmpStr = &((String*) tmpDef->values)[j];
sprintf(varName, "%sENUM_%u_%u", prefix, (unsigned int) i, (unsigned int) j);
staticStringDefOutput(tmpStr, varName, out);
}
fprintf(out, "\nstatic CONST String %senumValues_%u[%u] = { \n", prefix, (unsigned int) i, (unsigned int) tmpDef->count);
for(j = 0; j < tmpDef->count; j++)
{
tmpStr = &((String*) tmpDef->values)[j];
if(tmpStr->str != NULL)
fprintf(out, "   {%sENUM_%u_%u, %u}", prefix, (unsigned int) i, (unsigned int) j, (unsigned int) tmpStr->length);
else
fprintf(out, "   {NULL, 0}");
if(j < tmpDef->count - 1)
fprintf(out, ",\n");
else
fprintf(out, "\n};\n\n");
}
} break;
case VALUE_TYPE_BOOLEAN:
// NOT_IMPLEMENTED
assert(FALSE);
break;
case VALUE_TYPE_DATE_TIME:
case VALUE_TYPE_YEAR:
case VALUE_TYPE_DATE:
case VALUE_TYPE_MONTH:
case VALUE_TYPE_TIME:
// NOT_IMPLEMENTED
assert(FALSE);
break;
case VALUE_TYPE_DECIMAL:
// NOT_IMPLEMENTED
assert(FALSE);
break;
case VALUE_TYPE_FLOAT:
// NOT_IMPLEMENTED
assert(FALSE);
break;
case VALUE_TYPE_INTEGER:
// NOT_IMPLEMENTED
assert(FALSE);
break;
case VALUE_TYPE_SMALL_INTEGER:
// NOT_IMPLEMENTED
assert(FALSE);
break;
case VALUE_TYPE_NON_NEGATIVE_INT:
fprintf(out, "\nstatic CONST UnsignedInteger %senumValues_%u[%u] = { \n", prefix, (unsigned int) i, (unsigned int) tmpDef->count);
for(j = 0; j < tmpDef->count; j++)
{
fprintf(out, "   0x%016lX", (long unsigned) ((UnsignedInteger*) tmpDef->values)[j]);
if(j < tmpDef->count - 1)
fprintf(out, ",\n");
else
fprintf(out, "\n};\n\n");
}
break;
}
}
/* finally, the table of enum definitions referencing the arrays above */
fprintf(out, "static CONST EnumDefinition %senumTable[%u] = { \n", prefix, (unsigned int) schema->enumTable.count);
for(i = 0; i < schema->enumTable.count; i++)
{
tmpDef = &schema->enumTable.enumDef[i];
fprintf(out, "   {%u, %senumValues_%u, %u}", (unsigned int) tmpDef->typeId, prefix, (unsigned int) i, (unsigned int) tmpDef->count);
if(i < schema->enumTable.count - 1)
fprintf(out, ",\n");
else
fprintf(out, "\n};\n\n");
}
}
|
/*
 * limitMajors.sql
 * Chapter 10, Oracle10g PL/SQL Programming
 * by <NAME>, <NAME> and <NAME>
 *
 * This script demonstrates user defined non-mutating trigger.
 *
 * NOTE(review): the trigger body SELECTs from STUDENTS inside a row-level
 * trigger on STUDENTS itself; in Oracle this normally raises ORA-04091
 * (table is mutating) when fired by the UPDATE below — presumably that is
 * the teaching point of this example; confirm against the chapter text.
 */
SET ECHO ON
CREATE OR REPLACE TRIGGER LimitMajors
/* Limits the number of students in each major to 5.
If this limit is exceeded, an error is raised through
raise_application_error. */
BEFORE INSERT OR UPDATE OF major ON students
FOR EACH ROW
DECLARE
-- Hard cap on students per major.
v_MaxStudents CONSTANT NUMBER := 5;
-- Current enrollment in the major being inserted/updated into.
v_CurrentStudents NUMBER;
BEGIN
-- Determine the current number of students in this
-- major.
SELECT COUNT(*)
INTO v_CurrentStudents
FROM students
WHERE major = :new.major;
-- If there isn't room, raise an error.
IF v_CurrentStudents + 1 > v_MaxStudents THEN
RAISE_APPLICATION_ERROR(-20000,
'Too many students in major ' || :new.major);
END IF;
END LimitMajors;
/
-- Fire the trigger with a representative row update.
UPDATE students
SET major = 'History'
WHERE id = 1;
|
import React from 'react';
import PropTypes from 'prop-types';
import { Table } from 'semantic-ui-react';
import EntryRow from './EntryRow';
const EntryList = ({ entries, form, filterText,
editEntry, deleteEntry, updateFormField}) => {
let rows = entries
.filter(entry => entry.customer.includes(filterText))
.map(entry =>
<EntryRow
entry={entry} key={entry.id} form={form}
editEntry={editEntry} deleteEntry={deleteEntry}
updateFormField={updateFormField}
/>);
return (
<Table compact>
<Table.Header>
<Table.Row>
<Table.HeaderCell>Customer</Table.HeaderCell>
<Table.HeaderCell>Rate</Table.HeaderCell>
<Table.HeaderCell>Length</Table.HeaderCell>
<Table.HeaderCell>Actions</Table.HeaderCell>
</Table.Row>
</Table.Header>
<Table.Body>
{rows}
</Table.Body>
</Table>
);
};
EntryList.propTypes = {
entries: PropTypes.array,
form: PropTypes.object,
filterText: PropTypes.string,
editEntry: PropTypes.func,
deleteEntry: PropTypes.func,
updateFormField: PropTypes.func,
};
export default EntryList;
|
// Barrel module: re-exports feature components under stable public names.
//export {default as Navbar} from './navbar'
export {default as PlayRandom} from './playRandom'
export {default as AnalyseLyrics} from './analyseLyrics'
|
def getHexValue(colorName, colorMap):
    """Look up the hex value registered for a color name.

    Args:
        colorName: Name of the color to look up.
        colorMap: Mapping of color names to integer hex values.

    Returns:
        The mapped value when the name is present, otherwise the sentinel
        string "Color not found" (kept for backward compatibility with
        existing callers).
    """
    # dict.get with a default replaces the explicit membership test + branch.
    return colorMap.get(colorName, "Color not found")
# Sample usage
# Demo data: color name -> 24-bit RGB value (0xRRGGBB integers).
colorMap = {
    "Light Sandy Day": 0xe1dacf,
    "Light Sea Breeze": 0xb7cdd9,
    "Light Sea Cliff": 0xb9d4e7,
    "Light Sea Spray": 0xabd6de,
    "Light Sea-Foam": 0xa0febf,
    "Light Seafoam Green": 0xa7ffb5,
    "Light Security": 0xe0e9d0,
    "Light Shell Haven": 0xf1e8ce,
    "Light Shell Tint": 0xfce0d6,
    "Light Shetland Lace": 0xe7dccf,
    "Light Shimmer": 0xa3d4ef,
    "Light Short Phase": 0xcbe8df,
    "Light Shutterbug": 0xcef2e4
}
# Demonstrates both the hit and miss paths of getHexValue (defined above).
print(getHexValue("Light Sea Breeze", colorMap)) # Output: 0xb7cdd9
print(getHexValue("Dark Forest Green", colorMap)) # Output: Color not found
<filename>vendor/github.com/hashicorp/consul-template/config/consul_test.go
package config
import (
"fmt"
"reflect"
"testing"
"time"
)
// TestConsulConfig_Copy verifies that Copy produces a value deeply equal to
// its receiver for a nil config, an empty config, and a fully-populated one.
func TestConsulConfig_Copy(t *testing.T) {
	cases := []struct {
		name string
		a    *ConsulConfig
	}{
		{name: "nil", a: nil},
		{name: "empty", a: &ConsulConfig{}},
		{
			name: "same_enabled",
			a: &ConsulConfig{
				Address: String("1.2.3.4"),
				Auth:    &AuthConfig{Enabled: Bool(true)},
				Retry:   &RetryConfig{Enabled: Bool(true)},
				SSL:     &SSLConfig{Enabled: Bool(true)},
				Token:   String("<KEY>"),
				Transport: &TransportConfig{
					DialKeepAlive: TimeDuration(20 * time.Second),
				},
			},
		},
	}

	for idx, tt := range cases {
		t.Run(fmt.Sprintf("%d_%s", idx, tt.name), func(t *testing.T) {
			// The copy must be indistinguishable from the original.
			got := tt.a.Copy()
			if !reflect.DeepEqual(tt.a, got) {
				t.Errorf("\nexp: %#v\nact: %#v", tt.a, got)
			}
		})
	}
}
// TestConsulConfig_Merge exercises ConsulConfig.Merge over nil receivers, nil
// arguments, empty configs, and — for every field — the override / empty-one /
// empty-two / same matrix. In each case `a.Merge(b)` must equal `r`, i.e. the
// second argument's non-empty fields win.
func TestConsulConfig_Merge(t *testing.T) {
	cases := []struct {
		name string
		a    *ConsulConfig
		b    *ConsulConfig
		r    *ConsulConfig
	}{
		// nil handling: merging with nil keeps the non-nil side.
		{
			"nil_a",
			nil,
			&ConsulConfig{},
			&ConsulConfig{},
		},
		{
			"nil_b",
			&ConsulConfig{},
			nil,
			&ConsulConfig{},
		},
		{
			"nil_both",
			nil,
			nil,
			nil,
		},
		{
			"empty",
			&ConsulConfig{},
			&ConsulConfig{},
			&ConsulConfig{},
		},
		// Address field matrix.
		{
			"address_overrides",
			&ConsulConfig{Address: String("same")},
			&ConsulConfig{Address: String("different")},
			&ConsulConfig{Address: String("different")},
		},
		{
			"address_empty_one",
			&ConsulConfig{Address: String("same")},
			&ConsulConfig{},
			&ConsulConfig{Address: String("same")},
		},
		{
			"address_empty_two",
			&ConsulConfig{},
			&ConsulConfig{Address: String("same")},
			&ConsulConfig{Address: String("same")},
		},
		{
			"address_same",
			&ConsulConfig{Address: String("same")},
			&ConsulConfig{Address: String("same")},
			&ConsulConfig{Address: String("same")},
		},
		// Auth field matrix.
		{
			"auth_overrides",
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(false)}},
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(false)}},
		},
		{
			"auth_empty_one",
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
			&ConsulConfig{},
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
		},
		{
			"auth_empty_two",
			&ConsulConfig{},
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
		},
		{
			"auth_same",
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
			&ConsulConfig{Auth: &AuthConfig{Enabled: Bool(true)}},
		},
		// Retry field matrix.
		{
			"retry_overrides",
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(false)}},
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(false)}},
		},
		{
			"retry_empty_one",
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
			&ConsulConfig{},
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
		},
		{
			"retry_empty_two",
			&ConsulConfig{},
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
		},
		{
			"retry_same",
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
			&ConsulConfig{Retry: &RetryConfig{Enabled: Bool(true)}},
		},
		// SSL field matrix.
		{
			"ssl_overrides",
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(false)}},
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(false)}},
		},
		{
			"ssl_empty_one",
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
			&ConsulConfig{},
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
		},
		{
			"ssl_empty_two",
			&ConsulConfig{},
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
		},
		{
			"ssl_same",
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
			&ConsulConfig{SSL: &SSLConfig{Enabled: Bool(true)}},
		},
		// Token field matrix.
		{
			"token_overrides",
			&ConsulConfig{Token: String("same")},
			&ConsulConfig{Token: String("different")},
			&ConsulConfig{Token: String("different")},
		},
		{
			"token_empty_one",
			&ConsulConfig{Token: String("same")},
			&ConsulConfig{},
			&ConsulConfig{Token: String("same")},
		},
		{
			"token_empty_two",
			&ConsulConfig{},
			&ConsulConfig{Token: String("same")},
			&ConsulConfig{Token: String("same")},
		},
		{
			"token_same",
			&ConsulConfig{Token: String("same")},
			&ConsulConfig{Token: String("same")},
			&ConsulConfig{Token: String("same")},
		},
		// Transport field matrix.
		{
			"transport_overrides",
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(20 * time.Second)}},
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(20 * time.Second)}},
		},
		{
			"transport_empty_one",
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
			&ConsulConfig{},
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
		},
		{
			"transport_empty_two",
			&ConsulConfig{},
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
		},
		{
			"transport_same",
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
			&ConsulConfig{Transport: &TransportConfig{DialKeepAlive: TimeDuration(10 * time.Second)}},
		},
	}
	for i, tc := range cases {
		t.Run(fmt.Sprintf("%d_%s", i, tc.name), func(t *testing.T) {
			r := tc.a.Merge(tc.b)
			if !reflect.DeepEqual(tc.r, r) {
				t.Errorf("\nexp: %#v\nact: %#v", tc.r, r)
			}
		})
	}
}
// TestConsulConfig_Finalize verifies that Finalize fills every unset field of
// an empty ConsulConfig with its documented default (empty strings, the
// Default* constants, retry enabled, SSL/auth disabled, verify on).
func TestConsulConfig_Finalize(t *testing.T) {
	cases := []struct {
		name string
		i    *ConsulConfig
		r    *ConsulConfig
	}{
		{
			"empty",
			&ConsulConfig{},
			// Expected fully-defaulted config after Finalize.
			&ConsulConfig{
				Address: String(""),
				Auth: &AuthConfig{
					Enabled:  Bool(false),
					Username: String(""),
					Password: String(""),
				},
				Retry: &RetryConfig{
					Backoff:    TimeDuration(DefaultRetryBackoff),
					MaxBackoff: TimeDuration(DefaultRetryMaxBackoff),
					Enabled:    Bool(true),
					Attempts:   Int(DefaultRetryAttempts),
				},
				SSL: &SSLConfig{
					CaCert:     String(""),
					CaPath:     String(""),
					Cert:       String(""),
					Enabled:    Bool(false),
					Key:        String(""),
					ServerName: String(""),
					Verify:     Bool(true),
				},
				Token: String(""),
				Transport: &TransportConfig{
					DialKeepAlive:       TimeDuration(DefaultDialKeepAlive),
					DialTimeout:         TimeDuration(DefaultDialTimeout),
					DisableKeepAlives:   Bool(false),
					IdleConnTimeout:     TimeDuration(DefaultIdleConnTimeout),
					MaxIdleConns:        Int(DefaultMaxIdleConns),
					MaxIdleConnsPerHost: Int(DefaultMaxIdleConnsPerHost),
					TLSHandshakeTimeout: TimeDuration(DefaultTLSHandshakeTimeout),
				},
			},
		},
	}
	for i, tc := range cases {
		t.Run(fmt.Sprintf("%d_%s", i, tc.name), func(t *testing.T) {
			// Finalize mutates in place; compare against the expectation.
			tc.i.Finalize()
			if !reflect.DeepEqual(tc.r, tc.i) {
				t.Errorf("\nexp: %#v\nact: %#v", tc.r, tc.i)
			}
		})
	}
}
|
<reponame>smagill/opensphere-desktop
package io.opensphere.core.matchers;
import java.lang.ref.SoftReference;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
/**
* Helper class that creates and caches JAXB contexts.
*/
public final class JAXBContextHelper
{
    /** Map of cached contexts, keyed by a class-name list or a context path. */
    private static Map<String, SoftReference<JAXBContext>> ourContextMap = new HashMap<>();

    /** Lock guarding all reads and writes of {@link #ourContextMap}. */
    private static ReentrantLock ourCreateContextLock = new ReentrantLock();

    /**
     * Clears all cached contexts.
     */
    public static void clearCachedContexts()
    {
        ourCreateContextLock.lock();
        try
        {
            ourContextMap.clear();
        }
        finally
        {
            ourCreateContextLock.unlock();
        }
    }

    /**
     * Gets or creates a JAXBContext and caches it so it does not have to be
     * created twice but creates/retrieves all contexts in a thread safe way.
     *
     * @param classes The classes for which to make a context.
     * @return the {@link JAXBContext}
     * @throws JAXBException If an error occurred obtaining the JAXBContext.
     */
    public static JAXBContext getCachedContext(Class<?>... classes) throws JAXBException
    {
        JAXBContext ctx;
        ourCreateContextLock.lock();
        try
        {
            String key = buildClassKey(classes);
            SoftReference<JAXBContext> ref = ourContextMap.get(key);
            // The soft reference may have been cleared by the GC.
            ctx = ref == null ? null : ref.get();
            if (ctx == null)
            {
                ctx = createContext(classes);
                ourContextMap.put(key, new SoftReference<>(ctx));
            }
        }
        finally
        {
            ourCreateContextLock.unlock();
        }
        return ctx;
    }

    /**
     * Gets or creates a JAXBContext and caches it so it does not have to be
     * created twice but creates/retrieves all contexts in a thread safe way.
     *
     * @param classes The classes for which to make a context.
     * @return the {@link JAXBContext}
     * @throws JAXBException If an error occurred obtaining the JAXBContext.
     */
    public static JAXBContext getCachedContext(Collection<? extends Class<?>> classes) throws JAXBException
    {
        return getCachedContext(classes.toArray(new Class<?>[classes.size()]));
    }

    /**
     * Gets or creates a JAXBContext from one or more packages. The packages
     * must contain "jaxb.index" files and/or object factories.
     *
     * @param packages The packages (at least one is required).
     * @return The context.
     * @throws JAXBException If an error occurred obtaining the JAXBContext.
     * @throws IllegalArgumentException If no packages are supplied.
     * @see JAXBContext#newInstance(String, ClassLoader)
     */
    public static JAXBContext getCachedContext(Package... packages) throws JAXBException
    {
        // Guard: the original implementation threw an obscure
        // StringIndexOutOfBoundsException (sb.setLength(-1)) for zero packages.
        if (packages.length == 0)
        {
            throw new IllegalArgumentException("At least one package is required.");
        }
        String[] packageNames = new String[packages.length];
        for (int i = 0; i < packages.length; ++i)
        {
            packageNames[i] = packages[i].getName();
        }
        // Sort so equivalent package sets map to the same cache key.
        Arrays.sort(packageNames);
        StringBuilder sb = new StringBuilder(32);
        for (String packageName : packageNames)
        {
            sb.append(packageName).append(':');
        }
        // Drop the trailing ':' separator.
        sb.setLength(sb.length() - 1);
        return getCachedContext(sb.toString());
    }

    /**
     * Gets or creates a JAXBContext and caches it so it does not have to be
     * created twice but creates/retrieves all contexts in a thread safe way.
     *
     * @param contextPath Colon-separated list of package names.
     * @return the {@link JAXBContext}
     * @throws JAXBException If an error occurred obtaining the JAXBContext.
     * @see JAXBContext#newInstance(String, ClassLoader)
     */
    public static JAXBContext getCachedContext(String contextPath) throws JAXBException
    {
        JAXBContext ctx;
        ourCreateContextLock.lock();
        try
        {
            SoftReference<JAXBContext> ref = ourContextMap.get(contextPath);
            ctx = ref == null ? null : ref.get();
            if (ctx == null)
            {
                ctx = createContext(contextPath);
                ourContextMap.put(contextPath, new SoftReference<>(ctx));
            }
        }
        finally
        {
            ourCreateContextLock.unlock();
        }
        return ctx;
    }

    /**
     * Removes a cached context from the internal cache.
     *
     * @param target - the target class to remove
     */
    public static void removeCachedContext(Class<?> target)
    {
        ourCreateContextLock.lock();
        try
        {
            ourContextMap.remove(target.getName());
        }
        finally
        {
            ourCreateContextLock.unlock();
        }
    }

    /**
     * Builds a deterministic cache key for a set of classes: the empty string
     * for no classes, the class name for one, or the sorted comma-separated
     * class names otherwise.
     *
     * @param classes The classes for which to build a key.
     * @return the cache key
     */
    private static String buildClassKey(Class<?>[] classes)
    {
        if (classes.length == 0)
        {
            return "";
        }
        if (classes.length == 1)
        {
            return classes[0].getName();
        }
        String[] names = new String[classes.length];
        for (int i = 0; i < classes.length; ++i)
        {
            names[i] = classes[i].getName();
        }
        // Sort so equivalent class sets map to the same cache key.
        Arrays.sort(names);
        StringBuilder sb = new StringBuilder(32);
        for (String name : names)
        {
            sb.append(name).append(',');
        }
        // Drop the trailing ',' separator.
        sb.setLength(sb.length() - 1);
        return sb.toString();
    }

    /**
     * Creates a new {@link JAXBContext}. Callers already hold
     * {@link #ourCreateContextLock}; the original re-acquired the (reentrant)
     * lock here, which was harmless but redundant.
     *
     * @param classes The classes for which to make a context.
     * @return the {@link JAXBContext}
     * @throws JAXBException If an error occurred obtaining the JAXBContext.
     */
    private static JAXBContext createContext(Class<?>[] classes) throws JAXBException
    {
        return JAXBContext.newInstance(classes);
    }

    /**
     * Creates a new {@link JAXBContext}. Callers already hold
     * {@link #ourCreateContextLock}.
     *
     * @param contextPath Colon-separated list of package names.
     * @return the {@link JAXBContext}
     * @throws JAXBException If an error occurred obtaining the JAXBContext.
     * @see JAXBContext#newInstance(String, ClassLoader)
     */
    private static JAXBContext createContext(String contextPath) throws JAXBException
    {
        return JAXBContext.newInstance(contextPath);
    }

    /** Disallow instantiation. */
    private JAXBContextHelper()
    {
    }
}
|
#!/bin/sh
# Reset provisioning state: remove the persisted marker directory and restart
# the AOS service group so it comes back up unprovisioned.
# (Fix: the original "# !/bin/sh" had a space after '#', which is not a valid
# shebang, so the kernel would not invoke /bin/sh for this script.)
rm -rf /var/unprovisioned_state
systemctl restart aos.target
|
<reponame>KharkovIT/PP
package com.ua.nure.TestHelper.controller;
import com.ua.nure.TestHelper.domain.Link;
import com.ua.nure.TestHelper.domain.Template;
import com.ua.nure.TestHelper.domain.User;
import com.ua.nure.TestHelper.repository.TemplateRepository;
import com.ua.nure.TestHelper.service.TemplateService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.security.SecurityProperties;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@RequestMapping("/templates")
public class TemplateController {

    /** Service handling template persistence; injected by Spring. */
    @Autowired
    TemplateService templateService;

    /**
     * Creates a new template.
     *
     * @param template the template to persist
     * @return the saved template, or null when the service rejects the input
     */
    @CrossOrigin
    @RequestMapping(value ="/addTemplate", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE)
    public Template addTempl(@RequestBody Template template) {
        try{
            return templateService.addTemplate(template);
        }catch (NullPointerException e){
            // NOTE(review): swallowing NPE and returning null hides failures
            // from the client; consider a proper error response instead.
            System.out.println("no no no");
        }
        return null;
    }

    /**
     * Lists all templates belonging to the given teacher.
     *
     * @param user the requesting user whose id selects the templates
     * @return the teacher's templates, or null on failure
     */
    @CrossOrigin
    @RequestMapping(value ="/getTemplates", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE)
    public List<Template> getTemplates(@RequestBody User user) {
        try{
            // Fix: query the service once; the original called
            // getAllByTeacherId twice (once for logging, once for the return).
            List<Template> templates = templateService.getAllByTeacherId(user.getIdUser());
            System.out.println(templates);
            return templates;
        }catch (NullPointerException e){
            System.out.println("no no no");
        }
        return null;
    }

    /**
     * Deletes the given template.
     *
     * @param template the template to delete
     */
    @CrossOrigin
    @RequestMapping(value ="/deleteTemplate", method = RequestMethod.POST, consumes = MediaType.APPLICATION_JSON_VALUE)
    public void deleteTempl(@RequestBody Template template) {
        try{
            templateService.delete(template);
        }catch (NullPointerException e){
            System.out.println("no no no");
        }
    }
}
|
<reponame>webeyemob/TGCenter_iOS_Pub<filename>TGCAppsFlyer.podspec
# CocoaPods podspec for TGCAppsFlyer: a prebuilt framework wrapper around the
# AppsFlyer SDK, distributed as a versioned zip from the TGCenter_iOS_Pub repo.
Pod::Spec.new do |spec|
  spec.name = "TGCAppsFlyer"
  # Wrapper version; also selects the "TGCAppsFlyer_<version>" artifact paths below.
  spec.version = "6.2.5.1"
  spec.summary = "AppsFlyer for TGCAppsFlyer."
  spec.homepage = "https://github.com/webeyemob/TGCenter_iOS_Pub"
  spec.license = { :type => 'MIT', :file => "TGCAppsFlyer_#{spec.version}/LICENSE" }
  spec.author = "TGCenter"
  spec.platform = :ios, "8.0"
  # Binary distribution: the zip contains the versioned framework + license.
  spec.source = { :http => "https://github.com/webeyemob/TGCenter_iOS_Pub/raw/master/TGCAppsFlyer/TGCAppsFlyer_#{spec.version}.zip" }
  spec.vendored_frameworks = "TGCAppsFlyer_#{spec.version}/TGCAppsFlyer.framework"
  # Pinned upstream AppsFlyer SDK this wrapper was built against.
  spec.dependency 'AppsFlyerFramework', '6.2.5'
end
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.