blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
8bccda9a0959fd7ed6ed2b91d9ff1f9b7fcb986d | Shell | aurae-runtime/aurae | /auraed/hack/kernel/mk-kernel | UTF-8 | 3,960 | 3.234375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# -------------------------------------------------------------------------- #
# Apache 2.0 License Copyright © 2022-2023 The Aurae Authors #
# #
# +--------------------------------------------+ #
# | █████╗ ██╗ ██╗██████╗ █████╗ ███████╗ | #
# | ██╔══██╗██║ ██║██╔══██╗██╔══██╗██╔════╝ | #
# | ███████║██║ ██║██████╔╝███████║█████╗ | #
# | ██╔══██║██║ ██║██╔══██╗██╔══██║██╔══╝ | #
# | ██║ ██║╚██████╔╝██║ ██║██║ ██║███████╗ | #
# | ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝ | #
# +--------------------------------------------+ #
# #
# Distributed Systems Runtime #
# #
# -------------------------------------------------------------------------- #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
# #
# -------------------------------------------------------------------------- #
set -e

# Build everything in a throwaway directory; resolve this script's own dir.
workingDir=$(mktemp -d)
thisDir=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

# Remove the temporary build tree on any exit path (trap below).
# FIX: quote the expansion so a path with spaces cannot split.
function clean {
    rm -rf -- "$workingDir"
}
trap clean EXIT

# Provides KERNEL_VERSION / KERNEL_CONFIG.
# NOTE(review): resolved relative to the caller's CWD, not $thisDir —
# confirm that is intentional.
source config.sh

TARGET_DIR=${TARGET_DIR:-$thisDir/../../target}
[ ! -d "${TARGET_DIR}" ] && echo "Directory: '$TARGET_DIR' does not exist." && exit 1

# Skip the (long) kernel build when all three artifacts already exist,
# unless REBUILD_KERNEL is set in the environment.
[ -f "${TARGET_DIR}/kernel/vmlinuz-$KERNEL_VERSION" ] &&
    [ -f "${TARGET_DIR}/kernel/System.map-$KERNEL_VERSION" ] &&
    [ -f "${TARGET_DIR}/kernel/config-$KERNEL_VERSION" ] &&
    [ -z "${REBUILD_KERNEL+x}" ] &&
    echo -e "Skip kernel build, artifacts already exist." &&
    echo -e "\t clean kernel artifacts if you want to rebuild." &&
    exit 0

pushd "$workingDir"
echo "Downloading Linux Kernel $KERNEL_VERSION"
# Stream the tarball straight into tar (no intermediate file).
wget -qO- --show-progress "https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-${KERNEL_VERSION}.tar.xz" |
    tar xJ
cp "$thisDir/config/$KERNEL_CONFIG" "linux-${KERNEL_VERSION}/.config"
pushd "linux-${KERNEL_VERSION}"
# FIX: $(nproc) instead of backticks; quoted target paths throughout.
make -j"$(nproc)"
mkdir -p "${TARGET_DIR}/kernel"
cp -v arch/x86/boot/bzImage "${TARGET_DIR}/kernel/vmlinuz-$KERNEL_VERSION"
cp -v System.map "${TARGET_DIR}/kernel/System.map-$KERNEL_VERSION"
cp -v .config "${TARGET_DIR}/kernel/config-$KERNEL_VERSION"
popd # linux src
popd # working dir | true |
5bfc2d4c86a4623dd1f8dace475353d75a5e0abf | Shell | MrXcitement/dot-bash | /home/.bash_logout | UTF-8 | 391 | 3.234375 | 3 | [] | no_license | #!/bin/bash
# bash_logout -- Personal items to perform at logout.
# Mike Mike <mike@thebarkers.com>
# April 13th, 2013
# Copyright (C) 2013-2014 Mike Barker
# shellcheck disable=SC2128
# Trace which file is being sourced when DEBUG is set (stderr only).
[ "${DEBUG}" ] && echo "${BASH_SOURCE}" >&2
# Cleanup when logging out
# If a GNOME keyring daemon was started for this session, shut it down
# along with its dbus-launch process.
if [ -n "$GNOME_KEYRING_PID" ]; then
    killall gnome-keyring-daemon
    # NOTE(review): SIGKILL as first resort on dbus-launch — presumably it
    # ignores SIGTERM here; confirm before softening.
    killall -9 dbus-launch
fi
[ "${DEBUG}" ] && echo "Goodbye, from bash..." >&2
| true |
5cfb005e631ee42e3ad1e5a00c9d1b247356bf28 | Shell | squatched/comp-setup | /bash/.bashrc | UTF-8 | 8,320 | 3.546875 | 4 | [] | no_license | #
# ~/.bashrc
#
# Source "$1" when it exists as a regular file; otherwise return non-zero
# without complaint (same status behavior as `[[ -f ]] && source`).
__source_if_file () {
    local candidate="$1"
    [[ -f "$candidate" ]] || return
    . "$candidate"
}
# Uncomment to trace how each sourced file changes $PATH.
#DEBUG_PATH=true
# Snapshot of the formatted $PATH, compared against by __display_path_diff.
PATH_CACHE=
# Print $PATH with one directory per line by rewriting each ":" separator
# as a newline.
__format_path () {
    local one_per_line="${PATH//:/$'\n'}"
    echo "$one_per_line"
}
# Refresh PATH_CACHE with the current formatted $PATH.
# No-op unless DEBUG_PATH=true.
__update_path_cache () {
    [[ $DEBUG_PATH != true ]] && return 0
    PATH_CACHE=$(__format_path)
}
# Print the current $PATH (one entry per line) under a "$1 PATH:" header.
# No-op unless DEBUG_PATH=true.
__display_path () {
    [[ $DEBUG_PATH != true ]] && return 0
    echo "$1 PATH:"
    __format_path
    echo $'\n'
}
# Diff the current $PATH against PATH_CACHE, print the changes under a
# "$1 PATH Differences:" header, then advance the cache.
# No-op unless DEBUG_PATH=true.
__display_path_diff () {
    [[ $DEBUG_PATH != true ]] && return 0
    DISP_PATH=$(__format_path)
    echo "$1 PATH Differences:"
    diff --normal <(echo "$PATH_CACHE") <(echo "$DISP_PATH")
    echo ""
    PATH_CACHE=$DISP_PATH
}
__update_path_cache
__display_path "Initial path"
__source_if_file ${HOME}/.bash_proprietary_pre_all
__display_path_diff "After ~/.bash_proprietary_pre_all"
# If not running interactively, don't do anything
[[ $- != *i* ]] && return
# Source global definitions
__source_if_file /etc/bashrc
__display_path_diff "After /etc/bashrc"
# Source pre-proprietary stuff
__source_if_file ${HOME}/.bash_proprietary_pre
__display_path_diff "After ~/.bash_proprietary_pre"
# Turn off history expansion through '!'.
#set +o histexpand
# Disable C-d exiting the shell.
set -o ignoreeof
export IGNOREEOF=1000
# don't put duplicate lines in the history. See bash(1) for more options
# ... or force ignoredups and ignorespace
export HISTCONTROL=ignoredups:ignorespace
# append to the history file, don't overwrite it
shopt -s histappend
# save multi-line commands as one line
shopt -s cmdhist
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
export HISTSIZE=100000
export HISTFILESIZE=2000000
# ignore the exit command and duplicates
export HISTIGNORE="&:[ ]*:exit:ignoredups"
# Tell ncurses to always use UTF-8 line drawing characters
export NCURSES_NO_UTF8_ACS=1
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# make less more friendly for non-text input files, see lesspipe(1)
[[ -x /usr/bin/lesspipe ]] && eval "$(SHELL=/bin/sh lesspipe)"
# source git prompt decoration and tab completion
# Detect the distribution so the right git-prompt/completion paths are used:
# brew => MacOS, else lsb_release, else /etc/os-release NAME=.
distributor_id=''
hash brew 2>/dev/null && distributor_id="MacOS"
if hash lsb_release 2>/dev/null; then
    distributor_id=$(lsb_release -a 2>/dev/null | sed --expression '/^Distributor ID:/!d' --regexp-extended --expression 's/^[^:]+:\s*//')
fi
if [[ -z $distributor_id ]] && [[ -f /etc/os-release ]]; then
    distributor_id=$(sed '/^NAME=/ !d; s/NAME="\([^"]\+\)"/\1/' /etc/os-release)
fi
case $distributor_id in
    Arch*|Manjaro*|BlackArch*)
        # Arch based distros
        __source_if_file /usr/share/git/completion/git-prompt.sh
        __source_if_file /usr/share/git/completion/git-completion.bash
        ;;
    Ubuntu|Parrot)
        # Debian distros
        __source_if_file /usr/lib/git-core/git-sh-prompt
        __source_if_file /usr/share/bash-completion/completions/git
        ;;
    MacOS)
        if [[ -e "$(brew --prefix git)/etc/bash_completion.d" ]]; then
            source $(brew --prefix git)/etc/bash_completion.d/git-prompt.sh
            source $(brew --prefix git)/etc/bash_completion.d/git-completion.bash
        fi
        ;;
    "Amazon Linux")
        # Git Completion is handled through /etc/bashrc but git-prompt is not...
        # Go figure.
        __source_if_file /usr/share/git-core/contrib/completion/git-prompt.sh
esac
__display_path_diff "After git prompt & completion"
# Source fzf auto completion
__source_if_file /usr/share/fzf/key-bindings.bash
__source_if_file ${HOME}/.fzf.bash
__display_path_diff "After fzf setup"
# set a fancy prompt (non-color, unless we know we "want" color)
case "$TERM" in
    linux|xterm-color|*-256color) color_prompt=yes;;
esac
# uncomment for a colored prompt, if the terminal has the capability; turned
# off by default to not distract the user: the focus in a terminal window
# should be on the output of commands, not on the prompt
#force_color_prompt=true
if ${force_color_prompt:-false}; then
    if [[ -x /usr/bin/tput ]] && tput setaf 1 >&/dev/null; then
        # We have color support; assume it's compliant with Ecma-48
        # (ISO/IEC-6429). (Lack of such support is extremely rare, and such
        # a case would tend to support setf rather than setaf.)
        color_prompt=yes
    else
        color_prompt=
    fi
fi
# Escape sequences default to empty so a non-color prompt degrades cleanly.
xtc_default=
xtc_yellow=
xtc_green=
xtc_purple=
xtc_red=
xtc_white=
xtc_lblue=
xtc_lgreen=
if [[ ${color_prompt} = yes ]]; then
    # Set a bunch of xTerm colors.
    xtc_default='\[\e[00;00m\]'
    xtc_yellow='\[\e[01;33m\]'
    xtc_green='\[\e[01;32m\]'
    xtc_purple='\[\e[01;34m\]'
    xtc_red='\[\e[00;31m\]'
    xtc_white='\[\e[00;37m\]'
    xtc_lblue='\[\e[01;96m\]'
    xtc_lgreen='\[\e[00;39m\]'
fi
# Prompt customization. Broken up this way to facilitate
# PROMPT_COMMAND='__git_ps1 ...'
# Red with an optional (white) [✗] if the last command failed.
PROMPT_PRE_GIT=$xtc_red'┌─$([[ $? != 0 ]] && printf "['$xtc_white'✗'$xtc_red']─")'
# [user with normal user being green and root being red.
PROMPT_PRE_GIT=$PROMPT_PRE_GIT'['$(if [[ ${EUID} == 0 ]]; then echo $xtc_red'root'; else echo $xtc_lgreen'\u'; fi)
# yellow '@' and light blue hostname with red ']─'.
PROMPT_PRE_GIT=$PROMPT_PRE_GIT$xtc_yellow'@'$xtc_lblue'${HOSTNAME_PROMPT_LABEL:-\h}'$xtc_red']─'
# [pwd] (green)
PROMPT_PRE_GIT=$PROMPT_PRE_GIT'['$xtc_green'\w'$xtc_red']'$xtc_default
# ─[virtualenv] if it exists.
PROMPT_POST_GIT='$([[ ! -z $VIRTUAL_ENV ]] && printf "'$xtc_red'─['$xtc_purple\${VIRTUAL_ENV//*\\/}$xtc_red']'$xtc_default'")'
# Second line └──╼ $
PROMPT_POST_GIT=$PROMPT_POST_GIT'\n'$xtc_red'└──╼'$xtc_yellow'$'$xtc_default' '
# When git's prompt helper is available, let __git_ps1 assemble PS1 every
# command; otherwise fall back to a static two-part prompt.
if type __git_ps1 >/dev/null 2>&1; then
    # Git customization, show in yellow.
    PROMPT_GIT=$xtc_red'─['$xtc_yellow'%s'$xtc_red']'$xtc_default
    PROMPT_COMMAND='__git_ps1 "$PROMPT_PRE_GIT" "$PROMPT_POST_GIT" "$PROMPT_GIT"'
else
    PS1=$PROMPT_PRE_GIT$PROMPT_POST_GIT
fi
# Set 'man' colors
if [ "$color_prompt" = yes ]; then
    # Wrap man(1) so less renders bold/underline/standout sections in color
    # via the LESS_TERMCAP_* overrides (scoped to the man invocation only).
    man() {
        env \
        LESS_TERMCAP_mb=$'\e[01;31m' \
        LESS_TERMCAP_md=$'\e[01;31m' \
        LESS_TERMCAP_me=$'\e[0m' \
        LESS_TERMCAP_se=$'\e[0m' \
        LESS_TERMCAP_so=$'\e[01;44;33m' \
        LESS_TERMCAP_ue=$'\e[0m' \
        LESS_TERMCAP_us=$'\e[01;32m' \
        man "$@"
    }
fi
unset color_prompt force_color_prompt
# enable programmable completion features (you don't need to enable
# this, if it's already enabled in /etc/bash.bashrc and /etc/profile
# sources /etc/bash.bashrc).
if ! shopt -oq posix; then
    if [[ -f /usr/share/bash-completion/bash_completion ]]; then
        . /usr/share/bash-completion/bash_completion
    elif [[ -f /etc/bash_completion ]]; then
        . /etc/bash_completion
    fi
fi
# Configure Perforce
[[ -f $HOME/.p4config ]] && export P4CONFIG=$HOME/.p4config
# Source script environment setup.
__source_if_file ${HOME}/.bash_script_env
__display_path_diff "After .bash_script_env"
# Alias definitions from a separate file.
__source_if_file $HOME/.bash_aliases
__display_path_diff "After .bash_aliases"
__source_if_file $HOME/.bash_functions
__display_path_diff "After .bash_functions"
# Environment exports from a separate file.
__source_if_file $HOME/.bash_environment_global
__display_path_diff "After .bash_environment_global"
__source_if_file $HOME/.bash_environment
__display_path_diff "After .bash_environment"
# Source completions. The nullglob shenanigans are just in case
# the directory doesn't exist.
# shopt -p prints the command that restores the current setting, so running
# $ORIGINAL_NULLGLOB afterwards puts nullglob back exactly as it was.
ORIGINAL_NULLGLOB=$(shopt -p nullglob)
shopt -s nullglob
for completion_file in $HOME/.bash_completion.d/*; do
    source "$completion_file"
done
$ORIGINAL_NULLGLOB
__display_path_diff "After .bash_completion.d/"
# Special bash tab-completion
hash aws_completer 2>/dev/null && complete -C aws_completer aws
hash terraform 2>/dev/null && complete -C terraform terraform
# Proprietary scripts.
__source_if_file $HOME/.bash_proprietary_post
__display_path_diff "After .bash_proprietary_post"
# Always include the systemd expected local bin. This is where I put
# my scripts so to keep things sane, I'll include it even in non
# systemd managed systems.
echo "${PATH}" | grep "/.local/bin" >/dev/null || PATH="${HOME}/.local/bin:${PATH}"
__display_path_diff "After ~/.local/bin setup"
[ -f ~/.fzf.bash ] && source ~/.fzf.bash
__display_path_diff "After FZF setup"
| true |
bb2e52abf6bd0dd168dad645e26bf57ad12ecf00 | Shell | sfujiwara/preemptible-workers-sample-python | /startup.sh | UTF-8 | 1,292 | 3.03125 | 3 | [] | no_license | #!/usr/bin/env bash
# Provision a GCE worker VM: system packages, Python deps, the Stackdriver
# logging agent (google-fluentd), then fetch and run the task consumer.
apt-get update
apt-get -y upgrade
apt-get -y install python-dev
apt-get -y install python-pip
apt-get -y install git

# Install Python packages
pip install gcloud
pip install google-api-python-client
pip install git+https://github.com/sfujiwara/gjhandler.git

# Install google-fluentd
curl -sSO https://dl.google.com/cloudagents/install-logging-agent.sh
sha256sum install-logging-agent.sh
sudo bash install-logging-agent.sh

# Create config file for google-fluentd: tail the JSON logs the worker
# writes under /var/log/python. A heredoc replaces the previous chain of
# echo-append lines; the quoted delimiter keeps the content literal.
FLUENTD_CONF_FILE="/etc/google-fluentd/config.d/python.conf"
cat > "${FLUENTD_CONF_FILE}" <<'EOF'
<source>
  type tail
  format json
  path /var/log/python/*.log,/var/log/python/*.json
  read_from_head true
  tag python
</source>
EOF

# Create log directory for Python script
mkdir -p /var/log/python

# Restart google-fluentd so it picks up the new source
service google-fluentd restart

# Run python script. Project id comes from the metadata server
# ($() instead of backticks; expansions quoted).
PROJECT_ID=$(curl "http://metadata.google.internal/computeMetadata/v1/project/project-id" -H "Metadata-Flavor: Google")
PYTHON_SCRIPT="consume_tasks.py"
cd /tmp
gsutil cp "gs://${PROJECT_ID}.appspot.com/tmp/${PYTHON_SCRIPT}" /tmp/
python "${PYTHON_SCRIPT}"
| true |
ddbb17bfbb55e34ea6e66b22f03193b2bcef275c | Shell | tiltit/messy80-breadboard | /utils/export_labels/export_labels.sh | UTF-8 | 244 | 3.0625 | 3 | [] | no_license | #!/bin/bash
# Build an awk condition of the form (/pat1/ || /pat2/ || ...) && /equ/
# from the label names listed in file $1, then use it to filter each line
# of file $2 (keeping only 'equ' definitions for the requested labels).
AWKARG="("
while read p; do
    # Join patterns with " || " after the first one.
    if [ "$AWKARG" != "(" ]
    then
        AWKARG+=" || "
    fi
    AWKARG+="/"
    AWKARG+=$p
    AWKARG+="/"
done < $1
AWKARG+=")"
AWKARG+=" && /equ/"
#echo $AWKARG
# Run awk per input line with the assembled program as the pattern.
while read p; do
    echo $p | awk "$AWKARG"
done < $2
| true |
b8b58214ae249e5a66e8a2b5335650993ea99db3 | Shell | Yodata/solid-serverless | /scripts/lambda-gradle-deploy | UTF-8 | 455 | 3.109375 | 3 | [] | no_license | #!/usr/bin/env bash
# Deploy a service jar from S3 to its AWS Lambda function.
# Args (all optional): $1 service name, $2 S3 bucket, $3 function name.
SERVICE="$1"
if [ -z "$SERVICE" ]; then
    SERVICE=$(service-compute-name)
fi

BUCKET="$2"
if [ -z "$BUCKET" ]; then
    BUCKET=$(s3-bucket-compute-name)
fi

FUNCTION="$3"
# BUG FIX: this previously tested "$FUCTION" (typo), which is always empty,
# so an explicitly supplied $3 was silently overwritten with the default.
if [ -z "$FUNCTION" ]; then
    FUNCTION="solid-server-$SERVICE"
fi

JAR_NAME=$(service-compute-name)

echo "Deploying $SERVICE"
aws lambda update-function-code --region "$AWS_REGION" --function-name "$FUNCTION" --s3-bucket "$BUCKET" --s3-key "$JAR_NAME.jar" > /dev/null
| true |
0406e833c6b459f8a822c31516786c1eb03054c0 | Shell | SliTaz-official/wok-next | /pcmanfm/stuff/gvfs-mount-archive.sh | UTF-8 | 147 | 2.578125 | 3 | [] | no_license | #!/bin/sh
# gvfs-mount-archive: simple wrapper for PCManFM action "Mount archive"
# Percent-encode ':' and '/' in the archive path so it can be embedded in
# an archive:// URI, then mount it via gvfs.
gvfs-mount "archive://$(echo "$1" | sed 's|:|%3A|g;s|/|%2F|g')"
| true |
8bf99bea6d5a4e3290bca67b9f01fe165dd3bf07 | Shell | mijime/sham | /test/plug/list.sh | UTF-8 | 2,069 | 2.71875 | 3 | [] | no_license | #!/bin/bash
source src/plug/list.sh;
setup() {
export __g__home=/tmp/sham-${UNITTEST_NO};
export __g__stats=${__g__home}/cache;
mkdir -p "${__g__home}";
cat << EOF > "${__g__home}/cache";
@@#no=0#as=mijime/sham00#at=#dir=/tmp/sham/repos/mijime/sham00#from=file://.#of=#use=#do=#stat=0
@@#no=1#as=mijime/sham01#at=#dir=/tmp/sham/repos/mijime/sham01#from=file://.#of=#use=#do=#stat=0
@@#no=2#as=mijime/sham02#at=#dir=/tmp/sham/repos/mijime/sham02#from=file://.#of=#use=#do=#stat=0
@@#no=3#as=mijime/sham03#at=#dir=/tmp/sham/repos/mijime/sham03#from=file://.#of=#use=#do=#stat=3
@@#no=4#as=mijime/sham04#at=#dir=/tmp/sham/repos/mijime/sham04#from=file://.#of=#use=#do=#stat=4
EOF
}
teardown() {
[[ ! -d ${__g__home} ]] || rm -rf "${__g__home}";
unset SHAM_PLUGS;
unset __g__home __g__stats;
}
__test__list_01() {
local SHAM_PLUGS=("@@#no=0#as=mijime/sham00#at=#dir=/tmp/sham/repos/mijime/sham00#from=file://.#of=#use=#do=#stat=1");
__sham__plug__list;
__sham__plug__list|grep -c "#as=mijime/sham00#.*#stat=1";
}
__test__list_01_noupdate() {
local SHAM_PLUGS=("@@#no=0#as=mijime/sham00#at=#dir=/tmp/sham/repos/mijime/sham00#from=file://.#of=#use=#do=#stat=2");
__sham__plug__list;
__sham__plug__list|grep -c "#as=mijime/sham00#.*#stat=0";
}
__test__list_01_update() {
local SHAM_PLUGS=("@@#no=0#as=mijime/sham00#at=#dir=/tmp/sham/repos/mijime/sham00#from=file://.#of=*.sh#use=#do=#stat=2");
__sham__plug__list;
__sham__plug__list|grep -c "#as=mijime/sham00#.*#stat=2";
}
__test__list_02() {
__sham__plug__list;
__sham__plug__list|grep -c "#as=mijime/sham02#.*#stat=0";
}
__test__list_03() {
local SHAM_PLUGS=("@@#no=0#as=mijime/sham01#at=#dir=/tmp/sham/repos/mijime/sham00#from=file://.#of=#use=#do=#stat=1");
__sham__plug__list;
__sham__plug__list|grep -c "#as=mijime/sham02#.*#stat=3";
}
__test__list_04() {
local SHAM_PLUGS=("@@#no=0#as=mijime/sham01#at=#dir=/tmp/sham/repos/mijime/sham00#from=file://.#of=#use=#do=#stat=1");
__sham__plug__list;
__sham__plug__list|grep -c "#as=mijime/sham04#.*#stat=4";
}
| true |
a802714485f96e559233ad9fb19b72f4525b9f30 | Shell | RobertTalbert/discretecs-book | /convert | UTF-8 | 412 | 2.796875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Build epub/docx versions of the book from the Jekyll chapter sources.
rm -rf _chapters_temp #clear out previous versions of the book
cp -rf _chapters _chapters_temp
# NOTE(review): 'sed -i' without a suffix argument is GNU sed; BSD/macOS
# sed needs `sed -i ''` — confirm target platform.
sed -i '1,4d' _chapters_temp/* # Remove Jekyll headers from pages
cat _chapters_temp/*.md > book.txt # Place all chapters in one txt file
pandoc title.txt book.txt -o book.epub # convert to epub
pandoc title.txt book.txt -o book.docx # convert to docx
rm -rf _chapters_temp # remove temp files
| true |
5135400bda8768a77b7cfb1a42936e19ea647935 | Shell | spjulius/Wallbasedownloader | /wallbasedownloader.sh | UTF-8 | 7,078 | 3.640625 | 4 | [] | no_license | #!/bin/bash
#
# WWD v0.4
# Copyright (C) 2011 EXio4
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
## Version 0.4.1
## Cambios:
## Agregado menu de categorias [Al hacer Random en WallBase]
## Version 0.4
## Se agrego:
## Soporte de busquedas en Deviantart
## Cambios minimos:
## Algunos colores agregados
## Version 0.3 [Rearmada]
## Cambios de esta version:
## No se usan mas bucles
## Soporte de categorias
## Opcion de bajar wallpapers con solo pasar el link (a wallbase)
## Soporte de directorios corregido
##
## Se borro:
## El manejo de la cantidad por bucles
## El uso de funciones a medias
## Variables globales [Default]
walls=10 # Default number of wallpapers to download
categoria="high-resolution" # Can be 'high-resolution', 'rozne' or 'manga-anime'
dir=$PWD # Default directory [TO USE]
## Program variables [editing these is not recommended]
file1="cache1.txt"
file2="cache2.txt"
file3="cache3.txt"
wget_list="wget-list.txt"
option="normal"
cant=0
## Terminal color escape sequences.
export esc="\033"
export red="${esc}[31m${esc}[1m" # Red
export green="${esc}[32m${esc}[1m" # Green
export yellow="${esc}[33m${esc}[1m" # Yellow
export bold="${esc}[1m" # Bold
export reset2="${esc}[0m" # Reset colors
# Print "$@" in red after a yellow ">>" marker (with trailing newline).
recho() {
    echo -e "${yellow}>> ${red}${@}${reset2}"
}
# Print "$@" in green after a yellow ">>" marker (with trailing newline).
gecho() {
    echo -e "${yellow}>> ${green}${@}${reset2}"
}
# Print "$@" in green, no trailing newline (used for input prompts).
gprintf() {
    echo -en "${green}${@}${reset2}"
}
# Print "$@" in red, no trailing newline (used for input prompts).
rprintf() {
    echo -en "${red}${@}${reset2}"
}
# Fetch a fresh wallbase.cc "random" page into $file1.
normal.reload() {
    wget -O "$file1" "http://wallbase.cc/random" &>/dev/null
}
# Scan the wallpaper page in $1 for jpg <img> URLs; queue the URL when it
# matches $categoria (return 0), otherwise skip it (return 1).
# NOTE: the loop returns on its first iteration either way, so only the
# first extracted URL is ever considered.
normal.extract2() {
    code=$(cat $1 | grep jpg | grep "<img" | cut -d"'" -f2)
    for i in $code; do
        if [[ "$i" = *${categoria}* ]]; then
            gecho "URL: $i"
            echo $i >> $wget_list
            return 0
        else
            recho "URL: $i [Not downloading...]"
            return 1
        fi
    done
}
# Like normal.extract2 but without the category filter: queue the first
# jpg <img> URL found in $1 unconditionally.
normal.extract2l() {
    code=$(cat $1 | grep jpg | grep "<img" | cut -d"'" -f2)
    for i in $code; do
        gecho "URL: $i"
        echo $i >> $wget_list
        return 0
    done
}
# Keep loading random wallbase pages and queueing matching wallpapers
# until $walls URLs have been collected (counter: $cant).
normal.extract() {
    recho "RandomWallBase running.."
    while true; do
        normal.reload
        # Pull the per-wallpaper page links out of the random listing.
        wallpapers=$(cat $file1 | grep "<a href=" | grep wallpaper | cut -d"=" -f2 | cut -d"\"" -f2 | grep wallbase) #
        for i in $wallpapers; do
            # break 2 leaves both the for and the outer while loop.
            [[ "$cant" = "$walls" ]] && break 2
            wget -O "$file2" "$i" &>/dev/null
            normal.extract2 $file2
            result=$?
            # Only count wallpapers that matched the category filter.
            if [[ $result = 0 ]]; then
                cant=$(expr $cant + 1)
            fi
        done
    done
}
# Download every URL queued in $wget_list into the current directory,
# skipping files that already exist locally.
# FIX: quote all expansions (URL and basename) and read the list line by
# line instead of word-splitting `$(cat ...)`.
download_list() {
    local url name
    gecho "Downloading list of files.."
    while IFS= read -r url; do
        name=$(basename "$url")
        if [[ ! -e "$name" ]]; then
            wget -O "./$name" "$url"
        else
            recho "$url already downloaded.."
        fi
    done < "$wget_list"
}
# Search deviantart for $2 and queue up to $1 image URLs in $wget_list.
# Returns 1 when no search term was given.
deviantart_search() {
    walls=$1
    shift
    search=$1
    [[ -z $search ]] && return 1
    recho "Searching $search in deviantart..."
    # NOTE(review): '§ion=' below looks like a mangled '&section=' from a
    # lossy encoding of the original script — confirm against upstream.
    wget -O "$file1" "http://browse.deviantart.com/?qh=§ion=&q=$search" -U Mozilla &>/dev/null
    # Extract the per-artwork page links from the results page.
    lista=$(cat $file1 | grep href | grep "http://"|grep ".deviantart.com/art/" | cut -d"<" -f4|grep href|cut -d'"' -f4)
    cant=0
    for i in $lista; do
        wget -O "$file2" "$i" -U Mozilla &>/dev/null
        # Pull the direct image URLs out of each artwork page.
        url=$(cat $file2 | grep jpg | grep "<img" | grep deviantart | sed -e 's/<.*>//g' | cut -d"=" -f3|cut -d'"' -f2|grep devian)
        for a in $url; do
            # break 3 exceeds the two enclosing loops; bash exits them all.
            [[ "$cant" = "$walls" ]] && break 3
            recho "URL: $a"
            echo "$a" >> $wget_list
            cant=$(expr $cant + 1)
        done
    done
}
# Menu option 1: download N random wallpapers from wallbase, asking the
# user for the count and the category first.
menu2.1() {
    rprintf "Inserte la cantidad de wallpapers a bajar: "
    read cantidad
    [[ -z $cantidad ]] && exit 2
    walls=$cantidad
    # Only plain non-negative integers are accepted.
    if ! [[ "$walls" =~ ^[0-9]+$ ]] ; then
        recho "Introduzca un numero positivo y sin coma.."
        exit 3
    fi
    recho "De que categoria?"
    recho "1- high-resolution"
    recho "2- rozne"
    recho "3- manga-anime"
    rprintf ">> "
    read catg
    case $catg in
        1)
            recho "Usando high-resolution.."
            categoria="high-resolution"
            ;;
        2)
            recho "Usando rozne.."
            categoria="rozne"
            ;;
        3)
            recho "Usando manga-anime.."
            categoria="manga-anime"
            ;;
        *)
            # Fall back to the default category configured at the top.
            recho "Categoria nula o erronea.. usando la default [ $categoria ]"
            ;;
    esac
    #categoria="" # Can be 'high-resolution' 'rozne' 'manga-anime'
    normal.extract
}
# Menu option 2: queue one specific wallbase wallpaper given its page URL.
menu2.2() {
    rprintf "Inserte la url: "
    read url
    [[ -z $url ]] && exit 2
    # No category filter when the user picked the exact wallpaper.
    unset categoria
    wget -O $file1 $url
    result=$?
    [[ $result != 0 ]] && exit 3
    normal.extract2 $file1
}
# Menu option 3: search wallbase and queue up to N results.
menu2.3() {
    recho "Que desea buscar?"
    rprintf ">> "
    read search
    [[ -z $search ]] && exit 2
    gecho "Bajando la pagina de busquedas.."
    wget -O "$file1" "http://wallbase.cc/search/_${search}_" &> /dev/null
    recho "Cuantos resultados desea bajar?"
    rprintf ">> "
    read cantidad
    if ! [[ "$cantidad" =~ ^[0-9]+$ ]] ; then
        recho "Introduzca un numero positivo y sin coma.."
        exit 3
    fi
    cant=0
    wallpapers=$(cat $file1 | grep "<a href=" | grep wallpaper | cut -d"=" -f2 | cut -d"\"" -f2 | grep wallbase) #
    for i in $wallpapers; do
        # NOTE(review): `break 2` with a single enclosing loop — bash exits
        # the one loop; probably meant plain `break`.
        [[ "$cant" = "$cantidad" ]] && break 2
        wget -O "$file2" "$i" &>/dev/null
        normal.extract2l $file2
        cant=$(expr $cant + 1)
    done
    return 0
}
# Menu option 4: search deviantart and queue up to N results.
menu2.4() {
    recho "Que desea buscar?"
    rprintf ">> "
    read search
    [[ -z $search ]] && exit 2
    recho "Cuantos resultados desea bajar?"
    rprintf ">> "
    read cantidad
    if ! [[ "$cantidad" =~ ^[0-9]+$ ]] ; then
        recho "Introduzca un numero positivo y sin coma.."
        exit 3
    fi
    deviantart_search "$cantidad" "$search"
    return 0
}
# Print the GPL notice (triggered by the -l flag) and exit.
licence() {
    printf "$red"
    echo "This program under GPL Licence"
    printf "$green"
    echo " This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.
    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.
    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>."
    printf "$reset2"
    exit 0
}
# Main interactive menu. With -d as the first argument, ask for a target
# directory and cd into it before presenting the options.
menu() {
    recho "WWD -> 0.4 By EXio4"
    recho "Vamos a un directorio.."
    if [[ "$1" = "-d" ]]; then
        gprintf "Inserte el directorio: "
        read path
        cd $path
        result=$?
        if [[ $result != 0 ]]; then
            recho "Hubo un error, compruebe que existe el directorio"
            exit 3
        fi
    fi
    gecho "Que desea?"
    recho "1- Bajar wallpapers al azar [WallBase]"
    recho "2- Bajar un wallpaper especifico [WallBase]"
    recho "3- Buscar wallpapers en Wallbase"
    recho "4- Buscar wallpapers en Deviantart"
    rprintf ">> "
    read opt
    [[ -z $opt ]] && exit 1
    case $opt in
        1)
            menu2.1
            ;;
        2)
            menu2.2
            ;;
        3)
            menu2.3
            ;;
        4)
            menu2.4
            ;;
        *)
            recho "Opcion incorrecta"
            exit 1
            ;;
    esac
}
# Entry point: -l prints the licence and exits, otherwise run the menu.
[[ "$1" = "-l" ]] && licence
menu $@
download_list | true |
a5b36c21e5b7891742df4c22696e574da30fd612 | Shell | wsj31013/murano-apps | /RefStackClient/package/Resources/installRefStackClient.sh | UTF-8 | 1,337 | 3.140625 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Provision a 'stacker' user and install refstack-client into its home.
set -e
# Create stacker user
groupadd stacker
useradd -g stacker -s /bin/bash -d /home/stacker -m stacker
# Grant passwordless sudo; umask 226 gives the sudoers drop-in 0440 perms.
( umask 226 && echo "stacker ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/50_dev )
# Activate the client's virtualenv and add a convenience alias at login.
{
    echo "PS1='${debian_chroot:+($debian_chroot)}\u:\w\$ '"
    echo "source /home/stacker/refstack-client/.venv/bin/activate"
    echo "alias 'refstack-client'=/home/stacker/refstack-client/refstack-client"
} >> /home/stacker/.bashrc
# Copy over authorized SSH keys from the image's default user, if any.
[ -d '/home/debian' ] && cp -r /home/debian/.ssh /home/stacker
[ -d '/home/ubuntu' ] && cp -r /home/ubuntu/.ssh /home/stacker
chown -R stacker:stacker /home/stacker/.ssh
apt-get update
#Clone refstack-client repo
# %REPO% is substituted by the deployment tooling before execution.
su -c "git clone %REPO% /home/stacker/refstack-client" stacker
cd /home/stacker/refstack-client
#Setup environment
su -c "./setup_env" stacker
| true |
45f76af30983b96843089fa42b366650993d5536 | Shell | zl430/linux | /auto.sh | UTF-8 | 2,874 | 3.046875 | 3 | [] | no_license | #!/bin/bash
# Generate nginx proxy-node configuration for each game-server group and
# push it to the proxy hosts over ssh/scp.
for groupid in $(seq -f "%02g" 17 19)
do
# Select this group's rows from the master proxy inventory.
cat ../allgameserver_Proxy |grep "_${groupid}_" > .auto.tmp
# Deploy to SpeedyCloud proxies (reached over the public network).
speedy_proxy(){
# SpeedyCloud uses the public network.
cat .auto.tmp | grep "UCLOUD香港"|awk '{print $2}' > host_ip.txt
for i in $(cat .auto.tmp|grep "Proxy"|egrep "speedy"|awk '{print $2}')
do
echo "+++++++++++$i+++++++++++"
# Zone label like "ABC-region", parsed from inventory columns 5-6.
server_zone=$(grep "$i" .auto.tmp|awk '{print $5"-"$6}'|grep -Po "[A-Z]{1,5}-[a-z]{1,20}")
flag=1
# Retry the whole bootstrap sequence until every step succeeds.
while [[ $flag != 0 ]];do
ssh $i "echo -e \"nameserver 8.8.8.8\nnameserver 114.114.114.114\" > /etc/resolv.conf && service iptables stop && chkconfig iptables off && rm -rf /usr/local/src/* /etc/yum.repos.d/* && yum clean all && if [ ! -d /data/shell/ ];then mkdir -p /data/shell/;fi" && \
scp -q yum/* $i:/etc/yum.repos.d/ && \
scp -q /data/shell/nginx_log_cut_2.sh $i:/data/shell/ && \
scp -q ../allgameserver_Proxy ../shell/ng_tcp_module.sh host_ip.txt ../shell/add_nginx_proxy.sh $i:/usr/local/src/ && \
echo "server zone: ${server_zone}" && \
echo "group id: $groupid" && \
ssh $i "
cd /usr/local/src/
sh ng_tcp_module.sh $groupid ${server_zone}
\\cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
npid=\$(ps -ef |grep -c \"ngin[x]\")
if [ \$npid -gt 1 ];then
/usr/local/nginx/sbin/nginx -s reload
else
/usr/local/nginx/sbin/nginx
fi
zpid=\$(ps -ef |grep -c \"zabbi[x]\")
if [ \$zpid -gt 2 ];then
killall zabbix_agentd && /usr/local/zabbix-2.4.2/sbin/zabbix_agentd
else
/usr/local/zabbix-2.4.2/sbin/zabbix_agentd
fi
" && \
flag=0
done
done
}
# Deploy to SoftLayer proxies (reached over the internal network).
softlayer_proxy(){
cat ../allgameserver_Proxy |grep "_${groupid}_" > .auto.tmp
# softlayer uses the internal network.
cat .auto.tmp |grep "softlayer香港" |awk '{print $1}' > host_ip.txt
for i in $(cat .auto.tmp|grep "Proxy"|egrep "softlayer"|egrep -v "softlayer香港"|awk '{print $2}')
do
echo "+++++++++++$i++++++++++++"
server_zone=$(grep "$i" .auto.tmp|awk '{print $5"-"$6}'|grep -Po "[A-Z]{1,5}-[a-z]{1,20}")
flag=1
# NOTE(review): the retry loop is commented out here, unlike speedy_proxy.
# while [[ $flag != 0 ]];do
ssh $i "echo -e \"nameserver 8.8.8.8\nnameserver 114.114.114.114\" > /etc/resolv.conf && service iptables stop && chkconfig iptables off && rm -rf /usr/local/src/* /etc/yum.repos.d/* && yum clean all && if [ ! -d /data/shell/ ];then mkdir -p /data/shell/;fi" && \
scp -q yum/* $i:/etc/yum.repos.d/ && \
scp -q /data/shell/nginx_log_cut_2.sh $i:/data/shell/ && \
scp -q ../allgameserver_Proxy ../shell/ng_tcp_module.sh host_ip.txt ../shell/add_nginx_proxy.sh $i:/usr/local/src/ && \
echo "server zone: ${server_zone}" && \
echo "group id: $groupid" && \
ssh $i "
cd /usr/local/src/
sh ng_tcp_module.sh $groupid ${server_zone}
\\cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
npid=\$(ps -ef |grep -c \"ngin[x]\")
if [ \$npid -gt 1 ];then
/usr/local/nginx/sbin/nginx -s reload
else
/usr/local/nginx/sbin/nginx
fi
" && \
flag=0
# done
done
}
#speedy_proxy
softlayer_proxy
done
| true |
a178c1c49ed5237fc75d89c4379836419c8c072d | Shell | globz-eu/recipes | /scripts/release | UTF-8 | 345 | 3.046875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Build the app and package dist/ into a versioned tarball for release.
set -eo pipefail

DIST_DIR="dist"
ARTIFACTS_DIR="artifacts"

echo "Building"
npm run build

echo "Packaging"
mkdir "${ARTIFACTS_DIR}"
cd "${DIST_DIR}"
# Record build metadata from the Travis environment.
# FIX: printf with a quoted format replaces the previous unquoted echo,
# which relied on word-splitting (and risked glob/brace expansion) to
# reassemble the JSON string.
printf '{"branch": "%s", "commit": "%s", "build": %s}\n' \
    "$TRAVIS_BRANCH" "$TRAVIS_COMMIT" "$TRAVIS_BUILD_NUMBER" > build.json
tar czvf "../${ARTIFACTS_DIR}/recipes-${TRAVIS_BRANCH}.tar.gz" * # convert to tarball
| true |
f93876fed0056d34dde6616fbbb000be83df5134 | Shell | xiaoyao007/SANA | /regression-tests/CompressedFiles/test.sh | UTF-8 | 1,163 | 3.53125 | 4 | [] | no_license | #!/bin/bash
# Print an error to stderr and abort.
die() { echo "$@" >&2; exit 1
}

# Regression test: SANA must load bz2/gz compressed edge lists.
echo 'Testing measurement CompressedFiles'
TEST_DIR="$(pwd)/regression-tests/CompressedFiles"
[ -d "$TEST_DIR" ] || die "should be run from top-level directory of the SANA repo"

# Start from empty alignment files so stale results can't pass the check.
echo "" > "$TEST_DIR/test1.align"
echo "" > "$TEST_DIR/test2.align"
exitCode=0

echo "Test 1: Testing compressed edgelists AThaliana.el.bz2 and MMusculus.el.gz"
./sana -t 1 -fg1 "$TEST_DIR/AThaliana.el.bz2" -fg2 "$TEST_DIR/MMusculus.el.gz" -o "$TEST_DIR/test1" &> "$TEST_DIR/test1.progress"
lines=$(wc -l "$TEST_DIR/test1.align" | awk '{print $1}')
# A successful run produces at least 100 aligned node pairs.
if [ "$lines" -lt 100 ]
then
    echo "Test 1: Failed to load networks AThaliana.el.bz2 and MMusculus.el.gz"
    exitCode=1
fi

echo "Test 2: Testing compressed edgelists AThaliana.el.gz and yeast-reduced-wayne.gw.gz"
./sana -t 1 -fg1 "$TEST_DIR/AThaliana.el.gz" -fg2 "$TEST_DIR/yeast-reduced-wayne.gw.gz" -o "$TEST_DIR/test2" &> "$TEST_DIR/test2.progress"
# BUG FIX: this previously counted lines in test1.align again (copy-paste),
# so Test 2 passed or failed based on Test 1's output.
lines=$(wc -l "$TEST_DIR/test2.align" | awk '{print $1}')
if [ "$lines" -lt 100 ]
then
    echo "Test 2: Failed to load networks AThaliana.el.gz and yeast-reduced-wayne.gw.gz"
    exitCode=1
fi

echo 'Done testing compression'
exit $exitCode
| true |
d0a7211106a771ff869dd7e4fbc62285abf167ed | Shell | brennv/palette | /env.sh | UTF-8 | 1,282 | 3.109375 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# Set environment variables
export FLASK_DEBUG=1 # 0 for prod, 1 for dev
echo "FLASK_DEBUG: $FLASK_DEBUG"
export FLASK_APP="$(pwd)/autoapp.py"
echo "FLASK_APP: $FLASK_APP"
# Create keys.sh if not found
# (unquoted EOM: $(pwd) in the commented sqlite URL is expanded at
# generation time)
[ ! -f "keys.sh" ] && cat > keys.sh <<- EOM
export PALETTE_SECRET="super-secret" # TODO change
export DATABASE_URL="postgresql://postgres:postgres@localhost/postgres"
# export DATABASE_URL="sqlite:///$(pwd)/dev.db"
EOM
# Set environment secrets
source keys.sh
# Install virtual environment if not found
[ ! -d "env" ] && virtualenv -p python env
# Activate the virtual environment
source env/bin/activate
# Install requirements for dev or prod
[ $FLASK_DEBUG = 1 ] && pip install -r requirements/dev.txt
[ $FLASK_DEBUG = 0 ] && pip install -r requirements/prod.txt
# Install and bundle assets
npm install bower
bower install
flask assets build
# Create db/ & migrations/ if not found
# [ ! -d "migrations" ] && mkdir db
# docker run -td -p 5432:5432 -v $(pwd)/db:/var/lib/postgresql/data postgres:9.6.1
# [ ! -d "migrations" ] && flask db init # && \
# yes | cp -rf utils/env.py migrations/env.py # for batch patch
# Sync models with db
# flask db migrate
# flask db upgrade
# Load data if found
# [ -f "data.py" ] && python -c "import data; data.load()"
| true |
b45ba66bb8b5c730802a6ad881184118de327755 | Shell | delph-in/delphintools | /bin/tab2tbl | UTF-8 | 239 | 2.90625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Convert 6-column tab-separated input on stdin into a LaTeX tabular.
# Every emitted line (header, data rows, footer) carries the LaTeX row
# terminator " \\", matching the table style used elsewhere in the repo.
tab2tbl () {
  awk 'BEGIN {
         FS = "\t"
         print "\\begin{tabular}{llrrrr} \\\\ \\hline \\\\"
       }
       {
         printf "%s\t& %s\t& %s\t& %s\t& %s\t& %s \\\\\n", $1, $2, $3, $4, $5, $6
       }
       END {
         print "\\hline \\\\"
         print "\\end{tabular} \\\\"
       }'
}
tab2tbl | true |
48051f24a1c80c39e6c91cd5eb9fb524421a16ab | Shell | benjaminhuanghuang/shell-study | /_Text-File-Process/append.sh | UTF-8 | 471 | 3.0625 | 3 | [] | no_license | #-------------------------------------------------------------
# Ensure an ntpdate entry exists so the clock is re-synced daily.
# BUG FIX: "ctontab" was a typo for "crontab", so the guard command always
# failed and a duplicate entry was appended on every run.
if ! crontab -l | grep ntpdate &>/dev/null ; then
	# Prepend the new entry to the current crontab and reinstall it.
	# BUG FIX: "2>71" redirected stderr to a file literally named "71";
	# "2>&1" sends it to stdout (and thus /dev/null) as intended.
	(echo "* 1 * * * ntpdate time.windows.com>/dev/null 2>&1";crontab -l) | crontab
fi
#-------------------------------------------------------------
# break SSH when time out: auto-disconnect idle sessions after 600 s.
# Appended only if the setting is not already present in /etc/profile.
if ! grep "TMOUT=600" /etc/profile &>/dev/null; then
	echo "export TMOUT=600" >> /etc/profile
fi
| true |
74e5e27f4c9db1effd69c38d7c589c962a572eb7 | Shell | iamsmkr/stage-deployment-configs | /config/setup/service-e.setup.sh | UTF-8 | 335 | 2.765625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Grab eth1's IPv4 address: awk selects the "inet addr:…" line and strips
# the 5-character "addr:" prefix from field 2.
ip=$(ifconfig eth1 | awk '/inet addr/ { print substr($2, 6) }')
# Publish a tiny status page identifying this VM by address and hostname.
printf '<h1>%s %s</h1>\n' "$ip" "$(hostname)" > /home/vagrant/ip.html
# Serve the page with nginx on host port 8084; the container restarts
# automatically unless it is explicitly stopped.
docker run \
  --detach \
  --name ServiceE \
  --publish 8084:80 \
  --restart unless-stopped \
  --volume /home/vagrant/ip.html:/usr/share/nginx/html/ip.html:ro \
  nginx
| true |
e1f6655d26926ae6f42d72416349a155aa462c9e | Shell | viktorstrate/qr-gen | /build.sh | UTF-8 | 511 | 2.984375 | 3 | [
"MIT"
] | permissive | #!/bin/sh
set -e
# Output locations for the compiled and minified Elm bundles.
build_dir="build"
js="$build_dir/elm.js"
min="$build_dir/elm.min.js"
mkdir -p "$build_dir"
cp index.html "$build_dir"
# --optimize is required for the pure_funcs elision below to be safe.
elm make --optimize --output="$js" src/Main.elm
# First uglify pass compresses (treating Elm's F*/A* wrappers as pure),
# second pass mangles names.
npx uglify-js "$js" --compress 'pure_funcs=[F2,F3,F4,F5,F6,F7,F8,F9,A2,A3,A4,A5,A6,A7,A8,A9],pure_getters,keep_fargs=false,unsafe_comps,unsafe' | npx uglify-js --mangle --output "$min"
# FIX: read the file via stdin so wc prints only the byte count; previously
# "wc -c FILE" echoed the file name a second time inside each message.
echo "Compiled size:$(wc -c < "$js") bytes ($js)"
echo "Minified size:$(wc -c < "$min") bytes ($min)"
echo "Gzipped size: $(gzip -c "$min" | wc -c) bytes"
26211156598d3041297caabe33584bc83f73e924 | Shell | vdevpriya/boilerplate-ui | /startfeeManagement.sh | UTF-8 | 648 | 2.90625 | 3 | [] | no_license | #!/bin/bash
set -e
echo "your arguments are:" "$@"
# Exactly four positional arguments are required to rewrite wsconfig.js.
if [ "$#" -ne 4 ]; then
	echo "please provide the correct arguments 1:umHost 2:umPort 3:apiHost 4:apiPort"
	# FIX: exit non-zero so callers and CI can detect the usage error;
	# a bare "exit" here reported success.
	exit 1
else
	# Patch the UM endpoint: replace the http(s) URL on the umHost line
	# and the 4-digit port on the umPort line with the supplied values.
	sed -i "/umHost/s~http[s].*~$1\";~g" /usr/src/app/src/client/js/redux/actions/wsconfig.js
	sed -i "/umPort/s/[0-9]\{4\}/$2/g" /usr/src/app/src/client/js/redux/actions/wsconfig.js
	# Patch the API endpoint the same way.
	sed -i "/apiHost/s~http[s].*~$3\";~g" /usr/src/app/src/client/js/redux/actions/wsconfig.js
	sed -i "/apiPort/s/[0-9]\{4\}/$4/g" /usr/src/app/src/client/js/redux/actions/wsconfig.js
	yarn start
	# echo "=====updated wsconfig.js========= "
	# cat /usr/src/app/src/client/js/redux/actions/wsconfig.js
fi
1be7a0b0335cce596c7f8b2582c7f356219f647b | Shell | tkessler45/rsbackup | /rsbackup.sh | UTF-8 | 3,397 | 3.5625 | 4 | [] | no_license | #!/bin/bash
# written by Topher Kessler, email tkessler@ucdavis.edu with any questions
# this script will backup the the User's local profile to the burnsxs
# server using rsync running in the Cygwin bash shell on the local windows
# computer. It will connect to the shared "rsyncbackup" directory on the
# server and create the appropriate files and destination folders. This
# script will also backup the 'All Users' folder, and all documents in that
# folder which are readable by the current user.
#
# This script does not require command line access to the server.
#
# This script will run at logout.
# $HOSTNAME is the local computers name:
# $USERNAME is the current user account name:
DAYDATE=`/usr/bin/date +%Y-%m-%d`
HOURMINSEC=`/usr/bin/date +%H%M%S`
USERBACKUPDIR=//Burnsxs/rsyncbackup/$USERNAME/backups/$HOSTNAME/$DAYDATE/$HOURMINSEC/
/usr/bin/mkdir -p //Burnsxs/rsyncbackup/$USERNAME/backups/$HOSTNAME/$DAYDATE/$HOURMINSEC/
ALLBACKUPDIR=//Burnsxs/rsyncbackup/All\ Users/backups/$DAYDATE/$HOURMINSEC/$HOSTNAME
/usr/bin/mkdir -p //Burnsxs/rsyncbackup/All\ Users/backups/$DAYDATE/$HOURMINSEC/$HOSTNAME
USERRSARGS="-rtglv --delete --delete-after --delete-excluded --progress --backup --backup-dir=$USERBACKUPDIR --exclude-from=//Burnsxs/rsyncbackup/info/rsyncexclude.txt"
ALLRSARGS="-rtglv --delete --delete-after --delete-excluded --progress --backup --backup-dir=$ALLBACKUPDIR --exclude-from=//Burnsxs/rsyncbackup/info/rsyncexclude.txt"
USERDEST=//Burnsxs/rsyncbackup/$USERNAME/current/$HOSTNAME
/usr/bin/mkdir -p //Burnsxs/rsyncbackup/$USERNAME/current/$HOSTNAME
ALLDEST=//Burnsxs/rsyncbackup/All\ Users/current/$HOSTNAME
/usr/bin/mkdir -p //Burnsxs/rsyncbackup/All\ Users/current/$HOSTNAME
USERSRC=/cygdrive/c/Documents\ and\ Settings/$USERNAME/
ALLSRC=/cygdrive/c/Documents\ and\ Settings/"All Users"/
# append backup time to user log file
echo ------------backing up $USERNAME on $HOSTNAME: $(/usr/bin/date +%b%e\ -\ %H:%M) >> //Burnsxs/rsyncbackup/info/logs/$USERNAME.log
# run the rsync command:
echo "Syncing documents and settings for $USERNAME on $HOSTNAME"
/usr/bin/date > //Burnsxs/rsyncbackup/info/logs/$USERNAME-errors.log
/usr/bin/rsync $USERRSARGS "$USERSRC" "$USERDEST" 2>> //Burnsxs/rsyncbackup/info/logs/$USERNAME-errors.log
#REPEAT FOR "ALL USERS" ACCOUNT
# append backup time to "All Users" log file
echo ------------backing up 'All Users' on $HOSTNAME: $(/usr/bin/date +%b%e\ -\ %H:%M) >> //Burnsxs/rsyncbackup/info/logs/All\ Users.log
# run the rsync command:
echo "Syncing documents and settings for 'All Users' on $HOSTNAME"
/usr/bin/date > //Burnsxs/rsyncbackup/info/logs/All\ Users-errors.log
/usr/bin/rsync $ALLRSARGS "$ALLSRC" "$ALLDEST" 2>> //Burnsxs/rsyncbackup/info/logs/All\ Users-errors.log
# cleanup added directories if they're empty...
/usr/bin/rmdir --ignore-fail-on-non-empty //Burnsxs/rsyncbackup/$USERNAME/backups/$HOSTNAME/$DAYDATE/$HOURMINSEC
/usr/bin/rmdir --ignore-fail-on-non-empty //Burnsxs/rsyncbackup/$USERNAME/backups/$HOSTNAME/$DAYDATE
/usr/bin/rmdir --ignore-fail-on-non-empty //Burnsxs/rsyncbackup/$USERNAME/backups/$HOSTNAME
/usr/bin/rmdir --ignore-fail-on-non-empty //Burnsxs/rsyncbackup/All\ Users/backups/$HOSTNAME/$DAYDATE/$HOURMINSEC
/usr/bin/rmdir --ignore-fail-on-non-empty //Burnsxs/rsyncbackup/All\ Users/backups/$HOSTNAME/$DAYDATE
/usr/bin/rmdir --ignore-fail-on-non-empty //Burnsxs/rsyncbackup/All\ Users/backups/$HOSTNAME
| true |
98c96ab105810247d5599fc671787c358b036af5 | Shell | lardo/khaper | /Bin/monitor | UTF-8 | 1,155 | 3.03125 | 3 | [] | no_license | #!/bin/bash
# Environment for the pymonitor python script (cluster-specific paths).
source /nfs2/pipe/genomics/toolkit/pymonitor/settings.sh
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:/nfs/config/Python-2.6.5/lib
export PYTHONPATH=/nfs2/pipe/genomics/toolkit/pymonitor/python
export DRMAA_LIBRARY_PATH=/opt/gridengine/lib/linux-x64/libdrmaa.so
export PYMONITOR_PY_PATH=/nfs2/pipe/genomics/toolkit/pymonitor/pymonitor.py
export PYMONITOR_SH_PATH=/nfs2/pipe/genomics/toolkit/pymonitor/monitor
export PYMONITOR_CONF_PATH=~/.pymonitor.conf
# export PYMONITOR_LOG_PATH=~/.pymonitor.log
export LOG_DIR="/lustre/project/og03/Public/Log/$USER"
export PYMONITOR_LOG_PATH="$LOG_DIR/pymonitor.log"
# Create the per-user log directory and log file on first run.
if [ ! -d "$LOG_DIR" ]; then
	mkdir -p "$LOG_DIR"
fi
# BUG FIX: the log path is a regular file, so test it with -e, not -d;
# the -d test was always false, so touch ran on every invocation.
if [ ! -e "$PYMONITOR_LOG_PATH" ]; then
	touch "$PYMONITOR_LOG_PATH"
fi
if [ "$1" = "cron" ]
then
	# Toggle behaviour: if a "pymonitor.py cron" process is already running
	# for this user, kill it; otherwise start one with stderr captured.
	TASKLIST=$(ps -o pid,args -u "$USER")
	CRONTASK=$(echo "$TASKLIST" | awk '/pymonitor.py cron/ {print $1}')
	if [ -n "$CRONTASK" ]
	then
		# Intentionally unquoted: CRONTASK may hold several PIDs.
		kill -9 $CRONTASK
	else
		/usr/bin/python $PYMONITOR_PY_PATH "$@" 2>$PYMONITOR_LOG_PATH
		#echo $USER >> /ifs5/PC_HUMAN_AP/USER/limiao/temp/monitor/log
	fi
else
	/usr/bin/python $PYMONITOR_PY_PATH "$@"
fi
| true |
47a0e2eb3899dfa9d40b568d3aab41926a0549b2 | Shell | neko-neko/dotfiles | /setup/util.zsh | UTF-8 | 490 | 3.75 | 4 | [] | no_license | #!/bin/zsh
# setup util functions.
# Print an error message to stdout in red (ANSI SGR code 31).
# -e keeps the original escape-expansion semantics for the message text.
util::error() {
  echo -e "\e[31m$1\e[m"
}
# Print a warning message to stdout in yellow (ANSI SGR code 33).
# -e keeps the original escape-expansion semantics for the message text.
util::warning() {
  echo -e "\e[33m$1\e[m"
}
# Print an informational message to stdout in green (ANSI SGR code 32).
# -e keeps the original escape-expansion semantics for the message text.
util::info() {
  echo -e "\e[32m$1\e[m"
}
# Ask the user a yes/no question. Returns 0 on "y"/"Y" -- or immediately,
# without prompting, when FORCE=1 -- and 4 on any other reply.
util::confirm() {
  local message="$1"
  # FORCE=1 (set by the caller/environment) skips interactive confirmation.
  if [[ ${FORCE} = 1 ]]; then
    return 0
  fi
  echo "${message} (y/N)"
  # NOTE(review): plain read without -r, so backslashes in the reply are
  # interpreted; harmless for a y/N answer.
  read confirmation
  if [[ ${confirmation} = "y" || ${confirmation} = "Y" ]]; then
    return 0
  fi
  # Distinct non-zero code so callers can tell "declined" from other errors.
  return 4
}
| true |
b6fc933c0891b38b7e81f5921c65fdc06181b2b4 | Shell | time-iter/sdn-qos | /stratup.sh | UTF-8 | 591 | 3.0625 | 3 | [] | no_license | #!/bin/bash
# Interactive menu loop for the ODL speed-control tools. Redraws the menu
# until the user enters "q"; after each action an extra "read junk" pauses
# so the tool's output stays visible before the screen is cleared again.
# NOTE(review): option 2 runs "starupmod1.py" -- looks like a typo for
# "startupmod1.py", but confirm the actual file name before renaming.
while :; do
    clear
    cat <<'MENU'
*************************
* SPEED CONTROL FOR ODL *
*************************
1.show network speed
2.speed control(mod1)
3.speed control(mod2)
>>q to quit<<
MENU
    read achoice
    case "$achoice" in
        1)
            python getspeed_only.py
            read junk
            ;;
        2)
            python starupmod1.py
            read junk
            ;;
        3)
            python startupmod2.py
            read junk
            ;;
        q)
            clear
            break
            ;;
        *)
            echo "wrong choice"
            printf "try again, any key to continue >"
            read junk
            ;;
    esac
done
| true |
156da1b788979fddde6acbc12b69854ec200477c | Shell | nbedelman/DiscovarCompGenomics | /phylogenies/fullAlignmentBlocks/extractedAlignments/runAll_HmelRef.sh | UTF-8 | 6,147 | 3.015625 | 3 | [] | no_license | #!/bin/bash
#Making this runAll into a slurm file because each script takes too little time. May regret this decision, but can always revert back to sending small slurm jobs.
#SBATCH -J getSingleCopyEverything
#SBATCH -n 1
#SBATCH --mem=10000
#SBATCH -t 3-00:00
#SBATCH -p general
#SBATCH -o getSingleCopyEverything.out
#SBATCH -e getSingleCopyEverything.err
#script to get the "usable parts" of the hal alignment. This means projecting the alignment onto good genomes and only taking the single-copy regions.
######### Setup Environment ##############
mkdir -p data
mkdir -p code
mkdir -p mafs
source /n/sw/progressiveCactus-latest/progressiveCactus/environment
export PATH=$PATH:/n/mallet_lab/edelman/software/kentUtils/bin
module load libpng/1.5.21-fasrc01
module load legacy/0.0.1-fasrc01 centos6/openssl-1.0.1f
module load bedtools
ln -s /n/mallet_lab/edelman/18Genomes/results/DiscovarCompGenomics/data/finalAssemblies_highQual_1kbFilter_161101.hal data/fullAlignment.hal
ln -s /n/holylfs/INTERNAL_REPOS/PCACTUS/edelman/genomes/1kbFilter/Herato2.fasta_1kb.fa data/HeraRef.fa
ln -s /n/holylfs/INTERNAL_REPOS/PCACTUS/edelman/genomes/1kbFilter/Hmel2.fa_1kb.fa data/HmelRef.fa
ln -s /n/holylfs/INTERNAL_REPOS/PCACTUS/edelman/genomes/1kbFilter/DAS_09-132_e_tales_a-scaffolds.fasta_1kb.fa data/Etal.fa
ln -s /n/holylfs/INTERNAL_REPOS/PCACTUS/edelman/genomes/1kbFilter/Bombyx_mori_ASM15162v1_-_scaffolds.fa_1kb.fa data/Bmor.fa
ln -s /n/mallet_lab/edelman/18Genomes/results/DiscovarCompGenomics/data/heliconius_melpomene_melpomene_hmel2_core_32_85_1.gff data/Hmel.gff
########## Define Variables #############
# Reference genome(s) to project the alignment onto; the comma list is
# converted to newline-separated words so "for r in $refGenomes" iterates.
refGenomes=HmelRef
refGenomes=$(echo $refGenomes|sed 's/,/\n/g')
# NOTE(review): fullRefGenomes and helRefGenomes are defined here but not
# used anywhere in this script -- confirm whether they are leftovers.
fullRefGenomes=Bmor,HmelRef,HeraRef
fullRefGenomes=$(echo $fullRefGenomes|sed 's/,/\n/g')
helRefGenomes=Etal,HmelRef,HeraRef
helRefGenomes=$(echo $helRefGenomes|sed 's/,/\n/g')
halFile=data/fullAlignment.hal
gffFile=data/Hmel.gff
CDSBed=data/HmelRef.CDS.bed
#the exclusion lists will be for things like introgression analysis. I have several subsets here to use depending on the analysis desired.
# Each list is a '|'-delimited set of genome names to EXCLUDE from a subset.
allHel="Pxyl|Bmor|Lacc|Ppol|Dple|Bany|Mcin"
#helSlim doesn't have the hybrid, and only includes one of the melpomene and erato genomes.
helSlim="Pxyl|Bmor|Lacc|Ppol|Dple|Bany|Mcin|HeraDisco|HmelDisco|HeraHhimHyb"
melClade="Pxyl|Bmor|Lacc|Ppol|Dple|Bany|Mcin|Avan|HeraDisco|HmelDisco|HeraHhimHyb|Hdem|Hsar|Htel|Hhsa|Hhim|HeraRef"
eraClade="Pxyl|Bmor|Lacc|Ppol|Dple|Bany|Mcin|Avan|Ldor|HmelRef|Hcyd|Htim|Hbes|Hnum|Hpar|HeraDisco|HmelDisco|HeraHhimHyb"
# Combine all lists, again newline-separated for the inner for-loop.
exclusionList=$allHel,$helSlim,$melClade,$eraClade
exclusionList=$(echo $exclusionList|sed 's/,/\n/g')
######### run code ##########
#extract full alignment for each of the refGenomes. Do this by scaffold for efficiency.
#then, use grep to get sub-alignments for each species set
#and extract only the single-copy alignment blocks
for r in $refGenomes
do
# One MAF directory per reference genome; faSize lists its scaffold names.
mkdir -p $r\_mafs
scaffolds=$(faSize -detailed data/$r.fa |awk '{print $1}')
for scaffold in $scaffolds
do
mafFile=$r\_mafs/$r\_$scaffold.full.maf
# Earlier sbatch-based (queued) versions kept for reference:
# extractFullMaf=`sbatch code/hal2maf_scaffold.slurm $halFile $r $scaffold $mafFile|cut -d " " -f 4`
# allSingleCopy=`sbatch --dependency=afterok:$extractFullMaf code/subsetAndSingleCopy.slurm "None" $mafFile "full"`
# Extract this scaffold's alignment, then the single-copy blocks with no
# species excluded ("None" / "all").
code/hal2maf_scaffold.slurm $halFile $r $scaffold $mafFile
code/subsetAndSingleCopy.slurm "None" $mafFile "all"
for ex in $exclusionList
do #figure out which list we have
# Map the exclusion pattern back to its label for output naming.
if [ $ex == $allHel ];then
list=allHel
elif [ $ex == $helSlim ];then
list=helSlim
elif [ $ex == $melClade ];then
list=melClade
else
list=eraClade
fi
# singleCopy=`sbatch --dependency=afterok:$extractFullMaf code/subsetAndSingleCopy.slurm $ex $mafFile $list`
code/subsetAndSingleCopy.slurm $ex $mafFile $list
done
done
done
#after I ran this code, I realized that I needed to re-do the fully-aligned sites analysis because I originally did not consider blocks (with no gaps) were aligned among ALL of the species.
# NOTE(review): $r below still holds the last value from the loop above
# (HmelRef); this section depends on that loop having run first.
mkdir $r\_beds
mkdir $r\_fastas
# Build a BED of CDS features from the Hmel GFF (chrom, start, end, attrs, '.', strand).
grep -v "#" $gffFile| awk '$3=="CDS" {print $1"\t"$4"\t"$5"\t"$9"\t.\t"$7}' > $CDSBed
# Sites fully aligned across all 25 species, merged and filtered to >150 bp.
for i in $r\_mafs/*.all_singleCopy.maf; do code/getFullyAligned.py $i 25 $r\_mafs/$(basename $i .maf); done
cat $r\_mafs/*.fullAlign*bed > $r\_mafs/HmelRef.fullyAlignedSites.bed
awk '$3-$2 > 150' $r\_mafs/HmelRef.fullyAlignedSites.bed > $r\_mafs/HmelRef.fullyAlignedSites.large.bed
# Split into coding (overlapping CDS) and noncoding regions.
bedtools intersect -a $r\_mafs/HmelRef.fullyAlignedSites.large.bed -b $CDSBed |sort -u > $r\_beds/HmelRef.fullyAlignedSites.large.coding.bed
bedtools intersect -v -a $r\_mafs/HmelRef.fullyAlignedSites.large.bed -b $CDSBed > $r\_beds/HmelRef.fullyAlignedSites.large.noncoding.bed
#put each region into its own bed file
code/batchGenes.sh $r\_beds/HmelRef.fullyAlignedSites.large.coding.bed $r\_beds/fullyAligned.Hmel.coding
code/batchGenes.sh $r\_beds/HmelRef.fullyAlignedSites.large.noncoding.bed $r\_beds/fullyAligned.Hmel.noncoding
# NOTE(review): $refGenome (singular) is never defined in this script --
# only $refGenomes is -- so this expands empty; confirm the intended value.
for gene in $(ls beds)
do code/halToMaf_codingRegion.sh $refGenome $halFile beds/$gene
done
############## all heliconius
# Same analysis restricted to the 18 Heliconius-only alignments.
mkdir $r\_allHeliconius_fullyAligned
workDir=$r\_allHeliconius_fullyAligned
for i in $r\_mafs/*.allHel_singleCopy.maf; do code/getFullyAligned.py $i 18 $workDir/$(basename $i .maf); done
cat $workDir/*.fullAlign*bed > $workDir/HmelRef.allHel.fullyAlignedSites.bed
awk '$3-$2 > 150' $workDir/HmelRef.allHel.fullyAlignedSites.bed > $workDir/HmelRef.allHel.fullyAlignedSites.large.bed
bedtools intersect -a $workDir/HmelRef.allHel.fullyAlignedSites.large.bed -b $CDSBed |sort -u > $workDir/HmelRef.allHel.fullyAlignedSites.coding.bed
bedtools intersect -v -a $workDir/HmelRef.allHel.fullyAlignedSites.large.bed -b $CDSBed > $workDir/HmelRef.allHel.fullyAlignedSites.noncoding.bed
#put each region into its own bed file
# NOTE(review): these two batchGenes calls reuse the all-species coding /
# noncoding BEDs rather than the $workDir allHel ones -- looks like a
# copy-paste slip; confirm before changing.
code/batchGenes.sh $r\_beds/HmelRef.fullyAlignedSites.large.coding.bed $r\_beds/fullyAligned.Hmel.coding
code/batchGenes.sh $r\_beds/HmelRef.fullyAlignedSites.large.noncoding.bed $r\_beds/fullyAligned.Hmel.noncoding
for gene in $(ls beds)
do code/halToMaf_codingRegion.sh $refGenome $halFile beds/$gene
done
| true |
a59b0f0f87cd85b70a7df1426c98273ab0003ddf | Shell | christhehippo/School | /unix/notes/script2 | UTF-8 | 379 | 3.5625 | 4 | [] | no_license | #!/bin/bash
#
# script2 - another a-mazing script.
# Teaching script: reads one number and demonstrates three different ways
# of doing shell arithmetic with it.
#
# NOTE(review): "nubmer" in the prompt is a typo ("number") -- left as-is
# since it is user-facing output text.
echo -n "Gimme a nubmer: "
read number
# way the 1: math with let (the historical bash builtin)
let result=number+7
echo "${number} + 7 = ${result}"
# way the 2: pockets of arithmetic -- POSIX $(( )) arithmetic expansion
result=$((${number}+19))
echo "${number} + 19 = ${result}"
# way the 3: use a calculator -- pipe an expression to bc(1)
result=`echo "${number}+37" | bc -q`
echo "${number} + 37 = ${result}"
exit 0
| true |
461a79ef0783cfc6902026940cc808b69f2f8556 | Shell | TaWeiTu/dotfiles | /.config/yabai/scripts/open_firefox.sh | UTF-8 | 464 | 3.171875 | 3 | [] | no_license | #!/usr/bin/env bash
# Launch Firefox Nightly, or ask a running instance for a new window.
if ! pgrep -f "Firefox Nightly" > /dev/null; then
    open -a "/Applications/Firefox Nightly.app"
else
    # Create a new window via AppleScript; non-zero means the running
    # instance is unresponsive.
    if ! osascript "$HOME/.config/yabai/scripts/firefox.applescript" > /dev/null; then
        # BUG FIX: the fallback previously killed "iTerm" processes
        # (copied from an iTerm helper); it must terminate the wedged
        # Firefox Nightly instance instead, then relaunch it.
        for i in $(pgrep -f "Firefox Nightly"); do kill -15 "$i"; done
        open -a "/Applications/Firefox Nightly.app"
    fi
fi
| true |
66e8daccb32bb3ae49c7cfaf2c4f0ea2c46a7e71 | Shell | smittytone/scripts | /gitcheck.zsh | UTF-8 | 2,974 | 4.375 | 4 | [
"MIT"
] | permissive | #!/bin/zsh
# gitcheck.zsh
#
# Display $GIT directory repos with unmerged or uncommitted changes
#
# @author    Tony Smith
# @copyright 2022, Tony Smith
# @version   1.3.2
# @license   MIT

# Print "[ERROR] <message>" and terminate the script with status 1.
show_error_and_exit() {
    echo "[ERROR] $1"
    exit 1
}

# Script state. zsh permits 'local' at top level; repos/branches/states are
# parallel arrays filled by the scan loop below, max tracks the longest
# repo name for column alignment in the report.
local max=0
local repos=()
local states=()
local branches=()
local show_branches=0

# $GIT must name the directory containing all local repos.
if [[ -z "$GIT" ]]; then
    show_error_and_exit 'Environment variable "$GIT" not set with your Git directory'
fi

if [[ ! -d "$GIT" ]]; then
    show_error_and_exit 'Directory referenced by environment variable "$GIT" does not exist'
fi

# FROM 1.3.1
# Process the arguments: only -b/--branches is recognised.
for arg in "$@"; do
    # Temporarily convert argument to lowercase, zsh-style (:l modifier)
    check_arg=${arg:l}
    if [[ "${check_arg}" = "--branches" || "${check_arg}" = "-b" ]]; then
        show_branches=1
    else
        show_error_and_exit "Unknown command ${arg}"
    fi
done
# FROM 1.2.1 -- Add progress marker
echo -n "Checking"
if cd "$GIT"; then
    # Process the files: every subdirectory containing a .git/ is a repo.
    for repo in *; do
        if [[ -d "${repo}" && -d "${repo}/.git" ]]; then
            if cd "${repo}"; then
                local state=""
                if [[ "$show_branches" -eq 1 ]]; then
                    # FROM 1.3.1 -- determine repo current branches
                    repos+=("$repo")
                    # zsh short-form if: widen the name column if needed.
                    if [[ ${#repo} -gt ${max} ]] max=${#repo}
                    local branch=$(git branch --show-current)
                    branches+=("${branch}")
                else
                    # Determine repo states, but only those that are not up to date
                    # "is ahead" in git status => local commits not yet pushed.
                    local unmerged=$(git status --ignore-submodules)
                    # NOTE(review): the "(( ... ))" wrapping inside the process
                    # substitution is unusual -- confirm it behaves as intended
                    # before simplifying.
                    unmerged=$(grep 'is ahead' < <((echo -e "$unmerged")))
                    if [[ -n "${unmerged}" ]]; then
                        state="unmerged"
                    fi
                    # Porcelain output is non-empty iff there are uncommitted
                    # changes; this takes precedence over "unmerged".
                    local uncommitted=$(git status --porcelain --ignore-submodules)
                    if [[ -n "${uncommitted}" ]]; then
                        state="uncommitted"
                    fi
                    if [[ -n "$state" ]]; then
                        states+=("${state}")
                        repos+=("${repo}")
                        if [[ ${#repo} -gt ${max} ]] max=${#repo}
                    fi
                fi
                cd ..
            fi
            # FROM 1.2.1 Add progress marker (one dot per repo scanned)
            echo -n "."
        fi
    done
fi
# Report. In zsh ${#repos} is the element count; arrays are 1-indexed,
# hence the loops running from 1 to ${#repos[@]} inclusive.
if [[ ${#repos} -eq 0 ]]; then
    echo -e "\nAll local repos up to date"
else
    # FROM 1.3.1 -- show repo current branches, or states
    if [[ "$show_branches" -eq 1 ]]; then
        echo -e "\nLocal repo current branches:"
        for (( i = 1 ; i <= ${#repos[@]} ; i++ )); do
            # %*s right-aligns each repo name to the widest name seen.
            printf '%*s is on %s\n' ${max} ${repos[i]} ${branches[i]}
        done
    else
        echo -e "\nLocal repos with changes:"
        for (( i = 1 ; i <= ${#repos[@]} ; i++ )); do
            printf '%*s has %s changes\n' ${max} ${repos[i]} ${states[i]}
        done
    fi
fi
exit 0
028d8ea69e820f492fc2181b4b8886fd31da546d | Shell | bernardng/codeSync | /fMRI2dMRIalignment/alignment_batch.sh | UTF-8 | 2,162 | 3.0625 | 3 | [] | no_license | # Batch align WM mask and parcel template to DWI
# Notes: Specify script_path as required to call the python scripts
# Might need to enter FSL environment by typing "fsl"; rem to type "exit" when done
script_path='/home/bernardyng/code/fMRI2dMRIalignment'
for line in `cat /media/GoFlex/research/data/imagen/subjectLists/subjectListDWI.txt`
do
echo $line
cd /media/GoFlex/research/data/imagen/$line/dwi/
#cp /media/GoFlex/research/data/imagen/$line/restfMRI/rest.nii .
#cp /media/GoFlex/research/data/imagen/group/fs_parcel500.nii .
#cp /media/GoFlex/research/data/imagen/$line/anat/wmMask.nii .
# Generate average volumes
#python $script_path/gen_ave_vol.py -i rest.nii
#python $script_path/gen_ave_vol.py -i dwi_ecc.nii
# Skull strip the average volumes
#fsl4.1-bet rest_ave.nii rest_ave_ss.nii -m -f 0.3
#gunzip -f rest_ave_ss.nii.gz
#gunzip -f rest_ave_ss_mask.nii.gz
#fsl4.1-bet dwi_ecc_ave.nii dwi_ecc_ave_ss.nii -m -f 0.3
#gunzip -f dwi_ecc_ave_ss.nii.gz
#gunzip -f dwi_ecc_ave_ss_mask.nii.gz
# Resample wmMast to EPI resolution
#python $script_path/resample.py -i wmMask.nii -r rest_ave_ss.nii
# Align EPI to DWI volume
#fsl4.1-flirt -in rest_ave_ss.nii -ref dwi_ecc_ave_ss.nii -out rest_ave_ss_aff.nii -omat mni3mmtodwi_aff.txt
#gunzip -f rest_ave_ss_aff.nii.gz
# Apply learned warp to WM mask and parcel template to DWI
fsl4.1-flirt -in /media/GoFlex/research/data/imagen/group/ica_roi_parcel150_refined.nii -ref alignment/dwi_ecc_ave_ss.nii -applyxfm -init alignment/mni3mmtodwi_aff.txt -out ica_roi_parcel150_refined_aff.nii -interp nearestneighbour
gunzip -f ica_roi_parcel150_refined_aff.nii.gz
#fsl4.1-flirt -in wmMask_rs.nii -ref dwi_ecc_ave_ss.nii -applyxfm -init mni3mmtodwi_aff.txt -out wmMask_rs_aff.nii
#gunzip -f wmMask_rs_aff.nii.gz
# Binarize the WM mask
#python $script_path/gen_bin_mask.py -i wmMask_rs_aff.nii -t 0.3
# Move results to alignment folder
#mkdir -p alignment
#mv res*.nii alignment
#mv dwi_ecc_*.nii alignment
#mv *Mas*.nii alignment
#mv alignment/*Mask_rs_aff.nii .
#mv alignment/*Mask_rs_aff_bin.nii .
#mv fs_parcel50*.nii alignment
#mv alignment/fs_parcel500_aff.nii .
#mv mni3mmtodwi_aff.txt alignment
done
| true |
8989b89a9275b8482e69a4bb7d5baec9d2aa7a75 | Shell | Arlion/shiny-pancake | /bash/Ubuntu_Challenges/challenge4.sh | UTF-8 | 1,010 | 4.125 | 4 | [] | no_license | #!/bin/bash
# Number-guessing game: the player narrows in on a secret number.
# BUG FIX: RANDOM%100 yields 0-99; +1 shifts the range to the 1-100
# advertised by the prompt (the old "+0" could pick an unguessable 0).
number=$((RANDOM%100+1))
count=0

# Compare the validated guess in the global $input against $number,
# print a higher/lower hint, bump the turn counter, and prompt again.
function high_or_low {
	if [[ "$input" == "$number" ]] ; then
		echo "You win!"
		echo "You did it in $count turns!"
		exit 0
	fi
	if [[ $input -gt $number ]] ; then
		echo "lower"
		let "count += 1"
		question
	fi
	if [[ $input -lt $number ]] ; then
		echo "higher"
		let "count += 1"
		question
	fi
}

# Read one guess into the global $input, re-prompting until it is valid.
function question {
	# ROBUSTNESS: bail out on EOF instead of recursing forever on empty input.
	read -p "Please guess a number between 1-100: " input || exit 1
	# exit or quit will result in an exit with status 0
	if [[ "$input" == "exit" || "$input" == "quit" ]] ; then exit 0 ; fi
	## test to see if input is a number (optional minus sign, then digits)
	if ! [[ $input == ?(-)+([0-9]) ]] ; then
		echo "Invalid number, please enter a valid number"
		question
	fi
	## range check. BUG FIX: the lower bound tested "$1" -- an unset
	## positional parameter that evaluates to 0 -- instead of "$input",
	## so zero and negative guesses were silently accepted.
	if [[ $input -gt 100 || $input -lt 1 ]] ; then
		echo "Invalid number, please enter a number above 0 and below 100"
		question
	fi
}

question
while true; do
	high_or_low $input
done
8a67909c1611db1d6d6154d979d3c76176368160 | Shell | jwgl/bell-db | /ha/replscripts/create_slot.sh | UTF-8 | 5,835 | 4.03125 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/sh
# https://www.itenlight.com/blog/2016/05/21/PostgreSQL+HA+with+pgpool-II+-+Part+4
# (Re)creates replication slot.
# NOTE: The script should be executed as postgres user
echo "create_slot - Start"
# Defining default values.
# trigger_file marks this node as the master; the script refuses to create
# a slot on a node that lacks it (see the check further down).
version="9.6"
trigger_file="/etc/postgresql/$version/main/im_the_master"
slot_name=""
recreate=false
# debug=true echoes the parsed arguments before doing any work.
debug=true
# Parse command-line flags. Unflagged arguments are taken as the slot name;
# exit code 2 signals any argument error (see the help text below).
while test $# -gt 0; do
        case "$1" in
                -h|--help)
                        echo "Creates replication slot"
                        echo " "
                        echo "create_slot [options]"
                        echo " "
                        echo "options:"
                        echo "-h, --help                show brief help"
                        echo "-t, --trigger_file=FILE   specify trigger file path"
                        echo "                          Optional, default: $trigger_file"
                        echo "-n, --name=NAME           slot name (mandatory)"
                        echo "                          Slot name can be also specified without using"
                        echo "                          flags (i.e. 'create_slot myslot')"
                        echo "-r, --recreate            Forces re-creation if the slot already exists"
                        echo "                          Optional, default: N/A"
                        echo "                          Description: Without this flag the script won't do anything if"
                        echo "                                       the slot with defined name already exists."
                        echo "                                       With the flag set, if the slot with defined name"
                        echo "                                       already exists it will be deleted and re-created."
                        echo " "
                        echo "Error Codes:"
                        echo "    1 - Wrong user. The script has to be executed as 'postgres' user."
                        echo "    2 - Argument error. Caused either by bad format of provided flags and"
                        echo "        arguments or if a mandatory argument is missing."
                        echo "    3 - Inapropriate trigger / standby files. This script REQUIRES trigger"
                        echo "        file to be present."
                        echo "    4 - Error executing a slot-related operation (query/create/drop)."
                        exit 0
                        ;;
                -t)
                        # Space-separated form: the next argument is the file.
                        shift
                        if test $# -gt 0; then
                                trigger_file=$1
                        else
                                echo "ERROR: -t flag requires trigger file to be specified."
                                exit 2
                        fi
                        shift
                        ;;
                --trigger-file=*)
                        # "=" form: strip everything up to the first "=".
                        trigger_file=`echo $1 | sed -e 's/^[^=]*=//g'`
                        shift
                        ;;
                -n)
                        # Reject a second name no matter how it was supplied.
                        if [ "$slot_name" != "" ]; then
                                echo "ERROR: Invalid command. For help execute 'create_slot -h'"
                                exit 2
                        fi
                        shift
                        if test $# -gt 0; then
                                slot_name=$1
                        else
                                echo "ERROR: -n flag requires slot name to be specified."
                                exit 2
                        fi
                        shift
                        ;;
                --name=*)
                        if [ "$slot_name" != "" ]; then
                                echo "ERROR: Invalid command. For help execute 'create_slot -h'"
                                exit 2
                        fi
                        slot_name=`echo $1 | sed -e 's/^[^=]*=//g'`
                        shift
                        ;;
                -r|--recreate)
                        recreate=true
                        shift
                        ;;
                *)
                        # Bare word: treat it as the slot name (once only).
                        if [ "$slot_name" != "" ]; then
                                echo "ERROR: Invalid command. For help execute 'create_slot -h'"
                                exit 2
                        fi
                        slot_name=$1
                        shift
                        ;;
        esac
done
# Ensuring that 'postgres' runs the script (compare numeric uids, not names).
if [ "$(id -u)" -ne "$(id -u postgres)" ]; then
        echo "ERROR: The script must be executed as 'postgres' user."
        exit 1
fi

if [ "$slot_name" = "" ]; then
        echo "ERROR: Slot name is mandatory. For help execute 'create_slot -h'"
        exit 2
fi

# Echo the effective configuration before touching the database.
if $debug; then
        echo "DEBUG: The script will be executed with the following arguments:"
        echo "DEBUG: --trigger-file=${trigger_file}"
        echo "DEBUG: --name=${slot_name}"
        if $recreate; then
                echo "DEBUG: --recreate"
        fi
fi

echo "Checking if trigger file exists..."
# The trigger file marks this node as master; refuse to create a slot on a
# standby (exit code 3).
if [ ! -e $trigger_file ]; then
        echo "ERROR: Cannot create replication slot if the server does not contain trigger file: ${trigger_file}"
        exit 3
fi
# Query, and then create / drop-and-recreate, the physical replication slot.
# Any psql failure exits with code 4.
# NOTE(review): ${slot_name} is interpolated directly into SQL text; safe
# only as long as callers pass plain identifiers -- confirm upstream usage.
success=false
echo "INFO: Checking if slot '${slot_name}' exists..."
# -Atc: unaligned, tuples-only, single command -- yields just the count.
slotcount=$(psql -Atc "SELECT count (*) FROM pg_replication_slots WHERE slot_name='${slot_name}';") && success=true
if ! $success ; then
        echo "ERROR: Cannot check for '${slot_name}' slot existence."
        exit 4
fi
if [ "$slotcount" = "0" ]; then
        echo "INFO: Slot not found. Creating..."
        success=false
        psql -c "SELECT pg_create_physical_replication_slot('${slot_name}');" && success=true
        if ! $success ; then
                echo "ERROR: Cannot create '${slot_name}' slot."
                exit 4
        fi
elif $recreate ; then
        # Slot exists and -r/--recreate was given: drop it, then recreate.
        # (Without -r an existing slot is left untouched.)
        echo "INFO: Slot found. Removing..."
        success=false
        psql -c "SELECT pg_drop_replication_slot('${slot_name}');" && success=true
        if ! $success ; then
                echo "ERROR: Cannot drop existing '${slot_name}' slot."
                exit 4
        fi
        echo "INFO: Re-creating the slot..."
        success=false
        psql -c "SELECT pg_create_physical_replication_slot('${slot_name}');" && success=true
        if ! $success ; then
                echo "ERROR: Cannot create '${slot_name}' slot."
                exit 4
        fi
fi
echo "create_slot - Done!"
exit 0
| true |
ee3db98b14bf7f03141e2ae7b3f9ed0be54d4cae | Shell | RomeuCarvalhoAntunes/2018.1-Reabilitacao-Motora | /Reabilitacao-Motora/Assets/Scripts/travis_build.sh | UTF-8 | 1,630 | 3.171875 | 3 | [
"MIT"
] | permissive | #! /bin/sh
# Travis build pipeline for the "Reabilitacao-Motora" Unity project:
# activate a licence, build for OSX/Linux/Windows, release the licence,
# then zip each platform's output.
project="Reabilitacao-Motora"
unity="/Applications/Unity/Unity.app/Contents/MacOS/Unity"

# Print the separator line shown between build steps.
banner() {
  echo "========================================"
}

banner
echo "Initializing Build Script for $project"
cd Reabilitacao-Motora
banner
echo "Current folder contains:"
ls
banner
echo "Attempting to activate Unity"
"$unity" -batchmode -serial $UNITY_KEY -username $UNITY_LOGIN -password $UNITY_PASSWORD -logFile /dev/stdout -quit
banner
echo "Attempting to build $project for OSX"
"$unity" -batchmode -logFile /dev/stdout -projectPath $(pwd) -buildOSXUniversalPlayer "Build/osx/$project.app" -quit
banner
echo "Attempting to build $project for Linux"
"$unity" -batchmode -logFile /dev/stdout -projectPath $(pwd) -buildLinuxUniversalPlayer "Build/linux/$project.exe" -quit
banner
echo "Attempting to build $project for Windows"
"$unity" -batchmode -logFile /dev/stdout -projectPath $(pwd) -buildWindowsPlayer "Build/windows/$project.exe" -quit
banner
echo "Opening in order to deactivate Unity"
"$unity" -batchmode -returnlicense -logFile /dev/stdout -quit
banner
echo 'Attempting to zip builds'
zip -r Build/linux.zip Build/linux/
zip -r Build/mac.zip Build/osx/
zip -r Build/windows.zip Build/windows/
9cce6166a500b150b3c9340a9a1123b318927871 | Shell | mgioioso/wordpredictionproject | /wordPredictionApp/preprocess.sh | UTF-8 | 2,246 | 2.734375 | 3 | [] | no_license | # replace numbers with tokens
# replace words with first letter cap to all lower case
# replace characters that are repeated 3 or more times with one occurrence
# of that character
#----------
# remove smiley faces
# replace w/ with with
# replace / with 'and'
# replace two or more hyphens in a row OR ' - ' OR [?!.:;] with newline
# replace single quote by itself or at beginning or end of a word with newline
# remove punction: $ % @ # * < > , / \ "
# remove blank lines
#
# Three near-identical pipelines follow (twitter, blogs, news). Each:
# iconv drops non-ASCII, tr maps the SUB control char to space, then the
# sed chain applies the substitutions listed above, finally writing a
# cleaned sample file and printing a progress message.
# NOTE(review): only the twitter pipeline's punctuation-removal class
# includes "/" ('[$%@#*<>,/\\"_]' vs '[$%@#*<>,\\"_]') -- confirm whether
# the blogs/news variants were meant to differ.
iconv -c -f utf-8 -t ascii < ../data/en_US/en_US.twitter.txt\
 | tr '\32' ' '\
 | sed -r 's/([0-9]+([,.][0-9]+)*)/\n/g'\
 | sed -r 's/([A-Z])/\L\1/g'\
 | sed -r 's/(.)\1{2,}/\1/g'\
 | sed -r 's/[:;]-*[\)\(PpD\/]*//g'\
 | sed -r 's| w/| with |g'\
 | sed -r 's|[&+/]| and |g'\
 | sed -r 's|=| equals |g'\
 | sed -r 's/[?!.:;()]/\n/g'\
 | sed -r 's|[$%@#*<>,/\\"_]||g'\
 | sed -r 's/(\[|\]|\{|\})//g'\
 | sed -r "s/(\b' | '\b| ' )/\n/g"\
 | sed -r 's/\-\-| *\- /\n/g'\
 | sed -r 's/ +/ /g' | sed -r 's/^ +| +$//g'\
 | sed -r '/^$|^.$/d' > ../data/sample/en_US.twitter.sample5.txt
echo 'Preprocessed twitter...'
# Same chain for the blogs corpus (no "/" in the removal class).
iconv -c -f utf-8 -t ascii < ../data/en_US/en_US.blogs.txt\
 | tr '\32' ' '\
 | sed -r 's/([0-9]+([,.][0-9]+)*)/\n/g'\
 | sed -r 's/([A-Z])/\L\1/g'\
 | sed -r 's/(.)\1{2,}/\1/g'\
 | sed -r 's/[:;]-*[\)\(PpD\/]*//g'\
 | sed -r 's| w/| with |g'\
 | sed -r 's|[&+/]| and |g'\
 | sed -r 's|=| equals |g'\
 | sed -r 's/[?!.:;()]/\n/g'\
 | sed -r 's|[$%@#*<>,\\"_]||g'\
 | sed -r 's/(\[|\]|\{|\})//g'\
 | sed -r "s/(\b' | '\b| ' )/\n/g"\
 | sed -r 's/\-\-| *\- /\n/g'\
 | sed -r 's/ +/ /g' | sed -r 's/^ +| +$//g'\
 | sed -r '/^$|^.$/d' > ../data/sample/en_US.blogs.sample5.txt
echo 'Preprocessed blogs...'
# Same chain for the news corpus (no "/" in the removal class).
iconv -c -f utf-8 -t ascii < ../data/en_US/en_US.news.txt\
 | tr '\32' ' '\
 | sed -r 's/([0-9]+([,.][0-9]+)*)/\n/g'\
 | sed -r 's/([A-Z])/\L\1/g'\
 | sed -r 's/(.)\1{2,}/\1/g'\
 | sed -r 's/[:;]-*[\)\(PpD\/]*//g'\
 | sed -r 's| w/| with |g'\
 | sed -r 's|[&+/]| and |g'\
 | sed -r 's|=| equals |g'\
 | sed -r 's/[?!.:;()]/\n/g'\
 | sed -r 's|[$%@#*<>,\\"_]||g'\
 | sed -r 's/(\[|\]|\{|\})//g'\
 | sed -r "s/(\b' | '\b| ' )/\n/g"\
 | sed -r 's/\-\-| *\- /\n/g'\
 | sed -r 's/ +/ /g' | sed -r 's/^ +| +$//g'\
 | sed -r '/^$|^.$/d' > ../data/sample/en_US.news.sample5.txt
echo 'Preprocessed news...'
| true |
19d9a0782218ae67185f0c6ea8818af17f620965 | Shell | gehuangyi20/Koinonia | /stunnel-oqs/stunnel-5.40/build-android.sh | UTF-8 | 701 | 2.90625 | 3 | [
"MIT",
"FSFAP",
"OpenSSL",
"GPL-2.0-or-later",
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"LicenseRef-scancode-generic-exception",
"LicenseRef-scancode-proprietary-license",
"GPL-2.0-only",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | #!/bin/sh
# Cross-compile stunnel for Android and package the binary into a zip.
# -e aborts on the first failure; -v echoes each command for CI logs.
set -ev

readonly VERSION=5.40
readonly DST="stunnel-${VERSION}-android"

# to build OpenSSL:
# ./Configure threads no-shared no-dso --cross-compile-prefix=arm-linux-androideabi- --openssldir=/opt/androideabi/sysroot linux-armv4
# make install

# Start from a clean tree if a previous configure left a Makefile behind.
test -f Makefile && make distclean

# Configure and build out-of-tree for the Android EABI toolchain.
mkdir -p bin/android
cd bin/android
../../configure --with-sysroot --build=i686-pc-linux-gnu --host=arm-linux-androideabi --prefix=/data/local
make clean
make
cd ../..

# Collect the freshly built binary and zip it up.
mkdir "$DST"
cp bin/android/src/stunnel "$DST"
# arm-linux-androideabi-strip $DST/stunnel $DST/openssl
# cp /opt/androideabi/sysroot/bin/openssl $DST
# arm-linux-androideabi-strip $DST/openssl
zip -r "$DST.zip" "$DST"
rm -rf "$DST"
# sha256sum $DST.zip
# mv $DST.zip ../dist/
| true |
5a6699e66645654b00f6a0e41d99abeede732e97 | Shell | dwasmer/bin | /mkprinter.sh | UTF-8 | 3,764 | 3.65625 | 4 | [] | no_license | #!/bin/ksh
#########|#########|#########|#########|#########|#########|#########|#########|
#
# FILENAME: /usr/local/bin/mkprinter
# DATE: Jan. 13, 2000
# AUTHOR: David A. Wasmer
# PURPOSE: Create new printer using default values
#
# Syntax: /usr/local/bin/mkprinter -?
#
# Revised:
#
#
#########|#########|#########|#########|#########|#########|#########|#########|
#set -x
#set +x
##############################
# HELP FUNCTION
##############################
# Print usage text (heading shown in reverse video via tput) and exit 0.
# NOTE(review): the usage lists -b/-c but getopts in MAIN only accepts
# -a/-m/-q/-f/-h -- confirm which option set is intended.
function FUNC_HELP
{
print "
$(tput rev) USAGE: $(tput sgr0)
mkprinter [ -a Address ] [ -q Queue ] [ -m Model ] [ -b BU ] [ -c Contact ]
-a Enter the IP address of printer
-q Enter the Queue name for the printer
-b Enter the Business Unit name who will use this printer
-c Enter a Contact phone number near the printer
"
exit 0
}
##############################
# PROCESS FILE FUNCTION
##############################
# Read whitespace-separated "ADDRESS QUEUE MODEL" triples from stdin and
# create one print queue per line:
#   1. sanity-check that QUEUE and ADDRESS map to the same single
#      /etc/hosts entry (a mismatch only warns -- NOTE(review): confirm
#      that continuing after the warning is intended),
#   2. abort if the queue already exists,
#   3. validate MODEL against the supported list,
#   4. create the queue, tune its attributes, and print a test page.
function PROCESSFILE
{
while read ADDRESS QUEUE MODEL ; do
print "\nAbout to process the printer $(tput rev) ${QUEUE} $(tput sgr0) \c"
print "at $(tput rev) ${ADDRESS} $(tput sgr0) using the $(tput rev) ${MODEL} $(tput sgr0) drivers."
# Queue name and IP must grep to the identical, single hosts-file line.
if [[ "$(grep $QUEUE /etc/hosts)" = "$(grep $ADDRESS /etc/hosts)" && "$(grep $QUEUE /etc/hosts | wc -l)" -eq 1 ]]; then
print " IP and Queue entries match in the hosts file and only one entry exists. Continuing..."
else
print "ERROR: IP and Queue are NOT equal"
fi
# Never clobber an existing queue.
if lpstat -p$QUEUE > /dev/null 2>&1; then
print "\nERROR: Queue $QUEUE already exists.\nExiting...\n"
exit
fi
print " Verified that Queue $QUEUE does not already exist. Continuing..."
# Whitelist of supported printer models; anything else becomes BAD and
# aborts below.
case $MODEL in
lexOptraN ) MODEL=lexOptraN ;;
lex4049 ) MODEL=lex4049 ;;
hplj-2 ) MODEL=hplj-2 ;;
hplj-3 ) MODEL=hplj-3 ;;
hplj-3si ) MODEL=hplj-3si ;;
hplj-4 ) MODEL=hplj-4 ;;
hplj-4si ) MODEL=hplj-4si ;;
hplj-4v ) MODEL=hplj-4v ;;
hplj-5si ) MODEL=hplj-5si ;;
hplj-c ) MODEL=hplj-c ;;
* ) MODEL=BAD ;;
esac
if [[ "$MODEL" = BAD ]]; then
print "\n\n\nERROR: The print model, $MODEL, is not recognized. Please rexamine the model selected"
print "and either change it or add it to the case statement in the mkprinter script.\n\n"
exit
fi
# Queue plumbing (AIX lpd tooling -- TODO confirm platform): create a
# JetDirect queue on port 9100, adjust attributes, print a test page.
print " Creating queue ..."
/usr/lib/lpd/pio/etc/piomkjetd mkpq_jetdirect -p $MODEL -D pcl -q $QUEUE -h $QUEUE -x '9100'
print " Changing virtual printer attributes..."
/usr/lib/lpd/pio/etc/piochpq -q $QUEUE -d hp@$QUEUE -l 180 -w 180 -d p -j 0 -J '!' -Z '!'
print " Sending test page to printer ..."
/usr/local/bin/qtest $QUEUE
done
}
##############################
# MAIN
##############################
# Parse command-line options.  BUG FIX: a duplicated "f )" case label
# shadowed the help option, so "-h" previously fell through to the
# duplicate and help via -h was unreachable; it is now "h )".
while getopts "a:m:q:f:h" OPTION; do
	case $OPTION in
		a ) ADDRESS="$OPTARG" ;;
		m ) MODEL="$OPTARG" ;;
		q ) QUEUE="$OPTARG" ;;
		f ) FILE="$OPTARG" ;;
		h ) FUNC_HELP ; exit 0 ;;
		? ) FUNC_HELP ; exit 0 ;;
		* ) FUNC_HELP ; exit 0 ;;
	esac
done
shift $(($OPTIND - 1))
# -f (batch file) is mutually exclusive with the per-printer options.
if [[ ! -z "${ADDRESS}${MODEL}${QUEUE}" && ! -z "${FILE}" ]]; then
	print "\nERROR: cannot accept command line input and read input from a file at same time.\n"
	FUNC_HELP
fi
if [[ ! -z "${FILE}" ]]; then
	# Feed the file straight into PROCESSFILE (no useless cat pipeline).
	PROCESSFILE < "${FILE}"
else
	# Defaults: hplj-5si model; address resolved from the queue hostname.
	MODEL=${MODEL:=hplj-5si}
	ADDRESS=${ADDRESS:=$(host $QUEUE | awk '{print $3}')}
	print "\nAddress = $ADDRESS\n\nQueue = $QUEUE\n\nModel = $MODEL\n"
	sleep 5
	print "$ADDRESS $QUEUE $MODEL" | PROCESSFILE
fi
###############
# END
##########
| true |
3cfd2460ddd773018454edfcbd4255bfa17889f4 | Shell | rap77/Scute | /bin/jedwards.sh | UTF-8 | 257 | 2.609375 | 3 | [
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | #!/bin/sh
# export JAVA_HOME=/usr/local/java
# PATH=/usr/local/java/bin:${PATH}
# build the classpath
# Build the classpath by globbing instead of parsing `ls` output, so jar
# paths containing whitespace survive word-splitting.  (Without
# globstar, `**` matches exactly one path component, same as before.)
for i in ../lib/**/*.jar
do
	[ -e "$i" ] || continue	# glob matched nothing
	CP=${CP}:${i}
done
CP=$CP:../classes
export CP
echo "$CP"
java -cp "$CP" org.hyperdata.scute.demos.JEdwards --home ~/
| true |
3d8149413edc535eb1853f74821589dbcec26f08 | Shell | achacond/gem-gpu-benchmark | /scripts/mappers/hpg-aligner_v2.0.0.sh | UTF-8 | 1,203 | 3.03125 | 3 | [] | no_license | #!/bin/bash
#SBATCH --job-name="HPG-install"
#SBATCH -w huberman
#SBATCH --time=1:00:00
#SBATCH --partition=p_hpca4se
#SBATCH --output=../../logs/HPG.installation_mapping_tools.log
#SBATCH --error=../../logs/HPG.installation_mapping_tools.log
# SLURM batch job that clones and builds HPG-aligner (develop branch).
# $1 - directory prefix for the error logfile written below.
source ../../scripts/node_profiles.sh
logfile="$1"HPG.installation_mapping_tools.err
# Install HPG-aligner V2.0.0 (Develop branch)
####################################
# Dependences:
# the GSL (GNU Scientific Library), http://www.gnu.org/software/gsl/
# the Check library, http://check.sourceforge.net/
# scons
#
# Dependences from samtools:
# libncurses5-dev libncursesw5-dev
# libcurl4-gnutls-dev
####################################
echo "Start installing HPG-aligner V2.0.1"
echo "Start installing HPG-aligner V2.0.1" > $logfile 2>&1
# Start from a clean checkout.
rm -Rf hpg-aligner-2.0.1
echo "Downloading hpg-aligner v2.0.1 ..."
git clone https://github.com/opencb/hpg-aligner.git >> $logfile 2>&1
mv hpg-aligner hpg-aligner-2.0.1
cd hpg-aligner-2.0.1/
# Pull in the bundled hpg-libs submodule and switch both the submodule
# and the main tree to the develop branch before building with scons.
git submodule update --init >> ../$logfile 2>&1
cd lib/hpg-libs
git checkout develop
cd ../..
git checkout develop
echo "Compiling ..."
scons -c >> ../$logfile 2>&1
scons >> ../$logfile 2>&1
cd ..
echo "Done"
0c81d058e48e8f394a0821c3dfc3abdca5698b03 | Shell | Alveo/alveo-transcriber-services | /docs/examples/store-export.sh | UTF-8 | 745 | 3.0625 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
# Set environment variables for:
# ATS_API_DOMAIN, e.g app.alveo.edu.au
# ATS_API_KEY, e.g a valid Alveo API key
# ATS_URL, e.g https://segmenter.apps.alveo.edu.au/alveo
#
# Attempts to authenticate and download (as a zip) all stored data associated
# to the user.
#
# You can narrow down a query by altering the URL:
# /datastore/export/<key_id>
# /datastore/export/<key_id>/<revision>
#
# Exporting by a user is also possible.
# /datastore/user/<user_id>/export/<key_id>
# /datastore/user/<user_id>/export/<key_id>/<revision>
#
# Example usage:
# sh store-export.sh
# Authentication is passed via the two X-Api-* request headers; the
# trailing slash on /datastore/export/ requests everything for the user.
curl \
--header "X-Api-Domain: $ATS_API_DOMAIN" \
--header "X-Api-Key: $ATS_API_KEY" \
$ATS_URL/datastore/export/
# $ATS_URL/datastore/export/<user>
| true |
150ea95820d07270800c92e1f45c0f98feaf1d9c | Shell | aur-archive/libechonest-qt5-git | /PKGBUILD | UTF-8 | 793 | 2.6875 | 3 | [] | no_license | #Maintainer: Michael Wendland <michael@michiwend.com>
pkgname=libechonest-qt5-git
pkgver=2.3.2.6895a77
pkgrel=3
pkgdesc="C++ library for interfacing with Echo Nest (builds the qt5 version)"
arch=('i686' 'x86_64')
url="https://projects.kde.org/projects/playground/libs/libechonest"
license=('GPL')
makedepends=('git')
depends=('qt5-base')
provides=('libechonest-qt5')
conflicts=('libechonest-qt5')
source=("libechonest::git://github.com/lfranchi/libechonest.git")
md5sums=('SKIP')
# Start every build from an empty out-of-source build directory.
prepare() {
rm -rf build
mkdir build
}
# Out-of-source CMake build; BUILD_WITH_QT4=off selects the Qt5 build.
build() {
cd build
cmake ../libechonest -DCMAKE_BUILD_TYPE=Release \
-DECHONEST_BUILD_TESTS=OFF \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=lib \
-DBUILD_WITH_QT4=off
make
}
# Install into the packaging root.
package() {
cd build
make DESTDIR="${pkgdir}" install
}
# vim: ts=2 sw=2 et:
| true |
09eea20c54566009121c9f0a5997692c625ea466 | Shell | edenhuang101/pde | /install_vim.sh | UTF-8 | 1,463 | 3.546875 | 4 | [] | no_license | #!/bin/bash
echo "Check python version ......."
#Check whether python is installed
# Require a locally built /usr/local/bin/python2.7; vim is compiled
# against it below.
if [ -f /usr/local/bin/python2.7 ];then
local_python=$(/usr/local/bin/python2.7 -c 'import sys; print(".".join(map(str, sys.version_info[:3])))')
echo "Current local python version $local_python"
else
echo "Do not yet install local python"
exit
fi
# Show current OS python version
python_version=$(/usr/bin/python -c 'import sys; print(".".join(map(str, sys.version_info[:3])))')
# NOTE(review): python_major/python_minor are computed but never used.
python_major=$(/usr/bin/python -c 'import sys; print(".".join(map(str, sys.version_info[0:1])))')
python_minor=$(/usr/bin/python -c 'import sys; print(".".join(map(str, sys.version_info[1:2])))')
if [[ -z "$python_version" ]]; then
echo "No Python!"
else
echo "Current OS python version $python_version"
fi
# prompt User to be continue or breake installation
cat << EOF
Start to install vim 7.4 ....
EOF
read -p "Press [Enter] key to continue... or Ctrl+C to cancel"
# install vim
# Fresh clone of the vim mirror, then configure/build against the local
# python2.7 (huge feature set, +python interpreter).
if [ -d /usr/local/src/vim ]; then
rm -rf /usr/local/src/vim
fi
cd /usr/local/src && hg clone https://bitbucket.org/vim-mirror/vim vim && \
export LD_LIBRARY_PATH=/usr/local/lib && \
export vi_cv_path_python=/usr/local/bin/python2.7 && \
export vi_cv_path_python_pfx=/usr/local && \
cd vim/src && \
./configure LDFLAGS="-Wl,--rpath=/usr/local/lib" --enable-pythoninterp --with-features=huge --with-python-config-dir=/usr/local/lib/python2.7/config && \
make && make install
vim --version
| true |
fdf5458686176e2ca109752d00cbe00154c6e23d | Shell | outotec/iot-edge | /v1/tools/docs/gen_jsdocs.sh | UTF-8 | 619 | 2.890625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE file in the project root for full license information.
# Resolve the repository root relative to this script and work from it.
build_root=$(cd "$(dirname "$0")/../.." && pwd)
# Abort if the cd fails: otherwise jsdoc's relative paths would resolve
# against the wrong directory.  All expansions are quoted so paths
# containing spaces work.
cd "$build_root" || exit 1

# -----------------------------------------------------------------------------
# -- Generate Node.js binding docs
# -----------------------------------------------------------------------------

# Regenerate the target directory from scratch so stale pages vanish.
doc_target_dir="$build_root/doc/api_reference/node"
if [ -d "$doc_target_dir" ]
then
	rm -rf "$doc_target_dir"
fi
mkdir -p "$doc_target_dir"
jsdoc -c ../build/docs/jsdoc-device.conf.json -d "$doc_target_dir"
| true |
822026224a87aaf2a79e0db564851d60c664beda | Shell | lloredia/Projects-X | /devops/find.sh | UTF-8 | 204 | 3.109375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
echo "input: "
# -r keeps backslashes in the pattern literal
read -r input
echo "searching file with this pattern '${input}' under present directory"
# Stream matches line-by-line; the previous word-split array broke on
# paths containing whitespace, and stray "~" editor-artifact lines that
# caused runtime errors have been removed.
while IFS= read -r match; do
    echo "$match"
done < <(find . -name "*${input}*")
| true |
f37c92a4710d8204469b915bf42e2e17a3ebe203 | Shell | breeze1126/shell | /ipcheck_test.sh | UTF-8 | 530 | 3.515625 | 4 | [] | no_license | #!/bin/bash
#script name: ipcheck.sh
#author:
#version: v1
#date: 2023-03-12
## definitions: truncate result files from any previous run
>UP_IP_LIST
>DOWN_IP_LIST
c_class='10.64.8'
echo "IP lsit is start!! "
# Sweep 10.64.8.0 .. 10.64.8.254 and record each host's reachability.
for ((i=0;i<255;i++))
do
IP=$c_class'.'$i
echo "IP is $IP"
# NOTE(review): the 5s pause makes a full sweep take >20 minutes --
# confirm it is intentional (rate limiting?).
sleep 5s
ping -c 3 -i 0.2 -w 3 $IP &>/dev/null # -c 3: 3 probes, -i 0.2: inter-probe interval, -w 3: overall timeout in seconds
if [ $? -eq 0 ];then
echo "host $IP is up"
echo -e "$IP" >>UP_IP_LIST
else
echo "host $IP is down"
echo -e "$IP" >>DOWN_IP_LIST
fi
done
echo "IP lsit is end!! " | true |
d0aa7a55b7b94223232fe2f4bf981e61c0df8045 | Shell | xwing3/scripts | /kube.sh | UTF-8 | 1,543 | 3.03125 | 3 | [] | no_license | function kncos {
# Pick a cluster name from ~/.kube/ncos with fzf and fetch its GKE
# credentials (zone and project are fixed for this environment).
ENV=$( cat ~/.kube/ncos | fzf )
gcloud container clusters get-credentials $ENV --zone europe-west1-b --project jlr-dl-ncos-qa
}
# Open an interactive shell (sh) inside a pod.
#   $1 - optional namespace; otherwise pick one interactively via fzf.
# The pod is always chosen via fzf; if it runs more than one container,
# the container is chosen interactively as well.
function kexec {
if [ -z $1 ]
then
NS=$(kubectl get ns | fzf | awk "{print \$1}")
else
NS=$1
fi
POD=$(kubectl get pods -n $NS | fzf | awk "{print \$1}")
CONTAINERS=$(kubectl get pods $POD -n $NS -o jsonpath='{.spec.containers[*].name}')
COUNT_CONTAINERS=$(echo $CONTAINERS | wc -w | tr -d ' ')
if [ "$COUNT_CONTAINERS" -gt "1" ]
then
kubectl exec -it $POD -c $(echo $CONTAINERS | tr " " "\n" | fzf) -n $NS -- sh
else
kubectl exec -it $POD -n $NS -- sh
fi
}
# Follow (-f) the logs of a pod.
#   $1 - optional namespace; otherwise pick one interactively via fzf.
# Mirrors kexec: the pod (and, for multi-container pods, the container)
# is chosen interactively.
function klogs {
if [ -z $1 ]
then
NS=$(kubectl get ns | fzf | awk "{print \$1}")
else
NS=$1
fi
POD=$(kubectl get pods -n $NS | fzf | awk "{print \$1}")
CONTAINERS=$(kubectl get pod $POD -n $NS -o jsonpath='{.spec.containers[*].name}')
COUNT_CONTAINERS=$(echo $CONTAINERS | wc -w | tr -d ' ')
if [ "$COUNT_CONTAINERS" -gt "1" ]
then
kubectl logs $POD -c $(echo $CONTAINERS | tr " " "\n" | fzf) -n $NS -f
else
kubectl logs $POD -n $NS -f
fi
}
# Interactively choose a namespace and a pod, then delete the pod.
function kdel {
NS=$(kubectl get ns | fzf | awk "{print \$1}")
POD=$(kubectl get pods -n $NS | fzf | awk "{print \$1}")
kubectl delete pod $POD -n $NS
}
function kedit {
	# Interactively pick a resource and open it in the editor.
	#   $1 - optional resource kind (defaults to "deployments").
	# BUG FIX: the kind used to be written as "\$1", which passed the
	# literal string '$1' to kubectl, so "kedit <kind>" never worked.
	local kind=${1:-deployments}
	kubectl edit "$(kubectl get "$kind" -o name | fzf)"
}
# Watch the pods of an interactively chosen namespace.
function wkp {
NS=$(kubectl get ns | fzf | awk "{print \$1}")
watch "kubectl get pods -n $NS"
} | true |
82d99673c7b728832f236b38682d34f9c4b6fb6e | Shell | TheRockXu/deepdive | /test/postgresql/load-db-driver.bats | UTF-8 | 345 | 3.125 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bats
# Tests for load-db-driver.sh
load test_environ
# Source load-db-driver.sh in a subshell with a known $USER and no
# pre-set DEEPDIVE_DB_URL, and check that the driver expands the
# environment variable into the resulting database name.
@test "db.url allows use of environment variables" {
dbname=$(
cd "$BATS_TEST_DIRNAME"/deepdive_load/test.app
unset DEEPDIVE_DB_URL
USER=foobar
. load-db-driver.sh
echo "$DBNAME"
)
[[ "$dbname" = "deepdive_test_foobar" ]]
}
| true |
46203ff4bc49db1ad116dffb82d21d94c6a2719f | Shell | davidosomething/git-my | /git-my | UTF-8 | 6,214 | 4.09375 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# git-my v1.1.2
#
# Lists a user's remote branches and shows if it was merged
# and/or available locally.
#
# Copyright (c) 2017 David O'Trakoun <me@davidosomething.com>
#
# Requires:
# bash 4+ for readarray
#
# Usage:
# git my
#
# is_repository
#
# Exits with error if not a git repository
#
is_repository() {
  # rev-parse succeeds only when run inside a git repository.
  git rev-parse --git-dir >/dev/null 2>&1
}
# get_local_branches
#
# Proper way to get a porcelain list of local branches for shell script use
#
get_local_branches() {
  local fmt
  local cmd_get_local_branches
  # Per-ref snippet emitted by for-each-ref --shell: strip the
  # refs/heads/ prefix and print the bare branch name, one per line.
  fmt="
  r=%(refname)
  refname_without_prefix=\${r#refs/heads/}
  printf \"%s\\t%s\\n\" \"\$refname_without_prefix\"
  "
  cmd_get_local_branches=$(
    git for-each-ref --shell --format="$fmt" refs/heads
  )
  # The generated text is itself shell code; eval runs it.
  eval "$cmd_get_local_branches"
}
# get_everyones_remotes
#
# Get porcelain list of all remote branches
#
# @TODO support remote_name (currently origin)
#
# $1 - remote name (defaults to "origin").
# Prints "author<TAB>branchname" for every ref under that remote.
get_everyones_remotes() {
  local fmt
  local cmd_everyones_remotes
  local remote_name
  remote_name=${1:-"origin"}
  # user.name<TAB>branch.name
  # the TAB is used as a delimiter to awk with
  fmt="
  a=%(authorname)
  r=%(refname)
  refname_without_prefix=\${r#refs/remotes/\${remote_name}/}
  printf \"%s\\t%s\\n\" \"\$a\" \"\$refname_without_prefix\"
  "
  cmd_everyones_remotes=$(
    git for-each-ref --shell --format="$fmt" "refs/remotes/$remote_name"
  )
  # The generated text is itself shell code; eval runs it.
  eval "$cmd_everyones_remotes"
}
# get_merged_remote_branches
#
# @param string optional remote to list merged branches of. Defaults to
# "origin/master"
# @output names of remote branches that are merged into given branch
get_merged_remote_branches() {
  local remote
  local remote_name
  #local remote_refname
  local merged_remote_branches
  local stripped_branchnames
  remote=${1:-"origin/master"}
  # trim from end of string until / for the remote name
  remote_name=${remote%/*}
  # trim from beginning of string until / for the remote refname
  #remote_refname=${remote#*/}
  # Remote branches whose tips are reachable from (merged into) $remote.
  merged_remote_branches=$( \
    git branch --no-color \
      --remotes \
      --merged "$remote" \
  )
  # remove "origin/"
  stripped_branchnames=$( \
    echo "$merged_remote_branches" | sed "s/^\s*$remote_name\///g" \
  )
  echo "$stripped_branchnames"
}
# filter_mine
#
# @param git_user
# @param branchnames
# @output branchnames owned by current git user
filter_mine() {
  local user=$1
  local all_branches=$2
  # Case-insensitive match on the author column at the line start.
  local owned
  owned=$(grep -i "^$user" <<< "$all_branches")
  # Drop HEAD pointers and keep only the tab-delimited branch column.
  grep -v "HEAD" <<< "$owned" | awk -F'\t' '{ print $2 }'
}
# merge_lists
#
# Join two newline-separated lists, trim surrounding whitespace from
# every entry, de-duplicate, sort, and print the result as a single
# space-separated line.
#
# @param list1
# @param list2
merge_lists() {
  local trimmed_a trimmed_b
  # awk '{$1=$1};1' normalises the whitespace on every line
  trimmed_a=$(awk '{$1=$1};1' <<< "$1")
  trimmed_b=$(awk '{$1=$1};1' <<< "$2")
  local -a combined=()
  mapfile -t combined < <(printf '%s\n%s\n' "$trimmed_a" "$trimmed_b" | sort -u)
  echo "${combined[@]}"
}
# decorate_merged
#
# @param string my_branches list of remote branch names owned by me
# @param string local_branches list of local branch names
# @param string merged_remote_branches list of all remote branch names merged
# into another branch
# @output table of branch names and status
decorate_merged() {
  local my_branches
  # $1 is space-separated; fan it out to one branch per array element.
  readarray -t my_branches <<< "$(echo "$1" | tr ' ' '\n' )"
  local local_branches=$2
  local merged_remote_branches=$3
  local normal
  local magenta
  normal=$(tput sgr0)
  magenta=$(tput setaf 5)
  # clear-to-end-of-line keeps the row background colour across the line
  local clreol
  clreol=$'\x1B[K'
  # heading
  echo 'local | tracked | merged'
  echo '----- + ------- + ------'
  # body
  local zebra=0
  local last=$((${#my_branches[@]} - 1))
  for i in "${!my_branches[@]}"; do
    local branchname="${my_branches[$i]}"
    local decorated=''
    local is_local=' '
    local is_tracked=' '
    local is_merged=' '
    # tick marks: exists locally / has an upstream / is merged
    echo "$local_branches" | grep -q "$branchname" && \
      is_local='✓'
    if git rev-parse --symbolic-full-name "$branchname@{u}" >/dev/null 2>&1; then
      is_tracked='✓'
    fi
    echo "$merged_remote_branches" | grep -q "$branchname" && \
      is_merged='✓'
    # highlight the currently checked-out branch in magenta
    if [ "$branchname" == "$(git symbolic-ref --short HEAD)" ]; then
      branchname="${magenta}${branchname}"
    else
      branchname="${branchname}"
    fi
    decorated=$(printf " %s | %s | %s %s" \
      "$is_local" "$is_tracked" "$is_merged" "$branchname")
    # alternate the row background colour (zebra striping)
    if [ $zebra = 0 ]; then
      echo -en "${normal}"
      printf "\e[48;5;0m%s %s\n" "$decorated" "$clreol"
      zebra=1
    else
      echo -en "${normal}"
      printf "\e[48;5;236m%s %s" "$decorated" "$clreol"
      if [[ "$i" != "$last" ]]; then printf "\n"; fi
      zebra=0
    fi
  done
  printf "\e[48;5;0m\n"
}
main() {
  # NOTE(review): "decorated" is declared but unused in this function.
  local decorated
  local everyones_remotes
  local git_remote
  local git_user
  local local_branches
  local merged_remote_branches
  local my_branches
  local my_remotes
  # $1 - optional "<remote>/<ref>" to compare against (default origin/master)
  local remote=${1:-"origin/master"}
  # trim from end of string until / for the remote name
  local remote_name=${remote%/*}
  # trim from beginning of string until / for the remote refname
  local remote_ref=${remote#*/}
  # Both user.name and the remote URL must be configured; bail otherwise.
  git_user=$(git config --get user.name)
  if [ -z "$git_user" ]; then
    echo "ERROR: user.name is not set in git config"
    exit 1
  fi
  git_remote=$(git config --get "remote.${remote_name}.url")
  if [ -z "$git_remote" ]; then
    echo "ERROR: remote.${remote_name}.url is not set"
    exit 1
  fi
  # Gather remote branches owned by this user, merge with local branch
  # names, and print the decorated status table.
  everyones_remotes=$(get_everyones_remotes "$remote_name")
  my_remotes=$(filter_mine "$git_user" "$everyones_remotes")
  merged_remote_branches=$(get_merged_remote_branches "$remote_name/$remote_ref")
  local_branches=$(get_local_branches)
  my_branches=$(merge_lists "$my_remotes" "$local_branches")
  echo
  echo "branches owned by user: $git_user"
  echo "compare local/merge to: $remote"
  echo "in remote repository: $git_remote"
  echo
  decorate_merged "$my_branches" "$local_branches" "$merged_remote_branches"
  echo
}
# Entry point: refuse to run outside a git repository.
if ! is_repository; then
  echo "This is not a git repository."
  exit 1
fi
main "$@"
| true |
ee9d68ab8e64b8ea325e6b45ab7a3368f90099f0 | Shell | stjordanis/CLAP | /scripts/change_password.sh | UTF-8 | 1,801 | 3.984375 | 4 | [] | no_license | #!/bin/bash
set -o nounset
set -o errexit
set -o pipefail
BASE_DIR="${HOME}/.clap"
PASS_DIR="${BASE_DIR}/data"
NEW_PASS_DIR="${BASE_DIR}/data_new"
CONF_DIR="${BASE_DIR}/conf"
CONF_FILE="${CONF_DIR}/clap.conf"
BACKUP_DIR="${BASE_DIR}/backups/$(date +"%Y-%m-%d_%T")/.clap"
# Prompt for a hidden value and assign it (shell-escaped) to the
# variable whose name is given in $2.
#   $1 - prompt text
#   $2 - name of the variable that receives the input
function ask_password(){
    local _reply
    IFS="" read -p "${1}" -s -r _reply ; echo
    # %q escapes special characters so the eval below stays safe
    printf -v _reply "%q" "$_reply"
    eval "$2='${_reply}'"
}
# Verify the master password.  The conf file stores "salt" on line 1 and
# sha256(password+salt) on the last line.  On success the entered
# password is assigned to the variable named by $1; otherwise exit 1.
function ask_master_password(){
    salt=$(head -n 1 "$CONF_FILE")
    h=$(tail -1 "$CONF_FILE")
    ask_password "Insert master password (hidden): " _master
    _result=$(echo "$_master${salt}" | sha256sum)
    # sha256sum appends "  -" for stdin input; drop those 3 characters.
    if [ "${_result: 0: -3}" = "$h" ]; then
        eval $1="$_master"
    else
        echo "Wrong password, bye!"
        exit 1
    fi
    unset _master
    unset _result
}
# Prompt for the new master password and assign it to the variable
# named by $1.
function password_input(){
    ask_password "Insert new password (hidden): " _pass
    eval $1="$_pass"
    unset _pass
}
# Main flow: verify the current master password, read the new one, back
# up data/conf, write a fresh salt+hash to the conf file, then
# re-encrypt every stored entry with the new password.
ask_master_password old_master
password_input new_master
printf -v escaped_new_master "%q" "$new_master" #scape special characters
echo "Backing up, in case of emergency restore ${BACKUP_DIR} to your home"
mkdir -p $BACKUP_DIR
cp -r $BASE_DIR/data $BACKUP_DIR/
cp -r $BASE_DIR/conf $BACKUP_DIR/
# New random 30-character alphanumeric salt; the conf file format is
# "salt\nsha256(password+salt)" (sha256sum's trailing " -" stripped).
new_salt=$(head /dev/urandom | tr -dc 'a-zA-Z0-9' | head -c 30)
checksum=$(echo "${escaped_new_master}${new_salt}" | sha256sum)
echo "Updating configuration file ${CONF_FILE} with new master password and salt"
printf "${new_salt}\n${checksum: 0: -3}" > "${CONF_FILE}"
echo "Updating $(ls -1q ${PASS_DIR}/* | wc -l) encrypted files..."
# Decrypt each entry with the old key and re-encrypt it in place with
# the new key.
for entry in "$PASS_DIR"/*
do
pass=$(openssl aes-256-cbc -d -pbkdf2 -k "$old_master" -in "$entry")
echo -n "$pass" | openssl aes-256-cbc -pbkdf2 -k "$new_master" -out "$entry"
done
echo "All OK"
| true |
b3ff5218df2675e4cb6e4b1ec1000ccab8fc18f2 | Shell | ankitanallana/Bash-Scripts | /add_dirs.sh | UTF-8 | 1,403 | 3.6875 | 4 | [] | no_license | #!/bin/bash
#Outer loop for 4 pages of results (we have 120 repos)
# For every repository in the organisation: clone it locally and seed it
# with HW1..HW5 directories, each containing a readme placeholder.
for count in {1..4}
do
echo PAGE $count
# Fetch the page once.  Previously the API was queried twice per
# repository (once for the URL, once for the name): 60 requests per
# page, slow and able to race with pagination changes.
page=$(curl -H 'Authorization: token <token>' "https://hostname/api/v3/orgs/{org-name}/repos?page=$count")
for i in {0..29}
do
# jq -r emits raw strings, so no manual quote-stripping is needed
clone_url=$(echo "$page" | jq -r ".[$i].clone_url")
repo_name=$(echo "$page" | jq -r ".[$i].name")
echo REPO $repo_name
echo CLONE URL $clone_url
#pick a location of your choice
cd /Users/{YourName}/Desktop || exit 1
mkdir -p MSD_Fall2017
cd MSD_Fall2017/ || exit 1
#clone the repository to your local
git clone "$clone_url"
echo CLONED $repo_name
#create directories inside repos
cd "$repo_name" || continue
# BUG FIX: directories were created as HW1..HW5 while the readme files
# were written to hw1..hw5, which fails on case-sensitive filesystems.
# Create and populate them consistently.
for n in 1 2 3 4 5
do
mkdir "HW$n"
echo "Place Homework $n in this directory" > "HW$n/readme.txt"
done
git add -A
git commit -m "Created directories for Homeworks"
git push
done
done
#end
| true |
b2d4950884d592d6df6e958c21ca3aebfc726bb2 | Shell | dancing4you/dotfiles | /bin/easyconfig | UTF-8 | 1,045 | 3.3125 | 3 | [] | no_license | #!/bin/sh
# Small provisioning helper: the first argument names an action
# (system|network|dev) which is dispatched via "eval $1" at the bottom.
action=$1
# Reset the timezone to Asia/Shanghai and sync the clock via ntpdate.
# NOTE(review): the name shadows the shell's "time" keyword/builtin;
# under some shells the call inside system() may not reach this
# function -- confirm.
time()
{
echo "config $0..."
echo "old config is:"
file /etc/localtime
date
rm /etc/localtime
ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
echo
echo "new config is:"
file /etc/localtime
#sudo ntpdate -s cn.pool.ntp.org
ntpdate -s time.nist.gov
date
}
# Generate and activate the en_US.utf8 locale.
# NOTE(review): "locale -a" inside this function resolves to the
# function itself (recursion) rather than /usr/bin/locale; currently
# latent only because the function is never invoked.
locale()
{
ch=$(locale -a | grep "en_US.utf8")
if [ -z "$ch" ]; then
sudo localedef -i en_US -f UTF-8 -v -c --no-archive en_US.UTF-8
fi
export LANG=en_US.utf8 LANGUAGE=en_US.utf8 LC_ALL=en_US.utf8
sudo update-locale LANG=en_US.utf8 LANGUAGE=en_US.utf8 LC_ALL=en_US.utf8
}
# Install the kcptun tunnel.
kcp()
{
sudo apt-get install kcptun
}
# Install shadowsocks via pip.
ss()
{
sudo apt-get install python-pip -y
sudo pip install shadowsocks
}
# Placeholder: static IP configuration (not implemented).
ipaddr()
{
:
}
# Placeholder: wifi configuration (not implemented).
wifi()
{
:
}
# Install proxy tooling.
# NOTE(review): calls "kcptun" but the function above is named "kcp" --
# confirm which name is intended.
proxy()
{
ss
kcptun
}
# Placeholder: reverse-ssh setup (not implemented).
rssh()
{
:
}
# Placeholder: vim setup (not implemented).
app_vim()
{
:
}
# Action: base system configuration.
system()
{
time
#locale
}
# Action: network configuration.
network()
{
ipaddr
wifi
proxy
}
# Action: development environment (not implemented).
dev()
{
:
}
# Validate the action name, then dispatch to the same-named function.
case $action in
system)
;;
network)
;;
dev)
;;
*)
echo "Action not support!"
exit 1
;;
esac
eval $1
| true |
b81b5006379efb289f05c4a304969ab3402375c1 | Shell | JonasKorte/Sky-Engine | /build.sh | UTF-8 | 1,005 | 3.6875 | 4 | [
"MIT"
] | permissive | #!/bin/sh
# Print the list of supported premake actions.
PrintHelp()
{
echo Enter 'build.sh action' where action is one of the following:
echo
echo clean Remove all binaries and generated files
echo codelite Generate CodeLite project files
echo gmake2 Generate GNU makefiles for POSIX, MinGW, and Cygwin
echo vs2015 Generate Visual Studio 2015 project files
echo vs2017 Generate Visual Studio 2017 project files
echo vs2019 Generate Visual Studio 2019 project files
echo xcode4 Generate Apple Xcode 4 project files
}
# No action given: show help.  Otherwise $2 may force the platform
# ("mac"/"linux"); with no $2 the platform is autodetected via uname.
if [ "$1" = "" ] ;then
PrintHelp
else
if [ "$2" = "mac" ] ;then
echo using mac
./vendor/premake5mac/premake5 $1
exit
fi
if [ "$2" = "linux" ] ;then
echo using linux
./vendor/premake5lin/premake5 $1
exit
fi
# Autodetect: Darwin -> mac premake binary, Linux -> linux binary.
if [ "$(uname)" = "Darwin" ]; then
echo using mac
./vendor/premake5mac/premake5 $1
elif [ "$(expr substr "$(uname -s)" 1 5)" = "Linux" ]; then
echo using linux
./vendor/premake5lin/premake5 $1
fi
fi | true |
975c4e83be954458e581ff5e5e810d20eb00a7bc | Shell | andir/saltstack-config | /state/networking/rc.local.gateway | UTF-8 | 1,231 | 2.984375 | 3 | [
"WTFPL"
] | permissive | #!/bin/sh
# Policy-routing setup for a mesh gateway: every lookup consults table
# "private" first, own-network traffic may reach table "internet", and
# traffic arriving on mesh/VPN interfaces is confined to table "sink".
# Welcome to our main routing rule configuration.
# all traffic will lookup table private, where routes for IC-VPN, dn42
# and our own network are located. at the end of this table any rfc1918
# traffic hits an unreachable route, to prevent this type of traffic to
# leave freifunk/icvpn/dn42 networks
#
/sbin/ip -4 rule add prio 100 table private
/sbin/ip -6 rule add prio 100 table private
# for traffic from our own network, which is not destined to an rfc1918
# destination, we lookup table internet
#
/sbin/ip -4 rule add prio 110 from 10.185.0.0/16 table internet
#/sbin/ip -6 rule add prio 110 from 10.185.0.0/16 table internet
# finally, everything except local traffic will hit the sink table with
# an unreachable default route, instead of looking up main table
#
/sbin/ip -4 rule add prio 190 iif fffd.bat table sink
/sbin/ip -6 rule add prio 190 iif fffd.bat table sink
/sbin/ip -4 rule add prio 190 iif fffd.vpn table sink
/sbin/ip -6 rule add prio 190 iif fffd.vpn table sink
/sbin/ip -4 rule add prio 190 iif icvpn table sink
/sbin/ip -6 rule add prio 190 iif icvpn table sink
/sbin/ip -4 rule add prio 190 iif fffd.internet table sink
/sbin/ip -6 rule add prio 190 iif fffd.internet table sink
| true |
2a3637584194551084b7f400fcb384b316a86bb8 | Shell | delkyd/alfheim_linux-PKGBUILDS | /curlbomb/PKGBUILD | UTF-8 | 950 | 2.71875 | 3 | [] | no_license | pkgbase=('curlbomb')
pkgname=('curlbomb')
_module='curlbomb'
pkgver='1.4.1'
pkgrel=0
pkgdesc="A personal HTTP server for serving one-time-use shell scripts"
url="https://github.com/EnigmaCurry/curlbomb"
depends=('python'
'python-tornado'
'python-requests'
'python-psutil')
optdepends=('openssh: SSH tunnel support (--ssh)'
'openssl: TLS security (--ssl)'
'gnupg: Encrypted SSL certificate and resource support'
'python-notify2: Desktop notification support')
makedepends=('python-setuptools')
license=('MIT')
arch=('any')
source=("https://pypi.python.org/packages/2a/1e/c6db152cd596aa31682c1e7126891209182503eaac902a63f46c36316c21/curlbomb-1.4.1.tar.gz")
md5sums=('b39197c2b9ea1362c387bfe6fc7e7e2c')
# Install via setuptools into the packaging root and ship the man page.
package() {
# NOTE(review): "depends+=()" appends nothing and can be removed.
depends+=()
cd "${srcdir}/${_module}-${pkgver}"
python setup.py install --root="${pkgdir}"
mkdir -p ${pkgdir}/usr/share/man/man1/
install curlbomb.1 ${pkgdir}/usr/share/man/man1/
}
| true |
0bfe0a4fc93e1d61449d641195d5b7a5fb3bec72 | Shell | haiyanghee/haiyangConfig | /scripts/renameFileExtension.sh | UTF-8 | 166 | 3 | 3 | [] | no_license | #!/bin/sh
#got from https://unix.stackexchange.com/questions/19654/how-do-i-change-the-extension-of-multiple-files
# Rename every file ending in $1 so it ends in $2 instead.
for f in *"$1"; do
    # Skip the literal pattern when the glob matched nothing, and quote
    # the suffix so glob metacharacters in $1 are treated literally.
    [ -e "$f" ] || continue
    mv -- "$f" "${f%"$1"}$2"
done
| true |
4862fb3ee1ce7717967f669e5e9102458222be12 | Shell | georgeee/itmo-8sem-networks | /task3/launch.sh | UTF-8 | 1,288 | 3.9375 | 4 | [] | no_license | #!/bin/bash
### Launch.sh
### Launches virtual machine cluster on several qemu instances
### This script is to be adapted for concrete application
### Basic idea: each machine in cluster has type and id (subnetwork id)
APP_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source $APP_DIR/../launch-base.sh
### Arguments
## Here you can customize arguments for your setup
## Note that basic options like -m and -b would be parsed in launch-base.sh, so you don't have to worry about them
ei=true #enable_internet
no_quagga=false
# Consume the options understood here; anything unrecognised is left
# for launch-base.sh / later parsing.
# NOTE(review): ei already defaults to true, so the -ei flag below is
# currently a no-op -- confirm whether the default should be false.
while test $# -gt 0
do
case "$1" in
-ei) ei=true
;;
-nq) no_quagga=true
;;
*) break
;;
esac
shift
done
### VM config generator
## In this function you need to generate files for VM's environment
## On VM start init.sh will be launched (if exists)
# create_config <type> <id> <dir>
# create_config <type> <id> <dir> <machine_index>
# Instantiate init-stub.sh for one VM by substituting the %...% tokens
# (using the ei/no_quagga flags parsed above), copy the shared
# ifaces.sh alongside it, and mark both files executable.
function create_config() {
local type=$1
local id=$2
local dir=$3
local i=$4
cat "$APP_DIR/init-stub.sh" \
| sed "s/%enable_inet%/$ei/g" \
| sed "s/%id%/$id/g" \
| sed "s/%machine_id%/$i/g" \
| sed "s/%type%/$type/g" \
| sed "s/%no_quagga%/$no_quagga/g" \
> "$dir/init.sh"
cp "ifaces.sh" "$dir"
chmod +x "$dir/init.sh"
chmod +x "$dir/ifaces.sh"
}
source $APP_DIR/launch-impl.sh
| true |
69588ac7cff529d68b367716dfcb08a1aafa9367 | Shell | DHaoYu/ali_cloud | /shell/if_test/var.sh | UTF-8 | 280 | 2.875 | 3 | [] | no_license | #!/bin/bash
#define path var
#author by dhy
# Demo script: prints the common special shell variables.
a=123
echo "UID is $UID"
echo "PWD is $PWD"
echo "This is my first shell var a = $a"
echo "0 is $0" # $0, $1, $2 are the command-line parameters (script name + positionals)
echo "1 is $1, 2 is $2"
echo $? # exit status of the previous command
echo "The \$? is $?"
echo "The \$* is $*"
echo "The \$# is $#"
| true |
f26f3c29fc12e316295cd8d45d99a1a8bc9a8726 | Shell | YunLiCode/packages-build | /make_SystemUI | UTF-8 | 650 | 2.8125 | 3 | [] | no_license | #!/bin/bash
# This script is used for DIY ROM builds.
# Author: Chen Yun
# Written Mar 2014 (Wowo); Apr/Jun 2014 (Shenzhen Yuxin); Jul 2014
# (Shenzhen Yunque).
# NOTE(review): "usr/sbin" below is missing its leading slash.
PATH=/bin:/sbin:/usr/bin:usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin:~/yun
export PATH
app=app
# Build the SystemUI package for $device/$CPU and move the resulting
# apk into the output system image tree.
# NOTE(review): relies on externally defined helpers "b" and "rmout"
# and on $device/$CPU being set by the caller -- confirm.
make_SystemUI(){
echo -e "\033[33m------SysytemUI------\033[0m"
echo $device
echo "cd packages/SystemUI/$CPU "
cd packages/SystemUI/$CPU
b SystemUI
echo -e "\033[32minstall out/SystemUI.apk---> ../../../out/$device/system/$app/SystemUI.apk\033[0m"
mv out/SystemUI.apk ../../../out/$device/system/$app/SystemUI.apk
rmout
cd ../../../
}
| true |
de14dce5634faf45ba1cae6e9340a98d2ac12ed3 | Shell | martinradev/tum-microprocessors | /ex4/generate_results.sh | UTF-8 | 366 | 3.34375 | 3 | [] | no_license | #!/bin/bash
MAX_THREADS=$1

# Run one benchmark binary once per thread count (1..MAX_THREADS),
# redirecting each run's output to <prog>_gcc_<start>_<end>_<threads>.txt.
exeprog()
{
	progname=$1
	startsz=10000
	endsz=40000000
	stepsz=1000000
	i=1
	while [ "$i" -le "$MAX_THREADS" ]; do
		f="${progname}_gcc_${startsz}_${endsz}_${i}.txt"
		# Echo the command for the log, then run it directly; the former
		# "eval $cmd" re-parsed the string a second time and would break
		# (or execute arbitrary code) on shell metacharacters.
		echo "Running ./$progname -l $startsz $endsz $stepsz -t $i > $f"
		"./$progname" -l "$startsz" "$endsz" "$stepsz" -t "$i" > "$f"
		i=$((i+1))
	done
}

for u in 1 2 3 4; do
	exeprog "toupper_variant$u"
done
| true |
12c9d488ef0193848a05d1c7e140b5fa6ee59b7f | Shell | isindir/helmfileenv | /libexec/helmfileenv-version-file-write | UTF-8 | 516 | 4.125 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Usage: helmfileenv version-file-write <file> <version>
# Validates arguments, ensures the requested version is installed, then
# records the version in the given file.
set -e
[ -n "$HELMFILEENV_DEBUG" ] && set -x
HELMFILEENV_VERSION_FILE="$1"
HELMFILEENV_VERSION="$2"
# Both arguments are mandatory; print usage to stderr otherwise.
if [ -z "$HELMFILEENV_VERSION" ] || [ -z "$HELMFILEENV_VERSION_FILE" ]; then
helmfileenv-help --usage version-file-write >&2
exit 1
fi
# Make sure the specified version is installed.
helmfileenv-bin-path "$HELMFILEENV_VERSION" > /dev/null
# Write the version out to disk.
echo "$HELMFILEENV_VERSION" > "$HELMFILEENV_VERSION_FILE"
| true |
3228cbe385529c0d054436dea661ed4f993129b0 | Shell | jvhaarst/dellmont-credit-checker | /dellmont-credit-checker.sh | UTF-8 | 30,172 | 3.625 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
VERSION="4.5.3 [09 Sep 2019]"
# Choose a temp directory unless $TEMP is already set: the first
# existing candidate wins, falling back to the current directory.
if [ -z "$TEMP" ]; then
	for TEMP in /tmp /var/tmp /var/temp /temp $PWD; do
		[ -d "$TEMP" ] && break
	done
fi
# Define functions for later use
send_message() {
	# single parameter is the message text
	# Appends a signature, emails the message when $EMAIL is set, and
	# echoes it (with a sent/not-sent notice) unless $QUIET is set.
	# MAILNOTSENT is 0 when sendmail accepted the mail, non-zero otherwise.
	MESSAGE="$1\n\nThis message was generated by $THIS v$VERSION\nhttps://www.timedicer.co.uk/programs/help/$THIS.php"
	MAILNOTSENT=1
	if [ -n "$EMAIL" ]; then
		echo -e "To:$EMAIL\nSubject:$MESSAGE" | sendmail $EMAIL; MAILNOTSENT=$?
	fi
	if [ -z "$QUIET" ]; then
		echo -en "\n\nThis message has "
		[ "$MAILNOTSENT" -gt 0 ] && echo -n "*not* "
		echo -e "been emailed:\n\n$MESSAGE"
	fi
}
# Log in to one Dellmont/Betamax portal and check (and optionally record)
# its remaining credit, optionally also appending call records.
# Arguments:
#   $1 website   $2 username   $3 password
#   $4 warning credit level in cents/pence (optional; "0" = never email)
#   $5 warning credit reduction in cents/pence (optional, with $6)
#   $6 credit record file (optional, with $5)
# Globals read: QUIET VERBOSE DEBUG NEWCOOKIEJAR PAUSEONCOOKIEEXPIRY
#   SKIPONCAPTCHA CAPTCHAPATH CALLRECORDSDIR DELIMITER DELLMONTH TEMP THIS
#   USERAGENT EMAIL. Sets (non-local): CREDITCENTS COOKIEJAR USEFILE etc.
# Returns 0 on success, or a distinct non-zero code (1-13) per failure mode.
check_credit_level() {
#parameters: website username password warning_credit_level_in_cents/pence
#example: www.voipdiscount.com myaccount mypassword 200
unset CREDITCENTS
# Show website
if [ -z "$QUIET" ]; then
[ -n "$VERBOSE" ] && echo -e "\n$1" || echo -n "$1 "
fi
# Set up cookiejar (one per website+username so logins can be reused)
COOKIEJAR="$TEMP/$THIS-$(id -u)-$1-$2-cookiejar.txt"
[[ -n $DEBUG ]] && echo "COOKIEJAR: '$COOKIEJAR'"
if [ -n "$NEWCOOKIEJAR" ]; then
rm -f "$COOKIEJAR"; touch "$COOKIEJAR"
[ -z "$QUIET" ] && echo " deleted any existing cookie jar"
elif [ ! -f "$COOKIEJAR" ]; then
touch "$COOKIEJAR"
[ -n "$VERBOSE" ] && echo " could not find any existing cookie jar"
# Check whether cookie is still valid
else
# Field 5 of a Netscape-format cookie line is the expiry epoch time
FIRSTEXPIRE=$(grep "#Http" "$COOKIEJAR"|grep -v "deleted"|awk '{if ($5!=0) print $5}'|sort -u|head -n 1)
if [ -n "$FIRSTEXPIRE" ]; then
if [ $(date +%s) -gt $FIRSTEXPIRE ]; then
# cookies have expired
[ -n "$VERBOSE" ] && echo -n " at least one login cookie has expired"
if [ -n "$PAUSEONCOOKIEEXPIRY" ]; then
[ -n "$VERBOSE" ] && echo -n " - waiting 2 minutes [12 dots]:"
for (( i=1; i<=12; i++)); do sleep 10s; [ -n "$VERBOSE" ] && echo -n "."; done
[ -n "$VERBOSE" ] && echo -n "done"
fi
else
[ -n "$VERBOSE" ] && echo -n " all login cookies are still valid"
fi
[ -n "$VERBOSE" ] && echo
else
[ -n "$VERBOSE" ] && echo "No successful login cookies found in $COOKIEJAR"
fi
fi
if [ -z "$QUIET" ]; then
[ -n "$VERBOSE" ] && echo -n " "
echo -en "$2"
if [ -n "$4" ]; then
echo -en " for credit >$4"
# Threshold must be >=100 (i.e. >= 1.00 in currency units) or exactly 0
if [ ${#4} -lt 3 -a "$4" != "0" ]; then
echo -e "\nError: $1 / $2 - can't check for $4 (<100), please supply higher value">&2
return 1
fi
fi
echo -n ": "
fi
# Curl settings
# -L --location option follows redirects, -i --include adds header information to the output file (makes debug easier)
# NOTE(review): the escaped quotes put literal " characters around the
# user-agent value as sent; presumably tolerated by the servers — confirm.
CURLOPTIONS=( "--user-agent" "\"$USERAGENT\"" "--max-time" "30" "--insecure" "--show-error" "--location" )
[[ -z "$DEBUG" ]] && CURLOPTIONS+=( "--silent" ) || echo -e "\nCURLOPTIONS : ${CURLOPTIONS[@]}"
# Get remote login page with curl; up to 3 attempts (sessions can expire)
PAGE1="https://$1/recent_calls${DELLMONTH}"
for ((RETRIEVELOOP=1; RETRIEVELOOP<=3; RETRIEVELOOP++)); do
[ $RETRIEVELOOP -gt 1 ] && echo -n " try $RETRIEVELOOP/3: "
unset EXPIRED
curl -b "$COOKIEJAR" -c "$COOKIEJAR" "${CURLOPTIONS[@]}" --fail --include -o "$TEMP/$THIS-$(id -u)-$1-1.htm" "$PAGE1"
CURLEXIT=$?; [ -n "$DEBUG" ] && echo "Curl exit status : $CURLEXIT"; [ $CURLEXIT -gt 0 ] && { echo "Curl exit code $CURLEXIT, skipping...">&2; return 2; }
[ -n "$DEBUG" ] && echo -e "Visited : $PAGE1\nSaved as : $TEMP/$THIS-$(id -u)-$1-1.htm\nCookies saved as : $COOKIEJAR"
# If the username appears on the page we are already logged in
if [ -n "`grep "$2" "$TEMP/$THIS-$(id -u)-$1-1.htm"`" ]; then
[ -n "$DEBUG" ] && echo "We are already logged in, retrieving info from original page"
USEFILE=1; break
fi
# Locate the correct version of the hidden tag (inside Ajax code, if present)
unset LINESTART
HIDDENTAG=$(sed -n '/show_webclient&update_id=&/{s/.*=//;s/".*/\" \//p}' "$TEMP/$THIS-$(id -u)-$1-1.htm")
if [ -n "$HIDDENTAG" ]; then
# this works on some portals with Firefox useragent, not with IE or Safari
# find the form input line which contains the hiddentag
LINEOFTAG=$(grep -n "$HIDDENTAG" "$TEMP/$THIS-$(id -u)-$1-1.htm"|awk -F: '{printf $1}')
# find the line of the preceding start of form
LINESTART=$(awk -v LINEOFTAG=$LINEOFTAG '{if (NR==LINEOFTAG) {printf FORMSTART; exit}; if (match($0,"<form")!=0) FORMSTART=NR}' "$TEMP/$THIS-$(id -u)-$1-1.htm")
[ -n "$DEBUG" ] && echo -e "Hidden Tag : '$HIDDENTAG'\nLine of Tag : '$LINEOFTAG'\nForm starts @ line: '$LINESTART'"
[ -z "$LINESTART" ] && echo "An error occurred extracting start of the correct form"
fi
if [ -z "$LINESTART" ]; then
# this decryption method seems to be required for voicetrading.com at least
[ -n "$DEBUG" ] && echo -e "Unable to find correct version of hidden tag directly, using decryption"
# extract the encrypted_string and the key
ENC_AND_KEY=( $(sed -n '/getDecVal/{s/.*getDecValue(//;s/).*//;s/,//;s/"//gp;q}' "$TEMP/$THIS-$(id -u)-$1-1.htm") )
[ -z "${ENC_AND_KEY[0]}" -o -z "${ENC_AND_KEY[1]}" ] && echo "Unable to extract encrypted magictag and/or key, aborting..." >&2 && return 3
[ -n "$DEBUG" ] && echo -e "Encrypted Magictag: \"${ENC_AND_KEY[0]}\"\nKey : \"${ENC_AND_KEY[1]}\"\nDecryption using openssl..."
# decrypt the magictag by splitting it into 32-character lines then passing to openssl (code by Loran)
MAGICTAG=$(echo "${ENC_AND_KEY[0]}" | sed 's/.\{32\}/&\n/g;s/\n$//' | openssl enc -d -aes-256-cbc -a -md md5 -k "${ENC_AND_KEY[1]}" 2>/dev/null)
[ -z "$MAGICTAG" ] && echo "An error occurred extracting magictag, aborting...">&2 && return 4
[ -n "$DEBUG" ] && echo -e "Decrypted Magictag: \"$MAGICTAG\""
# get start line of the correct form i.e. div tagged with MAGICTAG
LINESTART=$(grep -n "$MAGICTAG" "$TEMP/$THIS-$(id -u)-$1-1.htm"|awk -F: '{printf $1; exit}')
[ -z "$LINESTART" ] && echo "An error occurred extracting start of the correct form using magic key '$MAGICTAG', aborting...">&2 && return 5
[ -n "$DEBUG" ] && echo -e "Form starts @ line: '$LINESTART' of $TEMP/$THIS-$(id -u)-$1-1.htm"
fi
# extract the form info (from LINESTART up to the closing </form>)
sed -n "1,$(( ${LINESTART} -1 ))d;p;/<\/form>/q" "$TEMP/$THIS-$(id -u)-$1-1.htm">"$TEMP/$THIS-$(id -u)-$1-3.htm"
[ -n "$DEBUG" ] && echo -e "Form saved as : $TEMP/$THIS-$(id -u)-$1-3.htm"
# check for a captcha image
CAPTCHA=$(sed -n '/id="captcha_img/{s/.*src="//;s/".*//p;q}' "$TEMP/$THIS-$(id -u)-$1-3.htm")
unset HIDDEN
if [ ${#CAPTCHA} -gt 100 ]; then
echo -e "\nError extracting CAPTCHA code">&2
return 6
elif [ -n "$CAPTCHA" ]; then
if [ -z "$SKIPONCAPTCHA" ]; then
# Interactive mode: download the captcha and ask the user to solve it
[ -n "$DEBUG" ] && echo -e "Retrieving Captcha: $CAPTCHA"
curl -c "$COOKIEJAR" -b "$COOKIEJAR" "${CURLOPTIONS[@]}" -e "$PAGE1" --fail -o "$CAPTCHAPATH$THIS-$1-captcha.jpeg" $CAPTCHA
CURLEXIT=$?
[ -n "$DEBUG" ] && echo "Curl exit status : $CURLEXIT"
echo -e "\n Captcha image saved as $CAPTCHAPATH$THIS-$1-captcha.jpeg"
read -p " Please enter Captcha code: " -t 120 </dev/stderr
[ -z "$REPLY" ] && { echo "Skipping $1 retrieval...">&2; return 7; }
echo -n " "
HIDDEN=" -F \"login[usercode]=$REPLY\""
else
[ -n "$QUIET" ] && echo -n "$1: "
echo "[FAIL] - captcha code requested, try again with -c option"
rm -f "$COOKIEJAR"
USEFILE=0
break
fi
fi
# there are hidden fields with complicated name and data
# (each becomes an extra curl -F name=value pair)
HIDDEN+=$(grep -o "<input type=\"hidden\"[^>]*>" "$TEMP/$THIS-$(id -u)-$1-3.htm"|awk -F \" '{for (i=1; i<NF; i++) {if ($i==" name=") printf " -F " $(i+1) "="; if ($i==" value=") printf $(i+1)}}')
FORMRETURNPAGE=`sed -n '/<form/{s/.*action="\([^"]*\).*/\1/;p;q}' "$TEMP/$THIS-$(id -u)-$1-3.htm"`
if [ -n "$DEBUG" ]; then
[ -n "$HIDDEN" ] && echo -e "Hidden fields : $HIDDEN"
DEBUGFILE="$TEMP/$THIS-$(id -u)-$1-2d.htm"
DEBUGCURLEXTRA=" --trace-ascii $DEBUGFILE "
else
unset DEBUGCURLEXTRA
fi
# Get the form data (submit the login form)
if [ -n "$FORMRETURNPAGE" ]; then
curl -b "$COOKIEJAR" -c "$COOKIEJAR" "${CURLOPTIONS[@]}" $DEBUGCURLEXTRA -e "$PAGE1" --fail --include -F "login[username]=$2" -F "login[password]=$3" $HIDDEN -o "$TEMP/$THIS-$(id -u)-$1-2.htm" "$FORMRETURNPAGE"
CURLEXIT=$?; [ -n "$DEBUG" ] && echo "Curl exit status : $CURLEXIT"; [ $CURLEXIT -gt 0 ] && { echo "Curl exit code $CURLEXIT, aborting...">&2; return 8; }
[ -s "$TEMP/$THIS-$(id -u)-$1-2.htm" ] || { echo "Curl failed to save file $TEMP/$THIS-$(id -u)-$1-2.htm, aborting...">&2; return 9; }
if [ -n "$DEBUG" ]; then
sed -i "s/$3/\[hidden\]/g" "$DEBUGFILE" # remove password from debug file
echo -e "Visited : $FORMRETURNPAGE\nSaved as : $(ls -l $TEMP/$THIS-$(id -u)-$1-2.htm)\nTrace-ascii output: $DEBUGFILE (password removed)"
fi
if [ -n "$(grep "This account has been disabled" "$TEMP/$THIS-$(id -u)-$1-2.htm")" ]; then
echo "[FAIL] - account disabled"
USEFILE=0; break
fi
EXPIRED=$(grep -o "your session.*expired" "$TEMP/$THIS-$(id -u)-$1-2.htm")
if [ -n "$EXPIRED" ]; then
[ -n "$DEBUG" ] && { echo " Session expired">&2; USEFILE=0; break; }
echo "[FAIL] - session expired"
rm -f "$COOKIEJAR"
USEFILE=0
else
# Login succeeded: the info is on the freshly saved page 2
USEFILE=2; break
fi
else
echo "No form data found, unable to obtain credit amount">&2
USEFILE=0; break
fi
done
# Optionally scrape the recent-call list into $CALLRECORDSDIR (option -r)
if [[ -n $CALLRECORDSDIR && -s "$TEMP/$THIS-$(id -u)-$1-$USEFILE.htm" ]]; then
CALLRECORDSFILE="$CALLRECORDSDIR/$1-$2.out"
[[ -n $DEBUG ]] && echo "Appending to $CALLRECORDSFILE:" && ls -l "$CALLRECORDSFILE"
CRFTMP="$TEMP/$THIS-$(id -u)-$1-callrecords.out"
if [[ -s $CALLRECORDSFILE ]]; then
cp -a "$CALLRECORDSFILE" "$CRFTMP" 2>/dev/null
else
truncate -s0 $CRFTMP
fi
# Flatten the HTML table rows into DELIMITER-separated records
sed -n '/recent-call-list-details/,/date-navigator center/p' "$TEMP/$THIS-$(id -u)-$1-$USEFILE.htm" | sed '/helptip/d;/SIP call/d;/<tr/d;s/.*<td>//;/ /s/\.//;s/.* //' \
|sed -e :a -e "/td/N; s/..td..\n/$DELIMITER/; ta" | sed -n "s/Free;/0000/;s/$DELIMITER *<\/tr>//p" >> "$CRFTMP"
# Normalise DD-Mon-YYYY dates to YYYY-MM-DD, then de-duplicate
sed "s/^\([0-9][0-9]\)-\([A-Z][a-z][a-z]\)-\(20[1-9][0-9]\);/\3-\2-\1;/;s/Jan/01/;s/Feb/02/;s/M.r/03/;s/Apr/04/;s/Ma./05/;s/Jun/06/;s/Jul/07/;s/Aug/08/;s/Sep/09/;s/O.t/10/;s/Nov/11/;s/De[^s]/12/" "$CRFTMP"\
|sort -u > "$CALLRECORDSFILE"
[[ -n $DEBUG ]] && ls -l $CALLRECORDSFILE || [[ -f "$CRFTMP" ]] && rm -f "$CRFTMP"
fi
# Extract free days and the credit balance (as an integer in cents/pence)
if [ $USEFILE -gt 0 ]; then
FREEDAYS=$(sed -n '/class="freedays"/{s/.*class="freedays".//;s/ days.*$//;p}' "$TEMP/$THIS-$(id -u)-$1-$USEFILE.htm")
CREDITCENTS=$(sed -n '/class="[^"]*balance"/{s/.*pound; //;s/.*euro; //;s/.*\$//;s/<.*//;s/\.//;s/^0*//;p}' "$TEMP/$THIS-$(id -u)-$1-$USEFILE.htm")
fi
if [ -n "$DEBUG" ];then
echo "Credit : '$CREDITCENTS'"
[[ -n $FREEDAYS ]] && echo "Freedays : '$FREEDAYS'"
else
# Clean up
rm -f "$TEMP/$THIS-$(id -u)-$1-"*.htm # note COOKIEJARs are not removed, so cookies can be reused if it is rerun
[ -z "$4" ] || [ -z "$QUIET" -a -n "$CREDITCENTS" ] && echo -n "$CREDITCENTS"
[[ -z $QUIET && -n $FREEDAYS ]] && echo -n " Freedays : $FREEDAYS"
fi
if [ -z "$CREDITCENTS" ]; then
echo "Error: $1 / $2 - CREDITCENTS is blank">&2
RETURNCODE=11
# This test (with stderr closed via 2>&-) only succeeds when CREDITCENTS
# is a valid integer, positive or negative
elif [ "$CREDITCENTS" -ge 0 -o "$CREDITCENTS" -lt 0 2>&- ]; then
if [ -n "$6" -a -n "$5" ]; then
# check for periodic (e.g. daily) change in credit
if [ -s "$6" ]; then
local PREVCREDIT=(`tail -n 1 "$6"`)
else
local PREVCREDIT=("2000-01-01 00:00 0")
fi
echo -e "`date +"%Y-%m-%d %T"`\t$CREDITCENTS">>"$6" 2>/dev/null || echo "Warning: unable to write to $6" >&2
local CREDITFALL=$((${PREVCREDIT[2]}-$CREDITCENTS))
[ -n "$DEBUG" ] && echo -en "Previous credit : '${PREVCREDIT[2]}' at ${PREVCREDIT[0]} ${PREVCREDIT[1]}\nCredit Reduction : '$CREDITFALL'"
if [ $CREDITFALL -gt $5 ]; then
send_message "Credit Reduction Warning - $1\nThe credit on your $1 account '$2' stands at ${CREDITCENTS:0:$((${#CREDITCENTS}-2))}.${CREDITCENTS:(-2):2}, and has fallen by ${CREDITFALL:0:$((${#CREDITFALL}-2))}.${CREDITFALL:(-2):2} since ${PREVCREDIT[0]} ${PREVCREDIT[1]}."
fi
fi
if [ -z "$4" ]; then
echo
else
if [ "$4" != "0" ] && [ "$CREDITCENTS" -lt "$4" ]; then
send_message "Credit Level Warning - $1\nThe credit on your $1 account '$2' stands at ${CREDITCENTS:0:$((${#CREDITCENTS}-2))}.${CREDITCENTS:(-2):2} - below your specified test level of ${4:0:$((${#4}-2))}.${4:(-2):2}.\nYou can buy more credit at: https://$1/myaccount/"
elif [ -z "$QUIET" ]; then
echo " - ok"
fi
fi
RETURNCODE=0
else
echo "Error: $1 / $2 - CREDITCENTS is a non-integer value: '$CREDITCENTS'">&2
RETURNCODE=13
fi
# Discard all positional parameters before returning
shift 999
return $RETURNCODE
}
# Start of main script
# Global variables
THIS="$(basename "$0")"   # script name, used in messages and temp-file names
# Terminal width: "stty size" prints "rows cols"; fall back to 80 when there
# is no tty, then keep only the last (columns) field.
# (Was backticks with unquoted $0; now $( ) with quoting.)
COLUMNS=$(stty size 2>/dev/null||echo 80)
COLUMNS=${COLUMNS##* }
UMASK=177 # all files created are readable/writeable only by current user
DELIMITER=","             # field separator for call-record export (-t option)
unset DELLMONTH           # optional month/page suffix for history URLs (-M option)
# CGI mode: web servers export SERVER_SOFTWARE to CGI programs.
if [ -n "$SERVER_SOFTWARE" ]; then
  # A CGI response must begin with a content-type header and a blank line.
  echo -e "Content-type: text/plain\n"
  # Pull the value of the 'options=' query parameter, decoding %20 to space.
  OPTS=$(echo "$QUERY_STRING"|sed -n '/options=/{s/.*options=\([^&]*\).*/\1/;s/%20/ /;p}')
  # Captcha images cannot be shown to the user over CGI, so always skip them.
  SKIPONCAPTCHA="y"
fi
# Parse commandline switches (any CGI-supplied $OPTS are appended to "$@").
# Bugfix: a duplicated, unreachable "c" case arm has been removed.
while getopts ":dc:f:hlm:M:npr:qst:u:vw" optname $@$OPTS; do
  case "$optname" in
    "c") CAPTCHAPATH="$OPTARG";;          # directory for saved captcha images
    "d") DEBUG="y";VERBOSE="y";;
    "f") CONFFILE="$OPTARG";;
    "h") HELP="y";;
    "l") CHANGELOG="y";;
    "m") EMAIL="$OPTARG";;
    "M") DELLMONTH="$OPTARG";;            # '/MM/YYYY/P' suffix for history URLs
    "n") NEWCOOKIEJAR="y";;
    "p") PAUSEONCOOKIEEXPIRY="y";;
    "q") QUIET="y";;
    "r") CALLRECORDSDIR="$OPTARG";;
    "s") SKIPONCAPTCHA="y";;
    "t") DELIMITER="$OPTARG";;
    "u") UMASK=$OPTARG;;
    "v") VERBOSE="y";;
    "w") COLUMNS=30000;; #suppress line-breaking
    "?") echo "Unknown option $OPTARG"; exit 1;;
    ":") echo "No argument value for option $OPTARG"; exit 1;;
    *) # Should not occur
    echo "Unknown error while processing options"; exit 1;;
  esac
done
shift $(($OPTIND-1))
# Show debug info (CGI invocation details; only meaningful with -d in CGI mode)
[ -n "$DEBUG" -a -n "$QUERY_STRING" ] && echo -e "QUERY_STRING: '$QUERY_STRING'\nOPTS: '$OPTS'"
# Show author information; ${THIS//?/=} produces a '=' underline the same
# length as the script name
[ -z "$QUIET" -o -n "$HELP$CHANGELOG" ] && echo -e "\n$THIS v$VERSION by Dominic\n${THIS//?/=}"
# Show help (-h): one large double-quoted string, word-wrapped by 'fold'
# to the terminal width detected earlier; trailing backslashes inside the
# string are line-continuations and join the following line.
if [ -n "$HELP" ]; then
echo -e "\nGNU/Linux program to notify if credit on one or more \
Dellmont/Finarea/Betamax voip \
provider accounts is running low. Once successfully tested it can be run \
as daily cron job with -q option and -m email_address option \
so that an email is generated when action to top up \
credit on the account is required. Can also run under MS Windows using Cygwin \
(http://www.cygwin.com/), or can be run as CGI job on Linux/Apache webserver.
Usage: `basename $0` [option]
Conffile:
A conffile should be in the same directory as $THIS with name \
$(basename $THIS .sh).conf, or if elsewhere or differently named then be specified by option -f, and should contain one or more lines giving the \
Dellmont/Finarea/Betamax account details in the form:
website username password [test_credit_level_in_cents/pence] [credit_reduction_in_cents/pence] [credit_recordfile]
where the test_credit_level_in_cents/pence is >=100 or 0 (0 means 'never send \
email'). If you don't specify a test_credit_level_in_cents/pence then the \
current credit level is always displayed (but no email is ever sent).
If you specify them, the credit_reduction and credit_recordfile work together \
to perform an additional test. The program will record in credit_recordfile \
the amount of credit for the given portal each time it is run, and notify you \
if the credit has reduced since the last time by more than the \
credit_reduction. This can be useful to warn you of unusual activity on \
the account or of a change in tariffs that is significant for you. \
Set the credit_reduction_in_cents/pence to a level that is more than you \
would expect to see consumed between consecutive (e.g. daily) runs of $THIS \
e.g. 2000 (for 20 euros/day or 20 dollars/day).
Here's an example single-line conffile to generate a warning \
email if the credit \
on the www.voipdiscount.com account falls below 3 euros (or dollars):
www.voipdiscount.com myaccount mypassword 300
Temporary_Files:
Temporary files are saved with 600 permissions in \$TEMP which is set to a \
standard location, normally /tmp, unless it is already defined (so you can \
define it if you want a special location). Unless run with debug option, \
all such files are deleted after running - except the cookiejar file which \
is retained so it can be reused. (The same cookiejar file is also used, if \
found, by get-vt-cdrs.sh.)
Call Records History:
You can use options -r and -t to download call records and append them to a \
specified file.
CGI_Usage:
Here is an example of how you could use $THIS on your own (presumably \
internal) website (with CGI configured appropriately on your webserver):
http://www.mywebsite.com/$THIS?options=-vf%20/path/to/my_conf_file.conf%20-m%20me@mymailaddress.com
Options:
-c [path] - save captcha images (if any) at path (default is current path)
-d debug - be very verbose and retain temporary files
-f [path/conffile] - path and name of conffile
-h show this help and exit
-l show changelog and exit
-m [emailaddress] - send any messages about low credit or too-rapidly-falling credit to the specified address (assumes sendmail is available and working)
-M \"[month-year-page]\" - specify a specific earlier month and page for call record history retrieval (with -r option) - format '/MM/YYYY/P'
-n delete any existing cookies and start over
-p pause on cookie expiry - wait 2 minutes if cookies have expired before \
trying to login (because cookies are usually for 24 hours exactly this should \
allow a second login 24 hours later without requiring new cookies)
-q quiet
-r [path/file] - specify a directory for call record history files (per \
website and account) - data is appended to any existing files
-s skip if captcha code is requested (e.g. for unattended process)
-t [char] - if extracting call records (-r), this specifies the field separator character (default comma)
-u set umask for any created files (default 177: files are readable/writable only by current user)
-v be more verbose
Dependencies: awk, bash, coreutils, curl, grep, openssl, sed, [sendmail], umask
License: Copyright © 2022 Dominic Raferd. Licensed under the Apache License, \
Version 2.0 (the \"License\"); you may not use this file except in compliance \
with the License. You may obtain a copy of the License at \
https://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable \
law or agreed to in writing, software distributed under the License is \
distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY \
KIND, either express or implied. See the License for the specific language \
governing permissions and limitations under the License.
Portal List:
Here is a list of websites / sip portals belonging to and/or operated by \
Dellmont. To find more, google \"is a service from dellmont sarl\" \
(with the quotes). Try a portal with $THIS - it might work!
If one of these (or \
another which you know is run by Dellmont) does not work, run $THIS with -d \
option and drop me an email attaching the temporary files (two or three \
per portal, password is stripped out anyway).
https://www.12voip.com
https://www.actionvoip.com
https://www.aptvoip.com
https://www.bestvoipreselling.com
https://www.calleasy.com
https://www.callingcredit.com
https://www.cheapbuzzer.com
https://www.cheapvoip.com
https://www.cosmovoip.com
https://www.dialcheap.com
https://www.dialnow.com
https://www.discountvoip.co.uk
https://www.easyvoip.com
https://www.freecall.com
https://www.freevoipdeal.com
https://www.frynga.com
https://www.hotvoip.com
https://www.internetcalls.com
https://www.intervoip.com
https://www.jumblo.com
https://www.justvoip.com
https://www.lowratevoip.com
https://www.megavoip.com
https://www.netappel.fr
https://www.nonoh.net
https://www.pennyconnect.com
https://www.poivy.com
https://www.powervoip.com
https://www.rebvoice.com
https://www.rynga.com
https://www.scydo.com
https://www.sipdiscount.com
https://www.smartvoip.com
https://www.smsdiscount.com
https://www.smslisto.com
https://www.stuntcalls.com
https://www.supervoip.com
https://www.voicetrading.com
https://www.voipblast.com
https://www.voipblazer.com
https://www.voipbuster.com
https://www.voipbusterpro.com
https://www.voipcheap.co.uk
https://www.voipcheap.com
https://www.voipdiscount.com
https://www.voipgain.com
https://www.voipmove.com
https://www.voippro.com
https://www.voipraider.com
https://www.voipsmash.com
https://www.voipstunt.com
https://www.voipwise.com
https://www.voipzoom.com
https://www.webcalldirect.com
A page showing relative prices for many of these sites may be found at http://backsla.sh/betamax - it may or may not still be current.
"|fold -s -w $COLUMNS
fi
# Show changelog (-l); like the help text, one big string folded to the
# terminal width. The "Changelog:" heading is only printed when help (-h)
# was also requested.
if [ -n "$CHANGELOG" ]; then
[ -n "$HELP" ] && echo "Changelog:" || echo
echo "\
4.5.3 [09 Sep 2019]: read gpg2-encrypted conf file (if .gpg filename extension)
4.5.2 [15 Aug 2019]: bugfix for freedays, add -M option (kudos: Mathias Rothe)
4.5.1 [12 Aug 2019]: bugfixes, change -r option to set the output directory (not file)
4.5.0 [07 Aug 2019]: add -r and -t options and show Freedays (if any) (kudos: Mathias Rothe)
4.4.6 [16 Jun 2019]: hide openssl 1.1.1 'deprecated key derivation' message when decrypting
4.4.5 [11 Apr 2019]: update to work with pounds sterling (kudos: Mathias Rothe)
4.4.4 [10 Apr 2019]: update to try to work with captcha
4.4.3 [31 Jul 2018]: update to work with OpenSSL 1.1.0g (backwards compatible)
4.4.2 [27 Mar 2017]: add -u option (set umask)
4.4.1 [29 Jun 2016]: rename cookiejar and temporary files to include userid (number) rather than username
4.4.0 [25 Mar 2016]: bugfix
4.3.9 [16 Mar 2016]: bugfix
4.3.8 [15 Mar 2016]: set permissions of all files created to 600, to secure from other users, move cookiejar files back to \$TEMP and rename cookiejar filename to include \$USER so that multiple users do not overwrite one another's cookiejars
4.3.7 [19 Feb 2016]: if the specified credit_recordfile can't be accessed, show warning instead of failing
4.3.6 [08 Feb 2016]: bugfix for credit <100 eurocents
4.3.5 [18 May 2015]: move cookiejar file location to /var/tmp
4.3.4 [01 Oct 2014]: minor bugfix
4.3.3 [06 Sep 2014]: allow checking of multiple accounts for same provider
4.3.2 [05 Sep 2014]: improvements to debug text and error output
4.3.1 [23 Jul 2014]: warning message if no lines found in conf file
4.3.0 [28 Nov 2013]: use local openssl for decryption (when \
required) instead of remote web call (thanks Loran)
4.2.0 [03 Nov 2013]: a lot of changes! Enable CGI usage, remove \
command-line setting of conffile and email and instead specify these by -f \
and -m options. Test_credit_level_in_cents is now optional in conffile. Add \
-v (verbose) option. Squash a bug causing failure if a captcha was requested.
4.1.1 [01 Nov 2013]: select the reported 'user-agent' randomly from a few
4.1.0 [01 Nov 2013]: local solution is tried before relying on remote \
decryption call (thanks Loran)
4.0.5 [01 Nov 2013]: fix for low-balance or $ currency
4.0.1 [30 Oct 2013]: fix magictag decryption
4.0.0 [29 Oct 2013]: works again, requires an additional decryption web call \
- note a change to conf file format
3.6 [21 Oct 2013]: works sometimes...
3.5 [04 Oct 2013]: small tweaks but more reliable I think...
3.4 [03 Oct 2013]: retrieves captcha image but still not reliable :(
3.3 [29 Sep 2013]: correction for new credit display code
3.2 [18 Sep 2013]: corrected for new login procedure
3.1 [10 Oct 2012]: minor text improvements
3.0 [27 Aug 2012]: minor text correction for credit reduction
2.9 [16 Aug 2012]: added optional credit reduction notification
2.8 [27 Jun 2012]: now works with www.cheapbuzzer.com, added \
a list of untested Dellmont websites to the help information
2.7 [25 May 2012]: now works with www.webcalldirect.com
2.6 [25 May 2012]: fix to show correct credit amounts if >=1000
2.5 [15 May 2012]: fix for added hidden field on voipdiscount.com
2.4 [10 May 2012]: improved debug information, voicetrading.com \
uses method 2, rename previously-named fincheck.sh as \
dellmont-credit-checker.sh
2.3 [04 May 2012]: improved debug information
2.2 [03 May 2012]: further bugfixes
2.1 [03 May 2012]: now works with www.voipbuster.com
2.0315 [15 Mar 2012]: allow comment lines (beginning with \
hash #) in conffile
2.0313 [13 Mar 2012]: changes to email and help text and \
changelog layout, and better removal of temporary files
2.0312 [10 Mar 2012]: improve help, add -l changelog option, remove \
deprecated methods, add -d debug option, tidy up temporary files, \
use conffile instead of embedding account data directly in \
script, first public release
2.0207 [07 Feb 2012]: new code uses curl for voipdiscount.com
2.0103 [03 Jan 2012]: no longer uses finchecker.php or fincheck.php \
unless you select \
deprecated method; has 2 different approaches, one currently works for \
voipdiscount, the other for voicetrading.
1.3 [21 Jun 2010]: stop using external betamax.sh, now uses external \
fincheck.php via finchecker.php, from \
http://simong.net/finarea/, using fincheck.phps for fincheck.php; \
finchecker.php is adapted from example.phps
1.2 [03 Dec 2008]: uses external betamax.sh script
1.1 [17 May 2007]: allow the warning_credit_level_in_euros to be set separately on \
each call
1.0 [05 Jan 2007]: written by Dominic, it is short and sweet and it works!
"|fold -sw $COLUMNS
fi
# Exit immediately when only help and/or changelog output was requested.
[ -n "$HELP$CHANGELOG" ] && exit
# Show debug info
[ -n "$DEBUG" ] && echo -e "Debug mode"
# Ensure that all files created are readable/writeable only by current user
umask $UMASK
# Determine the conffile: -f option, first positional argument, or the
# default <script_dir>/<script_name>.conf next to this script.
if [ -z "$CONFFILE" ]; then
  if [ -s "$1" ]; then
    CONFFILE="$1"
  else
    CONFFILE="$(dirname "$0")/$(basename "$0" .sh).conf"
  fi
fi
[ -n "$DEBUG" ] && echo -e "CONFFILE: '$CONFFILE'\nCALLRECORDSDIR: '$CALLRECORDSDIR'\nDELIMITER: '$DELIMITER'"
# Bugfix: report the conffile actually used (previously printed '$1', which
# is empty when the default path or the -f option is in effect).
if [ ! -s "$CONFFILE" ]; then
  echo "Cannot find conf file '$CONFFILE', aborting">&2
  exit 1
fi
[[ -z $CALLRECORDSDIR || -d $CALLRECORDSDIR ]] || { echo "Can't locate call records directory '$CALLRECORDSDIR', aborting" >&2; exit 1; }
# Announce the notification address (unless quiet or none supplied).
if [ -n "$EMAIL" ] && [ -z "$QUIET" ]; then
  echo -e "Any low credit warnings will be emailed to $EMAIL\n"
fi
# Normalise CAPTCHAPATH: guarantee a trailing slash, then verify it exists.
if [ -n "$CAPTCHAPATH" ];then
  case "$CAPTCHAPATH" in
    */) ;;  # already ends with a slash
    *)
      CAPTCHAPATH="${CAPTCHAPATH}/"
      [ -n "$DEBUG" ] && echo "CAPTCHAPATH amended to: '$CAPTCHAPATH'"
      ;;
  esac
  if [ ! -d "$CAPTCHAPATH" ]; then
    echo "Could not find path '$CAPTCHAPATH', aborting...">&2
    exit 1
  fi
fi
# select (fake) user agent from a few possibles
# skip over Safari and IE because with them we never get the embedded hiddentag
#Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25
#Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2
#Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10
#Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko ) Version/5.1 Mobile/9B176 Safari/7534.48.3
#Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0
#Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)
#Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)
#Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)
#Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)
#Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)
#Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)
#Mozilla/1.22 (compatible; MSIE 10.0; Windows 3.1)
# Pipe the candidate list straight through 'sort -R' (random order) and take
# the first entry. Avoids the previous write/read/delete of a temporary file
# in $TEMP (cleanup, permissions and race concerns for no benefit).
USERAGENT="$(sort -R <<'USERAGENT_LIST' | head -n 1
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:25.0) Gecko/20100101 Firefox/25.0
Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:25.0) Gecko/20100101 Firefox/25.0
Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0
Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0
Mozilla/5.0 (Windows NT 6.0; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0
Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:24.0) Gecko/20100101 Firefox/24.0
Mozilla/5.0 (Windows NT 6.2; rv:22.0) Gecko/20130405 Firefox/23.0
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:23.0) Gecko/20131011 Firefox/23.0
Mozilla/5.0 (Windows NT 6.2; rv:22.0) Gecko/20130405 Firefox/22.0
Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:22.0) Gecko/20130328 Firefox/22.0
Mozilla/5.0 (Windows NT 6.1; rv:22.0) Gecko/20130405 Firefox/22.0
Mozilla/5.0 (Windows NT 6.1; WOW64; rv:22.0) Gecko/20100101 Firefox/22.0
USERAGENT_LIST
)"
[ -n "$DEBUG" ] && echo "Selected user agent: \"$USERAGENT\""
# Loop through conffile line by line
LINENUM=0; ERRS=0
# A conffile ending in .gpg is decrypted on the fly with gpg2; otherwise read as-is
[[ ${CONFFILE: -4} == ".gpg" ]] && READCONF="gpg2 -d $CONFFILE" || READCONF="cat $CONFFILE"
while read LINE; do
let LINENUM++
[ -n "$DEBUG" ] && { echo -n "conffile line $LINENUM :"; echo "$LINE"|awk '{printf $1 " " $2 "..." }'; }
# Skip blank lines and comment lines (leading '#')
if [ -n "$LINE" -a "${LINE:0:1}" != "#" ]; then
[ -n "$DEBUG" ] && echo -n " - checking"
# $LINE is intentionally unquoted: its whitespace-separated fields become
# the separate arguments (website username password [level] [fall] [file])
check_credit_level $LINE; CERR=$?
[ $CERR -eq 0 ] || { let ERRS++; echo -n "credit_check_level reported error $CERR for "; echo "$LINE"|awk '{print $1 " " $2 }'; }
elif [ -n "$DEBUG" ]; then
echo " - skipping"
fi
[ -n "$DEBUG" ] && echo
done < <($READCONF)
[ $LINENUM -eq 0 ] && echo "Could not find any lines in $CONFFILE to process, did you miss putting an EOL?" >&2
[ -n "$DEBUG" ] && echo "Completed with ERRS: '$ERRS'"
# Exit status is the number of accounts that failed to check (0 = all ok)
exit $ERRS
| true |
2e476a2c12b01ed501c3cf9ea3aa50bdffe4c99f | Shell | iridium-browser/iridium-browser | /third_party/breakpad/breakpad/android/common-functions.sh | UTF-8 | 9,551 | 3.5625 | 4 | [
"BSD-3-Clause",
"APSL-1.0",
"FSFAP",
"LicenseRef-scancode-unicode",
"BSD-4-Clause-UC",
"MIT",
"curl",
"Autoconf-exception-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-other-copyleft",
"GPL-1.0-or-later",
"Autoconf-exception-3.0",
"ClArtistic",
"LGPL-2.1-or-later",
... | permissive | # Copyright 2012 Google LLC
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google LLC nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Collection of common shell functions for 'run-checks.sh' and 'test-shell.sh'.
# All internal variables and functions use an underscore as a prefix
# (e.g. _VERBOSE, _ALL_CLEANUPS, etc.).

# Force the predictable C locale so tool output parses consistently.
export LANG=C
export LC_ALL=C
# Under bash, opt into stricter POSIX behaviour.
if [ -n "$BASH_VERSION" ]; then
  set -o posix
fi
# Utility functions
_ALL_CLEANUPS=
# Register a function to be called when the script exits, even in case of
# Ctrl-C, logout, etc.
# NOTE: names are stored space-separated, so registered function names must
# not contain whitespace.
# $1: function name.
atexit () {
    if [ -z "$_ALL_CLEANUPS" ]; then
        _ALL_CLEANUPS=$1
        # Install the trap lazily on the first registration only; later
        # calls just append to the list.
        # Ensure a clean exit when the script is:
        # - Exiting normally (EXIT)
        # - Interrupted by Ctrl-C (INT)
        # - Interrupted by log out (HUP)
        # - Being asked to quit nicely (TERM)
        # - Being asked to quit and dump core (QUIT)
        # \$? is escaped so the *final* exit status is read when the trap fires.
        trap "_exit_cleanups \$?" EXIT INT HUP QUIT TERM
    else
        _ALL_CLEANUPS="$_ALL_CLEANUPS $1"
    fi
}
# Called on exit if at least one function was registered with atexit
# $1: final exit status code
_exit_cleanups () {
    local CLEANUP CLEANUPS
    # Ignore calls to atexit during cleanups
    CLEANUPS=$_ALL_CLEANUPS
    _ALL_CLEANUPS=
    for CLEANUP in $CLEANUPS; do
        # Run each cleanup in a subshell so one failing (or calling exit)
        # cannot prevent the remaining cleanups from running.
        ($CLEANUP)
    done
    # Re-exit with the original status captured by the trap.
    exit "$@"
}
# Print a panic message on stderr, then terminate the script with status 1.
# $1+: message
panic () {
    printf 'ERROR: %s\n' "$*" >&2
    exit 1
}
# If the previous command failed, dump a panic message then exit.
# NOTE: must be invoked immediately after the command being checked, since
# the function inspects $? on entry.
# $1+: message.
fail_panic () {
    if [ $? != 0 ]; then
        panic "$@"
    fi;
}
_VERBOSE=0
# Raise the verbosity level used by dump/log/run/run2.
increase_verbosity () {
    _VERBOSE=$((_VERBOSE + 1))
}
# Lower the verbosity level used by dump/log/run/run2.
decrease_verbosity () {
    _VERBOSE=$((_VERBOSE - 1))
}
# Succeed iff the verbosity level is strictly greater than a given value.
# $1: verbosity level
verbosity_is_higher_than () {
    test "$_VERBOSE" -gt "$1"
}
# Succeed iff the verbosity level is lower than *or equal to* a given value.
# NOTE: despite the name, the comparison is <= (kept for compatibility with
# existing callers).
# $1: verbosity level
verbosity_is_lower_than () {
    test "$_VERBOSE" -le "$1"
}
# Print a message on stdout unless verbosity is negative (i.e. --quiet).
# $1+: message
dump () {
    [ "$_VERBOSE" -lt 0 ] && return
    printf "%s\n" "$*"
}
# Print a message on stdout only when --verbose was used ($_VERBOSE >= 1).
# $1+: message
log () {
    [ "$_VERBOSE" -lt 1 ] && return
    printf "%s\n" "$*"
}
_RUN_LOG=
# Select the run log file used to collect the output of commands that are
# not displayed at the current verbosity level.
# $1: log file path (empty disables logging)
set_run_log () {
    _RUN_LOG="$1"
}
# Run a command. Output depends on $_VERBOSE:
#   $_VERBOSE <= 0:  Run command, store output into the run log
#   $_VERBOSE >= 1:  Dump command, run it, output goes to stdout
# Note: Ideally, the command's output would go to the run log for $_VERBOSE >= 1
#       but the 'tee' tool doesn't preserve the status code of its input pipe
#       in case of error.
# $1+: command and its arguments
# Return: the command's exit status
run () {
    # Bug fix: this was mistyped as 'local LOGILE', which silently leaked
    # LOGFILE into the caller's (global) scope on every invocation.
    local LOGFILE
    if [ "$_RUN_LOG" ]; then
        LOGFILE=$_RUN_LOG
    else
        LOGFILE=/dev/null
    fi
    if [ "$_VERBOSE" -ge 1 ]; then
        echo "COMMAND: $@"
        "$@"
    else
        # Quote the log path so a run log containing spaces still works.
        "$@" >>"$LOGFILE" 2>&1
    fi
}
# Same as run(), but the command's output is only displayed for $_VERBOSE >= 2
# (the command line itself is still echoed for $_VERBOSE >= 1).
# $1+: command and its arguments
# Return: the command's exit status
run2 () {
    # Bug fix: the 'local LOGILE' typo made LOGFILE leak into global scope.
    local LOGFILE
    if [ "$_RUN_LOG" ]; then
        LOGFILE=$_RUN_LOG
    else
        LOGFILE=/dev/null
    fi
    if [ "$_VERBOSE" -ge 1 ]; then
        echo "COMMAND: $@"
    fi
    if [ "$_VERBOSE" -ge 2 ]; then
        "$@"
    else
        "$@" >>"$LOGFILE" 2>&1
    fi
}
# Determine the number of CPU cores, used to speed up parallel builds.
# Out: number of CPU cores (1 when the platform is not recognized)
get_core_count () {
    local os
    os=$(uname -s)
    case "$os" in
        Linux)
            grep -c -e '^processor' /proc/cpuinfo
            ;;
        Darwin)
            sysctl -n hw.ncpu
            ;;
        CYGWIN*|*_NT-*)
            echo $NUMBER_OF_PROCESSORS
            ;;
        *)
            echo 1
            ;;
    esac
}
# Check for the Android ADB program.
#
# On success, return nothing, but updates internal variables so later calls to
# adb_shell, adb_push, etc.. will work. You can get the path to the ADB program
# with adb_get_program if needed.
#
# On failure, returns 1, and updates the internal adb error message, which can
# be retrieved with adb_get_error.
#
# $1: optional ADB program path.
# Return: success or failure.
_ADB=
_ADB_STATUS=
_ADB_ERROR=
adb_check () {
    # First, try to find the executable in the path, or the SDK install dir.
    _ADB=$1
    if [ -z "$_ADB" ]; then
        # 'command -v' is the POSIX replacement for the non-standard 'which'.
        _ADB=$(command -v adb 2>/dev/null)
        # Two chained tests instead of the obsolescent '[ expr1 -a expr2 ]'.
        if [ -z "$_ADB" ] && [ "$ANDROID_SDK_ROOT" ]; then
            _ADB=$ANDROID_SDK_ROOT/platform-tools/adb
            if [ ! -f "$_ADB" ]; then
                _ADB=
            fi
        fi
        if [ -z "$_ADB" ]; then
            _ADB_STATUS=1
            _ADB_ERROR="The Android 'adb' tool is not in your path."
            return 1
        fi
    fi
    log "Found ADB program: $_ADB"
    # Check that it works correctly by probing its version banner.
    local ADB_VERSION
    ADB_VERSION=$("$_ADB" version 2>/dev/null)
    case $ADB_VERSION in
        "Android Debug Bridge "*) # Pass
            log "Found ADB version: $ADB_VERSION"
            ;;
        *) # Fail
            _ADB_ERROR="Your ADB binary reports a bad version ($ADB_VERSION): $_ADB"
            _ADB_STATUS=1
            return 1
    esac
    _ADB_STATUS=0
    return 0
}
# Return the path to the Android ADB program, if correctly detected.
# On failure, return the empty string.
# Out: ADB program path (or empty on failure)
# Return: success or failure.
adb_get_program () {
    # Return cached value as soon as possible.
    if [ -z "$_ADB_STATUS" ]; then
        # NOTE(review): $1 is intentionally unquoted so an absent argument
        # collapses to "no argument"; a path containing spaces would split.
        adb_check $1
    fi
    echo "$_ADB"
    return $_ADB_STATUS
}
# Print the error message recorded by the last failing ADB helper call.
adb_get_error () {
    printf '%s\n' "$_ADB_ERROR"
}
# Check that there is one device connected through ADB.
# In case of failure, use adb_get_error to know why this failed.
# $1: Optional adb program path
# Return: success or failure.
_ADB_DEVICE=
_ADB_DEVICE_STATUS=
adb_check_device () {
    # Result is cached after the first call (success or failure).
    if [ "$_ADB_DEVICE_STATUS" ]; then
        return $_ADB_DEVICE_STATUS
    fi
    # Check for ADB.
    if ! adb_check $1; then
        _ADB_DEVICE_STATUS=$_ADB_STATUS
        return 1
    fi
    local ADB_DEVICES NUM_DEVICES FINGERPRINT
    # Count the number of connected devices ('adb devices' rows whose second
    # column is the literal state "device").
    ADB_DEVICES=$("$_ADB" devices 2>/dev/null | awk '$2 == "device" { print $1; }')
    NUM_DEVICES=$(echo "$ADB_DEVICES" | wc -l)
    case $NUM_DEVICES in
        0)
            _ADB_ERROR="No Android device connected. Please connect one to your machine."
            _ADB_DEVICE_STATUS=1
            return 1
            ;;
        1) # Pass
            # Ensure the same device will be called in later adb_shell calls.
            export ANDROID_SERIAL=$ADB_DEVICES
            ;;
        *) # 2 or more devices: require the user to pre-select one.
            if [ "$ANDROID_SERIAL" ]; then
                ADB_DEVICES=$ANDROID_SERIAL
                NUM_DEVICES=1
            else
                _ADB_ERROR="More than one Android device connected. \
Please define ANDROID_SERIAL in your environment"
                _ADB_DEVICE_STATUS=1
                return 1
            fi
            ;;
    esac
    _ADB_DEVICE_STATUS=0
    _ADB_DEVICE=$ADB_DEVICES
    # Log the device fingerprint for diagnostics (only shown with --verbose).
    FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
    log "Using ADB device: $ANDROID_SERIAL ($FINGERPRINT)"
    return 0
}
# The 'adb shell' command is pretty hopeless, try to make sense of it by:
# 1/ Removing trailing \r from line endings.
# 2/ Ensuring the function returns the command's status code.
#
# $1+: Command
# Out: command output (stdout + stderr combined)
# Return: command exit status
adb_shell () {
    local RET ADB_LOG
    # Check for ADB device.
    adb_check_device || return 1
    ADB_LOG=$(mktemp "${TMPDIR:-/tmp}/adb-XXXXXXXX")
    # '; echo $?' runs on the device so the remote status lands in the output
    # ('adb shell' itself reports success regardless of the remote command).
    "$_ADB" shell "$@" ";" echo \$? > "$ADB_LOG" 2>&1
    sed -i -e 's![[:cntrl:]]!!g' "$ADB_LOG" # Remove \r.
    RET=$(sed -e '$!d' "$ADB_LOG") # Last line contains status code.
    sed -e '$d' "$ADB_LOG" # Print everything except last line.
    rm -f "$ADB_LOG"
    return $RET
}
# Push a file to a device.
# $1: source file path
# $2: device target file path
# Return: success or failure.
adb_push () {
    adb_check_device || return 1
    # Delegate to run() so the transfer honors the verbosity / run log setup.
    run "$_ADB" push "$1" "$2"
}
# Pull a file from a device
# $1: device file path
# $2: target host file path
# Return: success or failure.
adb_pull () {
    adb_check_device || return 1
    # Delegate to run() so the transfer honors the verbosity / run log setup.
    run "$_ADB" pull "$1" "$2"
}
# Same as adb_push, but will panic if the operations didn't succeed.
adb_install () {
    adb_push "$@"
    # fail_panic inspects $? from the adb_push call directly above.
    fail_panic "Failed to install $1 to the Android device at $2"
}
| true |
361e574a6beccf269dd0b021593efb3b3559fae6 | Shell | guillaumewuip/bitbar-plugins | /Network/network-toggler.sh | UTF-8 | 1,364 | 3.453125 | 3 | [] | no_license | #!/bin/bash
# <bitbar.title>Network Toggler</bitbar.title>
# <bitbar.version>v1.0</bitbar.version>
# <bitbar.author>David Shrestha</bitbar.author>
# <bitbar.author.github>davidshr10</bitbar.author.github>
# <bitbar.image>https://i.imgur.com/QRNTmet.png</bitbar.image>
# <bitbar.desc>Provides an easy way to toggle your network connections on and off.</bitbar.desc>
# <bitbar.dependencies>OS X 10.11</bitbar.dependencies>
# BitBar re-invokes this script with (service, action) when a menu entry is
# clicked; apply the requested toggle before rendering the menu.
if [ "$2" == 'toggle_on' ]; then
    networksetup -setnetworkserviceenabled "$1" on
elif [ "$2" == 'toggle_off' ]; then
    networksetup -setnetworkserviceenabled "$1" off
fi
# List all services; awk keeps everything after the "disabled." banner line.
# (Variable name "INTERFANCES" is a historical typo, kept as-is.)
NETWORK_INTERFANCES=$(networksetup -listallnetworkservices |\
 awk '/disabled\./,EOF { if(NR>1) print $0 }')
network_icon="iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAYAAAAeP4ixAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAAFiUAABYlAUlSJPAAAAD2SURBVGhD7ZhRDoJADETxXN5DD+Ph/OBgukPoB42YXeh0N2Re0kBIszOPyI+TEEIIIcR1uK9XBsyzN7zKfNZrNMyzN1iQTWQg8+wNPig6jH3+QkpIgZqTJWFQ8rIljNDcXhJGSH5vCeNUj1EkjEN9RpMwmnrVLt/KPMvM6+AezzzRe1X9aiUAgvzuo4wneg/87dkiAd5l/D6eeaL3jN2+lxEBLTLD/rSMWhl8iAjCW8Pgfu8jjtyr7bfQtJzIoV6jyZzqM4pMSI/eMqH5vWQoudky1LwsmZQcdkiKhOHDmCI0CcMCGUHMs39yib9MhRBCCCGoTNMX1Vc8LLB3HSAAAAAASUVORK5CYII="
echo "|templateImage=$network_icon"
echo "---"
# Disabled services are prefixed with '*' by networksetup.
while read -r line; do
    echo "$line"
    if [ "${line:0:1}" == '*' ]; then
        interface="${line:1}"
        echo "-- Toggle On | bash='$0' param1='$interface' param2='toggle_on' terminal=false refresh=true color=#3ab24c"
    else
        echo "-- Toggle Off | bash='$0' param1=$line param2='toggle_off' terminal=false refresh=true color=#d65342"
    fi
done <<< "$NETWORK_INTERFANCES"
echo "---"
echo "Network Preferences | bash='$0' param1=launch-system-preferences terminal=false refresh=true"
if [ "$1" = 'launch-system-preferences' ]; then
    open /System/Library/PreferencePanes/Network.prefPane
fi | true |
dfadbdf1fc66490a97ed69b9d87fed1c94c1c862 | Shell | liuluo1979/openwrt-package | /lienol/luci-app-softethervpn/root/usr/share/softethervpn/firewall.include | UTF-8 | 2,350 | 3.453125 | 3 | [] | no_license | #!/bin/sh
CONFIG=softethervpn
# OpenWrt config_foreach callback: reads the boolean 'enable' option of UCI
# section $1 into the global 'enable' (config_get_bool comes from the
# sourced OpenWrt /lib/functions.sh helpers).
get_config()
{
	config_get_bool enable $1 enable 0
}
# Read option $2 from the $4-th (default: first) section of type $1 in the
# $CONFIG package, echoing default value $3 when the option is unset.
uci_get_by_type() {
	local index=0
	# Bug fix: the unquoted '[ -n $4 ]' is always true when $4 is missing
	# (it degenerates to the one-argument form), which left 'index' empty.
	if [ -n "${4:-}" ]; then
		index=$4
	fi
	local ret
	# Quote the lookup key: the unquoted '[$index]' is a glob pattern and
	# could be expanded against files in the current directory.
	ret=$(uci get "$CONFIG.@$1[$index].$2" 2>/dev/null)
	echo "${ret:=$3}"
}
# Which SoftEther front-ends are enabled in the UCI config.
l2tp=$(uci_get_by_type softether l2tp)
openvpn=$(uci_get_by_type softether openvpn)
sstp=$(uci_get_by_type softether sstp)
# OpenVPN UDP port is read from the server config; fall back to 1194.
openvpnport=$(cat /usr/libexec/softethervpn/vpn_server.config 2>/dev/null|grep OpenVPN_UdpPortList | awk -F " " '{print $3}')
[ -z "$openvpnport" ] && openvpnport=1194
# Delete-then-reinsert pattern: remove any previously installed rules first
# (errors suppressed when they do not exist yet).
iptables -D INPUT -p udp -m multiport --dports 500,1701,4500 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
[ -n "$openvpnport" ] && iptables -D INPUT -p udp --dport $openvpnport -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
[ -n "$openvpnport" ] && iptables -D INPUT -p tcp --dport $openvpnport -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
iptables -D INPUT -p tcp --dport 443 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
iptables -D INPUT -p tcp --dport 5555 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
iptables -D INPUT -p tcp --dport 8888 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
iptables -D INPUT -p tcp --dport 992 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
# Re-add the accept rules only when the service itself is enabled.
# NOTE(review): '$enable' is unquoted; '[ -eq ]' would error out if the UCI
# option were missing — confirm the option always exists.
enable=$(uci get softethervpn.@softether[0].enable)
if [ $enable -eq 1 ]; then
	if [ "$l2tp" = "1" ];then
		iptables -I INPUT -p udp -m multiport --dports 500,1701,4500 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT 2>/dev/null
	fi
	if [ "$openvpn" = "1" ];then
		iptables -I INPUT -p udp --dport $openvpnport -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT
		iptables -I INPUT -p tcp --dport $openvpnport -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT
	fi
	if [ "$sstp" = "1" ];then
		iptables -I INPUT -p tcp --dport 443 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT
	fi
	iptables -I INPUT -p tcp --dport 5555 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT
	iptables -I INPUT -p tcp --dport 8888 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT
	iptables -I INPUT -p tcp --dport 992 -m comment --comment "Rule For SoftEther VPN Server" -j ACCEPT
fi
| true |
4bb45b2f791ef5c45890c143d45fd99009c48e0e | Shell | kemitchell/cases | /opinion | UTF-8 | 445 | 3.453125 | 3 | [] | no_license | #!/bin/zsh
set -e
# Require a case file argument.
if [[ -z "$1" ]]; then
  echo "Usage: <case file>"
  exit 1
fi
base=$(basename "$1")
pdf="opinions/$base.pdf"
mkdir -p opinions
# Fetch the opinion PDF from S3 on first use; later runs use the cached copy.
if [[ ! -f "$pdf" ]]; then
  # 'grep -F' replaces the deprecated 'fgrep' alias.
  digest=$(grep -F "document: " "$1" | cut -d ' ' -f 2)
  if [[ -z "$digest" ]]; then
    echo "No digest"
    exit 1
  fi
  # Bug fix: '> /dev/null' used to sit on its own line (no trailing
  # backslash on the previous line), so it ran as a separate no-op command
  # and the aws output was never actually silenced.
  aws s3api get-object \
    --bucket kemitchell-documents \
    --key "$digest" \
    "$pdf" \
    > /dev/null
fi
nohup zathura "$pdf" &> /dev/null
| true |
bf72a794fb5b4bf4593822470c60415b9ae4f0b5 | Shell | yaswant/ysh | /ruler | UTF-8 | 2,266 | 3.84375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Display a simple pixel ruler.
#
# usage:
# ruler [-b <bg-colour>] [ruler-length] [major-interval] [minor-interval]
# all units in pixels.
#
# 2017-04-21 initial release. yaswant.pradhan.
# 2018-03-07 v1.0.1. update options. yp.
# -----------------------------------------------------------------------------
# set -e
DISPLAY_OPT=
ARGS=""
# Option parsing: getopts is restarted inside the outer loop so options and
# positional arguments may be freely interleaved on the command line.
while [ $# -gt 0 ]; do
    unset OPTIND
    unset OPTARG
    while getopts hvVb:c:f: opts; do
        case $opts in
            h) echo "usage: ${0##*/} [-b <bg-colour>] [-c <colour>] [-f <font-name>] [ruler-length] [major-interval] [minor-interval]"; exit 0 ;;
            v) DISPLAY_OPT='-rotate 90' ;;
            V) echo "${0##*/}: Version 1.0.1"; exit 0 ;;
            b) bcol="$OPTARG" ;;
            c) col="$OPTARG" ;;
            f) font="$OPTARG" ;;
        esac
    done
    shift $((OPTIND-1))
    ARGS="${ARGS} $1"
    shift
done
ARGS=($ARGS)
# -----------------------------------------------------------------------------
# Positional arguments with defaults: length 520px, major tick every 100px,
# minor tick every 5px.
rulerlength="${ARGS[0]}"
majorint="${ARGS[1]}"
minorint="${ARGS[2]}"
# NOTE(review): if $TMPDIR is unset this becomes "/ruler.gif" — confirm.
rulername="$TMPDIR/ruler.gif"
rulerlength="${rulerlength:=520}"
majorint="${majorint:=100}"
minorint="${minorint:=5}"
bgcol="${bcol:=orange}"
col="${col:=black}"
font="${font:=fixed}"
drawstring=""
# Build the line definitions for the ruler marks
for x1 in `seq 0 $majorint $rulerlength`; do
    drawstring="$drawstring line $x1,30 $x1,60"
    for x2 in 0 $(( majorint / 2 )); do
        (( offset = $x1 + $x2 ))
        drawstring="$drawstring line $offset,40 $offset,60"
        for x3 in `seq $minorint $minorint $(( majorint / 2))`; do
            (( offset2 = $offset + $x3 ))
            drawstring="$drawstring line $offset2,50 $offset2,60"
        done
    done
done
# Add labels (each number centered by nudging 12px left of the tick).
labelfont="-fill $col -font $font -pointsize 18 -draw"
labelstring="text 0,20 '0' "
for x3 in $majorint; do
    offset3=$(($x3 - 12 ))
    labelstring="$labelstring text $offset3,20 '$x3' "
done
for x4 in `seq $(( majorint * 2 )) $majorint $rulerlength`; do
    offset4=$(( $x4 - 12 ))
    labelstring="$labelstring text $offset4,20 '$x4' "
done
# Render the ruler with ImageMagick.
convert -size "${rulerlength}x60" xc:$bgcol -fill $col \
    -draw "$drawstring" $labelfont "$labelstring" "$rulername"
# Display ruler; use \ / * to rotate the ruler
display $DISPLAY_OPT -title 'Pixel Ruler' $rulername
# Housekeep
rm -rf $rulername
| true |
d0c04b785e6f2f4e7ed5fe7ced04bd503f329bbd | Shell | morningconsult/docker-credential-vault-login | /scripts/generate/mockgen.sh | UTF-8 | 1,643 | 3.046875 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Copyright 2019 The Morning Consult, LLC or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may
# not use this file except in compliance with the License. A copy of the
# License is located at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
set -e
# Required positional arguments (abort with a message when missing).
package=${1?Must provide package}
interfaces=${2?Must provide interface names}
outputfile=${3?Must provide an output file}
PROJECT_VENDOR="github.com/morningconsult/docker-credential-vault-login/vendor"
# Make the Go tool binaries (mockgen, goimports) reachable for every GOPATH entry.
export PATH="${GOPATH//://bin:}/bin:$PATH"
# Prepend the license header to the generated mock source.
data=$(
cat << EOF
// Copyright 2018 The Morning Consult, LLC or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
//         https://www.apache.org/licenses/LICENSE-2.0
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.
$(mockgen "${package}" "${interfaces}")
EOF
)
# Bug fix: quote the command substitution and the variable so output paths
# containing spaces are not word-split or glob-expanded.
mkdir -p "$(dirname "${outputfile}")"
# Strip the vendor prefix from import paths, then normalize imports.
echo "$data" | sed -e "s|${PROJECT_VENDOR}||" | goimports > "${outputfile}"
| true |
ac29f321a734f56ebfad53602fa51d9f5654f0ae | Shell | firstshivu/task2 | /jenkins/jobs/sample_project/builds/4/file.sh | UTF-8 | 151 | 2.96875 | 3 | [] | no_license | #!/bin/bash
# Assigns 10 to the *global* variable a (no 'local' keyword).
myfunc1() {
        a=10
}
# Computes c = a + b, where 'b' is local and 'a'/'c' are globals.
myfunc2() {
        local b=20
        # Bug fix: 'c=($a+$b)' created an *array* whose first element was the
        # literal string "10+20"; arithmetic expansion yields the actual sum.
        c=$((a + b))
}
# Exercise the functions, then print the resulting variables.
myfunc1
myfunc2
echo "a=$a"
# 'b' was declared local inside myfunc2, so it is unset (empty) out here.
echo "b=$b"
echo "c=$c"
| true |
6fe1b1752ce32f2a70c5c834985f746578c90315 | Shell | nijo34/Script | /calc.sh | UTF-8 | 617 | 3.78125 | 4 | [] | no_license | #!/bin/bash
echo "Calculator"
sum=0
i="y"
while [ $i = "y" ]
do
echo "Enter the first number:"
read n1
echo "Enter the second number"
read n2
echo "1.Addition"
echo "2.Subtraction"
echo "3.Multiplication"
echo "4.Division"
echo "Enter choice"
read ch
case $ch in
1)res=$(echo $n1 + $n2)
echo "Added result is: $res";;
2)res=$(echo $n1 -$n2 | bc -l)
echo "Subtracted result is: $res";;
3)res=$(echo "$n1 * $n2" | bc -l )
echo "Product is :$res";;
4) res=$(echo $n1 /$n2 | bc -l)
echo "Divided result is: $res";;
*) echo "invalid Choice"
esac
echo "Do you want to continue?"
read i
if [ $i != "y" ]
then
exit
fi
done
| true |
fa423925e596f61cc505e1fdbc0f5791cff09835 | Shell | delkyd/alfheim_linux-PKGBUILDS | /mingw-w64-mygui/PKGBUILD | UTF-8 | 1,811 | 2.78125 | 3 | [] | no_license | # Maintainer: Karl-Felix Glatzer <karl.glatzer@gmx.de>
# PKGBUILD metadata for the MinGW-w64 cross-compiled MyGUI library.
pkgbase=mingw-w64-mygui
pkgname=('mingw-w64-mygui')
pkgver=3.2.2
pkgrel=1
pkgdesc="A multilayer and overlappable GUI System for OGRE (mingw-w64)"
arch=('any')
url="http://mygui.info/"
license=('LGPL')
depends=('mingw-w64-crt' 'mingw-w64-boost' 'mingw-w64-ogre' 'mingw-w64-ois' 'mingw-w64-pkg-config')
# !strip/!buildflags: host flags and stripping do not apply to cross builds;
# staticlibs keeps the .a archives in the package.
options=(!strip !buildflags staticlibs)
makedepends=('mingw-w64-cmake' 'mingw-w64-gcc' 'mingw-w64-pkg-config' 'doxygen' 'graphviz' 'ttf-dejavu')
source=("https://github.com/MyGUI/mygui/archive/MyGUI$pkgver.tar.gz"
        'findogre.patch'
        'platforms.patch'
        'cmakeinstall.patch'
        'cmake-boost-system.patch')
md5sums=('0023a689a2a63febc2cc703f81f86c62'
         'bb4b844665e339fe1f5e596b8997d162'
         'd0fb901ad31fb43357d88ce091999652'
         '03369fc85c24a92af9a24b6e530fac40'
         '69c53e5e0b57a9537092dbaf0a0d03c2')
# Both cross targets are built in sequence by build()/package().
_architectures="i686-w64-mingw32 x86_64-w64-mingw32"
# Apply the bundled build-system patches to the unpacked source tree.
prepare() {
  cd $srcdir/mygui-MyGUI$pkgver
  patch -Np1 < ../findogre.patch
  patch -Np1 < ../platforms.patch
  patch -Np1 < ../cmakeinstall.patch
  patch -Np1 < ../cmake-boost-system.patch
}
# Out-of-tree CMake build, once per cross target.
build() {
  for _arch in ${_architectures}; do
    mkdir -p ${srcdir}/build-${_arch} && cd ${srcdir}/build-${_arch}
    # Host compiler flags would poison the cross compile.
    unset LDFLAGS CXXFLAGS
    ${_arch}-cmake .. \
      -DCMAKE_INSTALL_PREFIX=/usr/${_arch} \
      -DCMAKE_BUILD_TYPE="release" \
      -DMYGUI_SAMPLES_INPUT=2 \
      -DMYGUI_BUILD_DEMOS="False" \
      -DMYGUI_BUILD_TOOLS="False" \
      ../mygui-MyGUI$pkgver
    make
  done
}
# Install into the staging dir and strip the cross-compiled binaries with the
# target toolchain's strip (host strip cannot handle PE objects).
package() {
  for _arch in ${_architectures}; do
    cd ${srcdir}/build-${_arch}
    make DESTDIR=${pkgdir} install
    ${_arch}-strip -x -g ${pkgdir}/usr/${_arch}/bin/release/*.dll
    ${_arch}-strip -g ${pkgdir}/usr/${_arch}/lib/release/*.a
  done
}
# vim:set ts=2 sw=2 et:
| true |
6c610530d00426c03ea730118b4f0fc90f205c9f | Shell | apache/cloudstack | /scripts/vm/hypervisor/xenserver/logrotate | UTF-8 | 1,152 | 2.859375 | 3 | [
"GPL-2.0-only",
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"GPL-1.0-or-later",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"LicenseRef-scancode-unknown"
] | permissive | #!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Version @VERSION@
#
# script to perform logrotation on xenserver 6.0.2 and later
# Rotate the cloud log; on failure, record an alert in syslog.
/usr/sbin/logrotate /etc/logrotate.d/cloudlog
EXITVALUE=$?
if [ $EXITVALUE != 0 ]; then
    /usr/bin/logger -t logrotate "ALERT exited abnormally with [$EXITVALUE]"
fi
# XenServer-specific cleanup helper, invoked only where present.
[ -f /opt/xensource/bin/delete_old_logs_by_space ] && /opt/xensource/bin/delete_old_logs_by_space
exit 0
| true |
45e3bda28c78a9a3e89322a4db37006e8fc3bad0 | Shell | Bubujka/bu.bin | /bin/camshot | UTF-8 | 910 | 2.5625 | 3 | [] | no_license | #!/bin/bash
#---
#title: Сделать фото с вебкамеры и залить его в инет
#tags: [video]
#refs: []
#---
# Scratch area for the raw webcam frames.
mkdir /tmp/webshot 2> /dev/null
cd /tmp/webshot
rm * 2> /dev/null
# Capture 40 frames from the webcam; the last one gives the sensor time to
# auto-adjust exposure, so keep only frame 40 and discard the rest.
mplayer -vo png -frames 40 tv:// 2> /dev/null > /dev/null
mv 00000040.png 00000040.png_bak
rm 000000*.png
cd ~/.db/history/camshot
# Archive layout: YYYY/MM/DD/HH-MM-SS_<md5 prefix>.jpg
DIRECTORY=`date +%F | sed 's/-/\//g'`
mkdir -p $DIRECTORY 2> /dev/null
cd $DIRECTORY
NAME=`date +%X | sed 's/:/-/g'`_`cat /tmp/webshot/00000040.png_bak | md5sum | head -c10`
NAME_HIGH=$NAME.png
NAME_LOW=$NAME.jpg
# -flop mirrors the image so it reads like a mirror selfie.
convert -quality 70 -flop /tmp/webshot/00000040.png_bak $NAME_LOW
cd ~/.db/history/camshot
# Put the public URL on the clipboard and notify, then sync the archive to S3.
echo "http://cs.bubujka.org/$DIRECTORY/$NAME_LOW" | xc
#echo "http://cs.bubujka.org/$DIRECTORY/$NAME_LOW" >> ~/.db/wiki/camshots-$(hostname)
echo "Сфотографировано =)" | dzen2-wrapper green
s3cmd sync $HOME/.db/history/camshot/ s3://cs.bubujka.org
| true |
7a8ba9bfcc4729929ee1e2013814ee6f11eb6697 | Shell | cacao-org/AOloopControl | /scripts/auxscripts/processTelemetryPSDs | UTF-8 | 2,668 | 3.796875 | 4 | [] | no_license | #!/bin/bash
# number of arguments to script
NBARGS=0
# ======================= PROCESS NAME =================================
# Strip a leading "./" so usage/help lines show a clean program name.
pname=`echo "$0" | sed "s/\.\///g"`
# Print the full help screen (purpose, usage, options, inputs, outputs).
function printHELP {
	echo "------------------------------------------------------------------------"
	echo "$(tput bold) $pname : PROCESS TELEMETRY, make PSDs $(tput sgr 0)"
	echo "------------------------------------------------------------------------"
	echo " Make open and close loop PSDs"
	echo " "
	echo " $(tput bold)USAGE:$(tput sgr 0)"
	echo "  $0 [-h]"
	echo ""
	echo " $(tput bold)OPTIONS:$(tput sgr 0)"
	echo "     $(tput bold)-h$(tput sgr 0)          help"
	echo ""
	echo " $(tput bold)INPUT:$(tput sgr 0)"
	echo "     PredictiveControl/modeval_ol.fits"
	echo "     PredictiveControl/modeval_cl.fits"
	echo ""
	echo " $(tput bold)OUTPUT:$(tput sgr 0)"
	echo "     PredictiveControl/psd_ol.fits"
	echo "     PredictiveControl/psd_cl.fits"
	echo "     PredictiveControl/psd_ol.txt"
	echo "     PredictiveControl/psd_cl.txt"
	echo ""
	echo "------------------------------------------------------------------------"
}
# One-line help summary, used when the caller passes --help1.
printHELP1 ()
{
    printf "     $(tput bold)%-25s$(tput sgr0) Process telemetry: create open and closed loop PSDs\n" "$0"
}
# Transform long options to short ones
singlelinehelp=0
for arg in "$@"; do
  shift
  case "$arg" in
    "--help") set -- "$@" "-h" ;;
    "--help1")
set -- "$@" "-h"
singlelinehelp=1;
;;
    *) set -- "$@" "$arg"
  esac
done
# ================= OPTIONS =============================
# Leading ':' enables silent error handling (unknown options reach '\?').
while getopts :h FLAG; do
  case $FLAG in
    h) #show help
      if [ "$singlelinehelp" -eq "0" ]; then
printHELP
else
printHELP1
fi
      exit
      ;;
    \?) #unrecognized option - show help
      echo -e \\n"Option -${BOLD}$OPTARG${NORM} not allowed."
      printHELP
      ;;
  esac
done
shift $((OPTIND-1))
# Validate the remaining positional arguments (NBARGS expected, here 0).
if [ "$1" = "help" ] || [ "$#" -ne $NBARGS ]; then
if [ "$#" -ne $NBARGS ]; then
    echo "$(tput setaf 1)$(tput bold) Illegal number of parameters ($NBARGS params required, $# entered) $(tput sgr 0)"
fi
printHELP
        exit
fi
# Current AO loop number (NOTE(review): read but not used below — confirm).
loopnb=$( head -1 LOOPNUMBER )
# Open-loop PSD: load the mode values, FFT along time, square the amplitude.
./AOloopControl << EOF
loadfits "./PredictiveControl/modeval_ol.fits" im
im3Dto2D im
imswapaxis2D im im1
do1Drfft im1 imcc
c2ap imcc ima imp
imi=ima*ima
savefits imi "!./PredictiveControl/psd_ol.fits"
im2ascii imi "./PredictiveControl/psd_ol.txt"
listim
exitCLI
EOF
# Closed-loop PSD: identical pipeline on the closed-loop telemetry.
./AOloopControl << EOF
loadfits "./PredictiveControl/modeval_cl.fits" im
im3Dto2D im
imswapaxis2D im im1
do1Drfft im1 imcc
c2ap imcc ima imp
imi=ima*ima
savefits imi "!./PredictiveControl/psd_cl.fits"
im2ascii imi "./PredictiveControl/psd_cl.txt"
listim
exitCLI
EOF
| true |
69d9b751a6ac65d1dd2e8c21db820acf15260dd5 | Shell | DawnEve/NGS_training | /scripts/datamart/.demo_lab6_tuxedo/run_tuxedo.sh | UTF-8 | 1,477 | 2.6875 | 3 | [] | no_license | #!/bin/bash
###Align the RNA-Seq reads to the genome
bowtie2-build genome.fa genome
tophat -p 4 -G genes.gtf -o C1_R1_thout genome 4k_READS_sample/C1_R1_1.fq 4k_READS_sample/C1_R1_2.fq
tophat -p 4 -G genes.gtf -o C1_R2_thout genome 4k_READS_sample/C1_R2_1.fq 4k_READS_sample/C1_R2_2.fq
tophat -p 4 -G genes.gtf -o C2_R1_thout genome 4k_READS_sample/C2_R1_1.fq 4k_READS_sample/C2_R1_2.fq
tophat -p 4 -G genes.gtf -o C2_R2_thout genome 4k_READS_sample/C2_R2_1.fq 4k_READS_sample/C2_R2_2.fq
###Assemble expressed genes and transcripts
cufflinks -p 4 -o C1_R1_clout C1_R1_thout/accepted_hits.bam
cufflinks -p 4 -o C1_R2_clout C1_R2_thout/accepted_hits.bam
cufflinks -p 4 -o C2_R1_clout C2_R1_thout/accepted_hits.bam
cufflinks -p 4 -o C2_R2_clout C2_R2_thout/accepted_hits.bam
echo "./C1_R1_clout/transcripts.gtf" > assemblies.txt
echo "./C1_R2_clout/transcripts.gtf" >> assemblies.txt
echo "./C2_R1_clout/transcripts.gtf" >> assemblies.txt
echo "./C2_R2_clout/transcripts.gtf" >> assemblies.txt
###Create a single merged transcriptome annotation
cuffmerge -g genes.gtf -s genome.fa -p 4 assemblies.txt
###Identify differentially expressed genes and transcripts
cuffdiff -o diff_out -b genome.fa -p 4 -L C1,C2 -u merged_asm/merged.gtf ./C1_R1_thout/accepted_hits.bam,./C1_R2_thout/accepted_hits.bam ./C2_R1_thout/accepted_hits.bam,./C2_R2_thout/accepted_hits.bam
###Explore differential analysis results with CummeRbund
# Bug fix: plain 'echo' printed the trailing "\n" literally; printf interprets
# it. Also removed a stray '~' line (a vim artifact) that was executed as a
# command and failed at runtime.
printf 'use the R to visualize the analysis results\n'
| true |
570a6b42899f7b9696ad192393885725dcaa97f1 | Shell | inaba-minoru/txx | /run_all.sh | UTF-8 | 1,251 | 2.578125 | 3 | [] | no_license | #!/usr/bin/env bash
# If project not ready, generate cmake file.
if [[ ! -d build ]]; then
    mkdir -p build
    cd build
    cmake ..
    cd ..
fi
# Build project.
# NOTE(review): bare 'make -j' spawns unlimited jobs — consider 'make -j$(nproc)'.
cd build
make -j
cd ..
# Run all testcases.
# You can comment some lines to disable the run of specific examples.
mkdir -p output
# time bin/PA1 testcases/scene09_s.txt output/scene09_s.bmp
# time bin/PA1 mycase/case1.txt output/scene.bmp
time bin/PA1 mycase/case2.txt output/scene.bmp
# time bin/PA1 mycase/case3.txt output/scene.bmp
# time bin/PA1 testcases/scene.txt output/scene.bmp
# time bin/PA1 testcases/scene0X.txt output/scene0X.bmp
# time bin/PA1 testcases/scene00.txt output/scene00.bmp
# time bin/PA1 testcases/scene10_wineglass.txt output/scene10_wineglass.bmp
# time bin/PA1 testcases/scene06_bunny_1k.txt output/scene06.bmp
# time bin/PA1 testcases/scene01_basic.txt output/scene01.bmp
# time bin/PA1 testcases/scene02_cube.txt output/scene02.bmp
# time bin/PA1 testcases/scene03_sphere.txt output/scene03.bmp
# time bin/PA1 testcases/scene04_axes.txt output/scene04.bmp
# time bin/PA1 testcases/scene05_bunny_200.txt output/scene05.bmp
# time bin/PA1 testcases/scene07_shine.txt output/scene07.bmp
# time bin/PA1 testcases/scene08_dragon.txt output/scene08.bmp
| true |
49db92d315b64d24d59aa2df17ee40080958de50 | Shell | pbirsinger/CARDIO | /runner.sh | UTF-8 | 1,210 | 3.046875 | 3 | [] | no_license | #!/bin/bash
# Number of Cilk worker threads used by the benchmarks.
export CILK_NWORKERS=32
echo -e "\e[01;34mcompiling...\e[0m"
# Pick the benchmark from $1 and compile it with the matching harness.
if [ "$1" = "quicksort" ] || [ "$1" = "quicksort.c" ]; then
    icc -mkl -o harness -O3 -ipo -xHOST -no-prec-div -fno-strict-aliasing -fno-omit-frame-pointer sort_harness.c quicksort.c
    echo -e "\e[0;32mrunning quicksort...\e[0m"
elif [ "$1" = "mergesort" ] || [ "$1" = "mergesort.c" ]; then
    icc -mkl -o harness -O3 -ipo -xHOST -no-prec-div -fno-strict-aliasing -fno-omit-frame-pointer sort_harness.c mergesort.c
    echo -e "\e[0;32mrunning mergesort...\e[0m"
elif [ "$1" = "carma" ] || [ "$1" = "carma.c" ]; then
    icc -mkl -o harness -O3 -ipo -xHOST -no-prec-div -fno-strict-aliasing -fno-omit-frame-pointer carma_harness.c carma.c
    echo -e "\e[0;32mrunning CARMA...\e[0m"
elif [ "$1" = "strassen" ] || [ "$1" = "strassen.c" ]; then
    icc -mkl -o harness -O3 -ipo -xHOST -no-prec-div -fno-strict-aliasing -fno-omit-frame-pointer strassen_harness.c strassen.c
    #icc -mkl -o harness -g -O0 -ipo -xHOST -no-prec-div -fno-strict-aliasing -fno-omit-frame-pointer strassen_harness.c strassen.c
    echo -e "\e[0;32mrunning STRASSEN...\e[0m"
else
    echo -e "\e[0;31mERROR: Algorithm not found\e[0m"
    # NOTE(review): plain 'exit' returns 0 on this error path — consider 'exit 1'.
    exit
fi
./harness
rm -rf harness
| true |
51df16d04b783bb99b61ba338c2193d49f90fdac | Shell | khanqamarali/smartclaim | /DTL-Network/buildartifacts/create-artifacts.sh | UTF-8 | 1,137 | 2.6875 | 3 | [] | no_license |
# Make earlier cryptogen output writable so it can be removed and regenerated.
chmod -R 0755 ./crypto-config
# Delete existing artifacts
rm -rf ./crypto-config
rm genesis.block claimchannel.tx
rm -rf ../../channel-artifacts/*
#Generate Crypto artifactes for organizations
cryptogen generate --config=./crypto-config.yaml --output=./crypto-config/
# System channel
SYS_CHANNEL="sys-channel"
# channel name defaults to "mychannel"
CHANNEL_NAME="claimchannel"
echo $CHANNEL_NAME
# Generate System Genesis block
configtxgen -profile OrdererGenesis -configPath . -channelID $SYS_CHANNEL -outputBlock ./genesis.block
# Generate channel configuration block
configtxgen -profile BasicChannel -configPath . -outputCreateChannelTx ./claimchannel.tx -channelID $CHANNEL_NAME
# Anchor peer updates, one per organization MSP.
echo "####### Generating anchor peer update for nationwideMSP ##########"
configtxgen -profile BasicChannel -configPath . -outputAnchorPeersUpdate ./nationwideMSPanchors.tx -channelID $CHANNEL_NAME -asOrg nationwideMSP
echo "####### Generating anchor peer update for calibberMSP ##########"
configtxgen -profile BasicChannel -configPath . -outputAnchorPeersUpdate ./calibberMSPanchors.tx -channelID $CHANNEL_NAME -asOrg calibberMSP | true |
fbc2d4ed7a05b8fe493997cdcb1c9802d3fd48ba | Shell | tabrizihamid84/easy-vpn-server-installer | /createUser.sh | UTF-8 | 233 | 3.296875 | 3 | [] | no_license |
# Check if running as root; ocpasswd must be able to edit /etc/ocserv/ocpasswd.
if (( "${UID}" != 0 )) ; then
	# Bug fix: this error path previously did 'exit 0', reporting success
	# to the caller even though nothing was done.
	echo "You are not root, Exiting ..." >&2
	exit 1
fi
# Ask for the new VPN user's name (the old comment said "domain name" — wrong).
sudo ocpasswd -c /etc/ocserv/ocpasswd "${USERNAME}" | true |
fe1e6464c8d0d6116221920bff97af1e59653fa1 | Shell | oremj/puppet-aws | /modules/puppet/templates/init.d/puppetmaster | UTF-8 | 991 | 3.78125 | 4 | [] | no_license | #!/bin/sh
### BEGIN INIT INFO
# Provides: puppetmaster
# Required-Start: $local_fs $remote_fs
# Required-Stop: $local_fs $remote_fs
# Default-Start: 2 3 4 5
# Default-Stop: S 0 1 6
# Short-Description: puppetmaster
# Description: puppetmaster
### END INIT INFO
# Source function library.
. /etc/rc.d/init.d/functions
DAEMON=/usr/bin/thin
SCRIPT_NAME=/etc/init.d/puppetmaster
CONFIG=/etc/puppetmaster.yml
# ERB placeholder: substituted by Puppet when this template is rendered.
PIDDIR=<%= puppet_run_dir %>
# Exit if the package is not installed
[ -x "$DAEMON" ] || exit 0
# With no explicit instance arguments, operate on every instance in CONFIG.
if [ "X$2" = X ] || [ "X$3" = X ]; then
    INSTANCES="-C $CONFIG"
fi
# Report the status of each thin instance via its pid file.
rh_status() {
    for PIDFILE in $PIDDIR/puppetmasterd.*.pid
    do
        status -p ${PIDFILE} puppetmaster
    done
}
case "$1" in
    status)
        rh_status
        RETVAL=$?
        ;;
    start)
        $DAEMON start $INSTANCES
        ;;
    stop)
        $DAEMON stop $INSTANCES
        ;;
    restart)
        $DAEMON restart $INSTANCES
        ;;
    *)
        echo "Usage: $SCRIPT_NAME {status|start|stop|restart}" >&2
        exit 3
        ;;
esac
# ':' (no-op) guarantees a zero exit status for the handled actions.
:
| true |
831d29186ff46631a813ab6e174d6e6814621158 | Shell | VU-Thesis-2019-2020-Wesley-Shann/subjects | /update-nappa-library.sh | UTF-8 | 2,445 | 3.484375 | 3 | [] | no_license | #!/bin/zsh
# Nappa library paths
NAPPA_LIBRARY_BASE_PATH="/home/sshann/Documents/thesis/NAPPA/Prefetching-Library/"
NAPPA_LIBRARY_AAR_PATH="${NAPPA_LIBRARY_BASE_PATH}android_prefetching_lib/build/outputs/aar/"
AAR_ORIGINAL_PATH="${NAPPA_LIBRARY_AAR_PATH}android_prefetching_lib-debug.aar"
AAR_NEW_PATH="${NAPPA_LIBRARY_AAR_PATH}nappa-prefetching-library.aar"
# Subjects paths
SUBJECT_PROJECT_BASE_PATH="/home/sshann/Documents/thesis/subjects/"
AAR_SCRIPT_PATH="libs/aars/"
SUBJECTS_BASE_PATH=(
    # Nappa Greedy
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/AntennaPod/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/AntennaPod/core/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/Hillffair/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/materialistic/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/NewsBlur/clients/android/NewsBlur/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/RedReader/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/Travel-Mate/Android/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-greedy/uob-timetable-android/uob/uob-timetable/"
    # Nappa TFPR
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/AntennaPod/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/AntennaPod/core/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/Hillffair/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/materialistic/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/NewsBlur/clients/android/NewsBlur/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/RedReader/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/Travel-Mate/Android/app/"
    "${SUBJECT_PROJECT_BASE_PATH}instrumented-nappa-tfpr/uob-timetable-android/uob/uob-timetable/"
)
# Build the library
cd $NAPPA_LIBRARY_BASE_PATH || exit
echo "Building NAPPA Prefetching Library"
./gradlew build
echo "NAPPA Library finished building"
# Rename the library
echo "Renaming AAR file"
cp -rf "${AAR_ORIGINAL_PATH}" "${AAR_NEW_PATH}"
# Copy to subjects
count=0
echo "Copying AAR file to subjects"
for base_path in "${SUBJECTS_BASE_PATH[@]}"; do
    if [ -d "${base_path}" ]; then
        path_to_copy="${base_path}${AAR_SCRIPT_PATH}"
        mkdir -p "${path_to_copy}"
        cp -rf "${AAR_NEW_PATH}" "${path_to_copy}"
        count=$((count + 1))
    else
        echo "Subject path not found ${base_path}"
    fi
done
echo "Copied ${count} AAR files"
| true |
9a13bb22ad5873f76a0486ede35f5f6216ad2c09 | Shell | athityakumar/colorls | /test/run | UTF-8 | 2,121 | 3.953125 | 4 | [
"MIT"
] | permissive | #! /usr/bin/env bash
# -E: ERR trap inherited by functions; -u: unset vars are errors;
# -C: no clobbering via '>'; pipefail: pipelines fail on any stage.
set -EuCo pipefail
# 'set -e' only on bash >= 4, where its semantics are reliable enough here.
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
  set -e
fi
# ANSI colors for the pass/fail report lines.
declare RED=$'\033[31m'
declare GREEN=$'\033[32m'
declare RESET=$'\033[39m'
# Integer counters updated by OK/XFAIL/OUT, read by summary.
declare -i ERRORS=0 TESTS=0
# Run the development copy of colorls through bundler; `command` bypasses
# this shell function itself so the wrapper cannot recurse.
function colorls() {
  command bundle exec colorls "$@"
}
# check that the given command returns exit code 0
#
# SYNOPSIS: OK COMMAND [ARGS]
#
# Run a command that is expected to succeed (exit code 0).
# Bumps TESTS; on failure bumps ERRORS and reports the command's exit code
# together with the caller's file:line. All reporting goes to stderr.
function OK() {
  local ret=0
  ((++TESTS))
  # Capture the status without tripping `set -e` (left side of || is exempt).
  "$@" || ret=$?
  if ((ret == 0)); then
    echo "$GREEN" "OK $RESET - $*" >&2
  else
    ((++ERRORS))
    echo "$RED" "FAIL$RESET - $* (exit code: $ret, ${BASH_SOURCE[1]}:${BASH_LINENO[0]})" >&2
  fi
}
# check that the given command returns a non-zero exit code
#
# SYNOPSIS: XFAIL [exit-code] COMMAND [ARGS]
#
# Run a command that is expected to FAIL; optionally require a specific
# non-zero exit code given as a single digit (1-9) first argument.
# Bumps TESTS; bumps ERRORS and prints FAIL when the command unexpectedly
# succeeds or exits with the wrong code.
function XFAIL() {
  # FIX: initialise `expected` to the empty string. The script runs under
  # `set -u`, and a declared-but-unset local makes the
  # `[[ -n "$expected" ]]` check below abort with "unbound variable"
  # whenever XFAIL is called without an expected exit code.
  local expected="" ret
  if [[ "$1" == [1-9] ]]; then
    # expect a specific non-zero exit code
    expected="$1"
    shift
  fi
  ((++TESTS))
  if "$@"; then
    ((++ERRORS))
    echo "$RED" "FAIL$RESET - $* (unexpected success, ${BASH_SOURCE[1]}:${BASH_LINENO[0]})" >&2
  else
    # $? here is the exit status of the failed command under test.
    ret=$?
    if [[ -n "$expected" && $expected -ne $ret ]]; then
      ((++ERRORS))
      echo "$RED" "FAIL$RESET - $* (expected: $expected got $ret, ${BASH_SOURCE[1]}:${BASH_LINENO[0]})" >&2
    else
      echo "$GREEN" "OK $RESET - $*" >&2
    fi
  fi
}
# check that the given command returns with exit code 0 and its stdout contains a text
#
# SYNOPSIS: OUT TEXT COMMAND [ARGS]
#
function OUT() {
local STR="$1" ret
shift
((++TESTS))
if "$@" | grep -F "$STR"; then
echo "$GREEN" "OK $RESET - $*" >&2
else
ret=${PIPESTATUS[0]}
((++ERRORS))
if [[ $ret -ne 0 ]]; then
echo "$RED" "FAIL$RESET - $* (exit code: $ret, ${BASH_SOURCE[1]}:${BASH_LINENO[0]})" >&2
else
echo "$RED" "FAIL$RESET - $* ('$STR' not found in output, ${BASH_SOURCE[1]}:${BASH_LINENO[0]})" >&2
fi
fi
}
# EXIT-trap handler: print the final pass/fail tally to stderr and exit
# non-zero when any test failed so CI sees the failure.
function summary() {
  if (( ERRORS == 0 )); then
    printf '\n\n  %d tests passed.\n\n' "$TESTS" >&2
    return
  fi
  printf '\n\n  %d of %d tests failed.\n\n' "$ERRORS" "$TESTS" >&2
  exit 1
}
# Print the tally on every exit path, then load the actual test cases.
trap summary EXIT
# shellcheck source=test/checks
source "$(dirname "${BASH_SOURCE[0]}")/checks"
| true |
044899bffd39f926fbc6f50ab97b316c75de5404 | Shell | 6871/scripts | /bash/time_utils.sh | UTF-8 | 2,341 | 4.46875 | 4 | [] | no_license | #!/usr/bin/env bash
# Time related helper functions.
##############################################################################
# Print the time and date (in UTC and local formats) for any given epoch
# second arguments to stdout; if no arguments are given the current epoch time
# is used.
#
# Ubuntu Docker container non UTC support may require:
# apt-get update
# export DEBIAN_FRONTEND=noninteractive
# apt-get install --yes tzdata
# export TZ=America/Chicago
#
# Arguments:
# zero or more epoch second values to format
# Returns:
# 0 on success, non zero on failure
# Print each epoch-seconds argument as "<epoch> : <UTC date> : <local date>"
# (epoch right-aligned in 16 columns). With no arguments, formats "now".
# Handles both BSD date (macOS, -r) and GNU date (Linux, -d @N).
function epoch_seconds() {
  local inputs
  if [[ $# -eq 0 ]]; then
    # Default to current epoch seconds if no inputs
    inputs=("$(date +%s)")
  else
    inputs=("$@")
  fi
  # Hoisted out of the loop: the OS cannot change between iterations, and
  # this saves one `uname` fork per input value.
  local os
  os=$(uname)
  local epoch_time
  for epoch_time in "${inputs[@]}"; do
    if [[ ${os} == 'Darwin' ]]; then
      # BSD date: -r takes epoch seconds directly.
      printf '%16s : %s : %s\n' \
        "${epoch_time}" \
        "$(date -u -r "${epoch_time}")" \
        "$(date -r "${epoch_time}")"
    else
      # GNU date: the @ prefix denotes epoch seconds.
      printf '%16s : %s : %s\n' \
        "${epoch_time}" \
        "$(date -u -d @"${epoch_time}")" \
        "$(date -d @"${epoch_time}")"
    fi
  done
}
# macOS : date -u -j -f '%Y-%m-%d %H:%M:%S' '2021-06-01 16:35:16'
# macOS : date -u -j -f '%Y-%m-%d %H:%M:%S' '2021-06-01 16:35:16' +'%s'
# : 1622565316
# Ubuntu : TZ=UTC date -d '2021-06-01 16:35:16' +'%s'
# : 1622565316
# Convert a 'YYYY-MM-DD HH:MM:SS' string (interpreted as UTC) into
# "<input> : <parsed UTC date> : <epoch seconds>".
# Returns 1 and prints usage when not called with exactly one argument.
function epoch_seconds_from_date_string() {
  if [ $# -ne 1 ]; then
    printf 'Usage : date_string\n'
    printf 'macOS example, UTC only : %s\n' "'2021-06-01 16:35:16'"
    printf 'Linux examples : %s\n' "'2021-06-01 16:35:16'"
    printf ' : %s\n' "'2021-06-01 16:35:16 BST'"
    printf ' : %s\n' "'2021-06-01 16:35:16 PST'"
    return 1
  fi
  local when="${1}"
  case "$(uname)" in
    Darwin)
      # BSD date: parse with an explicit format string, UTC only.
      local fmt='%Y-%m-%d %H:%M:%S'
      printf '%s : %s : %s\n' \
        "${when}" \
        "$(date -u -j -f "${fmt}" "${when}")" \
        "$(date -u -j -f "${fmt}" "${when}" +'%s')"
      ;;
    *)
      # GNU date: force UTC via the TZ environment variable.
      printf '%s : %s : %s\n' \
        "${when}" \
        "$(TZ='UTC' date -d "${when}")" \
        "$(TZ='UTC' date -d "${when}" +'%s')"
      ;;
  esac
}
| true |
d92525ca5933f57b10f859a09653ce3dd9cfb46d | Shell | mohlendo/dotfiles | /.bashrc | UTF-8 | 2,564 | 2.890625 | 3 | [] | no_license | #homebrew settings
# Put Homebrew bins first; NOTE(review): '~' inside quotes is NOT expanded
# by bash, so the trailing ':~/bin' entry is literal — likely unintended.
PATH="/usr/local/bin:/usr/local/sbin:$PATH:~/bin"
#git bash completion
if [ -f `brew --prefix`/etc/bash_completion ]; then
    . `brew --prefix`/etc/bash_completion
  fi
#git stuff
# Flags consumed by git's __git_ps1 to decorate the prompt with repo state.
GIT_PS1_SHOWDIRTYSTATE=true
GIT_PS1_SHOWSTASHSTATE=true
GIT_PS1_SHOWUNTRACKEDFILES=true
#git ps1 extension
PS1='\[\033[32m\]\u@\h\[\033[00m\]:\[\033[34m\]\w\[\033[31m\]$(__git_ps1)\[\033[00m\]\$ '
# some more ls aliases
alias ls="ls -G" #color ls
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
#for dotfiles management
alias dotfiles='git --git-dir=.dotfiles.git --work-tree=.'
#Node.js ndm
NODE_PATH="/usr/local/lib/node:/usr/local/lib/node_modules"
PATH="$PATH:/usr/local/share/npm/bin"
#java
# Switch the active JDK on macOS, e.g. `setjdk 1.8`: drops the system JVM
# shim and any previous JAVA_HOME bin dir from PATH, then points JAVA_HOME
# at the requested version via /usr/libexec/java_home.
function setjdk() {
  [ $# -eq 0 ] && return
  removeFromPath '/System/Library/Frameworks/JavaVM.framework/Home/bin'
  # Only strip the old JAVA_HOME when one is set.
  [ -n "${JAVA_HOME+x}" ] && removeFromPath $JAVA_HOME
  export JAVA_HOME=$(/usr/libexec/java_home -v $@)
  export PATH=$JAVA_HOME/bin:$PATH
}
# Strip the given directory from $PATH (removes ":<dir>" or a leading
# "<dir>:"). FIX: feed PATH to sed via quoted printf — the original
# `echo $PATH` was unquoted, so a PATH entry containing spaces or glob
# characters was word-split/expanded before reaching sed.
function removeFromPath() {
  export PATH=$(printf '%s' "$PATH" | sed -E -e "s;:$1;;" -e "s;$1:?;;")
}
# Default to JDK 1.8 for new shells.
setjdk 1.8
#android
export ANDROID_HOME=/Library/Android
# android tools to path
PATH="$PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/tools"
###-begin-pm2-completion-###
### credits to npm for the completion file model
#
# Installation: pm2 completion >> ~/.bashrc (or ~/.zshrc)
#
# Drop '=' and '@' from bash's word-break set so pm2 arguments containing
# them complete as single words.
COMP_WORDBREAKS=${COMP_WORDBREAKS/=/}
COMP_WORDBREAKS=${COMP_WORDBREAKS/@/}
export COMP_WORDBREAKS
# bash branch: the `complete` builtin exists.
if type complete &>/dev/null; then
  _pm2_completion () {
    local si="$IFS"
    IFS=$'\n' COMPREPLY=($(COMP_CWORD="$COMP_CWORD" \
                           COMP_LINE="$COMP_LINE" \
                           COMP_POINT="$COMP_POINT" \
                           pm2 completion -- "${COMP_WORDS[@]}" \
                           2>/dev/null)) || return $?
    IFS="$si"
  }
  complete -o default -F _pm2_completion pm2
# zsh branch: `compctl` and the -Ac/-cn/-l/-ln read flags are zsh builtins.
elif type compctl &>/dev/null; then
  _pm2_completion () {
    local cword line point words si
    read -Ac words
    read -cn cword
    let cword-=1
    read -l line
    read -ln point
    si="$IFS"
    IFS=$'\n' reply=($(COMP_CWORD="$cword" \
                       COMP_LINE="$line" \
                       COMP_POINT="$point" \
                       pm2 completion -- "${words[@]}" \
                       2>/dev/null)) || return $?
    IFS="$si"
  }
  compctl -K _pm2_completion + -f + pm2
fi
###-end-pm2-completion-###
# pass(1) completion installed by Homebrew.
source /usr/local/etc/bash_completion.d/password-store
### vs code
# Launch VS Code on the given args from the current directory (macOS `open`).
code () { VSCODE_CWD="$PWD" open -n -b "com.microsoft.VSCode" --args $* ;}
| true |
651cd3131e477322a3ae6a310870b57bb9966e99 | Shell | rthangaraj/azure-3tier-deployment | /metadata-retrieval/azure-metadaretrieval.sh | UTF-8 | 552 | 3.046875 | 3 | [] | no_license | #!/bin/bash
## Install JQ in the host machine
# jq lives in EPEL on CentOS/RHEL, so enable that repo first.
sudo yum install -y epel-release
sudo yum update -y
sudo yum install -y jq
## Retrieve the metadata of the instance IMDS is a REST API that's available at a well-known, non-routable IP address (169.254.169.254). You can only access it from within the VM. Communication between the VM and IMDS never leaves the host
# --noproxy "*" is required: IMDS is link-local and must not go through a proxy.
curl -H Metadata:True --noproxy "*" "http://169.254.169.254/metadata/instance?api-version=2019-03-11&format=json" | jq . >metadata.json
echo "Metadata of the instance has been retrieved"
| true |
7b3b0df5186e18590d4ec68f7f59a8dc79aac0f9 | Shell | AndreHermanto/web-questionnaire-builder | /scripts/build-for-demo.sh | UTF-8 | 2,234 | 3.078125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Bamboo CI deploy script: builds the web app twice (mock-server and gateway
# flavors), rsyncs each build to the sandbox NFS host, then builds/pushes a
# Docker image to GCR. -e: abort on error, -x: trace commands.
set -ex
branch_name="$bamboo_planRepository_branch"
app_name="$bamboo_planRepository_name"
clean_app_name=${app_name:4}
# e.g. "feature/ABC-123-foo" -> "ABC-123" (second '/' field, first two '-' fields).
PART_OF_BRANCH_NAME=`echo "${bamboo_repository_git_branch}" | cut -d'/' -f2 | cut -d'-' -f1,2`
# Build for sandbox mock server
rm -rf build
# Mirror npm's stderr into build.txt while still showing it on the console.
npm run build:demo 2> >(tee build.txt >&2)
mkdir -p $branch_name
cp -R build/* $branch_name
# SSH to sandbox
ssh 35.189.35.255 /bin/bash <<EOF
mkdir -p "/var/nfs/demo/$app_name/$branch_name"
exit
EOF
# Copy the files over
scp -r $branch_name/. 35.189.35.255:/var/nfs/demo/$app_name/$branch_name/
# Build for sandbox gateway
rm -rf build
REACT_APP_ENABLE_LOGS=true npm run build:gateway 2> >(tee build.txt >&2)
mkdir -p $branch_name
cp -R build/* $branch_name
# SSH to sandbox
echo "Creating folder on Sandbox - /var/nfs/demo/uat/$app_name/$branch_name"
ssh 35.189.35.255 /bin/bash <<EOF
mkdir -p "/var/nfs/demo/uat/$app_name/$branch_name"
exit
EOF
# Copy the files over
scp -r $branch_name/. 35.189.35.255:/var/nfs/demo/uat/$app_name/$branch_name/
# Build the docker image for sandbox
printf "FROM gcr.io/genomeone-sandbox/infra-nginx:latest\nADD build /usr/share/nginx/html" > Dockerfile
echo "Creating Docker Image for Web Application" $bamboo_planRepository_name "with Tag:latest-"$PART_OF_BRANCH_NAME
/home/bamboo/google-cloud-sdk/bin/gcloud config set project genomeone-sandbox
/home/bamboo/google-cloud-sdk/bin/gcloud container clusters get-credentials genomeone-sandbox
docker build --force-rm=true --tag=gcr.io/genomeone-sandbox/$bamboo_planRepository_name:latest-$PART_OF_BRANCH_NAME .
# Untag/delete of the previous image is best-effort (|| true): it may not exist.
/home/bamboo/google-cloud-sdk/bin/gcloud -q container images untag --quiet gcr.io/genomeone-sandbox/$bamboo_planRepository_name:latest-$PART_OF_BRANCH_NAME || true
tagdigest=$(/home/bamboo/google-cloud-sdk/bin/gcloud container images list-tags gcr.io/genomeone-sandbox/$bamboo_planRepository_name --filter='-tags:*' --format='get(digest)' --limit=1)
/home/bamboo/google-cloud-sdk/bin/gcloud container images delete --quiet gcr.io/genomeone-sandbox/$bamboo_planRepository_name@"$tagdigest" || true
/home/bamboo/google-cloud-sdk/bin/gcloud docker -- push gcr.io/genomeone-sandbox/$bamboo_planRepository_name:latest-$PART_OF_BRANCH_NAME
rm Dockerfile
3253a3a74c43cd42240226b582b9d254b90973e2 | Shell | CESNET/glite-testsuites | /rOCCI/tests/rOCCI-common-testbeds.sh | UTF-8 | 3,556 | 3.359375 | 3 | [] | no_license | #!/bin/bash
#The work represented by this source file is partially or entirely funded
#by the EGI-InSPIRE project through the European Commission's 7th Framework
#Programme (contract # INFSO-RI-261323)
#
#Copyright (c) 2014 CESNET
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
#http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# Generate arrange_rOCCI_test_root.sh, a script that will be copied to and
# run on a remote test host. Distro detection (apt vs yum, voms package
# name) happens HERE at generation time; inside the here-doc, `\$`-escaped
# variables are evaluated on the remote host, while unescaped ones
# (${INSTALLCMD}, ${INSTALLPKGS}, $COPYPROXY) are baked in now.
# Args: $1 remote host (currently unused in the body), $2 COPYPROXY flag.
function gen_arrange_script_rOCCI()
{
  remotehost=$1
  COPYPROXY=$2
  INSTALLPKGS='ca-certificates curl git globus-proxy-utils lsof wget'
  if egrep -iq "Debian|Ubuntu" /etc/issue; then
    INSTALLCMD="apt-get install -q --yes --force-yes"
    INSTALLPKGS="${INSTALLPKGS} lintian voms-clients"
  else
    if egrep -iq '^Fedora' /etc/redhat-release; then
      VOMSPKG='voms-clients-cpp'
    else
      VOMSPKG='voms-clients'
    fi
    INSTALLCMD="yum install -q -y --nogpgcheck"
    INSTALLPKGS="${INSTALLPKGS} rpmlint ${VOMSPKG}"
  fi
  cat << EndArrangeScript > arrange_rOCCI_test_root.sh
CERTFILE=\$1
GLITE_USER=\$2
GSTSTCOLS=\$3
OUTPUT_OPT=\$4
GITROOT=git://github.com/CESNET/glite-testsuites.git
echo "Certificate file: \$CERTFILE "
echo "gLite user: \$GLITE_USER "
echo "Terminal width: \$GSTSTCOLS "
echo "Output format: \$OUTPUT_OPT "
export GSTSTCOLS CVSROOT
${INSTALLCMD} ${INSTALLPKGS}
cd /tmp
if [ $COPYPROXY -eq 1 ]; then
	mv \$CERTFILE x509up_u\`id -u\`
	chown \`id -un\`:\`id -gn\` x509up_u\`id -u\`
else
	rm -rf /tmp/test-certs/grid-security
	[ -r glite-testsuites/LB/tests/lb-generate-fake-proxy.sh ] || wget -q -P glite-testsuites/LB/tests/ https://raw.github.com/CESNET/glite-testsuites/master/LB/tests/lb-generate-fake-proxy.sh
	chmod +x glite-testsuites/LB/tests/lb-generate-fake-proxy.sh
	glite-testsuites/LB/tests/lb-generate-fake-proxy.sh --all > fake-prox.out.\$\$
	FAKE_CAS=\`cat fake-prox.out.\$\$ | grep -E "^X509_CERT_DIR" | sed 's/X509_CERT_DIR=//'\`
	if [ "\$FAKE_CAS" = "" ]; then
		echo "Failed generating proxy" >&2
		exit 2
	else
		cp -rv \$FAKE_CAS/* /etc/grid-security/certificates/
	fi
	TRUSTED_CERTS=\`cat fake-prox.out.\$\$ | grep -E "^TRUSTED_CERTS" | sed 's/TRUSTED_CERTS=//'\`
	export X509_USER_CERT=\${TRUSTED_CERTS}/trusted_client00.cert
	export X509_USER_KEY=\${TRUSTED_CERTS}/trusted_client00.priv-clear
	rm fake-prox.out.\$\$
fi
cd ~/
git clone --depth 1 \$GITROOT rOCCI_testing
cd rOCCI_testing/rOCCI/tests
echo ========================
echo " REAL TESTS START HERE"
echo ========================
echo "</PRE>"
# dummy
./rOCCI-test-helper-switch-backend.sh dummy \$OUTPUT_OPT
./rOCCI-test-deployment.sh \$OUTPUT_OPT
# opennebula
./rOCCI-test-helper-switch-backend.sh opennebula-4.12 1200 \$OUTPUT_OPT && ./rOCCI-test-deployment.sh \$OUTPUT_OPT
./rOCCI-test-helper-switch-backend.sh opennebula-4.10 120 \$OUTPUT_OPT && ./rOCCI-test-deployment.sh \$OUTPUT_OPT
./rOCCI-test-helper-switch-backend.sh opennebula-4.8 120 \$OUTPUT_OPT && ./rOCCI-test-deployment.sh \$OUTPUT_OPT
# amazon
./rOCCI-test-helper-switch-backend.sh amazon 120 \$OUTPUT_OPT && ./rOCCI-test-deployment.sh \$OUTPUT_OPT
echo "<PRE>"
echo ==================
echo " TESTS END HERE"
echo ==================
echo "</PRE>"
EndArrangeScript
}
| true |
2523272ef8d01a216ff255f034c200e8299d99dd | Shell | tnakaicode/jburkardt | /r8col/r8col.sh | UTF-8 | 237 | 2.796875 | 3 | [] | no_license | #! /bin/bash
#
cp r8col.hpp /$HOME/include
#
g++ -c -I /$HOME/include r8col.cpp
if [ $? -ne 0 ]; then
echo "Errors compiling r8col.cpp"
exit
fi
#
mv r8col.o ~/libcpp/$ARCH/r8col.o
#
echo "Library installed as ~/libcpp/$ARCH/r8col.o"
| true |
06c6b68f7041e12292dab3b967790cde2e236473 | Shell | frzb/coinboot-plugins | /src/telegraf/upstream/scripts/mac_installgo.sh | UTF-8 | 901 | 4.09375 | 4 | [
"MIT"
] | permissive | #!/bin/sh
# Pinned Go toolchain version installed straight from the official tarball.
version="1.16.2"
# This path is cachable, while saving directly in /usr/local/ will cause issues restoring the cache
path="/usr/local/Cellar"
# Download Go directly from tar, the reason we aren't using brew: it is slow to update and we can't pull specific minor versions
setup_go () {
    echo "installing go"
    curl -OL https://golang.org/dl/go${version}.darwin-amd64.tar.gz --output go${version}.darwin-amd64.tar.gz
    # Replace any prior install wholesale, then symlink go/gofmt onto PATH.
    sudo rm -rf ${path}/go
    sudo tar -C $path -xzf go${version}.darwin-amd64.tar.gz
    ln -sf ${path}/go/bin/go /usr/local/bin/go
    ln -sf ${path}/go/bin/gofmt /usr/local/bin/gofmt
}
# NOTE(review): '&>' is a bashism; under the '#!/bin/sh' shebang a POSIX sh
# (e.g. dash) parses it as '&' + '>', backgrounding the command — confirm the
# CI runner actually executes this with bash.
if command -v go &> /dev/null; then
    echo "Go is already installed"
    # `go version` prints "go version goX.Y.Z os/arch"; grab the 3rd word
    # and strip the "go" prefix.
    v=`go version | { read _ _ v _; echo ${v#go}; }`
    echo "$v is installed, required version is $version"
    # Any mismatch (older or newer) triggers a reinstall of the pinned version.
    if [ "$v" != $version ]; then
        setup_go
        go version
    fi
else
    setup_go
fi
| true |
7470ae232324cdbc3cf8885279b277d42f79582f | Shell | darrenldl/blockyarchive | /tests/compare_decode_file_and_stdout_nometa.sh | UTF-8 | 1,664 | 3.5625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Integration test: for each SBX container version, encode a dummy file with
# --no-meta, decode it both to a file and to stdout, and verify the two
# decoded outputs are byte-identical. Overall pass/fail is accumulated in
# exit_code and written to the `exit_code` file for the harness.
exit_code=0
VERSIONS=(1 2 3 17 18 19)
source functions.sh
file_size=$(ls -l dummy | awk '{ print $5 }')
corrupt_count=10
for ver in ${VERSIONS[*]}; do
  echo -n "Encoding in version $ver"
  output=$(./../blkar encode --json --sbx-version $ver -f --no-meta dummy dummy$ver.sbx \
                     --rs-data 10 --rs-parity 2)
  if [[ $(echo $output | jq -r ".error") != null ]]; then
    echo " ==> Invalid JSON"
    exit_code=1
  fi
  if [[ $(echo $output | jq -r ".stats.sbxVersion") == "$ver" ]]; then
    echo " ==> Okay"
  else
    echo " ==> NOT okay"
    exit_code=1
  fi

  container_name=dummy$ver.sbx

  echo "Decoding version $ver container"
  output=$(./../blkar decode --json --verbose dummy$ver.sbx dummy$ver -f)
  # if [[ $(echo $output | jq -r ".error") != null ]]; then
  #   echo " ==> Invalid JSON"
  #   exit_code=1
  # fi
  # if [[ $(echo $output | jq -r ".stats.sbxVersion") == "$ver" ]]; then
  #   echo " ==> Okay"
  # else
  #   echo " ==> NOT okay"
  #   exit_code=1
  # fi

  echo "Decoding version $ver container (stdout output)"
  output=$(./../blkar decode --json --verbose dummy$ver.sbx - 2>&1 > dummy"$ver"_stdout)
  # if [[ $(echo $output | jq -r ".error") != null ]]; then
  #   echo " ==> Invalid JSON"
  #   exit_code=1
  # fi
  # if [[ $(echo $output | jq -r ".stats.sbxVersion") == "$ver" ]]; then
  #   echo " ==> Okay"
  # else
  #   echo " ==> NOT okay"
  #   exit_code=1
  # fi

  # The core assertion: file output and stdout output must match exactly.
  echo -n "Comparing decode output file and stdout output"
  cmp dummy$ver dummy"$ver"_stdout
  if [[ $? == 0 ]]; then
    echo " ==> Okay"
  else
    echo " ==> NOT okay"
    exit_code=1
  fi
done

echo $exit_code > exit_code
| true |
3a8776b0c7c66e69358b1bd46e6a378f0f33d06c | Shell | justinsong01/TrendTracker | /script/s3-local-sync.sh | UTF-8 | 2,152 | 4.21875 | 4 | [
"MIT"
] | permissive | #!/bin/bash
#Program to copy or sync the file from local machine to s3.
# Useage: copyFile inputPath s3storagePath
# There are 2 option avaliable. First copy files are local machine to
# Amazon s3 storage. Second, it act as a sync from local machine to s3.
# Program name (for usage output) and a per-run timestamped log file in CWD.
PROGNAME=$(basename $0)
CURRENT_PATH=`pwd`
TIMESTAMP=`date "+%Y%m%d%H%M"`
LOG_FILE=$CURRENT_PATH/run_s3.log.$TIMESTAMP
START=`date "+%Y-%m-%d %H:%M"`
echo $START >> $LOG_FILE
function usage {
  # Print help text; $PROGNAME is expanded inside the here-doc.
  cat << EOF
usage: $PROGNAME options
This script run the test1 or test2 over a machine.
OPTIONS:
   -h      Show this message
   -o      Required.Provide option value 1 For copy files from local system
                                         2 For sync folder from local to s3
                                         3 Confirm the files that will be deleted from s3
                                         4 Delete the file from s3
   -f      Required.Specify the local system file name
   -s      Required.Specify s3 path.
   -a      Specify s3 access key
   -k      Specify s3 secreat key
EOF
}
function error_exit {
	# Display error message and exit
	# Logs "$PROGNAME: <message>" (default "Unknown Error") to the log file.
	echo "${PROGNAME}: ${1:-"Unknown Error"}" >> $LOG_FILE 2>&1
	# NOTE(review): clean_up is not defined in this file — presumably sourced
	# from elsewhere; verify it exists before relying on error_exit.
	clean_up 1
}
# Dispatch the requested s3cmd operation based on the globals set by the
# option parser: optionval (1-4), localfile, s3file. Most operations append
# their output to $LOG_FILE; option 3 (dry run) prints to the console so the
# user can review what would be deleted.
function perform {
  if [ -z "$optionval" ] || [ -z "$localfile" ] || [ -z "$s3file" ]; then
    echo "Please check value of -o -f -s"
    exit 0
  fi
  case "$optionval" in
    1 )
      s3cmd put -r --acl-public "$localfile" "$s3file" >> "$LOG_FILE" 2>&1;;
    2 )
      s3cmd sync --acl-public "$localfile" "$s3file" >> "$LOG_FILE" 2>&1;;
    3 )
      s3cmd sync --dry-run --delete-removed "$localfile" "$s3file";;
    4 )
      # FIX: this branch previously referenced $arg2/$arg3, which are never
      # set anywhere — the destructive delete-sync ran with empty paths.
      s3cmd sync --delete-removed "$localfile" "$s3file" >> "$LOG_FILE" 2>&1;;
    * ) echo "You did not enter a number"
        echo "between 1 and 4."
  esac
}
# Parse command-line flags into the globals consumed by perform.
# FIX: the optstring previously used typographic quotes (“…”), making the
# curly-quote characters part of the option set, and declared `h:` so that
# a bare -h (documented as taking no argument) was rejected by getopts.
# -a/-k are accepted but currently unhandled (no case arm).
while getopts "ha:k:o:f:s:" OPTION
do
     case $OPTION in
         h)
             usage
             exit 1
             ;;
         o)
             optionval=$OPTARG
             ;;
         f)
             localfile=$OPTARG
             ;;
         s)
             s3file=$OPTARG
             ;;
         ?)
             usage
             exit 1
             ;;
     esac
done
perform
| true |
4de134559a50bab9b6a246dfc3a7cb68426f9d59 | Shell | sapan2211/openstack-helm | /rabbitmq/templates/bin/_rabbitmq-start.sh.tpl | UTF-8 | 1,190 | 2.765625 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
{{/*
Copyright 2017 The Openstack-Helm Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/}}
# Container entrypoint: strict mode (-e exit on error, -u unset vars are
# errors, -x trace) plus pipefail.
set -eux
set -o pipefail
# RabbitMQ nodes must share the Erlang cookie; it has to be private (0600)
# or the server refuses to start.
cp /etc/rabbitmq/erlang.cookie /var/lib/rabbitmq/.erlang.cookie
chmod 600 /var/lib/rabbitmq/.erlang.cookie
# This should be called after rabbitmq-server is started but in current design we don't have
# any other way of doing this. PreStart could not be used here as it's:
# - executed just after container creation (not after entrypoint)
# - Currently, there are (hopefully rare) scenarios where PostStart hooks may not be delivered.
# Startup marker is used by liveness and readiness probes.
date +%s > /tmp/rabbit-startup-marker
# Replace the shell with the server so it receives signals as PID 1.
exec /usr/lib/rabbitmq/bin/rabbitmq-server
| true |
b5c7f7a494b4d0d01fa3c7398854d46fd1ec5eb4 | Shell | DrenfongWong/kvm-nested | /preinstall.sh | UTF-8 | 436 | 2.703125 | 3 | [] | no_license | #! /bin/bash
# Prepare a host for nested KVM testing: install virt tooling, drop the
# provided qemu/network config files into place, and rewrite the template
# network script for whatever ethernet NIC this machine actually has.
chmod -R 777 /nested
cd /nested
cp beaker-RHEL-AppStream.repo /etc/yum.repos.d/
yum install net-tools nmap wget -y&&yum module install virt -y
cp qemu* /etc/
cp ifcfg* /etc/sysconfig/network-scripts/
cd /etc/sysconfig/network-scripts/
# Extract the name of interface #2 (first e* NIC) from `ip addr`.
# NOTE(review): this assumes the NIC is device index 2 and named e* — fragile.
nic=$(ip addr|grep -o "2: \<e.*\: "|awk -F: '{print $2}'|sed "s/ //g")
# Substitute the real NIC name into the template and install it under the
# real interface's config filename.
sed -i "s/enp4s0f0/$nic/g" ifcfg-enp4s0f0
rm -rf ifcfg-$nic
cp ifcfg-enp4s0f0 ifcfg-$nic
systemctl restart network
| true |
0e93d786e41f2f392989ffba1ee2a319d87c92e4 | Shell | brucesuny/EVIDEO | /Installation/3rd_party/ffmpeg_libs/mac-compile | UTF-8 | 776 | 2.53125 | 3 | [] | no_license | #!/bin/bash
# Build all third-party ffmpeg dependencies for macOS, in dependency order.
# Relies on the `compile` helper defined in ${HOME3P}/utilx; any failed
# build aborts the whole run.
echo "------------------------------------------------------------------------"
# Print this script's absolute path (cd to its dir, then echo dir/name).
echo `cd "${0%/*}" 2>/dev/null; echo $PWD/${0##*/}`
echo "------------------------------------------------------------------------"
set -x
source "${HOME3P}/utilx"
[ $? -ne 0 ] && exit 1
#
compile mp4v2 mac || exit 1
compile sdl mac || exit 1
compile ogg mac || exit 1
compile vorbis mac || exit 1
compile lame mac || exit 1
#
# x264 depends on yasm and pthread
compile x264 mac || exit 1
# xvid depends on yasm (latest version) and pthread
compile xvid mac || exit 1
# faac and faad depends on mp4v2 (not really?!)
compile faac mac || exit 1
compile faad mac || exit 1
# theora depends on sdl, ogg, vorbis and png12 (base_libs)
compile theora mac || exit 1
#
exit 0
| true |
2bb8eecdbb14281130d78b6e3cc7289c681badac | Shell | JRasmusBm/dotfiles | /install_dependencies/ruby | UTF-8 | 361 | 3.0625 | 3 | [] | no_license | #!/bin/sh
# Abort on the first failing step (configure/make/etc.).
set -e
# Download, build and install Ruby 2.7.1 from source, then clean up the
# build tree and tarball.
install_ruby() {
  sudo apt update
  ruby_version=2.7.1
  wget https://cache.ruby-lang.org/pub/ruby/2.7/ruby-$ruby_version.tar.gz
  tar zxvf ruby-$ruby_version.tar.gz
  cd ruby-$ruby_version
  ./configure
  make
  sudo make install
  cd ..
  rm -rf ruby-$ruby_version
  rm -rf ruby-$ruby_version.tar.gz
  # NOTE(review): install_ruby_extras is not defined in this file — it is
  # presumably provided by a sibling script; confirm it is in scope.
  install_ruby_extras
}
install_ruby
| true |
921e22f0bab40a23d87061842e673dc92f105e9a | Shell | dobkeratops/unfold | /scripts/gedit/find_def | UTF-8 | 557 | 3.3125 | 3 | [] | no_license | #!/bin/sh
#search for selected text or symbol under cursor (needs patch for functions.py)
# Prefer the editor selection; fall back to the symbol under the cursor.
# FIX: both [ -z ... ] tests now quote $search — an unquoted multi-word
# selection made `test` fail with "too many arguments" and skipped the branch.
search=$GEDIT_SELECTED_TEXT
if [ -z "$search" ]
then
	search=$GEDIT_CURRENT_SYMBOL
fi
P=$GEDIT_CURRENT_DOCUMENT_DIR
if [ -z "$search" ]
then
	echo "no symbol to search for"
	exit
fi
echo "grep defs of: $search (fn|trait|struct|type|class|typedef|auto)"
# Find definition sites of the symbol in the document dir and Rust sources.
# (FIX: this line previously began with `//`, a C-style comment that is not
# valid shell and was executed as a command.)
grep -rn --include "*.rs" --include "*.h" --include "*.cpp" --include "*.c" "\(class\|fn\|type\|trait\|struct\|enum\|auto\)\\s*$search[^a-zA-Z0-9_]" $GEDIT_CURRENT_DOCUMENT_DIR $RUST_SRC |unfold -Scu
| true |
b544070adea1575d2d3d0dbe344985c4bf283830 | Shell | abveritas/apps-testing | /cppunit/PKGBUILD | UTF-8 | 680 | 2.578125 | 3 | [] | no_license | #
#
# Chakra Packages for Chakra, part of chakra-project.org
#
# contributor (x86_64): Giuseppe Calà <jiveaxe@gmail.com>
# PKGBUILD metadata consumed by makepkg.
pkgname=cppunit
pkgver=1.12.1
pkgrel=2
pkgdesc="A C++ unit testing framework"
arch=('i686' 'x86_64')
url="http://cppunit.sourceforge.net"
license=('LGPL')
depends=('sh' 'gcc-libs')
makedepends=('gcc')
categories=('programming')
options=('!libtool')
source=("http://downloads.sourceforge.net/$pkgname/$pkgname-$pkgver.tar.gz")
md5sums=('bd30e9cf5523cdfc019b94f5e1d7fd19')

# Configure and compile in the unpacked source tree ($srcdir is set by makepkg).
build() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  # cppunit needs libdl symbols at link time on this toolchain.
  export LDFLAGS+=" -ldl"
  ./configure --prefix=/usr
  make
}

# Stage the built files into the package root ($pkgdir).
package() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  make DESTDIR="${pkgdir}" install
}
| true |
45ce25eb5c8741138008e161d531309bf5a0d9a8 | Shell | kumomi/TechLauncher-Beacon | /sydney-filter-graph.sh | UTF-8 | 630 | 3.03125 | 3 | [] | no_license | echo "Installation Gephi-Filter"
git clone https://github.com/ukiyo-e/SydneyPipeline.git
echo "Cd target forlder"
cd SydneyPipeline/src/main/resources
echo "Download Sydney Graphml file"
aws s3 --region us-west-2 cp "s3://graphml.rd-switchboard/sydney/sydney.zip" .
#sudo curl "https://s3-us-west-2.amazonaws.com/graphml.rd-switchboard/sydney/sydney.zip"
sudo unzip sydney.zip
rm sydney.zip
echo "Go Parent Java"
cd ../..
cd ..
pwd
mvn install
mvn exec:java -Dexec.mainClass="Main"
echo "Copy files to S3..."
NOW="$(date +'%Y-%m-%d')"
aws s3 --region us-west-2 output.gexf s3://graphml.rd-switchboard/sydney/filtergraph/$NOW/
| true |
34f430fd087bd928942497daea19a0b7dff06d39 | Shell | sorinello/xwiki-libreoffice-backend-importer | /xwiki-libreoffice-backend-importer.sh | UTF-8 | 6,854 | 3.453125 | 3 | [] | no_license | #!/bin/bash
###############################################################################
### XWiki Open Office/Libre Office Importer ###
### ###
### This script allow the import of Office documents using an Open Office ###
### or Libre Office Server. It uses POST method and supports batch import. ###
### ###
###############################################################################
# ---------------------------------------------------------------------------
# See the NOTICE file distributed with this work for additional
# information regarding copyright ownership.
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this software; if not, write to the Free
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA, or see the FSF site: http://www.fsf.org.
# ---------------------------------------------------------------------------
## Configuration ##
# Text color variables
txtund=$(tput sgr 0 1)          # Underline
txtbld=$(tput bold)             # Bold
bldred=${txtbld}$(tput setaf 1) #  red
bldgrn=${txtbld}$(tput setaf 2) #  green
bldblu=${txtbld}$(tput setaf 4) #  blue
bldwht=${txtbld}$(tput setaf 7) #  white
txtrst=$(tput sgr0)             # Reset
info=${bldwht}*${txtrst}        # Feedback
pass=${bldblu}*${txtrst}
warn=${bldred}*${txtrst}
ques=${bldblu}?${txtrst}

## Default values (Not defined as parameters) ##
# Target wiki base URL plus derived login/import endpoints; overridable at
# runtime via -t. Credentials default to the stock XWiki admin account.
XWIKI_URL='http://localhost:8080/xwiki';
XWIKI_FORM_LOGIN_URL=$XWIKI_URL"/bin/loginsubmit/XWiki/XWikiLogin";
XWIKI_OFFICE_IMPORTER_URL=$XWIKI_URL"/bin/view/XWiki/OfficeImporterResults";
XWIKI_TARGET_SPACE='Tests';
XWIKI_USERNAME='Admin';
XWIKI_PASSWORD='admin';
# Session cookie jar and per-upload curl output, both in the current dir.
COOKIE_FILE='cookies.txt';
LOG_FILE_NAME='log.txt';
WORKING_DIR=$PWD;

## Parameters
# Program name for usage
PRGNAME=`basename $0`

#################
### Display help
# Print the help text and terminate the script with status 1.
usage() {
  cat <<EOF
Usage: $PRGNAME [OPTIONS]

 This script can do the following steps easily to help you testing XWiki Open Office/Libre Office importer:
 - Attach using POST a single file using a parameter.
 - Attach multiple/batch files using POST with several filtering options.

Options:
 -t Specify the target hostname. If none specified, localhost:8080/xwiki is used. Don't forget to add the port and the path to XWiki
 -s Import a single file
 -b Import desired files from the working dir. Possible values are: word, excel, powerpoint. Default is word. Only when -b is used
 -h Prints this message
EOF
  exit 1
}
# Login into XWiki
# POSTs the form credentials to the XWiki login endpoint and stores the
# session cookie in $COOKIE_FILE for later authenticated requests.
# NOTE(review): username/password travel on the curl command line, so they
# are visible in `ps` output on a shared host.
function LOGIN_TO_XWIKI {
	curl --user-agent 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0' --connect-timeout '36000' --max-time '36000' --cookie-jar $COOKIE_FILE \
	--data "j_username=$XWIKI_USERNAME&j_password=$XWIKI_PASSWORD" $XWIKI_FORM_LOGIN_URL;
	echo "$info Cookie Created/LoggedIn on" $XWIKI_URL;
}
# Destroy the cookie. Further operations will require a new login
# FIX: $COOKIE_FILE is now quoted in both the existence test and rm, and
# `--` stops option parsing, so a path with spaces or a leading dash no
# longer breaks (or misdirects) the removal.
function LOGOUT_FROM_XWIKI {
	if [ -e "$COOKIE_FILE" ];then
		rm -- "$COOKIE_FILE";
	fi
	echo "$info Cookie Destroyed/Logged Out";
}
# POST one office document ($1) to the XWiki office importer using the
# stored session cookie; the wiki page name is the filename without its
# last extension. The server's HTML response is written to $LOG_FILE_NAME,
# scanned for "succeeded.", and reported in green/red.
# NOTE(review): ${CURRENT_FILE%.*} keeps any directory prefix, so a path
# argument yields a page name containing slashes — confirm that is intended.
function UPLOAD_FILE {
	PRINT_CONTEXT $1;
	CURRENT_FILE="$1";
	TARGET_PAGE_NAME="${CURRENT_FILE%.*}";
	curl -s --connect-timeout '36000' --max-time '36000' --cookie $COOKIE_FILE --request POST -F "filePath=@$CURRENT_FILE" -F "targetSpace=$XWIKI_TARGET_SPACE" -F "targetPage=$TARGET_PAGE_NAME" -o $LOG_FILE_NAME $XWIKI_OFFICE_IMPORTER_URL;
	#RESULTS_MESSAGE=$(grep 'class=\"box' $LOG_FILE_NAME | sed -e :a -e 's/<[^>]*>//g;/</N;//ba');
	RESULTS_MESSAGE=$(cat $LOG_FILE_NAME);
	#if [[grep -e "succeeded" "$RESULTS_MESSAGE"]]
	# [[ <string> ]] is true when the grep output is non-empty.
	if [[ `echo $RESULTS_MESSAGE | grep 'succeeded.'` ]]
	then
		# On success, re-extract just the status box text with HTML tags stripped.
		RESULTS_MESSAGE=$(grep 'class=\"box' $LOG_FILE_NAME | sed -e :a -e 's/<[^>]*>//g;/</N;//ba');
		echo "$info$bldgrn Processing $CURRENT_FILE file -> $RESULTS_MESSAGE ${txtrst}";
	else echo "$info$bldred Processing $CURRENT_FILE file -> $RESULTS_MESSAGE ${txtrst}";
	fi
	rm -f $LOG_FILE_NAME;
}
# Used for Debugging the script
# Dumps the current configuration globals plus the file being imported ($1).
# NOTE(review): this also prints XWIKI_PASSWORD in clear text.
function PRINT_CONTEXT {
	echo "Importing file" $1;
	echo "XWIKI_URL" $XWIKI_URL;
	echo "XWIKI_FORM_LOGIN_URL" $XWIKI_FORM_LOGIN_URL;
	echo "XWIKI_OFFICE_IMPORTER_URL" $XWIKI_OFFICE_IMPORTER_URL;
	echo "XWIKI_TARGET_SPACE" $XWIKI_TARGET_SPACE;
	echo "XWIKI_USERNAME" $XWIKI_USERNAME;
	echo "XWIKI_PASSWORD" $XWIKI_PASSWORD;
	echo "COOKIE_FILE" $COOKIE_FILE;
	echo "LOG_FILE_NAME" $LOG_FILE_NAME;
}
# Parse command line arguments
# FIX: the optstring previously used typographic quotes (“t:s:b:h”), which
# made the curly-quote characters themselves part of the option set. Now a
# plain ASCII string: -t/-s/-b take an argument, -h does not.
while getopts "t:s:b:h" OPT; do
	case $OPT in
		t) #Specify the target host
			# Re-derive the login/import endpoints from the new base URL.
			XWIKI_URL=$OPTARG;
			XWIKI_FORM_LOGIN_URL=$XWIKI_URL"/bin/loginsubmit/XWiki/XWikiLogin";
			XWIKI_OFFICE_IMPORTER_URL=$XWIKI_URL"/bin/view/XWiki/OfficeImporterResults";
			;;
		s) # Single file name
			LOGIN_TO_XWIKI
			SINGLE_FILE_NAME=$OPTARG;
			#echo $SINGLE_FILE_NAME;
			UPLOAD_FILE $SINGLE_FILE_NAME;
			#LOGOUT_FROM_XWIKI;
			;;
		b) # Batch import
			LOGIN_TO_XWIKI
			FILTER=$OPTARG;
			echo "$info Using Filter: " $FILTER;
			# Pick the file list by extension; unknown filters leave it empty.
			if [[ $FILTER == "word" ]]; then
				FILE_TYPES=`ls | grep .doc`;
			fi
			if [[ $FILTER == "excel" ]]; then
				FILE_TYPES=`ls | grep .xls`;
			fi
			if [[ $FILTER == "powerpoint" ]]; then
				FILE_TYPES=`ls | grep .ppt`;
			fi
			echo "$info Batch Import starting in directory" $WORKING_DIR
			shopt -s nullglob;
			for f in $FILE_TYPES;
			do
				#echo $f;
				UPLOAD_FILE "$f";
			done
			echo "$info Batch Import ended"
			#LOGOUT_FROM_XWIKI
			;;
		h) # Print Usage Information
			usage
			# Unreachable: usage itself exits with status 1.
			exit 0
			;;
	esac
done
shift $((OPTIND-1))
exit;
| true |
cb195091728355e36deffc784d946f1331e6fd44 | Shell | ZeroKnight/dotfiles | /.config/zsh/modules/z/z.zsh | UTF-8 | 788 | 2.921875 | 3 | [] | no_license | #
#
# A z implementation in Lua
#
# zsh module: wires up z.lua (frecency-based directory jumper).
# ${commands[...]} is zsh's command-path hash; prefer luajit, fall back to lua.
export ZLUA_SCRIPT="$HOME/.local/opt/zlua/z.lua"
export ZLUA_LUAEXE="${commands[luajit]:-$commands[lua]}"
export _ZL_DATA="${XDG_DATA_HOME:-"$HOME/.local/share/zlua"}"
export _ZL_NO_PROMPT_COMMAND=1
export _ZL_EXCLUDE_DIRS='/tmp'
export _ZL_MATCH_MODE=1 # Use enhanced matching
export _ZL_FZF_FLAG="+s -0 -1 --prompt 'z.lua > ' --preview 'exa --oneline --icons --group-directories-first {2}'"

# Record visited directories before each prompt via the precmd hook.
# NOTE(review): _zlua_precmd is expected to be defined by z.lua's init;
# confirm the plugin is initialised before this hook fires.
autoload -Uz add-zsh-hook
add-zsh-hook precmd _zlua_precmd

# ${_ZL_DATA:h} is the zsh "head" (dirname) modifier: ensure the data dir exists.
[[ -d ${_ZL_DATA:h} ]] || mkdir -p ${_ZL_DATA:h}

alias z='zlua'
alias zb='z -b' # Restrict to parents of CWD
alias zz='z -c' # Restrict to subdirectories of CWD

# Select interactively
if (( $+commands[fzf] )); then
  alias zi='z -I'
else
  alias zi='z -i'
fi
alias zbi='zi -b'
alias zzi='zi -c'
a47d9d09d1624727af83b7e1e8dd233c21002d92 | Shell | lixingang/docker_backup | /run_docker.sh | UTF-8 | 709 | 2.796875 | 3 | [] | no_license | #!/bin/bash
CONTAINER_NAME=lixg
SSH_PORT=11000
# Derive the forwarded application ports from the SSH port using shell
# arithmetic; the original spawned an external `expr` process for each.
PORT1=$((SSH_PORT + 1))
PORT2=$((SSH_PORT + 2))
PORT3=$((SSH_PORT + 3))
PORT4=$((SSH_PORT + 4))

# Tear down any previous instance of the container before recreating it.
docker stop "${CONTAINER_NAME}" && docker rm -f "${CONTAINER_NAME}"

# if use --privileged, -e NVIDIA_VISIBLE_DEVICES=0 will not work
docker run -d \
	--restart=always \
	--gpus=all \
	--shm-size=32gb \
	-e DISPLAY="$DISPLAY" \
	-p "${SSH_PORT}:22" \
	-p "${PORT1}:${PORT1}" \
	-p "${PORT2}:${PORT2}" \
	-p "${PORT3}:${PORT3}" \
	-p "${PORT4}:${PORT4}" \
	-v /data/lixg:/root/data \
	-v /data/cpf:/root/cpf \
	-v /data/Public:/root/Public \
	--name "${CONTAINER_NAME}" \
	--hostname "${CONTAINER_NAME}" \
	ssh-dev:11.1-cudnn8-devel-ubuntu18.04
| true |
c14cb43e368c59b8a8962503c85861fb683e6cdb | Shell | kbabioch/docker-chrony | /entrypoint.sh | UTF-8 | 207 | 2.578125 | 3 | [] | no_license | #! /bin/sh
set -e
# NOTE: the original had `set -x # TODO remove` here; the debug tracing
# has been removed as its own TODO requested.

# With no command given, run chronyd in the foreground as PID 1;
# otherwise exec whatever command was passed to the container.
if [ -z "${1:-}" ]; then
  exec /usr/sbin/chronyd -d -F 1 -r -R -s -m
  #-m will fail, use --memory & --memory-swap from docker instead to not be paged out
fi
exec "$@"
| true |
a12d10ebb98964e0eb64f4ea110b43a1cc2702df | Shell | amgando/dragonear-as | /scripts/1.dev-deploy.sh | UTF-8 | 1,600 | 3.40625 | 3 | [] | no_license | #!/usr/bin/env bash
# $CONTRACT / $OWNER identify a previous dev deployment to clean up.
# They may legitimately be unset on a first run, so a missing value only
# produces a warning here.
[ -z "$CONTRACT" ] && echo "Missing \$CONTRACT environment variable"
[ -z "$CONTRACT" ] || echo "Found it! \$CONTRACT is set to [ $CONTRACT ]"
[ -z "$OWNER" ] && echo "Missing \$OWNER environment variable"
[ -z "$OWNER" ] || echo "Found it! \$OWNER is set to [ $OWNER ]"

# Only delete the previous dev account when both variables are present;
# the original ran `near delete $CONTRACT $OWNER` unconditionally and
# unquoted, invoking `near delete` with empty arguments after the
# "Missing ..." warning fired.
if [ -n "$CONTRACT" ] && [ -n "$OWNER" ]; then
  near delete "$CONTRACT" "$OWNER"
fi
rm -rf ./neardev

# exit on first error after this point to avoid redeploying with successful build
set -e

echo
echo ---------------------------------------------------------
echo "Step 1: Build the contract (may take a few seconds)"
echo ---------------------------------------------------------
echo

yarn build:release

echo
echo
echo ---------------------------------------------------------
echo "Step 2: Deploy the contract"
echo
echo "(edit scripts/1.dev-deploy.sh to deploy other contract)"
echo ---------------------------------------------------------
echo

near dev-deploy ./build/release/dragonear.wasm

echo
echo
echo ---------------------------------------------------------
echo "Step 3: Prepare your environment for next steps"
echo
echo "(a) find the contract (account) name in the message above"
echo "    it will look like this: [ Account id: dev-###-### ]"
echo
echo "(b) set an environment variable using this account name"
echo "    see example below (this may not work on Windows)"
echo
echo ---------------------------------------------------------
echo "export CONTRACT=<dev-123-456>"
echo "near call \$CONTRACT init '{\"owner_id\":\"'\$CONTRACT'\"}' --accountId \$CONTRACT"
echo ---------------------------------------------------------
echo

exit 0
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.