blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
321bfffe9bd58a55881d5ecfd70ffa85dd193a78 | Shell | gaocegege/treadmill | /vagrant/scripts/install-tinydns.sh | UTF-8 | 980 | 2.546875 | 3 | [
"Apache-2.0"
] | permissive | #/bin/bash
# Build and install djbdns (tinydns + axfrdns) and its ucspi-tcp dependency
# from source, then (re)register the related services via svc_utils.sh helpers.
SCRIPTDIR=$(cd $(dirname $0) && pwd)
# Provides del_svc/add_svc used at the bottom of this script.
. $SCRIPTDIR/svc_utils.sh
# djbdns installs its *-conf tools here by default.
DNS=/usr/local/bin
# D. J. Bernstein packages conventionally build under /package.
mkdir -p /package
chmod 1755 /package
cd /package
# --- build ucspi-tcp (tcpserver/tcprules), required by axfrdns ---
wget https://cr.yp.to/ucspi-tcp/ucspi-tcp-0.88.tar.gz
gunzip ucspi-tcp-0.88.tar.gz
tar -xf ucspi-tcp-0.88.tar
rm -f ucspi-tcp-0.88.tar
cd ucspi-tcp-0.88
# Modern glibc needs errno.h pulled in explicitly for these old sources.
echo gcc -O2 -include /usr/include/errno.h > conf-cc
make
make setup check
cd /package
# --- build djbdns (tinydns, axfrdns) ---
wget https://cr.yp.to/djbdns/djbdns-1.05.tar.gz
gunzip djbdns-1.05.tar.gz
tar -xf djbdns-1.05.tar
rm -f djbdns-1.05.tar
cd djbdns-1.05
echo gcc -O2 -include /usr/include/errno.h > conf-cc
make
make setup check
# Create service directories: tinydns (UDP DNS) and axfrdns (TCP/zone
# transfers) both bound to 10.10.10.10.
$DNS/tinydns-conf tinydns tinydnslog /etc/tinydns 10.10.10.10
$DNS/axfrdns-conf tinydnstcp tinydnslog /etc/axfrdns /etc/tinydns \
10.10.10.10
# Allow TCP queries but disable zone transfers (AXFR="").
echo ':allow,AXFR=""' > /etc/axfrdns/tcp
cd /etc/axfrdns
# Compile the tcprules access database used by tcpserver.
$DNS/tcprules tcp.cdb tcp.tmp < tcp
# Re-register services: remove any stale entries first, then add fresh ones.
del_svc tinydns
del_svc tinydns-tcp
del_svc zk2fs
del_svc tinydns-app
add_svc tinydns
add_svc tinydns-tcp
add_svc zk2fs
add_svc tinydns-app
| true |
2451fcbfa6dab804da552d38ba1576ae6c736e33 | Shell | user4847378942/Germany-Mortality | /import_csv.sh | UTF-8 | 1,015 | 3.8125 | 4 | [] | no_license | #!/bin/bash
# Import a CSV file into a local MySQL database as table imp_<name>,
# deriving the table name from the file name and the column list from the
# CSV header row. Usage: import_csv.sh <csv-file> <database>
FILE_PATH=$1
DB=$2
FILE=$(basename "$FILE_PATH")
# Strip the .csv extension, then replace characters MySQL identifiers
# cannot contain (space - / +) with underscores. The original
# sed 's/.csv//g' treated '.' as a regex wildcard and could mangle names
# containing e.g. "xcsv"; ${FILE%.csv} removes only a literal suffix.
FILE_NAME=$(printf '%s' "${FILE%.csv}" | sed 's/[ \/+-]/_/g')
# Column list from the header: spaces -> underscores, keep only
# alphanumerics/commas/underscores, lower-case everything.
COLUMNS=$(head -n 1 "$FILE_PATH" | sed 's/ /_/g' | tr -cd '[:alnum:],_' | awk '{print tolower($0)}')
# Drop a trailing CR left by Windows line endings.
COLUMNS=${COLUMNS%$'\r'}
TABLE_NAME="imp_$FILE_NAME"
# Create table sql statement: every column as varchar(20).
IFS=',' read -ra COLUMN <<<"$COLUMNS"
SQL="CREATE TABLE $TABLE_NAME ("
for i in "${COLUMN[@]}"; do
SQL+=" $i varchar(20),"
done
# Drop the trailing comma before closing the column list.
SQL=${SQL%,}
SQL+=" );"
# Prepare DB: create it if needed and replace any previous import table.
mysql -h 127.0.0.1 -u root -e "CREATE DATABASE IF NOT EXISTS $DB;"
mysql -h 127.0.0.1 -u root -e "USE $DB;"
mysql -h 127.0.0.1 -u root -e "DROP TABLE IF EXISTS $TABLE_NAME" "$DB"
mysql -h 127.0.0.1 -u root -e "$SQL" "$DB"
# mysqlimport names the target table after the input file, so import via a
# symlink named <table>.csv next to the original file.
cd "$(dirname "$FILE_PATH")" || exit 1
ln -s "$FILE" "$TABLE_NAME.csv"
# Import data.
mysqlimport --local -h 127.0.0.1 -u root --columns="$COLUMNS" --fields-terminated-by="," --fields-optionally-enclosed-by='"' --ignore-lines=1 "$DB" "$TABLE_NAME.csv"
unlink "$TABLE_NAME.csv"
cd ~- | true |
1778596af4bde7c9112f828eec4c649245795acb | Shell | mittlin/shell-scripts | /renames.sh | UTF-8 | 628 | 3.59375 | 4 | [] | no_license | #!/bin/bash
# Interactive batch renamer: prompts for a directory, a file extension and a
# prefix, then renames every *.<ext> file in that directory to
# <prefix><index>.<ext> (via a temp dir so new names cannot collide with
# not-yet-renamed files).
echo "?---输入批量文件所在目录(如:~/Pictures)(当前目录输入 .即可)---";
read directory;
# Abort if the directory does not exist instead of renaming files in $PWD.
cd "$directory" || exit 1;
echo "?---输入要重命名文件类型和前缀(如:png img_ (区分大小写))[以空格分割]---";
read ext begin;
# -p: do not fail if a leftover temp dir already exists.
mkdir -p temp
echo ">>>开始批量重命名 $directory 下的 $ext 文件>>>>>>>>>>>>>>";
i=0;
for it in *."$ext"; do
# With no matching files the glob stays literal; skip it instead of
# trying to rename a nonexistent "*.<ext>" entry.
[ -e "$it" ] || continue
mv "$it" ./temp/"$begin$i.$ext";
i=$(($i+1));
done
mv ./temp/*."$ext" ./
rm -r temp
echo "===完成$i个文件批量重命名,文件列表如下:";
ls *."$ext"
| true |
b23cb2caf8ab044aa3cadb7bdd4e7729899b012c | Shell | lgq9220/centos-script | /Chrome.sh | UTF-8 | 372 | 2.53125 | 3 | [] | no_license | #!/bin/bash
# Install the Chromium browser on CentOS: enable the EPEL repository (which
# carries the chromium package), install it, then clean the yum cache.
# \033[1;31m = bold red ANSI color, \033[0m resets.
echo -e '\033[1;31m ********************************此脚本自动化安装Chrome浏览器******************************** \033[0m'
echo -e "\033[1;31m 安装EPEL安装源 \033[0m"
yum -y install epel-release
echo -e "\033[1;31m 开始安装Chrome浏览器 \033[0m"
yum -y install chromium
echo -e "\033[1;31m 清除yum安装包 \033[0m"
yum -y clean all
exit | true |
90aaf5cf30a32306aa71d118516a39a0bfbe1c97 | Shell | scottsummers/ambariscripts | /haScripts/hbaseHAinstall.sh | UTF-8 | 1,947 | 3.75 | 4 | [] | no_license | #!/bin/bash
# Install an additional HBase Master on a user-chosen host through the
# Ambari REST API, then optionally restart Ganglia/Nagios and start HBase.
AMBARI_SERVER_HOST="master01"
ambariPort="8080"
cluster="HDPCluster"
username="admin"
PASSWORD="admin"
# Issue a PUT that moves service $service to $state; $action is only used
# in the request context string shown in the Ambari UI. All three variables
# are set by the caller immediately before invoking this function.
curlServices(){
curl --user $username:$PASSWORD \
-i -H "X-Requested-By: ambari" \
-X PUT -d '{"RequestInfo": {"context" :"'$action' '$service' via REST"}, "Body": {"ServiceInfo":{"state":"'$state'"}}}' \
"http://$AMBARI_SERVER_HOST:$ambariPort/api/v1/clusters/$cluster/services/$service"
}
# Interactively add an HBASE_MASTER component to a host and drive the
# install/start sequence.
hbaseInstall(){
read -p "What server is do you want HBase Master installed to? " newHbaseMaster
#Tells Ambari Which Server so install The new HBase Master to
curl -X POST "http://$AMBARI_SERVER_HOST:$ambariPort/api/v1/clusters/$cluster/hosts?Hosts/host_name=$newHbaseMaster" \
-H "X-Requested-By: ambari" \
-u $username:$PASSWORD \
-d '{"host_components" : [{"HostRoles":{"component_name":"HBASE_MASTER"}}] }'
sleep 5
#Installs HBase to the New HBASE Master
service="HBASE"; state='INSTALLED'; action="Installing"; curlServices;
echo "Waiting for service to install on servers. Sleeping for 30"
sleep 30
# Show the component state on the new master so the operator can verify.
curl -u $username:$PASSWORD -X GET "http://$AMBARI_SERVER_HOST:$ambariPort/api/v1/clusters/$cluster/components/$newHbaseMaster/"
read -p "Would you like to start HBase? " yn
case $yn in
y ) echo "Nagios and Ganglia Configuration need to be changed restarting."
# Restart monitoring first (stop = INSTALLED, then STARTED) so they
# pick up the new HBase Master before HBase itself is started.
service="GANGLIA"; state="INSTALLED"; action="Restarting"; curlServices;
service="NAGIOS"; state="INSTALLED"; action="Restarting"; curlServices; sleep 30;
service="GANGLIA"; state="STARTED"; action="Restarting"; curlServices;
service="NAGIOS"; state="STARTED"; action="Restarting"; curlServices; sleep 30;
echo "Ganglia Restarting complete, starting hbase."
service="HBASE"; state="STARTED"; action="Starting"; curlServices;;
n ) echo "Ok, you'll need to start on your own.";;
* ) ;;
esac
}
echo "This script will install HBase HA using Ambari API calls"
hbaseInstall | true |
09b3934fccfe71060f948f87ad72e5d54f05f3e3 | Shell | matthewwoodruff/playground | /bash/examples/input.sh | UTF-8 | 217 | 3.625 | 4 | [] | no_license | #! /bin/bash
# Demonstrates reading a single value and a whitespace-split word array
# from stdin with `read` / `read -a`.
echo "Type your name"
read NAME
echo "Your name is" "$NAME"
echo Type some words
read -a ARRAY
# A bare $ARRAY expands only to element 0; "${ARRAY[@]}" is needed to show
# and iterate every word that was read (the original printed/looped over
# just the first word).
echo "All words will be in an array using the -a flag " "${ARRAY[@]}"
for word in "${ARRAY[@]}"
do
echo "$word"
done
| true |
8a8309f5521a00efdf1fba8c4c285d1b86f30dd0 | Shell | 5l1v3r1/crispy-bash-utilities | /b.sh | UTF-8 | 1,571 | 3.375 | 3 | [
"MIT"
] | permissive | #
# Interactive menu front-end: dispatches to helper scripts (k.sh, H.sh,
# u.sh, r.sh) and provides base64/base32 encode/decode and rsync-over-ssh
# file transfer.
clear
chmod +x b.sh
bash m.sh | lolcat
echo "what you want to choose " |lolcat
echo -e "\e[5;31m 1) Encrypter / Decrypter press 1 "
echo -e "\e[5;32m 2) for random password hashed password generator "
echo -e "\e[5;33m 3) for finding hashes of ecrypted password "
echo -e "\e[5;36m 4) for transferring file through ssh to other machine press 4" |lolcat
echo " 5)for checking updates press 5"| lolcat
echo -e "\e[5;31m 6) press 6 at your own risk !!!\e[0m "
echo " 7) for base 64 encoding hash decrypter press 7" | lolcat
read -p "enter value in (digits) -:" m
if [ "$m" = 1 ]
then
bash k.sh
elif [ "$m" = 2 ]
then
echo "enter any word or name for random hash "
read r
echo "hashed in base64"|lolcat
echo "$r" | base64
echo "hashed in base32" |lolcat
echo "$r" | base32
elif [ "$m" = 3 ]
then
echo "hash finder and decrypter"
bash H.sh
elif [ "$m" = 4 ]
then
echo "example - /root/Desktop/file/"
echo " enter path of your file "
read l
echo "enter the root name of the recievers machine with IP address "
echo "example - /root@192.168.43.31"
# Original used `read -n -p ...`, which is invalid: -n requires a numeric
# argument and swallowed "-p", so the prompt/read failed at runtime.
read -p "[USER@]HOST/" k
echo "enter destination path where file is to be shifted"
echo "example - /home/Desktop/sshbackup"
read -p "here -" b
rsync -av -e ssh "$l" "$k":"$b"
elif [ "$m" = 5 ]
then
bash u.sh
elif [ "$m" = 6 ]
then
# Bail out if the expected directory tree is missing instead of running
# r.sh from the wrong working directory.
cd src/source/new || exit 1
bash r.sh
elif [ "$m" = 7 ]
then
read -p "enter your hash here " o
echo "base64 decoding"
echo "$o" |base64 -d
echo "if your password is in base 32"
echo "$o" | base32 -d
else
echo "error invalid input given exiting !!!"
fi
| true |
5c8c35faa346423c7c6a257d21add4a769bc7be9 | Shell | ghyeon0/fxmark | /bin/dbench-workloads/iscsi/reserve_parallel.sh | UTF-8 | 910 | 3.21875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Run two overlapping dbench iSCSI load instances against the same LUN to
# exercise SCSI reservation handling, then scan both outputs for failures.
PORT=3260
PORTAL=192.0.2.209
LUN=1
TARGET_NAME=foob
DBENCH=../../dbench
echo "Running dbench instance 1"
$DBENCH -B iscsi --iscsi-lun=$LUN --iscsi-port=$PORT --iscsi-target=$TARGET_NAME --iscsi-portal=$PORTAL --warmup=2 -t 25 -c reserve_parallel_1.txt 1 &> output_1.txt &
sleep 2
echo "Running dbench instance 2"
$DBENCH -B iscsi --iscsi-lun=$LUN --iscsi-port=$PORT --iscsi-target=$TARGET_NAME --iscsi-portal=$PORTAL --warmup=1 -t 25 -c reserve_parallel_2.txt 1 &> output_2.txt &
echo "Waiting instances to finish"
# Both runs last ~25s; 60s is a generous upper bound rather than `wait`.
sleep 60
# Exit status of this grep decides pass/fail: a hit means a failure line
# was found in instance 1's output.
cat output_1.txt | egrep failed\|ERROR > /dev/null
if [ $? -eq 0 ]
then
echo "the test Failed..."
echo "Instance 1 result"
echo "-----------------"
cat output_1.txt | egrep failed\|ERROR
echo
echo "Instance 2 result"
echo "-----------------"
cat output_2.txt | egrep failed\|ERROR
else
echo "The tests were a success..."
fi
rm output_1.txt output_2.txt
| true |
a670e5434964af81a6e98afafd06d45c86bd1246 | Shell | judiba/Main | /ConfVPS14.sh | UTF-8 | 5,446 | 3.015625 | 3 | [
"Apache-2.0"
] | permissive | #BY: @judiba
#R3V1V3R VPS
#Data de criação: 30/04/2017
#Nome: ConfVPS14 - PackVPS 2.0"
# Installer for the "VPS Manager" tool set: shows a banner, optionally keeps
# or recreates the user database, installs packages, and downloads the
# per-command management scripts into /bin.
clear
echo -e "\033[01;34mConfVPS14"
echo ""
echo -e "\033[01;36m BY: @judiba
R3V1V3R VPS
Data de Atualização: 18/05/2017
VPS Manager 3.0
Nome: PackVPS 2.0 \033[1;37m"
echo ""
read -p "De enter para continuar..."
clear
tput setaf 8 ; tput setab 5 ; tput bold ; printf '%30s%s%-15s\n' "VPS Manager 3.0" ; tput sgr0
tput setaf 2 ; tput bold ; echo "" ; echo "Este script para uso do VPS Manager APK:" ;
echo "Modificado by:@judiba"
echo ""
tput setaf 3 ; tput bold ; read -n 1 -s -p "Aperte qualquer tecla para continuar..." ; echo "" ; echo "" ; tput sgr0
# If a previous user database exists, let the operator keep it (preserving
# per-user simultaneous-connection limits) or rebuild it later (see the
# bottom of the script); otherwise build it from /etc/passwd (UID >= 500).
if [ -f "/root/usuarios.db" ]
then
tput setaf 6 ; tput bold ; echo ""
echo "Uma base de dados de usuários ('usuarios.db') foi encontrada!"
echo "Deseja mantê-la (preservando o limite de conexões simultâneas dos usuários)"
echo "ou criar uma nova base de dados?"
tput setaf 6 ; tput bold ; echo ""
echo "[1] Manter Base de Dados Atual"
echo "[2] Criar uma Nova Base de Dados"
echo "" ; tput sgr0
read -p "Opção?: " -e -i 1 optiondb
else
awk -F : '$3 >= 500 { print $1 " 1" }' /etc/passwd | grep -v '^nobody' > /root/usuarios.db
fi
echo ""
tput setaf 8 ; tput setab 5 ; tput bold ; echo "" ; echo "Aguarde a configuração automática" ; echo "" ; tput sgr0
sleep 3
# Update the system, remove any scripts from older installs, and install
# the packages the manager scripts depend on.
apt-get update -y
apt-get upgrade -y
rm /bin/criarusuario /bin/expcleaner /bin/sshlimiter /bin/addhost /bin/listar /bin/sshmonitor /bin/ajuda > /dev/null
rm /root/ExpCleaner.sh /root/CriarUsuario.sh /root/sshlimiter.sh > /dev/null
apt-get install squid3 bc screen nano unzip dos2unix wget -y
# Open the ports used by the proxy/SSH tooling when ufw is present.
if [ -f "/usr/sbin/ufw" ] ; then
ufw allow 443/tcp ; ufw allow 80/tcp ; ufw allow 3128/tcp ; ufw allow 8799/tcp ; ufw allow 8080/tcp
fi
# Two download branches: Debian/Ubuntu variants with squid3 vs squid config
# directories get slightly different addhost/delhost scripts; everything
# else is identical.
if [ -d "/etc/squid3/" ]
then
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/addhost.sh -O /bin/addhost
chmod +x /bin/addhost
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/alterarsenha.sh -O /bin/alterarsenha
chmod +x /bin/alterarsenha
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/criarusuario2.sh -O /bin/criarusuario
chmod +x /bin/criarusuario
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/delhost.sh -O /bin/delhost
chmod +x /bin/delhost
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/expcleaner2.sh -O /bin/expcleaner
chmod +x /bin/expcleaner
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/mudardata.sh -O /bin/mudardata
chmod +x /bin/mudardata
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/remover.sh -O /bin/remover
chmod +x /bin/remover
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/sshlimiter2.sh -O /bin/sshlimiter
chmod +x /bin/sshlimiter
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/alterarlimite.sh -O /bin/alterarlimite
chmod +x /bin/alterarlimite
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/ajuda.sh -O /bin/ajuda
chmod +x /bin/ajuda
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/sshmonitor2.sh -O /bin/sshmonitor
chmod +x /bin/sshmonitor
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/userbackup.sh -O /bin/userbackup
chmod +x /bin/userbackup
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/list.sh -O /bin/list
chmod +x /bin/list
fi
if [ -d "/etc/squid/" ]
then
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/2/addhost.sh -O /bin/addhost
chmod +x /bin/addhost
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/alterarsenha.sh -O /bin/alterarsenha
chmod +x /bin/alterarsenha
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/criarusuario2.sh -O /bin/criarusuario
chmod +x /bin/criarusuario
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/2/delhost.sh -O /bin/delhost
chmod +x /bin/delhost
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/expcleaner2.sh -O /bin/expcleaner
chmod +x /bin/expcleaner
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/mudardata.sh -O /bin/mudardata
chmod +x /bin/mudardata
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/remover.sh -O /bin/remover
chmod +x /bin/remover
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/sshlimiter2.sh -O /bin/sshlimiter
chmod +x /bin/sshlimiter
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/alterarlimite.sh -O /bin/alterarlimite
chmod +x /bin/alterarlimite
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/ajuda.sh -O /bin/ajuda
chmod +x /bin/ajuda
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/sshmonitor2.sh -O /bin/sshmonitor
chmod +x /bin/sshmonitor
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/userbackup.sh -O /bin/userbackup
chmod +x /bin/userbackup
wget https://www.vpswebpanels.com/script/vpsmanager/scripts/list.sh -O /bin/list
chmod +x /bin/list
fi
echo ""
tput setaf 8 ; tput setab 5 ; tput bold ; echo "Scripts para gerenciamento de usuário instalados" ; tput sgr0
tput setaf 8 ; tput setab 5 ; tput bold ; echo "Leia a documentação para evitar dúvidas e problemas!" ; tput sgr0
tput setaf 8 ; tput setab 5 ; tput bold ; echo "Para ver os comandos disponíveis use o comando: ajuda" ; tput sgr0
echo ""
# Option 2 chosen above: rebuild the user database from scratch.
if [[ "$optiondb" = '2' ]]; then
awk -F : '$3 >= 500 { print $1 " 1" }' /etc/passwd | grep -v '^nobody' > /root/usuarios.db
fi
exit 1
| true |
baa549c399752f47091a9953cf58e194e8f9d04a | Shell | TetragrammatonHermit/zsh-config | /modules/12_path_compressor.zsh | UTF-8 | 3,707 | 3.609375 | 4 | [
"MIT"
] | permissive | # ================
# path compressors
# ================
# Reduce path to shortest prefixes. Heavily Optimized
# Shorten an absolute path to the shortest per-component prefixes that still
# uniquely identify each directory among its siblings (zsh).
# NOTE(review): relies heavily on zsh parameter-expansion flags; behavior
# notes below are inferred from the visible expansions — confirm with zsh docs.
function minify_path () {
emulate -LR zsh
setopt glob_dots extended_glob
local full_path="/" ppath cur_path dir
local -a revise
local -i matches
# Resolve $1 to an absolute path and substitute $HOME with '~'.
eval "1=\${\${1:A}:gs/${HOME:gs/\//\\\//}/\~}"
for token in ${(s:/:)1}; do
cur_path=${full_path:s/\~/$HOME/}
local -i col=1
local glob="${token[0,1]}"
# All sibling directories at the current level.
cur_path=($cur_path/*(/))
# prune the single dir case
if [[ $#cur_path == 1 ]]; then
ppath+="/"
full_path=${full_path%%(/##)}
full_path+="/$token"
continue
fi
# Grow the prefix one character at a time until it matches at most one
# sibling directory.
while; do
matches=0
revise=()
for fulldir in $cur_path; do
dir=${${fulldir%%/}##*/}
# Honor the caseglob option: (#i) makes the match case-insensitive.
if [[ ! -o caseglob ]]; then
if (( ${#dir##(#i)($glob)} < $#dir )); then
((matches++))
revise+=$fulldir
if ((matches > 1)); then
break
fi
fi
else
if (( ${#dir##($glob)} < $#dir )); then
((matches++))
revise+=$fulldir
if ((matches > 1)); then
break
fi
fi
fi
done
if (( $matches > 1 )); then
# Ambiguous: extend the prefix; stop once the whole token is used.
glob=${token[0,$((col++))]}
(( $col -1 > $#token )) && break
else
break
fi
cur_path=($revise)
done
ppath+="/$glob"
full_path=${full_path%%(/##)}
full_path+="/$token"
done
# Turn a leading "/~" back into "~".
echo ${ppath:s/\/\~/\~/}
}
# take every possible branch on the file system into account
# Refine minify_path's output: for each component (right to left), keep
# trimming trailing characters while the partial glob still matches exactly
# one directory on the file system, yielding the shortest unambiguous form.
function minify_path_full () {
emulate -LR zsh
# setopt caseglob
setopt extended_glob null_glob
local glob
glob=("${(@s:/:)$(minify_path $1)}")
local -i index=$(($#glob - 1))
while ((index >= 1)); do
# Never shorten the '~' home component.
if [[ ${glob[$index]} == "~" ]]; then
break
fi
local old_token=${glob[$index]}
# Keep dropping the last character while the joined glob (with '*'
# between components) still expands to exactly one directory.
while [[ ${#$(eval "echo ${${(j:*/:)glob}:s/*//}*(/)")} == 1 ]]; do
old_token=${glob[$index]}
if [[ ${#glob[$index]} == 0 ]]; then
break
fi
glob[$index]=${glob[$index][0,-2]}
done
# Restore the last still-unambiguous value.
glob[$index]=$old_token
((index--))
done
# An empty join means the root directory.
if [[ ${#${(j:/:)glob}} == 0 ]]; then
echo /
else
echo ${(j:/:)glob}
fi
}
# collapse empty runs too
# Prompt-friendly rendering of minify_path_full: collapse runs of empty
# components ("//.../") into an underlined count, and underline the first
# letter of each remaining component (uses $FX color codes for zsh prompts).
function minify_path_smart () {
# emulate -LR zsh
local cur_path glob
local -i i
cur_path=$(minify_path_full $1)
# Replace the longest runs of consecutive '/' first so shorter runs do
# not clobber them; the run length i becomes the displayed digit.
for ((i=${#cur_path:gs/[^\/]/}; i>1; i--)); do
glob=${(l:$((2*$i))::\/:)}
eval "cur_path=\${cur_path:gs/$glob/\%\{\$FX[underline]\%\}$i\%\{\$FX[no-underline]\%\}}"
done
cur_path=${cur_path:s/\~\//\~}
# Underline the letter after each remaining slash and drop the slash.
for char in {a-zA-Z}; do
eval "cur_path=\${cur_path:gs/\/$char/\%\{\$FX[underline]\%\}$char\%\{\$FX[no-underline]\%\}}"
done
echo $cur_path
}
# find shortest unique fasd prefix. Heavily optimized
function minify_path_fasd () {
zparseopts -D -E a=ALL
# emulate -LR zsh
if [[ $(type fasd) == *function* ]]; then
local dirs index above higher base test
local -i escape i k
1=${${1:A}%/}
dirs=${(nOa)$(fasd)##[0-9.[:space:]]##}
if (( ${+dirs[(r)$1]} )); then
dirs=($(print ${(f)dirs}))
index=${${${dirs[$((${dirs[(i)$1]}+1)),-1]}%/}##*/}
1=$1:t
for ((i=0; i<=$#1+1; i++)); do
for ((k=1; k<=$#1-$i; k++)); do
test=${1[$k,$(($k+$i))]}
if (( ! ${+index[(r)*$test*]} )); then
if [[ $(type $test) == *not* && ! -n ${(P)temp} || -n $ALL ]]; then
echo $test
escape=1
break
fi
fi
done
(( $escape == 1 )) && break
done
else
printf " "
return 1
fi
else
printf " "
fi
}
| true |
f8d449f2316ec7b7b6074f9ef28bfff1d6a13fc2 | Shell | freebsd/freebsd-ports | /sysutils/datadog-agent/files/datadog-dogstatsd.in | UTF-8 | 1,543 | 3.765625 | 4 | [
"BSD-2-Clause"
] | permissive | #!/bin/sh
# PROVIDE: datadog_dogstatsd
# REQUIRE: LOGIN
# KEYWORD: shutdown
#
# FreeBSD rc.d script for the Datadog dogstatsd daemon. %%TOKENS%% are
# substituted by the ports framework at install time.
#
# Add the following lines to /etc/rc.conf.local or /etc/rc.conf
# to enable this service:
#
# datadog_dogstatsd_enable (bool): Set to NO by default.
# Set it to YES to enable Datadog dogstatsd.
# datadog_dogstatsd_user (user): Set user to run Datadog dogstatsd.
# Default is "%%USER%%".
# datadog_dogstatsd_conf (path): Path to Datadog dogstatsd configuration file.
# Default is %%ETCDIR%%/datadog.yaml
. /etc/rc.subr
name=datadog_dogstatsd
rcvar=datadog_dogstatsd_enable
load_rc_config $name
# rc.conf overridables with their defaults.
: ${datadog_dogstatsd_enable:="NO"}
: ${datadog_dogstatsd_user:="%%USER%%"}
: ${datadog_dogstatsd_conf:="%%ETCDIR%%/datadog.yaml"}
command="%%DATADOG_PREFIX%%/dogstatsd"
command_args="-c ${datadog_dogstatsd_conf}"
required_files=%%ETCDIR%%/datadog.yaml
pidfile="%%RUNDIR%%/datadog-dogstatsd.pid"
# Custom start/stop/status handlers instead of rc.subr defaults.
start_cmd="${name}_start start $@"
stop_cmd="${name}_stop stop $@"
status_cmd="${name}_status"
# Report whether the daemon is running; returns 1 when it is not.
datadog_dogstatsd_status()
{
rc_pid=`check_pidfile ${pidfile} ${command}`
if [ -n "${rc_pid}" ]; then
echo "Datadog dogstatsd is running as pid ${rc_pid}."
else
echo "Datadog dogstatsd is not running."
return 1
fi
}
# Launch dogstatsd detached under daemon(8) as the configured user.
datadog_dogstatsd_start() {
/usr/sbin/daemon -f -p ${pidfile} -u ${datadog_dogstatsd_user} ${command} start ${command_args}
return $?
}
# Stop the daemon by the pid recorded in the pidfile.
datadog_dogstatsd_stop() {
rc_pid=`check_pidfile ${pidfile} ${command}`
if [ -n "${rc_pid}" ]; then
kill ${rc_pid}
else
echo "Datadog dogstatsd is not running."
fi
}
run_rc_command "$@"
| true |
ca74c175dd7758f16c4e1fd2a1b20538c6c1e3c6 | Shell | amclees/dotfiles | /install_dotfiles | UTF-8 | 279 | 2.5625 | 3 | [] | no_license | #!/bin/sh
# Symlink each tracked dotfile from this repository into $HOME.
# The repository root is the directory containing this script.
DOTFILES=$(dirname "$(readlink -f "$0")")
echo "$DOTFILES"

# Link every entry under the same name in the home directory.
for entry in git-completion.bash .bashrc .irbrc .config .tmux.conf
do
ln -s "$DOTFILES/$entry" ~/"$entry"
done
40e7c9a3b8c655be2e8d1c406f66d78a2f4e13a3 | Shell | donghaichuan/shell_scripts | /case_test_1.sh | UTF-8 | 553 | 3.609375 | 4 | [] | no_license | #!/bin/env bash
# Nested-case demo: asks for a service name and, for "sshd" with $1=start,
# starts the service and reports whether an sshd process is visible.
read -p "Please input your service name(sshd):" service_name
case "$service_name" in
sshd)
case "$1" in
start)
systemctl start sshd
sshd_state=$(ps -ef |grep sshd |grep -v grep)
# The original compared against a single space (!= " "), which is
# effectively always true; test for a non-empty process listing instead.
if [ -n "$sshd_state" ];then
echo -e "\033[32mService sshd is running\033[0m"
else
echo -e "\033[31mPlease check sshd service state\033[0m"
fi
;;
-h|--help)
echo "this is usage"
;;
*)
echo "-h or --help, get help for this script"
;;
esac
;;
*)
echo -e "\033[31mPlease input correct service name\033[0m"
;;
esac
| true |
ab447bff018f6fcc081c2d34390dc5c15e72fe4c | Shell | TheShellLand/exo | /ubuntu/anon.sh | UTF-8 | 448 | 2.59375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
#w=wlx00c0ca90009e
w=wlp58s0
# Where airodump capture files are written, one pair per band/run.
scans=/root/airodump
if [ ! -d $scans ]; then mkdir $scans; fi
# Randomize the MAC before scanning: NetworkManager must be stopped and the
# interface down for macchanger to take effect.
service network-manager stop && \
ifconfig $w down && \
macchanger -r $w
# Optional: $1 is a wpa_supplicant config to associate with a network first.
if [ ! -z "$1" ]; then
wpa_supplicant -i $w -c "$1"
fi
# Capture on 5 GHz (-b a) then 2.4 GHz (-b g); -M logs manufacturer,
# -U writes uncompressed output, -w sets the timestamped file prefix.
airodump-ng -b a $w -M -U -w $scans/$w-$(date +%FT%T)-5GHz
airodump-ng -b g $w -M -U -w $scans/$w-$(date +%FT%T)-2GHz
# Randomize the MAC again and return the interface to managed mode.
ifconfig $w down && \
macchanger -r $w && \
iwconfig $w mode managed && \
ifconfig $w up
| true |
3e7da0b639b568edd79344a36d85218e9c17a0df | Shell | oscar7692/bash | /cert_scaner.sh | UTF-8 | 1,903 | 2.8125 | 3 | [] | no_license | #!/bin/bash
# -*- ENCODING: UTF-* -*-
# Collect boxdata.txt from a list of burn-in hosts over telnet, append each
# host's data to a file named after the host, scp it to a central server,
# zip the collected files and send a notification mail.
#connection config
cert[0]=mabi015g
cert[1]=mabi016g
cert[2]=mabi017g
cert[3]=mabi018g
cert[4]=mabi019g
cert[5]=mabi01ag
cert[6]=mabi01bg
cert[7]=mabi01cg
cert[8]=mabi01dg
cert[9]=mabi01eg
cert[10]=mabi01fg
cert[11]=mabi01gg
cert[12]=mabi01hg
cert[13]=mabi01ig
cert[14]=mabi01jg
cert[15]=mabi01kg
cert[16]=mabi01lg
cert[17]=mabi01mg
cert[18]=mabi01ng
cert[19]=mabi01og
cert[20]=mabi01pg
cert[21]=mabi01qg
user=burnin
pass=hosepipe
admin=bonsai
flag=0
#*******************************************************************
#cmd to send
# NOTE: only cmd1 is actually referenced below; the loop echoes the other
# commands literally. The original left cmd2/cmd4 unquoted, which tried to
# EXECUTE "boxdata.txt" / the host name with cmdN=ls in the environment
# instead of assigning a string — quoted here so they are plain assignments.
cmd1="cd /home/burnin/testdata"
cmd2="ls boxdata.txt"
cmd3="cat boxdata.txt>>${cert[flag]}"
cmd4="ls ${cert[flag]}"
cmd5="scp /home/burnin/testdata/${cert[flag]} burnin@teft011g.sac.xyratex.com:/home/burnin/normadocs"
#*******************************************************************
#envia los cmd por telnet
# NOTE(review): the loop bound runs a single iteration (flag stays within
# range); presumably it is raised to 22 to cover every host — confirm intent.
for((i=1;i<=1;i++));do
(echo open ${cert[flag]}
sleep 3
echo ${user}
sleep 3
echo ${pass}
sleep 3
echo ${cmd1}
sleep 1
echo ls boxdata.txt
sleep 1
echo "cat boxdata.txt>>${cert[flag]}"
sleep 1
echo ls ${cert[flag]}
sleep 1
echo "scp /home/burnin/testdata/${cert[flag]} burnin@teft011g.sac.xyratex.com:/home/burnin/normadocs"
sleep 4
echo hosepipe
sleep 4
echo rm ${cert[flag]}
sleep 1
)| telnet
flag=$(( $flag + 1 ))
done
#********************************************************************
#compress files
echo "cd /home/burnin/normadocs"
sleep 1
echo "zip cert_scan.zip mabi015g mabi016g mabi017g mabi018g mabi019g mabi01ag mabi01bg mabi01cg mabi01dg mabi01eg mabi01fg mabi01gg mabi01hg mabi01ig mabi01jg mabi01kg mabi01lg mabi01mg mabi01ng mabi01og mabi01pg mabi01qg"
sleep 5
#********************************************************************
#envia mail
./mailexample.sh
#echo "Buen dia Norma ya se copiaron los archivos en un momento seran enviados" | mail -s "Listado de unidades en cert Butser" -c oscar.acosta@seagate.com
sleep 4
| true |
81efe55ec64f8c3b932290dc9dfb39a0ca315171 | Shell | UnixMonky/scripts | /r8152-refresh.sh | UTF-8 | 451 | 3.234375 | 3 | [] | no_license | #!/bin/bash
# originally sourced from: https://askubuntu.com/questions/1029250/ubuntu-18-04-ethernet-disconnected-after-suspend
# goes into /lib/systemd/system-sleep/r8152-refresh:
# systemd sleep hook: reload the r8152 USB-ethernet driver after resume so
# the NIC comes back up. systemd passes $1 = pre|post and $2 = the action.
PROGNAME=$(basename "$0")
state=$1
action=$2
# Log to syslog tagged with this script's name.
function log {
logger -i -t "$PROGNAME" "$*"
}
log "Running $action $state"
# Only act after resume ("post"); remove then re-insert the module.
if [[ $state == post ]]; then
modprobe -r r8152 \
&& log "Removed r8152" \
&& modprobe -i r8152 \
&& log "Inserted r8152"
fi
2a5e3d45ef7b16ab578f3b7cb0f44b1eda317181 | Shell | yanfr0818/Condor-IDDS | /test/massSubmitWrapper.sh | UTF-8 | 623 | 2.640625 | 3 | [] | no_license | #!/bin/sh
# Submit one HTCondor job per line of three parallel list files: line N of
# listdir/listcms/listarch supplies the directory, CMSSW release and
# SCRAM_ARCH for job N. Output/error/log files are indexed by line number.
for index in {1..973}
do
# sed -n "Np" prints only line N of each list file.
dir=$(sed -n "${index}p" /home/bloomcap3/listdir.txt)
cms=$(sed -n "${index}p" /home/bloomcap3/listcms.txt)
arch=$(sed -n "${index}p" /home/bloomcap3/listarch.txt)
echo "The variables: ${dir} ${cms} ${arch}"
condor_submit massWrapper.sub -append "Arguments = ${dir} ${cms} ${arch}" "transfer_input_files = /home/bloomcap3/$dir, /home/bloomcap3/$dir/PSetDump.py, /home/bloom/yanfr0818/IDDS/condor/psetB.py, /home/bloom/yanfr0818/IDDS/condor/psetEditWrapper.py" "Error = massWrapper/err.$index" "Output = massWrapper/out.$index" "Log = massWrapper/log.$index"
done
| true |
f5eba88aed77ab1606cf446eb349aaf865391453 | Shell | CU-CommunityApps/aws-examples | /cloudformation/iam/assume-role-example/assume-role.sh | UTF-8 | 1,143 | 3.53125 | 4 | [] | no_license | #!/bin/bash
# Example scrip that assumes a role in another account.
# Calls STS AssumeRole, extracts the temporary credentials with jq, and uses
# them to run one AWS CLI command in the target account.
# NOTE(review): both .tmp files persist on disk and contain live temporary
# credentials; consider removing them (and tightening permissions) after use.
ASSUME_ROLE="arn:aws:iam::999999999999:role/shib-dba"
ROLE_SESSION_NAME="mysession"
TMP_FILE="assume-role-output.tmp"
# 900s is the minimum AssumeRole session duration.
DURATION_SECONDS=900
AWS_REGION="us-east-1"
echo "Assume role: ${ASSUME_ROLE}"
aws sts assume-role --output json --duration-seconds $DURATION_SECONDS --role-arn ${ASSUME_ROLE} --role-session-name ${ROLE_SESSION_NAME} > ${TMP_FILE}
echo "Assume role response:"
cat ${TMP_FILE}
# Pull the temporary credential triple out of the JSON response.
ACCESS_KEY_ID=$(jq -r ".Credentials.AccessKeyId" < ${TMP_FILE})
SECRET_ACCESS_KEY=$(jq -r ".Credentials.SecretAccessKey" < ${TMP_FILE})
SESSION_TOKEN=$(jq -r ".Credentials.SessionToken" < ${TMP_FILE})
EXPIRATION=$(jq -r ".Credentials.Expiration" < ${TMP_FILE})
# NOTE(review): message references ${ACCESS_KEY}, which is never set (the
# variable is ACCESS_KEY_ID), so it prints empty — presumably a typo.
echo "Retrieved temp access key ${ACCESS_KEY} for role ${ASSUME_ROLE}. Key will expire at ${EXPIRATION}."
CMD="aws rds describe-db-instances --region us-east-1"
echo "AWS CLI command: $CMD"
OUTPUT_FILE="run-command-output.tmp"
# Run the command with the temporary credentials injected via environment.
AWS_ACCESS_KEY_ID=${ACCESS_KEY_ID} AWS_SECRET_ACCESS_KEY=${SECRET_ACCESS_KEY} AWS_SESSION_TOKEN=${SESSION_TOKEN} \
${CMD} > ${OUTPUT_FILE}
echo "Command output:"
cat ${OUTPUT_FILE}
| true |
782b42a063673f871e4c7c898c5edcfa0c01b7c1 | Shell | UCDenver-ccp/ccp-nlp-pipelines | /scripts/runner/pipelines/concept-mapper/download-ontologies.sh | UTF-8 | 1,173 | 3.859375 | 4 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
# Download a fixed set of OBO ontologies into TARGET_DOWNLOAD_DIR, logging
# progress to LOG_FILE; exits non-zero if any download failed.
EXPECTED_ARGS=2
if [ $# -ne $EXPECTED_ARGS ]
then
echo "#NOTE: due to CD'ing in script use absolute file names!!"
echo "Usage: LOG_FILE TARGET_DOWNLOAD_DIR"
echo "current usage:"
echo "$@"
exit 1
fi
LOG_FILE=$1
TARGET_DIR=$2
URLS="http://purl.obolibrary.org/obo/chebi.owl
http://purl.obolibrary.org/obo/cl.owl
http://purl.obolibrary.org/obo/doid.owl
http://purl.obolibrary.org/obo/ncbitaxon.owl
http://purl.obolibrary.org/obo/pr.owl
http://purl.obolibrary.org/obo/so.owl
http://purl.obolibrary.org/obo/uberon/ext.owl
http://purl.obolibrary.org/obo/go.owl"
mkdir -p "$TARGET_DIR"
cd "$TARGET_DIR" || exit 1
#append forwardslash to target directory if it doesn't end in a slash already
case "$TARGET_DIR" in
*/)
;;
*)
TARGET_DIR="$TARGET_DIR/"
;;
esac
#verify the log file
touch "$LOG_FILE"
exit_code=0
for url in $URLS
do
echo "downloading $url"
# Was `echo "downloading $URL"` — $URL is undefined (case typo), which
# printed an empty name; record the real URL in the log instead.
echo "downloading $url" >> "$LOG_FILE"
date | tee -a "$LOG_FILE"
curl --remote-name --write-out "file: %{filename_effective} final-url: %{url_effective} size: %{size_download} time: %{time_total} final-time: " -L "$url" | tee -a "$LOG_FILE"
# $? after a pipeline is tee's status; PIPESTATUS[0] is curl's, which is
# the one that indicates a failed download.
e=${PIPESTATUS[0]}
if [ $e -ne 0 ]; then
exit_code=$e
fi
done
exit $exit_code | true |
70dd909bd5b32a0714f9aade76239fcd1aa3aa08 | Shell | ADAS-study-group/adas-study-group.github.io | /tools/build.sh | UTF-8 | 2,126 | 4.28125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
#
# Build jekyll site and store site files in ./_site
# v2.0
# https://github.com/cotes2020/jekyll-theme-chirpy
# © 2019 Cotes Chung
# Published under MIT License
set -eu
# Jekyll production build command; -b/-d options are appended below.
CMD="JEKYLL_ENV=production bundle exec jekyll b"
# Repository root = parent of the directory containing this script.
WORK_DIR="$(dirname "$(dirname "$(realpath "$0")")")"
# Throwaway copy of the repo the build actually runs in.
CONTAINER="${WORK_DIR}/.container"
DEST="${WORK_DIR}/_site"
# Print usage information.
_help() {
echo "Usage:"
echo
echo " bash build.sh [options]"
echo
echo "Options:"
echo " -b, --baseurl <URL> The site relative url that start with slash, e.g. '/project'"
echo " -h, --help Print the help information"
echo " -d, --destination <DIR> Destination directory (defaults to ./_site)"
}
# Clean previous artifacts and copy the working tree (incl. .git) into a
# fresh container directory so the build never touches the checkout.
_init() {
cd "$WORK_DIR"
if [[ -d "$CONTAINER" ]]; then
rm -rf "$CONTAINER"
fi
if [[ -d "_site" ]]; then
jekyll clean
fi
local _temp="$(mktemp -d)"
cp -r ./* "$_temp"
cp -r ./.git "$_temp"
mv "$_temp" "$CONTAINER"
}
# Run pre-build scripts and the Jekyll build inside the container; if the
# destination is itself a git repo, auto-commit the regenerated site.
_build() {
cd "$CONTAINER"
echo "$ cd $(pwd)"
bash "_scripts/sh/create_pages.sh"
bash "_scripts/sh/dump_lastmod.sh"
CMD+=" -d $DEST"
echo "\$ $CMD"
eval "$CMD"
echo -e "\nBuild success, the site files have been placed in '${DEST}'."
if [[ -d "${DEST}/.git" ]]; then
if [[ ! -z $(git -C "$DEST" status -s) ]]; then
git -C "$DEST" add .
git -C "$DEST" commit -m "[Automation] Update site files." -q
echo -e "\nPlease push the changes of $DEST to remote master branch.\n"
fi
fi
cd .. && rm -rf "$CONTAINER"
}
# Exit with usage if an option that requires a value got none.
_check_unset() {
if [[ -z ${1:+unset} ]]
then
_help
exit 1
fi
}
# Parse options, then stage and build.
main() {
while [[ $# -gt 0 ]]
do
opt="$1"
case $opt in
-b|--baseurl)
local _baseurl="$2"
# An explicitly empty baseurl must be passed to jekyll as "".
if [[ -z "$_baseurl" ]]; then
_baseurl='""'
fi
CMD+=" -b $_baseurl"
shift
shift
;;
-d|--destination)
_check_unset "$2"
DEST="$(realpath "$2")"
shift;
shift;
;;
-h|--help)
_help
exit 0
;;
*) # unknown option
_help
exit 1
;;
esac
done
_init
_build
}
main "$@"
| true |
0757971873d283b99df8cfb242a939a40db17ec8 | Shell | dkoudlo/envoy | /tools/api/clone.sh | UTF-8 | 2,031 | 4.21875 | 4 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
# Simple script to clone vM to vN API, performing sed-style heuristic fixup of
# build paths and package references.
#
# Usage:
#
# ./tools/api/clone.sh v2 v3
set -e
declare -r OLD_VERSION="$1"
declare -r NEW_VERSION="$2"
# For vM -> vN, replace //$1*/vMalpha\d* with //$1*/vN in BUILD file $2
# For vM -> vN, replace //$1*/vM with //$1*/vN in BUILD file $2
function replace_build() {
sed -i -e "s#\(//$1[^\S]*\)/${OLD_VERSION}alpha[[:digit:]]*#\1/${NEW_VERSION}#g" "$2"
sed -i -e "s#\(//$1[^\S]*\)/${OLD_VERSION}#\1/${NEW_VERSION}#g" "$2"
}
# For vM -> vN, replace $1*[./]vMalpha with $1*[./]vN in .proto file $2
# For vM -> vN, replace $1*[./]vM with $1*[./]vN in .proto file $2
function replace_proto() {
sed -i -e "s#\($1\S*[\./]\)${OLD_VERSION}alpha[[:digit:]]*#\1${NEW_VERSION}#g" "$2"
sed -i -e "s#\($1\S*[\./]\)${OLD_VERSION}#\1${NEW_VERSION}#g" "$2"
}
# We consider both {vM, vMalpha} to deal with the multiple possible combinations
# of {vM, vMalpha} existence for a given package.
for p in $(find api/ -name "${OLD_VERSION}*")
do
declare PACKAGE_ROOT="$(dirname "$p")"
declare OLD_VERSION_ROOT="${PACKAGE_ROOT}/${OLD_VERSION}"
declare NEW_VERSION_ROOT="${PACKAGE_ROOT}/${NEW_VERSION}"
# Deal with the situation where there is both vM and vMalpha, we only want vM.
if [[ -a "${OLD_VERSION_ROOT}" && "$p" != "${OLD_VERSION_ROOT}" ]]
then
continue
fi
# Copy BUILD and .protos across
rsync -a "${p}"/ "${NEW_VERSION_ROOT}/"
# Update BUILD files with vM -> vN
for b in $(find "${NEW_VERSION_ROOT}" -name BUILD)
do
replace_build envoy "$b"
# Misc. cleanup for go BUILD rules
sed -i -e "s#\"${OLD_VERSION}\"#\"${NEW_VERSION}\"#g" "$b"
done
# Update .proto files with vM -> vN
for f in $(find "${NEW_VERSION_ROOT}" -name "*.proto")
do
replace_proto envoy "$f"
replace_proto api "$f"
replace_proto service "$f"
replace_proto common "$f"
replace_proto config "$f"
replace_proto filter "$f"
replace_proto "" "$f"
done
done
| true |
526857c1f2b7dd696e8b7b1b24dccf40699a6f3a | Shell | RyanCargan/ROSCoq | /scripts/rosInstall.sh | UTF-8 | 1,423 | 2.71875 | 3 | [] | no_license | set -e
# Install ROS Indigo on Ubuntu 14.04 (trusty) from the OSRF apt repository,
# then (optionally) set up TurtleBot packages for a physical iRobot Create.
sudo sh -c 'echo "deb http://packages.ros.org/ros/ubuntu trusty main" > /etc/apt/sources.list.d/ros-latest.list'
# Trust the ROS package signing key.
wget https://raw.githubusercontent.com/ros/rosdistro/master/ros.key -O - | sudo apt-key add -
sudo apt-get update
sudo apt-get install ros-indigo-desktop-full
sudo rosdep init
rosdep update
# Source the ROS environment in every new shell.
echo "source /opt/ros/indigo/setup.bash" >> ~/.bashrc
source ~/.bashrc
# Skip the parts below if you do not have an actual iRobot Create.
# Below, we assume that the iRobot create device is connected to the computer at the port /dev/ttyUSB0.
# On your machine, it might be /dev/ttyUSB1 or /dev/ttyUSB2 ... (especially if you have multiple USB devices connected).
# One way to find out is to see the difference between the output of ls /dev/ttyUSB* before and after connecting the iRobot Create via USB.
# If needed, replace the two occurrences of /dev/ttyUSB0 below.
sudo apt-get install ros-indigo-turtlebot ros-indigo-turtlebot-bringup
# dialout group membership is required to open the serial port without sudo.
sudo adduser $USER dialout
echo "export TURTLEBOT_BASE=create" >> ~/.bashrc
echo "export TURTLEBOT_SERIAL_PORT=/dev/ttyUSB0" >> ~/.bashrc
echo "export TURTLEBOT_STACKS=circles" >> ~/.bashrc
sudo chmod a+rw /dev/ttyUSB0
source ~/.bashrc
#roscore
#roslaunch turtlebot_bringup minimal.launch
#rostopic pub -1 /cmd_vel_mux/input/navi geometry_msgs/Twist '[0.1, 0, 0]' '[0, 0, 0]' && rostopic pub -1 /cmd_vel_mux/input/navi geometry_msgs/Twist -- '[0, 0, 0]' '[0, 0, 0]'
| true |
7f8e4a45510ca2307c555b2280db3f3d3a058080 | Shell | idorax/vCodeHub | /sharpsword/bash/imod.sh | UTF-8 | 1,269 | 4.0625 | 4 | [] | no_license | #!/bin/bash
#
# Copyright (C) 2014, 2020, Vector Li (idorax@126.com). All rights reserved.
#

#
# Strip trailing whitespace (' ' or '\t' mixed) from every file named on
# the command line, keeping a per-day backup and showing a diff of what
# changed.  Equivalent manual operations:
#	o sed: sed -i "s/[[:space:]]*$//g" <file>
#	o vim: %s/\s\+$//g
#

NAME=$(basename "$0")

S_CAT=cat
S_SED=sed
S_DOS2UNIX=/usr/bin/dos2unix
if [[ ! -x $S_DOS2UNIX ]]; then
	echo "Oops, $S_DOS2UNIX not found" >&2
	exit 1
fi

# Backups land in a per-day directory, e.g. /tmp/.imod.sh/0131
dir2backup=/tmp/.$NAME/$(date +"%m%d")
[[ ! -d $dir2backup ]] && mkdir -p "$dir2backup"

# "$@" is quoted so file names containing whitespace survive intact.
for file in "$@"; do
	[[ ! -f $file ]] && continue

	fbk=$dir2backup/$(basename "$file").backup
	fr1=$dir2backup/$(basename "$file").cat1
	fr2=$dir2backup/$(basename "$file").cat2

	# Fixed printf formats: a '%' or '\' in a file name is now printed
	# literally instead of being interpreted as part of the format.
	printf 'Start to deal with file %s\n' "$file"

	printf ' 1# backup %s to %s\n' "$file" "$fbk"
	cp "$file" "$fbk"

	printf ' # dos2unix %s\n' "$file"
	$S_DOS2UNIX "$file"

	printf ' 2# save %s via %s -A as %s\n' "$file" "$S_CAT" "$fr1"
	$S_CAT -A "$file" > "$fr1"

	printf " 3# delete lines endswith spaces (' ' or '\t' mixed)\n"
	$S_SED -i "s/[[:space:]]*$//g" "$file"

	printf ' 4# save %s via gcat -A again as %s\n' "$file" "$fr2"
	$S_CAT -A "$file" > "$fr2"

	# Show the before/after difference, indented by one space.
	printf ' 5# diff %s %s\n' "$fr1" "$fr2"
	if [[ -x ~/bin/colordiff ]]; then
		diff=~/bin/colordiff
	else
		diff=diff
	fi
	$diff -u "$fr1" "$fr2" | $S_SED 's/^/ /g'
	printf '\n'
done

# The original ran 'rm -f $f_dos2unix' here, but $f_dos2unix is never
# defined anywhere in this script, so the command was a no-op and has
# been removed.
exit 0
| true |
9265e0104d30f64715721cfdcb414a8a0c4518c4 | Shell | dsemenov87/SampleGitlabDebianDeployment | /docker/scripts/modul_unit_tests.sh | UTF-8 | 158 | 2.625 | 3 | [] | no_license | #!/bin/bash
set -e

# Restore dependencies and run the unit tests for every *.UnitTests
# project found under /src/test.
for proj in /src/test/*.UnitTests/*.*sproj; do
    # An unmatched glob expands to the literal pattern; -f filters it out.
    # Paths are quoted so project names with spaces are handled correctly.
    if [ -f "$proj" ]; then
        modul_restore.sh "$proj"
        dotnet test "$proj" --no-restore
    fi
done
| true |
9e9d21ff2f4e51629cdea036549050fc6c19a761 | Shell | nantoniazzi/unix-files-techio | /work/validate1.sh | UTF-8 | 288 | 3.140625 | 3 | [] | no_license | # { autofold
#!/bin/bash
# Block until the given file exists, then print the Tech.io success
# marker ("TECHIO> success true") exactly once.
function checkFileExists {
  FILE=$1
  # Poll every 100 ms until the file appears.  'until' replaces the
  # original FINISHED-flag loop and avoids the extra sleep the original
  # performed after the file had already been found.
  until [ -e "$FILE" ]; do
    sleep 0.1
  done
  echo "TECHIO> success true"
}
# Announce terminal mode via the TECHIO> control message, then block
# until the expected file is created by the learner.
echo "TECHIO> terminal"
checkFileExists "/foo/bar/valid_file"
# }
| true |
256452b081becf3f31640e4b872a9c5ecc7e1614 | Shell | antontest/tools | /shell/backup/shini | UTF-8 | 4,862 | 3.90625 | 4 | [] | no_license | #! /bin/bash
# Name : shini
# Func :
# Time : 2015-08-19 19:50:33
# Globals filled in by parser_args and consumed by the worker functions.
ini_name=""
app_name=""
key_name=""
key_value=""
# Action flags: parser_args sets exactly one of these to 1.
get_flag=0
set_flag=0
rm_flag=0
all_get_flag=0
all_rm_flag=0 # NOTE(review): never set or tested below -- looks unused; confirm.
add_flag=0
# Print the command-line usage text to stdout, framed by banner lines in
# bright red (ANSI escapes).  Takes no arguments; callers exit afterwards.
function print_usage()
{
    printf '\033[1;31m#-------------------Shell Usage------------------#\n'
    printf '%s\n' \
        "Usage : $(basename $0) [options] ini_name" \
        "Options: " \
        " -h,--help Show usage of shell" \
        " -g,--get app_name key_name get value" \
        " -s,--set app_name key_name set value" \
        " -r,--rm app_name key_name rm ini info" \
        " -a,--add app_name key_name key_value add ini info" \
        " --all-get app_name show ini value of a section"
    printf '#-------------------Shell Usage-------------------#\033[0m\n'
}
# \brief get paramters from cmd line
# \param $@ [in] paramters from cmd line
# \return 0, if succ
#         -1, if fail
function parser_args()
{
    TEMP=`getopt -o hig:s:r:a: --long help,ignore,get:,set:,add:,all-get,rm: \
        -n 'shini' -- "$@"`
    if [ $? != 0 ] ; then echo "Terminating..." >&2 ; exit 1 ; fi

    # Note the quotes around `$TEMP': they are essential!
    # 'set' would reorder the positional parameters ($1, $2 ... $n) to
    # the normalized order produced by getopt above.
    #eval set -- "$TEMP"

    # Handle the individual options below.
    # NOTE(review): with the 'eval set' line commented out, this loop
    # parses the raw "$@" rather than getopt's normalized output, so
    # options are only recognized in their original order.
    while true ; do
        case "$1" in
            -h|--help) print_usage $@ ; exit 1;;
            -g|--get) get_flag=1 ; app_name=$2 ; key_name=$3 ; shift 3 ;;
            -s|--set) set_flag=1 ; app_name=$2 ; key_name=$3 ; key_value=$4 ; shift 4 ;;
            -r|--rm) rm_flag=1 ; app_name=$2 ; key_name=$3 ; shift 3 ;;
            --all-get) all_get_flag=1 ; app_name=$2 ; shift 2 ;;
            -a|--add) add_flag=1 ; app_name=$2 ; key_name=$3 ; key_value=$4 ; shift 4 ;;
            --) shift ; break ;;
            *) break ;;
        esac
    done

    # The remaining (non-option) argument is taken as the ini file name;
    # when several remain, the last one wins.
    for arg in $@ ; do
        ini_name="$arg"
    done

    return 0
}
function get_value()
{
    # Print the value of $key_name inside section [$app_name] of
    # $ini_name.  On a missing or unreadable ini file, print a red error
    # message and abort the script.
    #
    # Fixed: the original chained 'test && sed || (error) && exit 1',
    # which executed 'exit 1' on the success path as well, so the script
    # always terminated non-zero even after printing the value.
    if [[ ! -r $ini_name ]]; then
        if [[ -e $ini_name ]]; then
            echo -e "\033[1;31m$ini_name has no read permission!\033[0m"
        else
            echo -e "\033[1;31m$ini_name is not exist!\033[0m"
        fi
        exit 1
    fi
    sed -n "/^\[$app_name\]/,/^\[/s/^$key_name\ *[=:]\ *//pg" "$ini_name"

    return 0
}
function get_all_value()
{
    # Print the value of every "key = value" line in section [$app_name]
    # of $ini_name.  On a missing or unreadable ini file, print a red
    # error message and abort the script.
    #
    # Fixed: like get_value, the original executed 'exit 1' on the
    # success path too, so the script always ended with a non-zero
    # status.
    if [[ ! -r $ini_name ]]; then
        if [[ -e $ini_name ]]; then
            echo -e "\033[1;31m$ini_name has no read permission!\033[0m"
        else
            echo -e "\033[1;31m$ini_name is not exist!\033[0m"
        fi
        exit 1
    fi
    sed -n "/^\[$app_name\]/,/^\[/s/.*[=:]\ *//pg" "$ini_name"

    return 0
}
function set_value()
{
    # Rewrite the value of $key_name inside section [$app_name] in place
    # (the separator '=' or ':' found on the line is preserved via \1).
    # If the file is writable, edit it; otherwise report why.
    test -w $ini_name &&
    sed -i "/^\[$app_name\]/,/^\[/s/^$key_name\ *\([=:]\)\ *.*$/$key_name\1$key_value/" $ini_name ||
    # NOTE(review): the 'exit 1' below sits inside the (...) subshell, so
    # it only terminates the subshell -- the function still falls through
    # to 'return 0' after printing the error.  Confirm whether the whole
    # script should abort here instead.
    (test -e $ini_name && \
    echo -e "\033[1;31m$ini_name has no write permission!\033[0m" || \
    echo -e "\033[1;31m$ini_name is not exist!\033[0m" && exit 1)

    return 0
}
function add_value()
{
    # Append "$key_name=$key_value" to section [$app_name].
    # First refuse to add a key that already exists in the section.
    sed -n "/^\[$app_name\]/,/^\[/p" $ini_name |
    grep "^$key_name\ *=" > /dev/null &&
    echo -e "\033[1;31mAlready exist!\033[0m" &&
    exit 1

    # Compute the insertion point: number the non-blank lines of the
    # section ('=' prints line numbers), drop the last one, and keep the
    # last remaining number.
    num=`sed -n "/^\[$app_name\]/,/^\[/{/^$/d;=}" $ini_name | sed '$d' | sed -n '$p'`

    # If the file is writable, append the new line after line $num;
    # otherwise report why.
    # NOTE(review): as in set_value, the 'exit 1' below only exits the
    # (...) subshell, so the function still returns 0 on failure.
    test -w $ini_name &&
    sed -i "$num a $key_name=$key_value" $ini_name ||
    (test -e $ini_name && \
    echo -e "\033[1;31m$ini_name has no write permission!\033[0m" || \
    echo -e "\033[1;31m$ini_name is not exist!\033[0m" && exit 1)

    return 0
}
function rm_value()
{
    # Delete the "$key_name = ..." line from section [$app_name].
    # If the file is writable, edit it in place; otherwise report why.
    test -w $ini_name &&
    sed -i "/^\[$app_name\]/,/^\[/{/^$key_name\ *\([=:]\)\ *.*$/d}" $ini_name ||
    # NOTE(review): 'exit 1' below only exits the (...) subshell (see
    # set_value); the function still returns 0 afterwards.
    (test -e $ini_name && \
    echo -e "\033[1;31m$ini_name has no write permission!\033[0m" || \
    echo -e "\033[1;31m$ini_name is not exist!\033[0m" && exit 1)

    return 0
}
# Parse the command line into the global flags/arguments above.
parser_args $@

# An ini file name is mandatory.
test -z "$ini_name" &&
    echo -e "\033[1;31mPlease input ini file name!\033[0m" &&
    exit 1

# Dispatch to exactly one action; each helper prints its result and the
# script exits 0 when the helper reports success.
test $get_flag -eq 1 && get_value && exit 0
test $all_get_flag -eq 1 && get_all_value && exit 0
test $set_flag -eq 1 && set_value && exit 0
test $add_flag -eq 1 && add_value && exit 0
test $rm_flag -eq 1 && rm_value && exit 0

# No action requested (or the requested action reported failure).
exit 1
| true |
76c98417211e2808fb671670bcd77af35a5ac453 | Shell | chamzheng/blaster | /run_result.sh | UTF-8 | 1,446 | 3.15625 | 3 | [] | no_license | #!/usr/bin/env bash
# Aggregate per-sample report rows from ./blaster_requests/*/report.csv
# into finished/result.txt ("|"-separated) and zip the finished/ tree.
touch finished/result.txt
echo "Sample Name|Scientific Name|Ratio|Accession|Type" >> finished/result.txt
for file in ./blaster_requests/*
do
    #echo $file
    # Original upload name of this sample, stored by the request handler.
    org_filename=`cat $file/filename`
    #echo $org_filename
    # Strip directory part and extension to get the bare sample name.
    singlename=${org_filename##*/}
    filename=${singlename%.*}
    # Columns 2,7,9 of the report, sorted descending on column 2; rows
    # 2-4 of that output (head 4 | tail 3) are kept.
    awk-csv-parser --output-separator='|' $file/report.csv | cut -d'|' -f2,7,9 | sort -r -k2 | sed '/^$/d' |head -n 4 | tail -n 3 > finished/output
    # echo ${p_output//|/,} | sed "s/^/$filename,/g"
    # Prefix each row with the sample name and tag the metric type.
    sed -i "s/^/$filename|/g" finished/output
    sed -i "s/$/|Per. ident/g" finished/output
    cat finished/output >> finished/result.txt
    rm finished/output
    # Same again for columns 2,5,9, tagged "Query Cover".
    awk-csv-parser --output-separator='|' $file/report.csv | cut -d'|' -f2,5,9 | sort -r -k2 | sed '/^$/d' |head -n 4 | tail -n 3 > finished/output
    # echo ${p_output//|/,} | sed "s/^/$filename,/g"
    sed -i "s/^/$filename|/g" finished/output
    sed -i "s/$/|Query Cover/g" finished/output
    cat finished/output >> finished/result.txt
    rm finished/output
    # echo ${p_output//|/,} | sed "s/^/$filename,/g" | sed "s/$/,P/g" >> finished/result.csv
    ## echo -n '#' > finished/$filename.txt
    ## echo -n $filename'_' >> finished/$filename.txt
    ## head -n 2 $file/report.csv | tail -n 1 | cut -d',' -f3 >> finished/$filename.txt
    ## cat $file/sequence.fasta >> finished/$filename.txt
done
# Package everything that was produced.
cd finished
zip -q -r ../result.zip *
cd ..
#mv result.zip ~/
| true |
5d4f957a1a09ff5f1227415676abf9e3a28ee4be | Shell | eikenb/bin.d | /files/mkswap | UTF-8 | 1,316 | 4.3125 | 4 | [] | no_license | #!/bin/sh
# automated notes on how to make a swap file
#
# Cheap RAM + SSDs == no need for swap partitions in many cases.
# Keep this around to add some swap when I think I need it.

# ANSI escape sequences used by usage(): red for error text, noc resets.
noc="\033[0m"
red="\033[0;31m"
# Print an optional error message (in red) plus the usage text, then
# exit non-zero.
usage () {
	# Use %b for the color escapes and %s for the message so a message
	# containing '%' or '\' is printed verbatim instead of being
	# interpreted as part of the printf format string.
	[ -n "$1" ] && printf '%b%s%b\n' "$red" "$1" "$noc"
	name=$(basename "$0")
	echo "Usage: $name SWAPFILE SIZE_IN_MB"
	echo "(Example: $name /var/swap 1024)"
	exit 1
}
# run as root
[ $(id -u) -eq 0 ] || usage "Run as root"

# argument checking
[ $# -eq 2 ] || usage "Not enough arguments"

swapfile="$1"
[ -n "$swapfile" ] || usage
#[ "${swapfile%%/*}" = "" ] || usage "SWAPFILE must be full path"
[ -e "$swapfile" ] && usage "$swapfile already exists!"

# make sure we can create the file and set it up right
touch $swapfile
chown root:root ${swapfile}
chmod 0600 ${swapfile}

# get true full path
swapfile=$(readlink -e $swapfile)

# block-count; ie. 1024 * 512 for 512M swapfile
size_in_mb="$2"
# NOTE(review): the redirection inside [ ] silences test's error message
# when $size_in_mb is not numeric; the test then fails and usage() runs.
[ $size_in_mb -ge 0 2> /dev/null ] \
	|| usage "$size_in_mb is not a positive integer"
block_count=$(( $size_in_mb * 1024 ))

# do it: allocate, format and enable the swap file
dd if=/dev/zero of=${swapfile} bs=1024 count=${block_count}
mkswap ${swapfile}
swapon ${swapfile}

# reminder about fstab
echo "Add this to /etc/fstab if needed."
echo "# swap file created on" $(date +"%F")
echo "$swapfile none swap sw 0 0"
| true |
b3951466a6dc6229a3e430140dcb1a2403413a66 | Shell | JeffersonLab/build_scripts | /gluex_env_version.sh | UTF-8 | 1,215 | 3.734375 | 4 | [] | no_license | #!/bin/sh
# Set up the GlueX environment for a specific set of package versions.
# This file is meant to be *sourced*, not executed -- it uses 'return'
# to hand a distinct error status back to the calling shell.
if [ -z "$GLUEX_TOP" ]; then export GLUEX_TOP=/home/$USER/gluex_top; fi
if [ -z "$BUILD_SCRIPTS" ]
    then export BUILD_SCRIPTS=$GLUEX_TOP/build_scripts
fi
error_message="Error in gluex_env_version.sh"
error_action="environment settings not done"
if [ -z $BUILD_SCRIPTS ]
then
    echo ${error_message}: BUILD_SCRIPTS not defined, $error_action
    return 5
fi
if [ ! -f $BUILD_SCRIPTS/version.pl ]
then
    echo ${error_message}: $BUILD_SCRIPTS/version.pl not found, $error_action
    return 1
fi
if [ ! -f $BUILD_SCRIPTS/gluex_env.sh ]
then
    echo ${error_message}: $BUILD_SCRIPTS/gluex_env.sh not found, $error_action
    return 4
fi
# Pick the version file: explicit argument first, else ./version.xml
if [ ! -z $1 ]
then # version argument given
    if [ ! -f $1 ]
    then
	echo ${error_message}: version xml file $1 not found, $error_action
	return 2
    else
	version_file=$1
    fi
elif [ ! -f "version.xml" ]
then # look for version.xml in current working directory
    echo ${error_message}: no argument given and no version.xml found in current working directory, $error_action
    return 3
else
    version_file=version.xml
fi
# version.pl -sbash emits shell assignments which are applied here; the
# generic environment script is then sourced on top of them.
eval `$BUILD_SCRIPTS/version.pl -sbash $version_file`
source $BUILD_SCRIPTS/gluex_env.sh
| true |
3452edd4cf4288641da4ddfe7675ddd1196323bf | Shell | smartems/smartems | /scripts/build/prepare-enterprise.sh | UTF-8 | 236 | 2.78125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Check out and build the matching smartems-enterprise repo next to this
# one: the CI tag when building a tagged release, otherwise master.
# Fixed: the initial 'cd ..' was unchecked; if it failed, the clone and
# build would have run in the wrong directory.
cd .. || exit

if [ -z "$CIRCLE_TAG" ]; then
  _target="master"
else
  _target="$CIRCLE_TAG"
fi

git clone -b "$_target" --single-branch git@github.com:smartems/smartems-enterprise.git --depth 1
cd smartems-enterprise || exit
./build.sh
| true |
5c11cff3dcc7747ccb96d970de2b9f9d953b73a3 | Shell | vith/archlinux-packages-community | /gwenhywfar/repos/community-x86_64/PKGBUILD | UTF-8 | 991 | 2.53125 | 3 | [] | no_license | # Maintainer: Balló György <ballogyor+arch at gmail dot com>
# Maintainer: Jaroslav Lichtblau <svetlemodry@archlinux.org>
# Contributor: Sergej Pupykin <pupykin.s+arch@gmail.com>
# Contributor: William Rea <sillywilly@gmail.com>

pkgname=gwenhywfar
pkgver=5.2.0
_verid=256 # attachment id in the source URL below
pkgrel=1
pkgdesc="OS abstraction functions for various projects"
arch=(x86_64)
url="https://www.aquamaniac.de/"
license=(LGPL)
depends=(gnutls libgcrypt openssl)
makedepends=(git gtk3 qt5-base)
optdepends=('gtk3: for the GTK3 UI'
            'qt5-base: for the Qt5 UI')
source=("https://www.aquamaniac.de/rdm/attachments/download/$_verid/$pkgname-$pkgver.tar.gz")
sha256sums=('ed8e1f81aa32c8c387cccb9d40390db31632be55bc41bd30bc27e3e45d4d2766')
#validpgpkeys=(42400AF5EB2A17F0A69BB551E9899D784A977416) # AqBanking Package Key <packages@aqbanking.de>

build() {
  cd $pkgname-$pkgver
  ./configure --prefix=/usr --sysconfdir=/etc --enable-system-certs --with-guis="gtk3 qt5"
  # Insert -Wl,-O1,--as-needed before every ' -shared ' in the generated
  # libtool script (GNU sed: \0 re-inserts the whole matched text).
  sed -i -e 's/ -shared / -Wl,-O1,--as-needed\0/g' libtool
  make
}

package() {
  cd $pkgname-$pkgver
  make DESTDIR="$pkgdir" install
}
| true |
9931002270a8f5bc1ac28fd8aa68c48b884a7005 | Shell | alexpyoung/dotfiles | /scripts/update/cron | UTF-8 | 184 | 3.21875 | 3 | [
"Unlicense"
] | permissive | #!/usr/bin/env bash
set -euo pipefail

main() {
    # Snapshot the current user's crontab into the repository's crontab
    # file and show the resulting working-tree status.
    #
    # Declare and assign separately: 'local -r var=$(cmd)' would hide a
    # realpath failure behind local's own (successful) exit status, so
    # 'set -e' could never catch it.
    local crontab_file
    crontab_file=$(realpath "${0%/*}/../../crontab")
    readonly crontab_file
    echo "Generating crontab..."
    crontab -l > "$crontab_file"
    git status
}

main
| true |
894004ea612dac6684763953bd8e4da77c436ced | Shell | kartheekbaddukonda/Learnings | /shellclass/Practice/My-practice01.sh | UTF-8 | 449 | 3.25 | 3 | [] | no_license | #!/bin/bash
# Demonstration script for shell variable assignment, expansion and quoting.

# Hello from main OS
echo 'Hello'

#assign a value to a variable
WORD='Script'
echo "$WORD"
# Single quotes suppress expansion: this prints the literal text $WORD.
echo '$WORD'
echo "This is a shell $WORD"
echo "This is a shell ${WORD}"

# Append text to a variable (braces delimit the variable name)
echo "${WORD}ing is fun"

# this doesn't expand as intended: $WORDing is parsed as one (undefined)
# variable name, so only " is fun" is displayed
echo "$WORDing is fun"

#another variable
ENDING='ed'
echo "${WORD}${ENDING}"

# Reassignment
ENDING='ing'
echo "$WORD$ENDING is fun" #another way of echoing multiple variables
b72a458ac19fdaaf432e8620052b837513844569 | Shell | pawel-soja/AsiCamera | /scripts/debug-test.sh | UTF-8 | 1,349 | 3.90625 | 4 | [] | no_license | #!/bin/bash
# Build the AsiCameraBoost example against multiple ASI SDK versions and
# run it both with the boost library and with the stock vendor library
# (via LD_PRELOAD), logging each run under build/AsiCameraBoost/logs/.

# Fallback definitions for systems whose tools lack realpath/nproc:
command -v realpath >/dev/null 2>&1 || realpath() {
    [[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"
}

command -v nproc >/dev/null 2>&1 || function nproc {
    # Prefer sysctl's logical CPU count; fall back to a constant of 3.
    command -v sysctl >/dev/null 2>&1 && \
        sysctl -n hw.logicalcpu ||
        echo "3"
}

# Source tree root, relative to this script's location.
SRCS=$(dirname $(realpath $0))/..

# Rotate any logs from previous runs into the archive directory.
mkdir -p build/AsiCameraBoost/logs/archive
mv build/AsiCameraBoost/logs/*.txt build/AsiCameraBoost/logs/archive/

VERSIONS=( 1.16 1.17 )

for ASIVERSION in "${VERSIONS[@]}"
do
    BUILDDIR=./build/AsiCameraBoost/build-SDK-${ASIVERSION}
    LOGDIR=./build/AsiCameraBoost/logs/

    echo "Build boost library with SDK ${ASIVERSION}"
    mkdir -p ${BUILDDIR}
    pushd ${BUILDDIR}
    cmake -DASIVERSION=${ASIVERSION} -DCMAKE_BUILD_TYPE=Debug . $SRCS
    make -j$(($(nproc)+1))
    popd

    # Run with the boost library; |& tee captures stdout+stderr to a log.
    echo "Test SDK ${ASIVERSION} with boost library"
    ${BUILDDIR}/example/simply/AsiCameraSimply |& tee ${LOGDIR}/SDK_${ASIVERSION}_BOOST_$(date +%s).txt && echo "Success" || echo "Fail"

    # Same binary again, but with the vendor SDK forced in via LD_PRELOAD.
    echo "Test SDK ${ASIVERSION}"
    LD_PRELOAD=./AsiCamera/libasicamera2/${ASIVERSION}/lib/x64/libASICamera2.so \
    ${BUILDDIR}/example/simply/AsiCameraSimply |& tee ${LOGDIR}/SDK_${ASIVERSION}_______$(date +%s).txt && echo "Success" || echo "Fail"
done

LOGDIR_FULL=$(realpath ${LOGDIR})

echo
echo "Logs are in the ${LOGDIR_FULL} directory"
ls -l ${LOGDIR}
| true |
65f94596912873e52618664012c8f55958f046c4 | Shell | RobertMirantis/MCC_install_make_easy | /install_MCC_from_scratch.ksh | UTF-8 | 3,804 | 3.65625 | 4 | [] | no_license |
DATE=`date "+%y%m%d"`

#######################################################
# Install docker first
#######################################################
if [ -f /usr/bin/docker ]
then
   # Binary present: verify we can actually talk to the daemon (the
   # 'docker images' header line appears exactly once when we can).
   if [ `docker images | grep "REPOSITORY" |wc -l` -eq 1 ]
   then
      echo "Docker is install correctly"
   else
      sudo apt-get update -y
      # -y added for consistency with the branch below (non-interactive).
      sudo apt install docker.io -y
      sudo usermod -aG docker $USER
      echo "Docker is now installed - please logoff, login again and start me for a second time!"
      exit
   fi
else
   sudo apt-get update -y
   sudo apt install docker.io -y
   sudo usermod -aG docker $USER
   echo "Docker is now installed - please logoff, login again and start me for a second time!"
   exit
fi

#######################################################
# Install AWS CLI
#######################################################
sudo apt install awscli -y

#######################################################
# Other checks
#######################################################
# License file
if [ -f /home/ubuntu/mirantis.lic ]
then
   echo "Good you have a license file"
else
   echo "No license file found in /home/ubuntu"
   echo "Get a license file at https://www.mirantis.com/download/mirantis-cloud-native-platform/mirantis-container-cloud/"
   exit 1
fi

# Sourceme file (must exist and contain no unfilled "<...>" placeholders)
if [[ -f /home/ubuntu/sourceme.ksh && `cat /home/ubuntu/sourceme.ksh | grep "<" | wc -l` -eq 0 ]]
then
   echo "Great! You have the sourceme.ksh file already "
else
   echo "No sourceme.ksh file found in /home/ubuntu"
   echo "Create a file called /home/ubuntu/sourceme.ksh filled with the following content"
   echo "export KAAS_AWS_ENABLED=true"
   echo "export AWS_DEFAULT_REGION=<YOUR REGION YOU WANT TO DEPLOY>"
   echo "export AWS_ACCESS_KEY_ID=\"<YOUR AWS ACCESSKEY OF bootstrapper user with yourname (See IAM)>\""
   # Fixed: the original line ended with a stray backslash, which made
   # the following 'exit 1' become arguments of echo, so the script kept
   # running despite the missing/incomplete sourceme.ksh.
   echo "export AWS_SECRET_ACCESS_KEY=\"<YOUR AWS SECRET ACCESSKEY>\""
   exit 1
fi

#######################################################
# Old stuff will be here...
#######################################################
if [ ! -d /home/ubuntu/old ]
then
   mkdir /home/ubuntu/old
fi

#######################################################
# Get latests and greatest
#######################################################
if [ -f get_container_cloud.sh ]
then
   mv get_container_cloud.sh old/get_container_cloud${DATE}.sh
fi
wget https://binary.mirantis.com/releases/get_container_cloud.sh
chmod 755 get_container_cloud.sh
LATEST=`grep "LATEST_KAAS_VERSION=" get_container_cloud.sh `
echo "Latest version we are about to install = $LATEST"

#######################################################
# Install new directory
#######################################################
if [ -d /home/ubuntu/kaas-bootstrap ]
then
   echo "Old directory is still here"
   tar cf kaas-bootstrap_${DATE}.tar kaas-bootstrap/
   mv kaas-bootstrap_${DATE}.tar /home/ubuntu/old/
   rm -rf kaas-bootstrap/
fi
./get_container_cloud.sh

# Plaats licentiefile (Dutch: place the license file)
cp mirantis.lic kaas-bootstrap/mirantis.lic

# Change AMI: extract the AMI id currently in the template...
FILE=~/kaas-bootstrap/templates/aws/machines.yaml.template
REPLACEAMI=`cat $FILE | grep "ami-" | cut -f 2 -d":" | awk '{print $1}' `
# FILL IN YOUR AMI ID IN YOUR REGION (AMI number differs per region), and I will do the magic... ;-)!!
TOAMI="ami-0e0102e3ff768559b"
# ...and substitute the desired one.
cat $FILE | sed "s/${REPLACEAMI}/${TOAMI}/g" > ${FILE}.1
mv ${FILE}.1 ${FILE}

# Source the AWS parameters
. ./sourceme.ksh
cp sourceme.ksh kaas-bootstrap/

# Kick start_my_MCC_cluster.ksh script
# The other script
if [ -f /home/ubuntu/start_my_MCC_cluster.ksh ]
then
   echo "Fantastic! You have the start_my_MCC_sluter.ksh file - Lets start building"
   ./start_my_MCC_cluster.ksh
else
   echo "You are missing the start_my_MCC_cluster.ksh script!"
   echo "Sorry buddy, you have to do the installation manually...."
   exit 1
fi
| true |
8c639fce531b48b5ae5e7ac7078c924a0b4011de | Shell | cheesiong-lee/buildah | /tests/containers.bats | UTF-8 | 1,988 | 2.9375 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bats
load helpers

# Integration tests for the `buildah containers` listing command and its
# options.  Each test creates containers from the alpine/busybox images
# and removes the containers and images again at the end.

@test "containers" {
  cid1=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json alpine)
  cid2=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json busybox)
  run buildah --debug=false containers
  # Two containers plus the header line.
  [ $(wc -l <<< "$output") -eq 3 ]
  [ "${status}" -eq 0 ]
  buildah rm -a
  buildah rmi -a -f
}

@test "containers filter test" {
  cid1=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json alpine)
  cid2=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json busybox)
  run buildah --debug=false containers --filter name=$cid1
  # Only the filtered container plus the header line.
  [ $(wc -l <<< "$output") -eq 2 ]
  [ "${status}" -eq 0 ]
  buildah rm -a
  buildah rmi -a -f
}

@test "containers format test" {
  cid1=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json alpine)
  cid2=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json busybox)
  run buildah --debug=false containers --format "{{.ContainerName}}"
  # One name per container, no header.
  [ $(wc -l <<< "$output") -eq 2 ]
  [ "${status}" -eq 0 ]
  buildah rm -a
  buildah rmi -a -f
}

@test "containers json test" {
  cid1=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json alpine)
  # JSON output must contain at least one opening brace.
  out=$(buildah --debug=false containers --json | grep "{" | wc -l)
  [ "$out" -ne "0" ]
  buildah rm -a
  buildah rmi -a -f
}

@test "containers noheading test" {
  cid1=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json alpine)
  cid2=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json busybox)
  run buildah --debug=false containers --noheading
  # Two containers, header suppressed.
  [ $(wc -l <<< "$output") -eq 2 ]
  [ "${status}" -eq 0 ]
  buildah rm -a
  buildah rmi -a -f
}

@test "containers quiet test" {
  cid1=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json alpine)
  cid2=$(buildah from --pull --signature-policy ${TESTSDIR}/policy.json busybox)
  run buildah --debug=false containers --quiet
  # IDs only, one per container.
  [ $(wc -l <<< "$output") -eq 2 ]
  [ "${status}" -eq 0 ]
  buildah rm -a
  buildah rmi -a -f
}
| true |
ba3a37be9616683b60dfe38399470508a6f65eb5 | Shell | p2sousa/dotfiles | /linux/apps.sh | UTF-8 | 730 | 2.75 | 3 | [] | no_license | #!/usr/bin/env bash
# Provision a fresh Ubuntu desktop with the applications below.
# The first line pins the CWD to the script's own directory.
cd "$(dirname "$0")"
set -e
# NOTE(review): DIR is assigned but never used below -- candidate for removal.
DIR=$(pwd)

sudo apt update
# NOTE(review): no -y flag, so this step prompts interactively.
sudo apt upgrade

echo "Installing tweak..."
sudo add-apt-repository universe
sudo apt install gnome-tweak-tool

echo "Installing chrome..."
wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
sudo apt install ./google-chrome-stable_current_amd64.deb
rm ./google-chrome-stable_current_amd64.deb

echo "Installing insomnia..."
sudo snap install insomnia

echo "Installing slack..."
sudo snap install slack --classic

echo "Installing Jetbrain..."
sudo snap install phpstorm --classic

echo "Installing vscode..."
sudo snap install code --classic

echo "Installing mysql-workbench..."
sudo snap install mysql-workbench-community
| true |
c6dcb16c70d62fba889fda48bdb97f298fb019b3 | Shell | mbkusharki/cornelius | /framework/scripts/regularize.sh | UTF-8 | 3,920 | 4.21875 | 4 | [] | no_license | #!/bin/bash
###############################################################################
# Regularize a file and its mutants
#
# Cornelius only works on Simple programs, and Simple programs have regular
# control flow (no early returns/breaks/continues/etc). This script transforms a
# subject and its mutants into reguarlized but equivalent versions and compiles
# them into classfiles (the classfiles are used by Medusa to compute ground
# truth).
#
# This outputs the regularized version of the program in `/path/to/subject/regularized`
source util.sh
################################################################################
# Print a usage screen with an optional message
function usage {
    # Print an optional message followed by the usage screen, then exit
    # non-zero.  'bold' comes from util.sh, sourced above.
    # NOTE(review): the message and the bold() output are used as printf
    # format strings, so text containing '%' or '\' would be interpreted
    # rather than printed literally.
    printf "$(bold "usage:") $1\n"
    printf "$(bold Forms)\n"
    printf "$(bold -----)\n"
    printf "    ./regularize.sh --help: print this help message\n"
    printf "    ./regularize.sh --subject SUBJECT: regularize the subject whose original\n"
    printf "                    source code is at path SUBJECT\n"
    printf "    ./regularize.sh --single-source SOURCE: regularize the SOURCE Java file\n"
    exit 1
}
###############################################################################
# regularize: given a source file $1 and a target file $2, regularize the file
# and save it to disk at location target.
#
# For instance, if the original file is `src/Foo.java` and you want to write a
# new file `regularized/src/Foo.java`, run
#
# regularize "src/Foo.java" "regularized/src/Foo.java"
#
# Note that this will overwrite `regularized/src/Foo.java` if it exists without
# asking for permission.
function regularize {
    # Regularize source file $1 with the Cornelius regularizer jar and
    # write the result to $2, creating the target directory as needed.
    file="$1"
    target="$2"
    # mkdir -p is a no-op when the directory already exists, so the
    # original existence check around it was redundant and is dropped.
    mkdir -p "$(dirname "$target")"
    java -jar "$REGULARIZE_JAR" "$file" > "$target"
}
###############################################################################
# A helper function that takes a mutant id and regularizes it to the new
# location
#
# Arguments: $1 subject directory, $2 source file name, $3 regularized
# output root, $4 mutant id.  Prints one progress dot per mutant.
function regularize_mutant {
    base="$1"
    filename="$2"
    regdir="$3"
    mid="$4"
    mkdir -p "$regdir/mutants/$mid"
    source="$base/mutants/$mid/$filename"
    target="$regdir/mutants/$mid/$filename"
    regularize "$source" "$target"
    printf "."
}
function regularize_subject {
    # Regularize a subject file ($1/$2) and all of its mutants into
    # $1/regularized, recreating that directory from scratch.
    dir="$1"
    filename="$2"
    regdir=$(realpath "$dir/regularized")
    echo "================================================================================"
    # NOTE(review): plain echo does not interpret '\n', so the trailing
    # "\n" below is printed literally; printf (or echo -e) may have been
    # intended.
    echo "Regularizing subject $(green "$dir/$filename") to $(green "$dir/regularized")\n"
    if [ -e "$regdir" ]
    then
        rm -rf "$regdir"
    fi
    mkdir -p "$regdir"
    # Carry the mutants.log bookkeeping file over to the regularized tree.
    cp "$dir/mutants.log" "$regdir"
    regularize "$dir/$filename" "$regdir/$filename"
    # regularize the mutants (one progress dot each)
    for mid in $(ls "$dir/mutants")
    do
        regularize_mutant "$dir" "$filename" "$regdir" "$mid"
    done
    echo
}
# Command-line driver: arguments are consumed one at a time.
while (( "$#" )); do
    case "$1" in
        --help)
            usage
            ;;
        --subject)
            # Regularize an entire subject (original file + mutants).
            shift
            if [ ! -e "$1" ]; then
                red "no such file as $1...aborting"
                exit 1
            fi
            dir="$(realpath "$(dirname "$1")")"
            base="$(basename "$1")"
            shift
            regdir="$dir/regularized"
            if [ -d "$dir" ]
            then
                if [ -e "$regdir" ]
                then
                    rm -rf "$regdir"
                fi
                mkdir -p "$regdir"
                regularize_subject "$dir" "$base"
            fi
            ;;
        *)
            # Bare path: regularize a single source file.
            if [ ! -e "$1" ]; then
                red "no such file as $1...aborting"
                exit 1
            fi
            dir="$(realpath "$(dirname "$1")")"
            base="$(basename "$1")"
            shift
            regdir="$dir/regularized"
            if [ -d "$dir" ]
            then
                if [ -e "$regdir" ]
                then
                    rm -rf "$regdir"
                fi
                mkdir -p "$regdir"
                regularize "$dir/$base" "$regdir/$base"
            fi
            ;;
    esac
done
| true |
0e34ca7860591726eede0c7c1f4fc33c7838f745 | Shell | makeclean/pyne-ci | /bin/conda-inst.sh | UTF-8 | 726 | 3.359375 | 3 | [
"BSD-2-Clause"
] | permissive | #!/bin/bash
# Installs conda locally
# Bootstraps a private Miniconda under ./anaconda and installs the packaging
# toolchain (jinja2, setuptools, binstar, conda-build; patchelf on Linux only).
set -x
set -e
# Setup
UNAME=$(uname)
MINICONDA_VER="3.7.0"
export PATH="$(pwd)/anaconda/bin:${PATH}"
# Python-2 builds use the unsuffixed "Miniconda" installer name
if [[ "${MINICONDA_PYVER}" == '2' ]]; then
    export MINICONDA_PYVER=""
fi
# pick the installer for this platform (Linux vs. macOS)
if [[ "$UNAME" == 'Linux' ]]; then
    MINICONDA="Miniconda${MINICONDA_PYVER}-${MINICONDA_VER}-Linux-x86_64.sh"
else
    MINICONDA="Miniconda${MINICONDA_PYVER}-${MINICONDA_VER}-MacOSX-x86_64.sh"
fi
# Install
# the condarc shipped next to this script becomes the user-level conda config
mv condarc $HOME/.condarc
chmod 755 ${MINICONDA}
# -b: batch (no prompts), -p: install prefix
./${MINICONDA} -b -p ./anaconda
#conda install conda=3.6.1 # is this needed?
conda update conda
conda install jinja2
conda install setuptools
conda install binstar
# patchelf is used for relocating compiled binaries; Linux-only package
if [[ "$UNAME" == 'Linux' ]]; then
    conda install patchelf
fi
conda install conda-build
| true |
b7c40b1457f09e15fabb16d7ed53f20669157ef0 | Shell | Snusel/linuxgsm | /functions/fn_details_config | UTF-8 | 4,078 | 2.703125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# LGSM fn_details_config function
# Author: Daniel Gibbs
# Website: http://gameservermanagers.com
# Version: 160415
# Description: Gets specific details from config files.
if [ "${engine}" == "avalanche" ]; then
	# Avalanche (Just Cause 2 MP): Name = "..." style config
	servername=$(grep -s Name "${servercfgfullpath}"|sed 's/Name//g'|tr -d '=\"; '|sed 's/,//g')
elif [ "${engine}" == "projectzomboid" ]; then
	# Project Zomboid: key=value ini
	servername=$(grep -s PublicName= "${servercfgfullpath}"|sed 's/PublicName=//g'|tr -d '=\";'|sed 's/,//g')
	port=$(grep -s DefaultPort= "${servercfgfullpath}"|sed 's/DefaultPort=//g'|tr -cd [:digit:])
elif [ "${engine}" == "realvirtuality" ]; then
	# Real Virtuality (ArmA): C-like config; `grep -v //` drops commented lines
	servername=$(grep -s hostname "${servercfgfullpath}"| grep -v //|sed -e 's/\<hostname\>//g'| tr -d '=\"; ')
	port=$(grep -s serverport= "${servercfgfullpath}"|grep -v //|tr -d '\r'|tr -cd [:digit:])
	queryport=$(grep -s steamqueryport= "${servercfgfullpath}"|grep -v //|tr -d '\r'|tr -cd [:digit:])
	masterport=$(grep -s steamport= "${servercfgfullpath}"|grep -v //|tr -d '\r'|tr -cd [:digit:])
elif [ "${engine}" == "seriousengine35" ]; then
	# Serious Engine 3.5 (Serious Sam): `key = "value";` assignments
	servername=$(grep -s prj_strMultiplayerSessionName "${servercfgfullpath}"|sed 's/prj_strMultiplayerSessionName = //g'|sed 's/"//g'|sed 's/;//g')
	rcon=$(grep -s rcts_strAdminPassword "${servercfgfullpath}"|sed 's/rcts_strAdminPassword = //g'|sed 's/"//g'|sed 's/;//g')
	port=$(grep -s prj_uwPort "${servercfgfullpath}"|tr -d '\r'|tr -cd [:digit:])
	queryport=$((${port} + 1))
elif [ "${engine}" == "source" ]||[ "${engine}" == "goldsource" ]; then
	# Source / GoldSource: `hostname "..."` style server.cfg
	servername=$(grep -s hostname "${servercfgfullpath}"|sed 's/hostname //g'|sed 's/"//g')
	rcon=$(grep -s rcon_password "${servercfgfullpath}"|sed 's/rcon_password //g'|sed 's/"//g')
elif [ "${engine}" == "spark" ]; then
	# Spark (NS2): only derives the query port from the caller-set $port
	queryport=$((${port} + 1))
elif [ "${gamename}" == "Teamspeak 3" ]; then
	# Teamspeak 3: read the ini when present, otherwise fall back to defaults
	if [ -s "${servercfgfullpath}" ]; then
		port=$(grep default_voice_port= "${servercfgfullpath}"|tr -cd [:digit:])
		queryport=$(grep query_port= "${servercfgfullpath}"|tr -cd [:digit:])
		fileport=$(grep filetransfer_port= "${servercfgfullpath}"|tr -cd [:digit:])
		ip=$(grep voice_ip= "${servercfgfullpath}"|sed 's/\voice_ip=//g')
		dbplugin=$(grep dbplugin= "${servercfgfullpath}"|sed 's/\dbplugin=//g')
	else
		# TS3 stock defaults
		port="9987"
		queryport="10011"
		fileport="30033"
	fi
elif [ "${engine}" == "unity3d" ]; then
	# Unity3D (e.g. 7DtD): XML `<property name=... value="..."/>` entries
	servername=$(grep ServerName "${servercfgfullpath}"|sed 's/^.*value="//'|cut -f1 -d"\"")
	port=$(grep ServerPort "${servercfgfullpath}"|tr -cd [:digit:])
	queryport=$((${port} + 1))
	webadminenabled=$(grep ControlPanelEnabled "${servercfgfullpath}"|sed 's/^.*value="//'|cut -f1 -d"\"")
	webadminport=$(grep ControlPanelPort "${servercfgfullpath}"|tr -cd [:digit:])
	webadminpass=$(grep ControlPanelPassword "${servercfgfullpath}"|sed 's/^.*value="//'|cut -f1 -d"\"")
	telnetenabled=$(grep TelnetEnabled "${servercfgfullpath}"|sed 's/^.*value="//'|cut -f1 -d"\"")
	telnetport=$(grep TelnetPort "${servercfgfullpath}"|tr -cd [:digit:])
	telnetpass=$(grep TelnetPassword "${servercfgfullpath}"|sed 's/^.*value="//'|cut -f1 -d"\"")
elif [ "${engine}" == "unreal" ]||[ "${engine}" == "unreal2" ]; then
	# Unreal / Unreal2: ini-style; exclude Master/LAN/Proxy/Listen port keys
	servername=$(grep -s ServerName= ${servercfgfullpath}|sed 's/ServerName=//g')
	port=$(grep Port= "${servercfgfullpath}"|grep -v Master|grep -v LAN|grep -v Proxy|grep -v Listen|tr -d '\r'|tr -cd [:digit:])
	queryport=$((${port} + 1))
	gsqueryport=$(grep OldQueryPortNumber= "${servercfgfullpath}"|tr -d '\r'|tr -cd [:digit:])
	udplinkport=$((${port} + 2))
	webadminenabled=$(grep bEnabled= "${servercfgfullpath}"|sed 's/bEnabled=//g'|tr -d '\r')
	webadminport=$(grep ListenPort= "${servercfgfullpath}"|tr -d '\r'|tr -cd [:digit:])
	# web-admin credential keys differ between the two engine generations
	if [ "${engine}" == "unreal" ]; then
		webadminuser=$(grep AdminUsername= "${servercfgfullpath}"|sed 's/\AdminUsername=//g')
		webadminpass=$(grep UTServerAdmin.UTServerAdmin "${servercfgfullpath}" -A 2| grep AdminPassword=|sed 's/\AdminPassword=//g')
	else
		webadminuser=$(grep AdminName= "${servercfgfullpath}"|sed 's/\AdminName=//g')
		webadminpass=$(grep AdminPassword= "${servercfgfullpath}"|sed 's/\AdminPassword=//g')
	fi
fi | true |
d78984c70e3fddef53e60168a07b32d44bb4abd9 | Shell | gitlisted/ccc | /make | UTF-8 | 209 | 2.546875 | 3 | [] | no_license | #!/bin/bash
# Rebuild the PEG.js parser only when the grammar is newer than the generated file.
if [ parser/ccc.pegjs -nt parser/ccc.js ]; then
  pegjs -e ccc.Parser --allowed-start-rules start,datum parser/ccc.pegjs
fi
# Concatenate every file named in sources.list into the bundle. The inner
# $(cat sources.list) is deliberately unquoted so its contents word-split
# into individual filenames.
cat $(cat sources.list) > ccc.js
yui-compressor ccc.js -o ccc.min.js
| true |
353c765659d5c13965b61ad276a8043774b6f262 | Shell | Orc/configure | /samples/magicfilter | UTF-8 | 4,675 | 3.640625 | 4 | [
"BSD-3-Clause"
] | permissive | #! /bin/sh
# local options: ac_help is the help message that describes them
# and LOCAL_AC_OPTIONS is the script that interprets them. LOCAL_AC_OPTIONS
# is a script that's processed with eval, so you need to be very careful to
# make certain that what you quote is what you want to quote.
ac_help='--filterdir=DIR where to put printer filters (prefix/sbin/printers)
--traditional build a more traditional sort of magicfilter
--use-local-magic install a private magic file
--use-fifo pipe the output from gs through a fifo
--with-papersize=SIZE set the default paper size for gs/pstext'
LOCAL_AC_OPTIONS='
case X"$1" in
X--filterdir=*)
AC_FILTER=`echo "$1" | sed -e 's/^[^=]*=//'`
shift 1
;;
X--filterdir)
AC_FILTER=$2
shift 2
;;
X--traditional)
TARGET=traditional
shift
;;
*) ac_error=1
;;
esac'
# load in the configuration file
#
TARGET=magicfilter
. ./configure.inc
# and away we go
#
USE_FIFO=T # default to piping ghostscript via a fifo
AC_INIT magicfilter
AC_PROG_CC
AC_SUB filterdir ${AC_FILTER:-$AC_PREFIX/sbin/printers}
if [ "$USE_FIFO" = "T" ]; then
AC_SUB GSOUT '${FIFO}'
else
AC_SUB GSOUT '-'
fi
if [ ! "$USE_LOCAL_MAGIC" ]; then
case $ac_os in
[Ff]ree[Bb][Ss][Dd])magicpath=/etc:/usr/etc:/usr/share/misc ;;
*) magicpath=/etc:/usr/etc: ;;
esac
# check to see if the system magic file is moderately recent
#
trap "rm -f $$" 1 2 3 9 15
echo "@PJL JOB" > $$
F=`file $$ 2>/dev/null | grep -i pjl`
echo "%PDF-1.1" > $$
G=`file $$ 2>/dev/null | grep -i pdf`
if [ "$F" -a "$G" ]; then
save_AC_PATH=$AC_PATH
AC_PATH=$magicpath MF_PATH_INCLUDE MAGIC -r magic || USE_LOCAL_MAGIC=T
AC_PATH=$save_AC_PATH
else
LOG "file(1) is too old -- using private magic file"
USE_LOCAL_MAGIC=T
fi
else
LOG "Using private magic file $AC_CONFDIR/mf.magic"
fi
rm -f $$
trap 1 2 3 9 15
if [ "$USE_LOCAL_MAGIC" ]; then
# if we're using local magic, manually write the substitution
# information into the config files
AC_CONFIG MAGIC "$AC_CONFDIR"/mf.magic
AC_SUB INSTALL_MAGIC ""
else
AC_SUB INSTALL_MAGIC ": "
fi
AC_SUB DO_WHAT install-$TARGET
# AC_PROG_LN_S
# AC_PROG_YACC
if AC_CHECK_HEADERS basis/options.h; then
if LIBS="-lbasis" AC_CHECK_FUNCS x_getopt; then
AC_LIBS="$AC_LIBS -lbasis"
AC_SUB XGETOPT
HAVE_XGETOPT=T
fi
fi
test "$HAVE_XGETOPT" || AC_SUB XGETOPT options.o
AC_CHECK_HEADERS malloc.h || AC_CHECK_HEADERS sys/malloc.h
AC_CHECK_FUNCS basename
MF_PATH_INCLUDE RANLIB ranlib true || AC_CONFIG RANLIB ':'
MF_PATH_INCLUDE M4 m4 || AC_FAIL "magicfilter requires m4"
MF_PATH_INCLUDE GS gs
if MF_PATH_INCLUDE NENSCRIPT nenscript enscript; then
TLOGN "checking if $CF_NENSCRIPT supports the -q flag..."
__v=`$CF_NENSCRIPT -q -p - </dev/null`
if [ \( "$?" -ne 0 \) -o "$__v" ]; then
TLOG " no"
AC_SUB NENSCRIPT_QUIET ''
else
TLOG " yes"
AC_SUB NENSCRIPT_QUIET -q
fi
else
AC_SUB NENSCRIPT_QUIET ''
MF_PATH_INCLUDE PSTEXT pstext
fi
paper=$WITH_PAPERSIZE
if [ "$CF_GS" -a "$paper" ]; then
if ! echo "quit" | $CF_GS -sPAPERSIZE=$paper -dNODISPLAY -; then
AC_FAIL "$CF_GS cannot write to $paper sized paper"
unset paper
fi
fi
if [ "${CF_PSTEXT:-$CF_NENSCRIPT}" -a "$paper" ]; then
PROG=${CF_PSTEXT:-$CF_NENSCRIPT}
if ! $PROG -T$paper </dev/null; then
AC_FAIL "$PROG cannot write to $paper sized paper"
unset paper
fi
fi
test "$paper" && AC_DEFINE PAPERSIZE \"$WITH_PAPERSIZE\"
MF_PATH_INCLUDE GZIP gzip gzcat || MF_PATH_INCLUDE ZCAT zcat
MF_PATH_INCLUDE BZIP bzip2
MF_PATH_INCLUDE UNCOMPRESS uncompress
MF_PATH_INCLUDE DVIPS dvips
MF_PATH_INCLUDE PNMTOPS pnmtops
MF_PATH_INCLUDE GIFTOPPM giftopnm giftoppm
MF_PATH_INCLUDE G3TOPBM g3topbm
MF_PATH_INCLUDE DJPEG djpeg
MF_PATH_INCLUDE PNGTOPNM pngtopnm
MF_PATH_INCLUDE SGITOPNM sgitopnm
MF_PATH_INCLUDE TIFFTOPNM tifftopnm
MF_PATH_INCLUDE BMPTOPPM bmptopnm bmptoppm
MF_PATH_INCLUDE RASTTOPNM rasttopnm
MF_PATH_INCLUDE FIG2DEV fig2dev
MF_PATH_INCLUDE ACROREAD acroread
# MF_PROG_GNU_ZCAT($ZCAT)
save_AC_PATH=$AC_PATH
AC_PATH=/usr/lib:/usr/sbin:/usr/bin:/bin:/sbin MF_PATH_INCLUDE SENDMAIL sendmail smail mail Mail
AC_PATH=$save_AC_PATH
AC_CHECK_HEADERS memory.h
AC_CHECK_HEADERS paths.h
AC_CHECK_HEADERS stdlib.h
AC_CHECK_HEADERS unistd.h
AC_HEADER_SYS_WAIT
AC_C_CONST
AC_TYPE_PID_T
AC_CHECK_FUNCS dup2
AC_CHECK_FUNCS tmpnam
AC_CHECK_FUNCS waitpid
AC_CHECK_FUNCS wait4
AC_CHECK_FUNCS unsetenv
AC_CHECK_FUNCS setenv
AC_CHECK_FUNCS setlinebuf
AC_SUB MAGICFILTER ${AC_EXEC}/magicfilter
AC_OUTPUT Makefile file/Makefile magicfilter.8templ magicfilter.5 magicfilter.h magicfilter-t.5 magic.m4
| true |
174aa6d3e58d8ad7c4fe77c8ce825028e106997e | Shell | tkoolen/drake-humanoid-controller | /extract_controller.sh | UTF-8 | 1,035 | 3.171875 | 3 | [] | no_license | #!/bin/bash
set -e
DRAKE_DIR=$1
SOURCE="$DRAKE_DIR/install"
SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"
DESTINATION="$SCRIPTPATH/install"
rm -rf $DESTINATION
mkdir -p $DESTINATION
FILES="share/drake/examples/humanoid_controller/
lib/libBulletCollision.so
lib/libLinearMath.so
lib/libgurobi75.so
lib/libscsdir.so
lib/liblcm.so
share/drake/examples/valkyrie/
share/drake/common
share/drake/.drake-find_resource-sentinel"
cd $SOURCE
for f in $FILES
do
cp -r --parent $f $DESTINATION
done
cp -r $DRAKE_DIR/examples/humanoid_controller/config/ $DESTINATION/share/drake/examples/humanoid_controller/config
SYSTEM_LIBS="/usr/lib/x86_64-linux-gnu/libprotobuf.so.9
/usr/lib/x86_64-linux-gnu/libtinyxml2.so.2
/usr/lib/libdreal.so
/usr/lib/libibex.so
/usr/lib/x86_64-linux-gnu/libClp.so.1
/usr/lib/x86_64-linux-gnu/libCoinUtils.so.3
/usr/lib/libipopt.so.1
/usr/lib/libdmumps_seq-4.10.0.so
/usr/lib/x86_64-linux-gnu/libnlopt.so.0
/usr/lib/libmumps_common_seq-4.10.0.so"
for lib in $SYSTEM_LIBS
do
cp $lib $DESTINATION/lib
done
| true |
f715822f7ba8ba821205e3fcfd61b035baedbb2a | Shell | manuelbachl/MacOS-Config | /aliases/functions.sh | UTF-8 | 6,875 | 4.09375 | 4 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | ########################################################################################################################
# #
# HELPER-FUNCTIONS #
# #
########################################################################################################################
#====================#
# shell/terminal #
#====================#
# print a separator line, as wide as the terminal
function hr() {
    # zsh-only: the (l:COLUMNS::=:) padding flag left-pads an empty word with
    # "=" to the terminal width, drawing a full-width horizontal rule.
    print ${(l:COLUMNS::=:)}
}
#=================#
# conversions #
#=================#
# convert string to int
# Parse the first argument as a base-10 integer (strips leading zeros) and
# print the result.
function toint() {
    local -i parsed
    parsed="10#${1}"   # the 10# prefix forces base 10 even for 0-prefixed input
    echo "${parsed}"
}
# urlencode text
function urlencode() {
    # Percent-encode the first argument for use in a URL. Requires perl with
    # URI::Escape. NB: echo -e on the unquoted result re-interprets escapes
    # and word-splits — acceptable here since encoded output has no spaces.
    echo -e $(perl -MURI::Escape -e 'print uri_escape($ARGV[0]);' "$1");
}
#=================#
# web-related #
#=================#
# find what is using a particular port
# USAGE: whoisport {PORTNUMBER}
function whoisport() {
    # Report which program is listening on a TCP port.
    # USAGE: whoisport {PORTNUMBER}
    if [ $# -lt 1 ];
    then
        echo 'USAGE: whoisport {PORTNUMBER}';
    else
        port=$1;
        # toint() (defined above) normalizes the value; non-numeric input
        # produces no output, leaving portNum at 0 so the range check fails
        local -i portNum=$(toint "${port}" 2>/dev/null);
        if (( $portNum < 1 || $portNum > 65535 ));
        then
            echo $port' is not a valid port (must be an integer between 1 and 65535)';
        else
            # fuser prints "PORT/tcp: PID"; empty when nothing owns the socket
            pidInfo=$(sudo fuser $port/tcp 2> /dev/null);
            if [[ ! -z $pidInfo ]];
            then
                pidInfoClean="$(echo -e $pidInfo | tr -d '[:space:]')"
                pid=$(echo $pidInfoClean | cut -d':' -f2);
                # NOTE(review): /proc/$pid/exe is Linux procfs; this branch
                # will not resolve on macOS — confirm intended platforms
                sudo ls -l /proc/$pid/exe;
            else
                # fall back to lsof, which lists any user-visible listener
                if ! lsof -i:$port
                then
                    echo 'port '$port' is not in use';
                fi
            fi
        fi
    fi
}
# get public ip
# Print this machine's public IP address via ipv6-test.com.
#   -4 : IPv4 only, -6 : IPv6 only, anything else : whichever responds.
function myip() {
    local endpoint="http://v4v6.ipv6-test.com/api/myip.php"
    if [ "$1" = "-4" ]; then
        endpoint="http://v4.ipv6-test.com/api/myip.php"
    elif [ "$1" = "-6" ]; then
        endpoint="http://v6.ipv6-test.com/api/myip.php"
    fi
    curl -s "$endpoint"
    echo # Newline.
}
alias ipinfo='myip'
#================#
# navigation #
#================#
# cd into directory and list contents
# USAGE: cls {DIRECTORYNAME}
# Change into a directory and immediately list its contents.
# USAGE: cls {DIRECTORYNAME}
function cls() {
    if [ $# -ge 1 ]; then
        cd "$1"
        ls
    else
        echo 'USAGE: cls {DIRECTORYNAME}'
    fi
}
# cd into directory and list contents (detailed)
# USAGE: cls {DIRECTORYNAME}
function cll() {
    # Change into a directory and list its contents in long format (`ll`).
    # USAGE: cll {DIRECTORYNAME}
    if [ $# -lt 1 ];
    then
        # fix: the usage text previously said "cls" (copy/paste from cls())
        echo 'USAGE: cll {DIRECTORYNAME}';
    else
        cd "$1";
        ll;
    fi
}
#================================#
# file/directory manipulaion #
#================================#
# create directory and cd into it, also creates parent directories
# USAGE: mcd {DIRECTORYNAME}
# Create a directory (including missing parents) and change into it.
# USAGE: mcd {DIRECTORYNAME}
function mcd() {
    if [ $# -ge 1 ]; then
        mkdir -pv "$1"
        cd "$1"
    else
        echo 'USAGE: mcd {DIRECTORYNAME}'
    fi
}
# make backup of file
# USAGE: backup {FILENAME} {ENDING}
function backup() {
    # Make a backup copy of a file.
    # USAGE: backup {FILENAME} {ENDING}
    #   with ENDING:    copies FILENAME to FILENAME.ENDING
    #   without ENDING: copies FILENAME to FILENAME_<timestamp>, preserving
    #                   attributes (cp -a)
    if [ $# -lt 2 ];
    then
        if [ $# -lt 1 ];
        then
            echo 'USAGE: backup {FILENAME} {ENDING}';
        else
            # fix: `date --iso-8601=seconds` is GNU-only and fails on the BSD
            # date shipped with macOS; use an equivalent portable format string
            cp -a "$1" "${1}_$(date +"%Y-%m-%dT%H:%M:%S%z")"
        fi
    else
        cp "$1"{,."$2"};
    fi
}
# print the last ten modified files in the specified directory
# USAGE: lastmod {DIRECTORY}
# Print the ten most recently modified entries of a directory.
# USAGE: lastmod {DIRECTORY}   (defaults to the current directory)
function lastmod() {
    local target="./"
    if [ $# -ge 1 ]; then
        target=$1
    fi
    ls -lt $target | head
}
# copy a file to the clipboard from the command line (requires xclip to be installed)
# USAGE copyfile {FILENAME}
#function copyfile() {
# if [ $# -lt 1 ];
# then
# echo 'USAGE copyfile {FILENAME}';
# else
# cat $1 | xclip -selection clipboard;
# fi
#}
#==============#
# archives #
#==============#
# extract any kind of archive
# USAGE: extract {FILENAME}
# Extract any common archive type, dispatching on the file extension.
# USAGE: extract {FILENAME}
function extract() {
    if [ $# -lt 1 ];
    then
        echo 'USAGE: extract {FILENAME}';
    else
        if [ -f $1 ] ; then
            # NB: the compound suffixes (*.tar.bz2, *.tar.gz) must stay listed
            # before the bare *.bz2 / *.gz patterns so they match first.
            case $1 in
                *.tar.bz2) tar xjf $1 ;;
                *.tar.gz)  tar xzf $1 ;;
                *.bz2)     bunzip2 $1 ;;
                *.rar)     unrar e $1 ;;
                *.gz)      gunzip $1 ;;
                *.tar)     tar xf $1 ;;
                *.tbz2)    tar xjf $1 ;;
                *.tgz)     tar xzf $1 ;;
                *.zip)     unzip $1 ;;
                *.Z)       uncompress $1 ;;
                *.7z)      7z x $1 ;;
                *)         echo "'$1' cannot be extracted via extract()" ;;
            esac
        else
            echo "'$1' is not a valid file"
        fi
    fi
}
#===============#
# utilities #
#===============#
# shows uptime using a shorter formula
function myuptime() {
    # Short "Uptime: X days," summary: keep fields 3-5 of `uptime` and drop
    # the commas.
    # NOTE(review): uptime's field positions shift when uptime is under a
    # day — verify the output on freshly booted machines.
    uptime | awk '{ print "Uptime:", $3, $4, $5 }' | sed 's/,//g';
    return;
}
# google for specified term in default browser
function google() {
    # Open a Google search for all arguments in the default browser.
    # ${(j: :)@} is zsh's join flag (arguments joined with spaces); the result
    # is percent-encoded by urlencode() defined above.
    xdg-open "https://www.google.com/search?q=`urlencode "${(j: :)@}"`";
}
#===================#
# TYPO3-related #
#===================#
# grab version (if not exist) and add symlinks
function typo3init() {
    # Download the requested TYPO3 source release into $typo3SourcesDirectory
    # (skipped when already cached) and create the typo3_src / typo3 /
    # index.php symlinks in the current directory.
    # USAGE: typo3init {VERSION}
    if [ $# -lt 1 ];
    then
        echo 'USAGE: typo3init {VERSION}';
    else
        version=$1;
        # remember the starting directory; NB this shadows the shell's $pwd
        pwd=$(pwd);
        cd ${typo3SourcesDirectory};
        if [ ! -d "typo3_src-${version}" ]; then
            wget https://get.typo3.org/${version}/zip -O typo3_src-${version}.zip;
            unzip typo3_src-${version}.zip;
            rm typo3_src-${version}.zip;
        fi
        cd ${pwd};
        ln -s ${typo3SourcesDirectory}/typo3_src-${version}/ typo3_src;
        ln -s typo3_src/typo3/;
        ln -s typo3_src/index.php;
        ls -la;
    fi
}
# update typo3
function typo3update() {
if [ $# -lt 1 ];
then
echo 'USAGE: typo3update {VERSION}';
else
version=$1;
pwd=$(pwd);
cd ${typo3SourcesDirectory};
if [ ! -d "typo3_src-${version}" ]; then
wget https://get.typo3.org/${version}/zip -O typo3_src-${version}.zip;
unzip typo3_src-${version}.zip;
rm typo3_src-${version}.zip;
fi
cd ${pwd};
rm -rf typo3_src;
ln -s ${typo3SourcesDirectory}/typo3_src-${version}/ typo3_src;
ls -la;
fi
} | true |
ce39ad40c271db6f6cbf8a47328ad99455531847 | Shell | vinayakadkoli/k8s-multi-container | /show-pods.sh | UTF-8 | 1,172 | 3.765625 | 4 | [] | no_license | #!/usr/bin/env bash
NAMESPACE="--all-namespaces"
if [[ -n $1 ]];then
NAMESPACE="-n $1"
fi
CUSTOM_COLUMNS='-o=custom-columns=namespace:.metadata.namespace,name:.metadata.name,node:.spec.nodeName'
GET_PODS_ARGS=(get pods $NAMESPACE --no-headers "$CUSTOM_COLUMNS")
# disable default space as the output from
IFS=$'\n'
NODES=($(kubectl get nodes --no-headers -o=custom-columns=name:.metadata.labels."kubernetes\.io\/hostname",fd:.metadata.labels."failure-domain\.beta\.kubernetes\.io/zone"))
if [[ $? -ne 0 ]];then
echo "Error getting nodes:$NODES"
exit 1
fi
function getFaultDomainForNode() {
if [[ -z $1 ]];then
echo "Error no node passed as parameter"
exit 1
fi
RESULT="No FD found for node $1"
for node in "${NODES[@]}";do
IFS=' ' read NODE FD <<< "$node"
if [[ $1 == ${NODE} ]];then
RESULT=${FD}
break
fi
done
echo -n $RESULT
}
echo "namespace,pod,node,fd"
IFS=$'\n'
PODS=($(kubectl "${GET_PODS_ARGS[@]}"))
for pod in "${PODS[@]}";do
IFS=' ' read NS POD POD_NODE <<< "$pod"
POD_FD=$(getFaultDomainForNode $POD_NODE)
echo "$NS,$POD,$POD_NODE,$POD_FD"
done
| true |
7bfdd00902836852c41021de80293f03c542d49e | Shell | geroyoshi/vorm | /exe/root.vte_upd_sched.sh | UTF-8 | 1,692 | 3.46875 | 3 | [] | no_license | #!/bin/sh
#---------------------------------#
# vte upd sched #
# Authored by Y.Miyamoto #
#---------------------------------#
#-----#
# env #
#-----#
. `dirname $0`/../cfg/set_env.sh
MY_NAME=`basename $0`
EXEC_NAME=`echo ${MY_NAME} | awk -F'.' '{print $2}'`
CFG_FILE=${CFG_DIR}/${EXEC_NAME}.cfg
RC_G_TMP=${LOG_DIR}/${EXEC_NAME}.tmp ; cp /dev/null ${RC_G_TMP}
cd ${VMSSC_PATH}
#-------#
# start #
#-------#
echo -e "\n***** Info : ${MY_NAME} Start *****"
#--- exec ---#
grep -v ^# ${CFG_FILE} | while read TGT
do
FILE=`echo ${TGT} | awk '{print $1}'`
NAME=`grep Name: ${CFG_DIR}/${FILE} | awk -F':' '{print $2}' | tr -d ' '`
#--- before ---#
BEF_FILE=${LOG_DIR}/bef_${EXEC_NAME}_${NAME}_`date "+%Y%m%d-%H%M%S"`.log
./vmssc rekey getsched ${NAME} > ${BEF_FILE}
#--- update ---#
echo -e "\n***** Info : TGT_SCHEDULE=${NAME} *****\n"
./vmssc rekey updatesched -f ${CFG_DIR}/${FILE}
RC_C=$?
if [[ ${RC_C} != ${SUCCESS} ]]; then
echo -e "\n***** ${NAME} : [ NG ] *****"
fi
(( RC = ${RC} + ${RC_C} ))
#--- after ---#
AFT_FILE=${LOG_DIR}/aft_${EXEC_NAME}_${NAME}_`date "+%Y%m%d-%H%M%S"`.log
./vmssc rekey getsched ${NAME} | tee -a ${AFT_FILE}
diff ${AFT_FILE} ${CFG_DIR}/${FILE}
RC_G=$?
if [[ ${RC_G} != ${SUCCESS} ]]; then
echo -e "\n***** ${NAME} : [ NG ] *****"
RC_G=${ERRORS}
fi
(( RC = ${RC} + ${RC_G} ))
echo ${RC} > ${RC_G_TMP}
done
RC=`cat ${RC_G_TMP}`
if [[ ${RC} = ${SUCCESS} ]]; then
echo -e "\n***** Info : ${MY_NAME} End *****"
exit ${SUCCESS}
else
echo -e "\n***** Info : ${MY_NAME} Abend *****"
exit ${ERRORS}
fi
| true |
341e06c18341e0ce27de59eb36fb13a82365a7ae | Shell | Kaixiang/appfirst-collector-release | /jobs/appfirst_collector/templates/appfirst_collector_ctl | UTF-8 | 463 | 3.609375 | 4 | [] | no_license | #!/bin/bash
RUN_DIR=/var/vcap/sys/run/appfirst_collector
PIDFILE=$RUN_DIR/collector.pid
source /var/vcap/packages/appfirst-common/utils.sh
case $1 in
start)
mkdir -p $RUN_DIR
pid_guard $PIDFILE "AppfirstCollector"
dpkg -i /var/vcap/packages/afcollector/appfirst-x86_64.deb
pidof collector | cut -f 1 -d \ > $PIDFILE
;;
stop)
kill_and_wait $PIDFILE
;;
*)
echo "Usage: appfirst_collector_ctl {start|stop}"
;;
esac
| true |
92748e82e40ea68ef561dddc89f2441b4504c318 | Shell | M-Arimase/dev-ethereum | /init_geth.sh | UTF-8 | 842 | 2.546875 | 3 | [] | no_license | #!/bin/bash
export GOROOT=/usr/local/go
export GOPATH=/root
export PATH=/usr/local/go/bin:/root/1806-ethereum/build/bin:$PATH
cd /root/1806-ethereum
git reset --hard
git pull
cd /root
datadir='/root/data-ethereum'
genesis='/root/dev-ethereum/genesis.json'
networkid='1806'
bootnode='enode://efdd65ad5419e2f7d6a53d65d72b0189cef1f89beab2b5f8860f3e48a63fd108843254759c5ba4e226a66f7d8cf51323d1f05e0e929b45e4b87372411fc4938a@172.17.0.8:30301'
extip=$(/sbin/ifconfig -a | grep inet | grep -v 127.0.0.1 | awk '{print $2}')
cmd="nohup geth --datadir $datadir init $genesis &"
echo $cmd
nohup geth --datadir $datadir init $genesis &
cmd="nohup geth --datadir $datadir --networkid $networkid --bootnodes $bootnode --nat "extip:$extip" &"
echo $cmd
nohup geth --datadir $datadir --networkid $networkid --bootnodes $bootnode --nat "extip:$extip" &
| true |
db8646ad1278b9316161b7dd09136bdb3a6ad0fb | Shell | benmezger/dotfiles | /dot_bin/executable_reload-on-change | UTF-8 | 822 | 3.75 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Usable: ./reload-on-change <command-to-run> file1 files2 file..
declare -a prevcsums csums files
cmd="$1"
for arg in "${@:2}"; do
files+=("$(realpath $arg)")
done
for v in "${files[@]}"; do
prevcsums+=("$(sha1sum "$v" | cut -d " " -f 1)")
csums+=("$(sha1sum "$v" | cut -d " " -f 1)")
done
reload=1
while true; do
for i in "${!files[@]}"; do
if [ "${prevcsums[$i]}" != "${csums[$i]}" ]; then
reload=1
prevcsums[$i]="$(sha1sum "${files[$i]}" | cut -d " " -f 1)"
fi
csums[$i]="$(sha1sum "${files[$i]}" | cut -d " " -f 1)"
done
if [[ $reload -eq 1 ]]; then
reload=0
$cmd
fi
reload=0
sleep 1
done
| true |
53c59666b413f08e83a3a5136fde8ed778c728b5 | Shell | sagarbagwe6699/Doc-ify | /script6699.sh | UTF-8 | 950 | 3.34375 | 3 | [] | no_license | #! /bin/bash
echo
echo -------- Shell ---------
echo $SHELL
echo
echo -------- Os ---------
echo $OSTYPE
echo
echo -------- Path ---------
echo $PATH
echo
echo -------- Current working directory ---------
pwd
echo
echo -------- OS version ---------
lsb_release -r
echo
echo -------- Release number ---------
lsb_release -sr
echo
echo -------- Kernel ---------
uname -r
echo
echo -------- Sort ---------
sort ./name.txt
echo
sort ./number.txt
echo ---- Sort unique ----
sort -r -u ./name.txt
echo
sort -r -u ./number.txt
echo ---- Sort numbers ----
sort -r -u -n ./number.txt
echo
echo -------- Grep ---------
grep Lucifer ./name.txt
echo -------- Grep -i ---------
grep -i "Lucifer" ./name.txt
echo
echo -------- AWK ---------
echo
echo -------- AWK print ---------
awk '{print}' ./name.txt
echo
echo -------- AWK word ---------
awk '/Lucifer/{print}' ./name.txt
echo
echo -------- AWK first and third ---------
awk '{print $1 , $3}' ./name.txt
echo
| true |
0e4fa36018da34dbfc2a06e02c1b1f03529cf474 | Shell | DBMI/TDT_mpc | /main.sh | UTF-8 | 560 | 2.765625 | 3 | [] | no_license | #!/bin/bash
### This shell script is expected to run from 'docker run' command within a Docker Container.
### Added to https://github.com/DBMI/TDT_mpc forked from https://github.com/ndokmai/TDT_mpc
# copy configuration files from host drive to container drive
cp /opt/mydata/addresses.txt /opt/TDT_mpc/input
cp /opt/mydata/parameters.txt /opt/TDT_mpc/input
# change a working directory
cd /opt/TDT_mpc
# prepare secure multi-party computation (MPC)
./gen_circuit.sh /opt/mydata/mydata.input
# Sequentially, run multi-party computation (MPC)
./run_mpc.sh | true |
e340b15bf58505c2d482e567c8caccd32b9ac3e3 | Shell | m0zgen/vbox-cli | /vbox-cli.sh | UTF-8 | 6,436 | 4.03125 | 4 | [] | no_license | #!/bin/bash
# Simple management tool for VirtualBox machines
# Created by Yevgeniy Goncharov / https://sys-adm.in
# ---------------------------------------------------------- VARIABLES #
PATH=$PATH:/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
SCRIPTPATH=$(cd `dirname "${BASH_SOURCE[0]}"` && pwd)
SCRIPTNAME=`basename "$0"`
# ---------------------------------------------------------- Colorize #
# Print all arguments in bold green. %b honors backslash escapes (callers
# pass "\n" inside their messages).
# fix: the message was previously interpolated into the printf FORMAT string,
# so '%' characters were treated as conversions and multiple arguments
# misbehaved; a fixed format with "$*" as data avoids both.
Info() {
    printf "\033[1;32m%b\033[0m\n" "$*"
}
# Print all arguments in bold magenta (see Info for the %b/"$*" rationale:
# a fixed format string keeps '%' in messages from being misinterpreted).
Warning() {
    printf "\033[1;35m%b\033[0m\n" "$*"
}
# Print all arguments in bold red (fixed printf format string instead of
# interpolating the message into the format, which broke on '%').
Error() {
    printf "\033[1;31m%b\033[0m\n" "$*"
}
# Emit a single blank line, used as a visual separator after output.
End() {
    printf "\n"
}
# ---------------------------------------------------------- FUNCTIONS #
function find-vbox(){
    # Check if VirtualBox installed
    # Prints a confirmation when VBoxManage exists; stays silent otherwise
    # (no error branch).
    # NOTE(review): only /usr/bin/VBoxManage is checked — installs elsewhere
    # on $PATH are reported as missing.
    if [[ -f /usr/bin/VBoxManage ]]; then
        Info "\nVirtualBox found!\n"
    fi
}
function usage() {
    # Print the full list of supported flags (each maps to one wrapper below).
    # NB: the option lines form a single backslash-continued `echo -e` command.
    Info "\nUsage"
    echo -e "" \
    "$SCRIPTNAME -show | --show-vm\n" \
    "$SCRIPTNAME -status | --show-vm-status\n" \
    "$SCRIPTNAME -running | --show-vm-running\n" \
    "$SCRIPTNAME -start | --start-vm [vmname]\n" \
    "$SCRIPTNAME -start-hidden | --start-vm-hidden [vmname]\n" \
    "$SCRIPTNAME -start-simple | --start-vm-simple-gui [vmname]\n" \
    "$SCRIPTNAME -pause | --pause-vm [vmname]\n" \
    "$SCRIPTNAME -resume | --resume-vm [vmname]\n" \
    "$SCRIPTNAME -reset | --reset-vm [vmname]\n" \
    "$SCRIPTNAME -off | --poweroff-vm [vmname]\n" \
    "$SCRIPTNAME -save | --save-vm [vmname]\n" \
    "$SCRIPTNAME -i | --install\n" \
    "$SCRIPTNAME -u | --uninstall\n"
}
# VM Statuses
function show-vm(){
    # List every registered VM name, one per line: strip the surrounding
    # quotes (and trailing {uuid}) from `VBoxManage list vms` output.
    VBoxManage list vms | sed "s/\"\(.*\)\".*/\1/"
    End
}
function show-vm-status(){
    # Print "Name / State ..." per VM by pairing the Name: and State lines of
    # the long listing onto a single line (paste joins them two-by-two).
    VBoxManage list vms -l | grep -e ^Name: -e ^State | sed "s/Name:[ ]*\(.*\)/\1 \//;s/State:[\ ]*//" | paste -d " " - -
}
function show-vm-running(){
    # Show only the VMs that are currently powered on.
    VBoxManage list runningvms
}
# VM Operations
#
# VM lifecycle wrappers. Each one insists on a VM name argument, then
# delegates to VBoxManage.
# fix: "$1" is quoted throughout — VirtualBox VM names may contain spaces,
# and the previous unquoted form split such names into several arguments.

# Start a VM with the default GUI frontend.
function start-vm(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage startvm "$1"
    fi
}
# Start a VM in the background with no window (headless).
function start-vm-hidden(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage startvm "$1" --type headless
    fi
}
# Start a VM with the minimal SDL frontend.
function start-vm-simple-gui(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage startvm "$1" --type sdl
    fi
}
# Suspend a running VM in place.
function pause-vm(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage controlvm "$1" pause
    fi
}
# Resume a previously paused VM.
function resume-vm(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage controlvm "$1" resume
    fi
}
# Hard-reset a running VM (like pressing the reset button).
function reset-vm(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage controlvm "$1" reset
    fi
}
# Hard power-off without a guest shutdown.
function poweroff-vm(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage controlvm "$1" poweroff
    fi
}
# Save the machine state to disk and stop the VM.
function save-vm(){
    if [[ -z "$1" ]]; then
        Warning "Please determine [vmname]"
    else
        VBoxManage controlvm "$1" savestate
    fi
}
# Install
#
function install(){
    # Copy this script into /usr/bin so it is available system-wide.
    # Requires root; refuses to overwrite an existing installation.
    if [ "$(id -u)" != "0" ]; then
        Error "This script must be run as root" 1>&2
        exit 1
    else
        if [[ -f /usr/bin/$SCRIPTNAME ]]; then
            Warning "Script already installed. Uninstall first."
        else
            cp $SCRIPTPATH/$SCRIPTNAME /usr/bin/
            Info "Script installed to folder /usr/bin/$SCRIPTNAME"
        fi
    fi
}
# Uninstall
#
function uninstall(){
    # Remove a previously installed copy of this script from /usr/bin.
    # Requires root; reports when nothing is installed.
    if [ "$(id -u)" != "0" ]; then
        Error "This script must be run as root" 1>&2
        exit 1
    else
        if [[ -f /usr/bin/$SCRIPTNAME ]]; then
            rm /usr/bin/$SCRIPTNAME
            Info "Script removed from folder /usr/bin/$SCRIPTNAME. Done!"
        else
            Warning "Script not installed!"
        fi
    fi
}
# ---------------------------------------------------------- CHECK ARGS #
if [[ -z $1 ]]; then
    # No arguments: report whether VirtualBox is present, show usage and quit.
    find-vbox
    usage
    End
    exit
fi
# ---------------------------------------------------------- ARGS #
while [ "$1" != "" ]; do
case $1 in
-show | --show-vm ) shift
show-vm
;;
-status | --show-vm-status ) shift
show-vm-status
;;
-running | --show-vm-running ) shift
show-vm-running
;;
-start | --start-vm ) shift
start-vm $1
;;
-start-hidden | --start-vm-hidden ) shift
start-vm-hidden $1
;;
-start-simple | --start-vm-simple-gui ) shift
start-vm-simple-gui $1
;;
-pause | --pause-vm ) shift
pause-vm $1
;;
-resume | --resume-vm ) shift
resume-vm $1
;;
-reset | --reset-vm ) shift
reset-vm $1
;;
-off | --poweroff-vm ) shift
poweroff-vm $1
;;
-save | --save-vm ) shift
save-vm $1
;;
-i | --install ) shift
install
;;
-u | --uninstall ) shift
uninstall
;;
-h | --help ) usage
exit
;;
* ) usage
exit 1
esac
shift
done | true |
a014799c7bf4eee78943724f83b6f43d7089c407 | Shell | binghuonayimiao/Dotfiles-5 | /install.sh | UTF-8 | 8,966 | 4.4375 | 4 | [] | no_license | #!/bin/bash
# DO NOT EXECUTE THIS FILE OUTSIDE OF THE CLONED REPO AND BE SURE THE CLONED REPO IS IN YOUR $HOME DIRECTORY
# Ensure the hard prerequisites (zsh, git, wget) are present, trying each
# common package manager in turn (apt, pacman, dnf, yum, brew, pkg) before
# giving up with an instruction to install them manually.
if command -v zsh &> /dev/null && command -v git &> /dev/null && command -v wget &> /dev/null; then
    echo -e "ZSH and Git are already installed\n"
else
    if sudo apt install -y zsh git wget || sudo pacman -S zsh git wget || sudo dnf install -y zsh git wget || sudo yum install -y zsh git wget || sudo brew install git zsh wget || pkg install git zsh wget ; then
        echo -e "zsh wget and git Installed\n"
    else
        echo -e "Please install the following packages first, then try again: zsh git wget \n" && exit
    fi
fi
CLONED_REPO_DIR=$(pwd)
if [ -d $CLONED_REPO_DIR ]; then
echo -e "Moving to parent directory...\n"
cd ..
echo -e "We are in $PWD"
# check if cloned repo is located in $HOME directory
if [ -d $HOME ]; then
echo -e "Repo appears to be located in users home directory, continuing...\n"
else
echo -e "REPO IS NOT IN YOUR HOME DIRECTORY\nPLEASE MOVE REPO TO $HOME BEFORE RUNNING THE INSTALL SCRIPT!\n"
echo -e "Use:\nmv -drf $CLONED_REPO_DIR $HOME\nTo move the git repo directory to your home directory, then try running this again." && exit
fi
else
echo -e "Something went wrong, stopping...\n"
exit
fi
# Move any existing ~/.zshrc aside; 'mv -n' never clobbers an existing backup
# for the same date.
if mv -n ~/.zshrc ~/.zshrc-backup-$(date +"%Y-%m-%d"); then # backup .zshrc
    echo -e "Backed up the current .zshrc to .zshrc-backup-date\n"
fi
# Create user's ~/.config/zsh directory
if [ -d ~/.config/zsh ]; then
    echo -e "~/.config/zsh directory already exists.\n"
else
    mkdir -p ~/.config/zsh
fi
# ZDOTDIR target: all zsh configuration files live here from now on.
Z_DOT_DIR=~/.config/zsh
# HOMEBREW/LINUXBREW INSTALL
# NOTE(review): presence of /home/linuxbrew is used as the "already installed"
# marker; this only detects Linuxbrew's default prefix -- confirm.
if [ -d /home/linuxbrew ]; then
    echo -e "Homebrew is already installed.\n"
else
    echo -e "Homebrew not installed. Installing Homebrew...\n"
    /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
fi
# MINICONDA INSTALL
if [ -d ~/miniconda3 ]; then
    echo -e "Miniconda3 is already installed.\n"
else
    # Reuse a previously downloaded installer if one is present.
    if [[ -e Miniconda3-latest-Linux-x86_64.sh ]]; then
        echo -e "Miniconda3 is not installed but already downloaded.\n"
        echo -e "Starting Miniconda3 setup...\n"
        bash ~/Miniconda3-latest-Linux-x86_64.sh
    else
        echo -e "Miniconda3 is not installed, downloading...\n"
        wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
        echo -e "Starting Miniconda3 setup...\n"
        bash ~/Miniconda3-latest-Linux-x86_64.sh
    fi
fi
# Remove the installer only after a successful install (~/miniconda3 exists).
if [[ -f ~/Miniconda3-latest-Linux-x86_64.sh && -d ~/miniconda3 ]]; then
    echo -e "Removing Miniconda3 install file...\n"
    rm ~/Miniconda3-latest-Linux-x86_64.sh
fi
# OMZ INSTALL
echo -e "Installing oh-my-zsh\n"
if [ -d ~/.oh-my-zsh ]; then
    # Migrate a legacy home-directory install into the new ZDOTDIR layout.
    echo -e "oh-my-zsh is already installed in home directory, moving to new ~/.config/zsh directory...\n"
    mv ~/.oh-my-zsh $Z_DOT_DIR
    cd $Z_DOT_DIR/.oh-my-zsh && git pull
else
    if [ -d $Z_DOT_DIR/.oh-my-zsh ]; then
        echo -e "oh-my-zsh is already installed in $Z_DOT_DIR.\n"
        cd $Z_DOT_DIR/.oh-my-zsh && git pull
    else
        echo -e "oh-my-zsh is not installed in $Z_DOT_DIR. Installing...\n"
        git clone --depth=1 git://github.com/robbyrussell/oh-my-zsh.git $Z_DOT_DIR/.oh-my-zsh
    fi
fi
#if [ -f ~/.zshrc ]; then
#    cp -f .zshrc ~/
#fi
# INSTALL FONTS
# Each font is fetched into ~/.fonts only if not already present.
echo -e "Installing Nerd Fonts version of Hack, Roboto Mono, DejaVu Sans Mono, Source Code Pro\n"
if [ -f ~/.fonts/DejaVu\ Sans\ Mono\ Nerd\ Font\ Complete.ttf ]; then
    echo -e "DejaVu Sans Mono Nerd Font already installed.\n"
else
    echo -e "Installing Nerd Fonts version of DejaVu Sans Mono\n"
    wget -q --show-progress -N https://github.com/ryanoasis/nerd-fonts/raw/master/patched-fonts/DejaVuSansMono/Regular/complete/DejaVu%20Sans%20Mono%20Nerd%20Font%20Complete.ttf -P ~/.fonts/
fi
if [ -f ~/.fonts/Roboto\ Mono\ Nerd\ Font\ Complete.ttf ]; then
    echo -e "Roboto Mono Nerd Font already installed.\n"
else
    echo -e "Installing Nerd Fonts version of Roboto Mono\n"
    wget -q --show-progress -N https://github.com/ryanoasis/nerd-fonts/raw/master/patched-fonts/RobotoMono/Regular/complete/Roboto%20Mono%20Nerd%20Font%20Complete.ttf -P ~/.fonts/
fi
if [ -f ~/.fonts/Hack\ Regular\ Nerd\ Font\ Complete.ttf ]; then
    echo -e "Hack Nerd Font already installed.\n"
else
    echo -e "Installing Nerd Fonts version of Hack\n"
    wget -q --show-progress -N https://github.com/ryanoasis/nerd-fonts/raw/master/patched-fonts/Hack/Regular/complete/Hack%20Regular%20Nerd%20Font%20Complete.ttf -P ~/.fonts/
fi
if [ -f ~/.fonts/Sauce\ Code\ Pro\ Nerd\ Font\ Complete.ttf ]; then
    echo -e "Sauce Code Pro Nerd Font already installed.\n"
else
    echo -e "Installing Nerd Fonts version of Source Code Pro\n"
    wget -q --show-progress -N https://github.com/ryanoasis/nerd-fonts/raw/master/patched-fonts/SourceCodePro/Regular/complete/Sauce%20Code%20Pro%20Nerd%20Font%20Complete.ttf -P ~/.fonts/
fi
# Rebuild the font cache so the new fonts are usable immediately.
fc-cache -fv ~/.fonts
# OMZ PLUGINS INSTALL
# For each plugin: update in place if cloned already, otherwise shallow-clone.
if [ -d $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-autosuggestions ]; then
    cd $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-autosuggestions && git pull
else
    git clone --depth=1 https://github.com/zsh-users/zsh-autosuggestions $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-autosuggestions
fi
if [ -d $Z_DOT_DIR/.oh-my-zsh/custom/plugins/conda-zsh-completion ]; then
    cd $Z_DOT_DIR/.oh-my-zsh/custom/plugins/conda-zsh-completion && git pull
else
    git clone --depth=1 https://github.com/esc/conda-zsh-completion $Z_DOT_DIR/.oh-my-zsh/custom/plugins/conda-zsh-completion
fi
if [ -d $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-syntax-highlighting ]; then
    cd $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-syntax-highlighting && git pull
else
    git clone --depth=1 https://github.com/zsh-users/zsh-syntax-highlighting.git $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-syntax-highlighting
fi
if [ -d $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-completions ]; then
    cd $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-completions && git pull
else
    git clone --depth=1 https://github.com/zsh-users/zsh-completions $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-completions
fi
if [ -d $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-history-substring-search ]; then
    cd $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-history-substring-search && git pull
else
    git clone --depth=1 https://github.com/zsh-users/zsh-history-substring-search $Z_DOT_DIR/.oh-my-zsh/custom/plugins/zsh-history-substring-search
fi
# INSTALL POWERLEVEL10K THEME
if [ -d $Z_DOT_DIR/.oh-my-zsh/custom/themes/powerlevel10k ]; then
    cd $Z_DOT_DIR/.oh-my-zsh/custom/themes/powerlevel10k && git pull
else
    git clone --depth=1 https://github.com/romkatv/powerlevel10k.git $Z_DOT_DIR/.oh-my-zsh/custom/themes/powerlevel10k
fi
cd $HOME
# copying files from repo to user's home and ~/.config/zsh directories
# files going into $HOME
#if [ -d ~/.bashrc ]; then
#    echo -e ".bashrc already exists, making backup in current directory...\n"
#    mv ~/.bashrc ~/.bashrc_pre_dotfiles
#    cp -f $CLONED_REPO_DIR/.bashrc .
#else
#    cp -f $CLONED_REPO_DIR/.bashrc .
#fi
#if [ -d ~/.profile ]; then
#    echo -e ".profile already exists, making backup in current directory...\n"
#    mv ~/.profile ~/.profile_pre_dotfiles
#    cp -f $CLONED_REPO_DIR/.profile .
#else
#    cp -f $CLONED_REPO_DIR/.profile .
#fi
# files going into $HOME/.config/zsh
# Install the zsh dotfiles from the cloned repo into $Z_DOT_DIR, first moving
# any pre-existing copy in $HOME aside as <name>_pre_dotfiles.
# FIX: the original tested these regular files with '[ -d ... ]' (a directory
# test), which can never succeed for a file, so the advertised backup was
# never made; '-f' is the correct test. The four copy-paste branches are also
# collapsed into one loop.
for dotfile in .p10k.zsh .zshrc .zshenv .zsh_aliases; do
    if [ -f ~/"$dotfile" ]; then
        echo -e "$dotfile already exists, making backup in current directory...\n"
        mv ~/"$dotfile" ~/"${dotfile}_pre_dotfiles"
    fi
    cp -f "$CLONED_REPO_DIR/$dotfile" "$Z_DOT_DIR"
done
# Conda init snippet is copied unconditionally (no user copy to back up).
cp -f "$CLONED_REPO_DIR/.conda_setup" "$Z_DOT_DIR"
echo -e "Finished transferring repo files into new .config/zsh directory.\n"
echo -e "Creating symlink for .zshenv in home directory so ZDOTDIR variable will be set on shell startup.\n"
# -s symlink, -r relative, -f replace any existing link; zsh reads ~/.zshenv
# first, which then points it at $Z_DOT_DIR.
ln -srf $Z_DOT_DIR/.zshenv ~/.
# source ~/.zshrc
echo -e "\nSudo access is needed to change default shell\n"
# Switch the login shell to zsh and let oh-my-zsh self-update once.
if chsh -s $(which zsh) && /bin/zsh -i -c upgrade_oh_my_zsh; then
    echo -e "Installation Successful, exit terminal and enter a new session"
else
    echo -e "Something is wrong"
fi
exit
| true |
40733076ea2114cae1f6b671330b9749ee214667 | Shell | gaochangshui-trec/SoftChouhyou_CGI | /CGI/CHOUHYOU_INITLOAD_KENSAKU_KIKAN_KISETU.SEARCH | UTF-8 | 3,062 | 3.140625 | 3 | [] | no_license | #!/bin/bash -xv
#
# CHOUHYOU_INITLOAD_KENSAKU_KIKAN_KISETU.SEARCH >>> 帳票区分
# Usage : CHOUHYOU_INITLOAD_KENSAKU_KIKAN_KISETU.SEARCH
#
# Written by Li.dan(TRE・CHINA) / Date : 31 July. 2019
#/////////////////////////////////////////////////////////////////////////
# 初期設定
#/////////////////////////////////////////////////////////////////////////
# 環境変数設定
export PATH=/home/SMART:/home/SMART_TRIAL:/usr/local/bin:${PATH}
export LANG=ja_JP.UTF-8
## ログディレクトリの定義
cgishld=/home/trial/AP/SOFT_CHOUHYOU
logd=${cgishld}/LOG
##走行ログの記録
echo "${logd}/LOG.$(basename $0).$(date +%Y%m%d)_$(date +%H%M%S)_$$" &>/dev/null
exec 2>${logd}/LOG.$(basename $0).$(date +%Y%m%d)_$(date +%H%M%S)_$$
# ディレクトリ定義
tmp=/tmp/$$-$(basename $0)_$(date +%Y%m%d%H%M%S)
lv4d=/home/trial/APDATA/SOFT_CHOUHYOU/POMPA
tbld=/SKCWEB/TBL
dir=/home/trial
lockd=${dir}/APDATA/SOFT_CHOUHYOU/LOCK
# 変数チェック
[ $# -ne 3 ] && exit 1
chouhyoutype=$1
kikan=$2
kisetutype=$3
ERROR_EXIT(){
exit 1
}
year=$(date +%Y)
if [ ${chouhyoutype} == "KISETU" ];then
kuniq -k2 ${lv4d}/KISETU/MASTER/COMMON/MST_KISETU_UREZAIKO_KIKAN |
hejoin -k2 - ${lv4d}/${chouhyoutype}/MASTER/COMMON/MST_KIKAN |
# 1:期間ID 2:期間名 3:年 4:季節タイプ
ssort -k1 |
tail -n1 > $tmp-maxidfile
[ $(errchk ${PIPESTATUS[@]}) -ne 0 ] && ERROR_EXIT
maxid=$(selcol -c1 $tmp-maxidfile)
else
cat ${lv4d}/${chouhyoutype}/MASTER/COMMON/MST_KIKAN |
# 1:期間ID 2:期間名 3:年 4:季節タイプ
awk '$3<="'${year}'"' |
ssort -k1 |
tail -n1 > $tmp-maxidfile
[ $(errchk ${PIPESTATUS[@]}) -ne 0 ] && ERROR_EXIT
maxid=$(selcol -c1 $tmp-maxidfile)
fi
#/////////////////////////////////////////////////////////////////////////
# 処理部分
#/////////////////////////////////////////////////////////////////////////
cat ${lv4d}/${chouhyoutype}/MASTER/COMMON/MST_KIKAN |
# 1:期間ID 2:期間名 3:年 4:季節タイプ
awk '$1<="'${maxid}'"' |
ssort -k1 |
cat <(echo F1 F2 F3 F4) - >${kikan}
# 1:期間ID 2:期間名 3:年 4:季節タイプ
[ $(errchk ${PIPESTATUS[@]}) -ne 0 ] && ERROR_EXIT
cat ${lv4d}/${chouhyoutype}/MASTER/COMMON/MST_KISETUTYPE |
# 1:季節区分ID 2:季節区分名 3:季節がイプ
ssort -k1 |
cat <(echo F1 F2 F3) - >${kisetutype}
# 1:季節区分ID 2:季節区分名 3:季節がイプ
[ $(errchk ${PIPESTATUS[@]}) -ne 0 ] && ERROR_EXIT
#/////////////////////////////////////////////////////////////////////////
# 終了部分
#/////////////////////////////////////////////////////////////////////////
# 終了
rm -rf $tmp-* &>/dev/null
exit 0
| true |
5bbe3370be650005262b2f0df3d71a75527b73ef | Shell | keystroke3/dotfiles | /.bin/sleep-duration | UTF-8 | 353 | 2.953125 | 3 | [] | no_license | #!/bin/bash
SLEEP=$(journalctl -n2 -u sleep.target | awk -F' ' '/Reached/ {print $3}' | xargs -I '{}' date -d '{}' "+%s")
WAKE=$(journalctl -n2 -u sleep.target | awk -F' ' '/Stopped/ {print $3}' | xargs -I '{}' date -d '{}' "+%s")
DIFF=$(($WAKE-$SLEEP))
((h=${DIFF}/3600))
((m=(${DIFF}%3600)/60))
((s=${DIFF}%60))
printf "%02dh:%02dm:%02ds\n" $h $m $s
| true |
a1f311fffc971ef89bf76f4944c784cb170f5745 | Shell | Rajeshnalla21/CodinClub | /PatternAssignment/pinCodePattern.sh | UTF-8 | 214 | 3.078125 | 3 | [] | no_license | #!/bin/bash -x
read -p "enter a pincode " pin
#pat="^[1-9]{1}[0-9]{2}[ ]{0,1}[0-9]{3}$"
#pat="^[1-9]{1}[0-9]{2}[ ]?[0-9]{3}$"
pat="^[4000]{1}[0-9]{2}"
if [[ $pin =~ $pat ]]
then
echo valid
else
echo invalid
fi
| true |
8b362c1f3af56dba2e163a5218d7666ea4524270 | Shell | jenkins-infra/update-center2 | /site/generate-htaccess.sh | UTF-8 | 4,740 | 3.734375 | 4 | [] | no_license | #!/bin/bash
USAGE="Usage: $0 [<release> ...]
"
[[ $# -gt 0 ]] || { echo "${USAGE}Expected at least one argument." >&2 ; exit 1 ; }
set -o pipefail
set -o errexit
set -o nounset
cat <<EOF
# THIS FILE IS GENERATED
#
# See: <https://github.com/jenkins-infra/update-center2/blob/master/site/generate-htaccess.sh>
RewriteEngine on
# https://github.com/jenkinsci/jenkins/blob/56b6623915c50b4a2ef994a143a8fe0829587f3c/core/src/main/java/hudson/model/UpdateCenter.java#L1182-L1207
RewriteCond %{QUERY_STRING} ^.*uctest$
RewriteRule ^(|.+/)(update\-center.*\.(json|html)+) /uctest.json [NC,L]
EOF
echo "# Version-specific rulesets generated by generate.sh"
n=$#
versions=( "$@" )
newestStable=
oldestStable=
oldestWeekly=
for (( i = n-1 ; i >= 0 ; i-- )) ; do
version="${versions[i]}"
IFS=. read -ra versionPieces <<< "$version"
major=${versionPieces[0]}
minor=${versionPieces[1]}
patch=
if [[ ${#versionPieces[@]} -gt 2 ]] ; then
patch=${versionPieces[2]}
fi
if [[ "$version" =~ ^2[.][0-9]+[.][0-9]$ ]] ; then
# This is an LTS version
if [[ -z "$newestStable" ]] ; then
newestStable="$version"
fi
cat <<EOF
# If major > ${major} or major = ${major} and minor > ${minor} or major = ${major} and minor = ${minor} and patch >= ${patch}, use this LTS update site
RewriteCond %{QUERY_STRING} ^.*version=(\d)\.(\d+)\.(\d+)(|[-].*)$ [NC]
RewriteCond %1 >${major}
RewriteRule ^(update\-center.*\.(json|html)+) /dynamic-stable-${major}\.${minor}\.${patch}%{REQUEST_URI}? [NC,L,R]
RewriteCond %{QUERY_STRING} ^.*version=(\d)\.(\d+)\.(\d+)(|[-].*)$ [NC]
RewriteCond %1 =${major}
RewriteCond %2 >${minor}
RewriteRule ^(update\-center.*\.(json|html)+) /dynamic-stable-${major}\.${minor}\.${patch}%{REQUEST_URI}? [NC,L,R]
RewriteCond %{QUERY_STRING} ^.*version=(\d)\.(\d+)\.(\d+)(|[-].*)$ [NC]
RewriteCond %1 =${major}
RewriteCond %2 =${minor}
RewriteCond %3 >=${patch}
RewriteRule ^(update\-center.*\.(json|html)+) /dynamic-stable-${major}\.${minor}\.${patch}%{REQUEST_URI}? [NC,L,R]
EOF
oldestStable="$version"
else
# This is a weekly version
# Split our version up into an array for rewriting
# 1.651 becomes (1 651)
oldestWeekly="$version"
cat <<EOF
# If major > ${major} or major = ${major} and minor >= ${minor}, use this weekly update site
RewriteCond %{QUERY_STRING} ^.*version=(\d)\.(\d+)(|[-].*)$ [NC]
RewriteCond %1 >${major}
RewriteRule ^(update\-center.*\.(json|html)+) /dynamic-${major}\.${minor}%{REQUEST_URI}? [NC,L,R]
RewriteCond %{QUERY_STRING} ^.*version=(\d)\.(\d+)(|[-].*)$ [NC]
RewriteCond %1 =${major}
RewriteCond %2 >=${minor}
RewriteRule ^(update\-center.*\.(json|html)+) /dynamic-${major}\.${minor}%{REQUEST_URI}? [NC,L,R]
EOF
fi
done
cat <<EOF
# First LTS update site (stable-$oldestStable) gets all older LTS releases
RewriteCond %{QUERY_STRING} ^.*version=\d\.(\d+)\.\d+(|[-].*)$ [NC]
RewriteRule ^(update\-center.*\.(json|html)+) /dynamic-stable-${oldestStable}%{REQUEST_URI}? [NC,L,R]
RewriteCond %{QUERY_STRING} ^.*version=\d\.(\d+)+(|[-].*)$ [NC]
RewriteRule ^(update\-center.*\.(json|html)+) /dynamic-${oldestWeekly}%{REQUEST_URI}? [NC,L,R]
EOF
echo "# Add a RewriteRule for /stable which will always rewrite to the last LTS site we have"
cat <<EOF
RewriteRule ^stable/(.+) "/dynamic-stable-${newestStable}/\$1" [NC,L,R]
EOF
# Further static rules
cat <<EOF
# These are static rules
# If that all failed, but we have an update center, let's go to current
RewriteRule ^update\-center.*\.(json|html)+ /current%{REQUEST_URI}? [NC,L,R=301]
RewriteRule ^latestCore\.txt /current%{REQUEST_URI}? [NC,L,R=301]
# Ensure /release-history.json goes to the right place
RewriteRule ^release\-history\.json$ /current%{REQUEST_URI}? [NC,L,R=301]
# Ensure /plugin-documentation-urls.json goes to the right place
RewriteRule ^plugin\-documentation\-urls\.json$ /current%{REQUEST_URI}? [NC,L,R=301]
# Ensure /plugin-versions.json goes to the right place
RewriteRule ^plugin\-versions\.json$ /current%{REQUEST_URI}? [NC,L,R=301]
DirectoryIndex index.html
# download/* directories contain virtual URL spaces for redirecting download traffic to mirrors.
# 'latest' need special handling here since they're not getting mirrored properly to get.jenkins.io
RedirectMatch 302 /download/war/latest/jenkins[.]war$ https://updates.jenkins.io/latest/jenkins.war
RedirectMatch 302 /download/plugins/(.*)/latest/(.+)[.]hpi$ https://updates.jenkins.io/latest/\$2.hpi
RedirectMatch 302 /download/war/([0-9]+[.][0-9]+[.][0-9]+/jenkins)[.]war$ https://get.jenkins.io/war-stable/\$1.war
RedirectMatch 302 /download/war/(.+)[.]war$ https://get.jenkins.io/war/\$1.war
RedirectMatch 302 /download/plugins/(.+)[.]hpi$ https://get.jenkins.io/plugins/\$1.hpi
EOF
| true |
54a6afc3e907e83a9db94bf10e4d18e1efa63ed6 | Shell | Ascend/ModelZoo-PyTorch | /PyTorch/contrib/audio/speech-transformer/test/utils/build_kenlm_model_from_arpa.sh | UTF-8 | 1,170 | 3.953125 | 4 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | #!/usr/bin/env bash
# 2020 author Jiayu DU
# Apache 2.0
# This script reads in an Arpa format language model, and converts it into the
# KenLM format language model.
[ -f path.sh ] && . ./path.sh;
# begin configuration section
kenlm_opts="" # e.g. "-q 8 -b 8" for 8bits quantization
model_type="trie" # "trie" or "probing". trie is smaller, probing is faster.
# end configuration section
. utils/parse_options.sh
if [ $# != 2 ]; then
echo "Usage: "
echo " $0 [options] <arpa-lm-path> <kenlm-path>"
echo "e.g.:"
echo " $0 data/local/lm/4gram.arpa data/lang_test/G.trie"
echo "Options:"
echo " --model-type can be either \"trie\" or \"probing\""
echo " --kenlm-opts directly pass through to kenlm"
echo " e.g. for 8bits quantization, feed \"-q 8 -b 8\""
exit 1;
fi
export LC_ALL=C
arpa_lm=$1
kenlm=$2
if ! which build_binary >& /dev/null ; then
echo "$0: cannot find KenLM's build_binary tool,"
echo "check kenlm installation (tools/extras/install_kenlm_query_only.sh)."
exit 1
fi
mkdir -p $(dirname $kenlm)
build_binary $kenlm_opts $model_type $arpa_lm $kenlm
echo "$0: Successfully built arpa into kenlm format: $kenlm"
exit 0
| true |
ae07dce8b08c4cea470971c3452c878dba230a6d | Shell | deemel/code-snippets | /zookeeper-dynamic/zookeeper_svr/src/main/bin/zk-tunnels-upgrade.sh | UTF-8 | 3,903 | 3.90625 | 4 | [] | no_license | #!/bin/bash
# ======================================================================
# Shell variables originating from Maven properties.
# ======================================================================
RC_SUCCESS=${rc.success}
RC_FAILURE=${rc.failure}
USER_ROOT="${user.root}"
DIR_MFDD_BIN="${dir.mfdd.bin}"
DIR_TUNNELS="${dir.tunnels}"
# ======================================================================
# Constants.
# ======================================================================
MIN_ARGS=1
MAX_ARGS=1
PHASE_PRE="pre"
PHASE_POST="post"
CMD="$(basename $0)"
USER=$(whoami)
HOSTNAME=$(hostname -s)
DIR_OPT_TUNNELS="/opt/tunnels"
CMD_CLI="$DIR_MFDD_BIN/cli -c"
SERVICE_ZK_SSH_TUNNEL_PREFIX="zks-"
SERVICE_PROFILE_ZK_SSH_TUNNELS="ZKSSH-SERVER"
CLI_UPDATE_SERVICEPROFILE_OUTPUT_CANNOT_DEALLOCATE="Cannot de-allocate profile as one of the service in profile is currently running"
# ======================================================================
function displayUsage() {
local func="${FUNCNAME[0]}"
echo
echo "This script is intended to be used when upgrading a non-dynamic ZK"
echo "server to a dynamic ZK server (e.g., from ZK in MFDD 2.63 to ZK in MFDD"
echo "3.11.0)."
echo
echo "The \"$PHASE_PRE\" option is intended to ensure SM-managed ZK SSH"
echo "tunnel services continue to work during upgrade. It should be called"
echo "before upgrade."
echo
echo "The \"$PHASE_POST\" option is intended to remove SM-managed ZK SSH"
echo "tunnel services. It should be called after upgrade."
echo
echo "USAGE: $CMD {$PHASE_PRE|$PHASE_POST}"
echo
exit $RC_FAILURE
}
function phasePre() {
local func="${FUNCNAME[0]}"
local rc=$RC_FAILURE
if [ -d "$DIR_OPT_TUNNELS" ]
then
echo
echo "[$func] Directory <$DIR_OPT_TUNNELS> exists; returning <$rc>."
return $rc
fi
ln -sfv $DIR_TUNNELS $DIR_OPT_TUNNELS
rc=$?
return $rc
}
function phasePost() {
local func="${FUNCNAME[0]}"
local rc=$RC_FAILURE
local cmd=""
local output=""
#
# Remove /opt/tunnels link BEFORE stopping & disabling ZK SSH tunnel services
# via CLI because underlying tunnels need to remain running afterwards.
#
rm -fv $DIR_OPT_TUNNELS
cmd="$CMD_CLI \"show-service\""
output=$(eval "$cmd")
rc=$?
echo
echo -e "[$func] Command <$cmd>, rc <$rc>, output <\n$output>."
if [ $rc -ne $RC_SUCCESS ]
then
echo
echo "[$func] Command <$cmd> failed with return code <$rc>; returning <$rc>."
return $rc
fi
output=$(echo "$output" | grep "$SERVICE_ZK_SSH_TUNNEL_PREFIX" | sort)
rc=$?
echo
echo -e "[$func] According to CLI, ZK SSH tunnel services <\n$output>."
if [ $rc -ne $RC_SUCCESS ]
then
echo
echo "[$func] No ZK SSH tunnel services found; returning <$rc>."
return $rc
fi
if [ ! -z "$output" ]
then
for i in $output
do
echo
$CMD_CLI "stop-service --node=$HOSTNAME --service=$i"
echo
$CMD_CLI "update-service --node=$HOSTNAME --service=$i --disable"
done
fi
echo
$CMD_CLI "update-serviceprofile --node=$HOSTNAME --serviceprofile=$SERVICE_PROFILE_ZK_SSH_TUNNELS --deallocate"
echo
$CMD_CLI "show-service-status --node=$HOSTNAME"
echo
$CMD_CLI "show-serviceprofile --node=$HOSTNAME"
rc=$?
return $rc
}
# ======================================================================
# Main.
# ======================================================================
if [ "$USER" != "$USER_ROOT" ]
then
echo "[$CMD] User <$USER> is not <$USER_ROOT>; exiting <$RC_FAILURE>."
exit $RC_FAILURE
fi
if [ $# -lt $MIN_ARGS ]
then
displayUsage
fi
if [ $# -gt $MAX_ARGS ]
then
displayUsage
fi
phase="$1"
case "$phase" in
"$PHASE_PRE")
phasePre
rc=$?
;;
"$PHASE_POST")
phasePost
rc=$?
;;
*)
rc=$RC_FAILURE
echo "[$CMD] Unsupported phase <$phase>; exiting <$rc>."
esac
exit $rc
| true |
9b44c74b51439eb772306f1f42989cc2b653c8e0 | Shell | dozent2018/IFA_LINUX_DEV | /Testpruefungen/test-1/musterloesungen/aufgabe-06.sh | UTF-8 | 3,198 | 4.21875 | 4 | [] | no_license | #!/bin/bash
# Name: aufgabe-6.sh
# Themen: Test von Dateiattributen, Parameter im Script auswerten, Ausgabeumlenkung
# Aufgabe: Dateiattribute von Verzeichnissen prüfen
# Beispiel-Aufruf: $ bash aufgabe-6.sh $HOME / /etc /tmp /foobar
# /Users/jochenreinholdt: create file(s) permitted
# /: create file(s) not permitted. ls permitted.
# /etc: create file(s) not permitted. ls permitted.
# /tmp: create file(s) permitted. ls permitted.
# /geheim: create file(s) not permitted. ls not permitted.
# /foobar: directory not found
# $
#
# Hauptfunktionen: Prüft für beliebig viele übergebene Kommandozeilenargumente, ob
# a) ob überhaupt ein Verzeichnis dieses Namens existiert
# b) ob Sie den Inhalt des Verzeichnisses mit ls auflisten dürfen
# c) ob Sie in dem Verzeichnis Dateien oder Verzeichnisse anlegen dürfen
# Beispiel:
# $ bash aufgabe-6.sh / /foobar /tmp $HOME/geheim
# /: create file(s) not permitted. ls permitted.
# /foobar: directory not found
# /tmp: create file(s) permitted. ls permitted.
# /home/user1/geheim: create file(s) not permitted. ls not permitted.
# $
#
# Fehlerbehandlung: d) "usage: aufgabe-6.sh <directory> <directory> ..." wird
# auf die Standard-Fehlerausgabe ausgegeben, wenn keine Argumente angegeben wurden.
# Diese Meldung soll auch noch stimmen, wenn Sie dieses Script umbennenen
# und nicht den Pfad enthalten, auch wenn sie es mit /home/bin/aufgabe-6.sh
# oder mit ./aufgabe-6.sh aufrufen. Anschliessend wird das Script mit dem
# Rückgabewert 1 beendet.
# e) "/foobar: directory not found" wird
# auf die Standard-Fehlerausgabe ausgegeben, wenn eines der Argumente
# (in diesem Fall /foobar) kein Verzeichnis ist.
# Der Shell wird in diesem Fall am Ende der Wert 2 zurückgegeben,
# aber das Script läuft bis zu Ende.
#
# Einschränkungen: 1. Benennen sie diese Datei nicht um.
# 2. Verwenden Sie keine andere Shell als /bin/bash
# ******************** Ihre Lösung: ********************
# Require at least one directory argument; usage goes to stderr.
if [ $# -lt 1 ];then
  echo "usage: $(basename "$0") <directory> <directory> ..." 1>&2
  exit 1
fi
exit_code=0
# Walk every command-line argument. Quoting "$name" (the original left it
# unquoted) keeps paths containing spaces or glob characters intact.
for name in "$@"
do
  if ! [ -d "$name" ];then
    echo "${name}: directory not found" 1>&2
    exit_code=2
  else
    outstring="${name}:"
    # -w: may create files/directories inside; -x: may list it with ls.
    if [ -w "$name" ];then
      outstring="${outstring} create file(s) permitted."
    else
      outstring="${outstring} create file(s) not permitted."
    fi
    if [ -x "$name" ];then
      outstring="${outstring} ls permitted."
    else
      outstring="${outstring} ls not permitted."
    fi
    echo "$outstring"
  fi
done
exit $exit_code
| true |
1ac9c72d50939e527acf4dde7e5ff4865b512e65 | Shell | preym17/csit | /tests/dpdk/dpdk_scripts/patch_l3fwd.sh | UTF-8 | 817 | 3.546875 | 4 | [
"CC-BY-4.0",
"Apache-2.0",
"LicenseRef-scancode-dco-1.1"
] | permissive | #!/bin/bash
set -x
# Setting variables
# set arch, default to x86_64 if none given
ARCH=${1:-"x86_64"}
PATCH=$2
# dpdk prefers "arm64" to "aarch64" and does not allow arm64 native target
if [ $ARCH == "aarch64" ]; then
ARCH="arm64"
MACHINE="armv8a"
else
MACHINE="native"
fi
DPDK_DIR=dpdk
ROOTDIR=/tmp/openvpp-testing
PWDDIR=$(pwd)
# Compile the l3fwd
export RTE_SDK=${ROOTDIR}/${DPDK_DIR}/
export RTE_TARGET=${ARCH}-${MACHINE}-linuxapp-gcc
cd ${RTE_SDK}/examples/l3fwd
sudo sed -i 's/^#define RTE_TEST_RX_DESC_DEFAULT 128/#define RTE_TEST_RX_DESC_DEFAULT 2048/g' ./main.c
sudo sed -i 's/^#define RTE_TEST_TX_DESC_DEFAULT 512/#define RTE_TEST_TX_DESC_DEFAULT 2048/g' ./main.c
chmod +x ${PATCH} && source ${PATCH}
make clean
make -j || \
{ echo "Failed to compile l3fwd"; exit 1; }
cd ${PWDDIR}
| true |
1cbee381cfb529badb78d75c04a82daca6a723ce | Shell | cburns-mirantis/CloudFerry | /devlab/jenkins/nosetests.sh | UTF-8 | 1,120 | 3.25 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
set -e
set -x
export WORKSPACE="${WORKSPACE:-$( cd $( dirname "$0" ) && cd ../../../ && pwd)}"
export CF_DIR="${WORKSPACE}/CloudFerry"
cd ${CF_DIR}/devlab/
cf_hostname=`vagrant status | grep running | grep cloudferry | awk '{print $1}'`
echo "Copy code archive to cloudferry-${cf_hostname} VM ..."
cf_ip=`vagrant ssh-config ${cf_hostname} | grep HostName | awk '{print $2}'`
cf_user=`vagrant ssh-config ${cf_hostname} | grep -w "User" | awk '{print $2}'`
cf_port=`vagrant ssh-config ${cf_hostname} | grep Port | awk '{print $2}'`
cf_id=`vagrant ssh-config ${cf_hostname} | grep IdentityFile | awk '{print $2}'`
cf_ssh_options="-oConnectTimeout=5 -oConnectionAttempts=3 -oStrictHostKeyChecking=no -oCheckHostIP=no"
cf_ssh_cmd="ssh -q ${cf_ssh_options} -i ${cf_id} ${cf_user}@${cf_ip} -p ${cf_port}"
run_nosetests="CloudFerry/devlab/jenkins/cf/run_nosetests.sh"
${cf_ssh_cmd} ${run_nosetests}
xml_src_path="CloudFerry/devlab/tests/nosetests.xml"
xml_dst_path="${CF_DIR}/devlab/tests/nosetests.xml"
scp -q ${cf_ssh_options} -i ${cf_id} -P ${cf_port} \
${cf_user}@${cf_ip}:${xml_src_path} ${xml_dst_path}
| true |
84512b7c91a866719eab8f433f1d17f037e89b4a | Shell | Rayane-Hamani/Master-en-Genie-Logiciel | /Semestre 2/SR2 - Systèmes Répartis 2/flopbox/scripts/rename.sh | UTF-8 | 850 | 2.984375 | 3 | [] | no_license | read -p '> Press enter to register the localhost server.'
echo ''
curl --request PUT 'http://localhost:8080/flopbox/localhost' --header 'host:localhost'
echo ''
read -p '> Press enter to create the file and directory to rename.'
touch file_to_rename
mkdir dir_to_rename
echo ''
read -p '> Press enter to rename the file and directory.'
echo ""
curl --request PATCH 'http://localhost:8080/flopbox/localhost/file_to_rename' --header 'port:2121' --header 'to:file_renamed' --header 'username:anonymous' --header 'password:anonymous'
curl --request PATCH 'http://localhost:8080/flopbox/localhost/dir_to_rename' --header 'port:2121' --header 'to:dir_renamed' --header 'username:anonymous' --header 'password:anonymous'
echo ''
read -p '> Press enter to delete the file and directory and exit the script.'
rm -f file_renamed
rm -rf dir_renamed
| true |
66a5e8fb667db673a8ee517f06c3d67d2f7fbd8a | Shell | Sachinarjun/sachin_repository | /forloopprime.sh | UTF-8 | 385 | 3.734375 | 4 | [] | no_license | #!/bin/bash -x
#storing the number to be checked
read -p "ENTER THE NUMBER: " number
i=2
#flag variable
f=0
#running a loop from 2 to number/2
while test $i -le `expr $number / 2`
do
#checking if i is factor of number
if test `expr $number % $i` -eq 0
then
f=1
fi
#increment the loop variable
i=`expr $i + 1`
done
if test $f -eq 1
then
echo "Not Prime"
else
echo "Prime"
fi
| true |
8ea6b23cf6a1c2acf14b661ba68085120f3698dc | Shell | kmcmanus3/integrated-toolchain | /create-tls.sh | UTF-8 | 5,128 | 3.578125 | 4 | [] | no_license | #!/bin/bash
# COPYRIGHT (C) 2016 CSC, INC. ALL RIGHTS RESERVED. CONFIDENTIAL
# AND PROPRIETARY.
# ALL SOFTWARE, INFORMATION AND ANY OTHER RELATED COMMUNICATIONS (COLLECTIVELY,
# "WORKS") ARE CONFIDENTIAL AND PROPRIETARY INFORMATION THAT ARE THE EXCLUSIVE
# PROPERTY OF CSC. ALL WORKS ARE PROVIDED UNDER THE APPLICABLE
# AGREEMENT OR END USER LICENSE AGREEMENT IN EFFECT BETWEEN YOU AND
# CSC. UNLESS OTHERWISE SPECIFIED IN THE APPLICABLE AGREEMENT, ALL
# WORKS ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND EITHER EXPRESSED OR
# IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE. ALL USE, DISCLOSURE
# AND/OR REPRODUCTION OF WORKS NOT EXPRESSLY AUTHORIZED BY CSC IS
# STRICTLY PROHIBITED.
function usage () {
echo "Usage: $0 --publicip <Jenkins Public IP>"
exit 1
}
if [ $# -eq 2 ] && [ $1 == "--publicip" ]; then
PUBLICIP="$2"
else
usage
fi
TLSHOME="/etc/docker/tls"
BITS=2048
echo " => Ensuring config directory exists..."
if [ ! -d $TLSHOME ]; then
mkdir -p $TLSHOME
fi
cd $TLSHOME
echo " => Verifying ca.srl"
if [ ! -f "ca.srl" ]; then
echo " => Creating ca.srl"
echo 01 > ca.srl
fi
# Check for ca-key.pem file
echo " => Verifying ca-key.pem"
if [ ! -f "ca-key.pem" ]; then
echo " => Generating CA key"
openssl genrsa -out ca-key.pem $BITS
fi
# Check for ca.pem file
echo " => Verifying ca.pem"
if [ ! -f "ca.pem" ]; then
echo " => Generating CA certificate"
openssl req -new -key ca-key.pem -x509 -days 3650 -nodes -subj "/CN=cfrmgr.proserveau.local" -out ca.pem
fi
if [ -f ca.pem ] && [ -f ca-key.pem ]; then
# Create the client certificate
echo " => Generating client key"
openssl genrsa -out client-key.pem $BITS
echo " => Generating client CSR"
openssl req -subj "/CN=docker.client" -new -key client-key.pem -out client.csr
echo " => Creating extended key usage"
echo "extendedKeyUsage = clientAuth" > extfile.cnf
echo " => Signing client CSR with CA"
openssl x509 -req -days 3650 -in client.csr -CA ca.pem -CAkey ca-key.pem -out client-cert.pem -extfile extfile.cnf
# Create the PUBLICIP certificate
echo " => Generating server key for Jenkins on the public IP."
openssl genrsa -out $PUBLICIP-key.pem $BITS
echo " => Generating server CSR"
openssl req -subj "/CN=$PUBLICIP" -new -key $PUBLICIP-key.pem -out $PUBLICIP.csr
echo " => Signing server CSR with CA"
openssl x509 -req -days 3650 -in $PUBLICIP.csr -CA ca.pem -CAkey ca-key.pem -out $PUBLICIP-cert.pem
# Create the Docker Swarn certificate
echo " => Generating Docker Swarm key"
openssl genrsa -out swarm-key.pem $BITS
echo " => Genereating swarm CSR"
openssl req -subj "/CN=swarm.proserveau.local" -new -key swarm-key.pem -out swarm.csr
echo " => Creating extended key usage"
echo "extendedKeyUsage = clientAuth,serverAuth" > extfile.cnf
echo " => Signing swarm CSR with CA"
openssl x509 -req -days 3650 -in swarm.csr -CA ca.pem -CAkey ca-key.pem -out swarm-cert.pem -extfile extfile.cnf
# Create the Docker Engine certificates
echo " => Generating server key for cfgmgr.proserveau.local."
openssl genrsa -out cfgmgr.proserveau.local-key.pem $BITS
echo " => Generating server CSR"
openssl req -subj "/CN=cfgmgr.proserveau.local" -new -key cfgmgr.proserveau.local-key.pem -out cfgmgr.proserveau.local.csr
echo " => Signing server CSR with CA"
openssl x509 -req -days 3650 -in cfgmgr.proserveau.local.csr -CA ca.pem -CAkey ca-key.pem -out cfgmgr.proserveau.local-cert.pem
echo " => Generating server key for docker0.proserveau.local."
openssl genrsa -out docker0.proserveau.local-key.pem $BITS
echo " => Generating server CSR"
openssl req -subj "/CN=docker0.proserveau.local" -new -key docker0.proserveau.local-key.pem -out docker0.proserveau.local.csr
echo " => Signing server CSR with CA"
openssl x509 -req -days 3650 -in docker0.proserveau.local.csr -CA ca.pem -CAkey ca-key.pem -out docker0.proserveau.local-cert.pem
echo " => Generating server key for docker1.proserveau.local."
openssl genrsa -out docker1.proserveau.local-key.pem $BITS
echo " => Generating server CSR"
openssl req -subj "/CN=docker1.proserveau.local" -new -key docker1.proserveau.local-key.pem -out docker1.proserveau.local.csr
echo " => Signing server CSR with CA"
openssl x509 -req -days 3650 -in docker1.proserveau.local.csr -CA ca.pem -CAkey ca-key.pem -out docker1.proserveau.local-cert.pem
echo " => Generating server key for docker2.proserveau.local."
openssl genrsa -out docker2.proserveau.local-key.pem $BITS
echo " => Generating server CSR"
openssl req -subj "/CN=docker2.proserveau.local" -new -key docker2.proserveau.local-key.pem -out docker2.proserveau.local.csr
echo " => Signing server CSR with CA"
openssl x509 -req -days 3650 -in docker2.proserveau.local.csr -CA ca.pem -CAkey ca-key.pem -out docker2.proserveau.local-cert.pem
# Copy certificates to the Docker hosts
scp -i ~/.ssh/docker.pem ca*.pem docker0*pem ubuntu@docker0.proserveau.local:~/
scp -i ~/.ssh/docker.pem ca*.pem docker1*pem swarm*pem ubuntu@docker1.proserveau.local:~/
scp -i ~/.ssh/docker.pem ca*.pem docker2*pem ubuntu@docker2.proserveau.local:~/
fi
echo " => Script $0 complete."
exit 0
| true |
c70157725532ead1736e2fa8c6bc616300ad93cb | Shell | digimokan/smart-build | /tests/unit_tests/help_test.sh | UTF-8 | 2,392 | 3.640625 | 4 | [] | permissive | #!/bin/sh
################################################################################
# ENV VARS
# unit_test_function optional single test function to run in this file
################################################################################
################################################################################
# LOAD COMMON VARS
################################################################################
#shellcheck source=../unit_test_vars.sh
. ../unit_test_vars.sh
################################################################################
# SETUP / TEARDOWN
################################################################################
setUp() {
#shellcheck source=../per_test_setup.sh
. ../per_test_setup.sh
}
tearDown() {
# shellcheck source=../per_test_teardown.sh
. ../../per_test_teardown.sh
}
################################################################################
# HELPER FUNCTIONS
################################################################################
# Run a help command line and assert it prints usage text and exits 0.
# $1 - full command string, e.g. "./prog --help" (the unquoted expansion
#      below word-splits it into command and arguments intentionally)
helpHelper() {
  cmd_output=$(${1})
  # $? here is the exit status of the command substitution above.
  exit_code=${?}
  assertContains 'Basic print-usage help has output -->' "${cmd_output}" 'USAGE'
  assertContains 'Basic print-usage help has output -->' "${cmd_output}" 'OPTIONS'
  # NOTE(review): shunit2's assertEquals takes [message] expected actual;
  # the literal 0 arguably belongs before ${exit_code}. Harmless here since
  # equality is symmetric -- only the failure message wording is affected.
  assertEquals 'Basic print-usage help has correct exit code -->' "${exit_code}" 0
}
################################################################################
# UNIT TESTS
################################################################################
# Verify that -h prints the usage/help text and exits 0.
helpShort() {
  helpHelper "./${EXEC_NAME} -h"
}
# Verify that --help prints the usage/help text and exits 0.
helpLong() {
  helpHelper "./${EXEC_NAME} --help"
}
################################################################################
# TEST SUITE
################################################################################
# shunit2 suite hook: when $unit_test_function names a defined function, run
# only that test; otherwise register every test in this file.
# NOTE(review): `type -t` is a bashism; under a shell without it the check
# fails closed and all tests are registered.
suite() {
  # shellcheck disable=SC2154
  if [ "${unit_test_function}" != '' ] && [ "$( type -t "${unit_test_function}" )" = "function" ]; then
    suite_addTest "${unit_test_function}"
  else
    suite_addTest helpShort
    suite_addTest helpLong
  fi
}
################################################################################
# LOAD TEST FRAMEWORK (MUST GO LAST)
################################################################################
# zsh compatibility options
# SHUNIT_PARENT lets shunit2 find this script when run under zsh, and
# shwordsplit restores sh-style word splitting (setopt is unknown to bash,
# hence the suppressed stderr).
export SHUNIT_PARENT=$0
setopt shwordsplit 2> /dev/null
# shellcheck disable=SC1091
. "${PATH_TO_SHUNIT}"
| true |
8a6c6183998a5de0a5d8232aad43e1247f385d11 | Shell | scala-stm/scala-stm | /bin/test_coverage | UTF-8 | 1,224 | 3.203125 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/sh
set -x
COBERTURA_DIR=$HOME/cobertura/cobertura-1.9.4.1
SCALATEST_VERSION=1.2
BASE="`dirname $0`/.."
cd $BASE
WHEN=`date '+%Y%m%d-%H%M%S'`
DIR="cover/cover-$WHEN"
SER="$DIR/cobertura.ser"
if [ "x$SCALA_VERSION" = "x" ]; then
SCALA_VERSION="`awk -F= '$1==\"build.scala.versions\" {print $2}' project/build.properties | sed 's/ .*//g'`"
fi
TARGET_BASE="target/scala_$SCALA_VERSION"
SCALATEST="lib_managed/scala_$SCALA_VERSION/compile/scalatest-$SCALATEST_VERSION.jar"
mkdir -p $DIR
git show --quiet HEAD > $DIR/git.hash
sbt ++$SCALA_VERSION test-compile > "$DIR"/log 2>&1
rm -rf $TARGET_BASE/classes-instr
sh $COBERTURA_DIR/cobertura-instrument.sh \
--basedir $TARGET_BASE/classes \
--datafile $SER \
--destination $TARGET_BASE/classes-instr \
scala
# we have to include classes because cobertura skips those with no methods
scala -cp ${SCALATEST}:$COBERTURA_DIR/cobertura.jar:$TARGET_BASE/classes-instr:$TARGET_BASE/classes \
-Dnet.sourceforge.cobertura.datafile=$SER \
-Dccstm.stats=1 \
org.scalatest.tools.Runner \
-oW -l slow -p $TARGET_BASE/test-classes
sh $COBERTURA_DIR/cobertura-report.sh \
--datafile $SER \
--destination $DIR \
$BASE/src/main/scala
| true |
72db77bcf616761a46356f60713b7d799f3f7ab9 | Shell | fprimex/junonia | /junonia | UTF-8 | 99,264 | 4.21875 | 4 | [
"Unlicense"
] | permissive | #!/bin/sh
###
### Typical usage and program flow
###
# In the script being written to use junonia:
# Run a copy of junonia_bootstrap to set JUNONIA_TARGET and JUNONIA_PATH
# Source junonia with . "$JUNONIA_PATH/some/path/to/junonia"
# Use junonia_run "$@" to run with doc and command auto-discovery
#
# Then, in junonia the following is run:
# junonia_init to set up the environment upon being sourced
# junonia_run_* function chosen based on auto-discovery
# possibly _junonia_md2spec to generate spec from md files
# _junonia_run_final to collect all of the run options and start execution
# _junonia_set_args to determine arg values from:
# spec defaults, config file, env vars, and cli args
# _junonia_exec to receive all arg values and run the function
# possibly run help and exit
# possibly run a user specified filter function to preprocess arg values
# run the specified function with the fully resolved arguments
###
### Copy of the bootstrap function
###
### For a compact version of this script to copy into your own script, see
### junonia_bootstrap
###
# This function can be copied to the top level script to set absolute paths to
# the script. From there, junonia, other shell libraries, and other assets
# can be loaded or referenced. For example, for a project with directories like
# the following:
# /home/user/foo/code/project.git/script
# /home/user/foo/code/project.git/lib/junonia
# the following code could be used in script:
# # copied from junonia or junonia_bootstrap
# junonia_bootstrap () {
# ...
# }
#
# junonia_bootstrap
# . "$JUNONIA_PATH/lib/junonia"
#
# # continue using junonia functions like junonia_run, echoerr, etc...
# Note one oddity: in order to keep the global variable namespace unpolluted,
# the JUNONIA_PATH variable is used to hold the value of the symbolic link path
# until it is finally set to the absolute path to the directory containing the
# script. In this way only the variables ultimately set, JUNONIA_TARGET and
# JUNONIA_PATH, are created / used.
# Bootstrap determines the script location. With the exception of the function
# name and globals set, this is generic and does not rely on anything specific
# to the rest of junonia. Use this in any script and the following will be set:
#
# JUNONIA_TARGET Absolute path to script being run with symlinks resolved.
# JUNONIA_PATH Absolute path to directory containing script being run.
# Note that this is not POSIX but portable to many systems with nearly any kind
# of implementation of readlink. I choose to use readlink because neither
# alpine nor ubuntu docker images come with the file command, which could be
# used instead, but do come with readlink.
junonia_bootstrap () {
  # Get the command used to start this script
  JUNONIA_TARGET="$0"

  # If executing via a series of symlinks, resolve them all the way back to the
  # script itself. Some danger here of infinitely cycling.
  while [ -h "$JUNONIA_TARGET" ]; do
    # Begin usage of JUNONIA_PATH to hold the link path.

    # Look at what this link points to
    JUNONIA_PATH=$(readlink "$JUNONIA_TARGET")

    if [ "$(echo "$JUNONIA_PATH" | cut -c -1)" = "/" ]; then
      # Link path is absolute (first character is /); just need to follow it.
      JUNONIA_TARGET="$JUNONIA_PATH"
    else
      # Link path is relative, need to relatively follow it.
      # e.g. running `./foo` and link is to `../../bar`
      # Go look at ./../../bar
      # Quote the expansion so paths containing whitespace resolve correctly.
      JUNONIA_TARGET="$(dirname "$JUNONIA_TARGET")"
      JUNONIA_TARGET="$JUNONIA_TARGET/$JUNONIA_PATH"
    fi

    # End usage of JUNONIA_PATH to hold the link path.
  done

  # Now TARGET should be like the following, where 'script' is not a symlink:
  # /some/path/to/the/actual/script
  # or
  # ./../some/path/to/the/actual/script
  #
  # Set absolute paths for TARGET and PATH
  # PATH   /home/user/code/project/name/bin
  # TARGET /home/user/code/project/name/bin/script
  JUNONIA_PATH="$(cd "$(dirname "$JUNONIA_TARGET")" && pwd -P)"
  # Quote the basename argument so script names with spaces survive.
  JUNONIA_TARGET="$JUNONIA_PATH/$(basename "$JUNONIA_TARGET")"

  # If the script is in a bin directory, then set the path one directory back.
  # /some/path/to/the/actual      # (no bin/script))
  # or
  # /home/user/code/project/name  # (bin/script))
  if [ "$(basename "$JUNONIA_PATH")" = bin ]; then
    JUNONIA_PATH="$(dirname "$JUNONIA_PATH")"
  fi
}
###
### I/O helpers
###
# Write each argument verbatim (printf, never echo) as its own line on
# stderr, so sequences like \n in JSON payloads are not interpreted.
echoerr_raw () { printf '%s\n' "$@" >&2; }

# Same as echoerr_raw, but with "[ERROR] " joined onto the first argument.
echoerr () { set -- "[ERROR] $@"; echoerr_raw "$@"; }
# Debug output goes to file descriptor 3; callers point fd 3 at stderr when
# debugging is enabled and at /dev/null when it is not.
echodebug_raw () { printf '%s\n' "$@" >&3; }

# Same as echodebug_raw, but with "[DEBUG] " joined onto the first argument.
echodebug () { set -- "[DEBUG] $@"; echodebug_raw "$@"; }
# Convenience functions for using a verbose level echo
# Each prints its arguments (one per line) only when the corresponding
# global flag (verbose / vverbose / vvverbose) is non-empty. Note the
# return status is 1 when the flag is empty (the failed [ -n ] test), so
# callers running under `set -e` must guard the call.
echov   () { [ -n "$verbose" ]   && printf '%s\n' "$@"; }
echovv  () { [ -n "$vverbose" ]  && printf '%s\n' "$@"; }
echovvv () { [ -n "$vvverbose" ] && printf '%s\n' "$@"; }
# Default text-layout widths, overridable from the environment: overall wrap
# width and the widths of the two help-text columns.
JUNONIA_WRAP="${JUNONIA_WRAP:-78}"
JUNONIA_COL1="${JUNONIA_COL1:-18}"
JUNONIA_COL2="${JUNONIA_COL2:-60}"
###
### AWK utility functions
###
# Convenience functions for error and debug output
junonia_awk_echoerr='function echoerr(msg) {
printf "[ERROR] %s\n", msg >"/dev/stderr"
}'
junonia_awk_echoerr_raw='function echoerr_raw(msg) {
printf "%s\n", msg >"/dev/stderr"
}'
junonia_awk_echodebug='function echodebug(msg) {
if(ENVIRON["JUNONIA_DEBUG"]) {
echoerr_raw("[DEBUG] " msg)
}
}'
junonia_awk_echodebug_raw='function echodebug_raw(msg) {
if(ENVIRON["JUNONIA_DEBUG"]) {
echoerr_raw(msg)
}
}'
junonia_awk_hardwrap_line='
# Wrap a long line to a specified width and optionally add a prefix / indent.
#
# Arguments
# ---------
# line Text to wrap
# width Line width to wrap to
# pre Prefix string such as an indent
# float Float text without spaces longer than width instead of breaking it
#
# Locals
# ------
# str Portion of the line being wrapped
# n Index of the next space in the line
# start Index of the start of the next chunk
# wrapped Final wrapped result
function hardwrap_line(line, width, pre, float,
str, n, wrapped) {
# The start of the line will be removed as it is wrapped, so continue
# producing wrapped lines as long as line is longer than the wrap width.
while(length(line) > width) {
# Position of the next space.
n = index(line, " ")
# If floating long lines, deal with lines that have no spaces (space
# position 0) by not touching them.
if(n == 0 && float) {
break
}
if(n > width && float) {
# If floating long lines, and this line is long (space beyond the
# width), wrap at that space and continue, producing a line that is
# wider than the width.
str = substr(line, 1, n - 1)
start = n
} else {
# Either:
# Not floating, so break in the middle of long lines
# OR
# There is a space within in the wrap width.
# Take a chunk that is the width.
str = substr(line, 1, width)
# Remove everything at the end of the string that is the last space
# followed by not a space.
sub(/ [^ ]*$/, "", str)
# Strip leading space from the chunk so it will be aligned.
sub(/^ /, "", str)
if(n == 0 || n > width) {
# The space is beyond the wrap width or there is no space, so this is
# is a break in the middle of a word.
start = width + 1
} else {
# The space is within the wrap width, so this is a break on a space.
# Note that this does not take into account multiple spaces between
# words. On that, we assume that if you have more than one space that
# they must be significant so your weird formatting is retained.
start = length(str) + 2
}
}
# Add this wrapped line to the hardwrapped result.
wrapped = wrapped pre str "\n"
# Removed the portion that was just wrapped from the line for continued
# processing.
line = substr(line, start, length(line))
}
# There probably is a bit of text that is leftover and needs to be added to
# the wrapped result.
if(line) {
wrapped = wrapped pre line "\n"
}
# Send back the hardwrapped string with the final newline removed.
return substr(wrapped, 1, length(wrapped) - 1)
}
'
junonia_awk_hardwrap='
function hardwrap(lines, width, pre, float,
linesa, str, n, i, wrapped) {
n = split(lines, linea, "\n")
for(i=1; i<=n; i++) {
wrapped = wrapped hardwrap_line(linea[i], width, pre, float) "\n"
}
# Send back the hardwrapped string with the final newline removed.
return substr(wrapped, 1, length(wrapped) - 1)
}
'
junonia_awk_twocol='
# Given two strings and specifications for two columns, format the text side
# by side in two columns.
#
# Arguments
# ---------
# t1 Text to go into the first column
# t2 Text to go into the second column
# col1 Width of column one
# col2 Width of column two
# gutter Text to go in between the columns
# pre Text to go in front of the complete text, like an indent
# f1 If unbroken lines of t1 longer than col1 should be left unbroken
# f2 If unbroken lines of t2 longer than col2 should be left unbroken
#
# Locals
# ------
# fmt Print format for each wrapped and combined line
# t1a Array of lines in text1
# t2a Array of lines in text2
# i Iterator variable
# j Iterator variable
# n Number of lines being processed for t1
# m Number of lines being processed for t2
# formatted Final result
function twocol(t1, t2, col1, col2, gutter, pre, f1, f2,
fmt, t1a, t2a, i, j, n, m, formatted) {
# Wrap each line to the desired column width.
t1 = hardwrap(t1, col1, pre, f1)
t2 = hardwrap(t2, col2, "", f2)
# Assemble the print format. e.g.
# Prefix 2 spaces, col1 20, gutter 1 space, col2 40
# " %-20s %-40s"
fmt = "%-" col1 + length(pre) "s" gutter "%-" col2 "s"
# Put each line of each hardwrapped column in arrays
n = split(t1, t1a, "\n")
m = split(t2, t2a, "\n")
# Iterate over the arrays and put the lines next to each other using the
# assembled format.
i = 1
j = 1
while(i<=n || j<=m) {
if(length(t1a[i]) > col1 + length(pre)) {
formatted = formatted t1a[i] "\n"
i++
} else {
formatted = formatted sprintf(fmt, t1a[i], t2a[j])
sub(/ *$/, "", formatted)
formatted = formatted "\n"
i++
j++
}
}
# Send back the final, two column formatted text with the final newline
# removed.
return substr(formatted, 1, length(formatted) - 1)
}
'
junonia_awk_ncol='
# Given n strings and specifications for n columns, format the text side
# by side in n columns.
#
# Arguments
# ---------
# n Number of columns
# texts Array of text to go into the columns
# cols Array of column widths
# gutters Array of text to go between the columns
# pre Text to go in front of the complete text, like an indent
# floats If unbroken lines longer than cols should be left unbroken
#
# Locals
# ------
# i Iterator variable
# formatted Final result
function ncol(n, texts, cols, gutters, pre, floats,
ctotal, i, formatted) {
if(n < 2) {
echoerr("two or more columns are required, received " n)
exit 1
}
# Process all columns except the last one.
formatted = texts[1]
ctotal = cols[1]
for(i=1; i<n-1; i++) {
formatted = twocol(formatted, texts[i+1], ctotal, cols[i+1], gutters[i],
"", floats[i], floats[i+1])
ctotal += cols[i+1] + length(gutters[i])
}
# Process the last column and add the prefix to the entire result.
formatted = twocol(formatted, texts[n], ctotal, cols[n], gutters[n-1],
pre, floats[n-1], floats[n])
return formatted
}
'
# This convenience function is a POSIX way of getting some random digits. It is
# so-called 'randomish' because it is NOT CRYPTOGRAPHICALLY SOUND and SHOULD
# NOT BE USED FOR CRYPTOGRAPHIC PURPOSES. It does, however, produce things that
# are random enough for temporary file names and the like.
#
# The seed HAS to be sufficient in order for this to work. Sending the current
# time, for example, is not usually sufficient unless using a nonstandard level
# of precision. See the shell wrapper for an example of a suitable seed.
junonia_awk_randomish_int='
function randomish_int(s, n) {
# A seed has to be given
if(! s) {
print "no seed provided for generating randomish int" >"/dev/stderr"
exit 1
}
# Default to 10 digits
if(! n) {
n=10
}
# As mentioned, the seed given here needs to be suitable.
srand(s)
# Initial accumulation. Strip leading zeros from this one so the result is
# useful as an integer.
r = rand()
sub(/0\.0*/, "", r)
# Build up enough digits, then take the first n of them.
while(length(r) < n) {
r = r rand()
sub(/0\./, "", r)
}
return substr(r, 1, n)
}
'
# Replace quotes and newlines with escape characters to prepare the
# value for insertion into JSON.
#
# Fixes over the previous version: the escapes now apply to each split line
# (the bare gsub()s operated on $0, not the input), the loop covers the
# final line (i <= nlines), and the lines[] array is used throughout (the
# join previously read from an empty, never-populated array a[]).
junonia_awk_json_escape='
function json_escape(s,    lines, nlines, i, result) {
  nlines = split(s, lines, "\n")
  for(i=1; i<=nlines; i++) {
    gsub(/"/, "\\\"", lines[i])
    gsub(/\\n/, "\\\\n", lines[i])
    if(i == 1) {
      result = lines[i]
    } else {
      result = result "\\n" lines[i]
    }
  }
  return result
}
'
# All bools are either 1 or empty, but we accept the text true, 1, and the
# presence of the flag (empty value) as true, and the text false and 0 as
# false. All variations of casing for true and false are accepted.
# mapbool(b, opt) returns "1" for true values and "" for false values; on
# any other value it reports an error naming option opt and exits 1 (also
# setting the global e flag).
junonia_awk_mapbool='
function mapbool(b, opt) {
  echodebug("setting bool " opt " using value " b)
  if(tolower(b) == "true" || b == "1" || b == "") {
    return "1"
  } else {
    if(tolower(b) == "false" || b == "0") {
      return ""
    } else {
      msg = "option " opt " argument must be omitted (true) or one of:"
      msg = msg "\ntrue false 1 0"
      msg = msg "\ngot: " b
      echoerr(msg)
      e = 1
      exit 1
    }
  }
}
'
# make_jqarg(val): format a shell-provided value as a jq literal. Booleans,
# null, and numbers pass through bare; anything else is double quoted. A
# trailing |tostring marker is stripped, which forces the quoted form.
junonia_awk_make_jqarg='
function make_jqarg(val) {
  # Unquoted special values
  if(val == "true" || val == "false" || val == "null" || val == val*1) {
    return val
  } else {
    # Quoted string value

    # Allowing |tostring adds support for specifying that a value should be
    # a string no matter what. Values like true and 2.6 can be made "true"
    # and "2.6" by specifying true|tostring and 2.6|tostring.
    sub(/\|tostring$/, "", val)
    return "\"" val "\""
  }
}
'
junonia_awk_make_jqprog='
function make_jqprog(name, val, next_argchar) {
echodebug("")
echodebug("making jq prog: " name " " val " " next_argchar)
next_argchar = substr(val, 1, 1)
if(substr(name, 1, 1) == "#") {
# If the next thing is a comment, then add the comment and move on.
jqprog = jqprog "\n" name
return 0
}
if(next_argchar == "-" || next_argchar == "." || ! next_argchar) {
# If the next thing is an option instead of a value, then set the
# value to true and move on.
jqprog = jqprog "\n" name " = true |"
return 0
}
if(substr(name, length(name)-1, 2) == "[]") {
sub(/\[\]$/, "", name)
jqprog = jqprog "\n" name " |= . + [" make_jqarg(val) "] |"
} else {
# Add this arg and its value, the next value, to the jq program
jqprog = jqprog "\n" name " = " make_jqarg(val) " |"
}
return 1
}
'
junonia_awk_end_jqprog='
function end_jqprog() {
sub(/^\n/, "", jqprog)
if(jqprog) {
jqprog = jqprog "\ntostring"
}
}
'
# isort(A, start, end): in-place insertion sort of A[start..end] using awk
# comparison semantics (numeric strings compare numerically).
junonia_awk_isort='
function isort(A, start, end,
               i, j, value) {
  for(i = start; i <= end; i++) {
    value = A[i]
    j = i - 1
    while( ( j > start-1) && ( A[j] > value ) ) {
      A[j+1] = A[j]
      j--
    }
    A[j+1] = value
  }
}
'
# Bundle of all the awk helper functions defined above. Shell entrypoints
# (junonia_hardwrap, junonia_twocol, junonia_ncol, ...) prepend this to
# their awk programs so the helpers are in scope.
readonly JUNONIA_AWKS="
$junonia_awk_hardwrap_line
$junonia_awk_hardwrap
$junonia_awk_twocol
$junonia_awk_ncol
$junonia_awk_echoerr
$junonia_awk_echoerr_raw
$junonia_awk_echodebug
$junonia_awk_echodebug_raw
$junonia_awk_randomish_int
$junonia_awk_json_escape
$junonia_awk_mapbool
$junonia_awk_make_jqarg
$junonia_awk_make_jqprog
$junonia_awk_end_jqprog
$junonia_awk_isort
"
###
### Shell utility functions
###
# Shell version of junonia_awk_randomish_int. See its documentation for VERY
# important information on appropriate usage. With no argument provided it uses
# the default in the awk function.
#
# $1 - optional number of digits to produce (awk side defaults to 10)
# Prints the digits to stdout; on awk failure prints an error to stderr and
# returns 1. Note awk_prog is a plain (not local) variable, matching the
# rest of this POSIX-sh library.
junonia_randomish_int () {
  awk_prog='BEGIN { printf "%s", randomish_int(s, n) }'
  # Provide a seed to awk's srand that is the process ID of a new sh process.
  if ! awk -v s="$(/bin/sh -c 'echo $$')" \
           -v n="$1" "$junonia_awk_randomish_int $awk_prog"; then
    echoerr 'unable to generate random int'
    return 1
  fi
}
# Escape $1 for embedding in a JSON string (via the json_escape awk helper)
# and print the result to stdout; returns 1 on awk failure.
# NOTE(review): awk -v performs backslash-escape processing on its value, so
# input containing backslash sequences may be altered before escaping --
# confirm this is acceptable for callers.
junonia_json_escape () {
  awk_prog='BEGIN { printf "%s", json_escape(s) }'
  if ! awk -v s="$1" "$junonia_awk_json_escape $awk_prog"; then
    echoerr 'unable to convert value to escaped json'
    return 1
  fi
}
# Determine if an argument is an integer (status 0 when it is; errors from
# non-numeric comparisons are discarded).
junonia_is_int () {
  [ "$1" -eq "$1" ] >/dev/null 2>&1
}
# Determine if an argument is any kind of number (integer or float), using
# awk's string/number coercion: status 0 when numeric.
junonia_is_num () {
  awk -v val="$1" 'BEGIN { exit (val == val * 1) ? 0 : 1 }' >/dev/null 2>&1
}
# Shell entrypoint for hardwrapping a line.
# $1 - text to wrap            $2 - wrap width
# $3 - per-line prefix string  $4 - non-empty to float overlong words
# Prints the wrapped text to stdout; on awk failure prints the parameters
# and text to stderr and returns 1.
junonia_hardwrap () {
  awk_prog='BEGIN { printf "%s", hardwrap(s, w, p, f) }'
  if ! awk -v s="$1" -v w="$2" -v p="$3" -v f="$4" \
           "$JUNONIA_AWKS $awk_prog"; then
    echoerr "failed to hardwrap (width:$2, prefix:$3, float:$4):"
    echoerr "$1"
    return 1
  fi
}
# Shell entrypoint for printing two listings of text in 2 columns, separated by
# a gutter string and prefixed by a string.
# $1/$2 - column texts          $3/$4 - column widths
# $5 - gutter string            $6 - prefix for every output line
# $7/$8 - non-empty to float overlong unbroken lines instead of breaking them
# Prints the combined text to stdout; returns 1 with details on stderr if
# the awk formatter fails.
junonia_twocol () {
  awk_prog='BEGIN { printf "%s", twocol(t1, t2, c1, c2, g, p, f1, f2) }'
  if ! awk -v t1="$1" -v t2="$2" -v c1="$3" -v c2="$4" \
           -v g="$5"  -v p="$6"  -v f1="$7" -v f2="$8"\
           "$JUNONIA_AWKS $awk_prog"; then
    echoerr "failed to format in two columns with parameters:"
    echoerr "col1=$3 col2=$4 gutter=$5 prefix=$6 float1=$7 float2=$8"
    echoerr "text1: $1"
    echoerr "text2: $2"
    return 1
  fi
}
# Shell entrypoint for printing n listings of text in n columns, separated by
# n-1 gutter strings and prefixed by a string. Since Bourne shell has no
# arrays, use JFS to separate the array entries to go to awk.
# $1 - column texts      $2 - column widths   $3 - gutter strings
# $4 - line prefix       $5 - float flags
# Note: every split() below runs while FS is still the JFS passed via -F
# (FS is only reset afterwards), so ALL list arguments are JFS-separated.
junonia_ncol () {
  awk_prog='BEGIN {
    n = split(t, ta)
    split(c, ca)
    split(g, ga)
    split(f, fa)
    FS=" "
    printf "%s", ncol(n, ta, ca, ga, p, fa)
  }'
  if ! awk -F "$JFS" \
           -v t="$1" -v c="$2" -v g="$3" -v p="$4" -v f="$5" \
           "$JUNONIA_AWKS $awk_prog"; then
    echoerr "failed to format text into multiple columns"
    return 1
  fi
}
# Verify that every command in a whitespace-separated list is available.
# $1 - list of command names (intentionally word-split by the unquoted
#      expansion in the for loop)
# Returns 0 when all commands are found; prints an error and returns 1 at
# the first missing command.
junonia_require_cmds () {
  for required_cmd in $1; do
    # Use command -v's exit status directly and quote the name so unusual
    # command names are not split or glob-expanded.
    if ! command -v "$required_cmd" >/dev/null 2>&1; then
      echoerr "$required_cmd must be installed"
      return 1
    fi
  done
}
###
### Configuration file management
###
# Add, remove, or modify given values in a shell config file at the given path.
# Remove values by providing an empty value. If no file exists it will be
# created.
#
# junonia_update_config FILEPATH VAR [VAR ...]
#
# Where VAR is NAME=VALUE to set the value and NAME= or NAME to remove the
# value.
junonia_update_config () {
  # Create the config file (and its parent directory) if it does not exist.
  if [ -f "$1" ]; then
    echodebug "modifying $1"
  else
    echodebug "creating $1"
    if ! mkdir -p "$(dirname "$1")"; then
      echoerr "could not create $(dirname "$1")"
    fi
    if ! touch "$1"; then
      echoerr "could not create $1"
      return 1
    fi
  fi
  # Merge the NAME=VALUE arguments into the existing file contents. The awk
  # program receives the file path as ARGV[1] and the entries as the
  # remaining args; multi-line values arrive JUS-separated and are expanded
  # back to real newlines.
  if ! config="$(awk -v JUS="$JUS" '
    # Generate the config from arg input and existing file.
    # Given a potential var=value line, separate them, set VARNAME
    # and VARVALUE.
    function splitvar(var) {
      # Find = or the end
      eq = index(var, "=")
      if(eq == 0) {
        eq = length(var + 1)
      }
      # Extract the name and value
      VARNAME  = substr(var, 1, eq - 1)
      VARVALUE = substr(var, eq + 1)
      # The value may be multiple distinct lines
      gsub(JUS, "\n", VARVALUE)
      # Enclose the value in quotes if not already
      if(VARVALUE && VARVALUE !~ /^".*"$/) {
        VARVALUE = "\"" VARVALUE "\""
      }
      # Error if VARNAME is not a valid shell variable name
      if(VARNAME !~ varname_re) {
        VARNAME=""
        VARVALUE=""
        return 1
      }
      return 0
    }
    BEGIN {
      # Matches valid shell variable names
      varname_re = "[A-Za-z_][A-Za-z0-9_]*"
      # Arg1 is the config file. The rest are config entries to process,
      # so make them into an array and remove them from the arg vector.
      for(i=2; i<ARGC; i++) {
        if(splitvar(ARGV[i]) == 0) {
          config[VARNAME] = VARVALUE
          ARGV[i] = ""
          vars++
        }
      }
      # No variables were given to process.
      if(!vars) {
        exit 1
      }
      ARGC = 2
    }
    # Start processing the config file.
    # This line is a variable we were given to modify.
    $0 ~ "^" varname_re && splitvar($0) == 0 && config[VARNAME] {
      # If no value was supplied, skip it, effectively removing it from
      # the config file.
      if(! config[VARNAME] || config[VARNAME] == "\"\"") {
        delete config[VARNAME]
        next
      }
      # There is a value, so write that and remove it from the array
      # since it was processed.
      print VARNAME "=" config[VARNAME]
      delete config[VARNAME]
      next
    }
    # Preserve unmodified lines as-is.
    { print }
    END {
      # If there are still config entries that means we were given
      # variables to process that were not already in the config file.
      # Those should then be added at the end.
      for(c in config) {
        if(config[c]) {
          print c "=" config[c]
        }
      }
    }
  ' "$@")"; then
    echoerr "Error processing configuration"
    echoerr "$config"
    return 1
  fi
  # Write the merged config back to $1; tee also echoes it to stdout so the
  # caller sees the resulting file contents.
  if ! echo "$config" | tee "$1"; then
    echoerr "Error writing configuration to file $1"
    echoerr "$config"
    return 1
  fi
}
###
### Markdown parsing functions
###
# Parse Markdown text into a program argument spec
_junonia_md2spec () {
echodebug "begin md2spec"
awk_prog='
# Print the currently stored spec and reset for the next one.
function spec () {
if(junonia_web) {
cmd = cmd ", junonia_web"
}
print indent cmd
for(i=1; i<=n_params; i++) {
print indent " " params[i]
}
if(junonia_web) {
print indent " -JUNONIA_WEB_METHOD METHOD=" jw_method
print indent " -JUNONIA_CONTENT_TYPE TYPE=" jw_content_t
print indent " -JUNONIA_WEB_URL URL=" jw_url
}
if(output_text) {
if(output_table == 1) {
insert_jtable()
output_table = 0
}
gsub(/\n/, "\\\\n", output_text)
print indent " -JUNONIA_OUTPUT OUTPUT=" output_text
}
if(junonia_web) {
if(!output_text) {
print indent " -JUNONIA_OUTPUT OUTPUT="
}
print indent " -json"
}
for(i=1; i<=n_opts; i++) {
print indent " " opts[i]
}
indent = ""
cmd = ""
output_text = ""
n_params = 0
n_opts = 0
split("", params, ":")
split("", opts, ":")
junonia_web = 0
}
function md_to_jq(s, r, a, i) {
n = split(s, a, /`/)
for(i=1; i<=n; i++) {
if(i % 2 == 1) {
r = r a[i]
} else {
r = r "\\(" a[i] ")"
}
}
sub(/ $/, "", r)
return r
}
function insert_jtable() {
if(n_cols > n_headers) {
root_element = rowa[n_cols-1]
gsub(/`/, "", root_element)
table_cols = table_cols "]"
} else {
root_element = "."
table_cols = table_cols "," rowa[n_cols-1] "]"
}
manual_fmt = 0
for(i=2; i<n_headers; i++) {
echodebug("header: " headersa[i])
echodebug("row: " rowa[i])
h = headersa[i]
r = rowa[i]
hlen = length(h)
rlen = length(r)
sub(/^ */, "", h)
sub(/ *$/, "", h)
sub(/^ */, "", r)
sub(/ *$/, "", r)
hstriplen = length(h)
rstriplen = length(r)
echodebug("hlen: " hlen)
echodebug("rlen: " rlen)
echodebug("hstriplen: " hstriplen)
echodebug("rstriplen: " rstriplen)
if(hstriplen > rstriplen) {
if(substr(headersa[i], hlen) == " ") {
manual_fmt = 1
break
}
} else {
echodebug(substr(rowa[i], rlen))
if(substr(rowa[i], rlen) == " ") {
manual_fmt = 1
break
}
}
}
echodebug("manual_fmt: " manual_fmt)
gsub(/`/, "\"", table_cols)
if(! manual_fmt) {
gsub(" *" jfs "|$", "", table_headers)
}
if(output_text) {
output_text = output_text "\n" SOH
} else {
output_text = SOH
}
output_text = output_text "\n" "junonia_format_jtable"
output_text = output_text "\n" STX
output_text = output_text "\n\",\"" "\\(jtable({\"element\":" root_element ",\"columns\":" table_cols "}))\",\"" JRS
output_text = output_text "\n" ETX
output_text = output_text "\n" STX
output_text = output_text "\n" table_headers
output_text = output_text "\n" ETX
output_text = output_text "\n" EOT
output_text = output_text "\n"
}
# When encountering a header, leave any header we were in.
/^#/ {
if(output_table == 1) {
insert_jtable()
sub(JRS, "", output_text)
}
synopsis = 0
positional = 0
options = 0
endpoint = 0
content_type = 0
output = 0
output_icode = 0
output_tcode = 0
output_table = 0
}
# Top level ## header
# ## `command subcommand`
/^## `[-,_A-Za-z0-9 ]+`/ {
if(cmd) {
spec()
}
# Strip everything around the title
gsub(/^## `|`$/, "")
# Empty the list first
split("", subcmds, ":")
# Split on commas to get the first part plus the aliases
sn = split($0, subcmda, ",")
# Split the first part and get the last item to get the primary subcmd
pn = split(subcmda[1], primary_subcmd)
# Put that primary subcommand in the first command slot
cmd = primary_subcmd[pn] ","
# Add the command aliases
for(i=2; i<=sn; i++) {
# Normalize the entries to have a comma on the end
sub(/,+$/, ",", subcmda[i])
cmd = cmd subcmda[i]
}
# Strip the last trailing comma
sub(/,$/, "", cmd)
indent = ""
for(i=1; i<pn; i++) {
indent = indent " "
}
}
/^### REST endpoint/ {
endpoint = 1
next
}
endpoint && /^ [A-Z]+ http/ {
junonia_web = 1
jw_method = $1
jw_url = $2
jw_content_t = ""
next
}
/^### Content type/ {
content_type = 1
next
}
content_type && /^ [^\/]+\/[^\/]+/ {
jw_content_t = $1
}
/^### Positional parameters/ {
positional = 1
next
}
# * `POS_ONE`
positional && /^\* `[-_A-Z0-9]+`/ {
gsub(/^\* `|`$/, "")
params[++n_params] = $0
param = 1
next
}
positional && /\[default: `.*`\]$/ {
match($0, /\[default: `.*`\]$/)
def = substr($0, RSTART+12, RLENGTH-12-2)
params[n_params] = params[n_params] "=" def
}
/^### Options/ {
options = 1
}
# A flag is idenfied by nothing coming after it
# * `-option`
#
# A normal option
# * `-option VAL`
#
# An option with an alias
# * `-o, -option VAL`
#
# An option that can be repeated
# * `-option VAL1 [-option VAL2 ...]`
options && /^\* `-[-A-Za-z0-9]+/ {
# Remove the leading bullet-space-backtick and the last backtick
gsub(/^\* `|`$/, "")
# Options can have aliases. The definitive one is the last one, so loop
# through the option names until the last one is reached.
opt = ""
while(match($0, /^-[-_A-Za-z0-9]+/)) {
opt = opt substr($0, RSTART, RLENGTH) ","
$0 = substr($0, RLENGTH + 1)
sub(/^ *, */, "")
}
opt = substr(opt, 1, length(opt) - 1)
opts[++n_opts] = opt
sub(/^ */, "")
n_metas = split($0, a, " ")
if(n_metas == 1) {
# Only meta variable; not repeatable option
opts[n_opts] = opts[n_opts] " " a[1]
} else {
if(n_metas > 1) {
# More items after meta variable; multi-option
opts[n_opts] = opts[n_opts] " [" a[1] "]"
}
}
}
options && /^\* `\.[-A-Za-z0-9]+/ {
# Remove the leading bullet-space-backtick and the last backtick
gsub(/^\* `|`$/, "")
opts[++n_opts] = $1
if($2 ~ /[-_A-Z0-9]+/) {
opts[n_opts] = opts[n_opts] " " $2
}
}
options && /\[default: `.*`\]$/ {
match($0, /\[default: `.*`\]/)
def = substr($0, RSTART+11, RLENGTH-11-2)
if(n_metas == 0) {
# This was previously determined to be a flag, so the default needs
# to be processed and mapped to a spec bool value.
def = mapbool(def, opt)
}
opts[n_opts] = opts[n_opts] "=" def
}
/^### Output/ {
output = 1
next
}
output && /^ / {
output_icode = 1
}
output_icode && ( /^ / || /^$/ ) {
sub(/^ /, "")
output_text = output_text "\n" $0
next
}
output_icode && ! (/^$/ || /^ /) {
output_icode = 0
}
output_tcode && /^```/ {
output_tcode = 0
next
}
output && /^```/ {
output_tcode = 1
next
}
output_tcode {
output_text = output_text "\n" $0
next
}
# divider row
output_table && /^\|[- \|]+$/ {
next
}
output_table && /^\|/ {
n_cols = split($0, rowa, /\|/)
sub(/^ /, "", rowa[2])
sub(/ $/, "", rowa[2])
table_cols = "[" rowa[2]
for(i=3; i<n_cols-1; i++) {
sub(/^ /, "", rowa[i])
sub(/ $/, "", rowa[i])
table_cols = table_cols "," rowa[i]
}
sub(/^ /, "", rowa[n_cols-1])
sub(/ $/, "", rowa[n_cols-1])
next
}
output_table && /^$/ {
insert_jtable()
output_table = 0
next
}
output && /^\|/ {
output_table = 1
table_headers = ""
table_cols = ""
n_headers = split($0, headersa, /\|/)
sub(/^ /, "", headersa[2])
sub(/ $/, "", headersa[2])
table_headers = headersa[2]
for(i=3; i<n_headers; i++) {
sub(/^ /, "", headersa[i])
sub(/ $/, "", headersa[i])
table_headers = table_headers JFS headersa[i]
}
next
}
output && output_text {
output_text = output_text "\n" md_to_jq($0)
next
}
output {
output_text = md_to_jq($0)
next
}
END {
if(output_table == 1) {
insert_jtable()
sub(JRS, "", output_text)
output_table = 0
}
spec()
}
'
awk -v wrap="$JUNONIA_WRAP" -v col1="$JUNONIA_COL1" \
-v col2="$JUNONIA_COL2" \
-v SOH="$JSOH" -v STX="$JSTX" -v ETX="$JETX" -v EOT="$JEOT" \
-v JUS="$JUS" -v JRS="$JRS" -v JFS="$JFS" \
"$JUNONIA_AWKS $awk_prog" "$@"
}
# Parse Markdown text into command line help
_junonia_md2help () {
echodebug "begin md2help"
awk_prog='
BEGIN {
echodebug("cmd: " cmd)
# Split the supplied command into an array of parts
n = split(cmd, cmda)
i = 1
col1_indent = sprintf("%" col1 "s", "")
txt = "NAME\n"
subcmd_txt = "SUBCOMMANDS\n"
}
# When encountering a header, leave any header we were in.
/^#/ {
if(positional_no_desc) {
txt = txt twocol(param_col1, "", col1 - 3, col2, " ", " ") "\n"
}
# When leaving a header and processing docs, add a separating newline
# (e.g. between the last line of description and start of OPTIONS)
# but only if text was added when passing through a section
if(cmd_doc_start && added_txt) {
txt = txt "\n"
}
title = 0
synopsis = 0
description = 0
positional = 0
options = 0
added_txt = 0
positional_no_desc = 0
}
# Top level "##" header
# ## `command subcommand`
/^## / {
echodebug("entering " $0)
# Strip everything around the title
gsub(/^## `|`$/, "")
title = 1
# If help docs have started, this signals the end of processing the
# non-subcommand documentation.
if(cmd_doc_start) {
cmd_doc_start = 0
cmd_doc_end = 1
echodebug("end command documentation")
}
# Smush the aliases together to make them grouped when split on spaces
sub(/, +/, ",")
sub(/ +,/, ",")
# Extract the commands, subcommands, and aliases in this header
split("", subcmds, ":")
hn = split($0, subcmds)
# Extract aliases as keys (array indices) so it can be checked with an
# "in" expression later. Also, need to empty the array first.
split("", aliases, ":")
an = split(subcmds[hn], aliases, ",")
for(j=1; j<=an; j++) {
aliases[aliases[j]] = ""
delete aliases[j]
}
}
# Find the topmost level command title to bootstrap subcommand location.
title && i == 1 && $0 == cmda[1] {
# The top level command; the name of the program.
cmd_part = cmda[1]
echodebug("top level command: " cmd_part)
# Bootstrap an expression to find subcommands of this command.
subcmd_re = "^" cmd_part " [-_A-Za-z0-9]+(,|$)"
echodebug(" subcmd regex: " subcmd_re)
# Increment the level of subcommand we are interested in.
i = 2
# This is the only command given; help for the program itself. So
# actually we are done searching for the command to do help for.
if(n == 1) {
txt = txt " " cmd_part "\n"
cmd_doc_start = 1
added_txt = 1
echodebug("start command documentation")
}
next
}
# Keep moving until the top level command is located
i == 1 {
next
}
# At this point i > 1
# Keep looking at titles until the whole command is located
i > 1 && n == hn {
echodebug(" " i "<" n " " title " " cmda[i] " == " subcmds[i])
for(j=1; j<=hn; j++) {
echodebug(" " j " " subcmds[j])
}
}
title && i < n && n == hn && cmda[i] == subcmds[i] {
for(j=1; j<i; j++) {
if(cmda[j] != subcmds[j]) {
next
}
}
i++
next
}
# The whole command has been found, so document it and get set up to
# identify and document the subcommand summary
#title && i == n && cmda[n] in subcmds {
title && i == n && n == hn && cmda[n] in aliases {
# Everything up to this point has to match up also
for(j=1; j<i; j++) {
if(cmda[j] != subcmds[j]) {
next
}
}
# This is the header line with all Markdown stripped
cmd_part = $0
# Unsmush any aliases
sub(/,/, ", ", cmd_part)
txt = txt " " cmd_part "\n"
added_txt = 1
# Get the last primary subcommand (not the aliases) and build a regex to
# find this commands subcommands to document.
split($0, subcmda, ",")
subcmd_re = "^" subcmda[1] " [-_A-Za-z0-9]+$"
echodebug(" subcmd regex: " subcmd_re)
cmd_doc_start = 1
echodebug("start command documentation")
i++
next
}
# Keep moving until the whole command is located.
i <= n {
next
}
# In the command description, so add it to the name line.
title && ! cmd_doc_end && ! /^$/ {
txt = substr(txt, 1, length(txt) - 1) " -- " $0 "\n"
added_txt = 1
next
}
# Collect this subcommand. If it has aliases, extract the primary command.
title && $0 ~ subcmd_re {
n_subcmds++
split($0, subcmda, ",")
pn = split(subcmda[1], primary_subcmd)
subcmd = primary_subcmd[pn]
next
}
# Leaving this subcommand title without a description
subcmd && /^#/ {
subcmd_txt = subcmd_txt " " subcmd "\n"
subcmd = ""
next
}
# Subcommand title block with a description
subcmd && ! /^$/ {
subcmd_txt = subcmd_txt twocol(subcmd, $0, col1 - 3, col2, " ", " ") "\n"
subcmd = ""
next
}
# Not seen the right command or have processed it already, so none of the
# below processing should be done. All that remains is collecting
# subcommands.
! cmd_doc_start || cmd_doc_end {
next
}
/^### Synopsis/ {
synopsis = 1
txt = txt "SYNOPSIS\n"
added_txt = 1
next
}
synopsis && /^ [a-z]/ {
sub(/^ /, " ")
syn = $0
txt = txt $0 "\n"
added_txt = 1
}
/^### REST endpoint/ {
junonia_web = 1
txt = txt "REST ENDPOINT\n"
added_txt = 1
next
}
junonia_web && /^ [A-Z]+ *http/ {
sub(/^ /, " ")
txt = txt $0 "\n"
added_txt = 1
}
/^### Description/ {
description = 1
txt = txt "DESCRIPTION"
added_txt = 1
next
}
description && ! /^$/ {
txt = txt "\n" hardwrap($0, wrap - 2, " ") "\n"
added_txt = 1
}
/^### Positional parameters/ {
positional = 1
txt = txt "PARAMETERS\n"
added_txt = 1
next
}
#* `POS_ONE`
positional && /^\* `[-_A-Z0-9]+`/ {
gsub(/`/, "")
param_col1 = $2
positional_no_desc = 1
}
positional && /^[A-Za-z0-9]/ {
txt = txt twocol(param_col1, $0, col1 - 3, col2, " ", " ") "\n"
positional_no_desc = 0
added_txt = 1
}
/^### Options/ {
options = 1
txt = txt "OPTIONS\n"
added_txt = 1
next
}
#* `-option`
#* `-option VAL`
#* `-option VAL1 [-option1 VAL2 ...]`
options && /^\* `[\.-][-_A-Za-z0-9]+/ {
gsub(/^\* |`/, "")
opt_col1 = $0
}
options && /^[A-Za-z0-9]/ {
if(length(opt_col1) > col1 - 3) {
opt_col2 = hardwrap($0, wrap - col1, col1_indent)
txt = txt " " opt_col1 "\n" opt_col2 "\n\n"
added_txt = 1
} else {
txt = txt twocol(opt_col1, $0, col1 - 3, col2, " ", " ") "\n"
added_txt = 1
}
}
/^### Online documentation/ {
online_doc = 1
txt = txt "ONLINE DOCUMENTATION\n"
added_txt = 1
next
}
online_doc && /^http/ {
txt = txt " " $0 "\n"
added_txt = 1
}
END {
if(txt == "NAME\n") {
echoerr("help documentation failed to be generated")
exit 1
}
sub(/\n*$/, "", txt)
print txt
if(n_subcmds > 0) {
print "\n" subcmd_txt
}
}
'
if [ -z "$1" ]; then
echoerr "Command text required to generate help"
return 1
fi
cat | awk -v wrap="$JUNONIA_WRAP" -v col1="$JUNONIA_COL1" \
-v col2="$JUNONIA_COL2" -v cmd="$1" \
"$JUNONIA_AWKS $awk_prog"
}
###
### Meta-commands
###
# All junonia programs support the following commands, which will be injected.
# Docs are created in functions so the titles can be set with the target
# program name.
_junonia_cmds=' help
config
cache
-clear
plugin
list
search
TERM
info
NAME
install
NAME
uninstall
NAME
upgrade
NAME
update
version
-junonia'
# Markdown help source for the top-level program itself; the heredoc is
# parsed by _junonia_md2help to render NAME/SYNOPSIS/DESCRIPTION help.
_junonia_md () {
  cat << EOF
## \`$JUNONIA_NAME\`
Bourne shell scripting framework and program
### Synopsis
$JUNONIA_NAME SUBCOMMAND
### Description
Junonia is a Bourne shell framework for writing scripts. It is also a standalone program that can be used to retrieve and execute other plugins and programs.
EOF
}
# Markdown help source for the `help` meta-command.
_junonia_help_md () {
  cat << EOF
## \`$JUNONIA_NAME help\`
Print information about program and subcommand usage
### Synopsis
$JUNONIA_NAME [SUBCOMMAND] help [SUBCOMMAND]
### Description
The help subcommand shows information about program and subcommand usage, including options, positional parameters, and subcommands. It can come before or after the subcommand for which help should be displayed.
EOF
}
# Markdown help source for the `config` meta-command.
_junonia_config_md () {
  cat << EOF
## \`$JUNONIA_NAME config\`
Display or edit the \`$JUNONIA_NAME\` config file
### Synopsis
$JUNONIA_NAME config [subcommand] [options]
### Description
Running \`config\` without any options displays the current configuration. To set program options, run \`$JUNONIA_NAME config\` followed by the subcommand, options, and option values as they would be provided during an invocation of that command.
EOF
}
# Markdown help source for the `cache` meta-command.
_junonia_cache_md () {
  cat << EOF
## \`$JUNONIA_NAME cache\`
Generate or clear meta-information cache
### Synopsis
$JUNONIA_NAME cache [-clear]
### Description
Information that needs to be generated to parse arguments and display help can be cached in the user config directory and used instead.
### Options
* \`-clear\`
Remove the cache directory. Note that a subsequent run of \`cache\` will result in the caching of at least a program argument spec, and perhaps other artifacts as well. They are being cleared, but then regenerated when \`cache\` is rerun.
EOF
}
# Markdown help source for the `plugin` meta-command group.
_junonia_plugin_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin\`
Manage $JUNONIA_NAME shell plugins and programs
### Synopsis
$JUNONIA_NAME plugin
### Description
Shell plugins and programs can be stored in the user config directory. Plugins can add subcommands to programs or provide new functions that can be used by existing or new plugins.
EOF
}
# Markdown help source for `plugin list`.
_junonia_plugin_list_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin list\`
Show installed plugins
### Synopsis
$JUNONIA_NAME plugin list
EOF
}
# Markdown help source for `plugin search`.
_junonia_plugin_search_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin search\`
Search the plugin registry for matches
### Synopsis
$JUNONIA_NAME plugin search TERM
### Description
The plugin registry contains information about plugins and programs that can be retrieved for installation. The listings include authors, plugin names, tags, and short descriptions. The \`TERM\` supplied is passed to an \`awk\` script for matching, and matches are printed for any match found in the information.
### Positional parameters
* \`TERM\`
Search term to match on author, package name, tags, and descriptions. The term can be a simple word or any regular expression that is valid in AWK.
EOF
}
# Markdown help source for `plugin info`.
_junonia_plugin_info_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin info\`
Display a plugin or program summary
### Synopsis
$JUNONIA_NAME plugin info NAME
### Description
Info displays the registry information about a package.
### Positional parameters
* \`NAME\`
Exact name of the package in author/name form.
EOF
}
# Markdown help source for `plugin install`.
_junonia_plugin_install_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin install\`
Retrieve and install plugins and programs
### Synopsis
$JUNONIA_NAME plugin install NAME
### Description
Install searches for, downloads, and extracts plugins and programs to the user plugin directory in the user config directory. If the item being installed is a program, the program script will be symlinked in the user bin directory in the user config directory.
### Positional parameters
* \`NAME\`
Plugin or program name in author/name form
EOF
}
# Markdown help source for `plugin uninstall`.
_junonia_plugin_uninstall_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin uninstall\`
Remove an installed plugin or program
### Synopsis
$JUNONIA_NAME plugin uninstall NAME
### Description
Uninstall removes the specified plugin or program by deleting the directory. If the item is a program, the symlink in the user bin directory in the user config directory is also deleted.
### Positional parameters
* \`NAME\`
Plugin or program name in author/name form
EOF
}
# Markdown help source for `plugin upgrade`.
_junonia_plugin_upgrade_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin upgrade\`
Search for and retrieve a new plugin or program version
### Synopsis
$JUNONIA_NAME plugin upgrade NAME
### Description
Upgrade searches the registry for a version newer than the one currently installed. Installation is performed by downloading the newer version, uninstalling the existing version, then installing the downloaded version.
### Positional parameters
* \`NAME\`
Plugin or program name in author/name form
EOF
}
# Markdown help source for `plugin update`.
_junonia_plugin_update_md () {
  cat << EOF
## \`$JUNONIA_NAME plugin update\`
Update the plugin registry cache
### Synopsis
$JUNONIA_NAME plugin update
EOF
}
# Markdown help source for the `version` meta-command.
_junonia_version_md () {
  cat << EOF
## \`$JUNONIA_NAME version\`
Display program version
### Synopsis
$JUNONIA_NAME version [-junonia]
### Options
* \`-junonia\`
Display the junonia library version
EOF
}
# Emit the Markdown help sources for every junonia meta-command, in a
# fixed order, followed by the optional junonia_web docs.
_junonia_mds () {
  for mdsuffix in help config cache plugin plugin_list plugin_search \
                  plugin_info plugin_install plugin_uninstall \
                  plugin_upgrade plugin_update version; do
    "_junonia_${mdsuffix}_md"
  done

  # junonia_web may not be loaded; silence the "command not found" noise.
  _junonia_web_mds 2>/dev/null
}
# Entry point when junonia runs standalone with no subcommand: re-exec
# the program to display its own help (does not return).
junonia () {
  exec "$0" help
}
# Print the junonia commands under a METACOMMANDS heading
# Renders the meta-command Markdown through _junonia_md2help, then
# rewrites the SUBCOMMANDS heading to METACOMMANDS and prints only that
# section.
# NOTE(review): reads $cmd from the caller's scope (_junonia_help sets
# it) — confirm before calling from anywhere else.
_junonia_help_metacmds () {
  {
    echo "## \`$JUNONIA_NAME\`"
    _junonia_mds
  } | _junonia_md2help "$cmd" | \
  awk '/SUBCOMMANDS/ {
    printf "\n\nMETACOMMANDS\n"
    p = 1
    next
  }
  p'
}
# Resolve and print help text for a command path, dispatching on the
# configured Markdown/spec source type; results are cached under
# $JUNONIA_CACHEDIR/help.
#
# Arguments: $1 - full command string (e.g. "prog sub")
# Outputs:   formatted help text on stdout
_junonia_help () {
  echodebug "begin help"

  cmd="$1"
  # Cache key: spaces and hyphens in the command become underscores.
  helpfunc="$(echo "$1" | sed 's/ /_/g; s/-/_/g')"

  # Serve from the help cache when present and caching is enabled.
  if [ -f "$JUNONIA_CACHEDIR/help/$helpfunc" ] &&
     [ "$JUNONIA_CACHE" != 0 ]; then
    cat "$JUNONIA_CACHEDIR/help/$helpfunc"
    return 0
  fi

  case "${JUNONIA_SPEC_TYPE#cached_}" in
    dir)
      # One Markdown file per command in the JUNONIA_MD directory.
      mds="$(for f in "$JUNONIA_MD"/*.md; do cat "$f"; echo; done)"
      helptext="$( {
        echo "$mds"
        _junonia_plugin_docs
      } | _junonia_md2help "$cmd")"
      # Program-level help also lists the junonia meta-commands.
      if [ "$JUNONIA_NAME" = "$cmd" ]; then
        helptext="$helptext $(_junonia_help_metacmds)"
      fi
      ;;
    file)
      # All Markdown docs live in the single JUNONIA_MD file.
      helptext="$( {
        cat "$JUNONIA_MD"
        _junonia_plugin_docs
      } | _junonia_md2help "$cmd")"
      if [ "$JUNONIA_NAME" = "$cmd" ]; then
        helptext="$helptext $(_junonia_help_metacmds)"
      fi
      ;;
    md_string)
      # JUNONIA_MD holds the Markdown text itself.
      if [ "$JUNONIA_NAME" = junonia ]; then
        helptext="$( {
          echo "$JUNONIA_MD"
          _junonia_plugin_docs
        } | _junonia_md2help "$cmd")"
      else
        # NOTE(review): this branch duplicates the one above except for
        # the meta-command suffix — looks collapsible; confirm intent
        # before refactoring.
        helptext="$( {
          echo "$JUNONIA_MD"
          _junonia_plugin_docs
        } | _junonia_md2help "$cmd")"
        if [ "$JUNONIA_NAME" = "$cmd" ]; then
          helptext="$helptext $(_junonia_help_metacmds)"
        fi
      fi
      ;;
    spec_string)
      # No Markdown available: strip comment markers from the spec.
      helptext="$(echo "$JUNONIA_SPEC" | awk '{sub(/^# /, ""); print}')"
      ;;
  esac

  echo "$helptext"
  junonia_cache_file "help/$helpfunc" "$helptext"
}
# Show or modify the program configuration file.
#
# With no (non-blank) arguments, print the current config file contents;
# with arguments, hand everything to junonia_update_config.
_junonia_config () {
  echodebug "begin config"

  # An argument list that is empty or only spaces means "display".
  if printf '%s\n' "$*" | grep -Eq '^ *$'; then
    if [ -f "$JUNONIA_CONFIG" ]; then
      cat "$JUNONIA_CONFIG"
    else
      echo "no configuration file at $JUNONIA_CONFIG"
    fi
  else
    junonia_update_config "$JUNONIA_CONFIG" "$@"
  fi
}
# Report cache status and contents, or remove the cache directory.
#
# Arguments: $1 - optional flag; any non-empty value (-clear) clears
# Outputs:   cache location and state, then either the cached file list
#            or a "cache cleared" confirmation
# Returns:   0 on success, 1 if the cache directory cannot be cleared
_junonia_cache () {
  echodebug "begin cache"

  clear=$1

  echo "cache directory: $JUNONIA_CACHEDIR"
  if [ "$JUNONIA_CACHE" != 0 ]; then
    echo "caching is enabled"
  else
    echo "caching is disabled"
  fi

  # Quote and use -n: the old bare [ $clear ] broke on flag values
  # containing spaces or test operators.
  if [ -n "$clear" ]; then
    # Resolve the physical path first so the sanity checks below are not
    # fooled by symlinks or a relative JUNONIA_CACHEDIR.
    if ! cachedir_abspath="$( (cd "$JUNONIA_CACHEDIR" &&
                               pwd -P) 2>/dev/null)"; then
      echoerr "unable to clear cache. could not enter directory:"
      echoerr "$JUNONIA_CACHEDIR"
      return 1
    fi

    # Refuse to rm -rf an empty path, /, or $HOME.
    if [ -n "$cachedir_abspath" ] &&
       [ -d "$cachedir_abspath" ] &&
       [ "$cachedir_abspath" != / ] &&
       [ "$cachedir_abspath" != "$HOME" ]; then
      if rm -rf "$cachedir_abspath"; then
        echo "cache cleared"
      else
        echoerr "failed to remove cache directory:"
        echoerr "$cachedir_abspath"
        return 1
      fi
    fi
  else
    if [ -d "$JUNONIA_CACHEDIR" ]; then
      # Do this in a subshell so we don't actually change directory.
      (
        echo "cached files:"
        cd "$JUNONIA_CACHEDIR"
        find . -type f | cut -c 3-
      )
    fi
  fi
}
# Entry point for the bare `plugin` meta-command: re-exec the program to
# show the plugin group's help (does not return).
_junonia_plugin () {
  exec "$0" plugin help
}
# List installed plugins, one per line.
#
# Top-level plugin dirs without a dot in the name are printed as-is;
# dotted names are treated as a site (e.g. github.com) and expanded to
# site/org/plugin entries.
#
# Outputs: plugin names on stdout; nothing when none are installed
_junonia_plugin_list () {
  if [ -d "$JUNONIA_CONFIGDIR/plugins" ]; then
    # Subshell so the cd does not leak into the caller.
    (
      cd "$JUNONIA_CONFIGDIR/plugins"
      for d in */; do
        # An empty plugins dir leaves the unexpanded literal '*/'.
        [ -d "$d" ] || continue
        if ! echo "$d" | grep -q '\.'; then
          echo "${d%/}"
        else
          for siteorg in "${d%/}"/*/; do
            for orgplugin in "${siteorg%/}"/*/; do
              # Skip unexpanded globs from sites with no org/plugin dirs.
              [ -d "$orgplugin" ] || continue
              echo "${orgplugin%/}"
            done
          done
        fi
      done
    )
  fi
}
# Search the plugin registry for a term.
#
# Arguments: $1 - awk regex/term matched against each registry record
# Outputs:   the info and description lines of every matching record
# Returns:   0 on success, 1 if no term was given or no registry
_junonia_plugin_search () {
  term="$1"

  if [ -z "$term" ]; then
    echoerr "a search term is required"
    # Without this return an empty term fell through and matched every
    # record in the registry.
    return 1
  fi

  if ! reg="$(_junonia_get_registry)"; then
    echoerr "registry not available"
    return 1
  fi

  # Registry records are three lines: info, description, blank.  Print a
  # record when the term matched anywhere within it.
  awk_prog='
    $0 ~ s { m = 1 }
    NR % 3 == 1 { info = $0 }
    NR % 3 == 2 { descr = $0 }
    NR % 3 == 0 {
      if(m) {
        print info
        print descr
      }
      m = 0
    }
  '
  echo "$reg" | awk -v s="$term" "$awk_prog"
}
# Print the registry record (info and description lines) for one plugin.
#
# Arguments: $1 - plugin name; matches the org, the repo, or org/repo
# Outputs:   each matching registry record followed by a blank line
# Returns:   0 on success, 1 when the registry is unavailable
_junonia_plugin_info () {
  plugin_name="$1"

  if ! reg="$(_junonia_get_registry)"; then
    echoerr "registry not available"
    return 1
  fi

  # Registry records are three lines: info, description, blank.  The
  # first field of the info line looks like site/org/repo; match the
  # requested name against org, repo, or org/repo.
  info_prog='
    NR % 3 == 1 {
      info = $0
      n = split($1, namea, "/")
    }
    NR % 3 == 2 {
      if(namea[2] == name || namea[3] == name ||
         namea[2] "/" namea[3] == name) {
        print info
        print $0
        print ""
      }
    }
  '
  printf '%s\n' "$reg" | awk -v name="$plugin_name" "$info_prog"
}
# Download and install a plugin from the registry via git.
#
# Arguments: $1 - plugin name in org/repo form
# Returns:   0 on success; 1 when git, the registry, or the plugin is
#            unavailable
_junonia_plugin_install () {
  plugin_name="$1"

  if ! command -v git >/dev/null 2>&1; then
    echoerr "git is required for installs (currently)"
    return 1
  fi

  if ! reg="$(_junonia_get_registry)"; then
    echoerr "registry not available"
    return 1
  fi

  # Resolve the requested org/repo to the registry's full site/org/repo.
  awk_prog='
    NR % 3 == 1 {
      n = split($1, namea, "/")
      if(namea[2] "/" namea[3] == name) {
        print $1
        exit
      }
    }
  '
  repo="$(echo "$reg" | awk -v name="$plugin_name" "$awk_prog")"

  # Guard: an unknown name would otherwise mkdir/cd into the plugins
  # root and attempt to clone "https://.git".
  if [ -z "$repo" ]; then
    echoerr "plugin not found in registry: $plugin_name"
    return 1
  fi

  plugindir="$JUNONIA_CONFIGDIR/plugins/$repo"
  mkdir -p "$plugindir"
  cd "$plugindir" || return 1
  git clone "https://$repo.git" .

  echo "installed $plugin_name to:"
  echo "$plugindir"
}
# Remove an installed plugin directory.
#
# Arguments: $1 - plugin name in org/repo form
# Returns:   0 on success (or when nothing was installed), 1 on a
#            malformed or unsafe name
_junonia_plugin_uninstall () {
  plugin_name="$1"

  # The path must be built from $plugin_name; it was previously built
  # from the unset variable $name, making uninstall a silent no-op.
  plugindir="$JUNONIA_CONFIGDIR/plugins/github.com/$plugin_name"

  if ! echo "$plugin_name" | awk -F '/' 'NF == 2 { exit 0 } { exit 1 }'; then
    echoerr "plugin name should be of the form org/repo"
    return 1
  fi

  # Never follow '..' out of the plugins tree.
  if echo "$plugindir" | grep -Eq '\.\.'; then
    echoerr "plugin path cannot contain relative path entries '..'"
    return 1
  fi

  if [ -d "$plugindir" ]; then
    cd "$JUNONIA_CONFIGDIR/plugins/github.com" || return 1
    rm -rf "$plugin_name"
  fi
}
# Upgrade an installed plugin by pulling its git checkout.
#
# Arguments: $1 - plugin name in org/repo form
# Returns:   0 on success (or when not installed), 1 on a malformed name
_junonia_plugin_upgrade () {
  plugin_name="$1"

  # Built from $plugin_name; it was previously built from the unset
  # variable $name, so the plugin directory was never found.
  plugindir="$JUNONIA_CONFIGDIR/plugins/github.com/$plugin_name"

  if ! echo "$plugin_name" | awk -F '/' 'NF == 2 { exit 0 } { exit 1 }'; then
    echoerr "plugin name should be of the form org/repo"
    return 1
  fi

  if [ -d "$plugindir" ]; then
    # git pull must run inside the plugin's own checkout (the previous
    # cd to dirname landed in the org directory, which is not a repo).
    cd "$plugindir" || return 1
    git pull
  fi
}
# Force a refresh of the plugin registry cache.
_junonia_plugin_update () {
  reg_file="$JUNONIA_CACHEDIR/${JUNONIA_NAME}_registry"

  # Drop any cached copy so the fetch below repopulates it.
  if [ -f "$reg_file" ]; then
    rm -f -- "$reg_file"
  fi

  # Output is discarded; only the cache side effect matters here.
  _junonia_get_registry >/dev/null
}
# Print the program version.
#
# Prefers a <prog>_version function; when running as junonia itself the
# library version is used; otherwise the program is "unversioned".
#
# Arguments: $1 - any non-empty value appends the junonia library version
_junonia_version () {
  if command -v "${JUNONIA_NAME}_version" >/dev/null 2>&1; then
    ver="$JUNONIA_NAME $(${JUNONIA_NAME}_version)"
  elif [ junonia = "$JUNONIA_NAME" ]; then
    ver="$JUNONIA_VERSION"
  else
    ver="$JUNONIA_NAME unversioned"
  fi

  case "$1" in
    ?*) ver="$ver (junonia $JUNONIA_VERSION)" ;;
  esac

  echo "$ver"
}
# Map a resolved command function name onto junonia's own handlers.
#
# Arguments: $1 - function name built from the command line
# Outputs:   the normalized function name
# Returns:   0 when $1 was a junonia meta/web function, 1 otherwise
_junonia_normalize_func () {
  echodebug "begin normalize func"

  # Help is already in normalized form.
  if [ "$1" = _junonia_help ]; then
    echo "$1"
    return 0
  fi

  # Meta-commands shipped with junonia: <prog>_X becomes _junonia_X.
  for metasuffix in config cache plugin plugin_list plugin_search \
                    plugin_info plugin_install plugin_uninstall \
                    plugin_upgrade plugin_update version; do
    if [ "$1" = "${JUNONIA_NAME}_$metasuffix" ]; then
      echo "_junonia_$metasuffix"
      return 0
    fi
  done

  # After init, junonia_web functions map <prog>_X to junonia_X.
  if [ -n "$JUNONIA_INIT" ]; then
    for webfunc in $_junonia_web_func_names; do
      if [ "$1" = "$webfunc" ]; then
        echo "junonia${1#$JUNONIA_NAME}"
        return 0
      fi
    done
  fi

  # Not a junonia-provided function; pass the name through unchanged.
  echo "$1"
  return 1
}
# Write contents to a path relative to the cache directory.
#
# Arguments: $1 - cache-relative file path
#            $2 - contents to store (written without a trailing newline)
junonia_cache_file () {
  echodebug "begin cache file"

  cachepath="$1"
  contents="$2"

  if [ "$JUNONIA_CACHE" = 0 ]; then
    echodebug "skipping, caching disabled"
    return
  fi

  # Create intermediate directories on demand.
  parent="$(dirname "$cachepath")"
  if [ ! -d "$JUNONIA_CACHEDIR/$parent" ]; then
    mkdir -p "$JUNONIA_CACHEDIR/$parent"
  fi
  printf '%s' "$contents" > "$JUNONIA_CACHEDIR/$cachepath"
}
# Print the plugin registry, fetching and caching it on a cache miss.
#
# Requires a <prog>_registry function that prints the registry URL.
# Outputs: the registry text on stdout
# Returns: 0 on success, 1 when curl or registry info is unavailable
_junonia_get_registry () {
  reg_file="$JUNONIA_CACHEDIR/${JUNONIA_NAME}_registry"

  if [ -f "$reg_file" ] &&
     [ "$JUNONIA_CACHE" != 0 ]; then
    cat "$reg_file"
  else
    if ! command -v curl >/dev/null 2>&1; then
      echoerr "unable to get plugin registry: curl not found"
      return 1
    fi

    # Redirect: only the existence check matters, not the printed path.
    if ! command -v ${JUNONIA_NAME}_registry >/dev/null 2>&1; then
      echoerr "unable to get plugin registry: no registry info available"
      return 1
    fi

    # The <prog>_registry function supplies the URL to fetch (previously
    # the literal function name was passed to curl as the URL).
    reg="$(curl -s "$(${JUNONIA_NAME}_registry)")"

    # junonia_cache_file expects a path relative to JUNONIA_CACHEDIR,
    # and callers capture our stdout, so emit the fresh registry too.
    junonia_cache_file "${JUNONIA_NAME}_registry" "$reg"
    echo "$reg"
  fi
}
# Concatenate the Markdown docs shipped with every installed plugin,
# checking the conventional doc locations inside each plugin in order.
_junonia_plugin_docs () {
  for plugin in $(_junonia_plugin_list); do
    pname="$(basename "$plugin")"
    plugindir="$JUNONIA_CONFIGDIR/plugins/$plugin"
    # The usr/share/doc path previously interpolated the unset variable
    # $name, producing a double slash; build it from plugindir directly.
    for docdir in "$plugindir/usr/share/doc/$pname" \
                  "$plugindir/docs" \
                  "$plugindir/doc" \
                  "$plugindir"; do
      if ls "$docdir"/*.md >/dev/null 2>&1; then
        cat "$docdir"/*.md
      fi
    done
  done
}
# Try to find and source a given function name if it is not already
# defined: first from the program's own tree, then from each installed
# plugin, stopping at the first provider.
_junonia_load_func () {
  if command -v "$1" >/dev/null 2>&1; then
    return 0
  fi

  if _junonia_func_search "$JUNONIA_PATH" "$1"; then
    return 0
  fi

  for plugin in $(_junonia_plugin_list); do
    plugindir="$JUNONIA_CONFIGDIR/plugins/$plugin"
    # Stop at the first plugin that provides the function; continuing
    # made later searches emit spurious "command not found" errors.
    if _junonia_func_search "$plugindir" "$1"; then
      return 0
    fi
  done
}
# Search a directory tree for a file that provides a function, trying a
# fixed list of conventional locations with each supported extension.
#
# Arguments: $1 - root directory to search
#            $2 - function name to locate
# Globals:   sets JUNONIA_INTERP (and JUNONIA_FUNCPATH for Python)
# Returns:   0 when the function is loaded/located, 1 otherwise
_junonia_func_search () {
  echodebug "begin func search"

  searchpath="$1"
  func_name="$2"

  i=0
  while ! command -v "$func_name" >/dev/null 2>&1; do
    case $i in
      0) p="$searchpath/$func_name";;
      1) p="$searchpath/cmd/$func_name";;
      2) p="$searchpath/cmds/$func_name";;
      3) p="$searchpath/lib/$JUNONIA_NAME/$func_name";;
      4) p="$searchpath/lib/$JUNONIA_NAME/cmd/$func_name";;
      5) p="$searchpath/lib/$JUNONIA_NAME/cmds/$func_name";;
      6) p="$searchpath/lib/$JUNONIA_NAME/command/$func_name";;
      7) p="$searchpath/lib/$JUNONIA_NAME/commands/$func_name";;
      *) echoerr "command not found: $func_name"
         return 1
    esac

    # Advance once per location, outside the extension loop.  Previously
    # the counter advanced per extension, so each location was only ever
    # tried with one of the two extensions.
    i=$(( i + 1 ))

    for ext in sh py; do
      # Note that if the JUNONIA_INTERP_* variable is set manually it
      # will be used unconditionally for the interpreter.
      echodebug "searching for $p.$ext"
      if [ -f "$p.$ext" ]; then
        # Found the function file
        case "$ext" in
          sh)
            # Source the file actually found ($p.$ext, not $p).
            . "$p.$ext"

            # Only stop the search if the sourced file actually
            # contained the function.
            if command -v "$func_name" >/dev/null 2>&1; then
              JUNONIA_INTERP=${JUNONIA_INTERP_SH:=sh}
              return 0
            fi
            ;;
          py)
            for py in python python3 pypy3 pypy; do
              if command -v $py >/dev/null 2>&1; then
                JUNONIA_INTERP=${JUNONIA_INTERP_PY:-$py}
                JUNONIA_FUNCPATH="$p.$ext"
                return 0
              fi
            done
            echoerr "no suitable Python interpreter found"
            return 1
            ;;
          *)
            echoerr "unsupported extension: $ext"
            return 1
            ;;
        esac
      fi
    done
  done

  # Loop exits here only when the function was already defined.
  return 0
}
###
### Execution environment setup and management
###

# Configure the output level settings. Providing 0 or no argument disables
# output from echodebug. Providing 1 or any other non-empty value enables
# output from echodebug. This is accomplished by having echodebug output to
# file descriptor 3, and redirecting 3 to either /dev/null (disabled) or 2
# (stderr, enabled).
junonia_setdebug () {
  if [ -z "$1" ] || [ "$1" = 0 ]; then
    # Disabled: echodebug output (FD 3) is discarded.
    exec 3>/dev/null
  elif [ "$1" = 1 ]; then
    # Enabled: echodebug output (FD 3) goes to stderr.
    exec 3>&2
  else
    echoerr "invalid log level: $1"
    echoerr "defaulting to 1"
    exec 3>&2
  fi
}
# Configure the execution environment by setting global variables for names and
# paths. Additionally configure debugging and temporary storage.
junonia_init () {
  echodebug "begin init"

  # Variables set by junonia_bootstrap:
  # JUNONIA_TARGET Absolute path to the script
  # JUNONIA_PATH Absolute path to the directory containing the script

  # Variables set by junonia_init:
  # JUNONIA_NAME Name of script after resolving symlinks and removing .sh
  # JUNONIA_CAPNAME Name in all caps
  # JUNONIA_CONFIG Path to script rc file
  # JUNONIA_CONFIGDIR Path to config directory
  # JUNONIA_CACHEDIR Path to cache directory
  # JUNONIA_CACHE Flag to optionally disable (0) caching
  # JUNONIA_INIT Init guard to prevent attempted re-inits
  # JSTX Communication control characters
  # JETX
  # JFS Information separator control characters
  # JGS
  # JRS
  # JUS
  # JUNONIA_WRAP Width of two column output (option help listings)
  # JUNONIA_COL1 Width of column one
  # JUNONIA_COL2 Width of column two
  # TMPDIR Set if unset, always format with ending '/' removed

  # This variable is used / checked, but is not set by junonia itself.
  # JUNONIA_DEBUG Whether or not to show output on stderr from echodebug (FD3)

  if [ -n "$JUNONIA_INIT" ]; then
    # init has already been run
    return
  fi

  readonly JUNONIA_VERSION="1.2.5"

  # Communication control characters (TC1 - TC4).
  # Generated with printf so the source contains no literal control
  # bytes, which editors, diffs, and transports can silently strip.
  readonly JSOH="$(printf '\001')" # Start of heading (SOH / TC1 / dec 1)
  readonly JSTX="$(printf '\002')" # Start of text (STX / TC2 / dec 2)
  readonly JETX="$(printf '\003')" # End of text (ETX / TC3 / dec 3)
  readonly JEOT="$(printf '\004')" # End of transmission (EOT / TC4 / dec 4)

  # Information Separator control characters (IS1 - IS4)
  readonly JUS="$(printf '\037')" # Unit Separator (US / IS1 / dec 31)
  readonly JRS="$(printf '\036')" # Record Separator (RS / IS2 / dec 30)
  readonly JGS="$(printf '\035')" # Group Separator (GS / IS3 / dec 29)
  readonly JFS="$(printf '\034')" # File Separator (FS / IS4 / dec 28)

  # Use TMPDIR if it is set. If not, set it to /tmp
  if [ -z "$TMPDIR" ]; then
    TMPDIR=/tmp
  fi

  # Strip the trailing / from TMPDIR if there is one
  export TMPDIR="$(echo "$TMPDIR" | sed 's#/$##')"

  # Get the absolute path to command used to start this script. JUNONIA_TARGET
  # can be set to a path to avoid the bootstrap process if that path is known
  # in advance, or can be set in advance. Otherwise bootstrapping will be
  # attempted if the function is defined.
  if [ -z "$JUNONIA_TARGET" ]; then
    if ! junonia_bootstrap; then
      echoerr "failed to bootstrap and init"
      return 1
    fi
  fi
  readonly JUNONIA_TARGET

  if [ -z "$JUNONIA_PATH" ]; then
    # Get the script path, go there, resolve the full path of symlinks with pwd
    # /some/path/to/the/actual
    # /home/user/code/project/name/bin
    JUNONIA_PATH="$(cd "$(dirname "$JUNONIA_TARGET")" && pwd -P)"

    # If the script is in a bin directory, then set the path one directory back.
    if [ "$(basename "$JUNONIA_PATH")" = bin ]; then
      JUNONIA_PATH="$(dirname "$JUNONIA_PATH")"
    fi
  fi
  readonly JUNONIA_PATH

  # Get the script name by removing the path and any .sh suffix:
  readonly JUNONIA_NAME="$(basename "$JUNONIA_TARGET" .sh)"
  readonly JUNONIA_CAPNAME="$(awk -v n="$JUNONIA_NAME" \
                              'BEGIN{print toupper(n)}')"

  # Path to the config dir and file
  readonly _JUNONIA_CONFIGDIR_DEF="$HOME/.$JUNONIA_NAME"
  readonly _JUNONIA_CONFIG_DEF="$_JUNONIA_CONFIGDIR_DEF/${JUNONIA_NAME}rc"
  readonly JUNONIA_CONFIGDIR="${JUNONIA_CONFIGDIR:-"$_JUNONIA_CONFIGDIR_DEF"}"
  readonly JUNONIA_CONFIG="${JUNONIA_CONFIG:-"$_JUNONIA_CONFIG_DEF"}"

  # Normalize the program version (strip spaces and slashes) to name a
  # versioned cache directory.  If the result ends in a digit, treat it
  # as a release version and enable caching; otherwise treat it as a
  # development version and disable caching.  (A previous redundant
  # sed-based assignment of prog_version was removed; its value was
  # immediately overwritten here.)
  if prog_version="$(_junonia_version | awk '
    {
      gsub(/^ *| *$/,"")
      gsub(/[\/ ]/, "")
      print
      exit ($0 !~ /[0-9]$/)
    }'
  )"; then
    # Enable caching unless explicitly disabled and version the cache
    # directory. This ensures that things like the spec and help are
    # regenerated when upgrading.
    JUNONIA_CACHE="${JUNONIA_CACHE:-1}"
    cache_subpath="/cache/$prog_version"
  else
    # Turn off caching for unversioned, in-development programs unless caching
    # is explicitly enabled. Still set a cache directory, as the program still
    # may want access to an unversioned cache directory.
    JUNONIA_CACHE="${JUNONIA_CACHE:-0}"
    cache_subpath=/cache
  fi

  readonly _JUNONIA_CACHEDIR_DEF="$_JUNONIA_CONFIGDIR_DEF$cache_subpath"
  readonly JUNONIA_CACHEDIR="${JUNONIA_CACHEDIR:-"$_JUNONIA_CACHEDIR_DEF"}"

  # Indicate that init has happened
  readonly JUNONIA_INIT=1

  # Try to find, source, and init junonia_web
  if ! junonia_webinit 2>/dev/null 1>&2; then
    for f in $JUNONIA_PATH/junonia_web \
             $JUNONIA_PATH/bin/junonia_web \
             $JUNONIA_PATH/lib/junonia_web \
             $JUNONIA_PATH/lib/$JUNONIA_NAME/junonia_web; do
      echodebug "checking $f for junonia_web"
      if [ -f "$f" ]; then
        echodebug "found $f"
        . "$f"
        junonia_webinit
        break
      fi
    done
  fi
}
###
### Argument parsing
###
# Use junonia_envvars to examine the current environment using env and extract
# the names of variables of interest. Options are to list all variables (a),
# readonly variables (r), or variables that are writable / can be set (2) as
# the first argument. A prefix can also be supplied as the second argument,
# which junonia itself sets to SCRIPT_ mostly typically.
#
# Unfortunately it is IMPOSSIBLE to determine from the output of env what
# actually are variables just by inspection. It's possible to have a multiline
# variable whose contents looks like a variable assignment:
#
# foo="one=two
# three=four"
#
# So the output of env is:
#
# foo=one=two
# three=four
#
# So 'three' looks like a variable but is not one. Therefore eval is used to
# see if each of the potential names are set [ ${var+set} = set ], meaning they
# are a variable of interest with a value, even if that value is empty, before
# adding the name to the list.
#
# Eval is used very carefully by only sending it things from known sources.
# The output of each line of env that is extracted must match the pattern
# given, which is most typically 'SCRIPT_<valid identifier chars>=', and the
# first field split on = is evaluated. Therefore, what is being 'eval'ed is a
# potential variable name.
# List the NAMES of environment variables that match a prefix.
#
# Arguments: $1 - mode: a (all), r (readonly only), w (writable only)
#            $2 - variable name prefix (junonia uses SCRIPT_-style)
# Outputs:   one variable name per line
# Returns:   1 on an invalid mode
junonia_envvars () {
  case "$1" in
    a)
      mode_fmt=
      ;;
    r)
      # readonly: unset fails (probed in a subshell so nothing is lost)
      mode_fmt='&& ! (unset %s 2>/dev/null)'
      ;;
    w)
      # writable: unset succeeds in the probing subshell
      mode_fmt='&& (unset %s 2>/dev/null)'
      ;;
    *)
      echoerr "could not retrieve env vars. type must be one of:"
      echoerr "a - get all environment variables"
      echoerr "r - get all readonly environment variables"
      echoerr "w - get all writable environment variables"
      return 1
      ;;
  esac

  # Per the block comment above: candidate names scraped from env must
  # be confirmed with eval because multiline values can masquerade as
  # assignments. Only the regex-matched identifier is ever eval'd.
  for v in $(env | awk -F= -v prefix="$2" \
    '$0 ~ "^" prefix "[_A-Za-z0-9]+=" {print $1}'); do
    var_mode="$(printf "$mode_fmt" "$v")"
    eval 'if [ "${'$v'+set}" = set ] '$var_mode'; then
      echo $v
    fi'
  done
}
# Accept an argument spec and arguments, produce a list of values for each
# positional argument and option in the spec. If no option was specified, an
# empty value is generated, such that every specified option has a value, even
# if that value is empty.
#
# $1 The full Markdown text
# $2 The source of the Markdown text
# $3 The full text of a program argument spec
# $4 - $N The program name and arguments from the command line
_junonia_set_args () {
echodebug "begin set args"
# NOTE THAT THE CONFIG FILE IS *MEANT* TO BE AN RC FILE WHERE YOU CAN SET
# ARGUMENT VARS AND RUN COMMANDS FOR SETUP TYPE THINGS. ARBITRARY COMMANDS
# CAN BE EXECUTED. THIS IS BY DESIGN. THE SECURITY MODEL OF SHELL SCRIPTING
# IS "IT CAN RUN WHATEVER THE CURRENT USER CAN."
# Non-script related variables (foo=bar) will not be available to commands
# because the sourcing occurs in a command substitution subshell. The script
# related values are available only because they are fully resolved and
# returned.
# The configuration file is in a shell format that can be sourced. In order
# to resolve arguments in the expected order (defaults, config file,
# environment variables, command line arguments), the config file cannot be
# directly sourced into this environment, otherwise it will overwrite
# already-set environment variables. This is worked around in the following
# manner.
if [ -f "$JUNONIA_CONFIG" ]; then
# Make a list of script related variables that are set.
set_vars="$(junonia_envvars w ${JUNONIA_CAPNAME}_)"
# Once the list of known variables that are already set is made, execute a
# subshell in a command substitution that outputs the text of some export
# commands to re-set the values of the existing variables.
# Eval is again used very carefully. Only identifiers are in the list that
# is iterated over. When the value is obtained, the resolution of the
# variable v is the variable name, the eval of that gives the *string*
# value of the variable, and then the resulting export command string
# encloses that value in single quotes. In this way, the value provided in
# the configuration file is treated *only as a string*. We're not worried
# so much about security as incorrectly, prematurely evaluating an
# expression. That is, the value should be preserved as-is.
# The resulting list of export commands to be eval'd looks like:
# export SCRIPT_foo='string value of foo from env var'
# export SCRIPT_bar='string value of bar from env var'
evalcmds="$(
for v in $(junonia_envvars w ${JUNONIA_CAPNAME}_); do
eval echo export $v=\\\'\"'$'$v\"\\\'
done
)"
# Source the config file, exporting all of the variables. Existing
# variables may get overwritten. This is where any commands in the config
# file will be executed.
set -a
. "$JUNONIA_CONFIG"
set +a
# Re-set any previously set variables so that environment variables take
# precedence over configuration file values.
eval "$evalcmds"
fi
md="$1"
shift
spec_type="$1"
shift
spec="$1"
shift
# The spec is usually quite long, so suppress the printing here unless needed.
#echodebug "spec:"
#echodebug "$spec"
readonly JUNONIA_ARGS="$*"
# Spaces and newlines need to be ignored when passing the determined values
# back. The output will be separated by Record Separator (control character
# 30).
awk_prog='
function help() {
echodebug("help command, flag, or option: " pos[i-1])
# Build the function name to get help on
func_name = pos[1]
# e.g. cmd subcommand help
for(j=2; j<i-1; j++) {
func_name = func_name " " pos[j]
}
# Check the next arg to see if that should be the func for help
# e.g. cmd subcommand help subcommand2
if(ARGV[i+1] && ARGV[i+1] !~ /^-/) {
func_name = func_name " " ARGV[i+1]
}
print JGS "_junonia_help" JRS func_name
e = 0
exit
}
BEGIN {
# All of the different options for summoning help
help_a["help"] = help_a["-h"] = help_a["-help"] = ""
help_a["--help"] = help_a["/h"] = help_a["/help"] = ""
# Arg 1 is stdin, so skip that and Iterate through the remaining program
# arguments, which will be either positional (including subcommands),
# options, or multi-options.
for(i = 2; i < ARGC; i++) {
opt_kind = substr(ARGV[i], 1, 1)
# Note that a hyphen by itself is often used as a positional parameter
# that means "read from stdin"
if(opt_kind == "-" && length(ARGV[i]) != 1) {
# This is an option
if(seen_opt_group && seen_json_group && opt_kind != last_opt_kind) {
echoerr("options and JSON options must be grouped, not mixed")
e = 1
exit
}
seen_opt_group = 1
# Check for help subcommand
if(ARGV[i] in help_a) {
help()
}
echodebug("")
echodebug("setting option " ARGV[i] " to:")
# How many times this option has been seen
opt_num[ARGV[i]]++
if(opts[ARGV[i]]) {
# Have already seen this arg once, so it gets another, numbered
# entry in the opts array.
opts[ARGV[i] opt_num[ARGV[i]]] = ARGV[i+1]
echodebug(ARGV[i+1] " in opts[" ARGV[i] opt_num[ARGV[i]] "]")
} else {
next_argchar = substr(ARGV[i+1], 1, 1)
next_next_argchar = substr(ARGV[i+2], 1, 1)
if(next_argchar == "-") {
# If the next thing is an option instead of a value, then set the
# value to empty and move on.
opts[ARGV[i]] = ""
delete ARGV[i]
echodebug("empty")
} else {
if(next_argchar == ".") {
if(next_next_argchar != "." &&
next_next_argchar != "-" &&
next_next_argchar != "" ) {
# This looked to be an option value, but now it looks like
# a JSON argument. Set the option value to empty.
opts[ARGV[i]] = ""
delete ARGV[i]
echodebug("empty")
last_opt_kind = opt_kind
continue
} else {
# Store this arg and its value, which is the next value.
opts[ARGV[i]] = ARGV[i+1]
echodebug(ARGV[i+1])
# The value may yet become a JSON flag.
make_jqprog("# " ARGV[i+1] " = true |")
}
} else {
# Store this arg and its value, which is the next value.
opts[ARGV[i]] = ARGV[i+1]
echodebug(ARGV[i+1])
}
# This was an option with a value, so remove both the option and the
# value (the next argument), and then additionally jump i forward to
# the next array index, since that does not shift during this loop.
delete ARGV[i]
delete ARGV[i+1]
i++
}
}
} else {
if(substr(ARGV[i], 1, 1) == ".") {
if(seen_opt_group && seen_json_group && opt_kind != last_opt_kind) {
echoerr("options and JSON options must be grouped, not mixed")
e = 1
exit
}
seen_json_group = 1
# This is a JSON argument
delete_val = make_jqprog(ARGV[i], ARGV[i+1])
# Always delete the argument name. Optionally delete the next item,
# depending on if it was the next option (name was a flag) or an
# argument value.
delete ARGV[i]
if(delete_val) {
delete ARGV[i+1]
i++
}
} else {
# This is a positional argument
for(i in opts) {
echoerr("all positional parameters must precede all options")
e = 1
exit
}
p++
# Check for help subcommand
if(ARGV[i] in help_a) {
help()
}
# Store and remove the argument
pos[i-1] = ARGV[i]
delete ARGV[i]
}
}
last_opt_kind = opt_kind
}
# Track the indent level as the spec is processed and values assigned.
# The indent level is essentially the tree traversal. We go down one
# path, from the root through all of the subcommand nodes. Along the way
# each subcommand can have options, and the final subcommand can have
# positional parameters as well as options. The order of the options and
# positonal parameters in the spec determines the order of the values
# that are output.
indents = ""
# The collected, IFS separated, ordered argument values that will be
# returned.
args = ""
# The function name to execute, constructed from program_subcommand based
# on the given arguments.
func_name = ""
# Check for the config option, to show or edit the rc config file
if(pos[2] == "config") {
for(i=2; i<=p; i++) {
pos[i] = pos[i+1]
}
# Set the variable config to the name of the program being configured.
# This serves as both a flag and as information for later.
config = pos[1]
}
# Both subcommands and positional arguments are stored in the same
# positional array. As each is resolved p is incremented to advance
# through the positional array. Once all subcommands are resolved,
# helping to build the function name, the remaining positional values are
# assigned in order as positional values.
p = 1
}
# Skip lines starting with # and blank lines
/^ #/ || /^$/ {
next
}
{
# Are we looking at the indent level of the spec that we are interested
# in?
indented = $0 ~ "^" indents "[-_A-Za-z0-9.]"
if(! indented) {
match($0, /^ +/)
if(RLENGTH < length(indents)) {
echodebug("")
echodebug("spec de-indented")
exit
}
}
}
# Spec entry starts with a "-", which indicates an option.
indented && substr($1, 1, 1) ~ "[-.]" {
echodebug("")
echodebug("processing spec option: " $0)
split($1, a, "=")
aliases = a[1]
n_aliases = split(aliases, aliasa, ",")
metavar_pos = n_aliases + 1
opt = ""
# Go through all aliases and see if this is an option in the command line.
for(i=1; i<=n_aliases; i++) {
if(aliasa[i] in opts) {
opt = aliasa[i]
}
}
if(config) {
booldef = ""
} else {
booldef = a[2]
}
# Get the junonia_web call arguments
if(a[1] == "-JUNONIA_WEB_METHOD") {
jw_method = substr($0, index($0, "=") + 1, length($0))
echodebug(" jw_method = " jw_method)
getline
jw_content_t = substr($0, index($0, "=") + 1, length($0))
echodebug(" jw_content_t = " jw_content_t)
getline
jw_url = substr($0, index($0, "=") + 1, length($0))
echodebug(" jw_url = " jw_url)
next
}
if(a[1] == "-JUNONIA_OUTPUT") {
j_output = substr($0, index($0, "=") + 1, length($0))
next
}
if(opt) {
# This option from the spec is one we have in the program arguments.
echodebug(opt " was given")
if(config) {
preconfig = opt
sub(/^-*/, "", preconfig)
gsub(/-/, "_", preconfig)
preconfig = envname "_" preconfig "=\""
postconfig = "\""
} else {
preconfig = ""
postconfig = ""
}
if($metavar_pos ~ /\[[A-Za-z0-9]/) {
# The option can be specified multiple times (brackets around metavar
# in the spec), so this option may have received multiple values.
args = args preconfig opts[opt]
delete opts[opt]
for(i=2; i<=opt_num[opt]; i++) {
args = args JUS opts[opt i]
delete opts[opt i]
}
args = args postconfig
echodebug(" setting value: " postconfig)
} else {
if($metavar_pos) {
# Single value option (no brackets around metavar in spec)
if(junonia_web) {
# For a junonia_web call, the option needs to be foo=bar
value = substr(opt, 2, length(opt)) "=" opts[opt]
} else {
# For a non-junonia_web call, only store the value
value = preconfig opts[opt] postconfig
}
args = args value
echodebug("setting value: " value)
} else {
# Flag (no metavar in spec)
if(opts[opt] == "") {
opts[opt] = booldef
} else {
if(substr(opts[opt], 1, 1) == ".") {
# The string next to this option originally looked like an
# option value, but since this option is a flag, this is
# actually a JSON flag. It has been added to the jqprog
# already, but is commented out. Uncomment it. This keeps
# the order of the program statements, which can be important
# if also consuming the arguments in other ways.
sub("# " opts[opt], opts[opt], jqprog)
echodebug("")
opts[opt] = booldef
}
}
value = preconfig mapbool(opts[opt], opt) postconfig
args = args value
echodebug(" setting value: " value)
}
delete opts[opt]
}
} else {
# None of the option aliases were given on the command line. Try to set
# the values from other sources. The preferred option name for
# environment variables and config file entries is the last given
# option.
opt = aliasa[n_aliases]
if(! config) {
optval = ""
envopt = envname "_" substr(opt, 2)
is_json = "." == substr(opt, 1, 1)
gsub(/[-_\.]/, "_", envopt)
sub(/^_/, "", envopt)
if(booldef) {
optval = mapbool(def, opt)
if(is_json && optval) {
optval = opt "= true"
} else {
optval = opt "= false"
}
} else {
if($2 !~ /\[[A-Za-z0-9]/) {
n = index($0, "=")
if(n) {
optval = substr($0, n + 1)
}
}
}
if(ENVIRON[envopt]) {
if($2) {
optval = ENVIRON[envopt]
} else {
optval = mapbool(ENVIRON[envopt], opt)
if(is_json && optval) {
optval = opt " = true"
} else {
optval = opt " = false"
}
}
}
if(is_json) {
if(optval) {
make_jqprog(opt, optval)
} else {
next
}
} else {
if(junonia_web && opt != "-json") {
args = args substr(opt, 2) "=" optval
} else {
args = args optval
}
}
}
}
args = args JRS
next
}
# Spec entry does not start with hyphen and is all uppercase, which
# indicates this is a positional parameter. Assign the current positional
# parameter value and increment to the next positional value.
indented && $0 ~ /^ *[_A-Z0-9]+=*/ {
if(config && pos[p] != "") {
# The config command cannot be used to set positional paramters
echoerr("positional parameters cannot be set via config: " pos[p])
e = 1
exit
}
param_name = $1
n = index($1, "=")
if(n) {
param_name = substr($1, n - 1) JRS
}
echodebug("")
echodebug("processing positional parameter: " param_name)
if(junonia_web) {
args = args param_name "="
}
if(pos[p] != "") {
# A value was supplied for this parameter
args = args pos[p] JRS
echodebug(" setting value: " pos[p])
p++
} else {
n = index($0, "=")
if(n) {
# A value was not supplied, but there is a default
args = args substr($0, n + 1) JRS
echodebug(" setting value: " substr($0, n+1))
} else {
# No value was supplied, so provide an empty value
args = args "" JRS
echodebug(" setting value: (empty)")
}
}
positional_params++
next
}
indented {
n_aliases = split($0, subcmd_aliases, / *, */)
for(i=1; i<=n_aliases; i++) {
sub(/^ */, "", subcmd_aliases[i])
if(subcmd_aliases[i] == pos[p]) {
# Spec entry does not start with hyphen and is not all caps, which
# indicates this is a subcommand. Start or add to the function name
# which will be executed and increment to the next positional value.
if(func_name) {
envname = envname "_" subcmd_aliases[1]
func_name = func_name "_" subcmd_aliases[1]
} else {
envname = toupper(subcmd_aliases[1])
func_name = subcmd_aliases[1]
}
# See if this subcommand is a junonia web request
if(subcmd_aliases[n_aliases] == "junonia_web") {
echodebug("found junonia_web alias")
junonia_web = 1
}
indents = indents " "
echodebug("")
echodebug("envname: " envname)
echodebug("func_name: " func_name)
echodebug("indents: " length(indents))
p++
next
}
}
}
END {
# There was an error or early success and we should just exit.
if(e != "") {
exit e
}
if(junonia_web) {
# Determine how many upper case parameters there are to replace in the url
tmp_url = jw_url
n_opts = gsub(/{[-_\.A-Z0-9]+}/, "", tmp_url)
# If more parameter values were given than can be substituted, error
if(positional_params > n_opts) {
echoerr("too many parameters: " pos[p])
echoerr("URL format given: " jw_url)
exit 1
}
# Add any extra options to the query string
for(opt in opts) {
value = substr(opt, 2, length(opt)) "=" opts[opt]
args = args "&" value
}
} else {
# There are leftover parameters so something was invalid.
if(pos[p]) {
echoerr("unknown parameter: " pos[p])
exit 1
}
# There are leftover options so something was invalid.
if(! junonia_web) {
for(i in opts) {
echoerr("unknown option: " i)
exit 1
}
}
}
# Add the end of the jq program that can produce the JSON value
end_jqprog()
# If the config subcommand was specified, append it to the function name
if(config) {
func_name = config "_config"
}
# If there was an output section add that
# junonia_web always expects to get an output arg, even if empty
if(j_output) {
args = j_output JRS args
} else {
if(junonia_web) {
args = "" JRS args
}
}
# This is a junonia_web call
if(junonia_web) {
args = func_name JRS jw_method JRS jw_content_t JRS jw_url JRS args
func_name = "junonia_web"
}
# Output everything properly separated for processing.
print jqprog JGS func_name JRS args
}'
printf "%s\n" "$spec" | awk -v "JGS=$JGS" \
-v "JRS=$JRS" \
-v "JUS=$JUS" \
-v "JEOT=$JEOT" \
"$JUNONIA_AWKS $awk_prog" - "$JUNONIA_NAME" "$@"
}
###
### User facing run entry functions
###
# Perform a search for defaults and run with them if found.
# Locate a program specification and dispatch to the matching run entry
# point. Sources are tried in order: running junonia itself, a directory of
# markdown docs, a single markdown file, a markdown-producing shell
# function, a spec-producing shell function, and finally a bare spec equal
# to the script name.
#
# Globals:   JUNONIA_NAME, JUNONIA_PATH (read); JUNONIA_DOCDIR (set)
# Arguments: the program's command line ("$@"), passed through untouched
# Returns:   exit status of the resolved run function, or 1 on failure
junonia_run () {
echodebug "begin run"
# This file is the program being run, which is a special case.
if [ "$JUNONIA_NAME" = junonia ]; then
junonia_runmd "$(_junonia_md)" "$@"
return $?
fi
# Look for a filter function
# ${JUNONIA_NAME}_junonia_filter (e.g. myscript_junonia_filter)
if command -v ${JUNONIA_NAME}_junonia_filter >/dev/null 2>&1; then
filter_func=${JUNONIA_NAME}_junonia_filter
else
filter_func=
fi
# Look in some particular paths for program markdown documentation.
# NOTE(review): the loop does not break on the first match, so when more
# than one candidate directory qualifies the last one examined wins —
# confirm that precedence is intended.
for docdir in "$JUNONIA_PATH/usr/share/doc/$JUNONIA_NAME" \
"$JUNONIA_PATH/docs" \
"$JUNONIA_PATH/doc"; do
if [ -d "$docdir" ]; then
if [ -f "$docdir/$JUNONIA_NAME.md" ]; then
JUNONIA_DOCDIR="$docdir"
else
echodebug "top doc file not present: $docdir/$JUNONIA_NAME.md"
fi
else
echodebug "docdir not present: $docdir"
fi
done
# A directory containing markdown docs was found. Run with it.
if [ -n "$JUNONIA_DOCDIR" ]; then
junonia_runmd_filtered "$filter_func" "$JUNONIA_DOCDIR" "$@"
return $?
fi
# There is a markdown file in the same dir as the script named `script.md`.
# Run with it.
if [ -f "$JUNONIA_PATH/$JUNONIA_NAME.md" ]; then
junonia_runmd_filtered "$filter_func" "$JUNONIA_PATH/$JUNONIA_NAME.md" "$@"
return $?
fi
# There is a shell function that can provide a markdown doc named
# script_junonia_md
# so run with it.
# NOTE(review): this branch obtains *markdown* but passes it to
# junonia_runspec_filtered, which treats its argument as a spec string —
# confirm whether junonia_runmd_filtered was intended here.
if command -v ${JUNONIA_NAME}_junonia_md >/dev/null 2>&1; then
md="$(${JUNONIA_NAME}_junonia_md)"
if [ -n "$md" ]; then
junonia_runspec_filtered "$filter_func" "$md" "$@"
return $?
else
echoerr "markdown function content was empty"
return 1
fi
fi
# There is a shell function that can provide a spec named
# script_junonia_spec
# so run with it.
if command -v ${JUNONIA_NAME}_junonia_spec >/dev/null 2>&1; then
spec="$(${JUNONIA_NAME}_junonia_spec)"
if [ -n "$spec" ]; then
junonia_runspec_filtered "$filter_func" "$spec" "$@"
return $?
else
echoerr "program argument spec function returned empty"
return 1
fi
else
# If no spec has been found, make the spec just the script name.
# This allows for script to just source junonia and run a single
# top level command without any additional configuration.
echodebug "setting spec to script name: $JUNONIA_NAME"
spec="$JUNONIA_NAME"
junonia_runspec_filtered "$filter_func" "$spec" "$@"
return $?
fi
# NOTE(review): both branches above return, so this fallback is currently
# unreachable; kept as a defensive guard.
echoerr "unable to locate program specification required to run"
return 1
}
# Take a docs dir of md files, one md file, or md contents as a string, make
# the spec, run the function with the parsed arg values.
# Run from markdown documentation with no filter function applied.
# Arguments: markdown source (dir, file, or string) followed by the
# program command line; all are forwarded unchanged.
junonia_runmd () {
  echodebug "begin runmd"
  set -- "" "$@"
  junonia_runmd_filtered "$@"
}
# Take a docs dir of md files, one md file, or md contents as a string, make
# the spec, put the results through the filter function, then run the function
# with the parsed arg values (which may have been changed by the filter
# function).
# Generate (or load from cache) an argument spec from markdown, then run.
#
# $1 - filter function name ("" for none)
# $2 - markdown source: a directory of .md files, a single .md file, or a
#      multi-line string containing the markdown itself
# remaining args - the program command line
# Returns 1 when no spec can be produced from the given source.
junonia_runmd_filtered () {
  echodebug "begin runmd filtered"

  filter_func="$1"
  shift
  md="$1"
  shift

  # Use a previously cached spec when present and caching is enabled.
  # (A dead "ret=$?" that was unconditionally overwritten below has been
  # removed.)
  spec_cache="$JUNONIA_CACHEDIR/spec"
  if [ -f "$spec_cache" ] &&
     [ "$JUNONIA_CACHE" != 0 ]; then
    spec="$(cat "$spec_cache")"
    cached=cached_
  else
    cached=
    spec=
  fi

  # Assume failure until one of the spec sources below succeeds.
  ret=1
  spec_type=

  if [ -d "$md" ]; then
    readonly JUNONIA_DOCDIR="$md"
    spec="${spec:-"$(_junonia_plugin_docs | _junonia_md2spec "$md"/*.md -)"}"
    ret=$?
    spec_type="${cached}dir"
  elif [ -f "$md" ]; then
    spec="${spec:-"$(_junonia_plugin_docs | _junonia_md2spec "$md" -)"}"
    ret=$?
    spec_type="${cached}file"
  elif [ "$(echo "$md" | wc -l)" -gt 1 ]; then
    spec="${spec:-"$( ( echo "$md"; _junonia_plugin_docs ) | _junonia_md2spec "" -)"}"
    ret=$?
    spec_type="${cached}md_string"
  fi

  if [ -z "$spec" ] || [ "$ret" -ne 0 ]; then
    echoerr "Unable to generate spec from source provided: $md"
    echoerr "Source should be a directory of Markdown, a Markdown file,"
    echoerr "or a shell string variable containing the Markdown contents."
    return 1
  fi

  _junonia_run_final "$filter_func" "$md" "$spec_type" "$spec" "$@"
}
# Take a spec string, run the function with the parsed args values.
# Run from an argument spec string with no filter function applied.
# Arguments: the spec string followed by the program command line.
junonia_runspec () {
  echodebug "begin runspec"
  set -- "" "$@"
  junonia_runspec_filtered "$@"
}
# Take a spec string, put the results through the filter function, then run the
# function with the parsed arg values (which may have been changed by the
# filter function).
# Run from an argument spec string, optionally filtering the parsed
# argument values through the named filter function first.
# $1 - filter function name ("" for none)
# $2 - the spec string; remaining args - the program command line
junonia_runspec_filtered () {
  echodebug "begin runspec filtered"
  ff="$1"
  shift
  _junonia_run_final "$ff" "" "spec_string" "$@"
}
###
### Run execution
###
# Return the first group-separated field of the parsed argument results:
# the generated jq program. Called as `get_jqprog $arg_vals` with IFS set
# to $JGS so "$1" receives everything before the first group separator.
# printf is used instead of echo so a value that happens to begin with
# "-n"/"-e" is not swallowed as an echo option.
get_jqprog () {
  printf '%s\n' "$1"
}
# Extract the jq variable names from the generated program in $JW_JQPROG:
# drop comment lines and the final line, then take the first
# space-separated token of each remaining statement. Ignores its own
# positional arguments; reads only the JW_JQPROG global.
# printf avoids echo's option handling on arbitrary program text.
get_jqargs () {
  printf '%s\n' "$JW_JQPROG" | sed '/^#/d;$d' | cut -d ' ' -f 1
}
# Return the second group-separated field of the parsed argument results:
# the record-separated function name and argument values. Called as
# `get_args $arg_vals` with IFS set to $JGS.
# printf is used instead of echo so a value beginning with "-n"/"-e" is
# passed through unchanged.
get_args () {
  printf '%s\n' "$2"
}
# Finalize and execute a run: splice the standard meta parameters into the
# spec (unless a cached spec already contains them), cache the spec, parse
# the command line into ordered argument values, and hand everything to
# _junonia_exec.
#
# $1 - filter function name ("" for none)
# $2 - markdown source (may be empty for spec strings)
# $3 - spec type label (dir, file, md_string, spec_string, cached_*)
# $4 - the spec text
# remaining args - the program command line
_junonia_run_final () {
echodebug "begin run final"
filter_func="$1"
shift
md="$1"
shift
spec_type="$1"
shift
spec="$1"
shift
if [ "${spec_type%_*}" != cached ]; then
# Insert the standard meta parameters.
# The awk program buffers comment lines and inserts the meta command
# block ($_junonia_cmds, passed as s) after the first top-level entry.
spec="$(
echo "$spec" | awk '
BEGIN {
s = ARGV[1]
delete ARGV[1]
}
/^ *#/ {
comments = comments $0 "\n"
}
/^[a-z]/ {
insert_meta = 1
if(comments) print substr(comments, 1, length(comments) - 1 )
comments = ""
print
next
}
insert_meta && /^ [a-z]/ {
print s
if(comments) print substr(comments, 1, length(comments) - 1 )
comments = ""
print
insert_meta = 0
next
}
! /^ *#/ {
if(comments) print substr(comments, 1, length(comments) - 1 )
comments = ""
print
}
END {
if(insert_meta) {
print s
}
}
' "$_junonia_cmds" -
)"
# The spec is usually quite long, so suppress the printing here unless needed.
#echodebug "final spec"
#echodebug_raw "$spec"
junonia_cache_file spec "$spec"
fi
# Retrieve the argument values in the order defined in the spec.
if ! arg_vals="$(_junonia_set_args "$md" "$spec_type" "$spec" \
"$@")"; then
# An error should have been supplied on stderr
return 1
fi
echodebug "all argument values returned to set"
echodebug "$arg_vals"
# The results of argument parsing always comes back as:
# jqprog<GS>func_name<RS>arg1<RS><arg2>...
# To separate the jqprog from the rest, it is easiest to make two functions
# that send back $1 and $2, with IFS set to GS.
IFS=$JGS
readonly JW_JQPROG="$(get_jqprog $arg_vals)"
echodebug "JQ_PROG"
echodebug_raw "$JW_JQPROG"
# NOTE: get_jqargs takes no arguments; it reads the JW_JQPROG global set
# just above.
readonly JW_JQARGS="$(get_jqargs)"
args="$(get_args $arg_vals)"
# Since we are handling values that can be explicitly blank / empty, and
# values that have whitespace that might need to be preserved, change the IFS
# to RS, which is what separates the function name and arguments from each
# other.
IFS=$JRS
# Pass the execution info to a filter function. This allows us to handle the
# argument values as $@, and use shift to remove common options as specified
# by the filter function. Using a user filter function is optional, and in
# that case every function will receive every option; all common options in
# the spec tree path.
# $args is deliberately unquoted so it splits on the RS characters in IFS.
_junonia_exec "$filter_func" "$md" "$spec_type" "$spec" $args
}
# Receive function argument values, send them through the filter if needed,
# then execute the specified function with the values.
# Dispatch the resolved function with its parsed argument values.
# Runs the filter function (or the built-in meta filter) to decide how many
# leading common values to shift off, generates the JSON value from any '.'
# options via jq, loads the function's source file if needed, and finally
# invokes it under the selected interpreter (sh function, python, or other).
_junonia_exec () {
echodebug "begin exec"
echodebug "exec args:"
echodebug_raw "$@"
# Each value from the parsed args are now their own word, so the IFS can go
# back to normal.
unset IFS
filter_func="$1"
shift
md="$1"
shift
spec_type="$1"
shift
spec="$1"
shift
func="$1"
shift
if [ "$func" = "junonia_web" ]; then
# Need to pop these off and put them back so the filter function,
# if any, can correctly operate on the right options.
jw_func="$1"
shift
jw_method="$1"
shift
jw_content_t="$1"
shift
jw_url="$1"
shift
jw_output="$1"
shift
echodebug "jw_func: $jw_func"
echodebug "jw_method: $jw_method"
echodebug "jw_content_t: $jw_content_t"
echodebug "jw_url: $jw_url"
echodebug "jw_output: $jw_output"
fi
if [ -z "$func" ]; then
echoerr "no operation given to perform"
return 1
fi
shift_n=0
# Meta-commands should not be put through filters.
if func="$(_junonia_normalize_func "$func")"; then
# If the user filter is getting skipped then a meta parameter is being run.
# So the script's top level positional parameters and options need to be
# filtered out. Does not apply to config and help.
JUNONIA_INTERP=sh
if [ "$func" = _junonia_help ] || [ "$func" = _junonia_config ]; then
shift_n=0
else
# Count the script-level positional parameters and options in the spec;
# that many leading values are shifted off below.
awk_prog='BEGIN { shift = 0 }
/^ [-[A-Z]/ { shift++ }
END { print shift }'
shift_n="$(echo "$spec" | awk "$awk_prog")"
fi
else
# If there is a filter function and it is not getting skipped, then run it.
if [ -n "$filter_func" ] && command -v "$filter_func" >/dev/null 2>&1; then
$filter_func "$@"
shift_n=$?
# 64 is standard EX_USAGE, command used incorrectly. This is what filter
# functions should return to indicate a problem.
if [ $shift_n -eq 64 ]; then
# A program level error should be printed for this problem.
return 1
fi
fi
fi
# The filter function might indicate via its return value, or the meta filter
# by looking at the spec, that we should shift off some common (and possibly
# other) values.
echodebug "$filter_func shifting $shift_n arguments"
echodebug "args before shifting:"
echodebug_raw "$@"
i=0
n_args=$#
while [ $i -lt $n_args ] && [ $i -lt $shift_n ]; do
shift
i=$(( $i + 1 ))
done
echodebug "args after shifting:"
echodebug_raw "$@"
if [ "$func" = "$JUNONIA_NAME" ]; then
func_location="$(command -v "$func")"
if [ "$(echo "$func_location" | cut -c 1)" = / ]; then
# Shell command named the same as the top level script, so return 0. This
# assumes it is this script, which is a safe assumption because this
# script will just continue executing.
return 0
fi
if [ -z "$func_location" ]; then
# Command named the same as the top level script is not found. Return 0
# so that the top level script can continue.
return 0
fi
# If the func location resolves like a shell function, then fall through so
# it gets called.
fi
# At this point, all of the following are set and fixed.
readonly JUNONIA_CMD="$(echo "$func" | sed 's/_/ /g')"
readonly JUNONIA_FUNC="$(echo "$func" | sed 's/-/_/g')"
readonly JUNONIA_MD="$md"
readonly JUNONIA_SPEC_TYPE="$spec_type"
readonly JUNONIA_SPEC="$spec"
echodebug "JUNONIA_CMD : $JUNONIA_CMD"
echodebug "JUNONIA_FUNC : $JUNONIA_FUNC"
echodebug "JUNONIA_SPEC_TYPE: $JUNONIA_SPEC_TYPE"
echodebug "args:"
echodebug_raw "$@"
# If a jq program was generated we assume that that feature is desired.
if [ -n "$JW_JQPROG" ]; then
echodebug "generated jq program:"
echodebug_raw "$JW_JQPROG"
if junonia_require_cmds jq; then
if ! JW_JSON="$(jq -nr "$JW_JQPROG")"; then
echoerr "unable to generate JSON value from supplied '.' options"
echoerr "generated program was:"
echoerr_raw "$JW_JQPROG"
else
echodebug "resulting JSON:"
echodebug_raw "$JW_JSON"
readonly JW_JSON
fi
else
echoerr "arguments prefixed with '.' are used for JSON generation"
return 1
fi
else
JW_JSON=
fi
# If the command hasn't already been sourced or defined somewhere, try to
# discover it by checking for files corresponding to the function name.
if ! _junonia_load_func "$JUNONIA_FUNC"; then
return 1
fi
# A function file was found and sourced, and the function was found. Execute
# the function.
if [ "$func" = "junonia_web" ]; then
$JUNONIA_FUNC "$jw_func" "$jw_method" "$jw_content_t" "$jw_url" "$jw_output" "$@"
else
echodebug "JUNONIA_INTERP: $JUNONIA_INTERP"
case "$JUNONIA_INTERP" in
sh)
$JUNONIA_FUNC "$@"
;;
python|python3|pypy3|pypy)
echodebug "executing python command"
PYTHONPATH="$JUNONIA_PATH/lib/python" "$JUNONIA_INTERP" "$JUNONIA_FUNCPATH" "$@"
;;
*)
if command -v "$JUNONIA_INTERP"; then
echodebug "attempting execution with $JUNONIA_INTERP"
"$JUNONIA_INTERP" "$JUNONIA_FUNCPATH" "$@"
else
echoerr "Unable to execute $func with $JUNONIA_INTERP"
return 1
fi
;;
esac
fi
}
# Configure if debug messages will be printed.
junonia_setdebug "$JUNONIA_DEBUG"
# Set up the execution environment. Init is always safe to rerun as it has a
# guard. If junonia is copied into the bottom of a script for single-file
# distribution, for example, junonia_init will need to be run at the top. When
# this one runs it will just return. Therefore, for single-file distributed
# scripts, use an explicit exit statement before the junonia code to return
# anything other than 0.
junonia_init
# If the program we are running is called 'junonia', then the base program /
# this library is being run with just the meta-commands. Yay! Time to run and
# probably go get some plugins!
if [ "$JUNONIA_NAME" = "junonia" ]; then
junonia_run "$@"
fi
| true |
2f6188d2edac3ee0126de3ea8b95b26a6a475036 | Shell | bil0u/.dotfiles | /home/.chezmoitemplates/darwin/load-brew.sh | UTF-8 | 183 | 2.65625 | 3 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | if ! command -v brew &>/dev/null; then
{{- if eq .host.arch "arm64" }}
eval "$(/opt/homebrew/bin/brew shellenv)"
{{- else }}
eval "$(/usr/local/bin/brew shellenv)"
{{- end }}
fi
| true |
dd17b54162c88059c0e03aaf48cfadb08ad6aa3d | Shell | glo-fi/DetGen-1 | /GUI_files/GUI_file.sh | UTF-8 | 515 | 3.125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Scenario names used elsewhere in DetGen; currently unused in this script
# but kept for compatibility with anything that sources it.
Scenarios=("Ping" "HTTP-server-client" "FPT-client-server" "File-synchronisation" "SSH-client-server" "IRC" "BitTorrent")

# Ask the user for the file-length parameters. zenity prints the form
# fields joined by "|" on stdout (empty if the dialog is cancelled).
Params=$(
  zenity --forms --title="File length" --add-list="Specification" --list-values="Randomise|Manual" --add-entry="File length" --add-entry="Filename length" --add-entry="Directory length"
)

# Field 0 is the Specification list selection.
if [[ $Params == *"Randomise"* ]]; then
  FileRandomisation="1"
fi

# Split the "|"-separated fields. IFS is scoped to the read so the global
# IFS is left untouched, and -r keeps any backslashes in the input literal.
IFS='|' read -r -a strarr <<< "$Params"
FLength="${strarr[1]}"
FNLength="${strarr[2]}"
DLength="${strarr[3]}"
| true |
5d0c1702dbc95860d5fb8d62eda2290ffebbcd09 | Shell | yuanyiyixi/shell-sh | /test_while.sh | UTF-8 | 118 | 2.828125 | 3 | [] | no_license | #! /bin/sh
# Keep prompting until the user types the password "secret".
echo "Enter password:"
# -r keeps backslashes in the input literal; exit if stdin is exhausted
# (EOF) so the loop cannot spin forever re-reading a failed input.
read -r TRY || exit 1
while [ "$TRY" != "secret" ]; do
  echo "Sorry, try again"
  read -r TRY || exit 1
done
| true |
dc1d49cb22cc7a0b4886b515fc58230b84992583 | Shell | kroglice/o2ac-ur | /docker/o2ac-dev/scripts/run-command-repeatedly.sh | UTF-8 | 1,374 | 4.15625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
################################################################################
# Repeatedly offer to re-run a single command, with an optional immediate
# first run (--run-immediately) and an escape hatch into a child shell.

RUN_ON_STARTUP="false"

case "$1" in
  --run-immediately )
    RUN_ON_STARTUP="true"
    shift # Moves remaining positional parameters down by one
    ;;
esac

COMMAND="$1"
if [ -z "${COMMAND}" ]; then
  echo "No command is specified."
  echo "Usage: $0 COMMAND"
  exit 1
fi

# ANSI attributes used to build the interactive prompt.
PROMPT_START=$'\e[4m\e[1m'
PROMPT_END=$'\e[0m'
KEY_START=$'\e[7m'
KEY_END=$'\e[0m'${PROMPT_START}
PROMPT="${PROMPT_START}Run '${COMMAND}'? Press:"$'\n'"'${KEY_START}r${KEY_END}' to run, "$'\n'"'${KEY_START}c${KEY_END}' to enter a child shell,"$'\n'"'${KEY_START}q${KEY_END}' to quit.${PROMPT_END}"$'\n'

if [ "${RUN_ON_STARTUP}" = "true" ]; then
  echo "Executing the command '${COMMAND}' once without waiting for confirmation"
  ${COMMAND}
  echo "" # Display an empty line.
fi

# Main loop: one keypress per iteration.
while true; do
  read -n 1 -s -p "${PROMPT}" input
  case "${input}" in
    r)
      echo ""
      ;;
    q)
      break
      ;;
    c)
      cat <<EOF
Starting a new shell process.
You will return to the above prompt when you exit from this shell.
EOF
      bash -i
      continue
      ;;
    *)
      continue
      ;;
  esac
  # Reached only for 'r': report the environment and run the command
  # unquoted so its arguments word-split as typed.
  echo "ROS_MASTER_URI: ${ROS_MASTER_URI}"
  ${COMMAND}
  echo "" # Display an empty line.
done

cat <<EOF
Starting a new shell process.
EOF
exec bash -i
| true |
054e66e679994d2a4035fab43c1cda8e8ffc2ec9 | Shell | maradwan/Python-Bash-Scripts | /bash/100.sh | UTF-8 | 712 | 2.515625 | 3 | [] | no_license | #!/usr/bin/expect -f
# Automates an SSH session to a network device in order to change both the
# admin login password and the enable secret.
#
# Usage: 100.sh HOST OLD_ENABLE OLD_ROOT NEW_ENABLE NEW_ROOT
# NOTE(review): passwords are passed on the command line, which exposes
# them to `ps` and shell history — consider reading them from stdin.

# Give each expect/send exchange up to 60 seconds before giving up.
set timeout 60

# Positional arguments.
set HOST [lindex $argv 0 ]
set OLDENABLE [lindex $argv 1]
set OLDROOT [lindex $argv 2]
set NEWENABLE [lindex $argv 3]
set NEWROOT [lindex $argv 4]

# Open the SSH session as the fixed user "admin".
spawn ssh admin@$HOST
sleep 2

# Accept the host key on first connect if prompted, then answer the
# password prompt with the old admin password either way.
expect "yes/no" {
send "yes\r"
expect "*?assword" { send "$OLDROOT\r" }
} "*?password" { send "$OLDROOT\r" }

# Enter privileged EXEC mode with the old enable password.
expect ">" { send "enable\r" }
expect "Password:" { send "$OLDENABLE\r" }

# In configuration mode, set the new admin password and enable secret.
expect "#" { send "config terminal\r" }
sleep 2
expect "#" { send "system user admin $NEWROOT\r" }
sleep 2
expect "#" { send "enable secret $NEWENABLE\r" }
sleep 2
expect "#" { send "end\r" }
sleep 2

# Persist the configuration, then hand the session over to the user.
expect "#" { send "write\r" }
sleep 2
expect "#" { send "end\r" }
expect ">" { send "exit\r" }
interact
| true |
a21e9bdee6bb21d79df17478b1c2958b10320be4 | Shell | ianwremmel/dotfiles | /plugins/vscode/vscode | UTF-8 | 515 | 3.421875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Declares this plugin's dependencies for the dotfiles framework.
# NOTE(review): bash cannot export array variables to child processes, so
# this value is only visible when the file is sourced — confirm the
# framework sources plugin files rather than executing them.
export DOTFILES_VSCODE_DEPS=('homebrew')
# Symlink the VS Code CLI helper ("code") into Homebrew's bin directory
# when VS Code is installed but the helper is not yet on the PATH.
# Returns 0 (after a debug note) when there is nothing to do.
dotfiles_vscode_apply () {
  local helper='/Applications/Visual Studio Code.app/Contents/Resources/app/bin/code'

  if [ ! -f "$helper" ]; then
    debug "VSCode does not appear to be installed"
    return 0
  fi

  if command -v code > /dev/null 2>&1; then
    debug "VSCode CLI helper already appears to be installed"
    return 0
  fi

  # Best effort: failures (e.g. an existing file) are deliberately ignored.
  ln -s "$helper" "$(brew --prefix)/bin/code" >/dev/null 2>&1
}
| true |
8f0967dd90ff51e24aa09ac6037f1c0431ddf6a8 | Shell | jonsk/xglibs | /dev-lang/perl/perl-5.12.0.xgb | UTF-8 | 1,824 | 3.6875 | 4 | [] | no_license | #!/bin/bash
#
# Xiange Linux build scripts
#
# Package metadata read by the gpkg build tool before the xgb_* phase
# functions below are invoked.

# Short one-line description of this package.
DESCRIPTION="Larry Wall's Practical Extraction and Report Language"
# Homepage, not used by Portage directly but handy for developer reference
HOMEPAGE="http://www.perl.org/"
# Point to any required sources; these will be automatically downloaded by
# gpkg.
# $N = package name, such as autoconf, x-org
# $V = package version, such as 2.6.10
# NOTE(review): $R appears to be a revision suffix supplied by gpkg — confirm.
SRC_URI="http://www.cpan.org/src/$N-$V$R.tar.bz2"
# Binary package URI.
BIN_URI=""
# Runtime Depend
RDEPEND=""
# Build time depend
DEPEND="${RDEPEND}"
#unpack
xgb_unpack()
{
	#unpack the source tarball from $XGPATH_SOURCE into the current
	#directory. The path is quoted so it survives whitespace in
	#$XGPATH_SOURCE.
	echo "Unpacking ..."
	tar xf "$XGPATH_SOURCE/$N-$V$R.tar.bz2"
}
#config
xgb_config()
{
	#first, cd into the build directory; bail out if it does not exist so
	#the sed/Configure calls below never run against the wrong directory.
	cd "$N-$V$R" || return 1

	#This version of Perl now builds the Compress::Raw::Zlib module.
	#By default Perl will use an internal copy of the Zlib source
	#for the build. Issue the following command so that Perl
	#will use the Zlib library installed on the system:
	sed -i -e "s|BUILD_ZLIB\s*= True|BUILD_ZLIB = False|" \
	-e "s|INCLUDE\s*= ./zlib-src|INCLUDE = /usr/include|" \
	-e "s|LIB\s*= ./zlib-src|LIB = /usr/lib|" \
	cpan/Compress-Raw-Zlib/config.in

	#second, add package specified config params to XGB_CONFIG
	XGB_CONFIG="-des -Dprefix=/usr \
	-Dvendorprefix=/usr \
	-Dman1dir=/usr/share/man/man1 \
	-Dman3dir=/usr/share/man/man3 "

	#Third, call configure with $XGB_CONFIG ($XGB_CONFIG is deliberately
	#unquoted so the options word-split into separate arguments).
	sh Configure $XGB_CONFIG -Dpager="/usr/bin/less -isR"
	err_check "config $N failed."

	#Record the pager option in XGB_CONFIG for later phases.
	XGB_CONFIG+=" -Dpager=\"/usr/bin/less -isR\""
}
#build
#Compile the unpacked source. Assumes xgb_config already ran and left the
#current directory inside the configured build tree.
xgb_build()
{
#run make in current directory
make
}
#install
xgb_install()
{
	#install everything to $XGPATH_DEST; DESTDIR is quoted so staging
	#directories containing whitespace work.
	make DESTDIR="$XGPATH_DEST" install
}
| true |
9461b4ab20b805272ac4a81788906d22c401b45c | Shell | rheehot/openSUSE | /packages/s/systemd/scripts-systemd-migrate-sysconfig-i18n.sh | UTF-8 | 4,240 | 3.34375 | 3 | [] | no_license | #! /bin/bash
# /etc/sysconfig/console  | /etc/vconsole.conf
# -------------------------+---------------------
# CONSOLE_FONT             | FONT
# CONSOLE_SCREENMAP        | FONT_MAP
# CONSOLE_UNICODEMAP       | FONT_UNIMAP
# Migrate console font settings per the table above.  Each legacy value
# is copied only when the corresponding vconsole.conf key is not already
# set; a summary of what was migrated is then printed to stderr.
migrate_locale () {
local migrated=""
if ! test -f /etc/sysconfig/console; then
return
fi
# Source both files so old and new values can be compared as variables.
source /etc/sysconfig/console || return
if test -f /etc/vconsole.conf; then
source /etc/vconsole.conf || return
fi
if test -n "$CONSOLE_FONT" && test -z "$FONT"; then
echo "FONT=$CONSOLE_FONT" >>/etc/vconsole.conf
migrated+="CONSOLE_FONT "
fi
if test -n "$CONSOLE_SCREENMAP" && test -z "$FONT_MAP"; then
echo "FONT_MAP=$CONSOLE_SCREENMAP" >>/etc/vconsole.conf
migrated+="CONSOLE_SCREENMAP "
fi
if test -n "$CONSOLE_UNICODEMAP" && test -z "$FONT_UNIMAP"; then
echo "FONT_UNIMAP=$CONSOLE_UNICODEMAP" >>/etc/vconsole.conf
migrated+="CONSOLE_UNICODEMAP "
fi
if test -n "$migrated"; then
echo >&2 "The following variables from /etc/sysconfig/console have been migrated"
echo >&2 "into /etc/vconsole.conf:"
echo >&2
# ${!v} is indirect expansion: print each migrated variable's value.
for v in $migrated; do echo " - $v=${!v}"; done
echo >&2
echo >&2 "Please edit /etc/vconsole.conf if you need to tune these settings"
echo >&2 "as /etc/sysconfig/console won't be considered anymore."
echo >&2
fi
}
# /etc/sysconfig/keyboard  | /etc/vconsole.conf
# -------------------------+---------------------
# KEYTABLE                 | KEYMAP
#
# Copy the legacy KEYTABLE setting into /etc/vconsole.conf (as KEYMAP)
# unless vconsole.conf already defines one, then tell the administrator
# what was migrated (informational text goes to stderr).
migrate_keyboard () {
local moved=""
if ! [ -f /etc/sysconfig/keyboard ]; then
return
fi
# Load old and (optionally) new settings so they can be compared.
source /etc/sysconfig/keyboard || return
if [ -f /etc/vconsole.conf ]; then
source /etc/vconsole.conf || return
fi
if [ -n "$KEYTABLE" ] && [ -z "$KEYMAP" ]; then
echo "KEYMAP=$KEYTABLE" >>/etc/vconsole.conf
moved+="KEYTABLE "
fi
if [ -n "$moved" ]; then
echo >&2 "The following variables from /etc/sysconfig/keyboard have been migrated"
echo >&2 "into /etc/vconsole.conf:"
echo >&2
for v in $moved; do echo " - $v=${!v}"; done
echo >&2
echo >&2 "Please use localectl(1) if you need to tune these settings since"
echo >&2 "/etc/sysconfig/keyboard won't be considered anymore."
echo >&2
fi
}
# According to
# https://www.suse.com/documentation/sles-12/book_sle_admin/data/sec_suse_l10n.html,
# variables in /etc/sysconfig/language are supposed to be passed to
# the users' shell *only*. However it seems that there has been some
# confusion and they ended up configuring the system-wide locale as
# well. The logic followed by systemd was implemented in commit
# 01c4b6f4f0d951d17f6873f68156ecd7763429c6, which was reverted. The
# code below follows the same logic to migrate content of
# /etc/sysconfig/language into locale.conf.
migrate_language () {
local lang=
local migrated=false
if ! test -f /etc/sysconfig/language; then
return
fi
source /etc/sysconfig/language || return
# Current system-wide LANG (empty if locale.conf is absent or unset).
lang=$(grep ^LANG= /etc/locale.conf 2>/dev/null)
lang=${lang#LANG=}
# ROOT_USES_LANG comes from the sourced /etc/sysconfig/language.
case "$ROOT_USES_LANG" in
yes)
if test -z "$lang" && test -n "$RC_LANG"; then
echo "LANG=$RC_LANG" >>/etc/locale.conf
migrated=true
fi
;;
ctype)
if ! grep -q ^LC_CTYPE= /etc/locale.conf 2>/dev/null; then
# First non-empty of: current LANG, RC_LC_CTYPE, RC_LANG wins.
: ${lc_ctype:="$lang"}
: ${lc_ctype:="$RC_LC_CTYPE"}
: ${lc_ctype:="$RC_LANG"}
if test -n "$lc_ctype"; then
echo "LC_CTYPE=$lc_ctype" >>/etc/locale.conf
migrated=true
fi
fi
;;
esac
# $migrated is the literal command "true" or "false".
if $migrated; then
echo >&2 "The content of /etc/sysconfig/language has been migrated into"
echo >&2 "/etc/locale.conf. The former file is now only used for setting"
echo >&2 "the locale used by user's shells. The system-wide locale is"
echo >&2 "only read from /etc/locale.conf since now."
echo >&2
echo >&2 "Please only use localectl(1) or YaST if you need to change the"
echo >&2 "settings of the *system-wide* locale from now."
fi
}
# The marker could have been incorrectly put in /usr/lib. In this case
# move it to its new place.
mv /usr/lib/systemd/scripts/.migrate-sysconfig-i18n.sh~done \
/var/lib/systemd/i18n-migrated &>/dev/null
# One-shot driver: run all three migrations and drop the marker file
# (which suppresses future runs) only when every step returned success.
if ! test -e /var/lib/systemd/i18n-migrated; then
declare -i rv=0
migrate_locale; rv+=$?
migrate_keyboard; rv+=$?
migrate_language; rv+=$?
test $rv -eq 0 && touch /var/lib/systemd/i18n-migrated
fi
| true |
eca15331f74b7079cc5a00910408f22d30548d21 | Shell | tatsuya4649/awst | /get | UTF-8 | 845 | 3.90625 | 4 | [] | no_license | #!/bin/bash
# get_tag_instance
source ./utils/gets
source ./utils/get
function get(){
	# Query instances by tag and state (AWS helper).
	#   -t "Key=key,Values=value"  tag filter; prompted for via get_tag when omitted
	#   -s STATE                   state filter (default "running")
	#   -q                         quiet: discard informational output
	#   -h                         print usage and exit
	# Relies on get_tag / get_tag_instances sourced from ./utils above.
	function usage_exit(){
		echo "Usage: $0 [-t Tag] [-s State] [-q]"
		echo -e "\t-t: Search Tag(\"Key=key,Values=value\")"
		echo -e "\t-s: Search State(default \"running\")"
		echo -e "\t-q: Output Quiet"
		exit 1
	}
	local OPT OPTIND OPTARG QUIET
	while getopts ":t:s:hq" OPT; do
		case $OPT in
			t)
				TAGS=$OPTARG
				;;
			s)
				STATE=$OPTARG
				;;
			q)
				# Stored as a redirection *string*; it only takes effect
				# inside the eval'ed echo commands below.  It is also passed
				# as extra words to get_tag_instances — presumably eval'ed
				# there too; TODO confirm against utils/gets.
				QUIET=" >/dev/null 2>&1"
				;;
			h) usage_exit
				;;
			\?) usage_exit
				;;
		esac
	done
	# Default Search State => "running"
	STATE=${STATE:-"running"}
	if [ -z "$TAGS" ]; then
		get_tag
	fi
	# first argument: TAGS
	# second argument: STATE
	eval "echo \"Search State => $STATE\" $QUIET"
	eval "echo \"Search Tag => $TAGS\" $QUIET"
	get_tag_instances $TAGS $STATE $QUIET
}
# Forward the command-line arguments verbatim; quoting "$@" preserves
# arguments that contain whitespace (e.g. -t "Key=k,Values=v").
get "$@"
| true |
39580f3a99e63b4d8ca593711a28a368b1224476 | Shell | fbcotter/dotfiles | /vim/copy_files.sh | UTF-8 | 1,909 | 4.03125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
MYDIR=$( cd $(dirname $0) ; pwd -P )
BASHDIR=$HOME
# makesymlink SRC DEST
# Create (or refresh) the symlink DEST -> SRC.
#  - An existing symlink at DEST is removed and recreated, so it is
#    guaranteed to point at SRC afterwards.
#  - An existing regular file at DEST is preserved as DEST.old.
# Fix: every expansion is quoted so paths containing spaces work.
function makesymlink {
    if [ -h "$2" ]; then
        rm "$2"
    elif [ -f "$2" ]; then
        echo "$2 exists. Moving it to $2.old"
        mv "$2" "$2.old"
    fi
    ln -s "$1" "$2"
}
# Copy vimrc file
# Link the vim/gvim rc files and the neovim config from this repo ($MYDIR)
# into $HOME, fetch the plugin managers (vim-plug for neovim, Vundle for
# vim) when absent, then link the shared ftplugin files into both vim and
# neovim ftplugin directories.
for file in .{vimrc,gvimrc}
do
	[ -r "$MYDIR/$file" ] && [ -f "$MYDIR/$file" ] && \
		echo "Symlinking $file" && \
		makesymlink $MYDIR/$file $HOME/$file;
done;
unset file;
mkdir -p ~/.config/nvim
makesymlink $MYDIR/init.vim $HOME/.config/nvim/init.vim
# Download vim-plug for neovim
# (presence of the plugged/ dir is used as the "already installed" marker)
if [ ! -d $HOME/.config/nvim/plugged ]; then
	echo "Downloading vim-plug for neovim"
	curl -fLo ~/.local/share/nvim/site/autoload/plug.vim --create-dirs \
		https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
fi
# Download vundle for vim
if [ ! -d "$HOME/.vim/bundle/Vundle.vim" ]; then
	echo "Downloading Vundle for Vim"
	mkdir -p ~/.vim/bundle/Vundle.vim
	git clone https://github.com/VundleVim/Vundle.vim.git ~/.vim/bundle/Vundle.vim
fi
# Copy ctags.vim file
makesymlink $MYDIR/cscope.vim $HOME/.vim/cscope.vim
# Copy ftplugin files
# NOTE(review): this mkdir is redundant — the loop below runs mkdir -p for
# both ftplugin directories anyway.
mkdir -p ~/.vim/ftplugin
for ftplugin in {$HOME/.vim/ftplugin,$HOME/.config/nvim/after/ftplugin}
do
	mkdir -p $ftplugin;
	makesymlink $MYDIR/ftplugin/python.vim $ftplugin/python.vim;
	makesymlink $MYDIR/ftplugin/tex.vim $ftplugin/tex.vim;
	makesymlink $MYDIR/ftplugin/cpp.vim $ftplugin/cpp.vim;
	makesymlink $MYDIR/ftplugin/markdown.vim $ftplugin/markdown.vim;
	makesymlink $MYDIR/ftplugin/rst.vim $ftplugin/rst.vim;
	makesymlink $MYDIR/ftplugin/json.vim $ftplugin/json.vim;
done;
unset ftplugin;
| true |
2bb938c8c1f54a4f6d881d7059bf55281302d3dd | Shell | yiweig/the-playbook | /scripts/write_image.sh | UTF-8 | 694 | 3.21875 | 3 | [] | no_license | #!/bin/bash
# Write a Raspberry Pi OS image onto an SD card using macOS's diskutil.
# WARNING: erases ${DEVICE} unconditionally.
DEVICE_NAME="PIXEL"
IMAGE_FILE_PATH=/Users/cmoore/Desktop/ubuntu-minimal-16.04-server-armhf-raspberry-pi.img
DEVICE=/dev/disk2
# NOTE(review): 'stat -c%s' is GNU stat syntax; stock macOS stat uses
# '-f %z' — confirm which stat binary is on PATH here.
FILESIZE_IN_BYTES=$(stat -c%s $IMAGE_FILE_PATH)
# The MB figure was only used by the commented-out pv pipeline below; the
# live dd invocation reports progress itself via status=progress.
FILESIZE_IN_MB=$(( ${FILESIZE_IN_BYTES%% *} / 1024 / 1024))
# echo $FILESIZE_IN_MB
# echo "diskutil eraseDisk FAT32 ${DEVICE_NAME} MBRFormat ${DEVICE}"
diskutil eraseDisk FAT32 "${DEVICE_NAME}" MBRFormat "${DEVICE}"
# echo "diskutil umount /dev/disk2"
diskutil umount "${DEVICE}"
# echo "diskutil umount /dev/disk2s1"
diskutil umount "${DEVICE}s1"
# echo "dd if=${IMAGE_FILE_PATH} | pv -s ${FILESIZE_IN_MB}M | dd bs=4M of=${DEVICE}"
sudo dd bs=4M if="${IMAGE_FILE_PATH}" of="${DEVICE}" status=progress
| true |
db4a939b02d9ce38ec1fa518206e9cf2dae129ec | Shell | zobertke/bashmatic | /lib/brew.sh | UTF-8 | 5,542 | 3.53125 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
#———————————————————————————————————————————————————————————————————————————————
# © 2016 — 2017 Author: Konstantin Gredeskoul
# Ported from the licensed under the MIT license Project Pullulant, at
# https://github.com/kigster/pullulant
#———————————————————————————————————————————————————————————————————————————————
export LibBrew__PackageCacheList="/tmp/.lib_brew_packages.txt"
export LibBrew__CaskCacheList="/tmp/.lib_brew_casks.txt"
# Drop both on-disk brew cache listings so the next lib::brew::*::list
# call re-queries brew instead of serving stale data.
# Fix: the cache paths are quoted against word splitting.
lib::brew::cache-reset() {
  rm -f "${LibBrew__PackageCacheList}" "${LibBrew__CaskCacheList}"
}
# Reset the caches now when running at top level, or — inside a subshell —
# defer removal of the cache files to that subshell's EXIT trap.
lib::brew::cache-reset::delayed() {
# NOTE(review): arithmetic on ${BASH_IN_SUBSHELL} errors if the variable
# is unset/empty — it is assumed to be exported as 0/1 elsewhere; confirm.
(( ${BASH_IN_SUBSHELL} )) || lib::brew::cache-reset
(( ${BASH_IN_SUBSHELL} )) && trap "rm -f ${LibBrew__PackageCacheList} ${LibBrew__CaskCacheList}" EXIT
}
# Install Homebrew if needed, then update / upgrade / cleanup it.
# Returns 1 when brew is still not on PATH after the install attempt.
lib::brew::upgrade() {
lib::brew::install
if [[ -z "$(which brew)" ]]; then
warn "brew is not installed...."
return 1
fi
run "brew update --force"
run "brew upgrade"
run "brew cleanup -s"
}
# Install Homebrew itself via the official ruby installer when absent,
# otherwise just report the detected version.
lib::brew::install() {
# NOTE(review): brew_packages is assigned from $@ but never used in this
# function — confirm it is vestigial.
declare -a brew_packages=$@
local brew=$(which brew 2>/dev/null)
if [[ -z "${brew}" ]]; then
info "Installing Homebrew, please wait..."
/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
else
info "Homebrew is already installed."
info "Detected Homebrew Version: ${bldylw}$(brew --version 2>/dev/null | head -1)"
fi
}
# Convenience entry point: full setup == the upgrade flow above.
lib::brew::setup() {
lib::brew::upgrade
}
# Force-relink one package, honoring $opts_verbose.
lib::brew::relink() {
local package=${1}
local verbose=
[[ -n ${opts_verbose} ]] && verbose="--verbose"
run "brew link ${verbose} ${package} --overwrite"
}
# Cached (30-minute TTL) listings of installed packages / casks.
lib::brew::package::list() {
lib::cache-or-command "${LibBrew__PackageCacheList}" 30 "brew ls -1"
}
lib::brew::cask::list() {
lib::cache-or-command "${LibBrew__CaskCacheList}" 30 "brew cask ls -1"
}
lib::brew::cask::tap() {
run "brew tap homebrew/cask-cask"
}
# lib::cache-or-command FILE STALE_MINUTES COMMAND...
# Emit FILE's contents when it is fresher than STALE_MINUTES (per
# lib::file::exists_and_newer_than); otherwise truncate FILE, run the
# command via eval, and tee its output into the cache while printing it.
lib::cache-or-command() {
local file="$1"; shift
local stale_minutes="$1"; shift
local command="$*"
lib::file::exists_and_newer_than "${file}" ${stale_minutes} && {
cat "${file}"
return 0
}
cp /dev/null ${file} > /dev/null
eval "${command}" | tee -a "${file}"
}
# Check whether PACKAGE appears in the cached installed-package listing.
# The result is produced by array-contains-element (defined elsewhere);
# callers below compare its stdout against the string "true".
lib::brew::package::is-installed() {
local package="${1}"
local -a installed_packages=($(lib::brew::package::list))
array-contains-element $(basename "${package}") "${installed_packages[@]}"
}
# Same check for casks, against the cached cask listing.
lib::brew::cask::is-installed() {
local cask="${1}"
local -a installed_casks=($(lib::brew::cask::list))
array-contains-element $(basename "${cask}") "${installed_casks[@]}"
}
# Hard reinstall: unlink + uninstall (failures tolerated via '; true'),
# then install and force-link.  Honors $opts_force / $opts_verbose.
lib::brew::reinstall::package() {
local package="${1}"
local force=
local verbose=
[[ -n ${opts_force} ]] && force="--force"
[[ -n ${opts_verbose} ]] && verbose="--verbose"
run "brew unlink ${package} ${force} ${verbose}; true"
run "brew uninstall ${package} ${force} ${verbose}; true"
run "brew install ${package} ${force} ${verbose}"
run "brew link ${package} --overwrite ${force} ${verbose}"
lib::brew::cache-reset::delayed
}
# Install a package unless already present; on a failed install, fall
# back to the full reinstall sequence above.
lib::brew::install::package() {
local package=$1
local force=
local verbose=
[[ -n ${opts_force} ]] && force="--force"
[[ -n ${opts_verbose} ]] && verbose="--verbose"
inf "checking if package ${bldylw}${package}$(txt-info) is already installed..."
if [[ $(lib::brew::package::is-installed ${package}) == "true" ]]; then
ok:
else
printf "${bldred}not found.${clr}\n"
run "brew install ${package} ${force} ${verbose}"
# LibRun__LastExitCode is set by the 'run' helper (defined elsewhere).
if [[ ${LibRun__LastExitCode} != 0 ]]; then
info "NOTE: ${bldred}${package}$(txt-info) failed to install, attempting to reinstall..."
lib::brew::reinstall::package "${package}"
fi
lib::brew::cache-reset::delayed
fi
}
# Install a cask unless an /Applications/*.app match or the cached cask
# listing says it is already present ($opts_force bypasses the app check).
lib::brew::install::cask() {
local cask=$1
local force=
local verbose=
[[ -n ${opts_force} ]] && force="--force"
[[ -n ${opts_verbose} ]] && verbose="--verbose"
inf "verifying brew cask ${bldylw}${cask}"
if [[ -n $(ls -al /Applications/*.app | grep -i ${cask}) && -z ${opts_force} ]]; then
ok:
elif [[ $(lib::brew::cask::is-installed ${cask}) == "true" ]]; then
ok:
return 0
else
kind_of_ok:
run "brew cask install ${cask} ${force} ${verbose}"
fi
lib::brew::cache-reset::delayed
}
# Unlink and uninstall a package; errors are tolerated by disabling
# LibRun's abort-on-error mode around both commands.
lib::brew::uninstall::package() {
local package=$1
local force=
local verbose=
[[ -n ${opts_force} ]] && force="--force"
[[ -n ${opts_verbose} ]] && verbose="--verbose"
export LibRun__AbortOnError=${False}
run "brew unlink ${package} ${force} ${verbose}"
# NOTE(review): set to ${False} again here — if 'run' restores the flag
# this is deliberate; otherwise the second export is redundant.  Confirm.
export LibRun__AbortOnError=${False}
run "brew uninstall ${package} ${force} ${verbose}"
lib::brew::cache-reset::delayed
}
# set $opts_verbose to see more output
# set $opts_force to true to force it
#
# Batch wrappers: apply the corresponding single-package operation to
# every argument.  Fixes: "$@" and "${package}" are quoted so package
# names are never word-split or glob-expanded, and the unused per-loop
# 'force' locals are dropped — the per-package functions read
# $opts_force / $opts_verbose themselves.
lib::brew::install::packages() {
  local package
  for package in "$@"; do
    lib::brew::install::package "${package}"
  done
}

lib::brew::reinstall::packages() {
  local package
  for package in "$@"; do
    lib::brew::uninstall::package "${package}"
    lib::brew::install::package "${package}"
  done
}

lib::brew::uninstall::packages() {
  local package
  for package in "$@"; do
    lib::brew::uninstall::package "${package}"
  done
}
| true |
5d9121f1b7e2cc4c12494c12ecdea92dabb0d8e5 | Shell | boydndonga/scripts.sh | /nodejs.sh | UTF-8 | 650 | 3.375 | 3 | [] | no_license | #!/bin/bash
# Bootstrap a Node.js development environment on a Debian/Ubuntu machine
# (uses apt-get); runs from $HOME.
cd ~
echo "install npm"
# NOTE(review): 'npm install -g' has no package argument — as written it
# globally installs the *current directory*; confirm a package name was
# not accidentally omitted.
npm install -g
echo "update npm"
npm install npm@latest -g
echo "confirm install"
npm -v
echo "The script clones the nvm repository to ~/.nvm and adds the source line to your profile (~/.bash_profile, ~/.zshrc, ~/.profile, or ~/.bashrc"
# Pipe-to-bash install of nvm v0.33.2 straight from GitHub.
curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.2/install.sh | bash
echo "confirm install"
# NOTE(review): nvm is loaded from the shell profile; it may not be
# visible in this same non-interactive shell until re-sourced.
command -v nvm
echo "clone nodejs setup 8.X and install"
curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash -
sudo apt-get install -y nodejs
echo "install build tools"
sudo apt-get install -y build-essential
echo "confirm install"
nodejs -v
| true |
bbe7a714acb69d1ac0f084a19fccbd486e77d750 | Shell | waelby/application-Shell-Sujet-17- | /tmp1.sh | UTF-8 | 624 | 3.390625 | 3 | [] | no_license | #! /bin/bash
# Interactive (yad/GTK) helper: repeatedly lists files matching "*~.*"
# under the current directory and offers to delete them.
# Fix: the first --button flag had a stray third dash (---button), which
# yad does not accept; deletion now uses find -exec so file names with
# spaces are handled safely.
yad --center --width=500 --height=125 --text "Would you like to run the app ?" \
--button=gtk-no:1 --button=gtk-yes:0 --buttons-layout=center
while :
do
	# Show the candidate files before asking for confirmation.
	find . -iname "*~.*"
	yad --center --width=500  --height=125 --text="voulez vous supprimer les fichiers (yes or no) :" \
	--button="yes":1 \
	--button="no":2 \
	--button="-help":3
	foo=$?
	if [[ $foo -eq 1 ]]; then
		echo "tout les fichier qui finissent par ~ ont été supprimés avec succcés"
		find . -iname "*~.*" -exec rm -r -- {} +
	elif [[ $foo -eq 2 ]]; then
		echo "retour au menu principale"
		exit 0
	elif [[ $foo -eq 3 ]]; then
		help    # NOTE(review): no 'help' function is defined in this script — confirm intent.
	else
		cancel && exit 0    # NOTE(review): 'cancel' is also undefined here.
	fi
done
| true |
1a11b747383ce3cea14120e92088f203a7f3a277 | Shell | nam1796/android-ffmpeg-executable | /build_ffmpeg.sh | UTF-8 | 743 | 2.65625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Configure and build a static ffmpeg executable for Android/ARM,
# cross-compiled with an NDK standalone toolchain.
# NOTE(review): $TOOLCHAIN and $SYSROOT are referenced but never set in
# this file — they are assumed to come from the environment; confirm.
function build_one
{
./configure \
--prefix=$PREFIX \
--disable-shared \
--enable-static \
--disable-doc \
--enable-ffmpeg \
--disable-ffplay \
--disable-ffprobe \
--disable-ffserver \
--disable-avdevice \
--disable-doc \
--disable-symver \
--cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
--target-os=linux \
--arch=arm \
--enable-cross-compile \
--enable-version3 \
--enable-gpl \
--enable-libx264 \
--sysroot=$SYSROOT \
--extra-cflags="-Os -fpic -I../x264" \
--extra-ldflags="-L../x264" \
$ADDITIONAL_CONFIGURE_FLAG
make clean
make
make install
}
# Build inside the ffmpeg/ checkout; artifacts land in ffmpeg/android/arm.
pushd ffmpeg
CPU=arm
PREFIX=$(pwd)/android/$CPU
# NOTE(review): ADDI_CFLAGS is assigned here but never referenced by
# build_one (which uses $ADDITIONAL_CONFIGURE_FLAG instead) — confirm.
ADDI_CFLAGS="-marm"
build_one
popd
| true |
1b73dc6ae7d8ea2ba692bee7ae69718f9bdf42b4 | Shell | jarinfrench/scripts | /bash/extract_all_energies.sh | UTF-8 | 1,254 | 4.15625 | 4 | [] | no_license | #! /bin/bash
# This script utilizes the C++ script extract_energy.cpp and extracts the energy
# from all files with the format minimize_*.txt, and writes to the file
# specified by FN.  Note that this assumes that extract_energy is found in PATH!

# Collect the minimize_* outputs in natural (version) sort order.
# Fix: grep patterns are quoted so the shell can never glob-expand them,
# and file/filename expansions below are quoted against spaces.
targets=($(ls -v | grep '^minimize'))
value=($(ls | grep -E '^minimize[_0-3]*_no_GB')) # This is the single grain value

read -p "Please enter the filename to be written to: " FN

# find the index in targets that has the value we want
for i in "${!targets[@]}"; do
  if [[ "${targets[$i]}" = "${value}" ]]; then
    j=$i
  fi
done

# The energies will not be calculated correctly if the first value written to
# the file is not the single grain, so exit early when it was not found.
if [ -z "${j+x}" ]; then
  echo "Error finding initial energy configuration."
  exit 2
else
  extract_energy "${targets[$j]}" "$FN" -a 0.00 # base value for the single grain
fi

# Extract the energy for each remaining file; the single-grain file was
# already written above, so it is skipped to avoid double counting.
for i in "${targets[@]}"; do
  if [[ "$i" = "${targets[$j]}" ]]; then
    continue
  fi
  extract_energy "$i" "$FN"
done
| true |
5333ce21307b25f896afd76c3664650282bb579d | Shell | reedHam/programing-notes | /es6_import_scripts/importfix.sh | UTF-8 | 1,246 | 3.4375 | 3 | [] | no_license | #!/bin/bash
# Usage: importfix.sh MODULE MODULE_PATH ADD_IMPORT
# For every class exported as "export { default as X }" from MODULE_PATH,
# scan the project tree for files that use X and report — and, when
# ADD_IMPORT is the string "true", prepend — the matching ES6 import line.
MODULE=$1
MODULE_PATH=$2
ADD_IMPORT=$3
printf "%-45s | %-45s | %-45s\n" "File" "Import" "Status"
printf "%-45s | %-45s | %-45s\n" "" "" ""
# PCRE lookbehind captures just the exported class names, one per line.
grep -oP "(?<=^export { default as )(\w*)\b" "$MODULE_PATH" | while read -r line;
do
    CLASS="$line"
    replacement="import { ${CLASS} } from '/modules/${MODULE}.js';\n"
    # Files that construct (new X), statically reference (X.) or extend
    # the class; the project's own modules directory is excluded.
    grep -rlP "new $CLASS\W|\b$CLASS\.|\bextends $CLASS\b" "$HOME/development/trim5/public/" --exclude-dir "$HOME/development/trim5/public/modules" | sort | while read filename;
    do
        # Crude already-imported check: look within the import...from region.
        ALREADY_IMPORTED=$(awk '/import /,/ from/' $filename | grep -ow "$CLASS")
        SELF_IMPORT=$(grep -oP "^export default class $CLASS\W" "$filename")
        if [[ -z "$ALREADY_IMPORTED" && -z "$SELF_IMPORT" ]]; then
            [ "$ADD_IMPORT" = "true" ] && sed -i "1s@^@$replacement@" "$filename"
            printf "%-45s | %-45s | \e[32m%-45s\e[0m\n" "${filename##*/}" "$CLASS" "Imported Added."
        elif [ ! -z "$ALREADY_IMPORTED" ]; then
            printf "%-45s | %-45s | \e[35m%-45s\e[0m\n" "${filename##*/}" "$CLASS" "Already imported."
        elif [ ! -z "$SELF_IMPORT" ]; then
            printf "%-45s | %-45s | \e[33m%-45s\e[0m\n" "${filename##*/}" "$CLASS" "Self Import."
        fi
    done
done
| true |
8dce73f9f07b62b00d74e1e55b05bb4975760dea | Shell | girishbandaru/First | /demo3.sh | UTF-8 | 237 | 3.65625 | 4 | [] | no_license | # if / if else / condition
# Tiny tutorial script: read a number from stdin and classify it relative
# to 10 using if / elif / else.
echo "enter a number:"
read num
# NOTE(review): $num is unquoted — empty or non-numeric input makes the
# numeric tests print an error and fall through to the else branch.
if [ $num -gt 10 ]
then
echo " $num is greater than 10"
elif [ $num -eq 10 ]
then
echo " entered number is equal to 10"
else
echo " $num is less than 10"
fi
| true |
ecc814aaf642ea0f99420379b27df3c581dac8bc | Shell | zfsonlinux/xfstests | /tests/generic/299 | UTF-8 | 3,474 | 3.5 | 4 | [] | no_license | #! /bin/bash
# FSQA Test No. 299
#
# AIO/DIO stress test
# Run random AIO/DIO activity and fallocate/truncate simultaneously
# Test will operate on huge sparsed files so ENOSPC is expected.
#
#-----------------------------------------------------------------------
# (c) 2013 Dmitry Monakhov
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it would be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#-----------------------------------------------------------------------
#
seq=`basename $0`
seqres=$RESULT_DIR/$seq
echo "QA output created by $seq"
here=`pwd`
tmp=/tmp/$$
fio_config=$tmp.fio
status=1 # failure is the default!
trap "rm -f $tmp.*; exit \$status" 0 1 2 3 15
# get standard environment, filters and checks
. ./common/rc
. ./common/filter
# real QA test starts here
_supported_fs generic
_supported_os Linux
_need_to_be_root
_require_scratch
NUM_JOBS=$((4*LOAD_FACTOR))
BLK_DEV_SIZE=`blockdev --getsz $SCRATCH_DEV`
FILE_SIZE=$((BLK_DEV_SIZE * 512))
cat >$fio_config <<EOF
###########
# $seq test fio activity
# Filenames derived from jobsname and jobid like follows:
# ${JOB_NAME}.${JOB_ID}.${ITERATION_ID}
[global]
ioengine=libaio
bs=128k
directory=${SCRATCH_MNT}
filesize=${FILE_SIZE}
size=999G
iodepth=128*${LOAD_FACTOR}
continue_on_error=write
ignore_error=,ENOSPC
error_dump=0
create_on_open=1
fallocate=none
exitall=1
## Perform direct aio, to files which may be truncated
## by external task
[direct_aio]
direct=1
buffered=0
numjobs=${NUM_JOBS}
rw=randwrite
runtime=100*${TIME_FACTOR}
time_based
# Perform direct aio and verify data
# This test case should check use-after-free issues
[aio-dio-verifier]
numjobs=1
verify=crc32c-intel
verify_fatal=1
verify_dump=1
verify_backlog=1024
verify_async=4
verifysort=1
direct=1
bs=4k
rw=randrw
filename=aio-dio-verifier
# Perform buffered aio and verify data
# This test case should check use-after-free issues
[buffered-aio-verifier]
numjobs=1
verify=crc32c-intel
verify_fatal=1
verify_dump=1
verify_backlog=1024
verify_async=4
verifysort=1
direct=0
buffered=1
bs=4k
rw=randrw
filename=buffered-aio-verifier
EOF
_require_fio $fio_config
# _workout: drive the actual stress load.
#  - launches fio (config written above) in the background on $SCRATCH_MNT
#  - concurrently loops fallocate($FILE_SIZE) / truncate(0) over the files
#    fio's direct_aio jobs write (direct_aio.$k.0), racing its AIO/DIO
#  - the loop ends once the fio process exits; wait collects its status
_workout()
{
	echo ""
	echo "Run fio with random aio-dio pattern"
	echo ""

	cat $fio_config >>  $seqres.full
	run_check $FIO_PROG $fio_config &
	pid=$!
	echo "Start fallocate/truncate loop"

	for ((i=0; ; i++))
	do
	    for ((k=1; k <= NUM_JOBS; k++))
	    do
		fallocate -l $FILE_SIZE $SCRATCH_MNT/direct_aio.$k.0 \
			>> $seqres.full 2>&1
	    done
	    for ((k=1; k <= NUM_JOBS; k++))
	    do
		truncate -s 0 $SCRATCH_MNT/direct_aio.$k.0 >> $seqres.full 2>&1
	    done
	    # The following line checks whether the fio pid is still alive.
	    # Once fio exits we can stop the fallocate/truncate loop.
	    kill -0 $pid > /dev/null 2>&1 || break
	done
	wait $pid
}
_scratch_mkfs >> $seqres.full 2>&1
_scratch_mount
if ! _workout; then
umount $SCRATCH_DEV 2>/dev/null
exit
fi
if ! _scratch_unmount; then
echo "failed to umount"
status=1
exit
fi
status=0
exit
| true |
3ba3350d37eeda2f66721e9fc84b7bffb8b7e69a | Shell | fengfengChina/AutoShell | /DeloyTomcat.sh | UTF-8 | 619 | 3.03125 | 3 | [
"Apache-2.0"
] | permissive | TOMCAT='/mnt/service/apache-tomcat-8.0.37'
PROJECT='/mnt/project/ascmMall/ui'
# Redeploy the "ui" webapp: kill any running Tomcat, rebuild the WAR from
# SVN, copy it into webapps/, then start Tomcat again.
echo "deploy ui"
# Fix: pgrep -f replaces the fragile ps|grep|grep -v grep|awk pipeline
# (pgrep never matches itself and returns one pid per line).
pidList=$(pgrep -f apache-tomcat-8.0.37)
if [ "$pidList" = "" ]; then
    echo "no tomcat pid alive"
else
    echo "tomcat id list :$pidList"
    # $pidList intentionally unquoted: it may contain several pids.
    kill -9 $pidList
    echo "kill $pidList"
fi
echo "svn update and clean complier and package"
# Fix: abort the deploy when checkout/build fails instead of wiping the
# deployed webapp and copying a stale (or missing) WAR.
cd "$PROJECT" && svn update && gradle clean && gradle war || exit 1
rm -rf "$TOMCAT/webapps/ui.war" "$TOMCAT/webapps/ui"
echo "copy to tomcat webApps "
cp build/libs/ui.war "$TOMCAT/webapps/"
echo "start tomcat ! Go Go Go!!!"
cd "$TOMCAT" && ./bin/startup.sh
| true |
775d3b46e65b8667828329d63408ccc75038406a | Shell | joetww/work_script | /mysql_backup.sh | UTF-8 | 2,477 | 3.890625 | 4 | [] | no_license | #!/bin/sh
# Rotating mysqldump backup driver.  Reads per-host settings from
# ~/.mysql_backuprc (db_host, backup_dir, keep_backup, ...), dumps every
# database into backup.0/, then rotates backup.N -> backup.N+1 keeping
# $keep_backup generations.
RC_FILE="${HOME}/.mysql_backuprc"
if [ -f "${RC_FILE}" ]; then
	source ${RC_FILE}
else
	echo "Check Config File: ${RC_FILE}"
	cat << EOD
# 'db_user' is mysql username
# 'db_passwd' is mysql password
# 'db_host' is mysql host
# 'backup_dir' is the directory for story your backup file
# 'keep_backup' is used to configure the amount to store backup data
EOD
	exit 2
fi
# The user/password checks are disabled because credentials come from the
# mysql_config_editor login path configured below, not from the rc file.
## if [ -z ${db_user+x} ]; then echo "db_user is unset";exit 4; fi
## if [ -z ${db_passwd+x} ]; then echo "db_passwd is unset";exit 4; fi
## if [ -z ${db_host+x} ]; then echo "db_host is unset";exit 4; fi
if [ -z ${backup_dir+x} ]; then echo "backup_dir is unset";exit 4; fi
if [ -z ${keep_backup+x} ]; then echo "keep_backup is unset"; exit 4; fi
# date format for backup file (dd-mm-yyyy)
time="$(date +"%d-%m-%Y")"
# Pre-provision the encrypted credential file (login path) beforehand:
# http://dev.mysql.com/doc/refman/5.6/en/mysql-config-editor.html
# mysql_config_editor set --login-path=client --host=localhost --user=localuser --password
# mysql_config_editor print --all
# ~/.mylogin.cnf
# mysql, mysqldump and some other bin's path
MYSQL="$(which mysql) --login-path=client -h $db_host"
MYSQLDUMP="$(which mysqldump) --login-path=client --single-transaction --routines -h $db_host"
MKDIR="$(which mkdir)"
RM="$(which rm)"
MV="$(which mv)"
GZIP="$(which gzip)"
# check the directory for store backup is writeable
# NOTE(review): exits with status 0 (success) on an unwritable directory —
# confirm a non-zero status was not intended here.
test ! -w $backup_dir && echo "Error: $backup_dir is un-writeable." && exit 0
# the directory for story the newest backup
test ! -d "$backup_dir/backup.0/" && $MKDIR "$backup_dir/backup.0/"
# get all databases (system schemas information_schema/mysql are excluded)
all_db="$($MYSQL -Bse 'show databases' | grep -v -P '^information_schema$|^mysql$')"
for db in $all_db
do
	$MYSQLDUMP $db | $GZIP -9 > "$backup_dir/backup.0/$time.$db.gz"
done
# delete the oldest backup
test -d "$backup_dir/backup.${keep_backup}/" && $RM -rf "$backup_dir/backup.${keep_backup}"
# rotate backup directory: backup.N -> backup.N+1, highest index first
for int in $(seq `expr ${keep_backup} - 1` -1 0 | xargs)
do
	if(test -d "$backup_dir"/backup."$int")
	then
		next_int=`expr $int + 1`
		$MV "$backup_dir"/backup."$int" "$backup_dir"/backup."$next_int"
	fi
done
exit 0;
################################################
###### 補充 設定檔 ${HOME}/.mysql_backuprc #####
################################################
db_user="xxxxxxxx"
db_passwd="xxxxxxxx"
db_host="10.0.0.1"
backup_dir="/cygdrive/d/mysql_backup"
keep_backup=7
| true |
f1fcb18e5b4103c101810a9106472e5e86156ea4 | Shell | JoshKloster/dotfiles | /update.sh | UTF-8 | 257 | 3.34375 | 3 | [] | no_license | #!/bin/bash
# Re-create the ~/.<file> symlinks pointing into ~/dotfiles.
DIR=~/dotfiles
FILES="vimrc vimrc.bundles tmux.conf"

echo -n "Updating previous symlinks"
for file in $FILES; do
    echo -n "."
    rm -f ~/."$file"              # Remove old file/link; -f silences "no such file"
    ln -s "$DIR/$file" ~/."$file" # Add new reference.
done
echo "Done!"
| true |
70a8285356b062ccca01f62217723ec791ab6998 | Shell | ChristopherA/.dotfiles | /0-shell/.profile.d/5-bash-functions.sh | UTF-8 | 337 | 3.125 | 3 | [] | no_license | #!/bin/bash
# ~/.profile.d/5-bash-functions.sh: Various path related functions for both
# interactive and non-interactive bash
# from dyvers hands
# Recursively delete files that match a certain pattern
# (defaults to removing every `.DS_Store` file under the cwd).
# Usage: cleanup [glob-pattern]
# Each match is listed (-ls) just before it is removed (-delete).
cleanup() {
	local pattern
	pattern="${1:-*.DS_Store}"
	find . -type f -name "$pattern" -ls -delete
}
| true |
5f93d8800e93ebe84a4db1be2e1501a4b6f10636 | Shell | sourceperl/rpi.backup | /rpi-tools/rpi-img-mount | UTF-8 | 1,899 | 4.3125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# This script auto-mount a Raspberry PI image file and open a shell on it
#
# you can use it like this :
#   rpi-img-mount IMAGE_FILE
#
# so directly on a Raspberry Pi:
#   sudo rpi-img-mount myimage.img
# Requires root (checked below) and losetup -P for per-partition loop
# devices; assumes the image has boot on partition 1 and root on 2.
# vars
NAME=$(basename "$0")
# parse optionals args
while getopts 'h' OPTION
do
  case $OPTION in
  h)
    printf "Usage: %s: [-h] IMAGE_FILE\n" "$NAME"
    printf "\n"
    printf "    -h print this help message\n"
    exit 0
    ;;
  esac
done
shift $(("$OPTIND" - 1))
# parse fixed args
IMG_FILE=$1
# some checks
[ $EUID -ne 0 ] && { printf "ERROR: %s needs to be run by root\n" "$NAME" 1>&2; exit 1; }
[ $# -ne 1 ] && { printf "ERROR: %s needs 1 arg\n" "$NAME" 1>&2; exit 1; }
[ ! -f "$IMG_FILE" ] && { printf "ERROR: file \"%s\" not exist\n" "$IMG_FILE" 1>&2; exit 1; }
# mount image file to a tmp dir
CUR_DIR=$(pwd)
TMP_DIR=$(mktemp -d --tmpdir="$CUR_DIR")
printf "INFO: tmp directory created \"%s\"\n" "$TMP_DIR"
# losetup -P maps each image partition to ${LOOP}p<N>.
LOOP=$(losetup --show -fP "${IMG_FILE}")
printf "INFO: map image file \"%s\" to \"%s\"\n" "$IMG_FILE" "$LOOP"
mount "$LOOP"p2 "$TMP_DIR" \
  && mount "$LOOP"p1 "$TMP_DIR"/boot/ \
  && printf "INFO: mount tmp directory to \"%s\" OK\n" "$LOOP"
# open a shell session in the tmp dir; cleanup resumes when it exits
printf "INFO: open bash session to image file (root fs is mount at %s) \n" "$TMP_DIR"
printf "INFO: enter \"exit\" to quit this session and unmount image\n"
cd "$TMP_DIR" && bash
# cleanup
cd "$CUR_DIR" \
  && printf "INFO: return to current directory\n"
umount "$TMP_DIR"/boot/ \
  && printf "INFO: unmount of boot part OK\n" \
  || printf "ERROR: unable to unmount boot part\n"
umount "$TMP_DIR" \
  && printf "INFO: unmount of root part OK\n" \
  || printf "ERROR: unable to unmount root part\n"
rmdir "$TMP_DIR" \
  && printf "INFO: remove tmp directory OK\n" \
  || printf "ERROR: unable to remove tmp directory\n"
losetup -d "$LOOP"
exit 0
| true |
fa20bb47ddea2508d06305a93ae11b3317ec4352 | Shell | mikewhitby/dotfiles | /install-packages.sh | UTF-8 | 1,365 | 2.796875 | 3 | [] | no_license | # get sudo and keep it alive, but only if not called by bootstrap.sh
# Provision a macOS workstation with Homebrew packages, casks and vagrant
# plugins.  When not invoked by bootstrap.sh, keeps sudo alive for the
# whole run.
if [ "$(ps $PPID | tail -n 1 | awk "{print \$6}")" != "bootstrap.sh" ]; then
    sudo -v
    # Background keep-alive: refresh sudo every 60s until this shell exits.
    while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
fi
# install bash 4 and GNU core and findutils to replace outdates OSX versions
brew tap homebrew/dupes
brew install bash
brew install coreutils
brew install findutils
brew install diffutils
brew install grep
brew install gnu-sed
brew install gnu-tar
brew install gnu-which
brew install gzip
brew install watch
brew install wget
# install stuff OSX didn't come with
brew install ack
brew install bash-completion
brew install ctags
brew install macvim
brew install mysql
brew install netcat
brew install nmap
brew install phpmd
brew install pv
brew install git
# install DMGs
brew cask install iterm2
brew cask install macdown
brew cask install google-chrome
brew cask install dropbox
brew cask install integrity
brew cask install virtualbox
brew cask install spectacle
brew cask install sublime-text
brew cask install appcleaner
brew cask install vagrant
brew cask install phpstorm
brew cask install cyberduck
brew cask install libreoffice
brew cask install ansible
brew cask install transmission
brew cask install skype
# vagrant plugins
vagrant plugin install vagrant-bindfs
# cleanup
brew cleanup
brew cask cleanup
| true |
8107a6c607f964b440286758b3861c774a7986e8 | Shell | rbray89/hassos | /buildroot/support/dependencies/check-host-cmake.sh | UTF-8 | 1,094 | 4.25 | 4 | [
"GPL-2.0-only",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later",
"Apache-2.0"
] | permissive | #!/bin/sh
# Usage: check-host-cmake.sh MIN_VERSION CANDIDATE...
# Prints the path of the first candidate cmake whose MAJOR.MINOR version
# is at least MIN_VERSION and exits 0; prints nothing and exits 1 when no
# candidate qualifies.
# prevent shift error
[ $# -lt 2 ] && exit 1

# Split MIN_VERSION into its major/minor components.
major_min="${1%.*}"
minor_min="${1#*.}"
shift

for candidate; do
	# Try to locate the candidate. Discard it if not located.
	cmake=`which "${candidate}" 2>/dev/null`
	[ -n "${cmake}" ] || continue

	# Extract version X.Y from versions in the form X.Y or X.Y.Z
	# with X, Y and Z numbers with one or more digits each, e.g.
	#   3.2     -> 3.2
	#   3.2.3   -> 3.2
	#   3.2.42  -> 3.2
	#   3.10    -> 3.10
	#   3.10.4  -> 3.10
	#   3.10.42 -> 3.10
	# Discard the candidate if no version can be obtained
	version="$(${cmake} --version \
		   |sed -r -e '/.* ([[:digit:]]+\.[[:digit:]]+).*$/!d;' \
			   -e 's//\1/'
		  )"
	[ -n "${version}" ] || continue

	major="${version%.*}"
	minor="${version#*.}"
	# Accept on strictly newer major, or same major with minor >= min.
	if   [ ${major} -gt ${major_min} ]; then
		echo "${cmake}"
		exit
	elif [ ${major} -eq ${major_min} -a ${minor} -ge ${minor_min} ]; then
		echo "${cmake}"
		exit
	fi
done

# echo nothing: no suitable cmake found
exit 1
| true |
653cccd1343558c3b48cbee9f6a513b84464c0df | Shell | work-jlsun/test | /test.sh | GB18030 | 5,383 | 3.390625 | 3 | [] | no_license | #!/bin/bash
#read the the paxstore.conf and then we will provide more convenient functions
#firest shoud all the status
# ls
# 1(22) 2(23) 3(24) 4(25) 5(26) 6(27) 7(28)
#
# range 1 ok ok ok
# range 2 ok ok ok
# range 3 ok ok ok
# range 4 ok ok ok
#
# recover -range 1
#restart -range 2 -id 1
#start -range 2 -id 2
#stop -range 2 -id 2
#stop -server 2 {"this stop all the instance of one server"
#get the binpath name
#shpath=/home/sjl/project/paxoslog9-multi-test/paxoslog/single.sh
shpath=/home/dyx/project/paxstore/single.sh
#get the binname
binname=$(grep "BINNAME" /etc/paxstore.conf | awk '{print $3}' )
#get server num
numberserver=$(grep "NumServer" /etc/paxstore.conf | awk '{print $3}' )
echo "NumServer" $numberserver
#get the server ip info
declare -a severs
for ((num=1; num<=$numberserver; num++))
do
servername=SERVER${num}_IP
servers[$num]=$(grep $servername /etc/paxstore.conf | awk '{print $3}')
echo ${servers[$num]}
done
#get range_num
rangenumber=$(grep "RANGE_NUM" /etc/paxstore.conf | awk '{print $3}' )
#get the range server
declare -a rangenumber
for ((num=1; num<=$rangenumber; num++))
do
rangename=RANGE${num}_SERVERS
rangeservers[$num]=$(grep $rangename /etc/paxstore.conf | awk '{print $3}')
echo ${rangeservers[$num]}
done
#get the rangerserverid by the range index and array index
#Look up the server id for (range index $1, member index $2).
#The id is returned via the function's exit status; callers read it from $?.
rangeserver_index_index(){
	#Locals so we no longer clobber the global "servers" array (element 0)
	#or the global "serverid" scalar, as the old body did.
	local range_members serverid
	range_members=${rangeservers[$1]}
	#Pick the $2-th comma-separated field, e.g. "22,23,24" with $2=2 -> 23.
	serverid=$(echo "$range_members" | cut -d, -f"$2")
	#NOTE(review): exit statuses are 0-255, so server ids must stay < 256.
	return "$serverid"
}
printhead(){
	# Print the table header: one "(id=N)(last-octet)" cell per server,
	# preceded by a tab so it lines up with the "rangeN" row labels.
	local idx ip
	printf "\t"
	for (( idx = 1; idx <= numberserver; idx++ )); do
		ip=${servers[$idx]}
		# Show only what follows the common "192.168.3." prefix.
		printf "(id=%d)(%s) " "$idx" "${ip##*192.168.3.}"
	done
	printf "\n"
}
#Print one status row: "OK" under each server that runs the range, blanks
#otherwise.
#NOTE(review): bash cannot pass arrays by value.  The caller invokes this as
#"printcontext $printindex", which only passes element 0; the assignment
#below just writes that value back into element 0 (unused).  The real data
#is read from the caller's "printindex" array via bash dynamic scoping
#("declare -a printindex" inside ls() is visible to this callee).
printcontext()
{
printindex=$1
#Walk server ids 1..numberserver.  Element values are always "0" or "1"
#because ls() pre-initialises them, so the unquoted [ $state == 1 ] works
#here even though it would break on an empty value.
for ((index=1; index<=$numberserver; index++))
do
state=${printindex[$index]}
if [ $state == 1 ];then
printf "OK "
else
printf " "
fi
done
printf "\n"
}
#show the system status
#NOTE(review): this function is named "ls" and therefore shadows the
#system ls(1) command for the rest of this script (the REPL's "ls"
#command calls it).
ls(){
#printindex[i] becomes 1 when server i runs the current range.
#"declare" inside a function makes the array local; printcontext reads it
#through dynamic scoping.
declare -a printindex
printhead
for ((num=1; num<=$rangenumber; num++))
do
printf "range%d\t" $num
each_num=3
#reset the per-server status flags for this range
for ((index=1; index<=$numberserver; index++))
do
printindex[$index]=0
done
for ((index=1; index<=$each_num; index++))
do
rangeserver_index_index $num $index
serverid=`echo $?`
#the use the ssh to check remote machine state
#printf "ssh root@%s ps -ef | grep %s *%d | grep -v grep | wc -l\n" ${servers[$serverid]} $binname $num
#NOTE: only "ps -ef" runs remotely; the grep/wc pipeline filters the
#ssh output locally.  count should be 0 (not running) or 1 (running).
count=`ssh root@${servers[$serverid]} ps -ef | grep "$binname *$num" | grep -v "grep" | wc -l`
if [ "$count" == "0" ];then
continue
elif [ "$count" == "1" ];then
printindex[$serverid]=1
else
echo "count:" $count
echo "fuck error"
fi
done
#only ${printindex[0]} is actually passed; see printcontext for details
printcontext $printindex
done
}
#Start one range instance on one server.
#$1 - range id, $2 - server id (index into servers[]), $3 - recover flag,
#all forwarded to the remote control script ($shpath) over ssh as root.
#Assignments are deliberately global, matching this script's style.
start(){
rangeid=$1
serverid=$2
ifrecover=$3
ssh root@${servers[$serverid]} $shpath start $rangeid $ifrecover
}
#Stop one range instance on one server.
#$1 - range id, $2 - server id (index into servers[]).
stop(){
rangeid=$1
serverid=$2
ssh root@${servers[$serverid]} $shpath stop $rangeid
}
#Restart one range instance on one server.
#$1 - range id, $2 - server id, $3 - recover flag forwarded to $shpath.
restart(){
rangeid=$1
serverid=$2
ifrecover=$3
ssh root@${servers[$serverid]} $shpath restart $rangeid $ifrecover
}
#Start every replica (3 members) of one range.
#$1 - range id
#$2 - recover flag forwarded to the remote control script; defaults to 0.
#BUGFIX: the loop used to reset "ifrecover=0" on every iteration, which
#silently discarded the caller-supplied $2.
startrange(){
rangeid=$1
ifrecover=${2:-0}
each_num=3
for ((num=1; num<=$each_num; num++))
do
rangeserver_index_index $rangeid $num
serverid=$?
ssh root@${servers[$serverid]} $shpath start $rangeid $ifrecover
done
}
stoprange(){
	# Stop every replica (3 members) of range $1 via ssh to each member.
	rangeid=$1
	each_num=3
	for ((num=1; num<=each_num; num++)); do
		rangeserver_index_index $rangeid $num
		serverid=$?
		ssh root@${servers[$serverid]} $shpath stop $rangeid
	done
}
startall(){
	# Start every range (1..rangenumber); recovery is always off here.
	for (( range_index = 1; range_index <= rangenumber; range_index++ )); do
		ifrecover=0
		startrange "$range_index" "$ifrecover"
	done
}
stopall(){
	# Stop every range (1..rangenumber).
	for (( range_index = 1; range_index <= rangenumber; range_index++ )); do
		stoprange "$range_index"
	done
}
#Interactive command loop: read one line, split it into up to seven
#whitespace-separated tokens (The1..The7), and dispatch on the first.
while true;do
#printf "==>"
read -e -p "==>" cmd
The1=$(echo $cmd | awk '{print $1}')
The2=$(echo $cmd | awk '{print $2}')
The3=$(echo $cmd | awk '{print $3}')
The4=$(echo $cmd | awk '{print $4}')
The5=$(echo $cmd | awk '{print $5}')
The6=$(echo $cmd | awk '{print $6}')
The7=$(echo $cmd | awk '{print $7}')
case "$The1" in
ls )
#calls the ls() status function defined above, not /bin/ls
ls
;;
start )
#usage: start -range <r> -id <s> -recover <0|1>
if [[ "$The2" != "-range" || "$The4" != "-id" || "$The6" != "-recover" ]];then
echo "cmd error"
continue
else
echo "fuck"
start $The3 $The5 $The7
fi
;;
stop )
#usage: stop -range <r> -id <s>
if [[ "$The2" != "-range" || "$The4" != "-id" ]];then
echo "cmd error"
continue
else
stop $The3 $The5
fi
;;
restart )
#usage: restart -range <r> -id <s> -recover <0|1>
if [[ "$The2" != "-range" || "$The4" != "-id" || "$The6" != "-recover" ]];then
echo "cmd error"
continue
else
restart $The3 $The5 $The7
fi
;;
startrange )
#NOTE(review): only $The2 (range id) is forwarded; a "-recover N" suffix
#typed by the user is ignored here.
startrange $The2
;;
stoprange )
stoprange $The2
;;
startall )
startall
;;
stopall )
stopall
;;
help )
echo "ls"
echo "start -range 1 -id 2 -recover 1"
echo "stop -range 1 -id 2 "
echo "restart -range 1 -id 2 -recover 0"
echo "startrange 2 -recover 0"
echo "stoprange 2 "
echo "startall"
#echo "stopall"
echo "stopall " #here i have do a little change (original trailing note was garbled, mis-encoded text)
;;
*)
printf "others\n"
esac
printf "\n"
done
| true |
a74497baf3103f275df5a99b17797bda99883857 | Shell | aepsf/amoss | /scripts/combine-release-notes.sh | UTF-8 | 846 | 3.15625 | 3 | [
"MIT"
] | permissive | # Parameters:
#   1 - Version Name
#   2 - GitHub Release Tag
#   3 - Unlocked Package Install Command
#   4 - Unlocked Package Link (Prod / Dev Instances)
#   5 - Unlocked Package Link (Sandboxes)
#
# Prepend a new release section to RELEASE_NOTES.md and reset the pending
# notes file.  A single grouped redirection replaces per-line appends.
{
    echo "# Version $1"
    echo ""
    echo "* Git Tag : \`$2\`"
    echo "* SFDX Install : \`$3\`"
    echo "* Unlocked Package Links :"
    echo "  * $4"
    echo "  * $5"
    # Drop the pending-notes heading but keep everything under it.
    sed -E 's/\# Release Notes since Last Release//' PENDING_RELEASE_NOTES.md
    echo ""
    # The previous release notes go below the new section.
    cat RELEASE_NOTES.md
} >> RELEASE_NOTES.md.new
rm RELEASE_NOTES.md
mv RELEASE_NOTES.md.new RELEASE_NOTES.md
echo "# Release Notes since Last Release" > PENDING_RELEASE_NOTES.md
6a78390dfd209e7c11b1dce76f2ed64dc3d7626c | Shell | PandaLinux/base-64 | /temp-system/perl/build.sh | UTF-8 | 1,332 | 3.9375 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Abort on the first failing command and make pipelines fail when any
# stage fails ("shopt -s -o pipefail" is equivalent to "set -o pipefail").
shopt -s -o pipefail
set -e # Exit on error
# Package identity; the tarball name, source directory and download URL
# are all derived from name + version.
PKG_NAME="perl"
PKG_VERSION="5.30.0"
TARBALL="${PKG_NAME}-${PKG_VERSION}.tar.xz"
SRC_DIR="${PKG_NAME}-${PKG_VERSION}"
LINK="http://www.cpan.org/src/5.0/$TARBALL"
function showHelp() {
    # Print a short banner describing this package: ruled line, description,
    # ruled line, blank line — one output line per printf argument.
    printf '%s\n' \
        "--------------------------------------------------------------------------------------------------------------" \
        "Description: The Perl package contains the Practical Extraction and Report Language." \
        "--------------------------------------------------------------------------------------------------------------" \
        ""
}
function prepare() {
    # Download the upstream source tarball into the working directory.
    echo -e "Downloading $TARBALL from $LINK"
    wget -O "$TARBALL" "$LINK"
}
function unpack() {
    # Extract the downloaded tarball into the current directory.
    echo -e "Unpacking $TARBALL"
    # Quote the path so spaces or glob characters cannot split or expand it.
    tar xf "$TARBALL"
}
function build() {
    # Configure and compile Perl for the /tools (temporary toolchain) prefix.
    echo -e "Configuring $PKG_NAME"
    sh Configure -des -Dprefix=/tools -Dlibs=-lm -Uloclibpth -Ulocincpth
    # Only pass $MAKE_PARALLEL when it is set and non-empty: the previous
    # make "$MAKE_PARALLEL" handed make a literal empty-string target
    # (a hard error) whenever the variable was unset.
    make ${MAKE_PARALLEL:+"$MAKE_PARALLEL"}
}
function instal() {
    # Install the interpreter, pod2man and the core library tree into /tools.
    echo -e "Installing $PKG_NAME"
    cp -v perl cpan/podlators/scripts/pod2man /tools/bin
    # Derive the lib directory from PKG_VERSION instead of hard-coding
    # "5.30.0", so a version bump cannot silently install to the wrong path.
    mkdir -pv "/tools/lib/perl5/${PKG_VERSION}"
    cp -Rv lib/* "/tools/lib/perl5/${PKG_VERSION}"
}
function clean() {
    # Remove the unpacked source tree and the downloaded tarball.
    echo -e "Cleaning up..."
    # Quote the paths and end option parsing with "--" so unusual names
    # cannot be word-split, glob-expanded or parsed as options.
    rm -rf -- "${SRC_DIR}" "${TARBALL}"
}
# Run the installation procedure
# clean runs twice: first to discard stale artefacts before downloading,
# then to remove the sources after installing.  "time" reports the total
# duration of the whole sequence.
time {
    showHelp
    clean
    prepare
    unpack
    pushd ${SRC_DIR}
    build
    instal
    popd
    clean
}
| true |
16ecc12e5192b764c30e863a25693673a50f9b96 | Shell | cyberlooper/OpenFLIXR2.SetupScript | /.scripts/menu_other.sh | UTF-8 | 2,194 | 3.78125 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Strict mode: abort on errors and unset variables, fail broken pipelines,
# and only word-split on newline/tab (not spaces).
set -euo pipefail
IFS=$'\n\t'
# Secondary whiptail menu: Pi-hole bypass toggling and fix-running.
# NOTE(review): depends on helpers defined elsewhere in the project
# (info, error, run_script) and on OF_BACKTITLE — confirm they are sourced
# before this runs.  Returns 1 to tell the caller to go back to the main
# menu, 0 when the chosen action failed and this menu should be reshown.
menu_other() {
    local OTHER_COMPLETED="N"
    local OTHEROPTS=()
    # Offer "Bypass" when resolv.conf points at localhost (Pi-hole active),
    # otherwise offer to undo an earlier bypass.
    if [[ $(grep -c "127.0.0.1" "/etc/resolv.conf") -ge 1 ]]; then
        OTHEROPTS+=("Bypass Pi-hole " "")
    else
        OTHEROPTS+=("Undo Bypass Pi-hole " "")
    fi
    OTHEROPTS+=("Select specific fix to run " "")
    OTHEROPTS+=("Run ALL fixes " "")
    local OTHERCHOICE
    # No dialogs on CI; "|| echo Cancel" maps ESC/cancel (whiptail exits
    # non-zero) onto the Cancel branch so set -e does not kill the script.
    if [[ ${CI:-} == true ]] && [[ ${TRAVIS:-} == true ]]; then
        OTHERCHOICE="Cancel"
    else
        OTHERCHOICE=$(whiptail --fb --clear --title "OpenFLIXR - Fixes & Other Stuff" --menu "What would you like to do?" 0 0 0 "${OTHEROPTS[@]}" 3>&1 1>&2 2>&3 || echo "Cancel")
    fi
    # Menu labels keep their trailing space on purpose: the status messages
    # below concatenate "${OTHERCHOICE}completed" / "${OTHERCHOICE}failed".
    case "${OTHERCHOICE}" in
        "Bypass Pi-hole ")
            info "Running Pi-hole bypass only"
            run_script 'pihole_bypass' && OTHER_COMPLETED="Y"
            ;;
        "Undo Bypass Pi-hole ")
            info "Running Pi-hole unbypass only"
            run_script 'pihole_unbypass' && OTHER_COMPLETED="Y"
            ;;
        "Run ALL fixes ")
            info "Running ALL fixes only"
            run_script 'setup_fixes' && OTHER_COMPLETED="Y"
            ;;
        "Select specific fix to run ")
            run_script 'menu_config_select_fixes' || run_script 'menu_other' || return 1
            ;;
        "Cancel")
            info "Returning to Main Menu."
            return 1
            ;;
        *)
            error "Invalid Option"
            ;;
    esac
    if [[ "${OTHER_COMPLETED:-}" == "Y" ]]; then
        info "Fixes & Other Stuff - ${OTHERCHOICE}completed"
        # NOTE(review): ${OF_BACKTITLE} is unquoted; with IFS=$'\n\t' spaces
        # do not split it, but glob characters would still expand — consider
        # quoting.
        whiptail \
            --backtitle ${OF_BACKTITLE} \
            --title "OpenFLIXR - Fixes & Other Stuff" \
            --clear \
            --ok-button "Great!" \
            --msgbox "${OTHERCHOICE}completed. Returning to menu." 0 0
        return 1
    else
        info "Fixes & Other Stuff - ${OTHERCHOICE}failed"
        whiptail \
            --backtitle ${OF_BACKTITLE} \
            --title "OpenFLIXR - Fixes & Other Stuff" \
            --clear \
            --ok-button "Fine..." \
            --msgbox "${OTHERCHOICE}failed... Returning to menu." 0 0
            return 0
    fi
}
| true |
835b60e7f7171959506aea07290f7bd2c81b1b96 | Shell | rholdorf/bashsnippets | /path_and_branch_terminal_title.sh | UTF-8 | 301 | 3.625 | 4 | [] | no_license | #!/bin/bash
function git-title {
  # Set the terminal title (xterm OSC 2 escape) to "user@host: cwd", with
  # the current git branch appended in parentheses inside a repository.
  local branch title
  if branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null); then
    title="${USER}@${HOSTNAME}: ${PWD} (${branch})"
  else
    # Not a git repository
    title="${USER}@${HOSTNAME}: ${PWD}"
  fi
  echo -ne "\033]2;$title\007"
}
# Re-evaluate the title before every interactive prompt.
export PROMPT_COMMAND="git-title"
| true |
63e0909f84c844dc5c9f6bf1b8a0d6a2f77dda67 | Shell | msys2/MSYS2-packages | /cmatrix/PKGBUILD | UTF-8 | 997 | 2.59375 | 3 | [
"BSD-3-Clause"
] | permissive | pkgname=('cmatrix')
# PKGBUILD metadata for the cmatrix package (consumed by makepkg).
pkgver=2.0
pkgrel=1
pkgdesc="A curses-based scrolling 'Matrix'-like screen"
arch=('i686' 'x86_64')
url="http://www.asty.org/cmatrix/"
license=('spdx:GPL-3.0-or-later')
depends=('ncurses')
makedepends=('ncurses-devel' 'make' 'autotools' 'gcc')
# Renamed tarball ("name-version::url") plus a local patch from this repo.
source=("${pkgname}-${pkgver}.tar.gz"::"https://github.com/abishekvashok/cmatrix/archive/refs/tags/v${pkgver}.tar.gz"
        "define_TIOCSTI.patch")
sha256sums=('ad93ba39acd383696ab6a9ebbed1259ecf2d3cf9f49d6b97038c66f80749e99a'
            '70b3c94813427355490b5fad29ad8a116bae87e198033e1951ca218683317874')
# Apply the local TIOCSTI patch before building.  Run by makepkg, which
# provides $srcdir, $pkgname and $pkgver.
prepare() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  patch -p1 -i "${srcdir}/define_TIOCSTI.patch"
}
# Regenerate the autotools files (the GitHub tag tarball ships no configure
# script) and compile with the standard /usr prefix.
build() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  autoreconf -i
  ./configure --prefix=/usr --mandir=/usr/share/man
  make
}
# Stage the build into $pkgdir and ship the upstream documentation files.
package() {
  cd "${srcdir}/${pkgname}-${pkgver}"
  make DESTDIR="$pkgdir" install
  for i in AUTHORS NEWS COPYING README ChangeLog INSTALL; do
    # Quote "$i" so each doc file name is passed to install verbatim.
    install -Dm644 "$i" "$pkgdir/usr/share/doc/cmatrix/$i"
  done
}
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.