blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
d51c97d1b6bd0b42d8ac601a6d83d29e787dfb55
|
Shell
|
shan520cao/teddysun
|
/lamp2.2/xcache_3.1.0.sh
|
UTF-8
| 3,142
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
#===============================================================================================
# SYSTEM REQUIRED: CentOS-5 (32bit/64bit) or CentOS-6 (32bit/64bit)
# DESCRIPTION: Xcache for LAMP
# AUTHOR: Teddysun <i@teddysun.com>
# VISIT: https://code.google.com/p/teddysun/
# http://teddysun.com/lamp
#===============================================================================================
cur_dir=$(pwd)
cd "$cur_dir"
clear
echo "#############################################################"
echo "# Xcache for LAMP"
echo "# Intro: http://teddysun.com/lamp"
echo "#"
echo "# Author: Teddysun <i@teddysun.com>"
echo "#"
echo "#############################################################"
echo ""
# Download the xcache-3.1.0 tarball unless a non-empty copy is already here.
if [ -s xcache-3.1.0.tar.gz ]; then
echo "xcache-3.1.0.tar.gz [found]"
else
echo "xcache-3.1.0.tar.gz not found!!!download now......"
if ! wget http://teddysun.googlecode.com/files/xcache-3.1.0.tar.gz;then
echo "Failed to download xcache-3.1.0.tar.gz,please download it to $cur_dir directory manually and rerun the install script."
exit 1
fi
fi
# Unpack and build the Xcache PHP extension against the LAMP PHP install.
echo "============================Xcache3.1.0 install start====================================="
rm -rf "$cur_dir/untar/"
mkdir -p "$cur_dir/untar/"
tar xzf xcache-3.1.0.tar.gz -C "$cur_dir/untar/"
# Abort if the tarball did not unpack where expected; otherwise phpize/make
# would run in the wrong directory.
cd "$cur_dir/untar/xcache-3.1.0" || exit 1
export PHP_PREFIX="/usr/local/php"
$PHP_PREFIX/bin/phpize
# FIX: the option is --with-php-config (two dashes). The original single-dash
# spelling was not a recognized configure option, leaving configure to locate
# php-config on its own.
./configure --enable-xcache --with-php-config=$PHP_PREFIX/bin/php-config
make install
# Publish the Xcache admin web UI and hand it to the Apache user.
rm -rf /data/www/default/xcache
cp -r htdocs/ /data/www/default/xcache
chown -R apache:apache /data/www/default/xcache
# Recreate the coverage/coredump scratch directories, Apache-only access.
rm -rf /tmp/{pcov,phpcore}
mkdir /tmp/{pcov,phpcore}
chown -R apache:apache /tmp/{pcov,phpcore}
chmod 700 /tmp/{pcov,phpcore}
# Append the Xcache settings to php.ini only when not already present
# (grep -q on the file directly replaces the original cat|grep pipeline).
if ! grep -q "xcache-common" /usr/local/php/etc/php.ini; then
echo "Xcache configuration not found, create it!"
cat >>/usr/local/php/etc/php.ini<<-EOF
[xcache-common]
extension = /usr/local/php/lib/php/extensions/no-debug-non-zts-20100525/xcache.so
[xcache.admin]
xcache.admin.enable_auth = On
xcache.admin.user = "admin"
xcache.admin.pass = "e10adc3949ba59abbe56e057f20f883e"
[xcache]
xcache.shm_scheme = "mmap"
xcache.size = 64M
xcache.count = 1
xcache.slots = 8K
xcache.ttl = 3600
xcache.gc_interval = 60
xcache.var_size = 16M
xcache.var_count = 1
xcache.var_slots = 8K
xcache.var_ttl = 3600
xcache.var_maxttl = 0
xcache.var_gc_interval = 300
xcache.readonly_protection = Off
xcache.mmap_path = "/dev/zero"
xcache.coredump_directory = "/tmp/phpcore"
xcache.coredump_type = 0
xcache.disable_on_crash = Off
xcache.experimental = Off
xcache.cacher = On
xcache.stat = On
xcache.optimizer = Off
[xcache.coverager]
xcache.coverager = Off
xcache.coverager_autostart = On
xcache.coveragedump_directory = "/tmp/pcov"
EOF
fi
service httpd restart
#delete untar file
rm -rf "$cur_dir/untar/"
echo "============================Xcache3.1.0 install completed================================="
exit
| true
|
71ba22efa0d7cf79b0269d1e841cf500f5ebef2b
|
Shell
|
liebercanis/bacon
|
/pmt/runPlot
|
UTF-8
| 141
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# Run ROOT's plotEv.C macro over the event file given as $1.
if [ -z "$1" ]
then
  # FIX: the usage line previously named "runGen"; this script is runPlot.
  # Also exit non-zero so callers can detect the missing argument.
  echo "usage: runPlot <FILE>"
  exit 1
else
  FILE="$1"
fi
echo "plotEv.C(\"$FILE\")"
root "plotEv.C(\"$FILE\")"
| true
|
5f4a374eb10243363da148b20a04ed43e26f5cb4
|
Shell
|
Bulliby/Clone-Repositories
|
/clone.sh
|
UTF-8
| 1,477
| 4.125
| 4
|
[] |
no_license
|
#!/bin/bash
# Clone all repositories listed by clone.py into per-language directories
# under the target path.
set -e
# ERRORS CODE
E_BADARGS=11
E_BADLOCATION=12
# Print usage and exit with the bad-arguments code.
usage()
{
cat <<UsagePrint
Usage :
./clone.bash -t target [-v] [-h]
./clone.bash --target target [--verbose] [--help]
Clone all your repositories from your github account
Verbose : Permit to see what is done at each step
Help : Print this help
UsagePrint
exit $E_BADARGS
}
# Quiet mode: save stdout/stderr on fds 11/12, then close fds 1/2.
close_stderr_stdout()
{
exec 11>&1 12>&2 1>&- 2>&-
}
# Undo close_stderr_stdout: restore fds 1/2 from the saved copies.
restore_stderr_stdout()
{
# FIX: the redirections were reversed ('11>&1-' tried to duplicate the
# already-closed fd 1 onto 11). We must copy the SAVED fds 11/12 back onto
# 1/2, closing the saved copies afterwards.
exec 1>&11- 2>&12-
}
# FIX: under 'set -e' a failing getopt aborted the script before the old
# '$?' check could run; test the assignment directly instead.
if ! options=$(getopt -a -n "$(basename $0)" -l "target:,help,verbose" -- "t:hv" "$@"); then
usage
fi
eval set --$options
while [ ! -z "$1" ]
do
case "$1" in
-t) target="$2"; shift ;;
-v) verbose=1 ;;
-h) usage ;;
--target) target="$2"; shift ;;
--help) usage ;;
--verbose) verbose=1 ;;
esac
shift
done
# NOTE(review): $v is computed but never used below; mkdir builds its own -v.
v=""
if [[ $verbose ]]; then
v="v"
fi
# The destination directory is required and must already exist.
if [ -z "$target" ]; then
echo "Target path is mandatory"
exit $E_BADARGS
fi
if [ ! -d "$target" ]; then
echo "This is not a valid location"
exit $E_BADLOCATION
fi
if [[ $verbose -eq 0 ]]; then
close_stderr_stdout
fi
# clone.py emits one tab-separated "language<TAB>url" line per repository;
# clones are grouped into one directory per language.
while read -r line
do
cd "$target"
lang=$( echo "$line" | cut -f1)
url=$( echo "$line" | cut -f2)
if [ ! -d "$lang" ]; then
mkdir $([[ $verbose = 1 ]] && echo "-v") "$lang"
fi
cd "$lang"
git clone "$url"
done < <(python clone.py)
if [[ $verbose -eq 1 ]]; then
restore_stderr_stdout
fi
| true
|
2d6eb5d5048c7a8121aa679901857ad05cb1d42f
|
Shell
|
tgrahamcodes/CS4513_proj1
|
/tests/test_mount.sh
|
UTF-8
| 2,936
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
# Test harness for the goatfs "mount" command. Each scenario's expected
# output is produced by a function named "<test>-output"; test-mount diffs
# the binary's actual output against it.
FSBIN="./solution/test_goatfs"
# begin: expected output
mount-output() {
cat <<EOF
disk mounted.
2 disk block reads
0 disk block writes
EOF
}
mount-mount-output() {
cat <<EOF
disk mounted.
mount failed!
2 disk block reads
0 disk block writes
EOF
}
mount-format-output() {
cat <<EOF
disk mounted.
format failed!
2 disk block reads
0 disk block writes
EOF
}
# end
# Run one scenario: $1 is both the subcommand passed to the binary and the
# prefix of its expected-output function; $2 is the disk image. Prints a
# green "passed" on a clean diff, otherwise red "failed" plus the diff.
test-mount () {
TEST=$1
DISK=$2
echo -n "Testing $TEST on $DISK ... "
if diff -u <($FSBIN $DISK 5 $TEST 2> /dev/null) <($TEST-output) > test.log; then
echo -e "\e[32m passed\e[0m"
else
echo -e "\e[31m failed\e[0m"
cat test.log
fi
rm -f test.log
}
# test mount on image.5
test-mount mount data/image.5
test-mount mount-mount data/image.5
test-mount mount-format data/image.5
SCRATCH=$(mktemp -d)
# deleting after the test
trap "rm -fr $SCRATCH" INT QUIT TERM EXIT
# expected output for bad mount!
bad-mount-output() {
cat <<EOF
mount failed!
1 disk block reads
0 disk block writes
EOF
}
# Each crafted image below writes four little-endian 32-bit words; judging by
# the comments further down they appear to be magic, blocks, inode blocks and
# inode count — TODO confirm against the goatfs superblock layout.
# test case: bad magic number
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x10 0x34 0xf1 0xf0) > $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x05 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x01 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x80 0x00 0x00 0x00) >> $SCRATCH/image.5
# NOTE(review): this echo duplicates the "Testing ..." line that test-mount
# itself prints — the first copy is never followed by a result. Same for the
# three echoes below.
echo -n "Testing bad-mount on $SCRATCH/image.5 ... "
test-mount bad-mount $SCRATCH/image.5
# test case: not enough inodes!
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x10 0x34 0xf0 0xf0) > $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x00 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x01 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x80 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n "Testing bad-mount on $SCRATCH/image.5 ... "
test-mount bad-mount $SCRATCH/image.5
# ...error: inodes count does not match (256), should be (128)
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x10 0x34 0xf0 0xf0) > $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x05 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x02 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x80 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n "Testing bad-mount on $SCRATCH/image.5 ... "
test-mount bad-mount $SCRATCH/image.5
#...error: inodes count does not match (112), should be (128)
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x10 0x34 0xf0 0xf0) > $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x05 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x01 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n -e $(printf '\\x%x\\x%x\\x%x\\x%x' 0x70 0x00 0x00 0x00) >> $SCRATCH/image.5
echo -n "Testing bad-mount on $SCRATCH/image.5 ... "
test-mount bad-mount $SCRATCH/image.5
| true
|
16292b31f18ec38652ee0c4fa6833dca8171e8c1
|
Shell
|
korkmazkadir/sysproj-team-e
|
/code/test/integration-test/make_test.sh
|
UTF-8
| 4,304
| 3.265625
| 3
|
[
"MIT-Modern-Variant"
] |
permissive
|
#!/bin/bash
# Integration-test driver for the nachos build: runs each compiled test
# program and reports per-test OK/KO plus a failure total at the end.
#Colors
GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m' # No Color
EXECUTABLE="../../build/nachos-step6 -f -cp"
EXECUTABLE_RAND="../../build/nachos-step6 -rs -f -cp"
#EXECUTABLE_RAND_SEED="../../build/nachos-step6 -rs 18 -x"
nbFails=0
# Report OK/KO based on the exit status of the command the CALLER ran just
# before invoking this function ($? is still that status on entry), and
# count failures in nbFails.
Check_Result(){
if [ "$?" -eq "0" ]
then
printf "${GREEN}OK\n\n"
else
printf "${RED}KO\n\n"
nbFails=$(($nbFails + 1))
fi
}
printf "\n\n---- Testing ----"
printf "${NC}\n\n### STEP 2 ###\n"
#----------------------------------------------------------------------------
printf "${NC}>> stdio input test\n"
${EXECUTABLE} ../../build/test_stdio_input prog -x prog < ./io/input_test_stdio
Check_Result
printf "${NC}\n\n### STEP 3 ###\n"
#----------------------------------------------------------------------------
printf "${RED}"
printf "${NC}>> Semaphore test (takes time to execute) ${RED}\n"
${EXECUTABLE} ../../build/testsemaphores prog -x prog -rs 15
Check_Result
#----------------------------------------------------------------------------
printf "${NC}>> manyThreads1 (max nb threads) test ${RED}\n"
${EXECUTABLE} ../../build/manyThreads1 prog -rs -x prog
Check_Result
#----------------------------------------------------------------------------
printf "${NC}>> manyThreads2 (memory limitations) test ${RED}\n"
${EXECUTABLE} ../../build/manyThreads2 prog -rs -x prog
Check_Result
#----------------------------------------------------------------------------
printf "${NC}>> nullFuncP test ${RED}\n"
${EXECUTABLE} ../../build/nullFuncP prog -rs -x prog
Check_Result
#----------------------------------------------------------------------------
printf "${NC}>> userThreadExit test ${RED}\n"
${EXECUTABLE} ../../build/userThreadExit prog -rs -x prog
Check_Result
printf "${NC}\n\n### STEP 4 ###\n"
#----------------------------------------------------------------------------
printf "${NC}>> Two sub-processes with userthreads - no joins${RED}\n"
${EXECUTABLE} ../../build/twoProcessesNoJoins prog -cp ../../build/userprog0 up0 -cp ../../build/userprog1 up1 -rs 2 -x prog >./io/twoProcessesNoJoinsOut
# head -n -8 drops the trailing statistics block so only the program's own
# output remains.
output=$(head -n -8 ./io/twoProcessesNoJoinsOut)
expected="abcdMachine halting!"
#diff <(echo "$output" ) <(echo "$expected")
# NOTE(review): only the LENGTHS of output and expected are compared (the
# diff above is commented out), so any same-length output passes — probably
# intentional because thread interleaving reorders "abcd", but confirm.
outSize=${#output}
expSize=${#expected}
if [ $outSize == $expSize ]
then
printf "${GREEN}OK\n\n"
else
printf "output and expected strings do not match\n"
printf "${RED}KO\n\n"
nbFails=$(($nbFails + 1))
fi
printf "${NC}>> Two sub-processes with userthreads - first joins${RED}\n"
${EXECUTABLE} ../../build/twoProcessesFirstOneJoins tpf -cp ../../build/userprog0join up0j -cp ../../build/userprog1 up1 -rs 1 -x tpf >./io/twoProcessesFirstOneJoinsOut
output=$(head -n -8 ./io/twoProcessesFirstOneJoinsOut)
expected="abcdMachine halting!"
#diff <(echo "$output" ) <(echo "$expected")
outSize=${#output}
expSize=${#expected}
if [ $outSize == $expSize ]
then
printf "${GREEN}OK\n\n"
else
printf "output and expected strings do not match\n"
printf "${RED}KO\n\n"
nbFails=$(($nbFails + 1))
fi
#----------------------------------------------------------------------------
printf "${NC}>> Two sub-processes with userthreads - second joins${RED}\n"
${EXECUTABLE} ../../build/twoProcessesSecondOneJoins prog -cp ../../build/userprog0 up0 -cp ../../build/userprog1join up1j -rs 1 -x prog >./io/twoProcessesSecondOneJoinsOut
output=$(head -n -8 ./io/twoProcessesSecondOneJoinsOut)
expected="abcdMachine halting!"
#diff <(echo "$output" ) <(echo "$expected")
outSize=${#output}
expSize=${#expected}
if [ $outSize == $expSize ]
then
printf "${GREEN}OK\n\n"
else
printf "output and expected strings do not match\n"
printf "${RED}KO\n\n"
nbFails=$(($nbFails + 1))
fi
printf "${NC}\n\n### STEP 5 ###\n"
printf "${NC}>> File System test is working${RED}\n"
${EXECUTABLE} ../../build/filesys prog -x prog -rs 15
Check_Result
#----------------------------------------------------------------------------
printf "${NC}\n\n### STEP 6 ###\n"
python execute.py
Check_Result
#############################################
printf "${NC}\nTOTAL:\n"
if [ "$nbFails" -ne "0" ]
then
printf "${RED}KO $nbFails test(s) failed"
else
printf "${GREEN}OK all tests passed"
fi
printf "${NC}\n\n----End of Testing----\n\n"
| true
|
1d8776110620892c75967c9c134e17896f32ede9
|
Shell
|
wcybxzj/ulx
|
/ule_zhang/0.homework/7.vnc.sh
|
UTF-8
| 242
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Probe 172.16.80.2-254 concurrently and report hosts whose nmap service
# listing mentions vnc.
for octet in {2..254}
do
{
    host=172.16.80.$octet
    # Host is considered alive when a single 1-second ping succeeds.
    if ping -c1 -W1 "$host" >/dev/null 2>&1; then
        # Report only when nmap's output contains the string "vnc".
        if nmap "$host" | grep vnc >/dev/null 2>&1; then
            echo "==========="
            echo "$host vnc"
        fi
    fi
}&
done
# Wait for all background probes before announcing completion.
wait
echo "finish...."
| true
|
84f562abb476b511ae203abe4e5a14ea1b628592
|
Shell
|
hakatashi/esolang-box
|
/boxes/zig/script
|
UTF-8
| 186
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Compile the Zig source given as $1 and run the resulting binary with the
# caller's stdin, then clean up the compiler artifacts.
infile=$(realpath "$1")
ln -sf "$infile" /tmp/code.zig
# FIX: abort if the cd fails; the compile/run/cleanup below assume /tmp.
cd /tmp || exit 1
~/zig-linux-x86_64-0.10.1/zig build-exe code.zig
# FIX: 'cat - | /tmp/code' was a useless use of cat — the binary inherits
# stdin directly when run as-is.
/tmp/code
rm /tmp/code.zig code
rm -r /tmp/zig-cache
| true
|
5a519b285e120d1ddeebe39ca110650b0a6f485c
|
Shell
|
princebarpaga/BashPractice
|
/labtestpractice/findhidden.sh
|
UTF-8
| 875
| 3.75
| 4
|
[] |
no_license
|
#!/bin/bash
# Recursively print the basenames of all hidden (dot-prefixed) regular files
# under the current directory.
##https://askubuntu.com/questions/468901/how-to-show-only-hidden-files-in-terminal
# Using find and awk,
#
# find . -type f | awk -F"/" '$NF ~ /^\..*$/ {print $NF}'
# Explanation:
#
# find . -type f --> List all the files in the current directory along with it's path like,
#
# ./foo.html
# ./bar.html
# ./.foo1
# awk -F"/" '$NF ~ /^\..*$/ {print $NF}'
#
# / as field separator awk checks for the last field staring with a dot or not. If it starts with a dot, then it prints the last field of that corresponding line.
find . -type f | awk -F"/" '$NF ~ /^\..*$/ {print $NF}'
# OLD CODE (kept for reference: non-recursive ls-based variant)
# if [ -z $1 ]; then
# path=$(pwd)
# #echo "path is empty" $path
# else
# path=$1
# #echo "path is not empty" $path
# fi
# cd $path
# for prince in $( ls -a ); do
# if [ -f $prince ];then
# case $prince in
# .?*) echo $prince;;
# esac
# fi
# done
| true
|
e8b397f6ec783739f6b335ba157351e76f7c0af0
|
Shell
|
davidandreoletti/dotfiles
|
/.oh-my-shell/shellrc/plugins/docker/functions.sh
|
UTF-8
| 1,182
| 3.9375
| 4
|
[] |
no_license
|
# Print (as JSON) the containers attached to the Docker network named/ID'd
# by $1.
f_docker_showContainersInNetwork() {
    # https://stackoverflow.com/a/43904733
    # FIX: quote the argument so network names containing spaces or glob
    # characters are passed through intact.
    docker network inspect "$1" -f "{{json .Containers }}"
}
# Open an interactive /bin/sh inside the container whose `docker ps -a`
# line matches $1.
f_docker_SSHIntoContainer() {
    local containerName="$1"
    # Declare separately from the assignment so a failing pipeline is not
    # masked by `local` (which itself always returns 0).
    local dockerContainerID
    dockerContainerID=$(docker ps -a | grep "$containerName" | cut -d ' ' -f 1)
    # NOTE(review): grep matches anywhere in the ps line (image names too),
    # and multiple matches would yield several IDs — confirm the name is
    # specific enough before relying on this.
    docker exec -it "$dockerContainerID" /bin/sh
}
# Force-remove (with volumes) the single container attached to the unique
# volume matching the grep regex $1. Refuses to act when the regex matches
# more than one volume, or when the volume is used by several containers.
f_docker_RemoveContainerByVolume() {
    local volumeRegex="$1"
    # Split declaration and assignment so grep's status is not masked.
    local volumeNames
    volumeNames=$(docker volume ls --quiet | grep "$volumeRegex")
    local countVolume
    countVolume=$(echo "$volumeNames" | wc -l)
    if [ "$countVolume" -eq "1" ] && [ ! -z "$volumeNames" ];
    then
        # One volume found only
        local containerIds
        containerIds=$(docker ps -a --filter volume="$volumeNames" --format {{.ID}})
        local countContainerId
        countContainerId=$(echo "$containerIds" | wc -l)
        if [ "$countContainerId" -eq "1" ] && [ ! -z "$containerIds" ];
        then
            docker rm "$containerIds" -f --volumes
        else
            # FIX: typo "delted" -> "deleted" in the user-facing message.
            echo -e "Containers associated to volume $volumeNames: \n$containerIds \n\nA single container can be deleted at once."
        fi
    else
        echo -e "Volumes found: \n $volumeNames \n\n A single volume can be deleted at once."
    fi
}
| true
|
a22ff92706e6e9664a74139a6f290c9efde3236c
|
Shell
|
ESTbunto/Projet-syst-me-d-exploition
|
/EST.sh.txt
|
ISO-8859-2
| 1,081
| 3.96875
| 4
|
[] |
no_license
|
#!/bin/bash
# Simple multiple-choice quiz: reads one question per line from $quiz_file,
# shows three choices, reads the player's answer and tallies the score.
# (User-facing messages are in French and left untouched.)
# constants
quiz_file=ESTqcm.txt
score=0
# Make sure the quiz file exists before starting.
if [ ! -f $quiz_file ]
then
echo "Fichier QCM introuvable !"
exit 1
fi
# game loop (loop over quiz file line/line)
# fd 9 is used for the file so that the inner `read -p` still reads stdin.
while read -u9 line
do
# Parse the current line: space-separated fields are question, choice1,
# choice2, choice3, solution.
# NOTE(review): with -d' ' a question containing spaces is truncated to its
# first word — confirm the quiz-file format is single-word fields.
question=`echo $line | cut -f1 -d' '`
choice1=`echo $line | cut -f2 -d' '`
choice2=`echo $line | cut -f3 -d' '`
choice3=`echo $line | cut -f4 -d' '`
solution=`echo $line | cut -f5 -d' '`
# Display the question and its three choices.
echo "$question?"
echo "- $choice1"
echo "- $choice2"
echo "- $choice3"
# Read the player's answer.
read -p "- Votre rsponse: " player_choice
# Check the answer and update the score.
if [ "$player_choice" == "$solution" ]
then
score=$(( ++score ))
echo "c'est correcte (^_^)"
else
echo "fausse rponse (-_-). La correcte est $solution"
fi
echo
done 9< $quiz_file
# Display the final score (denominator is the number of quiz lines).
echo "Votre score est $score/`wc -l $quiz_file`"
| true
|
b27993a9be6d5105814c5cba497526c5af7269dc
|
Shell
|
comentarismo/adex-adview
|
/addToAdexIpfs.sh
|
UTF-8
| 346
| 2.53125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Deploy ./dist and the pin script to a remote host, then run the pin
# script there.
# Usage: ./addToAdexIpfs.sh <username> <hostname>
USERNAME=$1
HOSTNAME=$2
SCRIPT='/home/data/adex-adview/addAndPinIpfs.sh'
# FIX: fail fast on missing arguments instead of rsync-ing to a malformed
# "@:" target.
if [ -z "$USERNAME" ] || [ -z "$HOSTNAME" ]; then
    echo "Usage: $0 <username> <hostname>" >&2
    exit 1
fi
# npm run-script build
rsync -av ./dist "${USERNAME}@${HOSTNAME}":/home/data/adex-adview
rsync -av --chmod=Du=rwx,Dg=rx,Do=rx,Fu=rwx,Fg=r,Fo=r ./addAndPinIpfs.sh "${USERNAME}@${HOSTNAME}":/home/data/adex-adview
ssh -l "${USERNAME}" "${HOSTNAME}" "${SCRIPT}"
| true
|
254a6f103af1b0135374f72ea5acbb3eeef8db48
|
Shell
|
guillaume-ggo/shell-scripts
|
/bash/tokenisation_with_sed.sh
|
UTF-8
| 706
| 3.921875
| 4
|
[] |
no_license
|
#!/bin/bash
# Substitute environment variables from <TARGET_ENV>.properties into
# technical.properties.template (producing technical.properties), then check
# that TARGET_HOST matches HOST_NAME as a sanity test of the substitution.
TARGET_ENV=$1
if [ -z "${TARGET_ENV}" ]; then
echo "The TARGET_ENV variable is empty."
exit 1
fi
if [ ! -f ${TARGET_ENV}.properties ]; then
echo "The ${TARGET_ENV}.properties file does not exist."
exit 1
fi
# We have to export variables to be used with envsubst command
# (set -a auto-exports everything assigned by the sourced file).
set -a
source ${TARGET_ENV}.properties
# Another way to do it with sed command with the GNU extension enabled
# NOTE(review): the GNU sed 'e' flag EXECUTES each rewritten line
# (echo "<line>") through a shell, which is what expands the $VARS — but it
# also means any $(...) or backticks in the template run as shell commands.
# Command injection if the template is untrusted; envsubst is safer.
sed -nr 's/^(.*)$/echo "\1"/ep' < technical.properties.template > technical.properties
source technical.properties
if [ "${TARGET_HOST}" = "${HOST_NAME}" ]; then
echo "Variable substitution is ok."
else
echo "TARGET_HOST=${TARGET_HOST} and HOST_NAME=${HOST_NAME}: different value"
exit 1
fi
exit 0
| true
|
817118cac956bcdb48ffbf68ee10128646ff09c3
|
Shell
|
boundless-exchange/web
|
/scripts/test:style
|
UTF-8
| 246
| 2.921875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Lint JS/JSX sources with eslint: lint the files given as arguments, or
# every .js under config/ and .js/.jsx under src/ when called without args.
set -e
source ./scripts/include/node
FILES=("${@}")
if [[ "${#FILES[@]}" = "0" ]]; then
  # FIX: the previous unquoted $(find ...) expansions word-split on
  # whitespace; append the results line by line so paths with spaces
  # survive as single array elements.
  while IFS= read -r f; do FILES+=("$f"); done < <(find config -name "*.js")
  while IFS= read -r f; do FILES+=("$f"); done < <(find src -name "*.js")
  while IFS= read -r f; do FILES+=("$f"); done < <(find src -name "*.jsx")
fi
eslint "${FILES[@]}"
| true
|
189d533edd1b52eb8f105ab363f61d023094afa8
|
Shell
|
TaichiN/kernel_build
|
/crespo/make-ramdisk.sh
|
UTF-8
| 437
| 2.984375
| 3
|
[] |
no_license
|
#!/bin/bash
# Build ramdisk.cpio.gz for $DEVICE from $BUILD_ROOT_DIR/$DEVICE/ramdisk.
# Expects BUILD_ROOT_DIR, DEVICE and TARGET_DIR to be set in the environment.
cp -r "$BUILD_ROOT_DIR/$DEVICE/ramdisk" "$TARGET_DIR/ramdisk"
[ $? -ne 0 ] && echo "Error: failed to copy ramdisk." && exit 1
# FIX: abort when the cd fails; previously the chmod/cpio steps would have
# operated on whatever the current directory happened to be.
cd "$TARGET_DIR/ramdisk" || exit 1
# make sure all directories are created because git doesn't save empty directories
mkdir -p data dev proc sbin sys system
chmod 750 init*
chmod 750 sbin/adbd
chmod 644 default.prop
chmod 640 "fstab.$DEVICE"
chmod 644 ueventd*
find . | cpio -o -H newc | gzip > ../ramdisk.cpio.gz
| true
|
9c343641a1a6f33265a721c9c244f6719e3896e0
|
Shell
|
leimark/fibonacci
|
/scripts/fibonaccid
|
UTF-8
| 739
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
# chkconfig:- 80 80
# Init file for Fibonacci service daemon
#
# Description: Fibonacci service deamon
#
# processname: fibonaccid
servicename="fibonacci"
prog="/usr/local/bin/fibonacci"
log="/var/log/fibonacci.log"
# Launch the daemon in the background, appending stdout+stderr to the log.
start() {
nohup $prog >> $log 2>&1 &
}
# Kill every process whose ps line contains $prog.
# NOTE(review): ps|grep|kill -9 is blunt — it SIGKILLs immediately and can
# match unrelated processes containing the path string; the commented pgrep
# variant below has the same problems minus the grep-self-match issue.
stop() {
ps -ef |grep $prog |grep -v "grep" |awk '{print $2}'|xargs kill -9
#pgrep $prog |xargs kill -s 9
}
# Report whether a $prog process is running, based on the exit status of
# the grep -v pipeline (0 = at least one matching line survived).
status() {
ps -ef |grep $prog |grep -v "grep"
if [ $? != 0 ]; then
echo "Fibonacci service is stopped. "
else
echo "Fibonacci service is running. "
fi
}
# Standard SysV-style entry points.
case "$1" in
start)
start
;;
stop)
stop
;;
reload|restart)
stop
start
;;
status)
# NOTE(review): $SNAME is never defined in this script; status() ignores
# its arguments anyway, so this is harmless but misleading.
status $SNAME
;;
*)
echo $"Usage: $0 {start|stop|restart|status}"
exit 1
esac
| true
|
d41b494909438abb7eed0a537cd53213c35afc34
|
Shell
|
rtrouton/rtrouton_scripts
|
/rtrouton_scripts/check_apfs_encryption/check_apfs_encryption.sh
|
UTF-8
| 2,079
| 4.25
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Report the FileVault/APFS encryption status of the boot volume on macOS
# 10.13+; prints a single human-readable line and always exits 0.
# Determine OS version
# Save current IFS state
OLDIFS=$IFS
IFS='.' read osvers_major osvers_minor osvers_dot_version <<< "$(/usr/bin/sw_vers -productVersion)"
# restore IFS to previous state
IFS=$OLDIFS
# NOTE(review): ERROR is initialized but never set to anything else, so the
# script always exits 0.
ERROR=0
# Checks to see if the OS on the Mac is 10.13 or higher.
# If it is not, the following message is displayed without quotes:
#
# "APFS Encryption Not Available For This Version Of macOS"
if [[ ( ${osvers_major} -eq 10 && ${osvers_minor} -lt 13 ) ]]; then
echo "APFS Encryption Not Available For This Version Of macOS"
fi
if [[ ( ${osvers_major} -eq 10 && ${osvers_minor} -ge 13 ) || ( ${osvers_major} -eq 11 ) ]]; then
# If the OS on the Mac is 10.13 or higher, check to see if the
# boot drive is formatted with APFS or HFS+
boot_filesystem_check=$(/usr/sbin/diskutil info / | awk '/Type \(Bundle\)/ {print $3}')
# If the drive is formatted with APFS, the fdesetup tool will
# be available and is able to display the encryption status.
if [[ "$boot_filesystem_check" = "apfs" ]]; then
# If encrypted, the following message is
# displayed without quotes:
# "FileVault is On."
#
# If encrypting, the following message is
# displayed without quotes:
# "Encryption in progress:"
# How much has been encrypted of of the total
# amount of space is also displayed.
#
# If decrypting, the following message is
# displayed without quotes:
# "Decryption in progress:"
# How much has been decrypted of of the total
# amount of space is also displayed
#
# If not encrypted, the following message is
# displayed without quotes:
# "FileVault is Off."
# xargs flattens fdesetup's multi-line output onto one line so the awk
# pattern below can see either phrase regardless of which line it is on.
ENCRYPTSTATUS=$(fdesetup status | xargs)
# The awk pattern deliberately includes the surrounding spaces:
# "Encryption " OR " Decryption". Empty match => neither in progress.
if [[ -z $(echo "$ENCRYPTSTATUS" | awk '/Encryption | Decryption/') ]]; then
# Steady state: the first line carries "FileVault is On."/"Off."
ENCRYPTSTATUS=$(fdesetup status | head -1)
echo "$ENCRYPTSTATUS"
else
# In progress: the last line carries the progress message.
ENCRYPTSTATUS=$(fdesetup status | tail -1)
echo "$ENCRYPTSTATUS"
fi
else
echo "Unable to display encryption status for filesystems other than APFS."
fi
fi
exit $ERROR
| true
|
6a9178fadacd1dc18dba41d36cd280a65abc7729
|
Shell
|
Otus-DevOps-2018-02/wildermesser_infra
|
/config-scripts/install_mongodb.sh
|
UTF-8
| 631
| 3.375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Install MongoDB 3.2 from the official repo on Ubuntu xenial, start/enable
# the mongod service, and report whether it is running.
echo "Setting mongodb repo"
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927
sudo bash -c 'echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" > /etc/apt/sources.list.d/mongodb-org-3.2.list'
echo "Installing mongodb"
sudo apt update
sudo apt install -y mongodb-org
echo "Enabling and starting mongodb"
sudo systemctl start mongod
sudo systemctl enable mongod
mongodb_status=$(sudo systemctl | grep mongod | awk '{print $4}')
# FIX: '[ $mongodb_status='running' ]' was a single-word test, which is
# always true — the "not running" branch was unreachable. Compare the two
# strings explicitly.
if [ "$mongodb_status" = "running" ]
then
echo "Mongodb is running"
else
echo "Mongodb is not running, check systemctl status mongod"
fi
| true
|
c9fda3e44d2ac8fcad47e1770c5dd797141d0f64
|
Shell
|
rgbyk/color
|
/bin/lib/kill.sh
|
UTF-8
| 562
| 2.734375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
#######################################################################
## @rgbyk/color/bin/lib/kill.sh
##
## Kill any running nodemon/jekyll watchers started by the dev tooling.
#######################################################################
# 1. $ chmod +x ./bin/lib/kill.sh
# 2. $ ./bin/lib/kill.sh
# 3. $ "kill": "chmod +x ./bin/lib/kill.sh && ./bin/lib/kill.sh"
set -e
. ./bin/lib/config.sh
echo $white_ "... $ kill.sh;" $reset
# FIX: '&>' is a bashism; under #!/bin/sh (dash et al.) it parses as '&'
# plus a redirection of an empty command, backgrounding the pipeline at the
# wrong point. Use the portable '>/dev/null 2>&1' form.
ps aux | grep nodemon | awk '{print $2}' | xargs kill -9 >/dev/null 2>&1 &
ps aux | grep jekyll | awk '{print $2}' | xargs kill -9 >/dev/null 2>&1 &
echo $green_ "... $ kill.sh;\n" $reset
| true
|
d683c8d67797e022fe4cc51b4beebf006e94e47c
|
Shell
|
michalcza/scripting
|
/Automation/syncUSB.sh
|
UTF-8
| 1,403
| 3.625
| 4
|
[] |
no_license
|
#!/bin/sh
###################################################################
#Script Name :syncUSB.sh
#Description :Rsync between two folders when USB mounted
#Created :22 November 2018
#Args :
#Author :Michal Czarnecki
#Email :mczarnecki@gmail.com
#GitHub :http://www.github.com/michalcza
###################################################################
#To Do :Execute on mount
###################################################################
#Status :Development
###################################################################
#
# Set exit code to assume bad exit
exitcode=1
# Declare variables
USB="/Volumes/ESD-USB"
# NOTE(review): $devices and the exclude* variables are declared but never
# used below — confirm whether they were meant to feed rsync --exclude.
devices="diskutil list | grep -i external"
local="/Desktop/USBSync"
# $sync intentionally holds a command + options and is expanded unquoted.
sync="rsync -a --inplace"
location="$HOME${local}"
exclude1=".Spotlight-V100"
exclude2=".Trashes"
exclude3=".fseventsd"
# Test if local and remote folders/devices exist.
if test -e "$USB" && test -e "$location"
then
echo "Tested for $location, found it."
echo "Tested for $USB, found it."
echo "Continuing with code 0"
# Rsync USB>LOCAL and if success, Rsync LOCAL>USB
$sync "$location" "$USB" && $sync "$USB" "$location" && echo "Rsync between $USB and $location has completed."
exitcode=0
else
echo "Tested for $location, didn't find it."
echo "Tested for $USB, didn't find it"
echo "Exiting with code 1"
exitcode=1
fi
# FIX: the script used to unconditionally reset exitcode=0 here and then
# fall off the end (exiting with the last echo's status), so failures were
# never reported. Propagate the real code instead.
exit $exitcode
| true
|
bf9f353d6b74c2836f9b8f75ed5c7dc54a2752de
|
Shell
|
danzimm/introcs-drunken-octo-bear
|
/init.sh
|
UTF-8
| 278
| 2.703125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Initialize the repo checkout: fetch submodules and symlink the data files
# into the repository root. Must be run from the repo's base directory.
CDIR=`pwd`
if [ ! -e "hmwk.scala" ]; then
  echo "You must be in the base directory of the repo in order to run this"
  # FIX: the script previously printed the warning but carried on, creating
  # broken symlinks in whatever directory it was run from. Abort instead.
  exit 1
fi
git submodule update --init --recursive
ln -s datafiles/*.data .
ln -s datafiles/students_*.txt .
ln -s datafiles/categories_*.txt .
| true
|
32a23b24c473758956addb9e1f588b11d0926fcb
|
Shell
|
IshtarGate/dockerScripts
|
/intialize-docker.sh
|
UTF-8
| 1,747
| 2.953125
| 3
|
[] |
no_license
|
# NOTE(review): this file is a command transcript / runbook rather than an
# unattended script — it has no shebang, ssh-logs-in twice, and contains an
# interactive `exit`. Run the sections by hand, in order, instead of
# executing the file directly.
## make sure your EC2 instance has the correct security group and is open on the connecting port
# ------ Typical ~/.ssh/config ------
## Personal
# Host jahdocker #my docker dev
# Hostname 34.195.153.103
# User ubuntu
# IdentityFile ~/.ssh/personal/jah-docker.pem
# ------ Install Docker Non-Sudo ------
## login
ssh jahdocker
## Install Docker
sudo apt update
sudo apt install apt-transport-https ca-certificates
sudo apt-key adv \
--keyserver hkp://ha.pool.sks-keyservers.net:80 \
--recv-keys 58118E89F3A912897C070ADBF76221572C52609D
# uname -r
# lsb_release -rs
# add docker-engine to ppa sources
echo "deb https://apt.dockerproject.org/repo ubuntu-xenial main" | sudo tee /etc/apt/sources.list.d/docker.list
sudo apt update
sudo apt-get install docker-engine
sudo service docker start
# Uncomment to test docker
# sudo docker run hello-world
## make docker not require "sudo"
sudo groupadd docker
sudo gpasswd -a $(whoami) docker
sudo service docker restart
# log out to get ready for next ssh (group membership takes effect on login)
exit
# ------ ------
# ------ Set Up Docker and Jenkins Master ------
# log in
ssh jahdocker
# install aws cli
sudo apt install awscli
# docker create data volume
docker volume create --name jenkins_home
# as a test to check the contents or modify jenkins_home
# PROOVE THIS CODE!!!
# NOTE(review): the restore steps below look unfinished — "FULL-----" is a
# placeholder name, and this tar invocation as written would treat the
# volume path as the archive. Verify before use.
aws s3 cp s3://dephinitive-backups/jenkins/FULL----- /tmp/backup/
tar -xzf /var/lib/docker/volumes/dataVolumeTest/_data/ /tmp/backup/FULL----
# start up jenkins as master with a
docker volume create --name jenkins_home
docker run -p 8080:8080 --name jenkins-master -p 50000:50000 -d -v jenkins_home:/var/jenkins_home jenkins
docker cp jenkins-master:/var/jenkins_home/secrets/initialAdminPassword /tmp/initialAdminPassword
cat /tmp/initialAdminPassword
# ------ ------
| true
|
1b431944d1b54822c6802779effeaa41f5cca08e
|
Shell
|
mosbth/dbwebb-cli
|
/dbwebb2-validate.bash
|
UTF-8
| 18,226
| 2.734375
| 3
|
[
"MIT"
] |
permissive
|
# --------------- DBWEBB-VALIDATE MAIN START HERE ---------------
#
# External tools
#
# Each pair below names the external lint/minify/check command used by the
# validate script and its default command-line options; the *_CONFIG
# variables are filled in later by setDefaultConfigFiles().
#HTMLHINT="dbwebb-htmlhint"
HTMLHINT="htmlhint"
HTMLHINT_OPTIONS=
HTMLHINT_CONFIG=
CSSLINT="csslint"
CSSLINT_OPTIONS="--quiet"
STYLELINT="stylelint"
STYLELINT_OPTIONS=""
SCSSLINT="stylelint"
SCSSLINT_OPTIONS=""
JSHINT="jshint"
ESLINT="eslint"
JSCS="jscs"
JSCS_OPTIONS="--verbose"
JSONLINT="jsonlint"
JSONLINT_OPTIONS="--quiet"
HTML_MINIFIER="html-minifier"
CLEANCSS="cleancss"
CLEANCSS_OPTIONS="--inline none"
UGLIFYJS="uglifyjs"
UGLIFYJS_OPTIONS="--mangle --compress --screw-ie8 --comments"
PYLINT="pylint"
PYLINT_OPTIONS="-r n -s n"
PHP="php"
PHP_OPTIONS="--syntax-check"
PHPMD="phpmd"
PHPMD_OPTIONS="text"
PHPCS="phpcs"
PHPCS_OPTIONS=""
# file(1) is used twice: once to detect CRLF line endings, once for a BOM.
FILE_CRLF="file"
FILE_CRLF_OPTIONS=""
FILE_BOM="file"
FILE_BOM_OPTIONS=""
PHPMINIFY="php"
PHPMINIFY_OPTIONS="--strip"
CHECKBASH="shellcheck"
CHECKBASH_OPTIONS="--shell=bash --exclude=SC2002,SC1091"
CHECKSH="shellcheck"
CHECKSH_OPTIONS="--shell=sh --exclude=SC2002,SC1091"
#YAML="dbwebb-js-yaml"
#YAML_OPTIONS="--silent"
YAML="js-yaml"
YAML_OPTIONS=""
# Exclude these paths/filenames from tools processing
# (patterns are pre-escaped with backslashes for later unquoted expansion).
#EXCLUDE_PATHS='\*/webgl/\* \*/libs/\* \*/lib/\* \*/node_modules/\*'
EXCLUDE_PATHS='\*/example/webgl/\* \*/libs/\* \*/lib/\* \*/node_modules/\* \*/platforms/\* \*/plugins/\* \*/docs/api/\* \*/vendor/\* \*/3pp/\* \*/example/lekplats/\* \*/css/anax-grid/\* \*/me/anax-flat/\* \*/cache/\* \*/build/\* \*/.git/\* \*/slide/\*'
EXCLUDE_FILES='phpliteadmin\* \*.min.\* \*.tpl.php font-awesome.css lessc.inc.php'
INCLUDE_PATHS='' #'\*/platforms/browser/www/\*'
#EXCLUDE_PATHS="*/example/webgl/* */libs/* */lib/* */node_modules/* */platforms/* */plugins/* */docs/api/* */vendor/* */3pp/* */example/lekplats/* */css/anax-grid/* */me/anax-flat/* */cache/* */build/* */.git/* */slide/*"
#EXCLUDE_FILES="phpliteadmin* *.min.* *.tpl.php font-awesome.css lessc.inc.php"
#
# Set default configurations
#
# Point each tool at the course repository's config file when
# $DBW_COURSE_DIR is set; otherwise fall back to built-in defaults and
# disable eslint. Tools whose per-course config file is absent are either
# given a neutral config (/dev/null for csslint) or disabled outright
# (eslint, stylelint/scsslint).
#
function setDefaultConfigFiles()
{
if [[ $DBW_COURSE_DIR ]]; then
if [ -f "$DBW_COURSE_DIR/.htmlhintrc" ]; then
HTMLHINT_CONFIG="--config '$DBW_COURSE_DIR/.htmlhintrc'"
fi
HTML_MINIFIER_CONFIG="--config-file '$DBW_COURSE_DIR/.html-minifier.conf'"
PYLINT_CONFIG="--rcfile '$DBW_COURSE_DIR/.pylintrc'"
PHPMD_CONFIG="'$DBW_COURSE_DIR/.phpmd.xml'"
PHPCS_CONFIG="--standard='$DBW_COURSE_DIR/.phpcs.xml'"
if [ -f "$DBW_COURSE_DIR/.csslintrc" ]; then
CSSLINT_CONFIG="$DBW_COURSE_DIR/.csslintrc"
else
CSSLINT_CONFIG="/dev/null"
fi
JSCS_CONFIG="--config=$DBW_COURSE_DIR/.jscsrc"
if [ ! -f "$DBW_COURSE_DIR/.eslintrc.json" ]; then
DISABLE_ESLINT=true
fi
if [ ! -f "$DBW_COURSE_DIR/.stylelintrc.json" ]; then
DISABLE_STYLELINT=true
DISABLE_SCSSLINT=true
fi
else
# No course directory: eslint cannot run without a config, and phpmd
# falls back to its built-in rule sets.
DISABLE_ESLINT=true
PHPMD_CONFIG="cleancode,codesize,controversial,design,naming,unusedcode"
fi
}
#
# Check for installed tools
#
#
# Print the version of every validation/publishing tool to verify the
# environment. The $TOOL variables are intentionally unquoted: a tool
# setting may contain a command plus arguments. The third argument to
# checkCommandWithVersion is a pipeline fragment that extracts the version
# number from the tool's output.
#
function checkInstalledValidateTools
{
    printf "Check for dbwebb tools.\n"
    printf " dbwebb: %s\n" "$( checkCommandWithVersion dbwebb "--version" "| cut -d ' ' -f 3" )"
    printf " dbwebb-validate: %s\n" "$( checkCommandWithVersion dbwebb-validate "--version" "| cut -d ' ' -f 3" )"
    printf " dbwebb-inspect: %s\n" "$( checkCommandWithVersion dbwebb-inspect "--version" "| cut -d ' ' -f 3" )"
    printf "Check for installed validation tools.\n"
    printf " htmlhint: %s\n" "$( checkCommandWithVersion $HTMLHINT "--version" )"
    printf " csslint: %s\n" "$( checkCommandWithVersion $CSSLINT "--version" )"
    printf " stylelint: %s\n" "$( checkCommandWithVersion $STYLELINT "--version" )"
    printf " SCSSLINT: %s\n" "$( checkCommandWithVersion $SCSSLINT "--version" )"
    printf " jshint: %s\n" "$( checkCommandWithVersion $JSHINT "--version" "2>&1 | cut -d ' ' -f 2" )"
    printf " eslint: %s\n" "$( checkCommandWithVersion $ESLINT "--version" )"
    printf " jscs: %s\n" "$( checkCommandWithVersion $JSCS "--version" )"
    printf " jsonlint: %s\n" "$( checkCommandWithVersion $JSONLINT "" )"
    printf " pylint: %s\n" "$( checkCommandWithVersion $PYLINT "--version" "| head -1 | cut -d ' ' -f 2" )"
    printf " php: %s\n" "$( checkCommandWithVersion $PHP "--version" "| head -1 | cut -d ' ' -f 2" )"
    printf " phpmd: %s\n" "$( checkCommandWithVersion $PHPMD "--version" "| cut -d ' ' -f 2" )"
    printf " phpcs: %s\n" "$( checkCommandWithVersion $PHPCS "--version" "| cut -d ' ' -f 3" )"
    printf " bash: %s\n" "$( checkCommandWithVersion $CHECKBASH "--version" "| head -2 | tail -1 | cut -d ' ' -f 2" )"
    printf " sh: %s\n" "$( checkCommandWithVersion $CHECKSH "--version" "| head -2 | tail -1 | cut -d ' ' -f 2" )"
    printf " yaml: %s\n" "$( checkCommandWithVersion $YAML "--version" )"
    printf " file CRLF: %s\n" "$( checkCommandWithVersion $FILE_CRLF "--version" "| head -1" )"
    printf " file BOM: %s\n" "$( checkCommandWithVersion $FILE_BOM "--version" "| head -1" )"
    printf "Check for installed publishing tools.\n"
    printf " html-minifier: %s\n" "$( checkCommandWithVersion $HTML_MINIFIER "--version" "| cut -d ' ' -f 2" )"
    printf " cleancss: %s\n" "$( checkCommandWithVersion $CLEANCSS "--version" )"
    printf " uglifyjs: %s\n" "$( checkCommandWithVersion $UGLIFYJS "--version" "| cut -d ' ' -f 2" )"
    printf " phpminify: %s\n" "$( checkCommandWithVersion $PHPMINIFY "--version" "| head -1 | cut -d ' ' -f 2" )"
    printf "Check for other tools.\n"
    printf " node: %s\n" "$( checkCommandWithVersion "node" "--version" )"
    printf " npm: %s\n" "$( checkCommandWithVersion "npm" "--version" )"
    printf " babel: %s\n" "$( checkCommandWithVersion "babel" "--version" " | cut -d ' ' -f 1" )"
    printf " babel-node: %s\n" "$( checkCommandWithVersion "babel-node" "--version" )"
    printf " python3: %s\n" "$( checkCommandWithVersion "python3" "--version" " | cut -d ' ' -f 2" )"
    printf " pip3: %s\n" "$( checkCommandWithVersion "pip3" "--version" " | cut -d ' ' -f 2" )"
    printf " docker: %s\n" "$( checkCommandWithVersion "docker" "--version" " | cut -d ' ' -f 3" )"
    printf " docker-compose: %s\n" "$( checkCommandWithVersion "docker-compose" "--version" " | cut -d ' ' -f 3" )"
}
#
# Create a find expression for validate and publish
#
#
# Build, as a string intended for eval, a find(1) command that selects the
# files a tool should process below $1, optionally limited to extension $2.
# Honours $DBW_COURSE_DIR/.dbwebb/validate.exclude when present; otherwise the
# hardcoded EXCLUDE_PATHS/EXCLUDE_FILES globs are used.
#
function getFindExpression
{
    local dir="$1"
    local extension="$2"
    local includeExclude
    local exclude
    local findExtension=

    if [ -f "$DBW_COURSE_DIR/.dbwebb/validate.exclude" ]; then
        # Exclude-file syntax: '-pattern' -> -not -path, '+pattern' -> -o -path,
        # '--pattern' -> unconditional -not -path (collected separately below),
        # '#' starts a comment.
        #includeExclude="$( grep -v "^#" "$DBW_COURSE_DIR/.dbwebb-validate.exclude" | sed "s/^-\(.*\)/-o -not -path \"\1\"/g" | sed "s/^+\(.*\)/-o -path \"\1\"/g" | tr "\n" " " )"
        includeExclude="$( grep -v "^#" "$DBW_COURSE_DIR/.dbwebb/validate.exclude" | grep -v "^--" | sed "s/^-\(.*\)/-not -path \"\1\"/g" | sed "s/^+\(.*\)/-o -path \"\1\"/g" | tr "\n" " " )"
        # Trim trailing whitespace. NOTE(review): the here-string is unquoted,
        # which also collapses internal whitespace runs — appears intentional
        # since the result is a single command-line fragment.
        includeExclude="$( sed -e 's/[[:space:]]*$//' <<<${includeExclude} )"
        if [ ! -z "$includeExclude" ]; then
            includeExclude="\( $includeExclude \)"
        fi
        exclude="$( grep "^--" "$DBW_COURSE_DIR/.dbwebb/validate.exclude" | sed "s/^--\(.*\)/-not -path \"\1\"/g" | tr "\n" " " )"
        exclude="$( sed -e 's/[[:space:]]*$//' <<<${exclude} )"
        if [ ! -z "$exclude" ]; then
            exclude="\( $exclude \)"
        fi
    else
        # Hardcoded include exclude expressions
        includeExclude="$( printf " -not -path %s " $( echo $EXCLUDE_PATHS ) )"
        includeExclude="$includeExclude $( printf " -not -name %s " $( echo $EXCLUDE_FILES ) )"
        #includePaths=$( printf " -path %s " $( echo $INCLUDE_PATHS ) )
    fi

    if [ ! -z "$extension" ]; then
        findExtension="-name \"*.$extension\""
    fi

    # The caller evals this string; quoting inside it is therefore literal.
    echo "find \"$dir/\" $includeExclude -type f $findExtension" "$exclude"
}
#
# Perform validation tests
#
#
# Run one validation tool over every matching file below a directory.
#
# Arguments: $1 directory, $2 tool command, $3 file extension,
#            $4 tool options, $5 output redirection/filter appended to the
#            command line, $6 non-empty => assert() checks exit status only.
# Reads globals: DBW_COURSE_DIR, optDryRun, optOnly.
#
function validateCommand()
{
    local dir="$1"
    local cmd="$2"
    local extension="$3"
    local options="$4"
    local output="$5"
    local onlyExitStatus="$6"
    local counter=0
    local findExpression=

    # Skip (with a note) when the tool is not installed.
    if hash "$cmd" 2>/dev/null; then
        printf "\n *.$extension using $cmd"

        # If within course repo, use relative links in find
        if [[ $DBW_COURSE_DIR ]]; then
            pushd "$DBW_COURSE_DIR" > /dev/null
            dir=".${dir#$DBW_COURSE_DIR}"
        fi

        findExpression="$( getFindExpression "$dir" "$extension" )"
        [[ $optDryRun ]] && printf "\n%s" "$findExpression"

        # Iterate one filename per line so paths with spaces survive.
        OIFS="$IFS"
        IFS=$'\n'
        for filename in $( eval $findExpression ); do
            if [[ $optDryRun ]]; then
                printf "\n%s" "$cmd $options '$filename' $output"
            else
                # Bug fix: quote $optOnly — unquoted, '[ -z ... ]' errored
                # out on multi-word values instead of evaluating the test.
                if [ -z "$optOnly" ]; then
                    assert 0 "$cmd $options '$filename' $output" "$cmd failed: '$filename'" "$onlyExitStatus"
                elif [ "$extension" == "$optOnly" ]; then
                    assert 0 "$cmd $options '$filename' $output" "$cmd failed: '$filename'" "$onlyExitStatus"
                fi
            fi
            counter=$(( counter + 1 ))
            printf "."
        done
        IFS="$OIFS"

        [[ $DBW_COURSE_DIR ]] && popd &> /dev/null
        printf " ($counter)"
    else
        printf "\n *.$extension (skipping - $cmd not installed)"
    fi
}
#
# Perform validation tests
#
#
# Run every enabled validation tool over a directory. A tool runs when
# ENABLE_ALL is set or its DISABLE_* flag is empty. The last argument of
# several calls is a shell fragment appended after the tool invocation; the
# PIPESTATUS/test trick preserves the tool's own exit status through greps.
#
function validate()
{
    local dir="$1"
    [[ $ENABLE_ALL || ! $DISABLE_HTMLHINT ]] && validateCommand "$dir" "$HTMLHINT" "html" "$HTMLHINT_OPTIONS $HTMLHINT_CONFIG" '| grep -v "No problem." | grep -v "Config loaded." | grep -v "Scan " | grep -v "Scanned "; test ${PIPESTATUS[0]} -eq 0'
    [[ $ENABLE_ALL || ! $DISABLE_CSSLINT ]] && validateCommand "$dir" "$CSSLINT" "css" "$CSSLINT_OPTIONS $( cat "$CSSLINT_CONFIG" )"
    [[ $ENABLE_ALL || ! $DISABLE_STYLELINT ]] && validateCommand "$dir" "$STYLELINT" "css" "$STYLELINT_OPTIONS" "" ""
    [[ $ENABLE_ALL || ! $DISABLE_SCSSLINT ]] && validateCommand "$dir" "$SCSSLINT" "scss" "$SCSSLINT_OPTIONS" "" ""
    [[ $ENABLE_ALL || ! $DISABLE_JSHINT ]] && validateCommand "$dir" "$JSHINT" "js"
    [[ $ENABLE_ALL || ! $DISABLE_ESLINT ]] && validateCommand "$dir" "$ESLINT" "js"
    [[ $ENABLE_ALL || ! $DISABLE_JSCS ]] && validateCommand "$dir" "$JSCS" "js" "$JSCS_OPTIONS $JSCS_CONFIG < /dev/null" "" "onlyExitStatus"
    [[ $ENABLE_ALL || ! $DISABLE_JSONLINT ]] && validateCommand "$dir" "$JSONLINT" "json" "$JSONLINT_OPTIONS" "" ""
    #validateCommand "$dir" "$JSCS" "js" "$JSCS_OPTIONS $JSCS_CONFIG" ""
    # pylint runs on both .py and .cgi files.
    [[ $ENABLE_ALL || ! $DISABLE_PYLINT ]] && validateCommand "$dir" "$PYLINT" "py" "$PYLINT_OPTIONS $PYLINT_CONFIG" '|& grep -v "Using config file"; test ${PIPESTATUS[0]} -eq 0'
    [[ $ENABLE_ALL || ! $DISABLE_PYLINT ]] && validateCommand "$dir" "$PYLINT" "cgi" "$PYLINT_OPTIONS $PYLINT_CONFIG" '|& grep -v "Using config file"; test ${PIPESTATUS[0]} -eq 0'
    [[ $ENABLE_ALL || ! $DISABLE_PHP ]] && validateCommand "$dir" "$PHP" "php" "$PHP_OPTIONS" "> /dev/null"
    [[ $ENABLE_ALL || ! $DISABLE_PHPMD ]] && validateCommand "$dir" "$PHPMD" "php" "" "$PHPMD_OPTIONS $PHPMD_CONFIG"
    [[ $ENABLE_ALL || ! $DISABLE_PHPCS ]] && validateCommand "$dir" "$PHPCS" "php" "$PHPCS_OPTIONS $PHPCS_CONFIG"
    [[ $ENABLE_ALL || ! $DISABLE_CHECKBASH ]] && validateCommand "$dir" "$CHECKBASH" "bash" "$CHECKBASH_OPTIONS"
    [[ $ENABLE_ALL || ! $DISABLE_CHECKSH ]] && validateCommand "$dir" "$CHECKSH" "sh" "$CHECKSH_OPTIONS"
    [[ $ENABLE_ALL || ! $DISABLE_YAML ]] && validateCommand "$dir" "$YAML" "yml" "$YAML_OPTIONS" "> /dev/null"
    # Line-ending and BOM checks: fail when grep finds CRLF/BOM output.
    [[ $ENABLE_ALL || ! $DISABLE_FILE_CRLF ]] && validateCommand "$dir" "$FILE_CRLF" "" "$FILE_CRLF_OPTIONS" '| grep CRLF; test $? -eq 1' ""
    [[ $ENABLE_ALL || ! $DISABLE_FILE_BOM ]] && validateCommand "$dir" "$FILE_BOM" "" "$FILE_BOM_OPTIONS" '| grep BOM; test $? -eq 1' ""
}
#
# Perform publish
#
#
# Run one minification tool in place over every matching file below $1.
#
# Arguments: $1 directory, $2 tool command, $3 extension, $4 options,
#            $5 output flag; the file name is appended again after it so the
#            tool reads and rewrites the same file.
# Reads globals: DBW_COURSE_DIR, DBW_PUBLISH_ROOT, optDryRun.
#
function publishCommand()
{
    local dir="$1"
    local cmd="$2"
    local extension="$3"
    local options="$4"
    local output="$5"
    local counter=0

    if hash "$cmd" 2>/dev/null; then
        printf "\n *.$extension using $cmd"

        # Find real path to cmd
        # ('set $cmd' re-splits the value so $1 becomes the bare command even
        # when $cmd also carries arguments).
        set $cmd
        local cmdPath="$( get_realpath "$( which $1 )" )"

        # If within course repo, use relative links in find
        if [[ $DBW_COURSE_DIR && $DBW_PUBLISH_ROOT ]]; then
            pushd "$DBW_PUBLISH_ROOT" > /dev/null
            [[ $optDryRun ]] && printf "\nCurrent dir: %s" "$(pwd)"
            dir=".${dir#$DBW_PUBLISH_ROOT}"
        fi

        findExpression="$( getFindExpression "$dir" "$extension" )"
        [[ $optDryRun ]] && printf "\n%s" "$findExpression"

        # NOTE(review): unlike validateCommand, this loop does not set
        # IFS=$'\n', so filenames containing whitespace would word-split.
        for filename in $( eval $findExpression ); do
            if [[ $optDryRun ]]; then
                printf "\n%s" "$cmdPath $options $filename $output $filename"
            else
                assert 0 "$cmdPath $options $filename $output $filename" "$cmd failed: $filename"
            fi
            counter=$(( counter + 1 ))
            printf "."
        done

        [[ $DBW_COURSE_DIR ]] && popd &> /dev/null
        printf " ($counter)"
    else
        printf "\n *.$extension (skipping - $cmd not installed)"
    fi
}
#
# Publish all
#
#
# Mirror a source tree to the publish target with rsync, then minify the
# published copy and fix its permissions.
#
# Arguments: $1 source directory, $2 target directory.
# Exits with status 2 on invalid arguments.
# Reads globals: optDryRun, RSYNC_CHMOD, noMinification, ENABLE_ALL and the
# per-tool DISABLE_*/option variables.
#
publish()
{
    local from="$1"
    local to="$2"

    # Validate arguments before touching the filesystem.
    if [ -z "$from" ]; then
        printf "\n$MSG_FAILED Publish with empty source directory: '$from'\n"
        exit 2
    elif [ ! -d "$from" ]; then
        printf "\n$MSG_FAILED Publish without valid from directory: '$from'\n"
        exit 2
    elif [ -z "$to" ]; then
        printf "\n$MSG_FAILED Publish with empty target directory: '$to'\n"
        exit 2
    elif [ ! -d "$( dirname "$to" )" ]; then
        # Bug fix: the command substitution was unquoted, so a target whose
        # parent path contains whitespace broke this test expression.
        printf "\n$MSG_FAILED Publish to nonexisting directory: '$to'\n"
        exit 2
    fi

    # $RSYNC_CHMOD is intentionally unquoted: it may hold zero or several
    # rsync options.
    if [[ $optDryRun ]]; then
        printf "\nrsync -a $RSYNC_CHMOD --delete %s %s" "$from/" "$to/"
    else
        rsync -a $RSYNC_CHMOD --delete "$from/" "$to/"
    fi

    # Minify the published copy in place unless disabled.
    if [[ ! $noMinification ]]; then
        [[ $ENABLE_ALL || ! $DISABLE_HTML_MINIFIER ]] && publishCommand "$to" "$HTML_MINIFIER" "html" "$HTML_MINIFIER_CONFIG $HTML_MINIFIER_OPTIONS" "--output"
        [[ $ENABLE_ALL || ! $DISABLE_CLEANCSS ]] && publishCommand "$to" "$CLEANCSS" "css" "$CLEANCSS_OPTIONS" "-o"
        [[ $ENABLE_ALL || ! $DISABLE_UGLIFYJS ]] && publishCommand "$to" "$UGLIFYJS" "js" "$UGLIFYJS_OPTIONS --output" "--"
        [[ $ENABLE_ALL || ! $DISABLE_PHPMINIFY ]] && publishCommand "$to" "$PHPMINIFY" "php" "$PHPMINIFY_OPTIONS" "> /tmp/$$; mv /tmp/$$ "
    fi

    publishChmod "$to"
}
#
# Process options
#
# Parse command-line options. Everything that is not an option is the single
# command/path item to validate.
DBW_PUBLISH_ROOT=
while (( $# ))
do
    case "$1" in
        --check | -c)
            checkInstalledValidateTools
            exit 0
            ;;
        --publish | -p)
            optPublish="yes"
            shift
            ;;
        --no-validate)
            noValidate="yes"
            shift
            ;;
        --no-minification)
            noMinification="yes"
            shift
            ;;
        --publish-root)
            # Root used to relativize paths when publishing.
            DBW_PUBLISH_ROOT="$( get_realpath "$2" )"
            if [ ! -d $( dirname "$DBW_PUBLISH_ROOT" ) ]; then
                badUsage "$MSG_FAILED --publish-root '$DBW_PUBLISH_ROOT' is not a valid directory."
                exit 2
            fi
            shift
            shift
            ;;
        --publish-to)
            DBW_PUBLISH_TO="$( get_realpath "$2" )"
            if [ ! -d $( dirname "$DBW_PUBLISH_TO" ) ]; then
                badUsage "$MSG_FAILED --publish-to '$DBW_PUBLISH_TO' is not a valid directory."
                exit 2
            fi
            shift
            shift
            ;;
        --course-repo)
            DBW_COURSE_DIR="$( get_realpath "$2" )"
            if [ ! -d "$DBW_COURSE_DIR" ]; then
                badUsage "$MSG_FAILED --course-repo '$DBW_COURSE_DIR' is not a valid directory."
                exit 2
            fi
            # Get the name of the course as $DBW_COURSE
            sourceCourseRepoFile
            shift
            shift
            ;;
        --only)
            # Restrict validation to a single file extension.
            optOnly="$2"
            shift
            shift
            ;;
        --dry | -d)
            optDryRun="yes"
            shift
            ;;
        --help | -h)
            usage
            exit 0
            ;;
        --version | -v)
            version
            exit 0
            ;;
        --selfupdate)
            selfupdate dbwebb-validate
            exit 0
            ;;
        *)
            # First bare word is the command/path; a second one is an error.
            if [[ $command ]]; then
                badUsage "$MSG_FAILED Too many options/items and/or option not recognized."
                exit 2
            else
                command=$1
            fi
            shift
            ;;
    esac
done

#
# Validate (and publish) the path choosen
#
dir="$( getPathToDirectoryFor "$command" )"
dir="$( get_realpath "$dir" )"
if [ ! -d "$dir" ]; then
    badUsage "$MSG_FAILED Directory '$command' is not a valid directory."
    exit 2
fi

#
# Source validate config files
# (later files override earlier ones: global, then $HOME, then course repo).
#
configFile=".dbwebb-validate.config"
[[ -f $DBW_VALIDATE_CONFIGFILE ]] && . "$DBW_VALIDATE_CONFIGFILE"
[[ -f $HOME/$configFile ]] && . "$HOME/$configFile"
[[ -f $DBW_COURSE_DIR/$configFile ]] && . "$DBW_COURSE_DIR/$configFile"
setDefaultConfigFiles

if [[ ! $noValidate ]]; then
    printf "Validating '%s'." "$dir"
    validate "$dir"
fi

if [[ $optPublish ]]; then
    if [ -z "$DBW_PUBLISH_TO" ]; then
        printf "\n$MSG_FAILED Missing target dir for publish, not supported.\n"
        exit 2
    fi
    if [ ! -d $( dirname "$DBW_PUBLISH_TO" ) ]; then
        printf "\n$MSG_FAILED Target dir for publish is not a valid directory '%s'.\n" "$DBW_PUBLISH_TO"
        exit 2
    fi
    printf "\nPublishing to '%s' using root '%s'." "$DBW_PUBLISH_TO" "$DBW_PUBLISH_ROOT"
    publish "$dir" "$DBW_PUBLISH_TO"
fi

assertResults
| true
|
0a28307bea8d645fe279b4fbea19c65b9c317197
|
Shell
|
russoz/homedir
|
/bin/git-sync-origin
|
UTF-8
| 1,195
| 3.984375
| 4
|
[] |
no_license
|
#!/bin/bash
#
# git-sync-origin
#
# local $alexei_znamensky, '<', 'russoz [AT] CPAN DOT ORG'
#
# This script will automate some steps for a developer working on a forked
# git repository:
#
# 1. Pull (and merge) changes from origin into master
# 2. Push the master branch (with the changes) into origin
#
# There is no dependency on github, but, as of now, the
# repository names are hard-coded as reasonably as::
#
# - origin: your forked repository
# - master: your local working copy
#
# usage: git-sync-origin
#
# Print all arguments as one line on stderr.
msg() {
    printf '%s\n' "$*" >&2
}

# Print all arguments on stderr, then abort with status 1.
die() {
    msg "$@"
    exit 1
}
o=origin   # remote to sync with
m=master   # branch to sync

##############################################################################

# Optional first argument. NOTE(review): $url is never used below — kept for
# backward compatibility with existing invocations.
url=${1-}
[ "$#" -gt 0 ] && shift

# Script name for messages. Bug fix: the original '${0##.*/}' only stripped a
# prefix when $0 began with a literal dot (e.g. "./script"); '${0##*/}' is the
# correct basename expansion for any invocation path.
me=${0##*/}

msg '=== Checking requisites'

git remote show | grep -q "$o" || {
    die "*** FAILED There is no tracked repository named '$o'. Aborting!!!"
}

# " master$" matches both "  master" and "* master" in 'git branch' output.
git branch | grep -q " $m\$" || {
    die "*** FAILED There is no branch named '$m'. Aborting!!!"
}

msg "=== Pulling changes from remote repository $o into $m"
git pull "$o" "$m" || die "*** FAILED to pull from $o to $m"

msg "=== Pushing the merged tree into $o"
git push "$o" "$m" || die "*** FAILED to push changes into $o"
| true
|
72096670a48e8e1608b1091aadf8e3e3a79dd345
|
Shell
|
cncf/cnf-testbed
|
/comparison/box-by-box-kvm-docker/Pktgen/traffic_test.sh
|
UTF-8
| 289
| 2.984375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#! /bin/bash
# Run the nfvbench PDR/NDR rate test $iterations times and append each run's
# output to a per-iteration log file under results/.
iterations=1
# nfvbench is executed inside its docker container with the mounted config.
nfvbench="sudo docker exec -it nfvbench nfvbench -c /tmp/nfvbench/nfvbench_config.cfg"
prefix="aug_21_test" # Change to something identifiable
for iter in $(seq 1 $iterations); do
    $nfvbench --rate pdr_ndr | sudo tee -a results/${prefix}_iter_${iter}.log
done
| true
|
43ed297c2c04435817fa441db9ef1f4b5a803443
|
Shell
|
microscum/WebLogic_Notes
|
/practices/.internal/cleanenv.sh
|
UTF-8
| 3,671
| 3.1875
| 3
|
[] |
no_license
|
# ------------------------------------------------------------------------
# -- DISCLAIMER:
# -- This script is provided for educational purposes only. It is NOT
# -- supported by Oracle World Wide Technical Support.
# -- The script has been tested and appears to work as intended.
# -- You should always run new scripts on a test instance initially.
# --
# ------------------------------------------------------------------------
#This script cleans up the entire environment for all the WebLogic courses
#that are deployed to the module. It enforces execution on the host01 machine
#by only the oracle user
labs=/practices
bindir=/practices/.internal
# Guard scripts: abort unless run as oracle on host01.
$bindir/checkoracle.sh
$bindir/checkhost01.sh
##
## DO ALL CHANGES REGARDLESS OF HOST
##
############ GENERAL CLEANUP FOR ALL COURSES
echo "Cleaning General Common..."
#Delete setenv.sh line from .bashrc, remove part file, and remove server_migration from .bash_profile
$bindir/stripbashrc.sh
ssh host02 "$bindir/stripbashrc.sh"
#Forget Firefox logins
$bindir/firefoxforget.sh
ssh host02 "$bindir/firefoxforget.sh"
#Reset all database tables (for all courses)
ssh host02 "$bindir/cleanAllDB.sh"
#Set original OHS configuration (host01 only)
cp $bindir/mod_wl_ohs.conf.orig /u01/domains/ohs/config/fmwconfig/components/OHS/instances/ohs1/mod_wl_ohs.conf
#Strip all course bin paths out of PATH
# NOTE(review): this only affects the PATH of this script's process (and its
# children); it does not persist in the caller's shell unless sourced.
export PATH=`echo $PATH | sed "s=/practices/part1/bin:==" | sed "s=/practices/part2/bin:==" | sed "s=/practices/jms/util:==" | sed "s=/practices/tune/bin:==" | sed "s=/practices/tshoot/bin:=="`
############ ADMIN I CLEANUP
echo "Cleaning Admin I..."
#Delete all -orig folders (/u01/app/fmw-orig, /u01/app/ohs-orig, jdk, etc) host01 only
rm -rf /u01/app/fmw-orig
rm -rf /u01/app/ohs-orig
rm -rf /u01/app/jdk-orig
#Delete Grinder from /home/oracle (both hosts)
rm -rf /home/oracle/grinder*
ssh host02 "rm -rf /home/oracle/grinder*"
#Remove benefits.com from /etc/hosts for virtual host lab
# NOTE(review): this re-runs stripbashrc.sh — it does not obviously touch
# /etc/hosts as the comment above claims; confirm the helper's behavior.
$bindir/stripbashrc.sh
ssh host02 "$bindir/stripbashrc.sh"
############ ADMIN II CLEANUP
echo "Cleaning Admin II..."
#Reset all labs and domain
/practices/part2/bin/cleanup.sh -all
#Database
#ssh host02 ". /home/oracle/.oraenv; cd /practices/part2/bin; ./deleteDatabase.sh"
ssh host02 ". /home/oracle/.oraenv; cd /practices/part2/bin; ./createDatabase.sh"
############ JMS CLEANUP
echo "Cleaning JMS..."
#Remove all domains that are created in course
rm -rf /u01/domains/jms/*
############ TUNE CLEANUP
echo "Cleaning Tune..."
#Reset all labs and domain
/practices/tune/bin/cleanup.sh -all
#Remove the setUserOverrides.sh script that cleanup.sh retains
rm -f /u01/domains/tune/wlsadmin/bin/setUserOverrides.sh
#Recreate and populate database for course
#Script knows how to do this from either host
ssh host02 ". /home/oracle/.oraenv; /practices/tune/bin/backup/BuildAuctionDatabase.sh"
#TBD: Difference between Admin II createDatabase.sh (servers online?) and BuildAuctionDatabase.sh (offline)?
#Create database uses sqlplus to create tables/triggers... build uses JPA offline to populate with data
############ TSHOOT CLEANUP
echo "Cleaning Tshoot..."
#Remove all domains that are created in course
rm -rf /u01/domains/tshoot/*
#Remove managed template jar file
rm -f /tmp/wlsadmin.jar
#Remove RDA output
rm -rf /u01/app/fmw/oracle_common/rda/output
rm -rf /u01/app/fmw/oracle_common/rda/RDA_output_host01.zip
#Remove practice06-01 files
rm -f /home/oracle/server1_threads.txt
#Remove practice10-1/2 files
#TBD: mod_wl_ohs.conf file?
rm -rf /practices/tshoot/practice10-01/grinderlog
rm -rf /practices/tshoot/practice10-02/grinderlog
| true
|
1fe79cdcb4e8cf7602aaa17b817d140195856bb7
|
Shell
|
scromityiii/PolyBandit
|
/level11.sh
|
UTF-8
| 3,029
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash
# Set up PolyBandit level 11: write the README banner, stash the next level's
# password in a hidden file, and lock the level directory down with ACLs.

echo "******************************************************************" >> /home/level11/README.txt;
echo "* Welcome to PolyBandit. This is a polymorphic clone of *" >> /home/level11/README.txt;
echo "* Overthewire.org's Bandit exercise. The object is to figure out *" >> /home/level11/README.txt;
echo "* what the password is for the next level, then log into that *" >> /home/level11/README.txt;
echo "* next level's account using SSH. *" >> /home/level11/README.txt;
echo "* *" >> /home/level11/README.txt;
echo "* You are at Level 11 *" >> /home/level11/README.txt;
echo "* *" >> /home/level11/README.txt;
echo "* The password for the next level is in a *hidden file* *" >> /home/level11/README.txt;
echo "* called inhere.txt *" >> /home/level11/README.txt;
echo "* *" >> /home/level11/README.txt;
echo "* When you get the password for the next level, log in to the *" >> /home/level11/README.txt;
echo "* next level with the command: *" >> /home/level11/README.txt;
echo "* ssh level12@localhost *" >> /home/level11/README.txt;
echo "* *" >> /home/level11/README.txt;
echo "******************************************************************" >> /home/level11/README.txt;

# Show the banner on every login.
echo "cat /home/level11/README.txt" >> /home/level11/.bashrc

# Hide the next level's password (quoted so whitespace/glob characters in the
# password survive — the original unquoted expansion would mangle them).
echo "$level12_pass" > /home/level11/.inhere.txt;
chown level11:level11 /home/level11/.inhere.txt;

#chown level0: /home/level1;
#chmod -rwxr-x--- /home/level1;

#This block prevents the host system's user (the one that executes PolyBandit) as well as anybody other than the level itself from being able to read into
#this level's directory and its subdirectories. In essence, no cheating, you must play the game in order, and you cannot tamper with any game files unless they are in
#the level you are currently in. Permissions are set to block others from reading and writing to level11.
setfacl -m u:level11:r-x /home/level11;

# Deny every other level account (0-10 and 12-101) read access: execute-only
# ACLs on all files and directories. The two original loops were identical
# apart from their ranges, and their trailing '(($i+1))' statements were
# no-ops (the brace expansion already drives the loop), so they are merged.
for i in {0..10} {12..101}; do
    level="level${i}"
    find /home/level11 -type f -exec setfacl -m u:$level:--x {} \;
    find /home/level11 -type d -exec setfacl -m u:$level:--x {} \;
done

setfacl -m u:$USER:--x /home/level11;
| true
|
b5ca9226a89545a6954e7481ba1e310cb40b8724
|
Shell
|
vkonst/ether-connection-tests
|
/tcpdump/captureNetwTraf.sh
|
UTF-8
| 305
| 3.125
| 3
|
[] |
no_license
|
#!/bin/sh
# Capture 30 seconds of traffic to/from a host (default localhost:8545) into a
# file, using tcpdump rotation (-G 30 -W 1) to stop after one 30s window.
#
# usage: captureNetwTraf.sh [host[:port]] [outfile]

[ -z "$1" ] && host="localhost:8545" || host="$1";
[ -z "$2" ] && fname="tcpdump-$(date +%s).lst" || fname="$2";

# Split an optional ":port" suffix. Bug fix: the old 'port=${host#*:*}'
# left port equal to the whole host string when no colon was present,
# yielding a bogus "and port <host>" capture filter.
case $host in
    *:*)
        port=${host##*:}
        host=${host%%:*}
        ;;
    *)
        port=
        ;;
esac

filter="host ${host}"
[ -z "$port" ] || filter="${filter} and port ${port}";

sudo tcpdump -s0 -U -n -w - -G 30 -W 1 -i en0 "${filter}" > "${fname}"
| true
|
0fa47d5f47723a4a926ba23dce70da6faa84556d
|
Shell
|
adobe/experience-platform-streaming-connect
|
/docker-entrypoint.sh
|
UTF-8
| 3,458
| 3.296875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
##
# Copyright 2019 Adobe. All rights reserved.
# This file is licensed to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
##
# Prefix used by the logging helpers below.
tag="[docker-entrypoint.sh]"

# info MSG — informational message on stdout.
function info {
    echo "$tag (INFO) : $1"
}

# warn MSG — warning on stderr (diagnostics belong on stderr, not stdout).
function warn {
    echo "$tag (WARN) : $1" >&2
}

# error MSG — error on stderr.
function error {
    echo "$tag (ERROR): $1" >&2
}
set -e
# Verify envs
# Required: the Kafka bootstrap servers list must be provided.
if [[ -z "$CONNECT_BOOTSTRAP_SERVERS" ]]; then
    error "EMPTY ENV 'CONNECT_BOOTSTRAP_SERVERS'"; exit 1
fi

# Optional settings: warn and unset them so later config generation skips
# the empty values. Bug fix: the original 'unset $VAR' expanded the (empty)
# value and therefore unset nothing — 'unset' takes the variable NAME.
if [[ -z "$CONNECT_REST_ADVERTISED_HOST_NAME" ]]; then
    warn "EMPTY ENV 'CONNECT_REST_ADVERTISED_HOST_NAME'"; unset CONNECT_REST_ADVERTISED_HOST_NAME
fi

if [[ -z "$CONNECT_REST_ADVERTISED_PORT" ]]; then
    warn "EMPTY ENV 'CONNECT_REST_ADVERTISED_PORT'"; unset CONNECT_REST_ADVERTISED_PORT
fi

if [[ -z "$CONNECT_GROUP_ID" ]]; then
    warn "EMPTY ENV 'CONNECT_GROUP_ID'. USE DEFAULT VALUE"; unset CONNECT_GROUP_ID
fi
# Expose JMX for monitoring, advertised at the connect host name.
export KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote=true \
 -Djava.rmi.server.hostname=${CONNECT_REST_ADVERTISED_HOST_NAME} \
 -Dcom.sun.management.jmxremote.rmi.port=9999 \
 -Dcom.sun.management.jmxremote.authenticate=false \
 -Dcom.sun.management.jmxremote.ssl=false"
#
# Configure the log files ...
#
if [[ -n "$CONNECT_LOG4J_LOGGERS" ]]; then
    sed -i -r -e "s|^(log4j.rootLogger)=.*|\1=${CONNECT_LOG4J_LOGGERS}|g" $CONNECT_LOG_CFG
    unset CONNECT_LOG4J_LOGGERS
fi
# Map every CONNECT_LOG4J_* env var onto a log4j property (lowercased, '_'
# replaced by '.') in $CONNECT_LOG_CFG, replacing existing entries in place.
# NOTE(review): this while loop runs in a pipeline subshell, so the trailing
# 'unset $env_var' does not affect the parent shell's environment.
env | grep '^CONNECT_LOG4J' | while read -r VAR;
do
    env_var=`echo "$VAR" | sed -r "s/([^=]*)=.*/\1/g"`
    prop_name=`echo "$VAR" | sed -r "s/^CONNECT_([^=]*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
    prop_value=`echo "$VAR" | sed -r "s/^CONNECT_[^=]*=(.*)/\1/g"`
    if egrep -q "(^|^#)$prop_name=" $CONNECT_LOG_CFG; then
        #note that no config names or values may contain an '@' char
        sed -r -i "s@(^|^#)($prop_name)=(.*)@\2=${prop_value}@g" $CONNECT_LOG_CFG
    else
        echo "$prop_name=${prop_value}" >> $CONNECT_LOG_CFG
    fi
    # Hide values of properties listed in SENSITIVE_PROPERTIES from the log.
    if [[ "$SENSITIVE_PROPERTIES" = *"$env_var"* ]]; then
        echo "--- Setting logging property from $env_var: $prop_name=[hidden]"
    else
        echo "--- Setting logging property from $env_var: $prop_name=${prop_value}"
    fi
    unset $env_var
done
if [[ -n "$LOG_LEVEL" ]]; then
    sed -i -r -e "s|=INFO, stdout|=$LOG_LEVEL, stdout|g" $CONNECT_LOG_CFG
    sed -i -r -e "s|^(log4j.appender.stdout.threshold)=.*|\1=${LOG_LEVEL}|g" $CONNECT_LOG_CFG
fi
# Extend CLASSPATH for custom connectors
export CLASSPATH=${CLASSPATH}:${KAFKA_HOME}/connectors/*
# Configure properties
# Same env-to-property mapping as above, for all other CONNECT_* variables,
# written to the main connect config file. ${!env_var} is bash indirection
# (the value of the variable whose name is in env_var).
echo -e "\n" >> $CONNECT_CFG
for VAR in `env`
do
    if [[ $VAR =~ ^CONNECT_ && ! $VAR =~ ^CONNECT_CFG && ! $VAR =~ ^CONNECT_BIN ]]; then
        connect_name=`echo "$VAR" | sed -r "s/CONNECT_(.*)=.*/\1/g" | tr '[:upper:]' '[:lower:]' | tr _ .`
        env_var=`echo "$VAR" | sed -r "s/(.*)=.*/\1/g"`
        if egrep -q "(^|^#)$connect_name=" $CONNECT_CFG; then
            sed -r -i "s@(^|^#)($connect_name)=(.*)@\2=${!env_var}@g" $CONNECT_CFG
        else
            echo "$connect_name=${!env_var}" >> $CONNECT_CFG
        fi
    fi
done
# Hand off to the container's command (typically the connect launcher).
exec "$@"
| true
|
2c9a4ffd431c6d0d9a37fef31bb35e9ce89e7e4b
|
Shell
|
mateusz/colander
|
/test.sh
|
UTF-8
| 400
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
# Benchmark a slow PHP endpoint with ApacheBench while a pseudo-load script
# runs in the background, then render distribution/sequence graphs into a
# directory named after the test.

# Raise the open-file limit so the load run does not exhaust descriptors.
ulimit -n 10000
if [ -z "$1" ]; then
    echo "Please provide name for the test that will be used as the graph directory name."
    exit 2
fi
./test_pseudo.sh &
PID=$!
ab -c 1 -n 200 -g ab.tsv http://localhost:8888/slow.php\?f\=1
# NOTE(review): nothing here starts siege directly — presumably
# test_pseudo.sh does; confirm, otherwise this killall is a leftover.
killall siege
kill $PID
mkdir -p "$1"
# Render the gnuplot graphs and move them into the test directory.
gnuplot < graphs/distribplot.txt
mv distrib.jpg "$1/distrib.jpg"
gnuplot < graphs/seqplot.txt
mv seq.jpg "$1/seq.jpg"
| true
|
26eb4c6d8aa02e33ec3b8184334941a798836758
|
Shell
|
goneri/idrac-kvm-keyboard-fix
|
/javaws-idrac
|
UTF-8
| 431
| 3.234375
| 3
|
[] |
no_license
|
#! /bin/sh
#
# Use wrapper to work around Dell iDRAC keycode mangling

# Shared library (LD_PRELOAD) that rewrites the mangled keycodes.
HACK=$HOME/local/lib/keycode-hack.so

# Locate a javaws binary: plain 'javaws' first, then icedtea-web's name.
JAVAWS=$(which javaws)
if test -z "${JAVAWS}" ; then
    JAVAWS=$(which javaws.itweb)
    if test -z "${JAVAWS}" ; then
        echo "ERROR: cannot find a suitable javaws" 1>&2
        # Bug fix: the original only printed the error and fell through to
        # 'exec' with an empty command, which would exec the user's first
        # argument instead of failing.
        exit 1
    fi
fi

if [ ! -f "$HACK" ]
then
    echo "ERROR: $HACK not available" 1>&2
    exit 1
fi

LD_PRELOAD="$HACK" exec "${JAVAWS}" "$@"
| true
|
eff560797fa096fae936b62ab219e71f1a508aec
|
Shell
|
GoogleCloudPlatform/training-data-analyst
|
/courses/machine_learning/deepdive/10_recommend/endtoend/scripts/generate_traffic.sh
|
UTF-8
| 1,563
| 3.75
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euo pipefail
# util.sh supplies get_project_id (reads the active gcloud project).
source util.sh

main() {
    # Get our working project, or exit if it's not set.
    local project_id=$(get_project_id)
    if [[ -z "$project_id" ]]; then
        exit 1
    fi
    local url="https://${project_id}.appspot.com/recommendation?userId=${USER_ID}&numRecs=${NUM_RECS}"
    echo "This command will exit automatically in $TIMEOUT_SECONDS seconds."
    echo "Generating traffic to ${url}..."
    echo "Press Ctrl-C to stop."
    local endtime=$(($(date +%s) + $TIMEOUT_SECONDS))
    # Send queries repeatedly until TIMEOUT_SECONDS seconds have elapsed.
    while [[ $(date +%s) -lt $endtime ]]; do
        curl "$url" &> /dev/null
    done
}

# Defaults.
USER_ID="5448543647176335931"
NUM_RECS=5
TIMEOUT_SECONDS=$((5 * 60)) # Timeout after 5 minutes.

# Optional single positional argument overrides USER_ID.
if [[ "$#" == 0 ]]; then
    : # Use defaults.
elif [[ "$#" == 1 ]]; then
    USER_ID="$1"
else
    echo "Wrong number of arguments specified."
    echo "Usage: generate_traffic.sh [user_id]"
    exit 1
fi

main "$@"
| true
|
6505664b1322a54199056a0c0cfd156cff00cbe7
|
Shell
|
kssfilo/pushpop
|
/push
|
UTF-8
| 2,049
| 3.921875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#set -x
# push/pop: a file "stack" — push moves files into a numbered chunk directory
# under $pushdir; pop moves the newest chunk's files back.
showusage=0
# Stack location; override with the PUSHDIR environment variable.
pushdir=${PUSHDIR-$HOME/.push}
ispop=0     # 1 when invoked as 'pop' (decided from $0 below)
debug=0     # -v: verbose tracing
count=1     # -n: number of chunks to pop
popall=0    # -a: pop the whole stack
list=0      # -l: list stack contents
drop=0      # -d: discard instead of restoring when popping
# Emit a debug message when verbose mode (-v) is enabled.
trace() {
    if [ "$debug" -eq 1 ]; then
        echo $*
    fi
}
# The same script acts as 'push' or 'pop' depending on its invocation name.
cmd=$(basename $0)
if [ $cmd == 'pop' ];then ispop=1;fi

while getopts "hvn:adl" i
do
    case $i in
        h)
            showusage=1
            ;;
        v)
            debug=1
            ;;
        n)
            count=$OPTARG
            ;;
        a)
            popall=1
            ;;
        d)
            drop=1
            ;;
        l)
            list=1
            ;;
    esac
done
shift $((OPTIND-1))

# Show usage when asked, or when 'push' is called with nothing to push.
if [ $showusage -eq 1 -o \( \( $cmd == 'push' -a $# -eq 0 \) -a $list -eq 0 \) ]
then
    if [ $ispop -eq 1 ]
    then
        echo 'pop [options] '
        echo
        echo '=options'
        echo '-l:list stack'
        echo '-n <count> :pop <count> times'
        echo '-a:pop all'
        echo '-d:drop(able to use with -n/-a'
    else
        echo 'push [options] <files>'
        echo
        echo '=options'
        echo '-l:list stack'
    fi
    echo
    echo '=environments'
    echo 'PUSHDIR:stack directory(default:~/.push) '
    exit 1
fi

# All arguments must exist before anything is moved.
for i
do
    if [ ! -e "$i" ]
    then
        echo $i does not exist. 1>&2
        exit 1
    fi
done

if [ ! -d $pushdir ]
then
    trace $pushdir does not exists. make it.
    mkdir -p $pushdir
fi

# -l: list every chunk from newest (-1) to oldest.
if [ $list -eq 1 ]
then
    pushd $pushdir >/dev/null
    chunks=$(ls|sort -nr)
    depth=1
    for i in $chunks
    do
        pushd $i >/dev/null
        echo -$depth:
        ls -lA
        echo
        popd >/dev/null
        depth=$((depth+1))
    done
    popd >/dev/null
    exit
fi

if [ $ispop -eq 0 ]
then
    # push: create the next numbered chunk directory and move the files in.
    pushd $pushdir >/dev/null
    head=$(ls|sort -nr|head -n1)
    if [ "$head" == "" ];then head=0;fi
    trace HeadChunk=$head
    next=$((head+1))
    trace NextChunk=$next
    mkdir $next
    popd >/dev/null
    trace mv "$@" $pushdir/$next
    mv "$@" $pushdir/$next
else
    # pop: restore (or with -d drop) the newest chunk, $count times or all.
    while [ $count -gt 0 -o $popall -eq 1 ]
    do
        pushd $pushdir >/dev/null
        head=$(ls|sort -nr|head -n1)
        if [ "$head" == "" ]
        then
            if [ ! $popall -eq 1 ]
            then
                echo file stack is empty. 1>&2
                exit 1
            else
                exit 0
            fi
        fi
        trace HeadChunk=$head
        popd >/dev/null
        if [ $drop -ne 1 ]
        then
            trace mv $pushdir/$head/* .
            mv $pushdir/$head/* .
        fi
        trace rm -rf $pushdir/$head
        rm -rf $pushdir/$head
        count=$((count-1))
    done
fi
| true
|
7148b2cfce4f39df7f841e46c56dcaa7f8df7796
|
Shell
|
delkyd/alfheim_linux-PKGBUILDS
|
/sonarqube/PKGBUILD
|
UTF-8
| 3,048
| 3.015625
| 3
|
[] |
no_license
|
# Submitter: Atsushi Ichiki <hitotuky at gmail dot com>
# Maintainer: Max Falk <gmdfalk at gmail dot com>
# Arch Linux PKGBUILD for SonarQube: repackages the upstream zip and adds
# systemd service/sysusers/tmpfiles integration.
_pkgname=sonarqube
pkgname=${_pkgname}
pkgver=6.2
pkgrel=1
pkgdesc="An open source platform for continuous inspection of code quality"
arch=("i686" "x86_64")
url="http://www.sonarqube.org/"
license=('LGPL3')
depends=('java-runtime>=8')
optdepends=('apache: a fully featured webserver'
'mariadb: Fast SQL database server, drop-in replacement for MySQL'
'maven: a java project management and project comprehension tool'
'postgresql: A sophisticated object-relational DBMS')
backup=("etc/${_pkgname}/sonar.properties"
"etc/${_pkgname}/wrapper.conf")
conflicts=("${_pkgname}-lts")
provides=("${_pkgname}")
options=('!strip')
install=${pkgname}.install
source=("https://sonarsource.bintray.com/Distribution/${_pkgname}/${_pkgname}-${pkgver}.zip"
"${_pkgname}.service"
"${_pkgname}-tmpfile.conf"
"${_pkgname}-user.conf")
sha256sums=('6bb187b8568d493131952e3b66e65940992f1a62c7962c1bf5801f6f48f1c160'
'acf98543b635778a690c5d1a8796bf67de725eeda1c72856bd63ea148a892223'
'6e024de469ebb1bc4083274412f0a5d68d5fa511c2139ce4cb1d243c51ff9535'
'43ff10bbb495827e952225dce79da79bb800627eaa6f1d933f8f7fb408aafe6d')

package() {
    cd "${srcdir}/${_pkgname}-${pkgver}"
    # Copy everything except bin and conf to /usr/share/sonarqube.
    install -dm755 "${pkgdir}/usr/share/${_pkgname}"
    cp -dr --no-preserve=ownership {bin,data,extensions,lib,temp,web} "${pkgdir}/usr/share/${_pkgname}/"
    # Install the license.
    install -Dm644 "COPYING" "${pkgdir}/usr/share/doc/${_pkgname}/COPYING"
    # Install the configuration files to /etc/sonarqube.
    install -Dm644 "conf/sonar.properties" "${pkgdir}/etc/${_pkgname}/sonar.properties"
    install -Dm644 "conf/wrapper.conf" "${pkgdir}/etc/${_pkgname}/wrapper.conf"
    # Install the systemd configuration and service files.
    cd "${srcdir}"
    install -Dm644 "${_pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${_pkgname}.service"
    install -Dm644 "${_pkgname}-user.conf" "${pkgdir}/usr/lib/sysusers.d/${_pkgname}.conf"
    install -Dm644 "${_pkgname}-tmpfile.conf" "${pkgdir}/usr/lib/tmpfiles.d/${_pkgname}.conf"
    # Symbolic links because SonarQube expects a specific directory layout.
    ln -s "/var/log/${_pkgname}" "${pkgdir}/usr/share/${_pkgname}/logs"
    ln -s "/run/${_pkgname}" "${pkgdir}/usr/share/${_pkgname}/run"
    ln -s "/etc/${_pkgname}" "${pkgdir}/usr/share/${_pkgname}/conf"
    # Modify the service file in place to adjust the binary path to the CPU architecture. This is necessary because
    # SonarQube expects a certain directory layout. The alternative would be to patch SonarQube's config files which is messy.
    if [[ "$CARCH" == 'x86_64' ]]; then
        sed -i 's/\$ARCH/linux-x86-64/g' "${pkgdir}/usr/lib/systemd/system/${_pkgname}.service"
    elif [[ "$CARCH" == 'i686' ]]; then
        sed -i 's/\$ARCH/linux-x86-32/g' "${pkgdir}/usr/lib/systemd/system/${_pkgname}.service"
    fi
}
| true
|
a429889ec8f2cd408aac9f4cfdee0f98660d68d5
|
Shell
|
MRSD2018/reefbot-1
|
/hima_experiment/settings/vu_estimation/eth/SpectralSaliencyExtraction2.sh
|
UTF-8
| 839
| 2.765625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Run visual-utility estimation with the SpectralSaliency estimator over the
# ETH pedestrian sequences, writing results and ROS logs to OUTPUT_DIR.
OUTPUT_DIR=/data/mdesnoye/pedestrian/vu_estimation/eth/spectral_saliency
INPUT_DIRS=(/data/mdesnoye/pedestrian/eth/hima/Crossing \
/data/mdesnoye/pedestrian/eth/hima/Lowenplatz \
/data/mdesnoye/pedestrian/eth/hima/Linthescher \
)
#export CPUPROFILE=/home/mdesnoye/tmp/vu_saliency.prof
# Write the list of input sequence directories to a file consumed by the
# evaluation script.
INPUT_DIR_FILE=${OUTPUT_DIR}/input_dirs.txt
mkdir -p ${OUTPUT_DIR}
rm -f ${INPUT_DIR_FILE}
for dir in ${INPUT_DIRS[*]}
do
    echo $dir >> ${INPUT_DIR_FILE}
done
# stdout/stderr are captured to log files; __log:= sets the ROS log path.
src/EvaluateVisualUtility.py \
    --input_dirs ${INPUT_DIR_FILE} \
    --output_dir ${OUTPUT_DIR} \
    --annotations annotations.txt \
    --vu_estimator SpectralSaliency \
    --vu_estimator_wrapper RelativeEntropyVUWrapper \
    --left_image_dir left \
    --image_file_string "img_%04i.png" \
    __log:=${OUTPUT_DIR}/rosout.log > ${OUTPUT_DIR}/stdout.log 2> ${OUTPUT_DIR}/stderr.log
#unset CPUPROFILE
| true
|
6f4d3b1a533d927d83456717212e57eeb7be3ef0
|
Shell
|
mochi5o/TIL
|
/ScrapBox/Linux/summery2.sh
|
UTF-8
| 224
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/bash
# Summarize each path argument: preview the first lines of regular files,
# list the contents of directories.  Other path types are ignored.
for path in "$@"
do
  if [ -f "$path" ]; then
    echo "$path is a regular file"
    head -n 5 "$path"
  elif [ -d "$path" ]; then
    echo "$path is a directory"
    ls "$path"
  fi
done
| true
|
7ce71feb1a34e58f540d45d47a58309f6636625e
|
Shell
|
wonder2014/innocent
|
/addBadCode.sh
|
UTF-8
| 3,086
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
# Copyright [2013] [alex-ko askovacheva<<at>>gmail<<dot>>com]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Inject opaque ("bad") code into an apk:
#   baksmali -> addBadCode.py -> smali -> repackage -> re-sign.
# Usage: addBadCode.sh <path/to/app.apk>
# Timing/size numbers are appended to $WORK_DIR/addOpaquePerformance.txt.
. options.config
FULL_PATH="$1"
APK=$(basename "$FULL_PATH")
WORK_DIR=$(echo "$FULL_PATH" | sed 's|\(.*\)/.*|\1|')  # directory part of the apk path
BAKSMALI=$baksmali
SMALI=$smali
OBF="addBadCode.py"
KEY=$key
PASS=$keypass
# Record apk name and its on-disk size before modification.
echo -n "$APK" >> "$WORK_DIR/addOpaquePerformance.txt"
echo -n ' ' $(du -s "$FULL_PATH" | awk '{print $1}') >> "$WORK_DIR/addOpaquePerformance.txt"
echo 'Extracting .dex file...'
unzip "$FULL_PATH" classes.dex
APK=${APK%%????}  # strip the 4-character ".apk" extension
ORIGINAL_DEX=$APK-classes.dex
OBF_DIR=baksmali_$APK
mv classes.dex "$ORIGINAL_DEX"
echo 'Baksmaling...'
java -jar "$BAKSMALI" -o "$OBF_DIR" "$ORIGINAL_DEX"
echo 'Adding opaque code...'
(/usr/bin/time -f " \t%e \t%M" python "$OBF" "$OBF_DIR"/) &>> "$WORK_DIR/$APK-addOpaqueErr.txt"
# Exactly two lines in the log means /usr/bin/time produced only its
# elapsed/memory report and the obfuscator emitted nothing on stderr.
LINES=$(sed -n '$=' "$WORK_DIR/$APK-addOpaqueErr.txt")
if [ "$LINES" -eq 2 ]
then
	FIRST=$(sed '$d' "$WORK_DIR/$APK-addOpaqueErr.txt")
	LAST=$(sed '1d' "$WORK_DIR/$APK-addOpaqueErr.txt")
	echo -n ' ' $FIRST ' ' $LAST >> "$WORK_DIR/addOpaquePerformance.txt"
	rm "$WORK_DIR/$APK-addOpaqueErr.txt" #clear up
else
	echo 'Sorry, an error occured. Log file saved to '$WORK_DIR'/'$APK'-addOpaqueErr.txt'
	exit 1  # was a bare 'exit', which returned 0 and hid the failure from callers
fi
echo 'Smaling...'
(java -jar "$SMALI" "$OBF_DIR" -o "new-$ORIGINAL_DEX") &>> "$WORK_DIR/$APK-addOpaqueSmaliErr.txt"
LINES=$(ls -l "$WORK_DIR/$APK-addOpaqueSmaliErr.txt" | awk '{print $5}')  # byte size of the error log
rm "$ORIGINAL_DEX" #clear up
if [ "$LINES" -eq 0 ]
then
	rm "$WORK_DIR/$APK-addOpaqueSmaliErr.txt" #clear up
	echo 'Modifying bytecode...'
	# NOTE: If the modified app crashes, try replacing 1 with 0 in the line below.
	# Warning! This will suppress verification which is why is disabled by default.
	python dexBytecode.py "new-$ORIGINAL_DEX" 0
	echo 'Replacing new .dex file...'
	zip -d "$FULL_PATH" 'META-INF/*' >/dev/null 2>&1  # (pattern quoted so zip, not the shell, expands it)
	zip -d "$FULL_PATH" classes.dex
	mv "new-$ORIGINAL_DEX" classes.dex
	zip -g "$FULL_PATH" classes.dex
	rm classes.dex #clear up
	rm -r "$OBF_DIR" #clear up
	echo 'Signing apk...'
	mkdir META-INF
	zip -g "$FULL_PATH" META-INF
	rm -r META-INF #clear up
	jarsigner -sigalg MD5withRSA -digestalg SHA1 -keystore "$KEY" -storepass "$PASS" "$FULL_PATH" test
	echo 'Verifying signature...'
	jarsigner -verify -certs "$FULL_PATH"
	echo ' ' $(du -s "$FULL_PATH" | awk '{print $1}') >> "$WORK_DIR/addOpaquePerformance.txt"
	echo "" >> "$WORK_DIR/addOpaquePerformance.txt"
	echo 'Done!'
else
	rm -r "$OBF_DIR" #clear up
	echo 'Error occured while assembling with smali. Log file saved to '$WORK_DIR'/'$APK'-addOpaqueSmaliErr.txt'
	exit 1  # was a bare 'exit' (status 0) — report the failure
fi
| true
|
6c8e2e65a4aefa47606233d78c00093ef7b96c09
|
Shell
|
clarenceb/docker_playground
|
/swarm_scripts/docker_swarm_ps.sh
|
UTF-8
| 256
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Show containers running on the Docker Swarm host in a compact table.
script_dir=$(readlink -f "$(dirname "$0")")
# Pull in DOCKER_SWARM_HOST and friends.
. "${script_dir}/common_env.sh"
echo "=== Docker Swarm ==="
docker -H "${DOCKER_SWARM_HOST}" ps | perl -ne '@cols = split /\s{2,}/, $_; printf "%0s %25s %20s %30s\n", $cols[0], $cols[1], $cols[4], $cols[6]'
| true
|
6f974f85ba9c903ad05678c1be3defcefed465d9
|
Shell
|
Procrat/dotfiles
|
/bin/wallpaper
|
UTF-8
| 621
| 3.921875
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
#
# Sets the wallpaper with feh and creates a png for the screen locker
set -euo pipefail

if [[ $# -ne 1 ]]; then
    echo "Usage: $(basename "$0") file" >&2
    exit 1
fi

wallpaper="$1"
FIXED_WALLPAPER_PATH="$HOME/.local/share/wallpaper.png"

# Geometry of the first active mode reported by xrandr.  The previous
# `grep '\*' | awk '{print $1}'` printed one line per connected output,
# so $geometry became multi-line on multi-monitor setups and broke the
# convert invocation below; `exit` after the first match fixes that.
geometry="$(xrandr | awk '/\*/ { print $1; exit }')"

# Make sure the destination directory exists before convert writes to it.
mkdir -p "$(dirname "$FIXED_WALLPAPER_PATH")"

# Resize it such that the smallest dimension fits exactly, and
# cut of the extra bits of the other dimension, assuming a centered image
convert "$wallpaper" \
    -resize "${geometry}^" \
    -gravity Center -extent "${geometry}" \
    "$FIXED_WALLPAPER_PATH"

feh --bg-fill "$FIXED_WALLPAPER_PATH"
| true
|
7dc4a33ee14adda74d1d04f8eb2c56edbaa549d6
|
Shell
|
hubzero/hapi
|
/scripts/mesquite-3.03_install.sh
|
UTF-8
| 1,371
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Install Mesquite 3.03 under /apps/share64/debian7 and register an
# environ.d "use" script for it.
#
# this depends on having the following debian packages installed
#
set -e
set -x
pkgname=mesquite
VERSION=3.03
basedir=/apps/share64/debian7
pkginstalldir=${basedir}/${pkgname}
tarinstalldir=${pkginstalldir}/tars
installprefix=${pkginstalldir}/${VERSION}
downloaduri=https://github.com/MesquiteProject/MesquiteCore/releases/download/3.03/Mesquite303_Linux.tgz
tarfilename=Mesquite303_Linux.tgz
tarfilebase=Mesquite_Folder
environdir=${basedir}/environ.d
script=$(readlink -f "${0}")
installdir=$(dirname "${script}")
# mkdir -p is a no-op when the directory exists, so the previous
# explicit [[ -d ]] guards were redundant.
mkdir -p "${pkginstalldir}/tars"
mkdir -p "${installprefix}"
cd "${pkginstalldir}/tars"
# Download only once; re-runs reuse the cached tarball.
if [[ ! -e ${tarfilename} ]] ; then
    wget "${downloaduri}"
fi
rm -rf "${tarfilebase}"
tar xf "${tarfilename}"
cd "${tarfilebase}"
# copy
cp -r * "${installprefix}"
# cleanup
cd ..
rm -rf "${tarfilebase}"
# create the use script
mkdir -p "${environdir}"
cat <<- _END_ > ${environdir}/${pkgname}-${VERSION}
conflict MESQUITE_CHOICE
desc "Mesquite ${VERSION}"
help "Mesquite is modular, extendible software for evolutionary biology, designed to help biologists organize and analyze comparative data about organisms."
version=${VERSION}
location=${pkginstalldir}/\${version}
prepend MESQUITEPATH \${location}
tags DEVEL
_END_
| true
|
85003fcd15ff722b13eb65d9d0991c2652b1925d
|
Shell
|
jerome-labidurie/buddiesjewel_firmware
|
/BlinkM/ReflashBlinkM/flash.sh
|
UTF-8
| 514
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/bash
# flash a attiny45 with blinkm proprietary firmware
# firmware extracted from http://code.google.com/p/blinkm-projects/wiki/ReflashBlinkM
#
# Programmer port is $1 when given, /dev/ttyUSB0 otherwise.
USB=${1:-/dev/ttyUSB0}
PGM="avrisp -b 19200"
#PGM=stk500
# read fuses
avrdude -V -p t45 -c $PGM -P $USB -U lfuse:r:-:b -U hfuse:r:-:b -U efuse:r:-:b
# flash program & eeprom
avrdude -V -p t45 -c $PGM -P $USB -U flash:w:blinkmv1.hex:a -U eeprom:w:blinkmv1.eep
# write hfuse
avrdude -V -p t45 -c $PGM -P $USB -U hfuse:w:0xDD:m
| true
|
30ddf72a8603762d946236d88d30e8f22b78b383
|
Shell
|
rpodgorny/bootstrap
|
/atxmonc.sh
|
UTF-8
| 766
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/sh
# Installer for the atxmonc Windows service, run from Cygwin.
# NOTE: the unconditional exit below means everything after it is dead
# code, kept for reference.
set -e -x
echo "atxmon is dead"
exit 1
# ---- unreachable from here on ----
if [ -d /cygdrive/c/atxmonc ]; then
	echo 'atxmonc already installed?'
	exit 0
fi
/cygdrive/c/atxpkg/atxpkg install atxmonc --yes
cd /cygdrive/c/atxmonc
# the cat is there only to eat the possible error when grep finds nothing
mj=`ls /cygdrive/c/atx300 | grep mj | cat`
if [ "$mj" ]; then
	sed -i "s/example.com/asterix.cz/g" atxmonc.conf
	sed -i "s/;Host=.*/Host=$mj/g" atxmonc.conf
else
	echo
	echo "!!! NENASEL JSEM CISLO MJ, SPUSTIM EDITOR PRO RUCNI EDITACI. POKRACUJTE ENTEREM !!!"
	read
	nano -w atxmonc.conf
fi
# TODO: add check for sensible setting of Host=
cat atxmonc.conf
./install_service.bat
cd -
# Configure Windows service auto-restart (3x, 10-minute delay), then start it.
sc failure "atxmonc" actions= restart/600000/restart/600000/restart/600000 reset= 86400
net start atxmonc
| true
|
2b4f5e37633b5c8f7aaf024f58d8c3d2dc7d2ad4
|
Shell
|
yaswant/ysh
|
/caldat
|
UTF-8
| 3,339
| 4.125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash -e
[ -n "$DEBUG" ] && set -x
# ----------------------------------------------------------------------------
# GNU getopt is only assumed on Linux; elsewhere the raw "$@" is parsed
# directly by the while/case loop below.
if [[ "$OSTYPE" == 'linux'* ]]; then
  TEMP=$(getopt -o f:hvV --long format:,help,version,verbose \
  --name "$0" -- "$@")
  [ $? != 0 ] && { echo "Terminating..." >&2; exit 1 ; }
  eval set -- "$TEMP"
fi
# Print version banner.
version(){
cat<<EOF
${0##*/} 2019.01.1
Copyright (C) 2019 Free Software Foundation, Inc.
This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
Written by Yaswant Pradhan.
EOF
}
# Print detailed usage/help text.
usage(){
cat<<EOF
Get calendar date time from given julian day using Unix datetime.
Usage: ${0##*/} [OPTIONS] [JULDAY]
JULDAY Input Julian day (see julday(1)). Default is for system date time.
Options:
-f, --format STRING
Specify format for output caledar date. Default is ’%FT%TZ’.
See date(1) for valid date format specifications.
-h, --help
Show this help and exit.
-v, --version
Show version and exit.
-V, --verbose
Increase verbosity and show intermediate numbers used in caldat calculation.
Julian dates (abbreviated JD) are simply a continuous count of days and fractions since noon Universal Time on January 1, 4713 BC (on the Julian calendar). Almost 2.5 million days have transpired since this date.Julian dates are widely used as time variables within astronomical software. Typically, a 64-bit floating point (double precision) variable can represent an epoch expressed as a Julian date to about 1 millisecond precision. Note that the time scale that is the basis for Julian dates is Universal Time, and that 0h UT corresponds to a Julian date fraction of 0.5.
Unix time, also known as Unix Epoch time is a system for describing a point in time. It is the number of seconds that have elapsed since 00:00:00 Thursday, 1 January 1970 UTC, minus leap seconds. Every day is treated as if it contains exactly 86400 seconds, so leap seconds are to be subtracted since the epoch. The Unix epoch corresponds to the Julian day of 2440587.5
Examples:
Get julday on 26-Jan-1950 at 0:0:0:
julday 01/26/1950 -t 0:0:0
2433307.5
Convert above julian day back to calendar date:
caldat 2433307.5
1950-01-26T00:00:00Z
Get the day of year, the week day and week number for the above julian day:
caldat 2433307.5 -f "%j %A %w"
026 Thursday 4
Report bugs to <yaswant.pradhan>
EOF
}
# ----------------------------------------------------------------------------
# Parse options (already normalized by getopt on Linux).
while true; do
  case "$1" in
    -f |--format ) fmt="$2"; shift 2 ;;
    -h |--help ) usage; exit 0 ;;
    -v |--version ) version; exit 0 ;;
    -V |--verbose) verb=1; shift 1 ;;
    -- ) shift; break ;;
    * ) break ;;
  esac
done
# Default JULDAY: current UTC time converted via the external julday tool.
_julday="${1:-$(julday $(date -u +'%F %T') %f)}"
fmt=${fmt:-'%FT%TZ'}
JD_UNIX=2440587.5 # Unix epoch in days.
# Seconds elapsed between the Unix epoch and the given julian day.
seconds=$(echo "$_julday" $JD_UNIX | awk '{printf "%f", ($1 - $2) * 86400.0}')
if (( verb )); then
  echo "Unix Julian Epoch = $JD_UNIX"
  echo "Input Julian day = $_julday"
  echo "Equivalent Unix seconds = $seconds"
  echo
fi
# BSD date (-jf '%s') and GNU date (--date @N) spell "epoch seconds" differently.
case "$OSTYPE" in
  darwin*) date -u -jf '%s' "$seconds" +"$fmt" 2>/dev/null;
    ;;
  linux*) date -u --date @"$seconds" +"$fmt";
    ;;
  *) echo "$OSTYPE not supported";
    ;;
esac
| true
|
005db68eab6f26e55842397e497b00a08e033218
|
Shell
|
isaacselement/ApkSmaliJebTools
|
/apk/signapk/signapk
|
UTF-8
| 16,550
| 3.03125
| 3
|
[] |
no_license
|
#!/system/bin/sh
# signapk: on-device apk/zip signer.  This header configures where to find
# the bundled static tools (zip/openssl/busybox) and the default key pair.
TMP="/cache" # write temp files here, or try these if it won't work:
ALTTMP="/sqlite_stmt_journals /system/sd/tmp /system/sd-ext/tmp /tmp /data/local /system/sd /system/sd-ext /sdcard"
ZIP="$0-zip" # bundled zip (static uclibc/arm): not aware of any rom that has zip
#ZIP="/usr/bin/zip" # ...except those with Debian overlays.
OPENSSL="$0-openssl" # bundled openssl (static uclibc/arm): most roms have this, I think, however I've seen a few that don't
#OPENSSL="openssl" # ...ROM's openssl from $PATH (same as commenting out 'OPENSSL=...')
BUSYBOX="$0-bb" # bundled busybox (static uclibc/arm): required utils and.config settings (ENABLE_DESKTOP, CONFIG_ASH_BASH_COMPAT)
#BUSYBOX="busybox" # ...use rom's busybox from $PATH. Possibly incompatible utils. stock busybox 'unzip' will probably break
#BUSYBOX="/system/xbin/bb/busybox" # ...if $PATH is broken.
#BUSYBOX="" # ...no busybox: if unzip / sed / tr / printf / readlink / grep are in $PATH, use them.
#SED=/usr/bin/sed # Valid if no BUSYBOX is set. Also PRINTF TR GREP READLINK UNZIP
FORCEASH=1 # should stay unless #!/topofscript is changed to compatible shell. Avoids running on bionic sh
FORCEBBASH=1 # max compatiblity; script runs via (bundled) busybox ash
#######################
DEFKEYNAME="testkey" # default name of cert/key pair. script comes with AOSP testkey/media/platform/shared.
DEFKEYDIR="$0-key." # look for cert/key pairs here; 'signapk-key.(name).*'. Can change to /data/data/name.of.your.app/files/keys/
####################### One shouldn't have to edit below.
PKEY="$DEFKEYDIR$DEFKEYNAME.pk8" # generated path to default private key; 'signapk-key.testkey.pk8'.
CERT="$DEFKEYDIR$DEFKEYNAME.x509.pem" # generated path to default cert; 'signapk-key.testkey.x509.pem'
VERSION="0.3.1"
#######################
#######################
####################### Where's wald^H^H^H^Hbusybox?
# Skip tool discovery when we have already re-exec'ed under busybox ash
# (ASHFORCED is exported by the block further down).
if [ "x$ASHFORCED" != "x1" ]; then
if [ "x$BUSYBOX" != "x" ]; then
  if [ "x$BUSYBOX" != "xbusybox" ]; then
    if [ ! -e "$BUSYBOX" ]; then
      export PATH="/system/xbin:/system/xbin/bb:.:/system/bin:/data/local:/sdcard:$PATH"
      echo "Default \$BUSYBOX ($BUSYBOX) invalid, "
      if BUSYBOX=`which busybox`; then
        echo "but found: $BUSYBOX."
      else
        unset BUSYBOX
        echo "could not find alternative. Trying anyway."
      fi
    fi
  else
    if ! `which busybox 2>/dev/null 1> /dev/null`; then
      unset BUSYBOX
      echo "Warning, could not find 'busybox' in \$PATH. Trying anyway."
    fi
  fi
  BB=$BUSYBOX
fi
####################### Pointers to utils
# Fall back to $PATH openssl when the configured one is missing.
if [ "x$OPENSSL" = "x" ]; then
  OPENSSL="openssl";
else
  if [ "x$OPENSSL" != "xopenssl" ] && [ ! -e "$OPENSSL" ]; then
    OPENSSL="openssl";
  fi
fi
# Default each text utility to the plain name (run through $BB when set).
if [ "x$PRINTF" = "x" ]; then PRINTF="printf"; fi
if [ "x$TR" = "x" ]; then TR="tr"; fi
if [ "x$SED" = "x" ]; then SED="sed"; fi
if [ "x$GREP" = "x" ]; then GREP="grep"; fi
if [ "x$READLINK" = "x" ]; then READLINK="readlink"; fi
# BBZ is the busybox prefix used only for unzip invocations.
if [ "x$UNZIP" = "x" ]; then
  UNZIP="unzip"
  if [ "x$BB" != "x" ]; then
    BBZ=$BB
  fi
fi
if [ "x$ZIP" = "x" ]; then ZIP="zip"; fi
fi
####################### OCD gimp avoidance for great justice
# Re-exec this script under busybox ash when running on an incompatible
# /system/bin/sh (or always, when FORCEBBASH is set); ASHFORCED guards
# against an exec loop.
if [ "x$FORCEASH" != "x" ]; then
  if [ "x$ASHFORCED" = "x" ]; then
    myexe=$(readlink /proc/$$/exe)
    if ( [ "x$myexe" != "x$BB" ] && [ "x$myexe" = "x/system/bin/sh" ] ) || [ "x$FORCEBBASH" != "x" ]; then
      if [ "x$BB" != "x" ]; then
        export ASHFORCED=1
        export ASHFORCEDOEXE=$myexe
        export ASHFORCEDOPID=$$
        export BB BUSYBOX OPENSSL ZIP PRINTF UNZIP READLINK SED GREP TR BBZ
        exec $BB ash $0 $@
        exit $?
      else
        echo "WARNING: script is configured to use busybox's ash but cannot find busybox"
        echo "$myexe may not be compatible."
      fi
    fi
  fi
fi
####################### fooie on ash-with-bad-busybox-configs
# Bail out early if the shell lacks required features (conditionals and
# bash-style ${var:offset:len} substrings).
testcond=$(if [ ! ]; then echo success;fi)
tmpstr="test123success"
testsubst="${tmpstr:7:50}"
if [ "x$testcond" != "xsuccess" ] || [ "x$testsubst" != "xsuccess" ]; then
  echo "shell failed compatibility tests. testcond=$testcond, testsubst=$testsubst"
  exit
fi
####################### misc utility variables
# esc: an unlikely separator character carved out of the default IFS,
# used as a field separator for the parsed argument list.
esc=${IFS:2:2}
OLDIFS=$IFS
PAD="                                                                                   "
####################### debugging spew
# Write a debug trace line to stderr, but only when $DEBUG is set to
# something other than "0".  $$ identifies the (possibly re-exec'ed) shell.
dprint() {
  if [ "${DEBUG:-}" != "" ] && [ "$DEBUG" != "0" ]; then
    echo "[DEBUG $$] $*" 1>&2
  fi
}
####################### informational spew
# Progress printer: forwards its arguments to printf (so callers may embed
# \n etc. in the text) unless --quiet set $OPTquiet.
p () {
  if [ -z "${OPTquiet:-}" ]; then
    printf "$*"
  fi
}
####################### lazyness
#######################
#######################
# Abort the script if the immediately preceding command failed.
#   $1 - message to print on failure
#   $2 - optional exit code; defaults to the failed command's own status
# Must be the very next statement after the command being checked, since
# it reads $?.  With $DEBUG set it also dumps all shell variables.
ordie() {
  rc=$?
  if [ $rc -ne 0 ]; then
    if [ ! $2 ]; then
      ecode=$rc
    else
      ecode=$2
    fi
    IFS=" "
    echo "$1"
    if [ $DEBUG ]; then echo "
[VARIABLE DUMP]
" $(set | $BB $TR '\n' '\t') "
[EXIT $ecode]"; fi
    exit $ecode
  fi
}
####################### util paranoia
# Verify that each required external tool (arguments) exists and responds;
# aborts via ordie (exit 128) with the tool's own error output otherwise.
# Each tool gets a cheap probe appropriate to how it is invoked elsewhere.
testexe() {
  for i in "$@"; do
    dprint "testexe($i)"
    case $i in
      *sed) err=$(echo test|$BB $i -r 's,t,z,g' 2>&1);;
      *unzip) err=$(which $i 2>&1);;
      *zip) err=$($i -h 2>&1);;
      *openssl) err=$($i -h 2>&1);;
      *) err=$($BB $i -h 2>&1);;
    esac
    ordie "Missing or incompatibile utility: $i
Please locate. Script aborting. Error was:
$err" 128
  done
}
####################### directory paranoia
# Ensure $TMP is a writable directory; if not, scan the ALTTMP candidates
# (unless the user pinned a directory with -t/--tmp) and die when none work.
testtmp() {
  if [ ! -w "$TMP" ] || [ ! -d "$TMP" ]; then
    ORIGtmp="$TMP"
    unset TMP
    if [ ! "$OPTtmp" ]; then
      for d in $ALTTMP; do
        dprint " checking for valid tmpdir: '$d'"
        if [ -w "$d" ] && [ -d "$d" ]; then
          TMP="$d"
          break
        fi
      done
      if [ ! $TMP ]; then
        echo "Error, cannot write to '$ORIGtmp'. I also tried $ALTTMP.
Try $0 -t /directory."
        exit 128
      fi
    else
      echo "Error, cannot write to '$ORIGtmp', try $0 -t /directory."
      exit 255
    fi
  fi
}
####################### key/cert paranoia
# Resolve the key/cert pair to sign with from the -c/-k options.
#   $1 - cert option value (may be empty)   $2 - key option value (may be empty)
# With neither given, the defaults ($PKEY/$CERT) stand.  With exactly one
# given it may be a .pk8 file, a .x509.pem file, or a bare alias under
# $DEFKEYDIR; the matching counterpart path is derived from it.
# Sets the globals PKEY and CERT.
chkcert() {
  #IFS=$esc
  dprint "chkcert($1,$2)"
  if [ "x$1" = "x" ] && [ "x$2" = "x" ]; then
    dprint "using defaults"
    return
  elif [ "x$1" = "x" ] || [ "x$2" = "x" ]; then
    alias="$1$2"
    if [ "$alias" != "${alias%%.pk8}" ]; then
      PKEY="$alias"
      CERT="${alias%%.pk8}.x509.pem"
      dprint "using key '$PKEY' and cert '$CERT'"
    elif [ "$alias" != "${alias%%.x509.pem}" ]; then
      PKEY="${alias%%.x509.pem}.pk8"
      CERT="$alias"
      dprint "using key '$PKEY' and cert '$CERT'"
    else
      PKEY="$DEFKEYDIR$alias.pk8"
      CERT="$DEFKEYDIR$alias.x509.pem"
      dprint "using alias '$alias': using key '$PKEY' and cert '$CERT'"
    fi
  fi
}
####################### MANIFEST.MF entry hash: stdin - stdout
# Hash stdin the way the JAR manifest format wants: SHA1, base64 (stdin -> stdout).
mfhash() {
  $OPENSSL sha1 -binary |$OPENSSL base64
}
####################### MANIFEST.MF entry: $zipfilename, $zipentryname - $ret
# Build one MANIFEST.MF section for zip entry $1 of the global $ORIG archive.
# The formatted section (with printf-style \r\n escapes) is returned in $ret.
mfentry() {
  local hash=$($BBZ $UNZIP -p "$ORIG" "$1"|mfhash)
  ret="Name: $1\r\nSHA1-Digest: $hash\r\n\r\n"
}
####################### CERT.SF entry: $zipentryname, $mfentry - $ret
# Build one CERT.SF section: $1 is the entry name, $2 the corresponding
# MANIFEST.MF section whose digest the .SF records.  Result in $ret.
sfentry() {
  local hash=$($BB $PRINTF "$2"|mfhash)
  ret="Name: $1\r\nSHA1-Digest: $hash\r\n\r\n"
}
####################### serial of zip's signing cert: $zipfilename - $?, stdout
# Print the serial number of the certificate that signed $1 and return 0.
# $1 may be a signed zip/apk (META-INF/*.RSA|*.DSA is extracted) or a raw
# DER PKCS7 file.  Returns 1 when the file is missing, 2 when no cert can
# be parsed; an error message is printed on stdout in those cases.
getcertid() {
  if [ -e "$1" ]; then
    cert=$($BBZ $UNZIP -p "$1" 'META-INF/*.RSA' 'META-INF/*.DSA' 2> /dev/null |$OPENSSL pkcs7 -inform DER -print_certs 2> /dev/null)
    if [ $? -ne 0 ]; then
      dprint "'$1' is not a zip, trying as file"
      cert=$($OPENSSL pkcs7 -inform DER -print_certs < "$1" 2> /dev/null)
      if [ $? -ne 0 ]; then
        echo "error getting cert from '$1'"
        return 2
      fi
    fi
  else
    echo "No such file '$1'"
    return 1
  fi
  certid=$(echo "$cert"| $OPENSSL x509 -noout -serial|$BB $SED -r "s,^(serial=),,g" )
  dprint "certid($?): '$certid'"
  echo $certid
  return 0
}
####################### Duh.
# Print command-line help (commands, options, exit codes) to stdout.
usage() {
  echo "Usage: ${0##*/} (options) [command] (files)
commands:
sign FILE sign a .zip or .apk
sign FILE1 FILE2 create a signed copy of FILE1 as FILE2
cert FILE(s) print cert info on FILE's signer
certinfo FILE print detailed cert info on FILE's signer
cmp FILE compare signer of FILE with default/selected cert
cmp FILE1 FILE2 compare signer of FILE1 to signer of FILE2
options:
-k, --key FILE key to sign with
-c, --cert FILE cert to sign with
if -c or -k are not files then they are considered
aliases to builtins (ie -k testkey or -c platform)
-f, --force sign even if cert differs from original
-t, --tmp DIR use DIR for tempdir instead of '$TMP'
-d, --debug output debugging
-V, --version print '${0##*/} v$VERSION'
exit codes:
1: read error (file 1) 2: read error (file 2)
3: write error 4: ssl error
5: zip write error 9: key error
8: sign: cert mismatch 10: cmp: cert mismatch
128: script error 255: user error
"
}
####################### getopts() but not: @array - $args
# Hand-rolled option parser (getopts(1) replacement).
# Consumes "$@"; sets the OPT* globals (OPTcert, OPTkey, OPTforce, OPTquiet,
# OPTtmp), DEBUG/TMP/QZIP, and accumulates the non-option arguments into
# $args separated by the unlikely character $esc (defined at file scope).
# The full original argument string is kept in $origarg for error messages.
getop() {
  origarg="$*"
  while [ "$1" ]; do
    case "$1" in
      -c|--cert)
        shift
        if [ ! "$1" ]; then usage;exit 255;fi
        OPTcert="$1"
        ;;
      -k|--key)
        shift
        if [ ! "$1" ]; then usage;exit 255;fi
        OPTkey="$1"
        ;;
      -d|--debug)
        DEBUG=1
        ;;
      -f|--force)
        OPTforce=1
        ;;
      -q|--quiet)
        OPTquiet=1
        QZIP="-q"
        ;;
      -V|--version)
        echo ${0##*/} v$VERSION;exit
        ;;
      -t|--tmp|--tmpdir)
        shift
        if [ ! "$1" ]; then usage;exit 255;fi
        OPTtmp="$1";TMP="$1"
        ;;
      --)
        shift
        while [ "$1" ]; do
          args="$args$1$esc"
          shift
        done
        return
        ;;
      -*)
        usage;exit 255;;
      *)
        args="$args$1$esc"
        ;;
    esac
    shift
  done
}
#######################
#######################
#######################
getop "$@"
# Re-split the accumulated $args on the $esc separator so filenames with
# spaces survive, then restore the normal IFS.
IFS="$esc";
set -- $args
IFS=$OLDIFS
myexe=$(readlink /proc/$$/exe)
if [ $ASHFORCED ]; then
  dprint "ASHFORCED=true, exe: $ASHFORCEDOEXE->$myexe, pid: $ASHFORCEDOPID->$$"
fi
dprint "me=$myexe, BUSYBOX=$BB PRINTF=$BB $PRINTF, TR=$BB $TR, SED=$BB $SED, GREP=$BB $GREP"
dprint "UNZIP=$BBZ $UNZIP, ZIP: $ZIP, OPENSSL=$OPENSSL"
#######################
####################### Command dispatch: sign / certinfo / cert / cmp / getcert
if [ "x$1" = "xsign" ]; then
  testexe "$ZIP" "$UNZIP" "$OPENSSL" "$SED" "$PRINTF"
  chkcert "$OPTcert" "$OPTkey"
  if [ ! -e "$PKEY" ]; then echo "Missing private key! I looked in '$PKEY'"; exit 9 ; fi
  if [ ! -e "$CERT" ]; then echo "Missing cert! I looked in '$CERT'"; exit 9 ; fi
  testtmp
  IFS=$esc
  ORIG=$($BB $READLINK -f "$2")
  mf="Manifest-Version: 1.0\r\nCreated-By: 1.0 (Android SignApk)\r\n\r\n"
  sf=""
  ZIPls=$($BBZ $UNZIP -qql "$ORIG"); ordie "$ORIG: unzip error" 1
  # Two-file form copies first; one-file form refuses (without --force) to
  # re-sign in place with a cert that differs from the archive's signer.
  if [ "x$3" != "x" ] && [ "x$3" != "x$2" ]; then
    TARGET=$($BB $READLINK -f "$3")
    cp -a "$ORIG" "$TARGET"; ordie "Cannot write to $TARGET" 3
  else
    TARGET="$ORIG"
    if [ ! $OPTforce ]; then
      myserial=$($OPENSSL x509 -noout -serial < "$CERT" |$BB $SED -r "s,^(serial=),,g")
      if origkey=$(getcertid "$ORIG"); then
        if [ "x$origkey" != "x$myserial" ]; then
          IFS=$OLDIFS
          echo "$ORIG is signed with a cert ($origkey) that does not match '$CERT' ($myserial). Try"
          echo " $0 --force $origarg"
          echo "if you wish to proceed, or"
          echo " $0 $origarg newfile"
          exit 8
        fi
        dprint "cert $myserial matches cert in $ORIG"
      fi
    fi
  fi
  IFS=$esc
  # Walk the zip listing and build MANIFEST.MF / CERT.SF sections for every
  # non-empty entry except the signature files themselves.
  p "Checksumming $ORIG:\n\t"
  for i in $ZIPls; do
    IFS=$OLDIFS
    set -- $i
    if [ $# -ge 3 ] && [ "x$1" != "x0" ]; then
      ret=""
      file="${i:28}"
      #file="$4"
      if [ "x$file" != "xMETA-INF/MANIFEST.MF" ] && [ "x$file" != "xMETA-INF/CERT.SF" ] && [ "x$file" != "xMETA-INF/CERT.RSA" ]; then
        p "$file "
        mfentry "$file"
        mf="$mf$ret"
        sfentry "$file" "$ret"
        sf="$sf$ret"
      fi
    fi
  done
  p "\n"
  sfhead=$($BB $PRINTF "$mf"|mfhash)
  sf="Signature-Version: 1.0\r\nCreated-By: 1.0 (Android SignApk)\r\nSHA1-Digest-Manifest: $sfhead\r\n\r\n$sf"
  # Write the manifest pair, sign CERT.SF into CERT.RSA, then append all
  # three files to the target archive.
  TMPDIR="${TMP}/sign-$$"
  TMPPKEY="${TMPDIR}/tmp.pkey"
  mkdir -p "$TMPDIR/META-INF"; ordie "" 3
  $BB $PRINTF "$mf" > "${TMPDIR}/META-INF/MANIFEST.MF"; ordie "" 3
  $BB $PRINTF "$sf" > "${TMPDIR}/META-INF/CERT.SF"; ordie "" 3
  $OPENSSL pkcs8 -inform DER -nocrypt -in "$PKEY" > "$TMPPKEY"; ordie "" 4
  $BB $PRINTF "$sf"|$OPENSSL smime -sign -inkey "$TMPPKEY" -signer "$CERT" -binary -outform DER -noattr > "${TMPDIR}/META-INF/CERT.RSA"; ordie "" 4
  cd "${TMPDIR}"
  ENVKLUDGE="$ZIP"; unset ZIP # thanks new-version-of-infozip =[
  dprint $ENVKLUDGE "$TARGET" META-INF/MANIFEST.MF META-INF/CERT.SF META-INF/CERT.RSA
  $ENVKLUDGE "$TARGET" META-INF/MANIFEST.MF META-INF/CERT.SF META-INF/CERT.RSA; ordie "" 5
  cd - > /dev/null
  $BB rm -rf "${TMPDIR}"
#######################
elif [ "x$1" = "xcertinfo" ]; then
  testexe $UNZIP $OPENSSL
  $BBZ $UNZIP -p "$2" 'META-INF/*.RSA' 'META-INF/*.DSA' 2> /dev/null | $OPENSSL pkcs7 -inform DER -print_certs -text
#######################
elif [ "x$1" = "xcert" ]; then
  shift
  testexe "$UNZIP" "$OPENSSL" "$SED" "$PRINTF"
  # For each apk argument, print path / owning uid / cert serial / cert
  # title in padded columns, matching against /data/system/packages.xml.
  packages=$($BB $GREP 'package name' /data/system/packages.xml|$BB $SED -r 's,(<package |>$),,g')
  p $( echo "$packages"|$BB wc -l ) installed packages."\n"
  for i in $*; do
    unset real cert certserial title pkg name codepath system ts version shareduserid userid user
    if [ -e "$i" ]; then
      real=$($BB $READLINK -f "$i")
      out="$real$PAD"
      cert=""
      IFS=$esc
      cert=$($BBZ $UNZIP -p "$i" 'META-INF/*.RSA' 'META-INF/*.DSA' 2> /dev/null |$OPENSSL pkcs7 -inform DER -print_certs 2> /dev/null)
      if [ $? -eq 0 ]; then
        IFS=$OLDIFS
        set -- $(echo "$cert"| $OPENSSL x509 -noout -serial -subject|$BB $SED -r "s,^(serial=|subject ?=.*/O=),,g" )
        certserial=$1
        # Map the well-known AOSP key serials to friendly names.
        case $certserial in
          C2E08746644A308D) title="Google";;
          936EACBE07F201DF) title="SDK Test Key";;
          F2B98E6123572C4E) title="SDK Media Key";;
          B3998086D056CFFA) title="SDK Platform Key";;
          F2A73396BD38767A) title="SDK Shared Key";;
          *) title="unknown $2";;
        esac
        real=$(echo "$real"|$BB $SED -r 's,/(system/sd|sd-ext)/app,/data/app,')
        pkg=$(echo "$packages"|$BB $GREP "codePath=\"$real\""|$BB $TR -d '"' )
        if [ $? -eq 0 ]; then
          IFS=" "
          set -- $pkg
          for p in $*; do
            IFS="="
            set -- $p
            case $1 in
              name) name=$2;;
              codePath) codepath=$2;;
              system) system=$2;;
              ts) ts=$2;;
              version) version=$2;;
              sharedUserId) shareduserid=$2;;
              userId) userid=$2;;
            esac
          done
        fi
        user="shuid:$shareduserid"
        if [ "x$shareduserid" = "x" ]; then
          user="uid:$userid"
        fi
        out="${out:0:60} $user$PAD"
        out="${out:0:74} $certserial$PAD"
        $BB $PRINTF "${out:0:92} $title\n"
      else
        $BB $PRINTF "${out:0:60} Invalid\n"
      fi
    fi
  done
elif [ "x$1" = "xcmp" ]; then
  # Compare two archives' signing certs, or one archive against the
  # configured/default cert.  Exit 10 on mismatch, 0 on match.
  if [ "x$2" = "x" ]; then
    echo "Usage: $0 cmp [file]"
    echo " $0 cmp [file1] [file2]"
    exit 255
  fi
  if [ "x$3" != "x" ]; then
    c1=$(getcertid "$2"); ordie "Error: $c1" 1
    c2=$(getcertid "$3"); ordie "Error: $c2" 2
  else
    chkcert "$OPTcert" "$OPTkey"
    set -- "cmp" "$2" "$CERT"
    c2=$($OPENSSL x509 -noout -serial -in "$CERT"); ordie "Error getting serial of '$CERT'" 9
    c2=$(echo $c2|$BB $SED -r "s,^(serial=),,g")
    c1=$(getcertid "$2"); ordie "Error: $c1" 1
  fi
  if [ "$c1" != "$c2" ]; then
    echo "$2 ($c1) != $3 ($c2)"
    exit 10
  else
    echo "$2 ($c1) == $3 ($c2)"
    exit 0
  fi
elif [ "x$1" = "xgetcert" ]; then
  ret=$(getcertid "$2")
  echo "$? '$ret'"
else
  usage
  exit 0
fi
| true
|
739e4c98ff1fd25520d266755e362d7db893ef60
|
Shell
|
EduardoPazz/dotfiles
|
/scripts/stabshell
|
UTF-8
| 349
| 2.765625
| 3
|
[] |
no_license
|
#!/bin/bash
# "Stabilize" a caught reverse shell in the currently focused terminal by
# typing the usual pty/stty upgrade incantation via xdotool.

# Type one line into the focused window and press Enter.
# (Renamed from 'command', which shadowed the shell builtin of that name.)
function send_line() {
    xdotool type "$1" && \
    xdotool key KP_Enter
}

# Send Ctrl+<key> ($1) to the focused window.
function send_ctrl() {
    xdotool key "ctrl+$1"
}

# Foreground the shell, wrap it in a real pty, background it, put the local
# terminal in raw mode, foreground again, and fix TERM.
function stabilize() {
    send_line "fg" && \
    send_line "exec python3 -c 'import pty; pty.spawn(\"/bin/bash\")'" && \
    send_ctrl z && \
    send_line "stty raw -echo" && \
    send_line "fg" && \
    send_line "export TERM=xterm"
}

stabilize &
| true
|
dbd7067b8fafd6254cf4ddcd12f2a408c44a06fc
|
Shell
|
osism/testbed
|
/scripts/enable-secondary-nodes.sh
|
UTF-8
| 252
| 2.96875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Uncomment the secondary testbed nodes (testbed-node-4 .. testbed-node-($1-1))
# in the custom inventory.  $1 is the total node count; the first 3 nodes are
# always enabled, so anything beyond them counts as "secondary".
# Shell arithmetic replaces the old 'expr' calls (which also errored out
# noisily when $1 was missing).
number_of_secondary_nodes=$(( $1 - 3 ))
if [[ $number_of_secondary_nodes -gt 0 ]]; then
    for node in $(seq 4 $(( $1 - 1 ))); do
        sed -i "/^#.*testbed-node-$node/s/^#//" /opt/configuration/inventory/10-custom
    done
fi
| true
|
8b5cd0147fd1420f642dfd04abce319eb36ec298
|
Shell
|
vcatafesta/chili
|
/bombando/kdialog/prgs/warningcontinuecancel.kdi
|
UTF-8
| 320
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
#! /bin/bash
# Example of the kdialog --warningcontinuecancel dialog.
# For it to work you must first assign a value to the $pid variable;
# the process is killed only if the user chooses "Continue".
kdialog --title "Exemplo de warningcontinuecancel" \
	--warningcontinuecancel "O programa com PID = $pid está demorando muito para terminar.
Deseja encerrá-lo?" && kill $pid
| true
|
c0c2fc5829da9fc037781da5a08dd364c55cd569
|
Shell
|
alokjani/openstack.ops-tools
|
/ceph/ceph-list-snapshots.sh
|
UTF-8
| 211
| 2.921875
| 3
|
[] |
no_license
|
#!/bin/bash
# List the RBD snapshots of a Cinder volume, addressed by its volume UUID.
user="cinder"
conf="/etc/ceph/ceph.conf"
pool="volumes"
volume_id="$1"
if [ -z "$volume_id" ];
then
	# $0 is the script name; the original printed $1 here, which is
	# empty exactly when this branch runs.
	echo "Usage: $0 <volume_id>"
	exit 1  # non-zero: a bare 'exit' reported success on misuse
fi
rbd --user "$user" --conf "$conf" snap ls "$pool/volume-$volume_id"
| true
|
b50b5eaee02a86bf77f97b50cb046099d1f352cc
|
Shell
|
jpagano/pi-hosted
|
/tools/nginx-proxy-manager.sh
|
UTF-8
| 715
| 3.28125
| 3
|
[] |
no_license
|
#!/bin/bash
# Prepare host directories and a blank config file for the Nginx Proxy
# Manager Portainer app-template stack.

# Print an error in red and abort.
function error {
  echo -e "\\e[91m$1\\e[39m"
  exit 1
}

base_dir="/portainer/Files/AppData/Config/nginx-proxy-manager"

echo "Creating directories..."
# One data area per iteration; the loop replaces three copy-pasted lines
# and produces the same error messages on failure.
for sub in data letsencrypt database; do
  sudo mkdir -p "${base_dir}/${sub}" || error "Failed to create ${sub} folder!"
done

echo "Creating a blank nginx-proxy-manager config files"
sudo touch "${base_dir}/config.json" || error "Failed to touch config.json file!"

echo "Setup complete. You can now install the stack using the App Template."
| true
|
1394eadb4180a1ce194cd2ed1bbfc69c11e9374e
|
Shell
|
sjeemb/scout
|
/t-bench/tools/start-workers.sh
|
UTF-8
| 514
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch one distributed-replicated tf_cnn_benchmarks worker.
#   $1 = worker task index   $2 = model name
#   $3 = batch size          $4 = number of GPUs
if [[ "$1" == "" ]] || [[ "$2" == "" ]] || [[ "$3" == "" ]] || [[ "$4" == "" ]] ; then
	# $0 reports the actual script name (the old text hard-coded
	# "./start-worker.sh", which does not match this file).
	echo "Usage: $0 nid model batch_size num_gpus [options]"
	exit 1  # 'exit -1' is non-portable; any non-zero status signals misuse
else
	nid=$1
	model=$2
	batch_size=$3
	num_gpus=$4
	python tf_cnn_benchmarks.py --local_parameter_device=cpu --num_gpus=${num_gpus} \
	--batch_size=${batch_size} --model=${model} --variable_update=distributed_replicated \
	--job_name=worker --ps_hosts=192.168.0.100:50000 \
	--worker_hosts=192.168.0.101:50001 --task_index=${nid}
fi
| true
|
82777364dc7f175c26b6b57ecd067047dd3daa94
|
Shell
|
mborzyszkowski/LinuxBackup
|
/linuxrestore.sh
|
UTF-8
| 5,748
| 3.8125
| 4
|
[] |
no_license
|
#!/bin/bash
# Print the help hint and exit 0; combining -h/--help with any other
# option is rejected with exit 1.  $1 = total number of CLI arguments.
print_help(){
	local argc=$1
	if [ "$argc" -le 1 ]; then
		echo " ---- HELP: napisz man linuxrestore -----"
		exit 0
	fi
	echo "Użycie opcji -h lub --help z innymi opcjami jest niedozwolone"
	exit 1
}
# Print the version banner and exit 0; combining -v/--version with any
# other option is rejected with exit 1.  $1 = total number of CLI arguments.
print_version(){
	local argc=$1
	if [ "$argc" -le 1 ]; then
		echo " ---- VERSION: program 0.01 -----"
		exit 0
	fi
	echo "Użycie opcji -v lub --version z innymi opcjami jest niedozwolone"
	exit 1
}
###################################################################
#
# init
#
HELP=false
VERSION=false
GZIP=false
NAME_PREFIX=""
DATE_TO_RESTORE=""
BACKUP_DIR=""
OUT_DIR=""
###################################
# czytaj ustawienia
source $(dirname $0)/backup.config
# echo "ZENITY=${ZENITY}"
ZENITY_OUT=""
if [ "${ZENITY}" == "ON" ]; then
ZENITY_OUT=$( zenity --forms --height 250 --width 600 \
--title="Parametry Backupu do dodania" \
--text="Podaj prefix backupu oraz parametry czasowe:" \
--separator="|" \
--add-entry="Prefix nazwy:" \
--add-entry="Czas, na który odtworzyć (np.:2017_05_14_12_01):" \
--add-list "Czy kompresować?" --list-values "TAK|NIE")
ACCEPTED=$?
if [ ! "${ACCEPTED}" == "0" ]; then
echo "Zenity: Brak akceptacji"
exit 1
fi
NAME_PREFIX=$(awk --field-separator='|' '{print $1}' <<<${ZENITY_OUT})
DATE_TO_RESTORE=$(awk --field-separator='|' '{print $2}' <<<${ZENITY_OUT})
GZIP=$(awk --field-separator='|' '{print $3}' <<<${ZENITY_OUT})
if [ "${GZIP}" == "TAK" ]; then
GZIP=true
else
GZIP=false
fi
BACKUP_DIR=$(zenity --file-selection --title="Ścieżka do plików z backupem:" --directory )
ACCEPTED=$?
if [ ! "${ACCEPTED}" == "0" ]; then
echo "Zenity: Brak akceptacji ścieżki do backupów"
exit 1
fi
OUT_DIR=$(zenity --file-selection --title="Ścieżka do wypakowania plików:" --directory )
ACCEPTED=$?
if [ ! "${ACCEPTED}" == "0" ]; then
echo "Zenity: Brak akceptacji ścieżki do backupów"
exit 1
fi
QUESTION=$(zenity --question --height 250 --width 600 --title="Podsumowanie" --text="Ustawiono:\nname=${NAME_PREFIX}\ndate=${DATE_TO_RESTORE}\nbackup-dir=${BACKUP_DIR}\nout-dir=${OUT_DIR}\ngzip=${GZIP}\n\nCzy akceptujesz ustawienia? ")
ACCEPTED=$?
if [ ! "${ACCEPTED}" == "0" ]; then
echo "Zenity: Brak akceptacji parametrów"
exit 1
fi
# echo ${ZENITY_OUT}
# echo ${NAME_PREFIX}
# echo ${DATE_TO_RESTORE}
# echo ${BACKUP_DIR}
# echo ${OUT_DIR}
# echo ${GZIP}
# exit 0
else
#
# read options
#
for i in "$@"
do
case $i in
--name=*)
NAME_PREFIX="${i#*=}"
;;
--date=*)
DATE_TO_RESTORE="${i#*=}"
;;
--backup-dir=*)
BACKUP_DIR="${i#*=}"
;;
--out-dir=*)
OUT_DIR="${i#*=}"
;;
--gzip)
GZIP=true
;;
-h|--help)
HELP=true
;;
-v|--version)
VERSION=true
;;
*)
echo "$i": nieznana opcja
exit 1
;;
esac
done
fi
if [ "${HELP}" = true ]; then
print_help "$#"
fi
if [ ${VERSION} = true ]; then
print_version "$#"
fi
# test OUT_DIR
if [ -d "${OUT_DIR}" ]; then
if [ -L "${OUT_DIR}" ]; then
OUT_DIR=$(cd -P "${OUT_DIR}" && pwd)
else
OUT_DIR=$(cd "${OUT_DIR}" && pwd)
fi
else
echo "Ścieżka do odtworzenia plików: ${OUT_DIR} nie istnieje"
exit 1
fi
# test BACKUP_DIR
if [ -d "${BACKUP_DIR}" ]; then
if [ -L "${BACKUP_DIR}" ]; then
BACKUP_DIR=$(cd -P "${BACKUP_DIR}" && pwd)
else
BACKUP_DIR=$(cd "${BACKUP_DIR}" && pwd)
fi
else
echo "Ścieżka do kopii zapasowych: ${BACKUP_DIR} nie istnieje"
exit 1
fi
# archiv gziped or not
if [ "${GZIP}" = true ]; then
TAR_OPT="-xvzf"
BACKUP_EXT="tgz"
else
TAR_OPT="-zvf"
BACKUP_EXT="tar"
fi
# check DATE_TO_RESTORE
# Archive names embed a zero-padded yyyy_mm_dd_HH_MM stamp, so plain
# lexicographic comparison of file names equals chronological comparison.
DATE_REGEX="^[0-9]{4}_[0-9]{2}_[0-9]{2}_[0-9]{2}_[0-9]{2}"
TAR_FILES=""
if [[ "${DATE_TO_RESTORE}" =~ ${DATE_REGEX} ]]; then
# echo "find ${BACKUP_DIR} -name ${NAME_PREFIX}_full_*.${BACKUP_EXT}"
# All full backups, lexically (== chronologically) sorted.
FULL_BACKUPS=$(find ${BACKUP_DIR} -name "${NAME_PREFIX}_full_*.${BACKUP_EXT}" | sort )
# echo ${FULL_BACKUPS}
# Synthetic name of a full backup taken exactly at the requested date;
# used only as a comparison bound below.
NAME_TO_COMPARE=${BACKUP_DIR}"/"${NAME_PREFIX}"_full_"${DATE_TO_RESTORE}"."${BACKUP_EXT}
# Number of '_' separators before the date starts in a file name; the date
# fields therefore occupy sort fields NUM_OF_SEP+1 .. NUM_OF_SEP+5.
# NOTE(review): assumes NAME_PREFIX itself contains no extra underscores —
# confirm.
NUM_OF_SEP=$(echo ${NAME_PREFIX}"_full_" | grep -o "_" | wc -l )
FULL_BACKUP=""
# FULL_BACKUP_PREV=""
# Pick the newest full backup that is not after the requested date
# ([[ a > b ]] is lexicographic string comparison).
for file in ${FULL_BACKUPS}
do
# FULL_BACKUP_PREV=${FULL_BACKUP}
if [[ ! "$file" > "${NAME_TO_COMPARE}" ]]; then
FULL_BACKUP=$file
fi
done
if [ -z "${FULL_BACKUP}" ]; then
echo "Nie mogę znaleźć pełnego backupu spełniającego wszystkie warunki"
exit 1
fi
#echo ${FULL_BACKUP}
# Upper bound for incremental archives: an incr taken at the requested date.
NAME_TO_COMPARE=${BACKUP_DIR}"/"${NAME_PREFIX}"_incr_"${DATE_TO_RESTORE}"."${BACKUP_EXT}
# Sort-field indices of the year..minute components inside the file name.
YEAR_SEG=$((${NUM_OF_SEP} + 1))
MON_SEG=$((${YEAR_SEG} + 1))
DAY_SEG=$((${MON_SEG} + 1))
H_SEG=$((${DAY_SEG} + 1))
M_SEG=$((${H_SEG} + 1))
# All archives (full + incr) ordered by their embedded timestamp, with the
# full/incr field as a reversed tie-break (-k${NUM_OF_SEP}r).
ALL_BACKUPS=$(find ${BACKUP_DIR} -name "${NAME_PREFIX}*.${BACKUP_EXT}" |\
sort -t_ -k${YEAR_SEG} -k${MON_SEG} -k${DAY_SEG} -k${H_SEG} -k${M_SEG} -k${NUM_OF_SEP}r)
# echo ${ALL_BACKUPS}
# Walk the timeline: once the chosen full backup is reached, TAR_FILES is
# (re)initialised to it, and every later incremental not after the
# requested date is appended. Note the -n check runs before the
# FULL_BACKUP match, so incrementals preceding the full backup are ignored.
for file in ${ALL_BACKUPS}
do
# echo $file
if [ -n "${TAR_FILES}" ]; then
# NOTE(review): `=~` takes a regex here, so `_incr_*` means "_incr" plus
# zero or more underscores — effectively a prefix test; confirm intent.
if [[ $file =~ ${BACKUP_DIR}/${NAME_PREFIX}_incr_* ]]; then
if [[ ! "$file" > "${NAME_TO_COMPARE}" ]]; then
TAR_FILES=${TAR_FILES}" "$file
fi
fi
fi
if [ "${FULL_BACKUP}" == "${file}" ]; then
TAR_FILES="${FULL_BACKUP}"
fi
done
else
echo "Opcja --date= powinna posiadać format: rok_miesiąc_dzień_godzina_minuta"
echo "Znaleziono: ${DATE_TO_RESTORE}"
echo "Przykład: 2017_05_14_16_40"
exit 1
fi
# Replay the selected archives (the full backup first, then each
# incremental in chronological order) into the target directory,
# echoing each tar invocation before running it.
for archive in ${TAR_FILES}; do
echo "tar ${TAR_OPT} ${archive} -C ${OUT_DIR}"
tar ${TAR_OPT} ${archive} -C ${OUT_DIR}
done
| true
|
25790ca7ec4a917b93a5d7f7a19b711371e4db4c
|
Shell
|
nonamenix/GreatFuckingAdviceBot
|
/bot/healh_check.sh
|
UTF-8
| 94
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Health check: exit 0 when the process whose PID is recorded in .pid is
# alive, non-zero otherwise.
PID=$(cat ".pid" 2>/dev/null)
# Fixes: quote "$PID" (empty/whitespace-safe), guard against an empty PID,
# and use `exit 1` — `exit -1` is non-portable and wraps to 255.
if [ -n "$PID" ] && ps -p "$PID" > /dev/null
then exit 0
else exit 1
fi
| true
|
e4387215b748ede7f66ec3d8b7acb2c6c57cceba
|
Shell
|
sr105/shinyProtector
|
/shinyProtector
|
UTF-8
| 460
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
# Every 30 minutes pop up a timebar "focus check" dialog, then poll the
# system log until the user completes it before starting the next cycle.
#stop any partially completed timer iterations.
timebar --stop
while :
do
sleep 5
# sure would like to replace this sleep with a native Cocoa or Quartz
# applescript call to look for the timebar dialog box and wait for it
# to be closed.
timebar --duration $(( 60 * 30 )) --message "Focus check at $(date +"%T")"
# `grep -E` replaces the deprecated `egrep`; `tail -n 1` (POSIX spelling)
# keeps only the most recent TimebarAudit entry.
until grep -E TimebarAudit /var/log/system.log | tail -n 1 | grep -q complete
do
sleep 1
done
done
| true
|
9115c3907c4b0c0291d3b98d67cbf71b46a04f4e
|
Shell
|
rajkumarbl/EECS_2031
|
/Assignments/Assignment2/Unix/icount
|
UTF-8
| 1,380
| 3.8125
| 4
|
[] |
no_license
|
#!/bin/sh
# EECS2031 - Assignment 2
# Filename : icount
# Author : Balakrishnan Lakshmi, Rajkumar
# Email : kumarraj@my.yorku.ca
# Login ID : kumarraj
# Student ID : 213141197
#
# Prompts for a file name, then counts its lines, words and/or characters.
# Fix: every $myfile / $mychoice expansion is now quoted, so file names with
# spaces and empty or multi-word answers no longer break the test commands.
echo -n "Enter input file name: "
read myfile
if test ! -e "$myfile"
then
echo "File '$myfile' does not exist."
exit 0
elif test ! -r "$myfile"
then
echo "File '$myfile' is not readable."
exit 0
else
echo -n "Count lines, words, characters or all three (l, m, c, a)? "
read mychoice
while test "$mychoice" != l && test "$mychoice" != m && test "$mychoice" != c && test "$mychoice" != a && test "$mychoice" != L && test "$mychoice" != M && test "$mychoice" != C && test "$mychoice" != A
do
echo "Invalid option"
echo -n "Count lines, words, characters or all three (l, m, c, a)? "
read mychoice
done
# Redirecting the file into wc avoids a useless `cat` per count.
outputl=`wc -l < "$myfile"`
outputw=`wc -w < "$myfile"`
outputc=`wc -c < "$myfile"`
if test "$mychoice" = "l" || test "$mychoice" = "L"
then
echo "File '$myfile' contains $outputl lines."
elif test "$mychoice" = "m" || test "$mychoice" = "M"
then
echo "File '$myfile' contains $outputw words."
elif test "$mychoice" = "c" || test "$mychoice" = "C"
then
echo "File '$myfile' contains $outputc characters."
elif test "$mychoice" = "a" || test "$mychoice" = "A"
then
echo "File '$myfile' contains $outputl lines, $outputw words, $outputc characters."
fi
fi
| true
|
bafc88976fefad71ced0acd05618e74e23d2fa0a
|
Shell
|
wastrachan/pkgbuilds
|
/wireguard-vanity-keygen-bin/PKGBUILD
|
UTF-8
| 1,892
| 2.84375
| 3
|
[
"Unlicense"
] |
permissive
|
# Maintainer: Winston Astrachan <winston dot astrachan at gmail dot com>
pkgname=wireguard-vanity-keygen-bin
_shortname=wireguard-vanity-keygen
pkgver=0.0.5
pkgrel=1
pkgdesc='WireGuard vanity key generator'
arch=('x86_64' 'aarch64' 'i686' 'arm')
url='https://github.com/axllent/wireguard-vanity-keygen'
license=('custom')
depends=('glibc')
provides=('wireguard-vanity-keygen')
conflicts=('wireguard-vanity-keygen')
source=("https://raw.githubusercontent.com/axllent/wireguard-vanity-keygen/${pkgver}/LICENSE")
source_x86_64=("https://github.com/axllent/wireguard-vanity-keygen/releases/download/${pkgver}/${_shortname}_${pkgver}_linux_amd64.bz2")
source_aarch64=("https://github.com/axllent/wireguard-vanity-keygen/releases/download/${pkgver}/${_shortname}_${pkgver}_linux_arm64.bz2")
source_i686=("https://github.com/axllent/wireguard-vanity-keygen/releases/download/${pkgver}/${_shortname}_${pkgver}_linux_386.bz2")
source_arm=("https://github.com/axllent/wireguard-vanity-keygen/releases/download/${pkgver}/${_shortname}_${pkgver}_linux_arm.bz2")
sha256sums=('2cb653ca639b18430adce010d0e294db83c120c83fa32d13ea1abb17cca98c32')
sha256sums_x86_64=('3b90b40f70ee73b4e615225e093e27c948045707adf0d9e5af164cdf335aa501')
sha256sums_aarch64=('224c8c8f86a9ba30e324cbe394e63c08786a79d78b9da7b3595172e524a292e5')
sha256sums_i686=('0789980e62da4985ebdcd19122747ece6bd62fd60f286a16b9357a52aa5b7739')
sha256sums_arm=('449cfd76b233f9c679e4b874b3864d32fab359f36aa95cae718458dd8a40a2a7')
# PKGBUILD package step: map pacman's architecture name to the Go-style
# name used in the upstream release artifacts, then install the license
# and the prebuilt binary.
package() {
# Upstream uses different format for architecture notation
case $CARCH in
'x86_64') subarch='amd64' ;;
'aarch64') subarch='arm64' ;;
'i686') subarch='386' ;;
*) subarch=$CARCH ;;   # e.g. 'arm' is spelled the same upstream
esac
install -Dm644 "${srcdir}/LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
install -Dm755 "${srcdir}/${_shortname}_${pkgver}_linux_${subarch}" "${pkgdir}/usr/bin/${_shortname}"
}
| true
|
644e1141d07f7816554689562b37cdb258913fff
|
Shell
|
obscuredepths/dotfiles
|
/.git_template/hooks/pre-commit-images
|
UTF-8
| 263
| 3
| 3
|
[] |
no_license
|
#!/bin/bash -l
# Git pre-commit hook: losslessly optimize every changed PNG and re-stage it.
echo "Optimizing images"
# Find image files and optimize them.
# Fixes over the original loop:
#  - a while/read loop fed by process substitution instead of word-splitting
#    a command substitution, so paths containing spaces survive intact;
#  - the suffix match is anchored as a literal ".png" (`\.png$`) — the
#    unescaped dot previously matched any character (e.g. "xpng").
while IFS= read -r FILE; do
echo "Optimizing $FILE"
bundle exec smusher "$FILE"
git add "$FILE"
done < <(git diff --name-status | grep -v "^D" | grep '\.png$' | awk '$1 != "R" { print $2 }')
| true
|
d3a865271ab8b439a6d9cb5e094e4440b97fc95e
|
Shell
|
richarddewit/dotfiles
|
/bin/spotify-notify.sh
|
UTF-8
| 629
| 3.53125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# spotifyd hook: on track start/change, show a desktop notification with the
# current artist and title fetched via playerctl. Any other player event
# exits immediately.
case "$PLAYER_EVENT" in
start|change) ;;
*) exit 0;;
esac
caption="♬ Spotify"
metadata=""
tries=0
# Right after a track change playerctl may briefly return nothing, so poll
# once per second, giving up (with a notification) after 10 attempts.
while [ -z "$metadata" ]; do
metadata="$(playerctl -p spotifyd metadata)"
((tries+=1))
echo "Tries: $tries"
sleep 1
if [ $tries -ge 10 ]; then
notify-send "$caption" "Unable to get song metadata!"
exit 1
fi
done
# Parse "key  value" lines split on runs of 2+ spaces; multiple artist lines
# are joined with ", " and the trailing separator is trimmed.
# NOTE(review): `head -c -2` is GNU coreutils only, and the field split
# assumes playerctl pads columns with at least two spaces — confirm.
artist="$(echo "$metadata" | grep artist | awk -F '[[:space:]][[:space:]]+' '{print $2;}' | awk 'ORS=", "' | head -c -2)"
title="$(echo "$metadata" | grep title | awk -F '[[:space:]][[:space:]]+' '{print $2;}')"
notify-send "$caption - Now Playing" "$artist - $title"
| true
|
c4c2ddd48e0ff3e197b9e7a60129a12a25306b90
|
Shell
|
greggomann/mesos-healthcheck-benchmark
|
/process-results.sh
|
UTF-8
| 916
| 3.28125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Post-process benchmark results into two gnuplot-style data files:
# average check rate and check responsiveness per pod size.
CHECK_RATE_FILE=plot_check_rate.txt
RESPONSE_TIME_FILE=plot_response_time.txt
# `rm -f` keeps the first run quiet when the output files do not exist yet
# (plain `rm` printed an error).
rm -f "$CHECK_RATE_FILE"
echo "Command Check Rates" >> "$CHECK_RATE_FILE"
echo "Tasks in One Pod" >> "$CHECK_RATE_FILE"
echo "Average Check Rate (per sec.)" >> "$CHECK_RATE_FILE"
rm -f "$RESPONSE_TIME_FILE"
echo "Command Check Responsiveness" >> "$RESPONSE_TIME_FILE"
echo "Tasks in One Pod" >> "$RESPONSE_TIME_FILE"
echo "Time to Launch Check (sec.)" >> "$RESPONSE_TIME_FILE"
for NUM in 1 2 4 8 16 32 64 128 256; do
# grep -c replaces the `cat | grep | wc -l` pipeline; backticks -> $( ).
CHECK_COUNT=$(grep -c LAUNCH_NESTED_CONTAINER_SESSION "./results/results-$NUM/agent-log.txt")
# 300-second benchmark window; scale=2 keeps two decimal places.
CHECK_RATE=$(bc <<< "scale=2; $CHECK_COUNT/300")
RESPONSE_TIME=$(./healthcheck-response.py < "./results/results-$NUM/agent-log.txt")
# Write data for check rate plot.
echo "$NUM $CHECK_RATE" >> "$CHECK_RATE_FILE"
# Write data for response time plot.
echo "$NUM $RESPONSE_TIME" >> "$RESPONSE_TIME_FILE"
done
| true
|
2689056da76f673d2523cbdfd47a81909194dd46
|
Shell
|
linushsao/script
|
/nat-family-1.sh
|
UTF-8
| 5,015
| 3
| 3
|
[] |
no_license
|
#!/bin/bash
#
D=$(date +%F@%R)
#BASIC CONFIGURE
RESET_MODE="" #TRUE:enable RESET,other:disable
HARD_RESET_MODE="" #TRUE:enable RESET,other:disable
WIRED_MODE="TRUE" #TRUE:enable wired,other:wireless
VERBOSE_MODE="" #TRUE:enable debug mode
MOBILE_MODE="" #TRUE:enable mobile network
INTRANET_MODE="TRUE" #TRUE means enable connect forward(NAT)
CREATE_AP="" #TRUE:enable create_ap app
BLOCK_TEST=""
TC_MODE="" # TRUE:tc enable traffic control,other:disable
# Fixed path: was `home/linus/log/...` (relative — missing the leading
# slash); every other reference in this script uses /home/linus/log.
CHECK_NETWORK=$(cat /home/linus/log/CHECK_NETWORK) # empty:disable
FORWARD="0"
SCRIPT_NAME="[nat-family]"
PATH_LOG="/home/linus/log"
NETMASK="255.255.255.0"
IP_AUSTIN=(
`cat ${PATH_LOG}/IP_AUSTIN.conf`
)
IP_AUSTIN_PC=${IP_AUSTIN[0]}
IP_ROSE=(
`cat ${PATH_LOG}/IP_ROSE.conf`
)
IP_ROSE_PC=${IP_ROSE[0]}
IP_TEST=(
`cat ${PATH_LOG}/IP_TEST.conf`
)
IP_PUBLIC=(
`cat ${PATH_LOG}/IP_PUBLIC.conf`
)
MOBILES_AP=(`cat /home/linus/log/MOBILES.conf`)
#extra_ip of server of debian sources.list & others
#LIBRARY 163.29.36.96 203.64.154.21
IP_EXTRA=(
140.112.30.75
133.242.99.74
163.29.36.96
203.64.154.21
)
BLOCK_AUSTIN=""
BLOCK_ROSE=""
BLOCK_ALL=""
FILTER_MODE=""
#WIRED="enp2s0"
#WIRELESS1="wlp3s0"
EXTIF="wlp3s0"
EXTIF_1="wlp0s20u2"
INIF="wlp0s29u1u3"
INNET="192.168.0.0/24" # 若無內部網域介面,請填寫成 INNET=""
#-----------all function
# Append one log line ("<script> <date> :<message>") to check_ap.log.
# Reads the globals SCRIPT_NAME, D and MSG; writes under PATH_LOG.
# Fix: the expansions are now quoted — SCRIPT_NAME="[nat-family]" is a
# bracket glob and, unquoted, was subject to pathname expansion (and MSG
# was subject to word splitting).
log_record () {
echo "${SCRIPT_NAME} ${D} :${MSG}" >> "${PATH_LOG}/check_ap.log"
}
#-----------
MSG="++++++++++++++++++++++++++++++++[INIT START]"
log_record
echo $MSG
# Parse the command-line switches; any unrecognised option stops the script.
for var in "$@"
do
echo "[CHECKING PARAM]..."
case "$var" in
--enable-tc)
echo "[configure:ENABLE_TC]"
TC_MODE="TRUE"
;;
--enable-verbose)
echo "[configure:ENABLE_VERBOSE]"
VERBOSE_MODE="TRUE"
;;
--enable-intranet)
echo "[configure:ENABLE_INTRANET]"
INTRANET_MODE=""
;;
--enable-create_ap)
echo "[configure:ENABLE_CREATE_AP]"
CREATE_AP="TRUE"
;;
--enable-reset)
echo "[configure:ENABLE_reset]"
RESET_MODE="TRUE"
;;
--enable-hardreset)
echo "[configure:ENABLE_hardreset]"
HARD_RESET_MODE="TRUE"
;;
--enable-check-network)
echo "[configure:ENABLE_CHECK_NETWORK]"
CHECK_NETWORK="TRUE"
;;
--block-austin)
echo "[configure:BLOCK_AUSTIN]"
BLOCK_AUSTIN="TRUE"
;;
--block-rose)
echo "[configure:BLOCK_ROSE]"
BLOCK_ROSE="TRUE"
;;
--enable-mobile)
echo "[configure:ENABLE MOBILE]"
MOBILE_MODE="TRUE"
;;
--block-test)
echo "[configure:BLOCK_TEST]"
BLOCK_TEST="TRUE"
;;
help)
echo "option: --enable-tc | --enable-reset | --disable-wired | --block-austin | --block-rose | --block-all"
exit 0
;;
*)
echo "Wrong param : $var ,script is terminated!!"
exit 0
;;
esac
done
sleep 1
# Hard reset: bring all interfaces up, give the internal NIC its static
# gateway address, and restart the access-point stack from scratch.
if [ "$HARD_RESET_MODE" == "TRUE" ]; then
ifconfig ${INIF} up
ifconfig ${EXTIF} up
ifconfig ${EXTIF_1} up
# Static gateway address for the internal (NAT-ed) 192.168.0.0/24 network.
ifconfig $INIF 192.168.0.1 netmask 255.255.255.0
MSG="network interface init..."
log_record
if [ "$CREATE_AP" == "TRUE" ]; then
#create softAP
# Kill any leftover daemons, then let the create_ap helper manage the AP.
killall hostapd;sleep 1
killall dhcpd;sleep 1
create_ap $INIF $EXTIF Linuslab-AP 0726072652
MSG="CREATE AP by CREATE_AP SCRIPT"
log_record
else
# Manual AP bring-up: restart the entropy daemon, start the DHCP server
# on the internal interface, then run hostapd with debug output (-dd).
systemctl stop haveged
systemctl start haveged
killall dhcpd;sleep 1
dhcpd $INIF
killall hostapd;sleep 1
hostapd -dd /etc/hostapd/hostapd.conf
MSG="CREATE AP by MANUEL(hostapd+dhcpd)"
log_record
fi
#connect to Yafinus
#killall wpa_supplicant
#sleep 1
#wpa_supplicant -i ${EXTIF_1} -D wext -c /home/linus/log/now.conf &
#sleep 1
#dhclient -v ${EXTIF_1} &
fi
# Translate INTRANET_MODE into the 0/1 value written to ip_forward below,
# and log the decision.
if [ -n "$INTRANET_MODE" ]; then
FORWARD="1"
MSG="ENABLE IP_FORWARD MODE..."
else
FORWARD="0"
MSG="DISABLE IP_FORWARD MODE..."
fi
log_record
# Reset mode: flush all iptables rules, load the NAT/conntrack modules,
# apply the ip_forward setting computed above, optionally install per-user
# FORWARD DROP rules, and finally enable masquerading.
if [ "$RESET_MODE" == "TRUE" ]; then
IPTABLES=/sbin/iptables
# Flush filter and nat tables, drop user chains, default-accept everything.
$IPTABLES -F
$IPTABLES -F -t nat
$IPTABLES -X
$IPTABLES -P INPUT ACCEPT
$IPTABLES -P OUTPUT ACCEPT
$IPTABLES -P FORWARD ACCEPT
# Kernel modules needed for NAT and FTP connection tracking.
modprobe ip_conntrack
modprobe iptable_nat
modprobe ip_conntrack_ftp
modprobe ip_nat_ftp
echo ${FORWARD} > /proc/sys/net/ipv4/ip_forward
MSG="RESET IPTABLES RULES & ENABLE MASQUERADE"
log_record
# Block every IP on Austin's list from being forwarded out.
if [ "$BLOCK_AUSTIN" == "TRUE" ]; then
echo "[BLOCK AUSTIN]..."
for ((i=0; i<${#IP_AUSTIN[@]}; i++))
do
$IPTABLES -A FORWARD -s ${IP_AUSTIN[$i]} -o $EXTIF -j DROP
MSG="BLOCK AUSTIN "${IP_AUSTIN[$i]}
log_record
done
fi
# Same for Rose's list.
if [ "$BLOCK_ROSE" == "TRUE" ]; then
echo "[BLOCK ROSE]..."
for ((i=0; i<${#IP_ROSE[@]}; i++))
do
$IPTABLES -A FORWARD -s ${IP_ROSE[$i]} -o $EXTIF -j DROP
MSG="BLOCK ROSE "${IP_ROSE[$i]}
log_record
done
fi
# Same for the test list.
if [ "$BLOCK_TEST" == "TRUE" ]; then
echo "[BLOCK TESTING]..." ; sleep 1
for ((i=0; i<${#IP_TEST[@]}; i++))
do
$IPTABLES -A FORWARD -s ${IP_TEST[$i]} -o $EXTIF -j DROP
MSG="BLOCK TEST "${IP_TEST[$i]}
log_record
done
fi
# NAT everything leaving the box.
$IPTABLES -t nat -A POSTROUTING -j MASQUERADE
fi
exit 0
| true
|
808b8f16af3e4c638e0049fbb7a0f808e0aa3edf
|
Shell
|
freebsd/freebsd-ports
|
/www/polipo/files/pkg-deinstall.in
|
UTF-8
| 629
| 3.46875
| 3
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# FreeBSD pkg-deinstall script for polipo: runs only in the POST-DEINSTALL
# phase, removes the pid directory, and reminds the admin about leftovers
# (the polipo user account and the newsyslog.conf entry).
if [ "$2" != "POST-DEINSTALL" ]; then
exit 0
fi
POLIPOUSER=%%USER%%
POLIPOGROUP=%%GROUP%%
PCONFIGDIR=%%PCONFIGDIR%%
PPIDDIR=%%PPIDDIR%%
POLIPOCACHE=%%PCACHEDIR%%
POLIPOLOG=%%PLOGFILE%%
POLIPOPID=%%PPIDFILE%%
POLIPODATA=%%DATADIR%%
if pw usershow "${POLIPOUSER}" 2>/dev/null 1>&2; then
echo "---> To delete ${POLIPOUSER} user permanently, use 'pw userdel \"${POLIPOUSER}\"'"
fi
if [ -d "$PPIDDIR" ]; then
rm -r "$PPIDDIR" || exit 1
fi
# `grep -F` replaces the deprecated `fgrep`; the pattern is a literal path,
# and `--` protects against a pattern starting with '-'.
if grep -F -- "${POLIPOLOG}" "/etc/newsyslog.conf" 2>/dev/null 1>&2; then
echo "---> You should remove from /etc/newsyslog.conf the \"${POLIPOLOG}\" entry manually."
fi
exit 0
| true
|
5ce83abb6b811bec6db29b07b36ea4e37c59a71b
|
Shell
|
xizhan0513/demos
|
/linux_list/build.sh
|
UTF-8
| 109
| 2.71875
| 3
|
[] |
no_license
|
#!/bin/bash
# Build helper: `./build.sh` compiles the demo, `./build.sh clean` removes it.
if [ "$1" = "clean" ]; then
# -f keeps `clean` quiet when the binary was never built.
rm -f demo_list
exit 0
else
gcc list_demo.c -o demo_list
# Propagate the compiler's status; the original unconditionally exited 1,
# reporting failure even after a successful build.
exit $?
fi
| true
|
a627f32e397979f4399b0ff6bfbb98cf205862c0
|
Shell
|
mateuszkucharczyk/toolbox
|
/buildchain/newProjectFullstack.sh
|
UTF-8
| 9,542
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Scaffold a full-stack (Angular frontend + Spring Boot backend) Maven
# multi-module project named after $1, inside a fresh git repository.
# NOTE(review): the `$(createFileXxx ...)` statements run each generator in a
# command substitution and then execute its (empty) stdout — this works only
# because the generators write to files and print nothing; a plain
# `createFileXxx ...` call would be clearer. Confirm before changing.
function main() {
local projectId=${1:?"[ERROR] Specify a project id"};
local groupId="com.protonmail.mateuszkucharczyk.${projectId}";
mkdir "${projectId}";
cd "${projectId}";
git init .
# Parent aggregator pom declaring the fe and be modules.
$(createFileRootPom "${groupId}" "pom.xml");
# Frontend module: Angular app wrapped in a Maven pom.
mkdir "fe";
$(createFileFrontendPom "${groupId}" "fe/pom.xml");
ng new "${projectId}" -dir fe --skip-tests true --skip-git true --skip-commit true --skip-install true;
# https://spring.io/blog/2013/12/19/serving-static-web-content-with-spring-boot
# Spring Boot will automatically add static web resources located within any of the following directories:
# - /META-INF/resources/
# - /resources/
# - /static/
# - /public/
# Change build output directory to one of the above directories. This will make them included in jar and automatically served.
sed -i 's/"outDir":\s*".*"/"outDir": "target\/classes\/public"/' "fe/.angular-cli.json"
# Add build command to be used by maven. It must use local ng installed by frontend-maven-plugin.
sed -i 's/"scripts": [{]/"scripts": \{\n "mavenbuild": "node node\/node_modules\/@angular\/cli\/bin\/ng build",/' "fe/package.json"
# Backend module: Spring Boot skeleton with main class and context test.
mkdir "be";
echo $(createFileBackendPom "${groupId}" "be/pom.xml");
local package="${groupId}";
# Turn the dotted package name into a directory path.
local packageDir="$(echo ${package} | sed 's/\./\//g')";
local mainApplicationDir="be/src/main/java/${packageDir}";
mkdir -p "${mainApplicationDir}";
$(createFileApplication "${package}" "${mainApplicationDir}/Application.java");
local testApplicationDir="be/src/test/java/${packageDir}";
mkdir -p "${testApplicationDir}";
$(createFileApplicationTest "${package}" "${testApplicationDir}/ApplicationTest.java");
mkdir -p "be/src/main/resources";
mkdir -p "be/src/test/resources";
# TODO add .gitignore
# git add .;
# git commit -m "initial commit";
}
# Generate the Spring Boot entry-point class (Application.java) for the
# given Java package into the given file.
# Arguments: $1 - Java package name, $2 - output file path.
function createFileApplication() {
local pkg=${1:?"[ERROR] missing package"};
local target=${2:?"[ERROR] missing file"};
{
echo "package ${pkg};" # substitute package
echo '
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}'
} > "${target}";
}
# Generate the Spring Boot context-load test (ApplicationTest.java) for the
# given Java package into the given file.
# Arguments: $1 - Java package name, $2 - output file path.
function createFileApplicationTest() {
local pkg=${1:?"[ERROR] missing package"};
local target=${2:?"[ERROR] missing file"};
{
echo "package ${pkg};" # substitute package
echo '
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@SpringBootTest
public class ApplicationTest {
@Test
public void contextLoads() {
}
}'
} > "${target}";
}
function createFileBackendPom() {
local groupId=${1:?"[ERROR] missing groupId"};
local file=${2:?"[ERROR] missing file"};
echo '<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>' > "${file}";
echo " <groupId>${groupId}</groupId>" >> "${file}"; # substitute groupId
echo ' <artifactId>be</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>1.5.10.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
</properties>
<dependencies>
<!-- web dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-validation</artifactId>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>fe</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
<!-- persistence dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<scope>runtime</scope>
</dependency>
<!-- anti-boilerplate dependencies -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>' >> "${file}";
}
function createFileFrontendPom() {
local groupId=${1:?"[ERROR] missing groupId"};
local file=${2:?"[ERROR] missing file"};
echo '<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>' > "${file}";
echo " <groupId>${groupId}</groupId>" >> "${file}"; # substitute groupId
echo ' <artifactId>fe</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<properties>
<frontend.plugin.version>1.5</frontend.plugin.version>
<node.version>v6.11.3</node.version>
<npm.version>5.4.2</npm.version>
</properties>
<build>
<plugins>
<plugin>
<groupId>com.github.eirslett</groupId>
<artifactId>frontend-maven-plugin</artifactId>
<version>${frontend.plugin.version}</version>
<configuration>
<workingDirectory>${project.basedir}</workingDirectory>
</configuration>
<executions>
<execution>
<id>npm build</id>
<goals>
<goal>npm</goal>
</goals>
<phase>compile</phase>
<configuration>
<arguments>run-script mavenbuild</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<profiles>
<profile>
<id>setup</id>
<build>
<plugins>
<plugin>
<groupId>com.github.eirslett</groupId>
<artifactId>frontend-maven-plugin</artifactId>
<version>${frontend.plugin.version}</version>
<configuration>
<workingDirectory>${project.basedir}</workingDirectory>
</configuration>
<executions>
<execution>
<id>install node and npm</id>
<goals>
<goal>install-node-and-npm</goal>
</goals>
<phase>initialize</phase>
<configuration>
<nodeVersion>${node.version}</nodeVersion>
<npmVersion>${npm.version}</npmVersion>
</configuration>
</execution>
<execution>
<id>install angular-cli</id>
<goals>
<goal>npm</goal>
</goals>
<phase>initialize</phase>
<configuration>
<arguments>install --no-optional -g @angular/cli</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
<profile>
<id>npm install</id>
<activation>
<property>
<name>skipNpmInstall</name>
<value>!true</value>
</property>
</activation>
<build>
<plugins>
<plugin>
<groupId>com.github.eirslett</groupId>
<artifactId>frontend-maven-plugin</artifactId>
<version>1.5</version>
<configuration>
<workingDirectory>${project.basedir}</workingDirectory>
</configuration>
<executions>
<execution>
<id>npm install</id>
<goals>
<goal>npm</goal>
</goals>
<phase>generate-resources</phase>
<configuration>
<arguments>install</arguments>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
</project>' >> "${file}";
}
# Write the parent (aggregator) pom.xml declaring the fe and be modules.
# Arguments: $1 - Maven groupId, $2 - output file path.
function createFileRootPom() {
local groupId=${1:?"[ERROR] missing groupId"};
local file=${2:?"[ERROR] missing file"};
echo '<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>' > "${file}";
echo " <groupId>${groupId}</groupId>" >> "${file}"; # substitute package groupId
echo " <artifactId>parent</artifactId>" >> "${file}";
echo ' <version>0.0.1-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
<module>fe</module>
<module>be</module>
</modules>
</project>' >> "${file}";
}
main "$@";
| true
|
d40ee8c295e38476fe5cd1bb355445c60d8f3fb5
|
Shell
|
wpruszak/Scripts
|
/wtch
|
UTF-8
| 2,041
| 4.5625
| 5
|
[] |
no_license
|
#!/usr/bin/env bash
# Watches everything that is being appended to the
# end of the file. If provided with pattern, will filter
# lines to only the ones matching pattern. Pattern must be
# in POSIX extended regex format.
# Example usage:
#
# wtch /var/log/apache2/error.log 'error|critical'
#
# Above command will output any errors / criticals that will
# happen during this script execution. This could be useful for
# live debugging, cause you could see what the error was right
# after it happened.
#
# wtch /var/www/html/:symfony_project:/var/log/dev.log 'error|critical'
#
# Shows every symfony error / critical right after it happens.
#
# wtch file.txt
# Watches file.txt for any appended lines.
# Return 1 on pipe failure.
set -o pipefail
# Shows usage information.
usage() {
cat <<EOF
wtch [FILE]
wtch [FILE] [PATTERN]
Watches file for changes and outputs anything new to the stdout. Pattern can be any POSIX extended regex string.
EOF
}
# Stops script execution: prints the given error message (default
# "Unknown error") followed by the usage text, all on stderr, then exits
# with the given exit code (default 1).
#
# $1 - Error message
# $2 - Exit code
error() {
printf 'Error: %s\n' "${1:-Unknown error}"
usage
exit "${2:-1}"
} >&2
# At least one argument needed.
[[ $# -eq 0 ]] && { error 'Please, provide at least file name'; }
# Maximum of two arguments allowed - pattern and file.
[[ $# -gt 2 ]] && { error 'Too many arguments'; }
# Pattern and file provided.
[[ $# -eq 2 ]] && {
declare -r file="$1"
declare -r pattern="$2"
}
# Only file provided.
[[ $# -eq 1 ]] && { declare -r file="$1"; }
# Assert file is fine.
[[ ! -e "$file" ]] && { error "File: '${file}' does not exist"; }
[[ ! -f "$file" ]] && { error "File: '${file}' is not a regular file"; }
[[ ! -r "$file" ]] && { error "File: '${file}' is not readable"; }
# Either look for pattern in appended file lines, or just
# follow whatever is appended to the end of the file.
if [[ $# -eq 2 ]]; then
tail -f "$file" | grep --line-buffered -E "$pattern"
else
tail -f "$file"
fi
# Probably never gets there.
exit 0
| true
|
3cdb93a7ced9cffbc33be6104b1425398ddd6056
|
Shell
|
lum/deploy_scripts
|
/bin/deploy_site
|
UTF-8
| 2,014
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/bash
ENVIRONMENT=$1
BUILD_TYPE=$2
BUILD_ID=$3
LOCATION=$4
#echo "NOTICE --- NIGHTLY DEPLOYMENTS ARE CURRENTLY OFFLINE"
#echo "-------- Aaron Daniel <aarond@choochee.com> --------"
#exit 100
echo "--------------- $BUILD_TYPE Deployment ---------------"
echo "Deploying Site ID $BUILD_TYPE$BUILD_ID into environment $ENVIRONMENT"
echo
echo "Saving environment details"
PREV=`cat /var/lib/chef/deployment-$BUILD_TYPE-$ENVIRONMENT-$LOCATION 2> /dev/null`
echo $BUILD_ID > /var/lib/chef/deployment-$BUILD_TYPE-$ENVIRONMENT-$LOCATION
echo
echo "Creating proper roles"
cat > /tmp/$BUILD_TYPE$BUILD_ID.rb << EOP
name "$BUILD_TYPE$BUILD_ID"
description "$BUILD_TYPE deployment"
default_attributes("deployment_id" => "$BUILD_TYPE$BUILD_ID")
override_attributes "location" => "$LOCATION"
EOP
knife role from file /tmp/$BUILD_TYPE$BUILD_ID.rb
rm /tmp/$BUILD_TYPE$BUILD_ID.rb
echo
echo "Running deployment"
deploy_full_site -e $ENVIRONMENT -d $BUILD_TYPE$BUILD_ID -s $LOCATION
echo "Deployment complete, sending notice to hudson"
echo "Deploy: $BUILD_TYPE$BUILD_ID $ENVIRONMENT $LOCATION has been Deployed" | mail -s "Deploy: $BUILD_TYPE$BUILD_ID $ENVIRONMENT $LOCATION has been Deployed" ops@choochee.com,qa@choochee.com,engineering@choochee.com,messenger@choochee.com
# Trigger the downstream Hudson WebServices job against the freshly
# deployed environment.
# NOTE(review): the original lines were bare URLs executed as commands
# ("command not found"), and the unquoted '&' backgrounded the word before
# it. Assuming an HTTP GET trigger was intended — confirm curl is the
# right client here.
if [ x"$ENVIRONMENT" = x"prod" ]; then
curl -fsS "http://qa001.choochee.com:8080/job/WebServices/buildWithParameters?token=SQE&WS_BASE_URL=http://cws.choochee.com"
else
curl -fsS "http://qa001.choochee.com:8080/job/WebServices/buildWithParameters?token=SQE&WS_BASE_URL=http://cws.$ENVIRONMENT.choochee.com"
fi
if [ $BUILD_TYPE == "nightinggale" ]
#if [ $BUILD_TYPE == "nightly" ]
then
if [ "0"$PREV != "0" ]
then
echo "Destroying the previous environment: $PREV"
echo
# delete_rackspace_site $ENVIRONMENT $BUILD_TYPE$PREV
# knife role delete $BUILD_TYPE$PREV -y
fi
fi
if [ $ENVIRONMENT == "prod" ]
then
knife ssh role:$BUILD_TYPE$BUILD_ID "crontab -r"
knife ssh "role:$BUILD_TYPE$BUILD_ID AND role:ws" "service tomcat stop"
knife ssh "role:$BUILD_TYPE$BUILD_ID AND role:messaging" "killall -9 java"
fi
| true
|
e52c5c9f1ebdf67ad6e35201e28cc846da9704a2
|
Shell
|
eltinawh/churn_prediction_api
|
/run.sh
|
UTF-8
| 413
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
# To prepare environment and launch the app
# For local test only
# Usage: ./run.sh
# prepare python virtual environment
pip install --upgrade pip   # fixed typo: was `--uprage`, which made pip fail
pip install pipenv
if [ ! -f 'Pipfile.lock' ]; then
pipenv --python 3.7
# NOTE(review): `pipenv shell` spawns an interactive subshell and blocks a
# non-interactive script at this point — confirm whether it is needed.
pipenv shell
pipenv install -r requirements.txt
fi
# run the app
# NOTE(review): the two `&` background gunicorn and the sleep; verify the
# browser is meant to open while the server is still starting.
pipenv run gunicorn --bind 0.0.0.0:5000 server:app & sleep 2 & x-www-browser http://0.0.0.0:5000/health
| true
|
65d743d8952c4aaa7873c0da61d6a8fc17a34103
|
Shell
|
bennetthardwick/gatsby-page-query-benchmark
|
/benchmark
|
UTF-8
| 204
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Wipe Gatsby build artifacts, then benchmark either the page-context or the
# page-query variant depending on $1.
# -f keeps the cleanup quiet on a fresh checkout where these do not exist yet
# (plain `rm -r` printed an error).
rm -rf .cache
rm -rf ./public
if [ "$1" = "pageContext" ]; then
echo "Benchmarking page context"
USE_CREATE_PAGES=true yarn build
else
echo "Benchmarking page query"
yarn build
fi
| true
|
a3ada09f0bf4f18e98adb796757373f066a5a0da
|
Shell
|
bryanjenningz/react-duolingo
|
/.env.example
|
UTF-8
| 570
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
# Since the ".env" file is gitignored, you can use the ".env.example" file to
# build a new ".env" file when you clone the repo. Keep this file up-to-date
# when you add new variables to `.env`.
# This file will be committed to version control, so make sure not to have any
# secrets in it. If you are cloning this repo, create a copy of this file named
# ".env" and populate it with your secrets.
# When adding additional environment variables, the schema in "/src/env.mjs"
# should be updated accordingly.
# Example:
# SERVERVAR="foo"
# NEXT_PUBLIC_CLIENTVAR="bar"
| true
|
37ab971bf051baf1a37c30920e1259608eac5bdd
|
Shell
|
Angelbear/MetaOS
|
/out/target/product/passion/recovery/root/system/etc/tnosupdate-1.sh
|
UTF-8
| 375
| 2.546875
| 3
|
[] |
no_license
|
#!/sbin/sh
# Recovery-side updater: downloads tnosupdate-1.zip from the host named in
# the ro.tserver.address property onto the SD card, driving the recovery UI
# via `sendmessage` (0 = begin animation, 2 = status text, 3 = end
# animation; code 5 apparently hands over the package path — confirm).
SERVER=`getprop ro.tserver.address`
mount -t vfat /dev/block/mmcblk0p1 /sdcard
sendmessage 0 #begin animation
sendmessage 1 0 0
sendmessage 2 "Downloading update package..."
cd /sdcard/
#sleep 150
# Remove any stale copy before re-downloading.
rm tnosupdate-1.zip
wget http://$SERVER/tnosupdate-1.zip
sendmessage 2 "Successfully downloaded."
sendmessage 3 #endanimation
sendmessage 5 "SDCARD:tnosupdate-1.zip"
| true
|
314b1111c586e88f3706e4c8250a63423d10d1b1
|
Shell
|
cpausmit/DynamicData
|
/SmartCache/Client/testSmartCacheDbx.sh
|
UTF-8
| 2,515
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#---------------------------------------------------------------------------------------------------
# Test the SmartCache infrastructure onm all our relevant architectures/OS combinations.
#---------------------------------------------------------------------------------------------------
DSET=GluGluToHToGG_M-125_7TeV-powheg-pythia6+Fall11-PU_S6_START42_V14B-v1+AODSIM
BOOK=filefi/025
if [ -z "$SMARTCACHE_DATA" ] || [ -z "$SMARTCACHE_DIR" ]
then
echo " Please first source the setup in DynamicData/SmartCache/setup.sh"
else
echo " Deleting the test files."
echo "5A56FCE4-1DF0-E011-82D6-E41F13181834.root"; rm -f $SMARTCACHE_DATA/$BOOK/$DSET/5A56FCE4-1DF0-E011-82D6-E41F13181834.root
echo "8EA9D118-E8EF-E011-9909-00215E21DD50.root"; rm -f $SMARTCACHE_DATA/$BOOK/$DSET/8EA9D118-E8EF-E011-9909-00215E21DD50.root
echo "94C2366B-E4EF-E011-97C0-00215E220F78.root"; rm -f $SMARTCACHE_DATA/$BOOK/$DSET/94C2366B-E4EF-E011-97C0-00215E220F78.root
echo "569DFDB3-14F0-E011-9F1D-00215E2223D0.root"; rm -f $SMARTCACHE_DATA/$BOOK/$DSET/569DFDB3-14F0-E011-9F1D-00215E2223D0.root
echo "64338902-DDEF-E011-813C-00215E21D64E.root"; rm -f $SMARTCACHE_DATA/$BOOK/$DSET/64338902-DDEF-E011-813C-00215E21D64E.root
echo "D4BB3512-F1EF-E011-9639-00215E21D5BE.root"; rm -f $SMARTCACHE_DATA/$BOOK/$DSET/D4BB3512-F1EF-E011-9639-00215E21D5BE.root
echo "E276EBD8-DDEF-E011-9E87-001B2163C7CC.root"; rm -f $SMARTCACHE_DATA/$BOOK/$DSET/E276EBD8-DDEF-E011-9E87-001B2163C7CC.root
echo " Requesting the test files."
ssh t3btch000 $SMARTCACHE_DIR/Client/addDownloadRequest.py --file=5A56FCE4-1DF0-E011-82D6-E41F13181834.root --dataset=$DSET --book=$BOOK
ssh t3btch001 $SMARTCACHE_DIR/Client/addDownloadRequest.py --file=8EA9D118-E8EF-E011-9909-00215E21DD50.root --dataset=$DSET --book=$BOOK
ssh t3btch027 $SMARTCACHE_DIR/Client/addDownloadRequest.py --file=94C2366B-E4EF-E011-97C0-00215E220F78.root --dataset=$DSET --book=$BOOK
ssh t3btch028 $SMARTCACHE_DIR/Client/addDownloadRequest.py --file=569DFDB3-14F0-E011-9F1D-00215E2223D0.root --dataset=$DSET --book=$BOOK
ssh t3btch084 $SMARTCACHE_DIR/Client/addDownloadRequest.py --file=64338902-DDEF-E011-813C-00215E21D64E.root --dataset=$DSET --book=$BOOK
ssh t3btch087 $SMARTCACHE_DIR/Client/addDownloadRequest.py --file=D4BB3512-F1EF-E011-9639-00215E21D5BE.root --dataset=$DSET --book=$BOOK
ssh t3btch100 $SMARTCACHE_DIR/Client/addDownloadRequest.py --file=E276EBD8-DDEF-E011-9E87-001B2163C7CC.root --dataset=$DSET --book=$BOOK
fi
exit 0
| true
|
3dd42b9faedce0394f6d95697a9761a4e6e1be03
|
Shell
|
lukechilds/zsh-nvm
|
/tests/loading/Check zsh-nvm is loaded
|
UTF-8
| 329
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Test: loading zsh-nvm must set ZSH_NVM_DIR, even when nvm itself is not
# loaded. Helpers (die, load_zsh_nvm) come from ../common.sh.
source ../common.sh
# We don't need to actually load nvm for this test
export ZSH_NVM_NO_LOAD=true
# Check ZSH_NVM_DIR isn't already set
[[ -z ${ZSH_NVM_DIR+x} ]] || die "ZSH_NVM_DIR already set"
# Load zsh-nvm
load_zsh_nvm
# Check ZSH_NVM_DIR is now set
[[ ! -z ${ZSH_NVM_DIR+x} ]] || die "ZSH_NVM_DIR wasn't set"
| true
|
b6075d5ad16c6e8f4a9e5a12b5ded79bba39caf2
|
Shell
|
gridbugs/apocalypse-post
|
/scripts/build.sh
|
UTF-8
| 1,061
| 3.734375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build script: run the test suite, then build release artifacts for the
# current platform (Linux or macOS), locally or under Travis CI.
set -e

# Absolute directory containing this script.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

APP_NAME=apocalypse-post
SUFFIX=$TRAVIS_BRANCH
RESOURCES=resources
USER=user
UPLOADS=uploads
DEPS_BUILD=$(pwd)/deps_build
MACOS_FRAMEWORKS=$DEPS_BUILD/Frameworks

mkdir -pv $UPLOADS
mkdir -pv $DEPS_BUILD

# Download and stage the macOS-only dependencies (SDL frameworks).
function build_deps_macos {
    pushd $DEPS_BUILD
    mkdir -p $MACOS_FRAMEWORKS
    source $DIR/download_sdl_macos.sh
    popd
}

# Build for a *nix target; OS/MACHINE are read by the sourced build script.
function build_nix {
    OS=$1
    MACHINE=$2
    source $DIR/build_nix.sh
}

# Build for a macOS target; OS/MACHINE are read by the sourced build script.
function build_macos {
    OS=$1
    MACHINE=$2
    source $DIR/build_macos.sh
}

# Outside Travis, derive TRAVIS_OS_NAME from the host OS.
if [ -z "${TRAVIS_OS_NAME+x}" ]; then
    case "$(uname -s)" in
        Linux)
            TRAVIS_OS_NAME=linux
            ;;
        Darwin)
            TRAVIS_OS_NAME=osx
            ;;
        *)
            echo "Unknown OS"
            exit 1
            ;;
    esac
fi

cargo test --release --verbose --no-default-features

if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
    build_nix linux x86_64
elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
    build_deps_macos
    build_macos macos x86_64
fi
| true
|
48e848feec2e5ca9c8df65729ddb4fea4996b7bb
|
Shell
|
jwpage/dotfiles
|
/shell/exports.sh
|
UTF-8
| 423
| 2.75
| 3
|
[] |
no_license
|
# Append a directory to $PATH, but only if it exists and is not already listed.
path_add() {
  local dir=$1
  [ -d "$dir" ] || return 0
  case ":$PATH:" in
    *":$dir:"*) ;;                            # already on PATH — nothing to do
    *) export PATH="${PATH:+"$PATH:"}$dir" ;;
  esac
}

export DEVELOPMENT="true"
export DOTFILES=~/.dotfiles
# NOTE(review): GREP_OPTIONS is deprecated by GNU grep — kept for behavior parity.
export GREP_OPTIONS='--color=auto'
export PATH=$HOME/bin:/usr/local/bin:$PATH

path_add "/usr/local/sbin"
path_add "/usr/local/Cellar/coreutils/8.21/libexec/gnubin"
path_add "/usr/local/Cellar/ruby/2.1.0/bin"
path_add "/Users/johnson/.composer/vendor/bin"
| true
|
62918cd3b11c201452ca1d952a17e788f6181803
|
Shell
|
oracle/fmw-kubernetes
|
/FMWKubernetesMAA/OracleEnterpriseDeploymentAutomation/OracleIdentityManagement/oke_utils/delete_oke.sh
|
UTF-8
| 3,876
| 3.828125
| 4
|
[
"MIT",
"LGPL-2.1-or-later",
"LGPL-2.1-only",
"Apache-2.0",
"UPL-1.0"
] |
permissive
|
#!/bin/bash
# Copyright (c) 2023, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# This is an example of a script that will delete all of the infrastructure components that were
# created using the create_infra.sh script.
#
# Dependencies: ./responsefile/oci_oke.rsp
#               ./common/oci_util_functions.sh
#               ./common/oci_delete_functions.sh
#
# Usage: delete_oke.sh <response_template_file>
#

# The response template file argument is mandatory.
if [[ $# -eq 0 ]]; then
    echo "Usage: $0 <response_template_file>"
    exit 1
fi

DIRNAME=$(dirname $0)

# Source the response file and derive working/log/output locations from it.
if test -f $DIRNAME/responsefile/$1 ; then
    source $DIRNAME/responsefile/$1
    TEMPLATE=$(basename $DIRNAME/responsefile/$1 | sed 's/.rsp//')
    LOGDIR=$WORKDIR/$TEMPLATE/logs
    LOGFILE=delete_oke.log
    OUTDIR=$WORKDIR/$TEMPLATE/output
    RESOURCE_OCID_FILE=$OUTDIR/$TEMPLATE.ocid
else
    echo "Error, Unable to read template file '$DIRNAME/responsefile/$1'"
    exit 1
fi

# The OCID file lists every resource the provisioning script created; without
# it there is nothing to drive the automatic deletion.
if [[ ! -s "$RESOURCE_OCID_FILE" ]]; then
    echo -e "\nThe '$RESOURCE_OCID_FILE' is not present, cannot proceed with automatic resource deletion."
    exit 1
fi

source $DIRNAME/common/oci_util_functions.sh
source $DIRNAME/common/oci_delete_functions.sh

echo -e "Getting the OCID of the '$COMPARTMENT_NAME' compartment..."
get_compartment_ocid

echo -e "\n============================================================"
echo -e "Compartment Name: $COMPARTMENT_NAME"
echo -e "Compartment OCID: $COMPARTMENT_ID"
echo -e "Created Date/Time: $COMPARTMENT_CREATED"
echo -e "Created Using Template Named: $TEMPLATE"
echo -e "============================================================\n"

# Interactive confirmation before anything destructive happens.
echo -e "Are you sure you wish to delete all of the installed OCI infrastructure"
read -r -p "components from the above compartment ($COMPARTMENT_NAME) [Y|N]? " confirm
if ! [[ $confirm =~ ^[Yy]$ ]]; then
    echo "Exiting without making any changes"
    exit 1
fi

START_TIME=`date +%s`
d=`date +%m-%d-%Y-%H-%M-%S`
mkdir -p $LOGDIR
# Rotate logs from any previous run.
mv $LOGDIR/$LOGFILE $LOGDIR/$LOGFILE-${d} 2>/dev/null
mv $LOGDIR/timings.log $LOGDIR/timings.log-${d} 2>/dev/null
d1=`date +"%a %d %b %Y %T"`
echo -e "Deletion of the OCI Infrastructure Resources Started on $d1" > $LOGDIR/timings.log
STEPNO=0

# Delete resources in reverse order of creation so that dependent objects are
# removed before the resources they depend on.
print_msg screen "Deleting the DNS Server..."
deleteDNS
print_msg screen "Deleting the Network Load Balancer..."
deleteNetworkLBR
print_msg screen "Deleting the Internal Load Balancer..."
deleteInternalLBR
print_msg screen "Deleting the Public Load Balancer..."
deletePublicLBR
print_msg screen "Deleting the NFS Resources..."
deleteNFS
print_msg screen "Deleting the Web Host Resources..."
deleteWebHosts
print_msg screen "Deleting the Bastion Host Resources..."
deleteBastion
print_msg screen "Deleting the Database..."
deleteDatabase
print_msg screen "Deleting the OKE Cluster..."
deleteOKE
print_msg screen "Deleting the VCN Resources..."
deleteVCN

# Remove the state files produced by provision_oci.sh.
rm -rf $LOGDIR/progressfile 2>/dev/null
rm -rf $RESOURCE_OCID_FILE 2>/dev/null
rm -rf $LOGDIR/provision_oci* 2>/dev/null
rm -rf $OUTDIR/*_mounts.sh 2>/dev/null

FINISH_TIME=`date +%s`
time_taken=$((FINISH_TIME-START_TIME))
if [[ "$ostype" == "Darwin" ]]; then
    total_time=$(gdate -ud "@$time_taken" +' %H hours %M minutes %S seconds')
else
    total_time=$(date -ud "@$time_taken" +' %H hours %M minutes %S seconds')
fi
d2=`date +"%a %d %b %Y %T"`
echo -e "Deletion of the OCI Infrastructure Resources Completed in $total_time" >> $LOGDIR/timings.log
# BUGFIX: this line previously truncated timings.log with '>' (wiping the two
# lines written above) and logged the start time ($d1) instead of the
# completion time ($d2).
echo -e "Deletion of the OCI Infrastructure Resources Completed on $d2" >> $LOGDIR/timings.log
print_msg screen "Deletion/clean-up of all the OCI resources that were created with the provision_oci.sh"
print_msg screen "script have been completed. Review the log file at $LOGDIR/$LOGFILE for full details."
print_msg screen "The database may take up to 1 hour before it has been fully deleted."
| true
|
ff06484f8c088ede5f9f1d9722480a52c36f06a0
|
Shell
|
zalando-incubator/kube-metrics-adapter
|
/hack/update-codegen.sh
|
UTF-8
| 3,138
| 3.078125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Regenerates the Kubernetes client code (deepcopy, clientset, listers,
# informers) for the zalando.org/v1 custom resources of this project.
set -o errexit
set -o nounset
set -o pipefail

# Package and CRD coordinates shared by all four generators below.
SRC="github.com"
GOPKG="${SRC}/zalando-incubator/kube-metrics-adapter"
CUSTOM_RESOURCE_NAME="zalando.org"
CUSTOM_RESOURCE_VERSION="v1"

SCRIPT_ROOT="$(dirname "${BASH_SOURCE[0]}")/.."
OUTPUT_BASE="$(dirname "${BASH_SOURCE[0]}")/"

# generate the code with:
# --output-base    because this script should also be able to run inside the vendor dir of
#                  k8s.io/kubernetes. The output-base is needed for the generators to output into the vendor dir
#                  instead of the $GOPATH directly. For normal projects this can be dropped.
OUTPUT_PKG="${GOPKG}/pkg/client"
APIS_PKG="${GOPKG}/pkg/apis"
GROUPS_WITH_VERSIONS="${CUSTOM_RESOURCE_NAME}:${CUSTOM_RESOURCE_VERSION}"

echo "Generating deepcopy funcs"
go run k8s.io/code-generator/cmd/deepcopy-gen \
  --input-dirs "${APIS_PKG}/${CUSTOM_RESOURCE_NAME}/${CUSTOM_RESOURCE_VERSION}" \
  -O zz_generated.deepcopy \
  --bounding-dirs "${APIS_PKG}" \
  --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.go.txt" \
  --output-base "$OUTPUT_BASE"

# CLIENTSET_PKG_NAME defaults to "clientset" when unset (see informer-gen below).
echo "Generating clientset for ${GROUPS_WITH_VERSIONS} at ${OUTPUT_PKG}/${CLIENTSET_PKG_NAME:-clientset}"
go run k8s.io/code-generator/cmd/client-gen \
  --clientset-name versioned \
  --input-base "" \
  --input "${APIS_PKG}/${CUSTOM_RESOURCE_NAME}/${CUSTOM_RESOURCE_VERSION}" \
  --output-package "${OUTPUT_PKG}/clientset" \
  --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.go.txt" \
  --output-base "$OUTPUT_BASE"

echo "Generating listers for ${GROUPS_WITH_VERSIONS} at ${OUTPUT_PKG}/listers"
go run k8s.io/code-generator/cmd/lister-gen \
  --input-dirs "${APIS_PKG}/${CUSTOM_RESOURCE_NAME}/${CUSTOM_RESOURCE_VERSION}" \
  --output-package "${OUTPUT_PKG}/listers" \
  --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.go.txt" \
  --output-base "$OUTPUT_BASE"

echo "Generating informers for ${GROUPS_WITH_VERSIONS} at ${OUTPUT_PKG}/informers"
go run k8s.io/code-generator/cmd/informer-gen \
  --input-dirs "${APIS_PKG}/${CUSTOM_RESOURCE_NAME}/${CUSTOM_RESOURCE_VERSION}" \
  --versioned-clientset-package "${OUTPUT_PKG}/${CLIENTSET_PKG_NAME:-clientset}/${CLIENTSET_NAME_VERSIONED:-versioned}" \
  --listers-package "${OUTPUT_PKG}/listers" \
  --output-package "${OUTPUT_PKG}/informers" \
  --go-header-file "${SCRIPT_ROOT}/hack/boilerplate.go.txt" \
  --output-base "$OUTPUT_BASE"

# hack to make the generated code work with Go module based projects:
# copy the generated trees back into the repo, then drop the scratch output.
cp -r "$OUTPUT_BASE/$GOPKG/pkg/apis" ./pkg
cp -r "$OUTPUT_BASE/$GOPKG/pkg/client" ./pkg
rm -rf "${OUTPUT_BASE:?}${SRC}"
| true
|
438b5f38e8e0b6ded00c16e006e80fec8a7bfddb
|
Shell
|
wuertele/Drupal-Deployer
|
/devbin/test-subtree-revert
|
UTF-8
| 7,030
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/sh -v
# Exercise git subtree merge workflows: build a subproject with versions
# v0..v4, merge it into a superproject at v1, upgrade it to v3, apply
# superproject-side patches, then revert the subtree back to v2 — verifying
# the working tree after each step.
# NOTE: the -v in the shebang is lost when invoked as `sh test-subtree-revert`.

# check_version VERSION MESSAGE
#   Verify the superproject tree is consistent with subproject VERSION:
#   no subproject files leaked into the superproject root, no files from
#   other versions left under subproject/, and every file listed in the
#   subtree README exists.  Prints MESSAGE on success; exits 1 on failure.
#   (BUGFIX: the original used `exit -1`, which is not a valid exit status
#   in POSIX sh; exit takes 0-255.)
check_version() {
	ver=$1
	msg=$2
	ls Feature* 2>/dev/null; if [ $? -eq 0 ]; then echo Unexpected files in superproject:; ls Feature* ; exit 1; fi
	ls subproject/Feature* | grep -v $ver >/dev/null; if [ $? -eq 0 ]; then echo Unexpected files in $ver: ; ls subproject/Feature* | grep -v $ver ; exit 1; fi
	cd subproject; grep Feature README | xargs ls > /dev/null; if [ $? -ne 0 ]; then echo Couldnt find expected file in $ver ; exit 1; fi
	cd ..; echo $msg
}

rm -rf subproject
mkdir subproject
cd subproject
git init
echo v0 > Changelog
echo v0 > README
git add .
git commit -m "v0"
git tag v0
# add v1 files
echo A > Feature-A-v1
echo B > Feature-B-v1v2
echo C > Feature-C-v1v3
echo D > Feature-D-v1v2v3
# tell README what files to expect in v1
echo v1 > README
ls *v1* >> README
# update Changelog
cat README > Changelog.new
echo "" >> Changelog.new
cat Changelog >> Changelog.new
mv Changelog.new Changelog
# commit this version
git add .
git commit -am "v1"
git tag v1
# remove non-v2 files
ls Feature* | grep -v v2 | xargs rm
# add new v2 files
echo E > Feature-E-v2
echo F > Feature-F-v2v3
echo G > Feature-G-v2v4
echo H > Feature-H-v2v3v4
# tell README what files to expect in v2
echo v2 > README
ls *v2* >> README
# update Changelog
cat README > Changelog.new
echo "" >> Changelog.new
cat Changelog >> Changelog.new
mv Changelog.new Changelog
# commit this version
git add .
git commit -am "v2"
git tag v2
# remove non-v3 files
ls Feature* | grep -v v3 | xargs rm
# add back removed v3 files
echo C > Feature-C-v1v3
# add new v3 files
echo I > Feature-I-v3
echo J > Feature-J-v3v4
# tell README what files to expect in v3
echo v3 > README
ls *v3* >> README
# update Changelog
cat README > Changelog.new
echo "" >> Changelog.new
cat Changelog >> Changelog.new
mv Changelog.new Changelog
# commit this version
git add .
git commit -am "v3"
git tag v3
# remove non-v4 files
ls Feature* | grep -v v4 | xargs rm
# add back removed v4 files
echo G > Feature-G-v2v4
# add new v4 files
echo K > Feature-K-v4
# tell README what files to expect in v4
echo v4 > README
ls *v4* >> README
# update Changelog
cat README > Changelog.new
echo "" >> Changelog.new
cat Changelog >> Changelog.new
mv Changelog.new Changelog
# commit this version
git add .
git commit -am "v4"
git tag v4
# Create superproject
cd ..
rm -rf superproject
mkdir superproject
cd superproject
git init
echo superproject > README
git add .
git commit -m "initial commit"
# Merge subproject at v1 as subtree
git remote add subproject ../subproject
git fetch --tags subproject
git merge -s ours --no-commit v1
git read-tree --prefix=subproject/ -u v1
git commit -m "added subproject at v1"
# Patch (incompatible with v2 or v3) subproject in a branch
git checkout master
git checkout -b patch-v1-subproject-incompatible
echo "superproject patch existing v1v2v3 file" >> subproject/Feature-D-v1v2v3
echo "superproject patch existing v1v2 file" >> subproject/Feature-B-v1v2
echo "superproject patch existing v1 file" >> subproject/Feature-A-v1
echo "superproject patch new file" >> subproject/Feature-L-v1v2v3v4
git add subproject/Feature-L-v1v2v3v4
git commit -am "patching subproject"
# Patch (compatible with v2) subproject in a branch
git checkout master
git checkout -b patch-v1-subproject-compatible-v2
echo "superproject patch existing v1v2v3 file" >> subproject/Feature-D-v1v2v3
echo "superproject patch existing v1v2 file" >> subproject/Feature-B-v1v2
echo "superproject patch new file" >> subproject/Feature-M-v1v2v3v4
git add subproject/Feature-M-v1v2v3v4
git commit -am "patching subproject"
# Patch (compatible with v3) subproject in a branch
git checkout master
git checkout -b patch-v1-subproject-compatible-v3
echo "superproject patch existing v1v2v3 file" >> subproject/Feature-D-v1v2v3
echo "superproject patch new file" >> subproject/Feature-M-v1v2v3v4
git add subproject/Feature-M-v1v2v3v4
git commit -am "patching subproject"
# Update subproject to v3 on a test branch
git checkout master
git checkout -b update-subproject-to-v3
git merge -s subtree -X subtree=subproject v3
check_version v3 "Upgrade looks good"
# Merge patches from patch-v1-subproject-compatible-v3
# git merge patch-v1-subproject-incompatible
# git merge patch-v1-subproject-compatible-v2
git merge patch-v1-subproject-compatible-v3
check_version v3 "Merge looks good"
# Patch (compatible with v3 but not v2) subproject in a branch
git checkout -b patch-v3-subproject
echo "superproject patch existing v3 file" >> subproject/Feature-I-v3
echo "superproject patch new file" >> subproject/Feature-O-v1v2v3v4
git add subproject/Feature-O-v1v2v3v4
git commit -am "patching subproject"
# Merge patches from patch-v3-subproject
git checkout update-subproject-to-v3
git merge patch-v3-subproject
check_version v3 "Merge looks good"
# Revert subproject to v2 on a test branch
git checkout -b downgrade-subproject-to-v2
#
# works:
# git diff v3 v2 --src-prefix=subproject/ --dst-prefix=subproject/ | patch -p0
#
# doesn't work:
# git revert --no-edit v3
# git merge -s subtree -X subtree=subproject v2
#
# testing:
#git revert --no-edit --strategy subtree -X subtree=subproject v3
git checkout v3
git revert --no-edit HEAD
git tag revert-v3
git checkout -
git merge -s subtree -X subtree=subproject revert-v3
#
check_version v2 "Downgrade looks good"
exit 0
| true
|
5d014a5c8b59f12cbbedaf1ce63d5f48792dd3d5
|
Shell
|
Richard-Li-lab-team/Visual-Passway-Reconstraction
|
/src/ROI2DWI.sh
|
UTF-8
| 380
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
# Transform an ROI image into DWI space with an MRtrix linear transform.
#
# Usage: ROI2DWI.sh <raw_roi_image> <diff2struct_mrtrix_transform>
# Output: <roi_basename>-DWI.mif next to the input ROI.

RawROI=$1
diff2struct_mrtrix=$2

# Ensure the ROI is in MRtrix .mif format; convert from NIfTI when needed.
# BUGFIX: the original tested "${RawROI#*.}" == "mif", which strips at the
# FIRST dot and therefore misclassifies any path containing an extra dot
# (e.g. "a.b.mif" would be re-converted to "a.b.mif.mif").  Matching the
# ".mif" suffix directly is robust.
if [[ "$RawROI" != *.mif ]]; then
    RawROI_mif=${RawROI%.nii.gz}.mif
    mrconvert "$RawROI" "$RawROI_mif"
else
    RawROI_mif=$RawROI
fi

ROIUse=${RawROI_mif%.mif}-DWI.mif

# Apply the inverse of the struct->diff transform to bring the ROI into DWI space.
mrtransform "$RawROI_mif" \
    -linear "$diff2struct_mrtrix" \
    -inverse "$ROIUse"
| true
|
05fb0c2ee68d8d6ab084bb32f1f6b679fe9e1303
|
Shell
|
tahia/SNP_calling_GATK
|
/16-GATK-HaplotypeCallerBSQR.sh
|
UTF-8
| 2,969
| 2.703125
| 3
|
[] |
no_license
|
############################################# Head of all Scripts ####################################
# The following directories and files are expected to run for SNP calling
refDir=/work/02786/taslima/stampede2/dbs/PH #Reference directory where the reference genome file will be
ref=PhalliiHAL_496_v2.0.softmasked.fa # Name of reference genome file
outDir=/scratch/02786/taslima/data/PHNATAcc/Analysis/V7 # output directory. It must be created before running the script
TMP=/scratch/02786/taslima/data/phalli/Temp
CHRFIL=/work/02786/taslima/stampede2/dbs/PH/PhalliiHAL_496_v2.0.chr
# load required module in TACC
ml intel/17.0.4
ml fastx_toolkit
ml bwa
ml picard
ml samtools
ml gatk/3.8.0
LC_ALL=C
############### !!!!!! Make sure you are using the same version of GATK for the total pipe !!!! #####################
########################################## Step 13: RUN GATK to call raw CALL SNP ################################################
# HaplotypeCaller(HC) is better in terms of miscall of heterozygosity compare to UnifiedGenotyper(UG)
# Start a fresh parameter file; each line will hold one HaplotypeCaller command.
if [ -e callgvcf.param ]; then rm callgvcf.param; fi
# Build one GVCF-calling command per recalibrated (*_BSQR.bam) input file.
for f in `ls $outDir/FinalVCF/*_BSQR.bam`
do
BASE=$(basename $f)
NAME=${BASE%_BSQR.bam}
OFIL1="${outDir}/FinalVCF/${NAME}.BSQR.gvcf"
echo "java -jar -Xmx4G /home1/02786/taslima/GenomeAnalysisTK-3.8-1-0-gf15c1c3ef/GenomeAnalysisTK.jar -T HaplotypeCaller \
-I $f -R $refDir/$ref -o $OFIL1 -nct 4 \
-out_mode EMIT_ALL_CONFIDENT_SITES --emitRefConfidence GVCF \
-variant_index_type LINEAR -variant_index_parameter 128000 \
-rf BadCigar --logging_level ERROR -A QualByDepth -A RMSMappingQuality -A FisherStrand \
-A Coverage -A HaplotypeScore -A MappingQualityRankSumTest -A ReadPosRankSumTest -A MappingQualityZero" >>callgvcf.param
done
#--alleles $refDir/$vcf \
# -L $refDir/$intervals \
#--emitRefConfidence GVCF
#Core=`wc -l rawSNPg.param |cut -f1 -d ' '`
#if (( $Core % 12 == 0)); then Node="$(($Core/12))";
# else Node="$((($Core/12)+1))";
#fi
## Change time (-t) and partition (-p) as per your need and in slurm file change your allocation name
#sbatch -J rawsnp --mail-user=taslima@utexas.edu -N $Node -n $Core -p normal -t 48:00:00 slurm.sh rawSNPg.param
# Split the command list into 522-line chunks (prefixes xaa, xab) so each
# chunk can be submitted as a separate SLURM job below.
split -l 522 --additional-suffix=callgvcf.param callgvcf.param
#
# One SLURM task per command line, 12 tasks per node — round node count up.
Core=`wc -l xaacallgvcf.param |cut -f1 -d ' '`
if (( $Core % 12 == 0)); then Node="$(($Core/12))";
else Node="$((($Core/12)+1))";
fi
## Change time (-t) and partition (-p) as per your need and in slurm file change your allocation name
sbatch -J gvcf -N $Node -n $Core -p normal -t 48:00:00 --ntasks-per-node=12 slurm.sh xaacallgvcf.param
#
Core=`wc -l xabcallgvcf.param |cut -f1 -d ' '`
if (( $Core % 12 == 0)); then Node="$(($Core/12))";
else Node="$((($Core/12)+1))";
fi
## Change time (-t) and partition (-p) as per your need and in slurm file change your allocation name
sbatch -J gvcf -N $Node -n $Core -p normal -t 48:00:00 --ntasks-per-node=12 slurm.sh xabcallgvcf.param
| true
|
33803d223b4eccf5eb4f1354fe02488fd1180e12
|
Shell
|
hippothewild/dotfiles
|
/bootstrap
|
UTF-8
| 3,556
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
# Bootstrap a macOS development machine: install Homebrew packages and
# dockutil, apply macOS defaults, and symlink git/zsh/VS Code/iTerm2/vim
# configuration from this dotfiles repository.
set -ex

# Print a bold blue status message.
printmsg() {
    echo -e "\033[1;34m$1\033[0m"
}

# Ask for the administrator password upfront
sudo -v

# Keep-alive: update existing `sudo` time stamp until `.macos` has finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &

printmsg "*** Check prerequisite ***"
if test ! "$(which tar)"; then
    echo 'Install tar/gz to continue.'
    exit 1
fi
if test ! "$(which curl)"; then
    echo 'Install curl to continue.'
    exit 1
fi
if test ! "$(which git)"; then
    echo 'Install git to continue.'
    exit 1
fi

# Repository root; assumes the script is run from the dotfiles checkout.
DOTFILE_DIR=$(pwd)

printmsg "*** Install HomeBrew ***"
# NOTE(review): hardcodes the Apple Silicon brew prefix /opt/homebrew —
# confirm behavior on Intel Macs (/usr/local).
if [[ $(command -v /opt/homebrew/bin/brew) == "" ]]; then
    /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
else
    printmsg "Homebrew is already installed, updating..."
    /opt/homebrew/bin/brew update
fi

printmsg "*** Install binaries and applications via Homebrew ***"
export PATH="$PATH:/opt/homebrew/bin"
brew update
brew tap homebrew/bundle
brew bundle --file="$DOTFILE_DIR"/Brewfile
brew cleanup

# Install dockutil from source https://github.com/kcrawford/dockutil/issues/127 has fixed
printmsg "*** Install dockutils from source (https://github.com/kcrawford/dockutil/issues/127) ***"
# Grab the latest .pkg asset URL from the GitHub releases API.
DOCKUTIL_URL=$(curl --silent "https://api.github.com/repos/kcrawford/dockutil/releases/latest" | jq -r .assets[].browser_download_url | grep pkg)
curl -sL "${DOCKUTIL_URL}" -o /tmp/dockutil.pkg
sudo installer -pkg "/tmp/dockutil.pkg" -target /
rm /tmp/dockutil.pkg

printmsg "*** Set macOS system configurations ***"
sh .macos

printmsg "*** Set git configurations ***"
# Symlink git configs only when not already present.
[ ! -f "$HOME"/.gitconfig ] && ln -nfs "$DOTFILE_DIR"/.gitconfig "$HOME"/.gitconfig
[ ! -f "$HOME"/.gitconfig-vessl ] && ln -nfs "$DOTFILE_DIR"/.gitconfig-vessl "$HOME"/.gitconfig-vessl

printmsg "*** Change default shell to zsh, install oh-my-zsh and set configuration ***"
# NOTE(review): appends to /etc/shells on every run — duplicates accumulate.
which zsh | sudo tee -a /etc/shells
chsh -s "$(which zsh)"
if [ -d ~/.oh-my-zsh ]; then
    printmsg "oh-my-zsh has already installed"
else
    sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
fi
# Back up any existing .zshrc before replacing it with the repo's symlink.
if [ -f "$HOME"/.zshrc ]; then
    cp "$HOME"/.zshrc "$HOME"/.zshrc.backup
fi
ln -nfs "$DOTFILE_DIR"/.zshrc "$HOME"/.zshrc

printmsg "*** Install Visual Studio Code extensions ***"
if test "$(which code)"; then
    for ext in arcanis.vscode-zipfs \
        dbaeumer.vscode-eslint \
        eamodio.gitlens \
        esbenp.prettier-vscode \
        GitHub.copilot \
        GitHub.vscode-pull-request-github \
        golang.go \
        hashicorp.terraform \
        James-Yu.latex-workshop \
        ms-python.python \
        ms-vscode-remote.remote-ssh \
        ms-vscode-remote.remote-ssh-edit \
        timonwong.shellcheck; do
        code --install-extension $ext
    done
    # Back up then replace the VS Code user settings with the repo's version.
    if [ -f "$HOME"/Library/Application\ Support/Code/User/settings.json ]; then
        cp "$HOME"/Library/Application\ Support/Code/User/settings.json "$HOME"/Library/Application\ Support/Code/User/settings.json.backup
    fi
    ln -nfs "$DOTFILE_DIR"/vscode_settings.json "$HOME"/Library/Application\ Support/Code/User/settings.json
fi

printmsg "*** Copy editor & terminal configurations ***"
# Point iTerm2 at the preferences stored in this repository.
defaults write com.googlecode.iterm2.plist PrefsCustomFolder -string "$DOTFILE_DIR/iterm"
defaults write com.googlecode.iterm2.plist LoadPrefsFromCustomFolder -bool true
if [ -f "$HOME"/.vimrc ]; then
    cp "$HOME"/.vimrc "$HOME"/.vimrc.backup
fi
ln -nfs "$DOTFILE_DIR"/.vimrc "$HOME"/.vimrc

printmsg "All dotfiles setup completed!\nPlease logout/login to apply some system configurations."
| true
|
dd6bbbd8fc4a7d498fd4890bf0f1b8bba7531f4f
|
Shell
|
EnzoHaegel/MyBash
|
/mybash_install/scripts/script_man/ra
|
UTF-8
| 369
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Print the manual text for the `ra` helper ("rebuild all").
# With no argument, or with "1" as the first argument, the (French) man text
# below is written to stdout; any other argument prints nothing.
if [ -z "$1" ] || [ "$1" == "1" ]; then
    # Write the man text after the echo below
    # (translated from: "Ecrivez le man à la suite du echo")
    echo "
PROTOTYPE :
ra()
UTILISATION :
ra
DESCRIPTION :
ra est un raccourci de rebuild all, rebuild tout à neuf front et back sur rc
de poplee talent, peut mettre un peu de temps.
VALEUR DE RETOUR :
Pleins possibles suivant l'erreur
[1/1]
"
fi
| true
|
4a740ad6f8a0370a95962579562ab96284b6cc03
|
Shell
|
octobot-dev/react-boilerplate
|
/scripts/go-script-bash/tests/prompt/prompt-for-input.bats
|
UTF-8
| 1,900
| 3.46875
| 3
|
[
"MIT",
"ISC",
"LicenseRef-scancode-free-unknown"
] |
permissive
|
#! /usr/bin/env bats
# Bats tests for @go.prompt_for_input from the go-script-bash "prompt" module.
load ../environment

# Generate a test script that prompts with the given prompt/default/failure
# message, echoes the captured response, and exits with the prompt's status.
setup() {
  test_filter
  @go.create_test_go_script '. "$_GO_USE_MODULES" "prompt"' \
    'declare prompt="$1"' \
    'declare default="$2"' \
    'declare fail_msg="$3"' \
    'declare response="initial value"' \
    'declare result' \
    '@go.prompt_for_input "response" "$prompt" "$default" "$fail_msg"' \
    'result="$?"' \
    'printf -- "%s\n" "$response"' \
    'exit "$result"'
}

teardown() {
  @go.remove_test_go_rootdir
}

# The response variable name must be a valid shell identifier.
@test "$SUITE: error if variable not a valid identifier" {
  @go.create_test_go_script '. "$_GO_USE_MODULES" "prompt"' \
    '@go.prompt_for_input "invalid;"'
  run "$TEST_GO_SCRIPT"
  assert_failure

  local err_msg='Input prompt response variable name "invalid;" for '
  err_msg+='@go.prompt_for_input contains invalid identifier characters at:'
  assert_lines_match "^${err_msg}\$" \
    "^  $TEST_GO_SCRIPT:[0-9] main$"
}

# Surrounding whitespace in the user's answer is stripped.
@test "$SUITE: reads and trims value" {
  run "$TEST_GO_SCRIPT" $'What is your quest?\n' <<<'  To seek the grail!  '
  assert_success 'What is your quest?' \
    'To seek the grail!'
}

@test "$SUITE: with default preserves prompt space" {
  run "$TEST_GO_SCRIPT" $'What is your quest?\n' 'To seek the grail!' <<<''
  assert_success 'What is your quest? [default: To seek the grail!]' \
    'To seek the grail!'
}

@test "$SUITE: with default adds prompt space if missing" {
  run "$TEST_GO_SCRIPT" 'What is your quest?' 'To seek the grail!' <<<''
  assert_success \
    'What is your quest? [default: To seek the grail!] To seek the grail!'
}

# Empty input is accepted when no failure message is configured.
@test "$SUITE: reads empty input if no error message" {
  run "$TEST_GO_SCRIPT" $'What is your quest?\n' <<<''
  assert_success 'What is your quest?'
}

# Empty input fails when a failure message is configured.
@test "$SUITE: fails with error message on empty input" {
  run "$TEST_GO_SCRIPT" $'What is your quest?\n' '' 'Auuuuuuuugh!' <<<''
  assert_failure 'What is your quest?' \
    'Auuuuuuuugh!'
}
| true
|
09f827acc46cf8a1ad880f6bf4fc3e6fe9354bd1
|
Shell
|
nhatzHK/wallpapers
|
/display.sh
|
UTF-8
| 902
| 3.734375
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/bash
# Interactively display every .png/.jpg image in the current directory
# with feh.  Keys: n = next image, q = quit, h = help.

# BUGFIX: the original iterated over `ls | grep -sE "*.png|*.jpg"`, which is
# both a broken ERE (leading '*', unescaped dots, unanchored) and breaks on
# filenames containing whitespace.  Globbing handles both correctly.
shopt -s nullglob   # unmatched patterns expand to nothing, not themselves

quit=0
for i in *.png *.jpg; do
    printf 'Displaying %s\n\n' "$i"
    feh -xdpq "$i" & FEH_PID=$!
    next=1
    quit=1
    # Poll single keypresses until the user asks for the next image or quits.
    until [[ $next == 0 ]] || [[ $quit -eq 0 ]]; do
        read -n 1 -p ":>" watcher
        printf "\n"
        case $watcher in
            N|n)
                kill "$FEH_PID"
                next=0
                ;;
            Q|q)
                kill "$FEH_PID"
                quit=0
                break
                ;;
            H|h)
                printf "\nPress n to display the next image\n"
                printf "Press q to exit the script\n"
                printf "Press h to display this help message\n"
                ;;
            *)
                printf "\nUnknown option\n"
                printf "Press h for help\n"
                ;;
        esac
    done
    if [[ $quit -eq 0 ]]; then
        break
    fi
done
| true
|
3a74baaa36a8d39985b2e0bf5e888c6b6452aa25
|
Shell
|
Js-Nanodegree/ereb
|
/build-image
|
UTF-8
| 160
| 2.90625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Build the aviasales/ereb Docker image tagged with the given version.
#
# Usage: build-image <version>
version=$1

if [[ -z "$version" ]]; then
    # No version supplied: print usage (to stderr) and abort.
    printf 'Usage %s [version]\n' "$0" >&2
    exit 1
fi

# Quote the tag so an unusual $version cannot split into extra arguments.
docker build -t "aviasales/ereb:$version" -f Dockerfile .
| true
|
8e70fbca50c44bc5a52592be472ae0cf6a7bc990
|
Shell
|
ddv12138/WebApp-Auto-Depolyment
|
/init.sh
|
UTF-8
| 1,204
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
# Automatic deployment: start linked MySQL, Tomcat and PHP (phpMyAdmin)
# containers, prompting for optional container names.
set -e

echo '开始自动部署'
pwd=$(pwd)

# Defaults; each read below overwrites the variable with the user's answer
# (possibly empty), and an empty answer is restored to the default after.
mysqlname="mysql-server"
tomcatname="tomcat-server"
phpname="php-server"
read -p "请输入mysql容器的名字(默认是${mysqlname}):" mysqlname
read -p "请输入tomcat容器的名字(默认是${tomcatname}):" tomcatname
read -p "请输入php容器的名字(默认是${phpname}):" phpname

if [ -n "${mysqlname}" ];then
    echo $mysqlname
else
    mysqlname="mysql-server"
    echo $mysqlname
fi
if [ -n "${tomcatname}" ];then
    # BUGFIX: previously echoed the undefined variable $tomcatname123
    echo $tomcatname
else
    tomcatname="tomcat-server"
    echo $tomcatname
fi
if [ -n "${phpname}" ];then
    # BUGFIX: previously echoed the undefined variable $phpname123
    echo $phpname
else
    phpname="php-server"
    echo $phpname
fi

echo "当前路径:${PWD}"
echo "启动mysql-server"
docker run -d -p 3306:3306 --name ${mysqlname} -v "${pwd}/mysql-init/mysql-data/:/var/lib/mysql/" mysql:5.7
echo "启动tomcat-server"
docker run -d -p 80:8080 --name ${tomcatname} --link=${mysqlname}:${mysqlname} -v "${pwd}/tomcat-init/webroot/:/usr/local/tomcat/webapps/ROOT" ddv12138/tomcat-with-mysql-driver
echo "启动php-server"
docker run -d -p 8888:80 --name ${phpname} -v "${pwd}/php-init/phpmyadmin/:/var/www/html/" ddv12138/php-phpmyadmin:latest
| true
|
a753d318b74844afd24677b2d887cfdf75feeec0
|
Shell
|
kaiana/deprecated
|
/bigsudo/usr/bin/bigterminal
|
UTF-8
| 839
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/bash
#Terminal
#
#Authors:
# Bruno Goncalves Araujo <www.biglinux.com.br>
#
#License: GPLv2 or later
#################################################
# Launch the first available terminal emulator, forwarding all arguments.
# Preference: konsole (only while a KDE Plasma session is running), then
# gnome-terminal, xfterm4, lxterm, xterm, eterm, aterm.
# BUGFIX: arguments are now forwarded with "$@" instead of unquoted $*,
# which destroyed word boundaries in arguments containing spaces.

#kde-konsole: prefer konsole only when plasma-desktop is running
if [ "$(ps -A | grep plasma-desktop)" != "" ]
then
    if [ "$(which konsole)" != "" ];
    then
        konsole "$@"
        exit 0
    fi
fi

# Try the remaining terminals in the original preference order.
for term in gnome-terminal xfterm4 lxterm xterm eterm aterm
do
    if [ "$(which $term)" != "" ];
    then
        "$term" "$@"
        exit 0
    fi
done
| true
|
c7e7ebf878134a971714a6a4c4e44ae45e2bb421
|
Shell
|
opnsense/tools
|
/build/skim.sh
|
UTF-8
| 6,588
| 3.578125
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# Copyright (c) 2015-2022 Franco Fichtner <franco@opnsense.org>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
set -e
# Internal name of this tool and the upstream tree name referenced in the
# generated git commit messages.
SELF=skim
FROM=FreeBSD
. ./common.sh
# Default the port list to everything configured for skim/aux/ports.
if [ -z "${PORTSLIST}" ]; then
PORTSLIST=$(list_config ${CONFIGDIR}/skim.conf ${CONFIGDIR}/aux.conf \
${CONFIGDIR}/ports.conf)
fi
# Page diffs through colordiff when installed, plain cat otherwise.
DIFF="$(which colordiff 2> /dev/null || echo cat)"
LESS="less -R"
setup_stage ${STAGEDIR}
# Bring both the working ports tree and the upstream reference tree to a
# clean, up-to-date state before comparing them.
git_branch ${PORTSDIR} ${PORTSBRANCH} PORTSBRANCH
git_fetch ${PORTSREFDIR}
git_pull ${PORTSREFDIR} ${PORTSREFBRANCH}
git_reset ${PORTSREFDIR} HEAD
git_reset ${PORTSDIR} HEAD
# Phase selection: "unused" refreshes ports we do not build, "used"
# interactively reviews ports we do build; the default runs both phases.
UNUSED=1
USED=1
for ARG in ${@}; do
case ${ARG} in
unused)
UNUSED=1
USED=
;;
used)
UNUSED=
USED=1
;;
esac
done
sh ./make.conf.sh > ${STAGEDIR}/make.conf
echo "${PORTSLIST}" > ${STAGEDIR}/skim
: > ${STAGEDIR}/used
PORTSCOUNT=$(wc -l ${STAGEDIR}/skim | awk '{ print $1 }')
PORTSNUM=0
# Build the transitive dependency list of every configured port into
# ${STAGEDIR}/used, printing a simple percentage progress indicator.
echo -n ">>> Gathering dependencies: 0%"
while read PORT_ORIGIN PORT_BROKEN; do
# Split "category/port@flavor" into the port path and optional flavor.
FLAVOR=${PORT_ORIGIN##*@}
PORT=${PORT_ORIGIN%%@*}
MAKE_ARGS="
__MAKE_CONF=${STAGEDIR}/make.conf
PRODUCT_ABI=${PRODUCT_ABI}
"
if [ ${FLAVOR} != ${PORT} ]; then
MAKE_ARGS="${MAKE_ARGS} FLAVOR=${FLAVOR}"
fi
# Prefer the local tree; fall back to the upstream reference copy.
SOURCE=${PORTSDIR}
if [ ! -d ${PORTSDIR}/${PORT} ]; then
SOURCE=${PORTSREFDIR}
fi
${ENV_FILTER} make -C ${SOURCE}/${PORT} \
PORTSDIR=${SOURCE} ${MAKE_ARGS} all-depends-list \
| awk -F"${SOURCE}/" '{print $2}' >> ${STAGEDIR}/used
echo ${PORT} >> ${STAGEDIR}/used
PORTSNUM=$(expr ${PORTSNUM} + 1)
printf "\b\b\b\b%3s%%" \
$(expr \( 100 \* ${PORTSNUM} \) / ${PORTSCOUNT})
done < ${STAGEDIR}/skim
sort -u ${STAGEDIR}/used > ${STAGEDIR}/used.unique
cp ${STAGEDIR}/used.unique ${STAGEDIR}/used
# Also record each port's MASTER_PORT so slave ports keep their masters.
# NOTE(review): ${MAKE_ARGS} below still holds the value from the last
# iteration of the loop above (including its FLAVOR) — confirm intended.
while read PORT; do
SOURCE=${PORTSDIR}
if [ ! -d ${PORTSDIR}/${PORT} ]; then
SOURCE=${PORTSREFDIR}
fi
PORT_MASTER=$(${ENV_FILTER} make -C ${SOURCE}/${PORT} \
-v MASTER_PORT PORTSDIR=${SOURCE} ${MAKE_ARGS})
if [ -n "${PORT_MASTER}" ]; then
echo ${PORT_MASTER} >> ${STAGEDIR}/used
fi
done < ${STAGEDIR}/used.unique
sort -u ${STAGEDIR}/used > ${STAGEDIR}/used.unique
rm ${STAGEDIR}/used
echo
# Phase 1 ("unused"): replace every category of the local tree with the
# upstream copy, but first stash back our own version of every used port,
# then commit the whole refresh as one batch.
if [ -n "${UNUSED}" ]; then
(cd ${PORTSDIR}; mkdir -p $(make -C ${PORTSREFDIR} -v SUBDIR))
mkdir ${STAGEDIR}/ref
for ENTRY in ${PORTSDIR}/[a-z]*; do
ENTRY=${ENTRY##"${PORTSDIR}/"}
echo ">>> Refreshing ${ENTRY}"
if [ ! -d ${PORTSREFDIR}/${ENTRY} ]; then
cp -R ${PORTSDIR}/${ENTRY} ${STAGEDIR}/ref
continue
fi
cp -R ${PORTSREFDIR}/${ENTRY} ${STAGEDIR}/ref
(grep "^${ENTRY}/" ${STAGEDIR}/used.unique || true) > \
${STAGEDIR}/used.entry
# Put the locally-used ports back on top of the upstream category copy.
while read PORT; do
rm -fr ${STAGEDIR}/ref/${PORT}
cp -R ${PORTSDIR}/${PORT} \
${STAGEDIR}/ref/$(dirname ${PORT})
done < ${STAGEDIR}/used.entry
rm -rf ${PORTSDIR}/${ENTRY}
mv ${STAGEDIR}/ref/${ENTRY} ${PORTSDIR}
done
# Commit the refresh only if something actually changed.
(
cd ${PORTSDIR}
git add .
if ! git diff --quiet HEAD; then
git commit -m \
"*/*: sync with upstream
Taken from: ${FROM}"
fi
)
fi
# Collect the used ports that differ from their upstream counterpart.
: > ${STAGEDIR}/used.changed
while read PORT; do
if [ ! -d ${PORTSREFDIR}/${PORT} ]; then
continue;
fi
diff -rq ${PORTSDIR}/${PORT} ${PORTSREFDIR}/${PORT} \
> /dev/null && continue
echo ${PORT} >> ${STAGEDIR}/used.changed
done < ${STAGEDIR}/used.unique
# Phase 2 ("used"): interactively review each changed port and the ports
# framework. Answers: y = take upstream (leave uncommitted), e = take
# upstream then cherry-pick hunks interactively and commit, c = take
# upstream and commit immediately, anything else = keep local version.
if [ -n "${USED}" ]; then
while read PORT; do
clear
(diff -Nru ${PORTSDIR}/${PORT} ${PORTSREFDIR}/${PORT} \
2>/dev/null || true) | ${DIFF} | ${LESS}
echo -n ">>> Replace ${PORT} [c/e/y/N]: "
read YN < /dev/tty
case ${YN} in
[yY])
rm -fr ${PORTSDIR}/${PORT}
cp -a ${PORTSREFDIR}/${PORT} ${PORTSDIR}/${PORT}
;;
[eE])
rm -fr ${PORTSDIR}/${PORT}
cp -a ${PORTSREFDIR}/${PORT} ${PORTSDIR}/${PORT}
(cd ${PORTSDIR}; git checkout -p ${PORT} < /dev/tty)
(cd ${PORTSDIR}; git add ${PORT})
(cd ${PORTSDIR}; if ! git diff --quiet HEAD; then
git commit -m \
"${PORT}: partially sync with upstream
Taken from: ${FROM}"
fi)
;;
[cC])
rm -fr ${PORTSDIR}/${PORT}
cp -a ${PORTSREFDIR}/${PORT} ${PORTSDIR}/${PORT}
(cd ${PORTSDIR}; git add ${PORT})
(cd ${PORTSDIR}; git commit -m \
"${PORT}: sync with upstream
Taken from: ${FROM}")
;;
esac
done < ${STAGEDIR}/used.changed
# Framework directories (capitalised entries, e.g. Mk, Templates) are
# reviewed and replaced as a single unit.
for ENTRY in ${PORTSREFDIR}/[A-Z]*; do
ENTRY=${ENTRY##"${PORTSREFDIR}/"}
diff -rq ${PORTSDIR}/${ENTRY} ${PORTSREFDIR}/${ENTRY} \
> /dev/null || ENTRIES="${ENTRIES} ${ENTRY}"
done
if [ -n "${ENTRIES}" ]; then
clear
(for ENTRY in ${ENTRIES}; do
diff -Nru ${PORTSDIR}/${ENTRY} ${PORTSREFDIR}/${ENTRY} \
2>/dev/null || true
done) | ${DIFF} | ${LESS}
echo -n ">>> Replace Framework [c/e/y/N]: "
read YN < /dev/tty
case ${YN} in
[yY])
for ENTRY in ${ENTRIES}; do
rm -r ${PORTSDIR}/${ENTRY}
cp -a ${PORTSREFDIR}/${ENTRY} ${PORTSDIR}/
done
;;
[eE])
for ENTRY in ${ENTRIES}; do
rm -r ${PORTSDIR}/${ENTRY}
cp -a ${PORTSREFDIR}/${ENTRY} ${PORTSDIR}/
done
(cd ${PORTSDIR}; git checkout -p ${ENTRIES} < /dev/tty)
(cd ${PORTSDIR}; git add ${ENTRIES})
(cd ${PORTSDIR}; if ! git diff --quiet HEAD; then
git commit -m \
"Framework: partially sync with upstream
Taken from: ${FROM}"
fi)
;;
[cC])
for ENTRY in ${ENTRIES}; do
rm -r ${PORTSDIR}/${ENTRY}
cp -a ${PORTSREFDIR}/${ENTRY} ${PORTSDIR}/
(cd ${PORTSDIR}; git add ${ENTRY})
done
(cd ${PORTSDIR}; git commit -m \
"Framework: sync with upstream
Taken from: ${FROM}")
;;
esac
fi
fi
| true
|
1cd22d96496cae616140e1741e89eea14a2e3981
|
Shell
|
vasyahacker/perfectominer
|
/srv/work/set_hostname.sh
|
UTF-8
| 249
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Set the machine hostname everywhere it matters: the running kernel,
# /etc/hosts (127.0.1.1 alias) and /etc/hostname, then re-run the
# hostname init script and sysctl so services pick it up.
# Usage: set_hostname.sh <new-hostname>
NEW_HOSTNAME=$1
# Guard: an empty argument would blank the hostname files. ${var:?} aborts
# with a usage message when $1 is missing or empty.
: "${NEW_HOSTNAME:?usage: $0 <new-hostname>}"
# Apply immediately to the running kernel.
echo "$NEW_HOSTNAME" > /proc/sys/kernel/hostname
# Keep the loopback alias in /etc/hosts in sync.
sed -i 's/127.0.1.1.*/127.0.1.1\t'"$NEW_HOSTNAME"'/g' /etc/hosts
# Persist across reboots.
echo "$NEW_HOSTNAME" > /etc/hostname
/etc/init.d/hostname.sh
sysctl kernel.hostname="$NEW_HOSTNAME"
| true
|
4a4ea9df536956f1c9e5c9eeac63444b162b6a72
|
Shell
|
kvpb/.files
|
/acos.d/install/Git.sh
|
UTF-8
| 1,393
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Install the latest Git for macOS: download the installer DMG from
# SourceForge, mount it, run the embedded .pkg with sudo, then keep the
# DMG in ~/Downloads and remove the scratch directory.
#DMG_filename="git-latest-osx-installer.dmg"
#DMG_filename_extension="${DMG_filename##*.}"
#DMG_filename_without_extension=$(basename "${DMG_filename}" .dmg)
#DMG_PKG_pathname="/Volumes/Git * Intel Universal/git-*-intel-universal-*.pkg"
#DMG_PKG_filename="${DMG_PKG_pathname##*/}"
mkdir -p ${HOME}/Temporary
cd ${HOME}/Temporary
# --remote-header-name honours the server-supplied filename; --location
# follows SourceForge's redirect chain to the actual file.
curl --remote-header-name --location https://sourceforge.net/projects/git-osx-installer/files/latest --output git-latest-osx-installer.dmg
hdiutil mount -nobrowse git-latest-osx-installer.dmg
#DMG_PKG_filename=$(basename "/Volumes/Git\ *\ *\ Intel\ Universal/git-*-intel-universal-*.pkg")
#DMG_PKG_filename_extension="${DMG_PKG_filename##*.}"
#DMG_PKG_filename="${DMG_PKG_filename%.*}"
#DMG_PKG_filename_without_extension=$(basename "/Volumes/Git\ *\ *\ Intel\ Universal/git-*-intel-universal-*.pkg" .pkg)
# The globs match the versioned volume and package names on the mounted DMG.
sudo installer -pkg /Volumes/Git\ *\ Intel\ Universal/git-*-intel-universal-*.pkg -target /
hdiutil unmount /Volumes/Git\ *\ Intel\ Universal
#mv "${DMG_filename}" "${DMG_PKG_filename_without_extension}.${DMG_filename_extension}"
#mv git-*-intel-universal-*.dmg ~/Downloads/
mv git-latest-osx-installer.dmg ~/Downloads/
cd ${HOME}
# rmdir (not rm -rf): fails safely if anything unexpected remains inside.
rmdir Temporary
exit 0
# InstallGit.sh
#
# Karl V. P. B. `kvpb`
# +1 (DDD) DDD-DDDD
# local-part@domain
# https://www.linkedin.com/in/
# https://twitter.com/
# https://github.com/kvpb
# https://vm.tiktok.com//
| true
|
ee72bb05ffeb1b627643b0d2c1998b22cc4af862
|
Shell
|
bucko909/home-bin
|
/dvd_rip.sh
|
UTF-8
| 16,639
| 3.1875
| 3
|
[] |
no_license
|
#!/bin/bash
# DVD ripping pipeline: rips titles from a DVD, extracts per-episode audio,
# subtitle and video tracks, encodes video to H.264 and merges everything
# into per-episode MKV files, driven by a hand-edited $BASE.rip_profile.
# Working directories: RIP_TEMP holds raw VOBs, ENC_TEMP intermediate
# streams, FINAL the rip profile and finished MKVs, LOGS command logs.
#RIP_TEMP=/smb/newton/media_rw/media4/temp
RIP_TEMP=/smb/newton/media/peer2peer
ENC_TEMP=/disks/media
FINAL=/disks/media
LOGS=/disks/media
run_long() {
# Run a command, logging stdout/stderr to $LOGS/<name>.out and <name>.err.
# $1 is the log basename; the remaining words are the command to run.
# NOTE(review): the command is expanded unquoted ($*), so arguments
# containing spaces are split — callers must pass simple word lists.
fn=$1
shift
echo "$*"
$* > $LOGS/$fn.out 2> $LOGS/$fn.err
}
run_cmd() {
# Convenience wrapper around run_long that uses the command string itself
# as the log basename.
# BUGFIX: the original called `run_long "$*"` only, so run_long consumed
# the whole command as the log name, shifted it away, and executed nothing.
# Pass the joined string as the name AND the original words as the command.
run_long "$*" "$@"
}
# Usage: dvd_rip.sh <base-name> ["action [action...]"]
# With no second argument, run the whole pipeline in order.
BASE=$1
ACTIONS="$2"
if [ -z "$ACTIONS" ]; then
ACTIONS="get_dvd_info find_tracks rip_titles extract_tracks remove_vobs encode_tracks merge_tracks clean"
fi
for ACTION in $ACTIONS; do
case $ACTION in
get_dvd_info)
# Probe the DVD with mplayer and write a skeleton rip profile listing
# titles, chapters, audio streams, subtitles and angles for hand-editing.
echo get_dvd_info
mplayer dvd:// -frames 1 -vo null -ao null > $LOGS/$BASE.mplayer_output 2> $LOGS/$BASE.mplayer_stderr
# NOTE(review): the probe output was written to $LOGS/ but is read below
# without that prefix — this only works when running from $LOGS.
TITLES="$(perl -ne '/^There are (\d+) titles on this DVD\./&&print"$1"' $BASE.mplayer_output)"
echo Titles: $TITLES
rm $FINAL/$BASE.rip_profile
echo "Disc_name = \"\";" >> $FINAL/$BASE.rip_profile
for TITLE in $(seq 1 $TITLES); do
echo Title $TITLE:
echo -n "TITLE_$TITLE: " >> $FINAL/$BASE.rip_profile
echo -n "Series = \"\"; Season = \"\"; " >> $FINAL/$BASE.rip_profile
mplayer dvd://$TITLE -frames 1 -vo null -ao null > $LOGS/$BASE.tit_$TITLE.mplayer_output 2> $LOGS/$BASE.tit_$TITLE.mplayer_stderr
FPS="$(perl -ne '/^VIDEO: MPEG2.*?(\d+\.\d+) fps/&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
TELECINE=1 # TODO infer
CHAPTERS="$(perl -ne '/^There are (\d+) chapters in this DVD title\./&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
ANGLES="$(perl -ne '/^There are (\d+) angles in this DVD title\./&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
AUDIO_CHANNELS="$(perl -ne '/^number of audio channels on disk: (\d+)\./&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
SUBTITLES="$(perl -ne '/^number of subtitles on disk: (\d+)/&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
echo -n "Telecine = $TELECINE; " >> $FINAL/$BASE.rip_profile
echo -n "FPS = $FPS; " >> $FINAL/$BASE.rip_profile
echo -n "Chapters = $CHAPTERS; " >> $FINAL/$BASE.rip_profile
# Heuristic: 5+ chapters looks like a feature, fewer like disc extras.
if [ "$CHAPTERS" -ge 5 ]; then
echo -n "Type = Feature; " >> $FINAL/$BASE.rip_profile
else
echo -n "Type = Extras; " >> $FINAL/$BASE.rip_profile
fi
if [ "$ANGLES" -ne 1 ]; then echo "Angles = $ANGLES; " >> $FINAL/$BASE.rip_profile; fi
echo " "Chapters: $CHAPTERS
echo " "Audio channels: $AUDIO_CHANNELS
echo -n "Audio = $AUDIO_CHANNELS; " >> $FINAL/$BASE.rip_profile
# Record each audio stream's language/format/aid; ja/en get friendly names.
for AUDIO_CHANNEL in $(seq 0 $(expr $AUDIO_CHANNELS - 1)); do
LANGUAGE="$(perl -ne '/^audio stream: '$AUDIO_CHANNEL' .* language: (\w+)/&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
FORMAT="$(perl -ne '/^audio stream: '$AUDIO_CHANNEL' format: (\S+)/&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
AID="$(perl -ne '/^audio stream: '$AUDIO_CHANNEL' .* aid: (\d+)/&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
if [ "$LANGUAGE" = "ja" -o "$LANGUAGE" = "jp" ]; then
L2=jpn
NAME=Japanese
elif [ "$LANGUAGE" = "en" ]; then
L2=eng
NAME=English
else
L2=$LANGUAGE
NAME=$LANGUAGE
fi
echo -n "Audio_$AUDIO_CHANNEL"_name = \"$NAME\"\;" " >> $FINAL/$BASE.rip_profile
echo -n "Audio_$AUDIO_CHANNEL"_language = $L2\;" " >> $FINAL/$BASE.rip_profile
echo -n "Audio_$AUDIO_CHANNEL"_format = $FORMAT\;" " >> $FINAL/$BASE.rip_profile
echo -n "Audio_$AUDIO_CHANNEL"_aid = $AID\;" " >> $FINAL/$BASE.rip_profile
done
echo -n "Subtitles = $SUBTITLES; " >> $FINAL/$BASE.rip_profile
echo " "Subtitles: $SUBTITLES
for SUBTITLE in $(seq 0 $(expr $SUBTITLES - 1)); do
LANGUAGE="$(perl -ne '/^subtitle \( sid \): '$SUBTITLE' language: (\w+)/&&print"$1"' $BASE.tit_$TITLE.mplayer_output)"
if [ "$LANGUAGE" = "ja" -o "$LANGUAGE" = "jp" ]; then
L2=jpn
NAME=Japanese
elif [ "$LANGUAGE" = "en" ]; then
L2=eng
NAME=English
else
L2=$LANGUAGE
NAME=$LANGUAGE
fi
echo -n "Subtitle_$SUBTITLE"_name = \"$NAME\"\;" " >> $FINAL/$BASE.rip_profile
echo -n "Subtitle_$SUBTITLE"_language = $L2\;" " >> $FINAL/$BASE.rip_profile
done
echo " "Angles: $ANGLES
echo -n "Angles = $ANGLES; " >> $FINAL/$BASE.rip_profile
if [ "$ANGLES" = 1 ]; then
echo -n "Angle_1" = \"Video\"\;" " >> $FINAL/$BASE.rip_profile
else
for ANGLE in $(seq 1 $ANGLES); do
echo -n "Angle_$ANGLE"_name = \"Angle $ANGLE\"\;" " >> $FINAL/$BASE.rip_profile
done
fi
echo >> $FINAL/$BASE.rip_profile
dvdxchap -t $TITLE /dev/dvd > $LOGS/$BASE.tit_$TITLE.chapters.txt
# Feature titles: the first perl guesses episode boundaries from repeated
# chapter lengths (opening/closing credits); extras just get numbered parts.
if [ "$CHAPTERS" -ge 5 ]; then
perl -ne 'if(/CHAPTER0*(\d+)=(\d+):(\d+):(\d+).(\d+)/){$pn=$2*3600+$3*60+$4+$5/1000;$l[$1-1]=$pn-$p if$1>1;$p=$pn;$p[$1]="$2:$3:$4.$5";$pl[$1]=$pn;}END{$e=int($pl[$#pl]/1320);for$n(1..$#l){@a=grep{abs($l[$_]-$l[$n])<3}(1..$#l);if($l[$n]<120&&@a>=$e-1){$r[$c++]=$n;last if$c==2}}for(1..$#p){print"CHAPTER_$_: ";if(abs($l[$r[0]+$_-1]-$l[$r[0]])<3){print "Episode = ".++$ep."; Episode_name = \"\"; ";$part=0;}print"Chapter_name = \"";if (abs($l[$r[0]]-$l[$_])<3){print"Opening Credits"}elsif(abs($l[$r[1]]-$l[$_])<3){print"Closing Credits"}else{print"Part ".++$part}print"\"; ";print"Chapter_start = $p[$_]; Chapter_length = ".sprintf("%.3f",$l[$_]).";\n"}}' < $LOGS/$BASE.tit_$TITLE.chapters.txt >> $FINAL/$BASE.rip_profile
else
perl -ne 'if(/CHAPTER0*(\d+)=(\d+):(\d+):(\d+).(\d+)/){$pn=$2*3600+$3*60+$4+$5/1000;$l[$1]=$pn-$p;$p=$pn;$p[$1]="$2:$3:$4.$5";$pl[$1]=$pn;}END{for(1..$#p){print"CHAPTER_$_: Extra = T'$TITLE'C$_; Extra_name = \"\"; ";print"Chapter_name = \"Part ".++$part."\"; ";print"Chapter_start = $p[$_]; Chapter_length = ".sprintf("%.3f",$l[$_]).";\n"}}' < $LOGS/$BASE.tit_$TITLE.chapters.txt >> $FINAL/$BASE.rip_profile
fi
done
;;
rip_titles)
# NOTE(review): relies on $TITLES being set by a get_dvd_info run earlier
# in the same invocation.
if mount /mnt/cdrom0; then
UM=1
fi
# Now rip the raw VOBs; we won't need the DVD anymore after this.
for TITLE in $(seq 1 $TITLES); do
echo "Ripping title $TITLE:";
echo " Copying IFO file...";
cp /mnt/cdrom0/video_ts/vts_"$TITLE"_0.ifo $RIP_TEMP/$BASE.tit_$TITLE.vts.ifo
echo " Copying VOB stream...";
run_long $BASE.tit_$TITLE.mplayer_rip_vob \
mplayer dvd://$TITLE -dumpstream -dumpfile $RIP_TEMP/$BASE.tit_$TITLE.rawvob.vob
echo
# TODO multi angle here
done
# Only unmount if we were the ones who mounted the drive.
if [ "$UM" = 1 ]; then
umount /mnt/cdrom0
fi
;;
find_tracks)
echo find_tracks
# Add some episode based stats
# Parse the rip profile into one line per output track:
# "<ep|ex>_<name> <title> <start> <length>", merging consecutive chapters
# that belong to the same episode/extra.
# NOTE(review): the perl below compares $ctatus (likely a typo for
# $cstatus) and the Type regex quoting ("([^"]+")) looks misplaced —
# confirm before relying on the Type/Ignore handling.
perl -ne '{
my($status,$name,$title,$chap,$start,$len,$type,$ignore);
sub finish_track {
print $cstatus."_$cname $ctitle $cstart $clen\n" unless $cignore;
undef $cstatus;
}
if(/TITLE_(\d+)/) {
$title = $1;
if (defined $cstatus) {
finish_track();
}
$ctitle = $title;
}
if(/CHAPTER_(\d+)/) { $chap = $1 }
if(/Episode\s*=\s*(\d+)/) { $name = $1; $status = "ep" }
if(/Extra\s*=\s*([^;]+)/) { $name = $1; $status = "ex" }
if(/Type\s*=\s*"([^"]+")/) { $status = $1 }
if(/Ignore\s*=\s*1/) { $ignore = 1 }
if(/Chapter_start\s*=\s*([0-9:.]+)/) { $start = $1 }
if(/Chapter_length\s*=\s*([0-9.]+)/) { $len = $1 }
if (defined $status && ($ctatus ne $status || $name ne $cname)) {
if (defined $cstatus) {
finish_track();
}
$cname = $name;
$cstatus = $status;
$clen = $len;
$cstart = $start;
$cignore = $ignore;
} elsif ($len) {
$clen += $len;
}
}
END {
finish_track() if $cstatus;
}
' $FINAL/$BASE.rip_profile > $ENC_TEMP/$BASE.tracks
;;
extract_tracks)
echo extract_tracks
# For each track: demux every audio stream, dump every subtitle stream as
# vobsub, and cut the video for every angle out of the raw title VOB.
cat $ENC_TEMP/$BASE.tracks|while read NAME TITLE START LENGTH; do
echo "Track: $NAME:"
echo " Title: $TITLE"
AUDIO_CHANNELS="$(perl -ne '/^TITLE_'$TITLE':.*Audio = (\d+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
SUBTITLES="$(perl -ne '/^TITLE_'$TITLE':.*Subtitles = (\d+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
ANGLES="$(perl -ne '/^TITLE_'$TITLE':.*Angles = (\d+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
# Common mencoder input spec: the raw VOB clipped to this track's window.
TRACK_ONLY="$RIP_TEMP/$BASE.tit_$TITLE.rawvob.vob -ss $START -endpos $LENGTH"
for AUDIO_CHANNEL in $(seq 0 $(expr $AUDIO_CHANNELS - 1)); do
FORMAT="$(perl -ne '/^TITLE_'$TITLE':.*Audio_'$AUDIO_CHANNEL'_format = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
AID="$(perl -ne '/^TITLE_'$TITLE':.*Audio_'$AUDIO_CHANNEL'_aid = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
echo " Extracting audio track $AUDIO_CHANNEL ($AID/$FORMAT)...";
run_long $BASE.$NAME.mencoder_extract_audio-$AUDIO_CHANNEL \
mencoder $TRACK_ONLY -aid $AID -oac copy -of rawaudio -o $ENC_TEMP/$BASE.$NAME.audio-$AUDIO_CHANNEL.$FORMAT -ovc frameno
done
for SUBTITLE in $(seq 0 $(expr $SUBTITLES - 1)); do
echo " Extracting subtitle track $SUBTITLE..."
# tccat -i $BASE.tit_$TITLE.rawvob.vob -L | tcextract -x ps1 -t vob -a $(expr 32 + $SUBTITLE) > $BASE.tit_$TITLE.subs-$SUBTITLE
# subtitle2vobsub -o $BASE.tit_$TITLE.vobsubs-$SUBTITLE -i $BASE.tit_$TITLE.vts.ifo -a $SUBTITLE < $BASE.tit_$TITLE.subs-$SUBTITLE
run_long $BASE.$NAME.mencoder_extract_subtitle-$SUBTITLE \
mencoder $TRACK_ONLY -oac copy -ovc frameno -vobsubout $ENC_TEMP/$BASE.$NAME.subs-$SUBTITLE -vobsuboutindex 0 -sid $SUBTITLE -o /dev/null
done
for ANGLE in $(seq 1 $ANGLES); do
echo " Extracting video angle $ANGLE..."
run_long $BASE.$NAME.mencoder_extract_video-$ANGLE \
mencoder $TRACK_ONLY -oac copy -ovc copy -of rawvideo -o $ENC_TEMP/$BASE.$NAME.video-$ANGLE.mpeg
done
echo
done
;;
remove_vobs)
echo remove_vobs
# We should at this point be done with the VOBs, too.
# NOTE(review): like rip_titles, this relies on $TITLES from get_dvd_info.
for TITLE in $(seq 1 $TITLES); do
echo "Removing VOB for title $TITLE."
run_long $BASE.tit_$TITLE.remove_vob \
rm $RIP_TEMP/$BASE.tit_$TITLE.rawvob.vob
done
;;
encode_tracks)
echo encode_tracks
# Encode each track's video to H.264 (inverse-telecine to 23.976 fps),
# then wrap the raw stream into an MP4 container for muxing later.
cat $ENC_TEMP/$BASE.tracks|while read NAME TITLE START LENGTH; do
echo "Track $NAME..."
ANGLES="$(perl -ne '/^TITLE_'$TITLE':.*Angles = (\d+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
for ANGLE in $(seq 1 $ANGLES); do
run_long $BASE.$NAME.mencoder_encode_angle_$ANGLE \
mencoder $ENC_TEMP/$BASE.$NAME.video-$ANGLE.mpeg -o $ENC_TEMP/$BASE.$NAME-video-$ANGLE.264 \
-vf pullup,softskip,harddup \
-ofps 24000/1001 -of rawvideo \
-oac copy \
-ovc x264 -x264encopts crf=20:subq=6:bframes=4:8x8dct:frameref=13:partitions=all:b_pyramid:weight_b:threads=auto
# old filters: pullup instead of filmdint
run_long $BASE.$NAME.mp4creator_video_angle_$ANGLE \
mp4creator -c $ENC_TEMP/$BASE.$NAME-video-$ANGLE.264 -rate 23.976 $ENC_TEMP/$BASE.$NAME-video-$ANGLE.mp4
done
done
;;
merge_tracks)
echo merge_tracks
# Assemble the final MKV per track: derive the output filename and a
# chapter file from the rip profile, then build the mkvmerge argument
# list (defaults, languages, names) for all audio/subtitle/video inputs.
cat $ENC_TEMP/$BASE.tracks|while read NAME TITLE START LENGTH; do
echo "Track $NAME..."
AUDIO_CHANNELS="$(perl -ne '/^TITLE_'$TITLE':.*Audio = (\d+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
SUBTITLES="$(perl -ne '/^TITLE_'$TITLE':.*Subtitles = (\d+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
ANGLES="$(perl -ne '/^TITLE_'$TITLE':.*Angles = (\d+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
case $NAME in
ep_*)
# Episode track: "<Series> - <Season> - <Ep> - <Title>.mkv"; chapter
# times are rebased so the episode starts at 00:00:00.
EPNO=${NAME#ep_}
SERIES="$(perl -ne '/^TITLE_'$TITLE':.*Series = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
SEASON="$(perl -ne '/^TITLE_'$TITLE':.*Season = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
ETITLE="$(perl -ne '/Episode = '$EPNO';.*Episode_name = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
FILENAME="$SERIES"
if [ -n "$SEASON" ]; then FILENAME="$FILENAME - $SEASON"; fi
FILENAME="$FILENAME - $EPNO"
if [ -n "$ETITLE" ]; then FILENAME="$FILENAME - $ETITLE"; fi
FILENAME="$FILENAME.mkv"
# Build a chapter file
perl -ne '
BEGIN{
$o="'$START'";
$o=~/(\d+):(\d+):(\d+)\.(\d+)/;
$o = $1 * 3600 + $2 * 60 + $3 + $4 / 1000;
}
$m=0 if/Episode =/;
$m=1 if/Episode = '$EPNO';/;
if ($m) {
/Chapter_start = (\d+):(\d+):(\d+)\.(\d+);/;
$s = $1 * 3600 + $2 * 60 + $3 + $4 / 1000;
/Chapter_length = ([^;]*);/;
$l = $1;
/Chapter_name = "([^"]*)";/;
$n = $1;
$s -= $o;
$sn = sprintf("%02i:%02i:%02i.%03i", $s/3600, ($s/60)%60, $s%60, ($s*1000)%1000);
$cn = sprintf("%02i",++$c);
print "CHAPTER$cn=$sn\n";
print "CHAPTER".$cn."NAME=".($n||"Chapter $cn")."\n";
}
' $FINAL/$BASE.rip_profile > $ENC_TEMP/$BASE.$NAME.chapters.txt
# NOTE(review): TNAME is computed here but never used afterwards.
TNAME="$(perl -ne '/Episode = '$EPNO';.*Episode_name = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
;;
*)
# Extra track: "<Disc name> - <Extra name>.mkv".
SNAME=${NAME#*_}
DTITLE="$(perl -ne '/Disc_name = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
ETITLE="$(perl -ne '/Extra = "'$SNAME'";.*Extra_name = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
FILENAME="$DTITLE"
if [ -n "$ETITLE" ]; then
FILENAME="$FILENAME - $ETITLE"
else
FILENAME="$FILENAME - $NAME"
fi
FILENAME="$FILENAME.mkv"
perl -ne '
BEGIN{
$o="'$START'";
$o=~/(\d+):(\d+):(\d+)\.(\d+)/;
$o = $1 * 3600 + $2 * 60 + $3 + $4 / 1000;
}
$m=0 if/Extra =/;
$m=1 if/Extra = '$SNAME';/;
if ($m) {
/Chapter_start = (\d+):(\d+):(\d+)\.(\d+);/;
$s = $1 * 3600 + $2 * 60 + $3 + $4 / 1000;
/Chapter_length = ([^;]*);/;
$l = $1;
/Chapter_name = "([^"]*)";/;
$n = $1;
$s -= $o;
$sn = sprintf("%02i:%02i:%02i.%03i", $s/3600, ($s/60)%60, $s%60, ($s*1000)%1000);
$cn = sprintf("%02i",++$c);
print "CHAPTER$cn=$sn\n";
print "CHAPTER".$cn."NAME=".($n||"Chapter $cn")."\n";
}
' $FINAL/$BASE.rip_profile > $ENC_TEMP/$BASE.$NAME.chapters.txt
TNAME="$(perl -ne '/^Extra = '$SNAME';.*Episode_name = "([^"]+)"/&&print"$1"' $FINAL/$BASE.rip_profile)"
;;
esac
FILENAME="$(echo "$FILENAME"|tr -d '!')"
unset PARAMS
declare -a PARAMS
# mkvmerge options precede the input file they apply to; track 0 of each
# input is addressed as "0:" for audio/subs and "1:" for the MP4 video.
for AUDIO_CHANNEL in $(seq 0 $(expr $AUDIO_CHANNELS - 1)); do
FORMAT="$(perl -ne '/^TITLE_'$TITLE':.*Audio_'$AUDIO_CHANNEL'_format = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
LANGUAGE="$(perl -ne '/^TITLE_'$TITLE':.*Audio_'$AUDIO_CHANNEL'_language = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
ANAME="$(perl -ne '/^TITLE_'$TITLE':.*Audio_'$AUDIO_CHANNEL'_name = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
DEFAULT="$(perl -ne '/^TITLE_'$TITLE':.*Audio_'$AUDIO_CHANNEL'_default = 1;/&&print"$1"' $FINAL/$BASE.rip_profile)"
if [ -n "$DEFAULT" ]; then
PARAMS[${#PARAMS[@]}]="--default-track"
PARAMS[${#PARAMS[@]}]="0"
fi
if [ -n "$LANGUAGE" ]; then
PARAMS[${#PARAMS[@]}]="--language"
PARAMS[${#PARAMS[@]}]="0:$LANGUAGE"
fi
if [ -n "$ANAME" ]; then
PARAMS[${#PARAMS[@]}]="--track-name"
PARAMS[${#PARAMS[@]}]="0:$ANAME"
fi
PARAMS[${#PARAMS[@]}]="$BASE.$NAME.audio-$AUDIO_CHANNEL.$FORMAT"
done
for SUBTITLE in $(seq 0 $(expr $SUBTITLES - 1)); do
LANGUAGE="$(perl -ne '/^TITLE_'$TITLE':.*Subtitle_'$SUBTITLE'_language = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
ANAME="$(perl -ne '/^TITLE_'$TITLE':.*Subtitle_'$SUBTITLE'_name = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
DEFAULT="$(perl -ne '/^TITLE_'$TITLE':.*Subtitle_'$SUBTITLE'_default = 1;/&&print"$1"' $FINAL/$BASE.rip_profile)"
if [ -n "$DEFAULT" ]; then
PARAMS[${#PARAMS[@]}]="--default-track"
PARAMS[${#PARAMS[@]}]="0"
fi
if [ -n "$LANGUAGE" ]; then
PARAMS[${#PARAMS[@]}]="--language"
PARAMS[${#PARAMS[@]}]="0:$LANGUAGE"
fi
if [ -n "$ANAME" ]; then
PARAMS[${#PARAMS[@]}]="--track-name"
PARAMS[${#PARAMS[@]}]="0:$ANAME"
fi
# NOTE(review): uses $SUBTITLES (the stream count), not $SUBTITLE — every
# iteration appends the same .idx file; subs-$SUBTITLE.idx looks intended.
PARAMS[${#PARAMS[@]}]="$BASE.$NAME.subs-$SUBTITLES.idx"
done
for ANGLE in $(seq 1 $ANGLES); do
LANGUAGE="$(perl -ne '/^TITLE_'$TITLE':.*Angle_'$ANGLE'_language = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
ANAME="$(perl -ne '/^TITLE_'$TITLE':.*Angle_'$ANGLE'_name = "([^"]+)";/&&print"$1"' $FINAL/$BASE.rip_profile)"
ASPECT="$(perl -ne '/^TITLE_'$TITLE':.*Angle_'$ANGLE'_aspect = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
FPS="$(perl -ne '/^TITLE_'$TITLE':.*FPS = ([^;]+);/&&print"$1"' $FINAL/$BASE.rip_profile)"
DEFAULT="$(perl -ne '/^TITLE_'$TITLE':.*Angle_'$ANGLE'_default = 1;/&&print"$1"' $FINAL/$BASE.rip_profile)"
if [ -n "$DEFAULT" ]; then PARAMS[${#PARAMS[@]}]="--default-track"; PARAMS[${#PARAMS[@]}]="1"; fi
if [ -n "$FPS" ]; then
PARAMS[${#PARAMS[@]}]="--timecodes"; PARAMS[${#PARAMS[@]}]="1:$BASE.$NAME.video-$ANGLE.time"
echo "# timecode format v1" > $ENC_TEMP/$BASE.$NAME.video-$ANGLE.time
echo "assume $FPS" >> $ENC_TEMP/$BASE.$NAME.video-$ANGLE.time
fi
if [ -n "$ASPECT" ]; then
PARAMS[${#PARAMS[@]}]="--aspect-ratio"
PARAMS[${#PARAMS[@]}]="1:$ASPECT"
fi
if [ -n "$LANGUAGE" ]; then
PARAMS[${#PARAMS[@]}]="--language"
PARAMS[${#PARAMS[@]}]="1:$LANGUAGE"
fi
if [ -n "$ANAME" ]; then
PARAMS[${#PARAMS[@]}]="--track-name"
PARAMS[${#PARAMS[@]}]="1:$ANAME"
fi
PARAMS[${#PARAMS[@]}]="$BASE.$NAME.video-$ANGLE.mp4"
done
# NOTE(review): hard-coded chapter file; the per-track file generated
# above ($ENC_TEMP/$BASE.$NAME.chapters.txt) looks like the intended input.
PARAMS[${#PARAMS[@]}]="--chapters"; PARAMS[${#PARAMS[@]}]="/smb/newton/media_rw/media4/temp/sr1.1-chapters.txt"
run_long $BASE.$NAME.mkvmerge \
mkvmerge ${PARAMS[@]} -o $FINAL/"$FILENAME"
done
;;
clean)
;;
esac
done
| true
|
265e279bb4c48c0efaed8edb97aea59dca77c9cd
|
Shell
|
plijnzaad/advanced-git
|
/git-pre-commit-hook.sh
|
UTF-8
| 1,100
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/sh
## simple example of a pre-commit hook. Put this in
## $YOURWORKINGCOPY/.git/hooks/pre-commit. Note that this check script
## itself is not versioned.
readfirst=./env.sh.example
if [ -f $readfirst ] ; then
source $readfirst
fi
## python (simplistic; also check python compileall; PyChecker,
## Pyflakes, Pylint)
for file in $(find . -name '*.py' -print); do
if python -m py_compile $file; then
:
else
status=$?
echo "Problem with $file, exit code $status" >&2
exit $status
fi
done
## perl:
for file in $(find . -name '*.p[lm]' -print); do
if perl -wc $file; then
:
else
status=$?
echo "Problem with $file, exit code $status" >&2
exit $status
fi
done
## R (very simplistic! Better check e.g. http://r-pkgs.had.co.nz/tests.html
## and for packages, use R CMD check)
for file in $(find . -name '*.R' -print); do
if cat $file | R --vanilla > /dev/null ; then
:
else
status=$?
echo "Problem with $file, exit code $status" >&2
exit $status
fi
done
| true
|
db1dd6c2cc372d80b344ec30907192f44d8ab4c5
|
Shell
|
godatadriven/whirl
|
/envs/sftp-mysql-example/whirl.setup.d/01_prepare_sftp.sh
|
UTF-8
| 512
| 2.5625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Register the SFTP server as an Airflow SSH connection and prepare the
# local SSH client (known_hosts, sshpass) so DAGs can reach it without
# interactive prompts. Expects SFTP_USER and SFTP_PASS in the environment.
echo "===================="
echo "== Configure SFTP =="
echo "===================="
# Quote the credentials so values containing spaces or glob characters
# survive word-splitting (the originals were expanded unquoted).
airflow connections add ftp_server \
    --conn-type SSH \
    --conn-host ftp-server \
    --conn-login "$SFTP_USER" \
    --conn-port 22 \
    --conn-password "$SFTP_PASS"
mkdir -p "${HOME}/.ssh"
sudo apt-get update
sudo apt-get install -y openssh-client sshpass
# Pre-trust the server's host key so the first ssh/sftp call is non-interactive.
ssh-keyscan ftp-server >> "${HOME}/.ssh/known_hosts"
| true
|
d5394b4873db56d7c8dec03b41087c4e076390c8
|
Shell
|
davidofug/shell-programming
|
/wife.sh
|
UTF-8
| 324
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
#shebang
# Interactive demo script: greets the user, collects three names into
# girls.txt, then displays the collected list and says goodbye.
echo Hello!
read -p "Hi, enter your name " name
read -p "Enter name 1 " name2
# Names are appended, so repeated runs keep growing girls.txt.
echo "$name2" >> girls.txt
read -p "Enter name 2 " name3
echo "$name3" >> girls.txt
read -p "Enter name 3 " name4
echo "$name4" >> girls.txt
# Pause for effect before clearing the screen and showing the results.
sleep 2
clear
echo "Please wait...."
sleep 2
cat girls.txt
echo "Goodbye $name"
| true
|
add41695d2410a1be73650fdb51847c708465933
|
Shell
|
cchantep/ReactiveMongo
|
/.ci_scripts/retry.sh
|
UTF-8
| 327
| 3.46875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#! /bin/sh
# retry.sh <max-attempts> <command> [args...]
# Runs the command up to <max-attempts> times, stopping at the first
# success. On success, dumps any <error>/<failure> entries found in the
# sbt test reports and exits 0; otherwise exits with the last status.
RETRY_COUNT="0"
RETRY_MAX="$1"
shift 1
RES="999"
while [ "$RETRY_COUNT" -lt "$RETRY_MAX" ]; do
  # BUGFIX: execute "$@" directly instead of an unquoted $CMD string, so
  # arguments containing spaces or glob characters are preserved.
  "$@"
  RES="$?"
  RETRY_COUNT=$((RETRY_COUNT + 1))
  if [ "$RES" -eq 0 ]; then
    grep -r '<error' */target/test-reports/
    grep -r '<failure' */target/test-reports/
    exit 0
  fi
done
exit $RES
| true
|
178c8dc2a6120e6fa5529389b7c0ea27a2319765
|
Shell
|
The-Vuppukari/EmployeeWage
|
/EmployeeWage.sh
|
UTF-8
| 898
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash -x
# Employee wage demo: simulates a work month day by day. Each day the
# employee is randomly absent (0h), part time (4h) or full time (8h);
# the month ends after 20 working days or once 100 hours are logged.
isPartTime=1;
isFullTime=2;
maxHrsInMonth=100;
totalSalary=0;
empRatePerHr=20;
numWorkingDays=20;
totalEmpHrs=0;
totalWorkingDays=0;
# getWorkingHours <type>: print hours for the attendance type
# (2 = full time -> 8, 1 = part time -> 4, anything else -> 0).
function getWorkingHours(){
case $1 in
$isFullTime)
workingHours=8
;;
$isPartTime)
workingHours=4
;;
*)
workingHours=0
;;
esac
echo $workingHours
}
# calcDailyWage <hours>: print the wage for the given hours at empRatePerHr.
function calcDailyWage(){
local workingHours=$1
wage=$(($workingHours*$empRatePerHr))
echo $wage
}
while [[ $totalEmpHrs -lt $maxHrsInMonth && $totalWorkingDays -lt $numWorkingDays ]]
do
((totalWorkingDays++))
workingHours="$( getWorkingHours $(( RANDOM % 3 )) )"
# BUGFIX: accumulate into totalEmpHrs (the variable the loop condition
# checks); the original added to an unrelated totalWorkHours, so the
# 100-hour monthly cap was never enforced.
totalEmpHrs=$(($totalEmpHrs + $workingHours ));
empDailyWage[$totalWorkingDays]="$( calcDailyWage $workingHours )"
done
totalSalary="$( calcDailyWage $totalEmpHrs )";
echo " Daily Wage " ${empDailyWage[@]}
echo " Days " ${!empDailyWage[@]}
| true
|
1944352f5eaab977e0e66e78587755994ade83ff
|
Shell
|
radix-platform/platform
|
/sources/packages/l/freetype/create-2.6.3-subpixel-patch/create.patch.sh
|
UTF-8
| 393
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/sh
# Regenerate the freetype subpixel-rendering patch: unpack the pristine
# upstream sources, diff them against the locally modified tree
# (freetype-$VERSION-new), and store the resulting patch in ../patches.
VERSION=2.6.3
# Extract only the files named in file.list from the upstream tarball.
tar --files-from=file.list -xjvf ../freetype-$VERSION.tar.bz2
mv freetype-$VERSION freetype-$VERSION-orig
cp -rf ./freetype-$VERSION-new ./freetype-$VERSION
# -b ignores whitespace-only changes; -N treats absent files as empty.
diff -b --unified -Nr freetype-$VERSION-orig freetype-$VERSION > freetype-$VERSION-subpixel.patch
mv freetype-$VERSION-subpixel.patch ../patches
# Clean up the scratch trees.
rm -rf ./freetype-$VERSION
rm -rf ./freetype-$VERSION-orig
| true
|
0ab881395c2505df165ccaad204f7a335e03b815
|
Shell
|
stormyordos/STAR
|
/payload.txt
|
UTF-8
| 1,662
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
#
# Title: Optical Exfiltration
# Author: stormyordos from bg-wa's version
# Version: 1.0
# Category: HID
# Target: Windows
# Attackmodes: HID
# Sources: Hak5 2320, https://github.com/bg-wa/QRExtractor
#
# Quick HID only attack to write an HTML/JS file to target machine
# and open a browser, to exfiltrate data Using QR Codes and a video
# recording device.
#
# Optional html params:
# base64: Passing a base64 string to this param will auto-start processing QR Codes.
#
# playback: Passing the string "finish" to this param will auto-play the results,
# when QR codes finish rendering.
#
# Example:
# Ln65: Q STRING firefox "$target_html?playback=finish&base64=my_long_string"
#
# | Attack Stage | Description |
# | ------------------- | ---------------------------------------- |
# | SETUP | Open vi |
# | ATTACK | Writing HTML |
# | FINISH | Browser Ready/Processing |
#
ATTACKMODE HID
LED SETUP
# Escaped path of the dropper page in the target user's home directory;
# the backslashes keep ~ and / literal through the Q STRING injection.
target_html=\~\/index.html
RUN WIN cmd.exe
Q DELAY 1000
# Remove any stale copy, then open the file in notepad for typing into.
Q STRING del "$target_html"
Q ENTER
Q DELAY 500
Q STRING notepad "$target_html"
Q ENTER
Q DELAY 1000
Q ENTER
Q DELAY 1000
LED ATTACK
# Type the minified HTML/JS exfiltration page into notepad line by line.
payload_dir=/root/udisk/payloads/$SWITCH_POSITION
source_html=$payload_dir/index.min.html
while IFS= read data
do
# A single-space line has to be sent as an explicit SPACE keystroke;
# Q STRING cannot inject it directly.
if [ "${data}" = " " ]
then
Q SPACE
else
Q STRING "$data"
fi
done < "$source_html"
# Save the file, close notepad and the shell, then open the page.
Q CTRL S
Q ALT F4
Q STRING exit
Q ENTER
Q DELAY 500
Q WIN R
Q STRING chrome "$target_html"
Q ENTER
LED FINISH
| true
|
967acc3892702152e961b18deda233447f3961f2
|
Shell
|
thePureSky/shell_script
|
/Cases_Linux_command_line_and_shell_scripting_bible/13_test10
|
UTF-8
| 157
| 2.875
| 3
|
[] |
no_license
|
#!/bin/bash
# Demonstrates the $# parameter-count variable and ${!#} indirection
# for reading the final positional parameter.
printf 'The number of parameters is %s\n' "$#"
count=$#
printf 'the last parameter is %s\n' "$count"
printf 'the last parameter is %s\n' "${!#}"
| true
|
e8f94e0388a48a0d962b42a3fd3b21ae1bb3e6c2
|
Shell
|
matthiasdiener/numafac
|
/calc_numafactor.sh
|
UTF-8
| 5,663
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash
PROGNAME=$(basename $0)
#sysfs
SYSFS_CPU="/sys/devices/system/cpu"
SYSFS_NODE="/sys/devices/system/node"
CPU_POSSIBLE_COUNT=$(ls -d ${SYSFS_CPU}/cpu[0-9]* | wc -l)
NODE_POSSIBLE_COUNT=$(ls -1d ${SYSFS_NODE}/node[0-9]* | wc -l)
rm -rf output
mkdir -p output
# ---- Phase 1: Stream bandwidth runs and the bandwidth-based NUMA factor.
# (The `if [ 1 -eq 1 ]` wrapper is a crude always-on toggle for this phase.)
if [ 1 -eq 1 ]; then
echo ">>>$PROGNAME: Compiling Stream Benchmark"
cd stream
make
# Parallel baseline: one OpenMP thread per possible CPU.
echo ">>>$PROGNAME: Running Stream Benchmark on all cores"
export OMP_NUM_THREADS=${CPU_POSSIBLE_COUNT}
./stream_c.exe >> ../output/streamlocal.minas
# Sequential baseline: a single thread pinned to node 0 / cpu 0.
echo ">>>$PROGNAME: Running Stream Benchmark on first core"
export OMP_NUM_THREADS=1
numactl --membind=0 --physcpubind=0 ./stream_c.exe >> ../output/streamlocal.minas
echo ">>>$PROGNAME: Running Stream Benchmark on different nodes"
#running Stream for every node on the machine
# For each node j, pin execution to that node's first CPU and measure
# bandwidth against the memory of every node i (the i==j runs form the
# diagonal entries of the matrix built further down).
for ((j=0;j < ${NODE_POSSIBLE_COUNT} ;j++)); do
core=`ls -d /sys/devices/system/node/node$j/cpu[0-9]* | head -1`
core=`basename $core | sed s/cpu//`
for ((i=0;i<${NODE_POSSIBLE_COUNT};i++)); do
echo ">>>$PROGNAME: Running Stream Benchmark between node $i and core $core"
numactl --membind=$i --physcpubind=$core ./stream_c.exe >> ../output/stream.minas
done
done
#get bandwidth and access time local and remote
# Extract the Triad result lines; awk field 2 is the bandwidth column.
echo -e "\n#Local Parallel bandwidth" >> ../output/numacost.minas
cat ../output/streamlocal.minas | egrep '(Triad)' | head -1 | awk '{print $2}' >> ../output/numacost.minas
echo -e "\n#Local Sequential bandwidth" >> ../output/numacost.minas
# NOTE(review): `head -2` emits BOTH Triad lines (parallel and sequential),
# so two values land under this heading -- `tail -1` was probably intended.
cat ../output/streamlocal.minas | egrep '(Triad)' | head -2 | awk '{print $2}' >> ../output/numacost.minas
cat ../output/stream.minas | egrep '(Triad)' > stream.data
rm ../output/stream.minas
# Split each node-to-node Triad line into a bandwidth column (f2) and a
# time column (f3); assumes tab-separated fields -- TODO confirm against
# the actual stream_c.exe output format.
cut -f2 stream.data > tband.data
cut -f3 stream.data > tfn.data
rm stream.data
# Fixed character slices trim padding/prefix characters from each value.
cut -c6-11 tfn.data > fn.data
cut -c3-11 tband.data > band.data
rm tfn.data tband.data
#computing average bandwidth and nodes bandwidth
echo -e "\n#Remote bandwidth" >> ../output/numacost.minas
awk '{sum+=$0} END { print sum/NR}' band.data >> ../output/numacost.minas
# Build a "- - ... -" argument list so `paste` folds the flat value stream
# into an N-column matrix (one row per node).  `columns` is reused by the
# lmbench phase later in the script.
columns=""
for ((i=0;i<${NODE_POSSIBLE_COUNT};i++))
do
columns=$columns"- "
done
cat band.data | paste ${columns} > ../output/bandwidth.minas
rm band.data
#Computing NUMA factor
# Load the flat per-pair value list into pseudo-matrix variables
# tabela_<row>_<col> via dynamic `declare`.
counter=0
while read n
do
let i=$counter/${NODE_POSSIBLE_COUNT}
let j=$counter%${NODE_POSSIBLE_COUNT}
let counter++
var="tabela_${i}_${j}"
declare $var=$n
done < fn.data
rm fn.data
ite=${NODE_POSSIBLE_COUNT}
# NUMA factor entry (i,j) = value(i,j) divided by the node's own diagonal
# value (i,i), computed to 4 decimal places with bc.
for ((i=0;i<$ite;i++))
do
for ((j=0;j<$ite;j++))
do
num="tabela_${i}_${j}"
div="tabela_${i}_${i}"
var="fator_${i}_${j}"
declare $var=`echo scale=4\;${!num} / ${!div} | bc`
echo ${!var} >> tnumafactor.minas
done
done
echo -e ">>>$PROGNAME: NUMA factor calculated using Stream benchmark (bandwidth):"
echo -e "\n#NUMA factor" >> ../output/numacost.minas
# Average factor goes to both stdout (tee) and the summary file.
awk '{sum+=$0} END { print sum/NR}' tnumafactor.minas |tee -a ../output/numacost.minas
cat tnumafactor.minas | paste ${columns} > ../output/numafactor_stream.minas
rm tnumafactor.minas
cd ..
fi
#####################################################
# Cache geometry discovery, read from cpu0's sysfs cache hierarchy.
# CACHE_LEVEL counts the indexN directories; the highest index is the LLC.
CACHE_LEVEL=$(ls -d ${SYSFS_CPU}/cpu0/cache/index[0-9]* | wc -l)
# BUG FIX: the original `let llc=\`expr ${CACHE_LEVEL}-1\`` did no arithmetic
# in expr (without spaces, expr just echoes the literal string "N-1"); it
# only worked because `let` re-evaluated that string.  Use plain shell
# arithmetic instead.
llc=$((CACHE_LEVEL - 1))
# sysfs reports cache sizes like "8192K"; strip the unit suffix.
LLC_SIZE=$(cat ${SYSFS_CPU}/cpu0/cache/index${llc}/size | tr -d 'K')
if [ ${llc} -eq 2 ]
then
  # Only three cache levels: index2 *is* the LLC, so L2_SIZE is the LLC size
  # scaled by the node count (used as the upper bound of the latency sweep).
  L2_SIZE=$((LLC_SIZE * NODE_POSSIBLE_COUNT))
else
  # Four cache levels: read L2 from index2 separately, and rescale the LLC
  # size (divide by 1024, then multiply by 2 per node) for the sweep range.
  L2_SIZE=$(cat ${SYSFS_CPU}/cpu0/cache/index2/size | tr -d 'K')
  LLC_SIZE=$((LLC_SIZE / 1024 * NODE_POSSIBLE_COUNT * 2))
fi
#Latency for read - REMOTE and LOCAL
# ---- Phase 2: lmbench3 lat_mem_rd runs and the latency-based NUMA factor.
echo ">>>$PROGNAME: Compiling lmbench3"
cd lmbench3
make build
# lmbench builds into bin/<platform-triple>/; pick up whatever it produced.
folder=`ls bin/`
cd bin/$folder
# NOTE(review): assumes a ../../../latencies/ directory already exists --
# nothing in this script creates it; confirm.
cp lmbench.a ../../../latencies/
echo ">>>$PROGNAME: Running lmbench3 on local node"
# Sweep read latency between the two cache-derived sizes computed earlier.
./lat_mem_rd -P 1 -N 1 $LLC_SIZE $L2_SIZE &> tmp.out
echo -e "\n#Local Latency " >> ../../../output/numacost.minas
# The last two output lines carry the deepest (memory-bound) latencies.
cat tmp.out | tail -2 | awk '{print $2}' >> ../../../output/numacost.minas
rm tmp.out
####################################################################
#running lat_mem for every node on the machine
echo ">>>$PROGNAME: Running lmbench3 for all nodes"
# Same node-pair sweep as the Stream phase: pin to node j's first CPU,
# bind memory to node i, record the tail latencies of each run.
for ((j=0;j < ${NODE_POSSIBLE_COUNT} ;j++))
do
core=`ls -d /sys/devices/system/node/node$j/cpu[0-9]* | head -1`
core=`basename $core | sed s/cpu//`
for ((i=0;i<${NODE_POSSIBLE_COUNT};i++))
do
echo ">>>$PROGNAME: Running lmbench3 between node $i and core $core"
(numactl --membind=$i --physcpubind=$core ./lat_mem_rd -P 1 -N 1 $LLC_SIZE $L2_SIZE) &> tmp.out
cat tmp.out | tail -2 | awk '{print $2}' >> tlatencies.minas
done
done
# Drop blank lines so each remaining line is one latency sample.
sed '/^$/d' < tlatencies.minas > latencies.minas
rm tlatencies.minas tmp.out
####################################################################
#Computing NUMA factor
# Load the flat latency list into pseudo-matrix variables tabela_<i>_<j>.
counter=0
while read n
do
let i=$counter/${NODE_POSSIBLE_COUNT}
let j=$counter%${NODE_POSSIBLE_COUNT}
let counter++
var="tabela_${i}_${j}"
declare $var=$n
done < latencies.minas
rm latencies.minas
ite=${NODE_POSSIBLE_COUNT}
# fator_(i,j) = latency(i,j) / latency(i,i), 4 decimal places via bc;
# the raw latencies are also kept for the latency matrix output.
for ((i=0;i<$ite;i++))
do
for ((j=0;j<$ite;j++))
do
num="tabela_${i}_${j}"
div="tabela_${i}_${i}"
var="fator_${i}_${j}"
lat="latency_${i}_${j}"
declare $var=`echo scale=4\;${!num} / ${!div} | bc`
declare $lat=`echo scale=4\;${!num} | bc`
echo ${!var} >> tnumafactor.minas
echo ${!lat} >> tlatency.minas
done
done
echo -e ">>>$PROGNAME: NUMA factor calculated using lmbench3 (latency):"
echo -e "\n#NUMA factor lmbench" >> ../../../output/numacost.minas
# Average factor goes to both stdout (tee) and the summary file.
awk '{sum+=$0} END { print sum/NR}' tnumafactor.minas |tee -a ../../../output/numacost.minas
# `columns` ("- - ... -") was built in the Stream phase; paste folds the
# flat lists back into N x N matrices.
cat tnumafactor.minas | paste ${columns} > ../../../output/numafactor_lmbench.minas
rm tnumafactor.minas
cat tlatency.minas | paste ${columns} > ../../../output/latency_lmbench.minas
rm tlatency.minas
| true
|
bbdb6b6201a64cd1279bbbdc370eeb11dec375e5
|
Shell
|
laidbackware/asdf-vmware
|
/tests/run-tests.sh
|
UTF-8
| 1,964
| 3.953125
| 4
|
[] |
no_license
|
#!/bin/bash
# This set of tests must be run in a clean environment
# It can either be run in docker or github actions
# Load asdf's shell integration so the `asdf` command is on PATH.
. $HOME/.asdf/asdf.sh
# DEBUGX=1 enables command tracing for troubleshooting.
[[ -z ${DEBUGX:-} ]] || set -x
set -euo pipefail
# `sep` is spliced into asdf subcommands (e.g. "plugin${sep}add"): a space
# by default, a dash when ASDF_LEGACY is set -- presumably to drive the
# older hyphenated CLI ("plugin-add"); confirm against the asdf version used.
sep=" "
[[ -z ${ASDF_LEGACY:-} ]] || sep="-"
# Absolute directory containing this script.
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#######################################
# Install one plugin through the github-release-downloader adapter, pin the
# newest version globally, and (where possible) run the tool once to prove
# the installed binary executes.
# Globals:   sep (asdf subcommand separator), ASDF_LEGACY (optional)
# Arguments: $1 - plugin name (antctl|govc|imgpkg|sonobuoy)
# Returns:   0 on success; exits 1 for unknown plugins (and set -e aborts
#            the script on any failing asdf step)
#######################################
function test_plugin() {
  local plugin_name=$1
  local version_command
  case $plugin_name in
    antctl)
      # antctl has no safe no-op subcommand to exercise after install.
      version_command="test_not_possible"
      ;;
    govc|imgpkg|sonobuoy)
      version_command="version"
      ;;
    *)
      # BUG FIX: this branch previously interpolated the undefined
      # ${file_name}, which aborts with "unbound variable" under the
      # script's `set -u` before the message is ever printed.
      echo "Product ${plugin_name} is not currently supported"
      exit 1
      ;;
  esac
  echo -e "\n#########################################"
  echo -e "####### Starting: ${plugin_name}\n"
  echo "Adding plugin $plugin_name"
  asdf plugin${sep}add $plugin_name https://github.com/laidbackware/asdf-github-release-downloader
  echo "Listing $plugin_name"
  asdf list${sep}all $plugin_name
  if [[ -z ${ASDF_LEGACY:-} ]]; then
    echo "Installing $plugin_name"
    asdf install $plugin_name latest
  else
    # Legacy asdf lacks the "latest" keyword; resolve it from the list tail.
    plugin_version=$(asdf list${sep}all $plugin_name |tail -1)
    echo "Installing $plugin_name $plugin_version"
    asdf install $plugin_name $plugin_version
  fi
  # xargs trims the leading whitespace asdf prints before the version.
  installed_version=$(asdf list $plugin_name | xargs)
  asdf global $plugin_name $installed_version
  if [[ $version_command != "test_not_possible" ]]; then
    echo -e "\nChecking $plugin_name is executable"
    echo "Running command '$plugin_name $version_command'"
    eval "$plugin_name $version_command"
  fi
  echo -e "\n####### Finished: $plugin_name"
  echo -e "#########################################\n"
}
# Run the per-plugin test for one named plugin, or for every supported
# plugin when called without an argument.
function test_plugins() {
  plugin_name=${1:-}
  if [ -n "$plugin_name" ]; then
    test_plugin $plugin_name
  else
    local each
    for each in antctl govc imgpkg sonobuoy; do
      test_plugin "$each"
    done
  fi
}
test_plugins ${1:-}
| true
|
7c77a89dbc0268f28cf04fe9158dd340837d5d7b
|
Shell
|
yibit/light
|
/tools/sakura
|
UTF-8
| 1,248
| 3.828125
| 4
|
[
"LicenseRef-scancode-other-permissive",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
#!/bin/sh
# Generator: snapshots the current project tree into a self-contained
# "light.sh" installer script that recreates the tree under a new project
# name (every occurrence of "light" is rewritten to the chosen name).
MYHOME=`pwd`
light="light.sh"
# Emit the generated script's header: usage() plus argument validation.
# All $-signs are backslash-escaped so they expand when light.sh runs, not
# now.  (The "#/bin/sh" line below is the *generated* file's shebang -- it
# is missing its "!", presumably a typo carried into every generated copy.)
cat > $light <<EOF
#/bin/sh
usage()
{
echo "Usage: "
echo " \$0 <project name> "
echo " "
return 0
}
if test \$# -ne 1; then
usage
exit 1
fi
NAME=\$1
EOF
# Emit one mkdir per project subdirectory (skipping .git and _temp trees).
base=`find $MYHOME -type d |grep -v -E "(.git|_temp)"`
for f in $base; do
if test "x$f" = "x."; then
continue;
fi
# Strip the absolute prefix so paths are relative to the new project root.
xf=`echo $f |awk -F "$MYHOME/" ' { print $2; } '`
echo "mkdir -p \$NAME/$xf" >> $light
done
# NOTE(review): relies on sh's echo interpreting "\n" (true for dash,
# not for bash) -- under bash this writes a literal backslash-n.
echo "\n" >> $light
# Emit a heredoc per regular file, excluding VCS data, binaries, logs,
# keys/certs, this script itself ($0) and the output file ($light).
base=`find $MYHOME -type f |grep -v -E "(.git\/|.DS_Store|_temp|\.log|\.png|\.crt|\.csr|\.key|nginx.pid|tags|$light|$0)"`
for f in $base; do
xf=`echo $f |awk -F "$MYHOME/" ' { print $2; } '`
# Rewrite "light" inside destination paths to the runtime project name.
yf=`echo $xf |sed 's/light/\"\$NAME\"/g'`
echo "cat > \$NAME/$yf <<EOF" >> $light
# Inline the file's contents verbatim as the heredoc body.
# NOTE(review): backticks/$ in file contents are NOT escaped here (the
# sed lines commented out at the bottom suggest this was attempted once).
cat $xf >> $light
echo "EOF\n" >> $light
done
# Emit the generated script's footer: rename "light" -> project name inside
# every copied file (BSD sed needs `-i ''`, GNU sed plain `-i`), then git init.
cat >> $light <<EOF
UNAME=\`uname -s\`
if test "x\$UNAME" = "xDarwin"; then
find \$NAME -type f |xargs grep -l light |xargs -I {} sed -i '' "s/light/\$NAME/g" {}
else
find \$NAME -type f |xargs grep -l light |xargs -I {} sed -i "s/light/\$NAME/g" {}
fi
git init
EOF
# Makefile
#sed -i '' "s/\\\`/\\\\\`/g" $light
#sed -i '' "s/\\\$/\\\\\$/g" $light
| true
|
c97b3b961eb7bc0907b852626e17e3e1a452b9e2
|
Shell
|
nationminu/tomcat7-script
|
/tomcat7/instance/simple/bin/shutdown.sh
|
UTF-8
| 569
| 3.21875
| 3
|
[] |
no_license
|
#!/bin/sh
# Gracefully stop the Tomcat instance described by the sibling env.sh
# (expects SERVER_NAME, SERVER_USER and SHUTDOWN_PORT to be defined there).
BASEDIR=$(dirname "$0")
. $BASEDIR/env.sh
# ------------------------------------
# Find the JVM whose command line carries "=$SERVER_NAME".
PID=`ps -ef | grep java | grep "=$SERVER_NAME" | awk '{print $2}'`
# BUG FIX: `[ e$PID == "e" ]` used the non-POSIX `==` under #!/bin/sh and
# broke with "too many arguments" whenever grep matched more than one
# process; test for emptiness directly instead.
if [ -z "$PID" ]
then
echo "JBOSS($SERVER_NAME) is not RUNNING..."
exit;
fi
# ------------------------------------
# Only the configured service account may stop the server.
UNAME=`id -u -n`
if [ "$UNAME" != "$SERVER_USER" ]
then
echo "$SERVER_USER USER to shutdown $SERVER_NAME Server ..."
exit;
fi
# ------------------------------------
# Rebuild JAVA_OPTS so catalina.sh talks to the expected shutdown port.
unset JAVA_OPTS
export JAVA_OPTS="-Dshutdown.bind.port=$SHUTDOWN_PORT"
$CATALINA_HOME/bin/catalina.sh stop
| true
|
63153237addb32128c19d5f50be256b927a26e0f
|
Shell
|
recap/MicroInfrastructure
|
/ContainerAdaptors/lofar-stage/scripts/install-lofar-lta.sh
|
UTF-8
| 969
| 3.34375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Install the LOFAR long-term-archive (LTA) client library plus its Oracle
# bindings, and write the astro-wise environment configuration.
# BUG FIX: abort on any failed step -- previously a failed download or
# `cd $LIBRARY` let the installers run in the wrong directory.
set -e
INSTANTCLIENT="/etc/ld.so.conf.d/oracle-instantclient.conf"
LIBRARY="lofar_lta-2.7.1"
# Install dependencies (libaio)
apt-get -qq update && apt-get -qq install -y libaio1 && rm -rf /var/lib/apt/lists/*
# Download and extract
# BUG FIX: `curl -f` makes HTTP errors fail the pipeline instead of piping
# an HTML error page into tar.
curl -fsSL http://www.astro-wise.org/losoft/$LIBRARY.tar.gz | tar xz && cd $LIBRARY
# Run installers
python3 setup.py --quiet install
python3 setup.py --quiet install_oracle
# LOFAR configuration
# BUG FIX: the target directory may not exist yet; create it first.
mkdir -p "$HOME/.awe"
echo "[global]
; Database
database_user : AWWORLD
database_password : WORLD
database_engine : oracle_10g
database_name : db.lofar.target.rug.nl
; Server
data_server : ds.lofar.target.astro-wise.org
data_port : 8002
" > "$HOME/.awe/Environment.cfg"
# InstantClient configuration
# Register whichever instantclient location exists with the dynamic linker.
if [ -d "/usr/lib/instantclient_11_2" ] ; then
sh -c "echo /usr/lib/instantclient_11_2 > $INSTANTCLIENT" && ldconfig
else
sh -c "echo /usr/local/lib/instantclient_11_2 > $INSTANTCLIENT" && ldconfig
fi
exit 0
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.