blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
ec754d866d48a99a403cd44c1a7e056b9ec83089 | Shell | retrohacker/pi-picture-frame | /downloadPictures.sh | UTF-8 | 884 | 4.03125 | 4 | [] | no_license | #! /usr/bin/env bash
# Base URL of the picture server, and the device name sent as the Referer
# header (the server uses it to identify this picture frame).
URL=https://pictures.blankenship.io
DEVICE=barbara

# Remove the temporary index file on any exit path.  Guard against the
# trap firing before INDEX has been assigned (e.g. if mktemp fails).
function cleanup() {
	[ -n "${INDEX:-}" ] && rm -f "${INDEX}"
}
trap cleanup EXIT

# Download the index of available pictures
INDEX="$(mktemp /home/pi/picturesIndex-XXXX)" || exit 1
wget --output-document "${INDEX}" --header "Referer: ${DEVICE}.blankenship.io" "${URL}/index.csv"

# Don't do anything if we couldn't get an index from the server
if [ ! -s "${INDEX}" ]
then
	echo "Failed to download index" >&2
	exit 1
fi

# Iterate through the index and download each image.  A while-read loop
# handles arbitrary file names safely without juggling IFS, and skips
# blank lines.
while IFS= read -r FILE
do
	[ -n "${FILE}" ] || continue
	wget --no-clobber --directory-prefix /home/pi/pictures --header "Referer: ${DEVICE}.blankenship.io" "${URL}/${FILE}"
done < "${INDEX}"

# Delete any local picture that is no longer listed in the index.
# Bug fix: match the filename literally and whole-line (-F -x); the old
# "^name$" pattern treated dots in filenames as regex wildcards.
for FILE in /home/pi/pictures/*
do
	FILENAME="$(basename "${FILE}")"
	if ! grep --silent --fixed-strings --line-regexp "${FILENAME}" "${INDEX}"
	then
		echo "Removing ${FILE}"
		rm "${FILE}"
	fi
done
| true |
7e659c1a1c4f0bb034c6c8c6c9998957b2332d86 | Shell | chipster/chipster-tools | /setup/bundles/builds/aligners.bash | UTF-8 | 4,419 | 3.171875 | 3 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | #!/bin/bash
# Build a Chipster "aligners" tool bundle inside a build pod: Hisat2,
# samtools, STAR, Bowtie2, TopHat2, Bowtie and BWA are compiled or
# unpacked under $TOOLS_PATH inside the pod, then moved to artefacts.
set -euo pipefail
IFS=$'\n\t'
source $(dirname "$0")/build-env.bash
image="comp-20.04-r-deps"
BUNDLE_COLLECTION_VERSION=""
# Always tear down the build pod, even when the build fails.
function finish {
  bash $BUNDLE_SCRIPTS_DIR/clean-up.bash $JOB_NAME $BUILD_NUMBER
}
trap finish EXIT
# NOTE(review): the escaped quotes pass the literal two-character token
# \"\" (empty quoted string) through to the pod script -- presumably
# intentional placeholder handling; verify against start-pod.bash.
bash $BUNDLE_SCRIPTS_DIR/start-pod.bash $JOB_NAME $BUILD_NUMBER $image \"$BUNDLE_COLLECTION_VERSION\"
# Install build prerequisites inside the pod as root.
bash $BUNDLE_SCRIPTS_DIR/run-in-pod.bash $JOB_NAME $BUILD_NUMBER root - <<EOF
apt-get install -q -y unzip make build-essential libz-dev gcc zlib1g-dev
EOF
# Download/build each aligner inside the pod as the unprivileged user.
# Variables escaped as \$ are expanded in the pod; unescaped ones
# ($TOOLS_PATH, $TMPDIR_PATH) are expanded here before sending.
bash $BUNDLE_SCRIPTS_DIR/run-in-pod.bash $JOB_NAME $BUILD_NUMBER ubuntu - <<EOF
# Hisat2
# current working directory is a temp dir
wget https://cloud.biohpc.swmed.edu/index.php/s/oTtGWbWjaxsQ2Ho/download
unzip download
rm download
cd hisat2-2.2.1
# replace 'python' with 'python3' for example in hisat-build
for f in \$(find . -type f); do sed -i 's_^#!/usr/bin/env python\$_#!/usr/bin/env python3_' \$f; done
cd ..
mv hisat2-2.2.1 $TOOLS_PATH/
cd $TOOLS_PATH
ln -s hisat2-2.2.1 hisat2
# samtools (used by the Hisat2 wrapper and many other tools)
cd $TMPDIR_PATH
wget https://github.com/samtools/samtools/releases/download/1.15.1/samtools-1.15.1.tar.bz2
tar -xvf samtools-1.15.1.tar.bz2
rm samtools-1.15.1.tar.bz2
cd samtools-1.15.1/
./configure --prefix=/opt/chipster/tools/samtools-1.15.1
make
make install
cd $TOOLS_PATH
ln -s samtools-1.15.1 samtools
# STAR
cd $TMPDIR_PATH
wget https://github.com/alexdobin/STAR/archive/2.7.10a.tar.gz
tar -xzf 2.7.10a.tar.gz
rm 2.7.10a.tar.gz
cd STAR-2.7.10a/source
make STAR
cd ../..
mv STAR-2.7.10a STAR-2.7.10a_build
mkdir STAR-2.7.10a
mv STAR-2.7.10a_build/bin/Linux_x86_64_static/STAR STAR-2.7.10a/
rm -rf STAR-2.7.10a_build
mv STAR-2.7.10a $TOOLS_PATH/
cd $TOOLS_PATH
ln -s STAR-2.7.10a STAR
# Bowtie2
cd $TMPDIR_PATH
wget https://sourceforge.net/projects/bowtie-bio/files/bowtie2/2.4.5/bowtie2-2.4.5-linux-x86_64.zip/download
unzip download
rm download
mv bowtie2-2.4.5-linux-x86_64 $TOOLS_PATH/
cd $TOOLS_PATH
ln -s bowtie2-2.4.5-linux-x86_64 bowtie2
# Tophat 2, The Artistic License
cd ${TMPDIR_PATH}
wget -O tophat-2.1.1.Linux_x86_64.tar.gz http://ccb.jhu.edu/software/tophat/downloads/tophat-2.1.1.Linux_x86_64.tar.gz
tar -xf tophat-2.1.1.Linux_x86_64.tar.gz -C ${TOOLS_PATH}/
rm -f tophat-2.1.1.Linux_x86_64.tar.gz
ln -s tophat-2.1.1.Linux_x86_64 ${TOOLS_PATH}/tophat2
ls -lah $TOOLS_PATH
# Bowtie
cd $TMPDIR_PATH
wget https://sourceforge.net/projects/bowtie-bio/files/bowtie/1.3.1/bowtie-1.3.1-linux-x86_64.zip/download
unzip download
rm download
mv bowtie-1.3.1-linux-x86_64 $TOOLS_PATH
cd $TOOLS_PATH
ln -s bowtie-1.3.1-linux-x86_64 bowtie
# remove example index
rm ${TOOLS_PATH}/bowtie/indexes/e_coli.*
# BWA
cd $TMPDIR_PATH
wget https://sourceforge.net/projects/bio-bwa/files/bwa-0.7.17.tar.bz2/download
tar -xvf download
rm download
cd bwa-0.7.17/
make
cd ..
mv bwa-0.7.17 $TOOLS_PATH/
cd $TOOLS_PATH
ln -s bwa-0.7.17 bwa
EOF
# Collect each installed tool directory and its version-less symlink as
# build artefacts.
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/hisat2-2.2.1 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/hisat2 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/STAR-2.7.10a $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/STAR $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/bowtie2-2.4.5-linux-x86_64 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/bowtie2 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/tophat-2.1.1.Linux_x86_64 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/tophat2 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/bowtie-1.3.1-linux-x86_64 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/bowtie $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/bwa-0.7.17 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/bwa $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/samtools-1.15.1 $JOB_NAME $BUILD_NUMBER
bash $BUNDLE_SCRIPTS_DIR/move-to-artefacts.bash $TOOLS_PATH/samtools $JOB_NAME $BUILD_NUMBER
| true |
2a116b30b881d43578dde05055879938b975323b | Shell | fogcitymarathoner/linkedin | /vidality_resume_service/convert2txt.sh | UTF-8 | 89 | 2.578125 | 3 | [] | no_license | #!/bin/bash
# Convert every PDF in the current directory to text with pdf2txt.py.
# Output name is the PDF's full basename with ".txt" appended
# (foo.pdf -> foo.pdf.txt), matching the original behaviour.
# Bug fix: the expansions are now quoted so file names containing
# spaces no longer word-split, and `basename` no longer misparses them.
for file in *.pdf
do
	[ -e "$file" ] || continue  # no PDFs: skip the literal '*.pdf' pattern
	b=$(basename "$file")
	pdf2txt.py "$file" > "$b.txt"
done
| true |
a0f9d80d9bf831e12ea83d6077ce39414a2ca8ef | Shell | nxmatic/tod | /dbgrid/scripts/deploy.sh | UTF-8 | 284 | 2.53125 | 3 | [] | no_license | #! /bin/sh
# Deploy a directory to the 'tod' rsync module on a remote host.  The
# remote side is started as a per-user rsync daemon over ssh, using the
# user's own $HOME/rsync.conf (the quoted --rsync-path command runs on
# the remote machine, so $HOME there is the remote home).
# $1 - remote host, $2 - local directory to sync
deploy()
{
HOST=$1
DIR=$2
rsync -avz -e ssh\
 --rsync-path '/usr/bin/rsync \
--server --daemon --config=$HOME/rsync.conf .'\
 --exclude ".svn/"\
 $DIR\
 gpothier@$HOST::tod
}
# The syntagma target is kept for reference but disabled.
#deploy syntagma.dim.uchile.cl "$*" || exit 10
deploy dichato "$*" || exit 11
exit 0
5ee6ba9aa62d7afd243f1d71e14c356586a5e3fe | Shell | Wissmo/9Hits-AutoInstall | /manage.sh | UTF-8 | 3,750 | 3.78125 | 4 | [] | no_license | #!/bin/bash
# Interactive management menu for a 9Hits session farm: start/stop the
# cron-driven sessions, resize the session pool, change the API token,
# or remove the installation.  Must run as root; all state lives under
# /root and /root/9HitsViewer_x64/sessions.
cd /root || exit
green=$(tput setaf 2)
reset=$(tput sgr0)
if [[ $EUID -ne 0 ]]; then
  whiptail --title "ERROR" --msgbox "This script must be run as root" 8 78
  exit
else
  # Top-level action menu; the selected tag ("1)".."5)") lands in $os.
  os=$(whiptail --title "What do you want to do?" --menu "Choose an option" 16 100 9 \
  "1)" "Start" \
  "2)" "Stop" \
  "3)" "Modify session amount" \
  "4)" "Change session token" \
  "5)" "Remove" 3>&2 2>&1 1>&3
  )
  case $os in
  # Start: install the bundled crontab so sessions launch on schedule.
  "1)")
    crontab crontab
    echo "${green}Sessions has been started${reset}"
  ;;
  # Stop: drop the crontab and kill any running sessions.
  "2)")
    crontab -r
    /root/kill.sh
    echo "${green}Sessions has been terminated${reset}"
  ;;
  # Resize the session pool and regenerate the per-session config files.
  "3)")
    option=$(whiptail --title "How many sessions you want?" --menu "Choose an option" 16 100 9 \
    "1)" "Use one session" \
    "2)" "Automatic max session based on system specs" \
    "3)" "Use custom number" 3>&2 2>&1 1>&3
    )
    case $option in
    "1)")
      number=1
      echo "${green}Amount of $number session has been set${reset}"
    ;;
    # Automatic sizing: cap by both CPU (6 sessions per core) and memory
    # (0.6 sessions per ~GB of RAM+swap); take the smaller limit.
    "2)")
      cores=$(nproc --all)
      memphy=$(grep MemTotal /proc/meminfo | awk '{print $2}')
      memswap=$(grep SwapTotal /proc/meminfo | awk '{print $2}')
      let memtotal=$memphy+$memswap
      let memtotalgb=$memtotal/100000
      let sscorelimit=$cores*6
      let ssmemlimit=$memtotalgb*6/10
      if [[ $sscorelimit -le $ssmemlimit ]]
      then
        number=$sscorelimit
        echo "${green}Amount of $number sessions has been set${reset}"
      else
        number=$ssmemlimit
        echo "${green}Amount of $number sessions has been set${reset}"
      fi
    ;;
    # Custom count: flash a red warning dialog, then prompt for a number.
    "3)")
      export NEWT_COLORS='
      window=,red
      border=white,red
      textbox=white,red
      button=black,white
      '
      whiptail --title "WARNING" --msgbox "IF YOU SET EXCESIVE AMOUNT OF SESSIONS, SESSIONS MAY BE BLOCKED || RECOMMENDED USE A AUTOMATIC SESSION" 8 78
      export NEWT_COLORS='
      window=,white
      border=black,white
      textbox=black,white
      button=black,white
      '
      number=$(whiptail --inputbox "ENTER NUMBER OF SESSIONS" 8 78 --title "SESSIONS" 3>&1 1>&2 2>&3)
      numberstatus=$?
      if [ $numberstatus = 0 ]; then
        echo "${green}Selected amount of $number sessions has been set${reset}"
      else
        echo "User selected Cancel"
        exit
      fi
    ;;
    esac
    # Write one JSON config per session.  The first session uses the
    # system proxy; all subsequent sessions use the 9Hits proxy.
    # NOTE(review): $token is only assigned in menu option 4, so in a
    # fresh shell this writes an empty token -- verify that is intended.
    isproxy=false
    for i in $(seq 1 "$number");
    do
      file="/root/9HitsViewer_x64/sessions/156288217488$i.txt"
      cat > "$file" <<EOFSS
{
  "token": "$token",
  "note": "",
  "proxyType": "system",
  "proxyServer": "",
  "proxyUser": "",
  "proxyPw": "",
  "maxCpu": 10,
  "isUse9HitsProxy": $isproxy
}
EOFSS
      isproxy=true
      proxytype=ssh
    done
  ;;
  # Change token: drop existing session files and record the new token.
  "4)")
    rm /root/9HitsViewer_x64/sessions/156288217488*
    token=$(whiptail --inputbox "Enter your TOKEN" 8 78 --title "TOKEN" 3>&1 1>&2 2>&3)
    tokenstatus=$?
    if [ $tokenstatus = 0 ]; then
      echo "${green}Token has been updated to $token${reset}"
    else
      echo "User selected cancel"
      exit
    fi
  ;;
  # Remove: stop everything and delete the whole installation.
  "5)")
    crontab -r
    /root/kill.sh
    rm -R 9Hits-AutoInstall 9HitsViewer_x64 9hviewer-linux-x64.tar.bz2 crashdetect.sh crontab install.sh kill.sh manage.sh reboot.sh
    echo "${green}All files have been deleted${reset}"
  ;;
  esac
fi
| true |
a55a8ae5c5cb43d983b08029aa746bcef52897b5 | Shell | microsoft/ContextualSP | /logigan/corpus_construction/mlm_corpus/filter.sh | UTF-8 | 1,363 | 2.875 | 3 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-unknown-license-reference",
"LGPL-2.1-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] | permissive | indicator_type=$1
# Scratch directory for this indicator type ($indicator_type is the
# script's first argument, assigned above).
tmp_dir=./filter_${indicator_type}
# Start from an empty scratch directory.  Bug fix: the test and rm
# expansions are now quoted -- unquoted, an empty or space-containing
# $1 produced a broken test expression or removed the wrong path.
if [ -d "${tmp_dir}" ]
then
    rm -r "${tmp_dir}"
fi
mkdir "${tmp_dir}"
# Filter three index ranges of the corpus in parallel: two background
# jobs plus one foreground job per indicator type.
if [ "${indicator_type}" == premise ]
then
    python filter.py --start_index 0 --end_index 500000 --indicator_type premise &
    python filter.py --start_index 500000 --end_index 1000000 --indicator_type premise &
    python filter.py --start_index 1000000 --end_index 1500000 --indicator_type premise
fi
if [ "${indicator_type}" == conclusion ]
then
    python filter.py --start_index 0 --end_index 500000 --indicator_type conclusion &
    python filter.py --start_index 500000 --end_index 1000000 --indicator_type conclusion &
    python filter.py --start_index 1000000 --end_index 1500000 --indicator_type conclusion
fi
# Wait for the background jobs, then merge the per-range outputs and
# remove the scratch directory.
wait
cat "${tmp_dir}"/*.jsonl > ./filter_${indicator_type}.jsonl
rm -r "${tmp_dir}"
| true |
d3c86a46cb286069837514a96594f333b3d0bc90 | Shell | Jooho/dotfiles_fedora | /install.sh | UTF-8 | 912 | 3.390625 | 3 | [] | no_license | #!/bin/bash
# Install the dotfiles: pull submodules, create symlinks, then run the
# OS-specific setup (Homebrew on macOS, dnf on Fedora), populate the vim
# directories and switch the login shell to zsh.
echo "Installing dotfiles"
echo "Initializing submodule(s)"
git submodule update --init --recursive
source install/link.sh
if [ "$(uname)" == "Darwin" ]; then
    echo "Running on OSX"
    echo "Brewing all the things"
    source install/brew.sh
    echo "Updating OSX settings"
    # NOTE(review): unlike the other helpers this one has no install/
    # prefix -- confirm 'installosx.sh' is the intended path.
    source installosx.sh
    # echo "Installing node (from nvm)"
    # source install/nvm.sh
elif [ "$(uname)" == "Linux" ]; then
    # Fedora kernels carry an "fc" tag in `uname -a`.
    if [ "$(uname -a|grep fc)" ]; then
        echo "Running on Fedora"
        echo "dnf install all the things"
        source install/fedora.sh
        # echo "Updating Fedora settings"
        # source install_fedora.sh
        echo "Installing node (from nvm)"
        source install/nvm_fedora.sh
    fi
fi
echo "creating vim directories"
mkdir -p ~/.vim-tmp
cp -R vim/vim.symlink/* ~/.vim/
echo "Configuring zsh as default shell"
chsh -s $(which zsh)
echo "Done."
| true |
9b32f1546a591b1c8247c9169f3bd0c7d9827ca0 | Shell | zouchao2010/litesight-xx | /run.sh | UTF-8 | 557 | 2.671875 | 3 | [] | no_license | #!/usr/bin/env bash
# Start litecoind and the Insight API server.  TESTNET=1 selects the
# litecoin test network; anything else runs against livenet.  The only
# differences between the two modes are the -testnet daemon flag, the
# INSIGHT_NETWORK value and the log file name, so they are factored out.
if [[ $TESTNET -eq 1 ]]; then
    network=testnet
    daemon_extra=-testnet
else
    network=livenet
    daemon_extra=
fi

echo "$network"
litecoind -rpcuser=liteuser -rpcpassword=litepass -server=1 -daemon=1 -txindex=1 -listen=1 $daemon_extra
# Give the daemon time to come up before the API starts querying it.
sleep 10
INSIGHT_PORT=4001 INSIGHT_NETWORK="$network" npm start | tee -a "$network.log"
# Manual teardown helper:
#ps -ef|grep litecoind|grep -v grep |awk '{print $2}'|grep -v PID|xargs kill -9
| true |
fa16b3cad3f72db649998527112b2872579c511d | Shell | rsingh2083/scanner | /docker/build-all-base.sh | UTF-8 | 1,915 | 3.625 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Build and push every scanner-base Docker image variant: for each
# subdirectory (one per base flavour) build a CPU image and four GPU
# images (CUDA 9.0/9.1/10.0/10.1, cuDNN 7) in parallel.
set -e
NO_CACHE=false
CORES=$(nproc)
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
for dir in $DIR/*/
do
    base=`basename ${dir%*/}`
    # Stage shared build inputs into this flavour's context directory.
    cp $DIR/../deps.sh $dir/deps.sh
    cp $DIR/../deps_openvino.sh $dir/deps_openvino.sh
    rm -rf $dir/thirdparty
    mkdir -p $dir/thirdparty
    cp -r $DIR/../thirdparty/resources $dir/thirdparty/
    # The helper functions are (re)defined on every loop iteration so
    # that they pick up the current $dir/$base via globals.
    function build {
        local TYPE=$1
        local TAG=$2
        local BASE_TAG=$3
        # NOTE(review): '2>&1 > file' sends only stdout to the log;
        # stderr still goes to the terminal (order matters).  Confirm
        # that '> file 2>&1' was not intended.
        docker build \
            --pull \
            --build-arg cores=$CORES \
            --build-arg base_tag=$BASE_TAG \
            --no-cache=$NO_CACHE \
            -t scannerresearch/scanner-base:$TAG \
            -f $dir/Dockerfile.$TYPE \
            $dir 2>&1 > ${TAG}-output.log \
            && rm ${TAG}-output.log \
            || { echo "Building $TAG failed! Check $TAG-output.log."; }
    }
    # Build the -base stage, push it, then build the final stage on top.
    function build_chain {
        local TYPE=$1
        local TAG=$2
        local BASE_TAG=$3
        build base "$TAG-base" $BASE_TAG && \
        push "$TAG-base" && \
        build $TYPE $TAG "scannerresearch/scanner-base:$TAG-base"
    }
    function push {
        docker push scannerresearch/scanner-base:$1
    }
    # Build + push one GPU variant on the matching nvidia/cuda base.
    function build_push_gpu {
        local CUDA_MAJOR_VERSION=$1
        local CUDA_VERSION=$2
        local CUDNN_VERSION=$3
        local BASE_TAG=nvidia/cuda:${CUDA_VERSION}-${CUDNN_VERSION}-devel-ubuntu16.04
        local TAG=$base-gpu-$CUDA_VERSION-$CUDNN_VERSION
        build_chain gpu${CUDA_MAJOR_VERSION} $TAG $BASE_TAG && \
        push $TAG
    }
    base_tag=scannerresearch/scanner-base:$base
    # Build cpu with ubuntu:16.04
    build_chain cpu $base-cpu ubuntu:16.04 &
    # GPU
    build_push_gpu 9 9.0 cudnn7 &
    build_push_gpu 9 9.1 cudnn7 &
    build_push_gpu 10 10.0 cudnn7 &
    build_push_gpu 10 10.1 cudnn7 &
    # Barrier: wait for all five parallel builds of this flavour.
    wait $(jobs -p)
    push $base-cpu
done
| true |
0867337cc2722984d949143d6af282752b0638d0 | Shell | doyke/avare | /extra/backend-scripts/parseweather/xmlweather.sh | UTF-8 | 1,049 | 3.625 | 4 | [
"BSD-2-Clause"
] | permissive | #!/bin/bash
# Download current aviation weather products (PIREPs, METARs, AIRSIGMETs,
# TAFs) plus upper-level winds, run the parser, and stage a manifest plus
# zip archive of the outputs.
OUTDIR="./xml"
HOST="https://aviationweather.gov/adds/dataserver_current/current"
DESIGNATORS="aircraftreports metars airsigmets tafs"
WEATHERFILE="./output/weather"
WEATHERZIP="./weather.zip"
# Fetch the cached XML feed for each product type.
# (-p keeps the script re-runnable when ./xml already exists.)
mkdir -p "$OUTDIR"
for f in $DESIGNATORS; do
	HOSTDESIG="${HOST}/${f}.cache.xml"
	DESTFILENAME="$OUTDIR/${f}.xml"
	echo "Getting $f from $HOSTDESIG"
	echo "Writing output $DESTFILENAME"
	wget -q -O "$DESTFILENAME" "$HOSTDESIG"
done
# get upper level winds (6 h forecast, low levels, all regions)
# Bug fix: the "region" query parameter had been mangled by a bad text
# encoding; the URL is now quoted instead of backslash-escaping each '&'.
wget -q -O winds.html "https://aviationweather.gov/windtemp/data?level=l&fcst=06&region=all&layout=off"
# Keep only the fixed-width table between the begin/end markers.
awk '/raw data begins here/ {p=1}; p; /raw data ends here/ {p=0}' winds.html > "$OUTDIR/winds.txt"
./parseweather
# Bug fix: %M is minutes; the month (%m) was clearly intended in the
# month_day_year_hour:minute stamp.
FILEDATE=$(date -u +%m_%d_%Y_%H:%M_UTC)
echo "$FILEDATE" > "$WEATHERFILE"
echo "weather.db" >> "$WEATHERFILE"
echo "latest_fcat.png" >> "$WEATHERFILE"
echo "latest_fcat.txt" >> "$WEATHERFILE"
echo "Preparing zip file"
rm -f "$WEATHERZIP"
zip "$WEATHERZIP" output/*
echo "Complete"
| true |
8b6d6f470432532f33f9850a713589fa1cabcbf9 | Shell | Harry-repo/harrys2 | /loops.sh | UTF-8 | 210 | 2.796875 | 3 | [] | no_license | #!/bin/bash
# Walk the pairs (outer, inner) over {1..5} x {0, 5, 6}; `break 2` leaves
# both loops at once the first time the pair (1, 5) is reached, so only
# " 1 and 0" is printed before the end-of-script marker.
for outer in 1 2 3 4 5; do
    for inner in 0 5 6; do
        if [[ $outer == 1 && $inner == 5 ]]; then
            break 2
        fi
        echo " $outer and $inner"
    done
done
echo "-----eos----"
| true |
85c4dfce83776c630d10aa6b4bfbe96dfad20253 | Shell | JaidenCook/300-MHz-Pipeline-Scripts | /selfcal.sh | UTF-8 | 8,139 | 3.671875 | 4 | [] | no_license | #!/bin/bash
# Self-calibration pipeline for MWA 300 MHz snapshots: parse options,
# copy the measurement set, image the field, and optionally self-cal.
start_time=`date +%s`
# Set default values for optional parameters
mwapath="/home/jaidencook/.local/lib/python2.7/site-packages/mwa_pb/data/"
# NOTE(review): 'threshold' is never referenced again in this script.
threshold=0.95
flag=yes
pipecond=no
selfcal=yes
# Read the options
TEMP=`getopt -o a:b:c:d:e: --long input_dir:,output_dir:,obsid_list:,chan:,pipecond: -- "$@"`
eval set -- "$TEMP"
# Extract options and their arguments into variables
while true ; do
    case "$1" in
        -a|--input_dir) # input directory (required argument)
            case "$2" in
                "") shift 2 ;;
                *) input_dir=$2 ; shift 2 ;;
            esac ;;
        -b|--output_dir) # output directory (required argument)
            case "$2" in
                "") shift 2 ;;
                *) output_dir=$2 ; shift 2 ;;
            esac ;;
        -c|--obsid_list) # obsID list (required argument)
            case "$2" in
                "") shift 2 ;;
                *) obsid_list=$2 ; shift 2 ;;
            esac ;;
        -d|--chan) # channel (required argument); set to 69, 93, 121, 145 or 169
            case "$2" in
                "") shift 2 ;;
                *) chan=$2 ; shift 2 ;;
            esac ;;
        -e|--pipecond) # channel (required argument); set to 69, 93, 121, 145 or 169
            case "$2" in
                "") shift 2 ;;
                *) pipecond=$2 ; shift 2 ;;
            esac ;;
        # NOTE(review): '-f'/'--selfcal' is not declared in the getopt
        # spec above (-o a:b:c:d:e:, no 'selfcal' long option), so this
        # branch can never be reached as written.
        -f|--selfcal) # channel (required argument); set to 69, 93, 121, 145 or 169
            case "$2" in
                "") shift 2 ;;
                *) selfcal=$2 ; shift 2 ;;
            esac ;;
        --) shift ; break ;;
        *) echo "Internal error!" ; exit 1 ;;
    esac
done
# Check required arguments have been specified
if [ -z "$input_dir" ]; then
    echo "Error: input_dir not specified. Aborting."
    exit 1
elif [ -z "$output_dir" ]; then
    echo "Error: output_dir not specified. Aborting."
    exit 1
elif [ -z "$obsid_list" ]; then
    echo "Error: obsid_list not specified. Aborting."
    exit 1
elif [ -z "$chan" ]; then
    echo "Error: chan not specified. Aborting."
    exit 1
fi
# Check chan parameter
# (236/237 are also accepted here even though the error message below
# only lists the five standard channels.)
if [ $chan != "69" ] && [ $chan != "93" ] && [ $chan != "121" ] && [ $chan != "145" ] && [ $chan != "169" ] && [ $chan != "236" ] && [ $chan != "237" ]; then
    echo "Error: chan should be set to 69, 93, 121, 145 or 169. Aborting."
    exit 1
fi
# Getting the OBSID.
if [ $pipecond == "yes" ]
then
    # In this case the input is a string not a file.
    obsid=$(echo $obsid_list | awk '{print $1}')
elif [ $pipecond == "no" ]
then
    # In this case the input is a file not a string.
    obsid=`awk '{print $1}' $obsid_list`
else
    echo "pipe condition not specified, exiting program."
    exit 1
fi
# Create output directory
if [ ! -e $output_dir ]; then
    mkdir $output_dir
fi
# Create snapshot directory
if [ -e $output_dir/$obsid ]; then
    rm -rf $output_dir/$obsid
fi
mkdir $output_dir/$obsid
cd $output_dir/$obsid
echo "========================================================================"
echo "Coping over ${obsid}.* from directory ${input_data}/${obsid}"
echo "========================================================================"
# Copy measurement set, metafits file and sky model to output directory
if [ -e $input_dir/$obsid/$obsid.ms ] && [ -e $input_dir/$obsid/$obsid.metafits ]; then
    # NOTE(review): the first cd is immediately overridden by the second
    # and has no lasting effect.
    cd $input_dir/$obsid/
    cd $output_dir/$obsid
    cp -r $input_dir/$obsid/$obsid.ms $input_dir/$obsid/$obsid.ms.flagversions $input_dir/$obsid/$obsid.metafits .
else
    echo "Error: input files are missing. Aborting."
    exit 1
fi
# Flag bad tiles.  The tile list is the second field of the obsid entry:
# read from the argument string in pipeline mode, or from the list file
# otherwise.  The two branches differ only in how $tile_list is obtained.
if [ $pipecond == "yes" ]
then
    # In this case the input is a string not a file.
    # Flag tiles if required
    if [ $flag == "yes" ]; then
        tile_list=$(echo $obsid_list | awk '{print $2}')
        echo $tile_list
        if [ -z "$tile_list" ]; then
            echo "No tiles to flag for snapshot $obsid"
        elif [ $tile_list == "none" ]; then
            echo "No tiles to flag for snapshot $obsid"
        else
            tile_list=`echo ${tile_list//,/ }`
            # Check tiles are integers between 0 and 127
            for tile in ${tile_list[@]}; do
                if [ "${tile//[0-9]}" != "" ] || [ $(echo "$tile < 0"|bc) -eq 1 ] || [ $(echo "$tile > 127"|bc) -eq 1 ]; then
                    echo "Error: tile $tile is not an integer between 0 and 127. Aborting."
                    exit 1
                fi
            done
            # Flag tiles
            echo "Flagging tiles $tile_list for snapshot $obsid listed in $obsid_list"
            flagantennae $obsid.ms $tile_list
        fi
    fi
elif [ $pipecond == "no" ]
then
    # In this case the input is a file not a string.
    if [ $flag == "yes" ]; then
        tile_list=` awk '{print $2}' $obsid_list`
        echo $tile_list
        if [ -z "$tile_list" ]; then
            echo "No tiles to flag for snapshot $obsid"
        elif [ $tile_list == "none" ]; then
            echo "No tiles to flag for snapshot $obsid"
        else
            tile_list=`echo ${tile_list//,/ }`
            # Check tiles are integers between 0 and 127
            for tile in ${tile_list[@]}; do
                if [ "${tile//[0-9]}" != "" ] || [ $(echo "$tile < 0"|bc) -eq 1 ] || [ $(echo "$tile > 127"|bc) -eq 1 ]; then
                    echo "Error: tile $tile is not an integer between 0 and 127. Aborting."
                    exit 1
                fi
            done
            # Flag tiles
            echo "Flagging tiles $tile_list for snapshot $obsid listed in $obsid_list"
            flagantennae $obsid.ms $tile_list
        fi
    fi
else
    echo "pipe condition not specified, exiting program."
    exit 1
fi
# NOTE(review): this stores the literal string "887250/<chan>" (no
# arithmetic is performed) and 'maxuvm' is never used afterwards.
maxuvm=887250/$chan
# Changing the centre to zenith.
chgcentre -zenith ${obsid}.ms
echo "======================================================================================="
echo "Making a shallow all-sky image."
echo "======================================================================================="
# Creating a shallow all sky image with 4arc minute resolution. This is so we can identify bright regions.
# Parameters for the all-sky image are fixed for the case of phase 1 300MHz.
wsclean -name ${obsid}_deeper -size 7000 7000 -niter 300000 -auto-threshold 0.3 -auto-mask 3.0 -pol I -weight uniform \
-scale 59asec -abs-mem 30 -j 12 -apply-primary-beam -mwa-path $mwapath -mgain 0.85 -minuv-l 60 -taper-gaussian 2.4amin $obsid.ms
echo "======================================================================================="
echo "Running BANE and Aegean."
echo "======================================================================================="
# After the all-sky image is created Aegean is run to determine where the flux density is.
BANE ${obsid}_deeper-image.fits
aegean --autoload --table ${obsid}-sources.fits ${obsid}_deeper-image.fits
echo "======================================================================================="
echo "Identifying primary beam and grating lobes."
echo "======================================================================================="
lobe-finder.py --obsid ${obsid}
echo "======================================================================================="
echo "Performing self-calibration"
echo "======================================================================================="
# For calibration transfer observations we may not want to self-calibrate.
if [ $selfcal == "yes" ]
then
    # Performing self cal:
    calibrate -absmem 30 -minuv 60 $obsid.ms ${obsid}_solutions.bin
    echo "======================================================================================="
    echo "Applying solutions to $obsid"
    echo "======================================================================================="
    # Apply the solutions
    applysolutions $obsid.ms ${obsid}_solutions.bin
    # Plotting the amplitude of the channels vs frequency.
    echo "======================================================================================="
    echo "Creating phase and amplitude plots."
    echo "======================================================================================="
    # Plot phase and amplitude calibration solutions
    aocal_plot.py --refant=127 ${obsid}_solutions.bin
else
    echo "Not selfcalibrating"
fi
# Report total wall-clock runtime.
end_time=`date +%s`
duration=`echo "$end_time-$start_time" | bc -l`
echo "Total runtime = $duration sec"
exit 0
| true |
d576893027e643da19156c1fd5221c13436c3e69 | Shell | betrybe/cypress-evaluator-action | /entrypoint.sh | UTF-8 | 710 | 3.078125 | 3 | [] | no_license | #!/bin/bash
set -x

# Positional parameters supplied by the CI action:
#   $1 - "true"/"false": start the app with `npm start` before testing
#   $2 - "true"/"false": run Cypress headless
#   $3 - browser name passed to Cypress
RUN_NPM_START=$1
CYPRESS_HEADLESS=$2
CYPRESS_BROWSER=$3

export CY_CLI=true

npm install

if $RUN_NPM_START ; then
  npm start & # Open server in background
  # NOTE(review): $wait_for_url is never assigned in this script, so
  # wait-on receives no URL here -- confirm where it should come from.
  npx wait-on -t 300000 $wait_for_url # wait for server until timeout
fi

# Bug fix: the original passed "$headless_flag" quoted, so a non-headless
# run handed Cypress a spurious empty-string argument.  An empty array
# expands to no words at all.
headless_flag=()
if $CYPRESS_HEADLESS ; then
  headless_flag=(--headless)
fi

node_modules/.bin/cypress install
node_modules/.bin/cypress run "${headless_flag[@]}" --browser "$CYPRESS_BROWSER"

ls
# Merge the per-spec mochawesome reports into a single JSON document.
node_modules/.bin/mochawesome-merge cypress/reports/*.json > output.json
ls

# Grade the merged results against the exercise requirements.
node /evaluator.js output.json .trybe/requirements.json result.json
if [ $? != 0 ]; then
  echo "Execution error"
  exit 1
fi

# Publish the base64-encoded result for later workflow steps.
echo "result=$(base64 -w 0 result.json)" >> $GITHUB_OUTPUT
dc948a3fd92d0c78e7d7f03b206299aa91f8c2dc | Shell | MiningMarsh/config.puppet | /modules/zfs/templates/remove-bad-snapshots.erb | UTF-8 | 461 | 3.28125 | 3 | [] | no_license | #!/usr/bin/env zsh
# This file is managed by puppet.
# This file will remove snapshots supported by zfs-auto-snapshot if the dataset
# is no longer marked as needing them.
# Nothing to do on hosts without the zfs userland tools.
if ! which zfs > /dev/null 2>&1; then
	exit
fi
for dataset in $(zfs list -H -o name); do
	# Datasets still opted in to auto-snapshots are left alone.
	if [[ "$(zfs get com.sun:auto-snapshot $dataset -H -o value)" == true ]]; then
		continue
	fi
	# Destroy this dataset's zfs-auto-snap_* snapshots (and descendants,
	# via -R), up to 12 destroys in parallel; -r makes xargs a no-op
	# when the grep matches nothing.
	zfs list -t snapshot -H -o name | grep $dataset@zfs-auto-snap_ | xargs -rn1 -P12 zfs destroy -R
done
| true |
39389c7c714208f1523a8673d37c22e341e5b2ee | Shell | PazhaniMurugesan/Scripts-collection | /linux/gunzip.sh | UTF-8 | 64 | 2.75 | 3 | [] | no_license | for f in *.gz ; do gunzip -c "$f" > ../folder2/"${f%.*}" ; done
| true |
d249565cf36b0b9920548b80b4bd73a3e6aef4cc | Shell | gl3n0n/Multichannel | /protected/crontab/start_cron_schedpost.sh | UTF-8 | 1,316 | 3.015625 | 3 | [] | no_license | #!/bin/bash
###########################################################
# Filename:
#
# Desc :
#
# Date : 2015-04-02
#
#
# By : bayugitus
#
#
###########################################################
# Group-writable default permissions for files this cron job creates.
umask 002
###########################################################
# FUNCTIONS
###########################################################
# Set the working directory and the dated log-file path used by the
# rest of the script.
function _init()
{
   ROOTD="/var/www/html/multichannel/protected/crontab"
   LOGF=${ROOTD}/log/schedpost.php-$(date '+%Y-%m-%d').log
}
#######################################
# Append a timestamped info-level line to the log file.
# Globals:   LOGF - log file path (set by _init)
# Arguments: $* - message text
#######################################
function timeStamp()
{
   # The original computed a zero-padded PID here but never used it;
   # that dead statement has been removed.  $LOGF is quoted so paths
   # containing spaces still work.
   echo "[$(date)] - info - $*" >> "${LOGF}"
}
###########################################################
###########################################################
# MAIN ENTRY
###########################################################
_init
#move 2 parent'shouse
cd $ROOTD
# Make sure the log directory exists before logging anything.
[[ ! -d "${ROOTD}/log" ]] && {
   mkdir -p ${ROOTD}/log 2>/dev/null
}
timeStamp "start"
timeStamp "==================================="
# Run the scheduled-post job; when invoked as root, also hand the log
# files back to the apache user.
if [[ "root" == "${LOGNAME}" ]]
then
   /usr/bin/php -f ${ROOTD}/schedpost.php "DAILY" >> $LOGF 2>/dev/null
   chown -R apache.apache $LOGF ${ROOTD}/log/* 2>/dev/null
else
   /usr/bin/php -f ${ROOTD}/schedpost.php "DAILY" >> $LOGF 2>/dev/null
fi
# $? here is the status of the last command in the if-block above
# (chown when run as root, php otherwise).
timeStamp "ret:$?"
timeStamp "==================================="
timeStamp "done here"
| true |
8599b654a12fcf37070a1e65492ca750abc8903f | Shell | doitian/dotfiles-public | /default/bin/m-out-of-n-split.sh | UTF-8 | 509 | 3.859375 | 4 | [] | no_license | #!/bin/bash
# Split FILE into n numbered piece files such that any m of the n pieces
# together contain every line: each line is written to
# pieces = n - m + 1 consecutive outputs, round-robin over file.1..file.n.
# Bug fix: the usage branch previously fell through and the script then
# crashed on the unset variables; it now exits.
if (("$#" != 3)); then
  echo "Usage: m-out-of-n-split.sh m n file" >&2
  exit 1
fi

m="$1"
n="$2"
pieces=$((n - m + 1))
file="$3"

lineno=1
padding=" "   # single-space pad keeps 1-9 aligned with 10+
head=1

# Refuse to overwrite existing piece files.
for id in $(seq $n); do
  if [ -f "$file.$id" ]; then
    echo "$file.$id already exists" >&2
    exit 127
  fi
done

while IFS= read -r line; do
  if ((lineno > 9)); then
    padding=""
  fi
  # Write this line to the next `pieces` outputs, wrapping around n.
  for i in $(seq $pieces); do
    echo "$padding$lineno: $line" >>"$file.$head"
    head=$(((head % n) + 1))
  done
  lineno=$((lineno + 1))
done <"$file"
c7cd09bbec17aaf3a806bd3a3c0acc36754c18d9 | Shell | thousandeyes/appd-integration-reference | /custom-monitor-image/build/thousandeyes-monitor/startup.sh | UTF-8 | 3,481 | 3.21875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# ANSI colour codes used by the startup banner below.
GREEN='\033[92m'
BLUE='\033[96m'
YELLOW='\033[93m'
GREY='\033[37m'
CLEAR='\033[90m'
# Sample Docker start script for the AppDynamics Standalone Machine Agent
# See the AppDynamics Product Docs (Standalone Machine Agent Configuration Reference)
# Environment variables are passed to the container at runtime
# Default the unique host id to thousandeyes-<appname>
if [ "x${APPDYNAMICS_AGENT_UNIQUE_HOST_ID}" == "x" ]; then
  APPDYNAMICS_AGENT_UNIQUE_HOST_ID="thousandeyes-${APPDYNAMICS_AGENT_APPLICATION_NAME}"
fi
# Assemble the -D system properties for the machine agent; required
# settings first, then each optional one only when its variable is set.
MA_PROPERTIES=${APPDYNAMICS_MA_PROPERTIES}
MA_PROPERTIES+=" -Dappdynamics.controller.hostName=${APPDYNAMICS_CONTROLLER_HOST_NAME}"
MA_PROPERTIES+=" -Dappdynamics.controller.port=${APPDYNAMICS_CONTROLLER_PORT}"
MA_PROPERTIES+=" -Dappdynamics.agent.accountName=${APPDYNAMICS_AGENT_ACCOUNT_NAME}"
MA_PROPERTIES+=" -Dappdynamics.agent.accountAccessKey=${APPDYNAMICS_AGENT_ACCOUNT_ACCESS_KEY}"
MA_PROPERTIES+=" -Dappdynamics.controller.ssl.enabled=${APPDYNAMICS_CONTROLLER_SSL_ENABLED}"
MA_PROPERTIES+=" -Dappdynamics.sim.enabled=${APPDYNAMICS_SIM_ENABLED}"
if [ "x${APPDYNAMICS_DOCKER_ENABLED}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.docker.enabled=${APPDYNAMICS_DOCKER_ENABLED}"
fi
MA_PROPERTIES+=" -Dappdynamics.docker.container.containerIdAsHostId.enabled=false"
# Note the hierarchy path is prefixed with "SVM-".
if [ "x${APPDYNAMICS_MACHINE_HIERARCHY_PATH}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.machine.agent.hierarchyPath=SVM-${APPDYNAMICS_MACHINE_HIERARCHY_PATH}"
fi
if [ "x${APPDYNAMICS_AGENT_UNIQUE_HOST_ID}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.agent.uniqueHostId=${APPDYNAMICS_AGENT_UNIQUE_HOST_ID}"
fi
if [ "x${APPDYNAMICS_AGENT_PROXY_HOST}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.http.proxyHost=${APPDYNAMICS_AGENT_PROXY_HOST}"
fi
if [ "x${APPDYNAMICS_AGENT_PROXY_PORT}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.http.proxyPort=${APPDYNAMICS_AGENT_PROXY_PORT}"
fi
if [ "x${APPDYNAMICS_AGENT_PROXY_USER}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.http.proxyUser=${APPDYNAMICS_AGENT_PROXY_USER}"
fi
if [ "x${APPDYNAMICS_AGENT_PROXY_PASS}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.http.proxyPasswordFile=${APPDYNAMICS_AGENT_PROXY_PASS}"
fi
if [ "x${APPDYNAMICS_AGENT_APPLICATION_NAME}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.agent.applicationName=${APPDYNAMICS_AGENT_APPLICATION_NAME}"
fi
if [ "x${APPDYNAMICS_AGENT_TIER_NAME}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.agent.tierName=${APPDYNAMICS_AGENT_TIER_NAME}"
fi
if [ "x${APPDYNAMICS_AGENT_NODE_NAME}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.agent.nodeName=${APPDYNAMICS_AGENT_NODE_NAME}"
fi
if [ "x${APPDYNAMICS_AGENT_METRIC_LIMIT}" != "x" ]; then
  MA_PROPERTIES+=" -Dappdynamics.agent.maxMetrics=${APPDYNAMICS_AGENT_METRIC_LIMIT}"
fi
# NOTE(review): plain echo (no -e) prints the backslash-n literally in
# bash -- confirm the literal "\n" in the output is acceptable.
echo "\nMA_PROPERTIES: ${MA_PROPERTIES}\n"
# Startup banner (printf interprets the colour escape sequences).
printf "Starting the ${BLUE} ThousandEyes AppD Monitor Extension${CLEAR}\n"
printf " - Application: ${YELLOW}${APPDYNAMICS_AGENT_APPLICATION_NAME}${CLEAR}\n"
printf " - AppD Controller: ${YELLOW}${APPDYNAMICS_CONTROLLER_HOST_NAME}${CLEAR}\n"
printf " - Machine Agent Host ID: ${YELLOW}${APPDYNAMICS_AGENT_UNIQUE_HOST_ID}${CLEAR}\n"
printf " - ThousandEyes Tests: ${YELLOW}${TE_TESTS}${CLEAR}\n"
printf " - ThousandEyes Account: ${YELLOW}${TE_ACCOUNTGROUP}${CLEAR}\n"
printf " - Custom Metric Format: ${YELLOW}${TE_METRIC_TEMPLATE}${CLEAR}\n"
# Start Machine Agent
${MACHINE_AGENT_HOME}/bin/machine-agent ${MA_PROPERTIES}
| true |
0bdde7990baff8097b8473f43169ab4fe13b93e6 | Shell | freemed/asterisk-reporting | /salesdisplay-gwt/SalesDisplay-shell | UTF-8 | 385 | 2.65625 | 3 | [] | no_license | #!/bin/sh
# Launch the SalesDisplay GWT application in GWT hosted mode, building a
# classpath from the app sources plus the GWT jars in either location.
APPDIR=`dirname $0`;
CP="$APPDIR/src:$APPDIR/bin:/opt/gwt/gwt-user.jar:/opt/gwt/gwt-dev-linux.jar:/usr/lib/gwt/gwt-user.jar:/usr/lib/gwt/gwt-dev-linux.jar"
# NOTE(review): jars are enumerated from war/WEB-INF/lib but added to the
# classpath from $APPDIR/lib -- confirm the two directories are meant to
# mirror each other.
for JAR in war/WEB-INF/lib/*.jar; do
	CP="${CP}:$APPDIR/lib/$(basename "$JAR")"
done
java -Xmx256M -cp "$CP" com.google.gwt.dev.HostedMode -out "$APPDIR/war" "$@" com.github.freemed.gwt.SalesDisplay/SalesDisplay.html;
| true |
bae177de3289826112653cac61506f97a01f1cc0 | Shell | Vinapo/OTNSkills | /deploy.sh | UTF-8 | 1,048 | 2.75 | 3 | [] | no_license | #!/bin/bash
# Package the service and its pip dependencies into packages.zip and push
# the bundle to the existing 'otnskills' AWS Lambda function.
rm -r packages
rm packages.zip
pip install -r requirements.txt --target ./packages
if [ -d packages ]; then
	cd packages
	mkdir apis
	mkdir adapters
	mkdir data
	# Copy the application sources and config alongside the vendored deps.
	cp ../*.py .
	cp ../adapters/* adapters
	cp ../apis/* apis
	cp ../data/* data
	cp ../production.ini .
	# Strip test/dev entry points and package metadata from the bundle.
	find . -name "tests.py" -delete
	find . -name "importer.py" -delete
	find . -name "app.py" -delete
	find . -name "*.pyc" -delete
	find . -name "*.egg-info" | xargs rm -rf
	find . -name "*.dist-info" | xargs rm -rf
	# -m moves (deletes) the inputs as they are zipped.
	zip -9mrv packages.zip .
	mv packages.zip ..
	cd ..
	rm -rf packages
fi
# Heredoc-as-block-comment: the create-function call below is kept for
# reference but never executed.
<<COMMENT
aws lambda create-function \
--region us-east-1 \
--function-name otnskills \
--zip-file fileb://packages.zip \
--role "arn:aws:iam::065056466896:role/APIGatewayLambdaExecRole" \
--environment Variables={STAGE=production} \
--handler lambda_function.lambda_handler \
--runtime python2.7 \
--profile default
COMMENT
aws lambda update-function-code --function-name otnskills --zip-file fileb://packages.zip
rm -rf ./packages.zip
| true |
d02c84bb35dddd23eb13ba5cfa76634dfb5ab819 | Shell | ivapanic/skrj-2019-2020 | /LAB1/zadatak5.sh | UTF-8 | 210 | 3.34375 | 3 | [] | no_license | #!/bin/bash
# Usage: zadatak5.sh <directory> <name-pattern>
# Prints both arguments, then counts lines (wc -l) of every file under
# the directory whose name matches the pattern.
echo "$1"
echo "$2"
if [ $# -ne 2 ] ; then
  echo "Unesite kazalo i zeljeni oblik imena"
else
  ime_direktorija="$1"
  ime_datoteka="$2"
  # Bug fix: quote the directory (paths with spaces used to word-split)
  # and abort if cd fails; NUL-delimit the find/xargs pipeline so file
  # names containing whitespace are counted correctly.
  cd "$ime_direktorija" || exit 1
  find . -name "$ime_datoteka" -print0 | xargs -0 wc -l
fi
| true |
4a0f5f2b35e24376cbcdcf86f5725c5cf3de7aee | Shell | sean-jc/settings | /bin/l2h | UTF-8 | 433 | 3.296875 | 3 | [] | no_license | #!/bin/bash
# pushd, silenced: behaves exactly like the builtin but suppresses the
# directory-stack listing it normally prints to stdout.
pushd() {
	command pushd "$@" > /dev/null
}
# popd, silenced: return to the previous directory without printing the
# remaining stack.
popd() {
	command popd "$@" > /dev/null
}
# Replay the uncommitted working-tree changes from the `linux` checkout onto
# the `host` checkout. Both trees must be on the same commit.
set -e
pushd ~/go/src/kernel.org/linux
# --no-prefix drops the a/ b/ path prefixes so the diff applies with -p0.
patch=$(git diff --no-prefix)
rev=$(git rev-parse HEAD)
popd
pushd ~/go/src/kernel.org/host
hrev=$(git rev-parse HEAD)
if [[ $rev != $hrev ]]; then
    printf "linux and host must be on the same commit"
    exit 1
fi
# Discard any local edits in host, then apply the captured diff on top.
git reset --hard HEAD
echo "$patch" | patch -p0
popd
| true |
c9fe642912d9337a56905cf8349089b71ff293d6 | Shell | vandalt/dotfiles-archive | /.config/i3/scripts/podcast | UTF-8 | 311 | 3 | 3 | [] | no_license | #!/bin/bash
# Print the show name, but only when the playing media's metadata comes from
# the "cumulonimbus" podcast app; print nothing otherwise.
pod_print() {
    local meta show
    meta="$(playerctl metadata 2>/dev/null)"
    # Fix: quote "$meta" (the original unquoted `echo $meta` word-split the
    # multi-line metadata) and feed it via printf so it reaches grep intact.
    if printf '%s\n' "$meta" | grep -q 'cumulonimbus'; then
        show="$(playerctl metadata artist 2>/dev/null)"
        echo "$show"
    fi
}

# Left click toggles play/pause. Default BLOCK_BUTTON to 0 so an unset or
# missing environment variable never trips the arithmetic test.
if [[ "${BLOCK_BUTTON:-0}" -eq 1 ]]; then
    playerctl play-pause 2>/dev/null
fi

pod_print
| true |
5958e5a8d01e061c429d9e1bfdb72e440fe01f2d | Shell | opentracing-contrib/nginx-opentracing | /ci/install_opentracing.sh | UTF-8 | 335 | 2.8125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
set -e

# Fall back to a pinned release tag when the caller did not set one.
if [ -z "${OPENTRACING_VERSION}" ]; then
  export OPENTRACING_VERSION="v1.6.0"
fi

# Build OpenTracing: clone the tagged release, configure a minimal build
# (no mock tracer, no tests) and install it. set -e aborts on any failure.
cd /
git clone -b ${OPENTRACING_VERSION} https://github.com/opentracing/opentracing-cpp.git
cd opentracing-cpp
mkdir .build
cd .build
cmake \
  -DBUILD_MOCKTRACER=OFF \
  -DBUILD_TESTING=OFF \
  ..
make
make install
| true |
a7c5756f4d3b05f033e0be0f054a5f6ef5e23081 | Shell | dmitry-ti/twitch-download | /twitch-stream-download.sh | UTF-8 | 2,707 | 3.890625 | 4 | [] | no_license | #!/bin/bash
# Hard-coded Twitch API client id, sent with the access-token request in getAccess.
CLIENT_ID="jzkbprff40iqj646a697cyrvl0zt2m6"
# Name of the running playlist file written inside each recording directory.
OUTPUT_PLAYLIST="output.m3u8"
# Fetch the access-token JSON for a channel from the Twitch API.
# $1 = channel name, $2 = client id. curl progress output is discarded.
getAccess() {
  local chan="$1"
  local cid="$2"
  curl "https://api.twitch.tv/api/channels/${chan}/access_token?client_id=${cid}" 2> /dev/null
}
# Download the channel's HLS master playlist from usher.ttvnw.net.
# $1 = channel, $2 = access token (URL-encoded by curl -G), $3 = signature.
getMasterPlaylist() {
  local chan="$1" tok="$2" signature="$3"
  curl -G --data-urlencode "token=$tok" \
    "https://usher.ttvnw.net/api/channel/hls/$chan.m3u8?sig=$signature&allow_source=true" 2> /dev/null
}
# Extract a top-level field from a JSON document using jq.
# $1 = JSON text, $2 = field name.
getJSONField() {
  local doc="$1" field="$2"
  echo "$doc" | jq -r ".$field"
}
# Return the first line of the master playlist that is a bare .m3u8 URL.
# Metadata lines start with '#', so the anchored regex never matches them.
getMediaPlaylistUrl() {
  local master="$1"
  echo "$master" | grep -E "^https?:\/\/(www\.)?.*\.m3u8$" | head -n 1
}
# Download the media playlist itself; curl's progress noise is discarded.
getMediaPlaylist() {
  local url="$1"
  curl "$url" 2>/dev/null
}
# Print the value of an EXT tag from a playlist.
# $1 = playlist text, $2 = tag name without the leading '#'.
# Fix: the substitution is anchored at the start of the line, so a tag name
# appearing elsewhere (e.g. inside a segment URL) is never rewritten.
# NOTE: $tag is interpolated into the sed expression; callers pass fixed tag
# names, so no regex metacharacters are expected.
getPlaylistTag() {
  local mediaPlaylist="$1"
  local tag="$2"
  echo "$mediaPlaylist" | sed -n "s/^#$tag://p"
}
# Download one media segment unless it was already fetched.
# $1 = segment sequence number, $2 = segment URL, $3 = output directory.
processMediaSegment() {
  local segmentNumber="$1"
  local segmentUrl="$2"
  # Fix: assign outputDir BEFORE using it. The original read $outputDir in
  # the grep below and only declared `local outputDir="$3"` afterwards,
  # which worked solely via bash's dynamic scoping of the caller's variable.
  local outputDir="$3"

  # Skip already-downloaded segments. The dot is escaped so it cannot match
  # an arbitrary character; -q suppresses the matched line the original
  # printed; 2>/dev/null covers the first call, before the playlist exists.
  if grep -q "^${segmentNumber}\.ts$" "$outputDir/$OUTPUT_PLAYLIST" 2>/dev/null; then
    return
  fi

  local segmentOutputName="$segmentNumber.ts"
  echo "downloading segment $segmentOutputName"
  wget -O "$outputDir/$segmentOutputName" "$segmentUrl" &> /dev/null
  echo "$segmentOutputName" >> "$outputDir/$OUTPUT_PLAYLIST"
}
# Record a live Twitch stream. $1 = channel name. Creates a timestamped
# output directory, resolves the media playlist once, then polls it forever,
# downloading any new segments and sleeping for the advertised duration.
main() {
  local channel="$1"
  if [ -z "$channel" ]; then
    echo "Error: channel name must be provided"
    return
  fi
  local outputDir
  outputDir="${channel}_$(date +%Y-%m-%d_%H-%M-%S)"
  mkdir "$outputDir"
  if [ $? -ne 0 ] ; then
    echo "Error: could not create output directory: $outputDir"
    return
  fi
  local masterPlaylist
  local mediaPlaylistUrl
  local mediaPlaylist
  local targetDuration
  local mediaSequence
  local access
  local token
  local sig
  # Token + signature from the API are required to request the playlists.
  access=$(getAccess "$channel" "$CLIENT_ID")
  token=$(getJSONField "$access" "token")
  sig=$(getJSONField "$access" "sig")
  masterPlaylist=$(getMasterPlaylist "$channel" "$token" "$sig")
  mediaPlaylistUrl=$(getMediaPlaylistUrl "$masterPlaylist")
  if [ -z "$mediaPlaylistUrl" ]; then
    echo "Error: Could not get media playlist url"
    return
  fi
  # Poll loop: re-fetch the media playlist, download new segments, then
  # sleep for the playlist's target duration before polling again.
  while true
  do
    mediaPlaylist=$(getMediaPlaylist "$mediaPlaylistUrl")
    targetDuration=$(getPlaylistTag "$mediaPlaylist" "EXT-X-TARGETDURATION")
    mediaSequence=$(getPlaylistTag "$mediaPlaylist" "EXT-X-MEDIA-SEQUENCE")
    if [ -z "$mediaSequence" ]; then
      echo "Error: Could not get media sequence"
      return
    fi
    # Non-comment lines are segment URLs. The while body runs in a pipeline
    # subshell, so mediaSequence++ persists only within this playlist pass;
    # the next poll re-reads EXT-X-MEDIA-SEQUENCE from the server.
    echo "$mediaPlaylist" | grep -v "^#" | while IFS= read -r segmentUrl ; do processMediaSegment "$((mediaSequence++))" "$segmentUrl" "$outputDir"; done
    sleep "$targetDuration"
  done
}
main "$@"
| true |
1329f337386f630f1f0f2aefaa24d13451bf48b8 | Shell | abidmunirmalik/shell-scripts | /case.sh | UTF-8 | 1,214 | 4.125 | 4 | [] | no_license | #!/usr/bin/env bash
#
# The block below fakes a multi-line comment: `:` is a no-op and the odd
# number of quotes in `"""` opens a string that swallows the following
# lines (which are still expanded by the shell). Fragile — a real `#`
# comment block would be safer.
: """
Syntax of Case:
case $variable in
option1)
statement
;;
option2)
statement
;;
*)
statement
;;
esac
"""
# Exercise: Guess number — echo back a number between 1 and 5, or exit 100
# for anything else (read without -r interprets backslashes).
read -p "Enter number between 1 - 5: " n
case $n in
1)
echo "your number 1"
;;
2)
echo "your number 2"
;;
3)
echo "your number 3"
;;
4)
echo "your number 4"
;;
5)
echo "your number 5"
;;
*)
echo "number not between 1 and 5"
exit 100
;;
esac
# Exercise - find best actor: dispatch a menu choice to one of four functions.
tarzen() { echo "Tarzen...."; }
superman() { echo "Superman...."; }
spiderman() { echo "Spiderman...."; }
captain() { echo "Captain America...."; }
echo "1 - Tarzen."
echo "2 - Superman."
echo "3 - Spiderman."
echo "4 - Captain America."
read -p "What is your favorite character? " actor
case $actor in
1) tarzen ;;
2) superman ;;
3) spiderman ;;
4) captain ;;
*) echo "No one ?....";;
esac
# Exercise: find birth year — compute it from the current year and the age.
echo "[What is your Birth Year]"
current_year=$(date +"%Y")
read -p "How old are you? " age
birth_year=$((current_year - age))
echo "Were you born in $birth_year or $((birth_year - 1))?"
| true |
c183c72286bb9c081ba2532784477c1deff08b41 | Shell | lskatz/file-tracker | /scripts/ce_addFile.sh | UTF-8 | 1,647 | 4.25 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Register files that already live in the CE into the SQLite database:
# one FILE row plus an 'init' OPERATION row per path. Prints each op_id.
set -e
db=$1; shift
paths=$@
set -u
if [ "$paths" == "" ]; then
  echo "Adds a file to the CE database. The file should already be in the CE."
  echo "Usage: $0 database.sqlite path/to/file.fastq.gz [second/path/file.fastq.gz...]"
  exit 0
fi
# Make all directories first to help with directory foreign
# key requirements.
for path in $paths; do
  path=$(realpath $path)
  dirname=$(dirname $path)
  # Directory table
  dir_id=$(ce_addDir.sh "$db" "$dirname");
done
for path in $paths; do
  path=$(realpath $path)
  filename=$(basename $path)
  dirname=$(dirname $path)
  filesize=$(wc -c < "$path")
  md5sum=$(md5sum < "$path" | awk '{print $1}')
  # Directory table
  #dir_id=$(ce_addDir.sh "$db" "$dirname");
  # File table
  # TODO add in a first set of values with /dev/null or some comment from a cli parameter
  # NOTE(review): shell values are spliced directly into the SQL text below;
  # a path or file name containing a single quote breaks the statement.
  file_id=$(
    sqlite3 "$db" "
      PRAGMA foreign_keys=ON;
      INSERT INTO FILE(filename, dir_id, filesize, md5sum)
      VALUES ('$filename',
        (SELECT dir_id FROM DIRECTORY WHERE path='$dirname'),
        '$filesize', '$md5sum');
      SELECT last_insert_rowid();
    "
  )
  # File operation table
  # NOTE(review): $dir_id below is left over from the LAST iteration of the
  # first loop (the per-file recomputation above is commented out), so the
  # recorded to_dir looks wrong when paths span multiple directories — confirm.
  op_id=$(
    sqlite3 "$db" "
      PRAGMA foreign_keys=ON;
      INSERT INTO OPERATION(file_id, date, time, operation, to_name, to_dir)
      VALUES ($file_id,
        (SELECT DATE('now','localtime')),
        (SELECT TIME('now','localtime')),
        (SELECT op_enum_id FROM OPERATION_ENUM WHERE operation='init'),
        '$filename', $dir_id
      );
      SELECT last_insert_rowid();
    "
  )
  echo "Initialized file $path into $db" >&2
  echo "$op_id"
done
| true |
d27dcc39bada33ebfa295279beb79eb78ba60997 | Shell | zanud/xds-2.60 | /build/cUnix/build.sh | UTF-8 | 705 | 3.3125 | 3 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
# Print a per-component build verdict. $1 = exit status, $2 = component name.
report() {
	if test $1 -gt 0; then
		echo $2 build failed;
	else
		echo $2 build ok;
	fi
}

# Normalize line endings and permissions across the release tree and sources.
setAttrs() {
	find ./enduser/ -type f -not -iname '*\.*' -exec dos2unix '{}' \;
	find ./enduser/ -type f -iname '*\.*' -exec chmod 664 '{}' \;
	find ./enduser/ -type f -not -iname '*\.*' -exec chmod 775 '{}' \;
	find ./enduser/ -type d -exec chmod 775 '{}' \;
	find src/ -type f -iname '*\.*' -exec chmod 664 '{}' \;
}

setAttrs

# Fetch the end-user tree on first run. BUG FIX: the original tested
# "enduser\xds" — a literal backslash in the file name, which never exists
# on Unix — so get.sh ran on every invocation.
if [ ! -e "enduser/xds" ]
then
	./get.sh
fi

./makexds-c.sh clean all
XDS_C_RESULT=$?
./pack-xds.sh
PACK_XDS_RESULT=$?

echo
echo Build results
echo
report $XDS_C_RESULT xds-c
report $PACK_XDS_RESULT xds-setup-package
| true |
cf689e5306cf78e1f8509c057a9446bcb041f3e0 | Shell | palladius/pasta | /.vecchiume/docker-experiments/20201226-ruby18/install-random-docker.sh | UTF-8 | 854 | 3.484375 | 3 | [] | no_license | #!/bin/bash
# Build or run a "multipasta" docker image. $1 = build|run, $2 = the
# Dockerfile name the image tag is derived from. All chatty output is piped
# through `lolcat`, which must be installed for the echoes to show.
ACTION=${1:-_NO_ACTION_}
UGLY_TARGET=${2:-NO_TARGET} # eg Dockerfile.ruby18-[build|run]
# Strip the "Dockerfile." prefix and any -build/-run suffix to get the tag.
TARGET=$(echo $UGLY_TARGET | cut -d . -f 2 | sed -e s/-build// | sed -e s/-run// ) # -> ruby18
echo "Hi from: $0. Args: $*" | lolcat
echo "ACTION: $ACTION. Target: $TARGET" | lolcat
if [ "build" = "$ACTION" ]; then
	echo "lets build. To run, just call me with run instead of build"
	docker build -t multipasta:$TARGET - < Dockerfile.$TARGET # " | lolcat
	#exit 0
else
	if [ "run" = "$ACTION" ]; then
		echo lets run | lolcat
		#echo "docker run -it -p 8080:8080 multipasta:$TARGET --volume TODO" | lolcat
		# Interactive shell in the image, with the local pasta checkout mounted.
		docker run -it -p 8080:8080 \
			-v /home/riccardo/git/gcp-metarepo/pasta/:/pasta-mounted-volume/ \
			multipasta:$TARGET bash
	else
		echo 'call me with "run" or "build" mofo'
	fi
fi
| true |
f647eea5df308e35a43ce6a1d59864c67c4c440f | Shell | austinkelleher/marko-snackbars | /jshint.sh | UTF-8 | 176 | 2.5625 | 3 | [] | no_license | echo "Running jshint..."
./node_modules/.bin/jshint --exclude-path .jshintignore .
if [ $? != 0 ]; then
echo "Found JSHINT Errors"
exit 1
fi
echo "No JSHINT errors"
| true |
7af14781cb78cbb84334ef3af5a5b80d952fab9a | Shell | zhangxiang-planet/GLEAM-X-pipeline | /bin/autoflag.tmpl | UTF-8 | 1,441 | 2.8125 | 3 | [] | no_license | #! /bin/bash -l
# SLURM batch template: HOST, ACCOUNT, TASKLINE, OBSNUM and DATADIR are
# placeholders substituted by the pipeline before submission.
#SBATCH --export=NONE
#SBATCH -M HOST
#SBATCH -p workq
#SBATCH --account=ACCOUNT
#SBATCH --time=01:00:00
#SBATCH --nodes=1
TASKLINE
# Mark the tracked task failed and abort when the given exit code is non-zero.
function test_fail {
if [[ $1 != 0 ]]
then
    track_task.py fail --jobid=${SLURM_JOBID} --taskid=1 --finish_time=`date +%s`
    exit $1
fi
}
# start
track_task.py start --jobid=${SLURM_JOBID} --taskid=1 --start_time=`date +%s`
#cores=`grep -P '^core id\t' /proc/cpuinfo | wc -l`
obsnum=OBSNUM
datadir=DATADIR
cd ${datadir}/${obsnum}
#metafits="${obsnum}.metafits"
#if [[ ! -e ${metafits} ]]
#then
#    wget "http://mwa-metadata01.pawsey.org.au/metadata/fits?obs_id=${obsnum}" -O ${metafits}
#    test_fail $?
#fi
# Most long-baseline observations have problems with these two antennas
# Set up telescope-configuration-dependent options
if [[ $obsnum -gt 1151402936 ]]
then
    flags="76 80 "
fi
# Rx7 has a fibre problem on nights of 2019-01-08 and 2019-06-17
if [[ $obsnum -ge 1244798800 ]] && [[ $obsnum -le 1244849032 ]]
then
    flags="$flags 48 49 50 51 52 53 54 55"
fi
# Tile indices 60 and 63 also behave weirdly -- one polarisation has totally different calibration, as if it's not pointing correctly
if [[ $obsnum -ge 1203937000 ]] && [[ $obsnum -le 1203975000 ]]
then
    flags="$flags 48 49 50 51 52 53 54 55 60 63"
fi
# Apply the accumulated antenna flags, if any, to the measurement set.
if [[ ! -z ${flags} ]]
then
    flagantennae ${obsnum}.ms $flags
fi
track_task.py finish --jobid=${SLURM_JOBID} --taskid=1 --finish_time=`date +%s`
| true |
26fb8b510c3292d7512fd14abff585bf811cdac3 | Shell | acapello/PLN-2015 | /tagging/scripts/train_all_models.sh | UTF-8 | 2,910 | 2.578125 | 3 | [] | no_license | #! /usr/bin/bash
# You must be in the virtualenv ($ workon pln-2015) to run this script

SCRIPT_DIR="$(dirname $0)"

mkdir $SCRIPT_DIR/../models
M=$SCRIPT_DIR/../models
# ORIGINAL_DIR="$(pwd)"

echo -n > $M/train_output.txt
echo "Redirecting output to $M/train_output.txt"
echo "Working..."

# Train one model configuration, appending stdout and `time -p` timing to
# the shared log. $1 = progress label, $2 = output model path, remaining
# arguments are forwarded to train.py. (Replaces 30+ copy-pasted lines.)
train_one() {
    local label="$1"; shift
    local out="$1"; shift
    echo "Training $label..."
    { time -p python $SCRIPT_DIR/train.py "$@" -o "$out" >> $M/train_output.txt ;} 2>> $M/train_output.txt
}

train_one "base" $M/base -m base

for n in 1 2 3 4; do
    train_one "mlhmm n=$n" $M/mlhmm-$n -m mlhmm -n $n
done

for c in logreg nb; do
    for n in 1 2 3 4; do
        train_one "memm $c n=$n" $M/memm-$c-$n -m memm -n $n -c $c
    done
done

# memm svc n=4 is issued by the original lines that follow this block.
for n in 1 2 3; do
    train_one "memm svc n=$n" $M/memm-svc-$n -m memm -n $n -c svc
done
echo "Training memm svc n=4..."
{ time -p python $SCRIPT_DIR/train.py -m memm -n 4 -c svc -o $M/memm-svc-4 >> $M/train_output.txt ;} 2>> $M/train_output.txt | true |
0e139b9975885ad46178d336de1bb429cf26f590 | Shell | Blackfynn/blackfynn-matlab | /setup/setup.sh | UTF-8 | 708 | 3.296875 | 3 | [
"MIT",
"Apache-2.0"
] | permissive | #!/bin/bash
# Build the Blackfynn MATLAB toolbox: compile the Java helpers into a jar,
# record the git version, then run the MATLAB packaging script.
BUILD_DIR="build"
MATLAB_LOC="matlab"
# Uncomment and edit line below if building toolbox on local machine
#MATLAB_LOC="/Applications/MATLAB_R2017b.app/bin/matlab"
[ -d $BUILD_DIR ] || mkdir ${BUILD_DIR}
echo "Copying jars..."
# Target Java 8 bytecode for MATLAB compatibility.
javac -cp ../java/protobuf-java-3.5.1.jar ../java/*.java -d . -source 1.8 -target 1.8
jar cf blackfynio.jar blackfynn/*.class
# Get build version of toolbox
tag=$(git describe --tags); echo "$tag">build/matlab_version.txt
echo "Building from tag: $tag"
echo -e "Running matlab build..."
${MATLAB_LOC} -nodisplay -nodesktop -r "run ./createToolbox.m"
# Fail loudly if MATLAB did not produce the toolbox artifact.
[ -f build/blackfynn.mltbx ] || { echo "ERROR: build failed /build/blackfynn.mltbx doesn't exist." && exit 1; }
| true |
8a5fae194a2f8260bd5e7ad465222af47789ee4a | Shell | mikeg2105/matlab-old | /matlab/dctcourse/ex4_parmatlab_transport/sendc/mpi.sh | UTF-8 | 1,062 | 2.515625 | 3 | [] | no_license | #!/bin/bash
# Grid Engine (SGE) job script: the #$ lines are scheduler directives.
#
# Set the name of the job (optional)
#$ -N MPI_Job
#
# parallel environment request - Edit the following line to change the number of processors that you wish to use
#$ -pe openmpi-gnu-ether 1
#$ -P parmatlab
#
# You must request the parallel queue as below along with request the mpich-gm parallel environment
##$ -q parallel.q
#
# MPIR_HOME from submitting environment
##$ -v MPIR_HOME=/usr/local/mpich-gm2_PGI
##$ -v MPIR_HOME=/usr/local/packages/openmpi-gnu-ether
# ---------------------------
#MPIR_HOME=/usr/local/packages/openmpi-gnu-ether
MPIR_HOME=/usr/local/packages/mpich-gnu-gm
echo "Got $NSLOTS slots."
# enables $TMPDIR/rsh to catch rsh calls if available
#set path=($TMPDIR $path)
# Edit the following line if you wish to run a different binary
#( echo 100 ; echo 100 ) | $MPIR_HOME/bin/mpirun -np $NSLOTS -machinefile $TMPDIR/machines #~/EXAMPLE/parallel-gm/PMB/PMB-MPI1
# NOTE(review): mpirun uses a hard-coded -np 8 and a local "machines" file
# while the job requests $NSLOTS slots above — confirm this is intentional.
$MPIR_HOME/bin/mpirun -np 8 -machinefile machines ./ex1
#/usr/local/sge6.0/mpi/myrinet/sge_mpirun ./ex1
#/usr/local/sge6.0/mpi/sge_mpirun ./ex1
| true |
6c39729e13ecde2be04139a10cbf4be0455eaddf | Shell | necrose99/alpine-arm64 | /hooks/push.old | UTF-8 | 392 | 2.640625 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Read the freshly built version and retag the images, then push every tag.
VERSION=$(cat VERSION)

echo "Tagging and pushing..."
# The 3.6 build doubles as "latest".
for alias in latest 3.6; do
    docker tag $DOCKER_REPO:3.6-$VERSION $DOCKER_REPO:$alias
done
docker tag $DOCKER_REPO:3.5-$VERSION $DOCKER_REPO:3.5
# Push the bare repo ref first, then the versioned and minor tags.
for ref in "" ":3.6-$VERSION" ":3.6" ":3.5-$VERSION" ":3.5"; do
    docker push $DOCKER_REPO$ref
done
| true |
1540122c58655499c007762ff6be4651dd3989ca | Shell | utooley/motion-FC-metrics | /transfer_HCP_movementData.sh | UTF-8 | 629 | 2.78125 | 3 | [] | no_license | #!/bin/bash/
# Copy each HCP subject's Movement_RelativeRMS_mean.txt for one resting-state
# run from the mounted CMVolumes share into a local per-task folder, renaming
# the file with the subject id. Subject ids come from column 1 of the
# demographics CSV (header row skipped).
# NOTE(review): the file's shebang is "#!/bin/bash/" — the trailing slash is
# malformed and should be removed.
task=rfMRI_REST2_RL;
mkdir /Users/ArunMahadevan/Documents/hcp_Max/Motion_S1200/$task
ARRAY=( $(tail -n +2 /Users/ArunMahadevan/Documents/hcp_Max/Covariates/S1200_Release_Subjects_Demographics.csv | cut -d , -f1))
for SUB in ${ARRAY[@]}
do
cp /Users/ArunMahadevan/.CMVolumes/asm7/HCP_1200/$SUB/MNINonLinear/Results/$task/Movement_RelativeRMS_mean.txt /Users/ArunMahadevan/Documents/hcp_Max/Motion_S1200/$task
mv /Users/ArunMahadevan/Documents/hcp_Max/Motion_S1200/$task/Movement_RelativeRMS_mean.txt /Users/ArunMahadevan/Documents/hcp_Max/Motion_S1200/$task/${SUB}_Movement_RelativeRMS_mean.txt
echo $SUB
done
| true |
23cdecb2469c4d7ce6d56957b39aaab162b88029 | Shell | mehulsbhatt/rpm-mesos | /mesos-master | UTF-8 | 1,078 | 4 | 4 | [] | no_license | #!/bin/bash
#
# /etc/init.d/mesos-master
#
# Startup script for mesos-master
#
# chkconfig: 2345 20 80
# description: Starts and stops mesos-master

. /etc/init.d/functions

prog="mesos-master"
mesosBin="/usr/sbin/$prog"
desc="Mesos Master daemon"
outFile="/var/log/mesos/$prog.out"
# Fix: initialize these so the trailing `exit $RETVAL` and the status check
# never reference unset variables (stop/status previously left RETVAL unset).
RETVAL=0
pid=""

if ! [ -f "$mesosBin" ]; then
    echo "$prog binary not found."
    exit 5
fi

# Optional overrides (e.g. $mesosUser) supplied by the package config.
if [ -f /etc/sysconfig/mesos ]; then
    . /etc/sysconfig/mesos
fi

# Launch the daemon detached as $mesosUser, appending all output to $outFile.
start() {
    echo "Starting $desc ($prog): "
    su "$mesosUser" -c "nohup $mesosBin --quiet --conf=/etc/mesos/conf/ >>$outFile 2>&1 &"
    RETVAL=$?
    return $RETVAL
}

stop() {
    echo "Shutting down $desc ($prog): "
    pkill -f "$mesosBin"
    RETVAL=$?
}

restart() {
    stop
    start
}

# Report whether a mesos-master process exists; RETVAL=1 when it does not.
status() {
    if [ -z "$pid" ]; then
        pid=$(pgrep -f "$mesosBin")
    fi
    if [ -z "$pid" ]; then
        echo "$prog is NOT running."
        RETVAL=1
        return 1
    else
        echo "$prog is running (pid is $pid)."
        RETVAL=0
    fi
}

case "$1" in
    start) start;;
    stop) stop;;
    restart) restart;;
    status) status;;
    *) echo "Usage: $0 {start|stop|restart|status}"
       RETVAL=2;;
esac
exit $RETVAL | true |
c23c9529e52353d65e53a6d9e9a3987a7a24e917 | Shell | vriveraq/introduction-to-data-science | /Homework8.sh | UTF-8 | 4,711 | 3.640625 | 4 | [] | no_license | # GOAL: Learn basic commands of UNIX/ Linux
#!/bin/bash
# Solutions file: each section prints the answer to one homework question.
# You can use the following commands to run the script on google cloud:
# chmod +x HW8_490IDS_YOURCLASSID.sh
# ./HW8_490IDS_YOURCLASSID.sh Argument_1 Argument_2 Argument_3 Argument_4
# ./HW8_490IDS_solution.sh 4 Homework_8.txt make 3
# Here is a list of your input arguments:
# Argument_1: a positive number
# Argument_2: text file (Homework_8.txt)
# Argument_3: a word
# Argument_4: a positive integer which is less than 15
# Q1. Calculate the square root of the input number(Argument_1) and print your result. (Hint: bc)
# install bc package
# sudo apt-get install bc
echo "************ Q1 ************"
echo "The square root of $1:"
# Your answer here:
echo "sqrt ( $1 )" | bc -l
# Q2. Check whether your input integer(Argument_4) is even or odd and print your result
echo "************ Q2 ************"
# Your answer here:
rem=$(( $4 % 2 ))
if [ $rem -eq 0 ]
then
echo "$4 is even number"
else
echo "$4 is odd number"
fi
# Q3. Input a lowercase letter(Argument_3) and convert it to uppercase and print your result. (Hint: tr)
echo "************ Q3 ************"
# Your answer here:
echo $3 | tr a-z A-Z
# Q4. Convert the following phrase "CS 498/LIS 490/STAT 430:INTRODUCTION
# TO DATA SCIENCE" into separate words, and put each word on its own
# line (ignoring space,'/' and ':').
# The output would look like:
# CS
# 498
# LIS
# 490
# STAT
# 430
# INTRODUCTION
# TO
# DATA
# SCIENCE
echo "************ Q4 ************"
# Your answer here:
echo "CS 498/LIS 490/STAT 430:INTRODUCTION TO DATA SCIENCE"|
tr -c '[:alnum:]' '\n'
# Q5. Sort the answer in Q4 by descending order. The output would look like:
# TO
# STAT
# SCIENCE
# LIS
# INTRODUCTION
# DATA
# CS
# 498
# 490
# 430
echo "************ Q5 ************"
# Your answer here:
echo "CS 498/LIS 490/STAT 430:INTRODUCTION TO DATA SCIENCE"|
tr -c '[:alnum:]' '\n' | sort -r
# Q6. The dataset menu.csv provides some nutrition facts for McDonald's
# menu, calculate how many items are there in each category and print your result. (Hint: awk)
echo "************ Q6 ************"
# Your answer here:
awk -F, 'NR!=1{a[$1]++;}END{for (i in a)print i, a[i];}' menu.csv
# Q7. For your output in Q6, change the format of categories, replace "&"
# with word "and", and connect the words by "_".
# For example: "Chicken & Fish" ---> "Chicken_and_Fish" (Hint: sed)
# The output would look like:
# Smoothies_and_Shakes 28
# Coffee_and_Tea 95
# Salads 6
# ......
echo "************ Q7 ************"
# Your answer here:
awk -F, 'NR!=1{a[$1]++;}END{for (i in a)print i, a[i];}' menu.csv|
sed 's/ & /_and_/g'
# Q8. Count the lines of your text file(Argument_2). (Hint: wc)
echo "************ Q8 ************"
echo "The number of lines in $2:"
# Your answer here:
# NOTE(review): +1 presumably compensates for a final line without a
# trailing newline (wc -l counts newlines only) — confirm for your input.
line=$(wc -l < $2)
echo "$line+1" | bc -l
# Q9. Count the frequency of a input word(Argument_3) in a text
# file(Argument_2),and print "The frequency of word ____ is ____ ". (Hint: grep)
echo "************ Q9 ************"
echo "The frequency of word $3:"
# Your answer here:
cat $2 | grep -ow $3 | wc -l
# Q10. Print the number of unique words in the text file(Argument_2).
echo "************ Q10 ************"
echo "The number of unique words in text file:"
# Your answer here:
uniqwords=$(tr -c '[:alnum:]' '\n' < $2 | sort | uniq |wc -l)
echo "$uniqwords-1" | bc -l
# or
echo 'If convert all upper case letters into lower case,
the number of unique words in text file is:'
tr '[:upper:]' '[:lower:]' < $2 | tr -d '[:punct:]' | tr -s ' ' '\n' | sort | uniq | tail -n+2 | wc -l
# Q11. Print the number of words that begins with letter 'a' in the
# text file(Argument_2) (5 points).
echo "************ Q11 ************"
echo "The number of words that begins with letter 'a':"
# Your answer here:
tr -c '[:alnum:]' '\n' < $2 | grep ^a | wc -l
# or
echo 'If convert all upper case letters into lower case,
the number of words that begins with letter "a" is:'
tr '[:upper:]' '[:lower:]' < $2 | tr -d '[:punct:]' | tr -s ' ' '\n' | sort | grep -o '\ba\w*'| wc -l
# or
echo 'The number of unique words that begins with letter "a" is:'
tr -c '[:alnum:]' '\n' < $2 | grep ^a |sort|uniq| wc -l
# Q12. Print top-k(Argument_4) frequent word and their frequencies.
# (Hint: uniq, sort, head) (5 points).
echo "************ Q12 ************"
echo "Top-$4 words are:"
# Your answer here:
cat $2 | tr -d '[:punct:]' | tr -s ' ' '\n' | sort | uniq -c | tail -n+2 | sort -bnr | head -$4
# or
echo "If convert all upper case letters into lower case, top-$4 words are:"
tr '[:upper:]' '[:lower:]' < $2 | tr -d '[:punct:]' | tr -s ' ' '\n' | sort | uniq -c | tail -n+2 | sort -bnr | head -$4
| true |
e165a1272c5cfc8bafbaf5c3016cd838fda61d17 | Shell | vlad17/misc | /remotebuild/remote-build.sh | UTF-8 | 3,346 | 3.96875 | 4 | [] | no_license | #!/bin/bash
#
# Script run on the dev workstation to ship your repo
# to the remote build.
HELP_SCREEN="Options are:
--debug regular debug build*
--release regular release build*
--tsan debug build with thread sanitizer*
--asan debug build with address sanitizer*
--ninja use ninja instead of make
--cmake clean cmake files
--clean clean object files
--git clean clean git ignored files
--clang use clang compiler
--no-compile-all no compilation
--no-test no testing
--help show help screen
*build type specifications will require cmake file regeneration.
Additionally, tsan and asan flags will cause clang to be used.
These specifications override other flags.
"
TARGETS=
REMOTE_REF=refs/heads/test
for arg in "$@" ; do
case $arg in
--debug)
REMOTE_REF=$REMOTE_REF%build_type=debug
;;
--release)
REMOTE_REF=$REMOTE_REF%build_type=release
;;
--tsan)
REMOTE_REF=$REMOTE_REF%build_type=tsan
;;
--asan)
REMOTE_REF=$REMOTE_REF%build_type=asan
;;
--client)
REMOTE_REF=$REMOTE_REF%build_type=client
;;
--ninja)
REMOTE_REF=$REMOTE_REF%builder=ninja
;;
--cmake)
REMOTE_REF=$REMOTE_REF%clean=cmake
;;
--clean)
REMOTE_REF=$REMOTE_REF%clean=make
;;
--git-clean)
REMOTE_REF=$REMOTE_REF%clean=git
;;
--clang)
REMOTE_REF=$REMOTE_REF%compiler=clang
;;
--no-compile-all)
REMOTE_REF=$REMOTE_REF%compile_all=false
;;
--no-test)
REMOTE_REF=$REMOTE_REF%test=false
;;
--help)
echo "$HELP_SCREEN"
exit 0
;;
--*)
echo bad arg: $arg
echo 'try \"remote-build.sh --help\" for a list of commands'
exit 1
;;
*)
if [ -z "$TARGETS" ]; then
TARGETS=$arg
else
TARGETS="$TARGETS;$arg"
fi
;;
esac
done
REMOTE_BUILD_REPO=$(git config remote-build.url)
if [ -z "$REMOTE_BUILD_REPO" ]; then
echo No remote build repo configured.
echo
echo Please use 'git config remote-build.url ssh://host/path' to specify
echo the repository where the remote build is setup. For example:
echo git config remote-build.url \\
echo ssh://c0328.halxg.cloudera.com/home/todd/autotest/
exit 1
fi
set -e
set -o pipefail
COMMIT_ID=$(git stash create)
# In a non-dirty tree, it returns an empty string, so just push the current
# HEAD
if [ -z "$COMMIT_ID" ]; then
COMMIT_ID=$(git rev-parse HEAD)
fi
if [ ! -z "$TARGETS" ]; then
REMOTE_REF=$REMOTE_REF%targets=$TARGETS
fi
# Figure out the mapping from the remote repo path to the local
# one so that the output of the remote build can still be used in
# local emacs to jump to errors.
REMOTE_PATH=
LOCAL_PATH=
if [[ $REMOTE_BUILD_REPO =~ ssh://[^/]+(/.*) ]]; then
REMOTE_PATH=${BASH_REMATCH[1]}
LOCAL_PATH=$(cd ./$(git rev-parse --show-cdup) && pwd)/
echo remote: $REMOTE_PATH local: $LOCAL_PATH
fi
# Delete the old test ref if it exists. Otherwise we can't re-build
# when it's already been built.
git push -f $REMOTE_BUILD_REPO :$REMOTE_REF 2>/dev/null || :
git push -f $REMOTE_BUILD_REPO $COMMIT_ID:$REMOTE_REF 2>&1 | perl -p -e "s,$REMOTE_PATH,$LOCAL_PATH,g"
| true |
99fe59bcd6740befa33796aad249ae68e44deeb8 | Shell | chayan7/FlaGsV1.2.7 | /build.sh | UTF-8 | 1,473 | 3.921875 | 4 | [
"MIT"
] | permissive | #!/bin/sh
unset DYLD_LIBRARY_PATH

# When sourced ("." / "source"), $0 is the shell name rather than *.sh: ask
# the user to execute the script instead, and return out of the sourced
# context.
if ! echo "$0" | grep '\.sh$' > /dev/null; then
    printf 'Please run using "bash" or "sh", but not "." or "source"\\n' >&2
    return 1
fi

conv="$(conda -V 2>&1)"
# Fix: print the captured text as DATA ('%s'), never as a printf format
# string — a stray % in the output would otherwise corrupt or break it.
printf '%s' "$conv"
condaExec="$(echo "$conv" | grep 'not' >&1)"
if [ "$condaExec" != "" ];then
	printf "\\nPlease install Conda properly and run this again, Thanks. \\n"
	exit 2
fi
# Reaching here means conda responded (the "not found" branch exited above).
printf "\\nAdding FlaGs environment with all dependencies. \\n"

THIS_DIR=$(DIRNAME=$(dirname "$0"); cd "$DIRNAME"; pwd)
THIS_FILE=$(basename "$0")
THIS_PATH="$THIS_DIR/$THIS_FILE"
ENV_PATH="$THIS_DIR/env"
MAC_FILE="$ENV_PATH/eFlaGs.txt"
LIN_FILE="$ENV_PATH/elinFlaGs.txt"

# Shared post-install instructions (previously duplicated per platform).
print_usage_hint() {
	printf "\\nPlease type the following command from any terminal to activate FlaGs environment and run:\\n"
	printf ">> conda activate eFlaGs\\n"
	printf "\\nTo deactivate please type:\\n"
	printf ">> conda deactivate\\n"
	printf "\\n"
}

if [ "$(uname)" = "Linux" ];then
	conda create --name eFlaGs -f --file "$LIN_FILE"
	print_usage_hint
fi
if [ "$(uname)" = "Darwin" ]; then
	conda create --name eFlaGs -f --file "$MAC_FILE"
	print_usage_hint
fi

printf "One can run FlaGs by activating eFlaGs environment from any terminal.\\n"
printf "\\nInstallation complete, Thanks. \\n"
# Fix: the original ended with `exit 2`, signalling failure even after a
# successful install; report success instead.
exit 0
| true |
c0a925c31684126cff8d13f93b99174d6ed3aa26 | Shell | MobileMedTek/wercker-step-tick-version | /run.sh | UTF-8 | 1,136 | 3.828125 | 4 | [
"MIT"
] | permissive | #!/bin/sh
# Abort the wercker step (via its `fail` helper) when a required tool is
# missing. Fix: the original only echoed the problem and carried on, so the
# step kept running without git/npm; use `fail` consistently with
# validate_token below.
validate_dependencies() {
  if [ ! -e /usr/bin/git ]; then
    fail "git not installed"
  fi
  if [ ! -e /usr/bin/npm ]; then
    fail "npm not installed"
  fi
  if [ ! -d ".git" ]; then
    fail "No git repository found"
  fi
}

validate_token(){
  # Quote and default the expansion so an unset token is reported by `fail`
  # instead of breaking the [ -z ] test.
  if [ -z "${WERCKER_TICK_VERSION_TOKEN:-}" ]; then
    fail "No git token supplied"
  fi
}

# Bump the package patch version, commit the lockfile change, and push the
# commit plus the new tag using the token-authenticated remote URL.
main() {
  validate_dependencies;
  validate_token;
  local git_token="https://$WERCKER_TICK_VERSION_TOKEN@github.com/$WERCKER_GIT_OWNER/$WERCKER_GIT_REPOSITORY.git";
  npm config set git-tag-version true
  echo "set Git tag version config to true"
  git config --global user.email "builds@test.com"
  git config --global user.name "Build Pipeline"
  echo "set git config to builds@test.com and Build Pipeline"
  npm version patch -m 'Version Change'
  echo "Changed version in package.json"
  git add package-lock.json
  git commit -m "Version Change"
  echo "Committed change to package-lock.json"
  git push $git_token HEAD:$WERCKER_GIT_BRANCH
  git push --tags $git_token
  success "New version applied with git tag"
}

main;
| true |
20e8ebeaf340bd3c5e36650e65c0a567106bd1c4 | Shell | SKroell/rsync-deploy-action | /entrypoint.sh | UTF-8 | 345 | 3.125 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# Run in strict mode
set -eu

# Install the SSH deploy key with owner-only permissions. Fix: printf is
# used instead of echo so key material is never interpreted as echo options
# and backslash sequences are preserved verbatim.
SSH_PATH="$HOME/.ssh"
mkdir -p "$SSH_PATH"
printf '%s\n' "$DEPLOY_KEY" > "$SSH_PATH/deploy_key"
chmod 600 "$SSH_PATH/deploy_key"

# RSYNC Deployment. INPUT_RSYNC_ARGS is expanded unquoted inside the sh -c
# command line on purpose so it can carry multiple rsync options.
sh -c "rsync ${INPUT_RSYNC_ARGS} -e 'ssh -i $SSH_PATH/deploy_key -o StrictHostKeyChecking=no' $GITHUB_WORKSPACE/ ${INPUT_DEST}"
| true |
e1c18df3f0856eff569af857c76a1c4e810a17cf | Shell | csparker247/batchSegmenter | /batchSegmenter.sh | UTF-8 | 2,422 | 3.78125 | 4 | [] | no_license | #!/bin/sh
#batchSegmenter - Batch clipping videos with common start and end frames
# For every *.mp4 in the CWD: dump scaled thumbnails with ffmpeg, locate the
# frames matching tests/start.jpg and tests/end.jpg via ImageMagick compare,
# then re-encode only the span between them into output/.
echo
echo "======================================================================="
echo "batchSegmenter - Batch clipping videos with common start and end frames"
echo "======================================================================="
echo
#Make an output dir
if [[ ! -d output/ ]]; then
	mkdir output
fi
for movie in *.mp4; do
	echo "Starting work on $movie..."
	#Clear all variables
	log="output/$(basename "$movie" .mp4).txt"
	totalFrames=""
	startRange=""
	endRange=""
	metric=""
	adjustedStart=""
	adjustedEnd=""
	#Make a clean temp dir
	if [[ -d temp/ ]]; then
		rm -rf temp
		mkdir temp
	else
		mkdir temp
	fi
	#Generate frames for comparison
	echo "Making thumbnails..."
	ffmpeg -loglevel panic -i "$movie" -q:v 2 -vf scale=400:-1 -f image2 temp/%d.jpg
	#Set a search range for the start and end points
	echo "Setting search ranges for start and end frames..."
	# `ls -l` includes the "total" header line, hence the -1 correction.
	totalFrames=$(ls -l temp/ | wc -l)
	totalFrames=$(expr $totalFrames - 1)
	# Search the first 20% for the start frame, the last 20% for the end frame.
	startRange=$(echo "scale=0; ($totalFrames*.20)/1" | bc)
	endRange=$(echo "scale=0; ($totalFrames*.80)/1" | bc)
	#Get the latest frame that matches Start Frame - 1 (in case Start Frame is black)
	# NOTE(review): this loop breaks on the FIRST match (the earliest frame),
	# despite the comment saying "latest" — confirm which is intended.
	for i in $(seq 1 $startRange); do
		frame="temp/$i.jpg"
		metric=$(compare -metric MAE "$frame" tests/start.jpg null: 2>&1 | awk '{ print $1 }')
		metric=$(echo "scale=0; $metric/1" | bc)
		if [[ "$metric" -lt 1000 ]]; then
			#Add 1 (to get frame number of Start Frame) and convert to timecode in format ss.xxx
			adjustedStart=$(echo "scale=2; ($i + 1)/(24000/1001)" | bc)
			echo "start $adjustedStart" > "$log"
			break
		fi
	done
	#Get the latest frame that matches End Frame
	# NOTE(review): no break here — every matching frame appends an "end"
	# line to the log and adjustedEnd keeps the LAST match.
	for i in $(seq $endRange $totalFrames); do
		frame="temp/$i.jpg"
		metric=$(compare -metric MAE "$frame" tests/end.jpg null: 2>&1 | awk '{ print $1 }')
		metric=$(echo "scale=0; $metric/1" | bc)
		if [[ "$metric" -lt 1000 ]]; then
			#Convert to timecode in format ss.xx
			adjustedEnd=$(echo "scale=2; ($i - 2)/(24000/1001)" | bc)
			echo "end $adjustedEnd" >> "$log"
		fi
	done
	echo " Start: $adjustedStart End: $adjustedEnd"
	#Clip out the segment we want
	echo "Saving segmented clip..."
	ffmpeg -loglevel panic -i "$movie" -ss $adjustedStart -to $adjustedEnd -c:v libx264 -crf 16 -tune animation -level 41 -c:a copy -movflags faststart -y output/"$movie" && echo "Clip saved!"
	echo
done | true |
4c9449a629c82c86545d3bb57262007c209a7f4a | Shell | UlybinVitaliy/Diktofon | /tools/make_icons.bash | UTF-8 | 2,223 | 3.21875 | 3 | [
"Apache-2.0"
] | permissive | # Bash script that creates Android icons (png) of different sizes
# from a single input SVG file.
#
# Icon name prefixes determine the type (and the size) of the icon.
#
# Icons: ic: ic_star.png
# Launcher icons: ic_launcher: ic_launcher_calendar.png
# Menu icons: ic_menu: ic_menu_archive.png
# Status bar icons: ic_stat_notify: ic_stat_notify_msg.png
# Tab icons: ic_tab: ic_tab_recent.png
# Dialog icons: ic_dialog: ic_dialog_info.png
#
# @author Kaarel Kaljurand
# @version 2013-03-20
# @work_in_progress
#
# Requires: rsvg-convert. Input SVGs are read from $DIKTOFON_SRC/images,
# PNGs are written into the app's drawable-* resource directories.
dir_svg=${DIKTOFON_SRC}/images/
dir_png=${DIKTOFON_SRC}/app/res/
dir_png_xhdpi=${dir_png}/drawable-xhdpi/
dir_png_hdpi=${dir_png}/drawable-hdpi/
dir_png_mdpi=${dir_png}/drawable-mdpi/
# Launcher icon
launcher_logo_w=512
launcher_logo_h=512
launcher_xhdpi_w=96
launcher_xhdpi_h=96
launcher_hdpi_w=72
launcher_hdpi_h=72
launcher_mdpi_w=48
launcher_mdpi_h=48
# NOTE(review): the ldpi and v9 sizes below are declared but not used by
# any loop yet (script is marked work-in-progress).
launcher_ldpi_w=36
launcher_ldpi_h=36
# Status bar icons (Android 3.0+)
stat_xhdpi_w=48
stat_xhdpi_h=48
# Status bar icons (Android 2.2-)
stat_hdpi_w=38
stat_hdpi_h=38
stat_mdpi_w=25
stat_mdpi_h=25
stat_ldpi_w=19
stat_ldpi_h=19
# Status bar icons (Android 2.3+)
stat_hdpi_v9_w=24
stat_hdpi_v9_h=38
stat_mdpi_v9_w=16
stat_mdpi_v9_h=25
stat_ldpi_v9_w=12
stat_ldpi_v9_h=19
# Render every ic_stat_notify_*.svg at xhdpi/hdpi/mdpi sizes.
echo "Generating status bar notification icons:"
for path in ${dir_svg}ic_stat_notify_*.svg
do
filename=$(basename $path)
file=${filename%.*}
echo "$file"
rsvg-convert -f png -w ${stat_xhdpi_w} -h ${stat_xhdpi_h} -o ${dir_png_xhdpi}/$file.png $path
rsvg-convert -f png -w ${stat_hdpi_w} -h ${stat_hdpi_h} -o ${dir_png_hdpi}/$file.png $path
rsvg-convert -f png -w ${stat_mdpi_w} -h ${stat_mdpi_h} -o ${dir_png_mdpi}/$file.png $path
done
# Render the launcher icon: a 512px marketing logo next to the SVG plus
# the per-density resource PNGs.
echo "Generating launcher icon:"
for path in ${dir_svg}ic_launcher.svg
do
filename=$(basename $path)
file=${filename%.*}
echo "$file"
rsvg-convert -f png -w ${launcher_logo_w} -h ${launcher_logo_h} -o ${dir_svg}/$file.png $path
rsvg-convert -f png -w ${launcher_xhdpi_w} -h ${launcher_xhdpi_h} -o ${dir_png_xhdpi}/$file.png $path
rsvg-convert -f png -w ${launcher_hdpi_w} -h ${launcher_hdpi_h} -o ${dir_png_hdpi}/$file.png $path
rsvg-convert -f png -w ${launcher_mdpi_w} -h ${launcher_mdpi_h} -o ${dir_png_mdpi}/$file.png $path
done
| true |
6c10df90c594fe4479610380727cb20e51c20f8e | Shell | numb2007/tssh-1 | /bin/tssh-setup | UTF-8 | 2,465 | 4.28125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Copyright 2012 Derek Ashley Thomas
# tssh-setup: interactively build the ~/.tsshrc server list used by tssh.
SERV=$HOME/.tsshrc
# setup safe exit
# killListener (defined below) runs on every exit path and reports how
# many servers were added during this session.
trap 'killListener' EXIT
# scratch file used to validate new entries against $SERV
tmp=$(mktemp -t $(basename $0).XXX)
# EXIT-trap handler: if any servers were added this session, print a
# summary and remove the scratch file. Reads globals: count, tmp.
# NOTE(review): get_new_server must increment $count arithmetically; if
# it string-concatenates (e.g. "0+1"), the [[ -gt ]] test still
# evaluates the expression, but the printed value is garbled.
killListener() {
if [[ $count -gt 0 ]] ; then
echo
echo
echo "$(basename $0):$count servers added"
rm $tmp > /dev/null
fi
}
# function to retrieve a new server from the user
#
# Interactively prompts for a server record (name, address, user, port,
# free-text message), rejecting names and addresses already present in
# $SERV, then appends the record as "name|addr|user|port|msg".
# Globals: SERV (server list file), tmp (scratch file),
#          count (servers added this session; incremented on success).
get_new_server(){
while [[ true ]] ; do
# test for name
# make sure no repeat (a trailing '*' marks the central server and is
# stripped before comparison)
read -p "$(($count + 1)). Name: " name
awk -F'|' '{print $1}' $SERV | sed 's/\*$//g' > $tmp
test=$( grep "^${name}$" $tmp)
if [[ "x$test" == "x" ]] ; then
break
else
echo "*** A server with the name '$name' already exists"
fi
done
while [[ true ]] ; do
# test for address
# make sure no repeat
read -p " address: " addr
awk -F'|' '{print $2}' $SERV > $tmp
# fix: compare against the address that was just entered; the original
# grepped for ${name}, so duplicate addresses were never detected
test=$( grep "^${addr}$" $tmp)
if [[ "x$test" == "x" ]] ; then
break
else
echo "*** A server with the address '$addr' already exists"
fi
done
# now get username and port, which can be anything
# repetition is ok
read -p " user: " user
read -p " port: " port
echo " msg (not a password):"
read -p " > " msg
# stamp the server file with this server
echo "$name|$addr|$user|$port|$msg" >> $SERV
# change the count
# fix: arithmetic increment; 'count=$count+1' concatenated strings
# (e.g. "0+1"), garbling the summary printed by killListener
count=$((count+1))
}
# prints out the server file in a readable format
# Renders $SERV as an aligned table: a banner line, a padded header row,
# a separator, one formatted row per '|'-separated record, and the
# banner again.
print_server_file() {
local fmt='%-20s | %-16.16s | %-15s | %-4s | %s\n'
local banner=" ---- $(basename $SERV) ----"
echo "$banner"
# header row padded with the same column widths as the data rows
printf "$fmt" "name" "address" "user" "port" "message"
echo "********************************************************************************"
awk -F'|' '{ printf "%-20s | %-16.16s | %-15s | %-4s | %s\n",$1,$2,$3,$4,$5 }' $SERV
echo "$banner"
}
echo "$(basename $0): setup for server list"
# First run: create the server file and explain the record format.
# Otherwise show the current list, and stop here when called with -p
# (print-only mode).
if [[ ! -f $SERV ]] ; then
touch $SERV
echo " - creating new file: $SERV"
echo " - append a star at the end of servernames"
echo " for the central, port-forwarding server"
echo " - Port is optional, but is used for port forwarding"
echo ""
else
print_server_file
if [[ $1 == "-p" ]] ; then
exit
fi
fi
# number of existing entries; used for the prompt numbering and the
# exit summary printed by killListener
count=$(cat $SERV | wc -l)
# keep adding servers until the user interrupts (Ctrl-C / Ctrl-D);
# the EXIT trap then prints the summary
while [[ true ]] ; do
echo " - adding new servers"
get_new_server
done
| true |
5c5dbd83628442235ae5c5399b9747a7828735b1 | Shell | theposey/vim | /update_all.sh | UTF-8 | 537 | 3.265625 | 3 | [] | no_license | #!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Rebuild the ctags and cscope indexes for one directory.
# $1 - directory to index; cscope.files, tags and the cscope database
#      are written inside it.
index_dir() {
  local j
  j=$(realpath "$1") || return
  # list every C/C++ source and header for ctags/cscope to consume
  find "$j" -name "*.c" -o -name "*.cpp" -o -name "*.h" > "$j/cscope.files"
  ctags -L "$j/cscope.files" -o "$j/tags" --extra=+f
  # run cscope inside the tree so database paths stay relative
  ( cd "$j" && cscope -b -q -k )
}

# With an argument: index only that subdirectory of this script's
# directory; without one: index every subdirectory. (The original
# duplicated the three commands in both branches, wrapped each in
# $(...) — executing its empty output — and enumerated dirs with `ls`.)
if [ "$1" != "" ]; then
  index_dir "$DIR/$1"
else
  for i in "$DIR"/*/; do
    index_dir "$i"
  done
fi
| true |
d0ff97855f1e05ff25438b6e48e1645d237a17ed | Shell | joeladam518/dotfiles | /tmux/right-status.sh | UTF-8 | 1,377 | 3.21875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# status-right:
# Reference: the tmux status-right string this script re-creates:
# #[fg=colour235,bg=colour235,nobold,nounderscore,noitalics]#[fg=colour121,bg=colour235] %r %a %Y
# #[fg=colour238,bg=colour235,nobold,nounderscore,noitalics]#[fg=colour222,bg=colour238] #H
# #[fg=colour154,bg=colour238,nobold,nounderscore,noitalics]#[fg=colour232,bg=colour154] #(rainbarf --battery --remaining --no-rgb)
# Variables
# NOTE(review): 'default' is defined but never used below.
default="#[default]"
# background colour of the previously-emitted segment; each segment
# function reads it for its transition and then overwrites it
previous="colour232"
section_start() {
local fg="${1}"
local bg="${2}"
local previous_bg="${3}"
printf "%s " "#[fg=${bg},bg=${previous_bg},nobold,nounderscore,noitalics]#[fg=${fg},bg=${bg}]"
}
# Segment: current time "HH:MM TZ". Reads the global 'previous' for the
# left-edge transition colour and advances it to this segment's
# background (colour238).
date_time() {
local previous_bg="${previous}"
previous="colour238"
printf "%s " "$(section_start "colour222" "colour238" "${previous_bg}")$(date +'%H:%M %Z')"
}
# Segment: "<user>@<host>" (#H is expanded later by tmux). Not called
# by main().
# NOTE(review): 'previous' is set to colour238 here although this
# segment's background is colour154 — compare with user_host below and
# confirm which is intended before reusing this function.
user() {
local previous_bg="${previous}"
previous="colour238"
printf "%s " "$(section_start "colour232" "colour154" "${previous_bg}")$(whoami)@#H"
}
# Segment: host name only (#H is expanded by tmux, not by this script).
# Not called by main().
host() {
local previous_bg="${previous}"
previous="colour154"
printf "%s " "$(section_start "colour232" "colour154" "${previous_bg}")#H"
}
# Segment: "<user>@<host>" on a colour154 background; advances the
# global 'previous' to colour154 for the following segment.
user_host() {
local prev_bg=$previous
previous="colour154"
printf '%s ' "$(section_start colour232 colour154 "$prev_bg")$(whoami)@#H"
}
# Compose the right status line: time segment, then user@host segment.
main() {
date_time
user_host
}
# Calling the main function which will call the other functions.
main
| true |
1b8f85b4b76e657077615249f6d8e4f645aeb918 | Shell | UMM-CSci-3403-Fall-2017/lab-4-c-system-calls-brianshawnandthatsit | /summarize_tree/summarize_tree.sh | UTF-8 | 323 | 3.703125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Summarize a directory tree: print how many directories and regular
# files live under the directory given as $1.
#take input and save a directory
dir=$1
#go to the path
# fix: quote the argument and fail fast when the directory cannot be
# entered; previously a bad argument made the script silently count the
# current directory instead
cd "$dir" || { echo "cannot cd to '$dir'" >&2; exit 1; }
#get the number of regular files
num_regular=$(find . -type f | wc -l)
#get the number of directories (the top directory '.' is included)
num_dirs=$(find . -type d | wc -l)
#print out the result
echo "There were $num_dirs directories."
echo "There were $num_regular regular files."
| true |
178db6d07da9821e410218c37424760e37cfc95a | Shell | camitr/instant-script | /instant-script/directry.sh | UTF-8 | 221 | 3.4375 | 3 | [] | no_license | #!/bin/bash
# If the directory given as $1 exists, enter it and create a
# subdirectory whose name is read from the user.
if [ -d "$1" ]
then
# note: 'search' is assigned but never used afterwards
search=$1
echo "name of directory is $1 and it exists "
# fix: quote the path and abort if we cannot enter it
cd "$1" || exit 1
echo "type the name of directory to create "
# fix: -r keeps backslashes literal in the entered name
read -r name
# fix: quote the name and stop option parsing so names starting with
# '-' work
mkdir -- "$name"
else
echo "Directory not found"
fi
| true |
a50719e09e6f21e3f540ec0edfc949a56931bf35 | Shell | damonwang2/LinuxPractice | /for3.sh | UTF-8 | 147 | 3.25 | 3 | [] | no_license | #!/bin/bash
# Sum the integers from 1 to 100, then print the result (5050).
s=0
for((i=1; i <= 100; i = i+1))
do
# accumulate with standard $(( )) arithmetic expansion; the original
# used the deprecated $[ ... ] form
s=$(( s + i ))
done
echo $s
| true |
0ef0e5032fa94f3bce9e4130dc093d00f023b823 | Shell | eramon-gmbh/js-docker | /buildWARFileInstaller/build-WAR-file-installer.sh | UTF-8 | 357 | 2.578125 | 3 | [
"BSD-2-Clause"
] | permissive | #!/bin/sh
# Launch the bundled Apache Ant against buildWARFileInstaller.xml,
# forwarding any command-line arguments.
# NOTE(review): ANT_OPTS is assigned but not exported — confirm the ant
# launcher actually sees it.
ANT_OPTS="-Xms128m -Xmx512m -Djava.net.preferIPv4Stack=true"
#
# setup to use bundled of ant
#
ANT_HOME=./apache-ant
export ANT_HOME
ANT_RUN=$ANT_HOME/bin/ant
export ANT_RUN
PATH=$ANT_HOME/bin:$PATH
export PATH
#
# Collect the command line args
# NOTE(review): "$*" joins the arguments and the unquoted expansion
# below re-splits them, so arguments containing spaces are broken up.
#
CMD_LINE_ARGS=$*
$ANT_RUN --noconfig -nouserlib -f buildWARFileInstaller.xml $CMD_LINE_ARGS
| true |
a69df5dd6cfdf719380fcfba798bf4dc659fe262 | Shell | moeraj/romstools | /create_tides_ROMS/make_tides.sh | UTF-8 | 1,801 | 2.921875 | 3 | [
"MIT"
] | permissive |
#!/bin/bash
#
#--------------------------------------
# Make file with tidal forcing for ROMS
#--------------------------------------
#
# Start and end period for simulation
syear=1980; eyear=2017
# Input files
gridfile=/Users/trondkr/Dropbox/NIVA/ROHO800/Grid/ROHO800_grid.nc
# Name of app
APPNAME=ROHO800
#
cd Tpxo
if [ -s tpxo2grid ]; then rm tpxo2grid *.o; fi
make
# I had to run this command interactively to make this work (this script does not run as expected):
./tpxo2grid ${gridfile} nc ${APPNAME}_tpxo.nc # Extract TPXO-data to new model domain
cd ..
#
export CRAY_CPU_TARGET=x86-64
cd Tides
#module load pgi/11.10.0 # Program core-dumps with a newer pgi-compiler
if [ -s tidenc2roms ]; then rm tidenc2roms *.o; fi
make
# Substitute start simulation date and end date in sup-file, and run program
cp ./tidenc2roms.sup_ORIG tmp1.fil
perl -pe "s#YY_START#${syear}#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#MM_START#01#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#DD_START#01#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#HH_START#00#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#MI_START#00#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#YY_END#${eyear}#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#MM_END#12#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#DD_END#31#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#HH_END#00#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
perl -pe "s#MI_END#00#g" < tmp1.fil > tmp2.fil; mv tmp2.fil tmp1.fil
mv tmp1.fil ./tidenc2roms.sup
ln -sf ../Tpxo/${APPNAME}_tpxo.nc ./tpxo.nc
./tidenc2roms
rm ./tpxo.nc
mv ./tide.nc ../${APPNAME}_tides.nc
#module unload pgi/11.10.0
#
cd ..
#
exit
#
| true |
41d29a57a6d8a6a889d8bbef8cb38b97b4206598 | Shell | asahib/HabenulaAnalysis | /Segmentation/3-AverageTPs_HbprobSeg2_JL.sh | UTF-8 | 3,503 | 3.265625 | 3 | [
"MIT"
] | permissive | if [[ ${HCPPIPEDEBUG} == "true" ]]; then
set -x
fi
#############################################################################
# Parse command-line flags into globals.
# Recognized: --StudyFolder=<dir>  -> STUDY_DIR
#             --DataFolder=<dir>   -> DATA_DIR
#             --Subjlist=<id>      -> SUBJ
#             --runlocal           -> command_line_specified_run_local=TRUE
# Any other argument prints an error and exits with status 1.
get_batch_options() {
    unset command_line_specified_study_folder
    unset command_line_specified_subj
    unset command_line_specified_run_local
    local argument
    for argument in "$@"; do
        case "${argument}" in
            --StudyFolder=*)
                STUDY_DIR=${argument#*=}
                ;;
            --DataFolder=*)
                DATA_DIR=${argument#*=}
                ;;
            --Subjlist=*)
                SUBJ=${argument#*=}
                ;;
            --runlocal)
                command_line_specified_run_local="TRUE"
                ;;
            *)
                echo ""
                echo "ERROR: Unrecognized Option: ${argument}"
                echo ""
                exit 1
                ;;
        esac
    done
}
#################################################
# Function: main
# Description: main processing work of this script
#
# Averages the shape-optimized habenula probability maps across all
# timepoints found for ${SUBJ}, thresholds the average at 0.25 into a
# binary bilateral ROI, and splits it into left/right maps using the
# template hemispheres (FSL fslmerge/fslmaths). With a single
# timepoint, that timepoint's maps are simply copied under the
# "ALLTPsAveraged" names.
# Globals read: STUDY_DIR, SUBJ (set by get_batch_options).
main()
{
get_batch_options "$@"
cd /nafs/narr/HCP_OUTPUT/Habenula/outputs/Segmentation/2_FinalHbROIs_ShapeOptimized
outputdir="${STUDY_DIR}/outputs/Segmentation/3_AverageTPs_HbprobSeg/${SUBJ}"
if [ ! -d ${outputdir} ]; then
mkdir ${outputdir}
fi
# list every timepoint directory for this subject
ls -d -1 ${SUBJ}* > ${STUDY_DIR}/outputs/Segmentation/tmp.txt
sublistfile="${STUDY_DIR}/outputs/Segmentation/tmp.txt"
SubIDtmp=$(<${sublistfile})
if [[ $(wc -l <${sublistfile}) -ge 2 ]]; then
echo "${SUBJ} has more than one timepoint"
args=""
for s2 in ${SubIDtmp}; do
args="${args} ${STUDY_DIR}/outputs/Segmentation/2_FinalHbROIs_ShapeOptimized/${s2}/${s2}_bilat_Hb_region_full_prob.nii.gz"
#echo "${sub} ${roi}"
echo "${s2}"
done
# merge all timepoints into a 4D volume, average, threshold, binarize
fslmerge -t ${outputdir}/${SUBJ}_bilat_Hb_region_full_prob_ALLTPsMerged.nii.gz ${args}
fslmaths ${outputdir}/${SUBJ}_bilat_Hb_region_full_prob_ALLTPsMerged.nii.gz -Tmean ${outputdir}/${SUBJ}_bilat_Hb_region_full_prob_ALLTPsAveraged.nii.gz
fslmaths ${outputdir}/${SUBJ}_bilat_Hb_region_full_prob_ALLTPsAveraged.nii.gz -thr 0.25 -bin ${outputdir}/${SUBJ}_bilat_shape_optimized_Hb_ROI_ALLTPsAveraged.nii.gz
echo "${SUBJ} generate left probabilistic map"
fslmaths ${STUDY_DIR}/code/Segmentation/habenula_template/habenula_template_HCP_50_native_thr0bin_left_2mminterp.nii.gz -mul ${outputdir}/${SUBJ}_bilat_shape_optimized_Hb_ROI_ALLTPsAveraged.nii.gz ${outputdir}/${SUBJ}_L_shape_optimized_Hb_ROI_ALLTPsAveraged.nii.gz
# fix: this step produces the RIGHT map; the message previously said "left"
echo "${SUBJ} generate right probabilistic map"
fslmaths ${STUDY_DIR}/code/Segmentation/habenula_template/habenula_template_HCP_50_native_thr0bin_right_2mminterp.nii.gz -mul ${outputdir}/${SUBJ}_bilat_shape_optimized_Hb_ROI_ALLTPsAveraged.nii.gz ${outputdir}/${SUBJ}_R_shape_optimized_Hb_ROI_ALLTPsAveraged.nii.gz
else
# single timepoint: copy its maps under the averaged output names
echo "${SUBJ} only has one TP"
for s2 in ${SubIDtmp}; do
cp ${STUDY_DIR}/outputs/Segmentation/2_FinalHbROIs_ShapeOptimized/${s2}/${s2}_bilat_Hb_region_full_prob.nii.gz ${outputdir}/${SUBJ}_bilat_Hb_region_full_prob_ALLTPsAveraged.nii.gz
cp ${STUDY_DIR}/outputs/Segmentation/2_FinalHbROIs_ShapeOptimized/${s2}/${s2}_R_shape_optimized_Hb_ROI.nii.gz ${outputdir}/${SUBJ}_R_shape_optimized_Hb_ROI_ALLTPsAveraged.nii.gz
cp ${STUDY_DIR}/outputs/Segmentation/2_FinalHbROIs_ShapeOptimized/${s2}/${s2}_L_shape_optimized_Hb_ROI.nii.gz ${outputdir}/${SUBJ}_L_shape_optimized_Hb_ROI_ALLTPsAveraged.nii.gz
done
fi
}
main "$@"
ret=$?; times; exit "${ret}"
| true |
3fb616930dcf5667e7275b9099729e5a5f693662 | Shell | tavolivos/Download-multiple-sequencesfrom-NCBI- | /genome.sh | UTF-8 | 447 | 2.90625 | 3 | [] | no_license | #This is a bash-script to download multiple sequences from NCBI
#Written by Gustavo Olivos (tavolivos@gmail.com)
#Download every RefSeq genome assembly (FASTA) of a bacterial genus
#from NCBI, decompress them, and collect the .fna files here.
#Replace the genus here i=genus
#pip install ncbi-genome-download
i="Anabaena"
ncbi-genome-download --format fasta --genus $i bacteria
# flatten the refseq/bacteria/GCF_* layout into the current directory
mv refseq/bacteria/GCF_* . && rm -r refseq
# decompress each assembly in place
for d in GCF*/; do
cd $d && gzip -d *.gz && cd ..;
done
# pull the decompressed FASTA files out of each assembly directory
for d in GCF*/; do
cd $d && mv *.fna ../ && cd ..;
done
rm -r GCF*/
echo 'All sequences have been downloaded'
| true |
122f99d7432f97c7c900655c8647f0464d1afac1 | Shell | PiotrLuniov/Zabbix | /01/zabbix01.sh | UTF-8 | 1,162 | 3.0625 | 3 | [] | no_license | #!/bin/bash
# Unattended install of Zabbix 3.2 server + web frontend on CentOS 7
# with a local MariaDB backend (database/user/password all 'zabbix').
ZABBIX_CONF="/etc/zabbix/zabbix_server.conf"
yum -y install vim mc net-tools
yum -y install mariadb mariadb-server
/usr/bin/mysql_install_db --user=mysql
systemctl start mariadb
# create the zabbix database and grant the zabbix user full access
mysql -uroot<<EOM
create database zabbix character set utf8 collate utf8_bin;
grant all privileges on zabbix.* to zabbix@localhost identified by 'zabbix';
quit;
EOM
yum -y install http://repo.zabbix.com/zabbix/3.2/rhel/7/x86_64/zabbix-release-3.2-1.el7.noarch.rpm
yum -y install zabbix-server-mysql zabbix-web-mysql
# load the initial schema shipped with the server package
zcat /usr/share/doc/zabbix-server-mysql-*/create.sql.gz | mysql -uzabbix -pzabbix zabbix
# append DB connection settings, each only if not already configured
if [ $(grep -c "^DBHost=" $ZABBIX_CONF) -eq 0 ]; then
echo "DBHost=localhost" >> $ZABBIX_CONF
fi
if [ $(grep -c "^DBName=" $ZABBIX_CONF) -eq 0 ]; then
echo "DBName=zabbix" >> $ZABBIX_CONF
fi
if [ $(grep -c "^DBUser=" $ZABBIX_CONF) -eq 0 ]; then
echo "DBUser=zabbix" >> $ZABBIX_CONF
fi
if [ $(grep -c "^DBPassword=" $ZABBIX_CONF) -eq 0 ]; then
echo "DBPassword=zabbix" >> $ZABBIX_CONF
fi
systemctl start zabbix-server
# set the PHP timezone for the web UI
sed -i '/\<IfModule mod_php5.c\>/a php_value date.timezone Europe\/Minsk' /etc/httpd/conf.d/zabbix.conf
systemctl start httpd
| true |
17d1a251957dd0b2897de10bb3f160e861568c49 | Shell | wenchaomeng/db_related | /redisconf/utils/sentinels.sh | UTF-8 | 1,087 | 3.203125 | 3 | [] | no_license | function count(){
# ---- tail of count(): reads "<ip> <port>" lines from the file given as
# $1 and prints each sentinel's number of monitored masters ----
while read line
do
ip=`echo $line | awk '{print $1}'`
port=`echo $line | awk '{print $2}'`
count=`redis-cli -h $ip -p $port sentinel masters | egrep "\+" | wc -l`
echo =====process sentinel $ip $port "sentinel count:" $count
done < $1
}
# Run an arbitrary redis-cli command ($2) against every sentinel listed
# in file $1.
# NOTE(review): this shadows the shell builtin 'command' for the rest of
# the script — consider renaming before reuse.
function command(){
while read line
do
ip=`echo $line | awk '{print $1}'`
port=`echo $line | awk '{print $2}'`
echo ====$ip $port
redis-cli -h $ip -p $port $2
done < $1
}
# Remove every monitored master from every sentinel listed in file $1,
# printing before/after counts.
function removeAll(){
while read line
do
ip=`echo $line | awk '{print $1}'`
port=`echo $line | awk '{print $2}'`
echo =====process sentinel $ip $port
names=`redis-cli -h $ip -p $port sentinel masters | egrep "\+"`
echo before remove `echo $names | wc -w`
for name in $names
do
redis-cli -h $ip -p $port sentinel remove $name
done
echo after remove `redis-cli -h $ip -p $port sentinel masters | egrep "name" | wc -l`
done < $1
}
# Debug helper that echoes its arguments.
# NOTE(review): shadows the 'test' builtin; [ ... ] is unaffected, but
# bare 'test' invocations below this point call this function. The
# unquoted $@ also word-splits/globs the arguments.
function test(){
echo $@
}
#removeAll sentinels_real.data
#count sentinels_real.data
command sentinels_real.data "sentinel masters" | egrep "==|name"
test a b c
| true |
9d080ececb0f2e4c9b14c165bb6e380b0b8dd859 | Shell | j-mir-prazak/mk3 | /linaro/startup.sh | UTF-8 | 2,921 | 3.21875 | 3 | [] | no_license | #!/bin/bash
# Raspberry Pi network watchdog (run at boot).
# Picks NTP config based on /boot/dhcp-server vs /boot/dhcp-client, then
# loops forever: servers monitor isc-dhcp-server and the 192.168.88.1
# gateway; clients monitor the gateway and re-sync the clock every 600
# iterations. After 10 consecutive gateway failures (and no fallback via
# 192.168.99.1 or the internet), dhcp-startup-setup.sh is re-run.
path=$(dirname $0)
log="/home/pi"
sudo chmod 0777 -R /home/*
sudo chmod +x -R /etc/network/*
# start with a fresh status log
sudo rm "$log"/dhcp.status
# use classic ntp instead of systemd's time sync
sudo systemctl stop systemd-timesyncd.service
sudo systemctl disable systemd-timesyncd.service
sudo service systemd-timesyncd stop
sudo service hwclock.sh stop
sudo date --s "08:00:00"
if [ -f "/boot/dhcp-server" ]; then
echo "cp ntp server settings"
sudo cp "$path"/ntpserver /etc/ntp.conf
sudo service ntp restart
elif [ -f "/boot/dhcp-client" ]; then
echo "cp ntp client settings"
sudo cp "$path"/ntpclient /etc/ntp.conf
sudo service ntp restart
fi
# c counts loop iterations (starting at 570 makes the first client time
# sync at c==600, i.e. after ~30 iterations); nctries / dhcptries count
# consecutive failures
c=570
nctries=0
dhcptries=0
while true; do
echo $c
c=$(($c+1))
sleep 1
if [ -f "/boot/dhcp-server" ]; then
echo "-------------------------------------------------" >> "$log"/dhcp.status
echo "startup loop" >> "$log"/dhcp.status
echo "-------------------------------------------------" >> "$log"/dhcp.status
if sudo systemctl is-active --quiet isc-dhcp-server.service; then
date >> "$log"/dhcp.status
echo "dhcp is running" >> "$log"/dhcp.status
else
# fix: arithmetic increment. The original 'dhcptries=$(dhcptries+1)'
# was a command substitution that tried to run "dhcptries+1" as a
# command, leaving the counter empty so the restart below could
# never trigger.
dhcptries=$((dhcptries+1))
if [ $dhcptries -eq 10 ]; then
echo "restarting dhcp" >> "$log"/dhcp.status
sudo systemctl restart isc-dhcp-server | tee -a "$log"/dhcp.status
dhcptries=0
fi
fi
if ! fping -q -c1 -t500 192.168.88.1 &>/dev/null;
then echo "lost connection?";
nctries=$(($nctries+1))
if [ $nctries -eq 10 ]; then
echo "restarting connection"
if sudo fping -q -c4 -t1500 192.168.99.1 &>/dev/null; then
echo "saved by the master connection"
elif sudo fping -q -c4 -t1500 8.8.8.8 &>/dev/null; then
echo "saved by the internet connection"
else
bash /home/pi/mk3/linaro/dhcp-startup-setup.sh
fi
nctries=0
fi
fi
echo "-------------------------------------------------" >> "$log"/dhcp.status
elif [ -f "/boot/dhcp-client" ]; then
# every 600 iterations, re-sync the clock from the gateway
if [ $c -eq 600 ]; then
echo "600 loops"
echo "-------------------------------------------------" >> "$log"/dhcp.status
echo "time sync" >> "$log"/dhcp.status
echo "-------------------------------------------------" >> "$log"/dhcp.status
sudo service ntp stop
sudo sntp -s 192.168.88.1
sudo service ntp start
c=0
fi
if ! fping -q -c1 -t500 192.168.88.1 &>/dev/null; then
echo "lost connection?" >> "$log"/dhcp.status
nctries=$(($nctries+1))
if [ $nctries -eq 10 ]; then
echo "restarting connection">> "$log"/dhcp.status
if sudo fping -q -c4 -t1500 192.168.99.1 &>/dev/null; then
echo "saved by the master connection">> "$log"/dhcp.status
elif sudo fping -q -c4 -t1500 8.8.8.8 &>/dev/null; then
echo "saved by the internet connection">> "$log"/dhcp.status
else
bash /home/pi/mk3/linaro/dhcp-startup-setup.sh
fi
nctries=0
fi
fi
if fping -q -c1 -t500 192.168.88.1 &>/dev/null; then
echo "connection" >> "$log"/dhcp.status
nctries=0
fi
fi
done
| true |
d4ff331ebe81acf544c10550656f306c90e9ad51 | Shell | studiorak/bacula_ctl | /generate-bacula-conf | UTF-8 | 174 | 2.6875 | 3 | [] | no_license | #!/usr/bin/env bash
# set -o xtrace
# Render a per-machine Bacula config: substitute <machine> in the
# template with the machine name and append the result to
# cust_<name>-<service>-all.conf in the current directory.
# Usage: generate-bacula-conf <machine-name> <template-file> <service>
MYSED=$(command -v sed)   # 'command -v' is POSIX; 'which' is not
NAME=$1
TEMPLATE=$2
SERVICE=$3
# fail early on missing arguments instead of writing a junk file
if [ -z "$NAME" ] || [ -z "$TEMPLATE" ] || [ -z "$SERVICE" ]; then
  echo "Usage: $0 <machine-name> <template-file> <service>" >&2
  exit 2
fi
"${MYSED}" "s/<machine>/${NAME}/g" "${TEMPLATE}" >> "cust_${NAME}-${SERVICE}-all.conf"
| true |
47aedd14f943105f4f68cb75274965e5255a0b04 | Shell | JeffersonLab/remoll_solid | /macros/submit_xml_jobs.sh | UTF-8 | 144 | 2.59375 | 3 | [] | no_license | #!/bin/bash
# Authors : Rakitha Beminiwattha
# Tue Jan 7 15:01:14 EST 2014
# Entry point for submitting simulation jobs for the run range $1..$2;
# currently it only echoes the arguments it received.
# NOTE(review): "$*" joins all arguments with spaces, so original
# quoting is lost in the printed line.
echo -e "received arguments >$*<"
echo " Submitting jobs for runs "$1" to "$2
| true |
ecf7a8ef6c7a4d18d444381fa1907fafc00ee36b | Shell | greymd/ttcopy | /bin/ttcopy | UTF-8 | 1,741 | 3.609375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Portable and reliable way to get the PARENT directory of this script.
# (Note to `cd` before `pwd`)
# Based on http://stackoverflow.com/a/246128
# then added zsh support from http://stackoverflow.com/a/23259585 .
_TTCP_DIR="$(cd "$(dirname "${BASH_SOURCE[0]:-${(%):-%N}}")"; cd ..; pwd)"
# shared helpers: the __ttcp::* functions and _TTCP_* exit codes
source "$_TTCP_DIR"/lib/ttcp
# Get options
__ttcp::opts "$@"
# Check whether TTCP_ID and TTCP_PASSWORD are set or not.
__ttcp::is_credential_valid || \
# If not, check config file and load it.
{ __ttcp::check_config && __ttcp::load_config; } || \
# If config could not be loaded, initial screen is shown.
__ttcp::init
__ttcp::is_dependency_installed
# stop the spinner and kill children on interruption
trap "__ttcp::unspin; kill 0; exit $_TTCP_EINTR" SIGHUP SIGINT SIGQUIT SIGTERM
__ttcp::spin "Copying..."
_HTTP_CLIENT="$(__ttcp::http_client)"
# Following credentials will be prepared.
# TTCP_ID_CLIP, TTCP_PASSWORD_CLIP, TTCP_ID_TRANS, TTCP_PASSWORD_TRANS
__ttcp::generate_credentials
# encrypt stdin and upload it; the returned URL is the handle to the
# uploaded content
TRANS_URL=$(cat | __ttcp::encode "${TTCP_PASSWORD_TRANS}" | ${_HTTP_CLIENT} -so- --fail --upload-file - $TTCP_TRANSFER_SH/$TTCP_ID_TRANS );
if [ $? -ne 0 ]; then
__ttcp::unspin
echo "Failed to upload the content" >&2
exit $_TTCP_ECONTTRANS
fi
# encrypt the transfer URL itself before publishing it to the clip service
ENCRYPTED_TRANS_URL="$(echo "$TRANS_URL" | __ttcp::encode "$TTCP_PASSWORD_CLIP" | __ttcp::base64enc)"
if [ "$ENCRYPTED_TRANS_URL" = "" ]; then
__ttcp::unspin
echo "Failed to encode data." >&2
exit $_TTCP_EENCODE
fi
# store the encrypted URL under this user's clip id
${_HTTP_CLIENT} -s -X POST "$TTCP_CLIP_NET/$TTCP_ID_PREFIX/$TTCP_ID_CLIP" --fail --data "content=TTCP[$ENCRYPTED_TRANS_URL]" > /dev/null
if [ $? -ne 0 ]; then
__ttcp::unspin
echo "Failed to save the content url" >&2
exit $_TTCP_ECONTURL
fi
__ttcp::unspin
echo "Copied!" >&2
exit 0
| true |
06621bc6fb0e61b0dd361daf624a4f4a8dc8eb33 | Shell | peteyWheatstraw/raspberry_master_bash | /scripts/rmb_menu_loadScript.sh | UTF-8 | 1,910 | 3.484375 | 3 | [] | no_license |
# one-shot guard: the menu's key/function/label tables are built only on
# the first call
rmb_menu_loadScript_INIT=0
# Interactive menu that lists every script found by rmb_findScripts and
# eval's the command string of the chosen entry. 'c' clears the screen,
# 'q' leaves the loop. Depends on external helpers rmb_loadScript and
# rmb_kfl_findKey (which sets rmb_kfl_findKey_RETURN to the matched
# index or -1).
# NOTE(review): 'declare -a' inside a function creates function-local
# arrays, so ls_key/ls_func/ls_label do not survive between calls even
# though the INIT guard assumes they do — a second invocation would see
# empty tables. Confirm before reusing.
function rmb_menu_loadScript(){
if [[ "$rmb_menu_loadScript_INIT" -eq 0 ]]; then
rmb_findScripts
# echo "INITIALIZING"
declare -a foundScripts=("${rmb_findScripts_RETURN[@]}")
declare -a ls_key=()
declare -a ls_func=()
declare -a ls_label=()
local scriptFound=""
local scriptCounter=0
# one numeric menu entry per discovered script
for scriptFound in "${foundScripts[@]}"; do
ls_key+=("$scriptCounter")
ls_func+=("rmb_loadScript $scriptFound")
ls_label+=("LOAD $scriptFound")
((scriptCounter++))
done
# fixed entries: clear screen and quit
ls_key+=("c")
ls_func+=("clear")
ls_label+=("clear")
ls_key+=("q")
ls_func+=("exitLoop=1")
ls_label+=("exit")
rmb_menu_loadScript_INIT=1
fi
# renders the menu; nested so it can see foundScripts/ls_* above
function menu_script_text(){
local foundScript=""
local textCounter=0
echo "*******************************"
echo "*******  LOAD SCRIPT **********"
echo "*******************************"
# echo ""
# echo "    ---FOUND SCRIPTS----"
for foundScript in "${foundScripts[@]}"; do
echo "-${ls_key[$textCounter]}---${ls_label[$textCounter]}---"
(( textCounter++ ))
done
echo "-------------------------------"
while [[ "$textCounter" -lt "${#ls_key[@]}" ]]; do
echo "-${ls_key[$textCounter]}---${ls_label[$textCounter]}---"
((textCounter++))
done
echo "----------------------------"
}
# main input loop: show menu, read a key, dispatch via the tables
local exitLoop=0
local userEntry=""
while [[ "$exitLoop" -eq 0 ]]; do
menu_script_text
read userEntry
echo "------------------------------------------------------"
rmb_kfl_findKey "$userEntry" ls_key
if [[ "$rmb_kfl_findKey_RETURN" -ne -1 ]]; then
eval "${ls_func[$rmb_kfl_findKey_RETURN]}"
else
echo "UNKNOWN COMMAND"
fi
done
}
# Global out-parameter: array of *.sh paths found by the last call.
rmb_findScripts_RETURN=()
# Fill rmb_findScripts_RETURN with every regular *.sh file directly
# inside $rmb_SETTING_scriptFolder. The find output is NUL-delimited so
# file names containing whitespace survive intact.
function rmb_findScripts(){
local script_path
rmb_findScripts_RETURN=()
while IFS= read -r -d '' script_path; do
rmb_findScripts_RETURN+=("$script_path")
done < <(find "$rmb_SETTING_scriptFolder/"*.sh -prune -type f -print0)
}
# entry point: start the interactive menu when this file is run/sourced
rmb_menu_loadScript
f58ef364d6d5f1a1a438aed42f85f776e562f19f | Shell | jeromepin/dotfiles | /bashrc.d/10_env.sh | UTF-8 | 920 | 2.9375 | 3 | [] | no_license | #!/usr/bin/env bash
# Interactive-shell environment (sourced from ~/.bashrc.d).
export TERM=screen-256color
# start an ssh-agent (and load the default key) if none is available
if [ -z "$SSH_AUTH_SOCK" ] ; then
eval `ssh-agent -s`
ssh-add
fi
export BAT_PAGER='less -R'
export PAGER=most
export HISTSIZE=2000
export HISTCONTROL=ignoreboth:erasedups:ignorespace
export FZF_DEFAULT_COMMAND='fd --follow --exclude .git'
export GOPATH=$HOME/go
# build flags pointing at MacPorts' openssl-1.0
export LDFLAGS="-L/opt/local/lib/openssl-1.0/"
export CPPFLAGS="-I/opt/local/include/openssl"
# MacPorts's ports
PATH=/opt/local/bin:/opt/local/sbin:/opt/local/libexec/gnubin/:$PATH
# Cargo-installed binaries
PATH=$HOME/.cargo/bin:$PATH
# Golang's binaries
PATH=$GOPATH/bin:/usr/local/go/bin:$PATH
PATH=$HOME/.local/bin:$PATH
# Personal scripts
PATH=$HOME/bin:$PATH
export PATH
# export BASH_COMPLETION_COMPAT_DIR="/usr/local/etc/bash_completion.d"
# load bash completion from MacPorts when present
if [ -f /opt/local/etc/profile.d/bash_completion.sh ]; then
. /opt/local/etc/profile.d/bash_completion.sh
fi
[ -f ~/.fzf.bash ] && source ~/.fzf.bash
| true |
368b23f4ddb145e38b8b80d08ffff44f80796000 | Shell | dgist-datalab/exp_parser | /bash_parser/ssd_io_parser.sh | UTF-8 | 4,534 | 3.40625 | 3 | [] | no_license | #!/bin/bash
# ssd_io_parser.sh — walk <src_directory>/<fs>/.../kukania result dirs,
# copy recognized "<COUNTER> <value>" lines into per-benchmark .dat
# files and build merged micro/macro summary tables under
# <des_directory>/ssd_io.
if [ $# != "2" ]; then
	# fix: the original read 'echo"[usage]..."' (missing space after
	# echo), so the usage text was never printed; also exit non-zero
	echo "[usage] :$0 [src_directory] [des_directory]"
	exit 1
fi

target_fs=("ext4_data_journal" "ext4_metadata_journal" "f2fs" "xfs" "btrfs")
micro_bench_list=("copyfiles_16_gamma_fuse.f" "filemicro_createfiles_16_fuse.f" "filemicro_delete_16_fuse.f" "makedirs_16_fuse.f" "removedirs_16_fuse.f")
macro_bench_list=("fileserver_gamma_fuse.f" "varmail_gamma_fuse.f" "webproxy_gamma_fuse.f" "webserver_gamma_fuse.f")

SRCD=$2/ssd_io
SRCD_MERGE=$SRCD/merged

# start from a clean output tree
rm -rf $SRCD
mkdir -p $SRCD
mkdir -p $SRCD_MERGE

# header row for the merged summary tables
IO_TYPE="#bench TRIM MAPPINGR MAPPINGW GCMR GCMW DATAR DATAW GCDR GCDW GCMR_DGC GCMW_DGC"

for fs in ${target_fs[@]}
do
	# result files live inside directories named "kukania"
	for d in $(find $1$fs/ -type d)
	do
		target=$(basename $d)
		if [ $target == "kukania" ]; then
			echo "$IO_TYPE" >> "$SRCD_MERGE/micro-$fs"
			echo "$IO_TYPE" >> "$SRCD_MERGE/macro-$fs"
			for f in $(find $d -type f)
			do
				bench_name=$(basename $f)
				bench_name="${bench_name%.*}"
				target_file_name=$SRCD/$fs-$bench_name.dat
				# the summary row starts with the benchmark name when it
				# belongs to one of the two benchmark lists (substring
				# match against the joined list, as in the original)
				micro_result=""
				macro_result=""
				if [[ "${micro_bench_list[@]}" =~ "${bench_name}" ]]; then
					micro_result="$bench_name"
				elif [[ "${macro_bench_list[@]}" =~ "${bench_name}" ]]; then
					macro_result="$bench_name"
				fi
				# every recognized "<COUNTER> <value>" line is copied to
				# the per-benchmark .dat file and its value appended to
				# the summary row; the original repeated an identical
				# branch for each of the 11 counter names
				while read -r a b c; do
					case "$a" in
					( "TRIM" | "MAPPINGR" | "MAPPINGW" | "GCMR" | "GCMW" | "DATAR" | "DATAW" | "GCDR" | "GCDW" | "GCMR_DGC" | "GCMW_DGC" )
						echo "$a" "$b" >> "$target_file_name"
						if [[ "${micro_bench_list[@]}" =~ "${bench_name}" ]]; then
							micro_result="$micro_result $b"
						elif [[ "${macro_bench_list[@]}" =~ "${bench_name}" ]]; then
							macro_result="$macro_result $b"
						fi
						;;
					esac
				done < "$f"
				if [[ "${micro_bench_list[@]}" =~ "${bench_name}" ]]; then
					echo "$micro_result" >> "$SRCD_MERGE/micro-$fs"
				elif [[ "${macro_bench_list[@]}" =~ "${bench_name}" ]]; then
					echo "$macro_result" >> "$SRCD_MERGE/macro-$fs"
				fi
			done
		fi
	done
done
48a9cd97cdc66371fa9439b8c9ca18c44f83fb1e | Shell | henryqin1997/verdict | /bin/verdict-shell | UTF-8 | 426 | 2.6875 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# veeline - script to launch Verdict's command line interface
# Builds a classpath from the sibling jars/ and jdbc_jars/ directories
# and starts the sqlline-based shell, forwarding all arguments.
BINPATH=$(dirname $0)
JAR_DIR=$BINPATH/../jars
JDBC_DIR=$BINPATH/../jdbc_jars
# NOTE(review): VERSION is only referenced by the commented-out lines
# below.
VERSION=0.4.7
#JDBC_JAR=$JAR_DIR/verdict-jdbc-$VERSION.jar
#VEELINE_JAR=$JAR_DIR/verdict-shell-$VERSION.jar
verbose=true
java -cp "$JAR_DIR/*:$JDBC_DIR/*" \
-Dlog4j.configuration=file:$BINPATH/../conf/log4j.properties \
sqlline.SqlLine --verbose=${verbose} "$@"
| true |
32bc2081bc6d2fca0cb352e100291ae68444d603 | Shell | kawamanza/home_files | /sh/who_holds | UTF-8 | 105 | 2.953125 | 3 | [] | no_license | #!/usr/bin/env bash
# who_holds PORT - show which process is listening on the given TCP port.
if [ -z "$1" ]; then
  # report usage errors on stderr and exit non-zero so callers can detect
  # the failure (the original printed to stdout and exited 0)
  echo "missing port number" >&2
  exit 1
else
  # -w: suppress warnings, -n: skip DNS lookups (faster), -i: filter by port;
  # quote "tcp:$1" so an odd argument cannot be word-split
  lsof -w -n -i "tcp:$1"
fi
| true |
fdd6d4a52648f64a4a887e06daf452f399ee506b | Shell | ggear/asystem | /src/net/udmutilities/install_post.sh | UTF-8 | 3,208 | 2.9375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/sh
# Per-service install layout.  SERVICE_NAME and SERVICE_VERSION_ABSOLUTE are
# not set in this script -- presumably exported by the calling environment
# (the asystem installer); TODO confirm against the caller.
SERVICE_HOME=/home/asystem/${SERVICE_NAME}/${SERVICE_VERSION_ABSOLUTE}
SERVICE_INSTALL=/var/lib/asystem/install/${SERVICE_NAME}/${SERVICE_VERSION_ABSOLUTE}
# Install an on-boot hook script if it is not already present.
#   $1 - destination hook name (without .sh) under /mnt/data/on_boot.d
#   $2 - source package directory under /var/lib/asystem/install
# Returns non-zero when nothing was copied (hook already present or source
# missing); callers here ignore the status.
add_on_boot_script() {
  # NOTE(review): the original tested the quoted path
  # "/var/lib/asystem/install/*udm-net*/${2}/latest/install.sh", in which the
  # asterisks are taken literally (globs never expand inside quotes), so the
  # -f test could never succeed and the copy below was dead code.  Test the
  # same path that is actually copied -- confirm the *udm-net* segment was
  # not intentional.
  [ ! -f "/mnt/data/on_boot.d/${1}.sh" ] &&
    [ -d "/var/lib/asystem/install" ] &&
    [ -f "/var/lib/asystem/install/${2}/latest/install.sh" ] &&
    cp -rvf "/var/lib/asystem/install/${2}/latest/install.sh" "/mnt/data/on_boot.d/${1}.sh"
}
# Work from the unpacked install payload; abort if it is missing.
cd ${SERVICE_INSTALL} || exit
# Stage the asystem self-install hook so it re-runs on every boot.
cp -rvf /mnt/data/asystem/install/udmutilities/latest/install_prep.sh /mnt/data/on_boot.d/20-asystem-install.sh
chmod a+x /mnt/data/on_boot.d/20-asystem-install.sh
chmod a+x ./config/udm-utilities/on-boot-script/remote_install.sh
# Bootstrap the udm-utilities on-boot framework only on first run
# (its installer creates /mnt/data/on_boot.d).
if [ ! -d /mnt/data/on_boot.d ]; then
  ./config/udm-utilities/on-boot-script/remote_install.sh
fi
# Install and immediately run the container-common helper once.
chmod a+x ./config/udm-utilities/container-common/on_boot.d/05-container-common.sh
if [ ! -f /mnt/data/on_boot.d/05-container-common.sh ]; then
  cp -rvf ./config/udm-utilities/container-common/on_boot.d/05-container-common.sh /mnt/data/on_boot.d
  /mnt/data/on_boot.d/05-container-common.sh
fi
# Register additional asystem hooks (no-ops if already installed).
add_on_boot_script "10-unifios" "_unifios"
add_on_boot_script "11-users" "_users"
add_on_boot_script "12-links" "_links"
# Make sure the boot service inside the unifi-os container is active.
podman exec unifi-os systemctl enable udm-boot
podman exec unifi-os systemctl restart udm-boot
# Refresh the host-records and dnsmasq payloads wholesale, then run dnsmasq setup.
chmod a+x ./config/udm-host-records/*.sh
rm -rf /mnt/data/udm-host-records && cp -rvf ./config/udm-host-records /mnt/data
chmod a+x ./config/udm-dnsmasq/09-dnsmasq.sh
rm -rf /mnt/data/udm-dnsmasq && cp -rvf ./config/udm-dnsmasq /mnt/data
cp -rvf ./config/udm-dnsmasq/09-dnsmasq.sh /mnt/data/on_boot.d
/mnt/data/on_boot.d/09-dnsmasq.sh
# CNI plugins are needed for the custom "dns" podman network used below.
cp -rvf ./config/udm-utilities/cni-plugins/05-install-cni-plugins.sh /mnt/data/on_boot.d
chmod a+x /mnt/data/on_boot.d/05-install-cni-plugins.sh
/mnt/data/on_boot.d/05-install-cni-plugins.sh
cp -rvf ./config/udm-utilities/cni-plugins/20-dns.conflist /mnt/data/podman/cni
# Persistent pi-hole state directories (world-writable for the container user).
mkdir -p /mnt/data/etc-pihole && chmod 777 /mnt/data/etc-pihole
mkdir -p /mnt/data/pihole/etc-dnsmasq.d && chmod 777 /mnt/data/pihole/etc-dnsmasq.d
# Seed local DNS overrides once; never overwrite operator edits.
# NOTE(review): the bare "server=//10.0.0.1" line looks odd (empty domain) --
# confirm it is intentional.
if [ ! -f /mnt/data/pihole/etc-dnsmasq.d/02-custom.conf ]; then
  cat <<EOF >>/mnt/data/pihole/etc-dnsmasq.d/02-custom.conf
rev-server=10.0.0.0/8,10.0.0.1
server=/janeandgraham.com/10.0.0.1
server=//10.0.0.1
EOF
  chmod 644 /mnt/data/pihole/etc-dnsmasq.d/02-custom.conf
fi
# Recreate the pi-hole container from scratch (ignore errors if absent).
# PIHOLE_IP / PIHOLE_KEY are presumably injected by the install environment --
# TODO confirm.
podman stop pihole 2>/dev/null
podman rm pihole 2>/dev/null
podman create --network dns --restart always \
  --name pihole \
  -e TZ="Australia/Perth" \
  -v "/mnt/data/etc-pihole/:/etc/pihole/" \
  -v "/mnt/data/pihole/etc-dnsmasq.d/:/etc/dnsmasq.d/" \
  --dns=127.0.0.1 \
  --dns=1.1.1.1 \
  --dns=8.8.8.8 \
  --hostname udm-pihole \
  -e VIRTUAL_HOST="udm-pihole" \
  -e PROXY_LOCATION="udm-pihole" \
  -e ServerIP="${PIHOLE_IP}" \
  -e IPv6="False" \
  pihole/pihole:2023.02.2
# The dns-common hook starts the container and wires the network.
cp -rvf ./config/udm-utilities/dns-common/on_boot.d/10-dns.sh /mnt/data/on_boot.d
chmod a+x /mnt/data/on_boot.d/10-dns.sh
/mnt/data/on_boot.d/10-dns.sh
# Set the pi-hole admin password.
podman exec -it pihole pihole -a -p ${PIHOLE_KEY}
# Install and run the cloudflare dynamic-DNS hook.
cp -rvf ./config/udm-cloudflare-ddns/13-cloudflare-ddns.sh /mnt/data/on_boot.d
chmod a+x /mnt/data/on_boot.d/13-cloudflare-ddns.sh
/mnt/data/on_boot.d/13-cloudflare-ddns.sh
| true |
196f15d9867f15e7a8a015d5c6e8af7e324fb31e | Shell | lalitsharma16/terraform | /terraform_installation.sh | UTF-8 | 1,740 | 3.625 | 4 | [] | no_license | #!/bin/bash
# Install destination and pinned release artifact names; bump the filenames
# (and the matching URLs inside the functions below) to upgrade.
terraform_home="/usr/local/bin"
terraform_file="terraform_0.12.23_linux_amd64.zip"
terragrunt_file="terragrunt_linux_amd64"
# Download and install the pinned terraform release into ${terraform_home}.
# No-ops (returns 0) when a terraform binary is already installed.
function terraform_install() {
    if [[ -f ${terraform_home}/terraform ]]; then
        echo "$(${terraform_home}/terraform version) already installed at ${terraform_home}/terraform"
        return 0
    fi
    # Derive the URL from the pinned archive name instead of repeating it;
    # 'local' keeps it from leaking as a misleadingly named global (the old
    # LATEST_URL actually pointed at a pinned, not latest, release).
    local url="https://releases.hashicorp.com/terraform/0.12.23/${terraform_file}"
    # Fail early if /tmp is unusable rather than downloading into the CWD.
    cd /tmp/ || return 1
    sudo rm -rf terraform*
    # NOTE(review): --no-check-certificate disables TLS verification; kept for
    # compatibility with the original, but confirm it is really required.
    wget "${url}" --no-check-certificate
    unzip "${terraform_file}"
    sudo mv /tmp/terraform "${terraform_home}"
    echo "Installed: $(${terraform_home}/terraform version)"
}
# Download and install the pinned terragrunt release into ${terraform_home}.
# No-ops (returns 0) when a terragrunt binary is already installed.
function terragrunt_install() {
    if [[ -f ${terraform_home}/terragrunt ]]; then
        echo "$(${terraform_home}/terragrunt --version) already installed at ${terraform_home}/terragrunt"
        return 0
    fi
    local url="https://github.com/gruntwork-io/terragrunt/releases/download/v0.21.11/${terragrunt_file}"
    # Fail early if /tmp is unusable rather than downloading into the CWD.
    cd /tmp/ || return 1
    sudo rm -rf terragrunt*
    # NOTE(review): --no-check-certificate disables TLS verification; kept for
    # compatibility with the original, but confirm it is really required.
    wget "${url}" --no-check-certificate
    sudo mv "/tmp/${terragrunt_file}" "${terraform_home}/terragrunt"
    # the mv above needed sudo, so chmod on the root-owned target does too
    # (the original chmod ran without sudo and would fail for non-root users)
    sudo chmod +x "${terraform_home}/terragrunt"
    echo "Installed: $(${terraform_home}/terragrunt --version)"
}
#function kubectl_install() {
# [[ -f ${terraform_home}/kubectl ]] && echo "`${terraform_home}/kubectl version --client --short` already installed at ${terraform_home}/kubectl" && return 0
# LATEST_URL="https://storage.googleapis.com/kubernetes-release/release/v1.18.0/bin/linux/amd64/kubectl"
# cd /tmp/; wget ${LATEST_URL} --no-check-certificate
# sudo mv /tmp/kubectl ${terraform_home}/kubectl
# chmod +x ${terraform_home}/kubectl
# echo "Installed: `${terraform_home}/kubectl --version`"
#}
# Entry point: install both tools (kubectl install is currently disabled).
terraform_install
terragrunt_install
#kubectl_install
| true |
e806b916cf080aa37b1576714f32574fdfd220dd | Shell | jmoody/ljsj-utils | /ljsj-utils/scripts/deploy-aw-and-wayang.sh | UTF-8 | 47,359 | 3.15625 | 3 | [] | no_license | #!/bin/sh
########################################################################
# WARNING: do not execute this as root or with sudo
# instead execute as a user with sudo privileges
# and provide your pass when prompted
########################################################################
### --------------------------------------------------------------------------
### {dev | production}
### --------------------------------------------------------------------------
### --------------------------------------------------------------------------
### build configuration: {dev | production}
### --------------------------------------------------------------------------
BUILD_TYPE=dev
### --------------------------------------------------------------------------
### the user performing the deploy and the hosts we deploy against
### --------------------------------------------------------------------------
USERNAME=$(whoami)
HOST=$(hostname)
MYSQL_HOST=$HOST
### --------------------------------------------------------------------------
### locations of the server and client source trees; the two release numbers
### are independent.  note that the initial setup instructions indicate that
### one should create sym links /Users -> /home and /home -> /Users on mac
### and linux respectively
### --------------------------------------------------------------------------
SERVER_SOURCE=/home/$USERNAME/Documents/svn/k12/wayang/server/trunk
CLIENT_SOURCE=/home/$USERNAME/Documents/svn/k12/wayang/client/branches/client-release-branch-1.2.3
### --------------------------------------------------------------------------
### archive the previous build (conf/Catalina/localhost/*.xml and the .war)?
### {y | n} -- note that each archived build costs roughly 1G of disk
### --------------------------------------------------------------------------
ARCHIVE_OLD_BUILD=n
ARCHIVED_BUILD_DIRECTORY=/home/$USERNAME/Documents/archived-builds
### --------------------------------------------------------------------------
### name of the webapp
### --------------------------------------------------------------------------
APP_NAME=woj
### --------------------------------------------------------------------------
### the path to the tomcat software
### --------------------------------------------------------------------------
PATH_TO_TOMCAT=/usr/local/tomcat
### --------------------------------------------------------------------------
### tomcat port: 8080 on Mac OS (Darwin), 80 everywhere else; this rarely
### needs to change
### --------------------------------------------------------------------------
OS_NAME=$(uname)
case "$OS_NAME" in
  Darwin) PORT=8080 ;;
  *)      PORT=80 ;;
esac
### --------------------------------------------------------------------------
### server log level and client debug flag follow the build type:
### dev => debug/true (copious output), production => info/false
### --------------------------------------------------------------------------
case "$BUILD_TYPE" in
  dev)
    LOG4J_LOG_LEVEL=debug
    CLIENT_DEBUG_VALUE=true
    ;;
  production)
    LOG4J_LOG_LEVEL=info
    CLIENT_DEBUG_VALUE=false
    ;;
  *)
    echo [FATAL] Build type must be "{dev | production}" - found $BUILD_TYPE
    echo [FATAL] Adjust this variable in deploy.sh
    echo [INFO] exiting
    exit 1
    ;;
esac
### --------------------------------------------------------------------------
### where should the client files be placed?
### we are no longer installing in $TOMCAT_HOME/webapps/ROOT
### there should be no need to adjust this
### --------------------------------------------------------------------------
CLIENT_DESTINATION=$SERVER_SOURCE/web/client
### --------------------------------------------------------------------------
### don't adjust this unless you know what you are about
### --------------------------------------------------------------------------
LOG_DIRECTORY=$SERVER_SOURCE/logs/deploy
LOG_FILE=$SERVER_SOURCE/logs/deploy/deploy.log
### --------------------------------------------------------------------------
### must be writable by non root
### --------------------------------------------------------------------------
TMP_DIRECTORY=/tmp
### --------------------------------------------------------------------------
### where client files will be staged or copied to temporarily.
### no need to change this.
### --------------------------------------------------------------------------
CLIENT_TMP_DIRECTORY=$TMP_DIRECTORY/client-`date +%Y%m%d%H%M%S`
WEBORB_DIRECTORY_NAME=weborb
### --------------------------------------------------------------------------
### variables for writing <client>/config.xml
### --------------------------------------------------------------------------
PATH_TO_CLIENT_CONFIG=$CLIENT_DESTINATION/config.xml
GATEWAY_HOST=$HOST # weborb
GATEWAY_PORT=$PORT
GATEWAY_URI=http://$GATEWAY_HOST:$GATEWAY_PORT/$WEBORB_DIRECTORY_NAME/console/weborb.wo
CHAT_HOST=$HOST
### coupled with writting of the web.xml file (see below)
CHAT_PORT=8088
OLD_GATEWAY_HOST=$HOST # tutor brain
OLD_GATEWAY_PORT=$PORT # tutor brain
### coupled with writting of the web.xml file (see below)
OLD_GATEWAY_SERVLET=TutorBrain # tutor brain
SIGNUP_HOST=$HOST
SIGNUP_PORT=$PORT
### coupled with writting of the web.xml file (see below)
SIGNUP_SERVLET=WoAdmin
SIGNUP_ACTION=CreateUser1
### --------------------------------------------------------------------------
### variables for writing <server>/web/META-INF/context.xml
### --------------------------------------------------------------------------
PATH_TO_CONTEXT=$SERVER_SOURCE/web/META-INF/context.xml
SERVER_MYSQL_PASS=MeSWAjE4U5aq4jec
SERVER_MYSQL_USER=woj-server
SERVER_MYSQL_HOST=$MYSQL_HOST
### coupled with the web.xml writing (see below)
SERVER_MYSQL_DB_NAME=adm
### --------------------------------------------------------------------------
### variables for writing <server>/web/index.html
### --------------------------------------------------------------------------
PATH_TO_INDEX_HTML=$SERVER_SOURCE/web/index.html
PATH_TO_CLIENT_DIRECTORY=client
########################################################################
# do not edit below this line unless you know what you are about #
########################################################################
### --------------------------------------------------------------------------
### we support Mac OS builds and Linux builds
### --------------------------------------------------------------------------
TOMCAT_STOP_ARGS=""
TOMCAT_START_ARGS=""
if [ "$OS_NAME" = "Darwin" ];
then
CATALINA_PID=$PATH_TO_TOMCAT/logs/tomcat6.pid
TOMCAT_USER=www
TOMCAT_GROUP=www
DARWIN_MAJOR_VERSION=`uname -r | cut -f 1 -d '.'`
if [ $DARWIN_MAJOR_VERSION -eq 9 ];
then
TOMCAT_USER=_www
TOMCAT_GROUP=_www
fi
PATH_TO_TOMCATCTL=$PATH_TO_TOMCAT/bin/catalina.sh
TOMCAT_STOP_ARGS="-force"
TOMCAT_START_ARGS=""
else
TOMCAT_USER=tomcat6
TOMCAT_GROUP=adm
PATH_TO_TOMCATCTL=/etc/init.d/tomcat6
fi
### --------------------------------------------------------------------------
### variables for writing log4j.properties
### --------------------------------------------------------------------------
LOG4J_PROPERTIES=log4j.properties
# coupled with writing of web.xml (see below)
PATH_TO_LOG4J_PROPERTIES=$SERVER_SOURCE/web/WEB-INF/classes/$LOG4J_PROPERTIES
LOG4J_LOG_FILE=$PATH_TO_TOMCAT/logs/$APP_NAME.log
### --------------------------------------------------------------------------
### variables for writing <server>/web/web.xml
### --------------------------------------------------------------------------
WEB_XML=web.xml
PATH_TO_WEB_XML=$SERVER_SOURCE/web/WEB-INF/$WEB_XML
TUTOR_SERVLET_NAME=TutorBrain
TUTOR_SERVLET_CLASS=edu.usc.k12.sys.server.TutorServlet
TUTOR_SERVLET_RUN_MODE=hsexperiment
TUTOR_SERVLET_URL=/$TUTOR_SERVLET_NAME
CPANEL_SERVLET_NAME=CPanelServlet
CPANEL_SERVLET_CLASS=edu.usc.k12.cpanel.server.CPanelServlet
CONTROL_PANEL_NAME=cpanel/controlpanel
CONTROL_PANEL_CLASS=edu.usc.k12.cpanel.server.CPanelServlet
### WO_ADMIN_SERVLET_NAME=WoAdminServlet
### WO_ADMIN_SERVLET_CLASS=edu.usc.k12.sys.server.AdminServlet
### WO_ADMIN_SERVLET_URL=/WoAdmin
SESSION_TIMEOUT=60
### --------------------------------------------------------------------------
### check to see if the log directory exists - if it does, start this run with
### a fresh, empty log file; if not, throw a FATAL message and exit
### --------------------------------------------------------------------------
if [ -d "$LOG_DIRECTORY" ];
then
  # always remove and recreate: the original only touched the file when it
  # did not exist, leaving no log file at all after the rm branch until the
  # first tee -a recreated it
  rm -f "$LOG_FILE"
  touch "$LOG_FILE"
else
  # the check above is on the directory, so report the directory (the
  # original message misleadingly said "Log file not found")
  echo [FATAL] Check your configuration. Log directory not found.
  echo [FATAL] "$LOG_DIRECTORY"
  exit 1
fi
### --------------------------------------------------------------------------
### determine if we are cleaning the client or reusing it
### --------------------------------------------------------------------------
CLEAN_CLIENT=.
# "$1" quoted for safety; a missing argument falls through to the usage arm
case "$1" in
  rebuild-client)
    CLEAN_CLIENT=1
    echo [INFO] Will rebuild client files | tee -a "$LOG_FILE" ;;
  reuse-client) CLEAN_CLIENT=0
    echo [INFO] Will attempt to reuse client files | tee -a "$LOG_FILE" ;;
  *)
    echo "Usage: deploy.sh {rebuild-client | reuse-client}"
    exit 1 ;;
esac
### --------------------------------------------------------------------------
### do _not_ run as root
### --------------------------------------------------------------------------
WHOAMI=$(whoami)
# quote "$WHOAMI": an empty expansion would otherwise be a syntax error in [ ]
if [ "$WHOAMI" = root ];
then
  echo [FATAL] Do NOT run as root. | tee -a "$LOG_FILE"
  echo exiting | tee -a "$LOG_FILE"
  exit 1
fi
### --------------------------------------------------------------------------
### check to see if the tomcat directory exists
### --------------------------------------------------------------------------
if [ ! -d $PATH_TO_TOMCAT ];
then
echo [FATAL] Tomcat directory does not exist: $PATH_TO_TOMCAT | tee -a $LOG_FILE
echo exiting | tee -a $LOG_FILE
exit 1
fi
### --------------------------------------------------------------------------
### check to see if the weborb directory exists
### --------------------------------------------------------------------------
if [ ! -d $PATH_TO_TOMCAT/webapps/$WEBORB_DIRECTORY_NAME ];
then
echo [FATAL] weborb is not installed. Install it before continuing. | tee -a $LOG_FILE
echo exiting | tee -a $LOG_FILE
exit 1
fi
### --------------------------------------------------------------------------
### check to see if the tomcatctl program exists
### --------------------------------------------------------------------------
echo [INFO] Testing for tomcatctl in $PATH_TO_TOMCATCTL... | tee -a $LOG_FILE
if [ -e $PATH_TO_TOMCATCTL ]; then
echo [INFO] Found $PATH_TO_TOMCATCTL | tee -a $LOG_FILE
else
echo [FATAL] $PATH_TO_TOMCAT does not exist | tee -a $LOG_FILE
echo [INFO] exiting | tee -a $LOG_FILE
exit 1
fi
### --------------------------------------------------------------------------
### check to see if the server and client source exist
### --------------------------------------------------------------------------
if [ ! -d $SERVER_SOURCE ];
then
echo [FATAL] Server source directory does not exist: $SERVER_SOURCE | tee -a $LOG_FILE
echo exiting | tee -a $LOG_FILE
exit 1
fi
if [ ! -d $CLIENT_SOURCE ];
then
echo [FATAL] Server source directory does not exist: $CLIENT_SOURCE | tee -a $LOG_FILE
echo exiting | tee -a $LOG_FILE
exit 1
fi
### --------------------------------------------------------------------------
### check to see if the tmp directory exists and test write permissions
### --------------------------------------------------------------------------
if [ ! -d $TMP_DIRECTORY ];
then
echo [FATAL] Tmp directory does not exist: $TMP_DIRECTORY | tee -a $LOG_FILE
echo exiting | tee -a $LOG_FILE
exit 1
fi
### --------------------------------------------------------------------------
echo [INFO] Testing write permissions on $TMP_DIRECTORY... | tee -a $LOG_FILE
touch $CLIENT_TMP_DIRECTORY > /dev/null
if [ ! -e $CLIENT_TMP_DIRECTORY ];
then
echo [FATAL] Can not write to $CLIENT_TMP_DIRECTORY | tee -a $LOG_FILE
echo exiting | tee -a $LOG_FILE
exit 1
fi
rm -rf $CLIENT_TMP_DIRECTORY
### --------------------------------------------------------------------------
### Setting up Maven environment
### --------------------------------------------------------------------------
if [ "$OS_NAME" = "Darwin" ];
then
MAVEN=/opt/local/bin/mvn
MAVEN_REPOSITORY=/Users/$USERNAME/.m2/repository
else
MAVEN=/usr/local/maven/bin/mvn
MAVEN_REPOSITORY=/home/$USERNAME/.m2/repository
fi
### --------------------------------------------------------------------------
echo [INFO] Testing for $MAVEN_REPOSITORY... | tee -a $LOG_FILE
if [ ! -e $MAVEN_REPOSITORY ];
then
echo [WARNING] $MAVEN_REPOSITORY does not exist, running maven for the first time... | tee -a $LOG_FILE
echo [INFO] Changing directory to $SERVER_SOURCE | tee -a $LOG_FILE
cd $SERVER_SOURCE/
$MAVEN install
fi
### --------------------------------------------------------------------------
### figure out which version of mysql-connector-java maven will use
### this jar will be placed in the $PATH_TO_TOMCAT/lib once the war is built
### --------------------------------------------------------------------------
MYSQL_CONNECTOR_DIRECTORY=$MAVEN_REPOSITORY/mysql/mysql-connector-java
# NOTE(review): 'ls | sort -r | head -1' sorts version strings
# lexicographically, so e.g. 5.1.9 would be picked over 5.1.10 -- verify
# against the versions actually present in the local maven repository.
MYSQL_CONNECTOR_HIGHEST_VERSION=`ls $MYSQL_CONNECTOR_DIRECTORY | sort -r | head -1`
PATH_TO_MYSQL_CONNECTOR=$MYSQL_CONNECTOR_DIRECTORY/$MYSQL_CONNECTOR_HIGHEST_VERSION/mysql-connector-java-$MYSQL_CONNECTOR_HIGHEST_VERSION.jar
echo [INFO] Testing for mysql-connector at path $PATH_TO_MYSQL_CONNECTOR... | tee -a $LOG_FILE
if [ -e $PATH_TO_MYSQL_CONNECTOR ];
then
    echo [INFO] Found $PATH_TO_MYSQL_CONNECTOR | tee -a $LOG_FILE
else
    echo [FATAL] $PATH_TO_MYSQL_CONNECTOR does not exist | tee -a $LOG_FILE
    echo [INFO] You may need to run mvn install in the $SERVER_SOURCE directory | tee -a $LOG_FILE
    echo [INFO] exiting | tee -a $LOG_FILE
    exit 1
fi
### --------------------------------------------------------------------------
### add provenance information to log file
### --------------------------------------------------------------------------
echo "[INFO] recording svn info SERVER_SOURCE to log file"
svn info $SERVER_SOURCE >> $LOG_FILE
echo "[INFO] recording svn info CLIENT_SOURCE to log file"
svn info $CLIENT_SOURCE >> $LOG_FILE
### --------------------------------------------------------------------------
### if we are cleaning the client, we need to grab the web/client/.svn directory
### to preserve the svn status of the web/client directory (which is empty)
### so we move the .svn directory to tmp/client-svn-<data> and move it back
### when we are done
### --------------------------------------------------------------------------
CLIENT_SVN_TMP_DIRECTORY=$TMP_DIRECTORY/client-svn-`date +%Y%m%d%H%M%S`
echo [INFO] Testing for $CLIENT_DESTINATION... | tee -a $LOG_FILE
if [ -d $CLIENT_DESTINATION ];
then
if [ $CLEAN_CLIENT = 1 ];
then
echo [INFO] Moving $CLIENT_DESTINATION/.svn directory to $CLIENT_SVN_TMP_DIRECTORY | tee -a $LOG_FILE
mv $CLIENT_DESTINATION/.svn $CLIENT_SVN_TMP_DIRECTORY
echo [INFO] Deleting $CLIENT_DESTINATION | tee -a $LOG_FILE
rm -rf $CLIENT_DESTINATION/*
else
echo [INFO] Will reuse client files | tee -a $LOG_FILE
fi
else
echo [WARNING] Nothing to be done: $CLIENT_DESTINATION does not exist | tee -a $LOG_FILE
echo [WARNING] Cleaning client despite $1 being passed | tee -a $LOG_FILE
CLEAN_CLIENT=1
fi
### --------------------------------------------------------------------------
### since we want to be able to walk away from the script while it is running
### we execute a trivial sudo command to give us sudo privileges before
### the long copy process
### --------------------------------------------------------------------------
sudo date > /dev/null
### --------------------------------------------------------------------------
if [ $CLEAN_CLIENT = 1 ];
then
# copy the client files to web/client
echo [INFO] Copying client sources to $CLIENT_DESTINATION | tee -a $LOG_FILE
cp -r $CLIENT_SOURCE/* $CLIENT_DESTINATION
# report what files we are going to remove
echo [INFO] Removing the following files and directories from $CLIENT_DESTINATION | tee -a $LOG_FILE
for i in *.fla *.flp *.as *.dv *.app *.exe *.mx thumbs.db .DS_STORE .svn/ components/ include/ com/ ;
do
echo "[INFO] - " $i | tee -a $LOG_FILE
done
# delete the files with find
find $CLIENT_DESTINATION \( -name "*.fla" -o -name "*.flp" -o -name "*.as" -o -name "*.dv" -o -name ".svn" -o -name "thumbs.db" -o -name ".DS_STORE" -o -name "components" -o -name "include" -o -name "com" -o -name "*.app" -o -name "*.exe" -o -name "*.mx" \) | xargs rm -rf > /dev/null
# move the .svn directory back to the web/client directory
echo [INFO] Moving $CLIENT_SVN_TMP_DIRECTORY to $CLIENT_DESTINATION/.svn | tee -a $LOG_FILE
mv $CLIENT_SVN_TMP_DIRECTORY $CLIENT_DESTINATION/.svn
fi
### --------------------------------------------------------------------------
### A message to indicate that a file has been generated by this script
### --------------------------------------------------------------------------
THISHOST=`hostname`
AUTODATE=`date`
MESSAGE0="This file has been generated by deploy.sh located on $THISHOST at"
MESSAGE1="$SERVER_SOURCE/scripts/deploy.sh"
MESSAGE2="on $AUTODATE."
MESSAGE3="Recover with svn revert."
### --------------------------------------------------------------------------
### write the client config.xml file
### --------------------------------------------------------------------------
echo [INFO] Removing $PATH_TO_CLIENT_CONFIG | tee -a $LOG_FILE
echo [INFO] Recover with svn revert | tee -a $LOG_FILE
rm -f $PATH_TO_CLIENT_CONFIG
echo [INFO] Writing new $PATH_TO_CLIENT_CONFIG | tee -a $LOG_FILE
echo [INFO] === BEGIN config.xml === | tee -a $LOG_FILE
echo \<\?xml version=\"1.0\" encoding=\"ISO-8859-1\"\?\> | tee -a $PATH_TO_CLIENT_CONFIG
echo "<!-- $MESSAGE0" | tee -a $PATH_TO_CLIENT_CONFIG
echo " $MESSAGE1" | tee -a $PATH_TO_CLIENT_CONFIG
echo " $MESSAGE2" | tee -a $PATH_TO_CLIENT_CONFIG
echo " $MESSAGE3 -->" | tee -a $PATH_TO_CLIENT_CONFIG
echo \<config\> | tee -a $PATH_TO_CLIENT_CONFIG
echo " "\<data name=\"gatewayURI\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "value=\"$GATEWAY_URI\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "type=\"string\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
echo " "\<data name=\"debug\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "value=\"$CLIENT_DEBUG_VALUE\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "type=\"bool\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
echo " "\<data name=\"chatServer\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "value=\"$CHAT_HOST\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "type=\"string\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
echo " "\<data name=\"chatServerPort\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "value=\"$CHAT_PORT\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "type=\"number\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
echo " "\<data name=\"oldGatewayURI\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "value=\"http://$OLD_GATEWAY_HOST:$OLD_GATEWAY_PORT/$APP_NAME/$OLD_GATEWAY_SERVLET\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "type=\"string\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
echo " "\<data name=\"signupURL\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "value=\"http://$SIGNUP_HOST:$SIGNUP_PORT/$SIGNUP_SERVLET\?action=$SIGNUP_ACTION\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "type=\"string\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
#echo " "\<data name=\"disabledItems\" | tee -a $PATH_TO_CLIENT_CONFIG
#echo " "value=\"animalWatch\" | tee -a $PATH_TO_CLIENT_CONFIG
#echo " "type=\"string\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
echo " "\<data name=\"motd\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "value=\"Welcome! Explore the system. Some features might not be available at this time.\" | tee -a $PATH_TO_CLIENT_CONFIG
echo " "type=\"string\" /\> | tee -a $PATH_TO_CLIENT_CONFIG
echo \</config\> | tee -a $PATH_TO_CLIENT_CONFIG
cat $PATH_TO_CLIENT_CONFIG >> $LOG_FILE
echo [INFO] === END config.xml | tee -a $LOG_FILE
### --------------------------------------------------------------------------
### write the web/META-INF/context.xml file
### --------------------------------------------------------------------------
echo [INFO] Removing $PATH_TO_CONTEXT | tee -a $LOG_FILE
echo [INFO] Recover with svn revert | tee -a $LOG_FILE
rm -f $PATH_TO_CONTEXT
echo [INFO] Writing new $PATH_TO_CONTEXT | tee -a $LOG_FILE
echo [INFO] === BEGIN context.xml === | tee -a $LOG_FILE
# Emit the Tomcat context.xml (datasource definition) to stdout AND append it
# to $PATH_TO_CONTEXT.  A single here-document replaces the former
# one-echo-|-tee-per-line chain: identical output, one file append instead of
# ~30 separate opens.
# (factory="org.apache.tomcat.dbcp.dbcp.BasicDataSourceFactory" is not
#  required — left as a reminder; the db-qualified url variant would be
#  url="jdbc:mysql://$SERVER_MYSQL_HOST:3306/$SERVER_MYSQL_DB_NAME")
tee -a $PATH_TO_CONTEXT <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<!-- $MESSAGE0
     $MESSAGE1
     $MESSAGE2
     $MESSAGE3 -->
  <!-- Note: the maxActive attribute is linked to:
       1. max_connections in my.cnf
       2. maxThreads in tomcat/conf/server.xml thread pool Executor
       If you make a change anywhere, you need to adjust the settings
       everywhere (including deploy.sh)
  -->
<Context docbase="$APP_NAME" path="/$APP_NAME">
    <Resource auth="Container"
            type="javax.sql.DataSource"
            driverClassName="com.mysql.jdbc.Driver"
            maxActive="100"
            maxIdle="25"
            minIdle="20"
            maxWait="3000"
            removeAbandoned="true"
            removeAbandonedTimeout="60"
            logAbandoned="true"
            validationQuery="SELECT 1"
            testOnBorrow="true"
            testWhileIdle="true"
            timeBetweenEvictionRunsMillis="10000"
            minEvictableIdleTimeMillis="60000"
            name="jdbc/$SERVER_MYSQL_DB_NAME"
            username="$SERVER_MYSQL_USER"
            password="$SERVER_MYSQL_PASS"
            url="jdbc:mysql://$SERVER_MYSQL_HOST:3306" />
</Context>
EOF
cat $PATH_TO_CONTEXT >> $LOG_FILE
echo [INFO] === END context.xml === | tee -a $LOG_FILE
### --------------------------------------------------------------------------
### write the web/index.html file (redirects the app root to the client dir)
### --------------------------------------------------------------------------
echo [INFO] Removing $PATH_TO_INDEX_HTML | tee -a $LOG_FILE
echo [INFO] Recover with svn revert | tee -a $LOG_FILE
rm -f $PATH_TO_INDEX_HTML
echo [INFO] Writing new $PATH_TO_INDEX_HTML | tee -a $LOG_FILE
echo [INFO] === BEGIN index.html === | tee -a $LOG_FILE
# Single here-document instead of one echo|tee pipeline per line
# (same stdout + file content, one append instead of ten).
tee -a $PATH_TO_INDEX_HTML <<EOF
<html>
<!-- $MESSAGE0
     $MESSAGE1
     $MESSAGE2
     $MESSAGE3 -->
  <head>
    <meta http-equiv="refresh"
          content="0;$PATH_TO_CLIENT_DIRECTORY/">
  </head>
</html>
EOF
cat $PATH_TO_INDEX_HTML >> $LOG_FILE
echo [INFO] === END index.html === | tee -a $LOG_FILE
### --------------------------------------------------------------------------
### write the /web/WEB-INF/classes/log4j.properties file
### (rolling file appender; pattern: date, level, Class.method line - message)
### --------------------------------------------------------------------------
echo [INFO] Removing $PATH_TO_LOG4J_PROPERTIES | tee -a $LOG_FILE
echo [INFO] Recover with svn revert | tee -a $LOG_FILE
rm -f $PATH_TO_LOG4J_PROPERTIES
echo [INFO] Writing new $PATH_TO_LOG4J_PROPERTIES | tee -a $LOG_FILE
echo [INFO] === BEGIN $LOG4J_PROPERTIES === | tee -a $LOG_FILE
# Single here-document instead of one echo|tee pipeline per line.
tee -a $PATH_TO_LOG4J_PROPERTIES <<EOF
### $MESSAGE0
### $MESSAGE1
### $MESSAGE2
### $MESSAGE3
log4j.rootLogger=$LOG4J_LOG_LEVEL, wayangServer
log4j.appender.wayangServer=org.apache.log4j.RollingFileAppender
log4j.appender.wayangServer.File=$LOG4J_LOG_FILE
log4j.appender.wayangServer.MaxFileSize=10MB
log4j.appender.wayangServer.MaxBackupIndex=10
log4j.appender.wayangServer.layout=org.apache.log4j.PatternLayout
log4j.appender.wayangServer.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss:SSS} %5p (%C{1}.%M %L) - %m%n
EOF
cat $PATH_TO_LOG4J_PROPERTIES >> $LOG_FILE
echo [INFO] === END $LOG4J_PROPERTIES === | tee -a $LOG_FILE
### --------------------------------------------------------------------------
### write the web/WEB-INF/web.xml file
### (deployment descriptor: datasource resource-ref, chat port context-param,
###  log4j startup servlet, variates random-seed servlet, tutor servlet,
###  session configuration and mime mappings)
### --------------------------------------------------------------------------
echo [INFO] Removing $PATH_TO_WEB_XML | tee -a $LOG_FILE
echo [INFO] Recover with svn revert | tee -a $LOG_FILE
rm -f $PATH_TO_WEB_XML
echo [INFO] Writing new $PATH_TO_WEB_XML | tee -a $LOG_FILE
echo [INFO] === BEGIN $WEB_XML === | tee -a $LOG_FILE
# One here-document per contiguous section replaces the former
# one-echo-|-tee-per-line chain: identical stdout/file output.
tee -a $PATH_TO_WEB_XML <<EOF
<?xml version="1.0" encoding="UTF-8"?>
  <!--
  <!DOCTYPE web-app
    PUBLIC "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
    "http://java.sun.com/dtd/web-app_2_3.dtd">
  -->
<!-- $MESSAGE0
     $MESSAGE1
     $MESSAGE2
     $MESSAGE3 -->
<web-app version="2.4" xmlns="http://java.sun.com/xml/ns/j2ee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd">
  <display-name>$APP_NAME</display-name>
  <resource-ref>
    <description>$APP_NAME DB</description>
    <res-ref-name>jdbc/$SERVER_MYSQL_DB_NAME</res-ref-name>
    <res-type>javax.sql.DataSource</res-type>
    <res-auth>Container</res-auth>
  </resource-ref>
  <context-param>
    <param-name>chat.port</param-name><param-value>$CHAT_PORT</param-value>
  </context-param>
  <servlet>
    <servlet-name>log4jSetup</servlet-name>
    <servlet-class>edu.usc.k12.sys.server.Log4jSetupServlet</servlet-class>
    <init-param>
      <param-name>log4j.properties</param-name>
      <param-value>$PATH_TO_TOMCAT/webapps/$APP_NAME/WEB-INF/classes/$LOG4J_PROPERTIES</param-value>
    </init-param>
    <load-on-startup>0</load-on-startup>
  </servlet>
  <servlet>
    <servlet-name>variatesSetup</servlet-name>
    <servlet-class>edu.usc.k12.sys.server.InitializeVariatesServlet</servlet-class>
    <init-param>
      <param-name>randomSeed</param-name>
      <param-value>-1</param-value>
    </init-param>
    <load-on-startup>1</load-on-startup>
  </servlet>
  <servlet>
    <servlet-name>$TUTOR_SERVLET_NAME</servlet-name>
    <servlet-class>$TUTOR_SERVLET_CLASS</servlet-class>
    <init-param>
      <param-name>runmode</param-name>
      <param-value>$TUTOR_SERVLET_RUN_MODE</param-value>
    </init-param>
  </servlet>
  <servlet-mapping>
    <servlet-name>$TUTOR_SERVLET_NAME</servlet-name>
    <url-pattern>$TUTOR_SERVLET_URL</url-pattern>
  </servlet-mapping>
EOF
### --------------------------------------------------------------------------
### The CPanel servlet, Control Panel and WO admin servlet are currently
### disabled.  Re-enable by emitting <servlet> entries before </web-app>
### using $CPANEL_SERVLET_NAME/$CPANEL_SERVLET_CLASS,
### $CONTROL_PANEL_NAME/$CONTROL_PANEL_CLASS and
### $WO_ADMIN_SERVLET_NAME/$WO_ADMIN_SERVLET_CLASS (plus a
### <servlet-mapping> to $WO_ADMIN_SERVLET_URL for the latter).
### --------------------------------------------------------------------------
### session configuration and mime mappings
### --------------------------------------------------------------------------
tee -a $PATH_TO_WEB_XML <<EOF
  <session-config>
    <session-timeout>$SESSION_TIMEOUT</session-timeout>
  </session-config>
  <mime-mapping><extension>txt</extension><mime-type>text/plain</mime-type></mime-mapping>
  <mime-mapping><extension>htm</extension><mime-type>text/html</mime-type></mime-mapping>
  <mime-mapping><extension>gif</extension><mime-type>image/gif</mime-type></mime-mapping>
  <mime-mapping><extension>jpeg</extension><mime-type>image/jpeg</mime-type></mime-mapping>
  <mime-mapping><extension>html</extension><mime-type>text/html</mime-type></mime-mapping>
  <mime-mapping><extension>jpg</extension><mime-type>image/jpeg</mime-type></mime-mapping>
</web-app>
EOF
cat $PATH_TO_WEB_XML >> $LOG_FILE
echo [INFO] === END $WEB_XML === | tee -a $LOG_FILE
### --------------------------------------------------------------------------
### run maven clean if this is a production build, otherwise, just run
### maven install
### --------------------------------------------------------------------------
echo [INFO] Changing directory to $SERVER_SOURCE | tee -a $LOG_FILE
cd $SERVER_SOURCE/
if [ "$BUILD_TYPE" = production ];
then
    echo [INFO] installation type is $BUILD_TYPE - calling mvn clean | tee -a $LOG_FILE
    $MAVEN clean | tee -a $LOG_FILE
else
    echo [INFO] installation type is $BUILD_TYPE - skipping mvn clean | tee -a $LOG_FILE
fi
$MAVEN compile war:war | tee -a $LOG_FILE
# BUGFIX: "$?" after a pipeline is the status of the *last* stage (tee),
# which is virtually always 0, so a failed maven build was never detected.
# PIPESTATUS[0] holds maven's own exit status.
MVN_EXIT=${PIPESTATUS[0]}
if [ $MVN_EXIT -eq 0 ];
then
    echo [INFO] maven build succeeded | tee -a $LOG_FILE
else
    echo [FATAL] maven build failed | tee -a $LOG_FILE
    echo [FATAL] exiting code with code $MVN_EXIT | tee -a $LOG_FILE
    exit $MVN_EXIT
fi
### --------------------------------------------------------------------------
### stop tomcat
### On Darwin (macOS) only stop Tomcat if the PID recorded in $CATALINA_PID
### still corresponds to a running process; elsewhere stop unconditionally.
### --------------------------------------------------------------------------
if [ "$OS_NAME" = "Darwin" ];
then
    if [ -e $CATALINA_PID ];
    then
        PID=`cat $CATALINA_PID`
        # NOTE(review): `grep $PID` matches the digits anywhere on the ps
        # line (other PIDs, memory sizes), so this liveness check can give
        # a false positive — worst case we call stop on a dead Tomcat.
        FOUND_PROCESS=`ps auxw | grep $PID | grep -v grep`
        if [ -z "$FOUND_PROCESS" ];
        then
            echo [WARNING] Can not find process with $PID - assuming Tomcat is not running
        else
            echo [INFO] Stopping Tomcat | tee -a $LOG_FILE
            sudo $PATH_TO_TOMCATCTL stop $TOMCAT_STOP_ARGS > /dev/null # suppress output
        fi
    else
        echo [WARNING] $CATALINA_PID does not exist - assuming Tomcat is not running
    fi
else
    echo [INFO] Stopping Tomcat | tee -a $LOG_FILE
    sudo $PATH_TO_TOMCATCTL stop $TOMCAT_STOP_ARGS > /dev/null # suppress output
fi
### --------------------------------------------------------------------------
### now we let's rm and/or archive the old build
### Each deploy gets its own timestamped sub-directory under
### $ARCHIVED_BUILD_DIRECTORY; it is only created when archiving is enabled.
### --------------------------------------------------------------------------
INNER_ARCHIVE_DIR=$ARCHIVED_BUILD_DIRECTORY/`date +%Y-%m-%d-%H%M%S`
### --------------------------------------------------------------------------
### move or delete the .war
### --------------------------------------------------------------------------
if [ "$ARCHIVE_OLD_BUILD" = "y" ];
then
    echo [INFO] Creating archive directory $INNER_ARCHIVE_DIR... | tee -a $LOG_FILE
    mkdir -p $INNER_ARCHIVE_DIR
fi
echo [INFO] Testing for $PATH_TO_TOMCAT/webapps/$APP_NAME.war... | tee -a $LOG_FILE
if [ -e $PATH_TO_TOMCAT/webapps/$APP_NAME.war ];
then
    if [ "$ARCHIVE_OLD_BUILD" = "y" ];
    then
        # archive: keep the previous war for rollback/forensics
        echo [INFO] Archiving $PATH_TO_TOMCAT/webapps/$APP_NAME.war | tee -a $LOG_FILE
        sudo mv $PATH_TO_TOMCAT/webapps/$APP_NAME.war $INNER_ARCHIVE_DIR
    else
        echo [INFO] Deleting $PATH_TO_TOMCAT/webapps/$APP_NAME.war | tee -a $LOG_FILE
        sudo rm $PATH_TO_TOMCAT/webapps/$APP_NAME.war
    fi
else
    echo [WARNING] Nothing to be done: $PATH_TO_TOMCAT/webapps/$APP_NAME.war does not exist | tee -a $LOG_FILE
fi
### --------------------------------------------------------------------------
### delete the webapps/application directory
### When archiving, first salvage the previous deploy.log (and relax its
### mode so archive readers can open it); the exploded webapp is always
### removed so Tomcat re-expands the fresh war.
### --------------------------------------------------------------------------
echo [INFO] Testing for $PATH_TO_TOMCAT/webapps/$APP_NAME... | tee -a $LOG_FILE
if [ -d $PATH_TO_TOMCAT/webapps/$APP_NAME ];
then
    if [ "$ARCHIVE_OLD_BUILD" = "y" ];
    then
        echo [INFO] Moving $PATH_TO_TOMCAT/webapps/$APP_NAME/deploy.log to $INNER_ARCHIVE_DIR... | tee -a $LOG_FILE
        sudo mv $PATH_TO_TOMCAT/webapps/$APP_NAME/deploy.log $INNER_ARCHIVE_DIR
        # change the mod from 600
        sudo chmod 660 $INNER_ARCHIVE_DIR/deploy.log
    fi
    echo [INFO] Deleting $PATH_TO_TOMCAT/webapps/$APP_NAME | tee -a $LOG_FILE
    sudo rm -r $PATH_TO_TOMCAT/webapps/$APP_NAME
else
    echo [WARNING] Nothing to be done: $PATH_TO_TOMCAT/webapps/$APP_NAME does not exist | tee -a $LOG_FILE
fi
### --------------------------------------------------------------------------
### testing for context files in tomcat/conf/Catalina/localhost
### The two stanzas below were byte-identical except for the path, so the
### pattern is factored into one helper.
### --------------------------------------------------------------------------
# Archive (mv) or delete a single file depending on $ARCHIVE_OLD_BUILD.
# Arguments: $1 - absolute path of the file to archive/remove
# Globals:   ARCHIVE_OLD_BUILD, INNER_ARCHIVE_DIR, LOG_FILE (read)
archive_or_remove_file() {
    local target=$1
    echo [INFO] Testing for $target... | tee -a $LOG_FILE
    if [ -e "$target" ];
    then
        if [ "$ARCHIVE_OLD_BUILD" = "y" ];
        then
            echo [INFO] Moving $target to $INNER_ARCHIVE_DIR... | tee -a $LOG_FILE
            sudo mv "$target" $INNER_ARCHIVE_DIR
        else
            echo [INFO] Deleting $target | tee -a $LOG_FILE
            sudo rm "$target"
        fi
    else
        echo [WARNING] Nothing to be done: $target does not exist | tee -a $LOG_FILE
    fi
}
archive_or_remove_file $PATH_TO_TOMCAT/conf/Catalina/localhost/$APP_NAME.xml
### --------------------------------------------------------------------------
archive_or_remove_file $PATH_TO_TOMCAT/conf/Catalina/localhost/weborb.xml
### --------------------------------------------------------------------------
### change the ownership of the archive directory
### --------------------------------------------------------------------------
if [ "$ARCHIVE_OLD_BUILD" = "y" ];
then
    echo [INFO] Changing ownership of $ARCHIVED_BUILD_DIRECTORY... | tee -a $LOG_FILE
    sudo chown -R $USERNAME:$USERNAME $ARCHIVED_BUILD_DIRECTORY
fi
### --------------------------------------------------------------------------
### move the new war from the build directory to the tomcat/webapps dir
### --------------------------------------------------------------------------
echo [INFO] Copying $SERVER_SOURCE/target/$APP_NAME.war to $PATH_TO_TOMCAT/webapps/ | tee -a $LOG_FILE
sudo cp -r $SERVER_SOURCE/target/$APP_NAME.war $PATH_TO_TOMCAT/webapps/
### --------------------------------------------------------------------------
### copy context.xml to weborb/META-INF/context.xml and
### tomcat/conf/Catalina/localhost/weborb.xml
### will chown later
### --------------------------------------------------------------------------
echo [INFO] Copying $PATH_TO_CONTEXT to $PATH_TO_TOMCAT/webapps/$WEBORB_DIRECTORY_NAME/META-INF/context.xml | tee -a $LOG_FILE
sudo cp $PATH_TO_CONTEXT $PATH_TO_TOMCAT/webapps/$WEBORB_DIRECTORY_NAME/META-INF/context.xml
echo [INFO] Copying $PATH_TO_CONTEXT to $PATH_TO_TOMCAT/conf/Catalina/localhost/weborb.xml | tee -a $LOG_FILE
# NOTE(review): the "| tee -a" on the cp below is a no-op (cp prints
# nothing on success); harmless, left as-is.
sudo cp $PATH_TO_CONTEXT $PATH_TO_TOMCAT/conf/Catalina/localhost/weborb.xml | tee -a $LOG_FILE
### --------------------------------------------------------------------------
### installing weborb
### Copy every package directory of the freshly compiled classes into the
### WebORB webapp's classes directory.
### --------------------------------------------------------------------------
PATH_TO_APP_CLASSES=$SERVER_SOURCE/target/classes
PATH_TO_WEBORB_CLASSES=$PATH_TO_TOMCAT/webapps/$WEBORB_DIRECTORY_NAME/WEB-INF/classes/ # [pace]
echo [INFO] Installing files for WebORB... | tee -a $LOG_FILE
# Iterate with a glob instead of parsing `ls` output (robust against
# whitespace in names; the -d test both filters files out and skips the
# literal pattern when nothing matches).
for dir in $PATH_TO_APP_CLASSES/*;
do
    if [ -d "$dir" ];
    then
        echo [INFO] Copying $dir to $PATH_TO_WEBORB_CLASSES | tee -a $LOG_FILE
        sudo cp -r "$dir" $PATH_TO_WEBORB_CLASSES
    fi
done
### --------------------------------------------------------------------------
### install the latest mysql connector jar
### Remove any previously installed connector jar(s), then install the one
### pointed to by $PATH_TO_MYSQL_CONNECTOR.
### --------------------------------------------------------------------------
echo [INFO] testing for mysql-connector jar in $PATH_TO_TOMCAT/lib... | tee -a $LOG_FILE
# BUGFIX: the command substitution must be quoted — with more than one
# matching jar the unquoted expansion made [ -z ... ] a syntax error
# ("binary operator expected") and the stale jars were never removed.
if [ -z "$(ls $PATH_TO_TOMCAT/lib | grep mysql-connector)" ];
then
    echo [INFO] No jars found | tee -a $LOG_FILE
else
    for jar in $(ls $PATH_TO_TOMCAT/lib | grep mysql-connector);
    do
        echo [INFO] Found $jar | tee -a $LOG_FILE
        echo [INFO] Deleting $PATH_TO_TOMCAT/lib/$jar | tee -a $LOG_FILE
        sudo rm $PATH_TO_TOMCAT/lib/$jar
    done
fi
echo [INFO] Installing $PATH_TO_MYSQL_CONNECTOR to $PATH_TO_TOMCAT/lib | tee -a $LOG_FILE
sudo cp $PATH_TO_MYSQL_CONNECTOR $PATH_TO_TOMCAT/lib
### --------------------------------------------------------------------------
### fix index.html in ROOT, if this is production
### (production serves a redirect from Tomcat's ROOT webapp straight to the
###  application client; dev leaves ROOT alone)
### --------------------------------------------------------------------------
echo [INFO] checking installation type "{dev | production}"... | tee -a $LOG_FILE
echo [INFO] found installation type $BUILD_TYPE | tee -a $LOG_FILE
if [ "$BUILD_TYPE" = "production" ];
then
    TMP_ROOT_INDEX_HTML=/tmp/index-$(date +%Y%m%d%H%M%S).html
    echo [INFO] writing temporary index.html file to $TMP_ROOT_INDEX_HTML | tee -a $LOG_FILE
    PATH_TO_APP_CLIENT=$APP_NAME/client
    echo [INFO] Removing $PATH_TO_TOMCAT/webapps/ROOT/index.html | tee -a $LOG_FILE
    if [ -e $PATH_TO_TOMCAT/webapps/ROOT/index.html ];
    then
        sudo rm -f $PATH_TO_TOMCAT/webapps/ROOT/index.html
    else
        echo [WARNING] there is no $PATH_TO_TOMCAT/webapps/ROOT/index.html | tee -a $LOG_FILE
    fi
    echo [INFO] Writing new $TMP_ROOT_INDEX_HTML | tee -a $LOG_FILE
    echo [INFO] === BEGIN index.html === | tee -a $LOG_FILE
    # Single here-document instead of one echo|tee pipeline per line.
    tee -a $TMP_ROOT_INDEX_HTML <<EOF
<html>
<!-- $MESSAGE0
     $MESSAGE1
     $MESSAGE2 -->
  <head>
    <meta http-equiv="refresh"
          content="0;$PATH_TO_APP_CLIENT/">
  </head>
</html>
EOF
    cat $TMP_ROOT_INDEX_HTML >> $LOG_FILE
    echo [INFO] === END index.html === | tee -a $LOG_FILE
    echo [INFO] copying $TMP_ROOT_INDEX_HTML to $PATH_TO_TOMCAT/webapps/ROOT | tee -a $LOG_FILE
    sudo cp $TMP_ROOT_INDEX_HTML $PATH_TO_TOMCAT/webapps/ROOT/index.html
    rm $TMP_ROOT_INDEX_HTML
else
    echo [INFO] Nothing to be done for $BUILD_TYPE | tee -a $LOG_FILE
fi
### --------------------------------------------------------------------------
### changing owner:group for the tomcat directory
### --------------------------------------------------------------------------
echo [INFO] Changing owner:group to $TOMCAT_USER:$TOMCAT_GROUP for $PATH_TO_TOMCAT | tee -a $LOG_FILE
sudo chown -RL $TOMCAT_USER:$TOMCAT_GROUP $PATH_TO_TOMCAT
########################################################################
echo [INFO] Starting Tomcat... | tee -a $LOG_FILE
sudo $PATH_TO_TOMCATCTL start $TOMCAT_START_ARGS > /dev/null # suppressing output
# NOTE(review): this wait has no timeout — if the war fails to deploy the
# script spins here forever.  Consider bounding the loop.
while [ ! -d $PATH_TO_TOMCAT/webapps/$APP_NAME ]
do
    echo [INFO] Waiting for $PATH_TO_TOMCAT/webapps/$APP_NAME.war to deploy... | tee -a $LOG_FILE
    sleep 1
done
########################################################################
echo [INFO] $PATH_TO_TOMCAT/webapps/$APP_NAME deployed | tee -a $LOG_FILE
echo [INFO] Removing any backup files | tee -a $LOG_FILE
if [ "$OS_NAME" = "Darwin" ];
then
    find $PATH_TO_TOMCAT/webapps/$APP_NAME \( -name "#*" -o -name "*#" -o -name "*~" \) | xargs rm -rf
else
    find $PATH_TO_TOMCAT/webapps/$APP_NAME \( -name "#*" -o -name "*#" -o -name "*~" \) -exec rm -rf {} \; > /dev/null
fi
echo [INFO] $APP_NAME successfully installed to $PATH_TO_TOMCAT/webapps/$APP_NAME | tee -a $LOG_FILE
echo [INFO] Copying $LOG_FILE to $PATH_TO_TOMCAT/webapps/$APP_NAME | tee -a $LOG_FILE
# BUGFIX: the copy destination and the chmod below hard-coded "woj" while
# the rest of the script (and the log message above) use $APP_NAME.
sudo cp $LOG_FILE $PATH_TO_TOMCAT/webapps/$APP_NAME/
### --------------------------------------------------------------------------
### making deploy.log unreadable
### --------------------------------------------------------------------------
sudo chmod 600 $PATH_TO_TOMCAT/webapps/$APP_NAME/deploy.log
DEPLOY_DATE=$(date +%Y%m%d%H%M%S)
echo [INFO] Copying $LOG_FILE to $SERVER_SOURCE/logs/deploy/deploy-$DEPLOY_DATE.log
cp $LOG_FILE $SERVER_SOURCE/logs/deploy/deploy-$DEPLOY_DATE.log
exit 0
### --------------------------------------------------------------------------
### dead sea
### --------------------------------------------------------------------------
| true |
c834b9b2d658bd2844a36dda8ce027f64a74cf13 | Shell | shamizi/ft_server | /srcs/start.sh | UTF-8 | 1,370 | 2.734375 | 3 | [] | no_license | mkdir /var/www/localhost
#mySQL
# Create the WordPress database and grant a dedicated user all rights on it.
# NOTE(review): credentials are hard-coded here and must match wp-config.php.
service mysql start
echo "CREATE DATABASE wordpress DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;" | mysql -u root
echo "GRANT ALL ON wordpress.* TO 'wordpress_user'@'localhost' IDENTIFIED BY 'password';" | mysql -u root
echo "FLUSH PRIVILEGES;" | mysql -u root
#CERTIFICAT SSL
# Self-signed certificate for https://localhost (valid one year).
mkdir /etc/nginx/ssl
openssl req -newkey rsa:4096 -x509 -sha256 -days 365 -nodes -out /etc/nginx/ssl/localhost.pem -keyout /etc/nginx/ssl/localhost.key -subj "/C=FR/ST=Paris/L=Paris/O=42 School/OU=said/CN=localhost"
#NGINX
# Replace the stock default site with the project's nginx.conf.
ln -s /etc/nginx/sites-available/default /etc/nginx/sites-enabled
rm -rf /etc/nginx/sites-enabled/default
cp nginx.conf /etc/nginx/sites-enabled/
#index
chmod +x index.sh
#PHPmyadmin
# Download and unpack phpMyAdmin into the web root.
wget https://files.phpmyadmin.net/phpMyAdmin/4.9.5/phpMyAdmin-4.9.5-all-languages.tar.gz
tar -zxvf phpMyAdmin-4.9.5-all-languages.tar.gz
mv phpMyAdmin-4.9.5-all-languages phpmyadmin
mv phpmyadmin /var/www/localhost/phpmyadmin
#WORDPRESS
# Download WordPress (fr) and install the pre-configured wp-config.php.
wget http://fr.wordpress.org/latest-fr_FR.tar.gz
tar -xzvf latest-fr_FR.tar.gz
mkdir /var/www/localhost/wordpress
mv wordpress/* /var/www/localhost/wordpress
rm /var/www/localhost/wordpress/wp-config-sample.php
cp wp-config.php /var/www/localhost/wordpress
#START
# Start the stack; tail keeps the container's PID 1 alive and streams logs.
service php7.3-fpm start
service nginx start
service mysql restart
nginx -t
tail -f /var/log/nginx/access.log /var/log/nginx/error.log
| true |
7d1c80f5e46d2507918fc19ab122fddfd750f538 | Shell | jumski/old-dotfiles | /bin/worklog-path | UTF-8 | 333 | 3.328125 | 3 | [] | no_license | #!/bin/bash
# Print (and create if necessary) the path of the worklog file.
#
# Usage:
#   worklog-path                 -> <basename of cwd> / today
#   worklog-path DATE            -> <basename of cwd> / DATE
#   worklog-path PROJECT DATE    -> PROJECT / DATE
#
# All expansions are quoted so project names or dates containing spaces
# cannot word-split (the original left every one unquoted); backticks are
# replaced by the nestable $(...) form.
project_name=$(basename "$(pwd)")
date=$(date +"%Y-%m-%d")
if [ -n "$1" ]; then
  date="$1"
  if [ -n "$2" ]; then
    project_name="$1"
    date="$2"
  fi
fi
worklogs_dir=~/Dropbox/monk-shared/worklog/$project_name
mkdir -p "$worklogs_dir"
worklog_path=$worklogs_dir/${date}.txt
touch "$worklog_path"
echo "$worklog_path"
| true |
5deff34a2c26a719f4b6acdf3e4882c3c564a840 | Shell | qaisar7/scripts | /logout.sh | UTF-8 | 181 | 2.8125 | 3 | [] | no_license | #!/bin/bash
# Log out user $1: kill any lock-screen process, then every remaining
# process owned by that user — except this script itself.
echo " $(date) $1 logging out"
my_pid=$$ # PID of this program
# BUGFIX: the original computed the exclusion from an undefined $pID
# variable and then filtered with `grep -v myPID` (the *literal* string
# "myPID"), so the self-exclusion never worked.  `xargs -r` keeps kill
# from being invoked with no arguments when nothing matches.
pgrep lock-screen | xargs -r kill -9
pgrep -u "$1" | grep -v -x "$my_pid" | xargs -r kill -9
| true |
871a655351435dc52e5e9671e84224c776b39616 | Shell | dueyfinster/mail-sync | /scripts/sync-fastmail-to-gmail.sh | UTF-8 | 573 | 2.765625 | 3 | [] | no_license | #!/bin/bash
# Mirror a FastMail mailbox into Gmail with imapsync.
# See: http://imapsync.lamiral.info/FAQ.d/FAQ.Gmail.txt
# Required environment variables: USER1/PASS1 (FastMail credentials),
# USER2/PASS2 (Gmail credentials).
# NOTE(review): the last active option line below ends with "\", so the
# command continues onto the commented-out --justfolders line; bash starts
# a comment at the leading '#' there, so this works — but removing that
# comment line would leave a dangling continuation. Verify before editing.
IMAP_SYNC="/usr/bin/imapsync"
HOST1="mail.messagingengine.com"
HOST2="imap.gmail.com"
$IMAP_SYNC \
--host1 $HOST1 --user1 $USER1 --password1 $PASS1 -ssl1 \
--host2 $HOST2 --user2 $USER2 --password2 $PASS2 -ssl2 \
--f1f2 "INBOX.Archive"="[Gmail]/All Mail" \
--exitwhenover 500000000 \
--maxsize 25000000 \
--automap \
--expunge1 \
--addheader \
--folderlast "INBOX.Archive" \
--exclude "\[Gmail\]$" \
--regextrans2 "s/[\^]/_/g" \
--regextrans2 "s/['\"\\\\]/_/g" \
#--justfolders #Uncomment to do a dry run
79caefbf1c5fd458165f6af8b0303b61f19d814b | Shell | tkusmierczyk/multiple_regression_lda | /src/postprocessing/export_from_samples.sh | UTF-8 | 1,224 | 3.578125 | 4 | [] | no_license | #!/bin/sh
# Export topics and linear-model weights from JAGS samples (*.RData),
# fanning out up to $K concurrent Rscript workers.
echo "EXPORTS TOPICS AND LM-WEIGHTS FROM JAGS SAMPLES (*.RData)"
echo "ARGS: INPUT DIRECTORY, WORDS FILE, MAX NUM PROCESSES"
######################################################################
if [ ! -z "$1" ]; then
 INPUT=$1
else
 echo "ARG REQUIRED: DIRECTORY WITH *samples*RData FILES"
 exit 1
fi
echo "INPUT = $INPUT"
if [ ! -z "$2" ]; then
 WORDS=$2
else
 echo "ARG REQUIRED: *words*.RData FILE"
 exit 1
fi
echo "WORDS = $WORDS"
K=2
if [ ! -z "$3" ]; then
 K=$3
fi
echo "NUM PROCESSES = $K"
set -x
######################################################################
# Iterate with a glob instead of parsing `ls` output (whitespace-safe);
# the -e guard skips the literal pattern when nothing matches.
# numproc.sh throttles the number of concurrent R processes to $K.
for INF in "$INPUT"/samples*.RData
do
 [ -e "$INF" ] || continue
 FNAME=$(basename "$INF")
 echo "PROCESSING $FNAME"
 OUTLM="$INPUT/$FNAME-lmweights.tsv"
 echo " $INF => $OUTLM"
 Rscript ../postprocessing/samples2lmweights.R "$INF" "$OUTLM" &
 sh ../base/numproc.sh "$K" R
 OUTI="$INPUT/$FNAME-intercepts.tsv"
 echo " $INF => $OUTI"
 Rscript ../postprocessing/samples2intercepts.R "$INF" "$OUTI" &
 sh ../base/numproc.sh "$K" R
 OUTTOPICS="$INPUT/$FNAME-topics.tsv"
 echo " $INF => $OUTTOPICS"
 Rscript ../postprocessing/samples2topics.R "$INF" "$WORDS" "$OUTTOPICS" &
 sh ../base/numproc.sh "$K" R
done
wait
| true |
c487f82eb455a0f60f6146bf449829afa3e2b380 | Shell | tylerlthompson/PyKotaWebAdmin | /configs/pykotlisten.init | UTF-8 | 1,185 | 3.765625 | 4 | [] | no_license | #!/bin/bash
### BEGIN INIT INFO
# Provides:          PyKota Listener
# Required-Start:    $local_fs $network $named $time $syslog
# Required-Stop:     $local_fs $network $named $time $syslog
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: PyKota Account Balance Lookup
# Description:       PyKota Account Balance Lookup
### END INIT INFO
# SysV init script for the PyKota balance-lookup listener.
NAME=pykotlisten
#DIR=$(echo $( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) | sed -e 's/\(configs\)*$//g')
DIR=/opt/PyKotaWebAdmin
EXEC="$DIR"/pykotlisten.py
# NOTE(review): PIDFILE is declared but never written by start() nor read
# by stop() below — confirm whether it is still needed.
PIDFILE=/var/run/$NAME.pid
RUNAS=root
# Launch the listener in the background as $RUNAS, then report status.
# The surrounding sleeps give the process time to come up before the
# status check; note the PID is not recorded anywhere (see PIDFILE note).
start() {
  echo "Starting $NAME..."
  sleep 2
  CMD="/usr/bin/python $EXEC &"
  su $RUNAS -c "$CMD"
  echo "$NAME Started"
  sleep 1
  status
}
# Kill the python process running the listener, then report status.
stop() {
  echo "Stopping $NAME"
  sleep 2
  # BUGFIX: the PID was previously extracted with `cut -c 11-15`, which
  # relies on fixed `ps aux` column positions and breaks when the layout
  # shifts (long usernames, wide PIDs).  awk prints the PID field
  # regardless of column width.
  kill $(ps aux | grep $NAME | grep python | awk '{ print $2 }')
  sleep 1
  status
}

# Show the listener's process line(s), if any.
status() {
  echo "Status of $NAME"
  ps aux | grep $NAME | grep python
}
# Standard SysV action dispatch; unknown/missing actions print usage.
case "$1" in
  start)
    start
    ;;
  stop)
    stop
    ;;
  restart)
    # plain stop-then-start; there is no health check in between
    stop
    start
    ;;
  status)
    status
    ;;
  *)
    echo "Usage: $0 {start|stop|restart|status}"
esac
| true |
859dadfcbc57f559f67a96f2db7ac395352d9c8d | Shell | qhdong/base-ci | /update_repo.sh | UTF-8 | 977 | 3.796875 | 4 | [] | no_license | #!/bin/bash
# Poll the git repository at $1 and, if HEAD moved since the last check,
# record the new commit id in ./.commit_id (consumed by the CI observer).
# run_or_fail.sh provides run_or_fail MESSAGE CMD... (prints MESSAGE and
# exits non-zero when CMD fails).
source run_or_fail.sh
# delete the old .commit_id
rm -f .commit_id
run_or_fail "Repository folder not found!" pushd $1 1> /dev/null
run_or_fail "Could not reset git" git reset --hard HEAD
# call git log and parse the output, find most recent commit ID
COMMIT=$(run_or_fail "Could not call 'git log' on repository" git log -n1)
if [ $? != 0 ]; then
  echo "Could not call 'git log' on repository"
  exit 1
fi
# NOTE: $COMMIT is deliberately unquoted so the multi-line `git log`
# output is flattened to one line; awk's $2 is then the hash following
# the leading "commit" word.
COMMIT_ID=`echo $COMMIT | awk '{ print $2 }'`
# pulls the repo, getting any recent changes
# then get the most recent commit ID
run_or_fail "Could not pull from repository" git pull
COMMIT=$(run_or_fail "Could not call 'git log' on repository" git log -n1)
if [ $? != 0 ]; then
  echo "Could not call 'git log' on repository"
  exit 1
fi
NEW_COMMIT_ID=`echo $COMMIT | awk '{ print $2 }'`
# if the id changed, then write it to a file
# NOTE(review): popd only runs on the changed branch, so the script ends
# inside the repo directory when nothing changed — harmless here because
# the script exits immediately after, but confirm before extending it.
if [ $NEW_COMMIT_ID != $COMMIT_ID ]; then
  popd 1> /dev/null
  echo $NEW_COMMIT_ID > .commit_id
fi
| true |
dc4413c6860bee4e27d3a17851bbb2e05eea11d9 | Shell | ryanwohara/migrate_pantheon | /tests/circle/configure-migrations.sh | UTF-8 | 1,394 | 2.71875 | 3 | [] | no_license | #!/bin/bash
# Configure Drupal 8 migrations on a Pantheon multidev from a D7 source site.
# Requires the `terminus` CLI to be authenticated and the environment
# variables PANTHEON_D7_SITE and PANTHEON_D7_BRANCH to be set; the current
# terminus site/env is the D8 target.
# Set the source database connection info in a secrets file where it can be
# read by settings.migrate-on-pantheon.php
export D7_MYSQL_URL=$(terminus site connection-info --site=$PANTHEON_D7_SITE --env=$PANTHEON_D7_BRANCH --field=mysql_url)
terminus secrets set migrate_source_db__url $D7_MYSQL_URL
export PANTHEON_D7_URL="http://$PANTHEON_D7_BRANCH-$PANTHEON_D7_SITE.pantheonsite.io"
# Run a cache clear to take time. Otherwise immediately enabling modules
# after a code push might not work.
terminus site clear-cache
terminus drush "en -y migrate_plus migrate_tools migrate_upgrade"
terminus site set-connection-mode --mode=sftp
terminus drush "config-export -y"
# Make sure the source site is available.
terminus site wake --site=$PANTHEON_D7_SITE --env=$PANTHEON_D7_BRANCH
terminus drush "migrate-upgrade --legacy-db-key=drupal_7 --configure-only --legacy-root=$PANTHEON_D7_URL"
# These cache rebuilds might not be necessary but I have seen odd errors
# related to migration registration go away after cache-rebuild.
terminus drush "cache-rebuild"
terminus drush "config-export -y"
terminus drush "cache-rebuild"
#terminus site code diffstat
# @todo commit the code change. But there seems to be a multidev bug preventing
# Terminus from seeing the code change. Terminus will only report a code change
# in terminus site code diffstat after the dashboard is refreshed.
| true |
3d24b5bed065afac750b21fb1fcc4ac4a7b3b223 | Shell | Nobgul/shell-scripts-1 | /clock/clock.sh | UTF-8 | 2,393 | 3.671875 | 4 | [] | no_license | #! /bin/bash
# Fri 25 Feb 2011 15:06:43 IRST (+0330)
# -b flag depends on beep.sh for beeping
# This is bloody awesome:
# while sleep 1;do tput sc;tput cup 0 $(($(tput cols)-29));date;tput rc;done &
# (from commandlinefu.com by flatcap?)
## Get options:
IS_SILENT="true"
while getopts 'u:t:d:hs' OPTIONNN
do
case $OPTIONNN in
'd') IS_COUNT_DOWN="true"; OPT_COUNTDOWN="$OPTARG" ;;
't') IS_COUNT_UP="true"; OPT_COUNTUP="$OPTARG" ;;
'u') IS_COUNT_UP="true"; OPT_COUNTUP="$OPTARG" ;;
'b') IS_SILENT="false"; echo "With beep." ;;
'h')
echo "Usage:"
echo " clock.sh 2 Count down from minute 2"
echo " clock.sh Count up"
echo "Also:"
echo " clock.sh -d 2 Count down from minute 2"
echo " clock.sh -u 2 Count up to minute 2"
echo " clock.sh -u Count up"
echo ""
echo " Note: -t is an alias for -u"
exit
;;
esac
done
if [ ! "$IS_COUNT_DOWN" ]; then
if [ ! "$IS_COUNT_UP" ]; then
## If no option at all, then count up
if [ -z "$*" ]; then
IS_COUNT_UP="true"
else
# if a value is given as the only option (e.g. "click.sh 3")
IS_COUNT_DOWN="true"
OPT_COUNTDOWN="$*"
fi
fi
fi
if [ "$IS_COUNT_DOWN" ]; then
## if count down value not given, then nothing
if [ -z "$OPT_COUNTDOWN" ]; then
echo $OPT_COUNTDOWN
echo "Count down from what? Need a value for -d option"
else
MIN=$OPT_COUNTDOWN;
for ((i=$((MIN*60));i>=0;i--)); do
echo -ne "\rCount down from $MIN min: $(date -d"0+$i sec" +%H:%M:%S) "; sleep 1;
done
if [ ! "$IS_SILENT" ]; then
beep.sh
fi
fi
elif [ "$IS_COUNT_UP" ]; then
## if count up value not given, then start without stop
if [ -z "$OPT_COUNTUP" ]; then
for ((i=$((0));;i++)); do
echo -ne "\rCount up: $(date -d"0+$i sec" +%H:%M:%S) "; sleep 1;
done
else
MAX=$OPT_COUNTUP;
for ((i=$((0));i<=$((MAX*60));i++)); do
echo -ne "\rCount up to $MAX min: $(date -d"0+$i sec" +%H:%M:%S) "; sleep 1;
done
if [ ! "$IS_SILENT" ]; then
beep.sh
fi
fi
fi
| true |
46d85df526130ec407faee22a2afe72eebd4b302 | Shell | thofisch/ssup | /init.sh | UTF-8 | 5,619 | 3.796875 | 4 | [
"MIT"
] | permissive | #!/bin/bash
#
# Run the MSSQL database migrations as well as seeding the database
[[ -n "${DEBUG}" ]] && set -o xtrace
readonly GREEN='\033[0;32m'
readonly RESET='\033[0m'
readonly SQLCMD="/opt/mssql-tools/bin/sqlcmd -S ${SERVER} -U sa -P ${SA_PASSWORD}"
set -o nounset
set -o errexit
##############################################################################
# wait for database to come online
# Globals:
# SQLCMD
# MIGRATION_TIMEOUT
# Arguments:
# None
# Returns:
# None
##############################################################################
wait_for_database() {
until ${SQLCMD} -l 1 -Q "select getdate()" > /dev/null 2>&1; do
echo "SqlServer is unavailable - waiting ${MIGRATION_TIMEOUT}..."
sleep ${MIGRATION_TIMEOUT}
done
>&2 echo "SqlServer is up - executing command"
}
##############################################################################
# Create database (dropping if needed) for local development
# Globals:
# LOCAL_DEVELOPMENT
# DATABASE
# SQLCMD
# GREEN
# RESET
# Arguments:
# None
# Returns:
# None
##############################################################################
create_database() {
if [[ -n "${LOCAL_DEVELOPMENT:+set}" ]]; then
echo -e "Creating database ${GREEN}${DATABASE}${RESET}..."
${SQLCMD} -Q "DROP DATABASE IF EXISTS ${DATABASE};"
${SQLCMD} -Q "CREATE DATABASE ${DATABASE};"
fi
}
##############################################################################
# Create migration table if needed
# Globals:
# SQLCMD
# MIGRATION_TABLE_NAME
# DATABASE
# GREEN
# RESET
# Arguments:
# None
# Returns:
# None
##############################################################################
create_migration_table() {
echo -e "Creating ${GREEN}${MIGRATION_TABLE_NAME}${RESET} table..."
cat <<SQL > ${MIGRATION_SCRIPT_LOCATION}
CREATE TABLE [dbo].[_Migrations] (
[ScriptFile] [nvarchar](255) NOT NULL,
[Hash] [nvarchar](64) NOT NULL,
[DateApplied] [datetime] NOT NULL
CONSTRAINT [PK__Migrations] PRIMARY KEY clustered
(
[ScriptFile] ASC
)
)
GO
CREATE INDEX [IX_usd_DateApplied] ON [dbo].[_Migrations]
(
[DateApplied] ASC
)
GO
SQL
${SQLCMD} -d ${DATABASE} -i ${MIGRATION_SCRIPT_LOCATION}
}
##############################################################################
# Run migrations/*.sql scripts
# Globals:
# MIGRATION_DB_FOLDER
# MIGRATION_SCRIPT_LOCATION
# GREEN
# RESET
# Arguments:
# None
# Returns:
# None
##############################################################################
run_migrations() {
local readonly migrations="${MIGRATION_DB_FOLDER}/migrations"
local readonly dry_run=$([[ -n "${DRY_RUN:+set}" ]] && echo " [DRY RUN]" || echo "")
if [[ -d ${migrations} ]] && ls ${migrations}/*.sql 1>/dev/null; then
echo -e "Preparing migration script from ${GREEN}${migrations}${RESET}..."
cat << SQL > ${MIGRATION_SCRIPT_LOCATION}
/* Script was generated on $(date -u '+%Y-%m-%d %H:%M:%SZ') */
--LOCK TABLE ONLY "${MIGRATION_TABLE_NAME}" IN ACCESS EXCLUSIVE MODE;
SQL
for entry in $(ls ${migrations}/*.sql | sort)
do
echo -e "Adding migration ${GREEN}${entry}${RESET}"
local readonly name=$(basename ${entry})
local readonly hash=$(sha1sum ${entry} | cut -f 1 -d ' ')
cat << SQL >> ${MIGRATION_SCRIPT_LOCATION}
--
-- BEG: $name
--
IF NOT EXISTS (SELECT 1 FROM [${MIGRATION_TABLE_NAME}] WHERE [ScriptFile] = '${name}')
BEGIN
SQL
if [[ -z "${dry_run}" ]]; then
cat ${entry} | sed -e 's,^,\t,g' >> ${MIGRATION_SCRIPT_LOCATION}
cat << SQL >> ${MIGRATION_SCRIPT_LOCATION}
INSERT INTO [${MIGRATION_TABLE_NAME}] (ScriptFile, Hash, DateApplied) VALUES ('${name}', '${hash}', GETDATE())
SQL
fi
cat << SQL >> ${MIGRATION_SCRIPT_LOCATION}
PRINT N'APPLIED: ${name}${dry_run}'
END
ELSE PRINT N'SKIPPED: ${name} was already applied';
GO
--
-- END: ${name}
--
SQL
done
echo "Running migration script..."
${SQLCMD} -d ${DATABASE} -i ${MIGRATION_SCRIPT_LOCATION}
else
echo -e "No migrations found at ${GREEN}${migrations}${RESET}"
fi
}
##############################################################################
# seed local database table if needed
# Globals:
# LOCAL_DEVELOPMENT
# MIGRATION_DB_FOLDER
# DRY_RUN
# GREEN
# RESET
# Arguments:
# None
# Returns:
# None
##############################################################################
seed_database() {
local readonly seed="${MIGRATION_DB_FOLDER}/seed"
local readonly order_file="${seed}/_order"
local readonly dry_run=$([[ -n "${DRY_RUN:+set}" ]] && echo " [DRY RUN]" || echo "")
if [[ -n "${LOCAL_DEVELOPMENT:+set}" ]] && [[ -d ${seed} ]] && [[ -f ${order_file} ]]; then
echo -e "Importing seed data from ${GREEN}${seed}${RESET}..."
while read name; do
local readonly table_name=$(echo ${name} | cut -f 1 -d '.')
local readonly file="${seed}/${name}"
echo -e "Seeding ${GREEN}${table_name}${RESET} with ${GREEN}${file}${RESET}${dry_run}"
if [[ -z "${dry_run}" ]]; then
/opt/mssql-tools/bin/bcp "[${table_name}]" in ${file} -c -t',' -r "\n" -F 2 -S ${SERVER} -d ${DATABASE} -U sa -P ${SA_PASSWORD}
fi
done < ${order_file}
else
echo -e "No seed data found at ${GREEN}${seed}${RESET}"
fi
}
wait_for_database
create_database
create_migration_table
run_migrations
seed_database
echo "Done"
| true |
82b61717030c81f9b389e6f3a5d65896527e1712 | Shell | nhulox97/bash_so | /tarea1/tarea1.sh | UTF-8 | 13,129 | 4.4375 | 4 | [] | no_license | #!/bin/bash
current_dir=`pwd`
function show_menu() {
echo '--------------Menu--------------'
echo '(1). Crear un directorio'
echo '(2). Crear un archivo'
echo '(3). Brindar permisos'
echo '(4). Listar directorios'
echo '(5). Comprimir archivos de un directorio'
echo '(6). Salir'
}
function dir_exists() {
local result=''
if [ -a $1 ]; then
result='y'
echo "$result"
else
result='n'
echo "$result"
fi
}
function file_exists() {
local result=''
if [ -f $1 ]; then
result='y'
echo "$result"
else
result='n'
echo "$result"
fi
}
function create_dir() {
local is_valid=0
echo -e "################# Crear mi directorio ###################\n"
while [ $is_valid -ne 1 ]; do
read -p '=> Ingrese el nombre del directorio: ' dir
if [[ $dir =~ ^[a-zA-Z0-9_.-]*$ ]]; then
if [ -a $dir ]; then
echo -e "\nEl directorio $dir ya existe, favor introducir un nombre diferente\n"
else
mkdir $dir
echo -e "\nEl directorio $dir se creó correctamente en: $current_dir \n"
is_valid=1
fi
else
echo -e "\nNombre inválido no se permiten caracteres especiales ni espacios; por favor ingrese un nombre válido\n"
fi
done
}
function create_file() {
local is_valid=0
echo -e "################# Crear mi archivo ###################\n"
while [ $is_valid -ne 1 ]; do
read -p '=> Ingrese el nombre del archivo: ' file
if [[ $file =~ ^[a-zA-Z0-9_.-]*$ ]]; then
if [ -f $file ]; then
echo -e "\nEl archivo $file ya existe, favor introducir un nombre diferente\n"
else
touch $file
echo -e "Archivo antes de agregar linea\n-----------------\n"
cat $file
read -p "=>Ingrese el texto de para el archivo $file: " line
echo "$line" >> $file
echo -e "Archivo despues de agregar linea\n-----------------\n"
cat $file
echo -e "\nEl archivo $file se creó correctamente en: $current_dir \n"
is_valid=1
fi
else
echo -e "\nNombre inválido no se permiten caracteres especiales ni espacios; por favor ingrese un nombre válido\n"
fi
done
}
function permissions_menu() {
echo 'Menu de permisos'
echo '(1). Permisos de lectura'
echo '(2). Permisos de lectura y escritura'
echo '(3). Permisos de lectura, escritura y ejecucion'
echo '(4). Permisos de de escritura'
echo '(5). Permisos de escritura y ejecucion'
echo '(6). Permisos de ejecucion'
echo '(7). Permisos de ejecucion y lectura'
echo '(8). Ninguno'
}
function set_permissions(){
case $1 in
1)
chmod +r $2;;
2)
chmod +rw $2;;
3)
chmod +rwx $2;;
4)
chmod +w $2;;
5)
chmod +wx $2;;
6)
chmod +x $2;;
7)
chmod +rx $2;;
esac
}
function give_permissions() {
echo -e "################# Brindar permisos ###################\n"
option=0
while [ $option -ne 8 ]; do
read -p '=> Ingrese el nombre del archivo o directorio al cual se brindarán los permisos: ' name
if [[ $file =~ ^[\\/a-zA-Z0-9_.-]*$ ]]; then
if [ -f $name ]; then
permissions_menu
read -p "=> Ingrese el tipo de permisos a asignar al archivo: " option
if [ $option -ne 8 ]; then
echo -e "Permisos del archivo antes de cambiarlos"
echo `ls -l $name`
set_permissions "$option" "$name"
echo -e "Permisos del archivo despues de cambiarlos"
echo `ls -l $name`
option=8
fi
elif [ -a $name ]; then
permissions_menu
if [ $option -ne 8 ]; then
read -p "=> Ingrese el tipo de permisos a asignar al archivo: " option
echo -e "Permisos del archivo antes de cambiarlos"
echo `ls -ld $name`
set_permissions "$option" "$name"
echo -e "Permisos del archivo despues de cambiarlos"
echo `ls -ld $name`
option=8
fi
else
echo -e "\nEl directorio o archivo no existe, si el directorio o archivo esta en otro directorio verificar si la ruta es la correcta\n"
fi
else
echo -e "\nIngresar un nombre de archivo o directorio valido\n"
fi
done
}
function list_dirs_and_files_by_route() {
local is_valid=0
echo -e "################# Listar mi directorio ###################\n"
while [ $is_valid -ne 1 ]; do
read -p '=> Ingrese el nombre del directorio: ' dir
if [[ $dir =~ ^[\\/a-zA-Z0-9_.-]*$ ]]; then
check_dir=$(dir_exists $dir)
if [ $check_dir = 'y' ]; then
# dir_listed=`ls -lh --block-size=MB $dir`
echo '###############################################'
echo -e "Lista de archivos y directorios del directorio: $dir\n"
# echo $dir_listed
ls -lh --block-size=MB $dir
echo -e "\n"
is_valid=1
elif [ $check_dir = 'n' ]; then
echo -e "\nEl directorio $dir no existe, favor verificar la ruta"
fi
else
echo -e "\nIngresar un nombre de archivo o directorio valido\n"
fi
done
}
function compression_menu() {
echo -e "\n========Seleccione el tipo de compresión"
echo '(1). zip'
echo '(2). tar'
}
function do_compress() {
echo -e "\n"
read -p "=> Ingrese el nombre que le asignará al comprimido: " filename
if [ $1 -eq 1 ]; then
# zip compression
compress=`zip -r $3/$filename.zip $2`
echo -e "\nEl directorio $2 se comprimió en $3/$filename.zip"
elif [ $1 -eq 2 ]; then
# tar compression
compress=`tar -cvf $3/$filename.tar $2`
echo -e "\nEl directorio $2 se comprimió en $3/$filename.tar"
fi
}
function do_uncompress() {
check_valid='n'
echo -e "\n"
read -p "=> Ingrese el nombre del comprimido (incluida su extension): " filename
check_file=$(file_exists $1/$filename)
if [ $check_file = 'y' ]; then
if [[ $filename =~ ^.*\.(zip)$ ]]; then
check_valid='y'
echo "$check_valid"
elif [[ $filename =~ ^.*\.(tar)$ ]]; then
uncompress=`tar -xvf $1/$filename -C $2`
check_valid='y'
echo "$check_valid"
else
check_valid='i'
echo "$check_valid"
fi
elif [ $check_file = 'n' ]; then
check_valid='n'
echo "$check_valid"
fi
}
function compress_and_uncompress_files() {
local is_valid=0
echo -e "################# Comprimir mi directorio ###################\n"
echo 'Menu:'
echo '(1). Comprimir archivos'
echo '(2). Descomprimir archivos'
read -p '=> Ingrese su opcion: ' menu_option
while [ $is_valid -ne 1 ]; do
if [ $menu_option -eq 1 ]; then
# Solicitando directorio origen
read -p '=> Ingrese el nombre del directorio a comprimir: ' org_dir
# validando rura
if [[ $org_dir =~ ^[\\/a-zA-Z0-9_.-]*$ ]]; then
check_dir=$(dir_exists $org_dir)
if [ $check_dir = 'y' ]; then
# Solicitando directorio destino
echo -e "\n"
read -p "=> Ingrese la ruta en donde se almacenará el comprimido, si desea guardar el comprimido en la misma carpeta escriba 'origen': " dest_dir
# comprimir en carpeta origen
if [ $dest_dir = 'origen' ]; then
compression_menu
read -p '=> Ingrese su opción: ' option
result="$(do_compress $option $org_dir $org_dir)"
clear
echo $result
is_valid=1
# comprimir en otra ruta
elif [[ $dest_dir =~ ^[\\/a-zA-Z0-9_.-]*$ ]]; then
check_dir=$(dir_exists $dest_dir)
if [ $check_dir = 'y' ]; then
compression_menu
read -p '=> Ingrese su opción: ' option
result="$(do_compress $option $org_dir $dest_dir)"
clear
echo $result
is_valid=1
elif [ $check_dir = 'n' ]; then
echo -e "\nEl directorio destino $dest_dir no existe, favor verificar la ruta"
fi
else
echo -e "\nIngresar un nombre de directorio valido\n"
fi
elif [ $check_dir = 'n' ]; then
echo -e "\nEl directorio origen $org_dir no existe, favor verificar la ruta"
fi
else
echo -e "\nIngresar un nombre de directorio valido\n"
fi
# fin comprimir
# inicio descomprimir
elif [ $menu_option -eq 2 ]; then
read -p '=> Ingrese la ruta del directorio en donde se encuentra el comprimido: ' org_dir
# validando rura
if [[ $org_dir =~ ^[\\/a-zA-Z0-9_.-]*$ ]]; then
check_dir=$(dir_exists $org_dir)
if [ $check_dir = 'y' ]; then
# Solicitando directorio destino
echo -e "\n"
read -p "=> Ingrese la ruta en donde se almacenarán los archivos descomprimidos, si desea guardar los archivos decomprimidos en la misma carpeta escriba 'origen': " dest_dir
# comprimir en carpeta origen
if [ $dest_dir = 'origen' ]; then
did_uncompress=$(do_uncompress $org_dir $org_dir)
if [ $did_uncompress = 'y' ]; then
is_valid=1
echo -e "\nEl archivo comprimido se descomprimió correctamente en: $org_dir\n"
elif [ $did_uncompress = 'i' ]; then
echo 'Extensión de archivo inválida'
elif [ $did_uncompress = 'n' ]; then
echo 'El archivo no existe'
fi
# comprimir en otra ruta
elif [[ $dest_dir =~ ^[\\/a-zA-Z0-9_.-]*$ ]]; then
check_dir=$(dir_exists $dest_dir)
if [ $check_dir = 'y' ]; then
did_uncompress=$(do_uncompress $org_dir $dest_dir)
if [ $did_uncompress = 'y' ]; then
is_valid=1
echo -e "\nEl archivo comprimido se descomprimió correctamente en: $dest_dir\n"
elif [ $did_uncompress = 'i' ]; then
echo 'Extensión de archivo inválida'
elif [ $did_uncompress = 'n' ]; then
echo 'El archivo no existe'
fi
elif [ $check_dir = 'n' ]; then
echo -e "\nEl directorio destino $dest_dir no existe, favor verificar la ruta"
fi
else
echo -e "\nIngresar un nombre de directorio valido\n"
fi
elif [ $check_dir = 'n' ]; then
echo -e "\nEl directorio origen $org_dir no existe, favor verificar la ruta"
fi
else
echo -e "\nIngresar un nombre de directorio valido\n"
fi
fi
done
}
# Testing zone
# dir_exists "a"
# file_exists "a/c.txt"
function main(){
menu_condition='s'
while [ $menu_condition != 'n' ]; do
show_menu
read -p '=> Ingrese su opcion: ' option
case $option in
1)
clear
create_dir;;
2)
clear
create_file;;
3)
clear
give_permissions;;
4)
clear
list_dirs_and_files_by_route;;
5)
clear
compress_and_uncompress_files;;
6)
clear
menu_condition='n';;
esac
if [ $option -ne 6 ]; then
read -p '=> Desea realizar otra operacion? (s)i (n)o: ' menu_condition
echo -e "\n"
fi
clear
done
}
main
| true |
7f00c0e74648d1a85994561e92d21e1b8f5ac0db | Shell | Zivosh/PiTrader | /install.sh | UTF-8 | 575 | 2.515625 | 3 | [] | no_license | #!/bin/bash
clear
echo "======================================="
echo "=======Installing requirements ========"
echo "======================================="
read -r "This script will install all the necessary modules etc on your Pi so you can run the bot"
sudo apt-get install libatlas-base-dev
pip3 install -U urllib3
pip3 install -U pandas
pip3 install -U requests
pip3 install -U statsmodels
pip3 install -U matplotlib
pip3 install -U binance
pip3 install -U python-binance
pip3 install -U cbpro
pip3 install -U numpy
pip3 install -U views
chmod +x Run.sh
exit 0
| true |
7b976d225783b7e6de2d2aed8fc546306feb57b5 | Shell | KhalidCK/metro-paris | /tools/scripts/download_ratp.sh | UTF-8 | 288 | 2.625 | 3 | [] | no_license | #!/bin/bash
output='data/raw'
mkdir -p $output
echo 'Download data from ratp website'
wget -O $output/ratp-trafic-2016.json 'https://data.ratp.fr/explore/dataset/trafic-annuel-entrant-par-station-du-reseau-ferre-2016/download/?format=json&refine.reseau=M%C3%A9tro&timezone=Europe/Berlin'
| true |
4a4e3bd543eb47fbc08e88244b1ff320a339e10f | Shell | ayazhafiz/hmcd | /.zsh_etc/yarn.zsh | UTF-8 | 554 | 2.921875 | 3 | [] | no_license | # Handles Yarn environment configuration.
# This file is governed under no license.
#
# Author: @ayazhafiz
# Source: https://github.com/ayazhafiz/hmcd
source "$ZSH_ETC/status_msg.zsh"
export PATH="$HOME/.yarn/bin:$PATH"
export PATH="$HOME/.config/yarn/global/node_modules/.bin:$PATH"
# yvm configuration
# https://github.com/tophat/yvm
export YVM_DIR="$HOME/.yvm"
yvm_file="$YVM_DIR/yvm.sh"
if [ -r "$yvm_file" ]; then
source "$yvm_file"
else
warn "yvm source file (\`$yvm_file\') cannot be found; not loading configuration." >&2
return
fi
| true |
14d1c270ecae6cdd4db4b9c50cff68470d858b91 | Shell | tuxjdk/tuxjdk | /cloneCheckoutPackTuxjdk.sh | UTF-8 | 990 | 4.25 | 4 | [] | no_license | #!/bin/bash
readonly TAG="$1"
readonly GIT="$( which git 2>/dev/null )"
readonly TAR="$( which tar 2>/dev/null )"
if [[ -n "$2" ]] ; then
readonly UPSTREAM="$2"
else
readonly UPSTREAM='https://github.com/tuxjdk/tuxjdk.git'
fi
if [[ -z $TAG ]] ; then
readonly NAME='tuxjdk'
else
readonly NAME="tuxjdk-$TAG"
fi
if [[ -z $GIT ]] ; then
echo 'git was not found' >&2
exit 1
fi
if [[ -z $TAR ]] ; then
echo 'tar was not found' >&2
exit 1
fi
if [[ -a "$NAME" ]] ; then
echo "'$NAME' file or folder already exists" >&2
exit 1
fi
if [[ -a "$NAME.tar.xz" ]] ; then
echo "'$NAME.tar.xz' file or folder already exists" >&2
exit 1
fi
echo -e "\e[0;35mCloning the source from '$UPSTREAM'...\e[0m"
( $GIT clone "$UPSTREAM" "$NAME" )
if [[ -n $TAG ]] ; then
echo -e '\e[0;35mChecking out the tag...\e[0m'
( cd "$NAME" && $GIT checkout "$TAG" )
fi
echo -e '\e[0;35mCreating tarball...\e[0m'
( tar --exclude-vcs -cJf "$NAME.tar.xz" "$NAME" )
echo -e '\e[0;35mDone.\e[0m'
| true |
efe39b69045ab447872b8320b5175225103b595a | Shell | saserr/Dotfiles | /lib/value/empty.bash | UTF-8 | 116 | 3.21875 | 3 | [] | no_license | import 'arguments::expect'
value::empty() {
arguments::expect $# 'value'
local value=$1
[[ ! "$value" ]]
}
| true |
27baa310047adff52d070225edd1191c88428e20 | Shell | Mewbi/LazyMoon | /Repositorio/relatorio.sh | UTF-8 | 2,655 | 3.484375 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
#---------------------------------------Libraries
source $HOME/LM-relatorio/config.txt
path="${user}@${ip}:${dir}"
#------------------------------------------------
#-----------------Hardware & Software Information
distro=$(uname -a | cut -d\ -f 2)
kernelName=$(uname -s)
hardwareArchiteture=$(uname -m)
cpu=$(cat /proc/cpuinfo | grep "model name" | tail -n 1 | cut -d: -f 2)
memTotal=$(cat /proc/meminfo | grep "MemTotal" | tr ' ' '\n' | grep [0-9]) #KBytes
memTotalMb=$(echo "scale=2;(${memTotal}/1024)" | bc)
#------------------------------------------------
#----------------------------------Hardware Usage
uptime=$(uptime | cut -d, -f -1)
hds=$(df -H | grep "sd") #GBytes
memAvailable=$(cat /proc/meminfo | grep "MemAvailable" | tr ' ' '\n' | grep [0-9]) #KBytes
memAvailableMb=$(echo "scale=2;(${memAvailable}/1024)" | bc)
cpuUsage=$(top -b -n2 -d 1 | awk "/^top/{i++}i==2" | grep -Ei "cpu\(s\)\s*:" | cut -d\ -f 2) #Percentage
#------------------------------------------------
#--------------------------------Creating Reports
date=$(date "+%d-%m-%Y-[%Hh%Mm]")
name="relatorio-${serverName}-${date}"
cat >> ${name}.txt << END
Server: ${serverName}
----------Informações de Hardware e Software----------
Distribuição: ${distro}
Nome do Kernel: ${kernelName}
Arquitetura do Hardware: ${hardwareArchiteture}
Modelo do Processador: ${cpu}
Memória RAM: ${memTotalMb}MB
-------------------Uso de Hardware--------------------
Ligado as: ${uptime/up/Tempo Ligado} h
Uso da CPU: ${cpuUsage}%
Memória RAM livre para uso: ${memAvailableMb}MB
Sist. Arq. Tam. Usado Disp. Uso% Montado em
HDs: ${hds}
END
cat >> ${name}.html << END
<!DOCTYPE html>
<html lang="pt-br">
<head>
<meta charset="UTF-8">
<title>${serverName}</title>
</head>
<body>
<h1>Server: ${serverName}</h1><hr>
<h2>Informações de Hardware e Software</h2>
<p><b>Distribuição: </b>${distro}</p>
<p><b>Nome do Kernel: </b>${kernelName}</p>
<p><b>Arquitetura do Hardware: </b>${hardwareArchiteture}</p>
<p><b>Modelo do Processador: </b>${cpu}</p>
<p><b>Memória RAM: </b>${memTotalMb} MB</p>
<hr>
<h2>Uso de Hardware</h2>
<p><b>Ligado as: </b>${uptime/up/Tempo Ligado} h</p>
<p><b>Uso da CPU: </b>${cpuUsage}%</p>
<p><b>Memória RAM livre para uso: </b>${memAvailableMb} MB</p>
<p>Sist. Arq. Tam. Usado Disp. Uso% Montado em</p>
<p><b>HDs: </b>${hds}</p>
</body>
</html>
END
#------------------------------------------------
#---------------------------------Sending Reports
scp ${name}.txt ${path}
scp ${name}.html ${path}
mv ${name}.txt LM-relatorio
mv ${name}.html LM-relatorio
#------------------------------------------------ | true |
d37bc0c6106d02310fa8121a2e9d6a28fc2464f9 | Shell | erlong15/mongo_k8s_deploy | /new-mongodb.sh | UTF-8 | 1,221 | 3.03125 | 3 | [] | no_license | #!/bin/bash
export LANG=C
export LC_CTYPE=C
export EPASS=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1)
export ECOLLECTION=EXNESS_$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 4 | head -n 1)
helm repo add bitnami https://charts.bitnami.com/bitnami
envsubst < values.yaml | helm install --wait --timeout 60s otus-mongodb bitnami/mongodb -f -
echo
echo "created user test-user with password $EPASS"
echo "created collection $ECOLLECTION"
echo
echo
NODE_IP=$(kubectl get node -o wide | tail -1 | awk '{print $6}')
NODE_PORT=$(kubectl -n mongodb get svc otus-mongodb-metrics -o json | jq -r '.spec.ports[0].nodePort')
echo "curl $NODE_IP:$NODE_PORT/metrics"
curl $NODE_IP:$NODE_PORT/metrics 2>&1 | grep health
echo
echo
kubectl run --namespace mongodb otus-mongodb-client --rm --tty -i \
--restart='Never' --image docker.io/bitnami/mongodb:4.4.0-debian-10-r0 \
--command -- mongo test-database \
--host "otus-mongodb-0.otus-mongodb-headless.mongodb.svc.cluster.local,otus-mongodb-1.otus-mongodb-headless.mongodb.svc.cluster.local,otus-mongodb-2.otus-mongodb-headless.mongodb.svc.cluster.local," \
--authenticationDatabase test-database -u test-user -p $EPASS --eval 'db.getCollectionNames()'
| true |
c55889048e4c13f58002e6087d8efbeae3a80eac | Shell | tolysz/ghcjs-stack | /special/cabal-next/cabal-install/tests/IntegrationTests/exec/should_run/configures_cabal_to_use_sandbox.sh | UTF-8 | 442 | 3.078125 | 3 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | . ../common.sh
cabal sandbox delete > /dev/null
cabal exec my-executable && die "Unexpectedly found executable"
cabal sandbox init > /dev/null
cabal install > /dev/null
# The library should not be available outside the sandbox
"$GHC_PKG" list | grep -v "my-0.1"
# When run inside 'cabal-exec' the 'sandbox hc-pkg list' sub-command
# should find the library.
cabal exec sh -- -c 'cd subdir && "$CABAL" sandbox hc-pkg list' | grep "my-0.1"
| true |
83ba8476934c6b4f53ddecea239bb87182d4a6e1 | Shell | MathiasStadler/markup-mermaid-inline | /inline_mermaid.sh | UTF-8 | 18,630 | 3.953125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
set -o posix -o errexit
# from here
# https://askubuntu.com/questions/509795/how-to-write-a-portable-shell-script-with-error-handl
#set MARKDOWN_BLOCK_RUNNING to true colour
readonly local COLOUR_DEFAULT='\033[0;m'
readonly local COLOUR_RED='\033[0;31m'
readonly local COLOUR_GREEN='\033[0;32m'
readonly local COLOUR_YELLOW='\033[0;33m'
# verbosity level
SILENT=0
CRITICAL=1
ERROR=2
WARN=3
NOTICE=4
INFO=5
DEBUG=6
LOGLEVEL=${SILENT}
# Level-tagged convenience wrappers around log().  Each one prefixes the
# message with its severity name and a colour escape sequence.
# NOTE(review): the trailing " \n" inside each message is later expanded
# by log()'s %b format, so every call emits an extra blank line --
# presumably intentional spacing; confirm before removing.
silent() {
    log ${SILENT} "$COLOUR_YELLOW silent $* ${COLOUR_DEFAULT} \n"
}
critical() {
    log ${CRITICAL} "$COLOUR_YELLOW critical $* ${COLOUR_DEFAULT} \n"
}
# error() and debug() additionally report the call-site line numbers via
# the BASH_LINENO call stack.
error() {
    log ${ERROR} "$COLOUR_RED error ( line: ${BASH_LINENO[*]}) - $* ${COLOUR_DEFAULT} \n"
}
warn() {
    log ${WARN} "$COLOUR_RED warn -- $* ${COLOUR_DEFAULT} \n"
}
notice() {
    log ${NOTICE} "$COLOUR_GREEN notice $* ${COLOUR_DEFAULT} \n"
}
info() {
    log ${INFO} "${COLOUR_DEFAULT} info $* ${COLOUR_DEFAULT} \n"
}
debug() {
    log ${DEBUG} "$COLOUR_YELLOW debug ( line: ${BASH_LINENO[*]}) $* ${COLOUR_DEFAULT} \n"
}
#######################################
# Emit one timestamped log line when the message level is enabled.
# Globals:   LOGLEVEL (read) - current verbosity threshold
# Arguments: $1  - numeric level of this message (SILENT..DEBUG)
#            $2+ - message text (may contain \033 colour escapes)
# Outputs:   "<date> - <message>" on stdout when LOGLEVEL >= $1
#######################################
log() {
    # Default to 0 so a call before LOGLEVEL is set cannot break the test.
    if [[ "${LOGLEVEL:-0}" -ge "${1}" ]]; then
        shift
        # FIX: datestring used to leak into the global scope; keep it local.
        local datestring
        datestring=$(date +"%Y-%m-%d %H:%M:%S")
        # %b expands backslash escapes so the colour codes take effect
        printf "%b\n" "$datestring - $*"
    fi
}
# log end
########################################################
# custom script
########################################################
# Print the command-line help text to stdout.
# Silences logging first so the help is not interleaved with log output.
# NOTE(review): the synopsis advertises -d/--debug, but the option parser
# below only recognises -v/--verbose for raising the log level -- confirm
# which flag is intended and align the two.
usage() {
    LOGLEVEL=${SILENT}
    printf "usage:\n"
    # ${0##*/} strips the directory part of $0 (basename of the script)
    printf "%s [-d -f -o] -i <file> \n" "${0##*/}"
    printf "%s [--debug --force --output] --input <file> \n" "${0##*/}"
    printf "\t-f | --force overwrite output\n"
    printf "\t-i | --input set markdown file to parse\n"
    printf "\t-o | --output set output file\n"
    printf "\t-v | --verbose [debug,info,notice,warn,error,silent] set debug level\n"
}
# global variable for this script ##################
# Force flag, set by -f/--force: when "true", existing output files and
# the whole output folder are deleted before regeneration.
GLOBAL_OVERRIDE_ALL_MERMAID_FILE=false
# Top-level folder that receives all generated output.
OUTPUT_FOLDER="output"
# Sub folder for the extracted mermaid definition files.
# NOTE(review): "mermind" looks like a typo for "mermaid", but it is a
# runtime path -- changing it would relocate the output; confirm first.
MERMAID_FOLDER="mermind"
# Sub folder for the rendered image files.
IMAGES_FOLDER="images"
# Image format/extension passed to mermaid-cli for rendered diagrams.
MERMAID_OUTPUT_FORMAT="png"
# Output markdown file; empty means "derive it from the input file name".
MERMAID_OUTPUT_FILE=""
# Every mermaid filename seen so far, used to detect duplicate names.
declare -a MERMAID_FILENAME=()
# spilt -abc to -a -b -c
# from here
# https://gist.github.com/Decstasy/19814b80a3551b34d78e8be7f3b5e8d8
# Abort early when the script was started without any arguments at all.
if (($# < 1)); then
    error "no input option"
    usage
    exit 1
fi
debug "parameter =>$*"
ARGS=()
# split -abc to -a -b -c
# Expand bundled short options ("-abc" -> "-a" "-b" "-c") into ARGS so
# the parser below only sees one option per array element.  Long options
# ("--foo") and non-option words are copied through unchanged.
# NOTE(review): an option value attached to its flag ("-ifile") is split
# character by character too, so option values must be separate words.
for i in "$@"; do
    if [[ "${i:0:1}" == "-" && "${i:1:1}" != "-" ]]; then
        for ((j = 1; j < ${#i}; j++)); do
            ARGS+=("-${i:$j:1}")
        done
    else
        ARGS+=("$i")
    fi
done
debug " count arguments => ${#ARGS[@]} \n"
# Walk the normalised ARGS array and set the script globals.
# NOTE: the loop bound is "<=" (one past the last index) -- the extra
# iteration reads an unset element and is absorbed by the '' case below.
for ((i = 0; i <= ${#ARGS[@]}; i++)); do
    debug "parse ${ARGS[i]}"
    case "${ARGS[i]}" in
    '') # Skip if element is empty (happens when it's un set before)
        continue
        ;;
    -i | --input) # Use +1 to access next array element and unset it
        debug "option --input trigger"
        # The option value is the following array element.
        MERMAID_INPUT_FILE="${ARGS[i + 1]}"
        debug "MERMAID_INPUT_FILE length=> ${#MERMAID_INPUT_FILE}"
        if [ "${#MERMAID_INPUT_FILE}" -eq 0 ]; then
            error "Input file missing ${MERMAID_INPUT_FILE}"
            printf "Input file missing %s \n" "${MERMAID_INPUT_FILE}"
            exit 1
        fi
        # unset 'ARGS[i]';
        # Skip the consumed value element.
        i=$((i + 1))
        continue
        ;;
    -o | --output) # Use +1 to access next array element and unset it
        debug "option --output trigger"
        MERMAID_OUTPUT_FILE="${ARGS[i + 1]}"
        debug "MERMAID_OUTPUT_FILE ${MERMAID_OUTPUT_FILE}"
        debug "MERMAID_OUTPUT_FILE length=> ${#MERMAID_OUTPUT_FILE}"
        if [ "${#MERMAID_OUTPUT_FILE}" -eq 0 ]; then
            error "Output file not available ${MERMAID_OUTPUT_FILE}"
            printf "ERROR: Output file is missing!\n"
            exit 1
        fi
        # unset 'ARGS[i]';
        i=$((i + 1))
        continue
        ;;
    -f | --force) # Parameter without argument
        debug "option --force trigger"
        GLOBAL_OVERRIDE_ALL_MERMAID_FILE=true
        # unset 'ARGS[i]'
        continue
        ;;
    -v | --verbose) # Parameter without argument
        debug "option --debug trigger"
        # NOTE(review): usage() advertises a level argument for
        # -v/--verbose, but this arm ignores it and always selects
        # DEBUG -- confirm which behaviour is intended.
        LOGLEVEL=${DEBUG}
        unset 'ARGS[i]'
        continue
        ;;
    --) # End of arguments
        unset 'ARGS[i]'
        break
        ;;
    *) # Skip unset if our argument has not been matched
        debug "option ${ARGS[i]} NOT FOUND"
        printf "option %s NOT FOUND\n" "${ARGS[i]}"
        usage
        exit 1
        # Unreachable: exit 1 above terminates the script.
        continue
        ;;
    esac
    # TODO check is necessary
    # unset 'ARGS[i]'
done
# At DEBUG verbosity, print one sample line per log level so the colour
# and format of each can be checked by eye.
if [[ "${LOGLEVEL}" -ge "${DEBUG}" ]]; then
    debug "debug"
    info "info"
    notice "notice"
    warn "warn"
    critical "critical"
    error "error"
    silent "silent"
fi
debug "Parameter:"
# At DEBUG verbosity, dump the parsed argument array twice: once per
# element and once by index (the second loop also shows index+1, which
# reads one slot past the end and prints empty -- harmless here because
# nounset is not enabled).
if [[ "${LOGLEVEL}" -ge "${DEBUG}" ]]; then
    for i in "${ARGS[@]}"; do
        # debug "ARGS[$i] => ${ARGS[$i]}"
        debug "ARGS => >$i<"
    done
    for ((i = 0; i < ${#ARGS[@]}; i++)); do
        debug "ARGS $i => >${ARGS[i]}<"
        debug "ARGS $i + 1=> >${ARGS[$((i + 1))]}<"
    done
fi
info "Log is ON !!!"
debug "input file ${MERMAID_INPUT_FILE}"
# The input markdown file must exist as a regular file before we go on.
if [ ! -f "${MERMAID_INPUT_FILE}" ]; then
    error "Input file not available ${MERMAID_INPUT_FILE}"
    printf "ERROR: Input file not available %s\n" "${MERMAID_INPUT_FILE}"
    exit 1
fi
# Derive MERMAID_OUTPUT_FILE when -o/--output was not given, and make
# sure an explicitly given output name carries a ".md" extension.
debug "set MERMAID_OUTPUT_FILE"
debug "set name of MERMAID_OUTPUT_FILE"
if [ "${#MERMAID_OUTPUT_FILE}" -eq 0 ]; then
    debug "NOT ENTER OUTPUT FILE"
    # FIX: the old substitution ${var/md/mermaid_replace.md} rewrote the
    # FIRST occurrence of "md" anywhere in the path (breaking names such
    # as "markdown/file.md").  Anchor on the ".md" suffix instead; an
    # input without a ".md" suffix now still gets a distinct output name
    # instead of silently reusing (and possibly clobbering) the input.
    MERMAID_OUTPUT_FILE=${MERMAID_INPUT_FILE%.md}.mermaid_replace.md
else
    debug "check filename ${MERMAID_OUTPUT_FILE} end with md"
    # FIX: match the literal ".md" extension; the old pattern ^.*md$
    # also accepted names merely ending in "md" (e.g. "outputmd").
    if [[ "${MERMAID_OUTPUT_FILE}" =~ \.md$ ]]; then
        debug "file name has ending"
    else
        debug "add .md to filename"
        MERMAID_OUTPUT_FILE+=".mermaid_replace.md"
        debug "set output file name => ${MERMAID_OUTPUT_FILE}"
    fi
fi
debug "MERMAID_OUTPUT_FILE => ${MERMAID_OUTPUT_FILE}"
#####################
# function for script ###################
#######################################
# Fail the script when a mermaid output filename was already used.
# Globals:   MERMAID_FILENAME (read) - filenames seen so far
# Arguments: $1 - candidate flowchart filename
# Outputs:   diagnostics via error()/notice()/debug()
# Returns:   0 when the name is unused; exits the script with status 1 on
#            a duplicate (kept from the original contract; see TODOs).
#######################################
check_mermaid_filename() {
    debug "${FUNCNAME[0]}:+${FUNCNAME[0]}(): start"
    local return_value=1
    # FIX: the previous test matched $1 as a REGEX against the
    # space-joined array ("${arr[*]}" =~ $1), so "ab" collided with
    # "abc" and regex metacharacters in names gave false results.
    # Compare element by element with an exact string match instead.
    local used
    local duplicate=false
    for used in "${MERMAID_FILENAME[@]}"; do
        if [[ "${used}" == "$1" ]]; then
            duplicate=true
            break
        fi
    done
    if [[ "${duplicate}" == true ]]; then
        error "flowchart name $1 contain in array, please used uniq filename"
        return_value=1
        # TODO move error exit outside the function
        # TODO no exit inside function
        exit 1
    else
        notice "flowchart name $1 was not used before - OK"
        return_value=0
    fi
    debug "${FUNCNAME[0]}:+${FUNCNAME[0]}(): end with return_value ${return_value}"
    return ${return_value}
}
#######################################
# Remember a mermaid output filename for later duplicate detection.
# Globals:   MERMAID_FILENAME (written) - filename is appended
# Arguments: $1 - filename to record
#######################################
push_mermaid_filename() {
    debug "${FUNCNAME[0]}:+${FUNCNAME[0]}(): start"
    debug "add mermaid filename $1 to array MERMAID_FILENAME"
    # += appends in place; rebuilding the whole array (the old form
    # arr=("${arr[@]}" "$1")) also re-indexes and copies every element.
    MERMAID_FILENAME+=("$1")
    debug "${FUNCNAME[0]}:+${FUNCNAME[0]}(): end"
}
#######################################
# Decide whether an output path is free to (re)create.
# Globals:   GLOBAL_OVERRIDE_ALL_MERMAID_FILE (read) - "true" allows an
#            existing file to be deleted
# Arguments: $1 - path to check
# Outputs:   diagnostics via debug()
# Returns:   0 when the path does not exist (or was removed because the
#            force flag is set); 1 when it exists and must be kept;
#            rm's status when the deletion fails.
#######################################
check_file_is_exists_add_should_replace() {
    debug "${FUNCNAME[0]}:+${FUNCNAME[0]}(): start"
    local return_value=1
    if [ -e "$1" ]; then
        debug "file $1 exists"
        if [ "${GLOBAL_OVERRIDE_ALL_MERMAID_FILE}" = true ]; then
            debug "delete file $1"
            debug "GLOBAL_OVERRIDE_ALL_MERMAID_FILE is set to ${GLOBAL_OVERRIDE_ALL_MERMAID_FILE}"
            # "--" stops option parsing so a path starting with "-" is safe
            rm -rf -- "$1"
            return_value=$?
        fi
    else
        debug "file $1 not exits"
        return_value=0
    fi
    debug "${FUNCNAME[0]}:+${FUNCNAME[0]}(): end with return_value ${return_value}"
    return $return_value
}
#######################################
# Render one mermaid definition file to an image via mermaid-cli (mmdc).
# Globals:   MERMAID_OUTPUT_FORMAT (read) - image extension, e.g. "png"
# Arguments: $1 - mermaid input file (must contain exactly one line
#                 matching "graph")
#            $2 - output path without extension
# Outputs:   writes "$2.${MERMAID_OUTPUT_FORMAT}"; diagnostics via
#            debug()/error()
# Returns:   mmdc's exit status; exits the script with 1 when the input
#            does not contain exactly one "graph" line.
#######################################
convert_file_to_image_and_add_output_file() {
    # TODO check mermaid is installed
    local _MERMAID_COMMAND="./node_modules/.bin/mmdc"
    local _MERMAID_PROPERTIES_FLAG="--puppeteerConfigFile"
    local _MERMAID_PROPERTIES_FILE="puppeteer-config.json"
    local _MERMAID_INPUT_FLAG="--input"
    local _MERMAID_OUTPUT_FLAG="--output"
    local _MERMAID_INPUT_FILE="$1"
    debug "check graph tag is only once in file"
    # NOTE: this also rejects files with ZERO "graph" lines, although the
    # error message only mentions "two and more".
    if [ "$(grep -c graph <"${_MERMAID_INPUT_FILE}")" -eq 1 ]; then
        debug " Ok one graph tag in file"
    else
        error " ERROR: file with two and more graph tag in file"
        printf " ERROR: file with two and more graph tag in file => %s\n" "${_MERMAID_INPUT_FILE}"
        exit 1
    fi
    # take input filename as output filename
    local _MERMAID_OUTPUT_FILE="$2"
    # append filetype
    _MERMAID_OUTPUT_FILE+=".${MERMAID_OUTPUT_FORMAT}"
    debug "Command: ${_MERMAID_COMMAND} ${_MERMAID_PROPERTIES_FLAG} ${_MERMAID_PROPERTIES_FILE} ${_MERMAID_INPUT_FLAG} ${_MERMAID_INPUT_FILE} ${_MERMAID_OUTPUT_FLAG} ${_MERMAID_OUTPUT_FILE}"
    debug " add line link"
    # Capture mmdc's stdout (kept quiet, as before) but preserve its exit
    # status: the previous 'local _rt="$(...)"; return $?' returned the
    # status of 'local' itself -- always 0 -- masking every mmdc failure
    # (ShellCheck SC2155).  The '|| _status=$?' form also keeps a failure
    # from tripping the script-level errexit before we can report it.
    local _rt
    local _status=0
    _rt="$("${_MERMAID_COMMAND}" "${_MERMAID_PROPERTIES_FLAG}" "${_MERMAID_PROPERTIES_FILE}" "${_MERMAID_INPUT_FLAG}" "${_MERMAID_INPUT_FILE}" "${_MERMAID_OUTPUT_FLAG}" "${_MERMAID_OUTPUT_FILE}")" || _status=$?
    return "${_status}"
}
###################
# TODO check is mermaid install
# With -f/--force the whole previous output folder is removed up front.
if [ "${GLOBAL_OVERRIDE_ALL_MERMAID_FILE}" = true ]; then
    if [ -e ${OUTPUT_FOLDER} ]; then
        debug "delete $OUTPUT_FOLDER"
        rm -rf ${OUTPUT_FOLDER}
    else
        debug "output folder ${OUTPUT_FOLDER} not exists"
    fi
fi
# Refuse to clobber an existing output file unless --force was given.
if ! check_file_is_exists_add_should_replace "${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}"; then
    debug "GLOBAL_OVERRIDE_ALL_MERMAID_FILE => ${GLOBAL_OVERRIDE_ALL_MERMAID_FILE}"
    error "file exists !! Delete by hand or add -f or --force to your command line for overwrite the files"
    printf "ERROR: file exists %s !! Delete by hand or add -f or --force to your command line for overwrite the files" "${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}"
    exit 1
fi
# Create the output tree (images/ and mermaid/ subfolders) on first run.
# NOTE(review): the path-splitting branch below only executes when the
# output folder did not already exist — confirm that is intended.
debug "check output folder is exists"
if [ -e "${OUTPUT_FOLDER}" ]; then
    debug "output folder exists"
else
    debug "folder NOT exists, make output folder"
    mkdir "${OUTPUT_FOLDER}"
    mkdir -p "${OUTPUT_FOLDER}/${IMAGES_FOLDER}"
    mkdir -p "${OUTPUT_FOLDER}/${MERMAID_FOLDER}"
    # If the requested output file has directory components, create them
    # and pre-create the (empty) output file itself.
    if [[ ${MERMAID_OUTPUT_FILE} == *"/"* ]]; then
        debug "MERMAID_OUTPUT_FILE ${MERMAID_OUTPUT_FILE} is path"
        debug " determine filename "
        MERMAID_OUTPUT_FILE_NAME=$(echo "${MERMAID_OUTPUT_FILE}" | sed "s/.*\///")
        MERMAID_OUTPUT_FILE_FOLDER=$(echo "${MERMAID_OUTPUT_FILE}" | sed -r "s/(.+)\/.+/\1/")
        debug "Folder => ${MERMAID_OUTPUT_FILE_FOLDER}"
        debug "Name => ${MERMAID_OUTPUT_FILE_NAME}"
        debug " create output folder for custom output file"
        debug "create path => mkdir --parents ${PWD}/${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE_FOLDER}"
        mkdir --parents "${PWD}/${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE_FOLDER}"
        debug "create file touch ${PWD}/${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE_FOLDER}/${MERMAID_OUTPUT_FILE_NAME}"
        touch "${PWD}/${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE_FOLDER}/${MERMAID_OUTPUT_FILE_NAME}"
    else
        debug "MERMAID_OUTPUT_FILE ${MERMAID_OUTPUT_FILE} is only filename"
    fi
fi
# flag: are we currently inside a fenced ``` source block?
MARKDOWN_BLOCK_RUNNING=false
# flag: is the current block a ```mermaid block being captured?
MERMAID_BLOCK_RUNNING=false
# input line counter, used in error messages
LINE_COUNTER=0
# FIXME no loop without read
# main line-by-line pass over the input markdown
# State machine over the input markdown: mermaid-flavored fenced blocks are
# written to per-chart files under ${MERMAID_FOLDER} and rendered to images;
# every other line is copied through to ${MERMAID_OUTPUT_FILE}.
# The '|| [[ $line ]]' keeps a final line without trailing newline.
while read -r line || [[ $line ]]; do
    # debug "Next line=> $line"
    if [[ $line =~ ^\`\`\`.*$ ]]; then
        debug "source block found in line ${LINE_COUNTER} "
        # block close
        if [[ $line =~ ^\`\`\`$ ]]; then
            notice "End of script block found in line ${LINE_COUNTER}"
            debug "End of script block found in line ${LINE_COUNTER}"
            if [ ${MARKDOWN_BLOCK_RUNNING} = false ]; then
                # a bare ``` with no open block: the opening fence had no flavor
                error "block error without flavor"
                error "Hint: each markdown block must have a flavor for this script :-("
                # TODO set github link here
                error "block without start"
                printf "ERROR: source block has nor flavor line =>%i\n" "${LINE_COUNTER}"
                exit 1
            elif [ ${MERMAID_BLOCK_RUNNING} = true ]; then
                notice "source block with mermaid flavor end"
                debug "source block with mermaid flavor end"
                debug "set MERMAID_BLOCK_RUNNING to false"
                MERMAID_BLOCK_RUNNING=false
                debug "convert block to image"
                debug "get last filename"
                # FIXME No local variable needed
                # TODO old mermaid_filename=${MERMAID_FILENAME[${#MERMAID_FILENAME[@]} - 1]}
                # NOTE(review): ${mermaid_filename} here is still the name parsed
                # at the block's opening fence; it is only logged, not reassigned.
                debug "filename ${mermaid_filename}"
                debug "output folder ${OUTPUT_FOLDER}"
                if ! convert_file_to_image_and_add_output_file "${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}" "${OUTPUT_FOLDER}/${IMAGES_FOLDER}/${mermaid_filename}"; then
                    exit 1
                fi
                debug " add file link to target mermaid file ./${IMAGES_FOLDER}/${mermaid_filename}"
                # FIXME(review): the format string "\n" consumes no arguments, so
                # printf only emits blank lines — the markdown image link
                # (presumably something like '![%s](%s)\n') is never written.
                printf "\n" "${mermaid_filename}" "${IMAGES_FOLDER}/${mermaid_filename}.${MERMAID_OUTPUT_FORMAT}" >>"${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}"
                debug "output folder ${OUTPUT_FOLDER}"
            else
                # closing fence of a non-mermaid block: copy it through
                debug "set flag MARKDOWN_BLOCK_RUNNING to false"
                MARKDOWN_BLOCK_RUNNING=false
                # FIXME only for security
                debug "set flag MERMAID_BLOCK_RUNNING to false"
                MERMAID_BLOCK_RUNNING=false
                debug " write source block CLOSE to file => ${OUTPUT_FOLDER}/${mermaid_filename}"
                printf "%s\n" "${line}" >>"${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}"
            fi
        else
            notice "source block START found line ${LINE_COUNTER}"
            debug "set MARKDOWN_BLOCK_RUNNING to TRUE"
            MARKDOWN_BLOCK_RUNNING=true
            if [[ ${line} =~ ^\`\`\`mermaid.*$ ]]; then
                debug "mermaid block START found line ${LINE_COUNTER}"
                debug "check mermind process start already"
                if [ ${MERMAID_BLOCK_RUNNING} = true ]; then
                    error "error source block with flavor mermaid already start line ${LINE_COUNTER}"
                    error "please check your input file ${MERMAID_INPUT_FILE}"
                    exit 1
                else
                    # the fence line is "```mermaid <filename>"; word 2 names the chart
                    debug "parse mermaid argument to array ${line}"
                    read -r -a mermaid_argument <<<"$line"
                    debug "mermaid arguments ${mermaid_argument[*]}"
                    debug " n arguments ${#mermaid_argument[*]}"
                    if [[ ${#mermaid_argument[*]} -ge 2 ]]; then
                        debug "determine to the filename"
                        mermaid_filename=${mermaid_argument[1]}
                        debug "the filename is ${mermaid_filename}"
                        debug "check the mermaid_filename is NOT double used file:${mermaid_filename}"
                        # if check_mermaid_filename "${mermaid_filename}"; then
                        if [ -e "${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}" ]; then
                            error "CASE: double filename"
                            error " Action: Please check your block mermaid flavor"
                            error " Hint: only uniq filename allow"
                            printf "ERROR: flowchart name %s already in used \n" "${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}"
                            exit 1
                        else
                            # TODO wrong push_mermaid_filename "${mermaid_filename}"
                            debug "used mermaid output file => ${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}"
                        fi
                        # debug "check_file_is_exists_add_should_replace ${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}"
                        # if ! check_file_is_exists_add_should_replace "${OUTPUT_FOLDER}/${mermaid_filename}"; then
                        # debug "File ${OUTPUT_FOLDER}/${mermaid_filename} exists"
                        # error "File ${OUTPUT_FOLDER}/${mermaid_filename} exists"
                        # printf "File %s/%s exists" "${OUTPUT_FOLDER}" "${mermaid_filename}"
                        # exit 1
                        # fi
                        # TODO add resolution flag
                        debug "set MERMAID_BLOCK_RUNNING=true"
                        MERMAID_BLOCK_RUNNING=true
                    else
                        error "please enter the filename after the mermaid block tag"
                        printf "ERROR: Please enter the filename after the mermaid block tag line %s file %i\n" "${LINE_COUNTER}" "${MERMAID_INPUT_FILE}"
                        exit 1
                    fi
                fi
            else
                # opening fence with a non-mermaid flavor: copy it through
                debug " write source block START to file => ${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}"
                printf "%s\n" "${line}" >>"${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}";
            fi
        fi
    elif
        [ ${MERMAID_BLOCK_RUNNING} = true ]
    then
        # inside a mermaid block: append the line to the per-chart source file
        # TODO check folder for mermaid files
        debug "write mermaid source block to file: ${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}"
        # OLD mermaid_filename=${MERMAID_FILENAME[${#MERMAID_FILENAME[@]} - 1]}
        debug "mermaid_filename => ${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}"
        debug "add line ${line} to file ${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}"
        printf "%s\n" "${line}" >>"${OUTPUT_FOLDER}/${MERMAID_FOLDER}/${mermaid_filename}"
    else
        # plain markdown outside any block: copy it through
        debug "normal line (${LINE_COUNTER}) ${line} write to file ${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}"
        printf "%s\n" "${line}" >>"${OUTPUT_FOLDER}/${MERMAID_OUTPUT_FILE}"
    fi
    # increase counter
    LINE_COUNTER=$((LINE_COUNTER + 1))
done <"${MERMAID_INPUT_FILE}"
# Report success on stdout and through the logging helpers.
printf "generated output to => %s/%s\n" "${OUTPUT_FOLDER}" "${MERMAID_OUTPUT_FILE}"
printf "successful finished\n"
info "successful finished"
debug "successful finished"
exit 0
| true |
c89b13dbf3ccd757546afd240e93fa395632e677 | Shell | jayfurmanek/libvorbis-feedstock | /recipe/build.sh | UTF-8 | 191 | 2.671875 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
# Build and install libvorbis into the conda-build $PREFIX.
# '$(uname)' and '=' replace the legacy backticks and the bash-only '=='
# (this keeps the test valid under a strict POSIX [ as well).
if [ "$(uname)" = "Darwin" ]; then
  # Embed an rpath so the freshly built dylibs resolve against $PREFIX/lib.
  export LDFLAGS="${LDFLAGS} -Wl,-rpath,$PREFIX/lib"
fi
./configure --prefix="${PREFIX}" --disable-dependency-tracking
make
make check
make install
| true |
39d90e707789817e11d2d7ef82d13c0c3757f90a | Shell | AaronZhangL/REST_API_Flask | /scritpTools/curl_post_json.sh | UTF-8 | 607 | 3.796875 | 4 | [] | no_license | #!/bin/sh
# Usage: xxxxxx.sh url times json
#
# Arguments:
#       url     endpoint to POST to
#       times   number of requests to send
#       json    request body; it MUST be surrounded by single quotations
#
# Example
#  sh xxxxxx.sh http://localhost:8080/webapi/user/XXXXX 10 '{"token":"xxxxx","version":"1.3"}'
#
URL=$1
TIMES=$2
JSON=$3
if [ -z "$URL" ]; then
        echo "no url"
        exit;
fi
if [ -z "$TIMES" ]; then
        echo "no times"
        exit;
fi
if [ -z "$JSON" ]; then
        echo "no json"
        exit;
fi
n=1
# Both operands quoted (they were bare before): an unexpected empty or
# whitespace value now fails the test loudly instead of breaking its syntax.
until [ "$n" -gt "$TIMES" ]
do
        # URL quoted as well, so query strings with '&' etc. survive intact
        curl -H 'Content-Type:application/json' -H 'User-Agent:iPhone' -d "$JSON" "$URL"
        n=$(( n+1 ))
        echo ""
        # plain 'sleep 1': the '1s' suffix is a GNU extension, not POSIX sh
        sleep 1
done
| true |
709be0a0202e0d588447630bf16dd79e9d604207 | Shell | petronny/aur3-mirror | /mp3plot/PKGBUILD | UTF-8 | 561 | 2.578125 | 3 | [] | no_license | # Contributor: Christoph Zeiler <archNOSPAM_at_moonblade.dot.org>
# Package metadata for the AUR 'mp3plot' PKGBUILD.
pkgname=mp3plot
pkgver=0.5.1
pkgrel=1
pkgdesc="Creates a textual or graphical plot of an MP3 file's bitrate distribution"
arch=('i686' 'x86_64')
url="http://p.outlyer.net/$pkgname/"
license=('GPL')
depends=('boost>=1.34.1' 'gd' 'imagemagick')
makedepends=('pkgconfig>=0.9')
# Upstream tarball and its md5 checksum, verified by makepkg.
source=($url/files/$pkgname-$pkgver.tar.gz)
md5sums=('20d1e6b21926b9d42412eae37fe1cf91')
build() {
  # Enter the extracted source dir; abort instead of configuring/building in
  # the wrong directory when it is missing (the old unquoted, unchecked 'cd'
  # would have carried on silently).
  cd "$pkgname-$pkgver" || return 1
  ./configure --prefix=/usr
  make || return 1
  # Install into the fakeroot staging dir provided by makepkg.
  make DESTDIR="$pkgdir" install
}
# vim:set ts=2 sw=2 et:
| true |
f494075ac0fff6eff6665295bd464a54e0ab9074 | Shell | dgou/dot-files | /zprofile | UTF-8 | 61 | 2.671875 | 3 | [] | no_license | for f in ~/.env_setup
do
	# source the setup file only when it exists and is non-empty
	[ -s "$f" ] && . "$f"
done
| true |
0daf7ea44f6182e782d54cf2f59aff7275193c66 | Shell | mrasif/Unix-Shell-Script | /Advanced/concatenate.sh | UTF-8 | 145 | 3.140625 | 3 | [] | no_license | #!/bin/bash
# Prompt for two strings, concatenate them and report the combined length.
echo -n "Enter String 1: "
# 'read -r' keeps backslashes in the input literal (plain 'read' mangles them)
read -r str1
echo -n "Enter String 2: "
read -r str2
str=$str1$str2
echo "String is $str and Length is ${#str}"
| true |
81ac9d884de645e4e2a5b6dc24c3a10828d474b9 | Shell | A1Gard/gitea | /snap/helpers/configuration.sh | UTF-8 | 3,277 | 3.71875 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Resolve the configuration root: a custom directory from the snap option
# 'gitea.snap.custom', falling back to $SNAP_COMMON.
# NOTE(review): this branches on snapctl's *exit status*, not on the option's
# value — confirm an unset option makes 'snapctl get' return non-zero here.
if snapctl get gitea.snap.custom; then
    cdir=$(snapctl get gitea.snap.custom)
else
    cdir=$SNAP_COMMON
fi
cfg="$cdir/conf/app.ini"                     # live gitea config file
bak="$cdir/conf/app.ini.bak-$(date -Ins)"    # timestamped backup target
basecfg="$SNAP/snap/helpers/app.ini"         # snap-shipped base config
smp="$SNAP/gitea/custom/conf/app.ini.sample" # upstream sample config
# Push every option of an ini file into the snap option store via snapctl.
# $1 selects the source file: "init" -> the upstream sample, "snap" -> the
# snap-shipped base config, anything else -> the live config ($cfg).
function toSnap() {
    OIFS=$IFS
    IFS='
'
    category="none"
    src="$cfg"
    [[ "$1" = "init" ]] && src="$smp"
    [[ "$1" = "snap" ]] && src="$basecfg"
    # sed strips the ';' in front of commented-out UPPERCASE options so they
    # get imported too; grep then drops remaining comment and blank lines.
    for l in $(sed 's_;\([A-Z]*\)_\1_g' $src | grep -v -e '^;' -e '^$'); do
        if echo $l | grep -q '^[[]'; then
            # "[section]" header -> becomes the snap key prefix
            category=$(CatToSnap "$l")
        elif echo $l | grep -q '^[A-Z]'; then
            # "OPTION = value" line
            option=$(OptToSnap "$l")
            value=$(ValToSnap "$l")
            if [[ $category = "none" ]]; then
                snapctl set "$option=$value"
            else
                snapctl set "$category.$option=$value"
            fi
        fi
    done
    IFS=$OIFS
}
# Rebuild the ini file from the snap option store: walk the existing config
# (or the sample on first run), query snapctl for every option and rewrite
# $cfg from scratch; a timestamped copy of the previous file is kept in $bak.
function toIni() {
    OIFS=$IFS
    IFS='
'
    category="none"; option="none"; catUnset=true
    src=$smp
    [[ -f $cfg ]] && src="$cfg"
    tmpIni="$cfg.tmp"
    [[ -f $src ]] && cp "$src" "$tmpIni"
    cp $tmpIni $bak
    # truncate the live config; it is re-emitted line by line below
    echo '' > $cfg
    for l in $(grep -v -e '^;' -e '^$' $tmpIni); do
        if echo $l | grep -q '^[[]'; then
            category=$(CatToSnap "$l")
            catUnset=true
        elif echo $l | grep -q '^[A-Z]'; then
            option=$(OptToSnap "$l")
            if [[ $category = "none" ]]; then
                value=$(snapctl get $option)
                echo $(OptToIni "$option") = $value >> $cfg
            else
                value=$(snapctl get $category.$option)
                if $catUnset; then
                    # first option of this section: emit the "[section]" header
                    echo "" >> $cfg
                    echo "[$(CatToIni "$category")]" >> $cfg
                    catUnset=false
                fi
                echo $(OptToIni "$option") = $value >> $cfg
            fi
        fi
    done;
    IFS=$OIFS
}
# Map an ini section header like "[log.console]" to its snapctl key form
# ("log-console"): dots become dashes, underscores become the "99" escape.
function CatToSnap {
    local token
    token=$(grep -oP '[A-Za-z0-9._]+' <<<"$1" \
        | sed -e 's|\.|-|g' -e 's|_|99|g')
    # shellcheck disable=SC2086 — intentionally unquoted to keep the
    # historical behaviour of joining multiple matches with single spaces
    echo $token
}
# Map an ini option line ("RUN_MODE = prod") to its snap option name
# ("run-mode"): keep the leading upper-case word, lower-case it, '_' -> '-'.
function OptToSnap {
    local ret
    ret=$(echo "$1" \
        | grep -oP '^[A-Z_]+' \
        | tr '[:upper:]' '[:lower:]' \
        | sed 's|_|-|g')
    # Quoted output (fixes SC2086): the anchored grep yields a single token,
    # so quoting cannot change the result but prevents accidental globbing.
    printf '%s\n' "$ret"
}
# Return everything after '=' with the snap placeholders expanded:
# SNAP_DIR_DATA -> $SDATA, SNAP_DIR_COMMON -> $SCOMMON, literal '{}' dropped.
function ValToSnap {
    local raw
    raw=$(grep -oP '=.*$' <<<"$1" \
        | sed -e 's_^= __g' \
              -e 's_^=__g' \
              -e "s|SNAP_DIR_DATA|$SDATA|g" \
              -e "s|SNAP_DIR_COMMON|$SCOMMON|g" \
              -e 's|{}||g')
    # shellcheck disable=SC2086 — intentionally unquoted, as before
    echo $raw
}
# Inverse of CatToSnap: snap category back to the ini section name
# ('-' -> '.', ' ' -> '_', the '99' escape -> '_').
function CatToIni {
    local ret
    ret=$(echo "$1" \
        | sed 's|-|.|g' \
        | sed 's|\ |_|g' \
        | sed 's|99|_|g')
    # Quoted output (fixes SC2086): spaces were already rewritten to '_'
    # above, so quoting is safe and avoids word splitting and globbing.
    printf '%s\n' "$ret"
}
# Inverse of OptToSnap: snap option name back to ini form
# (upper-case, '-' -> '_').
function OptToIni {
    local ret
    ret=$(echo "$1" \
        | tr '[:lower:]' '[:upper:]' \
        | sed 's|-|_|g')
    # Quoted output (fixes SC2086): option names carry no whitespace, so
    # quoting is safe and prevents accidental globbing of the result.
    printf '%s\n' "$ret"
}
# Hook dispatch: "configure" regenerates app.ini from the snap option store;
# "install" seeds the store from the sample plus snap defaults and writes the
# first ini; "save" captures the current ini back into the option store.
[[ "$1" = "configure" ]] \
    && toIni \
    && exit 0
[[ "$1" = "install" ]] \
    && echo "Initial Configuration..." \
    && mkdir -p $SNAP_COMMON/conf \
    && toSnap init \
    && toSnap snap \
    && toIni sample \
    && exit 0
[[ "$1" = "save" ]] \
    && echo "Saving current config..." \
    && toSnap \
    && exit 0
| true |
f7a5438af17ed0107fcd185aaf2794a39a4b804c | Shell | 01alchemist/django_oracle | /docker/build.sh | UTF-8 | 284 | 3.046875 | 3 | [] | no_license | #!/usr/bin/env bash
# Bump the build counter stored in .build, then build and tag the CoFoot image.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
declare -i build
# Path quoted (it was bare before): a checkout under a directory containing
# spaces would otherwise break the redirection and the $(<...) read.
build=$(< "$DIR/.build")
build=build+1
echo "Build:1.${build}"
docker build -t "tutum.co/skr64/CoFoot:1.${build}" .
docker build -t tutum.co/skr64/CoFoot:latest .
echo "${build}" > "$DIR/.build"
cedc4455f5ef07e82aacbd3a61bb622b90572142 | Shell | masayukist/sad_scripts | /include/nextid.sh | UTF-8 | 153 | 2.515625 | 3 | [] | no_license | #!/bin/sh
. ./config.sh
# Highest uidNumber currently in LDAP; $( ) replaces the legacy backticks.
MAX_ID=$(./include/ldapsearch_head.sh uidNumber | grep '^uidNumber' | awk '{if(m<$2) m=$2} END{print m}')
# POSIX arithmetic expansion instead of forking 'expr'.
# NOTE(review): with an empty MAX_ID this now prints 1 instead of an expr
# error — confirm that is acceptable when the LDAP query returns nothing.
echo $((MAX_ID + 1))
| true |
cfbcb35d07cf9a8aa358ea5ed964631706f879b0 | Shell | EBohte/DAS-4_IPFS_and_Swarm_scripts | /IPFS/multi_download.sh | UTF-8 | 2,761 | 3.75 | 4 | [] | no_license | #!/bin/bash
# DAS-4 worker: boot a private IPFS node, join the experiment swarm and
# repeatedly download the shared test file, timing every transfer.
# $1 = node id (suffix for per-node state), $2 = number of participating nodes.
NODE_ID_DIR="/var/scratch/ebohte/node_id/temp"
HASH_DIR="/var/scratch/ebohte/hash/"
OUTPUT_DIR="/home/ebohte/output/"
echo "start id ${1}"
export IPFS_PATH=/var/scratch/ebohte/.ipfs"${1}" #1 NODE_ID
/home/ebohte/ipfs/ipfs init
/home/ebohte/ipfs/ipfs bootstrap rm --all
/home/ebohte/ipfs/ipfs config Addresses.Gateway /ip4/127.0.0.1/tcp/56003
/home/ebohte/ipfs/ipfs config Addresses.API /ip4/127.0.0.1/tcp/63781
echo "${1}: config done."
# run the daemon inside a detached tmux session so it outlives this script
tmux new-session -s ipfs -d
tmux send -t ipfs export SPACE IPFS_PATH=/var/scratch/ebohte/.ipfs"${1}" ENTER #1 NODE_ID
tmux send -t ipfs /home/ebohte/ipfs/ipfs SPACE daemon ENTER
echo "${1}: ipfs started"
if [ ! -d "${NODE_ID_DIR}" ]; then
  mkdir -p "${NODE_ID_DIR}"
  echo "${1}: created ${NODE_ID_DIR}"
fi
# 'ipfs id' output is only complete (>= 11 lines) once the daemon is up
/home/ebohte/ipfs/ipfs id > /var/scratch/ebohte/node_id/temp/node_id_"${1}".txt #1 NODE_ID
ID_LINES=$( cat /var/scratch/ebohte/node_id/temp/node_id_"${1}".txt | wc -l )
while [[ ID_LINES -lt 11 ]]; do
  sleep 0.5
  /home/ebohte/ipfs/ipfs id > /var/scratch/ebohte/node_id/temp/node_id_"${1}".txt #1 NODE_ID
  ID_LINES=$( cat /var/scratch/ebohte/node_id/temp/node_id_"${1}".txt | wc -l )
done
/home/ebohte/ipfs/ipfs id > /var/scratch/ebohte/node_id/node_id_"${1}".txt #1 NODE_ID
echo "${1}: wrote id"
# wait until the coordinator has collected every node's address list
while [ ! -f /var/scratch/ebohte/node_id/all_ipfs_node_ids.txt ]
do
  sleep 0.2
done
NODE_IDS=$( cat /var/scratch/ebohte/node_id/all_ipfs_node_ids.txt )
# Word-split $NODE_IDS into one argument per address. The previous
# 'eval $ADD_ALL_COMMAND' executed the file's contents as shell code,
# which was an injection risk; direct expansion does the same job safely.
/home/ebohte/ipfs/ipfs bootstrap add ${NODE_IDS}
echo "${1}: added all nodes"
if [ ! -d "${HASH_DIR}" ]; then
  mkdir -p "${HASH_DIR}"
  echo "${1}: created ${HASH_DIR}"
fi
# wait for the coordinator to publish the hash of the test file
while [ ! -f /var/scratch/ebohte/hash/medium_hash.txt ]
do
  sleep 2
  echo "${1}: waiting for hash"
done
if [ ! -d "${OUTPUT_DIR}" ]; then
  mkdir -p "${OUTPUT_DIR}"
  echo "${1} created ${OUTPUT_DIR}"
fi
MED_HASH=$( cat /var/scratch/ebohte/hash/medium_hash.txt )
# 50 timed download rounds; $( ) replaces the legacy backticked seq
for i in $(seq 1 50)
do
  date
  echo "${1}: downloading file ${i} ... "
  OUTPUT=$( { time /home/ebohte/ipfs/ipfs get "${MED_HASH}" -o /dev/null; } 2>&1 )
  echo "${OUTPUT}" > /home/ebohte/output/med_file_"${i}"_"${1}".txt
  /home/ebohte/ipfs/ipfs pin ls --type recursive | cut -d' ' -f1 | xargs -n1 /home/ebohte/ipfs/ipfs pin rm # remove all pinned files
  /home/ebohte/ipfs/ipfs repo gc
  echo "${1}: got med file ${i}"
  EXPECTED_FILES=$(($i * $2))
  echo "${1}: expected files $EXPECTED_FILES"
  # barrier: wait until every node has written its result for round $i
  NUMBER_OF_FILES=$( ls /home/ebohte/output/ | wc -l )
  while [[ NUMBER_OF_FILES -lt EXPECTED_FILES ]]; do
    sleep 0.1
    NUMBER_OF_FILES=$( ls /home/ebohte/output/ | wc -l )
    echo "${1}: number of files $NUMBER_OF_FILES"
  done
done
echo "${1}: med file done"
echo "${1}: shutdown downloader"
d081f58ae21743283bed9dff77c61ce81f78c4dc | Shell | sranjanagi/Unit-Operation-Reaction-Engg.-Process-Control | /scripts/initialize.sh | UTF-8 | 1,001 | 3.828125 | 4 | [] | no_license | #!/bin/bash
# initialize script for Computer Programming Lab
# initialize script creates the initial environment for the Computer Programming
# Lab by installing the server side dependencies for the lab and invokes the build script.
# Mention all the server-side dependencies of the lab in
# dependencies.txt
# Usage of the Script
# To use initialize script, run the command
# initialize scripts/dependencies.txt
# initialize script takes dependencies.txt as an argument and installs the
# packages mentioned in the dependencies.txt file.
# exporting the proxy server
export http_proxy=http://proxy.iiit.ac.in:8080/
# updating the packages
yum update
# NOTE(review): the original had 'cd /var/www/build/' embedded in the comment
# below, so the directory change never actually ran — confirm the intended cwd.
# $1 is the shell variable for command-line argument. cd /var/www/build/
FILENAME=dependencies.txt
# reads the file line by line and installs the packages; input redirection
# (instead of 'cat |') keeps the loop in this shell, and 'read -r' stops
# backslashes in package names from being mangled
while IFS= read -r LINE
do
  echo "$LINE"
  yum install -y "$LINE"
done < "$FILENAME"
# invoke the build script
./build.sh
| true |
66e8803322a1c71e8fe136a831ce678916e17240 | Shell | IRSN/SalinoPhD | /Drakkar/data/Tihange.save | UTF-8 | 1,014 | 3.875 | 4 | [] | no_license | #!/bin/bash
# Sort Drakkar result files into the output directory under "$1"
# (Output_TIH_TMC for random-sampling runs, Output_TIH_BestEstimate
# otherwise) and archive *Power* files under a content-hash suffixed name.
savename=$(basename "$0" | cut -d"." -f1)
if [ $# = 0 ]
then
  echo "usage: ${savename}.save directory" 1>&2
  exit 1
fi
echo -e "\nExecute ${savename}.save script\n"
# Identify the platform; $( ) replaces the legacy backticks, and the
# previously bare $Sysx / $MACH expansions are quoted.
MACH=$(uname -s)
Sysx="$(echo "$MACH" | cut -b -6)"
if [ "$Sysx" = "CYGWIN" ]; then
  MACH=$(uname -o)
elif [ "$Sysx" = "AIX" ]; then
  MACH=$(uname -s)
else
  MACH=$(uname -sm | sed 's/[ ]/_/')
fi
ls -ltr
# Result files named *_<digits>.result indicate a (random-sampling) TMC run.
# '-n' replaces the double-negative '! -z'.
if [ -n "$(ls | grep -P "_[0-9]+\.result")" ]
then
  outdir="$1"/Output_TIH_TMC
  mkdir -p "$outdir"
  # Keep only a few result files, in case of random sampling.
  if [ -n "$(ls | grep -P "_[0-9][0-9]+\.result")" ]
  then
    tail -45 *.result
    rm -f *.result
  else
    mv *.result "$outdir"/.
  fi
else
  outdir="$1"/Output_TIH_BestEstimate
  mkdir -p "$outdir"
  mv *.result "$outdir"/.
fi
# Rename every *Power* file to <prefix>_<md5>.ascii inside the output dir.
for file in *Power*
do
  md5sum=$(md5sum "$file" | cut -d" " -f1)
  prefix=$(echo "$file" | cut -d"." -f1)
  mv "$file" "$outdir"/"$prefix"_"$md5sum".ascii
done
echo -e "${savename}.save script terminated\n"
| true |
985b082cb288b1073b99ae513df521ad73dbae87 | Shell | merttoka/simpleraytracer | /run.sh | UTF-8 | 411 | 3.03125 | 3 | [] | no_license | #!/bin/bash
#Run this in terminal
# Compile the ray tracer into build/ and run it on the requested output file.
if [ ! -d build ]; then
  mkdir -p build;
fi
g++ tinyxml/tinyxml.cpp tinyxml/tinystr.cpp tinyxml/tinyxmlparser.cpp tinyxml/tinyxmlerror.cpp Timing/ctimer.cpp Matrix3x3.cpp Vector3D.cpp main.cpp -o build/raytracer.exe
# Abort instead of running the binary from the wrong directory if cd fails.
cd build || exit 1
# '-z' replaces the fragile '[ ! $1 ]', which broke when $1 contained spaces.
# NOTE(review): the binary is linked as raytracer.exe but invoked as
# ./raytracer — that resolves on Cygwin/Windows only; confirm on Linux.
if [ -z "$1" ]; then
  echo "WARNING: No filename provided, interpreating as \"out.bmp\""
  echo ''
  ./raytracer out.bmp
else
  ./raytracer "$1"
fi
exit 0 | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.