blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
19b109aeccecf7f82d51a12f82abfa56fe62f330 | Shell | petronny/aur3-mirror | /lib32-fst-git/PKGBUILD | UTF-8 | 600 | 2.5625 | 3 | [] | no_license | # Contributor: Marco Süß <m_a_r_c_o.suess at gmx.de>
pkgname=lib32-fst-git
pkgver=20091102
pkgrel=1
pkgdesc="FreeST: Standalone wrapper for Windows VST plug-ins"
arch=(x86_64)
url="http://www.joebutton.co.uk/fst/"
license=('GPL')
depends=('lib32-lash' 'bin32-wine')
makedepends=()
provides=('fst')
conflicts=('fst')
install=$pkgname.install
source=(http://repos.archaudio.org/testing/i686/${pkgname/lib32-/}-${pkgver}-${pkgrel}-i686.pkg.tar.gz)
md5sums=(ebf827123f1bce493d667fa7f36f3978)
build() {
install -dm755 $pkgdir/opt/lib32 || return 1
cp -R $srcdir/usr $pkgdir/opt/lib32 || return 1
}
| true |
2cbc8077b7b5dfe868516dec2df6287db3ec55df | Shell | knutjelitto/LiFo | /Recipes/Tools/glibc | UTF-8 | 2,351 | 3.46875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
Title="6.9. Glibc-2.27"
Name=glibc
Version=2.27
ToolDeps=(bootstrap)
BuildIn=build
Supplies=(
https://ftpmirror.gnu.org/gnu/glibc/$Name-$Version.tar.xz
http://www.linuxfromscratch.org/patches/lfs/8.2/glibc-2.27-fhs-1.patch
http://www.iana.org/time-zones/repository/releases/tzdata2018e.tar.gz
)
Prepare ()
{
patch -Np1 -i ../glibc-2.27-fhs-1.patch
}
Build()
{
if [[ ! -d /usr/lib/gcc ]]; then
ln -sfv /tools/lib/gcc /usr/lib
fi
GCC_INCDIR=/usr/lib/gcc/x86_64-pc-linux-gnu/7.3.0/include
#ln -sfv ../lib/ld-linux-x86-64.so.2 /usr/lib64
#ln -sfv ../lib/ld-linux-x86-64.so.2 /usr/lib64/ld-lsb-x86-64.so.3
rm -f /usr/include/limits.h
CC="gcc -isystem $GCC_INCDIR -isystem /usr/include" \
../configure \
--prefix=/usr \
--exec-prefix=/usr \
--libexecdir=/usr/lib \
--disable-werror \
--disable-nscd \
--disable-build-nscd \
--enable-stack-protector=strong \
libc_cv_slibdir=/usr/lib
unset GCC_INCDIR
make
touch /etc/ld.so.conf
sed '/test-installation/s@$(PERL)@echo not running@' -i ../Makefile
make install
rm /etc/ld.so.conf
if [[ -L /usr/lib/gcc ]]; then
rm /usr/lib/gcc
fi
cp -v ../nscd/nscd.conf /etc/nscd.conf
mkdir -pv /var/cache/nscd
cat > /etc/nsswitch.conf << "EOF"
# Begin /etc/nsswitch.conf
passwd: files
group: files
shadow: files
hosts: files dns
networks: files
protocols: files
services: files
ethers: files
rpc: files
# End /etc/nsswitch.conf
EOF
#cp --verbose ../../tzdata2018c/* .
#
#ZONEINFO=/usr/share/zoneinfo
#mkdir -pv $ZONEINFO/{posix,right}
#
#for tz in etcetera southamerica northamerica europe africa antarctica \
# asia australasia backward pacificnew systemv; do
# zic -L /dev/null -d $ZONEINFO -y "sh yearistype.sh" ${tz}
# zic -L /dev/null -d $ZONEINFO/posix -y "sh yearistype.sh" ${tz}
# zic -L leapseconds -d $ZONEINFO/right -y "sh yearistype.sh" ${tz}
#done
#
#cp -v zone.tab zone1970.tab iso3166.tab $ZONEINFO
#zic -d $ZONEINFO -p America/New_York
#unset ZONEINFO
#
#echo PARTIALLY DONE
}
Cleanup ()
{
RemoveDir usr/share/locale
}
| true |
6fa008c81b32c48434ce3ffcb62de3cb6c113827 | Shell | jacksujingcheng/CellUniverse | /regression-test-all.sh | UTF-8 | 382 | 3.234375 | 3 | [] | no_license | #!/bin/sh
RETURN=0
#renice 19 $$
STDBUF=''
if which stdbuf >/dev/null; then
STDBUF='stdbuf -oL -eL'
fi
for dir in regression-tests/*; do
REG_DIR=$dir
export REG_DIR
echo --- in directory $dir ---
for r in $dir/*.sh; do
echo --- running test $r ---
if eval $STDBUF "$r"; then # force output and error to be line buffered
:
else
RETURN=1
fi
done
done
exit $RETURN
| true |
7c9964dd440ab7884ee19a4a95bdc062023c01e4 | Shell | devacto/meggyos | /ExamplesFromEric/compile.sh | UTF-8 | 292 | 2.71875 | 3 | [] | no_license | #!/bin/bash
if [ $# != 1 ]
then
echo "Usage: compile.sh tone (not tone.c!)"
else
avr-gcc -DF_CPU=16000000UL -mmcu=atmega328p -o $1.out $1.c
avr-objcopy -O ihex -R .eeprom $1.out $1.hex
avrdude -b57600 -patmega328p -cstk500v1 -P/dev/tty.usbserial-A501JU54 -U flash:w:$1.hex
fi
| true |
ab7e52b971d692708ebe5c525d21799e8b089561 | Shell | FauxFaux/debian-control | /a/apparmor/apparmor_2.13.2-2_amd64/preinst | UTF-8 | 1,127 | 3.265625 | 3 | [] | no_license | #!/bin/sh
# preinst script for apparmor
#
# see: dh_installdeb(1)
set -e
case "$1" in
install|upgrade|abort-upgrade)
# Remove old click/snapv1 state files on upgrade (dpkg will remove the
# directories)
if [ "$1" = "upgrade" ] && dpkg --compare-versions "$2" lt-nl "2.12-4ubuntu2"; then
rm -f /var/lib/apparmor/profiles/.*.md5sums
fi
;;
*)
echo "preinst called with unknown argument \`$1'" >&2
exit 1
;;
esac
# dh_installdeb will replace this with shell code automatically
# generated by other debhelper scripts.
# Automatically added by dh_installdeb/12
dpkg-maintscript-helper rm_conffile /etc/apparmor.d/abstractions/launchpad-integration 2.13.1-2\~ -- "$@"
dpkg-maintscript-helper rm_conffile /etc/apparmor.d/abstractions/ubuntu-sdk-base 2.8.0-0ubuntu20\~ -- "$@"
dpkg-maintscript-helper rm_conffile /etc/apparmor/features 2.11.1-4\~ -- "$@"
dpkg-maintscript-helper rm_conffile /etc/apparmor/subdomain.conf 2.13.2-2\~ -- "$@"
dpkg-maintscript-helper rm_conffile /etc/init/apparmor.conf 2.11.0-11\~ -- "$@"
# End automatically added section
exit 0
| true |
0ec39db208ffe08adf4875cc13c8d83adb9cb8c3 | Shell | joxyuki/dotfiles | /install.sh | UTF-8 | 4,430 | 4.15625 | 4 | [] | no_license | #!/bin/bash
set -Ceu
function usage() {
cat << EOT
Usage:
$0 [OPTIONS]
Options:
-t Test mode. Just output commands but do not make links.
-f Force overwrite existing files (default is interctive).
-h Show this help message.
EOT
}
function get_os() {
if [ "$(uname)" == "Darwin" ]; then
OS="mac"
elif [ "$(expr substr $(uname -s) 1 5)" == 'Linux' ]; then
if [ -e /etc/debian_version ] || [ -e /etc/debian_release ]
then
if [ -e /etc/lsb-release ]; then
OS="ubuntu"
else
OS="debian"
fi
elif [ -e /etc/redhat-release ]; then
if [ -e /etc/centos-release ]; then
OS="centos"
elif [ -e /etc/oracle-release ]; then
OS="oracle"
else
OS="redhat"
fi
else
OS="linux"
fi
else
echo "Your platform ($(uname -a)) is no supported."
exit 1
fi
}
function get_script_dir() {
if [ -L $0 ]; then
echo "Please run physical file (not link)."
exit 1
fi
readonly SCR_DIR=$(cd $(dirname $0) && pwd)
}
# log output syntax presetting
msg=""
tred=$(tput setaf 1)
tgreen=$(tput setaf 2)
tyellow=$(tput setaf 3)
treset=$(tput sgr0)
function log() {
local level=${1}
local _msg=""
if [ $# -gt 2 ]; then
_msg=${2}
else
_msg=${msg}
fi
if [ "${level}" == "DEBUG" ]; then
if ${DEBUG}; then
echo "(DEBUG) ${_msg}"
fi
elif [ "${level}" == "INFO" ]; then
echo "${tgreen}(INFO)${treset} $_msg"
elif [ "${level}" == "WARNING" ]; then
echo "${tyellow}(WARNING)${treset} ${_msg}"
elif [ "${level}" == "ERROR" ]; then
echo "${tred}(ERROR)${treset} ${_msg}"
fi
}
OS=""
TEST_MODE="false"
LN_OPT="-snvi"
get_os
get_script_dir
cd ${SCR_DIR}
while getopts tfh OPT
do
case ${OPT} in
t) TEST_MODE=true ;;
f) LN_OPT=${LN_OPT/i/}; LN_OPT+='f' ;;
h) usage; exit 0 ;;
\?) usage; exit 1 ;;
esac
done
msg="Create link .bashrc -> ${SCR_DIR}/.bashrc.<OS>"
log INFO
set +e
case "${OS}" in
"ubuntu")
ln -snvf bashrc.ubuntu .bashrc ;;
"centos")
ln -snvf bashrc.centos .bashrc ;;
"mac")
ln -snvf bashrc.mac .bashrc ;;
*) ;;
esac
set -e
msg="Install dotfiles..."
log INFO
for f in .??*; do
[ "${f}" = ".git" ] && continue
[ "${f}" = ".gitignore" ] && continue
[ "${f}" = ".gitconfig.local.template" ] && continue
[ "${f}" = ".gitmodules" ] && continue
[ "${f}" = ".tmux.conf" ] && ! type "tmux" > /dev/null 2>&1 &&\
msg="Skip .tmux.conf, tmux is not installed." &&\
log INFO &&\
continue
LN_NAME="${f}"
if [ "${f}" = ".bashrc" ]; then
case "${OS}" in
"ubuntu") LN_NAME=".bash_aliases" ;;
"centos") ;;
"mac") ;;
*) msg="Skipp .bashrc, your platform is not suppoted."
log WARNING
continue;;
esac
fi
if ${TEST_MODE}; then
msg="(test mode) ln ${LN_OPT} ${SCR_DIR}/${f} ~/${LN_NAME}"
log INFO
else
set +e
ln ${LN_OPT} ${SCR_DIR}/${f} ~/${LN_NAME}
set -e
fi
done
if type "nvim"; then
msg="Install Neovim settings..."
log INFO
if [ ! -e ~/.config ]; then
if ${TEST_MODE}; then
msg="(test mode) mkdir ~/.config"
log INFO
else
msg="mkdir ~/.config"
log INFO
mkdir ~/.config
fi
fi
if ${TEST_MODE}; then
msg="(test mode) ln ${LN_OPT} ${SCR_DIR}/${f} ~/${LN_NAME}"
log INFO
else
set +e
ln ${LN_OPT} ${SCR_DIR}/.vim ~/.config/nvim
ln ${LN_OPT} ${SCR_DIR}/.vimrc ${SCR_DIR}/.vim/init.vim
set -e
fi
msg="Install dein.vim..."
log INFO
if ${TEST_MODE}; then
msg="(test mode) install dein.vim"
log INFO
else
set +e
curl https://raw.githubusercontent.com/Shougo/dein.vim/master/bin/installer.sh > dein_installer.sh
chmod +x dein_installer.sh
/bin/bash dein_installer.sh ~/.cache/dein> /dev/null 2>&1
rm dein_installer.sh
set -e
fi
else
msg="Neovim is not installed. Skip Neovim related settings."
log INFO
fi
msg="Completed!"
log INFO
| true |
2193269cf1e2589b2c89b5935f99767993243413 | Shell | jpietek/video-trim | /video_trim | UTF-8 | 289 | 2.765625 | 3 | [] | no_license | #!/bin/bash
if [ "$#" -eq 4 ]; then
ffmpeg -y -loglevel panic -i "concat:$1|$2|$3" -c copy -fflags +genpts -avoid_negative_ts make_zero $4
elif [ "$#" -eq 3 ]; then
ffmpeg -y -loglevel panic -i "concat:$1|$2" -c copy -fflags +genpts -avoid_negative_ts make_zero $3
else
exit 1
fi
| true |
e08fee64944fae04cd21ccf345a63c57754ba315 | Shell | nykimberly/playground-bash | /compare_nums.sh | UTF-8 | 247 | 3.65625 | 4 | [] | no_license | #!/bin/bash
#Given two integers, X and Y, identify whether X < Y or X > Y or X = Y
read X
read Y
if [ $(( X > Y )) == 1 ]
then
echo "X is greater than Y"
elif [ $(( X == Y )) == 1 ]
then
echo "X is equal to Y"
else
echo "X is less than Y"
fi
| true |
9ffd29479c0628228c57ce130a0ef5cdc98fb099 | Shell | cyrelk/scripts | /lib/recipes/makebox | UTF-8 | 1,494 | 3.640625 | 4 | [] | no_license | #!/bin/bash
set -e
source ../utilities
source ./cgroups_utils
CGM="/usr/bin/cgm"
SU="/bin/su"
#
# LXC parameters
#
if ! grep $USER /etc/lxc/lxc-usernet > /dev/null 2>&1; then
echo "$USER br0 10" >> /etc/lxc/lxc-usernet
fi
configure_user () {
mkdir -p /home/${USER}/.cache/lxc
mkdir -p /home/${USER}/.config/lxc
mkdir -p /home/${USER}/.local/share/lxc/
if [ ! -f /home/${USER}/.config/lxc/default.conf ]; then
cat > /home/${USER}/.config/lxc/default.conf <<EOF
lxc.network.type = veth
lxc.network.link = br0
lxc.network.flags = up
#lxc.network.hwaddr = 00:16:3e:xx:xx:xx
lxc.id_map = u 0 $subuid $subgid
lxc.id_map = g 0 $subuid $subgid
EOF
fi
}
#
# Container creation as the newly created user
#
make_container () {
local user=$1
local userbox=$2
su - $user
$CGM movepid all $user $$
lxc-create -n $userbox -t download -- -d debian -r jessie -a amd64
}
configure_container () {
local user=$1
local userbox=$2
local ip=$3
local gw=$4
cat >> /home/${user}/.local/share/lxc/${userbox}/config <<EOF
lxc.network.ipv4 = $ip
lxc.network.ipv4.gateway = $gw
EOF
}
configure_inside_container () {
# deletion of the dhclient from the container
# lxc-usernsexec echo|cat|whatever /home/$USER...
# if root does that lxc-userrnsexec is not necessary
:
}
_usage () {
:
}
_initargs () {
:
}
_validateargs () {
:
}
_run () {
:
}
source ../base
initargs a b c
initcommands a b c
run $@
| true |
581320c0d44cc5ce71393e945cda4eda1272afd7 | Shell | mikehoward/YASiteKit--Yet-Another-Site-Kit- | /yasitekit_data/installation/remote_install.sh-template | UTF-8 | 1,846 | 3.71875 | 4 | [] | no_license | #! /bin/sh
# This script is run on the remote system. It works on pair.com, which
# runs Free BSD. It should also work on Linux, UNIX, and AIX systems.
# You will need to re-write it if you're on windows
PROGNAME="`basename $0`"
for dir in {document_root} \
{private_data_root} \
{private_data_root}/dev-dump.d \
{private_data_root}/ajax_scripts \
{private_data_root}/atom \
{private_data_root}/dump.d \
{private_data_root}/includes \
{private_data_root}/json \
{private_data_root}/objects \
{private_data_root}/page_structure \
{private_data_root}/pages \
{private_data_root}/rss \
{private_data_root}/xml \
{private_data_root}/installation \
{private_data_root}/sqlite_db.d \
{system_root} ; do
test -d $dir || mkdir $dir
done
. tarfile-list.sh
(
echo "Unpacking DOCUMENT ROOT"
cd {document_root} ;
tar xzf ~/${DOCUMENT_ROOT_TAR}
mv ~/index.php-{site_installation} index.php
mv ~/htaccess-{site_installation} .htaccess
)
(
echo "Unpacking Private Files and Data"
cd {private_data_root}
tar xzf ~/${PRIVATE_ROOT_TAR}
if [ -s config.php ] ; then
rm -f config.php-BAK
mv -f config.php config.php-BAK
fi
mv ~/config.php-{site_installation} ./config.php
echo "Unpacking System files"
tar xzf ~/${SYSTEM_TAR}
)
(
if [ -s ~/{site_id}-dump.tar.gz ] ; then
echo "Unbacking Database File"
cd {private_data_root}/dev-dump.d
tar xzf ~/${DUMP_TAR}
else
echo "no data base dump file"
fi
)
(
echo "Bootstrapping Database"
cd {private_data_root}/installation
php ./bootstrap_db.php
)
echo '________________________________________________________________________________'
echo ''
echo 'Please Log in As Administrator and Go to the "Dump and Reload Database" Function'
echo ''
echo '________________________________________________________________________________'
| true |
5a20ff6e9d47061723089e2257399826cb354a4e | Shell | Agilulfo1820/crypto.bot | /develop.sh | UTF-8 | 1,535 | 3.375 | 3 | [] | no_license | # Set environment variables for dev
export NODE_ENV=${APP_ENV:-development}
export PORT=${PORT:-8080}
export APP_NAME=${APP_NAME:-crypto.bot}
export SERVICE_PREFIX=${SERVICE_PREFIX:-crypto.bot}
export DB_PORT=${DB_PORT:-3307}
export DB_ROOT_PASSWORD=${DB_ROOT_PASSWORD:-password}
export DB_DATABASE=${DB_DATABASE:-homestead}
export DB_USER=${DB_USER:-homestead}
export DB_PASSWORD=${DB_PASSWORD:-secret}
# Decide which compose command & docker-compose file to use
COMPOSE="docker-compose -f ./deploy/docker-compose.yml"
if [ $# -gt 0 ];then
# If "build" is used, run the build command
if [ "$1 $2" == "build db" ]; then
shift 1
docker build -f ./deploy/mysql/Dockerfile .
elif [ "$1" == "build" ]; then
shift 1
docker build -f ./deploy/api/Dockerfile .
elif [ "$1" == "bash" ]; then
shift 1
docker exec -it crypto.bot-api /bin/sh
elif [ "$1" == "mfs" ]; then
shift 1
$COMPOSE run --rm \
-w /srv/app \
--entrypoint "adonis migration:refresh --seed" \
api-crypto \
"$@"
elif [ "$1" == "test" ]; then
shift 1
$COMPOSE run --rm \
-w /srv/app \
--entrypoint "adonis test" \
api-crypto \
"$@"
elif [ "$1" == "adonis" ]; then
shift 1
$COMPOSE run --rm \
-w /srv/app \
--entrypoint "adonis" \
api-crypto \
"$@"
else
$COMPOSE "$@"
fi
else
$COMPOSE ps
fi
| true |
ae065304004d0f7866976e518cc1b4884b078918 | Shell | skoskosko/Proceslimitor | /userLimit.sh | UTF-8 | 564 | 3.171875 | 3 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | #!/bin/bash
while :
do
declare largestpid;
declare -i largest=0;
# declare -i total=0;
pids=( $(ps -u $1 | awk '{print $1}') );
for i in ${pids[@]:1};
do
declare -i prec=( $(ps -p "$i" -o %cpu | sed "s/[^0-9]//g") );
if [ ${prec:+1} ];
then
if ((largest < prec));
then
declare -i largest="$prec"
declare largestpid="$i"
fi;
# total=$(($total+$prec))
fi;
done
if ((largest/10 > $2))
then
cpulimit -p "$largestpid" -l "$2"
fi;
sleep 600
done
| true |
b205ace51bee85744a2c3ffe1e92578d73de286d | Shell | Jerrylee6/Optimizations | /Linux_system_optimization.bash | UTF-8 | 2,551 | 3.453125 | 3 | [] | no_license | #!/bin/bash
#
# chkconfig: - 90 10
# description: Optimizing server performance
# System: CentOS 6 x64
# Version: 1.0
#
# @name: Linux system optimization.sh
# @author: 51inte <51inte@hotmail.com>
# @created: 13.3.2017
# @Script Version: v1.0
#
#
# Source function library.
. /etc/init.d/functions
# Variables
usage(){
case $choice in
1)
sed -i '/^SELINUX/s/enforcing/disabled/g' /etc/selinux/config
setenforce 0
;;
2)
sed -i "/ACTIVE_CONSOLES=\/dev\/tty/s/6/3/g" /etc/init/start-ttys.conf
sed -i "/^ACTIVE_CONSOLES=\/dev\/tty/s/6/3/g" /etc/sysconfig/init
## need reboot ## ##netstat -nat |grep tty
;;
3)
sed -i '/exec/s/^/#/g' /etc/init/control-alt-delete.conf
;;
4)
;;
5)
yum install ntp
echo "0 0 * * * /usr/sbin/ntpdate 0.centos.pool.ntp.org" >> /etc/crontab
/etc/init.d/crond reload
;;
6)
sed -i "/^*/s/1024/65535/" /etc/security/limits.d/90-nproc.conf
echo "Permanently need to restart the system. If temporary action is needed, \" ulimit -SHn 65535 \" will be executed"
;;
7)
echo "net.ipv4.tcp_max_tw_buckets = 6000" >>/etc/sysctl.conf
echo "net.ipv4.ip_local_port_range = 1024 65000" >>/etc/sysctl.conf
echo "net.ipv4.tcp_tw_recycle = 1" >>/etc/sysctl.conf
echo "net.ipv4.tcp_tw_reuse = 1" >>/etc/sysctl.conf
echo "net.ipv4.tcp_syncookies = 1" >>/etc/sysctl.conf
echo "net.core.somaxconn = 262144" >>/etc/sysctl.conf
echo "net.core.netdev_max_backlog = 262144" >>/etc/sysctl.conf
echo "net.ipv4.tcp_max_orphans = 262144" >>/etc/sysctl.conf
echo "net.ipv4.tcp_max_syn_backlog =262144" >>/etc/sysctl.conf
echo "net.ipv4.tcp_synack_retries = 1" >>/etc/sysctl.conf
echo "net.ipv4.tcp_syn_retries = 1" >>/etc/sysctl.conf
echo "net.ipv4.tcp_fin_timeout = 1" >>/etc/sysctl.conf
echo "net.ipv4.tcp_keepalive_time = 30" >>/etc/sysctl.conf
/sbin/sysctl -p
;;
Q)
exit 0;
;;
esac
}
while :
do
cat <<EOF
=======================================
1.............................Disabled selinux
2.............................Change the TTY changed from 6 to 3
3.............................Disabled control-alt-delete
4.............................Disabled Print_Service
5.............................Time synchronization
6.............................Maximum number of open files
7.............................Optimize Kernel
8.............................Optimize CPU/IO
Q.............................exit
=======================================
EOF
read -p "Please Enter Your Choice:" choice
usage
done | true |
6e263456be39499fe30097be8b971d24293656e0 | Shell | pujakgupta/nix | /scripting/generate_sequence.sh | UTF-8 | 207 | 3.25 | 3 | [] | no_license |
#!/bin/bash
if [ $# -lt 2 ]
then
echo "\n\tUsage: sh $0 <num1> <num2> "
echo "\nt\t<num1>: start number"
echo "\nt\t<num2>: end number"
echo "\nt\texiting ..."
exit
fi
seq $1 $2
| true |
e65117a44495e787102796e88a46522f73b7fed6 | Shell | rumachan/waihaha_rcl_argos | /extract_argos.sh | UTF-8 | 2,555 | 4.0625 | 4 | [] | no_license | #!/bin/bash
##############################################################
# BASH script to extract clean Argos data files from one or
# more raw files.
#
# Usage:
# extract_argos.sh YYYYMMDD
#
# YYYYMMDD - Date to process.
#
# Creates a clean argos CSV file in
# /home/volcano/data/rcl_argos/clean
#
# Any existing file will be overwritten.
#
# USES: grep
# perl
# argos_cleanup.pl
#
# Written by J Cole-Baker / GNS / 2011
##############################################################
### Define some file paths: ###
ArgosDataPath="/home/volcano/data/rcl_argos"
ArgosProgPath="/home/volcano/programs/rcl_argos"
RawPath="${ArgosDataPath}/raw"
CleanPath="${ArgosDataPath}/clean"
TempFile="${ArgosDataPath}/temp_raw.csv"
# Check whether a date was specified:
if [[ ( ("$#" == 1) && ($1 =~ ^[0-9]{8}$) ) ]]
then
# The user specified a date:
echo "Proessing files in $RawPath for $1"
else
echo "USAGE: extract_argos.sh YYYYMMDD"
exit 1
fi
# Delete temporary file:
rm -f $TempFile
# Get the various parts of the supplied date:
Year="${1:0:4}"
ShortYear="${1:2:2}"
Month="${1:4:2}"
Day="${1:6:2}"
# Grep out records for the date from the raw data, and place in a temp file:
# This operation includes a sort and removal of duplicates.
SearchDate="${Year}/${Month}/${Day}"
echo "Extracting data for: $SearchDate"
grep ${SearchDate} ${RawPath}/*.CSV -h | sort -t ';' -k 8 -u > ${TempFile}
# Build the path and file name of the clean file we are going to generate:
CleanDir="${CleanPath}/${ShortYear}-${Month}"
CleanFile="${CleanDir}/${Year}${Month}${Day}_clean.csv"
CompFile="${CleanDir}/${Year}${Month}${Day}_comp.csv"
TestFile="${CleanDir}/${Year}${Month}${Day}_test.csv"
echo "Output File: ${CleanFile}"
# Check to see if the path for the clean file exists, and create if it doesn't:
if [ ! -d "$CleanDir" ]
then
mkdir "$CleanDir"
fi
# Delete any existing version of the clean file:
rm -f $CleanFile
# Decode and clean the temporary file of raw data, using the perl script argos_cleanup.pl,
# and store a cleaned CSV file:
#perl "${ArgosProgPath}/argos_cleanup.pl" -f ${TempFile} > ${CleanFile}
#perl "${ArgosProgPath}/argos_cleanup_21022013.pl" -f ${TempFile} > ${CleanFile}
#perl "${ArgosProgPath}/argos_cleanup_15032014.pl" -f ${TempFile} > ${CleanFile}
#perl "${ArgosProgPath}/argos_cleanup_05032016.pl" -f ${TempFile} > ${CleanFile}
perl "${ArgosProgPath}/argos_cleanup_15032016.pl" -f ${TempFile} > ${CleanFile}
#perl "${ArgosProgPath}/argos_test.pl" -f ${TempFile} > ${TestFile}
| true |
f1173229c54323e39bd08812614a14d647a3039f | Shell | MarthoxGJ/SATReductor | /reducir.sh | UTF-8 | 1,035 | 3.640625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
echo Python version is: $(python3 --version)
if [[ $1 == "-m" ]]
then
if [[ ! -z $2 ]]
then
if [[ $(python3 --version) == *"Python"* ]];
then
echo "The app will reduce all the files on Instancias SAT to $2 and store them on X-SAT"
echo "Python3 is installed"
echo "Running script with python3"
cd ./Reductor && python3 main.py $1 $2
echo "Finished, please check the X-SAT folder"
elif [[ $(python3 --version) == *"Python"* ]]
then
echo "The app will reduce all the files on Instancias SAT to $2 and store them on X-SAT"
echo "Python2 is installed"
echo "Running script with python2"
cd ./Reductor && python main.py $1 $2
echo "Finished, please check the X-SAT folder"
else
echo "Please install python"
fi
else
echo "Please type a X-SAT value to reduce to [-m x]"
fi
else
echo "Please type a valir argument [-m]"
fi | true |
31ef7802f153916601f1188f23b1ed51dec3b12a | Shell | iankronquist/dotfiles | /.aliases | UTF-8 | 4,586 | 3.109375 | 3 | [] | no_license | # vim: set ft=bash
echo A $LINENO
alias vt='vim -t'
alias v='vim -p'
# zsh and bash behavior differ, effing "backwards compatible shell"
# Slow as hell, just as ugly ugh
#v() {
# REGEX="(.*):([0-9]+)$"
# [[ $@ =~ $REGEX ]]
# # Bash
# #LINE="${BASH_REMATCH[2]}"
# # zsh
# LINE="${BASH_REMATCH[3]}"
# echo $LINE
# # Bash
# #FILE="${BASH_REMATCH[1]}"
# # zsh
# FILE="${BASH_REMATCH[2]}"
# echo $FILE
# if [ -n "$LINE" ] ; then
# echo vim -p $FILE "+$LINE"
# vim -p $FILE "+$LINE"
# else
# vim -p $@
# fi
#}
# Don't search ctags files
alias ag='ag --ignore ./tags --ignore ./tags-e'
alias l=ls
alias ll='ls -hla'
alias la='ls -a'
alias sl='ls'
alias venv=virtualenv
alias grep='grep --color=auto'
alias vus='vagrant up && vagrant ssh'
alias vs='vagrant ssh'
alias vu='vagrant up'
alias rebash="exec bash"
echo A $LINENO
if [[ $(uname) == "Darwin" ]]; then
#alias 'objdump'='gobjdump'
alias opn='open'
alias pp='echo cd `pwd` | pbcopy'
alias asleep='/System/Library/CoreServices/Menu\ Extras/User.menu/Contents/Resources/CGSession -suspend'
elif [[ $(uname) == "Linux" ]]; then
alias opn='xdg-open'
alias pp='echo cd `pwd` | xclip'
alias ls='ls --color'
alias ff='/Applications/FirefoxNightly.app/Contents/MacOS/firefox'
echo A $LINENO
fi
echo A $LINENO
alias py=python
alias py3=python3
alias ga='git add'
alias gap='git add -up'
alias gb='git branch'
alias gc='git commit'
alias gcp='git commit -p'
alias gcb='git checkout -b'
alias gco='git checkout'
alias gd='git diff'
alias gdn='git diff --name-only'
alias gf='git fetch'
alias gfwl='git push --force-with-lease'
alias gl='git log'
alias gn='git net'
alias gp='git pull'
alias gpu='git push -u origin `git rev-parse --abbrev-ref HEAD`'
alias gr='git grep'
alias gs='git status'
#if [[ "$(type -t __git_complete)" == 'function' ]]; then
# __git_complete ga _git_add
# __git_complete gap _git_add
# __git_complete gb _git_branch
# __git_complete gc _git_commit
# __git_complete gcp _git_commit
# __git_complete gcb _git_checkout
# __git_complete gco _git_checkout
# __git_complete gd _git_diff
# __git_complete gdn _git_diff
# __git_complete gf _git_fetch
# __git_complete gfwl _git_push
# __git_complete gl _git_log
# __git_complete gn _git_log
# __git_complete gp _git_pull
# __git_complete gpu _git_push
# __git_complete gr _git_grep
#
#else
# echo shorthand git completions not added
#fi
alias vgs='vim -p `git status -s | grep " M " | cut -d" " -f 3`'
alias :tabe='vim -p'
alias jcheck="python -m json.tool"
grind () {
find ${2:-.} | grep -i $1
}
usage () {
du -d1 ${@:-.} | awk '$1 > 10000' | sort -n | awk '{ split( "KB MB GB" , v ); s=1; while( $1 > 1024 ){ $1/=1024; s++ } print int($1)/2 v[s] "\t" $2 }'
}
alias hgr='history | grep'
# Weird habits from vim
alias tabe='vim -p'
alias :tabe='vim -p'
alias ':w'='echo this is not vim'
alias ':q'='echo this is not vim'
alias ':wq'='echo this is not vim'
alias 'pycheck'='python -m py_compile'
echo A $LINENO
git-authors () {
git ls-tree -r -z --name-only HEAD --full-tree | xargs -0 -n1 git blame --line-porcelain HEAD | grep "^author "|sort|uniq -c|sort -nr
}
echo A $LINENO
godoc () {
env godoc $@ | less
}
#objdump () {
# env objdump $@ | less
#}
# If I'm writing too much asm :P
alias mov=mv
# Alias windows commands to unix
alias dir=ls
alias cls=clear
# Case sensitive recursive search
# One character for convenience like 'l' and 'v'
alias s='rg -n'
alias si='rg -in'
#if ! type "rg" > /dev/null; then
#alias s='rg -n'
#alias si='rg -in'
#else
#if ! type "ag" > /dev/null; then
#alias s='ag -s'
#alias si='ag'
#else
#
#alias s='grep -Rn'
#alias si='grep -Rin'
#fi
#fi
echo A $LINENO
hbrun() {
OLDPATH=$PATH
PATH=/opt/homebrew/bin:$PATH
#echo $@
"${@:1}"
PATH=$OLDPATH
}
echo A $LINENO
alias le=less
dis() {
objdump -CD $@ | less
}
echo A $LINENO
alias hist='history 0'
alias le='less -R'
ring_decode() {
xcrun -sdk iphoneos.internal embedded_device_map -query "select Platform, PlatformGeneration,PlatformName, Target, ProductType, ProductDescription from Targets" | grep -i $@
}
dis() {
xcrun -sdk iphoneos.internal otool -xV $@ | less
}
alias rfind=~/Library/Python/3.8/bin/rfind
alias runm='xcrun -sdk macosx.internal'
alias mrun=runm
alias runi='xcrun -sdk iphoneos.internal'
alias irun=runi
sdkheader() {
xcrun --sdk macosx.internal clang -x c -include ptrcheck.h /dev/null -E | grep $@
}
alias xcpretty=/Users/ian/.gem/ruby/2.6.0/bin/xcpretty
export RIPGREP_CONFIG_PATH=~/gg/dotfiles/.ripgrepconfig
retag() {
cscope -b -R
hbrun ctags -R . --fields=+iaS --links=no
}
vg () {
vim -p `grind $@`
}
| true |
89f1383242a5c16bf1a42bf3ceedcc865418f9c0 | Shell | weihongchang3/hongchang | /Clover Config/Generate | UTF-8 | 52,797 | 2.96875 | 3 | [] | no_license | #!/bin/sh
# generate Clover config file
CloverModel=`/usr/libexec/PlistBuddy -c "Print :CloverModel" /tmp/PBI.plist`
CloverResolution=`/usr/libexec/PlistBuddy -c "Print :CloverResolution" /tmp/PBI.plist`
#CloverHD4600=`/usr/libexec/PlistBuddy -c "Print :CloverHD4600" /tmp/PBI.plist`
osxver=`/usr/libexec/PlistBuddy -c 'Print ProductVersion' "${3}"/System/Library/CoreServices/SystemVersion.plist`
if [ "$CloverModel" != "no" ]
then
if [ "$CloverResolution" == "no" ]
then
/usr/bin/osascript <<EOT
tell application "Finder"
activate
display dialog "If you see this warning message, that means:
You didn't select display resolution in PBI CE.
If you want to make yourself a new patched DSDT, you should do the following:
Press F4 at Clover screen
Run PBI Clover Edition again and select correct DSDT options." buttons {"OK"} default button "OK" with icon note with title "Missing display resolution!"
end tell
EOT
fi
config="/tmp/config.plist"
# header
printf '<?xml version="1.0" encoding="UTF-8"?>\n' > $config
printf '<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">\n' >> $config
printf '<plist version="1.0">\n' >> $config
printf '<dict>\n' >> $config
# graphics section
printf '\t<key>Graphics</key>\n' >> $config
printf '\t<dict>\n' >> $config
if [ "$CloverModel" == "4x0G1" ] || [ "$CloverModel" == "4x0G2" ] || [ "$CloverModel" == "3x0G1" ] || [ "$CloverModel" == "ZBook" ]
then
printf '\t\t<key>ig-platform-id</key>\n' >> $config
printf '\t\t<string>0x0a260006</string>\n' >> $config
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
elif [ "$CloverModel" == "4x0G2Broadwell" ]
then
printf '\t\t<key>ig-platform-id</key>\n' >> $config
printf '\t\t<string>0x16260006</string>\n' >> $config
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
elif [ "$CloverModel" == "6x70AMD" ]
then
case $CloverResolution in
low)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>0</integer>\n' >> $config
;;
high)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>1</integer>\n' >> $config
;;
esac
printf '\t\t<key>FBName</key>\n' >> $config
printf '\t\t<string>Ebi</string>\n' >> $config
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>LoadVBios</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
elif [ "$CloverModel" == "6x70NVIDIA" ]
then
case $CloverResolution in
low)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>0</integer>\n' >> $config
;;
high)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>1</integer>\n' >> $config
;;
esac
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>LoadVBios</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
elif [ "$CloverModel" == "6x60wAMD" ]
then
case $CloverResolution in
low)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>0</integer>\n' >> $config
;;
high)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>1</integer>\n' >> $config
;;
esac
printf '\t\t<key>FBName</key>\n' >> $config
printf '\t\t<string>Cattail</string>\n' >> $config
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>LoadVBios</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
elif [ "$CloverModel" == "6x60wNVIDIA" ]
then
case $CloverResolution in
low)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>0</integer>\n' >> $config
;;
high)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>1</integer>\n' >> $config
;;
esac
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>LoadVBios</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
elif [ "$CloverModel" == "6x60pAMD" ]
then
case $CloverResolution in
low)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>0</integer>\n' >> $config
;;
high)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>1</integer>\n' >> $config
;;
esac
printf '\t\t<key>FBName</key>\n' >> $config
printf '\t\t<string>Elodea</string>\n' >> $config
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>LoadVBios</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
printf '\t\t<key>InjectEDID</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
printf '\t\t<key>VRAM</key>\n' >> $config
printf '\t\t<integer>1024</integer>\n' >> $config
printf '\t\t<key>VideoPorts</key>\n' >> $config
printf '\t\t<integer>5</integer>\n' >> $config
else
case $CloverResolution in
low)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>0</integer>\n' >> $config
printf '\t\t<key>ig-platform-id</key>\n' >> $config
printf '\t\t<string>0x01660003</string>\n' >> $config
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
;;
high)
printf '\t\t<key>DualLink</key>\n' >> $config
printf '\t\t<integer>1</integer>\n' >> $config
printf '\t\t<key>ig-platform-id</key>\n' >> $config
printf '\t\t<string>0x01660004</string>\n' >> $config
printf '\t\t<key>Inject</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Intel</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ATI</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>NVidia</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
;;
esac
fi
printf '\t</dict>\n' >> $config
# smbios section
week=CDFGHJKLMNPQRTVWXY12345678
week2=012345678
chars=ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890
week_letter=`echo ${week:$(($RANDOM%${#week})):1}`
week_letter2=`echo ${week:$(($RANDOM%${#week2})):2}`
boardserial=""
mlb=""
rom=""
ModelY=""
function random_char { echo ${chars:$(($RANDOM%${#chars})):1}; }
if [ -f /Volumes/EFI/EFI/CLOVER/config.plist ]
then
mlb=$(/usr/libexec/PlistBuddy -c 'Print :RtVariables:MLB' /Volumes/EFI/EFI/CLOVER/config.plist 2>null)
rom=$(/usr/libexec/PlistBuddy -c 'Print :RtVariables:ROM' /Volumes/EFI/EFI/CLOVER/config.plist 2>null)
boardserial=$(/usr/libexec/PlistBuddy -c 'Print :SMBIOS:BoardSerialNumber' /Volumes/EFI/EFI/CLOVER/config.plist 2>null)
if [[ "$boardserial" == "" ]] || [[ "$boardserial" == Error* ]]
then
if [[ "$mlb" == "" ]] || [[ "$mlb" == Error* ]]
then
boardserial=""
else
boardserial="$mlb"
fi
fi
else
boardserial=""
fi
# Per-model SMBIOS identity table: maps the HP laptop family selected in
# $CloverModel to the Mac model Clover will impersonate.  Each arm sets
# BIOS/board identity strings and a randomized serial; ModelY is the
# model digit later embedded in the fabricated board serial.
case $CloverModel in
6x60pAMD|6x60wAMD)
# Sandy Bridge + AMD dGPU -> MacBookPro8,2
boardtype="10"
chassistype="8"
chassis="MacBook-Aluminum"
family="MacBook Pro"
mobile="true"
biosdate="10/14/11"
biosversion="MBP81.88Z.0047.B24.1110141131"
boardprod="Mac-94245A3940C91C80"
prodname="MacBookPro8,2"
serial="C02J"$week_letter$(random_char)$(random_char)$(random_char)"DF8X"
ModelY="1"
;;
6x60wNVIDIA)
# NVIDIA dGPU variant -> MacBookPro10,1
boardtype="10"
chassistype="8"
chassis="MacBook-Aluminum"
family="MacBook Pro"
mobile="true"
biosdate="12/21/12"
biosversion="MBP101.88Z.00EE.B03.1212211437"
boardprod="Mac-C3EC7CD22292981F"
prodname="MacBookPro10,1"
serial="C02J"$week_letter$(random_char)$(random_char)$(random_char)"DKQ1"
ModelY="2"
;;
4x30|6x60)
# Sandy Bridge iGPU-only -> MacBookPro8,1
boardtype="10"
chassistype="8"
chassis="MacBook-Aluminum"
family="MacBook Pro"
mobile="true"
biosdate="01/24/12"
biosversion="MBP81.88Z.0047.B27.1201241646"
boardprod="Mac-94245B3640C91C81"
prodname="MacBookPro8,1"
serial="C02J"$week_letter$(random_char)$(random_char)$(random_char)"DH2G"
ModelY="1"
;;
4x40*|6x70*|4x0G0)
# Ivy Bridge -> MacBookPro9,2
boardtype="10"
chassistype="8"
chassis="MacBook-Aluminum"
family="MacBook Pro"
mobile="true"
biosdate="08/08/12"
biosversion="MBP91.88Z.00D3.B08.1208081132"
boardprod="Mac-6F01561E16C75D06"
prodname="MacBookPro9,2"
serial="C02L"$week_letter$(random_char)$(random_char)$(random_char)"DTY3"
ModelY="2"
;;
9470)
# Ultrabook -> MacBookAir5,2 (serial uses the 2-char week code)
boardtype="10"
chassistype="10"
chassis="Air-Enclosure"
family="MacBook Air"
mobile="true"
biosdate="05/10/12"
biosversion="MBA51.88Z.00EF.B00.1205221442"
boardprod="Mac-2E6FAB96566FE58C"
prodname="MacBookAir5,2"
serial="CK2"$week_letter2$(random_char)$(random_char)$(random_char)"DRVC"
ModelY="2"
;;
4x0G1|4x0G2|3x0G1|ZBook)
# Haswell -> MacBookPro11,1
boardtype="10"
chassistype="8"
chassis="MacBook-Aluminum"
family="MacBook Pro"
mobile="true"
biosdate="10/29/13"
biosversion="MBP111.88Z.0138.B03.1310291227"
boardprod="Mac-189A3D4F975D5FFC"
prodname="MacBookPro11,1"
serial="C02L"$week_letter$(random_char)$(random_char)$(random_char)"FGYY"
ModelY="3"
;;
4x0G2Broadwell)
# Broadwell -> MacBookAir7,2
boardtype="10"
chassistype="10"
chassis="Air-Enclosure"
family="MacBook Air"
mobile="true"
biosdate="03/24/15"
biosversion="MBA71.88Z.0166.B02.1503241251"
boardprod="Mac-937CB26E2E02BB01"
prodname="MacBookAir7,2"
serial="C1M"$week_letter2$(random_char)$(random_char)$(random_char)"G944"
ModelY="5"
;;
esac
# check existing serial number
# Reuse the serial from an existing Clover install, but only when its
# ProductName matches the model we are generating for (otherwise a stale
# serial from a different Mac model would be carried over).
# (was '2>null', which created a junk file literally named "null")
if [ -f /Volumes/EFI/EFI/CLOVER/config.plist ]
then
oldprodname=$(/usr/libexec/PlistBuddy -c 'Print :SMBIOS:ProductName' /Volumes/EFI/EFI/CLOVER/config.plist 2>/dev/null)
if [ "$oldprodname" == "$prodname" ]
then
oldserial=$(/usr/libexec/PlistBuddy -c 'Print :SMBIOS:SerialNumber' /Volumes/EFI/EFI/CLOVER/config.plist 2>/dev/null)
if [[ "$oldserial" != "" ]] && [[ "$oldserial" != Error* ]]
then
serial=$oldserial
fi
fi
fi
# Write the static SMBIOS identity block for the chosen Mac model.
# Variable values are passed as printf arguments (%s) rather than being
# spliced into the format string; the emitted bytes are identical.
printf '\t<key>SMBIOS</key>\n' >> $config
printf '\t<dict>\n' >> $config
printf '\t\t<key>BoardType</key>\n\t\t<integer>%s</integer>\n' "$boardtype" >> $config
printf '\t\t<key>ChassisType</key>\n\t\t<integer>%s</integer>\n' "$chassistype" >> $config
printf '\t\t<key>BiosReleaseDate</key>\n\t\t<string>%s</string>\n' "$biosdate" >> $config
printf '\t\t<key>Version</key>\n\t\t<string>1.0</string>\n' >> $config
printf '\t\t<key>SerialNumber</key>\n\t\t<string>%s</string>\n' "$serial" >> $config
printf '\t\t<key>Manufacturer</key>\n\t\t<string>Apple Inc.</string>\n' >> $config
printf '\t\t<key>BoardManufacturer</key>\n\t\t<string>Apple Inc.</string>\n' >> $config
printf '\t\t<key>ChassisAssetTag</key>\n\t\t<string>%s</string>\n' "$chassis" >> $config
printf '\t\t<key>BiosVendor</key>\n\t\t<string>Apple Inc.</string>\n' >> $config
printf '\t\t<key>ChassisManufacturer</key>\n\t\t<string>Apple Inc.</string>\n' >> $config
printf '\t\t<key>BiosVersion</key>\n\t\t<string>%s</string>\n' "$biosversion" >> $config
printf '\t\t<key>Board-ID</key>\n\t\t<string>%s</string>\n' "$boardprod" >> $config
printf '\t\t<key>Family</key>\n\t\t<string>%s</string>\n' "$family" >> $config
printf '\t\t<key>ProductName</key>\n\t\t<string>%s</string>\n' "$prodname" >> $config
# Convert a non-negative decimal integer to upper-case base-36.
# Pure bash; replaces the previous bc + temp-file ('temp'/'tempy')
# round trip, which left junk files in the CWD and errored on the
# initial 'rm' when they did not exist.
to_base36_upper() {
local n=$1 digits=0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ out=""
if [ "$n" -eq 0 ]; then
printf '0'
return
fi
while [ "$n" -gt 0 ]; do
out=${digits:$((n % 36)):1}$out
n=$((n / 36))
done
printf '%s' "$out"
}
if [[ "$boardserial" == '' ]]
then
# Fabricate an Apple-style board serial:
#   "C02" + model digit + 2-digit week + base-36 production number
#   + first 4 chars of a base-36 part code.
# Week is 01-52 (never 00), same range as the old reject-0 loop.
WeekN=$(( RANDOM % 52 + 1 ))
WeekNU=$(printf "%02d" "$WeekN")
# 5-digit pseudo-random production number, re-encoded in base-36.
NUM=$(( RANDOM * 7584284 ))
ProductionN=${NUM:0:5}
SSS=$(to_base36_upper "$ProductionN")
# 8-digit part code, base-36, truncated to 4 characters.
NUMB=$(( RANDOM * 7584284 ))
PartN=${NUMB:0:8}
CCCC=$(to_base36_upper "$PartN")
CCCC=${CCCC:0:4}
boardserial="C02"$ModelY$WeekNU$SSS$CCCC
fi
# Finish the SMBIOS dict with the (possibly recovered) board serial.
printf '\t\t<key>BoardSerialNumber</key>\n' >> $config
printf '\t\t<string>'"$boardserial"'</string>\n' >> $config
printf '\t\t<key>Mobile</key>\n' >> $config
printf '\t\t<'"$mobile"'/>\n' >> $config
printf '\t\t<key>Trust</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
printf '\t</dict>\n' >> $config
# RTVariables section
printf '\t<key>RtVariables</key>\n' >> $config
printf '\t<dict>\n' >> $config
# Carry over a recovered ROM value, skipping PlistBuddy "Does Not Exist"
# error text.  (The original compared against the quoted literal
# "*Does Not Exist", which could never match; use a glob pattern.)
if [[ -n "$rom" && "$rom" != *"Does Not Exist"* ]]
then
printf '\t\t<key>ROM</key>\n' >> $config
printf '\t\t<string>'"$rom"'</string>\n' >> $config
fi
printf '\t\t<key>CsrActiveConfig</key>\n' >> $config
printf '\t\t<string>0x67</string>\n' >> $config
printf '\t\t<key>BooterConfig</key>\n' >> $config
printf '\t\t<string>0x28</string>\n' >> $config
printf '\t</dict>\n' >> $config
# boot section
# Kernel boot arguments per model family; all branches share
# kext-dev-mode/dart/rootless, Haswell+AMD/NVIDIA machines add -xcpm.
printf '\t<key>Boot</key>\n' >> $config
printf '\t<dict>\n' >> $config
if [ "$CloverModel" == "9470" ] || [ "$CloverModel" == "6x70AMD" ] || [ "$CloverModel" == "6x70NVIDIA" ]
then
printf '\t\t<key>Arguments</key>\n' >> $config
printf '\t\t<string>-xcpm kext-dev-mode=1 dart=0 rootless=0</string>\n' >> $config
elif [ "$CloverModel" == "4x0G1" ] || [ "$CloverModel" == "3x0G1" ] || [ "$CloverModel" == "ZBook" ] || [ "$CloverModel" == "4x0G2Broadwell" ]
then
# NOTE(review): this branch emits the same arguments as the final
# else branch — possibly intentional placeholders, confirm.
printf '\t\t<key>Arguments</key>\n' >> $config
printf '\t\t<string>kext-dev-mode=1 dart=0 rootless=0</string>\n' >> $config
elif [ "$CloverModel" == "4x0G2" ]
then
printf '\t\t<key>Arguments</key>\n' >> $config
printf '\t\t<string>kext-dev-mode=1 dart=0 darkwake=0 rootless=0</string>\n' >> $config
else
printf '\t\t<key>Arguments</key>\n' >> $config
printf '\t\t<string>kext-dev-mode=1 dart=0 rootless=0</string>\n' >> $config
fi
printf '\t\t<key>Legacy</key>\n' >> $config
printf '\t\t<string>LegacyBiosDefault</string>\n' >> $config
printf '\t\t<key>Log</key>\n' >> $config
printf '\t\t<false/>\n' >> $config
printf '\t\t<key>NeverHibernate</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
printf '\t\t<key>Timeout</key>\n' >> $config
printf '\t\t<integer>5</integer>\n' >> $config
printf '\t\t<key>XMPDetection</key>\n' >> $config
printf '\t\t<string>Yes</string>\n' >> $config
printf '\t\t<key>Secure</key>\n' >> $config
printf '\t\t<false/>\n' >> $config
printf '\t</dict>\n' >> $config
# acpi section
# Always drops the DMAR table (VT-d); dGPU Sandy Bridge models also drop
# MCFG.  OEM SSDTs are dropped, no P/C-state SSDT generation.
printf '\t<key>ACPI</key>\n' >> $config
printf '\t<dict>\n' >> $config
printf '\t\t<key>DSDT</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Debug</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>ReuseFFFF</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>DropOEM_DSM</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>DropTables</key>\n' >> $config
printf '\t\t<array>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Signature</key>\n' >> $config
printf '\t\t\t\t<string>DMAR</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
if [ "$CloverModel" == "6x60pAMD" ] || [ "$CloverModel" == "6x60wAMD" ] || [ "$CloverModel" == "6x60wNVIDIA" ]
then
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Signature</key>\n' >> $config
printf '\t\t\t\t<string>MCFG</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
fi
printf '\t\t</array>\n' >> $config
printf '\t\t<key>SSDT</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>DropOem</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>Generate</key>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>PStates</key>\n' >> $config
printf '\t\t\t\t<false/>\n' >> $config
printf '\t\t\t\t<key>CStates</key>\n' >> $config
printf '\t\t\t\t<false/>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t</dict>\n' >> $config
# kernel and kext patches section
# ATIConnectors* values are hex connector tables for the AMD dGPU
# variants; left byte-exact — do not reformat.
printf '\t<key>KernelAndKextPatches</key>\n' >> $config
printf '\t<dict>\n' >> $config
if [ "$CloverModel" == "6x60pAMD" ]
then
printf '\t\t<key>ATIConnectorsController</key>\n' >> $config
printf '\t\t<string>6000</string>\n' >> $config
printf '\t\t<key>ATIConnectorsData</key>\n' >> $config
printf '\t\t<string>0200000000010000090100001204030300040000040300000001000010000505000400000403000000010000110201010004000000010000000910002103020200040000000100000009100022050404</string>\n' >> $config
printf '\t\t<key>ATIConnectorsPatch</key>\n' >> $config
printf '\t\t<string>0200000040000000090100001001010500040000040300000001000011020204000400000403000000010000210303020004000004030000000100001204040310000000100000000001000000000001</string>\n' >> $config
elif [ "$CloverModel" == "6x60wAMD" ]
then
printf '\t\t<key>ATIConnectorsController</key>\n' >> $config
printf '\t\t<string>6000</string>\n' >> $config
printf '\t\t<key>ATIConnectorsData</key>\n' >> $config
printf '\t\t<string>020000004000000029050000000000050004000004030000000100001102010100040000000100000009100021030202</string>\n' >> $config
printf '\t\t<key>ATIConnectorsPatch</key>\n' >> $config
printf '\t\t<string>100000001000000000010000000000020200000040000000290500001000010500040000040300000001000012040203</string>\n' >> $config
elif [ "$CloverModel" == "6x70AMD" ]
then
printf '\t\t<key>ATIConnectorsController</key>\n' >> $config
printf '\t\t<string>6000</string>\n' >> $config
printf '\t\t<key>ATIConnectorsData</key>\n' >> $config
printf '\t\t<string>0004000004030000000101001102040100040000040300000001020021030502000400000001000000090300100002050004000000010000000904002001030604000000140200000001050002040103</string>\n' >> $config
printf '\t\t<key>ATIConnectorsPatch</key>\n' >> $config
printf '\t\t<string>0200000040000000090100001001000100040000040300000001020011020105000400000402000000010200210202020004000004020000000100001200030310000000100000000001000000000408</string>\n' >> $config
fi
printf '\t\t<key>AppleRTC</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
# Sandy/Ivy Bridge models use the AsusAICPUPM patch; newer ones KernelPm.
if [ "$CloverModel" == "4x30" ] || [ "$CloverModel" == "4x40" ] || [[ $CloverModel == 6x60* ]] || [ "$CloverModel" == "6x70" ]
then
printf '\t\t<key>AsusAICPUPM</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
else
printf '\t\t<key>KernelPm</key>\n' >> $config
printf '\t\t<true/>\n' >> $config
fi
printf '\t\t<key>KextsToPatch</key>\n' >> $config
printf '\t\t<array>\n' >> $config
# TRIM enabler: blank out the "APPLE SSD" vendor check.
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>IOAHCIBlockStorage</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQVBQTEUgU1NE\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAAAAAAAAAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Enable Trim on SSD</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
# Second-stage boot-logo (IOGraphicsFamily) patches apply only to
# Intel-graphics models; all discrete-GPU variants are excluded.
# FIX: the exclusion previously tested "6x0wAMD", a typo that matched no
# model, so 6x60wAMD wrongly received these patches.
if [ "$CloverModel" != "6x60pAMD" ] && [ "$CloverModel" != "6x60wAMD" ] && [ "$CloverModel" != "6x60wNVIDIA" ] && [ "$CloverModel" != "6x70AMD" ] && [ "$CloverModel" != "6x70NVIDIA" ]
then
if [[ "$osxver" == 10.10* ]]
then
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>IOGraphicsFamily</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQYjE6xE=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQYjE6zE=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Second Stage patch 10.10</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
fi
if [[ "$osxver" == 10.11* ]]
then
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>IOGraphicsFamily</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQAAdRc=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQAAdBc=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Second Stage patch 10.11</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
fi
fi
# Broadcom Wi-Fi whitelist patch: replace the hard-coded MacBookAir5,2
# board-id inside AirPortBrcm4360 with our board-id.
# FIX: use printf '%s' so the data is never interpreted as a printf
# format string.
findsmbios=$(printf '%s' 'Mac-2E6FAB96566FE58C' | base64)
patchsmbios=$(printf '%s' "$boardprod" | base64)
if [ "$CloverModel" != "4x30" ]
then
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AirPortBrcm4360</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t'"$findsmbios"'\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t'"$patchsmbios"'\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Broadcom 43224 whitelist</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AirPortBrcm4360</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQYP8/3QsSA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tZscGVVPrKw==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Broadcom 5Ghz US</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
# Bluetooth Handoff fix, OS-version specific binary patch.
if [[ "$osxver" == 10.10* ]]
then
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>IOBluetoothFamily</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tSIXAdFwPt0g=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQb4PAAAA61k=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Handoff Fix</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
elif [[ "$osxver" == 10.11* ]]
then
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>IOBluetoothFamily</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tSIX/dEdIiwc=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQb4PAAAA60Q=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Handoff Fix</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
fi
fi
# Per-model Intel framebuffer connector/audio patches.  All Find/Replace
# payloads are base64-encoded binary sequences — keep byte-exact.
# NOTE(review): "4x40sb" below does not appear in the SMBIOS model case
# table above — confirm it is a valid $CloverModel value.
if [ "$CloverModel" == "4x30" ] || [ "$CloverModel" == "6x60" ] || [ "$CloverModel" == "4x40sb" ]
then
# HD3000 (SNB): VGA-port and HDMI-audio connector table patches.
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelSNBGraphicsFB</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQIEABAHAAAQBwAABQMAAAIAAAAwAAAAAgUAAAAEAAAHAAAAAwQAAAAEAAAJAAAABAYAAAAEAAAJAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQIDABAHAAAQBwAABgIAAAABAAAJAAAABQMAAAIAAAAwAAAABAYAAAAIAAAJAAAAAAAAAAAAAAAAAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch VGA port HD3000 (disable this if you use HDMI)</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>disabled:AppleIntelSNBGraphicsFB</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBAYAAAAEAAAJAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBAYAAAAIAAAJAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HDMI Audio HD3000 (disable this if you use VGA patch)</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
elif ([ "$CloverModel" == "4x40" ] || [ "$CloverModel" == "4x0G0" ]) && [ "$CloverResolution" == "low" ]
then
# HD4000 (IVB), 1366x768 panel: HDMI audio patch.
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferCapri</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBAYAAAAEAACBAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBAYAAAAIAAAGAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch audio HDMI HD4000 1366x768</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
elif ([ "$CloverModel" == "4x40" ] || [ "$CloverModel" == "4x0G0" ]) && [ "$CloverResolution" == "high" ]
then
# HD4000 (IVB), 1600x900 panel: HDMI port patches (two-part).
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferCapri</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBABmAQEDAQEAAAAC\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBABmAQECBAIAAAAE\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HDMI port HD4000 1600x900 #1</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferCapri</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tMAIAAAAAAAABAAAAQAAAAAAAAAABAAAAQAAAAAAAAAABAAAAQAAAAAAAAAAAAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tMAIAAAIFAAAABAAABwQAAAMEAAAABAAAgQAAAAQGAAAACAAABgAAAAAAAAAAAgAR\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HDMI port HD4000 1600x900 #2</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
elif ([ "$CloverModel" == "6x70" ] || [ "$CloverModel" == "9470" ]) && [ "$CloverResolution" == "high" ]
then
# HD4000, 1600x900: DisplayPort variant of the same two-part patch.
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferCapri</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBABmAQEDAQEAAAAC\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBABmAQECBAIAAAAE\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch DP port HD4000 1600x900 #1</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferCapri</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tMAIAAAAAAAABAAAAQAAAAAAAAAABAAAAQAAAAAAAAAABAAAAQAAAAAAAAAAAAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tMAIAAAIFAAAABAAABwQAAAMEAAAABAAAgQAAAAQGAAAABAAAgQAAAAAAAAAAAgAR\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch DP port HD4000 1600x900 #2</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
elif ([ "$CloverModel" == "4x0G1" ] || [ "$CloverModel" == "3x0G1" ] || [ "$CloverModel" == "4x0G2" ])
then
# HD4400/4600 (HSW): cursor-bytes and HDMI-freeze patches.
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferAzul</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBgAmCgEDAwMAAAACAAAwAQAAYAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBgAmCgEDAwMAAAACAAAwAQAAkAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HD4400/4600 9mb Cursor bytes</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferAzul</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQUJAAAEAACHAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQUSAAAIAACHAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HD4400/4600 HDMI Freeze</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
elif ([ "$CloverModel" == "4x0G2Broadwell" ])
then
# HD5500 (BDW): HDMI audio patch.
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelBDWGraphicsFramebuffer</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQULAAAEAAAHBQAAAgQLAAAEAAAHBQAA/wAAAAEAAABAAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAQULAAAIAACCAAAAAgQLAAAIAACCAAAA/wAAAAEAAABAAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HD5500 HDMI Audio</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
elif [ "$CloverModel" == "ZBook" ]
then
# ZBook: HSW cursor-bytes patch plus DisplayPort connector patch.
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferAzul</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBgAmCgEDAwMAAAACAAAwAQAAYAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBgAmCgEDAwMAAAACAAAwAQAAkAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HD4400/4600 9mb Cursor bytes</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleIntelFramebufferAzul</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAgQJAAAEAACHAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAwYDAAAEAACHAAAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>Patch HD4400/4600 DP</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
fi
# USB 3.0 (AppleUSBXHCI) patches for Sandy Bridge models, not needed on
# 10.11 where the USB stack changed.  Patch pairs cover 10.9 and 10.10
# variants of the same binary check.
if ([ "$CloverModel" == "4x30" ] || [[ $CloverModel == 6x60* ]]) && [[ "$osxver" != 10.11* ]]
then
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleUSBXHCI</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t9oDUAAAAgHU6\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t9oDUAAAAgOs6\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>USB 3.0 Disable Intel USB3 controller check 10.9</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleUSBXHCI</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t9oDUAAAAgHU0\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t9oDUAAAAgOs0\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>USB 3.0 Disable Intel USB3 controller check 10.10</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleUSBXHCI</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tZj0AAQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tZj0AAA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>USB 3.0 Disable XHCI 1.0 check 10.9</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleUSBXHCI</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQbzHAgDgPQABAAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQbzHAgDgPQAAAAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>USB 3.0 Disable XHCI 1.0 check 10.10</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleUSBXHCI</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tgUkIAABAAA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tgUkIAAAAAA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>USB 3.0 Patch MSI or pin interrupts #1</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleUSBXHCI</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQcdHCAAAQAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tQcdHCAAAAAA=\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>USB 3.0 Patch MSI or pin interrupts #2</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleUSBXHCI</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tdU1Ii7voAQAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t601Ii7voAQAA\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>USB 3.0 Enable PCI power management</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
fi
# if [ "$CloverHD4600" == "yes" ] && [[ "$osxver" == 10.10* ]]
# then
# printf '\t\t\t<dict>\n' >> $config
# printf '\t\t\t\t<key>Name</key>\n' >> $config
# printf '\t\t\t\t<string>AppleIntelFramebufferAzul</string>\n' >> $config
# printf '\t\t\t\t<key>Find</key>\n' >> $config
# printf '\t\t\t\t<data>\n' >> $config
# printf '\t\t\t\tPYaAEgQ=\n' >> $config
# printf '\t\t\t\t</data>\n' >> $config
# printf '\t\t\t\t<key>Replace</key>\n' >> $config
# printf '\t\t\t\t<data>\n' >> $config
# printf '\t\t\t\tPYaAFgQ=\n' >> $config
# printf '\t\t\t\t</data>\n' >> $config
# printf '\t\t\t\t<key>Comment</key>\n' >> $config
# printf '\t\t\t\t<string>Patch HD4600 10.10</string>\n' >> $config
# printf '\t\t\t</dict>\n' >> $config
# printf '\t\t\t<dict>\n' >> $config
# printf '\t\t\t\t<key>Name</key>\n' >> $config
# printf '\t\t\t\t<string>AppleIntelHD5000Graphics</string>\n' >> $config
# printf '\t\t\t\t<key>Find</key>\n' >> $config
# printf '\t\t\t\t<data>\n' >> $config
# printf '\t\t\t\tPYaAEgR0EOtd\n' >> $config
# printf '\t\t\t\t</data>\n' >> $config
# printf '\t\t\t\t<key>Replace</key>\n' >> $config
# printf '\t\t\t\t<data>\n' >> $config
# printf '\t\t\t\tPYaAFgR0EOtd\n' >> $config
# printf '\t\t\t\t</data>\n' >> $config
# printf '\t\t\t\t<key>Comment</key>\n' >> $config
# printf '\t\t\t\t<string>Patch HD4600 10.10</string>\n' >> $config
# printf '\t\t\t</dict>\n' >> $config
# fi
# Generic AppleHDA patch (all models): zero out the stock codec id so
# injected layouts are used.  Same bytes as before, emitted in four
# consolidated printf calls instead of fourteen.
printf '\t\t\t<dict>\n\t\t\t\t<key>Name</key>\n\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n\t\t\t\t<data>\n\t\t\t\tgxnUEQ==\n\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n\t\t\t\t<data>\n\t\t\t\tAAAAAA==\n\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n\t\t\t\t<string>AppleHDA Patch 10.11</string>\n\t\t\t</dict>\n' >> $config
case $CloverModel in
4x30)
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thBnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t0XYdEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>IDT 76D1</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
;;
4x40*)
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thBnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t2XYdEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>IDT 76D9</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
;;
4x0G0|9470|4x0G1|ZBook)
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thBnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAAAAAA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>IDT 76E0</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tixnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\t4HYdEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>IDT 76E0</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
;;
3x0G1)
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thBnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tlXYdEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>IDT 7695</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
;;
4x0G2*)
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thAjsEA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAAAAAA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>ALC 282</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thQjsEA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAAAAAA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>ALC 282</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thBnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tggLsEA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>ALC 282</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
;;
6x60*|6x70*)
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\thBnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tAAAAAA==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>IDT 7605</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
printf '\t\t\t<dict>\n' >> $config
printf '\t\t\t\t<key>Name</key>\n' >> $config
printf '\t\t\t\t<string>AppleHDA</string>\n' >> $config
printf '\t\t\t\t<key>Find</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tixnUEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Replace</key>\n' >> $config
printf '\t\t\t\t<data>\n' >> $config
printf '\t\t\t\tBXYdEQ==\n' >> $config
printf '\t\t\t\t</data>\n' >> $config
printf '\t\t\t\t<key>Comment</key>\n' >> $config
printf '\t\t\t\t<string>IDT 7605</string>\n' >> $config
printf '\t\t\t</dict>\n' >> $config
;;
esac
printf '\t\t</array>\n' >> $config
printf '\t</dict>\n' >> $config
# gui section
printf '\t<key>GUI</key>\n' >> $config
printf '\t<dict>\n' >> $config
CurrentTheme=''
if [ -f /Volumes/EFI/EFI/CLOVER/config.plist ]
then
CurrentTheme=$(/usr/libexec/PlistBuddy -c 'Print :GUI:Theme' /Volumes/EFI/EFI/CLOVER/config.plist 2>null)
fi
if ( ( [ "$CloverModel" != "6x70AMD" ] && [ "$CloverModel" != "6x70NVIDIA" ] ) && [ "$CloverResolution" == "low" ] ) || ( [ "$CloverModel" == "Zbook" ] || [ "$CloverModel" == "4x0G2Broadwell" ] || [ "$CloverModel" == "4x0G1" ] || [ "$CloverModel" == "4x0G2" ] || [ "$CloverModel" == "3x0G1" ] )
then
if [[ "$CurrentTheme" == '' ]] || [[ "$CurrentTheme" == Error* ]]
then
CurrentTheme='Yosemite S'
fi
printf '\t\t<key>ScreenResolution</key>\n' >> $config
printf '\t\t<string>1024x768</string>\n' >> $config
printf '\t\t<key>Theme</key>\n' >> $config
printf '\t\t<string>'"$CurrentTheme"'</string>\n' >> $config
else
if [[ "$CurrentTheme" == '' ]] || [[ "$CurrentTheme" == Error* ]]
then
CurrentTheme='Yosemite'
fi
printf '\t\t<key>Theme</key>\n' >> $config
printf '\t\t<string>'"$CurrentTheme"'</string>\n' >> $config
fi
printf '\t\t<key>Mouse</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Enabled</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>Scan</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Entries</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>Tool</key>\n' >> $config
printf '\t\t\t<true/>\n' >> $config
printf '\t\t\t<key>Legacy</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t\t<key>Custom</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Entries</key>\n' >> $config
printf '\t\t\t<array>\n' >> $config
printf '\t\t\t\t<dict>\n' >> $config
printf '\t\t\t\t\t<key>FullTitle</key>\n' >> $config
printf '\t\t\t\t\t<string>UEFI Internal</string>\n' >> $config
printf '\t\t\t\t\t<key>Hidden</key>\n' >> $config
printf '\t\t\t\t\t<string>Always</string>\n' >> $config
printf '\t\t\t\t\t<key>Disabled</key>\n' >> $config
printf '\t\t\t\t\t<false/>\n' >> $config
printf '\t\t\t\t\t<key>Type</key>\n' >> $config
printf '\t\t\t\t\t<string>Other</string>\n' >> $config
printf '\t\t\t\t</dict>\n' >> $config
printf '\t\t\t</array>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t</dict>\n' >> $config
# devices section
printf '\t<key>Devices</key>\n' >> $config
printf '\t<dict>\n' >> $config
if [ "$CloverModel" == "4x0G1" ] || [ "$CloverModel" == "ZBook" ] || [ "$CloverModel" == "4x0G2" ] || [ "$CloverModel" == "3x0G1" ]
then
printf '\t\t<key>FakeID</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>IntelGFX</key>\n' >> $config
printf '\t\t\t<string>0x04128086</string>\n' >> $config
printf '\t\t</dict>\n' >> $config
fi
printf '\t\t<key>USB</key>\n' >> $config
printf '\t\t<dict>\n' >> $config
printf '\t\t\t<key>Inject</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>FixOwnership</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t\t<key>AddClockID</key>\n' >> $config
printf '\t\t\t<false/>\n' >> $config
printf '\t\t</dict>\n' >> $config
printf '\t</dict>\n' >> $config
# system parameters section
printf '\t<key>SystemParameters</key>\n' >> $config
printf '\t<dict>\n' >> $config
printf '\t\t<key>InjectKexts</key>\n' >> $config
printf '\t\t<string>Detect</string>\n' >> $config
printf '\t</dict>\n' >> $config
# end
printf '\t</dict>\n' >> $config
printf '\t</plist>\n' >> $config
chown "$USER" "$config"
fi
| true |
ffea5f2bfec957b8f205fc5756f84e1cbad81e51 | Shell | changyihsin/grepid | /grepid | UTF-8 | 609 | 2.84375 | 3 | [] | no_license | #!/bin/bash
# Build a GNU idutils ID database for the current project directory.
shopt -s -o nounset

PROJECT_HOME="$PWD"
project="tmp"
id_map="${PROJECT_HOME}/${project}.id"

# Write the mkid language map in one shot. The redirection creates the file
# itself, so the old `test -f || touch` preamble was redundant and is gone.
{
    echo "*.c text"
    echo "*.cpp text"
    echo "*.h text"
    echo "*.js text"
    echo "*.s text"
    echo "*.S text"
    echo "Makefile text"
    echo "*.mk text"
} > "$id_map"

# extglob must be enabled before the !(obj*) pattern below is parsed;
# it indexes everything except obj* build directories.
shopt -s extglob
mkid -m "$id_map" !(obj*)
| true |
43185b57cf357383b544c2d1a9c95387db1a34f1 | Shell | joecannatti/ShellShowBash | /0-history-basics.sh | UTF-8 | 2,136 | 3.296875 | 3 | [] | no_license |
#UNIX philosophy
#Do one thing and do it well (Doug McIlroy)
#Everything is a file...........Linus says "stream of bytes" is more accurate
#Brief History of UNIX leading up to Linux, and FreeBSD
#most UNIX tools have a canonical GNU/Linux version and BSD version
#Bourne
#Again
#SHell
#Written by Brian Fox for GNU as a replacement for the BourneShell (sh) in 1989
#The BourneShell was the original UNIX shell developed in 1969
#Took many advanced features from the KornShell (ksh) and CShell (csh) and added them to the BourneShell
#examples - history, tab completion,
#Works through piping output in streams from process to process
#find all ruby files; pull out the lines that contain instance eval; limit to 10 per file; strip leading whitespace; sort alphabetically
find . -name \*.rb | while read line; do grep instance_eval $line | head -10 ; done | sed "s/^[ ]*//" | sort
#We'll get to more of how this works later
#Very powerful way of expressing a program
#It's sort of like functional programming, not quite.
#That's why it works so well for one liners.
#BASH is the default shell in the GNU system (All Linux Distros), and OS X
#OpenSolaris defaults to ksh, which is compatible with the BourneShell
#But FreeBSD defaults to tcsh shell, which is not
#When in doubt and cross environment compatibility is needed stick to sh features.
#which means avoid brace expansion
ls *.{jpg,jpeg,png}
#non subshell math operations
$((4 + 6))
# SheBang (#!) your files to the most common shell type that will support your script
#.......If you are on Linux or OS X you will have BASH though. So I wouldn't worry too much about this.
#zsh is also becoming popular for developers.
#Adds some features around globbing, completion, and customization
#When using someone else's machine with ZSH everything I do works as it does in BASH, but I don't think it is strictly compatible with BASH
#It is compatible with sh
#Startup Files (In Order)
# /etc/profile
# ~/.bash_profile can also be named (~/.profile and .bash_login)
# ~/.bashrc
#Basic commands (I'm assuming most people know all of these)
ls
mv
cp
cd
rm
mkdir
chmod
head
tail
| true |
578e819a367b773dd072789920c9584e7ffe4755 | Shell | makometr/ADS-8304 | /Mukhin/lab2/lab2.sh | UTF-8 | 961 | 2.546875 | 3 | [] | no_license | #!/bin/bash
# Compile the lab binary, run it against every bracket-structure test case,
# then remove the binary.
g++ -Wall -Werror ./Source/lab2.cpp -o lab2

# Test inputs in their original order. The loop numbers them automatically,
# which also fixes the duplicated "17)" label of the old hand-written list
# (cases 17 and 18 were both printed as "17").
tests=(
    "((a))"
    "(a)"
    "((((asd)frw)(df(sdf)as(feqw(sd)fa)xca)dfa)sdf)"
    "(a(a(a)((a)a))(a(a)(a))"
    "(a(bc(de)d(er)q(er(er)ty)er(df)))"
    "(a(bc(de)q(er(er)ty)er(df)hf))"
    "(a(bc(de)d(er)q(erty)erdk))"
    "(a(bc(gd)gd(bgd(dfg(dfgbdg)g)dg(bg)dfgbd)gb(dg(b)b(d(g)dgb)d)bgd))"
    "(a(b(cd())d)(d(f(g()g)f(ds))))"
    "a(b(cd())d)(d(f(g()g)f(ds))))"
    "(a(b(cd)d)(d(f(gg)f(ds))))"
    "(a(bfs)f"
    "f)(df(re)))(d(f(g)f(d))))"
    "(a(b(dd(f(g)f(d))))"
    "(a(b(d)d(f)(dfdghdf(re)))(d(ghfghfgh(d))))"
    "(a(b(d)d(fdhfghdfd(f(g)f(d))))"
    "((a))"
    "(a(b))(c)"
)

i=1
for t in "${tests[@]}"; do
    echo -n "${i}) "
    ./lab2 "$t"
    i=$((i + 1))
done

rm lab2
| true |
c9ebcbae120730b6dee3402bd52e1b0856763880 | Shell | davetcoleman/unix_settings | /scripts/delete/ros_commit.sh | UTF-8 | 1,230 | 3.375 | 3 | [] | no_license | function gitHasChanges()
{
    # Report whether the current git repo has uncommitted changes and, if so,
    # interactively offer to show the diff and commit everything.
    # NOTE(review): parse_vc_branch_and_add_brackets, gitst and gitall are
    # helpers/aliases defined elsewhere in this dotfiles setup — presumably
    # branch display, `git status` and an add+commit+push shortcut; confirm.
    if git diff-index --quiet HEAD --; then
        echo "No changes detected in git repo"
    else
        echo ""
        echo "Changes have been detected in git repo:"
        echo "--------------------------------------------------------"
        # Bright magenta for the repo path / branch banner.
        echo -e "\e[00;1;95m"
        pwd
        parse_vc_branch_and_add_brackets
        echo -e "\e[00m"
        echo "--------------------------------------------------------"
        echo ""
        gitst
        read -p "View git diff? (y/n): " resp
        if [ "$resp" = "y" ]; then
            echo ""
            git diff
            read -p "Commit with gitall? (y/n): " resp
            if [ "$resp" = "y" ]; then
                gitall
                read -p "Continue? " resp
            fi
        fi
    fi
}
function scanThisDirectoryForGit()
{
    # Visit every git checkout under the current directory and run
    # gitHasChanges from each repository root. find's -print0 plus a
    # NUL-delimited read keeps paths containing spaces intact; the old
    # backtick loop word-split them. As before, the shell's cwd is left
    # at the last repository visited.
    local gitdir
    while IFS= read -r -d '' gitdir; do
        cd "$gitdir/.." || continue
        gitHasChanges
    done < <(find "$(pwd)" -name .git -type d -prune -print0)
}
# NOTE(review): this loop body is a no-op (':'), so the files array is
# effectively unused — the first scanThisDirectoryForGit call below simply
# runs in whatever directory the script was started from. Confirm whether
# the loop was meant to cd into each workspace.
files=( "/home/dave/ros/ws_base/src" )
for i in "${files[@]}"
do :
done
scanThisDirectoryForGit
# Scan each hard-coded ROS workspace in turn.
cd /home/dave/ros/ws_moveit/src
scanThisDirectoryForGit
cd /home/dave/ros/ws_moveit_other/src
scanThisDirectoryForGit
cd /home/dave/ros/ws_amazon/src
scanThisDirectoryForGit
#cd /home/dave/ros/ws_amazon/src
#scanThisDirectoryForGit
cd /home/dave/ros
echo ""
echo "Finished committing all ROS repos!"
echo ""
# Audible "done" notification (requires sox's `play`).
play -q ~/unix_settings/emacs/success.wav
| true |
a3e53a09d33062f4058874f15f68535a2d0c2431 | Shell | Morenware/tvster-ci | /app/organizer/image/build.sh | UTF-8 | 729 | 3.734375 | 4 | [] | no_license | #!/bin/bash
usage() {
echo "$0 <appname> <tag> <build-location>"
}
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
APP=$1
TAG=$2 || v1
BUILD_PATH=$3
if [ -z "$BUILD_PATH" ]; then
BUILD_PATH=./app/$APP
fi
# Multiple Dockerfiles
if [ -f "$DIR/Dockerfile" ]; then
# Change Dockerfile
mv $BUILD_PATH/Dockerfile $DIR/Dockerfile-previous
cp $DIR/Dockerfile $BUILD_PATH
fi
# For ARM emulation on build
docker run --rm --privileged multiarch/qemu-user-static:register --reset
echo "Building Dockerfile:"
cat $BUILD_PATH/Dockerfile
docker build -t tvster/$APP:$TAG $BUILD_PATH
if [ -f "$DIR/Dockerfile-previous" ]; then
rm $BUILD_PATH/Dockerfile
mv $DIR/Dockerfile-previous $BUILD_PATH/Dockerfile
fi | true |
fd199388343094b6cf4f2e694501c43d9f5f8d00 | Shell | kromanow94/dotfiles | /kromanow_conf | UTF-8 | 3,777 | 3.296875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Temporarily bypass this repo's commit hooks by clearing their exec bit.
function pre_commit_disable() {
    local repo_root hook
    repo_root="$(git rev-parse --show-toplevel)"
    for hook in pre-commit commit-msg; do
        chmod -x "${repo_root}/.git/hooks/${hook}"
    done
}

# Re-enable the hooks disabled by pre_commit_disable.
function pre_commit_enable() {
    local repo_root hook
    repo_root="$(git rev-parse --show-toplevel)"
    for hook in pre-commit commit-msg; do
        chmod +x "${repo_root}/.git/hooks/${hook}"
    done
}

# Run from inside a Kubeflow notebook pod: annotate our own Notebook
# resource (via the in-cluster serviceaccount) so Kubeflow stops it.
# Pod "<name>-0" belongs to Notebook "<name>" (statefulset ordinal suffix).
function gtm.manual.turn_me_off() {
    kubectl \
        --kubeconfig /dev/null \
        --certificate-authority /var/run/secrets/kubernetes.io/serviceaccount/ca.crt \
        --token "$(cat /var/run/secrets/kubernetes.io/serviceaccount/token)" \
        --server https://kubernetes.default \
        patch \
        --type merge \
        --patch '{"metadata": {"annotations": {"kubeflow-resource-stopped": "true"}}}' \
        notebook "${HOSTNAME/-0/}"
}

# For every notebook pod, print how long ago its code-server heartbeat file
# was last touched.
function gtm.manual.notebook-show-last-activity() {
    local now_seconds notebook namespace pod_name
    local last_active_date last_active_seconds difference_in_minutes
    now_seconds=$(date +%s)
    # Pod list is "<namespace>/<name>" per line; k8s names contain no spaces,
    # so word-splitting the command substitution is safe here.
    for notebook in $(kubectl get pods -A -l notebook-name -ojson | jq -r '.items[]| {namespace: .metadata.namespace, name: .metadata.name} | join("/")'); do
        namespace=${notebook%%/*}
        pod_name=${notebook#*/}
        last_active_date=$(kubectl -n "$namespace" exec "$pod_name" -- stat ~/.local/share/code-server/heartbeat | awk '/Change/ { print $2, substr($3, 1, 8) }')
        last_active_seconds=$(date -d "$last_active_date" +%s)
        difference_in_minutes=$(((now_seconds - last_active_seconds) / 60))
        echo "$notebook" "$last_active_date" "$difference_in_minutes" | awk -F'[/ ]' '{print $1, $2, "last_active_date:", $3, $4, "difference_in_minutes:", $5}'
    done
}

# Stop ("cull") every notebook whose code-server heartbeat is older than
# idle_time_minutes by annotating its Notebook resource.
function gtm.manual.notebook-culling() {
    echo "#################################################"
    echo "### Starting manual notebook culling ###"
    echo "### each notebook to be culled will be logged ###"
    echo "#################################################"
    local idle_time_minutes=120
    local now_seconds notebook namespace pod_name nb_name
    local last_active_date last_active_seconds difference_in_minutes
    now_seconds=$(date +%s)
    for notebook in $(kubectl get pods -A -l notebook-name -ojson | jq -r '.items[]| {namespace: .metadata.namespace, name: .metadata.name} | join("/")'); do
        namespace=${notebook%%/*}
        pod_name=${notebook#*/}
        # Strip the "-0" statefulset suffix to recover the Notebook name
        # (same derivation as gtm.manual.turn_me_off).
        nb_name=${pod_name/-0/}
        last_active_date=$(kubectl -n "$namespace" exec "$pod_name" -- stat ~/.local/share/code-server/heartbeat | awk '/Change/ { print $2, substr($3, 1, 8) }')
        last_active_seconds=$(date -d "$last_active_date" +%s)
        difference_in_minutes=$(((now_seconds - last_active_seconds) / 60))
        if [ "$difference_in_minutes" -ge "$idle_time_minutes" ]; then
            echo "$notebook" "$last_active_date" "$difference_in_minutes" | awk -F'[/ ]' '{print $1, $2, "last_active_date:", $3, $4, "difference_in_minutes:", $5}'
            kubectl -n "$namespace" patch notebook "$nb_name" --type=merge -p '{"metadata":{"annotations":{"kubeflow-resource-stopped":"true"}}}'
        fi
    done
    echo "########################################"
    echo "### Manual notebook culling finished ###"
    echo "########################################"
}

# Summarise each node's EKS nodegroup and source launch template as YAML.
function gtm.manual.nodes-show-template() {
    kubectl get nodes -oyaml |
        yq '.items[]| .metadata |
            {
                node: .name,
                nodegroupType: .labels."eks.amazonaws.com/nodegroup",
                launchTemplateId: .labels."eks.amazonaws.com/sourceLaunchTemplateId",
                launchTemplateVersion: .labels."eks.amazonaws.com/sourceLaunchTemplateVersion"
            }' -y
}

# List nodes the cluster-autoscaler currently reports as unneeded.
function gtm.manual.nodes-show-unneeded() {
    kubectl -n kube-system logs -l app=cluster-autoscaler | awk '/unneeded/ && /ip-/ { print $5 }' | sort -u
}
9818eed38a31425901a928aa548f0df52dc9264f | Shell | hellonuh/vagrant-boxes | /xe/bootstrap.sh | UTF-8 | 597 | 2.53125 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Vagrant provisioner: silent-install Oracle XE 11.2 and set up its env vars.
echo "provisioning the box"
# Prerequisites for the Oracle XE RPM.
yum install -y glibc make binutils gcc libaio bc initscripts net-tools openssl
# Silent install; keep the transcript for troubleshooting.
rpm -ivh /vagrant/software/oracle-xe-11.2.0-1.0.x86_64.rpm > /tmp/XEsilentinstall.log
/etc/init.d/oracle-xe configure responseFile=/vagrant/software/xe.rsp >> /tmp/XEsilentinstall.log
# Persist the Oracle environment for interactive logins (provisioner's home).
echo "export ORACLE_HOME=/u01/app/oracle/product/11.2.0/xe" >> .bash_profile
echo "export ORACLE_SID=XE" >> .bash_profile
# NOTE(review): the unescaped backticks below execute nls_lang.sh at
# provision time; if the substitution was meant to run at login time
# instead, the backticks need escaping — confirm the intent.
echo "export NLS_LANG=`\$ORACLE_HOME/bin/nls_lang.sh`" >> .bash_profile
echo "export PATH=\$ORACLE_HOME/bin:\$PATH" >> .bash_profile
yum clean all
echo "All done"
| true |
51649d205b6a7fae2fcbbcfe681ddcc196748ab1 | Shell | ghsecuritylab/N14 | /components/libsodium/libsodium/dist-build/ios.sh | UTF-8 | 3,662 | 3.34375 | 3 | [
"ISC",
"MIT"
] | permissive | #! /bin/sh
#
# Step 1.
# Configure for base system so simulator is covered
#
# Step 2.
# Make for iOS and iOS simulator
#
# Step 3.
# Merge libs into final version for xcode import
# Staging prefixes: one temporary install tree per architecture, merged later.
export PREFIX="$(pwd)/libsodium-ios"
export IOS32_PREFIX="$PREFIX/tmp/ios32"
export IOS64_PREFIX="$PREFIX/tmp/ios64"
export SIMULATOR32_PREFIX="$PREFIX/tmp/simulator32"
export SIMULATOR64_PREFIX="$PREFIX/tmp/simulator64"
export XCODEDIR=$(xcode-select -p)
# Pick the minimum deployment target supported by the installed Xcode;
# Xcode 8+ SDKs reject targets older than iOS 6. Callers may override via env.
xcode_major=$(xcodebuild -version|egrep '^Xcode '|cut -d' ' -f2|cut -d. -f1)
if [ $xcode_major -ge 8 ]; then
  export IOS_SIMULATOR_VERSION_MIN=${IOS_SIMULATOR_VERSION_MIN-"6.0.0"}
  export IOS_VERSION_MIN=${IOS_VERSION_MIN-"6.0.0"}
else
  export IOS_SIMULATOR_VERSION_MIN=${IOS_SIMULATOR_VERSION_MIN-"5.1.1"}
  export IOS_VERSION_MIN=${IOS_VERSION_MIN-"5.1.1"}
fi
mkdir -p $SIMULATOR32_PREFIX $SIMULATOR64_PREFIX $IOS32_PREFIX $IOS64_PREFIX || exit 1
# Build for the simulator
export BASEDIR="${XCODEDIR}/Platforms/iPhoneSimulator.platform/Developer"
export PATH="${BASEDIR}/usr/bin:$BASEDIR/usr/sbin:$PATH"
export SDK="${BASEDIR}/SDKs/iPhoneSimulator.sdk"
## i386 simulator
export CFLAGS="-O2 -arch i386 -isysroot ${SDK} -mios-simulator-version-min=${IOS_SIMULATOR_VERSION_MIN} -flto"
export LDFLAGS="-arch i386 -isysroot ${SDK} -mios-simulator-version-min=${IOS_SIMULATOR_VERSION_MIN} -flto"
# distclean is expected to fail harmlessly on the first pass.
make distclean > /dev/null
./configure --host=i686-apple-darwin10 \
            --disable-shared \
            --enable-minimal \
            --prefix="$SIMULATOR32_PREFIX" || exit 1
make -j3 install || exit 1
## x86_64 simulator
export CFLAGS="-O2 -arch x86_64 -isysroot ${SDK} -mios-simulator-version-min=${IOS_SIMULATOR_VERSION_MIN} -flto"
export LDFLAGS="-arch x86_64 -isysroot ${SDK} -mios-simulator-version-min=${IOS_SIMULATOR_VERSION_MIN} -flto"
make distclean > /dev/null
# Fix: abort on configure failure, as every other architecture section does;
# previously a failed configure here went unnoticed and the following make
# would build/install stale or wrong output.
./configure --host=x86_64-apple-darwin10 \
            --disable-shared \
            --enable-minimal \
            --prefix="$SIMULATOR64_PREFIX" || exit 1
make -j3 install || exit 1
# Build for iOS
export BASEDIR="${XCODEDIR}/Platforms/iPhoneOS.platform/Developer"
export PATH="${BASEDIR}/usr/bin:$BASEDIR/usr/sbin:$PATH"
export SDK="${BASEDIR}/SDKs/iPhoneOS.sdk"
## 32-bit iOS
export CFLAGS="-O2 -mthumb -arch armv7 -isysroot ${SDK} -mios-version-min=${IOS_VERSION_MIN} -flto"
export LDFLAGS="-mthumb -arch armv7 -isysroot ${SDK} -mios-version-min=${IOS_VERSION_MIN} -flto"
make distclean > /dev/null
./configure --host=arm-apple-darwin10 \
            --disable-shared \
            --enable-minimal \
            --prefix="$IOS32_PREFIX" || exit 1
make -j3 install || exit 1
## 64-bit iOS
export CFLAGS="-O2 -arch arm64 -isysroot ${SDK} -mios-version-min=${IOS_VERSION_MIN} -flto"
export LDFLAGS="-arch arm64 -isysroot ${SDK} -mios-version-min=${IOS_VERSION_MIN} -flto"
make distclean > /dev/null
./configure --host=arm-apple-darwin10 \
            --disable-shared \
            --enable-minimal \
            --prefix="$IOS64_PREFIX" || exit 1
make -j3 install || exit 1
# Create universal binary and include folder
rm -fr -- "$PREFIX/include" "$PREFIX/libsodium.a" 2> /dev/null
mkdir -p -- "$PREFIX/lib"
# Merge the four per-arch static libs into one fat library for Xcode.
lipo -create \
     "$SIMULATOR32_PREFIX/lib/libsodium.a" \
     "$SIMULATOR64_PREFIX/lib/libsodium.a" \
     "$IOS32_PREFIX/lib/libsodium.a" \
     "$IOS64_PREFIX/lib/libsodium.a" \
     -output "$PREFIX/lib/libsodium.a"
# Headers are identical across arches; take the armv7 copy.
mv -f -- "$IOS32_PREFIX/include" "$PREFIX/"
echo
echo "libsodium has been installed into $PREFIX"
echo
file -- "$PREFIX/lib/libsodium.a"
# Cleanup
rm -rf -- "$PREFIX/tmp"
make distclean > /dev/null
| true |
e41ff84f3a1ceda7932c036ef3d875fd1c6f2d54 | Shell | sebChevre/FSharp | /azure-web-first/build.sh | UTF-8 | 496 | 3.359375 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
PREFIX="./"

# .exe assemblies run natively on Windows; everywhere else they go
# through Mono, and FAKE gets a MONO define for the build script.
# EXE/PREFIX are deliberately left unquoted below so an empty EXE
# disappears from the command line instead of becoming an empty word.
if [ "$OS" = "Windows_NT" ]; then
  EXE=""
  FLAGS=""
else
  EXE="mono"
  FLAGS="-d:MONO"
fi

# Bootstrap paket on the first run only.
if [ ! -f packages/FAKE/tools/FAKE.exe ]; then
  ${EXE} ${PREFIX}/.paket/paket.bootstrapper.exe || exit $?
fi

# Restore dependencies, then hand control to the FAKE build script.
${EXE} ${PREFIX}/.paket/paket.exe restore || exit $?

${EXE} ${PREFIX}/packages/FAKE/tools/FAKE.exe $@ --fsiargs ${FLAGS} build.fsx
| true |
9a5bca2edbd953910adbc88cff4986278c4f9d70 | Shell | dmvieira/dotfiles | /dot_functions/help.sh | UTF-8 | 7,138 | 3.171875 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# extglob is required: dot_help builds an extended +(a|b|c) pattern from
# DOT_FUNCTIONS for its case dispatch below.
shopt -s extglob
# Master list of helper commands; keep in sync with the h_* help functions
# inside dot_help. Also drives tab completion (_dot_help).
DOT_FUNCTIONS="change_hadoop copy_hadoop_deps mkproj cdproj rmproj mkvirtualenv rmvirtualenv usevirtualenv cdvirtualenv kafka_service mysql_service redis_service elasticsearch_service mongo_service zookeeper_service drun drun_java8 notifyme notifymessage"
# Print usage help. With an argument naming a known helper, show just that
# helper's help; with anything else (or no argument), show all of them.
# The nested h_* functions are (re)defined on every call.
function dot_help {
    h_change_hadoop() {
        echo $"""
#########################################################
change_hadoop:
Change hadoop configuration files for environments.
Usage: change_hadoop (local)
#########################################################
"""
    }
    h_copy_hadoop_deps() {
        echo $"""
#########################################################
copy_hadoop_deps:
Copy dependencies from hadoop to another.
Usage: copy_hadoop_deps hdfs:///dep1.jar,hdfs:///dep2.jar user (prod|qa|local) (prod|qa|local)
Copy deps from hadoop environment to another using some user
#########################################################
"""
    }
    h_cdproj(){
        echo $"""
#########################################################
cdproj:
Go to project path or workspace path if no args.
Usage: cdproj {project_name}
#########################################################
"""
    }
    h_mkproj(){
        echo $"""
#########################################################
mkproj:
make project path on workspace using a git repo.
Usage: mkproj project_git_repo {project_name}
#########################################################
"""
    }
    h_rmproj(){
        echo $"""
#########################################################
rmproj:
remove project path on workspace.
Usage: rmproj project_name
#########################################################
"""
    }
    h_mkvirtualenv(){
        echo $"""
#########################################################
mkvirtualenv:
Make virtualenv with name and python version. Default with no version (system python).
Usage: mkvirtualenv project_name {2.7|3.3}
#########################################################
"""
    }
    h_rmvirtualenv(){
        echo $"""
#########################################################
rmvirtualenv:
Remove virtualenv with name.
Usage: rmvirtualenv project_name
#########################################################
"""
    }
    h_usevirtualenv(){
        echo $"""
#########################################################
usevirtualenv:
Use virtualenv with name.
Usage: usevirtualenv project_name
#########################################################
"""
    }
    h_cdvirtualenv(){
        echo $"""
#########################################################
cdvirtualenv:
Use virtualenv and change to project with name or directory with same name as virtualenv.
Usage: cdvirtualenv virtualenv_name {project_name}
#########################################################
"""
    }
    h_kafka_service(){
        echo $"""
#########################################################
kafka_service:
Run kafka with zookeeper docker compose.
Usage: kafka_service (start|stop|restart)
#########################################################
"""
    }
    h_mysql_service(){
        echo $"""
#########################################################
mysql_service:
Run mysql docker.
Usage: mysql_service (start|stop|restart)
#########################################################
"""
    }
    h_redis_service(){
        echo $"""
#########################################################
redis_service:
Run redis docker.
Usage: redis_service (start|stop|restart)
#########################################################
"""
    }
    h_zookeeper_service(){
        echo $"""
#########################################################
zookeeper_service:
Run zookeeper docker.
Usage: zookeeper_service (start|stop|restart)
#########################################################
"""
    }
    h_mongo_service(){
        echo $"""
#########################################################
mongo_service:
Run mongodb docker.
Usage: mongo_service (start|stop|restart)
#########################################################
"""
    }
    h_elasticsearch_service(){
        echo $"""
#########################################################
elasticsearch_service:
Run elasticsearch docker.
Usage: elasticsearch_service (start|stop|restart)
#########################################################
"""
    }
    h_drun(){
        echo $"""
#########################################################
drun:
Run command on docker with any image.
Usage: drun cogniteev/oracle-java:java8 \"df -h\"
#########################################################
"""
    }
    h_drun_java8(){
        echo $"""
#########################################################
drun_java8:
Run command on docker with java 8 image cogniteev/oracle-java:java8.
Usage: drun_java8 df -h
#########################################################
"""
    }
    h_notifyme(){
        echo $"""
#########################################################
notifyme:
Run command with json that will be sent to notifications IFTTT api.
Usage: notifyme '{"message": "its ok"}'
#########################################################
"""
    }
    h_notifymessage(){
        echo $"""
#########################################################
notifymessage:
Run command with message that will be sent to notifications IFTTT api.
Usage: notifymessage "its ok"
#########################################################
"""
    }
    # Build an extglob pattern +(name1|name2|...) from DOT_FUNCTIONS; with
    # extglob enabled (see top of file) the unquoted $case_functions below
    # matches any single known helper name.
    local case_functions=$(echo '+('$(echo $DOT_FUNCTIONS | sed -e 's/ /|/g')')')
    case "$1" in
        $case_functions)
            eval "h_"$1
            ;;
        *)
            # Unknown or missing argument: print every helper's help.
            IFS=' ' read -r -a array <<< "$DOT_FUNCTIONS"
            for func in "${array[@]}"; do
                eval "h_"$func
            done
    esac
}
# Bash completion for dot_help: when the user is completing the first
# argument, offer the helper names listed in DOT_FUNCTIONS.
_dot_help() {
    local cur
    (( COMP_CWORD == 1 )) || return 0
    cur=${COMP_WORDS[COMP_CWORD]}
    COMPREPLY=( $(compgen -W "$DOT_FUNCTIONS" -- "$cur") )
}
complete -F _dot_help dot_help
| true |
8f32a713536884f8d9d7f21957dc2fa10e27897c | Shell | consag/njmonForLinux | /config/influxdb/stopinfluxdb.sh | UTF-8 | 163 | 2.953125 | 3 | [
"MIT",
"Apache-2.0"
] | permissive | . ./influxdb_env.sh
# Stop the influxdb instance recorded in the pid file under INSTALLDIR
# (INSTALLDIR is exported by influxdb_env.sh, sourced at the top of this
# script). Quoting fixes word-splitting when the path contains spaces.
pidFile="${INSTALLDIR}/influxdb.pid"
if [ -f "$pidFile" ] ; then
	kill "$(cat "$pidFile")"
else
	echo "$(date) pidFile >$pidFile< not found."
fi
| true |
7ec62f061b56f8d7058e6188f367d6e9bbee5b3b | Shell | tatfook/git | /git.sh | UTF-8 | 1,241 | 3.484375 | 3 | [] | no_license | #!/bin/bash
OLD_REPO_DIR=/git/repositories
NEW_REPO_DIR=/git-data/git
#OLD_REPO_DIR=/root/workspace/js/git/repositories
#NEW_REPO_DIR=/root/workspace/js/git/data/git
USERNAME_PREFIX=gitlab_www_
#rm -fr ${NEW_REPO_DIR}/*
#rm -fr ${OLD_REPO_DIR}
#tar -zxvf repo.tar.gz

# Dry-run migration: for every gitlab_www_<user>/<repo>.git under the old
# layout, print the mv command that would move it to the new layout where
# the directory name is base64("<user>/<repo>") on a single line (-w 0).
# Globbing replaces the old `for x in $(ls ...)` loops so unusual names
# cannot be word-split; the actual mv stays commented out as before.
for userpath in "${OLD_REPO_DIR}"/*; do
    [ -e "$userpath" ] || continue
    username=${userpath##*/}
    prefix=${username:0:11}
    if [ "${prefix}" != "${USERNAME_PREFIX}" ]; then
        continue
    fi
    realname=${username:11}
    #echo ${realname}
    for repopath in "${OLD_REPO_DIR}/${username}"/*; do
        [ -e "$repopath" ] || continue
        reponame=${repopath##*/}
        # skip wiki repositories and the keepwork placeholder
        if [ "${reponame:0-9:9}" = ".wiki.git" ] || [ "${reponame}" = "__keepwork__.git" ]; then
            echo "continue ${reponame}"
            continue
        fi
        short_reponame=${reponame%.git}
        old_repopath=${OLD_REPO_DIR}/${username}/${reponame}
        new_reponame=$(printf '%s' "${realname}/${short_reponame}" | base64 -w 0)
        new_repopath=${NEW_REPO_DIR}/${new_reponame}
        #echo ${old_repopath}
        #echo ${new_repopath}
        if [ -e "${new_repopath}" ]; then
            echo "${new_repopath} already exist!!!"
        else
            echo mv "${old_repopath}" "${new_repopath}"
            #mv "${old_repopath}" "${new_repopath}"
        fi
    done
done
| true |
9284647b165208819137bba8c7b46f624b594b2f | Shell | chipster/chipster | /src/main/admin/bundle/setup-genome-browser-sheep.sh | UTF-8 | 1,248 | 3.34375 | 3 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
# Script for downloading sheep genome and converting it into correct format for Chipster.
# Please verify the filesize (SEE THE LAST COMMENT), wget/server seems to cut off files sometimes.
# download assembly report
wget ftp://ftp.ncbi.nlm.nih.gov/genomes/ASSEMBLY_REPORTS/All/GCA_000298735.1.assembly.txt
# keep only chromosome identifiers
cat GCA_000298735.1.assembly.txt |grep chromosome |grep -v "#" |cut -d " " -f 4 > chrs.txt
# download sequence for each chromosome
while read chr; do
wget "http://www.ncbi.nlm.nih.gov/sviewer/viewer.fcgi?tool=portal&sendto=on&log$=seqview&db=nuccore&dopt=fasta&sort=&val=$chr&extrafeat=0&maxplex=1" -O "$chr.fa"
done < chrs.txt
# append all sequences together
cat CM*.fa > toplevel.fa
# clean headers
sed 's/>gi.*chromosome />/g' toplevel.fa | sed 's/, whole genome shotgun sequence//g' > toplevel-rename.fa
# remove empty rows
grep . toplevel-rename.fa > toplevel-clean.fa
# Result length should equal to "Total sequence length" minus "Total length" of "Unpaced scaffolds", in this case 2587507083
BYTES=$(grep -v ">" toplevel-clean.fa | wc -c); LINES=$(grep -v ">" toplevel-clean.fa | wc -l);expr $BYTES - $LINES
echo $BYTES
mv toplevel-clean.fa Ovis_aries.Oar_v3.1.dna.toplevel.fa
| true |
878e6f3f17d05cf70fcd8fbffc3516d0cfbc6dc5 | Shell | valegucci/route4me-curl | /Linux/Activities/get_activities_driver_arrived_early.sh | UTF-8 | 324 | 2.75 | 3 | [] | no_license | #!/bin/bash
# Example refers to the process of getting all driver-arrived-early activities
url=https://www.route4me.com/api/get_activities.php
apikey=11111111111111111111111111111111
act_type=driver-arrived-early
curl -o file1.txt -g -k -X GET "$url?api_key=$apikey&activity_type=$act_type"
echo "Finished..."
sleep 15
| true |
966dbec0179c75575ed06206a030c59f16ca4b3e | Shell | kvadrage/mlnx-switchdev-altlinux | /etcnet-switchdev/etc/net/scripts/destroy-wlan | UTF-8 | 229 | 3.09375 | 3 | [] | no_license | #!/bin/bash
pickup_defaults
pickup_options
[ -x "${WLANCONFIG:=$DEFAULT_WLANCONFIG}" ] || {
print_error "$WLANCONFIG does not exist or is not executable. Try installing madwifi-utils RPM."
exit 1
}
$WLANCONFIG $NAME destroy
| true |
739d228521b601dd439ede4c6c4606cf5b7bc6db | Shell | bhumi2007shah/hexagonsearch | /server/exportMasterData.sh | UTF-8 | 1,209 | 3.21875 | 3 | [] | no_license | #!/bin/sh
echo " "
date
echo "creating master data sql script"
# Capabilities + Complexities master data to ML #40
# this file created an sql script in apache folder that can be downloaded anywhere using url litmusblox.io/capabilityComplexityMaster.sql
# If you want to add more table add in below script " -t <table_name> "
# export psql format create and insert script to root folder
PGPASSWORD="H#X@g0nL1tmu$" pg_dump -h localhost -p 5432 -U postgres -t public.capability_master -t public.complexity_master -t public.level_master -t public.capability_complexity_mapping -t public.complexity_scenario -t public.level_capability_scenario_mapping -d scoringengine> /home/lbprod/capabilityComplexityMaster.psql
# change directory to /tmp/pg2mysql-master
cd /tmp/pg2mysql-master
# convert exported psql file to mysql
php pg2mysql_cli.php /home/lbprod/capabilityComplexityMaster.psql /home/lbprod/capabilityComplexityMaster.sql
#remove public. from table names so it could run in mysql
sed -i -e 's/public.//g' /home/lbprod/capabilityComplexityMaster.sql
# copy scoringEngineMaster file to apache root directory
sudo cp /home/lbprod/capabilityComplexityMaster.sql /var/www/html/capabilityComplexityMaster.sql | true |
4120591ad349c28c0bdb5d8e1104a25a4896da97 | Shell | dsnlab/TAG_scripts | /dMRI/subcort_masks.sh | UTF-8 | 3,255 | 3.25 | 3 | [] | no_license | #!/bin/bash
# Note: Run this after subcort_seeds.sh
# Load FSL
module load fsl/5.0.9
# This script will parcellate subcortical structures using FSL's first. It will create masks for these structures and warp them into subjects' diffusion space, so they can be used as seed masks for tractography analyses.
# Set directory names
datadir="/projects/dsnlab/shared/tag/bids_data"
scriptsdir="/projects/dsnlab/shared/tag/TAG_scripts/dMRI"
outputdir="/projects/dsnlab/shared/tag/bids_data/derivatives/dMRI_preproc"
# Set error log file
errorlog=""$scriptsdir"/errorlog_subcortmasks.txt"
# Create error log file
touch "${errorlog}"
if [ -f $("$outputdir"/"${subid}"/ses-wave1/anat/sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz) ]; then
# Extracting subject-specific masks for subcortical structures.
cd "$outputdir"/"${subid}"/ses-wave1/anat
# Thalamus masks
echo generating "${subid}" left and right thalamus masks
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 9.5 -uthr 10.5 -bin ../masks/l_thal_mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 48.5 -uthr 49.5 -bin ../masks/r_thal_mask
# Caudate masks
echo generating "${subid}" left and right caudate masks
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 10.5 -uthr 11.5 -bin ../masks/l_caud_mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 49.5 -uthr 50.5 -bin ../masks/r_caud_mask
# Putamen masks
echo generating "${subid}" left and right putamen masks
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 11.5 -uthr 12.5 -bin ../masks/l_puta_mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 50.5 -uthr 51.5 -bin ../masks/r_puta_mask
# Pallidum masks
echo generating "${subid}" left and right pallidum masks
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 12.5 -uthr 13.5 -bin ../masks/l_pall_mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 51.5 -uthr 52.5 -bin ../masks/r_pall_mask
# Brain-Stem/4th Ventricle mask
echo generating "${subid}" brain stem/4th ventricle mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 15.5 -uthr 16.5 -bin ../masks/stem_mask
# Hippocampus masks
echo generating "${subid}" left and right hippocampus masks
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 16.5 -uthr 17.5 -bin ../masks/l_hipp_mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 52.5 -uthr 53.5 -bin ../masks/r_hipp_mask
# Amygdala masks
echo generating "${subid}" left and right amygdala masks
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 17.5 -uthr 18.5 -bin ../masks/l_amyg_mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 53.5 -uthr 54.5 -bin ../masks/r_amyg_mask
# Accumbens masks
echo generating "${subid}" left and right accumbens masks
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 25.5 -uthr 26.5 -bin ../masks/l_nacc_mask
fslmaths sub-"${subid}"_ses-wave1_all_fast_firstseg.nii.gz -thr 57.5 -uthr 58.5 -bin ../masks/r_nacc_mask
fi
else
# Making a note of missing files in error log
echo "ERROR: no fsl first files"
echo "$outputdir"/"${subid}"/ses-wave1/anat: MISSING AUTOMATED SUBCORTICAL SEGMENTATION FILES >> $errorlog
fi
| true |
9b6a2d9b6e183c4671017c4f01a548dfae9d6ac1 | Shell | chrisaddy/bootstrap | /install-xcode | UTF-8 | 194 | 2.875 | 3 | [] | no_license | #!/usr/bin/env bash
echo checking &>/dev/null
xcode-select -p
if [[ $? != 0 ]]; then
echo xcode not installed, installing now
xcode-select --install
else
echo xcode already installed!
fi
| true |
66432792d2fc4c5c54c53b34b127179162d56077 | Shell | mooshmoosh/hackerscripts | /quickbash | UTF-8 | 560 | 4.0625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Create a blank bash script, open it in vim. When vim exits, run the
# script and delete it. If you know how to write all the commands you
# want quickly using vim macros, this will let you do that.
#
# If you want to keep the temporary script around, use quickbash --keep
FILENAME="./tmp-"`date +"%Y%m%d%H%M%S"`
echo "#!/bin/bash" >> $FILENAME
echo "" >> $FILENAME
echo "" >> $FILENAME
chmod +x $FILENAME
vim $FILENAME +
./$FILENAME
if [ $# -gt 0 ] && [ $1=="--keep" ]
then
echo "Keeping temp script as $FILENAME"
else
rm $FILENAME
fi
| true |
5fd5fc30834566eeb3ab6945947d08a191a70d54 | Shell | yys8646/miniconda3 | /run.sh | UTF-8 | 461 | 3.296875 | 3 | [] | no_license | #!/bin/bash
# bash container.sh -n project_name
while getopts rn: option
do
case "${option}"
in
r) REMOVE='--rm' ;;
n) NAME='--name='${OPTARG} ;;
esac
done
docker run ${REMOVE} \
-it \
--net host \
${NAME:-"--name=miniconda3"} \
--mount type=bind,source=$HOME/yys,target=/workbench \
--mount type=bind,source=/mnt/sdb1/data,target=/home/data \
yys8646/miniconda3:latest \
/bin/bash
| true |
7794f782bd2ff50252f6243fda15ed198e8aaa1a | Shell | KaiLangen/scripts_for_thesis | /bash_scripts/test_scripts/discover_dec_helper.bash | UTF-8 | 786 | 3.15625 | 3 | [] | no_license | #!/bin/bash
if [[ $# -ne 4 ]]
then
echo "usage ./script keyQP gopLevel vid dir"
exit
fi
# Setup Variables
keyQP=$1
gopLevel=$2
vid=$3
dir=data/$4
out=$dir/discover
oracle=oracle_${keyQP}.yuv
rec=rec_${keyQP}_${gopLevel}.yuv
# recolour the video
echo `hostname`
cd dvc_test/
mkdir -p $out
#Usage: ./deDVC [wz varBitstream file] [key frame file]
# [original video file] [channel] [SI Method] [helper file]
(time ./deDVC $dir/wz_u_${keyQP}_${gopLevel}.bin \
$dir/$rec $vid u 3 $dir/$oracle > $out/dec_${keyQP}_${gopLevel}_u.log) \
2> $out/dec_time_${keyQP}_${gopLevel}.log
(time ./deDVC $dir/wz_u_${keyQP}_${gopLevel}.bin \
$dir/$rec $vid v 3 $dir/$oracle > $out/dec_${keyQP}_${gopLevel}_v.log) \
2>> $out/dec_time_${keyQP}_${gopLevel}.log
| true |
3d8ba1be8f38697d80f67ce7891833f8c1687791 | Shell | clemesha-ooi/nimbus | /bin/delete-persistence-directory.sh | UTF-8 | 389 | 2.796875 | 3 | [] | no_license | #!/bin/sh
NIMBUS_PRINTNAME="delete persistence directory"
NIMBUS_EXTRAPRINT=" [ ** ] Note there are persistence mgmt scripts @ $GLOBUS_LOCATION/share/nimbus"
NIMBUS_ANT_CMD="delete-GT4.0-service-persistence $*"
BASEDIR_REL="`dirname $0`/.."
BASEDIR=`cd $BASEDIR_REL; pwd`
RUN=$BASEDIR/bin/lib/gt4.0/build/run.sh
export NIMBUS_PRINTNAME NIMBUS_ANT_CMD NIMBUS_EXTRAPRINT
exec sh $RUN
| true |
0de66e154e8741c6c6a31535587447ba2feb0544 | Shell | collinvandyck/dotfiles | /omz/custom/git.zsh | UTF-8 | 1,135 | 3.140625 | 3 | [
"MIT"
] | permissive | function ci() {
msg=${@:-wip}
git add -A
git commit -m "${msg}"
}
function cip() {
ci ${@}; put
fg 2>/dev/null || true
}
function bookmark() {
name=$(echo "$@" | tr ' ' '-')
if [[ "$name" == "" ]]; then
name=bookmark
fi
name="collin/${name}"
git branch "${name}" 2>/dev/null
git branch --force "${name}" HEAD
git co "${name}"
}
# TODO: how to enable git completion for this?
function delbranch()
{
git branch -D $1 ; git push origin :refs/heads/$1
}
function deltag()
{
git tag -d $1 ; git push origin :refs/tags/$1
}
function sha() {
echo -n $(git rev-parse --short HEAD)
}
function gdom() {
git diff $(git merge-base HEAD origin/$(git_main_branch))
}
function gcob() {
git checkout "$(git branch --all --sort=committerdate | grep -v remotes | fzf --no-sort --tac | tr -d '[:space:]' | tr -d '^\* ')"
}
function gcobr() {
git checkout "$(git branch --all --sort=committerdate | fzf --no-sort --tac | tr -d '[:space:]' | tr -d '^\* ')"
}
function fzfbranch() {
git branch --all --sort=committerdate | fzf --no-sort --tac | tr -d '[:space:]' | tr -d '^\* '
}
| true |
6cdb7d2b94ed767064768d229258b950deec7b63 | Shell | fixdocker/ursula | /roles/common/files/usr/local/bin/apt-get-update.sh | UTF-8 | 247 | 3.171875 | 3 | [
"MIT",
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# A simple wrapper around "apt-get update" that returns a suitable exit
# code on failure. Useful for cron jobs.
if ! { apt-get update 2>&1 || echo "E: update failed"; } | grep -q '^[WE]:'; then
exit 0
else
exit 1
fi
| true |
0d12651ff4f6d08c090d356251d9c36544eb37d1 | Shell | dpineiden/automatizacion_docs | /BAK/inspeccion_sse.sh | UTF-8 | 1,905 | 3.203125 | 3 | [] | no_license | for ((i=0;i<=$Cant_lab-1;i++))
do
# <nombre_laboratorio> es la variable que se ingresa para buscar segun la columna N_Lab
Nombre_lab=${Laboratorios[i]}
for ((j=0;j<=$N_matrices-1;j++))
do
#SI Laboratorio es CEA-->generar resumen-->FL33
#Se genera un archivo con nombre <laboratorio>.csv
#Con estructura:
# matriz - cantidad - total_parametro - replicas por estacion
#Para esto se capturan las variables de <nombre lab> <matriz-cols> y se usan los valores de Nfilas y NColLab
#Un array de bash se puede ingresar en awk de la siguiente manera:
# awk -v var="${test[*]}" '{n=split(var,test," ");print test[2]}' SSE_matriz.csv
# <matriz-cols> es el arreglo que se ingresa para buscar solamente las columnas que corresponden a esa matriz
#Entrega un string con el nombre y los indices:
matriz_cols=$(grep ${this_matrices[j]} columnas_matriz.csv | sed 's/_/ /g' )
#dentro del awk:
# Separo el nombre de matriz de cols y obtengo los indices de las columnas, itero en la cantidad de ellas
# las sumo, saco un promedio y obtengo el floor entero del promedio 'replica=int(a/b)'
#Cada nueva variable en awk se debe ingresar antecediendo -v
#awk -v var="${test[*]}" -v group="$Grupo" '{n=split(var,test," ");print test[2], group}' SSE_matriz.csv
awk -F';' -v limit="${limites[*]}" -v matriz_cols="$matriz_cols" -v lab="$Nombre_lab" '{
OFS=";";
ncols=split(matriz_cols,columnas,";");
nlim=split(limit,limites," ");
N_filas=limites[1];
N_Lab=limites[2];
if($N_Lab~lab)
{
muestras=0;
parametro=$1;
for (i=2;i<=ncols;i++){
muestras=muestras+$(columnas[i]);
}
replica=int(muestras/(ncols-1));
print lab,columnas[1], muestras , parametro , replica;
}
}' SSE_matriz.csv
#Si laboratorio es de los otros--->Generar resumen-->R08
done
done | true |
0b4c761a21091d0b9a7dbeac528c1428a9cb81ae | Shell | ovh/terraform-ovh-publiccloud-consul | /modules/install-fabio/install-fabio | UTF-8 | 3,478 | 4.46875 | 4 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
# This script can be used to install Fabio and its dependencies. This script has been tested with CoreOS & CentOS 7 operating systems.
set -e
readonly DEFAULT_INSTALL_PATH="/opt/bin"
readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
readonly SCRIPT_NAME="$(basename "$0")"
function print_usage {
echo
echo "Usage: install-fabio [OPTIONS]"
echo
echo "This script can be used to install Fabio and its dependencies. This script has been tested with CentOS 7."
echo
echo "Options:"
echo
echo -e " --version\t\tThe version of Fabio to install. Required."
echo -e " --sha256sum\t\tThe sha256 checksum of the Fabio binary. Required."
echo -e " --path\t\tThe path where Fabio should be installed. Optional. Default: $DEFAULT_INSTALL_PATH."
echo
echo "Example:"
echo
echo " install-fabio --version 1.0.0"
}
function log {
local readonly level="$1"
local readonly message="$2"
local readonly timestamp=$(date +"%Y-%m-%d %H:%M:%S")
>&2 echo -e "${timestamp} [${level}] [$SCRIPT_NAME] ${message}"
}
function log_info {
local readonly message="$1"
log "INFO" "$message"
}
function log_warn {
local readonly message="$1"
log "WARN" "$message"
}
function log_error {
local readonly message="$1"
log "ERROR" "$message"
}
function assert_not_empty {
local readonly arg_name="$1"
local readonly arg_value="$2"
if [[ -z "$arg_value" ]]; then
log_error "The value for '$arg_name' cannot be empty"
print_usage
exit 1
fi
}
# Install steps are based on: http://stackoverflow.com/a/31576473/483528
function setup_systemd_services {
sudo cp "$SCRIPT_DIR/fabio@.service" "/etc/systemd/system/fabio@.service"
sudo sed -i -e "s,/opt/fabio,$path,g" "/etc/systemd/system/fabio@.service"
# disable firewalld. TODO: make a proper setup for fabio
if systemctl list-unit-files --all | grep -q firewalld.service; then
sudo systemctl stop firewalld
sudo systemctl mask firewalld
fi
}
function install_binaries {
local readonly version="$1"
local readonly sha256sum="$2"
local readonly path="$3"
local readonly url="https://github.com/fabiolb/fabio/releases/download/v${version}/fabio-${version}-go1.9.2-linux_amd64"
log_info "Creating install dir for Fabio at $path"
sudo mkdir -p "$path"
log_info "Downloading Fabio $version from $url to /tmp"
curl -L -o "/tmp/fabio" "$url"
local dl_sha256=$(sha256sum "/tmp/fabio" | awk '{print $1}')
if [ "$sha256sum" != "$dl_sha256" ]; then
log_error "dl binary checksum error $sha256sum != $dl_sha256"
exit 1
fi
sudo mv /tmp/fabio "$path"
sudo chmod a+x "$path/fabio"
}
function install {
local version=""
local path="$DEFAULT_INSTALL_PATH"
while [[ $# > 0 ]]; do
local key="$1"
case "$key" in
--version)
version="$2"
shift
;;
--sha256sum)
sha256sum="$2"
shift
;;
--path)
path="$2"
shift
;;
--help)
print_usage
exit
;;
*)
log_error "Unrecognized argument: $key"
print_usage
exit 1
;;
esac
shift
done
assert_not_empty "--version" "$version"
assert_not_empty "--sha256sum" "$sha256sum"
assert_not_empty "--path" "$path"
log_info "Starting Fabio install"
install_binaries "$version" "$sha256sum" "$path"
setup_systemd_services
log_info "Fabio install complete!"
}
install "$@"
| true |
c4c9c986cd3a48f67bc75cd240f1cf25da187d8b | Shell | awilliamson-puppet/instruqt-images-reorg | /LabBuildSteps/pe-deploy-and-discover-lab-3-1/setup-steps/nixagent/SetupFacts.sh | UTF-8 | 282 | 2.59375 | 3 | [] | no_license | set -x
mkdir -p /etc/puppetlabs/facter/facts.d
cd /etc/puppetlabs/facter/facts.d
touch datacenter.sh
cat << 'EOF' > /etc/puppetlabs/facter/facts.d/datacenter.sh
#!/usr/bin/env bash
echo "datacenter=datacenter-west"
EOF
chmod +x /etc/puppetlabs/facter/facts.d/datacenter.sh
exit 0 | true |
9ec230155aa0bd343489390580328773b5588e83 | Shell | uk-gov-mirror/hmrc.national-duty-repayment-center-frontend | /migrations/applied_migrations/ClaimantType.sh | UTF-8 | 3,663 | 2.78125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
echo ""
echo "Applying migration ClaimantType"
echo "Adding routes to conf/app.routes"
echo "" >> ../conf/app.routes
echo "GET /claimantType controllers.ClaimantTypeController.onPageLoad(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "POST /claimantType controllers.ClaimantTypeController.onSubmit(mode: Mode = NormalMode)" >> ../conf/app.routes
echo "GET /changeClaimantType controllers.ClaimantTypeController.onPageLoad(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "POST /changeClaimantType controllers.ClaimantTypeController.onSubmit(mode: Mode = CheckMode)" >> ../conf/app.routes
echo "Adding messages to conf.messages"
echo "" >> ../conf/messages.en
echo "claimantType.title = Are you the importer or their representative?" >> ../conf/messages.en
echo "claimantType.heading = Are you the importer or their representative?" >> ../conf/messages.en
echo "claimantType.importer = I am the importer" >> ../conf/messages.en
echo "claimantType.representative = I am a representative of the importer" >> ../conf/messages.en
echo "claimantType.checkYourAnswersLabel = Are you the importer or their representative?" >> ../conf/messages.en
echo "claimantType.error.required = Select claimantType" >> ../conf/messages.en
echo "Adding to UserAnswersEntryGenerators"
awk '/trait UserAnswersEntryGenerators/ {\
print;\
print "";\
print " implicit lazy val arbitraryClaimantTypeUserAnswersEntry: Arbitrary[(ClaimantTypePage.type, JsValue)] =";\
print " Arbitrary {";\
print " for {";\
print " page <- arbitrary[ClaimantTypePage.type]";\
print " value <- arbitrary[ClaimantType].map(Json.toJson(_))";\
print " } yield (page, value)";\
print " }";\
next }1' ../test/generators/UserAnswersEntryGenerators.scala > tmp && mv tmp ../test/generators/UserAnswersEntryGenerators.scala
echo "Adding to PageGenerators"
awk '/trait PageGenerators/ {\
print;\
print "";\
print " implicit lazy val arbitraryClaimantTypePage: Arbitrary[ClaimantTypePage.type] =";\
print " Arbitrary(ClaimantTypePage)";\
next }1' ../test/generators/PageGenerators.scala > tmp && mv tmp ../test/generators/PageGenerators.scala
echo "Adding to ModelGenerators"
awk '/trait ModelGenerators/ {\
print;\
print "";\
print " implicit lazy val arbitraryClaimantType: Arbitrary[ClaimantType] =";\
print " Arbitrary {";\
print " Gen.oneOf(ClaimantType.values.toSeq)";\
print " }";\
next }1' ../test/generators/ModelGenerators.scala > tmp && mv tmp ../test/generators/ModelGenerators.scala
echo "Adding to UserAnswersGenerator"
awk '/val generators/ {\
print;\
print " arbitrary[(ClaimantTypePage.type, JsValue)] ::";\
next }1' ../test/generators/UserAnswersGenerator.scala > tmp && mv tmp ../test/generators/UserAnswersGenerator.scala
echo "Adding helper method to CheckYourAnswersHelper"
awk '/class/ {\
print;\
print "";\
print " def claimantType: Option[AnswerRow] = userAnswers.get(ClaimantTypePage) map {";\
print " x =>";\
print " AnswerRow(";\
print " HtmlFormat.escape(messages(\"claimantType.checkYourAnswersLabel\")),";\
print " HtmlFormat.escape(messages(s\"claimantType.$x\")),";\
print " routes.ClaimantTypeController.onPageLoad(CheckMode).url";\
print " )"
print " }";\
next }1' ../app/utils/CheckYourAnswersHelper.scala > tmp && mv tmp ../app/utils/CheckYourAnswersHelper.scala
echo "Migration ClaimantType completed"
| true |
eebf5047de3d0d077aaeb8cf5a2e2aa89706c6f5 | Shell | daveshah1/prjuray | /.github/kokoro/db-quick.sh | UTF-8 | 1,168 | 2.875 | 3 | [
"ISC",
"LicenseRef-scancode-dco-1.1"
] | permissive | #!/bin/bash
#
# Copyright (C) 2020 The Project U-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
set -e
cd github/$KOKORO_DIR/
source ./.github/kokoro/steps/hostsetup.sh
source ./.github/kokoro/steps/hostinfo.sh
source ./.github/kokoro/steps/git.sh
source ./.github/kokoro/steps/xilinx.sh
source ./.github/kokoro/steps/prjuray-env.sh
echo
echo "========================================"
echo "Downloading current database"
echo "----------------------------------------"
(
./download-latest-db.sh
)
echo "----------------------------------------"
source settings/$URAY_SETTINGS.sh
echo
echo "========================================"
echo "Running quick fuzzer sanity check"
echo "----------------------------------------"
(
cd fuzzers
echo "make --dry-run"
make --dry-run
echo "----------------------------------------"
export MAX_VIVADO_PROCESS=$CORES
set -x
script --return --flush --command "make -j $CORES MAX_VIVADO_PROCESS=$CORES QUICK=y" -
set +x
)
echo "----------------------------------------"
| true |
d526ebe22e19e02b729a214c914b9a39a8e53684 | Shell | rsmith3716/jamfScripts | /API/Export-GET/isMDManaged-CSV.sh | UTF-8 | 1,874 | 3.546875 | 4 | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | #!/bin/bash
####################################################################################################
#
# THIS SCRIPT IS NOT AN OFFICIAL PRODUCT OF JAMF SOFTWARE
# AS SUCH IT IS PROVIDED WITHOUT WARRANTY OR SUPPORT
#
# BY USING THIS SCRIPT, YOU AGREE THAT JAMF SOFTWARE
# IS UNDER NO OBLIGATION TO SUPPORT, DEBUG, OR OTHERWISE
# MAINTAIN THIS SCRIPT
#
####################################################################################################
#
# DESCRIPTION
# Read in CSV of mobile device and echo if managed
#
####################################################################################################
#
# DEFINE VARIABLES & READ IN PARAMETERS
#
####################################################################################################
read -p "Jamf Pro URL: " server
read -p "Jamf Pro Username: " username
read -s -p "Jamf Pro Password: " password
echo ""
read -p "Please drag and drop the csv into this window and hit enter: " deviceList
####################################################################################################
####################################################################################################
#
# SCRIPT CONTENTS - DO NOT MODIFY BELOW THIS LINE
#
####################################################################################################
# Courtesy of github dot com slash iMatthewCM
#Read CSV into array
IFS=$'\n' read -d '' -r -a deviceIDs < $deviceList
length=${#deviceIDs[@]}
#Do all the things
for ((i=0; i<$length;i++));
do
id=$(echo ${deviceIDs[i]} | sed 's/,//g' | sed 's/ //g'| tr -d '\r\n')
managed=$(curl -ksu "$username":"$password" -H "content-type: text/xml" "$server"/JSSResource/mobiledevices/id/"$id" | xmllint --xpath '/mobile_device/general/managed/text()' - )
echo "ID $id is Managed: $managed" >> /tmp/9185-md.txt
echo "Export Done"
done
| true |
a944aecf1f9e820e4372984838e9755c243208a1 | Shell | anwarchk/quay | /conf/init/service/batch/queuecleanupworker/run | UTF-8 | 208 | 2.734375 | 3 | [
"Apache-2.0"
] | permissive | #! /bin/bash
echo 'Starting Queue cleanup worker'
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.queuecleanupworker 2>&1
echo 'Repository Queue cleanup exited' | true |
1260f8f1bb8d4d9de64ef7322093cfa6bc337343 | Shell | delkyd/alfheim_linux-PKGBUILDS | /lexmark_pro700/PKGBUILD | UTF-8 | 1,075 | 2.734375 | 3 | [] | no_license | # Contributor: Zeph <zeph33@gmail.com>
# For Lexmark Pro700 Series
pkgname=lexmark_pro700
pkgver=1.0
pkgrel=2
pkgdesc="Lexmark Pro700 Series Printer Driver"
makedepends=('rpmextract' 'gzip' 'bash')
depends=('java-runtime' 'cups' 'libstdc++5' 'lua')
arch=('x86_64' 'i686')
license=('Freeware')
if [[ $CARCH == i686 ]]; then
_arch='i386'
md5sums=('421a5461e8cc04f1b838d23e868448fc')
else
_arch='x86_64'
md5sums=('fc421f21e50a40a4ba482c0b04f17b84')
fi
install=lexmark_pro700.install
url="http://www.lexmark.com/"
source=("http://downloads.lexmark.com/downloads/cpd/lexmark-inkjet-legacy-1.0-1.${_arch}.rpm.sh.tar.gz")
prepare() {
# cd $startdir/src
# tar xf lexmark-inkjet-legacy-1.0-1.x86_64.rpm.sh.tar.gz
cd ${srcdir}
sh lexmark-inkjet-legacy-1.0-1.${_arch}.rpm.sh --noexec --target Installer-Files
cd Installer-Files
mkdir Driver
tar xvvf instarchive_all --lzma -C Driver/
cd Driver
rpmextract.sh lexmark-inkjet-legacy-1.0-1.${_arch}.rpm
rpmextract.sh lexmark-legacy-wsu-1.0-1.${_arch}.rpm
}
package(){
mv -f ${srcdir}/Installer-Files/Driver/usr ${pkgdir}/
}
| true |
5f6bb4842446d8625123621bde2c33e707133ec9 | Shell | dotnetCarpenter/rives | /matrikkelenhetID.sh | UTF-8 | 519 | 3.234375 | 3 | [] | no_license | #!/usr/bin/env bash
if [ $# -eq 0 ]; then
input=($(cat))
else
input=($@)
fi
if [ -z "$input" ]; then
exit 1
fi
# Gisle Johnsons gate 5
# curl -s "https://seeiendom.kartverket.no/api/matrikkelenhet/5001/411/179" | jq .matrikkelenhetId
# Anders Buens gate 7A
# curl -s "https://seeiendom.kartverket.no/api/matrikkelenhet/5001/411/146"
# echo ${#input[@]}
# exit
url="https://seeiendom.kartverket.no/api/matrikkelenhet$(printf '/%s' "${input[@]}")"
# echo $url
result=$(curl -s "$url")
echo $result | jq '.matrikkelenhetId'
| true |
1e27879616303d4d5c343ec45729bedc60ce5a05 | Shell | super-season/centos-yum-mount | /shellsql/forcepay.sh | UTF-8 | 1,276 | 3.140625 | 3 | [] | no_license | #!/bin/bash
echo "------------start----------------"
forcepath=/home/liuzhaochen/forcepaywar
cd $forcepath
file=`ls -tr | tail -n 1` #获取最新上传的文件名
file2=`echo $file | sed -nr 's/(.*)\.war/\1/p'` #去掉后缀名
rm -rf /home/liuzhaochen/apache-tomcat-8.5.43/webapps/forcepay_war* #删除旧的解压文件
cp -a ${forcepath}/$file /home/liuzhaochen/apache-tomcat-8.5.43/webapps/ #把新文件放在特定位置
/home/liuzhaochen/apache-tomcat-8.5.43/bin/startup.sh #启动tomcat,解压war包
sleep 5
kill -9 `ps -aux | grep '\/liuzhaochen\/apache-tomcat-8.5.43' | awk '{print $2}'`
#/home/liuzhaochen/apache-tomcat-8.5.43/bin/shutdown.sh #关闭tomcat
T=`date "+%m%d%H%M"`
cp -ra /home/wwwforcepay/forcepay /home/wwwforcepay/forcepay$T #备份
pid=`ps -aux | grep '\/local\/apache-tomcat*' | awk '{print $2}'`
kill -9 $pid #停掉生产环境
rm -rf /home/wwwforcepay/forcepay/* #删除旧版本文件
cp -ra /home/liuzhaochen/apache-tomcat-8.5.43/webapps/${file2}/* /home/wwwforcepay/forcepay/
/usr/local/apache-tomcat/apache-tomcat-8.5.43/bin/startup.sh
echo "-------------end----------------"
| true |
0be57aa0b799c2c5f4d5c9c3a812b5d190bffb1e | Shell | jeff-hykin/better-shell-syntax | /settings/extensions/nodejs/during_clean.sh | UTF-8 | 200 | 2.90625 | 3 | [
"MIT"
] | permissive | # delete all the __pycache__'s
for item in $(find "$FORNIX_FOLDER" ! -path . -iregex '__pycache__')
do
"$FORNIX_FOLDER/settings/extensions/#standard/commands/tools/file_system/remove" "$item"
done | true |
faa360810a315e3e7db3876345164feeedacff2d | Shell | OskarCarl/fyrlang-native-benchmark | /prepare_32bit.sh | UTF-8 | 405 | 2.890625 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
source .env
echo "Preparing the runtime sources for use on a 32 bit system..."
sed -i 's/uint64_t/uint32_t/g' runtime/*/runtime.h runtime/*/runtime.c
sed -i 's/uint64_t/uint32_t/g' src/common/*.h src/common/*.c
echo "Preparing the library headers..."
sed -i 's/uint64_t/uint32_t/g' $FYRLIB_NATIVE/include/*/malloc.h
echo "Cleaning previously built archives..."
make clean
echo "Done."
| true |
fc43daed4f8cf919ba10f779fc90e3924e35ce09 | Shell | guanxv/Codecademy_note | /Shell/shell.sh | UTF-8 | 14,017 | 3.015625 | 3 | [] | no_license | ls
ls -a
ls -alt
ls ../paint/
#-a - lists all contents, including hidden files and directories
#-l - lists all contents of a directory in long format
#-t - order files and directories by the time they were last modified.
pwd
#print working directory
cd
cd jan/memory
cd ..
cd ../ ..
cd ../feb
cd ../../comedy
mkdir media
touch aa.txt # create a new file called aa.txt
touch bmx/tricks.txt
cp frida.txt lincoln.txt
cp biopic/ray.txt biopic/notorious.txt historical/
cp * satire/ #copy only files not folder
cp m*.txt scifi/
mv superman.txt superhero/
mv wonderwoman.txt batman.txt superhero/
mv batman.txt spiderman.txt #rename file
rm waterboy.txt
rm -r comedy #remove folder
#The -r is an option that modifies the behavior of the rm command. The -r stands for “recursive,” and it’s used to delete a directory and all of its child directories.
rm -f # -f, --force / ignore nonexistent files, never prompt
#--------------------
#standard input, abbreviated as stdin, is information inputted into the terminal through the keyboard or input device.
#standard output, abbreviated as stdout, is the information outputted after a process is run.
#standard error, abbreviated as stderr, is an error message outputted by a failed process.
echo "Hello"
echo "Hello" > hello.txt
cat hello.txt
cat oceans.txt > continents.txt
#> takes the standard output of the command on the left, and redirects it to the file on the right.
cat glaciers.txt >> rivers.txt
#>> takes the standard output of the command on the left and appends (adds) it to the file on the right.
cat < lakes.txt
#< takes the standard input from the file on the right and inputs it into the program on the left.
cat volcanoes.txt | wc
#| is a “pipe”. The | takes the standard output of the command on the left, and pipes it as standard input to the command on the right. You can think of this as “command to command” redirection.
#in turn, the wc command outputs the number of lines, words, and characters in volcanoes.txt, respectively.
cat volcanoes.txt | wc | cat > islands.txt
wc -l < plants.txt
sort lakes.txt
cat lakes.txt | sort > sorted-lakes.txt
uniq deserts.txt
sort deserts.txt | uniq > uniq-deserts.txt
#uniq stands for “unique” and filters out adjacent, duplicate lines in a file.
grep Mount mountains.txt
grep -i Mushroom fungi.txt
#grep stands for “global regular expression print”. It searches files for lines that match a pattern and returns the results. It is also case sensitive. Here, grep searches for “Mount” in mountains.txt
#grep -i enables the command to be case insensitive.
grep -R Arctic /home/ccuser/workspace/geography
# return /home/ccuser/workspace/geography/aa.txt:Artic Desect
#grep -R searches all files in a directory and outputs filenames and lines containing matched results. -R stands for “recursive”. Here grep -R searches the /home/ccuser/workspace/geography directory for the string “Arctic” and outputs filenames and lines with matched results.
grep -Rl Arctic /home/ccuser/workspace/
#returns /home/ccuser/workspace/Artic_aa.txt
#grep -Rl searches all files in a directory and outputs only filenames with matched results. -R stands for “recursive” and l stands for “files with matches”. Here grep -Rl searches the /home/ccuser/workspace/geography directory for the string “Arctic” and outputs filenames with matched results.
grep -R player .
# search string 'player' in the current diretory
sed 's/snow/rain/' forests.txt
sed 's/Dirt/Soils/g' soils.txt
#s: stands for “substitution”. it is always used when using sed for substitution.
#snow: the search string, the text to find.
#rain: the replacement string, the text to add in place.
ls -l | head > list1.txt
ls -la | head >> list1.txt | wc
nano a.txt
#Ctrl + O saves a file. ‘O’ stands for output.
#Ctrl + X exits the nano program. ‘X’ stands for exit.
#Ctrl + G opens a help menu.
clear
# clear screen
history
#command line outputs a history of commands that were entered in the current session.
date
#print out current date
less a.txt
#On Linux systems, less is a command that displays file contents or command output one page at a time in your terminal. less is most useful for viewing the content of large files or the results of commands that produce many lines of output. The content displayed by less can be navigated by entering keyboard shortcuts.
#use q for finish
nano ~/.bash_profile
source ~/.bash_profile
#~/.bash_profile is the name of file used to store environment settings. It is commonly called the “bash profile”. When a session starts, it will load the contents of the bash profile before executing commands.
#The ~ represents the user’s home directory.
#The . indicates a hidden file.
#The name ~/.bash_profile is important, since this is how the command line recognizes the bash profile.
#command source ~/.bash_profile activates the changes in ~/.bash_profile for the current session. Instead of closing the terminal and needing to start a new session, source makes the changes available right away in the session we are in.
# ~/.bash_profile ------------
alias pd="pwd"
export USER="Jane Doe"
# use this command , echo $USER at
#line USER="Jane Doe" sets the environment variable USER to a name “Jane Doe”. Usually the USER variable is set to the name of the computer’s owner.
#The line export makes the variable to be available to all child sessions initiated from the session you are in. This is a way to make the variable persist across programs.
#At the command line, the command echo $USER returns the value of the variable. Note that $ is always used when returning a variable’s value. Here, the command echo $USER returns the name set for the variable.
export PS1=">> "
#PS1 is a variable that defines the makeup and style of the command prompt.
#export PS1=">> " sets the command prompt variable and exports the variable. Here we change the default command prompt from $ to >>.
#After using the source command, the command line displays the new command prompt.
HOME
echo $HOME #type in command line. not in bash_profile
#The HOME variable is an environment variable that displays the path of the home directory. Here by typing
PATH
echo $PATH #type in command line. not in bash_profile
#PATH is an environment variable that stores a list of directories separated by a colon. Looking carefully
#/bin/pwd
#/bin/ls
LESS
export LESS="-N"
#Open the bash profile, and create and export a new environment variable called LESS, setting it equal to the option "-N". The -N option adds line numbers to the file.
# ~/.bash_profile ----- end -------
env
env | grep PATH
#The env command stands for “environment”, and returns a list of the environment variables for the current user.
#----------------------- bash script ---------------------------------
#The beginning of your script file should start with #!/bin/bash on its own line. This tells the computer which type of interpreter to use for the script.
#When saving the script file, it is good practice to place commonly used scripts in the ~/bin/ directory.
#The script files also need to have the “execute” permission to allow them to be run. To add this permission to a file with filename: script.sh use:
chmod +x script.sh
#Your terminal runs a file every time it is opened to load its configuration.
#On Linux style shells, this is ~/.bashrc and on OSX, this is ~/.bash_profile.
#To ensure that scripts in ~/bin/ are available, you must add this directory to your PATH within your configuration file:
PATH=~/bin:$PATH
#Use ./script.sh to run the script.
./script.sh
greeting="Hello" #Note that there is no space between the variable name, the equals sign, or “Hello”.
#To access the value of a variable, we use the variable name prepended with a dollar sign ($).
echo $greeting
#When bash scripting, you can use conditionals to control which set of commands within the script run. Use if to start the conditional, followed by the condition in square brackets ([ ]). then begins the code that will run if the condition is met. else begins the code that will run if the condition is not met. Lastly, the conditional is closed with a backwards if, fi.
if [ $index -lt 5 ]
then
echo $index
else
echo 5
fi
#Equal: -eq
#Not equal: -ne
#Less than or equal: -le
#Less than: -lt
#Greater than or equal: -ge
#Greater than: -gt
#Is null: -z
#When comparing strings, it is best practice to put the variable into quotes ("). This prevents errors if the variable is null or contains spaces. The common operators for comparing strings are:
#Equal: ==
#Not equal: !=
#For example, to compare if the variables foo and bar contain the same string:
if [ "$foo" == "$bar"]
#----sample script.sh-------
#!/bin/bash
first_greeting="Nice to meet you!"
later_greeting="How are you?"
greeting_occasion=1
if [ $greeting_occasion -lt 1 ]
then
echo $first_greeting
else
echo $later_greeting
fi
#------- end sample ----------
#There are 3 different ways to loop within a bash script: for, while and until.
#For example, if we had a list of words stored in a variable paragraph, we could use the following syntax to print each one:
for word in $paragraph
do
echo $word
done
#Note that word is being “defined” at the top of the for loop so there is no $ prepended. Remember that we prepend the $ when accessing the value of the variable. So, when accessing the variable within the do block, we use $word as usual.
#Within bash scripting until and while are very similar. while loops keep looping while the provided condition is true whereas until loops loop until the condition is true. Conditions are established the same way as they are within an if block, between square brackets. If we want to print the index variable as long as it is less than 5, we would use the following while loop:
while [ $index -lt 5 ]
do
echo $index
index=$((index + 1))
done
#Note that arithmetic in bash scripting uses the $((...)) syntax and within the brackets the variable name is not prepended with a $.
#The same loop could also be written as an until loop as follows:
until [ $index -eq 5 ]
do
echo $index
index=$((index + 1))
done
#
echo "Guess a number"
read number
echo "You guessed $number"
#Another way to access external data is to have the user add input arguments when they run your script. These arguments are entered after the script name and are separated by spaces. For example:
saycolors red green blue
#Within the script, these are accessed using $1, $2, etc, where $1 is the first argument (here, “red”) and so on. Note that these are 1 indexed.
#If your script needs to accept an indefinite number of input arguments, you can iterate over them using the "$@" syntax. For our saycolors example, we could print each color using:
for color in "$@"
do
echo $color
done
#Lastly, we can access external files to our script. You can assign a set of files to a variable name using standard bash pattern matching using regular expressions. For example, to get all files in a directory, you can use the * character:
files=/some/directory/*
#You can then iterate through each file and do something. Here, lets just print the full path and filename:
for file in $files
do
echo $file
done
# set up aliases
alias saycolors='./saycolors.sh'
#You can even add standard input arguments to your alias. For example, if we always want “green” to be included as the first input to saycolors, we could modify our alias to:
alias saycolors='./saycolors.sh "green"'
#you can also make alias in command line.
#sample of script.sh
#!/bin/bash
first_greeting="Nice to meet you!"
later_greeting="How are you?"
greeting_occasion=0
greeting_limit=$1
while [ $greeting_occasion -lt $greeting_limit ]
do
if [ $greeting_occasion -lt 1 ]
then
echo $first_greeting
else
echo $later_greeting
fi
greeting_occasion=$((greeting_occasion + 1))
done
#------------
#!/bin/bash
first_greeting="Nice to meet you!"
later_greeting="How are you?"
greeting_occasion=0
echo "How many times should I greet?"
read greeting_limit
while [ $greeting_occasion -lt $greeting_limit ]
do
if [ $greeting_occasion -lt 1 ]
then
echo $first_greeting
else
echo $later_greeting
fi
greeting_occasion=$((greeting_occasion + 1))
done
#sample code ------------------------------
#One common use of bash scripts is for releasing a “build” of your source code. Sometimes your private source code may contain developer resources or private information that you don’t want to release in the published version.
#In this project, you’ll create a release script to copy certain files from a source directory into a build directory.
#!/bin/bash
echo "🔥🔥🔥Beginning build!! 🔥🔥🔥"
firstline=$(head -n 1 source/changelog.md) # read first line from file changelog.md
read -a splitfirstline <<< $firstline # split string into array
version=${splitfirstline[1]} #get the versioin number from array, the index is 1
echo "You are building version" $version
echo 'Do you want to continue? (enter "1" for yes, "0" for no)' # ask user if want to continue
read versioncontinue
if [ $versioncontinue -eq 1 ]
then
echo "OK"
for filename in source/*
do
echo $filename # show user the filenames
if [ "$filename" == "source/secretinfo.md" ] # not copying the secretinfo
then
echo "Not copying" $filename
else
echo "Copying" $filename
cp $filename build/.
fi
done
cd build/
echo "Build version $version contains:" # show the end result in build folder
ls
cd ..
else
echo "Please come back when you are ready" # if user not chose 1 , just stop running.
fi
#some more ideas
#Copy secretinfo.md but replace “42” with “XX”.
#Zip the resulting build directory.
#Give the script more character with emojis.
#If you are familiar with git, commit the changes in the build directory.
| true |
8cd7f77b267150125c5ad5f7fb810e8e54dcd82c | Shell | davidfite/risks-scripts | /dom0/attach_hush_to | UTF-8 | 2,051 | 4.09375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# did the user add the vm as input parameter?
if [ ! ${1} ]; then
echo "Usage: $0 <vm_name>"
exit 1
fi
#block="dom0:mmcblk0p1"
block=${SDCARD_BLOCK}
# does the vm exist?
vm=""
for item in $(qvm-ls | grep -v dom0 | awk {'print $1'} | grep ${1})
do
if [ "${item}" == "${1}" ]; then
vm=${1}
fi
done
if [ ${#vm} -eq 0 ]; then
echo "No vm with name ${1} exists or can not be used. Aborted."
exit 1
fi
# check if the vm is connected to the internet
netvm=$(qvm-prefs ${vm} | grep "netvm" | awk {'print $3'})
if [ "${netvm}" != "None" ]; then
echo "${vm} might be connected to the internet. Aborted."
echo "Check: qvm-prefs ${vm} | grep netvm"
exit 1
fi
# check if the vm is labeled as green
#label=$(qvm-prefs ${vm} | grep label | awk {'print $3'})
#if [ "${label}" != "green" ]; then
# echo "The vm ${vm} is not labeled as green but ${label}. Aborted."
# exit 1
#fi
# detach sdcard if already attached to some other vm
ovm=$(qvm-block list | grep ${block} | awk {'print $3'} | cut -d" " -f1)
if [ ${#ovm} -gt 0 ]; then
if [ "${ovm}" == "${vm}" ]; then
echo "Block ${SDCARD_BLOCK} is already attached to ${vm}"
exit 0
fi
echo -e "\nBlock ${SDCARD_BLOCK} is currently attached to ${ovm}."
echo "Please umount it properly from there and rerun this program."
exit 1
# slam tombs open in the vm
#qvm-run -u user ${ovm} '/usr/local/bin/tomb slam all'
# umount sdcard from the vm
#qvm-run -u user ${ovm} '/usr/local/bin/risks umount sdcard'
# detach the sdcard
#qvm-block detach ${ovm} ${block}
#if [ $? != 0 ]; then
# echo "Block ${block} can not be detached from ${ovm}. Aborted."
# exit
#fi
fi
# is the vm running?
qvm-ls | grep Running | awk {'print $1'} | grep "^"${vm}"$" &> /dev/null
if [ "$?" != "0" ]; then
qvm-start ${vm}
sleep 15
fi
# finally attach the sdcard encrypted partition to the qube
qvm-block attach ${vm} ${block}
if [[ $? -eq 0 ]]; then
echo "Block ${SDCARD_BLOCK} has been attached to ${vm}"
else
echo "Block ${SDCARD_BLOCK} can not be attached to ${vm}"
fi
| true |
ced8bdebe24ab1eb388ddacac2ce4274152db576 | Shell | genesis-community/bosh-genesis-kit | /hooks/addon-runtime-config~rc | UTF-8 | 6,553 | 4.09375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
set -u
action="${1:-''}"
if [[ "$action" == 'help' ]] ; then
describe \
"Generate the runtime config, and upload it to the target BOSH director" \
"" \
"Options:" \
" #y{-n} Dry run, just print out the runtime config without uploading it." \
" #y{-y} Upload changes without prompting for confirmation." \
" #y{-d} Upload the config to the default runtime config, merging with" \
" what is currently there. This is not recommended, but included" \
" for backwards compatibility".
exit 0
fi
[[ "$action" == 'run' ]] || bail \
"#R{[ERROR]} Internal error: download stemcells addon called with invalid argument." \
" Expecting help or run, got '$1'"
# -- MAIN ----------------------------------------------------------------------
[[ -n "${GENESIS_DEBUG:-}" ]] && set -x
describe "" "Gathering current Exodus metadata for #C{$GENESIS_ENVIRONMENT}..."
if ! have_exodus_data ; then bail \
"BOSH environment '$GENESIS_ENVIRONMENT' has not been deployed. Please deploy" \
"it first, then run the 'runtime-config' addon"
fi
params_dns_deployments_whitelist="$(lookup params.dns_deployments_whitelist '[]')"
params_dns_cache="$(lookup params.dns_cache 'true')"
exodus="$(exodus --all)"
IFS=',' read -ra features <<< "$(echo "$exodus" | jq -r .features)"
upload_runtime_config() {
name="${1:-default}"
contents="$2"
genesis_bosh -A update-runtime-config --tty --name="${name}" <(echo "$contents")
}
remove_runtime_config() {
name="${1:-default}"
if [[ -n "$(genesis_bosh -A configs --type=runtime --name="$name")" ]] ; then
describe >&2 "" "Removing existing '$name' runtime:" "- ---"
get_runtime_config "$name" 2>&1 | sed -e 's/\(.*\)/- \1/' >&2
echo >&2 ''
genesis_bosh -A delete-config --type=runtime --name="$name"
fi
}
get_runtime_config() {
name="${1:-default}"
(set +e
genesis_bosh -A config --type=runtime --name="$name" ) \
| tail -n+5 | sed -e 's/\s\+$//' | spruce merge
}
generate_dns_runtime() {
cat <<EOF
addons:
- include:
stemcell:
- os: ubuntu-trusty
- os: ubuntu-xenial
- os: ubuntu-bionic
- os: ubuntu-jammy
EOF
if [[ "$params_dns_deployments_whitelist" != '[]' ]] ; then
echo " deployments:"
echo "$params_dns_deployments_whitelist" | jq -cM '.[]' | sed -e 's/^/ - /'
fi
cat <<EOF
jobs:
- name: bosh-dns
properties:
api:
client:
tls:
ca: (( vault \$GENESIS_SECRETS_BASE "dns_api_tls/ca:certificate" ))
certificate: (( vault \$GENESIS_SECRETS_BASE "dns_api_tls/client:certificate" ))
private_key: (( vault \$GENESIS_SECRETS_BASE "dns_api_tls/client:key" ))
server:
tls:
ca: (( vault \$GENESIS_SECRETS_BASE "dns_api_tls/ca:certificate" ))
certificate: (( vault \$GENESIS_SECRETS_BASE "dns_api_tls/server:certificate" ))
private_key: (( vault \$GENESIS_SECRETS_BASE "dns_api_tls/server:key" ))
cache:
enabled: $params_dns_cache
EOF
if printf '%s\n' "${features[@]}" | grep -q '^bosh-dns-healthcheck$'; then
cat <<EOF
health:
enabled: true
client:
tls:
ca: (( vault \$GENESIS_SECRETS_BASE "dns_healthcheck_tls/ca:certificate" ))
certificate: (( vault \$GENESIS_SECRETS_BASE "dns_healthcheck_tls/client:certificate" ))
private_key: (( vault \$GENESIS_SECRETS_BASE "dns_healthcheck_tls/client:key" ))
server:
tls:
ca: (( vault \$GENESIS_SECRETS_BASE "dns_healthcheck_tls/ca:certificate" ))
certificate: (( vault \$GENESIS_SECRETS_BASE "dns_healthcheck_tls/server:certificate" ))
private_key: (( vault \$GENESIS_SECRETS_BASE "dns_healthcheck_tls/server:key" ))
EOF
fi
cat <<EOF
release: bosh-dns
name: bosh-dns
EOF
cat overlay/releases/bosh-dns.yml
}
generate_ops_access_runtime() {
if ! printf '%s\n' "${features[@]}" | grep -q '^\(net\|sys\)op-access$' ; then
echo ""
return 0
fi
cat <<EOF
addons:
- name: genesis-local-users
exclude:
jobs:
- name: user_add
release: os-conf
jobs:
- name: user_add
release: os-conf
properties:
persistent_homes: true
users:
EOF
if printf '%s\n' "${features[@]}" | grep -q '^netop-access$'; then
cat <<'EOF'
- name: netop
public_key: (( vault $GENESIS_SECRETS_BASE "op/net:public" ))
EOF
fi
if printf '%s\n' "${features[@]}" | grep -q '^sysop-access$'; then
cat <<'EOF'
- name: sysop
crypted_password: (( vault $GENESIS_SECRETS_BASE "op/sys:password-crypt-sha512" ))
EOF
fi
cat overlay/releases/os-conf.yml
}
generate_merged_default_runtime() {
spruce merge \
<(cat <<'EOF'
addons:
- name: genesis-local-users
- name: bosh-dns
EOF
) \
<(get_runtime_config) \
<( cat <<'EOF'
addons:
- (( delete "genesis-local-users" ))
- (( delete "bosh-dns" ))
EOF
) \
<(generate_dns_runtime) \
<(generate_ops_access_runtime)
}
# Parse options
shift
dryrun=
default=
while test $# -gt 0 ; do
case "$1" in
-n) dryrun=1 ;;
-y) export BOSH_NON_INTERACTIVE=1 ;;
-d) default=1 ;;
-*) bail "#R{[ERROR]} Bad option $1" ;;
*) bail "#R{[ERROR]} No arguments expected: $1" ;;
esac
shift
done
if [[ -n "$dryrun" ]] ; then
if [[ -n "$default" ]] ; then
describe "" \
"Runtime Config" \
"-----------------------"
generate_merged_default_runtime
else
describe "" \
"BOSH DNS Runtime Config" \
"-----------------------"
generate_dns_runtime | spruce merge
if printf '%s\n' "${features[@]}" | grep -q '^\(net\|sys\)op-access$' ; then
describe "" \
"Operator Access Runtime Config" \
"------------------------------"
generate_ops_access_runtime | spruce merge
fi
fi
echo
exit 0
elif [[ -n "$default" ]] ; then
describe >&2 "Uploading runtime - this will replace your existing default runtime"
upload_runtime_config default "$(generate_merged_default_runtime)"
else
describe >&2 "Uploading 'genesis.bosh-dns' runtime"
upload_runtime_config "genesis.bosh-dns" "$(generate_dns_runtime | spruce merge)"
opsaccess="$(generate_ops_access_runtime)"
if [[ -z "$opsaccess" ]] ; then
remove_runtime_config "genesis.ops-access"
else
describe >&2 "Uploading 'genesis.ops-access' runtime"
upload_runtime_config "genesis.ops-access" "$(generate_ops_access_runtime | spruce merge)"
fi
fi
| true |
5b56384f36f68e0820ce60090ea2d725cb45ec64 | Shell | antipiot/nextcloud_appliance | /startup.sh | UTF-8 | 1,610 | 3.234375 | 3 | [] | no_license | #!/bin/sh
# Jonas Sauge
# Settings
username=nextcloud
http=80
https=443
dbusername=nextcloud
dbname=nextcloud
dbhostname=db
mysqlrootpwd=$(LC_ALL=C tr -dc 'A-Za-z0-9!#%&\()*+,-./:;<=>?@[\]^_{}~' </dev/urandom | head -c 20)
mysqlnextcloudpwd=$(LC_ALL=C tr -dc 'A-Za-z0-9!#%&\()*+,-./:;<=>?@[\]^_{}~' </dev/urandom | head -c 20)
## Starting Nextcloud Installation
# Creating environnment and variables
useradd $username
gid=$(id -g $username)
uid=$(id -u $username)
rootdatafolder=/mnt/nextcloud
mkdir $rootdatafolder
mkdir $rootdatafolder/database
chown -R $uid:$gid $rootdatafolder
# Starting mysql container
docker run -d --name $dbhostname --restart unless-stopped --user $uid:$gid -v $rootdatafolder/database:/var/lib/mysql -e MYSQL_ROOT_PASSWORD=$mysqlrootpwd -e MYSQL_DATABASE=$dbname -e MYSQL_USER=$dbusername -e MYSQL_PASSWORD=$mysqlnextcloudpwd mariadb:10.5 --transaction-isolation=READ-COMMITTED --binlog-format=ROW
# Starting nextcloud container
docker run -d --name=nextcloud --restart unless-stopped -p $https:443 --link $dbhostname -e PUID=$uid -e PGID=$gid -e TZ=Europe/Geneva -v $rootdatafolder/config:/config -v $rootdatafolder/data:/data linuxserver/nextcloud
# Starting updater container
docker run -d --name watchtower --restart=unless-stopped -e WATCHTOWER_SCHEDULE="0 0 4 * * *" -e WATCHTOWER_CLEANUP="true" -e TZ="Europe/paris" -v /var/run/docker.sock:/var/run/docker.sock containrrr/watchtower
echo "Database user: $dbusername
Database password: $mysqlnextcloudpwd
Database name: $dbname
Database hostname: $dbhostname
Database root password: $mysqlrootpwd" > $rootdatafolder/credentials.txt
rm -f $0
| true |
26301500d9feb136fd5db0e3221a0badad1da7bc | Shell | NachiMK/nodejs | /serverless-template/devops/functions.sh | UTF-8 | 1,123 | 3.453125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
. ./devops/colors.sh
. ./devops/config.sh
infoMessage() {
echo -e "$GREEN$1$NONE"
}
processMessage() {
echo -e "$CYAN$1$NONE"
}
workingMessage() {
echo -e "$ORANGE$1$NONE"
}
gitUsername() {
# Get GIT username
echo $(git config user.name)
}
gitBranch() {
# Get GIT username
echo $(git rev-parse --symbolic-full-name --abbrev-ref HEAD)
}
getPackageVersion() {
cat ./package.json \
| grep version \
| head -1 \
| awk -F: '{ print $2 }' \
| sed 's/[\ ",]//g'
}
slack() {
if [ -z "$2" ]; then
echo Not enough arguments sent for slack
exit
fi
workingMessage "\nSending slack to #$1..."
curl -X POST --data-urlencode 'payload={"channel": "#'"$1"'", "username": "Sir Service", "text": "'"$2"'"}' $SLACK_HOOK
echo " 👌"
}
slackTestMessage() {
if [ -z "$1" ]; then
echo Not enough arguments sent for slack
exit
fi
curl -X POST --data-urlencode 'payload={"channel": "#eng-services", "username": "Testy McTestface", "icon_url": "https://s3.amazonaws.com/dev-slack-images/all-the-things-face.jpg", "text": "'"$1"'"}' $SLACK_HOOK
echo " 👌"
}
| true |
3238bb44897c486f4379aa7342d032eb3ec0189f | Shell | fhwrdh/dotfiles | /custom/themes/fhwrdh.zsh-theme | UTF-8 | 988 | 2.703125 | 3 | [] | no_license | # fhwrdh.zsh-theme
#
# requires git-prompt plugin
#
FH_GREEN=34
FH_RED=88
FH_GREY=240
FH_BLUE=27
FH_YELLOW=112
git_user() {
echo $(git config user.name)
}
git_email() {
echo $(git config user.email)
}
PROMPT='%F{$FH_GREY}%m%f %1(j.%F{$FH_RED}_ %f.)%F{$FH_GREEN}%(5~|…/%3~|%~)%f $(git_super_status)
%F{$FH_BLUE}%(!.#.ⵙ)%f '
# ⵙ ⵂ ䷀
RPROMPT='%F{$FH_GREY}$(git_email)%f'
ZSH_THEME_GIT_PROMPT_PREFIX="%F{$FH_GREY}|%f "
ZSH_THEME_GIT_PROMPT_SUFFIX="%F{$FH_GREY}%f"
ZSH_THEME_GIT_PROMPT_SEPARATOR="%F{$FH_GREY}%f"
ZSH_THEME_GIT_PROMPT_BRANCH="%F{$FH_GREY}"
ZSH_THEME_GIT_PROMPT_STAGED=" %F{$FH_GREEN}%{✚%G%}"
ZSH_THEME_GIT_PROMPT_CHANGED=" %F{$FH_RED}%{✚%G%}"
ZSH_THEME_GIT_PROMPT_CONFLICTS=" %F{$FH_RED}%{✖%G%}"
ZSH_THEME_GIT_PROMPT_BEHIND=" %{↓%G%}"
ZSH_THEME_GIT_PROMPT_AHEAD=" %F{$FH_GREEN}%{↑%G%}"
ZSH_THEME_GIT_PROMPT_UNTRACKED=" %F{$FH_YELLOW}%{-%G%}"
ZSH_THEME_GIT_PROMPT_CLEAN=" %F{$FH_GREEN}%{✔%G%}"
ZSH_THEME_GIT_PROMPT_STASHED=" %F{$FH_GREY}%{▾%G%}"
| true |
3481f1aab8e03a1feb43144161d9402a617abd62 | Shell | somasis/beginning-scripts | /bluez | UTF-8 | 502 | 3.359375 | 3 | [
"ISC"
] | permissive | #!/bin/bash
BLUEZ_ARGS=${BLUEZ_ARGS:-}
bluez_exists() {
if prog_exists bluetoothd || PATH="@@libexecdir@@"/bluetooth:"@@bindir@@" prog_exists bluetoothd;then
return 0
else
return 1
fi
}
bluez_depends() {
echo dbus dev
}
bluez_start() {
PATH="@@libexecdir@@"/bluetooth:"@@bindir@@":"${PATH}"
bluetoothd -n ${BLUEZ_ARGS} >/dev/null 2>&1 &
pidfile "$!" bluetooth
}
bluez_stop() {
pidfilekill bluetooth
}
bluez_status() {
pidfileexists bluetooth
}
| true |
16c44849d7082d38a6c31506ce265cea68758bd0 | Shell | bioconda/bioconda-recipes | /recipes/mango/activate.sh | UTF-8 | 477 | 2.890625 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# Set SPARK_HOME to conda installed pyspark package if not already set
if [ -z "${SPARK_HOME}" ]; then
export SPARK_HOME=$( eval "echo ${CONDA_PREFIX}/lib/python*/site-packages/pyspark" )
fi
# enable widget in environment
jupyter=${CONDA_PREFIX}/bin/jupyter
$jupyter nbextension enable --py widgetsnbextension
$jupyter nbextension install --overwrite --py --symlink --user bdgenomics.mango.pileup
$jupyter nbextension enable bdgenomics.mango.pileup --user --py
| true |
3fcab30a95708f8444d2811568f19aa00d0d7289 | Shell | Genius/ree-1.8.7-heroku | /build_ree.sh | UTF-8 | 1,310 | 3.515625 | 4 | [] | no_license | #!/bin/bash
set -e
function init() {
FULL_VERSION="1.8.7-2012.02"
FULL_NAME="ruby-enterprise-${FULL_VERSION}"
TEMP_DIR=$(mktemp -d)
echo "Serving from /tmp"
cd /tmp
python -m SimpleHTTPServer $PORT &
}
function download_and_patch() {
init
cd $TEMP_DIR
curl https://rubyenterpriseedition.googlecode.com/files/${FULL_NAME}.tar.gz -s -o - | tar zxf -
cd $FULL_NAME/source
patch -p1 < /app/patches/34ba44f94a62c63ddf02a045b6f4edcd6eab4989.patch
patch -p1 < /app/patches/5384967a015be227e16af7a332a50d45e14ed0ad.patch
patch -p1 < /app/patches/CVE-2015-1855.patch
patch -p1 < /app/patches/tcmalloc_declare_memalign_volatile.patch
}
function run_installer() {
DIRECTORY=$1
OUTPUT=$2
download_and_patch
if [ -e $DIRECTORY ]; then
echo "Output directory ${DIRECTORY} already exists. Removing."
rm -rf $DIRECTORY
fi
cd $TEMP_DIR/$FULL_NAME
export CFLAGS="-fno-tree-dce -fno-optimize-sibling-calls"
./installer --auto $DIRECTORY --no-dev-docs --dont-install-useful-gems --no-tcmalloc
cd $DIRECTORY && {
tar czvf /tmp/${OUTPUT}.tgz .
}
}
function listen_and_serve() {
while true
do
sleep 60
echo "Still serving."
done
}
run_installer /app/vendor/ruby-1.8.7 ruby-1.8.7
run_installer /tmp/ruby-1.8.7 ruby-build-1.8.7
listen_and_serve
| true |
df99ea41bbe766a5baf8c5dec3ac5223e87a75d7 | Shell | chaoshunh/seismic_unix | /su/Xsuradonfreq_ken | UTF-8 | 2,309 | 2.578125 | 3 | [] | no_license | #! /bin/sh
# Xsuradonfk_ken --- Attenuation of multiples in Kenwincmp
# Normal (apex centered) Radon Transform.
# Data contain multiples of diffractions.
# This data set will be used later for testing interpolation
# with suradonfk. Since the normal multiples are giving problems
# they wil be removed in this code as a preprocessing.
#
# data preprocessing:
# Althouth the data is a real data set, for this example we need to take a
# window, and make the data to look symmetric
# from kencmpsnonmo.su take cdp 5300 and window from 5.5s to end.
# Data name kencmpwin0.su
# then make the data symmetric by
# suchw key1=offset key2=offset b=-1 < kencmpwin0.su > pp
# cat pp kencmpwin0.su > kencmpwin.su
# sushw key=delrt a=0 < kencmpwin.su > pp
# susort +offset < pp > kencmpwin.su
# Xsetntr kencmpwin.su
######################################################################
# Author: Daniel Trad- November 2001
# Some shortcuts for plots
PLOTR="suxwigb key=f2 wbox=550 hbox=650"
PLOTX="suxwigb key=offset wbox=550 hbox=650 perc=94"
suxx="suximage perc=98 hbox=900 wbox=700 xbox=$2 curve=curve1 npair=5 & "
set -x
METHOD='FREQ'
RADONFORW='y'
sufix=${METHOD}
FILE=kencmp0.su; clip= #clip corresponding to perc=97
FILEN=$FILE
FILETEMP=pp
FILETEMP2=ppp
offsetfile=$FILE'off'
FILERAD=${FILEN}'rad'${sufix}
FILERAD0=${FILEN}'rad0'${sufix}
FILEREC=$FILEN'rec'${sufix}
FILERES=$FILEN'res'${sufix}
par=stkvel.$FILE
# Options
# None
scale=1.0
############################*****
perc=100
cp $FILE $FILETEMP
cdpmin=5300
cdpmax=5300
dxcdp=1
if [ $METHOD = "FREQ" ]; then
mute="tmin_m=5.0 tmax_m=10.2 ihmin_m=90 ihmax_m=115 thres_m=0 slope_m=0"
mute="tmin_m=5.0 tmax_m=10.2 ihmin_m=90 ihmax_m=115 thres_m=0 slope_m=0"
suradonfreq < $FILE nhmax=200 cdpmin=$cdpmin cdpmax=$cdpmax dxcdp=$dxcdp par=$par itercg=30 iter_end=3 eps2=9e-1 eps1=9e-1 nq=150 verbose=1 mute=1 qmin=-4.25e-8 factor=1.5 solver=cgfft modelfile=$FILERAD $mute > $FILEREC
else
# Use other tests here
exit 0
fi
cp $FILEREC kencmp1.su
Velan1 kencmp1.su 5300 5300 1 0.05
Xpreprocessken kencmp1
cp cdp5300.su kencmp1win.su
sufilter < kencmp1win.su f=80,100 amps=1,0 > pp
cp pp kencmp1win.su
suop2 kencmp0win.su kencmp1win.su | suximage perc=98 &
sufft < model1 | suamp | suximage perc=98 &
exit 0
| true |
f04e4813f4a5df237d9b5083bf3062436ea640ba | Shell | sillsdev/fwmeta | /fwmeta/tests/git-finishTests | UTF-8 | 27,165 | 3.390625 | 3 | [] | no_license | #!/bin/bash
oneTimeSetUp()
{
basedir=$(pwd)/../..
. ../functions.sh
unittest=1
dir=$(pwd)
tmpdir=$(mktempdir)
cd $tmpdir
git init -q --bare bareRepo
git clone -q bareRepo masterRepo &>/dev/null
cd masterRepo
touch file1
git add file1
git commit -q -m "Initial commit"
git checkout -q -b develop
touch file2
git add file2
git commit -q -m "Commit on develop"
git push -q origin --all &>/dev/null
cd ..
git clone -q bareRepo testingRepo &>/dev/null
cd testingRepo
git checkout -q develop > /dev/null
git checkout -q master > /dev/null
cd $dir
SUT=$basedir/fwmeta/git-finish
}
oneTimeTearDown()
{
cd $HOME
rm -rf $tmpdir
}
setUp()
{
cd $tmpdir/testingRepo
git checkout -q master
git reset -q --hard origin/master
git checkout -q develop
git reset -q --hard origin/develop
git merge -q -s recursive -X theirs master &>/dev/null
git push -q origin develop &> /dev/null
}
deleteBranch()
{
git branch | grep -q $1 && git branch -D $1 > /dev/null
git push -q origin :$1 &> /dev/null
}
tearDown()
{
cd $tmpdir/testingRepo
git merge --abort &> /dev/null || true
git checkout -q master
for branch in $(git branch | grep "\(release\|support\|hotfix\)" | cut -c3-); do
deleteBranch $branch
done
for branch in $(git ls-remote origin 2>/dev/null | cut -f2 | grep "\(release\|support\|hotfix\)"); do
deleteBranch $branch
done
rm $(git rev-parse --git-dir)/GITFINISH_MERGE 2>/dev/null || true
}
exerciseSUT()
{
$SUT "$@" -m "tagmessage"
}
# Returns true if the commit $1 or one of its parents is $2
basedOnCommit()
{
for commit in $(git rev-list --parents -n 1 $1); do
if [ "$commit" = "$2" ]; then
return 0
fi
done
return 1
}
testNoParameters()
{
cd $tmpdir/testingRepo
assertFalse "Didn't fail when called without parameters" "exerciseSUT"
}
testNoVersion()
{
cd $tmpdir/testingRepo
assertFalse "Didn't fail when called without version number" "exerciseSUT release"
}
# Finishing must refuse to run when there are uncommitted (staged) changes.
testDirtyWorkdir()
{
	cd $tmpdir/testingRepo
	touch bla
	git add bla
	assertFalse "Didn't fail when called with dirty workdir" "exerciseSUT release"
}
# Happy path for 'release': merge into develop and master, tag the version,
# delete the branch locally and on origin, and push both long-lived branches.
testReleaseBranch()
{
	version=1.1
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file3
	git add file3
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT release $version"
	assertFalse "Didn't delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	# NOTE: the doubled quotes below concatenate into one word; this only works
	# because ref names used in these tests contain no whitespace.
	assertFalse "Didn't delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Didn't create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# A release branch existing only on the remote (created in the bare repo,
# never fetched locally) cannot be finished.
testReleaseWithoutLocalBranch()
{
	branch=release/1.2
	cd $tmpdir/bareRepo
	git branch $branch develop
	cd $tmpdir/testingRepo
	assertFalse "Didn't fail when called without having local branch" "exerciseSUT release 1.2"
}
# A release branch that was never pushed to origin cannot be finished.
testReleaseWithoutRemoteBranch()
{
	branch=release/1.3
	cd $tmpdir/testingRepo
	git branch $branch develop
	assertFalse "Didn't fail when called without having remote branch" "exerciseSUT release 1.3"
}
# 'feature' with no argument must fail when the current branch is not feature/*.
testFeatureWrongBranch()
{
	cd $tmpdir/testingRepo
	git checkout -q -b 123 develop
	assertFalse "Didn't fail when called from wrong branch" "exerciseSUT feature"
}
# Finishing must fail when the local release branch has commits not yet pushed.
testFailsIfAheadOnRelease()
{
	branch=release/1.4
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	git push -q origin $branch &>/dev/null
	touch file4
	git add file4
	git commit -q -m "Commit on $branch"
	assertFalse "Didn't fail when ahead of remote" "exerciseSUT release 1.4"
}
# Finishing must fail when local develop is ahead of origin/develop.
testFailsIfAheadOnDevelop()
{
	branch=release/1.5
	cd $tmpdir/testingRepo
	git branch $branch develop
	git push -q origin $branch &>/dev/null
	git checkout -q develop
	touch file5
	git add file5
	git commit -q -m "Commit on develop"
	git checkout -q $branch
	assertFalse "Didn't fail when ahead of remote" "exerciseSUT release 1.5"
}
# Finishing must fail when local master is ahead of origin/master.
testFailsIfAheadOnMaster()
{
	branch=release/1.6
	cd $tmpdir/testingRepo
	git branch $branch develop
	git push -q origin $branch &>/dev/null
	git checkout -q master
	touch file6
	git add file6
	git commit -q -m "Commit on master"
	git checkout -q $branch
	assertFalse "Didn't fail when ahead of remote" "exerciseSUT release 1.6"
}
# When the local release branch is behind origin (a second clone pushed a
# commit), finishing should bring the branch up to date and still succeed.
testRebaseIfBehindOnRelease()
{
	version=1.7
	branch=release/$version
	cd $tmpdir/bareRepo
	git branch $branch develop
	cd $tmpdir/testingRepo
	git fetch -q origin
	git checkout -q $branch &>/dev/null
	cd $tmpdir/masterRepo
	git fetch -q origin
	git checkout -q $branch &>/dev/null
	touch file7
	git add file7
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	cd $tmpdir/testingRepo
	assertTrue "Failed finishing branch" "exerciseSUT release $version"
}
# A merge conflict between develop and the release branch must abort the run.
testFailsWithMergeConflictOnDevelop()
{
	# develop and release branch have conflicting changes
	version=1.8
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	echo "$branch line" >> file
	git add file
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	git checkout -q develop
	echo "develop line" >> file
	git add file
	git commit -q -m "Change in develop"
	git push -q origin develop &> /dev/null
	assertFalse "Didn't fail when getting merge conflicts" "exerciseSUT release $version"
}
# '--continue' without a previously interrupted (conflicted) run must fail.
testFailsToContinueWithoutConflicts()
{
	version=1.9
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	git push -q origin $branch &>/dev/null
	assertFalse "Didn't fail continuing without conflicts" "exerciseSUT --continue"
}
# After a conflict on develop is resolved and committed, '--continue' must
# complete the whole finish: merges, tag, branch deletion, and pushes.
testReleaseContinueFromMergeConflictOnDevelop()
{
	# develop and release branch have conflicting changes
	version=1.10
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	echo "$branch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	git checkout -q develop
	echo "develop line" >> file$version
	git add file$version
	git commit -q -m "Change in develop"
	git push -q origin develop &> /dev/null
	exerciseSUT release $version &> /dev/null
	git add file$version
	git commit -q -m "Merged"
	assertTrue "Failed continuing from merge conflict" "exerciseSUT --continue"
	assertFalse "Didn't delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Didn't delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Didn't create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# Same as the develop-conflict case, but the conflicting change is on master.
testReleaseContinueFromMergeConflictOnMaster()
{
	# master and release branch have conflicting changes
	version=1.11
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	echo "$branch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	git checkout -q master
	echo "master line" >> file$version
	git add file$version
	git commit -q -m "Change in master"
	git push -q origin master &> /dev/null
	exerciseSUT release $version &> /dev/null
	git add file$version
	git commit -q -m "Merged"
	assertTrue "Failed continuing from merge conflict" "exerciseSUT --continue"
	assertFalse "Didn't delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Didn't delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Didn't create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# Conflicts on BOTH develop and master: each merge stops once, so
# '--continue' must be invoked twice before the finish completes.
testReleaseContinueFromMergeConflictOnBoth()
{
	# master and develop have conflicting changes with release branch
	version=1.12
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	echo "$branch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	git checkout -q master
	echo "master line" >> file$version
	git add file$version
	git commit -q -m "Change in master"
	git push -q origin master &> /dev/null
	git checkout -q develop
	echo "develop line" >> file$version
	git add file$version
	git commit -q -m "Change in develop"
	git push -q origin develop &> /dev/null
	assertFalse "Didn't fail with conflicts" "exerciseSUT release $version"
	git add file$version
	git commit -q -m "Merged"
	assertFalse "Didn't fail continuing without conflicts" "exerciseSUT --continue"
	git add file$version
	git commit -q -m "Merged"
	assertTrue "Failed continuing from merge conflict" "exerciseSUT --continue"
	assertFalse "Didn't delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Didn't delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Didn't create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# Happy path for 'feature': merge into develop only (never master), no tag,
# delete the branch locally and on origin, push develop.
testFeatureBranch()
{
	version=ABC-1
	branch=feature/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT feature"
	assertFalse "Failed to delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertFalse "master branch shouldn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Failed to delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertFalse "Created local tag" "git tag -l | grep -q "^$version\$""
	assertFalse "Created tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
}
# Feature finish interrupted by a develop conflict must resume via '--continue'.
testFeatureContinueFromMergeConflictOnDevelop()
{
	# develop and feature branch have conflicting changes
	version=ABC-2
	branch=feature/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	echo "$branch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	git checkout -q develop
	echo "develop line" >> file$version
	git add file$version
	git commit -q -m "Change in develop"
	git push -q origin develop &> /dev/null
	git checkout -q $branch
	assertFalse "Didn't fail on merge conflict" "exerciseSUT feature"
	git add file$version
	git commit -q -m "Merged" &> /dev/null
	assertTrue "Failed continuing from merge conflict" "exerciseSUT --continue"
	assertFalse "Failed to delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertFalse "master branch shouldn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Failed to delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertFalse "Created local tag" "git tag -l | grep -q "^$version\$""
	assertFalse "Created tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
}
# 'feature <name>' works even when the current branch is develop (explicit arg).
testFeatureBranchExplicit()
{
	version=ABC-3
	branch=feature/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	git checkout -q develop
	assertTrue "Failed finishing branch" "exerciseSUT feature $version"
	assertFalse "Failed to delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertFalse "master branch shouldn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Failed to delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertFalse "Created local tag" "git tag -l | grep -q "^$version\$""
	assertFalse "Created tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
}
# Naming a feature branch that does not exist must fail.
testFeatureBranchNonexisting()
{
	version=ABC-4
	branch=feature/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertFalse "Failed finishing branch" "exerciseSUT feature ${version}-X"
}
# Happy path for 'hotfix' based on master: merge into develop AND master,
# tag the version, delete the branch everywhere, push both branches.
testHotfixBranch()
{
	version=1.11.1
	branch=hotfix/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch master
	touch file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT hotfix $version master"
	assertFalse "Failed to delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Failed to delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Failed to create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Failed to create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# Hotfix finish interrupted by a develop conflict must resume via '--continue'.
testHotfixContinueFromMergeConflictOnDevelop()
{
	version=1.11.2
	branch=hotfix/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch master
	echo "$branch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	git checkout -q develop
	echo "develop line" >> file$version
	git add file$version
	git commit -q -m "Change in develop"
	git push -q origin develop &> /dev/null
	assertFalse "Didn't fail on merge conflict" "exerciseSUT hotfix $version master"
	git add file$version
	git commit -q -m "Merged" &> /dev/null
	assertTrue "Failed finishing branch" "exerciseSUT --continue"
	assertFalse "Failed to delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Failed to delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Failed to create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Failed to create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# Hotfix based on a support/* branch: merge into develop and the support
# branch, but never into master; tag and clean up as usual.
testHotfixBranchBasedOnSupportBranch()
{
	version=1.1.3.1
	branch=hotfix/$version
	supportBranch=support/1.1.3
	cd $tmpdir/testingRepo
	git checkout -q -b $supportBranch master
	echo "$supportBranch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $supportBranch"
	git push -q origin $supportBranch &>/dev/null
	git checkout -q -b $branch $supportBranch
	echo "$branch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT hotfix $version $supportBranch"
	assertFalse "Failed to delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertFalse "master branch shouldn't include change" "basedOnCommit master $releaseCommit"
	assertTrue "support branch doesn't include change" "basedOnCommit $supportBranch $releaseCommit"
	assertFalse "Failed to delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Failed to create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Failed to create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push support branch:" "$(git rev-parse $supportBranch)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/$supportBranch | cut -f1)"
}
# Hotfix based on a support branch where develop has a conflicting change:
# after resolving the conflict, '--continue' must finish the merge into
# develop and the support branch (not master), tag, and clean up.
# Renamed from "...Contiue..." to fix the identifier typo; shunit2 discovers
# tests by the 'test' name prefix, so the rename does not affect discovery.
testHotfixBranchBasedOnSupportBranchContinueFromMergeConflict()
{
	version=1.1.3.2
	branch=hotfix/$version
	supportBranch=support/1.1.3
	cd $tmpdir/testingRepo
	git checkout -q develop
	echo "develop line" >> file$version
	git add file$version
	git commit -q -m "Change in develop"
	git push -q origin develop &> /dev/null
	git checkout -q -b $supportBranch master
	echo "$supportBranch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $supportBranch"
	git push -q origin $supportBranch &>/dev/null
	git checkout -q -b $branch $supportBranch
	echo "$branch line" >> file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertFalse "Didn't fail on merge conflict" "exerciseSUT hotfix $version $supportBranch"
	git add file$version
	git commit -q -m "Merged" &> /dev/null
	assertTrue "Failed finishing branch" "exerciseSUT --continue"
	assertFalse "Failed to delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertFalse "master branch shouldn't include change" "basedOnCommit master $releaseCommit"
	assertTrue "support branch doesn't include change" "basedOnCommit $supportBranch $releaseCommit"
	assertFalse "Failed to delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Failed to create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Failed to create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push support branch:" "$(git rev-parse $supportBranch)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/$supportBranch | cut -f1)"
}
# '--no-push' finishes locally (merges, tag, local branch deletion) but must
# leave the remote completely untouched.
testNoPush()
{
	version=1.13
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT --no-push release $version"
	assertFalse "Didn't delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	assertTrue "Deleted branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create local tag" "git tag -l | grep -q "^$version\$""
	assertFalse "Created tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertNotEquals "Pushed develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertNotEquals "Pushed master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# A '--continue' after a '--no-push' run must perform the deferred pushes.
testContinuePush()
{
	version=1.14
	branch=release/$version
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file$version
	git add file$version
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	exerciseSUT --no-push release $version &> /dev/null
	assertTrue "Failed finishing branch" "exerciseSUT --continue"
	assertFalse "Didn't delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# '--keep' with a pre-release version: merge into develop only, tag, but keep
# the release branch (locally and on the remote) and leave master alone.
testReleaseAlphaVersion()
{
	baseversion=1.15
	version=$baseversion-alpha
	branch=release/$baseversion
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file$baseversion
	git add file$baseversion
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT --keep release $version"
	assertTrue "Deleted branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertFalse "master branch includes change" "basedOnCommit master $releaseCommit"
	assertTrue "Deleted branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Didn't create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
}
# The tag version may differ from the branch suffix (release/1.16 finished
# as 1.16-alpha): a full finish is still performed with the given version.
testReleaseWithDifferentVersion()
{
	baseversion=1.16
	version=$baseversion-alpha
	branch=release/$baseversion
	cd $tmpdir/testingRepo
	git checkout -q -b $branch develop
	touch file$baseversion
	git add file$baseversion
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT release $version"
	assertFalse "Didn't delete branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertTrue "master branch doesn't include change" "basedOnCommit master $releaseCommit"
	assertFalse "Didn't delete branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Didn't create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Didn't create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
	assertEquals "Didn't push master branch:" "$(git rev-parse master)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/master | cut -f1)"
}
# '--keep' for a hotfix pre-release: merge into develop only, tag, and keep
# the hotfix branch locally and on the remote; master stays untouched.
testHotfixAlphaVersion()
{
	baseversion=1.17.1
	version=$baseversion-alpha
	branch=hotfix/$baseversion
	cd $tmpdir/testingRepo
	git checkout -q -b $branch master
	touch file$baseversion
	git add file$baseversion
	git commit -q -m "Commit on $branch"
	git push -q origin $branch &>/dev/null
	releaseCommit=$(git rev-parse HEAD)
	assertTrue "Failed finishing branch" "exerciseSUT --keep hotfix $version master"
	assertTrue "Deleted branch" "git branch | grep -q $branch"
	assertTrue "develop branch doesn't include change" "basedOnCommit develop $releaseCommit"
	assertFalse "master branch includes change" "basedOnCommit master $releaseCommit"
	assertTrue "Deleted branch on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/heads/$branch\$""
	assertTrue "Failed to create local tag" "git tag -l | grep -q "^$version\$""
	assertTrue "Failed to create tag on remote" "git ls-remote origin 2>/dev/null | cut -f2 | grep -q "refs/tags/$version\$""
	assertEquals "Didn't push develop branch:" "$(git rev-parse develop)" "$(git ls-remote origin 2>/dev/null | grep refs/heads/develop | cut -f1)"
}
# Pull in the shunit2 framework; it discovers and runs all test* functions.
. shunit2/src/shunit2
| true |
ca38c710ec124632395b1ef5585efbed14a17d01 | Shell | chenk008/greys-anatomy | /bin/install.sh | UTF-8 | 445 | 3.59375 | 4 | [] | no_license | #! /bin/sh
# Download the greys.zip release archive into the current directory,
# unpack it, and mark the launcher script executable.
GREYS_FILE="greys.zip"

# The archive is downloaded and unpacked here, so the directory must be writable.
# POSIX '[' is used instead of the bash-only '[[' because the shebang is /bin/sh.
if [ ! -w ./ ]; then
    echo "permission denied, current direct not writable." >&2
    # 'exit -1' is outside the valid 0-255 range; 1 is the conventional failure code.
    exit 1
fi

echo "Download... greys.zip"
# -s: silent, -L: follow redirects, -k: skip TLS certificate verification.
# Testing curl's status directly is more robust than checking $? afterwards.
if ! curl -sLk "http://ompc.oss.aliyuncs.com/greys/greys.zip" -o "${GREYS_FILE}"; then
    echo "download file failed!" >&2
    exit 1
fi

unzip "${GREYS_FILE}"
rm -f "${GREYS_FILE}"    # a plain file; -r is unnecessary
chmod +x greys/greys.sh
echo "greys install successed."
98552b77fd9b954ad2596066f0de096f1ce4764a | Shell | jhzn/dotfiles | /.zshrc | UTF-8 | 6,974 | 2.6875 | 3 | [] | no_license | # zmodload zsh/zprof # For debugging zsh startup
# If not running interactively, don't do anything
# ($- contains 'i' only in interactive shells; bail out early for scripts/scp).
[[ $- != *i* ]] && return
# Source a file only when it exists; returns non-zero if the file is absent.
src() {
	local file=$1
	test -f "$file" && . "$file"
}
unset PS1
source ~/.profile
#CTRL+d no longer closes terminal
set -o ignoreeof
# Turn off control flow
stty -ixon
#begin history config
HISTFILE=~/.zsh_history
HISTFILESIZE=1000000000
SAVEHIST=1000000000
HISTSIZE=10000000
#Immediate append Setting the inc_append_history option ensures that commands are added to the history immediately (otherwise, this would happen only when the shell exits, and you could lose history upon unexpected/unclean termination of the shell).
setopt INC_APPEND_HISTORY
setopt EXTENDED_HISTORY
setopt HIST_FIND_NO_DUPS
setopt SHAREHISTORY
#make history behave like bash's
alias history='history 1'
# Prevent record in history entry if preceding them with at least one space
setopt hist_ignore_space
zstyle ':completion::complete:*' gain-privileges 1
#end of history config
# More advanced globbing pattern like negation
setopt extendedglob
# Basic auto/tab completion
# Speed up compinit. Source: https://gist.github.com/ctechols/ca1035271ad134841284?permalink_comment_id=3994613#gistcomment-3994613
zmodload zsh/complist
autoload -Uz compinit
# Only rebuild the completion dump when it is older than 24 hours
# (the (N.mh+24) glob qualifier matches a plain file modified >24h ago).
for dump in ~/.zcompdump(N.mh+24); do
	compinit
done
compinit -C
# Auto complete with case insensitivity
zstyle ':completion:*' matcher-list '' 'm:{a-zA-Z}={A-Za-z}' 'r:|[._-]=* r:|=*' 'l:|=* r:|=*'
source ~/.config/dotfiles/fzf-tab-completion/zsh/fzf-zsh-completion.sh
#begin vi config
# vi mode
bindkey -v
export KEYTIMEOUT=1
# Edit line in vim buffer ctrl-v
autoload edit-command-line; zle -N edit-command-line
bindkey '^v' edit-command-line
# Enter vim buffer from normal mode
autoload -U edit-command-line && zle -N edit-command-line && bindkey -M vicmd "v" edit-command-line
# Use vim keys in tab complete menu:
bindkey -M menuselect 'h' vi-backward-char
bindkey -M menuselect 'j' vi-down-line-or-history
bindkey -M menuselect 'k' vi-up-line-or-history
bindkey -M menuselect 'l' vi-forward-char
bindkey -M menuselect 'left' vi-backward-char
bindkey -M menuselect 'down' vi-down-line-or-history
bindkey -M menuselect 'up' vi-up-line-or-history
bindkey -M menuselect 'right' vi-forward-char
# Fix backspace bug when switching modes
bindkey "^?" backward-delete-char
# ci", ci', ci`, di", etc
# Bind text objects for quoted strings in visual and operator-pending modes.
autoload -U select-quoted
zle -N select-quoted
for m in visual viopp; do
	for c in {a,i}{\',\",\`}; do
		bindkey -M $m $c select-quoted
	done
done
# ci{, ci(, ci<, di{, etc
# Same for bracket pairs; the parameter expansion splits '()[]{}<>bB'
# into individual characters.
autoload -U select-bracketed
zle -N select-bracketed
for m in visual viopp; do
	for c in {a,i}${(s..)^:-'()[]{}<>bB'}; do
		bindkey -M $m $c select-bracketed
	done
done
# end of vi config
#autoload -Uz up-line-or-beginning-search down-line-or-beginning-search
#zle -N up-line-or-beginning-search
#zle -N down-line-or-beginning-search
#[[ -n "${key[Up]}" ]] && bindkey -- "${key[Up]}" up-line-or-beginning-search
#[[ -n "${key[Down]}" ]] && bindkey -- "${key[Down]}" down-line-or-beginning-search
# Guard against launching a full editor session as root; delegate everything
# else to doas.
function sudo() {
	case "$1" in
		"$EDITOR")
			# A root editor session exposes the system to any editor vulnerability.
			echo "Don't run your editor as root ya dumbwit! Use sudoedit instead!"
			;;
		*)
			# 'command' skips this function and any alias; doas replaces sudo here.
			command doas "$@"
			;;
	esac
}
source ~/.bash_aliases
source ~/bin/scripts/functions.sh
#Make sure to never add this file to git!
src ~/.host_specific_settings.sh
# Cached theme snippets; src() silently skips them when absent.
src ~/.cache/tmux_theme
src ~/.cache/zsh_theme
# begin FZF config
FZF_DEFAULT_OPTS="--bind 'tab:toggle-down,btab:toggle-up' --header-first --reverse"
# fzf key bindings live in distro-specific paths; src() picks whichever exists.
src /usr/share/fzf/key-bindings.zsh # ArchLinux
src /usr/share/doc/fzf/examples/key-bindings.zsh # Debian
src ~/.nix-profile/share/fzf/key-bindings.zsh # nix
#overwrite existing function to change "fc" to include timestamp as well.
#TODO make pull request to FZF github repo
# CTRL-R - Paste the selected command from history into the command line
# Redefinition of fzf's stock CTRL-R widget: 'fc -ril' (the added -i) lists
# history entries with timestamps; the perl filter drops duplicate commands
# while keeping the most recent occurrence.
fzf-history-widget() {
	local selected num
	setopt localoptions noglobsubst noposixbuiltins pipefail no_aliases 2> /dev/null
	selected=( $(fc -ril 1 | perl -ne 'print if !$seen{(/^\s*[0-9]+\**\s+(.*)/, $1)}++' |
		FZF_DEFAULT_OPTS="--height ${FZF_TMUX_HEIGHT:-40%} $FZF_DEFAULT_OPTS -n2..,.. --tiebreak=index --bind=ctrl-r:toggle-sort,ctrl-z:ignore $FZF_CTRL_R_OPTS --query=${(qqq)LBUFFER} +m" $(__fzfcmd)) )
	local ret=$?
	if [ -n "$selected" ]; then
		# First word of the fzf selection is the history event number.
		num=$selected[1]
		if [ -n "$num" ]; then
			# Replace the edit buffer with that history entry.
			zle vi-fetch-history -n $num
		fi
	fi
	zle reset-prompt
	return $ret
}
# fzf completion helpers (distro-dependent install paths).
src /usr/share/fzf/completion.zsh # Arch linux
src /usr/share/doc/fzf/examples/completion.zsh # Debian
src ~/.nix-profile/share/fzf/completion.zsh # nix
export FZF_DEFAULT_COMMAND='fd --type f --hidden --follow --exclude .git'
# To apply the command to CTRL-T as well
export FZF_CTRL_T_COMMAND="$FZF_DEFAULT_COMMAND"
# end of FZF config
# Add lfcd function which allows the shell to cd to the path you navigate to in lf
source ~/.config/lf/lfcd.sh
bindkey -s "^o" "lfcd\n" # bash keybinding
# Add a space before command to prevent history entry
bindkey -s "^q" " exit\n" # exit shell
src /usr/share/zsh/plugins/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh # Archlinux
src /usr/share/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh # Debian
src ~/.nix-profile/share/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh
# source /usr/share/zsh/plugins/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh
# Start gnome-keyring as the SSH agent only when no agent socket was inherited.
if [ -z "$SSH_AUTH_SOCK" ]; then
	eval $(/usr/bin/gnome-keyring-daemon)
	export SSH_AUTH_SOCK
fi
src /usr/share/zsh/plugins/zsh-autosuggestions/zsh-autosuggestions.zsh # Archlinux
src /usr/share/zsh-autosuggestions/zsh-autosuggestions.zsh # Debian
src ~/.nix-profile/share/zsh-autosuggestions/zsh-autosuggestions.zsh # Nix
ZSH_AUTOSUGGEST_BUFFER_MAX_SIZE=20
#ZSH_AUTOSUGGEST_COMPLETION_IGNORE="git *"
bindkey '^ ' autosuggest-accept
export LS_COLORS="$(< ~/.config/dotfiles/lscolors.sh)"
export MANPAGER='nvim +Man!'
# make time output more readable
export TIMEFMT=$'\n================\nreal\t%*E\nCPU\t%P\nuser\t%*U\nsystem\t%*S'
#begin prompt config
# Use the external 'purs' prompt when installed; otherwise fall back to the
# built-in elite2 prompt theme.
if command -v purs > /dev/null; then
	# Rebuild the prompt on every new line and on vi keymap changes so the
	# mode indicator and last exit status stay current.
	function zle-line-init zle-keymap-select {
		PROMPT=$(purs prompt -k "$KEYMAP" -r "$?" --venv "${${VIRTUAL_ENV:t}%-*}" )
		zle reset-prompt
	}
	zle -N zle-line-init
	zle -N zle-keymap-select
	autoload -Uz add-zsh-hook
	function _prompt_purs_precmd() {
		if [[ -n "$TMUX" ]]; then
			# First 30 characters of the previous command; the escape sequence
			# appears intended to set the tmux window title -- TODO confirm the
			# terminator (\033\\ is the usual one, not \033]).
			last_cmd=${history[$(($HISTCMD-1))][1,30]}
			echo -en "\033k$last_cmd\033]"
		fi
		purs precmd --git-detailed
	}
	add-zsh-hook precmd _prompt_purs_precmd
else
	autoload -Uz promptinit
	promptinit
	prompt elite2
fi
# end of prompt config
#
setopt interactivecomments #Allow comment when using zsh interactively
# NOTE(review): the same style key is assigned three times; only the last
# assignment (fg=245,italic) takes effect.
ZSH_HIGHLIGHT_STYLES[comment]='none' # Fixes comment being a better color for visibility
ZSH_HIGHLIGHT_STYLES[comment]=fg=245,standout
ZSH_HIGHLIGHT_STYLES[comment]=fg=245,italic
# zprof # For debugging zsh startup
| true |
cea7a002f5864b757befa9ed46b936b44cb333b8 | Shell | Sqooba/docker-sp | /dockerfiles/tag-push-all.sh | UTF-8 | 1,410 | 3.703125 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
### USAGE ###
#
# Tags and pushes the WSO2 Stream-processor component images into a registry
#
# --- Usage: ./tag-push-all.sh version registry
#
# -- Example usage: ./tag-push-all.sh 4.3.0 docker.sqooba.io/sqooba/
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"

registry=${2:-""}
version=$1

# Without a version the script would silently produce images tagged "name:".
if [ -z "$version" ]; then
    echo "Usage: $0 <version> [registry]" >&2
    exit 1
fi

echo "Building docker components for Stream processor version: $version"

# Tag a locally built image into the target registry namespace and push it.
# $1 - image name (without registry prefix), $2 - image tag
tag_and_push() {
    local image=$1
    local tag=$2
    docker tag "${image}:${tag}" "${registry}${image}:${tag}"
    docker push "${registry}${image}:${tag}"
}

# Each component lives in its own sub-directory; docker tag/push do not depend
# on the cwd, but the pushd keeps parity with the per-component build layout.
for component in base dashboard editor manager worker; do
    pushd "$DIR/$component"
    tag_and_push "wso2sp-$component" "$version"
    popd
done

# The extended worker reuses the wso2sp-worker image name with an "_extended" tag.
pushd "$DIR/worker-extended"
version_ext=${version}_extended
tag_and_push wso2sp-worker "$version_ext"
popd

echo "Finished building base, dashboard, editor, manager, worker"
3e84cc549b5c53048bed40c94b7a42c228e08e0a | Shell | SDGTiva/tiva_proposal_manager | /install.sh | UTF-8 | 424 | 2.90625 | 3 | [] | no_license | #! /bin/sh
# Create the data directories used by the proposal manager daemon.
# a+wt on the proposals tree = write access for everyone plus the sticky bit.
create_directories(){
	mkdir -p /usr/share/tiva/proposals/pubkeys
	mkdir -p /usr/share/tiva/proposals/signatures
	chmod -R a+wt /usr/share/tiva/proposals
	mkdir -p /usr/share/tiva/acts/log
}
# Install the daemon binary (source path is relative to the repo root).
create_bins(){
	cp sbin/tivapmd /usr/sbin/tivapmd
}
# Install the SysV init script and register it for the default runlevels.
create_scripts(){
	cp init.d/tivapmd /etc/init.d/tivapmd
	update-rc.d tivapmd defaults
}
# Full installation: directories, binary, init script. Requires root and
# must be run from the repository root because of the relative cp paths.
do_install(){
	create_directories
	create_bins
	create_scripts
}
do_install
| true |
f5a67003924ecaef06716cd4c14c5d26f026bb3e | Shell | brhaka/AutoBackup.sh | /AutoBackup.sh | UTF-8 | 581 | 3.390625 | 3 | [
"MIT"
] | permissive | # AutoBackup.sh #
# by ejuliao- && tpereira #
# Zips the user's home directory (excluding anything under Library) onto the
# mounted volume named by $BACKUP_DRIVE, using a timestamped archive name.
# All expansions of BACKUP_DRIVE are quoted: volume names under /Volumes
# frequently contain spaces, which broke the original unquoted tests/paths.
if [ -z "${BACKUP_DRIVE}" ]
then
	printf "Variable \"BACKUP_DRIVE\" does not exists."
else
	if [ -d "/Volumes/$BACKUP_DRIVE/" ]
	then
		# The timestamp doubles as the archive file name, e.g. 2024-01-31__10-30-00.zip
		export BACKUP_DATETIME_FILE_NAME="$(date +'%Y-%m-%d__%H-%M-%S')"
		cd "/Volumes/$BACKUP_DRIVE/"
		zip -r "/Volumes/$BACKUP_DRIVE/$BACKUP_DATETIME_FILE_NAME.zip" ~/ -x '*Library*'
		printf "Backup successfully done.\nFiles were zipped to $BACKUP_DRIVE/$BACKUP_DATETIME_FILE_NAME.zip"
		unset BACKUP_DATETIME_FILE_NAME
	else
		printf "Drive specified in variable \"BACKUP_DRIVE\" does not exists."
	fi
fi
| true |
990308065cfb5bb8d64d14d8f5eaf772a13ac97c | Shell | miyyet/D8Training | /architecture/scripts/set-maintenance.sh | UTF-8 | 283 | 2.671875 | 3 | [] | no_license | #!/bin/bash
# Run from the repository root (one level above this script's directory);
# abort if either cd fails so the playbook never runs from the wrong place.
cd "$( dirname "${BASH_SOURCE[0]}" )" || exit 1
cd .. || exit 1

# Extra usage text consumed by scripts/_environment.sh.
USAGE_ADDITIONAL_PARAMETER="[status]"
USAGE_ADDITIONAL_HELP="\tstatus : enable/disable\n"

# Validates the arguments and sets $inventory from "$1".
source scripts/_environment.sh

# Toggle Drupal maintenance mode; $2 is "enable" or "disable".
ansible-playbook provisioning/set-maintenance.yml -i "provisioning/inventory/$inventory" -e "status=$2"
| true |
f263eff9ee5c1cffeab3993cca111ac4cfbb0690 | Shell | himdel/dotfiles | /bin/notes | UTF-8 | 127 | 3.078125 | 3 | [] | no_license | #!/bin/bash
# Print one line per file in ~/.notes.d: its name and its first line.
echo Notes:
for f in ~/.notes.d/*; do
	# With no matches the glob expands to itself; skip the literal pattern.
	[ -e "$f" ] || continue
	echo -ne '\t'"$(basename "$f")"': '
	# First line for non-empty notes, a bare newline otherwise.  An explicit
	# if/else avoids the `a && b || c` pitfall (c would also run if b failed).
	if [ -s "$f" ]; then
		head -n1 "$f"
	else
		echo
	fi
done
| true |
82cc3d4fafaf6b9f692805fa62f112a1b69109b0 | Shell | wohshon/amq-streams-benchmarking | /test-scripts/run-test.sh | UTF-8 | 984 | 3.171875 | 3 | [] | no_license | #!/bin/bash
# Require all nine positional parameters before starting anything.
if [ $# -ne 9 ]
then
	echo "Enter the kafka following parameters:"
	echo "- Test_ID: any arbituary string to describe your test"
	echo "- BootStrap server url"
	echo "- topic name"
	echo "- payload size, in bytes"
	echo "- Number of records"
	echo "- throughput"
	echo "- namespace where kafka is deployed"
	echo "- namespace where test client is to be deployed, please create beforehand"
	echo "- promethius url "
	echo "e.g:"
	echo "./run-test.sh TestCase-50mil-100k-1024-p1 my-cluster-kafka-bootstrap.kafka-cluster.svc.cluster.local:9092 topic1 1024 50000000 100000 kafka-cluster p1 http://prometheus-operated-kafka-cluster-1.apps.cluster-83a6.83a6.example.opentlc.com/api/v1/query"
	exit 1
fi

echo "Running test: $1"
# Unique run id (test name + timestamp); doubles as the results directory.
TEST_ID=$1-$(date +%F-%T)
export TEST_ID
echo "$TEST_ID"
mkdir -p "$TEST_ID"
echo "$2" "$3" "$4" "$5" "$6" "$7" "$8"
# Launch the perf run in the background:
# bootstrap-url topic payload-size num-records throughput client-namespace
./run-perf.sh "$2" "$3" "$4" "$5" "$6" "$8" &
PERF_ID=$!
export PERF_ID
echo "Perf test PID: $PERF_ID"
sleep 1
# Metrics collection (kafka namespace, prometheus url) also in the background.
./run-metrics.sh "$7" "$9" &
# Both background jobs keep running after this script returns.
exit 0
| true |
fc15d3bf5ea4147e3b4807fbf3105655255832f9 | Shell | ossimlabs/o2-pushbutton | /openshift/disconnected/containers/httpd/run-httpd.sh | UTF-8 | 2,819 | 3.890625 | 4 | [] | no_license | #
# Accepts the following ENV variables
# CRL_HOME location where the crl lists are located
# It will create appropriate hash files. Defaults
# /etc/httpd/crl
#
#
#!/bin/bash
# Checks that a reverse-proxy.conf file was passed in
# If so, copies that file to the httpd/conf.d folder so that httpd uses it
# If not, defaults to the conf file that already exists on the Docker container
# if [ -e "$OMAR_WEB_PROXY_CONF" ] ; then
# cp $OMAR_WEB_PROXY_CONF /etc/httpd/conf.d/reverse-proxy.conf
# echo "reverse-proxy config file mounted at $OMAR_WEB_PROXY_CONF, copying to /etc/httpd/conf.d/reverse-proxy.conf"
# else
# echo "No reverse-proxy config file provided, using container default!"
# fi
# Make sure we're not confused by old, incompletely-shutdown httpd
# context after restarting the container. httpd won't start correctly
# if it thinks it is already running.
rm -rf /run/httpd/* /tmp/httpd*
# Container entrypoints may run without HOME set; default to the app user's.
if [ -z $HOME ] ; then
export HOME=/home/omar
fi
# Map the image's AWS_ACCESS_KEY/AWS_SECRET_KEY names onto the standard
# AWS SDK variable names, when provided.
if [ ! -z "${AWS_ACCESS_KEY}" ] ; then
export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY}
fi
if [ ! -z "${AWS_SECRET_KEY}" ] ; then
export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_KEY}
fi
# OpenShift runs the container with an arbitrary uid/gid.
export USER_ID=$(id -u)
export GROUP_ID=$(id -g)
##
# Cat and delete the entry that starts with omar from
# the password file and copy it to /tmp
# and add the omar user with the random user id
#
if [ -f /etc/passwd ] ; then
cat /etc/passwd | sed '/^omar/d' > /tmp/passwd
echo omar:x:$USER_ID:$GROUP_ID:Default Application User:$HOME:/sbin/nologin >> /tmp/passwd
fi
# nss_wrapper makes the synthesized /tmp/passwd entry visible to httpd so the
# arbitrary runtime uid resolves to a real user.
export LD_PRELOAD=/usr/lib64/libnss_wrapper.so
export NSS_WRAPPER_PASSWD=/tmp/passwd
export NSS_WRAPPER_GROUP=/etc/group
#
# Check defautl CRL location
#
if [ -z $CRL_HOME ] ; then
if [ -d /etc/ssl/crl ] ; then
export CRL_HOME=/etc/ssl/crl
fi
fi
# If the crl directory exists create the proper hash
# files for revocation path
#
# Each CRL gets a <subject-hash>.r0 symlink, the layout mod_ssl expects for
# SSLCARevocationPath.
if [ ! -z $CRL_HOME ] ; then
if [ ! -d /etc/httpd/crl ] ; then
mkdir -p /etc/httpd/crl
fi
pushd /etc/httpd/crl > /dev/null
for x in `find $CRL_HOME -name "*.crl"` ; do
ln -s $x `openssl crl -noout -hash -in $x`.r0 2>/dev/null
done
popd > /dev/null
fi
# Root of all goofys S3 mounts.
if [ -z "${MOUNT_POINT}" ] ; then
export MOUNT_POINT=/s3
fi
if [ -z "${GOOFY_OPTS}" ] ; then
GOOFY_OPTS="-o allow_other"
fi
# force to forground
# we are taking a comma separated list of buckets in the form of
# AWS <bucket>:<prefix-path>,.....
# where :<prefix-path> is optional.
# we will mount to the location <mount-point>/<prefix-path>
#
GOOFY_OPTS="-f ${GOOFY_OPTS}"
# Mount every requested bucket in the background (goofys stays in the
# foreground per mount because of -f above, hence the &).
if [ ! -z "${BUCKETS}" ] ; then
SPLIT_BUCKET=${BUCKETS//\,/ }
for BUCKET in ${SPLIT_BUCKET} ; do
BUCKET_PATH="${MOUNT_POINT}/${BUCKET//://}"
mkdir -p $BUCKET_PATH
goofys ${GOOFY_OPTS} ${BUCKET} ${BUCKET_PATH} &
done
fi
# Replace this shell with httpd so it receives container signals directly.
exec /usr/sbin/apachectl -DFOREGROUND
| true |
19f6a99e3d14a979023f8fd521b6e10e88a1db3f | Shell | brianseek/dkan-tools | /bin/dktl | UTF-8 | 5,970 | 3.921875 | 4 | [] | no_license | #!/bin/bash
# Set Drupal version if it isn't set.
DRUPAL_VERSION=${DRUPAL_VERSION:-"V8"}
# Which platform we're running on (linux and mac supported)
if [ -z $PLATFORM ]; then
PLATFORM=`uname` && export PLATFORM
fi
# Determine whether we want to run inside the docker container or in the host machine.
if [ -z $DKTL_MODE ] || [ "$DKTL_MODE" = "DOCKER" ]; then
DKTL_MODE="DOCKER"
elif [ "$DKTL_MODE" = "HOST" ]; then
DKTL_MODE="HOST"
else
echo "Incorrect DKTL_MODE set to ${DKTL_MODE}. Appropriate values are 'DOCKER'and 'HOST'."
exit 1
fi
# Check dependencies.
if [ "$DKTL_MODE" = "DOCKER" ]; then
if [ -z `which docker` ] || [ -z `which docker-compose` ]; then
echo "docker and docker-compose are required. Exiting."
exit 1
fi
else
if [ -z `which php` ] || [ -z `which composer` ] || [ -z `which drush` ]; then
echo "php, composer and drush are required. Exiting."
exit 1
fi
fi
# Function to find project root so we can run from anywhere
# Walk upward from the current directory until a directory containing the
# file named by $1 is found; print that directory, or the empty string if
# the root is reached without a match.  (The result variable `path` is
# deliberately left global, as before.)
find-up () {
    path=$(pwd)
    until [[ -z "$path" || -e "$path/$1" ]]; do
        path="${path%/*}"
    done
    echo "$path"
}
DKTL_PROJECT_DIRECTORY=$(find-up dktl.yml)
if [ -z "$DKTL_PROJECT_DIRECTORY" ]; then
if [ "$1" = "init" ]; then
DKTL_PROJECT_DIRECTORY=$(pwd)
else
echo "DKTL is running outside of a DKTL project. Run dktl init in the project directory first."
exit 1
fi
fi
export DKTL_PROJECT_DIRECTORY
DKTL_DIRECTORY=$(which dktl)
if [[ -L $(which dktl) ]]; then
# readlink command needs -f to work properly in linux
if [ "$PLATFORM" = "Linux" ]; then RL_OPT='-f'; fi;
DKTL_DIRECTORY=$(readlink $RL_OPT $DKTL_DIRECTORY)
fi
DKTL_DIRECTORY=$(dirname $(dirname $DKTL_DIRECTORY))
export DKTL_DIRECTORY
if [ -z "$DKTL_SLUG" ]; then
DKTL_SLUG=${DKTL_PROJECT_DIRECTORY##*/}
DKTL_SLUG=${DKTL_SLUG//-/}
DKTL_SLUG=${DKTL_SLUG//_/}
DKTL_SLUG=$(echo ${DKTL_SLUG} | tr -d '[:space:]' | tr "[A-Z]" "[a-z]") # Mixed case dirs cause issue with docker image names
export DKTL_SLUG
fi
# Setup for Docker mode and Docker-specific commands.
if [ "$DKTL_MODE" = "DOCKER" ]; then
# Check for proxy container, get domain from that.
if [ -z "$PROXY_DOMAIN" ]; then
PROXY_DOMAIN=`docker inspect proxy 2> /dev/null | grep docker.domain | tr -d ' ",-' | cut -d \= -f 2 | head -1`
fi
# If no proxy is running, use the overridden or default proxy domain
if [ -z "$PROXY_DOMAIN" ]; then
[ "$AHOY_WEB_DOMAIN" ] && WEB_DOMAIN=$AHOY_WEB_DOMAIN || WEB_DOMAIN="localtest.me"
PROXY_DOMAIN=${WEB_DOMAIN}
fi
export DKTL_PROXY_DOMAIN=$PROXY_DOMAIN
COMMON_CONF="$DKTL_DIRECTORY/assets/docker/docker-compose.common.yml"
PROXY_CONF="$DKTL_DIRECTORY/assets/docker/docker-compose.noproxy.yml"
OVERRIDES_CONF="$DKTL_PROJECT_DIRECTORY/src/docker/docker-compose.overrides.yml"
BASE_DOCKER_COMPOSE_COMMAND="docker-compose -f $COMMON_CONF -f $PROXY_CONF -p "${DKTL_SLUG}" --project-directory $DKTL_PROJECT_DIRECTORY"
if [ -f $OVERRIDES_CONF ]; then
BASE_DOCKER_COMPOSE_COMMAND+=" -f $OVERRIDES_CONF"
fi
# Check for interactive shell if DKTL_NO_PTY is not set
if [ ! -z "$DKTL_NO_PTY" ]; then
EXEC_OPTS='-T'
elif [ -t 1 ]; then
EXEC_OPTS=''
else
EXEC_OPTS='-T';
fi
# Run docker commands immediately then exit
if [ "$1" = "docker:compose" ] || [ "$1" = "dc" ]; then
$BASE_DOCKER_COMPOSE_COMMAND ${@:2}
exit 0
elif [ "$1" = "url" ] || [ "$1" = "docker:url" ]; then
echo "http://$PROXY_DOMAIN:$($BASE_DOCKER_COMPOSE_COMMAND port web 80|cut -d ':' -f2)"
exit 0
elif [ "$1" = "surl" ] || [ "$1" = "docker:surl" ]; then
echo "https://$PROXY_DOMAIN:$($BASE_DOCKER_COMPOSE_COMMAND port web 443|cut -d ':' -f2)"
exit 0
elif [ "$1" = "uli" ]; then
dktl drush uli --uri=`dktl url`
exit 0
elif [ "$1" = "suli" ]; then
dktl drush uli --uri=`dktl surl`
exit 0
fi
# Check containers state
containers=$($BASE_DOCKER_COMPOSE_COMMAND top)
if [ -z "$containers" ]; then
echo "Starting docker containers."
$BASE_DOCKER_COMPOSE_COMMAND up -d
fi
# The containers are running, set DKTL inside the cli container.
ALIAS="$($BASE_DOCKER_COMPOSE_COMMAND exec $EXEC_OPTS cli which dktl)"
if [ -z "$ALIAS" ]; then
$BASE_DOCKER_COMPOSE_COMMAND exec $EXEC_OPTS cli chmod 777 /usr/local/dkan-tools/bin/dktl
$BASE_DOCKER_COMPOSE_COMMAND exec $EXEC_OPTS cli ln -s /usr/local/dkan-tools/bin/dktl /usr/local/bin/dktl
fi
# Proxy pass to internal DKTL and save exit status
$BASE_DOCKER_COMPOSE_COMMAND exec $EXEC_OPTS cli env DRUPAL_VERSION=$DRUPAL_VERSION dktl $1 "${@:2}"
exit_status=$?
# Reset web and cli containers if xdebug
if [ $? -eq 0 ] && [[ $1 == "xdebug"* ]]; then
$BASE_DOCKER_COMPOSE_COMMAND restart web
$BASE_DOCKER_COMPOSE_COMMAND restart cli
fi
if [ -z $DKTL_CHOWN ] || [ "$DKTL_CHOWN" = "TRUE" ]; then
# Docker creates files that appear as owned by root on host. Fix:
if [ ! -z "`find $DKTL_PROJECT_DIRECTORY -user root -print -quit`" ]; then
$BASE_DOCKER_COMPOSE_COMMAND exec $EXEC_OPTS cli chown -R `id -u`:`id -g` /var/www
fi
fi
# Now that we've run some cleanup ...
# if we encountered a non-zero exit status during the docker exec, pass it on
if [ $exit_status -ne 0 ]; then
exit $exit_status
fi
elif [ "$DKTL_MODE" = "HOST" ]; then
if [[ "$1" == "docker"* ]] || [ "$1" = "dc" ]; then
echo "${1} is not available in 'HOST' mode"
exit 1
fi
# Check whether dkan-tools' dependencies have been initialized.
VENDOR="$(ls -lha $DKTL_DIRECTORY | grep vendor)"
if [ -z "$VENDOR" ]; then
echo "Composer Install"
composer install --working-dir=$DKTL_DIRECTORY
fi
# For several commands, we want to insert a "--" to pass all arguments as an array.
case $1 in
drush | phpunit | *test* )
php $DKTL_DIRECTORY/bin/app.php $1 -- "${@:2}"
;;
*)
php $DKTL_DIRECTORY/bin/app.php $1 "${@:2}"
;;
esac
fi
| true |
d88093894e9e2f7aecd7d588e5124a885cd072df | Shell | kdave/xfstests | /tests/generic/273 | UTF-8 | 2,213 | 3.171875 | 3 | [] | no_license | #! /bin/bash
# SPDX-License-Identifier: GPL-2.0
# Copyright (c) 2011-2012 Fujitsu, Inc. All Rights Reserved.
#
# FS QA Test No. 273
#
# reservation test with heavy cp workload
#
#creator

. ./common/preamble
_begin_fstest auto rw

status=0 # success is the default!

# Override the default cleanup function.
_cleanup()
{
cd /
rm -rf $tmp.*
_scratch_unmount
}

. ./common/filter

# Defaults; _threads_set scales threads by CPU count below.
threads=50
count=2

# Scale the number of copier threads with CPU count (50 per CPU, capped at 200).
_threads_set()
{
_cpu_num=`$here/src/feature -o`
threads=$(($_cpu_num * 50))
if [ $threads -gt 200 ]
then
threads=200
fi
}

# Fill $SCRATCH_MNT/origin with small files sized so that $threads parallel
# copies of the tree roughly fit in a third of the free space.
#   $1 - filesystem block size
_file_create()
{
block_size=$1
_i=0
if ! mkdir $SCRATCH_MNT/origin
then
echo "mkdir origin err"
status=1
exit
fi
cd $SCRATCH_MNT/origin
_disksize=$(_get_available_space $SCRATCH_MNT)
_free_inodes=$(_get_free_inode $SCRATCH_MNT)
# Some filesystems do not limit number of inodes and return 0
if [ $_free_inodes -eq 0 ]; then
# Guess one block per inode
_free_inodes=$(($_disksize / $block_size))
fi
# Leave some slack for directories etc.
_free_inodes=$(($_free_inodes - $_free_inodes/8))
_disksize=$(($_disksize / 3))
_num=$(($_disksize / $count / $block_size))
if [ $_num -gt $_free_inodes ]; then
_num=$_free_inodes
fi
_num=$(($_num/$threads))
_count=$count
while [ $_i -lt $_num ]
do
dd if=/dev/zero of=file_$_i bs=$block_size count=$_count >/dev/null 2>&1
_i=$(($_i + 1))
done
cd $here
}

# Copy the origin tree into $SCRATCH_MNT/sub_<suffix>; run by each worker.
#   $1 - unique suffix for this worker's target directory
_porter()
{
_suffix=$1
if ! mkdir $SCRATCH_MNT/sub_$_suffix
then
echo "mkdir sub_xxx err"
status=1
exit
fi
cp -r $SCRATCH_MNT/origin $SCRATCH_MNT/sub_$_suffix >>$seqres.full 2>&1
if [ $? -ne 0 ]
then
echo "_porter $_suffix not complete"
fi
sync
}

# Create the origin tree, then fan out $threads parallel copiers and wait
# for all of them to finish.
_do_workload()
{
_pids=""
_pid=1
block_size=$(_get_file_block_size $SCRATCH_MNT)
_threads_set
_file_create $block_size
_threads=$threads
while [ $_pid -lt $_threads ]
do
_porter $_pid &
_pids="$_pids $!"
_pid=$(($_pid + 1))
done
wait $_pids
}

# real QA test starts here
_supported_fs generic
_require_scratch

echo "------------------------------"
echo "start the workload"
echo "------------------------------"

# Run on a fresh 2 GiB scratch filesystem.
_scratch_unmount 2>/dev/null
_scratch_mkfs_sized $((2 * 1024 * 1024 * 1024)) >>$seqres.full 2>&1
_scratch_mount

_do_workload

status=0
exit
| true |
418e7c639a343ea5e0ccda54dc3a290d98b5a374 | Shell | venturasr/test-infra | /performance-tools/performance-cluster/cluster.sh | UTF-8 | 2,291 | 3.921875 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Create or delete a performance-test GKE cluster.  Helper functions
# (shout, shoutFail, checkInputParameterValue, ...) come from library.sh.
set -o errexit
set -o pipefail # Fail a pipe if any

SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

export PERFORMACE_CLUSTER_SETUP="true"

# Locate library.sh relative to the current working directory.
if [ -f "../../prow/scripts/library.sh" ]; then
source "../../prow/scripts/library.sh"
elif [ -f "../test-infra/prow/scripts/library.sh" ]; then
source "../test-infra/prow/scripts/library.sh"
else
echo "File 'library.sh' can't be found."
exit 1;
fi

if [ $# -lt "1" ]; then
echo "Usage: $0 --action (create or delete) --cluster-grade (production or development)"
exit 1;
fi

# Parse --action / --cluster-grade; anything else is kept in POSITIONAL.
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case ${key} in
--action)
checkInputParameterValue "$2"
ACTION="${2}"
checkActionInputParameterValue "$2"
shift # past argument
shift # past value
;;
--cluster-grade)
checkInputParameterValue "$2"
CLUSTER_GRADE="$2"
checkClusterGradeInputParameterValue "$2"
shift # past argument
shift # past value
;;
*) # unknown option
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters

export ACTION
export CLUSTER_GRADE

# --action is always required; --cluster-grade only for "create".
if [[ "${ACTION}" == "" ]]; then
shoutFail "--action is required"
exit 1
fi

if [[ "${ACTION}" == "create" ]] && [[ "${CLUSTER_GRADE}" == "" ]]; then
shoutFail "--cluster-grade is required"
exit 1
fi

shout "Cluster Grade ${CLUSTER_GRADE}"

# Environment preconditions for talking to GCP / building from source.
if [[ "${INPUT_CLUSTER_NAME}" == "" ]]; then
shoutFail "Environment INPUT_CLUSTER_NAME is required"
exit 1
fi

if [[ ! -f "${GOOGLE_APPLICATION_CREDENTIALS}" ]]; then
shoutFail "Environment GOOGLE_APPLICATION_CREDENTIALS with service_account credentials is required."
exit 1
fi

if [[ "${CLUSTER_GRADE}" == "development" ]] && [[ ! -d "${GOPATH}/src/github.com/kyma-project/kyma" ]]; then
shoutFail "Directory ${GOPATH}/src/github.com/kyma-project/kyma does not exists."
exit 1
fi

# Run the cluster playbook with errexit relaxed (it handles its own errors).
setupCluster() {
set +o errexit
source "${SCRIPTS_DIR}/scripts/kyma-gke-cluster.sh"
set -o errexit
}

setupCluster

shout "${ACTION} finished with success"
| true |
45af525532deb53b171fd8712027596027645250 | Shell | signposts/tactics | /src/control/server.sh | UTF-8 | 747 | 3.34375 | 3 | [] | no_license | #!/bin/sh
# This shell script will start all the available tactics at server side simultaneously, creating all the tunnels by the available tactics.
# It looks for all tactic folders present in the 'working' directory.
for dir in "$HOME"/tactics/working/*/
do
	echo "$dir"
	#sh "$dir/install.sh" # UNCOMMENT this if you want to install softwares for tactics
	# Run any pre-setup script required before the main server script.
	if [ -e "$dir/server/aprior.sh" ]; then
		sh "$dir/server/aprior.sh"
	fi
	# Run the main script, which starts this tactic's server.
	if [ -e "$dir/server/initialize_server.sh" ]; then
		sh "$dir/server/initialize_server.sh"
	fi
	# Give the tactic started above time to finish before starting the next.
	sleep 10
done
| true |
1ad2f102379f818d74b4cb84b48c1d58f590f588 | Shell | liatrio/github-actions | /gitops-semver-increment-yaml/entrypoint.sh | UTF-8 | 372 | 2.984375 | 3 | [
"MIT"
] | permissive | #!/bin/sh -l
set -e

# Bump a semver field inside a YAML manifest.
# Env: MANIFEST_FILE   - path to the YAML file
#      MANIFEST_PATH   - yq path expression to the version value
#      SEMVER_POSITION - which position to increment (major/minor/patch)

# get old version
PREVIOUS_VERSION=$(yq e "$MANIFEST_PATH" "$MANIFEST_FILE")
# increment version
NEXT_VERSION=$(node /semver-increment "$PREVIOUS_VERSION" "$SEMVER_POSITION")
echo "Change version in $MANIFEST_FILE:$MANIFEST_PATH from $PREVIOUS_VERSION to $NEXT_VERSION"
# Change manifest value (in place)
yq e "$MANIFEST_PATH = \"$NEXT_VERSION\"" -i "$MANIFEST_FILE"
| true |
24166610e6f856df16a9d22025f79c21dcab8237 | Shell | FrankKair/dotfiles | /config/shell-utils/git.sh | UTF-8 | 845 | 3.015625 | 3 | [
"MIT"
] | permissive | alias gcane='git commit --amend --no-edit && git push --force'
# Interactive branch checkout via fzf.
alias gb='git checkout $(git branch | fzf)'
# Plain (non-paged) branch listing.
alias gbc='git branch | cat'
# Delete local branches already merged into master.
alias gprune="git branch --merged master | grep -v '^[ *]*master$' | xargs git branch -d"
# Show a commit: pick one (or several) interactively with fzf when called
# without arguments, otherwise show "$1" directly.  The previous `eval` was
# unnecessary and would re-interpret shell metacharacters.
gshow () {
  if [ $# -eq 0 ]; then
    COMMIT_HASH=$(git log --oneline | fzf --multi --preview 'git show {+1}' | awk '{print $1}')
    # Intentionally unquoted: fzf --multi may return several hashes, one
    # per line, and word splitting turns them into separate arguments.
    git show ${COMMIT_HASH}
  else
    git show "$1"
  fi
}
# Pick a pull request with fzf and show its diff.  No eval needed: the PR
# number is a single safe word.
prdiff () {
  PULL_REQUEST=$(gh pr list | fzf | awk '{print $1}')
  gh pr diff "$PULL_REQUEST"
}
# Pick an issue with fzf and view it with its comments.  No eval needed.
issueview () {
  ISSUE=$(gh issue list | fzf | awk '{print $1}')
  gh issue view --comments "$ISSUE"
}
# Point the current branch's upstream at origin/<branch>.
setupstreambranch () {
  # BRANCH=`git branch | cat | grep -i '*' | cut -c 3-`
  BRANCH=$(git branch --show-current)
  echo "origin/$BRANCH"
  # Direct invocation instead of eval; quoting keeps odd branch names intact.
  git branch "--set-upstream-to=origin/$BRANCH" "$BRANCH"
}
| true |
589c73953a6a5eac1b7e1fbc7cd4eb0bcf7b5e16 | Shell | arilence/keymaps | /link.sh | UTF-8 | 237 | 2.796875 | 3 | [] | no_license | QMK_LOCATION=../qmk_firmware
CURRENT_LOCATION=$(pwd)

# Symlink the keymaps contained in this folder to their corresponding QMK location.
# NOTE(review): `ln -h` (don't follow an existing link) is the BSD/macOS flag;
# GNU coreutils spells it -n — this script assumes macOS.
echo "Linking..."
ln -hfs "$CURRENT_LOCATION/lets_split/" "$QMK_LOCATION/keyboards/lets_split/keymaps/"
| true |
bfd232389edebaf4a93b9d7f2df5b68bbda8abe5 | Shell | HannahCorman/Intro_Biocomp_Group_Project | /Expression_Counts.sh | UTF-8 | 466 | 2.921875 | 3 | [] | no_license | #Bash script to get expression counts. Can make this final code version if we can't get for loop to work.
# Count, per condition and per gene sequence, how many hits appear in
# FinalHmmOutput, writing "condition,gene,count" rows to Expression_data.txt.

# Start from a clean output file (-f: don't fail when it doesn't exist yet).
rm -f Expression_data.txt
for cond in "Control1" "Control2" "Obese1" "Obese2"; do
	for seq in "Atp12a_8.fasta" "Gsta2_1.fasta" "Lhx2_9.fasta" "Ptpn5_6.fasta" "S1c7a12_2.fasta" "Synpr_10.fasta"; do
		# -F matches the names literally (the "." in "*.fasta" would
		# otherwise be a regex wildcard); -c replaces "| wc -l".
		count=$(grep -F -- "$cond" FinalHmmOutput | grep -Fc -- "$seq")
		# Strip the ".fasta" extension for the report column.
		seq_name=${seq%%.*}
		echo "$cond,$seq_name,$count" >> Expression_data.txt
	done
done
| true |
8951a6360e602e32e35a8c1aa5c4ec90d26741aa | Shell | dmitrievanthony/ignite-tf-demo | /.common.sh | UTF-8 | 660 | 3.703125 | 4 | [] | no_license | #!/usr/bin/env bash
# Clone one branch of a git repository into a scratch directory (recreating
# it from scratch) and build it with Maven.  Every step is chained with
# `&& \` so a failure short-circuits the rest; the function's status is the
# status of the chain.
#   $1 - repository URL
#   $2 - branch name
#   $3 - scratch directory (wiped and recreated on every call)
fetch_and_build() {
REPO_URL=$1
REPO_BRANCH=$2
TMP_DIR=$3
# Create or clear directory.
if [ -d ${TMP_DIR} ]; then rm -rf ${TMP_DIR}; fi && \
mkdir ${TMP_DIR} && \
# Fetch sources.
echo Fetching sources \[repo=\"${REPO_URL}\", branch=\"${REPO_BRANCH}\"\] && \
git clone --quiet ${REPO_URL} --depth=1 --branch ${REPO_BRANCH} --single-branch ${TMP_DIR} 1>/dev/null && \
cd ${TMP_DIR} && \
{
# Build sources.
echo Building sources \[repo=\"${REPO_URL}\", branch=\"${REPO_BRANCH}\"\] && \
mvn clean install -B -q -DskipTests -Prelease,lgpl,tensorflow
# cd back out even when mvn fails (the group runs both statements).
cd ..
}
}
| true |
df68c84cbce77c2a6b6406bad26a3481ac3424b6 | Shell | bumbot/tla | /adl-xi/import.sh | UTF-8 | 274 | 3 | 3 | [] | no_license | #!/bin/bash
# $1 = Name of the import container.
# $2 = Name of the CSV file to import.
# $3 = Name of the collection in MongoDB to import to.

# Require all three arguments before touching docker.
if [ $# -ne 3 ]; then
    echo "Usage: $0 <container> <csv-file> <collection>" >&2
    exit 1
fi

# Copy CSV into Node app container.
docker cp "$2" "$1:/usr/src/app/$2"
# Execute import script.
docker exec "$1" node import.js "$2" "$3"
ef756eda7817ced12ac86b4b62de459538c7fa72 | Shell | klimczak-michal/BashScripts | /BashScripts/symboliclinks.sh | UTF-8 | 402 | 3.6875 | 4 | [] | no_license | #!/bin/bash
# For a directory tree, create a copy in which
# all links point to the same files in the original directory.

if [ $# != 2 ]
then
	# "number of arguments differs from two"
	echo "ilosc argumentow inna niz dwa"
	exit 1
fi
if [ ! -d "$1" ]
then
	# "argument 1 is not a directory"
	echo "1 argument nie jest katalogiem"
	exit 2
fi
if [ ! -d "$2" ]
then
	mkdir "$2"
	# Glob instead of parsing `ls` output; skip the literal pattern when
	# the source directory is empty.
	for sciezka in "$1"/*
	do
		[ -e "$sciezka" ] || continue
		plik=$(basename "$sciezka")
		# BUG FIX: the original resolved the *destination* with
		# `readlink -e`, which prints nothing for a path that does not
		# exist yet, so `ln -s` received a single argument and created
		# the link in the current directory instead of "$2".
		ln -s "$(readlink -e "$sciezka")" "$2/$plik"
	done
fi
5ac3aef7cff570947d78f61a0280f99797fb5577 | Shell | singleManFan/cloudpress | /bin/login.sh | UTF-8 | 219 | 2.703125 | 3 | [] | no_license | #!/bin/sh
# Load environment variables from .env if present (development mode).
if [ -f .env ]; then
    # NOTE(review): stripping "#..." breaks values that legitimately contain
    # "#", and xargs splits on whitespace — entries must be simple KEY=VALUE
    # pairs without spaces or quotes.  Confirm before hardening further.
    export $(cat .env | sed 's/#.*//g' | xargs)
else
    # "No .env detected; in development mode please create .env"
    echo "未监测到.env, 开发模式请创建.env"
fi

# Non-interactive CloudBase login; "N" answers the confirmation prompt.
# Quoting keeps the credentials as single arguments even if they are empty.
echo N | cloudbase login --apiKeyId "$TCB_SECRET_ID" --apiKey "$TCB_SECRET_KEY"
09850c2484cd8b8d1014a43978cc65aeac2032f2 | Shell | PAI-aplikacje-internetowe/kokinejo | /backend/run.sh | UTF-8 | 1,394 | 4.1875 | 4 | [] | no_license | #!/usr/bin/env bash
# Launcher for the Kokinejo backend: creates the SQLite database on first
# run, then starts the Node backend (dev or prod mode) on $PORT.
set -e
export PORT=3000
# NOTE(review): $0 is unquoted; paths with spaces would break — confirm.
DIR=$(dirname $0)
BACKEND_ROOT="$DIR"
DB_SCHEMA="database.sql"
DB_SCHEMA_PATH="$DIR/$DB_SCHEMA"
DB_FILE="database.db"
DB_PATH="$BACKEND_ROOT/$DB_FILE"
DEV_MODE=false
# Print command-line help.
usage() {
echo "./run.sh"
echo " -h"
echo " -c remove database"
echo " -d run in development mode"
echo " -p use specific port, default: 3000"
}
# Initialize the SQLite database from the schema file on first run.
createDbIfMissing() {
if [ ! -f "$DB_PATH" ]; then
echo "Creating database $DB_PATH..."
sqlite3 "$DB_PATH" ".read '$DB_SCHEMA_PATH'"
echo "Database $DB_PATH created"
fi
}
# Delete the database file (the -c option).
_clear() {
echo "Removing database $DB_PATH..."
rm -f "$DB_PATH"
echo "Removed"
}
# Replace this shell with the npm process (exec) so signals reach Node.
runBackend() {
cd ${BACKEND_ROOT}
if [ "$DEV_MODE" = true ]; then
exec npm run dev -- --port "$PORT"
else
exec npm run start -- --port "$PORT"
fi
}
# Ctrl-C handler; registered before the definition is fine — bash resolves
# the trap handler name at signal time.
trap ctrl_c INT
ctrl_c() {
echo "Shutting down application"
echo "Releasing port $PORT"
echo "Goodbye!"
}
#################################################### MAIN
while getopts ":hcdp:" option; do
case $option in
h)
usage
exit 0
;;
c)
_clear
exit 0
;;
d)
DEV_MODE=true
;;
p)
PORT="$OPTARG"
;;
\?)
echo "ERROR: unknown parameter $OPTARG"
usage
exit 1
;;
esac
done
echo "Welcome to Kokinejo backend application"
createDbIfMissing
runBackend
| true |
31e4d2faa048edfa44dc0b572f11377ba640d0de | Shell | x-stride/lfs-arm-utilite | /chapter_6/tcl8.6.2.sh | UTF-8 | 595 | 3.578125 | 4 | [] | no_license | #!/bin/bash
set -e

# Build and install Tcl 8.6.2 into /tools (LFS-style), using
# /tmp/build/<pkg>.installed as an idempotency marker.

echo "Doing tcl8.6.2"

SOURCE=/mnt/lfs/sources/tcl8.6.2-src.tar.gz
PKG=tcl8.6.2
BUILD_DIR=/tmp/build

# Create the scratch build directory on first use.
[ -d "$BUILD_DIR" ] || mkdir -v "$BUILD_DIR"
cd "$BUILD_DIR"

if [ -f "$PKG.installed" ]; then
	echo "Package $PKG already installed."
else
	echo "Building $PKG"
	# Remove leftovers from a previous failed build.
	rm -rf "${BUILD_DIR:?}/$PKG"
	tar -xf "$SOURCE"
	# The tarball's top-level directory name may differ slightly; pick the
	# first top-level directory (-maxdepth 1 avoids catching nested dirs,
	# which the original `find . -type d ! -path .` could do).
	PKG=$(find . -mindepth 1 -maxdepth 1 -type d | head -n 1 | cut -f2 -d'/')
	cd "$BUILD_DIR/$PKG"
	./configure --prefix=/tools
	make
	make check
	make install
	# BUG FIX: create the marker in $BUILD_DIR, where it is checked on the
	# next run.  The original touched it inside the source tree and then
	# deleted that tree, so the marker never survived.
	cd "$BUILD_DIR"
	touch "$PKG.installed"
	rm -rf "${BUILD_DIR:?}/$PKG"
fi
| true |
0f7f3eeeb3e6bd00bee3215ce20b89c4872cbc15 | Shell | utkarsa007/Thirdstage | /codinclub.sh | UTF-8 | 180 | 3.1875 | 3 | [] | no_license | #!/bin/bash
shopt -s extglob

# Read a word and report whether it consists of 1-19 repetitions of the
# accepted club names.
echo "Enter word"
read word

# BUG FIX: the original `a=(bridgelabcodingclub|codingclub){1,19}` underwent
# brace expansion into an array whose first element was the bogus regex
# "(...)1", so =~ tested the wrong pattern.  Store the intended ERE as a
# plain string instead.  (The typographic “smart quotes” from the original
# echo lines were also copy-paste artifacts and are normalized here.)
a='(bridgelabcodingclub|codingclub){1,19}'
if [[ $word =~ $a ]];
then
	echo "valid"
else
	echo "invalid"
fi
| true |
aabbb15ee7f56da4089e7cb17db87c1f430913a1 | Shell | theimagingcollective/tic_core | /studies/active/scripts/bids_clean.sh | UTF-8 | 8,371 | 3.375 | 3 | [] | no_license | #!/bin/bash
# Clean up a heudiconv-converted BIDS session in place:
#  * list the DICOM inventory and conversion log,
#  * drop duplicate-suffix and unneeded fmap files,
#  * trim topup reference scans to the expected volume counts,
#  * patch EchoTime1/2 and IntendedFor into the JSON sidecars,
#  * reorient all images to FSL standard orientation.
# Usage: bids_clean.sh <subject> <session>   (run from the BIDS root)
# Env:   HFPEF_BIDS_PATH - BIDS root holding .heudiconv
#        HDC_PATH        - directory containing hdc_add_header.py

start_dir=$PWD

subject_value=$1
session_value=$2

full_subject_session_value=sub-${subject_value}_ses-${session_value}
session_dir=${start_dir}/sub-${subject_value}/ses-${session_value}

echo
echo "================================================================================="
echo
# NOTE(review): the labels below are swapped relative to the variables
# ($subject_value is printed after "session_value =") — confirm intent.
echo "session_value = " $subject_value
echo "subject_value = " $session_value
echo
echo "session_dir = " $session_dir
echo

echo "List images collected and stored as DICOM files"
echo "------------------------------------------------------------------------------------------------"-

hdc_bids_path=$HFPEF_BIDS_PATH/.heudiconv/${subject_value}/ses-${session_value}/info/

$HDC_PATH/hdc_add_header.py -v ${hdc_bids_path}/dicominfo_ses-${session_value}.tsv \
-o ${hdc_bids_path}/dicominfo_ses-${session_value}.csv

echo
echo "List images converted by heudiconv (HDC)"
echo "-------------------------------------------------------------------------------------------------"
echo

cat -n ${hdc_bids_path}/${subject_value}_ses-${session_value}.auto.txt

echo
echo

#--- Remove .1., fmap/*.bval, fmap/*.bvec, fmap/*magnitude1*.json --------------------------------------------

#chmod +w -R ${session_dir}

# Strip the ".1." duplicate marker from converted file names, then make
# everything writable so the edits below can proceed.
find ${session_dir} -name "*.1.*" | xargs rename .1. .
find ${session_dir} -name "*.nii.gz" -or -name "*.json" | xargs chmod +w
find ${session_dir} -type d | xargs chmod +w

cd ${session_dir}/fmap

# magnitude1 of the phasediff fieldmap does not require a json file according to BIDS.
# Since the JSON file will be almost identical to phasediff.json file and is not required
# by the BIDS requirements I am removing it.
rm -rf *magnitude1.json

# removing bval and bvecs from this directory. These files were created from the DWI file
# which is being used as its own TOPUP reference for distortion correction.
rm -rf *bvec *bval

#--- Extract volumes for topup distortion correction from data to match corresponding topup calibration scans ------

# Keep only the first 3 volumes of pcasl
echo
echo "Extract reference images for topup distortion correction"
echo "--------------------------------------------------------"

pcasl_topup_lr=${full_subject_session_value}_acq-pcasl_dir-lr_epi.nii.gz

if [ -f $pcasl_topup_lr ]
then
echo fslroi $pcasl_topup_lr
fslroi $pcasl_topup_lr $pcasl_topup_lr 0 3
else
echo "$pcasl_topup_lr file not found"
fi

# Keep only the first 1 volumes of dwi

dwi_topup_ap=${full_subject_session_value}_acq-30ap_dir-ap_epi.nii.gz

if [ -f $dwi_topup_ap ]
then
echo fslroi $dwi_topup_ap
fslroi $dwi_topup_ap $dwi_topup_ap 0 1
else
echo "$dwi_topup_ap file not found"
fi

# Keep only the first 10 volumes of mbepi_rl

mbepi_topup_rl=${full_subject_session_value}_acq-mbepi_dir-rl_epi.nii.gz

if [ -f $mbepi_topup_rl ]
then
echo fslroi $mbepi_topup_rl
fslroi $mbepi_topup_rl $mbepi_topup_rl 0 10
else
echo "$mbepi_topup_rl file not found"
fi

#--- Update JSON files to include Echo1, Echo2, and IntendedFor information -------------------------------------

# Add Echo times for phase difference map

fmap_phasediff=${full_subject_session_value}_acq-bold_phasediff.json

echo
# NOTE(review): "$fmap_phasedif" is missing the trailing "f", so this echoes
# an empty name — confirm and fix separately.
echo "sed EchoTime " $fmap_phasedif
echo "-----------------------------------------------------------------------"

sed -i 's/"EchoTime": 0.00738,/"EchoTime1": 0.00492,\n    "EchoTime2": 0.00738,/' $fmap_phasediff

grep -H "EchoTime" $fmap_phasediff

#--- Update JSON files to include IntendedFor information -------------------------------------

sed -i '/IntendedFor/d' *.json # Remove Intended for from all JSON maps. The next portion of the shell script
# will add the IntendedFor back in. This eliminates the possibility of IntendedFor
# appearing multiple times.

# epi fmap
# The __session_value__/__subject_value__ placeholders are substituted at the
# end of this section.
sed -i 's%"AcquisitionMatrixPE": 64,%"IntendedFor": [ "ses-__session_value__/func/sub-__subject_value___ses-__session_value___task-rest_acq-epi_rec-fmap_bold.nii.gz" ],\n    "AcquisitionMatrixPE": 64,%' \
$fmap_phasediff

# epi topup
rest_topup_ap_json=${full_subject_session_value}_acq-epse_dir-ap_epi.json
rest_topup_pa_json=${full_subject_session_value}_acq-epse_dir-pa_epi.json

sed -i 's%"AcquisitionMatrixPE": 64,%"IntendedFor": [ "ses-__session_value__/func/sub-__subject_value___ses-__session_value___task-rest_acq-epi_rec-topup_bold.nii.gz" ],\n    "AcquisitionMatrixPE": 64,%' \
$rest_topup_ap_json

sed -i 's%"AcquisitionMatrixPE": 64,%"IntendedFor": [ "ses-__session_value__/func/sub-__subject_value___ses-__session_value___task-rest_acq-epi_rec-topup_bold.nii.gz" ],\n    "AcquisitionMatrixPE": 64,%' \
$rest_topup_pa_json

# mbepi topup
mbepi_topup_lr_json=${full_subject_session_value}_acq-mbepi_dir-lr_epi.json
mbepi_topup_rl_json=${full_subject_session_value}_acq-mbepi_dir-rl_epi.json

sed -i 's%"AcquisitionMatrixPE": 64,%"IntendedFor": [ "ses-__session_value__/func/sub-__subject_value___ses-__session_value___task-rest_acq-mbepi_bold.nii.gz" ],\n    "AcquisitionMatrixPE": 64,%' \
$mbepi_topup_lr_json

sed -i 's%"AcquisitionMatrixPE": 64,%"IntendedFor": [ "ses-__session_value__/func/sub-__subject_value___ses-__session_value___task-rest_acq-mbepi_bold.nii.gz" ],\n    "AcquisitionMatrixPE": 64,%' \
$mbepi_topup_rl_json

# pcasl topup
pcasl_topup_lr_json=${full_subject_session_value}_acq-pcasl_dir-lr_epi.json
pcasl_topup_rl_json=${full_subject_session_value}_acq-pcasl_dir-rl_epi.json

sed -i 's%"AcquisitionMatrixPE": 56,%"IntendedFor": [ "ses-__session_value__/func/sub-__subject_value___ses-__session_value___task-rest_acq-pcasl_bold.nii.gz" ],\n    "AcquisitionMatrixPE": 56,%' \
$pcasl_topup_lr_json

sed -i 's%"AcquisitionMatrixPE": 56,%"IntendedFor": [ "ses-__session_value__/func/sub-__subject_value___ses-__session_value___task-rest_acq-pcasl_bold.nii.gz" ],\n    "AcquisitionMatrixPE": 56,%' \
$pcasl_topup_rl_json

# dwi topup (This is not used in standard fmriprep.)
dwi_topup_ap_json=${full_subject_session_value}_acq-30ap_dir-ap_epi.json
dwi_topup_pa_json=${full_subject_session_value}_acq-30ap_dir-pa_epi.json

for ii in $dwi_topup_ap_json $dwi_topup_pa_json;
do
if [ -f $ii ]
then
sed -i 's%"AcquisitionMatrixPE": 112,%"IntendedFor": [ "ses-__session_value__/dwi/sub-__subject_value___ses-__session_value___acq-30ap_dwi.nii.gz" ],\n    "AcquisitionMatrixPE": 112,%' $ii
else
echo "$ii file not found."
fi
done

#sed -i 's%"AcquisitionMatrixPE": 112,%"IntendedFor": [ "ses-__session_value__/dwi/sub-__subject_value___ses-__session_value___acq-30ap_dwi.nii.gz" ],\n    "AcquisitionMatrixPE": 112,%' \
#     $dwi_topup_pa_json

# Replace __session__ with ${session_value} and __subject__ with ${subject_value}.
# I would prefer to do this in a single call. Unfortunately, I haven't worked out the syntax
sed -i 's#__session_value__#'${session_value}'#g' *.json
sed -i 's#__subject_value__#'${subject_value}'#g' *.json

# This awk script removed the second IntendedFor if script is run multiple times. This is a complete hack
#for ii in *.json; do
#    awk '/IntendedFor/&&c++>0 {next} 1' $ii > tmp.$ii
#    mv -f tmp.$ii $ii
#done

echo
echo "grep -H IntendedFor *.json"
echo "-------------------------------------------------------------------------------------------------"
grep -H "IntendedFor" *.json
echo

cd $start_dir

#--- Reorient all images to match FSL orientation -------------------------------------------------

echo "Reorienting all *.gz files with fslreorient2std"
echo "-----------------------------------------------"
for ii in $(find $session_dir -name "*.gz"); do
echo "reorienting $ii "
fslreorient2std $ii $ii
done

#echo
#echo
#echo "Set permission to read only for *.nii.gz and *.json files"
#echo "--------------------------------------------------------------------------------"
#find $session_dir -type f -name "*.nii.gz" | xargs chmod -w -R
#find $session_dir -type f -name "*.json" | xargs chmod -w -R

#--- Look for repeat scans -----------------------------------------------------------------------

echo
echo
echo "Looking for repeated scans one last time. "
echo "If you see something reported here you must CHOOSE which images you want to use."
echo "--------------------------------------------------------------------------------"
find $session_dir -name "*.[0-9]*"
echo " "
| true |
714d44a611590d6605fbec45ffc4b037e998f5a8 | Shell | usert5432/notes | /notes | UTF-8 | 12,418 | 4.1875 | 4 | [
"ISC"
] | permissive | #!/usr/bin/env bash
NOTES_DIR="${NOTES_DIR:-${HOME}/@NOTEDIR@}"
EDITOR="${EDITOR:-vi}"
PAGER="${PAGER:-less}"
EXCLUDES=( '.git' '.svn' )
ESCAPE_CHAR=$'\033'
COLOR_NC="${ESCAPE_CHAR}[0m"
COLOR_INDEX="${ESCAPE_CHAR}[1;33m" # Bright yellow
COLOR_PATH="${ESCAPE_CHAR}[32m" # Green
# Print an error message and terminate the script with status 1.
# Diagnostics belong on stderr (the original wrote to stdout, which
# corrupted command-substituted output such as $(notes ...)).
# Arguments: $* - message text
die ()
{
    printf 'ERROR: %s\n' "${*}" >&2
    exit 1
}
# Print the program's help text.
# Called with no arguments (user asked for help): prints and exits 0.
# Called with arguments: they are treated as an error message and the
# script dies with status 1 after showing the help.
usage()
{
    cat <<EOF
Usage: notes [OPTION...] COMMAND [ARG...]
Notes management application.
OPTIONS:
    -h, --help          Print this help message
    -d, --dir=DIR       Set note directory to DIR
    -V, --version       Print version and exit
COMMAND:
    add NAME                  Adds new note
    cat|show NOTE             Print NOTE content
    cp|copy NOTE DST          Copy NOTE to destination DST
    find|search [-t] NAME     Search note with name containing NOTE.
                              If '-t' is specified then the output will
                              be formatted as a tree, or as a list
                              otherwise.
    edit NOTE                 Open editor to modify NOTE
    grep [GREP_ARGS...]       Run 'grep' in the notes directory
    less|view NOTE            View NOTE in a pager
    ls|list|tree NAME_PREFIX  Print notes tree with names starting with
                              NAME_PREFIX
    mv|move NOTE DST          Move NOTE to destination DST
    rm|remove NOTE            Remove NOTE
For all commands NOTE can either be a note file name in the notes directory,
or an index returned by 'list', 'grep' or 'find' commands.
EOF
    if [ $# -gt 0 ]
    then
        die "${*}"
    else
        exit 0
    fi
}
# Report the installed notes version ('@VERSION@' is substituted at
# install time).
version ()
{
    printf '%s\n' "notes version @VERSION@"
}
# Reject note names that could escape the notes directory through '..'
# path components.  Dies (exit 1) on a bad name; returns 0 otherwise.
# Arguments: $1 - note name, relative to the notes directory
validate_note_name ()
{
    local note_name="${1}"
    # Cover '..' alone, at the start, in the middle, and at the end.
    # The original check only caught the first two forms, so names like
    # 'a/b/..' or '..' could still escape the notes directory.
    if [[ "${note_name}" == ".." ]] || [[ "${note_name}" == "../"* ]] \
        || [[ "${note_name}" == *"/../"* ]] || [[ "${note_name}" == *"/.." ]]
    then
        die "../ is not allowed in note name"
    fi
}
# Compute the per-user cache file path (under $TMPDIR, default /tmp) and
# make sure it exists with owner-only permissions.  The result is passed
# back through the global _get_note_cache_path_result (bash functions
# cannot return strings directly).
get_note_cache_path ()
{
    local tmpdir="${TMPDIR:-/tmp}"
    # shellcheck disable=SC2155
    local user="$(id -u)"
    _get_note_cache_path_result="${tmpdir}/notes-${user}-cache"
    touch -- "${_get_note_cache_path_result}" \
        || die "Failed to create notes cache '${_get_note_cache_path_result}'"
    # 600: the cache lists private note paths; keep it owner-only.
    chmod 600 -- "${_get_note_cache_path_result}" \
        || die "Failed to chmod notes cache '${_get_note_cache_path_result}'"
}
# Load the NUL-delimited note list from the cache file into the global
# array _collect_notes_result.
# Arguments: $1 - cache file path (optional; defaults to the per-user cache)
collect_cached_notes ()
{
    _collect_notes_result=()
    local note_cache_path="${1}"
    if [[ -z "${note_cache_path}" ]]
    then
        get_note_cache_path
        note_cache_path="${_get_note_cache_path_result}"
    fi
    # Entries are NUL-separated so note names may contain any character
    # except NUL (including newlines).
    # NOTE(review): 'line' is not declared local and leaks into the
    # caller's scope.
    while read -r -d '' line
    do
        _collect_notes_result+=( "${line}" )
    done < "${note_cache_path}"
}
# Filter: read NUL-delimited paths on stdin, strip the given directory
# prefix plus one leading '/', and write the results NUL-delimited.
# Arguments: $1 - literal path prefix to remove
strip_prefix ()
{
    local prefix="${1}"
    local line
    while IFS= read -r -d '' line
    do
        # Quote the prefix inside the pattern so glob metacharacters in
        # the directory path ('*', '?', '[') are matched literally; the
        # unquoted form silently failed to strip such prefixes.
        line="${line#"${prefix}"}"
        printf "%s\0" "${line##/}"
    done
}
# Populate _collect_notes_result with note paths found by find(1),
# excluding EXCLUDES directories.  Results are written NUL-delimited and
# sorted into the per-user cache file (so numeric indices given later to
# get_note_by_number refer to this listing), then loaded into the array.
# Arguments: $1 - notes directory; $2.. - extra find(1) predicates
collect_notes_with_find ()
{
    _collect_notes_result=()
    local note_dir="${1}"
    shift
    local find_args=( "${@}" )
    local args=( "${note_dir}" "${find_args[@]}" )
    for ex in "${EXCLUDES[@]}"
    do
        args+=( \! -ipath "*${ex}*" )
    done
    get_note_cache_path
    local note_cache_path="${_get_note_cache_path_result}"
    find "${args[@]}" -print0 \
        | strip_prefix "${note_dir}" | sort -z > "${note_cache_path}"
    collect_cached_notes
}
# Populate _collect_notes_result with notes whose content matches grep
# arguments.  Uses grep -l -Z -R: names of matching files, NUL-delimited,
# recursive; EXCLUDES directories are skipped.  The sorted result is
# cached exactly like collect_notes_with_find so indices stay valid.
# Arguments: $1 - notes directory; $2.. - grep pattern/options
collect_notes_with_grep ()
{
    _collect_notes_result=()
    local note_dir="${1}"
    shift
    local args=( -l -Z -R --color=never )
    for ex in "${EXCLUDES[@]}"
    do
        args+=( --exclude-dir="${ex}" )
    done
    args+=( "${@}" )
    get_note_cache_path
    local note_cache_path="${_get_note_cache_path_result}"
    grep "${args[@]}" -- "${note_dir}" \
        | strip_prefix "${note_dir}" | sort -z > "${note_cache_path}"
    collect_cached_notes "${note_cache_path}"
}
# Create a new note: validate the name, create intermediate directories,
# then open it in the editor.
# Arguments: $1 - notes directory; $2 - note name; $3 - editor command
add_note ()
{
    local note_dir="${1}"
    local note_name="${2}"
    local editor="${3}"
    validate_note_name "${note_name}"
    local note_path="${note_dir}/${note_name}"
    local note_root="${note_path%/*}"
    mkdir -p -- "${note_root}" \
        || die "Failed to create note directory '${note_root}'"
    # ${editor} is deliberately unquoted so EDITOR may contain arguments
    # (e.g. EDITOR="emacs -nw").
    ${editor} -- "${note_path}"
}
# Render the given note paths as a tree using tree(1) --fromfile, with a
# bracketed numeric index appended to each leaf.  Indices match the order
# of the cached listing, so they can be passed back as NOTE arguments.
# Colour (-C and the index colour) is enabled only on a terminal.
# Arguments: $@ - note paths relative to the notes directory
pprint_notes_tree ()
{
    local notes=( "${@}" )
    local tree_args=( -a --noreport -N --fromfile . )
    local color_idx=""
    local color_nc=""
    if is_interactive
    then
        color_idx="${COLOR_INDEX}"
        color_nc="${COLOR_NC}"
        tree_args+=( -C )
    fi
    local idx=0
    # The fed-in root line is replaced by our own "notes" header; tail
    # drops tree's synthetic "." root line.
    echo "${color_idx}notes${color_nc}"
    for note in "${notes[@]}"
    do
        idx=$(( idx + 1 ))
        printf "%s \t [ %s%d%s ]\n" \
            "${note}" "${color_idx}" "${idx}" "${color_nc}"
    done | tree "${tree_args[@]}" | tail -n +2
}
# Print notes as a flat numbered list: "[ N ] : path".  The index and the
# path are coloured only when stdout is a terminal.  Indices match the
# cached listing order, so they can be passed back as NOTE arguments.
# Arguments: $@ - note paths relative to the notes directory
pprint_notes_list ()
{
    local color_idx="" color_path="" color_nc=""
    if is_interactive
    then
        color_idx="${COLOR_INDEX}"
        color_path="${COLOR_PATH}"
        color_nc="${COLOR_NC}"
    fi
    local n=0
    local fname
    for fname in "${@}"
    do
        n=$(( n + 1 ))
        echo "[ ${color_idx}${n}${color_nc} ]" \
            ": ${color_path}${fname}${color_nc}"
    done
}
# Print a ">>> <note>" header line shown before a note's content.
# The note path is coloured green when stdout is a terminal.
# Arguments: $1 - note path relative to the notes directory
pprint_note_path ()
{
    local color_path="" color_nc=""
    if is_interactive
    then
        color_path="${COLOR_PATH}"
        color_nc="${COLOR_NC}"
    fi
    printf '>>> %s%s%s\n' "${color_path}" "${1}" "${color_nc}"
}
# 'find'/'search' command: list notes whose path contains a glob fragment.
# Options: -t/--tree renders the result as a tree, otherwise a flat list.
# Arguments: $1 - notes directory; remaining args - options + name fragment
find_notes ()
{
    local note_dir="${1}"
    shift
    local tree=0
    local file_glob=""
    while [ $# -gt 0 ]
    do
        case "${1}" in
            -t|--tree)
                tree=1
                ;;
            *)
                # Only a single name fragment is accepted.
                if [[ -z "${file_glob}" ]]
                then
                    file_glob="${1}"
                else
                    usage "Too many arguments for find command"
                fi
                ;;
        esac
        shift
    done
    collect_notes_with_find "${note_dir}" -type f -ipath "*${file_glob}*"
    local notes=( "${_collect_notes_result[@]}" )
    if [[ "${tree}" -eq 1 ]]
    then
        pprint_notes_tree "${notes[@]}"
    else
        pprint_notes_list "${notes[@]}"
    fi
}
# 'ls'/'list'/'tree' command: show a tree of notes whose path starts with
# the given (case-insensitive) prefix.
# Arguments: $1 - notes directory; $2 - name prefix (may be empty)
list_notes ()
{
    local note_dir="${1}"
    local note_prefix="${2}"
    collect_notes_with_find "${note_dir}/" -type f \
        -ipath "${note_dir}/${note_prefix}*"
    local notes=( "${_collect_notes_result[@]}" )
    pprint_notes_tree "${notes[@]}"
}
# Translate a 1-based numeric index (as printed by list/find/grep) into a
# note path using the cached listing.  Dies if the number is out of range.
# Result is returned via the global _get_note_by_number_result.
get_note_by_number ()
{
    _get_note_by_number_result=""
    local note_dir="${1}" # Not used now. Reserved for possible future use.
    local number="${2}"
    collect_cached_notes
    local notes=( "${_collect_notes_result[@]}" )
    [[ ${number} -eq 0 ]] && die "Note numberation starts with 1"
    if [[ ${number} -gt ${#notes[@]} ]]
    then
        die "Note number ${number} is greater than the number of notes " \
            "${#notes[@]}"
    fi
    local index=$(( number - 1 ))
    _get_note_by_number_result="${notes[${index}]}"
}
# Resolve a user-supplied NOTE argument to a note path: an all-digit
# argument is treated as a cached-listing index, anything else must be an
# existing file name under the notes directory.  Dies on failure; result
# is returned via the global _resolve_note_name_result.
resolve_note_name ()
{
    local note_dir="${1}"
    local name="${2}"
    _resolve_note_name_result=""
    [[ -z "${name}" ]] && die "Note name is not specified"
    if [[ "${name}" =~ ^[[:digit:]]+$ ]]
    then
        get_note_by_number "${note_dir}" "${name}"
        _resolve_note_name_result="${_get_note_by_number_result}"
        return
    fi
    if ! [[ -e "${note_dir}/${name}" ]]
    then
        die "Note '${name}' is not found in '${note_dir}'"
    fi
    _resolve_note_name_result="${name}"
}
# 'cat'/'show' command: print a header line, then the note's content.
# Replaces the shell with cat via exec (nothing runs afterwards).
cat_note ()
{
    local note_dir="${1}"
    local name="${2}"
    resolve_note_name "${note_dir}" "${name}"
    name="${_resolve_note_name_result}"
    local path="${note_dir}/${name}"
    pprint_note_path "${name}"
    exec cat -- "${path}"
}
# 'cp'/'mv' commands: copy or move a note inside the notes directory.
# Arguments: $1 - notes dir; $2 - 1 for move, 0 for copy; $3 - NOTE;
#            $4 - destination path relative to the notes directory
copy_or_move_note ()
{
    local note_dir="${1}"
    local move="${2}"
    local name="${3}"
    local dst="${4}"
    [[ -z "${dst}" ]] && die "Copy/Move require destination: copy/move SRC DST"
    resolve_note_name "${note_dir}" "${name}"
    name="${_resolve_note_name_result}"
    local path="${note_dir}/${name}"
    if [[ "${move}" -eq 1 ]]
    then
        echo "Moving '${path}' to '${note_dir}/${dst}'"
        exec mv -- "${path}" "${note_dir}/${dst}"
    else
        echo "Copying '${path}' to '${note_dir}/${dst}'"
        exec cp -- "${path}" "${note_dir}/${dst}"
    fi
}
# 'edit' command: resolve the note and open it in the editor.
# ${editor} is deliberately unquoted so EDITOR may contain arguments.
edit_note ()
{
    local note_dir="${1}"
    local name="${2}"
    local editor="${3}"
    resolve_note_name "${note_dir}" "${name}"
    name="${_resolve_note_name_result}"
    local path="${note_dir}/${name}"
    ${editor} -- "${path}"
}
# True (exit 0) when stdout is attached to a terminal; used to decide
# whether colour escapes should be emitted.
is_interactive ()
{
    # check if fd=1 (stdout) is a terminal
    [ -t 1 ]
}
# 'grep' command: run grep across all notes, then print each matching
# note's indexed path header followed by the matching lines (re-running
# grep on that file, with colour when interactive).
# Arguments: $1 - notes directory; $2.. - grep pattern/options
grep_notes ()
{
    local note_dir="${1}"
    shift
    local grep_args=( "${@}" )
    collect_notes_with_grep "${note_dir}" "${grep_args[@]}"
    local notes=( "${_collect_notes_result[@]}" )
    local color_idx=""
    local color_path=""
    local color_nc=""
    if is_interactive
    then
        grep_args+=( --color=always )
        color_idx="${COLOR_INDEX}"
        color_path="${COLOR_PATH}"
        color_nc="${COLOR_NC}"
    fi
    local idx=0
    for note in "${notes[@]}"
    do
        idx=$(( idx + 1 ))
        echo "[ ${color_idx}${idx}${color_nc} ]" \
            ": ${color_path}${note}${color_nc}"
        # Second grep pass prints the matching lines themselves.
        grep "${grep_args[@]}" "${note_dir}/${note}"
        echo
    done
}
# 'rm' command: resolve the note, interactively confirm, then delete.
# Anything other than a literal 'y'/'Y' answer aborts.
remove_note ()
{
    local note_dir="${1}"
    local name="${2}"
    resolve_note_name "${note_dir}" "${name}"
    name="${_resolve_note_name_result}"
    local path="${note_dir}/${name}"
    local response
    local color_path=""
    local color_nc=""
    if is_interactive
    then
        color_path="${COLOR_PATH}"
        color_nc="${COLOR_NC}"
    fi
    read -p "Remove note '${color_path}${path}${color_nc}'. Confirm [y/N]? " \
        -r response
    if [[ "${response}" =~ ^[yY]$ ]]
    then
        echo "Removing note '${path}'"
        exec rm -- "${path}"
    else
        echo "Answered: '${response}'. Aborting..."
    fi
}
# Dispatch a parsed command.
# Arguments: $1 - command; $2 - notes dir ('' -> $NOTES_DIR); $3.. - args
# The directory is canonicalised with readlink -f and must exist.
main ()
{
    local cmd="${1}"
    local dir="${2}"
    shift 2
    local args=( "${@}" )
    [[ -z "${dir}" ]] && dir="${NOTES_DIR}"
    dir="$(readlink -v -f "${dir}")" || die "Note directory does not exist"
    [ -d "${dir}" ] || die "Note directory '${dir}' is not a directory"
    # Strip a trailing slash so "$dir/$name" paths stay well-formed.
    dir="${dir%/}"
    case "${cmd}" in
        add)
            [[ "${#args[@]}" -eq 1 ]] \
                || usage "Invalid number of arguments for '${cmd}'"
            add_note "${dir}" "${args[0]}" "${EDITOR}"
            ;;
        cat|show)
            cat_note "${dir}" "${args[@]}"
            ;;
        copy|cp)
            copy_or_move_note "${dir}" 0 "${args[@]}"
            ;;
        edit)
            edit_note "${dir}" "${args[0]}" "${EDITOR}"
            ;;
        find|search)
            [[ "${#args[@]}" -ge 1 ]] \
                || usage "Too few arguments for '${cmd}'"
            find_notes "${dir}" "${args[@]}"
            ;;
        grep)
            grep_notes "${dir}" "${args[@]}"
            ;;
        less|view)
            cat_note "${dir}" "${args[@]}" | "${PAGER}"
            ;;
        list|ls|tree)
            [[ "${#args[@]}" -le 1 ]] \
                || usage "Invalid number of arguments for '${cmd}'"
            list_notes "${dir}" "${args[@]}"
            ;;
        move|mv)
            copy_or_move_note "${dir}" 1 "${args[@]}"
            ;;
        remove|rm)
            remove_note "${dir}" "${args[@]}"
            ;;
        *)
            usage "Unknown command ${1}"
    esac
}
# --- command-line parsing -------------------------------------------------
# Options may precede the command; everything after the command word is
# passed through untouched as the command's arguments.
cmd=
dir=
args=( )
while [ $# -gt 0 ]
do
    case "$1" in
        -h|--help|help)
            usage
            ;;
        -V|--version)
            version
            exit 0
            ;;
        -d|--dir)
            if [[ -n "${2}" ]]
            then
                dir="${2}"
                shift
            else
                usage "No DIR specified after $1"
            fi
            ;;
        add|cat|show|copy|cp)
            # ';&' falls through to the next arm (bash 4+): both lists of
            # command words share the same handling.
            ;&
        edit|grep|less|view|ls|list|tree|move|mv|remove|rm|search|find)
            cmd="${1}"
            shift
            break
            ;;
        *)
            usage "Unknown argument '${1}'"
            ;;
    esac
    shift
done
args=( "${@}" )
[[ -z "${cmd}" ]] && usage "No command specified"
main "${cmd}" "${dir}" "${args[@]}"
| true |
0787384d8638b5147192adfb2a6d206a3e867e16 | Shell | angpa/DevOps-Bash-tools | /spotify_playlist_name_to_id.sh | UTF-8 | 3,731 | 3.734375 | 4 | [] | no_license | #!/usr/bin/env bash
# vim:ts=4:sts=4:sw=4:et
#
# args: "My Shazam Tracks" | tee /dev/stderr | spotify_playlist_id_to_name.sh
#
# Author: Hari Sekhon
# Date: 2020-07-03 00:25:24 +0100 (Fri, 03 Jul 2020)
#
# https://github.com/harisekhon/bash-tools
#
# License: see accompanying Hari Sekhon LICENSE file
#
# If you're using my code you're welcome to connect with me on LinkedIn and optionally send me feedback to help steer this or other code I publish
#
# https://www.linkedin.com/in/harisekhon
#
set -euo pipefail
[ -n "${DEBUG:-}" ] && set -x
srcdir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck disable=SC1090
# NOTE(review): lib/spotify.sh provides spotify_token, help_usage,
# is_blank and the usage_* variables used below — confirm in the repo.
. "$srcdir/lib/spotify.sh"
# shellcheck disable=SC2034,SC2154
usage_description="
Uses Spotify API to translate a Spotify public playlist name to ID
If a Spotify playlist ID is given, returns it as is (this is for coding convenience when calling from other scripts)
Needed by several other adjacent spotify tools
$usage_playlist_help
$usage_auth_help
"
# used by usage() in lib/utils.sh
# shellcheck disable=SC2034
usage_args="<playlist> [<curl_options>]"
help_usage "$@"
#if is_mac; then
#    awk(){
#        command gawk "$@"
#    }
#fi
# causes way too many random problems to allow partial substring matching, wastes time debugging, better to fail
export SPOTIFY_PLAYLIST_EXACT_MATCH=1
# Resolve a playlist name to its 22-char alphanumeric Spotify ID.
# An argument that already looks like an ID is echoed back unchanged.
# Otherwise the full playlist listing is scanned for a (case-insensitive)
# exact name match; extra arguments are forwarded to spotify_playlists.sh
# as curl options.  Exits 1 if no playlist matches.
# NOTE(review): 'playlist_id' is not declared local and leaks globally.
playlist_name_to_id(){
    local playlist_name="$1"
    shift || :
    # if it's not a playlist id, scan all playlists and take the ID of the first matching playlist name
    if [[ "$playlist_name" =~ ^[[:alnum:]]{22}$ ]]; then
        echo "$playlist_name"
    else
        # If we've auto-completed a playlist name from the filename, replace the unicode slashes with the real ones
        playlist_name="$("$srcdir/spotify_filename_to_playlist.sh" <<< "$playlist_name")"
        # works but could get needlessly complicated to escape all possible regex special chars, switching to partial string match instead
        #playlist_regex="${playlist_id//\//\\/}"
        #playlist_regex="${playlist_regex//\(/\\(}"
        #playlist_regex="${playlist_regex//\)/\\)}"
        #awk "BEGIN{IGNORECASE=1} /${playlist_regex//\//\\/}/ {print \$1; exit}" || :)"
        playlist_id="$(SPOTIFY_PLAYLISTS_ALL=1 "$srcdir/spotify_playlists.sh" "$@" |
                       if [ "${SPOTIFY_PLAYLIST_EXACT_MATCH:-}" ]; then
                            # do not tr [:upper:] [:lower:] as this invalidates the ID which is case insensitive
                            # save last case sensitive setting, ignore return code which will error if not already set
                            last_nocasematch="$(shopt -p nocasematch || :)"
                            shopt -s nocasematch
                            while read -r id name; do
                                if [[ "$name" = "$playlist_name" ]]; then
                                    echo "$id"
                                    break
                                fi
                            done
                            # restore last case sensitive setting
                            eval "$last_nocasematch"
                       else
                            grep -Fi -m1 "$playlist_name" |
                            awk '{print $1}'
                       fi || :
    )"
        if is_blank "$playlist_id"; then
            echo "Error: failed to find playlist ID matching given playlist name '$playlist_name'" >&2
            exit 1
        fi
        echo "$playlist_id"
    fi
}
spotify_token
# With arguments: resolve the given playlist (extra args are curl opts).
# Without: read playlist names from stdin, one per line.
if [ $# -gt 0 ]; then
    playlist_name="$1"
    shift || :
    playlist_name_to_id "$playlist_name" "$@"
else
    while read -r playlist_name; do
        playlist_name_to_id "$playlist_name" "$@"
    done
fi
| true |
4378f31c8ee7f9237da05123964c1d343c194265 | Shell | OpenMandrivaAssociation/aide | /aideinit | UTF-8 | 2,846 | 4.3125 | 4 | [] | no_license | #!/bin/sh
#
# script to initialize an AIDE database and create a GPG key
# specifically for use with the AIDE database
#
# written by Vincent Danen <vdanen-at-annvix.org>
#
# $Id: aideinit 6673 2007-01-16 17:40:05Z vdanen $
if [ ! -d /var/lib/aide ]; then
printf "The AIDE database directory /var/lib/aide does not exist!\n\n"
exit 1
fi
host="`hostname`"
gpg="/usr/bin/gpg"
aide="/usr/sbin/aide"
fname="aide-`hostname`-`date +%Y%m%d-%H%M%S`"
if [ "`${gpg} --list-secret-key | grep aide@${host} >/dev/null 2>&1; echo $?`" == "1" ]; then
# we need to generate a gpg key
printf "Generating GPG private key for aide@${host}\n\n"
printf "This is done automatically, but you must provide a strong passphrase\nto protect the key.\n\n"
getpass() {
unset PASS1
unset PASS2
read -s -e -p "Passphrase: " PASS1
printf "\n"
read -s -e -p "Re-enter passphrase: " PASS2
printf "\n"
if [ "${PASS1}" != "${PASS2}" ]; then
printf "FATAL: Passwords do not match!\n\n"
unset PASS1
unset PASS2
fi
}
getpass
[[ "${PASS1}" == "" ]] && getpass
[[ "${PASS1}" == "" ]] && {
printf "FATAL: Password mis-match occurred twice. Aborting.\n\n"
exit 1
}
printf "Generating GPG key... "
tmpfile=`mktemp` || exit 1
echo "Key-Type: DSA" >>${tmpfile}
echo "Key-Length: 1024" >>${tmpfile}
echo "Subkey-Type: ELG-E" >>${tmpfile}
echo "Subkey-Length: 1024" >>${tmpfile}
echo "Name-Real: AIDE" >>${tmpfile}
echo "Name-Comment: AIDE verification key" >>${tmpfile}
echo "Name-Email: aide@${host}" >>${tmpfile}
echo "Expire-Date: 0" >>${tmpfile}
echo "Passphrase: ${PASS1}" >>${tmpfile}
${gpg} --batch --gen-key ${tmpfile}
if [ "$?" == "0" ]; then
printf " success!\n\n"
rm -f ${tmpfile}
else
printf " failed!\nAn error occurred; cannot proceed!\n\n"
rm -f ${tmpfile}
exit 1
fi
fi
signfile() {
echo ${PASS1} | ${gpg} -u aide@${host} --passphrase-fd stdin --no-tty --detach-sign aide.db
if [ "$?" == "1" ]; then
printf "FATAL: Error occurred when creating the signature file!\n\n"
exit 1
fi
}
printf "Initializing the AIDE database... this may take a minute or two.\n"
# set database to a non-existant file to prevent warnings
${aide} --init -B "database=file:/tmp/foo" -B "database_out=file:/var/lib/aide/aide.db"
pushd /var/lib/aide >/dev/null 2>&1
# create the signature file; we don't have to ask for the passphrase here, we've already got it
[[ -f aide.db.sig ]] && rm -f aide.db.sig
signfile
[[ ! -f aide.db.sig ]] && {
printf "FATAL: Signature was not created! Aborting.\n\n"
exit 1
}
printf "Database successfully signed.\n\n"
popd >/dev/null 2>&1
exit 0
| true |
155fbaabe3ee8da620e1784f0b7801ca63e706e5 | Shell | sandyliao80/shell-all | /ping-check.sh | UTF-8 | 646 | 3.0625 | 3 | [] | no_license | #!/bin/bash
# blog:https://blog.e9china.net
server_key=1234567 #server酱申请key http://sc.ftqq.com/
IP_LIST="192.168.1.2 192.168.1.3 10.10.10.10"
for IP in $IP_LIST; do
FAIL_COUNT=0
for ((i=1;i<=3;i++)); do
if ping -c 1 $IP >/dev/null; then
echo "$IP Ping is successful."
break
else
# echo "$IP Ping is failure $i"
let FAIL_COUNT++
fi
done
if [ $FAIL_COUNT -eq 3 ]; then
echo "$IP Ping is failure!"
curl -d "text=$IP Ping失败&desp=尝试对 $IP ping但是不通,主人快来解决!" https://sc.ftqq.com/$server_key.send
fi
done
| true |
7fa518574e471eef9dcc0d4e0b0eb76b6b692762 | Shell | sdzharkov/ECS40 | /P2/grepdir.sh | UTF-8 | 483 | 3.859375 | 4 | [] | no_license | #!/bin/bash
# grepdir
dir=$1
pattern=$2
if [[ $# -lt 2 ]]
then echo "usage: grepdir.sh directory pattern [-grep option]*"
exit 1
fi
if [[ ! -d $dir ]]
then echo "usage: grepdir.sh directory pattern [-grep option]*"
exit 1
fi
shift
shift
for var in $@; do
if [[ "$var" != -* ]]
then
echo "usage: grepdir.sh directory pattern [-grep option]*"
exit 1
else
continue
fi
done
find $dir -type f -exec grep $@ "$pattern" {} \;
shift
[[
| true |
697a9854013d6a754a31271e5a65e39f4a032f71 | Shell | gnidan/docker-eth-dev | /bin/truffle | UTF-8 | 457 | 3.078125 | 3 | [] | no_license | #!/bin/bash
set -o errexit
set -o pipefail
set -o nounset
source ${ETH}/bin/utils.sh
SERVICE="truffle"
main() {
local cmd=$1
local opts=${@:2}
local name=$(echo "truffle-${cmd}-$(get-container-dapp-path)" | tr '/' '-')
local extra_run_opts="--rm --name ${name}"
local run_opts=$( get-run-opts ${extra_run_opts} )
run-docker-compose \
run ${run_opts} \
${SERVICE} \
truffle ${cmd} ${opts}
}
main "$@"
| true |
60de2123904c9f2534ea86cff687dcc446cbc6aa | Shell | sev7e0/data-warehouse | /shell/zkcluster_run.sh | UTF-8 | 926 | 3.53125 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash --login
ZOOKEEPERS=spark01 spark02 spark03
PATH=/hadoopadmin/install/zookeeper-3.4.5-cdh5.14.2/bin/zkServer.sh
COMMAND=$3
# 执行前需要在/etc/environment 配置jdk环境
A=start status stop
start(){
echo "$1 zookeeper on $2"
ssh -l hadoopadmin $2 "$3 $1"
#ssh -l hadoopadmin $zookeeper "java -version"
}
if [ "$ZOOKEEPERS" == "" ] || [ "$COMMAND" == "" ];then
echo "usage:'node01 node02 node03' ./zkServer.sh [start status stop]"
exit 0
fi
# 判断是否为支持的命令
for c in $A
do
if [ "$COMMAND" != "$c" ];then
echo "当前只支持:[start status stop]命令"
exit 0
fi
done
if [ "$COMMAND" != "" ];then
for zookeeper in $ZOOKEEPERS
do
start $COMMAND $zookeeper $PATH
done
else
echo "请输入正确命令"
echo "'node01 node02 node03' ./zkServer.sh [start status stop]"
fi | true |
89e3c957e50780a073871e754b2d8bcd597e802e | Shell | RobinsonSdG/CompressionImpactCNN | /jpeg2000/j2kCompress.sh | UTF-8 | 379 | 2.78125 | 3 | [] | no_license | #!/bin/bash
INPUT="../png/inputImg/*"
OUTPUT="./outputImg/"
PARAMETERS="200,40,20 200,40 200 100,20,10 100,20 100 50,40,20 50,40 50 25,20,10 25,20 25 15,10,1 15,10 15 10,5,1 10,5 10"
rm -f ./outputImg/*
for f in $INPUT
do
f2=$(basename -- "$f")
f3=${f2::-4}
for p in $PARAMETERS
do
opj_compress -i $f -o "$OUTPUT$f3$p.j2k" -r $p -t 500,500
done
done | true |
91195666b3fae2c35edd42b024f750fca132bc79 | Shell | built1n/csaa | /testretrieve.sh | UTF-8 | 268 | 3.390625 | 3 | [] | no_license | #!/bin/bash
# retrieve files $3-($3+$2), outputting to `out'
echo "Retrieve:"
stop=$(echo "$3+$2-1" | bc)
for i in $(seq $3 $stop)
do
$1 -u 1 -k a retrievefile -f $i -o out -p > /dev/null
if [[ $? -ne 0 ]]
then
echo "Request failed!"
fi
done
| true |
41ac39d1b84f89a6c8d2f518eec7e48d0075bb3c | Shell | waikit8891/playwright-go | /scripts/generate-api.sh | UTF-8 | 564 | 2.6875 | 3 | [
"BSD-3-Clause",
"MIT"
] | permissive | #!/bin/bash
set -e
set +x
echo "Generating Interfaces"
node scripts/generate-interfaces.js > generated_interfaces.go
go fmt generated_interfaces.go > /dev/null
echo "Generated Interfaces"
PLAYWRIGHT_DIR="../playwright"
node $PLAYWRIGHT_DIR/utils/doclint/generateGoApi.js
cp $PLAYWRIGHT_DIR/utils/doclint/generate_types/go/generated-{enums,structs}.go .
go fmt generated-{enums,structs}.go > /dev/null
# echo "Validating API"
# node scripts/validate-interfaces.js
# echo "Validated API"
# echo "Updating README"
go run scripts/update-readme-versions/main.go
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.