blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
e1702371a05bc603acda2e621ff6aa72371b64dc
|
Shell
|
Drakeo/ktown
|
/deps/alldeps.SlackBuild
|
UTF-8
| 1,948
| 3.171875
| 3
|
[] |
no_license
|
#!/bin/sh
# Build (and install) all KDE dependencies
# Modified from the KDE Slackware script by Eric Hameleers <alien@slackware.com>
# Set initial variables:
CWD=$(pwd)
TMP=${TMP:-/tmp}
# Set the config option variables if they are not already set:
[ -r ./alldeps.options ] && . ./alldeps.options
# This avoids compiling a version number into KDE's .la files:
QTDIR=/usr/lib${LIBDIRSUFFIX}/qt ; export QTDIR
# Full dependency queue, in build order.
ALLDEPS=" \
  extra-cmake-modules \
  md4c \
  sni-qt \
  wayland \
  qjson \
  phonon \
  phonon-gstreamer \
  phonon-qt4 \
  phonon-qt4-gstreamer \
  python-enum34 \
  pyxdg \
  pcaudiolib \
  espeak-ng \
  dotconf \
  flite \
  speech-dispatcher \
  sip \
  PyQt \
  PyQt5 \
  QScintilla \
  qca-qt5 \
  libdbusmenu-qt5 \
  polkit-qt-1 \
  polkit-qt5-1 \
  grantlee \
  grantlee-qt4 \
  poppler \
  libdbusmenu-gtk \
  libindicator \
  libappindicator \
  cfitsio \
  libdmtx \
  qrencode \
  hack-font-ttf \
  noto-font-ttf \
  noto-cjk-font-ttf \
  gpgme \
  lensfun \
  opencv \
  dvdauthor \
  vid.stab \
  frei0r-plugins \
  mlt \
  cracklib \
  libpwquality \
  accountsservice \
  libburn \
  qtav \
  ddcutil \
  id3lib \
  cryptopp \
  cryfs \
  python3-random2 \
  perl-path-tiny \
  perl-template-toolkit \
  freecell-solver \
  drumstick \
  libsass \
  sassc \
  rttr \
  quazip \
  "
# Only needed when adding support for Wayland:
#elogind \
# Allow for specification of individual packages to be built:
if [ -z "$1" ]; then
  MODQUEUE=$ALLDEPS
else
  MODQUEUE=$*
fi
# $MODQUEUE is deliberately unquoted: it is a whitespace-separated list.
for module in $MODQUEUE ; do
  # Bug fix: a failed cd previously went unnoticed and the SlackBuild
  # then ran from the wrong directory; abort instead.
  cd "$module" || { echo "${module}: module directory not found."; exit 1; }
  if ./"$module".SlackBuild ; then
    # Yes, I will build with the fresh packages installed:
    upgradepkg --install-new --reinstall "${TMP}/${module}"-*.txz
    # Keep MIME database current:
    /usr/bin/update-mime-database /usr/share/mime 1> /dev/null 2> /dev/null &
    # NOTE(review): $VERSION is never set in this script -- presumably
    # exported by the package's SlackBuild environment; confirm.
    rm -rf "${TMP}/package-${module}" "${TMP}/${module}-$VERSION"
  else
    echo "${module} failed to build."
    exit 1
  fi
  # Return to the starting directory for the next module ($CWD was
  # captured above; `cd -` did the same but also echoed the path).
  cd "$CWD"
done
| true
|
786ce8b01bcfcb20cf8fd1e3a3259c979eeb32a2
|
Shell
|
stepro/azure-iot-pcs-remote-monitoring-dotnet
|
/scripts/build
|
UTF-8
| 147
| 2.6875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Script required by git pre-commit hook.
# Resolve the repository root (parent of this script's directory) and
# make it the working directory for whatever the hook runs next.
APP_HOME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )/"
# Bug fix: the unquoted `cd $APP_HOME` word-split paths containing
# spaces; also fail loudly if the directory cannot be entered.
cd "$APP_HOME" || exit 1
| true
|
a90d2e9a84469d2b5b6c738ddb4cae504250d349
|
Shell
|
rafecolton/wip
|
/wip
|
UTF-8
| 1,145
| 4.21875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# wip -- maintain a plain-text list (~/.wip) of work-in-progress
# project directories, keyed by the current directory's basename.
set -o errexit
set -o pipefail
# GLOBALS
readonly WIP_PATH="$HOME/.wip"
readonly WIP_TMP="$HOME/.wip.tmp"
# NOTE(review): the CMD_* constants are never referenced in this file --
# presumably documentation or leftovers from an earlier dispatch scheme.
readonly CMD_LS="ls"
readonly CMD_ADD="add"
readonly CMD_RM="rm"
# Physical (symlink-resolved) working directory; its basename is the
# key that wip_add/wip_rm store in or remove from the list.
readonly CURRENT="$(pwd -P)"
readonly BASE="$(basename $CURRENT)"
# set signal trap
# $WIP_TMP expands now (double quotes); safe because it is readonly.
trap "rm -f $WIP_TMP" EXIT SIGINT SIGTERM
# Print the command summary to stderr.
usage() {
cat <<USAGE >&2
Usage: wip <command>
Commands:
-h/--help - show this message
ls - list
add - add current
rm - remove current
USAGE
}
# Dispatch $1 to the matching wip_<command> function.
# Prints usage and exits 1 when no/help argument is given, exits 2 for
# an unknown command.
main() {
  # Ensure the WIP list file exists before any command touches it.
  touch "$WIP_PATH"
  local command="${1-}"
  # Guard the shift: with zero arguments a bare `shift` fails, and
  # errexit silently killed the script before the help check below ran.
  [[ $# -gt 0 ]] && shift
  # Bug fix: exact matches for -h/--help (the old regex matched the
  # substring "-h" anywhere), and actually show the usage text, which
  # the original skipped entirely.
  if [[ -z "$command" || "$command" == "-h" || "$command" == "--help" ]] ; then
    usage
    exit 1
  fi
  if ! type "wip_${command}" >/dev/null 2>&1 ; then
    usage
    exit 2
  fi
  # Direct invocation; eval added nothing here and would re-split input.
  "wip_${command}"
}
# List all recorded WIP entries, sorted, framed by blank lines.
function wip_ls() {
  _sort
  printf '\nWIPs:\n'
  # Bug fix: quote the path so a $HOME containing whitespace works.
  cat "$WIP_PATH"
  printf '\n'
}
# Sort the WIP list file in place.
# sort(1) reads all of its input before writing, so -o may safely name
# the input file itself; this drops the original's unquoted temp-file
# round trip (cat > tmp; cat tmp | sort > file; rm tmp).
_sort() {
  sort -o "$WIP_PATH" "$WIP_PATH"
}
# Record the current directory's basename, unless already present.
# Bug fix: the original `grep "$BASE"` matched substrings (adding "foo"
# was skipped whenever "foobar" was listed) and echoed the match to
# stdout. -x -F -q gives an exact, literal, silent membership test,
# consistent with the exact-match removal in wip_rm.
function wip_add() {
  if ! grep -qxF -- "$BASE" "$WIP_PATH" ; then
    echo "$BASE" >> "$WIP_PATH"
  fi
}
# Remove the current directory's basename from the WIP list.
# Bug fix: the basename is handed to awk via -v instead of being
# interpolated into the program text, so names containing quotes can no
# longer break (or inject into) the awk script. (awk -v still processes
# backslash escapes -- acceptable for directory basenames.)
function wip_rm() {
  awk -v base="$BASE" '$0 != base' "$WIP_PATH" > "$WIP_TMP"
  cat "$WIP_TMP" > "$WIP_PATH"
  rm -f "$WIP_TMP"
}
# Entry point: dispatch to the wip_<command> named by $1.
main "$@"
| true
|
542b7cd127146fa65062edc80bf4b7cf8dffaee7
|
Shell
|
gakaya/initial_sys_config
|
/install_docker_CentOS7.sh
|
UTF-8
| 2,667
| 3.921875
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
#
# https://pkg.jenkins.io/redhat-stable/...
# http://www.unix.com/shell-programming-and-scripting/249212-sed-command-replace-line-file-using-line-number-output-pipe.html
# https://unix.stackexchange.com/questions/69112/how-can-i-use-variables-when-doing-a-sed
#
# Date: Sun Dec 31 21:40:01 PST 2017 (this timestamp is not a mistake..)
#
# Base yum invocation: --nogpgcheck accepts unsigned packages, -y answers yes.
_YUM="yum install --nogpgcheck -y"
# Fourth field of /etc/redhat-release, e.g. "7.4.1708" on CentOS 7.
# NOTE(review): the field position differs across releases -- confirm on CentOS 6.
OS_VERSION=`cat /etc/redhat-release | awk '{print $4}'`
# Exact docker package version to (re)install.
DOCKER_VERSION='docker-ce-17.12.0.ce'
# Major version used below to pick systemd vs SysV service handling.
OS=7
echo
echo "#######################################"
echo "SCRIPT: $0 "
echo "Installing $DOCKER_VERSION..."
echo "#######################################"
echo
# Remove anything previously installed
# (errors are discarded: the packages may simply not be present).
function docker_cleanup {
yum -y remove $DOCKER_VERSION \
docker-common \
docker-selinux \
docker-engine \
2> /dev/null
}
# Get the docker repo
# Install repo prerequisites, then register the upstream docker-ce yum
# repository; exits non-zero if the repo cannot be added.
function docker_repo() {
    sudo $_YUM yum-utils device-mapper-persistent-data lvm2
    # Idiom fix: test the command directly instead of inspecting $?.
    if ! sudo /bin/yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo; then
        echo "ERROR :: Failed to add docker repo."
        exit 1
    fi
}
# Install docker
function docker_install(){
echo "INFO: Installing docker"
# $_YUM is intentionally unquoted: it carries the yum command plus its flags.
$_YUM $DOCKER_VERSION
}
# Start the docker service.
# Exits non-zero when docker is not installed; otherwise starts and
# enables the service using systemd (CentOS 7) or SysV tools (CentOS 6).
function docker_service() {
# $? below is grep's status: non-zero when no installed package matches "docker".
rpm -qa | grep docker
if [ $? != 0 ]; then
echo -e "\nERROR :: Docker is not installed. Service not started."
exit 1
fi
# NOTE(review): `>` inside [[ ]] is a lexicographic string comparison,
# not a numeric version compare; fine for "7.x" vs "7" but would
# misorder e.g. "10" vs "7" -- confirm acceptable for target hosts.
if [[ "$OS_VERSION" > "$OS" ]] ; then
#for Centos 7
systemctl start docker
systemctl enable docker
systemctl status docker
else
#for Centos 6
/sbin/service docker start
/sbin/service docker status
/sbin/chkconfig docker on
fi
echo
echo "Start docker on CentOS version $OS_VERSION"
echo
}
# Pull down a test image to ensure docker works
# Removes any cached "hello-world" image first so the pull path is
# exercised too; exits non-zero if the container cannot be started.
function docker_validate {
    # remove the test image if present (grep -q: only the status matters)
    if docker image ls hello-world | grep -q hello; then
        docker rmi hello-world --force
    fi
    # pull down the test image
    if ! docker run hello-world; then
        echo -e "\nERROR :: Could not start test container \"hello-world\"."
        exit 1
    fi
}
# Ensure java is installed before installing docker.
# `java -version` prints its banner to stderr (discarded); only its
# exit status is used here. Aborts when no JVM is on PATH.
function java_check () {
    # Idiom fix: test the command directly instead of inspecting $?.
    if java -version 2> /dev/null; then
        echo
        echo "INFO :: Java is installed. Good to go. "
        echo
    else
        echo
        echo "ERROR :: Java is not installed. Install the JDK before installing $DOCKER_VERSION."
        echo
        exit 1
    fi
}
########################################
# MAIN PROGRAM
#
# Each step exits non-zero on failure, so ordering matters: java is a
# hard requirement, then clean, add repo, install, start, validate.
java_check
docker_cleanup
docker_repo
docker_install
docker_service
docker_validate
| true
|
46e576fee5cf96a885cff49fb0d8d91f49b5767a
|
Shell
|
hyp3r5pace/useful-bash-scripts
|
/src/battery_status.sh
|
UTF-8
| 789
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/zsh
# Print a battery summary from the Linux sysfs power-supply interface
# (/sys/class/power_supply/battery/*).
echo "---------------------BATTERY INFORMATION------------------------------"
# Charge level as an integer percentage.
percentage=$(cat /sys/class/power_supply/battery/capacity)
echo "current charge percentage: $percentage%"
# e.g. "Charging", "Discharging", "Full".
charging_status=$(cat /sys/class/power_supply/battery/status)
echo "charging status: $charging_status"
health=$(cat /sys/class/power_supply/battery/health)
echo "battery health: $health"
# NOTE(review): sysfs "present" is a 0/1 flag, not a count -- the label
# "number of batteries" overstates what this file reports; confirm.
battery_num=$(cat /sys/class/power_supply/battery/present)
echo "number of batteries: $battery_num"
battery_type=$(cat /sys/class/power_supply/battery/technology)
echo "battery type: $battery_type"
# NOTE(review): sysfs "temp" is conventionally tenths of a degree
# Celsius, not Fahrenheit -- confirm on the target device.
temp=$(cat /sys/class/power_supply/battery/temp)
echo "battery temperature: $temp F"
# Doubt in the above temperature unit
echo "----------------------------------------------------------------------"
| true
|
38e8d622a0872e0243040f114b4427914b2c9648
|
Shell
|
waqarnabi/tybec
|
/lib-intern/ocl2tir_python_obs/code_MScStudents/tools/gen_llvm_ir_no_inline.sh
|
UTF-8
| 491
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
# Generate unoptimized-but-mem2reg'd LLVM IR for an OpenCL kernel ($1).
# The kernel is copied to llvm_tmp_.cl, which llvm_tmp.c #includes.
# Strip .cl from filename
filename="${1%.*}"
# Copy the original file to llvm_tmp_.cl
# Bug fix: quote the path (and guard with --) so filenames containing
# spaces or a leading dash survive.
cp -- "$1" llvm_tmp_.cl
# llvm_tmp_.cl is #included in llvm_tmp.c
# llvm_tmp.c has macros to undef OpenCL type attributes etc
# Run LLVM
# Make sure the LLVM version is 3.8
clang -O0 -S -Wunknown-attributes -emit-llvm -c llvm_tmp.c -o llvm_tmp.unopt.ll
opt -mem2reg -S llvm_tmp.unopt.ll -o llvm_tmp.opt1.ll
clang -O0 -S -emit-llvm llvm_tmp.opt1.ll -o "$1.ll"
# Rename the final file
# NOTE(review): $filename is only used by this commented-out rename.
#mv $1.ll $filename.ll
| true
|
d24b77481c984659b062b77977cefe99c642858c
|
Shell
|
areejhelal/MP3-RaspberryPi-Buildroot
|
/mp3_overlay/etc/profile.d/readDisplayDaemon.sh
|
UTF-8
| 389
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/sh
# Display daemon: shows the current MP3 player state on the console and
# redraws whenever the state changes or a redraw is requested.
# Reads:  /tmp/mp3_state  (current state text)
#         /tmp/clear_flag (1 = force a redraw)
# Writes: /tmp/clear_flag (reset to 0 after redrawing)
currentState=$(cat /tmp/mp3_state)
clear
echo "Welcome to MP3!"
sleep 1
clear
# Bug fix: quoted -- the unquoted expansion collapsed whitespace in the
# state text before printing.
echo "$currentState"
while :
do
    if [ "$(cat /tmp/clear_flag)" -eq 1 ] || [ "$currentState" != "$(cat /tmp/mp3_state)" ]; then
        currentState=$(cat /tmp/mp3_state)
        clear
        echo "$currentState"
        echo '0' > /tmp/clear_flag
    fi
    # Poll the hardware inputs, then wait before the next iteration.
    sh /MP3/readInputs.sh
    sleep 0.8
done
| true
|
2b3cb5fcd3ac7abda226317bc109d449a23c7293
|
Shell
|
bakoenig/config
|
/bin/calendar.sh
|
UTF-8
| 383
| 3.09375
| 3
|
[] |
no_license
|
#
# calendar.sh
#
# Pop-up style dashboard: 3-month calendar, today's date, and the
# current time in several time zones; stays open until a key is pressed.
# show 3 month calendar
cal -3
# show time
echo ""
# The bare-word quoting here relies on word joining inside $( ):
# the format ends up as '%A, %B %d' (e.g. "Monday, January 01").
echo "$(date +%A," "%B" "%d)"
echo ""
# TZ is set per command only; it does not leak into the environment.
echo "$(TZ='America/Los_Angeles' date +%a\ %H:%M) in Los Angeles"
echo "$(TZ='America/New_York' date +%a\ %H:%M) in New York"
echo "$(TZ='Europe/Berlin' date +%a\ %H:%M) in Berlin"
echo "$(TZ='Asia/Tokyo' date +%a\ %H:%M) in Tokyo"
# wait for a key press to exit
# NOTE(review): `read -n 1` is bash/zsh-specific and there is no
# shebang -- confirm the invoking shell is not plain POSIX sh.
read -n 1
| true
|
cf08eb5ae82e991bde88ac5aa04b13a7f94118fd
|
Shell
|
ArnieBerg/seqdb
|
/kipper/biomaj_utils/cleanup_cpn60.sh
|
UTF-8
| 497
| 3.234375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
echo "Bash script to:"
echo " - Convert cpn60 database into proper fasta format."
echo "Source is http://haruspex.usask.ca/cpnDB/cpn60_a_nut"
echo "Damion Dooley, Dec 19, 2014"
echo "Running..."
# $datadir and $dirversion are not set in this script -- presumably
# exported by the invoking BioMAJ post-process environment, with $1
# naming the downloaded file. TODO confirm against the caller.
file_name=$datadir/$dirversion/future_release/$1
# All entries with >v are preliminary / not valid yet.
# GNU sed only: 'Q' quits without printing the matched line, so the
# file is truncated in place at the first preliminary record.
sed --in-place '/^>v/Q' $file_name
# Switch b[cpndb id] [accession id] line around to match fasta header
sed -r --in-place 's/>(b[0-9]+) ([a-zA-Z0-9_\.]+) (.*)/>ref|\2|\1 \3/g;' $file_name
| true
|
e104788b65bde6d87461be1070602711a4dfde42
|
Shell
|
keguo1119/signal_quaility_check
|
/script/run.sh
|
UTF-8
| 551
| 2.921875
| 3
|
[] |
no_license
|
#!/bin/sh
# Start a 24-hour iperf UDP upload test on each usbN (0..2) modem
# interface that currently has an IPv4 address, one log file per
# interface, all running in the background.
date_t=$(date +"%Y-%m-%d-%H-%M-%S")
for i in $(seq 0 2)
do
# net-tools ifconfig format: "inet addr:10.0.0.2 ..."; the second awk
# splits on ':' to isolate the address. NOTE(review): this parsing
# breaks with iproute2-style output -- confirm the target uses
# busybox/net-tools ifconfig.
ipaddr=$(ifconfig usb$i | grep "inet addr" | awk '{print $2}' | awk 'BEGIN {FS=":"} {print$2}')
echo $ipaddr
# Skip interfaces without an address.
if [ "$ipaddr" = "" ]; then
continue
fi
# Timestamp header, then iperf appends for 86400 s (1 day), bound to
# this interface's address so traffic uses the right modem.
date +"%Y-%m-%d %H:%M:%S" > /root/scanner/iperf/iperf_usb"$i"_info-"$date_t".txt
iperf -u -c 120.27.136.251 -i 1 -b 2M -t 86400 -B $ipaddr >> /root/scanner/iperf/iperf_usb"$i"_info-"$date_t".txt 2>&1 &
done
# kill $(ps | grep iperf | grep -v grep | awk '{print $1}')
| true
|
01b4f2a62bcabc7b26eb03dd514a30d78234daa9
|
Shell
|
barry-scott/scm-workbench
|
/Source/Hg/Experiments/make-status-test-repo.sh
|
UTF-8
| 1,837
| 2.984375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Build a throwaway Mercurial repository under $TMPDIR exercising every
# file status (removed, renamed, modified, untracked, added) and then
# run the hglib status experiment against it.
# NOTE(review): the "staged"/"working" naming mirrors the git variant of
# this script; hg has no staging area, `hg add` here just schedules.
set -x
set -e
SCRIPT_DIR=${PWD}
# ${TMPDIR:? ...} aborts with the message when TMPDIR is unset/empty.
REPO=${TMPDIR:? set TMPDIR}/test-hg-repo-status
rm -rf ${TMPDIR:? set TMPDIR}/test-hg-repo-status
mkdir ${REPO}
hg init ${REPO}
cd ${REPO}
hg status
mkdir Folder1
mkdir Folder1/Folder1.1
mkdir Folder2
cat <<EOF >.hgignore
syntax: glob
*.pyc
*~
.bash_history
.directory
EOF
echo 1 deleted-sh-rm.txt >Folder1/deleted-sh-rm.txt
echo 1 deleted-git-rm.txt >Folder1/deleted-git-rm.txt
echo 1 renamed.txt >Folder1/renamed.txt
echo 1 changed-staged.txt >Folder1/changed-staged.txt
echo 1 changed-working.txt >Folder1/Folder1.1/changed-working.txt
echo 1 changed-staged-and-working.txt >Folder1/changed-staged-and-working.txt
hg add \
.hgignore \
Folder1/deleted-sh-rm.txt \
Folder1/deleted-git-rm.txt \
Folder1/renamed.txt \
Folder1/changed-staged.txt \
Folder1/Folder1.1/changed-working.txt \
Folder1/changed-staged-and-working.txt \
;
hg commit -m "commit 1"
# delete file
rm Folder1/deleted-sh-rm.txt
hg rm Folder1/deleted-git-rm.txt
# rename file
hg mv Folder1/renamed.txt Folder2/renamed2.txt
# modify files
cp Folder1/changed-staged.txt Folder1/changed-staged.txt~
echo 2 staged change >> Folder1/changed-staged.txt
hg add Folder1/changed-staged.txt
echo 2 working chage >> Folder1/Folder1.1/changed-working.txt
echo 2 staged change >> Folder1/changed-staged-and-working.txt
hg add Folder1/changed-staged-and-working.txt
echo 3 working change >> Folder1/changed-staged-and-working.txt
# new files
echo 3 new-working.txt > Folder1/new-working.txt
echo 3 new-staged.txt > Folder1/new-staged.txt
hg add Folder1/new-staged.txt
# status
hg status
# allow Source/Git modules to be tested
# NOTE(review): points at Source/Git although this is the Hg test --
# confirm whether Source/Hg was intended.
export PYTHONPATH=${BUILDER_TOP_DIR}/Source/Git
python3 ${SCRIPT_DIR}/hglib_status.py ${REPO} $1
#python3 ${SCRIPT_DIR}/hg_wb_project_status.py ${REPO} $1
| true
|
8a2e4d712108d7794d849de183f8f309a7094ad1
|
Shell
|
maxfish/mgl2d
|
/clean.sh
|
UTF-8
| 319
| 2.609375
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
# Remove the artifacts left behind by a PyPI build and by pytest,
# returning the project folder to its pre-build state.
echo "Remove PyPI build folders and files..."
for build_dir in build/ dist/ mytestpackage.egg-info/; do
    rm -rf "$build_dir"
done
echo "Remove pytest cache folder..."
rm -rf .cache/
echo "Cleanup complete."
| true
|
2ad1d46c87b3b12afc70f8a4b50dc7783a9be582
|
Shell
|
lurch/BarrysEmacs
|
/Editor/PyQtBEmacs/bemacs-linux.sh
|
UTF-8
| 1,051
| 3.25
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch Barry's Emacs (be_main.py) for development, optionally under
# gdb or valgrind. Requires BUILDER_TOP_DIR and BUILDER_CFG_PLATFORM.
export PYTHONPATH=${BUILDER_TOP_DIR}/Editor/PyQtBEmacs:${BUILDER_TOP_DIR}/Editor/exe-pybemacs
# Pick the emacs library tree matching the packaging layout per distro.
case "${BUILDER_CFG_PLATFORM}" in
Linux-Fedora)
    export BEMACS_EMACS_LIBRARY=${BUILDER_TOP_DIR}/Kits/Linux/RPM/ROOT/usr/local/bemacs8/lib/bemacs
    ;;
Linux-Debian)
    export BEMACS_EMACS_LIBRARY=${BUILDER_TOP_DIR}/Kits/Linux/DPKG/tree/usr/local/bemacs8/lib/bemacs
    ;;
*)
    exit 1
    ;;
esac
if [ "$1" = "--gdb" ]
then
    shift 1
    echo
    # Build a fresh .gdbinit: optional user init.gdb plus the run line.
    echo >.gdbinit
    if [ -e init.gdb ]
    then
        cat init.gdb >>.gdbinit
    fi
    echo "run -u be_main.py " "$@" >>.gdbinit
    echo
    gdb python${PYTHON_VERSION}
elif [ "$1" = "--valgrind" ]
then
    shift 1
    rm -f .gdbinit
    if [ "$1" = "--gdb" ]
    then
        shift 1
        # NOTE(review): --db-attach was removed in newer valgrind
        # releases (replaced by --vgdb); confirm the target version.
        valgrind \
            --db-attach=yes \
            ${TMPDIR:-/tmp}/python -u be_main.py "$@"
    else
        valgrind \
            --log-file=bemacs-memcheck.log \
            ${TMPDIR:-/tmp}/python -u be_main.py "$@"
        # Bug fix: the path above read "${TMPDIR:-/tmp}p/python" -- a
        # stray "p" that made this branch run a nonexistent binary.
    fi
else
    python${PYTHON_VERSION} -u be_main.py "$@"
fi
| true
|
fd4e38adca0f8386c6ae48f7f0efe7f91a40ce52
|
Shell
|
dazza-codes/aws-ops
|
/lib/bash/debian/git.sh
|
UTF-8
| 161
| 2.625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Ensure git (plus docs and gitk) is installed on a Debian/Ubuntu host.
# Idiom fix: `command -v` replaces `which` -- it is a shell builtin, so
# it works even on minimal hosts that ship no which(1) binary.
if command -v git > /dev/null; then
    echo "Found git installation"
else
    echo "git installation"
    apt-get install -y -qq git git-doc git-man gitk
fi
| true
|
a0dfc078fffecea58b8c8f6b4e1dc550dbd5662b
|
Shell
|
megaannum/bash_info
|
/xterm
|
UTF-8
| 1,477
| 3.890625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# xterm
#
# Place this in your path before /usr/bin/xterm
#
# This script is only needed when a new Xterm is started. When I
# reboot Linux (Fedora) and restart the Window Manager (KDE),
# the Window Manager "knows" what applications were running on
# its previous shutdown (and those application's command-line
# options) so it restarts the /usr/bin/xterm, not this script
# named xterm.
#
# Tailor xterm options as desired.
# I require unicode characters for my Vim forms so I use the iso10646
# fixed width font.
# Launch the real xterm in the background with house defaults plus any
# caller-supplied arguments. Bug fix: "$@" is quoted so arguments
# containing spaces are forwarded intact (unquoted $@ re-split them).
doXterm() {
    /usr/bin/xterm -g 80x30 -bg lightgrey -sl 1000 +si -fn '-misc-fixed-medium-r-normal-*-20-*-*-*-*-*-iso10646-*' "$@" &
}
# Location of bash_info files and directories
export BASH_INFO="$HOME/.bash_info"
# Make if it does not exist
if [[ ! -d $BASH_INFO ]]; then
    /bin/mkdir "$BASH_INFO"
fi
# Find new bash id: first unused xterm_<N> directory.
declare -i BASH_ID=0
while [[ -e $BASH_INFO/xterm_$BASH_ID ]]; do
    BASH_ID=$(( BASH_ID + 1 ))
done
# Make unique directory based upon bash id
/bin/mkdir "$BASH_INFO/xterm_$BASH_ID"
# Store bash id, bash_info directory and command to source INIT_FILE
# in the xterm's local init file.
# Bug fix: the original assignment had a stray leading space inside the
# quotes (LOCAL_INIT_FILE=" $BASH_INFO/...") that only worked by
# accident because the later expansions were unquoted; the path is now
# clean and every use of it is quoted.
LOCAL_INIT_FILE="$BASH_INFO/xterm_$BASH_ID/LOCAL_INIT_FILE"
echo "# LOCAL_INIT_FILE" > "$LOCAL_INIT_FILE"
echo "export BASH_ID=$BASH_ID" >> "$LOCAL_INIT_FILE"
echo "export BASH_INFO=$HOME/.bash_info" >> "$LOCAL_INIT_FILE"
echo ". $BASH_INFO/INIT_FILE" >> "$LOCAL_INIT_FILE"
doXterm -e /bin/bash --init-file "$LOCAL_INIT_FILE"
| true
|
dcdb2253b0946a29c286678b5620e43640ebcdfd
|
Shell
|
pperezrubio/deep_q_rl
|
/deep_q_rl/converttomovie.sh
|
UTF-8
| 166
| 2.9375
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Encode numbered PNG frames (frame000000.png, ...) into a movie.
# Usage: converttomovie.sh [output-file] [fps]
#   $1 - output file name (default: output.mp4)
#   $2 - frames per second (default: 15)
# ${N-default} substitutes only when the parameter is *unset*, exactly
# matching the original `$# -lt N` checks (an explicitly empty argument
# is still honoured as-is).
output=${1-output.mp4}
fps=${2-15}
avconv -r "$fps" -i frame%06d.png "$output"
| true
|
61f3c91b3b136209df70139a0395386c0856bf2f
|
Shell
|
1092-bioinformatics/finalproject-110753110
|
/code/scripts/step5_SIS-analysis
|
UTF-8
| 987
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
#################################################################
# INPUT:
# tree
# species_tree [optional]
# OUTPUT:
# tree_SIS.ph
##################################################################
TREEBEST_CMD="../../bin/treebest sdi"
PLOT_CMD="../../scripts/plot_SISvsNumLeaf.R"
INPUT_TREE=$1
# Optional second argument: species tree passed to `treebest sdi -s`.
[[ $# == 2 ]] && SPECIES_TREE=$2
# Output names derived from the input by removing the ".ph" suffix.
SIS_COUNT=${INPUT_TREE/.ph/}_SIS.txt
SIS_CSV=${INPUT_TREE/.ph/}_SIS.csv
SIS_JPEG=${INPUT_TREE/.ph/}_SIS.jpg
# Strip any existing NHX annotations before re-annotating.
# NOTE(review): the annotated result below overwrites $INPUT_TREE in place.
sed "s/\[&&NHX.*\]//g" $INPUT_TREE > tmp.ph
if [[ $# == 2 ]]
then
$TREEBEST_CMD -s $SPECIES_TREE tmp.ph > $INPUT_TREE
else
$TREEBEST_CMD tmp.ph > $INPUT_TREE
fi
# Histogram of SIS values bucketed by tens (integer of value/10).
echo " count range"> $SIS_COUNT
grep SIS $INPUT_TREE |awk -F"SIS=" '{print $2}'|awk -F: '{printf "%d\n",$1/10}'|sort -n|uniq -c >> $SIS_COUNT
# One (num_leaf, SIS) row per annotated node, for plotting.
echo "num_leaf,SIS">$SIS_CSV
perl -lne 'print "$2,$1" if /SIS=(\d+).*XN=(\d+)/' $INPUT_TREE >> $SIS_CSV
# R output is discarded. NOTE(review): if the plot fails, $SIS_JPEG is
# never created and the wait loop below spins forever -- consider a
# timeout or checking R's exit status.
R --slave --args $SIS_CSV $SIS_JPEG < $PLOT_CMD &> /dev/null
while [ ! -e $SIS_JPEG ]
do
sleep 5
done
rm tmp.ph
| true
|
73c32c42b5d56de9a6fa2552fe5cc8df534d380f
|
Shell
|
ZengFLab/PyroTools
|
/xgsutils/bamutils/xBamCoverageEachChr
|
UTF-8
| 1,615
| 4
| 4
|
[] |
no_license
|
#!/bin/bash
# Resolve the xgsutils root relative to this script's real location.
BIOINFO_TOOL_XGSUTILS=$(realpath $0 | xargs dirname)"/../"
# NOTE(review): xgsutils_averageDepth is defined but never referenced
# below -- presumably kept for parity with sibling scripts; confirm.
xgsutils_averageDepth="$BIOINFO_TOOL_XGSUTILS/bamutils/xBamAverageDepth"
# help message
# Print usage information to stderr, then exit successfully.
help(){
cat >&2 <<'EOF'
SYNOPSIS
 xBamCoverageEachChr [OPTIONS] <GENOME_FASTA> <BAM_FILE>

DESCRIPTION
 Compute the genomic coverage for each chromosome

OPTIONS
 -q,--mq skip reads with mapping quality less than the value [INT]
 -f,--ff skip reads with the specified flags [INT]
 -h,--help print help message
EOF
exit 0
}
# print help message if no arguments provided
if [ $# = 0 ];then help;fi
# getopt normalizes the argument list (long options, bundled flags).
PARSED_OPTIONS=$(getopt -n "$0" -o r:q:f:h --long roi:,mq:,ff:,help -- "$@")
# bad argument
if [ $? -ne 0 ];then help;fi
# A little magic, necessary when using getopt
eval set -- "$PARSED_OPTIONS"
mq=""
ff=""
roi=""
# parse arguments
while true;do
    case "$1" in
        -h | --help )
            help
            shift;;
        -q | --mq )
            mq="-q $2"
            shift 2;;
        -f | --ff )
            ff="--ff $2 $ff"
            shift 2;;
        # Bug fix: -r/--roi is declared in the getopt spec above but had
        # no case arm, so passing it left $1 unmatched and this loop
        # spun forever. Consume it here; the value is currently unused
        # by the commands below.
        -r | --roi )
            roi="$2"
            shift 2;;
        -- )
            shift
            break;;
    esac
done
genomeFile=$1 && shift
bamFile=$1
# number of chromosomes (one line per sequence in the .fai index)
numChr=$(wc -l "$genomeFile.fai" | awk '{print $1}')
# loop over chromosomes
for i in $(seq 1 $numChr)
do
    chrName=$(awk -v n=$i 'NR==n{print $1}' "$genomeFile.fai")
    chrLen=$(awk -v n=$i 'NR==n{print $2}' "$genomeFile.fai")
    # Covered positions = mpileup lines; normalize by chromosome length.
    chrCov=$(samtools mpileup -r "${chrName}" $ff $mq $bamFile 2>/dev/null | wc -l)
    chrCovR=$(echo "scale=6;${chrCov}/${chrLen}" | bc)
    echo -e "$chrName\t$chrCovR"
done
| true
|
563a8f8f73030264f0e5b1fe44781adcce6b0a2e
|
Shell
|
Symmetra/nominatim-docker
|
/scripts/update-multiple-countries.sh
|
UTF-8
| 4,287
| 4.125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Usage:
# ./update-multiple-countries.sh [<log file>]
#
# See also: https://github.com/openstreetmap/Nominatim/issues/60
# Nominatim-Osmium does not support update multiple countries by default
DEFAULT_OUT="/dev/stdout"
OUT="$DEFAULT_OUT"
# Optional first argument: a log file to append to instead of stdout.
CUSTOM_OUT="$1"
USERNAME="nominatim"
NOMINATIM_HOME="/srv/nominatim"
UPDATES_DIR="${NOMINATIM_HOME}/updates"
# One country path per line (geofabrik naming, e.g. europe/monaco).
COUNTRY_LIST="${NOMINATIM_HOME}/data/countries.txt"
BUILD_DIR="${NOMINATIM_HOME}/build"
UPDATE_PHP="./utils/update.php"
# Run-lock bookkeeping: LOCKED=1 means *we* created LOCK_DIR.
LOCKED=0
LOCK_DIR="/var/run/nominatim-update.lock"
UPDATE_URL="http://download.geofabrik.de"
# Point OUT at the caller-supplied log file (if any); create it owned
# by the nominatim user when it does not exist yet.
function init_log {
    if [ -n "$CUSTOM_OUT" ] ; then
        OUT="$CUSTOM_OUT"
        [ -f "$OUT" ] || {
            touch "$OUT"
            chown "$USERNAME" "$OUT"
        }
    fi
}
# Drop the run-lock directory -- but only when this process created it
# (LOCKED is set by acquire_lock); reset the flag afterwards.
function release_lock {
    if (( LOCKED )); then
        rmdir "$LOCK_DIR"
        LOCKED=0
    fi
}
# Log an error (when $1 is given) or success, rotate the log file once
# it exceeds 1 MiB, release the run lock, then exit 1 (error) or 0.
function onexit {
if [ ! -z "$1" ] ; then
echo "[ERROR] $1" >>"$OUT" 2>&1
# Echo to the console too when logging went to a file.
if [ "$OUT" != "$DEFAULT_OUT" ] ; then
echo "[ERROR] $1"
fi
else
echo "[$$] Exiting successfully" >>"$OUT" 2>&1
fi
# Crude rotation: delete the log when it passes 1 MiB.
# NOTE(review): `du -b` is GNU-specific; fine on the target container.
if [ -f "$OUT" ] ; then
OUT_SIZE=`du -b "$OUT" | cut -f1`
if [ $OUT_SIZE -ge $((1024 * 1024)) ] ; then
rm -f "$OUT"
fi
fi
release_lock
if [ ! -z "$1" ] ; then
exit 1
fi
exit 0
}
# Take the run lock. mkdir is atomic: success means no other instance
# holds the lock; failure means one is already running.
function acquire_lock {
if mkdir "$LOCK_DIR"; then
LOCKED=1
trap "release_lock" EXIT
else
onexit "[$$] Update script already running"
fi
}
# Validate the execution environment; each failed check aborts via onexit.
function startup_checks {
test "$(whoami)" == "root" || onexit "[$$] Script shall be run as root. Current user: $(whoami)"
id -u "$USERNAME" >/dev/null 2>&1 || onexit "[$$] User '$USERNAME' does not exist"
test -d "$NOMINATIM_HOME" || onexit "[$$] NOMINATIM_HOME=${NOMINATIM_HOME} directory does not exist"
test -d "$BUILD_DIR" || onexit "[$$] BUILD_DIR=${BUILD_DIR} directory does not exist"
test -f "$COUNTRY_LIST" || onexit "[$$] COUNTRY_LIST=${COUNTRY_LIST} file does not exist"
}
# Run a command, logging its invocation and all output to $OUT; abort
# via onexit when it fails.
# Bug fix: the original collapsed the arguments into one string ($*)
# and re-expanded it unquoted, which re-split any argument containing
# spaces and re-applied globbing. "$@" executes the exact argv given.
function run_cmd {
    echo "[$$] Running: $*" >>"$OUT" 2>&1
    "$@" >>"$OUT" 2>&1
    local retv="$?"
    test $retv -eq 0 || onexit "[$$] Command $* failed with status $retv"
}
init_log
acquire_lock
startup_checks
echo "[$$] Starting Nominatim data update at $(date)" >>"$OUT" 2>&1
### Foreach country check if configuration exists (if not create one) and then import the diff
while read -r COUNTRY; do
  COUNTRY_UPDATE_DIR="${UPDATES_DIR}/$COUNTRY"
  COUNTRY_CONFIG_FILE="${COUNTRY_UPDATE_DIR}/configuration.txt"
  # First run for this country: initialize the osmosis replication dir.
  if [ ! -f "$COUNTRY_CONFIG_FILE" ] ; then
    run_cmd sudo -u $USERNAME mkdir -p "$COUNTRY_UPDATE_DIR" >>"$OUT" 2>&1
    run_cmd sudo -u $USERNAME osmosis --read-replication-interval-init workingDirectory=${COUNTRY_UPDATE_DIR}/.
    echo "[$$] Osmosis initial $COUNTRY_CONFIG_FILE" >>"$OUT" 2>&1
    cat "$COUNTRY_CONFIG_FILE" >>"$OUT" 2>&1
    # NOTE(review): the redirections below are performed by this (root)
    # shell, so `sudo -u` has no effect on them and the file ends up
    # root-owned; it works only because startup_checks enforces root.
    sudo -u $USERNAME echo "baseUrl=${UPDATE_URL}/${COUNTRY}-updates" > "$COUNTRY_CONFIG_FILE"
    sudo -u $USERNAME echo "maxInterval = 0" >> "$COUNTRY_CONFIG_FILE"
    echo "[$$] Final $COUNTRY_CONFIG_FILE" >>"$OUT" 2>&1
    cat "$COUNTRY_CONFIG_FILE" >>"$OUT" 2>&1
    cd "$COUNTRY_UPDATE_DIR"
    run_cmd sudo -u $USERNAME wget -q "${UPDATE_URL}/${COUNTRY}-updates/state.txt" >>"$OUT" 2>&1
  fi
  echo "[$$] $COUNTRY initial state.txt content:" >>"$OUT" 2>&1
  cat "${COUNTRY_UPDATE_DIR}/state.txt" >>"$OUT" 2>&1
  # "europe/monaco" -> "europe_monaco" for a flat .osc.gz file name.
  COUNTRY_OSC_FILENAME=${COUNTRY//[\/]/_}
  run_cmd sudo -u $USERNAME osmosis --read-replication-interval workingDirectory=${COUNTRY_UPDATE_DIR}/. --wxc ${COUNTRY_OSC_FILENAME}.osc.gz
  echo "[$$] $COUNTRY modified state.txt content:" >>"$OUT" 2>&1
  cat "${COUNTRY_UPDATE_DIR}/state.txt" >>"$OUT" 2>&1
done < "$COUNTRY_LIST"
INDEX=0 # false
cd "$BUILD_DIR"
echo "[$$] Entered build directory: $BUILD_DIR" >>"$OUT" 2>&1
echo "[$$] $UPDATES_DIR content:" >>"$OUT" 2>&1
tree "$UPDATES_DIR" >>"$OUT" 2>&1
### Foreach diff files do the import
# Bug fix: the -name pattern is now quoted; unquoted, any *.osc.gz file
# in the current directory expanded the glob before find ever saw it.
for OSC in $(find "$UPDATES_DIR" -type f -name '*.osc.gz'); do
  echo "[$$] Loading diff file $OSC" >>"$OUT" 2>&1
  run_cmd sudo -u $USERNAME "$UPDATE_PHP" --import-diff "$OSC" --no-npi
  INDEX=1
done
### Re-index if needed
if ((${INDEX})); then
  run_cmd sudo -u $USERNAME "$UPDATE_PHP" --index
fi
### Remove all diff files (pattern quoted here too)
find "$UPDATES_DIR" -type f -name '*.osc.gz' -exec rm -v {} \;
echo "[$$] Finished Nominatim data update at $(date)" >>"$OUT" 2>&1
onexit
| true
|
f171d0f6e048d3942b933734784a6547ff0ebe5b
|
Shell
|
guardian/dotcom-rendering
|
/scripts/ci-ar.sh
|
UTF-8
| 1,286
| 3.71875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# CI guard for apps-rendering (AR): run the AR build and tests only
# when the change set includes files relevant to AR, or on main.
# exit when any command fails
set -e
git fetch origin main
gitBranches="$(git branch -r)"
echo "git branches: $gitBranches"
currentBranch="$(git rev-parse --abbrev-ref HEAD)"
echo "current branch: $currentBranch"
# files that were changed between current branch and main
files="$(git diff --name-only $currentBranch origin/main)"
echo "git diff files: $files"
# files that are not within dotcom-rendering sub directory
# `|| :` keeps grep's exit-1-on-no-match from tripping `set -e`.
filteredFiles="$(echo "$files" | { grep -v 'dotcom-rendering' || :; })"
echo "files that are not in dotcom-rendering: $filteredFiles"
# run the ci steps if either of the followings is true
# - filteredFiles is empty (all changes were in dotcom-rendering)
# - we are in the main branch
if [[ $currentBranch != "main" ]] && [ -z "$filteredFiles" ]
then
# NOTE(review): variables expanded inside the printf *format* string; a
# branch name containing '%' would garble the message -- prefer
# printf '%s' "$var". Cosmetic only.
printf "Skipping AR ci build because AR file changes is empty and branch is $currentBranch\n\n"
else
printf "Running AR ci build because AR file changes contains $filteredFiles and branch is $currentBranch\n\n"
# nvm is a shell function, so it must be sourced, not executed.
source ~/.nvm/nvm.sh
nvm install
nvm use
npm i -g yarn@1.x
yarn --silent --frozen-lockfile
cd apps-rendering
yarn test
yarn build:client:prod
yarn build:server:prod
yarn copy-manifest
yarn copy-fonts
yarn synth
yarn upload
fi
| true
|
ffde88084e69e0d2f1afc97f08cf2cc8ad9376b5
|
Shell
|
fxthomas/conky
|
/start
|
UTF-8
| 97
| 2.78125
| 3
|
[] |
no_license
|
#!/bin/sh
# Launch one conky instance per configuration file in ~/.conky,
# pausing between launches.
CONKY=/usr/bin/conky
# Bug fix: quoting -- config paths (and $HOME) may contain spaces; the
# glob itself stays outside the quotes so it still expands.
for f in "$HOME"/.conky/*.conf;
do
    "$CONKY" -c "$f"
    sleep 2s
done
| true
|
1030298aeedb89db2d09b67ff481548ab77bdc26
|
Shell
|
thomaslee/spin-debian
|
/debian/tests/cli-tests
|
UTF-8
| 249
| 2.75
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Smoke tests for the spin CLI, driven by shunit2 (sourced at the
# bottom; it discovers and runs the test_* functions).
# Merge stderr into stdout so the harness captures everything.
exec 2>&1
test_spin_usage() {
# `|| true` keeps `spin --bogus`'s non-zero exit from mattering; only
# the captured usage text is checked.
local output="$(spin --bogus 2>&1 || true)"
if ! echo "$output" | grep -q 'use: spin'; then
# `fail` is provided by shunit2.
fail "expected to find 'use: spin' in usage output"
fi
}
# TODO more tests here
. shunit2
| true
|
7e76a99b48f88062b7adcfe7e50ccf951cc79163
|
Shell
|
Odin-CI/dotfiles
|
/config
|
UTF-8
| 339
| 2.71875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Author: Aaron Kuehler
# Purpose: Configures the shell
# Add pretty colors
export CLICOLOR=1
# Setup Ruby Version Manager
# NOTE(review): "DOFILES" looks like a typo for "DOTFILES" -- confirm
# against wherever this variable is exported.
RVM_CONFIG=$DOFILES_THIRD_PARTY_HOME/rvm
# Bug fix: the test operands are quoted; unquoted, a path containing
# whitespace (or an unset variable) broke `[ -f ... ]`.
if [ -f "$RVM_CONFIG" ];
then source "$RVM_CONFIG"
fi
# Setup Git
GIT_CONFIG=$DOFILES_THIRD_PARTY_HOME/git
if [ -f "$GIT_CONFIG" ];
then source "$GIT_CONFIG"
fi
| true
|
0b248956fcb14debd02cc1f68645055d1fab1f93
|
Shell
|
dpolioudakis/scRNAseq_C196_001_002
|
/Merge_FASTQs_Variant_Calling.sh
|
UTF-8
| 1,239
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
# Damon Polioudakis
# 2016-04-21
# Select fastqs for human only capture sites and merge lane 1 and 2 to prepare
# for variant calling
# Suggested script calls:
# qsub -cwd -o logs/Merge_FASTQs_Variant_Calling_QSUB_$(date +%Y%m%d).log -e logs/Merge_FASTQs_Variant_Calling_QSUB_$(date +%Y%m%d).error -S /bin/bash -V -N MergeFQ -q geschwind.q -l h_data=4G,h_rt=12:00:00 Merge_FASTQs_Variant_Calling.sh
################################################################################
echo ""
echo "Starting Merge_FASTQs_Variant_Calling.sh"
echo ""
################################################################################
# Define Input Variables and Functions
inSampleID=../analysis/tables/Human_Only_Capture_Sites_10^5Hs_10^5Mm.txt
outDir=../data/fastq/Merged_For_Variant_Calling
mkdir -p ${outDir}
################################################################################
# Merge
ls ../data/fastq/SxaQSEQsXap089L2/*fastq.gz | while read pathFastq; do
cat "${pathFastq}" ../data/fastq/SxaQSEQsXbp060L2/$(basename ${pathFastq}) > ${outDir}/$(basename ${pathFastq})
done
################################################################################
echo ""
echo "End of Merge_FASTQs_Variant_Calling.sh... "$(date)
| true
|
bc766710ccdcd7e0a4b83b3fe3bcfe19b5575c1b
|
Shell
|
nazoking/git-multi-ssh.sh
|
/git-multi-ssh.sh
|
UTF-8
| 1,409
| 3.921875
| 4
|
[] |
no_license
|
#!/bin/bash
# https://github.com/nazoking/git-multi-ssh.sh/blob/master/git-multi-ssh.sh
# ssh deploy key manager for git command ( ex: for private submodules )
#
# `export GIT_SSH=git-multi-ssh.sh`
#
# and set environment valiable as `DEPLOY_KEY_[name]=[private-key]`
#
# - when repository ssh url is `git@github.com:nazoking/git-multi-ssh.sh.git`, then `DEPLOY_KEY_git_github_com_nazoking_git_multi_ssh_sh_git` ( replace all `[^0-9a-z]` to `_`(under bar))
# - `private-key` is private key for deploy
#
# Abort on the first failing command.
set -e
# Map an arbitrary string to a safe environment-variable name fragment:
# single quotes are deleted, every other character outside [0-9a-z]
# becomes an underscore.
function normalize
{
  printf '%s\n' "$1" | sed -e "s/'//g;s/[^0-9a-z]/_/g"
}
function mk_ssh_key
{
local tmp="$(mktemp)"
echo "$1" |perl -pe 's/\\n/\n/g' > "$tmp"
chmod 0600 "$tmp"
echo "$tmp"
}
# Echo the non-option words of an ssh invocation (host, then the remote
# command words), skipping ssh flags and their option-arguments.
function targets
{
  while [ $# -ne 0 ]
  do
    case "$1" in
      # Options that consume a separate argument.
      # Bug fix: p (port), Q, R, S, W and w also take a value per
      # ssh(1); they were missing, so e.g. `ssh -p 2222 host ...` -- as
      # git emits for URLs with an explicit port -- misparsed the port
      # number as the host.
      -[BbcDEeFIiJLlmOopQRSWw])
        shift 2
        ;;
      # Argument-less flags, or value-taking options with the value attached.
      -[46AaCfGgKkMNnqsTtVvXxYy]|-[BbcDEeFIiJLlmOopQRSWw]*)
        shift
        ;;
      *)
        echo "$1"
        shift
    esac
  done
}
# Original argv, preserved so ssh can be re-invoked with it untouched.
org=("$@")
# Positional (non-option) words: host, git command, repository path.
# NOTE(review): the unquoted $(...) word-splits on whitespace -- safe
# for hosts and git commands, but a repo path with spaces would split.
t=($(targets "$@"))
case "${t[1]}" in
"git-upload-pack"|"git-receive-pack")
# Deploy-key variable name derived from host + repo, e.g.
# DEPLOY_KEY_git_github_com_nazoking_git_multi_ssh_sh_git
h="DEPLOY_KEY_$(normalize "${t[0]}")_$(normalize "${t[2]}")"
# ${!h}: indirect expansion -- the value of the variable named by $h.
if [ -n "${!h}" ];then
echo "[git-multi-ssh]use $h" >&2
tmp=$(mk_ssh_key "${!h}")
# NOTE(review): under `set -e` a failing ssh exits the script here,
# so the temp key file is not removed on failure -- confirm intent.
ssh -i "$tmp" "${org[@]}"
ret=$?
rm $tmp
exit $ret
else
echo "[git-multi-ssh]not found $h" >&2
ssh "${org[@]}"
fi
;;
*)
# Not a git transport command: pass through unchanged.
echo "[git-multi-ssh]unknwon ssh command ${@}" >&2
ssh "$@"
esac
| true
|
9df3c9e4fbcffd3c6961a6cfb237a37b6eb56e52
|
Shell
|
hachibits/dotfiles
|
/install.sh
|
UTF-8
| 2,651
| 2.984375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Bootstrap a development machine: install packages (macOS via
# Homebrew), fetch shell/editor plugin managers, then link dotfiles.
set -eu
platform=$(uname)
if [[ $platform == 'Darwin' ]]; then
# Homebrew
if ! command -v brew &> /dev/null; then
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
fi
echo "Updating homebrew"
brew install --cask xquartz iterm2 keepingyouawake spectacle \
mat visualvm google-backup-and-sync rstudio r mactex osxfuse \
karabiner-elements maccy adoptopenjdk8
brew install \
zsh vim neovim tmux git tectonic wget pure fzf ranger tree \
cmake coreutils cscope exiftool doxygen liboauth \
python@3.9 pyenv anaconda go maven yarn bash-completion \
reattach-to-user-namespace ripgrep vifm
xcode-select --install
# https://github.com/tmux/tmux/issues/1257#issuecomment-581378716
/usr/local/opt/ncurses/bin/infocmp tmux-256color > ~/tmux-256color.info
tic -xe tmux-256color tmux-256color.info
infocmp tmux-256color | head
# gcc/stdc++.h
cd /Library/Developer/CommandLineTools/usr/include
[ ! -d ./bits ] && sudo mkdir bits
# NOTE(review): this is a github.com *blob* URL, so curl fetches an
# HTML page rather than the raw header -- raw.githubusercontent.com
# was probably intended. Confirm before relying on bits/stdc++.h.
curl https://github.com/gcc-mirror/gcc/blob/master/libstdc%2B%2B-v3/include/precompiled/stdc%2B%2B.h > bits/stdc++.h
cd ~
# NOTE(review): brew installed "anaconda" above, but this activates
# ~/miniconda3 -- confirm which conda distribution is expected.
source ~/miniconda3/bin/activate
conda init zsh
conda update -n base -c defaults conda
conda install conda-build
else
rm -f ~/.tmux.conf
# Strip the macOS-only reattach-to-user-namespace lines on Linux.
grep -v reattach-to-user-namespace tmux.conf > ~/.tmux.conf
fi
if [ ! -e ~/.git-prompt.sh ]; then
curl https://raw.githubusercontent.com/git/git/master/contrib/completion/git-prompt.sh -o ~/.git-prompt.sh
fi
if [[ ! -d ~/.fzf ]]; then
echo "Installing fzf"
git clone --depth 1 https://github.com/junegunn/fzf.git ~/.fzf
~/.fzf/install --all
fi
if [[ ! -f ~/.vim/autoload/plug.vim ]]; then
echo "Installing vim-plug"
curl -fLo ~/.vim/autoload/plug.vim --create-dirs \
https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
fi
if [[ ! -f ~/.local/share/nvim/site/autoload/plug.vim ]]; then
echo "Installing vim-plug (nvim)"
sh -c 'curl -fLo ~/.local/share/nvim/site/autoload/plug.vim --create-dirs \
https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim'
fi
if [[ ! -d ~/.tmux/plugins/tpm ]]; then
echo "Installing Tmux Plugin Manager"
git clone https://github.com/tmux-plugins/tpm ~/.tmux/plugins/tpm
fi
if [[ -f ~/.zshrc ]]; then
mkdir -p ~/.zsh/plugins/bd
curl https://raw.githubusercontent.com/Tarrasch/zsh-bd/master/bd.zsh > ~/.zsh/plugins/bd/bd.zsh
# NOTE(review): `print` is a zsh builtin; under this bash shebang the
# line fails and `set -e` then aborts the whole script -- printf was
# probably intended. Confirm.
print -- "\n# zsh-bd\n. \~/.zsh/plugins/bd/bd.zsh" >> ~/.zshrc
fi
tmux source-file ~/.tmux.conf
./makesymlinks.sh
# Headless plugin install for vim and neovim.
vim -es -u ~/.vimrc +PlugInstall +qa
nvim -es -u ~/.config/nvim/init.vim +PlugInstall +qa
| true
|
036719c8bd31c926cafe3b7424cd56459b0ebfce
|
Shell
|
BioContainers/ci
|
/test-built-container
|
UTF-8
| 4,996
| 4.15625
| 4
|
[] |
no_license
|
#!/bin/bash
# Run the test commands for a freshly built BioContainers image and
# report the result back to GitHub.
##################
# Constants {{{1 #
##################
PROG_NAME=$(basename $0)
VERSION=1.0
# NOTE(review): YES is not referenced in the visible portion of this
# file -- presumably used by code further down; confirm.
YES=yes
#########################
# Global variables {{{1 #
#########################
# Verbosity level; each -g on the command line increments it.
DEBUG=0
CONTAINER_IMAGE=
#CMDS_FILE=test_cmds.txt
CMDS_FILE=
TESTDATA_DIR=
REPO="BioContainers/containers"
# CI-provided environment: auth token, commit, PR id, container name.
GITHUB_STATUS_TOKEN=$GITHUB_AUTH_TOKEN
COMMIT=$GIT_COMMIT
PR_ID=$PULL_REQUEST_ID
SOFTWARE=$CONTAINER
# Standard headers for GitHub API requests.
HDR1="Accept: application/vnd.github.v3+json"
HDR2="Authorization: token $GITHUB_STATUS_TOKEN"
###################
# Print help {{{1 #
###################
# Print usage information for this script on stdout.
function print_help {
	cat <<EOF
Usage: $PROG_NAME [options] container_image

	-t, --test-cmds     Path to test-cmds.txt file (and its associated test files).
	-g, --debug         Debug mode.
	-h, --help          Print this help message.
EOF
}
############
# Msg {{{1 #
############
# Print a tagged message on stderr; ERROR messages are fatal.
# Usage: msg TAG [LEVEL] MESSAGE
#   TAG    one of INFO, DEBUG, ERROR (anything else aborts with code 999).
#   LEVEL  optional numeric level: DEBUG lines show only when the global
#          $DEBUG is at least LEVEL; ERROR exits with LEVEL as the status.
function msg {
	local tag=$1
	shift
	local code_level=1
	# An optional purely-numeric argument after the tag overrides the level.
	# (is_numeric is intentionally left global, as in the original code.)
	is_numeric=$(echo $1 | grep '^[0-9]*$')
	if [ -n "$is_numeric" ] ; then
		code_level=$1
		shift
	fi
	local msg=$1
	# Reject empty or unknown tags outright.
	case "$tag" in
		INFO|DEBUG|ERROR) ;;
		*)
			echo "ERROR: Unvalid message tag \"$tag\"." >&2
			exit 999
			;;
	esac
	# Show the message unless it is a DEBUG line above the current verbosity.
	if [ "$tag" != DEBUG ] || [ "$DEBUG" -ge "$code_level" ] ; then
		echo "$tag: $msg" >&2
	fi
	# ERROR is fatal. The bare && also makes non-ERROR calls return 1,
	# matching the original behaviour that callers rely on.
	[ $tag = ERROR ] && exit $code_level
}
#################
# Requires {{{1 #
#################
# Abort (via msg ERROR) unless the given program is on the PATH.
function requires {
	local tool=$1
	if [ -z "$(which $tool)" ] ; then
		msg ERROR "Cannot find $tool. Please install required package."
	fi
}
##################
# Read args {{{1 #
##################
# Parse command-line options into the script globals (DEBUG, CMDS_FILE,
# CONTAINER_IMAGE). Exactly one positional argument — the image — is required.
function read_args {
	local args="$*" # save arguments for debugging purpose
	# Read options
	while true ; do
		shift_count=1
		case $1 in
			-g|--debug) DEBUG=$((DEBUG + 1)) ;;
			-h|--help) print_help ; exit 0 ;;
			-t|--test-cmds) shift; CMDS_FILE=$1 ;;
			-|--|--*) msg ERROR "Illegal option $1." ;;
			-?) msg ERROR "Unknown option $1." ;;
			# Split bundled short options (e.g. -gt) into separate words
			# and re-inject them into the argument list.
			-[^-]*) split_opt=$(echo $1 | sed 's/^-//' | sed 's/\([a-zA-Z]\)/ -\1/g') ; set -- $1$split_opt "${@:2}" ;;
			*) break
		esac
		shift $shift_count
	done
	shift $((OPTIND - 1))
	# Read remaining arguments
	[ $# -eq 1 ] || msg ERROR "You must set one, and only one, container image."
	CONTAINER_IMAGE=$1
	# Debug
	msg DEBUG 1 "Arguments are : $args"
}
########################
# Test container {{{ 1 #
########################
# Run one test command inside the container image.
#   $1 - entrypoint override (empty keeps the image's default entrypoint)
#   $2 - arguments passed to the container
#   $3 - host directory mounted at /biocontainers inside the container
# Returns the exit status of `docker run`.
function test_container {
	local entry=$1
	local run_args=$2
	local mount_dir=$3
	local entry_flag=
	if [ -n "$entry" ] ; then
		entry_flag="--entrypoint=$1"
	fi
	echo "Running the following command:"
	echo "docker run --rm -v $mount_dir:/biocontainers $entry_flag $CONTAINER_IMAGE $run_args"
	docker run --rm -v $mount_dir:/biocontainers $entry_flag $CONTAINER_IMAGE $run_args
	return $?
}
################
# Send comment #
################
# Post a comment on GitHub: on the pull request when PR_ID is set,
# otherwise directly on the commit.
#   $1 - comment body text
function send_comment {
	local comment=$1
	local githuburl="https://api.github.com/repos/$REPO/commits/$COMMIT/comments"
	local header1=$HDR1
	# A pull-request context uses a different endpoint and media type.
	if [ -n "$PR_ID" ] ; then
		header1="Accept:application/vnd.github.v3.raw+json"
		githuburl="https://api.github.com/repos/$REPO/issues/$PR_ID/comments"
	fi
	#echo "Cmd sent:"
	#echo "curl -H '$header1' -H '$HDR2' -d '{\"body\": \"$comment\"}' $githuburl"
	curl -H "$header1" \
		-H "$HDR2" \
		-d "{\"body\": \"$comment\"}" \
		"$githuburl"
}
###############
# Send status #
###############
# Report a commit status to the GitHub statuses API.
#   $1 - software name (unused here; context comes from $SOFTWARE)
#   $2 - status flag: s/success, f/failure, n/none (mapped to "pending")
#   $3 - human-readable description
function send_status {
	local software=$1
	local status=$2
	local msg=$3
	local githuburl="https://api.github.com/repos/$REPO/statuses/$COMMIT"
	case "$status" in
		"s" | "success")
			status="success"
			;;
		"f" | "failure")
			status="failure"
			;;
		"n" | "none")
			status="pending";;
		*)
			msg ERROR "Unknown test status: $status"
			return 1;;
	esac
	# BUG FIX: the JSON payload was originally built BEFORE the case above,
	# so short flags ("s"/"f"/"n") were sent raw instead of the normalized
	# state values the GitHub API expects. Build it after normalization.
	local json="{\"description\": \"$msg\",\"state\": \"$status\",\"context\": \"biocontainers/status/test/$SOFTWARE\"}"
	#echo "Curl command:"
	# echo "curl -H '$HDR1' -H '$HDR2' -d '$json' $githuburl"
	curl -H "$HDR1" \
		-H "$HDR2" \
		-d "$json" \
		"$githuburl"
}
#############
# MAIN {{{1 #
#############
# Requirements: docker must be present to run the image under test.
requires docker
# Parse command-line options into the script globals.
read_args "$@"
# Run each command listed in the test file (one per line) inside the
# container; report the first failure — or overall success — as a GitHub
# commit status. Without a test file, only a comment is posted.
if [ -f "$CMDS_FILE" ] ; then
	# Mount the directory containing the command file so test data is
	# visible inside the container at /biocontainers.
	testdatapath=$(dirname $(realpath "$CMDS_FILE"))
	#echo "Dir to mount: $testdatapath"
	while read line ; do
		# A line starting with a non-dash word names an entrypoint;
		# otherwise the whole line is passed as plain arguments.
		has_entrypoint=$(echo $line | grep '^[^-]')
		if [ -n "$has_entrypoint" ] ; then
			entrypoint=$(echo $line | sed 's/^\([^ ]*\).*$/\1/')
			args=$(echo $line | sed 's/^[^ ]*\(.*\)$/\1/')
		else
			entrypoint=
			args=$line
		fi
		if ! test_container "$entrypoint" "$args" $testdatapath; then
			send_status $CONTAINER_IMAGE "failure" "Testing failed on command: $line"
			exit 1
		fi
	done <"$CMDS_FILE"
	send_status $CONTAINER_IMAGE "success" "All tests successful"
else
	#echo "No $CMDS_FILE (test file) present, skipping tests"
	send_comment "No $CMDS_FILE (test file) present, skipping tests"
fi
| true
|
cfc323c7aa7d566af7c41741d602d03d45bcecd4
|
Shell
|
toke/dotfiles
|
/waybar/.config/waybar/modules/loopy
|
UTF-8
| 89
| 2.625
| 3
|
[] |
no_license
|
#!/bin/bash
set -euo pipefail
# Waybar module stub: emit a heartbeat line every five seconds, forever.
for (( ; ; )); do
	echo "abc $SECONDS"
	sleep 5
done
| true
|
ab2db05c538747038e95a4366ddfc643dab7bd02
|
Shell
|
BigWednesdayIO/kubernetes-deployment-scripts
|
/kubernetes_deploy.sh
|
UTF-8
| 3,630
| 3.578125
| 4
|
[] |
no_license
|
#! /bin/bash
set -e

# Build a Docker image, push it to the GCR registry, and deploy it to a
# Kubernetes (GKE) cluster: the service is created if missing, and the
# replication controller is either created or rolling-updated.
#
# Arguments:
#   $1 image name    $2 label selector   $3 namespace   $4 docker context
#   $5 rc template   $6 service template [$7 additional image tag]
usage="Usage: './kubernetes_deploy.sh image-name selector namespace context rc' e.g. './kubernetes_deploy.sh myImageName app=myApp myNamespace . ./kubernetes/rc.json ./kubernetes/service.json'"

if [[ $# -lt 6 ]]; then
  echo "Incorrect number of arguments, minimum of 6 required";
  echo "$usage";
  exit 1;
fi

IMAGE=$1;
SELECTOR=$2;
NAMESPACE=$3
CONTEXT=$4
RC_FILE=$5
SVC_FILE=$6
ADDITIONAL_TAG=$7   # optional seventh argument

# Exported so the rc/service templates can reference them via {{VAR}}.
export NAMESPACE=$NAMESPACE
export VERSION=${CIRCLE_SHA1:0:7}-ci${CIRCLE_BUILD_NUM}
export QUALIFIED_IMAGE_NAME=${GCLOUD_REGISTRY_PREFIX}gcr.io/${CLOUDSDK_CORE_PROJECT}/${IMAGE}:${VERSION}
export CLOUDSDK_CORE_DISABLE_PROMPTS=1
export CLOUDSDK_PYTHON_SITEPACKAGES=1
export DEPLOYMENT_ID=$CIRCLE_BUILD_NUM

echo "Installing json command line tool"
npm install -g json

echo "Building image ${QUALIFIED_IMAGE_NAME} with context ${CONTEXT}"
docker build -t ${QUALIFIED_IMAGE_NAME} ${CONTEXT}

source $(dirname "$0")/authenticate.sh

echo "Authenticating against cluster"
~/google-cloud-sdk/bin/gcloud container clusters get-credentials $GCLOUD_CLUSTER

echo "Pushing image to registry"
~/google-cloud-sdk/bin/gcloud docker push ${QUALIFIED_IMAGE_NAME} > /dev/null

# BUG FIX: this originally tested `[[ -z $ADDITIONAL_TAG ]]`, i.e. the extra
# image was pushed only when NO tag had been supplied — producing an image
# reference with an empty tag. Push the additional tag only when given.
if [[ -n $ADDITIONAL_TAG ]]; then
  ADDITIONAL_IMAGE=${GCLOUD_REGISTRY_PREFIX}gcr.io/${CLOUDSDK_CORE_PROJECT}/${IMAGE}:${ADDITIONAL_TAG}
  echo "Pushing additional ${ADDITIONAL_IMAGE} image to registry"
  docker tag ${QUALIFIED_IMAGE_NAME} ${ADDITIONAL_IMAGE}
  ~/google-cloud-sdk/bin/gcloud docker push ${ADDITIONAL_IMAGE} > /dev/null
fi

echo "Expanding variables in service config file"
# Substitute {{VAR}} placeholders with the corresponding environment values.
cat ${SVC_FILE} | perl -pe 's/\{\{(\w+)\}\}/$ENV{$1}/eg' > svc.txt

echo "Checking for existing svc"
SVC_NAME=$(cat svc.txt | json metadata.name)
SVC_EXISTS=$(~/google-cloud-sdk/bin/kubectl get svc $SVC_NAME --namespace=${NAMESPACE} || true)
if [[ -z $SVC_EXISTS ]]; then
  echo "Creating svc $SVC_NAME"
  cat svc.txt | ~/google-cloud-sdk/bin/kubectl create --namespace=${NAMESPACE} -f -
else
  echo "svc $SVC_NAME is already deployed"
fi

echo "Checking for existing rc"
RC_QUERY_RESULT=$(~/google-cloud-sdk/bin/kubectl get rc -l ${SELECTOR} --namespace=${NAMESPACE} -o template --template="{{.items}}")
if [[ $RC_QUERY_RESULT == "[]" ]]; then
  echo "Deploying new rc"
  export REPLICAS=1
  cat ${RC_FILE} | perl -pe 's/\{\{(\w+)\}\}/$ENV{$1}/eg' > rc.txt
  echo Checking all required secrets exist
  # Every secret volume referenced by the rc template must already exist,
  # otherwise the deployment would come up with missing mounts.
  SECRETS=$(cat rc.txt | json spec.template.spec.volumes | json -a secret.secretName)
  for s in $(echo $SECRETS | tr " " "\n")
  do
    SECRET_EXISTS=$(~/google-cloud-sdk/bin/kubectl get secret $s --namespace=${NAMESPACE} || true)
    if [[ -z $SECRET_EXISTS ]]; then
      echo "Secret $s does not exist in namespace $NAMESPACE"
      exit 1
    fi
    unset SECRET_EXISTS
  done
  echo "Creating rc"
  cat rc.txt | ~/google-cloud-sdk/bin/kubectl create --namespace=${NAMESPACE} -f -
else
  echo "Performing rc rolling update"
  OLD_RC_NAME=$(~/google-cloud-sdk/bin/kubectl get rc -l ${SELECTOR} --namespace=${NAMESPACE} -o template --template="{{(index .items 0).metadata.name}}")
  echo "Old replication controller name: ${OLD_RC_NAME}"
  # Preserve the current replica count across the rolling update.
  export REPLICAS=$(~/google-cloud-sdk/bin/kubectl get rc ${OLD_RC_NAME} --namespace=${NAMESPACE} -o template --template="{{.spec.replicas}}")
  echo "Current replicas: ${REPLICAS}"
  echo "Expanding variables in rc config file"
  cat ${RC_FILE} | perl -pe 's/\{\{(\w+)\}\}/$ENV{$1}/eg' > rc.txt
  echo "Updating rc"
  cat rc.txt | ~/google-cloud-sdk/bin/kubectl rolling-update ${OLD_RC_NAME} --namespace=${NAMESPACE} -f -
fi
| true
|
a4f4a5b416d029c7e6fadf2d0e36b173c210cba7
|
Shell
|
sjznxd/myluci-11
|
/luci/luci/applications/luci-aria2/root/etc/aria2/post
|
UTF-8
| 203
| 3.421875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Decode URL-encoded text from stdin: '+' -> space, '%XX' -> raw byte.
urldecode(){
	# PORTABILITY FIX: `echo -e` is not portable under /bin/sh (dash prints
	# a literal "-e"); printf '%b' expands the \xHH escapes produced by sed
	# in bash/busybox and behaves consistently.
	printf '%b\n' "$(sed 's/+/ /g; s/%/\\x/g')"
}
# aria2 passes the downloaded file path as $3; rename it so any
# percent-escapes in its name are decoded.
src="$3"
dst="$(dirname "$src")/$(basename "$src" | urldecode)"
[ "$src" != "$dst" ] && mv "$src" "$dst"
| true
|
e87842258becf214ef1f9c690600923701cf8ea8
|
Shell
|
neurabenn/precon_all
|
/bin/N4_pig.sh
|
UTF-8
| 3,860
| 4
| 4
|
[] |
no_license
|
#!/bin/bash
set -e
# Print usage/help text for N4 bias-field correction and exit with status 1.
Usage() {
	cat <<EOF
 
Usage: $(basename $0) [options] -i <T1_image> -x <Binary Mask>

 Compulsory Arguments 
-i <T1.nii.gz> : Image must include nii or nii.gz file extension 
-x <binary brain mask> : Binary brain mask defining region for Bias field. Typically a mask from brain extraction
 
 Optional Arguments
 -o <output_directory> : Output directory. Default is directory of input image
 -s < shrink factor> : specify a shrink factor. Default is 0. For larger images reccomended 2.
 
Example: $(basename $0) -i pig_T1.nii.gz -x pig_binary_mask.nii.gz 
 
EOF
	exit 1
}
# Bias-field correct a masked T1 image with ANTs N4BiasFieldCorrection.
if [ $# -lt 4 ] ; then Usage; exit 0; fi #### check that command is not called empty
NC=$(echo -en '\033[0m') #NC
RED=$(echo -en '\033[00;31m') #Red for error messages
####variable to be filled via options
img=""
mask=""
out=""
shrink=""
#### parse them options
while getopts ":i:x:o:s:" opt ; do
	case $opt in
		i)
		# Input T1 image; must exist and carry a .nii/.nii.gz extension.
		i=1;
		img=`echo $OPTARG`
		if [ ! -f ${img} ];then echo " "; echo "	${RED}CHECK INPUT FILE PATH ${NC}"; Usage; exit 1;fi ### check input file exists
		if [ "${img: -4}" == ".nii" ]  ||  [ "${img: -7}" == ".nii.gz" ] ;then : ; else Usage; exit 1 ;fi
		;;
		x)
		# Binary brain mask; same existence and extension checks as -i.
		x=1;
		mask=`echo $OPTARG`
		if [ ! -f ${mask} ];then echo " "; echo "	${RED}CHECK INPUT FILE PATH ${NC}"; Usage; exit 1;fi ### check input file exists
		if [ "${mask: -4}" == ".nii" ]  ||  [ "${mask: -7}" == ".nii.gz" ] ;then : ; else Usage; exit 1 ;fi
		;;
		o)
		# Optional output directory (created under the input image's dir);
		# inputs are copied into it so originals stay untouched.
		out=`echo $OPTARG`
		out=$(dirname $img)/${out}
		if  [ -d ${out} ];then : ; else mkdir ${out} ;fi
		cp ${img} ${out}/
		cp ${mask} ${out}/
		;;
		s)
		shrink=`echo $OPTARG` #### shrink factor for B field correctoin in ants. Recommended 0 for small images. Maximum 4.
		if [ ${shrink} -le 4 ] && [ ${shrink} -ge 0 ];then
			:
		else
			echo " ${RED}SHRINK FACTOR MUST BE BETWEEN 0-4${NC}"
			Usage
			exit 1
		fi
		;;
		\?)
		echo "Invalid option: -$OPTARG" >&2
		Usage
		exit 1
		;;
	esac
done
# Both -i and -x are mandatory.
if [ ${i} -eq 1 ];then : ; else echo "${RED}-i is required input ${NC}" ; Usage; exit 2;fi
if [ ${x} -eq 1 ];then : ; else echo "${RED}-x is required input ${NC}" ; Usage; exit 2;fi
if [ "${out}" == "" ];then out=$(dirname $img) ;fi ##### default output folder is the
#set output directory
if [ "${out}" == $(dirname $img) ];then
	img=${img}
	mask=${mask}
else
	cp ${img} ${out}
	img=${out}/$(basename ${img})
	mask=${out}/$(basename ${mask})
fi
# Ensure both inputs are gzipped so downstream tools get .nii.gz paths.
if [ "${img: -4}" == ".nii" ];then gzip ${img}; img=${img/.nii/.nii.gz};fi
if [ "${mask: -4}" == ".nii" ];then gzip ${mask}; mask=${mask/.nii/.nii.gz};fi
####mask the T1 image to remove any remaining non brain voxels####
$FSLDIR/bin/fslmaths $img -mas $mask $img
echo ${img}
echo ${mask}
#### truncate image instensity prior to bias field correction
${ANTSPATH}/ImageMath 3 ${img/.nii.gz/}_0N4.nii.gz TruncateImageIntensity $img 0.025 0.995 256 $mask 1
T1=${img/.nii.gz/}_0N4.nii.gz
echo ${T1}
# reregister your brain mask to the brain to be safe in the N4 call
# Perform Bias field correction
# NOTE(review): the echoed command includes `-x $mask` but the command that
# actually runs omits it — confirm whether the mask should be passed to N4.
if [[ ${shrink} == "" ]];then
	echo ${ANTSPATH}/N4BiasFieldCorrection -d 3 -i $T1 -x $mask -c [100x100x100x100,0.0000000001] -b [200] -o $T1 --verbose 0
	${ANTSPATH}/N4BiasFieldCorrection -d 3 -i $T1  -c [100x100x100x100,0.0000000001] -b [200] -o $T1 --verbose 0
	fslmaths ${T1} -mas ${mask} ${T1}
else
	echo ${ANTSPATH}/N4BiasFieldCorrection -d 3 -i $T1 -x $mask -s ${shrink} -c [100x100x100x100,0.0000000001] -b [200] -o $T1 --verbose 0
	${ANTSPATH}/N4BiasFieldCorrection -d 3 -i $T1  -s ${shrink} -c [100x100x100x100,0.0000000001] -b [200] -o $T1 --verbose 0
	fslmaths ${T1} -mas ${mask} ${T1}
fi
| true
|
82c85a5648e7f28ce0801ecba7a6a7089f7e483a
|
Shell
|
azroger/cyverse_agave_apps
|
/htprocess_jellyfish_test.sh
|
UTF-8
| 8,940
| 2.78125
| 3
|
[] |
no_license
|
#!/bin/bash
#$ -V #Inherit the submission environment
#$ -cwd # Start job in submission directory
#$ -N jellytest2 # Job Name
#$ -A iPlant-Collabs
#$ -j y # Combine stderr and stdout
#$ -o $JOB_NAME.o$JOB_ID # Name of the output file (eg. myMPI.oJobID)
#$ -pe 12way 24 # Requests 12 tasks/node, 24 cores total (16 now)
#$ -q development # Queue name "normal"
#$ -l h_rt=1:00:00 # Run time (hh:mm:ss) - 1.5 hours (8 hrs now)
#$ -M rogerab@email.arizona.edu # Use email notification address
#$ -m be # Email at Begin and End of job

# Count k-mers in a set of reads with jellyfish for up to three k values,
# render each k-mer histogram with R at three y-axis scales, and assemble
# the plots into one HTML summary table (jellyfish_summary.html).
# REFACTOR: the original triplicated the count/plot and HTML-row code per
# k-mer; it is factored into analyze_kmer() and html_row() below.

set -x # Echo commands, use "set echo" with csh
#source ~/.profile_user
module swap intel gcc/4.4.5
module load iRODS
module load R

# Fetch the jellyfish binary and the input reads from iRODS.
iget /iplant/home/rogerab/applications/jellyfish2/bin/jellyfish
Input_Dir="/iplant/home/rogerab/testfiles/BAreads"
iget -r "${Input_Dir}"
INPUTS=$(basename ${Input_Dir})

kmer1="25"
kmer2="29"
kmer3="35"

# Y-axis maxima used for the three renderings of each histogram.
YLIMS="1e7 5e8 1e10"

# analyze_kmer KMER HISTOFILE
# Count KMER-mers over all input files, write the histogram to HISTOFILE,
# and render it with R once per scale in $YLIMS, producing
# rplot<KMER>_<i>.png plus a base64 copy rplot<KMER>_<i>.b64 for embedding.
analyze_kmer() {
  local kmer=$1
  local histo=$2
  local idx=0
  local ymax name
  jellyfish count -m "${kmer}" -s 1000000000 -t 23 -o 'jelly'"${kmer}" "${INPUTS}"/*
  jellyfish histo 'jelly'"${kmer}"* -t 23 -o "$histo"
  for ymax in $YLIMS; do
    idx=$((idx + 1))
    name="${kmer}_${idx}"
    {
      echo "dat=read.table(\"$histo\")"
      echo "png('rplot$name.png')"
      echo "barplot(dat[,2], xlim=c(0,75), ylim=c(0,$ymax), ylab=\"No of kmers\", xlab=\"Counts of a k-mer\", names.arg=dat[,1], cex.names=0.8)"
      echo 'dev.off()'
      echo 'q()'
      echo 'n'
    } > jellyfish.r
    R --vanilla -q < jellyfish.r
    openssl base64 -in rplot$name.png -out rplot$name.b64
  done
}

# html_row KMER
# Append one table row to jellyfish_summary.html embedding the three
# base64-encoded plots generated by analyze_kmer for this k-mer size.
html_row() {
  local kmer=$1
  local i name image graphcontent
  echo '    <tr>' >> jellyfish_summary.html
  echo '      <td><b>kmer='"$kmer"' </b></td>' >> jellyfish_summary.html
  for ((i=1; i < 4; i += 1)); do
    name="rplot${kmer}_${i}.b64"
    image=$(cat $name)
    graphcontent="<h3 id=$name >"'<li><img src="data:image/png;base64,'"$image"'"</li></h3>'
    echo "      <td><b>$graphcontent</b></td>" >> jellyfish_summary.html
  done
  echo '      <br/>' >> jellyfish_summary.html
  echo '    </tr>' >> jellyfish_summary.html
}

# Run the analysis for each configured k-mer size (kmer2/kmer3 optional).
analyze_kmer "${kmer1}" jellyhistoA
[[ -n $kmer2 ]] && analyze_kmer "${kmer2}" jellyhistoB
[[ -n $kmer3 ]] && analyze_kmer "${kmer3}" jellyhistoC

echo "Starting creation of summary file for Jellyfish Results"
echo '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Strict//EN">' > jellyfish_summary.html
# BUG FIX: the original emitted unterminated '<html' and '</html' tags.
echo '<html>' >> jellyfish_summary.html
echo '<head><title>Summary of Fastqc Reports</title>' >> jellyfish_summary.html
echo '<style type="text/css">' >> jellyfish_summary.html
echo ' body { font-family: sans-serif; color: #0098aa; background-color: #FFF; font-size: 100%; border: 0; margin: 0; padding: 0; }' >> jellyfish_summary.html
echo ' h1 { font-family: sans-serif; color: #0098aa; background-color: #FFF; font-size: 300%; font-weight: bold; border: 0; margin: 0; padding: 0; }' >> jellyfish_summary.html
echo ' h2 { font-family: sans-serif; color: #0098aa; background-color: #FFF; font-size: 200%; font-weight: bold; border: 0; margin: 0; padding: 0; }' >> jellyfish_summary.html
echo ' h3 { font-family: sans-serif; color: #0098aa; background-color: #FFF; font-size: 40%; font-weight: bold; border: 0; margin: 0; padding: 0; }' >> jellyfish_summary.html
echo ' .TFtable tr:nth-child(even){ background: #D2DADC; }' >> jellyfish_summary.html
echo ' </style>' >> jellyfish_summary.html
echo ' </head>' >> jellyfish_summary.html
echo ' <h1> Summary of Jellyfish Results </h1>' >> jellyfish_summary.html
echo ' <br/>' >> jellyfish_summary.html
echo ' <br/>' >> jellyfish_summary.html
echo ' <br/>' >> jellyfish_summary.html
echo ' <body> ' >> jellyfish_summary.html
echo ' <table border="1" cellpadding="10" bgcolor="white" class="TFtable">' >> jellyfish_summary.html
echo '    <tr>' >> jellyfish_summary.html
echo '      <td><b>Kmer Value</b></td>' >> jellyfish_summary.html
echo '      <td><b>Max = 1e7 </b></td>' >> jellyfish_summary.html
echo '      <td><b>Max = 5e8 </b></td>' >> jellyfish_summary.html
echo '      <td><b>Max = 1e10 </b></td>' >> jellyfish_summary.html
echo '    </tr>' >> jellyfish_summary.html
html_row "$kmer1"
[[ -n $kmer2 ]] && html_row "$kmer2"
[[ -n $kmer3 ]] && html_row "$kmer3"
echo '</table>' >> jellyfish_summary.html
echo '</body>' >> jellyfish_summary.html
echo '</html>' >> jellyfish_summary.html
echo "The summary file for Jellyfish has been created."
| true
|
8764490863cf2c6ba7bc109b2dd889eddf0d2343
|
Shell
|
veltzer/bashy
|
/core/measure.bash
|
UTF-8
| 330
| 2.96875
| 3
|
[
"MIT"
] |
permissive
|
# Time a function call with sub-second resolution.
#   $1 - name of the variable receiving the elapsed seconds (via nameref)
#   $2 - name of the function to invoke
#   $3 - name of the first out-variable the function writes (via nameref)
#   $4 - name of the second out-variable the function writes (via nameref)
function measure() {
	local -n _out_duration=$1
	local fn=$2
	local -n _ret_a=$3
	local -n _ret_b=$4
	local t0 t1 delta
	t0=$(date +%s.%N)
	# The callee receives our nameref names, which chain through to the
	# caller's variables.
	"${fn}" _ret_a _ret_b
	t1=$(date +%s.%N)
	# bc handles the fractional-seconds subtraction.
	delta=$(echo "${t1} - ${t0}" | bc -l)
	_out_duration="${delta}"
}
| true
|
56a01bfbc958959c28fb735515baec76d18c8410
|
Shell
|
Juravenator/submarine-cables-backup
|
/stitch.sh
|
UTF-8
| 487
| 3.46875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
set -o errexit -o nounset -o pipefail
IFS=$'\n\t\v'
# Work from the directory that contains this script.
cd $(dirname "${BASH_SOURCE[0]:-$0}")

# The tiles/ tree (tiles/<row>/<col files>) must exist before stitching.
if [[ ! -d "tiles" ]]; then
	echo "run scrape.sh first"
	exit 1
fi

# Grid dimensions: columns = files in the first row, rows = row directories.
x_cnt=$(find tiles/0 -type f | wc -l)
y_cnt=$(find tiles -mindepth 1 -maxdepth 1 -type d | wc -l)
echo "generating a ${x_cnt}x${y_cnt} grid"
# Stitch every tile (version-sorted so 10 follows 9) into a single image.
gm montage +frame +shadow -tile ${x_cnt}x${y_cnt} -geometry +0+0 $(find tiles -type f | sort -V) out.png
echo "written to out.png"
| true
|
cbd9b3f994fbf6a6116106edf784dbe800955f6a
|
Shell
|
Ghostcode75/Bash-Tools
|
/GIT/git-getall.sh
|
UTF-8
| 570
| 3.46875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Clone every repository of a GitHub organisation into ~/Repositories/<org>.
# NOTE(review): credentials here are placeholders; passing them via `curl -u`
# exposes them to `ps` — consider a token or a .netrc file instead.
USERNAME='username'
PASSWORD='password'
ORGSNAME='organisation'
# Suffix appended to the github.com host so an ssh_config alias is used.
SSHBYPAS='-work'

# Let the user load an ssh key interactively before cloning starts.
ssh-add -l ; read -p "If your key is not present please add, otherwise type [n]: " load
[[ $load != n ]] && ssh-add $(eval echo $load)

mkdir -p ~/Repositories/${ORGSNAME} \
  && cd $_

# Ask the GitHub API for every repo's ssh URL, optionally rewrite the host
# to the aliased form, and clone each one.
for REPO in $(curl -sSL -u "${USERNAME}:${PASSWORD}" \
  https://api.github.com/orgs/${ORGSNAME}/repos \
  |jq -r '.[] .ssh_url') ; do
  [[ $SSHBYPAS = *[![:space:]]* ]] && {
    REPO=$(echo $REPO |sed "s/github\.com/github\.com${SSHBYPAS}/")
  }
  git clone $REPO
done
| true
|
c11e6a0e5630456a19c526930f2099984188d1bb
|
Shell
|
flxndn/bin
|
/fecha_foto.sh
|
UTF-8
| 400
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
# For each file argument, print its modification date as yyyy/mm/dd
# (format suitable for shotwell-style archive paths).
if [ x$1 = "x-h" ]; then
echo "
 * $(basename $0)
 * Uso
   > $(basename $0) fichero1 [fichero2 ...]
 * Descripción
   Para cada uno de los ficheros usados como parámetros devuelve una
   cadena con la fecha de modificación de la forma aaaa/mm/dd.
   Apto para almacenar como lo hace shotwell."
	exit 0
fi
# ls --full-time prints the full timestamp; collapse repeated spaces, take
# field 6 (the date), and convert the dashes to slashes.
ls --full-time $* \
	| sed "s/  */ /g" \
	| cut -f6 -d' ' \
	| sed "s/-/\//g"
| true
|
a765655a4d92796c8661126f985ec6ad96b0f1a4
|
Shell
|
Perpedes/hill_climbing_sailboat_controller
|
/utils/mcu_start_all_processes.sh
|
UTF-8
| 533
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/bash
# Launch every sailboat MCU component detached in the background, logging
# each one under sailboat-log/system/.
mkdir -p sailboat-log/system

# start_one LABEL NAME — announce LABEL, then run mcu_launcher_NAME.sh
# via nohup with its output captured in NAME.log.
start_one() {
	echo "-> Starting $1.."
	nohup sh "mcu_launcher_$2.sh" > "sailboat-log/system/$2.log" &
}

start_one "U200 process" u200
start_one "ACTUATORS process" actuators
start_one "CONTROLLER" controller
start_one "XBEE SERVER" xbee_server

sleep 1
echo -e "\n.. all processes are up and running.\n"
| true
|
7070fbdd8de3ccd68f611a437604a7cb6b7af50d
|
Shell
|
andreeaDumitrache29/AA_homework4
|
/checker.sh
|
UTF-8
| 879
| 3.71875
| 4
|
[] |
no_license
|
#!/bin/bash
# Grading checker: build the project, feed each input under ./input to
# `make run`, compare its output against ./ref via test.py, and award an
# equal share of 100 points per passing test.

input_dir=./input/
output_dir=./output/
ref_dir=./ref/

tests_nr=$(ls $input_dir | wc -w)
points_per_test=$((100/tests_nr))

make build
if [ $? -ne 0 ]; then
	# BUG FIX: the original line was the bare string "Makefile failed.",
	# which bash tried to execute as a command — the `echo` was missing.
	echo "Makefile failed."
else
	total_points=0
	for ((i=1; i<=$tests_nr; i++)); do
		printf "Test $i: "
		# The program reads ./test.in and writes ./test.out by convention.
		cp $input_dir/test${i}.in ./test.in
		make run &>/dev/null
		if [ $? -eq 0 ]; then
			mv test.out $output_dir/test${i}.out
			# test.py prints "1" when the two outputs are equivalent.
			equiv=$(python3 test.py $output_dir/test${i}.out $ref_dir/test${i}.ref)
			if [ "$equiv" == "1" ]; then
				printf "Success"
				total_points=$((total_points+points_per_test))
			else
				printf "Fail"
			fi
		else
			printf "Run target failed"
		fi
		rm ./test.in
		printf "\n"
	done
fi
echo "Total: " $total_points
make clean
| true
|
28e6248e992e94caac321ee7e1174cf377200ac9
|
Shell
|
alisw/alidist
|
/autotools.sh
|
UTF-8
| 5,184
| 3.515625
| 4
|
[] |
no_license
|
package: autotools
version: "%(tag_basename)s"
tag: v1.6.3
source: https://github.com/alisw/autotools
prefer_system: "(?!slc5|slc6)"
prefer_system_check: |
export PATH=$PATH:$(brew --prefix gettext || true)/bin:$(brew --prefix texinfo || true)/bin;
which autoconf && which m4 && which automake && which makeinfo && which aclocal && which pkg-config && which autopoint && which libtool;
if [ $? -ne 0 ]; then printf "One or more autotools packages are missing on your system.\n * On a RHEL-compatible system you probably need: autoconf automake texinfo gettext gettext-devel libtool\n * On an Ubuntu-like system you probably need: autoconf automake autopoint texinfo gettext libtool libtool-bin pkg-config\n * On macOS you need: brew install autoconf automake gettext pkg-config"; exit 1; fi
prepend_path:
PKG_CONFIG_PATH: $(pkg-config --debug 2>&1 | grep 'Scanning directory' | sed -e "s/.*'\(.*\)'/\1/" | xargs echo | sed -e 's/ /:/g')
build_requires:
- termcap
- make
---
#!/bin/bash -e
# Build and install the GNU autotools chain (m4, autoconf, libtool, gettext,
# automake, pkg-config, optionally help2man) into $INSTALLROOT.
unset CXXFLAGS
unset CFLAGS
export EMACS=no
# slc6 tarballs are not autoreconf'd; everywhere else regenerate build files.
case $ARCHITECTURE in
  slc6*) USE_AUTORECONF=${USE_AUTORECONF:="false"} ;;
  *) USE_AUTORECONF=${USE_AUTORECONF:="true"} ;;
esac
echo "Building ALICE autotools. To avoid this install autoconf, automake, autopoint, texinfo, pkg-config."
# Restore original timestamps to avoid reconf (Git does not preserve them)
pushd $SOURCEDIR
./missing-timestamps.sh --apply
popd
rsync -a --delete --exclude '**/.git' $SOURCEDIR/ .
# Use our auto* tools as we build them
export PATH=$INSTALLROOT/bin:$PATH
export LD_LIBRARY_PATH=$INSTALLROOT/lib:$LD_LIBRARY_PATH
# help2man (built only when its source directory is present)
if pushd help2man*; then
  ./configure --disable-dependency-tracking --prefix $INSTALLROOT
  make ${JOBS+-j $JOBS}
  make install
  hash -r
  popd
fi
# m4 -- requires: nothing special
pushd m4*
# texinfo uses utf-8 by default, but doc/m4.text is still iso-8859-1.
# MacOS sed only understands the command with the linebreaks like this.
sed -i.bak '1i\
@documentencoding ISO-8859-1
' doc/m4.texi
rm -f doc/m4.texi.bak
$USE_AUTORECONF && autoreconf -ivf
./configure --disable-dependency-tracking --prefix $INSTALLROOT
make ${JOBS+-j $JOBS}
make install
hash -r
popd
# autoconf -- requires: m4
# FIXME: is that really true? on slc7 it fails if I do it the other way around
# with the latest version of autoconf / m4
pushd autoconf*
$USE_AUTORECONF && autoreconf -ivf
./configure --prefix $INSTALLROOT
make MAKEINFO=true ${JOBS+-j $JOBS}
make MAKEINFO=true install
hash -r
popd
# libtool -- requires: m4
pushd libtool*
./configure --disable-dependency-tracking --prefix $INSTALLROOT --enable-ltdl-install
make ${JOBS+-j $JOBS}
make install
hash -r
popd
# Do not judge me. I am simply trying to float.
# Apparently slc6 needs a different order compared
# to the rest: automake must be built before gettext there.
case $ARCHITECTURE in
  slc6*|ubuntu14*)
    # automake -- requires: m4, autoconf, gettext
    pushd automake*
    $USE_AUTORECONF && [ -e bootstrap ] && sh ./bootstrap
    ./configure --prefix $INSTALLROOT
    make MAKEINFO=true ${JOBS+-j $JOBS}
    make MAKEINFO=true install
    hash -r
    popd
  ;;
  *) ;;
esac
# gettext -- requires: nothing special
pushd gettext*
$USE_AUTORECONF && autoreconf -ivf
./configure --prefix $INSTALLROOT \
            --without-xz \
            --without-bzip2 \
            --disable-curses \
            --disable-openmp \
            --enable-relocatable \
            --disable-rpath \
            --disable-nls \
            --disable-native-java \
            --disable-acl \
            --disable-java \
            --disable-dependency-tracking \
            --without-emacs \
            --disable-silent-rules
make ${JOBS+-j $JOBS}
make install
hash -r
popd
# Do not judge me. I am simply trying to float.
# (Mirror of the case above: everywhere except slc6/ubuntu14 automake is
# built after gettext.)
case $ARCHITECTURE in
  slc6*|ubuntu14*) ;;
  *)
    # automake -- requires: m4, autoconf, gettext
    pushd automake*
    $USE_AUTORECONF && [ -e bootstrap ] && sh ./bootstrap
    ./configure --prefix $INSTALLROOT
    make MAKEINFO=true ${JOBS+-j $JOBS}
    make MAKEINFO=true install
    hash -r
    popd
  ;;
esac
# pkgconfig -- requires: nothing special
pushd pkg-config*
OLD_LDFLAGS="$LDFLAGS"
[[ ${ARCHITECTURE:0:3} == osx ]] && export LDFLAGS="$LDFLAGS -framework CoreFoundation -framework Carbon"
./configure --disable-debug \
            --prefix=$INSTALLROOT \
            --disable-host-tool \
            --with-internal-glib
export LDFLAGS="$OLD_LDFLAGS"
make ${JOBS+-j $JOBS}
make install
hash -r
popd
# We need to detect OSX becase xargs behaves differently there
XARGS_DO_NOT_FAIL='-r'
[[ ${ARCHITECTURE:0:3} == osx ]] && XARGS_DO_NOT_FAIL=
# Fix perl location, required on /usr/bin/perl
grep -l -R -e '^#!.*perl' $INSTALLROOT | \
  xargs ${XARGS_DO_NOT_FAIL} -n1 sed -ideleteme -e 's;^#!.*perl;#!/usr/bin/perl;'
find $INSTALLROOT -name '*deleteme' -delete
grep -l -R -e 'exec [^ ]*/perl' $INSTALLROOT | \
  xargs ${XARGS_DO_NOT_FAIL} -n1 sed -ideleteme -e 's;exec [^ ]*/perl;exec /usr/bin/perl;g'
find $INSTALLROOT -name '*deleteme' -delete
# Pretend we have a modulefile to make the linter happy (don't delete)
#%Module
| true
|
ec07ae5a8b9ce7badbbddc55bfd9767b8d6177e9
|
Shell
|
zanewestover/dotfiles
|
/bash/environment.sh
|
UTF-8
| 4,326
| 2.96875
| 3
|
[] |
no_license
|
# Make sure bash can find .inputrc
# NOTE(review): the tilde inside double quotes is not expanded by bash, so
# readline receives the literal path "~/.inputrc" — confirm this is intended.
test -n "$INPUTRC" || export INPUTRC="~/.inputrc"

# Collect some info about the local system.
# BUG FIX: the original wrote "$($uname -n)" etc. — $uname is an (empty)
# variable, so the command substitution tried to execute "-n" and
# HOST/OS/UNAME came out empty. Call the uname command directly.
export HOST="$(uname -n)"
export OS="$(uname -s)"
test -n "$UNAME" || export UNAME="$(uname)"

# Tunnel CVS via SSH
export CVS_RSH="ssh"

export IRCNAME="dan"

# Erase duplicate entries from history
export HISTCONTROL="erasedups"
# Increase history size
export HISTSIZE="10000"

# For Python Pygments on Dreamhost
test -d "$HOME/local/pygments-install" && \
  export PYTHONPATH="$HOME/local/pygments-install"

# Need local terminfo files for xterm-256color on some platforms.
# Yeah, I'm looking at you, Solaris.
test -d "$HOME/.terminfo" && export TERMINFO="$HOME/.terminfo"
#######################################################################
# Command search path

# List the paths matching glob $1 in reverse lexical order (so the highest
# version sorts first); print nothing when the glob matches no path.
_rsort() {
  local sorter="/bin/sort"
  if [[ -x /usr/bin/sort ]]; then
    sorter="/usr/bin/sort"
  fi
  # $1 is intentionally unquoted so the caller's glob pattern expands here.
  /bin/ls -d $1 2>/dev/null | $sorter -r
}
# Rebuild PATH from scratch: each candidate directory below is appended
# only if it exists on this machine, so one list serves many hosts/OSes.
# Order matters — earlier entries win command lookups.  The _rsort calls
# pick the newest versioned Homebrew Cellar bin dirs first.
unset PATH
for _this in \
~/local/bin \
~/bin \
~/.rbenv/bin \
$(_rsort "/usr/local/Cellar/ruby/*/bin") \
~/.gems/bin \
$(_rsort "$HOME/.gem/ruby/*/bin") \
/admin/bin \
/usr/local/bin \
/bin \
/usr/bin \
/usr/ccs/bin \
/sbin \
/usr/sbin \
/usr/local/sbin \
/usr/proc/bin \
/usr/openwin/bin \
/usr/dt/bin \
/admin/tools/system \
/admin/tools/mail/{bin,sbin} \
/admin/config/auth/bin \
/usr/local/pkg/perl/bin \
/usr/local/pkg/ruby/bin \
/usr/local/pkg/mailman/bin \
/usr/lib/mailman/bin \
/usr/local/pkg/mysql/bin \
/usr/local/pkg/pgsql/bin \
/usr/local/pkg/openldap/{bin,sbin} \
/opt/openldap/{bin,sbin} \
/usr/sfw/bin \
/usr/X11R6/bin \
$(_rsort "/usr/local/Cellar/python3/*/bin") \
$(_rsort "/usr/local/Cellar/python/*/bin") \
~/tools
do
# Append only directories that exist; the first hit seeds PATH so no
# stray leading/trailing colon is produced.
test -d $_this && {
test -n "$PATH" && PATH="$PATH:$_this" || PATH="$_this"
}
done
export PATH
#######################################################################
# Now that PATH is set, detect the presence of various tools.
# Each HAVE_* holds the tool's absolute path, or empty if absent.
HAVE_ACK=$(command -v ack 2>/dev/null)
HAVE_LESS=$(command -v less 2>/dev/null)
HAVE_MVIM=$(command -v mvim 2>/dev/null)
HAVE_SCREEN=$(command -v screen 2>/dev/null)
HAVE_TMUX=$(command -v tmux 2>/dev/null)
HAVE_VIM=$(command -v vim 2>/dev/null)
HAVE_VIMDIFF=$(command -v vimdiff 2>/dev/null)
#######################################################################
# Man page search path — built the same way as PATH above: only
# directories that exist are appended, in priority order.
unset MANPATH
for _this in \
/usr/man \
/usr/share/man \
/usr/local/man \
/usr/local/share/man \
/usr/local/pkg/perl/man \
/usr/dt/man \
/usr/openwin/man \
/usr/sfw/man \
~/local/man \
~/local/share/man
do
test -d $_this && {
test -n "$MANPATH" && MANPATH="$MANPATH:$_this" || MANPATH="$_this"
}
done
export MANPATH
#######################################################################
# Editor — prefer vim when installed, fall back to plain vi.
test -n "$HAVE_VIM" && EDITOR="$HAVE_VIM" || EDITOR="vi"
VISUAL="$EDITOR"
export EDITOR VISUAL
# Set vim paths — alias vi/view to the detected vim binary.
test -n "$HAVE_VIM" && {
alias vi="$HAVE_VIM"
alias vim="$HAVE_VIM"
alias view="$HAVE_VIM -R"
}
# Set vim paths for diff mode (-O: vertical splits).
test -n "$HAVE_VIMDIFF" && {
alias vimdiff="$HAVE_VIMDIFF -O"
alias vdiff="$HAVE_VIMDIFF -O"
}
#######################################################################
# Pagers
# Options for less:
#   -F  Exit automatically if entire file fits on one screen
#   -i  Ignore case in searches (capitals are honored)
#   -r  Show raw control characters, instead of e.g. ^X
#   -R  Let ANSI color codes come through raw, others use ^X
#   -s  Squeeze consecutive blank lines into one line
#   -w  Hilight first new line on a new screenful
#
# NOTE: -F seems to break things on some systems I use; avoiding it.
if test -n "$HAVE_LESS" ; then
PAGER="less -iRw"
MANPAGER="less -iRsw"
alias more="less"
# ack pipes matches through its own pager; keep color codes raw.
test -n "$HAVE_ACK" && {
export ACK_PAGER="less -R"
export ACK_PAGER_COLOR="less -R"
}
else
PAGER=more
MANPAGER="$PAGER"
fi
LESS="-iRw"
export PAGER MANPAGER LESS
## Oracle — later test wins, so 10.2.0 overrides 9.2.0 when both exist.
test -d /apps/oracle/product/9.2.0 &&
export ORACLE_HOME="/apps/oracle/product/9.2.0"
test -d /apps/oracle/product/10.2.0 &&
export ORACLE_HOME="/apps/oracle/product/10.2.0"
# Initialize rbenv shims if installed.
test -f ~/.rbenv/bin/rbenv && eval "$(rbenv init -)"
| true
|
42f3302aefb486472a55ce3a09cfe9fd05a4fd1f
|
Shell
|
mpetri/RoSA
|
/scripts/generate_patterns.sh
|
UTF-8
| 1,671
| 3.140625
| 3
|
[] |
no_license
|
#!/bin/bash
# Generate query-pattern files of several lengths for an input text using
# the rosa_sd2_delta tool.  One invocation per pattern length; all other
# parameters are shared.
threshold=4096
fac_dens=64
rosa_exec="../bin/rosa_sd2_delta"
tmp_dir="../tmp/"
outdir="../data/output/"
poccmin=50
poccmax=100
if [ $# -lt 1 ]; then
    echo "Usage: ${0} file [threshold] [fac_dens]"
    echo "    file     : File containing the example string"
    echo "    threshold: Block threshold; default=${threshold}"
    echo "    fac_dens : Sampling parameter for factorization pointer; default=${fac_dens}"
    exit 1
fi
input=${1}
# Fix: --threshold previously hard-coded 4096 instead of using the
# ${threshold} variable advertised in the usage text (same value today,
# but now a single point of change).
# NOTE(review): --fac_dens is pinned to 1 even though fac_dens=64 is
# declared above — kept as-is to preserve behavior; confirm intent.
for plen in 4 10 20 40 100; do
    ${rosa_exec} --input_file=$input --threshold=$threshold --fac_dens=1 --generate_patterns --pattern_len=$plen --pattern_number=1000 --pattern_min_occ=$poccmin --pattern_max_occ=$poccmax --tmp_file_dir=$tmp_dir --output_dir=$outdir
done
| true
|
e8b1bb0c37c3623fa26103c240e5f30751d82233
|
Shell
|
nuxlli/azk-crystal
|
/src/libexec/setup_circle.sh
|
UTF-8
| 313
| 2.875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Build the Crystal "shards" dependency manager (cloning the repo first if
# needed) and install the resulting binary into /usr/local/bin.
clone_dir="$(pwd)/shards"
if [[ ! -e $clone_dir/bin/shards ]]; then
    # Clone only when the checkout is missing; a partial build re-uses it.
    [[ -d $clone_dir ]] || git clone https://github.com/crystal-lang/shards $clone_dir
    cd $clone_dir
    crystal build src/shards.cr -o bin/shards --release
fi
sudo install -m 755 $clone_dir/bin/shards /usr/local/bin
| true
|
602bc348eada5cd226763bc401e605d29a1fe23c
|
Shell
|
jemacchi/docker.sdi.samples
|
/compose/nginx-proxy-geocluster/jetty/bin/startup-admin.sh
|
UTF-8
| 4,695
| 3.546875
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# -----------------------------------------------------------------------------
# Start Script for GEOSERVER
#
# $Id$
# -----------------------------------------------------------------------------
# Guard against misconfigured JAVA_HOME: if it is set, it must actually
# contain an executable bin/java.
if [ ! -z "$JAVA_HOME" -a ! -x "$JAVA_HOME"/bin/java ]; then
echo "The JAVA_HOME environment variable is set but JAVA_HOME/bin/java"
echo "is missing or not executable:"
echo " JAVA_HOME=$JAVA_HOME"
echo "Please either set JAVA_HOME so that the Java runtime is JAVA_HOME/bin/java"
echo "or unset JAVA_HOME to use the Java runtime on the PATH."
exit 1
fi
# Find java from JAVA_HOME or PATH; _RUNJAVA is the command used at the end.
if [ ! -z "$JAVA_HOME" ]; then
_RUNJAVA="$JAVA_HOME"/bin/java
elif [ ! -z "$(which java)" ]; then
_RUNJAVA=java
else
echo "A Java runtime (java) was not found in JAVA_HOME/bin or on the PATH."
echo "Please either set the JAVA_HOME environment variable so that the Java runtime"
echo "is JAVA_HOME/bin/java or add the Java runtime to the PATH."
exit 1
fi
# Locate GEOSERVER_HOME: if unset, probe for start.jar in the current
# directory, then one level up, before giving up.
if [ -z $GEOSERVER_HOME ]; then
#If GEOSERVER_HOME not set then guess a few locations before giving
# up and demanding user set it.
if [ -r start.jar ]; then
echo "GEOSERVER_HOME environment variable not found, using current "
echo "directory.  If not set then running this script from other "
echo "directories will not work in the future."
export GEOSERVER_HOME=`pwd`
else
if [ -r ../start.jar ]; then
echo "GEOSERVER_HOME environment variable not found, using current "
echo "location.  If not set then running this script from other "
echo "directories will not work in the future."
export GEOSERVER_HOME=`pwd`/..
fi
fi
if [ -z "$GEOSERVER_HOME" ]; then
echo "The GEOSERVER_HOME environment variable is not defined"
echo "This environment variable is needed to run this program"
echo "Please set it to the directory where geoserver was installed"
exit 1
fi
fi
# Sanity check: a valid GEOSERVER_HOME must contain bin/startup.sh.
if [ ! -r "$GEOSERVER_HOME"/bin/startup.sh ]; then
echo "The GEOSERVER_HOME environment variable is not defined correctly"
echo "This environment variable is needed to run this program"
exit 1
fi
#Find the configuration directory: GEOSERVER_DATA_DIR
# Falls back to $GEOSERVER_HOME/data_dir, else leaves it empty so the
# application uses its built-in defaults.
if [ -z $GEOSERVER_DATA_DIR ]; then
if [ -r "$GEOSERVER_HOME"/data_dir ]; then
export GEOSERVER_DATA_DIR="$GEOSERVER_HOME"/data_dir
else
echo "No GEOSERVER_DATA_DIR found, using application defaults"
GEOSERVER_DATA_DIR=""
fi
fi
cd "$GEOSERVER_HOME"
# Locate the Marlin rasterizer jar shipped inside the webapps tree.
# Bug fix: this was `[ -z $MARLIN_JAR]` — missing space before `]` and
# unquoted variable, so the test always errored out and MARLIN_JAR was
# recomputed/left unset unpredictably.
if [ -z "$MARLIN_JAR" ]; then
export MARLIN_JAR=`find \`pwd\`/webapps -name "marlin*.jar" | head -1`
fi
export MARLIN_ENABLER="-Xbootclasspath/a:$MARLIN_JAR -Dsun.java2d.renderer=org.marlin.pisces.MarlinRenderingEngine"
# Identify Geoserver instance
export GEOSERVER_NODE_OPTS=id:docker-$HOSTNAME
# echo "Setting cluster dir ..."
export CLUSTER_CONFIG_DIR="$GEOSERVER_DATA_DIR/cluster/$(hostname -s)"
# echo "Cleaning old cluster settings ..."
rm -vf $CLUSTER_CONFIG_DIR/*
# echo "Creating cluster.properties file ..."
mkdir -pv ${CLUSTER_CONFIG_DIR}
# Write the per-instance JMS cluster configuration (embedded broker).
echo "#
#$(date)
toggleSlave=true
connection=enabled
topicName=VirtualTopic.>
brokerURL=
xbeanURL=./broker.xml
durable=false
toggleMaster=true
embeddedBroker=enabled
CLUSTER_CONFIG_DIR=${CLUSTER_CONFIG_DIR}
embeddedBrokerProperties=embedded-broker.properties
connection.retry=3
readOnly=disabled
instanceName="$(hostname -s)-$(openssl rand -hex 6)"
group=geoserver-cluster
connection.maxwait=10000" > ${CLUSTER_CONFIG_DIR}/cluster.properties
# echo "Setting broker ip ..."
# Broker bind address: use the Weave overlay interface when present,
# otherwise bind on all interfaces.
ip -f inet -o addr show ethwe0
if [ $? -eq 0 ]
then
echo "Weave Network detected"
export BROKERIP=$(ip -f inet -o addr show ethwe0 | awk '{print $4}' | cut -d/ -f1)
else
echo "Weave Network does not exist - Set to local"
export BROKERIP=0.0.0.0
fi
# echo "Setting cluster OPTS ..."
# export GEOSERVER_CLUSTER_OPTS="-Dactivemq.base=\"$CLUSTER_CONFIG_DIR/tmp\" -Dactivemq.transportConnectors.server.uri=\"tcp://"$BROKERIP":0?maximumConnections=1000&wireFormat.maxFrameSize=104857600&jms.useAsyncSend=true&transport.daemon=true&trace=true\""
echo "GEOSERVER DATA DIR is $GEOSERVER_DATA_DIR"
#added headless to true by default, if this messes anyone up let the list
#know and we can change it back, but it seems like it won't hurt -ch
exec "$_RUNJAVA" $JAVA_OPTS $MARLIN_ENABLER -DGEOSERVER_DATA_DIR="$GEOSERVER_DATA_DIR" -Dactivemq.base="$CLUSTER_CONFIG_DIR/tmp" -Dactivemq.transportConnectors.server.uri="tcp://"$BROKERIP":0?maximumConnections=1000&wireFormat.maxFrameSize=104857600&jms.useAsyncSend=true&transport.daemon=true&trace=true" -Djava.awt.headless=true -DSTOP.PORT=8079 -DSTOP.KEY=geoserver -jar start.jar
| true
|
0b4e0130b0d050410d9cb01207260d29f7345beb
|
Shell
|
PengixH50WXC/HytaleAPI
|
/hytale
|
UTF-8
| 1,056
| 3.75
| 4
|
[] |
no_license
|
#!/bin/bash
# Print the titles of all published Hytale blog posts.
# Fetches the published-posts JSON once, then walks the array with jq
# until an out-of-range index yields null.
post(){
    result=$(curl --request GET --url https://hytale.com/api/blog/post/published -s)
    echo "Hytale blog posztok"
    echo "Készítette Boda Viktor (H50WXC)"
    echo "A hytale blog post címei:"
    echo
    I=0
    # Fix: quote "$result" so whitespace inside JSON strings is not
    # collapsed by word splitting; also dropped the unused year/month
    # variables that were assigned here but never read.
    while [ "$(echo "$result" | jq .[$I])" != null ]
    do
        echo "$result" | jq .[$I].title | tr -d "\""
        I=$(($I+1))
    done
}
# Print title and creation date for every post in one archive month.
# $1 is "YYYY.MM" — split on the dot into year and month for the URL.
datumpost(){
    echo "Cím és dátum:"
    echo
    year=$(echo $1 | cut -d . -f 1)
    month=$(echo $1 | cut -d . -f 2)
    result=$(curl --request GET --url https://hytale.com/api/blog/post/archive/$year/$month/ -s)
    I=0
    # Walk the JSON array until indexing past the end returns null.
    until [ "$(echo $result | jq .[$I])" = null ]; do
        echo $result | jq .[$I].title | tr -d "\""
        echo $result | jq .[$I].createdAt | tr -d "\"Z" | tr "T" " "
        I=$(($I+1))
    done
}
# Argument parsing:
#   -a         list all published post titles
#   -d YYYY.MM list titles+dates for one archive month
while getopts ":ad:" opt;
do
case ${opt} in
a )
post
;;
d )
target=$OPTARG
datumpost $target
;;
\? )
# Unknown flag.
echo "Helytelen opció: $OPTARG" 1>&2
;;
: )
# Flag given without its required argument (leading ':' in optstring
# enables this silent-error mode).
echo "Helytelen opció: $OPTARG kér egy argumentumot" 1>&2
;;
esac
done
shift $((OPTIND -1))
| true
|
c73d5bd750396beb2e0679cdb9057933f4c57a37
|
Shell
|
mircomannino/mkudoobuntu18_04
|
/include/packages.sh
|
UTF-8
| 1,630
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
# Script fot update and install the packages and programs
# Included script
# DIR_PACKAGES resolves to this script's own directory, so the source'd
# helpers and the configure/ tree are found regardless of the caller's cwd.
DIR_PACKAGES=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
source "$DIR_PACKAGES/utils/color.sh"
source "$DIR_PACKAGES/include/resize.sh"
################################################################################
# Install the prepared apt sources.list into the mounted rootfs (mnt/),
# with standard 644 permissions.
add_source_list() {
    echo_yellow "Adding source list"
    install -m 644 $DIR_PACKAGES/../configure/sources.list "mnt/etc/apt/sources.list"
}
# Update the chroot at mnt/, then install the base, UDOO-specific and dev
# packages one at a time so a single failing package does not abort the rest.
function install_packages() {
    # Update
    echo_yellow Installing update...
    chroot mnt/ /bin/bash -c "apt update -y"
    chroot mnt/ /bin/bash -c "apt upgrade -y"
    # Packages list
    local BASE_PACKAGES=( openssh-server alsa-utils bash-completion policykit-1
    bluez blueman curl dosfstools fbset iw nano module-init-tools ntp unzip usbutils
    vlan wireless-tools wget wpasupplicant unicode-data console-data console-common
    pv sysfsutils cpufrequtils ntfs-3g locate command-not-found man-db git i2c-tools
    python-pip vim minicom crda manpages systemd-services systemd-shim wireless-regdb
    udoo-gpio-export net-tools)
    #UDOO related
    # Fix: plain `+=` append — the previous `local BASE_PACKAGES+=(...)`
    # redundantly re-declared the already-local array on every append.
    BASE_PACKAGES+=( firmware-imx-9t fsl-alsa-plugins-9t imx-lib-9t imx-udev-fsl-rules
    imx-vpu-9t libfslcodec-9t libfslparser-9t libfslvpuwrap-9t hostapd dtweb )
    #dev library
    BASE_PACKAGES+=( python-serial librxtx-java )
    echo_yellow Installing packages...
    for package in "${BASE_PACKAGES[@]}"; do
        chroot mnt/ /bin/bash -c "apt-get install $package -y"
    done
}
| true
|
a2d586bfe36b480780b1d1210895faf208fd3e17
|
Shell
|
brainsik/dotfiles
|
/bash/.bash_finale
|
UTF-8
| 734
| 3.296875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Interactive pieces or output we want at the end
# these often get set with root perms which jacks shit up
# Pick the stat(1) flavor: BSD uses -f %u, GNU/Linux uses -c %u, both
# printing the file's numeric owner uid.
stat="stat -f %u" # BSD
[[ "$OSNAME" = "Linux" ]] && stat="stat -c %u"
if [[ "$USER" == "brainsik" ]]; then
# Warn if history/viminfo were chowned away (e.g. by a root session).
for file in $HOME/.bash_history $HOME/.viminfo; do
if [[ -e $file ]] && [[ $($stat "$file") != "$UID" ]]; then
echo "!! Bad owner for $file"
fi
done
fi
# add SSH keys if needed
# Only on macOS, only when the ed25519 key exists and the krypton key
# does not; skip entirely if the agent already holds identities.
if [[ "$OSNAME" = "Darwin" ]] && [[ -s ~/.ssh/id_ed25519 ]] && ! [[ -s ~/.ssh/id_krypton.pub ]]; then
if [[ $(ssh-add -l) =~ "no identities" ]]; then
# -A loads keys from the macOS keychain; fall back to adding explicitly.
ssh-add -A || ssh-add ~/.ssh/id_{ed25519,rsa}
fi
fi
# finale
mesg y # for a good time, call
uptime # curiousness
| true
|
b994e608894f6617deadf00d5955607091c082a8
|
Shell
|
YunoHost-Apps/rspamdui_ynh
|
/scripts/remove
|
UTF-8
| 1,507
| 2.921875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# YunoHost removal script for the rspamdui app: drops the NGINX config and
# resets the rspamd controller password, then restarts rspamd.
#=================================================
# GENERIC START
#=================================================
# IMPORT GENERIC HELPERS
#=================================================
source _common.sh
source /usr/share/yunohost/helpers
#=================================================
# LOAD SETTINGS
#=================================================
ynh_script_progression --message="Loading installation settings..."
app=$YNH_APP_INSTANCE_NAME
domain=$(ynh_app_setting_get --app=$app --key=domain)
#=================================================
# STANDARD REMOVE
#=================================================
# REMOVE NGINX CONFIGURATION
#=================================================
ynh_script_progression --message="Removing NGINX web server configuration..."
# Remove the dedicated NGINX config
ynh_remove_nginx_config
#=================================================
# SPECIFIC REMOVE
#=================================================
# CONFIGURE RSPAMD
#=================================================
ynh_script_progression --message="Configuring RSPAMD..."
# Reset the controller password back to the rspamd default ("q1").
# NOTE(review): the match string is a regex ("^password = .*") but
# ynh_replace_special_string does literal replacement in some YunoHost
# versions — confirm the helper used here treats it as intended.
ynh_replace_special_string --match_string="^password = .*" --replace_string="password = \"q1\";" --target_file=/etc/rspamd/worker-controller.inc
ynh_systemd_action --service_name=rspamd --action="restart"
#=================================================
# END OF SCRIPT
#=================================================
ynh_script_progression --message="Removal of $app completed"
| true
|
36e45349c54535ab90618c5a0266dd67653eeebd
|
Shell
|
TheMonsterCheese/shell-scripts
|
/practice-scripts/md2pdf
|
UTF-8
| 230
| 3.53125
| 4
|
[] |
no_license
|
#!/bin/bash
# Convert <name>.md to <name>.pdf with pandoc.
# Usage: md2pdf <file without extension>
file="${1}"
if [ -z "${file}" ]; then
    echo "Usage: md2pdf <file without extension>"
    exit 1
fi
if [ ! -f "${file}.md" ]; then
    echo "expected ${file}.md"
    exit 1
fi
pandoc -o "${file}.pdf" "${file}.md"
| true
|
cfb7bf67a77e0a7f275eba70cb15bbf247bc4d3e
|
Shell
|
rmadar/pandoc-utils
|
/scripts/NBtoMD~
|
UTF-8
| 1,309
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# Convert a Jupyter notebook to a PDF article:
#   notebook -> markdown -> (pandoc) latex -> cleaned latex -> pdflatex.
# Relies on $PANDOC_TEMPLATES and $PANDOC_FILTERS pointing at the
# companion template/filter directories.
if [ $# -ne 2 ]; then
echo ""
echo "Script usage: "
echo "  NBtoArticle <input.ipynb> <output.pdf>"
echo ""
exit 1
fi
# Notebook -> markdown
jupyter-nbconvert --to markdown ${1} --template=${PANDOC_TEMPLATES}/nbconverter_md_pandoc.tpl
# Get the name wihtout the extension
# NOTE(review): %%.* strips at the FIRST dot, so "my.notebook.ipynb"
# becomes "my" — confirm inputs never contain extra dots.
FILE=${1}
raw_name="${FILE%%.*}"
mv ${raw_name}.md tmp.md
# Markdown -> latex
echo "Converting MD into latex"
pandoc -N -s ${PANDOC_TEMPLATES}/nb_code_env.yaml tmp.md -o tmp.tex --template ${PANDOC_TEMPLATES}/document_template.tex \
--filter ${PANDOC_FILTERS}/pandoc_latex_environment.py\
--variable geometry="a4paper, total={6in,9in}"\
-V linestretch="1.2" -V fontsize="11pt" -V display-abstract\
--listings --highlight-style kate\
--toc\
-V notebook
#--highlight-style kate\
# Filtering of latex file — strip verbatim blocks the templates can't handle.
echo "Cleaning latex"
python ${PANDOC_FILTERS}/manual_remove_verbatim.py -i tmp.tex -o temp_clean.tex
# latex -> pdf (run twice so the TOC/references resolve)
echo "Compiling"
pdflatex temp_clean.tex >& log
pdflatex temp_clean.tex >& log
# Cleaning — remove every intermediate artifact, keep only the final PDF.
rm -f temp_clean.aux temp_clean.tex temp_clean.out\
temp_clean.log tmp.md tmp.tex log\
temp_clean.toc temp_clean.nav temp_clean.snm\
temp_clean.vrb ${raw_name}.md
rm -rf ${raw_name}_files
mv temp_clean.pdf ${2}
| true
|
16693af0794c6778369faca8e76b2ed4e4119816
|
Shell
|
Lucas042-crypto/GrandStream
|
/Massa_GrandStream.sh
|
UTF-8
| 361
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/bash
# Send a REBOOT or RESET request to every GrandStream phone whose IP is
# listed (one per line) in ip.log.
# Usage: $0 <password> <REBOOT|RESET>
SENHA=$1
ACAO=$2
if [ $# -lt 2 ]; then
    echo "Faltou passar senha e ação a ser tomada (REBOOT OU RESET)!"
    exit 1
fi
echo "Numero de argumentos: $#"
# read -r keeps backslashes in the input literal.
# Fix: dropped the old `if [ $? -eq 0 ]` wrapper — it only tested the
# status of the preceding echo, which is always 0.
while read -r IP; do
    echo "RESET:$IP"
    if [[ -n "$IP" ]]; then
        curl "http://$IP/cgi-bin/api-sys_operation?passcode=$SENHA&request=$ACAO"
    fi
done < ip.log
| true
|
bcfab7cdd615f78ee8bd3b81c9000c10b7107060
|
Shell
|
piyushghai/training_results_v0.7
|
/DellEMC/benchmarks/ssd/implementation/mxnet/build_ssd.sh
|
UTF-8
| 765
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Prepare the MLPerf SSD benchmark container: expose the host GPU driver,
# swap in NVIDIA's optimized pycocotools, install Python deps, and build
# the Horovod MPI smoke test.
# GPU driver path
export PATH=/mnt/driver/bin:$PATH
export LD_LIBRARY_PATH=/mnt/driver/lib64:$LD_LIBRARY_PATH
# Update container's pycocotools to optimized version
pip uninstall -y pycocotools
export COCOAPI_VERSION=2.0+nv0.4.0
# Tag is the part after "+n" (e.g. "v0.4.0") used to pin the git checkout.
export COCOAPI_TAG=$(echo ${COCOAPI_VERSION} | sed 's/^.*+n//')
pip install --no-cache-dir pybind11
pip install --no-cache-dir git+https://github.com/NVIDIA/cocoapi.git@${COCOAPI_TAG}#subdirectory=PythonAPI
cd /mnt/current
pip install --no-cache-dir cython
# Pinned mlperf-logging commit.
pip install --no-cache-dir https://github.com/mlperf/logging/archive/9ea0afa.zip
pip install --no-cache-dir -r requirements.txt
# Compile Horovod MPI test
cd tests
mpicxx --std=c++11 horovod_mpi_test.cpp -o horovod_mpi_test
| true
|
91a64dce311c3d6d0bf0c36e642eefbbe2824523
|
Shell
|
nings/testbed
|
/script/upload_cfg.sh
|
UTF-8
| 516
| 3.96875
| 4
|
[] |
no_license
|
#!/bin/bash
# Push a Haggle configuration file to N testbed nodes named node-0..node-N-1.
# node.conf must define NODE_USERNAME.
# NOTE(review): node.conf is sourced relative to the CALLER's cwd, but the
# script cd's to its own directory only afterwards — confirm node.conf is
# expected in the caller's directory and not next to the script.
. node.conf
USAGE() {
echo USAGE:
echo "$0 <configfile> <number of nodes>"
exit 1
}
if [ $# -lt 2 ];then
USAGE
fi
cd $(dirname $0)
configfile=$1
node_count=$2
for((i = 0; $i < $node_count; i++)); do
node_name="node-"$i
# Create .Haggle on the node.
ssh $NODE_USERNAME@$node_name mkdir .Haggle
# Copy configuration file to node.
if ! scp $configfile $NODE_USERNAME@$node_name:.Haggle/config.xml; then
echo "Could not copy configuration file to $node_name"
exit 1
fi
done
| true
|
578eb7accacb9ac9807c99fd1860b0eb7d405a28
|
Shell
|
lubatang/LLib
|
/run.sh
|
UTF-8
| 647
| 3.78125
| 4
|
[] |
no_license
|
#!/bin/bash
# Run the viewer on every .obj file under a test directory, optionally with
# a bump map chosen interactively.
if [ $# -lt 1 ]; then
    echo "Usage:"
    echo "$ ./run.sh [Path of Test Directory]"
    echo
    echo "example: ./run.sh Color\ Models/"
    exit
fi
VIEWER=lviewer.debug
DIRECTORY=$1
files=`find ${DIRECTORY} | grep --color=never ".obj$"`
echo "Bump map path? (skip this step by typing \`enter')";
read BUMP_MAP;
# Fix: quote ${BUMP_MAP} in the tests — unquoted, an empty or
# space-containing answer made "[ ! -z ... ]" / "[ ! -f ... ]" degenerate
# into malformed tests that only worked by accident.
if [ -n "${BUMP_MAP}" ]; then
    while [ ! -f "${BUMP_MAP}" ]; do
        echo "Can not find file \`${BUMP_MAP}'. Please to key-in the path again.";
        ls
        read BUMP_MAP;
    done
fi
# NOTE(review): ${files} is word-split on purpose; .obj paths containing
# whitespace are not supported by this loop.
for file in ${files}; do
    if [ -z "${BUMP_MAP}" ]; then
        ${VIEWER} -f ${file}
    else
        ${VIEWER} -f ${file} -b "${BUMP_MAP}"
    fi
done
| true
|
23e802fc863ca66714a99836d65d880909b9c19a
|
Shell
|
bopopescu/recipes
|
/universe_on.sh
|
UTF-8
| 2,241
| 3.21875
| 3
|
[] |
no_license
|
#PREREQUISITES
# Bootstrap a DC/OS "universe" package server on CentOS 7: install base
# tooling, upgrade to Python 3.6, build+run the universe Docker image, and
# register it as a package repo with the cluster.
rpm -Uvh https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
yum install -y epel-release
yum install -y git python-pip python34 jq nginx
curl https://bootstrap.pypa.io/get-pip.py | python3.4
pip3 install --upgrade pip jsonschema
# upgrade python to 3.6+ for latest universe
# NOTE(review): these mv/ln lines replace the system python3 symlinks
# in place; .bak copies are the only rollback.
sudo yum -y install https://centos7.iuscommunity.org/ius-release.rpm
sudo yum -y install python36u python36u-pip
sudo mv /bin/python3 /bin/python3.bak.3-4
sudo ln -s /usr/bin/python3.6 /bin/python3
sudo mv /bin/pyenv /bin/pyenv.bak.3-4
sudo ln -s /bin/pyenv-3.6 /bin/pyenv
sudo mv /bin/pydoc3 /bin/pydoc3.bak-3.4
ln -s /bin/pydoc-3.6 /bin/pydoc3
#####################
BASEDIR=~
GITHUB_USER=mesosphere
REPONAME=universe
PACKAGENAME="version-3.x"
BACKEND_PACKAGE=""
# This node's eth0 IPv4 address — used as the repo server address below.
SERVERIP=$(ip addr show eth0 | grep -Eo \
'[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | head -1) #this node's eth0
SERVERPORT=8085
########################################
# BOOTSTRAP NODE: create from scratch after code change
########################################
cd $BASEDIR
rm -Rf $REPONAME
git clone -b $PACKAGENAME http://github.com/$GITHUB_USER/$REPONAME
cd $REPONAME
scripts/build.sh
#build image and marathon.json
DOCKER_TAG=$PACKAGENAME docker/server/build.bash
#run the docker image in the bootstrap node
docker run -d --name universe-dev -p $SERVERPORT:80 mesosphere/universe-server:$PACKAGENAME
#OPTIONAL: save image for exporting the repo
docker save -o $BASEDIR/$REPONAME/$REPONAME$PACKAGENAME.tar mesosphere/universe-server:$PACKAGENAME
#add repo from the universe we just started
dcos package repo add --index=0 dev-universe http://$SERVERIP:$SERVERPORT/repo
#check that the universe is running -- FROM THE BOOTSTRAP OR ANY NODE
#curl http://$SERVERIP:8085/repo | grep $PACKAGENAME
# NOTE(review): BACKEND_PACKAGE is empty above, so this first install
# invocation has no package argument — confirm it is meant to be filled in.
dcos package install --yes $BACKEND_PACKAGE
dcos package install --yes $PACKAGENAME
dcos package install --yes $PACKAGENAME-admin
echo -e "Copy and paste the following into each node of the cluster to activate this server's certificate on them:"
echo -e "mkdir -p /etc/docker/certs.d/$SERVERIP:5000"
echo -e "curl -o /etc/docker/certs.d/$SERVERIP:5000/ca.crt http://$SERVERIP:$SERVERPORT/certs/domain.crt"
echo -e "systemctl restart docker"
echo -e ""
| true
|
c797de3a3b9f17a4b02e4fd26f70ddd72b5e0adf
|
Shell
|
webclinic017/ml_monorepo
|
/statarb/src/bin/checkcrontab.sh
|
UTF-8
| 404
| 3.65625
| 4
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Compare this machine's installed crontab with the checked-in copy under
# $ROOT_DIR/cron/<host>.cron and report any drift.  Exits non-zero when
# the crontab differs (or on setup error).
if [ "$ROOT_DIR" = "" ]; then
    # Bug fix: this used to be `exit "Must set ROOT_DIR!"` — exit takes a
    # numeric status, so the message was never printed and bash exited with
    # a generic "numeric argument required" error instead.
    echo "Must set ROOT_DIR!" >&2
    exit 1
else
    . $ROOT_DIR/src/bin/include.sh
fi
TMPFILE=$TMP_DIR/checkcrontab.$$.tmp
# First two dot-separated labels of the hostname select the cron file.
MACHINE=`echo $HOSTNAME | cut -d \. -f 1-2`
crontab -l | diff -u - $ROOT_DIR/cron/$MACHINE.cron > $TMPFILE
RESULT=$?
# Non-empty diff output => drift; show it and force a failure status.
if [ -s $TMPFILE ]
then
    echo "crontab not up to date on $MACHINE:"
    cat $TMPFILE
    RESULT=1
fi
rm -f $TMPFILE
exit $RESULT
| true
|
b8356d95bf8976ad22fcd9177b8c55c8ee22aa32
|
Shell
|
nebula-actions/nebula-gears
|
/docker/images/oss-upload.sh
|
UTF-8
| 630
| 3.765625
| 4
|
[] |
no_license
|
#! /bin/bash
# Upload files to the nebula-graph OSS bucket under a given sub-directory.
# Credentials come from OSS_* env vars, ~/.ossutilconfig, or a Docker secret.
# Usage: oss-upload.sh <subdir> <files...>
[[ $# -lt 2 ]] && echo "$0 <subdir> <files...>" && exit 1
CMD=""
if [[ -n "${OSS_ENDPOINT}" ]] && [[ -n "${OSS_ID}" ]] && [[ -n "${OSS_SECRET}" ]]; then
    CMD="ossutil64 -e ${OSS_ENDPOINT} -i ${OSS_ID} -k ${OSS_SECRET}"
elif [[ -f $HOME/.ossutilconfig ]]; then
    CMD="ossutil64"
elif [[ -f /run/secrets/ossutilconfig ]]; then
    CMD="ossutil64 -c /run/secrets/ossutilconfig"
else
    echo "Could not find valid oss authentication configure file."
    exit 1
fi
OSS_BASE=oss://nebula-graph
OSS_SUBDIR=$1
shift
# Fix: iterate over "$@" (quoted) and quote each path so file names
# containing spaces survive; the old unquoted `$@` word-split them.
for file in "$@"
do
    ${CMD} -f cp "${file}" "${OSS_BASE}/${OSS_SUBDIR}/$(basename "${file}")"
done
| true
|
f48e50f69e7d4d9daf0a9b5325161494eed09a8d
|
Shell
|
kraeml/raspberry-edu-devops
|
/scripts/install_ansible.sh
|
UTF-8
| 609
| 3.125
| 3
|
[] |
no_license
|
#!/bin/bash -l
# script name: install_ansible.sh
# sudo: no
# Installs Ansible (plus WinRM/Windows helpers) system-wide via pip3 and
# seeds /etc/ansible with a hosts file.  Must NOT be run as root — the
# script invokes sudo itself where needed.
set -x
if [ $(id -u) = 0 ]; then
    echo "to be run with no sudo"
    exit 1
fi
echo "Install ansible via pip"
sudo apt-get install -y libffi-dev libssl-dev cowsay sshpass ieee-data
#ToDo kerberos dependencies
# Bug fix: the version spec was "ansible>=2.4.0=<2.7", which is not a valid
# pip requirement; version specifiers are comma-separated (PEP 508).
ANSIBLE="ansible>=2.4.0,<2.7 pywinrm enum34 httplib2 idna ipaddress jinja2 crypto cryptography markupsafe netaddr paramiko pyasn1 six xmltodict"
# Fall back to installing pip3 first if the direct install fails.
sudo pip3 install $ANSIBLE || (sudo apt-get install python3-pip && sudo pip3 install $ANSIBLE)
sudo mkdir -p /etc/ansible
sudo cp files/etc/ansible/hosts /etc/ansible/
set +x
|
a51f7fc0133218fb22062ee95bcceca10e465d80
|
Shell
|
maikelarabori/shell-script-examples
|
/args-basics.sh
|
UTF-8
| 131
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/sh
# Demo of positional-parameter variables: prints the first two arguments,
# the argument count ($#) and the full argument string ($@).
echo "Argument 1: $1"
echo "Argument 2: $2"
echo "Total of arguments: $#"
echo "The actual string of arguments: $@"
| true
|
d5690c4ef7d2ab6e0089b63b84f4e1b47299ab35
|
Shell
|
Mcdonoughd/ansible-playbooks
|
/scripts/shell/check_process.sh
|
UTF-8
| 181
| 3.34375
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/bash
# Report whether an nginx process is running, printing its PID(s).
# The [n]ginx bracket idiom keeps this grep's own command line from
# matching, replacing the old `| grep -v grep` filter; $() replaces the
# legacy backticks.
pid=$(ps ax | grep -i '[n]ginx' | awk '{print $1}')
if [ -n "$pid" ]
then
    echo "Nginx is Running as PID: $pid"
else
    echo "Nginx is not Running"
fi
| true
|
55f363930f5c3d302817ba19e7ec8bf927b9a16d
|
Shell
|
nju33/.dotfiles
|
/.zsh.d/functions/version-management.zsh
|
UTF-8
| 447
| 3.109375
| 3
|
[] |
no_license
|
# Bootstrap Homebrew via the official install script (macOS).
function install_homebrew() {
  /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
}
# Clone rbenv into ~/.rbenv and compile its optional bash extension.
function install_rbenv() {
  # https://github.com/rbenv/rbenv#installation
  git clone https://github.com/rbenv/rbenv.git ~/.rbenv
  cd ~/.rbenv && src/configure && make -C src
}
# Clone pyenv into ~/.pyenv (no build step needed).
function install_pyenv() {
  # https://github.com/yyuu/pyenv#installation
  git clone https://github.com/yyuu/pyenv.git ~/.pyenv
}
| true
|
7236aa7fb8538f72264ceee82e26fe6b75d12fbb
|
Shell
|
datspike/excel_validator
|
/build.sh
|
UTF-8
| 2,189
| 4
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build helper for the excel_validator project: builds a PyInstaller
# one-file executable inside a Python virtualenv, with clean/remove modes.
# ANSI color / style escape sequences used by the status messages below.
green='\e[0;32m'
yellow='\e[33m'
bold='\e[1m'
# Reset all attributes.
endColor='\e[0m'
# Print the help text (the caller exits afterwards).
usage() {
echo -e "
${green}excel_validator compiler v1.1:
==================================${endColor}
$0 option
${yellow}Available options:
-b --build - build executable
-c --clean - delete prepared files without executable
-r --remove - delete all build files with executable
-h --help - show this${endColor}
"
}
# Create (if needed) and activate a Python 2 virtualenv in ./env, then
# install the project requirements plus pyinstaller into it.
prepare_env() {
echo -e "${green}[*] PREPARING PYTHON2 VIRTUAL ENVIRONMENT${endColor}"
if [ ! -d env ] ; then
# Some distros ship the Python 2 tool as "virtualenv2".
if [ -f /usr/bin/virtualenv ] ; then
virtualenv env
else
if [ -f /usr/bin/virtualenv2 ] ; then
virtualenv2 env
else
echo "${yellow}[!] NO VIRTUALENV DETECTED, PLEASE INSTALL IT!"
exit 1
fi
fi
fi
source env/bin/activate
pip install -r requirements.txt
pip install pyinstaller
# Re-activate so the freshly installed entry points are on PATH.
source env/bin/activate
}
# Remove the virtualenv and intermediate build artifacts (dist/ is kept;
# use the --remove mode to delete it as well).
clean() {
    echo -e "${green}[*] CLEANING BUILD ENVIRONMENT${endColor}"
    rm -rf -- env build excel_validator.spec
}
# No arguments => show help and fail.
if [ $# -le 0 ] ; then
usage
exit 1
fi
# Process every argument in order; flags are not mutually exclusive
# (e.g. "-b -c" builds then cleans).
for val in $@ ; do
if [[ ("$val" == "--help") || "$val" == "-h" ]] ; then
usage
exit 0
fi
if [[ ("$val" == "--build") || "$val" == "-b" ]] ; then
prepare_env
echo -e "${green}[*] BUILDING EXECUTABLE${endColor}"
# --hidden-import: validator modules are loaded dynamically, so
# PyInstaller cannot discover them by static analysis.
pyinstaller --clean --onefile -p validator \
--hidden-import=validator.BaseValidator \
--hidden-import=validator.ChoiceValidator \
--hidden-import=validator.ConditionalValidator \
--hidden-import=validator.CountryValidator \
--hidden-import=validator.DateTimeValidator \
--hidden-import=validator.EmailValidator \
--hidden-import=validator.ExcelDateValidator \
--hidden-import=validator.LengthValidator \
--hidden-import=validator.NotBlankValidator \
--hidden-import=validator.RegexValidator \
--hidden-import=validator.TypeValidator \
excel_validator.py
fi
if [[ ("$val" == "--clean") || "$val" == "-c" ]] ; then
clean
fi
if [[ ("$val" == "--remove") || "$val" == "-r" ]] ; then
clean
echo -e "${green}[*] REMOVING BUILD FILES${endColor}"
rm -rf dist
fi
done
echo -e "${green}${bold}[*] DONE${endColor}"
| true
|
e2f77781d7b5693ee0b194f5945d5cef3e20b4d7
|
Shell
|
eggsyntax/packages
|
/deploy-changed.sh
|
UTF-8
| 1,286
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
# Deploy every cljsjs package directory whose local version differs from
# the latest version published on Clojars.
set -e
# Retrieve current versions from Clojars into an associative array keyed
# by "group/artifact".  IFS is set to newline so each JSON object from
# `jq -c` is one loop item.
declare -A versions
OFS=$IFS
IFS=$'\n'
for e in $(curl -s https://clojars.org/api/groups/cljsjs | jq -c ".[]"); do
    group=$(echo $e | jq -r ".group_name")
    artifact=$(echo $e | jq -r ".jar_name")
    id="$group/$artifact"
    version=$(echo $e | jq -r ".latest_version")
    versions["$id"]=$version
done
IFS=$OFS
for x in *; do
    if [[ -d $x ]]; then
        if [[ $x = "jquery" ]]; then
            echo "WARNING: jquery not supported"
            # Bug fix: was `break`, which aborted processing of ALL
            # remaining packages; `continue` just skips this one.
            continue;
        fi
        if [[ ! -f $x/build.boot ]]; then
            echo "WARNING: $x skipped"
            # Bug fix: was `break` — same problem as above.
            continue;
        fi
        # Extract the artifact id and version strings from build.boot.
        id=$(grep :project $x/build.boot | grep -o "'.*" | head -n1 | cut -c 2-)
        version=$(grep "def +lib-version+" $x/build.boot | grep -o "\".*\"" | head -n1 | cut -d \" -f 2)
        version=$version$(grep "def +version+" $x/build.boot | grep -o "\".*\"" | head -n1 | cut -d \" -f 2)
        deployed=${versions["$id"]}
        # Deploy only when the local version differs from Clojars.
        if [[ $version != $deployed ]]; then
            echo "$id deployed version $deployed, current $version"
            (
                cd $x
                boot package -- build-jar -- push --gpg-sign --gpg-passphrase $GPG_PASSPHRASE --repo "deploy-clojars"
            )
        fi
    fi
done
| true
|
3c1271891a41044d8549104565610e30f041186d
|
Shell
|
marcoAmir/NGS_analysis
|
/RNAseq/src/indexGenomeForSTAR.sh
|
UTF-8
| 4,075
| 3.859375
| 4
|
[] |
no_license
|
#!/bin/bash -e
# Amir Marcovitz
#
# A wrapper for indexing a genome+gtf for RNA-seq analysis with STAR
#
# Inputs: genome assembly for which 2bit is availale (e.g., mm10)
#	  relevant genome annotation file (gtf) from Ensembl
#
# 	  for some assemblies (e.g., mm10, rn6, bosTau8, hg38) the script will download gtf
#	  from ensembl
#	  before indexing, the script will make sure that chromosome names are consistent
#	  while removing chromosomes that have 'random' in their name
#
# requirements:
#	kent source utilities (https://github.com/ENCODE-DCC/kentUtils)
#	STAR v2.4 and up (https://github.com/alexdobin/STAR)
# defaults:
default_read_length=100		# modify if your RNA-seq library is different
default_avail_cores=8		# make sure it fites your system
path_to_genomes=/cluster/gbdb	# path to dir storing genome 2bits
genomeDir=/cluster/u/amirma/geneLoss/hg38/validations/rnaseq/indexed_genomes	# output dir for indexed genomes
# One arg: assembly with a known Ensembl gtf URL.  Two args: assembly +
# explicit gtf path/URL.  Anything else: print usage and exit.
if [ "$#" -eq 1 ]; then
	assembly=$1
	if [ ${assembly} = "mm10" ]; then
		gtf_file="ftp://ftp.ensembl.org/pub/release-86/gtf/mus_musculus/Mus_musculus.GRCm38.86.gtf.gz"
	elif [ ${assembly} = "rn6" ]; then
		gtf_file="ftp://ftp.ensembl.org/pub/release-86/gtf/rattus_norvegicus/Rattus_norvegicus.Rnor_6.0.86.gtf.gz"
	elif [ ${assembly} = "bosTau8" ]; then
		gtf_file="ftp://ftp.ensembl.org/pub/release-86/gtf/bos_taurus/Bos_taurus.UMD3.1.86.gtf.gz"
	elif [ ${assembly} = "hg38" ]; then
		gtf_file="ftp://ftp.ensembl.org/pub/release-86/gtf/homo_sapiens/Homo_sapiens.GRCh38.86.gtf.gz"
	else
		echo -e "\n\tError! genome-annotation file (gtf) or ftp path to gtf not found\n\n"
		exit 1
	fi
elif [ "$#" -eq 2 ]; then
	assembly=$1
	gtf_file=$2
else
	echo -e "\nA wrapper for indexing a genome+gtf for RNA-seq analysis with STAR\n
	Usage:\n\t$0 assembly gtf_file\n  for some assemblies (e.g., mm10, rn6, bosTau8, hg38) gtf downloaded automatically\n"
	exit 1
fi
if [ ! -d "${genomeDir}/${assembly}" ]; then
	mkdir ${genomeDir}/${assembly}
fi
# get the genome file (2bit->fa):
echo -e "\n\t...genome to fasta file"
twoBitToFa ${path_to_genomes}/${assembly}/${assembly}.2bit ${assembly}.tmp.fa
# filter out random chromosomes:
echo -e "\n\t...removing random chromosomes"
cat ${assembly}.tmp.fa | grep ">" | sed -e "s/>//g" | grep -v "random" > chroms.txt
faFilter -namePatList=chroms.txt ${assembly}.tmp.fa ${assembly}.fa
rm -rf ${assembly}.tmp.fa
# get gtf-file and process it:
if ls *.gtf*; then
	echo -e "\n\t...processing gtf file: ${gtf_file}"
else
	echo -e "\n\t...downloading gtf file: ${gtf_file}"
	wget ${gtf_file}
	gtf_file=`echo ${gtf_file} | awk -F'/' '{print $NF}'`
fi
echo -e "\n\t...making genome and and gtf chromosome names consistent"
# Prefix Ensembl-style names (1..25, X, Y, MT) with "chr", keep scaffold
# accessions without their version suffix, and map chrMT -> chrM to match
# UCSC naming.
zcat ${gtf_file} | egrep -v "^#" | awk -F'\t' \
'{if($1<=25 || $1=="X" || $1=="Y" || $1=="MT") {print "chr"$0} else {split($1,a,"."); \
print a[1]"\t"$2"\t"$3"\t"$4"\t"$5"\t"$6"\t"$7"\t"$8"\t"$9}}' | sed -e "s/chrMT/chrM/g" > tmp
# Keep only chromosomes present in BOTH the genome fasta and the gtf.
comm -1 -2 <(cut -f1 tmp | sort -u) <(sed -e "s/chrUn_//g" chroms.txt | sort -u) | sort -u > chroms.gtf.txt
join -t$'\t' -1 1 -2 1 -o '1.1 1.2 1.3 1.4 1.5 1.6 1.7 1.8 1.9' <(sort -u tmp) <(sort -u chroms.gtf.txt) \
| awk -F'\t' '{if($1~/GL/ || $1~/JH/) {print "chrUn_"$0} else {print $0}}' > ${assembly}.gtf
rm -rf tmp chroms.* ${gtf_file}
gtf_file=${assembly}.gtf
n_chroms=`cut -f1 ${gtf_file} | sort -u | wc -l`
echo -e "\t\t${n_chroms} references (chromosomes) in genome file ${assembly}.fa and annotation file ${gtf_file}\n\n"
# indexing genome+gtf with STAR
echo -e "\n\t...indexing genome+gtf with STAR\n"
# Bug fix: this tested ${m_chroms}, an undefined variable (the reference
# count is stored in ${n_chroms} above).  The malformed test always failed,
# so the else branch (--genomeChrBinNbits 18) ran for every assembly.
# NOTE(review): with the variable fixed, confirm which branch should carry
# --genomeChrBinNbits — STAR's guidance is to LOWER it for genomes with
# many references; the current >5000 branch omits it.
if [ ${n_chroms} -gt 5000 ]; then
	STAR --runThreadN ${default_avail_cores} --runMode genomeGenerate --genomeDir ${genomeDir}/${assembly} \
	--genomeFastaFiles ${assembly}.fa --sjdbGTFfile ${gtf_file} --sjdbOverhang ${default_read_length}
else
	STAR --runThreadN ${default_avail_cores} --runMode genomeGenerate --genomeDir ${genomeDir}/${assembly} \
	--genomeFastaFiles ${assembly}.fa --sjdbGTFfile ${gtf_file} --sjdbOverhang ${default_read_length} \
	--genomeChrBinNbits 18
fi
echo -e "\n\n"
rm -rf ${gtf_file} ${assembly}.fa
| true
|
31818dd079ef739d4bbce56f444b0c60245515d2
|
Shell
|
aubema/aodsem-2
|
/Scripts/epar_filte
|
UTF-8
| 1,125
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
# Interactive editor for filteraod.par: shows the current parameters (if
# any), then prompts for a root file name and an AOD threshold and writes
# the two-line parameter file consumed by the filteraod tool.
echo " *******************************************************"
echo " *                                                     *"
echo " *          Edit parameter for filteraod               *"
echo " *                                                     *"
echo " *          Apply a maximum limit to an                *"
echo " *          aerosol optical depth maps                 *"
echo " *                                                     *"
echo " *          Martin Aube 2000                           *"
echo " *                                                     *"
echo " *******************************************************"
echo " "
folder=`pwd`
echo " Experiment directory "$folder
echo " "
# Show the existing parameter file, if one is present.
if [ -f filteraod.par ]
then echo " Actual parameters set to:"
echo " "
more filteraod.par
fi
# List candidate input maps to help the user pick a root name.
ls *.pgm
echo " "
echo " New parameters:"
echo " "
echo "Root file name (.pgm will be added)?"
read nom
echo "AOD threshold x 100 (e.g. for AOD=0.5, enter 50)?"
read value
# Two-line format: root name, then threshold (each with a trailing label).
echo $nom " Root file name " > filteraod.par
echo $value " AOD threshold value" >> filteraod.par
echo "Done"
| true
|
f045bb45f006d60b4cfb6ba9b83379d75cbfc370
|
Shell
|
cpai1/Projects
|
/create-app-env.sh
|
UTF-8
| 1,054
| 2.9375
| 3
|
[] |
no_license
|
#!/bin/bash
# Provision the app environment: an RDS MySQL instance, an SNS topic with
# an email subscription, an SQS queue, and two S3 buckets named $1 and $2.
dbname="login"
dbidentifier="itmo-chandu"
dbsgrp="sg-8f6fa4f6"
dbinstance="db.t2.micro"
dbengine="mysql"
muser="controller"
# SECURITY NOTE(review): database credentials are hard-coded here; they
# should come from environment variables or a secrets store.
mpassword="iloveuchandu"
storage="5"
dbzone="us-west-2b"
#creating the rds instance
aws rds create-db-instance --db-name $dbname --db-instance-identifier $dbidentifier --vpc-security-group-ids $dbsgrp --allocated-storage $storage --db-instance-class $dbinstance --engine $dbengine --master-username $muser --master-user-password $mpassword --availability-zone $dbzone
aws rds wait db-instance-available --db-instance-identifier $dbidentifier
echo "RDS instance successfully created"
#creating SNS:
# Bug fix: create-topic / create-queue emit JSON by default, so the raw
# captured output could not be passed back as --topic-arn / --queue-url.
# Use --output text with a --query to capture just the ARN / URL.
savearn=$(aws sns create-topic --name my-message --output text --query 'TopicArn')
aws sns subscribe --topic-arn "$savearn" --protocol email --notification-endpoint cpai1@hawk.iit.edu
#creating SQS:
saveurl=$(aws sqs create-queue --queue-name chanduQueue --output text --query 'QueueUrl')
aws sqs send-message --queue-url "$saveurl" --message-body "Hello"
aws sqs receive-message --queue-url "$saveurl"
#creating s3 bucket:
aws s3 mb s3://$1 --region us-west-2
aws s3 mb s3://$2 --region us-west-2
| true
|
be03fc1af7ee969fbdc366f557f8736d97a85554
|
Shell
|
apoorvakumar690/proximity
|
/scripts/lint.sh
|
UTF-8
| 450
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
# Lint all Go packages (excluding vendor/) with golint, installing it first
# if it is not already available.
#current directory
dir="$(pwd)"
#parent directory
parentdir="$(dirname "$dir")"
#make flags
makeArgs=$1
# When invoked directly (not via make), run from the repository root.
if [ "$makeArgs" != "TRUE" ]; then
    cd "$parentdir" || exit 1
fi
# Install golint if missing ('command -v' is the portable, recommended
# replacement for 'which').
if ! command -v golint >/dev/null 2>&1; then
    go get -u golang.org/x/lint/golint
fi
# Fail (non-zero exit) on any lint warning.
golint -set_exit_status $(go list ./... | grep -v /vendor/)
| true
|
880b98039205c9a06a35d675d7af70491ce36460
|
Shell
|
jadesoul/bootstrap
|
/7-install-conda/install.sh
|
UTF-8
| 514
| 3.078125
| 3
|
[] |
no_license
|
# Install Miniconda into ~/tools/miniconda3.
# Exactly one "url" line should be active; uncomment the one matching your
# platform (macOS x86/arm64, Linux, or Windows).
#for mac x86
url=https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
#for mac m1
#url=https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-arm64.sh
#for linux
#url=https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
#for win
#url=https://repo.anaconda.com/miniconda/Miniconda3-latest-Windows-x86_64.exe
file=$(basename $url)
# Reuse an already-downloaded installer; otherwise try wget, then curl.
test -f $file || wget $url -O $file || curl -o $file $url
prefix=$HOME/tools/miniconda3
# Installer flags: -b batch (no prompts), -f/-u allow installing over an
# existing directory, -p install prefix (per the Miniconda installer docs).
sh $file -b -f -u -p $prefix
# Register conda's shell hook in the user's shell startup files.
$prefix/bin/conda init
| true
|
ae2946774fb538da96ac9a8ec0ff0a440bded6e4
|
Shell
|
crdoconnor/personalenv
|
/bin/digclock.sh
|
UTF-8
| 1,533
| 4.15625
| 4
|
[] |
no_license
|
#!/bin/bash
# SCRIPT: digclock.sh
# USAGE: ./digiclock &
# PURPOSE: Displays time and date in the top right corner of the
# screen using tput command.
# To stop this digclock use command "kill pid"
################################################################
####################### VARIABLE DECLARATION ###################
# To place the clock on the appropriate column, subtract the
# length of $Time and $Date, which is 22, from the total number
# of columns
Columns=$(tput cols)
Startpoint=$(($Columns-22))
# If you're in an X Window System terminal,you can resize the
# window, and the clock will adjust its position because it is
# displayed at the last column minus 22 characters.
Color1=`tput setab 2` # Green background color for time
Color2=`tput setab 6` # Cyan background color for date
Normal=`tput sgr0` # back to normal screen colors
####################### MAIN PROGRAM ###########################
# The script is executed inside a while without conditions
# (an infinite loop: redraw the clock once per second until killed).
while :
do
Time=`date +%r`
Date=`date +"%d-%m-%Y"`
tput sc #Save the cursor position&attributes
tput cup 0 $Startpoint
# You can also use bellow one liner
# tput cup 0 $((`tput cols`-22))
# But it is not efficient to calculate cursor position for each
# iteration. That's why I placed variable assignment before
# beginning of the loop.
# print time and date in the top right corner of the screen.
echo -n $Color1$Time $Color2$Date$Normal
# restore the cursor to whatever was its previous position
tput rc
# Delay for 1 second
sleep 1
done
| true
|
c6d9b2317debeda3762d0b3e985a4799f4467244
|
Shell
|
lalawue/shell_script_stuff
|
/upload_script.sh
|
UTF-8
| 3,007
| 3.609375
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# Program: Upload shell script
# Purpose: Upload the all the normal files under the publish_dir
# Author : Sucha <suchaaa@gmail.com>
# Version: 2.00
# Usage : Fill in your PUBLISH_DIR and FTP information blow , then
# run it anywhere.
#
# History: v2.00 - 2014.01.04
# * Using ftp script to transfer
# v1.04 - 2006.08.05
# * If transfer failure or any other mistakes occur,
# abort the mission, DO NOT update the record file,
# and you should upload the files at later time.
# [ Thanks to zhao wang, arithboy AT gmail DOT com ]
# v1.03 - 2006.01.19
# * Upload all the normal files under the publish dir
# use the find command, no depth limited, and less
# tmp file generated.
# v1.02 - 2005.05.03
# * Upload all the normal files under the publish dir,
# no depth limited.
# v1.01 - 2005.05.02
# * Upload all the normal files under the publish dir
# wtih the depth is 2.
# v1.00 - 2005.01.07
# * Upload a single dir's normal file, and also support
# it's sub image dir.
# Settins
#
# 1. Local settings
# publish_dir holding your ftp files, normal file under this
# dir will be checked, path including last "/".
publish_dir=$HOME/workport/homesite/publish/
# file of recording the modified time, no need to change
ctf=$publish_dir.ctime.txt
# tmp file prefix
# NOTE(review): /tmp plus $RANDOM is predictable and world-readable, and
# the generated script embeds the FTP password in clear text -- prefer
# mktemp with restrictive permissions.
tmpfile="/tmp/upssdef"$RANDOM
# eof line
eof_line="___EOF_"
# 2. Serve settings
server=192.168.10.12
user=my_ftp_username
passwd=my_ftp_password
rdir=my_ftp_publish_dir
# 3. Backup
#
store_dir=$HOME
backup_dir=$HOME/workport
file_name="homesite"
# Tar up the whole site into $store_dir/site.tar.gz.
_backup()
{
echo "------ backup files ------"
echo "from $backup_dir/$file_name"
echo "To $store_dir/site.tar.gz"
cd $backup_dir
tar -czf $store_dir/site.tar.gz $file_name
}
# Emit an ftp(1) batch script that mirrors every path changed since the
# timestamp file ($ctf, via -cnewer), run it, then delete it.
_generate_and_run_script()
{
echo "------ generate ftp script ------"
echo "#!/bin/bash" > $tmpfile
echo "ftp -n << $eof_line" >> $tmpfile
echo "open $server" >> $tmpfile
echo "user $user $passwd" >> $tmpfile
# NOTE(review): word-splitting here breaks on paths containing whitespace.
for path in $(find . -cnewer $ctf -print)
do
# Strip the leading "./" that find prints.
file=$(echo $path | cut -c3- )
if [ -d $file ]; then
echo "mkdir $file" >> $tmpfile
else
echo "put $file $rdir/$file" >> $tmpfile
fi
done
echo "bye" >> $tmpfile
echo "$eof_line" >> $tmpfile
# run script
echo "------ run script ------"
bash $tmpfile
echo "------ delete script ------"
rm $tmpfile
}
# run
# generate_google_sitemap
if [ -e $ctf ]; then
cd $publish_dir
_generate_and_run_script
_backup
echo "created in $(date), records the modified time." > $ctf
else
echo "First run this program, create the file for recoreding the motified time."
echo "created in $(date), records the modified time." > $ctf
fi
| true
|
d8710cfd55dde2bed18f9b57d31cfb1217c084f6
|
Shell
|
SultanSGillani/dotfiles
|
/roles/zsh/files/functions/runx.zsh
|
UTF-8
| 210
| 3.5625
| 4
|
[
"ISC"
] |
permissive
|
# Run a command x times | Usage: runx <value>
# http://www.stefanoforenza.com/how-to-repeat-a-shell-command-n-times/
runx() {
  n=$1
  shift
  # Count down from n, invoking the remaining arguments as a command
  # each time; n ends up at -1 afterwards, exactly as before.
  while (( n-- > 0 )); do
    "$@"
  done
}
| true
|
10625a4708944539b77e23de6faa633a7037631e
|
Shell
|
OSSSP/cgc-monitor
|
/cgc-monitor/simics/slaveInstall/cpAndRun.sh
|
UTF-8
| 332
| 3.203125
| 3
|
[] |
no_license
|
#!/bin/bash
# copy the script to add user cgc to the remote host, and then execute it
# user created with no password, access via ssh keys
#
# Usage: cpAndRun.sh <script> [hosts-file]
HOSTS=hosts.txt
if [ ! -z "$2" ]; then
    HOSTS=$2
fi
# Push the script to every host, mark it executable, then run it.
pscp -h $HOSTS -l mike $1 /tmp/$1
./mikessh.sh "chmod a+x /tmp/$1" $HOSTS
# BUGFIX: the message printed "/tmp/$12", which bash expands as "${1}2" --
# a stray "2" that did not match the command actually executed below.
echo "command will be: /tmp/$1"
./mikessh.sh "/tmp/$1" $HOSTS
| true
|
c71a9a110b1a78da9fe26b07353313e7337b35c3
|
Shell
|
zs1621/bashStudy
|
/test_construct_01032014/various_test_construct.sh
|
UTF-8
| 553
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# the following 4 cases have same meaning
# Demonstrates four equivalent ways to test whether $1 is empty: the
# [ builtin vs the /usr/bin/[ binary, and the test builtin vs the
# /usr/bin/test binary.
#1
if /usr/bin/[ -z "$1" ]
then
echo "No command-line arguments"
else
echo "First command-line argument is $1"
fi
echo
#2
if [ -z "$1" ]
then
# NOTE(review): "command-lane" is a typo in this message text (left as-is).
echo "No command-lane arguments"
else
echo "First command-line argument is $1"
fi
echo
#3
if test -z "$1"
then
echo "No command-line arguments"
else
echo "First command-line argument is $1"
fi
echo
#4
if /usr/bin/test -z "$1"
then
echo "No command-line arguments"
else
echo "First command-line argument is $1"
fi
echo
| true
|
16de1c8dd332359adb7ee88554d2271ed4a6811f
|
Shell
|
yupswing/dotfiles
|
/dotfiles/bin/cpuwatch
|
UTF-8
| 1,561
| 4.125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Watch total CPU usage and log the five busiest processes whenever the
# average over the sampling window exceeds a threshold.
# LAUNCH EXAMPLES
# - launch watcher, log on file and kill old istances
# ./cpuwatch 40 >> cpuwatch.log &
if [ "$1" == "-h" ] || [ "$1" == "--help" ]; then
echo "cpuwatch [THRESHOLD:40] [SLEEP:0] [AVERAGE_FOR_SECONDS:30] >> /path/to/log"
echo " the script check if the 'instant' CPU percantage usage exceed a threshold"
echo " and log the 5 most used processes"
echo "cpuwatch -s (kill any instance)"
exit 0
fi
# kill other instances
# NOTE(review): this SIGKILLs every process whose "ps -ef" line contains
# $0 (sparing only this PID); unrelated commands matching the same string
# would also be killed -- pgrep -f would be a tighter match.
me=$$
ps -ef | grep $0 | awk -v me=$me '$2 != me {print $2}' | xargs kill -9 >/dev/null 2>&1
if [ "$1" == "-s" ]; then
exit 0
fi
# config
# NOTE(review): the help text above says the default threshold is 40, but
# the actual default here is 66.
THRESHOLD=${1:-66}
WAIT_INTERVAL=${2:-0}
CPU_INTERVAL=${3:-30}
log() {
echo -e "$@"
}
TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
log "# Started at $TIMESTAMP"
log "# Checking CPU usage every $CPU_INTERVAL+$WAIT_INTERVAL seconds and log when average over $THRESHOLD%"
# Endless loop: each pass samples the aggregate "cpu " line of /proc/stat
# twice, $CPU_INTERVAL seconds apart; awk computes
# 100 * d(user+system) / d(user+system+idle) over the window.
while true; do
LOADAVG=$(cat /proc/loadavg)
CPU_USAGE=$(awk '{u=$2+$4; t=$2+$4+$5; if (NR==1){u1=u; t1=t;} else print ($2+$4-u1) * 100 / (t-t1); }' \
<(grep 'cpu ' /proc/stat) <(
sleep $CPU_INTERVAL
grep 'cpu ' /proc/stat
))
if (($(echo "${CPU_USAGE} > $THRESHOLD" | bc -l))); then
TIMESTAMP=$(date '+%Y-%m-%d %H:%M:%S')
# cmd is last otherwise it truncate
PROC_LIST=$(ps -eo pid,ppid,%mem,%cpu,cmd --sort=-%cpu | head -n 6)
log "----------------------------------------"
log "$TIMESTAMP | loadavg $LOADAVG | cpu $CPU_USAGE%"
log "$PROC_LIST"
fi
if (($(echo "$WAIT_INTERVAL > 0" | bc -l))); then
sleep $WAIT_INTERVAL
fi
done
| true
|
4e5d6340b00366075d6a40a1bca0899aaaa508a9
|
Shell
|
devcontrol/VBoxMaestro
|
/VBoxMaestro.sh
|
UTF-8
| 932
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
#VBoxManager bash script handler
#
# Usage: VBoxMaestro.sh <start|save|boot|sleep> [vm-name...]
#   start - start the named VM(s) headless
#   save  - save the state of each named VM
#   boot  - start the VMs listed in ./boot
#   sleep - save the state of every registered VM
command=$1
server="${@:2}"
case "$command" in
'start')
  echo "Will start machine: ""$server"
  VBoxManage startvm $server --type=headless
  echo "DONE; \n"
  ;;
'save')
  echo "Will save machine: ""$server"
  # "for server" iterates over all positional parameters, so skip the
  # sub-command words themselves.
  for server
  do
    if [ "$server" != "save" ] && [ "$server" != "start" ]
    then
      VBoxManage controlvm $server savestate
    fi
  done
  echo "DONE; \n"
  ;;
'boot')
  echo "Will load from ./boot"
  # Replace the positional parameters with the VM names read from ./boot.
  set -- $(<./boot)
  VBoxManage startvm $@ --type=headless
  echo "DONE; \n"
  ;;
'sleep')
  echo "Will save all the machines!"
  VBoxManage list vms > ./.tmp
  # Extract the quoted VM names from "VBoxManage list vms" output.
  what=`grep -o '".*"' .tmp | sed 's/"//g'`
  runningMachines=$what
  for runningMachines in `echo $runningMachines`
  do
    echo "Saving Maschine: $runningMachines"
    VBoxManage controlvm $runningMachines savestate
    echo "DONE;"
    echo ""
  done
  ;;
# BUGFIX: the fallback pattern was quoted ('*'), which only matches a
# literal "*" argument; the unquoted * below is the real catch-all, so
# unknown commands are now reported.
*)
  echo "Unknown command: $command"
  ;;
esac
| true
|
06d82ab6da178b9a33a1a00e59a7c08159a4c7fd
|
Shell
|
onireto/CS1XA3
|
/Project01/project_analyze.sh
|
UTF-8
| 1,143
| 3.578125
| 4
|
[] |
no_license
|
#!/bin/bash
function todoLog(){
    # Collect every #TODO marker in the repo (excluding this script) into todo.log
    grep -r "#TODO" * --exclude=project_analyze.sh > todo.log;
    echo "TODO is placed in todo.log";
}
function fileCount(){
    # Outputs the count of haskell, java script, css , python, html and bash script files.
    # BUGFIX: the original used 'find . -type f - name ".hs"' -- the space in
    # "- name" is invalid find syntax, and the patterns lacked the "*"
    # wildcard, so every count was wrong.
    haskell=$(find . -type f -name "*.hs" | wc -l);
    javascript=$(find . -type f -name "*.js" | wc -l);
    css=$(find . -type f -name "*.css" | wc -l);
    python=$(find . -type f -name "*.py" | wc -l);
    html_v=$(find . -type f -name "*.html" | wc -l);
    bash_v=$(find . -type f -name "*.sh" | wc -l);
    echo "Haskell: $haskell, Javascript: $javascript, CSS: $css, Python: $python, HTML: $html_v, and Bash Script: $bash_v";
}
function easygit(){
    # Interactive add/commit/push helper.
    echo "Whole directory or one file? [D/F]:"
    read resp
    # BUGFIX: '[$resp == "D"]' lacked the spaces that [ requires, so the
    # test always failed with a "command not found"-style error.
    if [ "$resp" == "D" ]
    then
        git add * ;
    else
        echo "Enter file name"
        read file
        git add "$file"
    fi
    echo "Enter commit message";
    read msg
    git commit -m "$msg";
    git push origin master;
}
function logError(){
    # BUGFIX: GHC's no-codegen flag is "-fno-code" (it was split as
    # "-fno -code"), and Python has no such flag at all -- use py_compile
    # to syntax-check each source file instead.
    find . -name "*.hs" -exec ghc -fno-code {} \; 2>compile_fail.log
    find . -name "*.py" -exec python -m py_compile {} \; 2>>compile_fail.log
    echo "Compile Error log created";
}
| true
|
79e5e0c9190e7f2cd63d103fc7fc5c69674d7d93
|
Shell
|
m-e-l-u-h-a-n/Information-Technology-Workshop-I
|
/Linux assignments(before-mid-sem)/assignment-4/scripts/14.sh
|
UTF-8
| 127
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# Read a string and report whether it is a palindrome.
# BUGFIX: the prompt and read were fused onto one line
# ('echo "Enter a string"read s'), so "read" never executed, $s was
# always empty, and the script always printed "Palindrome".
echo "Enter a string"
read s
if [[ $(rev <<< "$s") == "$s" ]]
then
    echo "Palindrome"
else
    echo "Not a Palindrome"
fi
| true
|
3b5398a482cf309b9eeca54727722054b7cea609
|
Shell
|
langyuxiansheng/vue-aliplayer-v2
|
/pages.sh
|
UTF-8
| 366
| 2.671875
| 3
|
[
"MIT"
] |
permissive
|
# Build the library and publish the dist/ output to the gh-pages branch.
echo "打包 => dist";
# Package => dist
npm run build;
# Recreate the gh-pages branch from scratch
echo "更新gh-pages";
git branch -D gh-pages;
echo "删除gh-pages";
git checkout -b gh-pages;
echo "创建并切换到gh-pages";
# dist/ is normally gitignored, so force-add it before committing.
git add -f dist;
git commit -m 'create gh-pages';
# NOTE(review): "git push origin -d gh-pages" deletes the remote branch so
# the subtree push below can recreate it -- confirm this is intended.
git push origin -d gh-pages;
git subtree push --prefix dist origin gh-pages;
echo "已推送gh-pages";
| true
|
5836b1604fc097fed8d6636e4dde84202e7888a2
|
Shell
|
ideal/scrot
|
/man/create-man.sh
|
UTF-8
| 574
| 3.65625
| 4
|
[
"MIT-advertising",
"MIT-feh"
] |
permissive
|
#!/bin/bash
# Copyright 2015-2019 Joao Eriberto Mota Filho <eriberto@eriberto.pro.br>
# Create a manpage using txt2man command.
#
# This script can be used under BSD-3-Clause license.
# Bail out early if txt2man is not installed.
TEST=$(txt2man -h 2> /dev/null)
[ "$TEST" ] || { echo -e "\nYou need to install txt2man, from https://github.com/mvertes/txt2man.\n"; exit 1; }
# Manpage metadata: date, name, release version, man section, description.
T2M_DATE="16 April 2020"
T2M_NAME=scrot
T2M_VERSION=1.3
T2M_LEVEL=1
T2M_DESC="command line screen capture utility"
# Don't change the following line
txt2man -d "$T2M_DATE" -t $T2M_NAME -r $T2M_NAME-$T2M_VERSION -s $T2M_LEVEL -v "$T2M_DESC" $T2M_NAME.txt > $T2M_NAME.$T2M_LEVEL
| true
|
dbed8e1a65cbe5ac956bfd03d5a18f0d62fb5133
|
Shell
|
luiscg99/PracticaA-SI
|
/prac06.sh
|
UTF-8
| 331
| 3.0625
| 3
|
[] |
no_license
|
# Water bill calculator: a 20-unit standing charge plus a volumetric rate
# by consumption tier (integer arithmetic throughout).
read -p "Introduce la cantidad de litros de agua consumidos: " litros
total=0
# Up to 50 litres: standing charge only.
if [ $litros -le 50 ]; then
total=$((total+20))
fi
# 51-200 litres: litres/20 plus the standing charge.
if [ $litros -gt 50 ] && [ $litros -le 200 ]; then
total=$((litros/20))
total=$((total+20))
fi
# Over 200 litres: litres/10 plus the standing charge plus a 30 surcharge.
if [ $litros -gt 200 ]; then
total=$((litros/10))
total=$((total+20+30))
fi
echo "El total es $total"
| true
|
e69bca25ea99e3a8ab2e29815d01a9e720a8653e
|
Shell
|
Jay-R-H/practisefiles
|
/forwhilefiles/primenumber.sh
|
UTF-8
| 220
| 3.671875
| 4
|
[] |
no_license
|
#!/bin/bash
# Read a number and report whether it is prime.
#
# BUGFIX: the original loop ended in an unconditional "break", so only
# divisibility by 2 was ever tested, and a verdict was printed for that
# single trial division. Trial division now runs up to sqrt(n) and the
# verdict is printed exactly once.
# is_prime N -> exit status 0 if N is prime, 1 otherwise.
is_prime() {
    local n=$1
    (( n >= 2 )) || return 1
    local d
    for (( d=2; d*d<=n; d++ ))
    do
        (( n % d == 0 )) && return 1
    done
    return 0
}
read -p "Enter the number :" n
if is_prime "$n"
then
    echo "$n is a prime number "
else
    echo "$n is not a prime number "
fi
| true
|
f0b5b236c9bb3d41966926fb056d6d0b1b5d6c9b
|
Shell
|
SixSq/dataClay
|
/demos/wordcount/3_BuildApps.sh
|
UTF-8
| 567
| 3.25
| 3
|
[] |
no_license
|
#!/bin/bash
# Compile the wordcount demo (consumer and producer) against the dataClay
# client library and the COMPSs runtime jar.
DCLIB="../../tool/lib/dataclayclient.jar"
if [ ! -f $DCLIB ]; then
echo "[ERROR] dataClay client lib (or link) not found at $DCLIB."
# NOTE: "exit -1" is out of the 0-255 range; bash reports it as 255.
exit -1
fi
# COMPSSLIB must point at the COMPSs runtime jar.
if [ -z $COMPSSLIB ]; then
echo "[ERROR] COMPSSLIB variable with valid COMPSS path is undefined"
exit -1
fi
if [ ! -f $COMPSSLIB ]; then
echo "[ERROR] COMPSs lib (or link) not found at COMPSSLIB=$COMPSSLIB."
exit -1
fi
echo ""
echo -n "Compiling ... "
mkdir -p bin
javac -cp stubs:$DCLIB:$COMPSSLIB src/consumer/*.java -d bin/
javac -cp stubs:$DCLIB:$COMPSSLIB src/producer/*.java -d bin/
echo " done"
| true
|
542581bf40629502b946e2fabd128effe6ddadff
|
Shell
|
klauern/dot-vim
|
/bootstrap.sh
|
UTF-8
| 295
| 3.234375
| 3
|
[] |
no_license
|
#!/bin/bash
# Use this script to set up your environment for vim if you have never done this before.
set -ue
readonly PROGNAME=$(basename "$0")
readonly PROGDIR=$(readlink -m "$(dirname "$0")")
# BUGFIX: quote $PROGDIR -- unquoted, the cd word-splits (and, under
# "set -e", aborts the script) when the checkout path contains spaces.
cd "$PROGDIR"
# Create the runtime directories the vimrc expects for backup/swap/undo.
mkdir -p tmp/{backup,swap,undo}
rm -f ~/.vimrc # if it exists
# -r: make the link relative to its location; -s: symbolic (GNU coreutils).
ln -rs vimrc ../.vimrc
| true
|
11e845e7560985ec4a5a0f0a96e4c471211d84fe
|
Shell
|
bboerner/dotfiles
|
/osx/env.bash
|
UTF-8
| 1,014
| 2.953125
| 3
|
[
"MIT"
] |
permissive
|
#
# Mac OS X - Python (user-level pip installs under ~/Library/Python/2.7)
#
PYTHON_ROOT="$HOME/Library/Python/2.7"
# Prepend the per-user Python tree's bin/man/lib directories when present.
if [[ -d $PYTHON_ROOT/bin ]]; then
export PATH="$PYTHON_ROOT/bin:$PATH"
[[ -d $PYTHON_ROOT/man ]] && export MANPATH="$PYTHON_ROOT/man:$MANPATH"
[[ -d $PYTHON_ROOT/lib ]] && export LD_LIBRARY_PATH="$PYTHON_ROOT/lib:$LD_LIBRARY_PATH"
fi
unset PYTHON_ROOT
# Disabled legacy block ("if false" makes everything below dead code):
# MacPorts, EPD and Wireshark PATH setup kept for reference only.
if false; then
#
# Mac OS X - MacPorts
#
if [ -d /opt/local/bin ]; then
#echo_i Using DarwinPorts
#export PATH=/opt/local/bin:/opt/local/sbin:$PATH
#export MANPATH=/opt/local/man:$MANPATH
[[ -d /opt/local/bin ]] && modlist PATH /opt/local/bin
[[ -d /opt/local/man ]] && modlist MANPATH /opt/local/man
fi
# Setting PATH for EPD v5.1.1
[[ -d "/Library/Frameworks/Python.framework/Versions/Current/bin" ]] && modlist PATH "/Library/Frameworks/Python.framework/Versions/Current/bin"
# Wireshark
#[[ -d "$HOME/usr/local/wireshark/bin" ]] && modlist PATH "$HOME/usr/local/wireshark/bin"
if [[ -f /opt/local/bin/python2.5 ]]; then
alias python=python2.5
fi
fi
| true
|
397f43de7405ab4aa2ccb3d6d93eeb92920c65b3
|
Shell
|
CitizenScienceCenter/k8s_charts
|
/deploy/certificate-manager/replicate.sh
|
UTF-8
| 329
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/zsh
# Replicate the TLS certificate secret into every namespace except
# kube-system: render replicated_secret.yaml with the secret name filled
# in, then apply it once per namespace.
secret=tls-certificate-secret
echo $secret
sed "s/auxsecret/$secret/g" replicated_secret.yaml > replicated_secret-$secret.yaml
# Namespace list: drop kube-system and the "NAME" header row (tail -n +2).
NS=$(kubectl get ns | grep -v kube-system | awk '{ print $1 }' | tail -n +2)
#Get NS - kube-system
for i in $NS; do
echo $i
kubectl apply -f replicated_secret-$secret.yaml -n $i
done
| true
|
ca39f21eb414b5ed695d7faa39d0702369341b78
|
Shell
|
mathias-nyman/blackarch
|
/packages/freeradius-wpe/PKGBUILD
|
UTF-8
| 1,635
| 2.671875
| 3
|
[] |
no_license
|
# PKGBUILD for freeradius-wpe (FreeRADIUS patched for RADIUS impersonation
# demos); all fields and functions below are consumed by makepkg.
pkgname=freeradius-wpe
pkgver=2.0.2
pkgrel=3
pkgdesc="The premier open source RADIUS server - Wireless Pwnage Edition (WPE), demonstrating RADIUS impersonation vulnerabilities"
groups=('blackarch' 'blackarch-wireless')
arch=('i686' 'x86_64')
url='http://www.freeradius.org/'
license=("GPL")
provides=('freeradius')
depends=('openssl' 'pth' 'libldap>=2.4.18' 'net-snmp'
'postgresql-libs>=9.0.3' 'libmysqlclient')
optdepends=('libpcap' 'unixodbc' 'python2')
options=('!libtool' 'emptydirs' '!makeflags')
conflicts=('freeradius')
install=freeradius.install
source=("http://ftp.cc.uoc.gr/mirrors/ftp.freeradius.org/old/freeradius-server-$pkgver.tar.gz"
"http://www.willhackforsushi.com/code/freeradius-wpe/freeradius-wpe-$pkgver.patch"
'radiusd')
md5sums=('45c244281c84d38d90a12384a1f29652'
'6671917d602373d8010fe38de66377e4'
'f1a6530b1b69d2fa793aa45b2de379bb')
# makepkg hook: drop the deprecated KRB5 define and apply the WPE patch.
prepare() {
cd $srcdir/freeradius-server-$pkgver
sed -i 's/ -DKRB5_DEPRECATED//' src/modules/rlm_krb5/Makefile.in
patch -p1 <$srcdir/freeradius-wpe-2.0.2.patch
}
# makepkg hook: configure and compile.
build() {
cd $srcdir/freeradius-server-$pkgver
export CFLAGS="$CFLAGS -fno-strict-aliasing"
./configure --with-system-libtool --with-system-libltdl \
--prefix=/usr \
--localstatedir=/var \
--sysconfdir=/etc \
--libdir=/usr/lib/freeradius \
--sbindir=/usr/bin
make
}
# makepkg hook: install into $pkgdir and ship the defaults as raddb.default.
package() {
cd $srcdir/freeradius-server-$pkgver
make install R=$pkgdir
install -Dm755 ../radiusd $pkgdir/etc/rc.d/radiusd
chmod o+r "$pkgdir"/etc/raddb/*
mv "$pkgdir/etc/raddb" "$pkgdir/etc/raddb.default"
rm -rf "$pkgdir/var/run"
}
| true
|
e1057cdd8927d081f42847dbec8fa622c4a0a793
|
Shell
|
shydefoo/vimrc
|
/my_plugins/firenvim/release.sh
|
UTF-8
| 4,526
| 3.578125
| 4
|
[
"MIT",
"GPL-3.0-only"
] |
permissive
|
#!/bin/sh -e
# Release helper for firenvim:
#  1. sanity-check the repo state and that $1 is the next semver version;
#  2. run the Firefox/Chrome test suites and a production build;
#  3. lint and package the store artifacts;
#  4. commit, tag, push, create the GitHub release and open the three
#     extension-store submission pages.
if ! [ -e ./package.json ] ; then
echo "Not in firenvim repository. Aborting."
exit 1
fi
if [ "$1" = "" ] ; then
echo "No new version specified. Aborting."
exit 1
fi
if [ "$(git rev-parse --abbrev-ref HEAD)" != "master" ] ; then
echo "Not on master. Aborting."
exit 1
fi
if ! git diff --quiet --exit-code ; then
echo "Git working directory unclean. Aborting."
exit 1
fi
if ! git diff --cached --quiet --exit-code ; then
echo "Git staged area unclean. Aborting."
exit 1
fi
git fetch origin master
if ! git diff --quiet --exit-code origin/master ; then
echo "Local master is different from origin master. Aborting"
exit 1
fi
# Split old/new versions into major/minor/patch and enforce that the new
# version is exactly the next patch, minor or major release.
newMajor="$(echo "$1" | cut -d. -f1)"
newMinor="$(echo "$1" | cut -d. -f2)"
newPatch="$(echo "$1" | cut -d. -f3)"
oldVersion="$(grep '"version": "\(.\+\)"' package.json | grep -o '[0-9.]\+')"
oldMajor="$(echo "$oldVersion" | cut -d. -f1)"
oldMinor="$(echo "$oldVersion" | cut -d. -f2)"
oldPatch="$(echo "$oldVersion" | cut -d. -f3)"
if [ "$oldMajor" = "$newMajor" ] ; then
if [ "$oldMinor" = "$newMinor" ] ; then
if [ "$((oldPatch + 1))" != "$newPatch" ] ; then
echo "New version has same minor and major but patch doesn't follow."
exit 1
fi
elif [ "$((oldMinor + 1))" -eq "$newMinor" ] ; then
if [ "$newPatch" != 0 ] ; then
echo "New version has new minor but patch isn't 0."
exit 1
fi
else
echo "New version has same major but minor doesn't follow."
exit 1
fi
elif [ "$((oldMajor + 1))" -eq "$newMajor" ] ; then
if [ "$newMinor" != 0 ] ; then
echo "New version has new major but minor isn't 0."
exit 1
fi
if [ "$newPatch" != 0 ] ; then
echo "New version has new major but patch isn't 0."
exit 1
fi
else
echo "New version doesn't follow previous one."
exit 1
fi
oldVersion="$oldMajor.$oldMinor.$oldPatch"
newVersion="$newMajor.$newMinor.$newPatch"
echo "Updating firenvim from v$oldVersion to v$newVersion."
# First, edit package info
sed -i "s/\"version\": \"$oldVersion\"/\"version\": \"$newVersion\"/" package.json
# Then, do manual update/editing
npm ci
# Make sure none of the files have changed, except for package-lock.json
if [ "$(git diff --name-only | grep -v "package\(-lock\)\?.json")" != "" ] ; then
echo "Some files have been modified. Aborting."
exit 1
fi
# npm run test takes care of building the extension in test mode
npm run test-firefox
npm run test-chrome
# now we need a release build
npm run build
# lint firefox add-on to make sure we'll be able to publish it
"$(npm bin)/addons-linter" target/xpi/firefox-latest.xpi
# Add finishing touches to chrome manifest
sed 's/"key":\s*"[^"]*",//' -i target/chrome/manifest.json
# Generate bundles that need to be uploaded to chrome/firefox stores
rm -f target/chrome.zip
zip --junk-paths target/chrome.zip target/chrome/*
source_files="$(echo ./* | sed s@./node_modules@@ | sed s@./target@@)"
rm -f target/firenvim-firefox-sources.tar.gz
tar -cvzf target/firenvim-firefox-sources.tar.gz $source_files
# BUGFIX: this previously removed "...sources.tar.gr" (typo), so a stale
# thunderbird sources archive was never actually deleted before repacking.
rm -f target/firenvim-thunderbird-sources.tar.gz
tar -cvzf target/firenvim-thunderbird-sources.tar.gz $source_files
# Prepare commit message
COMMIT_TEMPLATE="/tmp/firenvim_release_message"
echo "package.json: bump version $oldVersion -> $newVersion" > "$COMMIT_TEMPLATE"
echo "" >> "$COMMIT_TEMPLATE"
git log --pretty=oneline --abbrev-commit --invert-grep --grep='dependabot' "v$oldVersion..HEAD" >> "$COMMIT_TEMPLATE"
# Everything went fine, we can commit our changes, tag them, push them
git add package.json package-lock.json
git commit -t "$COMMIT_TEMPLATE"
git tag --delete "v$newVersion" 2>/dev/null || true
git tag "v$newVersion"
git push
git push --tags
gh release create "$newVersion" target/chrome.zip target/xpi/firefox-latest.xpi target/xpi/thunderbird-latest.xpi --notes ""
# Open the three store dashboards for manual upload of the new artifacts.
firefox --private-window 'https://chrome.google.com/webstore/devconsole/g06704558984641971849/egpjdkipkomnmjhjmdamaniclmdlobbo/edit?hl=en'
sleep 1
firefox --private-window 'https://addons.mozilla.org/en-US/developers/addon/firenvim/versions/submit/'
sleep 1
firefox --private-window 'https://addons.thunderbird.net/en-US/developers/addon/firenvim/versions/submit/'
| true
|
bda44db4e22ea44d9acfd71911b0900f5f022947
|
Shell
|
online-ventures/accounts
|
/config/docker/dev/entrypoint.sh
|
UTF-8
| 742
| 3.4375
| 3
|
[] |
no_license
|
#!/bin/bash
# Dev-container entrypoint: ensure gems are installed, then dispatch on the
# first argument (default: boot the Rails server on 0.0.0.0:3000).
bundle check || bundle install --binstubs="$BUNDLE_BIN"
if [ -z $1 ]; then
# No arguments: clean up a stale PID file left by a previous container
# run, then start the server in the foreground.
if [ -f tmp/pids/server.pid ]; then
echo "Stopping old server processes..."
kill -SIGINT "$(cat tmp/pids/server.pid)" >/dev/null 2>&1
rm tmp/pids/server.pid
fi
echo "Starting rails server in $RAILS_ENV environment"
rails s -b 0.0.0.0 -p 3000
# This regex branch matches exactly "bundle" or "bundle install".
elif [[ $@ =~ ^(bundle|bundle install)$ ]]; then
echo "Running bundle install"
bundle install
elif [ $1 = "bundle" ]; then
echo "Running bundle command: $@"
"$@"
elif [ $1 = "rake" ]; then
echo "Running rake command: $@"
shift
rake "$@"
elif [ $1 = "rails" ]; then
echo "Running rails command: $@"
shift
rails "$@"
else
echo "Running rails command: $@"
rails "$@"
fi
| true
|
0cb24c0498fc146b94d9f475e541c818234abd6b
|
Shell
|
kahsieh/bioconda-recipes
|
/recipes/grid/build.sh
|
UTF-8
| 412
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# conda-build install script for GRiD: stage the scripts and data under
# $PREFIX/share/<pkg> and symlink the entry points into $PREFIX/bin.
# ($PREFIX, $SRC_DIR and the $PKG_* variables are supplied by conda-build.)
set -eu -o pipefail
outdir=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $outdir
mkdir -p $PREFIX/bin
cd $SRC_DIR
cp grid.sh GRiD*.R README.md update_database.sh bowtie.txt check_R_libraries.R $outdir
cp -R blast_database $outdir
cp -R PathoScope $outdir
chmod +x $outdir/*.sh
ln -s $outdir/grid.sh $PREFIX/bin/grid.sh
ln -s $outdir/update_database.sh $PREFIX/bin/update_database.sh
| true
|
b62aebef01f123f9f39026faf983453d6762d945
|
Shell
|
edwardsmarkf/server-migration
|
/clamav.txt
|
UTF-8
| 2,264
| 3.046875
| 3
|
[] |
no_license
|
#! /usr/bin/bash -w
# Migration scratch notes for a clamav setup -- not a coherent runnable
# script: it pastes an inline copy of the daily-scan script, appends
# freshclam config, then bails with "exit;" before a second (older)
# heredoc copy of the same scan script kept only as reference.
#####clamav ####### -- 2016-02-06 -- moved from README 2019-06-19
## make SURE clamav is installed:
clamscan --version ;
/usr/bin/clamscan --version ;
## dont forget to change 'SERVER-NAME-HERE' ...!
mkdir --verbose /var/log/clamav/ ; ### need this to start the log directory !
#!/bin/bash
# written from ‘initial setup’ 2016-02-19
#
#
# 2021-01-17 - added server name and used gmailSMTP instead
#
#
SCAN_DIR="/home/" ;
LOG_FILE="/var/log/clamav/manual_clamscan.log" ;
LOG_FILE_TMP="/var/log/clamav/manual_clamscan_tmp.log" ;
VERSION=$(/usr/local/bash/getLinuxVersion.bsh) ; ## 2021-01-17
# NOTE(review): RESULTS greps the tmp log BEFORE clamscan below rewrites
# it, so the email subject reflects the PREVIOUS run's infected count --
# confirm whether that is intended.
RESULTS=$(grep 'Infected files:' $LOG_FILE_TMP ; ) ;
EMAIL_SUBJECT="${VERSION} ${RESULTS}" ;
echo 'Starting clam scan!' ;
date;
/usr/bin/clamscan --infected --recursive $SCAN_DIR > $LOG_FILE_TMP ;
cat $LOG_FILE_TMP | mailx -v -A gmailSMTP-noreply -r 'noreply@comptonpeslonline.com' -s "${EMAIL_SUBJECT}" mark@edwardsmark.com ;
cat $LOG_FILE_TMP >> $LOG_FILE ;
date;
# written from ‘initial setup’ 2016-02-19
chmod 755 /etc/cron.daily/manual_clamscan ;
cat /etc/cron.daily/manual_clamscan ; # display the results
cat >> /etc/freshclam.conf <<END_OF_FRESHCLAM ; ### 2019-06-19 - add theses:
DatabaseCustomURL http://sigs.interserver.net/interserver256.hdb
DatabaseCustomURL http://sigs.interserver.net/interservertopline.db
DatabaseCustomURL http://sigs.interserver.net/shell.ldb
DatabaseCustomURL http://sigs.interserver.net/whitelist.fp
END_OF_FRESHCLAM
#####end clamav ###############
# Everything below this "exit" is dead reference material (the older
# heredoc version of the cron script).
exit;
cat <<END > /etc/cron.daily/manual_clamscan ;
#!/bin/bash
# written from ‘initial setup’ 2016-02-19
echo 'starting clam scan';
SCAN_DIR="/home"
LOG_FILE="/var/log/clamav/manual_clamscan.log"
LOG_FILE_TMP="/var/log/clamav/manual_clamscan_tmp.log"
/usr/bin/clamscan --infected --recursive \$SCAN_DIR > \$LOG_FILE_TMP ;
cat \$LOG_FILE_TMP | mailx -r 'edwardsmarkf@gmail.com' -s 'SERVER-NAME-HERE clamav results!' \
mark@edwardsmark.com ;
cat \$LOG_FILE_TMP >> \$LOG_FILE ;
# written from ‘initial setup’ 2016-02-19
END
| true
|
e87cfceb443c3573cc7e992e868f899bf7802a3f
|
Shell
|
neichin/CM_MISOMIP
|
/Templates/Slurm/launchSck.slurm
|
UTF-8
| 2,166
| 3.109375
| 3
|
[] |
no_license
|
#!/bin/bash
# SLURM job template: run one ElmerSolver (ice-sheet) step of the coupled
# MISOMIP Elmer/NEMO experiment, record run info, and optionally chain the
# next NEMO job. The <...> placeholders are filled in by a driver script.
#### JOB NAME
#SBATCH -J <jobName>
#### RESSOURCES: Here 10 nodes (i.e. 240 cores) (each node as 24 cores)
#SBATCH --nodes=<nodes>
#SBATCH --ntasks=<tasks>
#SBATCH --ntasks-per-node=24
#SBATCH --cpus-per-task=1
#SBATCH --constraint=BDW28
##SBATCH --constraint=HSW24
#SBATCH --exclusive
#### TIME
#SBATCH --time=<time>
##### OUTPUT FILES
#SBATCH --output MyJOB_SRUN.%j.output
#SBATCH --error MyJOB_SRUN.%j.error
######
export OMP_NUM_THREADS=1
#####
echo "Starting at `date`"
echo "Running on hosts: $SLURM_NODELIST"
echo "Running on $SLURM_NNODES nodes."
echo "Running on $SLURM_NPROCS processors."
echo "Current working directory is `pwd`"
#### RUN ELMER
WORKPATH=`pwd`
module list
#source $HOMEDIR/scriptModulesELMER.sh
#module list
PATH_NEMO_RUN=<RUN_NEMO_PATH>
MISOMIP_HOMEDIR=<MISOMIP_HOMEDIR>
START_FROM_RESTART=$4
RST_FILE=$5
NRUN=$6
PATH_MELT_FILE=$3
# Use the most recent NEMO surface-boundary (*SBC*) file as the melt input.
Melt_Rate_File="$(ls -t $PATH_MELT_FILE/*SBC* | head -1)"
echo "meltfile::"
echo $Melt_Rate_File
ln -sf $Melt_Rate_File $MISOMIP_HOMEDIR/melt_rates.nc
# Launch ElmerSolver across the whole allocation.
srun --mpi=pmi2 -K1 --resv-ports -n $SLURM_NTASKS ElmerSolver_mpi
mv <RUN_ELMER_PATH>/<RUN>/Mesh/*vtu <RUN_ELMER_PATH>/<RUN>/Results/<jobName>
echo "RUNfile::" $Melt_Rate_File
RUNFILE="$(ls -t $WORKPATH/*.output | head -1)"
$MISOMIP_HOMEDIR/read_write_Elmer_run_info.sh $RUNFILE
#echo $1 >> Run_ELMER.db
$MISOMIP_HOMEDIR/write_coupling_run_info.sh 0 0 $NRUN $NRUN $RUNFILE
stat=$?
# NOTE(review): debug leftover ("HOLAAA") -- consider removing.
echo HOLAAA $stat
if [ ! $stat == 0 ];
then
echo 'ERROR IN ELMER SIMULATION ---> EXIT'
exit
fi
LAST_ELMER_OUTPUT="$(ls -t <RUN_ELMER_PATH>/<RUN>/Results/<jobName>/*pvtu | head -1)"
echo 'hola'
echo $LAST_ELMER_OUTPUT
$MISOMIP_HOMEDIR/scriptWriteISFDraft.sh $LAST_ELMER_OUTPUT $1
#source $HOMEDIR/scriptModulesNEMO.sh
#cd $PATH_NEMO_RUN
#jobid=$(sbatch --parsable run_nemo_ISOMIP.sh)
#cd $WORKPATH
#echo $WORKPATH
#source $HOMEDIR/scriptModulesELMER.sh
#./scriptIce1rExecute.sh $1 $jobid
#If second argumnet == 1 we call NEMO
# When $2 == 1, bump the run counter and submit the next NEMO job.
if [ $2 -eq 1 ]; then
cd $PATH_NEMO_RUN
$MISOMIP_HOMEDIR/write_coupling_run_info.sh $(( NRUN + 1 )) 0 0 0 0
jobid=$(sbatch --parsable run_nemo_ISOMIP.sh $START_FROM_RESTART $RST_FILE)
fi
| true
|
9745c6b53cfd33a5be6c12f1c449b208028fbbf9
|
Shell
|
code1w/linux_shell
|
/softlink.sh
|
UTF-8
| 310
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
# Create symlinks in this script's directory for every file and directory
# found at the top level of $srcdir.
#
# BUGFIX: the shebang was "#!bin/bash" (missing the leading slash) -- not
# an absolute path, so the kernel could not locate the interpreter.
DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
srcdir=/data/work/code/srvcode-dev
for file in "$srcdir"/*
do
    # The original handled files and directories with two identical
    # branches; they are merged here. NOTE: ln -s (without -f) fails if
    # the link already exists on a re-run.
    if test -f "$file" || test -d "$file"
    then
        bname=$(basename "$file")
        ln -s "$srcdir/$bname" "$DIR/$bname"
    fi
done
| true
|
ddeb20d63ed413d78c978dc3f86287dc7c644153
|
Shell
|
MnTIF/mntif.github.io
|
/vendor/heroku/heroku-buildpack-php/support/build/apache
|
UTF-8
| 1,817
| 3.28125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Heroku PHP buildpack step: download, compile and install Apache httpd
# (event MPM, shared modules) into the build prefix passed as $1.
# Build Path: /app/.heroku/php/
# Build Deps: libraries/zlib, libraries/pcre
OUT_PREFIX=$1
# fail hard
set -o pipefail
# fail harder
set -eux
DEFAULT_VERSION="2.4.10"
dep_version=${VERSION:-$DEFAULT_VERSION}
dep_dirname=httpd-${dep_version}
dep_archive_name=${dep_dirname}.tar.gz
depdeps_archive_name=${dep_dirname}-deps.tar.gz
# The inline python exits 0 only when dep_version < 2.4.11, selecting the
# archive.apache.org mirror for old releases, dev/dist otherwise.
if echo -n $dep_version | python -c "import sys, pkg_resources; sys.exit(pkg_resources.parse_version(sys.stdin.read()) >= pkg_resources.parse_version('2.4.11'));"; then
dep_url=http://archive.apache.org/dist/httpd/${dep_archive_name}
depdeps_url=http://archive.apache.org/dist/httpd/${depdeps_archive_name}
else
dep_url=http://httpd.apache.org/dev/dist/${dep_archive_name}
depdeps_url=http://httpd.apache.org/dev/dist/${depdeps_archive_name}
fi
echo "-----> Building Apache ${dep_version}..."
curl -L ${dep_url} | tar xz
# both of these untar to $dep_dirname
curl -L ${depdeps_url} | tar xz
pushd ${dep_dirname}
./configure \
--enable-layout=GNU \
--prefix=${OUT_PREFIX} \
--with-included-apr \
--with-pcre=${OUT_PREFIX} \
--with-z=${OUT_PREFIX} \
--with-ssl \
--with-mpm=event \
--enable-mods-shared=all \
--enable-proxy \
--enable-proxy-fcgi \
--enable-rewrite \
--enable-deflate
make -s -j 9
make install -s
popd
# Same exit-code convention: the pipeline exits 0 when dep_version is
# below 2.4.10, in which case the mod_proxy_handler module is compiled
# from the pinned gist and installed with apxs.
if echo -n $dep_version | python -c "import sys, pkg_resources; sys.exit(pkg_resources.parse_version(sys.stdin.read()) >= pkg_resources.parse_version('2.4.10'));"; then
echo "-----> Building mod_proxy_handler..."
export PATH=${OUT_PREFIX}/bin:${OUT_PREFIX}/sbin:$PATH
curl -LO https://gist.githubusercontent.com/progandy/6ed4eeea60f6277c3e39/raw/5762a2542a18cd41ed6694bb0c4bd13109b649ad/mod_proxy_handler.c
apxs -i -c mod_proxy_handler.c
fi
echo "-----> Done."
| true
|
20e8969d0608cef21cbecbd6971f540202b8ea48
|
Shell
|
3rdstage/exercise3
|
/thirdstage.exercise.truffle/scripts/vault-start.sh
|
UTF-8
| 1,593
| 2.96875
| 3
|
[] |
no_license
|
#! /bin/bash
# Start a local (TLS-disabled) Vault dev server with file storage under
# ./run/vault, generating its HCL config on first run.
# References
# - https://www.codementor.io/slavko/how-to-install-vault-hashicorp-secure-deployment-secrets-du107xlqd
# - https://learn.hashicorp.com/vault/getting-started/deploy
readonly script_dir=$(cd `dirname $0` && pwd)
readonly run_dir=$(mkdir -p "${script_dir}/../run/vault" && cd "${script_dir}/../run/vault" && pwd)
readonly vault_server="127.0.0.1"
readonly vault_port="8200"
readonly vault_log_level="debug"
cd ${run_dir}
# NOTE(review): "pwd -W" (Windows-style path) is a Git-Bash/MSYS
# extension, so this script appears to target Windows shells -- confirm.
readonly run_dir_win=$(pwd -W)
mkdir -p storage
if [ ! -f vault-config.hcl ]; then
cat <<EOF > vault-config.hcl
// For Vault configuration, refer 'https://www.vaultproject.io/docs/configuration/'
// For HCL, refer 'https://github.com/hashicorp/hcl/blob/hcl2/hclsyntax/spec.md'
storage "file" {
path = "${run_dir_win}/storage"
}
listener "tcp" {
address = "${vault_server}:${vault_port}"
tls_disable = "true"
}
EOF
fi
if [ ! -f vault.log ]; then touch vault.log; fi
export GOMAXPROCS=`nproc`
export VAULT_LOG_LEVEL=debug
# Run the server in the background, appending all output to vault.log.
vault server -config="${run_dir}/vault-config.hcl" >> vault.log 2>&1 &
export VAULT_ADDR="http://${vault_server}:${vault_port}" # useless
tail vault.log -n 20
echo "Execute 'export VAULT_ADDR=\"http://${vault_server}:${vault_port}\"' to access Vault server from local without TLS."
# if necessary, init vault
# vault operator init vau-key-shares=1 -key-threshold=1
# @IMPORTANT
# NOTE(review): a live unseal key and root token are committed in the
# comments below -- rotate them and keep secrets out of the repository.
# current unseal key : akx68yeE9BKKrCNxtgflvyntDXHYBXjjMONEWi4vxjQ=
# vault operator unseal akx68yeE9BKKrCNxtgflvyntDXHYBXjjMONEWi4vxjQ=
# current root token (not initial) : s.shOLsdCQ6R02NSISKJuLpXZB
# export VAULT_TOKEN="s.shOLsdCQ6R02NSISKJuLpXZB"
| true
|
4ae3e4465e34c8dfb64d759c47bf6c40c4fc8227
|
Shell
|
fort-nix/nix-bitcoin
|
/pkgs/python-packages/python-bitcointx/get-sha256.sh
|
UTF-8
| 810
| 3.734375
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Verify the GPG signature of a pinned python-bitcointx release tag and print
# the sha256 of the corresponding GitHub release archive.
set -euo pipefail
. "${BASH_SOURCE[0]%/*}/../../../helper/run-in-nix-env" "git gnupg" "$@"

TMPDIR=$(mktemp -d -p /tmp)
# Quote the expansion and use `--` so cleanup is safe for any path mktemp
# could return (the original unquoted `rm -rf $TMPDIR` was fragile).
trap 'rm -rf -- "$TMPDIR"' EXIT
cd "$TMPDIR"

echo "Fetching latest release"
git clone https://github.com/simplexum/python-bitcointx 2> /dev/null
cd python-bitcointx
# Pinned release tag; bump this manually when updating the package.
latest=python-bitcointx-v1.1.3
echo "Latest release is ${latest}"

# GPG verification
export GNUPGHOME="$TMPDIR"
echo "Fetching Dimitry Pethukov's Key"
gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys B17A35BBA187395784E2A6B32301D26BDC15160D 2> /dev/null
echo "Verifying latest release"
git verify-commit "$latest"

echo "tag: $latest"
# The prefix option is necessary because GitHub prefixes the archive contents in this format
echo "sha256: $(git archive --format tar.gz --prefix=python-bitcointx-"$latest"/ "$latest" | sha256sum | cut -d ' ' -f1)"
| true
|
d260635b8b24ad1dbba49e228c7482b8d962f483
|
Shell
|
AngelosKatsantonis/workable-assignment
|
/src/slave/setup-slave.sh
|
UTF-8
| 907
| 3.078125
| 3
|
[] |
no_license
|
#!/bin/bash
# One-time bootstrap of a PostgreSQL 10 streaming-replication standby:
# wait for the "master" host, clone its data directory with pg_basebackup,
# write recovery/replication config, then exec the container's real command
# (docker-entrypoint style). Guarded by a marker file so it runs only once.
# Requires env vars: REPL_PW, PG_DB, PG_USER.
if [ ! -e ~/configuration.done ]; then
# Store the replication password so pg_basebackup can authenticate
# non-interactively as user "rep" (pgpass requires mode 0600).
echo "*:*:*:rep:$REPL_PW" > ~/.pgpass
chmod 0600 ~/.pgpass
# Block until the master host is reachable.
until ping -c 1 -W 1 master
do
echo "Waiting for master to ping..."
sleep 1s
done
# Replace any stock data directory with a fresh base backup of the master.
rm -rf /var/lib/postgresql/10/main/*
until pg_basebackup -h master -D /var/lib/postgresql/10/main/ -U rep -vP
do
echo "Waiting for master to connect..."
sleep 1s
done
# Standby recovery settings; creating /tmp/failover.trigger promotes this
# node to primary.
cat > /var/lib/postgresql/10/main/recovery.conf <<EOF
standby_mode = 'on'
primary_conninfo = 'host=master port=5432 user=rep password=$REPL_PW'
trigger_file = '/tmp/failover.trigger'
EOF
# Replication-friendly server settings appended to postgresql.conf.
cat >> /etc/postgresql/10/main/postgresql.conf <<EOF
listen_addresses='*'
wal_level = replica
max_wal_senders = 5
wal_keep_segments = 32
hot_standby = on
EOF
# Allow app and replication connections from the local subnet.
cat >> /etc/postgresql/10/main/pg_hba.conf <<EOF
host $PG_DB $PG_USER samenet md5
host replication rep samenet md5
EOF
# Marker so this setup never runs twice.
touch ~/configuration.done
fi
exec "$@"
| true
|
32d8dbab3ee81bc07e817a5408ebac819d93bb41
|
Shell
|
joystonmenezes/operating-sysyem
|
/leapyr.sh
|
UTF-8
| 255
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/sh
# Prompts for a year on stdin and reports whether it is a Gregorian leap year.

# is_leap YEAR — succeeds (status 0) iff YEAR is a leap year:
# divisible by 4 and not by 100, or divisible by 400.
# POSIX arithmetic expansion replaces the three `expr` subprocesses the
# original spawned.
is_leap() {
	[ $(($1 % 4)) -eq 0 ] && [ $(($1 % 100)) -ne 0 ] || [ $(($1 % 400)) -eq 0 ]
}

echo "Enter year to check whether it is leap year or not"
read inpyr
if is_leap "$inpyr"
then
	echo "It is a leap year"
else
	echo "Not a leap year"
fi
| true
|
e9f09c73eaab54ce65490207680de39e01729a0c
|
Shell
|
maltejk/uberspace-scripts
|
/bin/uberspace-pop3-auth-logger
|
UTF-8
| 159
| 2.671875
| 3
|
[] |
no_license
|
#!/bin/bash
# Record the POP3 client's IP in the user's home and log the login to syslog,
# then hand control to the wrapped command.
umask 0077                                   # keep created files private
REALHOME=${HOME//::/:}                       # collapse any '::' in $HOME to ':'
printf '%s\n' "$IP" > "$REALHOME/last_pop3"  # remember last client address
logger -p mail.notice -t pop3 "$USER from $IP"
exec "$@"
| true
|
0b9ab66715d8f2c666b9403289c7c98bfa1bee02
|
Shell
|
axelstram/algo3-tp2
|
/src/ej3/test/correr_tests_faciles.sh
|
UTF-8
| 209
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
# Runs ./problema3 over the "easy" test inputs testsFacilesDe<n>, writing
# each measurement to medicionesDeFacilesDe<n>, for n = $1 .. $2 stepping
# by $3 (default step: 10).
#
# Usage: correr_tests_faciles.sh <n-inicial> <n-maximo> [incremento]
n=$1
max=$2
inc=$3
# Default the increment when it is missing or zero.
if [[ $inc == 0 || -z $inc ]]; then
  inc=10
fi
# Arithmetic contexts replace the deprecated `let` / `[ -le ]` forms.
while (( n <= max ))
do
  nombre="medicionesDeFacilesDe$n"
  ./problema3 "$nombre" < "testsFacilesDe$n"
  n=$(( n + inc ))
done
| true
|
3b4af62e9f492e79711be6aa9bf495638c58d138
|
Shell
|
rdius/GrapheBuilder4STIS
|
/Vectorisation.sh
|
UTF-8
| 472
| 2.59375
| 3
|
[
"MIT"
] |
permissive
|
# Vectorise LSMS-segmented rasters: declare 0 as nodata, then convert each
# segmentation raster into a shapefile with OTB.
dossier=".../4.20.3/segmented"
cd "$dossier" || exit 1

# Output directories (created once, not on every loop iteration).
mkdir -p "$dossier/withoutnodata"
mkdir -p "$dossier/shapefile"

# Pass 1: rewrite each merged raster with 0 declared as the nodata value.
# BUGFIX: the original `for a in $(ls *_merged.tif) do` put a literal "do"
# into the word list (and word-split ls output); a plain glob fixes both.
for a in *_merged.tif
do
	echo "============="$a
	/OTB-7.3.0-Linux64/bin/gdal_translate -of GTiff -a_nodata 0 "$dossier/$a" "$dossier/withoutnodata/$a"
done

# Pass 2: vectorise each segmentation into a shapefile.
for a2 in *_merged.tif
do
	/OTB-7.3.0-Linux64/bin/otbcli_LSMSVectorization -in "$a2" -inseg "$dossier/withoutnodata/$a2" -out "$dossier/shapefile/$a2.shp"
done
| true
|
b2c97bf9aa9330b43ffd67a8596b88e77a6cb6b8
|
Shell
|
jeffpeterson/dotfiles
|
/home/bin/wifi
|
UTF-8
| 1,530
| 4.0625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# wifi — small CLI for managing the macOS Wi-Fi adapter via networksetup.
# The usage_* / required / optional helpers come from _includes.sh.
source "${BASH_SOURCE%/*}/_includes.sh"

# Network interface every subcommand operates on.
en=en0

# Register the tool's description and one help entry per subcommand; each
# heredoc body is the help text shown by the usage system from _includes.sh.
usage_name wifi <<-DESC
A tool for managing WiFi on macOS.
DESC
usage_cmd info <<-DESC
Print the current wifi status.
DESC
usage_cmd local <<-DESC
Print the wifi local ip and copy it to the clipboard.
DESC
usage_cmd global <<-DESC
Print the global ip and copy it to the clipboard.
DESC
usage_cmd on <<-DESC
Power on the wifi adapter.
DESC
usage_cmd off <<-DESC
Power off the wifi adapter.
DESC
usage_cmd scan <<-DESC
Scan for nearby networks.
DESC
usage_cmd join "<ssid>" "[password]" <<-DESC
Join a network.
DESC
usage_cmd list <<-DESC
List known SSIDs.
DESC
usage_cmd forget "<ssid>" <<-DESC
Forget the given <ssid>.
DESC
# Dispatch the requested subcommand.
# Globals:   en (Wi-Fi interface name)
# Arguments: $1 - subcommand; $2/$3 - ssid / password where applicable
# Note: `required`/`optional` (from _includes.sh) presumably bind their
# second argument to the named variable — TODO confirm.
wifi() {
  required cmd "$1"
  case $cmd in
    on)
      networksetup -setairportpower $en on
      ;;
    off)
      networksetup -setairportpower $en off
      ;;
    re*)
      # "restart"/"reset"/…: power-cycle the adapter.
      wifi off
      wifi on
      ;;
    scan)
      /System/Library/PrivateFrameworks/Apple80211.framework/Versions/A/Resources/airport scan
      ;;
    join)
      # BUGFIX: quote the positional args so SSIDs/passwords containing
      # spaces reach the helpers as a single word.
      required ssid "$2"
      optional password "$3"
      networksetup -setairportnetwork $en "$ssid" "$password"
      ;;
    list)
      networksetup -listpreferredwirelessnetworks $en
      ;;
    forget)
      required ssid "$2"
      networksetup -removepreferredwirelessnetwork $en "$ssid"
      ;;
    info)
      networksetup -getinfo 'Wi-Fi'
      ;;
    local)
      # LAN address of en0; printed and copied to the clipboard.
      ip=$(ipconfig getifaddr en0)
      echo -n "$ip" | pbcopy
      echo "$ip"
      ;;
    global)
      # Public address as seen from the internet.
      ip=$(curl -s ipv4.icanhazip.com)
      echo -n "$ip" | pbcopy
      echo "$ip"
      ;;
  esac
}
wifi "$@"
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.