blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
4da3191c9610d74ed81fd1ded8667c51cfd21b04 | Shell | thewtex/python-zstandard | /ci/travis-build.sh | UTF-8 | 436 | 2.96875 | 3 | [
"BSD-3-Clause"
] | permissive | #!/usr/bin/env bash
# Travis CI build dispatcher: picks a build strategy from $BUILDMODE.
# Expects to run from the repository root on a Travis worker.
set -ex
# CONDA mode: build the conda recipe and collect the resulting packages.
if [ "${BUILDMODE}" = "CONDA" ]; then
conda build ci/conda
mkdir -p dist
# Copy the built conda packages out of the miniconda build tree.
cp -av /home/travis/miniconda/conda-bld/*/*.tar.bz2 dist/
elif [ "${BUILDMODE}" = "CIBUILDWHEEL" ]; then
# cibuildwheel reads $PIP to pick a pip executable; macOS workers here
# apparently ship Python 2, hence pip2 -- TODO confirm still needed.
export PIP=pip
if [ $(uname) = "Darwin" ]; then
export PIP=pip2
fi
cibuildwheel --output-dir dist
# Archive the wheels and upload them to the file.io sharing service.
tar -zcvf dist.tar.gz dist/
curl -F file="@dist.tar.gz" https://file.io
else
# Default: run the test suite via tox.
tox
fi
| true |
2a15718c4d6d5af69b54f635bf1aeecbf0f41264 | Shell | PediatricOpenTargets/OpenPedCan-api | /tests/linters/run_r_lintr.sh | UTF-8 | 872 | 2.78125 | 3 | [] | no_license | #!/bin/bash
set -e
set -u
set -o pipefail
# Run from the directory this script lives in, then hop up to the git
# root, so every lint path below is repository-relative.
# Adapted from https://stackoverflow.com/a/3355423/4638182
cd "$(dirname "$0")" || exit
cd ../../
# Shared lintr configuration applied to every invocation below.
linters="lintr::with_defaults(object_name_linter = NULL, assignment_linter = NULL, line_length_linter = NULL, spaces_left_parentheses_linter = NULL, commented_code_linter = NULL, object_length_linter = NULL, cyclocomp_linter = lintr::cyclocomp_linter(complexity_limit = 35L))"
# Lint the entry-point file first, then each source directory in turn.
Rscript --vanilla -e "lintr::lint(filename = 'main.R', linters = ${linters})"
for lint_dir in db src tests; do
  Rscript --vanilla -e "lintr::lint_dir(path = '${lint_dir}', linters = ${linters})"
done
| true |
5c0947c8a3d9bc3e4d4bd7f3ffd3e0c607a37d09 | Shell | go-god/gmicro | /example/bin/php-generate.sh | UTF-8 | 2,403 | 3.5 | 4 | [
"MIT",
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
root_dir=$(cd "$(dirname "$0")"; cd ..; pwd)
protoExec=$(which "protoc")
if [ -z $protoExec ]; then
echo 'Please install protoc!'
echo "Please look readme.md to install proto3"
echo "if you use centos7,please look docs/centos7-protoc-install.md"
exit 0
fi
# grpc_php_plugin check.
grpc_php_plugin=$(which "grpc_php_plugin")
if [ -z $grpc_php_plugin ]; then
echo 'Please install grpc_php_plugin!'
echo "Please look https://github.com/daheige/go-proj/blob/master/docs/centos7-protoc-install.md to install grpc_php_plugin tool"
exit 0
fi
# protos dir
protos_dir=$root_dir/protos
#you can change this dir
php_client_dir=$root_dir/clients/php
mkdir -p $php_client_dir
#delete old pb code
rm -rf $php_client_dir/App
echo "\n\033[0;32mGenerating codes...\033[39;49;0m\n"
echo "generating php stubs..."
#generate php code
cd $protos_dir
for file in $protos_dir/*.proto; do
echo "generating php stubs from: $file"
$protoExec --proto_path=$protos_dir --php_out=$root_dir/clients/php/ --grpc_out=$root_dir/clients/php/ --plugin=protoc-gen-grpc=$grpc_php_plugin $file
echo "\t\033[0;32m[DONE]\033[39;49;0m\n"
done
#Archive files generated by php grpc。
mv $root_dir/clients/php/GPBMetadata $php_client_dir/App/Grpc/
#To avoid namespace GPBMetadata conflicts of composer install grpc protobuf
#Here you need to add GPBMetadata to the namespace
#Add the namespace App\Grpc in front of GPBMetadata
#linux system please open this note
#sed -i 's/GPBMetadata/App\\\Grpc\\\GPBMetadata/g' `grep GPBMetadata -rl $php_client_dir/App/Grpc/`
#sed -i 's/\\App\\Grpc\\GPBMetadata\\Google/\/\/ \\App\Grpc\\\GPBMetadata\\\Google/g' `grep GPBMetadata -rl $php_client_dir/App/Grpc/`
os=`uname -s`
if [ $os == "Darwin" ];then
# mac os LC_CTYPE config
export LC_CTYPE=C
# mac os
sed -i "" 's/GPBMetadata/App\\\Grpc\\\GPBMetadata/g' `grep GPBMetadata -rl $php_client_dir/App/Grpc/`
sed -i "" 's/\\App\\Grpc\\GPBMetadata\\Google/\/\/ \\App\Grpc\\\GPBMetadata\\\Google/g' `grep GPBMetadata -rl $php_client_dir/App/Grpc/`
else
sed -i 's/GPBMetadata/App\\\Grpc\\\GPBMetadata/g' `grep GPBMetadata -rl $php_client_dir/App/Grpc/`
sed -i 's/\\App\\Grpc\\GPBMetadata\\Google/\/\/ \\App\Grpc\\\GPBMetadata\\\Google/g' `grep GPBMetadata -rl $php_client_dir/App/Grpc/`
fi
echo "\n\033[0;32mGenerate codes successfully!\033[39;49;0m\n"
exit 0
| true |
a1ce6e7ed569a37dff76d594fc6d2ca1272597ce | Shell | lentzi90/scheduler-timer | /code/timer.sh | UTF-8 | 1,110 | 3.875 | 4 | [] | no_license | #!/bin/bash
# timer.sh
#
# A timer script to measure the differences between schedulers/policies.
#
# Author: Lennart Jern (ens16ljn@cs.umu.se)
#
# For 1..10 worker threads, runs ./work ten times under each scheduling
# policy and records the wall-clock time of every repetition as one CSV
# row (columns: Normal,Batch,Idle,FIFO,RR).
for THREADS in $(seq 1 10)
do
# CSV header naming the five scheduling policies measured below.
DATA="Normal,Batch,Idle,FIFO,RR"
echo "Running with $THREADS threads"
# Time the commands 10 times
for i in $(seq 1 10)
do
LINE=""
# Policies: n(ormal), b(atch), i(dle), f(ifo), r(ound-robin)
for POLICY in n b i f r
do
# Set policy and number of threads
FLAGS="-p$POLICY -j$THREADS"
# Program output goes to a per-policy log; only the timing is kept here.
COMMAND="./work $FLAGS >> ../data/threads$THREADS$POLICY.log"
# Run the command and capture the time: TIMEFORMAT='%5R' makes `time`
# print only the real time, which arrives on stderr (hence 2>&1).
# NOTE(review): relies on /bin/sh honoring TIMEFORMAT (i.e. being
# bash-compatible) -- confirm on the target system.
t="$(sh -c "TIMEFORMAT='%5R'; time $COMMAND" 2>&1)"
# Build the CSV line; the first column gets no leading comma.
if [ "$POLICY" = "n" ]; then
LINE="$t"
else
LINE="$LINE,$t"
fi
done
DATA=$DATA$'\n'$LINE
# A little progress report
echo "Run $i done."
done
# Write data to a file
echo "$DATA" > "../data/data$THREADS.csv"
# Hand result files back to the author's user -- presumably the script
# runs as root to be allowed to set realtime policies; TODO confirm,
# the owner name is hard-coded.
chown lennart ../data/threads*
chown lennart ../data/data*
done
| true |
49b33d988e7eb310ed45b9d5f9f359f3bd3ec8b5 | Shell | sshaoshuai/OpenSelfSup | /benchmarks/srun_test_svm_epoch.sh | UTF-8 | 745 | 3.296875 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Evaluate SVM benchmarks on features extracted from a given training
# checkpoint, dispatched through SLURM (srun).
# Usage: srun_test_svm_epoch.sh PARTITION CFG EPOCH [FEAT_LIST] [GPUS]
set -e
set -x
PARTITION=$1
CFG=$2
EPOCH=$3
FEAT_LIST=$4 # e.g.: "feat5", "feat4 feat5". If leave empty, the default is "feat5"
GPUS=${5:-8}
# Derive the work dir from the config path: strip the file extension and
# swap the configs/ prefix for work_dirs/, keeping a trailing slash.
WORK_DIR=$(echo ${CFG%.*} | sed -e "s/configs/work_dirs/g")/
# Bail out early if the requested checkpoint was never produced.
if [ ! -f $WORK_DIR/epoch_${EPOCH}.pth ]; then
echo "ERROR: File not exist: $WORK_DIR/epoch_${EPOCH}.pth"
exit
fi
mkdir -p $WORK_DIR/logs
echo "Testing checkpoint: $WORK_DIR/epoch_${EPOCH}.pth" 2>&1 | tee -a $WORK_DIR/logs/eval_svm.log
# Extract features with the checkpoint, then run the full and low-shot
# SVM evaluations over the extracted features.
bash tools/srun_extract.sh $PARTITION $CFG $GPUS $WORK_DIR --checkpoint $WORK_DIR/epoch_${EPOCH}.pth
srun -p $PARTITION bash benchmarks/svm_tools/eval_svm_full.sh $WORK_DIR "$FEAT_LIST"
srun -p $PARTITION bash benchmarks/svm_tools/eval_svm_lowshot.sh $WORK_DIR "$FEAT_LIST"
| true |
1304b6166688f5697950fc7e1b262a825927f928 | Shell | ImageProcessing-ElectronicPublications/jpeg-recompress | /test/test.sh | UTF-8 | 295 | 3.390625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
set -e
# Exercise ../jpeg-recompress against a corpus of sample images, writing
# the recompressed copies under test-output/.
mkdir -p test-output
# Fetch and unpack the test corpus on first run only.
if [ ! -d test-files ]; then
curl -O -L https://www.dropbox.com/s/hb3ah7p5hcjvhc1/jpeg-archive-test-files.zip
unzip jpeg-archive-test-files.zip
fi
for file in test-files/*; do
# Quote the command substitution and its argument so filenames with
# spaces or glob characters survive intact (the original left $file
# unquoted inside backticks).
../jpeg-recompress "$file" "test-output/$(basename "$file")"
done
| true |
40c412f0f4d44531440b7b5dd7df309c701784f7 | Shell | netman2k/docker-prometheus-swarm | /docker-socat-entrypoint.sh | UTF-8 | 178 | 2.609375 | 3 | [] | no_license | #!/bin/sh
# Relay container port 4999 to the same port on the default gateway --
# presumably the Docker host -- so a host-side service is reachable
# from inside the container.
host_ip=$(/sbin/ip route show default | /usr/bin/awk '/default/ {print $3}')
# Relay target (host side) and listening port (container side).
IN="${host_ip}:4999"
OUT="4999"
# -d -d: verbose logging; fork: one child per connection;
# reuseaddr: allow fast restarts without waiting out TIME_WAIT.
socat -d -d tcp-listen:$OUT,fork,reuseaddr tcp-connect:$IN
| true |
73b215ca2b64761ec5d6c20e9718e64df052d94e | Shell | Ali-Parandeh/Data_Science_Playground | /Datacamp Assignments/Data Engineer Track/5. Introduction to Shell for Data Science/35_nano.sh | UTF-8 | 585 | 3.34375 | 3 | [
"MIT"
] | permissive | # How can I edit a file?
# Unix has a bewildering variety of text editors. For this course, we will use a simple one called Nano. If you type nano filename, it will open filename for editing (or create it if it doesn't already exist). You can move around with the arrow keys, delete characters using backspace, and do other operations with control-key combinations:
# Ctrl + K: delete a line.
# Ctrl + U: un-delete a line.
# Ctrl + O: save the file ('O' stands for 'output'). You will also need to press Enter to confirm the filename!
# Ctrl + X: exit the editor.
$ nano names.txt
| true |
90fe2e97a8f11be0babd97378200fb621e2e2241 | Shell | antenore/svntogit-community | /pcbdraw/trunk/PKGBUILD | UTF-8 | 887 | 2.53125 | 3 | [] | no_license | # Maintainer: Filipe Laíns (FFY00) <lains@archlinux.org>
# Arch Linux PKGBUILD for PcbDraw, a Python tool that renders KiCAD
# boards as 2D drawings; built and installed via setuptools.
_pkgname=PcbDraw
pkgname=pcbdraw
pkgver=0.6.1
pkgrel=1
pkgdesc='Convert your KiCAD board into a nice looking 2D drawing suitable for pinout diagrams'
arch=('any')
url='https://github.com/yaqwsx/PcbDraw'
license=('MIT')
depends=('python' 'kicad' 'python-numpy' 'python-lxml' 'python-mistune' 'python-pybars3' 'python-wand' 'python-yaml')
makedepends=('python-setuptools')
# Release tarball fetched from the GitHub tag matching pkgver.
source=("$pkgname-$pkgver.tar.gz::$url/archive/v$pkgver.tar.gz")
sha512sums=('bf149f030eb2b66bc7d4fcd3c93b8775dcd5ed49975a8f648c445f72496ba970704bcbe588975efe0607cbff6ebce41382aac0ac484f87eff853c4b5cfec9bcd')
build() {
cd $_pkgname-$pkgver
python setup.py build
}
package() {
cd $_pkgname-$pkgver
# --skip-build reuses the artifacts produced by build() above.
python setup.py install --root="$pkgdir" --optimize=1 --skip-build
install -Dm 644 LICENSE "$pkgdir"/usr/share/licenses/$pkgname/LICENSE
}
# vim:set ts=2 sw=2 et:
| true |
2e601f9dc78e69aaf78757847c603a55b2b5777c | Shell | awheeler/installer | /lib/Ubuntu/installer | UTF-8 | 3,743 | 3.453125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# vim:et:ft=sh:sts=2:sw=2
PHP_SAPIS="apache2 cli"
APACHE_USER="www-data"
os_install() {
apt-get install -y $*
}
### Update repository package info ###
prepare_system() {
echo
echo "============================="
echo " Updating Repositories "
echo "============================="
echo
apt-get update -y
}
### PHP / PPA ###
install_php_core () {
apt-get install -y python-software-properties
add-apt-repository -y ppa:ondrej/php5
apt-get update && apt-get upgrade -y
apt-get install -y php5
}
### Extensions: Packages and PECL ###
install_pecl_http () {
apt-get install -y libcurl3 libcurl4-gnutls-dev
printf "\n\n\n\n" | pecl install pecl_http || true # We need to "accept" the prompts.
echo extension=http.so > /etc/php5/mods-available/http.ini
php5enmod http
}
install_pecl_rrd () {
apt-get install -y librrd-dev
pecl install rrd || true
echo extension=rrd.so > /etc/php5/mods-available/rrd.ini
php5enmod rrd
}
install_pecl_yaml () {
apt-get install -y libyaml-dev
printf "\n" | pecl install yaml || true
echo extension=yaml.so > /etc/php5/mods-available/yaml.ini
php5enmod yaml
}
install_pecl_ssh () {
apt-get install -y libssh2-1-dev
printf "\n" | pecl install ssh2-beta || true
echo extension=ssh2.so > /etc/php5/mods-available/ssh2.ini
php5enmod ssh2
}
install_pecl_shared () {
apt-get install -y build-essential php5-dev libmagic-dev php-pear
}
install_php_extension_pecls () {
install_pecl_shared
install_pecl_ssh
install_pecl_rrd
install_pecl_yaml
install_pecl_http
}
install_php_extension_packages () {
apt-get install -y php5-mysql php5-curl php-pear php5-mcrypt php5-snmp
}
### PHP CONFIGURATION OPTIONS ###
# Tune php.ini for each SAPI listed in $PHP_SAPIS:
#  - drop any disable_functions restriction,
#  - turn short_open_tag on,
#  - point date.timezone at the host's current timezone.
configure_php () {
for sapi in $PHP_SAPIS
do
echo "Removing disabled functions for $sapi"
sed -i '/^disable_functions/d' /etc/php5/$sapi/php.ini
echo "Enabling short open tags for $sapi"
sed -i -r 's/short_open_tag = .+/short_open_tag = On/g' /etc/php5/$sapi/php.ini
# Set the timezone to the system's local time.  The original command
# left a stray double quote in the replacement (producing e.g.
# 'date.timezone = UTC"', which PHP rejects); double-quote the whole
# sed program instead so $(date +%Z) expands cleanly.
# NOTE(review): `date +%Z` yields an abbreviation (e.g. CET) while PHP
# expects an Olson name such as Europe/Stockholm -- confirm upstream.
sed -i -r "s/;?date.timezone =.+/date.timezone = $(date +%Z)/g" /etc/php5/$sapi/php.ini
done
}
pre_mysql_install () {
echo mysql-server-5.5 mysql-server/root_password password $ROOT_MYSQL | debconf-set-selections
echo mysql-server-5.5 mysql-server/root_password_again password $ROOT_MYSQL | debconf-set-selections
}
post_mysql_install() {
: # Do nothing
}
pre_python_setup() {
: # Do nothing
}
# Install and configure rrdcached: pre-create its data/journal dirs on
# Ubuntu 12.04 (packaging-bug workaround), then append daemon options
# (referencing $WEB_USER, a UNIX socket, and journal/base dirs) to its
# defaults file.
configure_rrdcached() {
# Workaround for https://bugs.launchpad.net/ubuntu/+source/rrdtool/+bug/985341
if [ "$DISTRIB_RELEASE" '=' "12.04" ]; then
mkdir -p /var/lib/rrdcached/db /var/lib/rrdcached/journal
# printf %q shell-quotes the user/group names before they reach chown.
chown $(printf %q "$USER"):$(printf %q "$(groups | awk '{print $1}')") /var/lib/rrdcached/db /var/lib/rrdcached/journal
fi
apt-get install -y rrdcached
# NOTE(review): this appends on every invocation, so re-running the
# installer duplicates OPTS lines in /etc/default/rrdcached.
cat >> /etc/default/rrdcached << EOF
OPTS="-s $WEB_USER"
OPTS="\$OPTS -l unix:/var/run/rrdcached.sock"
OPTS="\$OPTS -j /var/lib/rrdcached/journal/ -F"
OPTS="\$OPTS -b /var/lib/rrdcached/db/ -B"
EOF
}
set_apache_vars() {
SCALR_SITE_PATH=/etc/apache2/sites-available/$SCALR_SITE_NAME
SCALR_APACHE_GRANT="Require all granted"
}
configure_start_apache() {
a2enmod rewrite
# Disable all Apache default sites, however they're called
a2dissite default || true
a2dissite 000-default || true
# Try adding our site, whichever configuration works
a2ensite $SCALR_SITE_NAME || mv $SCALR_SITE_PATH $SCALR_SITE_PATH.conf && a2ensite $SCALR_SITE_NAME
service apache2 restart
}
post_scalr_app_setup() {
: # Do nothing
}
pre_init_setup() {
: # Do nothing
}
# Kernel tuning: raise the SysV message-queue byte limit (kernel.msgmnb)
# via a sysctl drop-in, then apply it through the procps service.
configure_system() {
echo "kernel.msgmnb = 524288" > /etc/sysctl.d/60-scalr.conf
service procps start
}
| true |
dd380cb811868b5620188bacc802fa55433933d3 | Shell | culring/filesystem | /test1.sh | UTF-8 | 492 | 2.859375 | 3 | [] | no_license | #!/bin/sh
# Build the filesystem demo, populate a virtual disk from test1.in, and
# compare the original binaries/files with the copies extracted from it.
# Usage: ./test1.sh test | clean
# The script runs under /bin/sh, so the original bashisms ('==' inside
# [ ] and $'...' quoting) are replaced with POSIX equivalents below.
if [ "$1" = "test" ]
then
g++ fs.cpp -o fs
g++ bin.cpp -o bin
# Create the virtual disk 'vd' driven by the commands in test1.in.
./fs create vd 150000000 < test1.in
chmod +x bin2
printf '\n----------BIN (ORIGINAL)----------\n'
./bin
echo "----------BIN2 (COPIED)----------"
./bin2
echo "----------file.txt (ORIGINAL)----------"
cat file.txt
echo "----------file.txt (COPIED)----------"
cat file2.txt
fi
# Remove every artifact produced by a 'test' run.
if [ "$1" = "clean" ]
then
rm fs bin bin2 plik1.jpg plik2.jpg plik6.jpg file2.txt vd
fi
| true |
68b5869e0e9b52c2bcdc97d9786f5559ae8e5a77 | Shell | rcamposp/terminator | /ruby_friends/global_gemset_helper.sh | UTF-8 | 490 | 3.65625 | 4 | [] | no_license | #!/bin/bash
# Merge the gems listed in global_gemset.gems (next to this script) into
# RVM's global gemset file, skipping entries already present.
# Fail fast if RVM's environment variable is missing instead of writing
# to a bogus path; quote path expansions so directories with spaces work.
: "${rvm_path:?rvm_path must be set (source the RVM environment first)}"
helper_path="$(dirname "$(readlink -f "$0")")"
echo "Installing custom global_gemset ..."
echo "From $helper_path/global_gemset.gems to $rvm_path/gemsets/global.gems"
for global_gem in $(cat "$helper_path/global_gemset.gems"); do
echo -n "gem: $global_gem"
# grep -Fxq: fixed-string, whole-line, quiet -- "is it already listed?"
if grep -Fxq "$global_gem" "$rvm_path/gemsets/global.gems"; then
echo " ~~~> ALREADY configured ... skipping"
else
echo "$global_gem" >> "$rvm_path/gemsets/global.gems"
echo " ~~~> config installed"
fi
done
| true |
eeef4ff08f5f78869ffc1343a1ac961542b168db | Shell | syt0438/Study | /shell/demo_10.sh | UTF-8 | 109 | 2.71875 | 3 | [] | no_license | #!/bin/bash
# Print the working directory; when that succeeds, announce success,
# pause three seconds, and exit cleanly.
if pwd; then
  printf '%s\n' "It worked, wait 3 second exit"
  sleep 3
  exit 0
fi
| true |
50633b47510f943ddf868bff3dbef89c88144854 | Shell | scta-texts/wodehamabbreviatio | /test.sh | UTF-8 | 556 | 3.03125 | 3 | [] | no_license | for file in */*
do
# The schema family (diplomatic/critical) is encoded inside each file as
# an "lbp-<family>-1.0.0" identifier.
schema=$(grep -o "lbp-.*-1.0.0" "$file")
if [[ $file = *"transcriptions.xml"* ]]; then
# Validate transcription indexes against the TDF schema.  The original
# 'if [[ $? != 0 ]]' lacked 'then' (a syntax error) and '$status=$?'
# is an invalid assignment; '|| status=$?' keeps the intended behavior
# of remembering the last failing jing exit code.
jing ~/Desktop/tdf.rng "$file" || status=$?
echo "$file"
elif [[ $schema = *"diplomatic"* ]]; then
echo "checking diplomatic for $file"
jing ~/Desktop/diplomatic.rng "$file" || status=$?
elif [[ $schema = *"critical"* ]]; then
echo "checking critical for $file"
jing ~/Desktop/critical.rng "$file" || status=$?
fi
done
# Propagate the last validation failure; default to 0 when all passed
# (the original exited with an unset $status in that case).
exit "${status:-0}"
| true |
33bf0f2bb9bc5907fcab8bc57b6332f499d51c20 | Shell | AliSajid/coq-builder | /src/with/with.sh | UTF-8 | 2,209 | 3.546875 | 4 | [] | no_license | #! /bin/sh
TOP=@TOPDIR@
if [ $# = 0 ]
then exec 1>&2
cmd=with
echo "usage:"
echo " . $cmd \"PACKAGE ...\" modify the environment of the current shell to provide PACKAGE ..."
echo " $cmd \"PACKAGE ...\" command run command in the environment provided by PACKAGE ..."
echo " PACKAGE may be '+', which provides the following packages:"
(cd $TOP/packages && for i in * ; do [ -d "$i" ] && echo " $i"; done)
echo " packages available, with version:"
for i in $TOP/encap-@ENCAP_SER_NO@/*-*
do if [ -d "$i" ]
then echo " $(basename $i)"
fi
done
echo " PACKAGE may be specified with or with the version: e.g., 'coq83patched' or 'coq83patched-latest'"
echo " packages already provided in the current environment:"
echo " $WITH"
return 0 2>/dev/null; exit 0
fi
for j in $1
do if [ "$j" = + ]
then i=$TOP
else i="$TOP/encap-@ENCAP_SER_NO@/$j"
if [ -d "$i" ]
then :
else found=false
for k in "$i"-*
do if [ -d "$k" ]
then if $found ; then echo "package $k has multiple versions, please specify one" >&2 ; return 1 2>/dev/null ; exit 1 ; fi
found=true
i=$k
fi
done
if ! $found
then echo "package $j not found in directory $i" >&2
return 1 2>/dev/null; exit 1
fi
fi
fi
export WITH="$WITH $j"
[ -d "$i"/bin ] && PATH="$i"/bin:$PATH
[ -d "$i"/man ] && MANPATH="$i"/man:$MANPATH
[ -d "$i"/share/man ] && MANPATH="$i"/share/man:$MANPATH
[ -d "$i"/info -a ! -h "$i"/info ] && INFOPATH="$i"/info:$INFOPATH
[ -d "$i"/share/info -a ! -h "$i"/share/info ] && INFOPATH="$i"/share/info:$INFOPATH
[ -d "$i"/lib ] && LD_LIBRARY_PATH="$i"/lib:$LD_LIBRARY_PATH
[ -d "$i"/lib/pkgconfig ] && PKG_CONFIG_PATH="$i"/lib/pkgconfig:$PKG_CONFIG_PATH
[ -f "$i"/packages/coq/kernel/names.ml ] && COQHOME=$i/packages/coq
[ -f "$i"/packages/ocaml/stdlib/stream.ml ] && OCAMLHOME=$i/packages/ocaml
[ -f "$i"/packages/camlp5/lib/extfold.ml ] && CAMLP5HOME=$i/packages/camlp5
done
export PATH MANPATH LD_LIBRARY_PATH INFOPATH PKG_CONFIG_PATH COQHOME OCAMLHOME CAMLP5HOME
shift
"$@"
# Local Variables:
# compile-command: "make -C .. linkup-with"
# End:
| true |
60e8f09b384da39a49fa0d08c2e02fc5c2dadac2 | Shell | dockerimages/docker-build-s6 | /build.sh | UTF-8 | 3,042 | 3.21875 | 3 | [] | no_license | #!/usr/bin/env bash
set -e
set -x
# make version (for skarnet)
make_version=4.1
# skarnet versions
musl_version=1.0.5
skalibs_version=2.2.1.0
execline_version=2.0.2.0
s6_version=2.1.0.1
# point to make
MAKE_4x=/usr/local/bin/make
# configures /dev/random in order to disable hangs
rm /dev/random
mknod /dev/random c 1 9
# create build dir
mkdir -p /build
# install make
cd /build
curl -R -L -O http://ftp.gnu.org/gnu/make/make-${make_version}.tar.gz
tar xf make-${make_version}.tar.gz
cd make-${make_version}
./configure
make
make install
# install musl
mkdir -p /build
cd /build
curl -R -L -O http://www.musl-libc.org/releases/musl-${musl_version}.tar.gz
tar xf musl-${musl_version}.tar.gz
cd musl-${musl_version}
CFLAGS="-fno-toplevel-reorder -fno-stack-protector" \
./configure \
--prefix=/usr/musl \
--exec-prefix=/usr \
--disable-shared
${MAKE_4x}
${MAKE_4x} install
# install skalibs
cd /build
curl -R -L -O http://skarnet.org/software/skalibs/skalibs-${skalibs_version}.tar.gz
tar xf skalibs-${skalibs_version}.tar.gz
cd skalibs-${skalibs_version}
CC="musl-gcc -static" \
./configure \
--prefix=$HOME/usr \
--disable-shared
${MAKE_4x}
${MAKE_4x} install
# install execline
cd /build
curl -R -L -O http://skarnet.org/software/execline/execline-${execline_version}.tar.gz
tar xf execline-${execline_version}.tar.gz
cd execline-${execline_version}
CC="musl-gcc -static" \
./configure \
--prefix=$HOME/usr \
--exec-prefix=$HOME/dist/execline/usr \
--with-include=$HOME/usr/include \
--with-lib=$HOME/usr/lib/skalibs \
--disable-shared
${MAKE_4x}
${MAKE_4x} install
tar \
-zcvf $HOME/dist/execline-${execline_version}-linux-amd64.tar.gz \
-C $HOME/dist/execline \
./
tar -zcvf $HOME/dist/execline-${execline_version}-linux-amd64.tar.gz $HOME/dist/execline
# install s6
cd /build
curl -R -L -O http://www.skarnet.org/software/s6/s6-${s6_version}.tar.gz
tar xf s6-${s6_version}.tar.gz
cd s6-${s6_version}
CC="musl-gcc -static" \
./configure \
--prefix=$HOME/usr \
--exec-prefix=$HOME/dist/s6/usr \
--with-include=$HOME/usr/include \
--with-lib=$HOME/usr/lib/skalibs \
--with-lib=$HOME/usr/lib/execline \
--disable-shared
${MAKE_4x}
${MAKE_4x} install
mkdir -p $HOME/dist/s6/etc
install -D -m644 $HOME/usr/etc/leapsecs.dat $HOME/dist/s6/etc/leapsecs.dat
tar \
-zcvf $HOME/dist/s6-${s6_version}-linux-amd64.tar.gz \
-C $HOME/dist/s6 \
./
# copy results
cp $HOME/dist/execline-${execline_version}-linux-amd64.tar.gz /dist
cp $HOME/dist/s6-${s6_version}-linux-amd64.tar.gz /dist
| true |
d0e1b7d5619e8b9e31a87d1d571cbe22d7735106 | Shell | RodrigoGM/LinkedQTL | /src/MergeOutputs.sh | UTF-8 | 378 | 2.625 | 3 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
# Merge per-run simulation outputs into one summary CSV per
# (summary type, trait) pair: S1/MQM summaries x q1/q2.
for SMRY in S1 MQM ; do
for Q in q1 q2 ; do
# Seed the output with the header row from one representative file,
# stripping quote characters.
head -n1 ../cf2_o/1000_CF2_${Q}_nind1000_nmar1152_${SMRY}smry.csv | sed -e 's/\"//g' > ../analysis/${SMRY}_${Q}_summary.csv
# Append data rows for every population size, dropping quotes and any
# repeated header lines (those starting with 'lci' or 'name').
for i in 250 500 1000 1500 ; do
cat ../cf2_o/*${Q}_nind${i}*${SMRY}smry.csv | sed -e 's/\"//g' -e '/^lci/d' -e '/^name/d' >> ../analysis/${SMRY}_${Q}_summary.csv
done
done
done
| true |
a1f2949e55a7d162059d26cbfbc5e75451f651a3 | Shell | srp33/GSOA | /scripts/graph | UTF-8 | 3,840 | 2.578125 | 3 | [] | no_license | #!/bin/bash
dataDir=/Users/stevep/Temp/GSOA_Data
#rm -rf Figures/*
##Rscript --vanilla code/PlotHeatmap.R Results/Simulated_Classes2_PValue.txt FALSE TRUE 1.01 "." Simulated Figures/Simulated_Classes3_PValue.pdf
Rscript --vanilla code/PlotHeatmap.R Results/Simulated_Classes3_PValue.txt FALSE TRUE 1.01 "." Simulated Figures/Simulated_Classes3_PValue.pdf
# Render a p-value heatmap for dataset $1 with plot title $2, then a
# second heatmap from the same p-values rounded/thresholded at 0.05.
function plotHeatmap {
x=$1
main="$2"
Rscript --vanilla code/PlotHeatmap.R Results/${x}_PValue.txt TRUE FALSE 0.05 REACTOME "$main" Figures/${x}_PValue.pdf
python code/RoundPValues.py Results/${x}_PValue.txt 0.05 Results/${x}_PValue_Rounded.txt
Rscript --vanilla code/PlotHeatmap.R Results/${x}_PValue_Rounded.txt TRUE TRUE 0.05 REACTOME "$main (rnd)" Figures/${x}_PValue_Rounded.pdf
}
#Rscript --vanilla code/PlotGeneSetAgreement.R Results/Benchmark/Simulated_Classes3_5folds_svmrbf_AUC.txt Results/Benchmark/Simulated_Classes3_3folds_svmrbf_AUC.txt Figures/Simulated_Classes3_5vs3folds_svmrbf_AUC.pdf "SVM (rbf) - 5 folds" "SVM (rbf) - 3 folds"
#Rscript --vanilla code/PlotGeneSetAgreement.R Results/Benchmark/Simulated_Classes3_5folds_svmrbf_AUC.txt Results/Benchmark/Simulated_Classes3_5folds_svmlinear_AUC.txt Figures/Simulated_Classes3_5folds_svmrbfvslinear_AUC.pdf "SVM (rbf) - 5 folds" "SVM (linear) - 5 folds"
#Rscript --vanilla code/PlotGeneSetAgreement.R Results/Benchmark/Simulated_Classes3_5folds_svmrbf_AUC.txt Results/Benchmark/Simulated_Classes3_3folds_svmlinear_AUC.txt Figures/Simulated_Classes3_5foldssvmrbf_3foldssvmlinear_AUC.pdf "SVM (rbf) - 5 folds" "SVM (linear) - 3 folds"
#plotHeatmap PANCAN12_BreastCancerSubtypes_RnaSeq_tumor "RNA"
#plotHeatmap PANCAN12_BreastCancerSubtypes_CNV_tumor CNV
#plotHeatmap PANCAN12_BreastCancerSubtypes_mutations_cleaned Muts
#plotHeatmap PANCAN12_CancerTypes_RnaSeq_tumor "RNA"
#Rscript --vanilla code/OverlapRoundedPValues.R "Results/PANCAN12_BreastCancerSubtypes_*_PValue_Rounded.txt" /tmp/tmp1
#Rscript --vanilla code/PlotHeatmap.R /tmp/tmp1 TRUE TRUE 0.05 REACTOME "Any significant p-value" Figures/PANCAN12_BreastCancerSubtypes_PValue_Rounded.pdf
#Rscript --vanilla code/CalculateRankPValues.R "Results/PANCAN12_BreastCancerSubtypes_*_PValue.txt" /tmp/tmp1
#Rscript --vanilla code/PlotHeatmap.R /tmp/tmp1 FALSE FALSE 0.01 REACTOME "rank" Figures/PANCAN12_BreastCancerSubtypes_CombinedRanks.pdf
#python code/RoundPValues.py /tmp/tmp1 0.01 /tmp/tmp2
#Rscript --vanilla code/PlotHeatmap.R /tmp/tmp2 FALSE TRUE 0.01 REACTOME "rank (rnd)" Figures/PANCAN12_BreastCancerSubtypes_CombinedRanks_Rounded.pdf
# Plot gene-set agreement between two data types ($1 vs $2) for the
# PANCAN12 breast-cancer-subtypes results; $3/$4 are the axis labels.
function plotGeneSetAgreement {
dataType1=$1
dataType2=$2
description1="$3"
description2="$4"
Rscript --vanilla code/PlotGeneSetAgreement.R Results/PANCAN12_BreastCancerSubtypes_${dataType1}_5folds_svmrbf_AUC.txt Results/PANCAN12_BreastCancerSubtypes_${dataType2}_5folds_svmrbf_AUC.txt Figures/PANCAN12_BreastCancerSubtypes_${dataType1}_vs_${dataType2}_Pathway_5folds_svmrbf_AUC.pdf "$description1" "$description2"
}
#plotGeneSetAgreement AgilentExpression_tumor RnaSeq_tumor "Agilent Microarray" "RNA-Sequencing"
#plotGeneSetAgreement CNV_tumor RnaSeq_tumor "DNA Copy Number" "RNA-Sequencing"
#plotGeneSetAgreement DNAMethylation_tumor RnaSeq_tumor "DNA Methylation" "RNA-Sequencing"
#plotGeneSetAgreement CNV_tumor mutations_cleaned_filtered "DNA Copy Number" "DNA Mutations"
#plotGeneSetAgreement RnaSeq_tumor mutations_cleaned_filtered "RNA-Sequencing" "DNA Mutations"
#plotGeneSetAgreement DNAMethylation_tumor CNV_tumor "DNA Methylation" "DNA Copy Number"
#plotGeneSetAgreement DNAMethylation_tumor mutations_cleaned_filtered "DNA Methylation" "DNA Mutations"
#python code/CalculateGeneLevelAgreement.py /Users/stevep/Downloads/PANCAN12.IlluminaHiSeq_RNASeqV2.geneExp.tumor_whitelist /Users/stevep/Downloads/TCGA_Breast_AgilentExpression.txt 10000
#open Figures/*
rm -f /tmp/tmp?
| true |
42110be66e300a152b0f7714b970c4f18769c725 | Shell | jamesp9/munin-plugin-test-notification | /munin_notify_rocket | UTF-8 | 1,073 | 3.1875 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Munin alert hook: forwards a notification to a Rocket.Chat incoming
# webhook, coloring the message attachment by service state.
# Args: 1=group 2=host 3=category 4=graph title 5=state; alert text on stdin.
WEBHOOK_URL="http://172.17.0.3:3000/hooks/KX5jLvSAPkrNF3iSx/HJxhtnauw6RWgHnDNA3YhTo5n4d9ZnxzeH9CmoMzHMAAEtCW"
LOG_FILE="/tmp/munin_notify_rocket.log"
# Kept for reference; the payload below hardcodes the same emoji.
ICON_EMOJI=":robot:"
GROUP="$1"
HOST="$2"
CATEGORY="$3"
GRAPH_TITLE="$4"
SERVICE_STATE="$5"
# Collapse the stdin message onto one line for the JSON payload.
INPUT=$(cat | xargs)
# Debug trace of every invocation (overwritten on each run).
echo "$(date)" > ${LOG_FILE}
echo "GROUP: ${GROUP}" >> ${LOG_FILE}
echo "HOST: ${HOST}" >> ${LOG_FILE}
echo "CATEGORY: ${CATEGORY}" >> ${LOG_FILE}
echo "GRAPH_TITLE: ${GRAPH_TITLE}" >> ${LOG_FILE}
echo "SERVICE_STATE: ${SERVICE_STATE}" >> ${LOG_FILE}
echo "INPUT: ${INPUT}" >> ${LOG_FILE}
# Map the state to a Rocket.Chat attachment color.  The original tested
# the misspelled ${SERVICESTATE}, so COLOR was never assigned.
# NOTE(review): the mixed-case comparisons ("CRITICAL"/"WARNING"/"ok")
# mirror the original -- confirm against the values Munin actually sends.
if [ "${SERVICE_STATE}" = "CRITICAL" ]
then
COLOR="danger"
elif [ "${SERVICE_STATE}" = "WARNING" ]
then
COLOR="warning"
elif [ "${SERVICE_STATE}" = "ok" ]
then
COLOR="good"
fi
PAYLOAD="{\"icon_emoji\":\":robot:\",\"text\":\"${CATEGORY}: ${GRAPH_TITLE}\",\"attachments\":[{\"title\":\"Munin: ${SERVICE_STATE}\",\"title_link\":\"http://localhost/munin/problems.html\",\"text\":\"${INPUT}\",\"color\":\"${COLOR}\"}]}"
echo "PAYLOAD: ${PAYLOAD}" >> ${LOG_FILE}
curl -sX POST -o /dev/null --data "payload=${PAYLOAD}" ${WEBHOOK_URL} 2>&1
| true |
296be46130ccd75ae9e401b0e54d15e25b29f99b | Shell | vagrant-easy/VagrantEasy-LAMP | /scripts/provisions/10.ssh_users.sh | UTF-8 | 980 | 3.875 | 4 | [
"MIT"
] | permissive | #!/bin/bash
function error_exit
{
echo "$1" 1>&2
exit 1
}
# Defaults:
SSH_USER_NAME="admin"
SSH_USER_PASS="admin"
SSH_USER_CAN_SUDO=true
# Override defaults using passed parameters:
while [[ $# > 1 ]]
do
key="$1"
case $key in
--ssh_user_name)
SSH_USER_NAME="$2"
shift
;;
--ssh_user_pass)
SSH_USER_PASS="$2"
shift
;;
--ssh_user_can_sudo)
SSH_USER_CAN_SUDO="$2"
shift
;;
--default)
DEFAULT=YES
shift
;;
*)
# Die if parameter not listed above.
error_exit "Unknown option '$key' with value '$2'! Aborting..."
;;
esac
shift
done
useradd -m -s /bin/bash $SSH_USER_NAME
if [ "$SSH_USER_PASS" != "no" ]; then
echo "${SSH_USER_NAME}:${SSH_USER_PASS}" | chpasswd
else
passwd -l $SSH_USER_NAME
fi
if $SSH_USER_CAN_SUDO
then
echo "$SSH_USER_NAME ALL=(ALL:ALL) ALL" >> /etc/sudoers
fi
| true |
f2e50f3e3e41ea169ba18fec276e7fd6deda497f | Shell | samejack/blog-content | /my-service | UTF-8 | 1,391 | 3.78125 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/sh
### BEGIN INIT INFO
# Provides:
# Required-Start: my-service
# Required-Stop: my-service
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: My Linux Service
### END INIT INFO
set -e
SERVICE_NAME=`basename $0`
PIDFILE=/var/run/myserv.pid
LOGPATH="/var/log/${SERVICE_NAME}"
FOREVER_BIN=`which forever`
APP_PATH="/var/share/work-js/app.js"
case $1 in
start)
if [ -e "${PIDFILE}" ]; then
PID=`cat ${PIDFILE}`
echo "Service is running already. (PID=${PID})"
exit 1
fi
if [ ! -d "${LOGPATH}" ]; then
mkdir -p "${LOGPATH}"
fi
PID=`ps aux | grep ${APP_PATH} | head -n1 | awk '{print $2}'`
${FOREVER_BIN} start ${APP_PATH} -l "${LOGPATH}/service.log" -o "${LOGPATH}/out.log" -e "${LOGPATH}/error.log" > "${LOGPATH}/start.log"
rm -rf ${PIDFILE}
echo "${PID}" > ${PIDFILE}
echo "Service ${SERVICE_NAME} start. PID=${PID}"
;;
stop)
if [ ! -e "${PIDFILE}" ]; then
echo "Service is not running."
else
PID=`cat ${PIDFILE}`
kill ${PID} || true
rm -rf ${PIDFILE}
echo "Service ${SERVICE_NAME} stop. PID=${PID}"
fi
;;
restart)
$0 stop
sleep 1
$0 start
;;
status)
if [ -e "${PIDFILE}" ]; then
PID=`cat ${PIDFILE}`
echo "Service is running. (PID=${PID})"
else
echo "Service is not running."
fi
;;
*)
echo "Usage: ${0} {start|stop|restart|status}"
exit 1
;;
esac
| true |
15905fe49997408655e3786afba45104960bf32e | Shell | c47s/HFetch | /HFetch DEMO.app/Contents/Resources/getassgns | UTF-8 | 4,105 | 4.15625 | 4 | [] | no_license | #!/bin/bash
interactive=1
justDoCredentials=0
tty=$(tty) # tty to direct interactive output to
while getopts "gret:" opt; do
case "$opt" in
g) # GUI mode - non-interactive
interactive=0
tty="/dev/null"
;;
r) # Delete the API credential files
rm "$HOME/.scgyKey"
rm "$HOME/.scgySecret"
;;
e) # Exit after modifying credentials
justDoCredentials=1
;;
t) # Specify tty for interactive output
tty="$OPTARG"
;;
*) # Invalid option
echo "usage: $0 [-gre] [-t tty]"
;;
esac
done
PATH=$PATH":/usr/local/bin/" # Make sure we can find jq if it is installed
jq --help > /dev/null 2> /dev/null || { # Test if we can find jq
echo "Coulndn't find jq." > /dev/stderr
if [ $interactive -eq 1 ]; then
echo "Install it?"
echo -n "(yes or no?) "
read -n 1 answer
read
if [ "$answer" != "y" ]; then # If the user does not type something beginning with y
exit 5
fi
brew install jq || \
pkg install jq || \
sudo bash -c '
apt-get install jq ||
dnf install jq ||
zypper install jq ||
pacman -Sy jq' # Try every possible way to install jq
else
exit 4 # 4 - The GUI needs to install jq
fi
}
# Try to read the credentials. If they are missing, prompt the user. If the user doesn't respond (or we're in GUI mode), exit with an error
read consumerKey < "$HOME/.scgyKey" || \
{ echo -n "Consumer Key: " > "$tty"; read -t 5 && echo "$REPLY" > "$HOME/.scgyKey" || exit 2; } # 2 - Missing Consumer Key
read consumerSecret < "$HOME/.scgySecret" || \
{ echo -n "Consumer Secret: " > "$tty"; read -t 5 && echo "$REPLY" > "$HOME/.scgySecret" || exit 3; } # 3 - Missing Consumer Secret
[ $justDoCredentials -eq 1 ] && exit # Stop if option -e (exit after modifying credentials) was specified
IDs=( $(
curl https://api.schoology.com/v1/users/25493325/sections \
-sH "$(php -r "echo 'authorization: OAuth realm=\"Schoology API\", oauth_consumer_key=\"$consumerKey\", oauth_token=\"\", oauth_nonce=\"'; echo uniqid(); echo '\", oauth_timestamp=\"'; echo time(); echo '\", oauth_signature_method=\"PLAINTEXT\", oauth_version=\"1.0\", oauth_signature=\"$consumerSecret%26\"'; ")"\
| jq -r .section[].id
) ) # Obtain the IDs of the user's classes
length=${#IDs[@]} # Find how many classes the user has
[ $interactive -eq 1 ] && tput sc > "$tty" # If we're in interactive mode, save the current cursor position so we can return here in order to overwrite the previous progress percentage
result=$(
for i in "${!IDs[@]}"; do
[ $interactive -eq 1 ] && tput rc > "$tty"
echo -n $((i * 100 / length))"%" > "$tty" # Display progress percentage
# Get each class' assignments
until curl https://api.schoology.com/v1/sections/"${IDs[$i]}"/assignments \
-sfH "$(php -r "echo 'authorization: OAuth realm=\"Schoology API\", oauth_consumer_key=\"$consumerKey\", oauth_token=\"\", oauth_nonce=\"'; echo uniqid(); echo '\", oauth_timestamp=\"'; echo time(); echo '\", oauth_signature_method=\"PLAINTEXT\", oauth_version=\"1.0\", oauth_signature=\"$consumerSecret%26\"'; ")"
do
sleep 0.1
done
done
)
if [ $interactive -eq 1 ]; then
tput rc > "$tty"
{
echo "Name\`Due Date"
# Parse the json returned by the server for the due date and name, keeping only ones that are due in the future
echo "$result" | jq -r '.assignment[] |
select(
(.due | strptime("%Y-%m-%d %H:%M:%S") | mktime)? > ($date | strptime("%Y-%m-%d %H:%M:%S") | mktime)
)
|
"\(.title):`\(.due)"' --arg date "$(date +"%C%y-%m-$(($(date +%d))) %H:%M:%S")"
} | column -ts "\`" > "$tty"
else
# Parse the json returned by the server for the due date and name, keeping only ones that are due in the future
result=$(
echo "$result" | jq -r '.assignment[] |
select(
(.due | strptime("%Y-%m-%d %H:%M:%S") | mktime)? > ($date | strptime("%Y-%m-%d %H:%M:%S") | mktime)
)
|
"Due \(.due):: \"\(.title)\": due \(.due), URL \(.web_url)"' --arg date "$(date +"%C%y-%m-$(($(date +%d))) %H:%M:%S")" | sort
# --arg date "2000-01-01 00:00:00
)
# Sort the assignments by due date
tempIFS="$IFS"
IFS='
'
for line in $result; do
echo "${line#*:: *}"
done
IFS="$tempIFS"
fi
| true |
d64dded020964b991c714cb25eb764fad16253eb | Shell | zhangsirsdo/openstack-install | /docker/docker-keystone-setup/setup.sh | UTF-8 | 2,094 | 3.15625 | 3 | [] | no_license | #!/bin/bash
# Bootstrap the OpenStack Identity (keystone) catalog: service entity,
# endpoints, default domain, admin/service/demo projects, users and roles.
# Credentials may be overridden from the environment; the ${VAR:=default}
# form also assigns the default back into the variable.
ADMIN_TOKEN=${ADMIN_TOKEN:=016f77abde58da9c724b}
ADMIN_PASS=${ADMIN_PASS:=root}
DEMO_PASS=${DEMO_PASS:=root}
export OS_IDENTITY_API_VERSION=${OS_IDENTITY_API_VERSION:=3}
export OS_TOKEN=$ADMIN_TOKEN
# ADVERTISEMENT_URL is expected as "host:port"; keep only the host part.
# NOTE(review): assumes ADVERTISEMENT_URL is provided by the container
# environment — if unset, OS_URL degrades to "http://:35357/v3".
ADVERTISEMENT_HOST=`echo $ADVERTISEMENT_URL|awk -F ':' '{print $1}'`
export OS_URL=http://$ADVERTISEMENT_HOST:35357/v3
#keystone:
# Create the service entity for the Identity service
openstack service create --name keystone --description "OpenStack Identity" identity
# OpenStack uses three API endpoint variants for each service: admin, internal, and public.
#
# The admin API endpoint allows modifying users and tenants by default, while the public
# and internal APIs do not allow these operations.
#
# In a production environment, the variants might reside on separate networks that service
# different types of users for security reasons.
openstack endpoint create --region RegionOne identity public http://$ADVERTISEMENT_HOST:5000/v3
openstack endpoint create --region RegionOne identity internal http://$ADVERTISEMENT_HOST:5000/v3
openstack endpoint create --region RegionOne identity admin http://$ADVERTISEMENT_HOST:35357/v3
# Create the default domain
openstack domain create --description "Default Domain" default
# Create an administrative project, user, and role for administrative operations
openstack project create --domain default --description "Admin Project" admin
openstack user create --domain default --password $ADMIN_PASS admin
openstack role create admin
openstack role add --project admin --user admin admin
# Create the service project
openstack project create --domain default --description "Service Project" service
# Regular (non-admin) tasks should use an unprivileged project and user.
# As an example, Create the demo project:
openstack project create --domain default --description "Demo Project" demo
# Create the demo user
openstack user create --domain default --password $DEMO_PASS demo
# Create the user role
openstack role create user
# Add the user role to the demo project and user
openstack role add --project demo --user demo user
| true |
f1e974832cb78fdd7cfb24c79f66f5fd7bd0c2de | Shell | LibreRouterOrg/pi-gen | /stage3/00-setup-unique-hostname/01-run-chroot.sh | UTF-8 | 356 | 2.828125 | 3 | [
"BSD-3-Clause"
] | permissive | ## Setup unique device hostname with a systemd startup script.
# Write a one-shot systemd unit whose job is to run /opt/configure-hostname.sh
# at boot, giving the device a unique hostname.
# NOTE: the unquoted EOF delimiter is safe here because the unit body contains
# no $-expansions or backquotes.
cat > /etc/systemd/system/configure-hostname.service << EOF
[Unit]
Description=Setup the device hostname to be a unique name
[Service]
Type=oneshot
ExecStart=/opt/configure-hostname.sh
RemainAfterExit=true
[Install]
WantedBy=multi-user.target
EOF
# Enable the unit so it is pulled in by multi-user.target on every boot.
systemctl enable configure-hostname.service
| true |
6d272bfd10299741318bf143e1e8780527fbb07e | Shell | masatake/miniascape-templates | /guest/net_register.sh | UTF-8 | 531 | 3.140625 | 3 | [
"MIT",
"Vim"
] | permissive | #! /bin/bash
# see also `[...]/usr/libexec/miniascape/guest_network_register.sh -h`
#
# NOTE: this file is a Jinja2 template; the {% ... %} and {{ ... }} markup is
# expanded once per guest before the result is ever executed as shell.
#
# Use installed version of 'guest_network_register.sh' if exists:
register_sh=/usr/libexec/miniascape/guest_network_register.sh
# Fall back to the in-tree copy relative to this script's directory.
# NOTE(review): the fallback concatenates the absolute ${register_sh} path
# onto a relative prefix — looks intentional per the repo layout, verify.
test -f ${register_sh} || register_sh=${0%/*}/../../host/${register_sh}
# Register every NIC that has a MAC, IP and FQDN defined in the template vars;
# the network name defaults to "default" when not given.
{% for nic in interfaces -%}
{% if nic.mac is defined and nic.ip is defined and nic.fqdn is defined -%}
bash ${register_sh} -m {{ nic.mac }} -n {{ nic.network|default('default') }} {{ nic.fqdn }} {{ nic.ip }}
{%- endif %}
{% endfor %}
| true |
940b7a8da5e7b98d2542da86ab2e16ef3a422193 | Shell | rtfmplz/fabric-playground | /network/with-reverse-proxy/test-gw.sh | UTF-8 | 1,363 | 3.0625 | 3 | [] | no_license | #!/bin/bash
# Smoke tests for the org1 reverse-proxy gateway: health endpoint, plain
# IP:PORT connectivity, and TLS Common Name routing for each backend.
# The try/assert/get_conn/get_cn helpers and the tests_run counter come from
# test-function.sh (sourced below).
# import functions
source test-function.sh
# Gateway endpoints under test (all bound on localhost).
GATEWAY_IP="127.0.0.1"
GATEWAY_ADDR="127.0.0.1:57999"
GATEWAY_ADDR_FOR_ORDERER1="127.0.0.1:57051"
GATEWAY_ADDR_FOR_PEER1="127.0.0.1:57050"
GATEWAY_ADDR_FOR_PEER2="127.0.0.1:58050"
###############################################################
# HEALTH CHECK
###############################################################
try "org1_gw health check"
# First whitespace-separated token of the health response should be "OK".
out=$(curl $GATEWAY_ADDR/health --silent --stderr - | awk '{print $1}')
assert "OK" "$out"
###############################################################
# Connection test via IP:PORT
###############################################################
try "-> org1_gw"
out=$(get_conn $GATEWAY_ADDR)
assert "$GATEWAY_ADDR" "$out"
###############################################################
# CN (Common Name) verification test
###############################################################
# NOTE(review): get_cn presumably returns the CN of the certificate served
# for the given SNI host:port via the gateway IP — see test-function.sh.
try "-> org1 g/w -> orderer1.ordererorg"
out=$(get_cn "orderer1.ordererorg:57050" "$GATEWAY_IP")
assert "CN=orderer1.ordererorg" "$out"
try "-> org1 g/w -> peer1.org1"
out=$(get_cn "peer1.org1:57051" "$GATEWAY_IP")
assert "CN=peer1.org1" "$out"
try "-> org1 g/w -> peer2.org1"
out=$(get_cn "peer2.org1:57051" "$GATEWAY_IP")
assert "CN=peer2.org1" "$out"
###############################################################
echo
echo "PASS: $tests_run tests run"
| true |
348b65f4a9c777d1acbc8f6fad58f0750021f44c | Shell | bsc-wdc/compss | /compss/runtime/adaptors/gos/scripts/init.sh | UTF-8 | 401 | 2.875 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Prepare the sandbox directory layout for a job.
#
# Positional arguments:
#   $1 - base working directory
#   $2 - job status subdirectory name
#   $3 - cancel script subdirectory name
#   $4 - batch output subdirectory name
#
# Exits with status 7 when the batch output directory is missing afterwards.
base_dir=$1
status_subdir=$2
cancel_subdir=$3
output_subdir=$4

#-------------------------------------
# Create sandbox
#-------------------------------------
for sandbox_dir in "${base_dir}" \
                   "${base_dir}/${status_subdir}" \
                   "${base_dir}/${cancel_subdir}" \
                   "${base_dir}/${output_subdir}"; do
    mkdir -p "${sandbox_dir}"
done

# The batch output directory is the one callers depend on; verify it exists.
[ -d "${base_dir}/${output_subdir}" ] || exit 7
| true |
0cbefd81e6a15a1dcc2480e410e88528d3a6ec16 | Shell | JaSiLez/tfsec-sarif-action | /entrypoint.sh | UTF-8 | 1,165 | 3.640625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Entrypoint for the tfsec SARIF GitHub Action.
#
# Downloads the requested tfsec release, runs it against the configured
# working directory and writes a SARIF report to ${INPUT_SARIF_FILE}.
# The tfsec exit status is exposed as the "tfsec-return-code" step output;
# the step itself exits 0 so a following SARIF-upload step can always run.
set -x

if [ -n "${GITHUB_WORKSPACE}" ]; then
  cd "${GITHUB_WORKSPACE}" || exit
fi

# Resolve "latest" vs. an explicit release tag for the GitHub releases API.
VERSION="latest"
if [ "$INPUT_TFSEC_VERSION" != "latest" ]; then
  VERSION="tags/${INPUT_TFSEC_VERSION}"
fi

# Download the required tfsec version: query the release JSON, scrape the
# linux-amd64 asset URL out of it, then fetch and install the binary.
wget -O - -q "$(wget -q https://api.github.com/repos/aquasecurity/tfsec/releases/${VERSION} -O - | grep -o -E "https://.+?tfsec-linux-amd64" | head -n1)" >tfsec
install tfsec /usr/local/bin/tfsec

# Optional flag strings assembled from the action inputs.  They are expanded
# unquoted below on purpose: an empty value disappears entirely, a set value
# is word-split into flag + argument.
TFVARS_OPTION=""
CONFIG_FILE_OPTION=""
TFSEC_ARGS_OPTION=""

if [ -n "${INPUT_TFVARS_FILE}" ]; then
  echo "::debug::Using tfvars file ${INPUT_TFVARS_FILE}"
  TFVARS_OPTION="--tfvars-file ${INPUT_TFVARS_FILE}"
fi

if [ -n "${INPUT_CONFIG_FILE}" ]; then
  echo "::debug::Using config file ${INPUT_CONFIG_FILE}"
  CONFIG_FILE_OPTION="--config-file ${INPUT_CONFIG_FILE}"
fi

if [ -n "${INPUT_TFSEC_ARGS}" ]; then
  echo "::debug::Using specified args: ${INPUT_TFSEC_ARGS}"
  TFSEC_ARGS_OPTION="${INPUT_TFSEC_ARGS}"
fi

# Seed the report with an empty JSON object so the upload step always has a
# file to read, even if tfsec fails before writing anything.
echo {} >"${INPUT_SARIF_FILE}"

tfsec --format=sarif "${INPUT_WORKING_DIRECTORY}" ${CONFIG_FILE_OPTION} ${TFVARS_OPTION} ${TFSEC_ARGS_OPTION} >"${INPUT_SARIF_FILE}"
# BUG FIX: the original line also assigned an unused "exit_code" variable and
# read PIPESTATUS for a non-pipeline command; $? already holds tfsec's status.
tfsec_return=$?

echo ::set-output name=tfsec-return-code::"${tfsec_return}"
| true |
c5055708c6d884ae61da320bfeee4843c835025a | Shell | cr-rateionn/maya_exploit_SP | /bin/cleanUserSetup | UTF-8 | 514 | 3.828125 | 4 | [
"MIT"
] | permissive | #!/bin/bash
#
# sed command to remove malware scriptNode from userSetup.mel files
#
# Usage: cleanUserSetup fileName
#
# future work:
#    check on status
#
# were we passed any parameters
if [ $# -gt 0 ]; then
  # Only touch files that carry the marker string found in infected files.
  grep -q MayaMelUIConfigurationFile "$1"
  if [ $? -eq 0 ]; then
	echo "processing file: ["$1"] ..."
	# Delete the inclusive line range between the two malware markers,
	# keeping a .bak backup of the original file.
	# NOTE(review): the mismatched spellings "autoUpdato.../autoUpdatc..."
	# appear to match the injected payload verbatim — verify against an
	# infected file before "fixing" them.
	sed -i.bak '/Maya Mel UI Configuration File.Maya Mel UI Con/,/("autoUpdatoAttrEnd") `;}}}autoUpdatcAttrEnd;/d' "$1"
	echo "done"
  fi
else
   echo "Usage: cleanUserSetup filePattern"
fi
| true |
a809c25b7287b3db9f2450d407895665540ac099 | Shell | demers/blockchain_docker | /blockchain.bash | UTF-8 | 8,031 | 4.0625 | 4 | [] | no_license | #!/bin/bash
# dialog(1) geometry for the menu widget.
HEIGHT=17
WIDTH=80
CHOICE_HEIGHT=19
# User-facing labels (French UI strings — do not translate).
BACKTITLE="Gestion Blockchain version 1.3"
TITLE="Gestion de chaîne de blocs"
MENU="Choisir une des options suivantes:"
# creerConteneur PORT
# Interactively (o/n prompts) build a per-port Docker image from the template
# Dockerfile — rewriting the default port 5000 to PORT — and (re)start a
# container named blockchain<PORT> on the "blockchain" network, then list the
# running blockchain containers and the open ports.
function creerConteneur() {
	# Build the image for this port?
	read -p "Voulez-vous créer la chaîne de blocs avec Docker pour ce port ($1)? (o/n)" CHOIX
	if [[ $CHOIX =~ ^[Oo]$ ]]
	then
		# Derive Dockerfile<PORT> from the template and patch the port in.
		cp -v -f Dockerfile Dockerfile$1
		sed -i -e "s/5000/$1/g" "Dockerfile$1"
		echo "Voici la commande qui sera exécutée:"
		echo "docker build -t blockchain$1 -f Dockerfile$1 ."
		docker build -t blockchain$1 -f Dockerfile$1 .
	fi
	# Start (or restart) the container for this port?
	read -p "Voulez-vous démarrer la chaîne de blocs pour ce port ($1)? (o/n)" CHOIX
	if [[ $CHOIX =~ ^[Oo]$ ]]
	then
		echo "Voici les commandes qui seront exécutées:"
		echo "docker rm blockchain$1"
		echo "docker run -d -p $1:$1 --network=\"blockchain\" --name blockchain$1 blockchain$1"
		# Remove any stale container of the same name before running.
		docker rm blockchain$1
		docker run -d -p $1:$1 --network="blockchain" --name blockchain$1 blockchain$1
	fi
	echo "Vérification des ports ouverts..."
	sleep 1
	echo "Voici la liste des chaînes de blocs disponibles:"
	docker ps | grep --color=never blockchain
	netstat -tlpn | grep --color=never 0.0.0.0
	read -n 1 -s -r -p "Tapez une touche pour afficher le menu..."
}
# Known blockchain ports; PORT[NOPORT] is the currently selected one.
PORT=()
NOPORT=0
echo "------------------------"
echo " --- INITIALISATIION ---"
echo "------------------------"
echo "Attention, aucune chaîne de bloc n'est disponible actuellement."
echo "Vous devez en avoir au moins une."
echo ""
# Dedicated Docker network shared by all blockchain containers.
echo "Création du réseau blockchain..."
docker network create blockchain
echo ""
# Bootstrap: at least one blockchain must exist before showing the menu.
read -p "Entrez le premier port d'accès à la chaîne de bloc: " NOUVEAUPORT
PORT[${#PORT[@]}]=$NOUVEAUPORT
creerConteneur $NOUVEAUPORT
# Main menu loop: show the dialog(1) menu forever (option 10 exits the
# script).  Each arm drives the blockchain REST API on the currently
# selected port, PORT[NOPORT].
while true;
do
OPTIONS=(1 "Lister les chaînes de bloc disponibles;"
         2 "Changer le port de la chaîne de bloc (actuellement, ${PORT[$NOPORT]});"
         3 "Créer ou démarrer une nouvelle chaîne de bloc;"
         4 "Ajouter une transaction;"
         5 "Afficher la chaîne de bloc;"
         6 "Miner les dernières transactions ajoutées;"
         7 "Enregistrer une chaîne de bloc;"
         8 "Résoudre un conflit de chaîne de bloc (consensus);"
         9 "Supprimer les conteneurs et les images associées;"
         10 "Quitter")

# dialog writes the selection to stderr; swap the streams so we can capture
# it while the widget itself is drawn on the terminal.
CHOICE=$(dialog --clear \
                --backtitle "$BACKTITLE" \
                --title "$TITLE" \
                --menu "$MENU" \
                $HEIGHT $WIDTH $CHOICE_HEIGHT \
                "${OPTIONS[@]}" \
                2>&1 >/dev/tty)

clear
case $CHOICE in
    1)
        # List known ports and running blockchain containers.
        echo "Voici la liste des ports disponibles: " ${PORT[@]}
        echo "Voici la liste des chaînes de bloc disponibles (Docker):"
        docker ps | grep --color=never blockchain
        netstat -tlpn | grep --color=never 0.0.0.0
        read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        ;;
    2)
        # Select another known port as the current one.
        echo "Voici la liste des ports disponibles: " ${PORT[@]}
        read -p "Fournir le port voulu parmi cette liste ci-haut: " PORTCHOISI
        for i in "${!PORT[@]}"; do
            if [[ "${PORT[$i]}" = "${PORTCHOISI}" ]]; then
                NOPORT=${i}
            fi
        done
        echo Le nouveau port considéré sera: ${PORT[$NOPORT]}
        read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        ;;
    3)
        # Register a new port and build/start its container.
        read -p "Entrez le nouveau port d'accès à la chaîne de bloc: " NOUVEAUPORT
        PORT[${#PORT[@]}]=$NOUVEAUPORT
        creerConteneur $NOUVEAUPORT
        ;;
    4)
        # Post a new transaction, optionally hashing the addresses.
        read -p "Quel est l'envoyeur (sender)? " SENDER
        read -p "Vous voulez protéger l'adresse (md5) (o/n)" CHOIX
        if [[ $CHOIX =~ ^[Oo]$ ]]
        then
            SENDER=$(echo -n $SENDER | sha256sum | cut -d' ' -f1)
        fi
        read -p "Quel est le destinataire (recicient)? " RECIPIENT
        read -p "Vous voulez protéger l'adresse (md5) (o/n)" CHOIX
        if [[ $CHOIX =~ ^[Oo]$ ]]
        then
            RECIPIENT=$(echo -n $RECIPIENT | sha256sum | cut -d' ' -f1)
        fi
        echo -n "Quel est le montant envoyé? "
        read AMOUNT
        echo "Voici l'ajout d'une transaction..."
        curl -X POST -H "Content-Type: application/json" -d "{
 \"sender\": \"$SENDER\",
 \"recipient\": \"$RECIPIENT\",
 \"amount\": $AMOUNT
}" "http://localhost:${PORT[$NOPORT]}/transactions/new"
        read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        ;;
    5)
        echo "Voici le contenu de la chaîne de bloc..."
        #curl "http://localhost:${PORT[$NOPORT]}/chain" | less
        http "http://localhost:${PORT[$NOPORT]}/chain"
        read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        ;;
    6)
        echo "Voici le minage de la chaîne de bloc..."
        #curl "http://localhost:${PORT[$NOPORT]}/mine"
        http "http://localhost:${PORT[$NOPORT]}/mine"
        read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        ;;
    7)
        read -p "Quel est le port de la chaîne de bloc à enregistrer? " PORTENR
        # BUG FIX: use string equality instead of the original regex match
        # ([[ $PORTENR =~ ${PORT[$NOPORT]} ]]), which treated the current
        # port as a regex/substring — e.g. with 500 selected, registering
        # 5000 was wrongly refused.
        if [[ "$PORTENR" == "${PORT[$NOPORT]}" ]]
        then
            echo "ERREUR:"
            echo "  Impossible d'enregistrer la chaîne de blocs actuelle ($PORTENR)."
            echo "  Sinon, cela va créer une boucle sans fin au moment de l'action du consensus."
            echo "  Retour au menu..."
            read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        else
            echo "Noeud ajouté: blockchain$PORTENR:$PORTENR"
            curl -X POST -H "Content-Type: application/json" -d "{
 \"nodes\": [ \"blockchain$PORTENR:$PORTENR\" ]
}" "http://localhost:${PORT[$NOPORT]}/nodes/register"
            read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        fi
        ;;
    8)
        echo Le port considéré est: ${PORT[$NOPORT]}
        echo "La résolution (consensus) sera faite sur cette chaîne de blocs..."
        echo "http \"http://localhost:${PORT[$NOPORT]}/nodes/resolve\""
        #curl "http://localhost:${PORT[$NOPORT]}/nodes/resolve"
        http "http://localhost:${PORT[$NOPORT]}/nodes/resolve"
        read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        ;;
    9)
        # Tear down every container/image/Dockerfile created by option 3.
        for i in "${!PORT[@]}"; do
            echo "Arrêt du conteneur blockchain${PORT[$i]}..."
            docker stop blockchain${PORT[$i]}
            echo "Suppression du conteneur blockchain${PORT[$i]}..."
            docker rm blockchain${PORT[$i]}
            echo "Suppression de l'image blockchain${PORT[$i]}..."
            docker rmi blockchain${PORT[$i]}
            echo "Suppression du fichier Dockerfile${PORT[$i]}..."
            rm -f Dockerfile${PORT[$i]}
        done
        read -n 1 -s -r -p "Tapez une touche pour revenir au menu..."
        ;;
    10)
        echo "Sortie du script."
        echo "Script de gestion du blockchain écrit en Python"
        echo "disponible à https://github.com/demers/blockchain"
        echo "Écrit par FND avril 2018, mars 2022."
        exit 0
esac
done
| true |
f612fd3f9bace1ba8bc7b44a7b3c59256249d341 | Shell | ejohnson44/SoundStream | /utilities/addGPL.sh | UTF-8 | 1,695 | 2.734375 | 3 | [] | no_license | #!/bin/sh
# This script is designed to add our GPL statement to the top of all of our
# source files.
#
# Refactored: the seven near-identical loops are now one helper function.

# Marker line that identifies files which already carry the license header.
COPYRIGHT_MARKER='Copyright 2013 The Last Crusade ContactLastCrusade@gmail.com'

# add_header HEADER_FILE FILE...
# Prepend HEADER_FILE to each FILE that does not already contain the
# copyright marker.  Arguments that do not name an existing regular file
# (e.g. unmatched glob patterns) are skipped silently, which also keeps the
# original behaviour of tolerating missing source trees.
add_header() {
    header=$1
    shift
    for i in "$@"; do
        [ -f "$i" ] || continue
        if ! grep -q "$COPYRIGHT_MARKER" "$i"
        then
            cat "$header" "$i" > "$i.new" && mv "$i.new" "$i"
        fi
    done
}

# Java source files
add_header ../docs/copyrightJava.txt ../SoundStream*/src/com/lastcrusade/soundstream/*/*.java

# XML source files
add_header ../docs/copyrightXML.txt ../SoundStream*/res/*/*.xml

# Edge case: /net/message
add_header ../docs/copyrightJava.txt ../SoundStream*/src/com/lastcrusade/soundstream/*/*/*.java

# Edge case: AndroidManifest.xml and build.xml
add_header ../docs/copyrightXML.txt ../SoundStream*/*.xml

# Edge case: SoundStream-test Parcelable.java
add_header ../docs/copyrightJava.txt ../SoundStream*/src/android/os/*.java

# Edge case: CustomApp.java and CoreActivity.java
add_header ../docs/copyrightJava.txt ../SoundStream*/src/com/lastcrusade/soundstream/*.java

# Edge case: monkeyTest.py
add_header ../docs/copyrightPy.txt ../monkeyTest.py
| true |
1b9f0d7529968f1e303d8ab13a5ef6e9c74b979f | Shell | RohithCIS/LowCostLoRaGw | /gw_full_latest/scripts/stop_access_point.sh | UTF-8 | 673 | 3.515625 | 4 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | #!/bin/bash
# Network configuration swap files: "interfaces" is the live config,
# "interfaces_ap" stores the access-point variant, and "interfaces_not_ap"
# is the client (wpa_supplicant) variant prepared by the start script.
INTERFACES_NOT_AP_FILE=/etc/network/interfaces_not_ap
INTERFACES_AP_FILE=/etc/network/interfaces_ap
INTERFACES_FILE=/etc/network/interfaces
#if "interfaces_not_ap" exists, then replace current interfaces file by this one
if [ -f $INTERFACES_NOT_AP_FILE ];
then
	# Stop the AP services before swapping the network configuration.
	sudo service hostapd stop
	sudo service dnsmasq stop
	sudo service networking stop
	# Save the AP config so the AP can be re-enabled later, then make the
	# client config the live one and bring networking back up.
	sudo mv $INTERFACES_FILE $INTERFACES_AP_FILE
	sudo mv $INTERFACES_NOT_AP_FILE $INTERFACES_FILE
	sudo service networking start
	echo "The access point is now disabled, using the file /etc/wpa_supplicant/wpa_supplicant.conf to connect to an access point."
else
	# The swap file is absent, so the client config is already active.
	echo "The access point is already disabled."
fi
| true |
ba1b65aeed19a28776f1774378cb9265ee349568 | Shell | mikalstill/ipa-buildroot | /buildroot-ipa/board/openstack/ipa/post-build.sh | UTF-8 | 10,374 | 3.65625 | 4 | [] | no_license | #!/usr/bin/env bash
## We use this script to compile Python wheels for IPA
## These are compiled using the buildroot host but installed into the target
##
## We use pip from get-pip.py, not Buildroot
##
## Arguments: $1 = Buildroot target dir; $2/$3 = IPA git URL / ref;
##            $4/$5 = requirements git URL / ref.
## HOST_DIR, BUILD_DIR and BR2_EXTERNAL_IPA_PATH come from the Buildroot
## environment that invokes this post-build hook.
# We want to know if anything fails
set -xue
set -o pipefail
# Path to target is always first argument, as per Buildroot
BR2_TARGET_DIR="${1}"
# Logs don't work cause /var/log is linked to /tmp which gets mounted over the top of
# NOTE: Do we want to write logs anyway, given we send to stdout?
# Disable for now, also see overlay journald.conf which sets logs to "none"
#[[ -L "${BR2_TARGET_DIR}/var/log" ]] && unlink "${BR2_TARGET_DIR}/var/log"
# IPA and Requirements Git URLs and versions
# If no repos are specified, default to upstream
# If no branches are specified, set nothing (use remote default/HEAD)
# Don't change these here, they should be set using Buildroot config
# These are passed through as arguments to this script
OPENSTACK_IPA_GIT_URL="${2:-https://git.openstack.org/openstack/ironic-python-agent}"
OPENSTACK_IPA_RELEASE="${3:-}"
OPENSTACK_REQUIREMENTS_GIT_URL="${4:-https://git.openstack.org/openstack/requirements}"
OPENSTACK_REQUIREMENTS_RELEASE="${5:-}"
# Where to build and store the Python wheelhouse (compiled binary packages)
PIP_DL_DIR="${BR2_EXTERNAL_IPA_PATH}/../dl/pip"
PIP_WHEELHOUSE="${BR2_EXTERNAL_IPA_PATH}/../dl/wheelhouse"
# Remove any old source and build dirs (we keep successful binary wheels only)
rm -Rf "${PIP_DL_DIR}"/{src,build}
# Make sure it exists for first time builds
mkdir -p "${PIP_DL_DIR}"/{src,build}
# Location to clone IPA and requirements locally for bundling
OPENSTACK_IPA_GIT_DIR="${PIP_DL_DIR}/src/ironic-python-agent"
OPENSTACK_REQUIREMENTS_GIT_DIR="${PIP_DL_DIR}/src/requirements"
# Python version, to make it easier to update
PYTHON_VERSION="python2.7"
# Get pip and install deps for creating Python wheels for IPA
rm -f "${PIP_DL_DIR}/get-pip.py"
wget https://bootstrap.pypa.io/get-pip.py -O "${PIP_DL_DIR}/get-pip.py"
# Force reinstall pip and install deps for building
"${HOST_DIR}/usr/bin/python" "${PIP_DL_DIR}/get-pip.py" --force-reinstall
"${HOST_DIR}/usr/bin/pip" install --upgrade pip
"${HOST_DIR}/usr/bin/pip" --cache-dir "${PIP_DL_DIR}" install appdirs packaging pbr setuptools wheel
# Git clone IPA source, wheel will build from this directory
rm -Rf "${OPENSTACK_IPA_GIT_DIR}"
git clone --depth 1 ${OPENSTACK_IPA_RELEASE:+--branch ${OPENSTACK_IPA_RELEASE}} "${OPENSTACK_IPA_GIT_URL}" "${OPENSTACK_IPA_GIT_DIR}"
# Apply an out-of-tree SSL-options patch when building the newton branch.
if [ "${OPENSTACK_IPA_RELEASE}" == "stable/newton" ]
then
	pushd "${OPENSTACK_IPA_GIT_DIR}"
	wget http://www.stillhq.com/openstack/patches/newton/ironic-python-agent-ssl-options
	patch -p 1 < ironic-python-agent-ssl-options
	popd
fi
# Git clone Requirements to get specified upper-constraints.txt
rm -Rf "${OPENSTACK_REQUIREMENTS_GIT_DIR}"
git clone --depth 1 ${OPENSTACK_REQUIREMENTS_RELEASE:+--branch ${OPENSTACK_REQUIREMENTS_RELEASE}} "${OPENSTACK_REQUIREMENTS_GIT_URL}" "${OPENSTACK_REQUIREMENTS_GIT_DIR}"
# Variables for Python builds for target
# HACK this needs cleaning up
# The exports below point compilers, binutils and flags at the Buildroot
# cross toolchain so extension modules are built for the target, not the host.
# NOTE(review): find may print several "sysroot" directories here — TODO
# confirm a single match is guaranteed in this buildroot layout.
_python_sysroot="$(find "${HOST_DIR}" -type d -name sysroot)"
export _python_sysroot
export _python_prefix=/usr
export _python_exec_prefix=/usr
export PYTHONPATH="${BR2_TARGET_DIR}/usr/lib/${PYTHON_VERSION}/sysconfigdata/:${BR2_TARGET_DIR}/usr/lib/${PYTHON_VERSION}/site-packages/"
export PATH="${HOST_DIR}"/bin:"${HOST_DIR}"/sbin:"${HOST_DIR}"/usr/bin:"${HOST_DIR}"/usr/sbin:${PATH}
export AR="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-ar
export AS="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-as
export LD="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-ld
export NM="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-nm
export CC="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-gcc
export GCC="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-gcc
export CPP="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-cpp
export CXX="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-g++
export FC="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-gfortran
export F77="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-gfortran
export RANLIB="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-ranlib
export READELF="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-readelf
export STRIP="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-strip
export OBJCOPY="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-objcopy
export OBJDUMP="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-objdump
export AR_FOR_BUILD=/usr/bin/ar
export AS_FOR_BUILD=/usr/bin/as
export CC_FOR_BUILD="${HOST_DIR}/usr/bin/ccache /usr/lib64/ccache/gcc"
export GCC_FOR_BUILD="${HOST_DIR}/usr/bin/ccache /usr/lib64/ccache/gcc"
export CXX_FOR_BUILD="${HOST_DIR}/usr/bin/ccache /usr/lib64/ccache/g++"
export LD_FOR_BUILD=/usr/bin/ld
export CPPFLAGS_FOR_BUILD="-I${HOST_DIR}/usr/include"
export CFLAGS_FOR_BUILD="-O2 -I${HOST_DIR}/usr/include"
export CXXFLAGS_FOR_BUILD="-O2 -I${HOST_DIR}/usr/include"
export LDFLAGS_FOR_BUILD="-L${HOST_DIR}/lib -L${HOST_DIR}/usr/lib -Wl,-rpath,${HOST_DIR}/usr/lib"
export FCFLAGS_FOR_BUILD=""
export DEFAULT_ASSEMBLER="${HOST_DIR}/usr/bin/x86_64-buildroot-linux-gnu-as"
# NOTE(review): the stray space before "-ld" below looks like a typo for
# ".../x86_64-buildroot-linux-gnu-ld" — left as found; confirm before fixing.
export DEFAULT_LINKER="${HOST_DIR}/usr/bin/x86_64-buildroot-linux-gnu -ld"
export CPPFLAGS="-D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64"
export CFLAGS="-D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -Os "
export CXXFLAGS="-D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -Os "
export LDFLAGS=""
export FCFLAGS=" -Os "
export FFLAGS=" -Os "
export PKG_CONFIG="${HOST_DIR}"/usr/bin/pkg-config
export STAGING_DIR="${HOST_DIR}"/usr/x86_64-buildroot-linux-gnu/sysroot
export INTLTOOL_PERL=/usr/bin/perl
export PIP_TARGET="${BR2_TARGET_DIR}/usr/lib/${PYTHON_VERSION}/site-packages"
export CC="${HOST_DIR}"/usr/bin/x86_64-buildroot-linux-gnu-gcc
export CFLAGS="-D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64 -Os "
export LDSHARED="${HOST_DIR}/usr/bin/x86_64-buildroot-linux-gnu-gcc -shared "
export LDFLAGS=""
# Build ironic-python-agent dependency wheels
"${HOST_DIR}/usr/bin/pip" wheel \
  --src "${PIP_DL_DIR}/src/" \
  --build "${PIP_DL_DIR}/build/" \
  --wheel-dir "${PIP_WHEELHOUSE}" \
  --constraint "${OPENSTACK_REQUIREMENTS_GIT_DIR}/upper-constraints.txt" \
  --requirement "${OPENSTACK_IPA_GIT_DIR}/requirements.txt"
# Build ironic-python-agent from Git repo path
"${HOST_DIR}/usr/bin/pip" wheel \
  --no-index \
  --find-links="${PIP_WHEELHOUSE}" \
  --src "${PIP_DL_DIR}/src/" \
  --build "${PIP_DL_DIR}/build/" \
  --wheel-dir "${PIP_WHEELHOUSE}" \
  --constraint "${OPENSTACK_REQUIREMENTS_GIT_DIR}/upper-constraints.txt" \
  --requirement "${OPENSTACK_IPA_GIT_DIR}/requirements.txt" \
  "${OPENSTACK_IPA_GIT_DIR}"
# Install ironic-python-agent from our compiled wheels
"${HOST_DIR}/usr/bin/pip" install \
  --no-compile \
  --upgrade \
  --force-reinstall \
  --no-index \
  --find-links="${PIP_WHEELHOUSE}" \
  --constraint "${OPENSTACK_REQUIREMENTS_GIT_DIR}/upper-constraints.txt" \
  --requirement "${OPENSTACK_IPA_GIT_DIR}/requirements.txt" \
  ironic-python-agent
# Compile wheels for pip, setuptools and wheel for target
"${HOST_DIR}/usr/bin/pip" wheel \
  --src "${PIP_DL_DIR}/src/" \
  --build "${PIP_DL_DIR}/build/" \
  --wheel-dir "${PIP_WHEELHOUSE}" \
  pip setuptools wheel
# Install pip, setuptools and wheel for target to generate ironic-python-agent executable on boot
"${HOST_DIR}/usr/bin/pip" install \
  --no-compile \
  --upgrade \
  --no-index \
  --find-links="${PIP_WHEELHOUSE}" \
  pip setuptools wheel
# Remove ensurepip to save space as we already installed pip and setuptools
rm -Rf "${BR2_TARGET_DIR}/usr/lib/${PYTHON_VERSION}/ensurepip"
# Optimise python and remove .py files except for IPA related (so we can modify if needed)
# HACK disabled for now, because we can rebuild the image it breaks things
# Might need to just use Buildroot's ability to use pyc instead
# Currently eventlet/green/http/client.py fails to compile, too
#"${HOST_DIR}/usr/bin/python" -OO -m compileall "${BR2_TARGET_DIR}/usr/lib/${PYTHON_VERSION}"
#find "${BR2_TARGET_DIR}/usr/lib/${PYTHON_VERSION}" -name '*.py' -regextype posix-egrep -not -regex ".*(eventlet|ironic_python_agent|ironic_lib)/.*" -not -empty -exec rm -v {} \;
# Copy target ldconfig in from build dir so we can create ld.so.cache for pyudev
install -m 755 -p -D "${BUILD_DIR}"/glibc*/*/*/ldconfig "${BR2_TARGET_DIR}/sbin/"
#find "${BUILD_DIR}"/glibc* -type f -name ldconfig -exec cp {} "${BR2_TARGET_DIR}/sbin/" \;
#chmod 755 "${BR2_TARGET_DIR}/sbin/ldconfig"
# Copy in ldconfig.service file from systemd, it will be enabled in post-fakeroot.sh
install -m 644 -p -D "${BUILD_DIR}"/systemd*/units/ldconfig.service "${BR2_TARGET_DIR}/usr/lib/systemd/system/"
#find "${BUILD_DIR}"/systemd*/units/ -type f -name ldconfig.service -exec cp {} "${BR2_TARGET_DIR}/usr/lib/systemd/system/" \;
#chmod 644 "${BR2_TARGET_DIR}/usr/lib/systemd/system/ldconfig.service"
# Ensure any SSH keys and configs have appropriate permissions,
# else it may fail to start and that would make life hard
# (Commands are separated out for clarity)
# System keys and configs
if [[ -d "${BR2_TARGET_DIR}/etc/ssh" ]]; then
	find "${BR2_TARGET_DIR}/etc/ssh" -type f -name ssh_config -exec chmod 0644 {} \;
	find "${BR2_TARGET_DIR}/etc/ssh" -type f -name "*pub" -exec chmod 0644 {} \;
	find "${BR2_TARGET_DIR}/etc/ssh" -type f -name sshd_config -exec chmod 0600 {} \;
	find "${BR2_TARGET_DIR}/etc/ssh" -type f -name "*key" -exec chmod 0600 {} \;
fi
# Fix root's keys and config
find "${BR2_TARGET_DIR}/root" -type f -name .rhosts -exec chmod 0600 {} \;
find "${BR2_TARGET_DIR}/root" -type f -name .shosts -exec chmod 0600 {} \;
if [[ -d "${BR2_TARGET_DIR}/root/.ssh" ]]; then
	# Enable root logins via ssh keys only, if we detect a public key
	# This hack is for convenience, it's better to provide new sshd_config in overlay
	if [[ -f "${BR2_TARGET_DIR}/root/.ssh/authorized_keys" ]]; then
		sed -i 's/^#PermitRootLogin.*/PermitRootLogin\ prohibit-password/g' "${BR2_TARGET_DIR}/etc/ssh/sshd_config"
	fi
	# Ensure root's home directory and other SSH related files are restricted
	chmod 0700 "${BR2_TARGET_DIR}/root"
	chmod 0700 "${BR2_TARGET_DIR}/root/.ssh"
	find "${BR2_TARGET_DIR}/root/.ssh" -type f -exec chmod 0600 {} \;
fi
## This file will get clobbered by Git.
## Add your own commands to $(BR2_EXTERNAL_IPA_PATH)/../scripts/post-build.sh
| true |
e48eef053bfb5eb811829ab616e3aa8b3fb0a814 | Shell | aaronknister/kickseed | /kickseed-anna | UTF-8 | 1,809 | 3.421875 | 3 | [] | no_license | #! /bin/sh
# Load the debconf client protocol helpers (db_get/db_set and $RET).
. /usr/share/debconf/confmodule
# Back up anna's state.
standard_modules_backup=true
if db_get anna/standard_modules; then
	standard_modules_backup="$RET"
fi
if [ -f /var/lib/anna-install/queue ]; then
	mv -f /var/lib/anna-install/queue /var/lib/anna-install/queue.backup
fi
# Don't do the usual installation of everything we can find.
db_set anna/standard_modules false
# Queue/install only the udebs named on our command line.
anna-install "$@"
# We only support specific retrievers here, because some of them are
# difficult to use properly at this stage, and not all of them make sense.
# For instance, the netboot initrd already has enough packages to retrieve
# Kickstart files from the network, and if you were using that initrd it's
# unlikely that you'd want to retrieve Kickstart files from a CD.
#
# Whatever way we do it, this is going to be a hideous layering violation.
# The only question is which layers we violate. We call postinsts manually
# to avoid leaving packages configured, which would confuse d-i later on.
export UDPKG_QUIET=y
# Prefer CD, then ISO, then generic media retrievers; mount the medium first
# (via kickseed-udpkg) if /cdrom/dists is not yet available.
if [ -x /var/lib/dpkg/info/load-cdrom.postinst ]; then
	if [ ! -d /cdrom/dists ]; then
		/lib/kickseed/kickseed-udpkg cdrom-detect
	fi
	/var/lib/dpkg/info/load-cdrom.postinst configure
elif [ -x /var/lib/dpkg/info/load-iso.postinst ]; then
	if [ ! -d /cdrom/dists ]; then
		/lib/kickseed/kickseed-udpkg iso-scan
	fi
	/var/lib/dpkg/info/load-iso.postinst configure
elif [ -x /var/lib/dpkg/info/load-media.postinst ]; then
	/var/lib/dpkg/info/load-media.postinst configure
else
	logger -t kickseed "No supported retriever found!"
	exit 1
fi
# Put anna's state back the way it was, to allow d-i's normal run.
if [ -f /var/lib/anna-install/queue.backup ]; then
	mv -f /var/lib/anna-install/queue.backup /var/lib/anna-install/queue
fi
db_set anna/standard_modules "$standard_modules_backup"
exit 0
| true |
8ca0923a7311b26fa201385fb3ed22eac3257481 | Shell | AnthonyWlodarski/TLDP | /Bash-Prog-Intro-HOWTO/conditionals.sh | UTF-8 | 455 | 3.609375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
#conditional examples
if [ "foo" = "foo" ]; then
   echo "foo"
fi

# if expression then code else code fi example
if [ "foo" = "bar" ]; then
   echo "foo"
else
   echo "bar"
fi

# expressions with variables
foo="foo"
bar="foo"

# BUG FIX: quote the expansions — the original unquoted form
# `[ $foo = $bar ]` breaks (syntax error or wrong result) as soon as either
# variable is empty or contains whitespace.
if [ "$foo" = "$bar" ]; then
   echo Foo is equal to bar.
fi

#expressions with else if
if [ "foo" = "bar" ]; then
   echo Should not happen.
elif [ "foo" = "foo" ]; then
   echo Should happen.
else
   echo How did we get here?
fi
| true |
9a5ca784bff0e39406369e261be392cea527f2f0 | Shell | nataliarampon/microcontrollers | /labs/lab03/blink.sh | UTF-8 | 275 | 3 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# Blink the board LED by toggling its GPIO line once per second, forever.
# The GPIO number depends on the Galileo board revision, read from DMI.
boardId=`cat /sys/devices/virtual/dmi/id/board_name`

case $boardId in
  "Galileo")
    GPIO=3
  ;;
  "GalileoGen2")
    GPIO=7
  ;;
  *)
    # BUG FIX: the original case had no default arm, so on an unknown board
    # GPIO stayed unset and the loop wrote to /sys/class/gpio/gpio/value.
    echo "Unsupported board: $boardId" >&2
    exit 1
  ;;
esac

# Drive the pin low then high, one second each, until interrupted.
while true ; do
  echo -n "0" > /sys/class/gpio/gpio$GPIO/value
  sleep 1
  echo -n "1" > /sys/class/gpio/gpio$GPIO/value
  sleep 1
done
| true |
6efee0a7dfb89c91a41150cce6749cab5eaeb7b1 | Shell | BioLockJ-Dev-Team/sheepdog_testing_suite | /test/feature/envVars/script/checkMasterConfig.sh | UTF-8 | 160 | 2.609375 | 3 | [] | no_license | #! /bin/bash
# Record the environment-variable settings from the generated MASTER config:
# append any TEST_VAR assignments and the pipeline.envVars property to the
# recorded output file used by the verification step.
# BUG FIX: the original used MASTER=$(ls ../../MASTER*.properties) unquoted;
# parsing ls breaks on special characters, and with no matching file MASTER
# was empty, making grep read from stdin (hanging the script).  Let the
# shell glob directly instead.
MASTER=( ../../MASTER*.properties )
OUT=../output/recordedEnvVars.txt
grep "TEST_VAR=" "${MASTER[@]}" >> "$OUT"
grep "pipeline.envVars" "${MASTER[@]}" >> "$OUT"
| true |
24266258b6f04ffa858e658b6e6a8e15efe4acb4 | Shell | kimkh415/713_team_4 | /scripts/bowtie_single.sh | UTF-8 | 288 | 2.59375 | 3 | [] | no_license | #!/bin/bash
# Align single-end reads with bowtie2; abort on the first failed command.
set -e
read1=$1          # FASTQ file of single-end reads (-U)
index=$2          # bowtie2 index path prefix (-x)
outputdir=$3      # directory for alignment.sam and unmapped.fq
numprocessors=$4  # alignment threads (-p)
# Environment-modules load; assumes an HPC cluster providing this module.
module load bowtie2/2.2.7
# Example index prefix for $2:
# /pylon5/mc5frap/kimkh415/713_team_4/data/bowtie_index/GRCh38_index
# -q: FASTQ input; --un: write reads that fail to align; SAM goes to stdout.
bowtie2 -p ${numprocessors} -q -x ${index} -U ${read1} --un ${outputdir}/unmapped.fq --quiet > ${outputdir}/alignment.sam
| true |
62fa8f1f9acb7cb30cdf4a3454f3c5108b4e1cc1 | Shell | qichangc/linux | /shell/ulimit.sh | UTF-8 | 192 | 2.6875 | 3 | [] | no_license | #!/bin/bash
# Show the "Max open files" limit of every running nginx process.
# BUG FIX: the original wrapped the ps|grep|awk pipeline in single quotes
# instead of command substitution, so the loop iterated over the literal
# pipeline string and never looked at a real PID (and the nested single
# quotes around the awk program were broken as well).
for pid in $(pgrep nginx)
do
    # Each process reports its own resource limits under /proc.
    grep 'Max open files' "/proc/${pid}/limits"
done
| true |
e3dbf6c9a2dff75d00f763ace364406cb67f032f | Shell | continuum-swapnal-shinde/RefactoringRepo | /platform-qa-automation/PlatformAutomation/src/test/resources/DebianAgentInstall.sh | UTF-8 | 954 | 3.421875 | 3 | [] | no_license | #!/bin/bash
# Download and install a specific ContinuumPlatformAgent Debian build.
#
# Arguments:
#   $1 - agent build number (artifact path / package name component)
#   $2 - BUILD_ENVIRONMENT value handed to the package installer
#   $3 - registration TOKEN handed to the package installer
linuxbuildno=$1
build_ENV=$2
token_ID=$3

echo "----Create Folders---"
# Recreate a clean staging directory for the download.
cd /home/qaadmin/
if [ -d /home/qaadmin/Juno-Agent ]; then
    rm -rf Juno-Agent
fi
cd /home/qaadmin
mkdir Juno-Agent
cd /home/qaadmin/Juno-Agent

echo "----Download and Install Debian Package---"
attempt_counter=0
max_attempts=5
# Retry the download until curl reports success, up to max_attempts tries.
# BUG FIX: the original wrapped curl in $(...), which executed curl's (empty)
# stdout as a command; an empty command always succeeds, so the loop ended
# after a single attempt whether or not the download worked.  -f additionally
# makes curl fail on HTTP error responses instead of saving the error page.
until curl -sf "http://artifact.corp.continuum.net:8081/artifactory/int-dev_platform-agent-debian-sbm/${linuxbuildno}/ContinuumPlatformAgent_2.0.${linuxbuildno}_amd64_Full.deb" -o "/home/qaadmin/Juno-Agent/ContinuumPlatformAgent_2.0.${linuxbuildno}_amd64_Full.deb"; do
    if [ "${attempt_counter}" -eq "${max_attempts}" ]; then
      echo "Max attempts reached"
      exit 1
    fi
    printf '.'
    attempt_counter=$((attempt_counter+1))
    sleep 30
done

# Install non-interactively; the installer reads BUILD_ENVIRONMENT and TOKEN
# from its environment.
# NOTE(review): the sudo password is hard-coded ("root"); consider
# passwordless sudo for the QA user instead of piping the password.
echo root | sudo -S BUILD_ENVIRONMENT=${build_ENV} TOKEN=${token_ID} dpkg -i /home/qaadmin/Juno-Agent/ContinuumPlatformAgent_2.0.${linuxbuildno}_amd64_Full.deb
echo "----End Script---" | true |
78ed42c2dbcafb9a47d1e908dcf06ab59ce7c335 | Shell | NVIDIA/nvidia-container-toolkit | /scripts/archive-packages.sh | UTF-8 | 4,440 | 3.703125 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Copyright (c) NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Print the offending arguments plus a usage summary, then abort with status 1.
assert_usage() {
  local script_name
  script_name=$(basename "${BASH_SOURCE[0]}")
  printf 'Incorrect arguments: %s\n' "$*"
  printf '%s ARTIFACTORY_REPO [GIT_REFERENCE]\n' "${script_name}"
  printf ' ARTIFACTORY_REPO: URL to Artifactory repository\n'
  printf ' GIT_REFERENCE: Git reference to use for the package version\n'
  printf ' (if not specified, PACKAGE_IMAGE_TAG must be set)\n'
  exit 1
}
set -e
# Resolve the scripts directory and project root relative to this script.
SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../scripts && pwd )"
PROJECT_ROOT="$( cd ${SCRIPTS_DIR}/.. && pwd )"
# At least the Artifactory repo URL is required.
if [[ $# -lt 1 ]]; then
assert_usage "$@"
fi
# Provides get_version_from_image, extract_info, join_by, optionally_add_property.
source "${SCRIPTS_DIR}"/utils.sh
ARTIFACTORY_REPO=$1
# Second argument is an optional git reference; without it, PACKAGE_IMAGE_TAG
# must already be set in the environment.
if [[ $# -eq 2 ]]; then
REFERENCE=$2
SHA=$(git rev-parse --short=8 ${REFERENCE})
elif [[ -z ${PACKAGE_IMAGE_TAG} ]]; then
echo "Either PACKAGE_IMAGE_TAG or REFERENCE must be specified"
assert_usage "$@"
fi
# Overridable defaults (':' no-op assigns only when unset).
: ${CURL:=curl}
: ${PACKAGE_IMAGE_NAME="registry.gitlab.com/nvidia/container-toolkit/container-toolkit/staging/container-toolkit"}
: ${PACKAGE_IMAGE_TAG=${SHA}-packaging}
VERSION="$(get_version_from_image ${PACKAGE_IMAGE_NAME}:${PACKAGE_IMAGE_TAG} ${SHA})"
# A version with no "rc." marker is a stable release.
REPO="experimental"
if [[ ${VERSION/rc./} == ${VERSION} ]]; then
REPO="stable"
fi
PACKAGE_CACHE=release-${VERSION}-${REPO}
# Pull packages into a local cache directory unless one already exists;
# remember whether we created it so we can clean it up afterwards.
REMOVE_PACKAGE_CACHE=no
if [ ! -d ${PACKAGE_CACHE} ]; then
echo "Fetching packages with SHA '${SHA}' as tag '${VERSION}' to ${PACKAGE_CACHE}"
${SCRIPTS_DIR}/pull-packages.sh \
${PACKAGE_IMAGE_NAME}:${PACKAGE_IMAGE_TAG} \
${PACKAGE_CACHE}
REMOVE_PACKAGE_CACHE=yes
else
echo "Using existing package cache: ${PACKAGE_CACHE}"
fi
ARTIFACTS_DIR=${PROJECT_ROOT}/${PACKAGE_CACHE}
# Metadata extracted from the packaging image (via utils.sh extract_info).
IMAGE_EPOCH=$(extract_info "IMAGE_EPOCH")
# Note we use the main branch for the kitmaker archive.
GIT_BRANCH=main
GIT_COMMIT=$(extract_info "GIT_COMMIT")
GIT_COMMIT_SHORT=$(extract_info "GIT_COMMIT_SHORT")
PACKAGE_VERSION=$(extract_info "PACKAGE_VERSION")
# Bundle the cache into the archive that will be uploaded.
tar -czvf ${PACKAGE_CACHE}.tar.gz ${PACKAGE_CACHE}
if [[ ${REMOVE_PACKAGE_CACHE} == "yes" ]]; then
rm -rf ${PACKAGE_CACHE}
fi
: ${PACKAGE_ARCHIVE_FOLDER=releases-testing}
# Upload an archive to Artifactory with KITMAKER metadata properties.
# Arguments: $1 archive path, $2 component name, $3 version.
# Globals read: ARTIFACTORY_REPO, PACKAGE_ARCHIVE_FOLDER, GIT_* / IMAGE_EPOCH /
# PACKAGE_VERSION metadata, ARTIFACTORY_TOKEN, CURL. Exits 1 on any failure.
function upload_archive() {
local archive=$1
local component=$2
local version=$3
if [ ! -r "${archive}" ]; then
echo "ERROR: File not found or not readable: ${archive}"
exit 1
fi
local sha1_checksum=$(sha1sum -b "${archive}" | awk '{ print $1 }')
local upload_url="${ARTIFACTORY_REPO}/${PACKAGE_ARCHIVE_FOLDER}/${component}/${version}/$(basename ${archive})"
local props=()
# Required KITMAKER properties:
props+=("component_name=${component}")
props+=("version=${version}")
props+=("changelist=${GIT_COMMIT_SHORT}")
props+=("branch=${GIT_BRANCH}")
props+=("source=https://gitlab.com/nvidia/container-toolkit/container-toolkit")
# Package properties:
props+=("package.epoch=${IMAGE_EPOCH}")
props+=("package.version=${PACKAGE_VERSION}")
props+=("package.commit=${GIT_COMMIT}")
# NOTE(review): CI values go through optionally_add_property (from utils.sh)
# rather than props+= — presumably it appends to props; confirm in utils.sh.
for var in "CI_PROJECT_ID" "CI_PIPELINE_ID" "CI_JOB_ID" "CI_JOB_URL" "CI_PROJECT_PATH"; do
if [ -n "${!var}" ]; then
optionally_add_property "${var}" "${!var}"
fi
done
# Properties are attached to the upload URL as a ';'-separated matrix suffix.
local PROPS=$(join_by ";" "${props[@]}")
echo "Uploading ${upload_url} from ${archive}"
# Log the curl invocation with the API token redacted (this 'echo' is
# intentional: it prints the command line, it does not execute it).
echo -H "X-JFrog-Art-Api: REDACTED" \
-H "X-Checksum-Sha1: ${sha1_checksum}" \
${archive:+-T ${archive}} -X PUT \
"${upload_url};${PROPS}"
if ! ${CURL} -f \
-H "X-JFrog-Art-Api: ${ARTIFACTORY_TOKEN}" \
-H "X-Checksum-Sha1: ${sha1_checksum}" \
${archive:+-T ${archive}} -X PUT \
"${upload_url};${PROPS}" ;
then
echo "ERROR: upload file failed: ${archive}"
exit 1
fi
}
# Upload the archive built above, then remove the local copy.
upload_archive "${PACKAGE_CACHE}.tar.gz" "nvidia_container_toolkit" "${VERSION}"
echo "Removing ${PACKAGE_CACHE}.tar.gz"
rm -f "${PACKAGE_CACHE}.tar.gz"
| true |
e96b5285e06d9695d735b03baed41e8839c4b1b4 | Shell | dedalusj/cwmonitor | /e2e/e2e.sh | UTF-8 | 2,123 | 3.671875 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
set -euf -o pipefail
# End-to-end test: run cwmonitor via docker-compose, then verify via the
# CloudWatch GetMetricData API that the expected metrics were published.
METRIC_DATA_QUERY_TEMPLATE_FILE="metric-data-queries-template.json"
METRIC_DATA_QUERY_FILE="metric-data-queries.json"
METRIC_DATA_RESULTS_FILE="metric-data-results.json"
# Unique per-run namespace so concurrent/previous runs don't collide.
export START_DATE=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
export END_DATE=$(date -d "+10 minutes" -u +"%Y-%m-%dT%H:%M:%SZ")
export NAMESPACE="e2e_test_${START_DATE}"
echo "INFO | Running end to end test with:"
echo "INFO | START_DATE: ${START_DATE}"
echo "INFO | END_DATE: ${END_DATE}"
echo "INFO | NAMESPACE: ${NAMESPACE}"
echo "INFO | Running cwmonitor and test help containers"
# Let the stack run for up to 3 minutes, streaming logs; timeout is expected.
docker-compose up -d
timeout 180s docker-compose logs -f || true
docker-compose down
echo "INFO | Generating metric query json"
# Substitute the per-run namespace into the query template.
jq -c --arg v "${NAMESPACE}" \
'(.[] | .MetricStat | .Metric | .Namespace) |= sub("Test"; $v)' \
${METRIC_DATA_QUERY_TEMPLATE_FILE} | tee ${METRIC_DATA_QUERY_FILE}
echo "INFO | Fetching the metric data uploaded by cwmonitor"
aws cloudwatch get-metric-data \
--metric-data-queries file://./${METRIC_DATA_QUERY_FILE} \
--start-time ${START_DATE} \
--end-time ${END_DATE} | jq -c '.' | tee ${METRIC_DATA_RESULTS_FILE}
# Every queried metric must have at least 3 data points.
EXPECTED_METRICS=$(jq '.[] | .Id' ${METRIC_DATA_QUERY_FILE} | uniq | wc -l)
RESULT_METRICS=$(jq '.MetricDataResults | .[] | select(.Values | length >= 3) | .Id' \
${METRIC_DATA_RESULTS_FILE} | uniq | wc -l)
if [[ "$EXPECTED_METRICS" -ne "$RESULT_METRICS" ]]; then
echo "Expected ${EXPECTED_METRICS} metrics with 3 data points but found ${RESULT_METRICS}"
exit 1
fi
# The healthy container must report value 1 at least twice.
if [[ $(jq '.MetricDataResults | .[] | select(.Id | contains("health_healthy")) | .Values | map(select(. == 1)) | length' \
${METRIC_DATA_RESULTS_FILE}) -lt 2 ]]; then
echo "Expected to find at least 2 healthy data points for healthy container"
exit 1
fi
# The unhealthy container must report value 0 at least three times.
if [[ $(jq '.MetricDataResults | .[] | select(.Id | contains("health_unhealthy")) | .Values | map(select(. == 0)) | length' \
${METRIC_DATA_RESULTS_FILE}) -lt 3 ]]; then
echo "Expected to find at least 3 unhealthy data points for unhealthy container"
exit 1
fi
| true |
dab4ba86323bbc7d925051a21cd224d0624e7833 | Shell | ShefWuzi/msc-dissertation | /data_collection/git_text.sh | UTF-8 | 2,116 | 3.671875 | 4 | [] | no_license | #!/bin/bash
if [ $# -ne 1 ];then
echo "[*] Usage: $0 <github url>"
exit;
fi
if [[ $1 == "https://github.com/*" ]]; then
echo "[X] Can't extract data for non-github website"
exit;
fi
# Emit one CSV row per issue/PR from a GitHub API JSON array ($1).
# Each array element is base64-encoded by jq so it survives word splitting,
# then decoded field by field. Commas inside fields are stripped/escaped so
# the output stays valid single-line CSV.
issue_data() {
issue_resp=$(echo "$1")
for issue in $(echo $issue_resp | jq -r '.[] | @base64'); do
title=$(echo "$issue" | base64 -d | jq -r '.title' | sed 's/,//g' )
state=$(echo "$issue" | base64 -d | jq -r '.state')
id=$(echo "$issue" | base64 -d | jq -r '.number')
created_time=$(echo "$issue" | base64 -d | jq -r '.created_at')
updated_time=$(echo "$issue" | base64 -d | jq -r '.updated_at')
closed_time=$(echo "$issue" | base64 -d | jq -r '.closed_at')
# Body: CR/LF become <\r>/<\n> markers, commas become <c>, then the text is
# cleaned by the external extract_natural_text.py helper.
body=$(python ../data_preparation/extract_natural_text.py <(echo "$issue" | base64 -d | jq -r '.body' | perl -p -e 's/\r/<\\r>/' | perl -p -e 's/\n/<\\n>/' | sed 's/,/<c>/g'))
user_name=$(echo "$issue" | base64 -d | jq -r '.user.login')
user_id=$(echo "$issue" | base64 -d | jq -r '.user.id')
author_assoc=$(echo "$issue" | base64 -d | jq -r '.author_association')
# NOTE(review): is_pull is never extracted from the JSON (it is whatever the
# previous iteration left behind) and is never echoed below — dead code;
# presumably it was meant to test the '.pull_request' field. Confirm intent.
if [[ $is_pull == "" ]]; then
is_pull="0";
else
is_pull="1";
fi
echo "$id,$state,$created_time,$updated_time,$closed_time,$user_name,$user_id,$author_assoc,$title,$body"
done
}
# Wait out any active GitHub API rate limit before doing real work.
check_github_api=$(curl -s https://api.github.com/users/ShefWuzi | jq 'select (.message != null) | .message' | grep "API rate limit")
while true;
do
if [[ $check_github_api != "" ]]; then
sleep 5m;
# Re-probe with the same rate-limit filter as the initial check (the
# original dropped the grep here, so any non-null message — not just a
# rate limit — would keep the loop spinning forever).
check_github_api=$(curl -s https://api.github.com/users/ShefWuzi | jq 'select (.message != null) | .message' | grep "API rate limit")
else
break
fi
done
# CSV header matching the columns emitted by issue_data.
echo "ID,State,Time Created,Time Updated,Time Closed,Username,User ID,Author Association,Title,Body"
# Extract <user> and <repo> from the trailing URL components.
user=$(echo $1 | rev | cut -d / -f 2 | rev)
repo=$(echo $1 | rev | cut -d / -f 1 | rev)
# Quote the URLs: the unquoted '&' in the query string backgrounded curl and
# silently dropped the filter/direction parameters in the original.
issue_resp=$(curl -s "https://api.github.com/repos/$user/$repo/issues?state=closed&filter=all&direction=asc")
if [[ $issue_resp == "" ]]; then
exit;
fi
issue_data "$issue_resp"
pull_resp=$(curl -s "https://api.github.com/repos/$user/$repo/pulls?state=closed&filter=all&direction=asc")
# Check the *pull* response here (the original re-tested issue_resp).
if [[ $pull_resp == "" ]]; then
exit;
fi
issue_data "$pull_resp"
ea2c6895eb639be77b7184bbf41b7947ed67211c | Shell | g2forge/forklift | /builtin/package/Cygwin/special/ant | UTF-8 | 1,554 | 4.03125 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
set -eu -o pipefail
# Print a short description/usage banner for this installer script.
help() {
  local prog
  prog=$(basename "${0}")
  printf '%s: Install ant on windows\n' "${prog}"
  printf '\n'
  printf 'Usage: %s\n' "${prog}"
  printf '\n'
}
SELF_DIR=$(cd $(dirname ${0}) && pwd -P)
SELF=${SELF_DIR}/$(basename ${0})
. ${SELF_DIR}/../../../common/source.bash
. "${SELF_DIR}/../install-common"
install java java
SP_NAME="ant"
SP_TARGET="/cygdrive/c/Program Files/ANT"
SP_RELATIVE="bin"
# Accepts the temporary directory as an argument, returns the version number for the most recent version we can download on stdout
function sp_downloadversion() {
URL=$(curl https://ant.apache.org/bindownload.cgi 2> /dev/null | grep -E "/apache-ant-([^-]*)-bin.tar.gz\">apache-ant-\\1-bin.tar.gz</a>" | sed -e 's#.*href="\([^"]*\)".*#\1#' | tail -n1)
echo "${URL}" > "${1}/url"
basename "${URL}" | cut -f3 -d '-'
}
# Fails if the program is not installed, otherwise returns install directory and installed version as two lines on stdout
function sp_installedversion() {
if WHICH_OUTPUT=$(which ant 2> /dev/null); then
dirname "$(dirname "${WHICH_OUTPUT}")"
ant -version | head -n1 | cut -f4 -d ' '
else
exit 1
fi
}
# Accepts the temporary directory as an argument, downloads the most recent version, and creates the directory structure that should be installed. Returns the absolute path to that directory on stdout
function sp_downloadsource() {
pushd "${1}" > /dev/null 2>&1
curl -o "ant.tar.gz" $(cat "url")
tar -xf ant.tar.gz
popd > /dev/null 2>&1
find "${1}" -maxdepth 1 -type d -name "apache-ant-*"
}
. ${SELF_DIR}/../special-common
| true |
de3e09074738e4b94f1774a93e78750bd09ee556 | Shell | sumaiyaazad/Operating-System-CSE-314-BUET | /Shell Script/Assignment1/findAbsents.sh | UTF-8 | 603 | 3.453125 | 3 | [] | no_license | #!/bin/bash
. ./data.sh
. ./util.sh
. ./findID.sh
# Generates initial absent log
# 1) FILE_ROSTER 2) FILE_ABS 3) FILE_MARKS
function genAbsList() {
IFS=$'\n'
for student in `cut -d '"' -f 2-3 $1`
do
sid=$(echo $student| cut -d'"' -f 1|cut -f 2)
sname=$(echo $student| cut -d',' -f 2)
# echo $sid
hasStdID $sid
if [[ $RET_VAL_BOOL == 0 ]]; then
# echo "$sid,$sname" >> "$2"
appendToFile "$sid,$sname" "$2"
# echo "$sid $sname 0" >> "$3"
appendToFile "$sid 0" "$3"
fi
done
}
##############
genAbsList $FILE_ROSTER $FILE_ABS $FILE_MARKS
| true |
ffebf4893cead9dfc75166e6c95b0eb2ece9d24c | Shell | doug-dianomic/fledge | /tests/system/python/scripts/package/reset | UTF-8 | 290 | 2.515625 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Stop Fledge, wipe its state via the bundled reset tool (auto-confirming the
# "YES" prompt), then restart it and show a one-line status summary.
echo "Stopping Fledge using systemctl ..."
sudo systemctl stop fledge
echo "YES" | /usr/local/fledge/bin/fledge reset || exit 1
echo
echo "Starting Fledge using systemctl ..."
sudo systemctl start fledge
echo "Fledge Status"
sudo systemctl status fledge | grep "Active"
c5eabcebd8d2bfddc80662f48f0fd6ad8214ba4d | Shell | moog/confd-node-example | /docker-entrypoint.sh | UTF-8 | 617 | 2.84375 | 3 | [] | no_license | #!/bin/sh
# Container entrypoint: fetch the confd binary, install the example confd
# configuration, render templates once from AWS SSM, then start the app.
echo "downloading"
wget --quiet "https://github.com/kelseyhightower/confd/releases/download/v0.15.0/confd-0.15.0-linux-amd64"
mkdir -p /opt/confd/bin
mv "confd-0.15.0-linux-amd64" /opt/confd/bin/confd
chmod +x /opt/confd/bin/confd
export PATH="/opt/confd/bin/:$PATH"
mkdir -p /etc/confd/conf.d
mkdir -p /etc/confd/templates
mv /confd-example/confd/template.toml /etc/confd/conf.d/api.toml
mv /confd-example/confd/template.conf.tmpl /etc/confd/templates/template.conf.tmpl
mv /confd-example/confd/confd.toml /etc/confd/confd.toml
# One-shot template render against the regional SSM endpoint; AWS_REGION must
# be provided in the container environment.
confd --onetime -node "https://ssm.$AWS_REGION.amazonaws.com"
npm start
| true |
78cbf37fe0895912aa145f2ad478d9de3b23a1d0 | Shell | pabplanalp/pvmail | /java/bin/sh/erK4ProductionSetup.sh | UTF-8 | 1,414 | 2.59375 | 3 | [] | no_license | #!/bin/sh
# Reset the K4 production environment: import the base dataset, clear all
# generated EDI/mobile output directories, then regenerate consignments.
dir=`dirname $0`
mydir=`cd $dir;pwd`
# acSetEnv.sh supplies AC_PROJECT_HOME and the runtime environment.
. $mydir/acSetEnv.sh
echo "MAKE SURE TOMCAT IS STOPPED!"
echo "Importing Data"
$mydir/acImportData.sh erK4ProductionBase
echo "Deleting Files"
# Wipe every EDI transmit/receive/send directory for all carriers (usps, ca,
# uspsd) plus the mobile output tree.
rm -rf $AC_PROJECT_HOME/output/edi/transmit/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/usps/receive/archive/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/usps/receive/new/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/usps/receive/error/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/usps/receive/duplicate/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/usps/send/archive/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/ca/receive/archive/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/ca/receive/new/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/ca/receive/error/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/ca/receive/duplicate/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/ca/send/archive/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/uspsd/receive/archive/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/uspsd/receive/new/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/uspsd/receive/error/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/uspsd/receive/duplicate/*
rm -rf $AC_PROJECT_HOME/output/edi/k4/uspsd/send/archive/*
rm -rf $AC_PROJECT_HOME/output/mobile/k4
echo "Generating Consignments"
$mydir/acJava.sh com.vmail.utility.AcConsignmentGenerator k4 usps me1
$mydir/acJava.sh com.vmail.utility.AcConsignmentGenerator k4 ca ca1
$mydir/acJava.sh com.vmail.utility.AcConsignmentGenerator k4 uspsd dom
| true |
c9aaa0e5522362624a881e78d8db2030834bcd9f | Shell | latexstudio/swufethesis | /ci/pwd.sh | UTF-8 | 509 | 4.03125 | 4 | [
"LPPL-1.3c"
] | permissive | #!/usr/bin/env sh
set -e
# Print $PWD, but under Git Bash / Cygwin (Windows) convert the POSIX-style
# path (/c/foo/bar) into the Windows form (C:\foo\bar).
# NOTE(review): [[ ]] and $OSTYPE are bashisms under a `sh` shebang; this
# works because Git Bash's sh is bash — confirm before running under dash.
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" ]]; then
  # sed below uses `|` as the delimiter and \( \) group captures.
  # First split $PWD into the drive letter and the remainder; accept any
  # drive letter [a-z], not just c/d as the original did.
  disk=$(echo "$PWD" | sed 's|^/\([a-z]\)/.*|\1|' | tr 'a-z' 'A-Z')
  after_disk=$(echo "$PWD" | sed 's|^/[a-z]/\(.*\)|\1|')
  # Re-join drive and path, then flip the separators to backslashes.
  echo "$disk:/$after_disk" | sed 's|/|\\|g'
else
  echo "$PWD"
fi
| true |
bb3f437659d210982ce09895314da7ff7d3cb6ee | Shell | cha63506/aur-mirror-migration | /gnome-shell-extension-window-buttons-git/PKGBUILD | UTF-8 | 1,063 | 2.734375 | 3 | [] | no_license | # Maintainer: Pi3R1k <pierrick.brun at gmail dot com>
# Arch Linux PKGBUILD for the git version of the GNOME Shell "window buttons"
# extension. NOTE(review): files are installed directly inside build() with no
# package() step — legacy PKGBUILD style; modern makepkg expects package().
pkgname=gnome-shell-extension-window-buttons-git
pkgver=20111207
pkgrel=1
pkgdesc="Extension which puts minimize, maximize and close buttons in the top panel."
arch=('any')
url="https://github.com/biox/Gnome-Shell-Window-Buttons-Extension"
license=('GPL3')
depends=('gnome-shell')
makedepends=('git' 'intltool' 'gnome-common')
install='gschemas.install'
_gitroot="https://github.com/biox/Gnome-Shell-Window-Buttons-Extension.git"
_gitname="gnome-shell-extension-window-buttons"
build() {
cd ${srcdir}
msg "Connecting to the GIT server..."
# Reuse an existing clone if present, otherwise clone fresh.
if [[ -d ${srcdir}/${_gitname} ]] ; then
cd ${_gitname}
git pull origin
msg "The local files are updated..."
else
git clone ${_gitroot} ${_gitname}
cd ${_gitname}
fi
msg "GIT checkout done."
# Install the GSettings schema and the extension payload into the package.
mkdir -p $pkgdir/usr/share/glib-2.0/schemas/
mkdir -p $pkgdir/usr/share/gnome-shell/extensions/
cp org.gnome.shell.extensions.window-buttons.gschema.xml $pkgdir/usr/share/glib-2.0/schemas/
cp -R window_buttons@biox.github.com $pkgdir/usr/share/gnome-shell/extensions/
}
8950682cf710d73ac48d80912baf74e32a392b50 | Shell | chungers/atp | /ops/munin/plugins/hz | UTF-8 | 2,693 | 3.21875 | 3 | [] | no_license | #!/bin/bash
#%# family=auto
#%# capabilities=autoconf
OPTION=$1
function doAutoConf {
echo "yes"
}
function doConfig {
cat <<EOF
multigraph hz_leveldb_writes
graph_title hz leveldb writes
graph_order leveldb_writes
graph_args --base 1000
graph_vlabel records written
graph_category hz
graph_info This graph shows number of messages written in leveldb
leveldb_writes.label leveldb_writes
leveldb_writes.type COUNTER
leveldb_writes.min 0
multigraph hz_subscriber_messages_received
graph_title hz subscriber messages received
graph_order subscriber_messages_received
graph_args --base 1000
graph_vlabel messages received
graph_category hz
graph_info This graph shows number of messages written in leveldb
subscriber_messages_received.label messages received
subscriber_messages_received.type COUNTER
subscriber_messages_received.min 0
multigraph hz_subscriber_messages_persisted
graph_title hz subscriber messages persisted
graph_order subscriber_messages_persisted
graph_args --base 1000
graph_vlabel messages persisted
graph_category hz
graph_info This graph shows number of messages persisted in leveldb
subscriber_messages_persisted.label messages persisted
subscriber_messages_persisted.type COUNTER
subscriber_messages_persisted.min 0
multigraph hz_subscriber_message_process_latency
graph_title hz subscriber message process latency
graph_order subscriber_message_process_micros
graph_args --base 1000
graph_vlabel micros handling message
graph_category hz
graph_info This graph shows number of usec handling message
subscriber_message_process_micros.label usec processing message
subscriber_message_process_micros.type GAUGE
subscriber_message_process_micros.min 0
EOF
}
# parse KEY FILE — extract the value of a `"KEY": <value>,` entry from a
# JSON-ish varz dump: match the quoted key, take the text after the colon,
# and strip spaces and trailing commas. Quotes $1/$2 (the original left them
# unquoted, breaking on patterns/paths containing spaces or glob characters).
parse() {
  grep "\"$1\"" "$2" | cut -f2 -d: | sed -e 's/ //g' -e 's/,//g'
}
# munin fetch handler: scrape the hz /varz endpoint, extract the four counters
# with parse(), and emit them in munin multigraph value format.
function doSample {
varz=/tmp/hz-varz
curl "http://localhost:18001/varz" 2>/dev/null > $varz
leveldb_writes=$(parse leveldb_writes $varz)
subscriber_messages_received=$(parse subscriber_messages_received $varz)
subscriber_messages_persisted=$(parse subscriber_messages_persisted $varz)
subscriber_message_process_micros=$(parse subscriber_message_process_micros $varz)
cat <<EOF
multigraph hz_leveldb_writes
leveldb_writes.value $leveldb_writes
multigraph hz_subscriber_messages_received
subscriber_messages_received.value $subscriber_messages_received
multigraph hz_subscriber_messages_persisted
subscriber_messages_persisted.value $subscriber_messages_persisted
multigraph hz_subscriber_message_process_latency
subscriber_message_process_micros.value $subscriber_message_process_micros
EOF
}
# Dispatch on the munin action; anything other than autoconf/config (including
# an empty argument, the normal fetch case) samples values.
case $OPTION in
autoconf) doAutoConf
;;
config) doConfig
;;
*) doSample
;;
esac
exit;
| true |
3eaf01a1737b60933cc4e65e54e115553b9a09a6 | Shell | tarunagarwal99/LeetCode | /solution | UTF-8 | 823 | 4.40625 | 4 | [] | no_license | #!/bin/bash
set -e
# A POSIX variable
OPTIND=1 # Reset in case getopts has been used previously in the shell.
# Initialize our own variables:
ext="java"
function show_help() {
echo "$(basename "$0") [-l] [-l java] The Problem Name
It will
1. Create directory of 'The-Problem-Name'
2. Use default EDITOR to open 'The-Problem-Name/Solution.java' file
where:
-h show this help text
-l set the language (default: java)"
}
while getopts "h?l:" opt; do
case "$opt" in
h|\?)
show_help
exit 0
;;
l) ext=$OPTARG
;;
esac
done
shift $((OPTIND-1))
[ "$1" = "--" ] && shift
function join { local IFS="$1"; shift; echo "$*"; }
directory=$(join "-" "${@}")
if [ -z "${directory}" ]; then
show_help
exit 1
fi
mkdir "${directory}"
${EDITOR} "${directory}/Solution.${ext}"
| true |
357a418abcb00ac261f094ab195235f760570d9f | Shell | allthingsclowd/two_consul_clusters_one_vault | /scripts/factorySecretIDTest.sh | UTF-8 | 2,663 | 3.40625 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Derive VAULT/FACTORY addresses for the test run.
# Reads LEADER_IP/TRAVIS/TRAVIS_HOME from the sourced env file and the
# environment; exports VAULT_ADDR and VAULT_SKIP_VERIFY for later curl calls.
setup_environment () {
source /usr/local/bootstrap/var.env
# Find the interface/CIDR on the 192.168.2.0 lab network and strip the mask.
IFACE=`route -n | awk '$1 == "192.168.2.0" {print $8}'`
CIDR=`ip addr show ${IFACE} | awk '$2 ~ "192.168.2" {print $2}'`
IP=${CIDR%%/24}
VAULT_IP=${LEADER_IP}
FACTORY_IP=${LEADER_IP}
# On Travis CI everything runs on localhost.
if [ "${TRAVIS}" == "true" ]; then
IP="127.0.0.1"
VAULT_IP=${IP}
fi
export VAULT_ADDR=http://${VAULT_IP}:8200
export VAULT_SKIP_VERIFY=true
if [ -d /vagrant ]; then
LOG="/vagrant/logs/VaultServiceIDFactory_${HOSTNAME}.log"
else
LOG="${TRAVIS_HOME}/VaultServiceIDFactory.log"
fi
}
# Exercise the secret-ID factory end to end: initialise it if needed, obtain
# a wrapped AppRole secret-id, unwrap it via Vault, log in with the AppRole,
# and read a KV secret with the resulting token.
verify_factory_service () {
curl http://${FACTORY_IP}:8314/health
STATUS=`curl http://${FACTORY_IP}:8314/health`
if [ "${STATUS}" = "UNINITIALISED" ];then
echo "Initialisng the Factory Service with a Provisioner Token"
# Initialise with Vault Token
WRAPPED_VAULT_TOKEN=`cat /usr/local/bootstrap/.wrapped-provisioner-token`
curl --header "Content-Type: application/json" \
--request POST \
--data "{\"token\":\"${WRAPPED_VAULT_TOKEN}\"}" \
http://${FACTORY_IP}:8314/initialiseme
fi
# Get a secret ID and test access to the Vault KV Secret
ROLENAME="id-factory"
WRAPPED_SECRET_ID=`curl --header "Content-Type: application/json" \
--request POST \
--data "{\"RoleName\":\"${ROLENAME}\"}" \
http://${FACTORY_IP}:8314/approlename`
echo "WRAPPED_SECRET_ID : ${WRAPPED_SECRET_ID}"
# Unwrap the response-wrapped secret-id via Vault's sys/wrapping/unwrap.
SECRET_ID=`curl --header "X-Vault-Token: ${WRAPPED_SECRET_ID}" \
--request POST \
${VAULT_ADDR}/v1/sys/wrapping/unwrap | jq -r .data.secret_id`
echo "SECRET_ID : ${SECRET_ID}"
# retrieve the appRole-id from the approle - /usr/local/bootstrap/.appRoleID
APPROLEID=`cat /usr/local/bootstrap/.appRoleID`
echo "APPROLEID : ${APPROLEID}"
# login
tee id-factory-secret-id-login.json <<EOF
{
  "role_id": "${APPROLEID}",
  "secret_id": "${SECRET_ID}"
}
EOF
APPTOKEN=`curl \
--request POST \
--data @id-factory-secret-id-login.json \
${VAULT_ADDR}/v1/auth/approle/login | jq -r .auth.client_token`
echo "Reading secret using newly acquired token"
RESULT=`curl \
--header "X-Vault-Token: ${APPTOKEN}" \
${VAULT_ADDR}/v1/kv/development/redispassword | jq -r .data.value`
echo "SECRET : ${RESULT}"
echo "APPLICATION VERIFICATION COMPLETE"
curl http://${FACTORY_IP}:8314/health
}
# Trace every command for CI debugging, then run the two test phases.
set -x
echo 'Start of Factory Service Test'
setup_environment
verify_factory_service
echo 'End of Factory Service Test'
8416c33665bf42a79550b0be6dba5dc0370892f9 | Shell | scherzhaft/roles | /global-solaris-adminfunctions/files/etc/default/SYSconstants | UTF-8 | 3,153 | 3.046875 | 3 | [] | no_license | #!/bin/bash
loadallmodules()
{
. /etc/default/slack-functions
. /etc/default/admin-functions
HOURS=`echo ${DATESTRING}|awk -F_ {'print $4'}`
MINS=`echo ${DATESTRING}|awk -F_ {'print $5'}`
SECS=`echo ${DATESTRING}|awk -F_ {'print $6'}`
HOURS2SECS=`expr ${HOURS} \* 60 \* 60`
MINS2SECS=`expr ${MINS} \* 60`
TIMEINSECS=`expr ${HOURS2SECS} + ${MINS2SECS} + ${SECS}`
}
__getrev()
{
loadallmodules
REVDATE=`echo $__DATESTRING|awk -F\_ {'print $3"."$1"."$2'}`
echo "${REVDATE}.${TIMEINSECS}"
}
loadallmodules
__TIMEINSECS="${TIMEINSECS}"
__DATESTRING="${DATESTRING}"
__slapwhoami ${__HOSTNAME} >/dev/null 2>/dev/null
__whichsiteanddomainami >/dev/null 2>&1
__SUPPORT="${__CLASSIFICATION}slvmqvuus01.${__DNSDOMAIN}"
__SHORTSUPPORT=`echo "${__SUPPORT}"|awk -F\. {'print $1'}`
__SITES=`echo "${__SITEMATRIX}"|awk -F\| {'print $2'}`
__SUPPORTSERVERS=`echo "${__SITES}"|perl -p -e "s/^/${__CLASSIFICATION}slvmqvuus01\./g"|perl -p -e "s/\n/\.${__DNSDOMAIN}\n/g"`
nmsitehosts >/dev/null 2>&1
WHOAMI=`who am i|awk {'print $1'}`
if [ "X${WHOAMI}" != "X" ] ; then
SUDO_USER="${WHOAMI}"
fi
if [ "X${LOGNAME}" != "X" -a "X${SUDO_USER}" = "X" ] ; then
SUDO_USER="${LOGNAME}"
fi
getent passwd ${SUDO_USER}|${__GREP} "^${SUDO_USER}:x:[0-9]*:25000:" >/dev/null
GIDSTATUS="$?"
if [ "X${SUDO_USER}" != "X" -a "X${GIDSTATUS}" = "X0" ] ; then
__SUDO_USER_MAIL=`__ldapsearchhandler -b uid=${SUDO_USER},ou=people,${__LDAPDN} mail|grep "^mail:"|awk -F\: {'print $2'}|awk {'print $1'}`
__SUDO_USER="${SUDO_USER}"
elif [ "X${SUDO_USER}" = "X" -a "${__OSNAME}" = "SunOS" ] ; then
PID=$$
foo=`ptree $PID|awk {'print $1'}|perl -p -e "s/\n/\,/g"|perl -p -e "s/\,$//g"`
processinfo=`ps -f -p ${foo}`
listlength=`echo "${processinfo}"|wc -l|awk {'print $1'}`
lengthminus=`expr "${listlength}" - 1`
userlist=`ps -f -p ${foo}|tail -${lengthminus}|awk {'print $1'}|sort -u|grep -v "^root$"`
for i in `echo "${userlist}"` ; do
usermailsearch=`__ldapsearchhandler -b uid=${i},ou=people,${__LDAPDN} mail`
SEARCHSTATUS=$?
mailattrib=`echo "${usermailsearch}"|grep "^mail:.*"`
MAILSTATUS=$?
if [ "X${SEARCHSTATUS}" = "X0" -a "X${MAILSTATUS}" = "X0" ] ; then
__SUDO_USER_MAIL=`echo "${mailattrib}"|awk -F\: {'print $2'}|awk {'print $1'}`
__SUDO_USER="${i}"
break
fi
done
fi
for i in `echo "${__SITES}"` ; do
evalval=`echo "${__SUPPORTSERVERS}"|grep "\.${i}\."`
evalstring="__SUPPORT${i}=${evalval}"
eval "export ${evalstring}"
done
__SOFTWARE="osImage/${__OSNAME}/software/stable/${__ARCH}/${__RELEASE}"
__PKGS="osImage/${__OSNAME}/PKGS/stable/${__ARCH}/${__RELEASE}"
__GNU_R=`/usr/bin/lsb_release -r 2>/dev/null|awk {'print $2'}|awk -F\. {'print $1'}`
test "X${__SUDO_USER_MAIL}" = "X" && __SUDO_USER_MAIL='foo@foo'
test "X${__SUDO_USER}" = "X" && __SUDO_USER="${SUDO_USER}"
export __STAT __GNU_R __SOFTWARE __SUPPORTSERVERS __SITES __SHORTSUPPORT __SUPPORT __DNSDOMAIN __SITE __SITEMATRIX __cmEnable __CI __CUSTOMERBLOB __CLASSIFICATION __LDAPDN __LDAPADD __LDAPMODIFY __LDAPSEARCH __GREP __OSNAME __HOSTNAME __HOSTSHORT __ARCH __RELEASE __PKGS __SUDO_USER_MAIL __SUDO_USER __DATESTRING __TIMEINSECS __MYCLASSBEES __NMSITEHOSTS __NMSITEHOSTMATRIX
| true |
a172efc343994d2a1d453639947668be17e8fabd | Shell | JaideepBgit/TRDP-Database-generator | /img2pano_distortion/yolo_tests/test_eval_voc/calc_mAP_voc_py.sh | UTF-8 | 1,951 | 2.890625 | 3 | [] | no_license | # rem C:\Users\Alex\AppData\Local\Programs\Python\Python36\Scripts\pip install numpy
# Loop over distortion levels, run YOLOv2 detection over each distorted VOC
# test set with darknet, and compute per-level mAP with reval_voc_py3.py.
# rem C:\Users\Alex\AppData\Local\Programs\Python\Python36\Scripts\pip install numpy
# rem C:\Users\Alex\AppData\Local\Programs\Python\Python36\Scripts\pip install cPickle
# rem C:\Users\Alex\AppData\Local\Programs\Python\Python36\Scripts\pip install _pickle
# rem darknet.exe detector valid data/voc.data cfg/yolov2-tiny-voc.cfg yolov2-tiny-voc.weights
cd ~/darknet_AlexeyAB/build/darknet/x64
tests='0 0.1 0.2 0.3 0.4 0.5'
VOC_path='/home/pcl/darknet_AlexeyAB/build/darknet/x64/data/voc'
for test in $tests
do
echo 'Test '$test
# copy ImageSet folder to the appropiate dir
cp -Tr $VOC_path/VOCdevkit/VOC2007/ImageSets /home/pcl/img2pano_distortion/results_loop/dist$test/VOCdevkit/VOC2007/ImageSets
# read -n1 -r -p "Press space to continue..." key
# create list file with voc_label
python3 /home/pcl/img2pano_distortion/voc_label_input.py /home/pcl/img2pano_distortion/results_loop/dist$test
python3 /home/pcl/img2pano_distortion/voc_label_difficult_input.py /home/pcl/img2pano_distortion/results_loop/dist$test
# read -n1 -r -p "Press space to continue..." key
# TODO: create data file
# this version writes detection results to a results folder
./darknet detector valid /home/pcl/img2pano_distortion/yolo_tests/test_eval_voc/voc_dist$test.data ./cfg/yolov2-voc.cfg ./yolo-voc.weights
# fails trying to write detection results in a different folder
rm -rf ~/img2pano_distortion/yolo_tests/test_eval_voc/results_det_voc_dist$test/
cp -r ./results ~/img2pano_distortion/yolo_tests/test_eval_voc/results_det_voc_dist$test/
# python script fails if one class presents 0 detections
python3 reval_voc_py3.py --year 2007 --classes ./data/voc.names --image_set test --voc_dir /home/pcl/img2pano_distortion/results_loop/dist$test/VOCdevkit ./results > ~/img2pano_distortion/yolo_tests/test_eval_voc/mAP_results_dist$test.txt
done
read -n1 -r -p "Press space to continue..." key
20ed96ae442e9556fe20a9ad3c6d462b88af4542 | Shell | gh123man/dotfiles | /scripts/git-checkout-from | UTF-8 | 159 | 2.71875 | 3 | [] | no_license | #!/bin/bash
# Interactively pick a branch and a set of tracked files with fzf, then
# restore those files from the chosen branch into the working tree.
set -e
# NOTE(review): `git branch -vv` prefixes the current branch with '*', so
# awk's $1 is '*' if the current branch is selected — confirm intended.
branch=$(git --no-pager branch -vv | fzf +m | awk '{print $1}')
files=$(git ls-files | fzf -m)
# $files intentionally unquoted so multiple selections expand to separate
# arguments; breaks on paths containing spaces.
git checkout $branch -- $files
git status
| true |
cf16f76c656167b81dcf7969bca578af3862a516 | Shell | wsbu/docker-artifactory-cpp-ce | /start.sh | UTF-8 | 490 | 3.34375 | 3 | [] | no_license | #!/bin/bash
# Seed default config on first startup (etc dir still empty), start
# Artifactory, stream its log, and block until the Java process exits.
if [ ! "$(ls -A ${ARTIFACTORY_HOME})" ] ; then
    set -e
    echo "First startup detected. Copying default configuration files."
    find "${ARTIFACTORY_HOME}/etc.defaults" -type f -exec cp {} "${ARTIFACTORY_HOME}/etc" \;
fi
"${ARTIFACTORY_HOME}/bin/artifactory.sh" start
# Graceful shutdown when the container receives SIGTERM.
trap "'${ARTIFACTORY_HOME}/bin/artifactory.sh' stop" SIGTERM
tail -F "${ARTIFACTORY_HOME}/logs/artifactory.log" &
# Hold here until Artifactory exits
# NOTE(review): pgrep -f java matches *any* java process, not just Artifactory.
while pgrep -f java > /dev/null; do sleep 1; done;
| true |
4df332f6067f3dba0f0de754f46ccf1905014582 | Shell | Romop5/holoinjector-tests | /generateReadme.sh | UTF-8 | 1,140 | 3.546875 | 4 | [] | no_license | REPO_URL='https://github.com/Romop5/holoinjector-tests/raw/master/'
RESULTS=`find results |sort | grep '_normal' | grep 'png'`
rm README.md
echo '# holoinjector-tests
## About
Semi-automatized test dataset for [HoloInjector](https://github.com/Romop5/holoinjector)
## Usage
- run *getDataset.sh* to download & build selected OpenGL examples from the internet
- run *runDataset.sh* to run HoloInjector over exampleList.txt. This results in screenshots,
stored in results. This screenshots compare a regular application (_normal) with
converted.
- run *generateReadme.sh* to regenerate this README.md
## Images of results
First image: regular application
Second iamge: after conversion
Quilt used: 3x3
' >> README.md
echo "<div align='center'>" >> README.md
for RESULT in ${RESULTS}
do
CONVERTED=`echo ${RESULT} | sed 's/_normal/_converted/g'`
FILE_NAME=`echo ${RESULT} | sed 's/_normal.*//g'`
echo "
<img src='${REPO_URL}${RESULT}' alt='Original' height='200px'/>
<img src='${REPO_URL}${CONVERTED}' alt='Converted' height='200px'/>
<br>
<b>${FILE_NAME}</b>
<br>
" >> README.md
done
echo "</div>" >> README.md
| true |
387c41e76490476d7e663783579380608c4b7103 | Shell | szipfel/base-d7 | /build/import.sh | UTF-8 | 358 | 2.84375 | 3 | [] | no_license | #!/usr/bin/env bash
#get the folder name of the git repo we're in.
# Container names are derived from the repo directory name with underscores
# stripped (docker-compose's default project-name normalisation).
CONTAINER_PREFIX="$(git rev-parse --show-toplevel)"
CONTAINER_NAME="$(basename ${CONTAINER_PREFIX//_})"
echo "Importing database..."
#docker exec -i ${CONTAINER_NAME}_php_apache_1 gunzip ../files/gpen.sql.gz
# Pipe the SQL dump into drush's sql-cli inside the php_apache container.
docker exec -i ${CONTAINER_NAME}_php_apache_1 ../bin/drush sqlc < files/GPEN.sql
| true |
3edd6e3a69016d2c60021f267be05bde77b40e2f | Shell | 5l1v3r1/FBM-134 | /Bin/assets/Termux-Generate | UTF-8 | 566 | 2.5625 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# Show a fake install progress bar, then copy the Termux payload files into
# the tool's root and run the sender script.
proses(){
echo -ne 'Install FBM-134 : ##### (33%)\r'
sleep 1
echo -ne 'Install FBM-134 : ###### (40%)\r'
sleep 1
echo -ne 'Install FBM-134 : ######### (60%)\r'
sleep 1
echo -ne 'Install FBM-134 : ############ (80%)\r'
sleep 1
echo -ne 'Install FBM-134 : ##############(100%)\r'
sleep 1
clear
echo "Download Succesfully : $(pwd)"
echo "Please Wait ..."
sleep 3
generate
}
# Copy payload directories/files two levels up (the tool's base directory).
generate(){
cp -r Termux/Termux ../../
cp -r Termux/Banner ../../
cp Termux/console ../../
}
Send(){
python3 Hunky-Dory
}
# NOTE(review): proses already calls generate, so generate runs twice here —
# harmless (cp overwrites) but redundant; confirm before removing.
proses
generate
Send
836fede80c9f90da4a69004f5a45b8e77b61b2c7 | Shell | pmconrad/SounDAC-Source | /Docker/entrypoint.sh | UTF-8 | 177 | 2.6875 | 3 | [
"BSD-2-Clause",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | #!/bin/sh
# Container entrypoint: seed a default config into the data volume on first
# run, then replace this shell with the mused daemon (PID 1 signal handling).
cd ${WORKDIR}
if [ ! -f "${DATADIR}/config.ini" ]; then
    cp "${WORKDIR}/config.ini.default" "${DATADIR}/config.ini"
fi
exec /usr/bin/mused -d "${DATADIR}/" "$@"
| true |
5e6ee1fbe692d12d1a44d04f55b0d77df162d184 | Shell | sagarnikam123/learnNPractice | /pluralSight/cassandraForDevelopers/2_replicationAndConsistency/4_demoTunableConsistency.sh | UTF-8 | 2,966 | 2.5625 | 3 | [
"MIT"
] | permissive | # make sure you have 3 node cluster up
# Walkthrough: demonstrate Cassandra tunable consistency on a Docker
# cluster. NOTE(review): this is a command transcript, not a runnable
# script — the CQL statements (create/insert/select/consistency/tracing)
# are meant to be typed inside the cqlsh session opened just above them.
# Bring up a 3-node cluster: n1 first, then n2/n3 seeded from n1's IP.
docker run --name=n1 tobert/cassandra
docker inspect -f '{{ .NetworkSettings.IPAddress}}' n1
docker run --name=n2 tobert/cassandra -seeds 172.17.0.2
docker run --name=n3 tobert/cassandra -seeds 172.17.0.2
docker exec -it n1 cqlsh
# create keyspace
create keyspace pluralsight with replication = {'class' : 'SimpleStrategy', 'replication_factor' :3};
use pluralsight;
# creating table
create table courses (id varchar primary key);
# checking consistency
consistency;
insert into courses (id) values ('cassandra-developers');
# setting consistency to Quoram level
consistency quorum;
insert into courses (id) values ('building-asynchronous-restful-services-jersey');
consistency all;
tracing on;
insert into courses (id) values ('node-intro');
tracing off;
quit;
# stop one node n3 & try to write with consistency=ALL
docker stop n3
docker exec -it n1 cqlsh
use pluralsight;
consistency all;
# With RF=3 and one node down, ALL cannot be satisfied — the insert below fails:
# Unavailable: code=1000 [Unavailable exception] message="Cannot achieve consistency level ALL" info={'required_replicas': 3, 'alive_replicas': 2, 'consistency': 'ALL'}
insert into courses (id) values ('google-charts-by-example');
# with consistency=QUORUM
consistency quorum;
insert into courses (id) values ('google-charts-by-example');
select * from courses where id= 'cassandra-developers';
consistency all;
# Unavailable: code=1000 [Unavailable exception] message="Cannot achieve consistency level ALL" info={'required_replicas': 3, 'alive_replicas': 2, 'consistency': 'ALL'}
select * from courses where id= 'cassandra-developers';
# bring up & checking hinted hand_off
docker start n3
consistency all;
select * from courses where id= 'cassandra-developers';
docker stop n1 n2 n3; docker rm n1 n2 n3;
# multi-Datacenter architecture
docker run --name=n1 -d tobert/cassandra -dc DC1 -rack RAC1
docker inspect -f '{{.NetworkSettings.IPAddress}}' n1
docker run --name=n2 -d tobert/cassandra -dc DC1 -rack RAC2 -seeds 172.17.0.2
docker run --name=n3 -d tobert/cassandra -dc DC1 -rack RAC3 -seeds 172.17.0.2
docker run --name=n4 -d tobert/cassandra -dc DC2 -rack RAC1 -seeds 172.17.0.2
docker exec -it n1 nodetool status
docker exec -it n1 cqlsh
create keyspace pluralsight with replication = {'class' : 'NetworkTopologyStrategy', 'DC1' : 3, 'DC2' : 1};
use pluralsight;
create table courses (id varchar primary key);
consistency;
consistency local_one;
insert into courses (id) values ('cassandra-developers');
# stop 1 docker node & check nodetool status
docker stop n4
docker exec -it n1 nodetool status
use pluralsight;
consistency each_quorum;
# EACH_QUORUM needs a quorum in every DC; DC2's only node is down, so:
# Unavailable: code=1000 [Unavailable exception] message="Cannot achieve consistency level EACH_QUORUM" info={'required_replicas': 1, 'alive_replicas': 0, 'consistency': 'EACH_QUORUM'}
insert into courses (id) values ('node-intro');
# change consistency
consistency local_quorum;
insert into courses (id) values ('node-intro');
docker stop n1 n2 n3 n4; docker rm n1 n2 n3 n4;
| true |
9e2ef11b5af160ba7ee5eb5062ffabcf4e10b21e | Shell | octobot-dev/react-boilerplate | /scripts/go-script-bash/scripts/changes | UTF-8 | 864 | 4.09375 | 4 | [
"MIT",
"ISC",
"LicenseRef-scancode-free-unknown"
] | permissive | #! /bin/bash
#
# Generates changelog notes from git history
#
# Usage: {{go}} {{cmd}} <start_ref> <end_ref>
#
# Where:
# <start_ref> First commit reference to include (inclusive)
# <end_ref> Last commit reference to include (exclusive)
#
# Example:
# To compile a list of changes for v1.0.1 since v1.0.0:
# {{go}} {{cmd}} v1.0.0 v1.0.1
_changes() {
  # Completion support: the CLI completes only the two ref arguments,
  # offering the repository's tags for each.
  if [[ "$1" == '--complete' ]]; then
    local completion_index="$2"
    if [[ "$completion_index" -gt 1 ]]; then
      return 1
    fi
    git tag
    return
  fi

  local first_ref="$1"
  local last_ref="$2"

  # Both refs are mandatory; report the missing one on stderr.
  if [[ -z "$first_ref" ]]; then
    echo "Start ref not specified." >&2
    return 1
  fi
  if [[ -z "$last_ref" ]]; then
    echo "End ref not specified." >&2
    return 1
  fi

  # One changelog entry per commit: abbreviated hash plus author line,
  # followed by the indented subject.
  git log '--pretty=format:%h %an <%ae>%n  %s%n' \
    "${first_ref}..${last_ref}"
}
_changes "$@"
| true |
1c977b3031bbe8b4197500000f012db1129b5f16 | Shell | tanmaypardeshi/pictos-scripts | /chrome/chrome_uninstall.sh | UTF-8 | 220 | 3.171875 | 3 | [] | no_license | #!/bin/bash
# Remove Google Chrome, then clean up packages that are no longer needed.
# The if/else below reports the outcome and exits with a conventional
# status (the original had the codes inverted: success exited 1 and
# failure exited 0, which broke any caller checking the exit status).
cd ~ || exit 1
yes | sudo apt-get purge google-chrome-stable
yes | sudo apt-get autoremove
# NOTE(review): $? only reflects `apt-get autoremove`, not the purge —
# confirm whether the purge result should be checked instead.
if [ $? -eq 0 ]
then
    echo "Success:Uninstalled Chrome"
    exit 0
else
    echo "Failure: Could not uninstall Chrome"
    exit 1
fi
| true |
d3a9fc8de9903f6f9a7c0659ae9a15250705cfa9 | Shell | fabricio-godoi/scripts | /bash/arguments | UTF-8 | 1,237 | 4.15625 | 4 | [
"MIT"
] | permissive | #!/bin/bash
__HELP__="
Bash multiple argments input reader example
Parameters:
 help|--help|-h Show this message
 foo1=bar1 Get bar1 from variable foo1
 foo2 Check if the foo2 was provided
 foo3 bar3 Get bar3 for argument foo3
 Note: in this form, bar3 cannot be equal foo3. eg: $0 foo3 foo3
Obs.: if any of this parameter is not given in the property, it will be prompt when script is running
Examples.:
 $0 foo1=abc foo2 foo3 bar3
 $0 # This will show the help message as well
 $0 help
"
# Read parameters given in the script in the format xpto=abc
# No arguments at all is treated like an explicit help request.
if [ $# -eq 0 ]; then echo "$__HELP__" ; exit 0 ; fi
# Snapshot the positional parameters into an array so the foo3 handler
# can look one slot ahead ($i+1) for its value.
__arguments__=("$@")
for i in "${!__arguments__[@]}"; do
arg=${__arguments__[$i]}
case $arg in
-h | --help | help)
echo "$__HELP__"
exit 0;;
# key=value form: strip everything up to the last '=' to keep the value.
foo1=*) foo1=${arg##*=};;
# bare-flag form: presence is all that matters.
foo2) foo2=$arg;;
# two-word form: the value is the *next* argument.
# NOTE(review): the value argument is not consumed, so the loop examines
# it again on the next iteration — harmless with these patterns, but
# confirm before adding new ones.
foo3) foo3=${__arguments__[$i+1]}
esac
done
# Report what was captured (variables are unset when not provided).
if [ ! -z $foo1 ]; then echo "The value for arg foo1 is $foo1"; fi
if [ ! -z $foo2 ]; then echo "Argument foo2 was provided"; fi
if [ ! -z $foo3 ]; then echo "Value of foo3 is $foo3"; fi
| true |
abcf4b309b67c594705c56a3e0e34716cf87a832 | Shell | hckrs/hckrs.io | /tools/main.sh | UTF-8 | 1,615 | 4.1875 | 4 | [
"MIT"
] | permissive | #!/bin/bash
# Absolute path to the repository root (the directory above this script).
ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd ".." && pwd )"
TOOLS="$ROOT/tools"   # helper/admin scripts live here
WEB="$ROOT"           # the Meteor project directory itself
# Require to be in a project directory
function require_project_directory {
  # Abort unless the current working directory is the Meteor project
  # root ($WEB). Both sides of the comparison are quoted so a path
  # containing spaces — or an unset $WEB — no longer breaks the [ ]
  # test, which the original unquoted form did.
  if [ "$(pwd)" != "$WEB" ]; then
    echo "You are not in a Meteor project directory."
    echo ""
    exit 1
  fi
}
# Wait for meteor to be up and running
function wait_for_meteor {
  # Poll the `hckrs alive` subcommand (handled by the CLI dispatch at
  # the bottom of this script) every two seconds until Meteor answers.
  # NOTE(review): loops forever if the server never comes up — confirm
  # callers are fine without a timeout.
  while ! hckrs alive; do
    echo "wait for meteor"
    sleep 2
  done
  echo "meteor alive!"
}
# Check if meteor is up and running
function is_meteor_running {
  # HEAD-request localhost:3000; grep's exit status (does the HTTP/1.x
  # status line start with 2xx/3xx?) becomes the function's return value.
  curl -s --head http://localhost:3000 | head -n 1 | grep "HTTP/1.[01] [23].." > /dev/null
}
# Run meteor
function run {
  # Pick the last non-loopback IPv4 address reported by ifconfig
  # (handles both "inet addr:x.x.x.x" and "inet x.x.x.x" output styles).
  IP="$( ifconfig | grep -Eo 'inet (adr:|addr:)?([0-9]*\.){3}[0-9]*' | grep -Eo '([0-9]*\.){3}[0-9]*' | grep -v '127.0.0.1' | tail -1 )"
  # Serve through the xip.io wildcard-DNS service so other devices on
  # the LAN can reach the app by hostname.
  URL=http://$IP.xip.io:3000
  SETTINGS=$ROOT/settings/local.json
  # Meteor reads ROOT_URL from the environment to build absolute URLs.
  export ROOT_URL=$URL
  meteor run --settings $SETTINGS
}
# MAIN CLI
# Dispatch on the first CLI argument; no argument defaults to "run".
if [ "$1" = "" ] || [ "$1" = "run" ]; then
  # Start up local server
  require_project_directory
  run
elif [ "$1" = "hello" ]; then
  # Echo
  echo "Hi!"
elif [ "$1" = "alive" ]; then
  # Verify if meteor is alive
  is_meteor_running
elif [ "$1" = "wait" ]; then
  # Wait for meteor to be alive
  wait_for_meteor
elif [ "$1" = "reset" ]; then
  # Reset project/database
  require_project_directory
  meteor reset
# ADMIN / PRIVATE #
elif [ "$1" = "admin" ]; then
  # Delegates to private helper scripts in $TOOLS; the leading '#' in
  # the filenames below is part of the actual file name, not a comment.
  if [ "$2" = "backup" ]; then
    bash "$TOOLS/#private-backup-$3.sh"
  elif [ "$2" = "clone" ]; then
    bash "$TOOLS/#private-clone-$3-to-$4.sh"
  fi
# Otherwise
else
  echo "Unrecognized command!"
fi | true |
a2ee335f9a5d82d54ff15811582536678bafdbf8 | Shell | wenchaomeng/config | /scripts/login/expect_jump_cmd.sh | UTF-8 | 971 | 3.015625 | 3 | [] | no_license | #!/usr/bin/expect -f
# Expect script: log in to a jump host, hop to each real server, and
# either hand control to the user (interact) or run one command per
# server, appending its output to a log file.
# argv: jump-host user password server-list role [command] [log-file]
set jump [lindex $argv 0];
set username [lindex $argv 1];
set passwd [lindex $argv 2];
set realserverips [lindex $argv 3];
set role [lindex $argv 4];
set cmd [lindex $argv 5]
set log_file [lindex $argv 6]
# Default log destination when the caller did not supply one.
if { $log_file == "" } {
set log_file "/tmp/log"
}
set file [ open $log_file w ]
# Append one line to the log and flush so output survives a hang/kill.
proc log {msg} {
global file
puts $file $msg
flush $file
}
# Authenticate to the jump host; the "assword:" pattern matches both
# "Password:" and "password:" prompts.
spawn ssh $username@$jump
expect {
"assword:" { send "$passwd\r" }
}
foreach item $realserverips {
# Section header in the log for this server.
log "--------------------------------$item----------------------------------------"
expect {
"server keyword" { send "$item\r" }
}
expect {
"Select account" { send "$role\r" }
}
# No command given: drop the user into an interactive session.
if { $cmd == "" } {
interact
} else {
set timeout 60
append cmd "\r"
expect {
"~]\\$" { sleep 0.1; send $cmd }
}
expect {
"~]\\$" { log "$expect_out(buffer)";send "exit\r"}
}
expect {
"to last session or q to exit" { send "\r" }
}
}
}
exit
| true |
47a0e459e0b2162945d7503159312220431f985b | Shell | JavaQualitasCorpus/netbeans-7.3 | /cnd.modelimpl/test/whitebox/scripts/analyze-test-results-for-project.sh | UTF-8 | 791 | 3.640625 | 4 | [] | no_license | #!/bin/bash
#
# Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
#
# Collect the code-completion result XMLs under the given directory and
# feed them, with the project and index directories, to the analyzer jar.
# Positional args: $1 result dir, $2 project dir, $3 index dir.
# Options: -J <java-home>, -A <analyzer-home>.
function main() {
    # NOTE(review): java_home is parsed from -J below but never used —
    # the bare `java` at the end runs whatever is on PATH. Confirm
    # whether -J is supposed to take effect.
    java_home="/usr/bin"
    test_result_analyzer_home="/export/home/nk220367/projects/code-completion-test-result-analyzer"
    # NOTE(review): this find runs before option parsing, so if the
    # first argument is -J/-A the search root is wrong — confirm that
    # callers always pass the result directory first.
    files="`find $1 -name '*.xml'`"
    project_dir="$2"
    index_dir="$3"
    # Consume recognized option pairs; anything else is just shifted away.
    while [ -n "$1" ]
    do
        case "$1" in
            -J*)
                shift
                java_home="$1"
                shift
                continue
                ;;
            -A*)
                shift
                test_result_analyzer_home="$1"
                shift
                continue
                ;;
        esac
        shift
    done
#    echo "Analyzing:"
#    echo "$files"
    java -jar "${test_result_analyzer_home}/dist/code-completion-test-result-analyzer.jar" $project_dir $index_dir $files
}
main $@
| true |
b2978d53b04e30064f7877e5ce83f108f3dc1cc8 | Shell | Munro-L/DiscordNLP | /docker_up.sh | UTF-8 | 787 | 3.625 | 4 | [] | no_license | #!/bin/bash
# Build and run a DiscordNLP container. Usage: docker_up.sh {bot|trainer}
if [ $# -eq 0 ]; then
    echo "No arguments supplied"
    echo "Execute 'docker_up.sh bot' to run the bot, or 'docker_up.sh trainer' to run the trainer"
    exit 1   # fixed: error paths previously exited 0, hiding the failure from callers
fi

# `command -v` is the portable replacement for deprecated `which`; also
# probe `docker --version` (as the original did) so a broken client is
# caught, not just a missing binary.
if ! { command -v docker >/dev/null 2>&1 && docker --version >/dev/null 2>&1; }; then
    echo "Docker is not installed. Make sure it is installed, and your user is in the Docker group."
    exit 1   # fixed: was a bare `exit` (status 0) on an error path
fi

if [ "$1" == "bot" ]; then
    # Dockerfile is piped on stdin, so no build context is uploaded.
    docker build -t discordnlp:bot - < Dockerfile_bot
    docker run -i -t --network=host --workdir=/DiscordNLP -v "$(pwd)":/DiscordNLP discordnlp:bot python3 DiscordNLP.py
elif [ "$1" == "trainer" ]; then
    docker build -t discordnlp:trainer - < Dockerfile_trainer
    docker run -i -t --network=host --workdir=/DiscordNLP -v "$(pwd)":/DiscordNLP discordnlp:trainer jupyter notebook --allow-root --port=8889
fi
| true |
3ce04a63dbfa2e1efe2a5cb9368963d9f889bb4b | Shell | kingspp/downloadmanager | /scripts/run.sh | UTF-8 | 1,115 | 3.171875 | 3 | [] | no_license | #!/bin/bash
# Detect the host OS, make sure the aria2 download engine is installed,
# start it in RPC mode, then build (if needed) and launch the Java app.
os=""   # fixed: `os = ""` is not an assignment in shell — it tried to run a command named `os`
if [ "$(uname)" == "Darwin" ]; then
    os="mac"       # Do something under Mac OS X platform
elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then
    os="linux"     # Do something under GNU/Linux platform
elif [ "$(expr substr $(uname -s) 1 10)" == "MINGW32_NT" ]; then
    os="windows"   # Do something under Windows NT platform
fi

# Install aria2 with the platform's package manager when it is missing.
if [ "$os" == "linux" ]; then
    command -v aria2c >/dev/null 2>&1 || { echo "Installing Aria2 Library" >&2; sudo apt-get -y install aria2;}
elif [ "$os" == "mac" ]; then
    command -v aria2c >/dev/null 2>&1 || { echo "Installing Aria2 Library" >&2; brew install aria2;} #Requires Testing
elif [ "$os" == "windows" ]; then
    : #Install windows cygwin (TODO)
fi   # fixed: this `fi` was missing, so the original script failed to parse at all
# NOTE(review): with the missing `fi`, the steps below were presumably
# meant to run on every platform after the install check — confirm.

# Expose aria2 over RPC so the Java download manager can drive it.
aria2c --enable-rpc --rpc-listen-all &

# Build the fat jar on first run.
if [ ! -f ../target/DownloadManager-jar-with-dependencies.jar ]; then
    echo "Jar not Found, Running Build. . ."   # fixed: echo was pointlessly backgrounded with `&`
    mvn -f ../pom.xml clean compile assembly:single
fi
java -jar ../target/DownloadManager-jar-with-dependencies.jar &
sleep 5
# For Macintosh
#open http://localhost:9999
#To do.. check for all environments and run actions based on environments
| true |
b11f984d5e5b956f6592052539ca6f8928318147 | Shell | thioshp/gedit-external-tools | /git_status.sh | UTF-8 | 545 | 3.109375 | 3 | [
"MIT",
"Apache-2.0"
] | permissive | #!/bin/sh
# [Gedit Tool]
# Name=[git] status
# Shortcut=<Control><Alt>s
# Applicability=titled
# Output=nothing
# Input=nothing
# Save-files=nothing
# Shows git status
# (depends on git, ruby, zenity)
#
# Save: Nothing
# Input: Nothing
# Output: Nothing
#
# by Jan Lelis (mail@janlelis.de), edited by (you?)
# Pop up `git status` in a zenity dialog when run inside a repository.
# (The "[Gedit Tool]" comment header above is parsed by gedit as tool
# metadata and must stay untouched.)
# Quoted $(...) replaces the original unquoted backticks inside
# `[ ! -z ... ]`, and stderr is silenced so rev-parse's "not a git
# repository" noise does not leak to the terminal.
if [ -n "$(git rev-parse --git-dir 2>/dev/null)" ]; then
  # Strip the leading '#' that older git versions prefix to each status line.
  git status | ruby -pe '$_.slice!(0) if ~/^#/' | zenity --text-info --width=600 --height=600 --title="git status"
else
  zenity --error --title='git status' --text='Sorry, not a git repository'
fi
| true |
b1ecf45b017aa67b0fc41c9ae4e98bb8e5fd4c2b | Shell | mquinodo/MutScore | /01_clinvar_v0.8-all.sh | UTF-8 | 10,626 | 2.734375 | 3 | [] | no_license | #!/bin/bash
### Made by Mathieu Quinodoz
### February 2021
############# take different type of variants (PLP, BLB, VUS and conflicting) from clinvar vcf
# Working directory for the whole pipeline; every path below hangs off it.
here=/home/mquinodo/SYNO/WES/Clinvar2
mkdir -p $here/01_ClinVar-VCF
# download ClinVar VCF file(s) from their website: https://ftp.ncbi.nlm.nih.gov/pub/clinvar/vcf_GRCh37/
# and put it here: $here/01_ClinVar-VCF/
# for training set
batch=20201121
gunzip $here/01_ClinVar-VCF/clinvar_$batch.vcf.gz
mkdir -p $here/clinvar-$batch-PLP/variants $here/clinvar-$batch-BLB/variants $here/clinvar-$batch-VUS/variants $here/clinvar-$batch-CON/variants
# The four pipelines below split the ClinVar VCF by clinical significance
# (PLP / BLB / VUS / conflicting) and rewrite each record as a minimal
# single-sample VCF line with fixed dummy INFO/genotype fields so the
# downstream annotation step accepts it.
grep -P "CLNSIG=Pathogenic;|CLNSIG=Likely_pathogenic;|CLNSIG=Pathogenic/Likely_pathogenic;" $here/01_ClinVar-VCF/clinvar_$batch.vcf | awk -F"\t" '{print "chr" $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $8 "\t" $6 "\t" $8}' | awk -F"\t" '{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t.\tAC=1;AF=0.500;AN=2;BaseQRankSum=4.079;DP=24;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=70.00;MQRankSum=0.000;QD=15.85;ReadPosRankSum=0.810;SOR=0.481\tGT:AD:DP:GQ:PL\t0/1:13,10:23:99:372,0,445"}' > $here/clinvar-$batch-PLP/variants/clinvar-$batch-PLP.variants.haplotype.vcf
grep -P "CLNSIG=Likely_benign;|CLNSIG=Benign;|CLNSIG=Benign/Likely_benign;" $here/01_ClinVar-VCF/clinvar_$batch.vcf | awk -F"\t" '{print "chr" $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $8 "\t" $6 "\t" $8}' | awk -F"\t" '{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t.\tAC=1;AF=0.500;AN=2;BaseQRankSum=4.079;DP=24;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=70.00;MQRankSum=0.000;QD=15.85;ReadPosRankSum=0.810;SOR=0.481\tGT:AD:DP:GQ:PL\t0/1:13,10:23:99:372,0,445"}' > $here/clinvar-$batch-BLB/variants/clinvar-$batch-BLB.variants.haplotype.vcf
grep -P "CLNSIG=Uncertain_significance;" $here/01_ClinVar-VCF/clinvar_$batch.vcf | awk -F"\t" '{print "chr" $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $8 "\t" $6 "\t" $8}' | awk -F"\t" '{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t.\tAC=1;AF=0.500;AN=2;BaseQRankSum=4.079;DP=24;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=70.00;MQRankSum=0.000;QD=15.85;ReadPosRankSum=0.810;SOR=0.481\tGT:AD:DP:GQ:PL\t0/1:13,10:23:99:372,0,445"}' > $here/clinvar-$batch-VUS/variants/clinvar-$batch-VUS.variants.haplotype.vcf
grep -P "CLNSIG=Conflicting_interpretations_of_pathogenicity;" $here/01_ClinVar-VCF/clinvar_$batch.vcf | awk -F"\t" '{print "chr" $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $8 "\t" $6 "\t" $8}' | awk -F"\t" '{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t.\tAC=1;AF=0.500;AN=2;BaseQRankSum=4.079;DP=24;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=70.00;MQRankSum=0.000;QD=15.85;ReadPosRankSum=0.810;SOR=0.481\tGT:AD:DP:GQ:PL\t0/1:13,10:23:99:372,0,445"}' > $here/clinvar-$batch-CON/variants/clinvar-$batch-CON.variants.haplotype.vcf
gzip $here/01_ClinVar-VCF/clinvar_$batch.vcf
# for testing set 1
# Same PLP/BLB extraction for a later ClinVar release; only the
# chrom-pos-ref-alt keys are kept before the intermediates are removed.
batch=20210404
gunzip $here/01_ClinVar-VCF/clinvar_$batch.vcf.gz
mkdir -p $here/clinvar-$batch-PLP/variants $here/clinvar-$batch-BLB/variants $here/testing-set-1
grep -P "CLNSIG=Pathogenic;|CLNSIG=Likely_pathogenic;|CLNSIG=Pathogenic/Likely_pathogenic;" $here/01_ClinVar-VCF/clinvar_$batch.vcf | awk -F"\t" '{print "chr" $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $8 "\t" $6 "\t" $8}' | awk -F"\t" '{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t.\tAC=1;AF=0.500;AN=2;BaseQRankSum=4.079;DP=24;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=70.00;MQRankSum=0.000;QD=15.85;ReadPosRankSum=0.810;SOR=0.481\tGT:AD:DP:GQ:PL\t0/1:13,10:23:99:372,0,445"}' > $here/clinvar-$batch-PLP/variants/clinvar-$batch-PLP.variants.haplotype.vcf
grep -P "CLNSIG=Likely_benign;|CLNSIG=Benign;|CLNSIG=Benign/Likely_benign;" $here/01_ClinVar-VCF/clinvar_$batch.vcf | awk -F"\t" '{print "chr" $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $8 "\t" $6 "\t" $8}' | awk -F"\t" '{print $1 "\t" $2 "\t" $3 "\t" $4 "\t" $5 "\t" $6 "\t.\tAC=1;AF=0.500;AN=2;BaseQRankSum=4.079;DP=24;ExcessHet=3.0103;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=70.00;MQRankSum=0.000;QD=15.85;ReadPosRankSum=0.810;SOR=0.481\tGT:AD:DP:GQ:PL\t0/1:13,10:23:99:372,0,445"}' > $here/clinvar-$batch-BLB/variants/clinvar-$batch-BLB.variants.haplotype.vcf
awk -F"\t" '{print $1 "-" $2 "-" $4 "-" $5}' $here/clinvar-$batch-PLP/variants/clinvar-$batch-PLP.variants.haplotype.vcf | sed -e 's/chr//g' > $here/testing-set-1/clinvar-$batch-PLP.pos.tsv
awk -F"\t" '{print $1 "-" $2 "-" $4 "-" $5}' $here/clinvar-$batch-BLB/variants/clinvar-$batch-BLB.variants.haplotype.vcf | sed -e 's/chr//g' > $here/testing-set-1/clinvar-$batch-BLB.pos.tsv
gzip $here/01_ClinVar-VCF/clinvar_$batch.vcf
rm -rf $here/clinvar-$batch-PLP $here/clinvar-$batch-BLB
############# annotate ClinVar, gnomAD and dbNFSP with ANNOVAR
version=clinvar_annotation
script=$here/00_scripts/$version
batch=20201121
# Launch one background annotation job per variant set; each job logs to
# its own annotation.log.tsv under the per-set output directory.
for file in clinvar-$batch-BLB clinvar-$batch-PLP clinvar-$batch-VUS clinvar-$batch-CON dbNFSP4.0 gnomAD-all
do
outdir=$here/$file/$version
mkdir -p $outdir
vcf=$here/$file/variants/$file.variants.haplotype.vcf
# nohup + & : all six sets are annotated in parallel and survive logout.
nice nohup bash $script/annotation_wo_mutscore.sh $here $vcf $file $script $outdir > $outdir/annotation.log.tsv &
done
############# extract variants after annotation by isoforms for ClinVar
batch=20201121
# For each ClinVar set, expand ANNOVAR's comma-separated per-isoform
# annotations into one output row per isoform (exonic variants), and add
# placeholder columns for splicing/intronic/UTR variants so both outputs
# share the same column layout before being concatenated.
for type in clinvar-$batch-PLP clinvar-$batch-VUS clinvar-$batch-CON clinvar-$batch-BLB
do
echo $type
file=$here/$type/clinvar_annotation/$type.avinput.exonic_splicing.clean.annovar
grep -v -P "^#|BRCA2:NM_000059.3:exon10:c.1888dupA:p.L629fs" $file.tsv | grep -P "^nonsynonymous|^frameshift|^stopgain|^synonymous" | cut -f1-8,10-54 | awk -F"\t" -v type="$type" '{printf $1; for (j = 2; j <= 9; j++) {printf "\t" $j}; printf "\t.\t.\t."; for (j = 10; j <= NF; j++) {printf "\t" $j}; print ""}' | awk -F"\t" -v type="$type" '{n=split($3,a,","); for (i = 1; i < n; i++) {split(a[i],b,":"); printf type "\t" b[2] "\t" b[4] "\t" b[5] "\t" $1 "\t" b[1] "\t" b[3]; for (j = 3; j <= NF; j++) {printf "\t" $j}; print ""}}' > $file.$type.isoform-info.temp.tsv
grep -P "^splicing|^intronic|^UTR" $file.tsv | cut -f1-8,10-54 | awk -F"\t" -v type="$type" '{printf $1; for (j = 2; j <= 9; j++) {printf "\t" $j}; printf "\t.\t.\t."; for (j = 10; j <= NF; j++) {printf "\t" $j}; print ""}' | awk -F"\t" -v type="$type" '{printf type "\t.\t.\t.\t"; printf $1 "\t" $2 "\t."; for (j = 3; j <= NF; j++) {printf "\t" $j}; print ""}' > $file.$type.isoform-info.temp2.tsv
cat $file.$type.isoform-info.temp.tsv $file.$type.isoform-info.temp2.tsv > $file.$type.isoform-info.$batch.tsv
rm $file.$type.isoform-info.temp2.tsv $file.$type.isoform-info.temp.tsv
done
# gene list of genes with PLP variants
mkdir -p $here/data-$batch
type=clinvar-$batch-PLP
# Unique gene symbols (column 6), tab-padded for exact-match grepping.
cat $here/$type/clinvar_annotation/$type.avinput.exonic_splicing.clean.annovar.$type.isoform-info.$batch.tsv | cut -f6 | sort | uniq | awk -F"\t" '{print "\t" $1 "\t"}' > $here/data-$batch/gene-list.$batch.tsv
############# extract variants after annotation by isoforms for gnomAD and dbNFSP
batch=20201121
# gnomAD: same per-isoform expansion as the ClinVar sets above (exonic
# rows expanded, splicing/intronic/UTR rows padded, then concatenated).
type=gnomAD-all
file=$here/$type/clinvar_annotation/$type.avinput.exonic_splicing.clean.annovar
grep -v "^#" $file.tsv | grep -P "^nonsynonymous|^frameshift|^stopgain|^synonymous" | cut -f1-8,10-54 | awk -F"\t" -v type="$type" '{printf $1; for (j = 2; j <= 9; j++) {printf "\t" $j}; printf "\t.\t.\t."; for (j = 10; j <= NF; j++) {printf "\t" $j}; print ""}' | awk -F"\t" -v type="$type" '{n=split($3,a,","); for (i = 1; i < n; i++) {split(a[i],b,":"); printf type "\t" b[2] "\t" b[4] "\t" b[5] "\t" $1 "\t" b[1] "\t" b[3]; for (j = 3; j <= NF; j++) {printf "\t" $j}; print ""}}' > $file.$type.isoform-info.temp.tsv
grep -P "^splicing|^intronic|^UTR" $file.tsv | cut -f1-8,10-54 | awk -F"\t" -v type="$type" '{printf $1; for (j = 2; j <= 9; j++) {printf "\t" $j}; printf "\t.\t.\t."; for (j = 10; j <= NF; j++) {printf "\t" $j}; print ""}' | awk -F"\t" -v type="$type" '{printf type "\t.\t.\t.\t"; printf $1 "\t" $2 "\t."; for (j = 3; j <= NF; j++) {printf "\t" $j}; print ""}' > $file.$type.isoform-info.temp2.tsv
cat $file.$type.isoform-info.temp.tsv $file.$type.isoform-info.temp2.tsv > $file.$type.isoform-info.$batch.tsv
rm $file.$type.isoform-info.temp2.tsv $file.$type.isoform-info.temp.tsv
# dbNFSP: nonsynonymous variants only, expanded per isoform directly.
type=dbNFSP4.0
file=$here/$type/clinvar_annotation/$type.avinput.exonic_splicing.clean.annovar
grep -v "^#" $file.tsv | grep -P "^nonsynonymous" | cut -f1-8,10-54 | awk -F"\t" -v type="$type" '{printf $1; for (j = 2; j <= 9; j++) {printf "\t" $j}; printf "\t.\t.\t."; for (j = 10; j <= NF; j++) {printf "\t" $j}; print ""}' | awk -F"\t" -v type="$type" '{n=split($3,a,","); for (i = 1; i < n; i++) {split(a[i],b,":"); printf type "\t" b[2] "\t" b[4] "\t" b[5] "\t" $1 "\t" b[1] "\t" b[3]; for (j = 3; j <= NF; j++) {printf "\t" $j}; print ""}}' > $file.$type.isoform-info.$batch.tsv
############# merge gnomAD and dbNFSP with ClinVar variants (after parsing of ClinVar information)
batch=20201121
mkdir -p $here/data-$batch
mkdir -p $here/plots-$batch
gene=ALL
# Concatenate every per-set isoform table, drop whole-gene events and
# one unused column, then parse selected ClinVar INFO keys (column 13)
# into their own columns.
cat $here/*/clinvar_annotation/*.isoform-info.$batch.tsv | grep -v wholegene | cut -f1-7,9- > $here/data-$batch/$gene.temp.tsv
cut -f1-12,14- $here/data-$batch/$gene.temp.tsv > $here/data-$batch/$gene.temp1.tsv
# Pull CLNREVSTAT (review status) out of the semicolon-separated INFO field.
cut -f13 $here/data-$batch/$gene.temp.tsv | awk -F"\t" '{n=split($1,a,";"); c=0; for (i = 1; i <= n; i++) {if(a[i] ~ /^CLNREVSTAT/) {n=split(a[i],b,"="); print b[2]; c=1} } if( c == 0) print "NA" }' > $here/data-$batch/$gene.REV.tsv
# Pull ORIGIN (allele origin code).
cut -f13 $here/data-$batch/$gene.temp.tsv | awk -F"\t" '{n=split($1,a,";"); c=0; for (i = 1; i <= n; i++) {if(a[i] ~ /^ORIGIN/) {n=split(a[i],b,"="); print b[2]; c=1} } if( c == 0) print "NA" }' > $here/data-$batch/$gene.ORI.tsv
# Pull CLNDISDB and reduce it to a comma-separated list of OMIM IDs.
cut -f13 $here/data-$batch/$gene.temp.tsv | awk -F"\t" '{n=split($1,a,";"); c=0; for (i = 1; i <= n; i++) {if(a[i] ~ /^CLNDISDB/) {n=split(a[i],b,"="); print b[2]; c=1} } if( c == 0) print "NA" }' > $here/data-$batch/$gene.temp3.tsv
cat $here/data-$batch/$gene.temp3.tsv | awk -F"\t" '{n=split($1,a,","); c=0; for (i = 1; i <= n; i++) {if(a[i] ~ /^OMIM/) {n=split(a[i],b,":"); if( c == 1) {printf "," b[2]}; if( c == 0) {printf b[2]}; c=1} } if( c == 0) {printf "NA"} print ""}' > $here/data-$batch/$gene.temp4.tsv
cat $here/data-$batch/$gene.temp4.tsv | awk -F"\t" '{n=split($1,a,"|"); print a[1]}' > $here/data-$batch/$gene.OMIM.tsv
# Reassemble: trimmed table + parsed REV/OMIM/ORIGIN columns.
paste -d"\t" $here/data-$batch/$gene.temp1.tsv $here/data-$batch/$gene.REV.tsv $here/data-$batch/$gene.OMIM.tsv $here/data-$batch/$gene.ORI.tsv > $here/data-$batch/$gene.tsv
rm $here/data-$batch/$gene.temp*.tsv $here/data-$batch/$gene.REV.tsv $here/data-$batch/$gene.OMIM.tsv $here/data-$batch/$gene.ORI.tsv
# NOTE(review): the paste above writes $gene.tsv, but this head reads
# $gene-$batch.tsv, which no step here creates — confirm the intended
# filename before relying on $gene.head.tsv.
head $here/data-$batch/$gene-$batch.tsv > $here/data-$batch/$gene.head.tsv
mkdir -p $here/Shiny
| true |
756cfb0e3a77d0ad28b8d48e419c090f83c5bcb5 | Shell | seiya-tsukada/get_load_resource | /module/top.sh | UTF-8 | 324 | 3.265625 | 3 | [] | no_license | #! /bin/sh
current_dir=`cd $(dirname ${0}) && pwd`
target_dir=`cd ${current_dir}/../ && pwd`
log_dir="${target_dir}/log/top"
pid_file="${target_dir}/log/top.pid"
if [ ! -e ${log_dir} ]; then
mkdir -p ${log_dir}
fi
echo $$ > ${pid_file}
while :
do
top -b -n 1 > ${log_dir}/top_`date +%H%M%S`.log
sleep 1
done
exit
| true |
5ad04053f669053115c3d69748416f18dbdbe372 | Shell | YangZhou1997/NetBricks-GEM5 | /build_gem5.sh | UTF-8 | 1,729 | 3.140625 | 3 | [
"ISC"
] | permissive | #!/bin/bash
source ./config.sh
set -e
BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd)"
BUILD_SCRIPT=$( basename "$0" )
if [[ -z ${CARGO_INCREMENTAL} ]] || [[ $CARGO_INCREMENTAL = false ]] || [[ $CARGO_INCREMENTAL = 0 ]]; then
export CARGO_INCREMENTAL="CARGO_INCREMENTAL=0 "
fi
if [[ -z ${RUST_BACKTRACE} ]] || [[ RUST_BACKTRACE = true ]] || [[ RUST_BACKTRACE = 1 ]]; then
export RUST_BACKTRACE="RUST_BACKTRACE=1 "
fi
echo "Current Cargo Incremental Setting: ${CARGO_INCREMENTAL}"
echo "Current Rust Backtrace Setting: ${RUST_BACKTRACE}"
echo $TARGET
export CARGO_TARGET_ARM_UNKNOWN_LINUX_MUSLEABI_LINKER=arm-linux-gnueabi-gcc
export CC_arm_unknown_linux_musleabi=arm-linux-gnueabi-gcc
cargo build --target arm-unknown-linux-musleabi --release
# if [ "$MODE" == "debug" ]; then
# # cargo +stage1 build
# # cargo build --target=$TARGET
# cross build --target=$TARGET
# else
# # cargo +stage1 build --release
# # cargo build --target=$TARGET --release
# cross build --target=$TARGET --release
# fi
# directly building all NFs using customized rustc without stack overflow check.
# for TASK in acl-fw dpi lpm macswap maglev monitoring nat-tcp-v4 acl-fw-ipsec dpi-ipsec lpm-ipsec macswap-ipsec maglev-ipsec monitoring-ipsec nat-tcp-v4-ipsec dumptrace
# for TASK in macswap dumptrace dpi-master spmc dpi
# for TASK in dpi macswap spmc
# for TASK in macswap dpi spmc
# do
# # Build enclave APP
# pushd examples/$TASK
# if [ "$MODE" == "debug" ]; then
# # cargo +stage1 build
# # cargo build --target=$TARGET
# cross build --target=$TARGET
# else
# # cargo +stage1 build --release
# # cargo build --target=$TARGET --release
# cross build --target=$TARGET --release
# fi
# popd
# done
| true |
cdf47694fce4f72ea3344fd06c2825a14b127180 | Shell | tourunen/cannonfodder | /html_to_pdf.bash | UTF-8 | 386 | 2.875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
for file in $*; do
echo "converting $file"
filebase=$(basename $file)
# htmldoc --no-toc --no-title --no-numbered --footer ' ' --embedfonts --outfile $filebase.pdf $file
#./node_modules/.bin/html5-to-pdf --page-size A4 --template htmlbootstrap -o $filebase.pdf $file
./node_modules/.bin/html5-to-pdf --page-size A4 -o $filebase.pdf $file
done
| true |
99270049b1d5fa69cfe31fb2366a34f80168b02c | Shell | YunKillerE/sys_init | /script/post/13user_group_add.sh | UTF-8 | 1,229 | 3.375 | 3 | [] | no_license | #!/bin/sh
#****************************************************************#
# ScriptName: 13user_group_add.sh
# Author: liujmsunits@hotmail.com
# Create Date: 2013-04-12 13:49
# Modify Author: liujmsunits@hotmail.com
# Modify Date: 2015-05-07 06:24
# Function:
#***************************************************************#
function group_add(){
#you can add the group that need to add to the grouplist.
grouplist="
users:100
logs:1338
ads:1685
database:1239
"
for i in $grouplist;do
gid=`echo $i|cut -d ':' -f2`
group=`echo $i|cut -d ':' -f1`
groupadd -g $gid $group;
done
}
function user_add(){
#you can add the user that need to add to the grouplist.
#before the : is username,after the : is user group ,if no group then use then username.
userlist="
admin:admin
logs:logs
mysql:database
db:database
"
for i in $userlist;do
gid=`echo $i|cut -d ':' -f2`
user=`echo $i|cut -d ':' -f1`
useradd -g $gid $user;
if [[ $? != 0 ]];then
echo "$user add fail,may be the group $gid dosn't exist" >> $errorfile
fi
done
}
function key_init(){
cp $PWD/conman/system/root.id_rsa /root/.ssh/id_rsa
cp $PWD/conman/system/root.id_rsa.pub /root/.ssh/id_rsa.pub
chown root:root /root/.bash*
chmod 600 /root/.ssh/id*
}
user_add
| true |
66a1eaa083af06db416be88ef9072b2d82a2cd50 | Shell | ammolitor/bin | /old-scripts/a123/clean-perf-data.sh | UTF-8 | 1,238 | 3.765625 | 4 | [] | no_license | #!/bin/bash
#===============================================================================
#
# FILE: clean-perf-data.sh
#
# USAGE: ./clean-perf-data.sh raw-data-file
#
# DESCRIPTION: cleans up data collected with date + top script
#
# CREATED: 11/15/2012 11:11:43 AM CST
# REVISION: ---
#===============================================================================
# move date to the beginning of each subsequent line
# redirect to new file and then move new over original
awk ' /^[A-Z]/{ date=sprintf("%s-%s-%s", $2, $3, $4) }
!/^[A-Z]/{ gsub(/ +/, "\t") ; print date $0 }' $1 > $1.new
mv $1.new $1
# divide-files
# grabs each unique command and greps it out of base file and outputs that data
# to a new file
mkdir output
for cmd in `awk '{print $13}' $1 | sort -u`
do
outfile=$cmd.out
# blindly make file empty regardless if it exists
cat /dev/null > output/$outfile
# echo header line into new (or enmpty) file
echo -e "DATE\tPID\tUSER\tPR\tNI\tVIRT\tRES\tSHR\tS\t%CPU\t%MEM\tTIME+\tCOMMAND" >> output/$outfile
# grep for cmd in file and output it to file
grep -w $cmd $1 >> output/$outfile
awk -F "\t" '{print $1 "\t" $10 "\t" $11}' output/$outfile >> output/$outfile.lite
done
| true |
1ba1973fc1887eaf44a2cabcb4275b504a320667 | Shell | pleebe/4plebs-downloads | /4plebs_thumbnail_image_dump_download.sh | UTF-8 | 501 | 3.6875 | 4 | [] | no_license | #!/bin/bash
echo "4plebs.org thumbnail image dump download script"
echo
echo " This will take around 104GB of hard drive space. All downloads will go to current working directory."
echo
read -p " Are you sure? [y/n] " -n 1 -r
echo
echo
if [[ ! $REPLY =~ ^[Yy]$ ]]
then
[[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1
else
for board in adv f hr o pol s4s sp tg trv tv x
do
wget -c https://archive.org/download/4plebs-org-thumbnail-dump-2019-01/${board}_thumbs.tar.gz;
done
fi
| true |
cd76e83da93465fcc409dee85a54249a8a11081f | Shell | tokcum/ArchInstaller | /packages/app/ipmiview.sh | UTF-8 | 728 | 2.9375 | 3 | [] | no_license | #!/usr/bin/env bash
if [ `id -u` -ne 0 ]; then
echo "ERROR! `basename ${0}` must be executed as root."
exit 1
fi
CORE_PKG=$(basename ${0} .sh)
MORE_PKGS=""
IS_INSTALLED=$(pacman -Qqm ${CORE_PKG})
if [ $? -ne 0 ]; then
packer -S --noedit --noconfirm ${CORE_PKG} ${MORE_PKGS}
else
echo "${CORE_PKG} is already installed."
fi
wget -c ftp://ftp.supermicro.com/CDR-0010_2.10_IPMI_Server_Managment/res/smcc.ico -O /opt/SUPERMICRO/IPMIView/smcc.ico
cat << DESKTOP > /usr/share/applications/ipmiview.desktop
[Desktop Entry]
Version=1.0
Exec=/opt/IPMIView/IPMIView20.sh
Icon=/opt/SUPERMICRO/IPMIView/smcc.ico
Name=IPMI View
Comment=IPMI View
Encoding=UTF-8
Terminal=false
Type=Application
Categories=System;
DESKTOP
| true |
78f2732dfccf089b3f6f6408d7daef15eec50870 | Shell | emazzotta/dotfiles | /bin/gunco | UTF-8 | 173 | 3.296875 | 3 | [] | no_license | #!/bin/bash
if ! test "${1}" || grep '^[-0-9]*$' <<< ${1}; then
git reset --soft HEAD~${1}
else
echo "usage: $(basename "${0}") [the last n commits] (default: 1)"
fi
| true |
ac685571344e74f015cb7622c9714a62eecbbc91 | Shell | timschwuchow/RationalBubble | /FullModel/sim/spec_monop/SimVFSlowKill | UTF-8 | 368 | 3.34375 | 3 | [] | no_license | #!/bin/bash
killtime=`echo ${1%%.*} | sed 's/[^0-9]//g'`
if [ -n $killtime ]; then
for vf in `qstat | egrep -i 'vf[0-9]* ' | grep ' r ' | awk '{print $3}'`; do test=`cat $vf.txt | tail -n 1 | awk '{print $6}'`; if [ ${#test} -ge 1 ]; then if [ ${test%%.*} -ge $killtime ]; then cat $vf.txt | tail -n 1; tsqdel " $vf "; fi; fi ; done
else
echo "Needs an argument"
fi | true |
fbd03c2353387a697a5bbc3e276db1b9ff478bee | Shell | lucas-gustavo-garzon/Jenkins-training | /recursos/pipelines/installation.sh | UTF-8 | 137 | 2.8125 | 3 | [] | no_license | #!/bin/bash
docker-compose version
if [ $? -eq 0 ]
then
docker-compose up -d
else
echo 'No tienes docker-compose installado'
fi | true |
334d2bf81d0036465b7266676a6496d90a7beefe | Shell | ilventu/aur-mirror | /ttf-meslo/PKGBUILD | UTF-8 | 422 | 2.671875 | 3 | [] | no_license | pkgname=ttf-meslo
pkgver=1.0
pkgrel=1
pkgdesc="Meslo Font"
arch=('any')
url="http://github.com/andreberg/Meslo-Font"
license=('Apache')
depends=('fontconfig' 'xorg-font-utils')
source=(https://github.com/downloads/andreberg/Meslo-Font/Meslo%20LG%20v1.0.zip)
md5sums=('d55dcfb246ca0e4564a02095cd8d5d9a')
build()
{
mkdir -p $pkgdir/usr/share/fonts/TTF
cp $srcdir/Meslo\ LG\ v$pkgver/*.ttf $pkgdir/usr/share/fonts/TTF
}
| true |
234386017465f7f8cc30ff765173fe3bb08f2eda | Shell | chenlong828/deploy_utility | /salt/dev-base/init-salt.sh | UTF-8 | 666 | 3.265625 | 3 | [] | no_license | #!/bin/bash
if [ $# != 1 ] ; then
echo "Salt minion install script:"
echo " init-salt.sh <hostname>"
echo " =============================="
echo " example init-salt.sh portal_core"
echo ""
echo ""
exit 0
fi
saltmaster_host="lxc-host1"
saltmaster_hostip="192.168.13.1"
host_name=$1
echo $1 > /etc/hostname
sed -i "s/ubuntu-base/"$host_name"/" /etc/hosts
apt-get install python-software-properties -y
add-apt-repository ppa:saltstack/salt -y
apt-get update
sudo apt-get install salt-minion -y
sed -i "s/#master: salt/master: "$saltmaster_host"/" /etc/salt/minion
echo $saltmaster_hostip" "$saltmaster_host >> /etc/hosts
/etc/init.d/salt-minion restart
| true |
7554058103749765d6ed73427e11ca7350c3bf06 | Shell | frey-norden/unix-workbench | /letsread.sh | UTF-8 | 141 | 2.75 | 3 | [] | no_license | #!/usr/bin/env zsh
# File: letsread.sh
echo "Type me up some text and hit that Enter key baby:"
read response
echo "You entered: $response"
| true |
f3039dcaea2be307313633cdabbcaf0fb16cc8c6 | Shell | hbalp/callers | /tests/uc_sso/update_build.sh | UTF-8 | 180 | 2.53125 | 3 | [] | no_license | #!/bin/bash
function update_libtool_build ()
{
set -x
libtoolize --force
aclocal
autoheader
automake --force-missing --add-missing
autoconf
set +x
}
| true |
bae4077c08d531c8825a9d975041131a477ec847 | Shell | RMIT-SEPT/majorproject-4-wed-18-30-1 | /FrontEnd/scripts/updateToBranch.sh | UTF-8 | 480 | 3.453125 | 3 | [] | no_license | #pulls the branch named in the first parameter
# Pull the branch named in $1 into the deployed app checkout, record it,
# and stamp an expiry time after which a watcher reverts/refreshes.
cd /home/remote/app
#Navigate to scripts directory
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
git pull origin "$1"
echo "$1" > "$DIR/currentBranch.local"
# BUG FIX: the original read `if [$1 != "stable"];` -- `[` is a command
# and needs surrounding spaces, so that line failed at runtime
# ("[<branch>: command not found") and the else path always ran.
# The variable is also quoted so an empty/space-containing argument
# cannot break the test.
if [ "$1" != "stable" ];
then
    #Grant 30 minutes before reverting to stable
    echo $((`date +%s`+1800)) > "$DIR/requestExpiry.local"
else
    #wait for an hour before checking for a new stable branch
    echo $((`date +%s`+3600)) > "$DIR/requestExpiry.local"
fi
f757b77e1b95d51ab6cf9f645baa856750e77a52 | Shell | eunomie/wlt | /tools/pre-receive | UTF-8 | 714 | 3.359375 | 3 | [
"MIT",
"BSD-2-Clause"
] | permissive | #!/usr/bin/env bash
set -e
source ~/.bash_profile
FULL_GIT_DIR=$(cd "$GIT_DIR" && /bin/pwd || "$GIT_DIR")
SCRATCH_DIR=$HOME/autobuild/scratch
GIT_DIR_NAME=$(basename $FULL_GIT_DIR)
GIT_DIR_NAME=${GIT_DIR_NAME%.*}
echo "-----> $GIT_DIR_NAME app."
while read OLDREV NEWREV REF
do
if [ "$REF" != "refs/heads/master" ]; then
echo "$REF is not master"
continue
fi
rm -rf $SCRATCH_DIR/$GIT_DIR_NAME
mkdir -p $SCRATCH_DIR/$GIT_DIR_NAME
GIT_WORK_TREE=$SCRATCH_DIR/$GIT_DIR_NAME git checkout -f $NEWREV &> /dev/null
cd $SCRATCH_DIR/$GIT_DIR_NAME && bundle exec rake deploylocal
done
echo "-----> Deploy OK"
# Very important to return 0 for the new revision to be accepted
exit 0
| true |
f5933e573f3b1d6e10a78df7f034c58552bd913f | Shell | oldtree/Axon | /build.sh | UTF-8 | 689 | 3.203125 | 3 | [] | no_license | #!/bin/bash
# Pull and launch the Grafana + InfluxDB containers, then start the Axon
# server in the background with its log redirected to ./app.log.
function start() {
    # BUG FIX: "grafana:grafana:latest" is not a valid image reference
    # (a tag cannot contain ':'); the official image is grafana/grafana.
    docker pull grafana/grafana:latest
    docker pull influxdb:latest
    docker run -d -p 3000:3000 grafana/grafana:latest
    docker run -d -p 8086:8086 influxdb:latest
    nohup ./Axon -c cfg.json &> ./app.log &
}
# Print the Go environment for diagnostics, compile the Axon binary in
# the current directory, and make sure it is executable.
function build() {
    echo $GOPATH
    echo $GOROOT
    echo $PWD
    go build
    # BUG FIX: the original used `chmod +X` (capital X), which only adds
    # execute where some execute bit is already set; `+x` is what was
    # intended to guarantee the binary is runnable.
    chmod +x Axon
}
# Print the usage summary for this build script.
function help() {
    printf '%s\n' \
        "--------------------------help info--------------------------" \
        "start : start grafana docker and influx docker with localhost ;" \
        "build : build axonx sever code ;"
}
# Dispatch on the first CLI argument: no argument prints the help text;
# unrecognized arguments are silently ignored (same as the original
# if/elif chain, which had no final else).
case "$1" in
    "")    help ;;
    start) start ;;
    build) build ;;
esac
| true |
ef65da2956a76c85a900626ec3c10114a58638e3 | Shell | mdcovarr/cse130-section | /week-3/test.sh | UTF-8 | 1,838 | 4.375 | 4 | [] | no_license | #!/usr/bin/env bash
# ANSI escape sequences used for colored PASS/FAIL output; NC ("no
# color") resets the terminal back to its default.
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m'
# Print the help text describing every selector the script accepts.
usage() {
    echo -e "\n\nUsage: test.sh [arguments] \n"
    echo -e "test.sh - Script used to run a variety of tests to test dog executable"
    echo -e "Optional Arguments:\n"
    echo -e "\t-h, --help\t\t\t"
    echo -e "\t\tDisplay this help menu"
    echo -e "\tone"
    echo -e "\t\tRuns one test function"
    echo -e "\ttwo"
    echo -e "\t\tRuns two test function"
    echo -e "\tthree"
    echo -e "\t\tRuns three test function"
    echo -e "\t-a, --all"
    echo -e "\t\tRuns all test functions"
}
# Report PASS (green) or FAIL (red) for the named test, based on the
# exit status of whatever command ran immediately before this call.
check_test() {
    # Capture $? first -- any other statement would clobber it.
    local last_status=$?
    local test_name=$1
    local verdict="${GREEN}PASS${NC}"
    if [[ $last_status -ne 0 ]]; then
        verdict="${RED}FAIL${NC}"
    fi
    echo -e "${test_name}\t\t${verdict}"
}
# Compare ./dog against cat on a single regular file: both outputs are
# captured and diffed, and check_test reports diff's exit status.
test_one() {
    cat tests/file.txt > out.cat
    ./dog tests/file.txt > out.dog
    diff out.cat out.dog
    check_test "test_one"
}
# Exercise the "-" stdin placeholder: mix a named file with stdin (fed
# from a binary file) in different argument positions and diff against cat.
test_two() {
    cat tests/file.txt - < tests/file2.bin > out.cat
    ./dog - tests/file.txt < tests/file2.bin > out.dog
    diff out.cat out.dog
    check_test "test_two"
}
# Binary-safety check: run both tools over a file of zero bytes and
# verify ./dog reproduces cat's output exactly.
test_three() {
    # Step 1: run cat and output to a file
    cat tests/zero.bin > out.cat
    # Step 2: run dog and output to a file
    ./dog tests/zero.bin > out.dog
    # check to see if dog output differs from cat
    diff out.cat out.dog
    # determine if test passed or failed
    check_test "test_three"
}
# Consume each CLI argument in turn, running the matching test(s);
# any unrecognized argument prints usage and aborts with status 1.
# NOTE(review): invoking the script with no arguments runs nothing and
# exits silently -- confirm that is intended.
while [ "$1" != "" ]; do
    case $1 in
        one)
            test_one
            ;;
        two)
            test_two
            ;;
        three)
            test_three
            ;;
        -a | --all)
            test_one
            test_two
            test_three
            ;;
        -h | --help)
            usage
            exit
            ;;
        *)
            usage
            exit 1
    esac
    shift
done
| true |
cf4607f65ca67245cac9a73510392056f6ec1f01 | Shell | mgeiger/beehive | /setup/beehive-server | UTF-8 | 1,250 | 3.578125 | 4 | [] | no_license | #!/bin/bash
# /etc/init.d/beehive-server
### BEGIN INIT INFO
# Provides:          beehive-server
# Required-Start:    $remote_fs $syslog
# Required-Stop:     $remote_fs $syslog
# Default-Start:     2 3 4 5
# Default-Stop:      0 1 6
# Short-Description: Example initscript
# Description:       This service is used to have a beehive web insterface and REST API
### END INIT INFO
# Location of the Flask application and its entry point.
DIR=/home/pi/flask
DAEMON=$DIR/app.py
DAEMON_NAME=beehive-server
# Interpreter used to run the daemon.
PYTHON=/usr/bin/python3
# PID and log files derived from the daemon name.
# NOTE(review): PIDFILE is never written by start_service below, so the
# pid-based checks can never find a running instance -- confirm intent.
PIDFILE=/var/run/$DAEMON_NAME.pid
LOGFILE=/var/log/$DAEMON_NAME.log
# Start the daemon unless the pidfile points at a live process.
# NOTE(review): the actual launch is still a placeholder comment -- this
# function currently only prints messages. $PIDFILE is unquoted here
# (unlike stop_service below); harmless for the constant path used, but
# inconsistent.
start_service() {
    if [ -f $PIDFILE ] && kill -0 $(cat $PIDFILE); then
        echo 'Service already running' >&2
        return 1
    fi
    echo 'Starting service...' >&2
    # We do stuff here. Figure out how to run it
    echo 'Service started.' >&2
}
# Stop the daemon recorded in $PIDFILE: SIGTERM the pid and remove the
# pidfile. Returns 1 with a message on stderr when no live process is
# found. (The "Servie" typos in the messages are kept verbatim so log
# scrapers keep matching; fix separately if desired.)
stop_service() {
    if [ ! -f "$PIDFILE" ] || ! kill -0 $(cat "$PIDFILE"); then
        echo 'Servie not running' >&2
        return 1
    fi
    echo 'Stopping service...' >&2
    # BUG FIX: the original read `kill -15 $cat "$PIDFILE")` -- an
    # unmatched parenthesis that made the whole script fail to parse.
    # The intended command substitution is restored here.
    kill -15 $(cat "$PIDFILE") && rm -f "$PIDFILE"
    echo 'Servie stopped.' >&2
}
# Init-script entry point.
# NOTE(review): neither branch uses start_service/stop_service above:
# "start" runs the app in the foreground (blocking the init system),
# and "stop" calls `killall` with no process name, which is an error --
# it was presumably meant to be `killall $DAEMON_NAME` or a call to
# stop_service. Confirm and wire these up.
case "$1" in
    start)
        echo "Starting beehive-server"
        /usr/bin/python3 /home/pi/flask/app.py
        ;;
    stop)
        echo "Stopping beehive-server"
        killall
        ;;
    *)
        echo "Usage: /etc/init.d/beehive-server start|stop"
        exit 1
        ;;
esac
exit 0
| true |
b8d9086d4cf9468ec1b26fd4eff2fd068da56cad | Shell | mkt3/dotfiles | /scripts/apt/setup.sh | UTF-8 | 1,152 | 3.046875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Abort on any command failure or use of an unset variable.
set -eu
# Install the base apt package set and generate the locales this user's
# environment expects. NOTE(review): `title` and `info` are not defined
# in this file -- they are presumably provided by a sourcing script;
# confirm before running this standalone.
setup_apt() {
    title "Setting up apt"
    package_list=(build-essential
                  zsh
                  locales
                  cmake
                  jq
                  unar
                  vim
                  duf
                  htop
                  iotop
                  nvtop
                  neofetch
                  rsync
                  golang
                  libevent-dev # for tmux
                  libbz2-dev
                  libdb-dev
                  libreadline-dev
                  libffi-dev
                  libgdbm-dev
                  liblzma-dev
                  libncursesw5-dev
                  libsqlite3-dev
                  libssl-dev
                  zlib1g-dev
                  uuid-dev
                  python3-dev
                  python3-venv
                  python3-pip
                  pipx
                  trash-cli
                  axel
                  tk-dev)
    info "Installing packages"
    sudo apt -y install "${package_list[@]}"
    info "Creating locale"
    sudo locale-gen en_US.UTF-8
    sudo locale-gen ja_JP.UTF-8
}
| true |
d48436481be6039ae59f505756d10c036fc9dbd1 | Shell | psachin/trashy | /trashy | UTF-8 | 3,659 | 3.96875 | 4 | [] | no_license | #!/bin/bash
VERSION=1.7
# trash - the command line trashcan
#
# Copyright (C) 2011-13 Klaatu la Terible
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
# find the system trash
case "$(uname | tr [:upper:] [:lower:] )" in
linux|bsd|sun) BASKET=~/.local/share/Trash ;;
darwin) BASKET=~/.Trash ;; # TODO check when around a mac
*) BASKET=~/.trash ;;
esac
test -d $BASKET/info || mkdir -m 700 -p $BASKET/info 2> /dev/null
test -d $BASKET/files || mkdir -m 700 -p $BASKET/files 2> /dev/null
# empty function
# Empty the trashcan: delete everything under $BASKET (both files/ and
# info/ entries). With --dry-run the candidates are only echoed.
# Always exits the script.
empty() {
    CHECK=$(ls "${BASKET}/files/" )
    # is trash already empty?
    if [ "X$CHECK" = "X" ]; then
	echo "You have no files in trash."
    fi
    if [ "X$DRYRUN" = "X1" ]; then
	echo "Pretending to remove: $CHECK"
	ACT="echo"
	KILL=""
    else
	KILL="-delete"
	ACT="find"
    fi
    # process verbosity and dry-run options
    # Real mode runs `find $BASKET -mindepth 1 -delete`; dry-run mode
    # degenerates to `echo $BASKET -mindepth 1` (prints the arguments).
    $ACT $BASKET -mindepth 1 $KILL
    exit
}
# List the names of everything currently in the trashcan.
list() {
    # Quote the expansion so a BASKET path containing spaces or glob
    # characters cannot be word-split (the original was unquoted).
    ls "${BASKET}/files"
}
# Print the program name, version number, and license tag.
version() {
    printf '%s\n' "Trashy, version $VERSION GPLv3"
}
# default action
# ACT is the verb applied to each file later: `mv` normally, `echo` in
# dry-run mode (set below).
ACT=${ACT:-mv}
# process verbose and help and dryrun options
# Consume leading option arguments until the first non-option is seen.
# NOTE(review): `[ True ]` is simply a non-empty-string test, i.e. an
# infinite loop broken only by the final `else break`; and the
# deprecated `-o` test operator is used throughout.
while [ True ]; do
    if [ "$1" = "--help" -o "$1" = "-h" ]; then
        echo " "
        echo "trash [--verbose|--dry-run|--empty|--list|--restore] foo"
        echo " "
        exit
    elif [ "$1" = "--verbose" -o "$1" = "-v" ]; then
        VERBOSE="-v"
        shift 1
    elif [ "$1" = "--list" -o "$1" = "-l" ]; then
        list
        shift 1
    elif [ "$1" = "--version" -o "$1" = "-w" -o "$1" = "--which" ]; then
        version
        shift 1
    elif [ "$1" = "--dryrun" -o "$1" = "-d" -o "$1" = "--dry-run" ]; then
        ACT="echo"
        DRYRUN=1
        shift 1
    elif [ "$1" = "--empty" -o "$1" = "-e" -o "$1" = "--pitch" ]; then
        # empty() never returns -- it exits the script.
        empty
    elif [ "$1" = "--restore" -o "$1" = "-r" ]; then
        RESTORE=1
        shift 1
    else
        break
    fi
done
# TODO
# if source begins with /run or /media or /mnt ;then
#     warn the person that trashing it is copying it from
#     that external media onto local drive and do they really
#     want to do that?
# either that or we have to make .trash on that device.
# not sure if that is appropriate either.

# sanitize input filenames
# NOTE(review): the unquoted array assignment below word-splits and
# globs the remaining arguments, so filenames with spaces break here.
ARG="${@}"
set -e
RUBBISH=(${ARG})
if [ "X$RUBBISH" = "X" ]; then
    TOTAL=1
fi

# pitch it
# For each remaining argument: in restore mode, move the trashed copy
# back to the path recorded in its .trashinfo sidecar and delete the
# sidecar; otherwise move the file into the trash and write a sidecar
# with its original path and deletion timestamp.
if [ "X$ARG" != "X" ]; then
    for waste in ${ARG[*]} ; do
	ORIGIN=$(realpath "${waste}")
	if [ ! -z $RESTORE ]; then
	    # Destination is read from the "Path=" line of the sidecar.
	    $ACT $VERBOSE "$BASKET/files/`basename ${ORIGIN}`" $(grep ${waste} $BASKET/info/`basename ${ORIGIN}.trashinfo` | cut -d "=" -f2);
	    # In a real run the sidecar is removed; in dry-run it is echoed.
	    if [ -z $DRYRUN ]; then
		ACT="rm"
	    fi
	    $ACT $VERBOSE $BASKET/info/`basename ${ORIGIN}.trashinfo`
	else
	    $ACT $VERBOSE "${waste}" "$BASKET/files/`basename "${waste}"`" ;
	    # freedesktop.org-style .trashinfo sidecar.
	    echo "[Trash Info]" > $BASKET/info/`basename ${ORIGIN}.trashinfo`
	    echo "Path=${ORIGIN}" >> $BASKET/info/`basename ${ORIGIN}.trashinfo`
	    echo "DeletionDate=$(date +%Y-%m-%dT%T)" >> $BASKET/info/`basename ${ORIGIN}.trashinfo`
	fi
    done
fi

# tell the user what just happened
if [ "X$VERBOSE" == "X-v" ]; then
    echo "$BASKET is currently $(du -h $BASKET | cut -f1 | tail -n -1) in size."
fi
| true |
f9b0b7b8fdb4439cb9bcdac42accb058f3294fab | Shell | ching08/th_swagger_mock | /run_mock.sh | UTF-8 | 2,650 | 3.796875 | 4 | [] | no_license | #! /bin/bash
GREEN='\033[0;32m'
BLUE='\033[0;34m'
RED='\033[0;31m'
NC='\033[0m' # No Color
# Print a message in blue. %b expands the escape sequences inside the
# color variables, while the message itself is passed as data (%s) --
# the original used the message as the printf *format* string, so any
# '%' in it was misinterpreted.
title() {
  printf '%b%s%b\n' "${BLUE}" "${1}" "${NC}"
}
# Print a message in green. The message is passed as a %s argument so a
# '%' in it cannot be misread as a printf conversion (the original used
# the message as the format string).
green() {
  printf '%b%s%b\n' "${GREEN}" "${1}" "${NC}"
}
# Print an "ERROR:"-prefixed message in red. The message is passed as a
# %s argument so '%' characters cannot be misread as printf conversions
# (the original interpolated it into the format string).
error() {
  printf '%bERROR:%s%b\n' "${RED}" "${1}" "${NC}"
}
# Placeholder for the usage text (not written yet -- it only names itself).
show_help() {
    printf '%s\n' "show_help"
}
# Stage the service's swagger spec and mock sources into $tmpDir
# (/data1), recreating the directory from scratch on every run.
pre_pare_repo_source() {
    service_repo="/data"
    tmpDir="/data1"
    # BUG FIX: the original tested `-d $tmpdir` (lowercase), an unset
    # variable, so the stale directory was never removed and the plain
    # `mkdir` below failed on every run after the first.
    if [ -d "$tmpDir" ] ; then
        rm -rf "$tmpDir"
    fi
    mkdir "$tmpDir"
    echo "mock directory : $tmpDir"
    cp -rf "$service_repo/swagger.yaml" "$tmpDir"
    # Copy every mock source file except docker/venv/cache artifacts.
    # The command substitution is intentionally unquoted so cp receives
    # one argument per file (paths must not contain whitespace).
    cp $(find $service_repo/mock_connexion/ | egrep -v '(Dockerfile|mock_connexion/$|\.venv|__pycache|.gitignore)') "$tmpDir" || true
    if [ ! -f "$tmpDir/swagger.yaml" ] ; then
        error "$tmpDir/swagger.yaml not exists."
        exit 1
    fi
    ## remove host str
    sed -r -i 's/host\:.*//1g' "$tmpDir/swagger.yaml" || true
    ## check dynamic mode
    if [ "$mock_mode" = "dynamic" ] ; then
        if [ ! -f "$tmpDir/app.py" ]; then
            error "$tmpDir/app.py does not exists. please create it or use remote repo"
            exit 1
        fi
    fi
}
# Launch the connexion mock server from inside $tmpDir on port 8080.
# In static mode connexion's own response mocking (--mock=all) is used;
# in dynamic mode the app.py staged earlier provides the handlers.
start_connexion_mock() {
    cd $tmpDir
    #ls -l $tmpDir
    ## start container
    #dockerReg="hub.gitlab-sj.thalesesec.com/lucy/mock_example/connexion:latest"
    #cmd="docker run --name $service_name -d -p ${port}:8080 -v ${tmpDir}:/data $dockerReg"
    cmd="connexion run -p 8080 --host 0.0.0.0 --verbose swagger.yaml"
    if [ $mock_mode = "static" ]; then
        cmd="$cmd --mock=all"
    fi
    # Echo the command in blue, then run it. NOTE(review): `eval` on a
    # string built from variables is fragile; an array would be safer.
    title "$cmd"
    eval $cmd
}
# Extract the API base path (the "basePath:" value) from the staged
# swagger.yaml into the global $baseUrl, stripping one trailing '/'.
get_baseUrl() {
    baseUrl=`cat "$tmpDir/swagger.yaml" | grep -e "basePath:"| cut -d ":" -f 2| tr -d ' '`
    # BUG FIX: the original then ran `echo $get_baseUrl`, echoing an
    # undefined variable named after the function (a stray debug line
    # that printed a blank line); removed.
    baseUrl=`echo $baseUrl | sed -e 's/\/$//'`
    # NOTE(review): this checks sed's status, which virtually never
    # fails -- an empty $baseUrl is the more useful failure signal.
    if [ $? != 0 ]; then
        error "Warning: Can't not parse baseUrl from $tmpDir/swagger.yaml!"
    fi
}
# Smoke-test the running mock server: GET its /status endpoint and set
# the global $test_code to 0 on HTTP success, 1 otherwise.
# NOTE(review): $port and $service_name are never assigned in this file
# -- confirm they are exported by the caller before enabling this.
test_mock() {
    # this will be enabled after all server support status
    url="http://0.0.0.0:${port}${baseUrl}/status"
    #url="http://0.0.0.0:${port}${baseUrl}/ui"
    cmd="curl --fail $url"
    title "Testing mock server $service_name: $url"
    # Temporarily disable errexit so a failed curl does not kill the script.
    set +e
    eval $cmd
    if [ $? != 0 ] ; then
        error "HTTP GET $url FAILED !!!"
        test_code=1
    else
        green "HTTP GET $url PASSED !!!"
        test_code=0
    fi
}
####################
# Entry point: $1 selects the mock mode, defaulting to "static".
mock_mode=$1
if [ "${mock_mode}x" = "x" ] ; then
    mock_mode="static"
fi
# Reject anything other than the two supported modes.
pattern="^(static|dynamic)$"
if [[ ! ${mock_mode} =~ $pattern ]]; then
    error "Unsupported mock type '$mock_mode', only allow 'static'or 'dynamic'"
    show_help; exit 1
fi
# Stage sources, install any python requirements they declare, work out
# the API base path, and start the mock server (blocking).
pre_pare_repo_source
if [ -f $tmpDir/requirements.txt ]; then
    pip3 install -r $tmpDir/requirements.txt
fi
get_baseUrl
title "Mock Sever UI : http://0.0.0.0:8080${baseUrl}/ui"
start_connexion_mock
#test_mock
| true |
bcf5b3e1d40c4f8214a81ebefd134650f1af4f7a | Shell | lkys37en/Lab-Automation | /Packer/Linux/Ubuntu/scripts/Restore-Eth.sh | UTF-8 | 327 | 2.546875 | 3 | [] | no_license | echo "[+] Setting eth0 to DHCP"
# Configure eth0 for DHCP via the legacy /etc/network/interfaces format.
echo auto eth0 >> /etc/network/interfaces
echo iface eth0 inet dhcp >> /etc/network/interfaces
echo "[+] Modifying grub to supprt old eth interface naming"
# Disable predictable interface names (enp0s3-style) so the interface
# comes back as eth0, then regenerate the grub config to apply it.
sed -i 's/GRUB_CMDLINE_LINUX=""/GRUB_CMDLINE_LINUX="net.ifnames=0 biosdevname=0"/g' /etc/default/grub
grub-mkconfig -o /boot/grub/grub.cfg
b308160721041d8adb643279a9887cf46ca39899 | Shell | rancher/validation-tests | /scripts/build-env | UTF-8 | 1,517 | 3.828125 | 4 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | #!/bin/bash
# Abort on any command failure (temporarily relaxed with `set +e`
# around the cluster-readiness polling loop further down).
set -e
# Verify the gce-10acre-ranch CLI (from cloudnautique/10acre-ranch) is
# on PATH; exit 1 with a message on stderr if it is missing.
10acre_check()
{
    # Check the command's exit status directly. The original tested
    # `[ ! $(command -v ...) ]` unquoted, which only behaved correctly
    # by accident of `test`'s one/two-argument parsing rules.
    if ! command -v gce-10acre-ranch >/dev/null 2>&1; then
        echo "need gce-10acre-ranch command from cloudnautique/10acre-ranch" 1>&2
        echo "    in order to provision" 1>&2
        exit 1
    fi
}
# -d: destroy the cluster instead of building it (consumed by the
# dispatch at the bottom of the script).
while getopts "d" opt; do
    case $opt in
        d)
            DESTROY_CLUSTER="true"
            ;;
    esac
done
# Print the HTTP status code of GET http://<ip>:8080, or -1 when the
# request fails/times out (1-second limit). $1 is the host IP.
# NOTE(review): `HEAD` here is the lwp-request tool used to grab the
# status line -- confirm it is installed where this runs.
get_response()
{
    local response_code
    set -o pipefail
    response_code=$(curl -I -m 1 -X GET http://${1}:8080 2>/dev/null | \
        HEAD -1 | cut -d' ' -f2)
    if [[ $? -ne 0 ]]; then
        response_code=-1
    fi
    set +o pipefail
    echo ${response_code}
}
# Create a 3-node cluster named after the current user, then poll its
# master every 10s (up to 30 tries) until it answers HTTP 200 on :8080.
# Exits 0 on success, 1 if the cluster never becomes ready.
build_environment()
{
    10acre_check
    local cluster_name="cattle-$(id -un)"
    local response_code
    local master_ip
    echo "Building cluster ${cluster_name}..."
    gce-10acre-ranch -b -n 3 -c ${cluster_name}
    echo -n "Waiting for cluster ${cluster_name} "
    master_ip=$(gce-10acre-ranch -c ${cluster_name} -i)
    # Relax errexit: get_response is allowed to fail while we poll.
    set +e
    for ((i=0;i<30;i++))
    do
        response_code=$(get_response ${master_ip})
        if [[ ${response_code} -eq 200 ]]; then
            break
        else
            echo -n "."
            sleep 10
        fi
    done
    set -e
    if [ $i -lt 30 ]; then
        exit 0
    else
        # NOTE(review): plain echo prints the "\n" literally here; this
        # would need `echo -e` (or printf) to emit a real newline.
        echo "\nCluster was not created in time" 1>&2
        exit 1
    fi
}
# Tear down this user's cattle cluster quietly (-q).
destroy_environment()
{
    local cluster_name="cattle-$(id -un)"
    gce-10acre-ranch -d -c "$cluster_name" -q
}
# Entry point: -d (DESTROY_CLUSTER=true) tears the cluster down;
# anything else builds it.
case "${DESTROY_CLUSTER:-}" in
    true) destroy_environment ;;
    *)    build_environment ;;
esac
| true |
9ee1c214eeef40b5865e88a2f9e988926710251e | Shell | EduardoOrtiz89/.dotfiles | /.config/polybar/launch.sh | UTF-8 | 264 | 2.78125 | 3 | [] | no_license | #!/bin/bash
# Restart polybar: kill any running instances, wait for them to exit,
# then launch the primary bar plus an external-monitor bar when HDMI1
# is actually attached.
killall -q polybar
while pgrep -u "$UID" -x polybar > /dev/null; do sleep 1; done
polybar example &
# BUG FIX: the original stored `xrandr --query | grep 'HDMI1'` and
# tested `!= *disconnected*`, which also matches the EMPTY string --
# so the external bar was launched even when xrandr reported no HDMI1
# output at all. Require an explicit "HDMI1 connected" line instead.
if xrandr --query | grep -q '^HDMI1 connected'; then
    polybar external &
fi
| true |
0d549dd17d0b95042e2a4bde6ae87ada4da8e3a5 | Shell | yhiraki/docker-compose-gcloud | /_gcloud | UTF-8 | 237 | 2.9375 | 3 | [] | no_license | #!/usr/bin/env bash
# Wrapper: run the `gcloud` docker-compose service with the caller's
# current directory bind-mounted at /pwd, forwarding all arguments.
# Resolve the directory containing this script, following a symlink
# when the script was invoked through one.
if [ -L "$0" ]
    then SCRIPT_DIR=$(dirname "$(readlink "$0")")
    else SCRIPT_DIR=$(cd "$(dirname "$0")"; pwd)
fi
# BUG FIX: use "$@" rather than the original unquoted $*, which
# re-split every argument on whitespace and re-globbed it, breaking
# any argument containing spaces or wildcard characters. All path
# expansions are quoted for the same reason.
exec docker-compose \
    -f "$SCRIPT_DIR/docker-compose.yml" \
    run --rm \
    -v "$(pwd):/pwd" \
    -w /pwd \
    gcloud "$@"
| true |
f865956602369b5beef7c591184f34fc3862c4ec | Shell | usayama/docker-react-firebase-3way | /tailwind.sh | UTF-8 | 1,882 | 3.140625 | 3 | [] | no_license | #!/bin/sh
# One-shot Tailwind CSS setup for the containerized React app: installs
# the npm packages, generates config files, and wires the build scripts.
# NOTE(review): the `wait $!` calls throughout follow *foreground*
# commands, so $! is unset/stale and they are effectively no-ops --
# presumably decorative pacing together with the `sleep 3`s.
set -e
# Install tailwindcss + postcss + autoprefixer inside the tooling container.
echo 'パッケージをインストールします'
docker-compose run --rm environ npm install tailwindcss postcss-cli autoprefixer
echo 'パッケージのインストールが完了しました'
wait $!
sleep 3
# Generate the default tailwind.config.js.
echo 'Tailwindcssを初期化します'
docker-compose run --rm environ npx tailwindcss init
echo 'Tailwindcssを初期化しました'
wait $!
sleep 3
# Point the purge option at the TypeScript sources (gsed = GNU sed on macOS).
echo 'さっき作られたTailwind設定ファイルに書き込みます'
gsed -i -e "/purge/c\  purge: ['./src/**/*.ts', './src/**/*.tsx']," app/tailwind.config.js
echo '書き込みが完了しました'
wait $!
sleep 3
# Write a postcss config that loads tailwindcss and autoprefixer.
echo 'postcss.config.jsを作成して書き込みます'
touch app/postcss.config.js
wait
cat << 'EOS' > app/postcss.config.js
module.exports = {
  plugins: [require('tailwindcss'), require('autoprefixer')]
}
EOS
echo '作成と書き込みが完了しました'
wait $!
sleep 3
# Create the Tailwind entry stylesheet with the three standard layers.
echo 'styles.cssを作成して書き込みます'
touch app/src/styles.css
wait
cat << 'EOS' > app/src/styles.css
@tailwind base;
@tailwind components;
@tailwind utilities;
EOS
echo '作成と書き込みが完了しました'
wait $!
sleep 3
# Insert a "tailwind" npm script after the "test" entry in package.json.
echo 'package.json から build の行を探して、下に行を挿入します'
gsed -i '/"test":/a \    "tailwind": "npx tailwindcss build src/styles.css -o src/tailwind.css",' app/package.json
echo '行の挿入が完了しました'
wait $!
sleep 3
# Replace the "build" npm script so it runs the tailwind build first.
echo 'package.json から build の行を探して、書き換えます'
gsed -i '/"build":/c\    "build": "NODE_ENV=production npm run tailwind && craco build",' app/package.json
echo '行の書き換えが完了しました'
wait $!
sleep 3
# Produce src/tailwind.css once so the dev server has it immediately.
echo 'TailwindのCSSファイルを作成するためのビルドをします'
docker-compose run --rm react npm run tailwind
echo 'TailwindのCSSファイルを作成しました'
wait $!
echo '👍すべての処理が終了しました!'
| true |
715585e1fb5050a717770c4b2280d2888cf3b5ca | Shell | mmulenga/Computer-Science | /CMPT 332/group40/Assignment1/tag/3.0/partB.bash | UTF-8 | 3,950 | 4.0625 | 4 | [] | no_license | #!/bin/bash
# Sean Robson-Kullman
# skr519
# 11182480

# Matthew Mulenga
# mam558
# 11144528

# Check to see which version the user wishes to run.
VERSION="$1"
# Sentinel for the interactive menu loop; anything but "q" keeps looping.
INPUT="user_input"
# Program arguments, NULL until valid integers are read (from a file or
# interactively) and reset back to NULL after each interactive run.
THREADS=NULL
DEADLINE=NULL
SIZE=NULL
# Which of the three piped-in lines we are currently parsing (0..2).
LOOP_COUNT=0
# e.g. "LinuxX86_64" or "MINGW..." -- used to pick the right binary.
ARCH=$(uname -sm | tr -d ' ')
# Check version function
# Run the executable matching $VERSION (A1 = Windows/MinGW build,
# A2/A3 = Linux builds), passing the three collected parameters.
# Exits 1 when the OS does not match the requested version or the
# version string is unrecognized.
# NOTE(review): `MINGW[.]*` / `Linux[.]*` mean "prefix followed by zero
# or more literal dots" -- effectively unanchored prefix matches, which
# happens to work but is probably not what the regex intended.
check_version() {
    if [ "$VERSION" == "A1" ]
    then
        if [[ $ARCH =~ MINGW[.]* ]]
        then
            ./partA1.exe $THREADS $DEADLINE $SIZE
        else
            printf "Incorrect OS.\n"
            exit 1
        fi
    elif [ "$VERSION" == "A2" ]
    then
        if [[ $ARCH =~ Linux[.]* ]]
        then
            ./partA2 $THREADS $DEADLINE $SIZE
        else
            printf "Incorrect OS.\n"
            exit 1
        fi
    elif [ "$VERSION" == "A3" ]
    then
        if [[ $ARCH =~ Linux[.]* ]]
        then
            ./pthread $THREADS $DEADLINE $SIZE
        else
            printf "Incorrect OS.\n"
            exit 1
        fi
    else
        printf "usage: partB.bash <version>\n"
        exit 1
    fi
}
# Check to see if the user provided an input file
# `[ -t 0 ]` is true when stdin is a terminal; so the first branch runs
# when stdin is redirected/piped (file mode), the else branch when the
# script is run interactively.
if ! [ -t 0 ]
then
    # File mode: expect exactly three lines, each one positive integer,
    # in the order THREADS, DEADLINE, SIZE.
    while read INPUTFILE
    do
        # THIS IS SOME SERIOUSLY HACKY BULLSHIT BUT IT WORKS
        # Begin by checking the loop count
        # If it's 0 it means we're reading in the first argument
        # for the program, if it's 1 we're on the second argument
        # and finally if it's 2 we're on the third argument
        if [ "$LOOP_COUNT" -eq 0 ]
        then
            # Make sure the supplied data is actually an integer.
            if [[ "$INPUTFILE" =~ ^[0-9]+$ ]]
            then
                # If it's good data we assign it to one of the three variables.
                THREADS=$INPUTFILE
            else
                # If at any point we read incorrect data exit the program.
                printf "Incorrect file format.\n"
                printf "Please provide 3 positive integers on separate lines.\n"
                printf "Exiting...\n"
                exit 1
            fi
        # Rinse and repeat for second argument.
        elif [ "$LOOP_COUNT" -eq 1 ]
        then
            if [[ "$INPUTFILE" =~ ^[0-9]+$ ]]
            then
                DEADLINE=$INPUTFILE
            else
                printf "Incorrect file format.\n"
                printf "Please provide 3 positive integers on separate lines.\n"
                printf "Exiting...\n"
                exit 1
            fi
        # Rinse and repeat for third argument.
        elif [ "$LOOP_COUNT" -eq 2 ]
        then
            if [[ "$INPUTFILE" =~ ^[0-9]+$ ]]
            then
                SIZE=$INPUTFILE
            else
                printf "Incorrect file format.\n"
                printf "Please provide 3 positive integers on separate lines.\n"
                printf "Exiting...\n"
                exit 1
            fi
        else
            # A fourth line means the file is malformed.
            printf "Incorrect file format.\n"
            printf "Please provide 3 positive integers on separate lines.\n"
            printf "Exiting...\n"
            exit 1
        fi
        let LOOP_COUNT=LOOP_COUNT+1
    done
    # Check which version the user selected and execute the appropriate program.
    check_version
# If no input file was provided run the program interactively.
else
    # Interactive mode requires the version on the command line.
    if ! [ $1 ]
    then
        printf "Usage: partB.bash <version>\n"
        exit 1
    fi
    # Loop functions for parameter input.
    # Each helper re-prompts until its global holds a positive integer
    # (the NULL sentinel never matches, forcing at least one prompt).
    # NOTE(review): plain `read` (no -r) mangles backslashes in input.
    thread_function() {
        # Check to see whether $THREADS is a positive integer
        # If it's not loop and ask the user for correct input
        while ! [[ "$THREADS" =~ ^[0-9]+$ ]]
        do
            printf "Please input a positive integer for the number of threads: \n"
            read THREADS
        done
    }
    deadline_function() {
        # Check to see whether $DEADLINE is a positive integer
        # If it's not loop and ask the user for correct input
        while ! [[ "$DEADLINE" =~ ^[0-9]+$ ]]
        do
            printf "Please input a positive integer the deadline: \n"
            read DEADLINE
        done
    }
    size_function() {
        # Check to see whether $SIZE is a positive integer
        # If it's not loop and ask the user for correct input
        while ! [[ "$SIZE" =~ ^[0-9]+$ ]]
        do
            printf "Please input a positive integer for the number of squares: \n"
            read SIZE
        done
    }
    # Main script loop
    # Check to see if the user whishes to quit at any time.
    # NOTE(review): $INPUT is unquoted in both tests below -- an empty
    # reply (bare Enter) makes the `[ ... ]` tests malformed.
    while [ $INPUT != "q" ]
    do
        printf "(s)tart - Start entering parameters for the program.\n"
        printf "(q)uit - Exit the program.\n"
        read INPUT
        if [ $INPUT == "s" ]
        then
            thread_function
            deadline_function
            size_function
            check_version
            # Reset the arguments
            THREADS=NULL
            DEADLINE=NULL
            SIZE=NULL
        fi
    done
fi
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.