blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
2eee0a591dd37eb7354e0f665e1743b5951eb441 | Shell | wgnet/wds_qt | /qtbase/tests/baselineserver/bin/runserver | UTF-8 | 204 | 2.734375 | 3 | [
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-commercial-license",
"LGPL-2.0-or-later",
"LGPL-2.1-only",
"GFDL-1.3-only",
"LicenseRef-scancode-qt-commercial-1.1",
"LGPL-3.0-only",
"LicenseRef-scancode-qt-company-exception-lgpl-2.1",
"GPL-1.0-or-later",
"GPL-3.0-only",
"BSD-3-Clause",
"LGPL-2.1-or-later",
"GPL-2.0-only",
"Qt-LGPL-exception-1.1",
"LicenseRef-scancode-digia-qt-preview",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-generic-exception"
] | permissive | #!/bin/bash
logfile=baselineserver.log
while true; do
echo >> $logfile
echo -n "***RESTARTING*** " >> $logfile
date >> $logfile
./baselineserver 2>&1 | tee -a $logfile
sleep 2
done
| true |
a73d9d69a036a95d8acabdd0d10f64d429ba0442 | Shell | yokefellow/gitee-pages-deploy-action | /deploy-gitee-pages.sh | UTF-8 | 452 | 3.03125 | 3 | [] | no_license | #!/usr/bin/env sh
set -e
git config --global user.email "$INPUT_GIT_USER_EMAIL"
git config --global user.name "$INPUT_GIT_USER_NAME"
mkdir tmp
git clone --depth=1 --single-branch --branch "$INPUT_GITEE_PAGES_BRANCH" "$INPUT_GITEE_REPO" ./tmp
cd ./tmp
git rm -r --ignore-unmatch *
cp -r ../"$INPUT_FOLDER"/* ./
if [ -n "$INPUT_CNAME" ]; then
echo "$INPUT_CNAME" > CNAME
fi
git add -A
git commit -m "$INPUT_COMMIT_MESSAGE"
git push origin master
| true |
b6221bed84a9cdcb9670cf045705cb2c130a4239 | Shell | heroxrq/kaggle | /cdiscount_image_classification_challenge/script/keras_tensorflow/run_pipeline.sh | UTF-8 | 792 | 2.9375 | 3 | [] | no_license | #!/bin/bash
CUR_DIR=$(cd `dirname $0`; pwd)
DATASET_DIR="$HOME/dataset/kaggle/cdiscount_image_classification_challenge/dataset"
if [ ! -d $DATASET_DIR ]; then
DATASET_DIR="/media/xrq/Elements/dataset/kaggle/cdiscount_image_classification_challenge/dataset"
fi
PYTHON2="$HOME/install/anaconda2/bin/python"
PYTHON3="$HOME/install/anaconda3/bin/python"
DATE=$(date +%F)
PY_LOG_DIR="../log/$DATE/py_log"
mkdir -p $PY_LOG_DIR
PY_LOG="$PY_LOG_DIR/log.$DATE"
# This is only needed for the first time.
#$PYTHON2 gen_imgs_from_bson.py \
#10 \
#$DATASET_DIR \
#$DATASET_DIR/train_bson_transform/train \
#$DATASET_DIR/train_bson_transform/valid
#$PYTHON2 train_inception_v3_transfer_learning.py >>$PY_LOG 2>&1
#$PYTHON2 predict.py >>$PY_LOG 2>&1
#$PYTHON2 predict_multi_gpu.py >>$PY_LOG 2>&1
| true |
155070f8ea8d1fe621d85433e8e8d56a0d4e3de0 | Shell | next-generate/Rainbow-buildall | /apps/mkcompile_h | UTF-8 | 779 | 3.234375 | 3 | [] | no_license | #!/bin/bash
CC=arm-hisiv300-linux-gcc
# Fix the language to get consistent output
LC_ALL=C
export LC_ALL
TIMESTAMP=`date`
APP_COMPILE_BY=$(whoami | sed 's/\\/\\\\/')
APP_COMPILE_HOST=`hostname`
UTS_VERSION="$TIMESTAMP"
# Truncate to maximum length
UTS_LEN=64
UTS_TRUNCATE="cut -b -$UTS_LEN"
# Generate a temporary compile.h
( echo /\* This file is auto generated \*/
echo \#ifndef __COMPILE_H__
echo \#define __COMPILE_H__
echo
echo \#define UTS_VERSION \"`echo $UTS_VERSION | $UTS_TRUNCATE`\"
echo \#define APP_COMPILE_BY \"`echo $APP_COMPILE_BY | $UTS_TRUNCATE`\"
echo \#define APP_COMPILE_HOST \"`echo $APP_COMPILE_HOST | $UTS_TRUNCATE`\"
echo \#define APP_COMPILER \"`$CC -v 2>&1 | tail -n 1`\"
echo
echo \#endif /*__COMPILE_H__*/
) > compile.h
| true |
d86bb3e855e3a8c4f617a5062da3702ee770a0c2 | Shell | lisuke/repo | /archlinuxcn/openmsx-catapult-git/PKGBUILD | UTF-8 | 1,826 | 2.671875 | 3 | [] | no_license | # Contributor: Tom < reztho at archlinux dot us >
pkgname=openmsx-catapult-git
pkgver=1025.c2231ed
pkgrel=1
pkgdesc="Front-end for openMSX: the MSX emulator that aims for perfection."
arch=('i686' 'x86_64')
url="http://openmsx.org"
license=('GPL')
depends=('libxml2' 'wxwidgets-gtk2' 'zlib' 'libjpeg' 'libpng' 'libtiff')
makedepends=('python' 'git')
optdepends=('openmsx')
source=("git+https://github.com/openMSX/wxcatapult.git")
provides=("openmsx-catapult")
conflicts=("openmsx-catapult")
pkgver() {
cd "${srcdir}/wxcatapult"
printf "%s.%s" "$(git rev-list --count HEAD)" "$(git rev-parse --short HEAD)"
}
build() {
cd "${srcdir}/wxcatapult"
# Catapult uses wxwidgets-gtk2
sed -i 's@wx-config@wx-config-gtk2@' build/probe.mk
sed -i 's@wx-config@wx-config-gtk2@' build/main.mk
# Changing some default configurations...
sed -i 's@SYMLINK_FOR_BINARY:=true@SYMLINK_FOR_BINARY:=false@' build/custom.mk
sed -i 's@/opt/openMSX-Catapult@/usr/share/openmsx-catapult@' build/custom.mk
sed -i 's@/opt/openMSX/bin/openmsx@/usr/bin/openmsx@' build/custom.mk
sed -i 's@/opt/openMSX/share@/usr/share/openmsx@' build/custom.mk
echo 'INSTALL_DOC_DIR:=/usr/share/doc/openmsx-catapult' >> build/custom.mk
echo 'INSTALL_SHARE_DIR:=/usr/share/openmsx-catapult' >> build/custom.mk
echo 'INSTALL_BINARY_DIR:=/usr/bin' >> build/custom.mk
# Compiling
make
}
package() {
cd "${srcdir}/wxcatapult"
mkdir -p "${pkgdir}/usr/share/applications"
make DESTDIR="${pkgdir}" install
# Fixing the .desktop file
sed -i 's@/usr/share/openmsx-catapult/bin/catapult@/usr/bin/catapult@' \
"${pkgdir}/usr/share/applications/openMSX-Catapult.desktop"
sed -i 's@/usr/share/openmsx-catapult/doc/@/usr/share/doc/openmsx-catapult/@' \
"${pkgdir}/usr/share/applications/openMSX-Catapult.desktop"
}
md5sums=('SKIP')
| true |
38052bc0293ff999eed94f1c3447d0ef6ce28a74 | Shell | Cicadaes/td | /marketing/marketing-report/web/src/main/resources/bin/shutdown.sh | UTF-8 | 215 | 2.90625 | 3 | [] | no_license | #!/bin/bash
basedir=$(cd "$(dirname "$0")"; pwd)
#脚本执行目录
bin_dir=$(cd `dirname $0`; pwd)
#设置APP HOME目录
APP_HOME=$(dirname ${bin_dir})
kill `cat ${APP_HOME}/app.pid`
rm -rf ${APP_HOME}/app.pid | true |
435c0098061dc82ad16fed444fffe078cb5c8793 | Shell | NoOne-dev/Diag | /platypus.command | UTF-8 | 481 | 3.03125 | 3 | [] | no_license | #!/bin/sh
export LAUNCHED="$0"
export LOCATION="$(dirname "$LAUNCHED")"
#export PATH="$LOCATION:$PATH"
echo "This script needs root access in order to look for faults in other users' home directories."
sudo PATH="$LOCATION:$PATH" "$LOCATION/diag" -a -l -i # -d afsc -d full -d unplug -d 2nd
echo " -- done --"
echo "Print this window to PDF if you wish to send the results to someone for review."
echo "Sleeping in order to keep the window open..."
sleep 18000 # five hours
| true |
d16c0e5a63f40211a801c043bb1b987a8b01faa3 | Shell | moki9/docker-duplicity-backup.sh | /entrypoint.sh | UTF-8 | 502 | 2.625 | 3 | [] | no_license |
#!/bin/bash
cat <<EOF > /home/duplicity/.s3cfg
$([ "$(echo "$DEST" | cut -d'/' -f1)" == "s3:" ] && echo "host_base = $(echo "$DEST" | cut -d'/' -f3)")
$([ "$(echo "$DEST" | cut -d'/' -f1)" == "s3:" ] && echo "host_bucket = $(echo "$DEST" | cut -d'/' -f3)")
bucket_location = ${BUCKET_LOCATION:-us-east-1}
use_https = True
access_key = ${AWS_ACCESS_KEY_ID}
secret_key = ${AWS_SECRET_ACCESS_KEY}
signature_v2 = False
EOF
/usr/local/bin/duplicity-backup.sh -c /home/duplicity/dulicity-backup.conf "$@"
| true |
9590795e90168fccb9f43dbe848db67515fc8c1d | Shell | enr/sub | /hack/build-dist | UTF-8 | 1,604 | 3.78125 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
set -e
TOOL_SOURCE="${BASH_SOURCE[0]}"
while [ -h "$TOOL_SOURCE" ] ; do TOOL_SOURCE="$(readlink "$TOOL_SOURCE")"; done
HACK_DIR="$( cd -P "$( dirname "$TOOL_SOURCE" )" && pwd )"
PRJ_HOME="$( cd -P "$( dirname "$HACK_DIR" )" && pwd )"
source "${HACK_DIR}/config"
buildtime=$(TZ=UTC date -u '+%Y-%m-%dT%H:%M:%SZ')
git_hash="$(git rev-parse HEAD)"
git_dirty="$(test -n "`git status --porcelain`" && echo "+CHANGES" || true)"
git_commit="${git_hash}${git_dirty}"
echo "building ${GH_REPO}"
echo "version : ${APP_VERSION}"
echo "git : ${git_commit}"
echo "build time : ${buildtime}"
[[ -z "$DIST_DIR" ]] && {
echo "no dist dir"
exit 1
}
[[ -d "${PRJ_HOME}/bin" ]] && rm -r "${PRJ_HOME}/bin"
[[ -d "${PRJ_HOME}/pkg" ]] && rm -r "${PRJ_HOME}/pkg"
[[ -d "$DIST_DIR" ]] && rm -r "$DIST_DIR"
mkdir -p "$DIST_DIR"
hash gb 2>/dev/null || {
go get -u github.com/constabulary/gb/...
}
platforms="windows/amd64 linux/amd64 darwin/amd64"
for platform in $platforms; do
target_goos=$(echo $platform | cut -d/ -f1)
target_goarch=$(echo $platform | cut -d/ -f2)
echo "building for ${target_goos} ${target_goarch}"
GOOS="$target_goos" GOARCH="$target_goarch" gb build -ldflags "-s \
-X main.Version=${APP_VERSION} \
-X main.BuildTime=${buildtime} \
-X main.GitCommit=${git_commit}" all > /dev/null
ext=''
[[ "windows" = "$target_goos" ]] && ext='.exe'
built="${PRJ_HOME}/bin/${GH_REPO}-${target_goos}-${target_goarch}${ext}"
[[ -e "$built" ]] || {
echo "expected file not found: ${built}"
exit 1
}
mv "$built" "$DIST_DIR"
done
| true |
6e7748bf6d663ddcfb765c725b2abf04fc2102d4 | Shell | dalent/gitcode | /expect/login_kill.sh | UTF-8 | 669 | 3.4375 | 3 | [] | no_license | #! /bin/bash -
set -u
set -e
ip_list=$1
name=$2
do_kill(){
pid_list_size=`ssh jujintao@$ip "ps axu | grep $name | grep -v grep" | awk '{print $2}' | wc -l`
ssh jujintao@$ip "ps -ef|grep $name |grep -v grep| cut -c 9-15 |xargs kill "
}
do_wait(){
sleep 2
while [ 1 -eq 1 ]
do
pid_list_size1=`ssh jujintao@$ip "ps axu | grep $name | grep -v grep" | awk '{print $2}' | wc -l`
echo $pid_list_size1 $pid_list_size
if [[ $pid_list_size1 -eq $pid_list_size ]];then
echo $name ok
break
fi
done
}
if [[ -f $ip_list ]];then
for ip in `cat $ip_list`
do
do_kill
do_wait
done
else
NUM=$#
for((i=1;i<=$#;i++))
do
ip=$i
do_kill
do_wait
done
fi
| true |
a90e8b4fd74b43f01b6763de1667a9c9f9eae358 | Shell | MoraAndrea/drone-operator | /buildLoadAndDeploy.sh | UTF-8 | 1,003 | 2.546875 | 3 | [] | no_license | CLUSTER_NAME="${CLUSTER_NAME}"
# echo "Build Application, Create docker..."
# operator-sdk build drone-operator:first
# echo "Load image on kind..."
# kind load docker-image drone-operator:first --name ${CLUSTER_NAME}
echo "Apply config map..,"
echo "Edit config for CLUSTER_NAME"
sed -i 's/{NAME}/'"$CLUSTER_NAME"'/g' config-map.yml
kubectl apply -f config-map.yml -n drone
sed -i 's/'"$CLUSTER_NAME"'/{NAME}/g' config-map.yml
echo "Apply operator and other utils..."
# Setup Service Account
kubectl apply -f deploy/service_account.yaml -n drone
# Setup RBAC
kubectl apply -f deploy/role.yaml -n drone
kubectl apply -f deploy/role_binding.yaml -n drone
# Setup the CRD
kubectl apply -f deploy/crds/drone_v1alpha1_dronefederateddeployment_crd.yaml -n drone
# Deploy the app-operator
kubectl apply -f deploy/operator.yaml -n drone
# Create an DroneFederatedDeployment CR
# echo "Create ad DroneFederatedDeployment"
# kubectl apply -f deploy/crds/app.example.com_v1alpha1_appservice_cr.yaml -n drone | true |
8056c9aab7cd750fa2eae258ed9322e428525ad8 | Shell | vtennero/TN_test_GNL | /resources/oui_qui_leaks.sh | UTF-8 | 1,372 | 3.3125 | 3 | [] | no_license | #!/bin/bash
OUI_QUI_LEAKS_LOGS_DIR="oui_qui_leaks_logs"
oui_qui_leaks()
{
local x=1
local y=1
clear
echo "$COLOR\0TN_TEST // GET_NEXT_LINE$END"
sleep 1
echo "$COLOR\0Oui qui leaks$END"
mkdir -p $OUI_QUI_LEAKS_LOGS_DIR
echo "$COLOR\0Compiling with BUFF_SIZE = 1...$END"
set_buff_and_compile 1 1 leaks file
echo "$COLOR\0Compiling with BUFF_SIZE = 2...$END"
set_buff_and_compile 2 2 leaks file
echo "$COLOR\0Compiling with BUFF_SIZE = 5...$END"
set_buff_and_compile 5 3 leaks file
echo "$COLOR\0Compiling with BUFF_SIZE = 1,000...$END"
set_buff_and_compile 1000 4 leaks file
echo "$COLOR\0Compiling with BUFF_SIZE = 1,000,000...$END"
set_buff_and_compile 1000000 5 leaks file
echo "$COLOR\0Tip: ID = exam ; password = exam$END"
while [ $x -le 5 ]
do
while [ $y -le 15 ]
do
kill_all_process $x
./test_gnl$x tests/ex$y 2>&1 >/dev/null &
sleep 0.1
leaks test_gnl$x > $OUI_QUI_LEAKS_LOGS_DIR/test_gnl_$x\_ex$y.txt
local wiki=$(cat $OUI_QUI_LEAKS_LOGS_DIR/test_gnl_$x\_ex$y.txt | grep "total leaked bytes" | cut -c 15-)
local nb_leaks=$(echo $wiki | cut -d " " -f1)
if [ "$nb_leaks" = "0" ]
then
printf "$COLOR.$END"
rm $OUI_QUI_LEAKS_LOGS_DIR/test_gnl_$x\_ex$y.txt
else
printf "\ntest_gnl$x, ex$y: \t$wiki"
say boom
fi
sleep 0.1
let "y++"
done
kill_all_process $x
let "y=1"
let "x++"
done
echo "$COLOR\n[DONE]$END"
} | true |
744a62534dc74df5c85fa0933774ccaaa111e02a | Shell | scsibug/jenkins_census_analysis | /get_data.sh | UTF-8 | 466 | 3.40625 | 3 | [] | no_license | #!/bin/bash
echo "Fetching Jenkins Census data..."
wget -A .gz -l 1 -r --http-user=jenkins --http-password=butler https://jenkins-ci.org/census/
if [ $? -ne 0 ]; then
echo "Error retrieving census data."
return 1
fi
mv jenkins-ci.org/census data
rm -rf jenkins-ci.org index.html
cd data
echo "Expanding compressed census data..."
for gz in *gz; do gzip -d $gz; done
if [ $? -ne 0 ]; then
echo "Error expanding census gzip files."
else
echo "Complete"
fi | true |
aa88aaf45e1831149862182871952f3caa145db2 | Shell | klooperator/flask-base | /redis-ve | UTF-8 | 872 | 3.59375 | 4 | [
"MIT"
] | permissive | #!/bin/bash
VERSION="3.2.6"
ARCHIVE=redis-${VERSION}.tar.gz
if [ -z "${VIRTUAL_ENV}" ]; then
echo "Please activate a virtualenv first";
exit
fi
pushd /tmp/
if [ ! -f redis-${VERSION}.tar.gz ]
then
wget http://download.redis.io/releases/${ARCHIVE}
fi
DIRNAME=`tar tzf ${ARCHIVE} 2>/dev/null | head -n 1`
tar xzf ${ARCHIVE}
pushd ${DIRNAME}
# make / make install w/ prefix
make PREFIX=${VIRTUAL_ENV}
make PREFIX=${VIRTUAL_ENV} install
mkdir -p ${VIRTUAL_ENV}/etc/
mkdir -p ${VIRTUAL_ENV}/run/
mkdir -p ${VIRTUAL_ENV}/etc/
mkdir -p ${VIRTUAL_ENV}/run/
sed -i 's/daemonize no/daemonize yes/' redis.conf
# prepare VIRTUAL_ENV-path for sed (escape / with \/)
VIRTUAL_ENV_ESC="${VIRTUAL_ENV//\//\\/}"
sed -i "s/\/var\/run/${VIRTUAL_ENV_ESC}\/run/" redis.conf
sed -i "s/dir \.\//dir ${VIRTUAL_ENV_ESC}\/run\//" redis.conf
cp redis.conf ${VIRTUAL_ENV}/etc/
popd
popd
| true |
859855ecca213bb0fb2d975436ec47f932777a28 | Shell | dragonmaus/home-old-2017-05-21 | /src/bin/dirdate/dirdate.sh | UTF-8 | 802 | 3.75 | 4 | [
"BSD-3-Clause"
] | permissive | #!/bin/sh -e
. $HOME/lib/sh/stdlib.sh
usage() (die 100 usage 'dirdate [-f] dir...')
walk() (cd "$1"; shift; exec find -x . ! -path . -prune ${1+"$@"})
a= f=0
while getopts f opt; do
case $opt in
f) f=`expr $f + 1`; a=$a' -f';;
*) usage;;
esac
done
shift `expr $OPTIND - 1`
test $# -gt 0 || usage
for dir do
test -e "$dir" || die 111 fatal "$dir: No such file or directory"
test -d "$dir" || die 111 fatal "$dir: Not a directory"
test $f -lt 2 -a `walk "$dir" | wc -l` -eq 0 && continue
walk "$dir" -type d -print0 | xargs -0 dirdate $a
walk "$dir" -type l -print0 | xargs -0 symdate $a
t=`walk "$dir" -print0 | xargs -0 stat -f %m | sort -n | tail -1`
o=`stat -f %m "$dir"`
d=`env TZ=UTC date -j -r $t +%FT%TZ`
test $f -gt 0 -o $t -gt $o && touch -c -d $d "$dir"
done
| true |
9539514b34e91af4443ee999990083a6f69587ad | Shell | yuchen-w/HPCE_CW6 | /test.sh | UTF-8 | 5,194 | 2.984375 | 3 | [] | no_license | #!/bin/bash
testvar="1 2 3 4 8 16 32 64 128 1024 2048 4096 8096"
testvar_mb="1 2 3 4 8 16 32 64 128 1024 2048 4096 8096 160000 320000"
#testvar="1 2 3 4 8 16"
echo "Running median_bits"
rm median_bits.log
for i in $testvar_mb; do
./bin/run_puzzle median_bits $i 2 2>> median_bits.log
done
declare -a t_start=($(cat median_bits.log | grep "Executing puzzle" | sed 's/.*\],//;s/,.*$//')) #replace everything before ], with nothing and everything after , with nothing
declare -a t_start_ref=($(cat median_bits.log | grep "Executing reference" | sed 's/.*\],//;s/,.*$//'))
declare -a t_end_ref=($(cat median_bits.log | grep "Checking output" | sed 's/.*\],//;s/,.*$//') )
echo $(cat median_bits.log | grep "Output is not correct")
j=0
for i in $testvar_mb; do
echo "For puzzle size '$i'"
echo "Execution time:"
echo ${t_start_ref[j]}-${t_start[j]} | bc -l
echo "Reference time: "
echo ${t_end_ref[j]}-${t_start_ref[j]} | bc -l
((j++))
#echo "Reference time = '${
done
##############
echo "Running circuit_sim"
rm circuit_sim.log
for i in $testvar; do
./bin/run_puzzle circuit_sim $i 2 2>> circuit_sim.log
done
declare -a t_start=($(cat circuit_sim.log | grep "Executing puzzle" | sed 's/.*\],//;s/,.*$//')) #replace everything before ], with nothing and everything after , with nothing
declare -a t_start_ref=($(cat circuit_sim.log | grep "Executing reference" | sed 's/.*\],//;s/,.*$//'))
declare -a t_end_ref=($(cat circuit_sim.log | grep "Checking output" | sed 's/.*\],//;s/,.*$//') )
echo $(cat circuit_sim.log | grep "Output is not correct")
j=0
for i in $testvar; do
echo "For puzzle size '$i'"
echo "Execution time:"
echo ${t_start_ref[j]}-${t_start[j]} | bc -l
echo "Reference time: "
echo ${t_end_ref[j]}-${t_start_ref[j]} | bc -l
((j++))
#echo "Reference time = '${
done
###############
testvarlife="1 2 3 4 8 16 32 64 128 256"
echo "Running life"
rm life.log
for i in $testvarlife; do
./bin/run_puzzle life $i 2 2>> life.log
done
declare -a t_start=($(cat life.log | grep "Executing puzzle" | sed 's/.*\],//;s/,.*$//')) #replace everything before ], with nothing and everything after , with nothing
declare -a t_start_ref=($(cat life.log | grep "Executing reference" | sed 's/.*\],//;s/,.*$//'))
declare -a t_end_ref=($(cat life.log | grep "Checking output" | sed 's/.*\],//;s/,.*$//') )
echo $(cat life.log | grep "Output is not correct")
j=0
for i in $testvarlife; do
echo "For puzzle size '$i'"
echo "Execution time:"
echo ${t_start_ref[j]}-${t_start[j]} | bc -l
echo "Reference time: "
echo ${t_end_ref[j]}-${t_start_ref[j]} | bc -l
((j++))
#echo "Reference time = '${
done
###############
echo "Running matrix_exponent"
rm matrix_exponent.log
for i in $testvar; do
./bin/run_puzzle matrix_exponent $i 2 2>> matrix_exponent.log
done
declare -a t_start=($(cat matrix_exponent.log | grep "Executing puzzle" | sed 's/.*\],//;s/,.*$//')) #replace everything before ], with nothing and everything after , with nothing
declare -a t_start_ref=($(cat matrix_exponent.log | grep "Executing reference" | sed 's/.*\],//;s/,.*$//'))
declare -a t_end_ref=($(cat matrix_exponent.log | grep "Checking output" | sed 's/.*\],//;s/,.*$//') )
echo $(cat matrix_exponent.log | grep "Output is not correct")
j=0
for i in $testvar; do
echo "For puzzle size '$i'"
echo "Execution time:"
echo ${t_start_ref[j]}-${t_start[j]} | bc -l
echo "Reference time: "
echo ${t_end_ref[j]}-${t_start_ref[j]} | bc -l
((j++))
#echo "Reference time = '${
done
###############
echo "Running option_explicit"
rm option_explicit.log
for i in $testvar; do
./bin/run_puzzle option_explicit $i 2 2>> option_explicit.log
done
declare -a t_start=($(cat option_explicit.log | grep "Executing puzzle" | sed 's/.*\],//;s/,.*$//')) #replace everything before ], with nothing and everything after , with nothing
declare -a t_start_ref=($(cat option_explicit.log | grep "Executing reference" | sed 's/.*\],//;s/,.*$//'))
declare -a t_end_ref=($(cat option_explicit.log | grep "Checking output" | sed 's/.*\],//;s/,.*$//') )
echo $(cat option_explicit.log | grep "Output is not correct")
j=0
for i in $testvar; do
echo "For puzzle size '$i'"
echo "Execution time:"
echo ${t_start_ref[j]}-${t_start[j]} | bc -l
echo "Reference time: "
echo ${t_end_ref[j]}-${t_start_ref[j]} | bc -l
((j++))
#echo "Reference time = '${
done
###############
echo "Running string_search"
rm string_search.log
for i in $testvar; do
./bin/run_puzzle string_search $i 2 2>> string_search.log
done
declare -a t_start=($(cat string_search.log | grep "Executing puzzle" | sed 's/.*\],//;s/,.*$//')) #replace everything before ], with nothing and everything after , with nothing
declare -a t_start_ref=($(cat string_search.log | grep "Executing reference" | sed 's/.*\],//;s/,.*$//'))
declare -a t_end_ref=($(cat string_search.log | grep "Checking output" | sed 's/.*\],//;s/,.*$//') )
echo $(cat string_search.log | grep "Output is not correct")
j=0
for i in $testvar; do
echo "For puzzle size '$i'"
echo "Execution time:"
echo ${t_start_ref[j]}-${t_start[j]} | bc -l
echo "Reference time: "
echo ${t_end_ref[j]}-${t_start_ref[j]} | bc -l
((j++))
#echo "Reference time = '${
done | true |
25840e2668f740a41ea39fb0efcddf198199c8ca | Shell | DaTeToMe/Archlinux | /Config/nvidia -> nouveau.sh | UTF-8 | 1,015 | 2.65625 | 3 | [] | no_license | #!/bin/bash
# nvidia -> nouveau
/usr/bin/sudo /bin/sed -i 's/#options nouveau modeset=1/options nouveau modeset=1/' /etc/modprobe.d/modprobe.conf
/usr/bin/sudo /bin/sed -i 's/#MODULES="nouveau"/MODULES="nouveau"/' /etc/mkinitcpio.conf
/usr/bin/sudo /usr/bin/pacman -Rdds --noconfirm nvidia-173xx{,-utils}
/usr/bin/sudo /usr/bin/pacman -S --noconfirm nouveau-dri xf86-video-nouveau
#/usr/bin/sudo /bin/cp {10-monitor,30-nouveau}.conf /etc/X11/xorg.conf.d/
/usr/bin/sudo /sbin/mkinitcpio -p linux
#想要成功地完成切换,一次重启是很有必要的。 请根据您正在使用的驱动版本来修改一些地方(在这里我使用的是nvidia-173xx)
#假如您正在使用的xorg-server的版本低于1.10.2-1,取消注释行,复制和删除{10-monitor,30-nouveau}.conf。自从1.10.2-1之后的版本,xorg-server修补为自动加载nouveau。我保留了10-monitor.conf和30-nouveau.conf[broken link: invalid section]在同一个目录作为这个脚本,必要时还要调整一下路径。
| true |
0dc7c18f5dfb2a6df74685213fdf0a689b6cc624 | Shell | mchmarny/buildstatus | /bin/cleanup | UTF-8 | 351 | 2.59375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
DIR="$(dirname "$0")"
. "${DIR}/config"
# PubSub
gcloud pubsub subscriptions delete $SUBSCRIPTION_NAME -q
# Cloud Run
gcloud beta run services delete $SERVICE_NAME \
--platform managed \
--region $SERVICE_REGION -q
# IAM
gcloud iam service-accounts delete \
"${SERVICE_ACCOUNT_NAME}@${PROJECT}.iam.gserviceaccount.com" -q
| true |
3b3590d3516ac4bea617bf5019189a2f48215ffb | Shell | martian-lang/martian | /test/fork_test/autoretry_fail_test.sh | UTF-8 | 258 | 2.5625 | 3 | [
"MIT",
"CC-BY-4.0"
] | permissive | #!/bin/bash
MROPATH=$PWD
if [ -z "$MROFLAGS" ]; then
export MROFLAGS="--disable-ui"
fi
PATH=../../bin:$PATH
mkdir -p ar_fail
export FAILFILE_DIR=$PWD/ar_fail
touch $FAILFILE_DIR/fail1
mrp --autoretry=1 --psdir=ar_pipeline_fail pipeline.mro pipeline_fail
| true |
bf7a180d342615d65456982a6ccb3c71f71c0487 | Shell | vinriviere/cross-mint-ubuntu | /.github/main.sh | UTF-8 | 1,607 | 3.125 | 3 | [] | no_license | #!/bin/bash -eu
# -e: Exit immediately if a command exits with a non-zero status.
# -u: Treat unset variables as an error when substituting.
echorun () {
echo "::group::Run $@"
"$@"
echo "::endgroup::"
}
# Display packages versions
echorun git --version
echorun gpg --version
echorun dpkg --version
# Set DEBEMAIL variable for signing packages
echorun . .github/guess_debemail.sh
# Deploy updated packages to Ubuntu Launchpad PPA
echorun . .github/init_ppa.sh ppa:vriviere/ppa
echorun .github/deploy_changed_packages.sh -m68k-atari-mint
# The lines below are sorted according to dependencies
#echorun .github/deploy_ppa_all_dists.sh binutils-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh mintbin-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh gcc-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh mintlib-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh pml-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh gemlib-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh ncurses-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh zlib-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh cross-mint-essential
#echorun .github/deploy_ppa_all_dists.sh cflib-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh gemma-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh ldg-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh readline-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh sdl-m68k-atari-mint
#echorun .github/deploy_ppa_all_dists.sh openssl-m68k-atari-mint
echo "Packages will be available at $PPA_URL"
| true |
2a7621136dee57973a3a5035f4cc2ef57923f6c6 | Shell | StathVaille/livekills | /src/main/resources/solarsystem_id_generator.sh | UTF-8 | 317 | 2.828125 | 3 | [] | no_license | #!/usr/bin/env bash
# Extracts the solar system Ids for a specific region from the Eve Static Data Export.
# SDE can be downloaded from https://developers.eveonline.com/resource/resources
cd sde/fsd/universe/eve/Catch
grep -R "solarSystemID" . | cut -d "/" -f 3,4 | sed "s|/[^ ]*||g" | sed "s/ /,/g" > Catch_ids.csv | true |
eaed262651c1784924977c93b325cad47501c94b | Shell | PG2000/aws-codecommit-pr-resource | /assets/out | UTF-8 | 1,198 | 3.59375 | 4 | [] | no_license | #!/bin/bash
# vim: set ft=sh
set -e -u -x
exec 3>&1 # make stdout available as fd 3 for the result
exec 1>&2 # redirect all output to stderr for logging
TMPDIR=${TMPDIR:-/tmp}
source "$(dirname "$0")/common.sh"
destination="$1"
if [ -z "${destination}" ]; then
echo "Usage: $0 <path/to/destination>" >&2
exit 1
fi
payload="$(mktemp "${TMPDIR}/gitlab-merge-request-resource.XXXXXX")"
cat > "${payload}" <&0
jq -r < "${payload}"
uri="$(jq -r '.source.uri // ""' < "${payload}")"
repo_name=$(echo $uri | sed 's/codecommit::eu-central-1:\/\///g')
version="$(jq -r '.version // ""' < "${payload}")"
commit_sha="$(echo "${version}" | jq -r '.sha // ""')"
pr="$(echo "${version}" | jq -r '.pr // ""')"
cd "${destination}"
git clone ${uri} ${repo_name}
cd "${repo_name}"
git reset --hard "${commit_sha}"
before_commit_id=$(git rev-parse "${commit_sha}"^)
aws codecommit post-comment-for-pull-request \
--pull-request-id "${pr}" \
--repository-name "${repo_name}" \
--after-commit-id "${commit_sha}" \
--before-commit-id "${before_commit_id}" \
--content "test pr comment from concourse"
jq -n "{
version: ${version},
metadata: $(git_metadata)
}" >&3
| true |
a092500369a5f7bb448f83d2df73c953255b55c1 | Shell | legendddhgf/cmps012b-pt.w18.grading | /pa5/pa5.sh | UTF-8 | 380 | 2.921875 | 3 | [] | no_license | #!/usr/bin/bash
# cmps012b-pt.w18 grading
# usage: pa5.sh
# (run within your pa5 directory to test your code)
SRCDIR=https://raw.githubusercontent.com/legendddhgf/cmps012b-pt.w18.grading/master/pa5
EXE1="pa5-make-check.sh"
EXE2="pa5-dictionary-check.sh"
curl $SRCDIR/$EXE1 > $EXE1
curl $SRCDIR/$EXE2 > $EXE2
chmod +x $EXE1
chmod +x $EXE2
./$EXE1
./$EXE2
rm -f $EXE1
rm -f $EXE2
| true |
ee13cbbf925161453fa80a7470d7a1ab919dfb84 | Shell | zsh-vi-more/vi-motions | /functions/-select-a-command | UTF-8 | 963 | 2.859375 | 3 | [
"ISC"
] | permissive | #!/usr/bin/env zsh
emulate -L zsh
setopt extendedglob
local cmd_delim=$'(\n##''|\||\|\||&&|;|;;|;&)'
local -a reply
local -i REPLY{,2}
split-shell-arguments
local -i start=$REPLY end=$REPLY {b,f}count=1 back
if [[ $KEYS = *[[:upper:]] ]]; then
bcount=${NUMERIC:-1}
else
fcount=${NUMERIC:-1}
fi
#print $REPLY \<$^reply\>
# backwards
repeat $bcount; do
start="${${(@)reply[0,start-1]}[(I)$~cmd_delim]}"
((start)) || break
done
repeat $fcount; do
end+="${${(@)reply[end+1,-1]}[(i)$~cmd_delim]}"
((end < $#reply)) || break
done
if [[ $WIDGET = select-in* ]]; then
# move to exclude space after ";"
((start += 1))
# move to exclude space after "if"
[[ $reply[start+1] = (if|elif|then|else|while|do|until|\{|\() ]] && ((start += 2))
# move to include space before ";", exclude ";"
[[ $reply[end] = $~cmd_delim ]] && ((end -= 1 ))
else
# move to include space after ";"
((end += 1))
fi
CURSOR="${(cj..)#reply[1,start]}"
MARK="${(cj..)#reply[1,end]}"
| true |
731757a5cec605b3eee383313f810b0cfaad8394 | Shell | nikosChalk/miscellaneous | /ssh_csd.sh | UTF-8 | 515 | 3.328125 | 3 | [] | no_license | #!/bin/sh
params=1
gate1="gate1"
gate2="gate2"
ORANGE='\033[0;33m'
BLUE='\033[0;34m'
LIGHT_GREEN='\033[1;32m' #Light Green
NC='\033[0m' # No Color
clear
echo
echo "${LIGHT_GREEN}Starting ssh to csd.uoc.gr ... ${NC}"
if [ "$#" -lt "$params" ]; then
machine="kiwi"
echo "${ORANGE}Machine parameter omitted. Default machine is: ${BLUE}$machine${NC}"
else
machine=$1
fi
old_term=$TERM
TERM=xterm-color
ssh csd3638@$machine.csd.uoc.gr #-o HostKeyAlgorithms=+ssh-dss -o PubKeyAcceptedKeyTypes=+dsa
TERM=$old_term
| true |
d1cdd85949ca034e9113221de0b4a2bdf33afcb1 | Shell | redheet/Dwipantara | /dwipantara | UTF-8 | 35,457 | 3.375 | 3 | [] | no_license | #!/bin/bash
#!/data/data/com.termux/files/usr/bin/bash
"""
I love my country Indonesia
autoinstaller DWIPANTARA
coded MR.R3DH3T
Don't copyright and recode this script bro
"""
#Warna // Color
blue='\e[0;34m'
cyan='\e[0;36m'
okegreen='\033[92m'
lightgreen='\e[1;32m'
white='\e[1;37m'
red='\e[1;31m'
yellow='\e[1;33m'
#Start
function startingTools()
{
clear
echo -e "${okegreen}User : "$(uname -n)
echo -e "${lightgreen}Login as : "$(whoami)
echo -e "${okegreen}Starting Dwipantara tools at : "$(date)
sleep 1
echo " "
}
startingTools
#Bar
function spinner()
{
sleep 3 &
PID=$!
i=1
sp="/-\|"
echo -n ' '
while [ -d /proc/$PID ]
do
printf "\b${sp:i++%${#sp}:1}"
done
}
function bar()
{
bar=" ******************************************************"
barlength=${#bar}
i=0
while ((i < 100)); do
n=$((i*barlength / 100))
printf "\e[00;32m\r[%-${barlength}s]\e[00m" "${bar:0:n}"
((i += RANDOM%5+2))
sleep 0.02
done
}
function CheckTools ()
{
which wget > /dev/null 2>&1
if [ "$?" -eq "0" ]; then
echo -e $okegreen "[ Wget ] Status : ${lighgreen}[ Installed ]"
which wget > /dev/null 2>&1
sleep 0.5
else
echo -e $red "[ Wget ] Status : ${red}[ Not Installed ] "
echo -e $yellow "[ ! ] Installing Wget "
case $( uname -o ) in
GNU/Linux)
echo -e "${yellow}Installing Wget for PC"
apt-get install wget -y;;
Android)
echo -e "${yellow}Installing Wgt for Termux"
pkg install wget -y;;
esac
echo -e $okegreen "[ Wget ] Done installing [ ✔ ]"
which wget > /dev/null 2>&1
sleep 0.5
fi
which curl > /dev/null 2>&1
if [ "$?" -eq "0" ]; then
echo -e $okegreen "[ Curl ] Status : ${lighgreen}[ Installed ]"
which curl > /dev/null 2>&1
sleep 0.5
else
echo -e $red "[ Curl ] Status : ${red}[ Not Installed ] "
echo -e $yellow "[ ! ] Installing Curl "
case $( uname -o ) in
GNU/Linux)
echo -e "${yellow}Installing Curl for PC"
apt-get install curl -y;;
Android)
echo -e "${yellow}Installing Curl for Termux"
pkg install curl -y;;
esac
echo -e $okegreen "[ Curl ] Done installing [ ✔ ]"
which curl > /dev/null 2>&1
sleep 0.5
fi
which git > /dev/null 2>&1
if [ "$?" -eq "0" ]; then
echo -e $okegreen "[ Git ] Status : ${lighgreen}[ Installed ]"
which git > /dev/null 2>&1
sleep 1
else
echo -e $red "[ Git ] Status : ${red}[ Not Installed ] "
echo -e $yellow "[ ! ] Installing Git "
case $( uname -o ) in
GNU/Linux)
echo -e "${yellow}Installing Git for PC"
apt-get install git -y;;
Android)
echo -e "${yellow}Installing Git for Termux"
pkg install git -y;;
esac
echo -e $okegreen "[ Git ] Done installing [ ✔ ]"
which curl > /dev/null 2>&1
sleep 1
fi
echo " "
echo -e $yellow" [!] Checking Tools 4 Termux [!]"
echo " "
bar
echo " "
which python > /dev/null 2>&1
if [ "$?" -eq "0" ]; then
echo -e $okegreen "[ Python ] Status : ${lighgreen}[ Installed ]"
which python > /dev/null 2>&1
sleep 0.5
else
echo -e $red "[ Python ] Status : ${red}[ Not Installed ] "
echo -e $yellow "[ ! ] Installing Python For You [ ! ]"
case $( uname -o ) in
Android)
pkg install python -y;;
*)
echo -e "${yellow}Your System is Not Termux ${red}[ x ]";;
esac
echo -e $okegreen "[ Python ] Done installing [ ✔ ]"
which python > /dev/null 2>&1
sleep 0.5
fi
which python2 > /dev/null 2>&1
if [ "$?" -eq "0" ]; then
echo -e $okegreen "[ Python2 ] Status : ${lighgreen}[ Installed ]"
which python2 > /dev/null 2>&1
sleep 0.5
else
echo -e $red "[ Python2 ] Status : ${red}[ Not Installed ] "
echo -e $yellow "[ ! ] Installing Python2 For You [ ! ]"
case $( uname -o ) in
Android)
pkg install python2 -y;;
*)
echo -e "${yellow}Your System is Not Termux";;
esac
echo -e $okegreen "[ Python2 ] Done installing [ ✔ ]"
which curl > /dev/null 2>&1
sleep 0.5
fi
}
#Checking Internet Connection
echo -e "${yellow}Checking Internet Connection"
spinner
# Verify connectivity by pinging Google once; abort the whole script when offline.
checkinternet() {
  if ping -c 1 google.com > /dev/null 2>&1; then
    echo -e $yellow "${okegreen}CONNECTED"
  else
    echo -e $yellow "${red}NOT CONNECTED"
    echo
    echo -e $red "Run this Script, if you Connected to Internet"
    echo
    echo -e $red "Exit!"
    echo && sleep 2
    exit
  fi
}
checkinternet
sleep 1
echo " "
echo -e $yellow" [!] Checking Tools [!]"
echo " "
bar
echo " "
CheckTools
clear
####################################################
# CTRL C
####################################################
# Install a SIGINT (Ctrl-C) handler: interrupting the menu shows the farewell
# banner plus credits, waits for Enter, then terminates the script.
trap ctrl_c INT
ctrl_c () {
clear
# Indonesian farewell messages for the user.
echo -e $cyan" Terima Kasih Sudah Memakai Tools DWIPANTARA "
sleep 2
echo ""
echo -e $cyan" Dunia Itu Hanya Sesaat, Jadi Berbuatlah Baik Untuk Akhiratmu "
sleep 2
echo ""
# Project and author contact details.
echo -e $cyan" ├─ DWIPANTARA ─┤"
echo -e $cyan" ├─ Instagram ─┤"
echo -e $cyan" └─> dwi_pantara0518"
sleep 0.5
echo -e $cyan" ├─ Website ─┤"
echo -e $cyan" └─> https://dwipantara0518.blogspot.com/ "
sleep 1
echo -e $cyan" ├─ Author 1 ─┤ "
echo -e $cyan" ├─ MR.R3DH3T ─┤ "
echo -e $cyan" ├─ Instagram ─┤ "
echo -e $cyan" └─> naufalziidane_ "
echo -e $cyan" ├─ Author 2 ─┤ "
echo -e $cyan" ├─ loopstr34k ─┤ "
echo -e $cyan" ├─ Instagram ─┤ "
echo -e $cyan" └─> loopstr34k "
echo " "
# Greetings / credits table.
echo -e $white" +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ "
echo -e $white" | 1.MsLq | 2.Ksatria Mujahidin | 3.mv24ky | 4.farhan | 5.root-S4D | "
echo -e $white" | 6.greenhorn | 7.sasman96 | 8.mizuki | 9.nyi_iteung_ | 10.H Code | "
echo -e $white" | 11.hanum_zaahra | 12.pribumi_jihad | 13.Lokzy | 14.Nc_tesla199 | "
echo -e $white" | 15.Opposite6890 | 16.B0C4HTU4N4K4L | 17.loopstr34k | 18.Emak_Julia | "
echo -e $white" | 19.pasukan_hitam | 20.Niqabies_women | 21.A | 22._mushab_bin_umair | "
echo -e $white" | 23.Hidayat | 24.farhan | 25.Dani | 26.rezim_laknat | 27.Squad_monkey3 | "
echo -e $white" | 28.p417ger4n_bers0rb4n | 29.d_dev05 | 30.Mr.Cl4Y | 31.jurnal_jamaah | "
echo -e $white" +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ "
echo ""
sleep 2
# Wait for the user to press Enter, then exit the whole script.
read enter
exit
}
echo -e $red" ______ _____ ____ _ _ _ _____ _ ____ _ "
echo -e $red" | _ \ \ / /_ _| _ \ / \ | \ | |_ _|/ \ | _ \ / \ "
echo -e $red" | | | \ \ /\ / / | || |_) / _ \ | \| | | | / _ \ | |_) | / _ \ "
echo -e $red" | |_| |\ V V / | || __/ ___ \| |\ | | |/ ___ \| _ < / ___ \ "
echo -e $red" |____/ \_/\_/ |___|_| /_/ \_\_| \_| |_/_/ \_\_| \_\/_/ \_\ "
echo ""
echo -e $white" ___ _ _ ____ ___ _ _ _____ ____ ___ _ "
echo -e $white" |_ _| \ | | _ \ / _ \| \ | | ____/ ___|_ _| / \ "
echo -e $white" | || \| | | | | | | | \| | _| \___ \| | / _ \ "
echo -e $white" | || |\ | |_| | |_| | |\ | |___ ___) | | / ___ \ "
echo -e $white" |___|_| \_|____/ \___/|_| \_|_____|____/___/_/ \_\ "
echo ""
sleep 2
clear
# Print page 1 of the tool menu (entries 1-25). The bracketed tag in each row
# shows which platform the tool was tested on; the per-row sleeps are cosmetic.
function firsttools() {
echo -e "${yellow}Alert!"
echo -e "${yellow}Any = Linux / Android | N = Next tools | Version |"
echo -e "${yellow}GNU/Linux = Linux PC | P = Prev tools | |"
echo -e "${yellow}Android = Termux | C = Contact Author | 2.3 |"
echo -e $red"+-----+--------------------------------------+-----------------------+"
sleep 0.1
echo -e $red"| Num | Tools Name [Tested On] | Function |"
sleep 0.1
echo -e $red"+-----+--------------------------------------+-----------------------+"
sleep 0.1
echo -e $red"| 1 | Metasploit Framework [Any] | Pentesting |"
sleep 0.1
echo -e $red"| 2 | Torshammer [Any] | DDOS |"
sleep 0.1
echo -e $red"| 3 | Xerxes [Any] | DDOS |"
sleep 0.01
echo -e $red"| 4 | SQLMap [Any] | Get Database |"
sleep 0.1
echo -e $red"| 5 | Termux Ohmyzhs [Android] | Termux Theme |"
sleep 0.01
echo -e $red"| 6 | ko-dork [Any] | Pentesting Web |"
sleep 0.1
echo -e $red"| 7 | CMSmap [Any] | CMS Scanner |"
sleep 0.1
echo -e $red"| 8 | Ngrok [Any] | Go Online |"
sleep 0.01
echo -e $red"| 9 | Webdav [Any] | Web Server |"
sleep 0.1
echo -e $red"| 10 | Venom [Any] | Pentesting |"
sleep 0.1
echo -e $red"| 11 | Shell Finder [Any] | Databse Web |"
sleep 0.01
echo -e $red"| 12 | Termux Sudo [Android] | Access Root |"
sleep 0.1
echo -e $red"| 13 | Red Hawk [Any] | Scanning Web |"
sleep 0.1
echo -e $red"| 14 | Kali Nethunter [Android] | OS Kali Linux |"
sleep 0.1
echo -e $red"| 15 | Hasher [Any] | Hashing |"
sleep 0.1
echo -e $red"| 16 | Weeman [Any] | Pishing |"
sleep 0.1
echo -e $red"| 17 | Mercury [Any] | Pentesting |"
sleep 0.1
echo -e $red"| 18 | SpazSMS [Any] | Spam SMS |"
sleep 0.1
echo -e $red"| 19 | Websploit [Any] | Pentesting & MLITM |"
sleep 0.1
echo -e $red"| 20 | Script Deface [Any] | Creator Script |"
sleep 0.1
echo -e $red"| 21 | Fsociety [Any] | Pentesting |"
sleep 0.1
echo -e $red"| 22 | Vbug [Any] | Virtual Bug |"
sleep 0.1
echo -e $red"| 23 | IP Geolocation [Any] | Tracking IP |"
sleep 0.1
echo -e $red"| 24 | Instagram [Any] | Brute Force |"
sleep 0.1
echo -e $red"| 25 | User Recon [Any] | Scan Username |"
echo -e $red"+-----+--------------------------------------+-----------------------+"
sleep 0.1
}
# Print page 2 of the tool menu (entries 26-50 plus the "00" exit row).
# Reached from the prompt loop with "n"; "p" returns to firsttools.
function secondtools() {
echo -e "${yellow}Alert!"
echo -e "${yellow}Any = Linux / Android | N = Next tools | Version |"
echo -e "${yellow}GNU/Linux = Linux PC | P = Prev tools | |"
echo -e "${yellow}Android = Termux | C = Contact Author | 2.3 |"
echo -e $red"+-----+--------------------------------------+-----------------------+"
sleep 0.1
echo -e $red"| Num | Tools Name [Tested On] | Function |"
sleep 0.1
echo -e $red"+-----+--------------------------------------+-----------------------+"
sleep 0.1
echo -e $red"| 26 | The Fat Rat [GNU/Linux] | Pentesting |"
sleep 0.1
echo -e $red"| 27 | Evil - Droid [GNU/Linux] | Payload Android |"
sleep 0.1
echo -e $red"| 28 | Wifi Jammer [GNU/Linux] | Wlan Attack |"
sleep 0.1
echo -e $red"| 29 | Hydra [Any] | Password Attack |"
sleep 0.1
echo -e $red"| 30 | Nmap [Any] | Information Gathering |"
sleep 0.1
echo -e $red"| 31 | Wordprescan [Any] | Wp-login & password |"
sleep 0.1
echo -e $red"| 32 | Theharverster [Any] | Searching Any email |"
sleep 0.1
echo -e $red"| 33 | Inetspeedtest [Any] | Check Speed Internet |"
sleep 0.1
echo -e $red"| 34 | Termux-Packages [Android] | Command tools termux |"
sleep 0.1
echo -e $red"| 35 | Ubuntu [Android] | OS Ubuntu |"
sleep 0.1
echo -e $red"| 36 | Routersploit [GNU/Linux] | Router attack |"
sleep 0.1
echo -e $red"| 37 | Joomscan [GNU/Linux] | Scanning Web CMS |"
sleep 0.1
echo -e $red"| 38 | Pybelt [Any] | Scanning Vuln Web |"
sleep 0.1
echo -e $red"| 39 | Brute XSS [Any] | Pentesting Web |"
sleep 0.1
echo -e $red"| 40 | Dirsearch [Any] | Search Direktory Web |"
sleep 0.1
echo -e $red"| 41 | WebApp [Any] | Information Grather |"
sleep 0.1
echo -e $red"| 42 | Hunner [Any] | Any Bruteforce & Scan |"
sleep 0.1
echo -e $red"| 43 | Netattack2 [GNU/Linux] | Wlan attack & Kick |"
sleep 0.1
echo -e $red"| 44 | Wifiphisher [GNU/Linux] | Simple Wlan attack |"
sleep 0.1
echo -e $red"| 45 | Wifigod [GNU/Linux] | Simple Wlan attack |"
sleep 0.1
echo -e $red"| 46 | Th3inspector [Any] | Special for any search|"
sleep 0.1
echo -e $red"| 47 | Xattacker [Any] | Pentesting web |"
sleep 0.1
echo -e $red"| 48 | Darksploit [Any] | Pentesting |"
sleep 0.1
echo -e $red"| 49 | PTF [GNU/Linux] | Peneration Framework |"
sleep 0.1
echo -e $red"| 50 | IMSI-Catcher Detector [Android] | Protect your self |"
sleep 0.1
echo -e $red"+-----+--------------------------------------+-----------------------+"
sleep 0.1
echo -e $red"| 00 | Exit Dwipantara Tools | ${yellow}Good Bye ${red}|"
sleep 0.1
echo -e $red"+-----+--------------------------------------+-----------------------+"
sleep 0.01
}
firsttools
echo ""
while true; do
read -p" root@dwipantara ~/# " dwipan;
if [ $dwipan = n ] || [ $dwipan = N ]
then
secondtools
fi
if [ $dwipan = p ] || [ $dwipan = P ]
then
firsttools
fi
if [ $dwipan = c ] || [ $dwipan = C ]
then
clear
echo " "
echo -e $red " [ Contact Author ]"
echo " "
echo -e $okegreen " Author 1 ▶ MR.R3DH3T"
echo -e $okegreen " Instagram ▶ naufalziidane_"
echo -e $red "------------------------------------------------"
echo -e $blue " Author 2 ▶ LOOPSTR34K"
echo -e $blue " Instagram ▶ loopstr34k"
echo -e $blue " Telegram ▶ https://t.me/Grandshoot"
echo " "
exit
fi
if [ $dwipan = 1 ] || [ $dwipan = 01 ]
then
clear
echo -e $blue" [ Installing Metasploit ] "
sleep 1
echo " Harus Sabar! "
sleep 3
apt install curl -y
wget https://raw.githubusercontent.com/Hax4us/Metasploit_termux/master/metasploit.sh
sleep 1
chmod +x metasploit.sh
./metasploit.sh
sleep 5
mv metasploit-framework $HOME
echo "${green} Use Command 'msfconsole' to run metasploit "
echo "${green} e.g: root@dwipantara#~/ msfconsole"
sleep 2
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 0 ] || [ $dwipan = 00 ]
then
clear
echo -e $okegreen">> Pesan terakhir! << "
echo ""
echo ""
sleep 2
echo -e $lightgreen" Jangan pernah tinggalin ngaji ingat! menuntut ilmu agama itu wajib , karna jaman sekarang orang islam tidak tahu agamanya sendiri "
echo ""
sleep 3
echo -e $red" Jangan pacaran mulu! , Jika kamu diam saat agamamu dihina gantilah pakaianmu dengan kain kafan "
echo ""
sleep 3
echo -e $red" Islam bagaikan buih di lautan ( Banyak orang islam tapi tidak tahu islam yang sesungguhnya ) "
echo ""
sleep 3
echo -e $okegreen" Semangat Berjuang Demi Membela Kebenaran! TAKBIR! "
echo ""
sleep 3
echo -e $lightgreen" Per eratlah tali persaudaraan , wa alfu minkum wassalamu'alaikum warrohmatullohi wabarokatuhu "
echo ""
sleep 3
exit
fi
if [ $dwipan = 2 ] || [ $dwipan = 02 ]
then
clear
echo -e $blue" [ Installing Torshammer ] "
git clone https://github.com/cyweb/hammer.git
cd hammer
chmod +x hammer.py
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 3 ] || [ $dwipan = 03 ]
then
clear
echo -e $blue" [ Installing Xerxes ] "
echo -e "${yellow}Installing Clang for Xerxes ${red}[ ! ]"
function insclang ()
{
case $( uname -o ) in
GNU/Linux)
apt-get install clang -y
echo -e "${okegreen}Finish [ ✔ ]";;
Android)
pkg install clang -y
echo -e "${okegreen}Finish [ ✔ ]";;
esac
}
insclang
echo -e $yellow " [i] Downloading Xerxes! [i]"
down &
pid=$!
for i in `seq 1 100`;
do
load $i
sleep 0.1
done
kill $pid > /dev/null 2>&1
sleep 1
echo ""
git clone https://github.com/zanyarjamal/xerxes.git 2> /dev/null
echo -e $yellow " [i] Setting Up Xerxes Tools [i]"
setuptools &
sleep 5
kill "$!"
printf '\n'
cd xerxes
gcc xerxes.c -o xerxes
echo " use ? ./xerxes target (ex) www.suaranasional.com 80 , jika tidak ada tulisan voly sent ulang aja bro "
echo ""
echo ""
sleep 2
echo -e $red" { Done Install bro } "
fi
# Option 4: clone sqlmap (SQL injection / database takeover tool).
if [ $dwipan = 4 ] || [ $dwipan = 04 ]
then
clear
# Fixed: original used "echo -$blue", which printed a literal "-" plus the raw
# escape sequence instead of interpreting the colour code.
echo -e $blue" [ Installing SQLmap-dev ] "
git clone --depth 1 https://github.com/sqlmapproject/sqlmap.git sqlmap-dev
cd sqlmap-dev
chmod +x *.py
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 5 ] || [ $dwipan = 05 ]
then
clear
echo -e $blue" [ Installing Termux-ohmyzsh ] "
sh -c "$(curl -fsSL https://github.com/Cabbagec/termux-ohmyzsh/raw/master/install.sh)"
~/.termux/colors.sh
echo -e $white" ganti color ? ketik ~/.termux/colors.sh "
sleep 2
echo ""
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 6 ] || [ $dwipan = 06 ]
then
clear
echo -e $blue" [ Installing ko-dork ] "
git clone https://github.com/ciku370/ko-dork.git
mv ko-dork $HOME
cd ~/
cd ko-dork
chmod +x *.py
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 7 ] || [ $dwipan = 07 ]
then
clear
echo -e $blue" [ Installing CMS scanner ] "
git clone https://github.com/Dionach/CMSmap.git
sleep 1
cd CMSmap
chmod +x cmsmap.py
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 8 ] || [ $dwipan = 08 ]
then
clear
echo -e $red" [ Installing Ngrok ] "
mkdir ngrok
cd ~/ngrok
echo -e "${red} (1) 32 bit"
echo -e "${red} (2) 64 bit"
echo -e "${red} (3) ARM"
read -p " root@dwipantara#~/ngrok-installer/ " ngrok;
if [ $ngrok = 1 ] || [ $ngrok = 01 ]
then
echo -e $yellow"Download Ngrok 32 bit "
wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-386.zip
unzip ngrok-stable-linux-386.zip
echo $yellow"Use Command './ngrok http 80' to use Ngrok"
echo $yellow"e.g: root@dwipantara#~/ ngrok http 80"
exit
fi
if [ $ngrok = 2 ] || [ $ngrok = 02 ]
then
echo -e $yellow"Download Ngrok 64 bit "
wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-amd64.zip
unzip ngrok-stable-linux-amd64.zip
echo $yellow"Use Command './ngrok http 80' to use Ngrok"
echo $yellow"e.g: root@dwipantara#~/ ngrok http 80"
exit
fi
if [ $ngrok = 3 ] || [ $ngrok = 03 ]
then
echo -e $yellow"Download Ngrok ARM "
wget https://bin.equinox.io/c/4VmDzA7iaHb/ngrok-stable-linux-arm.zip
unzip ngrok-stable-linux-arm.zip
echo -e $yellow"Use Command './ngrok http 80' to use Ngrok"
echo -e $yellow"e.g: root@dwipantara#~/ ngrok http 80"
exit
fi
cd ~/
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 9 ] || [ $dwipan = 09 ]
then
clear
echo -e $blue" [ Installing Webdav ] "
function reqweb ()
{
echo -e "${yellow}Installing Requirements Webdav"
case $( uname -o ) in
Android)
pip2 install urllib3 chardet certifi idna requests
apt install openssl curl
pkg install libcurl;;
GNU/Linux)
pip install urllib3 chardet certifi idna requests
apt-get --force-yes -y install openssl curl libcurl3
echo -e "${okegreen}Finish Installing Requirements Webdav [ ✔ ]"
esac
}
reqweb
mkdir webdav/
cd webdav
wget https://pastebin.com/raw/HnVyQPtR -O webdav.py
chmod 777 webdav.py
cd
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 10 ] || [ $dwipan = 010 ]
then
clear
echo -e $blue" [ Installing venom ] "
sleep 2
git clone https://github.com/r00t-3xp10it/venom.git
cd venom
sudo chmod -R +x *.sh
sudo chmod -R +x *.py
cd aux
sudo ./setup.sh
echo -e $white" install metasploit dulu bro error kan "
sleep 2
echo ""
echo -e $white" if you run main tools , use msfvenom -h "
sleep 3
echo ""
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 11 ] || [ $dwipan = 011 ]
then
clear
echo -e $blue" [ Installing Shell-finder ] "
git clone https://github.com/bhavyanshu/Shell-Finder.git
cd Shell-Finder
chmod +x shellfind.py
echo -e $white" use python2 and python if you run main tools bro "
echo -e $yellow" └>termux └> GNU/Linux (PC)"
echo " "
sleep 2
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 12 ] || [ $dwipan = 012 ]
then
clear
echo -e $blue" [ Installing sudo for Termux ] "
function sudooo()
{
case $( uname -o ) in
Android)
apt upgrade
pkg install ncurses-utils
git clone https://github.com/widhi18/Termux-Sudo.git
cd Termux-Sudo
cat sudo > /data/data/com.termux/files/usr/bin/sudo
chmod 700 /data/data/com.termux/files/usr/bin/sudo
cd;;
GNU/Linux)
echo -e "${red}Your OS is Not Termux"
exit;;
esac
}
sudooo
sleep 2
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 13 ] || [ $dwipan = 013 ]
then
clear
echo -e $blue" [ Installing RED_HAWK ] "
sleep 2
echo " "
echo -e "${blue}Installing Requirement RED_HAWK"
function instphp()
{
case $( uname -o ) in
Android)
pkg install php -y;;
GNU/Linux)
apt-get install php -y;;
esac
}
instphp
git clone https://github.com/Tuhinshubhra/RED_HAWK
cd RED_HAWK
chmod +x *.php
echo -e $white" if you run main tools , use php "
echo ""
sleep 2
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 14 ] || [ $dwipan = 014 ]
then
clear
echo -e $blue" [ Installing Kali-Nethunter ] "
sleep 2
echo -e $white" Sabar bro sambil ngopi :D "
function nethunter()
{
case $( uname -o ) in
Android)
apt upgrade
git clone https://github.com/Hax4us/Nethunter-In-Termux.git
cd /Nethunter-In-Termux
chmod 777 kalinethunter
sh kalinethunter;;
*)
echo -e "${cyan}Just For Termux"
esac
}
nethunter
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 15 ] || [ $dwipan = 015 ]
then
clear
echo -e $blue" [ Installing hasher ] "
git clone https://github.com/CiKu370/hasher.git
cd hasher
chmod +x hash.py
sleep 1
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 16 ] || [ $dwipan = 016 ]
then
clear
echo -e $blue" [ Installing Weeman ] "
git clone https://github.com/samyoyo/weeman
cd weeman
function reqweeman()
{
echo -e "${yellow}Installing Requirements Weeman"
case $( uname -o ) in
Android)
pip2 install beautifulsoup
pip2 install bs4;;
GNU/Linux)
pip install beutifulsoup
pip install bs4;;
esac
}
reqweeman
echo -e "${blue}Done."
cd
echo -e $white "use ngrok bro if you phising your friends "
echo ""
sleep 2
echo -e $red" { Done Install bro } "
fi
# Option 17: install Mercury (pentesting framework).
if [ $dwipan = 17 ] || [ $dwipan = 017 ]
then
clear
echo -e $blue" [ Installing Mercury ] "
git clone https://github.com/MetaChar/Mercury.git
cd Mercury
# Install python dependencies: pip2 on Termux (Android), pip on desktop Linux.
# Fixes vs. the original: "rerquirements.txt" typo, "pip install -e" (editable
# mode) instead of "-r", and a duplicate "cd Mercury" that failed because the
# helper had already changed into the repository.
function reqMer()
{
case $(uname -o) in
Android)
pip2 install -r requirements.txt;;
GNU/Linux)
pip install -r requirements.txt;;
esac
}
reqMer
chmod +x Mercury.py
echo -e $white" use python2 and python for run Mercury Tools"
echo -e $yellow" └>termux └> GNU/Linux (PC)"
echo " "
sleep 3
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 18 ] || [ $dwipan = 018 ]
then
clear
echo -e $blue" [ Installing SpazSms & call tokopedia ] "
function reqsms()
{
case $( uname -o ) in
Android)
pkg install php;;
GNU/Linux)
apt-get install php -y;;
esac
}
reqsms
git clone https://github.com/Gameye98/SpazSMS.git
echo ""
sleep 1.5
git clone https://github.com/Senitopeng/SpamSms.git
echo ""
echo -e $white" This tools for Number Indonesian "
sleep 1.5
echo ""
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 19 ] || [ $dwipan = 019 ]
then
clear
echo -e $blue" [ Instaling websploit ] "
pip2 install request
pip2 install scapy
pip install --upgrade pip
git clone https://github.com/websploit/websploit.git
cd websploit
chmod +x websploit
sleep 0.6
echo -e $green" usage ? ./websploit or python2 websploit "
echo ""
sleep 2
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 20 ] || [ $dwipan = 020 ]
then
clear
echo -e $blue" [ Installing Script ] "
echo " ${red} pleas wait... "
sleep 3
python2 dwipan.py
fi
if [ $dwipan = 21 ] || [ $dwipan = 021 ]
then
clear
echo -e $blue" [ Installing fsociety ] "
git clone https://github.com/manisso/fsociety
cd fsociety
chmod a+x *.py
cd ~/
echo ""
sleep 1
echo -e $red" { Done Instaal bro } "
echo ""
sleep 1
cd fsociety
chmod +x fsociety.py
echo ""
sleep 0.5
echo -e $lightgreen" use python2 fsociety.py "
fi
if [ $dwipan = 22 ] || [ $dwipan = 022 ]
then
clear
echo -e $blue" [ Installing vbug ] "
sleep 2
echo ""
echo $red" pleas wait... "
unzip vbug.zip
cd vbug
chmod +x *.py
echo -e $lightgreen" use python2 and python for run this tool "
echo -e "${yellow} └> termux └> GNU/Linux PC"
echo " "
sleep 1.4
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 23 ] || [ $dwipan = 023 ]
then
clear
echo -e $blue" [ Installing IPGeolocation ] "
git clone https://github.com/maldevel/IPGeolocation.git
cd IPGeolocation
chmod +x *.py
function pip1pip2()
{
case $( uname -o ) in
GNU/Linux)
pip install -r requirements.txt;;
Android)
pip2 install -r requirements.txt;;
esac
}
pip1pip2
sleep 2
echo -e $okegreen" use python2 and python ipgeolocation.py bro if you run "
echo -e "${yellow} └> termux └> GNU/Linux PC"
echo ""
sleep 3
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 24 ] || [ $dwipan = 024 ]
then
clear
echo -e $blue" [ Installing Instagram ] "
git clone https://github.com/Pure-L0G1C/Instagram.git
cd Instagram
pip install -r requirements.txt
chmod +x *.py
echo -e "${green} python instagram.py <username> <wordlis> <threads> "
echo ""
echo -e "${green} (ex) python instagram.py cikukba wordlist.txt 16 "
sleep 3
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 25 ] || [ $dwipan = 025 ]
then
clear
echo -e "${blue}[ Installing User Recon ]"
git clone https://github.com/thelinuxchoice/userrecon.git
cd userrecon
chmod +x userrecon.sh
echo -e $okegreen"{ Done Install }"
fi
if [ $dwipan = 26 ] || [ $dwipan = 026 ]
then
clear
echo -e $blue"[ Installing The Fat Rat ]"
git clone https://github.com/Screetsec/TheFatRat.git
cd TheFatRat
chmod +x setup.sh && ./setup.sh
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 27 ] || [ $dwipan = 027 ]
then
clear
echo -e $okegreen"[ Installing Evil - Droid ]"
echo -e $red"[!] Generate Payload For Backdoor Android [!]"
git clone https://github.com/M4sc3r4n0/Evil-Droid.git
cd Evil-Droid
chmod +x evil-droid
echo -e $red" { Done Install } "
fi
if [ $dwipan = 28 ] || [ $dwipan = 028 ]
then
clear
echo -e $blue"[ Installing Wifi Jammer ]"
git clone https://github.com/DanMcInerney/wifijammer.git
cd wifijammer
chmod +x wifijammer.py
fi
# Option 29: install THC-Hydra (network logon cracker) and fetch the rockyou wordlist.
if [ $dwipan = 29 ] || [ $dwipan = 029 ]
then
clear
echo -e $blue"[ Installing Hydra ]"
# Fixed: original ran "apt install update", which tries to install a package
# literally named "update"; the intended command is "apt update".
apt update && apt upgrade
apt install hydra
echo ""
echo -e $red" pleas wait... "
sleep 1
echo -e $red" pleas wait... "
sleep 1
echo -e $red" pleas wait... "
sleep 2
echo ""
# -p: do not fail if the directory is left over from a previous run.
mkdir -p wordlist
cd wordlist
wget http://scrapmaker.com/download/data/wordlists/dictionaries/rockyou.txt
cd ~/
# Fixed: "-e" was missing, so the colour escape sequences printed literally.
echo -e ${okegreen}" use hydra -h for help "
echo ""
echo -e ${okegreen}" This for ssh admin or root { e.g } hydra -t 4 127.0.0.1 ssh -l username -P wordlist.txt -s 22 -vV "
fi
if [ $dwipan = 30 ] || [ $dwipan = 030 ]
then
clear
echo -e $blue"[ Installing Nmap ] "
apt update && apt upgrade
apt install nmap
echo ""
echo -e $red" pleas wait... "
sleep 0.5
echo -e $red" pleas wait... "
sleep 0.5
echo -e $red" pleas wait... "
sleep 2
echo ""
echo ${okegreen}" { e.g } nmap 127.1.0.0 -sV "
echo ""
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 31 ] || [ $dwipan = 031 ]
then
clear
echo -e $blue" [ Installing WPscan ] "
apt install ruby -y
git clone https://github.com/wpscanteam/wpscan.git
echo ""
echo -e $red" ALERT! Don't close . "
sleep 4
gem install bundle
bundle install config build.nokogiri --use-system-libraries
gem install wpscan
bundle install && rake install
echo ${okegreen}" { e.g } wpscan -h "
echo ""
echo ${okegreen}" { e.g } wpscan --url http://kapanlobisa.co.id --enumerate u "
echo ""
sleep 2
echo ${okegreen}" { e.g } wpscan --url http://kapanlobisa.co.id --wordlist wordlist.txt --username input admin wordpress "
sleep 2
echo ""
echo -e $red" { Done Install bro } "
fi
if [ $dwipan = 32 ] || [ $dwipan = 032 ]
then
clear
echo -e $blue" [ Installing Theharvester ] "
git clone https://github.com/laramies/theHarvester.git 2> /dev/null
sleep 2
cd theHarvester
chmod +x *.py
echo -e "${red}{ Done Installing The Harvester }"
fi
if [ $dwipan = 33 ] || [ $dwipan = 033 ]
then
clear
echo -e $blue" [ Installing Inetspeedtest ] "
git clone https://github.com/loopstr34k/Inetspeedtest.git
cd Inetspeedtest
pip install -r requirements.txt
sleep 2
echo "${okegreen} Use python2 inetspeedtest if you check speed your internet "
echo ""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 34 ] || [ $dwipan = 034 ]
then
clear
echo -e $blue" [ Installling Termux-Packages ] "
apt update && apt upgrade
git clone https://github.com/termux/termux-packages.git
echo ""
echo "${okegreen} If you look command or install all for termux open cd termux-packages/packages "
echo ""
sleep 4
echo -e $red" { Done Install } "
fi
if [ $dwipan = 35 ] || [ $dwipan = 035 ]
then
clear
echo -e $blue" [ Instaling Ubuntu ] "
apt update && apt upgrade
apt install wget
apt install proot
git clone https://github.com/Neo-Oli/termux-ubuntu.git
cd ~/termux-ubuntu
chmod a+x ubuntu.sh
sh ubuntu.sh
echo "${red} please wait ... "
sleep 3
echo ""
echo "${okegreen} use nano /data/data/com.termux/files/home/Dwipantara/termux-ubuntu/ubuntu-fs/etc/resolv.conf | nameserver 8.8.8.8 "
echo "${red} { Done } "
fi
if [ $dwipan = 36 ] || [ $dwipan = 036 ]
then
clear
echo -e $blue" [ Installing Routersploit ] "
apt update
apt-get install python3-pip
git clone https://www.github.com/threat9/routersploit.git
cd routersploit
python3 -m pip install -r requirements.txt
echo "${okegreen} Use Python3 if you run Routersploit "
echo ""
sleep 2
echo -e $red" { Done Install } "
fi
if [ $dwipan = 37 ] || [ $dwipan = 037 ]
then
clear
echo -e $blue" [ Installing Joomscan ] "
apt update
apt-get install perl
git clone https://github.com/rezasp/joomscan.git
cd joomscan
chmod a+x *.pl
echo "${okegreen} Use perl joomscan.pl "
echo ""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 38 ] || [ $dwipan = 038 ]
then
clear
echo -e $blue" [ Installing Pybelt ] "
apt update
git clone https://github.com/Ekultek/Pybelt.git
cd Pybelt
pip install -r requirements.txt
sleep 3
echo "${okegreen} Use python2 pybelt.py or python pybelt.py "
echo -e "${yellow} └> termux └> GNU/Linux PC "
echo ""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 39 ] || [ $dwipan = 039 ]
then
clear
echo -e $blue" [ Installing BruteXSS ] "
apt update
git clone https://github.com/ihamquentin/BruteXSS.git
cd BruteXSS
chmod a+x *.py
echo "${okegreen} Use python2 brutexss.py "
echo ""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 40 ] || [ $dwipan = 040 ]
then
clear
echo -e $blue" [ Installing Dirserach ] "
apt update
git clone https://github.com/maurosoria/dirsearch.git
cd dirsearch
chmod a+x *.py
cd ~/
echo "${okegreen} This tools cool for Defacer "
sleep 2
echo ""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 41 ] || [ $dwipan = 041 ]
then
clear
echo -e $blue" [ Installing WebApp ] "
apt update
git clone https://github.com/jekyc/wig.git
cd wig
chmod a+x *.py
echo ""
echo "${okegreen} If run this tools on termux , Use python3 wig.py example.com "
sleep 4
echo""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 42 ] || [ $dwipan = 042 ]
then
clear
echo -e $blue" [ Installing Hunner ] "
apt update
git clone https://github.com/b3-v3r/Hunner.git
cd Hunner
chmod a+x *.py
cd ~/
echo""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 43 ] || [ $dwipan = 043 ]
then
clear
echo -e $blue" [ Installing Netattack2 ] "
apt update
git clone https://github.com/wi-fi-analyzer/netattack2.git
cd netattack2
apt-get install python-nmap python-argparse python-scapy iw
chmod a+x *.py
cd ~/
echo""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 44 ] || [ $dwipan = 044 ]
then
clear
echo -e $blue" [ Installing Wifiphisher ] "
apt update
git clone https://github.com/wifiphisher/wifiphisher.git
cd wifiphisher
chmod a+x *.py
sudo python setup.py install
echo "${okegreen} Runing main tools command : wifiphisher or python bin/wifiphisher . "
echo""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 45 ] || [ $dwipan = 045 ]
then
clear
echo -e $blue" [ Installing Wifigod ] "
apt update
git clone https://github.com/blackholesec/wifigod.git
cd wifigod
chmod a+x *.py
echo""
echo -e $red" { Done Install } "
fi
if [ $dwipan = 46 ] || [ $dwipan = 046 ]
then
clear
echo -e "${blue} [ Installing Th3inspector ] "
function inspector()
{
case $( uname -o ) in
Android)
apt update && apt upgrade
pip2 install scapy;;
GNU/Linux)
pip install scapy;;
esac
}
inspector
echo -e $okegreen" Done Install Requirements Inspector [ ✔ ] "
sleep 3
git clone https://github.com/Moham3dRiahi/Th3inspector.git
sleep 1
cd Th3inspector
chmod +x install.sh && ./install.sh
cd ~/
echo""
echo -e "${okegreen} Use perl For Run Main Tools ; perl Th3inspector.pl "
echo -e $red" { Done Install } "
fi
if [ $dwipan = 47 ] || [ $dwipan = 047 ]
then
clear
echo -e $blue" [ Installing Xattacker ] "
function attack()
{
case $( uname -o ) in
Android)
apt update && apt upgrade
apt install perl
git clone https://github.com/Moham3dRiahi/XAttacker.git
cd XAttacker
chmod +x termux-install.sh
chmod a+x *.pl
bash termux-install.sh;;
GNU/Linux)
git clone https://github.com/Moham3dRiahi/XAttacker.git
cd XAttacker
chmod a+x *.pl;;
esac
}
attack
sleep 2.5
echo -e "${okegreen} Done Install Requirements Xattacker [ ✔ ] "
cd ~/
echo""
echo -e "${okegreen} Use perl For Run Main Tools ; perl XAttacker.pl "
echo""
echo -e "${red} { Done Install } "
fi
if [ $dwipan = 48 ] || [ $dwipan = 048 ]
then
clear
echo -e "${blue} [ Installing DarkSploit ] "
function darkpo()
{
case $( uname -o ) in
Android)
apt update && apt upgrade
sleep 2
pip install -r requirements.txt
echo -e "${okegreen} Done Install Requirements Dark [ ✔ ] ";;
GNU/Linux)
pip2 install -r requirements.txt
echo -e "${okegreen} Done Install Requirements DarkSploit [ ✔ ] ";;
esac
}
git clone https://github.com/anthrax3/DarkSploit.git
sleep 0.1
cd DarkSploit
./installgnuroot
darkpo
cd ~/
echo""
echo -e "${red} { Done Install } "
fi
if [ $dwipan = 49 ] || [ $dwipan = 049 ]
then
clear
echo -e "${blue} [ Installing PTF ] "
git clone https://github.com/trustedsec/ptf.git
chmod a+x ptf
echo""
echo -e "${okegreen} Run Main Tools : ./ptf "
echo""
echo -e "${red} { Done Install } "
fi
if [ $dwipan = 50 ] || [ $dwipan = 050 ]
then
clear
echo -e "${blue} [ Download AIMSICD ] "
echo""
echo -e "${okegreen} This tools For protect your self from Hacker , Fake tower call . "
echo -e "${okegreen} Because Hacker and tower call stole information on Your device . "
sleep 2
echo -e "${yellow} Copy This Link if you Downloads this apk "
echo""
echo -e "${yellow} https://sfile.mobi/5CFBrs7xIsU "
sleep 2
echo -e "${red} { If You protect Your Self Donwload This apk } "
fi
done
| true |
2b871e774b5f5b33e329ebead4d8b8996c30e235 | Shell | mit-plv/bedrock2 | /etc/analyze_ZnWords_log.sh | UTF-8 | 519 | 3.171875 | 3 | [
"MIT"
] | permissive | #!/bin/sh
# In ZnWords.v, replace
# Ltac ZnWords := ZnWords_pre; better_lia.
# by
# Ltac ZnWords := time "ZnWords" (ZnWords_pre; better_lia).
# Then run
# TIMED=1 time make | tee /path/to/logfile.log
# and then run this script with /path/to/logfile.log as its first argument.
# Count ZnWords invocations recorded in the build log passed as $1.
# Fixed: $1 is now quoted so log paths containing spaces work; unquoted, a
# missing argument also made sed silently read stdin and hang.
echo -n "Number of ZnWords calls: "
sed "$1" -E -n -e 's/Tactic call ZnWords ran for ([^ ]+) .*/\1/p' | wc -l
# Sum every recorded ZnWords running time (seconds) by joining with '+' for bc.
echo -n "Total time spent in ZnWords: "
sed "$1" -E -n -e 's/Tactic call ZnWords ran for ([^ ]+) .*/\1/p' | paste -s -d+ - | bc
| true |
b0eaf070e65a6e1d4d49be50d5c0e8ebd25f0796 | Shell | pbreau3/ift6010-h21-team1 | /midi2wav.sh | UTF-8 | 276 | 2.890625 | 3 | [] | no_license | #!/bin/bash
# Requires fluidsynth
# Usage: ./midi2wav.sh midi-file wav-name
if [ $# -ne 2 ]; then
    # Fixed: usage text is now quoted and sent to stderr instead of stdout.
    echo "Usage: ./midi2wav.sh midi-file wav-name" >&2
    exit 1
fi
# Converts a midi to a wav for listening on colab
fluidsynth -ni `cat sound_font_path.txt` "$1" -F "$2" -r 44100 | true |
8004a09d9cfec54f0dd3003f3b49c8a15bfcab7c | Shell | okta/okta-auth-js | /scripts/verify-registry-install.sh | UTF-8 | 1,859 | 3.5 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# NOTE: MUST BE RAN *AFTER* THE PUBLISH SUITE
# CI script: verifies the just-published @okta/okta-auth-js tarball installs
# cleanly with npm, yarn classic, and yarn v3. Relies on the CI harness for
# setup_service, $ARTIFACTORY_URL, $OKTA_HOME, $REPO, $FAILED_SETUP, $SUCCESS.
# Install required node version
export REGISTRY="${ARTIFACTORY_URL}/npm-topic"
cd ${OKTA_HOME}/${REPO}
# First CLI argument optionally overrides the node version under test.
NODE_VERSION="${1:-v14.18.0}"
setup_service node $NODE_VERSION
# Use the cacert bundled with centos as okta root CA is self-signed and cause issues downloading from yarn
setup_service yarn 1.22.19 /etc/pki/tls/certs/ca-bundle.crt
# Install required dependencies
yarn global add @okta/ci-append-sha
yarn global add @okta/ci-pkginfo
export PATH="${PATH}:$(yarn global bin)"
# Append a SHA to the version in package.json
if ! ci-append-sha; then
echo "ci-append-sha failed! Exiting..."
exit $FAILED_SETUP
fi
# NOTE: hyphen rather than '@'
# Tarball URL for the SHA-stamped artifact on the internal registry.
artifact_version="$(ci-pkginfo -t pkgname)-$(ci-pkginfo -t pkgsemver)"
published_tarball=${REGISTRY}/@okta/okta-auth-js/-/${artifact_version}.tgz
# verify npm install
mkdir npm-test
pushd npm-test
npm init -y
if ! npm i ${published_tarball}; then
echo "npm install ${published_tarball} failed! Exiting..."
exit ${FAILED_SETUP}
fi
echo "Done with npm installation test"
popd
# verify yarn classic install
mkdir yarn-classic-test
pushd yarn-classic-test
yarn init -y
if ! yarn add ${published_tarball}; then
echo "yarn-classic install ${published_tarball} failed! Exiting..."
exit ${FAILED_SETUP}
fi
echo "Done with yarn classic installation test"
popd
# verify yarn v3 install
mkdir yarn-v3-test
pushd yarn-v3-test
# use yarn v3
yarn set version stable
yarn config set caFilePath /etc/pki/tls/certs/ca-bundle.crt
yarn init -y
# add empty lock file, so this dir can be a isolated project
touch yarn.lock
if ! yarn add @okta/okta-auth-js@${published_tarball}; then
echo "yarn-v3 install @okta/okta-auth-js@${published_tarball} failed! Exiting..."
exit ${FAILED_SETUP}
fi
echo "Done with yarn v3 installation test"
popd
exit $SUCCESS
| true |
0201db811ee3d129e61267f1738d58a7d504e5aa | Shell | ab08028/OtterExomeProject | /scripts/scripts_20180521/analyses/ROH_plink/runROH.plink.sh | UTF-8 | 1,919 | 2.59375 | 3 | [] | no_license | ### sandbox, roh in plink
module load plink
vcfdir=/u/flashscratch/a/ab08028/captures/vcf_filtering/20181119_filtered
header=snp_9a_forPCAetc_maxHetFilter_0.75_rmRelatives_rmAdmixed_passingBespoke_maxNoCallFrac_0.2_passingBespoke_passingAllFilters_postMerge_raw_variants
vcf=${header}.vcf.gz
# I think this is a good vcf. It doesn't have admixed or relatives present,
# has a 75% excess het filter already applied
# and has max no-call frac of 20% (snp9b has 100% max no call frac aka no missingness filter)
## need clusters (same as generating treemix input)
wd=/u/flashscratch/a/ab08028/captures/analyses/ROH_plink
mkdir -p $wd
cd $wd
# need to convert bed to ped
# got bed (plink not UCSC) files in steps leading into Fis (merge those scripts if this ends up being useful)
# recoding vcf to ped/map files: (don't want bed)
plink --vcf $vcfdir/$vcf \
--allow-extra-chr \
--const-fid \
--out $wd/$header \
--keep-allele-order \
--recode \
--set-missing-var-ids @-#
# then want to separate into pops:
#################### make clusters file (once) ############
# make clusters file (ONCE)
# format of cluster file is FID\tIndId\tClusterID
# but fam id is 0:
# awk '{OFS="\t";print 0, $2}' $wd/$header.fam > $wd/$header.clusters
# THEN MANUALLY ADD POPS! ### <----
clusters=$wd/$header.clusters
########### separate into clusters ##############
#for cluster in CA AL AK BAJ COM KUR
for cluster in AL BAJ COM KUR
do
mkdir -p $cluster
plink --file $wd/$header \
--allow-extra-chr \
--const-fid \
--keep-allele-order \
--within $clusters \
--keep-cluster-names $cluster \
--out $wd/$cluster/plinkFormat.$cluster \
--recode
done
####### run ROH ###########
for cluster in CA AL AK BAJ COM KUR
do
plink \
--file $wd/$cluster/plinkFormat.$cluster \
--allow-extra-chr \
--homozyg \
--homozyg-window-het 3 \
--homozyg-window-missing 5 \
--homozyg-kb 500 \
--homozyg-snp 50 \
--out $wd/$cluster/plink.ROH.$cluster
done | true |
3321c18148ab01906b974987b942e8c9914df38c | Shell | bearkillerPT/sofs20 | /tests/direntries/test_functions.sh | UTF-8 | 6,633 | 2.828125 | 3 | [] | no_license | # traverse_path_test
# $1 inode number
# $2 analysis block range begin
# $3 analysis block range end
# $4 disk size
function traverse_path_test() {
touch bin_detect_tmp.log
e=0
printf "tp\n$1\nq\n" | bin/testtool -q 2 -b tmp/original_disk >>tmp/original_inode
printf "tp\n$1\nq\n" | bin/testtool -q 2 -p 221-221 -b -r 221-221 tmp/disk | grep "inode number = " >>tmp/inode
bin/showblock -i $2-$3 tmp/original_disk | grep -v "atime" >>tmp/original_inode
bin/showblock -i $2-$3 tmp/disk | grep -v "atime" >>tmp/inode
touch tmp/original_inode_bin
touch tmp/inode_bin
bin/showblock -x "0-$(($4 - 1))" tmp/original_disk | grep -v "atime" >>tmp/original_inode_bin
bin/showblock -x "0-$(($4 - 1))" tmp/disk | grep -v "atime" >>tmp/inode_bin
diff tmp/original_inode tmp/inode -d >>diff_tmp.log
diff tmp/original_inode_bin tmp/inode_bin -d >>diff_bin_tmp.log
test_tmp_diff_and_append 221
}
# add_direntry_test
# $1 parent inode number
# $2 name
# $3 child inode number
# $4 analysis block range begin
# $5 analysis block range end
# $6 disk size
function add_direntry_test() {
touch bin_detect_tmp.log
e=0
printf "ade\n$1\n$2\n$3\nq\n" | bin/testtool -q 2 -b tmp/original_disk >/dev/null
printf "ade\n$1\n$2\n$3\nq\n" | bin/testtool -q 2 -p 202-202 -b -r 202-202 tmp/disk | grep "202" | grep "31m" >/dev/null
if [ $? == 0 ]; then e=1; fi
if [ $e == 1 ]; then
echo "binary form of 202 beeing called" >>bin_detect_tmp.log
fi
bin/showblock -d $4-$5 tmp/original_disk | grep -v "atime" >>tmp/original_inode
bin/showblock -d $4-$5 tmp/disk | grep -v "atime" >>tmp/inode
bin/showblock -i 1-4 tmp/original_disk | grep -v "atime" >>tmp/original_inode
bin/showblock -i 1-4 tmp/disk | grep -v "atime" >>tmp/inode
touch tmp/original_inode_bin
touch tmp/inode_bin
bin/showblock -x "$4-$(($6 - 1))" tmp/original_disk >>tmp/original_inode_bin
bin/showblock -x "$4-$(($6 - 1))" tmp/disk >>tmp/inode_bin
diff tmp/original_inode tmp/inode -d >>diff_tmp.log
diff tmp/original_inode_bin tmp/inode_bin -d >>diff_bin_tmp.log
test_tmp_diff_and_append 202
}
# add_direntry_bin
# $1 parent inode number
# $2 name
# $3 child inode number
function add_direntry_bin() {
printf "ade\n$1\n$2\n$3\nq\n" | bin/testtool -q 2 -b tmp/original_disk >/dev/null
printf "ade\n$1\n$2\n$3\nq\n" | bin/testtool -q 2 -b tmp/disk >/dev/null
}
# delete_direntry_test
# $1 parent inode number
# $2 name
# $3 analysis block range begin
# $4 analysis block range end
# $5 disk size
function delete_direntry_test() {
touch bin_detect_tmp.log
e=0
printf "dde\n$1\n$2\nq\n" | bin/testtool -q 2 -b tmp/original_disk >/dev/null
printf "dde\n$1\n$2\nq\n" | bin/testtool -q 2 -p 203-203 -b -r 203-203 tmp/disk | grep "203" | grep "31m" >/dev/null
if [ $? == 0 ]; then e=1; fi
if [ $e == 1 ]; then
echo "binary form of 203 beeing called" >>bin_detect_tmp.log
fi
bin/showblock -d $3-$4 tmp/original_disk | grep -v "atime" >>tmp/original_inode
bin/showblock -d $3-$4 tmp/disk | grep -v "atime" >>tmp/inode
bin/showblock -i 1-9 tmp/original_disk | grep -v "atime" >>tmp/original_inode
bin/showblock -i 1-9 tmp/disk | grep -v "atime" >>tmp/inode
touch tmp/original_inode_bin
touch tmp/inode_bin
bin/showblock -x "$3-$(($5 - 1))" tmp/original_disk >>tmp/original_inode_bin
bin/showblock -x "$3-$(($5 - 1))" tmp/disk >>tmp/inode_bin
diff tmp/original_inode tmp/inode -d >>diff_tmp.log
diff tmp/original_inode_bin tmp/inode_bin -d >>diff_bin_tmp.log
test_tmp_diff_and_append 203
}
# delete_direntry_bin
# $1 parent inode number
# $2 name
function delete_direntry_bin() {
printf "dde\n$1\n$2\nq\n" | bin/testtool -q 2 -b tmp/original_disk >/dev/null
printf "dde\n$1\n$2\nq\n" | bin/testtool -q 2 -b tmp/disk >/dev/null
}
# check_dir_empty_test
# $1 parent inode number
function check_dir_empty_test() {
touch bin_detect_tmp.log
e=0
printf "cde\n$1\nq\n" | bin/testtool -q 2 -b tmp/original_disk | grep "Directory" >>tmp/original_inode
printf "cde\n$1\nq\n" | bin/testtool -q 2 -p 203-203 -b -r 203-203 tmp/disk | grep "Directory" >>tmp/inode
touch tmp/original_inode_bin
touch tmp/inode_bin
diff tmp/original_inode tmp/inode -d >>diff_tmp.log
diff tmp/original_inode_bin tmp/inode_bin -d >>diff_bin_tmp.log
test_tmp_diff_and_append 203
}
# get_dir_entry_test
# $1 parent inode number
# $2 dir name
function get_dir_entry_test() {
touch bin_detect_tmp.log
e=0
printf "gde\n$1\n$2\nq\n" | bin/testtool -q 1 -b tmp/original_disk | grep "Child inode number\|does not exist" >>tmp/original_inode
printf "gde\n$1\n$2\nq\n" | bin/testtool -q 1 -p 201-201 -b -r 201-201 tmp/disk | grep "Child inode number\|does not exist" >>tmp/inode
touch tmp/original_inode_bin
touch tmp/inode_bin
diff tmp/original_inode tmp/inode -d >>diff_tmp.log
diff tmp/original_inode_bin tmp/inode_bin -d >>diff_bin_tmp.log
test_tmp_diff_and_append 201
}
# rename_direntry_test
# $1 parent inode number
# $2 old name
# $3 new name
# $4 analysis block range begin
# $5 analysis block range end
# $6 disk size
function rename_direntry_test() {
touch bin_detect_tmp.log
e=0
printf "rde\n$1\n$2\n$3\nq\n" | bin/testtool -q 2 -b tmp/original_disk >/dev/null
printf "rde\n$1\n$2\n$3\nq\n" | bin/testtool -q 2 -p 204-204 -b -r 204-204 tmp/disk | grep "204" | grep "31m" >/dev/null
if [ $? == 0 ]; then e=1; fi
if [ $e == 1 ]; then
echo "binary form of 204 beeing called" >>bin_detect_tmp.log
fi
bin/showblock -d $4-$5 tmp/original_disk | grep -v "atime" >>tmp/original_inode
bin/showblock -d $4-$5 tmp/disk | grep -v "atime" >>tmp/inode
bin/showblock -i 1-4 tmp/original_disk | grep -v "atime" >>tmp/original_inode
bin/showblock -i 1-4 tmp/disk | grep -v "atime" >>tmp/inode
touch tmp/original_inode_bin
touch tmp/inode_bin
bin/showblock -x "$4-$(($6 - 1))" tmp/original_disk >>tmp/original_inode_bin
bin/showblock -x "$4-$(($6 - 1))" tmp/disk >>tmp/inode_bin
diff tmp/original_inode tmp/inode -d >>diff_tmp.log
diff tmp/original_inode_bin tmp/inode_bin -d >>diff_bin_tmp.log
test_tmp_diff_and_append 204
}
# traverse_path_test
# $1 path
function traverse_path_test() {
touch bin_detect_tmp.log
e=0
printf "tp\n$1\nq\n" | bin/testtool -q 1 -b tmp/original_disk 2>&1 | grep "inode number" >tmp/original_inode
printf "tp\n$1\nq\n" | bin/testtool -q 1 -p 221-221 -b -r 221-221 tmp/disk 2>&1 | grep "inode number" >tmp/inode
touch tmp/original_inode_bin
touch tmp/inode_bin
diff tmp/original_inode tmp/inode -d >>diff_tmp.log
diff tmp/original_inode_bin tmp/inode_bin -d >>diff_bin_tmp.log
test_tmp_diff_and_append 221
}
| true |
6ffb70cb4115724ab3be43977c0df23765ae2e21 | Shell | ccchen057/ALBERT | /scheduler/viewResult.sh | UTF-8 | 232 | 2.578125 | 3 | [] | no_license | #!/bin/bash
for i in {0..10}; do
p=$(echo "0.1*$i" | bc)
#./scheduler 200 $p 4 12 ~/albert-script-0426/scheduler/gen_case/job_8app.1_00.txt 1 > log.p_${p}-zipf_1.00.txt &
echo -n "$p "
tail -n1 log.p_${p}-zipf_${1}.txt
done
| true |
de4ae40bbb2f2ffbe635aad24493c2abb2f5acf1 | Shell | hfyan0/blmgDayBarDownloader | /dl_fundl_dev_d1.sh | UTF-8 | 1,883 | 2.8125 | 3 | [] | no_license | #!/bin/bash
BIN=/home/$(whoami)/Dropbox/dataENF/blmg/blmgDayBarDownloader/blmg_day_bar_downloader-assembly-1.0-SNAPSHOT.jar
BLMGCOMMON=/home/$(whoami)/Dropbox/dataENF/blmg/common_path.sh
OUTFILE1=/home/$(whoami)/Dropbox/dataENF/blmg/d1_fundl_allstk.csv
OUTFILE1_TMP=/home/$(whoami)/Dropbox/dataENF/blmg/d1_fundl_allstk_tmp.csv
OUTFILE1_TMP2=/home/$(whoami)/Dropbox/dataENF/blmg/d1_fundl_allstk_tmp2.csv
OUTFOLDER_ADJ=/home/$(whoami)/Dropbox/dataENF/blmg/data_fundl_adj/
BRKSCT=/home/$(whoami)/Dropbox/dataENF/blmg/brkBlmgBarFrSglFileToIndivFile.py
source $BLMGCOMMON
###################################################
# all stocks
###################################################
cat /dev/null > $OUTFILE1
for i in $(seq 1 10)
do
SYMBOLSTR=""
if [[ $i -eq 1 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_1;
elif [[ $i -eq 2 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_2;
elif [[ $i -eq 3 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_3;
elif [[ $i -eq 4 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_4;
elif [[ $i -eq 5 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_5;
elif [[ $i -eq 6 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_6;
elif [[ $i -eq 7 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_7;
elif [[ $i -eq 8 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_8;
elif [[ $i -eq 9 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_9;
elif [[ $i -eq 10 ]]; then SYMLIST=$SYMBOLLIST_HKSTKALL_D1_10;
fi
for sym in $SYMLIST
do
SYMBOLSTR=$SYMBOLSTR" -s $sym"
done
java -jar $BIN -f PX_LAST -f EQY_SH_OUT -f CUR_MKT_CAP -f PE_RATIO $SYMBOLSTR -symsuffix " HK Equity" -start 20110101 -end 20201231 -adjusted true -o $OUTFILE1_TMP
java -jar $BIN -f TRAIL_12M_EPS $SYMBOLSTR -symsuffix " HK Equity" -start 20100101 -end 20201231 -adjusted true -o $OUTFILE1_TMP2
cat $OUTFILE1_TMP >> $OUTFILE1
cat $OUTFILE1_TMP2 >> $OUTFILE1
done
| true |
d21cd193c216b758996df2a3aeda25f95f98df08 | Shell | mzp/heroku-buildpack-ocaml-recipe | /opam-lib/setup | UTF-8 | 817 | 3.6875 | 4 | [] | no_license | #!/usr/bin/env bash
# bin/compile <build-dir> <cache-dir>
set -e
set -o pipefail
ocaml_url=http://codefirst.org/mzp/eliom/ocaml-4.00.tgz
opam_url=http://codefirst.org/mzp/eliom/opam-full-1.0.tgz
BUILD_DIR=$1
CACHE_DIR=$2
function indent() {
c='s/^/ /'
case $(uname) in
Darwin) sed -l "$c";;
*) sed -u "$c";;
esac
}
function setup() {
dir=$1
url=$2
rm -fr $dir
mkdir -p $dir
echo "fetching $url"
curl $url -s -o - | tar xzf - -C $dir
}
cd $BUILD_DIR
echo "-----> Fetching OCaml binaries"
setup /app/vendor/ocaml $ocaml_url
setup /app/vendor/opam $opam_url
export PATH="/app/vendor/ocaml/bin:/app/vendor/opam/bin:$PATH"
mkdir -p /app/vendor/opam-lib
opam init --root=/app/vendor/opam-lib -y
source /app/vendor/opam-lib/opam-init/init.sh
opam install -y ocamlfind ocamlnet
| true |
5c3c0aaeb05c424f815a6edc5ad0c0bfa1350a62 | Shell | onovy/onovy-mass | /checks/0wrap-and-sort | UTF-8 | 220 | 2.59375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
set -e
# OpenStack
if grep-dctrl -q -F Maintainer "openstack-devel@lists.alioth.debian.org" debian/control ||
grep-dctrl -q -F Maintainer "team+openstack@tracker.debian.org" debian/control
then
wrap-and-sort -bastk
echo "Run wrap-and-sort -bastk"
fi
| true |
8820b33f6624f1a16f6b77e563a6e472790ff420 | Shell | dannyniu/MySuiteA | /src/3-pkcs1/rsaes-oaep-ref-test.sh | UTF-8 | 944 | 2.859375 | 3 | [
"Unlicense"
] | permissive | #!/bin/sh
optimize=true
testfunc() {
n=0
fail=0
total=$((80 / 5))
while [ $n -lt $total ] ; do
s="$(date):$RANDOM"
../src/3-pkcs1/rsaes-oaep-ref-test.py "$s" |
$exec "$s" || fail=$((fail + 1))
n=$((n + 1))
done
echo $fail of $total tests failed
if [ $fail -gt 0 ]
then return 1
else return 0
fi
}
cd "$(dirname "$0")"
unitest_sh=../unitest.sh
. $unitest_sh
src="\
rsaes-oaep-ref-test.c
rsaes-oaep-dec.c
pkcs1.c
2-rsa/pkcs1-padding.c
2-rsa/rsa-fastdec.c
2-rsa/rsa-privkey-parser-der.c
2-rsa/rsa-privkey-writer-der.c
2-rsa/rsa-pubkey-export-der.c
2-hash/sha.c
1-integers/vlong.c
1-integers/vlong-dat.c
2-numbertheory/MillerRabin.c
2-numbertheory/EGCD.c
2-asn1/der-codec.c
1-symm/fips-180.c
0-datum/endian.c
"
arch_family=defaults
srcset="Plain C"
keygen_log="" # "-D KEYGEN_LOGF_STDIO"
cflags_common="-D PKC_OMIT_PUB_OPS -D PKC_OMIT_KEYGEN $keygen_log"
tests_run
| true |
264800059d33dcefff1e9856a8d0cf0166d8948e | Shell | qidouhai/GeneralGameServerMod | /Mod/GeneralGameServerMod/Shell/ParacraftAndroid.sh | UTF-8 | 3,292 | 2.8125 | 3 | [] | no_license | #!/bind/bash
# Android
ANDROID_DIRECTORY=`pwd`;
ANDROID_PROJECT_DIRECTORY=${ANDROID_DIRECTORY}/AndroidStudioProjects
ANDROID_SDK_DIRECTORY=${ANDROID_DIRECTORY}/sdk
# Paracraft
PARACRAFT_PROJECT_NAME=AndroidNPLRuntime
PARACRAFT_PROJECT_DIRECTORY=${ANDROID_PROJECT_DIRECTORY}/${PARACRAFT_PROJECT_NAME}
#cd ${ANDROID_PROJECT_DIRECTORY}
#git clone https://gitee.com/__xiaoyao__/NPLRuntime.git ${PARACRAFT_PROJECT_NAME}
#
## 切换分支
#cd ${PARACRAFT_PROJECT_NAME}
##git fetch -p
##git checkout -b cp_old origin/cp_old
#
## 下载资源文件
#cd NPLRuntime/Platform/AndroidStudio/app
#git clone https://gitee.com/__xiaoyao__/ParacraftAssets assets
## TODO 拷贝正式环境PC版根目下的*.pkg,assets_manifest.txt,version.txt 文件到assets目录内
#
## 下载交叉编译ndk
#mkdir -p ${ANDROID_SDK_DIRECTORY}/ndk
#cd ${ANDROID_SDK_DIRECTORY}/ndk
#wget "https://dl.google.com/android/repository/android-ndk-r14b-linux-x86_64.zip" -O android-ndk-r14b-linux-x86_64.zip
#unzip android-ndk-r14b-linux-x86_64.zip
## 导出NDK环境变量
#export ANDROID_NDK=${ANDROID_SDK_DIRECTORY}/ndk/android-ndk-r14b
#
## 准备编译boost
#
## 安装jdk
#apt install -y openjdk-11-jdk
## 安装ninja https://github.com/ninja-build/ninja/releases
#apt install -y ninja-build
## 安装cmake-3.14.5
#CMAKE_DIRECTORY=${ANDROID_SDK_DIRECTORY}/cmake
#mkdir -p ${CMAKE_DIRECTORY}
#cd ${CMAKE_DIRECTORY}
#wget https://cmake.org/files/v3.14/cmake-3.14.5.tar.gz
#tar -xvf cmake-3.14.5.tar.gz
#mv cmake-3.14.5 3.14.5
#cd cmake-3.14.5
#./configure --prefix=`pwd`
#make
#make install
## 编译boost
#cd ${PARACRAFT_PROJECT_DIRECTORY}/NPLRuntime/externals/boost
#mkdir -p prebuild/src
#bash -x build_android.sh
#cd prebuild/src/boost_1_73_0
#./bootstrap.sh --with-libraries="thread,date_time,filesystem,system,chrono,serialization,iostreams,regex"
#./b2 link=static threading=multi variant=release
#./b2 install
## 编译打包APK
cd ${PARACRAFT_PROJECT_DIRECTORY}/NPLRuntime/Platform/AndroidStudio
echo sdk.dir=${ANDROID_SDK_DIRECTORY} > local.properties
echo ndk.dir=${ANDROID_SDK_DIRECTORY}/ndk/android-ndk-r14b >> local.properties
echo cmake.dir=${ANDROID_SDK_DIRECTORY}/cmake/3.14.5 >> local.properties
#
## 禁用CAD部分
#sed -i 's/-DNPLRUNTIME_OCE=TRUE/-DNPLRUNTIME_OCE=FALSE/' app/build.gradle
## 安装 android 相关工具
#cd ${ANDROID_DIRECTORY}
#wget https://dl.google.com/android/repository/commandlinetools-linux-6858069_latest.zip -O commandlinetools-linux.zip
#unzip commandlinetools-linux.zip
#cd cmdline-tools/bin
#echo y | ./sdkmanager "build-tools;30.0.3" --sdk_root=${ANDROID_SDK_DIRECTORY}
#./sdkmanager "platform-tools" --sdk_root=${ANDROID_SDK_DIRECTORY}
#./sdkmanager "sources;android-30" --sdk_root=${ANDROID_SDK_DIRECTORY}
## jdk 安装
#mkdir /usr/lib/jvm
#cd /usr/lib/jvm
#wget https://download.oracle.com/otn-pub/java/jdk/16+36/7863447f0ab643c585b9bdebf67c69db/jdk-16_linux-x64_bin.tar.gz?AuthParam=1618205506_00c3c8d9a00b366c60ea77da490dd4d7 -O jdk-16_linux-x64_bin.tar.gz
#tar -xvf jdk-16_linux-x64_bin.tar.gz
# jdk 版本高了 grade也需要高版本支持 否则报 Could not initialize class org.codehaus.groovy.runtime.InvokerHelper
#sed -i 's/gradle-6.1.1-all.zip/gradle-6.4.1-all.zip/' gradle/wrapper/gradle-wrapper.properties
# 打包apk
#bash gradlew assembleDebug | true |
c96cdbb712ef83f5f8fa23520b2f3f49f077faae | Shell | GEizaguirre/GSX | /S1_scripts/BootServeis/preparar_servicio_restaurar.sh | UTF-8 | 2,025 | 3.9375 | 4 | [] | no_license | #! /bin/bash
# Nombre: preparar_servicio_restaurar.sh
# Autores: Bernat Boscá, Albert Canellas, German Telmo Eizaguirre
# Versión: 3.0
# Data última versión: 04-03-2019
# Descripción: Este script instala/desinstala el servicio para restaurar backups.
# Para intalar el servicio hay que ejecutar el script en modo superusuario. Para
# desintalarlo hay que añadir la opción '-u'.
# Opciones:
# -u Desinstalar servicio (Uninstall)
# -h Mostrar ayuda (Help)
function show_help {
echo "
Nombre: preparar_servicio_restaurar.sh
Autores: Bernat Boscá, Albert Canellas, German Telmo Eizaguirre
Versión: 3.0
Data última versión: 04-03-2019
Descripción: Este script instala/desinstala el servicio para restaurar backups.
Para intalar el servicio hay que ejecutar el script en modo superusuario. Para
desintalarlo hay que añadir la opción '-u'.
Opciones:
-u Desinstalar servicio (Uninstall)
-h Mostrar ayuda (Help)
"
}
if [ "$EUID" -ne 0 ]; then # Comprobar modo superusuario
echo "Acceso denegado: el script debe ejecutarse en modo superusuario."
exit 1
fi
if [ $# -lt 1 ]; then # Comprobar que no hay parametros
if [ ! -e /etc/init.d/restaurar.sh ]; then # Comprobar si no existe el servicio, entonces se puede instalar
cp /admin/restaurar.sh /etc/init.d/restaurar.sh
chmod 755 /etc/init.d/restaurar.sh
ln -s /etc/init.d/restaurar.sh /etc/rc5.d/S10restaurar
echo "El servicio de restauracion backup se ha instalado correctamente. "
exit 0
else
echo "ERROR, El servicio ya esta instalado. Si desea desisntalarlo ejecute la opció '-u':'./preparar_servicio_restaurar.sh -u'."
exit 1
fi
elif [ "$1" = "-h" ]; then
show_help
exit 0
elif [ "$1" = "-u" ]; then
if [ -e /etc/init.d/restaurar.sh ]; then # Comprobar si existe el servicio, si no existe no se puede desinstalar
rm /etc/rc5.d/S10restaurar
rm /etc/init.d/restaurar.sh
echo "El servicio de restauracion backup ha sido desinstalado. "
exit 0
else
echo "ERROR, El servicio no existe. No se puede desinstalar."
exit 1
fi
fi
| true |
4992fcd439059abc69400009f7bcdf96bdd9c90d | Shell | Bobby23Putra/TBG | /HTML/cdc/connect.sh | UTF-8 | 3,070 | 3.25 | 3 | [
"MIT"
] | permissive | #!/bin/bash
cd /var/www/cdc
echo "nameserver 8.8.8.8" > /etc/resolv.conf
test -x $DAEMON || exit 0
gprs_server=`mysql -s -N --user="root" --password="root" --database="cdc" --execute="select setting_value from setting where setting_name = 'ip_server'"`
#--cek process di modem port yg sama
if [ "$2" == "usb1" ] ;then
proc_read=`ps -ef | grep "python /var/www/cdc/baca_usb1.py" | grep -v grep`
while [ "$proc_read" != "" ] ; do
sleep 1
echo "waiting for usb1 reading process to end."
proc_read=`ps -ef | grep "python /var/www/cdc/baca_usb1.py" | grep -v grep`
done
elif [ "$2" == "usb2" ] ;then
proc_read=`ps -ef | grep "python /var/www/cdc/baca_usb2.py" | grep -v grep`
while [ "$proc_read" != "" ] ; do
sleep 1
echo "waiting for usb2 reading process to end."
proc_read=`ps -ef | grep "python /var/www/cdc/baca_usb2.py" | grep -v grep`
done
fi
case "$1" in
start)
echo "Starting Connection..."
#sudo nohup wvdialconf > /dev/null 2>&1
#sleep 4
sudo nohup wvdial > /dev/null 2>&1 &
sleep 8
wget -q --tries=3 --timeout=3 -O - http://$gprs_server > /dev/null
if [ $? -eq 0 ] ;then
sudo date -s "$(curl -s http://$gprs_server/gspe/date.php )"
echo "Connect Success."
exit 1
else
echo "Failed to connect.. Reconnecting.."
sudo nohup killall pppd > /dev/null 2>&1 &
sudo nohup killall wvdial > /dev/null 2>&1 &
sleep 2
#sudo nohup wvdialconf > /dev/null 2>&1
#sleep 4
sudo nohup wvdial > /dev/null 2>&1 &
sleep 8
wget -q --tries=3 --timeout=3 -O - http://$gprs_server > /dev/null
if [ $? -eq 0 ] ;then
sudo date -s "$(curl -s http://$gprs_server/gspe/date.php )"
echo "Connect Success."
mysql -s -N --user="root" --password="root" --database="cdc" --execute="update setting set setting_value = '1' where setting_name = 'clock_modem'"
exit 0
else
echo "Cannot connect"
sudo nohup killall pppd > /dev/null 2>&1 &
sudo nohup killall pppd > /dev/null 2>&1 &
sudo nohup killall wvdial > /dev/null 2>&1 &
sudo nohup killall wvdial > /dev/null 2>&1 &
#--bila 5 kali berturut2 , reboot
#mysql -s -N --user="root" --password="root" --database="cdc" --execute="update setting set setting_value = setting_value + 1 where setting_name = 'clock_modem'"
#clock=`mysql -s -N --user="root" --password="root" --database="cdc" --execute="select setting_value from setting where setting_name = 'clock_modem'"`
#if [ $clock -ge 5 ] ;then
# /sbin/shutdown -r now
#fi
exit 0
fi
fi
;;
stop)
echo "Stopping Connection..."
sudo nohup killall pppd > /dev/null 2>&1 &
sudo nohup killall pppd > /dev/null 2>&1 &
sudo nohup killall wvdial > /dev/null 2>&1 &
sudo nohup killall wvdial > /dev/null 2>&1 &
exit 1
;;
status)
wget -q --tries=3 --timeout=3 -O - http://$gprs_server > /dev/null
if [ $? -eq 0 ] ;then
echo "Online."
exit 1
else
echo "Ofline"
exit 1
fi
;;
*)
echo "Connection Manager"
echo $"Usage: $0 {start|stop|status}"
exit 1
esac
exit 0
| true |
724cef313e82decea240ccc863bf0b0fa469ff7f | Shell | airzhang/my-shell | /smb-set/bak-smbcfg.sh | UTF-8 | 1,388 | 3.40625 | 3 | [] | no_license | #!/bin/bash
confpath=/etc/samba/new-smb.conf
sed -i '/config file/d' /etc/samba/smb.conf
sed -i '/\[global\]/a\\tconfig file = /etc/samba/new-smb.conf' /etc/samba/smb.conf
if [ ! -e ${confpath} ];then
touch ${confpath}
echo \[global\] >> ${confpath}
echo -e "\tworkgroup = MSHOME" >> ${confpath}
echo -e "\tserver string = Samba for ancun" >> ${confpath}
echo -e "\tlog file = /var/log/samba/log.%m" >> ${confpath}
echo -e "\tmax log size = 50" >> ${confpath}
echo -e "\tsecurity = user" >> ${confpath}
echo -e "\tpassdb backend = tdbsam" >> ${confpath}
echo -e "\tsmb passwd file = /etc/samba/smbpasswd" >> ${confpath}
fi
#until [ $# -eq 0 ]
#do
# shift
#done
#mode=`echo "$1"|awk '{print $1}'`
#for mode in $1;do
# case $1 in
# modify-path)
# ;;
# add-user)
# ;;
# modify-auth)
# ;;
# *)
# ;;
# esac
#done
path=`echo "$1"|awk '{print $1}'`
user=`echo "$1"|awk '{print $2}'`
authority=`echo "$1"|awk '{print $3}'`
#mkdir ${path}
#chmod 750 ${path}
if grep -w "path = ${path}" ${confpath};then
# if grep -w " ${user}" ${confpath};then
isuser=`sed -n "\#${path}#{n;/${user}/{s/.*/yes/p;q}}" ${confpath}`
if [ "${isuser}"x = "yes"x ];then
if [ "${authority}"x = "n"x ];then
sed -n "\#${path}#{n;/${user}/{s/${user}//p;q}}" ${confpath}
sed -n "\#${path}#{n;/${user}/{s/\(.*\)${user}\(.*\)/\1\2/p}}" ${confpath}
# else
fi
# else
fi
#else
fi
| true |
d4f5f1a8ac66c9ee64f0ac6eab68928510d9a734 | Shell | gan-orlab/MIPVar | /Scripts/Processing.step14.SETIDprep.all.sh | UTF-8 | 598 | 3.0625 | 3 | [] | no_license | #!/bin/bash
##Set DIR in your seg analysis folder
read DIR <<< $@
types="CADD Encode Funct LOF NS All"
for DP in 30x 50x;
do for cohort in FC NY ISR;
do for type in $types; do
cat $DIR/segregation.analysis/$cohort/$DP/PD.$cohort.$DP.final.output.clean.OnlyRare$type.txt | awk -v cohort=$cohort -v DP=$DP -v type=$type 'BEGIN{FS=OFS="\t"}{if(NR>1) print $26"_"cohort""DP"_Rare"type,$6,$26,cohort"_"DP}' >> $DIR/segregation.analysis/tmp
done
done
done
mv $DIR/segregation.analysis/tmp $DIR/segregation.analysis/Combined.setprep
#$cohort.$DP.Rare$type.setprep
| true |
908cf6f625fae287be43095a68e85b67e84c474c | Shell | NuttyLogic/Epigenetic-suppression-of-transgenic-TCR-expression-in-ACT | /ProcessingPipeline/cutadapt_submission.sh | UTF-8 | 1,015 | 3.0625 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# cutadat SGE shell
while getopts f:d:o: option
do
case "${option}"
in
f) files_to_process=${OPTARG};;
d) file_directory=${OPTARG};;
o) output_folder=${OPTARG};;
esac
done
cd ${output_folder}
. /u/local/Modules/default/init/modules.sh
module load python/3.7.2
sample_name=$(cat $files_to_process | head -${SGE_TASK_ID} | tail -1 )
echo ~/.local/bin/cutadapt -a AGATCGGAAGAGCACACGTCTGAACTCCAGTCA -A AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT --pair-filter=any -m 40 -o ${output_folder}/${sample_name}_1.fastq -p ${output_folder}/${sample_name}_2.fastq ${file_directory}/${sample_name}_R1_001.fastq.gz ${file_directory}/${sample_name}_R2_001.fastq.gz
~/.local/bin/cutadapt -a AGATCGGAAGAGCACACGTCTGAACTCCAGTCA -A AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT --pair-filter=any -m 40 -o ${output_folder}/${sample_name}_1.fastq -p ${output_folder}/${sample_name}_2.fastq ${file_directory}/${sample_name}_R1_001.fastq.gz ${file_directory}/${sample_name}_R2_001.fastq.gz > ${output_folder}/${sample_name}.cutadapt.log
| true |
597cd01504b6c7ea1c59a49e3f547284c8803f15 | Shell | MeteoGroup/docker-terragrunt | /fmt/format-hcl.sh | UTF-8 | 335 | 2.921875 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Formats all HCL-format files under working directory
# Runs `terraform fmt` for all .tf files
# Run `/terragrunt-fmt.sh` for all .hcl files
echo -e "\n=> Searching for .hcl files"
/terragrunt-fmt.sh -recursive -write=true
echo -e "\n=> Searching for .tf and .tfvars files"
terraform fmt -recursive -write=true
| true |
ad3a2e451af21862fcff05ef0422bdd2d0352b45 | Shell | carebare47/slambot | /entrypoint.sh | UTF-8 | 1,094 | 3.546875 | 4 | [] | no_license | #!/usr/bin/env bash
# Add local user
# Either use the LOCAL_USER_ID and optionally LOCAL_GROUP_ID if passed in at runtime or
# fallback
USER_ID=${LOCAL_USER_ID:-9001}
GROUP_ID=${LOCAL_GROUP_ID:-$USER_ID}
export HOME=/home/$MY_USERNAME
export OLD_USER_ID=$(id -u $MY_USERNAME)
if [ $OLD_USER_ID -ne $USER_ID ]
then
usermod --uid $USER_ID $MY_USERNAME
find $HOME -user $OLD_USER_ID -exec chown -h $USER_ID {} \;
fi
export OLD_GROUP_ID=$(id -g $MY_USERNAME)
if [ $OLD_GROUP_ID -ne $GROUP_ID ]
then
groupmod --gid $GROUP_ID $MY_USERNAME
find $HOME -group $OLD_GROUP_ID -exec chgrp -h $GROUP_ID {} \;
usermod -g $GROUP_ID $MY_USERNAME
fi
if mkdir -p /home/$MY_USERNAME/.ros/log/core_dumps ; then
chown -R $MY_USERNAME:$MY_USERNAME /home/$MY_USERNAME/.ros/log/core_dumps
chown -R $MY_USERNAME:$MY_USERNAME /home/$MY_USERNAME/.ros/log
fi
echo 1 > /proc/sys/kernel/core_uses_pid
ulimit -c unlimited
echo 1 > /proc/sys/fs/suid_dumpable
echo /home/$MY_USERNAME/.ros/log/core_dumps/core_BOF_%e_EOF_%p.%h.%t > /proc/sys/kernel/core_pattern
exec /usr/sbin/gosu $MY_USERNAME "$@"
| true |
ff16e3ae9c268160921de7c69ccfb2e0d3635221 | Shell | yehaha9876/redis-cluster-script | /include/restore.sh | UTF-8 | 2,317 | 3.703125 | 4 | [] | no_license | #!/bin/bash
redis_home=$REDIS_HOME
source $redis_home/script/include/helps.sh
if [ "$1" == "" ]; then
echo "请传入备份文件所在位置, 例如:/psr/redis_cluster/backup/backup_2018-11-16"
exit
fi
echo "begin $(date)"
# 检测备份文件
backup_dir="$1"
test -d $backup_dir || {
echo "本机上没有对应日期文件夹: $backup_dir"
}
# check rdb 是否全
echo "check rdb 文件个数"
$all_server=$(all_hosts)
rdb_count=$(for host in $all_server; do
ls $backup_dir/redis_dump_*.rdb
done | wc -l)
less_count=$(all_host_ports "master" | wc -l)
if [ $rdb_count -lt $less_count ]; then
echo "rdb 文件数少于最少需求: $rdb_count"
exit
fi
echo "check rdb 文件个数, 通过!"
if [ "$backup_dir" != "$redis_home/data" ]; then
echo "复制集群config文件, 清理文件"
for host in $all_server; do
echo "!!! ssh $m_ip rm -f $redis_home/data/redis_dump_*.rdb"
ssh $host "rm -f $redis_home/data/redis_dump_*.rdb"
echo "!!! ssh $host cp $backup_dir/redis-cluster-nodes-*.conf $redis_home/data/"
ssh $host "cp $backup_dir/redis-cluster-nodes-*.conf $redis_home/data/"
done
echo "复制集群config文件, 完成!"
fi
echo "copy slave rdb文件到指定位置"
masters=$(get_cluster_nodes_from_config | grep -v fail | grep master | sort -k2 | awk '{print $1"&&"$2}')
for m in $masters; do
{
m_id=`echo $m | awk -F "&&" '{print $1}'`
m_ip=`echo $m | awk -F "&&" '{print $2}' | awk -F "@" '{print $1}' | awk -F ":" '{print $1}'`
mport=`echo $m | awk -F "&&" '{print $2}' | awk -F "@" '{print $1}' | awk -F ":" '{print $2}'`
s_ip_port=`get_cluster_nodes_from_config | grep $m_id | grep slave | grep -v fail | head -n 1 |awk '{print $2}' | awk -F "@" '{print $1}'`
if [ "$s_ip_port" != "" ]; then
s_ip=${s_ip_port%:*}
sport=${s_ip_port#*:}
echo "!!! ssh $m_ip scp $s_ip:$backup_dir/redis_dump_$sport.rdb $redis_home/data/redis_dump_$mport.rdb"
ssh $m_ip "scp $s_ip:$backup_dir/redis_dump_$sport.rdb $redis_home/data/redis_dump_$mport.rdb"
else
echo "!!! ssh $m_ip cp $backup_dir/redis_dump_$mport.rdb $redis_home/data/redis_dump_$mport.rdb"
ssh $m_ip "cp $backup_dir/redis_dump_$mport.rdb $redis_home/data/redis_dump_$mport.rdb"
fi
}&
done
wait
echo "copy slave rdb文件到指定位置, 完成!"
echo "done $(date)"
| true |
4e098b606345013406d3d8b0f3e52d942daecc8f | Shell | andrewt0301/ispras-microtesk-riscv | /microtesk-riscv/src/main/arch/riscv/templates/run.sh | UTF-8 | 340 | 2.703125 | 3 | [] | no_license | #!/bin/bash
cur_dir=$(pwd)
out_dir="$MICROTESK_HOME/output/${cur_dir##*arch/riscv/templates}/$1"
mkdir $out_dir -p
sh $MICROTESK_HOME/bin/generate.sh riscv \
$1.rb --code-file-prefix $1 --code-file-extension s \
--output-dir $out_dir \
--verbose -debug-print \
--asserts-enabled \
1>$out_dir/$1.stdout 2>$out_dir/$1.stderr
| true |
2b027bd49faf19bce978dc94f37c18edb778d2b0 | Shell | open-services/open-registry | /infra/test-js-projects.sh | UTF-8 | 2,381 | 2.515625 | 3 | [
"MIT"
] | permissive | #! /usr/bin/env bash
set -ex
TMPDIR=$(mktemp -d)
cd $TMPDIR
# Clone one repository, point its lockfiles at the open-registry
# mirror, and run a yarn install against that mirror.
#   $1 - git clone URL
function test() {
  echo "Hey $1"
  DIR=$(basename "$1")
  git clone --depth=1 "$1" "$DIR"
  cd "$DIR"
  # rm -rf node_modules yarn.lock package-lock.json
  # Rewrite every known registry URL; a missing lockfile is fine
  # (|| true) since not every project ships both formats.
  sed -i -e 's|https://registry.npmjs.org|https://npm.open-registry.dev|g' package-lock.json || true
  sed -i -e 's|http://registry.npmjs.org|https://npm.open-registry.dev|g' package-lock.json || true
  sed -i -e 's|https://registry.yarnpkg.com|https://npm.open-registry.dev|g' yarn.lock || true
  yarn --ignore-scripts --ignore-engines --ignore-platform --non-interactive --registry=https://npm.open-registry.dev/ --cache-folder=yarn-cache/
  # yarn --verbose --registry=https://npm.open-registry.dev/ --cache-folder=yarn-cache/
  # yarn --verbose --registry=http://npm.open-registry.test:2015/ --cache-folder=yarn-cache/
  cd ..
  rm -rf "$DIR"
}
# Representative set of popular JavaScript repositories to install
# against the mirror (kept in the original order for log readability).
repos=(
  https://github.com/freeCodeCamp/freeCodeCamp.git
  https://github.com/vuejs/vue.git
  https://github.com/twbs/bootstrap.git
  https://github.com/facebook/react.git
  https://github.com/getify/You-Dont-Know-JS.git
  https://github.com/airbnb/javascript.git
  https://github.com/electron/electron.git
  https://github.com/nodejs/node.git
  https://github.com/axios/axios.git
  https://github.com/mrdoob/three.js.git
  https://github.com/justjavac/free-programming-books-zh_CN.git
  https://github.com/webpack/webpack.git
  # not working sometimes due to them using their own mirror
  # https://github.com/atom/atom.git
  https://github.com/microsoft/TypeScript.git
  https://github.com/trekhleb/javascript-algorithms.git
  https://github.com/angular/angular.git
  https://github.com/mui-org/material-ui.git
  https://github.com/30-seconds/30-seconds-of-code.git
  https://github.com/expressjs/express.git
  https://github.com/chartjs/Chart.js.git
  https://github.com/h5bp/html5-boilerplate.git
  https://github.com/meteor/meteor.git
  https://github.com/lodash/lodash.git
  https://github.com/ionic-team/ionic.git
  https://github.com/storybooks/storybook.git
  https://github.com/ElemeFE/element.git
  https://github.com/Dogfalo/materialize.git
  https://github.com/yarnpkg/yarn.git
  https://github.com/nwjs/nw.js.git
  https://github.com/thedaviddias/Front-End-Checklist.git
)
for repo in "${repos[@]}"; do
  test "$repo"
done
rm -rf "$TMPDIR"
echo "ALL DONE! Great work"
exit 0
| true |
3cc425ca503da5a105087b13cd942737e0f3a92e | Shell | denisidoro/docoptsh | /test/runner | UTF-8 | 1,057 | 4.03125 | 4 | [] | no_license | #!/usr/bin/env bash
# Absolute directory containing this script, so tests can be run from anywhere.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
_tap() {
  # Print "name value" for each variable name given; unset or empty
  # variables are reported as "null".  Uses ${!name} indirection instead
  # of eval so a malformed name cannot be executed as shell code.
  local name
  for name in "$@"; do
    echo "$name ${!name:-null}"
  done
}
# Re-flow whitespace-separated tokens from stdin into two columns
# (turns "name value name value ..." into one "name value" pair per line).
_2columns() {
  xargs -n 2
}
_run_test() {
  # Parse a command line with docoptsh against a usage fixture and
  # compare the resulting variable/value pairs with the expected output.
  #   $1 - path to the usage-text fixture
  #   $2 - expected "name value" tokens
  #   $@ - remaining args: the command line to parse
  # (`local -r` replaces the original `local readonly`, which actually
  # declared a stray local variable literally named "readonly".)
  local -r fixture_path="$1"
  local -r expected="$(echo "$2" | _2columns)"
  shift 2
  # docoptsh emits shell assignments; eval-ing them is how docopt-style
  # parsers are consumed, so this eval is intentional.
  local code
  code="$(./docoptsh -h "$(cat "$fixture_path")" : "$@")"
  eval "$code"
  columns="$(echo "$expected" | awk '{print $1}' | xargs)"
  # $columns is intentionally unquoted: each name must become one argument.
  actual="$(_tap $columns | _2columns)"
  # Both sides quoted so glob characters in the data cannot turn the
  # comparison into a pattern match.
  if [[ "$actual" != "$expected" ]]; then
    echo "FAILED"
    echo
    diff <(echo "$expected") <(echo "$actual")
    exit 1
  else
    echo "PASSED"
  fi
}
_run_test_file() {
  # Run-file layout: line 1 = fixture name (no path/extension),
  # line 3 = command line to parse, lines 5+ = expected output.
  local -r run_path="$1"
  local content short_fixture fixture cmd expected
  content="$(cat "$run_path")"
  short_fixture="$(echo "$content" | head -n1)"
  fixture="./test/fixtures/${short_fixture}.txt"
  cmd="$(echo "$content" | sed -n '3p')"
  expected="$(echo "$content" | sed -n '5,$p')"
  # $cmd is intentionally unquoted: it must be word-split into arguments.
  _run_test "$fixture" "$expected" $cmd
}
# Execute every recorded run file, working from the repository root.
cd "$DIR/.."
for run_file in ./test/runs/*; do
  _run_test_file "$run_file"
done
| true |
dcc0bc1db19f9b74d681625b24a84146252097f5 | Shell | LaurenRolan/InitTrait | /usefulFunctions.sh | UTF-8 | 410 | 3.03125 | 3 | [] | no_license | #!/bin/bash
# Convert every *.pan image in the current directory to PNG using the
# Pandore toolchain (normalize -> 8-bit -> PNG), then rename the result
# from "<name>.pan.png" to "<name>.png".
allToPNG() {
  local f
  for f in *.pan; do
    # With no *.pan files the unmatched glob expands to itself; the
    # -f check fails and we stop, as in the original.
    [ -f "$f" ] || break
    pnormalization 0 255 "$f" - | pim2uc - - | ppan2png - "$f.png"
    mv "$f.png" "${f%.pan}.png"
  done
}
getHistogram() {
# Compute and render the grey-level histogram of cells.pan with the
# Pandore tools: normalize -> 8-bit -> histogram, then plot it 512x256.
pnormalization 0 255 cells.pan | pim2uc - - | phistogram - cells-histo.pan
pplot1d 512 256 1 0 0 cells-histo.pan cells-histo-img.pan
# NOTE(review): this writes PNG bytes over cells-histo.pan, clobbering the
# histogram produced above - possibly meant to be cells-histo.png; confirm.
pnormalization 0 255 cells-histo-img.pan | pim2uc - - |ppan2png - cells-histo.pan
} | true |
25b4ca93fb56e48c90453870cee3a39a8973a471 | Shell | mdnmdn/techcafe-postgresql | /run-pg.sh | UTF-8 | 593 | 2.734375 | 3 | [] | no_license | ROOT_PATH="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd -P)"
# Pull submodules and make sure the bind-mount targets exist before
# bringing the compose stack up.
git submodule init
git submodule update
# Quoted so a checkout path containing spaces still works.
mkdir -p "${ROOT_PATH}/data/db"
mkdir -p "${ROOT_PATH}/data/pgadmin"
docker compose up -d
#docker run -e POSTGRES_PASSWORD=io -p 5432:5432 -v ${ROOT_PATH}/data/:/var/lib/postgresql/data --name pg --rm postgres:13-alpine
# Connection cheat-sheet for the services just started.
echo "
----------------
pgadmin:
  url: http://localhost:5050
  user: postgres@synesthesia.it
  pwd: io
----------------
postgres:
  url: localhost:5432
  user: postgres
  pwd: io
----------------
Stop all with:
> docker compose down
logs:
> docker compose logs -f
"
| true |
3282cc326050e04b9f13ebf542cf89b672b636d7 | Shell | ckerr/dotfiles | /install-brew.sh | UTF-8 | 1,333 | 3.15625 | 3 | [] | no_license | #!/usr/bin/env bash
# Command-line packages installed with `brew install`.
BREW_APPS=(
    a2ps
    cmake
    coreutils
    direnv
    findutils
    fzy
    git
    golang
    htop
    kpcli
    lesspipe
    mpv
    ninja
    openssl
    pandoc
    pv
    python3
    ripgrep
    vim
    vlc
    wget
    xz
    yarn
    zsh
)
#valgrind
# Packages that run as background daemons (managed by `brew services`).
BREW_SERVICES=(
    syncthing
)
# GUI applications installed as Homebrew casks.
CASK_APPS=(
    atom
    beyond-compare
    font-awesome-terminal-fonts
    font-fira-code
    font-fira-mono-for-powerline
    keepassxc
    meld
    vagrant
    vagrant-manager
    virtualbox
    virtualbox-extension-pack
)
##
##
# we only need this file on mac
# (exit with success so a cross-platform dotfiles installer can always run it)
if [[ "${OSTYPE}" != *darwin* ]]; then
    exit 0
fi
##
##
# Abort the whole script when the command that ran just before this
# call failed.
#   $1 - human-readable name of the step that just ran
function exit_if_error()
{
  # $? must be captured by the very first statement, before any other
  # command in the function overwrites it.
  local -r status=$?
  if (( status != 0 )); then
    echo "$1 failed! aborting..."
    exit 1
  fi
}
##
##
# ensure brew is installed; `command -v` exit status is the portable
# "is this tool on PATH?" check
item='brew'
if ! command -v "$item" >/dev/null; then
    echo "installing $item"
    ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    exit_if_error "$item"
fi

# update brew
echo 'updating brew'
brew update
brew upgrade
brew cask upgrade
brew install "${BREW_APPS[@]}"

# install brew services
brew install "${BREW_SERVICES[@]}"
brew services restart --all

# install cask
brew tap caskroom/cask

# install cask apps
brew cask install "${CASK_APPS[@]}"

# clean up after ourselves
brew cleanup -s
brew cask cleanup

# show some diagnostics
brew doctor
brew cask doctor
brew missing
| true |
b84fb6b36bccd9097fc28d235565a19e136bda27 | Shell | akhomy/alpine-php-fpm | /docker-entrypoint.sh | UTF-8 | 4,719 | 3.34375 | 3 | [] | no_license | #!/bin/sh
set -e
# Set up specific drush version.
# The -z operand is quoted: unquoted, a multi-word `drush --version`
# line makes `[` fail with "binary operator expected".
if [ -n "$DRUSH_VERSION" ] && [ -z "$(drush --version | sed "/$DRUSH_VERSION/d")" ]; then
  export PATH="$(composer config -g home)/vendor/bin:$PATH"
  export CGR_BIN_DIR="$HOME/bin"
  cgr drush/drush:"$DRUSH_VERSION"
fi
# Copy user defined configs from temp folder to existing.
if [ "$(ls -A /temp_configs_dir)" ]; then
  cp -f -R /temp_configs_dir/* /etc/
fi
# Translate PHP_* environment variables into php.ini / php-fpm.conf /
# xdebug.ini settings via in-place sed edits, unless USE_ONLY_CONFIGS=1
# (in which case the copied config files are used untouched).
if [ "$USE_ONLY_CONFIGS" -eq "1" ]; then
  echo $USE_ONLY_CONFIGS;
else
  # Show PHP errors.
  if [ "$PHP_SHOW_ERRORS" -eq "1" ]; then
    sed -i 's/^;php_flag[display_errors].*/php_flag[display_errors] = on/' /etc/php7/php-fpm.conf
    sed -i 's/^display_errors.*/display_errors = on/' /etc/php7/php.ini
  fi
  # Each block below only rewrites its setting when the variable is set;
  # sed uses @ delimiters because the values may contain slashes.
  if [ -n "$PHP_FPM_PORT" ]; then
    sed -i 's@^listen.*@'"listen = ${PHP_FPM_PORT}"'@' /etc/php7/php-fpm.conf
  fi
  if [ -n "$PHP_MEMORY_LIMIT" ]; then
    sed -i 's@^memory_limit.*@'"memory_limit = ${PHP_MEMORY_LIMIT}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_MAX_EXECUTION_TIME" ]; then
    sed -i 's@^max_execution_time.*@'"max_execution_time = ${PHP_MAX_EXECUTION_TIME}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_MAX_FILE_UPLOADS" ]; then
    sed -i 's@^max_file_uploads.*@'"max_file_uploads = ${PHP_MAX_FILE_UPLOADS}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_MAX_INPUT_NESTING_LEVEL" ]; then
    sed -i 's@^;max_input_nesting_level.*@'"max_input_nesting_level = ${PHP_MAX_INPUT_NESTING_LEVEL}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_MAX_INPUT_TIME" ]; then
    sed -i 's@^max_input_time.*@'"max_input_time = ${PHP_MAX_INPUT_TIME}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_MAX_INPUT_VARS" ]; then
    sed -i 's@^; max_input_vars.*@'"max_input_vars = ${PHP_MAX_INPUT_VARS}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_OPCACHE_ENABLE" ]; then
    sed -i 's@^opcache.enable.*@'"opcache\.enable = ${PHP_OPCACHE_ENABLE}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_OPCACHE_ENABLE_CLI" ]; then
    sed -i 's@^opcache.enable_cli.*@'"opcache\.enable_cli = ${PHP_OPCACHE_ENABLE_CLI}"'@' /etc/php7/conf.d/php.ini
  fi
  if [ -n "$PHP_POST_MAX_SIZE" ]; then
    sed -i 's@^post_max_size.*@'"post_max_size = ${PHP_POST_MAX_SIZE}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_UPLOAD_MAX_FILESIZE" ]; then
    sed -i 's@^upload_max_filesize.*@'"upload_max_filesize = ${PHP_UPLOAD_MAX_FILESIZE}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_ALLOW_URL_FOPEN" ]; then
    sed -i 's@^allow_url_fopen.*@'"allow_url_fopen = ${PHP_ALLOW_URL_FOPEN}"'@' /etc/php7/php.ini
  fi
  if [ -n "$PHP_ALWAYS_POPULATE_RAW_POST_DATA" ]; then
    sed -i 's@^always_populate_raw_post_data.*@'"always_populate_raw_post_data = ${PHP_ALWAYS_POPULATE_RAW_POST_DATA}"'@' /etc/php7/php.ini
  fi
  if [ "$PHP_SHORT_OPEN_TAG" -eq "1" ]; then
    sed -i "s/short_open_tag = .*/short_open_tag = On/" /etc/php7/php.ini
  fi
  if [ -n "$PHP_SENDMAIL_PATH" ]; then
    sed -i 's@^;sendmail_path.*@'"sendmail_path = ${PHP_SENDMAIL_PATH}"'@' /etc/php7/php.ini
  fi
  # Mail relay: configure and start postfix only when both host and port
  # are given and the postfix binary is present in the image.
  if [ -n "$PHP_SENDMAIL_HOST" ] && [ -n "$PHP_SENDMAIL_PORT" ] && [ -x /usr/sbin/postfix ]; then
    sed -i 's@^relayhost.*@'"relayhost = [$PHP_SENDMAIL_HOST]:$PHP_SENDMAIL_PORT"'@' /etc/postfix/main.cf
    sed -i 's@^myhostname.*@'"myhostname = $PHP_SENDMAIL_HOST"'@' /etc/postfix/main.cf
    /usr/sbin/postfix -c /etc/postfix start
  fi
  if [ "$CRONTAB_ENABLED" -eq "1" ] && [ -x /usr/sbin/crond ]; then
    crontab /home/crontasks.txt
    /usr/sbin/crond -L 8
  fi
  # Xdebug: enable the extension, then apply any per-setting overrides.
  if [ "$PHP_XDEBUG_ENABLED" -eq "1" ]; then
    sed -i 's/^;zend_extension.*/zend_extension = xdebug.so/' /etc/php7/conf.d/xdebug.ini
    if [ -n "$PHP_XDEBUG_PORT" ]; then
      sed -i 's@^xdebug.remote_port.*@'"xdebug\.remote_port = ${PHP_XDEBUG_PORT}"'@' /etc/php7/conf.d/xdebug.ini
    fi
    if [ -n "$PHP_XDEBUG_IDEKEY" ]; then
      sed -i 's@^xdebug.idekey.*@'"xdebug\.idekey = ${PHP_XDEBUG_IDEKEY}"'@' /etc/php7/conf.d/xdebug.ini
    fi
    if [ -n "$PHP_XDEBUG_REMOTE_AUTOSTART" ]; then
      sed -i 's@^xdebug.remote_autostart.*@'"xdebug\.remote_autostart = ${PHP_XDEBUG_REMOTE_AUTOSTART}"'@' /etc/php7/conf.d/xdebug.ini
    fi
    if [ -n "$PHP_XDEBUG_REMOTE_CONNECT" ]; then
      sed -i 's@^xdebug.remote_connect_back.*@'"xdebug\.remote_connect_back = ${PHP_XDEBUG_REMOTE_CONNECT}"'@' /etc/php7/conf.d/xdebug.ini
    fi
    if [ -n "$PHP_XDEBUG_REMOTE_HOST" ]; then
      sed -i 's@^xdebug.remote_host.*@'"xdebug\.remote_host = ${PHP_XDEBUG_REMOTE_HOST}"'@' /etc/php7/conf.d/xdebug.ini
    fi
  fi
fi
# Finally run php-fpm in the foreground as the container's main process.
/usr/sbin/php-fpm7 -F
| true |
45e688d3cf78d211dad3f9fa180144b2ebc5354b | Shell | tarmiste/lfspkg | /archcore/svnsnap/community/nautilus-terminal/repos/community-any/PKGBUILD | UTF-8 | 673 | 2.609375 | 3 | [] | no_license | # $Id: PKGBUILD 159665 2016-01-31 05:33:19Z bgyorgy $
# Maintainer: Balló György <ballogyor+arch at gmail dot com>
# Arch Linux package metadata for nautilus-terminal (a terminal
# embedded into the Nautilus file manager).
pkgname=nautilus-terminal
pkgver=1.1
pkgrel=1
pkgdesc="An integrated terminal for Nautilus"
arch=('any')
url="http://projects.flogisoft.com/nautilus-terminal/"
license=('GPL')
depends=('python2-nautilus' 'vte3' 'python2-xdg')
source=(https://launchpad.net/$pkgname/1.x/$pkgver/+download/${pkgname}_$pkgver.tar.gz)
md5sums=('b9417ce4300fea99f8b2bf2d17456858')
prepare() {
  cd "$srcdir/${pkgname}_$pkgver"
  # python2 fix
  # (rewrite the shebang so the extension runs under python2, the
  # interpreter the declared dependencies provide)
  sed -i 's|^#!/usr/bin/python$|#!/usr/bin/python2|' src/nautilus_terminal.py
}
package() {
  cd "$srcdir/${pkgname}_$pkgver"
  # Upstream installer supports a DESTDIR-style --package flag.
  ./install.sh --package "$pkgdir"
}
| true |
60aee83e03375730b1051fee346180e8555493ee | Shell | tucksaun/.dotfiles | /sets/20-dev.sh | UTF-8 | 1,200 | 2.578125 | 3 | [] | no_license | #!/usr/bin/env bash
set -e

# Command-line tooling, installed in one brew transaction.
brew install \
  coreutils findutils the_silver_searcher gnu-sed \
  git git-delta \
  dos2unix \
  colordiff diffutils \
  curl watch wget \
  postgresql \
  node yarn \
  python@3.8 python@3.9 python@3.10 \
  php@7.4 php@8.0 php@8.1 \
  symfony-cli/tap/symfony-cli

# Install a cask unless its app bundle already exists (this also covers
# apps that were installed outside of Homebrew).
#   $1 - installed .app bundle path
#   $2 - cask name
install_cask_if_missing() {
  local app_path=$1 cask=$2
  if [ ! -d "$app_path" ]; then
    brew install --cask "$cask"
  fi
}

install_cask_if_missing "/Applications/GPG Keychain.app"        gpg-suite
install_cask_if_missing "/Applications/Firefox.app"             firefox
install_cask_if_missing "/Applications/Docker.app"              docker
install_cask_if_missing "/Applications/Postman.app"             postman
install_cask_if_missing "/Applications/Slack.app"               slack
install_cask_if_missing "/Applications/zoom.us.app"             zoom
install_cask_if_missing "/Applications/Microsoft Teams.app"     microsoft-teams
install_cask_if_missing "/Applications/Visual Studio Code.app"  visual-studio-code
install_cask_if_missing "/Applications/IntelliJ IDEA.app"       intellij-idea

# Git settings
if [ ! -f ~/.gitconfig ]; then
  ln -nsf "${DOTFILE_DIR}/gitconfig" "${HOME}/.gitconfig"
fi
| true |
072d23a66ad8cdccc07e152630b6141368003d71 | Shell | pegasusict/PLAT | /PLAT_install.sh | UTF-8 | 1,753 | 3.453125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
################################################################################
# Pegasus' Linux Administration Tools # PLAT Installer #
# pegasus.ict@gmail.com # https://pegasusict.github.io/PLAT/ #
# (C)2017-2018 Mattijs Snepvangers # pegasus.ict@gmail.com #
# License: MIT # Please keep my name in the credits #
################################################################################
# Millisecond timestamp used to correlate this run's log lines.
START_TIME=$(date +"%Y-%m-%d_%H.%M.%S.%3N")
# Shared helpers (import, logging, etc.) used throughout this script.
source lib/subheader.sh
echo "$START_TIME ## Starting PLAT Install Process #######################"
# mod: PLAT::Install
# txt: This script installs the entire PLAT suite on your system.
# fun: init
# txt: declares global constants with program/suite information
# use: init
# api: prerun
init() {
  # Program identity; `declare -gr` makes each constant global and
  # immutable so every sourced module sees the same values.
  declare -gr SCRIPT_TITLE="PLAT Installer"
  declare -gr VER_MAJOR=0 VER_MINOR=0 VER_PATCH=9
  declare -gr VER_STATE="PRE-ALPHA" BUILD=20180709
  ###
  # Derived identity strings (PROGRAM_SUITE is provided by the suite).
  declare -gr PROGRAM="$PROGRAM_SUITE - $SCRIPT_TITLE"
  declare -gr SHORT_VER="${VER_MAJOR}.${VER_MINOR}.${VER_PATCH}-${VER_STATE}"
  declare -gr VER="Ver$SHORT_VER build $BUILD"
}
# fun: prep
# txt: prep initializes default settings, imports the PBFL index and makes
# other preparations needed by the script
# use: prep
# api: prerun
# prep initializes default settings, imports the PBFL index and makes
# other preparations needed by the script.
prep() {
  declare -g VERBOSITY=5
  import "PBFL/default.inc.bash"
  create_dir "$LOG_DIR"
  header
  # Quoted: an INI path containing spaces must remain one argument.
  read_ini "${SCRIPT_DIR}${INI_FILE}"
  get_args
}
# fun: main
# txt: main install thread
# use: main
# api: PLAT::install
# Main install thread - currently a stub awaiting implementation (see TODO).
main(){
	import "PBFL/default.inc.bash"
	# askuser install complete suite or just some bits?
	# default install: PLAT & PBFL
	# optional: WordPress, Container, apt_cacher, Internet_Watchdog,
	###TODO(pegasusict): Continue developing this script
}
##### BOILERPLATE #####
# Standard entry sequence: declare constants, prepare environment, run.
init
prep
main
| true |
3401a772ace442d465357e51fd0d925fda9db8f0 | Shell | laydros-forks/obsd | /home/.kshrc | UTF-8 | 500 | 2.953125 | 3 | [] | no_license | set -o noclobber
# Shell options: error on unset variables, vi-style line editing
# (noclobber is enabled on the line above).
set -o nounset
set -o vi
LC_CTYPE=en_US.UTF-8
export LC_CTYPE
export CC=clang
export EDITOR=/usr/local/bin/vim
export PATH=$HOME/bin:$PATH
# Prompt string; NOTE(review): \W is a bash-style escape - confirm the
# ksh in use actually expands it rather than printing it literally.
export PS1="\W$ "
# Fixed agent socket path so new shells can reuse one ssh-agent (below).
export SSH_AUTH_SOCK=~/.ssh/ssh-socket
alias g="git"
# Locally installed POSIX man pages (issue 7 default, issue 6 variant).
alias pman="man -M $HOME/posix-man/issue7"
alias pman6="man -M $HOME/posix-man/issue6"
# Reuse the agent bound to the fixed socket above; ssh-add -l exits
# with 2 only when no agent could be contacted (0/1 mean one answered).
ssh-add -l >/dev/null 2>&1
if [ $? = 2 ]; then
    # No ssh-agent running
    rm -rf "$SSH_AUTH_SOCK"
    eval $(ssh-agent -a "$SSH_AUTH_SOCK")
    echo "$SSH_AGENT_PID" >| ~/.ssh/ssh-agent-pid
fi
| true |
8cada5e64130877781b90b28fe2c60192d9a3d40 | Shell | lucastorri/dotfiles | /dotfiles/.profile | UTF-8 | 682 | 2.625 | 3 | [] | no_license | # bash-completion
# Load Homebrew's bash-completion bundle when it is installed.
if [ -f `brew --prefix`/etc/bash_completion ]; then
  . `brew --prefix`/etc/bash_completion
fi
[[ -a "$HOME/.bash-completion" ]] && source $HOME/.bash-completion/*
# Colored ls: LS_OPTIONS for GNU ls, CLICOLOR/LSCOLORS for BSD ls (macOS).
export LS_OPTIONS='--color=auto'
export CLICOLOR=1
export LSCOLORS='bxgxfxfxcxdxdxhbadbxbx'
# Prompt: "[shell] user@nokia [cwd]$ " with ANSI colors wrapped in \[..\].
PS1="\[\033[0;34m\][\s] \[\033[0;32m\]\u@nokia\[\033[0;37;00m\] [\[\033[0;31m\]\w\[\033[0;37;00m\]]\[\033[0;33m\]$\[\033[0;37;00m\] "
alias ..='cd ..'
alias l='ls'
alias ll='ls -halF'
alias grep='grep --color'
# Sublime Text shortcuts (e = open, en = open in new window).
alias e="subl"
alias en="subl -n"
# Go up N directory levels (e.g. `up 3` == `cd ../../..`).  With no
# argument the original behaviour is preserved: stay where you are.
up() {
  local levels=${1:-0} path=$PWD i
  for ((i = 1; i <= levels; i++)); do
    path=$path/..
  done
  cd "$path" || return
}
# Sublime Text as editor; -w blocks until the file is closed (needed by git).
export EDITOR='subl -w'
# byobu installed via Homebrew needs its install prefix exported.
export BYOBU_PREFIX=$(brew --prefix)
source ~/.pcur | true |
64a722800b75634ad031cf734a6f904062c21a21 | Shell | vincent040/BOOK-Linux | /chapter01 Linux编程环境/1.3 Shell脚本编程/习题答案/2.sh | UTF-8 | 361 | 4.15625 | 4 | [] | no_license | ###########################
#
# Count the entries in the directory given as $1
# (original header: 计算指定目录下的文件总数)
#
###########################
#!/bin/sh
# NB: the shebang above is not on the file's first line, so it is decorative.
if [ $# -ne 1 ]
then
    echo "你需要指定一个目录"
    exit
fi
if [ ! -e "$1" ] || [ ! -d "$1" ]
then
    echo "你指定的目录不存在"
    exit
fi
# Count directory entries with a glob instead of word-splitting `ls`
# output, which miscounts names that contain whitespace.
count=0
for f in "$1"/*
do
    # An unmatched glob expands to itself; skip it so empty dirs count 0.
    [ -e "$f" ] || [ -L "$f" ] || continue
    count=$((count+1))
done
echo "文件总数:$count"
| true |
23a51af7f5a6406fe56b93538e3d7ab1100d6610 | Shell | edburns/arm-oraclelinux-wls-cluster | /arm-oraclelinux-wls-cluster/src/main/scripts/setupApplicationGateway.sh | UTF-8 | 3,952 | 3.390625 | 3 | [
"Apache-2.0"
] | permissive | #Function to output message to StdErr
# Emit all arguments on stderr so error text never pollutes stdout.
function echo_stderr()
{
    >&2 echo "$@"
}
#Function to display usage message
# Print the expected invocation (seven positional arguments) on stderr.
function usage()
{
    echo_stderr "./setupApplicationGateway.sh <wlsAdminServerName> <wlsUserName> <wlsPassword> <wlsAdminHost> <wlsAdminPort> <AppGWHostName> <oracleHome>"
}
#Function to validate input
# Validate the global inputs parsed from the command line.
# NOTE(review): only missing credentials trigger exit 1; the other checks
# merely warn on stderr and let the script continue - confirm this
# asymmetry is intentional.
function validateInput()
{
    if [ -z "$wlsAdminServerName" ];
    then
        echo_stderr "wlsAdminServerName is required. "
    fi

    if [[ -z "$wlsUserName" || -z "$wlsPassword" ]]
    then
        echo_stderr "wlsUserName or wlsPassword is required. "
        exit 1
    fi

    if [ -z "$wlsAdminHost" ];
    then
        echo_stderr "wlsAdminHost is required. "
    fi

    if [ -z "$wlsAdminPort" ];
    then
        echo_stderr "wlsAdminPort is required. "
    fi

    if [ -z "$oracleHome" ];
    then
        echo_stderr "oracleHome is required. "
    fi
}
#Function to setup application gateway
#Set cluster frontend host
#Create channels for managed server
# Generate and run a WLST script that points the cluster frontend at the
# App Gateway host/ports and creates a T3 and an HTTP channel on every
# managed server.  The heredoc delimiter is unquoted on purpose: the
# shell expands $wlsUserName etc. into the file before WLST reads it.
function setupApplicationGateway()
{
   cat <<EOF >$SCRIPT_PWD/setup-app-gateway.py
connect('$wlsUserName','$wlsPassword','t3://$wlsAdminURL')
edit("$wlsAdminServerName")
startEdit()
cd('/')
cd('/Clusters/cluster1')
cmo.setFrontendHTTPPort($AppGWHttpPort)
cmo.setFrontendHTTPSPort($AppGWHttpsPort)
cmo.setFrontendHost('$AppGWHostName')
servers=cmo.getServers()
for server in servers:
    print "Creating T3 channel Port on managed server: + server.getName()"
    serverPath="/Servers/"+server.getName()
    cd(serverPath)
    create('T3Channel','NetworkAccessPoint')
    cd(serverPath+"/NetworkAccessPoints/T3Channel")
    set('Protocol','t3')
    set('ListenAddress','')
    set('ListenPort',$channelPort)
    set('PublicAddress', '$AppGWHostName')
    set('PublicPort', $channelPort)
    set('Enabled','true')
    cd(serverPath)
    create('HTTPChannel','NetworkAccessPoint')
    cd(serverPath+"/NetworkAccessPoints/HTTPChannel")
    set('Protocol','http')
    set('ListenAddress','')
    set('ListenPort',$channelPort)
    set('PublicAddress', '$AppGWHostName')
    set('PublicPort', $channelPort)
    set('Enabled','true')
save()
resolve()
activate()
destroyEditSession("$wlsAdminServerName")
disconnect()
EOF
# Run the generated script with the WebLogic Scripting Tool.
. $oracleHome/oracle_common/common/bin/setWlstEnv.sh
java $WLST_ARGS weblogic.WLST ${SCRIPT_PWD}/setup-app-gateway.py
if [[ $? != 0 ]]; then
     echo "Error : Fail to cofigure application gateway."
     exit 1
fi
}
# Generate and run a WLST script that rolling-restarts every managed
# server currently in RUNNING state (the admin server is skipped).
function restartManagedServers()
{
  echo "Restart managed servers"
  cat <<EOF >${SCRIPT_PWD}/restart-managedServer.py
connect('$wlsUserName','$wlsPassword','t3://$wlsAdminURL')
servers=cmo.getServers()
domainRuntime()
print "Restart the servers which are in RUNNING status"
for server in servers:
   bean="/ServerLifeCycleRuntimes/"+server.getName()
   serverbean=getMBean(bean)
   if (serverbean.getState() in ("RUNNING")) and (server.getName() != '${wlsAdminServerName}'):
      try:
          print "Stop the Server ",server.getName()
          shutdown(server.getName(),server.getType(),ignoreSessions='true',force='true')
          print "Start the Server ",server.getName()
          start(server.getName(),server.getType())
      except:
          print "Failed restarting managed server ", server.getName()
          dumpStack()
serverConfig()
disconnect()
EOF
# Run the generated script with the WebLogic Scripting Tool.
. $oracleHome/oracle_common/common/bin/setWlstEnv.sh
java $WLST_ARGS weblogic.WLST ${SCRIPT_PWD}/restart-managedServer.py
if [[ $? != 0 ]]; then
  echo "Error : Fail to restart managed server."
  exit 1
fi
}
SCRIPT_PWD=`pwd`

# Exactly seven positional arguments are required (see usage).
if [ $# -ne 7 ]
then
    usage
    exit 1
fi

wlsAdminServerName=$1
wlsUserName=$2
wlsPassword=$3
wlsAdminHost=$4
wlsAdminPort=$5
AppGWHostName=$6
oracleHome=$7

# Derived values; exported so the WLST heredocs above can expand them.
export wlsAdminURL=$wlsAdminHost:$wlsAdminPort
export channelPort=8501
export AppGWHttpPort=80
export AppGWHttpsPort=443

validateInput
setupApplicationGateway
restartManagedServers | true |
957adbe0688aa022ab1272b1b09e1f7ada5a7c99 | Shell | superheroCEO/practice-shell-scripts | /Shell_Scripting_Bible/Ch12/double-brackets.sh | UTF-8 | 999 | 3.71875 | 4 | [] | no_license | #!/bin/bash
# Interactive walk-through of bash's [[ ]] pattern-matching features.
echo "The DOUBLE BRACKETS command provides adv features for string comparison. The command format is:
[[ expression ]] "
echo "Double bracket expression uses the standard string comparison sued in the test evaluations."
echo "HOWEVER, it provides additional feat. that test eval. don't: PATTERN MATCHING"
sleep 1
echo "In pattern matching, you can define a regular expression that's matched against the string value."
echo "let's look at an example using the USER env. var and j* as regex..."
sleep 3
# The unquoted right-hand side makes j* a glob pattern, not a literal.
if [[ ${USER} == j* ]]
then
echo "Hello ${USER}"
else
echo "Sorry, I do not fucking know you."
fi
# Exit status of the if statement that just ran.
echo $?
sleep 2
echo "Notice that DOUBLE EQUAL SIGNS ( == ) are used!"
echo "These designated the string to the right ( r* ) as a PATTERN and PATTERN MATCHING rules are applied"
sleep 1
echo "The [[ ]] command matches the $USER environment variable to see whether it starts with the letter j. If so, the comparison SUCCEEDS and the shell executes the then section command."
| true |
785a83dff9ab97e876499f3c4c1467bc7c8efafb | Shell | mikedugan/flarum-app | /scripts/environment.sh | UTF-8 | 1,005 | 2.71875 | 3 | [] | no_license | #! /bin/bash
# NOTE(review): `su - vagrant` starts an interactive login shell; the
# commands below only run after that shell exits, so they execute as the
# original user - confirm whether `su - vagrant -c ...` was intended.
su - vagrant

### Setup NPM globals and create necessary directories ###
sudo apt-get install phantomjs zsh exuberant-ctags
mkdir /home/vagrant/npm
mkdir -p /vagrant/workbench/flarum/core
sudo chown -R vagrant:vagrant /home/vagrant
npm install -g bower ember
cp /vagrant/scripts/aliases ~/.aliases

### Create rc file ###
# Append the aliases loader to zshrc when present, bashrc otherwise.
if [ -e "/home/vagrant/.zshrc" ]
then
    echo "source ~/.aliases" >> ~/.zshrc
else
    echo "source ~/.aliases" >> ~/.bashrc
fi

### Set up environment files and database ###
cp /vagrant/.env.example.php /vagrant/.env.local.php
mysql -u root -proot -e 'create database flarum'

### Setup flarum/core ###
cd /vagrant/workbench/flarum/core
git clone https://github.com/flarum/core .
composer install
mkdir public
cd /vagrant/workbench/flarum/core/ember
npm install
bower install

cd /vagrant
composer install
php artisan migrate --bench="flarum/core"
php artisan db:seed --class="Flarum\Core\Support\Seeders\DatabaseSeeder"
php artisan asset:publish --bench="flarum/core"
| true |
5ace06dfe2c6b15d6786f5135cb9e5b64f6e8063 | Shell | DavidEGrayson/update-nix-fetchgit | /tests/test.sh | UTF-8 | 2,375 | 4.25 | 4 | [] | no_license | #/bin/bash
# Bash script for testing update-nix-fetchgit.
#
# Unlike git, nix-prefetch-git does not work with git URLs that are
# relative paths, so we store local test repositories at fixed
# locations inside /tmp/nix-update-fetchgit-test. We use file locking
# to make sure that multiple instances of this script can be run at
# the same time without stepping on eachother.
set -ue

# Change to the directory where this script is located (quoted so a
# path containing spaces cannot be word-split).
cd "$(dirname "$0")"

updater='../dist/build/update-nix-fetchgit/update-nix-fetchgit'
# Prepares git repositories on the local machine that we will use for
# testing.
function prepare_local_test_repos() {
  # Make sure the commit hashes are predictable.
  # (fixed author/committer dates and identities make every generated
  # commit hash reproducible across runs and machines)
  export GIT_AUTHOR_DATE='1466974421 +0200'
  export GIT_COMMITTER_DATE='1466974421 +0200'
  export GIT_COMMITTER_NAME='joe'
  export GIT_AUTHOR_NAME='joe'
  export GIT_COMMITTER_EMAIL='joe@example.com'
  export GIT_AUTHOR_EMAIL='joe@example.com'

  rm -rf /tmp/nix-update-fetchgit-test/
  mkdir -p /tmp/nix-update-fetchgit-test/
  pushd /tmp/nix-update-fetchgit-test/

  # repo1: three commits with a 1.0.0 tag in the middle, so tests can
  # distinguish tagged and untagged revisions.
  git init repo1
  pushd repo1
  echo hi > test.txt
  git add test.txt
  git commit -m "initial commit"
  echo 1.0.0 > test.txt
  git commit -m "version 1.0.0" test.txt
  git tag 1.0.0
  echo '1.0.0+stuff' > test.txt
  git commit -m "added stuff" test.txt
  popd

  # repo2: a single commit with a different (later) timestamp.
  export GIT_AUTHOR_DATE='1468031426 -0700'
  export GIT_COMMITTER_DATE='1468031426 -0700'
  git init repo2
  pushd repo2
  echo hi > test.txt
  git add test.txt
  git commit -m "initial commit"
  popd

  popd
}
# Report a fatal message on stderr and stop the whole test run.
function error {
  >&2 echo "$1"
  exit 1
}
# Run the updater on a copy of <name>.in.nix and verify the result
# matches <name>.expected.nix byte for byte.
#   $1 - test case name (fixture prefix)
function test_successful_update() {
  local test_name="$1"
  cp "$test_name.in.nix" "$test_name.out.nix"
  echo "$test_name: Starting."
  if ! "$updater" "$test_name.out.nix"; then
    error "$test_name: Error running the updater."
  fi
  if ! diff "$test_name.expected.nix" "$test_name.out.nix"; then
    error "$test_name: Incorrect output."
  fi
  echo "$test_name: Passed."
}
# Build the fixture repositories, then run every *.expected.nix case.
function run_test_suite() {
  prepare_local_test_repos > /dev/null
  local f test_name
  for f in *.expected.nix; do
    test_name=$(basename "$f" .expected.nix)
    test_successful_update "$test_name"
  done
  echo
  echo "All tests passed."
}
{
  # Acquire an exclusive lock on file descriptor 200, but time out
  # after 10 minutes if it cannot be acquired.
  # (fd 200 is the lock file opened by the redirection after the group)
  flock -x -w 600 200
  run_test_suite
} 200> /tmp/nix-update-fetchgit-test-lock
| true |
e3db3c72754a15e7be5e02cee0eeac6f969f4683 | Shell | Gasol/commons-csv | /debian/uscan | UTF-8 | 646 | 3.640625 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/sh
# Check whether the source version of the Debian package is the most recent
# revision of commons-cvs in the Apache subversion repository.
# Extract the NNN from a "+svnNNN" token in the changelog version.
REVISION=$(dpkg-parsechangelog | grep "^Version:" | grep -o "+svn[0-9]\+" | grep -o "[0-9]\+")
BASEURL="http://svn.apache.org/repos/asf/commons/sandbox/csv/trunk/"
# Highest "Last Changed Rev" across the whole tree (recursive svn info).
LATEST_UPSTREAM_REVISION=$(svn info -R "$BASEURL" | grep -i "last changed rev:" | grep -o "[0-9]\+" | sort -n | tail -n 1)

if [ "$REVISION" != "$LATEST_UPSTREAM_REVISION" ]; then
    echo "Using revision $REVISION; Latest revision in upstream repository: $LATEST_UPSTREAM_REVISION"
else
    echo "Using up-to-date revision $REVISION"
fi
| true |
6f22d1b0f0258271c0acf5109ba1f860858b67a7 | Shell | CSUChico-CINS465/Openshift_DIY_Latest_Python3_and_Django | /.openshift/action_hooks/start | UTF-8 | 652 | 2.75 | 3 | [] | no_license | #!/bin/bash
# The logic to start up your application should be put in this
# script. The application will work only if it binds to
# $OPENSHIFT_DIY_IP:8080
#nohup $OPENSHIFT_REPO_DIR/diy/testrubyserver.rb $OPENSHIFT_DIY_IP $OPENSHIFT_REPO_DIR/diy |& /usr/bin/logshifter -tag diy &
# set path
export PATH="$OPENSHIFT_DATA_DIR/bin:$PATH"

# The gear records the Django project name in a marker file.
APP_NAME=$(cat "$OPENSHIFT_REPO_DIR/.app_name")
cd "$OPENSHIFT_REPO_DIR/$APP_NAME"
# Serve the Django WSGI app on the gear's assigned IP:port; -d
# daemonizes uwsgi with its log and pid in the gear's writable dirs.
"$OPENSHIFT_DATA_DIR/bin/uwsgi" -s "$OPENSHIFT_DIY_IP:$OPENSHIFT_DIY_PORT" \
    --socket-protocol http \
    --module "$APP_NAME.wsgi:application" \
    --pp "$OPENSHIFT_REPO_DIR/$APP_NAME" \
    -d "$OPENSHIFT_DIY_LOG_DIR/app.log" \
    --pidfile "$OPENSHIFT_TMP_DIR/uwsgi.pid"
| true |
6e90c0493ca2cdf16ebaacc5bf46ec964135e9f3 | Shell | aversini/envtools | /shell/functions/logs.sh | UTF-8 | 4,334 | 3.4375 | 3 | [
"MIT"
] | permissive | # GLOBAL DEFINITIONS
# Global logging state shared across the envtools shell functions.
GLOBAL_LOG_VERBOSE=true
GLOBAL_ERROR_MSG=""
GLOBAL_CONTINUE=true

# Color definitions to be used in anything else than
# prompt command
# (stored as literal \e sequences; txtColor passes them through printf's
# format string, which is what interprets the escapes)
RAW_COLOR_RED="\e[0;31m"
RAW_COLOR_BLUE="\e[0;34m"
# Different blue on Windows - presumably 0;34 renders poorly there; confirm.
if isWindows; then
  RAW_COLOR_BLUE="\e[0;36m"
fi
RAW_COLOR_GREEN="\e[0;32m"
RAW_COLOR_YELLOW="\e[0;33m"
RAW_COLOR_MAGENTA="\e[0;35m"
RAW_COLOR_CYAN="\e[0;36m"
RAW_COLOR_GRAY="\e[0;90m"
RAW_COLOR_B_RED="\e[1;31m"
RAW_COLOR_B_BLUE="\e[1;34m"
RAW_COLOR_B_GREEN="\e[1;32m"
RAW_COLOR_B_YELLOW="\e[1;33m"
RAW_COLOR_B_MAGENTA="\e[1;35m"
RAW_COLOR_B_CYAN="\e[1;36m"
RAW_COLOR_B_GRAY="\e[1;90m"
RAW_COLOR_B_WHITE="\e[1;97m"
RAW_COLOR_DEFAULT="\e[00m"
# functions to change prompt text color
# (thin wrappers over txtColor: each passes its fixed RAW_COLOR_* value
# plus the caller's text and optional "nl" argument straight through)
function txtRed {
  txtColor "$RAW_COLOR_RED" "$@"
}
function txtBlue {
  txtColor "$RAW_COLOR_BLUE" "$@"
}
function txtGreen {
  txtColor "$RAW_COLOR_GREEN" "$@"
}
function txtYellow {
  txtColor "$RAW_COLOR_YELLOW" "$@"
}
function txtMagenta {
  txtColor "$RAW_COLOR_MAGENTA" "$@"
}
function txtCyan {
  txtColor "$RAW_COLOR_CYAN" "$@"
}
function txtDefault {
  txtColor "$RAW_COLOR_DEFAULT" "$@"
}
function txtBoldRed {
  txtColor "$RAW_COLOR_B_RED" "$@"
}
function txtBoldBlue {
  txtColor "$RAW_COLOR_B_BLUE" "$@"
}
function txtBoldGreen {
  txtColor "$RAW_COLOR_B_GREEN" "$@"
}
function txtBoldYellow {
  txtColor "$RAW_COLOR_B_YELLOW" "$@"
}
function txtBoldMagenta {
  txtColor "$RAW_COLOR_B_MAGENTA" "$@"
}
function txtBoldCyan {
  txtColor "$RAW_COLOR_B_CYAN" "$@"
}
function txtBoldWhite {
  txtColor "$RAW_COLOR_B_WHITE" "$@"
}
# Print $2 in the color named by $1, resetting the color afterwards.
# An optional third argument "nl" appends a newline.  All output is
# suppressed when shouldLog (defined in a sourced include) is false.
function txtColor {
  if shouldLog; then
    # $1 is deliberately part of printf's FORMAT string: RAW_COLOR_*
    # values store literal \e sequences, and the format string is where
    # printf interprets those escapes.
    printf "$1%s$RAW_COLOR_DEFAULT" "$2"
    if [ $# -eq 3 ] && [ "$3" = "nl" ]; then
      echo
    fi
  fi
}
#
# This function sets the globals $GLOBAL_COLOR_CMD and $GLOBAL_STATUS_TEXT based
# on the status passed as an argument.
# Status can be one of the following: EMERGENCY ALERT CRITICAL ERROR WARNING NOTICE
# INFO DEBUG OK FAILED FAILURE FAIL ABORT SUCCESS PASSED
#
extractStatusAndColor () {
  # Map $1 onto GLOBAL_STATUS_TEXT (the printable label), GLOBAL_COLOR_CMD
  # (prompt-style color from the bash/zsh specific include) and
  # GLOBAL_COLOR_NAME (raw color).  Unknown statuses fall through to
  # UNDEFINED in yellow.
  STATUS="$1"
  case $STATUS in
    FATAL )
      GLOBAL_STATUS_TEXT=" FATAL "
      GLOBAL_COLOR_CMD="$CMD_RED"
      GLOBAL_COLOR_NAME="$RAW_COLOR_RED"
      ;;
    ALERT )
      GLOBAL_STATUS_TEXT=" ALERT "
      GLOBAL_COLOR_CMD="$CMD_RED"
      GLOBAL_COLOR_NAME="$RAW_COLOR_RED"
      ;;
    ERROR | ABORT | EMERGENCY | CRITICAL | FAILURE | FAILED | FAIL )
      GLOBAL_STATUS_TEXT=" ERROR "
      GLOBAL_COLOR_CMD="$CMD_RED"
      GLOBAL_COLOR_NAME="$RAW_COLOR_RED"
      ;;
    WARNING )
      GLOBAL_STATUS_TEXT=" WARNING "
      GLOBAL_COLOR_CMD="$CMD_YELLOW"
      GLOBAL_COLOR_NAME="$RAW_COLOR_YELLOW"
      ;;
    NOTICE )
      GLOBAL_STATUS_TEXT=" NOTICE "
      GLOBAL_COLOR_CMD="$CMD_YELLOW"
      GLOBAL_COLOR_NAME="$RAW_COLOR_YELLOW"
      ;;
    INFO )
      GLOBAL_STATUS_TEXT=" INFO "
      GLOBAL_COLOR_CMD="$CMD_BLUE"
      GLOBAL_COLOR_NAME="$RAW_COLOR_BLUE"
      ;;
    DEBUG )
      GLOBAL_STATUS_TEXT=" DEBUG "
      GLOBAL_COLOR_CMD="$CMD_DEFAULT"
      GLOBAL_COLOR_NAME="$RAW_COLOR_DEFAULT"
      ;;
    OK )
      GLOBAL_STATUS_TEXT=" OK "
      GLOBAL_COLOR_CMD="$CMD_GREEN"
      GLOBAL_COLOR_NAME="$RAW_COLOR_GREEN"
      ;;
    PASSED )
      GLOBAL_STATUS_TEXT=" PASSED "
      GLOBAL_COLOR_CMD="$CMD_GREEN"
      GLOBAL_COLOR_NAME="$RAW_COLOR_GREEN"
      ;;
    SUCCESS )
      GLOBAL_STATUS_TEXT=" SUCCESS "
      GLOBAL_COLOR_CMD="$CMD_GREEN"
      GLOBAL_COLOR_NAME="$RAW_COLOR_GREEN"
      ;;
    *)
      GLOBAL_STATUS_TEXT="UNDEFINED"
      GLOBAL_COLOR_CMD="$CMD_YELLOW"
      GLOBAL_COLOR_NAME="$RAW_COLOR_YELLOW"
  esac
}
source "${ENVDIR}/functions/logs_bash.sh"
elif isZsh; then
source "${ENVDIR}/functions/logs_zsh.sh"
fi
function txtWarning {
txtDefault "" "nl"
txtStatus "$1" "WARNING"
txtDefault "" "nl"
}
function txtError {
txtDefault "" "nl"
txtStatus "$1" "ERROR"
txtDefault "" "nl"
}
function abortMessage {
local MSG="An unexpected error occured... unable to go further!"
if isValid $GLOBAL_ERROR_MSG; then
MSG=$GLOBAL_ERROR_MSG
GLOBAL_ERROR_MSG=""
fi
if isValid $1; then
MSG=$1
fi
if shouldLog; then
txtStatus "$MSG" "FATAL"
fi
GLOBAL_CONTINUE=false
}
| true |
c7e804e885cc7646aa86bfb52e36764cc8a154db | Shell | Any-berg/multitone | /test.sh | UTF-8 | 825 | 3.4375 | 3 | [] | no_license | # get signal string comprised of characters 0-9, a-d, *, # and _
# Signal string is taken from $1; when absent, build the full default
# test sequence from all 16 DTMF symbols.
n=$1
if [[ -z $n ]]; then
: '
signals=(
'1' '2' '3' 'a'
'4' '5' '6' 'b'
'7' '8' '9' 'c'
'*' '0' '#' 'd' )
'
signals=( 'D' '1' '2' '3' '4' '5' '6' '7' '8' '9' '0' '*' '#' 'A' 'B' 'C' )
# list all possible left and right channel signal combinations
for left in "${signals[@]}"
do
for right in "${signals[@]}"
do
n+="$left$right"
done
done
# pause
n+="__"
# list signals in both channels, when the other channel stays quiet
other=""
for one in "${signals[@]}"
do
n+="${one}_"
other+="_$one"
done
n+="$other"
fi
# get number of channels (default being 1 for odd number of signals and so on)
ch=${2:-$((2-$((${#n} % 2))))}
# Quoted: $n contains glob characters (* and #) that must not undergo
# pathname expansion before reaching PHP.
php dtmf.php "n=$n" "ch=$ch" > test.au
afplay test.au
rm -f test.au
| true |
c2de6cc2e32356592097c4b5d05ae460b5ca3b23 | Shell | reaperzn/auter | /tests/10-rpmbuild.sh | UTF-8 | 3,596 | 3.71875 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Resolve the repository layout relative to this script's location and
# pull the auter version out of the spec file.
AUTERDIR="$(cd "$(dirname "$0")"; cd .. ; pwd -P)"
AUTERPARENTDIR="$(cd "$(dirname "$0")"; cd ../.. ; pwd -P)"
VERSION="$(grep "Version" "${AUTERDIR}"/auter.spec | awk '{print $2}')"
echo "AUTERDIR: ${AUTERDIR}"
echo "AUTERPARENTDIR: ${AUTERPARENTDIR}"
echo "VERSION: ${VERSION}"
# Stop test containers and exit with the given status code ($1).
function quit() {
  CONTAINERS=$(docker ps -a -q)
  # shellcheck disable=SC2086
  # NOTE(review): the guard tests $CONTAINERS (all containers on the host)
  # but stops $DOCKERCONTAINERS (the IDs this script recorded) - confirm
  # the mismatch is intended.
  [[ -n $CONTAINERS ]] && echo "Stopping leftover containers" && docker stop ${DOCKERCONTAINERS}
  exit "$1"
}
for RELEASE in 6 7; do
function EVALSUCCESS {
RC=$?
if [[ $RC -ne 0 ]]; then
echo -e " [ FAILED ] ABORTING - RC=$RC - $1"
FAILEDTESTS+="RHEL${RELEASE} / CentOS${RELEASE}"
continue
else
echo " [ PASSED ] $1"
return 0
fi
}
# Build the docker container
DOCKERCONTAINERS+=" $(docker run --rm=true --name auter-rpmbuild-test-${RELEASE} -td centos:${RELEASE})"
EVALSUCCESS "Created ${RELEASE} docker image"
# Install the rpmbuild dependencies, add the user and create the ENV
docker exec auter-rpmbuild-test-${RELEASE} yum -y -q -e 0 install rpm-build elfutils-libelf rpm-libs rpm-pythoni gcc make help2man sudo 2>/dev/null 1>/dev/null
EVALSUCCESS "Installed packages to docker image"
docker exec auter-rpmbuild-test-${RELEASE} useradd builduser
EVALSUCCESS "Added build user"
docker exec auter-rpmbuild-test-${RELEASE} mkdir -p /home/builduser/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS}
EVALSUCCESS "Created rpmbuild structure"
# shellcheck disable=SC2016
echo '%_topdir %(echo $HOME)/rpmbuild' > /tmp/.rpmmacros
# Create the tarball for rpmbuild
# Manually changing directory due to tar -C not working too well
CURRENTDIR="$(pwd)"
cd "${AUTERPARENTDIR}"
tar -czf "auter-${VERSION}-rpmbuild.tar.gz" auter
EVALSUCCESS "Created source tarball from travis container"
sleep 2
mv "auter-${VERSION}-rpmbuild.tar.gz" "${AUTERDIR}"
EVALSUCCESS "Moved sources tarball from $(pwd) to ${AUTERDIR}"
cd "${CURRENTDIR}"
# Copy the rpmbuild config and tarball to the builduser homedir
docker cp /tmp/.rpmmacros auter-rpmbuild-test-${RELEASE}:/home/builduser/.rpmmacros
EVALSUCCESS "Copied /tmp/.rpmmacros to docker container"
docker cp "${AUTERDIR}/auter-${VERSION}-rpmbuild.tar.gz" auter-rpmbuild-test-${RELEASE}:/home/builduser/
EVALSUCCESS "Copied sources to docker container"
docker cp "${AUTERDIR}/auter.spec" "auter-rpmbuild-test-${RELEASE}":/home/builduser/rpmbuild/SPECS
EVALSUCCESS "Copied spec file to docker container"
# Copy the build test script to the container
docker cp "${AUTERDIR}/tests/11-container-rpmbuild.sh" "auter-rpmbuild-test-${RELEASE}":/home/builduser
EVALSUCCESS "Copied build script to container"
docker exec auter-rpmbuild-test-${RELEASE} chown -R builduser.builduser /home/builduser
docker exec auter-rpmbuild-test-${RELEASE} /home/builduser/11-container-rpmbuild.sh
EVALSUCCESS "Executed /home/builduser/11-container-rpmbuild.sh"
docker cp auter-rpmbuild-test-${RELEASE}:/home/builduser/auter.rpm.tar.gz ./
tar -xzf auter.rpm.tar.gz
RPMORIGNAME="$(tar -tzvf auter.rpm.tar.gz | grep rpm | awk '{print $NF}')"
RPMNEWNAME="${AUTERDIR}"/"${RPMORIGNAME//auter/auter-${RELEASE}}"
mv "${RPMORIGNAME}" "${RPMNEWNAME}"
EVALSUCCESS " ${RPMNEWNAME} file created in travis container"
rm -f "${AUTERDIR}/auter-${VERSION}-rpmbuild.tar.gz"
rm -f auter.rpm.tar.gz
done
if [[ -n "${FAILEDTESTS}" ]]; then
echo " [ FAILED ] - The following builds failed:"
echo "${FAILEDTESTS}"
quit 1
else
echo " [ SUCCESS ] All builds were successfull"
quit 0
fi
| true |
b8164cb3122a55c555e38d29ae6ca5c403277c69 | Shell | HackerDom/ructfe-2019 | /ansible/roles/vpn/files/cloud/switch_team_to_not_cloud.sh | UTF-8 | 599 | 3.359375 | 3 | [
"MIT"
] | permissive | #!/bin/bash -e
TEAM=${1?Usage: switch_team_to_non_cloud.sh <team> <ip>}
IP=${2?Usage: switch_team_to_non_cloud.sh <team> <ip>}
if ! [[ $TEAM =~ ^[0-9]+$ ]]; then
echo "Team shold be integer"
exit 1
fi
while iptables -w -C INPUT -i eth0 -m udp -p udp --dport "$((30000+TEAM))" -j DROP &>/dev/null; do
iptables -w -D INPUT -i eth0 -m udp -p udp --dport "$((30000+TEAM))" -j DROP
done
while iptables -w -C INPUT -i eth0 -m udp -p udp --dport "$((30000+TEAM))" -s "${IP}" -j ACCEPT &>/dev/null; do
iptables -w -D INPUT -i eth0 -m udp -p udp --dport "$((30000+TEAM))" -s "${IP}" -j ACCEPT
done
| true |
0be3efb75c4aaa7c04976a5d4599e9dc0987bbb6 | Shell | wayneyu/merapp | /public/raw_database/import2mongo.sh | UTF-8 | 917 | 3.109375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
#import *.json in raw_database to mongo
DBNAME=merdb
COLLECTIONNAME_Q=questions
mongo $DBNAME --eval "db.questions.drop()"
LIST_Q="$(find json_data -name *.json)"
for i in $LIST_Q; do
echo $i
mongoimport --db $DBNAME --collection $COLLECTIONNAME_Q --file $i --jsonArray
done
mongo $DBNAME --eval "db.questions.ensureIndex({statement_html:\"text\",hints_html:\"text\",solutions_html:\"text\",answer_html:\"text\",topics:\"text\"})"
echo "Total number of QUESTIONS in the database:"
mongo $DBNAME --eval "db.questions.count()"
COLLECTIONNAME_T=topics
mongo $DBNAME --eval "db.topics.drop()"
LIST_T="$(find json_topics -name *.json)"
for i in $LIST_T; do
echo $i
mongoimport --db $DBNAME --collection $COLLECTIONNAME_T --file $i --jsonArray
done
mongo $DBNAME --eval "db.topics.ensureIndex({topic:\"text\"})"
echo "Total number of TOPICS in the database:"
mongo $DBNAME --eval "db.topics.count()"
| true |
2483e62ba39c91e35e6ce033d3164f8721920b73 | Shell | jason-speck/bin | /showfds | UTF-8 | 324 | 3.75 | 4 | [] | no_license | #!/bin/sh
# showfds - show file descriptors of a process, by PID, by reading from /proc
if [ -z "$1" ]; then
echo "Usage: `basename $0` <pid> " 1>&2; exit 1
fi
d=/proc/$1/fd
if [ ! -r "$d" ]; then
echo "`basename $0`: can't access $d" 1>&2
exit 1
fi
ls -l $d | perl -lne 'print "fd $1" if /(\d+\s+\-\>.*)$/'
| true |
6b2258ef33450b390a851715fb68c134bb4eff45 | Shell | Rocio-CP/SOCAT_other | /prepare_SOCAT_submission/batch_xlsx_to_tabtxt.sh | UTF-8 | 3,727 | 3.765625 | 4 | [] | no_license | #!/bin/bash
# ALL THIS ASSUMES 1-LINE HEADERS!!
# Convert to txt using libreoffice. It uses commas as separation!!
# It's the most time-consuming script
for f in *.xlsx
do soffice --headless --convert-to txt:"Text - txt - csv (StarCalc)" "$f"
done
# Replace commas with tabs.
# This line is tailored to MacOS. Sed is much more simple in UNIX :(
# sed is incredibly awkward in MacOS...
for f in *.txt
do sed -i '.bak' $'s/,/\t/g' "$f"
done
# Remove backup files .bak (for MacOS# Remove backup files .bak)
rm ./*.bak
# Add 1st column with file name. Useful for splitting after concatenation
files=($(ls *.txt))
for f in $(seq 0 $((${#files[@]} - 1)))
do sed -i '.bak' "s/^/${files[$f]}"$'\t/g' "${files[$f]}"
sed -i '.bak' "1 s/^${files[$f]}/Filename/g" "${files[$f]}"
done
rm *.bak
# Create "structure" file with the file name + header.
#files=($(ls *.txt))
for f in $(seq 0 $((${#files[@]} - 1)))
do head -n 1 ${files[$f]} >> structure
# sed -i '.bak' '$'"s/$/"$'\t'"${files[$f]}/" structure
sed -i '.bak' '$'"s/^/${files[$f]}"$'\t'"/" structure
# sed -i '.bak' '$'"s/$/"$'\t'"$f/" structure
done
#for f in *.txt
# do head -n 1 $f >> structure
# sed -i '.bak' '$'"s/^/$f"$'\t'"/" structure
# done
# Sort structure file and identify how many uniques headers (excluding 1st column, i.e. file name)
sort structure -k 2 > sortedstructure
uniq -f 1 sortedstructure > uniqueheaders
# Remove file name from uniqueheaders
sed -i '.bak' "s/^[a-zA-Z0-9]*.txt"$'\t'"//g" uniqueheaders
# More than one type of header (different columns, in number and/or order)
if [ $(wc -l < uniqueheaders) -gt 1 ]
then
for d in $(seq 1 $(($(wc -l < uniqueheaders) - 0)))
do mkdir -p S$d
# Pick one header
uniheader=$(sed -n $d'p' uniqueheaders)
# Find lines (and therefore filenames) with same header in structure file
grep "$uniheader" structure > tempstructure
# Get only file names
filesuniheader=($(cut -f1 tempstructure))
# Move the files into the folder
for fuh in $(seq 0 $((${#filesuniheader[@]} - 1)))
do cp ${filesuniheader[$fuh]} ./S$d/
done
cd ./S$d
# Join into one large file allS.txt, with only one the first line of headers. (and do calculations in Excel)
head -n 1 ${filesuniheader[1]} > allS$d.txt
for fuh in $(seq 0 $((${#filesuniheader[@]} - 1)))
do tail -n +2 ${filesuniheader[$fuh]} >> allS$d.txt
done
# Open the All file with Excel to calculate appropriate equilibrium pressure. Quit Excel completely to return to script!!
open ./allS$d.txt -W -a "Microsoft Excel"
# MacOS excel saves .txt with ^M as carriage return, and in Vim it looks like one large line. Fix it:
tr '\r' '\n' < allS$d.txt > allS$d$d.txt
# Split into the original .txt files (use Steve's tcl script)
tclsh ../split_cruises_windows.tcl ./allS$d$d.txt ./ '\t' 0
# The split routine appends .txt to the file names; remove the extra one
#rm allS$d.txt
for fuh in $(seq 0 $((${#filesuniheader[@]} - 1)))
do mv ${filesuniheader[$fuh]}.txt ${filesuniheader[$fuh]}
done
cd ..
done
# If only 1 type of header, merge into one file
else
for f in $(seq 0 $((${#files[@]} - 1)))
do tail -n +2 ${files[$f]} >> all.txt
done
open ./all.txt -W -a "Microsoft Excel"
# MacOS excel saves .txt with ^M as carriage return, and in Vim it looks like one large line. Fix it:
tr '\r' '\n' < all.txt > all2.txt
# Split into the original .txt files (use Steve's tcl script)
tclsh ./split_cruises_windows.tcl ./all2.txt ./ '\t' 0
for f in $(seq 0 $((${#files[@]} - 1)))
do mv ${files[$f]}.txt ${files[$f]}
done
fi
| true |
10898cd73bffdf13a69bf1fffcef8e1de3d40f33 | Shell | yiiyama/graph-hls-paper | /generation/run_generate.sh | UTF-8 | 587 | 3.140625 | 3 | [] | no_license | #!/bin/bash
## EDIT BELOW
OUTBASE=/eos/cms/store/cmst3/user/yiiyama/graph_hls_paper/generated2
## EDIT ABOVE
PART=$1
EMIN=$2
EMAX=$3
NGEN=$4
JOBID=$5
SEEDBASE=$6
[ $SEEDBASE ] || SEEDBASE=123456
THISDIR=$(cd $(dirname $(readlink -f $0)); pwd)
source $THISDIR/env.sh
SEED=$(($SEEDBASE+$JOBID))
if [ $PART = "pileup" ]
then
$THISDIR/generate -f events.root -p - -u -n $NGEN -s $SEED
else
$THISDIR/generate -f events.root -e $EMIN $EMAX -p $PART -n $NGEN -s $SEED
fi
OUTDIR=$OUTBASE/${PART}_${EMIN}_${EMAX}/$SEEDBASE
mkdir -p $OUTDIR
cp events.root $OUTDIR/events_$JOBID.root
| true |
17243b509665078c38ba57a64d490deef8dc9831 | Shell | nbudin/.dotfiles | /scripts/git/git-checkoutbranch.sh | UTF-8 | 149 | 3.296875 | 3 | [] | no_license | #!/bin/bash
OPTS=$(git branch | tr -d '*' | sort -n | xargs)
select opt in $OPTS; do
if [ $opt ] ; then
git checkout $opt
fi
break;
done
| true |
30eedaddb29286d978bbddca1b629ba643ff0e55 | Shell | http-3/rest-overhead-paper | /scripts/exporttodblikbench.sh | UTF-8 | 483 | 3.1875 | 3 | [] | no_license | #!/bin/bash
#USAGE: $ bash exporttodblikbenchss.sh likbenchssresults-west8-1
RESULT_DIR=$1
cd $RESULT_DIR
DBNAMEC=$RESULT_DIR-client
DBNAMES=$RESULT_DIR-server
group=l2lol2rm
for file in 1 100 1000 100000 1000000 100000000;do
echo "pumping likwid results for the size $file into results-likbenchss.db"
python3 ../lik-results-in-db-client.py $DBNAMEC.db $group $file-$group-wrkout.txt
python3 ../lik-results-in-db.py $DBNAMES.db $group $file-$group-likwid.txt
done
| true |
97e8642083d0ed7dc3f27d2a1765b88fdf918a7d | Shell | cjieming/jmtools | /motifVar_wrapper_plus_manual-012-only.sh | UTF-8 | 3,116 | 3.734375 | 4 | [] | no_license | #!/bin/bash
## This script tries to integrate the automated pipeline motifVar.sh and the manual portions of it
## the aim is to include all the manual commands that you have already figured out
## and make the run with only a single domain argument so that this can be looped
## Hence, many of the 'manual' parameters are hardcoded into this wrapper script.
## The logic and comments should be able to guide you through how to adapt this for your own use.
## you can loop it like this:
## for i in TPR
## do bsub-make-plus.sh motifvar-wrapper-"$i" "motifVar_wrapper_plus_manual.sh $i"
## bsub -q gerstein < bsub-script-rdy-motifvar-wrapper-"$i".sh
## done
if [[ "$#" -ne 1 && "$1" -ne -1 ]] ; then
echo "==============================="
echo "== USAGE ======================"
echo "==============================="
echo "motifVar_wrapper_plus_manual.sh <smart domain name>"
echo "e.g. motifVar_wrapper_plus_manual.sh TPR"
exit 1
fi
## make directory of domain
## enter domain
mkdir $1
cd $1
#######################################################
####### protPos2gPos; 5 args + m
## e.g. motifvar.sh protPos2gPos /ens/path/ensembl2coding_ens.noErr.txt /ens/path/allchr.ens73.noErr.tsv /smart/path/smart_domains_1158_all_131025.txt 73
####### the columns to grab are hardcoded into the motifVar.sh
####### for this module
####### they use the same files, so it seems they can't be run concurrently
####### solution is to run 0,1,2 first in series on a separate script (this script)
####### then run a second script to run the rest in parallel
motifVar.sh protPos2gPos /gpfs/scratch/fas/gerstein/jc2296/ensembl/ensembl73/ensembl2coding_ensembl73.proteinIDs.genomicPos.chrs.strand.noErr.txt /gpfs/scratch/fas/gerstein/jc2296/ensembl/ensembl73/allchromosomes.ens73.alldomainfeatures.smart.mod.noErr.tsv /gpfs/scratch/fas/gerstein/jc2296/smart/131025/smart_domains_1158_all_131025.txt 73
## manual protPos2gPos
cd 0-motifVar_protPos2gPos-m
REMOVE_CDS_FILE="/gpfs/scratch/fas/gerstein/jc2296/gencode/gencode.17.cds_start_end_NF.ensembl2coding_ensembl73.proteinIDs.txt"
PROTPOS2GPOS_OFILE="motifVar_protPos2gPos.ens73.smartdomains.txt"
PROTPOS2GPOS_OFILE_OLD="motifVar_protPos2gPos.ens73.smartdomains.unprocessed.txt"
# save original file
mv ${PROTPOS2GPOS_OFILE} ${PROTPOS2GPOS_OFILE_OLD}
# remove incomplete/truncated CDS sequences
fsieve2 -s <(cut -f3 ${REMOVE_CDS_FILE}) -m <(awk '{OFS="\t"}{FS="\t"}{print $8,$0}' ${PROTPOS2GPOS_OFILE_OLD} | sed 's/EnsemblProtID/EnsemblProtID1/1') | cut -f2- > ${PROTPOS2GPOS_OFILE}
# get out of folder
cd ..
#######################################################
####### 1 fasta2prot; 9 args
####### the columns to grab are hardcoded into the motifVar.sh
####### for this module
SMART_FASTA_PATH="/scratch/fas/gerstein/jc2296/smart/131025/fasta/HUMAN_$1.fa"
motifVar.sh 1 $1 ${SMART_FASTA_PATH} 73 - - - - -
#######################################################
####### 2 domain info (domain2info); 9 args
####### the columns to grab are hardcoded into the motifVar.sh
####### for this module
motifVar.sh 2 $1 - 73 - - - - -
### get out
cd .. | true |
13b449cbd0139f6bafffa8734584fc1767045525 | Shell | vibhatha/SVM-Parallel-SMO-MS | /Hybrid_MS_SMO/scripts/collectData1.sh | UTF-8 | 2,382 | 3.796875 | 4 | [] | no_license | #!/bin/bash
# to collect data from the slurmfiles to txt files to be easily plotted for 3D graph
if [ $# -eq 0 ] || [ $1 -lt 0 ] || [ $1 -gt 5 ]
then
echo "Please give one correct argument of dataset (0-mnist, 1-splice, 2-not mnist, 3-a9a, 4-ijcnn1, 5-codrna)"
echo "Also, if necessary give directory path as second argument"
exit
fi
wdir="."
if [ $# -eq 2 ]
then
wdir=$2
echo "Second argument path is taken "
else
echo "Current directory path is taken "
fi
slrm="$wdir/slurm-*"
dtst=$1
case "$dtst" in
0) dname="mnist"
;;
1) dname="splice"
;;
2) dname="notmnist"
;;
3) dname="a9a"
;;
4) dname="ijcnn1"
;;
5) dname="codrna"
;;
esac
echo $dname
opdir="${dname}_output"
if [ -d $opdir ]
then
echo "$opdir alredy exists ! deleting!"
rm -rf $opdir
fi
mkdir $opdir
opfname="$opdir/$dname"
sslr=`grep -w "dtst:${dtst} node:1 proc:1 trd:1" $slrm -l `
echo $sslr
if [ -z $sslr ]
then
echo "Serial smo missing, Cannot calculate speedup so get lost !!. I am exting"
rm -rf $opdir
exit
fi
stime=`cat $sslr | grep real |awk '{print $2}'|awk -F "m|s" '{print $1*60+$2}'`
echo $stime
nodes=`cat $slrm |grep "dtst:$dtst" |awk '{print $2}' |awk -F ":" '{print $2}' |sort -un`
for node in $nodes
do
echo "in node: "$node
opfname_n="${opfname}_n${node}"
opf_np="${opfname_n}_proc.txt"
opf_nt="${opfname_n}_thrd.txt"
opf_ns="${opfname_n}_spdp.txt"
procs=`cat $slrm |grep -w "dtst:$dtst node:$node" |awk '{print $3}' |awk -F ":" '{print $2}' |sort -un`
for proc in $procs
do
echo "in proc: "$proc
thrds=`cat $slrm |grep -w "dtst:$dtst node:$node proc:$proc" |awk '{print $4}' |awk -F ":" '{print $2}' |sort -un`
proc_line=""
thrd_line=""
spdp_line=""
for thrd in $thrds
do
echo "in thrd: "$thrd
proc_line="${proc_line}${proc},"
thrd_line="${thrd_line}${thrd},"
sl=`grep -w "dtst:$dtst node:$node proc:$proc trd:$thrd" $slrm -l`
if [ -z $sl ]
then
spdp_line="${spdp_line}0,"
continue
fi
echo "slurm file: "$sl
pttime=`cat $sl | grep real |awk '{print $2}'|awk -F "m|s" '{print $1*60+$2}'`
spdp=`echo "scale=2; $stime / $pttime" |bc -l`
echo "Speedup: "$spdp", parallel time: "$pttime", serial time: "$stime
spdp_line="${spdp_line}${spdp},"
done
echo $proc_line|sed 's/,$//' >> $opf_np
echo $thrd_line|sed 's/,$//' >> $opf_nt
echo $spdp_line|sed 's/,$//' >> $opf_ns
done
done
| true |
dad6403bca225a034b850e87396f0e5b14074130 | Shell | davemq/biobuilds | /zip/3.0/build.sh | UTF-8 | 1,686 | 3.5625 | 4 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | #!/bin/bash
set -o pipefail
# Pull in the common BioBuilds build flags
BUILD_ENV="${PREFIX}/share/biobuilds-build/build.env"
if [[ ! -f "${BUILD_ENV}" ]]; then
echo "FATAL: Could not find build environment configuration script!" >&2
exit 1
fi
source "${BUILD_ENV}" -v
# Copying the needed bzip2 files over seems way easier than trying to convince
# zip's build system to look in ${PREFIX}/include and ${PREFIX}/lib.
cp -fv "${PREFIX}/include/bzlib.h" "${SRC_DIR}/bzip2"
cp -fv "${PREFIX}/lib/libbz2.a" "${SRC_DIR}/bzip2"
# Replace the original CFLAGS we squash by supplying ours to `configure`
CFLAGS="${CFLAGS} -Wall -I. -DUNIX"
# Enable mmap() calls for faster compression (at the cost of more memory used)
#
# ** WARNING **: Do NOT actually enable; doing so causes compilation to fail
# with "'Bytef' undeclared" errors in "zipup.c".
#CFLAGS="${CFLAGS} -DMMAP"
# `configure` should automatically enable support for 64-bit file system calls
# (-DLARGE_FILE_SUPPORT), but we must explicitly enable tell the build system
# we want binaries that support archive operations on large (>4-GiB) files.
CFLAGS="${CFLAGS} -DZIP64_SUPPORT"
# `configure` should automatically enable support for UTF-8 paths since all the
# platforms/compilers we support have a wide character (wchar_t) type.
#CFLAGS="${CFLAGS} -DUNICODE_SUPPORT"
# Store univeral time in an extra field so zip/unzip don't have problems when
# files move across time zones or when daylight savings time changes.
CFLAGS="${CFLAGS} -DUSE_EF_UT_TIME"
LDFLAGS="${LDFLAGS}" sh unix/configure "${CC}" "${CFLAGS}"
make -f unix/Makefile generic
make -f unix/Makefile install prefix="${PREFIX}"
rm -rf "${PREFIX}/man"
| true |
956f9e24d8c5d6711b70cc390830bd18e38b8198 | Shell | pointtonull/magic4linespatch | /magic4linespatch.sh | UTF-8 | 1,913 | 3.375 | 3 | [] | no_license | #!/bin/sh
set -e
test "$(/usr/bin/id -u)" = 0 ||
{
echo "I want to be root!!"
exit 1
}
grep -q 4linespatch /etc/rc.local /etc/profile &&
{
echo "4linespatch seems to be installed"
exit 2
}
echo "Saving a backup of /etc/rc.local in /etc/rc.local.back"
cp /etc/rc.local /etc/rc.local.back
echo "Editing /etc/rc.local"
awk '
BEGIN{
error = 1
}
!/^exit 0$/{
print $0
}
/^exit 0$/{
print "#<4linespatch>"
print "mkdir -p /dev/cgroup/cpu"
print "mount -t cgroup cgroup /dev/cgroup/cpu -o cpu"
print "mkdir -m 0777 /dev/cgroup/cpu/user"
printf "echo \"/usr/local/sbin/cgroup_clean\""
print "> /dev/cgroup/cpu/release_agent"
print "#</4linespatch>"
print
print $0
error = 0
}
END{
exit error
}
' /etc/rc.local.back > /etc/rc.local
echo "Setting /etc/rc.local executing flags"
chmod 755 /etc/rc.local
echo "Saving a backup of /etc/profile in /etc/profile.back"
cp /etc/profile /etc/profile.back
echo "Editing /etc/profile"
awk '
/./
END{
print "#<4linespatch>"
print "if [ \"$PS1\" ] ; then"
print " mkdir -p -m 0700 /dev/cgroup/cpu/user/$$ > /dev/null 2>&1"
print " echo $$ > /dev/cgroup/cpu/user/$$/tasks"
print " echo \"1\" > /dev/cgroup/cpu/user/$$/notify_on_release"
print "fi"
print "#</4linespatch>"
}
' /etc/profile.back > /etc/profile
echo "Writing /usr/local/sbin/cgroup_clean"
awk '
BEGIN{
print "#!/bin/sh"
print "#4linespatch"
print "rmdir /dev/cgroup/cpu/$*"
exit
}
' > /usr/local/sbin/cgroup_clean
echo "Setting /usr/local/sbin/cgroup_clean executing flags"
chmod 755 /usr/local/sbin/cgroup_clean
echo "Starting cgroup"
/etc/rc.local
echo "Ready xD, you should restart your session to apply changes."
| true |
e7c899b79753306b44f4e387c6dae3ae30178910 | Shell | TsuyoshiUshio/volley | /script/get_volley.sh | UTF-8 | 1,966 | 3.96875 | 4 | [] | no_license | #!/bin/bash
# Install Script for volley for linux/mac/windows (GitBash) enviornment
# Execute this script with user that sudo is available
# Download volley binary
# https://github.com/TsuyoshiUshio/volley/releases/download/0.0.3/volley-linux-amd64.tgz
VERSION="0.0.6"
BINARY_TYPE=""
ARCHIVE=""
BINARY=""
INSTALL_PATH=/usr/bin
if [ "$(uname)" == "Darwin" ]; then
# Do something under Mac OS X platform
BINARY_TYPE="volley-darwin-amd64"
ARCHIVE="${BINARY_TYPE}.tgz"
BINARY="${BINARY_TYPE}/volley"
elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then
# Do something under GNU/Linux platform
BINARY_TYPE="volley-linux-amd64"
ARCHIVE="${BINARY_TYPE}.tgz"
BINARY="${BINARY_TYPE}/volley"
elif [ "$(expr substr $(uname -s) 1 10)" == "MINGW32_NT" ]; then
# Do something under 32 bits Windows NT platform
BINARY_TYPE="volley-windows-386"
ARCHIVE="${BINARY_TYPE}.zip"
BINARY="${BINARY_TYPE}/volley.exe"
elif [ "$(expr substr $(uname -s) 1 10)" == "MINGW64_NT" ]; then
# Do something under 64 bits Windows NT platform
BINARY_TYPE="volley-windows-amd64"
ARCHIVE="${BINARY_TYPE}.zip"
BINARY="${BINARY_TYPE}/volley.exe"
fi
if type curl > /dev/null 2>&1; then
echo "curl exist."
else
# Linux only
sudo apt-get update -y
sudo apt-get install curl -y
fi
echo "Downloading ... : curl -OL https://github.com/TsuyoshiUshio/volley/releases/download/${VERSION}/${ARCHIVE}"
curl -OL https://github.com/TsuyoshiUshio/volley/releases/download/${VERSION}/${ARCHIVE}
if [ "$(expr substr $(uname -s) 1 5)" == "MINGW" ]; then
# windows (GitBash) based
unzip $ARCHIVE
# TODO put exe file in somewhere already have a path or create directry and add path to the .bashrc
else
# Linux and Mac
tar xvzf $ARCHIVE
if [ -f /usr/bin/volley ]; then
# remove old version
sudo rm /usr/bin/volley -f
fi
sudo cp $BINARY /usr/bin
rm -rf $BINARY_TYPE
rm $ARCHIVE
fi
| true |
2b56ba86a050d1e4fa72d8d42920b65756b7e459 | Shell | ematan/url-shortener | /test_all.bash | UTF-8 | 625 | 3.78125 | 4 | [] | no_license | #! /usr/bin/env bash
export NODE_ENV=test
run_tests() {
jest --verbose --runInBand -- "${@}" \
&& { echo "[PASS]"; } \
|| { echo "[FAIL]"; exit 1; }
}
sleep_for() {
local secs="${1}"
while test ${secs} -gt 0; do
echo -ne "\rsleeping ${secs} s\033[0K"
sleep 1
secs=$((secs-1))
done
}
run_all() {
run_tests test/unit-tests.test.js test/integration-tests.test.js
sleep_for 120
run_tests test/ttl.test.js
}
if test $# -eq 0; then
run_all
else
for f in "${@}"; do
echo "${f}"
test -f "${f}" || { echo "[ERROR] '${f}' does not exist"; exit 1; }
done
run_tests "${@}"
fi
| true |
cadc2b56db62deb4f53626536a87c2010b744cab | Shell | rodrigo-id/packer-jmeter | /conf/install-master.sh | UTF-8 | 2,634 | 2.921875 | 3 | [] | no_license | #!/bin/bash
#Repo de Ansible
apt-add-repository -y ppa:ansible/ansible
#Repo de Influxdb
curl -sL https://repos.influxdata.com/influxdb.key | sudo apt-key add -
source /etc/lsb-release
echo "deb https://repos.influxdata.com/${DISTRIB_ID,,} ${DISTRIB_CODENAME} stable" | sudo tee /etc/apt/sources.list.d/influxdb.list
add-apt-repository -y ppa:chris-lea/redis-server
apt update
apt install -y influxdb influxdb-client adduser libfontconfig redis-server python-simplejson ansible
#Instalacion del Java
add-apt-repository -y ppa:webupd8team/java
apt update
echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections
apt-get install -y --allow-unauthenticated oracle-java8-installer python-simplejson
systemctl enable influxdb.service
systemctl start influxdb.service
sed -i 's/^bind/\#bind/g' /etc/redis/redis.conf
systemctl restart redis-server.service
#Install chronograf
echo "407d5fe18ebdb525e971d8ddbcbd9b0895c112e8cf562555d93b98e8788679c3 chronograf_1.4.3.0_amd64.deb" >sha256sum.txt
wget 'https://dl.influxdata.com/chronograf/releases/chronograf_1.4.3.0_amd64.deb'
sha256sum -c sha256sum.txt && dpkg -i chronograf_1.4.3.0_amd64.deb
systemctl enable chronograf
#Install grafana
wget 'https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.0.4_amd64.deb' && dpkg -i grafana_5.0.4_amd64.deb
systemctl enable grafana-server.service
systemctl start grafana-server.service
until $(curl --output /dev/null --silent --head --fail localhost:3000/api/health); do
#echo 'Iniciando grafana ...'
printf '.'
sleep 3
done
#Crear db en InfluxDB
curl -G http://localhost:8086/query --data-urlencode "q=CREATE USER admin WITH PASSWORD 'admin123' WITH ALL PRIVILEGES"
curl -G http://localhost:8086/query --data-urlencode "q=CREATE USER jmeter WITH PASSWORD 'jmeter123' WITH ALL PRIVILEGES"
curl -G http://localhost:8086/query --data-urlencode "q=CREATE DATABASE jmeter_test"
curl -X POST -H "Content-Type: application/json" -d '
{"name":"jmeter",
"type":"influxdb",
"url":"http://localhost:8086",
"access":"proxy",
"database":"jmeter_test",
"user":"",
"password":""}' http://admin:admin@localhost:3000/api/datasources
curl -i -u admin:admin -H "Content-Type: application/json" -X POST http://localhost:3000/api/dashboards/db -d @/tmp/jmeter-load-test.json
curl -X PUT -H "Content-Type: application/json" -d '
{"oldPassword": "admin",
"newPassword": "admin123",
"confirmNew": "admin123"}' http://admin:admin@localhost:3000/api/user/password
cat /tmp/nodes >> /etc/hosts
echo '
Host *
StrictHostKeyChecking no
User ubuntu' > /root/.ssh/config
| true |
1c3e5aee55c01d30a3fa92c7e2087935e2f81476 | Shell | androidexpert35/XTREMEMusic-magisk-official | /system/etc/pm/sleep.d/90alsa | UTF-8 | 140 | 2.53125 | 3 | [] | no_license | #!/bin/sh
case "$1" in
hibernate|suspend)
;;
thaw|resume)
cd /system/bin/
aplay -d 1 /dev/zero
;;
*) exit $NA
;;
esac
chmod 666 /dev/snd/* | true |
7076faa70515e26bfd43ebb64b86522debfd824c | Shell | ragnarkurm/surveillancer | /bin/cache-init | UTF-8 | 1,057 | 3.65625 | 4 | [] | no_license | #!/bin/bash
if /usr/bin/test "$#" -ne 1 -o ! -r "$1"; then
echo "Usage: $0 <conf>"
exit
fi
source "$1"
source "$LIB/lib.sh"
if [ -d "$CACHE" ]; then
echo "Please remove cache manually: $CACHE"
exit
fi
mkdir -p "$CACHE"
for HOUR in {0..23}
do
HOUR0=$(printf '%02d' "$HOUR")
mkdir "$CACHE/$HOUR0"
for MIN in {0..59}
do
MIN0=$(printf '%02d' "$MIN")
for SEC in {0..59}
do
continue
if [ $(($SEC % $INTERVAL)) -ne 0 ]; then
continue
fi
SEC0=$(printf '%02d' "$SEC")
IMAGE="$CACHE/$HOUR0/$MIN0-$SEC0.png"
ANNOTATION="$HOUR0:$MIN0"
if [ "$ANNOTATION" = "$PREV_ANNOTATION" ]; then
ln -s $(basename "$PREV_IMAGE") "$IMAGE"
else
convert \
-size "$GEOM" \
xc:black \
$TEXTOPTS \
-annotate 0 "$ANNOTATION" \
"$IMAGE"
fi
PREV_ANNOTATION="$ANNOTATION"
PREV_IMAGE="$IMAGE"
done
echo -ne "\r" $(printf '%d%%' $(( (100*$HOUR*60+$MIN) / (23*60+59) )))
done
done
echo -e "\rDone"
| true |
a378e80dc89a273c5ce9e7e04685ccb07cdf8c3a | Shell | kostrzewa/jobscripts | /generators/DDalphaAMG_tune/L64/DDalphaAMG_extract_timings.sh | UTF-8 | 2,686 | 2.921875 | 3 | [] | no_license | L=64
T=128
nds=(16 32 64 128)
lvls=(3)
iters=(5)
mucoarse=(3.0 4.0 5.0 6.0 7.0 8.0 9.0 10.0 11.0 12.0)
nvecs=(16 24)
test_run=0
nmeas=2
base_ofile=timings.dat
base_mfile=missing.dat
dt=$(date)
for nd in ${nds[@]}; do
ofile=nds${nd}_${base_ofile}
mfile=nds${nd}_${base_mfile}
echo "# $dt" > $ofile
echo "L T nds setupiter lvl nvec mucoarse setuptime setupcgrid solvetime solvecgrid solvefiter solveciter" >> $ofile
echo "# $dt" > $mfile
for iter in ${iters[@]}; do
for lvl in ${lvls[@]}; do
for nvec in ${nvecs[@]}; do
for muc in ${mucoarse[@]}; do
job=nds${nd}_iters${iter}_nlevel${lvl}_nvec${nvec}_mucoarse${muc}
wdir=$(pwd)/${job}
sfile=$wdir/outputs/${job}.64.L64.DDalphaAMG.test.out
if [ ! -f $sfile ]; then
echo $wdir >> $mfile
continue
else
echo $sfile
fi
setuptime=$(grep "setup ran" $sfile | awk '{print $5}')
setupcgrid=$(grep "setup ran" $sfile | awk '{print $7}' | sed 's/(//g')
if [ -z ${setuptime} ]; then
if [ $test_run -eq 1 ]; then
exit 1
fi
echo $wdir >> $mfile
continue
fi
temp=( $(grep "Solving time" $sfile | awk '{print $3}') )
solvetime=0
for num in ${temp[@]}; do
solvetime=$( echo "scale=2; $solvetime + $num" | bc -l )
done
solvetime=$( echo "scale=2; $solvetime / $nmeas" | bc -l )
temp=( $(grep "Solving time" $sfile | awk '{print $5}' | sed 's/(//g') )
solvecgrid=0
for num in ${temp[@]}; do
solvecgrid=$( echo "scale=2; $solvecgrid + $num" | bc -l )
done
solvecgrid=$( echo "scale=2; $solvecgrid / $nmeas" | bc -l )
temp=( $(grep "Total iterations on fine grid" $sfile | awk '{print $6}') )
solvefiter=0
for num in ${temp[@]}; do
solvefiter=$( echo "scale=2; $solvefiter + $num" | bc -l )
done
solvefiter=$( echo "scale=2; $solvefiter / $nmeas" | bc -l )
temp=( $(grep "Total iterations on coarse grids" $sfile | awk '{print $6}') )
solveciter=0
for num in ${temp[@]}; do
solveciter=$( echo "scale=2; $solveciter + $num" | bc -l )
done
solveciter=$( echo "scale=2; $solveciter / $nmeas" | bc -l )
echo "$L $T $nd $iter $lvl $nvec $muc $setuptime $setupcgrid $solvetime $solvecgrid $solvefiter $solveciter" >> $ofile
if [ $test_run -eq 1 ]; then
exit 0
fi
done
done
done
done
done
| true |
19e9102ac909829402d8175ba99bf4a6d9d5eb01 | Shell | TMLKyza/x230i3 | /script.sh | UTF-8 | 1,468 | 3.25 | 3 | [] | no_license | #!/bin/sh
dotsloc=$(pwd)
cmd="sed s\|DOTS_LOC.\*\|DOTS_LOC="$dotsloc"\|g -i .zshrc"
eval $cmd
depends=$(cat script.conf | grep -v "#" | grep '£' | cut -c4-)
aur=$(cat script.conf | grep -v '#' | grep '§' | cut -c4-)
if [ "$(uname -r | cut -f 4 -d '-')" == "ARCH" ]; then
sudo pacman -S $depends ;
for i in $aur; do
mkdir gitrepos
cd gitrepos
gitrepo=$(echo $i|cut -f2 -d';')
dirname=$(echo $i|cut -f1 -d';')
cmd="git clone "$gitrepo
echo $cmd
eval $cmd
cmd="cd "$dirname
echo $cmd
eval $cmd
cmd="makepkg -sic"
echo $cmd
eval $cmd
done
else
echo "install:"$depends
for i in $aur; do
gitrepo=$(echo $i|cut -f2 -d';')
dirname=$(echo $i|cut -f1 -d';')
echo "download and compile: "$dirname"@"$gitrepo
done
fi
cmd="cd "$dotsloc
echo $cmd
eval $cmd
gitdirs=$(cat script.conf | grep -v '#' | grep '%' | cut -c3-)
for i in $gitdirs; do
cmd="cd "$(echo $i | cut -d';' -f1)
echo $cmd
eval $cmd
cmd="git clone "$(echo $i | cut -d';' -f2)
echo $cmd
eval $cmd
done
sudo rm -r $dotsloc/gitrepos
cmd="cd "$dotsloc
echo $cmd
eval $cmd
files=$(cat script.conf | grep -v "#" | grep ">")
for i in $files;
do
source=$(echo $i |cut -f1 -d'>')
dest=$(echo $i|cut -f2 -d'>')
cmd="sudo rsync -arog "$source" "$dest
echo $cmd
eval $cmd
cmd="sudo chown -R $USER "$dest""$source" & sudo chgrp -R "$(id -g -n)" "$dest""$source
echo $cmd
eval $cmd
done
sudo sed 's/#%PAM-1.0/#%PAM-1.0\nauth\tsufficient\tpam_fprintd.so/g' -i /etc/pam.d/*
| true |
d8be3bf88772763f55156ceb19ec4be45d15a75c | Shell | qiuyue77/dotfiles | /tag-zsh_nb/zshrc | UTF-8 | 7,096 | 2.8125 | 3 | [] | no_license | # If you come from bash you might have to change your $PATH.
# export PATH=$HOME/bin:/usr/local/bin:$PATH
# Path to your oh-my-zsh installation.
export ZSH="/home/qiujl/.oh-my-zsh"
export EDITOR="/usr/bin/nvim"
export VISUAL="/usr/bin/nvim"
# Set name of the theme to load --- if set to "random", it will
# load a random theme each time oh-my-zsh is loaded, in which case,
# to know which specific one was loaded, run: echo $RANDOM_THEME
# See https://github.com/ohmyzsh/ohmyzsh/wiki/Themes
ZSH_THEME="ys"
# Set list of themes to pick from when loading at random
# Setting this variable when ZSH_THEME=random will cause zsh to load
# a theme from this variable instead of looking in ~/.oh-my-zsh/themes/
# If set to an empty array, this variable will have no effect.
# ZSH_THEME_RANDOM_CANDIDATES=( "robbyrussell" "agnoster" )
# Uncomment the following line to use case-sensitive completion.
# CASE_SENSITIVE="true"
# Uncomment the following line to use hyphen-insensitive completion.
# Case-sensitive completion must be off. _ and - will be interchangeable.
# HYPHEN_INSENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to automatically update without prompting.
# DISABLE_UPDATE_PROMPT="true"
# Uncomment the following line to change how often to auto-update (in days).
# Check for oh-my-zsh updates every 13 days.
export UPDATE_ZSH_DAYS=13
# Uncomment the following line if pasting URLs and other text is messed up.
# DISABLE_MAGIC_FUNCTIONS=true
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to enable command auto-correction.
# ENABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
# COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
# DISABLE_UNTRACKED_FILES_DIRTY="true"
# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# You can set one of the optional three formats:
# "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
# or set a custom format using the strftime function format specifications,
# see 'man strftime' for details.
# Timestamp format shown by the `history` command.
HIST_STAMPS="yyyy-mm-dd"
# Would you like to use another custom folder than $ZSH/custom?
# ZSH_CUSTOM=/path/to/new-custom-folder
# Which plugins would you like to load?
# Standard plugins can be found in ~/.oh-my-zsh/plugins/*
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
# oh-my-zsh plugins to load (kept short -- every plugin adds startup time).
plugins=(
git
zsh-autosuggestions
zsh-syntax-highlighting
sudo
web-search
docker-compose
)
# Load oh-my-zsh itself with the settings above.
source $ZSH/oh-my-zsh.sh
# User configuration
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
# export EDITOR='vim'
# else
# export EDITOR='mvim'
# fi
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
# alias zshconfig="mate ~/.zshrc"
# alias ohmyzsh="mate ~/.oh-my-zsh"
# --- Convenience aliases -----------------------------------------------------
alias q='exit'
alias c='clear'
# Fix: the target was misspelled 'ipthon', which is not a command.
alias ipy='ipython'
alias l='ls -la'
alias ra='ranger'
alias r='echo $RANGER_LEVEL'
alias s='neofetch'
alias sra='sudo -E ranger'
alias sudo='sudo -E'
alias cp='cp -i'
alias vim='nvim'
alias cat='ccat'
# GUI file manager
alias folder='nautilus'
# alias vi='vim'
# Dual-monitor extended display - new (HDMI output to the right of LVDS)
alias ac='xrandr --output LVDS1 --auto --output HDMI1 --auto --right-of LVDS1'
# Dual-monitor extended display - old (VGA output)
alias ac1='xrandr --output LVDS1 --auto --output VGA1 --auto --right-of LVDS1'
alias voice='pavucontrol'
# Launch TIM under Deepin Wine
# nohup /usr/lib/gsd-xsettings > /dev/null 2>&1 &
alias tim='/opt/deepinwine/apps/Deepin-TIM/run.sh'
# z.lua: fast directory-jumping plugin
eval "$(lua ~/.config/z.lua/z.lua --init zsh enhanced once echo)"
# Enable vi keybindings on the zsh command line.
bindkey -v
# Custom rotated movement layout for vicmd mode: j/l move left/right,
# i/k move up/down through history; h/a enter insert mode (H at line start,
# A at line end), J/L jump to beginning/end of line.
bindkey -M vicmd "h" vi-insert
bindkey -M vicmd "H" vi-insert-bol
bindkey -M vicmd "a" vi-add-next
bindkey -M vicmd "A" vi-add-eol
bindkey -M vicmd "j" vi-backward-char
bindkey -M vicmd "l" vi-forward-char
bindkey -M vicmd "J" vi-beginning-of-line
bindkey -M vicmd "L" vi-end-of-line
bindkey -M vicmd "k" down-line-or-history
bindkey -M vicmd "i" up-line-or-history
# u undoes the last edit; = repeats the last search.
bindkey -M vicmd "u" undo
#bindkey -M vicmd "-" vi-rev-repeat-search
bindkey -M vicmd "=" vi-repeat-search
bindkey -M vicmd "b" vi-backward-word
# zle widget: switch the terminal cursor shape to match the active keymap.
# Invoked by zle on every keymap change; $1 may carry an explicit shape
# hint ('block' or 'beam') when called manually.
function zle-keymap-select {
  case "${KEYMAP}:${1-}" in
    vicmd:* | *:block)
      # command mode -> block cursor
      printf '\033[1 q'
      ;;
    main:* | viins:* | :* | *:beam)
      # insert mode (or empty keymap) -> beam cursor
      printf '\033[5 q'
      ;;
  esac
}
# Register the widget above so zle runs it on keymap changes.
zle -N zle-keymap-select
# Use beam shape cursor on startup.
echo -ne '\e[5 q'
# Use beam shape cursor for each new prompt.
# zsh hook: runs just before an accepted command line executes.
preexec() {
    printf '\033[5 q'
}
# Reset the cursor to beam shape; registered as a precmd hook so it runs
# before every prompt is drawn.
_fix_cursor() {
    printf '\033[5 q'
}
# Run _fix_cursor before each prompt.
precmd_functions+=(_fix_cursor)
zle -N zle-line-init
# NOTE(review): zle-keymap-select was already registered above; this second
# registration is redundant but harmless.
zle -N zle-keymap-select
# Shorten the delay (units of 10ms) when switching vi modes.
KEYTIMEOUT=1
# fzf
export FZF_DEFAULT_OPTS='--bind ctrl-k:down,ctrl-i:up --preview "[[ $(file --mime {}) =~ binary ]] && echo {} is a binary file || (ccat --color=always {} || highlight -O ansi -l {} || ccat {}) 2> /dev/null | head -500"'
export FZF_DEFAULT_COMMAND='ag --hidden --ignore .git -g ""'
export FZF_COMPLETION_TRIGGER='\'
export FZF_TMUX_HEIGHT='80%'
export FZF_PREVIEW_COMMAND='[[ $(file --mime {}) =~ binary ]] && echo {} is a binary file || (ccat --color=always {} || highlight -O ansi -l {} || ccat {}) 2> /dev/null | head -500'
source ~/.config/zsh/key-bindings.zsh
source ~/.config/zsh/completion.zsh
# ranger
export RANGER_LOAD_DEFAULT_RC=FALSE
# zsh-autosuggestions: key that accepts the current suggestion
bindkey ',' autosuggest-accept
# proxy
# Route HTTP/HTTPS/FTP traffic through the local proxy on port 8118.
setproxy () {
    local scheme
    for scheme in http https ftp; do
        export "${scheme}_proxy=http://127.0.0.1:8118"
    done
}
# Undo setproxy: clear the per-scheme proxy variables.
unsetproxy () {
    unset http_proxy https_proxy ftp_proxy
}
# Point ALL_PROXY/all_proxy at the local SOCKS5 proxy, then show the
# resulting public IP via ip.gs.
proxy () {
    local socks="socks5://127.0.0.1:1080"
    export ALL_PROXY="$socks" all_proxy="$socks"
    curl https://ip.gs
}
# Clear the SOCKS proxy settings, then show the (direct) public IP.
noproxy () {
    unset ALL_PROXY all_proxy
    curl https://ip.gs
}
# git proxy
# Make git's HTTP(S) transports go through the local SOCKS5 proxy.
gitproxy () {
    local key
    for key in http.proxy https.proxy; do
        git config --global "$key" 'socks5://127.0.0.1:1080'
    done
}
# Remove the global git proxy settings again.
gitnoproxy () {
    local key
    for key in http.proxy https.proxy; do
        git config --global --unset "$key"
    done
}
# Pyenv
# Put pyenv's launcher on PATH and enable its shell integration.
export PYENV_ROOT="$HOME/.pyenv"
export PATH="$PYENV_ROOT/bin:$PATH"
if command -v pyenv 1>/dev/null 2>&1; then
  eval "$(pyenv init -)"
fi
# NOTE(review): runs unconditionally, unlike the guarded init above; this
# errors at startup when pyenv/pyenv-virtualenv is not installed -- confirm.
eval "$(pyenv virtualenv-init -)"
| true |
20320119666a2f25b5691648c3e49bd9f8cf25c1 | Shell | mhristof/dotfiles | /sshi | UTF-8 | 426 | 3.921875 | 4 | [] | no_license | #!/usr/bin/env bash
# Fail fast: abort on any error (-e) or unset variable (-u), and make
# pipelines fail when any stage fails (pipefail).
set -euo pipefail
# Print an error message to stderr and abort the script with status 1.
die() {
    printf '%s\n' "$*" >&2
    exit 1
}
# First argument: the EC2 instance ID to connect to.
ID="${1:-}"

if [[ -z $ID ]]; then
	die "please provide the instance ID, for example 'i-0a393a33dbaa0e1d7'"
fi

# Look up the instance's public DNS name. Note: jq -r prints the literal
# string "null" (not an empty string) when the field is absent, so both
# cases must be checked below.
IP=$(aws ec2 describe-instances --instance-ids "$ID" | jq '.Reservations[0].Instances[0].PublicDnsName' -r)

if [[ -z $IP || $IP == "null" ]]; then
	die "Error, instance [$ID] doesn't have a PublicDnsName"
fi

ssh -oStrictHostKeyChecking=no "$IP"
| true |
debf33288f94023c4b68a8279a038947d354457e | Shell | julp/banip | /tests/ipset.sh | UTF-8 | 513 | 3.15625 | 3 | [] | no_license | #!/bin/bash
# Absolute directory containing this test script.
declare -r TESTDIR=$(dirname $(readlink -f "${BASH_SOURCE}"))
# Shared test helpers; expected to provide skipUnlessBinaryExists,
# assertExitValue, $TRUE and $PFBAN_TEST_TABLE used below -- see assert.sh.inc.
. ${TESTDIR}/assert.sh.inc
skipUnlessBinaryExists ipset
# The ipset tool needs kernel support; skip (not fail) when the running
# kernel was built without CONFIG_IP_SET.
if ! zgrep -q ^CONFIG_IP_SET /proc/config.gz; then
    printf "%s: [ \e[%d;01m%s\e[0m ] %s\n" `basename $0` 33 SKIPPED "(kernel compiled without CONFIG_IP_SET option)"
    exit $TRUE
fi
# Run the ipset backend under test, ensure the test set exists (-! ignores
# "already exists"), then check 1.2.3.4 is in the set -- presumably added by
# pftest, TODO confirm -- and tear the set down.
${TESTDIR}/../pftest ipset
ipset -! create ${PFBAN_TEST_TABLE} hash:net family inet
assertExitValue "ipset" "ipset test ${PFBAN_TEST_TABLE} 1.2.3.4" $TRUE
ipset destroy ${PFBAN_TEST_TABLE}
| true |
15d727bfce21d8746d4b40adecb0eaf3e3ff2953 | Shell | oniondai/pkg-iso | /iso/extras/tools/clear_swift.sh | UTF-8 | 180 | 2.796875 | 3 | [] | no_license | #!/bin/bash
set -o xtrace
set -e

# Remove any stale Kolla swift configuration.
rm -rf /etc/kolla/config/swift

# Wipe the first 1 GiB of every device named on the command line (e.g.
# "sdb sdc"). Iterating over "$@" instead of the unquoted DISK_LIST=$*
# avoids accidental word-splitting/globbing of the arguments.
for dev in "$@"; do
    echo "$dev"
    dd if=/dev/zero of="/dev/$dev" bs=1M count=1024
done
| true |
08cfceffe26ede5539a8dab0b2130503b3a0481a | Shell | CooperWallace/Notemanager | /notemgr | UTF-8 | 4,457 | 4.59375 | 5 | [] | no_license | #!/usr/bin/env bash
#
# Maintains a list of Courses that are currently active on the system. Allowing
# for the user to easily access information and file path to the specified
# Course
#
# Written by: Cooper Wallace
################################################################################
# Loading Library for shared functions contained in 'notemgr-lib'
# (expected to provide course_exists, get_course_directory, init_db and the
# other helpers called later in this script).
SCRIPT_PATH=$(dirname "$(readlink -f "$0")")
LIB_PATH="${SCRIPT_PATH}/notemgr-lib"
if [[ -f "${LIB_PATH}" ]]; then
	source "${LIB_PATH}"
else
	echo "Couldn't find the notemgr library file."
	echo "Please find it and add it to the same folder as this script."
	echo "Exitting..."
	exit 1
fi
# Initialize Configuration file containing Database
init_db
################################################################################
# Display the Usage Documentation message on stdout.
# Fix: "Desription:" was misspelled in the user-facing help text.
usage() {
	echo -n "Usage: $0 [OPTIONS]
$0 Display all non-archived courses
$0 [Course] Display all Lectures in course
$0 [Course] [Lecture] Print path to the specific lecture
Description:
Help maintain and manage Notes for the specified courses on the Users system.
Options:
-h Display usage message
-l List all Courses in database, including those archived.
-c Course Create a new Lecture for the course
-g Course Display the path to the course
-a Course Display whether course has been archived
"
}
# Print every Lecture file belonging to the given course, one per line,
# in natural version order.
# Arguments:
#   $1 - Course name
# Exits 1 when the course is unknown or lacks a Lecture/ subdirectory.
handle_display_lectures () {
	local course="$1"
	local lecture_dir

	if ! course_exists "$course"; then
		echo "The course $course doesn't exist in the database."
		exit 1
	fi

	# Lectures live in the "Lecture/" subdirectory of the course folder.
	lecture_dir="$(get_course_directory "$course")Lecture/"

	if [[ ! -e $lecture_dir ]]; then
		echo "Sub directory 'Lecture' doesnt exist for this course."
		echo "Please create it."
		exit 1
	fi

	ls "$lecture_dir" | grep "Lecture" | sort --version-sort
}
# Compute and print the filename for the next new Lecture of a course.
# Arguments:
#   $1 - Course name
# Exits 1 when the course is unknown or archived.
handle_create_lecture () {
	local course="$1"
	local count dir

	if ! course_exists "$course"; then
		echo "The course $course doesn't exist in the database."
		echo "Cannot create Lecture for a non-existant course"
		exit 1
	fi

	if is_course_archived "$course"; then
		echo -n "The course $course has been archived."
		echo " New lectures cannot be added to it."
		exit 1
	fi

	# Number of existing lectures; the next one is count+1.
	count=$(handle_display_lectures "$course" | wc -l )
	dir=$(get_course_directory "$course")

	printf "%sLecture/Lecture%d.md\n" "$dir" $((count+1))
}
# Print the full path to a specific Lecture inside a course.
# Arguments:
#   $1 - Course name
#   $2 - Lecture file name
# Exits 1 when either the course or the lecture does not exist.
handle_display_lecture_path() {
	local course="$1" lecture="$2"
	local dir

	if ! course_exists "$course"; then
		echo "The course $course doesn't exist in the database."
		exit 1
	fi

	if ! lecture_exists "$course" "$lecture"; then
		echo "The Lecture, $lecture was not found"
		exit 1
	fi

	dir=$(get_course_directory "$course")
	# No trailing newline, matching the original output format.
	printf "%sLecture/%s" "$dir" "$lecture"
}
# Report whether a course has been archived.
# Arguments:
#   $1 - Course name
# Exits 1 (with a message) when the course is missing or archived,
# 0 when it exists and is still active.
handle_display_archived () {
	local course="$1"

	if ! course_exists "$course"; then
		echo "The course $course doesn't exist in the database."
		exit 1
	fi

	if is_course_archived "$course"; then
		echo -n "The course $course has been archived."
		exit 1
	fi

	exit 0
}
################################################################################
# Entry point: option dispatch. Each recognized flag performs its action and
# exits immediately; positional-argument handling follows below.
while getopts "hlc:g:a:" o; do
	case "${o}" in
		g)
			# -g Course: print the course's directory path
			get_course_directory "${OPTARG}"
			exit
			;;
		c)
			# -c Course: print the path for the next new lecture file
			handle_create_lecture "${OPTARG}"
			exit
			;;
		a)
			# -a Course: report (via message/exit status) if course is archived
			handle_display_archived "${OPTARG}"
			exit
			;;
		l)
			# -l: list every course in the database, including archived ones
			get_all_courses
			exit
			;;
		* | h)
			# -h or any unrecognized option: show usage
			usage
			exit
			;;
	esac
done
shift $((OPTIND-1))
## Handle non-specific argument options
# 0 Args display courses
# 1 Args Display Lectures for Course
# 2 Args Display path to specified Lecture
if [[ $# == 0 ]]; then
	get_active_courses
# Otherwise Handle Course only when 1 argument
elif [[ $# == 1 ]]; then
	handle_display_lectures "$1"
elif [[ $# == 2 ]]; then
	handle_display_lecture_path "$1" "$2"
else
	usage
fi
| true |
bc71c37f0fad819f99fc7c2a80010073b8222480 | Shell | maxirmx/redirect-engine | /bootstrap.scripts/stage.3.sh | UTF-8 | 410 | 2.546875 | 3 | [] | no_license | export V_PROXIGEN=2020.11.16.00
cd ~/bootstrap \
&& wget https://github.com/facebook/proxygen/archive/v${V_PROXIGEN}.tar.gz -nv -O proxygen.tar.gz \
&& tar -xzf proxygen.tar.gz \
&& cd ~/bootstrap/proxygen-${V_PROXIGEN}/proxygen/ \
&& sed s/\-DCMAKE_INSTALL_PREFIX=\"\$DEPS_DIR\"/\-DCMAKE_INSTALL_PREFIX=\"\$PREFIX\"/ < build.sh > b.sh \
&& chmod +x b.sh \
&& ./b.sh -j 4 --prefix /usr/local && ./install.sh
| true |
818beb9d616230f932fda8b20dac949896fc9375 | Shell | olsonanl/p3_data | /dump_genome.sh | UTF-8 | 1,329 | 2.671875 | 3 | [] | no_license |
###############################################################################
#
# Purpose: Simple shell script to dump all the solr data for a genome
#
# Usage: dump_genome.sh 83332.12
#
###############################################################################
mkdir $1
pushd $1
wget "$PATRIC_SOLR/genome/select?q=genome_id%3A$1&wt=json&indent=false&omitHeader=true&rows=100000" -O genome.json
wget "$PATRIC_SOLR/genome_sequence/select?q=genome_id%3A$1&wt=json&indent=false&omitHeader=true&rows=100000" -O genome_sequence.json
wget "$PATRIC_SOLR/genome_feature/select?q=genome_id%3A$1+AND+annotation%3APATRIC&wt=json&indent=false&omitHeader=true&rows=100000" -O genome_feature_patric.json
wget "$PATRIC_SOLR/genome_feature/select?q=genome_id%3A$1+AND+annotation%3ARefSeq&wt=json&indent=false&omitHeader=true&rows=100000" -O genome_feature_refseq.json
wget "$PATRIC_SOLR/genome_feature/select?q=genome_id%3A$1+AND+annotation%3ABRC1&wt=json&indent=false&omitHeader=true&rows=100000" -O genome_feature_brc1.json
wget "$PATRIC_SOLR/pathway/select?q=genome_id%3A$1&wt=json&indent=false&omitHeader=true&rows=100000" -O pathway.json
wget "$PATRIC_SOLR/sp_gene/select?q=genome_id%3A$1&wt=json&indent=false&omitHeader=true&rows=100000" -O sp_gene.json
perl -pi -e 's/^{\n|^\s*"response".*"docs":|^\s*}}\n$//' *.json
popd $1
| true |
34b3fee24a10b51c254be112487bd823000a0568 | Shell | simuBT/foamyLatte | /icoFsiElasticNonLinULSolidFoam/TestCaseFSI-V1.6ext/linkSolidSolutions | UTF-8 | 398 | 2.71875 | 3 | [] | no_license | #!/bin/bash
cd fluid
for i in $(ls -I *.Open* -I constant -I system -I 0 -I log*);do
cd ../solid
ln -s ../fluid/$i/solid $i
cd ../fluid
done
cd ..
touch solid/solid.OpenFOAM
echo ...
echo Done linking solid solutions for use with paraFoam.
echo Run bash: "'paraFoam -case fluid'" and open file "'solid.OpenFoam'" from
echo solid directory for postprocessing of both domains.
echo ...
| true |
45263f48e73f70e3922a7075ef3c77c1afd5fc63 | Shell | jdblischak/conda-speed-tests | /scripts/test-scikit-learn-defaults.sh | UTF-8 | 533 | 2.9375 | 3 | [] | no_license | #!/bin/bash
set -eux
# Install scikit-learn from defaults
conda config --add channels defaults
conda create -n scikit-learn-defaults python
mkdir -p list/ log/ time/
/usr/bin/time -f "%e %U %S %P %M" -o time/scikit-learn-defaults-${VER}.txt \
conda install -vv -n scikit-learn-defaults scikit-learn \
2> log/scikit-learn-defaults-${VER}.txt
conda list -n scikit-learn-defaults > list/scikit-learn-defaults-${VER}.txt
# Cleanup
conda env remove -n scikit-learn-defaults
conda clean --all
conda config --remove-key channels
| true |
d3b6c65edfeedac14ba5a8a95b4475d0c7a0d00f | Shell | prodigeni/blackarch | /scripts/baclean | UTF-8 | 2,582 | 3.984375 | 4 | [] | no_license | #!/bin/bash
export LC_ALL=C
shopt -s extglob
site=blackarch.org
sitedir=/nginx/var/www
# override with -r
repo=blackarch-testing
# override with -u
user=$USER
we_started_ssh=false
usage() {
cat <<EOF
usage: baclean [-u <user>] [-r <repo>] [-h]
-u <usage> Set the ssh user.
-r <repo> Set the repo.
-h Display this message.
EOF
}
parse_args() {
while (( $# )) ; do
case "$1" in
-r)
repo=$2
shift
;;
-u)
user=$2
shift
;;
-h)
usage
exit
;;
*)
usage
exit 1
;;
esac
shift
done
}
cleanup() {
# Kill ssh-agent.
$we_started_ssh && kill "$SSH_AGENT_PID" 2> /dev/null >&2
}
start_ssh_agent() {
if [[ -z "$SSH_AGENT_PID" ]] ; then
echo 'starting ssh-agent...'
eval $(ssh-agent)
ssh-add
we_started_ssh=true
else
echo 'ssh-agent already started.'
fi
}
# This runs on the remote host.
# This is not meant to be efficient or portable.
remote_old_cleanup() {
echo 'cleaning old package files...'
for a in i686 x86_64 ; do
cd "$a"
echo "ARCH: $a"
for p in *.pkg.tar.xz ; do
[[ -e "$p" ]] || continue
pkgname=$(sed 's/-[^-]\+-[^-]\+-[^-]\+.pkg.tar.xz$//' <<< "$p")
matched=("$pkgname"-+([!-])-+([!-])-+([!-]).pkg.tar.xz)
for pm in "${matched[@]}" ; do
if grep -q '[0-9]:' <<< "$pm" ; then
echo "$pm"
else
# Since packages with epoches of zero do not include '0:'
# in their names, we have to force them to the top.
echo "00000000$pm"
fi
# Add signature files.
echo "$pm.sig"
done |
sort -V | head -n-1 |
sed 's/^00000000//' |
xargs -r rm -fv
done
cd ..
done
}
remote_removed_cleanup() {
echo 'cleaning removed package files...'
for a in i686 x86_64 ; do
cd "$a"
echo "ARCH: $a"
zgrep %NAME% blackarch.db.tar.gz --no-group-separator -a -A1 |
grep -v %NAME% > /tmp/package_list
for p in *.pkg.tar.xz ; do
pkgname=$(sed 's/-[^-]\+-[^-]\+-[^-]\+.pkg.tar.xz$//' <<< "$p")
grep -q "^$pkgname$" /tmp/package_list || rm -v "$p" "$p.sig"
done
cd ..
done
rm -f /tmp/package_list
}
main() {
echo >&2 'do not use this script.'
exit 1
parse_args "$@"
start_ssh_agent
echo "cleaning repo: $repo..."
# REMOVED
ssh -l "$user" blackarch.org bash <<EOF
shopt -s extglob
cd '$sitedir/blackarch/$repo/os'
$(declare -f remote_removed_cleanup)
remote_removed_cleanup
EOF
# OLD
# The user's shell must be bash.
# TODO: bash -c
ssh -l "$user" blackarch.org bash <<EOF
shopt -s extglob
cd '$sitedir/blackarch/$repo/os'
$(declare -f remote_old_cleanup)
remote_old_cleanup
EOF
}
main "$@"
| true |
3c1c6d2eb8eb6bae744747f7340807fcbb0c3805 | Shell | s3ni0r/spark-base | /scripts/deploy/deploy.sh | UTF-8 | 3,445 | 3.546875 | 4 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# debug
#set -x
# abort if any error occured
set -e
# disable globbing
#set -f
# disable splitting
#IFS=''
# raise an error when an undefined variable has been used
set -u
# abort when a pipe failed = bash only: doesn't work in POSIX sh
if [ "${BASH:-}" = '/bin/bash' ]; then set -o pipefail; fi
#############################
# ENV VARIABLES #
#############################
WEBHDFS_URL=${WEBHDFS_URL:?"You need to set up WEBHDFS_URL env variable (export WEBHDFS_URL=...)"}
WEBHDFS_USER=${TECHNICAL_USER:?"You need to set up TECHNICAL_USER env variable (export TECHNICAL_USER=...)"}
WEBHDFS_PASSWORD=${TECHNICAL_PASSWORD:?"You need to set up TECHNICAL_PASSWORD env variable (export TECHNICAL_PASSWORD=...)"}
DEFAULT_FOLDER_PERMISSION=750
BASE_HDFS_DIRECTORY=${BASE_HDFS_DIRECTORY:?"You need to set up BASE_HDFS_DIRECTORY env variable (export BASE_HDFS_DIRECTORY=...)"}
LOCAL_JOB_FOLDER=`realpath ${1:?"You need to specify local directory to sync into hdfs"}`
REMOTE_JOB_FOLDER=${BASE_HDFS_DIRECTORY}/${2:?"You need to specify a destination hdfs folder name"}
#############################
# TEMP VARS #
#############################
JOB_ID=""
#########################################
# create_hdfs_folder myJobFolder #
#########################################
create_folder(){
FOLDER_PATH=$1
# echo "$WEBHDFS_URL/$FOLDER_PATH?op=MKDIRS&permission=$DEFAULT_FOLDER_PERMISSION"
curl -i -k -u ${WEBHDFS_USER}:${WEBHDFS_PASSWORD} -X PUT "$WEBHDFS_URL/$FOLDER_PATH?op=MKDIRS&permission=$DEFAULT_FOLDER_PERMISSION"
}
upload_file()
{
HDFS_FILE_PATH=$1
LOCAL_FILE_PATH=$2
# echo "$WEBHDFS_URL/$HDFS_FILE_PATH?op=CREATE&overwrite=true"
# echo "$LOCAL_JOB_FOLDER/$LOCAL_FILE_PATH"
query=`curl -i -k -u ${WEBHDFS_USER}:${WEBHDFS_PASSWORD} -X PUT "$WEBHDFS_URL/$HDFS_FILE_PATH?op=CREATE&overwrite=true" | grep Location | awk -F ' ' '{print $2}' | sed 's/[\r\n]//g'`
curl -i -k -u ${WEBHDFS_USER}:${WEBHDFS_PASSWORD} -X PUT -T "$LOCAL_JOB_FOLDER/$LOCAL_FILE_PATH" "$query"
}
sync_job_dir(){
DIRECTORY=$1
for entry in $( find ${DIRECTORY});
do
if [ -f "$entry" ];then
# echo "this is a file ${entry#"$LOCAL_JOB_FOLDER/"}"
upload_file ${REMOTE_JOB_FOLDER}/${entry#"$LOCAL_JOB_FOLDER/"} ${entry#"$LOCAL_JOB_FOLDER/"}
fi
if [ -d "$entry" ];then
# echo "this is a folder ${entry#"$LOCAL_JOB_FOLDER/"}"
create_folder ${REMOTE_JOB_FOLDER}/${entry#"$LOCAL_JOB_FOLDER/"}
fi
done
}
launch_job(){
# echo "launch_job"
# echo ${LOCAL_JOB_FOLDER}/job.properties.xml
JOB_ID=`curl -k -u ${OOZIE_USER}:${OOZIE_PASSWORD} -H Content-Type:application/xml -T "${LOCAL_JOB_FOLDER}/job.properties.xml" -X POST "$OOZIE_URL/jobs?action=start" | jq ".id"`
sleep 5
}
display_logs(){
# echo "display_logs"
temp_job_id=${JOB_ID%\"}
proper_job_id=${temp_job_id#\"}
urls=`curl -k -u ${OOZIE_USER}:${OOZIE_PASSWORD} -X GET "$OOZIE_URL/job/$proper_job_id?show=info" | jq '.actions[].consoleUrl'`
for url in ${urls};
do
temp_url=${url%\"}
proper_url=${temp_url#\"}
if [[ ${proper_url} == http* ]];then
echo ${proper_url}
/usr/bin/open -a /Applications/Google\ Chrome.app $proper_url
fi
done
}
#####################
# MAIN #
#####################
create_folder $REMOTE_JOB_FOLDER
sync_job_dir "$LOCAL_JOB_FOLDER/*"
launch_job
display_logs | true |
9b3ce8c00e70bf97b48412faf5719bddfbe8e6cd | Shell | kmalinich/kdm-bash-env | /.kdm/profile.d/20-init | UTF-8 | 276 | 3.03125 | 3 | [
"MIT"
] | permissive | # Create directories if missing
for ENTRY in "${ARRAY_MKDIR[@]}"; do
[[ ! -d "${ENTRY}" ]] && mkdir -p "${ENTRY}"
done
# Touch files
for ENTRY in "${ARRAY_TOUCH[@]}"; do
[[ ! -e "${ENTRY}" ]] && touch "${ENTRY}"
done
echo -n
# vim: set filetype=sh ts=2 sw=2 tw=0 noet :
| true |
3973e9d93883c284dc826ef308252b03c01f72f6 | Shell | epitron/scripts | /arch-add-sublime-repo.sh | UTF-8 | 521 | 3.375 | 3 | [] | no_license | ## Install Sublime's Pacman repo in Arch
# Get the GPG key
curl -O https://download.sublimetext.com/sublimehq-pub.gpg && pacman-key --add sublimehq-pub.gpg && pacman-key --lsign-key 8A8F901A && rm sublimehq-pub.gpg
# Add the repo to pacman.conf
echo -e "\n[sublime-text]\nServer = https://download.sublimetext.com/arch/stable/x86_64" | tee -a /etc/pacman.conf
# Sync package list
pacman -Sy
# Friendly and informative completion message
echo
echo "* Done! You can now install the 'sublime-text' package. Hooray!"
echo | true |
647c72e5c337ea72e23178441e907374fa3ee396 | Shell | entscheidungsproblem/Ansible-Playbook | /roles/virtual/tasks/virtualbox_latest.sh | UTF-8 | 184 | 2.703125 | 3 | [] | no_license | #!/bin/bash
LINK="https://download.virtualbox.org/virtualbox/"
VERSION=$(curl -s $LINKLATEST.TXT)
FILE=$(curl $LINK$VERSION | grep amd64.run | cut -d '"' -f 2)
echo $LINK$VERSION/$FILE | true |
007aad2e005f2d8c8e249b3044c7db2e2a748ee3 | Shell | souadmaniani/ft_services | /srcs/sql/start.sh | UTF-8 | 736 | 2.53125 | 3 | [] | no_license | #!/bin/bash
DB_DATA_PATH="/var/lib/mysql"
DB_USER="souad"
DB_PASS="souad"
DB_ROOT_PASS="souad"
# SOCKET="/run/mysqld/mysql.sock"
/usr/bin/mysql_install_db --user=mysql
/usr/bin/mysqld_safe --datadir="/var/lib/mysql"
rc-status
/etc/init.d/mariadb setup
/etc/init.d/mariadb start
sleep 7
mysqladmin -u root password "${DB_ROOT_PASS}"
mysql -u root -e "CREATE DATABASE wordpress_db;"
mysql -u root -e "CREATE USER ${DB_USER}@'%' IDENTIFIED BY '${DB_PASS}';"
mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO ${DB_USER}@'%';"
mysql -u root -e "FLUSH PRIVILEGES;"
# Importer une base de donnée
# mysql -u loginbdd -p nomdelabdd < endroit_ou_se_trouve_fichier_.sql
mysql -u root wordpress_db < /wordpress_db1.sql
tail -f /dev/null | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.