blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
9fc521871e7697981754a2a5626e07ff74283259
|
Shell
|
dsalin/efs
|
/Build.sh
|
UTF-8
| 335
| 3.3125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Build the efsctl test docker image.
# Usage: ./Build.sh [version]   (image tag; defaults to "latest")
VERSION="latest"
# Use the first argument as the image tag when provided.
# Quote and use -n: the original unquoted '[ $1 != "" ]' errors when $1 is unset.
if [ -n "${1:-}" ]; then
  VERSION="$1"
fi
echo "Building efsctl test docker image ..."
echo "VERSION is $VERSION"
# Build the app using version number specified as the first script parameter.
# CGO disabled + linux target: produces a static binary suitable for the image.
CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o efsctl main.go
docker build -t "dsalin/efsctl:$VERSION" .
| true
|
0bfdc8a0464f9bcf0ad128e2007ff538d31af6d4
|
Shell
|
leipzig/gatk-sv
|
/src/sv-pipeline/04_variant_resolution/scripts/clean_vcf_part1b.sh
|
UTF-8
| 6,003
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
#
# clean_vcf_part1b.sh
#
#
#
# Copyright (C) 2018 Harrison Brand<hbrand1@mgh.harvard.edu>
# Distributed under terms of the MIT license.
#
# Input:  $1 - bgzipped VCF produced by clean_vcf_part1.sh
# Output: normal.revise.vcf.gz (+ index) with revised genotypes, and
#         multi.cnvs.txt listing candidate multi-allelic CNVs, plus the
#         intermediate files written to the working directory.
set -euxo pipefail
##gzipped vcf from clean vcf part1.sh##
int_vcf_gz=$1
##Remove CNVs that are improperly genotyped by depth because they are nested within a real CNV##
##Determine columns of VCF after header##
zcat $int_vcf_gz\
|sed -n '1,1000p'\
|egrep ^# \
|tail -n 1 \
|tr '\t' '\n' \
|cat -n - \
>col.txt
##Only affects CNV so pull those out##
zcat $int_vcf_gz \
|awk '{if ($5~"DEL" || $5~"DUP" || $1~"#") print}' \
|svtk vcf2bed stdin stdout \
|awk -F"\t" '{if ($6=="") print $6="blanksample";print $0}' OFS='\t' \
|gzip>int.bed.gz
##list of potenital overlaps with a normal copy state variant (>5kb variants require depth but nested events could be missed; i.e a duplication with a nest deletion will have a normal copy state for the deletion)##
##flip bed intersect so largest is CNV is always first##
bedtools intersect -wa -wb -a <(zcat int.bed.gz|awk '{if ($3-$2>=5000 ) print}') \
-b <(zcat int.bed.gz|awk '{if ($3-$2>=5000) print}') \
|awk -F'\t' '{if ($4!=$10 && $3-$2>=$9-$8 && $5!=$11) print ;\
else if ($4!=$10 && $5!=$11) print $7,$8,$9,$10,$11,$12,$1,$2,$3,$4,$5,$6}' OFS='\t' \
|awk -F'\t' '{if ($6!="blanksample") print}' \
|sort -u \
>normaloverlap.txt
##pull out the depth based copy number variant for each normal overlapping variant##
{ cat <(zcat $int_vcf_gz|awk -F"\t" '{if ($1~"#") print}') \
<(awk '{print $4 "\n" $10}' normaloverlap.txt|sort -u|fgrep -wf - <(zcat $int_vcf_gz)) || true; }\
|awk '{if ($1!~"#") $1=$3;print}' OFS="\t" \
|awk '{if ($1~"#" || $5=="<DEL>" || $5=="<DUP>") print}' \
|vcftools --vcf - --stdout --extract-FORMAT-info RD_CN \
|awk -F"\t" 'NR==1{for (i=3;i<=NF;i++) header[i]=$i} NR>1{for(j=3;j<=NF;j++) print $1"@"header[j] "\t" $j }' \
|sort -k1,1 \
|gzip \
>RD_CN.normalcheck.FORMAT.gz
##pull out evidence supporting each normal overlapping variant##
{ cat <(zcat $int_vcf_gz|awk -F"\t" '{if ($1~"#") print}') \
<(awk '{print $4 "\n" $10}' normaloverlap.txt|sort -u|fgrep -wf - <(zcat $int_vcf_gz)) || true; }\
|awk '{if ($1!~"#") $1=$3;print}' OFS="\t"\
|vcftools --vcf - --stdout --extract-FORMAT-info EV \
|awk -F"\t" 'NR==1{for (i=3;i<=NF;i++) header[i]=$i} NR>1{for(j=3;j<=NF;j++) print $1"@"header[j] "\t" $j }' \
|sort -k1,1 \
|gzip \
>EV.normalcheck.FORMAT.gz
##check if nested is incorrectly classified as normal##
##(each line of normaloverlap.txt holds the larger CNV in fields 1-6 and the nested one in fields 7-12)##
touch overlap.test.txt
while read bed
do
echo $bed|tr ' ' '\t'|cut -f1-6 >large.bed
echo $bed|tr ' ' '\t'|cut -f7-12>small.bed
##require at least 50% coverage to consider a variant overlapping##
overlap=$(bedtools coverage -a small.bed -b large.bed|awk '{if ($NF>=0.50) print "YES";else print "NO"}')
if [ "$overlap" == "YES" ]
then
smallid=$(awk '{print $4}' small.bed)
##pull out variants that are called a variants for both the smaller and larger CNVs (don't have normal copy state to check for)##
if [ $(awk '{print $NF}' small.bed \
|tr ',' '\n' \
|fgrep -wvf - <(awk -F"[,\t]" -v var=$smallid '{for(i=6;i<=NF;i++) print var"@"$i "\t" $4"@"$i "\t" $5}' large.bed)|wc -l) -gt 0 ]
then
awk '{print $NF}' small.bed \
|tr ',' '\n' \
|fgrep -wvf - <(awk -F"[,\t]" -v var=$smallid '{for(i=6;i<=NF;i++) print var"@"$i "\t" $4"@"$i "\t" $5}' large.bed) \
>>overlap.test.txt
fi
fi
done<normaloverlap.txt
##determine variants that need to be revised from a normal copy state into a CNV##
cat overlap.test.txt \
|sort -k1,1 \
|join -j 1 - <(zcat RD_CN.normalcheck.FORMAT.gz) \
|join -j 1 - <(zcat EV.normalcheck.FORMAT.gz) \
|tr ' ' '\t' \
|sort -k2,2 \
|join -1 2 -2 1 - <(zcat RD_CN.normalcheck.FORMAT.gz) \
|awk '{if ($3=="DUP" && $4==2 && $6==3) print $2 "\t" 1; else if ($3=="DEL" && $4==2 && $6==1) print $2 "\t" 3 }' \
|tr '@' '\t'\
>geno.normal.revise.txt
##Update genotypes##
{ zfgrep -wf <(awk '{print $1}' geno.normal.revise.txt|sort -u) $int_vcf_gz || true; }\
|bgzip \
>subset.vcf.gz || true
##pull out and revise vcf line that needs to be edited##
while read variant
do
echo $variant
#note no longer change depth from id.txt (column 2)##
{ fgrep $variant geno.normal.revise.txt || true; }|awk '{print $2 "\t" $3}'>id.txt
zcat subset.vcf.gz |{ fgrep -w $variant || true; }>line.txt
cat line.txt \
|tr '\t' '\n' \
|paste col.txt - \
|tr ':' '\t' \
|awk 'NR==FNR{inFileA[$1]=$2; next} {if ($2 in inFileA ) $3="0/1"; print }' OFS='\t' id.txt - \
|awk 'NR==FNR{inFileA[$1]=$2; next} {if ($2 in inFileA ) $4=$6; print }' OFS='\t' id.txt - \
|cut -f3-|tr '\t' ':' \
|tr '\n' '\t' \
|awk '{print $0}' \
>>normal.revise.vcf.lines.txt
done< <(awk '{print $1}' geno.normal.revise.txt|sort -u)
##rewrite vcf with updated genotypes##
cat <(zcat $int_vcf_gz|fgrep -wvf <(awk '{print $3}' normal.revise.vcf.lines.txt|sort -u)) \
<(sed 's/\t$//' normal.revise.vcf.lines.txt) \
|vcf-sort \
|bgzip \
>normal.revise.vcf.gz || true
bcftools index normal.revise.vcf.gz
##get copy state per variant##
zcat normal.revise.vcf.gz \
|awk '{if ($1!~"#") $1=$3;print}' OFS="\t" \
|vcftools --vcf - --stdout --extract-FORMAT-info RD_CN \
|gzip \
>copystate.RD_CN.FORMAT.gz
##get copy state per variant##
##collapse to the unique (variant, copy state) pairs seen across samples##
zcat copystate.RD_CN.FORMAT.gz \
|awk 'NR>1{for(i=3;i<=NF;i++) lines[$1 "\t" $i]++ } END{for (x in lines) print x}' \
|gzip \
>copystate.per.variant.txt.gz
##Find multi-allelic for del or dup ; CNV >1kb we trust depth ##
##del##
zcat copystate.per.variant.txt.gz \
|awk '{if ($2!="." && $2>3) print $1}' \
|sort -u \
|fgrep -wf <(zcat int.bed.gz|awk -F"\t" '{if ($5=="DEL" && $3-$2>=1000) print $4}' ) \
>multi.cnvs.txt || true
##dup##
zcat copystate.per.variant.txt.gz \
|awk '{if ($2!="." && ($2<1 || $2>4)) print $1}' \
|sort -u \
|fgrep -wf <(zcat int.bed.gz|awk -F"\t" '{if ($5=="DUP" && $3-$2>=1000) print $4}' ) \
>>multi.cnvs.txt || true
| true
|
31921f1687fcf40216f29b4867921a0e13b6be0c
|
Shell
|
ergottli/corewar
|
/champs/tests/mine.sh
|
UTF-8
| 946
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/bash
# Compare our assembler against the reference (zaz) assembler for champion $1.s:
# both assemble the champion, and the resulting .cor files are compared.
# Results are recorded in ok_$1.txt / diff_$1.txt / all_error.txt.
cp "$1.s" "$1_zaz.s"
./asm_zaz "$1_zaz.s"
./asm "$1.s"
# Neither assembler produced output: both rejected the champion, which is fine.
# BUG FIX: the original tested $1.cor twice; the second test must be the
# reference assembler's output ($1_zaz.cor).
if [ ! -e "$1.cor" ] && [ ! -e "$1_zaz.cor" ]
then
  echo "$1 : [OK] NO FILE CREATE"
fi
if [ -e "$1.cor" ] && [ -e "$1_zaz.cor" ] && diff "$1.cor" "$1_zaz.cor"
then
  echo "$1: [OK]"
  if [ -e "$1.cor" ]
  then
    hexdump -C "$1.cor" > "ok_$1.txt"
    rm "$1.cor" "$1_zaz.cor"
  fi
else
  if [ -e "$1.cor" ] && [ -e "$1_zaz.cor" ]
  then
    echo "$1: [KO]"
    diff "$1.cor" "$1_zaz.cor" > "diff_$1.txt"
    # printf instead of echo: bash's echo does not expand "\n" escapes,
    # so the original wrote literal backslash-n into the log.
    printf '\n\n%s.cor \n\n' "$1" >> "diff_$1.txt"
    hexdump -C "$1.cor" >> "diff_$1.txt"
    printf '\n\n%s_zaz.cor\n\n' "$1" >> "diff_$1.txt"
    hexdump -C "$1_zaz.cor" >> "diff_$1.txt"
    echo "$1.s : binary diff" >> all_error.txt
  fi
fi
# Our assembler rejected the champion but the reference accepted it.
if [ ! -e "$1.cor" ] && [ -e "$1_zaz.cor" ]
then
  echo "J'ai une erreur que je ne devrais pas avoir $1.s"
  echo "$1.s : error not legit" >> all_error.txt
fi
# Our assembler accepted the champion but the reference rejected it.
if [ -e "$1.cor" ] && [ ! -e "$1_zaz.cor" ]
then
  echo "Il me manque la detection d'un erreur sur le fichier $1.s"
  echo "$1.s : missing an error" >> all_error.txt
fi
rm "$1_zaz.s"
| true
|
b6831a697573d8253a3122cfdbcfcb14e490f680
|
Shell
|
Jitpanu/My-first-program
|
/backup.sh
|
UTF-8
| 77
| 2.640625
| 3
|
[] |
no_license
|
# Back up every output*.txt in the current directory as <name>.bak
# (e.g. output1.txt -> output1.bak).
for file in output*.txt
do
  # Skip the literal pattern when nothing matches (nullglob is not set).
  [ -e "$file" ] || continue
  # Quote all expansions; the original's $(echo $(basename ...)) was a
  # useless extra subshell around basename.
  cp "$file" "$(basename "$file" .txt).bak"
done
| true
|
ad89ea8e7f2220c17747374a7d7884a8ed10f598
|
Shell
|
shaokaiyang/hadoop-build-env
|
/utilities/change_binarycode_mode_own.sh
|
UTF-8
| 316
| 2.78125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Set ownership and permissions for a Hadoop secure container-executor tree.
#paras are group, user, codepath  ($1 = group, $2 = user, $3 = codepath)
# Exits 1 unless exactly three arguments are given.
if [ $# -ne 3 ]
then
  exit 1
fi
# Hand the whole tree to the service user/group first...
chown "$2":"$1" -R "$3"
# ...then make the pieces container-executor requires root-owned.
chown root:"$1" "$3/etc"
chown root:"$1" "$3/etc/hadoop"
chown root:"$1" "$3/etc/hadoop/container-executor.cfg"
chown root:"$1" "$3"
chown root:"$1" "$3/../"
chown root:"$1" "$3/bin/container-executor"
# 6150 = setuid+setgid, group read/execute only — required by container-executor.
chmod 6150 "$3/bin/container-executor"
| true
|
924187d4a467b2bf1c27f035ef21aadaae3aaeee
|
Shell
|
chonochonovuk/DevOps-Fundamentals
|
/Homeworks/Homework5/Practice/node1.sh
|
UTF-8
| 1,637
| 3.21875
| 3
|
[] |
no_license
|
#!/usr/bin/bash
# Provision a Vagrant node: install Docker CE from Docker's apt repo, then
# install Nomad and Consul (HashiCorp zip releases) into /usr/bin with
# world-writable config dirs under /etc.
echo "Installing Docker..."
sudo apt-get update
sudo apt-get remove docker docker-engine docker.io
echo '* libraries/restart-without-asking boolean true' | sudo debconf-set-selections
sudo apt-get install apt-transport-https ca-certificates curl software-properties-common -y
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo apt-key fingerprint 0EBFCD88
sudo add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
sudo apt-get update
sudo apt-get install -y docker-ce
# Restart docker to make sure we get the latest version of the daemon if there is an upgrade
sudo service docker restart
# Make sure we can actually use docker as the vagrant user
sudo usermod -aG docker vagrant
sudo docker --version
# Update apt and get dependencies
sudo apt-get update
sudo apt-get install -y unzip curl wget vim
# Download Nomad
echo Fetching Nomad...
NOMAD_VERSION=1.1.3
cd /tmp/
curl -sSL https://releases.hashicorp.com/nomad/${NOMAD_VERSION}/nomad_${NOMAD_VERSION}_linux_amd64.zip -o nomad.zip
echo Installing Nomad...
unzip nomad.zip
rm -f nomad.zip
sudo chmod +x nomad
sudo mv nomad /usr/bin/nomad
sudo mkdir -p /etc/nomad.d
sudo chmod a+w /etc/nomad.d
# Install Consul
echo Fetching Consul...
cd /tmp/
CONSUL_VERSION=1.10.1
curl -sSL https://releases.hashicorp.com/consul/${CONSUL_VERSION}/consul_${CONSUL_VERSION}_linux_amd64.zip > consul.zip
echo Installing Consul...
unzip consul.zip
rm -f consul.zip
sudo chmod +x consul
sudo mv consul /usr/bin/consul
# NOTE(review): unlike /etc/nomad.d above, this mkdir lacks -p and will
# error (harmlessly, no set -e) when the directory already exists on re-run.
sudo mkdir /etc/consul.d
sudo chmod a+w /etc/consul.d
| true
|
1b94e53f9ad954527992f45b7388e91751b33e79
|
Shell
|
gaojunqi/cocolian-data
|
/cocolian-data-server/src/main/bin/start-dev-centos.sh
|
UTF-8
| 771
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/bash
# Run in centos
# Author: Shamphone Lee
# Launch the cocolian DataServer with a classpath built from the
# distribution directory this script lives in (script dir's parent).
JAVA="$JAVA_HOME/bin/java"
# Resolve the distribution root. $() + quoting replaces the original
# backticks and unquoted dirname, which broke on paths with spaces.
WORKING_DIR=$(cd "$(dirname "$0")" && pwd)
WORKING_DIR="$(dirname "${WORKING_DIR}")"
echo "running in ${WORKING_DIR}"
MAIN_CLASS="org.cocolian.data.server.DataServer"
# Main jar name is filled in by Maven resource filtering at build time.
JAR_LIB="$WORKING_DIR/${project.build.finalName}.jar"
JAR_LIB="${JAR_LIB}:${WORKING_DIR}/lib/*"
LOG_PATH="${WORKING_DIR}/logs"
# JAVA_JVM_OPTION="-Xmx25g -Xms25g -XX:NewSize=15g -XX:MaxNewSize=15g -XX:SurvivorRatio=17 -XX:PermSize=256m -XX:MaxPermSize=256m"
JAVA_JVM_OPTION="-Xmx1g -Xms1g"
JAVA_JVM_OPTION="${JAVA_JVM_OPTION} -Duser.timezone=GMT+8 -Dcocolian.log.path=${LOG_PATH}"
JAVA_JVM_OPTION="${JAVA_JVM_OPTION} -cp ${JAR_LIB} ${MAIN_CLASS} "
echo "${JAVA} ${JAVA_JVM_OPTION} "
# Intentionally unquoted: JAVA_JVM_OPTION must word-split into separate args.
${JAVA} ${JAVA_JVM_OPTION}
| true
|
39edf45a20a90c8c760adfd2649516424f9ae902
|
Shell
|
svandiek/summa-farsi
|
/local/farsi_dict_prep.sh
|
UTF-8
| 1,395
| 3.109375
| 3
|
[] |
no_license
|
#! /usr/bin/env bash
# Prepare the Farsi grapheme dictionary files under data/local/dict for Kaldi.
# mkdir -p already succeeds when the directory exists, so the old
# "if [ ! -d ... ]" guard around it was redundant.
mkdir -p data/local/dict
# After applying the farsi_preprocessing script the dictionary needs a few beauty operations
# namingly. removing the EOS and spoken noise tokens, removing all double spaces, and spaces at the end of a line
#cp /group/project/summa/svandiek/farsi/fa_grapheme_dict_prep.txt data/local/dict/lexicon_nosil.txt
#cp /disk/scratch1/svandiek/persian/lm/new_farsi_grapheme_dict.txt data/local/dict/lexicon_nosil.txt
#cp /disk/data1/svandiek/persian/data/new_grapheme_dict_500k.txt data/local/dict/lexicon_nosil.txt
cp farsi_asr_grapheme_dict.txt data/local/dict/lexicon_nosil.txt
# Prepend the silence / spoken-noise entries to form the full lexicon.
(printf '!SIL\tsil\n[EN]\tspn\n[UNK]\tspn\n[EH]\tspn\n[SN]\tspn\n';) \
| cat - data/local/dict/lexicon_nosil.txt \
> data/local/dict/lexicon.txt;
# taken from egs/gp/s5/local/gp_dict_prep.sh
{ echo sil; echo spn; } > data/local/dict/silence_phones.txt
echo sil > data/local/dict/optional_silence.txt
# The "tail -n +2" is a bit of a hack as other wise there will be an empty line in the file, which Kaldi doesn't like
cut -f2- data/local/dict/lexicon_nosil.txt | tr ' ' '\n' | sort -u | tail -n +2 \
> data/local/dict/nonsilence_phones.txt
( tr '\n' ' ' < data/local/dict/silence_phones.txt; echo;
tr '\n' ' ' < data/local/dict/nonsilence_phones.txt; echo;
) > data/local/dict/extra_questions.txt
echo "Done with dict prep"
| true
|
05934be489a26d0cd4bb9404f5e5993133626ebc
|
Shell
|
gurjeet/cfenv
|
/env/bin/.installers/.tools.sh
|
UTF-8
| 1,321
| 3.921875
| 4
|
[] |
no_license
|
# Emit a leveled debug message on stderr.
# $1 is the message level; the remaining args are the message. Messages print
# only when the level is at or below the DEBUG threshold (default 0). The
# body runs in a subshell, so nothing here leaks into the caller's shell.
debug () (
  threshold=${DEBUG:-0}
  msg_level=$1
  shift
  [ "$msg_level" -gt "$threshold" ] || echo "$@" >&2
)
# Print a message on stderr and terminate the script with the given status.
# $1 is the exit code; everything after it is the message.
die() {
  status=$1
  shift
  printf '%s\n' "$*" >&2
  exit "$status"
}
# Download a file into $HOME/.downloads unless a cached copy already exists.
# $1 = target filename prefix; the remaining args are passed straight to wget.
# Depends on the sibling debug() and die() helpers.
# The unquoted ${target}* glob is deliberate: ls succeeds (and short-circuits
# the download) when any cached file starting with the target name exists.
Download() {
local target="$1"
shift
if ! ls $HOME/.downloads/${target}* 2>/dev/null; then
echo "Downloading $target"
debug 1 wget "$@"
# Download into a work directory, then do a (presumably atomic) mv. This
# prevents issues with failed downloads. For some odd reason mv fails when
# DEBUG > 0, so we force it to 0 for that command.
mkdir -p $HOME/.downloads/work &&
( cd $HOME/.downloads/work && rm -f "${target}"* && wget "$@" && DEBUG=0 mv "${target}"* .. ) ||
die $? "error downloading $target"
fi
}
# Download the .deb for a package into ~/.downloads unless already cached.
# $1 = package name. Depends on the sibling die() helper.
APTdownload () {
  local target="$1"
  shift
  # assumes that the .deb's always start with the name of the package
  # (unquoted glob is deliberate: ls succeeds when any cached file matches)
  if ! ls $HOME/.downloads/${target}* 2>/dev/null; then
    echo "Downloading $target"
    # BUG FIX: after the shift, $1 no longer holds the package name (callers
    # such as APTinstall pass a single argument, so $1 was empty here);
    # download and report $target instead.
    mkdir -p ~/.downloads &&
      ( cd $HOME/.downloads && apt-get download "$target" ) ||
      die $? "error downloading $target"
  fi
}
# Download (via APTdownload) and unpack each named package's .deb payload
# into $HOME, using ar + tar instead of dpkg — no root required.
# Depends on the sibling APTdownload() and die() helpers.
APTinstall () {
for p in "$@"; do
APTdownload "$p"
echo "Installing $p"
# ar x extracts data.tar.* from the cached .deb; its contents are the
# package's filesystem tree, unpacked under $HOME.
( cd /tmp &&
rm -f data.tar.* &&
ar x $HOME/.downloads/${p}* &&
tar xf data.tar.* --directory $HOME
) || die $? "unable to install $p"
done
}
# vi: expandtab ts=2 sw=2
| true
|
d1640b4570c4c5cc77d075cd43bb2a3a26803b03
|
Shell
|
Codeberg-org/gitea
|
/docker/root/etc/s6/openssh/setup
|
UTF-8
| 1,083
| 3.390625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# s6 setup stage for the openssh service: create /data/ssh, generate any
# missing SSH host keys, render sshd_config from the template, and lock
# down ownership/permissions on the key directory.
if [ ! -d /data/ssh ]; then
mkdir -p /data/ssh
fi
if [ ! -f /data/ssh/ssh_host_ed25519_key ]; then
echo "Generating /data/ssh/ssh_host_ed25519_key..."
ssh-keygen -t ed25519 -f /data/ssh/ssh_host_ed25519_key -N "" > /dev/null
fi
if [ ! -f /data/ssh/ssh_host_rsa_key ]; then
echo "Generating /data/ssh/ssh_host_rsa_key..."
ssh-keygen -t rsa -b 2048 -f /data/ssh/ssh_host_rsa_key -N "" > /dev/null
fi
if [ ! -f /data/ssh/ssh_host_dsa_key ]; then
echo "Generating /data/ssh/ssh_host_dsa_key..."
ssh-keygen -t dsa -f /data/ssh/ssh_host_dsa_key -N "" > /dev/null
fi
if [ ! -f /data/ssh/ssh_host_ecdsa_key ]; then
echo "Generating /data/ssh/ssh_host_ecdsa_key..."
ssh-keygen -t ecdsa -b 256 -f /data/ssh/ssh_host_ecdsa_key -N "" > /dev/null
fi
# Render sshd_config with SSH_PORT (default 22) and SSH_LISTEN_PORT
# (defaults to SSH_PORT); envsubst substitutes them into the template.
if [ -d /etc/ssh ]; then
SSH_PORT=${SSH_PORT:-"22"} \
SSH_LISTEN_PORT=${SSH_LISTEN_PORT:-"${SSH_PORT}"} \
envsubst < /etc/templates/sshd_config > /etc/ssh/sshd_config
chmod 0644 /etc/ssh/sshd_config
fi
# Host keys must be root-owned and private for sshd to accept them.
chown root:root /data/ssh/*
chmod 0700 /data/ssh
chmod 0600 /data/ssh/*
| true
|
377b297a6ede4daee30ce3cd61139021a5893657
|
Shell
|
dwxie/salt
|
/state/prod/application/gb/sipmediaserver/files/stop.sh
|
UTF-8
| 219
| 3.109375
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# [global]
# Stop any running ./sipmediaserver processes started from this directory.
SCRIPT_PATH="$(cd "$(dirname "$0")"; pwd)"
cd "${SCRIPT_PATH}"
# Collect PIDs of './sipmediaserver' (the grep -v excludes the grep itself).
pids=$(ps -ef | grep './sipmediaserver' | grep -v 'grep' | awk '{print $2}')
# ${pids} left unquoted on purpose: multiple PIDs must word-split for kill.
if [ -n "${pids}" ];
then
kill ${pids}
fi
| true
|
c7b83c515134c72a6925808d5cb318d7192ec871
|
Shell
|
MW-autocat-script/MW-autocat-script
|
/catscripts/Government/Countries/New_Zealand/NewZealand.sh
|
UTF-8
| 309
| 3.140625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Categorize new wiki pages that mention New Zealand.
# debug_start, categorize, debug_end and $NEWPAGES are not defined here —
# presumably provided by the calling autocat framework; confirm before
# running standalone.
KEYWORDS_NEWZEALAND="New(| )Zealand|\bNZ\b"
KEYWORDS_NEWZEALAND_ALL="$KEYWORDS_NEWZEALAND"
if [ "$1" == "" ]; #Normal operation
then
debug_start "New Zealand"
NEWZEALAND=$(egrep -i "$KEYWORDS_NEWZEALAND" "$NEWPAGES")
categorize "NEWZEALAND" "New Zealand"
debug_end "New Zealand"
fi
| true
|
d96e252d54e9c84eb085365269721c2839a24a4b
|
Shell
|
top-bettercode/docker-thunderspeed
|
/entrypoint.sh
|
UTF-8
| 1,610
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# Container entrypoint: align the audio group and the thunderspeed user's
# uid/gid with host-supplied AUDIO_GID/GID/UID, fix ownership of the
# download and profile dirs, then run ThunderSpeed (Deepin wine wrapper)
# as the unprivileged "thunderspeed" user.
groupmod -o -g $AUDIO_GID audio
# Remap gid/uid only when the host supplied a value that differs.
# (${GID:-} guards keep the original skip-when-unset behavior without the
# unquoted-test error the old comparison produced.)
if [ -n "${GID:-}" ] && [ "$GID" != "$(id -g thunderspeed)" ]; then
  groupmod -o -g $GID thunderspeed
fi
if [ -n "${UID:-}" ] && [ "$UID" != "$(id -u thunderspeed)" ]; then
  usermod -o -u $UID thunderspeed
fi
chown thunderspeed:thunderspeed "/迅雷下载"
if [ -d "/home/thunderspeed/.thunderspeed" ]; then
  chown thunderspeed:thunderspeed /home/thunderspeed/.thunderspeed
fi
# Everything below runs as the unprivileged user. The heredoc delimiter is
# unquoted, so APP and CRACKED are expanded by THIS shell before su runs it.
su thunderspeed <<EOF
echo "启动 $APP"
if [ ! -d "/home/thunderspeed/.deepinwine/Deepin-ThunderSpeed/" ]; then
"/opt/deepinwine/apps/Deepin-$APP/run.sh" -c
if [ "$CRACKED" == "true" ]; then
echo 'crack thunderspeed'
cp /home/thunderspeed/dll/* "/home/thunderspeed/.deepinwine/Deepin-ThunderSpeed/drive_c/Program Files/Thunder Network/Thunder/Program/"
fi
if [ -d "/home/thunderspeed/.deepinwine/Deepin-ThunderSpeed/drive_c/Program Files/Thunder Network/Thunder/Profiles" ]; then
if [ ! -d "/home/thunderspeed/.thunderspeed/Community" ]; then
# BUG FIX: the glob must sit OUTSIDE the quotes, otherwise mv looks for a
# file literally named '*' and never moves the existing profile data.
mv "/home/thunderspeed/.deepinwine/Deepin-ThunderSpeed/drive_c/Program Files/Thunder Network/Thunder/Profiles/"* "/home/thunderspeed/.thunderspeed/"
fi
rm -rf "/home/thunderspeed/.deepinwine/Deepin-ThunderSpeed/drive_c/Program Files/Thunder Network/Thunder/Profiles"
else
mkdir -p "/home/thunderspeed/.deepinwine/Deepin-ThunderSpeed/drive_c/Program Files/Thunder Network/Thunder"
fi
ln -s /home/thunderspeed/.thunderspeed "/home/thunderspeed/.deepinwine/Deepin-ThunderSpeed/drive_c/Program Files/Thunder Network/Thunder/Profiles"
fi
"/opt/deepinwine/apps/Deepin-$APP/run.sh"
EOF
| true
|
1abc52c1bdd47973d7ff70f605d9d1aefceb2a58
|
Shell
|
nickup9/bgp_packager
|
/packager.sh
|
UTF-8
| 8,185
| 3.703125
| 4
|
[] |
no_license
|
#!/bin/bash
# This is the packager for bgp-extrapolator and rov.
#
# Builds four Debian packages — stable and nightly flavors of both
# bgp-extrapolator and rov — by downloading the pinned/latest sources,
# grafting in the shared debian/ metadata, rewriting package names, and
# running debuild for each.
# Requires: python3 (with the local get_ver.py helper), jq, curl, debuild.
# Making sure that jq, python3, and curl are installed
python3_test=$(command -v python3)
jq_test=$(command -v jq)
curl_test=$(command -v curl)
if [ -z "$python3_test" ]
then
echo "Aborting: python3 not installed"
exit 1
fi
if [ -z "$jq_test" ]
then
echo "Aborting: jq not installed"
exit 1
fi
if [ -z "$curl_test" ]
then
echo "Aborting: curl not installed"
exit 1
fi
# Getting forecast release commit shas
fore_rel_sha="$(python3 get_ver.py forecast-sha)"
fore_rel_ver="$(python3 get_ver.py forecast-tag)"
echo "Forcast Release: " $fore_rel_sha
echo "From: "$fore_rel_ver
# Getting forecast nightly commit shas
url_branch='https://api.github.com/repos/c-morris/BGPExtrapolator/branches/master'
fore_night_sha=$(curl -s $url_branch | jq -r '.commit.sha')
fore_night_cut=$(echo $fore_night_sha | cut -c1-7)
echo "Forecast Nightly: " $fore_night_cut
# Getting rov release commit shas
rov_rel_sha="$(python3 get_ver.py rov-sha)"
rov_rel_ver="$(python3 get_ver.py rov-tag)"
echo "rov Release: " $rov_rel_sha
echo "From: " $rov_rel_ver
# Getting rov nightly commit urls
url_branch='https://api.github.com/repos/c-morris/BGPExtrapolator/branches/rovpp2'
rov_night_sha=$(curl -s $url_branch | jq -r '.commit.sha')
rov_night_cut=$(echo $rov_night_sha | cut -c1-7)
echo "rov Nightly: " $rov_night_cut
# Get the debian metadata file:
url_branch='https://api.github.com/repos/c-morris/BGPExtrapolator-debian-package-metadata/branches/master'
debian_sha=$(curl -s $url_branch | jq -r '.commit.sha')
debian_cut=$(echo $debian_sha | cut -c1-7)
echo "debian metadata: " $debian_cut
# Download it:
# NOTE(review): the ('...') assignments below create one-element arrays that
# are then used as plain scalars; ordinary string assignment would be clearer.
dwnload_url=('https://codeload.github.com/c-morris/BGPExtrapolator-debian-package-metadata/tar.gz/'$debian_cut)
output_tar=("debian.tar.gz")
output_dir=('debian')
echo "Getting debian from: "$dwnload_url
curl -l $dwnload_url --output $output_tar
mkdir $output_dir && tar -xf $output_tar -C $output_dir --strip-components 2
# Clean up the debian tarball, we don't need it
rm $output_tar
# Save the debian's location for later use
deb=$(pwd)/debian
# Now to download sources and build. We will start with bgp-extrap-release first
dwnload_url=('https://codeload.github.com/c-morris/BGPExtrapolator/tar.gz/'$fore_rel_sha)
# Get the version
ver="$(python3 get_ver.py convert $fore_rel_ver)"
type=('stable')
# Make formatted name for unpacked and packed source
output_tar=('bgp-extrapolator-'$type'_'$ver'.orig.tar.gz')
output_dir=('bgp-extrapolator-'$type'_'$ver)
echo "Getting bgp-extrapolator-stable from: "$dwnload_url
# Make and go into a new dir to save on clutter
mkdir $output_dir && cd $output_dir
# Download the tarball
curl -l $dwnload_url --output $output_tar
# Make a dir for the tarball to extract into and extract
mkdir $output_dir && tar -xf $output_tar -C $output_dir --strip-components 1
# Remove the tarball. We're gonna make a new one later.
rm -r $output_tar
# copy the debian into the new dir
cp -r $deb $output_dir
# cd into directory for building and more work
cd $output_dir
# convert all references to bgp-extrapolator in deb files to bgp-extrapolator-$type
# Fix the makefile
sed -i "s/bgp-extrapolator/bgp-extrapolator-$type/g" Makefile
# Go into the debian folder
cd debian
# Change all folder names
for file in bgp-extrapolator* ; do mv $file ${file//bgp-extrapolator/bgp-extrapolator-$type} ; done
# Change content within folders to reflect the type
for file in * ; do sed -i "s/bgp-extrapolator/bgp-extrapolator-$type/g" $file ;done
# Take two steps out to get out of the debian and source
cd .. && cd ..
# Rebuild the tarball
tar -czf $output_tar $output_dir
# Go back into the output dir for building
cd $output_dir
# Build it
debuild -us -uc
# Get back out into the top level of the packager
cd .. && cd ..
# Now for the forecast nightly
dwnload_url=('https://codeload.github.com/c-morris/BGPExtrapolator/tar.gz/'$fore_night_cut)
type=('unstable')
output_tar=('bgp-extrapolator-'$type'.orig.tar.gz')
output_dir=('bgp-extrapolator-'$type)
echo "Getting bgp-extrapolator-unstable from: "$dwnload_url
# Make and go into a new dir to save on clutter
mkdir $output_dir && cd $output_dir
# Download it
curl -l $dwnload_url --output $output_tar
# Unpack it into a new dir
mkdir $output_dir && tar -xf $output_tar -C $output_dir --strip-components 1
# Remove the tar to rebuild later
rm -r $output_tar
# Copy debian into this new dir
cp -r $deb $output_dir
# Go into the dir
cd $output_dir
# convert all references to bgp-extrapolator in deb files to bgp-extrapolator-$type
# Fix the makefile
sed -i "s/bgp-extrapolator/bgp-extrapolator-$type/g" Makefile
# Go into the debian
cd debian
# change file names to reflect type
for file in bgp-extrapolator* ; do mv $file ${file//bgp-extrapolator/bgp-extrapolator-$type} ; done
# change content in files to reflect type
for file in * ; do sed -i "s/bgp-extrapolator/bgp-extrapolator-$type/g" $file ;done
# Get the version of this nightly release from the changelog
# NOTE(review): the first/middle/last dots in this pattern are regex
# wildcards matching any character; '[0-9]\.[0-9]\.[0-9]' is probably the
# intended match — confirm against the changelog format.
ver=$(grep -o -m 1 '.\..\..' changelog)
# go back to the dir with tarball and unpacked dir
cd .. && cd ..
# rebuild our tarball
tar -czf $output_dir'_'$ver'.orig.tar.gz' $output_dir
# Edit dir and tarball names with the version of the nightly
mv $output_dir $output_dir'_'$ver
# Go into the dir to build
cd $output_dir'_'$ver
# Build it
debuild -us -uc
# Go back out for the next package
cd .. && cd ..
# Next: rov release
dwnload_url=('https://codeload.github.com/c-morris/BGPExtrapolator/tar.gz/'$rov_rel_sha)
ver="$(python3 get_ver.py convert $rov_rel_ver)"
type=('stable')
output_tar=('rov-'$type'_'$ver'.orig.tar.gz')
output_dir=('rov-'$type'_'$ver)
echo "Getting rov-stable from: "$dwnload_url
# Make and go into a new dir to save on clutter
mkdir $output_dir && cd $output_dir
curl -l $dwnload_url --output $output_tar
# Make a dir for the tarball to extract into and extract
mkdir $output_dir && tar -xf $output_tar -C $output_dir --strip-components 1
# Remove the tarball to rebuild later
rm -r $output_tar
# copy the debian into the new dir
cp -r $deb $output_dir
# cd into directory for building
cd $output_dir
# convert all references to bgp-extrapolator in deb files to rov-$type
# Makefile changes
sed -i "s/bgp-extrapolator/rov-$type/g" Makefile
# deb changes
cd debian
for file in bgp-extrapolator* ; do mv $file ${file//bgp-extrapolator/rov-$type} ; done
for file in * ; do sed -i "s/bgp-extrapolator/rov-$type/g" $file ;done
# Leave deb and source
cd .. && cd ..
# Rebuild tarball
tar -czf $output_tar $output_dir
# Go back into output dir for building
cd $output_dir
# Build it
debuild -us -uc
# Get back out into the top level of the packager
cd .. && cd ..
# Now for the rov nightly
dwnload_url=('https://codeload.github.com/c-morris/BGPExtrapolator/tar.gz/'$rov_night_cut)
type=('unstable')
output_tar=('rov-'$type'.orig.tar.gz')
output_dir=('rov-'$type)
echo "Getting rov-unstable from: "$dwnload_url
# Make and go into a new dir to save on clutter
mkdir $output_dir && cd $output_dir
# Download it
curl -l $dwnload_url --output $output_tar
# Unpack it into a new dir
mkdir $output_dir && tar -xf $output_tar -C $output_dir --strip-components 1
# Remove the tar to rebuild later
rm -r $output_tar
# Copy debian into this new dir
cp -r $deb $output_dir
# Go into the dir
cd $output_dir
# convert all references to bgp-extrapolator in deb files to bgp-extrapolator-$type
# Fix the makefile
sed -i "s/bgp-extrapolator/rov-$type/g" Makefile
# Go into the debian
cd debian
# change file names to reflect type
for file in bgp-extrapolator* ; do mv $file ${file//bgp-extrapolator/rov-$type} ; done
# change content in files to reflect type
for file in * ; do sed -i "s/bgp-extrapolator/rov-$type/g" $file ;done
# Get the version of this nightly release from the changelog
ver=$(grep -o -m 1 '.\..\..' changelog)
# go back to the dir with tarball and unpacked dir
cd .. && cd ..
# rebuild our tarball
tar -czf $output_dir'_'$ver'.orig.tar.gz' $output_dir
# Edit dir and tarball names with the version of the nightly
mv $output_dir $output_dir'_'$ver
# Go into the dir to build
cd $output_dir'_'$ver
# Build it
debuild -us -uc
# And now we're done.
| true
|
be771a779fcbdeca1847a2c8bc0c835f4ac6384c
|
Shell
|
arch4edu/arch4edu
|
/x86_64/libsearpc/PKGBUILD
|
UTF-8
| 1,056
| 2.640625
| 3
|
[] |
no_license
|
# Maintainer: eolianoe <eolianoe [at] gmail [DoT] com>
# Contributor: Aaron Lindsay <aaron@aclindsay.com>
# Contributor: Edvinas Valatka <edacval@gmail.com>
# Contributor: Adrian Hühn <adrian.huehn@web.de>
# Arch Linux PKGBUILD for libsearpc, built from the upstream GitHub tag.
pkgname='libsearpc'
epoch=2
pkgver=3.3.0
pkgrel=4
pkgdesc="A simple C language RPC framework (including both server side & client side)"
arch=('i686' 'x86_64' 'armv7h' 'armv6h' 'aarch64')
url="https://github.com/haiwen/libsearpc"
license=('Apache')
depends=(
'glib2'
'jansson'
'python-gobject'
'python-simplejson'
)
# Upstream tags this release as "<major.minor>-latest" (e.g. v3.3-latest).
_pkgver="${pkgver%.*}-latest"
source=("libsearpc-$_pkgver.tar.gz::$url/archive/v$_pkgver.tar.gz")
sha256sums=('143ada255ea852a58577a833df0462af98376a3fd2886dc43ead621d656c317d')
# Keep DESTDIR out of the prefix baked into the pkg-config template.
prepare () {
cd "$srcdir/$pkgname-$_pkgver"
sed -i 's|(DESTDIR)@prefix@|@prefix@|' './libsearpc.pc.in'
}
build () {
cd "$srcdir/$pkgname-$_pkgver"
./autogen.sh
./configure --prefix=/usr PYTHON='/usr/bin/python'
make
}
check () {
cd "$srcdir/$pkgname-$_pkgver"
make check
}
package () {
cd "$srcdir/$pkgname-$_pkgver"
make DESTDIR="$pkgdir" install
}
| true
|
86b22fbbeb888517c7398b6ab6e4f5df1e702474
|
Shell
|
riteshchaman/scripts
|
/ambari_db_backup.sh
|
UTF-8
| 1,123
| 3.609375
| 4
|
[] |
no_license
|
#!/bin/bash
# Back up the Ambari server database (postgres only) and the JAAS config.
# Connection settings are read from /etc/ambari-server/conf/ambari.properties.
db_type=$(grep -w server.jdbc.database /etc/ambari-server/conf/ambari.properties | cut -d= -f2)
db_name=$(grep -w server.jdbc.database_name /etc/ambari-server/conf/ambari.properties | cut -d= -f2)
db_host=$(grep -w server.jdbc.hostname /etc/ambari-server/conf/ambari.properties | cut -d= -f2)
db_user=$(grep -w server.jdbc.rca.user.name /etc/ambari-server/conf/ambari.properties | cut -d= -f2)
backupdir_name=/var/log/backups
date=$(date +"%Y%m%d-%H%M%S")
# -p: create only when missing, replacing the old if-guard.
mkdir -p "$backupdir_name"
# Quoted, POSIX '=' comparison: the original unquoted '==' test errors
# out when db_type is empty.
if [ "$db_type" = "postgres" ]
then
  echo "The Ambari database is postgres. Running backup command"
  # Dump into the backup dir; stderr goes to a per-run error log.
  if pg_dump -h "$db_host" -U "$db_user" "$db_name" > "$backupdir_name/ambaridb_bakup-$date" 2> "$backupdir_name/script-err_$date"
  then
    echo "Backup successfully taken"
  else
    echo "back up has error. Please check $backupdir_name for log file"
  fi
else
  echo "No database found. Please run the command manually"
fi
echo "Backing up Jaas file"
cp /etc/ambari-server/conf/krb5JAASLogin.conf "/etc/ambari-server/conf/krb5JAASLogin.conf_$date"
| true
|
5f41f4e310d73a84876ab2aaba6dceee2f932ea0
|
Shell
|
dilipgurung/dotfiles
|
/setup-git.sh
|
UTF-8
| 742
| 2.75
| 3
|
[] |
no_license
|
#!/bin/sh
# Configure global git aliases and pull behavior.
# ~/.extra supplies GIT_AUTHOR_EMAIL, used by the "standup" alias below.
# POSIX '.' instead of 'source': 'source' is a bashism that fails under
# a strict /bin/sh (e.g. dash).
. "$HOME/.extra"
# Git aliases
git config --global alias.co checkout
git config --global alias.amend "commit --amend -C HEAD"
git config --global alias.delete "branch -D"
git config --global alias.ds "diff --staged"
git config --global alias.graph "log --graph --pretty=format':%C(yellow)%h%Cblue%d%Creset %s %C(white) %an, %ar%Creset'"
git config --global alias.ls "log --pretty=format:\"%C(yellow)%h %C(blue)%ad%C(red)%d %C(reset)%s%C(green) [%cn]\" --decorate --date=short"
git config --global alias.st "status -sb"
git config --global alias.standup "log --since '1 day ago' --oneline --author $GIT_AUTHOR_EMAIL"
git config --global alias.undo "reset --soft HEAD^"
# Rebase on git pull
git config --global pull.rebase true
| true
|
9e6f6a5599936ff9b290a69590befa281be5b74b
|
Shell
|
desertedscorpion/solidpostal
|
/testing.sh
|
UTF-8
| 3,595
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/bash
# Integration test for the solidpostal Jenkins image: build the images, boot
# a container, wait for startup, then curl a series of endpoints to verify
# that credentials, slave, jobs, plugin and key provisioning all worked.
# Each failure path uses a distinct exit code; images are removed on success.
BASE_URL=127.209.102.127:28056 &&
SLEEP=1m &&
docker build -t ninthgrimmercury/solidpostal . &&
docker build -t freakygamma/solidpostal test &&
if docker run --interactive freakygamma/solidpostal dnf update --assumeyes | grep "^Last metadata expiration check: 0:0"
then
echo dnf was updated within the last ten minutes &&
true
else
echo dnf was not updated within the last ten minutes &&
exit 64 &&
true
fi &&
docker run --interactive --tty --privileged --detach --volume /sys/fs/cgroup:/sys/fs/cgroup:ro --volume ${PWD}/test/src:/usr/local/src:ro --volume ${HOME}/.private:/var/private -p ${BASE_URL} freakygamma/solidpostal &&
echo We are now sleeping for ${SLEEP} to allow the system to set itself up before we run tests. If we ran tests immediately then all the tests would fail. &&
sleep ${SLEEP} &&
if [[ "HTTP/1.1 200 OK" == $(curl --head http://${BASE_URL} | head --lines 1 | tr -d "[:cntrl:]") ]]
then
echo the web page is up &&
true
else
echo the web page is down &&
exit 65 &&
true
fi &&
if [[ "HTTP/1.1 200 OK" == $(curl --head http://${BASE_URL}/credential-store/domain/_/credential/79ad7607-ef6e-4e5f-a139-e633aded192b/ | head --lines 1 | tr -d "[:cntrl:]") ]]
then
echo the credentials were added &&
true
else
echo the credentials were not added &&
exit 66 &&
true
fi &&
if [[ "HTTP/1.1 200 OK" == $(curl --head http://${BASE_URL}/computer/slave/ | head --lines 1 | tr -d "[:cntrl:]") ]]
then
echo the slave was added &&
true
else
echo the slave was not added &&
exit 67 &&
true
fi &&
if [[ "HTTP/1.1 200 OK" == $(curl --head http://${BASE_URL}/job/job/ | head --lines 1 | tr -d "[:cntrl:]") ]]
then
echo the job was added &&
true
else
echo the job was not added &&
exit 68 &&
true
fi &&
if [[ "HTTP/1.1 200 OK" == $(curl --head http://${BASE_URL}/job/git1/ws/Dockerfile/*view*/ | head --lines 1 | tr -d "[:cntrl:]") ]]
then
echo "the plugin was probably added. we triggered a job that depended on this plugin. In order for /var/libs/jenkins/jobs/git/workspace/Dockerfile to exist the job must have succeeded." &&
true
else
echo "the plugin was probably not added. we triggered a job that depended on an installed plugin. In order for /var/lib/jenkins/jobs/git/workspace/Dockerfile to exist, the job must succeed." &&
exit 69 &&
true
fi &&
if [[ "HTTP/1.1 200 OK" == $(curl --head http://${BASE_URL}/job/job2/ws/data.txt | head --lines 1 | tr -d "[:cntrl:]") ]]
then
echo "the key was added. we triggered a job that depended on this key. In order for /var/libs/jenkins/jobs/git/workspace/Dockerfile to exist the job must have succeeded." &&
true
else
echo "the key was probably not added. we triggered a job that depended on this key. In order for /var/lib/jenkins/jobs/git/workspace/Dockerfile to exist, the job must succeed (alternatively there is something wrong with the slave)." &&
# NOTE(review): exit code 69 is also used by the previous (plugin) check;
# a distinct code here would make failures easier to tell apart.
exit 69 &&
true
fi &&
if [[ "HTTP/1.1 200 OK" == $(curl --head http://${BASE_URL}/job/job3/ws/data.txt | head --lines 1 | tr -d "[:cntrl:]") ]]
then
echo "the build command works" &&
true
else
echo "the build command does not work" &&
exit 70 &&
true
fi &&
# Success: stop/remove the test container and delete both images.
docker rm $(docker stop $(docker ps -a -q --filter ancestor=freakygamma/solidpostal --format="{{.ID}}")) &&
docker rmi --force freakygamma/solidpostal &&
docker rmi --force ninthgrimmercury/solidpostal &&
true
| true
|
ae72410bda55aceea6b62b888e5e6a3c011ba615
|
Shell
|
sbosnick/luther
|
/ci/install_kcov
|
UTF-8
| 1,220
| 3.890625
| 4
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Copyright 2018 Steven Bosnick
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE-2.0 or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms
#
# Download, build (cmake + make) and install the requested kcov version,
# exiting early when that exact version is already present at the prefix.
set -ev
if [[ $# -lt 2 ]]; then
cat <<- EOF
usage: $0 kcov_version install_prefix [working_dir_base]
The default working_dir_base is \$HOME.
EOF
exit
fi
kcov_slug=SimonKagstrom/kcov
kcov_version=$1
install_prefix=$2
working_dir=${3:-$HOME}/${kcov_slug}
# check for ${install_prefix}/bin/kcov and exit early if it exists and its version is ${kcov_version}
if [[ -x ${install_prefix}/bin/kcov && $(${install_prefix}/bin/kcov --version | cut -f 2 -d ' ') == ${kcov_version} ]]; then
exit
fi
# get and untar the archive
mkdir -p ${working_dir}
wget -O - https://github.com/${kcov_slug}/archive/v${kcov_version}.tar.gz | tar --directory=${working_dir} -xz
# cmake
mkdir -p ${working_dir}/kcov-${kcov_version}/build
cd ${working_dir}/kcov-${kcov_version}/build
cmake -DCMAKE_INSTALL_PREFIX=${install_prefix} ..
# make
make
# make install
make install
| true
|
cf6a22d2c3c77c400607bcd6d2169b852d39ddae
|
Shell
|
Chandandhani/ANSIBLE_AWS
|
/script_user.sh
|
UTF-8
| 318
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# Append the name of every account whose login shell is /sbin/nologin
# to /home/ansible/output.txt, reading the fields from /etc/passwd.
#echo -e "Enter the file name: \c"
v_file="/etc/passwd"
#echo -e "Enter the delimeter \c"
delimeter=":"
# Split each passwd line on ':' into the seven standard fields.
IFS=$delimeter
# read -r: don't let backslashes in any field be mangled.
while read -r USER PASSWORD USERID GID COMNT HOMEDIR LOGINSHELL
do
  # Quoted POSIX '=' comparison: the original unquoted '==' test errors
  # on entries with an empty shell field.
  if [ "$LOGINSHELL" = "/sbin/nologin" ]; then
    echo "$USER" >> /home/ansible/output.txt
  fi
done < "$v_file"
| true
|
4ac53cb930b767798570b9f811f2a7c1ec731ae7
|
Shell
|
huhlig/rust-buildpack
|
/bin/supply
|
UTF-8
| 1,473
| 4.03125
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# usage: bin/supply <build-path> <cache-path> <deps-path> <index>
#
# Cloud Foundry buildpack "supply" phase for Rust: installs (or updates)
# a rustup-managed toolchain into the cache directory so later phases
# can compile with it. Only CACHE_PATH is actually used below;
# BUILD_PATH, DEPS_PATH and INDEX are captured to satisfy the buildpack
# calling convention.
BUILD_PATH=${1:-}
CACHE_PATH=${2:-}
DEPS_PATH=${3:-}
INDEX=${4:-}
# NOTE(review): $CURL is defined but never used below — the rustup
# download uses a bare `curl` without these retry options. Confirm intent.
CURL="curl -s -L --retry 15 --retry-delay 2" # retry for up to 30 seconds
RUSTUP_URI="https://sh.rustup.rs"
# Rust Version Channel
VERSION=stable
# Bash Error Handling
set -eu
# Setup Environment File
# (written into the cache so later buildpack phases can `source` it;
# \$CARGO_HOME/\$PATH are escaped so they expand when sourced, not now)
cat << EOF > $CACHE_PATH/envsrc.sh
#!/usr/bin/env bash
# Rustup installation directory.
export RUSTUP_HOME="$CACHE_PATH/multirust"
# Cargo installation directory.
export CARGO_HOME="$CACHE_PATH/cargo"
# Cargo Target Directory
export CARGO_TARGET_DIR="$CACHE_PATH/target"
# Set cargo binaries in our path.
export PATH="\$CARGO_HOME/bin:\$PATH"
EOF
# Set our environment to our envsrc
source $CACHE_PATH/envsrc.sh
# Create and switch to our Cache Path
mkdir -p "$CACHE_PATH"
cd "$CACHE_PATH"
# Install or Update Rust Toolchain
# (an existing CARGO_HOME means rustup was installed on a previous run)
if [ -d "$CARGO_HOME" ]; then
echo "-----> Checking for new releases of Rust $VERSION channel"
rustup self update
rustup update "$VERSION"
rustup default "$VERSION"
else
echo "-----> Downloading rustup from $RUSTUP_URI"
curl $RUSTUP_URI -sSf > rustup.sh
chmod u+x rustup.sh
echo "-----> Using rustup to install Rust $VERSION channel"
./rustup.sh -y --default-toolchain "$VERSION"
rm rustup.sh
fi
# Verify we have an working Rust toolchain installed.
if [ ! -x "$CARGO_HOME/bin/rustc" ]; then
echo "failed: Cannot find Rust binaries at $CARGO_HOME"
exit 1
fi
| true
|
a6292bf8be210763ab67c8f89429e5e4e30d3970
|
Shell
|
kroseneg/old
|
/utils/release
|
UTF-8
| 710
| 3.734375
| 4
|
[
"DOC"
] |
permissive
|
#!/bin/bash
# Cut a release for a darcs-managed package.
#
# Usage: release OLDREL NEWREL   (run from the repo root)
#
# Produces ../NEWREL/ containing the dist tarballs ($PKG-NEWREL.tar.gz
# and .tar.bz2), a changelog since OLDREL, and a unified patch vs OLDREL.
PKG=old

# Require both release tags; exit non-zero on bad usage so callers can
# detect the failure (the original exited 0 here).
if [ -z "${1:-}" ] || [ -z "${2:-}" ]; then
    echo "Use: release OLDREL NEWREL"
    echo "Run from the repo root"
    exit 1
fi
OLDREL=$1
NEWREL=$2
TARGZBALL="$PKG-$NEWREL.tar.gz"
TARBZBALL="$PKG-$NEWREL.tar.bz2"
RELDIR="../$NEWREL"

# Refuse to clobber an existing release directory.
if [ -d "$RELDIR" ]; then
    echo "$RELDIR already exists!"
    exit 1
fi

echo "* making $RELDIR"
# Do not silence mkdir: if it fails, everything after it would too.
mkdir "$RELDIR" || exit 1
echo "* darcs dist"
darcs dist -d "$PKG-$NEWREL"
mv "$TARGZBALL" "$RELDIR"
echo "* darcs changes"
darcs changes --from-tag "$OLDREL" > "$RELDIR/Changelog-$NEWREL"
echo "* darcs diff"
darcs diff -u --from-tag "$OLDREL" > "$RELDIR/$PKG-$NEWREL.patch"
echo "* unpack"
cd "$RELDIR" || exit 1
tar -zxf "$TARGZBALL"
echo "* tar.bz2"
tar -cjf "$TARBZBALL" "$PKG-$NEWREL"
| true
|
fc550d13287b4e04e31937a4ea77b4baccd18f1c
|
Shell
|
jwalzer/debian-initrd-debug-helper-git
|
/hooks/zfs
|
UTF-8
| 3,865
| 3.75
| 4
|
[] |
no_license
|
#!/bin/sh
#
# Add ZoL filesystem capabilities to an initrd, usually for a native ZFS root.
#
# This hook installs udev rules for ZoL.
#
# initramfs-tools hook: copies ZFS userland tools, kernel modules, udev
# rules, hostid/hostname and the zpool cache into $DESTDIR (the initrd
# staging tree provided by initramfs-tools).
PREREQ=""
# These prerequisites are provided by the zfsutils package. The zdb utility is
# not strictly required, but it can be useful at the initramfs recovery prompt.
COPY_EXEC_LIST=
#COPY_EXEC_LIST="$COPY_EXEC_LIST /usr/sbin/splat"
#COPY_EXEC_LIST="$COPY_EXEC_LIST /usr/sbin/spl"
COPY_EXEC_LIST="$COPY_EXEC_LIST /sbin/zdb /sbin/zpool /sbin/zfs /sbin/mount.zfs"
# These prerequisites are provided by the base system.
COPY_EXEC_LIST="$COPY_EXEC_LIST /bin/hostname /sbin/blkid /lib/udev/zvol_id /lib/udev/vdev_id /usr/bin/hostid"
# zfs export uses umount -t, busybox does not know
COPY_EXEC_LIST="$COPY_EXEC_LIST /bin/umount"
# Explicitly specify all kernel modules because automatic dependency resolution
# is unreliable on many systems.
MANUAL_ADD_MODULES_LIST="zlib_deflate spl zavl zcommon znvpair zunicode zfs"
UDEV_RULES="69-vdev.rules 60-zvol.rules 90-zfs.rules"
# Standard initramfs-tools hook protocol: answer "prereqs" queries.
prereqs() { echo "$PREREQ"; exit 0; }
[ "$1" = prereqs ] && prereqs
. /usr/share/initramfs-tools/hook-functions
# Site-local switch: only run this hook when X_LOCAL_ZFS is enabled in
# the optional initramfs-x.conf.
[ -f /etc/initramfs-tools/conf.d/initramfs-x.conf ] && . /etc/initramfs-tools/conf.d/initramfs-x.conf
case "$X_LOCAL_ZFS" in
1 | yes | y ) :;;
* ) exit 0;;
esac
echo "I: add '$0'"
# Generic result code.
RC=0
# Fail fast if any required binary is missing before touching $DESTDIR.
for ii in $COPY_EXEC_LIST; do
if [ ! -x "$ii" ]; then
echo "Error: $ii is not executable."
RC=2
fi
done
if [ "$RC" -ne 0 ]; then
exit "$RC"
fi
mkdir -p "$DESTDIR/etc/"
# copy zfs udev rules:
mkdir -p "$DESTDIR/lib/udev/rules.d"
for ii in $UDEV_RULES; do
cp -p "/lib/udev/rules.d/$ii" "$DESTDIR/lib/udev/rules.d"
done
# ZDB uses pthreads for some functions, but the library dependency is not
# automatically detected. The `find` utility and extended `cp` options are
# used here because libgcc_s.so could be in a subdirectory of /lib for
# multi-arch installations.
cp --target-directory="$DESTDIR" --parents $(find /lib -type f -name libgcc_s.so.1)
for ii in $COPY_EXEC_LIST; do
copy_exec "$ii"
done
for ii in $MANUAL_ADD_MODULES_LIST; do
manual_add_modules "$ii"
done
# The hostname must match the running system so pool import checks pass.
if [ -f "/etc/hostname" ]; then
cp -p "/etc/hostname" "$DESTDIR/etc/"
else
hostname >"$DESTDIR/etc/hostname"
fi
# The spl package ensures that the /etc/hostid file exists.
# NB: Commentary in the spl.postinst script.
cp -p "/etc/hostid" "$DESTDIR/etc/hostid"
# copy zpool.cache
# If the root filesystem is ZFS, refresh the cachefile for that pool and
# ship it so the initrd can import the pool without scanning devices.
mkdir -p "$DESTDIR/etc/zfs"
#if [ ! -f "/etc/zfs/zpool.cache" ]; then
POOL="$(awk '$2 == "/" && $3 == "zfs" {print $1} ' /proc/mounts)"
POOL="${POOL%%/*}"
if [ -n "$POOL" ]; then
zpool set cachefile=/etc/zfs/zpool.cache "$POOL"
cp /etc/zfs/zpool.cache "$DESTDIR/etc/zfs"
fi
#fi
# ensure /etc/mtab exists
ln -sf /proc/mounts "$DESTDIR"/etc/mtab
# rename zfs module so that it not loaded automatically on zvol detection
# (the rczfs helper below renames it back and loads it deliberately)
find "$DESTDIR"/lib/modules -name "zfs.ko" -exec mv {} {}.disabled \;
cat > "$DESTDIR"/bin/rczfs << EOF
#!/bin/sh -x
# rename zfs moduled, previously renamed to disable automatic loading
find /lib/modules -name "zfs.ko.disabled" -exec echo mv {} {} \; | sed 's/\.disabled$//' | sh
/sbin/depmod -a
# Load the module now to get consistent automatic pool import behavior.
rm -f /etc/zfs/zpool.cache
/sbin/rmmod zfs 2> /dev/null
/sbin/modprobe zfs || modprobe zfs
EOF
chmod 755 "$DESTDIR"/bin/rczfs
# Optionally ship a site-local snapshot helper if present on the host.
if [ -f /root/scripts/zfs-snapshots.sh ]; then
mkdir -p "$DESTDIR"/bin
cp /root/scripts/zfs-snapshots.sh "$DESTDIR"/bin/zfs-snapshots.sh
chmod 755 "$DESTDIR"/bin/zfs-snapshots.sh
fi
# With pull request #1476 (not yet merged) comes a verbose warning
# if /usr/bin/net doesn't exist or isn't executable. Just create
# a dummy...
[ ! -d "$DESTDIR/usr/bin" ] && mkdir -p "$DESTDIR/usr/bin"
if [ ! -x "$DESTDIR/usr/bin/net" ]; then
touch "$DESTDIR/usr/bin/net"
chmod +x "$DESTDIR/usr/bin/net"
fi
| true
|
ee96a86334f06ef0ef825c1384ca17701d142f44
|
Shell
|
ShefWuzi/msc-dissertation
|
/data_collection/git_metadata.sh
|
UTF-8
| 5,612
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# Emit one CSV row per commit of a GitHub repository, combining:
#   - repository stats (forks, stars, ...) from the GitHub API,
#   - the commit author's GitHub profile stats,
#   - per-commit diff statistics from a local `git clone`.
# Usage: $0 <git url>   e.g. https://github.com/<user>/<repo>
# Requires: curl, jq, git, bc. Output is written to stdout.
if [ $# -ne 1 ]; then
echo "[*] Usage $0 <git url>"
exit;
fi
# Probe the API once; a non-null .message mentioning the rate limit means
# we are throttled, so sleep in 10-minute steps until the limit resets.
check_github_api=$(curl -s https://api.github.com/users/ShefWuzi | jq 'select (.message != null) | .message' | grep "API rate limit")
while true;
do
if [[ $check_github_api != "" ]]; then
sleep 10m;
check_github_api=$(curl -s https://api.github.com/users/ShefWuzi | jq 'select (.message != null) | .message')
else
break
fi
done
# Last two URL path components are <user>/<repo>.
user=$(echo $1 | rev | cut -d / -f 2 | rev)
repo=$(echo $1 | rev | cut -d / -f 1 | rev)
contrib_res=$(curl -s https://api.github.com/repos/$user/$repo/contributors)
if [[ $contrib_res == "" ]]; then
echo "[X] No issues returned"
exit;
fi
# Build contrib_arr: one "name,repos,gists,followers,following,type,
# is_company,account_age_days" line per contributor. Each element is
# base64-encoded by jq so it survives word-splitting in the for loop.
for contrib in $(echo $contrib_res | jq -r '.[] | @base64'); do
login=$(echo "$contrib" | base64 -d | jq -r '.login')
user_profile=$(curl -s https://api.github.com/users/$login)
name=$(echo "$user_profile" | jq '.name')
public_repos=$(echo "$user_profile" | jq '.public_repos')
public_gists=$(echo "$user_profile" | jq '.public_gists')
followers=$(echo "$user_profile" | jq '.followers')
following=$(echo "$user_profile" | jq '.following')
u_type=$(echo "$user_profile" | jq '.type')
is_company=$(echo "$user_profile" | jq 'select (.company != null) | .company')
# Encode "has a company field" as a 0/1 flag.
if [[ $is_company == "" ]]; then
is_company="0"
else
is_company="1"
fi
created_at=$(echo "$user_profile" | jq 'select (.created_at != null) | .created_at')
updated_at=$(echo "$user_profile" | jq 'select (.updated_at != null) | .updated_at')
# Account age in days = (updated - created) epoch seconds / 86400,
# rounded; -1 when either timestamp is missing.
if [[ $created_at != "" && $updated_at != "" ]];then
created_at=$(date -d $(echo "$user_profile" | jq '.created_at' | cut -d T -f 1 | tr -d \") '+%s')
updated_at=$(date -d $(echo "$user_profile" | jq '.updated_at' | cut -d T -f 1 | tr -d \") '+%s')
n_days=$(printf "%.0f" $(echo "scale=2; ( $updated_at - $created_at )/(60*60*24)" | bc))
else
n_days=$(echo "-1")
fi
contrib_arr=$(echo -e "$contrib_arr\n$name,$public_repos,$public_gists,$followers,$following,$u_type,$is_company,$n_days")
done
# author_details "<Name> <email>": strip the trailing email token, then
# look the name up in contrib_arr. Sets the global commit_author to
# "<arg>,<profile fields>" on a match, or "<arg>,-1,...,-1" otherwise.
author_details(){
author_name=$(awk 'NF{NF--};1' < <(echo "$1"))
while read -r contrib;
do
author=$(echo "$contrib" | grep "$author_name")
if [[ $author != "" ]]; then
commit_author=$(echo "$1,$contrib" | cut -d , -f 2-)
return 1
fi
done <<< "$contrib_arr"
commit_author=$(echo "$1,-1,-1,-1,-1,-1,-1,-1")
}
# get_date "<Dow Mon DD HH:MM:SS YYYY ...>": split a git Date: line into
# day-of-week, hour, minute and "DD Mon YYYY", stored in the global
# commit_date as "dow,hod,moh,dom moy yoc".
get_date(){
dow=$(echo "$1" | cut -d ' ' -f 1)
hod=$(echo "$1" | cut -d ' ' -f 4 | cut -d ':' -f 1)
moh=$(echo "$1" | cut -d ' ' -f 4 | cut -d ':' -f 2)
dom=$(echo "$1" | cut -d ' ' -f 3)
moy=$(echo "$1" | cut -d ' ' -f 2)
yoc=$(echo "$1" | cut -d ' ' -f 5)
commit_date=$(echo "$dow,$hod,$moh,$dom $moy $yoc")
}
# Repository-level stats, flattened into one CSV prefix reused per row.
repo_deets=$(curl -s https://api.github.com/repos/$user/$repo)
forks_count=$(echo "$repo_deets" | jq '.forks_count')
stargazers_count=$(echo "$repo_deets" | jq '.stargazers_count')
watchers_count=$(echo "$repo_deets" | jq '.watchers_count')
size=$(echo "$repo_deets" | jq '.size')
open_issues_count=$(echo "$repo_deets" | jq '.open_issues_count')
subscribers_count=$(echo "$repo_deets" | jq '.subscribers_count')
repo_deets=$(echo "$forks_count,$stargazers_count,$watchers_count,$size,$open_issues_count,$subscribers_count")
folder=$(echo "$1" | cut -d / -f 5)
git clone $1 /tmp/$folder
cd /tmp/$folder
echo "Repo_Forks, Repo_Stars, Repo_Watchers, Repo_Size, Repo_Issues, Repo_Subscribers, Author_Name, Author_Repos, Author_Gists, Author_Followers, Author_Following, Author_Type, Author_Company, Author_Days, Commit_Date_DOW, Commit_Date_HOD, Commit_Date_MOH, Commit_Date, Commit Message, Number of Removed Files, Number of Added Files, Number of Edited Files, Number_of_edited_lines, Amount of edit bytes, Added content, Removed content"
# One row per commit: parse `git show` output for author, date, message
# and +/- diff lines. Commas in free text are replaced (or encoded as
# <c>) so the CSV stays parseable; newlines become <\n>.
for id in $(git log | grep -E "^commit" | cut -d ' ' -f 2); do
commit_details=$(git show $id | cat)
author_details "$(echo "$commit_details" | grep -E '^Author:\s' | sed -e 's/^Author:\s//g')"
commit_msg=$(git show $id --stat | grep -E '^\s{4}' | sed -e "s/'//g" | sed -e "s/,//g" | xargs)
get_date "$(echo "$commit_details" | grep -E '^Date:\s{3}' | sed -e 's/^Date:\s\{3\}//g')"
n_removed_files=$(echo "$commit_details" | grep -E '^\+{3}\s/dev/null' | wc -l)
n_added_files=$(echo "$commit_details" | grep -E '^-{3}\s/dev/null' | wc -l)
n_edit_files=$(echo "$commit_details" | grep -E '^-{3}|^\+{3}' | grep -v /dev/null | grep -oE '^-{3}|^\+{3}' | sort | uniq -c | sort -n | head -n 1 | awk '{print $1}')
n_edit_size=$(echo "$commit_details" | grep -E '^\+[^\+{2}]|^-[^-{2}]' | wc -c)
n_edit_lines=$(echo "$commit_details" | grep -E '^\+[^\+{2}]|^-[^-{2}]' | wc -l)
added_lines=$(echo "$commit_details" | grep -E '^\+[^\+{2}]' | perl -p -e 's/\n/<\\n>/' | sed -e 's/,/<c>/g')
removed_lines=$(echo "$commit_details" | grep -E '^-[^-{2}]' | perl -p -e 's/\n/<\\n>/' | sed -e 's/,/<c>/g')
if [ -z "$n_edit_files" ]; then
n_edit_files=0
fi
echo "$repo_deets,$commit_author,$commit_date,$commit_msg,$n_removed_files,$n_added_files,$n_edit_files,$n_edit_lines,$n_edit_size,$added_lines,$removed_lines"
done
rm -rf /tmp/$folder
# for repo in "https://github.com/ShefWuzi/msc-dissertation" "https://github.com/dominictarr/event-stream" "https://github.com/eslint/eslint-scope" "https://github.com/vasilevich/nginxbeautifier" "https://github.com/Neil-UWA/simple-alipay" "https://github.com/andrewjstone/s3asy" "https://github.com/OpusCapita/react-dates" "https://github.com/react-component/calendar" "https://github.com/anbi/mydatepicker"; do repo_name=$(echo $repo | rev | cut -d / -f 1 | rev); ./git_metadata.sh $repo > $repo_name.csv ; echo "Done with $repo_name....Sleeping"; sleep 15m; done
| true
|
e06d3ead9da7498bc49fb6e44371a19c80b64fb6
|
Shell
|
harlo/UnveillanceAnnex
|
/setup.sh
|
UTF-8
| 1,076
| 3.4375
| 3
|
[] |
no_license
|
#! /bin/bash
# Unveillance Annex bootstrap: installs system packages, the Anaconda
# Python distribution (if missing), Python requirements, and runs the
# project's setup.py. With no arguments it also launches the annex in
# first-use mode; with an argument, that argument is forwarded to
# setup.py as the config and the launch step is skipped.
THIS_DIR=`pwd`
if [ $# -eq 0 ]
then
LAUNCH_ANNEX=true
WITH_CONFIG=0
else
LAUNCH_ANNEX=false
WITH_CONFIG=$1
fi
sudo apt-get install -y gcc lsof
# Detect an existing Anaconda install by where `python` resolves to.
PYTHON_VERSION=$(which python)
if [[ $PYTHON_VERSION == *anaconda/bin/python ]]
then
echo "ANACONDA already installed. Skipping"
else
# Interactive Anaconda installer; the sleeps give the user time to read
# the banner before the installer prompts appear.
wget -O lib/anaconda.sh http://09c8d0b2229f813c1b93-c95ac804525aac4b6dba79b00b39d1d3.r79.cf1.rackcdn.com/Anaconda-2.0.1-Linux-x86_64.sh
chmod +x lib/anaconda.sh
echo "**************************************************"
echo "Installing Python Framework via ANACONDA"
sleep 10
./lib/anaconda.sh
sleep 3
# Copy the PATH line the installer appended to .bashrc into
# .bash_profile so login shells pick it up too.
# NOTE(review): $ANACONDA is expanded unquoted here, so the copied line
# is re-word-split before being appended — confirm this is intended.
ANACONDA=$(grep "anaconda" ~/.bashrc)
echo $ANACONDA >> ~/.bash_profile
sleep 3
fi
source ~/.bash_profile
# Install dependencies for the bundled Core library, then this project.
cd lib/Core
pip install -r requirements.txt
cd $THIS_DIR
pip install -r requirements.txt
cd lib/socksjs-tornado
python setup.py install
cd $THIS_DIR
echo "**************************************************"
python setup.py $WITH_CONFIG
source ~/.bash_profile
sleep 2
# Default (no-arg) run: lock down config files and start the annex.
if $LAUNCH_ANNEX; then
chmod 0400 conf/*
python unveillance_annex.py -firstuse
fi
| true
|
87091de877cf86ea96d787348f89579ca42105be
|
Shell
|
golden75/Variant_Detection_GATK
|
/scripts/picard_sort.sh
|
UTF-8
| 1,363
| 2.859375
| 3
|
[] |
no_license
|
#!/bin/bash
#SBATCH --job-name=picard_sort
#SBATCH -n 1
#SBATCH -N 1
#SBATCH -c 1
#SBATCH --mem=30G
#SBATCH --partition=general
#SBATCH --qos=general
#SBATCH --array=[1-8]%8
##SBATCH --mail-type=ALL
##SBATCH --mail-user=neranjan.perera@uconn.edu
#SBATCH -o ../log_files/%x_%A_%a.out
#SBATCH -e ../log_files/%x_%A_%a.err
# SLURM array job: coordinate-sort one filtered BAM per array task with
# Picard SortSam. Task N (1..8) processes INPUT_FILES[N-1]; output goes
# to ../align/<sample>_filtered_sort.bam with a BAM index.
hg19=/home/FCAM/nperera/Tutorial/variant_detection_GATK/Illumina/hg19/hg19.fa
R1="_1.fastq"
R2="_2.fastq"
d1="raw_data"
d2=align
# One SRA accession per array task; SLURM_ARRAY_TASK_ID is 1-based.
INPUT_FILES=(SRR1517848 SRR1517878 SRR1517884 SRR1517906 SRR1517991 SRR1518011 SRR1518158 SRR1518253)
INPUT_FILE_NAME="${INPUT_FILES[$SLURM_ARRAY_TASK_ID - 1]}"
echo "host name : " `hostname`
echo "input file name : " $INPUT_FILE_NAME
echo "SLURM_ARRAY_TASK_ID : " $SLURM_ARRAY_TASK_ID
if [ ! -d ../${d2} ]; then
mkdir -p ../${d2}
fi
cd ../${d2}
##################################################################
## Sort
##################################################################
echo "=== Sort start: `date` ==="
module load picard/2.9.2
# Point the JVM's temp dir at scratch space (sorting can spill to disk).
export _JAVA_OPTIONS=-Djava.io.tmpdir=/scratch
java -jar $PICARD SortSam \
INPUT=${INPUT_FILE_NAME}_filtered.bam \
OUTPUT=${INPUT_FILE_NAME}_filtered_sort.bam \
SORT_ORDER=coordinate \
CREATE_INDEX=True
echo "Sorted : " ${INPUT_FILE_NAME} `date`
module unload picard/2.9.2
echo "=== BAM file sort : ${INPUT_FILE_NAME} `date`==="
| true
|
b73ec4e506c57a75f12fa80f16732abedf6bc05c
|
Shell
|
creio/dots
|
/.bin/macho.sh
|
UTF-8
| 595
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/sh
# Interactive man-page picker: list candidate manual entries with
# apropos, let the user choose one in fzf (with a rendered man-page
# preview), then open the selection with man.
#
# Usage: macho.sh [-s SECTION] [keyword...]   (default keyword: ".")
export FZF_DEFAULT_OPTS='
--height=30%
--layout=reverse
--prompt="Manual: "
--preview="echo {1} | sed -E \"s/^\((.+)\)/\1/\" | xargs -I{S} man -Pcat {S} {2} 2>/dev/null"'
while getopts ":s:" opt; do
case $opt in
s ) SECTION=$OPTARG; shift; shift;;
\?) echo "Invalid option: -$OPTARG" >&2; exit 1;;
: ) echo "Option -$OPTARG requires an argument" >&2; exit 1;;
esac
done
# Pipeline: apropos results -> drop section-0 entries -> reorder to
# "(section) name" -> sort -> pick in fzf -> strip the parentheses so
# the result is "section name" for man.
manual=$(apropos -s ${SECTION:-''} ${@:-.} | \
grep -v -E '^.+ \(0\)' |\
awk '{print $2 " " $1}' | \
sort | \
fzf | \
sed -E 's/^\((.+)\)/\1/')
[ -z "$manual" ] && exit 0
# Intentionally unquoted: $manual holds "section page" as two words.
man $manual
| true
|
723eff9cd2f0bcd7f32697394bba1d45ae14182f
|
Shell
|
antenore/svntogit-community
|
/python-markdown-it-py/trunk/PKGBUILD
|
UTF-8
| 1,045
| 2.546875
| 3
|
[] |
no_license
|
# Maintainer: Filipe Laíns (FFY00) <lains@archlinux.org>
# Arch Linux PKGBUILD for the markdown-it-py Python package, built from
# the upstream GitHub release tarball with setuptools.
_pkgname=markdown-it-py
pkgname=python-$_pkgname
pkgver=1.1.0
pkgrel=1
pkgdesc='Python port of markdown-it. Markdown parsing, done right!'
arch=('any')
url='https://github.com/executablebooks/markdown-it-py'
license=('MIT')
depends=('python-attrs')
optdepends=('python-mdit_py_plugins: core plugins')
makedepends=('python-setuptools')
checkdepends=('python-pytest' 'python-pytest-regressions' 'python-pytest-benchmark' 'python-psutil' 'python-mdit_py_plugins')
source=("$pkgname-$pkgver.tar.gz::$url/archive/v$pkgver.tar.gz")
sha512sums=('36b8a557afb4f6314aeba47e4c26ce7748679d771e9793a92a9e1a3b93319ed640bddf7e3f46a74a1ec71d837952115a76edf93b33ba4b693e496f31e4528bee')
# Standard setuptools build in the unpacked source directory.
build() {
cd $_pkgname-$pkgver
python setup.py build
}
# Run the test suite; test_linkify is skipped (its dependency is not in
# checkdepends).
check() {
cd $_pkgname-$pkgver
pytest -k 'not test_linkify'
}
# Install into the package root and ship the MIT license file.
package() {
cd $_pkgname-$pkgver
python setup.py install --root="$pkgdir" --optimize=1 --skip-build
install -Dm 644 LICENSE "$pkgdir"/usr/share/licenses/$pkgname/LICENSE
}
# vim:set ts=2 sw=2 et:
| true
|
93fb35ac27f1b78351e66faca645490ec9b60c5a
|
Shell
|
kh42z/ft_services
|
/srcs/mysql/files/import.sh
|
UTF-8
| 628
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# Wait for MariaDB to come up (15 minutes max), then bootstrap it:
# create the application user, import the WordPress dump with {EIP}
# substituted, apply permissions, and drop a /tmp/ready marker file.
#
# Environment: FT_USER, FT_PASSWORD (application credentials), EIP
# (external IP substituted into /opt/wp.sql).
maxcounter=900
counter=0
# Poll until the server answers a trivial query, or give up after
# $maxcounter seconds.
while ! mysql -uroot -e "show databases;" > /dev/null 2>&1; do
    sleep 1
    # shell arithmetic instead of spawning `expr` each second
    counter=$((counter + 1))
    echo "Waiting for MariaDB to be started: ${counter}"
    if [ "$counter" -gt "$maxcounter" ]; then
        >&2 echo "Cant wait more"
        exit 1
    fi
done
sleep 1
mysql -uroot -Bse "CREATE USER '$FT_USER'@'%' IDENTIFIED BY '$FT_PASSWORD';GRANT ALL PRIVILEGES ON *.* TO '$FT_USER'@'%' IDENTIFIED BY '$FT_PASSWORD' WITH GRANT OPTION;"
# Quote the replacement so an empty/space-containing EIP cannot break
# the sed expression.
sed "s/{EIP}/${EIP}/g" -i /opt/wp.sql
mysql -uroot < /opt/wp.sql
mysql -uroot < /opt/permissions.sql > /tmp/install.log 2>&1
touch /tmp/ready
| true
|
cb71260325d70affc2ead846a8dc621aae37731c
|
Shell
|
mani-durai/Launch-ec2-awscli
|
/Launch-ec2-awscli.sh
|
UTF-8
| 689
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
#scripts for ec2 instances launch using AWS CLI
#Script by Manidurai
#Email ID:dd.manikandan55@gmail.com
#First of please install and configured AWS CLI
#
# Prompts for the launch parameters and writes the run-instances text
# output to /tmp/ec2_inst.txt.
echo "Ec2 instances provisioned to AWS CLI"
read -p "Enter your image ID : " IMAGEID
read -p "Enter your instance count : " COUNT
read -p "Enter Instance type :" INSTANCE_TYPE
read -p "Enter private/public subnet id :" SUBNET_ID
read -p "Enter existing key-pair name :" KEY_PAIR
read -p "Enter existing security group id :" SECURITY_GROUP
# The run-instances flag is --security-group-ids (plural); the singular
# form used previously is not a valid option. All values are quoted so
# accidental spaces in input cannot split the command line.
aws ec2 run-instances --image-id "$IMAGEID" --count "$COUNT" --instance-type "$INSTANCE_TYPE" --subnet-id "$SUBNET_ID" --key-name "$KEY_PAIR" --security-group-ids "$SECURITY_GROUP" --output text > /tmp/ec2_inst.txt
| true
|
ca9a9a5bb378460741ce77338caa2a5ba749e2c2
|
Shell
|
openbmc/phosphor-debug-collector
|
/tools/dreport.d/plugins.d/dmesginfo
|
UTF-8
| 198
| 2.71875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# config: 2 20
# @brief: Collect dmesg information.
#
# dreport plugin: captures the kernel ring buffer into dmesg.log inside
# the debug-collection archive via the add_cmd_output helper.
# NOTE(review): the "config: 2 20" line appears to be parsed by the
# dreport framework (plugin ordering/size?) — confirm before changing.
. $DREPORT_INCLUDE/functions
desc="dmesg"
file_name="dmesg.log"
command="dmesg"
add_cmd_output "$command" "$file_name" "$desc"
| true
|
36fe2e22ca393044fcfbf8ce10c92b5f7f28a6ac
|
Shell
|
cryanez/chilegenomico2
|
/GTSEQ_Ancestry_Estimate/code/get_Ancestry.sh
|
UTF-8
| 3,727
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
##########################################################################################
# Programmed by Cristian Yanez, Engineer in bioinformatics, ChileGenomico Laboratory,
# Faculty of Medicine, University of Chile.
#
# 2020-07-13
##########################################################################################
# Estimate genetic ancestry for a plink sample set: extract ancestry-
# informative markers (AIMs), filter by missingness, intersect with the
# REF224 reference panel, merge, and run the windowed ancestry iterator.
#
# Arguments:
#   $1 nameSet          plink set name (output file prefix)
#   $2 CLG2_Ancestry    ancestry reference table (tsv)
#   $3 name_folder_set  output subfolder under ancestry/
#   $4 ruta_set_plink   path to the input plink fileset (no extension)
shopt -s expand_aliases
source ~/.bashrc
conda activate anaconda3_python2
nameSet=$1
CLG2_Ancestry=$2
name_folder_set=$3
ruta_set_plink=$4
#Execute as:
#bash code/get_Ancestry.sh Nombre_Set_plink input/CLG2_Ancestry.tsv Nombre_carpeta_etapa4_set ruta_set_plink
#bash code/get_Ancestry.sh GTS5_BiLS input/CLG2_Ancestry.tsv BiLs ../1_crear_sets_por_proyectos/SETS_FINALES/BiLs/GTS5_BiLS
echo $nameSet
echo $CLG2_Ancestry
echo $name_folder_set
echo $ruta_set_plink
#Extract AIMS
plink1.9 --bfile $ruta_set_plink --extract "input/List_AIMS.csv" --make-bed --out "ancestry/"$name_folder_set"/"$nameSet"_AIMS"
# Drop SNPs with >20% missing genotypes (column 5 of .lmiss is F_MISS).
plink1.9 --bfile "ancestry/"$name_folder_set"/"$nameSet"_AIMS" --missing --out "ancestry/"$name_folder_set"/"$nameSet"_AIMS_missing"
awk '$5 > 0.2 {print $2}' "ancestry/"$name_folder_set"/"$nameSet"_AIMS_missing.lmiss" | tail -n +2 > "ancestry/"$name_folder_set"/list_SNPs_missing_"$nameSet".list"
plink1.9 --bfile "ancestry/"$name_folder_set"/"$nameSet"_AIMS" --exclude "ancestry/"$name_folder_set"/list_SNPs_missing_"$nameSet".list" --make-bed --out "ancestry/"$name_folder_set"/"$nameSet"_AIMS_filter1"
# Keep only SNPs shared with the REF224 reference panel (.bim column 2
# is the SNP id), then subset both datasets to that intersection.
comm -12 <(cut -f 2 "ancestry/"$name_folder_set"/"$nameSet"_AIMS_filter1.bim" | sort | uniq) <(cut -f 2 "input/REF224_SORT.bim" | sort | uniq) > "ancestry/"$name_folder_set"/list_SNPs_comm.list"
plink1.9 --bfile "ancestry/"$name_folder_set"/"$nameSet"_AIMS_filter1" --extract "ancestry/"$name_folder_set"/list_SNPs_comm.list" --make-bed --out "ancestry/"$name_folder_set"/"$nameSet"_AIMS_filter2"
plink1.9 --bfile "input/REF224_SORT" --extract "ancestry/"$name_folder_set"/list_SNPs_comm.list" --indiv-sort f "input/list_REF224.list" --make-bed --out "ancestry/"$name_folder_set"/REF224_SORT_AIMS"
#We create samples popinfo
# Reference individuals keep their panel labels; study samples are
# appended as "unknown/admixed" rows.
cat "input/REF224_popinfo.csv" > "ancestry/"$name_folder_set"/popinfo_REF224_"$nameSet".csv"
awk '{print $1}' "ancestry/"$name_folder_set"/"$nameSet"_AIMS_filter2.fam" | xargs -I{} echo -e {}"\t\tunknown\tunknown\tunknown\tadmixed\tadmixed\t"{}"\tunknown\t1\t\t\t\t\t" >> "ancestry/"$name_folder_set"/popinfo_REF224_"$nameSet".csv"
cut -f 1 "ancestry/"$name_folder_set"/popinfo_REF224_"$nameSet".csv" | tail -n +2 | xargs -I{} echo -e {}"\t"{} > "ancestry/"$name_folder_set"/list_REF224_"$nameSet".list"
# Merge reference + study samples and sort individuals to match popinfo.
plink1.9 --bfile "ancestry/"$name_folder_set"/"$nameSet"_AIMS_filter2" --bmerge "ancestry/"$name_folder_set"/REF224_SORT_AIMS" --make-bed --out "ancestry/"$name_folder_set"/REF224_"$nameSet
plink1.9 --bfile "ancestry/"$name_folder_set"/REF224_"$nameSet --indiv-sort f "ancestry/"$name_folder_set"/list_REF224_"$nameSet".list" --make-bed --out "ancestry/"$name_folder_set"/REF224_"$nameSet"_SORT"
mkdir "ancestry/"$name_folder_set"/input"
cp "ancestry/"$name_folder_set"/REF224_"$nameSet"_SORT.bed" "ancestry/"$name_folder_set"/REF224_"$nameSet"_SORT.bim" "ancestry/"$name_folder_set"/REF224_"$nameSet"_SORT.fam" "ancestry/"$name_folder_set"/input"
cp "ancestry/"$name_folder_set"/popinfo_REF224_"$nameSet".csv" "ancestry/"$name_folder_set"/input"
#Ancestry calculation by windows
# Admixed samples start at index 225 (after the 224 reference samples).
endAdmixed=$(wc -l "ancestry/"$name_folder_set"/input/REF224_"$nameSet"_SORT.fam" | awk '{print $1}')
bash code/1_ancestry_windows_iterator.sh -p "ancestry/"$name_folder_set"/input/REF224_"$nameSet"_SORT" -s 225 -e $endAdmixed -i "ancestry/"$name_folder_set"/input/popinfo_REF224_"$nameSet".csv" -t 8 -d 1 -w "ancestry/"$name_folder_set -r $nameSet -a $CLG2_Ancestry
| true
|
14626af2fc395883c77ec4cb6f02bf725816d8bd
|
Shell
|
bioboxes/minia
|
/run.sh
|
UTF-8
| 1,092
| 3.953125
| 4
|
[] |
no_license
|
#!/bin/bash
# Biobox entrypoint for the minia assembler: validate the biobox.yaml
# input, gather the fastq read paths, run the task named in $1 from
# /Taskfile, and publish minia's contigs plus an output biobox.yaml.
set -o errexit
set -o nounset
INPUT=/bbx/input/biobox.yaml
OUTPUT=/bbx/output
METADATA=/bbx/metadata
TASK=$1
# Ensure the biobox.yml file is valid
sudo ${VALIDATOR}validate-biobox-file --input ${INPUT} --schema ${VALIDATOR}schema.yaml
# Parse the read locations from this file
# (all fastq values, space-joined onto one line)
READS=$(sudo /usr/local/bin/yaml2json < ${INPUT} \
| jq --raw-output '.arguments[] | select(has("fastq")) | .fastq[].value ' | tr '\n' ' ')
TMP_DIR=$(mktemp -d)
# Run the given task
# (/Taskfile maps "name: command"; take everything after the colon)
CMD=$(egrep ^${TASK}: /Taskfile | cut -f 2 -d ':')
if [[ -z ${CMD} ]]; then
echo "Abort, no task found for '${TASK}'."
exit 1
fi
# minia reads its input fastq list from a file, one path per line.
MINIA_INPUT=${TMP_DIR}/minia_input
touch $MINIA_INPUT
for READ_PATH in $READS
do
echo $READ_PATH >> $MINIA_INPUT
done
cd $TMP_DIR
# if /bbx/metadata mounted create log.txt
if [ -d "$METADATA" ]; then
CMD="($CMD) >& $METADATA/log.txt"
fi
eval ${CMD}
mkdir -p $OUTPUT
sudo mv ${TMP_DIR}/minia.contigs.fa ${OUTPUT}
cat << EOF > ${OUTPUT}/biobox.yaml
version: 0.9.0
arguments:
- fasta:
- id: minia_contigs
value: minia.contigs.fa
type: contigs
EOF
| true
|
43c5fccbda8e1d0c235fb648fc81f3791ec81c35
|
Shell
|
kishori82/multi-seq
|
/regtests/run_regtests.sh
|
UTF-8
| 403
| 2.828125
| 3
|
[] |
no_license
|
#!/bin/bash
# To run the tests type ". run_regtests.sh"
# Regression test driver for RiboCensus: reset regtests/output, then run
# the pipeline on the chosen sample under regtests/input.
source RiboCensusrc
SAMPLE=zymopure
#SAMPLE=beaver
# Start from an empty output directory either way.
if [ ! -d regtests/output ]; then
mkdir regtests/output
else
rm -rf regtests/output/*
fi
# Echo the command first so the log shows exactly what was run.
echo python RiboCensus.py -i regtests/input -o regtests/output -s ${SAMPLE} -v 1
python RiboCensus.py -i regtests/input -o regtests/output -s ${SAMPLE} -v 1
exitcode=$?
# Kept commented on purpose: this file is meant to be sourced (see
# header), and `exit` would terminate the caller's shell.
#exit ${exitcode}
| true
|
120db99e832d24240b06a3b43e4424023834e836
|
Shell
|
leonsk32/library
|
/scripts/run-e2e-tests.sh
|
UTF-8
| 574
| 3.40625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# End-to-end test driver: locate the project root relative to this
# script, then bring up the backend and frontend and run the cucumber
# suite from the backend service.
declare ROOT_DIR=""
# Enable strict mode: exit on error/unset variable, fail pipelines.
function set_bash_error_handling() {
set -euo pipefail
}
# Resolve ROOT_DIR to the repository root (parent of this script's dir).
function set_project_root_directory() {
local -r script_dir=$( dirname "${BASH_SOURCE[0]}")
cd "$script_dir/.."
ROOT_DIR=$(pwd)
}
# NOTE(review): `gradlew bootRun` and `yarn serve` normally run servers
# in the foreground and do not return, which would prevent the cucumber
# step from ever executing — confirm whether these are configured to
# daemonize in this project.
function run_e2e_tests() {
cd "$ROOT_DIR/library-backend-service"
./gradlew bootRun
cd "$ROOT_DIR/library-frontend-service"
yarn serve
cd "$ROOT_DIR/library-backend-service"
./gradlew cucumber
}
function main() {
set_bash_error_handling
set_project_root_directory
run_e2e_tests
}
main
| true
|
d938920c6c3eee7119c9f56199784d64be3a24ef
|
Shell
|
ammolitor/bin
|
/old-scripts/splice/copy-data.sh
|
UTF-8
| 1,017
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# Helpers for mirroring data from S3 into HDFS with hadoop distcp.

# copy_files <s3source> <dest>
# Recreate the S3 "folder" hierarchy in HDFS, then distcp every object
# one by one into hdfs:///<dest>/<key>.
function copy_files {
  s3source=$1
  dest=$2
  # Keys ending in "/" are folder placeholders — create them in HDFS.
  folders=$(for folder in $(aws s3 ls s3://${s3source} --recursive | grep -v -E "(Bucket: |Prefix: |LastWriteTime|^$|--)" | awk '{print $4}' | grep \/$); do echo ${folder}; done)
  for folder in ${folders[@]}; do
    sudo -su hdfs hadoop fs -mkdir -p /${folder}
  done
  # Everything else is a real object; copy each individually (-i: ignore
  # per-file failures so one bad object does not abort the batch).
  files=$(for file in $(aws s3 ls s3://${s3source} --recursive | grep -v -E "(Bucket: |Prefix: |LastWriteTime|^$|--|\/$)" | awk '{print $4}'); do echo ${file}; done)
  for file in ${files[@]}; do
    sudo -su hdfs hadoop distcp -i s3n://${s3source}/${file} hdfs:///${dest}/${file}
  done
}

# copy_folder_overwrite <s3source> <dest>
# Bulk distcp that unconditionally overwrites existing target files.
function copy_folder_overwrite {
  s3source=$1
  dest=$2
  sudo -su hdfs hadoop fs -mkdir -p ${dest}
  sudo -su hdfs hadoop distcp -i -overwrite -log /tmp/amm$(date +%s) s3n://${s3source}/ hdfs:///${dest}
}

# copy_folder_update <s3source> <dest>
# Bulk distcp that only copies files missing or changed at the target.
# (Previously this was an exact duplicate of copy_folder_overwrite,
# using -overwrite; distcp's incremental mode is -update.)
function copy_folder_update {
  s3source=$1
  dest=$2
  sudo -su hdfs hadoop fs -mkdir -p ${dest}
  sudo -su hdfs hadoop distcp -i -update -log /tmp/amm$(date +%s) s3n://${s3source}/ hdfs:///${dest}
}
| true
|
8c2201fc23cdb0f4a087c479084cc162f0799a98
|
Shell
|
abes-esr/filebeat-example-docker
|
/myapp/custom-log-generator.sh
|
UTF-8
| 252
| 3.21875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Endless demo log generator: every 2 seconds emit a random INFO line
# on stdout or an ERROR line on stderr (roughly 50/50), so a log
# shipper (filebeat) has both streams to collect.
while true;
do
sleep 2
# shuf picks a random integer in [1,10]; >5 selects the INFO branch.
if [ $(shuf -i1-10 -n1) -gt 5 ]
then
# >&1 sends the line to stdout
echo "INFO - ca se passe bien" >&1
else
# >&2 sends the line to stderr
echo "ERROR - ca se passe mal" >&2
fi
done
| true
|
85b10782a696a30e43a688f65afee5c78da5b9df
|
Shell
|
abshkd/malware
|
/scripts-malware-defense/get_methods.sh
|
UTF-8
| 246
| 2.546875
| 3
|
[] |
no_license
|
#!/bin/bash
# Probe which HTTP methods a web server on port 80 answers to.
# Usage: get_methods.sh <host>
# For each method, send a minimal HTTP/1.1 request with netcat and print
# the response status line next to the method name.
for webservmethod in GET POST PUT TRACE CONNECT OPTIONS PROPFIND HEAD MOVE COPY PROPPATCH MKCOL LOCK UNLOCK SEARCH;
do
    # Never pass data as the printf format string — use '%s' and supply
    # the values as arguments.
    printf '%s ' "$webservmethod"
    printf '%s / HTTP/1.1\nHost: %s\n\n' "$webservmethod" "$1" | nc "$1" 80 | grep "HTTP/1.1"
done
| true
|
9d63f8f2a6a2877c194d10cd66b578a02fda59dd
|
Shell
|
testxsubject/docker-deadsnakes
|
/build.sh
|
UTF-8
| 630
| 3.640625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Generate per-version Dockerfiles for the deadsnakes Python images and
# (unless invoked with "gen") build and smoke-test each image.
#
# Usage: build.sh [gen]
#   gen  - only generate the Dockerfile directories, skip docker build.
set -ex

# Create <ver>/Dockerfile from the template (substituting __PY_VER__)
# and drop get-pip.py alongside it.
generate_docker_files() {
  mkdir -p "$1"
  sed "s/__PY_VER__/$1/" Dockerfile > "$1/Dockerfile"
  cp get-pip.py "$1/"
}

# Build the tagged image from the generated directory.
build_docker_images() {
  docker build --no-cache --force-rm --pull -t "mrupgrade/deadsnakes:$1" "$1/"
}

# Smoke test: the image must be able to report its Python version.
test_image() {
  docker run -it --rm "mrupgrade/deadsnakes:$1" python --version
}

wget https://bootstrap.pypa.io/get-pip.py -O get-pip.py
for VER in 2.6 2.7 3.3 3.4 3.5 3.6
do
  # Generation happens in every mode; "gen" merely skips build + test
  # (the original duplicated this call in both branches of the if).
  generate_docker_files "$VER"
  if [ "${1:-}" != 'gen' ]
  then
    build_docker_images "$VER"
    test_image "$VER"
  fi
done
| true
|
43afe430b4ccf6deb1c15ffa3ecf9e0b0c6c87f8
|
Shell
|
flmartinez07/so
|
/p2_4.sh
|
UTF-8
| 156
| 2.546875
| 3
|
[] |
no_license
|
#!/bin/bash
# Greet the current user by name and tell them today's weekday.
greeting="Bienvenido/a "
current_user=$(whoami)
weekday=$(date +%A)
printf '%s\n' "$greeting nuevamente $current_user"
printf '%s\n' " Hoy es $weekday"
printf '%s\n' "ESPERO TENGAS UN GRAN DIA"
| true
|
83a5b9b4dfa841fc0fdb40a018e1bcbc93ba38d2
|
Shell
|
Vaibhavtyagi13/devops_training
|
/bash/demo.txt
|
UTF-8
| 105
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/bash
# Demo script: print this shell's PID, then count 1..10, sleeping ten
# seconds before printing each number, and exit successfully.
echo "pid is $$"
for (( i = 1; i <= 10; i++ )); do
  sleep 10
  echo "$i"
done
exit 0
| true
|
23a91ed0a32fb5c34d2de4706d312e305ba032f0
|
Shell
|
canonical/charmcraft
|
/tests/spread/tools/prepare.sh
|
UTF-8
| 261
| 3
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash -e
# Spread test helper: install the locally-built charmcraft snap.
# Expects exactly one charmcraft_*.snap mounted under /charmcraft/;
# fails the test run if none is present.
install_charmcraft()
{
# stat doubles as the existence check for the glob (its listing output
# is left on stdout deliberately, only errors are silenced).
if stat /charmcraft/charmcraft_*.snap 2>/dev/null; then
# --dangerous: the snap is unsigned (built locally, not from the store).
snap install --classic --dangerous /charmcraft/charmcraft_*.snap
else
echo "Expected a snap to exist in /charmcraft/"
exit 1
fi
}
| true
|
cdbde3bca6cd4fcc773d3537c68ca421f1ede1c5
|
Shell
|
ip0000h/dotfiles
|
/zsh/.zshrc
|
UTF-8
| 1,168
| 2.5625
| 3
|
[] |
no_license
|
# Path to your oh-my-zsh installation.
export ZSH="$HOME/.oh-my-zsh"
# For tmuxp
# (stops oh-my-zsh from renaming tmux windows, so tmuxp layouts stick)
export DISABLE_AUTO_TITLE='true'
# Theme
ZSH_THEME="gnzh"
# Plugins
# (loaded by oh-my-zsh; each adds aliases/completions for its tool)
plugins=(
colorize
common-aliases
command-not-found
docker
docker-compose
git
git-extras
github
kubectl
minikube
node
npm
nvm
pip
poetry
pyenv
pylint
python
tmux
virtualenv
web-search
)
# Init oh-my-zsh
source $ZSH/oh-my-zsh.sh
# bash aliases
source $HOME/.bash_aliases
# PyEnv configuration
export PATH="$HOME/.pyenv/bin:$PATH"
eval "$(pyenv init -)"
eval "$(pyenv virtualenv-init -)"
# Poetry configuration
export PATH="/home/ip0000h/.local/bin:$PATH"
# Direnv configuration
eval "$(direnv hook zsh)"
# GoEnv configuration
export PATH="$HOME/.goenv/bin:$PATH"
eval "$(goenv init -)"
# Nvm configuration
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion
# GoLang binaries
export PATH=$PATH:$(go env GOPATH)/bin
# nsc tool
export PATH="$PATH:$HOME/.nsccli/bin"
| true
|
9a99e01de8626472272f9bf5ada816ee3b4fe54a
|
Shell
|
UMCUGenetics/DxNextflowWES
|
/run_nextflow_wes_fingerprint.sh
|
UTF-8
| 1,578
| 3.390625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Submit the DxNextflowWES fingerprint workflow as a SLURM batch job.
#
# Usage: run_nextflow_wes_fingerprint.sh <bam_dir> <output_dir> <email>
# Refuses to submit when a workflow.{running,done,failed} marker already
# exists in the output directory, so reruns are explicit.
set -euo pipefail
workflow_path='/hpc/diaggen/software/production/DxNextflowWES'
# Set input and output dirs
# (realpath -e requires the input path to exist; output may be new)
input=`realpath -e $1`
output=`realpath $2`
email=$3
mkdir -p $output && cd $output
mkdir -p log
if ! { [ -f 'workflow.running' ] || [ -f 'workflow.done' ] || [ -f 'workflow.failed' ]; }; then
touch workflow.running
# Unquoted EOT heredoc: $workflow_path/$input/$output/$email expand at
# submit time; \$-escaped expressions expand inside the batch job.
sbatch <<EOT
#!/bin/bash
#SBATCH --time=2:00:00
#SBATCH --nodes=1
#SBATCH --mem 5G
#SBATCH --gres=tmpspace:10G
#SBATCH --job-name Nextflow_WES_Fingerprint
#SBATCH -o log/slurm_nextflow_wes_fingerprint.%j.out
#SBATCH -e log/slurm_nextflow_wes_fingerprint.%j.err
#SBATCH --mail-user $email
#SBATCH --mail-type FAIL
#SBATCH --export=NONE
#SBATCH --account=diaggen
module load Java/1.8.0_60
/hpc/diaggen/software/tools/nextflow run $workflow_path/WES_Fingerprint.nf \
-c $workflow_path/WES.config \
--bam_path $input \
--outdir $output \
--email $email \
-profile slurm \
-resume -ansi-log false
if [ \$? -eq 0 ]; then
echo "Nextflow done."
echo "Zip work directory"
find work -type f | egrep "\.(command|exitcode)" | zip -@ -q work.zip
echo "Remove work directory"
rm -r work
echo "WES Fingerprint workflow completed successfully."
rm workflow.running
touch workflow.done
echo "Change permissions"
chmod 775 -R $output
exit 0
else
echo "Nextflow failed"
rm workflow.running
touch workflow.failed
echo "Change permissions"
chmod 775 -R $output
exit 1
fi
EOT
else
echo "Workflow job not submitted, please check $output for 'workflow.status' files."
fi
| true
|
9eecfa79477a77dfdc0fd69fe8830a1d4b35ff58
|
Shell
|
JaidenCook/bin
|
/aegean_snapshot.sh
|
UTF-8
| 850
| 3.09375
| 3
|
[] |
no_license
|
#!/bin/bash
# Run Aegean with some defaults
#
# Source-finding on a FITS image:
#   $1 filename   input image (expects a ".fits" suffix)
#   $2 bane       if non-empty, use BANE for background/noise maps
#   $3 compress   if non-empty, pass --compress to BANE
# Produces <root>_bkg.fits/<root>_rms.fits and a <root>_comp.vot catalog.
filename=$1
bane=$2
compress=$3
# Derive companion file names from the input name.
# NOTE(review): the sed pattern ".fits" has an unescaped dot, so it
# matches any character before "fits" — fine for conventional names.
background=`echo $filename | sed "s/.fits/_bkg.fits/"`
noise=`echo $filename | sed "s/.fits/_rms.fits/"`
root=`echo $filename | sed "s/.fits//"`
ncpus=20
# Only (re)build background/noise maps if they do not exist yet.
if [[ ! -e $background ]]
then
if [[ $bane ]]
then
if [[ $compress ]]
then
compress="--compress"
fi
BANE --cores=${ncpus} $compress $filename
else
aegean --cores=${ncpus} --save $filename
fi
fi
# Only run source finding if the component catalog is missing.
if [[ ! -e ${root}_comp.vot ]]
then
aegean --cores=${ncpus} --seedclip=8 --island --maxsummits=5 --background=$background --noise=$noise --out=/dev/null --table=$root.vot,$root.reg $filename
fi
#aegean.py --telescope=mwa --island --maxsummits=5 --background=$background --noise=$noise --out=/dev/null --table=$root.vot,$root.reg $filename
| true
|
653b5f80baed33a59e65e19002cf0fbef8cec267
|
Shell
|
TimeDelta/dotfiles
|
/source/aliases_mac.bash
|
UTF-8
| 11,824
| 3.484375
| 3
|
[
"MIT"
] |
permissive
|
is_osx || return 1
################################################################################
# Notes:
# Aliases and functions followed by a "# [BN]" is written entirely by me
# Aliases and functions followed by a "# {BN}" is adapted by me from somebody else's code
# Aliases and functions without a comment after it is completely taken from elsewhere
################################################################################
##########################
# This File and Sourcing #
################################################################################
# make sure this file is in the PLATFORM_ALIAS_FILES environment variable
[[ $PLATFORM_ALIAS_FILES == *$DOTFILES/source/aliases_mac.bash* ]] || \
export PLATFORM_ALIAS_FILES="$PLATFORM_ALIAS_FILES $DOTFILES/source/aliases_mac.bash"
################################################################################
########
# PATH #
################################################################################
atp "$DOTFILES/osx_bin" # add osx-only binaries directory to PATH
################################################################################
#############
# File Info #
################################################################################
alias ls="ls -GFh" # [BN]
# lskey: display a key for information given by the ls command
lskey () { # [BN]
echo -e "${FBLUE}directory${RES}/ ${FRED}executable${RES}* ${FPURPLE}symbolic link${RES}@ ${FGREEN}socket${RES}= whiteout% FIFO| ${FBLACK}${BCYAN}postdrop${RES}*"
}
# ql: show a "Quick Look" view of files
ql () { qlmanage -p "$@" >& /dev/null & }
# bsizeof: display the size of a file in bytes
bsizeof () { # [BN]
if [[ $# -eq 0 ]]; then while read -s file; do stat -f %z $file; done
else stat -f %z $@; fi
}
# fullpath: get the absolute path
fullpath (){ realpath "$@"; } # [BN]
################################################################################
#####################
# File Manipulation #
################################################################################
# rmds: removes all .DS_Store file from the current dir and below
rmds () { find . -name .DS_Store -exec rm {} \;; }
# hide: make a file or folder hidden
alias hide="chflags hidden"
################################################################################
##########
# Bazaar #
################################################################################
bzrhelp () { bzr help commands | less; } # [BN]
################################################################################
##############
# Networking #
################################################################################
# rt_table: display routing table
alias rt_table="netstat -rn" # [BN]
# active_con: display active connections
alias active_con="netstat -an" # [BN]
restart_bonjour () {
sudo launchctl unload /System/Library/LaunchDaemons/com.apple.mDNSResponder.plist
sudo launchctl load /System/Library/LaunchDaemons/com.apple.mDNSResponder.plist
}
# tcpip: show all open TCP/IP sockets
alias tcpip='lsof -i'
# lsock: list all open sockets
alias lsock='sudo lsof -i -P'
# lsockudp: list open UDP sockets
alias lsockudp='sudo /usr/sbin/lsof -nP | grep UDP'
# lsocktcp: list open TCP sockets
alias lsocktcp='sudo /usr/sbin/lsof -nP | grep TCP'
# openports: show listening connections
alias openports='sudo lsof -i | egrep "^COMMAND|LISTEN"'
# flushdns: flush the DNS cache
alias flushdns='dscacheutil -flushcache'
# httpdump: view http traffic
alias httpdump="sudo tcpdump -i en1 -n -s 0 -w - | grep -a -o -E \"Host\: .*|GET \/.*\""
# vpn: wrapper for openvpnstart (command line interface for Tunnelblick)
vpn (){ # [BN]
# requires that the PATH environment variable has a certain prefix
if [[ -z "`echo "$PATH" | grep "^/usr/bin:/bin:/usr/sbin:/sbin"`" ]]; then
local OLD_PATH="$PATH"
export PATH="/usr/bin:/bin:/usr/sbin:/sbin:$PATH"
fi
# requires that if present, the SHELL environment variable must be set to '/bin/bash'
if [[ -n $SHELL && $SHELL != "/bin/bash" ]]; then
local OLD_SHELL="$SHELL"
export SHELL="/bin/bash"
fi
# quote args
local args
while [[ $# -gt 0 ]]; do
args="$args \"$1\""
shift
done
openvpnstart $args
# put things back the way they were
if [[ -n $OLD_SHELL ]]; then export SHELL="$OLD_SHELL"; fi
if [[ -n $OLD_PATH ]]; then export PATH="$OLD_PATH"; fi
}
################################################################################
#############
# Clipboard #
################################################################################
paste () { pbpaste; }
copy () { pbcopy; }
# copynl: copy without newline characters
copynl() { tr -d '\n' | pbcopy; }
alias cpnl=copynl
# sclip: sort the clipboard
sclip () { paste | sort | copy; }
# rclip: reverse the contents of the clipboard
rclip () { paste | rev | copy; }
clipdups () { paste | sort | uniq -d | copy; }
clipuniq () { paste | sort | uniq -u | copy; }
################################################################################
##########
# Finder #
################################################################################
show_hidden () { defaults write com.apple.finder AppleShowAllFiles TRUE; }
hide_hidden () { defaults write com.apple.finder AppleShowAllFiles FALSE; }
# force_eject: force a volume to eject
force_eject () { # {BN}
diskutil unmountDisk force /Volumes/$@ 2> /dev/null
if [[ $? -ne 0 ]]; then hdiutil eject -force $@; fi
}
################################################################################
##############
# Navigation #
################################################################################
# cdf: cd's to frontmost window of Finder
cdf () {
local currFolderPath=$( osascript <<" EOT"
tell application "Finder"
try
set currFolder to (folder of the front window as alias)
on error
set currFolder to (path to desktop folder as alias)
end try
POSIX path of currFolder
end tell
EOT
)
echo "$currFolderPath"
cd "$currFolderPath"
}
# onall: run a command on all open terminal windows
onall () { # [BN]
if [[ $1 == "--help" ]]; then
echo "Usage: onall <command>"
return 0
fi
osascript -e "tell application \"Terminal\"
repeat with w in windows
repeat with t in tabs of w
do script \"${1//\"/\\\"}\" in t
end repeat
end repeat
end tell"
}
################################################################################
#############
# Searching #
################################################################################
# dict: lookup a word with Dictionary.app
dict () { open dict:///"$@" ; }
################################################################################
######################
# Process Management #
################################################################################
alias top="top -R -F"
memhogs () { ps wwaxm -Ao pid,stat=STATE,%mem,vsize=VSIZE,rss,time,command | head; } # {BN}
cpuhogs () { ps wwaxr -Ao pid,stat=STATE,%cpu,time,command | head; } # {BN}
################################################################################
######################
# System Information #
################################################################################
total_ram () { # [BN]
system_profiler SPMemoryDataType | \
grep -C0 "Size" | \
sed s/[^0-9BKkMGT]//g | \
human2bytes | \
awk '{s += $1} END {print s}' | \
bytes2human
}
################################################################################
#############
# Profiling #
################################################################################
export LIB_PROFILER=/usr/local/Cellar/google-perftools/2.4/lib/libprofiler.dylib
export GPROFILER_BIN=pprof
################################################################################
###########
# Android #
################################################################################
andevref () { adb kill-server; sudo adb start-server; adb devices; } # [BN]
################################################################################
#########
# Misc. #
################################################################################
# sethoverfocus: change whether hovering over a window gives it focus
sethoverfocus (){
defaults write com.apple.terminal FocusFollowsMouse -bool $1
defaults write org.x.X11 wm_ffm -bool $1
}
# email_file: email a file to somebody
email_file () { # {BN}
	# Uuencodes the file given as $1 and pipes it to mailx; every
	# argument after the file is passed straight through to mailx.
	if [[ $# -lt 2 || $1 == "-h" || $1 == "--help" || $1 == "-help" ]]; then
		{ echo "Usage: email_file <file> [options] <email_address>"
		echo " -s subject"
		echo " Specify subject on command line. (Only the first argument after the -s flag is used as a subject; \
be careful to quote subjects containing spaces.)"
		echo " -c addresses"
		echo " Send carbon copies to addresses list of users. The addresses argument should be a comma-separated list of names."
		echo " -b addresses"
		echo " Send blind carbon copies to addresses list of users. The addresses argument should be a comma-separated list of names."; } | wrapindent -w
		return 0
	fi
	# Fail early with a clear message instead of letting uuencode error out.
	if [[ ! -f "$1" ]]; then
		echo "email_file: no such file: $1" >&2
		return 1
	fi
	# Quote the expansions so filenames/addresses with spaces survive.
	uuencode "$1" "$1" | mailx "${@:2}"
}
# remake: rebuild from scratch
alias remake="make clean; make -j2" # [BN]
# mj: run make using at most 2 jobs
alias mj="make -j2" # [BN]
# fix_audio: fix locked volume issue
alias fix_audio="sudo killall coreaudiod"
# fix_icloud: fix icloud not syncing locally
alias fix_icloud='rm -rf "$HOME/Library/Application Support/CloudDocs"; killall cloudd bird'
# bangcp: copy a previously entered command to the clipboard
bangcp() { # [BN]
history $((${1:-1}+1)) | \
head -1 | \
awk '{$1=""; print $0}' | \
awk -v f=`echo "$HISTTIMEFORMAT" | awk '{print NF}'` '\
{ \
for (i=1; i<=f; i++)\
$i=""; \
print $0 \
}' | \
stripws | \
tr -d '\n' | \
pbcopy
}
# quote_args: surround each argument with quotes
quote_args() { # [BN]
	# Emit every argument wrapped in escaped double quotes (\"arg\"),
	# separated by single spaces and without a trailing newline.  A
	# literal '&' is passed through unquoted so it can still act as a
	# backgrounding token when the result is later evaluated.
	local emitted=0
	local arg
	for arg in "$@"; do
		if (( emitted )); then
			printf ' '
		fi
		if [[ "$arg" == '&' ]]; then
			printf '%s' "$arg"
		else
			printf '\\"%s\\"' "$arg"
		fi
		emitted=1
	done
}
# insession: run a command in another (named) session
insession() { # [BN]
local session_name="$1"; shift
local command="$1"; shift
osascript -e "
tell application \"iTerm\"
set done to false
set allWindows to (every window)
repeat with currentWindow in allWindows
set allTabs to (every tab of currentWindow)
repeat with currentTab in allTabs
set currentTabSessions to (every session of currentTab)
repeat with currentSession in currentTabSessions
if name of currentSession is \"$session_name (bash)\" then
tell currentSession to write text \"$command `quote_args "$@"`\"
set done to true
exit repeat
end if
end repeat
if done then exit repeat
end repeat
if done then exit repeat
end repeat
if not done then
\"Session not found\"
end if
end tell"
}
alias inses='insession'
# alfred: install newly downloaded Alfred workflows and then move them to iCloud
alfred() { # [BN]
local junk
local OLD_IFS="$IFS"
IFS=$'\n'
for workflow in `find ~/Downloads -iname '*.alfredworkflow'`; do
open "$workflow"
echo "Installing `basename "${workflow%.*}"`"
echo "Press [Enter] to continue"
read -s junk
mkdir -p ~/Library/Mobile\ Documents/com~apple~CloudDocs/alfred/
command mv "$workflow" ~/Library/Mobile\ Documents/com~apple~CloudDocs/alfred/
done
IFS="$OLD_IFS"
}
################################################################################
##################
# Tab Completion #
################################################################################
if [[ "$(type -P bzr)" ]]; then
eval "`bzr bash-completion`"
fi
eval "`task bash-completion`" # task is a custom script
eval "`todo bash-completion`" # todo is a custom script
################################################################################
| true
|
400a658a44638bd41cc963cbebe8d9d4d1ac1990
|
Shell
|
redpandoraweb/CentOS-6-Quick-Install-Scripts
|
/Install_Xfce_VNC_CentOS_6.sh
|
UTF-8
| 1,393
| 2.9375
| 3
|
[] |
no_license
|
# Quick-install script: xfce4 desktop + TigerVNC server on CentOS 6.
# Must run as root; mutates yum repos and system service configuration.
echo 'Install xfce4 + TigerVNC-Server + Firefox + Flash on Centos 6.2'
echo 'Install EPEL & RPMI & YUM-Priorities'
rpm -Uvh http://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
rpm -ivh http://rpms.famillecollet.com/enterprise/remi-release-6.rpm
yum -y install yum-priorities nano
yum -y update
echo 'Enable epel and remi repo'
# Give EPEL priority 10 and flip the remi repo to enabled=1 in place.
sed -i '/enabled=1/a\priority=10' /etc/yum.repos.d/epel.repo
sed -i '/\[remi\]/,/enabled=0/ { s/enabled=0/enabled=1/ }' /etc/yum.repos.d/remi.repo
yum -y groupinstall xfce
yum -y install tigervnc-server
yum -y install fontforge
# Configure display :1 as a VNC session for root at 1024x768.
cat > /etc/sysconfig/vncservers << EOF
VNCSERVERS="1:root"
VNCSERVERARGS[1]="-geometry 1024x768"
EOF
#nano /etc/sysconfig/vncservers
#VNCSERVERS="1:root"
#VNCSERVERARGS[1]="-geometry 1024x768"
#useradd admin
#su admin
#vncpasswd
#vncserver
#nano /root/.vnc/xstartup
#nano /home/admin/.vnc/xstartup
echo 'Set password for root user'
# Interactive: prompts for the VNC password on the terminal.
vncpasswd
# NOTE(review): the restart/stop cycle presumably initialises ~/.vnc
# before xstartup is replaced below — confirm on a clean host.
service vncserver restart
service vncserver stop
# Launch xfce instead of the stock window manager on session start.
cat > /root/.vnc/xstartup << EOF
#!/bin/sh
/usr/bin/startxfce4
EOF
chmod +x ~/.vnc/xstartup
chkconfig vncserver on
service vncserver restart
#yum -y install firefox
#Flash Player
#rpm -ivh http://linuxdownload.adobe.com/linux/i386/adobe-release-i386-1.0-1.noarch.rpm
#rpm --import /etc/pki/rpm-gpg/RPM-GPG-KEY-adobe-linux
#yum check-update
#yum install flash-plugin
echo 'INSTALL VNC SERVER COMPLETE'
| true
|
e312431a8fa2c8c87fac2505597513a58db33ba4
|
Shell
|
sascho1993/asr-disfluency
|
/sony-egs/swbd_disfluency/switchboard_data/utils/prepare_ms_words.sh
|
UTF-8
| 2,328
| 3.421875
| 3
|
[] |
no_license
|
#!/bin/bash
# Run parts of Kaldi scripts for preprocessing of MS-State data
dir=data/silver_wd/
rm -f $dir/ms_words
cat data/silver_transcript/switchboard_corrected_with_silver_reannotation.tsv | awk -F '\t' '{print $4}' | sed 's/\.trans//g' | uniq | grep -v "file" | while read line
do
number=`echo $line | sed 's/sw//g'`
prefix=${number:0:2}
cat /speech/db/SpeechCorpora/LDC97S62/transcriptions/swb_ms98_transcriptions/$prefix/$number/sw${number}A-ms98-a-word.text >> $dir/ms_words
cat /speech/db/SpeechCorpora/LDC97S62/transcriptions/swb_ms98_transcriptions/$prefix/$number/sw${number}B-ms98-a-word.text >> $dir/ms_words
done
# (1a) Transcriptions preparation
# make basic transcription file (add segments info)
# **NOTE: In the default Kaldi recipe, everything is made uppercase, while we
# make everything lowercase here. This is because we will be using SRILM which
# can optionally make everything lowercase (but not uppercase) when mapping
# LM vocabs.
awk '{
name=substr($1,1,6); gsub("^sw","sw0",name); side=substr($1,7,1);
stime=$2; etime=$3;
printf("%s-%s_%06.0f-%06.0f",
name, side, int(100*stime+0.5), int(100*etime+0.5));
for(i=4;i<=NF;i++) printf(" %s", $i); printf "\n"
}' $dir/ms_words > $dir/ms_words1
# Remove SILENCE, <B_ASIDE> and <E_ASIDE>.
# Note: we have [NOISE], [VOCALIZED-NOISE], [LAUGHTER], [SILENCE].
# removing [SILENCE], and the <B_ASIDE> and <E_ASIDE> markers that mark
# speech to somone; we will give phones to the other three (NSN, SPN, LAU).
# There will also be a silence phone, SIL.
# **NOTE: modified the pattern matches to make them case insensitive
cat $dir/ms_words1 \
| perl -ane 's:\s\[SILENCE\](\s|$):$1:gi;
s:\s\[NOISE\](\s|$):$1:gi;
s:\s\[LAUGHTER\](\s|$):$1:gi;
s:\s\[VOCALIZED-NOISE\](\s|$):$1:gi;
s/<B_ASIDE>//gi;
s/<E_ASIDE>//gi;
print;' \
| awk '{if(NF > 1) { print; } } ' > $dir/ms_words2
# **NOTE: swbd1_map_words.pl has been modified to make the pattern matches
# case insensitive
local/swbd1_map_words.pl -f 2- $dir/ms_words2 > $dir/words
# format acronyms in text
python local/map_acronyms_transcripts.py -i $dir/words -o $dir/words_map \
-M local/acronyms.map
mv $dir/words_map $dir/words
| true
|
07a5ef6034ff68cd188f6064b2b62ad2b75c9898
|
Shell
|
aruneral01/fanscribed
|
/install-mp3splt.sh
|
UTF-8
| 3,162
| 3.28125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash -ex
if [ "${VIRTUAL_ENV}" == "" ]; then
echo "Must be in a virtualenv"
exit 1
fi
TMP_INSTALL=$VIRTUAL_ENV/tmp-install-mp3splt
mkdir -p $TMP_INSTALL
cp -v patched-libmad-0.15.1b-configure $TMP_INSTALL/
cd $TMP_INSTALL
LIBDIR=${VIRTUAL_ENV}/lib
export LD_LIBRARY_PATH=${LIBDIR}
export LDFLAGS=-L${LIBDIR}
export PKG_CONFIG_PATH=${LIBDIR}/pkgconfig
(
if ! [ -f libmad-0.15.1b.tar.gz ]; then
wget -O libmad-0.15.1b.tar.gz http://sourceforge.net/projects/mad/files/libmad/0.15.1b/libmad-0.15.1b.tar.gz/download
fi
if ! [ -d libmad-0.15.1b ]; then
tar xzvf libmad-0.15.1b.tar.gz
fi
cp patched-libmad-0.15.1b-configure libmad-0.15.1b/configure
cd libmad-0.15.1b
./configure --prefix=${VIRTUAL_ENV}
make -j16
make install
)
(
if ! [ -f libogg-1.3.0.tar.gz ]; then
wget -O libogg-1.3.0.tar.gz http://downloads.xiph.org/releases/ogg/libogg-1.3.0.tar.gz
fi
if ! [ -d libogg-1.3.0 ]; then
tar xzvf libogg-1.3.0.tar.gz
fi
cd libogg-1.3.0
./configure --prefix=${VIRTUAL_ENV}
make -j16
make install
)
(
if ! [ -f libvorbis-1.3.2.tar.bz2 ]; then
wget -O libvorbis-1.3.2.tar.bz2 http://downloads.xiph.org/releases/vorbis/libvorbis-1.3.2.tar.bz2
fi
if ! [ -d libvorbis-1.3.2 ]; then
tar xjvf libvorbis-1.3.2.tar.bz2
fi
cd libvorbis-1.3.2
./configure --prefix=${VIRTUAL_ENV}
make -j16
make install
)
(
if ! [ -f libid3tag-0.15.1b.tar.gz ]; then
wget -O libid3tag-0.15.1b.tar.gz http://sourceforge.net/projects/mad/files/libid3tag/0.15.1b/libid3tag-0.15.1b.tar.gz/download
fi
if ! [ -d libid3tag-0.15.1b ]; then
tar xzvf libid3tag-0.15.1b.tar.gz
fi
cd libid3tag-0.15.1b
./configure --prefix=${VIRTUAL_ENV}
make -j16
make install
)
(
if ! [ -f pcre-8.21.tar.bz2 ]; then
wget -O pcre-8.21.tar.bz2 http://sourceforge.net/projects/pcre/files/pcre/8.21/pcre-8.21.tar.bz2/download
fi
if ! [ -d pcre-8.21 ]; then
tar xjvf pcre-8.21.tar.bz2
fi
cd pcre-8.21
./configure --prefix=${VIRTUAL_ENV}
make -j16
make install
)
(
if ! [ -f libmp3splt-0.8.2.tar.gz ]; then
wget -O libmp3splt-0.8.2.tar.gz http://prdownloads.sourceforge.net/mp3splt/libmp3splt-0.8.2.tar.gz
fi
if ! [ -d libmp3splt-0.8.2 ]; then
tar xzvf libmp3splt-0.8.2.tar.gz
fi
cd libmp3splt-0.8.2
./configure --prefix=${VIRTUAL_ENV} --with-mad=${VIRTUAL_ENV} --with-id3=${VIRTUAL_ENV} --with-ogg=${VIRTUAL_ENV} --with-vorbis=${VIRTUAL_ENV} --enable-ltdl-install
make -j16
make install
)
(
if ! [ -f mp3splt-2.5.2.tar.gz ]; then
wget -O mp3splt-2.5.2.tar.gz http://prdownloads.sourceforge.net/mp3splt/mp3splt-2.5.2.tar.gz
fi
if ! [ -d mp3splt-2.5.2 ]; then
tar xzvf mp3splt-2.5.2.tar.gz
fi
cd mp3splt-2.5.2
./configure --prefix=${VIRTUAL_ENV} CFLAGS=-I${VIRTUAL_ENV}/include
make -j16
make install
)
cd ${VIRTUAL_ENV}/bin
mv mp3splt _mp3splt
cat > mp3splt <<EOF
#!/bin/bash
LD_LIBRARY_PATH=${VIRTUAL_ENV}/lib ${VIRTUAL_ENV}/bin/_mp3splt \$@
EOF
chmod +x mp3splt
| true
|
ef914ec7be906065267237218658e1ee2fb3d1da
|
Shell
|
coi-gov-pl/puppet-jboss
|
/templates/systemd/launch.sh
|
UTF-8
| 228
| 2.859375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# ERB-templated launcher: the <%= ... %> placeholders are filled in by
# Puppet before this file lands on disk.  Sources the product config,
# then starts JBoss in the configured mode, teeing console output.
. /etc/<%= @product %>/<%= @product %>.conf
set -x
# Fall back to the default install location when the sourced config
# does not define JBOSS_HOME.
if [ "x$JBOSS_HOME" = "x" ]; then
JBOSS_HOME="/usr/lib/<%= @product %>-<%= @version %>"
fi
# $1 is the server configuration name passed to the JBoss start script.
# NOTE(review): JBOSS_MODE and JBOSS_CONSOLE_LOG are expected from the
# sourced conf file — confirm they are always set there.
$JBOSS_HOME/bin/$JBOSS_MODE.sh -c "${1}" | tee $JBOSS_CONSOLE_LOG
| true
|
539e6b2522f4efbbc64f463b7c181a8aa0d3cad6
|
Shell
|
tfull/image_factory
|
/geometric_fusion/convert.sh
|
UTF-8
| 142
| 2.890625
| 3
|
[] |
no_license
|
# Convert every image in images/ppm to PNG in images/png, skipping
# files that were already converted.
cd images || exit 1
# Glob instead of parsing `ls` so names with spaces survive intact.
for src in ppm/*; do
	[ -e "$src" ] || continue   # empty directory: glob stays literal
	file=${src##*/}
	out="png/${file%.*}.png"
	if [ ! -f "$out" ]; then
		convert "$src" "$out"
	fi
done
| true
|
8706908aa30e6f7c8133b16cfc1edec0eadb7d55
|
Shell
|
bergental/unisinos-sistemas-operacionais-lab
|
/material-apoio/Aula02 - Shell Script/exercicios/correcao/Lista2_Resolucao/dir_monitor4.sh
|
UTF-8
| 624
| 3.671875
| 4
|
[] |
no_license
|
#!/bin/bash
# Monitor the directory named in $MON_DIR; whenever its listing changes,
# run the command given as arguments (in the background).  After the 5th
# detected change the script terminates with exit status 5.
CONTADOR=0
if [ -z "$MON_DIR" ]; then
	# Check the unset/empty case first: with an empty MON_DIR,
	# `[ ! -d "" ]` is also true, so in the original order this
	# branch was unreachable.
	echo "Diretório nulo. Não informado!" >&2
elif [ ! -d "$MON_DIR" ]; then
	echo "Não é um diretório válido!" >&2
else
	ESTADO_INICIAL=$(ls "$MON_DIR" -l)
	echo "Diretório válido: $MON_DIR"
	while true; do
		ESTADO_ATUAL=$(ls "$MON_DIR" -l)
		#houve mudanca no diretorio
		if [ "$ESTADO_INICIAL" != "$ESTADO_ATUAL" ]; then
			# Quote "$*" so the whole command line reaches bash -c
			# as one string instead of only its first word.
			bash -c "$*" &
			ESTADO_INICIAL=$ESTADO_ATUAL
			CONTADOR=$((CONTADOR+1))
		fi
		sleep 2
		#controla o numero de vezes que houve alteracao, se = 5, terminacao com status 5
		if [ "$CONTADOR" -eq 5 ]; then
			exit 5
		fi
	done
fi
| true
|
56197644da03a51fb0ba8a4db3fbce8d4888af0a
|
Shell
|
chakralinux/desktop
|
/eclipse-ecj/PKGBUILD
|
UTF-8
| 1,453
| 2.59375
| 3
|
[] |
no_license
|
# contributions from Arch: https://git.archlinux.org/svntogit/packages.git/tree/trunk?h=packages/eclipse-ecj
pkgname=eclipse-ecj
pkgver=4.6.1
pkgrel=1
_date=201609071200
pkgdesc='Eclipse java bytecode compiler'
arch=('x86_64')
license=('EPL')
url='http://www.eclipse.org/'
depends=('java-runtime')
makedepends=('apache-ant' 'java-environment')
source=(http://download.eclipse.org/eclipse/downloads/drops4/R-${pkgver}-${_date}/ecjsrc-${pkgver}.jar
01-ecj-include-props.patch
02-buildxml-fix-manifest.patch
ecj)
sha256sums=('36a664f84b5d2ba2c31dc8b3cf72783a979c9cffe689fbed5c58130f2b269fbc'
'8f6259c76dfe493549bbaec3c8a7ba29e82c70e127c918adca28737dcb570f6b'
'5ca6bd94c2b1cb4c6f116d38c160edf1c4ca520647ac74b26486f958254767af'
'63aff3d126243d303ddc4305cfa77827df72e87ccf85bd8a22a2f832357e396c')
build() {
cd "${srcdir}"
patch -p0 -i "$srcdir/01-ecj-include-props.patch"
patch -p0 -i "$srcdir/02-buildxml-fix-manifest.patch"
sed -i -e "s/Xlint:none/Xlint:none -encoding cp1252/g" build.xml
LANG=en_US.UTF-8
ant build
}
package() {
install -Dm644 "${srcdir}/ecj.jar" "${pkgdir}/usr/share/java/eclipse-ecj-${pkgver}.jar"
ln -s eclipse-ecj-${pkgver}.jar "${pkgdir}/usr/share/java/ecj.jar"
ln -s eclipse-ecj-${pkgver}.jar "${pkgdir}/usr/share/java/eclipse-ecj.jar"
install -Dm755 ecj "${pkgdir}/usr/bin/ecj"
install -D -m 644 ecj.1 "${pkgdir}/usr/share/man/man1/ecj.1"
}
| true
|
e89cae6c05997f13d2c8139436c5d44f579e2e8a
|
Shell
|
sumitsidana/NERvE
|
/bash_scripts/writerelevancevectorcofactor.sh
|
UTF-8
| 760
| 2.75
| 3
|
[] |
no_license
|
#!/bin/bash
# Build and run the relevance-vector generator for dataset $1 at list
# length $2 ("one" -> rank 2, "five" -> rank 5, anything else -> 10),
# then compute the offline evaluation metrics.
export LANG=en_US.utf8   # exported so the child JVM/python see it too

cd java/ || exit 1

base="/data/sidana/recnet_draft/$1/cofactor"
mkdir -p "$base/$2/rv/" "$base/$2/em/"

javac -cp binaries/commons-lang3-3.5.jar preProcess/ConvertIntoRelVecGeneralized_update.java preProcess/InputOutput.java
echo 'making relevance vector'
# Map the list-length label to a numeric rank; quoting "$2" fixes the
# crash the original `[ $2 == "one" ]` had when $2 was empty.
case "$2" in
	one)  rank=2 ;;
	five) rank=5 ;;
	*)    rank=10 ;;
esac
#temp=$1
#var=${temp/\//_}
java -cp . preProcess.ConvertIntoRelVecGeneralized_update "$base/vectors/gt_$1" "$base/vectors/pr_$1" "$base/$2/rv/relevanceVector_$1" "$rank"
cd -
echo 'compute offline metrics'
python3 "compOfflineEvalMetrics_len$rank.py" "/data/sidana/recnet_draft/$1/cofactor/$2" "$1"
| true
|
0ea0db16489f5dc09b3c5362e344c9bb05f16bc8
|
Shell
|
todd-dsm/smashNgrab
|
/payload/src/harden_samba.sh
|
UTF-8
| 3,605
| 3.828125
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# PURPOSE: Harden Samba: Restrict Samba to only:
# a) Listen on the localhost segment 127.
# b) Use its own password table
# c) Use encrypted passwords
# d) Use defined password table
# e) Allow permitted users: none ()
# f)
# ------------------------------------------------------------------
# CREATED: 2013/01/14
# AUTHOR: Todd E Thomas
# MODIFIED:
#set -x
###----------------------------------------------------------------------------
### VARIABLES
###----------------------------------------------------------------------------
paramsFile="$varLists/samba_populate.list"
targetSambaConfig="$varTargets/smb.conf"
###----------------------------------------------------------------------------
### FUNCTIONS
###----------------------------------------------------------------------------
source "$instLib/finish.sh"
source "$instLib/get_stats.sh"
source "$instLib/printfmsg.sh"
source "$instLib/start.sh"
displayConfig() {
egrep -v '^(#|.*#|;|*$)' "$1" | sed '/^\s*$/d'
}
###----------------------------------------------------------------------------
### MAIN PROGRAM
###----------------------------------------------------------------------------
### What time is it?
###---
start
printReq "Installing, configuring and harndening Samba..."
###---
### Ensure the program is installed
###---
progInstalled="$(type -P smbd)"
if [[ "${progInstalled##*/}" != 'smbd' ]]; then
case "$myDistro" in
'CentOS')
yum -y install samba
;;
'Debian')
apt-get install samba
;;
esac
else
printSStat "Samba is already installed."
fi
###---
### Record the permissions on the file
###---
getStats "$sysSambaConfig"
###---
### Backup the file
###---
cp -p "$sysSambaConfig" "$backupDir"
###---
### Display configuration and normalize the file
###---
displayConfig "$backupDir/${sysSambaConfig##*/}" > "$sysSambaConfig"
printInfo ""
printInfo ""
###---
### Diff the file and update it to the required specification.
###---
diff "$sysSambaConfig" "$targetSambaConfig" >/dev/null
if [[ "$?" -ne '0' ]]; then
printInfo "The Samba configuration file does not meet the specification."
printInfo "Adding new parameters:"
# Read parameters into a while loop
# URL: http://goo.gl/sehtX
while IFS='\t' read -r confLine; do
[[ "$confLine" = \#* ]] && continue
# Using BASH PE (parameter expansion)
# URL: http://goo.gl/stZWt
strSrch="${confLine%% = *}"
grep "$strSrch" "$sysSambaConfig"
if [[ "$?" -ne '0' ]]; then
printInfo "$confLine"
sed -i "/security \= user/a\ $confLine" "$sysSambaConfig"
else
printSStat "$confLine is already set in the configuration."
fi
done < "$paramsFile"
else
printInfo "Samba configuration file meets the specification."
fi
###---
### Ensure correct permissions and ownership
###---
### Ensure permissions are correct on the new file
printInfo ""
printInfo "Re-applying original permissions to the file."
chmod "$fsoPerms" "$sysSambaConfig"
###---
### Ensure correct ownership
###---
printInfo "Re-applying original user and group ownership to the file."
chown "$fsoOwner:$fsoGroup" "$sysSambaConfig"
###---
### After: Display configuration for reporting
###---
printInfo ""
printSStat "Samba is now HARD:"
displayConfig "$sysSambaConfig"
###---
### Reset Test
###---
cp -p "$backupDir/smb.conf" "$sysSambaConfig"
###---
### fin~
###---
finish
exit 0
| true
|
7f3281cb8a826edca302c68575e624babd5993f9
|
Shell
|
jasonTtan/Parallel-Project
|
/cs133-img-proc.sh
|
UTF-8
| 3,163
| 3.265625
| 3
|
[] |
no_license
|
#!/bin/bash
if [ $# -lt 1 ]; then
echo -e "Usage:"
echo -e "\t$0 img-op img-op-args";
echo -e "Where";
echo -e "\timg-op = { motion-estimation[-parallel], corner-detection[-parallel], rotation[-parallel], scaling[-parallel], gaussian-blur[-parallel], high-pass-filter[-parallel] }"
echo -e "and img-op-args are the arguments for that operation.";
exit 0;
else
if [ "$1" = "motion-estimation" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input-file1 input-file2 [blockWidth] [searchPadding]";
exit 0;
else
./Motion_Estimation/bin/motion-estimation-sequential "${@:2}";
fi
elif [ "$1" = "motion-estimation-parallel" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input-file1 input-file2 [blockWidth] [searchPadding]";
exit 0;
else
./Motion_Estimation/bin/motion-estimation-parallel "${@:2}";
fi
elif [ "$1" = "corner-detection" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input_file output_file [threshold]";
exit 0;
else
./corner-detection/bin/corner-detection-sequential "${@:2}";
fi
elif [ "$1" = "corner-detection-parallel" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input_file output_file [threshold] [num_threads]";
exit 0;
else
./corner-detection/bin/corner-detection-parallel "${@:2}";
fi
elif [ "$1" = "rotation" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 image-file rotation-degrees";
exit 0;
else
./Rotation/bin/rotate-seq "${@:2}";
fi
elif [ "$1" = "rotation-parallel" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 image-file rotation-degrees";
exit 0;
else
./Rotation/bin/rotate-parallel "${@:2}";
fi
elif [ "$1" = "scaling" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input scaleFactor";
exit 0;
else
./scaling/bin/scalingSequential "${@:2}";
fi
elif [ "$1" = "scaling-parallel" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input scaleFactor numThreads";
exit 0;
else
./scaling/bin/scalingParallel "${@:2}";
fi
elif [ "$1" = "gaussian-blur" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input_file output_file sigma";
exit 0;
else
./gaussianblur/bin/gb-sequential "${@:2}";
fi
elif [ "$1" = "gaussian-blur-parallel" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input_file output_file sigma";
exit 0;
else
./gaussianblur/bin/gb-parallel "${@:2}";
fi
elif [ "$1" = "high-pass-filter" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input.bmp output.bmp strength";
echo -e "where strength = [1-10]"
exit 0;
else
./HPF_Overlay/bin/projSequential "${@:2}";
fi
elif [ "$1" = "high-pass-filter-parallel" ]; then
if [ $# -lt 3 ]; then
echo -e $1 "usage: ";
echo -e "\t$0 $1 input.bmp output.bmp strength numThreads";
echo -e "where strength = [1-10]"
exit 0;
else
./HPF_Overlay/bin/projParallel "${@:2}";
fi
fi
fi
| true
|
e231ca5a2beddc60ae77b8e40b020f3c32b2048d
|
Shell
|
liberty8080/JacobAssistant
|
/build.sh
|
UTF-8
| 394
| 2.796875
| 3
|
[
"MIT"
] |
permissive
|
# Rebuild the jacob-assistant image and replace the running container.
containerName="jacob-assistant"; imageName="jacob-assistant:latest"
oldContainer=$(docker ps -aq -f name="${containerName}")
oldImage=$(docker images -aq "${imageName}")
# Quoting matters here: `[ -n $var ]` with an empty value collapses to
# `[ -n ]`, which is always true, so the original always attempted the
# rm/rmi even when nothing existed.
if [ -n "${oldContainer}" ]; then docker rm -f "${containerName}"; fi
if [ -n "${oldImage}" ]; then docker rmi "${oldImage}"; fi
docker build . -t "${imageName}"
# --restart must come before the image name; anything after the image
# is passed to the container's entrypoint, not to docker itself.
docker run -d --net=host --restart=always --name jacob-assistant "${imageName}"
| true
|
bc8ef6b192a949c22318f5446bd7bf5120204a49
|
Shell
|
leobyeon/holberton-system_engineering-devops
|
/0x04-loops_conditions_and_parsing/7-clock
|
UTF-8
| 252
| 3.390625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# displays the time for 12 hours and 59 minutes:
# display hours from 0 to 12
# display minutes from 1 to 59
for hour in {0..12}; do
	echo "Hour:" $hour
	for minute in {1..59}; do
		echo $minute
	done
done
| true
|
2704d84d1d0c1ac3d44ea691239b9fca85d0421d
|
Shell
|
chamchisand/dot
|
/i3/i3blocks/scripts/transmission
|
UTF-8
| 925
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/bash
# i3blocks status script: summarise transmission torrents.
# Prints two lines: per-torrent progress markers, then "done/total".
# okColor/warnColor/criticalColor come from the sourced "colors" helper
# next to this script — NOTE(review): confirm that file defines them.
. $(dirname "$0")/colors
lines=$(transmission-remote -l)
# First header column must read "ID"; otherwise the daemon is
# unreachable or the output is unexpected, so emit nothing.
check=$(echo "$lines" | head -n 1 | awk '{ print $1 }')
if [ "$check" != "ID" ]; then
exit
fi
# Strip the header row and the trailing summary row.
# NOTE(review): `head -n -1` is a GNU extension — confirm on BSD/macOS.
lines=$(echo "$lines" | tail -n +2 | head -n -1)
count=0
done=0
out=""
# Split on newlines only so torrent names containing spaces survive.
IFS='
'
for x in $lines; do
# ID,Done,Have,ETA,Up,Down,Ratio,Status,Name
percent=$(echo $x | awk '{ print $2 }')
count=$((count + 1))
if [ "$percent" == "100%" ]; then
done=$((done + 1))
out="$out $(okColor $percent)"
elif [[ "$x" == *"Up & Down"* ]]; then
out="$out $percent↑↓"
elif [[ "$x" == *"Down"* ]]; then
out="$out $percent↓"
elif [[ "$x" == *"Up"* ]]; then
out="$out $percent↑"
elif [[ "$x" == *"Stopped"* ]]; then
out="$out $(warnColor "$percent [stopped]")"
elif [[ "$x" == *"Idle"* ]]; then
out="$out $(criticalColor "$percent [idle]")"
fi
done
# No torrents at all: print nothing so the block stays empty.
if [ $count -eq 0 ]; then
exit
fi
echo "$out"
echo "$done/$count"
| true
|
c6dfff1965e93bc56dd08145e755e02dbbb73b42
|
Shell
|
thlorenz/dotfiles
|
/scripts/xcode.sh
|
UTF-8
| 944
| 2.921875
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Install personal Xcode customisations by symlinking themes and
# keybindings from the dotfiles checkout into Xcode's UserData.
echo "Linking FontAndColor Themes"
# Replace any existing theme directory with a symlink into the repo.
rm -rf ~/Library/Developer/Xcode/UserData/FontAndColorThemes && \
ln -s ~/dotfiles/xcode/UserData/FontAndColorThemes ~/Library/Developer/Xcode/UserData/FontAndColorThemes
## Settings
# Keep the stock keybindings as a .orig backup, then link ours in.
mv ~/Library/Developer/Xcode/UserData/KeyBindings ~/Library/Developer/Xcode/UserData/KeyBindings.orig && \
ln -s ~/dotfiles/xcode/UserData/KeyBindings ~/Library/Developer/Xcode/UserData/KeyBindings
echo "No longer Installing Alcatraz Plugin Manager"
echo "Follow instructions at https://github.com/XVimProject/XVim2 instead"
# curl -fsSL https://raw.github.com/supermarin/Alcatraz/master/Scripts/install.sh | sh
echo "Now open Xcode and allow XVim plugins"
## Plugins (Not working) -- need to be installed manually via Alcatraz manager
# find ~/dotfiles/xcode/Plug-ins -maxdepth 1 -mindepth 1 -type d -exec \
# ln -s '{}' ~/Library/Application\ Support/Developer/Shared/Xcode/Plug-ins/ \;
| true
|
0d0ce5820866af3e002c64c4b1a4e134a8a98cb1
|
Shell
|
lcesarmonteiro/bash-experiments
|
/loops
|
UTF-8
| 140
| 3.171875
| 3
|
[] |
no_license
|
#!/bin/bash
# Print a label for each of the three numbered options.
for choice in 1 2 3
do
	if [ "$choice" -eq 1 ]; then
		echo "First option"
	elif [ "$choice" -eq 2 ]; then
		echo "Second option"
	else
		echo "Third option"
	fi
done
| true
|
680e503bea037b3aa896e638a844284ecfe93ce6
|
Shell
|
shixukai/seting_backup
|
/.zprofile
|
UTF-8
| 224
| 2.671875
| 3
|
[] |
no_license
|
# List processes listening on IPv4 ports.
alias fuwu='lsof -Pni4 | grep LISTEN'
# Drop the http/https/ftp proxy variables from the environment.
unsetproxy() {
    unset {http,https,ftp}_proxy
}
# Run a command inside the docker-compose "dev" service via a login shell.
de() {
    cmd="$@"
    docker-compose exec dev bash -lc "$cmd"
}
# Throwaway interactive container with the current directory mounted.
alias dr='docker run --rm -it -v `pwd`:`pwd` -w `pwd`'
| true
|
7afa4df31199f4f9482fe337bc25a1a728be7889
|
Shell
|
PrashantSalunke22/loseface
|
/scripts/batch_job.sh
|
UTF-8
| 4,459
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
#! /bin/sh
LOSEFACE=../loseface
FACES=orl_patterns
SUBJECTS=40
echo "IMPORTANT! The following steps will take a lot of time (days, maybe weeks)"
echo "Patterns=\"$FACES\", Subjects=$SUBJECTS"
echo "Creating patterns..."
$LOSEFACE $FACES.lua >/dev/null
echo "Done"
echo "MLP global (25 inputs)..."
$LOSEFACE mlp_global.lua $FACES 25 25 $SUBJECTS mse > $FACES/mlp_global_25_25.output
$LOSEFACE mlp_global.lua $FACES 25 50 $SUBJECTS mse > $FACES/mlp_global_25_50.output
$LOSEFACE mlp_global.lua $FACES 25 75 $SUBJECTS mse > $FACES/mlp_global_25_75.output
echo "MLP global (50 inputs)..."
$LOSEFACE mlp_global.lua $FACES 50 25 $SUBJECTS mse > $FACES/mlp_global_50_25.output
$LOSEFACE mlp_global.lua $FACES 50 50 $SUBJECTS mse > $FACES/mlp_global_50_50.output
$LOSEFACE mlp_global.lua $FACES 50 75 $SUBJECTS mse > $FACES/mlp_global_50_75.output
echo "MLP global (75 inputs)..."
$LOSEFACE mlp_global.lua $FACES 75 25 $SUBJECTS mse > $FACES/mlp_global_75_25.output
$LOSEFACE mlp_global.lua $FACES 75 50 $SUBJECTS mse > $FACES/mlp_global_75_50.output
$LOSEFACE mlp_global.lua $FACES 75 75 $SUBJECTS mse > $FACES/mlp_global_75_75.output
echo "aMLP (25 inputs - basic training)..."
$LOSEFACE mlp_array.lua $FACES 25 25 $SUBJECTS 0 mse > $FACES/mlp_array_25_25_0neg.output
$LOSEFACE mlp_array.lua $FACES 25 50 $SUBJECTS 0 mse > $FACES/mlp_array_25_50_0neg.output
$LOSEFACE mlp_array.lua $FACES 25 75 $SUBJECTS 0 mse > $FACES/mlp_array_25_75_0neg.output
echo "aMLP (50 inputs - basic training)..."
$LOSEFACE mlp_array.lua $FACES 50 25 $SUBJECTS 0 mse > $FACES/mlp_array_50_25_0neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 0 mse > $FACES/mlp_array_50_50_0neg.output
$LOSEFACE mlp_array.lua $FACES 50 75 $SUBJECTS 0 mse > $FACES/mlp_array_50_75_0neg.output
echo "aMLP (75 inputs - basic training)..."
$LOSEFACE mlp_array.lua $FACES 75 25 $SUBJECTS 0 mse > $FACES/mlp_array_75_25_0neg.output
$LOSEFACE mlp_array.lua $FACES 75 50 $SUBJECTS 0 mse > $FACES/mlp_array_75_50_0neg.output
$LOSEFACE mlp_array.lua $FACES 75 75 $SUBJECTS 0 mse > $FACES/mlp_array_75_75_0neg.output
echo "aMLP (25 inputs - special training)..."
$LOSEFACE mlp_array.lua $FACES 25 25 $SUBJECTS 3 mse > $FACES/mlp_array_25_25_3neg.output
$LOSEFACE mlp_array.lua $FACES 25 50 $SUBJECTS 3 mse > $FACES/mlp_array_25_50_3neg.output
$LOSEFACE mlp_array.lua $FACES 25 75 $SUBJECTS 3 mse > $FACES/mlp_array_25_75_3neg.output
echo "aMLP (50 inputs - special training)..."
$LOSEFACE mlp_array.lua $FACES 50 25 $SUBJECTS 3 mse > $FACES/mlp_array_50_25_3neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 3 mse > $FACES/mlp_array_50_50_3neg.output
$LOSEFACE mlp_array.lua $FACES 50 75 $SUBJECTS 3 mse > $FACES/mlp_array_50_75_3neg.output
echo "aMLP (75 inputs - special training)..."
$LOSEFACE mlp_array.lua $FACES 75 25 $SUBJECTS 3 mse > $FACES/mlp_array_75_25_3neg.output
$LOSEFACE mlp_array.lua $FACES 75 50 $SUBJECTS 3 mse > $FACES/mlp_array_75_50_3neg.output
$LOSEFACE mlp_array.lua $FACES 75 75 $SUBJECTS 3 mse > $FACES/mlp_array_75_75_3neg.output
echo "Testing with different number of negatives..."
#$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 0 mse > $FACES/mlp_array_50_50_0neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 1 mse > $FACES/mlp_array_50_50_1neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 2 mse > $FACES/mlp_array_50_50_2neg.output
#$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 3 mse > $FACES/mlp_array_50_50_3neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 4 mse > $FACES/mlp_array_50_50_4neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 5 mse > $FACES/mlp_array_50_50_5neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 6 mse > $FACES/mlp_array_50_50_6neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 7 mse > $FACES/mlp_array_50_50_7neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 8 mse > $FACES/mlp_array_50_50_8neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 9 mse > $FACES/mlp_array_50_50_9neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 10 mse > $FACES/mlp_array_50_50_10neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 11 mse > $FACES/mlp_array_50_50_11neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 12 mse > $FACES/mlp_array_50_50_12neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 13 mse > $FACES/mlp_array_50_50_13neg.output
$LOSEFACE mlp_array.lua $FACES 50 50 $SUBJECTS 14 mse > $FACES/mlp_array_50_50_14neg.output
| true
|
6e3a1b37812b7710759a0a54a8750ded738cd0f8
|
Shell
|
weilaidb/PythonExample
|
/regularexpress/home/weilaidb/software/git-2.0.5/t/lib-rebase.sh
|
UTF-8
| 1,517
| 2.765625
| 3
|
[] |
no_license
|
# Helper functions used by interactive rebase tests.
# After setting the fake editor with this function, you can
#
# - override the commit message with $FAKE_COMMIT_MESSAGE
# - amend the commit message with $FAKE_COMMIT_AMEND
# - check that non-commit messages have a certain line count with $EXPECT_COUNT
# - check the commit count in the commit message header with $EXPECT_HEADER_COUNT
# - rewrite a rebase -i script as directed by $FAKE_LINES.
# $FAKE_LINES consists of a sequence of words separated by spaces.
# The following word combinations are possible:
#
# "<lineno>" -- add a "pick" line with the SHA1 taken from the
# specified line.
#
# "<cmd> <lineno>" -- add a line with the specified command
# ("squash", "fixup", "edit", or "reword") and the SHA1 taken
# from the specified line.
#
# "exec_cmd_with_args" -- add an "exec cmd with args" line.
#
# "#" -- Add a comment line.
#
# ">" -- Add a blank line.
set_fake_editor ()
# After set_cat_todo_editor, rebase -i will write the todo list (ignoring
# blank lines and comments) to stdout, and exit failure (so you should run
# it with test_must_fail). This can be used to verify the expected user
# experience, for todo list changes that do not affect the outcome of
# rebase; or as an extra check in addition to checking the outcome.
set_cat_todo_editor ()
# checks that the revisions in "$2" represent a linear range with the
# subjects in "$1"
test_linear_range ()
reset_rebase ()
cherry_pick ()
revert ()
make_empty ()
| true
|
bd9edc8ca9acb28aaf88cfcb0153e9bd60a138b9
|
Shell
|
supercontainers/sc20-tutorial
|
/exercises/openfoam/mpirun.sh
|
UTF-8
| 843
| 2.671875
| 3
|
[
"CC-BY-4.0",
"MIT",
"LicenseRef-scancode-public-domain"
] |
permissive
|
#!/bin/bash
NTASKS="2"
image="library://marcodelapierre/beta/openfoam:v1812"
# this configuration depends on the host
export MPICH_ROOT="/usr/local/packages/e4s/spack"
export MPICH_LIBS="$( which mpirun )"
export MPICH_LIBS="${MPICH_LIBS%/bin/mpirun*}/lib"
export SINGULARITY_BINDPATH="$MPICH_ROOT"
export SINGULARITYENV_LD_LIBRARY_PATH="$MPICH_LIBS"
# pre-processing
singularity exec $image \
blockMesh | tee log.blockMesh
singularity exec $image \
topoSet | tee log.topoSet
singularity exec $image \
decomposePar -fileHandler uncollated | tee log.decomposePar
# run OpenFoam with MPI
mpirun -n $NTASKS \
singularity exec $image \
simpleFoam -fileHandler uncollated -parallel | tee log.simpleFoam
# post-processing
singularity exec $image \
reconstructPar -latestTime -fileHandler uncollated | tee log.reconstructPar
| true
|
d5fc41938b3ba6b569125e5045c5d38cb04058b0
|
Shell
|
jeonghyunkeem/CompNet
|
/common_3d/compile.sh
|
UTF-8
| 334
| 3.296875
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
ROOT_DIR=$(pwd)
echo "ROOT_DIR=${ROOT_DIR}"
for EXTENSION_DIR in ops/*
do
if [ -d "${EXTENSION_DIR}" ] ; then
echo ${EXTENSION_DIR}
cd ${EXTENSION_DIR}
if [ -d "build" ]; then
rm -r build
fi
python setup.py build_ext --inplace
fi
cd ${ROOT_DIR}
done
| true
|
7c2b56ef42639e3a0f87933e2eb4dbe279791df1
|
Shell
|
CaptainQuirk/ranger-recipe
|
/install
|
UTF-8
| 975
| 2.984375
| 3
|
[] |
no_license
|
#!/bin/bash
# depends: nerd-fonts
CONFIG_SRC_DIR=~/Documents/configuration/src
RANGER_RECIPE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
shef brew ranger w3m
shef brew w3m
shef apt w3m-img
if [ ! -d $CONFIG_SRC_DIR/.ranger-config ]; then
git clone https://github.com/CaptainQuirk/.ranger $CONFIG_SRC_DIR/.ranger-config
ln -s $CONFIG_SRC_DIR/.ranger-config ~/.config/ranger
fi
cd $CONFIG_SRC_DIR/.ranger-config || exit 1
git pull
cd - || exit 2
sudo cp --update "$RANGER_RECIPE_DIR/files/x-ranger" /usr/local/bin
cp --update "$RANGER_RECIPE_DIR/files/ranger.desktop" "$HOME/.local/share/applications"
cp --update "$RANGER_RECIPE_DIR/files/ranger.png" "$HOME/.local/share/icons"
grep -nri "inode" "$HOME/.local/share/applications/mimeapps.list" > /dev/null 2>&1
if [ $? -eq 1 ]; then
echo "inode/directory=x-ranger.desktop" >> "$HOME/.local/share/applications/mimeapps.list"
fi
shef apt zenity
shef apt highlight
shef apt xpdf
shef brew atool
| true
|
470ae12f2e4dd28b23380f86c869f67f6a76988f
|
Shell
|
GeorgePiskorsky/dns-aws-selectel
|
/getAwsZone.sh
|
UTF-8
| 213
| 2.53125
| 3
|
[] |
no_license
|
#!/bin/bash
DST="/home/george/dns_backup/backup";
ZONE_ID="Z1AP1YP3QDRHMT";
DATE=`date +"%Y-%m-%d-%H%M%S"`
aws route53 list-resource-record-sets --hosted-zone-id $ZONE_ID --output json > $DST/backup_$DATE.json
| true
|
c7e2c8457ba338ff13f9cab7a1c0d6a4a6c72846
|
Shell
|
FauxFaux/debian-control
|
/m/mail-spf-perl/spf-tools-perl_2.9.0-4_all/postinst
|
UTF-8
| 569
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/sh
set -e
mode=$1
source_package=mail-spf-perl
case "$mode" in
configure )
prev_version=$2
update-alternatives --install /usr/bin/spfquery spfquery /usr/bin/spfquery.$source_package 100 \
--slave /usr/share/man/man1/spfquery.1.gz spfquery.1.gz /usr/share/man/man1/spfquery.$source_package.1p.gz
update-alternatives --install /usr/sbin/spfd spfd /usr/sbin/spfd.$source_package 100 \
--slave /usr/share/man/man8/spfd.8.gz spfd.8.gz /usr/share/man/man8/spfd.$source_package.8p.gz
;;
esac
| true
|
06faef7ddcc731b36e528a0ab20902c4f2b3bc68
|
Shell
|
jbuhacoff/bash-fn-import
|
/src/main/script/fn.sh
|
UTF-8
| 3,670
| 4.375
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# User defines FN_PATH to be ":"-separated list of directories to search, in order.
# If FN_PATH is empty, it is assumed to be "."
to_stderr() {
(>&2 "$@")
}
# the function definition is enclosed in a subshell to allow the use of its own private
# library functions that won't also be exported to the namespace of the importing
# script
# global variables referenced: FN_PATH (default value "." if undefined or empty)
fn_import() {
local option_force option_locate paths err
declare -a fn_array
until [ $# -eq 0 ]
do
case "$1" in
--force|-f)
option_force=yes
shift
;;
--locate|-l)
option_locate=yes
shift
;;
*)
fn_array+=("$1")
shift
;;
esac
done
paths=$(
declare -a FN_PATH_ARRAY
# split FN_PATH on ":" and populate the global FN_PATH_ARRAY so we only do this once
# per invocation.
fnpath_to_array() {
local fnpath="$FN_PATH"
if [ -z "$fnpath" ]; then
fnpath=.
fi
mapfile -d : -t FN_PATH_ARRAY <<< "$fnpath"
}
trim() {
local var="$*"
# remove leading whitespace characters
var="${var#"${var%%[![:space:]]*}"}"
# remove trailing whitespace characters
var="${var%"${var##*[![:space:]]}"}"
echo -n "$var"
}
to_stderr() {
(>&2 "$@")
}
fn_locate_in_path() {
local file=$1
local pathentry
for pathentry in "${FN_PATH_ARRAY[@]}"
do
pathentry=$(trim "$pathentry")
if [ -f "$pathentry/$file.sh" ]; then
echo "$pathentry/$file.sh"
return 0
fi
done
to_stderr echo "error: file not found: $file"
return 1
}
fn_locate() {
local file found err=0
for file in "${fn_array[@]}"
do
fn_locate_in_path $file
((err+=$?))
done
if [ $err -gt 255 ]; then
to_stderr echo "error: too many missing files: $err"
err=255
fi
return $err
}
fnpath_to_array
fn_locate "${fn_array[@]}"
exit $?
)
err=$?
if [ -n "$option_locate" ]; then
echo "$paths"
else
if [ -n "$paths" ]; then
source $paths
fi
fi
return $err
}
# credit: https://stackoverflow.com/a/18839557/1347300
copy_function() {
test -n "$(declare -f "$1")" || return
eval "${_/$1/$2}"
}
print_help() {
echo "usage: source <(fn import) && import [--force|-f] lib1 lib2 ..."
echo "usage: source <(fn import as import2) && import2 [--force|-f] lib1 lib2 ..."
echo "usage: fn locate lib1 lib2 ..."
echo "usage: fn --locate lib1 lib2 ..."
echo "usage: fn -l lib1 lib2 ..."
}
# Main
if [ $# -eq 0 ]; then
to_stderr print_help
exit 1
fi
case "$1" in
import)
if [ -n "$2" ] && [ "$2" == "as" ] && [ -n "$3" ]; then
import_name=$3
else
import_name=import
fi
copy_function fn_import $import_name
declare -f $import_name
exit 0
;;
locate|--locate|-l)
shift
fn_import --locate "$@"
exit $?
;;
*)
to_stderr print_help
exit 1
;;
esac
| true
|
210d8b02e0cb6d93afa01f58a32229643dda6144
|
Shell
|
UTMIST/utmist.gitlab.io
|
/onedrive.sh
|
UTF-8
| 496
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
if test -f ".env"; then
. ./.env
fi
FOLDER_NAME=$ONEDRIVE_FOLDER_NAME
cd onedeath
lua main.lua $ONEDRIVE_FOLDER_LINK
find . -type f -name '*.exf' -delete
rm -rf ../content* ../insertions* ../static/*.pdf ../static/images/profilepics/
mv $FOLDER_NAME/config.yaml ../
mv $FOLDER_NAME/content ../content_base
mv $FOLDER_NAME/insertions ../insertions_base
mv $FOLDER_NAME/static/*.pdf ../static/
mv $FOLDER_NAME/static/images/profilepics ../static/images/
rm -rf $FOLDER_NAME cookies.txt
cd ..
| true
|
de90a7adc8dba9adbb534e5e2e4ae6f657878904
|
Shell
|
MarvelFisher/midas
|
/info/fxruninfo.sh
|
UTF-8
| 887
| 2.84375
| 3
|
[] |
no_license
|
#!/bin/bash
if [ -s LTS-INFO_PID ]; then
INFO_PID=`cat LTS-INFO_PID`
kill -9 $INFO_PID
fi
sleep 5
#JAVA_OPTS="-Dcom.sun.management.jmxremote -Djava.rmi.server.hostname=10.0.0.51 -Dcom.sun.management.jmxremote.port=1234 -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false"
JAVA_OPTS="${JAVA_OPTS} -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintTenuringDistribution -Xloggc:log/info_gc`date +%Y%m%d_%H%M%S`.log -server -d64 -Xmx1g -XX:+HeapDumpOnOutOfMemoryError -XX:+DisableExplicitGC -XX:MaxGCPauseMillis=100 -XX:+UseG1GC -XX:InitiatingHeapOccupancyPercent=0 -XX:MaxDirectMemorySize=1G -XX:AutoBoxCacheMax=3000000 -XX:MaxPermSize=128m"
echo "Starting LTS-INFO "`date` | tee -a ./log/console.log
java ${JAVA_OPTS} -Duser.timezone=GMT+8 -jar jars/cyanspring-info-2.56.jar conf/info_fxserver.xml >> ./log/console.log &
echo $! > LTS-INFO_PID
| true
|
a5be0a0c3d39aab6b74144f424a1d5b5e6384e52
|
Shell
|
aajanki/finnish-pos-accuracy
|
/run.sh
|
UTF-8
| 540
| 2.578125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
set -eu
export PATH=$(pwd)/models/cg3/src:$PATH
# Clean up the test data and save it to data/preprocessed
python preprocess_data.py
# Predict lemmas and POS tags using all models.
# Writes results under results/predictions/*/
python predict.py
# Evaluate by comparing the predictions with the gold standard data.
# Writes results to results/evaluation.csv
python evaluate.py
# Plot the evaluations.
# Saves the plots under results/images/
python plot_results.py
# Save lemma error is results/errors/
python print_errors.py
| true
|
2e6cdb19d3eb00e5b5e3518284ed62ecade2c533
|
Shell
|
mshkliai/Nibbler
|
/install.sh
|
UTF-8
| 2,494
| 3.15625
| 3
|
[] |
no_license
|
#!/bin/bash
echo "\033[0;33mChecking required libraries...\033[0m"
if brew ls --versions sfml > /dev/null; then
echo "\033[0;36mSFML is already installed\033[0m"
else
echo "\033[0;31mSFML is required in order to start the game. Please wait until it is installed\033[0m"
brew install sfml
echo "\033[0;32mSFML installation completed\033[0m"
fi
if brew ls --versions sdl2 > /dev/null; then
echo "\033[0;36mSDL2 is already installed\033[0m"
else
echo "\033[0;31mSDL2 is required in order to start the game. Please wait until it is installed\033[0m"
brew install sdl2
echo "\033[0;32mSDL2 installation completed\033[0m"
fi
if brew ls --versions sdl2_image > /dev/null; then
echo "\033[0;36mSDL2_Image is already installed\033[0m"
else
echo "\033[0;31mSDL2_Image is required in order to start the game. Please wait until it is installed\033[0m"
brew install sdl2_image
echo "\033[0;32mSDL2_Image installation completed\033[0m"
fi
if brew ls --versions sdl2_ttf > /dev/null; then
echo "\033[0;36mSDL2_TTF is already installed\033[0m"
else
echo "\033[0;31mSDL2_TTF is required in order to start the game. Please wait until it is installed\033[0m"
brew install sdl2_ttf
echo "\033[0;32mSDL2_TTF installation completed\033[0m"
fi
if brew ls --versions glfw > /dev/null; then
echo "\033[0;36mGLFW is already installed\033[0m"
else
echo "\033[0;31mGLFW is required in order to start the game. Please wait until it is installed\033[0m"
brew install glfw
echo "\033[0;32mGLFW installation completed\033[0m"
fi
if brew ls --versions freetype > /dev/null; then
echo "\033[0;36mFreeType is already installed\033[0m"
else
echo "\033[0;31mFreeType is required in order to start the game. Please wait until it is installed\033[0m"
brew install freetype
echo "\033[0;32mFreeType installation completed\033[0m"
fi
if brew ls --versions ftgl > /dev/null; then
echo "\033[0;36mFTGL is already installed\033[0m"
else
echo "\033[0;31mFTGL is required in order to start the game. Please wait until it is installed\033[0m"
brew install ftgl
echo "\033[0;32mFTGL installation completed\033[0m"
fi
echo "\033[0;33mDownloading and installing a SOIL library (macos compatible)...\033[0m"
git clone https://github.com/itiievskyi/SOIL-recompiled.git ./temp-soil 2> /dev/null
mv ./temp-soil/SOIL.h ~/.brew/include/ 2> /dev/null
mv ./temp-soil/libSOIL.a ~/.brew/lib/ 2> /dev/null
rm -rf ./temp-soil/ 2> /dev/null
echo "\033[0;32mSOIL installation completed\033[0m"
| true
|
aef5129d47cfe5097d29bf7accee862b8e7ade6d
|
Shell
|
hmasmoudi/SyphaxOS
|
/Default/0006-SyphaxOS-Extra/001_BuildPackagesScripts/0960-strace/PKGBUILD
|
UTF-8
| 541
| 2.578125
| 3
|
[] |
no_license
|
# Maintainer: Hatem Masmoudi <hatem.masmoudi@gmail.com>
pkgname=strace
pkgver=4.20
pkgrel=2
pkgdesc='A diagnostic, debugging and instructional userspace tracer'
arch=(x86_64)
url='http://sourceforge.net/projects/strace'
license=(BSD)
depends=(perl)
source=(http://downloads.sourceforge.net/$pkgname/${pkgname}-${pkgver}.tar.xz)
md5sums=('SKIP')
build() {
cd $pkgname-$pkgver
./configure --prefix=/usr
make
}
package() {
cd $pkgname-$pkgver
make DESTDIR="$pkgdir" install
install -Dm644 COPYING "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
| true
|
e97828c66dbe3c7ce5fa92b80973eb0d5a9771e2
|
Shell
|
stayhigh/scriptforFAE
|
/scripts/third_party/linux_shell_ols3/chk/samples/getip1.sh
|
BIG5
| 210
| 3.078125
| 3
|
[] |
no_license
|
#! /bin/bash
# ܼƭnŧi~ϥ
shopt -s -o nounset
ListIPcmd="/sbin/ifconfig"
IP=$($ListIPcmd | grep 'inet addr:' | grep -v '127.0.0.1' | awk '{print $2}' | awk -F: '{print $2}')
echo $IP
| true
|
f57be149367194168c35f1bfad1c61914c45c9d8
|
Shell
|
cyliustack/clusim
|
/vcluster/stop-vcluster.sh
|
UTF-8
| 766
| 3.078125
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
C_NONE="\033[0m"
C_CYAN="\033[36m"
C_RED="\033[31m"
C_GREEN="\033[32m"
C_ORANGE="\033[33m"
C_BLUE="\033[34m"
C_PURPLE="\033[35m"
C_CYAN="\033[36m"
C_LIGHT_GRAY="\033[37m"
print_misc() {
echo -e "${C_PURPLE} $1 ${C_NONE}"
}
print_info() {
echo -e "${C_BLUE} $1 ${C_NONE}"
}
print_error() {
echo -e "${C_RED} $1 ${C_NONE}"
}
print_warning() {
echo -e "${C_ORANGE} $1 ${C_NONE}"
}
#print_misc "Bash version ${BASH_VERSION}."
if [[ "$(sudo docker ps -aq)" == "" ]]; then
print_warning "No containers to be stopped."
else
print_info "Stopping containers..."
sudo docker stop $(sudo docker ps -aq)
print_info "Removing containers..."
sudo docker rm $(sudo docker ps -aq)
print_info "Done."
fi
./show-vcluster.sh
| true
|
d18ea94010a2bd4474d6a0bb1a83745af8af814e
|
Shell
|
david-stratusee/backup
|
/bin/batch_rename.sh
|
UTF-8
| 382
| 3.734375
| 4
|
[] |
no_license
|
#!/bin/bash -
if [ $# -lt 2 ]
then
echo "Usage: `basename $0` src dst [dir]"
exit
fi
if [ $# -eq 3 ]
then
echo "directory is $3"
dir=$3
else
dir="."
fi
#list=`find $dir -name \"*$1*\" -type f`
#list=`ls $dir | grep -e \"$1\"`
ls $dir | grep -e "$1" | while read file
do
newfile=`echo $file | sed s/$1/$2/g`
# echo $file $newfile
mv "$dir/$file" "$dir/$newfile" -f
done
| true
|
18241e495a13007add045977fed44ffecfc3e259
|
Shell
|
convoy-studio/havana-eliott-erwitt
|
/npm
|
UTF-8
| 163
| 2.65625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -aeu -o pipefail
source .env
NODE_CONTAINER="${PROJECT}_${ENVIRONMENT}_node"
docker exec -u node -ti $NODE_CONTAINER bash -c "npm $*"
| true
|
98f2fcd2970300d7d4d0140038e67af57421bdd4
|
Shell
|
kot-behemoth/dotfiles
|
/direnvrc
|
UTF-8
| 321
| 3.375
| 3
|
[] |
no_license
|
# ; -*- mode: sh -*-
layout_activate() {
if [ -n "$(which pyenv)" ]; then
source $(pyenv root)/versions/$1/bin/activate
fi
}
# From https://blog.differentpla.net/blog/2019/01/30/nvm-direnv/
use_nvm() {
NODE_VERSION="$1"
type nvm >/dev/null 2>&1 || . ~/.nvm/nvm.sh
nvm use "$NODE_VERSION"
}
| true
|
a398f0296e5be5f7fdc8beecd9d3130ff737bcc3
|
Shell
|
sivasankariit/sw-rl
|
/tests/run.sh
|
UTF-8
| 507
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash
dir=`date +%b%d-%H:%M`
time=30
ns=0
mkdir -p $dir
for nrr in 512; do
for rrsize in 1; do
for rl in htb newrl none; do
exptid=rl-$rl-rrsize-$rrsize
python netperf.py --nrr $nrr \
--exptid $exptid \
-t $time \
--ns $ns \
--rl $rl \
--rrsize $rrsize
mv $exptid.tar.gz $dir/
pushd $dir;
tar xf $exptid.tar.gz
python ../plot.py --rr $exptid/* -o $exptid.png --ymin 0.9
popd;
done;
done
done
echo Experiment results are in $dir
| true
|
6219df54ce348a3f42bacb506dae084ea8268f84
|
Shell
|
petronny/aur3-mirror
|
/chromium-update/PKGBUILD
|
UTF-8
| 852
| 2.515625
| 3
|
[] |
no_license
|
# Maintainer: Logan Allen <loganfynne at gmail dot com>
pkgname=chromium-update
pkgver=1.1
pkgrel=13
pkgdesc='Simple SH installer and updater of precompiled Chromium nightly builds'
arch=('any')
url="http://www.loganfynne.com/pkg/chromium-update.tar"
license=('GPL')
conflicts=('chromium')
depends=('gconf' 'libpng12' 'gtk2' 'dbus-glib' 'nss' 'alsa-lib' 'xdg-utils' 'bzip2' 'libevent' 'libxss' 'libxtst' 'ttf-dejavu' 'desktop-file-utils' 'hicolor-icon-theme' 'unzip')
source=('http://www.loganfynne.com/pkg/chromium-update.tar')
md5sums=('26846ca8759a47bd5a97e91df560b205')
package() {
tar -xf chromium-update.tar
mkdir -p ${pkgdir}/usr/share/applications
mkdir -p ${pkgdir}/usr/bin
cp `uname -m`/chromium-update ${pkgdir}/usr/bin/
chmod +x ${pkgdir}/usr/bin/chromium-update
cp chromium.desktop ${pkgdir}/usr/share/applications/
cp chromium ${pkgdir}/usr/bin/
}
| true
|
b7a5f4785e37869146de53c9267a6348fc078e2e
|
Shell
|
ArroTahur/skriptimine
|
/praks9/yl3
|
UTF-8
| 332
| 2.609375
| 3
|
[] |
no_license
|
#!/bin/bash
echo -n "Sisesta ridade arv: "
read row
echo -n "Sisesta t2rnide rav reas: "
read t
for (( i = 1; i <= 5; i++ ))
do
if (( $i == 1 || $i == 5 ))
then
echo -n $i". "
for (( j = 1; j <= $t; j++ ))
do
echo -n "*"
done
echo ""
else
echo -n $i". "
echo -n "*"
for (( q = 2; q <= 4; q++ ))
do
echo -n " "
done
echo "*"
fi
done
| true
|
2cf69936f1c18af7a5d7a073dad9164989dd744d
|
Shell
|
djdagovs/Post-Installation-Script-for-Ubuntu-16.04-LTS-ENG
|
/English/2_spi_Con_Sp.sh
|
UTF-8
| 1,604
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/sh
#Add gdiplus to wine which is quite usefull while installing games bought on gog.com
winetricks gdiplus
#Cleans the system after the first script (removes downloaded install packages etc.)
sudo apt-get autoclean
sudo apt-get clean
sudo apt-get autoremove
#Sets a couple of things in the bahaviour of the system
gsettings set org.gnome.desktop.privacy remember-recent-files false #The 'record file and application usage' option is turned off
gsettings set org.compiz.core:/org/compiz/profiles/unity/plugins/core/ hsize 2 #
gsettings set org.compiz.core:/org/compiz/profiles/unity/plugins/core/ vsize 2 #Turn on workspaces
gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-ac-timeout 0 #The computer won't go to sleep when pluged in.
gsettings set org.gnome.settings-daemon.plugins.power sleep-inactive-battery-timeout 600 #When on battery it will go to sleep after 10 minutes
gsettings set org.gnome.settings-daemon.plugins.power lid-close-battery-action suspend #When the Lid is closed the computer will go to sleep (pluged in)
gsettings set org.gnome.settings-daemon.plugins.power lid-close-ac-action suspend #When the Lid is closed the computer will go to sleep (battery)
gsettings set org.gnome.desktop.privacy send-software-usage-stats false #Stops sending software usage stats to Canonical
gsettings set org.gnome.desktop.screensaver lock-enabled true #Enable lock screen when idle
gsettings set org.gnome.desktop.screensaver lock-delay uint32 3600 #Lock screen after 5 minutes
gsettings set org.gnome.desktop.screensaver idle-delay uint32 0 #Never turn of screen
| true
|
040a52566fbcb276fe710c4a6a889de27ef3ae74
|
Shell
|
sonph/dotfiles
|
/testing/make.sh
|
UTF-8
| 1,571
| 4.28125
| 4
|
[] |
no_license
|
#!/bin/bash
# Run the tests in a local docker container, which runs the same travis test script.
# This script does the following:
# - create a new test image with necessary software installed e.g. git
# - remove existing containers, and create a new one for each test run
# - mount the dotfiles directory and execute the travis test script
# - if the test fails, drop into bash for debugging
# with these assumptions:
# - docker is properly installed and running
# - dotfiles is cloned to ~/.files
# - script is invoked from .files
TESTING_DIR="$(dirname "$0")"
source "$TESTING_DIR/../bin/common_utils.sh"
DOCKER_CONTAINER_NAME='dotfiles_test_ctn'
DOCKER_IMAGE_NAME='dotfiles_test_img'
function build() {
info "Building dotfiles test docker image"
docker build ./ \
-f dotfiles_test.Dockerfile \
-t "$DOCKER_IMAGE_NAME"
}
function test() {
if [[ "$(docker image ls | grep -ic "$DOCKER_IMAGE_NAME")" -lt 1 ]]; then
info "Building test image"
docker_build
fi
if [[ "$(docker container ls -a | grep -c "$DOCKER_CONTAINER_NAME")" -ge 1 ]]; then
info "Stopping container "
docker container stop $DOCKER_CONTAINER_NAME
info "Removing container "
docker container rm $DOCKER_CONTAINER_NAME
fi
info "Starting docker container..."
docker run \
--name $DOCKER_CONTAINER_NAME \
--volume "$HOME/.files:/files" \
--env TRAVIS_OS_NAME=linux \
--interactive \
--tty \
$DOCKER_IMAGE_NAME
}
if [ $# -eq 0 ]; then
compgen -A function | grep -Ev '^(fail|info|ok)'
exit 0
fi
"$@"
| true
|
87d54facdead03994fa14a2d890c7288d41424c8
|
Shell
|
liddhappy/vuepress
|
/blog_deploy.sh
|
UTF-8
| 952
| 3.65625
| 4
|
[] |
no_license
|
#!/usr/bin/env sh
# 确保脚本抛出遇到的错误
set -e
outputPath=./blog/.vuepress/dist/
setGitGlobalConfig() {
git config --global user.name "liddhappy"
git config --global user.email "870886477@qq.com"
}
if [ -f "${outputPath}index.html" ];then
echo "index.html文件存在"
else
echo "index.html文件不存在"
# 生成静态文件
yarn build
fi
# 进入生成的文件夹
cd ${outputPath}
# deploy to coding
if [ -z "$CODING_TOKEN" ]; then # -z 字符串 长度为0则为true;$CODING_TOKEN来自于github仓库`Settings/Secrets`设置的私密环境变量
msg='local deploy'
codingUrl=git@e.coding.net:liddhappy/blog/vuepress-docs.git
else
msg='来自github actions的自动部署'
codingUrl=https://${CODING_TOKEN}@e.coding.net/liddhappy/blog/vuepress-docs.git
setGitGlobalConfig
fi
git init
git add -A
git commit -m "${msg}"
git push -f $codingUrl master # 推送到coding
cd - # 退回开始所在目录
rm -rf ${outputPath}
| true
|
b5c0532231476a01cf504325cf539b106889d73d
|
Shell
|
SuperScript/shttpd
|
/src/service_run.sh
|
UTF-8
| 365
| 2.890625
| 3
|
[] |
no_license
|
# service_run dir user prog
service_run() {
service_run_tmp1="$1"
shift
safe mkdir -p "$service_run_tmp1" "$service_run_tmp1/env"
safe cat > "$service_run_tmp1/run.tmp" <<EOF
#!/bin/sh
exec 2>&1
exec envdir ./env sh -c '
exec envuidgid $@
'
EOF
safe chmod 0755 "$service_run_tmp1/run.tmp"
safe mv "$service_run_tmp1/run.tmp" "$service_run_tmp1/run"
}
| true
|
23de9bdb076dc4cb8d26be303a2c7bb34ea38bd4
|
Shell
|
SJTU-IPADS/reef-artifacts
|
/reef-env/hip/bin/hip_embed_pch.sh
|
UTF-8
| 3,083
| 2.953125
| 3
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
#!/bin/bash
# Copyright (c) 2020-2021 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#set -x

# Pre-compile the HIP headers into a PCH and embed the result in an ELF
# object (hip_pch.o) via llvm-mc, for linking into the HIP runtime.
#
# Arguments:
#   $1  HIP build-tree include dir
#   $2  HIP install include dir
#   $3  path three levels below the LLVM root (LLVM_DIR derived from it)
#   $4  HSA runtime root (provides include/)
HIP_BUILD_INC_DIR="$1"
HIP_INC_DIR="$2"
LLVM_DIR="$3/../../../"
HSA_DIR="$4"

# Per-process scratch directory; abort early if it cannot be created,
# since every later step writes into it.
tmp=/tmp/hip_pch.$$
mkdir -p "$tmp" || exit 1

# Device-side attribute macros appended to the preprocessed header so the
# -cc1 PCH step sees them. (Heredoc content intentionally unchanged.)
cat >"$tmp/hip_macros.h" <<EOF
#define __device__ __attribute__((device))
#define __host__ __attribute__((host))
#define __global__ __attribute__((global))
#define __constant__ __attribute__((constant))
#define __shared__ __attribute__((shared))
#define launch_bounds_impl0(requiredMaxThreadsPerBlock) \
__attribute__((amdgpu_flat_work_group_size(1, requiredMaxThreadsPerBlock)))
#define launch_bounds_impl1(requiredMaxThreadsPerBlock, minBlocksPerMultiprocessor) \
__attribute__((amdgpu_flat_work_group_size(1, requiredMaxThreadsPerBlock), \
amdgpu_waves_per_eu(minBlocksPerMultiprocessor)))
#define select_impl_(_1, _2, impl_, ...) impl_
#define __launch_bounds__(...) \
select_impl_(__VA_ARGS__, launch_bounds_impl1, launch_bounds_impl0)(__VA_ARGS__)
EOF

# Minimal translation unit whose preprocessed form seeds the PCH.
cat >"$tmp/hip_pch.h" <<EOF
#include "hip/hip_runtime.h"
#include "hip/hip_fp16.h"
EOF

# Assembly wrapper that embeds the finished PCH blob and its size under the
# global symbols __hip_pch / __hip_pch_size. $tmp expands here on purpose
# (unquoted heredoc delimiter).
cat >"$tmp/hip_pch.mcin" <<EOF
.type __hip_pch,@object
.section .hip_pch,"aMS",@progbits,1
.data
  .globl __hip_pch
  .globl __hip_pch_size
  .p2align 3
__hip_pch:
  .incbin "$tmp/hip.pch"
__hip_pch_size:
  .long __hip_pch_size - __hip_pch
EOF

set -x
# Preprocess for the device, append the attribute macros, build the PCH with
# clang -cc1, then wrap it into hip_pch.o. All derived paths are quoted so
# install prefixes containing spaces survive; $tmp is only removed when the
# whole chain succeeded (left behind for debugging otherwise).
"$LLVM_DIR/bin/clang" -O3 --rocm-path="$HIP_INC_DIR/.." -std=c++17 -nogpulib -isystem "$HIP_INC_DIR" -isystem "$HIP_BUILD_INC_DIR" -isystem "$HSA_DIR/include" --cuda-device-only -x hip "$tmp/hip_pch.h" -E >"$tmp/pch.cui" &&
cat "$tmp/hip_macros.h" >> "$tmp/pch.cui" &&
"$LLVM_DIR/bin/clang" -cc1 -O3 -emit-pch -triple amdgcn-amd-amdhsa -aux-triple x86_64-unknown-linux-gnu -fcuda-is-device -std=c++17 -fgnuc-version=4.2.1 -o "$tmp/hip.pch" -x hip-cpp-output - <"$tmp/pch.cui" &&
"$LLVM_DIR/bin/llvm-mc" -o hip_pch.o "$tmp/hip_pch.mcin" --filetype=obj &&
rm -rf "$tmp"
| true
|
8711c0cb7bf11b0559287d095fe1322a8c48dd5b
|
Shell
|
naoki-mizuno/ds4_driver
|
/run_docker.bash
|
UTF-8
| 525
| 3.25
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Run the ds4_driver Docker image. An optional first argument points at a
# local checkout of ds4_driver, which is bind-mounted into the container's
# underlay workspace and built there (development mode).
DS4_DRIVER_LOCAL_PATH="${1}"

# Override with local copy of ds4_driver (for development)
if [[ -n $DS4_DRIVER_LOCAL_PATH ]]; then
    _ws_prefix=/opt/underlay_ws
    # Quote the realpath argument: an unquoted path with spaces would be
    # word-split and produce a broken -v mount spec.
    docker run -it \
        -v "/dev:/dev" \
        --privileged \
        -v "$( realpath "$DS4_DRIVER_LOCAL_PATH" ):$_ws_prefix/src/ds4_driver" \
        naomiz/ds4_driver:humble \
        bash -c "colcon build && bash -i"
else
    docker run -it \
        -v "/dev:/dev" \
        --privileged \
        naomiz/ds4_driver:humble \
        bash
fi
| true
|
2ff710d8ef1ac2865b9f0f12a13a79017a894133
|
Shell
|
tejusingam/5143-OpSys-singam
|
/versiona.sh
|
UTF-8
| 108
| 2.578125
| 3
|
[] |
no_license
|
#!/bin/bash
# Copy the given file to a date-prefixed backup named YYYY-MM-DD_<file>,
# echoing the new name.
# Usage: versiona.sh <file>

if [ -z "$1" ]; then
    echo "Usage: $0 <file>" >&2
    exit 1
fi

sample="$1"
date_stamp=$(date +%Y-%m-%d)        # $(...) instead of deprecated backticks
modified="${date_stamp}_${sample}"
echo "$modified"
# -- stops option parsing in case the file name starts with a dash.
cp -- "$sample" "$modified"
| true
|
66da2b729a59343372267c5e2604de6f6b640125
|
Shell
|
th-m/fullstack-code-gen
|
/generator/openapi/openapi.sh
|
UTF-8
| 436
| 3
| 3
|
[] |
no_license
|
#!/bin/bash
# Generate an OpenAPI document for every gateway swagger spec.
# Expects $gatewayTmp (input specs) and $openapiTmp (output root) in the
# environment, and swagger-codegen-cli.jar present in the image.

# prevent using docs copied in docker volume
rm -rf "$openapiTmp"

for gateway in "$gatewayTmp"/*.json; do
    echo "$gateway"
    # -- guards against spec names beginning with a dash.
    file=$(basename -- "$gateway")
    mkdir -p "$openapiTmp/$file"
    java -jar /swagger-codegen-cli.jar generate -i "$gateway" -l openapi -o "$openapiTmp/$file"
done

# remove all the extra junk
find "$openapiTmp" -type f -not -name 'openapi.json' -delete
find "$openapiTmp" -type d -iname '.swagger-codegen' -delete
| true
|
a35d2ae863c71b0d5e0197a143ff808aace9aa1d
|
Shell
|
orsosa/M_cls
|
/extract_files.sh
|
UTF-8
| 196
| 3.203125
| 3
|
[] |
no_license
|
#!/bin/bash
# Extract every */p*/*.tar.gz archive under the given directory, each one
# into the directory that contains it.
if [ -z "$1" ];then
    echo "you must supply a directory"
    exit 1
fi
cd "$1" || exit 1
# Glob directly instead of parsing `ls` output through xargs, which breaks
# on file names containing whitespace or glob characters.
for archive in */p*/*.tar.gz; do
    [ -e "$archive" ] || continue   # glob matched nothing
    tar -C "$(dirname -- "$archive")" -xvzf "$archive"
done
cd ..
| true
|
ff78cd6cf30e6925b438071f87957596118d6814
|
Shell
|
gerswin/ihomeaddons
|
/sync/run.sh
|
UTF-8
| 259
| 2.734375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
set -e

# Home Assistant add-on entry point: read the add-on options and report
# what would be synced (the copy itself is currently faked).
readonly CONFIG_PATH=/data/options.json

# Print the value of a single top-level option from the options file.
get_option() {
    jq --raw-output ".$1" "$CONFIG_PATH"
}

TARGET=$(get_option target)
USERNAME=$(get_option username)
PASSWORD=$(get_option password)

echo "fake copy from /config to $TARGET@$USERNAME"
| true
|
573e631bc76560f11bde5fd83501850b4734c2d5
|
Shell
|
tienex/Darwin-0.3
|
/ntp-1/ntp/scripts/stats/summary.sh
|
UTF-8
| 1,819
| 3.828125
| 4
|
[
"NTP"
] |
permissive
|
#!/bin/sh
#
# Script to summarize ipeerstats, loopstats and clockstats files
#
# This script can be run from a cron job once per day, week or month. It
# runs the file-specific summary script and appends the summary data to
# designated files.
#
DATE=`date +19%y%m%d`
# NOTE(review): the "19" century prefix above and the 199[4-9] globs below
# are Y2K-era; stats files dated after 1999 will not match -- confirm
# before reusing this script.
# S is the (optional) S statistics package; S.in/S.out accumulate a batch
# job for it, rebuilt from scratch on every run.
S=/usr/local/bin/S
SIN=S.in
SOUT=S.out
LOOP=loop_summary
PEER=peer_summary
CLOCK=clock_summary
rm -f $SIN $SOUT
#
# Summarize loopstats files
#
for f in loopstats.[12][0-9][0-9][0-9][0-1][0-9][0-3][0-9]; do
d=`echo $f | cut -f2 -d.`
# Skip today's file ($DATE == $d): it is still being written.
if [ -f $f ] && [ $DATE != $d ]; then
echo " " >>$LOOP
echo $f >>$LOOP
awk -f loop.awk $f >>$LOOP
# If S is installed, queue the file for S processing (the generated batch
# job deletes it afterwards); otherwise remove it now.
if [ -f $S ]; then
echo "file1<-"\"${f}\" >>$SIN
echo "source("\""loop.S"\"")" >>$SIN
echo "unix("\""rm ${f}"\"")" >>$SIN
else
rm -f $f
fi
fi
done
#
# Summarize peerstats files
#
for f in peerstats.199[4-9][0-1][0-9][0-3][0-9]; do
d=`echo $f | cut -f2 -d.`
if [ -f $f ] && [ $DATE != $d ]; then
echo " " >>$PEER
echo $f >>$PEER
awk -f peer.awk $f >>$PEER
rm -f $f
fi
done
#
# Summarize clockstats files
#
for f in clockstats.199[4-9][0-1][0-9][0-3][0-9]; do
d=`echo $f | cut -f2 -d.`
if [ -f $f ] && [ $DATE != $d ]; then
echo " " >>$CLOCK
echo $f >>$CLOCK
awk -f clock.awk $f >>$CLOCK
# With a reference clock device present, split per-driver stats for the
# plotting stage below.
# NOTE(review): the glob expands inside [ ]; if several /dev/gps* nodes
# exist the test receives multiple arguments and errors out -- verify.
if [ -f /dev/gps[0-9] ]; then
awk -f itf.awk $f >itf.$d
awk -f etf.awk $f >etf.$d
awk -f ensemble.awk $f >ensemble.$d
awk -f tdata.awk $f >tdata.$d
fi
rm -f $f
fi
done
#
# Process clockstat files with S and generate PostScript plots
#
for f in itf etf ensemble tdata; do
for d in ${f}.199[4-9][0-1][0-9][0-3][0-9]; do
if [ -f $d ]; then
if [ -f $S ]; then
echo "file1<-"\"${d}\" >>$SIN
echo "source("\"${f}.S\"")" >>$SIN
echo "unix("\""rm ${d}"\"")" >>$SIN
else
rm -f $d
fi
fi
done
done
# Run the accumulated S batch job, if any input was queued above.
if [ -f $SIN ]; then
$S BATCH $SIN $SOUT
fi
| true
|
bf83f273b0933e657cb2796b45cdbc2af4b51502
|
Shell
|
chakra-project/packages-core
|
/clean-builddir.sh
|
UTF-8
| 1,243
| 3.453125
| 3
|
[] |
no_license
|
#!/bin/bash
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# setup
#
# Name shown in the banner output by the sourced `title` helper.
_script_name="clean build pkgs"
_build_arch="$_arch"
# Current repo = last component of $PWD (parameter expansion instead of
# forking `pwd | awk`; same result for normal working directories).
_cur_repo=${PWD##*/}
_needed_functions="config_handling messages"
# load functions (word splitting on $_needed_functions is intentional)
for subroutine in ${_needed_functions}
do
source _buildscripts/functions/${subroutine}
done
#
# main
#
cleanup_pkgs() {
title2 "Cleaning build packages"
# Guard the pushd: if _repo/build is missing, the rm below must NOT run
# in whatever directory we currently happen to be in.
pushd _repo/build/ &>/dev/null || { echo "cleanup_pkgs: _repo/build not found" >&2; return 1; }
status_start "_repo/build"
rm -rf *.tar.gz &>/dev/null
status_done
popd &>/dev/null
}
#
# startup
#
title "${_script_name}"
check_configs
load_configs
cleanup_pkgs
title "All done"
newline
| true
|
62dd1b281e5d48fbc5d8d37cece5c8532b458b02
|
Shell
|
yukihane/prefs
|
/install/git-from-src-on-ubuntu.sh
|
UTF-8
| 559
| 3.203125
| 3
|
[] |
no_license
|
#!/bin/bash
# Build git from source on Ubuntu and install it under /usr/local.
# Options live in `set` (not the shebang) so they survive `bash script.sh`.
set -eux

git_ver=git-2.23.0

# Docker images often lack sudo and run setup as root; ${sudo} expands to
# the sudo binary when present and to nothing otherwise. It must stay
# UNQUOTED below so an empty value disappears instead of being passed as a
# command name. `|| true` keeps set -e from aborting when sudo is absent.
sudo=$(command -v sudo || true)

${sudo} apt -y install gcc make autoconf zlib1g-dev libcurl4-openssl-dev libexpat1-dev libssl-dev gettext

pushd /tmp
curl -L -o git-src.tar.xz "https://mirrors.edge.kernel.org/pub/software/scm/git/${git_ver}.tar.xz"
tar xf git-src.tar.xz
cd "${git_ver}"
make configure
./configure --prefix=/usr/local
make
${sudo} make install
cd ..
rm -rf git-src.tar.xz "${git_ver}"
unset git_ver
unset sudo
popd
| true
|
6ce31fbd667bc5e2ad2107f05890527c692b1409
|
Shell
|
dwxie/salt
|
/checkout.sh
|
UTF-8
| 289
| 2.828125
| 3
|
[] |
no_license
|
PATH="${HOME}/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
export PATH

# [global]
SCRIPT_PATH="$(cd "$(dirname "$0")"; pwd)"
SCRIPT_NAME="$(basename "$0")"
PARA_NUM="$#"
PROC_PID="$$"

# Run the sibling checkout scripts from this script's own directory.
# The cd is guarded: if it fails we must not execute ./checkout_*.sh from
# whatever directory we happen to be in.
cd "${SCRIPT_PATH}" || exit 1

./checkout_salt.sh
./checkout_pillar.sh
./checkout_state.sh
| true
|
4805db8edbd33c6a4a350d800c60df9543b1d933
|
Shell
|
sablesoft/phalcon-weather
|
/docker/dump/bin/import
|
UTF-8
| 162
| 3.140625
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# Import a SQL dump (path given as $1) into the configured MySQL database.
if [ -f "$1" ]; then
    mysql -uroot -p"${MYSQL_ROOT_PASSWORD}" "${MYSQL_DATABASE}" -vvv < "$1";
else
    # Report misuse on stderr and exit non-zero so callers can detect it
    # (previously the message went to stdout and the script exited 0).
    echo "Dump file not found." >&2
    exit 1
fi
| true
|
76a86ef8e9b2a869af020bddbb381956fc2133f1
|
Shell
|
dmadisetti/dylanmadisetti
|
/toolbelt.sh
|
UTF-8
| 2,457
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/sh
show_help(){
# Print usage. A quoted here-document replaces echo: echo's handling of
# "\n" escapes differs between shells (bash prints them literally), so the
# original help text came out garbled under bash.
cat <<'EOF'

Why Hello there! You must be looking for help

The Flags:
 r - run
 t - test
 d - deploy
 s - setup
 p - ci push
 c - clean
 m - manual deploy
 e - letsencrypt certificate

Chain em together as you see fit

EOF
}
setup(){
# One-time CI environment setup: test dependencies, the Google App Engine
# SDK, and the pre-built PDF resume.
pip install -I pytest==2.2.0
pip install nosegae
pip install webtest
# Scrape the latest featured GAE SDK zip name from the bucket listing,
# then download and unpack it into the working directory.
export FILE=$(curl https://storage.googleapis.com/appengine-sdks/ | grep -oP '(?<=featured/)google_appengine_[^\<]*' | head -1)
curl -O https://storage.googleapis.com/appengine-sdks/featured/$FILE;
unzip -q $FILE;
mkdir -p static/challenge;
# Build the resume PDF from LaTeX inside the tectonic container, with the
# resume sources bind-mounted from the CI checkout.
docker pull dxjoke/tectonic-docker
docker run --mount src=$TRAVIS_BUILD_DIR/static/resumes,target=/usr/src/tex,type=bind dxjoke/tectonic-docker /bin/sh -c "tectonic DylanMadisetti.tex"
}
run(){
# Serve the app locally with the GAE dev server.
google_appengine/dev_appserver.py --allow_skipped_files 1 ./app.yaml;
}
try(){
# Run the test suite under nose with the GAE plugin.
nosetests --with-gae --gae-lib-root=google_appengine --gae-application=./;
}
manual(){
# Interactive deploy: appcfg prompts for credentials for the given email.
google_appengine/appcfg.py --email=dylan.madisetti@gmail.com update ./
}
encrypt(){
# Interactive Let's Encrypt flow: the operator runs letsencrypt-auto in a
# second terminal and pastes the HTTP-01 challenge token here; it is
# published via a manual deploy, then the issued cert/key are exported for
# upload to GAE.
echo "Run 'letsencrypt-auto -a manualcertonly' Agree to everything, and right before it verifies, paste the key/secret in here and hit enter: "
read secret
echo $secret > static/challenge/$(echo $secret | grep -oP ^[^\.]*)
# if/else rather than `manual && { ... } || { ... }`: with the &&/|| form
# the failure branch ALSO runs when any command in the success branch
# fails, not only when `manual` itself fails.
if manual; then
echo "Hit enter on other terminal now.\n Did it work?"
read verified
sudo cat /etc/letsencrypt/live/www.dylanmadisetti.com/fullchain.pem > cert.pem;
sudo openssl rsa -inform pem -in /etc/letsencrypt/live/www.dylanmadisetti.com/privkey.pem -outform pem > key.pem;
echo 'Also most there. Upload keys to GAE: https://console.cloud.google.com/appengine/settings/certificates'
else
echo "Something broke....";
fi
}
deploy(){
# Non-interactive deploy for CI; $PASSWORD is piped to appcfg via --passin.
echo $PASSWORD | google_appengine/appcfg.py --no_oauth2 --email=dylan.madisetti@gmail.com --passin update ./
}
push(){
# CI entry point: run the tests, then deploy only when not on a branch.
try || exit 1;
# NOTE(review): this grep is a rough "on master" check -- it matches a
# starred branch whose character after "* " is outside the set
# m,a,s,t,e,r,(,) and deploys only when that match FAILS; verify it
# behaves as intended for branch names sharing those letters.
git branch | grep "\*\ [^(master)\]" || {
deploy;
}
}
clean(){
# Remove the downloaded SDK, compiled bytecode, and challenge tokens.
rm -rf google_appengine*;
rm -rf *.pyc;
rm static/challenge/*;
}
# Dispatch each flag to its action; flags may be combined (e.g. -tc).
# Same optstring and same arms as before, collapsed to one line each.
while getopts "h?rtpscdmex:" opt; do
    case "$opt" in
        h|\?) show_help ;;
        s) setup ;;
        d) deploy ;;
        r) run ;;
        t) try ;;
        p) push ;;
        c) clean ;;
        m) manual ;;
        e) encrypt ;;
    esac
done
| true
|
9a6f5e288a8c101042c2dfe1c4dcdc9ba903ff42
|
Shell
|
himozhang/cmd-tools
|
/cmd-tools-client/bin/cmd-client.sh
|
UTF-8
| 807
| 3.296875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Launch the cmd-tools client: build a classpath from every file under
# <app>/lib and run the main class with the caller's arguments.

# script directory
SCRIPT_PATH=$(readlink -f "$0")
# bin directory, It's like /opt/appname/bin, also same with SCRIPT_PATH
BIN_DIR=$(dirname "$SCRIPT_PATH")
# work directory
WORK_DIR=$(dirname "$BIN_DIR")
# where is app root path in location
APP_ROOT=$WORK_DIR
# app mainclass
MAINCLASS=com.lefu.cmdtools.client.CmdClient
# where is app lib path in location
LIB_DIR=${APP_ROOT}'/lib'

# Collect jars by globbing instead of parsing `ls` output, which breaks on
# whitespace in file names; each entry is already the full path.
FILE_LIST=''
for entry in "$LIB_DIR"/*
do
    if [ -f "$entry" ] ; then
        FILE_LIST=${FILE_LIST}${entry}':'
    fi
done

if [ -z "$JAVA_HOME" ]; then
    echo "JAVA_HOME not found." >&2
    exit 1
fi

JAVA_CMD=${JAVA_HOME}'/bin/java'
# "$@" preserves each caller argument as a single word; the original $*
# re-split arguments containing spaces.
"$JAVA_CMD" -cp "$FILE_LIST" "$MAINCLASS" "$@"
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.