blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 4
115
| path
stringlengths 2
970
| src_encoding
stringclasses 28
values | length_bytes
int64 31
5.38M
| score
float64 2.52
5.28
| int_score
int64 3
5
| detected_licenses
listlengths 0
161
| license_type
stringclasses 2
values | text
stringlengths 31
5.39M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
930499d4e4421ab01e681adfd994f002d83a9237
|
Shell
|
steveyiyo/IRR-AS-SET-Grapher
|
/lookup.sh
|
UTF-8
| 658
| 3.34375
| 3
|
[] |
no_license
|
#!/bin/bash
# Log helper: write the message to stderr so stdout stays clean for the
# graph edges this script emits.
err() {
    >&2 echo "$1"
}
# Expand an AS-SET recursively: query RADB for its "members:" line, print one
# "PARENT->CHILD" edge per member (with - and : mapped to _ for graph node
# names), then recurse into each member by re-invoking this script.
#   $1 = object to expand
#   $2 = space-separated list of objects already visited (loop guard)
function asset(){
    whois -h whois.radb.net "$1" | grep -oP 'members:\ +\KAS.*' | tr ',' '\n' | tr -d ' ' | while read -r line
    do
        echo "$(echo "$1" | sed 's/[-:]/_/g')->$(echo "$line" | sed 's/[-:]/_/g')"
        # Recursion guard. bug fix: exact whole-token match instead of the
        # original substring grep, so e.g. AS1 no longer falsely matches AS11
        # or AS-FOO matching AS-FOO-MORE.
        echo "$2" | tr ' ' '\n' | grep -qxF "$1" && err "$1 Abort" && continue
        "$0" "$line" "$2 $1"
    done
}
# Report a plain (leaf) AS number on stderr via err(); leaves are not expanded.
as(){
    err "$1"
}
[ -z "$(echo "$1" | grep '^AS[0-9\-]')" ] && err "Not an AS/AS-SET" && exit 1
[ -n "$(echo "$1" | grep '^AS-.*$')" ] || [ -n "$(echo "$1" | grep '^AS[0-9]\+:.*$')" ] && err "$1 AS-SET" && asset "$1" "$2"
[ -n "$(echo "$1" | grep '^AS[0-9]\+$')" ] && err "$1 Normal AS" && as "$1"
| true
|
1260788d38e2e355583a13733ca3f1766e353999
|
Shell
|
softfire-eu/openimscore-packages
|
/compress.sh
|
UTF-8
| 487
| 2.671875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Package each OpenIMSCore component directory (Metadata.yaml, vnfd.json,
# scripts/) into its own tarball under ./tars.
dest="tars"
if [ ! -d $dest ]; then
    echo "creating $dest folder"
    mkdir -p $dest
fi
# Relative to the component dir we are about to pushd into.
dest="../$dest"
# The original repeated the same pushd/tar/popd sequence five times;
# express it once.
for component in bind9 icscf pcscf scscf fhoss; do
    # bug fix: if a component dir is missing, the original still ran tar
    # (in the wrong directory) and popd; skip it instead.
    pushd $component || { echo "skipping missing $component" >&2; continue; }
    tar -cvf $dest/$component.tar Metadata.yaml vnfd.json scripts
    popd
done
| true
|
1ac284f50c41d3e55ddad313ddf0209113ef181d
|
Shell
|
yohanakh/insightedge
|
/insightedge-integration-tests/tests/src/test/resources/docker/maven-install-libs/bootstrap.sh
|
UTF-8
| 76
| 2.515625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# With "-d" as the first argument, block forever (e.g. to keep a container
# running); with anything else the script just exits.
case "${1:-}" in
    -d)
        while :; do sleep 1000; done
        ;;
esac
| true
|
71145bc004aea4faa8f28cbe4b2d63577a8c20ea
|
Shell
|
JasonLiu798/bashlib
|
/bin/bssh
|
UTF-8
| 671
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
# bssh - run one command over ssh on every host listed in $UBIN/servers.dat.
# servers.dat fields (whitespace separated), e.g.:
#   dp113 192.168.143.113 22 work work
#   [0]=name [1]=ip [2]=port [3]=user [4]=pass
source ~/bin/lib.sh
CMD=$1
if [ $# -lt 1 ]; then
    echo "$0 must have two parameter
example:
$0 [server_file] [command] "
    exit;
fi
SRVFILE=$UBIN/servers.dat
# bug fix: redirect the file into the loop instead of `cat | while`; with the
# pipe, ssh inherited the pipe as stdin and swallowed the remaining server
# lines, so only the first host was ever processed. read -r also stops
# backslash mangling.
while read -r line
do
    resisempty=`isempty $line`       # isempty comes from lib.sh
    if [ "$resisempty" = "true" ]; then
        continue
    fi
    # Intentional word splitting: each whitespace-separated field becomes
    # one array element.
    declare -a params=($line)
    echo "IP:${params[1]} PORT:${params[2]} USER:${params[3]} PASS:${params[4]}"
    set -x
    # -n: keep ssh away from stdin (see loop note above).
    ssh -n -p "${params[2]}" "${params[3]}@${params[1]}" "$CMD"
    set +x
done < "$SRVFILE"
| true
|
0023d94004398650e82f31982a2baab4a346f5ce
|
Shell
|
abx67/detectron2
|
/docker/remove_container.sh
|
UTF-8
| 299
| 3.875
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Remove the docker container named by the first argument, falling back to
# DEFAULT_CONTAINER_NAME from docker/config_docker.sh when no name is given.
# Fail on first error.
set -e
. ./docker/config_docker.sh
# ${1:-...} collapses the original "if empty then default" branch.
CONTAINER_NAME="${1:-${DEFAULT_CONTAINER_NAME}}"
echo "Remove container ${CONTAINER_NAME}"
docker rm --force ${CONTAINER_NAME}
| true
|
c09fb455d261fb08ac88ae263793ca38927af6db
|
Shell
|
Kimxons/django-project-template
|
/utils/web.sh
|
UTF-8
| 225
| 2.84375
| 3
|
[] |
no_license
|
#!/bin/bash
# Entrypoint: wait until PostgreSQL accepts connections, then start the
# Django development server on all interfaces, port 8000.
# Returns is_postgres_ready.py's exit status (0 once Postgres is reachable);
# the helper script is expected to live next to this one.
function postgres_ready(){
python3 $(dirname "$0")/is_postgres_ready.py
}
# Poll once per second until the check passes; progress goes to stderr.
until postgres_ready; do
>&2 echo "Postgres is unavailable - sleeping"
sleep 1
done
python3 manage.py runserver 0.0.0.0:8000
| true
|
5c89f94d26231ab73ebd52b601785f816879c2bf
|
Shell
|
bincker/Android-forensic-timeline
|
/jsonizer/convert_to_json.sh
|
UTF-8
| 470
| 4
| 4
|
[] |
no_license
|
#!/bin/bash
# Convert every file whose name contains "pjson" under input_path into a
# .json file in output_path, using ./json_formatter.py.
MASK="pjson"
if [ $# -lt 2 ]
then
    echo 'Usage: '$0' input_path output_path'
    # bug fix: "exit -1" is not a valid exit status (it showed up as 255);
    # use the conventional 1.
    exit 1
fi
IN_PATH=$1
OUT_PATH=$2
if [ ! -d "$OUT_PATH" ]
then
    mkdir -p "$OUT_PATH"
fi
# Glob instead of parsing `ls -l` output, which broke on filenames with
# spaces and depended on ls's column layout.
for path in "$IN_PATH"/*"$MASK"*
do
    [ -e "$path" ] || continue        # glob matched nothing
    file=${path##*/}                  # basename
    output="${file%%.*}.json"         # name up to first dot, as awk -F. did
    echo -ne "converting... $IN_PATH/$file\t"
    ./json_formatter.py "$IN_PATH/$file" "$OUT_PATH/$output"
    echo "ok"
done
echo "done!"
| true
|
0546e9d8d8b073eda992d319aae68e0c948ceb86
|
Shell
|
emartineznunez/AutoMeKin
|
/scripts/HLscripts/IRC_ANALYSIS.sh
|
UTF-8
| 4,785
| 3.421875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# IRC analysis for the high-level (HL) stage of AutoMeKin: classify the
# IRC-optimized endpoints under $tsdirhl/IRC as minima or products, screen
# out duplicates, and populate the minnrhl/prodhl sqlite databases.
# NOTE(review): read_input, err_report, cleanup, check_freq_min,
# get_data_hl_output_mins, screen_min_hl and stats_hl_mins all come from
# utils.sh (they also set $tsdirhl, $natom, $avgerr, $bigerr, $program_hl,
# $nA, $nfrag_th, $ok) — confirm against utils.sh.
source utils.sh
cwd=$PWD
sharedir=${AMK}/share
# The script must be run from a folder containing the amk.dat input file.
if [ -f amk.dat ];then
echo "amk.dat is in the current dir"
inputfile=amk.dat
else
echo "amk input file is missing. You sure you are in the right folder?"
exit
fi
echo "Input file" $inputfile
exe=$(basename $0)
##Reading HL stuff
read_input
##
##max values 0.001 and 1
# Clamp the duplicate-screening error tolerances to their maxima.
avgerr=$(echo $avgerr | awk '{avg=$1;if(avg>0.001) avg=0.001;print avg}' )
bigerr=$(echo $bigerr | awk '{big=$1;if(big>1) big=1;print big}' )
#On exit remove tmp files
tmp_files=($tsdirhl/IRC/*.chk black* minfailed_list labels mingeom sprint.dat sprint.out sprint.* deg.out deg_form.out deg* tmp* ConnMat ScalMat)
trap 'err_report $LINENO' ERR
trap cleanup EXIT INT
# Fresh PRODs and MINs/norep directories for this run.
if [ ! -d "$tsdirhl/PRODs" ]; then
echo "$tsdirhl/PRODs does not exist. It will be created"
mkdir $tsdirhl/PRODs
else
echo "$tsdirhl/PRODs already exists. Remove PR files"
rm -f $tsdirhl/PRODs/PR*
fi
if [ ! -d "$tsdirhl/MINs/norep" ]; then
echo "$tsdirhl/MINs/norep does not exist. It will be created"
mkdir $tsdirhl/MINs/norep
else
echo "$tsdirhl/MINs/norep already exists."
rm -r $tsdirhl/MINs/norep
mkdir $tsdirhl/MINs/norep
fi
#remove some stuff at the beginning (in case of repeating the same script)
rm -f $tsdirhl/MINs/*min*ts*
#
echo "Nomenclature" > $tsdirhl/MINs/names_of_minima
echo "Screening" > $tsdirhl/MINs/minlist_screened
### and prodhl minnrhl table
# Recreate both result databases from scratch (drop + create).
sqlite3 ${tsdirhl}/PRODs/prodhl.db "drop table if exists prodhl; create table prodhl (id INTEGER PRIMARY KEY,natom INTEGER, name TEXT,energy REAL,zpe REAL,g REAL,geom TEXT,freq TEXT, formula TEXT);"
sqlite3 ${tsdirhl}/MINs/norep/minnrhl.db "drop table if exists minnrhl; create table minnrhl (id INTEGER PRIMARY KEY,natom INTEGER, name TEXT,energy REAL,zpe REAL,g REAL,geom TEXT,freq TEXT, sigma INTEGER);"
echo "List of bad MINIMA" > minfailed_list
echo "PR list" > $tsdirhl/PRODs/PRlist
n=0
nmin=0
npro=0
nrm=0
cp $tsdirhl/min0* $tsdirhl/IRC
# First pass: every IRC endpoint that optimized cleanly is screened as a
# minimum; the rest go to minfailed_list for the product analysis below.
for i in $(ls ${tsdirhl}/IRC/min*.log)
do
((n=n+1))
name=$(basename $i .log)
check_freq_min
if [ $ok -eq 1 ]; then
echo "$name optimized correctly"
get_data_hl_output_mins
#Now we screen the list to rename duplicates
screen_min_hl
else
echo "$name check later on-->this is a product or problem in opt"
echo $name.rxyz >> minfailed_list
continue
fi
done
#reduce output
stats_hl_mins
##################
echo "Now running MINFAILED.sh"
##################
cp $tsdirhl/PRODs/PRlist $tsdirhl/PRODs/PRlist.old
npro=$(awk '{npro=$2};END{print npro}' $tsdirhl/PRODs/PRlist )
#file to screen the failed minima and/or products
# minfailed_list has one header line, hence the "- 1".
nfail=$(wc -l minfailed_list | awk '{print $1-1}')
if [ $nfail -eq 0 ]; then
echo "You are lucky. All minima have been optimized correctly. Exit here"
exit
else
echo "number of minima that failed and/or are products" $nfail
fi
# Second pass: for each failed minimum, rebuild its geometry, compute the
# fragment count via the connectivity matrix, and register multi-fragment
# structures as products in prodhl.db.
for i in $(awk 'NR>1{print $0}' minfailed_list)
do
name=$(basename $i .rxyz)
# Geometry extraction depends on which HL electronic-structure code ran.
if [ "$program_hl" = "g09" ] || [ "$program_hl" = "g16" ]; then
geom="$(get_geom_g09.sh ${tsdirhl}/IRC/${name}.log)"
elif [ "$program_hl" = "qcore" ]; then
if [ -f ${tsdirhl}/IRC/${name}_opt.xyz ]; then
xyz=${tsdirhl}/IRC/${name}_opt.xyz
else
xyz=${tsdirhl}/IRC/${name}.xyz
fi
geom="$(awk 'NR>2{print $0}' $xyz)"
fi
#Now we screen the list to rename duplicates
# Write an xyz-style file: atom count, blank comment line, coordinates.
echo $natom > mingeom
echo '' >> mingeom
echo "$geom" >>mingeom
##If the calc. was not done skip this minimum
anlf=$(wc -l mingeom | awk '{print $1}')
nlmg=$(($natom+2))
if [ $anlf -lt $nlmg ]; then
echo "Double check this opt: $name"
continue
fi
##
# Connectivity / fragment analysis via createMat.py + sprint2.exe.
echo "1" $natom > sprint.dat
createMat.py mingeom 3 $nA
cat ConnMat >> sprint.dat
sprint2.exe <sprint.dat >sprint.out
paste <(awk 'NF==4{print $1}' mingeom) <(deg.sh) >deg.out
deg_form.sh > deg_form.out
##
echo "This is a just to see if there is more than one fragment" > $tsdirhl/MINs/${name}_data
#
format.sh $name $tsdirhl/MINs ${nfrag_th}
ndis=$(awk '{ndis=$1};END{print ndis}' $tsdirhl/MINs/${name}_data )
# echo $name $ndis
### mv MINs where there is 2 or more fragments already formed
if [[ ("$ndis" -gt "1") ]]
then
((npro=npro+1))
##remove this later on
namepr="PR"$npro"_"$name
##EMNinsert into prodhl.db
sqlite3 ${tsdirhl}/PRODs/prodhl.db "insert into prodhl (natom,name,energy,zpe,g,geom,freq) values ($natom,'$namepr',0,0,0,'$geom',0);"
# Only append to PRlist if this structure is not already listed.
lp=$(awk 'BEGIN{lp=1};/'$name'/{lp=0};END{print lp}' $tsdirhl/PRODs/PRlist )
if [[ ("$lp" -eq "1") ]]
then
echo "PROD" $npro $name.rxyz >> $tsdirhl/PRODs/PRlist
echo "Move structure $name to PROD # $npro"
else
echo "Structure $name already moved to PROD"
fi
else
echo "Double check this opt: $name"
fi
done
| true
|
b46de114f92669e5ba5fc49f3519495ef1d8dc93
|
Shell
|
syoyo/oi-build
|
/components/ipmitool/test_ipmitool
|
UTF-8
| 12,253
| 3.546875
| 4
|
[
"LicenseRef-scancode-bsd-3-clause-sun"
] |
permissive
|
#!/bin/sh
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright (c) 2008, 2011, Oracle and/or its affiliates. All rights reserved.
#
# ----------------------------------------------------------------------------
# test_ipmitool confirm basic ipmitool functionality works
#
# exercise a commonly used subset of all ipmitool commands,
# for each such command:
# save its output to a *.out file
# confirm ipmitool exit status is 0 (good)
# confirm *.out contains some piece of expected data
# ----------------------------------------------------------------------------
cmd_name="`basename $0`" # the name of this command
cmd_err="${cmd_name}: error:" # the (fatal) error header
cmd_warn="${cmd_name}: warning:" # the warning header
cmd_info="${cmd_name}: info:" # the informational header
cmd_query="${cmd_name}: query:" # the interrogative header
bin_path="/usr/sbin" # path(s) to ipmitool binary executable(s)
# ----------------------------------------------------------------------------
# Print command-line help for this test driver to stdout.
usage()
{
printf '%s\n' \
"usage: $cmd_name [-p <bin_path>]" \
" -p path to ipmitool binary" \
"purpose: confirm basic ipmitool functionality works;" \
" this is NOT a comprehensive test" \
"examples:" \
" $cmd_name -p \$CODEMGR_WS/proto/root_\`uname -p\`/usr/sbin"
} # usage()
# ----------------------------------------------------------------------------
# Dump the test environment (date, user, OS, release, PATH, ipmitool
# version) so failures can be reproduced later.
show_env()
{
echo "$cmd_info general environment:"
date ; id ; uname -a ; pwd ; echo "$PATH"
[ -r /etc/release ] && cat /etc/release
[ -r /etc/motd ] && grep bfu /etc/motd
ipmitool -V
} # show_env()
# ----------------------------------------------------------------------------
# Abort (exit 1) unless this machine's processor type is i386; the test
# talks to the local BMC, which this tool only supports on i386 for now.
enforce_i386()
{
proctype=`uname -p`
if [ "$proctype" != "i386" ]; then
echo "$cmd_err processor type $proctype != i386"
echo "$cmd_info future versions of this test tool may use"
echo " ipmitool -I lan -H <i386_host>"
echo "when run on $proctype but that is a future enhancement (TBD)"
exit 1
fi
} # enforce_i386()
# ----------------------------------------------------------------------------
# Check "ipmitool -V" runs and its output contains a version number.
test_opt_version()
{
tnm="version option"
ipmitool -V > ipmitool_V.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -i -c 'version.*[0-9][0-9]*\.[0-9]' ipmitool_V.out`
[ $cnt -le 0 ] && {
echo "$cmd_err $tnm output missing version" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_opt_version()
# ----------------------------------------------------------------------------
# Check "ipmitool help" mentions the chassis status subcommand.
test_cmd_help()
{
tnm="help command"
ipmitool help > ipmitool_help.out 2>&1
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -i -c 'chassis.*status' ipmitool_help.out`
[ $cnt -le 0 ] && {
echo "$cmd_err $tnm output missing chassis" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_help()
# ----------------------------------------------------------------------------
# Check "bmc info" reports an IPMI Version and "bmc getenables" lists OEM
# enables.
test_cmd_bmc()
{
tnm="bmc info command"
ipmitool bmc info > ipmitool_bmc_info.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -c 'IPMI Version' ipmitool_bmc_info.out`
[ $cnt -lt 1 ] && {
echo "$cmd_err $tnm output missing IPMI Version" ; exit 1
}
echo "$cmd_info $tnm passed"
tnm="bmc getenables command"
ipmitool bmc getenables > ipmitool_bmc_getenables.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -c 'OEM' ipmitool_bmc_getenables.out`
[ $cnt -lt 3 ] && {
echo "$cmd_err $tnm output too few OEM" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_bmc()
# ----------------------------------------------------------------------------
# Exercise the System Event Log: info, current time, and a full listing
# whose line count must match the reported number of entries.
test_cmd_sel()
{
tnm="sel info command"
ipmitool sel info > ipmitool_sel_info.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`egrep -c 'Version|Entries' ipmitool_sel_info.out`
[ $cnt -lt 2 ] && {
echo "$cmd_err $tnm output missing Version or Entries" ; exit 1
}
echo "$cmd_info $tnm passed"
tnm="sel time get command"
ipmitool sel time get > ipmitool_sel_time_get.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
# The SEL clock should report the current year.
nowyr=`date '+%Y'`
cnt=`grep -c "$nowyr" ipmitool_sel_time_get.out`
[ $cnt -lt 1 ] && {
echo "$cmd_err $tnm missing $nowyr" ; exit 1
}
echo "$cmd_info $tnm passed"
tnm="sel list command"
# With zero SEL entries there is nothing to list; pass trivially.
cnt=`grep 'Entries' ipmitool_sel_info.out | awk '{print $NF}'`
[ $cnt -eq 0 ] && {
echo "$cmd_info $tnm passed" ; return 0
}
ipmitool sel list $cnt > ipmitool_sel_list.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt2=`wc ipmitool_sel_list.out | awk '{print $1}'`
[ $cnt2 -ne $cnt ] && {
echo "$cmd_err $tnm output lines $cnt2 != $cnt" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_sel()
# ----------------------------------------------------------------------------
# Check "chassis status" reports a System Power line.
test_cmd_chassis()
{
tnm="chassis status command"
ipmitool chassis status > ipmitool_chassis_status.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -c 'System Power' ipmitool_chassis_status.out`
[ $cnt -eq 0 ] && {
echo "$cmd_err $tnm output missing System Power" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_chassis()
# ----------------------------------------------------------------------------
# Check "fru print" lists at least one Sun Microsystems manufacturer entry
# (this test tool targets Sun/Oracle hardware).
test_cmd_fru()
{
tnm="fru print command"
ipmitool fru print > ipmitool_fru_print.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -i -c 'Manufacturer.*Sun Microsystems' ipmitool_fru_print.out`
[ $cnt -lt 1 ] && {
echo "$cmd_err $tnm output too few Sun Microsystems" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_fru()
# ----------------------------------------------------------------------------
# Check "lan print" shows ARP/MAC/IP fields and "lan alert print" shows a
# Destination.
test_cmd_lan()
{
tnm="lan print command"
ipmitool lan print > ipmitool_lan_print.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`egrep -c 'ARP|MAC|IP' ipmitool_lan_print.out`
[ $cnt -lt 3 ] && {
echo "$cmd_err $tnm output too few ARP|MAC|IP" ; exit 1
}
echo "$cmd_info $tnm passed"
tnm="lan alert print command"
ipmitool lan alert print > ipmitool_lan_alert_print.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -c 'Destination' ipmitool_lan_alert_print.out`
[ $cnt -lt 1 ] && {
echo "$cmd_err $tnm output missing Destination" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_lan()
# ----------------------------------------------------------------------------
# Check "channel info" reports a channel number.
test_cmd_channel()
{
tnm="channel info command"
ipmitool channel info > ipmitool_channel_info.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -i -c 'Channel 0x.*info' ipmitool_channel_info.out`
[ $cnt -lt 1 ] && {
echo "$cmd_err $tnm output missing Channel number" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_channel()
# ----------------------------------------------------------------------------
# Check "sensor list" shows plausible readings, then pick one sensor from
# the list and confirm "sensor get <name>" returns a single Sensor Reading.
test_cmd_sensor()
{
tnm="sensor list command"
ipmitool sensor list > ipmitool_sensor_list.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`egrep -i -c 'degrees|RPM|volts' ipmitool_sensor_list.out`
[ $cnt -lt 10 ] && {
echo "$cmd_err $tnm output too few degrees|RPM|volts" ; exit 1
}
echo "$cmd_info $tnm passed"
tnm="sensor get command"
cnt=`expr $cnt % 10`
# bug fix: when the sensor count is an exact multiple of 10, cnt % 10 is 0
# and "head -0" selected no sensor at all (empty $snm); fall back to the
# first sensor in that case.
[ $cnt -eq 0 ] && cnt=1
# note: sensor names might have whitespace in them
snm=`egrep -i 'degrees|RPM|volts' ipmitool_sensor_list.out | \
head -$cnt | tail -1 | cut -d'|' -f1`
snm=`echo $snm` # drop any leading or trailing whitespace
ipmitool sensor get "$snm" > ipmitool_sensor_get.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`egrep -i 'degrees|RPM|volts' ipmitool_sensor_get.out | \
grep -c 'Sensor Reading'`
[ $cnt -ne 1 ] && {
echo "$cmd_err $tnm wrong Sensor Reading for $snm" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_sensor()
# ----------------------------------------------------------------------------
# Check "sdr list all" shows plausible readings, then pick a sensor from the
# middle of the list (cnt / 2) and confirm "sdr get <name>" returns one
# Sensor Reading.
test_cmd_sdr()
{
tnm="sdr list command"
ipmitool sdr list all > ipmitool_sdr_list_all.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`egrep -i -c 'degrees|RPM|volts' ipmitool_sdr_list_all.out`
[ $cnt -lt 10 ] && {
echo "$cmd_err $tnm output too few degrees|RPM|volts" ; exit 1
}
echo "$cmd_info $tnm passed"
tnm="sdr get command"
cnt=`expr $cnt / 2`
# note: sensor names might have whitespace in them
snm=`egrep -i 'degrees|RPM|volts' ipmitool_sdr_list_all.out | \
head -$cnt | tail -1 | cut -d'|' -f1`
snm=`echo $snm` # drop any leading or trailing whitespace
ipmitool sdr get "$snm" > ipmitool_sdr_get.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`egrep -i 'degrees|RPM|volts' ipmitool_sdr_get.out | \
grep -c 'Sensor Reading'`
[ $cnt -ne 1 ] && {
echo "$cmd_err $tnm wrong Sensor Reading for $snm" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_sdr()
# ----------------------------------------------------------------------------
# Check "power status" reports the system as powered on (we must be running
# on it, after all).
test_cmd_power()
{
tnm="power status command"
ipmitool power status > ipmitool_power_status.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -i -c 'power.*on' ipmitool_power_status.out`
[ $cnt -lt 1 ] && {
echo "$cmd_err $tnm output missing power on" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_power()
# ----------------------------------------------------------------------------
# Check "echo" round-trips a string; $$ makes the payload unique per run.
test_cmd_echo()
{
tnm="echo command"
ipmitool echo "hello world from pid $$" > ipmitool_echo.out
rc=$?
[ $rc -ne 0 ] && {
echo "$cmd_err $tnm failed with exit code $rc" ; exit $rc
}
cnt=`grep -c "pid $$" ipmitool_echo.out`
[ $cnt -ne 1 ] && {
echo "$cmd_err $tnm output missing my pid" ; exit 1
}
echo "$cmd_info $tnm passed"
} # test_cmd_echo()
# ----------------------------------------------------------------------------
# main()
# Parse -p <bin_path>, reject extra args, prepend bin_path to PATH so the
# chosen ipmitool wins, then run every check in order. Any failing check
# exits the script directly.
while getopts p: opt; do # {
case $opt in
p) bin_path="$OPTARG";;
-) break;;
# NOTE(review): getopts never yields a literal '-' option, so the arm
# above looks unreachable — confirm before removing.
\?) echo "$cmd_err bad option(s)" ; usage ; exit 22;;
esac
done # } while grabbing cmd line args
shift `expr $OPTIND - 1`
too_much="$1"
[ "$too_much" != "" ] && {
echo "$cmd_err too few/many args" ; usage ; exit 7
}
PATH="${bin_path}:$PATH"
export PATH
# ----------------------------------------------------------------------------
enforce_i386
show_env
# version 1.8.8 and later
test_opt_version
test_cmd_help
test_cmd_bmc
test_cmd_sel
test_cmd_chassis
test_cmd_fru
test_cmd_lan
test_cmd_channel
test_cmd_sensor
test_cmd_sdr
# version 1.8.10 and later
test_cmd_power
test_cmd_echo
echo "$cmd_info all tests passed" # if reach here then aok
| true
|
67ed93f7734795b9af6ce061ad629f345b2c31b9
|
Shell
|
isMTv/wget_linker
|
/wget_linker.sh
|
UTF-8
| 1,839
| 4
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Interactive menu around wget: mirror single pages into a chosen directory
# tree, show statistics, or create directories.
# required applications: wget, tree
clear
PS3=" --- Wget URL's Linker:"
# Menu; `select` loops until "quit" breaks out.
OPTIONS=("wget_link" "list_info" "create_dir" "quit")
select OPT in "${OPTIONS[@]}"
do
case $OPT in
"wget_link")
    # Prompt (in Russian) for page name, target directory and URL until
    # each is non-empty.
    while [[ "$LINK_NAME" = "" ]]; do
        echo "Как будет называться страница?"
        read -p " - Ввести имя: " 'LINK_NAME'
    done
    while [[ "$DIR" = "" ]]; do
        TREE_DIR="$(tree -d)"
        echo "-----"
        echo "$TREE_DIR"
        echo "-----"
        echo "Указать путь."
        read -p " - Ввести путь: " 'DIR'
    done
    while [[ "$URL" = "" ]]; do
        echo "Указать URL."
        read -p " - Ввести ссылку: " URL
    done
    if [[ -n "${LINK_NAME}" && "${DIR}" && "$URL" ]]; then
        # bug fix: quote the URL so '?', '&' or glob characters in it are
        # not interpreted by the shell.
        wget -q -p -E --convert-links -np -nd --default-page="${LINK_NAME}" -P "${DIR}" "$URL"
        # Reset state for the next menu round.
        unset LINK_NAME DIR URL
        echo " -- Готово!"
    fi
    ;;
"list_info")
    # bug fix: quote the -name pattern; the unquoted *.html was expanded by
    # the shell whenever matching files existed in the current directory.
    count_html="$(find . -name '*.html' | wc -l)"
    count_dir="$(find . -type d | wc -l)"
    count_file="$(find . -type f | wc -l)"
    size_total="$(du -sh)"
    echo "Линков: $count_html | Каталогов: $count_dir | Файлов: $count_file | Общий размер: $size_total"
    ;;
"create_dir")
    # Prompt for a path/name until non-empty, then create it.
    while [[ "$CREATE_DIR" = "" ]]; do
        TREE_DIR="$(tree -d)"
        echo "-----"
        echo "$TREE_DIR"
        echo "-----"
        echo "Создать каталог."
        read -p " - Ввести путь или имя: " CREATE_DIR
    done
    if [[ -n "$CREATE_DIR" ]]; then
        mkdir -p "$CREATE_DIR"
        # Reset state for the next menu round.
        unset CREATE_DIR
    fi
    ;;
"quit")
    break
    ;;
*) echo " -- Недопустимый вариант!";;
esac
done
| true
|
599ce987869f0b19ac8b20ff452e3809765cb220
|
Shell
|
cpmech/cloud
|
/scripts/git-pre-commit.sh
|
UTF-8
| 753
| 3.796875
| 4
|
[] |
no_license
|
#!/bin/sh
# Git pre-commit hook: type-check the project and lint (with auto-fix) the
# staged JS/TS files; block the commit on tsc or eslint failures.
# --diff-filter=d excludes deleted files; grep keeps only js/jsx/ts/tsx.
staged_files=$(git diff --cached --diff-filter=d --name-only | grep -E '\.(js|jsx|ts|tsx)$')
# skip if there are no js or ts files
if [ -z "$staged_files" ]; then
exit 0
fi
# run type-check
npm run --silent tsc
tsc_exit_code=$?
# check the tsc exit code
if [ $tsc_exit_code -ne 0 ]; then
echo "🥵 tsc failed"
exit 1
else
echo "👍 tsc"
fi
# run linter on staged files => save exit code for later
# NOTE(review): $staged_files is intentionally unquoted so each filename is
# a separate argument; paths containing spaces would break — confirm this
# is acceptable for the repo.
npm run --silent eslint -- $staged_files --quiet --fix
linter_exit_code=$?
# add files auto-fixed by the linter
git add $staged_files
# check linter exit code
if [ $linter_exit_code -ne 0 ]; then
echo "🥵 lint failed"
exit 1
else
echo "👍 lint"
fi
# return 0-exit code
echo "🎉 all good to go"
exit 0
| true
|
c8724e97335beba099353d8c0cf4b8086c12623e
|
Shell
|
pankajshrestha/Debian_on_Buffalo
|
/Stretch/installer_images/build/generate_images_armel-lowmem.sh
|
UTF-8
| 4,480
| 3.328125
| 3
|
[] |
no_license
|
#!/bin/bash
##requires uboot-tools, gzip, faketime, rsync, wget, cpio, libarchive-cpio-perl
# Build Debian-installer images for armel "lowmem" Buffalo Linkstation
# devices: fetch the stock netboot initrd, graft in extra kernel modules and
# helper tools, repack with xz, and wrap kernel+dtb pairs with mkimage.
# faketime pins timestamps so the images are reproducible.
dtb_dir="../../device_trees"
tools_dir="../../../Tools"
distro="stretch"
mkdir output 2>/dev/null
rm output/*
rm -r armel-payload/* 2>/dev/null
mkdir armel-files 2>/dev/null
mkdir -p armel-payload/source/ 2>/dev/null
cd armel-files
if [ -d "tmp" ]; then
rm -r "tmp/"
fi
# Fetch the stock installer initrd and derive its kernel version from the
# lib/modules/<version> path inside the cpio archive, then download the
# matching linux-image .deb to harvest extra modules from.
wget -N "http://ftp.nl.debian.org/debian/dists/$distro/main/installer-armel/current/images/kirkwood/netboot/initrd.gz" 2>/dev/null
kernel_ver="$(zcat initrd.gz | cpio -t | grep -m 1 lib/modules/ | gawk -F/ '{print $3}')"
wget -N "http://ftp.debian.org/debian/dists/$distro/main/binary-armel/Packages.gz" 2>/dev/null
kernel_deb_url="$(zcat Packages.gz | grep linux-image-$kernel_ver\_ | grep Filename | gawk '{print $2}')"
wget -N "http://ftp.debian.org/debian/$kernel_deb_url" 2>/dev/null
kernel_deb="$(basename $kernel_deb_url)"
mkdir tmp
dpkg --extract $kernel_deb tmp/
if [ $? -ne 0 ]; then
echo "failed to unpack kernel, quitting"
exit
fi
cd ..
# Copy only the storage/LED modules the installer needs into the payload.
rm -r armel-payload/lib/modules/* 2>/dev/null
rsync -rtWhmv --include "*/" \
--include="sd_mod.ko" \
--include="sata_mv.ko" \
--include="libata.ko" \
--include="scsi_mod.ko" \
--include="leds-gpio.ko" \
--exclude="*" armel-files/tmp/lib/ armel-payload/lib/
if [ $? -ne 0 ]; then
echo "failed to copy module files, quitting"
exit
fi
# Stage helper scripts, device trees, device db, preseed and binaries that
# will be appended to the installer initrd.
cp -v $tools_dir/*.sh armel-payload/source/
if [ $? -ne 0 ]; then
echo "failed to copy tools, quitting"
exit
fi
rm -r armel-payload/source/micon_scripts/ 2>/dev/null
cp -vrp $tools_dir/micon_scripts armel-payload/source/
if [ $? -ne 0 ]; then
echo "failed to copy micon tools, quitting"
exit
fi
cp -v $dtb_dir/{orion,kirkwood}*.dtb armel-payload/source/
if [ $? -ne 0 ]; then
echo "failed to copy dtb files, quitting"
# NOTE(review): this failure is deliberately tolerated (exit commented out).
#exit
fi
cp -v $tools_dir/*.db armel-payload/source/
if [ $? -ne 0 ]; then
echo "failed to copy device db, quitting"
exit
fi
cp -v preseed-armel.cfg armel-payload/preseed.cfg
if [ $? -ne 0 ]; then
echo "failed to copy preseed, quitting"
exit
fi
cp -v $tools_dir/micro-evtd-armel armel-payload/source/micro-evtd
if [ $? -ne 0 ]; then
echo "failed to copy micro-evtd , quitting"
exit
fi
cp -v $tools_dir/phytool-armel armel-payload/source/phytool
if [ $? -ne 0 ]; then
echo "failed to copy phytool , quitting"
exit
fi
# Strip unneeded drivers from the stock initrd to fit low-memory devices.
# NOTE(review): cpio-filter appears to be a local helper tool — confirm it
# is on PATH.
zcat armel-files/initrd.gz | \
cpio-filter --exclude "sbin/wpa_supplicant" | \
cpio-filter --exclude "*/kernel/drivers/video" | \
cpio-filter --exclude "*/kernel/drivers/mmc" | \
cpio-filter --exclude "*/kernel/drivers/staging" | \
cpio-filter --exclude "*/kernel/drivers/usb" | \
cpio-filter --exclude "*/kernel/drivers/hid" > initrd
if [ $? -ne 0 ]; then
echo "failed to unpack initrd, quitting"
exit
fi
# Append the payload to the cpio archive in place (-A).
cd armel-payload
find . | cpio -v -H newc -o -A -F ../initrd
if [ $? -ne 0 ]; then
echo "failed to patch initrd.gz, quitting"
exit
fi
cd ..
#pigz -9 -k initrd
#if [ $? -ne 0 ]; then
# echo "failed to pack gz initrd, quitting"
# exit 99
#fi
xz --check=crc32 -2e initrd
if [ $? -ne 0 ]; then
echo "failed to pack xz initrd, quitting"
exit 99
fi
#faketime '2018-01-01 01:01:01' /bin/bash -c "mkimage -A arm -O linux -T ramdisk -C gzip -a 0x0 -e 0x0 -n installer-initrd -d initrd.gz output/initrd.buffalo.armel-lowmem-gz"
#if [ $? -ne 0 ]; then
# echo "failed to create initrd.buffalo.armel-lowmem-gz, quitting"
# exit
#fi
faketime '2018-01-01 01:01:01' /bin/bash -c "mkimage -A arm -O linux -T ramdisk -C gzip -a 0x0 -e 0x0 -n installer-initrd -d initrd.xz output/initrd.buffalo.armel-lowmem"
if [ $? -ne 0 ]; then
echo "failed to create initrd.buffalo.armel-lowmem-xz, quitting"
exit
fi
# Wrap the kernel + each device tree into a uImage per supported model.
cp "$(ls armel-files/tmp/boot/vmlinuz*)" vmlinuz
# NOTE(review): the next assignment is immediately overwritten by the
# hard-coded list below, so it is dead code (and armel-files/dtb/ likely
# does not exist) — confirm before removing.
dtb_list="$(ls armel-files/dtb/*{orion,kirkwood}*dtb)"
dtb_list="$dtb_dir/kirkwood-linkstation-lsxl.dtb $dtb_dir/orion5x-linkstation-lswtgl.dtb $dtb_dir/kirkwood-lschlv2.dtb $dtb_dir/orion5x-linkstation-lsgl.dtb"
for dtb in $dtb_list
do
# Model name = last hyphen-separated token of the dtb filename, sans ".dtb".
model="$(echo $dtb | gawk -F- '{print $NF}' | gawk -F. '{print $1}')"
cat vmlinuz $dtb > tmpkern
faketime '2018-01-01 01:01:01' /bin/bash -c "mkimage -A arm -O linux -T Kernel -C none -a 0x01a00000 -e 0x01a00000 -n debian_installer -d tmpkern output/uImage.buffalo.$model"
done
cp vmlinuz output/vmlinuz-armel
rm tmpkern
rm vmlinuz
rm initrd*
rm -r armel-payload/*
| true
|
dab5efffdc772b885b21283ef538fefcab495006
|
Shell
|
zhenggangpku/network-testing
|
/tc/xps_info.sh
|
UTF-8
| 1,098
| 3.984375
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Script to get an overview of how XPS mapping is configured
# Author: Jesper Dangaaard Brouer <netoptimizer@brouer.com>
# License: GPLv2
DEV=$1
if [[ -z "$DEV" ]]; then
    # bug fix: the original called an undefined helper `err`, which produced
    # "command not found" and let the script continue without a device name.
    echo "Must specify DEV as argument" >&2
    exit 1
fi
# Convert a mask to a list of CPUs this cover
# Expand a CPU bitmask into a human-readable "cpu:N" list.
#   $1 - mask value (integer); output carries no trailing newline.
function mask_to_cpus() {
    local remaining=$1
    local idx=0
    printf "CPUs in MASK=0x%02X =>" $remaining
    # Walk the mask bit by bit, reporting every set bit as a CPU id.
    while (( remaining > 0 )); do
        if (( remaining & 1 )); then
            echo -n " cpu:$idx"
        fi
        idx=$(( idx + 1 ))
        remaining=$(( remaining >> 1 ))
    done
}
# Print (in hex, no 0x prefix) the bitmask with only the given CPU's bit set.
function cpu_to_mask() {
    printf "%X" $(( 1 << $1 ))
}
# Print $DEV's tx-*/xps_cpus sysfs paths, sorted numerically by queue index.
# The unquoted echo deliberately flattens the list onto one line.
function sorted_txq_xps_cpus() {
    local files
    files=$(ls /sys/class/net/$DEV/queues/tx-*/xps_cpus | sort --field-separator='-' -k2n)
    echo $files
}
# set -v echoes the raw grep command below, so the reader sees both the
# command and its output.
set -v
# Simple grep to show xps_cpus mapping:
grep -H . $(sorted_txq_xps_cpus)
# Listing that convert the MASK to CPUs
set +v
txq=0
mqleaf=0
# Decode each queue's hex mask (sysfs value has no 0x prefix) into CPU ids.
for xps_cpus in $(sorted_txq_xps_cpus); do
let mqleaf++
mask=$(cat $xps_cpus)
value=$((0x$mask))
#echo MASK:0x$mask
txt=$(mask_to_cpus $value)
echo "NIC=$DEV TXQ:$txq (MQ-leaf :$mqleaf) use $txt"
let txq++
done
| true
|
c0bdc3aa4cb78cecabfe524145488e0fb120d130
|
Shell
|
JACKHAHA363/research
|
/diplomacy_research/scripts/update_proto.sh
|
UTF-8
| 2,676
| 2.90625
| 3
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Refresh the vendored TensorFlow / TF-Serving .proto files: download pinned
# source archives, keep only the .proto files, and rewrite their packages
# and imports into the diplomacy.tensorflow namespace.
# NOTE(review): the sed -i invocations below are GNU-style (no backup
# suffix) and will not work on BSD/macOS sed — confirm Linux-only usage.
TF_ARCHIVE="https://github.com/tensorflow/tensorflow/archive/6612da89516247503f03ef76e974b51a434fb52e.zip" # Tensorflow v1.13.1
TF_SERVING_ARCHIVE="https://github.com/tensorflow/serving/archive/f16e77783927353fca89dbb411fc01cbd3d42bda.zip" # Serving v1.13.0
PROTO_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../proto/" && pwd )"
# Downloading Tensorflow
rm -Rf $PROTO_DIR/temp/
rm -Rf $PROTO_DIR/diplomacy_tensorflow/
wget $TF_ARCHIVE -O tf.zip
unzip tf.zip -d $PROTO_DIR/temp/
mv $PROTO_DIR/temp/*/tensorflow $PROTO_DIR/diplomacy_tensorflow
# Keep only non-test .proto files and drop directories left empty.
find $PROTO_DIR/diplomacy_tensorflow/ ! -name '*.proto' -type f -delete
find $PROTO_DIR/diplomacy_tensorflow/ -name '*test*.proto' -type f -delete
find $PROTO_DIR/diplomacy_tensorflow/ -type d -empty -delete
# Rewrite package/import statements into the diplomacy.* namespace.
find $PROTO_DIR/diplomacy_tensorflow/ -type f -exec sed -i 's@package tensorflow@package diplomacy.tensorflow@g' {} +
find $PROTO_DIR/diplomacy_tensorflow/ -type f -exec sed -i 's@import "tensorflow@import "diplomacy_tensorflow@g' {} +
find $PROTO_DIR/diplomacy_tensorflow/ -type f -exec sed -i 's@ tensorflow.tf2xla.@ diplomacy.tensorflow.tf2xla.@g' {} +
# Downloading Tensorflow Serving
rm -Rf $PROTO_DIR/temp/
rm -Rf $PROTO_DIR/tensorflow_serving/
wget $TF_SERVING_ARCHIVE -O tf_serving.zip
unzip tf_serving.zip -d $PROTO_DIR/temp/
mv $PROTO_DIR/temp/*/tensorflow_serving $PROTO_DIR/
find $PROTO_DIR/tensorflow_serving/ ! -name '*.proto' -type f -delete
find $PROTO_DIR/tensorflow_serving/ -name '*test*.proto' -type f -delete
find $PROTO_DIR/tensorflow_serving/ -type d -empty -delete
# Point serving's imports and message references at the renamed TF protos.
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@import "tensorflow/@import "diplomacy_tensorflow/@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ SignatureDef>@ diplomacy.tensorflow.SignatureDef>@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ tensorflow.Example@ diplomacy.tensorflow.Example@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ TensorProto@ diplomacy.tensorflow.TensorProto@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ NamedTensorProto@ diplomacy.tensorflow.NamedTensorProto@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ RunOptions@ diplomacy.tensorflow.RunOptions@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ RunMetadata@ diplomacy.tensorflow.RunMetadata@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ ConfigProto@ diplomacy.tensorflow.ConfigProto@g' {} +
find $PROTO_DIR/tensorflow_serving/ -type f -exec sed -i 's@ error.Code@ diplomacy.tensorflow.error.Code@g' {} +
# Cleaning up
rm -Rf $PROTO_DIR/temp/
rm ./*.zip
| true
|
7294f8a00a490b5c9def9485d2d6070a226759e4
|
Shell
|
jason-callaway/gluster-perf
|
/env.sh
|
UTF-8
| 1,302
| 3.34375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Interactive environment loader: prompts for RHN subscription details and
# an EC2 keypair, then exports AWS credentials (taken from ~/.aws/credentials
# when present, otherwise prompted). Intended to be sourced so the exports
# reach the calling shell — TODO confirm.
echo "****************************************"
echo "Are you running bash from ssh-agent?"
echo "Have you loaded your ssh key?"
echo "And don't forget your secrets directory."
echo "See README.md for details."
echo "****************************************"
echo
# Use the correct PYTHONPATH on Mac OS X
#if [ "$(uname -s)" == "Darwin" ]; then
# export PYTHONPATH=/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages:/usr/local/lib/python2.7/site-packages
#fi
# Read RHN info
printf "RHN username: "
read rhn_username
export rhn_username
printf "RHN password: "
# read -s keeps the password off the terminal.
read -s rhn_password
export rhn_password
echo
printf "RHN pool id: "
read rhn_pool
export rhn_pool
echo
printf "ec2_keypair: "
read ec2_keypair
export ec2_keypair
echo
# Load AWS credentials if they're in ~/.aws, otherwise prompt for them
# TODO make this support more than one keypair in .aws/credentials
# NOTE(review): cut -d' ' -f3 assumes the file is formatted as
# "key = value" with single spaces — verify against the actual file.
if [ -f ~/.aws/credentials ]; then
export aws_access_key=`grep ^aws_access_key_id ~/.aws/credentials | cut -d' ' -f3`
export aws_secret_key=`grep ^aws_secret_access_key ~/.aws/credentials | cut -d' ' -f3`
else
printf "aws_access_key: "
read aws_access_key
export aws_access_key
printf "aws_secret_key: "
read aws_secret_key
export aws_secret_key
fi
| true
|
721975dcc3cb3ab94327f0a4d696c3b6ed4d96bc
|
Shell
|
d2s/dot
|
/scripts/install-suse.sh
|
UTF-8
| 3,522
| 4.09375
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
#############################################################
# install-apt-packages.sh
# This script installs tools from openSUSE packages
#############################################################
#
# More details about openSUSE package management:
# - https://en.opensuse.org/Package_management
# - https://en.opensuse.org/Package_repositories
# - https://en.opensuse.org/Portal:Zypper
# - https://en.opensuse.org/SDB:Zypper_usage
# - https://en.opensuse.org/SDB:Zypper_manual
#
#
# -----------------------------------------------------------
# Unoffical Bash "strict mode"
# http://redsymbol.net/articles/unofficial-bash-strict-mode/
# -----------------------------------------------------------
set -euo pipefail
IFS=$'\t\n'   # Stricter IFS settings
# Keep a copy of the stricter IFS so sourced code could restore it later.
# shellcheck disable=SC2034 # Unused variable needed for the Bash strict mode
ORIGINAL_IFS=$IFS
# -----------------------------------------------------------
# Go to user account's home directory
# -----------------------------------------------------------
cd "$HOME"
# -----------------------------------------------------------
# Update openSUSE package lists
# Usage:
# opensuse_package_list_update
# -----------------------------------------------------------
opensuse_package_list_update() {
  # Refresh all configured zypper repositories so later install/info
  # queries see current package metadata.
  printf '\nUpdating OpenSUSE package lists:\n'
  sudo zypper refresh
  printf '\n'
}
# -----------------------------------------------------------
# Update previously installed packages to latest versions
# Usage:
# update_opensuse_packages
# -----------------------------------------------------------
update_opensuse_packages() {
  # Upgrade every installed package to its newest available version.
  printf '\nUpdating all existing packages:\n'
  sudo zypper up
  printf '\n'
}
# -----------------------------------------------------------
# Install openSUSE package defined in string variable
# Usage:
# install_opensuse_package "package"
# -----------------------------------------------------------
install_opensuse_package() {
  # Echo the exact command being run, then install the named package.
  # Arguments: $1 - package name
  local pkg=$1
  printf '\nsudo zypper in %s\n' "$pkg"
  sudo zypper in "$pkg"
  printf '\n\n'
}
# -----------------------------------------------------------
# Tell user that package has already been installed
# Usage:
# opensuse_package_already_installed "package"
# -----------------------------------------------------------
opensuse_package_already_installed() {
  # Tell the user this package needs no work.
  # Arguments: $1 - package name
  printf 'NOTE: openSUSE package already installed: %s\n\n' "$1"
}
# -----------------------------------------------------------
# If package is not already installed, install it
# Usage:
# if_not_already_installed "package"
# -----------------------------------------------------------
# Install $1 only if 'zypper info' does not already report it installed.
# NOTE(review): this compares the grep output against the exact string
# "Installed : Yes"; if zypper pads the line differently (extra spaces,
# localized text) the check never matches and the package is reinstalled
# every run — confirm against the zypper version in use.
if_not_already_installed() {
  INSTALL_COMPARISON_PATTERN="Installed : Yes"
  if [[ ! "$(zypper info "$1" |grep "$INSTALL_COMPARISON_PATTERN")" == "$INSTALL_COMPARISON_PATTERN" ]]; then
    install_opensuse_package "$1"
  else
    opensuse_package_already_installed "$1"
  fi
}
# -----------------------------------------------------------
# If `zypper` is available
# -----------------------------------------------------------
# Only proceed when zypper is actually available on this system.
if [ -x "$(command -v zypper)" ]; then
  opensuse_package_list_update
  # Baseline tooling for this dotfiles setup.
  install_opensuse_package "git"
  install_opensuse_package "curl"
  install_opensuse_package "zsh"
  install_opensuse_package "ShellCheck"
  install_opensuse_package "whois"
  install_opensuse_package "ack"
else
  # Exit with error code when zypper is not available
  echo "ERROR: zypper is not available! Nothing happened."
  exit 1
fi
| true
|
ece0bb434ebfe2ec069d981ca3fd1bcc58cfae9a
|
Shell
|
adrielb/DCell
|
/sims/SurfaceTensionAdhesion/render.sge
|
UTF-8
| 366
| 2.5625
| 3
|
[] |
no_license
|
#!/bin/bash
# Grid Engine (SGE) job script: prints host and array-task environment
# variables so the task-ID wiring can be inspected in the job output file.
# request Bourne again shell as shell for job
#$ -S /bin/bash
# merge stderr with stdout
#$ -j y
# assign a name to submitted job
#$ -N render
# change output file name
#$ -o $HOME/output/$JOB_NAME.$JOB_ID.$TASK_ID
# export environment variables
#$ -V
echo $HOSTNAME
# NOTE(review): SGE conventionally exports the array index as $SGE_TASK_ID;
# $TASK_ID and $GE_TASK_ID may be empty on this cluster — confirm which
# variables the scheduler actually sets.
echo TASK_ID = $TASK_ID
echo GE_TASK ID = $GE_TASK_ID
echo SGE_TASK ID = $SGE_TASK_ID
| true
|
8a22c8e1d5adaee663c1c0650aa329b962592dbb
|
Shell
|
RealOrko/bosh
|
/ci/tasks/utils.sh
|
UTF-8
| 1,632
| 4.125
| 4
|
[
"LGPL-2.1-only",
"LicenseRef-scancode-other-permissive",
"LGPL-2.1-or-later",
"LGPL-2.0-or-later",
"LicenseRef-scancode-unicode-mappings",
"Artistic-2.0",
"LGPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-or-later",
"GPL-2.0-or-later",
"GPL-3.0-only",
"MPL-1.1",
"Artistic-1.0",
"GPL-1.0-or-later",
"MIT",
"LicenseRef-scancode-public-domain-disclaimer",
"Artistic-1.0-Perl",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"GPL-2.0-only",
"Apache-2.0",
"Ruby",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] |
permissive
|
#!/usr/bin/env bash
# Abort the script when the environment variable named by $1 still holds
# the placeholder value 'replace-me'.
# Arguments: $1 - name of the variable to check
check_param() {
  local name=$1
  # Bash indirect expansion instead of eval: no word-splitting or command
  # injection risk. Default to empty so an unset variable is tolerated.
  local value=${!name:-}
  if [ "$value" == 'replace-me' ]; then
    # Diagnostics belong on stderr.
    echo "environment variable $name must be set" >&2
    exit 1
  fi
}
# Commit any pending changes under <input>/<bbl_state_dir> (as "CI Bot"),
# then copy the entire input repository — including dotfiles — into the
# output directory.
# Arguments:
#   $1 - absolute path of the input git repository (required)
#   $2 - bbl state directory, relative to $1 (required)
#   $3 - absolute path of the output directory (required)
#   $4 - commit message (default: 'Update bbl state.')
commit_bbl_state_dir() {
  local input_dir=${1?'Input git repository absolute path is required.'}
  local bbl_state_dir=${2?'BBL state relative path is required.'}
  local output_dir=${3?'Output git repository absolute path is required.'}
  local commit_message=${4:-'Update bbl state.'}
  pushd "${input_dir}/${bbl_state_dir}"
    # Only commit when 'git status' reports actual changes.
    if [[ -n $(git status --porcelain) ]]; then
      git config user.name "CI Bot"
      git config user.email "ci@localhost"
      git add --all .
      git commit -m "${commit_message}"
    fi
  popd
  # dotglob makes the '.' glob below include hidden files (e.g. .git).
  shopt -s dotglob
  cp -R "${input_dir}/." "${output_dir}"
}
# When the current directory is a git checkout, print the latest commit
# and any local modifications (useful after 'fly execute'); otherwise do
# nothing.
print_git_state() {
  [ -d ".git" ] || return 0
  echo "--> last commit..."
  TERM=xterm-256color git --no-pager log -1
  echo "---"
  echo "--> local changes (e.g., from 'fly execute')..."
  TERM=xterm-256color git --no-pager status --verbose
  echo "---"
}
# If BOSH_VAGRANT_PRIVATE_KEY holds key *material* rather than a path to an
# existing file, write it to a freshly created private temp file and export
# that file's path as BOSH_VAGRANT_KEY_PATH.
set_up_vagrant_private_key() {
  if [ ! -f "$BOSH_VAGRANT_PRIVATE_KEY" ]; then
    # 'local' keeps the temp path out of the caller's namespace; all
    # expansions quoted (the original left $key_path unquoted).
    local key_path
    key_path=$(mktemp -d /tmp/ssh_key.XXXXXXXXXX)/value
    echo "$BOSH_VAGRANT_PRIVATE_KEY" > "$key_path"
    chmod 600 "$key_path"
    export BOSH_VAGRANT_KEY_PATH=$key_path
  fi
}
# Run a command line up to 10 times, sleeping 3s between attempts.
# Arguments: $1 - command to run (word-split on whitespace; no quoting)
# Returns:   0 as soon as one attempt succeeds, 1 after 10 failures.
retry_command() {
  local retryable_command=$1
  set +e
  for i in {1..10}; do
    $retryable_command
    local status=$?
    if [ $status -ne 0 ]; then
      echo "sleeping 3s"
      sleep 3s
    else
      # Bug fix: the original returned here without re-enabling errexit,
      # leaving the caller's shell with 'set +e' after every success.
      set -e
      return 0
    fi
  done
  set -e
  echo "Timed out running command '$retryable_command'"
  return 1
}
| true
|
79047beb10b9938218d4b38b2937eba8c4041d9f
|
Shell
|
shubhankar-mern/BashPracticeProblems
|
/array/sumofthree.sh
|
UTF-8
| 258
| 3.296875
| 3
|
[] |
no_license
|
# Read n integers from stdin and report whether their sum is zero.
read -p "Enter numbet of ele: " n
echo " Enter array elements : "
# Collect the n elements into array a.
for (( i=0; i<n ; i++ ))
do
read a[$i]
done
sum=0;
# Accumulate the total of all entered values.
for (( i=0 ;i<n ;i++ ))
do
sum=$(($sum+${a[$i]}))
done
if [ $sum -eq 0 ]
then
echo "is zero"
else
echo "is not zero"
fi
| true
|
aefd38c86538c70980000ec981b196f70d65e0e6
|
Shell
|
cxwn/summary-of-work-and-study
|
/ops-scripts/UserDel.sh
|
UTF-8
| 432
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/bash
# Batch user-deletion script: removes one fixed account plus a hard-coded
# list of accounts, including their home directories (userdel -r).
# Must be run as root; deletions are irreversible.
USER1='payer'
UserName=('tangchanggen' 'wuyaxiong' 'lihui' 'wangyifeng' 'yanglongjun' 'liyunfeng' 'xiaoyongan' 'ivandu')
echo -e "\033[41;34m These account were deleting now! Please wait! \033[0m"
userdel -r $USER1
echo -e "\033[47;31m The account $USER1 had been deleted! \033[0m"
# Delete each listed account in turn.
for U in ${UserName[@]};
do
userdel -r $U
echo -e "\033[47;31m The account $U had been deleted! \033[0m"
done
| true
|
d7e3d8deff04c87d3102d40af4603026c917ec2d
|
Shell
|
pr-martin/rcptt_ci
|
/runner.sh
|
UTF-8
| 1,276
| 2.765625
| 3
|
[] |
no_license
|
#! /bin/sh
# Run the RCPTT regression suite headlessly against a built OSATE2 product
# and write JUnit/HTML reports. All paths below are site-specific.
# Set properties below
runnerPath=/usr/lfs/ssd_v0/opt/rcptt.runner-2.0.1/eclipse
autPath=./osate2-core/org.osate.build.product/target/products/osate2/linux/gtk/x86_64
project=./regressionSuite/rcpttSuite
# properties below configure all intermediate and result files
# to be put in "results" folder next to a project folder. If
# that's ok, you can leave them as is
testResults=$project/../results
runnerWorkspace=$testResults/runner-workspace
# autWorkspace=$autPath/osate2_workspace
# autWorkspace=/usr/lfs/ssd_v0/testing/agree/runtime-Osate2product
# autWorkspace=./localRepo/runtime-Osate2product
autWorkspace=/usr/lfs/ssd_v0/opt/jenkins/workspace/AGREE/rcptt_ci/localRepo/runtime-Osate2product
autOut=$testResults/aut-out-
junitReport=$testResults/results.xml
htmlReport=$testResults/results.html
# Start from an empty results directory each run.
rm -rf $testResults
mkdir $testResults
# Launch the Eclipse equinox launcher with the RCPTT headless runner
# application; per-test timeout is 1 hour.
java -jar $runnerPath/plugins/org.eclipse.equinox.launcher_1.3.100.v20150511-1540.jar \
-application org.eclipse.rcptt.runner.headless \
-data $runnerWorkspace \
-aut $autPath \
-autWsPrefix $autWorkspace \
-autConsolePrefix $autOut \
-htmlReport $htmlReport \
-junitReport $junitReport \
-import $project \
-reuseExistingWorkspace \
-testOptions "testExecTimeout=3600"
| true
|
5675ddaa156d87347ead39ff8087885a86cac062
|
Shell
|
fungs/taxator-tk
|
/core/build.sh
|
UTF-8
| 394
| 3.734375
| 4
|
[] |
no_license
|
#!/bin/sh
# This script will try to compile the source code in the bin subdirectory,
# using one processor core by default
# Usage: ./build.sh [num_threads]
set -o errexit
set -o nounset
# $1 (optional) selects the 'make -j' parallelism; defaults to 1.
compile_threads=${1:-1}
bdir='bin'
test -d "$bdir" || mkdir "$bdir"
echo 'Running cmake'
cd "$bdir"
cmake ../
echo "Compiling source code in $bdir"
make -j "$compile_threads"
echo "Programs successfully built in $bdir, check it out!"
| true
|
a5a440d40df93a2e11f31012c01b14ab3e94939f
|
Shell
|
birdming22/file_util
|
/mp3_split.sh
|
UTF-8
| 339
| 3.625
| 4
|
[] |
no_license
|
#!/bin/bash
# Split every .mp3 in src_dir into chunks of SPLIT_TIME minutes.seconds,
# writing the pieces to dst_dir via mp3splt.
# Usage: bash mp3_split.sh src_dir dst_dir
# check if argument is empty
if [ -z "$1" ] || [ -z "$2" ]; then
    echo "usage: bash mp3_split.sh src_dir dst_dir"
    exit
fi
SRC_DIR=$1
DST_DIR=$2
cd "$SRC_DIR" || exit 1
SPLIT_TIME=59.59
# Glob instead of parsing `ls` so filenames with spaces survive; the
# original also re-listed "$SRC_DIR/*.mp3" *after* cd'ing into it, which
# broke whenever src_dir was a relative path.
for file in *.mp3; do
    [ -e "$file" ] || continue   # no .mp3 files matched
    mp3splt -t "$SPLIT_TIME" "$file" -d "$DST_DIR"
done
| true
|
dce49183454c7eb715827c5b71177b8bdd1b7771
|
Shell
|
lbnl-science-it/aws-sagemaker-keras-text-classification
|
/container/build_docker.sh
|
UTF-8
| 1,396
| 3.5625
| 4
|
[] |
no_license
|
#!/bin/sh
# Build the algorithm's Docker image and push it to the caller's ECR
# registry, creating the repository on first use.
# The name of our algorithm
algorithm_name=sagemaker-keras-text-classification
# SageMaker requires these entry points to be executable inside the image.
chmod +x sagemaker_keras_text_classification/train
chmod +x sagemaker_keras_text_classification/serve
account=$(aws sts get-caller-identity --query Account --output text)
# Get the region defined in the current configuration (default to us-west-2 if none defined)
region=$(aws configure get region)
region=${region:-us-west-2}
fullname="${account}.dkr.ecr.${region}.amazonaws.com/${algorithm_name}:latest"
# If the repository doesn't exist in ECR, create it.
aws ecr describe-repositories --repository-names "${algorithm_name}" > /dev/null 2>&1
if [ $? -ne 0 ]
then
    aws ecr create-repository --repository-name "${algorithm_name}" > /dev/null
fi
# Get the login command from ECR and execute it directly
$(aws ecr get-login --region ${region} --no-include-email)
#$(aws ecr get-login --no-include-email --region ${region} --registry-ids 763104351884)
# Build the docker image locally with the image name and then push it to ECR
# with the full name.
# On a SageMaker Notebook Instance, the docker daemon may need to be restarted in order
# to detect your network configuration correctly. (This is a known issue.)
if [ -d "/home/ec2-user/SageMaker" ]; then
    sudo service docker restart
fi
docker build  -t ${algorithm_name} .
docker tag ${algorithm_name} ${fullname}
docker push ${fullname}
| true
|
68f5293d02a1dd421e550b708121016e3b5a10a6
|
Shell
|
KAVANABELAVADI/day1011problem3
|
/flipCoinCombination.sh
|
UTF-8
| 3,744
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
# Simulate N coin flips and tally singlet (H/T), doublet (HH/HT/TH/TT) and
# triplet (HHH..TTT) combinations; 0 represents heads, 1 tails. Finally,
# bubble-sorts all percentages and reports the largest one.
declare -A singlet
declare -A doublet
declare -A tripplet
heads=0
tails=0;
doubletHead=0;
doubletHeadTail=0;
doubletTailHead=0;
doubletTail=0;
tHHH=0;
tHHT=0;
tHTH=0;
tHTT=0;
tTHH=0;
tTHT=0;
tTTH=0;
tTTT=0;
read -p "Enter Number of Trails - " trails
temp=$trails
while [ $temp -gt 0 ]
do
#singlet
# One fresh flip per trial: 0 = heads, 1 = tails.
res=$((RANDOM % 2))
if [ $res == 0 ]
then
(( heads++ ))
singlet[heads]+=" $res"
else
(( tails++ ))
singlet[tails]+=" $res"
fi
#Doublet
# Second flip appended to the first forms the two-character doublet key.
res1=$res$((RANDOM%2))
case $res1 in
00) ((doubletHead++))
doublet[heads]+=" $res1" ;;
01) ((doubletHeadTail++))
doublet[headtail]+=" $res1" ;;
10) ((doubletTailHead++))
doublet[tailhead]+=" $res1" ;;
11) ((doubletTail++))
doublet[tails]+=" $res1" ;;
esac
#tripplet
# Third flip appended to the doublet forms the three-character triplet key.
res2=$res1$((RANDOM%2))
case $res2 in
000) ((tHHH++))
tripplet[HHH]+=" $res2" ;;
001) ((tHHT++))
tripplet[HHT]+=" $res2";;
010) ((tHTH++))
tripplet[HTH]+=" $res2";;
011) ((tHTT++))
tripplet[HTT]+=" $res2";;
100) ((tTHH++))
tripplet[THH]+=" $res2";;
101) ((tTHT++))
tripplet[THT]+=" $res2";;
110) ((tTTH++))
tripplet[TTH]+=" $res2";;
111) ((tTTT++))
tripplet[TTT]+=" $res2";;
esac
((temp--))
done
# Percentages are computed in awk because bash arithmetic is integer-only.
echo ${singlet[heads]}
echo ${singlet[tails]}
array[0]=$(echo $heads $trails | awk '{print ((100*$1/$2)) }')
array[1]=$(echo $tails $trails | awk '{print ((100*$1/$2)) }')
echo "Persentage of heads - " ${array[0]}
echo "Persentage of tails - " ${array[1]}
echo ${doublet[heads]}
echo ${doublet[headtail]}
echo ${doublet[tailhead]}
echo ${doublet[tails]}
array[2]=$(echo $doubletHead $trails | awk '{print ((100*$1/$2)) }')
array[3]=$(echo $doubletHeadTail $trails | awk '{print ((100*$1/$2)) }')
array[4]=$(echo $doubletTailHead $trails | awk '{print ((100*$1/$2)) }')
array[5]=$(echo $doubletTail $trails | awk '{print ((100*$1/$2)) }')
echo "Persentage of heads combination - " ${array[2]}
echo "Persentage of head tail combination - " ${array[3]}
echo "Persentage of tail head combination - " ${array[4]}
echo "Persentage of tails combination - " ${array[5]}
echo ${tripplet[HHH]}
echo ${tripplet[HHT]}
echo ${tripplet[HTH]}
echo ${tripplet[HTT]}
echo ${tripplet[THH]}
echo ${tripplet[THT]}
echo ${tripplet[TTH]}
echo ${tripplet[TTT]}
array[6]=$(echo $tHHH $trails | awk '{print ((100*$1/$2)) }')
array[7]=$(echo $tHHT $trails | awk '{print ((100*$1/$2)) }')
array[8]=$(echo $tHTH $trails | awk '{print ((100*$1/$2)) }')
array[9]=$(echo $tHTT $trails | awk '{print ((100*$1/$2)) }')
array[10]=$(echo $tTHH $trails | awk '{print ((100*$1/$2)) }')
array[11]=$(echo $tTHT $trails | awk '{print ((100*$1/$2)) }')
array[12]=$(echo $tTTH $trails | awk '{print ((100*$1/$2)) }')
array[13]=$(echo $tTTT $trails | awk '{print ((100*$1/$2)) }')
echo "Persentage of HHH combination - " ${array[6]}
echo "Persentage of HHT combination - " ${array[7]}
echo "Persentage of HTH combination - " ${array[8]}
echo "Persentage of HTT combination - " ${array[9]}
echo "Persentage of THH combination - " ${array[10]}
echo "Persentage of THT combination - " ${array[11]}
echo "Persentage of TTH combination - " ${array[12]}
echo "Persentage of TTT combination - " ${array[13]}
# Bubble-sort the 14 percentages ascending; awk truncates them to ints
# for the comparison since bash cannot compare floats.
for (( i=0; i<14; i++ ))
do
for (( j=0; j<$((14-$i-1)); j++ ))
do
k=$(echo ${array[$j]} | awk '{printf "%d",$1}')
l=$(echo ${array[$(($j+1))]} | awk '{printf "%d",(($1))}')
if [ $k -gt $l ]
then
temp=${array[$j]}
array[$j]=${array[$(($j+1))]}
array[$(($j+1))]=$temp
fi
done
done
echo "The percentage of winning combination of these is " ${array[13]}
| true
|
605e4fe3c42b0e3a83ea84ea54916c6f2e89e96f
|
Shell
|
Chen-Gu/slp
|
/data/testbed/info/fitiotlab/aggregator.sh
|
UTF-8
| 1,065
| 3.890625
| 4
|
[] |
no_license
|
#!/bin/bash
# FIT IoT-LAB helper: waits for the experiment's nodes, resets them so they
# boot together, then runs serial_aggregator to capture all nodes' serial
# output into per-experiment log files. Expects EXP_ID in the environment.
echo "Starting Aggregator"
# Redirect all nodes serial output to a file
readonly OUTFILE="${HOME}/.iot-lab/${EXP_ID}/aggregator_log.stdout"
readonly ERRFILE="${HOME}/.iot-lab/${EXP_ID}/aggregator_log.stderr"
trap 'catch_signal; exit'  SIGTERM
# On SIGTERM: flush buffers, stop the aggregator, compress the logs.
catch_signal()
{
    echo "Catch SIGTERM signal" >&2
    # Try to sync anything serial_aggregator might have buffered
    # to the filesystem
    sync
    SA_PID=$(ps aux | grep serial_aggregator | grep python | tr -s ' ' | cut -f 2 -d' ')
    echo "Killing serial_aggregator with PID $SA_PID" >&2
    kill $SA_PID
    sleep 0.1
    sync
    gzip -9 ${OUTFILE}
    gzip -9 ${ERRFILE}
    # Final sync to make sure anything else is written
    sync
}
# NOTE(review): this bare 'serial_aggregator -v' run (no experiment id)
# appears to be a sanity/version check printed to stderr — confirm intent.
echo "serial_aggregator -v" >&2
serial_aggregator -v >&2
# Wait for all nodes to be running
iotlab-experiment wait -i ${EXP_ID}
# Reset all nodes, so they boot at the same time
iotlab-node -i ${EXP_ID} --reset
# Wait for a bit before starting the aggregator
sleep 5
echo "Launch serial_aggregator with exp_id: ${EXP_ID}" >&2
serial_aggregator -i ${EXP_ID} 2> ${ERRFILE} 1> ${OUTFILE}
| true
|
58ad3103f4a201a22fb1a54a8a44bb043183b920
|
Shell
|
fyamvbf/example-chat
|
/.ci/build-frontend.sh
|
UTF-8
| 233
| 2.59375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# CI step: configure a per-checkout npm cache, then install dependencies
# and build the frontend bundle.
set -eux
# The .npm directory configured here is cached by GitLab CI/CD between runs.
cat <<_EOF_>frontend/.npmrc
cache=$(pwd)/.npm
prefer-offline=true
_EOF_
cd frontend
npm ci
npm run build
| true
|
19f61c37a0f31334a3c7c4fd779a5aec8cbd0732
|
Shell
|
chrisbangun/release-scripts
|
/dump-collection.sh
|
UTF-8
| 689
| 4
| 4
|
[] |
no_license
|
#!/bin/bash
# Dump one MongoDB database into ./dump and tar.gz its directory.
# Arguments: $1 - database name
function dump_specific_table {
    # -f: don't fail noisily on the first run when no old dump exists;
    # expansions quoted so names with spaces/globs are handled.
    rm -rf "./dump/$1"
    mongodump --db "$1" --out ./dump
    cd dump || return 1
    for dir in */
    do
        base=$(basename "$dir")
        if [ "$base" == "$1" ]; then
            tar -czf "${base}.tar.gz" "$dir"
        fi
    done
}
# Dump every MongoDB database into ./dump and tar.gz each one.
function dump_all {
    # -f: tolerate a missing ./dump directory on the first run.
    rm -rf dump
    mongodump --out ./dump
    cd dump || return 1
    for dir in */
    do
        base=$(basename "$dir")
        tar -czf "${base}.tar.gz" "$dir"
    done
}
# Dump a single collection of a single database into ./dump.
# Arguments: $1 - database name, $2 - collection name
# Bug fix: the original read $2/$3, but the dispatcher passes exactly two
# arguments (db, collection), so both values were always wrong.
function dump_collection {
    local db=$1
    local collection=$2
    mongodump --db="$db" --collection="$collection" --out=./dump
}
# Dispatch on the CLI arguments: "...collection..." in $1 dumps one
# collection ($2 = db, $3 = collection); any other single argument dumps
# that one database; no arguments dumps everything.
if [ "$#" -ge 1 ]; then
    # Case-insensitive match for the word "collection".
    shopt -s nocasematch
    if [[ $1 == *"collection"* ]]; then
        # NOTE(review): dump_collection is handed two arguments here —
        # confirm its parameter handling matches this call convention.
        dump_collection $2 $3
    else
        dump_specific_table $1
    fi
else
    dump_all
fi
| true
|
f6dd9a998fb5568e62fbeb28745a6e49b59bbe26
|
Shell
|
KerberosSec/KerberosSec
|
/install.sh
|
UTF-8
| 4,946
| 3.375
| 3
|
[] |
no_license
|
#!/bin/bash
#Developer: Diego333-ms
#Github: https://github.com/Diego333-ms/KerberosSec
#Copyright: Diego333-ms
#Todos os Direitos Reservados
# Interactive installer: relocates the checkout into the Termux home,
# creates the scripts/logs layout, asks the user to accept the LICENSE,
# then installs all dependencies either interactively ("manual") or in a
# quiet background mode with fixed sleeps.
instalacao() {
# Install dependencies / set up the KerberosSec directory layout.
cd ..
mv KerberosSec /data/data/com.termux/files/home
cd /data/data/com.termux/files/home/KerberosSec
chmod +x KerberosSec.sh
mkdir /data/data/com.termux/files/home/KerberosSec/scripts
mkdir /data/data/com.termux/files/home/KerberosSec/logs
touch /data/data/com.termux/files/home/KerberosSec/logs/history.log
mv load.sh /data/data/com.termux/files/home/KerberosSec/scripts
clear
setterm -foreground blue
printf "\n[~] - Ao Prosseguir com o Programa, você concorda com os Termos de Uso descritos no arquivo "
setterm -foreground cyan
printf "LICENSE"
setterm -foreground blue
printf "? sim | não: "
setterm -foreground cyan
read license
# Proceed only if the user accepted the license terms ("sim"/"Sim").
if [[ $license == "sim" || $license == "Sim" ]]; then
setterm -foreground yellow
printf "\n[~] - Deseja Fazer o Download com Interface Gráfica? sim | não: "
setterm -foreground blue
read choice
# "sim" -> interactive install (manual); otherwise quiet background mode.
if [[ $choice == "sim" || $choice == "Sim" ]]; then
manual
else
setterm -foreground blue
printf "\n[~] - A instalação pode demorar até 3 minutos para ser concluida. Digite "
setterm -foreground yellow
printf "Enter "
setterm -foreground blue
printf "para aceitar e prosseguir\n"
setterm -foreground green
read choice
setterm -foreground yellow
printf "\n[*] - Instalando Pacotes...\n"
# NOTE(review): each installer below is backgrounded and "awaited" with a
# fixed sleep — assumes the install finishes within that window; fragile
# on slow connections.
apt-get update > /dev/null 2>&1 &
sleep 7
setterm -foreground cyan
printf "\n[*] - Instalando Recursos...\n"
apt-get install ncurses-utils > /dev/null 2>&1 &
sleep 7
setterm -foreground cyan
printf "\n[*] - Instalando Python...\n"
apt-get install python > /dev/null 2>&1 &
sleep 25
setterm -foreground cyan
printf "\n[*] - Instalando SSH...\n"
apt-get install openssh > /dev/null 2>&1 &
sleep 33
setterm -foreground cyan
printf "\n[*] - Instalando Bash...\n"
apt-get install bash > /dev/null 2>&1 &
sleep 3
setterm -foreground cyan
printf "\n[*] - Instalando Figlet...\n"
apt-get install figlet > /dev/null 2>&1 &
sleep 10
setterm -foreground cyan
printf "\n[*] - Instalando PHP...\n"
apt-get install php > /dev/null 2>&1 &
sleep 33
setterm -foreground cyan
printf "\n[*] - Instalando Ngrok...\n"
git clone https://github.com/PSecurity/ps.ngrok > /dev/null 2>&1 &
sleep 12
mv ps.ngrok /data/data/com.termux/files/home
cd /data/data/com.termux/files/home/ps.ngrok
mv ngrok /data/data/com.termux/files/home
cd /data/data/com.termux/files/home/KerberosSec
setterm -foreground yellow
printf "\n[*] - Verificando Instalações...\n"
sleep 2
verificacao
fi
else
exit 1;
fi
}
# Verify that every required tool is installed, aborting with a hint when
# one is missing.
# Bug fix: the original guarded each check with '[[ -e <tool> ]]', which
# tests for a *file named after the tool in the current directory* — when
# such a file happened to exist the real check was silently skipped. The
# 'command -v' check is now performed unconditionally for every tool.
verificacao() {
	setterm -foreground red
	command -v tput > /dev/null 2>&1 || { printf >&2 "Erro durante instalação dos Recursos. Tente: pkg install ncurses-utils"; exit 1; }
	command -v python > /dev/null 2>&1 || { printf >&2 "Erro durante instalação do Python. Tente: pkg install python"; exit 1; }
	command -v ssh > /dev/null 2>&1 || { printf >&2 "Erro durante instalação do SSH. Tente: pkg install openssh"; exit 1; }
	command -v bash > /dev/null 2>&1 || { printf >&2 "Erro durante instalação do Bash. Tente: pkg install bash"; exit 1; }
	command -v figlet > /dev/null 2>&1 || { printf >&2 "Erro durante instalação do Figlet. Tente: pkg install figlet"; exit 1; }
	command -v php > /dev/null 2>&1 || { printf >&2 "Erro durante instalação do PHP. Tente: pkg install php"; exit 1; }
	setterm -foreground green
	printf "\n[*] - Instalação Concluida com Sucesso.\n"
	# End of installation.
}
# Interactive ("graphical") install path: runs each package manager command
# in the foreground so the user sees and can answer its prompts, then hands
# off to verificacao for the final check.
manual() {
setterm -foreground cyan
printf "\n[*] - Instalando Recursos...\n"
setterm -foreground blue
apt-get install ncurses-utils
setterm -foreground cyan
printf "\n[*] - Instalando Figlet...\n"
setterm -foreground blue
apt-get install figlet
setterm -foreground cyan
printf "\n[*] - Instalando Python...\n"
setterm -foreground blue
apt-get install python
setterm -foreground cyan
printf "\n[*] - Instalando SSH...\n"
setterm -foreground blue
apt-get install openssh
setterm -foreground cyan
printf "\n[*] - Instalando PHP...\n"
setterm -foreground blue
apt-get install php
setterm -foreground cyan
printf "\n[*] - Instalando Ngrok...\n"
setterm -foreground blue
# Fetch the bundled ngrok binary and move it into the Termux home.
git clone https://github.com/PSecurity/ps.ngrok
mv ps.ngrok /data/data/com.termux/files/home
cd /data/data/com.termux/files/home/ps.ngrok
mv ngrok /data/data/com.termux/files/home
cd /data/data/com.termux/files/home/KerberosSec
setterm -foreground cyan
printf "\n[*] - Instalando Bash...\n"
setterm -foreground blue
apt-get install bash
verificacao
}
# Entry point: run the interactive installer.
instalacao
| true
|
4770044c83d6cbf83824a3ec95915ded06ff2d2c
|
Shell
|
thirdless/faculta
|
/sisteme-operare/lab4/p2.sh
|
UTF-8
| 1,183
| 3.5
| 4
|
[] |
no_license
|
# Create a full project skeleton under ~/<name>: bin/ obj/ src/ inc/
# directories, empty <name>.c / <name>.h / build.sh at the root, plus
# src/<name>_src.c and inc/<name>_inc.h. Leaves the cwd in ~/<name>/inc.
# Fixes: $1 was unquoted in mkdir/cd, breaking names with spaces or globs.
proiect(){
	cd ~/
	mkdir -p "$1"
	cd "$1"
	mkdir -p bin
	mkdir -p obj
	mkdir -p src
	mkdir -p inc
	> "$1.c"
	> "$1.h"
	> build.sh
	cd src
	> "$1_src.c"
	cd ../inc
	> "$1_inc.h"
}
# Create the basic project skeleton under ~/<name>: bin/ obj/ src/ inc/
# directories plus empty <name>.c / <name>.h / build.sh at the root.
# Leaves the cwd in ~/<name>.
# Fixes: $1 was unquoted in mkdir/cd.
proiectbasic(){
	cd ~/
	mkdir -p "$1"
	cd "$1"
	mkdir -p bin
	mkdir -p obj
	mkdir -p src
	mkdir -p inc
	> "$1.c"
	> "$1.h"
	> build.sh
}
# Populate an existing project skeleton with the per-project stubs
# src/<name>_src.c and inc/<name>_inc.h.
# Arguments: $1 - project name; ~/$1/src and ~/$1/inc must already exist.
proiectsingur(){
	cd ~/
	cd "$1" || return 1
	cd src
	> "$1_src.c"
	cd ../inc
	> "$1_inc.h"
}
# Create inc/<name>_inc.h plus a source stub whose extension follows $2:
# "c++" -> src/<name>_src.cpp, anything else -> src/<name>_src.c.
# Bug fix: the original wrote ["$2" = "c++"] without the spaces that the
# '[' builtin requires, so the test always errored and only .c files were
# ever produced.
proiectdublu(){
	cd ~/
	cd "$1" || return 1
	cd inc
	> "$1_inc.h"
	cd ../src
	if [ "$2" = "c++" ]; then
		> "$1_src.cpp"
	else
		> "$1_src.c"
	fi
}
x=1
# Intended to create several files under ~/<name>/inc but left unfinished
# upstream: the original `while [x -lt $3]` with an *empty* body was a bash
# syntax error that prevented the whole script from even parsing. Restored
# here as a well-formed, terminating loop.
# TODO: implement the real per-iteration file creation.
proiecttriplu(){
	cd ~/
	cd "$1" || return 1
	cd inc
	while [ "$x" -lt "${3:-0}" ]
	do
		x=$((x + 1))   # placeholder body so the loop terminates
	done
}
# Help text placeholder; currently just emits a blank line, exactly as
# the upstream author left it.
ajutor(){
	printf '\n'
}
# Validate $1 by stripping everything except alphanumerics, '_' and '-';
# a mismatch means the name contained forbidden characters.
compressed="$(echo $1 | sed -e 's/[^[:alnum:]\_\-]//g')"
if [[ ! -n "$1" || -n "$4" || "$compressed" != "$1" ]]; then
echo Parametrii sunt setati incorect
exit 0
elif [[ ! -n "$2" && "$1" = "--help" ]]; then
ajutor
elif [[ ! -n "$2" && "$1" = "-h" ]]; then
ajutor
# NOTE(review): this branch fires whenever $3 is empty (i.e. at most two
# args) and forwards the empty $3 to proiecttriplu, which also makes the
# next '! -n "$2"' branch unreachable — confirm the intended ordering.
elif [[ ! -n "$3" ]];then
proiectbasic $1
proiecttriplu $1 $2 $3
elif [[ ! -n "$2" ]];then
proiectbasic $1
proiectdublu $1 $2
else
proiectbasic $1
proiectsingur $1
fi
| true
|
cd758ef9ffa207f6d8438a444c1e2492f921546c
|
Shell
|
rubyonracetracks/gatsby_neutrino
|
/mod/mod-03/mod-03.sh
|
UTF-8
| 284
| 2.921875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# AGENDA:
# * Add README.md
# Replaces the project's README.md with the chapter-3 version (keeping a
# backup of the original) and commits the change.
echo '###########################'
echo 'Chapter 3: Adding README.md'
echo '###########################'
cp README.md README-orig.md # Save the original README.md file
mv mod-03-README.md README.md
git add .
git commit -m "Added README.md"
| true
|
b016d0a9e0e0871139c1cea1af8897862a98a71f
|
Shell
|
ariakh55/Excalibur
|
/lstage.sh
|
UTF-8
| 517
| 2.546875
| 3
|
[
"MIT"
] |
permissive
|
# Resolve the repository root from this script's own location.
ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
STAGE="$ROOT/build-vscode/WebServer"
PLUGINS="$STAGE/plugins"
RUNTIME="$PLUGINS/runtime"
# Create the staging directory layout.
mkdir -p "$PLUGINS"
mkdir -p "$RUNTIME"
# Copy each built plugin shared object into the staging plugins dir.
for plugin in ChatRoomPlugin/libChatRoomPlugin.so \
              EchoPlugin/libEchoPlugin.so \
              StaticContentPlugin/libStaticContentPlugin.so; do
  cp "$ROOT/build-vscode/WebServer/$plugin" "$PLUGINS"
done
# Stage the self-signed localhost TLS certificate and key.
cp "$ROOT/test-cert-key-localhost/cert.pem" "$STAGE"
cp "$ROOT/test-cert-key-localhost/key.pem" "$STAGE"
| true
|
a3040a2d064f628266021b6c8cf37b042b3df943
|
Shell
|
kolaente/xgo
|
/docker/base/build.sh
|
UTF-8
| 36,341
| 3.953125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash
#
# Contains the main cross compiler, that individually sets up each target build
# platform, compiles all the C dependencies, then build the requested executable
# itself.
#
# Usage: build.sh <import path>
#
# Needed environment variables:
# REPO_REMOTE - Optional VCS remote if not the primary repository is needed
# REPO_BRANCH - Optional VCS branch to use, if not the master branch
# DEPS - Optional list of C dependency packages to build
# ARGS - Optional arguments to pass to C dependency configure scripts
# PACK - Optional sub-package, if not the import path is being built
# OUT - Optional output prefix to override the package name
# FLAG_V - Optional verbosity flag to set on the Go builder
# FLAG_X - Optional flag to print the build progress commands
# FLAG_RACE - Optional race flag to set on the Go builder
# FLAG_TAGS - Optional tag flag to set on the Go builder
# FLAG_LDFLAGS - Optional ldflags flag to set on the Go builder
# FLAG_BUILDMODE - Optional buildmode flag to set on the Go builder
# TARGETS - Comma separated list of build targets to compile for
# GO_VERSION - Bootstrapped version of Go to disable uncupported targets
# EXT_GOPATH - GOPATH elements mounted from the host filesystem
# Define a function that figures out the binary extension
# Echo the file extension appropriate for the binary being produced,
# derived from the buildmode in $FLAG_BUILDMODE and the target OS in $1.
# Default buildmode yields ".exe" for windows and nothing elsewhere.
function extension {
	case "$FLAG_BUILDMODE" in
		archive|c-archive)
			if [ "$1" == "windows" ]; then
				echo ".lib"
			else
				echo ".a"
			fi
			;;
		shared|c-shared)
			case "$1" in
				windows)     echo ".dll" ;;
				darwin|ios)  echo ".dylib" ;;
				*)           echo ".so" ;;
			esac
			;;
		*)
			if [ "$1" == "windows" ]; then
				echo ".exe"
			fi
			;;
	esac
}
# Either set a local build environemnt, or pull any remote imports
if [ "$EXT_GOPATH" != "" ]; then
  # If local builds are requested, inject the sources
  echo "Building locally $1..."
  export GOPATH=$GOPATH:$EXT_GOPATH
  set -e
  # Find and change into the package folder
  cd `go list -e -f {{.Dir}} $1`
  export GOPATH=$GOPATH:`pwd`/Godeps/_workspace
else
  # Inject all possible Godep paths to short circuit go gets
  GOPATH_ROOT=$GOPATH/src
  IMPORT_PATH=$1
  # Walk every ancestor of the import path, adding any Godeps workspace.
  while [ "$IMPORT_PATH" != "." ]; do
    export GOPATH=$GOPATH:$GOPATH_ROOT/$IMPORT_PATH/Godeps/_workspace
    IMPORT_PATH=`dirname $IMPORT_PATH`
  done
  # Otherwise download the canonical import path (may fail, don't allow failures beyond)
  echo "Fetching main repository $1..."
  go get -v -d $1
  set -e
  cd $GOPATH_ROOT/$1
  # Switch over the code-base to another checkout if requested
  if [ "$REPO_REMOTE" != "" ] || [ "$REPO_BRANCH" != "" ]; then
    # Detect the version control system type
    IMPORT_PATH=$1
    while [ "$IMPORT_PATH" != "." ] && [ "$REPO_TYPE" == "" ]; do
      if [ -d "$GOPATH_ROOT/$IMPORT_PATH/.git" ]; then
        REPO_TYPE="git"
      elif [ -d "$GOPATH_ROOT/$IMPORT_PATH/.hg" ]; then
        REPO_TYPE="hg"
      fi
      IMPORT_PATH=`dirname $IMPORT_PATH`
    done
    if [ "$REPO_TYPE" == "" ]; then
      echo "Unknown version control system type, cannot switch remotes and branches."
      exit -1
    fi
    # If we have a valid VCS, execute the switch operations
    if [ "$REPO_REMOTE" != "" ]; then
      echo "Switching over to remote $REPO_REMOTE..."
      if [ "$REPO_TYPE" == "git" ]; then
        git remote set-url origin $REPO_REMOTE
        git fetch --all
        git reset --hard origin/HEAD
        git clean -dxf
      elif [ "$REPO_TYPE" == "hg" ]; then
        echo -e "[paths]\ndefault = $REPO_REMOTE\n" >> .hg/hgrc
        hg pull
      fi
    fi
    if [ "$REPO_BRANCH" != "" ]; then
      echo "Switching over to branch $REPO_BRANCH..."
      if [ "$REPO_TYPE" == "git" ]; then
        git reset --hard origin/$REPO_BRANCH
        git clean -dxf
      elif [ "$REPO_TYPE" == "hg" ]; then
        hg checkout $REPO_BRANCH
      fi
    fi
  fi
fi
# Download all the C dependencies
# (archives were pre-fetched into /deps-cache; extract each by extension)
mkdir /deps
DEPS=($DEPS) && for dep in "${DEPS[@]}"; do
  if [ "${dep##*.}" == "tar" ]; then cat "/deps-cache/`basename $dep`" | tar -C /deps -x; fi
  if [ "${dep##*.}" == "gz" ]; then cat "/deps-cache/`basename $dep`" | tar -C /deps -xz; fi
  if [ "${dep##*.}" == "bz2" ]; then cat "/deps-cache/`basename $dep`" | tar -C /deps -xj; fi
done
DEPS_ARGS=($ARGS)
# Save the contents of the pre-build /usr/local folder for post cleanup
USR_LOCAL_CONTENTS=`ls /usr/local`
# Configure some global build parameters
NAME=`basename $1/$PACK`
if [ "$OUT" != "" ]; then
  NAME=$OUT
fi
# Translate the FLAG_* environment switches into go-build arguments.
if [ "$FLAG_V" == "true" ];    then V=-v; fi
if [ "$FLAG_X" == "true" ];    then X=-x; fi
if [ "$FLAG_RACE" == "true" ]; then R=-race; fi
if [ "$FLAG_TAGS" != "" ];     then T=(--tags "$FLAG_TAGS"); fi
if [ "$FLAG_LDFLAGS" != "" ];  then LD="$FLAG_LDFLAGS"; fi
if [ "$FLAG_BUILDMODE" != "" ] && [ "$FLAG_BUILDMODE" != "default" ]; then BM="--buildmode=$FLAG_BUILDMODE"; fi
# If no build targets were specified, inject a catch all wildcard
if [ "$TARGETS" == "" ]; then
  TARGETS="./."
fi
# Build for each requested platform individually
for TARGET in $TARGETS; do
# Split the target into platform and architecture
XGOOS=`echo $TARGET | cut -d '/' -f 1`
XGOARCH=`echo $TARGET | cut -d '/' -f 2`
# Check and build for Android targets.
# Standalone NDK toolchains are assembled per architecture; the special
# "aar" arch additionally wraps the per-arch c-shared libraries into an
# Android Archive with SWIG-generated JNI bindings.
if ([ $XGOOS == "." ] || [[ $XGOOS == android* ]]); then
# Split the platform version and configure the linker options
PLATFORM=`echo $XGOOS | cut -d '-' -f 2`
if [ "$PLATFORM" == "" ] || [ "$PLATFORM" == "." ] || [ "$PLATFORM" == "android" ]; then
PLATFORM=16 # Jelly Bean (Android 4.1, API level 16)
fi
# From API level 16 on, binaries are built as position-independent executables
if [ "$PLATFORM" -ge 16 ]; then
CGO_CCPIE="-fPIE"
CGO_LDPIE="-fPIE"
EXT_LDPIE="-extldflags=-pie"
else
unset CGO_CCPIE CGO_LDPIE EXT_LDPIE
fi
# Staging area for the shared libraries that feed the .aar bundle
mkdir -p /build-android-aar
# Iterate over the requested architectures, bootstrap and build
if [ $XGOARCH == "." ] || [ $XGOARCH == "arm" ] || [ $XGOARCH == "aar" ]; then
if [ "$GO_VERSION" -lt 150 ]; then
echo "Go version too low, skipping android-$PLATFORM/arm..."
else
# Include a linker workaround for pre Go 1.6 releases
if [ "$GO_VERSION" -lt 160 ]; then
EXT_LDAMD="-extldflags=-Wl,--allow-multiple-definition"
fi
echo "Assembling toolchain for android-$PLATFORM/arm..."
$ANDROID_NDK_ROOT/build/tools/make-standalone-toolchain.sh --ndk-dir=$ANDROID_NDK_ROOT --install-dir=/usr/$ANDROID_CHAIN_ARM --toolchain=$ANDROID_CHAIN_ARM --arch=arm > /dev/null 2>&1
# Rebuild the Go standard library for android/arm with the NDK compiler
echo "Bootstrapping android-$PLATFORM/arm..."
CC=arm-linux-androideabi-gcc GOOS=android GOARCH=arm GOARM=7 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go install std
echo "Compiling for android-$PLATFORM/arm..."
# Build C/C++ dependencies into the toolchain sysroot, then point pkg-config at it
CC=arm-linux-androideabi-gcc CXX=arm-linux-androideabi-g++ HOST=arm-linux-androideabi PREFIX=/usr/$ANDROID_CHAIN_ARM/arm-linux-androideabi $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/$ANDROID_CHAIN_ARM/arm-linux-androideabi/lib/pkgconfig
if [ $XGOARCH == "." ] || [ $XGOARCH == "arm" ]; then
CC=arm-linux-androideabi-gcc CXX=arm-linux-androideabi-g++ GOOS=android GOARCH=arm GOARM=7 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_CXXFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=arm-linux-androideabi-gcc CXX=arm-linux-androideabi-g++ GOOS=android GOARCH=arm GOARM=7 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_CXXFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go build $V $X "${T[@]}" --ldflags="$V $EXT_LDPIE $EXT_LDAMD $LD" $BM -o "/build/$NAME-android-$PLATFORM-arm`extension android`" ./$PACK
fi
# The aar flavour builds a c-shared library instead of an executable
if [ $XGOARCH == "." ] || [ $XGOARCH == "aar" ]; then
CC=arm-linux-androideabi-gcc CXX=arm-linux-androideabi-g++ GOOS=android GOARCH=arm GOARM=7 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=arm-linux-androideabi-gcc CXX=arm-linux-androideabi-g++ GOOS=android GOARCH=arm GOARM=7 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $EXT_LDAMD $LD" --buildmode=c-shared -o "/build-android-aar/$NAME-android-$PLATFORM-arm.so" ./$PACK
fi
fi
fi
# GOARCH=386/arm64 for android need Go 1.6+ (c-shared support on these arches)
if [ "$GO_VERSION" -lt 160 ]; then
echo "Go version too low, skipping android-$PLATFORM/386,arm64..."
else
if [ "$PLATFORM" -ge 9 ] && ([ $XGOARCH == "." ] || [ $XGOARCH == "386" ] || [ $XGOARCH == "aar" ]); then
echo "Assembling toolchain for android-$PLATFORM/386..."
$ANDROID_NDK_ROOT/build/tools/make-standalone-toolchain.sh --ndk-dir=$ANDROID_NDK_ROOT --install-dir=/usr/$ANDROID_CHAIN_386 --toolchain=$ANDROID_CHAIN_386 --arch=x86 > /dev/null 2>&1
echo "Bootstrapping android-$PLATFORM/386..."
CC=i686-linux-android-gcc GOOS=android GOARCH=386 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go install std
echo "Compiling for android-$PLATFORM/386..."
CC=i686-linux-android-gcc CXX=i686-linux-android-g++ HOST=i686-linux-android PREFIX=/usr/$ANDROID_CHAIN_386/i686-linux-android $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/$ANDROID_CHAIN_386/i686-linux-android/lib/pkgconfig
if [ $XGOARCH == "." ] || [ $XGOARCH == "386" ]; then
CC=i686-linux-android-gcc CXX=i686-linux-android-g++ GOOS=android GOARCH=386 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_CXXFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=i686-linux-android-gcc CXX=i686-linux-android-g++ GOOS=android GOARCH=386 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_CXXFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go build $V $X "${T[@]}" --ldflags="$V $EXT_LDPIE $LD" $BM -o "/build/$NAME-android-$PLATFORM-386`extension android`" ./$PACK
fi
if [ $XGOARCH == "." ] || [ $XGOARCH == "aar" ]; then
CC=i686-linux-android-gcc CXX=i686-linux-android-g++ GOOS=android GOARCH=386 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=i686-linux-android-gcc CXX=i686-linux-android-g++ GOOS=android GOARCH=386 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" --buildmode=c-shared -o "/build-android-aar/$NAME-android-$PLATFORM-386.so" ./$PACK
fi
fi
# arm64 devices exist only from API level 21 (Android 5.0) upwards
if [ "$PLATFORM" -ge 21 ] && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm64" ] || [ $XGOARCH == "aar" ]); then
echo "Assembling toolchain for android-$PLATFORM/arm64..."
$ANDROID_NDK_ROOT/build/tools/make-standalone-toolchain.sh --ndk-dir=$ANDROID_NDK_ROOT --install-dir=/usr/$ANDROID_CHAIN_ARM64 --toolchain=$ANDROID_CHAIN_ARM64 --arch=arm64 > /dev/null 2>&1
echo "Bootstrapping android-$PLATFORM/arm64..."
CC=aarch64-linux-android-gcc GOOS=android GOARCH=arm64 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go install std
echo "Compiling for android-$PLATFORM/arm64..."
CC=aarch64-linux-android-gcc CXX=aarch64-linux-android-g++ HOST=aarch64-linux-android PREFIX=/usr/$ANDROID_CHAIN_ARM64/aarch64-linux-android $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/$ANDROID_CHAIN_ARM64/aarch64-linux-android/lib/pkgconfig
if [ $XGOARCH == "." ] || [ $XGOARCH == "arm64" ]; then
CC=aarch64-linux-android-gcc CXX=aarch64-linux-android-g++ GOOS=android GOARCH=arm64 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_CXXFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=aarch64-linux-android-gcc CXX=aarch64-linux-android-g++ GOOS=android GOARCH=arm64 CGO_ENABLED=1 CGO_CFLAGS="$CGO_CCPIE" CGO_CXXFLAGS="$CGO_CCPIE" CGO_LDFLAGS="$CGO_LDPIE" go build $V $X "${T[@]}" --ldflags="$V $EXT_LDPIE $LD" $BM -o "/build/$NAME-android-$PLATFORM-arm64`extension android`" ./$PACK
fi
if [ $XGOARCH == "." ] || [ $XGOARCH == "aar" ]; then
CC=aarch64-linux-android-gcc CXX=aarch64-linux-android-g++ GOOS=android GOARCH=arm64 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=aarch64-linux-android-gcc CXX=aarch64-linux-android-g++ GOOS=android GOARCH=arm64 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" --buildmode=c-shared -o "/build-android-aar/$NAME-android-$PLATFORM-arm64.so" ./$PACK
fi
fi
fi
# Assemble the Android Archive from the built shared libraries:
# generate JNI wrappers with SWIG from the cgo-exported C header, compile one
# lib<name>.so per ABI, compile the Java glue into classes.jar, then zip the
# standard .aar layout (AndroidManifest.xml, jni/, classes.jar, R.txt, res/).
if [ $XGOARCH == "." ] || [ $XGOARCH == "aar" ]; then
title=${NAME^}
archive=/build/$NAME-android-$PLATFORM-aar
bundle=/build/$NAME-android-$PLATFORM.aar
# Generate the Java import path based on the Go one (reverse the host parts)
package=`go list ./$PACK | tr '-' '_'`
package=$(for p in `echo ${package//\// }`; do echo $p | awk 'BEGIN{FS="."}{for (i=NF; i>0; i--){printf "%s.", $i;}}'; done | sed 's/.$//')
package=${package%.*}
# Create a fresh empty Android archive
rm -rf $archive $bundle
mkdir -p $archive
echo -e "<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\" package=\"$package\">\n  <uses-sdk android:minSdkVersion=\"$PLATFORM\"/>\n</manifest>" > $archive/AndroidManifest.xml
mkdir -p $archive/res
touch $archive/R.txt
# Generate the JNI wrappers automatically with SWIG
jni=`mktemp -d`
header=`find /build-android-aar | grep '\.h$' | head -n 1`
if [ "$header" == "" ]; then
echo "No API C header specified, skipping android-$PLATFORM/aar..."
else
cp $header $jni/$NAME.h
# SWIG chokes on cgo's complex-number spellings; normalise them first
sed -i -e 's|__complex|complex|g' $jni/$NAME.h
sed -i -e 's|_Complex|complex|g' $jni/$NAME.h
echo -e "%module $title\n%{\n#include \"$NAME.h\"\n%}\n%pragma(java) jniclasscode=%{\nstatic {\nSystem.loadLibrary(\"$NAME\");\n}\n%}\n%include \"$NAME.h\"" > $jni/$NAME.i
mkdir -p $jni/${package//.//}
swig -java -package $package -outdir $jni/${package//.//} $jni/$NAME.i
# Assemble the Go static libraries and the JNI interface into shared libraries
for lib in `find /build-android-aar | grep '\.so$'`; do
if [[ "$lib" = *-arm.so ]]; then cc=arm-linux-androideabi-gcc; abi="armeabi-v7a"; fi
if [[ "$lib" = *-arm64.so ]]; then cc=aarch64-linux-android-gcc; abi="arm64-v8a"; fi
if [[ "$lib" = *-386.so ]]; then cc=i686-linux-android-gcc; abi="x86"; fi
mkdir -p $archive/jni/$abi
cp ${lib%.*}.h $jni/${NAME}.h
cp $lib $archive/jni/$abi/lib${NAME}raw.so
(cd $archive/jni/$abi && $cc -shared -fPIC -o lib${NAME}.so -I"$ANDROID_NDK_LIBC/include" -I"$ANDROID_NDK_LIBC/libs/$abi/include" -I"$jni" lib${NAME}raw.so $jni/${NAME}_wrap.c)
done
# Compile the Java wrapper and assemble into a .jar file
mkdir -p $jni/build
javac -target 1.7 -source 1.7 -cp . -d $jni/build $jni/${package//.//}/*.java
(cd $jni/build && jar cvf $archive/classes.jar *)
# Finally assemble the archive contents into an .aar and clean up
(cd $archive && zip -r $bundle *)
rm -rf $jni $archive
fi
fi
# Clean up the android builds, toolchains and runtimes
rm -rf /build-android-aar
rm -rf /usr/local/go/pkg/android_*
rm -rf /usr/$ANDROID_CHAIN_ARM /usr/$ANDROID_CHAIN_ARM64 /usr/$ANDROID_CHAIN_386
fi
# Check and build for Linux targets.
# Each architecture follows the same pattern: build the C/C++ dependencies
# with the matching cross toolchain into its prefix, point PKG_CONFIG_PATH at
# that prefix, then `go get -d` and `go build` the package.
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "amd64" ]); then
echo "Compiling for linux/amd64..."
HOST=x86_64-linux PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]}
GOOS=linux GOARCH=amd64 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
# NOTE(review): $R appears to carry a race-detector suffix/flag set earlier in
# the script (amd64-only targets use it) — confirm against the preceding section.
GOOS=linux GOARCH=amd64 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $R $BM -o "/build/$NAME-linux-amd64$R`extension linux`" ./$PACK
fi
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "386" ]); then
echo "Compiling for linux/386..."
HOST=i686-linux PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]}
GOOS=linux GOARCH=386 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
GOOS=linux GOARCH=386 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-386`extension linux`" ./$PACK
fi
# The three ARM variants (v5/v6/v7) share Go's linux_arm package directory,
# so std is re-bootstrapped per variant and the directory purged afterwards.
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm" ] || [ $XGOARCH == "arm-5" ]); then
if [ "$GO_VERSION" -ge 150 ]; then
echo "Bootstrapping linux/arm-5..."
CC=arm-linux-gnueabi-gcc-5 GOOS=linux GOARCH=arm GOARM=5 CGO_ENABLED=1 CGO_CFLAGS="-march=armv5" CGO_CXXFLAGS="-march=armv5" go install std
fi
echo "Compiling for linux/arm-5..."
CC=arm-linux-gnueabi-gcc-5 CXX=arm-linux-gnueabi-g++-5 HOST=arm-linux-gnueabi PREFIX=/usr/arm-linux-gnueabi CFLAGS="-march=armv5" CXXFLAGS="-march=armv5" $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/arm-linux-gnueabi/lib/pkgconfig
CC=arm-linux-gnueabi-gcc-5 CXX=arm-linux-gnueabi-g++-5 GOOS=linux GOARCH=arm GOARM=5 CGO_ENABLED=1 CGO_CFLAGS="-march=armv5" CGO_CXXFLAGS="-march=armv5" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=arm-linux-gnueabi-gcc-5 CXX=arm-linux-gnueabi-g++-5 GOOS=linux GOARCH=arm GOARM=5 CGO_ENABLED=1 CGO_CFLAGS="-march=armv5" CGO_CXXFLAGS="-march=armv5" go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-arm-5`extension linux`" ./$PACK
if [ "$GO_VERSION" -ge 150 ]; then
echo "Cleaning up Go runtime for linux/arm-5..."
rm -rf /usr/local/go/pkg/linux_arm
fi
fi
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm-6" ]); then
if [ "$GO_VERSION" -lt 150 ]; then
echo "Go version too low, skipping linux/arm-6..."
else
echo "Bootstrapping linux/arm-6..."
CC=arm-linux-gnueabi-gcc-5 GOOS=linux GOARCH=arm GOARM=6 CGO_ENABLED=1 CGO_CFLAGS="-march=armv6" CGO_CXXFLAGS="-march=armv6" go install std
echo "Compiling for linux/arm-6..."
CC=arm-linux-gnueabi-gcc-5 CXX=arm-linux-gnueabi-g++-5 HOST=arm-linux-gnueabi PREFIX=/usr/arm-linux-gnueabi CFLAGS="-march=armv6" CXXFLAGS="-march=armv6" $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/arm-linux-gnueabi/lib/pkgconfig
CC=arm-linux-gnueabi-gcc-5 CXX=arm-linux-gnueabi-g++-5 GOOS=linux GOARCH=arm GOARM=6 CGO_ENABLED=1 CGO_CFLAGS="-march=armv6" CGO_CXXFLAGS="-march=armv6" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=arm-linux-gnueabi-gcc-5 CXX=arm-linux-gnueabi-g++-5 GOOS=linux GOARCH=arm GOARM=6 CGO_ENABLED=1 CGO_CFLAGS="-march=armv6" CGO_CXXFLAGS="-march=armv6" go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-arm-6`extension linux`" ./$PACK
echo "Cleaning up Go runtime for linux/arm-6..."
rm -rf /usr/local/go/pkg/linux_arm
fi
fi
# armv7 uses the hard-float toolchain (gnueabihf) and needs -fPIC for cgo deps
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm-7" ]); then
if [ "$GO_VERSION" -lt 150 ]; then
echo "Go version too low, skipping linux/arm-7..."
else
echo "Bootstrapping linux/arm-7..."
CC=arm-linux-gnueabihf-gcc-5 GOOS=linux GOARCH=arm GOARM=7 CGO_ENABLED=1 CGO_CFLAGS="-march=armv7-a" CGO_CXXFLAGS="-march=armv7-a" go install std
echo "Compiling for linux/arm-7..."
CC=arm-linux-gnueabihf-gcc-5 CXX=arm-linux-gnueabihf-g++-5 HOST=arm-linux-gnueabihf PREFIX=/usr/arm-linux-gnueabihf CFLAGS="-march=armv7-a -fPIC" CXXFLAGS="-march=armv7-a -fPIC" $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/arm-linux-gnueabihf/lib/pkgconfig
CC=arm-linux-gnueabihf-gcc-5 CXX=arm-linux-gnueabihf-g++-5 GOOS=linux GOARCH=arm GOARM=7 CGO_ENABLED=1 CGO_CFLAGS="-march=armv7-a -fPIC" CGO_CXXFLAGS="-march=armv7-a -fPIC" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=arm-linux-gnueabihf-gcc-5 CXX=arm-linux-gnueabihf-g++-5 GOOS=linux GOARCH=arm GOARM=7 CGO_ENABLED=1 CGO_CFLAGS="-march=armv7-a -fPIC" CGO_CXXFLAGS="-march=armv7-a -fPIC" go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-arm-7`extension linux`" ./$PACK
echo "Cleaning up Go runtime for linux/arm-7..."
rm -rf /usr/local/go/pkg/linux_arm
fi
fi
# GOARCH=arm64 exists from Go 1.5, mips64 variants from Go 1.7
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm64" ]); then
if [ "$GO_VERSION" -lt 150 ]; then
echo "Go version too low, skipping linux/arm64..."
else
echo "Compiling for linux/arm64..."
CC=aarch64-linux-gnu-gcc-5 CXX=aarch64-linux-gnu-g++-5 HOST=aarch64-linux-gnu PREFIX=/usr/aarch64-linux-gnu $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/aarch64-linux-gnu/lib/pkgconfig
CC=aarch64-linux-gnu-gcc-5 CXX=aarch64-linux-gnu-g++-5 GOOS=linux GOARCH=arm64 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=aarch64-linux-gnu-gcc-5 CXX=aarch64-linux-gnu-g++-5 GOOS=linux GOARCH=arm64 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-arm64`extension linux`" ./$PACK
fi
fi
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "mips64" ]); then
if [ "$GO_VERSION" -lt 170 ]; then
echo "Go version too low, skipping linux/mips64..."
else
echo "Compiling for linux/mips64..."
CC=mips64-linux-gnuabi64-gcc-5 CXX=mips64-linux-gnuabi64-g++-5 HOST=mips64-linux-gnuabi64 PREFIX=/usr/mips64-linux-gnuabi64 $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/mips64-linux-gnuabi64/lib/pkgconfig
CC=mips64-linux-gnuabi64-gcc-5 CXX=mips64-linux-gnuabi64-g++-5 GOOS=linux GOARCH=mips64 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=mips64-linux-gnuabi64-gcc-5 CXX=mips64-linux-gnuabi64-g++-5 GOOS=linux GOARCH=mips64 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-mips64`extension linux`" ./$PACK
fi
fi
# Check and build for linux/mips64le (needs Go 1.7+ for GOARCH=mips64le).
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "mips64le" ]); then
if [ "$GO_VERSION" -lt 170 ]; then
echo "Go version too low, skipping linux/mips64le..."
else
echo "Compiling for linux/mips64le..."
# Build the C/C++ dependencies with the little-endian mips64 cross toolchain
CC=mips64el-linux-gnuabi64-gcc-5 CXX=mips64el-linux-gnuabi64-g++-5 HOST=mips64el-linux-gnuabi64 $BUILD_DEPS /deps ${DEPS_ARGS[@]} PREFIX=/usr/mips64el-linux-gnuabi64
# Fix: pkg-config must search the same prefix the deps were installed into.
# The GNU triplet is "mips64el" (little-endian), not "mips64le"; the old
# /usr/mips64le-linux-gnuabi64 path never receives any .pc files.
export PKG_CONFIG_PATH=/usr/mips64el-linux-gnuabi64/lib/pkgconfig
CC=mips64el-linux-gnuabi64-gcc-5 CXX=mips64el-linux-gnuabi64-g++-5 GOOS=linux GOARCH=mips64le CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=mips64el-linux-gnuabi64-gcc-5 CXX=mips64el-linux-gnuabi64-g++-5 GOOS=linux GOARCH=mips64le CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-mips64le`extension linux`" ./$PACK
fi
fi
# Check and build for linux/mips (big-endian 32-bit; needs Go 1.8+).
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "mips" ]); then
if [ "$GO_VERSION" -lt 180 ]; then
echo "Go version too low, skipping linux/mips..."
else
echo "Compiling for linux/mips..."
# Build deps into the cross toolchain prefix, then point pkg-config at it
CC=mips-linux-gnu-gcc-5 CXX=mips-linux-gnu-g++-5 HOST=mips-linux-gnu PREFIX=/usr/mips-linux-gnu $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/mips-linux-gnu/lib/pkgconfig
CC=mips-linux-gnu-gcc-5 CXX=mips-linux-gnu-g++-5 GOOS=linux GOARCH=mips CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=mips-linux-gnu-gcc-5 CXX=mips-linux-gnu-g++-5 GOOS=linux GOARCH=mips CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-mips`extension linux`" ./$PACK
fi
fi
# Check and build for linux/mipsle (little-endian 32-bit; needs Go 1.8+).
if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "mipsle" ]); then
if [ "$GO_VERSION" -lt 180 ]; then
echo "Go version too low, skipping linux/mipsle..."
else
echo "Compiling for linux/mipsle..."
# Build the C/C++ dependencies with the little-endian mips cross toolchain
CC=mipsel-linux-gnu-gcc-5 CXX=mipsel-linux-gnu-g++-5 HOST=mipsel-linux-gnu PREFIX=/usr/mipsel-linux-gnu $BUILD_DEPS /deps ${DEPS_ARGS[@]}
# Fix: pkg-config must search the same prefix the deps were installed into.
# The GNU triplet is "mipsel", not "mipsle"; the old /usr/mipsle-linux-gnu
# path never receives any .pc files.
export PKG_CONFIG_PATH=/usr/mipsel-linux-gnu/lib/pkgconfig
CC=mipsel-linux-gnu-gcc-5 CXX=mipsel-linux-gnu-g++-5 GOOS=linux GOARCH=mipsle CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=mipsel-linux-gnu-gcc-5 CXX=mipsel-linux-gnu-g++-5 GOOS=linux GOARCH=mipsle CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-linux-mipsle`extension linux`" ./$PACK
fi
fi
# Check and build for Windows targets (cross-compiled with mingw-w64; the
# *-posix flavour of the compilers is selected).
if [ $XGOOS == "." ] || [[ $XGOOS == windows* ]]; then
# Split the platform version and configure the Windows NT version
PLATFORM=`echo $XGOOS | cut -d '-' -f 2`
if [ "$PLATFORM" == "" ] || [ "$PLATFORM" == "." ] || [ "$PLATFORM" == "windows" ]; then
PLATFORM=4.0 # Windows NT
fi
MAJOR=`echo $PLATFORM | cut -d '.' -f 1`
# Only extract a minor version when the platform string actually contains a dot
if [ "${PLATFORM/.}" != "$PLATFORM" ] ; then
MINOR=`echo $PLATFORM | cut -d '.' -f 2`
fi
# Encode major/minor as two zero-padded digit pairs into the _WIN32_WINNT
# macro, e.g. 4.0 -> -D_WIN32_WINNT=0x0400
CGO_NTDEF="-D_WIN32_WINNT=0x`printf "%02d" $MAJOR``printf "%02d" $MINOR`"
# Build the requested windows binaries
if [ $XGOARCH == "." ] || [ $XGOARCH == "amd64" ]; then
echo "Compiling for windows-$PLATFORM/amd64..."
CC=x86_64-w64-mingw32-gcc-posix CXX=x86_64-w64-mingw32-g++-posix HOST=x86_64-w64-mingw32 PREFIX=/usr/x86_64-w64-mingw32 $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/x86_64-w64-mingw32/lib/pkgconfig
CC=x86_64-w64-mingw32-gcc-posix CXX=x86_64-w64-mingw32-g++-posix GOOS=windows GOARCH=amd64 CGO_ENABLED=1 CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=x86_64-w64-mingw32-gcc-posix CXX=x86_64-w64-mingw32-g++-posix GOOS=windows GOARCH=amd64 CGO_ENABLED=1 CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go build $V $X "${T[@]}" --ldflags="$V $LD" $R $BM -o "/build/$NAME-windows-$PLATFORM-amd64$R`extension windows`" ./$PACK
fi
if [ $XGOARCH == "." ] || [ $XGOARCH == "386" ]; then
echo "Compiling for windows-$PLATFORM/386..."
CC=i686-w64-mingw32-gcc-posix CXX=i686-w64-mingw32-g++-posix HOST=i686-w64-mingw32 PREFIX=/usr/i686-w64-mingw32 $BUILD_DEPS /deps ${DEPS_ARGS[@]}
export PKG_CONFIG_PATH=/usr/i686-w64-mingw32/lib/pkgconfig
CC=i686-w64-mingw32-gcc-posix CXX=i686-w64-mingw32-g++-posix GOOS=windows GOARCH=386 CGO_ENABLED=1 CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK
CC=i686-w64-mingw32-gcc-posix CXX=i686-w64-mingw32-g++-posix GOOS=windows GOARCH=386 CGO_ENABLED=1 CGO_CFLAGS="$CGO_NTDEF" CGO_CXXFLAGS="$CGO_NTDEF" go build $V $X "${T[@]}" --ldflags="$V $LD" $BM -o "/build/$NAME-windows-$PLATFORM-386`extension windows`" ./$PACK
fi
fi
# Check and build for OSX targets.
# NOTE(review): o64-clang / o32-clang look like the osxcross cross-compiler
# wrappers installed elsewhere in the image — confirm against the Dockerfile.
if [ $XGOOS == "." ] || [[ $XGOOS == darwin* ]]; then
# Split the platform version and configure the deployment target
PLATFORM=`echo $XGOOS | cut -d '-' -f 2`
if [ "$PLATFORM" == "" ] || [ "$PLATFORM" == "." ] || [ "$PLATFORM" == "darwin" ]; then
PLATFORM=10.6 # OS X Snow Leopard
fi
export MACOSX_DEPLOYMENT_TARGET=$PLATFORM
# Strip symbol table below Go 1.6 to prevent DWARF issues
LDSTRIP=""
if [ "$GO_VERSION" -lt 160 ]; then
LDSTRIP="-s"
fi
# Build the requested darwin binaries
if [ $XGOARCH == "." ] || [ $XGOARCH == "amd64" ]; then
echo "Compiling for darwin-$PLATFORM/amd64..."
CC=o64-clang CXX=o64-clang++ HOST=x86_64-apple-darwin15 PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]}
CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$LDSTRIP $V $LD" -d ./$PACK
CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$LDSTRIP $V $LD" $R $BM -o "/build/$NAME-darwin-$PLATFORM-amd64$R`extension darwin`" ./$PACK
fi
if [ $XGOARCH == "." ] || [ $XGOARCH == "386" ]; then
echo "Compiling for darwin-$PLATFORM/386..."
CC=o32-clang CXX=o32-clang++ HOST=i386-apple-darwin15 PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]}
CC=o32-clang CXX=o32-clang++ GOOS=darwin GOARCH=386 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$LDSTRIP $V $LD" -d ./$PACK
CC=o32-clang CXX=o32-clang++ GOOS=darwin GOARCH=386 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$LDSTRIP $V $LD" $BM -o "/build/$NAME-darwin-$PLATFORM-386`extension darwin`" ./$PACK
fi
# Remove any automatically injected deployment target vars
unset MACOSX_DEPLOYMENT_TARGET
fi
# Check and build for iOS targets.
# Device (arm-7/arm64) and simulator (amd64) binaries are produced; the
# "framework" arch builds c-archive libraries per arch and fuses them into a
# fat Something.framework bundle via lipo.
if [ $XGOOS == "." ] || [[ $XGOOS == ios* ]]; then
# Split the platform version and configure the deployment target
PLATFORM=`echo $XGOOS | cut -d '-' -f 2`
if [ "$PLATFORM" == "" ] || [ "$PLATFORM" == "." ] || [ "$PLATFORM" == "ios" ]; then
PLATFORM=5.0 # first iPad and upwards
fi
export IPHONEOS_DEPLOYMENT_TARGET=$PLATFORM
# Build the requested iOS binaries
if [ "$GO_VERSION" -lt 150 ]; then
echo "Go version too low, skipping ios..."
else
# Add the 'ios' tag to all builds, otherwise the std libs will fail
if [ "$FLAG_TAGS" != "" ]; then
IOSTAGS=(--tags "ios $FLAG_TAGS")
else
IOSTAGS=(--tags ios)
fi
# Staging area for the per-arch c-archives that feed the framework bundle
mkdir -p /build-ios-fw
# Strip symbol table below Go 1.6 to prevent DWARF issues
LDSTRIP=""
if [ "$GO_VERSION" -lt 160 ]; then
LDSTRIP="-s"
fi
# Cross compile to all available iOS and simulator platforms
if [ -d "$IOS_NDK_ARM_7" ] && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm-7" ] || [ $XGOARCH == "framework" ]); then
echo "Bootstrapping ios-$PLATFORM/arm-7..."
export PATH=$IOS_NDK_ARM_7/bin:$PATH
GOOS=darwin GOARCH=arm GOARM=7 CGO_ENABLED=1 CC=arm-apple-darwin11-clang go install --tags ios std
echo "Compiling for ios-$PLATFORM/arm-7..."
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ HOST=arm-apple-darwin11 PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]}
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=arm GOARM=7 CGO_ENABLED=1 go get $V $X "${IOSTAGS[@]}" --ldflags="$V $LD" -d ./$PACK
if [ $XGOARCH == "." ] || [ $XGOARCH == "arm-7" ]; then
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=arm GOARM=7 CGO_ENABLED=1 go build $V $X "${IOSTAGS[@]}" --ldflags="$LDSTRIP $V $LD" $BM -o "/build/$NAME-ios-$PLATFORM-armv7`extension darwin`" ./$PACK
fi
if [ $XGOARCH == "." ] || [ $XGOARCH == "framework" ]; then
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=arm GOARM=7 CGO_ENABLED=1 go build $V $X "${IOSTAGS[@]}" --ldflags="$V $LD" --buildmode=c-archive -o "/build-ios-fw/$NAME-ios-$PLATFORM-armv7.a" ./$PACK
fi
echo "Cleaning up Go runtime for ios-$PLATFORM/arm-7..."
rm -rf /usr/local/go/pkg/darwin_arm
fi
if [ -d "$IOS_NDK_ARM64" ] && ([ $XGOARCH == "." ] || [ $XGOARCH == "arm64" ] || [ $XGOARCH == "framework" ]); then
echo "Bootstrapping ios-$PLATFORM/arm64..."
export PATH=$IOS_NDK_ARM64/bin:$PATH
GOOS=darwin GOARCH=arm64 CGO_ENABLED=1 CC=arm-apple-darwin11-clang go install --tags ios std
echo "Compiling for ios-$PLATFORM/arm64..."
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ HOST=arm-apple-darwin11 PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]}
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=arm64 CGO_ENABLED=1 go get $V $X "${IOSTAGS[@]}" --ldflags="$V $LD" -d ./$PACK
if [ $XGOARCH == "." ] || [ $XGOARCH == "arm64" ]; then
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=arm64 CGO_ENABLED=1 go build $V $X "${IOSTAGS[@]}" --ldflags="$LDSTRIP $V $LD" $BM -o "/build/$NAME-ios-$PLATFORM-arm64`extension darwin`" ./$PACK
fi
if [ $XGOARCH == "." ] || [ $XGOARCH == "framework" ]; then
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=arm64 CGO_ENABLED=1 go build $V $X "${IOSTAGS[@]}" --ldflags="$V $LD" --buildmode=c-archive -o "/build-ios-fw/$NAME-ios-$PLATFORM-arm64.a" ./$PACK
fi
echo "Cleaning up Go runtime for ios-$PLATFORM/arm64..."
rm -rf /usr/local/go/pkg/darwin_arm64
fi
# Simulator build: darwin/amd64 with the ios tag. The host's darwin_amd64
# std packages are stashed away and restored afterwards so the regular OSX
# builds keep using their own runtime.
if [ -d "$IOS_SIM_NDK_AMD64" ] && ([ $XGOARCH == "." ] || [ $XGOARCH == "amd64" ] || [ $XGOARCH == "framework" ]); then
echo "Bootstrapping ios-$PLATFORM/amd64..."
export PATH=$IOS_SIM_NDK_AMD64/bin:$PATH
mv /usr/local/go/pkg/darwin_amd64 /usr/local/go/pkg/darwin_amd64_bak
GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 CC=arm-apple-darwin11-clang go install --tags ios std
echo "Compiling for ios-$PLATFORM/amd64..."
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ HOST=arm-apple-darwin11 PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]}
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go get $V $X "${IOSTAGS[@]}" --ldflags="$V $LD" -d ./$PACK
if [ $XGOARCH == "." ] || [ $XGOARCH == "amd64" ]; then
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go build $V $X "${IOSTAGS[@]}" --ldflags="$LDSTRIP $V $LD" $BM -o "/build/$NAME-ios-$PLATFORM-x86_64`extension darwin`" ./$PACK
fi
if [ $XGOARCH == "." ] || [ $XGOARCH == "framework" ]; then
CC=arm-apple-darwin11-clang CXX=arm-apple-darwin11-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go build $V $X "${IOSTAGS[@]}" --ldflags="$V $LD" --buildmode=c-archive -o "/build-ios-fw/$NAME-ios-$PLATFORM-x86_64.a" ./$PACK
fi
echo "Cleaning up Go runtime for ios-$PLATFORM/amd64..."
rm -rf /usr/local/go/pkg/darwin_amd64
mv /usr/local/go/pkg/darwin_amd64_bak /usr/local/go/pkg/darwin_amd64
fi
# Assemble the iOS framework from the built binaries: fat binary via lipo,
# cgo header, minimal Info.plist and a module map, all symlinked through the
# standard Versions/A layout.
if [ $XGOARCH == "." ] || [ $XGOARCH == "framework" ]; then
title=${NAME^}
framework=/build/$NAME-ios-$PLATFORM-framework/$title.framework
rm -rf $framework
mkdir -p $framework/Versions/A
(cd $framework/Versions && ln -nsf A Current)
arches=()
for lib in `ls /build-ios-fw | grep -e '\.a$'`; do
arches+=("-arch" "`echo ${lib##*-} | cut -d '.' -f 1`" "/build-ios-fw/$lib")
done
arm-apple-darwin11-lipo -create "${arches[@]}" -o $framework/Versions/A/$title
arm-apple-darwin11-ranlib $framework/Versions/A/$title
(cd $framework && ln -nsf Versions/A/$title $title)
mkdir -p $framework/Versions/A/Headers
for header in `ls /build-ios-fw | grep -e '\.h$'`; do
cp -f /build-ios-fw/$header $framework/Versions/A/Headers/$title.h
done
(cd $framework && ln -nsf Versions/A/Headers Headers)
mkdir -p $framework/Versions/A/Resources
echo -e "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">\n<plist version=\"1.0\">\n<dict>\n</dict>\n</plist>" > $framework/Versions/A/Resources/Info.plist
(cd $framework && ln -nsf Versions/A/Resources Resources)
mkdir -p $framework/Versions/A/Modules
echo -e "framework module \"$title\" {\n    header \"$title.h\"\n    export *\n}" > $framework/Versions/A/Modules/module.modulemap
(cd $framework && ln -nsf Versions/A/Modules Modules)
chmod 777 -R /build/$NAME-ios-$PLATFORM-framework
fi
rm -rf /build-ios-fw
fi
# Remove any automatically injected deployment target vars
unset IPHONEOS_DEPLOYMENT_TARGET
fi
# End of the per-target build loop
done
# Clean up any leftovers for subsequent build invocations.
# Anything that appeared under /usr/local during this run (dependency
# installs) is deleted; only the entries recorded in USR_LOCAL_CONTENTS
# before the build survive.
echo "Cleaning up build environment..."
rm -rf /deps
for dir in `ls /usr/local`; do
keep=0
# Check against original folder contents
for old in $USR_LOCAL_CONTENTS; do
if [ "$old" == "$dir" ]; then
keep=1
fi
done
# Delete anything freshly generated
if [ "$keep" == "0" ]; then
rm -rf "/usr/local/$dir"
fi
done
| true
|
5667bbcc19eba979feb73369601a716396a42dad
|
Shell
|
wdoucette/wayne
|
/qa/exe_test
|
UTF-8
| 498
| 3.640625
| 4
|
[] |
no_license
|
#!/bin/bash
# exe_test: execute a command with its arguments and report failures.
#
# Usage:
#   ./exe_test ./test_prog arg1 [arg2 ...]
#
# Diagnostics go to stderr so the tested program's stdout stays clean.
# The command's status is checked explicitly so a failure prints a message
# and exits 1 instead of dying silently under `set -e`.
set -e
echo >&2
echo "exe_test command: $*" >&2
# Fix: pass the full argument vector through with quoting preserved.
# The old COMMAND="$1 $2 $3" silently dropped every argument past the third
# and re-split any argument that contained whitespace.
"$@" || { echo "Shell Error: " >&2 ; exit 1 ; }
| true
|
0c5def851252819256b007bb92e719558f0f6f7a
|
Shell
|
wvandertoorn/cmp_benchmarks
|
/cmp_benchmarks.sh
|
UTF-8
| 7,295
| 3.71875
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# cmp_benchmarks: compare google-benchmark results between a baseline and a
# contender (pre-recorded results, pre-built suites, or git tags to build).
set -Eeuo pipefail
# default values
jobct=1
def_rel_perf_path="test/performance"
threshold=0.05
# Usage text. Typos fixed: "identifyer" -> "identifier", "SJON" -> "JSON",
# "do may contain" -> "may contain"; script name matches the actual file.
usage="Usage: ./cmp_benchmarks.sh -m mode -n name_tag -b baseline -c contender \
[-s source/code/path] [-r relative/benchm/suite/path] [-t threshold] [-o results/path] [-g compiler] \n\
\n\
-m : 'results', 'execs', 'build' \n\
-n : identifier string for comparison results, e.g. 3.0.0-3.0.1 \n\
\n\
-b -c:
\[-m results\] -b and -c should be full paths to directories containing SOLELY benchmark results in JSON format.\n\
Directories may contain subdirectories.\n\
\[-m execs\]: -b and -c should be full/path/to/built/benchmark/suites (containing the executables).\n\
\[-m build\]: -b and -c should be tags to checkout using git.\n\
\n\
For \[-m build\]: -s is required. Set to full/path/to/source/code (git repository).\n\
-g is optional. /path/to/executable/compiler (e.g. g++-7). Defaults to g++-7.\n\
-r is optional. relative/path/from/-s/to/benchmark/suite. Defaults to ${def_rel_perf_path}.\n\
\n\
Optional arguments:\n\
-t : a threshold [0, 1] for filtering significant results, e.g. 0.2 . Defaults to ${threshold} (5% difference).\n\
-o : full/path/to/save/results . Defaults to current directory.\n\
-j : number of jobs. This number is passed to make. Defaults to ${jobct}.\n"
# Parse the command line. The leading ':' in the optstring selects silent
# error handling; unrecognised options simply fall through this case and are
# caught by the argument validation further down.
while getopts ':m:n:b:c:s:t:o:r:g:j:' opt; do
  case "$opt" in
    m) mode=$OPTARG ;;
    n) name_tag=$OPTARG ;;
    b) baseline=${OPTARG%/} ;;      # trailing slash is stripped from all paths
    c) contender=${OPTARG%/} ;;
    s) source_code=${OPTARG%/} ;;
    t) threshold=$OPTARG ;;
    o) outpathp=${OPTARG%/} ;;
    r) rel_perf_path=${OPTARG%/} ;;
    g) compiler=$OPTARG ;;
    j) jobct=$OPTARG ;;
  esac
done
# --- Argument validation ---
# ${var+x} expands to "x" only when var is set, so these checks distinguish
# "flag never given" from "flag given an empty value".
if [[ -z ${mode+x} || -z ${name_tag+x} || -z ${baseline+x} || -z ${contender+x} ]]; then
echo -e "ERROR! Missing required arguments.\n"
echo -e "$usage"
exit 1
fi
# Drop the parsed options; anything left over is an unexpected positional arg
shift $((OPTIND -1))
if [[ $# -ge 1 ]] ; then
echo -e "ERROR! Unused arguments: $@\n"
echo -e "$usage"
exit 1
fi
if ! [[ "$mode" =~ ^(results|execs|build)$ ]]; then
echo -e "ERROR! Invalid mode.\n"
echo -e "$usage"
exit 1
fi
if [[ "$jobct" -lt 1 ]]; then
echo -e "ERROR! number of jobs must be at least 1.\n"
echo -e "$usage"
exit 1
fi
# 'build' mode additionally needs a source checkout (-s); -r and -g default
if [ $mode = "build" ]; then
if [[ -z ${source_code+x} ]]; then
echo -e "ERROR! Path to source code is required in 'build' mode.\n"
echo -e "$usage"
exit 1
fi
if [[ -z ${rel_perf_path+x} ]]; then
echo "Relative path from source code to benchmark suite was not given (option -r )."
echo "Relative path is set to: ${def_rel_perf_path}"
if ! [[ -d $source_code/$def_rel_perf_path ]]; then
echo "ERROR! $source_code/$def_rel_perf_path does not exist."
exit 1
else
rel_perf_path=$def_rel_perf_path
fi
fi
if [[ -z ${compiler+x} ]]; then
echo "Compiler was not given (option -g ). Compiler is set to g++-7 by default."
compiler="g++-7"
fi
# Fail early if the requested compiler is not on PATH
command -v $compiler >/dev/null || { echo "ERROR! $compiler was not found in PATH."; exit 1; }
fi
#---------------------------------------------------------------------------------------------#
# save directory of cmp_benchmarks.sh
wrkdir=${PWD%/}
# In 'build' mode, checkout the respective tags, and build the benchmark executables.
# Each tag gets its own out-of-source cmake build under ./build/<tag>; the
# checkout/build sequence below is order- and cwd-sensitive.
if [ $mode = "build" ]; then
mkdir -p {build/$baseline,build/$contender}
cd $source_code
git checkout $baseline
git submodule update
cd $wrkdir/build/$baseline
cmake $source_code/$rel_perf_path -DCMAKE_CXX_COMPILER=$compiler -DCMAKE_BUILD_TYPE=Release
make -j ${jobct}
cd $source_code
git checkout $contender
git submodule update
cd $wrkdir/build/$contender
cmake $source_code/$rel_perf_path -DCMAKE_CXX_COMPILER=$compiler -DCMAKE_BUILD_TYPE=Release
make -j ${jobct}
# From here on, baseline/contender point at the freshly built suites
baseline=$wrkdir/build/$baseline
contender=$wrkdir/build/$contender
cd $wrkdir
fi
# create output folder for given name_tag
if [[ -z ${outpathp+x} ]]; then outpathp=$wrkdir ; fi
outpath=$outpathp/$name_tag
[[ -d $outpath ]] || mkdir -p $outpath
#----------------------------------Find common benchmarks-------------------------------------#
# Collect benchmark names (basenames only) present on both sides. In
# 'results' mode every file counts; otherwise only extensionless executables.
if ! [ $mode = "results" ]; then
find $baseline -type f -not -name "*.*" -executable -print \
| awk -F/ '{print $NF}' \
| sort > $outpath/baseline
find $contender -type f -not -name "*.*" -executable -print \
| awk -F/ '{print $NF}' \
| sort > $outpath/contender
else
find $baseline -type f -print \
| awk -F/ '{print $NF}' \
| sort > $outpath/baseline
find $contender -type f -print \
| awk -F/ '{print $NF}' \
| sort > $outpath/contender
fi
# comm -12 keeps only the lines common to both (already sorted) lists
comm -12 --nocheck-order $outpath/baseline $outpath/contender > $outpath/common
rm $outpath/baseline $outpath/contender
## No common benchmarks!
[[ -s $outpath/common ]] || { echo "No common benchmarks between Baseline and Contender were found. "; exit 1; }
#-------------------------------Compare benchmarks--------------------------------------------#
#find all benchmark executables in baseline and contender suite
[[ -d $outpath/indiv_benchmrks ]] || mkdir $outpath/indiv_benchmrks
# Run the bundled gbench compare.py on every common benchmark pair and store
# its (colour-stripped) report per benchmark.
while read benchmark ; do
bench_bl=$(find $baseline -type f -name "$benchmark")
bench_cp=$(find $contender -type f -name "$benchmark")
# sed expression removes colorcodes from compare.py output
python ./gbench-compare/compare.py benchmarks $bench_bl $bench_cp \
| sed 's/\x1b\[[0-9;]*m//g' > $outpath/indiv_benchmrks/$benchmark
done < $outpath/common
rm $outpath/common
#-------------------------------------Summarize results---------------------------------------#
results=$outpath/all_diffs
signif=$outpath/significant_diffs
signif_decr=$outpath/significant_decr
signif_incr=$outpath/significant_incr
# write headers
echo "Benchmarkfile;Benchmark;Time;CPU" \
| tee ${results}.csv ${signif}.csv ${signif_incr}.csv ${signif_decr}.csv > /dev/null
# filter out the actual performance stats from compare.py output
for file in $outpath/indiv_benchmrks/* ; do
fin=$( echo "$file" | awk -F/ '{print $NF}' ) #save name of benchmark file
# diffs between benchmarks are summarized after fifth line of "-----" comp.py output
# use multiple whitespaces as seperator in case benchmark names includes whitespaces
cat $file \
| awk 'x==5 {print ;next} /---/ {x++}' \
| awk -v fin="$fin" 'BEGIN {FS =" +"} {print fin";"$1";"$2";"$3 }' >> ${results}.csv
done
# split stats in signif/signif_incr/signif_decr based on threshold
# sed statement skips first header line of results file
cat "${results}.csv" | sed 1d | \
awk -v thr=$threshold -v s="${signif}.csv" -v si="${signif_incr}.csv" -v sd="${signif_decr}.csv" \
'BEGIN {FS =";"} $3 <= - thr { print >> s ; print >> sd } $3 >= thr { print >> s ; print >> si }'
for file in $results $signif $signif_incr $signif_decr; do
column -t -s $';' "${file}.csv" > "${file}.txt"
done
| true
|
fca643bec7c963cb45f3c624b4ac8112ee831cd8
|
Shell
|
foobar999/Wikipedia-Cluster-Analysis
|
/old/get_swapped_author_doc_contribs.sh
|
ISO-8859-3
| 399
| 3.28125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash -e
# Swap the author/document columns of a doc-author contribution file:
#   line 1      : copied unchanged
#   line 2      : "#docs #authors extra" -> "#authors #docs extra"
#   lines 3..   : "docid authorid weight" sorted by author, columns swapped
if (( $# != 1 )); then
	echo "Usage: $0 DOC_AUTH_FILE"
	exit 1
fi
# FIX: quote the file path everywhere so names with spaces/globs work.
DOC_AUTH_FILE=$1

head -n 1 "$DOC_AUTH_FILE"                                                       # copy first line verbatim
head -n 2 "$DOC_AUTH_FILE" | tail -n 1 | awk '{ print $2 " " $1 " " $3}'         # swap the two counts on line 2
tail -n +3 "$DOC_AUTH_FILE" | sort -k 2 -ns | awk '{ print $2 " " $1 " " $3}'    # sort by author id, swap docid/authorid
| true
|
6f83fd90338524fb55cb0ee7aad9e40eddc0a84d
|
Shell
|
s-hell/dotfiles
|
/configs/.bash.d/bashdreader
|
UTF-8
| 254
| 3.25
| 3
|
[] |
no_license
|
#!/bin/bash
# Load every *.bash snippet found in ~/.bash.d into the current shell.
if [ -d ~/.bash.d ]; then
	for rc_file in ~/.bash.d/*.bash; do
		# Source only files that exist and are readable (an unmatched
		# glob would otherwise leave the literal pattern in rc_file).
		if [ -r "$rc_file" ]; then
			. "$rc_file"
		fi
	done
fi
# vim: set syntax=sh:ts=4:sw=4
| true
|
ffa41d2f88d547d715ecded5f83ede77db17059b
|
Shell
|
tumayouzi/airi
|
/build.sh
|
UTF-8
| 1,818
| 3.15625
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash
# Android ROM build driver: sets up the build environment, builds the ROM
# for ${DEVICE} inside ../${BUILD_DIR}, logs to ${LOG_DIR}/${filename},
# and exits 0 on success / 1 on failure.
#
# Required environment (set by the caller/CI): BUILD_DIR, DEVICE,
# BUILD_TIMESTAMP, BUILD_TYPE, MAKE_CLEAN, CCACHE_DIR, CCACHE_CAP, LOG_DIR.

# for build arg
export LANG=C
export LC_ALL=C.UTF-8
export ALLOW_MISSING_DEPENDENCIES=true
export SOONG_ALLOW_MISSING_DEPENDENCIES=true
export CCACHE_DIR=${CCACHE_DIR}
export USE_CCACHE=1

#ccache
#CCACHE_CAP="50G"

# record the current dir.
workdir=$(pwd)

# set log file name
filename="${BUILD_TIMESTAMP}_${BUILD_DIR}_${DEVICE}.log"

# move build dir
cd ../${BUILD_DIR}

# make clean
if [ "${MAKE_CLEAN}" = "true" ]; then
    make clean
    echo -e "\n"
fi

# set ccache
ccache -M ${CCACHE_CAP}

# if offical?
# Must be before breakfast
if [ "${BUILD_TYPE}" = "UNOFFICIAL" ]; then
    : # NOP
else
    export FLOKO_BUILD_TYPE=${BUILD_TYPE}
fi

# build preparation
source build/envsetup.sh
breakfast ${DEVICE}

# Build Information Settings
if [ ${BUILD_DIR} = lineage ]; then
    vernum="$(get_build_var PRODUCT_VERSION_MAJOR).$(get_build_var PRODUCT_VERSION_MINOR)"
    source="LineageOS ${vernum}"
    short="${source}"
    zipname="lineage-$(get_build_var LINEAGE_VERSION)"
    # FIX: "${get_build_var LINEAGE_BUILDTYPE}" was a bad substitution and
    # "$(device)" invoked a non-existent command; use a real command
    # substitution and the DEVICE variable.
    newzipname="lineage-$(get_build_var PRODUCT_VERSION_MAJOR).$(get_build_var PRODUCT_VERSION_MINOR)-${BUILD_TIMESTAMP}-$(get_build_var LINEAGE_BUILDTYPE)-${DEVICE}"
elif [ ${BUILD_DIR} = floko ]; then
    vernum="$(get_build_var FLOKO_VERSION)"
    source="floko-v${vernum}"
    short="${source}"
    zipname="$(get_build_var LINEAGE_VERSION)"
    newzipname="Floko-v${vernum}-${DEVICE}-${BUILD_TIMESTAMP}-$(get_build_var FLOKO_BUILD_TYPE)"
else
    # Other
    source=${BUILD_DIR}
    short="${source}"
    zipname="*"
    newzipname="${zipname}"
fi

# Start build
# FIX: the log target was the mangled "$(unknown)"; use the ${filename}
# computed above (it was previously unused).
mka bacon 2>&1 | tee "${LOG_DIR}/${filename}"
# FIX: read PIPESTATUS[0] directly (mka's status, not tee's) instead of
# round-tripping it through echo.
if [ "${PIPESTATUS[0]}" -eq 0 ]; then
    ans=0
    statusdir="success"
else
    ans=1
    statusdir="fail"
fi

# KILL JACK-SERVER FOR SURE
prebuilts/sdk/tools/jack-admin kill-server

if [ ${ans} -eq 0 ]; then
    exit 0
else
    exit 1
fi
| true
|
4d26729f645b03af7140408283b5c754a9ffacc0
|
Shell
|
hamrogeet/stackfor
|
/_resources/crispy/crispy/bin/crispy-go
|
UTF-8
| 975
| 3.40625
| 3
|
[] |
no_license
|
#!/bin/bash
set -e
# FILENAME:     crispy-go
# BUILD:        170830
# DESCRIPTION:  Manages the "Go" language package for crispy
# AUTHORS:      Christopher Banwarth (development@aprettycoolprogram.com)
# COPYRIGHT:    2017 A Pretty Cool Program
# LICENSE:      Apache License, Version 2.0 [http://www.apache.org/licenses/LICENSE-2.0]
# MORE INFO:    http://aprettycoolprogram.com/crispy

# Provides FunctionNames and the functionList array used for dispatch.
source "$crispyDir/bin/crispyengine"

# Download Go 1.8.3, unpack it into /usr/local, add it to PATH via
# .profile, and record the install in crispy's history.
function install()
{
  # FIX: quote $HOME-derived paths so they survive spaces.
  wget -P "$HOME/Downloads/Go" https://storage.googleapis.com/golang/go1.8.3.linux-amd64.tar.gz
  sudo tar -C /usr/local -xzf "$HOME/Downloads/Go/go1.8.3.linux-amd64.tar.gz"
  echo "PATH=$PATH:/usr/local/go/bin" >> .profile
  touch "$HOME/.crispy/history/buildessential.install"
}

# MAIN
FunctionNames "${0#*./}"

# FIX: [[ "$#" > 0 ]] compared *strings* lexicographically; use a numeric
# arithmetic test.
if (( $# > 0 )); then
  if [[ "${functionList[*]}" =~ "$1" ]]; then
    # SECURITY NOTE: eval on user-supplied arguments — $1 is validated
    # against functionList above, but ${@:2} is passed through unchecked.
    eval "$1 ${@:2}"
  else
    printf "\n  ERROR in ${0#*./} -> Invalid action: $1.\n\n"
  fi
else
  printf "\n[ERROR] in ${0#*./} -> No action passed.\n\n"
fi
| true
|
6934abf4355c328110e133bc15ee8deb1d01d20c
|
Shell
|
ingridHu/RobAirInterfaces
|
/launch.sh
|
UTF-8
| 219
| 2.765625
| 3
|
[] |
no_license
|
#! /bin/bash
# Start the application server and the signaling server in the background,
# then print the OS name before finishing.
echo -e "Lancement des serveurs"

cd server
nodejs server.js &
# Give the first server a head start before launching the second one.
sleep 3
nodejs ../signalingserver/server.js &
echo -e "Serveur lancé"

namevariable=$(uname)
echo "$namevariable"

echo -e "Fin du programme"
| true
|
0663ba29c76f955252f679aebfd9327c7a5722c9
|
Shell
|
deanban/quoverflow
|
/bin/configure_db.sh
|
UTF-8
| 1,238
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Rebuild the quoverflow database from scratch: drop/create it, load the
# schema files in dependency order, then seed it with sample data.

# run_sql MESSAGE SQL_FILE — announce a step, then feed the file to psql.
run_sql() {
    echo "$1"
    psql -U node_user quoverflow < "$2"
}

echo "Dropping QUOVERFLOW"
dropdb -U node_user quoverflow

echo "Creating QUOVERFLOW"
createdb -U node_user quoverflow

run_sql "Assigning People: Running account.sql" ./bin/sql/account.sql
run_sql "Creating Categories: Running category.sql" ./bin/sql/category.sql
run_sql "Assigning People to Categories: Running userCategory.sql" ./bin/sql/userCategory.sql
run_sql "Following People: Running follow.sql" ./bin/sql/follow.sql
run_sql "Creating Questions: Running question.sql" ./bin/sql/question.sql
run_sql "Creating Search Trigger and Tokenizing: Running tokenizeQBody.sql" ./bin/sql/tokenizeQBody.sql
run_sql "Answering questions: Running answer.sql" ./bin/sql/answer.sql
run_sql "Commenting: Running comment.sql" ./bin/sql/comment.sql
run_sql "Upvoting things: Running vote.sql" ./bin/sql/vote.sql

echo "Seeding QUOVERFLOW With Some Data"
node ./bin/seed.js
node ./bin/seed2.js
node ./bin/seed3.js
node ./bin/seed4.js

echo "QUOVERFLOW Database Created. Good Luck!"
| true
|
15bedcf72293b0aa790438d54805e70e8296a3f4
|
Shell
|
openequella/openEQUELLA-Tools
|
/zk-poll-ensemble/zk-poll-ensemble.sh
|
UTF-8
| 898
| 2.703125
| 3
|
[] |
no_license
|
#!/bin/bash
# Gather information about a ZK ensemble.
#
# Sends the four-letter-word commands mntr/cons/wchc to each of the three
# ensemble members via nc.  The zk_node_N_ip / zk_node_N_port tokens are
# placeholders to be substituted with real host/port values.

date

# query_node NUM HOST PORT — dump mntr/cons/wchc output for one member.
query_node() {
    local num=$1 host=$2 port=$3 cmd
    echo "Querying node $num..."
    for cmd in mntr cons wchc; do
        # FIX: quote the message — the unquoted "[[mntr]]" was a glob
        # pattern and could expand against files in the cwd.
        echo "Querying [[$cmd]] on node $num:"
        echo "$cmd" | nc "$host" "$port"
    done
}

query_node 1 zk_node_1_ip zk_node_1_port
query_node 2 zk_node_2_ip zk_node_2_port
query_node 3 zk_node_3_ip zk_node_3_port

echo ===============================================================
| true
|
bed79bfef6568ed7b85f21c1a6a85c5d36626e0f
|
Shell
|
envato/bundle-update-buildkite-plugin
|
/unwrappr/annotate.sh
|
UTF-8
| 651
| 3.546875
| 4
|
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
# Install unwrappr via a throw-away Gemfile, then annotate the given
# GitHub pull request with Gemfile.lock change information.

set -euo pipefail

echo "--- :bundler: Installing Unwrappr"

cat <<\GEMS > Gemfile
source 'https://rubygems.org/'
gem 'unwrappr'
GEMS

bundle install --jobs="$(nproc)"

echo "+++ :github: Annotating Github pull request"

# Positional args: repo slug, PR number, then optional lock-file flags.
repository=$1
pull_request=$2
lock_file_args=("${@:3}")

# Default to the top-level Gemfile.lock when no files were supplied.
if (( ${#lock_file_args[@]} == 0 )); then
  lock_file_args+=("--lock-file" "Gemfile.lock")
fi

echo "Annotating https://github.com/${repository}/pull/${pull_request}"
echo "Files: " "${lock_file_args[@]}"
echo

bundle exec unwrappr annotate-pull-request \
  --repo "${repository}" \
  --pr "${pull_request}" \
  "${lock_file_args[@]}"
| true
|
1d85a83970820e886c44b0e6a769fedc708f6047
|
Shell
|
ark3/dotfiles
|
/bin/listening_on
|
UTF-8
| 193
| 3.46875
| 3
|
[] |
no_license
|
#!/bin/bash

# Print the lsof header plus any processes LISTENing on the given port.
function listening_on() {
    if (( $# != 1 )); then
        echo "Usage: $0 <port>"
        return 1
    fi
    # -n/-P: skip DNS and port-name resolution for speed.
    lsof -nP -i :"$1" | grep -E '^COMMAND|LISTEN'
}

listening_on "$@"
| true
|
06b044ed11280eecbca3c45063d3f10553d2c444
|
Shell
|
Shanni/dht-kvstore
|
/gcp/bin/gcp-set-zone.sh
|
UTF-8
| 164
| 2.875
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
#set -x
set -e

# Pick a GCP zone from a numeric node index ($1):
#   8..23  -> us-west2-b
#   24..39 -> us-west1-a
#   otherwise (including no argument) -> us-west1-b
ZONE="us-west1-b"

# FIX: the original used bash-only [[ ]] under #!/bin/sh (fails on dash).
# Use POSIX [ ] with &&; ${1:-0} keeps an absent argument from aborting
# the script via set -e (empty operand is an error for POSIX -gt).
if [ "${1:-0}" -gt 7 ] && [ "${1:-0}" -lt 24 ]; then
  ZONE="us-west2-b"
elif [ "${1:-0}" -ge 24 ] && [ "${1:-0}" -lt 40 ]; then
  ZONE="us-west1-a"
fi
| true
|
69d74f4a8a7110becc10bf5460b5a4dc0fa444f1
|
Shell
|
wsicheng/CSCOfflineTiming
|
/CSCTimingAnalyzer/batch/mergeTrees.sh
|
UTF-8
| 136
| 2.59375
| 3
|
[] |
no_license
|
#!/bin/bash
# Merge every timingBaby*.root file under INPUT_DIR ($1) into one
# OUTPUT_FILE ($2) using ROOT's hadd.

INPUT_DIR=$1
OUTPUT_FILE=$2

# FIX: build the file list with a glob instead of parsing `ls` output,
# and quote all expansions so paths containing spaces survive.
input_files=( "${INPUT_DIR}"/timingBaby*.root )

hadd -f "${OUTPUT_FILE}" "${input_files[@]}"
| true
|
4035ff3dae97f09cd63310c0b170b3a2c9306456
|
Shell
|
puppetlabs-seteam/workshop_deploy
|
/scripts/generate_boltws_inventory.sh
|
UTF-8
| 1,502
| 3.90625
| 4
|
[] |
no_license
|
#! /bin/bash
# call this script with 3 arguments: <amount of targets> <AWS region> <AWS user>
# example:
# sh ./scripts/generate_boltws_targets_inventory.sh 15 eu-west-3 kevin
#
# Builds ~/inventory.yaml with the public IPs of the Linux and Windows
# Bolt-workshop EC2 instances belonging to user $3.

if [ $# -eq 0 ]; then
  echo "No arguments supplied, must supply a count"
  exit 1
fi

# FIX: the unquoted [ -n $1 ] was always true; quote it.  The numeric
# self-comparison errors (and the 2>/dev/null hides it) when $1 is not a
# number.
[ -n "$1" ] && [ "$1" -eq "$1" ] 2>/dev/null
if [ $? -ne 0 ]; then
  echo The supplied count is not a number
  exit 1
fi

export AWS_REGION=$2
export FACTER_aws_region=$2
export FACTER_user=$3

echo
echo "Sleeping for 10 seconds..."
sleep 10

echo
echo "Building Bolt inventory file at ~/inventory.yaml..."

# get_public_ip INSTANCE_NAME — print the instance's public IP address.
# FIX: the original piped grep into `echo "$(awk …)"`, relying on the
# command substitution inheriting the pipe's stdin; a single awk over the
# puppet output is equivalent and far less fragile.
get_public_ip() {
  puppet resource ec2_instance "$1" \
    | awk -F"'" '/public_ip_address/ { print $2 }'
}

echo "groups:
  - name: linux
    nodes:" > ~/inventory.yaml

for i in $(seq 1 "$1"); do
  ip=$(get_public_ip "$3-awskit-boltws-linux-student$i")
  echo "student$i-Linux $ip"
  echo "      - $ip" >> ~/inventory.yaml
done

echo "    config:
      transport: ssh
      ssh:
        user: centos
        run-as: root
        private-key: ~/bolt_ws_key.pem
        host-key-check: false
  - name: windows
    nodes:" >> ~/inventory.yaml

for i in $(seq 1 "$1"); do
  ip=$(get_public_ip "$3-awskit-boltws-windows-student$i")
  echo "student$i-Windows $ip"
  echo "      - $ip" >> ~/inventory.yaml
done

echo "    config:
      transport: winrm
      winrm:
        user: Administrator
        password: BoltR0cks!
        ssl: false" >> ~/inventory.yaml

exit 0
| true
|
60f1e680b8c6f4841669470809f83f396965fbc2
|
Shell
|
pramuditorh/iperf3-qos-measurement
|
/server.sh
|
UTF-8
| 624
| 3.84375
| 4
|
[] |
no_license
|
#!/bin/bash
# Run repeated iperf3 QoS measurements: listen on PORT, report every
# INTERVAL seconds, save each run to FILENAME-N.txt, for LOOPS runs.

# FIX: the optstring had "h:", which made -h demand an argument; plain
# "h" lets -h print the usage, and we now exit after showing it instead
# of falling through into the measurement loop.
while getopts p:i:f:l:h flag
do
    case "${flag}" in
        p) port=${OPTARG};;
        i) interval=${OPTARG};;
        f) filename=${OPTARG};;
        l) loops=${OPTARG};;
        h) echo "Script to take QoS measurement using Iperf3"
           echo "How to USE: ./server.sh -p [PORT] -i [INTERVAL] -f [FILENAME] -l [LOOPS]"
           exit 0;;
    esac
done

echo "port: $port"
echo "interval: $interval"
echo "filename: $filename"
echo "loops: $loops"

# FIX: use the bare variable in the arithmetic context — "c<=$loops" is a
# syntax error when loops is empty; "c<=loops" treats unset as 0.
for (( c=1; c<=loops; c++ ))
do
    echo "DO $filename-$c"
    # -1: handle a single client connection then exit.
    iperf3 --server --port "$port" --interval "$interval" -1 > "$filename-$c.txt"
    echo "DONE $filename-$c"
    sleep 3
done
| true
|
ec89d175967e9c0f484023cf52678dc4d993f595
|
Shell
|
skulbrane/dtconf
|
/.zshrc.d/01-init.zsh
|
UTF-8
| 1,328
| 3.375
| 3
|
[] |
no_license
|
# TODO: This file does not make much sense, most of this
# config should prob live in shlenv.zsh
#
# Today's date, e.g. 2015-10-11 (note: "20%y" hard-codes the century).
export DATE=$(date +'20%y-%m-%d')
# Boolean-style constants used by the helpers below.
export TRUE=1
export FALSE=0
# Shared shell-environment helpers; defines is_linux (and presumably DBG)
# -- confirm against shlenv.zsh.
source "$HOME/dtconf/shlib/shlenv.zsh"
# NOTE(review): assumes is_linux prints a status where non-zero output
# means "running on Linux" -- verify; everything below is Linux-only.
if [ ! "$(is_linux)" -eq 0 ]; then
# Required Environment Variables
#
# SYSTEMROOT - Windows Base Dir (usually X:\Windows)
# Values of $(uname -o) / $MSYSTEM that identify MSYS2 / MinGW-64.
MSYS_ID="Msys"
MINGW64_ID="MINGW64"
# Print a small message-of-the-day: shell, kernel and uptime.
function print_motd() {
echo $ - "Initialized shell: "$SHELL""
echo $(uname -a)
echo $(uptime)
#echo $(df -h /)
}
# Print error if any essential envar missing
# TODO: Define and try some fallbacks
#
# ${STATE?"Need to set STATE"}
# ${DEST:?"Need to set DEST non-empty"}
function ensure_environment() {
# TODO...
echo $FALSE
}
# Prepend the MinGW-64 toolchain when running under MSYS2, then append
# the usual personal/system directories to PATH.
function set_path() {
os=$(uname -o)
if [[ $os = $MSYS_ID ]]; then
[[ ! -z $DBG ]] && echo "System: Msys"
if [[ $MSYSTEM = $MINGW64_ID ]]; then
[[ ! -z $DBG ]] && echo "System Type: MINGW64"
export PATH=/e/Portable/Win/msys64/mingw64/bin:$PATH
[[ ! -z $DBG ]] && echo "Setting PATH:"$PATH
fi
fi
export PATH="$PATH:$HOME/bin/:/usr/bin:/opt/bin/:/c/Windows/System32/"
}
set_path
print_motd
fi
| true
|
fc841e40df8df029f9348ac5a212930cc56e10c3
|
Shell
|
mowgli/ansible-firefox-addon
|
/tests/container-test.bats
|
UTF-8
| 3,680
| 3.359375
| 3
|
[
"BSD-3-Clause"
] |
permissive
|
#!/usr/bin/env bats
# Integration tests for the firefox_addon Ansible module, exercised
# against a throw-away Fedora 21 Docker container (SSH on port 5555).
# testing requirements: docker, ansible
# https://github.com/tutumcloud/tutum-fedora
readonly docker_image="tutum/fedora:21"
readonly docker_container_name="ansible-firefox-addon"
readonly addon_url=https://addons.mozilla.org/en-US/firefox/addon/adblock-plus
# Run a command inside the test container (stdout discarded).
docker_exec() {
docker exec $docker_container_name $@ > /dev/null
}
# Same, but detached — used for long-running processes such as Xvfb.
docker_exec_d() {
docker exec -d $docker_container_name $@ > /dev/null
}
# Run a shell command *string* inside the container.
docker_exec_sh() {
# workaround for https://github.com/sstephenson/bats/issues/89
local IFS=' '
docker exec $docker_container_name sh -c "$*" > /dev/null
}
# Invoke the module under test via ad-hoc ansible; ANSIBLE_LIBRARY=../
# points at the module source one directory up.
ansible_exec_module() {
local _name=$1
local _args=$2
ANSIBLE_LIBRARY=../ ansible localhost -i hosts -u root -m $_name ${_args:+-a "$_args"}
}
# bats fixture: start the container with our SSH key, enable the yum
# cache, install Xvfb and start a headless X server on display :1.
setup() {
local _ssh_public_key=~/.ssh/id_rsa.pub
docker run --name $docker_container_name -d -p 5555:22 -e AUTHORIZED_KEYS="$(< $_ssh_public_key)" -v $docker_container_name:/var/cache/yum/x86_64/21/ $docker_image
docker_exec sed -i -e 's/keepcache=\(.*\)/keepcache=1/' /etc/yum.conf
docker_exec yum -y install deltarpm xorg-x11-server-Xvfb
docker_exec_d Xvfb :1
}
@test "Module exec with url arg missing" {
run ansible_exec_module firefox_addon
[[ $output =~ "missing required arguments: url" ]]
}
@test "Module exec with state arg having invalid value" {
run ansible_exec_module firefox_addon "url=$addon_url state=latest"
[[ $output =~ "value of state must be one of: present,absent, got: latest" ]]
}
@test "Module exec with state arg having default value of present" {
docker_exec yum -y install firefox unzip curl
run ansible_exec_module firefox_addon "url=$addon_url display=:1"
[[ $output =~ changed.*true ]]
docker_exec_sh test -d "~/.mozilla/firefox/*.default/extensions/{d10d0bf8-f5b5-c8b4-a8b2-2b9879e08c5d}"
}
@test "Module exec with state present" {
docker_exec yum -y install firefox unzip curl
run ansible_exec_module firefox_addon "url=$addon_url state=present display=:1"
[[ $output =~ changed.*true ]]
}
@test "Module exec with state absent" {
docker_exec yum -y install firefox unzip curl
run ansible_exec_module firefox_addon "url=$addon_url state=absent display=:1"
[[ $output =~ changed.*false ]]
}
@test "Module exec with state absent and addon already installed" {
docker_exec yum -y install firefox unzip curl
run ansible_exec_module firefox_addon "url=$addon_url state=present display=:1"
[[ $output =~ changed.*true ]]
run ansible_exec_module firefox_addon "url=$addon_url state=absent display=:1"
[[ $output =~ changed.*true ]]
docker_exec_sh test ! -e "~/.mozilla/firefox/*.default/extensions/{d10d0bf8-f5b5-c8b4-a8b2-2b9879e08c5d}"
}
@test "Module exec with state present twice and check idempotent" {
docker_exec yum -y install firefox unzip curl
run ansible_exec_module firefox_addon "url=$addon_url display=:1"
run ansible_exec_module firefox_addon "url=$addon_url display=:1"
[[ $output =~ changed.*false ]]
}
@test "Module exec with complete theme addon and check selected skin pref" {
local _addon_url=https://addons.mozilla.org/en-US/firefox/addon/fxchrome
docker_exec yum -y install firefox unzip curl
run ansible_exec_module firefox_addon "url=$_addon_url display=:1"
[[ $output =~ changed.*true ]]
docker_exec_sh grep FXChrome "~/.mozilla/firefox/*.default/user.js"
}
@test "Module exec with display arg missing when there is no DISPLAY environment" {
docker_exec yum -y install firefox unzip curl
run ansible_exec_module firefox_addon "url=$addon_url"
[[ $output =~ 'Error: no display specified' ]]
}
# bats fixture: always tear the container down, pass or fail.
teardown() {
docker stop $docker_container_name > /dev/null
docker rm $docker_container_name > /dev/null
}
| true
|
c5dcaa7d6d7369106a9a9074f675f6234a8ec112
|
Shell
|
JuanParker1/pr-noe
|
/crawlers/lanceUpdateFresh.sh
|
UTF-8
| 528
| 2.921875
| 3
|
[] |
no_license
|
# Crawl crypto data in a loop, publishing tokens.json to ../cryptos.json
# after every pass; stop once the crawler returns non-zero, then (unless
# it failed hard, status > 1) write today's statistics file.

# crawl_once MESSAGE — announce, run the crawler into tokens.json,
# publish the result, and return the crawler's exit status.
# (FIX: this body was duplicated verbatim twice in the loop.)
crawl_once() {
    echo "$1"
    coffee ./updateNew2WithCrawl.coffee > tokens.json
    local rc=$?
    cp ./tokens.json ../cryptos.json
    return $rc
}

while true; do
    crawl_once "Search crypto 1/2"
    ret=$?
    if [[ $ret -gt 0 ]]; then
        echo "Fin"
        break
    fi
    crawl_once "Search crypto 2/2"
    ret=$?
    if [[ $ret -gt 0 ]]; then
        echo "Fin"
        break
    fi
    sleep 60
done

# Status > 1 means a hard failure: skip the statistics step entirely.
if [[ $ret -gt 1 ]]; then
    exit
fi

# Set $1 to today's date (YYYYMMDD) and generate the daily stats file.
set $(date +%Y%m%d)
echo "fichier stat-$*"
coffee ./statistics.coffee $* > ../files/stat-$*.txt
| true
|
2bd547f587bf18e15b5ae1a4537b4c5d0d28e961
|
Shell
|
elliott-davis/hab-image-export
|
/hab-image-system/plan.sh
|
UTF-8
| 444
| 2.515625
| 3
|
[] |
no_license
|
# Habitat plan for a minimal "system" image package: it only ships the
# setup script and static support files, pulling runtime tools in as deps.
pkg_name=hab-image-system
pkg_origin=core
pkg_version="0.1.0"
pkg_maintainer="The Habitat Maintainers <humans@habitat.sh>"
pkg_license=('Apache-2.0')
# Runtime dependencies bundled into the image environment.
pkg_deps=(
core/iproute2
core/busybox-static
core/util-linux
core/coreutils
core/hab
smacfarlane/kmod
)
pkg_bin_dirs=(bin)

# Nothing to compile — this package is scripts and files only.
do_build() {
return 0
}

# Install the setup script and copy the static files into the package.
do_install() {
install -vD "${PLAN_CONTEXT}/bin/setup.sh" "${pkg_prefix}/bin/setup.sh"
cp -rv "${PLAN_CONTEXT}/files" "${pkg_prefix}/"
}
| true
|
fd2618c262905e7539cea9d8ea93e03b696b116a
|
Shell
|
Lucas-Fan/shell
|
/if.sh
|
UTF-8
| 158
| 3.046875
| 3
|
[] |
no_license
|
#!/bin/bash
#by authors fzyt 2019

# Compare the two numbers and announce which one is greater.
NUM1=100
NUM2=200

if (( NUM1 > NUM2 )); then
    echo "This $NUM1 greate $NUM2 !"
else
    echo "This $NUM2 greate $NUM1 !"
fi
| true
|
cba06f10aac0b48786c788088aa98694da547b05
|
Shell
|
kkbaranski/journey-planner
|
/client/uninstall
|
UTF-8
| 1,316
| 3.359375
| 3
|
[] |
no_license
|
#!/bin/bash
#===============================================================
#
# [ author: Krzysztof Baranski ]
#
# CLIENT ( uninstall )
#
# Removes the client's setup lock, resets its config file, and
# deletes the built jars and log directory.
#
#===============================================================
setuplock='.setuplock'
project_name='client'
# NOTE(review): build/source/main_class/commons_dir are unused in this
# script -- presumably shared with the install/setup scripts; confirm
# before removing them.
build='.build'
source='source'
main_class='Application'
commons_dir='../commons'
commons_jar='commons.jar'
project_jar=$project_name.jar
echo "
---------------------------------------------------
- UNINSTALL CLIENT -
---------------------------------------------------"
#--------------------------------------------------------------| remove setup lock
echo "Removing setup lock..."
rm -f $setuplock
# NOTE(review): rm -f exits 0 even when the file is absent, so the
# "Error!" branch below is effectively unreachable.
if [ $? -eq 0 ]
then
echo "OK!"
else
echo "Error!"
fi
#--------------------------------------------------------------| reset config file
# Blank out the server address/port lines in config.properties ('c' is
# sed's change-line command).
echo "Reseting config file..."
sed -i config.properties -e '/server_address/cserver_address='
sed -i config.properties -e '/server_port/cserver_port='
#--------------------------------------------------------------| remove jars
echo "Removing jars..."
rm -vf libs/$commons_jar
rm -vf $project_jar
#--------------------------------------------------------------| remove logs
if [ -d logs ]
then
echo "Removing logs..."
rm -rvf logs
fi
echo "---------------------------------------------------
"
| true
|
bbd012e0f8683ee3ccf93d2f8621234b964c5991
|
Shell
|
VladRyvkin/LPI-OS
|
/mediainst.sh
|
UTF-8
| 2,538
| 3.171875
| 3
|
[] |
no_license
|
#!/bin/bash
# Interactive installer for multimedia programs (vlc, musescore,
# audacious, audacity) on openSUSE via zypper.

# prompt_yes_no PROMPT — ask until the user answers y*/n*.
# Returns 0 for yes, 1 for no.
# FIX: treat EOF on stdin as "no" instead of looping forever re-asking.
prompt_yes_no() {
    local yn
    while true; do
        read -p "$1" yn || return 1
        case $yn in
            [Yy]* ) return 0;;
            [Nn]* ) return 1;;
            * ) echo "Please answer yes or no.(y or n)";;
        esac
    done
}

# install_pkg NAME CMD... — announce, run the install command, announce
# completion.  (FIX: the original duplicated this block four times.)
install_pkg() {
    local name=$1; shift
    sleep 0.3s; echo ""; echo "Installing $name..."
    "$@"
    sleep 0.3s; echo ""; echo "Installing $name complete"; echo ""
}

sleep 0.3s;
echo ""
sleep 0.3s;
echo "file: mediainst.sh"
echo ""
sleep 0.3s;
echo "In this file we have program for multimedia"
sleep 0.3s;
echo ""
echo "vlc, musescore"
echo ""
sleep 0.3s;

if ! prompt_yes_no "Do you wish to install this programs?"; then
    sleep 0.3s; echo ""; echo "Close mediainst.sh"; sleep 0.3s; echo ""; exit
fi

sleep 0.3s; echo ""; echo "Starting mediainst.sh"; sleep 0.3s; echo ""

if prompt_yes_no "Do you wish to install vlc?"; then
    install_pkg vlc sudo zypper install vlc
else
    sleep 0.3s; echo ""
fi
sleep 0.3s;

if prompt_yes_no "Do you wish to install musescore?"; then
    sleep 0.3s; echo ""; echo "Installing musescore..."
    sudo zypper addrepo http://download.opensuse.org/repositories/multimedia:/musescore2/openSUSE_Leap_42.3/multimedia:musescore2.repo
    sudo zypper refresh
    sudo zypper install musescore
    sleep 0.3s; echo ""; echo "Installing musescore complete"; echo ""
else
    sleep 0.3s; echo ""
fi

# BUG FIX: the original prompts and progress messages for the next two
# packages said "musescore" while actually installing audacious/audacity.
if prompt_yes_no "Do you wish to install audacious?"; then
    install_pkg audacious sudo zypper install audacious
else
    sleep 0.3s; echo ""
fi

if prompt_yes_no "Do you wish to install audacity?"; then
    install_pkg audacity sudo zypper install audacity
else
    sleep 0.3s; echo ""
fi

sleep 0.3s; echo ""; echo "Close mediainst.sh"; echo ""

#sudo zypper install vlc
#sudo zypper addrepo http://download.opensuse.org/repositories/multimedia:/musescore2/openSUSE_Leap_42.3/multimedia:musescore2.repo
#sudo zypper refresh
#sudo zypper install musescore
| true
|
d5d20a2274db27ac8974656aeab662c333236589
|
Shell
|
maxiroellplenty/roellsh
|
/rsh.sh
|
UTF-8
| 874
| 3.375
| 3
|
[
"MIT"
] |
permissive
|
# Version
export VERSION="1.0"

# Global Vars
export _ROELLSH_DIR=~/rsh;
export _PLUGINS_DIR=$_ROELLSH_DIR/plugins;

# Config
export showQuote=false;

# Source all Plugins
# FIX: iterate a glob instead of parsing `ls` output (word-splitting and
# glob-expansion hazards); the -e guard skips the unmatched-glob literal.
for f in "$_PLUGINS_DIR"/*; do
    [ -e "$f" ] && source "$f"
done

# Source core files
source "$_ROELLSH_DIR/core/core.sh"

# rsh [plugin] [arg] [arg2]
# With no arguments: show the interactive menu.
# "rsh help": run the _help function (from core) and return 1.
# Otherwise: dispatch to plugins/<plugin>.sh with up to two arguments.
function rsh()
{
    # Show menu when there is no first param
    if [ -z "$1" ]; then
        sh "$_ROELLSH_DIR/core/menu/menu.sh";
    fi

    if [ "$1" = "help" ]; then
        _help;
        return 1;
    fi

    # Get plugin name to pass the function to
    local pluginName=""
    if [ -n "$1" ]; then
        pluginName=$1;
    fi

    local param=""
    if [ -n "$2" ]; then
        param=$2;
    fi

    local secondParam=""
    if [ -n "$3" ]; then
        secondParam=$3;
    fi

    # FIX: quote the plugin path and arguments so values with spaces
    # reach the plugin intact.
    if [ -n "$1" ]; then
        sh "$_PLUGINS_DIR/$pluginName.sh" "$param" "$secondParam";
    fi
}
| true
|
c764ea7c04fb124d8b70bc9398c8f52b1d6cd993
|
Shell
|
HSchmale16/UsefulScripts
|
/bashrc
|
UTF-8
| 2,176
| 3.78125
| 4
|
[
"MIT"
] |
permissive
|
# Henry J Schmale
# October 11, 2015
#
# An extra bashrc file that contains various useful aliases and functions
# Add to your enviroment by adding the following lines to your regular
# .bashrc. It also does some other terminal magic like setting up my
# enviroment the way that I like it to be setup.
# I wrote this because I wanted to be able to share configuration
# settings between all of my machines, and this way allows me to do just
# that with the magic of source.
#
# if [ -f PATH_TO_THIS_FILE ]; then
#   source PATH_TO_THIS_FILE
# fi
#
# Where PATH_TO_THIS_FILE is the path to this file

# Custom Prompt
PS1=\
'\[\e[0;32m\]\u@\h\[\e[m\] \[\e[1;34m\]\w\[\e[m\] \[\e[1;32m\]\$\[\e[m\]\[\e[1;37m\]'

# Set my editor, which is vim.
# NOTE(review): no EDITOR assignment follows this comment -- presumably
# `export EDITOR=vim` was intended; confirm before adding it.

# Set up piping to my xclipboard
alias p2clip='xclip -selection c'

# pandoc bash completion, only if pandoc is installed
# FIX: the original `if ! hash pandoc 2>&1 /dev/null` passed /dev/null to
# hash as an *argument* (malformed redirection) and the `!` inverted the
# check, loading completion only when pandoc was missing.
if hash pandoc >/dev/null 2>&1
then
    eval "$(pandoc --bash-completion)"
fi

# Copy my public SSH key to the clipboard.
function copy-ssh-pub-key() {
    cat ~/.ssh/id_rsa.pub | p2clip
}

# Parallel Make and other improvements to make
export NUMCPUS=$(grep -c 'cpu cores' /proc/cpuinfo)
alias make='time make'
alias pmake='time make -j$NUMCPUS'

# Run a parallel make from the git repository root, then return to the
# directory we started in.
function gmake() {
    cdir=$(pwd)
    # FIX: quote the command substitution so repo paths containing spaces
    # work, and bail out rather than building in the wrong directory.
    cd "$(git rev-parse --show-toplevel)" || return
    pmake
    cd "$cdir"
}

case "$(uname)" in
    # Linux only things
    Linux)
        # say command for linux while maintaining mac compatibility
        function say {
            echo "$@" | espeak -s 120 2> /dev/null
        }
        ;;
    # Mac Only things
    Darwin)
        ;;
esac

# Clear Screen in THE WINDOWS WAY!
# This is the only good thing about windows
alias cls=clear

# Awesome Commit Message
# sometimes I just fell like getting the commit out of the way and not
# leaving a good one. This is mostly for personal projects.
alias git-randcommit='git commit -am "$(curl -s http://whatthecommit.com/index.txt)"'

# Removes cmake makefiles for a project.
# FIX: use -f so files that are already absent do not spray errors.
function cmake-clean() {
    rm -f CMakeCache.txt
    rm -rf CMakeFiles
    rm -rf CMakeScripts
    rm -f Makefile
    rm -f cmake_install.cmake
    rm -f install_manifest.txt
}

# Print the battery charge percentage as reported by upower.
function getBatteryPercent() {
    upower -d | grep percent | tail -n1 | awk '{print $2}'
}
| true
|
68dc4b7a884ee302361d7afbb491e9614d1fff9f
|
Shell
|
google/site-kit-wp
|
/bin/backstop
|
UTF-8
| 1,388
| 3.375
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Site Kit by Google, Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Run a backstopjs visual-regression command ($1) inside Docker, building
# the image on first use, and surface docker's exit code.

BIN_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &> /dev/null && pwd)
ROOT_DIR=$(dirname "$BIN_DIR")

VERSION=6.1.1
IMAGE=googlesitekit/vrt:$VERSION

# Build the backstopjs image if it doesn't exist yet.
docker image inspect $IMAGE -f "{{.Id}}" >/dev/null 2>&1 ||
docker build -t $IMAGE --build-arg BACKSTOPJS_VERSION=$VERSION $ROOT_DIR/docker/backstopjs

# Run the backstopjs command.
# FIX: quote "$1" so an empty or space-containing command is passed intact.
docker run --rm -t --mount type=bind,source="$ROOT_DIR",target="/src" $IMAGE "$1" --config=/src/tests/backstop/config.js
DOCKER_EXIT_CODE=$?

# Display a link to the report only after the "test" command.
# FIX: quote "$1" -- the unquoted test is a syntax error when no argument
# was supplied.
if [ "$1" == "test" ]; then
	echo ""
	echo "To see the VRT report open this URL in your browser: file://$ROOT_DIR/tests/backstop/html_report/index.html"
fi

exit $DOCKER_EXIT_CODE
| true
|
0233eed719beca599c945311f7dc771b77e34fb5
|
Shell
|
pankajyadav23/xoriant-devops
|
/prometheus/1.8/Exporters/mysqld_exporter/mysqld_exporter-0.10.0.sh
|
UTF-8
| 925
| 3.015625
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
# Author : Pankaj Yadav
# What does the script do:
# 1. Add user mysqld_exporter
# 2. Download mysqld_exporter-0.10.0 and intall it on local system
# 3. start mysqld_exporter as service
# 4. Perform a "./mysqld_exporter-0.10.0.sh" ->> This should succeed

# Dedicated system user/group for the exporter (no home, no login shell).
sudo useradd --no-create-home --shell /bin/false mysqld_exporter
sudo groupadd mysqld_exporter
# Download and unpack the release tarball, then install the binary.
cd /tmp
curl -LO https://github.com/prometheus/mysqld_exporter/releases/download/v0.10.0/mysqld_exporter-0.10.0.linux-amd64.tar.gz
tar xvf mysqld_exporter-0.10.0.linux-amd64.tar.gz
sudo cp /tmp/mysqld_exporter-0.10.0.linux-amd64/mysqld_exporter /usr/local/bin
sudo chown mysqld_exporter:mysqld_exporter /usr/local/bin/mysqld_exporter
# NOTE(review): assumes mysqld_exporter.service and .my.cnf were staged
# in /tmp by an earlier provisioning step -- confirm.
sudo cp /tmp/mysqld_exporter.service /etc/systemd/system/
sudo cp /tmp/.my.cnf /root/
# Register and start the service, then show its status.
sudo systemctl daemon-reload
sudo systemctl start mysqld_exporter
sudo systemctl status mysqld_exporter
| true
|
2dfb5024ddb656deeac43ba3760413e5b66a1d5b
|
Shell
|
Tmark1022/shell-script
|
/util/find_jltx_db_backup_data/find_db_data_uid.sh
|
UTF-8
| 629
| 3.5
| 4
|
[] |
no_license
|
#!/bin/bash
#*************************************************************************
# > Author : tmark
# > Created Time : Wed 01 Jul 2020 09:41:04 AM CST
# > File Name : find_db_data_uid.sh
# > Description : find the backup file whose uid range contains a uid
#*************************************************************************

# Print usage instructions for this script.
usage() {
echo "usage : sh $0 uid search_dir"
}

if [ $# -ne 2 ]; then
usage
exit 1
fi

uid=$1
search_dir=$2

# Pipeline: list search_dir, keep column 9 (the file name), keep names
# starting with "back", rewrite "...[MIN_MAX]...gz" into "MIN MAX NAME",
# then print NAME when MIN <= uid <= MAX.
# NOTE(review): relies on `ls -alF` output having the name in field 9 --
# breaks on file names containing spaces; also assumes backup archives
# are named back*[minuid_maxuid]*.gz -- confirm the backup naming scheme.
ls -alF "$search_dir"| awk 'BEGIN {FS=" "} {print $9}' | sed -n '/^back/p' | sed -E 's/(^.*\[([0-9]+)_([0-9]+)\].*gz$)/\2 \3 \1/' | awk -v uid="$uid" 'BEGIN {FS=" "} {if (uid >= $1 && uid <= $2) print $3}'
| true
|
d97fb9143a5c17e97528a562d29e7e093ca48c22
|
Shell
|
hackclub/hack-as-a-service-v0
|
/dokku_plugin/subcommands/letsencrypt-enabled.sh
|
UTF-8
| 198
| 2.984375
| 3
|
[] |
no_license
|
#!/bin/bash
# Dokku subcommand: print "true" when Let's Encrypt is active for the app
# given in $2, otherwise print "false".

source "$PLUGIN_AVAILABLE_PATH/letsencrypt/functions"

set +e
# letsencrypt_is_active exits 0 when the app has an active certificate.
if letsencrypt_is_active $2 > /dev/null; then
  echo "true"
else
  echo "false"
fi
| true
|
48080843a234f08cd3561d34d35de0adb0722c03
|
Shell
|
thomastweets/kentron
|
/vars.sh
|
UTF-8
| 462
| 2.53125
| 3
|
[] |
no_license
|
# Set the IP address of the VPS
export VPS_IP=1.2.3.4
# docker-machine name for the VPS (consumed by the eval below).
export VPS_NAME=kentron

# SSH credentials
export SSH_USER=deploy
export SSH_KEY=~/.ssh/server_deploy_rsa

# docker environment
# de: show the DOCKER_* variables; dm: shorthand for docker-machine.
alias de='env | grep DOCKER_'
alias dm='docker-machine'
# Point the local docker client at the VPS's docker daemon.
eval "$(docker-machine env $VPS_NAME)"

# reverseProxy settings
export DEFAULT_HOST=domain.com

# webserver settings
export WEBSERVER_DOMAIN=web.$DEFAULT_HOST

# unifi settings
export UNIFI_DOMAIN=unifi.$DEFAULT_HOST
| true
|
7919b6a1342f52daef2a35f17b7e4dee2d7a7051
|
Shell
|
kazi-nutanix/karbon-toolkit
|
/Pro SDK/get_KarbonK8sConfig.sh
|
UTF-8
| 2,635
| 3.5625
| 4
|
[] |
no_license
|
#!/usr/bin/env bash
# Fetch the kubeconfig and SSH credentials for every Nutanix Karbon k8s
# cluster visible on the Prism endpoint, then merge all kubeconfigs.
# this script is test and verfied against karbon 2.2.1 only
set -e
now=$(date)
KUBECONFIG_PATH="$HOME/.kube"
mkdir -p ${KUBECONFIG_PATH}
# Provides PRISM_IP, PRISM_USER, PRISM_PASSWD; SERVER_CONFIG must be set
# by the caller -- confirm against data.cfg.
source $SERVER_CONFIG/data.cfg
SVC_ACCOUNT_PASSWORD=$PRISM_PASSWD
# join SEP WORD... -- print WORDs joined by SEP (used for the KUBECONFIG
# search-path below).
function join { local IFS="$1"; shift; echo "$*"; }
rm -f clusters 2> /dev/null
echo "$now Fetching clusters access information"
# List all cluster names into the ./clusters working file.
KARBON_API_ENDPOINT="https://$PRISM_IP:9440/karbon/v1-beta.1/k8s/clusters"
curl -ks --request GET --url ${KARBON_API_ENDPOINT} -H 'Content-Type: application/json' -u ${PRISM_USER}:${SVC_ACCOUNT_PASSWORD} | jq -r '.[].name' > clusters
karbon_files=()
while read line
do
words=( $line )
CLUSTER=$(echo ${words[0]})
#UUID=$(echo ${words[1]})
echo "$now Getting certificate for $CLUSTER"
# Download this cluster's kubeconfig into ~/.kube/<cluster>.cfg.
karbon_file="$KUBECONFIG_PATH/$CLUSTER.cfg"
KARBON_API_ENDPOINT="https://$PRISM_IP:9440/karbon/v1/k8s/clusters/$CLUSTER/kubeconfig"
#eval "/usr/local/bin/karbonctl --ip=$PRISM_IP --port=$PRISM_PORT --user=$PRISM_USER --password=$SVC_ACCOUNT_PASSWORD kubecfg $UUID > $karbon_file"
curl -ks --request GET --url ${KARBON_API_ENDPOINT} -H 'Content-Type: application/json' -u ${PRISM_USER}:${SVC_ACCOUNT_PASSWORD} > temp_out.json
cat temp_out.json | jq -r '.kube_config' > $karbon_file
#| sed 's/\\n/\n/g'
#cp temp_out.json $karbon_file
#cat $karbon_file
#echo "$now Getting ssh files for $CLUSTER"
karbon_files+=($karbon_file)
#ssh_access_file="$PROJECT_PATH/$CLUSTER-ssh-access.json"
# Fetch the SSH private key and certificate into ~/.ssh/<cluster>[.pub].
KARBON_API_ENDPOINT="https://$PRISM_IP:9440/karbon/v1/k8s/clusters/$CLUSTER/ssh"
curl -ks --request GET --url ${KARBON_API_ENDPOINT} -H 'Content-Type: application/json' -u ${PRISM_USER}:${SVC_ACCOUNT_PASSWORD} > temp_out.json
#private_key=$(cat temp_out.json | jq .private_key | tr -d "\"" | sed 's/\\n/\n/g')
# echo $private_key
private_key_file=~/.ssh/$CLUSTER
# Loosen permissions first so the overwrite below succeeds on re-runs.
if [ -f "$private_key_file" ]; then
chmod 777 $private_key_file
fi
#cat temp_out.json | jq .private_key | tr -d "\"" | sed 's/\\n/\n/g' > $private_key_file
cat temp_out.json | jq -r '.private_key' > $private_key_file
chmod 0400 $private_key_file
user_cert_file=~/.ssh/$CLUSTER.pub
cat temp_out.json | jq -r '.certificate' > $user_cert_file
#cat temp_out.json | jq .certificate | tr -d "\"" | sed 's/\\n/\n/g' > $user_cert_file
done < clusters
# Merge every per-cluster kubeconfig into one flattened file.
result=$(join : ${karbon_files[@]})
# NOTE(review): the redirect target "${KUBECONFIG}" expands the shell's
# *pre-existing* KUBECONFIG (the KUBECONFIG=$result prefix only affects
# kubectl's environment, not this expansion) -- if KUBECONFIG is unset
# the redirect is empty/ambiguous; confirm callers export KUBECONFIG.
KUBECONFIG=$result kubectl config view --merge --flatten > "${KUBECONFIG}"
now=$(date)
echo "$now sucessfully retrieved access information for clusters"
| true
|
fdf8e39c0cf54c70231e03bd3bc57f36882b80fb
|
Shell
|
Fematich/EMall
|
/workflow.sh
|
UTF-8
| 372
| 2.671875
| 3
|
[] |
no_license
|
#!/bin/bash
# Drive the EMall pipeline over the boost configurations:
#   1) mine bursts for every config_boost* file,
#   2) run cluto clustering for every file in the boostconfigs directory.
#hstnm=$(hostname)
#hostid=$(echo ${hstnm:0:18} | egrep -o '[[:digit:]]{1,2}')
#if [ $hostid -ne 1 ]
# then
# ./EMall.sh init /work/data/mediargus_2011_be/configs/config1520
# fi
for config in /work/configs/boostconfigs/config_boost*
do
  # Quote the path so a config name containing spaces reaches
  # EMall.sh as a single argument.
  ./EMall.sh mine_bursts "$config"
done
for config in /work/configs/boostconfigs/*
do
  ./EMall.sh cluto "$config"
done
| true
|
ebe3c8b465adf261839c95ae3b1990b1d3e81790
|
Shell
|
j3n660t/fiberhome-AN5506-04F-
|
/filesystem/fh/extend/upnp/upnp-run
|
UTF-8
| 1,119
| 2.515625
| 3
|
[] |
no_license
|
#!/bin/sh
# Stage the linuxigd (UPnP IGD) runtime environment: seed default config
# files into /etc/linuxigd and link the bundled shared libraries and
# config into the system paths.
# Ensure /etc/linuxigd exists as a directory (replace any non-directory
# that may be in the way).
if [ ! -d /etc/linuxigd ]
then
  rm -rf /etc/linuxigd
  mkdir /etc/linuxigd
fi
# Copy each default config file only when it is missing, so local edits
# survive a re-run.  (Folds the six identical if-blocks into one loop.)
for cfg in dummy.xml gateconnSCPD.xml gatedesc.skl gatedesc.xml gateicfgSCPD.xml gateinfoSCPD.xml
do
  if [ ! -f "/etc/linuxigd/$cfg" ]
  then
    cp "/fh/extend/upnp/$cfg" /etc/linuxigd
  fi
done
# Use -f so re-running the script refreshes the links instead of failing
# with "File exists" (the original ln -s silently failed on every re-run).
ln -sf /fh/extend/upnp/libupnp.so.6.3.4 /lib/libupnp.so
ln -sf /fh/extend/upnp/libupnp.so.6.3.4 /lib/libupnp.so.6
ln -sf /fh/extend/upnp/libixml.so.2.0.8 /lib/libixml.so
ln -sf /fh/extend/upnp/libixml.so.2.0.8 /lib/libixml.so.2
ln -sf /fh/extend/upnp/libthreadutil.so.6.0.4 /lib/libthreadutil.so
ln -sf /fh/extend/upnp/libthreadutil.so.6.0.4 /lib/libthreadutil.so.6
ln -sf /fh/extend/upnp/upnpd.conf /etc/upnpd.conf
exit 0
| true
|
9450d4ee1cf26ffca359ee9d94ca9c4af309202c
|
Shell
|
vletroye/SynoPackages
|
/DSM 6.x/MODS phpMyAdmin 4.9.0/scripts/start-stop-status
|
UTF-8
| 441
| 3.015625
| 3
|
[] |
no_license
|
#!/bin/sh
# Synology DSM package control script for phpMyAdmin: toggles which nginx
# config symlink is active and reports install status.
# 'common' defines NGINX_CONF_DIR, PHPMYADMIN_NIGNX_CONF, NGINX_ENALBE_CONF,
# NGINX_DISABLE_CONF and WEB_PHPMYADMIN -- TODO confirm.
. "$(dirname $0)"/common
case "$1" in
  start)
    # Activate the "enabled" nginx config, removing the "disabled" one.
    rm -rf "${NGINX_CONF_DIR}/${NGINX_DISABLE_CONF}"
    ln -sf "${PHPMYADMIN_NIGNX_CONF}/${NGINX_ENALBE_CONF}" "${NGINX_CONF_DIR}"
    ;;
  stop)
    # Reverse of start: swap back to the "disabled" config.
    rm -rf "${NGINX_CONF_DIR}/${NGINX_ENALBE_CONF}"
    ln -sf "${PHPMYADMIN_NIGNX_CONF}/${NGINX_DISABLE_CONF}" "${NGINX_CONF_DIR}"
    ;;
  status)
    # Exit 150 when the web directory is missing (DSM "not installed"
    # convention -- TODO confirm), 0 otherwise.
    if [ ! -d "$WEB_PHPMYADMIN" ]; then
      exit 150
    fi
    exit 0
    ;;
  *)
    exit 1
    ;;
esac
exit 0
| true
|
8e1f36c62ee8f4a8e5211455986d71c2b7d6fbc7
|
Shell
|
sam-hirsch/blender_updater
|
/blender_updater.sh
|
UTF-8
| 185
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh
# Check out a Blender branch, update its dependencies, and build a release.
#   $1 - git branch to build
#   $2 - path to the Blender source checkout
#   $3 - base directory for build output (the build lands in $3/$1)
BRANCH=$1
BASE_PATH=$2
FULL_PATH=$3/$1
# Abort if the source checkout is missing instead of running git/make in
# whatever directory the caller happened to be in (the original ignored
# a failed cd).  Variables are quoted so paths with spaces work.
cd "${BASE_PATH}" || exit 1
echo CHECKOUT
git checkout "${BRANCH}"
echo UPDATE
make update
echo BUILD
make release BUILD_DIR="${FULL_PATH}"
echo DONE
| true
|
b748dabef75bc087e9da2331afbd7e3b2d2df8ab
|
Shell
|
marcelohmdias/dotfiles
|
/setup/tools/sdkman.sh
|
UTF-8
| 526
| 3.484375
| 3
|
[] |
no_license
|
#!/bin/bash
# Install SDKMan if it is not already present.
# Relies on helpers sourced from utils.sh: cmd_exists, print_msg_sub_info,
# print_msg_success, execute, break_line.
cd "$(dirname "${BASH_SOURCE[0]}")" && . "./../utils.sh"
# -----------------------------------------------------------------------------
main() {
  # NOTE(review): '-a' declares an array attribute but url is assigned a
  # scalar; works, but 'local url=...' was probably intended.
  local -a url="https://get.sdkman.io"
  print_msg_sub_info "SDKMan"
  if cmd_exists "sdk"; then
    print_msg_success "SDKMan Installed"
  else
    # Pipe the official installer into bash, then load SDKMan into this shell.
    execute "wget -qO- $url | bash &> /dev/null" "Install SDKMan"
    source "$HOME/.sdkman/bin/sdkman-init.sh"
  fi
}
# -----------------------------------------------------------------------------
main
break_line
| true
|
ee52579bff738b9dad5d9a8e89fc15b87aab59ee
|
Shell
|
plume-design/opensync-platform-bcm
|
/rootfs/hooks/common/pre-install
|
UTF-8
| 746
| 3.234375
| 3
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
#!/bin/sh
# libopensync.so needs to be accessible from /lib or /usr/lib
# otherwise hostTools/libcreduction complains with error:
# *** Missing 32-bit libraries: libopensync.so. Stop.
#
# Create a symlink from /usr/lib/opensync to /usr/opensync/lib
# $1 - path to the rootfs being assembled.
ROOTFS="$1"
DEST_LIB_DIR="/usr/lib"
[ -z "$INSTALL_PREFIX" ] && INSTALL_PREFIX="/usr/opensync"
# Compute the target relative to /usr/lib so the link resolves inside the
# rootfs regardless of where it is mounted on the build host.
REL_SRC_DIR="$(realpath "$INSTALL_PREFIX" --relative-to="$DEST_LIB_DIR")"
mkdir -p "$ROOTFS$DEST_LIB_DIR"
ln -sf "$REL_SRC_DIR/lib" "$ROOTFS$DEST_LIB_DIR/$(basename "$INSTALL_PREFIX")"
# healthcheck: drop scripts for features disabled in the build config.
test "$CONFIG_BCM_USE_NAS" = y || rm -vf "$ROOTFS${INSTALL_PREFIX}/scripts/healthcheck.d/"*nas*
test "$CONFIG_BCM_USE_HOSTAP" = y || rm -vf "$ROOTFS${INSTALL_PREFIX}/scripts/healthcheck.d/"*hostap*
| true
|
2ae6b4a96221d5b9a3c5214e990fba9d2cd78eb5
|
Shell
|
jeff-brownlee/bisg
|
/public/build/build.sh
|
UTF-8
| 428
| 2.640625
| 3
|
[] |
no_license
|
# Build script: run the r.js optimizer over JS and CSS, copy index.html,
# then rewrite it to load require.js from the CDN.
rm -rf output
node r.js -o build.js
node r.js -o cssIn=../css/styles.css out=output/css/styles.css
cp ../index.html output/index.html
REQUIRE_VERSION='1.0.5'
# Build the sed argv as an array instead of a whitespace-split command
# string (fragile anti-pattern): BSD/macOS sed requires an explicit backup
# suffix after -i, GNU sed does not.
SED_ARGS=(-i)
if [[ $OSTYPE == *"darwin"* ]]; then
  SED_ARGS+=('.tmp')
fi
sed "${SED_ARGS[@]}" 's/js\/libs\/require\/require.js/http:\/\/requirejs.org\/docs\/release\/'"$REQUIRE_VERSION"'\/minified\/require.js/g' output/index.html
# Remove the backup file BSD sed leaves behind.
rm -f output/*.tmp
| true
|
3bb0878736e5727bc240ad12b1ff3145e84a6f8d
|
Shell
|
lianarem/powscript
|
/.tools/compile
|
UTF-8
| 5,195
| 3.6875
| 4
|
[
"BSD-2-Clause"
] |
permissive
|
#!/usr/bin/env bash
# Compile powscript: expand and inline the sources under src/ into a single
# executable ./powscript, bumping the patch version when the output changes.
set -E
Dir="$(readlink -m "$(dirname "${BASH_SOURCE[0]}")/..")"
PowscriptSourceDirectory="$Dir/src"
PowscriptLibDirectory="$Dir/lib"
# Keep a backup of the current build and restore it if compilation errors out
# (set -E makes the ERR trap fire inside functions too).
if [ -f ./powscript ]; then
  mv "$Dir/powscript" "$Dir/.powscript.backup"
  trap '{ mv "$Dir/.powscript.backup" "$Dir/powscript"; exit; }' ERR
fi
source "$PowscriptSourceDirectory/compiler/version.bash"
OldPatchVersion="$(version:patch-of "$(version:number)")"
NewPatchVersion="$(( OldPatchVersion + 1))"
# Rewrite the patch component of the version in package.json and ./powscript.
update_patch_version() {
  sed -i -e 's/"version":[ ]*\"\(.*\)\.\(.*\)\..*",/"version": "\1.\2.'"$1"'",/g' "$Dir/package.json" || true
  sed -i -e 's/POWSCRIPT_VERSION=*\(.*\)\.\(.*\)\..*/POWSCRIPT_VERSION=\1.\2.'"$1"'/g' "$Dir/powscript" || true
}
# Truncate the output file; add() appends one line to it.
printf '' >"$Dir/powscript"
add() {
  echo "$1" >>"$Dir/powscript"
}
# Optional compile-time debug instrumentation (POWSCRIPT_CDEBUG=true):
# wraps emitted lines with tracing hooks.  The globals below form a small
# lexer state machine over the emitted source, and the predicates inspect
# the current $line being processed.
if ${POWSCRIPT_CDEBUG-false}; then
  LineNum=0
  InsideSingleQuotes=false
  InsideDoubleQuotes=false
  MultiLine=false
  NextMultiLine=false
  Escape=false
  echo >"$Dir/.cdebug"
  add "$(cat "$Dir/.tools/debugger/preamble.bash")"
  # A line may be instrumented only outside quotes and line continuations.
  debuggable_line() {
    ! $InsideSingleQuotes && ! $InsideDoubleQuotes && ! $MultiLine
  }
  # Non-blank and not a pure comment line.
  non_empty_line() {
    [ -n "$line" ] && [[ ! "$line" =~ ^[\ ]*'#'.*$ ]]
  }
  # Exclude case-arm pattern lines and 'esac' terminators from tracing.
  not_before_case() {
    [[ ! "$line" =~ ^("'""$"?"("|[^'('])*')' ]] && [[ ! "$line" =~ esac$ ]]
  }
  # True on a 'name() {' function-definition line.
  at_function_start() {
    [[ "$line" =~ ^.*'() {'$ ]]
  }
  # True on a closing '}' line or a bare 'exit [N]' statement.
  at_function_exit() {
    [ "$line" = '}' ] || [[ "$line" =~ ^[\ ]*exit([\ ]+[0-9]+)?$ ]]
  }
flip() {
if ${!1}; then
printf -v "$1" false
else
printf -v "$1" true
fi
}
  # Scan $line one character at a time, updating the quote/escape state;
  # sets MultiLine when the line ends with an escaped (continued) newline.
  update_quote_count() {
    local i
    MultiLine=false
    for i in $( seq 0 $((${#line}-1)) ); do
      case ${line:$i:1} in
        '\')
          if ! $InsideSingleQuotes; then flip Escape; fi; ;;
        '"')
          if ! $Escape && ! $InsideSingleQuotes; then flip InsideDoubleQuotes; fi
          Escape=false;;
        "'")
          if ! $Escape && ! $InsideDoubleQuotes; then flip InsideSingleQuotes; fi
          Escape=false;;
        *)
          Escape=false;;
      esac
    done
    # A trailing unconsumed backslash outside quotes means a continuation.
    if $Escape && ! $InsideSingleQuotes && ! $InsideDoubleQuotes; then MultiLine=true; Escape=false; fi
  }
  # Emit debug hooks *before* the current source line is appended:
  # function-end hooks at '}'/'exit', per-line trace hooks otherwise.
  check_before_line() {
    LineNum=$((LineNum+1))
    if debuggable_line && at_function_exit; then
      # Preserve the function's exit status across the injected hooks.
      local ec="cdebug_err_code_$LineNum"
      if [ "$line" = '}' ]; then add "$ec=\$?"; fi
      add "powscript-cdebug:line '${line//\'/,}'"
      add 'powscript-cdebug:function-end'
      if [ "$line" = '}' ]; then add "return \$$ec"; fi
    elif debuggable_line && non_empty_line && not_before_case; then
      add "powscript-cdebug:line '${line//\'/\`}'"
    fi
    if at_function_start; then
      # Register the function name (line minus trailing '() {').
      add "POWSCRIPT_CDEBUG_FUNCTIONS[${line:0:-4}]=true"
    fi
    if non_empty_line; then update_quote_count; fi
  }
  # Emit the function-entry hook just after a definition line was appended.
  check_after_line() {
    if debuggable_line && non_empty_line && at_function_start; then
      add "powscript-cdebug:function-start '${line:0:-4}' \"\$@\""
    fi
  }
else
  # Debugging disabled: remove any stale marker file.
  if [ -f "$Dir/.cdebug" ]; then rm "$Dir/.cdebug"; fi
fi
# Recursively read a powscript source file, expanding the compiler
# directives (#<<EXPAND>>, #<<INCLUDE>>, #<<REQUIRE>>, #<<IGNORE>>,
# #<<VAR>>, #<<NOSHADOW>>) and appending the result to ./powscript via add().
read_file() {
  local line file lib var val noshadow_mode=false noshadow_func=""
  # Hooks used by the sourced compiler code (INCLUDE/REQUIRE machinery).
  export RequireOp=add
  export ShadowingOp=trimming_add
  export ShadowingGetFunc=get_noshadow_func
  # Append a multi-line string, stripping one trailing space per line and
  # applying the cdebug hooks when enabled.
  trimming_add() {
    local line
    while IFS='' read -r line || [ -n "$line" ]; do
      line="${line% }"
      if ${POWSCRIPT_CDEBUG-false}; then check_before_line; fi
      add "$line"
      if ${POWSCRIPT_CDEBUG-false}; then check_after_line; fi
    done <<< "$1"
  }
  get_noshadow_func() {
    echo "$noshadow_func"
  }
  while IFS='' read -r line || [ -n "$line" ]; do
    if ${POWSCRIPT_CDEBUG-false} && ! $noshadow_mode; then check_before_line; fi
    if [[ "$line" =~ .*'#<<EXPAND>>' ]]; then
      # 'source <file> #<<EXPAND>>': inline the referenced src/ file.
      file=${line//*source/}
      file=${file//#*/}
      file=${file// /}
      read_file "$Dir/src/$file"
      add "# FILE: $file"
    elif [[ "$line" =~ .*'#<<INCLUDE>>' ]]; then
      # Like EXPAND, but also source the file into this compiler run.
      file=${line//*source/}
      file=${file//#*/}
      file=${file// /}
      read_file "$(eval echo "$file")"
      source "$(eval echo "$file")"
      add "# FILE: $file"
    elif [[ "$line" =~ .*'#<<REQUIRE>>' ]]; then
      # Delegate to powscript_require (provided by sourced compiler code).
      lib=${line//*powscript_require/}
      lib=${lib//#*/}
      lib=${lib// /}
      powscript_require "$lib"
    elif [[ "$line" =~ .*'#<<IGNORE>>' ]]; then
      :
    elif [[ "$line" =~ .*'#<<VAR>>' ]]; then
      # Evaluate the RHS at compile time and emit the literal assignment.
      var="${line%%=*}"
      eval "val=${line#$var=}"
      add "$var=$val"
    elif [[ "$line" =~ .*'#<<NOSHADOW>>' ]]; then
      # Collect subsequent lines into a template function body until a
      # 'noshadow ...' line is reached and executed.
      noshadow_func="dummy()"$'\n'"{"
      noshadow_mode=true
    elif $noshadow_mode; then
      if [[ "$line" =~ noshadow.* ]]; then
        eval "$line"
        noshadow_mode=false
      else
        noshadow_func="$noshadow_func"$'\n'"$line"
      fi
    else
      add "$line"
    fi
    if ${POWSCRIPT_CDEBUG-false}; then check_after_line; fi
  done <"$1"
}
# Build the executable from the top-level source, then compare against the
# backup: if the output changed, bump the patch version (diff shown with -v).
read_file "$Dir/src/powscript.bash"
chmod +x "$Dir/powscript"
if [ -f "$Dir/.powscript.backup" ]; then
  if ! cmp -s "$Dir/.powscript.backup" "$Dir/powscript"; then
    if [[ "$*" =~ (--verbose|-v) ]]; then
      diff "$Dir/.powscript.backup" "$Dir/powscript" || true
    fi
    update_patch_version "$NewPatchVersion"
  fi
  rm "$Dir/.powscript.backup"
fi
| true
|
681bb2125af38dec9bc8b874604198cb75d2ac06
|
Shell
|
jeffgarrett/dotfiles
|
/old/install
|
UTF-8
| 1,557
| 3.203125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/bash -ex
# Provision local developer tooling: a download-and-extract helper plus
# installs of rust-analyzer, node (via n), fonts, neovim nightly and
# assorted language servers.  Runs with -e (abort on error) and -x (trace).
# install URL DEST EXTRACTOR [ARG1 ARG2]: download an archive to a temp
# dir, extract it with EXTRACTOR, and rsync the result into DEST.
function install {
  url="$1"
  dir="$2"
  ext="$3"
  arg="$4 $5"
  tmp="$(mktemp -d)"
  # NOTE(review): each call overwrites the EXIT trap, so only the most
  # recent temp dir is cleaned up -- confirm this is acceptable.
  trap "rm -rf $tmp" EXIT
  curl -s -L "$url" -o "$tmp/archive"
  mkdir "$tmp/extract"
  (
    cd "$tmp/extract"
    "$ext" $arg ../archive
    mkdir -p "$dir/"
    # --delete removes stale files from a previous install.
    rsync --delete -av ./ "$dir/"
  )
}
# rust-analyzer
ln -sf ~/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu/bin/rust-analyzer ~/.local/bin
## Language package managers
### npm
mkdir -p ~/.local/n/bin
curl -L https://raw.githubusercontent.com/tj/n/master/bin/n -o ~/.local/n/bin/n
chmod +x ~/.local/n/bin/n
N_PREFIX=~/.local/n N_CACHE_PREFIX=~/.cache ~/.local/n/bin/n lts
install https://github.com/ryanoasis/nerd-fonts/releases/download/2.2.0-RC/FiraCode.zip \
  ~/.local/share/fonts/nerd-fonts-fira-code unzip
fc-cache -f
install https://github.com/neovim/neovim/releases/download/nightly/nvim-linux64.tar.gz \
  ~/.local/nvim-nightly tar --strip-components=1 -xvf
go install github.com/mattn/efm-langserver@latest
#
# Protobuf editor support
#
install https://github.com/bufbuild/buf/releases/download/v1.3.1/buf-Linux-x86_64.tar.gz \
  ~/.local/buf tar --strip-components=1 -xvf
#
# Shell editor support
#
PATH=~/.local/n/bin:$PATH npm install --global --no-progress --silent bash-language-server
go install mvdan.cc/sh/v3/cmd/shfmt@latest
#cabal update
#cabal install ShellCheck
#
# Lua editor support
#
install https://github.com/sumneko/lua-language-server/releases/download/2.6.7/lua-language-server-2.6.7-linux-x64.tar.gz \
  ~/.local/lua-language-server tar -xvf
| true
|
e394ce963f54aa94b3fdf574e618c6156d150380
|
Shell
|
anishchakka/misc_codes
|
/shell/do_these_files_exist.sh
|
UTF-8
| 264
| 3.5625
| 4
|
[] |
no_license
|
#! /bin/bash
# Report which of the files listed in $1 (one path per line) are missing.
# Tip: if the list came from Excel/Windows, strip the CRs first:
#   sed -i 's/\r$//' input_file.txt
list_file=$1
while IFS= read -r candidate
do
  if [ ! -f "$candidate" ]; then
    echo "$candidate does not exist."
  fi
done < "$list_file"
| true
|
4b980944898ff96f28bfbc95fec0772813cc877f
|
Shell
|
hsondd/learn
|
/shell/sed_Highlight_First_char.sh
|
UTF-8
| 101
| 2.734375
| 3
|
[] |
no_license
|
#!/bin/bash
# Read a sentence and print it with each capital letter at a word boundary
# wrapped in parentheses, e.g. "Hello World" -> "(H)ello (W)orld".
IFS= read -r -p "Enter sentence: " input
# \b (word boundary) is a GNU sed extension -- may not work on BSD sed.
echo "$input" | sed 's/\(\b[A-Z]\)/\(\1\)/g'
| true
|
07128155db1fdf84855efc44827a9483f8326633
|
Shell
|
mmgaggle/hive-testbench
|
/tpcds-concurrent-run.sh
|
UTF-8
| 613
| 2.984375
| 3
|
[] |
no_license
|
#!/bin/bash
# Run the TPC-DS query suite with N concurrent runners per engine/format
# combination, at each scale factor.
#   $1 - query directory (also receives the stats logs)
#   $2 - concurrency level
QUERY_DIR=$1
CONCURRENCY=$2
# Write a CSV header into one stats log per concurrent runner.
for x in `seq 1 $CONCURRENCY`
do
  echo "query_set,run_id,engine,format,scale_factor,query,status,start,end,tot_time,query_time,rows" > $1/tpc_stats_$x.log
done
#for scale in 1000 10000 100000
for scale in 1000 10000
do
  #for engine_format in "presto orc" "spark parquet" "hive orc" "hive-spark orc"
  for engine_format in "spark parquet" "presto orc"
  do
    # Launch all runners in parallel, then barrier before the next combo.
    for x in `seq 1 $CONCURRENCY`
    do
      perl runSuite.pl $QUERY_DIR $x $engine_format $scale &
    done
    wait
  done
done
# NOTE(review): $x still holds the last loop value here, so only the final
# runner's log gets a timestamped snapshot -- confirm whether all logs
# should be copied.
cp $1/tpc_stats_$x.log $1/tpc_stats_$x.log.`date "+%F-%T"`
| true
|
08b332892b01ef8bbb657f8bd7daff888db8c23c
|
Shell
|
MitchRatquest/HackadaySynth
|
/configs/overlay/etc/network/if-up.d/wireguard_startup
|
UTF-8
| 536
| 2.625
| 3
|
[] |
no_license
|
#!/bin/sh
# if-up.d hook: bring up the wg0 WireGuard interface.
# /etc/wireguard/conf must define LOCALADDRESS, PRIVKEY, PEER, SUBNET,
# MASK and SERVERIP.
source /etc/wireguard/conf
#private key is generated via `wg genkey`
#public key is `echo $PRIVATEKEY | wp pubkey`
#the peer is the server's public key
#you put this computer's public key in the server's conf
#with the appropriate setup and routing information
#sets up the wireguard interface
ip link add dev wg0 type wireguard
ip address add dev wg0 $LOCALADDRESS
# Configure keys, the peer, and the routed subnet, then bring the link up.
wg set wg0 listen-port 21841 \
  private-key $PRIVKEY \
  peer $PEER \
  allowed-ips $SUBNET/$MASK \
  endpoint $SERVERIP:24735
ip link set up dev wg0
exit 0
| true
|
c20ffdb4d4ca582b85985d1d5b17b896f4616f08
|
Shell
|
Shengliang/language
|
/bash/lonely_int.sh
|
UTF-8
| 173
| 3.3125
| 3
|
[] |
no_license
|
#!/bin/bash
# Print the "lonely" integer of an array: every other value appears an
# even number of times, so XOR-folding the whole array leaves it behind.
# Usage: cat lonely_int.txt | ./lonely_int.sh
read -r LIMIT        # first input line: declared element count (unused)
read -a values       # second input line: space-separated integers
acc="0"
for v in "${values[@]}"
do
  acc=$(( v ^ acc ))
done
echo $acc
| true
|
586f47bdcd08a85cf8cbbbff6cdfad5b3d1b63a1
|
Shell
|
swordhui/xglibs
|
/stages/filedel.sh
|
UTF-8
| 246
| 3.546875
| 4
|
[] |
no_license
|
#!/bin/sh
# file system check tool: delete every regular file below the current
# directory whose name ends in the given extension (case-insensitive).
# Usage: filedel.sh EXT
if [ -z "$1" ]; then
  echo "No ext specified. filedel.sh exe"
  exit 1
fi
echo "deleting *.$1..."
# Give find an explicit start path: invoking it without one is a GNU
# extension that fails on POSIX/BSD find.  '+' batches deletions into few
# rm invocations instead of one per file; '--' guards against names that
# begin with a dash.
find . -type f -iname "*.$1" -exec rm -- '{}' +
echo "done"
| true
|
c850814a634f8894863e3a1f0e9e61fa7ba17982
|
Shell
|
Anthony-Mattson/mini-project
|
/launch.sh
|
UTF-8
| 1,737
| 3.515625
| 4
|
[] |
no_license
|
#!/bin/bash
### USAGE:
# User calls `launch-app stack-name`
### REQUIREMENTS:
# * Valid AWS account (run aws configure)
# * Docker
###
# Deploys three CloudFormation stacks (ECR repo, load balancer, Fargate
# service) and pushes the locally-built Docker image to the ECR repo.
CORE_STACK_NAME=$1
# Create the ECR repo.
echo "Creating ECR Stack..."
aws cloudformation create-stack \
  --stack-name ${CORE_STACK_NAME}-ecr \
  --template-body file://infrastructure/templates/ecr.yaml \
  --parameters file://infrastructure/parameters/ecrparams.json
# Wait for it to come up.
aws cloudformation wait stack-create-complete --stack-name ${CORE_STACK_NAME}-ecr
# Log in to the ECR repo.
echo "Performing Docker actions..."
$(aws ecr get-login --no-include-email --region us-east-1)
ECR_REPO_URI=$(aws ecr describe-repositories --query 'repositories[].repositoryUri' --output text)
# Build, tag, and upload the Docker image.
docker build . -t ${CORE_STACK_NAME}
docker tag ${CORE_STACK_NAME}:latest ${ECR_REPO_URI}:latest
docker push ${ECR_REPO_URI}:latest
# Create the loadbalancer stack.
echo "Creating Load Balancer Stack..."
aws cloudformation create-stack \
  --stack-name ${CORE_STACK_NAME}-loadbalancer \
  --template-body file://infrastructure/templates/loadbalancer.yaml \
  --parameters file://infrastructure/parameters/loadbalancerparams.json
aws cloudformation wait stack-create-complete --stack-name ${CORE_STACK_NAME}-loadbalancer
# Deploying Fargate stack.  CAPABILITY_NAMED_IAM is needed because the
# template creates named IAM resources.
echo "Creating Fargate Stack..."
aws cloudformation create-stack \
  --stack-name ${CORE_STACK_NAME}-fargate \
  --template-body file://infrastructure/templates/fargate.yaml \
  --parameters file://infrastructure/parameters/fargateparams.json \
  --capabilities CAPABILITY_NAMED_IAM
aws cloudformation wait stack-create-complete --stack-name ${CORE_STACK_NAME}-fargate
| true
|
295ff30fa4c95c74bfd6752842d94ad694752d27
|
Shell
|
OnGle/torrentserver
|
/conf.d/main
|
UTF-8
| 1,054
| 2.90625
| 3
|
[] |
no_license
|
#!/bin/sh -ex
# First-boot configuration for the torrent server appliance: storage
# layout, transmission, clamav, samba and nginx.
STORAGE=/srv/storage
PASSWORD=turnkey
#SRC=/usr/local/src
#WEBROOT=/var/www/rutorrent
# Storage directories owned by the transmission daemon user.
mkdir -p $STORAGE/incoming
mkdir -p $STORAGE/quarantine
mkdir -p $STORAGE/download
chown -R debian-transmission:users $STORAGE/incoming
chown -R debian-transmission:users $STORAGE/quarantine
chown -R debian-transmission:users $STORAGE/download
mkdir /var/log/rtorrent
# configure transmission: point downloads at the storage volume.
CONF=/etc/transmission-daemon/settings.json
sed -i 's|"download-dir": .*|"download-dir": "\/srv\/storage\/download",|' $CONF
# configure clamav: disable mail scanning, log via syslog.
CONF=/etc/clamav/clamd.conf
sed -i 's|\(ScanMail\) true|\1 false|; s|\(LogSyslog\) false|\1 true|' $CONF
# configure samba: netbios name from hostname, banner from build metadata.
CONF=/etc/samba/smb.conf
NETBIOS_NAME=$(echo $HOSTNAME | tr [a-z] [A-Z])
sed -i "s|\(netbios name =\) \(.*\)|\1 $NETBIOS_NAME|" $CONF
sed -i "s|\(server string =\) \(.*\)|\1 $CREDIT_ANCHORTEXT|" $CONF
# Enable the transmission nginx site and drop the default one.
ln -s /etc/nginx/sites-available/transmission /etc/nginx/sites-enabled/transmission
rm -f /etc/nginx/sites-enabled/default
# initscript
update-rc.d transmission-daemon defaults 99
| true
|
61972f45203fd89e1022e119d2ca6698f6c1f923
|
Shell
|
fizzed/jne
|
/setup/build-native-lib-linux-action.sh
|
UTF-8
| 556
| 3.046875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# CI action: cross-compile the native test binaries (jcat, libhelloj) for
# the given OS/arch and copy them into the test resources tree.
#   $1 - target OS, $2 - target architecture
set -e
BASEDIR=$(dirname "$0")
cd "$BASEDIR/.."
PROJECT_DIR=$PWD
BUILDOS=$1
BUILDARCH=$2
# Setup cross compile environment
if [ -f /opt/setup-cross-build-environment.sh ]; then
  . /opt/setup-cross-build-environment.sh $BUILDOS $BUILDARCH
fi
. setup/setup-environment.sh
# Work on a synced copy of native/ so the source tree stays clean.
mkdir -p target
rsync -avrt --delete ./native/ ./target/
cd target/jcat
make
# NOTE(review): CXXFLAGS is exported *after* jcat's make, so it only
# affects the libhelloj build -- confirm that is intended.
export CXXFLAGS="-z noexecstack"
cd ../libhelloj
make
cd ..
OUTPUT_DIR="../src/test/resources/jne/${BUILDOS}/${BUILDARCH}"
cp jcat/jcat "$OUTPUT_DIR"
cp libhelloj/libhelloj.so "$OUTPUT_DIR"
| true
|
89fd5b4bc4159a51aef5bc4f92b036a8f4b23cd5
|
Shell
|
urbanmassage/gist
|
/octopus/push_package.sh
|
UTF-8
| 690
| 3.09375
| 3
|
[
"MIT"
] |
permissive
|
# Zip the deploy/ directory into a versioned package and push it to the
# Octopus Deploy built-in feed.
# Env: OCTOPUS_BASE (host), OCTO_API_KEY.  Args: $1 build number, $2 project.
# -f disables globbing so URL/path variables are not expanded as patterns.
set -f
BASE_DIR=$(dirname $0)
RELEASE_DIR="${BASE_DIR}/deploy"
OCTOPUS_FULL_BASE="https://${OCTOPUS_BASE}/api"
OCTOPUS_PROJECTS_URL="${OCTOPUS_FULL_BASE}/projects/all"
CURRENT_BUILD_NO=$1
CURRENT_PROJECT_NAME=$2
PROJECT_NAME="${CURRENT_PROJECT_NAME}.${CURRENT_BUILD_NO}"
ZIP_FILE="${PROJECT_NAME}.zip"
zip -r "${BASE_DIR}/${ZIP_FILE}" "${RELEASE_DIR}"
mv "${BASE_DIR}/${ZIP_FILE}" "${RELEASE_DIR}/${ZIP_FILE}"
# Upload the package; response body goes to stderr, HTTP status is captured.
post_status_code=$(curl --silent --output /dev/stderr --write-out "%{http_code}" -X POST ${OCTOPUS_FULL_BASE}/packages/raw -H "X-Octopus-ApiKey:${OCTO_API_KEY}" -F "data=@${RELEASE_DIR}/${ZIP_FILE}")
# Fail the build when Octopus rejects the upload (HTTP >= 300).
if [ ${post_status_code} -ge 300 ];
then
  exit ${post_status_code};
fi
| true
|
98283e3055f755dc003012e0cf81e35330dc7536
|
Shell
|
deathgrindfreak/dotfiles
|
/zsh/Library/bashrc.d/009.misc.sh
|
UTF-8
| 135
| 2.953125
| 3
|
[] |
no_license
|
#!/usr/bin/env bash
# boop: play a success or failure sound based on the exit status of the
# previously-run command, then propagate that status unchanged.
boop() (
  # '( )' body runs in a subshell; $? here is the status of the command
  # executed before boop was invoked.
  local last="$?";
  if [ $last -eq 0 ]; then
    sfx good   # 'sfx' is an external helper -- TODO confirm it is on PATH
  else
    sfx bad
  fi
  # Idiom: an empty command whose only expansion is $(exit N) leaves $? = N,
  # so the subshell -- and thus boop -- returns the original status.
  $(exit "$last")
)
| true
|
975a9cb024fa0357941f67c030a5bf8e1c6d74b5
|
Shell
|
redhat-cip/puppet-installserver
|
/install.sh
|
UTF-8
| 1,018
| 3.65625
| 4
|
[] |
no_license
|
#!/bin/bash
#
# Deploy a SpinalStack Install Server Quickly
#
# Detects the host distribution, installs puppet + r10k, resolves the
# Puppetfile modules, and applies install-server.pp.
ORIG=$(cd $(dirname $0); pwd)
# Under Vagrant the sources are mounted at /vagrant.
if [ -d /vagrant ]; then
  ORIG="/vagrant"
fi
# Distribution detection: check lsb-release before debian_version, since
# Ubuntu ships both files.
if [ -f /etc/redhat-release ]; then
  OS=redhat
elif [ -f /etc/lsb-release ]; then
  OS=ubuntu
elif [ -f /etc/debian_version ]; then
  OS=debian
else
  echo "Error - OS unknown"
  exit 1
fi
case "$OS" in
  "redhat")
    yum update
    yum install puppet rubygems git
    gem install r10k
    cd $ORIG
    r10k puppetfile install
    ;;
  "ubuntu")
    apt-get update
    apt-get -y install puppet rubygems git
    gem install r10k
    cd $ORIG
    r10k puppetfile install
    ;;
  "debian")
    # Wheezy needs the puppetlabs apt repo and a pinned tzdata.
    wget https://apt.puppetlabs.com/puppetlabs-release-wheezy.deb
    dpkg -i puppetlabs-release-wheezy.deb
    apt-get update
    apt-get -y --force-yes install puppet rubygems git tzdata=2014a-0wheezy1
    gem install r10k
    cd $ORIG
    r10k puppetfile install
    # Make the FQDN resolvable locally for puppet.
    echo "127.0.0.1 `facter fqdn`" >> /etc/hosts
    ;;
  *)
    exit 0
    ;;
esac
puppet apply --modulepath=$ORIG/modules $ORIG/install-server.pp
| true
|
f1e263d56448b6a5bbeb8d96c42e1c08a3da1a57
|
Shell
|
darrenldl/oali-profiles
|
/laptop/scripts/usb_key_reset_loader_template.unused
|
UTF-8
| 1,375
| 4.09375
| 4
|
[
"MIT",
"Unlicense"
] |
permissive
|
#!/bin/bash
# Loader for the main USB-key resetting script: copies the main script
# (and the unloader template, when present) into the live-CD environment
# under /root, fills in the boot-partition placeholders, then hands over.
# LLSH_*_DUMMY names are placeholders substituted at build time.
echo "This is the loader script for the main USB key resetting script"
echo ""
echo "Looking for main USB key resetting script over to live CD environment"
target_path="/root"
main_script_name=LLSH_USB_KEY_RESET_MAIN_DUMMY
main_script_path="$target_path"/"$main_script_name"
usb_key_unloader_name=LLSH_USB_KEY_RESET_UNLOADER_DUMMY
usb_key_unloader_path="$target_path"/"$usb_key_unloader_name"
# BUG FIX: the original tested '[ -f main_script_path ]' (literal string,
# missing '$'), which can never match.  The file copied below is the local
# "$main_script_name", so that source file is what must exist.
if [ -f "$main_script_name" ]; then
  echo "Main script found"
  echo "Copying over to $target_path"
  cp "$main_script_name" "$main_script_path"
  echo "Looking for USB key unloading script"
  # Same missing-'$' bug here.  The copy source is the literal template
  # file 'usb_key_unloader_template' -- TODO confirm the intended name.
  if [ -f usb_key_unloader_template ]; then
    echo "Unloader script template found"
    echo "Generating unloader script to store at $target_path"
    cp usb_key_unloader_template "$usb_key_unloader_path"
    # Figure out boot partition mapper name (current partition)
    boot_part=$(df -P . | tail -1 | cut -d' ' -f 1)
    sed -i "s@BOOT_CRYPT_MOUNTED_DUMMY@true@g" "$usb_key_unloader_path"
    sed -i "s@BOOT_CRYPT_DUMMY@$boot_part@g" "$usb_key_unloader_path"
    echo "Executing main script"
  else
    echo "Unloader script not found"
    echo "Please close and unmount the encrypted boot partition manually"
    echo "then execute the main USB key resetting script stored at $target_path"
  fi
else
  echo "Main script not found"
  echo "Exiting loader script"
  exit
fi
| true
|
5c5ef8abc95fd181e38bc406526025d411e4107b
|
Shell
|
willgage/git-scripts
|
/branch_gc
|
UTF-8
| 6,678
| 4.25
| 4
|
[] |
no_license
|
#!/bin/bash
# Print the help/usage text to stdout and exit successfully.
usage() {
  echo ""
  echo "$0 [-D -m -r -h]";
  echo ""
  echo "DANGER: MAY DELETE MANY BRANCHES. This program tries to be careful, but a mistake in use could be costly."
  echo ""
  echo "Cleans up branches which have already been merged with respect to a master branch and a remote;"
  echo "removing both their local and remote copies and syncing the local repository with the"
  echo "remote one. The master branch need not be named \"master\", though following such a convention "
  echo "is encouraged."
  echo ""
  echo "Options:"
  echo " -D (no arguments) Do the deletion for real. By default we execute in dry run mode."
  echo " -m (with value) Specifies the branch to use as the \"master\". By default the value is \"master\"."
  echo " -r (with value) Specifies the remote to operate against. By default the value is \"origin\"."
  echo " -h (no argument) Display this help message."
  echo ""
  exit 0;
}
# Emit a log line of the form "[LEVEL]: message".
log() {
  local level=$1 message=$2
  printf '%s\n' "[${level}]: ${message}"
}
# Run (or, in dry-run mode, merely print) a destructive command string.
# Reads the global $dryRun flag: >0 = log only, 0 = actually execute.
safe_delete() {
  local command_string=$1
  if [ $dryRun -gt 0 ]
  then
    log "INFO" "(Dry Run) $command_string"
  else
    log "INFO" "Executing: $command_string"
    # Intentionally unquoted: the string is word-split into argv.
    $command_string
  fi
}
# Reduce a 'git branch' listing line to the bare branch name: drop a
# leading '*' marker, surrounding whitespace, and an optional
# "remotes/<remote>/" prefix.
strip_branch_name() {
  local raw_line=$1
  local remote_name=$2
  printf '%s\n' "$raw_line" | sed -E -e "s/^\*?[[:space:]]*(remotes\/$remote_name\/)?([^[:space:]]+).*\$/\2/"
}
# Succeed (status 0) when the given count is positive, fail (1) otherwise.
# Note the shell convention: 0 means true, which inverts the usual
# boolean interpretation of the count itself.
exists() {
  local -i count=$1
  [ "$count" -gt 0 ]
}
# True when a local branch named $1 exists.
branch_exists() {
  local branch=$1;
  exists $((`git branch | egrep "[[:space:]]$branch$" | wc -l`))
}
# True when the remote-tracking branch $1 (e.g. "origin/foo") exists.
remote_branch_exists() {
  local branch=$1;
  exists $((`git branch -r | egrep "[[:space:]]*$branch$" | wc -l`))
}
# True when branch $2 contains every commit reachable from branch $1.
branch_contains() {
  local branch1=$1;
  local branch2=$2
  exists $((`git branch -a --contains "$branch1" 2> /dev/null | egrep "[[:space:]]*$branch2$" | wc -l`));
}
# Delete branch $1: on the remote given as $2 when non-empty, locally
# otherwise.  Honors dry-run via safe_delete; aborts the script on failure.
delete_branch() {
  local to_delete=$1;
  local my_remote=$2;
  if [ -n "$my_remote" ];
  then
    # - delete the remote branch (git push origin :$branch)
    log "INFO" "deleting remote branch $my_remote/$to_delete";
    safe_delete "git push $my_remote :$to_delete";
    test $? -ne 0 && log "ERROR" "Failed to delete $my_remote/$to_delete." && exit 1
  else
    log "INFO" "deleting local branch $to_delete";
    safe_delete "git branch -d $to_delete";
    test $? -ne 0 && log "ERROR" "Failed to delete $to_delete." && exit 1
  fi
}
# Interactively confirm deletion of $1; always succeeds in dry-run mode.
check_delete() {
  local my_branch="$1"
  if [ $dryRun -eq 0 ]
  then
    log "WARN" "Do you really want to delete $my_branch?"
    select yn in "Yes" "No"; do
      case $yn in
        Yes ) return 0;;
        No ) return 1;;
      esac
    done
  fi
  return 0;
}
remote="origin"
masterBranch="master"
# we are in dry run mode by default
let -i dryRun=1
while getopts "r:m:Dh" value; do
case "$value" in
r) remote="$OPTARG"; log "INFO" "setting remote=$OPTARG";;
m) masterBranch="$OPTARG"; log "INFO" "setting masterBranch=$OPTARG";;
D) dryRun=0; log "INFO" "Training wheels off -- we are deleting for reals.";;
h) usage;;
[?]) usage;;
esac
done
# need to have git
which git > /dev/null
test $? -ne 0 && echo "you need to install git" && exit 1
#Check that we are in a git repository
git status > /dev/null ; test $? -ne 0 && log "ERROR" "Wake up! You need to be in a git repository." && exit 1
# save current branch name
currentBranch=`git branch | grep '^*'`
startingBranch=$(strip_branch_name "$currentBranch" "$remote")
# checkout master
# test if successful, fail otherwise
if [ "$startingBranch" != "$masterBranch" ];
then
git checkout "$masterBranch"
test $? -ne 0 && log "ERROR" "Failed to checkout $masterBranch." && exit 1
fi
# Find all the branches which have been merged to master already
# By default, we'll assume we're processing both locals and remotes
listBranchCmd="git branch -a --merged $masterBranch | egrep '^[^*>]*$' | sed -E -e 's/^\*?[[:space:]]*(remotes\/$remote\/)?([^[:space:]]+).*\$/\2/' | sort | uniq"
let -i mergedBranchCount=0
if [ -n "$remote" ]
then
# get synced up with the remote
git fetch "$remote";
git remote prune "$remote";
# we subtract one, knowing that 'origin/master' will be in the list too
mergedBranchCount=$((`eval "$listBranchCmd | wc -l"`-1))
else
# if no remote, then just deal with local branches
listBranchCmd="git branch --merged $masterBranch | egrep '^[^*>]*$'"
mergedBranchCount=$((`eval "$listBranchCmd | wc -l"`))
fi
if [ $dryRun -eq 0 ]
then
log "WARN" "$mergedBranchCount candidate branches to delete. Are you sure you want to do this?"
select yn in "Yes" "No"; do
case $yn in
Yes ) log "INFO" "OK, proceeding. You will be prompted before each specific branch and may skip individual branches."; break;;
No ) log "INFO" "Finished. No action taken."; exit ;;
esac
done
else
log "INFO" "$mergedBranchCount candidate branches to delete..."
fi
let -i deletedCount=0
let -i skippedCount=0
for x in `eval "$listBranchCmd"`;
do
branch=$(strip_branch_name "$x" "$remote")
# DON'T DELETE MASTER
if [ "$branch" != "$masterBranch" ];
then
# if we've got a local and remote, and the remote contains the local
# then delete both local and remote
if branch_exists "$branch" && branch_contains "$branch" "remotes/$remote/$branch";
then
if check_delete "$branch"
then
delete_branch "$branch" ""
delete_branch "$branch" "$remote"
deletedCount=$((deletedCount+1));
else
skippedCount=$((skippedCount+1));
fi
elif branch_exists "$branch";
then
# we've got a local, but its commits are not in remote
# somewhat sketchy -- let's skip this branch
log "INFO" "Local branch $branch is either not on $remote, or contains commits not on $remote. Skipping."
skippedCount=$((skippedCount+1));
elif remote_branch_exists "$remote/$branch";
then
# no local, but we've got the remote. delete it.
if check_delete "$branch"
then
delete_branch "$branch" "$remote"
deletedCount=$((deletedCount+1));
else
skippedCount=$((skippedCount+1));
fi
fi
fi
done
log "INFO" "Deleted $deletedCount branches, skipped $skippedCount branches"
if [ -n "$remote" ]
then
# again, get synced up with the remote
git remote prune "$remote";
fi
# if start branch is still there, go back to it
if [ "$startingBranch" != "$masterBranch" ];
then
if branch_exists "$startingBranch";
then
git checkout "$startingBranch"
else
log "INFO" "Starting branch $startingBranch was deleted"
fi
fi
| true
|
8123cf7cd4b3668671d8bd142133bbd8fe97f26a
|
Shell
|
starlingx/config
|
/sysinv/sysinv/sysinv/etc/sysinv/sysinv_goenabled_check.sh
|
UTF-8
| 790
| 3.75
| 4
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# SysInv "goenabled" check.
# Wait for sysinv information to be posted prior to allowing goenabled.
NAME=$(basename $0)
SYSINV_READY_FLAG=/var/run/.sysinv_ready
logfile=/var/log/platform.log
# Log to syslog and append a timestamped line to the platform log.
function LOG {
  logger "$NAME: $*"
  echo "`date "+%FT%T"`: $NAME: $*" >> $logfile
}
# Poll for the ready flag, up to 46 one-second attempts.
count=0
while [ $count -le 45 ]; do
  if [ -f $SYSINV_READY_FLAG ]; then
    LOG "SysInv is ready. Passing goenabled check."
    echo "SysInv goenabled iterations PASS $count"
    LOG "SysInv goenabled iterations PASS $count"
    exit 0
  fi
  sleep 1
  count=$(($count+1))
done
echo "SysInv goenabled iterations FAIL $count"
# Deliberately exits 0 even on timeout ("Continue") so goenabled is not
# blocked -- the log line records the failure.
LOG "SysInv is not ready. Continue."
exit 0
| true
|
c96dff8993bde5376e5435e85364e45c65573d44
|
Shell
|
apache/bookkeeper
|
/tests/docker-images/all-versions-image/image_builder.sh
|
UTF-8
| 1,730
| 3.046875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements.  See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership.  The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License.  You may obtain a copy of the License at
# *
# *     http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */

# Build the "all versions" BookKeeper docker test image from the server
# distribution tarball. $1 is the BookKeeper version (defaults to UNKNOWN).
set -e

IMAGE_NAME=apachebookkeeper/bookkeeper-all-versions:latest
FORCE_REBUILD="${BOOKKEEPER_DOCKER_IMAGES_FORCE_REBUILD:-false}"

# Skip the (slow) docker build when a local image already exists, unless a
# rebuild is forced via BOOKKEEPER_DOCKER_IMAGES_FORCE_REBUILD=true.
if [[ "$FORCE_REBUILD" != "true" && "$(docker images -q "$IMAGE_NAME" 2> /dev/null)" != "" ]]; then
  echo "reusing local image: $IMAGE_NAME"
  exit 0
fi

SCRIPT_DIR=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
## BASE_DIR will be ./bookkeeper/
BASE_DIR="${SCRIPT_DIR}/../../../"
VERSION=${1:-UNKNOWN}

# All expansions are quoted consistently (the original mixed quoted and
# unquoted path concatenation, which breaks on paths containing spaces).
mkdir -p "${BASE_DIR}/tests/docker-images/all-versions-image/build"
ls -la "${BASE_DIR}/bookkeeper-dist/server/build/distributions"
cp "${BASE_DIR}/bookkeeper-dist/server/build/distributions/bookkeeper-server-${VERSION}-bin.tar.gz" \
   "${BASE_DIR}/tests/docker-images/all-versions-image/build/"
TARBALL="build/bookkeeper-server-${VERSION}-bin.tar.gz"
docker build -t "${IMAGE_NAME}" "${BASE_DIR}/tests/docker-images/all-versions-image" --build-arg BK_TARBALL="${TARBALL}"
| true
|
8cdc160aeee21a0cd0abe746c80d0cc0798aa7f3
|
Shell
|
speechLabBcCuny/nnaAudiosetClassification
|
/bin/copyWav2Flac.sh
|
UTF-8
| 772
| 4.15625
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/bash

# Copy wav files in a directory tree to another directory tree of flac
# files. If third and fourth arguments (K and N) are provided, then
# take every Nth filename starting at position K (for parallelization
# across several simultaneous runs).

# BUG FIX: K and N are optional (both default to 1), so 2..4 arguments are
# valid; the original required at least 3, contradicting its own usage line.
if [ $# -lt 2 ] || [ $# -gt 4 ]; then
    echo "Usage: `basename $0` src_dir dst_dir [k n]"
    exit 1
fi

SRC=$1
DST=$2
K=${3:-1}
N=${4:-1}

# NB: "sed -n 'K~Np'" (every Nth line starting at K) is a GNU sed extension.
find "$SRC" -iname '*.wav' \
  | sort \
  | sed -n "${K}~${N}p" \
  | while IFS= read -r fullwav ; do   # IFS=/-r: preserve spaces/backslashes
    # Path of the wav file relative to $SRC.
    wav="${fullwav#${SRC}}"
    # BUG FIX: strip the extension case-insensitively; "find -iname" also
    # matches .WAV/.Wav, which "%.wav" left intact, producing "foo.WAV.flac".
    outfile="${DST}/${wav%.*}.flac"
    if [ -f "$outfile" ] ; then
        echo "Skipping $outfile"
    else
        subdir=`dirname "$wav"`
        mkdir -p "${DST}/${subdir}"
        ffmpeg -n -nostdin -i "$fullwav" "$outfile"
    fi
done
| true
|
5c426e1a7d17bcbb6ab96b620aa423dcaf50a24d
|
Shell
|
mixianghang/shellscripts
|
/bph/whoisData/format/addAsn2InetnumForOneRegistry.sh
|
UTF-8
| 1,045
| 3.59375
| 4
|
[] |
no_license
|
#!/bin/bash

# Run addAsn2Inetnum.py for every day between startDate and endDate
# (inclusive, YYYYMMDD format) for one registry, then merge the per-day
# results back into the format directory.

formatBaseDir=/data/seclab/BPH/Uniform/
bgpBaseDir=/data/salrwais/BPH/Whois/API/BGP/
currDir=/data/seclab/BPH/Xianghang/bulkData/Scripts/format/
scriptDir=/data/seclab/BPH/Xianghang/bulkData/Scripts/format/

if [ $# -ge 3 ]; then
    startDate=$1
    endDate=$2
    registry=$3
else
    echo "Usage startDate endDate registry"
    exit 1
fi

# Timestamped scratch area so concurrent runs do not clobber each other.
tempBaseDir="$currDir/temp_addAsn2Inetnum_$(date +"%Y%m%d_%H%M%S")"

date=$startDate
while [ "$date" -le "$endDate" ]
do
    tempDir="$tempBaseDir/$date"
    formatDir="$formatBaseDir/$date"
    bgpDir="$bgpBaseDir/$date"
    echo "$date"
    mkdir -p "$tempDir"
    mkdir -p "$formatDir"
    mkdir -p "$bgpDir"
    # NOTE(review): "bpgTable" looks like a typo for "bgpTable" — verify the
    # actual filename on disk before renaming it here.
    echo "$scriptDir/addAsn2Inetnum.py $formatDir $bgpDir/bpgTable $tempDir $registry"
    "$scriptDir/addAsn2Inetnum.py" "$formatDir" "$bgpDir/bpgTable" "$tempDir" "$registry"
    if [ ! $? -eq 0 ]; then
        # Leave this day's partial output in the scratch dir and move on.
        echo "run addAsn2Inetnum.py failed for $date $registry"
        date=$(date -d "$date +1day" +"%Y%m%d")
        continue
    fi
    mv "$tempDir"/* "$formatDir"/
    # ${tempDir:?} aborts instead of expanding to "/*" if tempDir is empty.
    rm -rf "${tempDir:?}"/*
    date=$(date -d "$date +1day" +"%Y%m%d")
done

rm -rf "${tempBaseDir:?}"
| true
|
0fcf080a0f40bbfe65fc8a5ae8d0fe22e550705f
|
Shell
|
andsmedeiros/uevloop
|
/scripts/publish.sh
|
UTF-8
| 3,062
| 4.03125
| 4
|
[
"MIT"
] |
permissive
|
#!/bin/env bash
function get_version() {
    # Determine the next release version.
    #
    # Reads the current version from .version (creating it as 0.0.0 when
    # missing), then asks the user which component to bump — (M)ajor,
    # (m)inor, (p)atch — or for a fully custom version string.
    #
    # Globals set: current_version, version_type, new_version
    if [[ ! -e .version ]]; then
        echo "No .version file found. Creating."
        touch .version
        echo "0.0.0" > .version
    fi

    current_version=`cat .version`
    custom_version=""
    new_version=""
    replace_pat=""

    echo "Current version is $current_version"
    read -p "Select version type ((M)ajor, (m)inor, (p)atch or (c)ustom): " version_type

    # replace_pat becomes the awk expression that rebuilds the three
    # dot-separated fields with the chosen one bumped.
    if [[ $version_type == "major" || $version_type == "M" ]]; then
        replace_pat="(\$1+1), 0, 0"
        echo "Publishing major version."
        version_type="major"
    elif [[ $version_type == "minor" || $version_type == "m" ]]; then
        replace_pat="\$1, (\$2+1), 0"
        echo "Publishing minor version."
        version_type="minor"
    elif [[ $version_type == "patch" || $version_type == "p" || $version_type == "P" ]]; then
        replace_pat="\$1, \$2, (\$3+1)"
        echo "Publishing patch."
        version_type="patch"
    elif [[ $version_type == "custom" || $version_type == "c" || $version_type == "C" ]]; then
        read -p "Custom version: " custom_version
        version_type="custom"
    else
        echo "Unknown option " $version_type ". Exitting."
        exit 1
    fi

    if [[ $replace_pat != "" ]]; then
        new_version=`echo $current_version | awk -F. "{printf \"%d.%d.%d\", $replace_pat}"`
    else
        # BUG FIX: was "$curstom_version" (typo), which silently produced an
        # empty new_version on the custom path.
        new_version=$custom_version
    fi
}
get_annotation() {
    # Open an editor on a throw-away file and capture whatever the user
    # typed into the global $annotation (read later by the caller).
    scratch=$(mktemp)
    nano "$scratch" --temp
    annotation=$(cat "$scratch")
    rm "$scratch"
}
# ---- main flow -------------------------------------------------------------
# Interactive publish: confirm the branch, pick the next version, optionally
# rebuild the docs, then commit, tag and push the new version tag.

branch_name=`git rev-parse --abbrev-ref HEAD`

# Warn (and allow aborting) when publishing from a branch other than master.
if [[ $branch_name != "master" ]]; then
    while [[ $branch_warned != "yes" && $branch_warned != "y" &&
        $branch_warned != "no" && $branch_warned != "n" ]]; do
        read -p "Currently on branch $branch_name. Publish from here (instead of master)? ((y)es or (n)o) " branch_warned
    done

    if [[ $branch_warned == "no" || $branch_warned == "n" ]]; then
        echo "Operation aborted."
        exit 1
    fi
fi

# Sets $new_version and $version_type (function defined earlier in this file).
get_version

# Re-ask each yes/no question until a recognised answer is given.
while [[ $refresh_docs != "yes" && $refresh_docs != "y" &&
    $refresh_docs != "no" && $refresh_docs != "n" ]]; do
    read -p "Refresh docs? ((y)es or (n)o) " refresh_docs
done

while [[ $annotate != "yes" && $annotate != "y" &&
    $annotate != "no" && $annotate != "n" ]]; do
    read -p "Annotate commit? ((y)es or (n)o) " annotate
done

# Final confirmation: an explicit "no" aborts; anything unrecognised re-asks.
while [[ $confirm != "yes" && $confirm != "y" &&
    $confirm != "no" && $confirm != "n" ]]; do
    echo "Publishing new $version_type version: $new_version."
    echo "Refresh docs? $refresh_docs"
    echo "Annotate? ${annotate}"
    read -p "Confirm pusblishing? ((y)es or (n)o) " confirm

    if [[ $confirm == "no" || $confirm == "n" ]]; then
        echo "Publishing canceled by the user. Aborting."
        exit 1
    fi
done

# Regenerate the Doxygen docs stamped with the new version, and stage them.
if [[ $refresh_docs == "yes" || $refresh_docs == "y" ]]; then
    export DOXYGEN_PROJECT_NUMBER=$new_version
    make clean && make docs
    git add docs
fi

# Sets $annotation via an interactive editor (function defined earlier).
if [[ $annotate == "yes" || $annotate == "y" ]]; then
    get_annotation
fi

# Record the new version, commit, then create and push the annotated tag.
# NOTE(review): only the tag ref is pushed here, not the branch — confirm
# that is intended.
echo $new_version > .version
git add .version
git commit -m "Automatically bumped version to $new_version"
git tag -a $new_version -m "$annotation"
git push origin $new_version
| true
|
7e8988da0cc08c8ca626442187bc25c0e1be9c55
|
Shell
|
shrikantb-tux/scripting
|
/python/wow_1.sh
|
UTF-8
| 1,424
| 3.890625
| 4
|
[] |
no_license
|
#! /bin/bash
# This is simple shell script with below exercises.

# declare an array of colors
declare -a listnew=(yellow blue organge green)

# while loop over array indices
# BUG FIX: the original "while listnew" tried to execute a command named
# "listnew" (command not found), so the body never ran.
i=0
while [ $i -lt ${#listnew[@]} ]
do
    echo "${listnew[$i]}"
    i=$((i + 1))
done

# for loop over the array elements
# BUG FIX: "for colors in listnew" iterated over the literal word "listnew";
# "${listnew[@]}" expands to the actual elements.
for colors in "${listnew[@]}"
do
    echo "$colors"
done

# conditional for file exists or not
if [[ -f next1/unmounted ]]
then
    echo "yes.. unmounted file exist"
else
    echo "umounted file does not exist"
fi

# conditional for dir exists or not
# NOTE(review): this checks "nest1" while the file check above used "next1" —
# verify which spelling is intended.
if [[ -d nest1 ]]
then
    echo "yes.. unmounted dir exist"
else
    echo "umounted dir does not exist"
fi

# variables passed to script can be interprepated here..
# echo -n : using -n will prevent adding new line on print
echo $0 and $1 and $2
echo $@
echo $#

# array (bash arrays are 0-indexed, so [1] is really the *second* element)
declare -a sports=(ball basket win new)
echo "first ${sports[1]}"
echo "second ${sports[2]}"
echo "third ${sports[3]}"
echo "array list :- ${sports[*]} "
moresports=("${sports[@]}" tennis)   # quoted copy preserves each element
echo "more sport array list :- ${moresports[*]} "

# declare as int, even first time decleration as int apart from int
declare -i t=100
echo ""\$t = $t""

# we can unset the variable in between as well.
#unset t
echo "\"$t = $t"

# hello workd just print and changing the dirs.
echo "Hello World !"
cd ~
pwd
cd -

# actual command addition and execution
echo " I am in \"`pwd`\" "
echo " Today's date is \"`date +%D`\" "

# script can return an integer, useful for writing conditional scripts
N=23
exit $N
| true
|
c4ccc30ac16d73e8b3c7ec29ff6d3a2f22d19f9e
|
Shell
|
oamg/leapp-repository
|
/repos/system_upgrade/common/tools/removerpmgpgkeys
|
UTF-8
| 233
| 3.421875
| 3
|
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/sh

# Erase each RPM GPG key named on the command line. Keep going after a
# failure so every key gets a chance to be removed, but exit non-zero if
# any removal failed.
rc=0
for gpg_key in "$@"; do
    echo >&2 "Info: Removing RPM GPG key: $gpg_key"
    if ! rpm --erase "$gpg_key"; then
        rc=1
        echo >&2 "Error: Failed to remove RPM GPG key: $gpg_key"
    fi
done
exit $rc
| true
|
cb26a3e5e09dba872e134d640f3eebcef4878f99
|
Shell
|
n3rd4n1/bitsNpieces
|
/tc-bfs/scripts/uclibc-stage2.sh
|
UTF-8
| 446
| 2.8125
| 3
|
[] |
no_license
|
#!/bin/bash
set -e

# Expected to provide WORKDIR_SRC, WORKDIR_BLD, SYSROOT, INSTALL,
# CROSS_COMPILE and MAKE_FLAGS — TODO confirm against env.sh.
. ./env.sh

# Stage-2 uClibc: build against the sysroot's kernel headers and install
# the resulting runtime into the sysroot.
NAME=uclibc
SRC="${WORKDIR_SRC}/${NAME}"
BLD="${WORKDIR_BLD}/${NAME}-stage2"
FLAGS="V=1 ARCH_CFLAGS= CPU_CFLAGS= KERNEL_SOURCE=${SYSROOT}/usr/ KERNEL_HEADERS=${SYSROOT}/usr/include/ CROSS_COMPILER_PREFIX=${INSTALL}/bin/${CROSS_COMPILE} RUNTIME_PREFIX=/ DEVEL_PREFIX=/usr/"

# Start from a clean build directory seeded with the sources.
rm -rf "${BLD}"
mkdir -p "${BLD}"
cd "${BLD}"
cp -r "${SRC}"/*/. .

# FLAGS/MAKE_FLAGS stay unquoted on purpose: each word is a separate
# make argument.
make ${MAKE_FLAGS} ${FLAGS}
make ${MAKE_FLAGS} ${FLAGS} PREFIX="${SYSROOT}" install
| true
|
a23b15d8dc1b19f85c154039cb106bd3c39d9e90
|
Shell
|
4179e1/misc
|
/shell/bash/a.sh
|
UTF-8
| 139
| 2.796875
| 3
|
[] |
no_license
|
#!/bin/bash

# Demonstrates how $#, $* and $@ behave when the script's arguments are
# forwarded to a function with different quoting styles.
func ()
{
echo $#   # number of arguments the function received
echo $*   # all arguments (unquoted: subject to word splitting)
echo $@   # all arguments (unquoted: behaves like unquoted $* here)
}

func $*             # forwards script args after word splitting
func $@             # same result as above when unquoted
func "$*"           # all script args joined into one single word
func "$@"           # each script arg preserved as its own word
func "abc def"      # one argument containing a space
func '"abc" "def"'  # one argument containing literal quote characters
| true
|
35d49e3d11e36c1bfa22e3a7d06a26de320c947f
|
Shell
|
Ascend/ModelZoo-PyTorch
|
/PyTorch/dev/cv/image_classification/CycleGAN_ID0521_for_PyTorch/weights/download_weights.sh
|
UTF-8
| 776
| 3.078125
| 3
|
[
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
#!/bin/bash

# Download and unpack pre-trained CycleGAN weights for the dataset named
# in $1 from the GitHub release.
FILE=$1

# Validate the dataset name against the published set. A case statement
# replaces the original 12-way [[ ... && ... ]] chain.
case "${FILE}" in
  apple2orange|summer2winter_yosemite|horse2zebra|monet2photo|cezanne2photo|ukiyoe2photo|vangogh2photo|maps|facades|iphone2dslr_flower|ae_photos|selfie2anime)
    ;;
  *)
    # BUG FIX: the message now lists ae_photos, which the check accepts
    # but the original message omitted.
    echo "Available datasets are: apple2orange, summer2winter_yosemite, horse2zebra, monet2photo, cezanne2photo, ukiyoe2photo, vangogh2photo, maps, facades, iphone2dslr_flower, ae_photos, selfie2anime"
    exit 1
    ;;
esac

URL=https://github.com/Lornatang/CycleGAN-PyTorch/releases/download/1.0/${FILE}.zip
ZIP_FILE=${FILE}.zip
TARGET_DIR=${FILE}

wget -N "${URL}" -O "${ZIP_FILE}"
unzip "${ZIP_FILE}"
rm "${ZIP_FILE}"
| true
|
301010dbd4e1519a0f8315707c50708f6a2a6e94
|
Shell
|
area3001/knixx-fez
|
/board/raspberrypi-fez/post-build.sh
|
UTF-8
| 594
| 3.390625
| 3
|
[] |
no_license
|
#!/bin/sh

set -u
set -e

# Add a console on tty1
if [ -e ${TARGET_DIR}/etc/inittab ]; then
    grep -qE '^tty1::' ${TARGET_DIR}/etc/inittab || \
	sed -i '/GENERIC_SERIAL/a\
tty1::respawn:/sbin/getty -L tty1 0 vt100 # HDMI console' ${TARGET_DIR}/etc/inittab
fi

# add release version to /etc/os-release
VERSION=$(git --git-dir=../.git --work-tree=../ describe --dirty --always --tags)
echo "Building version: ${VERSION}"
echo "SYSTEM=${VERSION}" >> "$TARGET_DIR/etc/os-release" 2> /dev/null

# BUG FIX: append (>>) the last commit date instead of truncating (>), which
# discarded the SYSTEM= line written just above. If truncation was intended,
# the append above was dead code — verify with the image maintainer.
(d="$(git --git-dir=../.git log --date=iso --pretty=%ad -1)"; echo "$d") >> "$TARGET_DIR/etc/os-release"
| true
|
e16e95c56a45efcee4d222b69eade529f5ff429a
|
Shell
|
jfroment/seedbox
|
/tools/init-setup-nextcloud.sh
|
UTF-8
| 598
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
#!/bin/sh

echo "[$0] Loading variables..."
# BUG FIX: "source" is a bashism and fails under POSIX /bin/sh
# implementations such as dash; use the portable "." command with an
# explicit path (POSIX "." searches PATH for a bare filename).
. ./.env

echo "[$0] Installing nextcloud..."
# Run the occ installer inside the nextcloud container as user "abc";
# DB and admin credentials come from the .env file sourced above.
docker exec -it -u abc -w /config/www/nextcloud \
    nextcloud bash -c " \
        php occ maintenance:install \
        --database \"mysql\" \
        --database-host \"nextcloud-db\" \
        --database-name \"${MYSQL_DATABASE}\" \
        --database-user \"${MYSQL_USER}\" \
        --database-pass \"${MYSQL_PASSWORD}\" \
        --admin-user \"${NEXTCLOUD_ADMIN_USER}\" \
        --admin-pass \"${NEXTCLOUD_ADMIN_PASSWORD}\" \
        --admin-email \"${ACME_MAIL}\" \
        --data-dir \"/data\" \
    "

echo "[$0] Done."
| true
|
cfd113f5a609241b31142ba7d114dcfa7a068f9e
|
Shell
|
AnilKumarchunduri/Demo
|
/build.sh
|
UTF-8
| 1,053
| 3.03125
| 3
|
[] |
no_license
|
#!/bin/bash

# Check out (or update) the SVN working copy for COMPONENT_NAME/BRANCH_NAME
# under WORKSPACE, and prepare the RPM output directory.
COMPONENT_NAME=$1
BRANCH_NAME=$2
WORKSPACE=$3

RPM_DIR="${WORKSPACE}/${COMPONENT_NAME}/RPMs_${BRANCH_NAME}"
mkdir -p "${WORKSPACE}/${COMPONENT_NAME}"
mkdir -p "${RPM_DIR}"

# Lower-cased component name, used in the branch URL naming scheme.
COMP=$(echo "$COMPONENT_NAME" | tr '[A-Z]' '[a-z]')

cd "${WORKSPACE}/${COMPONENT_NAME}"
chmod -R 777 "${BRANCH_NAME}"

# Update the working copy when it already exists, otherwise do a fresh
# checkout. Replaces four near-identical copies of this logic.
#   $1 = svn URL for the initial checkout
#   $2 = debug tag printed on the update path
#   $3 = debug tag printed on the checkout path
checkout_or_update() {
    if [ -d "${BRANCH_NAME}" ]; then
        cd "${BRANCH_NAME}"
        svn update
        echo "$2"
    else
        svn co "$1" --username jenkins "${WORKSPACE}/${COMPONENT_NAME}/${BRANCH_NAME}"
        cd "${WORKSPACE}/${COMPONENT_NAME}/"
        chmod -R 777 "${BRANCH_NAME}"
        cd "${BRANCH_NAME}"
        echo "$3"
    fi
}

if [ "${BRANCH_NAME}" == trunk ]; then
    # NOTE(review): this URL has an empty host ("http:///...") — verify the
    # real SVN server name before relying on this path.
    checkout_or_update "http:///svn/nmrf/${COMPONENT_NAME}/trunk" "test1" "test2"
else
    # NOTE(review): "svn..com" also looks like a redacted or typo'd host —
    # verify.
    checkout_or_update "http://svn..com/svn/nmrf/${COMPONENT_NAME}/branches/${COMP}_mrf${BRANCH_NAME}" "test3" "test4"
fi
| true
|
11aac7b399cb938a551b2e04bee57396c34460a9
|
Shell
|
mohamedafrid-lab/Python-UI
|
/cgi-bin/cpu-stress.sh
|
UTF-8
| 94
| 2.890625
| 3
|
[] |
no_license
|
#!/bin/bash

# Print the system load (uptime) once per second, 11 times (t = 0..10).
t=0
while [ "$t" -le 10 ]
do
    uptime
    sleep 1s
    # Arithmetic expansion instead of spawning the external "expr" command.
    t=$((t + 1))
done
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.